Diffstat (limited to 'test')
-rw-r--r--  test/functional/data/rpc_decodescript.json  2
-rwxr-xr-x  test/functional/feature_abortnode.py  4
-rwxr-xr-x  test/functional/feature_addrman.py  31
-rwxr-xr-x  test/functional/feature_anchors.py  63
-rwxr-xr-x  test/functional/feature_assumeutxo.py  257
-rwxr-xr-x  test/functional/feature_block.py  14
-rwxr-xr-x  test/functional/feature_blocksdir.py  4
-rwxr-xr-x  test/functional/feature_cltv.py  10
-rwxr-xr-x  test/functional/feature_config_args.py  13
-rwxr-xr-x  test/functional/feature_csv_activation.py  16
-rwxr-xr-x  test/functional/feature_dersig.py  4
-rwxr-xr-x  test/functional/feature_dirsymlinks.py  2
-rwxr-xr-x  test/functional/feature_init.py  24
-rwxr-xr-x  test/functional/feature_loadblock.py  2
-rwxr-xr-x  test/functional/feature_nulldummy.py  2
-rwxr-xr-x  test/functional/feature_pruning.py  4
-rwxr-xr-x  test/functional/feature_reindex.py  2
-rwxr-xr-x  test/functional/feature_reindex_readonly.py  70
-rwxr-xr-x  test/functional/feature_remove_pruned_files_on_startup.py  8
-rwxr-xr-x  test/functional/feature_segwit.py  17
-rwxr-xr-x  test/functional/feature_txindex_compatibility.py  35
-rwxr-xr-x  test/functional/feature_unsupported_utxo_db.py  4
-rwxr-xr-x  test/functional/interface_usdt_mempool.py  49
-rwxr-xr-x  test/functional/interface_usdt_net.py  11
-rwxr-xr-x  test/functional/interface_usdt_utxocache.py  68
-rwxr-xr-x  test/functional/interface_usdt_validation.py  5
-rwxr-xr-x  test/functional/mempool_compatibility.py  11
-rwxr-xr-x  test/functional/mempool_datacarrier.py  30
-rwxr-xr-x  test/functional/mempool_limit.py  257
-rwxr-xr-x  test/functional/mempool_persist.py  53
-rwxr-xr-x  test/functional/mempool_reorg.py  91
-rwxr-xr-x  test/functional/mempool_sigoplimit.py  46
-rwxr-xr-x  test/functional/mining_basic.py  48
-rwxr-xr-x  test/functional/mining_getblocktemplate_longpoll.py  9
-rwxr-xr-x  test/functional/p2p_addr_relay.py  12
-rwxr-xr-x  test/functional/p2p_addrfetch.py  2
-rwxr-xr-x  test/functional/p2p_addrv2_relay.py  25
-rwxr-xr-x  test/functional/p2p_blockfilters.py  56
-rwxr-xr-x  test/functional/p2p_blocksonly.py  2
-rwxr-xr-x  test/functional/p2p_compactblocks_blocksonly.py  4
-rwxr-xr-x  test/functional/p2p_filter.py  2
-rwxr-xr-x  test/functional/p2p_getaddr_caching.py  9
-rwxr-xr-x  test/functional/p2p_ibd_stalling.py  2
-rwxr-xr-x  test/functional/p2p_ibd_txrelay.py  2
-rwxr-xr-x  test/functional/p2p_invalid_block.py  2
-rwxr-xr-x  test/functional/p2p_invalid_locator.py  2
-rwxr-xr-x  test/functional/p2p_net_deadlock.py  37
-rwxr-xr-x  test/functional/p2p_orphan_handling.py  416
-rwxr-xr-x  test/functional/p2p_permissions.py  6
-rwxr-xr-x  test/functional/p2p_segwit.py  52
-rwxr-xr-x  test/functional/p2p_v2_transport.py  153
-rwxr-xr-x  test/functional/rpc_blockchain.py  4
-rwxr-xr-x  test/functional/rpc_decodescript.py  2
-rwxr-xr-x  test/functional/rpc_deriveaddresses.py  5
-rwxr-xr-x  test/functional/rpc_dumptxoutset.py  19
-rwxr-xr-x  test/functional/rpc_estimatefee.py  3
-rwxr-xr-x  test/functional/rpc_misc.py  2
-rwxr-xr-x  test/functional/rpc_net.py  177
-rwxr-xr-x  test/functional/rpc_packages.py  8
-rwxr-xr-x  test/functional/rpc_psbt.py  89
-rwxr-xr-x  test/functional/rpc_signer.py  2
-rwxr-xr-x  test/functional/rpc_signrawtransactionwithkey.py  50
-rw-r--r--  test/functional/test_framework/key.py  33
-rwxr-xr-x  test/functional/test_framework/messages.py  64
-rw-r--r--  test/functional/test_framework/netutil.py  5
-rwxr-xr-x  test/functional/test_framework/p2p.py  18
-rw-r--r--  test/functional/test_framework/script.py  19
-rw-r--r--  test/functional/test_framework/siphash.py  4
-rw-r--r--  test/functional/test_framework/socks5.py  8
-rwxr-xr-x  test/functional/test_framework/test_framework.py  43
-rwxr-xr-x  test/functional/test_framework/test_node.py  71
-rw-r--r--  test/functional/test_framework/wallet.py  42
-rwxr-xr-x  test/functional/test_runner.py  21
-rwxr-xr-x  test/functional/tool_wallet.py  72
-rwxr-xr-x  test/functional/wallet_backup.py  12
-rwxr-xr-x  test/functional/wallet_basic.py  10
-rwxr-xr-x  test/functional/wallet_bumpfee.py  90
-rwxr-xr-x  test/functional/wallet_createwallet.py  6
-rwxr-xr-x  test/functional/wallet_descriptor.py  8
-rwxr-xr-x  test/functional/wallet_dump.py  2
-rwxr-xr-x  test/functional/wallet_encryption.py  6
-rwxr-xr-x  test/functional/wallet_fundrawtransaction.py  86
-rwxr-xr-x  test/functional/wallet_hd.py  4
-rwxr-xr-x  test/functional/wallet_keypool.py  6
-rwxr-xr-x  test/functional/wallet_migration.py  135
-rwxr-xr-x  test/functional/wallet_miniscript.py  12
-rwxr-xr-x  test/functional/wallet_multisig_descriptor_psbt.py  3
-rwxr-xr-x  test/functional/wallet_pruning.py  2
-rwxr-xr-x  test/functional/wallet_resendwallettransactions.py  34
-rwxr-xr-x  test/functional/wallet_send.py  7
-rwxr-xr-x  test/functional/wallet_signer.py  9
-rwxr-xr-x  test/functional/wallet_signrawtransactionwithwallet.py  13
-rwxr-xr-x  test/functional/wallet_spend_unconfirmed.py  508
-rwxr-xr-x  test/fuzz/test_runner.py  52
-rwxr-xr-x  test/get_previous_releases.py  9
-rw-r--r--  test/lint/README.md  4
-rwxr-xr-x  test/lint/lint-assertions.py  12
-rwxr-xr-x  test/lint/lint-circular-dependencies.py  1
-rwxr-xr-x  test/lint/lint-format-strings.py  2
-rwxr-xr-x  test/lint/lint-include-guards.py  3
-rwxr-xr-x  test/lint/lint-includes.py  3
-rwxr-xr-x  test/lint/lint-logs.py  34
-rwxr-xr-x  test/lint/lint-python-utf8-encoding.py  2
-rwxr-xr-x  test/lint/lint-python.py  15
-rwxr-xr-x  test/lint/lint-shell.py  8
-rwxr-xr-x  test/lint/run-lint-format-strings.py  8
-rw-r--r--  test/sanitizer_suppressions/ubsan  16
107 files changed, 3291 insertions, 642 deletions
diff --git a/test/functional/data/rpc_decodescript.json b/test/functional/data/rpc_decodescript.json
index 5f3e725d4c..4a15ae8792 100644
--- a/test/functional/data/rpc_decodescript.json
+++ b/test/functional/data/rpc_decodescript.json
@@ -69,7 +69,7 @@
"p2sh": "2N34iiGoUUkVSPiaaTFpJjB1FR9TXQu3PGM",
"segwit": {
"asm": "0 96c2368fc30514a438a8bd909f93c49a1549d77198ccbdb792043b666cb24f42",
- "desc": "wsh(raw(02eeee))#gtay4y0z",
+ "desc": "addr(bcrt1qjmprdr7rq522gw9ghkgfly7yng25n4m3nrxtmdujqsakvm9jfapqk795l5)#5akkdska",
"hex": "002096c2368fc30514a438a8bd909f93c49a1549d77198ccbdb792043b666cb24f42",
"address": "bcrt1qjmprdr7rq522gw9ghkgfly7yng25n4m3nrxtmdujqsakvm9jfapqk795l5",
"type": "witness_v0_scripthash",
diff --git a/test/functional/feature_abortnode.py b/test/functional/feature_abortnode.py
index afee9597ad..740d3b7f0e 100755
--- a/test/functional/feature_abortnode.py
+++ b/test/functional/feature_abortnode.py
@@ -25,7 +25,7 @@ class AbortNodeTest(BitcoinTestFramework):
self.generate(self.nodes[0], 3, sync_fun=self.no_op)
# Deleting the undo file will result in reorg failure
- (self.nodes[0].chain_path / "blocks" / "rev00000.dat").unlink()
+ (self.nodes[0].blocks_path / "rev00000.dat").unlink()
# Connecting to a node with a more work chain will trigger a reorg
# attempt.
@@ -36,7 +36,7 @@ class AbortNodeTest(BitcoinTestFramework):
# Check that node0 aborted
self.log.info("Waiting for crash")
- self.nodes[0].wait_until_stopped(timeout=5, expect_error=True)
+ self.nodes[0].wait_until_stopped(timeout=5, expect_error=True, expected_stderr="Error: A fatal internal error occurred, see debug.log for details")
self.log.info("Node crashed - now verifying restart fails")
self.nodes[0].assert_start_raises_init_error()
diff --git a/test/functional/feature_addrman.py b/test/functional/feature_addrman.py
index 7877f9d302..9839993115 100755
--- a/test/functional/feature_addrman.py
+++ b/test/functional/feature_addrman.py
@@ -9,12 +9,12 @@ import re
import struct
from test_framework.messages import ser_uint256, hash256
+from test_framework.netutil import ADDRMAN_NEW_BUCKET_COUNT, ADDRMAN_TRIED_BUCKET_COUNT, ADDRMAN_BUCKET_SIZE
from test_framework.p2p import MAGIC_BYTES
from test_framework.test_framework import BitcoinTestFramework
from test_framework.test_node import ErrorMatch
from test_framework.util import assert_equal
-
def serialize_addrman(
*,
format=1,
@@ -32,12 +32,12 @@ def serialize_addrman(
r += struct.pack("B", format)
r += struct.pack("B", INCOMPATIBILITY_BASE + lowest_compatible)
r += ser_uint256(bucket_key)
- r += struct.pack("i", len_new or len(new))
- r += struct.pack("i", len_tried or len(tried))
+ r += struct.pack("<i", len_new or len(new))
+ r += struct.pack("<i", len_tried or len(tried))
ADDRMAN_NEW_BUCKET_COUNT = 1 << 10
- r += struct.pack("i", ADDRMAN_NEW_BUCKET_COUNT ^ (1 << 30))
+ r += struct.pack("<i", ADDRMAN_NEW_BUCKET_COUNT ^ (1 << 30))
for _ in range(ADDRMAN_NEW_BUCKET_COUNT):
- r += struct.pack("i", 0)
+ r += struct.pack("<i", 0)
checksum = hash256(r)
r += mock_checksum or checksum
return r
@@ -117,17 +117,34 @@ class AddrmanTest(BitcoinTestFramework):
self.log.info("Check that corrupt addrman cannot be read (len_tried)")
self.stop_node(0)
+ max_len_tried = ADDRMAN_TRIED_BUCKET_COUNT * ADDRMAN_BUCKET_SIZE
write_addrman(peers_dat, len_tried=-1)
self.nodes[0].assert_start_raises_init_error(
- expected_msg=init_error("Corrupt AddrMan serialization: nTried=-1, should be in \\[0, 16384\\]:.*"),
+ expected_msg=init_error(f"Corrupt AddrMan serialization: nTried=-1, should be in \\[0, {max_len_tried}\\]:.*"),
+ match=ErrorMatch.FULL_REGEX,
+ )
+
+ self.log.info("Check that corrupt addrman cannot be read (large len_tried)")
+ write_addrman(peers_dat, len_tried=max_len_tried + 1)
+ self.nodes[0].assert_start_raises_init_error(
+ expected_msg=init_error(f"Corrupt AddrMan serialization: nTried={max_len_tried + 1}, should be in \\[0, {max_len_tried}\\]:.*"),
match=ErrorMatch.FULL_REGEX,
)
self.log.info("Check that corrupt addrman cannot be read (len_new)")
self.stop_node(0)
+ max_len_new = ADDRMAN_NEW_BUCKET_COUNT * ADDRMAN_BUCKET_SIZE
write_addrman(peers_dat, len_new=-1)
self.nodes[0].assert_start_raises_init_error(
- expected_msg=init_error("Corrupt AddrMan serialization: nNew=-1, should be in \\[0, 65536\\]:.*"),
+ expected_msg=init_error(f"Corrupt AddrMan serialization: nNew=-1, should be in \\[0, {max_len_new}\\]:.*"),
+ match=ErrorMatch.FULL_REGEX,
+ )
+
+ self.log.info("Check that corrupt addrman cannot be read (large len_new)")
+ self.stop_node(0)
+ write_addrman(peers_dat, len_new=max_len_new + 1)
+ self.nodes[0].assert_start_raises_init_error(
+ expected_msg=init_error(f"Corrupt AddrMan serialization: nNew={max_len_new + 1}, should be in \\[0, {max_len_new}\\]:.*"),
match=ErrorMatch.FULL_REGEX,
)
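Note on the `struct.pack` changes in the hunk above: they pin the addrman integer fields to explicit little-endian. A minimal standalone sketch of the difference (not part of the test itself):

    import struct

    # "i" uses native byte order and alignment; "<i" is always an unpadded
    # 4-byte little-endian field, matching the on-disk peers.dat layout the
    # test writes, regardless of the host platform.
    assert struct.pack("<i", 1 << 10) == b"\x00\x04\x00\x00"
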
diff --git a/test/functional/feature_anchors.py b/test/functional/feature_anchors.py
index 0961f21a40..3b75a06d9e 100755
--- a/test/functional/feature_anchors.py
+++ b/test/functional/feature_anchors.py
@@ -6,12 +6,15 @@
import os
-from test_framework.p2p import P2PInterface
+from test_framework.p2p import P2PInterface, P2P_SERVICES
+from test_framework.socks5 import Socks5Configuration, Socks5Server
+from test_framework.messages import CAddress, hash256
from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import check_node_connections
+from test_framework.util import check_node_connections, assert_equal, p2p_port
INBOUND_CONNECTIONS = 5
BLOCK_RELAY_CONNECTIONS = 2
+ONION_ADDR = "pg6mmjiyjmcrsslvykfwnntlaru7p5svn6y2ymmju6nubxndf4pscryd.onion:8333"
class AnchorsTest(BitcoinTestFramework):
@@ -54,7 +57,7 @@ class AnchorsTest(BitcoinTestFramework):
else:
inbound_nodes_port.append(hex(int(addr_split[1]))[2:])
- self.log.info("Stop node 0")
+ self.log.debug("Stop node")
self.stop_node(0)
# It should contain only the block-relay-only addresses
@@ -78,12 +81,64 @@ class AnchorsTest(BitcoinTestFramework):
tweaked_contents[20:20] = b'1'
out_file_handler.write(bytes(tweaked_contents))
- self.log.info("Start node")
+ self.log.debug("Start node")
self.start_node(0)
self.log.info("When node starts, check if anchors.dat doesn't exist anymore")
assert not os.path.exists(node_anchors_path)
+ self.log.info("Ensure addrv2 support")
+ # Use proxies to catch outbound connections to networks with 256-bit addresses
+ onion_conf = Socks5Configuration()
+ onion_conf.auth = True
+ onion_conf.unauth = True
+ onion_conf.addr = ('127.0.0.1', p2p_port(self.num_nodes))
+ onion_conf.keep_alive = True
+ onion_proxy = Socks5Server(onion_conf)
+ onion_proxy.start()
+ self.restart_node(0, extra_args=[f"-onion={onion_conf.addr[0]}:{onion_conf.addr[1]}"])
+
+ self.log.info("Add 256-bit-address block-relay-only connections to node")
+ self.nodes[0].addconnection(ONION_ADDR, 'block-relay-only')
+
+ self.log.debug("Stop node")
+ with self.nodes[0].assert_debug_log([f"DumpAnchors: Flush 1 outbound block-relay-only peer addresses to anchors.dat"]):
+ self.stop_node(0)
+ # Manually close keep_alive proxy connection
+ onion_proxy.stop()
+
+ self.log.info("Check for addrv2 addresses in anchors.dat")
+ caddr = CAddress()
+ caddr.net = CAddress.NET_TORV3
+ caddr.ip, port_str = ONION_ADDR.split(":")
+ caddr.port = int(port_str)
+ # TorV3 addrv2 serialization:
+ # time(4) | services(1) | networkID(1) | address length(1) | address(32)
+ expected_pubkey = caddr.serialize_v2()[7:39].hex()
+
+ # position of services byte of first addr in anchors.dat
+ # network magic, vector length, version, nTime
+ services_index = 4 + 1 + 4 + 4
+ data = bytes()
+ with open(node_anchors_path, "rb") as file_handler:
+ data = file_handler.read()
+ assert_equal(data[services_index], 0x00) # services == NONE
+ anchors2 = data.hex()
+ assert expected_pubkey in anchors2
+
+ with open(node_anchors_path, "wb") as file_handler:
+ # Modify service flags for this address even though we never connected to it.
+ # This is necessary because on restart we will not attempt an anchor connection
+ # to a host without our required services, even if its address is in the anchors.dat file
+ new_data = bytearray(data)[:-32]
+ new_data[services_index] = P2P_SERVICES
+ new_data_hash = hash256(new_data)
+ file_handler.write(new_data + new_data_hash)
+
+ self.log.info("Restarting node attempts to reconnect to anchors")
+ with self.nodes[0].assert_debug_log([f"Trying to make an anchor connection to {ONION_ADDR}"]):
+ self.start_node(0, extra_args=[f"-onion={onion_conf.addr[0]}:{onion_conf.addr[1]}"])
+
if __name__ == "__main__":
AnchorsTest().main()
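The slice `caddr.serialize_v2()[7:39]` above relies on the addrv2 (BIP 155) TORv3 record layout described in the in-test comment. A standalone sketch of those offsets, using placeholder values instead of the framework's CAddress:

    import struct

    # TORv3 addrv2 record: time(4, LE) | services(compactsize, here 0 -> 1 byte)
    # | network id(1, TORV3 = 4) | address length(1, 32) | address(32 bytes).
    addr32 = bytes(range(32))          # placeholder 32-byte TORv3 address
    record = struct.pack("<I", 0)      # time
    record += bytes([0x00])            # services = NONE
    record += bytes([0x04])            # network id = TORV3
    record += bytes([32])              # address length
    record += addr32
    assert record[7:39] == addr32      # the same slice used for expected_pubkey
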
diff --git a/test/functional/feature_assumeutxo.py b/test/functional/feature_assumeutxo.py
new file mode 100755
index 0000000000..be0715df32
--- /dev/null
+++ b/test/functional/feature_assumeutxo.py
@@ -0,0 +1,257 @@
+#!/usr/bin/env python3
+# Copyright (c) 2021 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test for assumeutxo, a means of quickly bootstrapping a node using
+a serialized version of the UTXO set at a certain height, which corresponds
+to a hash that has been compiled into bitcoind.
+
+The assumeutxo value generated and used here is committed to in
+`CRegTestParams::m_assumeutxo_data` in `src/chainparams.cpp`.
+
+## Possible test improvements
+
+- TODO: test submitting a transaction and verifying it appears in mempool
+- TODO: test what happens with -reindex and -reindex-chainstate before the
+ snapshot is validated, and make sure it's deleted successfully.
+
+Interesting test cases could be loading an assumeutxo snapshot file with:
+
+- TODO: An invalid hash
+- TODO: Valid hash but invalid snapshot file (bad coin height or truncated file or
+ bad other serialization)
+- TODO: Valid snapshot file, but referencing an unknown block
+- TODO: Valid snapshot file, but referencing a snapshot block that turns out to be
+ invalid, or has an invalid parent
+- TODO: Valid snapshot file and snapshot block, but the block is not on the
+ most-work chain
+
+Interesting starting states could be loading a snapshot when the current chain tip is:
+
+- TODO: An ancestor of snapshot block
+- TODO: Not an ancestor of the snapshot block but has less work
+- TODO: The snapshot block
+- TODO: A descendant of the snapshot block
+- TODO: Not an ancestor or a descendant of the snapshot block and has more work
+
+"""
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import assert_equal, wait_until_helper
+
+START_HEIGHT = 199
+SNAPSHOT_BASE_HEIGHT = 299
+FINAL_HEIGHT = 399
+COMPLETE_IDX = {'synced': True, 'best_block_height': FINAL_HEIGHT}
+
+
+class AssumeutxoTest(BitcoinTestFramework):
+
+ def set_test_params(self):
+ """Use the pregenerated, deterministic chain up to height 199."""
+ self.num_nodes = 3
+ self.rpc_timeout = 120
+ self.extra_args = [
+ [],
+ ["-fastprune", "-prune=1", "-blockfilterindex=1", "-coinstatsindex=1"],
+ ["-txindex=1", "-blockfilterindex=1", "-coinstatsindex=1"],
+ ]
+
+ def setup_network(self):
+ """Start with the nodes disconnected so that one can generate a snapshot
+ including blocks the other hasn't yet seen."""
+ self.add_nodes(3)
+ self.start_nodes(extra_args=self.extra_args)
+
+ def run_test(self):
+ """
+ Bring up two (disconnected) nodes, mine some new blocks on the first,
+ and generate a UTXO snapshot.
+
+ Load the snapshot into the second, ensure it syncs to tip and completes
+ background validation when connected to the first.
+ """
+ n0 = self.nodes[0]
+ n1 = self.nodes[1]
+ n2 = self.nodes[2]
+
+ # Mock time for a deterministic chain
+ for n in self.nodes:
+ n.setmocktime(n.getblockheader(n.getbestblockhash())['time'])
+
+ self.sync_blocks()
+
+ def no_sync():
+ pass
+
+ # Generate a series of blocks that `n0` will have in the snapshot,
+ # but that n1 doesn't yet see. In order for the snapshot to activate,
+ # though, we have to ferry over the new headers to n1 so that it
+ # isn't waiting forever to see the header of the snapshot's base block
+ # while disconnected from n0.
+ for i in range(100):
+ self.generate(n0, nblocks=1, sync_fun=no_sync)
+ newblock = n0.getblock(n0.getbestblockhash(), 0)
+
+ # make n1 aware of the new header, but don't give it the block.
+ n1.submitheader(newblock)
+ n2.submitheader(newblock)
+
+ # Ensure everyone is seeing the same headers.
+ for n in self.nodes:
+ assert_equal(n.getblockchaininfo()["headers"], SNAPSHOT_BASE_HEIGHT)
+
+ self.log.info("-- Testing assumeutxo + some indexes + pruning")
+
+ assert_equal(n0.getblockcount(), SNAPSHOT_BASE_HEIGHT)
+ assert_equal(n1.getblockcount(), START_HEIGHT)
+
+ self.log.info(f"Creating a UTXO snapshot at height {SNAPSHOT_BASE_HEIGHT}")
+ dump_output = n0.dumptxoutset('utxos.dat')
+
+ assert_equal(
+ dump_output['txoutset_hash'],
+ 'ef45ccdca5898b6c2145e4581d2b88c56564dd389e4bd75a1aaf6961d3edd3c0')
+ assert_equal(dump_output['nchaintx'], 300)
+ assert_equal(n0.getblockchaininfo()["blocks"], SNAPSHOT_BASE_HEIGHT)
+
+ # Mine more blocks on top of the snapshot that n1 hasn't yet seen. This
+ # will allow us to test n1's sync-to-tip on top of a snapshot.
+ self.generate(n0, nblocks=100, sync_fun=no_sync)
+
+ assert_equal(n0.getblockcount(), FINAL_HEIGHT)
+ assert_equal(n1.getblockcount(), START_HEIGHT)
+
+ assert_equal(n0.getblockchaininfo()["blocks"], FINAL_HEIGHT)
+
+ self.log.info(f"Loading snapshot into second node from {dump_output['path']}")
+ loaded = n1.loadtxoutset(dump_output['path'])
+ assert_equal(loaded['coins_loaded'], SNAPSHOT_BASE_HEIGHT)
+ assert_equal(loaded['base_height'], SNAPSHOT_BASE_HEIGHT)
+
+ monitor = n1.getchainstates()
+ assert_equal(monitor['normal']['blocks'], START_HEIGHT)
+ assert_equal(monitor['snapshot']['blocks'], SNAPSHOT_BASE_HEIGHT)
+ assert_equal(monitor['snapshot']['snapshot_blockhash'], dump_output['base_hash'])
+
+ assert_equal(n1.getblockchaininfo()["blocks"], SNAPSHOT_BASE_HEIGHT)
+
+ PAUSE_HEIGHT = FINAL_HEIGHT - 40
+
+ self.log.info("Restarting node to stop at height %d", PAUSE_HEIGHT)
+ self.restart_node(1, extra_args=[
+ f"-stopatheight={PAUSE_HEIGHT}", *self.extra_args[1]])
+
+ # Finally connect the nodes and let them sync.
+ #
+ # Set `wait_for_connect=False` to avoid a race between performing connection
+ # assertions and the -stopatheight tripping.
+ self.connect_nodes(0, 1, wait_for_connect=False)
+
+ n1.wait_until_stopped(timeout=5)
+
+ self.log.info("Checking that blocks are segmented on disk")
+ assert self.has_blockfile(n1, "00000"), "normal blockfile missing"
+ assert self.has_blockfile(n1, "00001"), "assumed blockfile missing"
+ assert not self.has_blockfile(n1, "00002"), "too many blockfiles"
+
+ self.log.info("Restarted node before snapshot validation completed, reloading...")
+ self.restart_node(1, extra_args=self.extra_args[1])
+ self.connect_nodes(0, 1)
+
+ self.log.info(f"Ensuring snapshot chain syncs to tip. ({FINAL_HEIGHT})")
+
+ def check_for_final_height():
+ chainstates = n1.getchainstates()
+ # The background validation may have completed before we run our first
+ # check, so accept a final blockheight from either chainstate type.
+ cs = chainstates.get('snapshot') or chainstates.get('normal')
+ return cs['blocks'] == FINAL_HEIGHT
+
+ wait_until_helper(check_for_final_height)
+ self.sync_blocks(nodes=(n0, n1))
+
+ self.log.info("Ensuring background validation completes")
+ # N.B.: the `snapshot` key disappears once the background validation is complete.
+ wait_until_helper(lambda: not n1.getchainstates().get('snapshot'))
+
+ # Ensure indexes have synced.
+ completed_idx_state = {
+ 'basic block filter index': COMPLETE_IDX,
+ 'coinstatsindex': COMPLETE_IDX,
+ }
+ self.wait_until(lambda: n1.getindexinfo() == completed_idx_state)
+
+
+ for i in (0, 1):
+ n = self.nodes[i]
+ self.log.info(f"Restarting node {i} to ensure (Check|Load)BlockIndex passes")
+ self.restart_node(i, extra_args=self.extra_args[i])
+
+ assert_equal(n.getblockchaininfo()["blocks"], FINAL_HEIGHT)
+
+ assert_equal(n.getchainstates()['normal']['blocks'], FINAL_HEIGHT)
+ assert_equal(n.getchainstates().get('snapshot'), None)
+
+ if i != 0:
+ # Ensure indexes have synced for the assumeutxo node
+ self.wait_until(lambda: n.getindexinfo() == completed_idx_state)
+
+
+ # Node 2: all indexes + reindex
+ # -----------------------------
+
+ self.log.info("-- Testing all indexes + reindex")
+ assert_equal(n2.getblockcount(), START_HEIGHT)
+
+ self.log.info(f"Loading snapshot into third node from {dump_output['path']}")
+ loaded = n2.loadtxoutset(dump_output['path'])
+ assert_equal(loaded['coins_loaded'], SNAPSHOT_BASE_HEIGHT)
+ assert_equal(loaded['base_height'], SNAPSHOT_BASE_HEIGHT)
+
+ monitor = n2.getchainstates()
+ assert_equal(monitor['normal']['blocks'], START_HEIGHT)
+ assert_equal(monitor['snapshot']['blocks'], SNAPSHOT_BASE_HEIGHT)
+ assert_equal(monitor['snapshot']['snapshot_blockhash'], dump_output['base_hash'])
+
+ self.connect_nodes(0, 2)
+ wait_until_helper(lambda: n2.getchainstates()['snapshot']['blocks'] == FINAL_HEIGHT)
+ self.sync_blocks()
+
+ self.log.info("Ensuring background validation completes")
+ wait_until_helper(lambda: not n2.getchainstates().get('snapshot'))
+
+ completed_idx_state = {
+ 'basic block filter index': COMPLETE_IDX,
+ 'coinstatsindex': COMPLETE_IDX,
+ 'txindex': COMPLETE_IDX,
+ }
+ self.wait_until(lambda: n2.getindexinfo() == completed_idx_state)
+
+ for i in (0, 2):
+ n = self.nodes[i]
+ self.log.info(f"Restarting node {i} to ensure (Check|Load)BlockIndex passes")
+ self.restart_node(i, extra_args=self.extra_args[i])
+
+ assert_equal(n.getblockchaininfo()["blocks"], FINAL_HEIGHT)
+
+ assert_equal(n.getchainstates()['normal']['blocks'], FINAL_HEIGHT)
+ assert_equal(n.getchainstates().get('snapshot'), None)
+
+ if i != 0:
+ # Ensure indexes have synced for the assumeutxo node
+ self.wait_until(lambda: n.getindexinfo() == completed_idx_state)
+
+ self.log.info("Test -reindex-chainstate of an assumeutxo-synced node")
+ self.restart_node(2, extra_args=[
+ '-reindex-chainstate=1', *self.extra_args[2]])
+ assert_equal(n2.getblockchaininfo()["blocks"], FINAL_HEIGHT)
+ wait_until_helper(lambda: n2.getblockcount() == FINAL_HEIGHT)
+
+ self.log.info("Test -reindex of an assumeutxo-synced node")
+ self.restart_node(2, extra_args=['-reindex=1', *self.extra_args[2]])
+ self.connect_nodes(0, 2)
+ wait_until_helper(lambda: n2.getblockcount() == FINAL_HEIGHT)
+
+
+if __name__ == '__main__':
+ AssumeutxoTest().main()
diff --git a/test/functional/feature_block.py b/test/functional/feature_block.py
index 765db97445..58ef1e761d 100755
--- a/test/functional/feature_block.py
+++ b/test/functional/feature_block.py
@@ -43,8 +43,7 @@ from test_framework.script import (
OP_INVALIDOPCODE,
OP_RETURN,
OP_TRUE,
- SIGHASH_ALL,
- LegacySignatureHash,
+ sign_input_legacy,
)
from test_framework.script_util import (
script_to_p2sh_script,
@@ -539,12 +538,8 @@ class FullBlockTest(BitcoinTestFramework):
# second input is corresponding P2SH output from b39
tx.vin.append(CTxIn(COutPoint(b39.vtx[i].sha256, 0), b''))
# Note: must pass the redeem_script (not p2sh_script) to the signature hash function
- (sighash, err) = LegacySignatureHash(redeem_script, tx, 1, SIGHASH_ALL)
- sig = self.coinbase_key.sign_ecdsa(sighash) + bytes(bytearray([SIGHASH_ALL]))
- scriptSig = CScript([sig, redeem_script])
-
- tx.vin[1].scriptSig = scriptSig
- tx.rehash()
+ tx.vin[1].scriptSig = CScript([redeem_script])
+ sign_input_legacy(tx, 1, redeem_script, self.coinbase_key)
new_txs.append(tx)
lastOutpoint = COutPoint(tx.sha256, 0)
@@ -1338,8 +1333,7 @@ class FullBlockTest(BitcoinTestFramework):
if (scriptPubKey[0] == OP_TRUE): # an anyone-can-spend
tx.vin[0].scriptSig = CScript()
return
- (sighash, err) = LegacySignatureHash(spend_tx.vout[0].scriptPubKey, tx, 0, SIGHASH_ALL)
- tx.vin[0].scriptSig = CScript([self.coinbase_key.sign_ecdsa(sighash) + bytes(bytearray([SIGHASH_ALL]))])
+ sign_input_legacy(tx, 0, spend_tx.vout[0].scriptPubKey, self.coinbase_key)
def create_and_sign_transaction(self, spend_tx, value, script=CScript([OP_TRUE])):
tx = self.create_tx(spend_tx, 0, value, script)
diff --git a/test/functional/feature_blocksdir.py b/test/functional/feature_blocksdir.py
index 99763ab97f..76b9277e2f 100755
--- a/test/functional/feature_blocksdir.py
+++ b/test/functional/feature_blocksdir.py
@@ -18,7 +18,7 @@ class BlocksdirTest(BitcoinTestFramework):
def run_test(self):
self.stop_node(0)
- assert os.path.isdir(os.path.join(self.nodes[0].chain_path, "blocks"))
+ assert os.path.isdir(os.path.join(self.nodes[0].blocks_path))
assert not os.path.isdir(os.path.join(self.nodes[0].datadir, "blocks"))
shutil.rmtree(self.nodes[0].datadir)
initialize_datadir(self.options.tmpdir, 0, self.chain)
@@ -31,7 +31,7 @@ class BlocksdirTest(BitcoinTestFramework):
self.log.info("mining blocks..")
self.generatetoaddress(self.nodes[0], 10, self.nodes[0].get_deterministic_priv_key().address)
assert os.path.isfile(os.path.join(blocksdir_path, self.chain, "blocks", "blk00000.dat"))
- assert os.path.isdir(os.path.join(self.nodes[0].chain_path, "blocks", "index"))
+ assert os.path.isdir(os.path.join(self.nodes[0].blocks_path, "index"))
if __name__ == '__main__':
diff --git a/test/functional/feature_cltv.py b/test/functional/feature_cltv.py
index 7730db9672..8c45fb5a4d 100755
--- a/test/functional/feature_cltv.py
+++ b/test/functional/feature_cltv.py
@@ -151,11 +151,11 @@ class BIP65Test(BitcoinTestFramework):
cltv_invalidate(spendtx, i)
expected_cltv_reject_reason = [
- "non-mandatory-script-verify-flag (Operation not valid with the current stack size)",
- "non-mandatory-script-verify-flag (Negative locktime)",
- "non-mandatory-script-verify-flag (Locktime requirement not satisfied)",
- "non-mandatory-script-verify-flag (Locktime requirement not satisfied)",
- "non-mandatory-script-verify-flag (Locktime requirement not satisfied)",
+ "mandatory-script-verify-flag-failed (Operation not valid with the current stack size)",
+ "mandatory-script-verify-flag-failed (Negative locktime)",
+ "mandatory-script-verify-flag-failed (Locktime requirement not satisfied)",
+ "mandatory-script-verify-flag-failed (Locktime requirement not satisfied)",
+ "mandatory-script-verify-flag-failed (Locktime requirement not satisfied)",
][i]
# First we show that this tx is valid except for CLTV by getting it
# rejected from the mempool for exactly that reason.
diff --git a/test/functional/feature_config_args.py b/test/functional/feature_config_args.py
index 2927355bda..97ee9538dc 100755
--- a/test/functional/feature_config_args.py
+++ b/test/functional/feature_config_args.py
@@ -249,28 +249,24 @@ class ConfArgsTest(BitcoinTestFramework):
# No peers.dat exists and -dnsseed=0
# We expect the node will fallback immediately to fixed seeds
assert not os.path.exists(os.path.join(default_data_dir, "peers.dat"))
- start = time.time()
with self.nodes[0].assert_debug_log(expected_msgs=[
"Loaded 0 addresses from peers.dat",
"DNS seeding disabled",
"Adding fixed seeds as -dnsseed=0 (or IPv4/IPv6 connections are disabled via -onlynet) and neither -addnode nor -seednode are provided\n",
]):
self.start_node(0, extra_args=['-dnsseed=0', '-fixedseeds=1'])
- assert time.time() - start < 60
self.stop_node(0)
self.nodes[0].assert_start_raises_init_error(['-dnsseed=1', '-onlynet=i2p', '-i2psam=127.0.0.1:7656'], "Error: Incompatible options: -dnsseed=1 was explicitly specified, but -onlynet forbids connections to IPv4/IPv6")
# No peers.dat exists and dns seeds are disabled.
# We expect the node will not add fixed seeds when explicitly disabled.
assert not os.path.exists(os.path.join(default_data_dir, "peers.dat"))
- start = time.time()
with self.nodes[0].assert_debug_log(expected_msgs=[
"Loaded 0 addresses from peers.dat",
"DNS seeding disabled",
"Fixed seeds are disabled",
]):
self.start_node(0, extra_args=['-dnsseed=0', '-fixedseeds=0'])
- assert time.time() - start < 60
self.stop_node(0)
# No peers.dat exists and -dnsseed=0, but a -addnode is provided
@@ -371,6 +367,14 @@ class ConfArgsTest(BitcoinTestFramework):
f'is being used instead.') + r"[\s\S]*", env=env, match=ErrorMatch.FULL_REGEX)
node.args = node_args
+ def test_acceptstalefeeestimates_arg_support(self):
+ self.log.info("Test -acceptstalefeeestimates option support")
+ conf_file = self.nodes[0].datadir_path / "bitcoin.conf"
+ for chain, chain_name in {("main", ""), ("test", "testnet3"), ("signet", "signet")}:
+ util.write_config(conf_file, n=0, chain=chain_name, extra_config='acceptstalefeeestimates=1\n')
+ self.nodes[0].assert_start_raises_init_error(expected_msg=f'Error: acceptstalefeeestimates is not supported on {chain} chain.')
+ util.write_config(conf_file, n=0, chain="regtest") # Reset to regtest
+
def run_test(self):
self.test_log_buffer()
self.test_args_log()
@@ -383,6 +387,7 @@ class ConfArgsTest(BitcoinTestFramework):
self.test_invalid_command_line_options()
self.test_ignored_conf()
self.test_ignored_default_conf()
+ self.test_acceptstalefeeestimates_arg_support()
# Remove the -datadir argument so it doesn't override the config file
self.nodes[0].args = [arg for arg in self.nodes[0].args if not arg.startswith("-datadir")]
diff --git a/test/functional/feature_csv_activation.py b/test/functional/feature_csv_activation.py
index a88a97c813..92e4187f3c 100755
--- a/test/functional/feature_csv_activation.py
+++ b/test/functional/feature_csv_activation.py
@@ -407,9 +407,9 @@ class BIP68_112_113Test(BitcoinTestFramework):
# -1 OP_CSV tx and (empty stack) OP_CSV tx should fail
self.send_blocks([self.create_test_block([bip112tx_special_v1])], success=False,
- reject_reason='non-mandatory-script-verify-flag (Negative locktime)')
+ reject_reason='mandatory-script-verify-flag-failed (Negative locktime)')
self.send_blocks([self.create_test_block([bip112tx_emptystack_v1])], success=False,
- reject_reason='non-mandatory-script-verify-flag (Operation not valid with the current stack size)')
+ reject_reason='mandatory-script-verify-flag-failed (Operation not valid with the current stack size)')
# If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in argument to OP_CSV, version 1 txs should still pass
success_txs = [tx['tx'] for tx in bip112txs_vary_OP_CSV_v1 if tx['sdf']]
@@ -424,15 +424,15 @@ class BIP68_112_113Test(BitcoinTestFramework):
fail_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v1 if not tx['sdf']]
for tx in fail_txs:
self.send_blocks([self.create_test_block([tx])], success=False,
- reject_reason='non-mandatory-script-verify-flag (Locktime requirement not satisfied)')
+ reject_reason='mandatory-script-verify-flag-failed (Locktime requirement not satisfied)')
self.log.info("Test version 2 txs")
# -1 OP_CSV tx and (empty stack) OP_CSV tx should fail
self.send_blocks([self.create_test_block([bip112tx_special_v2])], success=False,
- reject_reason='non-mandatory-script-verify-flag (Negative locktime)')
+ reject_reason='mandatory-script-verify-flag-failed (Negative locktime)')
self.send_blocks([self.create_test_block([bip112tx_emptystack_v2])], success=False,
- reject_reason='non-mandatory-script-verify-flag (Operation not valid with the current stack size)')
+ reject_reason='mandatory-script-verify-flag-failed (Operation not valid with the current stack size)')
# If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in argument to OP_CSV, version 2 txs should pass (all sequence locks are met)
success_txs = [tx['tx'] for tx in bip112txs_vary_OP_CSV_v2 if tx['sdf']]
@@ -448,20 +448,20 @@ class BIP68_112_113Test(BitcoinTestFramework):
fail_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v2 if not tx['sdf']]
for tx in fail_txs:
self.send_blocks([self.create_test_block([tx])], success=False,
- reject_reason='non-mandatory-script-verify-flag (Locktime requirement not satisfied)')
+ reject_reason='mandatory-script-verify-flag-failed (Locktime requirement not satisfied)')
# If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in nSequence, tx should fail
fail_txs = [tx['tx'] for tx in bip112txs_vary_nSequence_v2 if tx['sdf']]
for tx in fail_txs:
self.send_blocks([self.create_test_block([tx])], success=False,
- reject_reason='non-mandatory-script-verify-flag (Locktime requirement not satisfied)')
+ reject_reason='mandatory-script-verify-flag-failed (Locktime requirement not satisfied)')
# If sequencelock types mismatch, tx should fail
fail_txs = [tx['tx'] for tx in bip112txs_vary_nSequence_v2 if not tx['sdf'] and tx['stf']]
fail_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_v2 if not tx['sdf'] and tx['stf']]
for tx in fail_txs:
self.send_blocks([self.create_test_block([tx])], success=False,
- reject_reason='non-mandatory-script-verify-flag (Locktime requirement not satisfied)')
+ reject_reason='mandatory-script-verify-flag-failed (Locktime requirement not satisfied)')
# Remaining txs should pass, just test masking works properly
success_txs = [tx['tx'] for tx in bip112txs_vary_nSequence_v2 if not tx['sdf'] and not tx['stf']]
diff --git a/test/functional/feature_dersig.py b/test/functional/feature_dersig.py
index 4a66863d91..44c12b2a59 100755
--- a/test/functional/feature_dersig.py
+++ b/test/functional/feature_dersig.py
@@ -120,7 +120,7 @@ class BIP66Test(BitcoinTestFramework):
'txid': spendtx.hash,
'wtxid': spendtx.getwtxid(),
'allowed': False,
- 'reject-reason': 'non-mandatory-script-verify-flag (Non-canonical DER signature)',
+ 'reject-reason': 'mandatory-script-verify-flag-failed (Non-canonical DER signature)',
}],
self.nodes[0].testmempoolaccept(rawtxs=[spendtx.serialize().hex()], maxfeerate=0),
)
@@ -130,7 +130,7 @@ class BIP66Test(BitcoinTestFramework):
block.hashMerkleRoot = block.calc_merkle_root()
block.solve()
- with self.nodes[0].assert_debug_log(expected_msgs=[f'CheckInputScripts on {block.vtx[-1].hash} failed with non-mandatory-script-verify-flag (Non-canonical DER signature)']):
+ with self.nodes[0].assert_debug_log(expected_msgs=[f'CheckInputScripts on {block.vtx[-1].hash} failed with mandatory-script-verify-flag-failed (Non-canonical DER signature)']):
peer.send_and_ping(msg_block(block))
assert_equal(int(self.nodes[0].getbestblockhash(), 16), tip)
peer.sync_with_ping()
diff --git a/test/functional/feature_dirsymlinks.py b/test/functional/feature_dirsymlinks.py
index 288754c04c..96f4aed08a 100755
--- a/test/functional/feature_dirsymlinks.py
+++ b/test/functional/feature_dirsymlinks.py
@@ -26,7 +26,7 @@ class SymlinkTest(BitcoinTestFramework):
self.stop_node(0)
rename_and_link(
- from_name=self.nodes[0].chain_path / "blocks",
+ from_name=self.nodes[0].blocks_path,
to_name=dir_new_blocks,
)
rename_and_link(
diff --git a/test/functional/feature_init.py b/test/functional/feature_init.py
index 64ca312b84..94f5116f9b 100755
--- a/test/functional/feature_init.py
+++ b/test/functional/feature_init.py
@@ -5,6 +5,7 @@
"""Stress tests related to node initialization."""
import os
from pathlib import Path
+import shutil
from test_framework.test_framework import BitcoinTestFramework, SkipTest
from test_framework.test_node import ErrorMatch
@@ -47,7 +48,7 @@ class InitStressTest(BitcoinTestFramework):
def start_expecting_error(err_fragment):
node.assert_start_raises_init_error(
- extra_args=['-txindex=1', '-blockfilterindex=1', '-coinstatsindex=1'],
+ extra_args=['-txindex=1', '-blockfilterindex=1', '-coinstatsindex=1', '-checkblocks=200', '-checklevel=4'],
expected_msg=err_fragment,
match=ErrorMatch.PARTIAL_REGEX,
)
@@ -101,9 +102,9 @@ class InitStressTest(BitcoinTestFramework):
}
files_to_perturb = {
- 'blocks/index/*.ldb': 'Error opening block database.',
+ 'blocks/index/*.ldb': 'Error loading block database.',
'chainstate/*.ldb': 'Error opening block database.',
- 'blocks/blk*.dat': 'Error opening block database.',
+ 'blocks/blk*.dat': 'Corrupted block database detected.',
}
for file_patt, err_fragment in files_to_delete.items():
@@ -124,18 +125,31 @@ class InitStressTest(BitcoinTestFramework):
check_clean_start()
self.stop_node(0)
+ self.log.info("Test startup errors after perturbing certain essential files")
for file_patt, err_fragment in files_to_perturb.items():
+ shutil.copytree(node.chain_path / "blocks", node.chain_path / "blocks_bak")
+ shutil.copytree(node.chain_path / "chainstate", node.chain_path / "chainstate_bak")
target_files = list(node.chain_path.glob(file_patt))
for target_file in target_files:
self.log.info(f"Perturbing file to ensure failure {target_file}")
- with open(target_file, "rb") as tf_read, open(target_file, "wb") as tf_write:
+ with open(target_file, "rb") as tf_read:
contents = tf_read.read()
tweaked_contents = bytearray(contents)
- tweaked_contents[50:250] = b'1' * 200
+ # Since the genesis block is not checked by -checkblocks, the
+ # perturbation window must be chosen such that a higher block
+ # in blk*.dat is affected.
+ tweaked_contents[150:350] = b'1' * 200
+ with open(target_file, "wb") as tf_write:
tf_write.write(bytes(tweaked_contents))
start_expecting_error(err_fragment)
+ shutil.rmtree(node.chain_path / "blocks")
+ shutil.rmtree(node.chain_path / "chainstate")
+ shutil.move(node.chain_path / "blocks_bak", node.chain_path / "blocks")
+ shutil.move(node.chain_path / "chainstate_bak", node.chain_path / "chainstate")
+
+
if __name__ == '__main__':
InitStressTest().main()
diff --git a/test/functional/feature_loadblock.py b/test/functional/feature_loadblock.py
index c90ccc4936..12d65fde68 100755
--- a/test/functional/feature_loadblock.py
+++ b/test/functional/feature_loadblock.py
@@ -37,7 +37,7 @@ class LoadblockTest(BitcoinTestFramework):
cfg_file = os.path.join(data_dir, "linearize.cfg")
bootstrap_file = os.path.join(self.options.tmpdir, "bootstrap.dat")
genesis_block = self.nodes[0].getblockhash(0)
- blocks_dir = self.nodes[0].chain_path / "blocks"
+ blocks_dir = self.nodes[0].blocks_path
hash_list = tempfile.NamedTemporaryFile(dir=data_dir,
mode='w',
delete=False,
diff --git a/test/functional/feature_nulldummy.py b/test/functional/feature_nulldummy.py
index 7b2a29bdb4..f896cb6f43 100755
--- a/test/functional/feature_nulldummy.py
+++ b/test/functional/feature_nulldummy.py
@@ -37,7 +37,7 @@ from test_framework.util import (
from test_framework.wallet import getnewdestination
from test_framework.wallet_util import generate_keypair
-NULLDUMMY_ERROR = "non-mandatory-script-verify-flag (Dummy CHECKMULTISIG argument must be zero)"
+NULLDUMMY_ERROR = "mandatory-script-verify-flag-failed (Dummy CHECKMULTISIG argument must be zero)"
def invalidate_nulldummy_tx(tx):
diff --git a/test/functional/feature_pruning.py b/test/functional/feature_pruning.py
index 15dd4827ae..4b548ef0f3 100755
--- a/test/functional/feature_pruning.py
+++ b/test/functional/feature_pruning.py
@@ -91,7 +91,7 @@ class PruneTest(BitcoinTestFramework):
def setup_network(self):
self.setup_nodes()
- self.prunedir = os.path.join(self.nodes[2].chain_path, 'blocks', '')
+ self.prunedir = os.path.join(self.nodes[2].blocks_path, '')
self.connect_nodes(0, 1)
self.connect_nodes(1, 2)
@@ -290,7 +290,7 @@ class PruneTest(BitcoinTestFramework):
assert_equal(ret + 1, node.getblockchaininfo()['pruneheight'])
def has_block(index):
- return os.path.isfile(os.path.join(self.nodes[node_number].chain_path, "blocks", f"blk{index:05}.dat"))
+ return os.path.isfile(os.path.join(self.nodes[node_number].blocks_path, f"blk{index:05}.dat"))
# should not prune because chain tip of node 3 (995) < PruneAfterHeight (1000)
assert_raises_rpc_error(-1, "Blockchain is too short for pruning", node.pruneblockchain, height(500))
diff --git a/test/functional/feature_reindex.py b/test/functional/feature_reindex.py
index fcbb49d420..83f1c5003c 100755
--- a/test/functional/feature_reindex.py
+++ b/test/functional/feature_reindex.py
@@ -38,7 +38,7 @@ class ReindexTest(BitcoinTestFramework):
# In this test environment, blocks will always be in order (since
# we're generating them rather than getting them from peers), so to
# test out-of-order handling, swap blocks 1 and 2 on disk.
- blk0 = self.nodes[0].chain_path / "blocks" / "blk00000.dat"
+ blk0 = self.nodes[0].blocks_path / "blk00000.dat"
with open(blk0, 'r+b') as bf:
# Read at least the first few blocks (including genesis)
b = bf.read(2000)
diff --git a/test/functional/feature_reindex_readonly.py b/test/functional/feature_reindex_readonly.py
new file mode 100755
index 0000000000..26531f472b
--- /dev/null
+++ b/test/functional/feature_reindex_readonly.py
@@ -0,0 +1,70 @@
+#!/usr/bin/env python3
+# Copyright (c) 2023-present The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test running bitcoind with -reindex from a read-only blockstore
+- Start a node, generate blocks, then restart with -reindex after setting blk files to read-only
+"""
+
+import os
+import platform
+import stat
+import subprocess
+from test_framework.test_framework import BitcoinTestFramework
+
+
+class BlockstoreReindexTest(BitcoinTestFramework):
+ def set_test_params(self):
+ self.setup_clean_chain = True
+ self.num_nodes = 1
+ self.extra_args = [["-fastprune"]]
+
+ def reindex_readonly(self):
+ self.log.debug("Generate block big enough to start second block file")
+ fastprune_blockfile_size = 0x10000
+ opreturn = "6a"
+ nulldata = fastprune_blockfile_size * "ff"
+ self.generateblock(self.nodes[0], output=f"raw({opreturn}{nulldata})", transactions=[])
+ self.stop_node(0)
+
+ assert (self.nodes[0].chain_path / "blocks" / "blk00000.dat").exists()
+ assert (self.nodes[0].chain_path / "blocks" / "blk00001.dat").exists()
+
+ self.log.debug("Make the first block file read-only")
+ filename = self.nodes[0].chain_path / "blocks" / "blk00000.dat"
+ filename.chmod(stat.S_IREAD)
+
+ used_chattr = False
+ if platform.system() == "Linux":
+ try:
+ subprocess.run(['chattr', '+i', filename], capture_output=True, check=True)
+ used_chattr = True
+ self.log.info("Made file immutable with chattr")
+ except subprocess.CalledProcessError as e:
+ self.log.warning(str(e))
+ if e.stdout:
+ self.log.warning(f"stdout: {e.stdout}")
+ if e.stderr:
+ self.log.warning(f"stderr: {e.stderr}")
+ if os.getuid() == 0:
+ self.log.warning("Return early on Linux under root, because chattr failed.")
+ self.log.warning("This should only happen due to missing capabilities in a container.")
+ self.log.warning("Make sure to --cap-add LINUX_IMMUTABLE if you want to run this test.")
+ return
+
+ self.log.debug("Attempt to restart and reindex the node with the unwritable block file")
+ with self.nodes[0].assert_debug_log(expected_msgs=['FlushStateToDisk', 'failed to open file'], unexpected_msgs=[]):
+ self.nodes[0].assert_start_raises_init_error(extra_args=['-reindex', '-fastprune'],
+ expected_msg="Error: A fatal internal error occurred, see debug.log for details")
+
+ if used_chattr:
+ subprocess.check_call(['chattr', '-i', filename])
+
+ filename.chmod(0o777)
+
+ def run_test(self):
+ self.reindex_readonly()
+
+
+if __name__ == '__main__':
+ BlockstoreReindexTest().main()
diff --git a/test/functional/feature_remove_pruned_files_on_startup.py b/test/functional/feature_remove_pruned_files_on_startup.py
index a55e08ef1a..c128587949 100755
--- a/test/functional/feature_remove_pruned_files_on_startup.py
+++ b/test/functional/feature_remove_pruned_files_on_startup.py
@@ -20,10 +20,10 @@ class FeatureRemovePrunedFilesOnStartupTest(BitcoinTestFramework):
self.sync_blocks()
def run_test(self):
- blk0 = self.nodes[0].chain_path / "blocks" / "blk00000.dat"
- rev0 = self.nodes[0].chain_path / "blocks" / "rev00000.dat"
- blk1 = self.nodes[0].chain_path / "blocks" / "blk00001.dat"
- rev1 = self.nodes[0].chain_path / "blocks" / "rev00001.dat"
+ blk0 = self.nodes[0].blocks_path / "blk00000.dat"
+ rev0 = self.nodes[0].blocks_path / "rev00000.dat"
+ blk1 = self.nodes[0].blocks_path / "blk00001.dat"
+ rev1 = self.nodes[0].blocks_path / "rev00001.dat"
self.mine_batches(800)
fo1 = os.open(blk0, os.O_RDONLY)
fo2 = os.open(rev1, os.O_RDONLY)
diff --git a/test/functional/feature_segwit.py b/test/functional/feature_segwit.py
index 77f3e4feda..6c467fa613 100755
--- a/test/functional/feature_segwit.py
+++ b/test/functional/feature_segwit.py
@@ -89,6 +89,7 @@ class SegWitTest(BitcoinTestFramework):
[
"-acceptnonstdtxn=1",
"-rpcserialversion=0",
+ "-deprecatedrpc=serialversion",
"-testactivationheight=segwit@165",
"-addresstype=legacy",
],
@@ -215,13 +216,13 @@ class SegWitTest(BitcoinTestFramework):
self.log.info("Verify default node can't accept txs with missing witness")
# unsigned, no scriptsig
- self.fail_accept(self.nodes[0], "non-mandatory-script-verify-flag (Witness program hash mismatch)", wit_ids[NODE_0][P2WPKH][0], sign=False)
- self.fail_accept(self.nodes[0], "non-mandatory-script-verify-flag (Witness program was passed an empty witness)", wit_ids[NODE_0][P2WSH][0], sign=False)
+ self.fail_accept(self.nodes[0], "mandatory-script-verify-flag-failed (Witness program hash mismatch)", wit_ids[NODE_0][P2WPKH][0], sign=False)
+ self.fail_accept(self.nodes[0], "mandatory-script-verify-flag-failed (Witness program was passed an empty witness)", wit_ids[NODE_0][P2WSH][0], sign=False)
self.fail_accept(self.nodes[0], "mandatory-script-verify-flag-failed (Operation not valid with the current stack size)", p2sh_ids[NODE_0][P2WPKH][0], sign=False)
self.fail_accept(self.nodes[0], "mandatory-script-verify-flag-failed (Operation not valid with the current stack size)", p2sh_ids[NODE_0][P2WSH][0], sign=False)
# unsigned with redeem script
- self.fail_accept(self.nodes[0], "non-mandatory-script-verify-flag (Witness program hash mismatch)", p2sh_ids[NODE_0][P2WPKH][0], sign=False, redeem_script=witness_script(False, self.pubkey[0]))
- self.fail_accept(self.nodes[0], "non-mandatory-script-verify-flag (Witness program was passed an empty witness)", p2sh_ids[NODE_0][P2WSH][0], sign=False, redeem_script=witness_script(True, self.pubkey[0]))
+ self.fail_accept(self.nodes[0], "mandatory-script-verify-flag-failed (Witness program hash mismatch)", p2sh_ids[NODE_0][P2WPKH][0], sign=False, redeem_script=witness_script(False, self.pubkey[0]))
+ self.fail_accept(self.nodes[0], "mandatory-script-verify-flag-failed (Witness program was passed an empty witness)", p2sh_ids[NODE_0][P2WSH][0], sign=False, redeem_script=witness_script(True, self.pubkey[0]))
self.log.info("Verify block and transaction serialization rpcs return differing serializations depending on rpc serialization flag")
assert self.nodes[2].getblock(blockhash, False) != self.nodes[0].getblock(blockhash, False)
@@ -244,10 +245,10 @@ class SegWitTest(BitcoinTestFramework):
assert_equal(witnesses[0], '00' * 32)
self.log.info("Verify witness txs without witness data are invalid after the fork")
- self.fail_accept(self.nodes[2], 'non-mandatory-script-verify-flag (Witness program hash mismatch)', wit_ids[NODE_2][P2WPKH][2], sign=False)
- self.fail_accept(self.nodes[2], 'non-mandatory-script-verify-flag (Witness program was passed an empty witness)', wit_ids[NODE_2][P2WSH][2], sign=False)
- self.fail_accept(self.nodes[2], 'non-mandatory-script-verify-flag (Witness program hash mismatch)', p2sh_ids[NODE_2][P2WPKH][2], sign=False, redeem_script=witness_script(False, self.pubkey[2]))
- self.fail_accept(self.nodes[2], 'non-mandatory-script-verify-flag (Witness program was passed an empty witness)', p2sh_ids[NODE_2][P2WSH][2], sign=False, redeem_script=witness_script(True, self.pubkey[2]))
+ self.fail_accept(self.nodes[2], 'mandatory-script-verify-flag-failed (Witness program hash mismatch)', wit_ids[NODE_2][P2WPKH][2], sign=False)
+ self.fail_accept(self.nodes[2], 'mandatory-script-verify-flag-failed (Witness program was passed an empty witness)', wit_ids[NODE_2][P2WSH][2], sign=False)
+ self.fail_accept(self.nodes[2], 'mandatory-script-verify-flag-failed (Witness program hash mismatch)', p2sh_ids[NODE_2][P2WPKH][2], sign=False, redeem_script=witness_script(False, self.pubkey[2]))
+ self.fail_accept(self.nodes[2], 'mandatory-script-verify-flag-failed (Witness program was passed an empty witness)', p2sh_ids[NODE_2][P2WSH][2], sign=False, redeem_script=witness_script(True, self.pubkey[2]))
self.log.info("Verify default node can now use witness txs")
self.success_mine(self.nodes[0], wit_ids[NODE_0][P2WPKH][0], True)
diff --git a/test/functional/feature_txindex_compatibility.py b/test/functional/feature_txindex_compatibility.py
index a5b25cbd71..939271b385 100755
--- a/test/functional/feature_txindex_compatibility.py
+++ b/test/functional/feature_txindex_compatibility.py
@@ -7,20 +7,19 @@
Previous releases are required by this test, see test/README.md.
"""
-import os
import shutil
from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import assert_raises_rpc_error
from test_framework.wallet import MiniWallet
class TxindexCompatibilityTest(BitcoinTestFramework):
def set_test_params(self):
- self.num_nodes = 3
+ self.num_nodes = 2
self.extra_args = [
["-reindex", "-txindex"],
[],
- [],
]
def skip_test_if_missing_module(self):
@@ -33,12 +32,10 @@ class TxindexCompatibilityTest(BitcoinTestFramework):
versions=[
160300, # Last release with legacy txindex
None, # For MiniWallet, without migration code
- 220000, # Last release with migration code (0.17.x - 22.x)
],
)
self.start_nodes()
self.connect_nodes(0, 1)
- self.connect_nodes(1, 2)
def run_test(self):
mini_wallet = MiniWallet(self.nodes[1])
@@ -47,45 +44,23 @@ class TxindexCompatibilityTest(BitcoinTestFramework):
self.generate(self.nodes[1], 1)
self.log.info("Check legacy txindex")
+ assert_raises_rpc_error(-5, "Use -txindex", lambda: self.nodes[1].getrawtransaction(txid=spend_utxo["txid"]))
self.nodes[0].getrawtransaction(txid=spend_utxo["txid"]) # Requires -txindex
self.stop_nodes()
legacy_chain_dir = self.nodes[0].chain_path
- self.log.info("Migrate legacy txindex")
- migrate_chain_dir = self.nodes[2].chain_path
- shutil.rmtree(migrate_chain_dir)
- shutil.copytree(legacy_chain_dir, migrate_chain_dir)
- with self.nodes[2].assert_debug_log([
- "Upgrading txindex database...",
- "txindex is enabled at height 200",
- ]):
- self.start_node(2, extra_args=["-txindex"])
- self.nodes[2].getrawtransaction(txid=spend_utxo["txid"]) # Requires -txindex
-
self.log.info("Drop legacy txindex")
drop_index_chain_dir = self.nodes[1].chain_path
shutil.rmtree(drop_index_chain_dir)
shutil.copytree(legacy_chain_dir, drop_index_chain_dir)
- self.nodes[1].assert_start_raises_init_error(
- extra_args=["-txindex"],
- expected_msg="Error: The block index db contains a legacy 'txindex'. To clear the occupied disk space, run a full -reindex, otherwise ignore this error. This error message will not be displayed again.",
- )
# Build txindex from scratch and check there is no error this time
self.start_node(1, extra_args=["-txindex"])
- self.nodes[2].getrawtransaction(txid=spend_utxo["txid"]) # Requires -txindex
+ self.wait_until(lambda: self.nodes[1].getindexinfo()["txindex"]["synced"] == True)
+ self.nodes[1].getrawtransaction(txid=spend_utxo["txid"]) # Requires -txindex
self.stop_nodes()
- self.log.info("Check migrated txindex cannot be read by legacy node")
- err_msg = f": You need to rebuild the database using -reindex to change -txindex.{os.linesep}Please restart with -reindex or -reindex-chainstate to recover."
- shutil.rmtree(legacy_chain_dir)
- shutil.copytree(migrate_chain_dir, legacy_chain_dir)
- self.nodes[0].assert_start_raises_init_error(extra_args=["-txindex"], expected_msg=err_msg)
- shutil.rmtree(legacy_chain_dir)
- shutil.copytree(drop_index_chain_dir, legacy_chain_dir)
- self.nodes[0].assert_start_raises_init_error(extra_args=["-txindex"], expected_msg=err_msg)
-
if __name__ == "__main__":
TxindexCompatibilityTest().main()
diff --git a/test/functional/feature_unsupported_utxo_db.py b/test/functional/feature_unsupported_utxo_db.py
index 1c8c08d1d8..6acf551216 100755
--- a/test/functional/feature_unsupported_utxo_db.py
+++ b/test/functional/feature_unsupported_utxo_db.py
@@ -40,9 +40,9 @@ class UnsupportedUtxoDbTest(BitcoinTestFramework):
self.log.info("Check init error")
legacy_utxos_dir = self.nodes[0].chain_path / "chainstate"
- legacy_blocks_dir = self.nodes[0].chain_path / "blocks"
+ legacy_blocks_dir = self.nodes[0].blocks_path
recent_utxos_dir = self.nodes[1].chain_path / "chainstate"
- recent_blocks_dir = self.nodes[1].chain_path / "blocks"
+ recent_blocks_dir = self.nodes[1].blocks_path
shutil.copytree(legacy_utxos_dir, recent_utxos_dir)
shutil.copytree(legacy_blocks_dir, recent_blocks_dir)
self.nodes[1].assert_start_raises_init_error(
diff --git a/test/functional/interface_usdt_mempool.py b/test/functional/interface_usdt_mempool.py
index f138fa44cc..d1e274480c 100755
--- a/test/functional/interface_usdt_mempool.py
+++ b/test/functional/interface_usdt_mempool.py
@@ -137,9 +137,7 @@ class MempoolTracepointTest(BitcoinTestFramework):
"""Add a transaction to the mempool and make sure the tracepoint returns
the expected txid, vsize, and fee."""
- EXPECTED_ADDED_EVENTS = 1
- handled_added_events = 0
- event = None
+ events = []
self.log.info("Hooking into mempool:added tracepoint...")
node = self.nodes[0]
@@ -148,9 +146,7 @@ class MempoolTracepointTest(BitcoinTestFramework):
bpf = BPF(text=MEMPOOL_TRACEPOINTS_PROGRAM, usdt_contexts=[ctx], debug=0)
def handle_added_event(_, data, __):
- nonlocal event, handled_added_events
- event = bpf["added_events"].event(data)
- handled_added_events += 1
+ events.append(bpf["added_events"].event(data))
bpf["added_events"].open_perf_buffer(handle_added_event)
@@ -165,7 +161,8 @@ class MempoolTracepointTest(BitcoinTestFramework):
self.generate(node, 1)
self.log.info("Ensuring mempool:added event was handled successfully...")
- assert_equal(EXPECTED_ADDED_EVENTS, handled_added_events)
+ assert_equal(1, len(events))
+ event = events[0]
assert_equal(bytes(event.hash)[::-1].hex(), tx["txid"])
assert_equal(event.vsize, tx["tx"].get_vsize())
assert_equal(event.fee, fee)
@@ -177,9 +174,7 @@ class MempoolTracepointTest(BitcoinTestFramework):
"""Expire a transaction from the mempool and make sure the tracepoint returns
the expected txid, expiry reason, vsize, and fee."""
- EXPECTED_REMOVED_EVENTS = 1
- handled_removed_events = 0
- event = None
+ events = []
self.log.info("Hooking into mempool:removed tracepoint...")
node = self.nodes[0]
@@ -188,9 +183,7 @@ class MempoolTracepointTest(BitcoinTestFramework):
bpf = BPF(text=MEMPOOL_TRACEPOINTS_PROGRAM, usdt_contexts=[ctx], debug=0)
def handle_removed_event(_, data, __):
- nonlocal event, handled_removed_events
- event = bpf["removed_events"].event(data)
- handled_removed_events += 1
+ events.append(bpf["removed_events"].event(data))
bpf["removed_events"].open_perf_buffer(handle_removed_event)
@@ -212,7 +205,8 @@ class MempoolTracepointTest(BitcoinTestFramework):
bpf.perf_buffer_poll(timeout=200)
self.log.info("Ensuring mempool:removed event was handled successfully...")
- assert_equal(EXPECTED_REMOVED_EVENTS, handled_removed_events)
+ assert_equal(1, len(events))
+ event = events[0]
assert_equal(bytes(event.hash)[::-1].hex(), txid)
assert_equal(event.reason.decode("UTF-8"), "expiry")
assert_equal(event.vsize, tx["tx"].get_vsize())
@@ -226,9 +220,7 @@ class MempoolTracepointTest(BitcoinTestFramework):
"""Replace one and two transactions in the mempool and make sure the tracepoint
returns the expected txids, vsizes, and fees."""
- EXPECTED_REPLACED_EVENTS = 1
- handled_replaced_events = 0
- event = None
+ events = []
self.log.info("Hooking into mempool:replaced tracepoint...")
node = self.nodes[0]
@@ -237,9 +229,7 @@ class MempoolTracepointTest(BitcoinTestFramework):
bpf = BPF(text=MEMPOOL_TRACEPOINTS_PROGRAM, usdt_contexts=[ctx], debug=0)
def handle_replaced_event(_, data, __):
- nonlocal event, handled_replaced_events
- event = bpf["replaced_events"].event(data)
- handled_replaced_events += 1
+ events.append(bpf["replaced_events"].event(data))
bpf["replaced_events"].open_perf_buffer(handle_replaced_event)
@@ -261,7 +251,8 @@ class MempoolTracepointTest(BitcoinTestFramework):
bpf.perf_buffer_poll(timeout=200)
self.log.info("Ensuring mempool:replaced event was handled successfully...")
- assert_equal(EXPECTED_REPLACED_EVENTS, handled_replaced_events)
+ assert_equal(1, len(events))
+ event = events[0]
assert_equal(bytes(event.replaced_hash)[::-1].hex(), original_tx["txid"])
assert_equal(event.replaced_vsize, original_tx["tx"].get_vsize())
assert_equal(event.replaced_fee, original_fee)
@@ -277,9 +268,7 @@ class MempoolTracepointTest(BitcoinTestFramework):
"""Create an invalid transaction and make sure the tracepoint returns
the expected txid, rejection reason, peer id, and peer address."""
- EXPECTED_REJECTED_EVENTS = 1
- handled_rejected_events = 0
- event = None
+ events = []
self.log.info("Adding P2P connection...")
node = self.nodes[0]
@@ -291,9 +280,7 @@ class MempoolTracepointTest(BitcoinTestFramework):
bpf = BPF(text=MEMPOOL_TRACEPOINTS_PROGRAM, usdt_contexts=[ctx], debug=0)
def handle_rejected_event(_, data, __):
- nonlocal event, handled_rejected_events
- event = bpf["rejected_events"].event(data)
- handled_rejected_events += 1
+ events.append(bpf["rejected_events"].event(data))
bpf["rejected_events"].open_perf_buffer(handle_rejected_event)
@@ -305,9 +292,13 @@ class MempoolTracepointTest(BitcoinTestFramework):
bpf.perf_buffer_poll(timeout=200)
self.log.info("Ensuring mempool:rejected event was handled successfully...")
- assert_equal(EXPECTED_REJECTED_EVENTS, handled_rejected_events)
+ assert_equal(1, len(events))
+ event = events[0]
assert_equal(bytes(event.hash)[::-1].hex(), tx["tx"].hash)
- assert_equal(event.reason.decode("UTF-8"), "min relay fee not met")
+ # The next test is already known to fail, so disable it to avoid
+ # wasting CPU time and developer time. See
+ # https://github.com/bitcoin/bitcoin/issues/27380
+ #assert_equal(event.reason.decode("UTF-8"), "min relay fee not met")
bpf.cleanup()
self.generate(self.wallet, 1)
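
The pattern introduced above for interface_usdt_mempool.py is the same in every handler: the perf-buffer callback only appends the decoded event to a list, and all assertions run in the test body after polling, where a failure actually fails the test instead of being buried inside the callback. A condensed sketch of that shape, assuming the BCC Python bindings and an already-running bitcoind built with the mempool tracepoints (ctx/bpf construction as in the test; `trace_added_events` and `trigger` are illustrative names):

from bcc import BPF, USDT   # BCC Python bindings, as used by the USDT tests

def trace_added_events(node, bpf_program, trigger):
    """Collect mempool:added events while `trigger` runs; assert afterwards."""
    ctx = USDT(pid=node.process.pid)
    bpf = BPF(text=bpf_program, usdt_contexts=[ctx], debug=0)

    events = []

    def handle_added_event(_, data, __):
        # Only record the decoded event; do not assert inside the callback.
        events.append(bpf["added_events"].event(data))

    bpf["added_events"].open_perf_buffer(handle_added_event)
    trigger()                            # e.g. submit a transaction to the mempool
    bpf.perf_buffer_poll(timeout=200)
    bpf.cleanup()

    assert len(events) == 1              # assertions run here, in the test body
    return events[0]
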
diff --git a/test/functional/interface_usdt_net.py b/test/functional/interface_usdt_net.py
index d1f94637c9..e15ac3c1f2 100755
--- a/test/functional/interface_usdt_net.py
+++ b/test/functional/interface_usdt_net.py
@@ -121,11 +121,11 @@ class NetTracepointTest(BitcoinTestFramework):
checked_outbound_version_msg = 0
events = []
- def check_p2p_message(event, inbound):
+ def check_p2p_message(event, is_inbound):
nonlocal checked_inbound_version_msg, checked_outbound_version_msg
if event.msg_type.decode("utf-8") == "version":
self.log.info(
- f"check_p2p_message(): {'inbound' if inbound else 'outbound'} {event}")
+ f"check_p2p_message(): {'inbound' if is_inbound else 'outbound'} {event}")
peer = self.nodes[0].getpeerinfo()[0]
msg = msg_version()
msg.deserialize(BytesIO(bytes(event.msg[:event.msg_size])))
@@ -133,13 +133,12 @@ class NetTracepointTest(BitcoinTestFramework):
assert_equal(peer["addr"], event.peer_addr.decode("utf-8"))
assert_equal(peer["connection_type"],
event.peer_conn_type.decode("utf-8"))
- if inbound:
+ if is_inbound:
checked_inbound_version_msg += 1
else:
checked_outbound_version_msg += 1
def handle_inbound(_, data, __):
- nonlocal events
event = ctypes.cast(data, ctypes.POINTER(P2PMessage)).contents
events.append((event, True))
@@ -157,8 +156,8 @@ class NetTracepointTest(BitcoinTestFramework):
self.log.info(
"check receipt and content of in- and outbound version messages")
- for event, inbound in events:
- check_p2p_message(event, inbound)
+ for event, is_inbound in events:
+ check_p2p_message(event, is_inbound)
assert_equal(EXPECTED_INOUTBOUND_VERSION_MSG,
checked_inbound_version_msg)
assert_equal(EXPECTED_INOUTBOUND_VERSION_MSG,
diff --git a/test/functional/interface_usdt_utxocache.py b/test/functional/interface_usdt_utxocache.py
index 5f2ba49026..2fc5981451 100755
--- a/test/functional/interface_usdt_utxocache.py
+++ b/test/functional/interface_usdt_utxocache.py
@@ -252,43 +252,30 @@ class UTXOCacheTracepointTest(BitcoinTestFramework):
# that the handle_* functions succeeded.
EXPECTED_HANDLE_ADD_SUCCESS = 2
EXPECTED_HANDLE_SPENT_SUCCESS = 1
- handle_add_succeeds = 0
- handle_spent_succeeds = 0
- expected_utxocache_spents = []
expected_utxocache_adds = []
+ expected_utxocache_spents = []
+
+ actual_utxocache_adds = []
+ actual_utxocache_spents = []
+
+ def compare_utxo_with_event(utxo, event):
+ """Compare a utxo dict to the event produced by BPF"""
+ assert_equal(utxo["txid"], bytes(event.txid[::-1]).hex())
+ assert_equal(utxo["index"], event.index)
+ assert_equal(utxo["height"], event.height)
+ assert_equal(utxo["value"], event.value)
+ assert_equal(utxo["is_coinbase"], event.is_coinbase)
def handle_utxocache_add(_, data, __):
- nonlocal handle_add_succeeds
event = ctypes.cast(data, ctypes.POINTER(UTXOCacheChange)).contents
self.log.info(f"handle_utxocache_add(): {event}")
- add = expected_utxocache_adds.pop(0)
- try:
- assert_equal(add["txid"], bytes(event.txid[::-1]).hex())
- assert_equal(add["index"], event.index)
- assert_equal(add["height"], event.height)
- assert_equal(add["value"], event.value)
- assert_equal(add["is_coinbase"], event.is_coinbase)
- except AssertionError:
- self.log.exception("Assertion failed")
- else:
- handle_add_succeeds += 1
+ actual_utxocache_adds.append(event)
def handle_utxocache_spent(_, data, __):
- nonlocal handle_spent_succeeds
event = ctypes.cast(data, ctypes.POINTER(UTXOCacheChange)).contents
self.log.info(f"handle_utxocache_spent(): {event}")
- spent = expected_utxocache_spents.pop(0)
- try:
- assert_equal(spent["txid"], bytes(event.txid[::-1]).hex())
- assert_equal(spent["index"], event.index)
- assert_equal(spent["height"], event.height)
- assert_equal(spent["value"], event.value)
- assert_equal(spent["is_coinbase"], event.is_coinbase)
- except AssertionError:
- self.log.exception("Assertion failed")
- else:
- handle_spent_succeeds += 1
+ actual_utxocache_spents.append(event)
bpf["utxocache_add"].open_perf_buffer(handle_utxocache_add)
bpf["utxocache_spent"].open_perf_buffer(handle_utxocache_spent)
@@ -324,19 +311,18 @@ class UTXOCacheTracepointTest(BitcoinTestFramework):
"is_coinbase": block_index == 0,
})
- assert_equal(EXPECTED_HANDLE_ADD_SUCCESS, len(expected_utxocache_adds))
- assert_equal(EXPECTED_HANDLE_SPENT_SUCCESS,
- len(expected_utxocache_spents))
-
bpf.perf_buffer_poll(timeout=200)
- bpf.cleanup()
+
+ assert_equal(EXPECTED_HANDLE_ADD_SUCCESS, len(expected_utxocache_adds), len(actual_utxocache_adds))
+ assert_equal(EXPECTED_HANDLE_SPENT_SUCCESS, len(expected_utxocache_spents), len(actual_utxocache_spents))
self.log.info(
f"check that we successfully traced {EXPECTED_HANDLE_ADD_SUCCESS} adds and {EXPECTED_HANDLE_SPENT_SUCCESS} spent")
- assert_equal(0, len(expected_utxocache_adds))
- assert_equal(0, len(expected_utxocache_spents))
- assert_equal(EXPECTED_HANDLE_ADD_SUCCESS, handle_add_succeeds)
- assert_equal(EXPECTED_HANDLE_SPENT_SUCCESS, handle_spent_succeeds)
+ for expected_utxo, actual_event in zip(expected_utxocache_adds + expected_utxocache_spents,
+ actual_utxocache_adds + actual_utxocache_spents):
+ compare_utxo_with_event(expected_utxo, actual_event)
+
+ bpf.cleanup()
def test_flush(self):
""" Tests the utxocache:flush tracepoint API.
@@ -367,9 +353,13 @@ class UTXOCacheTracepointTest(BitcoinTestFramework):
"size": event.size
})
# sanity checks only
- assert event.memory > 0
- assert event.duration > 0
- handle_flush_succeeds += 1
+ try:
+ assert event.memory > 0
+ assert event.duration > 0
+ except AssertionError:
+ self.log.exception("Assertion error")
+ else:
+ handle_flush_succeeds += 1
bpf["utxocache_flush"].open_perf_buffer(handle_utxocache_flush)
diff --git a/test/functional/interface_usdt_validation.py b/test/functional/interface_usdt_validation.py
index f9d9b525cd..e29b2c46eb 100755
--- a/test/functional/interface_usdt_validation.py
+++ b/test/functional/interface_usdt_validation.py
@@ -86,7 +86,6 @@ class ValidationTracepointTest(BitcoinTestFramework):
self.duration)
BLOCKS_EXPECTED = 2
- blocks_checked = 0
expected_blocks = dict()
events = []
@@ -98,11 +97,9 @@ class ValidationTracepointTest(BitcoinTestFramework):
usdt_contexts=[ctx], debug=0)
def handle_blockconnected(_, data, __):
- nonlocal events, blocks_checked
event = ctypes.cast(data, ctypes.POINTER(Block)).contents
self.log.info(f"handle_blockconnected(): {event}")
events.append(event)
- blocks_checked += 1
bpf["block_connected"].open_perf_buffer(
handle_blockconnected)
@@ -127,7 +124,7 @@ class ValidationTracepointTest(BitcoinTestFramework):
# only plausibility checks
assert event.duration > 0
del expected_blocks[block_hash]
- assert_equal(BLOCKS_EXPECTED, blocks_checked)
+ assert_equal(BLOCKS_EXPECTED, len(events))
assert_equal(0, len(expected_blocks))
bpf.cleanup()
diff --git a/test/functional/mempool_compatibility.py b/test/functional/mempool_compatibility.py
index 3f632d3d56..fd3e219586 100755
--- a/test/functional/mempool_compatibility.py
+++ b/test/functional/mempool_compatibility.py
@@ -10,8 +10,6 @@ In case we need to break mempool compatibility we can continue to use the test b
Previous releases are required by this test, see test/README.md.
"""
-import os
-
from test_framework.blocktools import COINBASE_MATURITY
from test_framework.test_framework import BitcoinTestFramework
from test_framework.wallet import (
@@ -23,6 +21,7 @@ from test_framework.wallet import (
class MempoolCompatibilityTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 2
+ self.setup_clean_chain = True
def skip_test_if_missing_module(self):
self.skip_if_no_previous_releases()
@@ -55,9 +54,9 @@ class MempoolCompatibilityTest(BitcoinTestFramework):
self.stop_node(1)
self.log.info("Move mempool.dat from old to new node")
- old_node_mempool = os.path.join(old_node.chain_path, 'mempool.dat')
- new_node_mempool = os.path.join(new_node.chain_path, 'mempool.dat')
- os.rename(old_node_mempool, new_node_mempool)
+ old_node_mempool = old_node.chain_path / "mempool.dat"
+ new_node_mempool = new_node.chain_path / "mempool.dat"
+ old_node_mempool.rename(new_node_mempool)
self.log.info("Start new node and verify mempool contains the tx")
self.start_node(1)
@@ -70,7 +69,7 @@ class MempoolCompatibilityTest(BitcoinTestFramework):
self.stop_node(1)
self.log.info("Move mempool.dat from new to old node")
- os.rename(new_node_mempool, old_node_mempool)
+ new_node_mempool.rename(old_node_mempool)
self.log.info("Start old node again and verify mempool contains both txs")
self.start_node(0, ['-nowallet'])
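
mempool_compatibility.py (and several other tests in this diff) drop os.path.join/os.rename in favour of pathlib, since chain_path already behaves like a Path. A small, self-contained equivalence sketch, using a temporary directory as a stand-in for the node's datadir:

import os
import tempfile
from pathlib import Path

datadir = Path(tempfile.mkdtemp()) / "regtest"     # stand-in for node.chain_path
datadir.mkdir(parents=True)
(datadir / "mempool.dat").write_bytes(b"")         # placeholder file for the demo

# Old style:
#   os.rename(os.path.join(datadir, "mempool.dat"), os.path.join(datadir, "mempool.dat.bak"))
# New style: '/' joins path components and Path.rename() moves the file.
(datadir / "mempool.dat").rename(datadir / "mempool.dat.bak")
assert (datadir / "mempool.dat.bak").exists()
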
diff --git a/test/functional/mempool_datacarrier.py b/test/functional/mempool_datacarrier.py
index c370d8fa91..951bf37ae8 100755
--- a/test/functional/mempool_datacarrier.py
+++ b/test/functional/mempool_datacarrier.py
@@ -22,16 +22,18 @@ from test_framework.wallet import MiniWallet
class DataCarrierTest(BitcoinTestFramework):
def set_test_params(self):
- self.num_nodes = 3
+ self.num_nodes = 4
self.extra_args = [
[],
["-datacarrier=0"],
- ["-datacarrier=1", f"-datacarriersize={MAX_OP_RETURN_RELAY - 1}"]
+ ["-datacarrier=1", f"-datacarriersize={MAX_OP_RETURN_RELAY - 1}"],
+ ["-datacarrier=1", f"-datacarriersize=2"],
]
- def test_null_data_transaction(self, node: TestNode, data: bytes, success: bool) -> None:
+ def test_null_data_transaction(self, node: TestNode, data, success: bool) -> None:
tx = self.wallet.create_self_transfer(fee_rate=0)["tx"]
- tx.vout.append(CTxOut(nValue=0, scriptPubKey=CScript([OP_RETURN, data])))
+ data = [] if data is None else [data]
+ tx.vout.append(CTxOut(nValue=0, scriptPubKey=CScript([OP_RETURN] + data)))
tx.vout[0].nValue -= tx.get_vsize() # simply pay 1sat/vbyte fee
tx_hex = tx.serialize().hex()
@@ -49,6 +51,8 @@ class DataCarrierTest(BitcoinTestFramework):
default_size_data = random_bytes(MAX_OP_RETURN_RELAY - 3)
too_long_data = random_bytes(MAX_OP_RETURN_RELAY - 2)
small_data = random_bytes(MAX_OP_RETURN_RELAY - 4)
+ one_byte = random_bytes(1)
+ zero_bytes = random_bytes(0)
self.log.info("Testing null data transaction with default -datacarrier and -datacarriersize values.")
self.test_null_data_transaction(node=self.nodes[0], data=default_size_data, success=True)
@@ -65,6 +69,24 @@ class DataCarrierTest(BitcoinTestFramework):
self.log.info("Testing a null data transaction with a size smaller than accepted by -datacarriersize.")
self.test_null_data_transaction(node=self.nodes[2], data=small_data, success=True)
+ self.log.info("Testing a null data transaction with no data.")
+ self.test_null_data_transaction(node=self.nodes[0], data=None, success=True)
+ self.test_null_data_transaction(node=self.nodes[1], data=None, success=False)
+ self.test_null_data_transaction(node=self.nodes[2], data=None, success=True)
+ self.test_null_data_transaction(node=self.nodes[3], data=None, success=True)
+
+ self.log.info("Testing a null data transaction with zero bytes of data.")
+ self.test_null_data_transaction(node=self.nodes[0], data=zero_bytes, success=True)
+ self.test_null_data_transaction(node=self.nodes[1], data=zero_bytes, success=False)
+ self.test_null_data_transaction(node=self.nodes[2], data=zero_bytes, success=True)
+ self.test_null_data_transaction(node=self.nodes[3], data=zero_bytes, success=True)
+
+ self.log.info("Testing a null data transaction with one byte of data.")
+ self.test_null_data_transaction(node=self.nodes[0], data=one_byte, success=True)
+ self.test_null_data_transaction(node=self.nodes[1], data=one_byte, success=False)
+ self.test_null_data_transaction(node=self.nodes[2], data=one_byte, success=True)
+ self.test_null_data_transaction(node=self.nodes[3], data=one_byte, success=False)
+
if __name__ == '__main__':
DataCarrierTest().main()
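
The new zero-byte and one-byte cases work because -datacarriersize bounds the size of the whole OP_RETURN scriptPubKey, not just the payload. A rough accounting for the node started with -datacarriersize=2, assuming the framework's minimal-push encoding (no data at all is a bare OP_RETURN, empty data adds an OP_0, and a single data byte adds a push opcode plus the byte):

# scriptPubKey sizes for the three new cases, in bytes (assumed minimal-push encoding):
no_data    = 1          # OP_RETURN
zero_bytes = 1 + 1      # OP_RETURN OP_0
one_byte   = 1 + 1 + 1  # OP_RETURN <push-1 opcode> <data byte>

datacarriersize = 2
assert no_data    <= datacarriersize   # accepted on nodes[3]
assert zero_bytes <= datacarriersize   # accepted on nodes[3]
assert one_byte   >  datacarriersize   # rejected on nodes[3]
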
diff --git a/test/functional/mempool_limit.py b/test/functional/mempool_limit.py
index f3f4b42ad0..a1147f70f3 100755
--- a/test/functional/mempool_limit.py
+++ b/test/functional/mempool_limit.py
@@ -34,29 +34,27 @@ class MempoolLimitTest(BitcoinTestFramework):
]]
self.supports_cli = False
- def run_test(self):
+ def fill_mempool(self):
+ """Fill mempool until eviction."""
+ self.log.info("Fill the mempool until eviction is triggered and the mempoolminfee rises")
txouts = gen_return_txouts()
node = self.nodes[0]
- miniwallet = MiniWallet(node)
+ miniwallet = self.wallet
relayfee = node.getnetworkinfo()['relayfee']
- self.log.info('Check that mempoolminfee is minrelaytxfee')
- assert_equal(node.getmempoolinfo()['minrelaytxfee'], Decimal('0.00001000'))
- assert_equal(node.getmempoolinfo()['mempoolminfee'], Decimal('0.00001000'))
-
tx_batch_size = 1
num_of_batches = 75
# Generate UTXOs to flood the mempool
# 1 to create a tx initially that will be evicted from the mempool later
- # 3 batches of multiple transactions with a fee rate much higher than the previous UTXO
+ # 75 transactions each with a fee rate higher than the previous one
# And 1 more to verify that this tx does not get added to the mempool with a fee rate less than the mempoolminfee
# And 2 more for the package cpfp test
- self.generate(miniwallet, 1 + (num_of_batches * tx_batch_size) + 1 + 2)
+ self.generate(miniwallet, 1 + (num_of_batches * tx_batch_size))
# Mine 99 blocks so that the UTXOs are allowed to be spent
self.generate(node, COINBASE_MATURITY - 1)
- self.log.info('Create a mempool tx that will be evicted')
+ self.log.debug("Create a mempool tx that will be evicted")
tx_to_be_evicted_id = miniwallet.send_self_transfer(from_node=node, fee_rate=relayfee)["txid"]
# Increase the tx fee rate to give the subsequent transactions a higher priority in the mempool
@@ -64,21 +62,246 @@ class MempoolLimitTest(BitcoinTestFramework):
# by 130 should result in a fee that corresponds to 2x of that fee rate
base_fee = relayfee * 130
- self.log.info("Fill up the mempool with txs with higher fee rate")
- for batch_of_txid in range(num_of_batches):
- fee = (batch_of_txid + 1) * base_fee
- create_lots_of_big_transactions(miniwallet, node, fee, tx_batch_size, txouts)
+ self.log.debug("Fill up the mempool with txs with higher fee rate")
+ with node.assert_debug_log(["rolling minimum fee bumped"]):
+ for batch_of_txid in range(num_of_batches):
+ fee = (batch_of_txid + 1) * base_fee
+ create_lots_of_big_transactions(miniwallet, node, fee, tx_batch_size, txouts)
- self.log.info('The tx should be evicted by now')
+ self.log.debug("The tx should be evicted by now")
# The number of transactions created should be greater than the ones present in the mempool
assert_greater_than(tx_batch_size * num_of_batches, len(node.getrawmempool()))
# Initial tx created should not be present in the mempool anymore as it had a lower fee rate
assert tx_to_be_evicted_id not in node.getrawmempool()
- self.log.info('Check that mempoolminfee is larger than minrelaytxfee')
+ self.log.debug("Check that mempoolminfee is larger than minrelaytxfee")
assert_equal(node.getmempoolinfo()['minrelaytxfee'], Decimal('0.00001000'))
assert_greater_than(node.getmempoolinfo()['mempoolminfee'], Decimal('0.00001000'))
+ def test_rbf_carveout_disallowed(self):
+ node = self.nodes[0]
+
+ self.log.info("Check that individually-evaluated transactions in a package don't increase package limits for other subpackage parts")
+
+ # We set chain limits to 2 ancestors, 1 descendant, then try to get a parents-and-child chain of 2 in mempool
+ #
+ # A: Solo transaction to be RBF'd (to bump descendant limit for package later)
+ # B: First transaction in package, RBFs A by itself under individual evaluation, which would give it +1 descendant limit
+ # C: Second transaction in package, spends B. If the +1 descendant limit persisted, would make it into mempool
+
+ self.restart_node(0, extra_args=self.extra_args[0] + ["-limitancestorcount=2", "-limitdescendantcount=1"])
+
+ # Generate a confirmed utxo we will double-spend
+ rbf_utxo = self.wallet.send_self_transfer(
+ from_node=node,
+ confirmed_only=True
+ )["new_utxo"]
+ self.generate(node, 1)
+
+ # tx_A needs to be RBF'd, set minfee at set size
+ A_weight = 1000
+ mempoolmin_feerate = node.getmempoolinfo()["mempoolminfee"]
+ tx_A = self.wallet.send_self_transfer(
+ from_node=node,
+ fee=(mempoolmin_feerate / 1000) * (A_weight // 4) + Decimal('0.000001'),
+ target_weight=A_weight,
+ utxo_to_spend=rbf_utxo,
+ confirmed_only=True
+ )
+
+ # RBFs tx_A; not yet submitted
+ tx_B = self.wallet.create_self_transfer(
+ fee=tx_A["fee"] * 4,
+ target_weight=A_weight,
+ utxo_to_spend=rbf_utxo,
+ confirmed_only=True
+ )
+
+ # Spends tx_B's output, too big for cpfp carveout (because that would also increase the descendant limit by 1)
+ non_cpfp_carveout_weight = 40001 # EXTRA_DESCENDANT_TX_SIZE_LIMIT + 1
+ tx_C = self.wallet.create_self_transfer(
+ target_weight=non_cpfp_carveout_weight,
+ fee = (mempoolmin_feerate / 1000) * (non_cpfp_carveout_weight // 4) + Decimal('0.000001'),
+ utxo_to_spend=tx_B["new_utxo"],
+ confirmed_only=True
+ )
+
+ assert_raises_rpc_error(-26, "too-long-mempool-chain", node.submitpackage, [tx_B["hex"], tx_C["hex"]])
+
+ def test_mid_package_eviction(self):
+ node = self.nodes[0]
+ self.log.info("Check a package where each parent passes the current mempoolminfee but would cause eviction before package submission terminates")
+
+ self.restart_node(0, extra_args=self.extra_args[0])
+
+ # Restarting the node resets mempool minimum feerate
+ assert_equal(node.getmempoolinfo()['minrelaytxfee'], Decimal('0.00001000'))
+ assert_equal(node.getmempoolinfo()['mempoolminfee'], Decimal('0.00001000'))
+
+ self.fill_mempool()
+ current_info = node.getmempoolinfo()
+ mempoolmin_feerate = current_info["mempoolminfee"]
+
+ package_hex = []
+ # UTXOs to be spent by the ultimate child transaction
+ parent_utxos = []
+
+ evicted_weight = 8000
+ # Mempool transaction which is evicted due to being at the "bottom" of the mempool when the
+ # mempool overflows and evicts by descendant score. It's important that the eviction doesn't
+ # happen in the middle of package evaluation, as it can invalidate the coins cache.
+ mempool_evicted_tx = self.wallet.send_self_transfer(
+ from_node=node,
+ fee=(mempoolmin_feerate / 1000) * (evicted_weight // 4) + Decimal('0.000001'),
+ target_weight=evicted_weight,
+ confirmed_only=True
+ )
+ # Already in mempool when package is submitted.
+ assert mempool_evicted_tx["txid"] in node.getrawmempool()
+
+ # This parent spends the above mempool transaction that exists when its inputs are first
+ # looked up, but disappears later. It is rejected for being too low fee (but eligible for
+ # reconsideration), and its inputs are cached. When the mempool transaction is evicted, its
+ # coin is no longer available, but the cache could still contain the tx.
+ cpfp_parent = self.wallet.create_self_transfer(
+ utxo_to_spend=mempool_evicted_tx["new_utxo"],
+ fee_rate=mempoolmin_feerate - Decimal('0.00001'),
+ confirmed_only=True)
+ package_hex.append(cpfp_parent["hex"])
+ parent_utxos.append(cpfp_parent["new_utxo"])
+ assert_equal(node.testmempoolaccept([cpfp_parent["hex"]])[0]["reject-reason"], "mempool min fee not met")
+
+ self.wallet.rescan_utxos()
+
+ # Series of parents that don't need CPFP and are submitted individually. Each one is large and
+ # high feerate, which means they should trigger eviction but not be evicted.
+ parent_weight = 100000
+ num_big_parents = 3
+ assert_greater_than(parent_weight * num_big_parents, current_info["maxmempool"] - current_info["bytes"])
+ parent_fee = (100 * mempoolmin_feerate / 1000) * (parent_weight // 4)
+
+ big_parent_txids = []
+ for i in range(num_big_parents):
+ parent = self.wallet.create_self_transfer(fee=parent_fee, target_weight=parent_weight, confirmed_only=True)
+ parent_utxos.append(parent["new_utxo"])
+ package_hex.append(parent["hex"])
+ big_parent_txids.append(parent["txid"])
+ # There is room for each of these transactions independently
+ assert node.testmempoolaccept([parent["hex"]])[0]["allowed"]
+
+ # Create a child spending everything, bumping cpfp_parent just above mempool minimum
+ # feerate. It's important not to bump too much as otherwise mempool_evicted_tx would not be
+ # evicted, making this test much less meaningful.
+ approx_child_vsize = self.wallet.create_self_transfer_multi(utxos_to_spend=parent_utxos)["tx"].get_vsize()
+ cpfp_fee = (mempoolmin_feerate / 1000) * (cpfp_parent["tx"].get_vsize() + approx_child_vsize) - cpfp_parent["fee"]
+ # Specific number of satoshis to fit within a small window. The parent_cpfp + child package needs to be
+ # - When there is mid-package eviction, high enough feerate to meet the new mempoolminfee
+ # - When there is no mid-package eviction, low enough feerate to be evicted immediately after submission.
+ magic_satoshis = 1200
+ cpfp_satoshis = int(cpfp_fee * COIN) + magic_satoshis
+
+ child = self.wallet.create_self_transfer_multi(utxos_to_spend=parent_utxos, fee_per_output=cpfp_satoshis)
+ package_hex.append(child["hex"])
+
+ # Package should be submitted, temporarily exceeding maxmempool, and then evicted.
+ with node.assert_debug_log(expected_msgs=["rolling minimum fee bumped"]):
+ assert_raises_rpc_error(-26, "mempool full", node.submitpackage, package_hex)
+
+ # Maximum size must never be exceeded.
+ assert_greater_than(node.getmempoolinfo()["maxmempool"], node.getmempoolinfo()["bytes"])
+
+ # Evicted transaction and its descendants must not be in mempool.
+ resulting_mempool_txids = node.getrawmempool()
+ assert mempool_evicted_tx["txid"] not in resulting_mempool_txids
+ assert cpfp_parent["txid"] not in resulting_mempool_txids
+ assert child["txid"] not in resulting_mempool_txids
+ for txid in big_parent_txids:
+ assert txid in resulting_mempool_txids
+
+ def test_mid_package_replacement(self):
+ node = self.nodes[0]
+ self.log.info("Check a package where an early tx depends on a later-replaced mempool tx")
+
+ self.restart_node(0, extra_args=self.extra_args[0])
+
+ # Restarting the node resets mempool minimum feerate
+ assert_equal(node.getmempoolinfo()['minrelaytxfee'], Decimal('0.00001000'))
+ assert_equal(node.getmempoolinfo()['mempoolminfee'], Decimal('0.00001000'))
+
+ self.fill_mempool()
+ current_info = node.getmempoolinfo()
+ mempoolmin_feerate = current_info["mempoolminfee"]
+
+ # Mempool transaction which is evicted due to being at the "bottom" of the mempool when the
+ # mempool overflows and evicts by descendant score. It's important that the eviction doesn't
+ # happen in the middle of package evaluation, as it can invalidate the coins cache.
+ double_spent_utxo = self.wallet.get_utxo(confirmed_only=True)
+ replaced_tx = self.wallet.send_self_transfer(
+ from_node=node,
+ utxo_to_spend=double_spent_utxo,
+ fee_rate=mempoolmin_feerate,
+ confirmed_only=True
+ )
+ # Already in mempool when package is submitted.
+ assert replaced_tx["txid"] in node.getrawmempool()
+
+ # This parent spends the above mempool transaction that exists when its inputs are first
+ # looked up, but disappears later. It is rejected for being too low fee (but eligible for
+ # reconsideration), and its inputs are cached. When the mempool transaction is evicted, its
+ # coin is no longer available, but the cache could still contain the tx.
+ cpfp_parent = self.wallet.create_self_transfer(
+ utxo_to_spend=replaced_tx["new_utxo"],
+ fee_rate=mempoolmin_feerate - Decimal('0.00001'),
+ confirmed_only=True)
+
+ self.wallet.rescan_utxos()
+
+ # Parent that replaces the parent of cpfp_parent.
+ replacement_tx = self.wallet.create_self_transfer(
+ utxo_to_spend=double_spent_utxo,
+ fee_rate=10*mempoolmin_feerate,
+ confirmed_only=True
+ )
+ parent_utxos = [cpfp_parent["new_utxo"], replacement_tx["new_utxo"]]
+
+ # Create a child spending everything, CPFPing the low-feerate parent.
+ approx_child_vsize = self.wallet.create_self_transfer_multi(utxos_to_spend=parent_utxos)["tx"].get_vsize()
+ cpfp_fee = (2 * mempoolmin_feerate / 1000) * (cpfp_parent["tx"].get_vsize() + approx_child_vsize) - cpfp_parent["fee"]
+ child = self.wallet.create_self_transfer_multi(utxos_to_spend=parent_utxos, fee_per_output=int(cpfp_fee * COIN))
+ # It's very important that the cpfp_parent is before replacement_tx so that its input (from
+ # replaced_tx) is first looked up *before* replacement_tx is submitted.
+ package_hex = [cpfp_parent["hex"], replacement_tx["hex"], child["hex"]]
+
+ # Package should be submitted, temporarily exceeding maxmempool, and then evicted.
+ assert_raises_rpc_error(-26, "bad-txns-inputs-missingorspent", node.submitpackage, package_hex)
+
+ # Maximum size must never be exceeded.
+ assert_greater_than(node.getmempoolinfo()["maxmempool"], node.getmempoolinfo()["bytes"])
+
+ resulting_mempool_txids = node.getrawmempool()
+ # The replacement should be successful.
+ assert replacement_tx["txid"] in resulting_mempool_txids
+ # The replaced tx and all of its descendants must not be in mempool.
+ assert replaced_tx["txid"] not in resulting_mempool_txids
+ assert cpfp_parent["txid"] not in resulting_mempool_txids
+ assert child["txid"] not in resulting_mempool_txids
+
+
+ def run_test(self):
+ node = self.nodes[0]
+ self.wallet = MiniWallet(node)
+ miniwallet = self.wallet
+
+ # Generate coins needed to create transactions in the subtests (excluding coins used in fill_mempool).
+ self.generate(miniwallet, 20)
+
+ relayfee = node.getnetworkinfo()['relayfee']
+ self.log.info('Check that mempoolminfee is minrelaytxfee')
+ assert_equal(node.getmempoolinfo()['minrelaytxfee'], Decimal('0.00001000'))
+ assert_equal(node.getmempoolinfo()['mempoolminfee'], Decimal('0.00001000'))
+
+ self.fill_mempool()
+
# Deliberately try to create a tx with a fee less than the minimum mempool fee to assert that it does not get added to the mempool
self.log.info('Create a mempool tx that will not pass mempoolminfee')
assert_raises_rpc_error(-26, "mempool min fee not met", miniwallet.send_self_transfer, from_node=node, fee_rate=relayfee)
@@ -149,6 +372,10 @@ class MempoolLimitTest(BitcoinTestFramework):
self.stop_node(0)
self.nodes[0].assert_start_raises_init_error(["-maxmempool=4"], "Error: -maxmempool must be at least 5 MB")
+ self.test_mid_package_replacement()
+ self.test_mid_package_eviction()
+ self.test_rbf_carveout_disallowed()
+
if __name__ == '__main__':
MempoolLimitTest().main()
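
Several of the new subtests size their fees from a target weight with expressions like (mempoolmin_feerate / 1000) * (A_weight // 4) + Decimal('0.000001'): weight // 4 approximates the virtual size in vbytes, the feerate is in BTC/kvB so dividing by 1000 gives BTC per vbyte, and the extra 0.000001 BTC (100 sat) nudges the fee just past the minimum. A worked instance of that arithmetic, using the default regtest minimum feerate as an illustrative value:

from decimal import Decimal

mempoolmin_feerate = Decimal("0.00001")    # BTC per kvB (illustrative; the test reads it via RPC)
A_weight = 1000                            # target transaction weight in weight units
vsize = A_weight // 4                      # ~250 vbytes
fee = (mempoolmin_feerate / 1000) * vsize + Decimal("0.000001")
assert fee == Decimal("0.0000035")         # 350 sat: just above 250 sat at the minimum feerate
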
diff --git a/test/functional/mempool_persist.py b/test/functional/mempool_persist.py
index a1335ff069..32a927084a 100755
--- a/test/functional/mempool_persist.py
+++ b/test/functional/mempool_persist.py
@@ -46,7 +46,7 @@ from test_framework.util import (
assert_greater_than_or_equal,
assert_raises_rpc_error,
)
-from test_framework.wallet import MiniWallet
+from test_framework.wallet import MiniWallet, COIN
class MempoolPersistTest(BitcoinTestFramework):
@@ -159,6 +159,16 @@ class MempoolPersistTest(BitcoinTestFramework):
assert self.nodes[0].getmempoolinfo()["loaded"]
assert_equal(len(self.nodes[0].getrawmempool()), 0)
+ self.log.debug("Import mempool at runtime to node0.")
+ assert_equal({}, self.nodes[0].importmempool(mempooldat0))
+ assert_equal(len(self.nodes[0].getrawmempool()), 7)
+ fees = self.nodes[0].getmempoolentry(txid=last_txid)["fees"]
+ assert_equal(fees["base"], fees["modified"])
+ assert_equal({}, self.nodes[0].importmempool(mempooldat0, {"apply_fee_delta_priority": True, "apply_unbroadcast_set": True}))
+ assert_equal(2, self.nodes[0].getmempoolinfo()["unbroadcastcount"])
+ fees = self.nodes[0].getmempoolentry(txid=last_txid)["fees"]
+ assert_equal(fees["base"] + Decimal("0.00001000"), fees["modified"])
+
self.log.debug("Stop-start node0. Verify that it has the transactions in its mempool.")
self.stop_nodes()
self.start_node(0)
@@ -186,6 +196,7 @@ class MempoolPersistTest(BitcoinTestFramework):
assert_raises_rpc_error(-1, "Unable to dump mempool to disk", self.nodes[1].savemempool)
os.rmdir(mempooldotnew1)
+ self.test_importmempool_union()
self.test_persist_unbroadcast()
def test_persist_unbroadcast(self):
@@ -210,6 +221,46 @@ class MempoolPersistTest(BitcoinTestFramework):
node0.mockscheduler(16 * 60) # 15 min + 1 for buffer
self.wait_until(lambda: len(conn.get_invs()) == 1)
+ def test_importmempool_union(self):
+ self.log.debug("Submit different transactions to node0 and node1's mempools")
+ self.start_node(0)
+ self.start_node(2)
+ tx_node0 = self.mini_wallet.send_self_transfer(from_node=self.nodes[0])
+ tx_node1 = self.mini_wallet.send_self_transfer(from_node=self.nodes[1])
+ tx_node01 = self.mini_wallet.create_self_transfer()
+ tx_node01_secret = self.mini_wallet.create_self_transfer()
+ self.nodes[0].prioritisetransaction(tx_node01["txid"], 0, COIN)
+ self.nodes[0].prioritisetransaction(tx_node01_secret["txid"], 0, 2 * COIN)
+ self.nodes[1].prioritisetransaction(tx_node01_secret["txid"], 0, 3 * COIN)
+ self.nodes[0].sendrawtransaction(tx_node01["hex"])
+ self.nodes[1].sendrawtransaction(tx_node01["hex"])
+ assert tx_node0["txid"] in self.nodes[0].getrawmempool()
+ assert not tx_node0["txid"] in self.nodes[1].getrawmempool()
+ assert not tx_node1["txid"] in self.nodes[0].getrawmempool()
+ assert tx_node1["txid"] in self.nodes[1].getrawmempool()
+ assert tx_node01["txid"] in self.nodes[0].getrawmempool()
+ assert tx_node01["txid"] in self.nodes[1].getrawmempool()
+ assert not tx_node01_secret["txid"] in self.nodes[0].getrawmempool()
+ assert not tx_node01_secret["txid"] in self.nodes[1].getrawmempool()
+
+ self.log.debug("Check that importmempool can add txns without replacing the entire mempool")
+ mempooldat0 = str(self.nodes[0].chain_path / "mempool.dat")
+ result0 = self.nodes[0].savemempool()
+ assert_equal(mempooldat0, result0["filename"])
+ assert_equal({}, self.nodes[1].importmempool(mempooldat0, {"apply_fee_delta_priority": True}))
+ # All transactions should be in node1's mempool now.
+ assert tx_node0["txid"] in self.nodes[1].getrawmempool()
+ assert tx_node1["txid"] in self.nodes[1].getrawmempool()
+ assert not tx_node1["txid"] in self.nodes[0].getrawmempool()
+ # For transactions that already existed, priority should be changed
+ entry_node01 = self.nodes[1].getmempoolentry(tx_node01["txid"])
+ assert_equal(entry_node01["fees"]["base"] + 1, entry_node01["fees"]["modified"])
+ # Deltas for not-yet-submitted transactions should be applied as well (prioritisation is stackable).
+ self.nodes[1].sendrawtransaction(tx_node01_secret["hex"])
+ entry_node01_secret = self.nodes[1].getmempoolentry(tx_node01_secret["txid"])
+ assert_equal(entry_node01_secret["fees"]["base"] + 5, entry_node01_secret["fees"]["modified"])
+ self.stop_nodes()
+
if __name__ == "__main__":
MempoolPersistTest().main()
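
The new test_importmempool_union exercises the importmempool RPC, which merges the transactions from a mempool.dat file into the running mempool rather than replacing it; as the assertions above show, fee deltas and the unbroadcast set from the file are only applied when the corresponding options are set. A minimal usage sketch, where node0 and node1 are assumed handles to running test nodes and the file path comes from savemempool:

# Dump node0's mempool, then merge it into node1's without restarting either node.
dump = node0.savemempool()                    # returns {"filename": ".../mempool.dat"}
node1.importmempool(dump["filename"])         # transactions only; deltas/unbroadcast set ignored
node1.importmempool(dump["filename"], {
    "apply_fee_delta_priority": True,         # re-apply prioritisetransaction deltas from the file
    "apply_unbroadcast_set": True,            # mark the file's unbroadcast txs for rebroadcast
})
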
diff --git a/test/functional/mempool_reorg.py b/test/functional/mempool_reorg.py
index 3a5bc1ebcd..691518ea09 100755
--- a/test/functional/mempool_reorg.py
+++ b/test/functional/mempool_reorg.py
@@ -8,6 +8,17 @@ Test re-org scenarios with a mempool that contains transactions
that spend (directly or indirectly) coinbase transactions.
"""
+import time
+
+from test_framework.messages import (
+ CInv,
+ MSG_WTX,
+ msg_getdata,
+)
+from test_framework.p2p import (
+ P2PTxInvStore,
+ p2p_lock,
+)
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, assert_raises_rpc_error
from test_framework.wallet import MiniWallet
@@ -22,8 +33,84 @@ class MempoolCoinbaseTest(BitcoinTestFramework):
[]
]
+ def test_reorg_relay(self):
+ self.log.info("Test that transactions from disconnected blocks are available for relay immediately")
+ # Prevent time from moving forward
+ self.nodes[1].setmocktime(int(time.time()))
+ self.connect_nodes(0, 1)
+ self.generate(self.wallet, 3)
+
+ # Disconnect node0 and node1 to create different chains.
+ self.disconnect_nodes(0, 1)
+ # Connect a peer to node1, which doesn't have immediate tx relay
+ peer1 = self.nodes[1].add_p2p_connection(P2PTxInvStore())
+
+ # Create a transaction that is included in a block.
+ tx_disconnected = self.wallet.send_self_transfer(from_node=self.nodes[1])
+ self.generate(self.nodes[1], 1, sync_fun=self.no_op)
+
+ # Create a transaction and submit it to node1's mempool.
+ tx_before_reorg = self.wallet.send_self_transfer(from_node=self.nodes[1])
+
+ # Create a child of that transaction and submit it to node1's mempool.
+ tx_child = self.wallet.send_self_transfer(utxo_to_spend=tx_disconnected["new_utxo"], from_node=self.nodes[1])
+ assert_equal(self.nodes[1].getmempoolentry(tx_child["txid"])["ancestorcount"], 1)
+ assert_equal(len(peer1.get_invs()), 0)
+
+ # node0 has a longer chain in which tx_disconnected was not confirmed.
+ self.generate(self.nodes[0], 3, sync_fun=self.no_op)
+
+ # Reconnect the nodes and sync chains. node0's chain should win.
+ self.connect_nodes(0, 1)
+ self.sync_blocks()
+
+ # Child now has an ancestor from the disconnected block
+ assert_equal(self.nodes[1].getmempoolentry(tx_child["txid"])["ancestorcount"], 2)
+ assert_equal(self.nodes[1].getmempoolentry(tx_before_reorg["txid"])["ancestorcount"], 1)
+
+ # peer1 should not have received an inv for any of the transactions during this time, as no
+ # mocktime has elapsed for those transactions to be announced. Likewise, it cannot
+ # request very recent, unannounced transactions.
+ assert_equal(len(peer1.get_invs()), 0)
+ # It's too early to request these two transactions
+ requests_too_recent = msg_getdata([CInv(t=MSG_WTX, h=int(tx["tx"].getwtxid(), 16)) for tx in [tx_before_reorg, tx_child]])
+ peer1.send_and_ping(requests_too_recent)
+ for _ in range(len(requests_too_recent.inv)):
+ peer1.sync_with_ping()
+ with p2p_lock:
+ assert "tx" not in peer1.last_message
+ assert "notfound" in peer1.last_message
+
+ # Request the tx from the disconnected block
+ request_disconnected_tx = msg_getdata([CInv(t=MSG_WTX, h=int(tx_disconnected["tx"].getwtxid(), 16))])
+ peer1.send_and_ping(request_disconnected_tx)
+
+ # The tx from the disconnected block was never announced, and it entered the mempool later
+ # than the transactions that are too recent.
+ assert_equal(len(peer1.get_invs()), 0)
+ with p2p_lock:
+ # However, the node will answer requests for the tx from the recently-disconnected block.
+ assert_equal(peer1.last_message["tx"].tx.getwtxid(), tx_disconnected["tx"].getwtxid())
+
+ self.nodes[1].setmocktime(int(time.time()) + 300)
+ peer1.sync_with_ping()
+ # the transactions are now announced
+ assert_equal(len(peer1.get_invs()), 3)
+ for _ in range(3):
+ # make sure all tx requests have been responded to
+ peer1.sync_with_ping()
+ last_tx_received = peer1.last_message["tx"]
+
+ tx_after_reorg = self.wallet.send_self_transfer(from_node=self.nodes[1])
+ request_after_reorg = msg_getdata([CInv(t=MSG_WTX, h=int(tx_after_reorg["tx"].getwtxid(), 16))])
+ assert tx_after_reorg["txid"] in self.nodes[1].getrawmempool()
+ peer1.send_and_ping(request_after_reorg)
+ with p2p_lock:
+ assert_equal(peer1.last_message["tx"], last_tx_received)
+
def run_test(self):
- wallet = MiniWallet(self.nodes[0])
+ self.wallet = MiniWallet(self.nodes[0])
+ wallet = self.wallet
# Start with a 200 block chain
assert_equal(self.nodes[0].getblockcount(), 200)
@@ -103,6 +190,8 @@ class MempoolCoinbaseTest(BitcoinTestFramework):
assert_equal(set(self.nodes[0].getrawmempool()), set())
self.sync_all()
+ self.test_reorg_relay()
+
if __name__ == '__main__':
MempoolCoinbaseTest().main()
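
test_reorg_relay probes announcement timing by sending explicit getdata requests keyed by wtxid and checking whether the node answers with tx or notfound. A small sketch of building such a request with the framework's message types; `peer` is assumed to be an already-connected P2PInterface (or P2PTxInvStore) and the wtxid is a placeholder:

from test_framework.messages import CInv, MSG_WTX, msg_getdata

wtxid_hex = "aa" * 32                                   # placeholder wtxid for illustration
req = msg_getdata([CInv(t=MSG_WTX, h=int(wtxid_hex, 16))])  # CInv hashes are integers
peer.send_and_ping(req)
# A known-but-unannounced tx from a disconnected block is served with "tx";
# a tx the node is not yet willing to relay is answered with "notfound".
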
diff --git a/test/functional/mempool_sigoplimit.py b/test/functional/mempool_sigoplimit.py
index 962b2b19bd..fbec6d0dc8 100755
--- a/test/functional/mempool_sigoplimit.py
+++ b/test/functional/mempool_sigoplimit.py
@@ -3,6 +3,7 @@
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test sigop limit mempool policy (`-bytespersigop` parameter)"""
+from decimal import Decimal
from math import ceil
from test_framework.messages import (
@@ -25,6 +26,7 @@ from test_framework.script import (
OP_TRUE,
)
from test_framework.script_util import (
+ keys_to_multisig_script,
script_to_p2wsh_script,
)
from test_framework.test_framework import BitcoinTestFramework
@@ -32,9 +34,10 @@ from test_framework.util import (
assert_equal,
assert_greater_than,
assert_greater_than_or_equal,
+ assert_raises_rpc_error,
)
from test_framework.wallet import MiniWallet
-
+from test_framework.wallet_util import generate_keypair
DEFAULT_BYTES_PER_SIGOP = 20 # default setting
@@ -133,6 +136,45 @@ class BytesPerSigOpTest(BitcoinTestFramework):
assert_equal(entry_parent['descendantcount'], 2)
assert_equal(entry_parent['descendantsize'], parent_tx.get_vsize() + sigop_equivalent_vsize)
+ def test_sigops_package(self):
+ self.log.info("Test a overly-large sigops-vbyte hits package limits")
+ # Make a 2-transaction package which fails vbyte checks even though
+ # separately they would work.
+ self.restart_node(0, extra_args=["-bytespersigop=5000"] + self.extra_args[0])
+
+ def create_bare_multisig_tx(utxo_to_spend=None):
+ _, pubkey = generate_keypair()
+ amount_for_bare = 50000
+ tx_dict = self.wallet.create_self_transfer(fee=Decimal("3"), utxo_to_spend=utxo_to_spend)
+ tx_utxo = tx_dict["new_utxo"]
+ tx = tx_dict["tx"]
+ tx.vout.append(CTxOut(amount_for_bare, keys_to_multisig_script([pubkey], k=1)))
+ tx.vout[0].nValue -= amount_for_bare
+ tx_utxo["txid"] = tx.rehash()
+ tx_utxo["value"] -= Decimal("0.00005000")
+ return (tx_utxo, tx)
+
+ tx_parent_utxo, tx_parent = create_bare_multisig_tx()
+ tx_child_utxo, tx_child = create_bare_multisig_tx(tx_parent_utxo)
+
+ # Separately, the parent tx is ok
+ parent_individual_testres = self.nodes[0].testmempoolaccept([tx_parent.serialize().hex()])[0]
+ assert parent_individual_testres["allowed"]
+ # Multisig is counted as MAX_PUBKEYS_PER_MULTISIG = 20 sigops
+ assert_equal(parent_individual_testres["vsize"], 5000 * 20)
+
+ # But together, it's exceeding limits in the *package* context. If sigops adjusted vsize wasn't being checked
+ # here, it would get further in validation and give too-long-mempool-chain error instead.
+ packet_test = self.nodes[0].testmempoolaccept([tx_parent.serialize().hex(), tx_child.serialize().hex()])
+ assert_equal([x["package-error"] for x in packet_test], ["package-mempool-limits", "package-mempool-limits"])
+
+ # When we actually try to submit, the parent makes it into the mempool, but the child would exceed ancestor vsize limits
+ assert_raises_rpc_error(-26, "too-long-mempool-chain", self.nodes[0].submitpackage, [tx_parent.serialize().hex(), tx_child.serialize().hex()])
+ assert tx_parent.rehash() in self.nodes[0].getrawmempool()
+
+ # Transactions are tiny in weight
+ assert_greater_than(2000, tx_parent.get_weight() + tx_child.get_weight())
+
def run_test(self):
self.wallet = MiniWallet(self.nodes[0])
@@ -149,6 +191,8 @@ class BytesPerSigOpTest(BitcoinTestFramework):
self.generate(self.wallet, 1)
+ self.test_sigops_package()
+
if __name__ == '__main__':
BytesPerSigOpTest().main()
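
The package failure above follows directly from the sigop-adjusted size: with -bytespersigop=5000, each bare 1-of-1 multisig output is billed as MAX_PUBKEYS_PER_MULTISIG = 20 sigops, so every transaction gets a 100,000-vbyte adjusted size, and two of them together exceed the default ancestor/package size limit of 101 kvB even though their real weight is tiny. The arithmetic, spelled out:

BYTES_PER_SIGOP = 5000                 # the -bytespersigop value used above
SIGOPS_PER_BARE_MULTISIG = 20          # counted as MAX_PUBKEYS_PER_MULTISIG
adjusted_vsize = BYTES_PER_SIGOP * SIGOPS_PER_BARE_MULTISIG
assert adjusted_vsize == 100_000       # matches the testmempoolaccept "vsize" assertion above
# Two such transactions in one package: 200,000 adjusted vbytes,
# well past the default 101,000-vbyte ancestor/package size limit.
assert 2 * adjusted_vsize > 101_000
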
diff --git a/test/functional/mining_basic.py b/test/functional/mining_basic.py
index aabf06ee53..da796d3f70 100755
--- a/test/functional/mining_basic.py
+++ b/test/functional/mining_basic.py
@@ -18,9 +18,10 @@ from test_framework.blocktools import (
TIME_GENESIS_BLOCK,
)
from test_framework.messages import (
+ BLOCK_HEADER_SIZE,
CBlock,
CBlockHeader,
- BLOCK_HEADER_SIZE,
+ COIN,
ser_uint256,
)
from test_framework.p2p import P2PDataStore
@@ -28,12 +29,14 @@ from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
+ get_fee,
)
from test_framework.wallet import MiniWallet
VERSIONBITS_TOP_BITS = 0x20000000
VERSIONBITS_DEPLOYMENT_TESTDUMMY_BIT = 28
+DEFAULT_BLOCK_MIN_TX_FEE = 1000 # default `-blockmintxfee` setting [sat/kvB]
def assert_template(node, block, expect, rehash=True):
@@ -73,6 +76,45 @@ class MiningTest(BitcoinTestFramework):
self.restart_node(0)
self.connect_nodes(0, 1)
+ def test_blockmintxfee_parameter(self):
+ self.log.info("Test -blockmintxfee setting")
+ self.restart_node(0, extra_args=['-minrelaytxfee=0', '-persistmempool=0'])
+ node = self.nodes[0]
+
+ # test default (no parameter), zero and a bunch of arbitrary blockmintxfee rates [sat/kvB]
+ for blockmintxfee_sat_kvb in (DEFAULT_BLOCK_MIN_TX_FEE, 0, 50, 100, 500, 2500, 5000, 21000, 333333, 2500000):
+ blockmintxfee_btc_kvb = blockmintxfee_sat_kvb / Decimal(COIN)
+ if blockmintxfee_sat_kvb == DEFAULT_BLOCK_MIN_TX_FEE:
+ self.log.info(f"-> Default -blockmintxfee setting ({blockmintxfee_sat_kvb} sat/kvB)...")
+ else:
+ blockmintxfee_parameter = f"-blockmintxfee={blockmintxfee_btc_kvb:.8f}"
+ self.log.info(f"-> Test {blockmintxfee_parameter} ({blockmintxfee_sat_kvb} sat/kvB)...")
+ self.restart_node(0, extra_args=[blockmintxfee_parameter, '-minrelaytxfee=0', '-persistmempool=0'])
+ self.wallet.rescan_utxos() # to avoid spending outputs of txs that are not in mempool anymore after restart
+
+ # submit one tx with exactly the blockmintxfee rate, and one slightly below
+ tx_with_min_feerate = self.wallet.send_self_transfer(from_node=node, fee_rate=blockmintxfee_btc_kvb)
+ assert_equal(tx_with_min_feerate["fee"], get_fee(tx_with_min_feerate["tx"].get_vsize(), blockmintxfee_btc_kvb))
+ if blockmintxfee_btc_kvb > 0:
+ lowerfee_btc_kvb = blockmintxfee_btc_kvb - Decimal(10)/COIN # 0.01 sat/vbyte lower
+ tx_below_min_feerate = self.wallet.send_self_transfer(from_node=node, fee_rate=lowerfee_btc_kvb)
+ assert_equal(tx_below_min_feerate["fee"], get_fee(tx_below_min_feerate["tx"].get_vsize(), lowerfee_btc_kvb))
+ else: # go below zero fee by using modified fees
+ tx_below_min_feerate = self.wallet.send_self_transfer(from_node=node, fee_rate=blockmintxfee_btc_kvb)
+ node.prioritisetransaction(tx_below_min_feerate["txid"], 0, -1)
+
+ # check that tx below specified fee-rate is neither in template nor in the actual block
+ block_template = node.getblocktemplate(NORMAL_GBT_REQUEST_PARAMS)
+ block_template_txids = [tx['txid'] for tx in block_template['transactions']]
+ self.generate(self.wallet, 1, sync_fun=self.no_op)
+ block = node.getblock(node.getbestblockhash(), verbosity=2)
+ block_txids = [tx['txid'] for tx in block['tx']]
+
+ assert tx_with_min_feerate['txid'] in block_template_txids
+ assert tx_with_min_feerate['txid'] in block_txids
+ assert tx_below_min_feerate['txid'] not in block_template_txids
+ assert tx_below_min_feerate['txid'] not in block_txids
+
def run_test(self):
node = self.nodes[0]
self.wallet = MiniWallet(node)
@@ -130,7 +172,7 @@ class MiningTest(BitcoinTestFramework):
block.vtx = [coinbase_tx]
self.log.info("getblocktemplate: segwit rule must be set")
- assert_raises_rpc_error(-8, "getblocktemplate must be called with the segwit rule set", node.getblocktemplate)
+ assert_raises_rpc_error(-8, "getblocktemplate must be called with the segwit rule set", node.getblocktemplate, {})
self.log.info("getblocktemplate: Test valid block")
assert_template(node, block, None)
@@ -279,6 +321,8 @@ class MiningTest(BitcoinTestFramework):
node.submitheader(hexdata=CBlockHeader(bad_block_root).serialize().hex())
assert_equal(node.submitblock(hexdata=block.serialize().hex()), 'duplicate') # valid
+ self.test_blockmintxfee_parameter()
+
if __name__ == '__main__':
MiningTest().main()
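
The -blockmintxfee loop converts sat/kvB to BTC/kvB by dividing by COIN, and the expected absolute fee for a transaction is simply vsize * feerate / 1000 (modulo rounding). A worked conversion for one of the tested rates, with a hypothetical 200-vbyte transaction:

from decimal import Decimal

COIN = 100_000_000                                      # satoshis per BTC
blockmintxfee_sat_kvb = 2500
blockmintxfee_btc_kvb = Decimal(blockmintxfee_sat_kvb) / COIN   # 0.000025 BTC/kvB
vsize = 200                                             # hypothetical tx size in vbytes
expected_fee = blockmintxfee_btc_kvb * vsize / 1000     # fee in BTC
assert expected_fee == Decimal("0.000005")              # 500 sat
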
diff --git a/test/functional/mining_getblocktemplate_longpoll.py b/test/functional/mining_getblocktemplate_longpoll.py
index 53182eb79e..c0e7195c82 100755
--- a/test/functional/mining_getblocktemplate_longpoll.py
+++ b/test/functional/mining_getblocktemplate_longpoll.py
@@ -41,7 +41,8 @@ class GetBlockTemplateLPTest(BitcoinTestFramework):
self.log.info("Test that longpoll waits if we do nothing")
thr = LongpollThread(self.nodes[0])
- thr.start()
+ with self.nodes[0].assert_debug_log(["ThreadRPCServer method=getblocktemplate"], timeout=3):
+ thr.start()
# check that thread still lives
thr.join(5) # wait 5 seconds or until thread exits
assert thr.is_alive()
@@ -55,14 +56,16 @@ class GetBlockTemplateLPTest(BitcoinTestFramework):
self.log.info("Test that longpoll will terminate if we generate a block ourselves")
thr = LongpollThread(self.nodes[0])
- thr.start()
+ with self.nodes[0].assert_debug_log(["ThreadRPCServer method=getblocktemplate"], timeout=3):
+ thr.start()
self.generate(self.nodes[0], 1) # generate a block on own node
thr.join(5) # wait 5 seconds or until thread exits
assert not thr.is_alive()
self.log.info("Test that introducing a new transaction into the mempool will terminate the longpoll")
thr = LongpollThread(self.nodes[0])
- thr.start()
+ with self.nodes[0].assert_debug_log(["ThreadRPCServer method=getblocktemplate"], timeout=3):
+ thr.start()
# generate a transaction and submit it
self.miniwallet.send_self_transfer(from_node=random.choice(self.nodes))
# after one minute, every 10 seconds the mempool is probed, so in 80 seconds it should have returned
diff --git a/test/functional/p2p_addr_relay.py b/test/functional/p2p_addr_relay.py
index e002a520c6..63cd10896d 100755
--- a/test/functional/p2p_addr_relay.py
+++ b/test/functional/p2p_addr_relay.py
@@ -133,7 +133,7 @@ class AddrTest(BitcoinTestFramework):
self.mocktime += 10 * 60
self.nodes[0].setmocktime(self.mocktime)
for peer in receivers:
- peer.sync_send_with_ping()
+ peer.sync_with_ping()
def oversized_addr_test(self):
self.log.info('Send an addr message that is too large')
@@ -299,6 +299,16 @@ class AddrTest(BitcoinTestFramework):
assert_equal(block_relay_peer.num_ipv4_received, 0)
assert inbound_peer.num_ipv4_received > 100
+ self.log.info('Check that we answer getaddr messages only once per connection')
+ received_addrs_before = inbound_peer.num_ipv4_received
+ with self.nodes[0].assert_debug_log(['Ignoring repeated "getaddr".']):
+ inbound_peer.send_and_ping(msg_getaddr())
+ self.mocktime += 10 * 60
+ self.nodes[0].setmocktime(self.mocktime)
+ inbound_peer.sync_with_ping()
+ received_addrs_after = inbound_peer.num_ipv4_received
+ assert_equal(received_addrs_before, received_addrs_after)
+
self.nodes[0].disconnect_p2ps()
def blocksonly_mode_tests(self):
diff --git a/test/functional/p2p_addrfetch.py b/test/functional/p2p_addrfetch.py
index 25efd50040..3ead653ba6 100755
--- a/test/functional/p2p_addrfetch.py
+++ b/test/functional/p2p_addrfetch.py
@@ -48,7 +48,7 @@ class P2PAddrFetch(BitcoinTestFramework):
self.assert_getpeerinfo(peer_ids=[peer_id])
self.log.info("Check that we send getaddr but don't try to sync headers with the addr-fetch peer")
- peer.sync_send_with_ping()
+ peer.sync_with_ping()
with p2p_lock:
assert peer.message_count['getaddr'] == 1
assert peer.message_count['getheaders'] == 0
diff --git a/test/functional/p2p_addrv2_relay.py b/test/functional/p2p_addrv2_relay.py
index 9ab190871f..f9a8c44be2 100755
--- a/test/functional/p2p_addrv2_relay.py
+++ b/test/functional/p2p_addrv2_relay.py
@@ -20,19 +20,24 @@ from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal
I2P_ADDR = "c4gfnttsuwqomiygupdqqqyy5y5emnk5c73hrfvatri67prd7vyq.b32.i2p"
+ONION_ADDR = "pg6mmjiyjmcrsslvykfwnntlaru7p5svn6y2ymmju6nubxndf4pscryd.onion"
ADDRS = []
for i in range(10):
addr = CAddress()
addr.time = int(time.time()) + i
+ addr.port = 8333 + i
addr.nServices = P2P_SERVICES
- # Add one I2P address at an arbitrary position.
+ # Add one I2P and one onion V3 address at an arbitrary position.
if i == 5:
addr.net = addr.NET_I2P
addr.ip = I2P_ADDR
+ addr.port = 0
+ elif i == 8:
+ addr.net = addr.NET_TORV3
+ addr.ip = ONION_ADDR
else:
addr.ip = f"123.123.123.{i % 256}"
- addr.port = 8333 + i
ADDRS.append(addr)
@@ -52,6 +57,17 @@ class AddrReceiver(P2PInterface):
self.wait_until(lambda: "addrv2" in self.last_message)
+def calc_addrv2_msg_size(addrs):
+ size = 1 # vector length byte
+ for addr in addrs:
+ size += 4 # time
+ size += 1 # services, COMPACTSIZE(P2P_SERVICES)
+ size += 1 # network id
+ size += 1 # address length byte
+ size += addr.ADDRV2_ADDRESS_LENGTH[addr.net] # address
+ size += 2 # port
+ return size
+
class AddrTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
@@ -71,9 +87,10 @@ class AddrTest(BitcoinTestFramework):
self.log.info('Check that addrv2 message content is relayed and added to addrman')
addr_receiver = self.nodes[0].add_p2p_connection(AddrReceiver())
msg.addrs = ADDRS
+ msg_size = calc_addrv2_msg_size(ADDRS)
with self.nodes[0].assert_debug_log([
- 'received: addrv2 (159 bytes) peer=0',
- 'sending addrv2 (159 bytes) peer=1',
+ f'received: addrv2 ({msg_size} bytes) peer=0',
+ f'sending addrv2 ({msg_size} bytes) peer=1',
]):
addr_source.send_and_ping(msg)
self.nodes[0].setmocktime(int(time.time()) + 30 * 60)
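
The hardcoded "159 bytes" only matched the old list of nine IPv4 addresses plus one I2P address; calc_addrv2_msg_size() recomputes the size from the per-address fields so the log assertion stays correct after the port changes and the added TorV3 entry. Working it through for the new ADDRS list (8 IPv4, 1 I2P, 1 TorV3), assuming BIP155 address lengths of 4 bytes for IPv4 and 32 bytes for I2P and TorV3:

per_addr_overhead = 4 + 1 + 1 + 1 + 2      # time + services + network id + length byte + port
ipv4  = per_addr_overhead + 4              # 13 bytes
i2p   = per_addr_overhead + 32             # 41 bytes
torv3 = per_addr_overhead + 32             # 41 bytes
total = 1 + 8 * ipv4 + i2p + torv3         # 1-byte vector length prefix
assert total == 187                        # vs. the old hardcoded 159 for 9 IPv4 + 1 I2P
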
diff --git a/test/functional/p2p_blockfilters.py b/test/functional/p2p_blockfilters.py
index e4908735c9..680fa9c7fa 100755
--- a/test/functional/p2p_blockfilters.py
+++ b/test/functional/p2p_blockfilters.py
@@ -211,38 +211,56 @@ class CompactFiltersTest(BitcoinTestFramework):
]
for request in requests:
peer_1 = self.nodes[1].add_p2p_connection(P2PInterface())
- peer_1.send_message(request)
- peer_1.wait_for_disconnect()
+ with self.nodes[1].assert_debug_log(expected_msgs=["requested unsupported block filter type"]):
+ peer_1.send_message(request)
+ peer_1.wait_for_disconnect()
self.log.info("Check that invalid requests result in disconnection.")
requests = [
# Requesting too many filters results in disconnection.
- msg_getcfilters(
- filter_type=FILTER_TYPE_BASIC,
- start_height=0,
- stop_hash=int(main_block_hash, 16),
+ (
+ msg_getcfilters(
+ filter_type=FILTER_TYPE_BASIC,
+ start_height=0,
+ stop_hash=int(main_block_hash, 16),
+ ), "requested too many cfilters/cfheaders"
),
# Requesting too many filter headers results in disconnection.
- msg_getcfheaders(
- filter_type=FILTER_TYPE_BASIC,
- start_height=0,
- stop_hash=int(tip_hash, 16),
+ (
+ msg_getcfheaders(
+ filter_type=FILTER_TYPE_BASIC,
+ start_height=0,
+ stop_hash=int(tip_hash, 16),
+ ), "requested too many cfilters/cfheaders"
),
# Requesting unknown filter type results in disconnection.
- msg_getcfcheckpt(
- filter_type=255,
- stop_hash=int(main_block_hash, 16),
+ (
+ msg_getcfcheckpt(
+ filter_type=255,
+ stop_hash=int(main_block_hash, 16),
+ ), "requested unsupported block filter type"
),
# Requesting unknown hash results in disconnection.
- msg_getcfcheckpt(
- filter_type=FILTER_TYPE_BASIC,
- stop_hash=123456789,
+ (
+ msg_getcfcheckpt(
+ filter_type=FILTER_TYPE_BASIC,
+ stop_hash=123456789,
+ ), "requested invalid block hash"
+ ),
+ (
+ # Request with (start block height > stop block height) results in disconnection.
+ msg_getcfheaders(
+ filter_type=FILTER_TYPE_BASIC,
+ start_height=1000,
+ stop_hash=int(self.nodes[0].getblockhash(999), 16),
+ ), "sent invalid getcfilters/getcfheaders with start height 1000 and stop height 999"
),
]
- for request in requests:
+ for request, expected_log_msg in requests:
peer_0 = self.nodes[0].add_p2p_connection(P2PInterface())
- peer_0.send_message(request)
- peer_0.wait_for_disconnect()
+ with self.nodes[0].assert_debug_log(expected_msgs=[expected_log_msg]):
+ peer_0.send_message(request)
+ peer_0.wait_for_disconnect()
self.log.info("Test -peerblockfilters without -blockfilterindex raises an error")
self.stop_node(0)
diff --git a/test/functional/p2p_blocksonly.py b/test/functional/p2p_blocksonly.py
index 110a1bd03f..637644e6e4 100755
--- a/test/functional/p2p_blocksonly.py
+++ b/test/functional/p2p_blocksonly.py
@@ -101,7 +101,7 @@ class P2PBlocksOnly(BitcoinTestFramework):
# Bump time forward to ensure m_next_inv_send_time timer pops
self.nodes[0].setmocktime(int(time.time()) + 60)
- conn.sync_send_with_ping()
+ conn.sync_with_ping()
assert int(txid, 16) not in conn.get_invs()
def check_p2p_inv_violation(self, peer):
diff --git a/test/functional/p2p_compactblocks_blocksonly.py b/test/functional/p2p_compactblocks_blocksonly.py
index 3d0c421a93..761cd3a218 100755
--- a/test/functional/p2p_compactblocks_blocksonly.py
+++ b/test/functional/p2p_compactblocks_blocksonly.py
@@ -94,11 +94,11 @@ class P2PCompactBlocksBlocksOnly(BitcoinTestFramework):
block1 = self.build_block_on_tip()
p2p_conn_blocksonly.send_message(msg_headers(headers=[CBlockHeader(block1)]))
- p2p_conn_blocksonly.sync_send_with_ping()
+ p2p_conn_blocksonly.sync_with_ping()
assert_equal(p2p_conn_blocksonly.last_message['getdata'].inv, [CInv(MSG_BLOCK | MSG_WITNESS_FLAG, block1.sha256)])
p2p_conn_high_bw.send_message(msg_headers(headers=[CBlockHeader(block1)]))
- p2p_conn_high_bw.sync_send_with_ping()
+ p2p_conn_high_bw.sync_with_ping()
assert_equal(p2p_conn_high_bw.last_message['getdata'].inv, [CInv(MSG_CMPCT_BLOCK, block1.sha256)])
self.log.info("Test that getdata(CMPCT) is still sent on BIP152 low bandwidth connections"
diff --git a/test/functional/p2p_filter.py b/test/functional/p2p_filter.py
index 6699cc3528..665f57365f 100755
--- a/test/functional/p2p_filter.py
+++ b/test/functional/p2p_filter.py
@@ -177,7 +177,7 @@ class FilterTest(BitcoinTestFramework):
filter_peer.merkleblock_received = False
filter_peer.tx_received = False
self.wallet.send_to(from_node=self.nodes[0], scriptPubKey=getnewdestination()[1], amount=7 * COIN)
- filter_peer.sync_send_with_ping()
+ filter_peer.sync_with_ping()
assert not filter_peer.merkleblock_received
assert not filter_peer.tx_received
diff --git a/test/functional/p2p_getaddr_caching.py b/test/functional/p2p_getaddr_caching.py
index 1c9ad7289b..60b43c32ae 100755
--- a/test/functional/p2p_getaddr_caching.py
+++ b/test/functional/p2p_getaddr_caching.py
@@ -6,7 +6,6 @@
import time
-from test_framework.messages import msg_getaddr
from test_framework.p2p import (
P2PInterface,
p2p_lock
@@ -21,6 +20,7 @@ from test_framework.util import (
MAX_ADDR_TO_SEND = 1000
MAX_PCT_ADDR_TO_SEND = 23
+
class AddrReceiver(P2PInterface):
def __init__(self):
@@ -70,11 +70,8 @@ class AddrTest(BitcoinTestFramework):
cur_mock_time = int(time.time())
for i in range(N):
addr_receiver_local = self.nodes[0].add_p2p_connection(AddrReceiver())
- addr_receiver_local.send_and_ping(msg_getaddr())
addr_receiver_onion1 = self.nodes[0].add_p2p_connection(AddrReceiver(), dstport=self.onion_port1)
- addr_receiver_onion1.send_and_ping(msg_getaddr())
addr_receiver_onion2 = self.nodes[0].add_p2p_connection(AddrReceiver(), dstport=self.onion_port2)
- addr_receiver_onion2.send_and_ping(msg_getaddr())
# Trigger response
cur_mock_time += 5 * 60
@@ -105,11 +102,8 @@ class AddrTest(BitcoinTestFramework):
self.log.info('After time passed, see a new response to addr request')
addr_receiver_local = self.nodes[0].add_p2p_connection(AddrReceiver())
- addr_receiver_local.send_and_ping(msg_getaddr())
addr_receiver_onion1 = self.nodes[0].add_p2p_connection(AddrReceiver(), dstport=self.onion_port1)
- addr_receiver_onion1.send_and_ping(msg_getaddr())
addr_receiver_onion2 = self.nodes[0].add_p2p_connection(AddrReceiver(), dstport=self.onion_port2)
- addr_receiver_onion2.send_and_ping(msg_getaddr())
# Trigger response
cur_mock_time += 5 * 60
@@ -123,5 +117,6 @@ class AddrTest(BitcoinTestFramework):
assert set(last_response_on_onion_bind1) != set(addr_receiver_onion1.get_received_addrs())
assert set(last_response_on_onion_bind2) != set(addr_receiver_onion2.get_received_addrs())
+
if __name__ == '__main__':
AddrTest().main()
diff --git a/test/functional/p2p_ibd_stalling.py b/test/functional/p2p_ibd_stalling.py
index aca98ceb3f..0eb37fa92f 100755
--- a/test/functional/p2p_ibd_stalling.py
+++ b/test/functional/p2p_ibd_stalling.py
@@ -151,7 +151,7 @@ class P2PIBDStallingTest(BitcoinTestFramework):
def all_sync_send_with_ping(self, peers):
for p in peers:
if p.is_connected:
- p.sync_send_with_ping()
+ p.sync_with_ping()
def is_block_requested(self, peers, hash):
for p in peers:
diff --git a/test/functional/p2p_ibd_txrelay.py b/test/functional/p2p_ibd_txrelay.py
index 65a94ad31c..b93e39a925 100755
--- a/test/functional/p2p_ibd_txrelay.py
+++ b/test/functional/p2p_ibd_txrelay.py
@@ -53,7 +53,7 @@ class P2PIBDTxRelayTest(BitcoinTestFramework):
peer_inver.send_and_ping(msg_inv([CInv(t=MSG_WTX, h=txid)]))
# The node should not send a getdata, but if it did, it would first delay 2 seconds
self.nodes[0].setmocktime(int(time.time() + NONPREF_PEER_TX_DELAY))
- peer_inver.sync_send_with_ping()
+ peer_inver.sync_with_ping()
with p2p_lock:
assert txid not in peer_inver.getdata_requests
self.nodes[0].disconnect_p2ps()
diff --git a/test/functional/p2p_invalid_block.py b/test/functional/p2p_invalid_block.py
index 710f3d93e1..806fd9c6cb 100755
--- a/test/functional/p2p_invalid_block.py
+++ b/test/functional/p2p_invalid_block.py
@@ -46,12 +46,10 @@ class InvalidBlockRequestTest(BitcoinTestFramework):
self.log.info("Create a new block with an anyone-can-spend coinbase")
- height = 1
block = create_block(tip, create_coinbase(height), block_time)
block.solve()
# Save the coinbase for later
block1 = block
- tip = block.sha256
peer.send_blocks_and_test([block1], node, success=True)
self.log.info("Mature the block.")
diff --git a/test/functional/p2p_invalid_locator.py b/test/functional/p2p_invalid_locator.py
index 626422370a..32a23532a2 100755
--- a/test/functional/p2p_invalid_locator.py
+++ b/test/functional/p2p_invalid_locator.py
@@ -32,7 +32,7 @@ class InvalidLocatorTest(BitcoinTestFramework):
within_max_peer = node.add_p2p_connection(P2PInterface())
msg.locator.vHave = [int(node.getblockhash(i - 1), 16) for i in range(block_count, block_count - (MAX_LOCATOR_SZ), -1)]
within_max_peer.send_message(msg)
- if type(msg) == msg_getheaders:
+ if type(msg) is msg_getheaders:
within_max_peer.wait_for_header(node.getbestblockhash())
else:
within_max_peer.wait_for_block(int(node.getbestblockhash(), 16))
diff --git a/test/functional/p2p_net_deadlock.py b/test/functional/p2p_net_deadlock.py
new file mode 100755
index 0000000000..f69fe52146
--- /dev/null
+++ b/test/functional/p2p_net_deadlock.py
@@ -0,0 +1,37 @@
+#!/usr/bin/env python3
+# Copyright (c) 2023-present The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+
+import threading
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import random_bytes
+
+
+class NetDeadlockTest(BitcoinTestFramework):
+ def set_test_params(self):
+ self.setup_clean_chain = True
+ self.num_nodes = 2
+
+ def run_test(self):
+ node0 = self.nodes[0]
+ node1 = self.nodes[1]
+
+ self.log.info("Simultaneously send a large message on both sides")
+ rand_msg = random_bytes(4000000).hex()
+
+ thread0 = threading.Thread(target=node0.sendmsgtopeer, args=(0, "unknown", rand_msg))
+ thread1 = threading.Thread(target=node1.sendmsgtopeer, args=(0, "unknown", rand_msg))
+
+ thread0.start()
+ thread1.start()
+ thread0.join()
+ thread1.join()
+
+ self.log.info("Check whether a deadlock happened")
+ self.generate(node0, 1)
+ self.sync_blocks()
+
+
+if __name__ == '__main__':
+ NetDeadlockTest().main()
diff --git a/test/functional/p2p_orphan_handling.py b/test/functional/p2p_orphan_handling.py
new file mode 100755
index 0000000000..6166c62aa2
--- /dev/null
+++ b/test/functional/p2p_orphan_handling.py
@@ -0,0 +1,416 @@
+#!/usr/bin/env python3
+# Copyright (c) 2023 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+
+import time
+
+from test_framework.messages import (
+ CInv,
+ MSG_TX,
+ MSG_WITNESS_TX,
+ MSG_WTX,
+ msg_getdata,
+ msg_inv,
+ msg_notfound,
+ msg_tx,
+ tx_from_hex,
+)
+from test_framework.p2p import (
+ GETDATA_TX_INTERVAL,
+ NONPREF_PEER_TX_DELAY,
+ OVERLOADED_PEER_TX_DELAY,
+ p2p_lock,
+ P2PTxInvStore,
+ TXID_RELAY_DELAY,
+)
+from test_framework.util import (
+ assert_equal,
+)
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.wallet import (
+ MiniWallet,
+ MiniWalletMode,
+)
+
+# Time to bump forward (using setmocktime) before waiting for the node to send getdata(tx) in response
+# to an inv(tx), in seconds. This delay includes all possible delays + 1, so it should only be used
+# when the value of the delay is not interesting. If we want to test that the node waits x seconds
+# for one peer and y seconds for another, use specific values instead.
+TXREQUEST_TIME_SKIP = NONPREF_PEER_TX_DELAY + TXID_RELAY_DELAY + OVERLOADED_PEER_TX_DELAY + 1
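+# relay_transaction() below bumps the mocktime by TXREQUEST_TIME_SKIP so that the getdata for an
+# announced transaction fires regardless of how the announcing peer happens to be classified.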
+
+def cleanup(func):
+ # Time to fast-forward (using setmocktime) in between subtests to ensure they do not interfere with
+ # one another, in seconds. Equal to 12 hours, which is enough to expire anything that may exist
+ # (though nothing should since state should be cleared) in p2p data structures.
+ LONG_TIME_SKIP = 12 * 60 * 60
+
+ def wrapper(self):
+ try:
+ func(self)
+ finally:
+ # Clear mempool
+ self.generate(self.nodes[0], 1)
+ self.nodes[0].disconnect_p2ps()
+ self.nodes[0].bumpmocktime(LONG_TIME_SKIP)
+ return wrapper
+
+class PeerTxRelayer(P2PTxInvStore):
+ """A P2PTxInvStore that also remembers all of the getdata and tx messages it receives."""
+ def __init__(self):
+ super().__init__()
+ self._tx_received = []
+ self._getdata_received = []
+
+ @property
+ def tx_received(self):
+ with p2p_lock:
+ return self._tx_received
+
+ @property
+ def getdata_received(self):
+ with p2p_lock:
+ return self._getdata_received
+
+ def on_tx(self, message):
+ self._tx_received.append(message)
+
+ def on_getdata(self, message):
+ self._getdata_received.append(message)
+
+ def wait_for_parent_requests(self, txids):
+ """Wait for requests for missing parents by txid with witness data (MSG_WITNESS_TX or
+ WitnessTx). Requires that the getdata message match these txids exactly; all txids must be
+ requested and no additional requests are allowed."""
+ def test_function():
+ last_getdata = self.last_message.get('getdata')
+ if not last_getdata:
+ return False
+ return len(last_getdata.inv) == len(txids) and all([item.type == MSG_WITNESS_TX and item.hash in txids for item in last_getdata.inv])
+ self.wait_until(test_function, timeout=10)
+
+ def assert_no_immediate_response(self, message):
+ """Check that the node does not immediately respond to this message with any of getdata,
+ inv, tx. The node may respond later.
+ """
+ prev_lastmessage = self.last_message
+ self.send_and_ping(message)
+ after_lastmessage = self.last_message
+ for msgtype in ["getdata", "inv", "tx"]:
+ if msgtype not in prev_lastmessage:
+ assert msgtype not in after_lastmessage
+ else:
+ assert_equal(prev_lastmessage[msgtype], after_lastmessage[msgtype])
+
+ def assert_never_requested(self, txhash):
+ """Check that the node has never sent us a getdata for this hash (int type)"""
+ for getdata in self.getdata_received:
+ for request in getdata.inv:
+ assert request.hash != txhash
+
+class OrphanHandlingTest(BitcoinTestFramework):
+ def set_test_params(self):
+ self.num_nodes = 1
+ self.extra_args = [[]]
+
+ def create_parent_and_child(self):
+ """Create package with 1 parent and 1 child, normal fees (no cpfp)."""
+ parent = self.wallet.create_self_transfer()
+ child = self.wallet.create_self_transfer(utxo_to_spend=parent['new_utxo'])
+ return child["tx"].getwtxid(), child["tx"], parent["tx"]
+
+ def relay_transaction(self, peer, tx):
+ """Relay transaction using MSG_WTX"""
+ wtxid = int(tx.getwtxid(), 16)
+ peer.send_and_ping(msg_inv([CInv(t=MSG_WTX, h=wtxid)]))
+ self.nodes[0].bumpmocktime(TXREQUEST_TIME_SKIP)
+ peer.wait_for_getdata([wtxid])
+ peer.send_and_ping(msg_tx(tx))
+
+ @cleanup
+ def test_arrival_timing_orphan(self):
+ self.log.info("Test missing parents that arrive during delay are not requested")
+ node = self.nodes[0]
+ tx_parent_arrives = self.wallet.create_self_transfer()
+ tx_parent_doesnt_arrive = self.wallet.create_self_transfer()
+ # Fake orphan spends nonexistent outputs of the two parents
+ tx_fake_orphan = self.wallet.create_self_transfer_multi(utxos_to_spend=[
+ {"txid": tx_parent_doesnt_arrive["txid"], "vout": 10, "value": tx_parent_doesnt_arrive["new_utxo"]["value"]},
+ {"txid": tx_parent_arrives["txid"], "vout": 10, "value": tx_parent_arrives["new_utxo"]["value"]}
+ ])
+
+ peer_spy = node.add_p2p_connection(PeerTxRelayer())
+ peer_normal = node.add_p2p_connection(PeerTxRelayer())
+ # This transaction is an orphan because it is missing inputs. It is a "fake" orphan that the
+ # spy peer has crafted to learn information about tx_parent_arrives even though it isn't
+ # able to spend a real output of it, but it could also just be a normal, real child tx.
+ # The node should not immediately respond with a request for orphan parents.
+ # Also, no request should be sent later because it will be resolved by
+ # the time the request is scheduled to be sent.
+ peer_spy.assert_no_immediate_response(msg_tx(tx_fake_orphan["tx"]))
+
+ # Node receives transaction. It attempts to obfuscate the exact timing at which this
+ # transaction entered its mempool. Send unsolicited because otherwise we need to wait for
+ # request delays.
+ peer_normal.send_and_ping(msg_tx(tx_parent_arrives["tx"]))
+ assert tx_parent_arrives["txid"] in node.getrawmempool()
+
+ # Spy peer should not be able to query the node for the parent yet, since it hasn't been
+ # announced / insufficient time has elapsed.
+ parent_inv = CInv(t=MSG_WTX, h=int(tx_parent_arrives["tx"].getwtxid(), 16))
+ assert_equal(len(peer_spy.get_invs()), 0)
+ peer_spy.assert_no_immediate_response(msg_getdata([parent_inv]))
+
+ # Request would be scheduled with this delay because it is not a preferred relay peer.
+ self.nodes[0].bumpmocktime(NONPREF_PEER_TX_DELAY)
+ peer_spy.assert_never_requested(int(tx_parent_arrives["txid"], 16))
+ peer_spy.assert_never_requested(int(tx_parent_doesnt_arrive["txid"], 16))
+ # Request would be scheduled with this delay because it is by txid.
+ self.nodes[0].bumpmocktime(TXID_RELAY_DELAY)
+ peer_spy.wait_for_parent_requests([int(tx_parent_doesnt_arrive["txid"], 16)])
+ peer_spy.assert_never_requested(int(tx_parent_arrives["txid"], 16))
+
+ @cleanup
+ def test_orphan_rejected_parents_exceptions(self):
+ node = self.nodes[0]
+ peer1 = node.add_p2p_connection(PeerTxRelayer())
+ peer2 = node.add_p2p_connection(PeerTxRelayer())
+
+ self.log.info("Test orphan handling when a nonsegwit parent is known to be invalid")
+ parent_low_fee_nonsegwit = self.wallet_nonsegwit.create_self_transfer(fee_rate=0)
+ assert_equal(parent_low_fee_nonsegwit["txid"], parent_low_fee_nonsegwit["tx"].getwtxid())
+ parent_other = self.wallet_nonsegwit.create_self_transfer()
+ child_nonsegwit = self.wallet_nonsegwit.create_self_transfer_multi(
+ utxos_to_spend=[parent_other["new_utxo"], parent_low_fee_nonsegwit["new_utxo"]])
+
+ # Relay the parent. It should be rejected because it pays 0 fees.
+ self.relay_transaction(peer1, parent_low_fee_nonsegwit["tx"])
+ assert parent_low_fee_nonsegwit["txid"] not in node.getrawmempool()
+
+ # Relay the child. It should not be accepted because it has missing inputs.
+ # Its parent should not be requested because its hash (txid == wtxid) has been added to the rejection filter.
+ with node.assert_debug_log(['not keeping orphan with rejected parents {}'.format(child_nonsegwit["txid"])]):
+ self.relay_transaction(peer2, child_nonsegwit["tx"])
+ assert child_nonsegwit["txid"] not in node.getrawmempool()
+
+ # No parents are requested.
+ self.nodes[0].bumpmocktime(GETDATA_TX_INTERVAL)
+ peer1.assert_never_requested(int(parent_other["txid"], 16))
+ peer2.assert_never_requested(int(parent_other["txid"], 16))
+ peer2.assert_never_requested(int(parent_low_fee_nonsegwit["txid"], 16))
+
+ self.log.info("Test orphan handling when a segwit parent was invalid but may be retried with another witness")
+ parent_low_fee = self.wallet.create_self_transfer(fee_rate=0)
+ child_low_fee = self.wallet.create_self_transfer(utxo_to_spend=parent_low_fee["new_utxo"])
+
+ # Relay the low fee parent. It should not be accepted.
+ self.relay_transaction(peer1, parent_low_fee["tx"])
+ assert parent_low_fee["txid"] not in node.getrawmempool()
+
+ # Relay the child. It should not be accepted because it has missing inputs.
+ self.relay_transaction(peer2, child_low_fee["tx"])
+ assert child_low_fee["txid"] not in node.getrawmempool()
+
+ # The parent should be requested because even though the txid commits to the fee, it doesn't
+ # commit to the feerate. Delayed because it's by txid and this is not a preferred relay peer.
+ self.nodes[0].bumpmocktime(NONPREF_PEER_TX_DELAY + TXID_RELAY_DELAY)
+ peer2.wait_for_getdata([int(parent_low_fee["tx"].rehash(), 16)])
+
+ self.log.info("Test orphan handling when a parent was previously downloaded with witness stripped")
+ parent_normal = self.wallet.create_self_transfer()
+ parent1_witness_stripped = tx_from_hex(parent_normal["tx"].serialize_without_witness().hex())
+ child_invalid_witness = self.wallet.create_self_transfer(utxo_to_spend=parent_normal["new_utxo"])
+
+ # Relay the parent with witness stripped. It should not be accepted.
+ self.relay_transaction(peer1, parent1_witness_stripped)
+ assert_equal(parent_normal["txid"], parent1_witness_stripped.rehash())
+ assert parent1_witness_stripped.rehash() not in node.getrawmempool()
+
+ # Relay the child. It should not be accepted because it has missing inputs.
+ self.relay_transaction(peer2, child_invalid_witness["tx"])
+ assert child_invalid_witness["txid"] not in node.getrawmempool()
+
+ # The parent should be requested since the unstripped wtxid would differ. Delayed because
+ # it's by txid and this is not a preferred relay peer.
+ self.nodes[0].bumpmocktime(NONPREF_PEER_TX_DELAY + TXID_RELAY_DELAY)
+ peer2.wait_for_getdata([int(parent_normal["tx"].rehash(), 16)])
+
+ # parent_normal can be relayed again even though parent1_witness_stripped was rejected
+ self.relay_transaction(peer1, parent_normal["tx"])
+ assert_equal(set(node.getrawmempool()), set([parent_normal["txid"], child_invalid_witness["txid"]]))
+
+ @cleanup
+ def test_orphan_multiple_parents(self):
+ node = self.nodes[0]
+ peer = node.add_p2p_connection(PeerTxRelayer())
+
+ self.log.info("Test orphan parent requests with a mixture of confirmed, in-mempool and missing parents")
+ # This UTXO confirmed a long time ago.
+ utxo_conf_old = self.wallet.send_self_transfer(from_node=node)["new_utxo"]
+ txid_conf_old = utxo_conf_old["txid"]
+ self.generate(self.wallet, 10)
+
+ # Create a fake reorg to trigger BlockDisconnected, which resets the rolling bloom filter.
+ # The alternative is to mine thousands of transactions to push it out of the filter.
+ last_block = node.getbestblockhash()
+ node.invalidateblock(last_block)
+ node.preciousblock(last_block)
+ node.syncwithvalidationinterfacequeue()
+
+ # This UTXO confirmed recently.
+ utxo_conf_recent = self.wallet.send_self_transfer(from_node=node)["new_utxo"]
+ self.generate(node, 1)
+
+ # This UTXO is unconfirmed and in the mempool.
+ assert_equal(len(node.getrawmempool()), 0)
+ mempool_tx = self.wallet.send_self_transfer(from_node=node)
+ utxo_unconf_mempool = mempool_tx["new_utxo"]
+
+ # This UTXO is unconfirmed and missing.
+ missing_tx = self.wallet.create_self_transfer()
+ utxo_unconf_missing = missing_tx["new_utxo"]
+ assert missing_tx["txid"] not in node.getrawmempool()
+
+ orphan = self.wallet.create_self_transfer_multi(utxos_to_spend=[utxo_conf_old,
+ utxo_conf_recent, utxo_unconf_mempool, utxo_unconf_missing])
+
+ self.relay_transaction(peer, orphan["tx"])
+ self.nodes[0].bumpmocktime(NONPREF_PEER_TX_DELAY + TXID_RELAY_DELAY)
+ peer.sync_with_ping()
+ assert_equal(len(peer.last_message["getdata"].inv), 2)
+ peer.wait_for_parent_requests([int(txid_conf_old, 16), int(missing_tx["txid"], 16)])
+
+ # Even though the peer would send a notfound for the "old" confirmed transaction, the node
+ # doesn't give up on the orphan. Once all of the missing parents are received, it should be
+ # submitted to mempool.
+ peer.send_message(msg_notfound(vec=[CInv(MSG_WITNESS_TX, int(txid_conf_old, 16))]))
+ peer.send_and_ping(msg_tx(missing_tx["tx"]))
+ peer.sync_with_ping()
+ assert_equal(node.getmempoolentry(orphan["txid"])["ancestorcount"], 3)
+
+ @cleanup
+ def test_orphans_overlapping_parents(self):
+ node = self.nodes[0]
+ # In the process of relaying inflight_parent_AB
+ peer_txrequest = node.add_p2p_connection(PeerTxRelayer())
+ # Sends the orphans
+ peer_orphans = node.add_p2p_connection(PeerTxRelayer())
+
+ confirmed_utxos = [self.wallet_nonsegwit.get_utxo() for _ in range(4)]
+ assert all([utxo["confirmations"] > 0 for utxo in confirmed_utxos])
+ self.log.info("Test handling of multiple orphans with missing parents that are already being requested")
+ # Parent of child_A only
+ missing_parent_A = self.wallet_nonsegwit.create_self_transfer(utxo_to_spend=confirmed_utxos[0])
+ # Parents of child_A and child_B
+ missing_parent_AB = self.wallet_nonsegwit.create_self_transfer(utxo_to_spend=confirmed_utxos[1])
+ inflight_parent_AB = self.wallet_nonsegwit.create_self_transfer(utxo_to_spend=confirmed_utxos[2])
+ # Parent of child_B only
+ missing_parent_B = self.wallet_nonsegwit.create_self_transfer(utxo_to_spend=confirmed_utxos[3])
+ child_A = self.wallet_nonsegwit.create_self_transfer_multi(
+ utxos_to_spend=[missing_parent_A["new_utxo"], missing_parent_AB["new_utxo"], inflight_parent_AB["new_utxo"]]
+ )
+ child_B = self.wallet_nonsegwit.create_self_transfer_multi(
+ utxos_to_spend=[missing_parent_B["new_utxo"], missing_parent_AB["new_utxo"], inflight_parent_AB["new_utxo"]]
+ )
+
+ # The wtxid and txid need to be the same for the node to recognize that the missing input
+ # and in-flight request for inflight_parent_AB are the same transaction.
+ assert_equal(inflight_parent_AB["txid"], inflight_parent_AB["tx"].getwtxid())
+
+ # Announce inflight_parent_AB and wait for getdata
+ peer_txrequest.send_and_ping(msg_inv([CInv(t=MSG_WTX, h=int(inflight_parent_AB["tx"].getwtxid(), 16))]))
+ self.nodes[0].bumpmocktime(NONPREF_PEER_TX_DELAY)
+ peer_txrequest.wait_for_getdata([int(inflight_parent_AB["tx"].getwtxid(), 16)])
+
+ self.log.info("Test that the node does not request a parent if it has an in-flight txrequest")
+ # Relay orphan child_A
+ self.relay_transaction(peer_orphans, child_A["tx"])
+ self.nodes[0].bumpmocktime(NONPREF_PEER_TX_DELAY + TXID_RELAY_DELAY)
+ # There are 3 missing parents. missing_parent_A and missing_parent_AB should be requested.
+ # But inflight_parent_AB should not, because there is already an in-flight request for it.
+ peer_orphans.wait_for_parent_requests([int(missing_parent_A["txid"], 16), int(missing_parent_AB["txid"], 16)])
+
+ self.log.info("Test that the node does not request a parent if it has an in-flight orphan parent request")
+ # Relay orphan child_B
+ self.relay_transaction(peer_orphans, child_B["tx"])
+ self.nodes[0].bumpmocktime(NONPREF_PEER_TX_DELAY + TXID_RELAY_DELAY)
+ # Only missing_parent_B should be requested. Not inflight_parent_AB or missing_parent_AB
+ # because they are already being requested from peer_txrequest and peer_orphans respectively.
+ peer_orphans.wait_for_parent_requests([int(missing_parent_B["txid"], 16)])
+ peer_orphans.assert_never_requested(int(inflight_parent_AB["txid"], 16))
+
+ @cleanup
+ def test_orphan_of_orphan(self):
+ node = self.nodes[0]
+ peer = node.add_p2p_connection(PeerTxRelayer())
+
+ self.log.info("Test handling of an orphan with a parent who is another orphan")
+ missing_grandparent = self.wallet_nonsegwit.create_self_transfer()
+ missing_parent_orphan = self.wallet_nonsegwit.create_self_transfer(utxo_to_spend=missing_grandparent["new_utxo"])
+ missing_parent = self.wallet_nonsegwit.create_self_transfer()
+ orphan = self.wallet_nonsegwit.create_self_transfer_multi(utxos_to_spend=[missing_parent["new_utxo"], missing_parent_orphan["new_utxo"]])
+
+ # The node should put missing_parent_orphan into the orphanage and request missing_grandparent
+ self.relay_transaction(peer, missing_parent_orphan["tx"])
+ self.nodes[0].bumpmocktime(NONPREF_PEER_TX_DELAY + TXID_RELAY_DELAY)
+ peer.wait_for_parent_requests([int(missing_grandparent["txid"], 16)])
+
+ # The node should put the orphan into the orphanage and request missing_parent, skipping
+ # missing_parent_orphan because it already has it in the orphanage.
+ self.relay_transaction(peer, orphan["tx"])
+ self.nodes[0].bumpmocktime(NONPREF_PEER_TX_DELAY + TXID_RELAY_DELAY)
+ peer.wait_for_parent_requests([int(missing_parent["txid"], 16)])
+
+ @cleanup
+ def test_orphan_inherit_rejection(self):
+ node = self.nodes[0]
+ peer1 = node.add_p2p_connection(PeerTxRelayer())
+ peer2 = node.add_p2p_connection(PeerTxRelayer())
+ peer3 = node.add_p2p_connection(PeerTxRelayer())
+
+ self.log.info("Test that an orphan with rejected parents, along with any descendants, cannot be retried with an alternate witness")
+ parent_low_fee_nonsegwit = self.wallet_nonsegwit.create_self_transfer(fee_rate=0)
+ assert_equal(parent_low_fee_nonsegwit["txid"], parent_low_fee_nonsegwit["tx"].getwtxid())
+ child = self.wallet.create_self_transfer(utxo_to_spend=parent_low_fee_nonsegwit["new_utxo"])
+ grandchild = self.wallet.create_self_transfer(utxo_to_spend=child["new_utxo"])
+ assert child["txid"] != child["tx"].getwtxid()
+ assert grandchild["txid"] != grandchild["tx"].getwtxid()
+
+ # Relay the parent. It should be rejected because it pays 0 fees.
+ self.relay_transaction(peer1, parent_low_fee_nonsegwit["tx"])
+
+ # Relay the child. It should be rejected for having missing parents, and this rejection is
+ # cached by txid and wtxid.
+ with node.assert_debug_log(['not keeping orphan with rejected parents {}'.format(child["txid"])]):
+ self.relay_transaction(peer1, child["tx"])
+ assert_equal(0, len(node.getrawmempool()))
+ peer1.assert_never_requested(parent_low_fee_nonsegwit["txid"])
+
+ # Grandchild should also not be kept in orphanage because its parent has been rejected.
+ with node.assert_debug_log(['not keeping orphan with rejected parents {}'.format(grandchild["txid"])]):
+ self.relay_transaction(peer2, grandchild["tx"])
+ assert_equal(0, len(node.getrawmempool()))
+ peer2.assert_never_requested(child["txid"])
+ peer2.assert_never_requested(child["tx"].getwtxid())
+
+ # The child should never be requested, even if announced again with potentially different witness.
+ peer3.send_and_ping(msg_inv([CInv(t=MSG_TX, h=int(child["txid"], 16))]))
+ self.nodes[0].bumpmocktime(TXREQUEST_TIME_SKIP)
+ peer3.assert_never_requested(child["txid"])
+
+ def run_test(self):
+ self.nodes[0].setmocktime(int(time.time()))
+ self.wallet_nonsegwit = MiniWallet(self.nodes[0], mode=MiniWalletMode.RAW_P2PK)
+ self.generate(self.wallet_nonsegwit, 10)
+ self.wallet = MiniWallet(self.nodes[0])
+ self.generate(self.wallet, 160)
+ self.test_arrival_timing_orphan()
+ self.test_orphan_rejected_parents_exceptions()
+ self.test_orphan_multiple_parents()
+ self.test_orphans_overlapping_parents()
+ self.test_orphan_of_orphan()
+ self.test_orphan_inherit_rejection()
+
+
+if __name__ == '__main__':
+ OrphanHandlingTest().main()
diff --git a/test/functional/p2p_permissions.py b/test/functional/p2p_permissions.py
index f84bbf67e6..6153e4a156 100755
--- a/test/functional/p2p_permissions.py
+++ b/test/functional/p2p_permissions.py
@@ -106,7 +106,7 @@ class P2PPermissionsTests(BitcoinTestFramework):
self.log.debug("Check that node[1] will send the tx to node[0] even though it is already in the mempool")
self.connect_nodes(1, 0)
- with self.nodes[1].assert_debug_log(["Force relaying tx {} from peer=0".format(txid)]):
+ with self.nodes[1].assert_debug_log(["Force relaying tx {} (wtxid={}) from peer=0".format(txid, tx.getwtxid())]):
p2p_rebroadcast_wallet.send_txs_and_test([tx], self.nodes[1])
self.wait_until(lambda: txid in self.nodes[0].getrawmempool())
@@ -119,14 +119,14 @@ class P2PPermissionsTests(BitcoinTestFramework):
[tx],
self.nodes[1],
success=False,
- reject_reason='{} from peer=0 was not accepted: txn-mempool-conflict'.format(txid)
+ reject_reason='{} (wtxid={}) from peer=0 was not accepted: txn-mempool-conflict'.format(txid, tx.getwtxid())
)
p2p_rebroadcast_wallet.send_txs_and_test(
[tx],
self.nodes[1],
success=False,
- reject_reason='Not relaying non-mempool transaction {} from forcerelay peer=0'.format(txid)
+ reject_reason='Not relaying non-mempool transaction {} (wtxid={}) from forcerelay peer=0'.format(txid, tx.getwtxid())
)
def checkpermission(self, args, expectedPermissions):
diff --git a/test/functional/p2p_segwit.py b/test/functional/p2p_segwit.py
index bfae190c66..b398ef51e1 100755
--- a/test/functional/p2p_segwit.py
+++ b/test/functional/p2p_segwit.py
@@ -70,9 +70,9 @@ from test_framework.script import (
SIGHASH_ANYONECANPAY,
SIGHASH_NONE,
SIGHASH_SINGLE,
- SegwitV0SignatureHash,
- LegacySignatureHash,
hash160,
+ sign_input_legacy,
+ sign_input_segwitv0,
)
from test_framework.script_util import (
key_to_p2pk_script,
@@ -121,10 +121,8 @@ def subtest(func):
def sign_p2pk_witness_input(script, tx_to, in_idx, hashtype, value, key):
"""Add signature for a P2PK witness script."""
- tx_hash = SegwitV0SignatureHash(script, tx_to, in_idx, hashtype, value)
- signature = key.sign_ecdsa(tx_hash) + chr(hashtype).encode('latin-1')
- tx_to.wit.vtxinwit[in_idx].scriptWitness.stack = [signature, script]
- tx_to.rehash()
+ tx_to.wit.vtxinwit[in_idx].scriptWitness.stack = [script]
+ sign_input_segwitv0(tx_to, in_idx, script, value, key, hashtype)
def test_transaction_acceptance(node, p2p, tx, with_witness, accepted, reason=None):
"""Send a transaction to the node and check that it's accepted to the mempool
@@ -512,10 +510,10 @@ class SegWitTest(BitcoinTestFramework):
# without a witness is invalid).
# Note: The reject reason for this failure could be
# 'block-validation-failed' (if script check threads > 1) or
- # 'non-mandatory-script-verify-flag (Witness program was passed an
+ # 'mandatory-script-verify-flag-failed (Witness program was passed an
# empty witness)' (otherwise).
test_witness_block(self.nodes[0], self.test_node, block, accepted=False, with_witness=False,
- reason='non-mandatory-script-verify-flag (Witness program was passed an empty witness)')
+ reason='mandatory-script-verify-flag-failed (Witness program was passed an empty witness)')
self.utxo.pop(0)
self.utxo.append(UTXO(txid, 2, value))
@@ -708,7 +706,7 @@ class SegWitTest(BitcoinTestFramework):
# segwit activation. Note that older bitcoind's that are not
# segwit-aware would also reject this for failing CLEANSTACK.
with self.nodes[0].assert_debug_log(
- expected_msgs=(spend_tx.hash, 'was not accepted: non-mandatory-script-verify-flag (Witness program was passed an empty witness)')):
+ expected_msgs=(spend_tx.hash, 'was not accepted: mandatory-script-verify-flag-failed (Witness program was passed an empty witness)')):
test_transaction_acceptance(self.nodes[0], self.test_node, spend_tx, with_witness=False, accepted=False)
# Try to put the witness script in the scriptSig, should also fail.
@@ -999,7 +997,7 @@ class SegWitTest(BitcoinTestFramework):
# Extra witness data should not be allowed.
test_witness_block(self.nodes[0], self.test_node, block, accepted=False,
- reason='non-mandatory-script-verify-flag (Witness provided for non-witness script)')
+ reason='mandatory-script-verify-flag-failed (Witness provided for non-witness script)')
# Try extra signature data. Ok if we're not spending a witness output.
block.vtx[1].wit.vtxinwit = []
@@ -1025,7 +1023,7 @@ class SegWitTest(BitcoinTestFramework):
# This has extra witness data, so it should fail.
test_witness_block(self.nodes[0], self.test_node, block, accepted=False,
- reason='non-mandatory-script-verify-flag (Stack size must be exactly one after execution)')
+ reason='mandatory-script-verify-flag-failed (Stack size must be exactly one after execution)')
# Now get rid of the extra witness, but add extra scriptSig data
tx2.vin[0].scriptSig = CScript([OP_TRUE])
@@ -1038,7 +1036,7 @@ class SegWitTest(BitcoinTestFramework):
# This has extra signature data for a witness input, so it should fail.
test_witness_block(self.nodes[0], self.test_node, block, accepted=False,
- reason='non-mandatory-script-verify-flag (Witness requires empty scriptSig)')
+ reason='mandatory-script-verify-flag-failed (Witness requires empty scriptSig)')
# Now get rid of the extra scriptsig on the witness input, and verify
# success (even with extra scriptsig data in the non-witness input)
@@ -1077,7 +1075,7 @@ class SegWitTest(BitcoinTestFramework):
self.update_witness_block_with_transactions(block, [tx, tx2])
test_witness_block(self.nodes[0], self.test_node, block, accepted=False,
- reason='non-mandatory-script-verify-flag (Push value size limit exceeded)')
+ reason='mandatory-script-verify-flag-failed (Push value size limit exceeded)')
# Now reduce the length of the stack element
tx2.wit.vtxinwit[0].scriptWitness.stack[0] = b'a' * (MAX_SCRIPT_ELEMENT_SIZE)
@@ -1118,7 +1116,7 @@ class SegWitTest(BitcoinTestFramework):
self.update_witness_block_with_transactions(block, [tx, tx2])
test_witness_block(self.nodes[0], self.test_node, block, accepted=False,
- reason='non-mandatory-script-verify-flag (Script is too big)')
+ reason='mandatory-script-verify-flag-failed (Script is too big)')
# Try again with one less byte in the witness script
witness_script = CScript([b'a' * MAX_SCRIPT_ELEMENT_SIZE] * 19 + [OP_DROP] * 62 + [OP_TRUE])
@@ -1210,7 +1208,7 @@ class SegWitTest(BitcoinTestFramework):
block.vtx = [block.vtx[0]]
self.update_witness_block_with_transactions(block, [tx2])
test_witness_block(self.nodes[0], self.test_node, block, accepted=False,
- reason='non-mandatory-script-verify-flag (Operation not valid with the current stack size)')
+ reason='mandatory-script-verify-flag-failed (Operation not valid with the current stack size)')
# Fix the broken witness and the block should be accepted.
tx2.wit.vtxinwit[5].scriptWitness.stack = [b'a', witness_script]
@@ -1476,11 +1474,9 @@ class SegWitTest(BitcoinTestFramework):
tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b""))
tx2.vout.append(CTxOut(tx.vout[0].nValue - 1000, script_wsh))
script = keyhash_to_p2pkh_script(pubkeyhash)
- sig_hash = SegwitV0SignatureHash(script, tx2, 0, SIGHASH_ALL, tx.vout[0].nValue)
- signature = key.sign_ecdsa(sig_hash) + b'\x01' # 0x1 is SIGHASH_ALL
tx2.wit.vtxinwit.append(CTxInWitness())
- tx2.wit.vtxinwit[0].scriptWitness.stack = [signature, pubkey]
- tx2.rehash()
+ tx2.wit.vtxinwit[0].scriptWitness.stack = [pubkey]
+ sign_input_segwitv0(tx2, 0, script, tx.vout[0].nValue, key)
# Should fail policy test.
test_transaction_acceptance(self.nodes[0], self.test_node, tx2, True, False, 'non-mandatory-script-verify-flag (Using non-compressed keys in segwit)')
@@ -1529,10 +1525,8 @@ class SegWitTest(BitcoinTestFramework):
tx5 = CTransaction()
tx5.vin.append(CTxIn(COutPoint(tx4.sha256, 0), b""))
tx5.vout.append(CTxOut(tx4.vout[0].nValue - 1000, CScript([OP_TRUE])))
- (sig_hash, err) = LegacySignatureHash(script_pubkey, tx5, 0, SIGHASH_ALL)
- signature = key.sign_ecdsa(sig_hash) + b'\x01' # 0x1 is SIGHASH_ALL
- tx5.vin[0].scriptSig = CScript([signature, pubkey])
- tx5.rehash()
+ tx5.vin[0].scriptSig = CScript([pubkey])
+ sign_input_legacy(tx5, 0, script_pubkey, key)
# Should pass policy and consensus.
test_transaction_acceptance(self.nodes[0], self.test_node, tx5, True, True)
block = self.build_next_block()
@@ -1574,7 +1568,7 @@ class SegWitTest(BitcoinTestFramework):
sign_p2pk_witness_input(witness_script, tx, 0, hashtype, prev_utxo.nValue + 1, key)
self.update_witness_block_with_transactions(block, [tx])
test_witness_block(self.nodes[0], self.test_node, block, accepted=False,
- reason='non-mandatory-script-verify-flag (Script evaluated without error '
+ reason='mandatory-script-verify-flag-failed (Script evaluated without error '
'but finished with a false/empty top stack element')
# Too-small input value
@@ -1582,7 +1576,7 @@ class SegWitTest(BitcoinTestFramework):
block.vtx.pop() # remove last tx
self.update_witness_block_with_transactions(block, [tx])
test_witness_block(self.nodes[0], self.test_node, block, accepted=False,
- reason='non-mandatory-script-verify-flag (Script evaluated without error '
+ reason='mandatory-script-verify-flag-failed (Script evaluated without error '
'but finished with a false/empty top stack element')
# Now try correct value
@@ -1678,15 +1672,17 @@ class SegWitTest(BitcoinTestFramework):
tx2.vout.append(CTxOut(tx.vout[0].nValue, CScript([OP_TRUE])))
script = keyhash_to_p2pkh_script(pubkeyhash)
- sig_hash = SegwitV0SignatureHash(script, tx2, 0, SIGHASH_ALL, tx.vout[0].nValue)
- signature = key.sign_ecdsa(sig_hash) + b'\x01' # 0x1 is SIGHASH_ALL
+ tx2.wit.vtxinwit.append(CTxInWitness())
+ sign_input_segwitv0(tx2, 0, script, tx.vout[0].nValue, key)
+ signature = tx2.wit.vtxinwit[0].scriptWitness.stack.pop()
# Check that we can't have a scriptSig
tx2.vin[0].scriptSig = CScript([signature, pubkey])
+ tx2.rehash()
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx, tx2])
test_witness_block(self.nodes[0], self.test_node, block, accepted=False,
- reason='non-mandatory-script-verify-flag (Witness requires empty scriptSig)')
+ reason='mandatory-script-verify-flag-failed (Witness requires empty scriptSig)')
# Move the signature to the witness.
block.vtx.pop()
diff --git a/test/functional/p2p_v2_transport.py b/test/functional/p2p_v2_transport.py
new file mode 100755
index 0000000000..dd564fed88
--- /dev/null
+++ b/test/functional/p2p_v2_transport.py
@@ -0,0 +1,153 @@
+#!/usr/bin/env python3
+# Copyright (c) 2021-present The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""
+Test v2 transport
+"""
+import socket
+
+from test_framework.messages import NODE_P2P_V2
+from test_framework.p2p import MAGIC_BYTES
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import (
+ assert_equal,
+ p2p_port,
+)
+
+
+class V2TransportTest(BitcoinTestFramework):
+ def set_test_params(self):
+ self.setup_clean_chain = True
+ self.num_nodes = 5
+ self.extra_args = [["-v2transport=1"], ["-v2transport=1"], ["-v2transport=0"], ["-v2transport=0"], ["-v2transport=0"]]
+
+ def run_test(self):
+ sending_handshake = "start sending v2 handshake to peer"
+ downgrading_to_v1 = "retrying with v1 transport protocol for peer"
+ self.disconnect_nodes(0, 1)
+ self.disconnect_nodes(1, 2)
+ self.disconnect_nodes(2, 3)
+ self.disconnect_nodes(3, 4)
+
+ # verify local services
+ network_info = self.nodes[2].getnetworkinfo()
+ assert_equal(int(network_info["localservices"], 16) & NODE_P2P_V2, 0)
+ assert "P2P_V2" not in network_info["localservicesnames"]
+ network_info = self.nodes[1].getnetworkinfo()
+ assert_equal(int(network_info["localservices"], 16) & NODE_P2P_V2, NODE_P2P_V2)
+ assert "P2P_V2" in network_info["localservicesnames"]
+
+ # V2 nodes can sync with V2 nodes
+ assert_equal(self.nodes[0].getblockcount(), 0)
+ assert_equal(self.nodes[1].getblockcount(), 0)
+ with self.nodes[0].assert_debug_log(expected_msgs=[sending_handshake],
+ unexpected_msgs=[downgrading_to_v1]):
+ self.connect_nodes(0, 1, peer_advertises_v2=True)
+ self.generate(self.nodes[0], 5, sync_fun=lambda: self.sync_all(self.nodes[0:2]))
+ assert_equal(self.nodes[1].getblockcount(), 5)
+ # verify there is a v2 connection between node 0 and 1
+ node_0_info = self.nodes[0].getpeerinfo()
+ node_1_info = self.nodes[1].getpeerinfo()
+ assert_equal(len(node_0_info), 1)
+ assert_equal(len(node_1_info), 1)
+ assert_equal(node_0_info[0]["transport_protocol_type"], "v2")
+ assert_equal(node_1_info[0]["transport_protocol_type"], "v2")
+ assert_equal(len(node_0_info[0]["session_id"]), 64)
+ assert_equal(len(node_1_info[0]["session_id"]), 64)
+ assert_equal(node_0_info[0]["session_id"], node_1_info[0]["session_id"])
+
+ # V1 nodes can sync with each other
+ assert_equal(self.nodes[2].getblockcount(), 0)
+ assert_equal(self.nodes[3].getblockcount(), 0)
+ with self.nodes[2].assert_debug_log(expected_msgs=[],
+ unexpected_msgs=[sending_handshake, downgrading_to_v1]):
+ self.connect_nodes(2, 3, peer_advertises_v2=False)
+ self.generate(self.nodes[2], 8, sync_fun=lambda: self.sync_all(self.nodes[2:4]))
+ assert_equal(self.nodes[3].getblockcount(), 8)
+ assert self.nodes[0].getbestblockhash() != self.nodes[2].getbestblockhash()
+ # verify there is a v1 connection between node 2 and 3
+ node_2_info = self.nodes[2].getpeerinfo()
+ node_3_info = self.nodes[3].getpeerinfo()
+ assert_equal(len(node_2_info), 1)
+ assert_equal(len(node_3_info), 1)
+ assert_equal(node_2_info[0]["transport_protocol_type"], "v1")
+ assert_equal(node_3_info[0]["transport_protocol_type"], "v1")
+ assert_equal(len(node_2_info[0]["session_id"]), 0)
+ assert_equal(len(node_3_info[0]["session_id"]), 0)
+
+ # V1 nodes can sync with V2 nodes
+ self.disconnect_nodes(0, 1)
+ self.disconnect_nodes(2, 3)
+ with self.nodes[2].assert_debug_log(expected_msgs=[],
+ unexpected_msgs=[sending_handshake, downgrading_to_v1]):
+ self.connect_nodes(2, 1, peer_advertises_v2=False) # cannot enable v2 on v1 node
+ self.sync_all(self.nodes[1:3])
+ assert_equal(self.nodes[1].getblockcount(), 8)
+ assert self.nodes[0].getbestblockhash() != self.nodes[1].getbestblockhash()
+ # verify there is a v1 connection between node 1 and 2
+ node_1_info = self.nodes[1].getpeerinfo()
+ node_2_info = self.nodes[2].getpeerinfo()
+ assert_equal(len(node_1_info), 1)
+ assert_equal(len(node_2_info), 1)
+ assert_equal(node_1_info[0]["transport_protocol_type"], "v1")
+ assert_equal(node_2_info[0]["transport_protocol_type"], "v1")
+ assert_equal(len(node_1_info[0]["session_id"]), 0)
+ assert_equal(len(node_2_info[0]["session_id"]), 0)
+
+ # V2 nodes can sync with V1 nodes
+ self.disconnect_nodes(1, 2)
+ with self.nodes[0].assert_debug_log(expected_msgs=[],
+ unexpected_msgs=[sending_handshake, downgrading_to_v1]):
+ self.connect_nodes(0, 3, peer_advertises_v2=False)
+ self.sync_all([self.nodes[0], self.nodes[3]])
+ assert_equal(self.nodes[0].getblockcount(), 8)
+ # verify there is a v1 connection between node 0 and 3
+ node_0_info = self.nodes[0].getpeerinfo()
+ node_3_info = self.nodes[3].getpeerinfo()
+ assert_equal(len(node_0_info), 1)
+ assert_equal(len(node_3_info), 1)
+ assert_equal(node_0_info[0]["transport_protocol_type"], "v1")
+ assert_equal(node_3_info[0]["transport_protocol_type"], "v1")
+ assert_equal(len(node_0_info[0]["session_id"]), 0)
+ assert_equal(len(node_3_info[0]["session_id"]), 0)
+
+ # V2 node mines another block and everyone gets it
+ self.connect_nodes(0, 1, peer_advertises_v2=True)
+ self.connect_nodes(1, 2, peer_advertises_v2=False)
+ self.generate(self.nodes[1], 1, sync_fun=lambda: self.sync_all(self.nodes[0:4]))
+ assert_equal(self.nodes[0].getblockcount(), 9) # sync_all() verifies tip hashes match
+
+ # V1 node mines another block and everyone gets it
+ self.generate(self.nodes[3], 2, sync_fun=lambda: self.sync_all(self.nodes[0:4]))
+ assert_equal(self.nodes[2].getblockcount(), 11) # sync_all() verifies tip hashes match
+
+ assert_equal(self.nodes[4].getblockcount(), 0)
+ # Peer 4 is v1 p2p, but is falsely advertised as v2.
+ with self.nodes[1].assert_debug_log(expected_msgs=[sending_handshake, downgrading_to_v1]):
+ self.connect_nodes(1, 4, peer_advertises_v2=True)
+ self.sync_all()
+ assert_equal(self.nodes[4].getblockcount(), 11)
+
+ # Check v1 prefix detection
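+ # A v1 message header starts with the 4 network magic bytes followed by the 12-byte,
+ # zero-padded command name, so the first 16 bytes of a v1 "version" message are predictable.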
+ V1_PREFIX = MAGIC_BYTES["regtest"] + b"version\x00\x00\x00\x00\x00"
+ assert_equal(len(V1_PREFIX), 16)
+ with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
+ num_peers = len(self.nodes[0].getpeerinfo())
+ s.connect(("127.0.0.1", p2p_port(0)))
+ self.wait_until(lambda: len(self.nodes[0].getpeerinfo()) == num_peers + 1)
+ s.sendall(V1_PREFIX[:-1])
+ assert_equal(self.nodes[0].getpeerinfo()[-1]["transport_protocol_type"], "detecting")
+ s.sendall(bytes([V1_PREFIX[-1]])) # send out last prefix byte
+ self.wait_until(lambda: self.nodes[0].getpeerinfo()[-1]["transport_protocol_type"] == "v1")
+
+ # Check wrong network prefix detection (hits if the next 12 bytes correspond to a v1 version message)
+ wrong_network_magic_prefix = MAGIC_BYTES["signet"] + V1_PREFIX[4:]
+ with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
+ s.connect(("127.0.0.1", p2p_port(0)))
+ with self.nodes[0].assert_debug_log("V2 transport error: V1 peer with wrong MessageStart"):
+ s.sendall(wrong_network_magic_prefix + b"somepayload")
+
+
+if __name__ == '__main__':
+ V2TransportTest().main()
diff --git a/test/functional/rpc_blockchain.py b/test/functional/rpc_blockchain.py
index 5f2bece733..18a0a0c6cc 100755
--- a/test/functional/rpc_blockchain.py
+++ b/test/functional/rpc_blockchain.py
@@ -577,8 +577,8 @@ class BlockchainTest(BitcoinTestFramework):
self.log.info("Test that getblock with verbosity 2 and 3 still works with pruned Undo data")
def move_block_file(old, new):
- old_path = self.nodes[0].chain_path / "blocks" / old
- new_path = self.nodes[0].chain_path / "blocks" / new
+ old_path = self.nodes[0].blocks_path / old
+ new_path = self.nodes[0].blocks_path / new
old_path.rename(new_path)
# Move instead of deleting so we can restore chain state afterwards
diff --git a/test/functional/rpc_decodescript.py b/test/functional/rpc_decodescript.py
index 673836bd04..f37e61ab50 100755
--- a/test/functional/rpc_decodescript.py
+++ b/test/functional/rpc_decodescript.py
@@ -271,7 +271,7 @@ class DecodeScriptTest(BitcoinTestFramework):
assert res["segwit"]["desc"] == "wsh(and_v(and_v(v:hash160(ffffffffffffffffffffffffffffffffffffffff),v:pk(0250929b74c1a04954b78b4b6035e97a5e078a5a0f28ec96d547bfee9ace803ac0)),older(1)))#gm8xz4fl"
# Miniscript-incompatible offered HTLC
res = self.nodes[0].decodescript("82012088a914ffffffffffffffffffffffffffffffffffffffff882102ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffacb2")
- assert res["segwit"]["desc"] == "wsh(raw(82012088a914ffffffffffffffffffffffffffffffffffffffff882102ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffacb2))#ra6w2xa7"
+ assert res["segwit"]["desc"] == "addr(bcrt1q73qyfypp47hvgnkjqnav0j3k2lq3v76wg22dk8tmwuz5sfgv66xsvxg6uu)#9p3q328s"
# Miniscript-compatible multisig bigger than 520 byte P2SH limit.
res = self.nodes[0].decodescript("5b21020e0338c96a8870479f2396c373cc7696ba124e8635d41b0ea581112b678172612102675333a4e4b8fb51d9d4e22fa5a8eaced3fdac8a8cbf9be8c030f75712e6af992102896807d54bc55c24981f24a453c60ad3e8993d693732288068a23df3d9f50d4821029e51a5ef5db3137051de8323b001749932f2ff0d34c82e96a2c2461de96ae56c2102a4e1a9638d46923272c266631d94d36bdb03a64ee0e14c7518e49d2f29bc401021031c41fdbcebe17bec8d49816e00ca1b5ac34766b91c9f2ac37d39c63e5e008afb2103079e252e85abffd3c401a69b087e590a9b86f33f574f08129ccbd3521ecf516b2103111cf405b627e22135b3b3733a4a34aa5723fb0f58379a16d32861bf576b0ec2210318f331b3e5d38156da6633b31929c5b220349859cc9ca3d33fb4e68aa08401742103230dae6b4ac93480aeab26d000841298e3b8f6157028e47b0897c1e025165de121035abff4281ff00660f99ab27bb53e6b33689c2cd8dcd364bc3c90ca5aea0d71a62103bd45cddfacf2083b14310ae4a84e25de61e451637346325222747b157446614c2103cc297026b06c71cbfa52089149157b5ff23de027ac5ab781800a578192d175462103d3bde5d63bdb3a6379b461be64dad45eabff42f758543a9645afd42f6d4248282103ed1e8d5109c9ed66f7941bc53cc71137baa76d50d274bda8d5e8ffbd6e61fe9a5fae736402c00fb269522103aab896d53a8e7d6433137bbba940f9c521e085dd07e60994579b64a6d992cf79210291b7d0b1b692f8f524516ed950872e5da10fb1b808b5a526dedc6fed1cf29807210386aa9372fbab374593466bc5451dc59954e90787f08060964d95c87ef34ca5bb53ae68")
assert_equal(res["segwit"]["desc"], "wsh(or_d(multi(11,020e0338c96a8870479f2396c373cc7696ba124e8635d41b0ea581112b67817261,02675333a4e4b8fb51d9d4e22fa5a8eaced3fdac8a8cbf9be8c030f75712e6af99,02896807d54bc55c24981f24a453c60ad3e8993d693732288068a23df3d9f50d48,029e51a5ef5db3137051de8323b001749932f2ff0d34c82e96a2c2461de96ae56c,02a4e1a9638d46923272c266631d94d36bdb03a64ee0e14c7518e49d2f29bc4010,031c41fdbcebe17bec8d49816e00ca1b5ac34766b91c9f2ac37d39c63e5e008afb,03079e252e85abffd3c401a69b087e590a9b86f33f574f08129ccbd3521ecf516b,03111cf405b627e22135b3b3733a4a34aa5723fb0f58379a16d32861bf576b0ec2,0318f331b3e5d38156da6633b31929c5b220349859cc9ca3d33fb4e68aa0840174,03230dae6b4ac93480aeab26d000841298e3b8f6157028e47b0897c1e025165de1,035abff4281ff00660f99ab27bb53e6b33689c2cd8dcd364bc3c90ca5aea0d71a6,03bd45cddfacf2083b14310ae4a84e25de61e451637346325222747b157446614c,03cc297026b06c71cbfa52089149157b5ff23de027ac5ab781800a578192d17546,03d3bde5d63bdb3a6379b461be64dad45eabff42f758543a9645afd42f6d424828,03ed1e8d5109c9ed66f7941bc53cc71137baa76d50d274bda8d5e8ffbd6e61fe9a),and_v(v:older(4032),multi(2,03aab896d53a8e7d6433137bbba940f9c521e085dd07e60994579b64a6d992cf79,0291b7d0b1b692f8f524516ed950872e5da10fb1b808b5a526dedc6fed1cf29807,0386aa9372fbab374593466bc5451dc59954e90787f08060964d95c87ef34ca5bb))))#7jwwklk4")
diff --git a/test/functional/rpc_deriveaddresses.py b/test/functional/rpc_deriveaddresses.py
index e96b6bda90..64994d6bb3 100755
--- a/test/functional/rpc_deriveaddresses.py
+++ b/test/functional/rpc_deriveaddresses.py
@@ -42,7 +42,10 @@ class DeriveaddressesTest(BitcoinTestFramework):
assert_raises_rpc_error(-8, "Range should be greater or equal than 0", self.nodes[0].deriveaddresses, descsum_create("wpkh(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*)"), [-1, 0])
combo_descriptor = descsum_create("combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/0)")
- assert_equal(self.nodes[0].deriveaddresses(combo_descriptor), ["mtfUoUax9L4tzXARpw1oTGxWyoogp52KhJ", "mtfUoUax9L4tzXARpw1oTGxWyoogp52KhJ", address, "2NDvEwGfpEqJWfybzpKPHF2XH3jwoQV3D7x"])
+ assert_equal(self.nodes[0].deriveaddresses(combo_descriptor), ["mtfUoUax9L4tzXARpw1oTGxWyoogp52KhJ", address, "2NDvEwGfpEqJWfybzpKPHF2XH3jwoQV3D7x"])
+
+ # P2PK does not have a valid address
+ assert_raises_rpc_error(-5, "Descriptor does not have a corresponding address", self.nodes[0].deriveaddresses, descsum_create("pk(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK)"))
# Before #26275, bitcoind would crash when deriveaddresses was
# called with derivation index 2147483647, which is the maximum
diff --git a/test/functional/rpc_dumptxoutset.py b/test/functional/rpc_dumptxoutset.py
index 4260e95629..2cae602cc2 100755
--- a/test/functional/rpc_dumptxoutset.py
+++ b/test/functional/rpc_dumptxoutset.py
@@ -4,13 +4,15 @@
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the generation of UTXO snapshots using `dumptxoutset`.
"""
+from pathlib import Path
from test_framework.blocktools import COINBASE_MATURITY
from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import assert_equal, assert_raises_rpc_error
-
-import hashlib
-from pathlib import Path
+from test_framework.util import (
+ assert_equal,
+ assert_raises_rpc_error,
+ sha256sum_file,
+)
class DumptxoutsetTest(BitcoinTestFramework):
@@ -39,11 +41,10 @@ class DumptxoutsetTest(BitcoinTestFramework):
out['base_hash'],
'09abf0e7b510f61ca6cf33bab104e9ee99b3528b371d27a2d4b39abb800fba7e')
- with open(str(expected_path), 'rb') as f:
- digest = hashlib.sha256(f.read()).hexdigest()
- # UTXO snapshot hash should be deterministic based on mocked time.
- assert_equal(
- digest, 'b1bacb602eacf5fbc9a7c2ef6eeb0d229c04e98bdf0c2ea5929012cd0eae3830')
+ # UTXO snapshot hash should be deterministic based on mocked time.
+ assert_equal(
+ sha256sum_file(str(expected_path)).hex(),
+ 'b1bacb602eacf5fbc9a7c2ef6eeb0d229c04e98bdf0c2ea5929012cd0eae3830')
assert_equal(
out['txoutset_hash'], '1f7e3befd45dc13ae198dfbb22869a9c5c4196f8e9ef9735831af1288033f890')
diff --git a/test/functional/rpc_estimatefee.py b/test/functional/rpc_estimatefee.py
index dad3cbcf0c..6643799a76 100755
--- a/test/functional/rpc_estimatefee.py
+++ b/test/functional/rpc_estimatefee.py
@@ -36,6 +36,9 @@ class EstimateFeeTest(BitcoinTestFramework):
assert_raises_rpc_error(-1, "estimatesmartfee", self.nodes[0].estimatesmartfee, 1, 'ECONOMICAL', 1)
assert_raises_rpc_error(-1, "estimaterawfee", self.nodes[0].estimaterawfee, 1, 1, 1)
+ # max value of 1008 per src/policy/fees.h
+ assert_raises_rpc_error(-8, "Invalid conf_target, must be between 1 and 1008", self.nodes[0].estimaterawfee, 1009)
+
# valid calls
self.nodes[0].estimatesmartfee(1)
# self.nodes[0].estimatesmartfee(1, None)
diff --git a/test/functional/rpc_misc.py b/test/functional/rpc_misc.py
index 43d1e2c731..20485c01d3 100755
--- a/test/functional/rpc_misc.py
+++ b/test/functional/rpc_misc.py
@@ -27,7 +27,7 @@ class RpcMiscTest(BitcoinTestFramework):
self.log.info("test CHECK_NONFATAL")
assert_raises_rpc_error(
-1,
- 'Internal bug detected: "request.params[9].get_str() != "trigger_internal_bug""',
+ 'Internal bug detected: request.params[9].get_str() != "trigger_internal_bug"',
lambda: node.echo(arg9='trigger_internal_bug'),
)
diff --git a/test/functional/rpc_net.py b/test/functional/rpc_net.py
index 5fdd5daddf..2c7f974d0b 100755
--- a/test/functional/rpc_net.py
+++ b/test/functional/rpc_net.py
@@ -12,6 +12,7 @@ from itertools import product
import time
import test_framework.messages
+from test_framework.netutil import ADDRMAN_NEW_BUCKET_COUNT, ADDRMAN_TRIED_BUCKET_COUNT, ADDRMAN_BUCKET_SIZE
from test_framework.p2p import (
P2PInterface,
P2P_SERVICES,
@@ -61,10 +62,13 @@ class NetTest(BitcoinTestFramework):
self.test_getpeerinfo()
self.test_getnettotals()
self.test_getnetworkinfo()
- self.test_getaddednodeinfo()
+ self.test_addnode_getaddednodeinfo()
self.test_service_flags()
self.test_getnodeaddresses()
self.test_addpeeraddress()
+ self.test_sendmsgtopeer()
+ self.test_getaddrmaninfo()
+ self.test_getrawaddrman()
def test_connection_count(self):
self.log.info("Test getconnectioncount")
@@ -140,11 +144,13 @@ class NetTest(BitcoinTestFramework):
"relaytxes": False,
"services": "0000000000000000",
"servicesnames": [],
+ "session_id": "",
"startingheight": -1,
"subver": "",
"synced_blocks": -1,
"synced_headers": -1,
"timeoffset": 0,
+ "transport_protocol_type": "v1",
"version": 0,
},
)
@@ -203,8 +209,8 @@ class NetTest(BitcoinTestFramework):
# Check dynamically generated networks list in getnetworkinfo help output.
assert "(ipv4, ipv6, onion, i2p, cjdns)" in self.nodes[0].help("getnetworkinfo")
- def test_getaddednodeinfo(self):
- self.log.info("Test getaddednodeinfo")
+ def test_addnode_getaddednodeinfo(self):
+ self.log.info("Test addnode and getaddednodeinfo")
assert_equal(self.nodes[0].getaddednodeinfo(), [])
# add a node (node2) to node0
ip_port = "127.0.0.1:{}".format(p2p_port(2))
@@ -218,6 +224,8 @@ class NetTest(BitcoinTestFramework):
# check that node can be removed
self.nodes[0].addnode(node=ip_port, command='remove')
assert_equal(self.nodes[0].getaddednodeinfo(), [])
+ # check that an invalid command returns an error
+ assert_raises_rpc_error(-1, 'addnode "node" "command"', self.nodes[0].addnode, node=ip_port, command='abc')
# check that trying to remove the node again returns an error
assert_raises_rpc_error(-24, "Node could not be removed", self.nodes[0].addnode, node=ip_port, command='remove')
# check that a non-existent node returns an error
@@ -328,6 +336,169 @@ class NetTest(BitcoinTestFramework):
addrs = node.getnodeaddresses(count=0) # getnodeaddresses re-runs the addrman checks
assert_equal(len(addrs), 2)
+ def test_sendmsgtopeer(self):
+ node = self.nodes[0]
+
+ self.restart_node(0)
+ self.connect_nodes(0, 1)
+
+ self.log.info("Test sendmsgtopeer")
+ self.log.debug("Send a valid message")
+ with self.nodes[1].assert_debug_log(expected_msgs=["received: addr"]):
+ node.sendmsgtopeer(peer_id=0, msg_type="addr", msg="FFFFFF")
+
+ self.log.debug("Test error for sending to non-existing peer")
+ assert_raises_rpc_error(-1, "Error: Could not send message to peer", node.sendmsgtopeer, peer_id=100, msg_type="addr", msg="FF")
+
+ self.log.debug("Test that zero-length msg_type is allowed")
+ node.sendmsgtopeer(peer_id=0, msg_type="addr", msg="")
+
+ self.log.debug("Test error for msg_type that is too long")
+ assert_raises_rpc_error(-8, "Error: msg_type too long, max length is 12", node.sendmsgtopeer, peer_id=0, msg_type="long_msg_type", msg="FF")
+
+ self.log.debug("Test that unknown msg_type is allowed")
+ node.sendmsgtopeer(peer_id=0, msg_type="unknown", msg="FF")
+
+ self.log.debug("Test that empty msg is allowed")
+ node.sendmsgtopeer(peer_id=0, msg_type="addr", msg="FF")
+
+ self.log.debug("Test that oversized messages are allowed, but get us disconnected")
+ zero_byte_string = b'\x00' * 4000001
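+ # 4,000,001 bytes is one byte over the 4,000,000-byte maximum protocol message size, so the
+ # receiving peer should drop the connection rather than process the message.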
+ node.sendmsgtopeer(peer_id=0, msg_type="addr", msg=zero_byte_string.hex())
+ self.wait_until(lambda: len(self.nodes[0].getpeerinfo()) == 0, timeout=10)
+
+ def test_getaddrmaninfo(self):
+ self.log.info("Test getaddrmaninfo")
+ node = self.nodes[1]
+
+ self.log.debug("Test that getaddrmaninfo is a hidden RPC")
+ # It is hidden from general help, but its detailed help may be called directly.
+ assert "getaddrmaninfo" not in node.help()
+ assert "getaddrmaninfo" in node.help("getaddrmaninfo")
+
+ # current count of ipv4 addresses in addrman is {'new':1, 'tried':1}
+ self.log.info("Test that count of addresses in addrman match expected values")
+ res = node.getaddrmaninfo()
+ assert_equal(res["ipv4"]["new"], 1)
+ assert_equal(res["ipv4"]["tried"], 1)
+ assert_equal(res["ipv4"]["total"], 2)
+ assert_equal(res["all_networks"]["new"], 1)
+ assert_equal(res["all_networks"]["tried"], 1)
+ assert_equal(res["all_networks"]["total"], 2)
+ for net in ["ipv6", "onion", "i2p", "cjdns"]:
+ assert_equal(res[net]["new"], 0)
+ assert_equal(res[net]["tried"], 0)
+ assert_equal(res[net]["total"], 0)
+
+ def test_getrawaddrman(self):
+ self.log.info("Test getrawaddrman")
+ node = self.nodes[1]
+
+ self.log.debug("Test that getrawaddrman is a hidden RPC")
+ # It is hidden from general help, but its detailed help may be called directly.
+ assert "getrawaddrman" not in node.help()
+ assert "getrawaddrman" in node.help("getrawaddrman")
+
+ def check_addr_information(result, expected):
+ """Utility to compare a getrawaddrman result entry with an expected entry"""
+ assert_equal(result["address"], expected["address"])
+ assert_equal(result["port"], expected["port"])
+ assert_equal(result["services"], expected["services"])
+ assert_equal(result["network"], expected["network"])
+ assert_equal(result["source"], expected["source"])
+ assert_equal(result["source_network"], expected["source_network"])
+ # To avoid failing on slow test runners, use a 10s vspan here.
+ assert_approx(result["time"], time.time(), vspan=10)
+
+ def check_getrawaddrman_entries(expected):
+ """Utility to compare a getrawaddrman result with expected addrman contents"""
+ getrawaddrman = node.getrawaddrman()
+ getaddrmaninfo = node.getaddrmaninfo()
+ for (table_name, table_info) in expected.items():
+ assert_equal(len(getrawaddrman[table_name]), len(table_info["entries"]))
+ assert_equal(len(getrawaddrman[table_name]), getaddrmaninfo["all_networks"][table_name])
+
+ for bucket_position in getrawaddrman[table_name].keys():
+ bucket = int(bucket_position.split("/")[0])
+ position = int(bucket_position.split("/")[1])
+
+ # bucket and position can only be sanity-checked here as the
+ # test-addrman isn't deterministic
+ assert 0 <= int(bucket) < table_info["bucket_count"]
+ assert 0 <= int(position) < ADDRMAN_BUCKET_SIZE
+
+ entry = getrawaddrman[table_name][bucket_position]
+ expected_entry = list(filter(lambda e: e["address"] == entry["address"], table_info["entries"]))[0]
+ check_addr_information(entry, expected_entry)
+
+ # we expect one addrman new and tried table entry, which were added in a previous test
+ expected = {
+ "new": {
+ "bucket_count": ADDRMAN_NEW_BUCKET_COUNT,
+ "entries": [
+ {
+ "address": "2.0.0.0",
+ "port": 8333,
+ "services": 9,
+ "network": "ipv4",
+ "source": "2.0.0.0",
+ "source_network": "ipv4",
+ }
+ ]
+ },
+ "tried": {
+ "bucket_count": ADDRMAN_TRIED_BUCKET_COUNT,
+ "entries": [
+ {
+ "address": "1.2.3.4",
+ "port": 8333,
+ "services": 9,
+ "network": "ipv4",
+ "source": "1.2.3.4",
+ "source_network": "ipv4",
+ }
+ ]
+ }
+ }
+
+ self.log.debug("Test that the getrawaddrman contains information about the addresses added in a previous test")
+ check_getrawaddrman_entries(expected)
+
+ self.log.debug("Add one new address to each addrman table")
+ expected["new"]["entries"].append({
+ "address": "2803:0:1234:abcd::1",
+ "services": 9,
+ "network": "ipv6",
+ "source": "2803:0:1234:abcd::1",
+ "source_network": "ipv6",
+ "port": -1, # set once addpeeraddress is successful
+ })
+ expected["tried"]["entries"].append({
+ "address": "nrfj6inpyf73gpkyool35hcmne5zwfmse3jl3aw23vk7chdemalyaqad.onion",
+ "services": 9,
+ "network": "onion",
+ "source": "nrfj6inpyf73gpkyool35hcmne5zwfmse3jl3aw23vk7chdemalyaqad.onion",
+ "source_network": "onion",
+ "port": -1, # set once addpeeraddress is successful
+ })
+
+ port = 0
+ for (table_name, table_info) in expected.items():
+ # There's a slight chance that the to-be-added address collides with an already
+ # present table entry. To avoid this, we increment the port until an address has been
+ # added. Incrementing the port changes the position in the new table bucket (bucket
+ # stays the same) and changes both the bucket and the position in the tried table.
+ while True:
+ if node.addpeeraddress(address=table_info["entries"][1]["address"], port=port, tried=table_name == "tried")["success"]:
+ table_info["entries"][1]["port"] = port
+ self.log.debug(f"Added {table_info['entries'][1]['address']} to {table_name} table")
+ break
+ else:
+ port += 1
+
+ self.log.debug("Test that the newly added addresses appear in getrawaddrman")
+ check_getrawaddrman_entries(expected)
+
if __name__ == '__main__':
NetTest().main()
diff --git a/test/functional/rpc_packages.py b/test/functional/rpc_packages.py
index ae1a498e28..9c4960aa1e 100755
--- a/test/functional/rpc_packages.py
+++ b/test/functional/rpc_packages.py
@@ -212,8 +212,8 @@ class RPCPackagesTest(BitcoinTestFramework):
coin = self.wallet.get_utxo()
# tx1 and tx2 share the same inputs
- tx1 = self.wallet.create_self_transfer(utxo_to_spend=coin)
- tx2 = self.wallet.create_self_transfer(utxo_to_spend=coin)
+ tx1 = self.wallet.create_self_transfer(utxo_to_spend=coin, fee_rate=DEFAULT_FEE)
+ tx2 = self.wallet.create_self_transfer(utxo_to_spend=coin, fee_rate=2*DEFAULT_FEE)
# Ensure tx1 and tx2 are valid by themselves
assert node.testmempoolaccept([tx1["hex"]])[0]["allowed"]
@@ -222,8 +222,8 @@ class RPCPackagesTest(BitcoinTestFramework):
self.log.info("Test duplicate transactions in the same package")
testres = node.testmempoolaccept([tx1["hex"], tx1["hex"]])
assert_equal(testres, [
- {"txid": tx1["txid"], "wtxid": tx1["wtxid"], "package-error": "conflict-in-package"},
- {"txid": tx1["txid"], "wtxid": tx1["wtxid"], "package-error": "conflict-in-package"}
+ {"txid": tx1["txid"], "wtxid": tx1["wtxid"], "package-error": "package-contains-duplicates"},
+ {"txid": tx1["txid"], "wtxid": tx1["wtxid"], "package-error": "package-contains-duplicates"}
])
self.log.info("Test conflicting transactions in the same package")
diff --git a/test/functional/rpc_psbt.py b/test/functional/rpc_psbt.py
index c4ed4da0f2..60df48f025 100755
--- a/test/functional/rpc_psbt.py
+++ b/test/functional/rpc_psbt.py
@@ -106,11 +106,11 @@ class PSBTTest(BitcoinTestFramework):
assert "non_witness_utxo" in mining_node.decodepsbt(psbt_new.to_base64())["inputs"][0]
# Have the offline node sign the PSBT (which will remove the non-witness UTXO)
- signed_psbt = offline_node.walletprocesspsbt(psbt_new.to_base64())["psbt"]
- assert not "non_witness_utxo" in mining_node.decodepsbt(signed_psbt)["inputs"][0]
+ signed_psbt = offline_node.walletprocesspsbt(psbt_new.to_base64())
+ assert not "non_witness_utxo" in mining_node.decodepsbt(signed_psbt["psbt"])["inputs"][0]
# Make sure we can mine the resulting transaction
- txid = mining_node.sendrawtransaction(mining_node.finalizepsbt(signed_psbt)["hex"])
+ txid = mining_node.sendrawtransaction(signed_psbt["hex"])
self.generate(mining_node, nblocks=1, sync_fun=lambda: self.sync_all([online_node, mining_node]))
assert_equal(online_node.gettxout(txid,0)["confirmations"], 1)
@@ -142,9 +142,8 @@ class PSBTTest(BitcoinTestFramework):
utxo1 = tx1_inputs[0]
assert_equal(unconfirmed_txid, utxo1['txid'])
- signed_tx1 = wallet.walletprocesspsbt(psbtx1)['psbt']
- final_tx1 = wallet.finalizepsbt(signed_tx1)['hex']
- txid1 = self.nodes[0].sendrawtransaction(final_tx1)
+ signed_tx1 = wallet.walletprocesspsbt(psbtx1)
+ txid1 = self.nodes[0].sendrawtransaction(signed_tx1['hex'])
mempool = self.nodes[0].getrawmempool()
assert txid1 in mempool
@@ -157,9 +156,8 @@ class PSBTTest(BitcoinTestFramework):
self.log.info("Fail to broadcast a new PSBT with maxconf 0 due to BIP125 rules to verify it actually chose unconfirmed outputs")
psbt_invalid = wallet.walletcreatefundedpsbt([{'txid': utxo1['txid'], 'vout': utxo1['vout']}], {target_address: 1}, 0, {'add_inputs': True, 'maxconf': 0, 'fee_rate': 10})['psbt']
- signed_invalid = wallet.walletprocesspsbt(psbt_invalid)['psbt']
- final_invalid = wallet.finalizepsbt(signed_invalid)['hex']
- assert_raises_rpc_error(-26, "bad-txns-spends-conflicting-tx", self.nodes[0].sendrawtransaction, final_invalid)
+ signed_invalid = wallet.walletprocesspsbt(psbt_invalid)
+ assert_raises_rpc_error(-26, "bad-txns-spends-conflicting-tx", self.nodes[0].sendrawtransaction, signed_invalid['hex'])
self.log.info("Craft a replacement adding inputs with highest confs possible")
psbtx2 = wallet.walletcreatefundedpsbt([{'txid': utxo1['txid'], 'vout': utxo1['vout']}], {target_address: 1}, 0, {'add_inputs': True, 'minconf': 2, 'fee_rate': 10})['psbt']
@@ -169,9 +167,8 @@ class PSBTTest(BitcoinTestFramework):
if vin['txid'] != unconfirmed_txid:
assert_greater_than_or_equal(self.nodes[0].gettxout(vin['txid'], vin['vout'])['confirmations'], 2)
- signed_tx2 = wallet.walletprocesspsbt(psbtx2)['psbt']
- final_tx2 = wallet.finalizepsbt(signed_tx2)['hex']
- txid2 = self.nodes[0].sendrawtransaction(final_tx2)
+ signed_tx2 = wallet.walletprocesspsbt(psbtx2)
+ txid2 = self.nodes[0].sendrawtransaction(signed_tx2['hex'])
mempool = self.nodes[0].getrawmempool()
assert txid1 not in mempool
@@ -217,12 +214,21 @@ class PSBTTest(BitcoinTestFramework):
self.nodes[0].walletpassphrase(passphrase="password", timeout=1000000)
- # Sign the transaction and send
- signed_tx = self.nodes[0].walletprocesspsbt(psbt=psbtx, finalize=False)['psbt']
- finalized_tx = self.nodes[0].walletprocesspsbt(psbt=psbtx, finalize=True)['psbt']
- assert signed_tx != finalized_tx
- final_tx = self.nodes[0].finalizepsbt(signed_tx)['hex']
- self.nodes[0].sendrawtransaction(final_tx)
+ # Sign the transaction but don't finalize
+ processed_psbt = self.nodes[0].walletprocesspsbt(psbt=psbtx, finalize=False)
+ assert "hex" not in processed_psbt
+ signed_psbt = processed_psbt['psbt']
+
+ # Finalize and send
+ finalized_hex = self.nodes[0].finalizepsbt(signed_psbt)['hex']
+ self.nodes[0].sendrawtransaction(finalized_hex)
+
+ # Alternative method: sign AND finalize in one command
+ processed_finalized_psbt = self.nodes[0].walletprocesspsbt(psbt=psbtx, finalize=True)
+ finalized_psbt = processed_finalized_psbt['psbt']
+ finalized_psbt_hex = processed_finalized_psbt['hex']
+ assert signed_psbt != finalized_psbt
+ assert finalized_psbt_hex == finalized_hex
# Manually selected inputs can be locked:
assert_equal(len(self.nodes[0].listlockunspent()), 0)
@@ -296,7 +302,7 @@ class PSBTTest(BitcoinTestFramework):
# Check decodepsbt fee calculation (input values shall only be counted once per UTXO)
assert_equal(decoded['fee'], created_psbt['fee'])
assert_equal(walletprocesspsbt_out['complete'], True)
- self.nodes[1].sendrawtransaction(self.nodes[1].finalizepsbt(walletprocesspsbt_out['psbt'])['hex'])
+ self.nodes[1].sendrawtransaction(walletprocesspsbt_out['hex'])
self.log.info("Test walletcreatefundedpsbt fee rate of 10000 sat/vB and 0.1 BTC/kvB produces a total fee at or slightly below -maxtxfee (~0.05290000)")
res1 = self.nodes[1].walletcreatefundedpsbt(inputs, outputs, 0, {"fee_rate": 10000, "add_inputs": True})
@@ -327,7 +333,7 @@ class PSBTTest(BitcoinTestFramework):
assert_raises_rpc_error(-3, "Invalid amount",
self.nodes[1].walletcreatefundedpsbt, inputs, outputs, 0, {param: invalid_value, "add_inputs": True})
# Test fee_rate values that cannot be represented in sat/vB.
- for invalid_value in [0.0001, 0.00000001, 0.00099999, 31.99999999, "0.0001", "0.00000001", "0.00099999", "31.99999999"]:
+ for invalid_value in [0.0001, 0.00000001, 0.00099999, 31.99999999]:
assert_raises_rpc_error(-3, "Invalid amount",
self.nodes[1].walletcreatefundedpsbt, inputs, outputs, 0, {"fee_rate": invalid_value, "add_inputs": True})
@@ -387,7 +393,7 @@ class PSBTTest(BitcoinTestFramework):
# partially sign with node 2. This should be complete and sendable
walletprocesspsbt_out = self.nodes[2].walletprocesspsbt(psbtx)
assert_equal(walletprocesspsbt_out['complete'], True)
- self.nodes[2].sendrawtransaction(self.nodes[2].finalizepsbt(walletprocesspsbt_out['psbt'])['hex'])
+ self.nodes[2].sendrawtransaction(walletprocesspsbt_out['hex'])
# check that walletprocesspsbt fails to decode a non-psbt
rawtx = self.nodes[1].createrawtransaction([{"txid":txid,"vout":p2wpkh_pos}], {self.nodes[1].getnewaddress():9.99})
@@ -739,14 +745,13 @@ class PSBTTest(BitcoinTestFramework):
assert not signed['complete']
signed = self.nodes[0].walletprocesspsbt(signed['psbt'])
assert signed['complete']
- self.nodes[0].finalizepsbt(signed['psbt'])
psbt = wallet.walletcreatefundedpsbt([ext_utxo], {self.nodes[0].getnewaddress(): 15}, 0, {"add_inputs": True, "solving_data":{"descriptors": [desc]}})
signed = wallet.walletprocesspsbt(psbt['psbt'])
assert not signed['complete']
signed = self.nodes[0].walletprocesspsbt(signed['psbt'])
assert signed['complete']
- final = self.nodes[0].finalizepsbt(signed['psbt'], False)
+ final = signed['hex']
dec = self.nodes[0].decodepsbt(signed["psbt"])
for i, txin in enumerate(dec["tx"]["vin"]):
@@ -781,8 +786,8 @@ class PSBTTest(BitcoinTestFramework):
)
signed = wallet.walletprocesspsbt(psbt["psbt"])
signed = self.nodes[0].walletprocesspsbt(signed["psbt"])
- final = self.nodes[0].finalizepsbt(signed["psbt"])
- assert self.nodes[0].testmempoolaccept([final["hex"]])[0]["allowed"]
+ final = signed["hex"]
+ assert self.nodes[0].testmempoolaccept([final])[0]["allowed"]
# Reducing the weight should have a lower fee
psbt2 = wallet.walletcreatefundedpsbt(
inputs=[{"txid": ext_utxo["txid"], "vout": ext_utxo["vout"], "weight": low_input_weight}],
@@ -837,8 +842,8 @@ class PSBTTest(BitcoinTestFramework):
self.nodes[0].importprivkey(privkey)
psbt = watchonly.sendall([wallet.getnewaddress()])["psbt"]
- psbt = self.nodes[0].walletprocesspsbt(psbt)["psbt"]
- self.nodes[0].sendrawtransaction(self.nodes[0].finalizepsbt(psbt)["hex"])
+ signed_tx = self.nodes[0].walletprocesspsbt(psbt)
+ self.nodes[0].sendrawtransaction(signed_tx["hex"])
# Same test but for taproot
if self.options.descriptors:
@@ -853,8 +858,8 @@ class PSBTTest(BitcoinTestFramework):
self.nodes[0].importdescriptors([{"desc": descsum_create("tr({})".format(privkey)), "timestamp":"now"}])
psbt = watchonly.sendall([wallet.getnewaddress(), addr])["psbt"]
- psbt = self.nodes[0].walletprocesspsbt(psbt)["psbt"]
- txid = self.nodes[0].sendrawtransaction(self.nodes[0].finalizepsbt(psbt)["hex"])
+ processed_psbt = self.nodes[0].walletprocesspsbt(psbt)
+ txid = self.nodes[0].sendrawtransaction(processed_psbt["hex"])
vout = find_vout_for_address(self.nodes[0], txid, addr)
# Make sure tap tree is in psbt
@@ -871,7 +876,7 @@ class PSBTTest(BitcoinTestFramework):
vout = find_vout_for_address(self.nodes[0], txid, addr)
psbt = self.nodes[0].createpsbt([{"txid": txid, "vout": vout}], [{self.nodes[0].getnewaddress(): 0.9999}])
signed = self.nodes[0].walletprocesspsbt(psbt)
- rawtx = self.nodes[0].finalizepsbt(signed["psbt"])["hex"]
+ rawtx = signed["hex"]
self.nodes[0].sendrawtransaction(rawtx)
self.generate(self.nodes[0], 1)
@@ -883,6 +888,9 @@ class PSBTTest(BitcoinTestFramework):
comb_psbt = self.nodes[0].combinepsbt([psbt, parsed_psbt.to_base64()])
assert_equal(comb_psbt, psbt)
+ self.log.info("Test walletprocesspsbt raises if an invalid sighashtype is passed")
+ assert_raises_rpc_error(-8, "all is not a valid sighash parameter.", self.nodes[0].walletprocesspsbt, psbt, sighashtype="all")
+
self.log.info("Test decoding PSBT with per-input preimage types")
# note that the decodepsbt RPC doesn't check whether preimages and hashes match
hash_ripemd160, preimage_ripemd160 = random_bytes(20), random_bytes(50)
@@ -970,17 +978,26 @@ class PSBTTest(BitcoinTestFramework):
test_psbt_input_keys(decoded['inputs'][0], ['witness_utxo', 'non_witness_utxo'])
# Test that the psbt is not finalized and does not have bip32_derivs unless specified
- psbt = self.nodes[2].descriptorprocesspsbt(psbt=psbt, descriptors=[descriptor], sighashtype="ALL", bip32derivs=True, finalize=False)["psbt"]
- decoded = self.nodes[2].decodepsbt(psbt)
+ processed_psbt = self.nodes[2].descriptorprocesspsbt(psbt=psbt, descriptors=[descriptor], sighashtype="ALL", bip32derivs=True, finalize=False)
+ decoded = self.nodes[2].decodepsbt(processed_psbt['psbt'])
test_psbt_input_keys(decoded['inputs'][0], ['witness_utxo', 'non_witness_utxo', 'partial_signatures', 'bip32_derivs'])
- psbt = self.nodes[2].descriptorprocesspsbt(psbt=psbt, descriptors=[descriptor], sighashtype="ALL", bip32derivs=False, finalize=True)["psbt"]
- decoded = self.nodes[2].decodepsbt(psbt)
+ # If the psbt is not finalized, test that the result does not have "hex"
+ assert "hex" not in processed_psbt
+
+ processed_psbt = self.nodes[2].descriptorprocesspsbt(psbt=psbt, descriptors=[descriptor], sighashtype="ALL", bip32derivs=False, finalize=True)
+ decoded = self.nodes[2].decodepsbt(processed_psbt['psbt'])
test_psbt_input_keys(decoded['inputs'][0], ['witness_utxo', 'non_witness_utxo', 'final_scriptwitness'])
+ # Test psbt is complete
+ assert_equal(processed_psbt['complete'], True)
+
# Broadcast transaction
- rawtx = self.nodes[2].finalizepsbt(psbt)["hex"]
- self.nodes[2].sendrawtransaction(rawtx)
+ self.nodes[2].sendrawtransaction(processed_psbt['hex'])
+
+ self.log.info("Test descriptorprocesspsbt raises if an invalid sighashtype is passed")
+ assert_raises_rpc_error(-8, "all is not a valid sighash parameter.", self.nodes[2].descriptorprocesspsbt, psbt, [descriptor], sighashtype="all")
+
if __name__ == '__main__':
PSBTTest().main()
diff --git a/test/functional/rpc_signer.py b/test/functional/rpc_signer.py
index 5ba0d35835..488682e959 100755
--- a/test/functional/rpc_signer.py
+++ b/test/functional/rpc_signer.py
@@ -21,7 +21,7 @@ class RPCSignerTest(BitcoinTestFramework):
def mock_signer_path(self):
path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'mocks', 'signer.py')
if platform.system() == "Windows":
- return "py " + path
+ return "py -3 " + path
else:
return path
diff --git a/test/functional/rpc_signrawtransactionwithkey.py b/test/functional/rpc_signrawtransactionwithkey.py
index ac7a86704f..0913f5057e 100755
--- a/test/functional/rpc_signrawtransactionwithkey.py
+++ b/test/functional/rpc_signrawtransactionwithkey.py
@@ -14,6 +14,7 @@ from test_framework.address import (
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
+ assert_raises_rpc_error,
find_vout_for_address,
)
from test_framework.script_util import (
@@ -33,6 +34,14 @@ from decimal import (
Decimal,
)
+INPUTS = [
+ # Valid pay-to-pubkey scripts
+ {'txid': '9b907ef1e3c26fc71fe4a4b3580bc75264112f95050014157059c736f0202e71', 'vout': 0,
+ 'scriptPubKey': '76a91460baa0f494b38ce3c940dea67f3804dc52d1fb9488ac'},
+ {'txid': '83a4f6a6b73660e13ee6cb3c6063fa3759c50c9b7521d0536022961898f4fb02', 'vout': 0,
+ 'scriptPubKey': '76a914669b857c03a5ed269d5d85a1ffac9ed5d663072788ac'},
+]
+OUTPUTS = {'mpLQjfK79b7CCV4VMJWEWAj5Mpx8Up5zxB': 0.1}
class SignRawTransactionWithKeyTest(BitcoinTestFramework):
def set_test_params(self):
@@ -47,6 +56,11 @@ class SignRawTransactionWithKeyTest(BitcoinTestFramework):
txid = self.nodes[0].sendrawtransaction(self.nodes[0].signrawtransactionwithkey(rawtx, [self.nodes[0].get_deterministic_priv_key().key])["hex"], 0)
return txid
+ def assert_signing_completed_successfully(self, signed_tx):
+ assert 'errors' not in signed_tx
+ assert 'complete' in signed_tx
+ assert_equal(signed_tx['complete'], True)
+
def successful_signing_test(self):
"""Create and sign a valid raw transaction with one input.
@@ -56,25 +70,10 @@ class SignRawTransactionWithKeyTest(BitcoinTestFramework):
2) No script verification error occurred"""
self.log.info("Test valid raw transaction with one input")
privKeys = ['cUeKHd5orzT3mz8P9pxyREHfsWtVfgsfDjiZZBcjUBAaGk1BTj7N', 'cVKpPfVKSJxKqVpE9awvXNWuLHCa5j5tiE7K6zbUSptFpTEtiFrA']
+ rawTx = self.nodes[0].createrawtransaction(INPUTS, OUTPUTS)
+ rawTxSigned = self.nodes[0].signrawtransactionwithkey(rawTx, privKeys, INPUTS)
- inputs = [
- # Valid pay-to-pubkey scripts
- {'txid': '9b907ef1e3c26fc71fe4a4b3580bc75264112f95050014157059c736f0202e71', 'vout': 0,
- 'scriptPubKey': '76a91460baa0f494b38ce3c940dea67f3804dc52d1fb9488ac'},
- {'txid': '83a4f6a6b73660e13ee6cb3c6063fa3759c50c9b7521d0536022961898f4fb02', 'vout': 0,
- 'scriptPubKey': '76a914669b857c03a5ed269d5d85a1ffac9ed5d663072788ac'},
- ]
-
- outputs = {'mpLQjfK79b7CCV4VMJWEWAj5Mpx8Up5zxB': 0.1}
-
- rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
- rawTxSigned = self.nodes[0].signrawtransactionwithkey(rawTx, privKeys, inputs)
-
- # 1) The transaction has a complete set of signatures
- assert rawTxSigned['complete']
-
- # 2) No script verification error occurred
- assert 'errors' not in rawTxSigned
+ self.assert_signing_completed_successfully(rawTxSigned)
def witness_script_test(self):
self.log.info("Test signing transaction to P2SH-P2WSH addresses without wallet")
@@ -95,9 +94,7 @@ class SignRawTransactionWithKeyTest(BitcoinTestFramework):
# Now create and sign a transaction spending that output on node[0], which doesn't know the scripts or keys
spending_tx = self.nodes[0].createrawtransaction([unspent_output], {getnewdestination()[2]: Decimal("49.998")})
spending_tx_signed = self.nodes[0].signrawtransactionwithkey(spending_tx, [embedded_privkey], [unspent_output])
- # Check the signing completed successfully
- assert 'complete' in spending_tx_signed
- assert_equal(spending_tx_signed['complete'], True)
+ self.assert_signing_completed_successfully(spending_tx_signed)
# Now test with P2PKH and P2PK scripts as the witnessScript
for tx_type in ['P2PKH', 'P2PK']: # these tests are order-independent
@@ -120,14 +117,19 @@ class SignRawTransactionWithKeyTest(BitcoinTestFramework):
# Now create and sign a transaction spending that output on node[0], which doesn't know the scripts or keys
spending_tx = self.nodes[0].createrawtransaction([{'txid': txid, 'vout': vout}], {getnewdestination()[2]: Decimal("9.999")})
spending_tx_signed = self.nodes[0].signrawtransactionwithkey(spending_tx, [embedded_privkey], [{'txid': txid, 'vout': vout, 'scriptPubKey': script_pub_key, 'redeemScript': redeem_script, 'witnessScript': witness_script, 'amount': 10}])
- # Check the signing completed successfully
- assert 'complete' in spending_tx_signed
- assert_equal(spending_tx_signed['complete'], True)
+ self.assert_signing_completed_successfully(spending_tx_signed)
self.nodes[0].sendrawtransaction(spending_tx_signed['hex'])
+ def invalid_sighashtype_test(self):
+ self.log.info("Test signing transaction with invalid sighashtype")
+ tx = self.nodes[0].createrawtransaction(INPUTS, OUTPUTS)
+ privkeys = [self.nodes[0].get_deterministic_priv_key().key]
+ assert_raises_rpc_error(-8, "all is not a valid sighash parameter.", self.nodes[0].signrawtransactionwithkey, tx, privkeys, sighashtype="all")
+
def run_test(self):
self.successful_signing_test()
self.witness_script_test()
+ self.invalid_sighashtype_test()
if __name__ == '__main__':
diff --git a/test/functional/test_framework/key.py b/test/functional/test_framework/key.py
index c250fc6fe8..6c1892539f 100644
--- a/test/functional/test_framework/key.py
+++ b/test/functional/test_framework/key.py
@@ -290,24 +290,33 @@ def sign_schnorr(key, msg, aux=None, flip_p=False, flip_r=False):
class TestFrameworkKey(unittest.TestCase):
- def test_schnorr(self):
- """Test the Python Schnorr implementation."""
+ def test_ecdsa_and_schnorr(self):
+ """Test the Python ECDSA and Schnorr implementations."""
+ def random_bitflip(sig):
+ sig = list(sig)
+ sig[random.randrange(len(sig))] ^= (1 << (random.randrange(8)))
+ return bytes(sig)
+
byte_arrays = [generate_privkey() for _ in range(3)] + [v.to_bytes(32, 'big') for v in [0, ORDER - 1, ORDER, 2**256 - 1]]
keys = {}
- for privkey in byte_arrays: # build array of key/pubkey pairs
- pubkey, _ = compute_xonly_pubkey(privkey)
- if pubkey is not None:
- keys[privkey] = pubkey
+ for privkey_bytes in byte_arrays: # build array of key/pubkey pairs
+ privkey = ECKey()
+ privkey.set(privkey_bytes, compressed=True)
+ if privkey.is_valid:
+ keys[privkey] = privkey.get_pubkey()
for msg in byte_arrays: # test every combination of message, signing key, verification key
for sign_privkey, _ in keys.items():
- sig = sign_schnorr(sign_privkey, msg)
+ sig_ecdsa = sign_privkey.sign_ecdsa(msg)
+ sig_schnorr = sign_schnorr(sign_privkey.get_bytes(), msg)
for verify_privkey, verify_pubkey in keys.items():
+ verify_xonly_pubkey = verify_pubkey.get_bytes()[1:]
if verify_privkey == sign_privkey:
- self.assertTrue(verify_schnorr(verify_pubkey, sig, msg))
- sig = list(sig)
- sig[random.randrange(64)] ^= (1 << (random.randrange(8))) # damaging signature should break things
- sig = bytes(sig)
- self.assertFalse(verify_schnorr(verify_pubkey, sig, msg))
+ self.assertTrue(verify_pubkey.verify_ecdsa(sig_ecdsa, msg))
+ self.assertTrue(verify_schnorr(verify_xonly_pubkey, sig_schnorr, msg))
+ sig_ecdsa = random_bitflip(sig_ecdsa) # damaging signature should break things
+ sig_schnorr = random_bitflip(sig_schnorr)
+ self.assertFalse(verify_pubkey.verify_ecdsa(sig_ecdsa, msg))
+ self.assertFalse(verify_schnorr(verify_xonly_pubkey, sig_schnorr, msg))
def test_schnorr_testvectors(self):
"""Implement the BIP340 test vectors (read from bip340_test_vectors.csv)."""
diff --git a/test/functional/test_framework/messages.py b/test/functional/test_framework/messages.py
index a6764365c5..8f3aea8785 100755
--- a/test/functional/test_framework/messages.py
+++ b/test/functional/test_framework/messages.py
@@ -27,6 +27,7 @@ import random
import socket
import struct
import time
+import unittest
from test_framework.siphash import siphash256
from test_framework.util import assert_equal
@@ -51,6 +52,7 @@ NODE_BLOOM = (1 << 2)
NODE_WITNESS = (1 << 3)
NODE_COMPACT_FILTERS = (1 << 6)
NODE_NETWORK_LIMITED = (1 << 10)
+NODE_P2P_V2 = (1 << 11)
MSG_TX = 1
MSG_BLOCK = 2
@@ -77,6 +79,10 @@ def sha256(s):
return hashlib.sha256(s).digest()
+def sha3(s):
+ return hashlib.sha3_256(s).digest()
+
+
def hash256(s):
return sha256(sha256(s))
@@ -229,16 +235,25 @@ class CAddress:
# see https://github.com/bitcoin/bips/blob/master/bip-0155.mediawiki
NET_IPV4 = 1
+ NET_IPV6 = 2
+ NET_TORV3 = 4
NET_I2P = 5
+ NET_CJDNS = 6
ADDRV2_NET_NAME = {
NET_IPV4: "IPv4",
- NET_I2P: "I2P"
+ NET_IPV6: "IPv6",
+ NET_TORV3: "TorV3",
+ NET_I2P: "I2P",
+ NET_CJDNS: "CJDNS"
}
ADDRV2_ADDRESS_LENGTH = {
NET_IPV4: 4,
- NET_I2P: 32
+ NET_IPV6: 16,
+ NET_TORV3: 32,
+ NET_I2P: 32,
+ NET_CJDNS: 16
}
I2P_PAD = "===="
@@ -285,7 +300,7 @@ class CAddress:
self.nServices = deser_compact_size(f)
self.net = struct.unpack("B", f.read(1))[0]
- assert self.net in (self.NET_IPV4, self.NET_I2P)
+ assert self.net in self.ADDRV2_NET_NAME
address_length = deser_compact_size(f)
assert address_length == self.ADDRV2_ADDRESS_LENGTH[self.net]
@@ -293,14 +308,25 @@ class CAddress:
addr_bytes = f.read(address_length)
if self.net == self.NET_IPV4:
self.ip = socket.inet_ntoa(addr_bytes)
- else:
+ elif self.net == self.NET_IPV6:
+ self.ip = socket.inet_ntop(socket.AF_INET6, addr_bytes)
+ elif self.net == self.NET_TORV3:
+ prefix = b".onion checksum"
+ version = bytes([3])
+ checksum = sha3(prefix + addr_bytes + version)[:2]
+ self.ip = b32encode(addr_bytes + checksum + version).decode("ascii").lower() + ".onion"
+ elif self.net == self.NET_I2P:
self.ip = b32encode(addr_bytes)[0:-len(self.I2P_PAD)].decode("ascii").lower() + ".b32.i2p"
+ elif self.net == self.NET_CJDNS:
+ self.ip = socket.inet_ntop(socket.AF_INET6, addr_bytes)
+ else:
+ raise Exception(f"Address type not supported")
self.port = struct.unpack(">H", f.read(2))[0]
def serialize_v2(self):
"""Serialize in addrv2 format (BIP155)"""
- assert self.net in (self.NET_IPV4, self.NET_I2P)
+ assert self.net in self.ADDRV2_NET_NAME
r = b""
r += struct.pack("<I", self.time)
r += ser_compact_size(self.nServices)
@@ -308,10 +334,20 @@ class CAddress:
r += ser_compact_size(self.ADDRV2_ADDRESS_LENGTH[self.net])
if self.net == self.NET_IPV4:
r += socket.inet_aton(self.ip)
- else:
+ elif self.net == self.NET_IPV6:
+ r += socket.inet_pton(socket.AF_INET6, self.ip)
+ elif self.net == self.NET_TORV3:
+ sfx = ".onion"
+ assert self.ip.endswith(sfx)
+ r += b32decode(self.ip[0:-len(sfx)], True)[0:32]
+ elif self.net == self.NET_I2P:
sfx = ".b32.i2p"
assert self.ip.endswith(sfx)
r += b32decode(self.ip[0:-len(sfx)] + self.I2P_PAD, True)
+ elif self.net == self.NET_CJDNS:
+ r += socket.inet_pton(socket.AF_INET6, self.ip)
+ else:
+ raise Exception(f"Address type not supported")
r += struct.pack(">H", self.port)
return r
@@ -1852,3 +1888,19 @@ class msg_sendtxrcncl:
def __repr__(self):
return "msg_sendtxrcncl(version=%lu, salt=%lu)" %\
(self.version, self.salt)
+
+class TestFrameworkScript(unittest.TestCase):
+ def test_addrv2_encode_decode(self):
+ def check_addrv2(ip, net):
+ addr = CAddress()
+ addr.net, addr.ip = net, ip
+ ser = addr.serialize_v2()
+ actual = CAddress()
+ actual.deserialize_v2(BytesIO(ser))
+ self.assertEqual(actual, addr)
+
+ check_addrv2("1.65.195.98", CAddress.NET_IPV4)
+ check_addrv2("2001:41f0::62:6974:636f:696e", CAddress.NET_IPV6)
+ check_addrv2("2bqghnldu6mcug4pikzprwhtjjnsyederctvci6klcwzepnjd46ikjyd.onion", CAddress.NET_TORV3)
+ check_addrv2("255fhcp6ajvftnyo7bwz3an3t4a4brhopm3bamyh2iu5r3gnr2rq.b32.i2p", CAddress.NET_I2P)
+ check_addrv2("fc32:17ea:e415:c3bf:9808:149d:b5a2:c9aa", CAddress.NET_CJDNS)
diff --git a/test/functional/test_framework/netutil.py b/test/functional/test_framework/netutil.py
index fcea4b2f68..838f40fcaa 100644
--- a/test/functional/test_framework/netutil.py
+++ b/test/functional/test_framework/netutil.py
@@ -25,6 +25,11 @@ import os
STATE_LISTEN = '0A'
# STATE_CLOSING = '0B'
+# Address manager size constants as defined in addrman_impl.h
+ADDRMAN_NEW_BUCKET_COUNT = 1 << 10
+ADDRMAN_TRIED_BUCKET_COUNT = 1 << 8
+ADDRMAN_BUCKET_SIZE = 1 << 6
+
def get_socket_inodes(pid):
'''
Get list of socket inodes for process pid.
diff --git a/test/functional/test_framework/p2p.py b/test/functional/test_framework/p2p.py
index 2433e52671..ceb4bbd7de 100755
--- a/test/functional/test_framework/p2p.py
+++ b/test/functional/test_framework/p2p.py
@@ -95,6 +95,12 @@ P2P_SUBVERSION = "/python-p2p-tester:0.0.3/"
P2P_VERSION_RELAY = 1
# Delay after receiving a tx inv before requesting transactions from non-preferred peers, in seconds
NONPREF_PEER_TX_DELAY = 2
+# Delay for requesting transactions via txids if we have wtxid-relaying peers, in seconds
+TXID_RELAY_DELAY = 2
+# Delay for requesting transactions if the peer has MAX_PEER_TX_REQUEST_IN_FLIGHT or more requests
+OVERLOADED_PEER_TX_DELAY = 2
+# How long to wait before downloading a transaction from an additional peer
+GETDATA_TX_INTERVAL = 60
MESSAGEMAP = {
b"addr": msg_addr,
@@ -552,16 +558,12 @@ class P2PInterface(P2PConnection):
self.send_message(message)
self.sync_with_ping(timeout=timeout)
- def sync_send_with_ping(self, timeout=60):
- """Ensure SendMessages is called on this connection"""
- # Calling sync_with_ping twice requires that the node calls
+ def sync_with_ping(self, timeout=60):
+ """Ensure ProcessMessages and SendMessages is called on this connection"""
+ # Sending two pings back-to-back, requires that the node calls
# `ProcessMessage` twice, and thus ensures `SendMessages` must have
# been called at least once
- self.sync_with_ping()
- self.sync_with_ping()
-
- def sync_with_ping(self, timeout=60):
- """Ensure ProcessMessages is called on this connection"""
+ self.send_message(msg_ping(nonce=0))
self.send_message(msg_ping(nonce=self.ping_counter))
def test_function():
diff --git a/test/functional/test_framework/script.py b/test/functional/test_framework/script.py
index 443cae86a1..17a954cb22 100644
--- a/test/functional/test_framework/script.py
+++ b/test/functional/test_framework/script.py
@@ -689,6 +689,25 @@ def LegacySignatureHash(*args, **kwargs):
else:
return (hash256(msg), err)
+def sign_input_legacy(tx, input_index, input_scriptpubkey, privkey, sighash_type=SIGHASH_ALL):
+ """Add legacy ECDSA signature for a given transaction input. Note that the signature
+ is prepended to the scriptSig field, i.e. additional data pushes necessary for more
+ complex spends than P2PK (e.g. pubkey for P2PKH) can be already set before."""
+ (sighash, err) = LegacySignatureHash(input_scriptpubkey, tx, input_index, sighash_type)
+ assert err is None
+ der_sig = privkey.sign_ecdsa(sighash)
+ tx.vin[input_index].scriptSig = bytes(CScript([der_sig + bytes([sighash_type])])) + tx.vin[input_index].scriptSig
+ tx.rehash()
+
+def sign_input_segwitv0(tx, input_index, input_scriptpubkey, input_amount, privkey, sighash_type=SIGHASH_ALL):
+ """Add segwitv0 ECDSA signature for a given transaction input. Note that the signature
+ is inserted at the bottom of the witness stack, i.e. additional witness data
+ needed (e.g. pubkey for P2WPKH) can already be set before."""
+ sighash = SegwitV0SignatureHash(input_scriptpubkey, tx, input_index, sighash_type, input_amount)
+ der_sig = privkey.sign_ecdsa(sighash)
+ tx.wit.vtxinwit[input_index].scriptWitness.stack.insert(0, der_sig + bytes([sighash_type]))
+ tx.rehash()
+
# TODO: Allow cached hashPrevouts/hashSequence/hashOutputs to be provided.
# Performance optimization probably not necessary for python tests, however.
# Note that this corresponds to sigversion == 1 in EvalScript, which is used
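A hedged usage sketch (not part of the patch; assumes the test framework modules above are importable) of the new sign_input_legacy helper, signing a P2PK input in place:

    from test_framework.key import ECKey
    from test_framework.messages import COutPoint, CTransaction, CTxIn, CTxOut
    from test_framework.script import CScript, OP_CHECKSIG, sign_input_legacy

    key = ECKey()
    key.generate()
    spk = CScript([key.get_pubkey().get_bytes(), OP_CHECKSIG])  # P2PK output being spent

    tx = CTransaction()
    tx.vin = [CTxIn(COutPoint(0x1234, 0))]
    tx.vout = [CTxOut(0, CScript([]))]
    # any extra scriptSig pushes (e.g. a pubkey for P2PKH) must already be present,
    # since the helper prepends the signature to the existing scriptSig
    sign_input_legacy(tx, 0, spk, key)
    assert len(tx.vin[0].scriptSig) > 0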
diff --git a/test/functional/test_framework/siphash.py b/test/functional/test_framework/siphash.py
index 884dbcab46..bd13b2c948 100644
--- a/test/functional/test_framework/siphash.py
+++ b/test/functional/test_framework/siphash.py
@@ -31,7 +31,7 @@ def siphash_round(v0, v1, v2, v3):
def siphash(k0, k1, data):
- assert type(data) == bytes
+ assert type(data) is bytes
v0 = 0x736f6d6570736575 ^ k0
v1 = 0x646f72616e646f6d ^ k1
v2 = 0x6c7967656e657261 ^ k0
@@ -61,5 +61,5 @@ def siphash(k0, k1, data):
def siphash256(k0, k1, num):
- assert type(num) == int
+ assert type(num) is int
return siphash(k0, k1, num.to_bytes(32, 'little'))
diff --git a/test/functional/test_framework/socks5.py b/test/functional/test_framework/socks5.py
index 799b1c74b8..0ca06a7396 100644
--- a/test/functional/test_framework/socks5.py
+++ b/test/functional/test_framework/socks5.py
@@ -40,6 +40,7 @@ class Socks5Configuration():
self.af = socket.AF_INET # Bind address family
self.unauth = False # Support unauthenticated
self.auth = False # Support authentication
+ self.keep_alive = False # Do not automatically close connections
class Socks5Command():
"""Information about an incoming socks5 command."""
@@ -115,13 +116,14 @@ class Socks5Connection():
cmdin = Socks5Command(cmd, atyp, addr, port, username, password)
self.serv.queue.put(cmdin)
- logger.info('Proxy: %s', cmdin)
+ logger.debug('Proxy: %s', cmdin)
# Fall through to disconnect
except Exception as e:
logger.exception("socks5 request handling failed.")
self.serv.queue.put(e)
finally:
- self.conn.close()
+ if not self.serv.keep_alive:
+ self.conn.close()
class Socks5Server():
def __init__(self, conf):
@@ -133,6 +135,7 @@ class Socks5Server():
self.running = False
self.thread = None
self.queue = queue.Queue() # report connections and exceptions to client
+ self.keep_alive = conf.keep_alive
def run(self):
while self.running:
@@ -157,4 +160,3 @@ class Socks5Server():
s.connect(self.conf.addr)
s.close()
self.thread.join()
-
diff --git a/test/functional/test_framework/test_framework.py b/test/functional/test_framework/test_framework.py
index 73e7516ea7..a34c34713e 100755
--- a/test/functional/test_framework/test_framework.py
+++ b/test/functional/test_framework/test_framework.py
@@ -189,6 +189,8 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
parser.add_argument("--randomseed", type=int,
help="set a random seed for deterministically reproducing a previous test run")
parser.add_argument("--timeout-factor", dest="timeout_factor", type=float, help="adjust test timeouts by a factor. Setting it to 0 disables all timeouts")
+ parser.add_argument("--v2transport", dest="v2transport", default=False, action="store_true",
+ help="use BIP324 v2 connections between all nodes by default")
self.add_options(parser)
# Running TestShell in a Jupyter notebook causes an additional -f argument
@@ -504,6 +506,9 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
assert_equal(len(binary), num_nodes)
assert_equal(len(binary_cli), num_nodes)
for i in range(num_nodes):
+ args = list(extra_args[i])
+ if self.options.v2transport and ("-v2transport=0" not in args):
+ args.append("-v2transport=1")
test_node_i = TestNode(
i,
get_datadir_path(self.options.tmpdir, i),
@@ -517,7 +522,7 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
coverage_dir=self.options.coveragedir,
cwd=self.options.tmpdir,
extra_conf=extra_confs[i],
- extra_args=extra_args[i],
+ extra_args=args,
use_cli=self.options.usecli,
start_perf=self.options.perf,
use_valgrind=self.options.valgrind,
@@ -581,13 +586,33 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
def wait_for_node_exit(self, i, timeout):
self.nodes[i].process.wait(timeout)
- def connect_nodes(self, a, b):
+ def connect_nodes(self, a, b, *, peer_advertises_v2=None, wait_for_connect: bool = True):
+ """
+ Kwargs:
+ wait_for_connect: if True, block until the nodes are verified as connected. You might
+ want to disable this when using -stopatheight with one of the connected nodes,
+ since there will be a race between the actual connection and performing
+ the assertions before one node shuts down.
+ """
from_connection = self.nodes[a]
to_connection = self.nodes[b]
from_num_peers = 1 + len(from_connection.getpeerinfo())
to_num_peers = 1 + len(to_connection.getpeerinfo())
ip_port = "127.0.0.1:" + str(p2p_port(b))
- from_connection.addnode(ip_port, "onetry")
+
+ if peer_advertises_v2 is None:
+ peer_advertises_v2 = self.options.v2transport
+
+ if peer_advertises_v2:
+ from_connection.addnode(node=ip_port, command="onetry", v2transport=True)
+ else:
+ # skip the optional third argument (default false) for
+ # compatibility with older clients
+ from_connection.addnode(ip_port, "onetry")
+
+ if not wait_for_connect:
+ return
+
# poll until version handshake complete to avoid race conditions
# with transaction relaying
# See comments in net_processing:
@@ -595,12 +620,12 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
# * Must have a verack message before anything else
self.wait_until(lambda: sum(peer['version'] != 0 for peer in from_connection.getpeerinfo()) == from_num_peers)
self.wait_until(lambda: sum(peer['version'] != 0 for peer in to_connection.getpeerinfo()) == to_num_peers)
- self.wait_until(lambda: sum(peer['bytesrecv_per_msg'].pop('verack', 0) == 24 for peer in from_connection.getpeerinfo()) == from_num_peers)
- self.wait_until(lambda: sum(peer['bytesrecv_per_msg'].pop('verack', 0) == 24 for peer in to_connection.getpeerinfo()) == to_num_peers)
+ self.wait_until(lambda: sum(peer['bytesrecv_per_msg'].pop('verack', 0) >= 21 for peer in from_connection.getpeerinfo()) == from_num_peers)
+ self.wait_until(lambda: sum(peer['bytesrecv_per_msg'].pop('verack', 0) >= 21 for peer in to_connection.getpeerinfo()) == to_num_peers)
# The message bytes are counted before processing the message, so make
# sure it was fully processed by waiting for a ping.
- self.wait_until(lambda: sum(peer["bytesrecv_per_msg"].pop("pong", 0) >= 32 for peer in from_connection.getpeerinfo()) == from_num_peers)
- self.wait_until(lambda: sum(peer["bytesrecv_per_msg"].pop("pong", 0) >= 32 for peer in to_connection.getpeerinfo()) == to_num_peers)
+ self.wait_until(lambda: sum(peer["bytesrecv_per_msg"].pop("pong", 0) >= 29 for peer in from_connection.getpeerinfo()) == from_num_peers)
+ self.wait_until(lambda: sum(peer["bytesrecv_per_msg"].pop("pong", 0) >= 29 for peer in to_connection.getpeerinfo()) == to_num_peers)
def disconnect_nodes(self, a, b):
def disconnect_nodes_helper(node_a, node_b):
@@ -979,3 +1004,7 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
def is_bdb_compiled(self):
"""Checks whether the wallet module was compiled with BDB support."""
return self.config["components"].getboolean("USE_BDB")
+
+ def has_blockfile(self, node, filenum: str):
+ blocksdir = os.path.join(node.datadir, self.chain, 'blocks', '')
+ return os.path.isfile(os.path.join(blocksdir, f"blk{filenum}.dat"))
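A brief usage sketch (not part of the patch) of the new connect_nodes keywords: a test can advertise BIP324 for a single connection independently of --v2transport, or skip the handshake checks when one peer is about to shut down.

    # Advertise v2 transport for this connection regardless of the --v2transport default:
    self.connect_nodes(0, 1, peer_advertises_v2=True)
    # Do not block on the version handshake, e.g. when node 1 runs with -stopatheight:
    self.connect_nodes(0, 1, wait_for_connect=False)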
diff --git a/test/functional/test_framework/test_node.py b/test/functional/test_framework/test_node.py
index e5e3061def..6e12f6c964 100755
--- a/test/functional/test_framework/test_node.py
+++ b/test/functional/test_framework/test_node.py
@@ -144,6 +144,8 @@ class TestNode():
self.p2ps = []
self.timeout_factor = timeout_factor
+ self.mocktime = None
+
AddressKeyPair = collections.namedtuple('AddressKeyPair', ['address', 'key'])
PRIV_KEYS = [
# address , privkey
@@ -324,6 +326,15 @@ class TestNode():
assert not invalid_call
return self.__getattr__('generatetodescriptor')(*args, **kwargs)
+ def setmocktime(self, timestamp):
+ """Wrapper for setmocktime RPC, sets self.mocktime"""
+ if timestamp == 0:
+ # setmocktime(0) resets to system time.
+ self.mocktime = None
+ else:
+ self.mocktime = timestamp
+ return self.__getattr__('setmocktime')(timestamp)
+
def get_wallet_rpc(self, wallet_name):
if self.use_cli:
return RPCOverloadWrapper(self.cli("-rpcwallet={}".format(wallet_name)), True, self.descriptors)
@@ -353,21 +364,13 @@ class TestNode():
for profile_name in tuple(self.perf_subprocesses.keys()):
self._stop_perf(profile_name)
- # Check that stderr is as expected
- self.stderr.seek(0)
- stderr = self.stderr.read().decode('utf-8').strip()
- if stderr != expected_stderr:
- raise AssertionError("Unexpected stderr {} != {}".format(stderr, expected_stderr))
-
- self.stdout.close()
- self.stderr.close()
-
del self.p2ps[:]
+ assert (not expected_stderr) or wait_until_stopped # Must wait to check stderr
if wait_until_stopped:
- self.wait_until_stopped()
+ self.wait_until_stopped(expected_stderr=expected_stderr)
- def is_node_stopped(self, expected_ret_code=0):
+ def is_node_stopped(self, *, expected_stderr="", expected_ret_code=0):
"""Checks whether the node has stopped.
Returns True if the node has stopped. False otherwise.
@@ -381,6 +384,15 @@ class TestNode():
# process has stopped. Assert that it didn't return an error code.
assert return_code == expected_ret_code, self._node_msg(
f"Node returned unexpected exit code ({return_code}) vs ({expected_ret_code}) when stopping")
+ # Check that stderr is as expected
+ self.stderr.seek(0)
+ stderr = self.stderr.read().decode('utf-8').strip()
+ if stderr != expected_stderr:
+ raise AssertionError("Unexpected stderr {} != {}".format(stderr, expected_stderr))
+
+ self.stdout.close()
+ self.stderr.close()
+
self.running = False
self.process = None
self.rpc_connected = False
@@ -388,9 +400,9 @@ class TestNode():
self.log.debug("Node stopped")
return True
- def wait_until_stopped(self, timeout=BITCOIND_PROC_WAIT_TIMEOUT, expect_error=False):
+ def wait_until_stopped(self, *, timeout=BITCOIND_PROC_WAIT_TIMEOUT, expect_error=False, **kwargs):
expected_ret_code = 1 if expect_error else 0 # Whether node shutdown return EXIT_FAILURE or EXIT_SUCCESS
- wait_until_helper(lambda: self.is_node_stopped(expected_ret_code=expected_ret_code), timeout=timeout, timeout_factor=self.timeout_factor)
+ wait_until_helper(lambda: self.is_node_stopped(expected_ret_code=expected_ret_code, **kwargs), timeout=timeout, timeout_factor=self.timeout_factor)
def replace_in_config(self, replacements):
"""
@@ -420,11 +432,15 @@ class TestNode():
return self.chain_path / 'debug.log'
@property
+ def blocks_path(self) -> Path:
+ return self.chain_path / "blocks"
+
+ @property
def wallets_path(self) -> Path:
return self.chain_path / "wallets"
- def debug_log_bytes(self) -> int:
- with open(self.debug_log_path, encoding='utf-8') as dl:
+ def debug_log_size(self, **kwargs) -> int:
+ with open(self.debug_log_path, **kwargs) as dl:
dl.seek(0, 2)
return dl.tell()
@@ -433,13 +449,13 @@ class TestNode():
if unexpected_msgs is None:
unexpected_msgs = []
time_end = time.time() + timeout * self.timeout_factor
- prev_size = self.debug_log_bytes()
+ prev_size = self.debug_log_size(encoding="utf-8") # Must use same encoding that is used to read() below
yield
while True:
found = True
- with open(self.debug_log_path, encoding='utf-8') as dl:
+ with open(self.debug_log_path, encoding="utf-8", errors="replace") as dl:
dl.seek(prev_size)
log = dl.read()
print_log = " - " + "\n - ".join(log.splitlines())
@@ -464,7 +480,7 @@ class TestNode():
the number of log lines we encountered when matching
"""
time_end = time.time() + timeout * self.timeout_factor
- prev_size = self.debug_log_bytes()
+ prev_size = self.debug_log_size(mode="rb") # Must use same mode that is used to read() below
yield
@@ -643,10 +659,14 @@ class TestNode():
# in comparison to the upside of making tests less fragile and unexpected intermittent errors less likely.
p2p_conn.sync_with_ping()
- # Consistency check that the Bitcoin Core has received our user agent string. This checks the
- # node's newest peer. It could be racy if another Bitcoin Core node has connected since we opened
- # our connection, but we don't expect that to happen.
- assert_equal(self.getpeerinfo()[-1]['subver'], P2P_SUBVERSION)
+ # Consistency check that the node received our user agent string.
+ # Find our connection in getpeerinfo by our address:port and theirs, as this combination is unique.
+ sockname = p2p_conn._transport.get_extra_info("socket").getsockname()
+ our_addr_and_port = f"{sockname[0]}:{sockname[1]}"
+ dst_addr_and_port = f"{p2p_conn.dstaddr}:{p2p_conn.dstport}"
+ info = [peer for peer in self.getpeerinfo() if peer["addr"] == our_addr_and_port and peer["addrbind"] == dst_addr_and_port]
+ assert_equal(len(info), 1)
+ assert_equal(info[0]["subver"], P2P_SUBVERSION)
return p2p_conn
@@ -695,6 +715,13 @@ class TestNode():
wait_until_helper(lambda: self.num_test_p2p_connections() == 0, timeout_factor=self.timeout_factor)
+ def bumpmocktime(self, seconds):
+ """Fast forward using setmocktime to self.mocktime + seconds. Requires setmocktime to have
+ been called at some point in the past."""
+ assert self.mocktime
+ self.mocktime += seconds
+ self.setmocktime(self.mocktime)
+
class TestNodeCLIAttr:
def __init__(self, cli, command):
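A short usage sketch (not part of the patch) of the mocktime bookkeeping added above: the setmocktime wrapper records the value on the TestNode, and bumpmocktime advances it.

    node.setmocktime(1700000000)  # recorded in node.mocktime by the wrapper
    node.bumpmocktime(60)         # fast-forwards via setmocktime(1700000060)
    node.setmocktime(0)           # back to system time; node.mocktime resets to None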
diff --git a/test/functional/test_framework/wallet.py b/test/functional/test_framework/wallet.py
index 271095ea21..035a482f4c 100644
--- a/test/functional/test_framework/wallet.py
+++ b/test/functional/test_framework/wallet.py
@@ -36,12 +36,11 @@ from test_framework.messages import (
)
from test_framework.script import (
CScript,
- LegacySignatureHash,
LEAF_VERSION_TAPSCRIPT,
OP_NOP,
OP_RETURN,
OP_TRUE,
- SIGHASH_ALL,
+ sign_input_legacy,
taproot_construct,
)
from test_framework.script_util import (
@@ -166,18 +165,16 @@ class MiniWallet:
def sign_tx(self, tx, fixed_length=True):
if self._mode == MiniWalletMode.RAW_P2PK:
- (sighash, err) = LegacySignatureHash(CScript(self._scriptPubKey), tx, 0, SIGHASH_ALL)
- assert err is None
# for exact fee calculation, create only signatures with fixed size by default (>49.89% probability):
# 65 bytes: high-R val (33 bytes) + low-S val (32 bytes)
- # with the DER header/skeleton data of 6 bytes added, this leads to a target size of 71 bytes
- der_sig = b''
- while not len(der_sig) == 71:
- der_sig = self._priv_key.sign_ecdsa(sighash)
+ # with the DER header/skeleton data of 6 bytes added, plus 2 bytes scriptSig overhead
+ # (OP_PUSHn and SIGHASH_ALL), this leads to a scriptSig target size of 73 bytes
+ tx.vin[0].scriptSig = b''
+ while not len(tx.vin[0].scriptSig) == 73:
+ tx.vin[0].scriptSig = b''
+ sign_input_legacy(tx, 0, self._scriptPubKey, self._priv_key)
if not fixed_length:
break
- tx.vin[0].scriptSig = CScript([der_sig + bytes(bytearray([SIGHASH_ALL]))])
- tx.rehash()
elif self._mode == MiniWalletMode.RAW_OP_TRUE:
for i in tx.vin:
i.scriptSig = CScript([OP_NOP] * 43) # pad to identical size
@@ -211,7 +208,7 @@ class MiniWallet:
assert_equal(self._mode, MiniWalletMode.ADDRESS_OP_TRUE)
return self._address
- def get_utxo(self, *, txid: str = '', vout: Optional[int] = None, mark_as_spent=True) -> dict:
+ def get_utxo(self, *, txid: str = '', vout: Optional[int] = None, mark_as_spent=True, confirmed_only=False) -> dict:
"""
Returns a utxo and marks it as spent (pops it from the internal list)
@@ -227,19 +224,23 @@ class MiniWallet:
utxo_filter = reversed(mature_coins) # By default the largest utxo
if vout is not None:
utxo_filter = filter(lambda utxo: vout == utxo['vout'], utxo_filter)
+ if confirmed_only:
+ utxo_filter = filter(lambda utxo: utxo['confirmations'] > 0, utxo_filter)
index = self._utxos.index(next(utxo_filter))
if mark_as_spent:
return self._utxos.pop(index)
else:
return self._utxos[index]
- def get_utxos(self, *, include_immature_coinbase=False, mark_as_spent=True):
+ def get_utxos(self, *, include_immature_coinbase=False, mark_as_spent=True, confirmed_only=False):
"""Returns the list of all utxos and optionally mark them as spent"""
if not include_immature_coinbase:
blocks_height = self._test_node.getblockchaininfo()['blocks']
utxo_filter = filter(lambda utxo: not utxo['coinbase'] or COINBASE_MATURITY - 1 <= blocks_height - utxo['height'], self._utxos)
else:
utxo_filter = self._utxos
+ if confirmed_only:
+ utxo_filter = filter(lambda utxo: utxo['confirmations'] > 0, utxo_filter)
utxos = deepcopy(list(utxo_filter))
if mark_as_spent:
self._utxos = []
@@ -289,14 +290,15 @@ class MiniWallet:
locktime=0,
sequence=0,
fee_per_output=1000,
- target_weight=0
+ target_weight=0,
+ confirmed_only=False
):
"""
Create and return a transaction that spends the given UTXOs and creates a
certain number of outputs with equal amounts. The output amounts can be
set by amount_per_output or automatically calculated with a fee_per_output.
"""
- utxos_to_spend = utxos_to_spend or [self.get_utxo()]
+ utxos_to_spend = utxos_to_spend or [self.get_utxo(confirmed_only=confirmed_only)]
sequence = [sequence] * len(utxos_to_spend) if type(sequence) is int else sequence
assert_equal(len(utxos_to_spend), len(sequence))
@@ -336,9 +338,17 @@ class MiniWallet:
"tx": tx,
}
- def create_self_transfer(self, *, fee_rate=Decimal("0.003"), fee=Decimal("0"), utxo_to_spend=None, locktime=0, sequence=0, target_weight=0):
+ def create_self_transfer(self, *,
+ fee_rate=Decimal("0.003"),
+ fee=Decimal("0"),
+ utxo_to_spend=None,
+ locktime=0,
+ sequence=0,
+ target_weight=0,
+ confirmed_only=False
+ ):
"""Create and return a tx with the specified fee. If fee is 0, use fee_rate, where the resulting fee may be exact or at most one satoshi higher than needed."""
- utxo_to_spend = utxo_to_spend or self.get_utxo()
+ utxo_to_spend = utxo_to_spend or self.get_utxo(confirmed_only=confirmed_only)
assert fee_rate >= 0
assert fee >= 0
# calculate fee
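The 73-byte scriptSig target used in sign_tx above breaks down as follows (a sketch of the arithmetic, not framework code):

    R_LEN, S_LEN, DER_SKELETON = 33, 32, 6   # high-R r value, low-S s value, DER header/skeleton
    DER_SIG = R_LEN + S_LEN + DER_SKELETON   # 71-byte DER signature
    SCRIPTSIG = DER_SIG + 1 + 1              # plus SIGHASH_ALL byte and one push opcode
    assert (DER_SIG, SCRIPTSIG) == (71, 73)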
diff --git a/test/functional/test_runner.py b/test/functional/test_runner.py
index 9762476a5d..933ea276e7 100755
--- a/test/functional/test_runner.py
+++ b/test/functional/test_runner.py
@@ -76,6 +76,7 @@ TEST_FRAMEWORK_MODULES = [
"blocktools",
"ellswift",
"key",
+ "messages",
"muhash",
"ripemd160",
"script",
@@ -116,12 +117,14 @@ BASE_SCRIPTS = [
'wallet_backup.py --descriptors',
'feature_segwit.py --legacy-wallet',
'feature_segwit.py --descriptors',
+ 'feature_segwit.py --descriptors --v2transport',
'p2p_tx_download.py',
'wallet_avoidreuse.py --legacy-wallet',
'wallet_avoidreuse.py --descriptors',
'feature_abortnode.py',
'wallet_address_types.py --legacy-wallet',
'wallet_address_types.py --descriptors',
+ 'p2p_orphan_handling.py',
'wallet_basic.py --legacy-wallet',
'wallet_basic.py --descriptors',
'feature_maxtipage.py',
@@ -160,6 +163,7 @@ BASE_SCRIPTS = [
'wallet_abandonconflict.py --legacy-wallet',
'wallet_abandonconflict.py --descriptors',
'feature_reindex.py',
+ 'feature_reindex_readonly.py',
'wallet_labels.py --legacy-wallet',
'wallet_labels.py --descriptors',
'p2p_compactblocks.py',
@@ -192,6 +196,7 @@ BASE_SCRIPTS = [
'wallet_avoid_mixing_output_types.py --descriptors',
'mempool_reorg.py',
'p2p_block_sync.py',
+ 'p2p_block_sync.py --v2transport',
'wallet_createwallet.py --legacy-wallet',
'wallet_createwallet.py --usecli',
'wallet_createwallet.py --descriptors',
@@ -218,10 +223,13 @@ BASE_SCRIPTS = [
'wallet_transactiontime_rescan.py --legacy-wallet',
'p2p_addrv2_relay.py',
'p2p_compactblocks_hb.py',
+ 'p2p_compactblocks_hb.py --v2transport',
'p2p_disconnect_ban.py',
+ 'p2p_disconnect_ban.py --v2transport',
'feature_posix_fs_permissions.py',
'rpc_decodescript.py',
'rpc_blockchain.py',
+ 'rpc_blockchain.py --v2transport',
'rpc_deprecated.py',
'wallet_disable.py',
'wallet_change_address.py --legacy-wallet',
@@ -242,7 +250,10 @@ BASE_SCRIPTS = [
'mining_prioritisetransaction.py',
'p2p_invalid_locator.py',
'p2p_invalid_block.py',
+ 'p2p_invalid_block.py --v2transport',
'p2p_invalid_tx.py',
+ 'p2p_invalid_tx.py --v2transport',
+ 'p2p_v2_transport.py',
'example_test.py',
'wallet_txn_doublespend.py --legacy-wallet',
'wallet_multisig_descriptor_psbt.py --descriptors',
@@ -264,8 +275,12 @@ BASE_SCRIPTS = [
'wallet_importprunedfunds.py --legacy-wallet',
'wallet_importprunedfunds.py --descriptors',
'p2p_leak_tx.py',
+ 'p2p_leak_tx.py --v2transport',
'p2p_eviction.py',
'p2p_ibd_stalling.py',
+ 'p2p_ibd_stalling.py --v2transport',
+ 'p2p_net_deadlock.py',
+ 'p2p_net_deadlock.py --v2transport',
'wallet_signmessagewithaddress.py',
'rpc_signmessagewithprivkey.py',
'rpc_generate.py',
@@ -312,6 +327,7 @@ BASE_SCRIPTS = [
'wallet_sendall.py --descriptors',
'wallet_create_tx.py --descriptors',
'wallet_inactive_hdchains.py --legacy-wallet',
+ 'wallet_spend_unconfirmed.py',
'p2p_fingerprint.py',
'feature_uacomment.py',
'feature_init.py',
@@ -319,6 +335,7 @@ BASE_SCRIPTS = [
'wallet_coinbase_category.py --descriptors',
'feature_filelock.py',
'feature_loadblock.py',
+ 'feature_assumeutxo.py',
'p2p_dos_header_tree.py',
'p2p_add_connections.py',
'feature_bind_port_discover.py',
@@ -783,8 +800,8 @@ def check_script_prefixes():
def check_script_list(*, src_dir, fail_on_warn):
"""Check scripts directory.
- Check that there are no scripts in the functional tests directory which are
- not being run by pull-tester.py."""
+ Check that all python files in this directory are categorized
+ as a test script or meta script."""
script_dir = src_dir + '/test/functional/'
python_files = set([test_file for test_file in os.listdir(script_dir) if test_file.endswith(".py")])
missed_tests = list(python_files - set(map(lambda x: x.split()[0], ALL_SCRIPTS + NON_SCRIPTS)))
diff --git a/test/functional/tool_wallet.py b/test/functional/tool_wallet.py
index 327dd43e5a..8b0c8ce405 100755
--- a/test/functional/tool_wallet.py
+++ b/test/functional/tool_wallet.py
@@ -4,7 +4,6 @@
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test bitcoin-wallet."""
-import hashlib
import os
import stat
import subprocess
@@ -13,9 +12,10 @@ import textwrap
from collections import OrderedDict
from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import assert_equal
-
-BUFFER_SIZE = 16 * 1024
+from test_framework.util import (
+ assert_equal,
+ sha256sum_file,
+)
class ToolWalletTest(BitcoinTestFramework):
@@ -53,12 +53,7 @@ class ToolWalletTest(BitcoinTestFramework):
assert_equal(p.poll(), 0)
def wallet_shasum(self):
- h = hashlib.sha1()
- mv = memoryview(bytearray(BUFFER_SIZE))
- with open(self.wallet_path, 'rb', buffering=0) as f:
- for n in iter(lambda: f.readinto(mv), 0):
- h.update(mv[:n])
- return h.hexdigest()
+ return sha256sum_file(self.wallet_path).hex()
def wallet_timestamp(self):
return os.path.getmtime(self.wallet_path)
@@ -399,6 +394,62 @@ class ToolWalletTest(BitcoinTestFramework):
self.assert_raises_tool_error('Error: Checksum is not the correct size', '-wallet=badload', '-dumpfile={}'.format(bad_sum_wallet_dump), 'createfromdump')
assert not (self.nodes[0].wallets_path / "badload").is_dir()
+ def test_chainless_conflicts(self):
+ self.log.info("Test wallet tool when wallet contains conflicting transactions")
+ self.restart_node(0)
+ self.generate(self.nodes[0], 101)
+
+ def_wallet = self.nodes[0].get_wallet_rpc(self.default_wallet_name)
+
+ self.nodes[0].createwallet("conflicts")
+ wallet = self.nodes[0].get_wallet_rpc("conflicts")
+ def_wallet.sendtoaddress(wallet.getnewaddress(), 10)
+ self.generate(self.nodes[0], 1)
+
+ # parent tx
+ parent_txid = wallet.sendtoaddress(wallet.getnewaddress(), 9)
+ parent_txid_bytes = bytes.fromhex(parent_txid)[::-1]
+ conflict_utxo = wallet.gettransaction(txid=parent_txid, verbose=True)["decoded"]["vin"][0]
+
+ # The specific assertion in MarkConflicted being tested requires that the parent tx is already loaded
+ # by the time the child tx is loaded. Since transactions end up being loaded in txid order due to how both
+ # bdb and sqlite store things, we can just grind the child tx until it has a txid that is greater than the parent's.
+ locktime = 500000000 # Use locktime as nonce, starting at unix timestamp minimum
+ addr = wallet.getnewaddress()
+ while True:
+ child_send_res = wallet.send(outputs=[{addr: 8}], add_to_wallet=False, locktime=locktime)
+ child_txid = child_send_res["txid"]
+ child_txid_bytes = bytes.fromhex(child_txid)[::-1]
+ if (child_txid_bytes > parent_txid_bytes):
+ wallet.sendrawtransaction(child_send_res["hex"])
+ break
+ locktime += 1
+
+ # conflict with parent
+ conflict_unsigned = self.nodes[0].createrawtransaction(inputs=[conflict_utxo], outputs=[{wallet.getnewaddress(): 9.9999}])
+ conflict_signed = wallet.signrawtransactionwithwallet(conflict_unsigned)["hex"]
+ conflict_txid = self.nodes[0].sendrawtransaction(conflict_signed)
+ self.generate(self.nodes[0], 1)
+ assert_equal(wallet.gettransaction(txid=parent_txid)["confirmations"], -1)
+ assert_equal(wallet.gettransaction(txid=child_txid)["confirmations"], -1)
+ assert_equal(wallet.gettransaction(txid=conflict_txid)["confirmations"], 1)
+
+ self.stop_node(0)
+
+ # Wallet tool should successfully give info for this wallet
+ expected_output = textwrap.dedent(f'''\
+ Wallet info
+ ===========
+ Name: conflicts
+ Format: {"sqlite" if self.options.descriptors else "bdb"}
+ Descriptors: {"yes" if self.options.descriptors else "no"}
+ Encrypted: no
+ HD (hd seed available): yes
+ Keypool Size: {"8" if self.options.descriptors else "1"}
+ Transactions: 4
+ Address Book: 4
+ ''')
+ self.assert_tool_output(expected_output, "-wallet=conflicts", "info")
def run_test(self):
self.wallet_path = os.path.join(self.nodes[0].wallets_path, self.default_wallet_name, self.wallet_data_filename)
@@ -412,6 +463,7 @@ class ToolWalletTest(BitcoinTestFramework):
# Salvage is a legacy wallet only thing
self.test_salvage()
self.test_dump_createfromdump()
+ self.test_chainless_conflicts()
if __name__ == '__main__':
ToolWalletTest().main()
diff --git a/test/functional/wallet_backup.py b/test/functional/wallet_backup.py
index fa92ebd436..9f6f54c7a6 100755
--- a/test/functional/wallet_backup.py
+++ b/test/functional/wallet_backup.py
@@ -140,11 +140,6 @@ class WalletBackupTest(BitcoinTestFramework):
assert_raises_rpc_error(-36, error_message, node.restorewallet, wallet_name, backup_file)
assert os.path.exists(wallet_file)
- def init_three(self):
- self.init_wallet(node=0)
- self.init_wallet(node=1)
- self.init_wallet(node=2)
-
def run_test(self):
self.log.info("Generating initial blockchain")
self.generate(self.nodes[0], 1)
@@ -226,11 +221,14 @@ class WalletBackupTest(BitcoinTestFramework):
self.erase_three()
#start node2 with no chain
- shutil.rmtree(os.path.join(self.nodes[2].chain_path, 'blocks'))
+ shutil.rmtree(os.path.join(self.nodes[2].blocks_path))
shutil.rmtree(os.path.join(self.nodes[2].chain_path, 'chainstate'))
self.start_three(["-nowallet"])
- self.init_three()
+ # Create new wallets for the three nodes.
+ # We will use these empty wallets to test the 'importwallet()' RPC command below.
+ for node_num in range(3):
+ self.nodes[node_num].createwallet(wallet_name=self.default_wallet_name, descriptors=self.options.descriptors, load_on_startup=True)
assert_equal(self.nodes[0].getbalance(), 0)
assert_equal(self.nodes[1].getbalance(), 0)
diff --git a/test/functional/wallet_basic.py b/test/functional/wallet_basic.py
index a1b805c09e..01149a0977 100755
--- a/test/functional/wallet_basic.py
+++ b/test/functional/wallet_basic.py
@@ -310,8 +310,7 @@ class WalletTest(BitcoinTestFramework):
node_0_bal += amount
assert_equal(self.nodes[0].getbalance(), node_0_bal)
- for key in ["totalFee", "feeRate"]:
- assert_raises_rpc_error(-8, "Unknown named parameter key", self.nodes[2].sendtoaddress, address=address, amount=1, fee_rate=1, key=1)
+ assert_raises_rpc_error(-8, "Unknown named parameter feeRate", self.nodes[2].sendtoaddress, address=address, amount=1, fee_rate=1, feeRate=1)
# Test setting explicit fee rate just below the minimum.
self.log.info("Test sendmany raises 'fee rate too low' if fee_rate of 0.99999999 is passed")
@@ -328,7 +327,7 @@ class WalletTest(BitcoinTestFramework):
for invalid_value in ["", 0.000000001, 1e-09, 1.111111111, 1111111111111111, "31.999999999999999999999"]:
assert_raises_rpc_error(-3, msg, self.nodes[2].sendmany, amounts={address: 1.0}, fee_rate=invalid_value)
# Test fee_rate values that cannot be represented in sat/vB.
- for invalid_value in [0.0001, 0.00000001, 0.00099999, 31.99999999, "0.0001", "0.00000001", "0.00099999", "31.99999999"]:
+ for invalid_value in [0.0001, 0.00000001, 0.00099999, 31.99999999]:
assert_raises_rpc_error(-3, msg, self.nodes[2].sendmany, amounts={address: 10}, fee_rate=invalid_value)
# Test fee_rate out of range (negative number).
assert_raises_rpc_error(-3, OUT_OF_RANGE, self.nodes[2].sendmany, amounts={address: 10}, fee_rate=-1)
@@ -505,9 +504,6 @@ class WalletTest(BitcoinTestFramework):
fee = prebalance - postbalance - amount
assert_fee_amount(fee, tx_size, Decimal(fee_rate_btc_kvb))
- for key in ["totalFee", "feeRate"]:
- assert_raises_rpc_error(-8, "Unknown named parameter key", self.nodes[2].sendtoaddress, address=address, amount=1, fee_rate=1, key=1)
-
# Test setting explicit fee rate just below the minimum.
self.log.info("Test sendtoaddress raises 'fee rate too low' if fee_rate of 0.99999999 is passed")
assert_raises_rpc_error(-6, "Fee rate (0.999 sat/vB) is lower than the minimum fee rate setting (1.000 sat/vB)",
@@ -523,7 +519,7 @@ class WalletTest(BitcoinTestFramework):
for invalid_value in ["", 0.000000001, 1e-09, 1.111111111, 1111111111111111, "31.999999999999999999999"]:
assert_raises_rpc_error(-3, msg, self.nodes[2].sendtoaddress, address=address, amount=1.0, fee_rate=invalid_value)
# Test fee_rate values that cannot be represented in sat/vB.
- for invalid_value in [0.0001, 0.00000001, 0.00099999, 31.99999999, "0.0001", "0.00000001", "0.00099999", "31.99999999"]:
+ for invalid_value in [0.0001, 0.00000001, 0.00099999, 31.99999999]:
assert_raises_rpc_error(-3, msg, self.nodes[2].sendtoaddress, address=address, amount=10, fee_rate=invalid_value)
# Test fee_rate out of range (negative number).
assert_raises_rpc_error(-3, OUT_OF_RANGE, self.nodes[2].sendtoaddress, address=address, amount=1.0, fee_rate=-1)
diff --git a/test/functional/wallet_bumpfee.py b/test/functional/wallet_bumpfee.py
index b9ebf64c22..fea933a93b 100755
--- a/test/functional/wallet_bumpfee.py
+++ b/test/functional/wallet_bumpfee.py
@@ -24,9 +24,11 @@ from test_framework.messages import (
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
+ assert_fee_amount,
assert_greater_than,
assert_raises_rpc_error,
get_fee,
+ find_vout_for_address,
)
from test_framework.wallet import MiniWallet
@@ -109,6 +111,8 @@ class BumpFeeTest(BitcoinTestFramework):
test_small_output_with_feerate_succeeds(self, rbf_node, dest_address)
test_no_more_inputs_fails(self, rbf_node, dest_address)
self.test_bump_back_to_yourself()
+ self.test_provided_change_pos(rbf_node)
+ self.test_single_output()
# Context independent tests
test_feerate_checks_replaced_outputs(self, rbf_node, peer_node)
@@ -137,7 +141,7 @@ class BumpFeeTest(BitcoinTestFramework):
for invalid_value in ["", 0.000000001, 1e-09, 1.111111111, 1111111111111111, "31.999999999999999999999"]:
assert_raises_rpc_error(-3, msg, rbf_node.bumpfee, rbfid, fee_rate=invalid_value)
# Test fee_rate values that cannot be represented in sat/vB.
- for invalid_value in [0.0001, 0.00000001, 0.00099999, 31.99999999, "0.0001", "0.00000001", "0.00099999", "31.99999999"]:
+ for invalid_value in [0.0001, 0.00000001, 0.00099999, 31.99999999]:
assert_raises_rpc_error(-3, msg, rbf_node.bumpfee, rbfid, fee_rate=invalid_value)
# Test fee_rate out of range (negative number).
assert_raises_rpc_error(-3, "Amount out of range", rbf_node.bumpfee, rbfid, fee_rate=-1)
@@ -174,6 +178,13 @@ class BumpFeeTest(BitcoinTestFramework):
assert_raises_rpc_error(-8, "Invalid parameter, duplicate key: data",
rbf_node.bumpfee, rbfid, {"outputs": [{"data": "deadbeef"}, {"data": "deadbeef"}]})
+ self.log.info("Test original_change_index option")
+ assert_raises_rpc_error(-1, "JSON integer out of range", rbf_node.bumpfee, rbfid, {"original_change_index": -1})
+ assert_raises_rpc_error(-8, "Change position is out of range", rbf_node.bumpfee, rbfid, {"original_change_index": 2})
+
+ self.log.info("Test outputs and original_change_index cannot both be provided")
+ assert_raises_rpc_error(-8, "The options 'outputs' and 'original_change_index' are incompatible. You can only either specify a new set of outputs, or designate a change output to be recycled.", rbf_node.bumpfee, rbfid, {"original_change_index": 2, "outputs": [{dest_address: 0.1}]})
+
self.clear_mempool()
def test_bump_back_to_yourself(self):
@@ -225,6 +236,72 @@ class BumpFeeTest(BitcoinTestFramework):
node.unloadwallet("back_to_yourself")
+ def test_provided_change_pos(self, rbf_node):
+ self.log.info("Test the original_change_index option")
+
+ change_addr = rbf_node.getnewaddress()
+ dest_addr = rbf_node.getnewaddress()
+ assert_equal(rbf_node.getaddressinfo(change_addr)["ischange"], False)
+ assert_equal(rbf_node.getaddressinfo(dest_addr)["ischange"], False)
+
+ send_res = rbf_node.send(outputs=[{dest_addr: 1}], options={"change_address": change_addr})
+ assert send_res["complete"]
+ txid = send_res["txid"]
+
+ tx = rbf_node.gettransaction(txid=txid, verbose=True)
+ assert_equal(len(tx["decoded"]["vout"]), 2)
+
+ change_pos = find_vout_for_address(rbf_node, txid, change_addr)
+ change_value = tx["decoded"]["vout"][change_pos]["value"]
+
+ bumped = rbf_node.bumpfee(txid, {"original_change_index": change_pos})
+ new_txid = bumped["txid"]
+
+ new_tx = rbf_node.gettransaction(txid=new_txid, verbose=True)
+ assert_equal(len(new_tx["decoded"]["vout"]), 2)
+ new_change_pos = find_vout_for_address(rbf_node, new_txid, change_addr)
+ new_change_value = new_tx["decoded"]["vout"][new_change_pos]["value"]
+
+ assert_greater_than(change_value, new_change_value)
+
+
+ def test_single_output(self):
+ self.log.info("Test that single output txs can be bumped")
+ node = self.nodes[1]
+
+ node.createwallet("single_out_rbf")
+ wallet = node.get_wallet_rpc("single_out_rbf")
+
+ addr = wallet.getnewaddress()
+ amount = Decimal("0.001")
+ # Make 2 UTXOs
+ self.nodes[0].sendtoaddress(addr, amount)
+ self.nodes[0].sendtoaddress(addr, amount)
+ self.generate(self.nodes[0], 1)
+ utxos = wallet.listunspent()
+
+ tx = wallet.sendall(recipients=[wallet.getnewaddress()], fee_rate=2, options={"inputs": [utxos[0]]})
+
+ # Designate the only output as change with an absurdly high feerate; this should fail because the output cannot cover the fee
+ assert_raises_rpc_error(-4, "The transaction amount is too small to pay the fee", wallet.bumpfee, txid=tx["txid"], options={"fee_rate": 1100, "original_change_index": 0})
+
+ # Specify single output as change successfully
+ bumped = wallet.bumpfee(txid=tx["txid"], options={"fee_rate": 10, "original_change_index": 0})
+ bumped_tx = wallet.gettransaction(txid=bumped["txid"], verbose=True)
+ assert_equal(len(bumped_tx["decoded"]["vout"]), 1)
+ assert_equal(len(bumped_tx["decoded"]["vin"]), 1)
+ assert_equal(bumped_tx["decoded"]["vout"][0]["value"] + bumped["fee"], amount)
+ assert_fee_amount(bumped["fee"], bumped_tx["decoded"]["vsize"], Decimal(10) / Decimal(1e8) * 1000)
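+ # (Unit note: assert_fee_amount takes a BTC/kvB feerate, so the 10 sat/vB target is written as
+ # 10 / 1e8 * 1000 = 0.0001 BTC/kvB; the 20 sat/vB check below uses the same conversion.)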
+
+ # Bumping without specifying change adds a new input and output
+ bumped = wallet.bumpfee(txid=bumped["txid"], options={"fee_rate": 20})
+ bumped_tx = wallet.gettransaction(txid=bumped["txid"], verbose=True)
+ assert_equal(len(bumped_tx["decoded"]["vout"]), 2)
+ assert_equal(len(bumped_tx["decoded"]["vin"]), 2)
+ assert_fee_amount(bumped["fee"], bumped_tx["decoded"]["vsize"], Decimal(20) / Decimal(1e8) * 1000)
+
+ wallet.unloadwallet()
+
def test_simple_bumpfee_succeeds(self, mode, rbf_node, peer_node, dest_address):
self.log.info('Test simple bumpfee: {}'.format(mode))
rbfid = spend_one_input(rbf_node, dest_address)
@@ -326,8 +403,7 @@ def test_notmine_bumpfee(self, rbf_node, peer_node, dest_address):
def finish_psbtbumpfee(psbt):
psbt = rbf_node.walletprocesspsbt(psbt)
psbt = peer_node.walletprocesspsbt(psbt["psbt"])
- final = rbf_node.finalizepsbt(psbt["psbt"])
- res = rbf_node.testmempoolaccept([final["hex"]])
+ res = rbf_node.testmempoolaccept([psbt["hex"]])
assert res[0]["allowed"]
assert_greater_than(res[0]["fees"]["base"], old_fee)
@@ -561,8 +637,7 @@ def test_watchonly_psbt(self, peer_node, rbf_node, dest_address):
psbt = watcher.walletcreatefundedpsbt([watcher.listunspent()[0]], {dest_address: 0.0005}, 0,
{"fee_rate": 1, "add_inputs": False}, True)['psbt']
psbt_signed = signer.walletprocesspsbt(psbt=psbt, sign=True, sighashtype="ALL", bip32derivs=True)
- psbt_final = watcher.finalizepsbt(psbt_signed["psbt"])
- original_txid = watcher.sendrawtransaction(psbt_final["hex"])
+ original_txid = watcher.sendrawtransaction(psbt_signed["hex"])
assert_equal(len(watcher.decodepsbt(psbt)["tx"]["vin"]), 1)
# bumpfee can't be used on watchonly wallets
@@ -577,11 +652,10 @@ def test_watchonly_psbt(self, peer_node, rbf_node, dest_address):
# Sign bumped transaction
bumped_psbt_signed = signer.walletprocesspsbt(psbt=bumped_psbt["psbt"], sign=True, sighashtype="ALL", bip32derivs=True)
- bumped_psbt_final = watcher.finalizepsbt(bumped_psbt_signed["psbt"])
- assert bumped_psbt_final["complete"]
+ assert bumped_psbt_signed["complete"]
# Broadcast bumped transaction
- bumped_txid = watcher.sendrawtransaction(bumped_psbt_final["hex"])
+ bumped_txid = watcher.sendrawtransaction(bumped_psbt_signed["hex"])
assert bumped_txid in rbf_node.getrawmempool()
assert original_txid not in rbf_node.getrawmempool()
diff --git a/test/functional/wallet_createwallet.py b/test/functional/wallet_createwallet.py
index 75b507c387..eb83e11f36 100755
--- a/test/functional/wallet_createwallet.py
+++ b/test/functional/wallet_createwallet.py
@@ -109,7 +109,7 @@ class CreateWalletTest(BitcoinTestFramework):
assert_raises_rpc_error(-4, "Error: This wallet has no available keys", w4.getnewaddress)
assert_raises_rpc_error(-4, "Error: This wallet has no available keys", w4.getrawchangeaddress)
# Now set a seed and it should work. Wallet should also be encrypted
- w4.walletpassphrase('pass', 60)
+ w4.walletpassphrase("pass", 999000)
if self.options.descriptors:
w4.importdescriptors([{
'desc': descsum_create('wpkh(tprv8ZgxMBicQKsPcwuZGKp8TeWppSuLMiLe2d9PupB14QpPeQsqoj3LneJLhGHH13xESfvASyd4EFLJvLrG8b7DrLxEuV7hpF9uUc6XruKA1Wq/0h/*)'),
@@ -142,7 +142,7 @@ class CreateWalletTest(BitcoinTestFramework):
self.nodes[0].createwallet(wallet_name='wblank', disable_private_keys=False, blank=True, passphrase='thisisapassphrase')
wblank = node.get_wallet_rpc('wblank')
assert_raises_rpc_error(-13, "Error: Please enter the wallet passphrase with walletpassphrase first.", wblank.signmessage, "needanargument", "test")
- wblank.walletpassphrase('thisisapassphrase', 60)
+ wblank.walletpassphrase("thisisapassphrase", 999000)
assert_raises_rpc_error(-4, "Error: This wallet has no available keys", wblank.getnewaddress)
assert_raises_rpc_error(-4, "Error: This wallet has no available keys", wblank.getrawchangeaddress)
@@ -151,7 +151,7 @@ class CreateWalletTest(BitcoinTestFramework):
self.nodes[0].createwallet(wallet_name='w6', disable_private_keys=False, blank=False, passphrase='thisisapassphrase')
w6 = node.get_wallet_rpc('w6')
assert_raises_rpc_error(-13, "Error: Please enter the wallet passphrase with walletpassphrase first.", w6.signmessage, "needanargument", "test")
- w6.walletpassphrase('thisisapassphrase', 60)
+ w6.walletpassphrase("thisisapassphrase", 999000)
w6.signmessage(w6.getnewaddress('', 'legacy'), "test")
w6.keypoolrefill(1)
# There should only be 1 key for legacy, 3 for descriptors
diff --git a/test/functional/wallet_descriptor.py b/test/functional/wallet_descriptor.py
index f4b67bae1b..6af01f8cfd 100755
--- a/test/functional/wallet_descriptor.py
+++ b/test/functional/wallet_descriptor.py
@@ -129,7 +129,7 @@ class WalletDescriptorTest(BitcoinTestFramework):
# Encrypt wallet 0
send_wrpc.encryptwallet('pass')
- send_wrpc.walletpassphrase('pass', 10)
+ send_wrpc.walletpassphrase("pass", 999000)
addr = send_wrpc.getnewaddress()
info2 = send_wrpc.getaddressinfo(addr)
assert info1['hdmasterfingerprint'] != info2['hdmasterfingerprint']
@@ -143,7 +143,7 @@ class WalletDescriptorTest(BitcoinTestFramework):
send_wrpc.getnewaddress()
self.log.info("Test that unlock is needed when deriving only hardened keys in an encrypted wallet")
- send_wrpc.walletpassphrase('pass', 10)
+ send_wrpc.walletpassphrase("pass", 999000)
send_wrpc.importdescriptors([{
"desc": "wpkh(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/*h)#y4dfsj7n",
"timestamp": "now",
@@ -235,9 +235,11 @@ class WalletDescriptorTest(BitcoinTestFramework):
self.nodes[0].createwallet(wallet_name="crashme", descriptors=True)
self.nodes[0].unloadwallet("crashme")
wallet_db = os.path.join(self.nodes[0].wallets_path, "crashme", self.wallet_data_filename)
- with sqlite3.connect(wallet_db) as conn:
+ conn = sqlite3.connect(wallet_db)
+ with conn:
# add "cscript" entry: key type is uint160 (20 bytes), value type is CScript (zero-length here)
conn.execute('INSERT INTO main VALUES(?, ?)', (b'\x07cscript' + b'\x00'*20, b'\x00'))
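+ # (The leading b'\x07' is the compact-size length prefix of the 7-character record type string "cscript";
+ # the 20 zero bytes stand in for the uint160 key that follows it.)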
+ conn.close()
assert_raises_rpc_error(-4, "Unexpected legacy entry in descriptor wallet found.", self.nodes[0].loadwallet, "crashme")
diff --git a/test/functional/wallet_dump.py b/test/functional/wallet_dump.py
index cf20ff1239..8c68d03f97 100755
--- a/test/functional/wallet_dump.py
+++ b/test/functional/wallet_dump.py
@@ -173,7 +173,7 @@ class WalletDumpTest(BitcoinTestFramework):
# encrypt wallet, restart, unlock and dump
self.nodes[0].encryptwallet('test')
- self.nodes[0].walletpassphrase('test', 100)
+ self.nodes[0].walletpassphrase("test", 999000)
# Should be a no-op:
self.nodes[0].keypoolrefill()
self.nodes[0].dumpwallet(wallet_enc_dump)
diff --git a/test/functional/wallet_encryption.py b/test/functional/wallet_encryption.py
index 88b9ebbddd..e8381ba8f2 100755
--- a/test/functional/wallet_encryption.py
+++ b/test/functional/wallet_encryption.py
@@ -59,7 +59,7 @@ class WalletEncryptionTest(BitcoinTestFramework):
assert_raises_rpc_error(-14, "wallet passphrase entered was incorrect", self.nodes[0].walletpassphrase, passphrase + "wrong", 10)
# Test walletlock
- self.nodes[0].walletpassphrase(passphrase, 84600)
+ self.nodes[0].walletpassphrase(passphrase, 999000)
sig = self.nodes[0].signmessage(address, msg)
assert self.nodes[0].verifymessage(address, sig, msg)
self.nodes[0].walletlock()
@@ -68,7 +68,7 @@ class WalletEncryptionTest(BitcoinTestFramework):
# Test passphrase changes
self.nodes[0].walletpassphrasechange(passphrase, passphrase2)
assert_raises_rpc_error(-14, "wallet passphrase entered was incorrect", self.nodes[0].walletpassphrase, passphrase, 10)
- self.nodes[0].walletpassphrase(passphrase2, 10)
+ self.nodes[0].walletpassphrase(passphrase2, 999000)
sig = self.nodes[0].signmessage(address, msg)
assert self.nodes[0].verifymessage(address, sig, msg)
self.nodes[0].walletlock()
@@ -97,7 +97,7 @@ class WalletEncryptionTest(BitcoinTestFramework):
self.nodes[0].walletpassphrasechange(passphrase2, passphrase_with_nulls)
# walletpassphrasechange should not stop at null characters
assert_raises_rpc_error(-14, "wallet passphrase entered was incorrect", self.nodes[0].walletpassphrase, passphrase_with_nulls.partition("\0")[0], 10)
- self.nodes[0].walletpassphrase(passphrase_with_nulls, 10)
+ self.nodes[0].walletpassphrase(passphrase_with_nulls, 999000)
sig = self.nodes[0].signmessage(address, msg)
assert self.nodes[0].verifymessage(address, sig, msg)
self.nodes[0].walletlock()
diff --git a/test/functional/wallet_fundrawtransaction.py b/test/functional/wallet_fundrawtransaction.py
index 46706d6ad2..ca4feefb2b 100755
--- a/test/functional/wallet_fundrawtransaction.py
+++ b/test/functional/wallet_fundrawtransaction.py
@@ -23,6 +23,7 @@ from test_framework.util import (
assert_raises_rpc_error,
count_bytes,
find_vout_for_address,
+ get_fee,
)
from test_framework.wallet_util import generate_keypair
@@ -183,7 +184,6 @@ class RawTransactionsTest(BitcoinTestFramework):
inputs = [ ]
outputs = { self.nodes[0].getnewaddress() : 1.0 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
- dec_tx = self.nodes[2].decoderawtransaction(rawtx)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
assert len(dec_tx['vin']) > 0 #test that we have enough inputs
@@ -193,8 +193,6 @@ class RawTransactionsTest(BitcoinTestFramework):
inputs = [ ]
outputs = { self.nodes[0].getnewaddress() : 2.2 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
- dec_tx = self.nodes[2].decoderawtransaction(rawtx)
-
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
assert len(dec_tx['vin']) > 0 #test if we have enough inputs
@@ -206,13 +204,9 @@ class RawTransactionsTest(BitcoinTestFramework):
inputs = [ ]
outputs = { self.nodes[0].getnewaddress() : 2.6, self.nodes[1].getnewaddress() : 2.5 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
- dec_tx = self.nodes[2].decoderawtransaction(rawtx)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
- totalOut = 0
- for out in dec_tx['vout']:
- totalOut += out['value']
assert len(dec_tx['vin']) > 0
assert_equal(dec_tx['vin'][0]['scriptSig']['hex'], '')
@@ -335,10 +329,8 @@ class RawTransactionsTest(BitcoinTestFramework):
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
- totalOut = 0
matchingOuts = 0
for i, out in enumerate(dec_tx['vout']):
- totalOut += out['value']
if out['scriptPubKey']['address'] in outputs:
matchingOuts+=1
else:
@@ -364,12 +356,9 @@ class RawTransactionsTest(BitcoinTestFramework):
# Should fail without add_inputs:
assert_raises_rpc_error(-4, ERR_NOT_ENOUGH_PRESET_INPUTS, self.nodes[2].fundrawtransaction, rawtx, add_inputs=False)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx, add_inputs=True)
-
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
- totalOut = 0
matchingOuts = 0
for out in dec_tx['vout']:
- totalOut += out['value']
if out['scriptPubKey']['address'] in outputs:
matchingOuts+=1
@@ -400,10 +389,8 @@ class RawTransactionsTest(BitcoinTestFramework):
rawtxfund = self.nodes[2].fundrawtransaction(rawtx, add_inputs=True)
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
- totalOut = 0
matchingOuts = 0
for out in dec_tx['vout']:
- totalOut += out['value']
if out['scriptPubKey']['address'] in outputs:
matchingOuts+=1
@@ -569,8 +556,7 @@ class RawTransactionsTest(BitcoinTestFramework):
funded_psbt = wmulti.walletcreatefundedpsbt(inputs=inputs, outputs=outputs, changeAddress=w2.getrawchangeaddress())['psbt']
signed_psbt = w2.walletprocesspsbt(funded_psbt)
- final_psbt = w2.finalizepsbt(signed_psbt['psbt'])
- self.nodes[2].sendrawtransaction(final_psbt['hex'])
+ self.nodes[2].sendrawtransaction(signed_psbt['hex'])
self.generate(self.nodes[2], 1)
# Make sure funds are received at node1.
@@ -581,11 +567,22 @@ class RawTransactionsTest(BitcoinTestFramework):
def test_locked_wallet(self):
self.log.info("Test fundrawtxn with locked wallet and hardened derivation")
- self.nodes[1].encryptwallet("test")
+ df_wallet = self.nodes[1].get_wallet_rpc(self.default_wallet_name)
+ self.nodes[1].createwallet(wallet_name="locked_wallet", descriptors=self.options.descriptors)
+ wallet = self.nodes[1].get_wallet_rpc("locked_wallet")
+ # This test is not meant to exercise fee estimation, so make sure all txs are sent at a consistent fee rate.
+ wallet.settxfee(self.min_relay_tx_fee)
+
+ # Add some balance to the wallet (this will be reverted at the end of the test)
+ df_wallet.sendall(recipients=[wallet.getnewaddress()])
+ self.generate(self.nodes[1], 1)
+
+ # Encrypt wallet and import descriptors
+ wallet.encryptwallet("test")
if self.options.descriptors:
- self.nodes[1].walletpassphrase('test', 10)
- self.nodes[1].importdescriptors([{
+ wallet.walletpassphrase("test", 999000)
+ wallet.importdescriptors([{
'desc': descsum_create('wpkh(tprv8ZgxMBicQKsPdYeeZbPSKd2KYLmeVKtcFA7kqCxDvDR13MQ6us8HopUR2wLcS2ZKPhLyKsqpDL2FtL73LMHcgoCL7DXsciA8eX8nbjCR2eG/0h/*h)'),
'timestamp': 'now',
'active': True
@@ -596,49 +593,60 @@ class RawTransactionsTest(BitcoinTestFramework):
'active': True,
'internal': True
}])
- self.nodes[1].walletlock()
+ wallet.walletlock()
# Drain the keypool.
- self.nodes[1].getnewaddress()
- self.nodes[1].getrawchangeaddress()
+ wallet.getnewaddress()
+ wallet.getrawchangeaddress()
+
+ # Choose input
+ inputs = wallet.listunspent()
+
+ # Deduce exact fee to produce a changeless transaction
+ tx_size = 110 # Total tx size: 110 vbytes, p2wpkh -> p2wpkh. Input 68 vbytes + rest of tx is 42 vbytes.
+ value = inputs[0]["amount"] - get_fee(tx_size, self.min_relay_tx_fee)
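+ # (For example, assuming the regtest default relayfee of 0.00001 BTC/kvB, i.e. 1 sat/vB:
+ # get_fee(110, 0.00001) is 0.00000110 BTC, so `value` leaves exactly 110 sat behind for the fee
+ # and fundrawtransaction needs no change output.)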
- # Choose 2 inputs
- inputs = self.nodes[1].listunspent()[0:2]
- value = sum(inp["amount"] for inp in inputs) - Decimal("0.00000500") # Pay a 500 sat fee
outputs = {self.nodes[0].getnewaddress():value}
- rawtx = self.nodes[1].createrawtransaction(inputs, outputs)
+ rawtx = wallet.createrawtransaction(inputs, outputs)
# fund a transaction that does not require a new key for the change output
- self.nodes[1].fundrawtransaction(rawtx)
+ funded_tx = wallet.fundrawtransaction(rawtx)
+ assert_equal(funded_tx["changepos"], -1)
# fund a transaction that requires a new key for the change output
# creating the key must be impossible because the wallet is locked
outputs = {self.nodes[0].getnewaddress():value - Decimal("0.1")}
- rawtx = self.nodes[1].createrawtransaction(inputs, outputs)
- assert_raises_rpc_error(-4, "Transaction needs a change address, but we can't generate it.", self.nodes[1].fundrawtransaction, rawtx)
+ rawtx = wallet.createrawtransaction(inputs, outputs)
+ assert_raises_rpc_error(-4, "Transaction needs a change address, but we can't generate it.", wallet.fundrawtransaction, rawtx)
# Refill the keypool.
- self.nodes[1].walletpassphrase("test", 100)
- self.nodes[1].keypoolrefill(8) #need to refill the keypool to get an internal change address
- self.nodes[1].walletlock()
+ wallet.walletpassphrase("test", 999000)
+ wallet.keypoolrefill(8) #need to refill the keypool to get an internal change address
+ wallet.walletlock()
- assert_raises_rpc_error(-13, "walletpassphrase", self.nodes[1].sendtoaddress, self.nodes[0].getnewaddress(), 1.2)
+ assert_raises_rpc_error(-13, "walletpassphrase", wallet.sendtoaddress, self.nodes[0].getnewaddress(), 1.2)
oldBalance = self.nodes[0].getbalance()
inputs = []
outputs = {self.nodes[0].getnewaddress():1.1}
- rawtx = self.nodes[1].createrawtransaction(inputs, outputs)
- fundedTx = self.nodes[1].fundrawtransaction(rawtx)
+ rawtx = wallet.createrawtransaction(inputs, outputs)
+ fundedTx = wallet.fundrawtransaction(rawtx)
+ assert fundedTx["changepos"] != -1
# Now we need to unlock.
- self.nodes[1].walletpassphrase("test", 600)
- signedTx = self.nodes[1].signrawtransactionwithwallet(fundedTx['hex'])
- self.nodes[1].sendrawtransaction(signedTx['hex'])
+ wallet.walletpassphrase("test", 999000)
+ signedTx = wallet.signrawtransactionwithwallet(fundedTx['hex'])
+ wallet.sendrawtransaction(signedTx['hex'])
self.generate(self.nodes[1], 1)
# Make sure funds are received at node1.
assert_equal(oldBalance+Decimal('51.10000000'), self.nodes[0].getbalance())
+ # Restore pre-test wallet state
+ wallet.sendall(recipients=[df_wallet.getnewaddress(), df_wallet.getnewaddress(), df_wallet.getnewaddress()])
+ wallet.unloadwallet()
+ self.generate(self.nodes[1], 1)
+
def test_many_inputs_fee(self):
"""Multiple (~19) inputs tx test | Compare fee."""
self.log.info("Test fundrawtxn fee with many inputs")
@@ -829,7 +837,7 @@ class RawTransactionsTest(BitcoinTestFramework):
for invalid_value in ["", 0.000000001, 1e-09, 1.111111111, 1111111111111111, "31.999999999999999999999"]:
assert_raises_rpc_error(-3, "Invalid amount", node.fundrawtransaction, rawtx, add_inputs=True, **{param: invalid_value})
# Test fee_rate values that cannot be represented in sat/vB.
- for invalid_value in [0.0001, 0.00000001, 0.00099999, 31.99999999, "0.0001", "0.00000001", "0.00099999", "31.99999999"]:
+ for invalid_value in [0.0001, 0.00000001, 0.00099999, 31.99999999]:
assert_raises_rpc_error(-3, "Invalid amount",
node.fundrawtransaction, rawtx, fee_rate=invalid_value, add_inputs=True)
diff --git a/test/functional/wallet_hd.py b/test/functional/wallet_hd.py
index 0fb0d7ea97..62f8301c16 100755
--- a/test/functional/wallet_hd.py
+++ b/test/functional/wallet_hd.py
@@ -87,7 +87,7 @@ class WalletHDTest(BitcoinTestFramework):
self.stop_node(1)
# we need to delete the complete chain directory
# otherwise node1 would auto-recover all funds and flag the keypool keys as used
- shutil.rmtree(os.path.join(self.nodes[1].chain_path, "blocks"))
+ shutil.rmtree(os.path.join(self.nodes[1].blocks_path))
shutil.rmtree(os.path.join(self.nodes[1].chain_path, "chainstate"))
shutil.copyfile(
os.path.join(self.nodes[1].datadir, "hd.bak"),
@@ -115,7 +115,7 @@ class WalletHDTest(BitcoinTestFramework):
# Try a RPC based rescan
self.stop_node(1)
- shutil.rmtree(os.path.join(self.nodes[1].chain_path, "blocks"))
+ shutil.rmtree(os.path.join(self.nodes[1].blocks_path))
shutil.rmtree(os.path.join(self.nodes[1].chain_path, "chainstate"))
shutil.copyfile(
os.path.join(self.nodes[1].datadir, "hd.bak"),
diff --git a/test/functional/wallet_keypool.py b/test/functional/wallet_keypool.py
index a39db3bfb8..0ba8a46bae 100755
--- a/test/functional/wallet_keypool.py
+++ b/test/functional/wallet_keypool.py
@@ -85,7 +85,7 @@ class KeyPoolTest(BitcoinTestFramework):
assert_raises_rpc_error(-12, "Error: Keypool ran out, please call keypoolrefill first", nodes[0].getnewaddress)
# put six (plus 2) new keys in the keypool (100% external-, +100% internal-keys, 1 in min)
- nodes[0].walletpassphrase('test', 12000)
+ nodes[0].walletpassphrase("test", 999000)
nodes[0].keypoolrefill(6)
nodes[0].walletlock()
wi = nodes[0].getwalletinfo()
@@ -131,7 +131,7 @@ class KeyPoolTest(BitcoinTestFramework):
nodes[0].getnewaddress()
assert_raises_rpc_error(-12, "Keypool ran out", nodes[0].getnewaddress)
- nodes[0].walletpassphrase('test', 100)
+ nodes[0].walletpassphrase("test", 999000)
nodes[0].keypoolrefill(100)
wi = nodes[0].getwalletinfo()
if self.options.descriptors:
@@ -170,7 +170,7 @@ class KeyPoolTest(BitcoinTestFramework):
else:
res = w2.importmulti([{'desc': desc, 'timestamp': 'now'}])
assert_equal(res[0]['success'], True)
- w1.walletpassphrase('test', 100)
+ w1.walletpassphrase("test", 999000)
res = w1.sendtoaddress(address=address, amount=0.00010000)
self.generate(nodes[0], 1)
diff --git a/test/functional/wallet_migration.py b/test/functional/wallet_migration.py
index 278ef1ec87..bcd71197bf 100755
--- a/test/functional/wallet_migration.py
+++ b/test/functional/wallet_migration.py
@@ -6,8 +6,11 @@
import random
import shutil
+from test_framework.address import script_to_p2sh
from test_framework.descriptors import descsum_create
from test_framework.test_framework import BitcoinTestFramework
+from test_framework.messages import COIN, CTransaction, CTxOut
+from test_framework.script_util import key_to_p2pkh_script, script_to_p2sh_script, script_to_p2wsh_script
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
@@ -132,13 +135,22 @@ class WalletMigrationTest(BitcoinTestFramework):
self.generate(self.nodes[0], 1)
bal = basic1.getbalance()
txs = basic1.listtransactions()
+ addr_gps = basic1.listaddressgroupings()
- basic1.migratewallet()
+ basic1_migrate = basic1.migratewallet()
assert_equal(basic1.getwalletinfo()["descriptors"], True)
self.assert_is_sqlite("basic1")
assert_equal(basic1.getbalance(), bal)
self.assert_list_txs_equal(basic1.listtransactions(), txs)
+ self.log.info("Test backup file can be successfully restored")
+ self.nodes[0].restorewallet("basic1_restored", basic1_migrate['backup_path'])
+ basic1_restored = self.nodes[0].get_wallet_rpc("basic1_restored")
+ basic1_restored_wi = basic1_restored.getwalletinfo()
+ assert_equal(basic1_restored_wi['balance'], bal)
+ assert_equal(basic1_restored.listaddressgroupings(), addr_gps)
+ self.assert_list_txs_equal(basic1_restored.listtransactions(), txs)
+
# restart node and verify that everything is still there
self.restart_node(0)
default = self.nodes[0].get_wallet_rpc(self.default_wallet_name)
@@ -639,6 +651,125 @@ class WalletMigrationTest(BitcoinTestFramework):
for addr_info in [addr_external, addr_external_with_label]:
check(addr_info, wallet_solvables)
+ def test_migrate_raw_p2sh(self):
+ self.log.info("Test migration of watch-only raw p2sh script")
+ df_wallet = self.nodes[0].get_wallet_rpc(self.default_wallet_name)
+ wallet = self.create_legacy_wallet("raw_p2sh")
+
+ def send_to_script(script, amount):
+ tx = CTransaction()
+ tx.vout.append(CTxOut(nValue=amount*COIN, scriptPubKey=script))
+
+ hex_tx = df_wallet.fundrawtransaction(tx.serialize().hex())['hex']
+ signed_tx = df_wallet.signrawtransactionwithwallet(hex_tx)
+ df_wallet.sendrawtransaction(signed_tx['hex'])
+ self.generate(self.nodes[0], 1)
+
+ # Craft sh(pkh(key)) script and send coins to it
+ pubkey = df_wallet.getaddressinfo(df_wallet.getnewaddress())["pubkey"]
+ script_pkh = key_to_p2pkh_script(pubkey)
+ script_sh_pkh = script_to_p2sh_script(script_pkh)
+ send_to_script(script=script_sh_pkh, amount=2)
+
+ # Import script and check balance
+ wallet.rpc.importaddress(address=script_pkh.hex(), label="raw_spk", rescan=True, p2sh=True)
+ assert_equal(wallet.getbalances()['watchonly']['trusted'], 2)
+
+ # Craft wsh(pkh(key)) and send coins to it
+ pubkey = df_wallet.getaddressinfo(df_wallet.getnewaddress())["pubkey"]
+ script_wsh_pkh = script_to_p2wsh_script(key_to_p2pkh_script(pubkey))
+ send_to_script(script=script_wsh_pkh, amount=3)
+
+ # Import script and check balance
+ wallet.rpc.importaddress(address=script_wsh_pkh.hex(), label="raw_spk2", rescan=True, p2sh=False)
+ assert_equal(wallet.getbalances()['watchonly']['trusted'], 5)
+
+ # Import the sh(pkh()) script using importaddress() with the p2sh flag enabled.
+ # This wraps the script under another sh level, which is invalid, and stores it inside the wallet.
+ # The migration process must skip the invalid scripts and the address book records linked to them.
+ # They are not being watched by the current wallet, nor should they be watched by the migrated one.
+ label_sh_pkh = "raw_sh_pkh"
+ script_pkh = key_to_p2pkh_script(df_wallet.getaddressinfo(df_wallet.getnewaddress())["pubkey"])
+ script_sh_pkh = script_to_p2sh_script(script_pkh)
+ addy_script_sh_pkh = script_to_p2sh(script_pkh) # valid script address
+ addy_script_double_sh_pkh = script_to_p2sh(script_sh_pkh) # invalid script address
+
+ # Note: 'importaddress()' will add two scripts, a valid one sh(pkh()) and an invalid one 'sh(sh(pkh()))'.
+ # Both of them will be stored under the same address book label, and only the latter should
+ # be discarded during migration. The former must be migrated.
+ wallet.rpc.importaddress(address=script_sh_pkh.hex(), label=label_sh_pkh, rescan=False, p2sh=True)
+
+ # Migrate wallet and re-check balance
+ info_migration = wallet.migratewallet()
+ wallet_wo = self.nodes[0].get_wallet_rpc(info_migration["watchonly_name"])
+
+ # Watch-only balance is under "mine".
+ assert_equal(wallet_wo.getbalances()['mine']['trusted'], 5)
+ # The watch-only scripts are no longer part of the main wallet
+ assert_equal(wallet.getbalances()['mine']['trusted'], 0)
+
+ # The invalid sh(sh(pkh())) script label must not be part of the main wallet anymore
+ assert label_sh_pkh not in wallet.listlabels()
+ # But, the standard sh(pkh()) script should be part of the watch-only wallet.
+ addrs_by_label = wallet_wo.getaddressesbylabel(label_sh_pkh)
+ assert addy_script_sh_pkh in addrs_by_label
+ assert addy_script_double_sh_pkh not in addrs_by_label
+
+ # Also, the watch-only wallet should have the descriptor for the standard sh(pkh())
+ desc = descsum_create(f"addr({addy_script_sh_pkh})")
+ assert next(it['desc'] for it in wallet_wo.listdescriptors()['descriptors'] if it['desc'] == desc)
+ # And doesn't have a descriptor for the invalid one
+ desc_invalid = descsum_create(f"addr({addy_script_double_sh_pkh})")
+ assert_equal(next((it['desc'] for it in wallet_wo.listdescriptors()['descriptors'] if it['desc'] == desc_invalid), None), None)
+
+ # Just in case, also verify wallet restart
+ self.nodes[0].unloadwallet(info_migration["watchonly_name"])
+ self.nodes[0].loadwallet(info_migration["watchonly_name"])
+ assert_equal(wallet_wo.getbalances()['mine']['trusted'], 5)
+
+ def test_conflict_txs(self):
+ self.log.info("Test migration when wallet contains conflicting transactions")
+ def_wallet = self.nodes[0].get_wallet_rpc(self.default_wallet_name)
+
+ wallet = self.create_legacy_wallet("conflicts")
+ def_wallet.sendtoaddress(wallet.getnewaddress(), 10)
+ self.generate(self.nodes[0], 1)
+
+ # parent tx
+ parent_txid = wallet.sendtoaddress(wallet.getnewaddress(), 9)
+ parent_txid_bytes = bytes.fromhex(parent_txid)[::-1]
+ conflict_utxo = wallet.gettransaction(txid=parent_txid, verbose=True)["decoded"]["vin"][0]
+
+ # The specific assertion in MarkConflicted being tested requires that the parent tx is already loaded
+ # by the time the child tx is loaded. Since transactions end up being loaded in txid order due to how both
+ # bdb and sqlite store things, we can just grind the child tx until it has a txid that is greater than the parent's.
+ locktime = 500000000 # Use locktime as nonce, starting at unix timestamp minimum
+ addr = wallet.getnewaddress()
+ while True:
+ child_send_res = wallet.send(outputs=[{addr: 8}], add_to_wallet=False, locktime=locktime)
+ child_txid = child_send_res["txid"]
+ child_txid_bytes = bytes.fromhex(child_txid)[::-1]
+ if (child_txid_bytes > parent_txid_bytes):
+ wallet.sendrawtransaction(child_send_res["hex"])
+ break
+ locktime += 1
+
+ # conflict with parent
+ conflict_unsigned = self.nodes[0].createrawtransaction(inputs=[conflict_utxo], outputs=[{wallet.getnewaddress(): 9.9999}])
+ conflict_signed = wallet.signrawtransactionwithwallet(conflict_unsigned)["hex"]
+ conflict_txid = self.nodes[0].sendrawtransaction(conflict_signed)
+ self.generate(self.nodes[0], 1)
+ assert_equal(wallet.gettransaction(txid=parent_txid)["confirmations"], -1)
+ assert_equal(wallet.gettransaction(txid=child_txid)["confirmations"], -1)
+ assert_equal(wallet.gettransaction(txid=conflict_txid)["confirmations"], 1)
+
+ wallet.migratewallet()
+ assert_equal(wallet.gettransaction(txid=parent_txid)["confirmations"], -1)
+ assert_equal(wallet.gettransaction(txid=child_txid)["confirmations"], -1)
+ assert_equal(wallet.gettransaction(txid=conflict_txid)["confirmations"], 1)
+
+ wallet.unloadwallet()
+
def run_test(self):
self.generate(self.nodes[0], 101)
@@ -654,6 +785,8 @@ class WalletMigrationTest(BitcoinTestFramework):
self.test_default_wallet()
self.test_direct_file()
self.test_addressbook()
+ self.test_migrate_raw_p2sh()
+ self.test_conflict_txs()
if __name__ == '__main__':
WalletMigrationTest().main()
diff --git a/test/functional/wallet_miniscript.py b/test/functional/wallet_miniscript.py
index 7bc3424bf4..45f0df1c76 100755
--- a/test/functional/wallet_miniscript.py
+++ b/test/functional/wallet_miniscript.py
@@ -277,6 +277,18 @@ class WalletMiniscriptTest(BitcoinTestFramework):
assert not res["success"]
assert "is not sane: witnesses without signature exist" in res["error"]["message"]
+ # Sanity check we wouldn't let an unspendable Miniscript descriptor in
+ res = self.ms_wo_wallet.importdescriptors(
+ [
+ {
+ "desc": descsum_create("wsh(0)"),
+ "active": False,
+ "timestamp": "now",
+ }
+ ]
+ )[0]
+ assert not res["success"] and "is not satisfiable" in res["error"]["message"]
+
# Test we can track any type of Miniscript
for ms in MINISCRIPTS:
self.watchonly_test(ms)
diff --git a/test/functional/wallet_multisig_descriptor_psbt.py b/test/functional/wallet_multisig_descriptor_psbt.py
index 28bee1911e..68bf45f7e3 100755
--- a/test/functional/wallet_multisig_descriptor_psbt.py
+++ b/test/functional/wallet_multisig_descriptor_psbt.py
@@ -150,8 +150,7 @@ class WalletMultisigDescriptorPSBTTest(BitcoinTestFramework):
signing_wallet = participants["signers"][m]
psbt = signing_wallet.walletprocesspsbt(psbt["psbt"])
assert_equal(psbt["complete"], m == self.M - 1)
- finalized = coordinator_wallet.finalizepsbt(psbt["psbt"])
- coordinator_wallet.sendrawtransaction(finalized["hex"])
+ coordinator_wallet.sendrawtransaction(psbt["hex"])
self.log.info("Check that balances are correct after the transaction has been included in a block.")
self.generate(self.nodes[0], 1)
diff --git a/test/functional/wallet_pruning.py b/test/functional/wallet_pruning.py
index 9e6061287c..06bd992da7 100755
--- a/test/functional/wallet_pruning.py
+++ b/test/functional/wallet_pruning.py
@@ -106,7 +106,7 @@ class WalletPruningTest(BitcoinTestFramework):
def has_block(self, block_index):
"""Checks if the pruned node has the specific blk0000*.dat file"""
- return os.path.isfile(os.path.join(self.nodes[1].chain_path, "blocks", f"blk{block_index:05}.dat"))
+ return os.path.isfile(os.path.join(self.nodes[1].blocks_path, f"blk{block_index:05}.dat"))
def create_wallet(self, wallet_name, *, unload=False):
"""Creates and dumps a wallet on the non-pruned node0 to be later import by the pruned node"""
diff --git a/test/functional/wallet_resendwallettransactions.py b/test/functional/wallet_resendwallettransactions.py
index 7bdb6f5e3a..f61e1edc1d 100755
--- a/test/functional/wallet_resendwallettransactions.py
+++ b/test/functional/wallet_resendwallettransactions.py
@@ -5,6 +5,8 @@
"""Test that the wallet resends transactions periodically."""
import time
+from decimal import Decimal
+
from test_framework.blocktools import (
create_block,
create_coinbase,
@@ -15,6 +17,8 @@ from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
+ get_fee,
+ try_rpc,
)
class ResendWalletTransactionsTest(BitcoinTestFramework):
@@ -86,18 +90,34 @@ class ResendWalletTransactionsTest(BitcoinTestFramework):
# ordering of mapWallet is, if the child is not before the parent, we will create a new
# child (via bumpfee) and remove the old child (via removeprunedfunds) until we get the
# ordering of child before parent.
- child_txid = node.send(outputs=[{addr: 0.5}], inputs=[{"txid":txid, "vout":0}])["txid"]
+ child_inputs = [{"txid": txid, "vout": 0}]
+ child_txid = node.sendall(recipients=[addr], inputs=child_inputs)["txid"]
+ # Get the child tx's info for manual bumping
+ child_tx_info = node.gettransaction(txid=child_txid, verbose=True)
+ child_output_value = child_tx_info["decoded"]["vout"][0]["value"]
+ # Include an additional 1 vbyte buffer to handle when we have a smaller signature
+ additional_child_fee = get_fee(child_tx_info["decoded"]["vsize"] + 1, Decimal(0.00001100))
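+ # (Illustrative arithmetic: 0.000011 BTC/kvB is 1.1 sat/vB, so each bump lowers child_output_value by
+ # roughly (vsize + 1) * 1.1 satoshis, just over the default 1 sat/vB incremental relay fee the replacement must add.)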
while True:
txids = node.listreceivedbyaddress(minconf=0, address_filter=addr)[0]["txids"]
if txids == [child_txid, txid]:
break
- bumped = node.bumpfee(child_txid)
+ # Manually bump the tx
+ # The inputs and the output address stay the same, just changing the amount for the new fee
+ child_output_value -= additional_child_fee
+ bumped_raw = node.createrawtransaction(inputs=child_inputs, outputs=[{addr: child_output_value}])
+ bumped = node.signrawtransactionwithwallet(bumped_raw)
+ bumped_txid = node.decoderawtransaction(bumped["hex"])["txid"]
+ # Sometimes we get a signature that is slightly shorter than expected, which makes that tx's
+ # feerate a bit higher and the follow-up's a bit lower. This results in a replacement that
+ # can't be broadcast, so we just skip it and keep grinding.
+ if try_rpc(-26, "insufficient fee, rejecting replacement", node.sendrawtransaction, bumped["hex"]):
+ continue
# The scheduler queue creates a copy of the added tx after
# send/bumpfee and re-adds it to the wallet (undoing the next
# removeprunedfunds). So empty the scheduler queue:
node.syncwithvalidationinterfacequeue()
node.removeprunedfunds(child_txid)
- child_txid = bumped["txid"]
+ child_txid = bumped_txid
entry_time = node.getmempoolentry(child_txid)["time"]
block_time = entry_time + 6 * 60
@@ -108,9 +128,13 @@ class ResendWalletTransactionsTest(BitcoinTestFramework):
# Set correct m_best_block_time, which is used in ResubmitWalletTransactions
node.syncwithvalidationinterfacequeue()
- # Evict these txs from the mempool
evict_time = block_time + 60 * 60 * DEFAULT_MEMPOOL_EXPIRY_HOURS + 5
- node.setmocktime(evict_time)
+ # Flush out currently scheduled resubmit attempt now so that there can't be one right between eviction and check.
+ with node.assert_debug_log(['resubmit 2 unconfirmed transactions']):
+ node.setmocktime(evict_time)
+ node.mockscheduler(60)
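+ # (With the default mempool expiry of 336 hours, evict_time is just past the two-week expiry window,
+ # so the unconfirmed txs are dropped the next time a mempool operation triggers expiry.)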
+
+ # Evict these txs from the mempool
indep_send = node.send(outputs=[{node.getnewaddress(): 1}], inputs=[indep_utxo])
node.getmempoolentry(indep_send["txid"])
assert_raises_rpc_error(-5, "Transaction not in mempool", node.getmempoolentry, txid)
diff --git a/test/functional/wallet_send.py b/test/functional/wallet_send.py
index d7bb6ab1e7..6ce2a56bfc 100755
--- a/test/functional/wallet_send.py
+++ b/test/functional/wallet_send.py
@@ -387,7 +387,7 @@ class WalletSendTest(BitcoinTestFramework):
self.test_send(from_wallet=w0, to_wallet=w1, amount=1, fee_rate=invalid_value, expect_error=(-3, msg))
self.test_send(from_wallet=w0, to_wallet=w1, amount=1, arg_fee_rate=invalid_value, expect_error=(-3, msg))
# Test fee_rate values that cannot be represented in sat/vB.
- for invalid_value in [0.0001, 0.00000001, 0.00099999, 31.99999999, "0.0001", "0.00000001", "0.00099999", "31.99999999"]:
+ for invalid_value in [0.0001, 0.00000001, 0.00099999, 31.99999999]:
self.test_send(from_wallet=w0, to_wallet=w1, amount=1, fee_rate=invalid_value, expect_error=(-3, msg))
self.test_send(from_wallet=w0, to_wallet=w1, amount=1, arg_fee_rate=invalid_value, expect_error=(-3, msg))
# Test fee_rate out of range (negative number).
@@ -530,13 +530,11 @@ class WalletSendTest(BitcoinTestFramework):
signed = ext_wallet.walletprocesspsbt(res["psbt"])
signed = ext_fund.walletprocesspsbt(res["psbt"])
assert signed["complete"]
- self.nodes[0].finalizepsbt(signed["psbt"])
res = self.test_send(from_wallet=ext_wallet, to_wallet=self.nodes[0], amount=15, inputs=[ext_utxo], add_inputs=True, psbt=True, include_watching=True, solving_data={"descriptors": [desc]})
signed = ext_wallet.walletprocesspsbt(res["psbt"])
signed = ext_fund.walletprocesspsbt(res["psbt"])
assert signed["complete"]
- self.nodes[0].finalizepsbt(signed["psbt"])
dec = self.nodes[0].decodepsbt(signed["psbt"])
for i, txin in enumerate(dec["tx"]["vin"]):
@@ -574,8 +572,7 @@ class WalletSendTest(BitcoinTestFramework):
signed = ext_wallet.walletprocesspsbt(res["psbt"])
signed = ext_fund.walletprocesspsbt(res["psbt"])
assert signed["complete"]
- tx = self.nodes[0].finalizepsbt(signed["psbt"])
- testres = self.nodes[0].testmempoolaccept([tx["hex"]])[0]
+ testres = self.nodes[0].testmempoolaccept([signed["hex"]])[0]
assert_equal(testres["allowed"], True)
assert_fee_amount(testres["fees"]["base"], testres["vsize"], Decimal(0.0001))
diff --git a/test/functional/wallet_signer.py b/test/functional/wallet_signer.py
index 3e7c613e55..32a1887153 100755
--- a/test/functional/wallet_signer.py
+++ b/test/functional/wallet_signer.py
@@ -25,21 +25,21 @@ class WalletSignerTest(BitcoinTestFramework):
def mock_signer_path(self):
path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'mocks', 'signer.py')
if platform.system() == "Windows":
- return "py " + path
+ return "py -3 " + path
else:
return path
def mock_invalid_signer_path(self):
path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'mocks', 'invalid_signer.py')
if platform.system() == "Windows":
- return "py " + path
+ return "py -3 " + path
else:
return path
def mock_multi_signers_path(self):
path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'mocks', 'multi_signers.py')
if platform.system() == "Windows":
- return "py " + path
+ return "py -3 " + path
else:
return path
@@ -169,8 +169,7 @@ class WalletSignerTest(BitcoinTestFramework):
dest = self.nodes[0].getnewaddress(address_type='bech32')
mock_psbt = mock_wallet.walletcreatefundedpsbt([], {dest:0.5}, 0, {'replaceable': True}, True)['psbt']
mock_psbt_signed = mock_wallet.walletprocesspsbt(psbt=mock_psbt, sign=True, sighashtype="ALL", bip32derivs=True)
- mock_psbt_final = mock_wallet.finalizepsbt(mock_psbt_signed["psbt"])
- mock_tx = mock_psbt_final["hex"]
+ mock_tx = mock_psbt_signed["hex"]
assert mock_wallet.testmempoolaccept([mock_tx])[0]["allowed"]
# # Create a new wallet and populate with specific public keys, in order
diff --git a/test/functional/wallet_signrawtransactionwithwallet.py b/test/functional/wallet_signrawtransactionwithwallet.py
index 3d2f41cb83..d560dfdc11 100755
--- a/test/functional/wallet_signrawtransactionwithwallet.py
+++ b/test/functional/wallet_signrawtransactionwithwallet.py
@@ -33,6 +33,10 @@ from decimal import (
getcontext,
)
+
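+# A pre-built raw transaction (1 input, 1 P2WPKH output) used only to exercise error paths; it is never broadcast.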
+RAW_TX = '020000000156b958f78e3f24e0b2f4e4db1255426b0902027cb37e3ddadb52e37c3557dddb0000000000ffffffff01c0a6b929010000001600149a2ee8c77140a053f36018ac8124a6ececc1668a00000000'
+
+
class SignRawTransactionWithWalletTest(BitcoinTestFramework):
def add_options(self, parser):
self.add_wallet_options(parser)
@@ -47,10 +51,12 @@ class SignRawTransactionWithWalletTest(BitcoinTestFramework):
def test_with_lock_outputs(self):
self.log.info("Test correct error reporting when trying to sign a locked output")
self.nodes[0].encryptwallet("password")
+ assert_raises_rpc_error(-13, "Please enter the wallet passphrase with walletpassphrase first", self.nodes[0].signrawtransactionwithwallet, RAW_TX)
+ self.nodes[0].walletpassphrase("password", 9999)
- rawTx = '020000000156b958f78e3f24e0b2f4e4db1255426b0902027cb37e3ddadb52e37c3557dddb0000000000ffffffff01c0a6b929010000001600149a2ee8c77140a053f36018ac8124a6ececc1668a00000000'
-
- assert_raises_rpc_error(-13, "Please enter the wallet passphrase with walletpassphrase first", self.nodes[0].signrawtransactionwithwallet, rawTx)
+ def test_with_invalid_sighashtype(self):
+ self.log.info("Test signrawtransactionwithwallet raises if an invalid sighashtype is passed")
+ assert_raises_rpc_error(-8, "all is not a valid sighash parameter.", self.nodes[0].signrawtransactionwithwallet, hexstring=RAW_TX, sighashtype="all")
def script_verification_error_test(self):
"""Create and sign a raw transaction with valid (vin 0), invalid (vin 1) and one missing (vin 2) input script.
@@ -299,6 +305,7 @@ class SignRawTransactionWithWalletTest(BitcoinTestFramework):
self.script_verification_error_test()
self.OP_1NEGATE_test()
self.test_with_lock_outputs()
+ self.test_with_invalid_sighashtype()
self.test_fully_signed_tx()
self.test_signing_with_csv()
self.test_signing_with_cltv()
diff --git a/test/functional/wallet_spend_unconfirmed.py b/test/functional/wallet_spend_unconfirmed.py
new file mode 100755
index 0000000000..bfcdeaeaa8
--- /dev/null
+++ b/test/functional/wallet_spend_unconfirmed.py
@@ -0,0 +1,508 @@
+#!/usr/bin/env python3
+# Copyright (c) 2022 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+
+from decimal import Decimal, getcontext
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import (
+ assert_greater_than_or_equal,
+ assert_equal,
+ find_vout_for_address,
+)
+
+class UnconfirmedInputTest(BitcoinTestFramework):
+ def add_options(self, parser):
+ self.add_wallet_options(parser)
+
+ def set_test_params(self):
+ getcontext().prec=9
+ self.setup_clean_chain = True
+ self.num_nodes = 1
+
+ def setup_and_fund_wallet(self, walletname):
+ self.nodes[0].createwallet(walletname)
+ wallet = self.nodes[0].get_wallet_rpc(walletname)
+
+ self.def_wallet.sendtoaddress(address=wallet.getnewaddress(), amount=2)
+ self.generate(self.nodes[0], 1) # confirm funding tx
+ return wallet
+
+ def skip_test_if_missing_module(self):
+ self.skip_if_no_wallet()
+
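+ # Note on units (illustrative): gettransaction() reports "fee" as a negative BTC amount for outgoing
+ # transactions, so multiplying by Decimal(-1e8) yields positive satoshis; dividing by the decoded
+ # vsize then gives sat/vB (e.g. a fee of -0.00000220 BTC over 110 vbytes is 2 sat/vB).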
+ def calc_fee_rate(self, tx):
+ fee = Decimal(-1e8) * tx["fee"]
+ vsize = tx["decoded"]["vsize"]
+ return fee / vsize
+
+ def calc_set_fee_rate(self, txs):
+ fees = Decimal(-1e8) * sum([tx["fee"] for tx in txs]) # fee is negative!
+ vsizes = sum([tx["decoded"]["vsize"] for tx in txs])
+ return fees / vsizes
+
+ def assert_spends_only_parents(self, tx, parent_txids):
+ parent_checklist = parent_txids.copy()
+ number_inputs = len(tx["decoded"]["vin"])
+ assert_equal(number_inputs, len(parent_txids))
+ for i in range(number_inputs):
+ txid_of_input = tx["decoded"]["vin"][i]["txid"]
+ assert txid_of_input in parent_checklist
+ parent_checklist.remove(txid_of_input)
+
+ def assert_undershoots_target(self, tx):
+ resulting_fee_rate = self.calc_fee_rate(tx)
+ assert_greater_than_or_equal(self.target_fee_rate, resulting_fee_rate)
+
+ def assert_beats_target(self, tx):
+ resulting_fee_rate = self.calc_fee_rate(tx)
+ assert_greater_than_or_equal(resulting_fee_rate, self.target_fee_rate)
+
+ # Meta-Test: try feerate testing function on confirmed UTXO
+ def test_target_feerate_confirmed(self):
+ self.log.info("Start test feerate with confirmed input")
+ wallet = self.setup_and_fund_wallet("confirmed_wallet")
+
+ ancestor_aware_txid = wallet.sendtoaddress(address=self.def_wallet.getnewaddress(), amount=0.5, fee_rate=self.target_fee_rate)
+ ancestor_aware_tx = wallet.gettransaction(txid=ancestor_aware_txid, verbose=True)
+ self.assert_beats_target(ancestor_aware_tx)
+
+ wallet.unloadwallet()
+
+ # Spend unconfirmed UTXO from high-feerate parent
+ def test_target_feerate_unconfirmed_high(self):
+ self.log.info("Start test feerate with high feerate unconfirmed input")
+ wallet = self.setup_and_fund_wallet("unconfirmed_high_wallet")
+
+ # Send unconfirmed transaction with high feerate to testing wallet
+ parent_txid = wallet.sendtoaddress(address=wallet.getnewaddress(), amount=1, fee_rate=3*self.target_fee_rate)
+ parent_tx = wallet.gettransaction(txid=parent_txid, verbose=True)
+ self.assert_beats_target(parent_tx)
+
+ ancestor_aware_txid = wallet.sendtoaddress(address=self.def_wallet.getnewaddress(), amount=0.5, fee_rate=self.target_fee_rate)
+ ancestor_aware_tx = wallet.gettransaction(txid=ancestor_aware_txid, verbose=True)
+
+ self.assert_spends_only_parents(ancestor_aware_tx, [parent_txid])
+
+ self.assert_beats_target(ancestor_aware_tx)
+
+ wallet.unloadwallet()
+
+ # Spend unconfirmed UTXO from low-feerate parent. Expect that parent gets
+ # bumped to target feerate.
+ def test_target_feerate_unconfirmed_low(self):
+ self.log.info("Start test feerate with low feerate unconfirmed input")
+ wallet = self.setup_and_fund_wallet("unconfirmed_low_wallet")
+
+ parent_txid = wallet.sendtoaddress(address=wallet.getnewaddress(), amount=1, fee_rate=1)
+ parent_tx = wallet.gettransaction(txid=parent_txid, verbose=True)
+
+ self.assert_undershoots_target(parent_tx)
+
+ ancestor_aware_txid = wallet.sendtoaddress(address=self.def_wallet.getnewaddress(), amount=0.5, fee_rate=self.target_fee_rate)
+ ancestor_aware_tx = wallet.gettransaction(txid=ancestor_aware_txid, verbose=True)
+
+ self.assert_spends_only_parents(ancestor_aware_tx, [parent_txid])
+
+ self.assert_beats_target(ancestor_aware_tx)
+ resulting_ancestry_fee_rate = self.calc_set_fee_rate([parent_tx, ancestor_aware_tx])
+ assert_greater_than_or_equal(resulting_ancestry_fee_rate, self.target_fee_rate)
+ assert_greater_than_or_equal(self.target_fee_rate*1.01, resulting_ancestry_fee_rate)
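+ # i.e. the parent+child package feerate lands between the target and 1% above it: the low-feerate parent was bumped but not overpaid for.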
+
+ wallet.unloadwallet()
+
+ # Spend UTXO with unconfirmed low feerate parent and grandparent
+ # txs. Expect that both ancestors get bumped to target feerate.
+ def test_chain_of_unconfirmed_low(self):
+ self.log.info("Start test with parent and grandparent tx")
+ wallet = self.setup_and_fund_wallet("unconfirmed_low_chain_wallet")
+
+ grandparent_txid = wallet.sendtoaddress(address=wallet.getnewaddress(), amount=1.8, fee_rate=1)
+ gp_tx = wallet.gettransaction(txid=grandparent_txid, verbose=True)
+
+ self.assert_undershoots_target(gp_tx)
+
+ parent_txid = wallet.sendtoaddress(address=wallet.getnewaddress(), amount=1.5, fee_rate=2)
+ p_tx = wallet.gettransaction(txid=parent_txid, verbose=True)
+
+ self.assert_undershoots_target(p_tx)
+
+ ancestor_aware_txid = wallet.sendtoaddress(address=self.def_wallet.getnewaddress(), amount=1.3, fee_rate=self.target_fee_rate)
+ ancestor_aware_tx = wallet.gettransaction(txid=ancestor_aware_txid, verbose=True)
+ self.assert_spends_only_parents(ancestor_aware_tx, [parent_txid])
+
+ self.assert_beats_target(ancestor_aware_tx)
+ resulting_ancestry_fee_rate = self.calc_set_fee_rate([gp_tx, p_tx, ancestor_aware_tx])
+ assert_greater_than_or_equal(resulting_ancestry_fee_rate, self.target_fee_rate)
+ assert_greater_than_or_equal(self.target_fee_rate*1.01, resulting_ancestry_fee_rate)
+
+ wallet.unloadwallet()
+
+ # Spend unconfirmed UTXOs from two low feerate parent txs.
+ def test_two_low_feerate_unconfirmed_parents(self):
+ self.log.info("Start test with two unconfirmed parent txs")
+ wallet = self.setup_and_fund_wallet("two_parents_wallet")
+
+ # Add second UTXO to tested wallet
+ self.def_wallet.sendtoaddress(address=wallet.getnewaddress(), amount=2)
+ self.generate(self.nodes[0], 1) # confirm funding tx
+
+ parent_one_txid = wallet.sendtoaddress(address=wallet.getnewaddress(), amount=1.5, fee_rate=2)
+ p_one_tx = wallet.gettransaction(txid=parent_one_txid, verbose=True)
+ self.assert_undershoots_target(p_one_tx)
+
+ parent_two_txid = wallet.sendtoaddress(address=wallet.getnewaddress(), amount=1.5, fee_rate=1)
+ p_two_tx = wallet.gettransaction(txid=parent_two_txid, verbose=True)
+ self.assert_undershoots_target(p_two_tx)
+
+ ancestor_aware_txid = wallet.sendtoaddress(address=self.def_wallet.getnewaddress(), amount=2.8, fee_rate=self.target_fee_rate)
+ ancestor_aware_tx = wallet.gettransaction(txid=ancestor_aware_txid, verbose=True)
+ self.assert_spends_only_parents(ancestor_aware_tx, [parent_one_txid, parent_two_txid])
+
+ self.assert_beats_target(ancestor_aware_tx)
+ resulting_ancestry_fee_rate = self.calc_set_fee_rate([p_one_tx, p_two_tx, ancestor_aware_tx])
+ assert_greater_than_or_equal(resulting_ancestry_fee_rate, self.target_fee_rate)
+ assert_greater_than_or_equal(self.target_fee_rate*1.01, resulting_ancestry_fee_rate)
+
+ wallet.unloadwallet()
+
+ # Spend two unconfirmed inputs, one each from low and high feerate parents
+ def test_mixed_feerate_unconfirmed_parents(self):
+ self.log.info("Start test with two unconfirmed parent txs one of which has a higher feerate")
+ wallet = self.setup_and_fund_wallet("two_mixed_parents_wallet")
+
+ # Add second UTXO to tested wallet
+ self.def_wallet.sendtoaddress(address=wallet.getnewaddress(), amount=2)
+ self.generate(self.nodes[0], 1) # confirm funding tx
+
+ high_parent_txid = wallet.sendtoaddress(address=wallet.getnewaddress(), amount=1.5, fee_rate=self.target_fee_rate*2)
+ p_high_tx = wallet.gettransaction(txid=high_parent_txid, verbose=True)
+ # This time the parent's feerate is higher than the child's target feerate
+ self.assert_beats_target(p_high_tx)
+
+ parent_low_txid = wallet.sendtoaddress(address=wallet.getnewaddress(), amount=1.5, fee_rate=1)
+ p_low_tx = wallet.gettransaction(txid=parent_low_txid, verbose=True)
+ # Other parent needs bump
+ self.assert_undershoots_target(p_low_tx)
+
+ ancestor_aware_txid = wallet.sendtoaddress(address=self.def_wallet.getnewaddress(), amount=2.8, fee_rate=self.target_fee_rate)
+ ancestor_aware_tx = wallet.gettransaction(txid=ancestor_aware_txid, verbose=True)
+ self.assert_spends_only_parents(ancestor_aware_tx, [parent_low_txid, high_parent_txid])
+
+ self.assert_beats_target(ancestor_aware_tx)
+ resulting_ancestry_fee_rate = self.calc_set_fee_rate([p_high_tx, p_low_tx, ancestor_aware_tx])
+ assert_greater_than_or_equal(resulting_ancestry_fee_rate, self.target_fee_rate)
+
+ resulting_bumped_ancestry_fee_rate = self.calc_set_fee_rate([p_low_tx, ancestor_aware_tx])
+ assert_greater_than_or_equal(resulting_bumped_ancestry_fee_rate, self.target_fee_rate)
+ assert_greater_than_or_equal(self.target_fee_rate*1.01, resulting_bumped_ancestry_fee_rate)
+
+ wallet.unloadwallet()
+
+ # Spend from chain with high feerate grandparent and low feerate parent
+ def test_chain_of_high_low(self):
+ self.log.info("Start test with low parent and high grandparent tx")
+ wallet = self.setup_and_fund_wallet("high_low_chain_wallet")
+
+ grandparent_txid = wallet.sendtoaddress(address=wallet.getnewaddress(), amount=1.8, fee_rate=self.target_fee_rate * 10)
+ gp_tx = wallet.gettransaction(txid=grandparent_txid, verbose=True)
+ # grandparent has higher feerate
+ self.assert_beats_target(gp_tx)
+
+ parent_txid = wallet.sendtoaddress(address=wallet.getnewaddress(), amount=1.5, fee_rate=1)
+ # parent is low feerate
+ p_tx = wallet.gettransaction(txid=parent_txid, verbose=True)
+ self.assert_undershoots_target(p_tx)
+
+ ancestor_aware_txid = wallet.sendtoaddress(address=self.def_wallet.getnewaddress(), amount=1.3, fee_rate=self.target_fee_rate)
+ ancestor_aware_tx = wallet.gettransaction(txid=ancestor_aware_txid, verbose=True)
+ self.assert_spends_only_parents(ancestor_aware_tx, [parent_txid])
+
+ self.assert_beats_target(ancestor_aware_tx)
+ resulting_ancestry_fee_rate = self.calc_set_fee_rate([p_tx, ancestor_aware_tx])
+ assert_greater_than_or_equal(resulting_ancestry_fee_rate, self.target_fee_rate)
+ assert_greater_than_or_equal(self.target_fee_rate*1.01, resulting_ancestry_fee_rate)
+ resulting_ancestry_fee_rate_with_high_feerate_gp = self.calc_set_fee_rate([gp_tx, p_tx, ancestor_aware_tx])
+ # Check that we bumped the parent without relying on the grandparent
+ assert_greater_than_or_equal(resulting_ancestry_fee_rate_with_high_feerate_gp, self.target_fee_rate*1.1)
+
+ wallet.unloadwallet()
+
+ # Spend UTXO from chain of unconfirmed transactions with low feerate
+ # grandparent and even lower feerate parent
+ def test_chain_of_high_low_below_target_feerate(self):
+ self.log.info("Start test with low parent and higher low grandparent tx")
+ wallet = self.setup_and_fund_wallet("low_and_lower_chain_wallet")
+
+ grandparent_txid = wallet.sendtoaddress(address=wallet.getnewaddress(), amount=1.8, fee_rate=5)
+ gp_tx = wallet.gettransaction(txid=grandparent_txid, verbose=True)
+
+ # grandparent has higher feerate, but below target
+ self.assert_undershoots_target(gp_tx)
+
+ parent_txid = wallet.sendtoaddress(address=wallet.getnewaddress(), amount=1.5, fee_rate=1)
+ p_tx = wallet.gettransaction(txid=parent_txid, verbose=True)
+ # parent even lower
+ self.assert_undershoots_target(p_tx)
+
+ ancestor_aware_txid = wallet.sendtoaddress(address=self.def_wallet.getnewaddress(), amount=1.3, fee_rate=self.target_fee_rate)
+ ancestor_aware_tx = wallet.gettransaction(txid=ancestor_aware_txid, verbose=True)
+ self.assert_spends_only_parents(ancestor_aware_tx, [parent_txid])
+
+ self.assert_beats_target(ancestor_aware_tx)
+ resulting_ancestry_fee_rate = self.calc_set_fee_rate([gp_tx, p_tx, ancestor_aware_tx])
+ assert_greater_than_or_equal(resulting_ancestry_fee_rate, self.target_fee_rate)
+ assert_greater_than_or_equal(self.target_fee_rate*1.01, resulting_ancestry_fee_rate)
+
+ wallet.unloadwallet()
+
+ # Test fee calculation when bumping while using subtract fee from output (SFFO)
+ def test_target_feerate_unconfirmed_low_sffo(self):
+ self.log.info("Start test feerate with low feerate unconfirmed input, while subtracting from output")
+ wallet = self.setup_and_fund_wallet("unconfirmed_low_wallet_sffo")
+
+ parent_txid = wallet.sendtoaddress(address=wallet.getnewaddress(), amount=1, fee_rate=1)
+ parent_tx = wallet.gettransaction(txid=parent_txid, verbose=True)
+
+ self.assert_undershoots_target(parent_tx)
+
+ ancestor_aware_txid = wallet.sendtoaddress(address=self.def_wallet.getnewaddress(), amount=0.5, fee_rate=self.target_fee_rate, subtractfeefromamount=True)
+ ancestor_aware_tx = wallet.gettransaction(txid=ancestor_aware_txid, verbose=True)
+
+ self.assert_spends_only_parents(ancestor_aware_tx, [parent_txid])
+
+ self.assert_beats_target(ancestor_aware_tx)
+ resulting_ancestry_fee_rate = self.calc_set_fee_rate([parent_tx, ancestor_aware_tx])
+ assert_greater_than_or_equal(resulting_ancestry_fee_rate, self.target_fee_rate)
+ assert_greater_than_or_equal(self.target_fee_rate*1.01, resulting_ancestry_fee_rate)
+
+ wallet.unloadwallet()
+
+ # Test that parents of preset unconfirmed inputs get cpfp'ed
+ def test_preset_input_cpfp(self):
+ self.log.info("Start test with preset input from low feerate unconfirmed transaction")
+ wallet = self.setup_and_fund_wallet("preset_input")
+
+ parent_txid = wallet.sendtoaddress(address=wallet.getnewaddress(), amount=1, fee_rate=1)
+ parent_tx = wallet.gettransaction(txid=parent_txid, verbose=True)
+
+ self.assert_undershoots_target(parent_tx)
+
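+ # The parent pays back to ourselves, so it has a payment output and a change output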
+ number_outputs = len(parent_tx["decoded"]["vout"])
+ assert_equal(number_outputs, 2)
+
+ # We don't care which of the two outputs we spend; they're both ours
+ ancestor_aware_txid = wallet.send(outputs=[{self.def_wallet.getnewaddress(): 0.5}], fee_rate=self.target_fee_rate, options={"add_inputs": True, "inputs": [{"txid": parent_txid, "vout": 0}]})["txid"]
+ ancestor_aware_tx = wallet.gettransaction(txid=ancestor_aware_txid, verbose=True)
+
+ self.assert_spends_only_parents(ancestor_aware_tx, [parent_txid])
+
+ self.assert_beats_target(ancestor_aware_tx)
+ resulting_ancestry_fee_rate = self.calc_set_fee_rate([parent_tx, ancestor_aware_tx])
+ assert_greater_than_or_equal(resulting_ancestry_fee_rate, self.target_fee_rate)
+ assert_greater_than_or_equal(self.target_fee_rate*1.01, resulting_ancestry_fee_rate)
+
+ wallet.unloadwallet()
+
+ # Test that RBFing a transaction with unconfirmed input gets the right feerate
+ def test_rbf_bumping(self):
+ self.log.info("Start test to rbf a transaction unconfirmed input to bump it")
+ wallet = self.setup_and_fund_wallet("bump")
+
+ parent_txid = wallet.sendtoaddress(address=wallet.getnewaddress(), amount=1, fee_rate=1)
+ parent_tx = wallet.gettransaction(txid=parent_txid, verbose=True)
+
+ self.assert_undershoots_target(parent_tx)
+
+ to_be_rbfed_ancestor_aware_txid = wallet.sendtoaddress(address=self.def_wallet.getnewaddress(), amount=0.5, fee_rate=self.target_fee_rate)
+ ancestor_aware_tx = wallet.gettransaction(txid=to_be_rbfed_ancestor_aware_txid, verbose=True)
+
+ self.assert_spends_only_parents(ancestor_aware_tx, [parent_txid])
+
+ self.assert_beats_target(ancestor_aware_tx)
+ resulting_ancestry_fee_rate = self.calc_set_fee_rate([parent_tx, ancestor_aware_tx])
+ assert_greater_than_or_equal(resulting_ancestry_fee_rate, self.target_fee_rate)
+ assert_greater_than_or_equal(self.target_fee_rate*1.01, resulting_ancestry_fee_rate)
+
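+ # RBF the child at twice the target feerate; the replacement must again cover the low feerate parent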
+ bumped_ancestor_aware_txid = wallet.bumpfee(txid=to_be_rbfed_ancestor_aware_txid, options={"fee_rate": self.target_fee_rate * 2} )["txid"]
+ bumped_ancestor_aware_tx = wallet.gettransaction(txid=bumped_ancestor_aware_txid, verbose=True)
+ self.assert_spends_only_parents(bumped_ancestor_aware_tx, [parent_txid])
+
+ resulting_bumped_fee_rate = self.calc_fee_rate(bumped_ancestor_aware_tx)
+ assert_greater_than_or_equal(resulting_bumped_fee_rate, 2*self.target_fee_rate)
+ resulting_bumped_ancestry_fee_rate = self.calc_set_fee_rate([parent_tx, bumped_ancestor_aware_tx])
+ assert_greater_than_or_equal(resulting_bumped_ancestry_fee_rate, 2*self.target_fee_rate)
+ assert_greater_than_or_equal(2*self.target_fee_rate*1.01, resulting_bumped_ancestry_fee_rate)
+
+ wallet.unloadwallet()
+
+ # Test that transaction spending two UTXOs with overlapping ancestry does not bump shared ancestors twice
+ def test_target_feerate_unconfirmed_low_overlapping_ancestry(self):
+ self.log.info("Start test where two UTXOs have overlapping ancestry")
+ wallet = self.setup_and_fund_wallet("overlapping_ancestry_wallet")
+
+ parent_txid = wallet.sendtoaddress(address=wallet.getnewaddress(), amount=1, fee_rate=1)
+ two_output_parent_tx = wallet.gettransaction(txid=parent_txid, verbose=True)
+
+ self.assert_undershoots_target(two_output_parent_tx)
+
+ # spend both outputs from parent transaction
+ ancestor_aware_txid = wallet.sendtoaddress(address=self.def_wallet.getnewaddress(), amount=1.5, fee_rate=self.target_fee_rate)
+ ancestor_aware_tx = wallet.gettransaction(txid=ancestor_aware_txid, verbose=True)
+ self.assert_spends_only_parents(ancestor_aware_tx, [parent_txid, parent_txid])
+
+ self.assert_beats_target(ancestor_aware_tx)
+ resulting_ancestry_fee_rate = self.calc_set_fee_rate([two_output_parent_tx, ancestor_aware_tx])
+ assert_greater_than_or_equal(resulting_ancestry_fee_rate, self.target_fee_rate)
+ assert_greater_than_or_equal(self.target_fee_rate*1.01, resulting_ancestry_fee_rate)
+
+ wallet.unloadwallet()
+
+ # Test that new transaction ignores sibling transaction with low feerate
+ def test_sibling_tx_gets_ignored(self):
+ self.log.info("Start test where a low-fee sibling tx gets created and check that bumping ignores it")
+ wallet = self.setup_and_fund_wallet("ignore-sibling")
+
+ parent_txid = wallet.sendtoaddress(address=wallet.getnewaddress(), amount=1, fee_rate=2)
+ parent_tx = wallet.gettransaction(txid=parent_txid, verbose=True)
+
+ self.assert_undershoots_target(parent_tx)
+
+ # create sibling tx
+ sibling_txid = wallet.sendtoaddress(address=self.def_wallet.getnewaddress(), amount=0.9, fee_rate=1)
+ sibling_tx = wallet.gettransaction(txid=sibling_txid, verbose=True)
+ self.assert_undershoots_target(sibling_tx)
+
+ # Spend the remaining output from the parent transaction
+ ancestor_aware_txid = wallet.sendtoaddress(address=self.def_wallet.getnewaddress(), amount=0.5, fee_rate=self.target_fee_rate)
+ ancestor_aware_tx = wallet.gettransaction(txid=ancestor_aware_txid, verbose=True)
+
+ self.assert_spends_only_parents(ancestor_aware_tx, [parent_txid])
+
+ self.assert_beats_target(ancestor_aware_tx)
+ resulting_ancestry_fee_rate = self.calc_set_fee_rate([parent_tx, ancestor_aware_tx])
+ assert_greater_than_or_equal(resulting_ancestry_fee_rate, self.target_fee_rate)
+ assert_greater_than_or_equal(self.target_fee_rate*1.01, resulting_ancestry_fee_rate)
+
+ wallet.unloadwallet()
+
+ # Test that new transaction only pays for itself when high feerate sibling pays for parent
+ def test_sibling_tx_bumps_parent(self):
+ self.log.info("Start test where a high-fee sibling tx bumps the parent")
+ wallet = self.setup_and_fund_wallet("generous-sibling")
+
+ parent_txid = wallet.sendtoaddress(address=wallet.getnewaddress(), amount=1, fee_rate=1)
+ parent_tx = wallet.gettransaction(txid=parent_txid, verbose=True)
+ self.assert_undershoots_target(parent_tx)
+
+ # create sibling tx
+ sibling_txid = wallet.sendtoaddress(address=self.def_wallet.getnewaddress(), amount=0.9, fee_rate=3*self.target_fee_rate)
+ sibling_tx = wallet.gettransaction(txid=sibling_txid, verbose=True)
+ self.assert_beats_target(sibling_tx)
+
+ # Spend the remaining output from the parent transaction
+ ancestor_aware_txid = wallet.sendtoaddress(address=self.def_wallet.getnewaddress(), amount=0.5, fee_rate=self.target_fee_rate)
+ ancestor_aware_tx = wallet.gettransaction(txid=ancestor_aware_txid, verbose=True)
+
+ self.assert_spends_only_parents(ancestor_aware_tx, [parent_txid])
+
+ self.assert_beats_target(ancestor_aware_tx)
+ # Child is only paying for itself…
+ resulting_fee_rate = self.calc_fee_rate(ancestor_aware_tx)
+ assert_greater_than_or_equal(1.05 * self.target_fee_rate, resulting_fee_rate)
+ # …because the sibling bumped the parent to ~50 s/vB, while our target is 30 s/vB
+ resulting_ancestry_fee_rate_sibling = self.calc_set_fee_rate([parent_tx, sibling_tx])
+ assert_greater_than_or_equal(resulting_ancestry_fee_rate_sibling, self.target_fee_rate)
+ # and our resulting "ancestry feerate" is therefore BELOW target feerate
+ resulting_ancestry_fee_rate = self.calc_set_fee_rate([parent_tx, ancestor_aware_tx])
+ assert_greater_than_or_equal(self.target_fee_rate, resulting_ancestry_fee_rate)
+
+ wallet.unloadwallet()
+
+ # Spend a confirmed and an unconfirmed input at the same time
+ def test_confirmed_and_unconfirmed_parent(self):
+ self.log.info("Start test with one unconfirmed and one confirmed input")
+ wallet = self.setup_and_fund_wallet("confirmed_and_unconfirmed_wallet")
+ confirmed_parent_txid = wallet.sendtoaddress(address=wallet.getnewaddress(), amount=1, fee_rate=self.target_fee_rate)
+ self.generate(self.nodes[0], 1) # Wallet has two confirmed UTXOs of ~1BTC each
+ unconfirmed_parent_txid = wallet.sendtoaddress(address=wallet.getnewaddress(), amount=0.5, fee_rate=0.5*self.target_fee_rate)
+
+ # wallet has one confirmed UTXO of 1BTC and two unconfirmed UTXOs of ~0.5BTC each
+ ancestor_aware_txid = wallet.sendtoaddress(address=self.def_wallet.getnewaddress(), amount=1.4, fee_rate=self.target_fee_rate)
+ ancestor_aware_tx = wallet.gettransaction(txid=ancestor_aware_txid, verbose=True)
+ self.assert_spends_only_parents(ancestor_aware_tx, [confirmed_parent_txid, unconfirmed_parent_txid])
+ resulting_fee_rate = self.calc_fee_rate(ancestor_aware_tx)
+ assert_greater_than_or_equal(resulting_fee_rate, self.target_fee_rate)
+
+ wallet.unloadwallet()
+
+ def test_external_input_unconfirmed_low(self):
+ self.log.info("Send funds to an external wallet then build tx that bumps parent by spending external input")
+ wallet = self.setup_and_fund_wallet("test_external_wallet")
+
+ external_address = self.def_wallet.getnewaddress()
+ address_info = self.def_wallet.getaddressinfo(external_address)
+ external_descriptor = address_info["desc"]
+ parent_txid = wallet.sendtoaddress(address=external_address, amount=1, fee_rate=1)
+ parent_tx = wallet.gettransaction(txid=parent_txid, verbose=True)
+
+ self.assert_undershoots_target(parent_tx)
+
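+ # Build the child via send() with solving_data, so the wallet can estimate the size of the external input it cannot sign itself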
+ spend_res = wallet.send(outputs=[{self.def_wallet.getnewaddress(): 0.5}], fee_rate=self.target_fee_rate, options={"inputs":[{"txid":parent_txid, "vout":find_vout_for_address(self.nodes[0], parent_txid, external_address)}], "solving_data":{"descriptors":[external_descriptor]}})
+ signed_psbt = self.def_wallet.walletprocesspsbt(spend_res["psbt"])
+ external_tx = self.def_wallet.finalizepsbt(signed_psbt["psbt"])
+ ancestor_aware_txid = self.def_wallet.sendrawtransaction(external_tx["hex"])
+
+ ancestor_aware_tx = self.def_wallet.gettransaction(txid=ancestor_aware_txid, verbose=True)
+
+ self.assert_spends_only_parents(ancestor_aware_tx, [parent_txid])
+
+ self.assert_beats_target(ancestor_aware_tx)
+ resulting_ancestry_fee_rate = self.calc_set_fee_rate([parent_tx, ancestor_aware_tx])
+ assert_greater_than_or_equal(resulting_ancestry_fee_rate, self.target_fee_rate)
+ assert_greater_than_or_equal(self.target_fee_rate*1.01, resulting_ancestry_fee_rate)
+
+ wallet.unloadwallet()
+
+
+ def run_test(self):
+ self.log.info("Starting UnconfirmedInputTest!")
+ self.target_fee_rate = 30
+ self.def_wallet = self.nodes[0].get_wallet_rpc(self.default_wallet_name)
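+ # Mine past coinbase maturity (100 blocks) so the default wallet has spendable funds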
+ self.generate(self.nodes[0], 110)
+
+ self.test_target_feerate_confirmed()
+
+ self.test_target_feerate_unconfirmed_high()
+
+ self.test_target_feerate_unconfirmed_low()
+
+ self.test_chain_of_unconfirmed_low()
+
+ self.test_two_low_feerate_unconfirmed_parents()
+
+ self.test_mixed_feerate_unconfirmed_parents()
+
+ self.test_chain_of_high_low()
+
+ self.test_chain_of_high_low_below_target_feerate()
+
+ self.test_target_feerate_unconfirmed_low_sffo()
+
+ self.test_preset_input_cpfp()
+
+ self.test_rbf_bumping()
+
+ self.test_target_feerate_unconfirmed_low_overlapping_ancestry()
+
+ self.test_sibling_tx_gets_ignored()
+
+ self.test_sibling_tx_bumps_parent()
+
+ self.test_confirmed_and_unconfirmed_parent()
+
+ self.test_external_input_unconfirmed_low()
+
+if __name__ == '__main__':
+ UnconfirmedInputTest().main()
diff --git a/test/fuzz/test_runner.py b/test/fuzz/test_runner.py
index ef1583d446..c9975af225 100755
--- a/test/fuzz/test_runner.py
+++ b/test/fuzz/test_runner.py
@@ -20,8 +20,7 @@ def get_fuzz_env(*, target, source_dir):
'FUZZ': target,
'UBSAN_OPTIONS':
f'suppressions={source_dir}/test/sanitizer_suppressions/ubsan:print_stacktrace=1:halt_on_error=1:report_error_type=1',
- 'ASAN_OPTIONS': # symbolizer disabled due to https://github.com/google/sanitizers/issues/1364#issuecomment-761072085
- 'symbolize=0:detect_stack_use_after_return=1:check_initialization_order=1:strict_init_order=1',
+ "ASAN_OPTIONS": "detect_stack_use_after_return=1:check_initialization_order=1:strict_init_order=1",
}
@@ -193,27 +192,52 @@ def main():
)
-def generate_corpus(*, fuzz_pool, src_dir, build_dir, corpus_dir, targets):
- """Generates new corpus.
+def transform_process_message_target(targets, src_dir):
+ """Add a target per process message, and also keep ("process_message", {}) to allow for
+ cross-pollination, or unlimited search"""
+
+ p2p_msg_target = "process_message"
+ if (p2p_msg_target, {}) in targets:
+ lines = subprocess.run(
+ ["git", "grep", "--function-context", "g_all_net_message_types{", src_dir / "src" / "protocol.cpp"],
+ check=True,
+ stdout=subprocess.PIPE,
+ text=True,
+ ).stdout.splitlines()
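+ # Keep only the "NetMsgType::<NAME>," lines from the grep output and extract the lowercase message type names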
+ lines = [l.split("::", 1)[1].split(",")[0].lower() for l in lines if l.startswith("src/protocol.cpp- NetMsgType::")]
+ assert len(lines)
+ targets += [(p2p_msg_target, {"LIMIT_TO_MESSAGE_TYPE": m}) for m in lines]
+ return targets
+
+
+def transform_rpc_target(targets, src_dir):
+ """Add a target per RPC command, and also keep ("rpc", {}) to allow for cross-pollination,
+ or unlimited search"""
- Run {targets} without input, and outputs the generated corpus to
- {corpus_dir}.
- """
- logging.info("Generating corpus to {}".format(corpus_dir))
rpc_target = "rpc"
- has_rpc = rpc_target in targets
- if has_rpc:
- targets.remove(rpc_target)
- targets = [(t, {}) for t in targets]
- if has_rpc:
+ if (rpc_target, {}) in targets:
lines = subprocess.run(
- ["git", "grep", "--function-context", "RPC_COMMANDS_SAFE_FOR_FUZZING{", os.path.join(src_dir, "src", "test", "fuzz", "rpc.cpp")],
+ ["git", "grep", "--function-context", "RPC_COMMANDS_SAFE_FOR_FUZZING{", src_dir / "src" / "test" / "fuzz" / "rpc.cpp"],
check=True,
stdout=subprocess.PIPE,
text=True,
).stdout.splitlines()
lines = [l.split("\"", 1)[1].split("\"")[0] for l in lines if l.startswith("src/test/fuzz/rpc.cpp- \"")]
+ assert len(lines)
targets += [(rpc_target, {"LIMIT_TO_RPC_COMMAND": r}) for r in lines]
+ return targets
+
+
+def generate_corpus(*, fuzz_pool, src_dir, build_dir, corpus_dir, targets):
+ """Generates new corpus.
+
+ Runs {targets} without input and outputs the generated corpus to
+ {corpus_dir}.
+ """
+ logging.info("Generating corpus to {}".format(corpus_dir))
+ targets = [(t, {}) for t in targets] # expand to add dictionary for target-specific env variables
+ targets = transform_process_message_target(targets, Path(src_dir))
+ targets = transform_rpc_target(targets, Path(src_dir))
def job(command, t, t_env):
logging.debug(f"Running '{command}'")
diff --git a/test/get_previous_releases.py b/test/get_previous_releases.py
index 4a27454ff2..459693102b 100755
--- a/test/get_previous_releases.py
+++ b/test/get_previous_releases.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python3
#
-# Copyright (c) 2018-2022 The Bitcoin Core developers
+# Copyright (c) 2018-present The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
@@ -216,14 +216,11 @@ def build_release(tag, args) -> int:
print('Tag {} not found'.format(tag))
return 1
ret = subprocess.run([
- 'git', 'clone', githubUrl, tag
+ 'git', 'clone', f'--branch={tag}', '--depth=1', githubUrl, tag
]).returncode
if ret:
return ret
with pushd(tag):
- ret = subprocess.run(['git', 'checkout', tag]).returncode
- if ret:
- return ret
host = args.host
if args.depends:
with pushd('depends'):
@@ -259,6 +256,8 @@ def check_host(args) -> int:
if args.download_binary:
platforms = {
'aarch64-*-linux*': 'aarch64-linux-gnu',
+ 'powerpc64le-*-linux-*': 'powerpc64le-linux-gnu',
+ 'riscv64-*-linux*': 'riscv64-linux-gnu',
'x86_64-*-linux*': 'x86_64-linux-gnu',
'x86_64-apple-darwin*': 'x86_64-apple-darwin',
'aarch64-apple-darwin*': 'arm64-apple-darwin',
diff --git a/test/lint/README.md b/test/lint/README.md
index 704922d7ab..d9cfeb50ed 100644
--- a/test/lint/README.md
+++ b/test/lint/README.md
@@ -7,10 +7,8 @@ To run linters locally with the same versions as the CI environment, use the inc
Dockerfile:
```sh
-cd ./ci/lint
-docker build -t bitcoin-linter .
+DOCKER_BUILDKIT=1 docker build -t bitcoin-linter --file "./ci/lint_imagefile" ./
-cd /root/of/bitcoin/repo
docker run --rm -v $(pwd):/bitcoin -it bitcoin-linter
```
diff --git a/test/lint/lint-assertions.py b/test/lint/lint-assertions.py
index 6da59b0d48..d9f86b22b8 100755
--- a/test/lint/lint-assertions.py
+++ b/test/lint/lint-assertions.py
@@ -23,20 +23,10 @@ def git_grep(params: [], error_msg: ""):
def main():
- # PRE31-C (SEI CERT C Coding Standard):
- # "Assertions should not contain assignments, increment, or decrement operators."
- exit_code = git_grep([
- "-E",
- r"[^_]assert\(.*(\+\+|\-\-|[^=!<>]=[^=!<>]).*\);",
- "--",
- "*.cpp",
- "*.h",
- ], "Assertions should not have side effects:")
-
# Aborting the whole process is undesirable for RPC code. So nonfatal
# checks should be used over assert. See: src/util/check.h
# src/rpc/server.cpp is excluded from this check since it's mostly meta-code.
- exit_code |= git_grep([
+ exit_code = git_grep([
"-nE",
r"\<(A|a)ss(ume|ert) *\(.*\);",
"--",
diff --git a/test/lint/lint-circular-dependencies.py b/test/lint/lint-circular-dependencies.py
index 307b4dca5a..e366a08bd2 100755
--- a/test/lint/lint-circular-dependencies.py
+++ b/test/lint/lint-circular-dependencies.py
@@ -19,7 +19,6 @@ EXPECTED_CIRCULAR_DEPENDENCIES = (
"qt/recentrequeststablemodel -> qt/walletmodel -> qt/recentrequeststablemodel",
"qt/sendcoinsdialog -> qt/walletmodel -> qt/sendcoinsdialog",
"qt/transactiontablemodel -> qt/walletmodel -> qt/transactiontablemodel",
- "wallet/fees -> wallet/wallet -> wallet/fees",
"wallet/wallet -> wallet/walletdb -> wallet/wallet",
"kernel/coinstats -> validation -> kernel/coinstats",
"kernel/mempool_persist -> validation -> kernel/mempool_persist",
diff --git a/test/lint/lint-format-strings.py b/test/lint/lint-format-strings.py
index 43addab2f3..5ac5840ecf 100755
--- a/test/lint/lint-format-strings.py
+++ b/test/lint/lint-format-strings.py
@@ -77,7 +77,7 @@ def main():
matching_files_filtered = []
for matching_file in matching_files:
- if not re.search('^src/(leveldb|secp256k1|minisketch|tinyformat|test/fuzz/strprintf.cpp)', matching_file):
+ if not re.search('^src/(leveldb|secp256k1|minisketch|tinyformat|test/fuzz/strprintf.cpp)|contrib/devtools/bitcoin-tidy/example_logprintf.cpp', matching_file):
matching_files_filtered.append(matching_file)
matching_files_filtered.sort()
diff --git a/test/lint/lint-include-guards.py b/test/lint/lint-include-guards.py
index 5867aae028..48b918e9da 100755
--- a/test/lint/lint-include-guards.py
+++ b/test/lint/lint-include-guards.py
@@ -17,7 +17,8 @@ from typing import List
HEADER_ID_PREFIX = 'BITCOIN_'
HEADER_ID_SUFFIX = '_H'
-EXCLUDE_FILES_WITH_PREFIX = ['src/crypto/ctaes',
+EXCLUDE_FILES_WITH_PREFIX = ['contrib/devtools/bitcoin-tidy',
+ 'src/crypto/ctaes',
'src/leveldb',
'src/crc32c',
'src/secp256k1',
diff --git a/test/lint/lint-includes.py b/test/lint/lint-includes.py
index b14caa4855..8e79ba5121 100755
--- a/test/lint/lint-includes.py
+++ b/test/lint/lint-includes.py
@@ -15,7 +15,8 @@ import sys
from subprocess import check_output, CalledProcessError
-EXCLUDED_DIRS = ["src/leveldb/",
+EXCLUDED_DIRS = ["contrib/devtools/bitcoin-tidy/",
+ "src/leveldb/",
"src/crc32c/",
"src/secp256k1/",
"src/minisketch/",
diff --git a/test/lint/lint-logs.py b/test/lint/lint-logs.py
deleted file mode 100755
index de04a1aeca..0000000000
--- a/test/lint/lint-logs.py
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright (c) 2018-2022 The Bitcoin Core developers
-# Distributed under the MIT software license, see the accompanying
-# file COPYING or http://www.opensource.org/licenses/mit-license.php.
-#
-# Check that all logs are terminated with '\n'
-#
-# Some logs are continued over multiple lines. They should be explicitly
-# commented with /* Continued */
-
-import re
-import sys
-
-from subprocess import check_output
-
-
-def main():
- logs_list = check_output(["git", "grep", "--extended-regexp", r"(LogPrintLevel|LogPrintfCategory|LogPrintf?)\(", "--", "*.cpp"], text=True, encoding="utf8").splitlines()
-
- unterminated_logs = [line for line in logs_list if not re.search(r'(\\n"|/\* Continued \*/)', line)]
-
- if unterminated_logs != []:
- print("All calls to LogPrintf(), LogPrintfCategory(), LogPrint(), LogPrintLevel(), and WalletLogPrintf() should be terminated with \"\\n\".")
- print("")
-
- for line in unterminated_logs:
- print(line)
-
- sys.exit(1)
-
-
-if __name__ == "__main__":
- main()
diff --git a/test/lint/lint-python-utf8-encoding.py b/test/lint/lint-python-utf8-encoding.py
index 64d04bff57..8c9266470f 100755
--- a/test/lint/lint-python-utf8-encoding.py
+++ b/test/lint/lint-python-utf8-encoding.py
@@ -28,7 +28,7 @@ def check_fileopens():
if e.returncode > 1:
raise e
- filtered_fileopens = [fileopen for fileopen in fileopens if not re.search(r"encoding=.(ascii|utf8|utf-8).|open\([^,]*, ['\"][^'\"]*b[^'\"]*['\"]", fileopen)]
+ filtered_fileopens = [fileopen for fileopen in fileopens if not re.search(r"encoding=.(ascii|utf8|utf-8).|open\([^,]*, (\*\*kwargs|['\"][^'\"]*b[^'\"]*['\"])", fileopen)]
return filtered_fileopens
diff --git a/test/lint/lint-python.py b/test/lint/lint-python.py
index 539d0acb5d..eabd13322e 100755
--- a/test/lint/lint-python.py
+++ b/test/lint/lint-python.py
@@ -9,12 +9,17 @@ Check for specified flake8 and mypy warnings in python files.
"""
import os
-import pkg_resources
+from pathlib import Path
import subprocess
import sys
+from importlib.metadata import metadata, PackageNotFoundError
+
+# Customize mypy cache dir via environment variable
+cache_dir = Path(__file__).parent.parent / ".mypy_cache"
+os.environ["MYPY_CACHE_DIR"] = str(cache_dir)
+
DEPS = ['flake8', 'lief', 'mypy', 'pyzmq']
-MYPY_CACHE_DIR = f"{os.getenv('BASE_ROOT_DIR', '')}/test/.mypy_cache"
# All .py files, except those in src/ (to exclude subtrees there)
FLAKE_FILES_ARGS = ['git', 'ls-files', '*.py', ':!:src/*.py']
@@ -99,10 +104,10 @@ ENABLED = (
def check_dependencies():
- working_set = {pkg.key for pkg in pkg_resources.working_set}
-
for dep in DEPS:
- if dep not in working_set:
+ try:
+ metadata(dep)
+ except PackageNotFoundError:
print(f"Skipping Python linting since {dep} is not installed.")
exit(0)
diff --git a/test/lint/lint-shell.py b/test/lint/lint-shell.py
index 1646bf0d3e..db84ca3d39 100755
--- a/test/lint/lint-shell.py
+++ b/test/lint/lint-shell.py
@@ -67,9 +67,13 @@ def main():
'*.sh',
]
files = get_files(files_cmd)
- # remove everything that doesn't match this regex
reg = re.compile(r'src/[leveldb,secp256k1,minisketch]')
- files[:] = [file for file in files if not reg.match(file)]
+
+ def should_exclude(fname: str) -> bool:
+ return bool(reg.match(fname)) or 'test_utxo_snapshots.sh' in fname
+
+ # remove everything that matches the exclusion criteria
+ files[:] = [file for file in files if not should_exclude(file)]
# build the `shellcheck` command
shellcheck_cmd = [
diff --git a/test/lint/run-lint-format-strings.py b/test/lint/run-lint-format-strings.py
index ed98b1b2f8..244bf5956f 100755
--- a/test/lint/run-lint-format-strings.py
+++ b/test/lint/run-lint-format-strings.py
@@ -20,10 +20,10 @@ FALSE_POSITIVES = [
("src/clientversion.cpp", "strprintf(_(COPYRIGHT_HOLDERS).translated, COPYRIGHT_HOLDERS_SUBSTITUTION)"),
("src/test/translation_tests.cpp", "strprintf(format, arg)"),
("src/validationinterface.cpp", "LogPrint(BCLog::VALIDATION, fmt \"\\n\", __VA_ARGS__)"),
- ("src/wallet/wallet.h", "WalletLogPrintf(std::string fmt, Params... parameters)"),
- ("src/wallet/wallet.h", "LogPrintf((\"%s \" + fmt).c_str(), GetDisplayName(), parameters...)"),
- ("src/wallet/scriptpubkeyman.h", "WalletLogPrintf(std::string fmt, Params... parameters)"),
- ("src/wallet/scriptpubkeyman.h", "LogPrintf((\"%s \" + fmt).c_str(), m_storage.GetDisplayName(), parameters...)"),
+ ("src/wallet/wallet.h", "WalletLogPrintf(const char* fmt, Params... parameters)"),
+ ("src/wallet/wallet.h", "LogPrintf((\"%s \" + std::string{fmt}).c_str(), GetDisplayName(), parameters...)"),
+ ("src/wallet/scriptpubkeyman.h", "WalletLogPrintf(const char* fmt, Params... parameters)"),
+ ("src/wallet/scriptpubkeyman.h", "LogPrintf((\"%s \" + std::string{fmt}).c_str(), m_storage.GetDisplayName(), parameters...)"),
]
diff --git a/test/sanitizer_suppressions/ubsan b/test/sanitizer_suppressions/ubsan
index 74703b04ec..533e2eae51 100644
--- a/test/sanitizer_suppressions/ubsan
+++ b/test/sanitizer_suppressions/ubsan
@@ -1,9 +1,7 @@
+# Suppressions should use `sanitize-type:ClassName::MethodName`.
+
# -fsanitize=undefined suppressions
# =================================
-# The suppressions would be `sanitize-type:ClassName::MethodName`,
-# however due to a bug in clang the symbolizer is disabled and thus no symbol
-# names can be used.
-# See https://github.com/google/sanitizers/issues/1364
# -fsanitize=integer suppressions
# ===============================
@@ -11,26 +9,28 @@
# ------------
# Suppressions in dependencies that are developed outside this repository.
unsigned-integer-overflow:*/include/c++/
-# unsigned-integer-overflow in FuzzedDataProvider's ConsumeIntegralInRange
-unsigned-integer-overflow:FuzzedDataProvider.h
+unsigned-integer-overflow:FuzzedDataProvider::ConsumeIntegralInRange
unsigned-integer-overflow:leveldb/
unsigned-integer-overflow:minisketch/
+unsigned-integer-overflow:secp256k1/
unsigned-integer-overflow:test/fuzz/crypto_diff_fuzz_chacha20.cpp
implicit-integer-sign-change:*/include/boost/
implicit-integer-sign-change:*/include/c++/
implicit-integer-sign-change:*/new_allocator.h
implicit-integer-sign-change:crc32c/
-# implicit-integer-sign-change in FuzzedDataProvider's ConsumeIntegralInRange
-implicit-integer-sign-change:FuzzedDataProvider.h
implicit-integer-sign-change:minisketch/
+implicit-integer-sign-change:secp256k1/
implicit-signed-integer-truncation:*/include/c++/
implicit-signed-integer-truncation:leveldb/
+implicit-signed-integer-truncation:secp256k1/
implicit-unsigned-integer-truncation:*/include/c++/
implicit-unsigned-integer-truncation:leveldb/
+implicit-unsigned-integer-truncation:secp256k1/
implicit-unsigned-integer-truncation:test/fuzz/crypto_diff_fuzz_chacha20.cpp
shift-base:*/include/c++/
shift-base:leveldb/
shift-base:minisketch/
+shift-base:secp256k1/
shift-base:test/fuzz/crypto_diff_fuzz_chacha20.cpp
# Unsigned integer overflow occurs when the result of an unsigned integer
# computation cannot be represented in its type. Unlike signed integer overflow,