Diffstat (limited to 'test')
-rw-r--r--  test/functional/README.md | 28
-rwxr-xr-x  test/functional/example_test.py | 29
-rwxr-xr-x  test/functional/feature_abortnode.py | 6
-rwxr-xr-x  test/functional/feature_assumevalid.py | 2
-rwxr-xr-x  test/functional/feature_backwards_compatibility.py | 10
-rwxr-xr-x  test/functional/feature_bip68_sequence.py | 17
-rwxr-xr-x  test/functional/feature_block.py | 10
-rwxr-xr-x  test/functional/feature_cltv.py | 16
-rwxr-xr-x  test/functional/feature_config_args.py | 8
-rwxr-xr-x  test/functional/feature_csv_activation.py | 6
-rwxr-xr-x  test/functional/feature_dersig.py | 16
-rwxr-xr-x  test/functional/feature_fee_estimation.py | 7
-rwxr-xr-x  test/functional/feature_filelock.py | 5
-rwxr-xr-x  test/functional/feature_maxuploadtarget.py | 10
-rwxr-xr-x  test/functional/feature_minchainwork.py | 4
-rwxr-xr-x  test/functional/feature_notifications.py | 23
-rwxr-xr-x  test/functional/feature_nulldummy.py | 15
-rwxr-xr-x  test/functional/feature_proxy.py | 41
-rwxr-xr-x  test/functional/feature_pruning.py | 38
-rwxr-xr-x  test/functional/feature_segwit.py | 5
-rwxr-xr-x  test/functional/feature_settings.py | 13
-rwxr-xr-x  test/functional/feature_shutdown.py | 4
-rwxr-xr-x  test/functional/feature_signet.py | 74
-rwxr-xr-x  test/functional/feature_taproot.py | 1458
-rwxr-xr-x  test/functional/feature_versionbits_warning.py | 13
-rwxr-xr-x  test/functional/interface_bitcoin_cli.py | 11
-rwxr-xr-x  test/functional/interface_rpc.py | 2
-rwxr-xr-x  test/functional/interface_zmq.py | 437
-rwxr-xr-x  test/functional/mempool_accept.py | 17
-rwxr-xr-x  test/functional/mempool_compatibility.py | 3
-rwxr-xr-x  test/functional/mempool_packages.py | 11
-rwxr-xr-x  test/functional/mempool_persist.py | 11
-rwxr-xr-x  test/functional/mempool_unbroadcast.py | 10
-rwxr-xr-x  test/functional/mining_basic.py | 11
-rwxr-xr-x  test/functional/mining_getblocktemplate_longpoll.py | 33
-rwxr-xr-x  test/functional/p2p_addr_relay.py | 4
-rwxr-xr-x  test/functional/p2p_addrv2_relay.py | 79
-rwxr-xr-x  test/functional/p2p_blockfilters.py | 26
-rwxr-xr-x  test/functional/p2p_blocksonly.py | 36
-rwxr-xr-x  test/functional/p2p_compactblocks.py | 112
-rwxr-xr-x  test/functional/p2p_disconnect_ban.py | 18
-rwxr-xr-x  test/functional/p2p_dos_header_tree.py | 18
-rwxr-xr-x  test/functional/p2p_eviction.py | 8
-rwxr-xr-x  test/functional/p2p_feefilter.py | 77
-rwxr-xr-x  test/functional/p2p_filter.py | 12
-rwxr-xr-x  test/functional/p2p_fingerprint.py | 12
-rwxr-xr-x  test/functional/p2p_getaddr_caching.py | 53
-rwxr-xr-x  test/functional/p2p_getdata.py | 4
-rwxr-xr-x  test/functional/p2p_invalid_block.py | 16
-rwxr-xr-x  test/functional/p2p_invalid_locator.py | 16
-rwxr-xr-x  test/functional/p2p_invalid_messages.py | 114
-rwxr-xr-x  test/functional/p2p_invalid_tx.py | 17
-rwxr-xr-x  test/functional/p2p_leak.py | 77
-rwxr-xr-x  test/functional/p2p_leak_tx.py | 19
-rwxr-xr-x  test/functional/p2p_nobloomfilter_messages.py | 2
-rwxr-xr-x  test/functional/p2p_node_network_limited.py | 21
-rwxr-xr-x  test/functional/p2p_permissions.py | 32
-rwxr-xr-x  test/functional/p2p_ping.py | 2
-rwxr-xr-x  test/functional/p2p_segwit.py | 61
-rwxr-xr-x  test/functional/p2p_sendheaders.py | 31
-rwxr-xr-x  test/functional/p2p_timeouts.py | 2
-rwxr-xr-x  test/functional/p2p_tx_download.py | 168
-rwxr-xr-x  test/functional/p2p_unrequested_blocks.py | 9
-rwxr-xr-x  test/functional/rpc_blockchain.py | 48
-rwxr-xr-x  test/functional/rpc_createmultisig.py | 3
-rwxr-xr-x  test/functional/rpc_deprecated.py | 37
-rwxr-xr-x  test/functional/rpc_fundrawtransaction.py | 13
-rwxr-xr-x  test/functional/rpc_generate.py | 36
-rwxr-xr-x  test/functional/rpc_getblockfilter.py | 5
-rwxr-xr-x  test/functional/rpc_getdescriptorinfo.py | 1
-rwxr-xr-x  test/functional/rpc_getpeerinfo_deprecation.py (renamed from test/functional/rpc_getpeerinfo_banscore_deprecation.py) | 20
-rwxr-xr-x  test/functional/rpc_invalidateblock.py | 12
-rwxr-xr-x  test/functional/rpc_misc.py | 32
-rwxr-xr-x  test/functional/rpc_net.py | 153
-rwxr-xr-x  test/functional/rpc_preciousblock.py | 7
-rwxr-xr-x  test/functional/rpc_psbt.py | 51
-rwxr-xr-x  test/functional/rpc_rawtransaction.py | 13
-rwxr-xr-x  test/functional/rpc_setban.py | 7
-rwxr-xr-x  test/functional/rpc_signrawtransaction.py | 20
-rwxr-xr-x  test/functional/rpc_txoutproof.py | 86
-rw-r--r--  test/functional/test_framework/address.py | 72
-rw-r--r--  test/functional/test_framework/bip340_test_vectors.csv | 16
-rw-r--r--  test/functional/test_framework/blocktools.py | 51
-rw-r--r--  test/functional/test_framework/key.py | 212
-rwxr-xr-x  test/functional/test_framework/messages.py | 131
-rw-r--r--  test/functional/test_framework/muhash.py | 110
-rwxr-xr-x  test/functional/test_framework/p2p.py (renamed from test/functional/test_framework/mininode.py) | 43
-rw-r--r--  test/functional/test_framework/script.py | 133
-rw-r--r--  test/functional/test_framework/segwit_addr.py | 22
-rwxr-xr-x  test/functional/test_framework/test_framework.py | 91
-rwxr-xr-x  test/functional/test_framework/test_node.py | 23
-rw-r--r--  test/functional/test_framework/util.py | 139
-rw-r--r--  test/functional/test_framework/wallet.py | 68
-rwxr-xr-x  test/functional/test_runner.py | 21
-rwxr-xr-x  test/functional/tool_wallet.py | 27
-rwxr-xr-x  test/functional/wallet_abandonconflict.py | 6
-rwxr-xr-x  test/functional/wallet_address_types.py | 16
-rwxr-xr-x  test/functional/wallet_avoidreuse.py | 3
-rwxr-xr-x  test/functional/wallet_backup.py | 78
-rwxr-xr-x  test/functional/wallet_balance.py | 7
-rwxr-xr-x  test/functional/wallet_basic.py | 55
-rwxr-xr-x  test/functional/wallet_bumpfee.py | 88
-rwxr-xr-x  test/functional/wallet_create_tx.py | 8
-rwxr-xr-x  test/functional/wallet_descriptor.py | 6
-rwxr-xr-x  test/functional/wallet_disable.py | 1
-rwxr-xr-x  test/functional/wallet_dump.py | 2
-rwxr-xr-x  test/functional/wallet_groups.py | 90
-rwxr-xr-x  test/functional/wallet_hd.py | 11
-rwxr-xr-x  test/functional/wallet_import_rescan.py | 7
-rwxr-xr-x  test/functional/wallet_importmulti.py | 2
-rwxr-xr-x  test/functional/wallet_keypool.py | 2
-rwxr-xr-x  test/functional/wallet_keypool_topup.py | 11
-rwxr-xr-x  test/functional/wallet_listsinceblock.py | 25
-rwxr-xr-x  test/functional/wallet_multiwallet.py | 73
-rwxr-xr-x  test/functional/wallet_reorgsrestore.py | 14
-rwxr-xr-x  test/functional/wallet_resendwallettransactions.py | 32
-rwxr-xr-x  test/functional/wallet_send.py | 345
-rwxr-xr-x  test/functional/wallet_startup.py | 58
-rwxr-xr-x  test/functional/wallet_txn_clone.py | 8
-rwxr-xr-x  test/functional/wallet_txn_doublespend.py | 6
-rwxr-xr-x  test/functional/wallet_upgradewallet.py | 4
-rwxr-xr-x  test/functional/wallet_zapwallettxes.py | 79
-rwxr-xr-x  test/fuzz/test_runner.py | 72
-rwxr-xr-x  test/get_previous_releases.py | 228
-rwxr-xr-x  test/lint/check-doc.py | 2
-rwxr-xr-x  test/lint/commit-script-check.sh | 2
-rwxr-xr-x  test/lint/lint-cpp.sh | 21
-rwxr-xr-x  test/lint/lint-git-commit-check.sh | 8
-rwxr-xr-x  test/lint/lint-locale-dependence.sh | 33
-rwxr-xr-x  test/lint/lint-whitespace.sh | 23
-rw-r--r--  test/sanitizer_suppressions/tsan | 3
-rw-r--r--  test/sanitizer_suppressions/ubsan | 1
132 files changed, 5208 insertions, 1375 deletions
diff --git a/test/functional/README.md b/test/functional/README.md
index aff5f714f2..82b30fed51 100644
--- a/test/functional/README.md
+++ b/test/functional/README.md
@@ -87,7 +87,9 @@ P2P messages. These can be found in the following source files:
#### Using the P2P interface
-- [messages.py](test_framework/messages.py) contains all the definitions for objects that pass
+- `P2P`s can be used to test specific P2P protocol behavior.
+[p2p.py](test_framework/p2p.py) contains test framework p2p objects and
+[messages.py](test_framework/messages.py) contains all the definitions for objects passed
over the network (`CBlock`, `CTransaction`, etc, along with the network-level
wrappers for them, `msg_block`, `msg_tx`, etc).
@@ -100,8 +102,22 @@ contains the higher level logic for processing P2P payloads and connecting to
the Bitcoin Core node application logic. For custom behaviour, subclass the
P2PInterface object and override the callback methods.
-- Can be used to write tests where specific P2P protocol behavior is tested.
-Examples tests are [p2p_unrequested_blocks.py](p2p_unrequested_blocks.py),
+`P2PConnection`s can be used as such:
+
+```python
+p2p_conn = node.add_p2p_connection(P2PInterface())
+p2p_conn.send_and_ping(msg)
+```
+
+They can also be referenced by indexing into a `TestNode`'s `p2ps` list, which
+contains the list of test framework `p2p` objects connected to itself
+(it does not include any `TestNode`s):
+
+```python
+node.p2ps[0].sync_with_ping()
+```
+
+More examples can be found in [p2p_unrequested_blocks.py](p2p_unrequested_blocks.py),
[p2p_compactblocks.py](p2p_compactblocks.py).
#### Prototyping tests
@@ -127,8 +143,8 @@ Base class for functional tests.
#### [util.py](test_framework/util.py)
Generally useful functions.
-#### [mininode.py](test_framework/mininode.py)
-Basic code to support P2P connectivity to a bitcoind.
+#### [p2p.py](test_framework/p2p.py)
+Test objects for interacting with a bitcoind node over the p2p interface.
#### [script.py](test_framework/script.py)
Utilities for manipulating transaction scripts (originally from python-bitcoinlib)
@@ -157,7 +173,7 @@ way is the use the `profile_with_perf` context manager, e.g.
with node.profile_with_perf("send-big-msgs"):
# Perform activity on the node you're interested in profiling, e.g.:
for _ in range(10000):
- node.p2p.send_message(some_large_message)
+ node.p2ps[0].send_message(some_large_message)
```
To see useful textual output, run
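The README hunks above document the mininode.py → p2p.py rename and the switch from `node.p2p` to keeping the object returned by `add_p2p_connection()`. As a minimal sketch of the renamed interface (not part of this diff; the message choice is illustrative):

```python
#!/usr/bin/env python3
"""Minimal sketch of the renamed p2p interface (illustrative, not part of this diff)."""
from test_framework.messages import msg_ping
from test_framework.p2p import P2PInterface, p2p_lock
from test_framework.test_framework import BitcoinTestFramework


class P2PRenameSketch(BitcoinTestFramework):
    def set_test_params(self):
        self.num_nodes = 1

    def run_test(self):
        # add_p2p_connection() returns the P2PInterface, so keep a reference
        # instead of reaching back through node.p2p / node.p2ps[0].
        peer = self.nodes[0].add_p2p_connection(P2PInterface())
        peer.send_and_ping(msg_ping(nonce=1))
        # wait_until() on the peer object replaces util.wait_until(..., lock=mininode_lock);
        # it acquires the global p2p lock while evaluating the predicate.
        peer.wait_until(lambda: "pong" in peer.last_message, timeout=5)
        # Hold p2p_lock (formerly mininode_lock) when reading peer state directly.
        with p2p_lock:
            assert "pong" in peer.last_message


if __name__ == '__main__':
    P2PRenameSketch().main()
```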
diff --git a/test/functional/example_test.py b/test/functional/example_test.py
index 34e4999329..c28bb7115f 100755
--- a/test/functional/example_test.py
+++ b/test/functional/example_test.py
@@ -16,17 +16,15 @@ from collections import defaultdict
# Avoid wildcard * imports
from test_framework.blocktools import (create_block, create_coinbase)
from test_framework.messages import CInv, MSG_BLOCK
-from test_framework.mininode import (
+from test_framework.p2p import (
P2PInterface,
- mininode_lock,
msg_block,
msg_getdata,
+ p2p_lock,
)
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
- connect_nodes,
- wait_until,
)
# P2PInterface is a class containing callbacks to be executed when a P2P
@@ -116,7 +114,7 @@ class ExampleTest(BitcoinTestFramework):
# In this test, we're not connecting node2 to node0 or node1. Calls to
# sync_all() should not include node2, since we're not expecting it to
# sync.
- connect_nodes(self.nodes[0], 1)
+ self.connect_nodes(0, 1)
self.sync_all(self.nodes[0:2])
# Use setup_nodes() to customize the node start behaviour (for example if
@@ -137,7 +135,7 @@ class ExampleTest(BitcoinTestFramework):
"""Main test logic"""
# Create P2P connections will wait for a verack to make sure the connection is fully up
- self.nodes[0].add_p2p_connection(BaseNode())
+ peer_messaging = self.nodes[0].add_p2p_connection(BaseNode())
# Generating a block on one of the nodes will get us out of IBD
blocks = [int(self.nodes[0].generate(nblocks=1)[0], 16)]
@@ -167,14 +165,14 @@ class ExampleTest(BitcoinTestFramework):
height = self.nodes[0].getblockcount()
for _ in range(10):
- # Use the mininode and blocktools functionality to manually build a block
+ # Use the blocktools functionality to manually build a block.
# Calling the generate() rpc is easier, but this allows us to exactly
# control the blocks and transactions.
block = create_block(self.tip, create_coinbase(height+1), self.block_time)
block.solve()
block_message = msg_block(block)
# Send message is used to send a P2P message to the node over our P2PInterface
- self.nodes[0].p2p.send_message(block_message)
+ peer_messaging.send_message(block_message)
self.tip = block.sha256
blocks.append(self.tip)
self.block_time += 1
@@ -184,7 +182,7 @@ class ExampleTest(BitcoinTestFramework):
self.nodes[1].waitforblockheight(11)
self.log.info("Connect node2 and node1")
- connect_nodes(self.nodes[1], 2)
+ self.connect_nodes(1, 2)
self.log.info("Wait for node2 to receive all the blocks from node1")
self.sync_all()
@@ -192,26 +190,27 @@ class ExampleTest(BitcoinTestFramework):
self.log.info("Add P2P connection to node2")
self.nodes[0].disconnect_p2ps()
- self.nodes[2].add_p2p_connection(BaseNode())
+ peer_receiving = self.nodes[2].add_p2p_connection(BaseNode())
self.log.info("Test that node2 propagates all the blocks to us")
getdata_request = msg_getdata()
for block in blocks:
getdata_request.inv.append(CInv(MSG_BLOCK, block))
- self.nodes[2].p2p.send_message(getdata_request)
+ peer_receiving.send_message(getdata_request)
# wait_until() will loop until a predicate condition is met. Use it to test properties of the
# P2PInterface objects.
- wait_until(lambda: sorted(blocks) == sorted(list(self.nodes[2].p2p.block_receive_map.keys())), timeout=5, lock=mininode_lock)
+ peer_receiving.wait_until(lambda: sorted(blocks) == sorted(list(peer_receiving.block_receive_map.keys())), timeout=5)
self.log.info("Check that each block was received only once")
# The network thread uses a global lock on data access to the P2PConnection objects when sending and receiving
# messages. The test thread should acquire the global lock before accessing any P2PConnection data to avoid locking
- # and synchronization issues. Note wait_until() acquires this global lock when testing the predicate.
- with mininode_lock:
- for block in self.nodes[2].p2p.block_receive_map.values():
+ # and synchronization issues. Note p2p.wait_until() acquires this global lock internally when testing the predicate.
+ with p2p_lock:
+ for block in peer_receiving.block_receive_map.values():
assert_equal(block, 1)
+
if __name__ == '__main__':
ExampleTest().main()
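example_test.py above also moves from the util-level connect_nodes helper to methods on the test framework. A hedged sketch of a custom setup_network built on those framework methods (the topology is illustrative only):

```python
#!/usr/bin/env python3
"""Sketch of the framework-level connect/disconnect/wait helpers (illustrative)."""
from test_framework.test_framework import BitcoinTestFramework


class ConnectHelperSketch(BitcoinTestFramework):
    def set_test_params(self):
        self.num_nodes = 3
        self.setup_clean_chain = True

    def setup_network(self):
        self.setup_nodes()
        # connect_nodes(self.nodes[0], 1) becomes a framework method taking node indices.
        self.connect_nodes(0, 1)
        self.connect_nodes(1, 2)

    def run_test(self):
        # disconnect_nodes(self.nodes[1], 2) likewise becomes self.disconnect_nodes(1, 2).
        self.disconnect_nodes(1, 2)
        # wait_until() is a framework method now; here it just confirms node1
        # is left with its single remaining peer (the inbound one from node0).
        self.wait_until(lambda: len(self.nodes[1].getpeerinfo()) == 1, timeout=10)


if __name__ == '__main__':
    ConnectHelperSketch().main()
```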
diff --git a/test/functional/feature_abortnode.py b/test/functional/feature_abortnode.py
index 75267de80b..8abfdef3a1 100755
--- a/test/functional/feature_abortnode.py
+++ b/test/functional/feature_abortnode.py
@@ -11,7 +11,7 @@
"""
from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import wait_until, get_datadir_path, connect_nodes
+from test_framework.util import get_datadir_path
import os
@@ -36,12 +36,12 @@ class AbortNodeTest(BitcoinTestFramework):
# attempt.
self.nodes[1].generate(3)
with self.nodes[0].assert_debug_log(["Failed to disconnect block"]):
- connect_nodes(self.nodes[0], 1)
+ self.connect_nodes(0, 1)
self.nodes[1].generate(1)
# Check that node0 aborted
self.log.info("Waiting for crash")
- wait_until(lambda: self.nodes[0].is_node_stopped(), timeout=200)
+ self.nodes[0].wait_until_stopped(timeout=200)
self.log.info("Node crashed - now verifying restart fails")
self.nodes[0].assert_start_raises_init_error()
diff --git a/test/functional/feature_assumevalid.py b/test/functional/feature_assumevalid.py
index f19ee12f95..603d7f5d3b 100755
--- a/test/functional/feature_assumevalid.py
+++ b/test/functional/feature_assumevalid.py
@@ -42,7 +42,7 @@ from test_framework.messages import (
msg_block,
msg_headers,
)
-from test_framework.mininode import P2PInterface
+from test_framework.p2p import P2PInterface
from test_framework.script import (CScript, OP_TRUE)
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal
diff --git a/test/functional/feature_backwards_compatibility.py b/test/functional/feature_backwards_compatibility.py
index 07dd0f8f82..21776d85c9 100755
--- a/test/functional/feature_backwards_compatibility.py
+++ b/test/functional/feature_backwards_compatibility.py
@@ -6,7 +6,7 @@
Test various backwards compatibility scenarios. Download the previous node binaries:
-contrib/devtools/previous_release.py -b v0.19.1 v0.18.1 v0.17.1 v0.16.3 v0.15.2
+test/get_previous_releases.py -b v0.19.1 v0.18.1 v0.17.2 v0.16.3 v0.15.2
v0.15.2 is not required by this test, but it is used in wallet_upgradewallet.py.
Due to a hardfork in regtest, it can't be used to sync nodes.
@@ -40,9 +40,10 @@ class BackwardsCompatibilityTest(BitcoinTestFramework):
["-nowallet", "-walletrbf=1", "-addresstype=bech32"], # Pre-release: use to receive coins, swap wallets, etc
["-nowallet", "-walletrbf=1", "-addresstype=bech32"], # v0.19.1
["-nowallet", "-walletrbf=1", "-addresstype=bech32"], # v0.18.1
- ["-nowallet", "-walletrbf=1", "-addresstype=bech32"], # v0.17.1
- ["-nowallet", "-walletrbf=1", "-addresstype=bech32"], # v0.16.3
+ ["-nowallet", "-walletrbf=1", "-addresstype=bech32"], # v0.17.2
+ ["-nowallet", "-walletrbf=1", "-addresstype=bech32", "-wallet=wallet.dat"], # v0.16.3
]
+ self.wallet_names = [self.default_wallet_name]
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
@@ -54,11 +55,12 @@ class BackwardsCompatibilityTest(BitcoinTestFramework):
None,
190100,
180100,
- 170100,
+ 170200,
160300,
])
self.start_nodes()
+ self.import_deterministic_coinbase_privkeys()
def run_test(self):
self.nodes[0].generatetoaddress(101, self.nodes[0].getnewaddress())
diff --git a/test/functional/feature_bip68_sequence.py b/test/functional/feature_bip68_sequence.py
index 1253c45418..60492350ee 100755
--- a/test/functional/feature_bip68_sequence.py
+++ b/test/functional/feature_bip68_sequence.py
@@ -6,7 +6,7 @@
import time
-from test_framework.blocktools import create_block, create_coinbase, add_witness_commitment
+from test_framework.blocktools import create_block, NORMAL_GBT_REQUEST_PARAMS, add_witness_commitment
from test_framework.messages import COIN, COutPoint, CTransaction, CTxIn, CTxOut, FromHex, ToHex
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
@@ -275,6 +275,8 @@ class BIP68Test(BitcoinTestFramework):
# Advance the time on the node so that we can test timelocks
self.nodes[0].setmocktime(cur_time+600)
+ # Save block template now to use for the reorg later
+ tmpl = self.nodes[0].getblocktemplate(NORMAL_GBT_REQUEST_PARAMS)
self.nodes[0].generate(1)
assert tx2.hash not in self.nodes[0].getrawmempool()
@@ -318,16 +320,15 @@ class BIP68Test(BitcoinTestFramework):
# diagram above).
# This would cause tx2 to be added back to the mempool, which in turn causes
# tx3 to be removed.
- tip = int(self.nodes[0].getblockhash(self.nodes[0].getblockcount()-1), 16)
- height = self.nodes[0].getblockcount()
for i in range(2):
- block = create_block(tip, create_coinbase(height), cur_time)
- block.nVersion = 3
+ block = create_block(tmpl=tmpl, ntime=cur_time)
block.rehash()
block.solve()
tip = block.sha256
- height += 1
assert_equal(None if i == 1 else 'inconclusive', self.nodes[0].submitblock(ToHex(block)))
+ tmpl = self.nodes[0].getblocktemplate(NORMAL_GBT_REQUEST_PARAMS)
+ tmpl['previousblockhash'] = '%x' % tip
+ tmpl['transactions'] = []
cur_time += 1
mempool = self.nodes[0].getrawmempool()
@@ -375,9 +376,7 @@ class BIP68Test(BitcoinTestFramework):
assert_raises_rpc_error(-26, NOT_FINAL_ERROR, self.nodes[0].sendrawtransaction, ToHex(tx3))
# make a block that violates bip68; ensure that the tip updates
- tip = int(self.nodes[0].getbestblockhash(), 16)
- block = create_block(tip, create_coinbase(self.nodes[0].getblockcount()+1))
- block.nVersion = 3
+ block = create_block(tmpl=self.nodes[0].getblocktemplate(NORMAL_GBT_REQUEST_PARAMS))
block.vtx.extend([tx1, tx2, tx3])
block.hashMerkleRoot = block.calc_merkle_root()
block.rehash()
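feature_bip68_sequence.py now derives blocks from getblocktemplate via NORMAL_GBT_REQUEST_PARAMS instead of hand-assembling tip, height and coinbase. A minimal sketch of that pattern on its own (the surrounding scaffolding, including the second node and the warm-up block, is illustrative):

```python
#!/usr/bin/env python3
"""Sketch: build and submit a block from a getblocktemplate result (illustrative)."""
from test_framework.address import ADDRESS_BCRT1_UNSPENDABLE
from test_framework.blocktools import NORMAL_GBT_REQUEST_PARAMS, create_block
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal


class GBTBlockSketch(BitcoinTestFramework):
    def set_test_params(self):
        # A second node so getblocktemplate does not complain about having no peers.
        self.num_nodes = 2

    def run_test(self):
        node = self.nodes[0]
        # Generating a block gets the node out of IBD so getblocktemplate works.
        node.generatetoaddress(1, ADDRESS_BCRT1_UNSPENDABLE)
        # Ask the node for a template instead of assembling tip/height/coinbase by hand.
        tmpl = node.getblocktemplate(NORMAL_GBT_REQUEST_PARAMS)
        block = create_block(tmpl=tmpl)
        block.solve()
        assert_equal(node.submitblock(block.serialize().hex()), None)
        assert_equal(node.getbestblockhash(), block.hash)


if __name__ == '__main__':
    GBTBlockSketch().main()
```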
diff --git a/test/functional/feature_block.py b/test/functional/feature_block.py
index c74761869b..19753d73ef 100755
--- a/test/functional/feature_block.py
+++ b/test/functional/feature_block.py
@@ -26,7 +26,7 @@ from test_framework.messages import (
uint256_from_compact,
uint256_from_str,
)
-from test_framework.mininode import P2PDataStore
+from test_framework.p2p import P2PDataStore
from test_framework.script import (
CScript,
MAX_SCRIPT_ELEMENT_SIZE,
@@ -53,7 +53,7 @@ from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal
from data import invalid_txs
-# Use this class for tests that require behavior other than normal "mininode" behavior.
+# Use this class for tests that require behavior other than normal p2p behavior.
# For now, it is used to serialize a bloated varint (b64).
class CBrokenBlock(CBlock):
def initialize(self, base_block):
@@ -1386,14 +1386,14 @@ class FullBlockTest(BitcoinTestFramework):
"""Add a P2P connection to the node.
Helper to connect and wait for version handshake."""
- self.nodes[0].add_p2p_connection(P2PDataStore())
+ self.helper_peer = self.nodes[0].add_p2p_connection(P2PDataStore())
# We need to wait for the initial getheaders from the peer before we
# start populating our blockstore. If we don't, then we may run ahead
# to the next subtest before we receive the getheaders. We'd then send
# an INV for the next block and receive two getheaders - one for the
# IBD and one for the INV. We'd respond to both and could get
# unexpectedly disconnected if the DoS score for that error is 50.
- self.nodes[0].p2p.wait_for_getheaders(timeout=timeout)
+ self.helper_peer.wait_for_getheaders(timeout=timeout)
def reconnect_p2p(self, timeout=60):
"""Tear down and bootstrap the P2P connection to the node.
@@ -1407,7 +1407,7 @@ class FullBlockTest(BitcoinTestFramework):
"""Sends blocks to test node. Syncs and verifies that tip has advanced to most recent block.
Call with success = False if the tip shouldn't advance to the most recent block."""
- self.nodes[0].p2p.send_blocks_and_test(blocks, self.nodes[0], success=success, reject_reason=reject_reason, force_send=force_send, timeout=timeout, expect_disconnect=reconnect)
+ self.helper_peer.send_blocks_and_test(blocks, self.nodes[0], success=success, reject_reason=reject_reason, force_send=force_send, timeout=timeout, expect_disconnect=reconnect)
if reconnect:
self.reconnect_p2p(timeout=timeout)
diff --git a/test/functional/feature_cltv.py b/test/functional/feature_cltv.py
index fd0330924d..aad255c4a9 100755
--- a/test/functional/feature_cltv.py
+++ b/test/functional/feature_cltv.py
@@ -10,7 +10,7 @@ Test that the CHECKLOCKTIMEVERIFY soft-fork activates at (regtest) block height
from test_framework.blocktools import create_coinbase, create_block, create_transaction
from test_framework.messages import CTransaction, msg_block, ToHex
-from test_framework.mininode import P2PInterface
+from test_framework.p2p import P2PInterface
from test_framework.script import CScript, OP_1NEGATE, OP_CHECKLOCKTIMEVERIFY, OP_DROP, CScriptNum
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
@@ -75,7 +75,7 @@ class BIP65Test(BitcoinTestFramework):
)
def run_test(self):
- self.nodes[0].add_p2p_connection(P2PInterface())
+ peer = self.nodes[0].add_p2p_connection(P2PInterface())
self.test_cltv_info(is_active=False)
@@ -99,7 +99,7 @@ class BIP65Test(BitcoinTestFramework):
block.solve()
self.test_cltv_info(is_active=False) # Not active as of current tip and next block does not need to obey rules
- self.nodes[0].p2p.send_and_ping(msg_block(block))
+ peer.send_and_ping(msg_block(block))
self.test_cltv_info(is_active=True) # Not active as of current tip, but next block must obey rules
assert_equal(self.nodes[0].getbestblockhash(), block.hash)
@@ -111,9 +111,9 @@ class BIP65Test(BitcoinTestFramework):
block.solve()
with self.nodes[0].assert_debug_log(expected_msgs=['{}, bad-version(0x00000003)'.format(block.hash)]):
- self.nodes[0].p2p.send_and_ping(msg_block(block))
+ peer.send_and_ping(msg_block(block))
assert_equal(int(self.nodes[0].getbestblockhash(), 16), tip)
- self.nodes[0].p2p.sync_with_ping()
+ peer.sync_with_ping()
self.log.info("Test that invalid-according-to-cltv transactions cannot appear in a block")
block.nVersion = 4
@@ -136,9 +136,9 @@ class BIP65Test(BitcoinTestFramework):
block.solve()
with self.nodes[0].assert_debug_log(expected_msgs=['CheckInputScripts on {} failed with non-mandatory-script-verify-flag (Negative locktime)'.format(block.vtx[-1].hash)]):
- self.nodes[0].p2p.send_and_ping(msg_block(block))
+ peer.send_and_ping(msg_block(block))
assert_equal(int(self.nodes[0].getbestblockhash(), 16), tip)
- self.nodes[0].p2p.sync_with_ping()
+ peer.sync_with_ping()
self.log.info("Test that a version 4 block with a valid-according-to-CLTV transaction is accepted")
spendtx = cltv_validate(self.nodes[0], spendtx, CLTV_HEIGHT - 1)
@@ -150,7 +150,7 @@ class BIP65Test(BitcoinTestFramework):
block.solve()
self.test_cltv_info(is_active=True) # Not active as of current tip, but next block must obey rules
- self.nodes[0].p2p.send_and_ping(msg_block(block))
+ peer.send_and_ping(msg_block(block))
self.test_cltv_info(is_active=True) # Active as of current tip
assert_equal(int(self.nodes[0].getbestblockhash(), 16), block.sha256)
diff --git a/test/functional/feature_config_args.py b/test/functional/feature_config_args.py
index 34e856c1ba..60533e196f 100755
--- a/test/functional/feature_config_args.py
+++ b/test/functional/feature_config_args.py
@@ -14,6 +14,7 @@ class ConfArgsTest(BitcoinTestFramework):
self.setup_clean_chain = True
self.num_nodes = 1
self.supports_cli = False
+ self.wallet_names = []
def test_config_file_parser(self):
# Assume node is stopped
@@ -78,6 +79,12 @@ class ConfArgsTest(BitcoinTestFramework):
with open(inc_conf_file2_path, 'w', encoding='utf-8') as conf:
conf.write('') # clear
+ def test_invalid_command_line_options(self):
+ self.nodes[0].assert_start_raises_init_error(
+ expected_msg='Error: No proxy server specified. Use -proxy=<ip> or -proxy=<ip:port>.',
+ extra_args=['-proxy'],
+ )
+
def test_log_buffer(self):
with self.nodes[0].assert_debug_log(expected_msgs=['Warning: parsed potentially confusing double-negative -connect=0\n']):
self.start_node(0, extra_args=['-noconnect=0'])
@@ -146,6 +153,7 @@ class ConfArgsTest(BitcoinTestFramework):
self.test_networkactive()
self.test_config_file_parser()
+ self.test_invalid_command_line_options()
# Remove the -datadir argument so it doesn't override the config file
self.nodes[0].args = [arg for arg in self.nodes[0].args if not arg.startswith("-datadir")]
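feature_config_args.py gains a startup-failure check driven by assert_start_raises_init_error. A sketch of the same pattern in isolation (the error string is the one from the diff; the restart at the end only leaves the node running for teardown):

```python
#!/usr/bin/env python3
"""Sketch: asserting a startup failure for a malformed option (mirrors the diff)."""
from test_framework.test_framework import BitcoinTestFramework


class InvalidOptionSketch(BitcoinTestFramework):
    def set_test_params(self):
        self.num_nodes = 1
        # No wallets need to be created for this check.
        self.wallet_names = []

    def run_test(self):
        self.stop_node(0)
        # -proxy given without a value must abort startup with a specific error
        # (message taken verbatim from the diff above).
        self.nodes[0].assert_start_raises_init_error(
            expected_msg='Error: No proxy server specified. Use -proxy=<ip> or -proxy=<ip:port>.',
            extra_args=['-proxy'],
        )
        # Restart normally so the framework's shutdown checks pass.
        self.start_node(0)


if __name__ == '__main__':
    InvalidOptionSketch().main()
```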
diff --git a/test/functional/feature_csv_activation.py b/test/functional/feature_csv_activation.py
index dfb3683143..39e8bca751 100755
--- a/test/functional/feature_csv_activation.py
+++ b/test/functional/feature_csv_activation.py
@@ -44,7 +44,7 @@ import time
from test_framework.blocktools import create_coinbase, create_block, create_transaction
from test_framework.messages import ToHex, CTransaction
-from test_framework.mininode import P2PDataStore
+from test_framework.p2p import P2PDataStore
from test_framework.script import (
CScript,
OP_CHECKSEQUENCEVERIFY,
@@ -182,10 +182,10 @@ class BIP68_112_113Test(BitcoinTestFramework):
"""Sends blocks to test node. Syncs and verifies that tip has advanced to most recent block.
Call with success = False if the tip shouldn't advance to the most recent block."""
- self.nodes[0].p2p.send_blocks_and_test(blocks, self.nodes[0], success=success, reject_reason=reject_reason)
+ self.helper_peer.send_blocks_and_test(blocks, self.nodes[0], success=success, reject_reason=reject_reason)
def run_test(self):
- self.nodes[0].add_p2p_connection(P2PDataStore())
+ self.helper_peer = self.nodes[0].add_p2p_connection(P2PDataStore())
self.log.info("Generate blocks in the past for coinbase outputs.")
long_past_time = int(time.time()) - 600 * 1000 # enough to build up to 1000 blocks 10 minutes apart without worrying about getting into the future
diff --git a/test/functional/feature_dersig.py b/test/functional/feature_dersig.py
index 05fdacd451..3f7efdbded 100755
--- a/test/functional/feature_dersig.py
+++ b/test/functional/feature_dersig.py
@@ -9,7 +9,7 @@ Test that the DERSIG soft-fork activates at (regtest) height 1251.
from test_framework.blocktools import create_coinbase, create_block, create_transaction
from test_framework.messages import msg_block
-from test_framework.mininode import P2PInterface
+from test_framework.p2p import P2PInterface
from test_framework.script import CScript
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
@@ -59,7 +59,7 @@ class BIP66Test(BitcoinTestFramework):
)
def run_test(self):
- self.nodes[0].add_p2p_connection(P2PInterface())
+ peer = self.nodes[0].add_p2p_connection(P2PInterface())
self.test_dersig_info(is_active=False)
@@ -84,7 +84,7 @@ class BIP66Test(BitcoinTestFramework):
block.solve()
self.test_dersig_info(is_active=False) # Not active as of current tip and next block does not need to obey rules
- self.nodes[0].p2p.send_and_ping(msg_block(block))
+ peer.send_and_ping(msg_block(block))
self.test_dersig_info(is_active=True) # Not active as of current tip, but next block must obey rules
assert_equal(self.nodes[0].getbestblockhash(), block.hash)
@@ -97,9 +97,9 @@ class BIP66Test(BitcoinTestFramework):
block.solve()
with self.nodes[0].assert_debug_log(expected_msgs=['{}, bad-version(0x00000002)'.format(block.hash)]):
- self.nodes[0].p2p.send_and_ping(msg_block(block))
+ peer.send_and_ping(msg_block(block))
assert_equal(int(self.nodes[0].getbestblockhash(), 16), tip)
- self.nodes[0].p2p.sync_with_ping()
+ peer.sync_with_ping()
self.log.info("Test that transactions with non-DER signatures cannot appear in a block")
block.nVersion = 3
@@ -123,9 +123,9 @@ class BIP66Test(BitcoinTestFramework):
block.solve()
with self.nodes[0].assert_debug_log(expected_msgs=['CheckInputScripts on {} failed with non-mandatory-script-verify-flag (Non-canonical DER signature)'.format(block.vtx[-1].hash)]):
- self.nodes[0].p2p.send_and_ping(msg_block(block))
+ peer.send_and_ping(msg_block(block))
assert_equal(int(self.nodes[0].getbestblockhash(), 16), tip)
- self.nodes[0].p2p.sync_with_ping()
+ peer.sync_with_ping()
self.log.info("Test that a version 3 block with a DERSIG-compliant transaction is accepted")
block.vtx[1] = create_transaction(self.nodes[0], self.coinbase_txids[1], self.nodeaddress, amount=1.0)
@@ -134,7 +134,7 @@ class BIP66Test(BitcoinTestFramework):
block.solve()
self.test_dersig_info(is_active=True) # Not active as of current tip, but next block must obey rules
- self.nodes[0].p2p.send_and_ping(msg_block(block))
+ peer.send_and_ping(msg_block(block))
self.test_dersig_info(is_active=True) # Active as of current tip
assert_equal(int(self.nodes[0].getbestblockhash(), 16), block.sha256)
diff --git a/test/functional/feature_fee_estimation.py b/test/functional/feature_fee_estimation.py
index 702a1d9995..8a8a0c7614 100755
--- a/test/functional/feature_fee_estimation.py
+++ b/test/functional/feature_fee_estimation.py
@@ -13,7 +13,6 @@ from test_framework.util import (
assert_equal,
assert_greater_than,
assert_greater_than_or_equal,
- connect_nodes,
satoshi_round,
)
@@ -232,9 +231,9 @@ class EstimateFeeTest(BitcoinTestFramework):
# so the estimates would not be affected by the splitting transactions
self.start_node(1)
self.start_node(2)
- connect_nodes(self.nodes[1], 0)
- connect_nodes(self.nodes[0], 2)
- connect_nodes(self.nodes[2], 1)
+ self.connect_nodes(1, 0)
+ self.connect_nodes(0, 2)
+ self.connect_nodes(2, 1)
self.sync_all()
diff --git a/test/functional/feature_filelock.py b/test/functional/feature_filelock.py
index b56ffe179f..7de9a589be 100755
--- a/test/functional/feature_filelock.py
+++ b/test/functional/feature_filelock.py
@@ -15,7 +15,7 @@ class FilelockTest(BitcoinTestFramework):
def setup_network(self):
self.add_nodes(self.num_nodes, extra_args=None)
- self.nodes[0].start([])
+ self.nodes[0].start()
self.nodes[0].wait_for_rpc_connection()
def run_test(self):
@@ -27,10 +27,11 @@ class FilelockTest(BitcoinTestFramework):
self.nodes[1].assert_start_raises_init_error(extra_args=['-datadir={}'.format(self.nodes[0].datadir), '-noserver'], expected_msg=expected_msg)
if self.is_wallet_compiled():
+ self.nodes[0].createwallet(self.default_wallet_name)
wallet_dir = os.path.join(datadir, 'wallets')
self.log.info("Check that we can't start a second bitcoind instance using the same wallet")
expected_msg = "Error: Error initializing wallet database environment"
- self.nodes[1].assert_start_raises_init_error(extra_args=['-walletdir={}'.format(wallet_dir), '-noserver'], expected_msg=expected_msg, match=ErrorMatch.PARTIAL_REGEX)
+ self.nodes[1].assert_start_raises_init_error(extra_args=['-walletdir={}'.format(wallet_dir), '-wallet=' + self.default_wallet_name, '-noserver'], expected_msg=expected_msg, match=ErrorMatch.PARTIAL_REGEX)
if __name__ == '__main__':
FilelockTest().main()
diff --git a/test/functional/feature_maxuploadtarget.py b/test/functional/feature_maxuploadtarget.py
index 0dc2839191..d0a94658ff 100755
--- a/test/functional/feature_maxuploadtarget.py
+++ b/test/functional/feature_maxuploadtarget.py
@@ -14,7 +14,7 @@ from collections import defaultdict
import time
from test_framework.messages import CInv, MSG_BLOCK, msg_getdata
-from test_framework.mininode import P2PInterface
+from test_framework.p2p import P2PInterface
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, mine_large_block
@@ -145,16 +145,16 @@ class MaxUploadTest(BitcoinTestFramework):
self.restart_node(0, ["-whitelist=download@127.0.0.1", "-maxuploadtarget=1"])
# Reconnect to self.nodes[0]
- self.nodes[0].add_p2p_connection(TestP2PConn())
+ peer = self.nodes[0].add_p2p_connection(TestP2PConn())
#retrieve 20 blocks which should be enough to break the 1MB limit
getdata_request.inv = [CInv(MSG_BLOCK, big_new_block)]
for i in range(20):
- self.nodes[0].p2p.send_and_ping(getdata_request)
- assert_equal(self.nodes[0].p2p.block_receive_map[big_new_block], i+1)
+ peer.send_and_ping(getdata_request)
+ assert_equal(peer.block_receive_map[big_new_block], i+1)
getdata_request.inv = [CInv(MSG_BLOCK, big_old_block)]
- self.nodes[0].p2p.send_and_ping(getdata_request)
+ peer.send_and_ping(getdata_request)
self.log.info("Peer still connected after trying to download old block (download permission)")
peer_info = self.nodes[0].getpeerinfo()
diff --git a/test/functional/feature_minchainwork.py b/test/functional/feature_minchainwork.py
index dbff6f15f2..abf87e8f0c 100755
--- a/test/functional/feature_minchainwork.py
+++ b/test/functional/feature_minchainwork.py
@@ -18,7 +18,7 @@ only succeeds past a given node once its nMinimumChainWork has been exceeded.
import time
from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import connect_nodes, assert_equal
+from test_framework.util import assert_equal
# 2 hashes required per regtest block (with no difficulty adjustment)
REGTEST_WORK_PER_BLOCK = 2
@@ -39,7 +39,7 @@ class MinimumChainWorkTest(BitcoinTestFramework):
# block relay to inbound peers.
self.setup_nodes()
for i in range(self.num_nodes-1):
- connect_nodes(self.nodes[i+1], i)
+ self.connect_nodes(i+1, i)
def run_test(self):
# Start building a chain on node0. node2 shouldn't be able to sync until node1's
diff --git a/test/functional/feature_notifications.py b/test/functional/feature_notifications.py
index dd4c318cee..cf102f321c 100755
--- a/test/functional/feature_notifications.py
+++ b/test/functional/feature_notifications.py
@@ -9,9 +9,6 @@ from test_framework.address import ADDRESS_BCRT1_UNSPENDABLE, keyhash_to_p2pkh
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
- wait_until,
- connect_nodes,
- disconnect_nodes,
hex_str_to_bytes,
)
@@ -19,7 +16,7 @@ from test_framework.util import (
# Windows disallow control characters (0-31) and /\?%:|"<>
FILE_CHAR_START = 32 if os.name == 'nt' else 1
FILE_CHAR_END = 128
-FILE_CHAR_BLOCKLIST = '/\\?%*:|"<>' if os.name == 'nt' else '/'
+FILE_CHARS_DISALLOWED = '/\\?%*:|"<>' if os.name == 'nt' else '/'
def notify_outputname(walletname, txid):
@@ -32,7 +29,7 @@ class NotificationsTest(BitcoinTestFramework):
self.setup_clean_chain = True
def setup_network(self):
- self.wallet = ''.join(chr(i) for i in range(FILE_CHAR_START, FILE_CHAR_END) if chr(i) not in FILE_CHAR_BLOCKLIST)
+ self.wallet = ''.join(chr(i) for i in range(FILE_CHAR_START, FILE_CHAR_END) if chr(i) not in FILE_CHARS_DISALLOWED)
self.alertnotify_dir = os.path.join(self.options.tmpdir, "alertnotify")
self.blocknotify_dir = os.path.join(self.options.tmpdir, "blocknotify")
self.walletnotify_dir = os.path.join(self.options.tmpdir, "walletnotify")
@@ -46,8 +43,8 @@ class NotificationsTest(BitcoinTestFramework):
"-blocknotify=echo > {}".format(os.path.join(self.blocknotify_dir, '%s'))],
["-blockversion=211",
"-rescan",
- "-wallet={}".format(self.wallet),
"-walletnotify=echo > {}".format(os.path.join(self.walletnotify_dir, notify_outputname('%w', '%s')))]]
+ self.wallet_names = [self.default_wallet_name, self.wallet]
super().setup_network()
def run_test(self):
@@ -56,7 +53,7 @@ class NotificationsTest(BitcoinTestFramework):
blocks = self.nodes[1].generatetoaddress(block_count, self.nodes[1].getnewaddress() if self.is_wallet_compiled() else ADDRESS_BCRT1_UNSPENDABLE)
# wait at most 10 seconds for expected number of files before reading the content
- wait_until(lambda: len(os.listdir(self.blocknotify_dir)) == block_count, timeout=10)
+ self.wait_until(lambda: len(os.listdir(self.blocknotify_dir)) == block_count, timeout=10)
# directory content should equal the generated blocks hashes
assert_equal(sorted(blocks), sorted(os.listdir(self.blocknotify_dir)))
@@ -64,7 +61,7 @@ class NotificationsTest(BitcoinTestFramework):
if self.is_wallet_compiled():
self.log.info("test -walletnotify")
# wait at most 10 seconds for expected number of files before reading the content
- wait_until(lambda: len(os.listdir(self.walletnotify_dir)) == block_count, timeout=10)
+ self.wait_until(lambda: len(os.listdir(self.walletnotify_dir)) == block_count, timeout=10)
# directory content should equal the generated transaction hashes
txids_rpc = list(map(lambda t: notify_outputname(self.wallet, t['txid']), self.nodes[1].listtransactions("*", block_count)))
@@ -76,9 +73,9 @@ class NotificationsTest(BitcoinTestFramework):
self.log.info("test -walletnotify after rescan")
# restart node to rescan to force wallet notifications
self.start_node(1)
- connect_nodes(self.nodes[0], 1)
+ self.connect_nodes(0, 1)
- wait_until(lambda: len(os.listdir(self.walletnotify_dir)) == block_count, timeout=10)
+ self.wait_until(lambda: len(os.listdir(self.walletnotify_dir)) == block_count, timeout=10)
# directory content should equal the generated transaction hashes
txids_rpc = list(map(lambda t: notify_outputname(self.wallet, t['txid']), self.nodes[1].listtransactions("*", block_count)))
@@ -127,12 +124,12 @@ class NotificationsTest(BitcoinTestFramework):
# Bump tx2 as bump2 and generate a block on node 0 while
# disconnected, then reconnect and check for notifications on node 1
# about newly confirmed bump2 and newly conflicted tx2.
- disconnect_nodes(self.nodes[0], 1)
+ self.disconnect_nodes(0, 1)
bump2 = self.nodes[0].bumpfee(tx2)["txid"]
self.nodes[0].generatetoaddress(1, ADDRESS_BCRT1_UNSPENDABLE)
assert_equal(self.nodes[0].gettransaction(bump2)["confirmations"], 1)
assert_equal(tx2 in self.nodes[1].getrawmempool(), True)
- connect_nodes(self.nodes[0], 1)
+ self.connect_nodes(0, 1)
self.sync_blocks()
self.expect_wallet_notify([bump2, tx2])
assert_equal(self.nodes[1].gettransaction(bump2)["confirmations"], 1)
@@ -140,7 +137,7 @@ class NotificationsTest(BitcoinTestFramework):
# TODO: add test for `-alertnotify` large fork notifications
def expect_wallet_notify(self, tx_ids):
- wait_until(lambda: len(os.listdir(self.walletnotify_dir)) >= len(tx_ids), timeout=10)
+ self.wait_until(lambda: len(os.listdir(self.walletnotify_dir)) >= len(tx_ids), timeout=10)
assert_equal(sorted(notify_outputname(self.wallet, tx_id) for tx_id in tx_ids), sorted(os.listdir(self.walletnotify_dir)))
for tx_file in os.listdir(self.walletnotify_dir):
os.remove(os.path.join(self.walletnotify_dir, tx_file))
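feature_notifications.py swaps the util-level wait_until for the framework method when polling the notification directories. A minimal, self-contained sketch of that -blocknotify pattern (directory name and block count are illustrative):

```python
#!/usr/bin/env python3
"""Sketch: waiting for -blocknotify output with the framework's wait_until (illustrative)."""
import os

from test_framework.address import ADDRESS_BCRT1_UNSPENDABLE
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal


class BlockNotifySketch(BitcoinTestFramework):
    def set_test_params(self):
        self.num_nodes = 1
        self.setup_clean_chain = True

    def setup_network(self):
        self.notify_dir = os.path.join(self.options.tmpdir, "blocknotify")
        os.mkdir(self.notify_dir)
        # %s is replaced by the block hash; `echo >` creates one file per block.
        self.extra_args = [[
            "-blocknotify=echo > {}".format(os.path.join(self.notify_dir, '%s')),
        ]]
        super().setup_network()

    def run_test(self):
        blocks = self.nodes[0].generatetoaddress(5, ADDRESS_BCRT1_UNSPENDABLE)
        # wait_until() is a method on the test framework now (was a util function).
        self.wait_until(lambda: len(os.listdir(self.notify_dir)) == 5, timeout=10)
        # The file names written by the notify command are the block hashes.
        assert_equal(sorted(blocks), sorted(os.listdir(self.notify_dir)))


if __name__ == '__main__':
    BlockNotifySketch().main()
```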
diff --git a/test/functional/feature_nulldummy.py b/test/functional/feature_nulldummy.py
index ff55cb76d9..d1196a4bbd 100755
--- a/test/functional/feature_nulldummy.py
+++ b/test/functional/feature_nulldummy.py
@@ -14,7 +14,7 @@ Generate 427 more blocks.
"""
import time
-from test_framework.blocktools import create_coinbase, create_block, create_transaction, add_witness_commitment
+from test_framework.blocktools import NORMAL_GBT_REQUEST_PARAMS, create_block, create_transaction, add_witness_commitment
from test_framework.messages import CTransaction
from test_framework.script import CScript
from test_framework.test_framework import BitcoinTestFramework
@@ -37,14 +37,15 @@ def trueDummy(tx):
class NULLDUMMYTest(BitcoinTestFramework):
def set_test_params(self):
- self.num_nodes = 1
+ # Need two nodes only so GBT doesn't complain that it's not connected
+ self.num_nodes = 2
self.setup_clean_chain = True
# This script tests NULLDUMMY activation, which is part of the 'segwit' deployment, so we go through
# normal segwit activation here (and don't use the default always-on behaviour).
self.extra_args = [[
'-segwitheight=432',
'-addresstype=legacy',
- ]]
+ ]] * 2
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
@@ -61,7 +62,6 @@ class NULLDUMMYTest(BitcoinTestFramework):
coinbase_txid.append(self.nodes[0].getblock(i)['tx'][0])
self.nodes[0].generate(427) # Block 429
self.lastblockhash = self.nodes[0].getbestblockhash()
- self.tip = int("0x" + self.lastblockhash, 0)
self.lastblockheight = 429
self.lastblocktime = int(time.time()) + 429
@@ -102,8 +102,10 @@ class NULLDUMMYTest(BitcoinTestFramework):
self.block_submit(self.nodes[0], test6txs, True, True)
def block_submit(self, node, txs, witness=False, accept=False):
- block = create_block(self.tip, create_coinbase(self.lastblockheight + 1), self.lastblocktime + 1)
- block.nVersion = 4
+ tmpl = node.getblocktemplate(NORMAL_GBT_REQUEST_PARAMS)
+ assert_equal(tmpl['previousblockhash'], self.lastblockhash)
+ assert_equal(tmpl['height'], self.lastblockheight + 1)
+ block = create_block(tmpl=tmpl, ntime=self.lastblocktime + 1)
for tx in txs:
tx.rehash()
block.vtx.append(tx)
@@ -114,7 +116,6 @@ class NULLDUMMYTest(BitcoinTestFramework):
assert_equal(None if accept else 'block-validation-failed', node.submitblock(block.serialize().hex()))
if (accept):
assert_equal(node.getbestblockhash(), block.hash)
- self.tip = block.sha256
self.lastblockhash = block.hash
self.lastblocktime += 1
self.lastblockheight += 1
diff --git a/test/functional/feature_proxy.py b/test/functional/feature_proxy.py
index be323d355e..dfae58e860 100755
--- a/test/functional/feature_proxy.py
+++ b/test/functional/feature_proxy.py
@@ -18,8 +18,9 @@ Test plan:
- proxy on IPv6
- Create various proxies (as threads)
-- Create bitcoinds that connect to them
-- Manipulate the bitcoinds using addnode (onetry) an observe effects
+- Create nodes that connect to them
+- Manipulate the peer connections using addnode (onetry) and observe effects
+- Test the getpeerinfo `network` field for the peer
addnode connect to IPv4
addnode connect to IPv6
@@ -40,6 +41,12 @@ from test_framework.util import (
from test_framework.netutil import test_ipv6_local
RANGE_BEGIN = PORT_MIN + 2 * PORT_RANGE # Start after p2p and rpc ports
+# From GetNetworkName() in netbase.cpp:
+NET_UNROUTABLE = ""
+NET_IPV4 = "ipv4"
+NET_IPV6 = "ipv6"
+NET_ONION = "onion"
+
class ProxyTest(BitcoinTestFramework):
def set_test_params(self):
@@ -90,10 +97,16 @@ class ProxyTest(BitcoinTestFramework):
self.add_nodes(self.num_nodes, extra_args=args)
self.start_nodes()
+ def network_test(self, node, addr, network):
+ for peer in node.getpeerinfo():
+ if peer["addr"] == addr:
+ assert_equal(peer["network"], network)
+
def node_test(self, node, proxies, auth, test_onion=True):
rv = []
- # Test: outgoing IPv4 connection through node
- node.addnode("15.61.23.23:1234", "onetry")
+ addr = "15.61.23.23:1234"
+ self.log.debug("Test: outgoing IPv4 connection through node for address {}".format(addr))
+ node.addnode(addr, "onetry")
cmd = proxies[0].queue.get()
assert isinstance(cmd, Socks5Command)
# Note: bitcoind's SOCKS5 implementation only sends atyp DOMAINNAME, even if connecting directly to IPv4/IPv6
@@ -104,10 +117,12 @@ class ProxyTest(BitcoinTestFramework):
assert_equal(cmd.username, None)
assert_equal(cmd.password, None)
rv.append(cmd)
+ self.network_test(node, addr, network=NET_IPV4)
if self.have_ipv6:
- # Test: outgoing IPv6 connection through node
- node.addnode("[1233:3432:2434:2343:3234:2345:6546:4534]:5443", "onetry")
+ addr = "[1233:3432:2434:2343:3234:2345:6546:4534]:5443"
+ self.log.debug("Test: outgoing IPv6 connection through node for address {}".format(addr))
+ node.addnode(addr, "onetry")
cmd = proxies[1].queue.get()
assert isinstance(cmd, Socks5Command)
# Note: bitcoind's SOCKS5 implementation only sends atyp DOMAINNAME, even if connecting directly to IPv4/IPv6
@@ -118,10 +133,12 @@ class ProxyTest(BitcoinTestFramework):
assert_equal(cmd.username, None)
assert_equal(cmd.password, None)
rv.append(cmd)
+ self.network_test(node, addr, network=NET_IPV6)
if test_onion:
- # Test: outgoing onion connection through node
- node.addnode("bitcoinostk4e4re.onion:8333", "onetry")
+ addr = "bitcoinostk4e4re.onion:8333"
+ self.log.debug("Test: outgoing onion connection through node for address {}".format(addr))
+ node.addnode(addr, "onetry")
cmd = proxies[2].queue.get()
assert isinstance(cmd, Socks5Command)
assert_equal(cmd.atyp, AddressType.DOMAINNAME)
@@ -131,9 +148,11 @@ class ProxyTest(BitcoinTestFramework):
assert_equal(cmd.username, None)
assert_equal(cmd.password, None)
rv.append(cmd)
+ self.network_test(node, addr, network=NET_ONION)
- # Test: outgoing DNS name connection through node
- node.addnode("node.noumenon:8333", "onetry")
+ addr = "node.noumenon:8333"
+ self.log.debug("Test: outgoing DNS name connection through node for address {}".format(addr))
+ node.addnode(addr, "onetry")
cmd = proxies[3].queue.get()
assert isinstance(cmd, Socks5Command)
assert_equal(cmd.atyp, AddressType.DOMAINNAME)
@@ -143,6 +162,7 @@ class ProxyTest(BitcoinTestFramework):
assert_equal(cmd.username, None)
assert_equal(cmd.password, None)
rv.append(cmd)
+ self.network_test(node, addr, network=NET_UNROUTABLE)
return rv
@@ -197,5 +217,6 @@ class ProxyTest(BitcoinTestFramework):
assert_equal(n3[net]['proxy_randomize_credentials'], False)
assert_equal(n3['onion']['reachable'], False)
+
if __name__ == '__main__':
ProxyTest().main()
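feature_proxy.py now also checks the new getpeerinfo `network` field for each outgoing connection. A small sketch of reading that field on ordinary test-node connections (values are only logged here, since the localhost peers between test nodes fall under the unroutable network):

```python
#!/usr/bin/env python3
"""Sketch: reading the new getpeerinfo 'network' field (illustrative)."""
from test_framework.test_framework import BitcoinTestFramework


class PeerNetworkFieldSketch(BitcoinTestFramework):
    def set_test_params(self):
        self.num_nodes = 2

    def run_test(self):
        # Each peer entry now carries a 'network' string as returned by
        # GetNetworkName(); per the constants added in the diff, the
        # unroutable network is reported as the empty string.
        for peer in self.nodes[0].getpeerinfo():
            self.log.info("peer %s is on network %r" % (peer["addr"], peer["network"]))
            assert "network" in peer


if __name__ == '__main__':
    PeerNetworkFieldSketch().main()
```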
diff --git a/test/functional/feature_pruning.py b/test/functional/feature_pruning.py
index 02fa88f7c8..f09bffe2d4 100755
--- a/test/functional/feature_pruning.py
+++ b/test/functional/feature_pruning.py
@@ -18,9 +18,6 @@ from test_framework.util import (
assert_equal,
assert_greater_than,
assert_raises_rpc_error,
- connect_nodes,
- disconnect_nodes,
- wait_until,
)
# Rescans start at the earliest block up to 2 hours before a key timestamp, so
@@ -103,18 +100,17 @@ class PruneTest(BitcoinTestFramework):
self.prunedir = os.path.join(self.nodes[2].datadir, self.chain, 'blocks', '')
- connect_nodes(self.nodes[0], 1)
- connect_nodes(self.nodes[1], 2)
- connect_nodes(self.nodes[0], 2)
- connect_nodes(self.nodes[0], 3)
- connect_nodes(self.nodes[0], 4)
+ self.connect_nodes(0, 1)
+ self.connect_nodes(1, 2)
+ self.connect_nodes(0, 2)
+ self.connect_nodes(0, 3)
+ self.connect_nodes(0, 4)
self.sync_blocks(self.nodes[0:5])
def setup_nodes(self):
self.add_nodes(self.num_nodes, self.extra_args)
self.start_nodes()
- for n in self.nodes:
- n.importprivkey(privkey=n.get_deterministic_priv_key().key, label='coinbase', rescan=False)
+ self.import_deterministic_coinbase_privkeys()
def create_big_chain(self):
# Start by creating some coinbases we can spend later
@@ -136,7 +132,7 @@ class PruneTest(BitcoinTestFramework):
mine_large_blocks(self.nodes[0], 25)
# Wait for blk00000.dat to be pruned
- wait_until(lambda: not os.path.isfile(os.path.join(self.prunedir, "blk00000.dat")), timeout=30)
+ self.wait_until(lambda: not os.path.isfile(os.path.join(self.prunedir, "blk00000.dat")), timeout=30)
self.log.info("Success")
usage = calc_usage(self.prunedir)
@@ -150,8 +146,8 @@ class PruneTest(BitcoinTestFramework):
for _ in range(12):
# Disconnect node 0 so it can mine a longer reorg chain without knowing about node 1's soon-to-be-stale chain
# Node 2 stays connected, so it hears about the stale blocks and then reorg's when node0 reconnects
- disconnect_nodes(self.nodes[0], 1)
- disconnect_nodes(self.nodes[0], 2)
+ self.disconnect_nodes(0, 1)
+ self.disconnect_nodes(0, 2)
# Mine 24 blocks in node 1
mine_large_blocks(self.nodes[1], 24)
@@ -159,8 +155,8 @@ class PruneTest(BitcoinTestFramework):
mine_large_blocks(self.nodes[0], 25)
# Create connections in the order so both nodes can see the reorg at the same time
- connect_nodes(self.nodes[0], 1)
- connect_nodes(self.nodes[0], 2)
+ self.connect_nodes(0, 1)
+ self.connect_nodes(0, 2)
self.sync_blocks(self.nodes[0:3])
self.log.info("Usage can be over target because of high stale rate: %d" % calc_usage(self.prunedir))
@@ -189,15 +185,15 @@ class PruneTest(BitcoinTestFramework):
self.log.info("New best height: %d" % self.nodes[1].getblockcount())
# Disconnect node1 and generate the new chain
- disconnect_nodes(self.nodes[0], 1)
- disconnect_nodes(self.nodes[1], 2)
+ self.disconnect_nodes(0, 1)
+ self.disconnect_nodes(1, 2)
self.log.info("Generating new longer chain of 300 more blocks")
self.nodes[1].generate(300)
self.log.info("Reconnect nodes")
- connect_nodes(self.nodes[0], 1)
- connect_nodes(self.nodes[1], 2)
+ self.connect_nodes(0, 1)
+ self.connect_nodes(1, 2)
self.sync_blocks(self.nodes[0:3], timeout=120)
self.log.info("Verify height on node 2: %d" % self.nodes[2].getblockcount())
@@ -250,7 +246,7 @@ class PruneTest(BitcoinTestFramework):
self.log.info("Verify node 2 reorged back to the main chain, some blocks of which it had to redownload")
# Wait for Node 2 to reorg to proper height
- wait_until(lambda: self.nodes[2].getblockcount() >= goalbestheight, timeout=900)
+ self.wait_until(lambda: self.nodes[2].getblockcount() >= goalbestheight, timeout=900)
assert_equal(self.nodes[2].getbestblockhash(), goalbesthash)
# Verify we can now have the data for a block previously pruned
assert_equal(self.nodes[2].getblock(self.forkhash)["height"], self.forkheight)
@@ -338,7 +334,7 @@ class PruneTest(BitcoinTestFramework):
# check that wallet loads successfully when restarting a pruned node after IBD.
# this was reported to fail in #7494.
self.log.info("Syncing node 5 to test wallet")
- connect_nodes(self.nodes[0], 5)
+ self.connect_nodes(0, 5)
nds = [self.nodes[0], self.nodes[5]]
self.sync_blocks(nds, wait=5, timeout=300)
self.restart_node(5, extra_args=["-prune=550"]) # restart to trigger rescan
diff --git a/test/functional/feature_segwit.py b/test/functional/feature_segwit.py
index 0842972779..7bd2fc7847 100755
--- a/test/functional/feature_segwit.py
+++ b/test/functional/feature_segwit.py
@@ -22,7 +22,6 @@ from test_framework.util import (
assert_equal,
assert_is_hex_string,
assert_raises_rpc_error,
- connect_nodes,
hex_str_to_bytes,
try_rpc,
)
@@ -61,14 +60,12 @@ class SegWitTest(BitcoinTestFramework):
],
[
"-acceptnonstdtxn=1",
- "-blockversion=4",
"-rpcserialversion=1",
"-segwitheight=432",
"-addresstype=legacy",
],
[
"-acceptnonstdtxn=1",
- "-blockversion=536870915",
"-segwitheight=432",
"-addresstype=legacy",
],
@@ -80,7 +77,7 @@ class SegWitTest(BitcoinTestFramework):
def setup_network(self):
super().setup_network()
- connect_nodes(self.nodes[0], 2)
+ self.connect_nodes(0, 2)
self.sync_all()
def success_mine(self, node, txid, sign, redeem_script=""):
diff --git a/test/functional/feature_settings.py b/test/functional/feature_settings.py
index c565854bb0..5a0236401d 100755
--- a/test/functional/feature_settings.py
+++ b/test/functional/feature_settings.py
@@ -17,6 +17,7 @@ class SettingsTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
+ self.wallet_names = []
def run_test(self):
node, = self.nodes
@@ -30,19 +31,25 @@ class SettingsTest(BitcoinTestFramework):
# Assert settings are parsed and logged
with settings.open("w") as fp:
- json.dump({"string": "string", "num": 5, "bool": True, "null": None, "list": [6,7]}, fp)
+ json.dump({"string": "string", "num": 5, "bool": True, "null": None, "list": [6, 7]}, fp)
with node.assert_debug_log(expected_msgs=[
+ 'Ignoring unknown rw_settings value bool',
+ 'Ignoring unknown rw_settings value list',
+ 'Ignoring unknown rw_settings value null',
+ 'Ignoring unknown rw_settings value num',
+ 'Ignoring unknown rw_settings value string',
'Setting file arg: string = "string"',
'Setting file arg: num = 5',
'Setting file arg: bool = true',
'Setting file arg: null = null',
- 'Setting file arg: list = [6,7]']):
+ 'Setting file arg: list = [6,7]',
+ ]):
self.start_node(0)
self.stop_node(0)
# Assert settings are unchanged after shutdown
with settings.open() as fp:
- assert_equal(json.load(fp), {"string": "string", "num": 5, "bool": True, "null": None, "list": [6,7]})
+ assert_equal(json.load(fp), {"string": "string", "num": 5, "bool": True, "null": None, "list": [6, 7]})
# Test invalid json
with settings.open("w") as fp:
diff --git a/test/functional/feature_shutdown.py b/test/functional/feature_shutdown.py
index d782d3b1d8..a76e0f1b50 100755
--- a/test/functional/feature_shutdown.py
+++ b/test/functional/feature_shutdown.py
@@ -5,7 +5,7 @@
"""Test bitcoind shutdown."""
from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import assert_equal, get_rpc_proxy, wait_until
+from test_framework.util import assert_equal, get_rpc_proxy
from threading import Thread
def test_long_call(node):
@@ -25,7 +25,7 @@ class ShutdownTest(BitcoinTestFramework):
node.getblockcount()
Thread(target=test_long_call, args=(node,)).start()
# Wait until the server is executing the above `waitfornewblock`.
- wait_until(lambda: len(self.nodes[0].getrpcinfo()['active_commands']) == 2)
+ self.wait_until(lambda: len(self.nodes[0].getrpcinfo()['active_commands']) == 2)
# Wait 1 second after requesting shutdown but not before the `stop` call
# finishes. This is to ensure event loop waits for current connections
# to close.
diff --git a/test/functional/feature_signet.py b/test/functional/feature_signet.py
new file mode 100755
index 0000000000..96c581dede
--- /dev/null
+++ b/test/functional/feature_signet.py
@@ -0,0 +1,74 @@
+#!/usr/bin/env python3
+# Copyright (c) 2019-2020 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test basic signet functionality"""
+
+from decimal import Decimal
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import assert_equal
+
+signet_blocks = [
+ '00000020f61eee3b63a380a477a063af32b2bbc97c9ff9f01f2c4225e973988108000000f575c83235984e7dc4afc1f30944c170462e84437ab6f2d52e16878a79e4678bd1914d5fae77031eccf4070001010000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff025151feffffff0200f2052a010000001600149243f727dd5343293eb83174324019ec16c2630f0000000000000000776a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf94c4fecc7daa2490047304402205e423a8754336ca99dbe16509b877ef1bf98d008836c725005b3c787c41ebe46022047246e4467ad7cc7f1ad98662afcaf14c115e0095a227c7b05c5182591c23e7e01000120000000000000000000000000000000000000000000000000000000000000000000000000',
+ '00000020533b53ded9bff4adc94101d32400a144c54edc5ed492a3b26c63b2d686000000b38fef50592017cfafbcab88eb3d9cf50b2c801711cad8299495d26df5e54812e7914d5fae77031ecfdd0b0001010000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff025251feffffff0200f2052a01000000160014fd09839740f0e0b4fc6d5e2527e4022aa9b89dfa0000000000000000776a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf94c4fecc7daa24900473044022031d64a1692cdad1fc0ced69838169fe19ae01be524d831b95fcf5ea4e6541c3c02204f9dea0801df8b4d0cd0857c62ab35c6c25cc47c930630dc7fe723531daa3e9b01000120000000000000000000000000000000000000000000000000000000000000000000000000',
+ '000000202960f3752f0bfa8858a3e333294aedc7808025e868c9dc03e71d88bb320000007765fcd3d5b4966beb338bba2675dc2cf2ad28d4ad1d83bdb6f286e7e27ac1f807924d5fae77031e81d60b0001010000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff025351feffffff0200f2052a010000001600141e5fb426042692ae0e87c070e78c39307a5661c20000000000000000776a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf94c4fecc7daa2490047304402205de93694763a42954865bcf1540cb82958bc62d0ec4eee02070fb7937cd037f4022067f333753bce47b10bc25eb6e1f311482e994c862a7e0b2d41ab1c8679fd1b1101000120000000000000000000000000000000000000000000000000000000000000000000000000',
+ '00000020b06443a13ae1d3d50faef5ecad38c6818194dc46abca3e972e2aacdae800000069a5829097e80fee00ac49a56ea9f82d741a6af84d32b3bc455cf31871e2a8ac27924d5fae77031e9c91050001010000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff025451feffffff0200f2052a0100000016001430db2f8225dcf7751361ab38735de08190318cb70000000000000000776a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf94c4fecc7daa2490047304402200936f5f9872f6df5dd242026ad52241a68423f7f682e79169a8d85a374eab9b802202cd2979c48b321b3453e65e8f92460db3fca93cbea8539b450c959f4fbe630c601000120000000000000000000000000000000000000000000000000000000000000000000000000',
+ '000000207ed403758a4f228a1939418a155e2ebd4ae6b26e5ffd0ae433123f7694010000542e80b609c5bc58af5bdf492e26d4f60cd43a3966c2e063c50444c29b3757a636924d5fae77031ee8601d0001010000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff025551feffffff0200f2052a01000000160014edc207e014df34fa3885dff97d1129d356e1186a0000000000000000776a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf94c4fecc7daa24900473044022021a3656609f85a66a2c5672ed9322c2158d57251040d2716ed202a1fe14f0c12022057d68bc6611f7a9424a7e00bbf3e27e6ae6b096f60bac624a094bc97a59aa1ff01000120000000000000000000000000000000000000000000000000000000000000000000000000',
+ '000000205bea0a88d1422c3df08d766ad72df95084d0700e6f873b75dd4e986c7703000002b57516d33ed60c2bdd9f93d6d5614083324c837e68e5ba6e04287a7285633585924d5fae77031ed171960001010000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff025651feffffff0200f2052a010000001600143ae612599cf96f2442ce572633e0251116eaa52f0000000000000000776a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf94c4fecc7daa24900473044022059a7c54de76bfdbb1dd44c78ea2dbd2bb4e97f4abad38965f41e76433e56423c022054bf17f04fe17415c0141f60eebd2b839200f574d8ad8d55a0917b92b0eb913401000120000000000000000000000000000000000000000000000000000000000000000000000000',
+ '00000020daf3b60d374b19476461f97540498dcfa2eb7016238ec6b1d022f82fb60100007a7ae65b53cb988c2ec92d2384996713821d5645ffe61c9acea60da75cd5edfa1a944d5fae77031e9dbb050001010000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff025751feffffff0200f2052a01000000160014ef2dceae02e35f8137de76768ae3345d99ca68860000000000000000776a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf94c4fecc7daa2490047304402202b3f946d6447f9bf17d00f3696cede7ee70b785495e5498274ee682a493befd5022045fc0bcf9332243168b5d35507175f9f374a8eba2336873885d12aada67ea5f601000120000000000000000000000000000000000000000000000000000000000000000000000000',
+ '00000020457cc5f3c2e1a5655bc20e20e48d33e1b7ea68786c614032b5c518f0b6000000541f36942d82c6e7248275ff15c8933487fbe1819c67a9ecc0f4b70bb7e6cf672a944d5fae77031e8f39860001010000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff025851feffffff0200f2052a0100000016001472a27906947c06d034b38ba2fa13c6391a4832790000000000000000776a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf94c4fecc7daa2490047304402202d62805ce60cbd60591f97f949b5ea5bd7e2307bcde343e6ea8394da92758e72022053a25370b0aa20da100189b7899a8f8675a0fdc60e38ece6b8a4f98edd94569e01000120000000000000000000000000000000000000000000000000000000000000000000000000',
+ '00000020a2eb61eb4f3831baa3a3363e1b42db4462663f756f07423e81ed30322102000077224de7dea0f8d0ec22b1d2e2e255f0a987b96fe7200e1a2e6373f48a2f5b7894954d5fae77031e36867e0001010000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff025951feffffff0200f2052a01000000160014aa0ad9f26801258382e0734dceec03a4a75f60240000000000000000776a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf94c4fecc7daa2490047304402206fa0d59990eed369bd7375767c9a6c9369fae209152b8674e520da270605528c0220749eed3b12dbe3f583f505d21803e4aef59c8e24c5831951eafa4f15a8f92c4e01000120000000000000000000000000000000000000000000000000000000000000000000000000',
+ '00000020a868e8514be5e46dabd6a122132f423f36a43b716a40c394e2a8d063e1010000f4c6c717e99d800c699c25a2006a75a0c5c09f432a936f385e6fce139cdbd1a5e9964d5fae77031e7d026e0001010000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff025a51feffffff0200f2052a01000000160014aaa671c82b138e3b8f510cd801e5f2bd0aa305940000000000000000776a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf94c4fecc7daa24900473044022042309f4c3c7a1a2ac8c24f890f962df1c0086cec10be0868087cfc427520cb2702201dafee8911c269b7e786e242045bb57cef3f5b0f177010c6159abae42f646cc501000120000000000000000000000000000000000000000000000000000000000000000000000000',
+]
+
+
+class SignetBasicTest(BitcoinTestFramework):
+ def set_test_params(self):
+ self.chain = "signet"
+ self.num_nodes = 6
+ self.setup_clean_chain = True
+ shared_args1 = ["-signetchallenge=51"] # OP_TRUE
+ shared_args2 = [] # default challenge
+ # we use the same keys as the default challenge, but require both signatures (2-of-2), so the pregenerated signet blocks should fail validation against it
+ shared_args3 = ["-signetchallenge=522103ad5e0edad18cb1f0fc0d28a3d4f1f3e445640337489abb10404f2d1e086be430210359ef5021964fe22d6f8e05b2463c9540ce96883fe3b278760f048f5189f2e6c452ae"]
+
+ self.extra_args = [
+ shared_args1, shared_args1,
+ shared_args2, shared_args2,
+ shared_args3, shared_args3,
+ ]
+
+ def run_test(self):
+ self.log.info("basic tests using OP_TRUE challenge")
+
+ self.log.info('getmininginfo')
+ mining_info = self.nodes[0].getmininginfo()
+ assert_equal(mining_info['blocks'], 0)
+ assert_equal(mining_info['chain'], 'signet')
+ assert 'currentblocktx' not in mining_info
+ assert 'currentblockweight' not in mining_info
+ assert_equal(mining_info['networkhashps'], Decimal('0'))
+ assert_equal(mining_info['pooledtx'], 0)
+
+ self.nodes[0].generate(1)
+
+ self.log.info("pregenerated signet blocks check")
+
+ height = 0
+ for block in signet_blocks:
+ assert_equal(self.nodes[2].submitblock(block), None)
+ height += 1
+ assert_equal(self.nodes[2].getblockcount(), height)
+
+ self.log.info("pregenerated signet blocks check (incompatible solution)")
+
+ assert_equal(self.nodes[4].submitblock(signet_blocks[0]), 'bad-signet-blksig')
+
+ self.log.info("test that signet logs the network magic on node start")
+ with self.nodes[0].assert_debug_log(["Signet derived magic (message start)"]):
+ self.restart_node(0)
+
+
+if __name__ == '__main__':
+ SignetBasicTest().main()
diff --git a/test/functional/feature_taproot.py b/test/functional/feature_taproot.py
new file mode 100755
index 0000000000..7b534c1c2f
--- /dev/null
+++ b/test/functional/feature_taproot.py
@@ -0,0 +1,1458 @@
+#!/usr/bin/env python3
+# Copyright (c) 2019-2020 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test Taproot softfork (BIPs 340-342)."""
+
+from test_framework.blocktools import (
+ create_coinbase,
+ create_block,
+ add_witness_commitment,
+ MAX_BLOCK_SIGOPS_WEIGHT,
+ WITNESS_SCALE_FACTOR,
+)
+from test_framework.messages import (
+ COutPoint,
+ CTransaction,
+ CTxIn,
+ CTxInWitness,
+ CTxOut,
+ ToHex,
+)
+from test_framework.script import (
+ ANNEX_TAG,
+ CScript,
+ CScriptNum,
+ CScriptOp,
+ LEAF_VERSION_TAPSCRIPT,
+ LegacySignatureHash,
+ LOCKTIME_THRESHOLD,
+ MAX_SCRIPT_ELEMENT_SIZE,
+ OP_0,
+ OP_1,
+ OP_2,
+ OP_3,
+ OP_4,
+ OP_5,
+ OP_6,
+ OP_7,
+ OP_8,
+ OP_9,
+ OP_10,
+ OP_11,
+ OP_12,
+ OP_16,
+ OP_2DROP,
+ OP_2DUP,
+ OP_CHECKMULTISIG,
+ OP_CHECKMULTISIGVERIFY,
+ OP_CHECKSIG,
+ OP_CHECKSIGADD,
+ OP_CHECKSIGVERIFY,
+ OP_CODESEPARATOR,
+ OP_DROP,
+ OP_DUP,
+ OP_ELSE,
+ OP_ENDIF,
+ OP_EQUAL,
+ OP_EQUALVERIFY,
+ OP_HASH160,
+ OP_IF,
+ OP_NOP,
+ OP_NOT,
+ OP_NOTIF,
+ OP_PUSHDATA1,
+ OP_RETURN,
+ OP_SWAP,
+ OP_VERIFY,
+ SIGHASH_DEFAULT,
+ SIGHASH_ALL,
+ SIGHASH_NONE,
+ SIGHASH_SINGLE,
+ SIGHASH_ANYONECANPAY,
+ SegwitV0SignatureHash,
+ TaprootSignatureHash,
+ is_op_success,
+ taproot_construct,
+)
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import assert_raises_rpc_error, assert_equal
+from test_framework.key import generate_privkey, compute_xonly_pubkey, sign_schnorr, tweak_add_privkey, ECKey
+from test_framework.address import (
+ hash160,
+ sha256,
+)
+from collections import OrderedDict, namedtuple
+from io import BytesIO
+import json
+import hashlib
+import os
+import random
+
+# === Framework for building spending transactions. ===
+#
+# The computation is represented as a "context" dict, whose entries store potentially-unevaluated expressions that
+# refer to lower-level ones. By overwriting these expressions, many aspects - both high and low level - of the signing
+# process can be overridden.
+#
+# Specifically, a context object is a dict that maps names to compositions of:
+# - values
+# - lists of values
+# - callables which, when fed the context object as argument, produce any of these
+#
+# The DEFAULT_CONTEXT object specifies a standard signing process, with many overridable knobs.
+#
+# The get(ctx, name) function can evaluate a name, and cache its result in the context.
+# getter(name) can be used to construct a callable that evaluates name. For example:
+#
+# ctx1 = {**DEFAULT_CONTEXT, "inputs": [getter("sign"), b'\x01']}
+#
+# creates a context where the script inputs are a signature plus the bytes 0x01.
+#
+# override(expr, name1=expr1, name2=expr2, ...) can be used to cause an expression to be evaluated in a selectively
+# modified context. For example:
+#
+# ctx2 = {**DEFAULT_CONTEXT, "sighash": override(default_sighash, hashtype=SIGHASH_DEFAULT)}
+#
+# creates a context ctx2 where the sighash is modified to use hashtype=SIGHASH_DEFAULT. This differs from
+#
+# ctx3 = {**DEFAULT_CONTEXT, "hashtype": SIGHASH_DEFAULT}
+#
+# in that ctx3 will globally use hashtype=SIGHASH_DEFAULT (including in the hashtype byte appended to the signature)
+# while ctx2 only uses the modified hashtype inside the sighash calculation.
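+#
+# A toy example, for illustration only (not used by the tests below):
+#
+#     ctx = {"a": 1, "b": lambda ctx: get(ctx, "a") + 1, "c": getter("b")}
+#     get(ctx, "c")  # evaluates "b" (which in turn evaluates "a"), caches both results, and returns 2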
+
+def deep_eval(ctx, expr):
+ """Recursively replace any callables c in expr (including inside lists) with c(ctx)."""
+ while callable(expr):
+ expr = expr(ctx)
+ if isinstance(expr, list):
+ expr = [deep_eval(ctx, x) for x in expr]
+ return expr
+
+# Data type to represent fully-evaluated expressions in a context dict (so we can avoid reevaluating them).
+Final = namedtuple("Final", "value")
+
+def get(ctx, name):
+ """Evaluate name in context ctx."""
+ assert name in ctx, "Missing '%s' in context" % name
+ expr = ctx[name]
+ if not isinstance(expr, Final):
+ # Evaluate and cache the result.
+ expr = Final(deep_eval(ctx, expr))
+ ctx[name] = expr
+ return expr.value
+
+def getter(name):
+ """Return a callable that evaluates name in its passed context."""
+ return lambda ctx: get(ctx, name)
+
+def override(expr, **kwargs):
+ """Return a callable that evaluates expr in a modified context."""
+ return lambda ctx: deep_eval({**ctx, **kwargs}, expr)
+
+# === Implementations for the various default expressions in DEFAULT_CONTEXT ===
+
+def default_hashtype(ctx):
+ """Default expression for "hashtype": SIGHASH_DEFAULT for taproot, SIGHASH_ALL otherwise."""
+ mode = get(ctx, "mode")
+ if mode == "taproot":
+ return SIGHASH_DEFAULT
+ else:
+ return SIGHASH_ALL
+
+def default_tapleaf(ctx):
+ """Default expression for "tapleaf": looking up leaf in tap[2]."""
+ return get(ctx, "tap").leaves[get(ctx, "leaf")]
+
+def default_script_taproot(ctx):
+ """Default expression for "script_taproot": tapleaf.script."""
+ return get(ctx, "tapleaf").script
+
+def default_leafversion(ctx):
+ """Default expression for "leafversion": tapleaf.version"""
+ return get(ctx, "tapleaf").version
+
+def default_negflag(ctx):
+ """Default expression for "negflag": tap.negflag."""
+ return get(ctx, "tap").negflag
+
+def default_pubkey_inner(ctx):
+ """Default expression for "pubkey_inner": tap.inner_pubkey."""
+ return get(ctx, "tap").inner_pubkey
+
+def default_merklebranch(ctx):
+ """Default expression for "merklebranch": tapleaf.merklebranch."""
+ return get(ctx, "tapleaf").merklebranch
+
+def default_controlblock(ctx):
+ """Default expression for "controlblock": combine leafversion, negflag, pubkey_inner, merklebranch."""
+ return bytes([get(ctx, "leafversion") + get(ctx, "negflag")]) + get(ctx, "pubkey_inner") + get(ctx, "merklebranch")
+
+def default_sighash(ctx):
+ """Default expression for "sighash": depending on mode, compute BIP341, BIP143, or legacy sighash."""
+ tx = get(ctx, "tx")
+ idx = get(ctx, "idx")
+ hashtype = get(ctx, "hashtype_actual")
+ mode = get(ctx, "mode")
+ if mode == "taproot":
+ # BIP341 signature hash
+ utxos = get(ctx, "utxos")
+ annex = get(ctx, "annex")
+ if get(ctx, "leaf") is not None:
+ codeseppos = get(ctx, "codeseppos")
+ leaf_ver = get(ctx, "leafversion")
+ script = get(ctx, "script_taproot")
+ return TaprootSignatureHash(tx, utxos, hashtype, idx, scriptpath=True, script=script, leaf_ver=leaf_ver, codeseparator_pos=codeseppos, annex=annex)
+ else:
+ return TaprootSignatureHash(tx, utxos, hashtype, idx, scriptpath=False, annex=annex)
+ elif mode == "witv0":
+ # BIP143 signature hash
+ scriptcode = get(ctx, "scriptcode")
+ utxos = get(ctx, "utxos")
+ return SegwitV0SignatureHash(scriptcode, tx, idx, hashtype, utxos[idx].nValue)
+ else:
+ # Pre-segwit signature hash
+ scriptcode = get(ctx, "scriptcode")
+ return LegacySignatureHash(scriptcode, tx, idx, hashtype)[0]
+
+def default_tweak(ctx):
+ """Default expression for "tweak": None if a leaf is specified, tap[0] otherwise."""
+ if get(ctx, "leaf") is None:
+ return get(ctx, "tap").tweak
+ return None
+
+def default_key_tweaked(ctx):
+ """Default expression for "key_tweaked": key if tweak is None, tweaked with it otherwise."""
+ key = get(ctx, "key")
+ tweak = get(ctx, "tweak")
+ if tweak is None:
+ return key
+ else:
+ return tweak_add_privkey(key, tweak)
+
+def default_signature(ctx):
+ """Default expression for "signature": BIP340 signature or ECDSA signature depending on mode."""
+ sighash = get(ctx, "sighash")
+ if get(ctx, "mode") == "taproot":
+ key = get(ctx, "key_tweaked")
+ flip_r = get(ctx, "flag_flip_r")
+ flip_p = get(ctx, "flag_flip_p")
+ return sign_schnorr(key, sighash, flip_r=flip_r, flip_p=flip_p)
+ else:
+ key = get(ctx, "key")
+ return key.sign_ecdsa(sighash)
+
+def default_hashtype_actual(ctx):
+ """Default expression for "hashtype_actual": hashtype, unless mismatching SIGHASH_SINGLE in taproot."""
+ hashtype = get(ctx, "hashtype")
+ mode = get(ctx, "mode")
+ if mode != "taproot":
+ return hashtype
+ idx = get(ctx, "idx")
+ tx = get(ctx, "tx")
+ if hashtype & 3 == SIGHASH_SINGLE and idx >= len(tx.vout):
+ return (hashtype & ~3) | SIGHASH_NONE
+ return hashtype
+
+def default_bytes_hashtype(ctx):
+ """Default expression for "bytes_hashtype": bytes([hashtype_actual]) if not 0, b"" otherwise."""
+ return bytes([x for x in [get(ctx, "hashtype_actual")] if x != 0])
+
+def default_sign(ctx):
+ """Default expression for "sign": concatenation of signature and bytes_hashtype."""
+ return get(ctx, "signature") + get(ctx, "bytes_hashtype")
+
+def default_inputs_keypath(ctx):
+ """Default expression for "inputs_keypath": a signature."""
+ return [get(ctx, "sign")]
+
+def default_witness_taproot(ctx):
+ """Default expression for "witness_taproot", consisting of inputs, script, control block, and annex as needed."""
+ annex = get(ctx, "annex")
+ suffix_annex = []
+ if annex is not None:
+ suffix_annex = [annex]
+ if get(ctx, "leaf") is None:
+ return get(ctx, "inputs_keypath") + suffix_annex
+ else:
+ return get(ctx, "inputs") + [bytes(get(ctx, "script_taproot")), get(ctx, "controlblock")] + suffix_annex
+
+def default_witness_witv0(ctx):
+ """Default expression for "witness_witv0", consisting of inputs and witness script, as needed."""
+ script = get(ctx, "script_witv0")
+ inputs = get(ctx, "inputs")
+ if script is None:
+ return inputs
+ else:
+ return inputs + [script]
+
+def default_witness(ctx):
+ """Default expression for "witness", delegating to "witness_taproot" or "witness_witv0" as needed."""
+ mode = get(ctx, "mode")
+ if mode == "taproot":
+ return get(ctx, "witness_taproot")
+ elif mode == "witv0":
+ return get(ctx, "witness_witv0")
+ else:
+ return []
+
+def default_scriptsig(ctx):
+ """Default expression for "scriptsig", consisting of inputs and redeemscript, as needed."""
+ scriptsig = []
+ mode = get(ctx, "mode")
+ if mode == "legacy":
+ scriptsig = get(ctx, "inputs")
+ redeemscript = get(ctx, "script_p2sh")
+ if redeemscript is not None:
+ scriptsig += [bytes(redeemscript)]
+ return scriptsig
+
+# The default context object.
+DEFAULT_CONTEXT = {
+ # == The main expressions to evaluate. Only override these for unusual or invalid spends. ==
+ # The overall witness stack, as a list of bytes objects.
+ "witness": default_witness,
+ # The overall scriptsig, as a list of CScript objects (to be concatenated) and bytes objects (to be pushed)
+ "scriptsig": default_scriptsig,
+
+ # == Expressions you'll generally only override for intentionally invalid spends. ==
+ # The witness stack for spending a taproot output.
+ "witness_taproot": default_witness_taproot,
+ # The witness stack for spending a P2WPKH/P2WSH output.
+ "witness_witv0": default_witness_witv0,
+ # The script inputs for a taproot key path spend.
+ "inputs_keypath": default_inputs_keypath,
+ # The actual hashtype to use (usually equal to hashtype, but in taproot SIGHASH_SINGLE is not always allowed).
+ "hashtype_actual": default_hashtype_actual,
+ # The bytes object for a full signature (including hashtype byte, if needed).
+ "bytes_hashtype": default_bytes_hashtype,
+ # A full script signature (bytes including hashtype, if needed)
+ "sign": default_sign,
+ # An ECDSA or Schnorr signature (excluding hashtype byte).
+ "signature": default_signature,
+ # The 32-byte tweaked key (equal to key for script path spends, or key+tweak for key path spends).
+ "key_tweaked": default_key_tweaked,
+ # The tweak to use (None for script path spends, the actual tweak for key path spends).
+ "tweak": default_tweak,
+ # The sighash value (32 bytes)
+ "sighash": default_sighash,
+ # The information about the chosen script path spend (TaprootLeafInfo object).
+ "tapleaf": default_tapleaf,
+ # The script to push, and include in the sighash, for a taproot script path spend.
+ "script_taproot": default_script_taproot,
+ # The inner pubkey for a taproot script path spend (32 bytes).
+ "pubkey_inner": default_pubkey_inner,
+ # The negation flag of the inner pubkey for a taproot script path spend.
+ "negflag": default_negflag,
+ # The leaf version to include in the sighash (this does not affect the one in the control block).
+ "leafversion": default_leafversion,
+ # The Merkle path to include in the control block for a script path spend.
+ "merklebranch": default_merklebranch,
+ # The control block to push for a taproot script path spend.
+ "controlblock": default_controlblock,
+ # Whether to produce signatures with invalid P sign (Schnorr signatures only).
+ "flag_flip_p": False,
+ # Whether to produce signatures with invalid R sign (Schnorr signatures only).
+ "flag_flip_r": False,
+
+ # == Parameters that can be changed without invalidating, but do have a default: ==
+ # The hashtype (as an integer).
+ "hashtype": default_hashtype,
+ # The annex (only when mode=="taproot").
+ "annex": None,
+ # The codeseparator position (only when mode=="taproot").
+ "codeseppos": -1,
+ # The redeemscript to add to the scriptSig (if P2SH; None implies not P2SH).
+ "script_p2sh": None,
+ # The script to add to the witness (if P2WSH; None implies P2WPKH).
+ "script_witv0": None,
+ # The leaf to use in taproot spends (if script path spend; None implies key path spend).
+ "leaf": None,
+ # The input arguments to provide to the executed script
+ "inputs": [],
+
+ # == Parameters to be set before evaluation: ==
+ # - mode: what spending style to use ("taproot", "witv0", or "legacy").
+ # - key: the (untweaked) private key to sign with (ECKey object for ECDSA, 32 bytes for Schnorr).
+ # - tap: the TaprootInfo object (see taproot_construct; needed in mode=="taproot").
+ # - tx: the transaction to sign.
+ # - utxos: the UTXOs being spent (needed in mode=="witv0" and mode=="taproot").
+ # - idx: the input position being signed.
+ # - scriptcode: the scriptcode to include in legacy and witv0 sighashes.
+}
+
+def flatten(lst):
+ ret = []
+ for elem in lst:
+ if isinstance(elem, list):
+ ret += flatten(elem)
+ else:
+ ret.append(elem)
+ return ret
+
+def spend(tx, idx, utxos, **kwargs):
+ """Sign transaction input idx of tx, provided utxos is the list of outputs being spent.
+
+ Additional arguments may be provided that override any aspect of the signing process.
+ See DEFAULT_CONTEXT above for what can be overridden, and what must be provided.
+ """
+
+ ctx = {**DEFAULT_CONTEXT, "tx":tx, "idx":idx, "utxos":utxos, **kwargs}
+
+ def to_script(elem):
+ """If fed a CScript, return it; if fed bytes, return a CScript that pushes it."""
+ if isinstance(elem, CScript):
+ return elem
+ else:
+ return CScript([elem])
+
+ scriptsig_list = flatten(get(ctx, "scriptsig"))
+ scriptsig = CScript(b"".join(bytes(to_script(elem)) for elem in scriptsig_list))
+ witness_stack = flatten(get(ctx, "witness"))
+ return (scriptsig, witness_stack)
+
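+# A minimal usage sketch (hypothetical variable names, for illustration only), assuming
+# tx.wit.vtxinwit already has an entry for input 0:
+#
+#     scriptsig, witness_stack = spend(tx, 0, spent_utxos, mode="taproot", key=sec, tap=tap)
+#     tx.vin[0].scriptSig = scriptsig
+#     tx.wit.vtxinwit[0].scriptWitness.stack = witness_stack
+#
+# Any other DEFAULT_CONTEXT entry (hashtype, annex, leaf, ...) can be overridden the same way through keyword arguments.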
+
+# === Spender objects ===
+#
+# Each spender is a tuple of:
+# - A scriptPubKey which is to be spent from (CScript)
+# - A comment describing the test (string)
+# - Whether the spending (on itself) is expected to be standard (bool)
+# - A tx-signing lambda returning (scriptsig, witness_stack), taking as inputs:
+# - A transaction to sign (CTransaction)
+# - An input position (int)
+# - The spent UTXOs by this transaction (list of CTxOut)
+# - Whether to produce a valid spend (bool)
+# - A string with an expected error message for failure case if known
+# - The (pre-taproot) sigops weight consumed by a successful spend
+# - Whether this spend cannot fail
+# - Whether this test demands being placed in a txin with no corresponding txout (for testing SIGHASH_SINGLE behavior)
+
+Spender = namedtuple("Spender", "script,comment,is_standard,sat_function,err_msg,sigops_weight,no_fail,need_vin_vout_mismatch")
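+
+# For illustration: a Spender's sat_function is invoked as
+#
+#     scriptsig, witness_stack = spender.sat_function(tx, idx, spent_utxos, valid)
+#
+# where valid=False selects the intentionally invalid variant configured via make_spender's "failure" dict below.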
+
+def make_spender(comment, *, tap=None, witv0=False, script=None, pkh=None, p2sh=False, spk_mutate_pre_p2sh=None, failure=None, standard=True, err_msg=None, sigops_weight=0, need_vin_vout_mismatch=False, **kwargs):
+ """Helper for constructing Spender objects using the context signing framework.
+
+ * tap: a TaprootInfo object (see taproot_construct), for Taproot spends (cannot be combined with pkh, witv0, or script)
+ * witv0: boolean indicating the use of witness v0 spending (needs one of script or pkh)
+ * script: the actual script executed (for bare/P2WSH/P2SH spending)
+ * pkh: the public key for P2PKH or P2WPKH spending
+ * p2sh: whether the output is P2SH-wrapped (this is supported even for Taproot, where it makes the output unencumbered)
+ * spk_mutate_pre_p2sh: a callable to be applied to the scriptPubKey (before potentially P2SH-wrapping it)
+ * failure: a dict of entries to override in the context when intentionally failing to spend (if None, no_fail will be set)
+ * standard: whether the (valid version of) spending is expected to be standard
+ * err_msg: a string with an expected error message for failure (or None, if not cared about)
+ * sigops_weight: the pre-taproot sigops weight consumed by a successful spend
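+ * need_vin_vout_mismatch: whether the test demands being placed in a txin with no corresponding txout (for exercising SIGHASH_SINGLE behavior)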
+ """
+
+ conf = dict()
+
+ # Compute scriptPubKey and set useful defaults based on the inputs.
+ if witv0:
+ assert tap is None
+ conf["mode"] = "witv0"
+ if pkh is not None:
+ # P2WPKH
+ assert script is None
+ pubkeyhash = hash160(pkh)
+ spk = CScript([OP_0, pubkeyhash])
+ conf["scriptcode"] = CScript([OP_DUP, OP_HASH160, pubkeyhash, OP_EQUALVERIFY, OP_CHECKSIG])
+ conf["script_witv0"] = None
+ conf["inputs"] = [getter("sign"), pkh]
+ elif script is not None:
+ # P2WSH
+ spk = CScript([OP_0, sha256(script)])
+ conf["scriptcode"] = script
+ conf["script_witv0"] = script
+ else:
+ assert False
+ elif tap is None:
+ conf["mode"] = "legacy"
+ if pkh is not None:
+ # P2PKH
+ assert script is None
+ pubkeyhash = hash160(pkh)
+ spk = CScript([OP_DUP, OP_HASH160, pubkeyhash, OP_EQUALVERIFY, OP_CHECKSIG])
+ conf["scriptcode"] = spk
+ conf["inputs"] = [getter("sign"), pkh]
+ elif script is not None:
+ # bare
+ spk = script
+ conf["scriptcode"] = script
+ else:
+ assert False
+ else:
+ assert script is None
+ conf["mode"] = "taproot"
+ conf["tap"] = tap
+ spk = tap.scriptPubKey
+
+ if spk_mutate_pre_p2sh is not None:
+ spk = spk_mutate_pre_p2sh(spk)
+
+ if p2sh:
+ # P2SH wrapper can be combined with anything else
+ conf["script_p2sh"] = spk
+ spk = CScript([OP_HASH160, hash160(spk), OP_EQUAL])
+
+ conf = {**conf, **kwargs}
+
+ def sat_fn(tx, idx, utxos, valid):
+ if valid:
+ return spend(tx, idx, utxos, **conf)
+ else:
+ assert failure is not None
+ return spend(tx, idx, utxos, **{**conf, **failure})
+
+ return Spender(script=spk, comment=comment, is_standard=standard, sat_function=sat_fn, err_msg=err_msg, sigops_weight=sigops_weight, no_fail=failure is None, need_vin_vout_mismatch=need_vin_vout_mismatch)
+
+def add_spender(spenders, *args, **kwargs):
+ """Make a spender using make_spender, and add it to spenders."""
+ spenders.append(make_spender(*args, **kwargs))
+
+# === Helpers for the test ===
+
+def random_checksig_style(pubkey):
+ """Creates a random CHECKSIG* tapscript that would succeed with only the valid signature on witness stack."""
+ return bytes(CScript([pubkey, OP_CHECKSIG]))
+ opcode = random.choice([OP_CHECKSIG, OP_CHECKSIGVERIFY, OP_CHECKSIGADD])
+ if (opcode == OP_CHECKSIGVERIFY):
+ ret = CScript([pubkey, opcode, OP_1])
+ elif (opcode == OP_CHECKSIGADD):
+ num = random.choice([0, 0x7fffffff, -0x7fffffff])
+ ret = CScript([num, pubkey, opcode, num + 1, OP_EQUAL])
+ else:
+ ret = CScript([pubkey, opcode])
+ return bytes(ret)
+
+def random_bytes(n):
+ """Return a random bytes object of length n."""
+ return bytes(random.getrandbits(8) for i in range(n))
+
+def bitflipper(expr):
+ """Return a callable that evaluates expr and returns it with a random bitflip."""
+ def fn(ctx):
+ sub = deep_eval(ctx, expr)
+ assert isinstance(sub, bytes)
+ return (int.from_bytes(sub, 'little') ^ (1 << random.randrange(len(sub) * 8))).to_bytes(len(sub), 'little')
+ return fn
+
+def zero_appender(expr):
+ """Return a callable that evaluates expr and returns it with a zero added."""
+ return lambda ctx: deep_eval(ctx, expr) + b"\x00"
+
+def byte_popper(expr):
+ """Return a callable that evaluates expr and returns it with its last byte removed."""
+ return lambda ctx: deep_eval(ctx, expr)[:-1]
+
+# Expected error strings
+
+ERR_SIG_SIZE = {"err_msg": "Invalid Schnorr signature size"}
+ERR_SIG_HASHTYPE = {"err_msg": "Invalid Schnorr signature hash type"}
+ERR_SIG_SCHNORR = {"err_msg": "Invalid Schnorr signature"}
+ERR_OP_RETURN = {"err_msg": "OP_RETURN was encountered"}
+ERR_CONTROLBLOCK_SIZE = {"err_msg": "Invalid Taproot control block size"}
+ERR_WITNESS_PROGRAM_MISMATCH = {"err_msg": "Witness program hash mismatch"}
+ERR_PUSH_LIMIT = {"err_msg": "Push value size limit exceeded"}
+ERR_DISABLED_OPCODE = {"err_msg": "Attempted to use a disabled opcode"}
+ERR_TAPSCRIPT_CHECKMULTISIG = {"err_msg": "OP_CHECKMULTISIG(VERIFY) is not available in tapscript"}
+ERR_MINIMALIF = {"err_msg": "OP_IF/NOTIF argument must be minimal in tapscript"}
+ERR_UNKNOWN_PUBKEY = {"err_msg": "Public key is neither compressed or uncompressed"}
+ERR_STACK_SIZE = {"err_msg": "Stack size limit exceeded"}
+ERR_CLEANSTACK = {"err_msg": "Stack size must be exactly one after execution"}
+ERR_STACK_EMPTY = {"err_msg": "Operation not valid with the current stack size"}
+ERR_SIGOPS_RATIO = {"err_msg": "Too much signature validation relative to witness weight"}
+ERR_UNDECODABLE = {"err_msg": "Opcode missing or not understood"}
+ERR_NO_SUCCESS = {"err_msg": "Script evaluated without error but finished with a false/empty top stack element"}
+ERR_EMPTY_WITNESS = {"err_msg": "Witness program was passed an empty witness"}
+ERR_CHECKSIGVERIFY = {"err_msg": "Script failed an OP_CHECKSIGVERIFY operation"}
+
+VALID_SIGHASHES_ECDSA = [
+ SIGHASH_ALL,
+ SIGHASH_NONE,
+ SIGHASH_SINGLE,
+ SIGHASH_ANYONECANPAY + SIGHASH_ALL,
+ SIGHASH_ANYONECANPAY + SIGHASH_NONE,
+ SIGHASH_ANYONECANPAY + SIGHASH_SINGLE
+]
+
+VALID_SIGHASHES_TAPROOT = [SIGHASH_DEFAULT] + VALID_SIGHASHES_ECDSA
+
+VALID_SIGHASHES_TAPROOT_SINGLE = [
+ SIGHASH_SINGLE,
+ SIGHASH_ANYONECANPAY + SIGHASH_SINGLE
+]
+
+VALID_SIGHASHES_TAPROOT_NO_SINGLE = [h for h in VALID_SIGHASHES_TAPROOT if h not in VALID_SIGHASHES_TAPROOT_SINGLE]
+
+SIGHASH_BITFLIP = {"failure": {"sighash": bitflipper(default_sighash)}}
+SIG_POP_BYTE = {"failure": {"sign": byte_popper(default_sign)}}
+SINGLE_SIG = {"inputs": [getter("sign")]}
+SIG_ADD_ZERO = {"failure": {"sign": zero_appender(default_sign)}}
+
+DUST_LIMIT = 600
+MIN_FEE = 50000
+
+# === Actual test cases ===
+
+
+def spenders_taproot_active():
+ """Return a list of Spenders for testing post-Taproot activation behavior."""
+
+ secs = [generate_privkey() for _ in range(8)]
+ pubs = [compute_xonly_pubkey(sec)[0] for sec in secs]
+
+ spenders = []
+
+ # == Tests for BIP340 signature validation. ==
+ # These are primarily tested through the test vectors implemented in libsecp256k1, and in src/tests/key_tests.cpp.
+ # Some things are tested programmatically as well here.
+
+ tap = taproot_construct(pubs[0])
+ # Test with key with bit flipped.
+ add_spender(spenders, "sig/key", tap=tap, key=secs[0], failure={"key_tweaked": bitflipper(default_key_tweaked)}, **ERR_SIG_SCHNORR)
+ # Test with sighash with bit flipped.
+ add_spender(spenders, "sig/sighash", tap=tap, key=secs[0], failure={"sighash": bitflipper(default_sighash)}, **ERR_SIG_SCHNORR)
+ # Test with invalid R sign.
+ add_spender(spenders, "sig/flip_r", tap=tap, key=secs[0], failure={"flag_flip_r": True}, **ERR_SIG_SCHNORR)
+ # Test with invalid P sign.
+ add_spender(spenders, "sig/flip_p", tap=tap, key=secs[0], failure={"flag_flip_p": True}, **ERR_SIG_SCHNORR)
+ # Test with signature with bit flipped.
+ add_spender(spenders, "sig/bitflip", tap=tap, key=secs[0], failure={"signature": bitflipper(default_signature)}, **ERR_SIG_SCHNORR)
+
+ # == Tests for signature hashing ==
+
+ # Run all tests once with no annex, and once with a valid random annex.
+ for annex in [None, lambda _: bytes([ANNEX_TAG]) + random_bytes(random.randrange(0, 250))]:
+ # Non-empty annex is non-standard
+ no_annex = annex is None
+
+ # Sighash mutation tests (test all sighash combinations)
+ for hashtype in VALID_SIGHASHES_TAPROOT:
+ common = {"annex": annex, "hashtype": hashtype, "standard": no_annex}
+
+ # Pure pubkey
+ tap = taproot_construct(pubs[0])
+ add_spender(spenders, "sighash/purepk", tap=tap, key=secs[0], **common, **SIGHASH_BITFLIP, **ERR_SIG_SCHNORR)
+
+ # Pubkey/P2PK script combination
+ scripts = [("s0", CScript(random_checksig_style(pubs[1])))]
+ tap = taproot_construct(pubs[0], scripts)
+ add_spender(spenders, "sighash/keypath_hashtype_%x" % hashtype, tap=tap, key=secs[0], **common, **SIGHASH_BITFLIP, **ERR_SIG_SCHNORR)
+ add_spender(spenders, "sighash/scriptpath_hashtype_%x" % hashtype, tap=tap, leaf="s0", key=secs[1], **common, **SINGLE_SIG, **SIGHASH_BITFLIP, **ERR_SIG_SCHNORR)
+
+ # Test SIGHASH_SINGLE behavior in combination with mismatching outputs
+ if hashtype in VALID_SIGHASHES_TAPROOT_SINGLE:
+ add_spender(spenders, "sighash/keypath_hashtype_mis_%x" % hashtype, tap=tap, key=secs[0], annex=annex, standard=no_annex, hashtype_actual=random.choice(VALID_SIGHASHES_TAPROOT_NO_SINGLE), failure={"hashtype_actual": hashtype}, **ERR_SIG_HASHTYPE, need_vin_vout_mismatch=True)
+ add_spender(spenders, "sighash/scriptpath_hashtype_mis_%x" % hashtype, tap=tap, leaf="s0", key=secs[1], annex=annex, standard=no_annex, hashtype_actual=random.choice(VALID_SIGHASHES_TAPROOT_NO_SINGLE), **SINGLE_SIG, failure={"hashtype_actual": hashtype}, **ERR_SIG_HASHTYPE, need_vin_vout_mismatch=True)
+
+ # Test OP_CODESEPARATOR impact on sighashing.
+ hashtype = lambda _: random.choice(VALID_SIGHASHES_TAPROOT)
+ common = {"annex": annex, "hashtype": hashtype, "standard": no_annex}
+ scripts = [
+ ("pk_codesep", CScript(random_checksig_style(pubs[1]) + bytes([OP_CODESEPARATOR]))), # codesep after checksig
+ ("codesep_pk", CScript(bytes([OP_CODESEPARATOR]) + random_checksig_style(pubs[1]))), # codesep before checksig
+ ("branched_codesep", CScript([random_bytes(random.randrange(511)), OP_DROP, OP_IF, OP_CODESEPARATOR, pubs[0], OP_ELSE, OP_CODESEPARATOR, pubs[1], OP_ENDIF, OP_CHECKSIG])), # branch dependent codesep
+ ]
+ random.shuffle(scripts)
+ tap = taproot_construct(pubs[0], scripts)
+ add_spender(spenders, "sighash/pk_codesep", tap=tap, leaf="pk_codesep", key=secs[1], **common, **SINGLE_SIG, **SIGHASH_BITFLIP, **ERR_SIG_SCHNORR)
+ add_spender(spenders, "sighash/codesep_pk", tap=tap, leaf="codesep_pk", key=secs[1], codeseppos=0, **common, **SINGLE_SIG, **SIGHASH_BITFLIP, **ERR_SIG_SCHNORR)
+ add_spender(spenders, "sighash/branched_codesep/left", tap=tap, leaf="branched_codesep", key=secs[0], codeseppos=3, **common, inputs=[getter("sign"), b'\x01'], **SIGHASH_BITFLIP, **ERR_SIG_SCHNORR)
+ add_spender(spenders, "sighash/branched_codesep/right", tap=tap, leaf="branched_codesep", key=secs[1], codeseppos=6, **common, inputs=[getter("sign"), b''], **SIGHASH_BITFLIP, **ERR_SIG_SCHNORR)
+
+ # Reusing the scripts above, test that various features affect the sighash.
+ add_spender(spenders, "sighash/annex", tap=tap, leaf="pk_codesep", key=secs[1], hashtype=hashtype, standard=False, **SINGLE_SIG, annex=bytes([ANNEX_TAG]), failure={"sighash": override(default_sighash, annex=None)}, **ERR_SIG_SCHNORR)
+ add_spender(spenders, "sighash/script", tap=tap, leaf="pk_codesep", key=secs[1], **common, **SINGLE_SIG, failure={"sighash": override(default_sighash, script_taproot=tap.leaves["codesep_pk"].script)}, **ERR_SIG_SCHNORR)
+ add_spender(spenders, "sighash/leafver", tap=tap, leaf="pk_codesep", key=secs[1], **common, **SINGLE_SIG, failure={"sighash": override(default_sighash, leafversion=random.choice([x & 0xFE for x in range(0x100) if x & 0xFE != 0xC0]))}, **ERR_SIG_SCHNORR)
+ add_spender(spenders, "sighash/scriptpath", tap=tap, leaf="pk_codesep", key=secs[1], **common, **SINGLE_SIG, failure={"sighash": override(default_sighash, leaf=None)}, **ERR_SIG_SCHNORR)
+ add_spender(spenders, "sighash/keypath", tap=tap, key=secs[0], **common, failure={"sighash": override(default_sighash, leaf="pk_codesep")}, **ERR_SIG_SCHNORR)
+
+ # Test that invalid hashtypes don't work, both in key path and script path spends
+ hashtype = lambda _: random.choice(VALID_SIGHASHES_TAPROOT)
+ for invalid_hashtype in [x for x in range(0x100) if x not in VALID_SIGHASHES_TAPROOT]:
+ add_spender(spenders, "sighash/keypath_unk_hashtype_%x" % invalid_hashtype, tap=tap, key=secs[0], hashtype=hashtype, failure={"hashtype": invalid_hashtype}, **ERR_SIG_HASHTYPE)
+ add_spender(spenders, "sighash/scriptpath_unk_hashtype_%x" % invalid_hashtype, tap=tap, leaf="pk_codesep", key=secs[1], **SINGLE_SIG, hashtype=hashtype, failure={"hashtype": invalid_hashtype}, **ERR_SIG_HASHTYPE)
+
+ # Test that hashtype 0 cannot have a hashtype byte, and 1 must have one.
+ add_spender(spenders, "sighash/hashtype0_byte_keypath", tap=tap, key=secs[0], hashtype=SIGHASH_DEFAULT, failure={"bytes_hashtype": bytes([SIGHASH_DEFAULT])}, **ERR_SIG_HASHTYPE)
+ add_spender(spenders, "sighash/hashtype0_byte_scriptpath", tap=tap, leaf="pk_codesep", key=secs[1], **SINGLE_SIG, hashtype=SIGHASH_DEFAULT, failure={"bytes_hashtype": bytes([SIGHASH_DEFAULT])}, **ERR_SIG_HASHTYPE)
+ add_spender(spenders, "sighash/hashtype1_byte_keypath", tap=tap, key=secs[0], hashtype=SIGHASH_ALL, failure={"bytes_hashtype": b''}, **ERR_SIG_SCHNORR)
+ add_spender(spenders, "sighash/hashtype1_byte_scriptpath", tap=tap, leaf="pk_codesep", key=secs[1], **SINGLE_SIG, hashtype=SIGHASH_ALL, failure={"bytes_hashtype": b''}, **ERR_SIG_SCHNORR)
+ # Test that hashtype 0 and hashtype 1 cannot be transmuted into each other.
+ add_spender(spenders, "sighash/hashtype0to1_keypath", tap=tap, key=secs[0], hashtype=SIGHASH_DEFAULT, failure={"bytes_hashtype": bytes([SIGHASH_ALL])}, **ERR_SIG_SCHNORR)
+ add_spender(spenders, "sighash/hashtype0to1_scriptpath", tap=tap, leaf="pk_codesep", key=secs[1], **SINGLE_SIG, hashtype=SIGHASH_DEFAULT, failure={"bytes_hashtype": bytes([SIGHASH_ALL])}, **ERR_SIG_SCHNORR)
+ add_spender(spenders, "sighash/hashtype1to0_keypath", tap=tap, key=secs[0], hashtype=SIGHASH_ALL, failure={"bytes_hashtype": b''}, **ERR_SIG_SCHNORR)
+ add_spender(spenders, "sighash/hashtype1to0_scriptpath", tap=tap, leaf="pk_codesep", key=secs[1], **SINGLE_SIG, hashtype=SIGHASH_ALL, failure={"bytes_hashtype": b''}, **ERR_SIG_SCHNORR)
+
+ # Test aspects of signatures with unusual lengths
+ for hashtype in [SIGHASH_DEFAULT, random.choice(VALID_SIGHASHES_TAPROOT)]:
+ scripts = [
+ ("csv", CScript([pubs[2], OP_CHECKSIGVERIFY, OP_1])),
+ ("cs_pos", CScript([pubs[2], OP_CHECKSIG])),
+ ("csa_pos", CScript([OP_0, pubs[2], OP_CHECKSIGADD, OP_1, OP_EQUAL])),
+ ("cs_neg", CScript([pubs[2], OP_CHECKSIG, OP_NOT])),
+ ("csa_neg", CScript([OP_2, pubs[2], OP_CHECKSIGADD, OP_2, OP_EQUAL]))
+ ]
+ random.shuffle(scripts)
+ tap = taproot_construct(pubs[3], scripts)
+ # Empty signatures
+ add_spender(spenders, "siglen/empty_keypath", tap=tap, key=secs[3], hashtype=hashtype, failure={"sign": b""}, **ERR_SIG_SIZE)
+ add_spender(spenders, "siglen/empty_csv", tap=tap, key=secs[2], leaf="csv", hashtype=hashtype, **SINGLE_SIG, failure={"sign": b""}, **ERR_CHECKSIGVERIFY)
+ add_spender(spenders, "siglen/empty_cs", tap=tap, key=secs[2], leaf="cs_pos", hashtype=hashtype, **SINGLE_SIG, failure={"sign": b""}, **ERR_NO_SUCCESS)
+ add_spender(spenders, "siglen/empty_csa", tap=tap, key=secs[2], leaf="csa_pos", hashtype=hashtype, **SINGLE_SIG, failure={"sign": b""}, **ERR_NO_SUCCESS)
+ add_spender(spenders, "siglen/empty_cs_neg", tap=tap, key=secs[2], leaf="cs_neg", hashtype=hashtype, **SINGLE_SIG, sign=b"", failure={"sign": lambda _: random_bytes(random.randrange(1, 63))}, **ERR_SIG_SIZE)
+ add_spender(spenders, "siglen/empty_csa_neg", tap=tap, key=secs[2], leaf="csa_neg", hashtype=hashtype, **SINGLE_SIG, sign=b"", failure={"sign": lambda _: random_bytes(random.randrange(66, 100))}, **ERR_SIG_SIZE)
+ # Appending a zero byte to signatures invalidates them
+ add_spender(spenders, "siglen/padzero_keypath", tap=tap, key=secs[3], hashtype=hashtype, **SIG_ADD_ZERO, **(ERR_SIG_HASHTYPE if hashtype == SIGHASH_DEFAULT else ERR_SIG_SIZE))
+ add_spender(spenders, "siglen/padzero_csv", tap=tap, key=secs[2], leaf="csv", hashtype=hashtype, **SINGLE_SIG, **SIG_ADD_ZERO, **(ERR_SIG_HASHTYPE if hashtype == SIGHASH_DEFAULT else ERR_SIG_SIZE))
+ add_spender(spenders, "siglen/padzero_cs", tap=tap, key=secs[2], leaf="cs_pos", hashtype=hashtype, **SINGLE_SIG, **SIG_ADD_ZERO, **(ERR_SIG_HASHTYPE if hashtype == SIGHASH_DEFAULT else ERR_SIG_SIZE))
+ add_spender(spenders, "siglen/padzero_csa", tap=tap, key=secs[2], leaf="csa_pos", hashtype=hashtype, **SINGLE_SIG, **SIG_ADD_ZERO, **(ERR_SIG_HASHTYPE if hashtype == SIGHASH_DEFAULT else ERR_SIG_SIZE))
+ add_spender(spenders, "siglen/padzero_cs_neg", tap=tap, key=secs[2], leaf="cs_neg", hashtype=hashtype, **SINGLE_SIG, sign=b"", **SIG_ADD_ZERO, **(ERR_SIG_HASHTYPE if hashtype == SIGHASH_DEFAULT else ERR_SIG_SIZE))
+ add_spender(spenders, "siglen/padzero_csa_neg", tap=tap, key=secs[2], leaf="csa_neg", hashtype=hashtype, **SINGLE_SIG, sign=b"", **SIG_ADD_ZERO, **(ERR_SIG_HASHTYPE if hashtype == SIGHASH_DEFAULT else ERR_SIG_SIZE))
+ # Removing the last byte from signatures invalidates them
+ add_spender(spenders, "siglen/popbyte_keypath", tap=tap, key=secs[3], hashtype=hashtype, **SIG_POP_BYTE, **(ERR_SIG_SIZE if hashtype == SIGHASH_DEFAULT else ERR_SIG_SCHNORR))
+ add_spender(spenders, "siglen/popbyte_csv", tap=tap, key=secs[2], leaf="csv", hashtype=hashtype, **SINGLE_SIG, **SIG_POP_BYTE, **(ERR_SIG_SIZE if hashtype == SIGHASH_DEFAULT else ERR_SIG_SCHNORR))
+ add_spender(spenders, "siglen/popbyte_cs", tap=tap, key=secs[2], leaf="cs_pos", hashtype=hashtype, **SINGLE_SIG, **SIG_POP_BYTE, **(ERR_SIG_SIZE if hashtype == SIGHASH_DEFAULT else ERR_SIG_SCHNORR))
+ add_spender(spenders, "siglen/popbyte_csa", tap=tap, key=secs[2], leaf="csa_pos", hashtype=hashtype, **SINGLE_SIG, **SIG_POP_BYTE, **(ERR_SIG_SIZE if hashtype == SIGHASH_DEFAULT else ERR_SIG_SCHNORR))
+ add_spender(spenders, "siglen/popbyte_cs_neg", tap=tap, key=secs[2], leaf="cs_neg", hashtype=hashtype, **SINGLE_SIG, sign=b"", **SIG_POP_BYTE, **(ERR_SIG_SIZE if hashtype == SIGHASH_DEFAULT else ERR_SIG_SCHNORR))
+ add_spender(spenders, "siglen/popbyte_csa_neg", tap=tap, key=secs[2], leaf="csa_neg", hashtype=hashtype, **SINGLE_SIG, sign=b"", **SIG_POP_BYTE, **(ERR_SIG_SIZE if hashtype == SIGHASH_DEFAULT else ERR_SIG_SCHNORR))
+ # Verify that an invalid signature is not allowed, not even when the CHECKSIG* is expected to fail.
+ add_spender(spenders, "siglen/invalid_cs_neg", tap=tap, key=secs[2], leaf="cs_neg", hashtype=hashtype, **SINGLE_SIG, sign=b"", failure={"sign": default_sign, "sighash": bitflipper(default_sighash)}, **ERR_SIG_SCHNORR)
+ add_spender(spenders, "siglen/invalid_csa_neg", tap=tap, key=secs[2], leaf="csa_neg", hashtype=hashtype, **SINGLE_SIG, sign=b"", failure={"sign": default_sign, "sighash": bitflipper(default_sighash)}, **ERR_SIG_SCHNORR)
+
+ # == Test that BIP341 spending only applies to witness version 1, program length 32, no P2SH ==
+
+ for p2sh in [False, True]:
+ for witver in range(1, 17):
+ for witlen in [20, 31, 32, 33]:
+ def mutate(spk):
+ prog = spk[2:]
+ assert len(prog) == 32
+ if witlen < 32:
+ prog = prog[0:witlen]
+ elif witlen > 32:
+ prog += bytes([0 for _ in range(witlen - 32)])
+ return CScript([CScriptOp.encode_op_n(witver), prog])
+ scripts = [("s0", CScript([pubs[0], OP_CHECKSIG])), ("dummy", CScript([OP_RETURN]))]
+ tap = taproot_construct(pubs[1], scripts)
+ if not p2sh and witver == 1 and witlen == 32:
+ add_spender(spenders, "applic/keypath", p2sh=p2sh, spk_mutate_pre_p2sh=mutate, tap=tap, key=secs[1], **SIGHASH_BITFLIP, **ERR_SIG_SCHNORR)
+ add_spender(spenders, "applic/scriptpath", p2sh=p2sh, leaf="s0", spk_mutate_pre_p2sh=mutate, tap=tap, key=secs[0], **SINGLE_SIG, failure={"leaf": "dummy"}, **ERR_OP_RETURN)
+ else:
+ add_spender(spenders, "applic/keypath", p2sh=p2sh, spk_mutate_pre_p2sh=mutate, tap=tap, key=secs[1], standard=False)
+ add_spender(spenders, "applic/scriptpath", p2sh=p2sh, leaf="s0", spk_mutate_pre_p2sh=mutate, tap=tap, key=secs[0], **SINGLE_SIG, standard=False)
+
+ # == Test various aspects of BIP341 spending paths ==
+
+ # A set of functions that compute the hashing partner in a Merkle tree, designed to exercise
+ # edge cases. This relies on the taproot_construct feature that a lambda can be passed in
+ # instead of a subtree, to compute the partner to be hashed with.
+ PARTNER_MERKLE_FN = [
+ # Combine with itself
+ lambda h: h,
+ # Combine with hash 0
+ lambda h: bytes([0 for _ in range(32)]),
+ # Combine with hash 2^256-1
+ lambda h: bytes([0xff for _ in range(32)]),
+ # Combine with itself-1 (BE)
+ lambda h: (int.from_bytes(h, 'big') - 1).to_bytes(32, 'big'),
+ # Combine with itself+1 (BE)
+ lambda h: (int.from_bytes(h, 'big') + 1).to_bytes(32, 'big'),
+ # Combine with itself-1 (LE)
+ lambda h: (int.from_bytes(h, 'little') - 1).to_bytes(32, 'big'),
+ # Combine with itself+1 (LE)
+ lambda h: (int.from_bytes(h, 'little') + 1).to_bytes(32, 'little'),
+ # Combine with random bitflipped version of self.
+ lambda h: (int.from_bytes(h, 'little') ^ (1 << random.randrange(256))).to_bytes(32, 'little')
+ ]
+ # Start with a tree that has depth 1 for "128deep" and depth 2 for "129deep".
+ scripts = [("128deep", CScript([pubs[0], OP_CHECKSIG])), [("129deep", CScript([pubs[0], OP_CHECKSIG])), random.choice(PARTNER_MERKLE_FN)]]
+ # Add 127 nodes on top of that tree, so that "128deep" and "129deep" end up at their designated depths.
+ for _ in range(127):
+ scripts = [scripts, random.choice(PARTNER_MERKLE_FN)]
+ tap = taproot_construct(pubs[0], scripts)
+ # Test that spends with a depth of 128 work, but 129 doesn't (even with a tree with weird Merkle branches in it).
+ add_spender(spenders, "spendpath/merklelimit", tap=tap, leaf="128deep", **SINGLE_SIG, key=secs[0], failure={"leaf": "129deep"}, **ERR_CONTROLBLOCK_SIZE)
+ # Test that flipping the negation bit invalidates spends.
+ add_spender(spenders, "spendpath/negflag", tap=tap, leaf="128deep", **SINGLE_SIG, key=secs[0], failure={"negflag": lambda ctx: 1 - default_negflag(ctx)}, **ERR_WITNESS_PROGRAM_MISMATCH)
+ # Test that bitflips in the Merkle branch invalidate it.
+ add_spender(spenders, "spendpath/bitflipmerkle", tap=tap, leaf="128deep", **SINGLE_SIG, key=secs[0], failure={"merklebranch": bitflipper(default_merklebranch)}, **ERR_WITNESS_PROGRAM_MISMATCH)
+ # Test that bitflips in the inner pubkey invalidate it.
+ add_spender(spenders, "spendpath/bitflippubkey", tap=tap, leaf="128deep", **SINGLE_SIG, key=secs[0], failure={"pubkey_inner": bitflipper(default_pubkey_inner)}, **ERR_WITNESS_PROGRAM_MISMATCH)
+ # Test that empty witnesses are invalid.
+ add_spender(spenders, "spendpath/emptywit", tap=tap, leaf="128deep", **SINGLE_SIG, key=secs[0], failure={"witness": []}, **ERR_EMPTY_WITNESS)
+ # Test that adding garbage to the control block invalidates it.
+ add_spender(spenders, "spendpath/padlongcontrol", tap=tap, leaf="128deep", **SINGLE_SIG, key=secs[0], failure={"controlblock": lambda ctx: default_controlblock(ctx) + random_bytes(random.randrange(1, 32))}, **ERR_CONTROLBLOCK_SIZE)
+ # Test that truncating the control block invalidates it.
+ add_spender(spenders, "spendpath/trunclongcontrol", tap=tap, leaf="128deep", **SINGLE_SIG, key=secs[0], failure={"controlblock": lambda ctx: default_merklebranch(ctx)[0:random.randrange(1, 32)]}, **ERR_CONTROLBLOCK_SIZE)
+
+ scripts = [("s", CScript([pubs[0], OP_CHECKSIG]))]
+ tap = taproot_construct(pubs[1], scripts)
+ # Test that adding garbage to the control block invalidates it.
+ add_spender(spenders, "spendpath/padshortcontrol", tap=tap, leaf="s", **SINGLE_SIG, key=secs[0], failure={"controlblock": lambda ctx: default_controlblock(ctx) + random_bytes(random.randrange(1, 32))}, **ERR_CONTROLBLOCK_SIZE)
+ # Test that truncating the control block invalidates it.
+ add_spender(spenders, "spendpath/truncshortcontrol", tap=tap, leaf="s", **SINGLE_SIG, key=secs[0], failure={"controlblock": lambda ctx: default_merklebranch(ctx)[0:random.randrange(1, 32)]}, **ERR_CONTROLBLOCK_SIZE)
+ # Test that truncating the control block to 1 byte ("-1 Merkle length") invalidates it
+ add_spender(spenders, "spendpath/trunc1shortcontrol", tap=tap, leaf="s", **SINGLE_SIG, key=secs[0], failure={"controlblock": lambda ctx: default_merklebranch(ctx)[0:1]}, **ERR_CONTROLBLOCK_SIZE)
+
+ # == Test BIP342 edge cases ==
+
+ csa_low_val = random.randrange(0, 17) # Within range for OP_n
+ csa_low_result = csa_low_val + 1
+
+ csa_high_val = random.randrange(17, 100) if random.getrandbits(1) else random.randrange(-100, -1) # Outside OP_n range
+ csa_high_result = csa_high_val + 1
+
+ OVERSIZE_NUMBER = 2**31
+ assert_equal(len(CScriptNum.encode(CScriptNum(OVERSIZE_NUMBER))), 6)
+ assert_equal(len(CScriptNum.encode(CScriptNum(OVERSIZE_NUMBER-1))), 5)
+
+ big_choices = []
+ big_scriptops = []
+ for i in range(1000):
+ r = random.randrange(len(pubs))
+ big_choices.append(r)
+ big_scriptops += [pubs[r], OP_CHECKSIGVERIFY]
+
+
+ def big_spend_inputs(ctx):
+ """Helper function to construct the script input for t33/t34 below."""
+ # Instead of signing 999 times, precompute signatures for every (key, hashtype) combination
+ sigs = {}
+ for ht in VALID_SIGHASHES_TAPROOT:
+ for k in range(len(pubs)):
+ sigs[(k, ht)] = override(default_sign, hashtype=ht, key=secs[k])(ctx)
+ num = get(ctx, "num")
+ return [sigs[(big_choices[i], random.choice(VALID_SIGHASHES_TAPROOT))] for i in range(num - 1, -1, -1)]
+
+ # Various BIP342 features
+ scripts = [
+ # 0) drop stack element and OP_CHECKSIG
+ ("t0", CScript([OP_DROP, pubs[1], OP_CHECKSIG])),
+ # 1) normal OP_CHECKSIG
+ ("t1", CScript([pubs[1], OP_CHECKSIG])),
+ # 2) normal OP_CHECKSIGVERIFY
+ ("t2", CScript([pubs[1], OP_CHECKSIGVERIFY, OP_1])),
+ # 3) Hypothetical OP_CHECKMULTISIG script that takes a single sig as input
+ ("t3", CScript([OP_0, OP_SWAP, OP_1, pubs[1], OP_1, OP_CHECKMULTISIG])),
+ # 4) Hypothetical OP_CHECKMULTISIGVERIFY script that takes a single sig as input
+ ("t4", CScript([OP_0, OP_SWAP, OP_1, pubs[1], OP_1, OP_CHECKMULTISIGVERIFY, OP_1])),
+ # 5) OP_IF script that needs a true input
+ ("t5", CScript([OP_IF, pubs[1], OP_CHECKSIG, OP_ELSE, OP_RETURN, OP_ENDIF])),
+ # 6) OP_NOTIF script that needs a true input
+ ("t6", CScript([OP_NOTIF, OP_RETURN, OP_ELSE, pubs[1], OP_CHECKSIG, OP_ENDIF])),
+ # 7) OP_CHECKSIG with an empty key
+ ("t7", CScript([OP_0, OP_CHECKSIG])),
+ # 8) OP_CHECKSIGVERIFY with an empty key
+ ("t8", CScript([OP_0, OP_CHECKSIGVERIFY, OP_1])),
+ # 9) normal OP_CHECKSIGADD that also ensures return value is correct
+ ("t9", CScript([csa_low_val, pubs[1], OP_CHECKSIGADD, csa_low_result, OP_EQUAL])),
+ # 10) OP_CHECKSIGADD with empty key
+ ("t10", CScript([csa_low_val, OP_0, OP_CHECKSIGADD, csa_low_result, OP_EQUAL])),
+ # 11) OP_CHECKSIGADD with missing counter stack element
+ ("t11", CScript([pubs[1], OP_CHECKSIGADD, OP_1, OP_EQUAL])),
+ # 12) OP_CHECKSIG that needs invalid signature
+ ("t12", CScript([pubs[1], OP_CHECKSIGVERIFY, pubs[0], OP_CHECKSIG, OP_NOT])),
+ # 13) OP_CHECKSIG with empty key that needs invalid signature
+ ("t13", CScript([pubs[1], OP_CHECKSIGVERIFY, OP_0, OP_CHECKSIG, OP_NOT])),
+ # 14) OP_CHECKSIGADD that needs invalid signature
+ ("t14", CScript([pubs[1], OP_CHECKSIGVERIFY, OP_0, pubs[0], OP_CHECKSIGADD, OP_NOT])),
+ # 15) OP_CHECKSIGADD with empty key that needs invalid signature
+ ("t15", CScript([pubs[1], OP_CHECKSIGVERIFY, OP_0, OP_0, OP_CHECKSIGADD, OP_NOT])),
+ # 16) OP_CHECKSIG with unknown pubkey type
+ ("t16", CScript([OP_1, OP_CHECKSIG])),
+ # 17) OP_CHECKSIGADD with unknown pubkey type
+ ("t17", CScript([OP_0, OP_1, OP_CHECKSIGADD])),
+ # 18) OP_CHECKSIGVERIFY with unknown pubkey type
+ ("t18", CScript([OP_1, OP_CHECKSIGVERIFY, OP_1])),
+ # 19) script longer than 10000 bytes and over 201 non-push opcodes
+ ("t19", CScript([OP_0, OP_0, OP_2DROP] * 10001 + [pubs[1], OP_CHECKSIG])),
+ # 20) OP_CHECKSIGVERIFY with empty key
+ ("t20", CScript([pubs[1], OP_CHECKSIGVERIFY, OP_0, OP_0, OP_CHECKSIGVERIFY, OP_1])),
+ # 21) Script that grows the stack to 1000 elements
+ ("t21", CScript([pubs[1], OP_CHECKSIGVERIFY, OP_1] + [OP_DUP] * 999 + [OP_DROP] * 999)),
+ # 22) Script that grows the stack to 1001 elements
+ ("t22", CScript([pubs[1], OP_CHECKSIGVERIFY, OP_1] + [OP_DUP] * 1000 + [OP_DROP] * 1000)),
+ # 23) Script that expects an input stack of 1000 elements
+ ("t23", CScript([OP_DROP] * 999 + [pubs[1], OP_CHECKSIG])),
+ # 24) Script that expects an input stack of 1001 elements
+ ("t24", CScript([OP_DROP] * 1000 + [pubs[1], OP_CHECKSIG])),
+ # 25) Script that pushes a MAX_SCRIPT_ELEMENT_SIZE-bytes element
+ ("t25", CScript([random_bytes(MAX_SCRIPT_ELEMENT_SIZE), OP_DROP, pubs[1], OP_CHECKSIG])),
+ # 26) Script that pushes a (MAX_SCRIPT_ELEMENT_SIZE+1)-bytes element
+ ("t26", CScript([random_bytes(MAX_SCRIPT_ELEMENT_SIZE+1), OP_DROP, pubs[1], OP_CHECKSIG])),
+ # 27) CHECKSIGADD that must fail because numeric argument number is >4 bytes
+ ("t27", CScript([CScriptNum(OVERSIZE_NUMBER), pubs[1], OP_CHECKSIGADD])),
+ # 28) Pushes random CScriptNum value, checks OP_CHECKSIGADD result
+ ("t28", CScript([csa_high_val, pubs[1], OP_CHECKSIGADD, csa_high_result, OP_EQUAL])),
+ # 29) CHECKSIGADD that succeeds with proper sig because numeric argument number is <=4 bytes
+ ("t29", CScript([CScriptNum(OVERSIZE_NUMBER-1), pubs[1], OP_CHECKSIGADD])),
+ # 30) Variant of t1 with "normal" 33-byte pubkey
+ ("t30", CScript([b'\x03' + pubs[1], OP_CHECKSIG])),
+ # 31) Variant of t2 with "normal" 33-byte pubkey
+ ("t31", CScript([b'\x02' + pubs[1], OP_CHECKSIGVERIFY, OP_1])),
+ # 32) Variant of t28 with "normal" 33-byte pubkey
+ ("t32", CScript([csa_high_val, b'\x03' + pubs[1], OP_CHECKSIGADD, csa_high_result, OP_EQUAL])),
+ # 33) 999-of-999 multisig
+ ("t33", CScript(big_scriptops[:1998] + [OP_1])),
+ # 34) 1000-of-1000 multisig
+ ("t34", CScript(big_scriptops[:2000] + [OP_1])),
+ # 35) Variant of t9 that uses a non-minimally encoded input arg
+ ("t35", CScript([bytes([csa_low_val]), pubs[1], OP_CHECKSIGADD, csa_low_result, OP_EQUAL])),
+ # 36) Empty script
+ ("t36", CScript([])),
+ ]
+ # Add many dummies to test huge trees
+ for j in range(100000):
+ scripts.append((None, CScript([OP_RETURN, random.randrange(100000)])))
+ random.shuffle(scripts)
+ tap = taproot_construct(pubs[0], scripts)
+ common = {
+ "hashtype": hashtype,
+ "key": secs[1],
+ "tap": tap,
+ }
+ # Test that MAX_SCRIPT_ELEMENT_SIZE byte stack element inputs are valid, but not one more (and 80 bytes is standard but 81 is not).
+ add_spender(spenders, "tapscript/inputmaxlimit", leaf="t0", **common, standard=False, inputs=[getter("sign"), random_bytes(MAX_SCRIPT_ELEMENT_SIZE)], failure={"inputs": [getter("sign"), random_bytes(MAX_SCRIPT_ELEMENT_SIZE+1)]}, **ERR_PUSH_LIMIT)
+ add_spender(spenders, "tapscript/input80limit", leaf="t0", **common, inputs=[getter("sign"), random_bytes(80)])
+ add_spender(spenders, "tapscript/input81limit", leaf="t0", **common, standard=False, inputs=[getter("sign"), random_bytes(81)])
+ # Test that OP_CHECKMULTISIG and OP_CHECKMULTISIGVERIFY cause failure, but OP_CHECKSIG and OP_CHECKSIGVERIFY work.
+ add_spender(spenders, "tapscript/disabled_checkmultisig", leaf="t1", **common, **SINGLE_SIG, failure={"leaf": "t3"}, **ERR_TAPSCRIPT_CHECKMULTISIG)
+ add_spender(spenders, "tapscript/disabled_checkmultisigverify", leaf="t2", **common, **SINGLE_SIG, failure={"leaf": "t4"}, **ERR_TAPSCRIPT_CHECKMULTISIG)
+ # Test that OP_IF and OP_NOTIF do not accept non-0x01 as truth value (the MINIMALIF rule is consensus in Tapscript)
+ add_spender(spenders, "tapscript/minimalif", leaf="t5", **common, inputs=[getter("sign"), b'\x01'], failure={"inputs": [getter("sign"), b'\x02']}, **ERR_MINIMALIF)
+ add_spender(spenders, "tapscript/minimalnotif", leaf="t6", **common, inputs=[getter("sign"), b'\x01'], failure={"inputs": [getter("sign"), b'\x03']}, **ERR_MINIMALIF)
+ add_spender(spenders, "tapscript/minimalif", leaf="t5", **common, inputs=[getter("sign"), b'\x01'], failure={"inputs": [getter("sign"), b'\x0001']}, **ERR_MINIMALIF)
+ add_spender(spenders, "tapscript/minimalnotif", leaf="t6", **common, inputs=[getter("sign"), b'\x01'], failure={"inputs": [getter("sign"), b'\x0100']}, **ERR_MINIMALIF)
+ # Test that 1-byte public keys (which are unknown) are acceptable but nonstandard with unrelated signatures, but 0-byte public keys are not valid.
+ add_spender(spenders, "tapscript/unkpk/checksig", leaf="t16", standard=False, **common, **SINGLE_SIG, failure={"leaf": "t7"}, **ERR_UNKNOWN_PUBKEY)
+ add_spender(spenders, "tapscript/unkpk/checksigadd", leaf="t17", standard=False, **common, **SINGLE_SIG, failure={"leaf": "t10"}, **ERR_UNKNOWN_PUBKEY)
+ add_spender(spenders, "tapscript/unkpk/checksigverify", leaf="t18", standard=False, **common, **SINGLE_SIG, failure={"leaf": "t8"}, **ERR_UNKNOWN_PUBKEY)
+ # Test that 33-byte public keys (which are unknown) are acceptable but nonstandard even with invalid signatures, but normal pubkeys are not valid in that case.
+ add_spender(spenders, "tapscript/oldpk/checksig", leaf="t30", standard=False, **common, **SINGLE_SIG, sighash=bitflipper(default_sighash), failure={"leaf": "t1"}, **ERR_SIG_SCHNORR)
+ add_spender(spenders, "tapscript/oldpk/checksigadd", leaf="t31", standard=False, **common, **SINGLE_SIG, sighash=bitflipper(default_sighash), failure={"leaf": "t2"}, **ERR_SIG_SCHNORR)
+ add_spender(spenders, "tapscript/oldpk/checksigverify", leaf="t32", standard=False, **common, **SINGLE_SIG, sighash=bitflipper(default_sighash), failure={"leaf": "t28"}, **ERR_SIG_SCHNORR)
+ # Test that 0-byte public keys are not acceptable.
+ add_spender(spenders, "tapscript/emptypk/checksig", leaf="t1", **SINGLE_SIG, **common, failure={"leaf": "t7"}, **ERR_UNKNOWN_PUBKEY)
+ add_spender(spenders, "tapscript/emptypk/checksigverify", leaf="t2", **SINGLE_SIG, **common, failure={"leaf": "t8"}, **ERR_UNKNOWN_PUBKEY)
+ add_spender(spenders, "tapscript/emptypk/checksigadd", leaf="t9", **SINGLE_SIG, **common, failure={"leaf": "t10"}, **ERR_UNKNOWN_PUBKEY)
+ add_spender(spenders, "tapscript/emptypk/checksigadd", leaf="t35", standard=False, **SINGLE_SIG, **common, failure={"leaf": "t10"}, **ERR_UNKNOWN_PUBKEY)
+ # Test that OP_CHECKSIGADD results are as expected
+ add_spender(spenders, "tapscript/checksigaddresults", leaf="t28", **SINGLE_SIG, **common, failure={"leaf": "t27"}, err_msg="unknown error")
+ add_spender(spenders, "tapscript/checksigaddoversize", leaf="t29", **SINGLE_SIG, **common, failure={"leaf": "t27"}, err_msg="unknown error")
+ # Test that OP_CHECKSIGADD requires 3 stack elements.
+ add_spender(spenders, "tapscript/checksigadd3args", leaf="t9", **SINGLE_SIG, **common, failure={"leaf": "t11"}, **ERR_STACK_EMPTY)
+ # Test that empty signatures do not cause script failure in OP_CHECKSIG and OP_CHECKSIGADD (but do fail with empty pubkey, and do fail OP_CHECKSIGVERIFY)
+ add_spender(spenders, "tapscript/emptysigs/checksig", leaf="t12", **common, inputs=[b'', getter("sign")], failure={"leaf": "t13"}, **ERR_UNKNOWN_PUBKEY)
+ add_spender(spenders, "tapscript/emptysigs/nochecksigverify", leaf="t12", **common, inputs=[b'', getter("sign")], failure={"leaf": "t20"}, **ERR_UNKNOWN_PUBKEY)
+ add_spender(spenders, "tapscript/emptysigs/checksigadd", leaf="t14", **common, inputs=[b'', getter("sign")], failure={"leaf": "t15"}, **ERR_UNKNOWN_PUBKEY)
+ # Test that scripts over 10000 bytes (and over 201 non-push ops) are acceptable.
+ add_spender(spenders, "tapscript/no10000limit", leaf="t19", **SINGLE_SIG, **common)
+ # Test that a stack size of 1000 elements is permitted, but 1001 isn't.
+ add_spender(spenders, "tapscript/1000stack", leaf="t21", **SINGLE_SIG, **common, failure={"leaf": "t22"}, **ERR_STACK_SIZE)
+ # Test that an input stack size of 1000 elements is permitted, but 1001 isn't.
+ add_spender(spenders, "tapscript/1000inputs", leaf="t23", **common, inputs=[getter("sign")] + [b'' for _ in range(999)], failure={"leaf": "t24", "inputs": [getter("sign")] + [b'' for _ in range(1000)]}, **ERR_STACK_SIZE)
+ # Test that pushing a MAX_SCRIPT_ELEMENT_SIZE byte stack element is valid, but one longer is not.
+ add_spender(spenders, "tapscript/pushmaxlimit", leaf="t25", **common, **SINGLE_SIG, failure={"leaf": "t26"}, **ERR_PUSH_LIMIT)
+ # Test that 999-of-999 multisig works (but 1000-of-1000 triggers stack size limits)
+ add_spender(spenders, "tapscript/bigmulti", leaf="t33", **common, inputs=big_spend_inputs, num=999, failure={"leaf": "t34", "num": 1000}, **ERR_STACK_SIZE)
+ # Test that the CLEANSTACK rule is consensus critical in tapscript
+ add_spender(spenders, "tapscript/cleanstack", leaf="t36", tap=tap, inputs=[b'\x01'], failure={"inputs": [b'\x01', b'\x01']}, **ERR_CLEANSTACK)
+
+ # == Test for sigops ratio limit ==
+
+ # Given a number n and a public key pk, each of these functions produces a (CScript, sigops) pair. The
+ # script takes as input a valid signature with the passed pk followed by a dummy push of bytes that are
+ # to be dropped, and executes sigops signature checks.
+ SIGOPS_RATIO_SCRIPTS = [
+ # n OP_CHECKSIGVERIFYs and 1 OP_CHECKSIG.
+ lambda n, pk: (CScript([OP_DROP, pk] + [OP_2DUP, OP_CHECKSIGVERIFY] * n + [OP_CHECKSIG]), n + 1),
+ # n OP_CHECKSIGVERIFYs and 1 OP_CHECKSIGADD, but also one unexecuted OP_CHECKSIGVERIFY.
+ lambda n, pk: (CScript([OP_DROP, pk, OP_0, OP_IF, OP_2DUP, OP_CHECKSIGVERIFY, OP_ENDIF] + [OP_2DUP, OP_CHECKSIGVERIFY] * n + [OP_2, OP_SWAP, OP_CHECKSIGADD, OP_3, OP_EQUAL]), n + 1),
+ # n OP_CHECKSIGVERIFYs and 1 OP_CHECKSIGADD, but also one unexecuted OP_CHECKSIG.
+ lambda n, pk: (CScript([random_bytes(220), OP_2DROP, pk, OP_1, OP_NOTIF, OP_2DUP, OP_CHECKSIG, OP_VERIFY, OP_ENDIF] + [OP_2DUP, OP_CHECKSIGVERIFY] * n + [OP_4, OP_SWAP, OP_CHECKSIGADD, OP_5, OP_EQUAL]), n + 1),
+ # n OP_CHECKSIGVERIFYs and 1 OP_CHECKSIGADD, but also one unexecuted OP_CHECKSIGADD.
+ lambda n, pk: (CScript([OP_DROP, pk, OP_1, OP_IF, OP_ELSE, OP_2DUP, OP_6, OP_SWAP, OP_CHECKSIGADD, OP_7, OP_EQUALVERIFY, OP_ENDIF] + [OP_2DUP, OP_CHECKSIGVERIFY] * n + [OP_8, OP_SWAP, OP_CHECKSIGADD, OP_9, OP_EQUAL]), n + 1),
+ # n+1 OP_CHECKSIGs, but also one OP_CHECKSIG with an empty signature.
+ lambda n, pk: (CScript([OP_DROP, OP_0, pk, OP_CHECKSIG, OP_NOT, OP_VERIFY, pk] + [OP_2DUP, OP_CHECKSIG, OP_VERIFY] * n + [OP_CHECKSIG]), n + 1),
+ # n OP_CHECKSIGADDs and 1 OP_CHECKSIG, but also an OP_CHECKSIGADD with an empty signature.
+ lambda n, pk: (CScript([OP_DROP, OP_0, OP_10, pk, OP_CHECKSIGADD, OP_10, OP_EQUALVERIFY, pk] + [OP_2DUP, OP_16, OP_SWAP, OP_CHECKSIGADD, b'\x11', OP_EQUALVERIFY] * n + [OP_CHECKSIG]), n + 1),
+ ]
+ for annex in [None, bytes([ANNEX_TAG]) + random_bytes(random.randrange(1000))]:
+ for hashtype in [SIGHASH_DEFAULT, SIGHASH_ALL]:
+ for pubkey in [pubs[1], random_bytes(random.choice([x for x in range(2, 81) if x != 32]))]:
+ for fn_num, fn in enumerate(SIGOPS_RATIO_SCRIPTS):
+ merkledepth = random.randrange(129)
+
+
+ def predict_sigops_ratio(n, dummy_size):
+ """Predict whether spending fn(n, pubkey) with dummy_size will pass the ratio test."""
+ script, sigops = fn(n, pubkey)
+ # Predict the size of the witness for a given choice of n
+ stacklen_size = 1
+ sig_size = 64 + (hashtype != SIGHASH_DEFAULT)
+ siglen_size = 1
+ dummylen_size = 1 + 2 * (dummy_size >= 253)
+ script_size = len(script)
+ scriptlen_size = 1 + 2 * (script_size >= 253)
+ control_size = 33 + 32 * merkledepth
+ controllen_size = 1 + 2 * (control_size >= 253)
+ annex_size = 0 if annex is None else len(annex)
+ annexlen_size = 0 if annex is None else 1 + 2 * (annex_size >= 253)
+ witsize = stacklen_size + sig_size + siglen_size + dummy_size + dummylen_size + script_size + scriptlen_size + control_size + controllen_size + annex_size + annexlen_size
+ # sigops ratio test
+ return witsize + 50 >= 50 * sigops
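+ # (This mirrors the BIP 342 sigops budget: the budget is the witness size plus 50, and each
+ # executed signature check with a non-empty signature costs 50 units; e.g. a 449-byte witness
+ # gives a budget of 499 and therefore allows at most 9 checks.)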
+ # Make sure n is high enough that with empty dummy, the script is not valid
+ n = 0
+ while predict_sigops_ratio(n, 0):
+ n += 1
+ # But allow picking a bit higher still
+ n += random.randrange(5)
+ # Now pick dummy size *just* large enough that the overall construction passes
+ dummylen = 0
+ while not predict_sigops_ratio(n, dummylen):
+ dummylen += 1
+ scripts = [("s", fn(n, pubkey)[0])]
+ for _ in range(merkledepth):
+ scripts = [scripts, random.choice(PARTNER_MERKLE_FN)]
+ tap = taproot_construct(pubs[0], scripts)
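+ # The spend is only standard if there is no annex, the dummy push stays within the 80-byte
+ # policy limit for tapscript stack elements, and the pubkey is a known 32-byte type.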
+ standard = annex is None and dummylen <= 80 and len(pubkey) == 32
+ add_spender(spenders, "tapscript/sigopsratio_%i" % fn_num, tap=tap, leaf="s", annex=annex, hashtype=hashtype, key=secs[1], inputs=[getter("sign"), random_bytes(dummylen)], standard=standard, failure={"inputs": [getter("sign"), random_bytes(dummylen - 1)]}, **ERR_SIGOPS_RATIO)
+
+ # Future leaf versions
+ for leafver in range(0, 0x100, 2):
+ if leafver == LEAF_VERSION_TAPSCRIPT or leafver == ANNEX_TAG:
+ # Skip the defined LEAF_VERSION_TAPSCRIPT, and the ANNEX_TAG which is not usable as leaf version
+ continue
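+ # Scripts under an unknown leaf version are never executed (consensus treats such outputs as
+ # unencumbered), so the *_unkver spends succeed regardless of script content but are
+ # nonstandard; the matching *_c0 leaves use the tapscript leaf version and must fail.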
+ scripts = [
+ ("bare_c0", CScript([OP_NOP])),
+ ("bare_unkver", CScript([OP_NOP]), leafver),
+ ("return_c0", CScript([OP_RETURN])),
+ ("return_unkver", CScript([OP_RETURN]), leafver),
+ ("undecodable_c0", CScript([OP_PUSHDATA1])),
+ ("undecodable_unkver", CScript([OP_PUSHDATA1]), leafver),
+ ("bigpush_c0", CScript([random_bytes(MAX_SCRIPT_ELEMENT_SIZE+1), OP_DROP])),
+ ("bigpush_unkver", CScript([random_bytes(MAX_SCRIPT_ELEMENT_SIZE+1), OP_DROP]), leafver),
+ ("1001push_c0", CScript([OP_0] * 1001)),
+ ("1001push_unkver", CScript([OP_0] * 1001), leafver),
+ ]
+ random.shuffle(scripts)
+ tap = taproot_construct(pubs[0], scripts)
+ add_spender(spenders, "unkver/bare", standard=False, tap=tap, leaf="bare_unkver", failure={"leaf": "bare_c0"}, **ERR_CLEANSTACK)
+ add_spender(spenders, "unkver/return", standard=False, tap=tap, leaf="return_unkver", failure={"leaf": "return_c0"}, **ERR_OP_RETURN)
+ add_spender(spenders, "unkver/undecodable", standard=False, tap=tap, leaf="undecodable_unkver", failure={"leaf": "undecodable_c0"}, **ERR_UNDECODABLE)
+ add_spender(spenders, "unkver/bigpush", standard=False, tap=tap, leaf="bigpush_unkver", failure={"leaf": "bigpush_c0"}, **ERR_PUSH_LIMIT)
+ add_spender(spenders, "unkver/1001push", standard=False, tap=tap, leaf="1001push_unkver", failure={"leaf": "1001push_c0"}, **ERR_STACK_SIZE)
+ add_spender(spenders, "unkver/1001inputs", standard=False, tap=tap, leaf="bare_unkver", inputs=[b'']*1001, failure={"leaf": "bare_c0"}, **ERR_STACK_SIZE)
+
+ # OP_SUCCESSx tests.
+ hashtype = lambda _: random.choice(VALID_SIGHASHES_TAPROOT)
+ for opval in range(76, 0x100):
+ opcode = CScriptOp(opval)
+ if not is_op_success(opcode):
+ continue
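+ # Per BIP 342, hitting any OP_SUCCESSx while decoding the script makes the spend
+ # unconditionally valid, even in unexecuted branches or after OP_RETURN, but only if decoding
+ # reaches the opcode (undecodable_bypassed_success fails because decoding stops earlier).
+ # Such spends are nonstandard.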
+ scripts = [
+ ("bare_success", CScript([opcode])),
+ ("bare_nop", CScript([OP_NOP])),
+ ("unexecif_success", CScript([OP_0, OP_IF, opcode, OP_ENDIF])),
+ ("unexecif_nop", CScript([OP_0, OP_IF, OP_NOP, OP_ENDIF])),
+ ("return_success", CScript([OP_RETURN, opcode])),
+ ("return_nop", CScript([OP_RETURN, OP_NOP])),
+ ("undecodable_success", CScript([opcode, OP_PUSHDATA1])),
+ ("undecodable_nop", CScript([OP_NOP, OP_PUSHDATA1])),
+ ("undecodable_bypassed_success", CScript([OP_PUSHDATA1, OP_2, opcode])),
+ ("bigpush_success", CScript([random_bytes(MAX_SCRIPT_ELEMENT_SIZE+1), OP_DROP, opcode])),
+ ("bigpush_nop", CScript([random_bytes(MAX_SCRIPT_ELEMENT_SIZE+1), OP_DROP, OP_NOP])),
+ ("1001push_success", CScript([OP_0] * 1001 + [opcode])),
+ ("1001push_nop", CScript([OP_0] * 1001 + [OP_NOP])),
+ ]
+ random.shuffle(scripts)
+ tap = taproot_construct(pubs[0], scripts)
+ add_spender(spenders, "opsuccess/bare", standard=False, tap=tap, leaf="bare_success", failure={"leaf": "bare_nop"}, **ERR_CLEANSTACK)
+ add_spender(spenders, "opsuccess/unexecif", standard=False, tap=tap, leaf="unexecif_success", failure={"leaf": "unexecif_nop"}, **ERR_CLEANSTACK)
+ add_spender(spenders, "opsuccess/return", standard=False, tap=tap, leaf="return_success", failure={"leaf": "return_nop"}, **ERR_OP_RETURN)
+ add_spender(spenders, "opsuccess/undecodable", standard=False, tap=tap, leaf="undecodable_success", failure={"leaf": "undecodable_nop"}, **ERR_UNDECODABLE)
+ add_spender(spenders, "opsuccess/undecodable_bypass", standard=False, tap=tap, leaf="undecodable_success", failure={"leaf": "undecodable_bypassed_success"}, **ERR_UNDECODABLE)
+ add_spender(spenders, "opsuccess/bigpush", standard=False, tap=tap, leaf="bigpush_success", failure={"leaf": "bigpush_nop"}, **ERR_PUSH_LIMIT)
+ add_spender(spenders, "opsuccess/1001push", standard=False, tap=tap, leaf="1001push_success", failure={"leaf": "1001push_nop"}, **ERR_STACK_SIZE)
+ add_spender(spenders, "opsuccess/1001inputs", standard=False, tap=tap, leaf="bare_success", inputs=[b'']*1001, failure={"leaf": "bare_nop"}, **ERR_STACK_SIZE)
+
+ # Non-OP_SUCCESSx (verify that those aren't accidentally treated as OP_SUCCESSx)
+ for opval in range(0, 0x100):
+ opcode = CScriptOp(opval)
+ if is_op_success(opcode):
+ continue
+ scripts = [
+ ("normal", CScript([OP_RETURN, opcode] + [OP_NOP] * 75)),
+ ("op_success", CScript([OP_RETURN, CScriptOp(0x50)]))
+ ]
+ tap = taproot_construct(pubs[0], scripts)
+ add_spender(spenders, "alwaysvalid/notsuccessx", tap=tap, leaf="op_success", inputs=[], standard=False, failure={"leaf": "normal"}) # err_msg differs based on opcode
+
+ # == Legacy tests ==
+
+ # Also add a few legacy spends into the mix, so that transactions which combine taproot and pre-taproot spends get tested too.
+ for compressed in [False, True]:
+ eckey1 = ECKey()
+ eckey1.set(generate_privkey(), compressed)
+ pubkey1 = eckey1.get_pubkey().get_bytes()
+ eckey2 = ECKey()
+ eckey2.set(generate_privkey(), compressed)
+ for p2sh in [False, True]:
+ for witv0 in [False, True]:
+ for hashtype in VALID_SIGHASHES_ECDSA + [random.randrange(0x04, 0x80), random.randrange(0x84, 0x100)]:
+ standard = (hashtype in VALID_SIGHASHES_ECDSA) and (compressed or not witv0)
+ add_spender(spenders, "legacy/pk-wrongkey", hashtype=hashtype, p2sh=p2sh, witv0=witv0, standard=standard, script=CScript([pubkey1, OP_CHECKSIG]), **SINGLE_SIG, key=eckey1, failure={"key": eckey2}, sigops_weight=4-3*witv0, **ERR_NO_SUCCESS)
+ add_spender(spenders, "legacy/pkh-sighashflip", hashtype=hashtype, p2sh=p2sh, witv0=witv0, standard=standard, pkh=pubkey1, key=eckey1, **SIGHASH_BITFLIP, sigops_weight=4-3*witv0, **ERR_NO_SUCCESS)
+
+ # Verify that OP_CHECKSIGADD wasn't accidentally added to pre-taproot validation logic.
+ for p2sh in [False, True]:
+ for witv0 in [False, True]:
+ for hashtype in VALID_SIGHASHES_ECDSA + [random.randrange(0x04, 0x80), random.randrange(0x84, 0x100)]:
+ standard = hashtype in VALID_SIGHASHES_ECDSA and (p2sh or witv0)
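+ # In pre-taproot script the OP_CHECKSIGADD byte (0xba) is an undefined opcode, so taking the
+ # OP_IF branch (second input b'\x01') must fail, while the OP_ELSE OP_CHECKSIG branch
+ # (empty second input) succeeds.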
+ add_spender(spenders, "compat/nocsa", hashtype=hashtype, p2sh=p2sh, witv0=witv0, standard=standard, script=CScript([OP_IF, OP_11, pubkey1, OP_CHECKSIGADD, OP_12, OP_EQUAL, OP_ELSE, pubkey1, OP_CHECKSIG, OP_ENDIF]), key=eckey1, sigops_weight=4-3*witv0, inputs=[getter("sign"), b''], failure={"inputs": [getter("sign"), b'\x01']}, **ERR_UNDECODABLE)
+
+ return spenders
+
+def spenders_taproot_inactive():
+ """Spenders for testing that pre-activation Taproot rules don't apply."""
+
+ spenders = []
+
+ sec = generate_privkey()
+ pub, _ = compute_xonly_pubkey(sec)
+ scripts = [
+ ("pk", CScript([pub, OP_CHECKSIG])),
+ ("future_leaf", CScript([pub, OP_CHECKSIG]), 0xc2),
+ ("op_success", CScript([pub, OP_CHECKSIG, OP_0, OP_IF, CScriptOp(0x50), OP_ENDIF])),
+ ]
+ tap = taproot_construct(pub, scripts)
+
+ # Test that keypath spending is valid & standard if compliant, but valid and nonstandard otherwise.
+ add_spender(spenders, "inactive/keypath_valid", key=sec, tap=tap)
+ add_spender(spenders, "inactive/keypath_invalidsig", key=sec, tap=tap, standard=False, sighash=bitflipper(default_sighash))
+ add_spender(spenders, "inactive/keypath_empty", key=sec, tap=tap, standard=False, witness=[])
+
+ # Same for scriptpath spending (but using future features like annex, leaf versions, or OP_SUCCESS is nonstandard).
+ add_spender(spenders, "inactive/scriptpath_valid", key=sec, tap=tap, leaf="pk", inputs=[getter("sign")])
+ add_spender(spenders, "inactive/scriptpath_invalidsig", key=sec, tap=tap, leaf="pk", standard=False, inputs=[getter("sign")], sighash=bitflipper(default_sighash))
+ add_spender(spenders, "inactive/scriptpath_invalidcb", key=sec, tap=tap, leaf="pk", standard=False, inputs=[getter("sign")], controlblock=bitflipper(default_controlblock))
+ add_spender(spenders, "inactive/scriptpath_valid_unkleaf", key=sec, tap=tap, leaf="future_leaf", standard=False, inputs=[getter("sign")])
+ add_spender(spenders, "inactive/scriptpath_invalid_unkleaf", key=sec, tap=tap, leaf="future_leaf", standard=False, inputs=[getter("sign")], sighash=bitflipper(default_sighash))
+ add_spender(spenders, "inactive/scriptpath_valid_opsuccess", key=sec, tap=tap, leaf="op_success", standard=False, inputs=[getter("sign")])
+ add_spender(spenders, "inactive/scriptpath_valid_opsuccess", key=sec, tap=tap, leaf="op_success", standard=False, inputs=[getter("sign")], sighash=bitflipper(default_sighash))
+
+ return spenders
+
+# Consensus validation flags to use in dumps for tests with "legacy/" or "inactive/" prefix.
+LEGACY_FLAGS = "P2SH,DERSIG,CHECKLOCKTIMEVERIFY,CHECKSEQUENCEVERIFY,WITNESS,NULLDUMMY"
+# Consensus validation flags to use in dumps for all other tests.
+TAPROOT_FLAGS = "P2SH,DERSIG,CHECKLOCKTIMEVERIFY,CHECKSEQUENCEVERIFY,WITNESS,NULLDUMMY,TAPROOT"
+
+def dump_json_test(tx, input_utxos, idx, success, failure):
+ spender = input_utxos[idx].spender
+ # Determine flags to dump
+ flags = LEGACY_FLAGS if spender.comment.startswith("legacy/") or spender.comment.startswith("inactive/") else TAPROOT_FLAGS
+
+ fields = [
+ ("tx", tx.serialize().hex()),
+ ("prevouts", [x.output.serialize().hex() for x in input_utxos]),
+ ("index", idx),
+ ("flags", flags),
+ ("comment", spender.comment)
+ ]
+
+ # The "final" field indicates that a spend should be always valid, even with more validation flags enabled
+ # than the listed ones. Use standardness as a proxy for this (which gives a conservative underestimate).
+ if spender.is_standard:
+ fields.append(("final", True))
+
+ def dump_witness(wit):
+ return OrderedDict([("scriptSig", wit[0].hex()), ("witness", [x.hex() for x in wit[1]])])
+ if success is not None:
+ fields.append(("success", dump_witness(success)))
+ if failure is not None:
+ fields.append(("failure", dump_witness(failure)))
+
+ # Write the dump to $TEST_DUMP_DIR/x/xyz... where x,y,z,... are the SHA1 sum of the dump (which makes the
+ # file naming scheme compatible with fuzzing infrastructure).
+ dump = json.dumps(OrderedDict(fields)) + ",\n"
+ sha1 = hashlib.sha1(dump.encode("utf-8")).hexdigest()
+ dirname = os.environ.get("TEST_DUMP_DIR", ".") + ("/%s" % sha1[0])
+ os.makedirs(dirname, exist_ok=True)
+ with open(dirname + ("/%s" % sha1), 'w', encoding="utf8") as f:
+ f.write(dump)
+
+# Data type to keep track of UTXOs, where they were created, and how to spend them.
+UTXOData = namedtuple('UTXOData', 'outpoint,output,spender')
+
+class TaprootTest(BitcoinTestFramework):
+ def add_options(self, parser):
+ parser.add_argument("--dumptests", dest="dump_tests", default=False, action="store_true",
+ help="Dump generated test cases to directory set by TEST_DUMP_DIR environment variable")
+
+ def skip_test_if_missing_module(self):
+ self.skip_if_no_wallet()
+
+ def set_test_params(self):
+ self.num_nodes = 2
+ self.setup_clean_chain = True
+ # Node 0 has Taproot inactive, Node 1 active.
+ self.extra_args = [["-whitelist=127.0.0.1", "-par=1", "-vbparams=taproot:1:1"], ["-whitelist=127.0.0.1", "-par=1"]]
+
+ def block_submit(self, node, txs, msg, err_msg, cb_pubkey=None, fees=0, sigops_weight=0, witness=False, accept=False):
+
+ # Deplete block of any non-tapscript sigops using a single additional 0-value coinbase output.
+ # It is not impossible to fit enough tapscript sigops to hit the old 80k limit without
+ # busting txin-level limits. We simply have to account for the p2pk outputs in all
+ # transactions.
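+ # Each OP_CHECKSIG in the padding output counts as one legacy sigop, weighted by
+ # WITNESS_SCALE_FACTOR, so the output below uses up (almost) all of the remaining
+ # sigops-weight budget.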
+ extra_output_script = CScript([OP_CHECKSIG]*((MAX_BLOCK_SIGOPS_WEIGHT - sigops_weight) // WITNESS_SCALE_FACTOR))
+
+ block = create_block(self.tip, create_coinbase(self.lastblockheight + 1, pubkey=cb_pubkey, extra_output_script=extra_output_script, fees=fees), self.lastblocktime + 1)
+ block.nVersion = 4
+ for tx in txs:
+ tx.rehash()
+ block.vtx.append(tx)
+ block.hashMerkleRoot = block.calc_merkle_root()
+ witness and add_witness_commitment(block)
+ block.rehash()
+ block.solve()
+ block_response = node.submitblock(block.serialize(True).hex())
+ if err_msg is not None:
+ assert block_response is not None and err_msg in block_response, "Missing error message '%s' from block response '%s': %s" % (err_msg, "(None)" if block_response is None else block_response, msg)
+ if (accept):
+ assert node.getbestblockhash() == block.hash, "Failed to accept: %s (response: %s)" % (msg, block_response)
+ self.tip = block.sha256
+ self.lastblockhash = block.hash
+ self.lastblocktime += 1
+ self.lastblockheight += 1
+ else:
+ assert node.getbestblockhash() == self.lastblockhash, "Failed to reject: " + msg
+
+ def test_spenders(self, node, spenders, input_counts):
+ """Run randomized tests with a number of "spenders".
+
+ Steps:
+ 1) Generate an appropriate UTXO for each spender to test spend conditions
+ 2) Generate 16 random addresses of all wallet types: pkh/sh_wpkh/wpkh
+ 3) Select random number of inputs from (1)
+ 4) Select random number of addresses from (2) as outputs
+
+ Each spender embodies a test; in a large randomized test, it is verified
+ that toggling the valid argument to each lambda toggles the validity of
+ the transaction. This is accomplished by constructing transactions consisting
+ of all valid inputs, except one invalid one.
+ """
+
+ # Construct a bunch of sPKs that send coins back to the host wallet
+ self.log.info("- Constructing addresses for returning coins")
+ host_spks = []
+ host_pubkeys = []
+ for i in range(16):
+ addr = node.getnewaddress(address_type=random.choice(["legacy", "p2sh-segwit", "bech32"]))
+ info = node.getaddressinfo(addr)
+ spk = bytes.fromhex(info['scriptPubKey'])
+ host_spks.append(spk)
+ host_pubkeys.append(bytes.fromhex(info['pubkey']))
+
+ # Initialize variables used by block_submit().
+ self.lastblockhash = node.getbestblockhash()
+ self.tip = int(self.lastblockhash, 16)
+ block = node.getblock(self.lastblockhash)
+ self.lastblockheight = block['height']
+ self.lastblocktime = block['time']
+
+ # Create transactions spending up to 50 of the wallet's inputs, with one output for each spender, and
+ # one change output at the end. The transaction is constructed on the Python side to enable
+ # having multiple outputs to the same address and outputs with no assigned address. The wallet
+ # is then asked to sign it through signrawtransactionwithwallet, and then added to a block on the
+ # Python side (to bypass standardness rules).
+ self.log.info("- Creating test UTXOs...")
+ random.shuffle(spenders)
+ normal_utxos = []
+ mismatching_utxos = [] # UTXOs with input that requires mismatching output position
+ done = 0
+ while done < len(spenders):
+ # Compute how many UTXOs to create with this transaction
+ count_this_tx = min(len(spenders) - done, (len(spenders) + 4) // 5, 10000)
+
+ fund_tx = CTransaction()
+ # Add the 50 highest-value inputs
+ unspents = node.listunspent()
+ random.shuffle(unspents)
+ unspents.sort(key=lambda x: int(x["amount"] * 100000000), reverse=True)
+ if len(unspents) > 50:
+ unspents = unspents[:50]
+ random.shuffle(unspents)
+ balance = 0
+ for unspent in unspents:
+ balance += int(unspent["amount"] * 100000000)
+ txid = int(unspent["txid"], 16)
+ fund_tx.vin.append(CTxIn(COutPoint(txid, int(unspent["vout"])), CScript()))
+ # Add outputs
+ cur_progress = done / len(spenders)
+ next_progress = (done + count_this_tx) / len(spenders)
+ change_goal = (1.0 - 0.6 * next_progress) / (1.0 - 0.6 * cur_progress) * balance
+ self.log.debug("Create %i UTXOs in a transaction spending %i inputs worth %.8f (sending ~%.8f to change)" % (count_this_tx, len(unspents), balance * 0.00000001, change_goal * 0.00000001))
+ for i in range(count_this_tx):
+ avg = (balance - change_goal) / (count_this_tx - i)
+ amount = int(random.randrange(int(avg*0.85 + 0.5), int(avg*1.15 + 0.5)) + 0.5)
+ balance -= amount
+ fund_tx.vout.append(CTxOut(amount, spenders[done + i].script))
+ # Add change
+ fund_tx.vout.append(CTxOut(balance - 10000, random.choice(host_spks)))
+ # Ask the wallet to sign
+ ss = BytesIO(bytes.fromhex(node.signrawtransactionwithwallet(ToHex(fund_tx))["hex"]))
+ fund_tx.deserialize(ss)
+ # Construct UTXOData entries
+ fund_tx.rehash()
+ for i in range(count_this_tx):
+ utxodata = UTXOData(outpoint=COutPoint(fund_tx.sha256, i), output=fund_tx.vout[i], spender=spenders[done])
+ if utxodata.spender.need_vin_vout_mismatch:
+ mismatching_utxos.append(utxodata)
+ else:
+ normal_utxos.append(utxodata)
+ done += 1
+ # Mine into a block
+ self.block_submit(node, [fund_tx], "Funding tx", None, random.choice(host_pubkeys), 10000, MAX_BLOCK_SIGOPS_WEIGHT, True, True)
+
+ # Consume groups of choice(input_counts) UTXOs in a tx, testing the spenders.
+ self.log.info("- Running %i spending tests" % done)
+ random.shuffle(normal_utxos)
+ random.shuffle(mismatching_utxos)
+ assert done == len(normal_utxos) + len(mismatching_utxos)
+
+ left = done
+ while left:
+ # Construct CTransaction with random nVersion, nLocktime
+ tx = CTransaction()
+ tx.nVersion = random.choice([1, 2, random.randint(-0x80000000, 0x7fffffff)])
+ min_sequence = (tx.nVersion != 1 and tx.nVersion != 0) * 0x80000000 # The minimum sequence number to disable relative locktime
+ if random.choice([True, False]):
+ tx.nLockTime = random.randrange(LOCKTIME_THRESHOLD, self.lastblocktime - 7200) # all absolute locktimes in the past
+ else:
+ tx.nLockTime = random.randrange(self.lastblockheight + 1) # all block heights in the past
+
+ # Decide how many UTXOs to test with.
+ acceptable = [n for n in input_counts if n <= left and (left - n > max(input_counts) or (left - n) in [0] + input_counts)]
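+ # (Only counts are allowed that leave either zero UTXOs, more than max(input_counts), or a
+ # remainder that is itself an allowed count, so we never strand leftover UTXOs.)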
+ num_inputs = random.choice(acceptable)
+
+ # If we have UTXOs that require mismatching inputs/outputs left, include exactly one of those
+ # unless there is only one normal UTXO left (as tests with mismatching UTXOs require at least one
+ # normal UTXO to go in the first position), and we don't want to run out of normal UTXOs.
+ input_utxos = []
+ while len(mismatching_utxos) and (len(input_utxos) == 0 or len(normal_utxos) == 1):
+ input_utxos.append(mismatching_utxos.pop())
+ left -= 1
+
+ # Top up until we hit num_inputs (but include at least one normal UTXO always).
+ for _ in range(max(1, num_inputs - len(input_utxos))):
+ input_utxos.append(normal_utxos.pop())
+ left -= 1
+
+ # The first input cannot require a mismatching output (as there is at least one output).
+ while True:
+ random.shuffle(input_utxos)
+ if not input_utxos[0].spender.need_vin_vout_mismatch:
+ break
+ first_mismatch_input = None
+ for i in range(len(input_utxos)):
+ if input_utxos[i].spender.need_vin_vout_mismatch:
+ first_mismatch_input = i
+ assert first_mismatch_input is None or first_mismatch_input > 0
+
+ # Decide fee, and add CTxIns to tx.
+ amount = sum(utxo.output.nValue for utxo in input_utxos)
+ fee = min(random.randrange(MIN_FEE * 2, MIN_FEE * 4), amount - DUST_LIMIT) # 10000-20000 sat fee
+ in_value = amount - fee
+ tx.vin = [CTxIn(outpoint=utxo.outpoint, nSequence=random.randint(min_sequence, 0xffffffff)) for utxo in input_utxos]
+ tx.wit.vtxinwit = [CTxInWitness() for _ in range(len(input_utxos))]
+ sigops_weight = sum(utxo.spender.sigops_weight for utxo in input_utxos)
+ self.log.debug("Test: %s" % (", ".join(utxo.spender.comment for utxo in input_utxos)))
+
+ # Add 1 to 4 random outputs (but constrained by inputs that require mismatching outputs)
+ num_outputs = random.choice(range(1, 1 + min(4, 4 if first_mismatch_input is None else first_mismatch_input)))
+ assert in_value >= 0 and fee - num_outputs * DUST_LIMIT >= MIN_FEE
+ for i in range(num_outputs):
+ tx.vout.append(CTxOut())
+ if in_value <= DUST_LIMIT:
+ tx.vout[-1].nValue = DUST_LIMIT
+ elif i < num_outputs - 1:
+ tx.vout[-1].nValue = in_value
+ else:
+ tx.vout[-1].nValue = random.randint(DUST_LIMIT, in_value)
+ in_value -= tx.vout[-1].nValue
+ tx.vout[-1].scriptPubKey = random.choice(host_spks)
+ sigops_weight += CScript(tx.vout[-1].scriptPubKey).GetSigOpCount(False) * WITNESS_SCALE_FACTOR
+ fee += in_value
+ assert fee >= 0
+
+ # Select coinbase pubkey
+ cb_pubkey = random.choice(host_pubkeys)
+ sigops_weight += 1 * WITNESS_SCALE_FACTOR
+
+ # Precompute one satisfying and one failing scriptSig/witness for each input.
+ input_data = []
+ for i in range(len(input_utxos)):
+ fn = input_utxos[i].spender.sat_function
+ fail = None
+ success = fn(tx, i, [utxo.output for utxo in input_utxos], True)
+ if not input_utxos[i].spender.no_fail:
+ fail = fn(tx, i, [utxo.output for utxo in input_utxos], False)
+ input_data.append((fail, success))
+ if self.options.dump_tests:
+ dump_json_test(tx, input_utxos, i, success, fail)
+
+ # Sign each input incorrectly once on each complete signing pass, except the very last.
+ for fail_input in list(range(len(input_utxos))) + [None]:
+ # Skip trying to fail at spending something that can't be made to fail.
+ if fail_input is not None and input_utxos[fail_input].spender.no_fail:
+ continue
+ # Expected message for each input failure; may be None (in which case it is ignored)
+ expected_fail_msg = None if fail_input is None else input_utxos[fail_input].spender.err_msg
+ # Fill inputs/witnesses
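+ # input_data[i] is a (fail, success) pair; every input gets its success witness except
+ # fail_input, which gets the failing one.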
+ for i in range(len(input_utxos)):
+ tx.vin[i].scriptSig = input_data[i][i != fail_input][0]
+ tx.wit.vtxinwit[i].scriptWitness.stack = input_data[i][i != fail_input][1]
+ # Submit to mempool to check standardness
+ is_standard_tx = fail_input is None and all(utxo.spender.is_standard for utxo in input_utxos) and tx.nVersion >= 1 and tx.nVersion <= 2
+ tx.rehash()
+ msg = ','.join(utxo.spender.comment + ("*" if n == fail_input else "") for n, utxo in enumerate(input_utxos))
+ if is_standard_tx:
+ node.sendrawtransaction(tx.serialize().hex(), 0)
+ assert node.getmempoolentry(tx.hash) is not None, "Failed to accept into mempool: " + msg
+ else:
+ assert_raises_rpc_error(-26, None, node.sendrawtransaction, tx.serialize().hex(), 0)
+ # Submit in a block
+ self.block_submit(node, [tx], msg, witness=True, accept=fail_input is None, cb_pubkey=cb_pubkey, fees=fee, sigops_weight=sigops_weight, err_msg=expected_fail_msg)
+
+ if (len(spenders) - left) // 200 > (len(spenders) - left - len(input_utxos)) // 200:
+ self.log.info(" - %i tests done" % (len(spenders) - left))
+
+ assert left == 0
+ assert len(normal_utxos) == 0
+ assert len(mismatching_utxos) == 0
+ self.log.info(" - Done")
+
+ def run_test(self):
+ self.connect_nodes(0, 1)
+
+ # Post-taproot activation tests go first (pre-taproot tests' blocks are invalid post-taproot).
+ self.log.info("Post-activation tests...")
+ self.nodes[1].generate(101)
+ self.test_spenders(self.nodes[1], spenders_taproot_active(), input_counts=[1, 2, 2, 2, 2, 3])
+
+ # Transfer ~70% of funds to the pre-taproot node.
+ addr = self.nodes[0].getnewaddress()
+ self.nodes[1].sendtoaddress(address=addr, amount=int(self.nodes[1].getbalance() * 70000000) / 100000000)
+ self.nodes[1].generate(1)
+ self.sync_blocks()
+
+ # Pre-taproot activation tests.
+ self.log.info("Pre-activation tests...")
+ self.test_spenders(self.nodes[0], spenders_taproot_inactive(), input_counts=[1, 2, 2, 2, 2, 3])
+
+
+if __name__ == '__main__':
+ TaprootTest().main()
diff --git a/test/functional/feature_versionbits_warning.py b/test/functional/feature_versionbits_warning.py
index 0713925141..2e4f4796b0 100755
--- a/test/functional/feature_versionbits_warning.py
+++ b/test/functional/feature_versionbits_warning.py
@@ -12,9 +12,8 @@ import re
from test_framework.blocktools import create_block, create_coinbase
from test_framework.messages import msg_block
-from test_framework.mininode import P2PInterface, mininode_lock
+from test_framework.p2p import P2PInterface
from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import wait_until
VB_PERIOD = 144 # versionbits period length for regtest
VB_THRESHOLD = 108 # versionbits activation threshold for regtest
@@ -62,7 +61,7 @@ class VersionBitsWarningTest(BitcoinTestFramework):
def run_test(self):
node = self.nodes[0]
- node.add_p2p_connection(P2PInterface())
+ peer = node.add_p2p_connection(P2PInterface())
node_deterministic_address = node.get_deterministic_priv_key().address
# Mine one period worth of blocks
@@ -70,7 +69,7 @@ class VersionBitsWarningTest(BitcoinTestFramework):
self.log.info("Check that there is no warning if previous VB_BLOCKS have <VB_THRESHOLD blocks with unknown versionbits version.")
# Build one period of blocks with < VB_THRESHOLD blocks signaling some unknown bit
- self.send_blocks_with_version(node.p2p, VB_THRESHOLD - 1, VB_UNKNOWN_VERSION)
+ self.send_blocks_with_version(peer, VB_THRESHOLD - 1, VB_UNKNOWN_VERSION)
node.generatetoaddress(VB_PERIOD - VB_THRESHOLD + 1, node_deterministic_address)
# Check that we're not getting any versionbit-related errors in get*info()
@@ -78,7 +77,7 @@ class VersionBitsWarningTest(BitcoinTestFramework):
assert not VB_PATTERN.match(node.getnetworkinfo()["warnings"])
# Build one period of blocks with VB_THRESHOLD blocks signaling some unknown bit
- self.send_blocks_with_version(node.p2p, VB_THRESHOLD, VB_UNKNOWN_VERSION)
+ self.send_blocks_with_version(peer, VB_THRESHOLD, VB_UNKNOWN_VERSION)
node.generatetoaddress(VB_PERIOD - VB_THRESHOLD, node_deterministic_address)
self.log.info("Check that there is a warning if previous VB_BLOCKS have >=VB_THRESHOLD blocks with unknown versionbits version.")
@@ -91,14 +90,14 @@ class VersionBitsWarningTest(BitcoinTestFramework):
# Generating one block guarantees that we'll get out of IBD
node.generatetoaddress(1, node_deterministic_address)
- wait_until(lambda: not node.getblockchaininfo()['initialblockdownload'], timeout=10, lock=mininode_lock)
+ self.wait_until(lambda: not node.getblockchaininfo()['initialblockdownload'])
# Generating one more block will be enough to generate an error.
node.generatetoaddress(1, node_deterministic_address)
# Check that get*info() shows the versionbits unknown rules warning
assert WARN_UNKNOWN_RULES_ACTIVE in node.getmininginfo()["warnings"]
assert WARN_UNKNOWN_RULES_ACTIVE in node.getnetworkinfo()["warnings"]
# Check that the alert file shows the versionbits unknown rules warning
- wait_until(lambda: self.versionbits_in_alert_file(), timeout=60)
+ self.wait_until(lambda: self.versionbits_in_alert_file())
if __name__ == '__main__':
VersionBitsWarningTest().main()
diff --git a/test/functional/interface_bitcoin_cli.py b/test/functional/interface_bitcoin_cli.py
index 80003aca0d..1257dff1ae 100755
--- a/test/functional/interface_bitcoin_cli.py
+++ b/test/functional/interface_bitcoin_cli.py
@@ -71,7 +71,14 @@ class TestBitcoinCli(BitcoinTestFramework):
assert_equal(cli_get_info['blocks'], blockchain_info['blocks'])
assert_equal(cli_get_info['headers'], blockchain_info['headers'])
assert_equal(cli_get_info['timeoffset'], network_info['timeoffset'])
- assert_equal(cli_get_info['connections'], network_info['connections'])
+ assert_equal(
+ cli_get_info['connections'],
+ {
+ 'in': network_info['connections_in'],
+ 'out': network_info['connections_out'],
+ 'total': network_info['connections']
+ }
+ )
assert_equal(cli_get_info['proxy'], network_info['networks'][0]['proxy'])
assert_equal(cli_get_info['difficulty'], blockchain_info['difficulty'])
assert_equal(cli_get_info['chain'], blockchain_info['chain'])
@@ -88,7 +95,7 @@ class TestBitcoinCli(BitcoinTestFramework):
assert_equal(self.nodes[0].cli.getwalletinfo(), wallet_info)
# Setup to test -getinfo, -generate, and -rpcwallet= with multiple wallets.
- wallets = ['', 'Encrypted', 'secret']
+ wallets = [self.default_wallet_name, 'Encrypted', 'secret']
amounts = [BALANCE + Decimal('9.999928'), Decimal(9), Decimal(31)]
self.nodes[0].createwallet(wallet_name=wallets[1])
self.nodes[0].createwallet(wallet_name=wallets[2])
diff --git a/test/functional/interface_rpc.py b/test/functional/interface_rpc.py
index ac2e73fc5c..9c877aaeae 100755
--- a/test/functional/interface_rpc.py
+++ b/test/functional/interface_rpc.py
@@ -45,7 +45,7 @@ class RPCInterfaceTest(BitcoinTestFramework):
# work fine.
{"method": "invalidmethod", "id": 2},
# Another call that should succeed.
- {"method": "getbestblockhash", "id": 3},
+ {"method": "getblockhash", "id": 3, "params": [0]},
])
result_by_id = {}
diff --git a/test/functional/interface_zmq.py b/test/functional/interface_zmq.py
index 89c55f31f3..d675ae174c 100755
--- a/test/functional/interface_zmq.py
+++ b/test/functional/interface_zmq.py
@@ -5,13 +5,23 @@
"""Test the ZMQ notification interface."""
import struct
-from test_framework.address import ADDRESS_BCRT1_UNSPENDABLE
+from test_framework.address import ADDRESS_BCRT1_UNSPENDABLE, ADDRESS_BCRT1_P2WSH_OP_TRUE
+from test_framework.blocktools import create_block, create_coinbase, add_witness_commitment
from test_framework.test_framework import BitcoinTestFramework
-from test_framework.messages import CTransaction, hash256
-from test_framework.util import assert_equal, connect_nodes
+from test_framework.messages import CTransaction, hash256, FromHex
+from test_framework.util import (
+ assert_equal,
+ assert_raises_rpc_error,
+)
from io import BytesIO
from time import sleep
+# The test may be skipped when zmq is not installed, so tolerate a missing module here
+try:
+ import zmq
+except ImportError:
+ pass
+
def hash256_reversed(byte_str):
return hash256(byte_str)[::-1]
@@ -21,7 +31,6 @@ class ZMQSubscriber:
self.socket = socket
self.topic = topic
- import zmq
self.socket.setsockopt(zmq.SUBSCRIBE, self.topic)
def receive(self):
@@ -33,6 +42,22 @@ class ZMQSubscriber:
self.sequence += 1
return body
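+ # Parse one 'sequence' notification into (hash, label, mempool_sequence): a 32-byte hash, a
+ # one-byte label ('C'/'D' for block connect/disconnect, 'A'/'R' for mempool add/remove), and
+ # for 'A'/'R' an 8-byte little-endian mempool sequence number (None otherwise).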
+ def receive_sequence(self):
+ topic, body, seq = self.socket.recv_multipart()
+ # Topic should match the subscriber topic.
+ assert_equal(topic, self.topic)
+ # Sequence should be incremental.
+ assert_equal(struct.unpack('<I', seq)[-1], self.sequence)
+ self.sequence += 1
+ hash = body[:32].hex()
+ label = chr(body[32])
+ mempool_sequence = None if len(body) != 32+1+8 else struct.unpack("<Q", body[32+1:])[0]
+ if mempool_sequence is not None:
+ assert label == "A" or label == "R"
+ else:
+ assert label == "D" or label == "C"
+ return (hash, label, mempool_sequence)
+
class ZMQTest (BitcoinTestFramework):
def set_test_params(self):
@@ -43,39 +68,43 @@ class ZMQTest (BitcoinTestFramework):
self.skip_if_no_bitcoind_zmq()
def run_test(self):
- import zmq
self.ctx = zmq.Context()
try:
self.test_basic()
+ self.test_sequence()
+ self.test_mempool_sync()
self.test_reorg()
+ self.test_multiple_interfaces()
finally:
# Destroy the ZMQ context.
self.log.debug("Destroying ZMQ context")
self.ctx.destroy(linger=None)
def test_basic(self):
- # All messages are received in the same socket which means
- # that this test fails if the publishing order changes.
- # Note that the publishing order is not defined in the documentation and
- # is subject to change.
- import zmq
# Invalid zmq arguments don't take down the node, see #17185.
self.restart_node(0, ["-zmqpubrawtx=foo", "-zmqpubhashtx=bar"])
address = 'tcp://127.0.0.1:28332'
- socket = self.ctx.socket(zmq.SUB)
- socket.set(zmq.RCVTIMEO, 60000)
+ sockets = []
+ subs = []
+ services = [b"hashblock", b"hashtx", b"rawblock", b"rawtx"]
+ for service in services:
+ sockets.append(self.ctx.socket(zmq.SUB))
+ sockets[-1].set(zmq.RCVTIMEO, 60000)
+ subs.append(ZMQSubscriber(sockets[-1], service))
# Subscribe to all available topics.
- hashblock = ZMQSubscriber(socket, b"hashblock")
- hashtx = ZMQSubscriber(socket, b"hashtx")
- rawblock = ZMQSubscriber(socket, b"rawblock")
- rawtx = ZMQSubscriber(socket, b"rawtx")
+ hashblock = subs[0]
+ hashtx = subs[1]
+ rawblock = subs[2]
+ rawtx = subs[3]
self.restart_node(0, ["-zmqpub%s=%s" % (sub.topic.decode(), address) for sub in [hashblock, hashtx, rawblock, rawtx]])
- connect_nodes(self.nodes[0], 1)
- socket.connect(address)
+ self.connect_nodes(0, 1)
+ for socket in sockets:
+ socket.connect(address)
+
# Relax so that the subscriber is ready before publishing zmq messages
sleep(0.2)
@@ -96,15 +125,16 @@ class ZMQTest (BitcoinTestFramework):
tx.calc_sha256()
assert_equal(tx.hash, txid.hex())
+ # Should receive the generated raw block.
+ block = rawblock.receive()
+ assert_equal(genhashes[x], hash256_reversed(block[:80]).hex())
+
# Should receive the generated block hash.
hash = hashblock.receive().hex()
assert_equal(genhashes[x], hash)
# The block should only have the coinbase txid.
assert_equal([txid.hex()], self.nodes[1].getblock(hash)["tx"])
- # Should receive the generated raw block.
- block = rawblock.receive()
- assert_equal(genhashes[x], hash256_reversed(block[:80]).hex())
if self.is_wallet_compiled():
self.log.info("Wait for tx from second node")
@@ -119,6 +149,13 @@ class ZMQTest (BitcoinTestFramework):
hex = rawtx.receive()
assert_equal(payment_txid, hash256_reversed(hex).hex())
+ # Mining a block with this tx should result in a second notification,
+ # after the coinbase tx notification
+ self.nodes[0].generatetoaddress(1, ADDRESS_BCRT1_UNSPENDABLE)
+ hashtx.receive()
+ txid = hashtx.receive()
+ assert_equal(payment_txid, txid.hex())
+
self.log.info("Test the getzmqnotifications RPC")
assert_equal(self.nodes[0].getzmqnotifications(), [
@@ -131,30 +168,366 @@ class ZMQTest (BitcoinTestFramework):
assert_equal(self.nodes[1].getzmqnotifications(), [])
def test_reorg(self):
- import zmq
+ if not self.is_wallet_compiled():
+ self.log.info("Skipping reorg test because wallet is disabled")
+ return
+
address = 'tcp://127.0.0.1:28333'
- socket = self.ctx.socket(zmq.SUB)
- socket.set(zmq.RCVTIMEO, 60000)
- hashblock = ZMQSubscriber(socket, b'hashblock')
+
+ services = [b"hashblock", b"hashtx"]
+ sockets = []
+ subs = []
+ for service in services:
+ sockets.append(self.ctx.socket(zmq.SUB))
+ # 2 second timeout to check end of notifications
+ sockets[-1].set(zmq.RCVTIMEO, 2000)
+ subs.append(ZMQSubscriber(sockets[-1], service))
+
+ # Subscribe to all available topics.
+ hashblock = subs[0]
+ hashtx = subs[1]
# Should only notify the tip if a reorg occurs
- self.restart_node(0, ['-zmqpub%s=%s' % (hashblock.topic.decode(), address)])
- socket.connect(address)
+ self.restart_node(0, ["-zmqpub%s=%s" % (sub.topic.decode(), address) for sub in [hashblock, hashtx]])
+ for socket in sockets:
+ socket.connect(address)
# Relax so that the subscriber is ready before publishing zmq messages
sleep(0.2)
- # Generate 1 block in nodes[0] and receive all notifications
- self.nodes[0].generatetoaddress(1, ADDRESS_BCRT1_UNSPENDABLE)
+ # Generate 1 block in nodes[0] with 1 mempool tx and receive all notifications
+ payment_txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1.0)
+ disconnect_block = self.nodes[0].generatetoaddress(1, ADDRESS_BCRT1_UNSPENDABLE)[0]
+ disconnect_cb = self.nodes[0].getblock(disconnect_block)["tx"][0]
assert_equal(self.nodes[0].getbestblockhash(), hashblock.receive().hex())
+ assert_equal(hashtx.receive().hex(), payment_txid)
+ assert_equal(hashtx.receive().hex(), disconnect_cb)
- # Generate 2 blocks in nodes[1]
- self.nodes[1].generatetoaddress(2, ADDRESS_BCRT1_UNSPENDABLE)
+ # Generate 2 blocks in nodes[1] to a different address to ensure a chain split
+ connect_blocks = self.nodes[1].generatetoaddress(2, ADDRESS_BCRT1_P2WSH_OP_TRUE)
# nodes[0] will reorg chain after connecting back nodes[1]
- connect_nodes(self.nodes[0], 1)
+ self.connect_nodes(0, 1)
+ self.sync_blocks() # tx in mempool valid but not advertised
# Should receive nodes[1] tip
assert_equal(self.nodes[1].getbestblockhash(), hashblock.receive().hex())
+ # During reorg:
+ # First the old payment transaction notification from the disconnect, then the disconnected coinbase
+ assert_equal(hashtx.receive().hex(), payment_txid)
+ assert_equal(hashtx.receive().hex(), disconnect_cb)
+ # And the payment transaction again due to mempool entry
+ assert_equal(hashtx.receive().hex(), payment_txid)
+ assert_equal(hashtx.receive().hex(), payment_txid)
+ # And the new connected coinbases
+ for i in [0, 1]:
+ assert_equal(hashtx.receive().hex(), self.nodes[1].getblock(connect_blocks[i])["tx"][0])
+
+ # If we do a simple invalidate we announce the disconnected coinbase
+ self.nodes[0].invalidateblock(connect_blocks[1])
+ assert_equal(hashtx.receive().hex(), self.nodes[1].getblock(connect_blocks[1])["tx"][0])
+ # And the current tip
+ assert_equal(hashtx.receive().hex(), self.nodes[1].getblock(connect_blocks[0])["tx"][0])
+
+ def test_sequence(self):
+ """
+ Sequence zmq notifications give every blockhash and txhash in order
+ of processing, regardless of IBD, re-orgs, etc.
+ Format of messages:
+ <32-byte hash>C : Blockhash connected
+ <32-byte hash>D : Blockhash disconnected
+ <32-byte hash>R<8-byte LE uint> : Transactionhash removed from mempool for non-block inclusion reason
+ <32-byte hash>A<8-byte LE uint> : Transactionhash added to mempool
+ """
+ self.log.info("Testing 'sequence' publisher")
+ address = 'tcp://127.0.0.1:28333'
+ socket = self.ctx.socket(zmq.SUB)
+ socket.set(zmq.RCVTIMEO, 60000)
+ seq = ZMQSubscriber(socket, b'sequence')
+
+ self.restart_node(0, ['-zmqpub%s=%s' % (seq.topic.decode(), address)])
+ socket.connect(address)
+ # Relax so that the subscriber is ready before publishing zmq messages
+ sleep(0.2)
+
+ # Mempool sequence number starts at 1
+ seq_num = 1
+
+ # Generate 1 block in nodes[0] and receive all notifications
+ dc_block = self.nodes[0].generatetoaddress(1, ADDRESS_BCRT1_UNSPENDABLE)[0]
+
+ # Note: We are not notified of any block transactions, coinbase or mined
+ assert_equal((self.nodes[0].getbestblockhash(), "C", None), seq.receive_sequence())
+
+ # Generate 2 blocks in nodes[1] to a different address to ensure a chain split
+ self.nodes[1].generatetoaddress(2, ADDRESS_BCRT1_P2WSH_OP_TRUE)
+
+ # nodes[0] will reorg chain after connecting back nodes[1]
+ self.connect_nodes(0, 1)
+
+ # Then we receive all block (dis)connect notifications for the 2 block reorg
+ assert_equal((dc_block, "D", None), seq.receive_sequence())
+ block_count = self.nodes[1].getblockcount()
+ assert_equal((self.nodes[1].getblockhash(block_count-1), "C", None), seq.receive_sequence())
+ assert_equal((self.nodes[1].getblockhash(block_count), "C", None), seq.receive_sequence())
+
+ # Rest of test requires wallet functionality
+ if self.is_wallet_compiled():
+ self.log.info("Wait for tx from second node")
+ payment_txid = self.nodes[1].sendtoaddress(address=self.nodes[0].getnewaddress(), amount=5.0, replaceable=True)
+ self.sync_all()
+ self.log.info("Testing sequence notifications with mempool sequence values")
+
+ # Should receive the broadcasted txid.
+ assert_equal((payment_txid, "A", seq_num), seq.receive_sequence())
+ seq_num += 1
+
+ self.log.info("Testing RBF notification")
+ # Replace it to test eviction/addition notification
+ rbf_info = self.nodes[1].bumpfee(payment_txid)
+ self.sync_all()
+ assert_equal((payment_txid, "R", seq_num), seq.receive_sequence())
+ seq_num += 1
+ assert_equal((rbf_info["txid"], "A", seq_num), seq.receive_sequence())
+ seq_num += 1
+
+ # Removal due to block inclusion is not published; make a block and a tx to "flush" the possibility,
+ # though the mempool sequence number does go up by the number of transactions
+ # removed from the mempool by the block mining them.
+ mempool_size = len(self.nodes[0].getrawmempool())
+ c_block = self.nodes[0].generatetoaddress(1, ADDRESS_BCRT1_UNSPENDABLE)[0]
+ self.sync_all()
+ # Make sure the number of mined transactions matches the number of txs out of mempool
+ mempool_size_delta = mempool_size - len(self.nodes[0].getrawmempool())
+ assert_equal(len(self.nodes[0].getblock(c_block)["tx"])-1, mempool_size_delta)
+ seq_num += mempool_size_delta
+ payment_txid_2 = self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 1.0)
+ self.sync_all()
+ assert_equal((c_block, "C", None), seq.receive_sequence())
+ assert_equal((payment_txid_2, "A", seq_num), seq.receive_sequence())
+ seq_num += 1
+
+ # Spot check that getrawmempool only includes mempool sequence results when asked for
+ assert type(self.nodes[0].getrawmempool()) is list
+ assert type(self.nodes[0].getrawmempool(mempool_sequence=False)) is list
+ assert "mempool_sequence" not in self.nodes[0].getrawmempool(verbose=True)
+ assert_raises_rpc_error(-8, "Verbose results cannot contain mempool sequence values.", self.nodes[0].getrawmempool, True, True)
+ assert_equal(self.nodes[0].getrawmempool(mempool_sequence=True)["mempool_sequence"], seq_num)
+
+ self.log.info("Testing reorg notifications")
+ # Manually invalidate the last block to test mempool re-entry
+ # N.B. This part could be made more lenient in exact ordering
+ # since it greatly depends on the inner workings of blocks/mempool
+ # during "deep" re-orgs. It should probably "re-construct"
+ # blockchain/mempool state from notifications instead.
+ block_count = self.nodes[0].getblockcount()
+ best_hash = self.nodes[0].getbestblockhash()
+ self.nodes[0].invalidateblock(best_hash)
+ sleep(2) # Give the node a moment to re-add the disconnected block's transactions to the mempool
+
+ # Make sure getrawmempool mempool_sequence results aren't "queued" but immediately reflective
+ # of the time they were gathered.
+ assert self.nodes[0].getrawmempool(mempool_sequence=True)["mempool_sequence"] > seq_num
+
+ assert_equal((best_hash, "D", None), seq.receive_sequence())
+ assert_equal((rbf_info["txid"], "A", seq_num), seq.receive_sequence())
+ seq_num += 1
+
+ # Other things may happen but aren't wallet-deterministic so we don't test for them currently
+ self.nodes[0].reconsiderblock(best_hash)
+ self.nodes[1].generatetoaddress(1, ADDRESS_BCRT1_UNSPENDABLE)
+ self.sync_all()
+
+ self.log.info("Evict mempool transaction by block conflict")
+ orig_txid = self.nodes[0].sendtoaddress(address=self.nodes[0].getnewaddress(), amount=1.0, replaceable=True)
+
+ # More transactions that will simply be mined
+ more_tx = []
+ for _ in range(5):
+ more_tx.append(self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 0.1))
+
+ raw_tx = self.nodes[0].getrawtransaction(orig_txid)
+ bump_info = self.nodes[0].bumpfee(orig_txid)
+ # Mine the pre-bump tx
+ block = create_block(int(self.nodes[0].getbestblockhash(), 16), create_coinbase(self.nodes[0].getblockcount()+1))
+ tx = FromHex(CTransaction(), raw_tx)
+ block.vtx.append(tx)
+ for txid in more_tx:
+ tx = FromHex(CTransaction(), self.nodes[0].getrawtransaction(txid))
+ block.vtx.append(tx)
+ add_witness_commitment(block)
+ block.solve()
+ assert_equal(self.nodes[0].submitblock(block.serialize().hex()), None)
+ tip = self.nodes[0].getbestblockhash()
+ assert_equal(int(tip, 16), block.sha256)
+ orig_txid_2 = self.nodes[0].sendtoaddress(address=self.nodes[0].getnewaddress(), amount=1.0, replaceable=True)
+
+ # Flush old notifications until we reach the evicted tx's original "A" entry
+ (hash_str, label, mempool_seq) = seq.receive_sequence()
+ while hash_str != orig_txid:
+ (hash_str, label, mempool_seq) = seq.receive_sequence()
+ mempool_seq += 1
+
+ # Added original tx
+ assert_equal(label, "A")
+ # More transactions to be simply mined
+ for i in range(len(more_tx)):
+ assert_equal((more_tx[i], "A", mempool_seq), seq.receive_sequence())
+ mempool_seq += 1
+ # Bumped by rbf
+ assert_equal((orig_txid, "R", mempool_seq), seq.receive_sequence())
+ mempool_seq += 1
+ assert_equal((bump_info["txid"], "A", mempool_seq), seq.receive_sequence())
+ mempool_seq += 1
+ # Conflict announced first, then block
+ assert_equal((bump_info["txid"], "R", mempool_seq), seq.receive_sequence())
+ mempool_seq += 1
+ assert_equal((tip, "C", None), seq.receive_sequence())
+ mempool_seq += len(more_tx)
+ # Last tx
+ assert_equal((orig_txid_2, "A", mempool_seq), seq.receive_sequence())
+ mempool_seq += 1
+ self.nodes[0].generatetoaddress(1, ADDRESS_BCRT1_UNSPENDABLE)
+ self.sync_all() # want to make sure we didn't break "consensus" for other tests
+
+ def test_mempool_sync(self):
+ """
+ Use sequence notification plus getrawmempool sequence results to "sync mempool"
+ """
+ if not self.is_wallet_compiled():
+ self.log.info("Skipping mempool sync test")
+ return
+
+ self.log.info("Testing 'mempool sync' usage of sequence notifier")
+ address = 'tcp://127.0.0.1:28333'
+ socket = self.ctx.socket(zmq.SUB)
+ socket.set(zmq.RCVTIMEO, 60000)
+ seq = ZMQSubscriber(socket, b'sequence')
+
+ self.restart_node(0, ['-zmqpub%s=%s' % (seq.topic.decode(), address)])
+ self.connect_nodes(0, 1)
+ socket.connect(address)
+ # Relax so that the subscriber is ready before publishing zmq messages
+ sleep(0.2)
+
+ # In-memory counter, should always start at 1
+ next_mempool_seq = self.nodes[0].getrawmempool(mempool_sequence=True)["mempool_sequence"]
+ assert_equal(next_mempool_seq, 1)
+
+ # Some transactions have been happening but we aren't consuming zmq notifications yet
+ # or we lost a ZMQ message somehow and want to start over
+ txids = []
+ num_txs = 5
+ for _ in range(num_txs):
+ txids.append(self.nodes[1].sendtoaddress(address=self.nodes[0].getnewaddress(), amount=1.0, replaceable=True))
+ self.sync_all()
+
+ # 1) Consume backlog until we get a mempool sequence number
+ (hash_str, label, zmq_mem_seq) = seq.receive_sequence()
+ while zmq_mem_seq is None:
+ (hash_str, label, zmq_mem_seq) = seq.receive_sequence()
+
+ assert label == "A" or label == "R"
+ assert hash_str is not None
+
+ # 2) We need to "seed" our view of the mempool
+ mempool_snapshot = self.nodes[0].getrawmempool(mempool_sequence=True)
+ mempool_view = set(mempool_snapshot["txids"])
+ get_raw_seq = mempool_snapshot["mempool_sequence"]
+ assert_equal(get_raw_seq, 6)
+ # The snapshot may be older than the latest zmq message we read; retry until it catches up
+ while zmq_mem_seq >= get_raw_seq:
+ sleep(2)
+ mempool_snapshot = self.nodes[0].getrawmempool(mempool_sequence=True)
+ mempool_view = set(mempool_snapshot["txids"])
+ get_raw_seq = mempool_snapshot["mempool_sequence"]
+
+ # Things continue to happen in the "interim" while waiting for snapshot results
+ # We have node 0 do all these to avoid p2p races with RBF announcements
+ for _ in range(num_txs):
+ txids.append(self.nodes[0].sendtoaddress(address=self.nodes[0].getnewaddress(), amount=0.1, replaceable=True))
+ self.nodes[0].bumpfee(txids[-1])
+ self.sync_all()
+ self.nodes[0].generatetoaddress(1, ADDRESS_BCRT1_UNSPENDABLE)
+ final_txid = self.nodes[0].sendtoaddress(address=self.nodes[0].getnewaddress(), amount=0.1, replaceable=True)
+
+ # 3) Consume ZMQ backlog until we get to "now" for the mempool snapshot
+ while True:
+ if zmq_mem_seq == get_raw_seq - 1:
+ break
+ (hash_str, label, mempool_sequence) = seq.receive_sequence()
+ if mempool_sequence is not None:
+ zmq_mem_seq = mempool_sequence
+ if zmq_mem_seq > get_raw_seq:
+ raise Exception("We somehow jumped mempool sequence numbers! zmq_mem_seq: {} > get_raw_seq: {}".format(zmq_mem_seq, get_raw_seq))
+
+ # 4) Moving forward, we apply the delta to our local view
+ # remaining txs (5) + 1 RBF (A+R) + 1 block connect + 1 final tx
+ expected_sequence = get_raw_seq
+ r_gap = 0
+ for _ in range(num_txs + 2 + 1 + 1):
+ (hash_str, label, mempool_sequence) = seq.receive_sequence()
+ if mempool_sequence is not None:
+ if mempool_sequence != expected_sequence:
+ # Detected "R" gap, means this a conflict eviction, and mempool tx are being evicted before its
+ # position in the incoming block message "C"
+ if label == "R":
+ assert mempool_sequence > expected_sequence
+ r_gap += mempool_sequence - expected_sequence
+ else:
+ raise Exception("WARNING: txhash has unexpected mempool sequence value: {} vs expected {}".format(mempool_sequence, expected_sequence))
+ if label == "A":
+ assert hash_str not in mempool_view
+ mempool_view.add(hash_str)
+ expected_sequence = mempool_sequence + 1
+ elif label == "R":
+ assert hash_str in mempool_view
+ mempool_view.remove(hash_str)
+ expected_sequence = mempool_sequence + 1
+ elif label == "C":
+ # (Attempt to) remove all txids from known block connects
+ block_txids = self.nodes[0].getblock(hash_str)["tx"][1:]
+ for txid in block_txids:
+ if txid in mempool_view:
+ expected_sequence += 1
+ mempool_view.remove(txid)
+ expected_sequence -= r_gap
+ r_gap = 0
+ elif label == "D":
+ # Not useful for mempool tracking per se
+ continue
+ else:
+ raise Exception("Unexpected ZMQ sequence label!")
+
+ assert_equal(self.nodes[0].getrawmempool(), [final_txid])
+ assert_equal(self.nodes[0].getrawmempool(mempool_sequence=True)["mempool_sequence"], expected_sequence)
+
+ # 5) If you miss a zmq/mempool sequence number, go back to step (2)
+
+ self.nodes[0].generatetoaddress(1, ADDRESS_BCRT1_UNSPENDABLE)
+
+ def test_multiple_interfaces(self):
+ # Set up two subscribers with different addresses
+ subscribers = []
+ for i in range(2):
+ address = 'tcp://127.0.0.1:%d' % (28334 + i)
+ socket = self.ctx.socket(zmq.SUB)
+ socket.set(zmq.RCVTIMEO, 60000)
+ hashblock = ZMQSubscriber(socket, b"hashblock")
+ socket.connect(address)
+ subscribers.append({'address': address, 'hashblock': hashblock})
+
+ self.restart_node(0, ['-zmqpub%s=%s' % (subscriber['hashblock'].topic.decode(), subscriber['address']) for subscriber in subscribers])
+
+ # Relax so that the subscriber is ready before publishing zmq messages
+ sleep(0.2)
+
+ # Generate 1 block in nodes[0] and receive all notifications
+ self.nodes[0].generatetoaddress(1, ADDRESS_BCRT1_UNSPENDABLE)
+
+ # Should receive the same block hash on both subscribers
+ assert_equal(self.nodes[0].getbestblockhash(), subscribers[0]['hashblock'].receive().hex())
+ assert_equal(self.nodes[0].getbestblockhash(), subscribers[1]['hashblock'].receive().hex())
+
if __name__ == '__main__':
ZMQTest().main()
diff --git a/test/functional/mempool_accept.py b/test/functional/mempool_accept.py
index 6df8f1c3ec..fc4c775668 100755
--- a/test/functional/mempool_accept.py
+++ b/test/functional/mempool_accept.py
@@ -4,6 +4,7 @@
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test mempool acceptance of raw transactions."""
+from decimal import Decimal
from io import BytesIO
import math
@@ -82,29 +83,31 @@ class MempoolAcceptanceTest(BitcoinTestFramework):
)
self.log.info('A transaction not in the mempool')
- fee = 0.00000700
+ fee = Decimal('0.000007')
raw_tx_0 = node.signrawtransactionwithwallet(node.createrawtransaction(
inputs=[{"txid": txid_in_block, "vout": 0, "sequence": BIP125_SEQUENCE_NUMBER}], # RBF is used later
- outputs=[{node.getnewaddress(): 0.3 - fee}],
+ outputs=[{node.getnewaddress(): Decimal('0.3') - fee}],
))['hex']
tx = CTransaction()
tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0)))
txid_0 = tx.rehash()
self.check_mempool_result(
- result_expected=[{'txid': txid_0, 'allowed': True}],
+ result_expected=[{'txid': txid_0, 'allowed': True, 'vsize': tx.get_vsize(), 'fees': {'base': fee}}],
rawtxs=[raw_tx_0],
)
self.log.info('A final transaction not in the mempool')
coin = coins.pop() # Pick a random coin(base) to spend
+ output_amount = Decimal('0.025')
raw_tx_final = node.signrawtransactionwithwallet(node.createrawtransaction(
inputs=[{'txid': coin['txid'], 'vout': coin['vout'], "sequence": 0xffffffff}], # SEQUENCE_FINAL
- outputs=[{node.getnewaddress(): 0.025}],
+ outputs=[{node.getnewaddress(): output_amount}],
locktime=node.getblockcount() + 2000, # Can be anything
))['hex']
tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_final)))
+ fee_expected = coin['amount'] - output_amount
self.check_mempool_result(
- result_expected=[{'txid': tx.rehash(), 'allowed': True}],
+ result_expected=[{'txid': tx.rehash(), 'allowed': True, 'vsize': tx.get_vsize(), 'fees': {'base': fee_expected}}],
rawtxs=[tx.serialize().hex()],
maxfeerate=0,
)
@@ -127,7 +130,7 @@ class MempoolAcceptanceTest(BitcoinTestFramework):
tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0)))
txid_0 = tx.rehash()
self.check_mempool_result(
- result_expected=[{'txid': txid_0, 'allowed': True}],
+ result_expected=[{'txid': txid_0, 'allowed': True, 'vsize': tx.get_vsize(), 'fees': {'base': (2 * fee)}}],
rawtxs=[raw_tx_0],
)
@@ -187,7 +190,7 @@ class MempoolAcceptanceTest(BitcoinTestFramework):
tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
# Reference tx should be valid on itself
self.check_mempool_result(
- result_expected=[{'txid': tx.rehash(), 'allowed': True}],
+ result_expected=[{'txid': tx.rehash(), 'allowed': True, 'vsize': tx.get_vsize(), 'fees': { 'base': Decimal('0.1') - Decimal('0.05')}}],
rawtxs=[tx.serialize().hex()],
maxfeerate=0,
)
diff --git a/test/functional/mempool_compatibility.py b/test/functional/mempool_compatibility.py
index 31fb751904..7168cb4ab2 100755
--- a/test/functional/mempool_compatibility.py
+++ b/test/functional/mempool_compatibility.py
@@ -8,7 +8,7 @@ NOTE: The test is designed to prevent cases when compatibility is broken acciden
In case we need to break mempool compatibility we can continue to use the test by just bumping the version number.
Download node binaries:
-contrib/devtools/previous_release.py -b v0.19.1 v0.18.1 v0.17.1 v0.16.3 v0.15.2
+test/get_previous_releases.py -b v0.19.1 v0.18.1 v0.17.2 v0.16.3 v0.15.2
Only v0.15.2 is required by this test. The rest is used in other backwards compatibility tests.
"""
@@ -21,6 +21,7 @@ from test_framework.test_framework import BitcoinTestFramework
class MempoolCompatibilityTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 2
+ self.wallet_names = [None, self.default_wallet_name]
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
diff --git a/test/functional/mempool_packages.py b/test/functional/mempool_packages.py
index 98dac30ace..d7cb7db9f8 100755
--- a/test/functional/mempool_packages.py
+++ b/test/functional/mempool_packages.py
@@ -7,13 +7,12 @@
from decimal import Decimal
from test_framework.messages import COIN
-from test_framework.mininode import P2PTxInvStore
+from test_framework.p2p import P2PTxInvStore
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
satoshi_round,
- wait_until,
)
# default limits
@@ -59,7 +58,7 @@ class MempoolPackagesTest(BitcoinTestFramework):
def run_test(self):
# Mine some blocks and have them mature.
- self.nodes[0].add_p2p_connection(P2PTxInvStore()) # keep track of invs
+ peer_inv_store = self.nodes[0].add_p2p_connection(P2PTxInvStore()) # keep track of invs
self.nodes[0].generate(101)
utxo = self.nodes[0].listunspent(10)
txid = utxo[0]['txid']
@@ -81,7 +80,7 @@ class MempoolPackagesTest(BitcoinTestFramework):
# Wait until mempool transactions have passed initial broadcast (sent inv and received getdata)
# Otherwise, getrawmempool may be inconsistent with getmempoolentry if unbroadcast changes in between
- self.nodes[0].p2p.wait_for_broadcast(witness_chain)
+ peer_inv_store.wait_for_broadcast(witness_chain)
# Check mempool has MAX_ANCESTORS transactions in it, and descendant and ancestor
# count and fees should look correct
@@ -269,8 +268,8 @@ class MempoolPackagesTest(BitcoinTestFramework):
# - txs from previous ancestor test (-> custom ancestor limit)
# - parent tx for descendant test
# - txs chained off parent tx (-> custom descendant limit)
- wait_until(lambda: len(self.nodes[1].getrawmempool(False)) ==
- MAX_ANCESTORS_CUSTOM + 1 + MAX_DESCENDANTS_CUSTOM, timeout=10)
+ self.wait_until(lambda: len(self.nodes[1].getrawmempool(False)) ==
+ MAX_ANCESTORS_CUSTOM + 1 + MAX_DESCENDANTS_CUSTOM, timeout=10)
mempool0 = self.nodes[0].getrawmempool(False)
mempool1 = self.nodes[1].getrawmempool(False)
assert set(mempool1).issubset(set(mempool0))
diff --git a/test/functional/mempool_persist.py b/test/functional/mempool_persist.py
index 85c4d6d570..70cd4ebb3b 100755
--- a/test/functional/mempool_persist.py
+++ b/test/functional/mempool_persist.py
@@ -39,15 +39,12 @@ from decimal import Decimal
import os
import time
+from test_framework.p2p import P2PTxInvStore
from test_framework.test_framework import BitcoinTestFramework
-from test_framework.mininode import P2PTxInvStore
from test_framework.util import (
assert_equal,
assert_greater_than_or_equal,
assert_raises_rpc_error,
- connect_nodes,
- disconnect_nodes,
- wait_until,
)
@@ -84,11 +81,11 @@ class MempoolPersistTest(BitcoinTestFramework):
assert_greater_than_or_equal(tx_creation_time_higher, tx_creation_time)
# disconnect nodes & make a txn that remains in the unbroadcast set.
- disconnect_nodes(self.nodes[0], 1)
+ self.disconnect_nodes(0, 1)
assert(len(self.nodes[0].getpeerinfo()) == 0)
assert(len(self.nodes[0].p2ps) == 0)
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), Decimal("12"))
- connect_nodes(self.nodes[0], 2)
+ self.connect_nodes(0, 2)
self.log.debug("Stop-start the nodes. Verify that node0 has the transactions in its mempool and node1 does not. Verify that node2 calculates its balance correctly after loading wallet transactions.")
self.stop_nodes()
@@ -172,7 +169,7 @@ class MempoolPersistTest(BitcoinTestFramework):
# check that txn gets broadcast due to unbroadcast logic
conn = node0.add_p2p_connection(P2PTxInvStore())
node0.mockscheduler(16*60) # 15 min + 1 for buffer
- wait_until(lambda: len(conn.get_invs()) == 1)
+ self.wait_until(lambda: len(conn.get_invs()) == 1)
if __name__ == '__main__':
MempoolPersistTest().main()
diff --git a/test/functional/mempool_unbroadcast.py b/test/functional/mempool_unbroadcast.py
index 365d011157..b475b65e68 100755
--- a/test/functional/mempool_unbroadcast.py
+++ b/test/functional/mempool_unbroadcast.py
@@ -7,13 +7,11 @@ to peers until a GETDATA is received."""
import time
-from test_framework.mininode import P2PTxInvStore
+from test_framework.p2p import P2PTxInvStore
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
- connect_nodes,
create_confirmed_utxos,
- disconnect_nodes,
)
MAX_INITIAL_BROADCAST_DELAY = 15 * 60 # 15 minutes in seconds
@@ -36,7 +34,7 @@ class MempoolUnbroadcastTest(BitcoinTestFramework):
min_relay_fee = node.getnetworkinfo()["relayfee"]
utxos = create_confirmed_utxos(min_relay_fee, node, 10)
- disconnect_nodes(node, 1)
+ self.disconnect_nodes(0, 1)
self.log.info("Generate transactions that only node 0 knows about")
@@ -70,7 +68,7 @@ class MempoolUnbroadcastTest(BitcoinTestFramework):
self.restart_node(0)
self.log.info("Reconnect nodes & check if they are sent to node 1")
- connect_nodes(node, 1)
+ self.connect_nodes(0, 1)
# fast forward into the future & ensure that the second node has the txns
node.mockscheduler(MAX_INITIAL_BROADCAST_DELAY)
@@ -91,7 +89,7 @@ class MempoolUnbroadcastTest(BitcoinTestFramework):
time.sleep(2) # allow sufficient time for possibility of broadcast
assert_equal(len(conn.get_invs()), 0)
- disconnect_nodes(node, 1)
+ self.disconnect_nodes(0, 1)
node.disconnect_p2ps()
def test_txn_removal(self):
diff --git a/test/functional/mining_basic.py b/test/functional/mining_basic.py
index 63d1ccfb36..80635e33c5 100755
--- a/test/functional/mining_basic.py
+++ b/test/functional/mining_basic.py
@@ -20,12 +20,11 @@ from test_framework.messages import (
CBlockHeader,
BLOCK_HEADER_SIZE,
)
-from test_framework.mininode import P2PDataStore
+from test_framework.p2p import P2PDataStore
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
- connect_nodes,
)
@@ -56,7 +55,7 @@ class MiningTest(BitcoinTestFramework):
assert_equal(mining_info['currentblocktx'], 0)
assert_equal(mining_info['currentblockweight'], 4000)
self.restart_node(0)
- connect_nodes(self.nodes[0], 1)
+ self.connect_nodes(0, 1)
def run_test(self):
self.mine_chain()
@@ -234,9 +233,9 @@ class MiningTest(BitcoinTestFramework):
assert_raises_rpc_error(-25, 'time-too-old', lambda: node.submitheader(hexdata=CBlockHeader(bad_block_time).serialize().hex()))
# Should ask for the block from a p2p node, if they announce the header as well:
- node.add_p2p_connection(P2PDataStore())
- node.p2p.wait_for_getheaders(timeout=5) # Drop the first getheaders
- node.p2p.send_blocks_and_test(blocks=[block], node=node)
+ peer = node.add_p2p_connection(P2PDataStore())
+ peer.wait_for_getheaders(timeout=5) # Drop the first getheaders
+ peer.send_blocks_and_test(blocks=[block], node=node)
# Must be active now:
assert chain_tip(block.hash, status='active', branchlen=0) in node.getchaintips()
diff --git a/test/functional/mining_getblocktemplate_longpoll.py b/test/functional/mining_getblocktemplate_longpoll.py
index 6d0b241e57..2adafb1fdb 100755
--- a/test/functional/mining_getblocktemplate_longpoll.py
+++ b/test/functional/mining_getblocktemplate_longpoll.py
@@ -5,11 +5,13 @@
"""Test longpolling with getblocktemplate."""
from decimal import Decimal
+import random
+import threading
from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import get_rpc_proxy, random_transaction
+from test_framework.util import get_rpc_proxy
+from test_framework.wallet import MiniWallet
-import threading
class LongpollThread(threading.Thread):
def __init__(self, node):
@@ -29,45 +31,48 @@ class GetBlockTemplateLPTest(BitcoinTestFramework):
self.num_nodes = 2
self.supports_cli = False
- def skip_test_if_missing_module(self):
- self.skip_if_no_wallet()
-
def run_test(self):
self.log.info("Warning: this test will take about 70 seconds in the best case. Be patient.")
+ self.log.info("Test that longpollid doesn't change between successive getblocktemplate() invocations if nothing else happens")
self.nodes[0].generate(10)
template = self.nodes[0].getblocktemplate({'rules': ['segwit']})
longpollid = template['longpollid']
- # longpollid should not change between successive invocations if nothing else happens
template2 = self.nodes[0].getblocktemplate({'rules': ['segwit']})
assert template2['longpollid'] == longpollid
- # Test 1: test that the longpolling wait if we do nothing
+ self.log.info("Test that longpoll waits if we do nothing")
thr = LongpollThread(self.nodes[0])
thr.start()
# check that thread still lives
thr.join(5) # wait 5 seconds or until thread exits
assert thr.is_alive()
- # Test 2: test that longpoll will terminate if another node generates a block
- self.nodes[1].generate(1) # generate a block on another node
+ miniwallets = [ MiniWallet(node) for node in self.nodes ]
+ self.log.info("Test that longpoll will terminate if another node generates a block")
+ miniwallets[1].generate(1) # generate a block on another node
# check that thread will exit now that new transaction entered mempool
thr.join(5) # wait 5 seconds or until thread exits
assert not thr.is_alive()
- # Test 3: test that longpoll will terminate if we generate a block ourselves
+ self.log.info("Test that longpoll will terminate if we generate a block ourselves")
thr = LongpollThread(self.nodes[0])
thr.start()
- self.nodes[0].generate(1) # generate a block on another node
+ miniwallets[0].generate(1) # generate a block on own node
thr.join(5) # wait 5 seconds or until thread exits
assert not thr.is_alive()
- # Test 4: test that introducing a new transaction into the mempool will terminate the longpoll
+ # Add enough mature utxos to the wallets, so that all txs spend confirmed coins
+ self.nodes[0].generate(100)
+ self.sync_blocks()
+
+ self.log.info("Test that introducing a new transaction into the mempool will terminate the longpoll")
thr = LongpollThread(self.nodes[0])
thr.start()
# generate a random transaction and submit it
min_relay_fee = self.nodes[0].getnetworkinfo()["relayfee"]
- # min_relay_fee is fee per 1000 bytes, which should be more than enough.
- (txid, txhex, fee) = random_transaction(self.nodes, Decimal("1.1"), min_relay_fee, Decimal("0.001"), 20)
+ fee_rate = min_relay_fee + Decimal('0.00000010') * random.randint(0,20)
+ miniwallets[0].send_self_transfer(from_node=random.choice(self.nodes),
+ fee_rate=fee_rate)
# after one minute, every 10 seconds the mempool is probed, so in 80 seconds it should have returned
thr.join(60 + 20)
assert not thr.is_alive()
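Note: the rewritten test still relies on getblocktemplate's long-poll contract: a call that passes the previously returned longpollid blocks until the template is invalidated by a new tip or, after a delay, by new mempool transactions. A minimal sketch of that pattern, assuming rpc is any proxy object exposing the getblocktemplate RPC (the test builds one per thread with get_rpc_proxy):

    import threading

    class TemplateWaiter(threading.Thread):
        """Block inside getblocktemplate until the node hands out a new template."""
        def __init__(self, rpc):
            super().__init__()
            self.rpc = rpc
            self.new_template = None

        def run(self):
            first = self.rpc.getblocktemplate({'rules': ['segwit']})
            # Including the longpollid makes the call wait server-side for a template change.
            self.new_template = self.rpc.getblocktemplate(
                {'rules': ['segwit'], 'longpollid': first['longpollid']})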
diff --git a/test/functional/p2p_addr_relay.py b/test/functional/p2p_addr_relay.py
index 5c7e27a3a8..80f262d0d3 100755
--- a/test/functional/p2p_addr_relay.py
+++ b/test/functional/p2p_addr_relay.py
@@ -12,9 +12,7 @@ from test_framework.messages import (
NODE_WITNESS,
msg_addr,
)
-from test_framework.mininode import (
- P2PInterface,
-)
+from test_framework.p2p import P2PInterface
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
diff --git a/test/functional/p2p_addrv2_relay.py b/test/functional/p2p_addrv2_relay.py
new file mode 100755
index 0000000000..23ce3e5d04
--- /dev/null
+++ b/test/functional/p2p_addrv2_relay.py
@@ -0,0 +1,79 @@
+#!/usr/bin/env python3
+# Copyright (c) 2020 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""
+Test addrv2 relay
+"""
+
+import time
+
+from test_framework.messages import (
+ CAddress,
+ msg_addrv2,
+ NODE_NETWORK,
+ NODE_WITNESS,
+)
+from test_framework.p2p import P2PInterface
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import assert_equal
+
+ADDRS = []
+for i in range(10):
+ addr = CAddress()
+ addr.time = int(time.time()) + i
+ addr.nServices = NODE_NETWORK | NODE_WITNESS
+ addr.ip = "123.123.123.{}".format(i % 256)
+ addr.port = 8333 + i
+ ADDRS.append(addr)
+
+
+class AddrReceiver(P2PInterface):
+ addrv2_received_and_checked = False
+
+ def __init__(self):
+ super().__init__(support_addrv2=True)
+
+ def on_addrv2(self, message):
+ for addr in message.addrs:
+ assert_equal(addr.nServices, 9)
+ assert addr.ip.startswith('123.123.123.')
+ assert (8333 <= addr.port < 8343)
+ self.addrv2_received_and_checked = True
+
+ def wait_for_addrv2(self):
+ self.wait_until(lambda: "addrv2" in self.last_message)
+
+
+class AddrTest(BitcoinTestFramework):
+ def set_test_params(self):
+ self.setup_clean_chain = True
+ self.num_nodes = 1
+
+ def run_test(self):
+ self.log.info('Create connection that sends addrv2 messages')
+ addr_source = self.nodes[0].add_p2p_connection(P2PInterface())
+ msg = msg_addrv2()
+
+ self.log.info('Send too-large addrv2 message')
+ msg.addrs = ADDRS * 101
+ with self.nodes[0].assert_debug_log(['addrv2 message size = 1010']):
+ addr_source.send_and_ping(msg)
+
+ self.log.info('Check that addrv2 message content is relayed and added to addrman')
+ addr_receiver = self.nodes[0].add_p2p_connection(AddrReceiver())
+ msg.addrs = ADDRS
+ with self.nodes[0].assert_debug_log([
+ 'Added 10 addresses from 127.0.0.1: 0 tried',
+ 'received: addrv2 (131 bytes) peer=0',
+ 'sending addrv2 (131 bytes) peer=1',
+ ]):
+ addr_source.send_and_ping(msg)
+ self.nodes[0].setmocktime(int(time.time()) + 30 * 60)
+ addr_receiver.wait_for_addrv2()
+
+ assert addr_receiver.addrv2_received_and_checked
+
+
+if __name__ == '__main__':
+ AddrTest().main()
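Note: the byte counts asserted in the debug-log expectations follow from the BIP 155 encoding of the ten addresses above: each IPv4 entry takes 4 bytes of time, a 1-byte CompactSize for the service flags (9 fits in a single byte), a 1-byte network ID, a 1-byte address length, 4 address bytes and a 2-byte port, plus one CompactSize byte for the address count. The earlier 'addrv2 message size = 1010' line refers to the number of addresses (10 * 101), not bytes. Worked out:

    PER_IPV4_ENTRY = 4 + 1 + 1 + 1 + 4 + 2  # time, services, network id, addr length, addr, port
    assert 1 + 10 * PER_IPV4_ENTRY == 131   # matches 'received: addrv2 (131 bytes)' above
    assert 10 * 101 == 1010                 # oversized message: count of addresses, not bytes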
diff --git a/test/functional/p2p_blockfilters.py b/test/functional/p2p_blockfilters.py
index 6d947ac660..3250cbecf9 100755
--- a/test/functional/p2p_blockfilters.py
+++ b/test/functional/p2p_blockfilters.py
@@ -4,12 +4,13 @@
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Tests NODE_COMPACT_FILTERS (BIP 157/158).
-Tests that a node configured with -blockfilterindex and -peerblockfilters can serve
-cfheaders and cfcheckpts.
+Tests that a node configured with -blockfilterindex and -peerblockfilters signals
+NODE_COMPACT_FILTERS and can serve cfilters, cfheaders and cfcheckpts.
"""
from test_framework.messages import (
FILTER_TYPE_BASIC,
+ NODE_COMPACT_FILTERS,
hash256,
msg_getcfcheckpt,
msg_getcfheaders,
@@ -17,13 +18,10 @@ from test_framework.messages import (
ser_uint256,
uint256_from_str,
)
-from test_framework.mininode import P2PInterface
+from test_framework.p2p import P2PInterface
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
- connect_nodes,
- disconnect_nodes,
- wait_until,
)
class CFiltersClient(P2PInterface):
@@ -61,14 +59,22 @@ class CompactFiltersTest(BitcoinTestFramework):
self.sync_blocks(timeout=600)
# Stale blocks by disconnecting nodes 0 & 1, mining, then reconnecting
- disconnect_nodes(self.nodes[0], 1)
+ self.disconnect_nodes(0, 1)
self.nodes[0].generate(1)
- wait_until(lambda: self.nodes[0].getblockcount() == 1000)
+ self.wait_until(lambda: self.nodes[0].getblockcount() == 1000)
stale_block_hash = self.nodes[0].getblockhash(1000)
self.nodes[1].generate(1001)
- wait_until(lambda: self.nodes[1].getblockcount() == 2000)
+ self.wait_until(lambda: self.nodes[1].getblockcount() == 2000)
+
+ # Check that nodes have signalled NODE_COMPACT_FILTERS correctly.
+ assert node0.nServices & NODE_COMPACT_FILTERS != 0
+ assert node1.nServices & NODE_COMPACT_FILTERS == 0
+
+ # Check that the localservices is as expected.
+ assert int(self.nodes[0].getnetworkinfo()['localservices'], 16) & NODE_COMPACT_FILTERS != 0
+ assert int(self.nodes[1].getnetworkinfo()['localservices'], 16) & NODE_COMPACT_FILTERS == 0
self.log.info("get cfcheckpt on chain to be re-orged out.")
request = msg_getcfcheckpt(
@@ -82,7 +88,7 @@ class CompactFiltersTest(BitcoinTestFramework):
assert_equal(len(response.headers), 1)
self.log.info("Reorg node 0 to a new chain.")
- connect_nodes(self.nodes[0], 1)
+ self.connect_nodes(0, 1)
self.sync_blocks(timeout=600)
main_block_hash = self.nodes[0].getblockhash(1000)
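Note: the new assertions check a single service bit in two places: the nServices learned from the peer's version handshake and the node's own localservices, which getnetworkinfo reports as a hex string. A small illustration of the masking, using the BIP 157 value NODE_COMPACT_FILTERS = 1 << 6 (the example hex strings are illustrative):

    NODE_COMPACT_FILTERS = 1 << 6  # service bit 6, defined by BIP 157

    def signals_compact_filters(localservices_hex: str) -> bool:
        """True if the given localservices hex string has the compact-filters bit set."""
        return int(localservices_hex, 16) & NODE_COMPACT_FILTERS != 0

    assert signals_compact_filters("0000000000000449")       # bit 6 set
    assert not signals_compact_filters("0000000000000409")   # bit 6 clear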
diff --git a/test/functional/p2p_blocksonly.py b/test/functional/p2p_blocksonly.py
index 27e6b669f6..e80422d1cf 100755
--- a/test/functional/p2p_blocksonly.py
+++ b/test/functional/p2p_blocksonly.py
@@ -5,7 +5,7 @@
"""Test p2p blocksonly"""
from test_framework.messages import msg_tx, CTransaction, FromHex
-from test_framework.mininode import P2PInterface
+from test_framework.p2p import P2PInterface
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal
@@ -17,7 +17,7 @@ class P2PBlocksOnly(BitcoinTestFramework):
self.extra_args = [["-blocksonly"]]
def run_test(self):
- self.nodes[0].add_p2p_connection(P2PInterface())
+ block_relay_peer = self.nodes[0].add_p2p_connection(P2PInterface())
self.log.info('Check that txs from p2p are rejected and result in disconnect')
prevtx = self.nodes[0].getblock(self.nodes[0].getblockhash(1), 2)['tx'][0]
@@ -41,45 +41,49 @@ class P2PBlocksOnly(BitcoinTestFramework):
)['hex']
assert_equal(self.nodes[0].getnetworkinfo()['localrelay'], False)
with self.nodes[0].assert_debug_log(['transaction sent in violation of protocol peer=0']):
- self.nodes[0].p2p.send_message(msg_tx(FromHex(CTransaction(), sigtx)))
- self.nodes[0].p2p.wait_for_disconnect()
+ block_relay_peer.send_message(msg_tx(FromHex(CTransaction(), sigtx)))
+ block_relay_peer.wait_for_disconnect()
assert_equal(self.nodes[0].getmempoolinfo()['size'], 0)
# Remove the disconnected peer and add a new one.
del self.nodes[0].p2ps[0]
- self.nodes[0].add_p2p_connection(P2PInterface())
+ tx_relay_peer = self.nodes[0].add_p2p_connection(P2PInterface())
self.log.info('Check that txs from rpc are not rejected and relayed to other peers')
assert_equal(self.nodes[0].getpeerinfo()[0]['relaytxes'], True)
txid = self.nodes[0].testmempoolaccept([sigtx])[0]['txid']
with self.nodes[0].assert_debug_log(['received getdata for: wtx {} peer=1'.format(txid)]):
self.nodes[0].sendrawtransaction(sigtx)
- self.nodes[0].p2p.wait_for_tx(txid)
+ tx_relay_peer.wait_for_tx(txid)
assert_equal(self.nodes[0].getmempoolinfo()['size'], 1)
- self.log.info('Check that txs from forcerelay peers are not rejected and relayed to others')
- self.log.info("Restarting node 0 with forcerelay permission and blocksonly")
- self.restart_node(0, ["-persistmempool=0", "-whitelist=127.0.0.1", "-whitelistforcerelay", "-blocksonly"])
+ self.log.info('Check that txs from peers with relay-permission are not rejected and relayed to others')
+ self.log.info("Restarting node 0 with relay permission and blocksonly")
+ self.restart_node(0, ["-persistmempool=0", "-whitelist=relay@127.0.0.1", "-blocksonly", '-deprecatedrpc=whitelisted'])
assert_equal(self.nodes[0].getrawmempool(), [])
first_peer = self.nodes[0].add_p2p_connection(P2PInterface())
second_peer = self.nodes[0].add_p2p_connection(P2PInterface())
peer_1_info = self.nodes[0].getpeerinfo()[0]
- assert_equal(peer_1_info['whitelisted'], True)
- assert_equal(peer_1_info['permissions'], ['noban', 'forcerelay', 'relay', 'mempool', 'download'])
+ assert_equal(peer_1_info['permissions'], ['relay'])
peer_2_info = self.nodes[0].getpeerinfo()[1]
- assert_equal(peer_2_info['whitelisted'], True)
- assert_equal(peer_2_info['permissions'], ['noban', 'forcerelay', 'relay', 'mempool', 'download'])
+ assert_equal(peer_2_info['permissions'], ['relay'])
assert_equal(self.nodes[0].testmempoolaccept([sigtx])[0]['allowed'], True)
txid = self.nodes[0].testmempoolaccept([sigtx])[0]['txid']
- self.log.info('Check that the tx from forcerelay first_peer is relayed to others (ie.second_peer)')
+ self.log.info('Check that the tx from first_peer with relay-permission is relayed to others (i.e. second_peer)')
with self.nodes[0].assert_debug_log(["received getdata"]):
+ # Note that normally, first_peer would never send us transactions since we're a blocksonly node.
+ # By activating blocksonly, we explicitly tell our peers that they should not send us transactions,
+ # and Bitcoin Core respects that choice and will not send transactions.
+ # But if, for some reason, first_peer decides to relay transactions to us anyway, we should relay them to
+ # second_peer since we gave relay permission to first_peer.
+ # See https://github.com/bitcoin/bitcoin/issues/19943 for details.
first_peer.send_message(msg_tx(FromHex(CTransaction(), sigtx)))
- self.log.info('Check that the forcerelay peer is still connected after sending the transaction')
+ self.log.info('Check that the peer with relay-permission is still connected after sending the transaction')
assert_equal(first_peer.is_connected, True)
second_peer.wait_for_tx(txid)
assert_equal(self.nodes[0].getmempoolinfo()['size'], 1)
- self.log.info("Forcerelay peer's transaction is accepted and relayed")
+ self.log.info("Relay-permission peer's transaction is accepted and relayed")
if __name__ == '__main__':
diff --git a/test/functional/p2p_compactblocks.py b/test/functional/p2p_compactblocks.py
index 225d393e1b..611bffb25f 100755
--- a/test/functional/p2p_compactblocks.py
+++ b/test/functional/p2p_compactblocks.py
@@ -9,12 +9,12 @@ Version 2 compact blocks are post-segwit (wtxids)
"""
import random
-from test_framework.blocktools import create_block, create_coinbase, add_witness_commitment
+from test_framework.blocktools import create_block, NORMAL_GBT_REQUEST_PARAMS, add_witness_commitment
from test_framework.messages import BlockTransactions, BlockTransactionsRequest, calculate_shortid, CBlock, CBlockHeader, CInv, COutPoint, CTransaction, CTxIn, CTxInWitness, CTxOut, FromHex, HeaderAndShortIDs, msg_no_witness_block, msg_no_witness_blocktxn, msg_cmpctblock, msg_getblocktxn, msg_getdata, msg_getheaders, msg_headers, msg_inv, msg_sendcmpct, msg_sendheaders, msg_tx, msg_block, msg_blocktxn, MSG_BLOCK, MSG_CMPCT_BLOCK, MSG_WITNESS_FLAG, NODE_NETWORK, P2PHeaderAndShortIDs, PrefilledTransaction, ser_uint256, ToHex
-from test_framework.mininode import mininode_lock, P2PInterface
+from test_framework.p2p import p2p_lock, P2PInterface
from test_framework.script import CScript, OP_TRUE, OP_DROP
from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import assert_equal, wait_until, softfork_active
+from test_framework.util import assert_equal, softfork_active
# TestP2PConn: A peer we use to send messages to bitcoind, and store responses.
class TestP2PConn(P2PInterface):
@@ -48,12 +48,12 @@ class TestP2PConn(P2PInterface):
self.block_announced = True
self.announced_blockhashes.add(x.hash)
- # Requires caller to hold mininode_lock
+ # Requires caller to hold p2p_lock
def received_block_announcement(self):
return self.block_announced
def clear_block_announcement(self):
- with mininode_lock:
+ with p2p_lock:
self.block_announced = False
self.last_message.pop("inv", None)
self.last_message.pop("headers", None)
@@ -73,7 +73,7 @@ class TestP2PConn(P2PInterface):
def request_headers_and_sync(self, locator, hashstop=0):
self.clear_block_announcement()
self.get_headers(locator, hashstop)
- wait_until(self.received_block_announcement, timeout=30, lock=mininode_lock)
+ self.wait_until(self.received_block_announcement, timeout=30)
self.clear_block_announcement()
# Block until a block announcement for a particular block hash is
@@ -81,7 +81,7 @@ class TestP2PConn(P2PInterface):
def wait_for_block_announcement(self, block_hash, timeout=30):
def received_hash():
return (block_hash in self.announced_blockhashes)
- wait_until(received_hash, timeout=timeout, lock=mininode_lock)
+ self.wait_until(received_hash, timeout=timeout)
def send_await_disconnect(self, message, timeout=30):
"""Sends a message to the node and wait for disconnect.
@@ -89,7 +89,7 @@ class TestP2PConn(P2PInterface):
This is used when we want to send a message into the node that we expect
will get us disconnected, eg an invalid block."""
self.send_message(message)
- wait_until(lambda: not self.is_connected, timeout=timeout, lock=mininode_lock)
+ self.wait_for_disconnect(timeout)
class CompactBlocksTest(BitcoinTestFramework):
def set_test_params(self):
@@ -104,11 +104,7 @@ class CompactBlocksTest(BitcoinTestFramework):
self.skip_if_no_wallet()
def build_block_on_tip(self, node, segwit=False):
- height = node.getblockcount()
- tip = node.getbestblockhash()
- mtp = node.getblockheader(tip)['mediantime']
- block = create_block(int(tip, 16), create_coinbase(height + 1), mtp + 1)
- block.nVersion = 4
+ block = create_block(tmpl=node.getblocktemplate(NORMAL_GBT_REQUEST_PARAMS))
if segwit:
add_witness_commitment(block)
block.solve()
@@ -154,8 +150,8 @@ class CompactBlocksTest(BitcoinTestFramework):
# Make sure we get a SENDCMPCT message from our peer
def received_sendcmpct():
return (len(test_node.last_sendcmpct) > 0)
- wait_until(received_sendcmpct, timeout=30, lock=mininode_lock)
- with mininode_lock:
+ test_node.wait_until(received_sendcmpct, timeout=30)
+ with p2p_lock:
# Check that the first version received is the preferred one
assert_equal(test_node.last_sendcmpct[0].version, preferred_version)
# And that we receive versions down to 1.
@@ -170,7 +166,7 @@ class CompactBlocksTest(BitcoinTestFramework):
peer.wait_for_block_announcement(block_hash, timeout=30)
assert peer.block_announced
- with mininode_lock:
+ with p2p_lock:
assert predicate(peer), (
"block_hash={!r}, cmpctblock={!r}, inv={!r}".format(
block_hash, peer.last_message.get("cmpctblock", None), peer.last_message.get("inv", None)))
@@ -188,28 +184,21 @@ class CompactBlocksTest(BitcoinTestFramework):
test_node.request_headers_and_sync(locator=[tip])
# Now try a SENDCMPCT message with too-high version
- sendcmpct = msg_sendcmpct()
- sendcmpct.version = preferred_version + 1
- sendcmpct.announce = True
- test_node.send_and_ping(sendcmpct)
+ test_node.send_and_ping(msg_sendcmpct(announce=True, version=preferred_version+1))
check_announcement_of_new_block(node, test_node, lambda p: "cmpctblock" not in p.last_message)
# Headers sync before next test.
test_node.request_headers_and_sync(locator=[tip])
# Now try a SENDCMPCT message with valid version, but announce=False
- sendcmpct.version = preferred_version
- sendcmpct.announce = False
- test_node.send_and_ping(sendcmpct)
+ test_node.send_and_ping(msg_sendcmpct(announce=False, version=preferred_version))
check_announcement_of_new_block(node, test_node, lambda p: "cmpctblock" not in p.last_message)
# Headers sync before next test.
test_node.request_headers_and_sync(locator=[tip])
# Finally, try a SENDCMPCT message with announce=True
- sendcmpct.version = preferred_version
- sendcmpct.announce = True
- test_node.send_and_ping(sendcmpct)
+ test_node.send_and_ping(msg_sendcmpct(announce=True, version=preferred_version))
check_announcement_of_new_block(node, test_node, lambda p: "cmpctblock" in p.last_message)
# Try one more time (no headers sync should be needed!)
@@ -220,23 +209,17 @@ class CompactBlocksTest(BitcoinTestFramework):
check_announcement_of_new_block(node, test_node, lambda p: "cmpctblock" in p.last_message)
# Try one more time, after sending a version-1, announce=false message.
- sendcmpct.version = preferred_version - 1
- sendcmpct.announce = False
- test_node.send_and_ping(sendcmpct)
+ test_node.send_and_ping(msg_sendcmpct(announce=False, version=preferred_version-1))
check_announcement_of_new_block(node, test_node, lambda p: "cmpctblock" in p.last_message)
# Now turn off announcements
- sendcmpct.version = preferred_version
- sendcmpct.announce = False
- test_node.send_and_ping(sendcmpct)
+ test_node.send_and_ping(msg_sendcmpct(announce=False, version=preferred_version))
check_announcement_of_new_block(node, test_node, lambda p: "cmpctblock" not in p.last_message and "headers" in p.last_message)
if old_node is not None:
# Verify that a peer using an older protocol version can receive
# announcements from this node.
- sendcmpct.version = preferred_version - 1
- sendcmpct.announce = True
- old_node.send_and_ping(sendcmpct)
+ old_node.send_and_ping(msg_sendcmpct(announce=True, version=preferred_version-1))
# Header sync
old_node.request_headers_and_sync(locator=[tip])
check_announcement_of_new_block(node, old_node, lambda p: "cmpctblock" in p.last_message)
@@ -294,11 +277,11 @@ class CompactBlocksTest(BitcoinTestFramework):
block.rehash()
# Wait until the block was announced (via compact blocks)
- wait_until(lambda: "cmpctblock" in test_node.last_message, timeout=30, lock=mininode_lock)
+ test_node.wait_until(lambda: "cmpctblock" in test_node.last_message, timeout=30)
# Now fetch and check the compact block
header_and_shortids = None
- with mininode_lock:
+ with p2p_lock:
# Convert the on-the-wire representation to absolute indexes
header_and_shortids = HeaderAndShortIDs(test_node.last_message["cmpctblock"].header_and_shortids)
self.check_compactblock_construction_from_block(version, header_and_shortids, block_hash, block)
@@ -308,11 +291,11 @@ class CompactBlocksTest(BitcoinTestFramework):
inv = CInv(MSG_CMPCT_BLOCK, block_hash)
test_node.send_message(msg_getdata([inv]))
- wait_until(lambda: "cmpctblock" in test_node.last_message, timeout=30, lock=mininode_lock)
+ test_node.wait_until(lambda: "cmpctblock" in test_node.last_message, timeout=30)
# Now fetch and check the compact block
header_and_shortids = None
- with mininode_lock:
+ with p2p_lock:
# Convert the on-the-wire representation to absolute indexes
header_and_shortids = HeaderAndShortIDs(test_node.last_message["cmpctblock"].header_and_shortids)
self.check_compactblock_construction_from_block(version, header_and_shortids, block_hash, block)
@@ -378,7 +361,7 @@ class CompactBlocksTest(BitcoinTestFramework):
if announce == "inv":
test_node.send_message(msg_inv([CInv(MSG_BLOCK, block.sha256)]))
- wait_until(lambda: "getheaders" in test_node.last_message, timeout=30, lock=mininode_lock)
+ test_node.wait_until(lambda: "getheaders" in test_node.last_message, timeout=30)
test_node.send_header_for_blocks([block])
else:
test_node.send_header_for_blocks([block])
@@ -397,7 +380,7 @@ class CompactBlocksTest(BitcoinTestFramework):
test_node.send_and_ping(msg_cmpctblock(comp_block.to_p2p()))
assert_equal(int(node.getbestblockhash(), 16), block.hashPrevBlock)
# Expect a getblocktxn message.
- with mininode_lock:
+ with p2p_lock:
assert "getblocktxn" in test_node.last_message
absolute_indexes = test_node.last_message["getblocktxn"].block_txn_request.to_absolute()
assert_equal(absolute_indexes, [0]) # should be a coinbase request
@@ -439,7 +422,7 @@ class CompactBlocksTest(BitcoinTestFramework):
def test_getblocktxn_response(compact_block, peer, expected_result):
msg = msg_cmpctblock(compact_block.to_p2p())
peer.send_and_ping(msg)
- with mininode_lock:
+ with p2p_lock:
assert "getblocktxn" in peer.last_message
absolute_indexes = peer.last_message["getblocktxn"].block_txn_request.to_absolute()
assert_equal(absolute_indexes, expected_result)
@@ -504,13 +487,13 @@ class CompactBlocksTest(BitcoinTestFramework):
assert tx.hash in mempool
# Clear out last request.
- with mininode_lock:
+ with p2p_lock:
test_node.last_message.pop("getblocktxn", None)
# Send compact block
comp_block.initialize_from_block(block, prefill_list=[0], use_witness=with_witness)
test_tip_after_message(node, test_node, msg_cmpctblock(comp_block.to_p2p()), block.sha256)
- with mininode_lock:
+ with p2p_lock:
# Shouldn't have gotten a request for any transaction
assert "getblocktxn" not in test_node.last_message
@@ -537,7 +520,7 @@ class CompactBlocksTest(BitcoinTestFramework):
comp_block.initialize_from_block(block, prefill_list=[0], use_witness=(version == 2))
test_node.send_and_ping(msg_cmpctblock(comp_block.to_p2p()))
absolute_indexes = []
- with mininode_lock:
+ with p2p_lock:
assert "getblocktxn" in test_node.last_message
absolute_indexes = test_node.last_message["getblocktxn"].block_txn_request.to_absolute()
assert_equal(absolute_indexes, [6, 7, 8, 9, 10])
@@ -588,10 +571,10 @@ class CompactBlocksTest(BitcoinTestFramework):
num_to_request = random.randint(1, len(block.vtx))
msg.block_txn_request.from_absolute(sorted(random.sample(range(len(block.vtx)), num_to_request)))
test_node.send_message(msg)
- wait_until(lambda: "blocktxn" in test_node.last_message, timeout=10, lock=mininode_lock)
+ test_node.wait_until(lambda: "blocktxn" in test_node.last_message, timeout=10)
[tx.calc_sha256() for tx in block.vtx]
- with mininode_lock:
+ with p2p_lock:
assert_equal(test_node.last_message["blocktxn"].block_transactions.blockhash, int(block_hash, 16))
all_indices = msg.block_txn_request.to_absolute()
for index in all_indices:
@@ -611,11 +594,11 @@ class CompactBlocksTest(BitcoinTestFramework):
# allowed depth for a blocktxn response.
block_hash = node.getblockhash(current_height)
msg.block_txn_request = BlockTransactionsRequest(int(block_hash, 16), [0])
- with mininode_lock:
+ with p2p_lock:
test_node.last_message.pop("block", None)
test_node.last_message.pop("blocktxn", None)
test_node.send_and_ping(msg)
- with mininode_lock:
+ with p2p_lock:
test_node.last_message["block"].block.calc_sha256()
assert_equal(test_node.last_message["block"].block.sha256, int(block_hash, 16))
assert "blocktxn" not in test_node.last_message
@@ -628,21 +611,21 @@ class CompactBlocksTest(BitcoinTestFramework):
for _ in range(MAX_CMPCTBLOCK_DEPTH + 1):
test_node.clear_block_announcement()
new_blocks.append(node.generate(1)[0])
- wait_until(test_node.received_block_announcement, timeout=30, lock=mininode_lock)
+ test_node.wait_until(test_node.received_block_announcement, timeout=30)
test_node.clear_block_announcement()
test_node.send_message(msg_getdata([CInv(MSG_CMPCT_BLOCK, int(new_blocks[0], 16))]))
- wait_until(lambda: "cmpctblock" in test_node.last_message, timeout=30, lock=mininode_lock)
+ test_node.wait_until(lambda: "cmpctblock" in test_node.last_message, timeout=30)
test_node.clear_block_announcement()
node.generate(1)
- wait_until(test_node.received_block_announcement, timeout=30, lock=mininode_lock)
+ test_node.wait_until(test_node.received_block_announcement, timeout=30)
test_node.clear_block_announcement()
- with mininode_lock:
+ with p2p_lock:
test_node.last_message.pop("block", None)
test_node.send_message(msg_getdata([CInv(MSG_CMPCT_BLOCK, int(new_blocks[0], 16))]))
- wait_until(lambda: "block" in test_node.last_message, timeout=30, lock=mininode_lock)
- with mininode_lock:
+ test_node.wait_until(lambda: "block" in test_node.last_message, timeout=30)
+ with p2p_lock:
test_node.last_message["block"].block.calc_sha256()
assert_equal(test_node.last_message["block"].block.sha256, int(new_blocks[0], 16))
@@ -670,10 +653,10 @@ class CompactBlocksTest(BitcoinTestFramework):
# (to avoid fingerprinting attacks).
msg = msg_getblocktxn()
msg.block_txn_request = BlockTransactionsRequest(block.sha256, [0])
- with mininode_lock:
+ with p2p_lock:
test_node.last_message.pop("blocktxn", None)
test_node.send_and_ping(msg)
- with mininode_lock:
+ with p2p_lock:
assert "blocktxn" not in test_node.last_message
def test_end_to_end_block_relay(self, listeners):
@@ -689,8 +672,8 @@ class CompactBlocksTest(BitcoinTestFramework):
node.submitblock(ToHex(block))
for l in listeners:
- wait_until(lambda: "cmpctblock" in l.last_message, timeout=30, lock=mininode_lock)
- with mininode_lock:
+ l.wait_until(lambda: "cmpctblock" in l.last_message, timeout=30)
+ with p2p_lock:
for l in listeners:
l.last_message["cmpctblock"].header_and_shortids.header.calc_sha256()
assert_equal(l.last_message["cmpctblock"].header_and_shortids.header.sha256, block.sha256)
@@ -729,11 +712,7 @@ class CompactBlocksTest(BitcoinTestFramework):
node = self.nodes[0]
tip = node.getbestblockhash()
peer.get_headers(locator=[int(tip, 16)], hashstop=0)
-
- msg = msg_sendcmpct()
- msg.version = peer.cmpct_version
- msg.announce = True
- peer.send_and_ping(msg)
+ peer.send_and_ping(msg_sendcmpct(announce=True, version=peer.cmpct_version))
def test_compactblock_reconstruction_multiple_peers(self, stalling_peer, delivery_peer):
node = self.nodes[0]
@@ -747,7 +726,7 @@ class CompactBlocksTest(BitcoinTestFramework):
cmpct_block.initialize_from_block(block)
msg = msg_cmpctblock(cmpct_block.to_p2p())
peer.send_and_ping(msg)
- with mininode_lock:
+ with p2p_lock:
assert "getblocktxn" in peer.last_message
return block, cmpct_block
@@ -786,6 +765,9 @@ class CompactBlocksTest(BitcoinTestFramework):
assert_equal(int(node.getbestblockhash(), 16), block.sha256)
def run_test(self):
+ # Get the nodes out of IBD
+ self.nodes[0].generate(1)
+
# Setup the p2p connections
self.segwit_node = self.nodes[0].add_p2p_connection(TestP2PConn(cmpct_version=2))
self.old_node = self.nodes[0].add_p2p_connection(TestP2PConn(cmpct_version=1), services=NODE_NETWORK)
diff --git a/test/functional/p2p_disconnect_ban.py b/test/functional/p2p_disconnect_ban.py
index 09b9ebeb2d..3088a8aa46 100755
--- a/test/functional/p2p_disconnect_ban.py
+++ b/test/functional/p2p_disconnect_ban.py
@@ -9,8 +9,6 @@ from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
- connect_nodes,
- wait_until,
)
class DisconnectBanTest(BitcoinTestFramework):
@@ -20,15 +18,15 @@ class DisconnectBanTest(BitcoinTestFramework):
def run_test(self):
self.log.info("Connect nodes both way")
- connect_nodes(self.nodes[0], 1)
- connect_nodes(self.nodes[1], 0)
+ self.connect_nodes(0, 1)
+ self.connect_nodes(1, 0)
self.log.info("Test setban and listbanned RPCs")
self.log.info("setban: successfully ban single IP address")
assert_equal(len(self.nodes[1].getpeerinfo()), 2) # node1 should have 2 connections to node0 at this point
self.nodes[1].setban(subnet="127.0.0.1", command="add")
- wait_until(lambda: len(self.nodes[1].getpeerinfo()) == 0, timeout=10)
+ self.wait_until(lambda: len(self.nodes[1].getpeerinfo()) == 0, timeout=10)
assert_equal(len(self.nodes[1].getpeerinfo()), 0) # all nodes must be disconnected at this point
assert_equal(len(self.nodes[1].listbanned()), 1)
@@ -79,8 +77,8 @@ class DisconnectBanTest(BitcoinTestFramework):
# Clear ban lists
self.nodes[1].clearbanned()
self.log.info("Connect nodes both way")
- connect_nodes(self.nodes[0], 1)
- connect_nodes(self.nodes[1], 0)
+ self.connect_nodes(0, 1)
+ self.connect_nodes(1, 0)
self.log.info("Test disconnectnode RPCs")
@@ -95,18 +93,18 @@ class DisconnectBanTest(BitcoinTestFramework):
self.log.info("disconnectnode: successfully disconnect node by address")
address1 = self.nodes[0].getpeerinfo()[0]['addr']
self.nodes[0].disconnectnode(address=address1)
- wait_until(lambda: len(self.nodes[0].getpeerinfo()) == 1, timeout=10)
+ self.wait_until(lambda: len(self.nodes[0].getpeerinfo()) == 1, timeout=10)
assert not [node for node in self.nodes[0].getpeerinfo() if node['addr'] == address1]
self.log.info("disconnectnode: successfully reconnect node")
- connect_nodes(self.nodes[0], 1) # reconnect the node
+ self.connect_nodes(0, 1) # reconnect the node
assert_equal(len(self.nodes[0].getpeerinfo()), 2)
assert [node for node in self.nodes[0].getpeerinfo() if node['addr'] == address1]
self.log.info("disconnectnode: successfully disconnect node by node id")
id1 = self.nodes[0].getpeerinfo()[0]['id']
self.nodes[0].disconnectnode(nodeid=id1)
- wait_until(lambda: len(self.nodes[0].getpeerinfo()) == 1, timeout=10)
+ self.wait_until(lambda: len(self.nodes[0].getpeerinfo()) == 1, timeout=10)
assert not [node for node in self.nodes[0].getpeerinfo() if node['id'] == id1]
if __name__ == '__main__':
diff --git a/test/functional/p2p_dos_header_tree.py b/test/functional/p2p_dos_header_tree.py
index f8552cf53d..2349afa1ee 100755
--- a/test/functional/p2p_dos_header_tree.py
+++ b/test/functional/p2p_dos_header_tree.py
@@ -8,7 +8,7 @@ from test_framework.messages import (
CBlockHeader,
FromHex,
)
-from test_framework.mininode import (
+from test_framework.p2p import (
P2PInterface,
msg_headers,
)
@@ -46,8 +46,8 @@ class RejectLowDifficultyHeadersTest(BitcoinTestFramework):
self.headers_fork = [FromHex(CBlockHeader(), h) for h in self.headers_fork]
self.log.info("Feed all non-fork headers, including and up to the first checkpoint")
- self.nodes[0].add_p2p_connection(P2PInterface())
- self.nodes[0].p2p.send_and_ping(msg_headers(self.headers))
+ peer_checkpoint = self.nodes[0].add_p2p_connection(P2PInterface())
+ peer_checkpoint.send_and_ping(msg_headers(self.headers))
assert {
'height': 546,
'hash': '000000002a936ca763904c3c35fce2f3556c559c0214345d31b1bcebf76acb70',
@@ -57,14 +57,14 @@ class RejectLowDifficultyHeadersTest(BitcoinTestFramework):
self.log.info("Feed all fork headers (fails due to checkpoint)")
with self.nodes[0].assert_debug_log(['bad-fork-prior-to-checkpoint']):
- self.nodes[0].p2p.send_message(msg_headers(self.headers_fork))
- self.nodes[0].p2p.wait_for_disconnect()
+ peer_checkpoint.send_message(msg_headers(self.headers_fork))
+ peer_checkpoint.wait_for_disconnect()
self.log.info("Feed all fork headers (succeeds without checkpoint)")
# On node 0 it succeeds because checkpoints are disabled
self.restart_node(0, extra_args=['-nocheckpoints'])
- self.nodes[0].add_p2p_connection(P2PInterface())
- self.nodes[0].p2p.send_and_ping(msg_headers(self.headers_fork))
+ peer_no_checkpoint = self.nodes[0].add_p2p_connection(P2PInterface())
+ peer_no_checkpoint.send_and_ping(msg_headers(self.headers_fork))
assert {
"height": 2,
"hash": "00000000b0494bd6c3d5ff79c497cfce40831871cbf39b1bc28bd1dac817dc39",
@@ -73,8 +73,8 @@ class RejectLowDifficultyHeadersTest(BitcoinTestFramework):
} in self.nodes[0].getchaintips()
# On node 1 it succeeds because no checkpoint has been reached yet by a chain tip
- self.nodes[1].add_p2p_connection(P2PInterface())
- self.nodes[1].p2p.send_and_ping(msg_headers(self.headers_fork))
+ peer_before_checkpoint = self.nodes[1].add_p2p_connection(P2PInterface())
+ peer_before_checkpoint.send_and_ping(msg_headers(self.headers_fork))
assert {
"height": 2,
"hash": "00000000b0494bd6c3d5ff79c497cfce40831871cbf39b1bc28bd1dac817dc39",
diff --git a/test/functional/p2p_eviction.py b/test/functional/p2p_eviction.py
index b2b3a89aab..72a255991c 100755
--- a/test/functional/p2p_eviction.py
+++ b/test/functional/p2p_eviction.py
@@ -15,11 +15,11 @@ Therefore, this test is limited to the remaining protection criteria.
import time
-from test_framework.test_framework import BitcoinTestFramework
-from test_framework.mininode import P2PInterface, P2PDataStore
-from test_framework.util import assert_equal, wait_until
from test_framework.blocktools import create_block, create_coinbase
from test_framework.messages import CTransaction, FromHex, msg_pong, msg_tx
+from test_framework.p2p import P2PDataStore, P2PInterface
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import assert_equal
class SlowP2PDataStore(P2PDataStore):
@@ -92,7 +92,7 @@ class P2PEvict(BitcoinTestFramework):
for _ in range(8):
fastpeer = node.add_p2p_connection(P2PInterface())
current_peer += 1
- wait_until(lambda: "ping" in fastpeer.last_message, timeout=10)
+ self.wait_until(lambda: "ping" in fastpeer.last_message, timeout=10)
# Make sure by asking the node what the actual min pings are
peerinfo = node.getpeerinfo()
diff --git a/test/functional/p2p_feefilter.py b/test/functional/p2p_feefilter.py
index 0b51d8f4bb..51b7623fe3 100755
--- a/test/functional/p2p_feefilter.py
+++ b/test/functional/p2p_feefilter.py
@@ -5,26 +5,12 @@
"""Test processing of feefilter messages."""
from decimal import Decimal
-import time
from test_framework.messages import MSG_TX, MSG_WTX, msg_feefilter
-from test_framework.mininode import mininode_lock, P2PInterface
+from test_framework.p2p import P2PInterface, p2p_lock
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal
-
-
-def hashToHex(hash):
- return format(hash, '064x')
-
-
-# Wait up to 60 secs to see if the testnode has received all the expected invs
-def allInvsMatch(invsExpected, testnode):
- for _ in range(60):
- with mininode_lock:
- if (sorted(invsExpected) == sorted(testnode.txinvs)):
- return True
- time.sleep(1)
- return False
+from test_framework.wallet import MiniWallet
class FeefilterConn(P2PInterface):
@@ -34,7 +20,7 @@ class FeefilterConn(P2PInterface):
self.feefilter_received = True
def assert_feefilter_received(self, recv: bool):
- with mininode_lock:
+ with p2p_lock:
assert_equal(self.feefilter_received, recv)
@@ -46,10 +32,14 @@ class TestP2PConn(P2PInterface):
def on_inv(self, message):
for i in message.inv:
if (i.type == MSG_TX) or (i.type == MSG_WTX):
- self.txinvs.append(hashToHex(i.hash))
+ self.txinvs.append('{:064x}'.format(i.hash))
+
+ def wait_for_invs_to_match(self, invs_expected):
+ invs_expected.sort()
+ self.wait_until(lambda: invs_expected == sorted(self.txinvs))
def clear_invs(self):
- with mininode_lock:
+ with p2p_lock:
self.txinvs = []
@@ -61,10 +51,12 @@ class FeeFilterTest(BitcoinTestFramework):
# mempool and wallet feerate calculation based on GetFee
# rounding down 3 places, leading to stranded transactions.
# See issue #16499
- self.extra_args = [["-minrelaytxfee=0.00000100", "-mintxfee=0.00000100"]] * self.num_nodes
-
- def skip_test_if_missing_module(self):
- self.skip_if_no_wallet()
+ # grant noban permission to all peers to speed up tx relay / mempool sync
+ self.extra_args = [[
+ "-minrelaytxfee=0.00000100",
+ "-mintxfee=0.00000100",
+ "-whitelist=noban@127.0.0.1",
+ ]] * self.num_nodes
def run_test(self):
self.test_feefilter_forcerelay()
@@ -85,29 +77,28 @@ class FeeFilterTest(BitcoinTestFramework):
def test_feefilter(self):
node1 = self.nodes[1]
node0 = self.nodes[0]
+ miniwallet = MiniWallet(node1)
+ # Add enough mature utxos to the wallet, so that all txs spend confirmed coins
+ miniwallet.generate(5)
+ node1.generate(100)
conn = self.nodes[0].add_p2p_connection(TestP2PConn())
- # Test that invs are received by test connection for all txs at
- # feerate of .2 sat/byte
- node1.settxfee(Decimal("0.00000200"))
- txids = [node1.sendtoaddress(node1.getnewaddress(), 1) for _ in range(3)]
- assert allInvsMatch(txids, conn)
+ self.log.info("Test txs paying 0.2 sat/byte are received by test connection")
+ txids = [miniwallet.send_self_transfer(fee_rate=Decimal('0.00000200'), from_node=node1)['wtxid'] for _ in range(3)]
+ conn.wait_for_invs_to_match(txids)
conn.clear_invs()
- # Set a filter of .15 sat/byte on test connection
+ # Set a fee filter of 0.15 sat/byte on test connection
conn.send_and_ping(msg_feefilter(150))
- # Test that txs are still being received by test connection (paying .15 sat/byte)
- node1.settxfee(Decimal("0.00000150"))
- txids = [node1.sendtoaddress(node1.getnewaddress(), 1) for _ in range(3)]
- assert allInvsMatch(txids, conn)
+ self.log.info("Test txs paying 0.15 sat/byte are received by test connection")
+ txids = [miniwallet.send_self_transfer(fee_rate=Decimal('0.00000150'), from_node=node1)['wtxid'] for _ in range(3)]
+ conn.wait_for_invs_to_match(txids)
conn.clear_invs()
- # Change tx fee rate to .1 sat/byte and test they are no longer received
- # by the test connection
- node1.settxfee(Decimal("0.00000100"))
- [node1.sendtoaddress(node1.getnewaddress(), 1) for _ in range(3)]
+ self.log.info("Test txs paying 0.1 sat/byte are no longer received by test connection")
+ txids = [miniwallet.send_self_transfer(fee_rate=Decimal('0.00000100'), from_node=node1)['wtxid'] for _ in range(3)]
self.sync_mempools() # must be sure node 0 has received all txs
# Send one transaction from node0 that should be received, so that we
@@ -117,15 +108,15 @@ class FeeFilterTest(BitcoinTestFramework):
# to 35 entries in an inv, which means that when this next transaction
# is eligible for relay, the prior transactions from node1 are eligible
# as well.
- node0.settxfee(Decimal("0.00020000"))
- txids = [node0.sendtoaddress(node0.getnewaddress(), 1)]
- assert allInvsMatch(txids, conn)
+ txids = [miniwallet.send_self_transfer(fee_rate=Decimal('0.00020000'), from_node=node0)['wtxid'] for _ in range(1)]
+ conn.wait_for_invs_to_match(txids)
conn.clear_invs()
+ self.sync_mempools() # must be sure node 1 has received all txs
- # Remove fee filter and check that txs are received again
+ self.log.info("Remove fee filter and check txs are received again")
conn.send_and_ping(msg_feefilter(0))
- txids = [node1.sendtoaddress(node1.getnewaddress(), 1) for _ in range(3)]
- assert allInvsMatch(txids, conn)
+ txids = [miniwallet.send_self_transfer(fee_rate=Decimal('0.00020000'), from_node=node1)['wtxid'] for _ in range(3)]
+ conn.wait_for_invs_to_match(txids)
conn.clear_invs()
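Note: the feefilter payload is an integer fee rate in satoshis per 1000 virtual bytes, so msg_feefilter(150) above corresponds to the 0.15 sat/vbyte threshold mentioned in the log messages, while the MiniWallet fee rates are given in BTC per 1000 vbytes. A small conversion check (function name is illustrative):

    from decimal import Decimal

    COIN = 100_000_000  # satoshis per BTC

    def btc_per_kvb_to_sat_per_kvb(rate: Decimal) -> int:
        """Convert a BTC/kvB fee rate to the sat/kvB integer carried by feefilter."""
        return int(rate * COIN)

    assert btc_per_kvb_to_sat_per_kvb(Decimal('0.00000150')) == 150  # 0.15 sat/vbyte filter
    assert btc_per_kvb_to_sat_per_kvb(Decimal('0.00000200')) == 200  # 0.2 sat/vbyte txs pass it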
diff --git a/test/functional/p2p_filter.py b/test/functional/p2p_filter.py
index ce3856fc95..642a217047 100755
--- a/test/functional/p2p_filter.py
+++ b/test/functional/p2p_filter.py
@@ -19,7 +19,7 @@ from test_framework.messages import (
msg_mempool,
msg_version,
)
-from test_framework.mininode import P2PInterface, mininode_lock
+from test_framework.p2p import P2PInterface, p2p_lock
from test_framework.script import MAX_SCRIPT_ELEMENT_SIZE
from test_framework.test_framework import BitcoinTestFramework
@@ -60,22 +60,22 @@ class P2PBloomFilter(P2PInterface):
@property
def tx_received(self):
- with mininode_lock:
+ with p2p_lock:
return self._tx_received
@tx_received.setter
def tx_received(self, value):
- with mininode_lock:
+ with p2p_lock:
self._tx_received = value
@property
def merkleblock_received(self):
- with mininode_lock:
+ with p2p_lock:
return self._merkleblock_received
@merkleblock_received.setter
def merkleblock_received(self, value):
- with mininode_lock:
+ with p2p_lock:
self._merkleblock_received = value
@@ -131,7 +131,7 @@ class FilterTest(BitcoinTestFramework):
self.log.debug("Send a mempool msg after connecting and check that the tx is received")
self.nodes[0].add_p2p_connection(filter_peer)
filter_peer.send_and_ping(filter_peer.watch_filter_init)
- self.nodes[0].p2p.send_message(msg_mempool())
+ filter_peer.send_message(msg_mempool())
filter_peer.wait_for_tx(txid)
def test_frelay_false(self, filter_peer):
diff --git a/test/functional/p2p_fingerprint.py b/test/functional/p2p_fingerprint.py
index d743abe681..aaf862e6c8 100755
--- a/test/functional/p2p_fingerprint.py
+++ b/test/functional/p2p_fingerprint.py
@@ -12,7 +12,7 @@ import time
from test_framework.blocktools import (create_block, create_coinbase)
from test_framework.messages import CInv, MSG_BLOCK
-from test_framework.mininode import (
+from test_framework.p2p import (
P2PInterface,
msg_headers,
msg_block,
@@ -22,9 +22,9 @@ from test_framework.mininode import (
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
- wait_until,
)
+
class P2PFingerprintTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
@@ -102,12 +102,12 @@ class P2PFingerprintTest(BitcoinTestFramework):
# Check that getdata request for stale block succeeds
self.send_block_request(stale_hash, node0)
test_function = lambda: self.last_block_equals(stale_hash, node0)
- wait_until(test_function, timeout=3)
+ self.wait_until(test_function, timeout=3)
# Check that getheader request for stale block header succeeds
self.send_header_request(stale_hash, node0)
test_function = lambda: self.last_header_equals(stale_hash, node0)
- wait_until(test_function, timeout=3)
+ self.wait_until(test_function, timeout=3)
# Longest chain is extended so stale is much older than chain tip
self.nodes[0].setmocktime(0)
@@ -138,11 +138,11 @@ class P2PFingerprintTest(BitcoinTestFramework):
self.send_block_request(block_hash, node0)
test_function = lambda: self.last_block_equals(block_hash, node0)
- wait_until(test_function, timeout=3)
+ self.wait_until(test_function, timeout=3)
self.send_header_request(block_hash, node0)
test_function = lambda: self.last_header_equals(block_hash, node0)
- wait_until(test_function, timeout=3)
+ self.wait_until(test_function, timeout=3)
if __name__ == '__main__':
P2PFingerprintTest().main()
diff --git a/test/functional/p2p_getaddr_caching.py b/test/functional/p2p_getaddr_caching.py
index c9278eab92..2b75ad5175 100755
--- a/test/functional/p2p_getaddr_caching.py
+++ b/test/functional/p2p_getaddr_caching.py
@@ -5,37 +5,20 @@
"""Test addr response caching"""
import time
-from test_framework.messages import (
- CAddress,
- NODE_NETWORK,
- NODE_WITNESS,
- msg_addr,
- msg_getaddr,
-)
-from test_framework.mininode import (
+
+from test_framework.messages import msg_getaddr
+from test_framework.p2p import (
P2PInterface,
- mininode_lock
+ p2p_lock
)
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
)
+# As defined in net_processing.
MAX_ADDR_TO_SEND = 1000
-
-def gen_addrs(n):
- addrs = []
- for i in range(n):
- addr = CAddress()
- addr.time = int(time.time())
- addr.nServices = NODE_NETWORK | NODE_WITNESS
- # Use first octets to occupy different AddrMan buckets
- first_octet = i >> 8
- second_octet = i % 256
- addr.ip = "{}.{}.1.1".format(first_octet, second_octet)
- addr.port = 8333
- addrs.append(addr)
- return addrs
+MAX_PCT_ADDR_TO_SEND = 23
class AddrReceiver(P2PInterface):
@@ -44,7 +27,7 @@ class AddrReceiver(P2PInterface):
self.received_addrs = None
def get_received_addrs(self):
- with mininode_lock:
+ with p2p_lock:
return self.received_addrs
def on_addr(self, message):
@@ -62,18 +45,16 @@ class AddrTest(BitcoinTestFramework):
self.num_nodes = 1
def run_test(self):
- self.log.info('Create connection that sends and requests addr messages')
- addr_source = self.nodes[0].add_p2p_connection(P2PInterface())
-
- msg_send_addrs = msg_addr()
self.log.info('Fill peer AddrMan with a lot of records')
- # Since these addrs are sent from the same source, not all of them will be stored,
- # because we allocate a limited number of AddrMan buckets per addr source.
- total_addrs = 10000
- addrs = gen_addrs(total_addrs)
- for i in range(int(total_addrs/MAX_ADDR_TO_SEND)):
- msg_send_addrs.addrs = addrs[i * MAX_ADDR_TO_SEND:(i + 1) * MAX_ADDR_TO_SEND]
- addr_source.send_and_ping(msg_send_addrs)
+ for i in range(10000):
+ first_octet = i >> 8
+ second_octet = i % 256
+ a = "{}.{}.1.1".format(first_octet, second_octet)
+ self.nodes[0].addpeeraddress(a, 8333)
+
+ # Need to make sure we hit MAX_ADDR_TO_SEND records in the addr response later because
+ # only a fraction of all known addresses can be cached and returned.
+ assert(len(self.nodes[0].getnodeaddresses(0)) > int(MAX_ADDR_TO_SEND / (MAX_PCT_ADDR_TO_SEND / 100)))
responses = []
self.log.info('Send many addr requests within short time to receive same response')
@@ -89,7 +70,7 @@ class AddrTest(BitcoinTestFramework):
responses.append(addr_receiver.get_received_addrs())
for response in responses[1:]:
assert_equal(response, responses[0])
- assert(len(response) < MAX_ADDR_TO_SEND)
+ assert(len(response) == MAX_ADDR_TO_SEND)
cur_mock_time += 3 * 24 * 60 * 60
self.nodes[0].setmocktime(cur_mock_time)
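Note: the inequality asserted above guarantees that the cached addr response is capped at MAX_ADDR_TO_SEND rather than limited by percentage sampling: a getaddr reply contains at most MAX_PCT_ADDR_TO_SEND percent of the known addresses, so the 1000-entry cap is only reachable once more than roughly 4348 addresses are known, which the 10000 addpeeraddress calls comfortably provide. The same bound, written out:

    MAX_ADDR_TO_SEND = 1000
    MAX_PCT_ADDR_TO_SEND = 23

    # Smallest addrman size at which 23% of the known addresses exceeds the 1000-entry cap.
    threshold = int(MAX_ADDR_TO_SEND / (MAX_PCT_ADDR_TO_SEND / 100))
    assert threshold == 4347
    assert 10000 > threshold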
diff --git a/test/functional/p2p_getdata.py b/test/functional/p2p_getdata.py
index d1b11c2c61..89d68d5ba0 100755
--- a/test/functional/p2p_getdata.py
+++ b/test/functional/p2p_getdata.py
@@ -9,7 +9,7 @@ from test_framework.messages import (
CInv,
msg_getdata,
)
-from test_framework.mininode import P2PInterface
+from test_framework.p2p import P2PInterface
from test_framework.test_framework import BitcoinTestFramework
@@ -42,7 +42,7 @@ class GetdataTest(BitcoinTestFramework):
good_getdata = msg_getdata()
good_getdata.inv.append(CInv(t=2, h=best_block))
p2p_block_store.send_and_ping(good_getdata)
- p2p_block_store.wait_until(lambda: self.nodes[0].p2ps[0].blocks[best_block] == 1)
+ p2p_block_store.wait_until(lambda: p2p_block_store.blocks[best_block] == 1)
if __name__ == '__main__':
diff --git a/test/functional/p2p_invalid_block.py b/test/functional/p2p_invalid_block.py
index e280a62997..483f25f48c 100755
--- a/test/functional/p2p_invalid_block.py
+++ b/test/functional/p2p_invalid_block.py
@@ -14,7 +14,7 @@ import copy
from test_framework.blocktools import create_block, create_coinbase, create_tx_with_script
from test_framework.messages import COIN
-from test_framework.mininode import P2PDataStore
+from test_framework.p2p import P2PDataStore
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal
@@ -27,7 +27,7 @@ class InvalidBlockRequestTest(BitcoinTestFramework):
def run_test(self):
# Add p2p connection to node0
node = self.nodes[0] # convenience reference to the node
- node.add_p2p_connection(P2PDataStore())
+ peer = node.add_p2p_connection(P2PDataStore())
best_block = node.getblock(node.getbestblockhash())
tip = int(node.getbestblockhash(), 16)
@@ -42,7 +42,7 @@ class InvalidBlockRequestTest(BitcoinTestFramework):
# Save the coinbase for later
block1 = block
tip = block.sha256
- node.p2p.send_blocks_and_test([block1], node, success=True)
+ peer.send_blocks_and_test([block1], node, success=True)
self.log.info("Mature the block.")
node.generatetoaddress(100, node.get_deterministic_priv_key().address)
@@ -80,7 +80,7 @@ class InvalidBlockRequestTest(BitcoinTestFramework):
assert_equal(orig_hash, block2.rehash())
assert block2_orig.vtx != block2.vtx
- node.p2p.send_blocks_and_test([block2], node, success=False, reject_reason='bad-txns-duplicate')
+ peer.send_blocks_and_test([block2], node, success=False, reject_reason='bad-txns-duplicate')
# Check transactions for duplicate inputs (CVE-2018-17144)
self.log.info("Test duplicate input block.")
@@ -91,7 +91,7 @@ class InvalidBlockRequestTest(BitcoinTestFramework):
block2_dup.hashMerkleRoot = block2_dup.calc_merkle_root()
block2_dup.rehash()
block2_dup.solve()
- node.p2p.send_blocks_and_test([block2_dup], node, success=False, reject_reason='bad-txns-inputs-duplicate')
+ peer.send_blocks_and_test([block2_dup], node, success=False, reject_reason='bad-txns-inputs-duplicate')
self.log.info("Test very broken block.")
@@ -104,14 +104,14 @@ class InvalidBlockRequestTest(BitcoinTestFramework):
block3.rehash()
block3.solve()
- node.p2p.send_blocks_and_test([block3], node, success=False, reject_reason='bad-cb-amount')
+ peer.send_blocks_and_test([block3], node, success=False, reject_reason='bad-cb-amount')
# Complete testing of CVE-2012-2459 by sending the original block.
# It should be accepted even though it has the same hash as the mutated one.
self.log.info("Test accepting original block after rejecting its mutated version.")
- node.p2p.send_blocks_and_test([block2_orig], node, success=True, timeout=5)
+ peer.send_blocks_and_test([block2_orig], node, success=True, timeout=5)
# Update tip info
height += 1
@@ -131,7 +131,7 @@ class InvalidBlockRequestTest(BitcoinTestFramework):
block4.rehash()
block4.solve()
self.log.info("Test inflation by duplicating input")
- node.p2p.send_blocks_and_test([block4], node, success=False, reject_reason='bad-txns-inputs-duplicate')
+ peer.send_blocks_and_test([block4], node, success=False, reject_reason='bad-txns-inputs-duplicate')
if __name__ == '__main__':
InvalidBlockRequestTest().main()
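The duplicate-input cases above (CVE-2018-17144) all hinge on a transaction that lists the same outpoint twice in its vin. A minimal sketch of that construction, assuming the repository's test_framework modules are on the import path; the helper name is illustrative:

    from test_framework.messages import COutPoint, CTransaction, CTxIn, CTxOut

    def make_duplicate_input_tx(prev_txid_int, prev_n, value_sats, script_pubkey):
        # Two identical inputs -> a block containing this tx is rejected
        # with 'bad-txns-inputs-duplicate'.
        tx = CTransaction()
        dup_in = CTxIn(outpoint=COutPoint(prev_txid_int, prev_n))
        tx.vin = [dup_in, dup_in]
        tx.vout = [CTxOut(nValue=value_sats, scriptPubKey=script_pubkey)]
        tx.rehash()
        return tx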
diff --git a/test/functional/p2p_invalid_locator.py b/test/functional/p2p_invalid_locator.py
index 0155eb21f0..e4fc9fd178 100755
--- a/test/functional/p2p_invalid_locator.py
+++ b/test/functional/p2p_invalid_locator.py
@@ -6,7 +6,7 @@
"""
from test_framework.messages import msg_getheaders, msg_getblocks, MAX_LOCATOR_SZ
-from test_framework.mininode import P2PInterface
+from test_framework.p2p import P2PInterface
from test_framework.test_framework import BitcoinTestFramework
@@ -23,20 +23,20 @@ class InvalidLocatorTest(BitcoinTestFramework):
block_count = node.getblockcount()
for msg in [msg_getheaders(), msg_getblocks()]:
self.log.info('Wait for disconnect when sending {} hashes in locator'.format(MAX_LOCATOR_SZ + 1))
- node.add_p2p_connection(P2PInterface())
+ exceed_max_peer = node.add_p2p_connection(P2PInterface())
msg.locator.vHave = [int(node.getblockhash(i - 1), 16) for i in range(block_count, block_count - (MAX_LOCATOR_SZ + 1), -1)]
- node.p2p.send_message(msg)
- node.p2p.wait_for_disconnect()
+ exceed_max_peer.send_message(msg)
+ exceed_max_peer.wait_for_disconnect()
node.disconnect_p2ps()
self.log.info('Wait for response when sending {} hashes in locator'.format(MAX_LOCATOR_SZ))
- node.add_p2p_connection(P2PInterface())
+ within_max_peer = node.add_p2p_connection(P2PInterface())
msg.locator.vHave = [int(node.getblockhash(i - 1), 16) for i in range(block_count, block_count - (MAX_LOCATOR_SZ), -1)]
- node.p2p.send_message(msg)
+ within_max_peer.send_message(msg)
if type(msg) == msg_getheaders:
- node.p2p.wait_for_header(node.getbestblockhash())
+ within_max_peer.wait_for_header(node.getbestblockhash())
else:
- node.p2p.wait_for_block(int(node.getbestblockhash(), 16))
+ within_max_peer.wait_for_block(int(node.getbestblockhash(), 16))
if __name__ == '__main__':
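The two branches above differ only in how many hashes go into the locator: MAX_LOCATOR_SZ still gets an answer, one more gets the peer disconnected. A minimal sketch of building such a locator, assuming a running TestNode with at least MAX_LOCATOR_SZ + 1 blocks; the helper name is illustrative:

    from test_framework.messages import MAX_LOCATOR_SZ, msg_getheaders

    def build_locator_msg(node, exceed_max=False):
        count = MAX_LOCATOR_SZ + (1 if exceed_max else 0)
        tip = node.getblockcount()
        msg = msg_getheaders()
        # Most-recent-first list of block hashes, as in the test above.
        msg.locator.vHave = [int(node.getblockhash(h), 16)
                             for h in range(tip, tip - count, -1)]
        return msg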
diff --git a/test/functional/p2p_invalid_messages.py b/test/functional/p2p_invalid_messages.py
index d9a9ae5188..db72a361d9 100755
--- a/test/functional/p2p_invalid_messages.py
+++ b/test/functional/p2p_invalid_messages.py
@@ -4,6 +4,9 @@
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test node responses to invalid network messages."""
+import struct
+import time
+
from test_framework.messages import (
CBlockHeader,
CInv,
@@ -17,18 +20,19 @@ from test_framework.messages import (
MSG_TX,
ser_string,
)
-from test_framework.mininode import (
+from test_framework.p2p import (
P2PDataStore,
P2PInterface,
)
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
- wait_until,
+ hex_str_to_bytes,
)
VALID_DATA_LIMIT = MAX_PROTOCOL_MESSAGE_LENGTH - 5 # Account for the 5-byte length prefix
+
class msg_unrecognized:
"""Nonsensical message. Modeled after similar types in test_framework.messages."""
@@ -44,6 +48,11 @@ class msg_unrecognized:
return "{}(data={})".format(self.msgtype, self.str_data)
+class SenderOfAddrV2(P2PInterface):
+ def wait_for_sendaddrv2(self):
+ self.wait_until(lambda: 'sendaddrv2' in self.last_message)
+
+
class InvalidMessagesTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
@@ -55,6 +64,10 @@ class InvalidMessagesTest(BitcoinTestFramework):
self.test_checksum()
self.test_size()
self.test_msgtype()
+ self.test_addrv2_empty()
+ self.test_addrv2_no_addresses()
+ self.test_addrv2_too_long_address()
+ self.test_addrv2_unrecognized_network()
self.test_oversized_inv_msg()
self.test_oversized_getdata_msg()
self.test_oversized_headers_msg()
@@ -65,13 +78,13 @@ class InvalidMessagesTest(BitcoinTestFramework):
conn = self.nodes[0].add_p2p_connection(P2PDataStore())
# Create valid message
msg = conn.build_message(msg_ping(nonce=12345))
- cut_pos = 12 # Chosen at an arbitrary position within the header
+ cut_pos = 12 # Chosen at an arbitrary position within the header
# Send message in two pieces
- before = int(self.nodes[0].getnettotals()['totalbytesrecv'])
+ before = self.nodes[0].getnettotals()['totalbytesrecv']
conn.send_raw_message(msg[:cut_pos])
# Wait until node has processed the first half of the message
- wait_until(lambda: int(self.nodes[0].getnettotals()['totalbytesrecv']) != before)
- middle = int(self.nodes[0].getnettotals()['totalbytesrecv'])
+ self.wait_until(lambda: self.nodes[0].getnettotals()['totalbytesrecv'] != before)
+ middle = self.nodes[0].getnettotals()['totalbytesrecv']
# If this assert fails, we've hit an unlikely race
# where the test framework sent a message in between the two halves
assert_equal(middle, before + cut_pos)
@@ -82,7 +95,7 @@ class InvalidMessagesTest(BitcoinTestFramework):
def test_magic_bytes(self):
self.log.info("Test message with invalid magic bytes disconnects peer")
conn = self.nodes[0].add_p2p_connection(P2PDataStore())
- with self.nodes[0].assert_debug_log(['PROCESSMESSAGE: INVALID MESSAGESTART badmsg']):
+ with self.nodes[0].assert_debug_log(['HEADER ERROR - MESSAGESTART (badmsg, 2 bytes), received ffffffff']):
msg = conn.build_message(msg_unrecognized(str_data="d"))
# modify magic bytes
msg = b'\xff' * 4 + msg[4:]
@@ -101,12 +114,14 @@ class InvalidMessagesTest(BitcoinTestFramework):
msg = msg[:cut_len] + b'\xff' * 4 + msg[cut_len + 4:]
conn.send_raw_message(msg)
conn.sync_with_ping(timeout=1)
+ # Check that traffic is accounted for (24 bytes header + 2 bytes payload)
+ assert_equal(self.nodes[0].getpeerinfo()[0]['bytesrecv_per_msg']['*other*'], 26)
self.nodes[0].disconnect_p2ps()
def test_size(self):
self.log.info("Test message with oversized payload disconnects peer")
conn = self.nodes[0].add_p2p_connection(P2PDataStore())
- with self.nodes[0].assert_debug_log(['']):
+ with self.nodes[0].assert_debug_log(['HEADER ERROR - SIZE (badmsg, 4000001 bytes)']):
msg = msg_unrecognized(str_data="d" * (VALID_DATA_LIMIT + 1))
msg = conn.build_message(msg)
conn.send_raw_message(msg)
@@ -116,16 +131,95 @@ class InvalidMessagesTest(BitcoinTestFramework):
def test_msgtype(self):
self.log.info("Test message with invalid message type logs an error")
conn = self.nodes[0].add_p2p_connection(P2PDataStore())
- with self.nodes[0].assert_debug_log(['PROCESSMESSAGE: ERRORS IN HEADER']):
+ with self.nodes[0].assert_debug_log(['HEADER ERROR - COMMAND']):
msg = msg_unrecognized(str_data="d")
- msg.msgtype = b'\xff' * 12
msg = conn.build_message(msg)
# Modify msgtype
msg = msg[:7] + b'\x00' + msg[7 + 1:]
conn.send_raw_message(msg)
conn.sync_with_ping(timeout=1)
+ # Check that traffic is accounted for (24 bytes header + 2 bytes payload)
+ assert_equal(self.nodes[0].getpeerinfo()[0]['bytesrecv_per_msg']['*other*'], 26)
self.nodes[0].disconnect_p2ps()
+ def test_addrv2(self, label, required_log_messages, raw_addrv2):
+ node = self.nodes[0]
+ conn = node.add_p2p_connection(SenderOfAddrV2())
+
+ # Make sure bitcoind signals support for ADDRv2, otherwise this test
+ # will bombard an old node with messages it does not recognize which
+ # will produce unexpected results.
+ conn.wait_for_sendaddrv2()
+
+ self.log.info('Test addrv2: ' + label)
+
+ msg = msg_unrecognized(str_data=b'')
+ msg.msgtype = b'addrv2'
+ with node.assert_debug_log(required_log_messages):
+ # override serialize() which would include the length of the data
+ msg.serialize = lambda: raw_addrv2
+ conn.send_raw_message(conn.build_message(msg))
+ conn.sync_with_ping()
+
+ node.disconnect_p2ps()
+
+ def test_addrv2_empty(self):
+ self.test_addrv2('empty',
+ [
+ 'received: addrv2 (0 bytes)',
+ 'ProcessMessages(addrv2, 0 bytes): Exception',
+ 'end of data',
+ ],
+ b'')
+
+ def test_addrv2_no_addresses(self):
+ self.test_addrv2('no addresses',
+ [
+ 'received: addrv2 (1 bytes)',
+ ],
+ hex_str_to_bytes('00'))
+
+ def test_addrv2_too_long_address(self):
+ self.test_addrv2('too long address',
+ [
+ 'received: addrv2 (525 bytes)',
+ 'ProcessMessages(addrv2, 525 bytes): Exception',
+ 'Address too long: 513 > 512',
+ ],
+ hex_str_to_bytes(
+ '01' + # number of entries
+ '61bc6649' + # time, Fri Jan 9 02:54:25 UTC 2009
+ '00' + # service flags, COMPACTSIZE(NODE_NONE)
+ '01' + # network type (IPv4)
+ 'fd0102' + # address length (COMPACTSIZE(513))
+ 'ab' * 513 + # address
+ '208d')) # port
+
+ def test_addrv2_unrecognized_network(self):
+ now_hex = struct.pack('<I', int(time.time())).hex()
+ self.test_addrv2('unrecognized network',
+ [
+ 'received: addrv2 (25 bytes)',
+ 'IP 9.9.9.9 mapped',
+ 'Added 1 addresses',
+ ],
+ hex_str_to_bytes(
+ '02' + # number of entries
+ # this should be ignored without impeding acceptance of subsequent ones
+ now_hex + # time
+ '01' + # service flags, COMPACTSIZE(NODE_NETWORK)
+ '99' + # network type (unrecognized)
+ '02' + # address length (COMPACTSIZE(2))
+ 'ab' * 2 + # address
+ '208d' + # port
+ # this should be added:
+ now_hex + # time
+ '01' + # service flags, COMPACTSIZE(NODE_NETWORK)
+ '01' + # network type (IPv4)
+ '04' + # address length (COMPACTSIZE(4))
+ '09' * 4 + # address
+ '208d')) # port
+
def test_oversized_msg(self, msg, size):
msg_type = msg.msgtype.decode('ascii')
self.log.info("Test {} message of size {} is logged as misbehaving".format(msg_type, size))
diff --git a/test/functional/p2p_invalid_tx.py b/test/functional/p2p_invalid_tx.py
index c70a892463..489d903c21 100755
--- a/test/functional/p2p_invalid_tx.py
+++ b/test/functional/p2p_invalid_tx.py
@@ -13,11 +13,10 @@ from test_framework.messages import (
CTxIn,
CTxOut,
)
-from test_framework.mininode import P2PDataStore
+from test_framework.p2p import P2PDataStore
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
- wait_until,
)
from data import invalid_txs
@@ -62,7 +61,7 @@ class InvalidTxRequestTest(BitcoinTestFramework):
# Save the coinbase for later
block1 = block
tip = block.sha256
- node.p2p.send_blocks_and_test([block], node, success=True)
+ node.p2ps[0].send_blocks_and_test([block], node, success=True)
self.log.info("Mature the block.")
self.nodes[0].generatetoaddress(100, self.nodes[0].get_deterministic_priv_key().address)
@@ -73,7 +72,7 @@ class InvalidTxRequestTest(BitcoinTestFramework):
self.log.info("Testing invalid transaction: %s", BadTxTemplate.__name__)
template = BadTxTemplate(spend_block=block1)
tx = template.get_tx()
- node.p2p.send_txs_and_test(
+ node.p2ps[0].send_txs_and_test(
[tx], node, success=False,
expect_disconnect=template.expect_disconnect,
reject_reason=template.reject_reason,
@@ -122,7 +121,7 @@ class InvalidTxRequestTest(BitcoinTestFramework):
self.log.info('Send the orphans ... ')
# Send valid orphan txs from p2ps[0]
- node.p2p.send_txs_and_test([tx_orphan_1, tx_orphan_2_no_fee, tx_orphan_2_valid], node, success=False)
+ node.p2ps[0].send_txs_and_test([tx_orphan_1, tx_orphan_2_no_fee, tx_orphan_2_valid], node, success=False)
# Send invalid tx from p2ps[1]
node.p2ps[1].send_txs_and_test([tx_orphan_2_invalid], node, success=False)
@@ -131,7 +130,7 @@ class InvalidTxRequestTest(BitcoinTestFramework):
self.log.info('Send the withhold tx ... ')
with node.assert_debug_log(expected_msgs=["bad-txns-in-belowout"]):
- node.p2p.send_txs_and_test([tx_withhold], node, success=True)
+ node.p2ps[0].send_txs_and_test([tx_withhold], node, success=True)
# Transactions that should end up in the mempool
expected_mempool = {
@@ -146,7 +145,7 @@ class InvalidTxRequestTest(BitcoinTestFramework):
# tx_orphan_no_fee, because it has too low fee (p2ps[0] is not disconnected for relaying that tx)
# tx_orphan_invalid, because it has negative fee (p2ps[1] is disconnected for relaying that tx)
- wait_until(lambda: 1 == len(node.getpeerinfo()), timeout=12) # p2ps[1] is no longer connected
+ self.wait_until(lambda: 1 == len(node.getpeerinfo()), timeout=12) # p2ps[1] is no longer connected
assert_equal(expected_mempool, set(node.getrawmempool()))
self.log.info('Test orphan pool overflow')
@@ -156,14 +155,14 @@ class InvalidTxRequestTest(BitcoinTestFramework):
orphan_tx_pool[i].vout.append(CTxOut(nValue=11 * COIN, scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE))
with node.assert_debug_log(['mapOrphan overflow, removed 1 tx']):
- node.p2p.send_txs_and_test(orphan_tx_pool, node, success=False)
+ node.p2ps[0].send_txs_and_test(orphan_tx_pool, node, success=False)
rejected_parent = CTransaction()
rejected_parent.vin.append(CTxIn(outpoint=COutPoint(tx_orphan_2_invalid.sha256, 0)))
rejected_parent.vout.append(CTxOut(nValue=11 * COIN, scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE))
rejected_parent.rehash()
with node.assert_debug_log(['not keeping orphan with rejected parents {}'.format(rejected_parent.hash)]):
- node.p2p.send_txs_and_test([rejected_parent], node, success=False)
+ node.p2ps[0].send_txs_and_test([rejected_parent], node, success=False)
if __name__ == '__main__':
diff --git a/test/functional/p2p_leak.py b/test/functional/p2p_leak.py
index 2fc5245241..4b32d60db0 100755
--- a/test/functional/p2p_leak.py
+++ b/test/functional/p2p_leak.py
@@ -15,21 +15,19 @@ import time
from test_framework.messages import (
msg_getaddr,
msg_ping,
- msg_verack,
msg_version,
)
-from test_framework.mininode import mininode_lock, P2PInterface
+from test_framework.p2p import P2PInterface
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_greater_than_or_equal,
- wait_until,
)
DISCOURAGEMENT_THRESHOLD = 100
-class CLazyNode(P2PInterface):
+class LazyPeer(P2PInterface):
def __init__(self):
super().__init__()
self.unexpected_msg = False
@@ -42,6 +40,7 @@ class CLazyNode(P2PInterface):
def on_open(self):
self.ever_connected = True
+ # Does not respond to "version" with "verack"
def on_version(self, message): self.bad_message(message)
def on_verack(self, message): self.bad_message(message)
def on_inv(self, message): self.bad_message(message)
@@ -64,21 +63,8 @@ class CLazyNode(P2PInterface):
def on_blocktxn(self, message): self.bad_message(message)
-# Node that never sends a version. We'll use this to send a bunch of messages
-# anyway, and eventually get disconnected.
-class CNodeNoVersionMisbehavior(CLazyNode):
- pass
-
-
-# Node that never sends a version. This one just sits idle and hopes to receive
-# any message (it shouldn't!)
-class CNodeNoVersionIdle(CLazyNode):
- def __init__(self):
- super().__init__()
-
-
-# Node that sends a version but not a verack.
-class CNodeNoVerackIdle(CLazyNode):
+# Peer that sends a version but not a verack.
+class NoVerackIdlePeer(LazyPeer):
def __init__(self):
self.version_received = False
super().__init__()
@@ -97,6 +83,7 @@ class P2PVersionStore(P2PInterface):
version_received = None
def on_version(self, msg):
+ # Responds with an appropriate verack
super().on_version(msg)
self.version_received = msg
@@ -106,39 +93,45 @@ class P2PLeakTest(BitcoinTestFramework):
self.num_nodes = 1
def run_test(self):
- no_version_disconnect_node = self.nodes[0].add_p2p_connection(
- CNodeNoVersionMisbehavior(), send_version=False, wait_for_verack=False)
- no_version_idlenode = self.nodes[0].add_p2p_connection(CNodeNoVersionIdle(), send_version=False, wait_for_verack=False)
- no_verack_idlenode = self.nodes[0].add_p2p_connection(CNodeNoVerackIdle(), wait_for_verack=False)
+ # Peer that never sends a version. We will send a bunch of messages
+ # from this peer anyway and verify eventual disconnection.
+ no_version_disconnect_peer = self.nodes[0].add_p2p_connection(
+ LazyPeer(), send_version=False, wait_for_verack=False)
+
+ # Another peer that never sends a version, nor any other messages. It shouldn't receive anything from the node.
+ no_version_idle_peer = self.nodes[0].add_p2p_connection(LazyPeer(), send_version=False, wait_for_verack=False)
+
+ # Peer that sends a version but not a verack.
+ no_verack_idle_peer = self.nodes[0].add_p2p_connection(NoVerackIdlePeer(), wait_for_verack=False)
- # Send enough veracks without a message to reach the peer discouragement
- # threshold. This should get us disconnected.
+ # Send enough ping messages (any non-version message will do) prior to sending
+ # version to reach the peer discouragement threshold. This should get us disconnected.
for _ in range(DISCOURAGEMENT_THRESHOLD):
- no_version_disconnect_node.send_message(msg_verack())
+ no_version_disconnect_peer.send_message(msg_ping())
- # Wait until we got the verack in response to the version. Though, don't wait for the other node to receive the
+ # Wait until we got the verack in response to the version. Though, don't wait for the node to receive the
# verack, since we never sent one
- no_verack_idlenode.wait_for_verack()
+ no_verack_idle_peer.wait_for_verack()
- wait_until(lambda: no_version_disconnect_node.ever_connected, timeout=10, lock=mininode_lock)
- wait_until(lambda: no_version_idlenode.ever_connected, timeout=10, lock=mininode_lock)
- wait_until(lambda: no_verack_idlenode.version_received, timeout=10, lock=mininode_lock)
+ no_version_disconnect_peer.wait_until(lambda: no_version_disconnect_peer.ever_connected, check_connected=False)
+ no_version_idle_peer.wait_until(lambda: no_version_idle_peer.ever_connected)
+ no_verack_idle_peer.wait_until(lambda: no_verack_idle_peer.version_received)
- # Mine a block and make sure that it's not sent to the connected nodes
- self.nodes[0].generatetoaddress(1, self.nodes[0].get_deterministic_priv_key().address)
+ # Mine a block and make sure that it's not sent to the connected peers
+ self.nodes[0].generate(nblocks=1)
# Give the node enough time to possibly leak out a message
time.sleep(5)
- # Expect this node to be disconnected for misbehavior
- assert not no_version_disconnect_node.is_connected
+ # Expect this peer to be disconnected for misbehavior
+ assert not no_version_disconnect_peer.is_connected
self.nodes[0].disconnect_p2ps()
# Make sure no unexpected messages came in
- assert no_version_disconnect_node.unexpected_msg == False
- assert no_version_idlenode.unexpected_msg == False
- assert no_verack_idlenode.unexpected_msg == False
+ assert no_version_disconnect_peer.unexpected_msg == False
+ assert no_version_idle_peer.unexpected_msg == False
+ assert no_verack_idle_peer.unexpected_msg == False
self.log.info('Check that the version message does not leak the local address of the node')
p2p_version_store = self.nodes[0].add_p2p_connection(P2PVersionStore())
@@ -151,13 +144,13 @@ class P2PLeakTest(BitcoinTestFramework):
assert_equal(ver.nStartingHeight, 201)
assert_equal(ver.nRelay, 1)
- self.log.info('Check that old nodes are disconnected')
- p2p_old_node = self.nodes[0].add_p2p_connection(P2PInterface(), send_version=False, wait_for_verack=False)
+ self.log.info('Check that old peers are disconnected')
+ p2p_old_peer = self.nodes[0].add_p2p_connection(P2PInterface(), send_version=False, wait_for_verack=False)
old_version_msg = msg_version()
old_version_msg.nVersion = 31799
with self.nodes[0].assert_debug_log(['peer=4 using obsolete version 31799; disconnecting']):
- p2p_old_node.send_message(old_version_msg)
- p2p_old_node.wait_for_disconnect()
+ p2p_old_peer.send_message(old_version_msg)
+ p2p_old_peer.wait_for_disconnect()
if __name__ == '__main__':
diff --git a/test/functional/p2p_leak_tx.py b/test/functional/p2p_leak_tx.py
index da30ad5977..a45f792e81 100755
--- a/test/functional/p2p_leak_tx.py
+++ b/test/functional/p2p_leak_tx.py
@@ -5,11 +5,12 @@
"""Test that we don't leak txs to inbound peers that we haven't yet announced to"""
from test_framework.messages import msg_getdata, CInv, MSG_TX
-from test_framework.mininode import P2PDataStore
+from test_framework.p2p import p2p_lock, P2PDataStore
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
)
+from test_framework.wallet import MiniWallet
class P2PNode(P2PDataStore):
@@ -21,12 +22,12 @@ class P2PLeakTxTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
- def skip_test_if_missing_module(self):
- self.skip_if_no_wallet()
-
def run_test(self):
gen_node = self.nodes[0] # The block and tx generating node
- gen_node.generate(1)
+ miniwallet = MiniWallet(gen_node)
+ # Add enough mature utxos to the wallet, so that all txs spend confirmed coins
+ miniwallet.generate(1)
+ gen_node.generate(100)
inbound_peer = self.nodes[0].add_p2p_connection(P2PNode()) # An "attacking" inbound peer
@@ -34,18 +35,20 @@ class P2PLeakTxTest(BitcoinTestFramework):
self.log.info("Running test up to {} times.".format(MAX_REPEATS))
for i in range(MAX_REPEATS):
self.log.info('Run repeat {}'.format(i + 1))
- txid = gen_node.sendtoaddress(gen_node.getnewaddress(), 0.01)
+ txid = miniwallet.send_self_transfer(from_node=gen_node)['wtxid']
want_tx = msg_getdata()
want_tx.inv.append(CInv(t=MSG_TX, h=int(txid, 16)))
- inbound_peer.last_message.pop('notfound', None)
+ with p2p_lock:
+ inbound_peer.last_message.pop('notfound', None)
inbound_peer.send_and_ping(want_tx)
if inbound_peer.last_message.get('notfound'):
self.log.debug('tx {} was not yet announced to us.'.format(txid))
self.log.debug("node has responded with a notfound message. End test.")
assert_equal(inbound_peer.last_message['notfound'].vec[0].hash, int(txid, 16))
- inbound_peer.last_message.pop('notfound')
+ with p2p_lock:
+ inbound_peer.last_message.pop('notfound')
break
else:
self.log.debug('tx {} was already announced to us. Try test again.'.format(txid))
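The MiniWallet pattern introduced above removes the test's wallet dependency: one generated block gives the wallet a coinbase to spend, one hundred more mature it, and send_self_transfer() then submits a transaction spending that confirmed coin. A minimal sketch of the same sequence, assuming the test_framework modules are importable and `node` is a TestNode:

    from test_framework.wallet import MiniWallet

    def send_one_self_transfer(node):
        wallet = MiniWallet(node)
        wallet.generate(1)    # coinbase output owned by the MiniWallet
        node.generate(100)    # mature it so it can be spent
        return wallet.send_self_transfer(from_node=node)['wtxid']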
diff --git a/test/functional/p2p_nobloomfilter_messages.py b/test/functional/p2p_nobloomfilter_messages.py
index accc5dc23c..c2311cb197 100755
--- a/test/functional/p2p_nobloomfilter_messages.py
+++ b/test/functional/p2p_nobloomfilter_messages.py
@@ -12,7 +12,7 @@ Test that, when bloom filters are not enabled, peers are disconnected if:
"""
from test_framework.messages import msg_mempool, msg_filteradd, msg_filterload, msg_filterclear
-from test_framework.mininode import P2PInterface
+from test_framework.p2p import P2PInterface
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal
diff --git a/test/functional/p2p_node_network_limited.py b/test/functional/p2p_node_network_limited.py
index a2f6ea538c..b1a7ef6877 100755
--- a/test/functional/p2p_node_network_limited.py
+++ b/test/functional/p2p_node_network_limited.py
@@ -9,13 +9,10 @@ and that it responds to getdata requests for blocks correctly:
- send a block within 288 + 2 of the tip
- disconnect peers who request blocks older than that."""
from test_framework.messages import CInv, MSG_BLOCK, msg_getdata, msg_verack, NODE_NETWORK_LIMITED, NODE_WITNESS
-from test_framework.mininode import P2PInterface, mininode_lock
+from test_framework.p2p import P2PInterface
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
- disconnect_nodes,
- connect_nodes,
- wait_until,
)
@@ -28,7 +25,7 @@ class P2PIgnoreInv(P2PInterface):
self.firstAddrnServices = message.addrs[0].nServices
def wait_for_addr(self, timeout=5):
test_function = lambda: self.last_message.get("addr")
- wait_until(test_function, timeout=timeout, lock=mininode_lock)
+ self.wait_until(test_function, timeout=timeout)
def send_getdata_for_block(self, blockhash):
getdata_request = msg_getdata()
getdata_request.inv.append(CInv(MSG_BLOCK, int(blockhash, 16)))
@@ -41,9 +38,9 @@ class NodeNetworkLimitedTest(BitcoinTestFramework):
self.extra_args = [['-prune=550', '-addrmantest'], [], []]
def disconnect_all(self):
- disconnect_nodes(self.nodes[0], 1)
- disconnect_nodes(self.nodes[0], 2)
- disconnect_nodes(self.nodes[1], 2)
+ self.disconnect_nodes(0, 1)
+ self.disconnect_nodes(0, 2)
+ self.disconnect_nodes(1, 2)
def setup_network(self):
self.add_nodes(self.num_nodes, self.extra_args)
@@ -61,7 +58,7 @@ class NodeNetworkLimitedTest(BitcoinTestFramework):
assert_equal(int(self.nodes[0].getnetworkinfo()['localservices'], 16), expected_services)
self.log.info("Mine enough blocks to reach the NODE_NETWORK_LIMITED range.")
- connect_nodes(self.nodes[0], 1)
+ self.connect_nodes(0, 1)
blocks = self.nodes[1].generatetoaddress(292, self.nodes[1].get_deterministic_priv_key().address)
self.sync_blocks([self.nodes[0], self.nodes[1]])
@@ -86,7 +83,7 @@ class NodeNetworkLimitedTest(BitcoinTestFramework):
# connect unsynced node 2 with pruned NODE_NETWORK_LIMITED peer
# because node 2 is in IBD and node 0 is a NODE_NETWORK_LIMITED peer, sync must not be possible
- connect_nodes(self.nodes[0], 2)
+ self.connect_nodes(0, 2)
try:
self.sync_blocks([self.nodes[0], self.nodes[2]], timeout=5)
except:
@@ -95,7 +92,7 @@ class NodeNetworkLimitedTest(BitcoinTestFramework):
assert_equal(self.nodes[2].getblockheader(self.nodes[2].getbestblockhash())['height'], 0)
# now connect also to node 1 (non pruned)
- connect_nodes(self.nodes[1], 2)
+ self.connect_nodes(1, 2)
# sync must be possible
self.sync_blocks()
@@ -107,7 +104,7 @@ class NodeNetworkLimitedTest(BitcoinTestFramework):
self.nodes[0].generatetoaddress(10, self.nodes[0].get_deterministic_priv_key().address)
# connect node1 (non pruned) with node0 (pruned) and check if they can sync
- connect_nodes(self.nodes[0], 1)
+ self.connect_nodes(0, 1)
# sync must be possible, node 1 is no longer in IBD and should therefore connect to node 0 (NODE_NETWORK_LIMITED)
self.sync_blocks([self.nodes[0], self.nodes[1]])
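The depth rule exercised in this test is the one from the file's docstring: a NODE_NETWORK_LIMITED peer serves blocks no more than 288 below its tip, with a 2-block allowance, and disconnects peers that request anything deeper. A minimal plain-Python sketch of that check (the helper name is illustrative):

    NODE_NETWORK_LIMITED_MIN_BLOCKS = 288
    ALLOWANCE = 2  # slack for blocks arriving while the request is in flight

    def limited_peer_serves(tip_height, requested_height):
        depth = tip_height - requested_height
        return depth <= NODE_NETWORK_LIMITED_MIN_BLOCKS + ALLOWANCE

    assert limited_peer_serves(292, 2)        # depth 290: still served
    assert not limited_peer_serves(292, 1)    # depth 291: requester is disconnected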
diff --git a/test/functional/p2p_permissions.py b/test/functional/p2p_permissions.py
index 254352c816..653e3894af 100755
--- a/test/functional/p2p_permissions.py
+++ b/test/functional/p2p_permissions.py
@@ -13,7 +13,7 @@ from test_framework.messages import (
CTxInWitness,
FromHex,
)
-from test_framework.mininode import P2PDataStore
+from test_framework.p2p import P2PDataStore
from test_framework.script import (
CScript,
OP_TRUE,
@@ -22,9 +22,7 @@ from test_framework.test_node import ErrorMatch
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
- connect_nodes,
p2p_port,
- wait_until,
)
@@ -44,6 +42,13 @@ class P2PPermissionsTests(BitcoinTestFramework):
True)
self.checkpermission(
+ # check without deprecatedrpc=whitelisted
+ ["-whitelist=127.0.0.1"],
+ # Make sure the default values in the command line documentation match the ones here
+ ["relay", "noban", "mempool", "download"],
+ None)
+
+ self.checkpermission(
# no permission (even with forcerelay)
["-whitelist=@127.0.0.1", "-whitelistforcerelay=1"],
[],
@@ -82,6 +87,12 @@ class P2PPermissionsTests(BitcoinTestFramework):
False)
self.checkpermission(
+ # check without deprecatedrpc=whitelisted
+ ["-whitelist=noban,mempool@127.0.0.1", "-whitelistrelay"],
+ ["noban", "mempool", "download"],
+ None)
+
+ self.checkpermission(
# legacy whitelistforcerelay should be ignored
["-whitelist=noban,mempool@127.0.0.1", "-whitelistforcerelay"],
["noban", "mempool", "download"],
@@ -109,7 +120,7 @@ class P2PPermissionsTests(BitcoinTestFramework):
self.sync_all()
self.log.debug("Create a connection from a forcerelay peer that rebroadcasts raw txs")
- # A python mininode is needed to send the raw transaction directly. If a full node was used, it could only
+ # A test framework p2p connection is needed to send the raw transaction directly. If a full node was used, it could only
# rebroadcast via the inv-getdata mechanism. However, even for forcerelay connections, a full node would
# currently not request a txid that is already in the mempool.
self.restart_node(1, extra_args=["-whitelist=forcerelay@127.0.0.1"])
@@ -134,10 +145,10 @@ class P2PPermissionsTests(BitcoinTestFramework):
p2p_rebroadcast_wallet.send_txs_and_test([tx], self.nodes[1])
self.log.debug("Check that node[1] will send the tx to node[0] even though it is already in the mempool")
- connect_nodes(self.nodes[1], 0)
+ self.connect_nodes(1, 0)
with self.nodes[1].assert_debug_log(["Force relaying tx {} from peer=0".format(txid)]):
p2p_rebroadcast_wallet.send_txs_and_test([tx], self.nodes[1])
- wait_until(lambda: txid in self.nodes[0].getrawmempool())
+ self.wait_until(lambda: txid in self.nodes[0].getrawmempool())
self.log.debug("Check that node[1] will not send an invalid tx to node[0]")
tx.vout[0].nValue += 1
@@ -150,10 +161,15 @@ class P2PPermissionsTests(BitcoinTestFramework):
)
def checkpermission(self, args, expectedPermissions, whitelisted):
+ if whitelisted is not None:
+ args = [*args, '-deprecatedrpc=whitelisted']
self.restart_node(1, args)
- connect_nodes(self.nodes[0], 1)
+ self.connect_nodes(0, 1)
peerinfo = self.nodes[1].getpeerinfo()[0]
- assert_equal(peerinfo['whitelisted'], whitelisted)
+ if whitelisted is None:
+ assert 'whitelisted' not in peerinfo
+ else:
+ assert_equal(peerinfo['whitelisted'], whitelisted)
assert_equal(len(expectedPermissions), len(peerinfo['permissions']))
for p in expectedPermissions:
if not p in peerinfo['permissions']:
diff --git a/test/functional/p2p_ping.py b/test/functional/p2p_ping.py
index 5f5fd3e104..888e986fba 100755
--- a/test/functional/p2p_ping.py
+++ b/test/functional/p2p_ping.py
@@ -8,7 +8,7 @@
import time
from test_framework.messages import msg_pong
-from test_framework.mininode import P2PInterface
+from test_framework.p2p import P2PInterface
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal
diff --git a/test/functional/p2p_segwit.py b/test/functional/p2p_segwit.py
index 564e49f3d8..5c15538418 100755
--- a/test/functional/p2p_segwit.py
+++ b/test/functional/p2p_segwit.py
@@ -3,6 +3,7 @@
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test segwit transactions and blocks on P2P network."""
+from decimal import Decimal
import math
import random
import struct
@@ -25,6 +26,7 @@ from test_framework.messages import (
MSG_BLOCK,
MSG_TX,
MSG_WITNESS_FLAG,
+ MSG_WITNESS_TX,
MSG_WTX,
NODE_NETWORK,
NODE_WITNESS,
@@ -42,9 +44,9 @@ from test_framework.messages import (
uint256_from_str,
FromHex,
)
-from test_framework.mininode import (
+from test_framework.p2p import (
P2PInterface,
- mininode_lock,
+ p2p_lock,
)
from test_framework.script import (
CScript,
@@ -53,6 +55,7 @@ from test_framework.script import (
MAX_SCRIPT_ELEMENT_SIZE,
OP_0,
OP_1,
+ OP_2,
OP_16,
OP_2DROP,
OP_CHECKMULTISIG,
@@ -78,17 +81,13 @@ from test_framework.script import (
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
- connect_nodes,
- disconnect_nodes,
softfork_active,
hex_str_to_bytes,
assert_raises_rpc_error,
- wait_until,
)
# The versionbit bit used to signal activation of SegWit
VB_WITNESS_BIT = 1
-VB_PERIOD = 144
VB_TOP_BITS = 0x20000000
MAX_SIGOP_COST = 80000
@@ -153,8 +152,8 @@ class TestP2PConn(P2PInterface):
self.lastgetdata = []
self.wtxidrelay = wtxidrelay
- # Avoid sending out msg_getdata in the mininode thread as a reply to invs.
- # They are not needed and would only lead to races because we send msg_getdata out in the test thread
+ # Don't send getdata message replies to invs automatically.
+ # We'll send the getdata messages explicitly in the test logic.
def on_inv(self, message):
pass
@@ -177,7 +176,7 @@ class TestP2PConn(P2PInterface):
if success:
# sanity check
assert (self.wtxidrelay and use_wtxid) or (not self.wtxidrelay and not use_wtxid)
- with mininode_lock:
+ with p2p_lock:
self.last_message.pop("getdata", None)
if use_wtxid:
wtxid = tx.calc_sha256(True)
@@ -195,7 +194,7 @@ class TestP2PConn(P2PInterface):
assert not self.last_message.get("getdata")
def announce_block_and_wait_for_getdata(self, block, use_header, timeout=60):
- with mininode_lock:
+ with p2p_lock:
self.last_message.pop("getdata", None)
self.last_message.pop("getheaders", None)
msg = msg_headers()
@@ -209,7 +208,7 @@ class TestP2PConn(P2PInterface):
self.wait_for_getdata([block.sha256])
def request_block(self, blockhash, inv_type, timeout=60):
- with mininode_lock:
+ with p2p_lock:
self.last_message.pop("block", None)
self.send_message(msg_getdata(inv=[CInv(inv_type, blockhash)]))
self.wait_for_block(blockhash, timeout)
@@ -232,8 +231,8 @@ class SegWitTest(BitcoinTestFramework):
def setup_network(self):
self.setup_nodes()
- connect_nodes(self.nodes[0], 1)
- connect_nodes(self.nodes[0], 2)
+ self.connect_nodes(0, 1)
+ self.connect_nodes(0, 2)
self.sync_all()
# Helper functions
@@ -497,7 +496,7 @@ class SegWitTest(BitcoinTestFramework):
# node2 doesn't need to be connected for this test.
# (If it's connected, node0 may propagate an invalid block to it over
# compact blocks and the nodes would have inconsistent tips.)
- disconnect_nodes(self.nodes[0], 2)
+ self.disconnect_nodes(0, 2)
# Create two outputs, a p2wsh and p2sh-p2wsh
witness_program = CScript([OP_TRUE])
@@ -559,7 +558,7 @@ class SegWitTest(BitcoinTestFramework):
# TODO: support multiple acceptable reject reasons.
test_witness_block(self.nodes[0], self.test_node, block, accepted=False, with_witness=False)
- connect_nodes(self.nodes[0], 2)
+ self.connect_nodes(0, 2)
self.utxo.pop(0)
self.utxo.append(UTXO(txid, 2, value))
@@ -695,13 +694,13 @@ class SegWitTest(BitcoinTestFramework):
if not self.segwit_active:
# Just check mempool acceptance, but don't add the transaction to the mempool, since witness is disallowed
# in blocks and the tx is impossible to mine right now.
- assert_equal(self.nodes[0].testmempoolaccept([tx3.serialize_with_witness().hex()]), [{'txid': tx3.hash, 'allowed': True}])
+ assert_equal(self.nodes[0].testmempoolaccept([tx3.serialize_with_witness().hex()]), [{'txid': tx3.hash, 'allowed': True, 'vsize': tx3.get_vsize(), 'fees': { 'base': Decimal('0.00001000')}}])
# Create the same output as tx3, but by replacing tx
tx3_out = tx3.vout[0]
tx3 = tx
tx3.vout = [tx3_out]
tx3.rehash()
- assert_equal(self.nodes[0].testmempoolaccept([tx3.serialize_with_witness().hex()]), [{'txid': tx3.hash, 'allowed': True}])
+ assert_equal(self.nodes[0].testmempoolaccept([tx3.serialize_with_witness().hex()]), [{'txid': tx3.hash, 'allowed': True, 'vsize': tx3.get_vsize(), 'fees': { 'base': Decimal('0.00011000')}}])
test_transaction_acceptance(self.nodes[0], self.test_node, tx3, with_witness=True, accepted=True)
self.nodes[0].generate(1)
@@ -1400,7 +1399,11 @@ class SegWitTest(BitcoinTestFramework):
assert_equal(len(self.nodes[1].getrawmempool()), 0)
for version in list(range(OP_1, OP_16 + 1)) + [OP_0]:
# First try to spend to a future version segwit script_pubkey.
- script_pubkey = CScript([CScriptOp(version), witness_hash])
+ if version == OP_1:
+ # Don't use 32-byte v1 witness (used by Taproot; see BIP 341)
+ script_pubkey = CScript([CScriptOp(version), witness_hash + b'\x00'])
+ else:
+ script_pubkey = CScript([CScriptOp(version), witness_hash])
tx.vin = [CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b"")]
tx.vout = [CTxOut(self.utxo[0].nValue - 1000, script_pubkey)]
tx.rehash()
@@ -1413,9 +1416,9 @@ class SegWitTest(BitcoinTestFramework):
self.sync_blocks()
assert len(self.nodes[0].getrawmempool()) == 0
- # Finally, verify that version 0 -> version 1 transactions
+ # Finally, verify that version 0 -> version 2 transactions
# are standard
- script_pubkey = CScript([CScriptOp(OP_1), witness_hash])
+ script_pubkey = CScript([CScriptOp(OP_2), witness_hash])
tx2 = CTransaction()
tx2.vin = [CTxIn(COutPoint(tx.sha256, 0), b"")]
tx2.vout = [CTxOut(tx.vout[0].nValue - 1000, script_pubkey)]
@@ -1940,7 +1943,7 @@ class SegWitTest(BitcoinTestFramework):
"""Test the behavior of starting up a segwit-aware node after the softfork has activated."""
self.restart_node(2, extra_args=["-segwitheight={}".format(SEGWIT_HEIGHT)])
- connect_nodes(self.nodes[0], 2)
+ self.connect_nodes(0, 2)
# We reconnect more than 100 blocks, give it plenty of time
self.sync_blocks(timeout=240)
@@ -2096,14 +2099,14 @@ class SegWitTest(BitcoinTestFramework):
tx = FromHex(CTransaction(), raw)
assert_raises_rpc_error(-22, "TX decode failed", self.nodes[0].decoderawtransaction, serialize_with_bogus_witness(tx).hex())
with self.nodes[0].assert_debug_log(['Superfluous witness record']):
- self.nodes[0].p2p.send_and_ping(msg_bogus_tx(tx))
+ self.test_node.send_and_ping(msg_bogus_tx(tx))
raw = self.nodes[0].signrawtransactionwithwallet(raw)
assert raw['complete']
raw = raw['hex']
tx = FromHex(CTransaction(), raw)
assert_raises_rpc_error(-22, "TX decode failed", self.nodes[0].decoderawtransaction, serialize_with_bogus_witness(tx).hex())
with self.nodes[0].assert_debug_log(['Unknown transaction optional data']):
- self.nodes[0].p2p.send_and_ping(msg_bogus_tx(tx))
+ self.test_node.send_and_ping(msg_bogus_tx(tx))
@subtest # type: ignore
def test_wtxid_relay(self):
@@ -2114,7 +2117,7 @@ class SegWitTest(BitcoinTestFramework):
# Check wtxidrelay feature negotiation message through connecting a new peer
def received_wtxidrelay():
return (len(self.wtx_node.last_wtxidrelay) > 0)
- wait_until(received_wtxidrelay, timeout=60, lock=mininode_lock)
+ self.wtx_node.wait_until(received_wtxidrelay)
# Create a Segwit output from the latest UTXO
# and announce it to the network
@@ -2138,27 +2141,27 @@ class SegWitTest(BitcoinTestFramework):
# Announce Segwit transaction with wtxid
# and wait for getdata
self.wtx_node.announce_tx_and_wait_for_getdata(tx2, use_wtxid=True)
- with mininode_lock:
+ with p2p_lock:
lgd = self.wtx_node.lastgetdata[:]
assert_equal(lgd, [CInv(MSG_WTX, tx2.calc_sha256(True))])
# Announce Segwit transaction from non wtxidrelay peer
# and wait for getdata
self.tx_node.announce_tx_and_wait_for_getdata(tx2, use_wtxid=False)
- with mininode_lock:
+ with p2p_lock:
lgd = self.tx_node.lastgetdata[:]
assert_equal(lgd, [CInv(MSG_TX|MSG_WITNESS_FLAG, tx2.sha256)])
# Send tx2 through; it's an orphan so won't be accepted
- with mininode_lock:
+ with p2p_lock:
self.wtx_node.last_message.pop("getdata", None)
test_transaction_acceptance(self.nodes[0], self.wtx_node, tx2, with_witness=True, accepted=False)
# Expect a request for parent (tx) by txid despite use of WTX peer
self.wtx_node.wait_for_getdata([tx.sha256], 60)
- with mininode_lock:
+ with p2p_lock:
lgd = self.wtx_node.lastgetdata[:]
- assert_equal(lgd, [CInv(MSG_TX|MSG_WITNESS_FLAG, tx.sha256)])
+ assert_equal(lgd, [CInv(MSG_WITNESS_TX, tx.sha256)])
# Send tx through
test_transaction_acceptance(self.nodes[0], self.wtx_node, tx, with_witness=False, accepted=True)
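The version-loop change above keeps the test's future-version outputs from colliding with Taproot: a version-1 program of exactly 32 bytes is a BIP 341 output, so the v1 case is padded to 33 bytes while every other version keeps the plain 32-byte hash. A minimal sketch of that scriptPubKey construction, assuming the test_framework script helpers are importable:

    from test_framework.script import CScript, CScriptOp, OP_1

    def future_witness_script_pubkey(version_opcode, witness_hash_32):
        program = witness_hash_32
        if version_opcode == OP_1:
            # Pad to 33 bytes so this is not a 32-byte Taproot (BIP 341) program.
            program += b'\x00'
        return CScript([CScriptOp(version_opcode), program])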
diff --git a/test/functional/p2p_sendheaders.py b/test/functional/p2p_sendheaders.py
index 126a46bd53..04e6ec4172 100755
--- a/test/functional/p2p_sendheaders.py
+++ b/test/functional/p2p_sendheaders.py
@@ -87,11 +87,11 @@ e. Announce one more that doesn't connect.
"""
from test_framework.blocktools import create_block, create_coinbase
from test_framework.messages import CInv
-from test_framework.mininode import (
+from test_framework.p2p import (
CBlockHeader,
NODE_WITNESS,
P2PInterface,
- mininode_lock,
+ p2p_lock,
MSG_BLOCK,
msg_block,
msg_getblocks,
@@ -104,7 +104,6 @@ from test_framework.mininode import (
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
- wait_until,
)
DIRECT_FETCH_RESPONSE_TIME = 0.05
@@ -147,7 +146,7 @@ class BaseNode(P2PInterface):
def wait_for_block_announcement(self, block_hash, timeout=60):
test_function = lambda: self.last_blockhash_announced == block_hash
- wait_until(test_function, timeout=timeout, lock=mininode_lock)
+ self.wait_until(test_function, timeout=timeout)
def on_inv(self, message):
self.block_announced = True
@@ -163,7 +162,7 @@ class BaseNode(P2PInterface):
self.last_blockhash_announced = message.headers[-1].sha256
def clear_block_announcements(self):
- with mininode_lock:
+ with p2p_lock:
self.block_announced = False
self.last_message.pop("inv", None)
self.last_message.pop("headers", None)
@@ -174,8 +173,8 @@ class BaseNode(P2PInterface):
"""Test whether the last headers announcements received are right.
Headers may be announced across more than one message."""
test_function = lambda: (len(self.recent_headers_announced) >= len(headers))
- wait_until(test_function, timeout=60, lock=mininode_lock)
- with mininode_lock:
+ self.wait_until(test_function)
+ with p2p_lock:
assert_equal(self.recent_headers_announced, headers)
self.block_announced = False
self.last_message.pop("headers", None)
@@ -186,9 +185,9 @@ class BaseNode(P2PInterface):
inv should be a list of block hashes."""
test_function = lambda: self.block_announced
- wait_until(test_function, timeout=60, lock=mininode_lock)
+ self.wait_until(test_function)
- with mininode_lock:
+ with p2p_lock:
compare_inv = []
if "inv" in self.last_message:
compare_inv = [x.hash for x in self.last_message["inv"].inv]
@@ -298,7 +297,7 @@ class SendHeadersTest(BitcoinTestFramework):
test_node.send_header_for_blocks([new_block])
test_node.wait_for_getdata([new_block.sha256])
test_node.send_and_ping(msg_block(new_block)) # make sure this block is processed
- wait_until(lambda: inv_node.block_announced, timeout=60, lock=mininode_lock)
+ inv_node.wait_until(lambda: inv_node.block_announced)
inv_node.clear_block_announcements()
test_node.clear_block_announcements()
@@ -456,7 +455,7 @@ class SendHeadersTest(BitcoinTestFramework):
test_node.send_header_for_blocks(blocks)
test_node.sync_with_ping()
# should not have received any getdata messages
- with mininode_lock:
+ with p2p_lock:
assert "getdata" not in test_node.last_message
# This time, direct fetch should work
@@ -494,7 +493,7 @@ class SendHeadersTest(BitcoinTestFramework):
test_node.last_message.pop("getdata", None)
test_node.send_header_for_blocks(blocks[0:1])
test_node.sync_with_ping()
- with mininode_lock:
+ with p2p_lock:
assert "getdata" not in test_node.last_message
# Announcing one more block on fork should trigger direct fetch for
@@ -513,7 +512,7 @@ class SendHeadersTest(BitcoinTestFramework):
test_node.last_message.pop("getdata", None)
test_node.send_header_for_blocks(blocks[18:19])
test_node.sync_with_ping()
- with mininode_lock:
+ with p2p_lock:
assert "getdata" not in test_node.last_message
self.log.info("Part 4: success!")
@@ -536,7 +535,7 @@ class SendHeadersTest(BitcoinTestFramework):
block_time += 1
height += 1
# Send the header of the second block -> this won't connect.
- with mininode_lock:
+ with p2p_lock:
test_node.last_message.pop("getheaders", None)
test_node.send_header_for_blocks([blocks[1]])
test_node.wait_for_getheaders()
@@ -559,7 +558,7 @@ class SendHeadersTest(BitcoinTestFramework):
for i in range(1, MAX_UNCONNECTING_HEADERS):
# Send a header that doesn't connect, check that we get a getheaders.
- with mininode_lock:
+ with p2p_lock:
test_node.last_message.pop("getheaders", None)
test_node.send_header_for_blocks([blocks[i]])
test_node.wait_for_getheaders()
@@ -574,7 +573,7 @@ class SendHeadersTest(BitcoinTestFramework):
# before we get disconnected. Should be 5*MAX_UNCONNECTING_HEADERS
for i in range(5 * MAX_UNCONNECTING_HEADERS - 1):
# Send a header that doesn't connect, check that we get a getheaders.
- with mininode_lock:
+ with p2p_lock:
test_node.last_message.pop("getheaders", None)
test_node.send_header_for_blocks([blocks[i % len(blocks)]])
test_node.wait_for_getheaders()
diff --git a/test/functional/p2p_timeouts.py b/test/functional/p2p_timeouts.py
index 5a4fa42988..ce12ce26ce 100755
--- a/test/functional/p2p_timeouts.py
+++ b/test/functional/p2p_timeouts.py
@@ -24,7 +24,7 @@
from time import sleep
from test_framework.messages import msg_ping
-from test_framework.mininode import P2PInterface
+from test_framework.p2p import P2PInterface
from test_framework.test_framework import BitcoinTestFramework
diff --git a/test/functional/p2p_tx_download.py b/test/functional/p2p_tx_download.py
index 3ea1c6e5e7..16d9302db8 100755
--- a/test/functional/p2p_tx_download.py
+++ b/test/functional/p2p_tx_download.py
@@ -16,14 +16,13 @@ from test_framework.messages import (
msg_inv,
msg_notfound,
)
-from test_framework.mininode import (
+from test_framework.p2p import (
P2PInterface,
- mininode_lock,
+ p2p_lock,
)
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
- wait_until,
)
from test_framework.address import ADDRESS_BCRT1_UNSPENDABLE
@@ -43,15 +42,15 @@ class TestP2PConn(P2PInterface):
# Constants from net_processing
GETDATA_TX_INTERVAL = 60 # seconds
-MAX_GETDATA_RANDOM_DELAY = 2 # seconds
INBOUND_PEER_TX_DELAY = 2 # seconds
TXID_RELAY_DELAY = 2 # seconds
+OVERLOADED_PEER_DELAY = 2 # seconds
MAX_GETDATA_IN_FLIGHT = 100
-TX_EXPIRY_INTERVAL = GETDATA_TX_INTERVAL * 10
+MAX_PEER_TX_ANNOUNCEMENTS = 5000
# Python test constants
NUM_INBOUND = 10
-MAX_GETDATA_INBOUND_WAIT = GETDATA_TX_INTERVAL + MAX_GETDATA_RANDOM_DELAY + INBOUND_PEER_TX_DELAY + TXID_RELAY_DELAY
+MAX_GETDATA_INBOUND_WAIT = GETDATA_TX_INTERVAL + INBOUND_PEER_TX_DELAY + TXID_RELAY_DELAY
class TxDownloadTest(BitcoinTestFramework):
@@ -73,14 +72,14 @@ class TxDownloadTest(BitcoinTestFramework):
def getdata_found(peer_index):
p = self.nodes[0].p2ps[peer_index]
- with mininode_lock:
+ with p2p_lock:
return p.last_message.get("getdata") and p.last_message["getdata"].inv[-1].hash == txid
node_0_mocktime = int(time.time())
while outstanding_peer_index:
node_0_mocktime += MAX_GETDATA_INBOUND_WAIT
self.nodes[0].setmocktime(node_0_mocktime)
- wait_until(lambda: any(getdata_found(i) for i in outstanding_peer_index))
+ self.wait_until(lambda: any(getdata_found(i) for i in outstanding_peer_index))
for i in outstanding_peer_index:
if getdata_found(i):
outstanding_peer_index.remove(i)
@@ -122,60 +121,143 @@ class TxDownloadTest(BitcoinTestFramework):
# * the first time it is re-requested from the outbound peer, plus
# * 2 seconds to avoid races
assert self.nodes[1].getpeerinfo()[0]['inbound'] == False
- timeout = 2 + (MAX_GETDATA_RANDOM_DELAY + INBOUND_PEER_TX_DELAY) + (
- GETDATA_TX_INTERVAL + MAX_GETDATA_RANDOM_DELAY)
+ timeout = 2 + INBOUND_PEER_TX_DELAY + GETDATA_TX_INTERVAL
self.log.info("Tx should be received at node 1 after {} seconds".format(timeout))
self.sync_mempools(timeout=timeout)
def test_in_flight_max(self):
- self.log.info("Test that we don't request more than {} transactions from any peer, every {} minutes".format(
- MAX_GETDATA_IN_FLIGHT, TX_EXPIRY_INTERVAL / 60))
+ self.log.info("Test that we don't load peers with more than {} transaction requests immediately".format(MAX_GETDATA_IN_FLIGHT))
txids = [i for i in range(MAX_GETDATA_IN_FLIGHT + 2)]
p = self.nodes[0].p2ps[0]
- with mininode_lock:
+ with p2p_lock:
p.tx_getdata_count = 0
- p.send_message(msg_inv([CInv(t=MSG_WTX, h=i) for i in txids]))
- wait_until(lambda: p.tx_getdata_count >= MAX_GETDATA_IN_FLIGHT, lock=mininode_lock)
- with mininode_lock:
+ mock_time = int(time.time() + 1)
+ self.nodes[0].setmocktime(mock_time)
+ for i in range(MAX_GETDATA_IN_FLIGHT):
+ p.send_message(msg_inv([CInv(t=MSG_WTX, h=txids[i])]))
+ p.sync_with_ping()
+ mock_time += INBOUND_PEER_TX_DELAY
+ self.nodes[0].setmocktime(mock_time)
+ p.wait_until(lambda: p.tx_getdata_count >= MAX_GETDATA_IN_FLIGHT)
+ for i in range(MAX_GETDATA_IN_FLIGHT, len(txids)):
+ p.send_message(msg_inv([CInv(t=MSG_WTX, h=txids[i])]))
+ p.sync_with_ping()
+ self.log.info("No more than {} requests should be seen within {} seconds after announcement".format(MAX_GETDATA_IN_FLIGHT, INBOUND_PEER_TX_DELAY + OVERLOADED_PEER_DELAY - 1))
+ self.nodes[0].setmocktime(mock_time + INBOUND_PEER_TX_DELAY + OVERLOADED_PEER_DELAY - 1)
+ p.sync_with_ping()
+ with p2p_lock:
assert_equal(p.tx_getdata_count, MAX_GETDATA_IN_FLIGHT)
-
- self.log.info("Now check that if we send a NOTFOUND for a transaction, we'll get one more request")
- p.send_message(msg_notfound(vec=[CInv(t=MSG_WTX, h=txids[0])]))
- wait_until(lambda: p.tx_getdata_count >= MAX_GETDATA_IN_FLIGHT + 1, timeout=10, lock=mininode_lock)
- with mininode_lock:
- assert_equal(p.tx_getdata_count, MAX_GETDATA_IN_FLIGHT + 1)
-
- WAIT_TIME = TX_EXPIRY_INTERVAL // 2 + TX_EXPIRY_INTERVAL
- self.log.info("if we wait about {} minutes, we should eventually get more requests".format(WAIT_TIME / 60))
- self.nodes[0].setmocktime(int(time.time() + WAIT_TIME))
- wait_until(lambda: p.tx_getdata_count == MAX_GETDATA_IN_FLIGHT + 2)
- self.nodes[0].setmocktime(0)
+ self.log.info("If we wait {} seconds after announcement, we should eventually get more requests".format(INBOUND_PEER_TX_DELAY + OVERLOADED_PEER_DELAY))
+ self.nodes[0].setmocktime(mock_time + INBOUND_PEER_TX_DELAY + OVERLOADED_PEER_DELAY)
+ p.wait_until(lambda: p.tx_getdata_count == len(txids))
+
+ def test_expiry_fallback(self):
+ self.log.info('Check that expiry will select another peer for download')
+ WTXID = 0xffaa
+ peer1 = self.nodes[0].add_p2p_connection(TestP2PConn())
+ peer2 = self.nodes[0].add_p2p_connection(TestP2PConn())
+ for p in [peer1, peer2]:
+ p.send_message(msg_inv([CInv(t=MSG_WTX, h=WTXID)]))
+ # One of the peers is asked for the tx
+ peer2.wait_until(lambda: sum(p.tx_getdata_count for p in [peer1, peer2]) == 1)
+ with p2p_lock:
+ peer_expiry, peer_fallback = (peer1, peer2) if peer1.tx_getdata_count == 1 else (peer2, peer1)
+ assert_equal(peer_fallback.tx_getdata_count, 0)
+ self.nodes[0].setmocktime(int(time.time()) + GETDATA_TX_INTERVAL + 1) # Wait for request to peer_expiry to expire
+ peer_fallback.wait_until(lambda: peer_fallback.tx_getdata_count >= 1, timeout=1)
+ with p2p_lock:
+ assert_equal(peer_fallback.tx_getdata_count, 1)
+ self.restart_node(0) # reset mocktime
+
+ def test_disconnect_fallback(self):
+ self.log.info('Check that disconnect will select another peer for download')
+ WTXID = 0xffbb
+ peer1 = self.nodes[0].add_p2p_connection(TestP2PConn())
+ peer2 = self.nodes[0].add_p2p_connection(TestP2PConn())
+ for p in [peer1, peer2]:
+ p.send_message(msg_inv([CInv(t=MSG_WTX, h=WTXID)]))
+ # One of the peers is asked for the tx
+ peer2.wait_until(lambda: sum(p.tx_getdata_count for p in [peer1, peer2]) == 1)
+ with p2p_lock:
+ peer_disconnect, peer_fallback = (peer1, peer2) if peer1.tx_getdata_count == 1 else (peer2, peer1)
+ assert_equal(peer_fallback.tx_getdata_count, 0)
+ peer_disconnect.peer_disconnect()
+ peer_disconnect.wait_for_disconnect()
+ peer_fallback.wait_until(lambda: peer_fallback.tx_getdata_count >= 1, timeout=1)
+ with p2p_lock:
+ assert_equal(peer_fallback.tx_getdata_count, 1)
+
+ def test_notfound_fallback(self):
+ self.log.info('Check that notfounds will select another peer for download immediately')
+ WTXID = 0xffdd
+ peer1 = self.nodes[0].add_p2p_connection(TestP2PConn())
+ peer2 = self.nodes[0].add_p2p_connection(TestP2PConn())
+ for p in [peer1, peer2]:
+ p.send_message(msg_inv([CInv(t=MSG_WTX, h=WTXID)]))
+ # One of the peers is asked for the tx
+ peer2.wait_until(lambda: sum(p.tx_getdata_count for p in [peer1, peer2]) == 1)
+ with p2p_lock:
+ peer_notfound, peer_fallback = (peer1, peer2) if peer1.tx_getdata_count == 1 else (peer2, peer1)
+ assert_equal(peer_fallback.tx_getdata_count, 0)
+ peer_notfound.send_and_ping(msg_notfound(vec=[CInv(MSG_WTX, WTXID)])) # Send notfound, so that fallback peer is selected
+ peer_fallback.wait_until(lambda: peer_fallback.tx_getdata_count >= 1, timeout=1)
+ with p2p_lock:
+ assert_equal(peer_fallback.tx_getdata_count, 1)
+
+ def test_preferred_inv(self):
+ self.log.info('Check that invs from preferred peers are downloaded immediately')
+ self.restart_node(0, extra_args=['-whitelist=noban@127.0.0.1'])
+ peer = self.nodes[0].add_p2p_connection(TestP2PConn())
+ peer.send_message(msg_inv([CInv(t=MSG_WTX, h=0xff00ff00)]))
+ peer.wait_until(lambda: peer.tx_getdata_count >= 1, timeout=1)
+ with p2p_lock:
+ assert_equal(peer.tx_getdata_count, 1)
+
+ def test_large_inv_batch(self):
+ self.log.info('Test how large inv batches are handled with relay permission')
+ self.restart_node(0, extra_args=['-whitelist=relay@127.0.0.1'])
+ peer = self.nodes[0].add_p2p_connection(TestP2PConn())
+ peer.send_message(msg_inv([CInv(t=MSG_WTX, h=wtxid) for wtxid in range(MAX_PEER_TX_ANNOUNCEMENTS + 1)]))
+ peer.wait_until(lambda: peer.tx_getdata_count == MAX_PEER_TX_ANNOUNCEMENTS + 1)
+
+ self.log.info('Test how large inv batches are handled without relay permission')
+ self.restart_node(0)
+ peer = self.nodes[0].add_p2p_connection(TestP2PConn())
+ peer.send_message(msg_inv([CInv(t=MSG_WTX, h=wtxid) for wtxid in range(MAX_PEER_TX_ANNOUNCEMENTS + 1)]))
+ peer.wait_until(lambda: peer.tx_getdata_count == MAX_PEER_TX_ANNOUNCEMENTS)
+ peer.sync_with_ping()
+ with p2p_lock:
+ assert_equal(peer.tx_getdata_count, MAX_PEER_TX_ANNOUNCEMENTS)
def test_spurious_notfound(self):
self.log.info('Check that spurious notfound is ignored')
self.nodes[0].p2ps[0].send_message(msg_notfound(vec=[CInv(MSG_TX, 1)]))
def run_test(self):
- # Setup the p2p connections
- self.peers = []
- for node in self.nodes:
- for _ in range(NUM_INBOUND):
- self.peers.append(node.add_p2p_connection(TestP2PConn()))
-
- self.log.info("Nodes are setup with {} incoming connections each".format(NUM_INBOUND))
-
+ # Run tests without mocktime that only need one peer-connection first, to avoid restarting the nodes
+ self.test_expiry_fallback()
+ self.test_disconnect_fallback()
+ self.test_notfound_fallback()
+ self.test_preferred_inv()
+ self.test_large_inv_batch()
self.test_spurious_notfound()
- # Test the in-flight max first, because we want no transactions in
- # flight ahead of this test.
- self.test_in_flight_max()
-
- self.test_inv_block()
-
- self.test_tx_requests()
+ # Run each test against new bitcoind instances, as setting mocktimes has long-term effects on when
+ # the next trickle relay event happens.
+ for test in [self.test_in_flight_max, self.test_inv_block, self.test_tx_requests]:
+ self.stop_nodes()
+ self.start_nodes()
+ self.connect_nodes(1, 0)
+ # Setup the p2p connections
+ self.peers = []
+ for node in self.nodes:
+ for _ in range(NUM_INBOUND):
+ self.peers.append(node.add_p2p_connection(TestP2PConn()))
+ self.log.info("Nodes are setup with {} incoming connections each".format(NUM_INBOUND))
+ test()
if __name__ == '__main__':
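The expiry, disconnect, and notfound fallback tests above share one scaffold: announce the same wtxid from two peers, see which one receives the getdata, make that request fail, and check that the other peer is asked next. A minimal sketch of that shared shape, assuming the test_framework modules are importable; `make_peer` should build this file's TestP2PConn (which counts getdata requests in tx_getdata_count) and `fail` is an illustrative callback for whichever failure mode is being tested:

    from test_framework.messages import CInv, MSG_WTX, msg_inv
    from test_framework.p2p import p2p_lock

    def check_fallback(node, make_peer, fail, wtxid):
        peer1 = node.add_p2p_connection(make_peer())
        peer2 = node.add_p2p_connection(make_peer())
        for p in (peer1, peer2):
            p.send_message(msg_inv([CInv(t=MSG_WTX, h=wtxid)]))
        # Exactly one of the two peers is asked for the tx first.
        peer2.wait_until(lambda: peer1.tx_getdata_count + peer2.tx_getdata_count == 1)
        with p2p_lock:
            first, fallback = (peer1, peer2) if peer1.tx_getdata_count else (peer2, peer1)
        fail(first)  # e.g. disconnect it, send a notfound, or bump mocktime past expiry
        fallback.wait_until(lambda: fallback.tx_getdata_count >= 1)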
diff --git a/test/functional/p2p_unrequested_blocks.py b/test/functional/p2p_unrequested_blocks.py
index 71b0b0f63a..e7a05d8547 100755
--- a/test/functional/p2p_unrequested_blocks.py
+++ b/test/functional/p2p_unrequested_blocks.py
@@ -55,12 +55,11 @@ import time
from test_framework.blocktools import create_block, create_coinbase, create_tx_with_script
from test_framework.messages import CBlockHeader, CInv, MSG_BLOCK, msg_block, msg_headers, msg_inv
-from test_framework.mininode import mininode_lock, P2PInterface
+from test_framework.p2p import p2p_lock, P2PInterface
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
- connect_nodes,
)
@@ -199,13 +198,13 @@ class AcceptBlockTest(BitcoinTestFramework):
# 6. Try to get node to request the missing block.
# Poke the node with an inv for block at height 3 and see if that
# triggers a getdata on block 2 (it should if block 2 is missing).
- with mininode_lock:
+ with p2p_lock:
# Clear state so we can check the getdata request
test_node.last_message.pop("getdata", None)
test_node.send_message(msg_inv([CInv(MSG_BLOCK, block_h3.sha256)]))
test_node.sync_with_ping()
- with mininode_lock:
+ with p2p_lock:
getdata = test_node.last_message["getdata"]
# Check that the getdata includes the right block
@@ -284,7 +283,7 @@ class AcceptBlockTest(BitcoinTestFramework):
test_node.wait_for_disconnect()
# 9. Connect node1 to node0 and ensure it is able to sync
- connect_nodes(self.nodes[0], 1)
+ self.connect_nodes(0, 1)
self.sync_blocks([self.nodes[0], self.nodes[1]])
self.log.info("Successfully synced nodes 1 and 0")
diff --git a/test/functional/rpc_blockchain.py b/test/functional/rpc_blockchain.py
index 7c70f30ca3..f965677408 100755
--- a/test/functional/rpc_blockchain.py
+++ b/test/functional/rpc_blockchain.py
@@ -22,6 +22,17 @@ from decimal import Decimal
import http.client
import subprocess
+from test_framework.blocktools import (
+ create_block,
+ create_coinbase,
+ TIME_GENESIS_BLOCK,
+)
+from test_framework.messages import (
+ CBlockHeader,
+ FromHex,
+ msg_block,
+)
+from test_framework.p2p import P2PInterface
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
@@ -32,17 +43,6 @@ from test_framework.util import (
assert_is_hex_string,
assert_is_hash_string,
)
-from test_framework.blocktools import (
- create_block,
- create_coinbase,
- TIME_GENESIS_BLOCK,
-)
-from test_framework.messages import (
- msg_block,
-)
-from test_framework.mininode import (
- P2PInterface,
-)
class BlockchainTest(BitcoinTestFramework):
@@ -146,7 +146,19 @@ class BlockchainTest(BitcoinTestFramework):
'possible': True,
},
},
- 'active': False}
+ 'active': False
+ },
+ 'taproot': {
+ 'type': 'bip9',
+ 'bip9': {
+ 'status': 'active',
+ 'start_time': -1,
+ 'timeout': 9223372036854775807,
+ 'since': 0
+ },
+ 'height': 0,
+ 'active': True
+ }
})
def _test_getchaintxstats(self):
@@ -280,6 +292,14 @@ class BlockchainTest(BitcoinTestFramework):
assert isinstance(int(header['versionHex'], 16), int)
assert isinstance(header['difficulty'], Decimal)
+ # Test with verbose=False, which should return the header as hex.
+ header_hex = node.getblockheader(blockhash=besthash, verbose=False)
+ assert_is_hex_string(header_hex)
+
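+ # Deserializing the returned header and hashing it should reproduce the block hash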
+ header = FromHex(CBlockHeader(), header_hex)
+ header.calc_sha256()
+ assert_equal(header.hash, besthash)
+
def _test_getdifficulty(self):
difficulty = self.nodes[0].getdifficulty()
# 1 hash in 2 should be valid, so difficulty should be 1/2**31
@@ -309,7 +329,7 @@ class BlockchainTest(BitcoinTestFramework):
def _test_waitforblockheight(self):
self.log.info("Test waitforblockheight")
node = self.nodes[0]
- node.add_p2p_connection(P2PInterface())
+ peer = node.add_p2p_connection(P2PInterface())
current_height = node.getblock(node.getbestblockhash())['height']
@@ -326,7 +346,7 @@ class BlockchainTest(BitcoinTestFramework):
def solve_and_send_block(prevhash, height, time):
b = create_block(prevhash, create_coinbase(height), time)
b.solve()
- node.p2p.send_and_ping(msg_block(b))
+ peer.send_and_ping(msg_block(b))
return b
b21f = solve_and_send_block(int(b20hash, 16), 21, b20['time'] + 1)
diff --git a/test/functional/rpc_createmultisig.py b/test/functional/rpc_createmultisig.py
index 3c81a4a4e2..f19c60dc36 100755
--- a/test/functional/rpc_createmultisig.py
+++ b/test/functional/rpc_createmultisig.py
@@ -129,7 +129,8 @@ class RpcCreateMultiSigTest(BitcoinTestFramework):
try:
node1.loadwallet('wmulti')
except JSONRPCException as e:
- if e.error['code'] == -18 and 'Wallet wmulti not found' in e.error['message']:
+ path = os.path.join(self.options.tmpdir, "node1", "regtest", "wallets", "wmulti")
+ if e.error['code'] == -18 and "Wallet file verification failed. Failed to load database path '{}'. Path does not exist.".format(path) in e.error['message']:
node1.createwallet(wallet_name='wmulti', disable_private_keys=True)
else:
raise
diff --git a/test/functional/rpc_deprecated.py b/test/functional/rpc_deprecated.py
index 9a21998d11..adcd8a7d4c 100755
--- a/test/functional/rpc_deprecated.py
+++ b/test/functional/rpc_deprecated.py
@@ -4,13 +4,13 @@
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test deprecation of RPC calls."""
from test_framework.test_framework import BitcoinTestFramework
-# from test_framework.util import assert_raises_rpc_error
+from test_framework.util import assert_raises_rpc_error, find_vout_for_address
class DeprecatedRpcTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 2
self.setup_clean_chain = True
- self.extra_args = [[], []]
+ self.extra_args = [[], ['-deprecatedrpc=bumpfee']]
def run_test(self):
# This test should be used to verify correct behaviour of deprecated
@@ -23,7 +23,38 @@ class DeprecatedRpcTest(BitcoinTestFramework):
# self.log.info("Test generate RPC")
# assert_raises_rpc_error(-32, 'The wallet generate rpc method is deprecated', self.nodes[0].rpc.generate, 1)
# self.nodes[1].generate(1)
- self.log.info("No tested deprecated RPC methods")
+
+ if self.is_wallet_compiled():
+ self.log.info("Test bumpfee RPC")
+ self.nodes[0].generate(101)
+ self.nodes[0].createwallet(wallet_name='nopriv', disable_private_keys=True)
+ noprivs0 = self.nodes[0].get_wallet_rpc('nopriv')
+ w0 = self.nodes[0].get_wallet_rpc(self.default_wallet_name)
+ self.nodes[1].createwallet(wallet_name='nopriv', disable_private_keys=True)
+ noprivs1 = self.nodes[1].get_wallet_rpc('nopriv')
+
+ address = w0.getnewaddress()
+ desc = w0.getaddressinfo(address)['desc']
+ change_addr = w0.getrawchangeaddress()
+ change_desc = w0.getaddressinfo(change_addr)['desc']
+ txid = w0.sendtoaddress(address=address, amount=10)
+ vout = find_vout_for_address(w0, txid, address)
+ self.nodes[0].generate(1)
+ rawtx = w0.createrawtransaction([{'txid': txid, 'vout': vout}], {w0.getnewaddress(): 5}, 0, True)
+ rawtx = w0.fundrawtransaction(rawtx, {'changeAddress': change_addr})
+ signed_tx = w0.signrawtransactionwithwallet(rawtx['hex'])['hex']
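+ # Import the receiving and change descriptors so the watch-only wallets can see the transaction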
+
+ noprivs0.importmulti([{'desc': desc, 'timestamp': 0}, {'desc': change_desc, 'timestamp': 0, 'internal': True}])
+ noprivs1.importmulti([{'desc': desc, 'timestamp': 0}, {'desc': change_desc, 'timestamp': 0, 'internal': True}])
+
+ txid = w0.sendrawtransaction(signed_tx)
+ self.sync_all()
+
+ assert_raises_rpc_error(-32, 'Using bumpfee with wallets that have private keys disabled is deprecated. Use psbtbumpfee instead or restart bitcoind with -deprecatedrpc=bumpfee. This functionality will be removed in 0.22', noprivs0.bumpfee, txid)
+ bumped_psbt = noprivs1.bumpfee(txid)
+ assert 'psbt' in bumped_psbt
+ else:
+ self.log.info("No tested deprecated RPC methods")
if __name__ == '__main__':
DeprecatedRpcTest().main()
diff --git a/test/functional/rpc_fundrawtransaction.py b/test/functional/rpc_fundrawtransaction.py
index 2a0971b808..167c4671ef 100755
--- a/test/functional/rpc_fundrawtransaction.py
+++ b/test/functional/rpc_fundrawtransaction.py
@@ -12,7 +12,6 @@ from test_framework.util import (
assert_greater_than,
assert_greater_than_or_equal,
assert_raises_rpc_error,
- connect_nodes,
count_bytes,
find_vout_for_address,
)
@@ -38,10 +37,10 @@ class RawTransactionsTest(BitcoinTestFramework):
def setup_network(self):
self.setup_nodes()
- connect_nodes(self.nodes[0], 1)
- connect_nodes(self.nodes[1], 2)
- connect_nodes(self.nodes[0], 2)
- connect_nodes(self.nodes[0], 3)
+ self.connect_nodes(0, 1)
+ self.connect_nodes(1, 2)
+ self.connect_nodes(0, 2)
+ self.connect_nodes(0, 3)
def run_test(self):
self.log.info("Connect nodes, set fees, generate blocks, and sync")
@@ -224,7 +223,7 @@ class RawTransactionsTest(BitcoinTestFramework):
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
- assert_raises_rpc_error(-5, "changeAddress must be a valid bitcoin address", self.nodes[2].fundrawtransaction, rawtx, {'changeAddress':'foobar'})
+ assert_raises_rpc_error(-5, "Change address must be a valid bitcoin address", self.nodes[2].fundrawtransaction, rawtx, {'changeAddress':'foobar'})
def test_valid_change_address(self):
self.log.info("Test fundrawtxn with a provided change address")
@@ -667,7 +666,7 @@ class RawTransactionsTest(BitcoinTestFramework):
result = self.nodes[3].fundrawtransaction(rawtx) # uses self.min_relay_tx_fee (set by settxfee)
result2 = self.nodes[3].fundrawtransaction(rawtx, {"feeRate": 2 * self.min_relay_tx_fee})
result3 = self.nodes[3].fundrawtransaction(rawtx, {"feeRate": 10 * self.min_relay_tx_fee})
- assert_raises_rpc_error(-4, "Fee exceeds maximum configured by -maxtxfee", self.nodes[3].fundrawtransaction, rawtx, {"feeRate": 1})
+ assert_raises_rpc_error(-4, "Fee exceeds maximum configured by user (e.g. -maxtxfee, maxfeerate)", self.nodes[3].fundrawtransaction, rawtx, {"feeRate": 1})
result_fee_rate = result['fee'] * 1000 / count_bytes(result['hex'])
assert_fee_amount(result2['fee'], count_bytes(result2['hex']), 2 * result_fee_rate)
assert_fee_amount(result3['fee'], count_bytes(result3['hex']), 10 * result_fee_rate)
diff --git a/test/functional/rpc_generate.py b/test/functional/rpc_generate.py
new file mode 100755
index 0000000000..e55f2e6d12
--- /dev/null
+++ b/test/functional/rpc_generate.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env python3
+# Copyright (c) 2020 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test generate RPC."""
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import (
+ assert_equal,
+ assert_raises_rpc_error,
+)
+
+
+class RPCGenerateTest(BitcoinTestFramework):
+ def set_test_params(self):
+ self.num_nodes = 1
+
+ def run_test(self):
+ message = (
+ "generate\n"
+ "has been replaced by the -generate "
+ "cli option. Refer to -help for more information."
+ )
+
+ self.log.info("Test rpc generate raises with message to use cli option")
+ assert_raises_rpc_error(-32601, message, self.nodes[0].rpc.generate)
+
+ self.log.info("Test rpc generate help prints message to use cli option")
+ assert_equal(message, self.nodes[0].help("generate"))
+
+ self.log.info("Test rpc generate is a hidden command not discoverable in general help")
+ assert message not in self.nodes[0].help()
+
+
+if __name__ == "__main__":
+ RPCGenerateTest().main()
diff --git a/test/functional/rpc_getblockfilter.py b/test/functional/rpc_getblockfilter.py
index 8fa36445cd..c3c3622cf9 100755
--- a/test/functional/rpc_getblockfilter.py
+++ b/test/functional/rpc_getblockfilter.py
@@ -7,7 +7,6 @@
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal, assert_is_hex_string, assert_raises_rpc_error,
- connect_nodes, disconnect_nodes
)
FILTER_TYPES = ["basic"]
@@ -20,7 +19,7 @@ class GetBlockFilterTest(BitcoinTestFramework):
def run_test(self):
# Create two chains by disconnecting nodes 0 & 1, mining, then reconnecting
- disconnect_nodes(self.nodes[0], 1)
+ self.disconnect_nodes(0, 1)
self.nodes[0].generate(3)
self.nodes[1].generate(4)
@@ -29,7 +28,7 @@ class GetBlockFilterTest(BitcoinTestFramework):
chain0_hashes = [self.nodes[0].getblockhash(block_height) for block_height in range(4)]
# Reorg node 0 to a new chain
- connect_nodes(self.nodes[0], 1)
+ self.connect_nodes(0, 1)
self.sync_blocks()
assert_equal(self.nodes[0].getblockcount(), 4)
diff --git a/test/functional/rpc_getdescriptorinfo.py b/test/functional/rpc_getdescriptorinfo.py
index 977dc805ef..ea064f9763 100755
--- a/test/functional/rpc_getdescriptorinfo.py
+++ b/test/functional/rpc_getdescriptorinfo.py
@@ -17,6 +17,7 @@ class DescriptorTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.extra_args = [["-disablewallet"]]
+ self.wallet_names = []
def test_desc(self, desc, isrange, issolvable, hasprivatekeys):
info = self.nodes[0].getdescriptorinfo(desc)
diff --git a/test/functional/rpc_getpeerinfo_banscore_deprecation.py b/test/functional/rpc_getpeerinfo_deprecation.py
index b830248e1e..340a66e12f 100755
--- a/test/functional/rpc_getpeerinfo_banscore_deprecation.py
+++ b/test/functional/rpc_getpeerinfo_deprecation.py
@@ -2,23 +2,37 @@
# Copyright (c) 2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
-"""Test deprecation of getpeerinfo RPC banscore field."""
+"""Test deprecation of getpeerinfo RPC fields."""
from test_framework.test_framework import BitcoinTestFramework
-class GetpeerinfoBanscoreDeprecationTest(BitcoinTestFramework):
+class GetpeerinfoDeprecationTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 2
self.extra_args = [[], ["-deprecatedrpc=banscore"]]
def run_test(self):
+ self.test_banscore_deprecation()
+ self.test_addnode_deprecation()
+
+ def test_banscore_deprecation(self):
self.log.info("Test getpeerinfo by default no longer returns a banscore field")
assert "banscore" not in self.nodes[0].getpeerinfo()[0].keys()
self.log.info("Test getpeerinfo returns banscore with -deprecatedrpc=banscore")
assert "banscore" in self.nodes[1].getpeerinfo()[0].keys()
+ def test_addnode_deprecation(self):
+ self.restart_node(1, ["-deprecatedrpc=getpeerinfo_addnode"])
+ self.connect_nodes(0, 1)
+
+ self.log.info("Test getpeerinfo by default no longer returns an addnode field")
+ assert "addnode" not in self.nodes[0].getpeerinfo()[0].keys()
+
+ self.log.info("Test getpeerinfo returns addnode with -deprecatedrpc=addnode")
+ assert "addnode" in self.nodes[1].getpeerinfo()[0].keys()
+
if __name__ == "__main__":
- GetpeerinfoBanscoreDeprecationTest().main()
+ GetpeerinfoDeprecationTest().main()
diff --git a/test/functional/rpc_invalidateblock.py b/test/functional/rpc_invalidateblock.py
index 1fdc134f97..f884b8d293 100755
--- a/test/functional/rpc_invalidateblock.py
+++ b/test/functional/rpc_invalidateblock.py
@@ -8,8 +8,6 @@ from test_framework.test_framework import BitcoinTestFramework
from test_framework.address import ADDRESS_BCRT1_UNSPENDABLE_DESCRIPTOR
from test_framework.util import (
assert_equal,
- connect_nodes,
- wait_until,
)
@@ -33,7 +31,7 @@ class InvalidateTest(BitcoinTestFramework):
assert_equal(self.nodes[1].getblockcount(), 6)
self.log.info("Connect nodes to force a reorg")
- connect_nodes(self.nodes[0], 1)
+ self.connect_nodes(0, 1)
self.sync_blocks(self.nodes[0:2])
assert_equal(self.nodes[0].getblockcount(), 6)
badhash = self.nodes[1].getblockhash(2)
@@ -44,7 +42,7 @@ class InvalidateTest(BitcoinTestFramework):
assert_equal(self.nodes[0].getbestblockhash(), besthash_n0)
self.log.info("Make sure we won't reorg to a lower work chain:")
- connect_nodes(self.nodes[1], 2)
+ self.connect_nodes(1, 2)
self.log.info("Sync node 2 to node 1 so both have 6 blocks")
self.sync_blocks(self.nodes[1:3])
assert_equal(self.nodes[2].getblockcount(), 6)
@@ -57,9 +55,9 @@ class InvalidateTest(BitcoinTestFramework):
self.log.info("..and then mine a block")
self.nodes[2].generatetoaddress(1, self.nodes[2].get_deterministic_priv_key().address)
self.log.info("Verify all nodes are at the right height")
- wait_until(lambda: self.nodes[2].getblockcount() == 3, timeout=5)
- wait_until(lambda: self.nodes[0].getblockcount() == 4, timeout=5)
- wait_until(lambda: self.nodes[1].getblockcount() == 4, timeout=5)
+ self.wait_until(lambda: self.nodes[2].getblockcount() == 3, timeout=5)
+ self.wait_until(lambda: self.nodes[0].getblockcount() == 4, timeout=5)
+ self.wait_until(lambda: self.nodes[1].getblockcount() == 4, timeout=5)
self.log.info("Verify that we reconsider all ancestors as well")
blocks = self.nodes[1].generatetodescriptor(10, ADDRESS_BCRT1_UNSPENDABLE_DESCRIPTOR)
diff --git a/test/functional/rpc_misc.py b/test/functional/rpc_misc.py
index c8517d719e..0493ceeb64 100755
--- a/test/functional/rpc_misc.py
+++ b/test/functional/rpc_misc.py
@@ -27,8 +27,8 @@ class RpcMiscTest(BitcoinTestFramework):
self.log.info("test CHECK_NONFATAL")
assert_raises_rpc_error(
-1,
- "Internal bug detected: 'request.params.size() != 100'",
- lambda: node.echo(*[0] * 100),
+ 'Internal bug detected: \'request.params[9].get_str() != "trigger_internal_bug"\'',
+ lambda: node.echo(arg9='trigger_internal_bug'),
)
self.log.info("test getmemoryinfo")
@@ -61,6 +61,34 @@ class RpcMiscTest(BitcoinTestFramework):
node.logging(include=['qt'])
assert_equal(node.logging()['qt'], True)
+ self.log.info("test getindexinfo")
+ # Without any indices running the RPC returns an empty object
+ assert_equal(node.getindexinfo(), {})
+
+ # Restart the node with indices and wait for them to sync
+ self.restart_node(0, ["-txindex", "-blockfilterindex"])
+ self.wait_until(lambda: all(i["synced"] for i in node.getindexinfo().values()))
+
+ # Returns a list of all running indices by default
+ assert_equal(
+ node.getindexinfo(),
+ {
+ "txindex": {"synced": True, "best_block_height": 200},
+ "basic block filter index": {"synced": True, "best_block_height": 200}
+ }
+ )
+
+ # Specifying an index by name returns only the status of that index
+ assert_equal(
+ node.getindexinfo("txindex"),
+ {
+ "txindex": {"synced": True, "best_block_height": 200},
+ }
+ )
+
+ # Specifying an unknown index name returns an empty result
+ assert_equal(node.getindexinfo("foo"), {})
+
if __name__ == '__main__':
RpcMiscTest().main()
diff --git a/test/functional/rpc_net.py b/test/functional/rpc_net.py
index 3336246c8b..71e8e1b22a 100755
--- a/test/functional/rpc_net.py
+++ b/test/functional/rpc_net.py
@@ -8,24 +8,23 @@ Tests correspond to code in rpc/net.cpp.
"""
from decimal import Decimal
+from itertools import product
+import time
+from test_framework.p2p import P2PInterface
+import test_framework.messages
+from test_framework.messages import (
+ NODE_NETWORK,
+ NODE_WITNESS,
+)
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
+ assert_approx,
assert_equal,
assert_greater_than_or_equal,
assert_greater_than,
assert_raises_rpc_error,
- connect_nodes,
p2p_port,
- wait_until,
-)
-from test_framework.mininode import P2PInterface
-import test_framework.messages
-from test_framework.messages import (
- CAddress,
- msg_addr,
- NODE_NETWORK,
- NODE_WITNESS,
)
@@ -50,25 +49,28 @@ class NetTest(BitcoinTestFramework):
self.supports_cli = False
def run_test(self):
- self.log.info('Get out of IBD for the minfeefilter test')
- self.nodes[0].generate(1)
- self.log.info('Connect nodes both way')
- connect_nodes(self.nodes[0], 1)
- connect_nodes(self.nodes[1], 0)
-
- self._test_connection_count()
- self._test_getnettotals()
- self._test_getnetworkinfo()
- self._test_getaddednodeinfo()
- self._test_getpeerinfo()
+ # Get out of IBD for the minfeefilter and getpeerinfo tests.
+ self.nodes[0].generate(101)
+ # Connect nodes both ways.
+ self.connect_nodes(0, 1)
+ self.connect_nodes(1, 0)
+ self.sync_all()
+
+ self.test_connection_count()
+ self.test_getpeerinfo()
+ self.test_getnettotals()
+ self.test_getnetworkinfo()
+ self.test_getaddednodeinfo()
self.test_service_flags()
- self._test_getnodeaddresses()
+ self.test_getnodeaddresses()
- def _test_connection_count(self):
- # connect_nodes connects each node to the other
+ def test_connection_count(self):
+ self.log.info("Test getconnectioncount")
+ # `connect_nodes` has already connected nodes 0 and 1 to each other, so two connections are expected.
assert_equal(self.nodes[0].getconnectioncount(), 2)
- def _test_getnettotals(self):
+ def test_getnettotals(self):
+ self.log.info("Test getnettotals")
# getnettotals totalbytesrecv and totalbytessent should be
# consistent with getpeerinfo. Since the RPC calls are not atomic,
# and messages might have been recvd or sent between RPC calls, call
@@ -90,39 +92,47 @@ class NetTest(BitcoinTestFramework):
# the bytes sent/received should change
# note ping and pong are 32 bytes each
self.nodes[0].ping()
- wait_until(lambda: (self.nodes[0].getnettotals()['totalbytessent'] >= net_totals_after['totalbytessent'] + 32 * 2), timeout=1)
- wait_until(lambda: (self.nodes[0].getnettotals()['totalbytesrecv'] >= net_totals_after['totalbytesrecv'] + 32 * 2), timeout=1)
+ self.wait_until(lambda: (self.nodes[0].getnettotals()['totalbytessent'] >= net_totals_after['totalbytessent'] + 32 * 2), timeout=1)
+ self.wait_until(lambda: (self.nodes[0].getnettotals()['totalbytesrecv'] >= net_totals_after['totalbytesrecv'] + 32 * 2), timeout=1)
peer_info_after_ping = self.nodes[0].getpeerinfo()
for before, after in zip(peer_info, peer_info_after_ping):
assert_greater_than_or_equal(after['bytesrecv_per_msg'].get('pong', 0), before['bytesrecv_per_msg'].get('pong', 0) + 32)
assert_greater_than_or_equal(after['bytessent_per_msg'].get('ping', 0), before['bytessent_per_msg'].get('ping', 0) + 32)
- def _test_getnetworkinfo(self):
- assert_equal(self.nodes[0].getnetworkinfo()['networkactive'], True)
- assert_equal(self.nodes[0].getnetworkinfo()['connections'], 2)
+ def test_getnetworkinfo(self):
+ self.log.info("Test getnetworkinfo")
+ info = self.nodes[0].getnetworkinfo()
+ assert_equal(info['networkactive'], True)
+ assert_equal(info['connections'], 2)
+ assert_equal(info['connections_in'], 1)
+ assert_equal(info['connections_out'], 1)
with self.nodes[0].assert_debug_log(expected_msgs=['SetNetworkActive: false\n']):
self.nodes[0].setnetworkactive(state=False)
assert_equal(self.nodes[0].getnetworkinfo()['networkactive'], False)
# Wait a bit for all sockets to close
- wait_until(lambda: self.nodes[0].getnetworkinfo()['connections'] == 0, timeout=3)
+ self.wait_until(lambda: self.nodes[0].getnetworkinfo()['connections'] == 0, timeout=3)
with self.nodes[0].assert_debug_log(expected_msgs=['SetNetworkActive: true\n']):
self.nodes[0].setnetworkactive(state=True)
- self.log.info('Connect nodes both way')
- connect_nodes(self.nodes[0], 1)
- connect_nodes(self.nodes[1], 0)
+ # Connect nodes both ways.
+ self.connect_nodes(0, 1)
+ self.connect_nodes(1, 0)
- assert_equal(self.nodes[0].getnetworkinfo()['networkactive'], True)
- assert_equal(self.nodes[0].getnetworkinfo()['connections'], 2)
+ info = self.nodes[0].getnetworkinfo()
+ assert_equal(info['networkactive'], True)
+ assert_equal(info['connections'], 2)
+ assert_equal(info['connections_in'], 1)
+ assert_equal(info['connections_out'], 1)
# check the `servicesnames` field
network_info = [node.getnetworkinfo() for node in self.nodes]
for info in network_info:
assert_net_servicesnames(int(info["localservices"], 0x10), info["localservicesnames"])
- def _test_getaddednodeinfo(self):
+ def test_getaddednodeinfo(self):
+ self.log.info("Test getaddednodeinfo")
assert_equal(self.nodes[0].getaddednodeinfo(), [])
# add a node (node2) to node0
ip_port = "127.0.0.1:{}".format(p2p_port(2))
@@ -131,11 +141,30 @@ class NetTest(BitcoinTestFramework):
added_nodes = self.nodes[0].getaddednodeinfo(ip_port)
assert_equal(len(added_nodes), 1)
assert_equal(added_nodes[0]['addednode'], ip_port)
+ # check that node cannot be added again
+ assert_raises_rpc_error(-23, "Node already added", self.nodes[0].addnode, node=ip_port, command='add')
+ # check that node can be removed
+ self.nodes[0].addnode(node=ip_port, command='remove')
+ assert_equal(self.nodes[0].getaddednodeinfo(), [])
+ # check that trying to remove the node again returns an error
+ assert_raises_rpc_error(-24, "Node could not be removed", self.nodes[0].addnode, node=ip_port, command='remove')
# check that a non-existent node returns an error
assert_raises_rpc_error(-24, "Node has not been added", self.nodes[0].getaddednodeinfo, '1.1.1.1')
- def _test_getpeerinfo(self):
+ def test_getpeerinfo(self):
+ self.log.info("Test getpeerinfo")
+ # Create a few getpeerinfo last_block/last_transaction values.
+ if self.is_wallet_compiled():
+ self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 1)
+ self.nodes[1].generate(1)
+ self.sync_all()
+ time_now = int(time.time())
peer_info = [x.getpeerinfo() for x in self.nodes]
+ # Verify last_block and last_transaction keys/values.
+ for node, peer, field in product(range(self.num_nodes), range(2), ['last_block', 'last_transaction']):
+ assert field in peer_info[node][peer].keys()
+ if peer_info[node][peer][field] != 0:
+ assert_approx(peer_info[node][peer][field], time_now, vspan=60)
# check both sides of bidirectional connection between nodes
# the address bound to on one side will be the source address for the other node
assert_equal(peer_info[0][0]['addrbind'], peer_info[1][0]['addr'])
@@ -146,38 +175,50 @@ class NetTest(BitcoinTestFramework):
for info in peer_info:
assert_net_servicesnames(int(info[0]["services"], 0x10), info[0]["servicesnames"])
+ assert_equal(peer_info[0][0]['connection_type'], 'inbound')
+ assert_equal(peer_info[0][1]['connection_type'], 'manual')
+
+ assert_equal(peer_info[1][0]['connection_type'], 'manual')
+ assert_equal(peer_info[1][1]['connection_type'], 'inbound')
+
def test_service_flags(self):
+ self.log.info("Test service flags")
self.nodes[0].add_p2p_connection(P2PInterface(), services=(1 << 4) | (1 << 63))
assert_equal(['UNKNOWN[2^4]', 'UNKNOWN[2^63]'], self.nodes[0].getpeerinfo()[-1]['servicesnames'])
self.nodes[0].disconnect_p2ps()
- def _test_getnodeaddresses(self):
+ def test_getnodeaddresses(self):
+ self.log.info("Test getnodeaddresses")
self.nodes[0].add_p2p_connection(P2PInterface())
- # send some addresses to the node via the p2p message addr
- msg = msg_addr()
+ # Add some addresses to the Address Manager over RPC. Due to the way
+ # bucket and bucket position are calculated, some of these addresses
+ # will collide.
imported_addrs = []
- for i in range(256):
- a = "123.123.123.{}".format(i)
+ for i in range(10000):
+ first_octet = i >> 8
+ second_octet = i % 256
+ a = "{}.{}.1.1".format(first_octet, second_octet)
imported_addrs.append(a)
- addr = CAddress()
- addr.time = 100000000
- addr.nServices = NODE_NETWORK | NODE_WITNESS
- addr.ip = a
- addr.port = 8333
- msg.addrs.append(addr)
- self.nodes[0].p2p.send_and_ping(msg)
-
- # obtain addresses via rpc call and check they were ones sent in before
- REQUEST_COUNT = 10
- node_addresses = self.nodes[0].getnodeaddresses(REQUEST_COUNT)
- assert_equal(len(node_addresses), REQUEST_COUNT)
+ self.nodes[0].addpeeraddress(a, 8333)
+
+ # Obtain addresses via rpc call and check they were ones sent in before.
+ #
+ # Maximum possible addresses in addrman is 10000, although actual
+ # number will usually be less due to bucket and bucket position
+ # collisions.
+ node_addresses = self.nodes[0].getnodeaddresses(0)
+ assert_greater_than(len(node_addresses), 5000)
+ assert_greater_than(10000, len(node_addresses))
for a in node_addresses:
- assert_greater_than(a["time"], 1527811200) # 1st June 2018
+ assert_greater_than(a["time"], 1527811200) # 1st June 2018
assert_equal(a["services"], NODE_NETWORK | NODE_WITNESS)
assert a["address"] in imported_addrs
assert_equal(a["port"], 8333)
+ node_addresses = self.nodes[0].getnodeaddresses(1)
+ assert_equal(len(node_addresses), 1)
+
assert_raises_rpc_error(-8, "Address count out of range", self.nodes[0].getnodeaddresses, -1)
# addrman's size cannot be known reliably after insertion, as hash collisions may occur
diff --git a/test/functional/rpc_preciousblock.py b/test/functional/rpc_preciousblock.py
index 8386e47411..04d55b103f 100755
--- a/test/functional/rpc_preciousblock.py
+++ b/test/functional/rpc_preciousblock.py
@@ -7,7 +7,6 @@
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
- connect_nodes,
)
def unidirectional_node_sync_via_rpc(node_src, node_dest):
@@ -61,7 +60,7 @@ class PreciousTest(BitcoinTestFramework):
self.log.info("Connect nodes and check no reorg occurs")
# Submit competing blocks via RPC so any reorg should occur before we proceed (no way to wait on inaction for p2p sync)
node_sync_via_rpc(self.nodes[0:2])
- connect_nodes(self.nodes[0], 1)
+ self.connect_nodes(0, 1)
assert_equal(self.nodes[0].getbestblockhash(), hashC)
assert_equal(self.nodes[1].getbestblockhash(), hashG)
self.log.info("Make Node0 prefer block G")
@@ -98,8 +97,8 @@ class PreciousTest(BitcoinTestFramework):
hashL = self.nodes[2].getbestblockhash()
self.log.info("Connect nodes and check no reorg occurs")
node_sync_via_rpc(self.nodes[1:3])
- connect_nodes(self.nodes[1], 2)
- connect_nodes(self.nodes[0], 2)
+ self.connect_nodes(1, 2)
+ self.connect_nodes(0, 2)
assert_equal(self.nodes[0].getbestblockhash(), hashH)
assert_equal(self.nodes[1].getbestblockhash(), hashH)
assert_equal(self.nodes[2].getbestblockhash(), hashL)
diff --git a/test/functional/rpc_psbt.py b/test/functional/rpc_psbt.py
index f7f23bc8f4..32dc2f8644 100755
--- a/test/functional/rpc_psbt.py
+++ b/test/functional/rpc_psbt.py
@@ -12,8 +12,6 @@ from test_framework.util import (
assert_equal,
assert_greater_than,
assert_raises_rpc_error,
- connect_nodes,
- disconnect_nodes,
find_output,
)
@@ -30,7 +28,7 @@ class PSBTTest(BitcoinTestFramework):
self.num_nodes = 3
self.extra_args = [
["-walletrbf=1"],
- ["-walletrbf=0"],
+ ["-walletrbf=0", "-changetype=legacy"],
[]
]
self.supports_cli = False
@@ -46,7 +44,7 @@ class PSBTTest(BitcoinTestFramework):
# Disconnect offline node from others
# Topology of test network is linear, so this one call is enough
- disconnect_nodes(offline_node, 1)
+ self.disconnect_nodes(0, 1)
# Create watchonly on online_node
online_node.createwallet(wallet_name='wonline', disable_private_keys=True)
@@ -80,8 +78,16 @@ class PSBTTest(BitcoinTestFramework):
wonline.unloadwallet()
# Reconnect
- connect_nodes(self.nodes[0], 1)
- connect_nodes(self.nodes[0], 2)
+ self.connect_nodes(0, 1)
+ self.connect_nodes(0, 2)
+
+ def assert_change_type(self, psbtx, expected_type):
+ """Assert that the given PSBT has a change output with the given type."""
+
+ # The decodepsbt RPC is stateless and independent of any settings, so we can always just call it on the first node
+ decoded_psbt = self.nodes[0].decodepsbt(psbtx["psbt"])
+ changepos = psbtx["changepos"]
+ assert_equal(decoded_psbt["tx"]["vout"][changepos]["scriptPubKey"]["type"], expected_type)
def run_test(self):
# Create and fund a raw tx for sending 10 BTC
@@ -94,6 +100,9 @@ class PSBTTest(BitcoinTestFramework):
psbtx1 = self.nodes[0].walletcreatefundedpsbt([{"txid": utxo1['txid'], "vout": utxo1['vout']}], {self.nodes[2].getnewaddress():90}, 0, {"add_inputs": True})['psbt']
assert_equal(len(self.nodes[0].decodepsbt(psbtx1)['tx']['vin']), 2)
+ # Inputs argument can be null
+ self.nodes[0].walletcreatefundedpsbt(None, {self.nodes[2].getnewaddress():10})
+
# Node 1 should not be able to add anything to it but still return the psbtx same as before
psbtx = self.nodes[1].walletprocesspsbt(psbtx1)['psbt']
assert_equal(psbtx1, psbtx)
@@ -103,7 +112,16 @@ class PSBTTest(BitcoinTestFramework):
final_tx = self.nodes[0].finalizepsbt(signed_tx)['hex']
self.nodes[0].sendrawtransaction(final_tx)
- # Get pubkeys
+ # Manually selected inputs can be locked:
+ assert_equal(len(self.nodes[0].listlockunspent()), 0)
+ utxo1 = self.nodes[0].listunspent()[0]
+ psbtx1 = self.nodes[0].walletcreatefundedpsbt([{"txid": utxo1['txid'], "vout": utxo1['vout']}], {self.nodes[2].getnewaddress():1}, 0,{"lockUnspents": True})["psbt"]
+ assert_equal(len(self.nodes[0].listlockunspent()), 1)
+
+ # Locks are ignored for manually selected inputs
+ self.nodes[0].walletcreatefundedpsbt([{"txid": utxo1['txid'], "vout": utxo1['vout']}], {self.nodes[2].getnewaddress():1}, 0)
+
+ # Create p2sh, p2wpkh, and p2wsh addresses
pubkey0 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())['pubkey']
pubkey1 = self.nodes[1].getaddressinfo(self.nodes[1].getnewaddress())['pubkey']
pubkey2 = self.nodes[2].getaddressinfo(self.nodes[2].getnewaddress())['pubkey']
@@ -172,8 +190,8 @@ class PSBTTest(BitcoinTestFramework):
# feeRate of 10 BTC / KB produces a total fee well above -maxtxfee
# previously this was silently capped at -maxtxfee
- assert_raises_rpc_error(-4, "Fee exceeds maximum configured by -maxtxfee", self.nodes[1].walletcreatefundedpsbt, [{"txid":txid,"vout":p2wpkh_pos},{"txid":txid,"vout":p2sh_p2wpkh_pos},{"txid":txid,"vout":p2pkh_pos}], {self.nodes[1].getnewaddress():29.99}, 0, {"feeRate": 10, "add_inputs": True})
- assert_raises_rpc_error(-4, "Fee exceeds maximum configured by -maxtxfee", self.nodes[1].walletcreatefundedpsbt, [{"txid":txid,"vout":p2wpkh_pos},{"txid":txid,"vout":p2sh_p2wpkh_pos},{"txid":txid,"vout":p2pkh_pos}], {self.nodes[1].getnewaddress():1}, 0, {"feeRate": 10, "add_inputs": False})
+ assert_raises_rpc_error(-4, "Fee exceeds maximum configured by user (e.g. -maxtxfee, maxfeerate)", self.nodes[1].walletcreatefundedpsbt, [{"txid":txid,"vout":p2wpkh_pos},{"txid":txid,"vout":p2sh_p2wpkh_pos},{"txid":txid,"vout":p2pkh_pos}], {self.nodes[1].getnewaddress():29.99}, 0, {"feeRate": 10, "add_inputs": True})
+ assert_raises_rpc_error(-4, "Fee exceeds maximum configured by user (e.g. -maxtxfee, maxfeerate)", self.nodes[1].walletcreatefundedpsbt, [{"txid":txid,"vout":p2wpkh_pos},{"txid":txid,"vout":p2sh_p2wpkh_pos},{"txid":txid,"vout":p2pkh_pos}], {self.nodes[1].getnewaddress():1}, 0, {"feeRate": 10, "add_inputs": False})
# partially sign multisig things with node 1
psbtx = wmulti.walletcreatefundedpsbt(inputs=[{"txid":txid,"vout":p2wsh_pos},{"txid":txid,"vout":p2sh_pos},{"txid":txid,"vout":p2sh_p2wsh_pos}], outputs={self.nodes[1].getnewaddress():29.99}, options={'changeAddress': self.nodes[1].getrawchangeaddress()})['psbt']
@@ -289,6 +307,21 @@ class PSBTTest(BitcoinTestFramework):
# when attempting BnB coin selection
self.nodes[0].walletcreatefundedpsbt([], [{self.nodes[2].getnewaddress():unspent["amount"]+1}], block_height+2, {"changeAddress":self.nodes[1].getnewaddress()}, False)
+ # Make sure the wallet's change type is respected by default
+ small_output = {self.nodes[0].getnewaddress():0.1}
+ psbtx_native = self.nodes[0].walletcreatefundedpsbt([], [small_output])
+ self.assert_change_type(psbtx_native, "witness_v0_keyhash")
+ psbtx_legacy = self.nodes[1].walletcreatefundedpsbt([], [small_output])
+ self.assert_change_type(psbtx_legacy, "pubkeyhash")
+
+ # Make sure the change type of the wallet can also be overwritten
+ psbtx_np2wkh = self.nodes[1].walletcreatefundedpsbt([], [small_output], 0, {"change_type":"p2sh-segwit"})
+ self.assert_change_type(psbtx_np2wkh, "scripthash")
+
+ # Make sure the change type cannot be specified if a change address is given
+ invalid_options = {"change_type":"legacy","changeAddress":self.nodes[0].getnewaddress()}
+ assert_raises_rpc_error(-8, "both change address and address type options", self.nodes[0].walletcreatefundedpsbt, [], [small_output], 0, invalid_options)
+
# Regression test for 14473 (mishandling of already-signed witness transaction):
psbtx_info = self.nodes[0].walletcreatefundedpsbt([{"txid":unspent["txid"], "vout":unspent["vout"]}], [{self.nodes[2].getnewaddress():unspent["amount"]+1}], 0, {"add_inputs": True})
complete_psbt = self.nodes[0].walletprocesspsbt(psbtx_info["psbt"])
diff --git a/test/functional/rpc_rawtransaction.py b/test/functional/rpc_rawtransaction.py
index 23b5e647d6..326495843f 100755
--- a/test/functional/rpc_rawtransaction.py
+++ b/test/functional/rpc_rawtransaction.py
@@ -20,7 +20,6 @@ from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
- connect_nodes,
hex_str_to_bytes,
)
@@ -60,7 +59,7 @@ class RawTransactionsTest(BitcoinTestFramework):
def setup_network(self):
super().setup_network()
- connect_nodes(self.nodes[0], 2)
+ self.connect_nodes(0, 2)
def run_test(self):
self.log.info('prepare some coins for multiple *rawtransaction commands')
@@ -96,7 +95,7 @@ class RawTransactionsTest(BitcoinTestFramework):
assert_raises_rpc_error(-8, "txid must be hexadecimal string (not 'ZZZ7bb8b1697ea987f3b223ba7819250cae33efacb068d23dc24859824a77844')", self.nodes[0].createrawtransaction, [{'txid': 'ZZZ7bb8b1697ea987f3b223ba7819250cae33efacb068d23dc24859824a77844'}], {})
assert_raises_rpc_error(-8, "Invalid parameter, missing vout key", self.nodes[0].createrawtransaction, [{'txid': txid}], {})
assert_raises_rpc_error(-8, "Invalid parameter, missing vout key", self.nodes[0].createrawtransaction, [{'txid': txid, 'vout': 'foo'}], {})
- assert_raises_rpc_error(-8, "Invalid parameter, vout must be positive", self.nodes[0].createrawtransaction, [{'txid': txid, 'vout': -1}], {})
+ assert_raises_rpc_error(-8, "Invalid parameter, vout cannot be negative", self.nodes[0].createrawtransaction, [{'txid': txid, 'vout': -1}], {})
assert_raises_rpc_error(-8, "Invalid parameter, sequence number is out of range", self.nodes[0].createrawtransaction, [{'txid': txid, 'vout': 0, 'sequence': -1}], {})
# Test `createrawtransaction` invalid `outputs`
@@ -456,9 +455,9 @@ class RawTransactionsTest(BitcoinTestFramework):
# Thus, testmempoolaccept should reject
testres = self.nodes[2].testmempoolaccept([rawTxSigned['hex']], 0.00001000)[0]
assert_equal(testres['allowed'], False)
- assert_equal(testres['reject-reason'], 'absurdly-high-fee')
+ assert_equal(testres['reject-reason'], 'max-fee-exceeded')
# and sendrawtransaction should throw
- assert_raises_rpc_error(-26, "absurdly-high-fee", self.nodes[2].sendrawtransaction, rawTxSigned['hex'], 0.00001000)
+ assert_raises_rpc_error(-25, 'Fee exceeds maximum configured by user (e.g. -maxtxfee, maxfeerate)', self.nodes[2].sendrawtransaction, rawTxSigned['hex'], 0.00001000)
# and the following calls should both succeed
testres = self.nodes[2].testmempoolaccept(rawtxs=[rawTxSigned['hex']])[0]
assert_equal(testres['allowed'], True)
@@ -480,9 +479,9 @@ class RawTransactionsTest(BitcoinTestFramework):
# Thus, testmempoolaccept should reject
testres = self.nodes[2].testmempoolaccept([rawTxSigned['hex']])[0]
assert_equal(testres['allowed'], False)
- assert_equal(testres['reject-reason'], 'absurdly-high-fee')
+ assert_equal(testres['reject-reason'], 'max-fee-exceeded')
# and sendrawtransaction should throw
- assert_raises_rpc_error(-26, "absurdly-high-fee", self.nodes[2].sendrawtransaction, rawTxSigned['hex'])
+ assert_raises_rpc_error(-25, 'Fee exceeds maximum configured by user (e.g. -maxtxfee, maxfeerate)', self.nodes[2].sendrawtransaction, rawTxSigned['hex'])
# and the following calls should both succeed
testres = self.nodes[2].testmempoolaccept(rawtxs=[rawTxSigned['hex']], maxfeerate='0.20000000')[0]
assert_equal(testres['allowed'], True)
diff --git a/test/functional/rpc_setban.py b/test/functional/rpc_setban.py
index 1cc1fb164b..bc48449084 100755
--- a/test/functional/rpc_setban.py
+++ b/test/functional/rpc_setban.py
@@ -6,7 +6,6 @@
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
- connect_nodes,
p2p_port
)
@@ -18,7 +17,7 @@ class SetBanTests(BitcoinTestFramework):
def run_test(self):
# Node 0 connects to Node 1, check that the noban permission is not granted
- connect_nodes(self.nodes[0], 1)
+ self.connect_nodes(0, 1)
peerinfo = self.nodes[1].getpeerinfo()[0]
assert(not 'noban' in peerinfo['permissions'])
@@ -32,14 +31,14 @@ class SetBanTests(BitcoinTestFramework):
# However, node 0 should be able to reconnect if it has noban permission
self.restart_node(1, ['-whitelist=127.0.0.1'])
- connect_nodes(self.nodes[0], 1)
+ self.connect_nodes(0, 1)
peerinfo = self.nodes[1].getpeerinfo()[0]
assert('noban' in peerinfo['permissions'])
# If we remove the ban, Node 0 should be able to reconnect even without noban permission
self.nodes[1].setban("127.0.0.1", "remove")
self.restart_node(1, [])
- connect_nodes(self.nodes[0], 1)
+ self.connect_nodes(0, 1)
peerinfo = self.nodes[1].getpeerinfo()[0]
assert(not 'noban' in peerinfo['permissions'])
diff --git a/test/functional/rpc_signrawtransaction.py b/test/functional/rpc_signrawtransaction.py
index 3d08202724..704b65c060 100755
--- a/test/functional/rpc_signrawtransaction.py
+++ b/test/functional/rpc_signrawtransaction.py
@@ -198,10 +198,30 @@ class SignRawTransactionsTest(BitcoinTestFramework):
assert_equal(spending_tx_signed['complete'], True)
self.nodes[0].sendrawtransaction(spending_tx_signed['hex'])
+ def OP_1NEGATE_test(self):
+ self.log.info("Test OP_1NEGATE (0x4f) satisfies BIP62 minimal push standardness rule")
+ hex_str = (
+ "0200000001FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF"
+ "FFFFFFFF00000000044F024F9CFDFFFFFF01F0B9F5050000000023210277777777"
+ "77777777777777777777777777777777777777777777777777777777AC66030000"
+ )
+ prev_txs = [
+ {
+ "txid": "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF",
+ "vout": 0,
+ "scriptPubKey": "A914AE44AB6E9AA0B71F1CD2B453B69340E9BFBAEF6087",
+ "redeemScript": "4F9C",
+ "amount": 1,
+ }
+ ]
+ txn = self.nodes[0].signrawtransactionwithwallet(hex_str, prev_txs)
+ assert txn["complete"]
+
def run_test(self):
self.successful_signing_test()
self.script_verification_error_test()
self.witness_script_test()
+ self.OP_1NEGATE_test()
self.test_with_lock_outputs()
diff --git a/test/functional/rpc_txoutproof.py b/test/functional/rpc_txoutproof.py
index ca8be42d3b..93fb62c5d6 100755
--- a/test/functional/rpc_txoutproof.py
+++ b/test/functional/rpc_txoutproof.py
@@ -6,41 +6,31 @@
from test_framework.messages import CMerkleBlock, FromHex, ToHex
from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import assert_equal, assert_raises_rpc_error, connect_nodes
+from test_framework.util import assert_equal, assert_raises_rpc_error
+from test_framework.wallet import MiniWallet
+
class MerkleBlockTest(BitcoinTestFramework):
def set_test_params(self):
- self.num_nodes = 4
+ self.num_nodes = 2
self.setup_clean_chain = True
- # Nodes 0/1 are "wallet" nodes, Nodes 2/3 are used for testing
- self.extra_args = [[], [], [], ["-txindex"]]
-
- def skip_test_if_missing_module(self):
- self.skip_if_no_wallet()
-
- def setup_network(self):
- self.setup_nodes()
- connect_nodes(self.nodes[0], 1)
- connect_nodes(self.nodes[0], 2)
- connect_nodes(self.nodes[0], 3)
-
- self.sync_all()
+ self.extra_args = [
+ [],
+ ["-txindex"],
+ ]
def run_test(self):
- self.log.info("Mining blocks...")
- self.nodes[0].generate(105)
+ miniwallet = MiniWallet(self.nodes[0])
+ # Add enough mature utxos to the wallet, so that all txs spend confirmed coins
+ miniwallet.generate(5)
+ self.nodes[0].generate(100)
self.sync_all()
chain_height = self.nodes[1].getblockcount()
assert_equal(chain_height, 105)
- assert_equal(self.nodes[1].getbalance(), 0)
- assert_equal(self.nodes[2].getbalance(), 0)
-
- node0utxos = self.nodes[0].listunspent(1)
- tx1 = self.nodes[0].createrawtransaction([node0utxos.pop()], {self.nodes[1].getnewaddress(): 49.99})
- txid1 = self.nodes[0].sendrawtransaction(self.nodes[0].signrawtransactionwithwallet(tx1)["hex"])
- tx2 = self.nodes[0].createrawtransaction([node0utxos.pop()], {self.nodes[1].getnewaddress(): 49.99})
- txid2 = self.nodes[0].sendrawtransaction(self.nodes[0].signrawtransactionwithwallet(tx2)["hex"])
+
+ txid1 = miniwallet.send_self_transfer(from_node=self.nodes[0])['txid']
+ txid2 = miniwallet.send_self_transfer(from_node=self.nodes[0])['txid']
# This will raise an exception because the transaction is not yet in a block
assert_raises_rpc_error(-5, "Transaction not yet in block", self.nodes[0].gettxoutproof, [txid1])
@@ -53,50 +43,54 @@ class MerkleBlockTest(BitcoinTestFramework):
txlist.append(blocktxn[1])
txlist.append(blocktxn[2])
- assert_equal(self.nodes[2].verifytxoutproof(self.nodes[2].gettxoutproof([txid1])), [txid1])
- assert_equal(self.nodes[2].verifytxoutproof(self.nodes[2].gettxoutproof([txid1, txid2])), txlist)
- assert_equal(self.nodes[2].verifytxoutproof(self.nodes[2].gettxoutproof([txid1, txid2], blockhash)), txlist)
+ assert_equal(self.nodes[0].verifytxoutproof(self.nodes[0].gettxoutproof([txid1])), [txid1])
+ assert_equal(self.nodes[0].verifytxoutproof(self.nodes[0].gettxoutproof([txid1, txid2])), txlist)
+ assert_equal(self.nodes[0].verifytxoutproof(self.nodes[0].gettxoutproof([txid1, txid2], blockhash)), txlist)
- txin_spent = self.nodes[1].listunspent(1).pop()
- tx3 = self.nodes[1].createrawtransaction([txin_spent], {self.nodes[0].getnewaddress(): 49.98})
- txid3 = self.nodes[0].sendrawtransaction(self.nodes[1].signrawtransactionwithwallet(tx3)["hex"])
+ txin_spent = miniwallet.get_utxo() # Get the change from txid2
+ tx3 = miniwallet.send_self_transfer(from_node=self.nodes[0], utxo_to_spend=txin_spent)
+ txid3 = tx3['txid']
self.nodes[0].generate(1)
self.sync_all()
txid_spent = txin_spent["txid"]
- txid_unspent = txid1 if txin_spent["txid"] != txid1 else txid2
+ txid_unspent = txid1 # Input was change from txid2, so txid1 should be unspent
# Invalid txids
- assert_raises_rpc_error(-8, "txid must be of length 64 (not 32, for '00000000000000000000000000000000')", self.nodes[2].gettxoutproof, ["00000000000000000000000000000000"], blockhash)
- assert_raises_rpc_error(-8, "txid must be hexadecimal string (not 'ZZZ0000000000000000000000000000000000000000000000000000000000000')", self.nodes[2].gettxoutproof, ["ZZZ0000000000000000000000000000000000000000000000000000000000000"], blockhash)
+ assert_raises_rpc_error(-8, "txid must be of length 64 (not 32, for '00000000000000000000000000000000')", self.nodes[0].gettxoutproof, ["00000000000000000000000000000000"], blockhash)
+ assert_raises_rpc_error(-8, "txid must be hexadecimal string (not 'ZZZ0000000000000000000000000000000000000000000000000000000000000')", self.nodes[0].gettxoutproof, ["ZZZ0000000000000000000000000000000000000000000000000000000000000"], blockhash)
# Invalid blockhashes
- assert_raises_rpc_error(-8, "blockhash must be of length 64 (not 32, for '00000000000000000000000000000000')", self.nodes[2].gettxoutproof, [txid_spent], "00000000000000000000000000000000")
- assert_raises_rpc_error(-8, "blockhash must be hexadecimal string (not 'ZZZ0000000000000000000000000000000000000000000000000000000000000')", self.nodes[2].gettxoutproof, [txid_spent], "ZZZ0000000000000000000000000000000000000000000000000000000000000")
+ assert_raises_rpc_error(-8, "blockhash must be of length 64 (not 32, for '00000000000000000000000000000000')", self.nodes[0].gettxoutproof, [txid_spent], "00000000000000000000000000000000")
+ assert_raises_rpc_error(-8, "blockhash must be hexadecimal string (not 'ZZZ0000000000000000000000000000000000000000000000000000000000000')", self.nodes[0].gettxoutproof, [txid_spent], "ZZZ0000000000000000000000000000000000000000000000000000000000000")
# We can't find the block from a fully-spent tx
- assert_raises_rpc_error(-5, "Transaction not yet in block", self.nodes[2].gettxoutproof, [txid_spent])
+ assert_raises_rpc_error(-5, "Transaction not yet in block", self.nodes[0].gettxoutproof, [txid_spent])
# We can get the proof if we specify the block
- assert_equal(self.nodes[2].verifytxoutproof(self.nodes[2].gettxoutproof([txid_spent], blockhash)), [txid_spent])
+ assert_equal(self.nodes[0].verifytxoutproof(self.nodes[0].gettxoutproof([txid_spent], blockhash)), [txid_spent])
# We can't get the proof if we specify a non-existent block
- assert_raises_rpc_error(-5, "Block not found", self.nodes[2].gettxoutproof, [txid_spent], "0000000000000000000000000000000000000000000000000000000000000000")
+ assert_raises_rpc_error(-5, "Block not found", self.nodes[0].gettxoutproof, [txid_spent], "0000000000000000000000000000000000000000000000000000000000000000")
# We can get the proof if the transaction is unspent
- assert_equal(self.nodes[2].verifytxoutproof(self.nodes[2].gettxoutproof([txid_unspent])), [txid_unspent])
+ assert_equal(self.nodes[0].verifytxoutproof(self.nodes[0].gettxoutproof([txid_unspent])), [txid_unspent])
# We can get the proof if we provide a list of transactions and one of them is unspent. The ordering of the list should not matter.
- assert_equal(sorted(self.nodes[2].verifytxoutproof(self.nodes[2].gettxoutproof([txid1, txid2]))), sorted(txlist))
- assert_equal(sorted(self.nodes[2].verifytxoutproof(self.nodes[2].gettxoutproof([txid2, txid1]))), sorted(txlist))
+ assert_equal(sorted(self.nodes[0].verifytxoutproof(self.nodes[0].gettxoutproof([txid1, txid2]))), sorted(txlist))
+ assert_equal(sorted(self.nodes[0].verifytxoutproof(self.nodes[0].gettxoutproof([txid2, txid1]))), sorted(txlist))
# We can always get a proof if we have a -txindex
- assert_equal(self.nodes[2].verifytxoutproof(self.nodes[3].gettxoutproof([txid_spent])), [txid_spent])
+ assert_equal(self.nodes[0].verifytxoutproof(self.nodes[1].gettxoutproof([txid_spent])), [txid_spent])
# We can't get a proof if we specify transactions from different blocks
- assert_raises_rpc_error(-5, "Not all transactions found in specified or retrieved block", self.nodes[2].gettxoutproof, [txid1, txid3])
+ assert_raises_rpc_error(-5, "Not all transactions found in specified or retrieved block", self.nodes[0].gettxoutproof, [txid1, txid3])
+ # Test empty list
+ assert_raises_rpc_error(-5, "Transaction not yet in block", self.nodes[0].gettxoutproof, [])
+ # Test duplicate txid
+ assert_raises_rpc_error(-8, 'Invalid parameter, duplicated txid', self.nodes[0].gettxoutproof, [txid1, txid1])
# Now we'll try tweaking a proof.
- proof = self.nodes[3].gettxoutproof([txid1, txid2])
+ proof = self.nodes[1].gettxoutproof([txid1, txid2])
assert txid1 in self.nodes[0].verifytxoutproof(proof)
assert txid2 in self.nodes[1].verifytxoutproof(proof)
tweaked_proof = FromHex(CMerkleBlock(), proof)
# Make sure that our serialization/deserialization is working
- assert txid1 in self.nodes[2].verifytxoutproof(ToHex(tweaked_proof))
+ assert txid1 in self.nodes[0].verifytxoutproof(ToHex(tweaked_proof))
# Check to see if we can go up the merkle tree and pass this off as a
# single-transaction block
diff --git a/test/functional/test_framework/address.py b/test/functional/test_framework/address.py
index 9506b63f82..360962b8da 100644
--- a/test/functional/test_framework/address.py
+++ b/test/functional/test_framework/address.py
@@ -2,17 +2,17 @@
# Copyright (c) 2016-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
-"""Encode and decode BASE58, P2PKH and P2SH addresses."""
+"""Encode and decode Bitcoin addresses.
+
+- base58 P2PKH and P2SH addresses.
+- bech32 segwit v0 P2WPKH and P2WSH addresses."""
import enum
import unittest
from .script import hash256, hash160, sha256, CScript, OP_0
-from .util import hex_str_to_bytes
-
-from . import segwit_addr
-
-from test_framework.util import assert_equal
+from .segwit_addr import encode_segwit_address
+from .util import assert_equal, hex_str_to_bytes
ADDRESS_BCRT1_UNSPENDABLE = 'bcrt1qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq3xueyj'
ADDRESS_BCRT1_UNSPENDABLE_DESCRIPTOR = 'addr(bcrt1qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq3xueyj)#juyq9d97'
@@ -35,7 +35,7 @@ def byte_to_base58(b, version):
str = chr(version).encode('latin-1').hex() + str
checksum = hash256(hex_str_to_bytes(str)).hex()
str += checksum[:8]
- value = int('0x'+str,0)
+ value = int('0x' + str, 0)
while value > 0:
result = chars[value % 58] + result
value //= 58
@@ -45,7 +45,10 @@ def byte_to_base58(b, version):
return result
-def base58_to_byte(s, verify_checksum=True):
+def base58_to_byte(s):
+ """Converts a base58-encoded string to its data and version.
+
+ Throws if the base58 checksum is invalid."""
if not s:
return b''
n = 0
@@ -65,66 +68,67 @@ def base58_to_byte(s, verify_checksum=True):
else:
break
res = b'\x00' * pad + res
- if verify_checksum:
- assert_equal(hash256(res[:-4])[:4], res[-4:])
+
+ # Throws if the checksum is invalid
+ assert_equal(hash256(res[:-4])[:4], res[-4:])
return res[1:-4], int(res[0])
-def keyhash_to_p2pkh(hash, main = False):
+def keyhash_to_p2pkh(hash, main=False):
assert len(hash) == 20
version = 0 if main else 111
return byte_to_base58(hash, version)
-def scripthash_to_p2sh(hash, main = False):
+def scripthash_to_p2sh(hash, main=False):
assert len(hash) == 20
version = 5 if main else 196
return byte_to_base58(hash, version)
-def key_to_p2pkh(key, main = False):
+def key_to_p2pkh(key, main=False):
key = check_key(key)
return keyhash_to_p2pkh(hash160(key), main)
-def script_to_p2sh(script, main = False):
+def script_to_p2sh(script, main=False):
script = check_script(script)
return scripthash_to_p2sh(hash160(script), main)
-def key_to_p2sh_p2wpkh(key, main = False):
+def key_to_p2sh_p2wpkh(key, main=False):
key = check_key(key)
p2shscript = CScript([OP_0, hash160(key)])
return script_to_p2sh(p2shscript, main)
-def program_to_witness(version, program, main = False):
+def program_to_witness(version, program, main=False):
if (type(program) is str):
program = hex_str_to_bytes(program)
assert 0 <= version <= 16
assert 2 <= len(program) <= 40
assert version > 0 or len(program) in [20, 32]
- return segwit_addr.encode("bc" if main else "bcrt", version, program)
+ return encode_segwit_address("bc" if main else "bcrt", version, program)
-def script_to_p2wsh(script, main = False):
+def script_to_p2wsh(script, main=False):
script = check_script(script)
return program_to_witness(0, sha256(script), main)
-def key_to_p2wpkh(key, main = False):
+def key_to_p2wpkh(key, main=False):
key = check_key(key)
return program_to_witness(0, hash160(key), main)
-def script_to_p2sh_p2wsh(script, main = False):
+def script_to_p2sh_p2wsh(script, main=False):
script = check_script(script)
p2shscript = CScript([OP_0, sha256(script)])
return script_to_p2sh(p2shscript, main)
def check_key(key):
if (type(key) is str):
- key = hex_str_to_bytes(key) # Assuming this is hex string
+ key = hex_str_to_bytes(key) # Assuming this is hex string
if (type(key) is bytes and (len(key) == 33 or len(key) == 65)):
return key
assert False
def check_script(script):
if (type(script) is str):
- script = hex_str_to_bytes(script) # Assuming this is hex string
+ script = hex_str_to_bytes(script) # Assuming this is hex string
if (type(script) is bytes or type(script) is CScript):
return script
assert False
@@ -135,15 +139,15 @@ class TestFrameworkScript(unittest.TestCase):
def check_base58(data, version):
self.assertEqual(base58_to_byte(byte_to_base58(data, version)), (data, version))
- check_base58(b'\x1f\x8e\xa1p*{\xd4\x94\x1b\xca\tA\xb8R\xc4\xbb\xfe\xdb.\x05', 111)
- check_base58(b':\x0b\x05\xf4\xd7\xf6l;\xa7\x00\x9fE50)l\x84\\\xc9\xcf', 111)
- check_base58(b'A\xc1\xea\xf1\x11\x80%Y\xba\xd6\x1b`\xd6+\x1f\x89|c\x92\x8a', 111)
- check_base58(b'\0A\xc1\xea\xf1\x11\x80%Y\xba\xd6\x1b`\xd6+\x1f\x89|c\x92\x8a', 111)
- check_base58(b'\0\0A\xc1\xea\xf1\x11\x80%Y\xba\xd6\x1b`\xd6+\x1f\x89|c\x92\x8a', 111)
- check_base58(b'\0\0\0A\xc1\xea\xf1\x11\x80%Y\xba\xd6\x1b`\xd6+\x1f\x89|c\x92\x8a', 111)
- check_base58(b'\x1f\x8e\xa1p*{\xd4\x94\x1b\xca\tA\xb8R\xc4\xbb\xfe\xdb.\x05', 0)
- check_base58(b':\x0b\x05\xf4\xd7\xf6l;\xa7\x00\x9fE50)l\x84\\\xc9\xcf', 0)
- check_base58(b'A\xc1\xea\xf1\x11\x80%Y\xba\xd6\x1b`\xd6+\x1f\x89|c\x92\x8a', 0)
- check_base58(b'\0A\xc1\xea\xf1\x11\x80%Y\xba\xd6\x1b`\xd6+\x1f\x89|c\x92\x8a', 0)
- check_base58(b'\0\0A\xc1\xea\xf1\x11\x80%Y\xba\xd6\x1b`\xd6+\x1f\x89|c\x92\x8a', 0)
- check_base58(b'\0\0\0A\xc1\xea\xf1\x11\x80%Y\xba\xd6\x1b`\xd6+\x1f\x89|c\x92\x8a', 0)
+ check_base58(bytes.fromhex('1f8ea1702a7bd4941bca0941b852c4bbfedb2e05'), 111)
+ check_base58(bytes.fromhex('3a0b05f4d7f66c3ba7009f453530296c845cc9cf'), 111)
+ check_base58(bytes.fromhex('41c1eaf111802559bad61b60d62b1f897c63928a'), 111)
+ check_base58(bytes.fromhex('0041c1eaf111802559bad61b60d62b1f897c63928a'), 111)
+ check_base58(bytes.fromhex('000041c1eaf111802559bad61b60d62b1f897c63928a'), 111)
+ check_base58(bytes.fromhex('00000041c1eaf111802559bad61b60d62b1f897c63928a'), 111)
+ check_base58(bytes.fromhex('1f8ea1702a7bd4941bca0941b852c4bbfedb2e05'), 0)
+ check_base58(bytes.fromhex('3a0b05f4d7f66c3ba7009f453530296c845cc9cf'), 0)
+ check_base58(bytes.fromhex('41c1eaf111802559bad61b60d62b1f897c63928a'), 0)
+ check_base58(bytes.fromhex('0041c1eaf111802559bad61b60d62b1f897c63928a'), 0)
+ check_base58(bytes.fromhex('000041c1eaf111802559bad61b60d62b1f897c63928a'), 0)
+ check_base58(bytes.fromhex('00000041c1eaf111802559bad61b60d62b1f897c63928a'), 0)
diff --git a/test/functional/test_framework/bip340_test_vectors.csv b/test/functional/test_framework/bip340_test_vectors.csv
new file mode 100644
index 0000000000..e068322deb
--- /dev/null
+++ b/test/functional/test_framework/bip340_test_vectors.csv
@@ -0,0 +1,16 @@
+index,secret key,public key,aux_rand,message,signature,verification result,comment
+0,0000000000000000000000000000000000000000000000000000000000000003,F9308A019258C31049344F85F89D5229B531C845836F99B08601F113BCE036F9,0000000000000000000000000000000000000000000000000000000000000000,0000000000000000000000000000000000000000000000000000000000000000,E907831F80848D1069A5371B402410364BDF1C5F8307B0084C55F1CE2DCA821525F66A4A85EA8B71E482A74F382D2CE5EBEEE8FDB2172F477DF4900D310536C0,TRUE,
+1,B7E151628AED2A6ABF7158809CF4F3C762E7160F38B4DA56A784D9045190CFEF,DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659,0000000000000000000000000000000000000000000000000000000000000001,243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89,6896BD60EEAE296DB48A229FF71DFE071BDE413E6D43F917DC8DCF8C78DE33418906D11AC976ABCCB20B091292BFF4EA897EFCB639EA871CFA95F6DE339E4B0A,TRUE,
+2,C90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B14E5C9,DD308AFEC5777E13121FA72B9CC1B7CC0139715309B086C960E18FD969774EB8,C87AA53824B4D7AE2EB035A2B5BBBCCC080E76CDC6D1692C4B0B62D798E6D906,7E2D58D8B3BCDF1ABADEC7829054F90DDA9805AAB56C77333024B9D0A508B75C,5831AAEED7B44BB74E5EAB94BA9D4294C49BCF2A60728D8B4C200F50DD313C1BAB745879A5AD954A72C45A91C3A51D3C7ADEA98D82F8481E0E1E03674A6F3FB7,TRUE,
+3,0B432B2677937381AEF05BB02A66ECD012773062CF3FA2549E44F58ED2401710,25D1DFF95105F5253C4022F628A996AD3A0D95FBF21D468A1B33F8C160D8F517,FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF,FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF,7EB0509757E246F19449885651611CB965ECC1A187DD51B64FDA1EDC9637D5EC97582B9CB13DB3933705B32BA982AF5AF25FD78881EBB32771FC5922EFC66EA3,TRUE,test fails if msg is reduced modulo p or n
+4,,D69C3509BB99E412E68B0FE8544E72837DFA30746D8BE2AA65975F29D22DC7B9,,4DF3C3F68FCC83B27E9D42C90431A72499F17875C81A599B566C9889B9696703,00000000000000000000003B78CE563F89A0ED9414F5AA28AD0D96D6795F9C6376AFB1548AF603B3EB45C9F8207DEE1060CB71C04E80F593060B07D28308D7F4,TRUE,
+5,,EEFDEA4CDB677750A420FEE807EACF21EB9898AE79B9768766E4FAA04A2D4A34,,243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89,6CFF5C3BA86C69EA4B7376F31A9BCB4F74C1976089B2D9963DA2E5543E17776969E89B4C5564D00349106B8497785DD7D1D713A8AE82B32FA79D5F7FC407D39B,FALSE,public key not on the curve
+6,,DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659,,243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89,FFF97BD5755EEEA420453A14355235D382F6472F8568A18B2F057A14602975563CC27944640AC607CD107AE10923D9EF7A73C643E166BE5EBEAFA34B1AC553E2,FALSE,has_even_y(R) is false
+7,,DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659,,243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89,1FA62E331EDBC21C394792D2AB1100A7B432B013DF3F6FF4F99FCB33E0E1515F28890B3EDB6E7189B630448B515CE4F8622A954CFE545735AAEA5134FCCDB2BD,FALSE,negated message
+8,,DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659,,243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89,6CFF5C3BA86C69EA4B7376F31A9BCB4F74C1976089B2D9963DA2E5543E177769961764B3AA9B2FFCB6EF947B6887A226E8D7C93E00C5ED0C1834FF0D0C2E6DA6,FALSE,negated s value
+9,,DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659,,243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89,0000000000000000000000000000000000000000000000000000000000000000123DDA8328AF9C23A94C1FEECFD123BA4FB73476F0D594DCB65C6425BD186051,FALSE,sG - eP is infinite. Test fails in single verification if has_even_y(inf) is defined as true and x(inf) as 0
+10,,DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659,,243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89,00000000000000000000000000000000000000000000000000000000000000017615FBAF5AE28864013C099742DEADB4DBA87F11AC6754F93780D5A1837CF197,FALSE,sG - eP is infinite. Test fails in single verification if has_even_y(inf) is defined as true and x(inf) as 1
+11,,DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659,,243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89,4A298DACAE57395A15D0795DDBFD1DCB564DA82B0F269BC70A74F8220429BA1D69E89B4C5564D00349106B8497785DD7D1D713A8AE82B32FA79D5F7FC407D39B,FALSE,sig[0:32] is not an X coordinate on the curve
+12,,DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659,,243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89,FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F69E89B4C5564D00349106B8497785DD7D1D713A8AE82B32FA79D5F7FC407D39B,FALSE,sig[0:32] is equal to field size
+13,,DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659,,243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89,6CFF5C3BA86C69EA4B7376F31A9BCB4F74C1976089B2D9963DA2E5543E177769FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141,FALSE,sig[32:64] is equal to curve order
+14,,FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC30,,243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89,6CFF5C3BA86C69EA4B7376F31A9BCB4F74C1976089B2D9963DA2E5543E17776969E89B4C5564D00349106B8497785DD7D1D713A8AE82B32FA79D5F7FC407D39B,FALSE,public key is not a valid X coordinate because it exceeds the field size
diff --git a/test/functional/test_framework/blocktools.py b/test/functional/test_framework/blocktools.py
index afc1995009..0859380d06 100644
--- a/test/functional/test_framework/blocktools.py
+++ b/test/functional/test_framework/blocktools.py
@@ -4,6 +4,10 @@
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Utilities for manipulating blocks and transactions."""
+from binascii import a2b_hex
+import io
+import struct
+import time
import unittest
from .address import (
@@ -43,7 +47,9 @@ from .script import (
from .util import assert_equal
from io import BytesIO
+WITNESS_SCALE_FACTOR = 4
MAX_BLOCK_SIGOPS = 20000
+MAX_BLOCK_SIGOPS_WEIGHT = MAX_BLOCK_SIGOPS * WITNESS_SCALE_FACTOR
# Genesis block time (regtest)
TIME_GENESIS_BLOCK = 1296688602
@@ -51,19 +57,31 @@ TIME_GENESIS_BLOCK = 1296688602
# From BIP141
WITNESS_COMMITMENT_HEADER = b"\xaa\x21\xa9\xed"
+NORMAL_GBT_REQUEST_PARAMS = {"rules": ["segwit"]}
-def create_block(hashprev, coinbase, ntime=None, *, version=1):
+
+def create_block(hashprev=None, coinbase=None, ntime=None, *, version=None, tmpl=None, txlist=None):
"""Create a block (with regtest difficulty)."""
block = CBlock()
- block.nVersion = version
- if ntime is None:
- import time
- block.nTime = int(time.time() + 600)
+ if tmpl is None:
+ tmpl = {}
+ block.nVersion = version or tmpl.get('version') or 1
+ block.nTime = ntime or tmpl.get('curtime') or int(time.time() + 600)
+ block.hashPrevBlock = hashprev or int(tmpl['previousblockhash'], 0x10)
+ if tmpl and not tmpl.get('bits') is None:
+ block.nBits = struct.unpack('>I', a2b_hex(tmpl['bits']))[0]
else:
- block.nTime = ntime
- block.hashPrevBlock = hashprev
- block.nBits = 0x207fffff # difficulty retargeting is disabled in REGTEST chainparams
+ block.nBits = 0x207fffff # difficulty retargeting is disabled in REGTEST chainparams
+ if coinbase is None:
+ coinbase = create_coinbase(height=tmpl['height'])
block.vtx.append(coinbase)
+ if txlist:
+ for tx in txlist:
+ if not hasattr(tx, 'calc_sha256'):
+ txo = CTransaction()
+ txo.deserialize(io.BytesIO(tx))
+ tx = txo
+ block.vtx.append(tx)
block.hashMerkleRoot = block.calc_merkle_root()
block.calc_sha256()
return block
@@ -101,22 +119,31 @@ def script_BIP34_coinbase_height(height):
return CScript([CScriptNum(height)])
-def create_coinbase(height, pubkey=None):
- """Create a coinbase transaction, assuming no miner fees.
+def create_coinbase(height, pubkey=None, extra_output_script=None, fees=0):
+ """Create a coinbase transaction.
If pubkey is passed in, the coinbase output will be a P2PK output;
- otherwise an anyone-can-spend output."""
+ otherwise an anyone-can-spend output.
+
+ If extra_output_script is given, make a 0-value output to that
+ script. This is useful to pad block weight/sigops as needed. """
coinbase = CTransaction()
coinbase.vin.append(CTxIn(COutPoint(0, 0xffffffff), script_BIP34_coinbase_height(height), 0xffffffff))
coinbaseoutput = CTxOut()
coinbaseoutput.nValue = 50 * COIN
halvings = int(height / 150) # regtest
coinbaseoutput.nValue >>= halvings
- if (pubkey is not None):
+ coinbaseoutput.nValue += fees
+ if pubkey is not None:
coinbaseoutput.scriptPubKey = CScript([pubkey, OP_CHECKSIG])
else:
coinbaseoutput.scriptPubKey = CScript([OP_TRUE])
coinbase.vout = [coinbaseoutput]
+ if extra_output_script is not None:
+ coinbaseoutput2 = CTxOut()
+ coinbaseoutput2.nValue = 0
+ coinbaseoutput2.scriptPubKey = extra_output_script
+ coinbase.vout.append(coinbaseoutput2)
coinbase.calc_sha256()
return coinbase
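
For orientation, a minimal sketch of how the reworked create_block() can be driven from a getblocktemplate result; the `node` handle is an assumption (a running regtest TestNode) and is not part of the change above:

    from test_framework.blocktools import NORMAL_GBT_REQUEST_PARAMS, create_block

    tmpl = node.getblocktemplate(NORMAL_GBT_REQUEST_PARAMS)   # node: assumed regtest TestNode handle
    block = create_block(tmpl=tmpl)   # coinbase, nBits, prev hash and time all derived from tmpl
    block.solve()                     # grind the nonce to meet regtest difficulty
    assert node.submitblock(block.serialize().hex()) is None  # None means the block was accepted
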
diff --git a/test/functional/test_framework/key.py b/test/functional/test_framework/key.py
index 912c0ca978..a6bc187985 100644
--- a/test/functional/test_framework/key.py
+++ b/test/functional/test_framework/key.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2019 Pieter Wuille
+# Copyright (c) 2019-2020 Pieter Wuille
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test-only secp256k1 elliptic curve implementation
@@ -6,24 +6,23 @@
WARNING: This code is slow, uses bad randomness, does not properly protect
keys, and is trivially vulnerable to side channel attacks. Do not use for
anything but tests."""
+import csv
+import hashlib
+import os
import random
+import sys
+import unittest
-def modinv(a, n):
- """Compute the modular inverse of a modulo n
+from .util import modinv
- See https://en.wikipedia.org/wiki/Extended_Euclidean_algorithm#Modular_integers.
- """
- t1, t2 = 0, 1
- r1, r2 = n, a
- while r2 != 0:
- q = r1 // r2
- t1, t2 = t2, t1 - q * t2
- r1, r2 = r2, r1 - q * r2
- if r1 > 1:
- return None
- if t1 < 0:
- t1 += n
- return t1
+def TaggedHash(tag, data):
+ ss = hashlib.sha256(tag.encode('utf-8')).digest()
+ ss += ss
+ ss += data
+ return hashlib.sha256(ss).digest()
+
+def xor_bytes(b0, b1):
+ return bytes(x ^ y for (x, y) in zip(b0, b1))
def jacobi_symbol(n, k):
"""Compute the Jacobi symbol of n modulo k
@@ -83,6 +82,10 @@ class EllipticCurve:
inv_3 = (inv_2 * inv) % self.p
return ((inv_2 * x1) % self.p, (inv_3 * y1) % self.p, 1)
+ def has_even_y(self, p1):
+ """Whether the point p1 has an even Y coordinate when expressed in affine coordinates."""
+ return not (p1[2] == 0 or self.affine(p1)[1] & 1)
+
def negate(self, p1):
"""Negate a Jacobian point tuple p1."""
x1, y1, z1 = p1
@@ -101,13 +104,13 @@ class EllipticCurve:
return jacobi_symbol(x_3 + self.a * x + self.b, self.p) != -1
def lift_x(self, x):
- """Given an X coordinate on the curve, return a corresponding affine point."""
+ """Given an X coordinate on the curve, return a corresponding affine point for which the Y coordinate is even."""
x_3 = pow(x, 3, self.p)
v = x_3 + self.a * x + self.b
y = modsqrt(v, self.p)
if y is None:
return None
- return (x, y, 1)
+ return (x, self.p - y if y & 1 else y, 1)
def double(self, p1):
"""Double a Jacobian tuple p1
@@ -212,7 +215,8 @@ class EllipticCurve:
r = self.add(r, p)
return r
-SECP256K1 = EllipticCurve(2**256 - 2**32 - 977, 0, 7)
+SECP256K1_FIELD_SIZE = 2**256 - 2**32 - 977
+SECP256K1 = EllipticCurve(SECP256K1_FIELD_SIZE, 0, 7)
SECP256K1_G = (0x79BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F81798, 0x483ADA7726A3C4655DA4FBFC0E1108A8FD17B448A68554199C47D08FFB10D4B8, 1)
SECP256K1_ORDER = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141
SECP256K1_ORDER_HALF = SECP256K1_ORDER // 2
@@ -236,9 +240,9 @@ class ECPubKey():
x = int.from_bytes(data[1:33], 'big')
if SECP256K1.is_x_coord(x):
p = SECP256K1.lift_x(x)
- # if the oddness of the y co-ord isn't correct, find the other
- # valid y
- if (p[1] & 1) != (data[0] & 1):
+ # Make the Y coordinate odd if required (lift_x always produces
+ # a point with an even Y coordinate).
+ if data[0] & 1:
p = SECP256K1.negate(p)
self.p = p
self.valid = True
@@ -322,6 +326,10 @@ class ECPubKey():
return False
return True
+def generate_privkey():
+ """Generate a valid random 32-byte private key."""
+ return random.randrange(1, SECP256K1_ORDER).to_bytes(32, 'big')
+
class ECKey():
"""A secp256k1 private key"""
@@ -339,7 +347,7 @@ class ECKey():
def generate(self, compressed=True):
"""Generate a random private key (compressed or uncompressed)."""
- self.set(random.randrange(1, SECP256K1_ORDER).to_bytes(32, 'big'), compressed)
+ self.set(generate_privkey(), compressed)
def get_bytes(self):
"""Retrieve the 32-byte representation of this key."""
@@ -384,3 +392,161 @@ class ECKey():
rb = r.to_bytes((r.bit_length() + 8) // 8, 'big')
sb = s.to_bytes((s.bit_length() + 8) // 8, 'big')
return b'\x30' + bytes([4 + len(rb) + len(sb), 2, len(rb)]) + rb + bytes([2, len(sb)]) + sb
+
+def compute_xonly_pubkey(key):
+ """Compute an x-only (32 byte) public key from a (32 byte) private key.
+
+ This also returns whether the resulting public key was negated.
+ """
+
+ assert len(key) == 32
+ x = int.from_bytes(key, 'big')
+ if x == 0 or x >= SECP256K1_ORDER:
+ return (None, None)
+ P = SECP256K1.affine(SECP256K1.mul([(SECP256K1_G, x)]))
+ return (P[0].to_bytes(32, 'big'), not SECP256K1.has_even_y(P))
+
+def tweak_add_privkey(key, tweak):
+ """Tweak a private key (after negating it if needed)."""
+
+ assert len(key) == 32
+ assert len(tweak) == 32
+
+ x = int.from_bytes(key, 'big')
+ if x == 0 or x >= SECP256K1_ORDER:
+ return None
+ if not SECP256K1.has_even_y(SECP256K1.mul([(SECP256K1_G, x)])):
+ x = SECP256K1_ORDER - x
+ t = int.from_bytes(tweak, 'big')
+ if t >= SECP256K1_ORDER:
+ return None
+ x = (x + t) % SECP256K1_ORDER
+ if x == 0:
+ return None
+ return x.to_bytes(32, 'big')
+
+def tweak_add_pubkey(key, tweak):
+ """Tweak a public key and return whether the result had to be negated."""
+
+ assert len(key) == 32
+ assert len(tweak) == 32
+
+ x_coord = int.from_bytes(key, 'big')
+ if x_coord >= SECP256K1_FIELD_SIZE:
+ return None
+ P = SECP256K1.lift_x(x_coord)
+ if P is None:
+ return None
+ t = int.from_bytes(tweak, 'big')
+ if t >= SECP256K1_ORDER:
+ return None
+ Q = SECP256K1.affine(SECP256K1.mul([(SECP256K1_G, t), (P, 1)]))
+ if Q is None:
+ return None
+ return (Q[0].to_bytes(32, 'big'), not SECP256K1.has_even_y(Q))
+
+def verify_schnorr(key, sig, msg):
+ """Verify a Schnorr signature (see BIP 340).
+
+ - key is a 32-byte xonly pubkey (computed using compute_xonly_pubkey).
+ - sig is a 64-byte Schnorr signature
+ - msg is a 32-byte message
+ """
+ assert len(key) == 32
+ assert len(msg) == 32
+ assert len(sig) == 64
+
+ x_coord = int.from_bytes(key, 'big')
+ if x_coord == 0 or x_coord >= SECP256K1_FIELD_SIZE:
+ return False
+ P = SECP256K1.lift_x(x_coord)
+ if P is None:
+ return False
+ r = int.from_bytes(sig[0:32], 'big')
+ if r >= SECP256K1_FIELD_SIZE:
+ return False
+ s = int.from_bytes(sig[32:64], 'big')
+ if s >= SECP256K1_ORDER:
+ return False
+ e = int.from_bytes(TaggedHash("BIP0340/challenge", sig[0:32] + key + msg), 'big') % SECP256K1_ORDER
+ R = SECP256K1.mul([(SECP256K1_G, s), (P, SECP256K1_ORDER - e)])
+ if not SECP256K1.has_even_y(R):
+ return False
+ if ((r * R[2] * R[2]) % SECP256K1_FIELD_SIZE) != R[0]:
+ return False
+ return True
+
+def sign_schnorr(key, msg, aux=None, flip_p=False, flip_r=False):
+ """Create a Schnorr signature (see BIP 340)."""
+
+ if aux is None:
+ aux = bytes(32)
+
+ assert len(key) == 32
+ assert len(msg) == 32
+ assert len(aux) == 32
+
+ sec = int.from_bytes(key, 'big')
+ if sec == 0 or sec >= SECP256K1_ORDER:
+ return None
+ P = SECP256K1.affine(SECP256K1.mul([(SECP256K1_G, sec)]))
+ if SECP256K1.has_even_y(P) == flip_p:
+ sec = SECP256K1_ORDER - sec
+ t = (sec ^ int.from_bytes(TaggedHash("BIP0340/aux", aux), 'big')).to_bytes(32, 'big')
+ kp = int.from_bytes(TaggedHash("BIP0340/nonce", t + P[0].to_bytes(32, 'big') + msg), 'big') % SECP256K1_ORDER
+ assert kp != 0
+ R = SECP256K1.affine(SECP256K1.mul([(SECP256K1_G, kp)]))
+ k = kp if SECP256K1.has_even_y(R) != flip_r else SECP256K1_ORDER - kp
+ e = int.from_bytes(TaggedHash("BIP0340/challenge", R[0].to_bytes(32, 'big') + P[0].to_bytes(32, 'big') + msg), 'big') % SECP256K1_ORDER
+ return R[0].to_bytes(32, 'big') + ((k + e * sec) % SECP256K1_ORDER).to_bytes(32, 'big')
+
+class TestFrameworkKey(unittest.TestCase):
+ def test_schnorr(self):
+ """Test the Python Schnorr implementation."""
+ byte_arrays = [generate_privkey() for _ in range(3)] + [v.to_bytes(32, 'big') for v in [0, SECP256K1_ORDER - 1, SECP256K1_ORDER, 2**256 - 1]]
+ keys = {}
+ for privkey in byte_arrays: # build array of key/pubkey pairs
+ pubkey, _ = compute_xonly_pubkey(privkey)
+ if pubkey is not None:
+ keys[privkey] = pubkey
+ for msg in byte_arrays: # test every combination of message, signing key, verification key
+ for sign_privkey, sign_pubkey in keys.items():
+ sig = sign_schnorr(sign_privkey, msg)
+ for verify_privkey, verify_pubkey in keys.items():
+ if verify_privkey == sign_privkey:
+ self.assertTrue(verify_schnorr(verify_pubkey, sig, msg))
+ sig = list(sig)
+ sig[random.randrange(64)] ^= (1 << (random.randrange(8))) # damaging signature should break things
+ sig = bytes(sig)
+ self.assertFalse(verify_schnorr(verify_pubkey, sig, msg))
+
+ def test_schnorr_testvectors(self):
+ """Implement the BIP340 test vectors (read from bip340_test_vectors.csv)."""
+ num_tests = 0
+ with open(os.path.join(sys.path[0], 'test_framework', 'bip340_test_vectors.csv'), newline='', encoding='utf8') as csvfile:
+ reader = csv.reader(csvfile)
+ next(reader)
+ for row in reader:
+ (i_str, seckey_hex, pubkey_hex, aux_rand_hex, msg_hex, sig_hex, result_str, comment) = row
+ i = int(i_str)
+ pubkey = bytes.fromhex(pubkey_hex)
+ msg = bytes.fromhex(msg_hex)
+ sig = bytes.fromhex(sig_hex)
+ result = result_str == 'TRUE'
+ if seckey_hex != '':
+ seckey = bytes.fromhex(seckey_hex)
+ pubkey_actual = compute_xonly_pubkey(seckey)[0]
+ self.assertEqual(pubkey.hex(), pubkey_actual.hex(), "BIP340 test vector %i (%s): pubkey mismatch" % (i, comment))
+ aux_rand = bytes.fromhex(aux_rand_hex)
+ try:
+ sig_actual = sign_schnorr(seckey, msg, aux_rand)
+ self.assertEqual(sig.hex(), sig_actual.hex(), "BIP340 test vector %i (%s): sig mismatch" % (i, comment))
+ except RuntimeError as e:
+ self.fail("BIP340 test vector %i (%s): signing raised exception %s" % (i, comment, e))
+ result_actual = verify_schnorr(pubkey, sig, msg)
+ if result:
+ self.assertEqual(result, result_actual, "BIP340 test vector %i (%s): verification failed" % (i, comment))
+ else:
+ self.assertEqual(result, result_actual, "BIP340 test vector %i (%s): verification succeeded unexpectedly" % (i, comment))
+ num_tests += 1
+ self.assertTrue(num_tests >= 15) # expect at least 15 test vectors
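
Taken together, the new helpers give a self-contained Schnorr workflow; a small sketch using only functions introduced above:

    from test_framework.key import (
        compute_xonly_pubkey,
        generate_privkey,
        sign_schnorr,
        verify_schnorr,
    )

    privkey = generate_privkey()
    pubkey, _negated = compute_xonly_pubkey(privkey)  # 32-byte x-only public key
    msg = bytes(32)                                   # any 32-byte message digest
    sig = sign_schnorr(privkey, msg)                  # 64-byte BIP340 signature
    assert verify_schnorr(pubkey, sig, msg)
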
diff --git a/test/functional/test_framework/messages.py b/test/functional/test_framework/messages.py
index da956a94de..ff7f73bdf4 100755
--- a/test/functional/test_framework/messages.py
+++ b/test/functional/test_framework/messages.py
@@ -22,6 +22,7 @@ from codecs import encode
import copy
import hashlib
from io import BytesIO
+import math
import random
import socket
import struct
@@ -32,7 +33,7 @@ from test_framework.util import hex_str_to_bytes, assert_equal
MIN_VERSION_SUPPORTED = 60001
MY_VERSION = 70016 # past wtxid relay
-MY_SUBVERSION = b"/python-mininode-tester:0.0.3/"
+MY_SUBVERSION = b"/python-p2p-tester:0.0.3/"
MY_RELAY = 1 # from version 70001 onwards, fRelay should be appended to version messages (BIP37)
MAX_LOCATOR_SZ = 101
@@ -53,6 +54,7 @@ NODE_NETWORK = (1 << 0)
NODE_GETUTXO = (1 << 1)
NODE_BLOOM = (1 << 2)
NODE_WITNESS = (1 << 3)
+NODE_COMPACT_FILTERS = (1 << 6)
NODE_NETWORK_LIMITED = (1 << 10)
MSG_TX = 1
@@ -62,9 +64,12 @@ MSG_CMPCT_BLOCK = 4
MSG_WTX = 5
MSG_WITNESS_FLAG = 1 << 30
MSG_TYPE_MASK = 0xffffffff >> 2
+MSG_WITNESS_TX = MSG_TX | MSG_WITNESS_FLAG
FILTER_TYPE_BASIC = 0
+WITNESS_SCALE_FACTOR = 4
+
# Serialization/deserialization tools
def sha256(s):
return hashlib.new('sha256', s).digest()
@@ -131,12 +136,17 @@ def uint256_from_compact(c):
return v
-def deser_vector(f, c):
+# deser_function_name: Allow for an alternate deserialization function on the
+# entries in the vector.
+def deser_vector(f, c, deser_function_name=None):
nit = deser_compact_size(f)
r = []
for _ in range(nit):
t = c()
- t.deserialize(f)
+ if deser_function_name:
+ getattr(t, deser_function_name)(f)
+ else:
+ t.deserialize(f)
r.append(t)
return r
@@ -199,38 +209,82 @@ def ToHex(obj):
class CAddress:
- __slots__ = ("ip", "nServices", "pchReserved", "port", "time")
+ __slots__ = ("net", "ip", "nServices", "port", "time")
+
+ # see https://github.com/bitcoin/bips/blob/master/bip-0155.mediawiki
+ NET_IPV4 = 1
+
+ ADDRV2_NET_NAME = {
+ NET_IPV4: "IPv4"
+ }
+
+ ADDRV2_ADDRESS_LENGTH = {
+ NET_IPV4: 4
+ }
def __init__(self):
self.time = 0
self.nServices = 1
- self.pchReserved = b"\x00" * 10 + b"\xff" * 2
+ self.net = self.NET_IPV4
self.ip = "0.0.0.0"
self.port = 0
def deserialize(self, f, *, with_time=True):
+ """Deserialize from addrv1 format (pre-BIP155)"""
if with_time:
# VERSION messages serialize CAddress objects without time
- self.time = struct.unpack("<i", f.read(4))[0]
+ self.time = struct.unpack("<I", f.read(4))[0]
self.nServices = struct.unpack("<Q", f.read(8))[0]
- self.pchReserved = f.read(12)
+ # We only support IPv4 which means skip 12 bytes and read the next 4 as IPv4 address.
+ f.read(12)
+ self.net = self.NET_IPV4
self.ip = socket.inet_ntoa(f.read(4))
self.port = struct.unpack(">H", f.read(2))[0]
def serialize(self, *, with_time=True):
+ """Serialize in addrv1 format (pre-BIP155)"""
+ assert self.net == self.NET_IPV4
r = b""
if with_time:
# VERSION messages serialize CAddress objects without time
- r += struct.pack("<i", self.time)
+ r += struct.pack("<I", self.time)
r += struct.pack("<Q", self.nServices)
- r += self.pchReserved
+ r += b"\x00" * 10 + b"\xff" * 2
+ r += socket.inet_aton(self.ip)
+ r += struct.pack(">H", self.port)
+ return r
+
+ def deserialize_v2(self, f):
+ """Deserialize from addrv2 format (BIP155)"""
+ self.time = struct.unpack("<I", f.read(4))[0]
+
+ self.nServices = deser_compact_size(f)
+
+ self.net = struct.unpack("B", f.read(1))[0]
+ assert self.net == self.NET_IPV4
+
+ address_length = deser_compact_size(f)
+ assert address_length == self.ADDRV2_ADDRESS_LENGTH[self.net]
+
+ self.ip = socket.inet_ntoa(f.read(4))
+
+ self.port = struct.unpack(">H", f.read(2))[0]
+
+ def serialize_v2(self):
+ """Serialize in addrv2 format (BIP155)"""
+ assert self.net == self.NET_IPV4
+ r = b""
+ r += struct.pack("<I", self.time)
+ r += ser_compact_size(self.nServices)
+ r += struct.pack("B", self.net)
+ r += ser_compact_size(self.ADDRV2_ADDRESS_LENGTH[self.net])
r += socket.inet_aton(self.ip)
r += struct.pack(">H", self.port)
return r
def __repr__(self):
- return "CAddress(nServices=%i ip=%s port=%i)" % (self.nServices,
- self.ip, self.port)
+ return ("CAddress(nServices=%i net=%s addr=%s port=%i)"
+ % (self.nServices, self.ADDRV2_NET_NAME[self.net], self.ip, self.port))
class CInv:
@@ -243,8 +297,8 @@ class CInv:
MSG_TX | MSG_WITNESS_FLAG: "WitnessTx",
MSG_BLOCK | MSG_WITNESS_FLAG: "WitnessBlock",
MSG_FILTERED_BLOCK: "filtered Block",
- 4: "CompactBlock",
- 5: "WTX",
+ MSG_CMPCT_BLOCK: "CompactBlock",
+ MSG_WTX: "WTX",
}
def __init__(self, t=0, h=0):
@@ -252,12 +306,12 @@ class CInv:
self.hash = h
def deserialize(self, f):
- self.type = struct.unpack("<i", f.read(4))[0]
+ self.type = struct.unpack("<I", f.read(4))[0]
self.hash = deser_uint256(f)
def serialize(self):
r = b""
- r += struct.pack("<i", self.type)
+ r += struct.pack("<I", self.type)
r += ser_uint256(self.hash)
return r
@@ -535,6 +589,13 @@ class CTransaction:
return False
return True
+ # Calculate the virtual transaction size using witness and non-witness
+ # serialization size (does NOT use sigops).
+ def get_vsize(self):
+ with_witness_size = len(self.serialize_with_witness())
+ without_witness_size = len(self.serialize_without_witness())
+ return math.ceil(((WITNESS_SCALE_FACTOR - 1) * without_witness_size + with_witness_size) / WITNESS_SCALE_FACTOR)
+
def __repr__(self):
return "CTransaction(nVersion=%i vin=%s vout=%s wit=%s nLockTime=%i)" \
% (self.nVersion, repr(self.vin), repr(self.vout), repr(self.wit), self.nLockTime)
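
As a worked example of the vsize formula (sizes are purely illustrative): a transaction with a 200-byte stripped serialization and a 308-byte full serialization has weight 3*200 + 308 = 908, so its vsize is ceil(908 / 4) = 227 vbytes.

    import math
    WITNESS_SCALE_FACTOR = 4
    stripped, full = 200, 308   # illustrative serialization sizes in bytes
    vsize = math.ceil(((WITNESS_SCALE_FACTOR - 1) * stripped + full) / WITNESS_SCALE_FACTOR)
    assert vsize == 227
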
@@ -1052,6 +1113,40 @@ class msg_addr:
return "msg_addr(addrs=%s)" % (repr(self.addrs))
+class msg_addrv2:
+ __slots__ = ("addrs",)
+ msgtype = b"addrv2"
+
+ def __init__(self):
+ self.addrs = []
+
+ def deserialize(self, f):
+ self.addrs = deser_vector(f, CAddress, "deserialize_v2")
+
+ def serialize(self):
+ return ser_vector(self.addrs, "serialize_v2")
+
+ def __repr__(self):
+ return "msg_addrv2(addrs=%s)" % (repr(self.addrs))
+
+
+class msg_sendaddrv2:
+ __slots__ = ()
+ msgtype = b"sendaddrv2"
+
+ def __init__(self):
+ pass
+
+ def deserialize(self, f):
+ pass
+
+ def serialize(self):
+ return b""
+
+ def __repr__(self):
+ return "msg_sendaddrv2()"
+
+
class msg_inv:
__slots__ = ("inv",)
msgtype = b"inv"
@@ -1460,9 +1555,9 @@ class msg_sendcmpct:
__slots__ = ("announce", "version")
msgtype = b"sendcmpct"
- def __init__(self):
- self.announce = False
- self.version = 1
+ def __init__(self, announce=False, version=1):
+ self.announce = announce
+ self.version = version
def deserialize(self, f):
self.announce = struct.unpack("<?", f.read(1))[0]
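
A small round-trip sketch for the new BIP155 support, built only from the classes added above (the address values are illustrative):

    import io
    from test_framework.messages import CAddress, msg_addrv2

    addr = CAddress()
    addr.time = 1600000000
    addr.nServices = 1
    addr.ip = "203.0.113.1"
    addr.port = 8333

    msg = msg_addrv2()
    msg.addrs = [addr]
    wire = msg.serialize()    # compact-size count followed by addrv2-encoded entries
    echo = msg_addrv2()
    echo.deserialize(io.BytesIO(wire))
    assert echo.addrs[0].ip == addr.ip and echo.addrs[0].port == addr.port
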
diff --git a/test/functional/test_framework/muhash.py b/test/functional/test_framework/muhash.py
new file mode 100644
index 0000000000..97d02359cb
--- /dev/null
+++ b/test/functional/test_framework/muhash.py
@@ -0,0 +1,110 @@
+# Copyright (c) 2020 Pieter Wuille
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Native Python MuHash3072 implementation."""
+
+import hashlib
+import unittest
+
+from .util import modinv
+
+def rot32(v, bits):
+ """Rotate the 32-bit value v left by bits bits."""
+ bits %= 32 # Make sure the term below does not throw an exception
+ return ((v << bits) & 0xffffffff) | (v >> (32 - bits))
+
+def chacha20_doubleround(s):
+ """Apply a ChaCha20 double round to 16-element state array s.
+
+ See https://cr.yp.to/chacha/chacha-20080128.pdf and https://tools.ietf.org/html/rfc8439
+ """
+ QUARTER_ROUNDS = [(0, 4, 8, 12),
+ (1, 5, 9, 13),
+ (2, 6, 10, 14),
+ (3, 7, 11, 15),
+ (0, 5, 10, 15),
+ (1, 6, 11, 12),
+ (2, 7, 8, 13),
+ (3, 4, 9, 14)]
+
+ for a, b, c, d in QUARTER_ROUNDS:
+ s[a] = (s[a] + s[b]) & 0xffffffff
+ s[d] = rot32(s[d] ^ s[a], 16)
+ s[c] = (s[c] + s[d]) & 0xffffffff
+ s[b] = rot32(s[b] ^ s[c], 12)
+ s[a] = (s[a] + s[b]) & 0xffffffff
+ s[d] = rot32(s[d] ^ s[a], 8)
+ s[c] = (s[c] + s[d]) & 0xffffffff
+ s[b] = rot32(s[b] ^ s[c], 7)
+
+def chacha20_32_to_384(key32):
+ """Specialized ChaCha20 implementation with 32-byte key, 0 IV, 384-byte output."""
+ # See RFC 8439 section 2.3 for chacha20 parameters
+ CONSTANTS = [0x61707865, 0x3320646e, 0x79622d32, 0x6b206574]
+
+ key_bytes = [0]*8
+ for i in range(8):
+ key_bytes[i] = int.from_bytes(key32[(4 * i):(4 * (i+1))], 'little')
+
+ INITIALIZATION_VECTOR = [0] * 4
+ init = CONSTANTS + key_bytes + INITIALIZATION_VECTOR
+ out = bytearray()
+ for counter in range(6):
+ init[12] = counter
+ s = init.copy()
+ for _ in range(10):
+ chacha20_doubleround(s)
+ for i in range(16):
+ out.extend(((s[i] + init[i]) & 0xffffffff).to_bytes(4, 'little'))
+ return bytes(out)
+
+def data_to_num3072(data):
+ """Hash a 32-byte array data to a 3072-bit number using 6 Chacha20 operations."""
+ bytes384 = chacha20_32_to_384(data)
+ return int.from_bytes(bytes384, 'little')
+
+class MuHash3072:
+ """Class representing the MuHash3072 computation of a set.
+
+ See https://cseweb.ucsd.edu/~mihir/papers/inchash.pdf and https://lists.linuxfoundation.org/pipermail/bitcoin-dev/2017-May/014337.html
+ """
+
+ MODULUS = 2**3072 - 1103717
+
+ def __init__(self):
+ """Initialize for an empty set."""
+ self.numerator = 1
+ self.denominator = 1
+
+ def insert(self, data):
+ """Insert a byte array data in the set."""
+ self.numerator = (self.numerator * data_to_num3072(data)) % self.MODULUS
+
+ def remove(self, data):
+ """Remove a byte array from the set."""
+ self.denominator = (self.denominator * data_to_num3072(data)) % self.MODULUS
+
+ def digest(self):
+ """Extract the final hash. Does not modify this object."""
+ val = (self.numerator * modinv(self.denominator, self.MODULUS)) % self.MODULUS
+ bytes384 = val.to_bytes(384, 'little')
+ return hashlib.sha256(bytes384).digest()
+
+class TestFrameworkMuhash(unittest.TestCase):
+ def test_muhash(self):
+ muhash = MuHash3072()
+ muhash.insert([0]*32)
+ muhash.insert([1] + [0]*31)
+ muhash.remove([2] + [0]*31)
+ finalized = muhash.digest()
+ # This mirrors the result in the C++ MuHash3072 unit test
+ self.assertEqual(finalized[::-1].hex(), "a44e16d5e34d259b349af21c06e65d653915d2e208e4e03f389af750dc0bfdc3")
+
+ def test_chacha20(self):
+ def chacha_check(key, result):
+ self.assertEqual(chacha20_32_to_384(key)[:64].hex(), result)
+
+ # Test vectors from https://tools.ietf.org/html/draft-agl-tls-chacha20poly1305-04#section-7
+ # Since the nonce is hardcoded to 0 in our function we only use those vectors.
+ chacha_check([0]*32, "76b8e0ada0f13d90405d6ae55386bd28bdd219b8a08ded1aa836efcc8b770dc7da41597c5157488d7724e03fb8d84a376a43b8f41518a11cc387b669b2ee6586")
+ chacha_check([0]*31 + [1], "4540f05a9f1fb296d7736e7b208e3c96eb4fe1834688d2604f450952ed432d41bbe2a0b6ea7566d2a5d1e7e20d42af2c53d792b1c43fea817e9ad275ae546963")
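
Usage mirrors the unit test above; a short sketch of the incremental set-hash API:

    from test_framework.muhash import MuHash3072

    m = MuHash3072()
    m.insert(bytes(32))               # multiply in the hash of an element
    m.insert(bytes([1]) + bytes(31))
    m.remove(bytes([2]) + bytes(31))  # divide out an element's hash
    digest = m.digest()               # order-independent 32-byte set hash
    # reversed to match the byte order used in the C++ unit test
    assert digest[::-1].hex() == "a44e16d5e34d259b349af21c06e65d653915d2e208e4e03f389af750dc0bfdc3"
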
diff --git a/test/functional/test_framework/mininode.py b/test/functional/test_framework/p2p.py
index eaf637fbb8..6846d31221 100755
--- a/test/functional/test_framework/mininode.py
+++ b/test/functional/test_framework/p2p.py
@@ -4,10 +4,14 @@
# Copyright (c) 2010-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
-"""Bitcoin P2P network half-a-node.
+"""Test objects for interacting with a bitcoind node over the p2p protocol.
-This python code was modified from ArtForz' public domain half-a-node, as
-found in the mini-node branch of http://github.com/jgarzik/pynode.
+The P2PInterface objects interact with the bitcoind nodes under test using the
+node's p2p interface. They can be used to send messages to the node, and
+callbacks can be registered that execute when messages are received from the
+node. Messages are sent to/received from the node on an asyncio event loop.
+State held inside the objects must be guarded by the p2p_lock to avoid data
+races between the main testing thread and the event loop.
P2PConnection: A low-level connection object to a node's P2P interface
P2PInterface: A high-level interface object for communicating to a node over P2P
@@ -29,6 +33,7 @@ from test_framework.messages import (
MAX_HEADERS_RESULTS,
MIN_VERSION_SUPPORTED,
msg_addr,
+ msg_addrv2,
msg_block,
MSG_BLOCK,
msg_blocktxn,
@@ -52,6 +57,7 @@ from test_framework.messages import (
msg_notfound,
msg_ping,
msg_pong,
+ msg_sendaddrv2,
msg_sendcmpct,
msg_sendheaders,
msg_tx,
@@ -65,12 +71,13 @@ from test_framework.messages import (
NODE_WITNESS,
sha256,
)
-from test_framework.util import wait_until
+from test_framework.util import wait_until_helper
-logger = logging.getLogger("TestFramework.mininode")
+logger = logging.getLogger("TestFramework.p2p")
MESSAGEMAP = {
b"addr": msg_addr,
+ b"addrv2": msg_addrv2,
b"block": msg_block,
b"blocktxn": msg_blocktxn,
b"cfcheckpt": msg_cfcheckpt,
@@ -93,6 +100,7 @@ MESSAGEMAP = {
b"notfound": msg_notfound,
b"ping": msg_ping,
b"pong": msg_pong,
+ b"sendaddrv2": msg_sendaddrv2,
b"sendcmpct": msg_sendcmpct,
b"sendheaders": msg_sendheaders,
b"tx": msg_tx,
@@ -105,6 +113,7 @@ MAGIC_BYTES = {
"mainnet": b"\xf9\xbe\xb4\xd9", # mainnet
"testnet3": b"\x0b\x11\x09\x07", # testnet3
"regtest": b"\xfa\xbf\xb5\xda", # regtest
+ "signet": b"\x0a\x03\xcf\x40", # signet
}
@@ -280,7 +289,7 @@ class P2PInterface(P2PConnection):
Individual testcases should subclass this and override the on_* methods
if they want to alter message handling behaviour."""
- def __init__(self):
+ def __init__(self, support_addrv2=False):
super().__init__()
# Track number of messages of each type received.
@@ -289,7 +298,7 @@ class P2PInterface(P2PConnection):
# Track the most recent message of each type.
# To wait for a message to be received, pop that message from
- # this and use wait_until.
+ # this and use self.wait_until.
self.last_message = {}
# A count of the number of ping messages we've sent to the node
@@ -298,6 +307,8 @@ class P2PInterface(P2PConnection):
# The network services received from the peer
self.nServices = 0
+ self.support_addrv2 = support_addrv2
+
def peer_connect(self, *args, services=NODE_NETWORK|NODE_WITNESS, send_version=True, **kwargs):
create_conn = super().peer_connect(*args, **kwargs)
@@ -320,7 +331,7 @@ class P2PInterface(P2PConnection):
We keep a count of how many of each message type has been received
and the most recent message of each type."""
- with mininode_lock:
+ with p2p_lock:
try:
msgtype = message.msgtype.decode('ascii')
self.message_count[msgtype] += 1
@@ -340,6 +351,7 @@ class P2PInterface(P2PConnection):
pass
def on_addr(self, message): pass
+ def on_addrv2(self, message): pass
def on_block(self, message): pass
def on_blocktxn(self, message): pass
def on_cfcheckpt(self, message): pass
@@ -360,6 +372,7 @@ class P2PInterface(P2PConnection):
def on_merkleblock(self, message): pass
def on_notfound(self, message): pass
def on_pong(self, message): pass
+ def on_sendaddrv2(self, message): pass
def on_sendcmpct(self, message): pass
def on_sendheaders(self, message): pass
def on_tx(self, message): pass
@@ -384,6 +397,8 @@ class P2PInterface(P2PConnection):
if message.nVersion >= 70016:
self.send_message(msg_wtxidrelay())
self.send_message(msg_verack())
+ if self.support_addrv2:
+ self.send_message(msg_sendaddrv2())
self.nServices = message.nServices
# Connection helper methods
@@ -394,7 +409,7 @@ class P2PInterface(P2PConnection):
assert self.is_connected
return test_function_in()
- wait_until(test_function, timeout=timeout, lock=mininode_lock, timeout_factor=self.timeout_factor)
+ wait_until_helper(test_function, timeout=timeout, lock=p2p_lock, timeout_factor=self.timeout_factor)
def wait_for_disconnect(self, timeout=60):
test_function = lambda: not self.is_connected
@@ -498,7 +513,7 @@ class P2PInterface(P2PConnection):
# P2PConnection acquires this lock whenever delivering a message to a P2PInterface.
# This lock should be acquired in the thread running the test logic to synchronize
# access to any data shared with the P2PInterface or P2PConnection.
-mininode_lock = threading.Lock()
+p2p_lock = threading.Lock()
class NetworkThread(threading.Thread):
@@ -518,7 +533,7 @@ class NetworkThread(threading.Thread):
def close(self, timeout=10):
"""Close the connections and network event loop."""
self.network_event_loop.call_soon_threadsafe(self.network_event_loop.stop)
- wait_until(lambda: not self.network_event_loop.is_running(), timeout=timeout)
+ wait_until_helper(lambda: not self.network_event_loop.is_running(), timeout=timeout)
self.network_event_loop.close()
self.join(timeout)
# Safe to remove event loop.
@@ -592,7 +607,7 @@ class P2PDataStore(P2PInterface):
- if success is False: assert that the node's tip doesn't advance
- if reject_reason is set: assert that the correct reject message is logged"""
- with mininode_lock:
+ with p2p_lock:
for block in blocks:
self.block_store[block.sha256] = block
self.last_block_hash = block.sha256
@@ -629,7 +644,7 @@ class P2PDataStore(P2PInterface):
- if expect_disconnect is True: Skip the sync with ping
- if reject_reason is set: assert that the correct reject message is logged."""
- with mininode_lock:
+ with p2p_lock:
for tx in txs:
self.tx_store[tx.sha256] = tx
@@ -668,7 +683,7 @@ class P2PTxInvStore(P2PInterface):
self.tx_invs_received[i.hash] += 1
def get_invs(self):
- with mininode_lock:
+ with p2p_lock:
return list(self.tx_invs_received.keys())
def wait_for_broadcast(self, txns, timeout=60):
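
In test scripts the renamed module is used the way the new docstring describes; a sketch, assuming `node` is a TestNode and that the usual handshake helper wait_for_verack() is available on P2PInterface:

    from test_framework.p2p import P2PInterface, p2p_lock

    peer = node.add_p2p_connection(P2PInterface(support_addrv2=True))
    peer.wait_for_verack()            # the handshake runs on the asyncio event loop
    with p2p_lock:                    # guard state shared with the event loop
        inv_count = peer.message_count.get('inv', 0)
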
diff --git a/test/functional/test_framework/script.py b/test/functional/test_framework/script.py
index 5e35ba0fce..8e5848d493 100644
--- a/test/functional/test_framework/script.py
+++ b/test/functional/test_framework/script.py
@@ -6,11 +6,15 @@
This file is modified from python-bitcoinlib.
"""
+
+from collections import namedtuple
import hashlib
import struct
import unittest
from typing import List, Dict
+from .key import TaggedHash, tweak_add_pubkey
+
from .messages import (
CTransaction,
CTxOut,
@@ -22,8 +26,13 @@ from .messages import (
)
MAX_SCRIPT_ELEMENT_SIZE = 520
+LOCKTIME_THRESHOLD = 500000000
+ANNEX_TAG = 0x50
+
OPCODE_NAMES = {} # type: Dict[CScriptOp, str]
+LEAF_VERSION_TAPSCRIPT = 0xc0
+
def hash160(s):
return hashlib.new('ripemd160', sha256(s)).digest()
@@ -239,11 +248,8 @@ OP_NOP8 = CScriptOp(0xb7)
OP_NOP9 = CScriptOp(0xb8)
OP_NOP10 = CScriptOp(0xb9)
-# template matching params
-OP_SMALLINTEGER = CScriptOp(0xfa)
-OP_PUBKEYS = CScriptOp(0xfb)
-OP_PUBKEYHASH = CScriptOp(0xfd)
-OP_PUBKEY = CScriptOp(0xfe)
+# BIP 342 opcodes (Tapscript)
+OP_CHECKSIGADD = CScriptOp(0xba)
OP_INVALIDOPCODE = CScriptOp(0xff)
@@ -359,10 +365,7 @@ OPCODE_NAMES.update({
OP_NOP8: 'OP_NOP8',
OP_NOP9: 'OP_NOP9',
OP_NOP10: 'OP_NOP10',
- OP_SMALLINTEGER: 'OP_SMALLINTEGER',
- OP_PUBKEYS: 'OP_PUBKEYS',
- OP_PUBKEYHASH: 'OP_PUBKEYHASH',
- OP_PUBKEY: 'OP_PUBKEY',
+ OP_CHECKSIGADD: 'OP_CHECKSIGADD',
OP_INVALIDOPCODE: 'OP_INVALIDOPCODE',
})
@@ -593,6 +596,7 @@ class CScript(bytes):
return n
+SIGHASH_DEFAULT = 0 # Taproot-only default, semantics same as SIGHASH_ALL
SIGHASH_ALL = 1
SIGHASH_NONE = 2
SIGHASH_SINGLE = 3
@@ -615,7 +619,6 @@ def FindAndDelete(script, sig):
r += script[last_sop_idx:]
return CScript(r)
-
def LegacySignatureHash(script, txTo, inIdx, hashtype):
"""Consensus-correct SignatureHash
@@ -738,3 +741,113 @@ class TestFrameworkScript(unittest.TestCase):
values = [0, 1, -1, -2, 127, 128, -255, 256, (1 << 15) - 1, -(1 << 16), (1 << 24) - 1, (1 << 31), 1 - (1 << 32), 1 << 40, 1500, -1500]
for value in values:
self.assertEqual(CScriptNum.decode(CScriptNum.encode(CScriptNum(value))), value)
+
+def TaprootSignatureHash(txTo, spent_utxos, hash_type, input_index = 0, scriptpath = False, script = CScript(), codeseparator_pos = -1, annex = None, leaf_ver = LEAF_VERSION_TAPSCRIPT):
+ assert (len(txTo.vin) == len(spent_utxos))
+ assert (input_index < len(txTo.vin))
+ out_type = SIGHASH_ALL if hash_type == 0 else hash_type & 3
+ in_type = hash_type & SIGHASH_ANYONECANPAY
+ spk = spent_utxos[input_index].scriptPubKey
+ ss = bytes([0, hash_type]) # epoch, hash_type
+ ss += struct.pack("<i", txTo.nVersion)
+ ss += struct.pack("<I", txTo.nLockTime)
+ if in_type != SIGHASH_ANYONECANPAY:
+ ss += sha256(b"".join(i.prevout.serialize() for i in txTo.vin))
+ ss += sha256(b"".join(struct.pack("<q", u.nValue) for u in spent_utxos))
+ ss += sha256(b"".join(ser_string(u.scriptPubKey) for u in spent_utxos))
+ ss += sha256(b"".join(struct.pack("<I", i.nSequence) for i in txTo.vin))
+ if out_type == SIGHASH_ALL:
+ ss += sha256(b"".join(o.serialize() for o in txTo.vout))
+ spend_type = 0
+ if annex is not None:
+ spend_type |= 1
+ if (scriptpath):
+ spend_type |= 2
+ ss += bytes([spend_type])
+ if in_type == SIGHASH_ANYONECANPAY:
+ ss += txTo.vin[input_index].prevout.serialize()
+ ss += struct.pack("<q", spent_utxos[input_index].nValue)
+ ss += ser_string(spk)
+ ss += struct.pack("<I", txTo.vin[input_index].nSequence)
+ else:
+ ss += struct.pack("<I", input_index)
+ if (spend_type & 1):
+ ss += sha256(ser_string(annex))
+ if out_type == SIGHASH_SINGLE:
+ if input_index < len(txTo.vout):
+ ss += sha256(txTo.vout[input_index].serialize())
+ else:
+ ss += bytes(0 for _ in range(32))
+ if (scriptpath):
+ ss += TaggedHash("TapLeaf", bytes([leaf_ver]) + ser_string(script))
+ ss += bytes([0])
+ ss += struct.pack("<i", codeseparator_pos)
+ assert len(ss) == 175 - (in_type == SIGHASH_ANYONECANPAY) * 49 - (out_type != SIGHASH_ALL and out_type != SIGHASH_SINGLE) * 32 + (annex is not None) * 32 + scriptpath * 37
+ return TaggedHash("TapSighash", ss)
+
+def taproot_tree_helper(scripts):
+ if len(scripts) == 0:
+ return ([], bytes(0 for _ in range(32)))
+ if len(scripts) == 1:
+ # One entry: treat as a leaf
+ script = scripts[0]
+ assert(not callable(script))
+ if isinstance(script, list):
+ return taproot_tree_helper(script)
+ assert(isinstance(script, tuple))
+ version = LEAF_VERSION_TAPSCRIPT
+ name = script[0]
+ code = script[1]
+ if len(script) == 3:
+ version = script[2]
+ assert version & 1 == 0
+ assert isinstance(code, bytes)
+ h = TaggedHash("TapLeaf", bytes([version]) + ser_string(code))
+ if name is None:
+ return ([], h)
+ return ([(name, version, code, bytes())], h)
+ elif len(scripts) == 2 and callable(scripts[1]):
+ # Two entries, and the right one is a function
+ left, left_h = taproot_tree_helper(scripts[0:1])
+ right_h = scripts[1](left_h)
+ left = [(name, version, script, control + right_h) for name, version, script, control in left]
+ right = []
+ else:
+ # Two or more entries: descend into each side
+ split_pos = len(scripts) // 2
+ left, left_h = taproot_tree_helper(scripts[0:split_pos])
+ right, right_h = taproot_tree_helper(scripts[split_pos:])
+ left = [(name, version, script, control + right_h) for name, version, script, control in left]
+ right = [(name, version, script, control + left_h) for name, version, script, control in right]
+ if right_h < left_h:
+ right_h, left_h = left_h, right_h
+ h = TaggedHash("TapBranch", left_h + right_h)
+ return (left + right, h)
+
+TaprootInfo = namedtuple("TaprootInfo", "scriptPubKey,inner_pubkey,negflag,tweak,leaves")
+TaprootLeafInfo = namedtuple("TaprootLeafInfo", "script,version,merklebranch")
+
+def taproot_construct(pubkey, scripts=None):
+ """Construct a tree of Taproot spending conditions
+
+ pubkey: the 32-byte x-only internal public key (as produced by compute_xonly_pubkey)
+ scripts: a list of items; each item is either:
+ - a (name, CScript) tuple
+ - a (name, CScript, leaf version) tuple
+ - another list of items (with the same structure)
+ - a function, which specifies how to compute the hashing partner
+ as a function of the hash of whatever it is combined with
+
+ Returns: a TaprootInfo namedtuple (scriptPubKey, inner_pubkey, negflag, tweak, leaves), where leaves maps each name to a TaprootLeafInfo(script, version, merklebranch)
+ """
+ if scripts is None:
+ scripts = []
+
+ ret, h = taproot_tree_helper(scripts)
+ tweak = TaggedHash("TapTweak", pubkey + h)
+ tweaked, negated = tweak_add_pubkey(pubkey, tweak)
+ leaves = dict((name, TaprootLeafInfo(script, version, merklebranch)) for name, version, script, merklebranch in ret)
+ return TaprootInfo(CScript([OP_1, tweaked]), pubkey, negated + 0, tweak, leaves)
+
+def is_op_success(o):
+ return o == 0x50 or o == 0x62 or o == 0x89 or o == 0x8a or o == 0x8d or o == 0x8e or (o >= 0x7e and o <= 0x81) or (o >= 0x83 and o <= 0x86) or (o >= 0x95 and o <= 0x99) or (o >= 0xbb and o <= 0xfe)
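
A minimal sketch of building a Taproot output with one script leaf, using only helpers defined in this diff:

    from test_framework.key import compute_xonly_pubkey, generate_privkey
    from test_framework.script import CScript, OP_TRUE, taproot_construct

    internal_key, _ = compute_xonly_pubkey(generate_privkey())
    info = taproot_construct(internal_key, [("only_leaf", CScript([OP_TRUE]))])
    spk = info.scriptPubKey            # witness v1 program: OP_1 <32-byte tweaked key>
    leaf = info.leaves["only_leaf"]    # TaprootLeafInfo(script, version, merklebranch)
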
diff --git a/test/functional/test_framework/segwit_addr.py b/test/functional/test_framework/segwit_addr.py
index 02368e938f..00c0d8a919 100644
--- a/test/functional/test_framework/segwit_addr.py
+++ b/test/functional/test_framework/segwit_addr.py
@@ -3,7 +3,7 @@
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Reference implementation for Bech32 and segwit addresses."""
-
+import unittest
CHARSET = "qpzry9x8gf2tvdw0s3jn54khce6mua7l"
@@ -84,7 +84,7 @@ def convertbits(data, frombits, tobits, pad=True):
return ret
-def decode(hrp, addr):
+def decode_segwit_address(hrp, addr):
"""Decode a segwit address."""
hrpgot, data = bech32_decode(addr)
if hrpgot != hrp:
@@ -99,9 +99,23 @@ def decode(hrp, addr):
return (data[0], decoded)
-def encode(hrp, witver, witprog):
+def encode_segwit_address(hrp, witver, witprog):
"""Encode a segwit address."""
ret = bech32_encode(hrp, [witver] + convertbits(witprog, 8, 5))
- if decode(hrp, ret) == (None, None):
+ if decode_segwit_address(hrp, ret) == (None, None):
return None
return ret
+
+class TestFrameworkScript(unittest.TestCase):
+ def test_segwit_encode_decode(self):
+ def test_python_bech32(addr):
+ hrp = addr[:4]
+ self.assertEqual(hrp, "bcrt")
+ (witver, witprog) = decode_segwit_address(hrp, addr)
+ self.assertEqual(encode_segwit_address(hrp, witver, witprog), addr)
+
+ # P2WPKH
+ test_python_bech32('bcrt1qthmht0k2qnh3wy7336z05lu2km7emzfpm3wg46')
+ # P2WSH
+ test_python_bech32('bcrt1qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq3xueyj')
+ test_python_bech32('bcrt1qft5p2uhsdcdc3l2ua4ap5qqfg4pjaqlp250x7us7a8qqhrxrxfsqseac85')
diff --git a/test/functional/test_framework/test_framework.py b/test/functional/test_framework/test_framework.py
index 8d402d4888..20f608a9cf 100755
--- a/test/functional/test_framework/test_framework.py
+++ b/test/functional/test_framework/test_framework.py
@@ -20,18 +20,17 @@ import time
from .authproxy import JSONRPCException
from . import coverage
+from .p2p import NetworkThread
from .test_node import TestNode
-from .mininode import NetworkThread
from .util import (
MAX_NODES,
PortSeed,
assert_equal,
check_json_precision,
- connect_nodes,
- disconnect_nodes,
get_datadir_path,
initialize_datadir,
- wait_until,
+ p2p_port,
+ wait_until_helper,
)
@@ -102,8 +101,16 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
self.rpc_timeout = 60 # Wait for up to 60 seconds for the RPC server to respond
self.supports_cli = True
self.bind_to_localhost_only = True
- self.set_test_params()
self.parse_args()
+ self.default_wallet_name = "default_wallet" if self.options.descriptors else ""
+ self.wallet_data_filename = "wallet.dat"
+ # Optional list of wallet names that can be set in set_test_params to
+ # create wallets and import keys to. If unset, it defaults to
+ # len(nodes) * [default_wallet_name]. A None entry skips wallet
+ # creation for that node. If the list is shorter than the number of
+ # nodes, the remaining nodes get no wallet and no keys are imported.
+ self.wallet_names = None
+ self.set_test_params()
if self.options.timeout_factor == 0 :
self.options.timeout_factor = 99999
self.rpc_timeout = int(self.rpc_timeout * self.options.timeout_factor) # optionally, increase timeout by a factor
@@ -362,23 +369,12 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
def setup_nodes(self):
"""Override this method to customize test node setup"""
extra_args = [[]] * self.num_nodes
- wallets = [[]] * self.num_nodes
if hasattr(self, "extra_args"):
extra_args = self.extra_args
- wallets = [[x for x in eargs if x.startswith('-wallet=')] for eargs in extra_args]
- extra_args = [x + ['-nowallet'] for x in extra_args]
self.add_nodes(self.num_nodes, extra_args)
self.start_nodes()
- for i, n in enumerate(self.nodes):
- n.extra_args.pop()
- if '-wallet=0' in n.extra_args or '-nowallet' in n.extra_args or '-disablewallet' in n.extra_args or not self.is_wallet_compiled():
- continue
- if '-wallet=' not in wallets[i] and not any([x.startswith('-wallet=') for x in wallets[i]]):
- wallets[i].append('-wallet=')
- for w in wallets[i]:
- wallet_name = w.split('=', 1)[1]
- n.createwallet(wallet_name=wallet_name, descriptors=self.options.descriptors)
- self.import_deterministic_coinbase_privkeys()
+ if self.is_wallet_compiled():
+ self.import_deterministic_coinbase_privkeys()
if not self.setup_clean_chain:
for n in self.nodes:
assert_equal(n.getblockchaininfo()["blocks"], 199)
@@ -394,13 +390,11 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
assert_equal(chain_info["initialblockdownload"], False)
def import_deterministic_coinbase_privkeys(self):
- for n in self.nodes:
- try:
- n.getwalletinfo()
- except JSONRPCException as e:
- assert str(e).startswith('Method not found')
- continue
-
+ wallet_names = [self.default_wallet_name] * len(self.nodes) if self.wallet_names is None else self.wallet_names
+ assert len(wallet_names) <= len(self.nodes)
+ for wallet_name, n in zip(wallet_names, self.nodes):
+ if wallet_name is not None:
+ n.createwallet(wallet_name=wallet_name, descriptors=self.options.descriptors, load_on_startup=True)
n.importprivkey(privkey=n.get_deterministic_priv_key().key, label='coinbase')
def run_test(self):
@@ -534,10 +528,49 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
self.nodes[i].process.wait(timeout)
def connect_nodes(self, a, b):
- connect_nodes(self.nodes[a], b)
+ def connect_nodes_helper(from_connection, node_num):
+ ip_port = "127.0.0.1:" + str(p2p_port(node_num))
+ from_connection.addnode(ip_port, "onetry")
+ # poll until version handshake complete to avoid race conditions
+ # with transaction relaying
+ # See comments in net_processing:
+ # * Must have a version message before anything else
+ # * Must have a verack message before anything else
+ wait_until_helper(lambda: all(peer['version'] != 0 for peer in from_connection.getpeerinfo()))
+ wait_until_helper(lambda: all(peer['bytesrecv_per_msg'].pop('verack', 0) == 24 for peer in from_connection.getpeerinfo()))
+
+ connect_nodes_helper(self.nodes[a], b)
def disconnect_nodes(self, a, b):
- disconnect_nodes(self.nodes[a], b)
+ def disconnect_nodes_helper(from_connection, node_num):
+ def get_peer_ids():
+ result = []
+ for peer in from_connection.getpeerinfo():
+ if "testnode{}".format(node_num) in peer['subver']:
+ result.append(peer['id'])
+ return result
+
+ peer_ids = get_peer_ids()
+ if not peer_ids:
+ self.log.warning("disconnect_nodes: {} and {} were not connected".format(
+ from_connection.index,
+ node_num,
+ ))
+ return
+ for peer_id in peer_ids:
+ try:
+ from_connection.disconnectnode(nodeid=peer_id)
+ except JSONRPCException as e:
+ # If this node is disconnected between calculating the peer id
+ # and issuing the disconnect, don't worry about it.
+ # This avoids a race condition if we're mass-disconnecting peers.
+ if e.error['code'] != -29: # RPC_CLIENT_NODE_NOT_CONNECTED
+ raise
+
+ # wait to disconnect
+ wait_until_helper(lambda: not get_peer_ids(), timeout=5)
+
+ disconnect_nodes_helper(self.nodes[a], b)
def split_network(self):
"""
@@ -603,8 +636,8 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
self.sync_blocks(nodes)
self.sync_mempools(nodes)
- def wait_until(self, test_function, timeout=60, lock=None):
- return wait_until(test_function, timeout=timeout, lock=lock, timeout_factor=self.options.timeout_factor)
+ def wait_until(self, test_function, timeout=60):
+ return wait_until_helper(test_function, timeout=timeout, timeout_factor=self.options.timeout_factor)
# Private helper methods. These should not be accessed by the subclass test scripts.
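
The practical effect for test scripts is that connection management and waiting now go through the framework object rather than free functions in util.py; a hypothetical run_test fragment:

    # inside a BitcoinTestFramework subclass (node indices are illustrative)
    self.disconnect_nodes(0, 1)   # split the two nodes
    # ... mine or relay on one side only ...
    self.connect_nodes(0, 1)      # reconnect; polls until the version/verack handshake completes
    self.wait_until(lambda: self.nodes[0].getblockcount() == self.nodes[1].getblockcount())
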
diff --git a/test/functional/test_framework/test_node.py b/test/functional/test_framework/test_node.py
index 8f0d45c7f9..046efe730e 100755
--- a/test/functional/test_framework/test_node.py
+++ b/test/functional/test_framework/test_node.py
@@ -31,7 +31,7 @@ from .util import (
get_auth_cookie,
get_rpc_proxy,
rpc_url,
- wait_until,
+ wait_until_helper,
p2p_port,
EncodeDecimal,
)
@@ -231,7 +231,7 @@ class TestNode():
if self.version_is_at_least(190000):
# getmempoolinfo.loaded is available since commit
# bb8ae2c (version 0.19.0)
- wait_until(lambda: rpc.getmempoolinfo()['loaded'])
+ wait_until_helper(lambda: rpc.getmempoolinfo()['loaded'], timeout_factor=self.timeout_factor)
# Wait for the node to finish reindex, block import, and
# loading the mempool. Usually importing happens fast or
# even "immediate" when the node is started. However, there
@@ -359,7 +359,7 @@ class TestNode():
return True
def wait_until_stopped(self, timeout=BITCOIND_PROC_WAIT_TIMEOUT):
- wait_until(self.is_node_stopped, timeout=timeout, timeout_factor=self.timeout_factor)
+ wait_until_helper(self.is_node_stopped, timeout=timeout, timeout_factor=self.timeout_factor)
@contextlib.contextmanager
def assert_debug_log(self, expected_msgs, unexpected_msgs=None, timeout=2):
@@ -542,16 +542,7 @@ class TestNode():
return p2p_conn
- @property
- def p2p(self):
- """Return the first p2p connection
-
- Convenience property - most tests only use a single p2p connection to each
- node, so this saves having to write node.p2ps[0] many times."""
- assert self.p2ps, self._node_msg("No p2p connection")
- return self.p2ps[0]
-
- def num_connected_mininodes(self):
+ def num_test_p2p_connections(self):
"""Return number of test framework p2p connections to the node."""
return len([peer for peer in self.getpeerinfo() if peer['subver'] == MY_SUBVERSION])
@@ -560,7 +551,7 @@ class TestNode():
for p in self.p2ps:
p.peer_disconnect()
del self.p2ps[:]
- wait_until(lambda: self.num_connected_mininodes() == 0)
+ wait_until_helper(lambda: self.num_test_p2p_connections() == 0, timeout_factor=self.timeout_factor)
class TestNodeCLIAttr:
@@ -650,10 +641,10 @@ class RPCOverloadWrapper():
def __getattr__(self, name):
return getattr(self.rpc, name)
- def createwallet(self, wallet_name, disable_private_keys=None, blank=None, passphrase='', avoid_reuse=None, descriptors=None):
+ def createwallet(self, wallet_name, disable_private_keys=None, blank=None, passphrase='', avoid_reuse=None, descriptors=None, load_on_startup=None):
if descriptors is None:
descriptors = self.descriptors
- return self.__getattr__('createwallet')(wallet_name, disable_private_keys, blank, passphrase, avoid_reuse, descriptors)
+ return self.__getattr__('createwallet')(wallet_name, disable_private_keys, blank, passphrase, avoid_reuse, descriptors, load_on_startup)
def importprivkey(self, privkey, label=None, rescan=None):
wallet_info = self.getwalletinfo()
diff --git a/test/functional/test_framework/util.py b/test/functional/test_framework/util.py
index 3362b41209..3356f1ab10 100644
--- a/test/functional/test_framework/util.py
+++ b/test/functional/test_framework/util.py
@@ -12,9 +12,9 @@ import inspect
import json
import logging
import os
-import random
import re
import time
+import unittest
from . import coverage
from .authproxy import AuthServiceProxy, JSONRPCException
@@ -225,7 +225,15 @@ def satoshi_round(amount):
return Decimal(amount).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)
-def wait_until(predicate, *, attempts=float('inf'), timeout=float('inf'), lock=None, timeout_factor=1.0):
+def wait_until_helper(predicate, *, attempts=float('inf'), timeout=float('inf'), lock=None, timeout_factor=1.0):
+ """Sleep until the predicate resolves to be True.
+
+ Warning: this helper is not recommended for direct use in tests, as it is
+ not aware of the test framework's context. Prefer the `wait_until()`
+ methods of `BitcoinTestFramework` or `P2PInterface`, which scale the
+ timeout properly; `P2PInterface.wait_until()` in `p2p.py` additionally
+ acquires the p2p lock.
+ """
if attempts == float('inf') and timeout == float('inf'):
timeout = 60
timeout = timeout * timeout_factor
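
The distinction drawn in the docstring, sketched (the `node` and event-loop handles are assumptions):

    # preferred in tests: the timeout is scaled by --timeout-factor automatically
    self.wait_until(lambda: node.getblockcount() == 200, timeout=30)

    # framework-internal code with no test context falls back to the bare helper
    wait_until_helper(lambda: not network_event_loop.is_running(), timeout=10)
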
@@ -403,47 +411,6 @@ def set_node_times(nodes, t):
node.setmocktime(t)
-def disconnect_nodes(from_connection, node_num):
- def get_peer_ids():
- result = []
- for peer in from_connection.getpeerinfo():
- if "testnode{}".format(node_num) in peer['subver']:
- result.append(peer['id'])
- return result
-
- peer_ids = get_peer_ids()
- if not peer_ids:
- logger.warning("disconnect_nodes: {} and {} were not connected".format(
- from_connection.index,
- node_num,
- ))
- return
- for peer_id in peer_ids:
- try:
- from_connection.disconnectnode(nodeid=peer_id)
- except JSONRPCException as e:
- # If this node is disconnected between calculating the peer id
- # and issuing the disconnect, don't worry about it.
- # This avoids a race condition if we're mass-disconnecting peers.
- if e.error['code'] != -29: # RPC_CLIENT_NODE_NOT_CONNECTED
- raise
-
- # wait to disconnect
- wait_until(lambda: not get_peer_ids(), timeout=5)
-
-
-def connect_nodes(from_connection, node_num):
- ip_port = "127.0.0.1:" + str(p2p_port(node_num))
- from_connection.addnode(ip_port, "onetry")
- # poll until version handshake complete to avoid race conditions
- # with transaction relaying
- # See comments in net_processing:
- # * Must have a version message before anything else
- # * Must have a verack message before anything else
- wait_until(lambda: all(peer['version'] != 0 for peer in from_connection.getpeerinfo()))
- wait_until(lambda: all(peer['bytesrecv_per_msg'].pop('verack', 0) == 24 for peer in from_connection.getpeerinfo()))
-
-
# Transaction/Block functions
#############################
@@ -460,62 +427,6 @@ def find_output(node, txid, amount, *, blockhash=None):
raise RuntimeError("find_output txid %s : %s not found" % (txid, str(amount)))
-def gather_inputs(from_node, amount_needed, confirmations_required=1):
- """
- Return a random set of unspent txouts that are enough to pay amount_needed
- """
- assert confirmations_required >= 0
- utxo = from_node.listunspent(confirmations_required)
- random.shuffle(utxo)
- inputs = []
- total_in = Decimal("0.00000000")
- while total_in < amount_needed and len(utxo) > 0:
- t = utxo.pop()
- total_in += t["amount"]
- inputs.append({"txid": t["txid"], "vout": t["vout"], "address": t["address"]})
- if total_in < amount_needed:
- raise RuntimeError("Insufficient funds: need %d, have %d" % (amount_needed, total_in))
- return (total_in, inputs)
-
-
-def make_change(from_node, amount_in, amount_out, fee):
- """
- Create change output(s), return them
- """
- outputs = {}
- amount = amount_out + fee
- change = amount_in - amount
- if change > amount * 2:
- # Create an extra change output to break up big inputs
- change_address = from_node.getnewaddress()
- # Split change in two, being careful of rounding:
- outputs[change_address] = Decimal(change / 2).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)
- change = amount_in - amount - outputs[change_address]
- if change > 0:
- outputs[from_node.getnewaddress()] = change
- return outputs
-
-
-def random_transaction(nodes, amount, min_fee, fee_increment, fee_variants):
- """
- Create a random transaction.
- Returns (txid, hex-encoded-transaction-data, fee)
- """
- from_node = random.choice(nodes)
- to_node = random.choice(nodes)
- fee = min_fee + fee_increment * random.randint(0, fee_variants)
-
- (total_in, inputs) = gather_inputs(from_node, amount + fee)
- outputs = make_change(from_node, total_in, amount, fee)
- outputs[to_node.getnewaddress()] = float(amount)
-
- rawtx = from_node.createrawtransaction(inputs, outputs)
- signresult = from_node.signrawtransactionwithwallet(rawtx)
- txid = from_node.sendrawtransaction(signresult["hex"], 0)
-
- return (txid, signresult["hex"], fee)
-
-
# Helper to create at least "count" utxos
# Pass in a fee that is sufficient for relay and mining new transactions.
def create_confirmed_utxos(fee, node, count):
@@ -617,3 +528,33 @@ def find_vout_for_address(node, txid, addr):
if any([addr == a for a in tx["vout"][i]["scriptPubKey"]["addresses"]]):
return i
raise RuntimeError("Vout not found for address: txid=%s, addr=%s" % (txid, addr))
+
+def modinv(a, n):
+ """Compute the modular inverse of a modulo n using the extended Euclidean
+ algorithm. See https://en.wikipedia.org/wiki/Extended_Euclidean_algorithm#Modular_integers.
+ """
+ # TODO: Change to pow(a, -1, n) available in Python 3.8
+ t1, t2 = 0, 1
+ r1, r2 = n, a
+ while r2 != 0:
+ q = r1 // r2
+ t1, t2 = t2, t1 - q * t2
+ r1, r2 = r2, r1 - q * r2
+ if r1 > 1:
+ return None
+ if t1 < 0:
+ t1 += n
+ return t1
+
+class TestFrameworkUtil(unittest.TestCase):
+ def test_modinv(self):
+ test_vectors = [
+ [7, 11],
+ [11, 29],
+ [90, 13],
+ [1891, 3797],
+ [6003722857, 77695236973],
+ ]
+
+ for a, n in test_vectors:
+ self.assertEqual(modinv(a, n), pow(a, n-2, n))
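
The unit test above relies on each n being prime, so Fermat's little theorem (a^(n-2) mod n equals the modular inverse) provides an independent reference value, and the TODO points at the Python 3.8 built-in. A standalone sketch of both equivalences, using one of the vectors above:

# Standalone check, outside the test framework, of the identities that
# TestFrameworkUtil.test_modinv relies on.
a, n = 1891, 3797               # n is prime, taken from the test vectors above

inv_fermat = pow(a, n - 2, n)   # Fermat's little theorem reference value
assert (a * inv_fermat) % n == 1

inv_builtin = pow(a, -1, n)     # Python 3.8+ built-in mentioned in the TODO
assert inv_builtin == inv_fermat
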
diff --git a/test/functional/test_framework/wallet.py b/test/functional/test_framework/wallet.py
new file mode 100644
index 0000000000..39b3bf2a5b
--- /dev/null
+++ b/test/functional/test_framework/wallet.py
@@ -0,0 +1,68 @@
+#!/usr/bin/env python3
+# Copyright (c) 2020 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""A limited-functionality wallet, which may replace a real wallet in tests"""
+
+from decimal import Decimal
+from test_framework.address import ADDRESS_BCRT1_P2WSH_OP_TRUE
+from test_framework.messages import (
+ COIN,
+ COutPoint,
+ CTransaction,
+ CTxIn,
+ CTxInWitness,
+ CTxOut,
+)
+from test_framework.script import (
+ CScript,
+ OP_TRUE,
+)
+from test_framework.util import (
+ assert_equal,
+ hex_str_to_bytes,
+ satoshi_round,
+)
+
+
+class MiniWallet:
+ def __init__(self, test_node):
+ self._test_node = test_node
+ self._utxos = []
+ self._address = ADDRESS_BCRT1_P2WSH_OP_TRUE
+ self._scriptPubKey = hex_str_to_bytes(self._test_node.validateaddress(self._address)['scriptPubKey'])
+
+ def generate(self, num_blocks):
+ """Generate blocks with coinbase outputs to the internal address, and append the outputs to the internal list"""
+ blocks = self._test_node.generatetoaddress(num_blocks, self._address)
+ for b in blocks:
+ cb_tx = self._test_node.getblock(blockhash=b, verbosity=2)['tx'][0]
+ self._utxos.append({'txid': cb_tx['txid'], 'vout': 0, 'value': cb_tx['vout'][0]['value']})
+ return blocks
+
+ def get_utxo(self):
+ """Return the last utxo. Can be used to get the change output immediately after a send_self_transfer"""
+ return self._utxos.pop()
+
+ def send_self_transfer(self, *, fee_rate=Decimal("0.003"), from_node, utxo_to_spend=None):
+ """Create and send a tx with the specified fee_rate. Fee may be exact or at most one satoshi higher than needed."""
+ self._utxos = sorted(self._utxos, key=lambda k: k['value'])
+ utxo_to_spend = utxo_to_spend or self._utxos.pop() # Pick the largest utxo (if none provided) and hope it covers the fee
+ vsize = Decimal(96)
+ send_value = satoshi_round(utxo_to_spend['value'] - fee_rate * (vsize / 1000))
+ fee = utxo_to_spend['value'] - send_value
+ assert send_value > 0
+
+ tx = CTransaction()
+ tx.vin = [CTxIn(COutPoint(int(utxo_to_spend['txid'], 16), utxo_to_spend['vout']))]
+ tx.vout = [CTxOut(int(send_value * COIN), self._scriptPubKey)]
+ tx.wit.vtxinwit = [CTxInWitness()]
+ tx.wit.vtxinwit[0].scriptWitness.stack = [CScript([OP_TRUE])]
+ tx_hex = tx.serialize().hex()
+
+ txid = from_node.sendrawtransaction(tx_hex)
+ self._utxos.append({'txid': txid, 'vout': 0, 'value': send_value})
+ tx_info = from_node.getmempoolentry(txid)
+ assert_equal(tx_info['vsize'], vsize)
+ assert_equal(tx_info['fee'], fee)
+ return {'txid': txid, 'wtxid': tx_info['wtxid'], 'hex': tx_hex}
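
MiniWallet is meant to be driven from inside a functional test. A minimal, hypothetical usage sketch (class name and block counts are illustrative) could look like this:

# Hypothetical usage sketch for the MiniWallet helper above.
from test_framework.test_framework import BitcoinTestFramework
from test_framework.wallet import MiniWallet

class MiniWalletExample(BitcoinTestFramework):
    def set_test_params(self):
        self.setup_clean_chain = True
        self.num_nodes = 1

    def run_test(self):
        node = self.nodes[0]
        wallet = MiniWallet(node)
        wallet.generate(1)    # coinbase paying to the P2WSH OP_TRUE address
        node.generate(100)    # mature the coinbase output
        tx = wallet.send_self_transfer(from_node=node)
        assert tx['txid'] in node.getrawmempool()

if __name__ == '__main__':
    MiniWalletExample().main()
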
diff --git a/test/functional/test_runner.py b/test/functional/test_runner.py
index b090e93394..8cd82649b6 100755
--- a/test/functional/test_runner.py
+++ b/test/functional/test_runner.py
@@ -69,7 +69,11 @@ TEST_EXIT_SKIPPED = 77
TEST_FRAMEWORK_MODULES = [
"address",
"blocktools",
+ "muhash",
+ "key",
"script",
+ "segwit_addr",
+ "util",
]
EXTENDED_SCRIPTS = [
@@ -85,6 +89,7 @@ BASE_SCRIPTS = [
'wallet_hd.py',
'wallet_hd.py --descriptors',
'wallet_backup.py',
+ 'wallet_backup.py --descriptors',
# vv Tests less than 5m vv
'mining_getblocktemplate_longpoll.py',
'feature_maxuploadtarget.py',
@@ -103,9 +108,9 @@ BASE_SCRIPTS = [
'mempool_updatefromblock.py',
'wallet_dump.py',
'wallet_listtransactions.py',
+ 'feature_taproot.py',
# vv Tests less than 60s vv
'p2p_sendheaders.py',
- 'wallet_zapwallettxes.py',
'wallet_importmulti.py',
'mempool_limit.py',
'rpc_txoutproof.py',
@@ -138,6 +143,7 @@ BASE_SCRIPTS = [
'mempool_reorg.py',
'mempool_persist.py',
'wallet_multiwallet.py',
+ 'wallet_multiwallet.py --descriptors',
'wallet_multiwallet.py --usecli',
'wallet_createwallet.py',
'wallet_createwallet.py --usecli',
@@ -153,6 +159,7 @@ BASE_SCRIPTS = [
'feature_proxy.py',
'rpc_signrawtransaction.py',
'wallet_groups.py',
+ 'p2p_addrv2_relay.py',
'p2p_disconnect_ban.py',
'rpc_decodescript.py',
'rpc_blockchain.py',
@@ -194,6 +201,7 @@ BASE_SCRIPTS = [
'p2p_eviction.py',
'rpc_signmessage.py',
'rpc_generateblock.py',
+ 'rpc_generate.py',
'wallet_balance.py',
'feature_nulldummy.py',
'mempool_accept.py',
@@ -206,6 +214,7 @@ BASE_SCRIPTS = [
'rpc_bind.py --ipv6',
'rpc_bind.py --nonloopback',
'mining_basic.py',
+ 'feature_signet.py',
'wallet_bumpfee.py',
'wallet_implicitsegwit.py',
'rpc_named_arguments.py',
@@ -223,6 +232,7 @@ BASE_SCRIPTS = [
'rpc_estimatefee.py',
'rpc_getblockstats.py',
'wallet_create_tx.py',
+ 'wallet_send.py',
'p2p_fingerprint.py',
'feature_uacomment.py',
'wallet_coinbase_category.py',
@@ -243,10 +253,11 @@ BASE_SCRIPTS = [
'p2p_node_network_limited.py',
'p2p_permissions.py',
'feature_blocksdir.py',
+ 'wallet_startup.py',
'feature_config_args.py',
'feature_settings.py',
'rpc_getdescriptorinfo.py',
- 'rpc_getpeerinfo_banscore_deprecation.py',
+ 'rpc_getpeerinfo_deprecation.py',
'rpc_help.py',
'feature_help.py',
'feature_shutdown.py',
@@ -713,14 +724,16 @@ class RPCCoverage():
Return a set of currently untested RPC commands.
"""
- # This is shared from `test/functional/test-framework/coverage.py`
+ # This is shared from `test/functional/test_framework/coverage.py`
reference_filename = 'rpc_interface.txt'
coverage_file_prefix = 'coverage.'
coverage_ref_filename = os.path.join(self.dir, reference_filename)
coverage_filenames = set()
all_cmds = set()
- covered_cmds = set()
+ # Consider RPC generate covered, because it is overloaded in
+ # test_framework/test_node.py and not seen by the coverage check.
+ covered_cmds = set({'generate'})
if not os.path.isfile(coverage_ref_filename):
raise RuntimeError("No coverage reference found")
diff --git a/test/functional/tool_wallet.py b/test/functional/tool_wallet.py
index 18f0beb598..958341c691 100755
--- a/test/functional/tool_wallet.py
+++ b/test/functional/tool_wallet.py
@@ -70,12 +70,14 @@ class ToolWalletTest(BitcoinTestFramework):
self.assert_raises_tool_error('Invalid command: help', 'help')
self.assert_raises_tool_error('Error: two methods provided (info and create). Only one method should be provided.', 'info', 'create')
self.assert_raises_tool_error('Error parsing command line arguments: Invalid parameter -foo', '-foo')
+ locked_dir = os.path.join(self.options.tmpdir, "node0", "regtest", "wallets")
self.assert_raises_tool_error(
- 'Error loading wallet.dat. Is wallet being used by another process?',
- '-wallet=wallet.dat',
+ 'Error initializing wallet database environment "{}"!'.format(locked_dir),
+ '-wallet=' + self.default_wallet_name,
'info',
)
- self.assert_raises_tool_error('Error: no wallet file at nonexistent.dat', '-wallet=nonexistent.dat', 'info')
+ path = os.path.join(self.options.tmpdir, "node0", "regtest", "wallets", "nonexistent.dat")
+ self.assert_raises_tool_error("Failed to load database path '{}'. Path does not exist.".format(path), '-wallet=nonexistent.dat', 'info')
def test_tool_wallet_info(self):
# Stop the node to close the wallet to call the info command.
@@ -96,13 +98,17 @@ class ToolWalletTest(BitcoinTestFramework):
out = textwrap.dedent('''\
Wallet info
===========
+ Name: \
+
+ Format: bdb
+ Descriptors: no
Encrypted: no
HD (hd seed available): yes
Keypool Size: 2
Transactions: 0
Address Book: 3
''')
- self.assert_tool_output(out, '-wallet=wallet.dat', 'info')
+ self.assert_tool_output(out, '-wallet=' + self.default_wallet_name, 'info')
timestamp_after = self.wallet_timestamp()
self.log.debug('Wallet file timestamp after calling info: {}'.format(timestamp_after))
self.log_wallet_timestamp_comparison(timestamp_before, timestamp_after)
@@ -135,13 +141,17 @@ class ToolWalletTest(BitcoinTestFramework):
out = textwrap.dedent('''\
Wallet info
===========
+ Name: \
+
+ Format: bdb
+ Descriptors: no
Encrypted: no
HD (hd seed available): yes
Keypool Size: 2
Transactions: 1
Address Book: 3
''')
- self.assert_tool_output(out, '-wallet=wallet.dat', 'info')
+ self.assert_tool_output(out, '-wallet=' + self.default_wallet_name, 'info')
shasum_after = self.wallet_shasum()
timestamp_after = self.wallet_timestamp()
self.log.debug('Wallet file timestamp after calling info: {}'.format(timestamp_after))
@@ -162,6 +172,9 @@ class ToolWalletTest(BitcoinTestFramework):
Topping up keypool...
Wallet info
===========
+ Name: foo
+ Format: bdb
+ Descriptors: no
Encrypted: no
HD (hd seed available): yes
Keypool Size: 2000
@@ -179,7 +192,7 @@ class ToolWalletTest(BitcoinTestFramework):
def test_getwalletinfo_on_different_wallet(self):
self.log.info('Starting node with arg -wallet=foo')
- self.start_node(0, ['-wallet=foo'])
+ self.start_node(0, ['-nowallet', '-wallet=foo'])
self.log.info('Calling getwalletinfo on a different wallet ("foo"), testing output')
shasum_before = self.wallet_shasum()
@@ -211,7 +224,7 @@ class ToolWalletTest(BitcoinTestFramework):
self.assert_tool_output('', '-wallet=salvage', 'salvage')
def run_test(self):
- self.wallet_path = os.path.join(self.nodes[0].datadir, self.chain, 'wallets', 'wallet.dat')
+ self.wallet_path = os.path.join(self.nodes[0].datadir, self.chain, 'wallets', self.default_wallet_name, self.wallet_data_filename)
self.test_invalid_tool_commands_and_args()
# Warning: The following tests are order-dependent.
self.test_tool_wallet_info()
diff --git a/test/functional/wallet_abandonconflict.py b/test/functional/wallet_abandonconflict.py
index 8837e13005..2e0edcfa38 100755
--- a/test/functional/wallet_abandonconflict.py
+++ b/test/functional/wallet_abandonconflict.py
@@ -16,8 +16,6 @@ from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
- connect_nodes,
- disconnect_nodes,
)
@@ -50,7 +48,7 @@ class AbandonConflictTest(BitcoinTestFramework):
balance = newbalance
# Disconnect nodes so node0's transactions don't get into node1's mempool
- disconnect_nodes(self.nodes[0], 1)
+ self.disconnect_nodes(0, 1)
# Identify the 10btc outputs
nA = next(tx_out["vout"] for tx_out in self.nodes[0].gettransaction(txA)["details"] if tx_out["amount"] == Decimal("10"))
@@ -161,7 +159,7 @@ class AbandonConflictTest(BitcoinTestFramework):
self.nodes[1].sendrawtransaction(signed["hex"])
self.nodes[1].generate(1)
- connect_nodes(self.nodes[0], 1)
+ self.connect_nodes(0, 1)
self.sync_blocks()
# Verify that B and C's 10 BTC outputs are available for spending again because AB1 is now conflicted
diff --git a/test/functional/wallet_address_types.py b/test/functional/wallet_address_types.py
index 68e22b7e86..6b3276e404 100755
--- a/test/functional/wallet_address_types.py
+++ b/test/functional/wallet_address_types.py
@@ -62,11 +62,6 @@ from test_framework.util import (
assert_equal,
assert_greater_than,
assert_raises_rpc_error,
- connect_nodes,
-)
-from test_framework.segwit_addr import (
- encode,
- decode,
)
class AddressTypeTest(BitcoinTestFramework):
@@ -94,20 +89,13 @@ class AddressTypeTest(BitcoinTestFramework):
# Fully mesh-connect nodes for faster mempool sync
for i, j in itertools.product(range(self.num_nodes), repeat=2):
if i > j:
- connect_nodes(self.nodes[i], j)
+ self.connect_nodes(i, j)
self.sync_all()
def get_balances(self, key='trusted'):
"""Return a list of balances."""
return [self.nodes[i].getbalances()['mine'][key] for i in range(4)]
- # Quick test of python bech32 implementation
- def test_python_bech32(self, addr):
- hrp = addr[:4]
- assert_equal(hrp, "bcrt")
- (witver, witprog) = decode(hrp, addr)
- assert_equal(encode(hrp, witver, witprog), addr)
-
def test_address(self, node, address, multisig, typ):
"""Run sanity checks on an address."""
info = self.nodes[node].getaddressinfo(address)
@@ -132,7 +120,6 @@ class AddressTypeTest(BitcoinTestFramework):
assert_equal(info['witness_version'], 0)
assert_equal(len(info['witness_program']), 40)
assert 'pubkey' in info
- self.test_python_bech32(info["address"])
elif typ == 'legacy':
# P2SH-multisig
assert info['isscript']
@@ -158,7 +145,6 @@ class AddressTypeTest(BitcoinTestFramework):
assert_equal(info['witness_version'], 0)
assert_equal(len(info['witness_program']), 64)
assert 'pubkeys' in info
- self.test_python_bech32(info["address"])
else:
# Unknown type
assert False
diff --git a/test/functional/wallet_avoidreuse.py b/test/functional/wallet_avoidreuse.py
index eddd938847..229c134a4b 100755
--- a/test/functional/wallet_avoidreuse.py
+++ b/test/functional/wallet_avoidreuse.py
@@ -9,7 +9,6 @@ from test_framework.util import (
assert_approx,
assert_equal,
assert_raises_rpc_error,
- connect_nodes,
)
def reset_balance(node, discardaddr):
@@ -111,7 +110,7 @@ class AvoidReuseTest(BitcoinTestFramework):
assert_equal(self.nodes[1].getwalletinfo()["avoid_reuse"], True)
self.restart_node(1)
- connect_nodes(self.nodes[0], 1)
+ self.connect_nodes(0, 1)
# Flags should still be node1.avoid_reuse=false, node2.avoid_reuse=true
assert_equal(self.nodes[0].getwalletinfo()["avoid_reuse"], False)
diff --git a/test/functional/wallet_backup.py b/test/functional/wallet_backup.py
index 4766355335..c7bd2ea02b 100755
--- a/test/functional/wallet_backup.py
+++ b/test/functional/wallet_backup.py
@@ -39,7 +39,6 @@ from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
- connect_nodes,
)
@@ -62,10 +61,10 @@ class WalletBackupTest(BitcoinTestFramework):
def setup_network(self):
self.setup_nodes()
- connect_nodes(self.nodes[0], 3)
- connect_nodes(self.nodes[1], 3)
- connect_nodes(self.nodes[2], 3)
- connect_nodes(self.nodes[2], 0)
+ self.connect_nodes(0, 3)
+ self.connect_nodes(1, 3)
+ self.connect_nodes(2, 3)
+ self.connect_nodes(2, 0)
self.sync_all()
def one_send(self, from_node, to_address):
@@ -96,10 +95,10 @@ class WalletBackupTest(BitcoinTestFramework):
self.start_node(0)
self.start_node(1)
self.start_node(2)
- connect_nodes(self.nodes[0], 3)
- connect_nodes(self.nodes[1], 3)
- connect_nodes(self.nodes[2], 3)
- connect_nodes(self.nodes[2], 0)
+ self.connect_nodes(0, 3)
+ self.connect_nodes(1, 3)
+ self.connect_nodes(2, 3)
+ self.connect_nodes(2, 0)
def stop_three(self):
self.stop_node(0)
@@ -107,9 +106,9 @@ class WalletBackupTest(BitcoinTestFramework):
self.stop_node(2)
def erase_three(self):
- os.remove(os.path.join(self.nodes[0].datadir, self.chain, 'wallets', 'wallet.dat'))
- os.remove(os.path.join(self.nodes[1].datadir, self.chain, 'wallets', 'wallet.dat'))
- os.remove(os.path.join(self.nodes[2].datadir, self.chain, 'wallets', 'wallet.dat'))
+ os.remove(os.path.join(self.nodes[0].datadir, self.chain, 'wallets', self.default_wallet_name, self.wallet_data_filename))
+ os.remove(os.path.join(self.nodes[1].datadir, self.chain, 'wallets', self.default_wallet_name, self.wallet_data_filename))
+ os.remove(os.path.join(self.nodes[2].datadir, self.chain, 'wallets', self.default_wallet_name, self.wallet_data_filename))
def run_test(self):
self.log.info("Generating initial blockchain")
@@ -135,11 +134,13 @@ class WalletBackupTest(BitcoinTestFramework):
self.log.info("Backing up")
self.nodes[0].backupwallet(os.path.join(self.nodes[0].datadir, 'wallet.bak'))
- self.nodes[0].dumpwallet(os.path.join(self.nodes[0].datadir, 'wallet.dump'))
self.nodes[1].backupwallet(os.path.join(self.nodes[1].datadir, 'wallet.bak'))
- self.nodes[1].dumpwallet(os.path.join(self.nodes[1].datadir, 'wallet.dump'))
self.nodes[2].backupwallet(os.path.join(self.nodes[2].datadir, 'wallet.bak'))
- self.nodes[2].dumpwallet(os.path.join(self.nodes[2].datadir, 'wallet.dump'))
+
+ if not self.options.descriptors:
+ self.nodes[0].dumpwallet(os.path.join(self.nodes[0].datadir, 'wallet.dump'))
+ self.nodes[1].dumpwallet(os.path.join(self.nodes[1].datadir, 'wallet.dump'))
+ self.nodes[2].dumpwallet(os.path.join(self.nodes[2].datadir, 'wallet.dump'))
self.log.info("More transactions")
for _ in range(5):
@@ -171,9 +172,9 @@ class WalletBackupTest(BitcoinTestFramework):
shutil.rmtree(os.path.join(self.nodes[2].datadir, self.chain, 'chainstate'))
# Restore wallets from backup
- shutil.copyfile(os.path.join(self.nodes[0].datadir, 'wallet.bak'), os.path.join(self.nodes[0].datadir, self.chain, 'wallets', 'wallet.dat'))
- shutil.copyfile(os.path.join(self.nodes[1].datadir, 'wallet.bak'), os.path.join(self.nodes[1].datadir, self.chain, 'wallets', 'wallet.dat'))
- shutil.copyfile(os.path.join(self.nodes[2].datadir, 'wallet.bak'), os.path.join(self.nodes[2].datadir, self.chain, 'wallets', 'wallet.dat'))
+ shutil.copyfile(os.path.join(self.nodes[0].datadir, 'wallet.bak'), os.path.join(self.nodes[0].datadir, self.chain, 'wallets', self.default_wallet_name, self.wallet_data_filename))
+ shutil.copyfile(os.path.join(self.nodes[1].datadir, 'wallet.bak'), os.path.join(self.nodes[1].datadir, self.chain, 'wallets', self.default_wallet_name, self.wallet_data_filename))
+ shutil.copyfile(os.path.join(self.nodes[2].datadir, 'wallet.bak'), os.path.join(self.nodes[2].datadir, self.chain, 'wallets', self.default_wallet_name, self.wallet_data_filename))
self.log.info("Re-starting nodes")
self.start_three()
@@ -183,35 +184,36 @@ class WalletBackupTest(BitcoinTestFramework):
assert_equal(self.nodes[1].getbalance(), balance1)
assert_equal(self.nodes[2].getbalance(), balance2)
- self.log.info("Restoring using dumped wallet")
- self.stop_three()
- self.erase_three()
+ if not self.options.descriptors:
+ self.log.info("Restoring using dumped wallet")
+ self.stop_three()
+ self.erase_three()
- #start node2 with no chain
- shutil.rmtree(os.path.join(self.nodes[2].datadir, self.chain, 'blocks'))
- shutil.rmtree(os.path.join(self.nodes[2].datadir, self.chain, 'chainstate'))
+ #start node2 with no chain
+ shutil.rmtree(os.path.join(self.nodes[2].datadir, self.chain, 'blocks'))
+ shutil.rmtree(os.path.join(self.nodes[2].datadir, self.chain, 'chainstate'))
- self.start_three()
+ self.start_three()
- assert_equal(self.nodes[0].getbalance(), 0)
- assert_equal(self.nodes[1].getbalance(), 0)
- assert_equal(self.nodes[2].getbalance(), 0)
+ assert_equal(self.nodes[0].getbalance(), 0)
+ assert_equal(self.nodes[1].getbalance(), 0)
+ assert_equal(self.nodes[2].getbalance(), 0)
- self.nodes[0].importwallet(os.path.join(self.nodes[0].datadir, 'wallet.dump'))
- self.nodes[1].importwallet(os.path.join(self.nodes[1].datadir, 'wallet.dump'))
- self.nodes[2].importwallet(os.path.join(self.nodes[2].datadir, 'wallet.dump'))
+ self.nodes[0].importwallet(os.path.join(self.nodes[0].datadir, 'wallet.dump'))
+ self.nodes[1].importwallet(os.path.join(self.nodes[1].datadir, 'wallet.dump'))
+ self.nodes[2].importwallet(os.path.join(self.nodes[2].datadir, 'wallet.dump'))
- self.sync_blocks()
+ self.sync_blocks()
- assert_equal(self.nodes[0].getbalance(), balance0)
- assert_equal(self.nodes[1].getbalance(), balance1)
- assert_equal(self.nodes[2].getbalance(), balance2)
+ assert_equal(self.nodes[0].getbalance(), balance0)
+ assert_equal(self.nodes[1].getbalance(), balance1)
+ assert_equal(self.nodes[2].getbalance(), balance2)
# Backup to source wallet file must fail
sourcePaths = [
- os.path.join(self.nodes[0].datadir, self.chain, 'wallets', 'wallet.dat'),
- os.path.join(self.nodes[0].datadir, self.chain, '.', 'wallets', 'wallet.dat'),
- os.path.join(self.nodes[0].datadir, self.chain, 'wallets', ''),
+ os.path.join(self.nodes[0].datadir, self.chain, 'wallets', self.default_wallet_name, self.wallet_data_filename),
+ os.path.join(self.nodes[0].datadir, self.chain, '.', 'wallets', self.default_wallet_name, self.wallet_data_filename),
+ os.path.join(self.nodes[0].datadir, self.chain, 'wallets', self.default_wallet_name),
os.path.join(self.nodes[0].datadir, self.chain, 'wallets')]
for sourcePath in sourcePaths:
diff --git a/test/functional/wallet_balance.py b/test/functional/wallet_balance.py
index 31829a18b3..589fab9992 100755
--- a/test/functional/wallet_balance.py
+++ b/test/functional/wallet_balance.py
@@ -11,7 +11,6 @@ from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
- connect_nodes,
)
@@ -215,10 +214,10 @@ class WalletTest(BitcoinTestFramework):
# dynamically loading the wallet.
before = self.nodes[1].getbalances()['mine']['untrusted_pending']
dst = self.nodes[1].getnewaddress()
- self.nodes[1].unloadwallet('')
+ self.nodes[1].unloadwallet(self.default_wallet_name)
self.nodes[0].sendtoaddress(dst, 0.1)
self.sync_all()
- self.nodes[1].loadwallet('')
+ self.nodes[1].loadwallet(self.default_wallet_name)
after = self.nodes[1].getbalances()['mine']['untrusted_pending']
assert_equal(before + Decimal('0.1'), after)
@@ -262,7 +261,7 @@ class WalletTest(BitcoinTestFramework):
# Now confirm tx_orig
self.restart_node(1, ['-persistmempool=0'])
- connect_nodes(self.nodes[0], 1)
+ self.connect_nodes(0, 1)
self.sync_blocks()
self.nodes[1].sendrawtransaction(tx_orig)
self.nodes[1].generatetoaddress(1, ADDRESS_WATCHONLY)
diff --git a/test/functional/wallet_basic.py b/test/functional/wallet_basic.py
index d9a8b58a84..d386d94e0c 100755
--- a/test/functional/wallet_basic.py
+++ b/test/functional/wallet_basic.py
@@ -11,8 +11,6 @@ from test_framework.util import (
assert_equal,
assert_fee_amount,
assert_raises_rpc_error,
- connect_nodes,
- wait_until,
)
from test_framework.wallet_util import test_address
@@ -33,9 +31,9 @@ class WalletTest(BitcoinTestFramework):
self.setup_nodes()
# Only need nodes 0-2 running at start of test
self.stop_node(3)
- connect_nodes(self.nodes[0], 1)
- connect_nodes(self.nodes[1], 2)
- connect_nodes(self.nodes[0], 2)
+ self.connect_nodes(0, 1)
+ self.connect_nodes(1, 2)
+ self.connect_nodes(0, 2)
self.sync_all(self.nodes[0:3])
def check_fee_amount(self, curr_balance, balance_with_fee, fee_per_byte, tx_size):
@@ -136,11 +134,19 @@ class WalletTest(BitcoinTestFramework):
self.nodes[2].lockunspent, False,
[{"txid": unspent_0["txid"], "vout": 999}])
- # An output should be unlocked when spent
+ # The lock on a manually selected output is ignored
unspent_0 = self.nodes[1].listunspent()[0]
self.nodes[1].lockunspent(False, [unspent_0])
tx = self.nodes[1].createrawtransaction([unspent_0], { self.nodes[1].getnewaddress() : 1 })
- tx = self.nodes[1].fundrawtransaction(tx)['hex']
+ self.nodes[1].fundrawtransaction(tx,{"lockUnspents": True})
+
+ # fundrawtransaction can lock an input
+ self.nodes[1].lockunspent(True, [unspent_0])
+ assert_equal(len(self.nodes[1].listlockunspent()), 0)
+ tx = self.nodes[1].fundrawtransaction(tx,{"lockUnspents": True})['hex']
+ assert_equal(len(self.nodes[1].listlockunspent()), 1)
+
+ # Send transaction
tx = self.nodes[1].signrawtransactionwithwallet(tx)["hex"]
self.nodes[1].sendrawtransaction(tx)
assert_equal(len(self.nodes[1].listlockunspent()), 0)
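
Taken together, the reworked block above exercises two behaviours: a lock on an explicitly pre-selected input does not block funding, and fundrawtransaction with the lockUnspents option locks the inputs it selects. A stand-alone sketch of the same RPC sequence (assuming `node` is a wallet-enabled regtest node with spendable coins):

# Illustrative RPC sequence only; `node` is assumed to be a funded regtest node.
utxo = node.listunspent()[0]
outpoint = [{"txid": utxo["txid"], "vout": utxo["vout"]}]

node.lockunspent(False, outpoint)                     # lock the coin
raw = node.createrawtransaction(outpoint, {node.getnewaddress(): 1})
node.fundrawtransaction(raw, {"lockUnspents": True})  # pre-selected input is used despite the lock

node.lockunspent(True, outpoint)                      # unlock again
assert len(node.listlockunspent()) == 0
node.fundrawtransaction(raw, {"lockUnspents": True})
assert len(node.listlockunspent()) >= 1               # selected inputs are now locked
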
@@ -274,7 +280,7 @@ class WalletTest(BitcoinTestFramework):
assert_equal(self.nodes[0].getbalance(), node_0_bal)
self.start_node(3, self.nodes[3].extra_args)
- connect_nodes(self.nodes[0], 3)
+ self.connect_nodes(0, 3)
self.sync_all()
# check if we can list zero value tx as available coins
@@ -309,9 +315,9 @@ class WalletTest(BitcoinTestFramework):
self.start_node(0, ["-walletbroadcast=0"])
self.start_node(1, ["-walletbroadcast=0"])
self.start_node(2, ["-walletbroadcast=0"])
- connect_nodes(self.nodes[0], 1)
- connect_nodes(self.nodes[1], 2)
- connect_nodes(self.nodes[0], 2)
+ self.connect_nodes(0, 1)
+ self.connect_nodes(1, 2)
+ self.connect_nodes(0, 2)
self.sync_all(self.nodes[0:3])
txid_not_broadcast = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 2)
@@ -336,9 +342,9 @@ class WalletTest(BitcoinTestFramework):
self.start_node(0)
self.start_node(1)
self.start_node(2)
- connect_nodes(self.nodes[0], 1)
- connect_nodes(self.nodes[1], 2)
- connect_nodes(self.nodes[0], 2)
+ self.connect_nodes(0, 1)
+ self.connect_nodes(1, 2)
+ self.connect_nodes(0, 2)
self.sync_blocks(self.nodes[0:3])
self.nodes[0].generate(1)
@@ -527,8 +533,6 @@ class WalletTest(BitcoinTestFramework):
maintenance = [
'-rescan',
'-reindex',
- '-zapwallettxes=1',
- '-zapwallettxes=2',
]
chainlimit = 6
for m in maintenance:
@@ -540,7 +544,7 @@ class WalletTest(BitcoinTestFramework):
self.start_node(2, [m, "-limitancestorcount=" + str(chainlimit)])
if m == '-reindex':
# reindex will leave rpc warm up "early"; Wait for it to finish
- wait_until(lambda: [block_count] * 3 == [self.nodes[i].getblockcount() for i in range(3)])
+ self.wait_until(lambda: [block_count] * 3 == [self.nodes[i].getblockcount() for i in range(3)])
assert_equal(balance_nodes, [self.nodes[i].getbalance() for i in range(3)])
# Exercise listsinceblock with the last two blocks
@@ -589,7 +593,10 @@ class WalletTest(BitcoinTestFramework):
self.start_node(0, extra_args=extra_args)
# wait until the wallet has submitted all transactions to the mempool
- wait_until(lambda: len(self.nodes[0].getrawmempool()) == chainlimit * 2)
+ self.wait_until(lambda: len(self.nodes[0].getrawmempool()) == chainlimit * 2)
+
+ # Prevent potential race condition when calling wallet RPCs right after restart
+ self.nodes[0].syncwithvalidationinterfacequeue()
node0_balance = self.nodes[0].getbalance()
# With walletrejectlongchains we will not create the tx and store it in our wallet.
@@ -653,6 +660,18 @@ class WalletTest(BitcoinTestFramework):
assert_array_result(tx["details"], {"category": "receive"}, expected_receive_vout)
assert_equal(tx[verbose_field], self.nodes[0].decoderawtransaction(tx["hex"]))
+ self.log.info("Test send* RPCs with verbose=True")
+ address = self.nodes[0].getnewaddress("test")
+ txid_feeReason_one = self.nodes[2].sendtoaddress(address=address, amount=5, verbose=True)
+ assert_equal(txid_feeReason_one["fee_reason"], "Fallback fee")
+ txid_feeReason_two = self.nodes[2].sendmany(dummy='', amounts={address: 5}, verbose=True)
+ assert_equal(txid_feeReason_two["fee_reason"], "Fallback fee")
+ self.log.info("Test send* RPCs with verbose=False")
+ txid_feeReason_three = self.nodes[2].sendtoaddress(address=address, amount=5, verbose=False)
+ assert_equal(self.nodes[2].gettransaction(txid_feeReason_three)['txid'], txid_feeReason_three)
+ txid_feeReason_four = self.nodes[2].sendmany(dummy='', amounts={address: 5}, verbose=False)
+ assert_equal(self.nodes[2].gettransaction(txid_feeReason_four)['txid'], txid_feeReason_four)
+
if __name__ == '__main__':
WalletTest().main()
diff --git a/test/functional/wallet_bumpfee.py b/test/functional/wallet_bumpfee.py
index 0ef78b0e1c..4b29e65b09 100755
--- a/test/functional/wallet_bumpfee.py
+++ b/test/functional/wallet_bumpfee.py
@@ -50,6 +50,11 @@ class BumpFeeTest(BitcoinTestFramework):
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
+ def clear_mempool(self):
+ # Clear mempool between subtests. The subtests may only depend on chainstate (utxos)
+ self.nodes[1].generate(1)
+ self.sync_all()
+
def run_test(self):
# Encrypt wallet for test_locked_wallet_fails test
self.nodes[1].encryptwallet(WALLET_PASSPHRASE)
@@ -71,7 +76,7 @@ class BumpFeeTest(BitcoinTestFramework):
self.log.info("Running tests")
dest_address = peer_node.getnewaddress()
- test_invalid_parameters(rbf_node, dest_address)
+ self.test_invalid_parameters(rbf_node, dest_address)
test_simple_bumpfee_succeeds(self, "default", rbf_node, peer_node, dest_address)
test_simple_bumpfee_succeeds(self, "fee_rate", rbf_node, peer_node, dest_address)
test_feerate_args(self, rbf_node, peer_node, dest_address)
@@ -93,28 +98,30 @@ class BumpFeeTest(BitcoinTestFramework):
test_small_output_with_feerate_succeeds(self, rbf_node, dest_address)
test_no_more_inputs_fails(self, rbf_node, dest_address)
-def test_invalid_parameters(node, dest_address):
- txid = spend_one_input(node, dest_address)
- # invalid estimate mode
- assert_raises_rpc_error(-8, "Invalid estimate_mode parameter", node.bumpfee, txid, {
- "estimate_mode": "moo",
- })
- assert_raises_rpc_error(-3, "Expected type string", node.bumpfee, txid, {
- "estimate_mode": 38,
- })
- assert_raises_rpc_error(-3, "Expected type string", node.bumpfee, txid, {
- "estimate_mode": {
- "foo": "bar",
- },
- })
- assert_raises_rpc_error(-8, "Invalid estimate_mode parameter", node.bumpfee, txid, {
- "estimate_mode": Decimal("3.141592"),
- })
- # confTarget and conf_target
- assert_raises_rpc_error(-8, "confTarget and conf_target options should not both be set", node.bumpfee, txid, {
- "confTarget": 123,
- "conf_target": 456,
- })
+ def test_invalid_parameters(self, node, dest_address):
+ txid = spend_one_input(node, dest_address)
+ # invalid estimate mode
+ assert_raises_rpc_error(-8, "Invalid estimate_mode parameter", node.bumpfee, txid, {
+ "estimate_mode": "moo",
+ })
+ assert_raises_rpc_error(-3, "Expected type string", node.bumpfee, txid, {
+ "estimate_mode": 38,
+ })
+ assert_raises_rpc_error(-3, "Expected type string", node.bumpfee, txid, {
+ "estimate_mode": {
+ "foo": "bar",
+ },
+ })
+ assert_raises_rpc_error(-8, "Invalid estimate_mode parameter", node.bumpfee, txid, {
+ "estimate_mode": Decimal("3.141592"),
+ })
+ # confTarget and conf_target
+ assert_raises_rpc_error(-8, "confTarget and conf_target options should not both be set", node.bumpfee, txid, {
+ "confTarget": 123,
+ "conf_target": 456,
+ })
+ self.clear_mempool()
+
def test_simple_bumpfee_succeeds(self, mode, rbf_node, peer_node, dest_address):
self.log.info('Test simple bumpfee: {}'.format(mode))
@@ -123,13 +130,19 @@ def test_simple_bumpfee_succeeds(self, mode, rbf_node, peer_node, dest_address):
self.sync_mempools((rbf_node, peer_node))
assert rbfid in rbf_node.getrawmempool() and rbfid in peer_node.getrawmempool()
if mode == "fee_rate":
+ bumped_psbt = rbf_node.psbtbumpfee(rbfid, {"fee_rate": NORMAL})
bumped_tx = rbf_node.bumpfee(rbfid, {"fee_rate": NORMAL})
else:
+ bumped_psbt = rbf_node.psbtbumpfee(rbfid)
bumped_tx = rbf_node.bumpfee(rbfid)
assert_equal(bumped_tx["errors"], [])
assert bumped_tx["fee"] > -rbftx["fee"]
assert_equal(bumped_tx["origfee"], -rbftx["fee"])
assert "psbt" not in bumped_tx
+ assert_equal(bumped_psbt["errors"], [])
+ assert bumped_psbt["fee"] > -rbftx["fee"]
+ assert_equal(bumped_psbt["origfee"], -rbftx["fee"])
+ assert "psbt" in bumped_psbt
# check that bumped_tx propagates, original tx was evicted and has a wallet conflict
self.sync_mempools((rbf_node, peer_node))
assert bumped_tx["txid"] in rbf_node.getrawmempool()
@@ -142,6 +155,7 @@ def test_simple_bumpfee_succeeds(self, mode, rbf_node, peer_node, dest_address):
bumpedwtx = rbf_node.gettransaction(bumped_tx["txid"])
assert_equal(oldwtx["replaced_by_txid"], bumped_tx["txid"])
assert_equal(bumpedwtx["replaces_txid"], rbfid)
+ self.clear_mempool()
def test_feerate_args(self, rbf_node, peer_node, dest_address):
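
The hunk above pairs each bumpfee call with a psbtbumpfee call; the difference, as exercised by the new assertions, is that psbtbumpfee builds a fee-bumped PSBT without signing or broadcasting, while bumpfee replaces the transaction directly. A minimal illustrative sequence (assuming `node` holds an unconfirmed BIP 125-replaceable wallet transaction `txid`):

# Illustrative only; `node` and `txid` are assumed from the surrounding test.
bumped_psbt = node.psbtbumpfee(txid)   # fee-bumped PSBT, original tx stays in the mempool
assert 'psbt' in bumped_psbt and bumped_psbt['errors'] == []

bumped_tx = node.bumpfee(txid)         # signs and broadcasts the replacement
assert 'psbt' not in bumped_tx
assert bumped_tx['txid'] in node.getrawmempool()
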
@@ -161,6 +175,7 @@ def test_feerate_args(self, rbf_node, peer_node, dest_address):
assert_raises_rpc_error(-3, "Amount out of range", rbf_node.bumpfee, rbfid, {"fee_rate": -1})
assert_raises_rpc_error(-4, "is too high (cannot be higher than", rbf_node.bumpfee, rbfid, {"fee_rate": TOO_HIGH})
+ self.clear_mempool()
def test_segwit_bumpfee_succeeds(self, rbf_node, dest_address):
@@ -192,12 +207,14 @@ def test_segwit_bumpfee_succeeds(self, rbf_node, dest_address):
bumped_tx = rbf_node.bumpfee(rbfid)
assert bumped_tx["txid"] in rbf_node.getrawmempool()
assert rbfid not in rbf_node.getrawmempool()
+ self.clear_mempool()
def test_nonrbf_bumpfee_fails(self, peer_node, dest_address):
self.log.info('Test that we cannot replace a non RBF transaction')
not_rbfid = peer_node.sendtoaddress(dest_address, Decimal("0.00090000"))
assert_raises_rpc_error(-4, "not BIP 125 replaceable", peer_node.bumpfee, not_rbfid)
+ self.clear_mempool()
def test_notmine_bumpfee_fails(self, rbf_node, peer_node, dest_address):
@@ -205,20 +222,22 @@ def test_notmine_bumpfee_fails(self, rbf_node, peer_node, dest_address):
# here, the rbftx has a peer_node coin and then adds a rbf_node input
# Note that this test depends upon the RPC code checking input ownership prior to change outputs
# (since it can't use fundrawtransaction, it lacks a proper change output)
- utxos = [node.listunspent()[-1] for node in (rbf_node, peer_node)]
+ fee = Decimal("0.001")
+ utxos = [node.listunspent(query_options={'minimumAmount': fee})[-1] for node in (rbf_node, peer_node)]
inputs = [{
"txid": utxo["txid"],
"vout": utxo["vout"],
"address": utxo["address"],
"sequence": BIP125_SEQUENCE_NUMBER
} for utxo in utxos]
- output_val = sum(utxo["amount"] for utxo in utxos) - Decimal("0.001")
+ output_val = sum(utxo["amount"] for utxo in utxos) - fee
rawtx = rbf_node.createrawtransaction(inputs, {dest_address: output_val})
signedtx = rbf_node.signrawtransactionwithwallet(rawtx)
signedtx = peer_node.signrawtransactionwithwallet(signedtx["hex"])
rbfid = rbf_node.sendrawtransaction(signedtx["hex"])
assert_raises_rpc_error(-4, "Transaction contains inputs that don't belong to this wallet",
rbf_node.bumpfee, rbfid)
+ self.clear_mempool()
def test_bumpfee_with_descendant_fails(self, rbf_node, rbf_node_address, dest_address):
@@ -229,6 +248,7 @@ def test_bumpfee_with_descendant_fails(self, rbf_node, rbf_node_address, dest_ad
tx = rbf_node.signrawtransactionwithwallet(tx)
rbf_node.sendrawtransaction(tx["hex"])
assert_raises_rpc_error(-8, "Transaction has descendants in the wallet", rbf_node.bumpfee, parent_id)
+ self.clear_mempool()
def test_small_output_with_feerate_succeeds(self, rbf_node, dest_address):
@@ -270,6 +290,7 @@ def test_small_output_with_feerate_succeeds(self, rbf_node, dest_address):
rbf_node.generatetoaddress(1, rbf_node.getnewaddress())
assert_equal(rbf_node.gettransaction(rbfid)["confirmations"], 1)
+ self.clear_mempool()
def test_dust_to_fee(self, rbf_node, dest_address):
@@ -292,6 +313,7 @@ def test_dust_to_fee(self, rbf_node, dest_address):
assert_equal(len(fulltx["vout"]), 2)
assert_equal(len(full_bumped_tx["vout"]), 1) # change output is eliminated
assert_equal(full_bumped_tx["vout"][0]['value'], Decimal("0.00050000"))
+ self.clear_mempool()
def test_settxfee(self, rbf_node, dest_address):
@@ -314,6 +336,8 @@ def test_settxfee(self, rbf_node, dest_address):
assert_raises_rpc_error(-8, "txfee cannot be more than wallet max tx fee", rbf_node.settxfee, Decimal('0.00003'))
self.restart_node(1, self.extra_args[1])
rbf_node.walletpassphrase(WALLET_PASSPHRASE, WALLET_PASSPHRASE_TIMEOUT)
+ self.connect_nodes(1, 0)
+ self.clear_mempool()
def test_maxtxfee_fails(self, rbf_node, dest_address):
@@ -324,9 +348,11 @@ def test_maxtxfee_fails(self, rbf_node, dest_address):
self.restart_node(1, ['-maxtxfee=0.000025'] + self.extra_args[1])
rbf_node.walletpassphrase(WALLET_PASSPHRASE, WALLET_PASSPHRASE_TIMEOUT)
rbfid = spend_one_input(rbf_node, dest_address)
- assert_raises_rpc_error(-4, "Unable to create transaction. Fee exceeds maximum configured by -maxtxfee", rbf_node.bumpfee, rbfid)
+ assert_raises_rpc_error(-4, "Unable to create transaction. Fee exceeds maximum configured by user (e.g. -maxtxfee, maxfeerate)", rbf_node.bumpfee, rbfid)
self.restart_node(1, self.extra_args[1])
rbf_node.walletpassphrase(WALLET_PASSPHRASE, WALLET_PASSPHRASE_TIMEOUT)
+ self.connect_nodes(1, 0)
+ self.clear_mempool()
def test_watchonly_psbt(self, peer_node, rbf_node, dest_address):
@@ -391,7 +417,7 @@ def test_watchonly_psbt(self, peer_node, rbf_node, dest_address):
assert_equal(len(watcher.decodepsbt(psbt)["tx"]["vin"]), 1)
# Bump fee, obnoxiously high to add additional watchonly input
- bumped_psbt = watcher.bumpfee(original_txid, {"fee_rate": HIGH})
+ bumped_psbt = watcher.psbtbumpfee(original_txid, {"fee_rate": HIGH})
assert_greater_than(len(watcher.decodepsbt(bumped_psbt['psbt'])["tx"]["vin"]), 1)
assert "txid" not in bumped_psbt
assert_equal(bumped_psbt["origfee"], -watcher.gettransaction(original_txid)["fee"])
@@ -409,6 +435,7 @@ def test_watchonly_psbt(self, peer_node, rbf_node, dest_address):
rbf_node.unloadwallet("watcher")
rbf_node.unloadwallet("signer")
+ self.clear_mempool()
def test_rebumping(self, rbf_node, dest_address):
@@ -417,6 +444,7 @@ def test_rebumping(self, rbf_node, dest_address):
bumped = rbf_node.bumpfee(rbfid, {"fee_rate": ECONOMICAL})
assert_raises_rpc_error(-4, "already bumped", rbf_node.bumpfee, rbfid, {"fee_rate": NORMAL})
rbf_node.bumpfee(bumped["txid"], {"fee_rate": NORMAL})
+ self.clear_mempool()
def test_rebumping_not_replaceable(self, rbf_node, dest_address):
@@ -425,6 +453,7 @@ def test_rebumping_not_replaceable(self, rbf_node, dest_address):
bumped = rbf_node.bumpfee(rbfid, {"fee_rate": ECONOMICAL, "replaceable": False})
assert_raises_rpc_error(-4, "Transaction is not BIP 125 replaceable", rbf_node.bumpfee, bumped["txid"],
{"fee_rate": NORMAL})
+ self.clear_mempool()
def test_unconfirmed_not_spendable(self, rbf_node, rbf_node_address):
@@ -464,6 +493,7 @@ def test_unconfirmed_not_spendable(self, rbf_node, rbf_node_address):
assert_equal(
sum(1 for t in rbf_node.listunspent(minconf=0, include_unsafe=False)
if t["txid"] == rbfid and t["address"] == rbf_node_address and t["spendable"]), 1)
+ self.clear_mempool()
def test_bumpfee_metadata(self, rbf_node, dest_address):
@@ -475,6 +505,7 @@ def test_bumpfee_metadata(self, rbf_node, dest_address):
bumped_wtx = rbf_node.gettransaction(bumped_tx["txid"])
assert_equal(bumped_wtx["comment"], "comment value")
assert_equal(bumped_wtx["to"], "to value")
+ self.clear_mempool()
def test_locked_wallet_fails(self, rbf_node, dest_address):
@@ -484,6 +515,7 @@ def test_locked_wallet_fails(self, rbf_node, dest_address):
assert_raises_rpc_error(-13, "Please enter the wallet passphrase with walletpassphrase first.",
rbf_node.bumpfee, rbfid)
rbf_node.walletpassphrase(WALLET_PASSPHRASE, WALLET_PASSPHRASE_TIMEOUT)
+ self.clear_mempool()
def test_change_script_match(self, rbf_node, dest_address):
@@ -504,6 +536,7 @@ def test_change_script_match(self, rbf_node, dest_address):
assert_equal(change_addresses, get_change_address(bumped_total_tx['txid']))
bumped_rate_tx = rbf_node.bumpfee(bumped_total_tx["txid"])
assert_equal(change_addresses, get_change_address(bumped_rate_tx['txid']))
+ self.clear_mempool()
def spend_one_input(node, dest_address, change_size=Decimal("0.00049000")):
@@ -542,6 +575,7 @@ def test_no_more_inputs_fails(self, rbf_node, dest_address):
# spend all funds, no change output
rbfid = rbf_node.sendtoaddress(rbf_node.getnewaddress(), rbf_node.getbalance(), "", "", True)
assert_raises_rpc_error(-4, "Unable to create transaction. Insufficient funds", rbf_node.bumpfee, rbfid)
+ self.clear_mempool()
if __name__ == "__main__":
diff --git a/test/functional/wallet_create_tx.py b/test/functional/wallet_create_tx.py
index ed9159726a..0f11aca525 100755
--- a/test/functional/wallet_create_tx.py
+++ b/test/functional/wallet_create_tx.py
@@ -53,12 +53,12 @@ class CreateTxWalletTest(BitcoinTestFramework):
self.restart_node(0, extra_args=[fee_setting])
assert_raises_rpc_error(
-6,
- "Fee exceeds maximum configured by -maxtxfee",
+ "Fee exceeds maximum configured by user (e.g. -maxtxfee, maxfeerate)",
lambda: self.nodes[0].sendmany(dummy="", amounts=outputs),
)
assert_raises_rpc_error(
-4,
- "Fee exceeds maximum configured by -maxtxfee",
+ "Fee exceeds maximum configured by user (e.g. -maxtxfee, maxfeerate)",
lambda: self.nodes[0].fundrawtransaction(hexstring=raw_tx),
)
@@ -67,12 +67,12 @@ class CreateTxWalletTest(BitcoinTestFramework):
self.nodes[0].settxfee(0.01)
assert_raises_rpc_error(
-6,
- "Fee exceeds maximum configured by -maxtxfee",
+ "Fee exceeds maximum configured by user (e.g. -maxtxfee, maxfeerate)",
lambda: self.nodes[0].sendmany(dummy="", amounts=outputs),
)
assert_raises_rpc_error(
-4,
- "Fee exceeds maximum configured by -maxtxfee",
+ "Fee exceeds maximum configured by user (e.g. -maxtxfee, maxfeerate)",
lambda: self.nodes[0].fundrawtransaction(hexstring=raw_tx),
)
self.nodes[0].settxfee(0)
diff --git a/test/functional/wallet_descriptor.py b/test/functional/wallet_descriptor.py
index 9c63e8f7d3..51e9ec8f40 100755
--- a/test/functional/wallet_descriptor.py
+++ b/test/functional/wallet_descriptor.py
@@ -21,14 +21,18 @@ class WalletDescriptorTest(BitcoinTestFramework):
self.skip_if_no_wallet()
def run_test(self):
+ wallet_info = self.nodes[0].getwalletinfo()
+ assert_equal(wallet_info['format'], 'bdb')
+
# Make a descriptor wallet
self.log.info("Making a descriptor wallet")
self.nodes[0].createwallet(wallet_name="desc1", descriptors=True)
- self.nodes[0].unloadwallet("")
+ self.nodes[0].unloadwallet(self.default_wallet_name)
# A descriptor wallet should have 100 addresses * 3 types = 300 keys
self.log.info("Checking wallet info")
wallet_info = self.nodes[0].getwalletinfo()
+ assert_equal(wallet_info['format'], 'sqlite')
assert_equal(wallet_info['keypoolsize'], 300)
assert_equal(wallet_info['keypoolsize_hd_internal'], 300)
assert 'keypoololdest' not in wallet_info
diff --git a/test/functional/wallet_disable.py b/test/functional/wallet_disable.py
index 7c2ec56b5a..c2b30fb35b 100755
--- a/test/functional/wallet_disable.py
+++ b/test/functional/wallet_disable.py
@@ -16,6 +16,7 @@ class DisableWalletTest (BitcoinTestFramework):
self.setup_clean_chain = True
self.num_nodes = 1
self.extra_args = [["-disablewallet"]]
+ self.wallet_names = []
def run_test (self):
# Make sure wallet is really disabled
diff --git a/test/functional/wallet_dump.py b/test/functional/wallet_dump.py
index 06f01ef191..09581d864b 100755
--- a/test/functional/wallet_dump.py
+++ b/test/functional/wallet_dump.py
@@ -95,7 +95,7 @@ def read_dump(file_name, addrs, script_addrs, hd_master_addr_old):
class WalletDumpTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
- self.extra_args = [["-keypool=90", "-addresstype=legacy"]]
+ self.extra_args = [["-keypool=90", "-addresstype=legacy", "-wallet=dump"]]
self.rpc_timeout = 120
def skip_test_if_missing_module(self):
diff --git a/test/functional/wallet_groups.py b/test/functional/wallet_groups.py
index b6fe295127..e5c4f12f20 100755
--- a/test/functional/wallet_groups.py
+++ b/test/functional/wallet_groups.py
@@ -15,8 +15,14 @@ from test_framework.util import (
class WalletGroupTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
- self.num_nodes = 3
- self.extra_args = [[], [], ['-avoidpartialspends']]
+ self.num_nodes = 5
+ self.extra_args = [
+ [],
+ [],
+ ["-avoidpartialspends"],
+ ["-maxapsfee=0.00002719"],
+ ["-maxapsfee=0.00002720"],
+ ]
self.rpc_timeout = 480
def skip_test_if_missing_module(self):
@@ -50,8 +56,8 @@ class WalletGroupTest(BitcoinTestFramework):
# one output should be 0.2, the other should be ~0.3
v = [vout["value"] for vout in tx1["vout"]]
v.sort()
- assert_approx(v[0], 0.2)
- assert_approx(v[1], 0.3, 0.0001)
+ assert_approx(v[0], vexp=0.2, vspan=0.0001)
+ assert_approx(v[1], vexp=0.3, vspan=0.0001)
txid2 = self.nodes[2].sendtoaddress(self.nodes[0].getnewaddress(), 0.2)
tx2 = self.nodes[2].getrawtransaction(txid2, True)
@@ -61,8 +67,80 @@ class WalletGroupTest(BitcoinTestFramework):
# one output should be 0.2, the other should be ~1.3
v = [vout["value"] for vout in tx2["vout"]]
v.sort()
- assert_approx(v[0], 0.2)
- assert_approx(v[1], 1.3, 0.0001)
+ assert_approx(v[0], vexp=0.2, vspan=0.0001)
+ assert_approx(v[1], vexp=1.3, vspan=0.0001)
+
+ # Test 'avoid partial if warranted, even if disabled'
+ self.sync_all()
+ self.nodes[0].generate(1)
+ # Nodes 1-2 now have confirmed UTXOs (letters denote destinations):
+ # Node #1: Node #2:
+ # - A 1.0 - D0 1.0
+ # - B0 1.0 - D1 0.5
+ # - B1 0.5 - E0 1.0
+ # - C0 1.0 - E1 0.5
+ # - C1 0.5 - F ~1.3
+ # - D ~0.3
+ assert_approx(self.nodes[1].getbalance(), vexp=4.3, vspan=0.0001)
+ assert_approx(self.nodes[2].getbalance(), vexp=4.3, vspan=0.0001)
+ # Sending 1.4 btc should pick one 1.0 + one more. For node #1,
+ # this could be (A / B0 / C0) + (B1 / C1 / D). We ensure that it is
+ # B0 + B1 or C0 + C1, because this avoids partial spends while not being
+ # detrimental to transaction cost
+ txid3 = self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 1.4)
+ tx3 = self.nodes[1].getrawtransaction(txid3, True)
+ # tx3 should have 2 inputs and 2 outputs
+ assert_equal(2, len(tx3["vin"]))
+ assert_equal(2, len(tx3["vout"]))
+ # the accumulated value should be 1.5, so the outputs should be
+ # ~0.1 and 1.4 and should come from the same destination
+ values = [vout["value"] for vout in tx3["vout"]]
+ values.sort()
+ assert_approx(values[0], vexp=0.1, vspan=0.0001)
+ assert_approx(values[1], vexp=1.4, vspan=0.0001)
+
+ input_txids = [vin["txid"] for vin in tx3["vin"]]
+ input_addrs = [self.nodes[1].gettransaction(txid)['details'][0]['address'] for txid in input_txids]
+ assert_equal(input_addrs[0], input_addrs[1])
+ # Node 2 enforces avoidpartialspends so needs no checking here
+
+ # Test wallet option maxapsfee with Node 3
+ addr_aps = self.nodes[3].getnewaddress()
+ self.nodes[0].sendtoaddress(addr_aps, 1.0)
+ self.nodes[0].sendtoaddress(addr_aps, 1.0)
+ self.nodes[0].generate(1)
+ self.sync_all()
+ with self.nodes[3].assert_debug_log(['Fee non-grouped = 2820, grouped = 4160, using grouped']):
+ txid4 = self.nodes[3].sendtoaddress(self.nodes[0].getnewaddress(), 0.1)
+ tx4 = self.nodes[3].getrawtransaction(txid4, True)
+ # tx4 should have 2 inputs and 2 outputs although one output would
+ # have been enough and the transaction caused higher fees
+ assert_equal(2, len(tx4["vin"]))
+ assert_equal(2, len(tx4["vout"]))
+
+ addr_aps2 = self.nodes[3].getnewaddress()
+ [self.nodes[0].sendtoaddress(addr_aps2, 1.0) for _ in range(5)]
+ self.nodes[0].generate(1)
+ self.sync_all()
+ with self.nodes[3].assert_debug_log(['Fee non-grouped = 5520, grouped = 8240, using non-grouped']):
+ txid5 = self.nodes[3].sendtoaddress(self.nodes[0].getnewaddress(), 2.95)
+ tx5 = self.nodes[3].getrawtransaction(txid5, True)
+ # tx5 should have 3 inputs (1.0, 1.0, 1.0) and 2 outputs
+ assert_equal(3, len(tx5["vin"]))
+ assert_equal(2, len(tx5["vout"]))
+
+ # Test wallet option maxapsfee with node 4, which sets maxapsfee
+ # 1 sat higher, crossing the threshold from non-grouped to grouped.
+ addr_aps3 = self.nodes[4].getnewaddress()
+ [self.nodes[0].sendtoaddress(addr_aps3, 1.0) for _ in range(5)]
+ self.nodes[0].generate(1)
+ self.sync_all()
+ with self.nodes[4].assert_debug_log(['Fee non-grouped = 5520, grouped = 8240, using grouped']):
+ txid6 = self.nodes[4].sendtoaddress(self.nodes[0].getnewaddress(), 2.95)
+ tx6 = self.nodes[4].getrawtransaction(txid6, True)
+ # tx6 should have 5 inputs and 2 outputs
+ assert_equal(5, len(tx6["vin"]))
+ assert_equal(2, len(tx6["vout"]))
# Empty out node2's wallet
self.nodes[2].sendtoaddress(address=self.nodes[0].getnewaddress(), amount=self.nodes[2].getbalance(), subtractfeefromamount=True)
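
The two debug-log assertions above pin down the threshold being tested: grouping the five 1.0 BTC outputs costs 8240 - 5520 = 2720 extra satoshis, so node 3's -maxapsfee=0.00002719 falls one satoshi short while node 4's -maxapsfee=0.00002720 just covers it. In sketch form:

# Worked figures from the assert_debug_log strings above (all in satoshis).
fee_non_grouped = 5520
fee_grouped = 8240
aps_extra_fee = fee_grouped - fee_non_grouped
assert aps_extra_fee == 2720                  # == 0.00002720 BTC

# node 3: -maxapsfee=0.00002719 -> 2719 sat budget -> "using non-grouped"
# node 4: -maxapsfee=0.00002720 -> 2720 sat budget -> "using grouped"
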
diff --git a/test/functional/wallet_hd.py b/test/functional/wallet_hd.py
index 3c336623e2..d45cf05689 100755
--- a/test/functional/wallet_hd.py
+++ b/test/functional/wallet_hd.py
@@ -10,7 +10,6 @@ import shutil
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
- connect_nodes,
assert_raises_rpc_error,
)
@@ -84,7 +83,7 @@ class WalletHDTest(BitcoinTestFramework):
shutil.rmtree(os.path.join(self.nodes[1].datadir, self.chain, "chainstate"))
shutil.copyfile(
os.path.join(self.nodes[1].datadir, "hd.bak"),
- os.path.join(self.nodes[1].datadir, self.chain, 'wallets', "wallet.dat"),
+ os.path.join(self.nodes[1].datadir, self.chain, 'wallets', self.default_wallet_name, self.wallet_data_filename),
)
self.start_node(1)
@@ -99,7 +98,7 @@ class WalletHDTest(BitcoinTestFramework):
assert_equal(hd_info_2["hdkeypath"], "m/0'/0'/" + str(i) + "'")
assert_equal(hd_info_2["hdmasterfingerprint"], hd_fingerprint)
assert_equal(hd_add, hd_add_2)
- connect_nodes(self.nodes[0], 1)
+ self.connect_nodes(0, 1)
self.sync_all()
# Needs rescan
@@ -112,10 +111,10 @@ class WalletHDTest(BitcoinTestFramework):
shutil.rmtree(os.path.join(self.nodes[1].datadir, self.chain, "chainstate"))
shutil.copyfile(
os.path.join(self.nodes[1].datadir, "hd.bak"),
- os.path.join(self.nodes[1].datadir, self.chain, "wallets", "wallet.dat"),
+ os.path.join(self.nodes[1].datadir, self.chain, "wallets", self.default_wallet_name, self.wallet_data_filename),
)
self.start_node(1, extra_args=self.extra_args[1])
- connect_nodes(self.nodes[0], 1)
+ self.connect_nodes(0, 1)
self.sync_all()
# Wallet automatically scans blocks older than key on startup
assert_equal(self.nodes[1].getbalance(), NUM_HD_ADDS + 1)
@@ -183,7 +182,7 @@ class WalletHDTest(BitcoinTestFramework):
# Restart node 1 with keypool of 3 and a different wallet
self.nodes[1].createwallet(wallet_name='origin', blank=True)
self.restart_node(1, extra_args=['-keypool=3', '-wallet=origin'])
- connect_nodes(self.nodes[0], 1)
+ self.connect_nodes(0, 1)
# sethdseed restoring and seeing txs to addresses out of the keypool
origin_rpc = self.nodes[1].get_wallet_rpc('origin')
diff --git a/test/functional/wallet_import_rescan.py b/test/functional/wallet_import_rescan.py
index 4ff7f1d525..aad112b499 100755
--- a/test/functional/wallet_import_rescan.py
+++ b/test/functional/wallet_import_rescan.py
@@ -22,7 +22,6 @@ happened previously.
from test_framework.test_framework import BitcoinTestFramework
from test_framework.address import AddressType
from test_framework.util import (
- connect_nodes,
assert_equal,
set_node_times,
)
@@ -160,13 +159,12 @@ class ImportRescanTest(BitcoinTestFramework):
# Import keys with pruning disabled
self.start_nodes(extra_args=[[]] * self.num_nodes)
- for n in self.nodes:
- n.importprivkey(privkey=n.get_deterministic_priv_key().key, label='coinbase')
+ self.import_deterministic_coinbase_privkeys()
self.stop_nodes()
self.start_nodes()
for i in range(1, self.num_nodes):
- connect_nodes(self.nodes[i], 0)
+ self.connect_nodes(i, 0)
def run_test(self):
# Create one transaction on node 0 with a unique amount for
@@ -183,6 +181,7 @@ class ImportRescanTest(BitcoinTestFramework):
self.nodes[0].generate(1) # Generate one block for each send
variant.confirmation_height = self.nodes[0].getblockcount()
variant.timestamp = self.nodes[0].getblockheader(self.nodes[0].getbestblockhash())["time"]
+ self.sync_all() # Conclude sync before calling setmocktime to avoid timeouts
# Generate a block further in the future (past the rescan window).
assert_equal(self.nodes[0].getrawmempool(), [])
diff --git a/test/functional/wallet_importmulti.py b/test/functional/wallet_importmulti.py
index bd4fcdabcf..f7fdd6e908 100755
--- a/test/functional/wallet_importmulti.py
+++ b/test/functional/wallet_importmulti.py
@@ -820,7 +820,7 @@ class ImportMultiTest(BitcoinTestFramework):
# Cannot import those pubkeys to keypool of wallet with privkeys
self.log.info("Pubkeys cannot be added to the keypool of a wallet with private keys")
- wrpc = self.nodes[1].get_wallet_rpc("")
+ wrpc = self.nodes[1].get_wallet_rpc(self.default_wallet_name)
assert wrpc.getwalletinfo()['private_keys_enabled']
result = wrpc.importmulti(
[{
diff --git a/test/functional/wallet_keypool.py b/test/functional/wallet_keypool.py
index 40a2b3ab6a..51795aca23 100755
--- a/test/functional/wallet_keypool.py
+++ b/test/functional/wallet_keypool.py
@@ -143,7 +143,7 @@ class KeyPoolTest(BitcoinTestFramework):
w2 = nodes[0].get_wallet_rpc('w2')
# refer to initial wallet as w1
- w1 = nodes[0].get_wallet_rpc('')
+ w1 = nodes[0].get_wallet_rpc(self.default_wallet_name)
# import private key and fund it
address = addr.pop()
diff --git a/test/functional/wallet_keypool_topup.py b/test/functional/wallet_keypool_topup.py
index 102ed23fba..78e06c5916 100755
--- a/test/functional/wallet_keypool_topup.py
+++ b/test/functional/wallet_keypool_topup.py
@@ -16,7 +16,6 @@ import shutil
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
- connect_nodes,
)
@@ -30,7 +29,7 @@ class KeypoolRestoreTest(BitcoinTestFramework):
self.skip_if_no_wallet()
def run_test(self):
- wallet_path = os.path.join(self.nodes[1].datadir, self.chain, "wallets", "wallet.dat")
+ wallet_path = os.path.join(self.nodes[1].datadir, self.chain, "wallets", self.default_wallet_name, self.wallet_data_filename)
wallet_backup_path = os.path.join(self.nodes[1].datadir, "wallet.bak")
self.nodes[0].generate(101)
@@ -38,9 +37,9 @@ class KeypoolRestoreTest(BitcoinTestFramework):
self.stop_node(1)
shutil.copyfile(wallet_path, wallet_backup_path)
self.start_node(1, self.extra_args[1])
- connect_nodes(self.nodes[0], 1)
- connect_nodes(self.nodes[0], 2)
- connect_nodes(self.nodes[0], 3)
+ self.connect_nodes(0, 1)
+ self.connect_nodes(0, 2)
+ self.connect_nodes(0, 3)
for i, output_type in enumerate(["legacy", "p2sh-segwit", "bech32"]):
@@ -72,7 +71,7 @@ class KeypoolRestoreTest(BitcoinTestFramework):
self.stop_node(idx)
shutil.copyfile(wallet_backup_path, wallet_path)
self.start_node(idx, self.extra_args[idx])
- connect_nodes(self.nodes[0], idx)
+ self.connect_nodes(0, idx)
self.sync_all()
self.log.info("Verify keypool is restored and balance is correct")
diff --git a/test/functional/wallet_listsinceblock.py b/test/functional/wallet_listsinceblock.py
index 6d51ca6c93..09a336b764 100755
--- a/test/functional/wallet_listsinceblock.py
+++ b/test/functional/wallet_listsinceblock.py
@@ -10,7 +10,6 @@ from test_framework.util import (
assert_array_result,
assert_equal,
assert_raises_rpc_error,
- connect_nodes,
)
from decimal import Decimal
@@ -26,7 +25,7 @@ class ListSinceBlockTest(BitcoinTestFramework):
def run_test(self):
# All nodes are in IBD from genesis, so they'll need the miner (node2) to be an outbound connection, or have
# only one connection. (See fPreferredDownload in net_processing)
- connect_nodes(self.nodes[1], 2)
+ self.connect_nodes(1, 2)
self.nodes[2].generate(101)
self.sync_all()
@@ -36,6 +35,7 @@ class ListSinceBlockTest(BitcoinTestFramework):
self.test_double_spend()
self.test_double_send()
self.double_spends_filtered()
+ self.test_targetconfirmations()
def test_no_blockhash(self):
self.log.info("Test no blockhash")
@@ -74,6 +74,27 @@ class ListSinceBlockTest(BitcoinTestFramework):
assert_raises_rpc_error(-8, "blockhash must be hexadecimal string (not 'Z000000000000000000000000000000000000000000000000000000000000000')", self.nodes[0].listsinceblock,
"Z000000000000000000000000000000000000000000000000000000000000000")
+ def test_targetconfirmations(self):
+ '''
+ This tests the case where the value of target_confirmations exceeds the
+ number of blocks in the main chain. In that case, the genesis block hash
+ should be given for the `lastblock` property. If target_confirmations is
+ < 1, a -8 invalid parameter error is thrown.
+ '''
+ self.log.info("Test target_confirmations")
+ blockhash, = self.nodes[2].generate(1)
+ blockheight = self.nodes[2].getblockheader(blockhash)['height']
+ self.sync_all()
+
+ assert_equal(
+ self.nodes[0].getblockhash(0),
+ self.nodes[0].listsinceblock(blockhash, blockheight + 1)['lastblock'])
+ assert_equal(
+ self.nodes[0].getblockhash(0),
+ self.nodes[0].listsinceblock(blockhash, blockheight + 1000)['lastblock'])
+ assert_raises_rpc_error(-8, "Invalid parameter",
+ self.nodes[0].listsinceblock, blockhash, 0)
+
def test_reorg(self):
'''
`listsinceblock` did not behave correctly when handed a block that was
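The test_targetconfirmations docstring above describes how `listsinceblock` falls back to the genesis block hash once target_confirmations exceeds the chain height. A minimal sketch of the height arithmetic those assertions imply (the helper name is illustrative, not part of the patch):

def expected_lastblock(node, target_confirmations):
    # target_confirmations < 1 is rejected by the RPC with a -8 "Invalid parameter" error
    if target_confirmations < 1:
        raise ValueError("target_confirmations must be >= 1")
    tip_height = node.getblockcount()
    # lastblock points target_confirmations - 1 blocks below the tip,
    # clamped to the genesis block when the chain is shorter than that
    return node.getblockhash(max(tip_height - target_confirmations + 1, 0))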
diff --git a/test/functional/wallet_multiwallet.py b/test/functional/wallet_multiwallet.py
index 1872545cdb..61791a756c 100755
--- a/test/functional/wallet_multiwallet.py
+++ b/test/functional/wallet_multiwallet.py
@@ -21,8 +21,6 @@ from test_framework.util import (
get_rpc_proxy,
)
-FEATURE_LATEST = 169900
-
got_loading_error = False
def test_load_unload(node, name):
global got_loading_error
@@ -62,15 +60,17 @@ class MultiWalletTest(BitcoinTestFramework):
wallet = lambda name: node.get_wallet_rpc(name)
def wallet_file(name):
+ if name == self.default_wallet_name:
+ return wallet_dir(self.default_wallet_name, self.wallet_data_filename)
if os.path.isdir(wallet_dir(name)):
return wallet_dir(name, "wallet.dat")
return wallet_dir(name)
- assert_equal(self.nodes[0].listwalletdir(), { 'wallets': [{ 'name': '' }] })
+ assert_equal(self.nodes[0].listwalletdir(), { 'wallets': [{ 'name': self.default_wallet_name }] })
# check wallet.dat is created
self.stop_nodes()
- assert_equal(os.path.isfile(wallet_dir('wallet.dat')), True)
+ assert_equal(os.path.isfile(wallet_dir(self.default_wallet_name, self.wallet_data_filename)), True)
# create symlink to verify wallet directory path can be referenced
# through symlink
@@ -79,13 +79,18 @@ class MultiWalletTest(BitcoinTestFramework):
# rename wallet.dat to make sure plain wallet file paths (as opposed to
# directory paths) can be loaded
- os.rename(wallet_dir("wallet.dat"), wallet_dir("w8"))
-
# create another dummy wallet for use in testing backups later
- self.start_node(0, [])
+ self.start_node(0, ["-nowallet", "-wallet=empty", "-wallet=plain"])
+ node.createwallet("created")
self.stop_nodes()
empty_wallet = os.path.join(self.options.tmpdir, 'empty.dat')
- os.rename(wallet_dir("wallet.dat"), empty_wallet)
+ os.rename(wallet_file("empty"), empty_wallet)
+ shutil.rmtree(wallet_dir("empty"))
+ empty_created_wallet = os.path.join(self.options.tmpdir, 'empty.created.dat')
+ os.rename(wallet_dir("created", self.wallet_data_filename), empty_created_wallet)
+ shutil.rmtree(wallet_dir("created"))
+ os.rename(wallet_file("plain"), wallet_dir("w8"))
+ shutil.rmtree(wallet_dir("plain"))
# restart node with a mix of wallet names:
# w1, w2, w3 - to verify new wallets created when non-existing paths specified
@@ -95,10 +100,10 @@ class MultiWalletTest(BitcoinTestFramework):
# w7_symlink - to verify symlinked wallet path is initialized correctly
# w8 - to verify existing wallet file is loaded correctly
# '' - to verify default wallet file is created correctly
- wallet_names = ['w1', 'w2', 'w3', 'w', 'sub/w5', os.path.join(self.options.tmpdir, 'extern/w6'), 'w7_symlink', 'w8', '']
- extra_args = ['-wallet={}'.format(n) for n in wallet_names]
+ wallet_names = ['w1', 'w2', 'w3', 'w', 'sub/w5', os.path.join(self.options.tmpdir, 'extern/w6'), 'w7_symlink', 'w8', self.default_wallet_name]
+ extra_args = ['-nowallet'] + ['-wallet={}'.format(n) for n in wallet_names]
self.start_node(0, extra_args)
- assert_equal(sorted(map(lambda w: w['name'], self.nodes[0].listwalletdir()['wallets'])), ['', os.path.join('sub', 'w5'), 'w', 'w1', 'w2', 'w3', 'w7', 'w7_symlink', 'w8'])
+ assert_equal(sorted(map(lambda w: w['name'], self.nodes[0].listwalletdir()['wallets'])), [self.default_wallet_name, os.path.join('sub', 'w5'), 'w', 'w1', 'w2', 'w3', 'w7', 'w7_symlink', 'w8'])
assert_equal(set(node.listwallets()), set(wallet_names))
@@ -109,7 +114,7 @@ class MultiWalletTest(BitcoinTestFramework):
# should not initialize if wallet path can't be created
exp_stderr = "boost::filesystem::create_directory:"
- self.nodes[0].assert_start_raises_init_error(['-wallet=wallet.dat/bad'], exp_stderr, match=ErrorMatch.PARTIAL_REGEX)
+ self.nodes[0].assert_start_raises_init_error(['-wallet=w8/bad'], exp_stderr, match=ErrorMatch.PARTIAL_REGEX)
self.nodes[0].assert_start_raises_init_error(['-walletdir=wallets'], 'Error: Specified -walletdir "wallets" does not exist')
self.nodes[0].assert_start_raises_init_error(['-walletdir=wallets'], 'Error: Specified -walletdir "wallets" is a relative path', cwd=data_dir())
@@ -134,22 +139,17 @@ class MultiWalletTest(BitcoinTestFramework):
open(not_a_dir, 'a', encoding="utf8").close()
self.nodes[0].assert_start_raises_init_error(['-walletdir=' + not_a_dir], 'Error: Specified -walletdir "' + not_a_dir + '" is not a directory')
- self.log.info("Do not allow -zapwallettxes with multiwallet")
- self.nodes[0].assert_start_raises_init_error(['-zapwallettxes', '-wallet=w1', '-wallet=w2'], "Error: -zapwallettxes is only allowed with a single wallet file")
- self.nodes[0].assert_start_raises_init_error(['-zapwallettxes=1', '-wallet=w1', '-wallet=w2'], "Error: -zapwallettxes is only allowed with a single wallet file")
- self.nodes[0].assert_start_raises_init_error(['-zapwallettxes=2', '-wallet=w1', '-wallet=w2'], "Error: -zapwallettxes is only allowed with a single wallet file")
-
# if wallets/ doesn't exist, datadir should be the default wallet dir
wallet_dir2 = data_dir('walletdir')
os.rename(wallet_dir(), wallet_dir2)
- self.start_node(0, ['-wallet=w4', '-wallet=w5'])
+ self.start_node(0, ['-nowallet', '-wallet=w4', '-wallet=w5'])
assert_equal(set(node.listwallets()), {"w4", "w5"})
w5 = wallet("w5")
node.generatetoaddress(nblocks=1, address=w5.getnewaddress())
# now if wallets/ exists again, but the rootdir is specified as the walletdir, w4 and w5 should still be loaded
os.rename(wallet_dir2, wallet_dir())
- self.restart_node(0, ['-wallet=w4', '-wallet=w5', '-walletdir=' + data_dir()])
+ self.restart_node(0, ['-nowallet', '-wallet=w4', '-wallet=w5', '-walletdir=' + data_dir()])
assert_equal(set(node.listwallets()), {"w4", "w5"})
w5 = wallet("w5")
w5_info = w5.getwalletinfo()
@@ -158,12 +158,12 @@ class MultiWalletTest(BitcoinTestFramework):
competing_wallet_dir = os.path.join(self.options.tmpdir, 'competing_walletdir')
os.mkdir(competing_wallet_dir)
self.restart_node(0, ['-walletdir=' + competing_wallet_dir])
- exp_stderr = r"Error: Error initializing wallet database environment \"\S+competing_walletdir\"!"
+ exp_stderr = r"Error: Error initializing wallet database environment \"\S+competing_walletdir\S*\"!"
self.nodes[1].assert_start_raises_init_error(['-walletdir=' + competing_wallet_dir], exp_stderr, match=ErrorMatch.PARTIAL_REGEX)
self.restart_node(0, extra_args)
- assert_equal(sorted(map(lambda w: w['name'], self.nodes[0].listwalletdir()['wallets'])), ['', os.path.join('sub', 'w5'), 'w', 'w1', 'w2', 'w3', 'w7', 'w7_symlink', 'w8', 'w8_copy'])
+ assert_equal(sorted(map(lambda w: w['name'], self.nodes[0].listwalletdir()['wallets'])), [self.default_wallet_name, os.path.join('sub', 'w5'), 'w', 'w1', 'w2', 'w3', 'w7', 'w7_symlink', 'w8', 'w8_copy'])
wallets = [wallet(w) for w in wallet_names]
wallet_bad = wallet("bad")
@@ -211,7 +211,7 @@ class MultiWalletTest(BitcoinTestFramework):
self.restart_node(0, ['-nowallet'])
assert_equal(node.listwallets(), [])
- assert_raises_rpc_error(-32601, "Method not found", node.getwalletinfo)
+ assert_raises_rpc_error(-18, "No wallet is loaded. Load a wallet using loadwallet or create a new one with createwallet. (Note: A default wallet is no longer automatically created)", node.getwalletinfo)
self.log.info("Load first wallet")
loadwallet_name = node.loadwallet(wallet_names[0])
@@ -249,13 +249,17 @@ class MultiWalletTest(BitcoinTestFramework):
assert_equal(set(self.nodes[0].listwallets()), set(wallet_names))
# Fail to load if wallet doesn't exist
- assert_raises_rpc_error(-18, 'Wallet wallets not found.', self.nodes[0].loadwallet, 'wallets')
+ path = os.path.join(self.options.tmpdir, "node0", "regtest", "wallets", "wallets")
+ assert_raises_rpc_error(-18, "Wallet file verification failed. Failed to load database path '{}'. Path does not exist.".format(path), self.nodes[0].loadwallet, 'wallets')
# Fail to load duplicate wallets
- assert_raises_rpc_error(-4, 'Wallet file verification failed. Error loading wallet w1. Duplicate -wallet filename specified.', self.nodes[0].loadwallet, wallet_names[0])
+ path = os.path.join(self.options.tmpdir, "node0", "regtest", "wallets", "w1", "wallet.dat")
+ assert_raises_rpc_error(-4, "Wallet file verification failed. Refusing to load database. Data file '{}' is already loaded.".format(path), self.nodes[0].loadwallet, wallet_names[0])
# Fail to load duplicate wallets by different ways (directory and filepath)
- assert_raises_rpc_error(-4, "Wallet file verification failed. Error loading wallet wallet.dat. Duplicate -wallet filename specified.", self.nodes[0].loadwallet, 'wallet.dat')
+ if not self.options.descriptors:
+ path = os.path.join(self.options.tmpdir, "node0", "regtest", "wallets", "wallet.dat")
+ assert_raises_rpc_error(-4, "Wallet file verification failed. Refusing to load database. Data file '{}' is already loaded.".format(path), self.nodes[0].loadwallet, 'wallet.dat')
# Fail to load if one wallet is a copy of another
assert_raises_rpc_error(-4, "BerkeleyDatabase: Can't open database w8_copy (duplicates fileid", self.nodes[0].loadwallet, 'w8_copy')
@@ -269,12 +273,14 @@ class MultiWalletTest(BitcoinTestFramework):
# Fail to load if a directory is specified that doesn't contain a wallet
os.mkdir(wallet_dir('empty_wallet_dir'))
- assert_raises_rpc_error(-18, "Directory empty_wallet_dir does not contain a wallet.dat file", self.nodes[0].loadwallet, 'empty_wallet_dir')
+ path = os.path.join(self.options.tmpdir, "node0", "regtest", "wallets", "empty_wallet_dir")
+ assert_raises_rpc_error(-18, "Wallet file verification failed. Failed to load database path '{}'. Data is not in recognized format.".format(path), self.nodes[0].loadwallet, 'empty_wallet_dir')
self.log.info("Test dynamic wallet creation.")
# Fail to create a wallet if it already exists.
- assert_raises_rpc_error(-4, "Wallet w2 already exists.", self.nodes[0].createwallet, 'w2')
+ path = os.path.join(self.options.tmpdir, "node0", "regtest", "wallets", "w2")
+ assert_raises_rpc_error(-4, "Failed to create database path '{}'. Database already exists.".format(path), self.nodes[0].createwallet, 'w2')
# Successfully create a wallet with a new name
loadwallet_name = self.nodes[0].createwallet('w9')
@@ -318,14 +324,14 @@ class MultiWalletTest(BitcoinTestFramework):
for wallet_name in self.nodes[0].listwallets():
self.nodes[0].unloadwallet(wallet_name)
assert_equal(self.nodes[0].listwallets(), [])
- assert_raises_rpc_error(-32601, "Method not found (wallet method is disabled because no wallet is loaded)", self.nodes[0].getwalletinfo)
+ assert_raises_rpc_error(-18, "No wallet is loaded. Load a wallet using loadwallet or create a new one with createwallet. (Note: A default wallet is no longer automatically created)", self.nodes[0].getwalletinfo)
# Successfully load a previously unloaded wallet
self.nodes[0].loadwallet('w1')
assert_equal(self.nodes[0].listwallets(), ['w1'])
assert_equal(w1.getwalletinfo()['walletname'], 'w1')
- assert_equal(sorted(map(lambda w: w['name'], self.nodes[0].listwalletdir()['wallets'])), ['', os.path.join('sub', 'w5'), 'w', 'w1', 'w2', 'w3', 'w7', 'w7_symlink', 'w8', 'w8_copy', 'w9'])
+ assert_equal(sorted(map(lambda w: w['name'], self.nodes[0].listwalletdir()['wallets'])), [self.default_wallet_name, os.path.join('sub', 'w5'), 'w', 'w1', 'w2', 'w3', 'w7', 'w7_symlink', 'w8', 'w8_copy', 'w9'])
# Test backing up and restoring wallets
self.log.info("Test wallet backup")
@@ -336,9 +342,11 @@ class MultiWalletTest(BitcoinTestFramework):
rpc = self.nodes[0].get_wallet_rpc(wallet_name)
addr = rpc.getnewaddress()
backup = os.path.join(self.options.tmpdir, 'backup.dat')
+ if os.path.exists(backup):
+ os.unlink(backup)
rpc.backupwallet(backup)
self.nodes[0].unloadwallet(wallet_name)
- shutil.copyfile(empty_wallet, wallet_file(wallet_name))
+ shutil.copyfile(empty_created_wallet if wallet_name == self.default_wallet_name else empty_wallet, wallet_file(wallet_name))
self.nodes[0].loadwallet(wallet_name)
assert_equal(rpc.getaddressinfo(addr)['ismine'], False)
self.nodes[0].unloadwallet(wallet_name)
@@ -350,7 +358,10 @@ class MultiWalletTest(BitcoinTestFramework):
self.start_node(1)
wallet = os.path.join(self.options.tmpdir, 'my_wallet')
self.nodes[0].createwallet(wallet)
- assert_raises_rpc_error(-4, "Error initializing wallet database environment", self.nodes[1].loadwallet, wallet)
+ if self.options.descriptors:
+ assert_raises_rpc_error(-4, "Unable to obtain an exclusive lock", self.nodes[1].loadwallet, wallet)
+ else:
+ assert_raises_rpc_error(-4, "Error initializing wallet database environment", self.nodes[1].loadwallet, wallet)
self.nodes[0].unloadwallet(wallet)
self.nodes[1].loadwallet(wallet)
diff --git a/test/functional/wallet_reorgsrestore.py b/test/functional/wallet_reorgsrestore.py
index 455f1fc5e8..a1d6b098ad 100755
--- a/test/functional/wallet_reorgsrestore.py
+++ b/test/functional/wallet_reorgsrestore.py
@@ -20,8 +20,6 @@ import shutil
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
- connect_nodes,
- disconnect_nodes,
)
class ReorgsRestoreTest(BitcoinTestFramework):
@@ -38,9 +36,9 @@ class ReorgsRestoreTest(BitcoinTestFramework):
self.sync_blocks()
# Disconnect node1 from others to reorg its chain later
- disconnect_nodes(self.nodes[0], 1)
- disconnect_nodes(self.nodes[1], 2)
- connect_nodes(self.nodes[0], 2)
+ self.disconnect_nodes(0, 1)
+ self.disconnect_nodes(1, 2)
+ self.connect_nodes(0, 2)
# Send a tx to be unconfirmed later
txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), Decimal("10"))
@@ -50,7 +48,7 @@ class ReorgsRestoreTest(BitcoinTestFramework):
assert_equal(tx_before_reorg["confirmations"], 4)
# Disconnect node0 from node2 to broadcast a conflict on their respective chains
- disconnect_nodes(self.nodes[0], 2)
+ self.disconnect_nodes(0, 2)
nA = next(tx_out["vout"] for tx_out in self.nodes[0].gettransaction(txid_conflict_from)["details"] if tx_out["amount"] == Decimal("10"))
inputs = []
inputs.append({"txid": txid_conflict_from, "vout": nA})
@@ -69,7 +67,7 @@ class ReorgsRestoreTest(BitcoinTestFramework):
self.nodes[2].generate(9)
# Reconnect node0 and node2 and check that conflicted_txid is effectively conflicted
- connect_nodes(self.nodes[0], 2)
+ self.connect_nodes(0, 2)
self.sync_blocks([self.nodes[0], self.nodes[2]])
conflicted = self.nodes[0].gettransaction(conflicted_txid)
conflicting = self.nodes[0].gettransaction(conflicting_txid)
@@ -89,7 +87,7 @@ class ReorgsRestoreTest(BitcoinTestFramework):
# Node0 wallet file is loaded on longest sync'ed node1
self.stop_node(1)
self.nodes[0].backupwallet(os.path.join(self.nodes[0].datadir, 'wallet.bak'))
- shutil.copyfile(os.path.join(self.nodes[0].datadir, 'wallet.bak'), os.path.join(self.nodes[1].datadir, self.chain, 'wallet.dat'))
+ shutil.copyfile(os.path.join(self.nodes[0].datadir, 'wallet.bak'), os.path.join(self.nodes[1].datadir, self.chain, self.default_wallet_name, self.wallet_data_filename))
self.start_node(1)
tx_after_reorg = self.nodes[1].gettransaction(txid)
# Check that normal confirmed tx is confirmed again but with different blockhash
diff --git a/test/functional/wallet_resendwallettransactions.py b/test/functional/wallet_resendwallettransactions.py
index 3417616d77..353deefcc1 100755
--- a/test/functional/wallet_resendwallettransactions.py
+++ b/test/functional/wallet_resendwallettransactions.py
@@ -7,9 +7,10 @@ import time
from test_framework.blocktools import create_block, create_coinbase
from test_framework.messages import ToHex
-from test_framework.mininode import P2PTxInvStore, mininode_lock
+from test_framework.p2p import P2PTxInvStore
from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import assert_equal, wait_until
+from test_framework.util import assert_equal
+
class ResendWalletTransactionsTest(BitcoinTestFramework):
def set_test_params(self):
@@ -21,22 +22,22 @@ class ResendWalletTransactionsTest(BitcoinTestFramework):
def run_test(self):
node = self.nodes[0] # alias
- node.add_p2p_connection(P2PTxInvStore())
+ peer_first = node.add_p2p_connection(P2PTxInvStore())
self.log.info("Create a new transaction and wait until it's broadcast")
- txid = int(node.sendtoaddress(node.getnewaddress(), 1), 16)
+ txid = node.sendtoaddress(node.getnewaddress(), 1)
# Wallet rebroadcast is first scheduled 1 sec after startup (see
- # nNextResend in ResendWalletTransactions()). Sleep for just over a
- # second to be certain that it has been called before the first
+ # nNextResend in ResendWalletTransactions()). Tell scheduler to call
+ # MaybeResendWalletTxn now to initialize nNextResend before the first
# setmocktime call below.
- time.sleep(1.1)
+ node.mockscheduler(1)
# Can take a few seconds due to transaction trickling
- wait_until(lambda: node.p2p.tx_invs_received[txid] >= 1, lock=mininode_lock)
+ peer_first.wait_for_broadcast([txid])
# Add a second peer since txs aren't rebroadcast to the same peer (see filterInventoryKnown)
- node.add_p2p_connection(P2PTxInvStore())
+ peer_second = node.add_p2p_connection(P2PTxInvStore())
self.log.info("Create a block")
# Create and submit a block without the transaction.
@@ -57,14 +58,19 @@ class ResendWalletTransactionsTest(BitcoinTestFramework):
twelve_hrs = 12 * 60 * 60
two_min = 2 * 60
node.setmocktime(now + twelve_hrs - two_min)
- time.sleep(2) # ensure enough time has passed for rebroadcast attempt to occur
- assert_equal(txid in node.p2ps[1].get_invs(), False)
+ node.mockscheduler(1) # Tell scheduler to call MaybeResendWalletTxn now
+ assert_equal(int(txid, 16) in peer_second.get_invs(), False)
self.log.info("Bump time & check that transaction is rebroadcast")
# Transaction should be rebroadcast approximately 24 hours in the future,
# but can range from 12-36. So bump 36 hours to be sure.
- node.setmocktime(now + 36 * 60 * 60)
- wait_until(lambda: node.p2ps[1].tx_invs_received[txid] >= 1, lock=mininode_lock)
+ with node.assert_debug_log(['ResendWalletTransactions: resubmit 1 unconfirmed transactions']):
+ node.setmocktime(now + 36 * 60 * 60)
+ # Tell scheduler to call MaybeResendWalletTxn now.
+ node.mockscheduler(1)
+ # Give some time for trickle to occur
+ node.setmocktime(now + 36 * 60 * 60 + 600)
+ peer_second.wait_for_broadcast([txid])
if __name__ == '__main__':
diff --git a/test/functional/wallet_send.py b/test/functional/wallet_send.py
new file mode 100755
index 0000000000..60ab2d457e
--- /dev/null
+++ b/test/functional/wallet_send.py
@@ -0,0 +1,345 @@
+#!/usr/bin/env python3
+# Copyright (c) 2020 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test the send RPC command."""
+
+from decimal import Decimal, getcontext
+from test_framework.authproxy import JSONRPCException
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import (
+ assert_equal,
+ assert_fee_amount,
+ assert_greater_than,
+ assert_raises_rpc_error
+)
+
+class WalletSendTest(BitcoinTestFramework):
+ def set_test_params(self):
+ self.num_nodes = 2
+ # whitelist all peers to speed up tx relay / mempool sync
+ self.extra_args = [
+ ["-whitelist=127.0.0.1","-walletrbf=1"],
+ ["-whitelist=127.0.0.1","-walletrbf=1"],
+ ]
+ getcontext().prec = 8 # Satoshi precision for Decimal
+
+ def skip_test_if_missing_module(self):
+ self.skip_if_no_wallet()
+
+ def test_send(self, from_wallet, to_wallet=None, amount=None, data=None,
+ arg_conf_target=None, arg_estimate_mode=None,
+ conf_target=None, estimate_mode=None, add_to_wallet=None, psbt=None,
+ inputs=None, add_inputs=None, change_address=None, change_position=None, change_type=None,
+ include_watching=None, locktime=None, lock_unspents=None, replaceable=None, subtract_fee_from_outputs=None,
+ expect_error=None):
+ assert (amount is None) != (data is None)
+
+ from_balance_before = from_wallet.getbalance()
+ if to_wallet is None:
+ assert amount is None
+ else:
+ to_untrusted_pending_before = to_wallet.getbalances()["mine"]["untrusted_pending"]
+
+ if amount:
+ dest = to_wallet.getnewaddress()
+ outputs = {dest: amount}
+ else:
+ outputs = {"data": data}
+
+ # Construct options dictionary
+ options = {}
+ if add_to_wallet is not None:
+ options["add_to_wallet"] = add_to_wallet
+ else:
+ if psbt:
+ add_to_wallet = False
+ else:
+ add_to_wallet = from_wallet.getwalletinfo()["private_keys_enabled"] # Default value
+ if psbt is not None:
+ options["psbt"] = psbt
+ if conf_target is not None:
+ options["conf_target"] = conf_target
+ if estimate_mode is not None:
+ options["estimate_mode"] = estimate_mode
+ if inputs is not None:
+ options["inputs"] = inputs
+ if add_inputs is not None:
+ options["add_inputs"] = add_inputs
+ if change_address is not None:
+ options["change_address"] = change_address
+ if change_position is not None:
+ options["change_position"] = change_position
+ if change_type is not None:
+ options["change_type"] = change_type
+ if include_watching is not None:
+ options["include_watching"] = include_watching
+ if locktime is not None:
+ options["locktime"] = locktime
+ if lock_unspents is not None:
+ options["lock_unspents"] = lock_unspents
+ if replaceable is None:
+ replaceable = True # default
+ else:
+ options["replaceable"] = replaceable
+ if subtract_fee_from_outputs is not None:
+ options["subtract_fee_from_outputs"] = subtract_fee_from_outputs
+
+ if len(options.keys()) == 0:
+ options = None
+
+ if expect_error is None:
+ res = from_wallet.send(outputs=outputs, conf_target=arg_conf_target, estimate_mode=arg_estimate_mode, options=options)
+ else:
+ try:
+ assert_raises_rpc_error(expect_error[0], expect_error[1], from_wallet.send,
+ outputs=outputs, conf_target=arg_conf_target, estimate_mode=arg_estimate_mode, options=options)
+ except AssertionError:
+ # Provide debug info if the test fails
+ self.log.error("Unexpected successful result:")
+ self.log.error(options)
+ res = from_wallet.send(outputs=outputs, conf_target=arg_conf_target, estimate_mode=arg_estimate_mode, options=options)
+ self.log.error(res)
+ if "txid" in res and add_to_wallet:
+ self.log.error("Transaction details:")
+ try:
+ tx = from_wallet.gettransaction(res["txid"])
+ self.log.error(tx)
+ self.log.error("testmempoolaccept (transaction may already be in mempool):")
+ self.log.error(from_wallet.testmempoolaccept([tx["hex"]]))
+ except JSONRPCException as exc:
+ self.log.error(exc)
+
+ raise
+
+ return
+
+ if locktime:
+ return res
+
+ if from_wallet.getwalletinfo()["private_keys_enabled"] and not include_watching:
+ assert_equal(res["complete"], True)
+ assert "txid" in res
+ else:
+ assert_equal(res["complete"], False)
+ assert not "txid" in res
+ assert "psbt" in res
+
+ if add_to_wallet and not include_watching:
+ # Ensure transaction exists in the wallet:
+ tx = from_wallet.gettransaction(res["txid"])
+ assert tx
+ assert_equal(tx["bip125-replaceable"], "yes" if replaceable else "no")
+ # Ensure transaction exists in the mempool:
+ tx = from_wallet.getrawtransaction(res["txid"], True)
+ assert tx
+ if amount:
+ if subtract_fee_from_outputs:
+ assert_equal(from_balance_before - from_wallet.getbalance(), amount)
+ else:
+ assert_greater_than(from_balance_before - from_wallet.getbalance(), amount)
+ else:
+ assert next((out for out in tx["vout"] if out["scriptPubKey"]["asm"] == "OP_RETURN 35"), None)
+ else:
+ assert_equal(from_balance_before, from_wallet.getbalance())
+
+ if to_wallet:
+ self.sync_mempools()
+ if add_to_wallet:
+ if not subtract_fee_from_outputs:
+ assert_equal(to_wallet.getbalances()["mine"]["untrusted_pending"], to_untrusted_pending_before + Decimal(amount if amount else 0))
+ else:
+ assert_equal(to_wallet.getbalances()["mine"]["untrusted_pending"], to_untrusted_pending_before)
+
+ return res
+
+ def run_test(self):
+ self.log.info("Setup wallets...")
+ # w0 is a wallet with coinbase rewards
+ w0 = self.nodes[0].get_wallet_rpc(self.default_wallet_name)
+ # w1 is a regular wallet
+ self.nodes[1].createwallet(wallet_name="w1")
+ w1 = self.nodes[1].get_wallet_rpc("w1")
+ # w2 contains the private keys for w3
+ self.nodes[1].createwallet(wallet_name="w2")
+ w2 = self.nodes[1].get_wallet_rpc("w2")
+ # w3 is a watch-only wallet, based on w2
+ self.nodes[1].createwallet(wallet_name="w3", disable_private_keys=True)
+ w3 = self.nodes[1].get_wallet_rpc("w3")
+ for _ in range(3):
+ a2_receive = w2.getnewaddress()
+ a2_change = w2.getrawchangeaddress() # doesn't actually use change derivation
+ res = w3.importmulti([{
+ "desc": w2.getaddressinfo(a2_receive)["desc"],
+ "timestamp": "now",
+ "keypool": True,
+ "watchonly": True
+ },{
+ "desc": w2.getaddressinfo(a2_change)["desc"],
+ "timestamp": "now",
+ "keypool": True,
+ "internal": True,
+ "watchonly": True
+ }])
+ assert_equal(res, [{"success": True}, {"success": True}])
+
+ w0.sendtoaddress(a2_receive, 10) # fund w3
+ self.nodes[0].generate(1)
+ self.sync_blocks()
+
+ # w4 has private keys enabled, but only contains watch-only keys (from w2)
+ self.nodes[1].createwallet(wallet_name="w4", disable_private_keys=False)
+ w4 = self.nodes[1].get_wallet_rpc("w4")
+ for _ in range(3):
+ a2_receive = w2.getnewaddress()
+ res = w4.importmulti([{
+ "desc": w2.getaddressinfo(a2_receive)["desc"],
+ "timestamp": "now",
+ "keypool": False,
+ "watchonly": True
+ }])
+ assert_equal(res, [{"success": True}])
+
+ w0.sendtoaddress(a2_receive, 10) # fund w4
+ self.nodes[0].generate(1)
+ self.sync_blocks()
+
+ self.log.info("Send to address...")
+ self.test_send(from_wallet=w0, to_wallet=w1, amount=1)
+ self.test_send(from_wallet=w0, to_wallet=w1, amount=1, add_to_wallet=True)
+
+ self.log.info("Don't broadcast...")
+ res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, add_to_wallet=False)
+ assert(res["hex"])
+
+ self.log.info("Return PSBT...")
+ res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, psbt=True)
+ assert(res["psbt"])
+
+ self.log.info("Create transaction that spends to address, but don't broadcast...")
+ self.test_send(from_wallet=w0, to_wallet=w1, amount=1, add_to_wallet=False)
+ # conf_target & estimate_mode can be set as argument or option
+ res1 = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, arg_conf_target=1, arg_estimate_mode="economical", add_to_wallet=False)
+ res2 = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, conf_target=1, estimate_mode="economical", add_to_wallet=False)
+ assert_equal(self.nodes[1].decodepsbt(res1["psbt"])["fee"],
+ self.nodes[1].decodepsbt(res2["psbt"])["fee"])
+ # but not at the same time
+ self.test_send(from_wallet=w0, to_wallet=w1, amount=1, arg_conf_target=1, arg_estimate_mode="economical",
+ conf_target=1, estimate_mode="economical", add_to_wallet=False, expect_error=(-8,"Use either conf_target and estimate_mode or the options dictionary to control fee rate"))
+
+ self.log.info("Create PSBT from watch-only wallet w3, sign with w2...")
+ res = self.test_send(from_wallet=w3, to_wallet=w1, amount=1)
+ res = w2.walletprocesspsbt(res["psbt"])
+ assert res["complete"]
+
+ self.log.info("Create PSBT from wallet w4 with watch-only keys, sign with w2...")
+ self.test_send(from_wallet=w4, to_wallet=w1, amount=1, expect_error=(-4, "Insufficient funds"))
+ res = self.test_send(from_wallet=w4, to_wallet=w1, amount=1, include_watching=True, add_to_wallet=False)
+ res = w2.walletprocesspsbt(res["psbt"])
+ assert res["complete"]
+
+ self.log.info("Create OP_RETURN...")
+ self.test_send(from_wallet=w0, to_wallet=w1, amount=1)
+ self.test_send(from_wallet=w0, data="Hello World", expect_error=(-8, "Data must be hexadecimal string (not 'Hello World')"))
+ self.test_send(from_wallet=w0, data="23")
+ res = self.test_send(from_wallet=w3, data="23")
+ res = w2.walletprocesspsbt(res["psbt"])
+ assert res["complete"]
+
+ self.log.info("Set fee rate...")
+ res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, conf_target=2, estimate_mode="sat/b", add_to_wallet=False)
+ fee = self.nodes[1].decodepsbt(res["psbt"])["fee"]
+ assert_fee_amount(fee, Decimal(len(res["hex"]) / 2), Decimal("0.00002"))
+ self.test_send(from_wallet=w0, to_wallet=w1, amount=1, conf_target=-1, estimate_mode="sat/b",
+ expect_error=(-3, "Amount out of range"))
+ # Fee rate of 0.1 satoshi per byte should throw an error
+ # TODO: error should use sat/b
+ self.test_send(from_wallet=w0, to_wallet=w1, amount=1, conf_target=0.1, estimate_mode="sat/b",
+ expect_error=(-4, "Fee rate (0.00000100 BTC/kB) is lower than the minimum fee rate setting (0.00001000 BTC/kB)"))
+
+ self.test_send(from_wallet=w0, to_wallet=w1, amount=1, conf_target=0.000001, estimate_mode="BTC/KB",
+ expect_error=(-4, "Fee rate (0.00000100 BTC/kB) is lower than the minimum fee rate setting (0.00001000 BTC/kB)"))
+
+ # TODO: Return hex if fee rate is below -maxmempool
+ # res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, conf_target=0.1, estimate_mode="sat/b", add_to_wallet=False)
+ # assert res["hex"]
+ # hex = res["hex"]
+ # res = self.nodes[0].testmempoolaccept([hex])
+ # assert not res[0]["allowed"]
+ # assert_equal(res[0]["reject-reason"], "...") # low fee
+ # assert_fee_amount(fee, Decimal(len(res["hex"]) / 2), Decimal("0.000001"))
+
+ self.log.info("If inputs are specified, do not automatically add more...")
+ res = self.test_send(from_wallet=w0, to_wallet=w1, amount=51, inputs=[], add_to_wallet=False)
+ assert res["complete"]
+ utxo1 = w0.listunspent()[0]
+ assert_equal(utxo1["amount"], 50)
+ self.test_send(from_wallet=w0, to_wallet=w1, amount=51, inputs=[utxo1],
+ expect_error=(-4, "Insufficient funds"))
+ self.test_send(from_wallet=w0, to_wallet=w1, amount=51, inputs=[utxo1], add_inputs=False,
+ expect_error=(-4, "Insufficient funds"))
+ res = self.test_send(from_wallet=w0, to_wallet=w1, amount=51, inputs=[utxo1], add_inputs=True, add_to_wallet=False)
+ assert res["complete"]
+
+ self.log.info("Manual change address and position...")
+ self.test_send(from_wallet=w0, to_wallet=w1, amount=1, change_address="not an address",
+ expect_error=(-5, "Change address must be a valid bitcoin address"))
+ change_address = w0.getnewaddress()
+ self.test_send(from_wallet=w0, to_wallet=w1, amount=1, add_to_wallet=False, change_address=change_address)
+ assert res["complete"]
+ res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, add_to_wallet=False, change_address=change_address, change_position=0)
+ assert res["complete"]
+ assert_equal(self.nodes[0].decodepsbt(res["psbt"])["tx"]["vout"][0]["scriptPubKey"]["addresses"], [change_address])
+ res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, add_to_wallet=False, change_type="legacy", change_position=0)
+ assert res["complete"]
+ change_address = self.nodes[0].decodepsbt(res["psbt"])["tx"]["vout"][0]["scriptPubKey"]["addresses"][0]
+ assert change_address[0] == "m" or change_address[0] == "n"
+
+ self.log.info("Set lock time...")
+ height = self.nodes[0].getblockchaininfo()["blocks"]
+ res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, locktime=height + 1)
+ assert res["complete"]
+ assert res["txid"]
+ txid = res["txid"]
+ # Although the wallet finishes the transaction, it can't be added to the mempool yet:
+ hex = self.nodes[0].gettransaction(res["txid"])["hex"]
+ res = self.nodes[0].testmempoolaccept([hex])
+ assert not res[0]["allowed"]
+ assert_equal(res[0]["reject-reason"], "non-final")
+ # It shouldn't be confirmed in the next block
+ self.nodes[0].generate(1)
+ assert_equal(self.nodes[0].gettransaction(txid)["confirmations"], 0)
+ # The mempool should allow it now:
+ res = self.nodes[0].testmempoolaccept([hex])
+ assert res[0]["allowed"]
+ # Don't wait for wallet to add it to the mempool:
+ res = self.nodes[0].sendrawtransaction(hex)
+ self.nodes[0].generate(1)
+ assert_equal(self.nodes[0].gettransaction(txid)["confirmations"], 1)
+ self.sync_all()
+
+ self.log.info("Lock unspents...")
+ utxo1 = w0.listunspent()[0]
+ assert_greater_than(utxo1["amount"], 1)
+ res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, inputs=[utxo1], add_to_wallet=False, lock_unspents=True)
+ assert res["complete"]
+ locked_coins = w0.listlockunspent()
+ assert_equal(len(locked_coins), 1)
+ # Locked coins are automatically unlocked when manually selected
+ res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, inputs=[utxo1], add_to_wallet=False)
+ assert res["complete"]
+
+ self.log.info("Replaceable...")
+ res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, add_to_wallet=True, replaceable=True)
+ assert res["complete"]
+ assert_equal(self.nodes[0].gettransaction(res["txid"])["bip125-replaceable"], "yes")
+ res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, add_to_wallet=True, replaceable=False)
+ assert res["complete"]
+ assert_equal(self.nodes[0].gettransaction(res["txid"])["bip125-replaceable"], "no")
+
+ self.log.info("Subtract fee from output")
+ self.test_send(from_wallet=w0, to_wallet=w1, amount=1, subtract_fee_from_outputs=[0])
+
+
+if __name__ == '__main__':
+ WalletSendTest().main()
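The new wallet_send.py above exercises the send RPC, which accepts the fee target either as top-level conf_target/estimate_mode arguments or inside the options object, but not both at once. A hedged usage sketch against regtest wallet RPC handles (w0/w1 as set up in the test; the amounts are illustrative):

# Fee settings passed via options; add_to_wallet=False keeps the tx out of the wallet
# and returns the signed hex instead of broadcasting it.
res = w0.send(outputs={w1.getnewaddress(): 1},
              options={"conf_target": 2, "estimate_mode": "sat/b", "add_to_wallet": False})
assert res["complete"] and res["hex"]

# The same fee settings as top-level arguments; supplying both forms at once fails with -8.
res = w0.send(outputs={w1.getnewaddress(): 1}, conf_target=2, estimate_mode="sat/b")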
diff --git a/test/functional/wallet_startup.py b/test/functional/wallet_startup.py
new file mode 100755
index 0000000000..d3119925f7
--- /dev/null
+++ b/test/functional/wallet_startup.py
@@ -0,0 +1,58 @@
+#!/usr/bin/env python3
+# Copyright (c) 2017-2019 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test wallet load on startup.
+
+Verify that a bitcoind node can maintain the list of wallets to load on startup
+"""
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import (
+ assert_equal,
+)
+
+
+class WalletStartupTest(BitcoinTestFramework):
+ def set_test_params(self):
+ self.setup_clean_chain = True
+ self.num_nodes = 1
+ self.supports_cli = True
+
+ def skip_test_if_missing_module(self):
+ self.skip_if_no_wallet()
+
+ def setup_nodes(self):
+ self.add_nodes(self.num_nodes)
+ self.start_nodes()
+
+ def run_test(self):
+ self.log.info('Should start without any wallets')
+ assert_equal(self.nodes[0].listwallets(), [])
+ assert_equal(self.nodes[0].listwalletdir(), {'wallets': []})
+
+ self.log.info('New default wallet should load by default when there are no other wallets')
+ self.nodes[0].createwallet(wallet_name='', load_on_startup=False)
+ self.restart_node(0)
+ assert_equal(self.nodes[0].listwallets(), [''])
+
+ self.log.info('Test load on startup behavior')
+ self.nodes[0].createwallet(wallet_name='w0', load_on_startup=True)
+ self.nodes[0].createwallet(wallet_name='w1', load_on_startup=False)
+ self.nodes[0].createwallet(wallet_name='w2', load_on_startup=True)
+ self.nodes[0].createwallet(wallet_name='w3', load_on_startup=False)
+ self.nodes[0].createwallet(wallet_name='w4', load_on_startup=False)
+ self.nodes[0].unloadwallet(wallet_name='w0', load_on_startup=False)
+ self.nodes[0].unloadwallet(wallet_name='w4', load_on_startup=False)
+ self.nodes[0].loadwallet(filename='w4', load_on_startup=True)
+ assert_equal(set(self.nodes[0].listwallets()), set(('', 'w1', 'w2', 'w3', 'w4')))
+ self.restart_node(0)
+ assert_equal(set(self.nodes[0].listwallets()), set(('', 'w2', 'w4')))
+ self.nodes[0].unloadwallet(wallet_name='', load_on_startup=False)
+ self.nodes[0].unloadwallet(wallet_name='w4', load_on_startup=False)
+ self.nodes[0].loadwallet(filename='w3', load_on_startup=True)
+ self.nodes[0].loadwallet(filename='')
+ self.restart_node(0)
+ assert_equal(set(self.nodes[0].listwallets()), set(('w2', 'w3')))
+
+if __name__ == '__main__':
+ WalletStartupTest().main()
diff --git a/test/functional/wallet_txn_clone.py b/test/functional/wallet_txn_clone.py
index 5e1a804d33..bdbbb3e530 100755
--- a/test/functional/wallet_txn_clone.py
+++ b/test/functional/wallet_txn_clone.py
@@ -8,8 +8,6 @@ import io
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
- connect_nodes,
- disconnect_nodes,
)
from test_framework.messages import CTransaction, COIN
@@ -25,12 +23,12 @@ class TxnMallTest(BitcoinTestFramework):
parser.add_argument("--mineblock", dest="mine_block", default=False, action="store_true",
help="Test double-spend of 1-confirmed transaction")
parser.add_argument("--segwit", dest="segwit", default=False, action="store_true",
- help="Test behaviour with SegWit txn (which should fail")
+ help="Test behaviour with SegWit txn (which should fail)")
def setup_network(self):
# Start with split network:
super().setup_network()
- disconnect_nodes(self.nodes[1], 2)
+ self.disconnect_nodes(1, 2)
def run_test(self):
if self.options.segwit:
@@ -118,7 +116,7 @@ class TxnMallTest(BitcoinTestFramework):
self.nodes[2].generate(1)
# Reconnect the split network, and sync chain:
- connect_nodes(self.nodes[1], 2)
+ self.connect_nodes(1, 2)
self.nodes[2].sendrawtransaction(node0_tx2["hex"])
self.nodes[2].sendrawtransaction(tx2["hex"])
self.nodes[2].generate(1) # Mine another block to make sure we sync
diff --git a/test/functional/wallet_txn_doublespend.py b/test/functional/wallet_txn_doublespend.py
index cac58aeaf2..42de131354 100755
--- a/test/functional/wallet_txn_doublespend.py
+++ b/test/functional/wallet_txn_doublespend.py
@@ -8,8 +8,6 @@ from decimal import Decimal
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
- connect_nodes,
- disconnect_nodes,
find_output,
)
@@ -28,7 +26,7 @@ class TxnMallTest(BitcoinTestFramework):
def setup_network(self):
# Start with split network:
super().setup_network()
- disconnect_nodes(self.nodes[1], 2)
+ self.disconnect_nodes(1, 2)
def run_test(self):
# All nodes should start with 1,250 BTC:
@@ -116,7 +114,7 @@ class TxnMallTest(BitcoinTestFramework):
self.nodes[2].generate(1)
# Reconnect the split network, and sync chain:
- connect_nodes(self.nodes[1], 2)
+ self.connect_nodes(1, 2)
self.nodes[2].generate(1) # Mine another block to make sure we sync
self.sync_blocks()
assert_equal(self.nodes[0].gettransaction(doublespend_txid)["confirmations"], 2)
diff --git a/test/functional/wallet_upgradewallet.py b/test/functional/wallet_upgradewallet.py
index 1a76f65215..446a601aee 100755
--- a/test/functional/wallet_upgradewallet.py
+++ b/test/functional/wallet_upgradewallet.py
@@ -6,7 +6,7 @@
Test upgradewallet RPC. Download node binaries:
-contrib/devtools/previous_release.py -b v0.19.1 v0.18.1 v0.17.1 v0.16.3 v0.15.2
+test/get_previous_releases.py -b v0.19.1 v0.18.1 v0.17.2 v0.16.3 v0.15.2
Only v0.15.2 and v0.16.3 are required by this test. The others are used in feature_backwards_compatibility.py
"""
@@ -31,6 +31,7 @@ class UpgradeWalletTest(BitcoinTestFramework):
["-usehd=1"], # v0.16.3 wallet
["-usehd=0"] # v0.15.2 wallet
]
+ self.wallet_names = [self.default_wallet_name, None, None]
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
@@ -46,6 +47,7 @@ class UpgradeWalletTest(BitcoinTestFramework):
150200,
])
self.start_nodes()
+ self.import_deterministic_coinbase_privkeys()
def dumb_sync_blocks(self):
"""
diff --git a/test/functional/wallet_zapwallettxes.py b/test/functional/wallet_zapwallettxes.py
deleted file mode 100755
index 7f1cdbd20b..0000000000
--- a/test/functional/wallet_zapwallettxes.py
+++ /dev/null
@@ -1,79 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (c) 2014-2018 The Bitcoin Core developers
-# Distributed under the MIT software license, see the accompanying
-# file COPYING or http://www.opensource.org/licenses/mit-license.php.
-"""Test the zapwallettxes functionality.
-
-- start two bitcoind nodes
-- create two transactions on node 0 - one is confirmed and one is unconfirmed.
-- restart node 0 and verify that both the confirmed and the unconfirmed
- transactions are still available.
-- restart node 0 with zapwallettxes and persistmempool, and verify that both
- the confirmed and the unconfirmed transactions are still available.
-- restart node 0 with just zapwallettxes and verify that the confirmed
- transactions are still available, but that the unconfirmed transaction has
- been zapped.
-"""
-from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import (
- assert_equal,
- assert_raises_rpc_error,
- wait_until,
-)
-
-class ZapWalletTXesTest (BitcoinTestFramework):
- def set_test_params(self):
- self.setup_clean_chain = True
- self.num_nodes = 2
-
- def skip_test_if_missing_module(self):
- self.skip_if_no_wallet()
-
- def run_test(self):
- self.log.info("Mining blocks...")
- self.nodes[0].generate(1)
- self.sync_all()
- self.nodes[1].generate(100)
- self.sync_all()
-
- # This transaction will be confirmed
- txid1 = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 10)
-
- self.nodes[0].generate(1)
- self.sync_all()
-
- # This transaction will not be confirmed
- txid2 = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 20)
-
- # Confirmed and unconfirmed transactions are now in the wallet.
- assert_equal(self.nodes[0].gettransaction(txid1)['txid'], txid1)
- assert_equal(self.nodes[0].gettransaction(txid2)['txid'], txid2)
-
- # Restart node0. Both confirmed and unconfirmed transactions remain in the wallet.
- self.restart_node(0)
-
- assert_equal(self.nodes[0].gettransaction(txid1)['txid'], txid1)
- assert_equal(self.nodes[0].gettransaction(txid2)['txid'], txid2)
-
- # Restart node0 with zapwallettxes and persistmempool. The unconfirmed
- # transaction is zapped from the wallet, but is re-added when the mempool is reloaded.
- self.restart_node(0, ["-persistmempool=1", "-zapwallettxes=2"])
-
- wait_until(lambda: self.nodes[0].getmempoolinfo()['size'] == 1, timeout=3)
- self.nodes[0].syncwithvalidationinterfacequeue() # Flush mempool to wallet
-
- assert_equal(self.nodes[0].gettransaction(txid1)['txid'], txid1)
- assert_equal(self.nodes[0].gettransaction(txid2)['txid'], txid2)
-
- # Restart node0 with zapwallettxes, but not persistmempool.
- # The unconfirmed transaction is zapped and is no longer in the wallet.
- self.restart_node(0, ["-zapwallettxes=2"])
-
- # tx1 is still be available because it was confirmed
- assert_equal(self.nodes[0].gettransaction(txid1)['txid'], txid1)
-
- # This will raise an exception because the unconfirmed transaction has been zapped
- assert_raises_rpc_error(-5, 'Invalid or non-wallet transaction id', self.nodes[0].gettransaction, txid2)
-
-if __name__ == '__main__':
- ZapWalletTXesTest().main()
diff --git a/test/fuzz/test_runner.py b/test/fuzz/test_runner.py
index 56b18752ec..c7895edbcc 100755
--- a/test/fuzz/test_runner.py
+++ b/test/fuzz/test_runner.py
@@ -56,6 +56,14 @@ def main():
'--m_dir',
help='Merge inputs from this directory into the seed_dir. Needs /target subdirectory.',
)
+ parser.add_argument(
+ '-g',
+ '--generate',
+ action='store_true',
+ help='Create new corpus seeds (or extend the existing ones) by running'
+ ' the given targets a finite number of times. Outputs them to'
+ ' the passed seed_dir.'
+ )
args = parser.parse_args()
@@ -100,19 +108,20 @@ def main():
logging.info("{} of {} detected fuzz target(s) selected: {}".format(len(test_list_selection), len(test_list_all), " ".join(test_list_selection)))
- test_list_seedless = []
- for t in test_list_selection:
- corpus_path = os.path.join(args.seed_dir, t)
- if not os.path.exists(corpus_path) or len(os.listdir(corpus_path)) == 0:
- test_list_seedless.append(t)
- test_list_seedless.sort()
- if test_list_seedless:
- logging.info(
- "Fuzzing harnesses lacking a seed corpus: {}".format(
- " ".join(test_list_seedless)
+ if not args.generate:
+ test_list_seedless = []
+ for t in test_list_selection:
+ corpus_path = os.path.join(args.seed_dir, t)
+ if not os.path.exists(corpus_path) or len(os.listdir(corpus_path)) == 0:
+ test_list_seedless.append(t)
+ test_list_seedless.sort()
+ if test_list_seedless:
+ logging.info(
+ "Fuzzing harnesses lacking a seed corpus: {}".format(
+ " ".join(test_list_seedless)
+ )
)
- )
- logging.info("Please consider adding a fuzz seed corpus at https://github.com/bitcoin-core/qa-assets")
+ logging.info("Please consider adding a fuzz seed corpus at https://github.com/bitcoin-core/qa-assets")
try:
help_output = subprocess.run(
@@ -133,6 +142,14 @@ def main():
sys.exit(1)
with ThreadPoolExecutor(max_workers=args.par) as fuzz_pool:
+ if args.generate:
+ return generate_corpus_seeds(
+ fuzz_pool=fuzz_pool,
+ build_dir=config["environment"]["BUILDDIR"],
+ seed_dir=args.seed_dir,
+ targets=test_list_selection,
+ )
+
if args.m_dir:
merge_inputs(
fuzz_pool=fuzz_pool,
@@ -152,6 +169,37 @@ def main():
)
+def generate_corpus_seeds(*, fuzz_pool, build_dir, seed_dir, targets):
+ """Generates new corpus seeds.
+
+ Runs {targets} without input and outputs the generated corpus seeds to
+ {seed_dir}.
+ """
+ logging.info("Generating corpus seeds to {}".format(seed_dir))
+
+ def job(command):
+ logging.debug("Running '{}'\n".format(" ".join(command)))
+ logging.debug("Command '{}' output:\n'{}'\n".format(
+ ' '.join(command),
+ subprocess.run(command, check=True, stderr=subprocess.PIPE,
+ universal_newlines=True).stderr
+ ))
+
+ futures = []
+ for target in targets:
+ target_seed_dir = os.path.join(seed_dir, target)
+ os.makedirs(target_seed_dir, exist_ok=True)
+ command = [
+ os.path.join(build_dir, "src", "test", "fuzz", target),
+ "-runs=100000",
+ target_seed_dir,
+ ]
+ futures.append(fuzz_pool.submit(job, command))
+
+ for future in as_completed(futures):
+ future.result()
+
+
def merge_inputs(*, fuzz_pool, corpus, test_list, build_dir, merge_dir):
logging.info("Merge the inputs in the passed dir into the seed_dir. Passed dir {}".format(merge_dir))
jobs = []
diff --git a/test/get_previous_releases.py b/test/get_previous_releases.py
new file mode 100755
index 0000000000..1348b8246b
--- /dev/null
+++ b/test/get_previous_releases.py
@@ -0,0 +1,228 @@
+#!/usr/bin/env python3
+#
+# Copyright (c) 2018-2020 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+#
+# Download or build previous releases.
+# Needs curl and tar to download a release, or the build dependencies when
+# building a release.
+
+import argparse
+import contextlib
+from fnmatch import fnmatch
+import os
+from pathlib import Path
+import re
+import shutil
+import subprocess
+import sys
+import hashlib
+
+
+SHA256_SUMS = {
+"d40f18b4e43c6e6370ef7db9131f584fbb137276ec2e3dba67a4b267f81cb644": "bitcoin-0.15.2-aarch64-linux-gnu.tar.gz",
+"54fb877a148a6ad189a1e1ab1ff8b11181e58ff2aaf430da55b3fd46ae549a6b": "bitcoin-0.15.2-arm-linux-gnueabihf.tar.gz",
+"2b843506c3f1af0eeca5854a920264f9a829f02d0d50328005950ddcbe88874d": "bitcoin-0.15.2-i686-pc-linux-gnu.tar.gz",
+"87e9340ff3d382d543b2b69112376077f0c8b4f7450d372e83b68f5a1e22b2df": "bitcoin-0.15.2-osx64.tar.gz",
+"566be44190fd76daa01f13d428939dadfb8e3daacefc8fa17f433cad28f73bd5": "bitcoin-0.15.2-x86_64-linux-gnu.tar.gz",
+
+"0768c6c15caffbaca6524824c9563b42c24f70633c681c2744649158aa3fd484": "bitcoin-0.16.3-aarch64-linux-gnu.tar.gz",
+"fb2818069854a6ad20ea03b28b55dbd35d8b1f7d453e90b83eace5d0098a2a87": "bitcoin-0.16.3-arm-linux-gnueabihf.tar.gz",
+"75a537844313b0a84bdb61ffcdc5c4ce19a738f7ddf71007cd2edf664efd7c37": "bitcoin-0.16.3-i686-pc-linux-gnu.tar.gz",
+"78c3bff3b619a19aed575961ea43cc9e142959218835cf51aede7f0b764fc25d": "bitcoin-0.16.3-osx64.tar.gz",
+"5d422a9d544742bc0df12427383f9c2517433ce7b58cf672b9a9b17c2ef51e4f": "bitcoin-0.16.3-x86_64-linux-gnu.tar.gz",
+
+"5a6b35d1a348a402f2d2d6ab5aed653a1a1f13bc63aaaf51605e3501b0733b7a": "bitcoin-0.17.2-aarch64-linux-gnu.tar.gz",
+"d1913a5d19c8e8da4a67d1bd5205d03c8614dfd2e02bba2fe3087476643a729e": "bitcoin-0.17.2-arm-linux-gnueabihf.tar.gz",
+"d295fc93f39bbf0fd937b730a93184899a2eb6c3a6d53f3d857cbe77ef89b98c": "bitcoin-0.17.2-i686-pc-linux-gnu.tar.gz",
+"a783ba20706dbfd5b47fbedf42165fce70fbbc7d78003305d964f6b3da14887f": "bitcoin-0.17.2-osx64.tar.gz",
+"943f9362b9f11130177839116f48f809d83478b4c28591d486ee9a7e35179da6": "bitcoin-0.17.2-x86_64-linux-gnu.tar.gz",
+
+"88f343af72803b851c7da13874cc5525026b0b55e63e1b5e1298390c4688adc6": "bitcoin-0.18.1-aarch64-linux-gnu.tar.gz",
+"cc7d483e4b20c5dabd4dcaf304965214cf4934bcc029ca99cbc9af00d3771a1f": "bitcoin-0.18.1-arm-linux-gnueabihf.tar.gz",
+"989e847b3e95fc9fedc0b109cae1b4fa43348f2f712e187a118461876af9bd16": "bitcoin-0.18.1-i686-pc-linux-gnu.tar.gz",
+"b7bbcee7a7540f711b171d6981f939ca8482005fde22689bc016596d80548bb1": "bitcoin-0.18.1-osx64.tar.gz",
+"425ee5ec631ae8da71ebc1c3f5c0269c627cf459379b9b030f047107a28e3ef8": "bitcoin-0.18.1-riscv64-linux-gnu.tar.gz",
+"600d1db5e751fa85903e935a01a74f5cc57e1e7473c15fd3e17ed21e202cfe5a": "bitcoin-0.18.1-x86_64-linux-gnu.tar.gz",
+
+"3a80431717842672df682bdb619e66523b59541483297772a7969413be3502ff": "bitcoin-0.19.1-aarch64-linux-gnu.tar.gz",
+"657f28213823d240dd3324d14829702f9ad6f0710f8bdd1c379cb3c447197f48": "bitcoin-0.19.1-arm-linux-gnueabihf.tar.gz",
+"10d1e53208aa7603022f4acc084a046299ab4ccf25fe01e81b3fb6f856772589": "bitcoin-0.19.1-i686-pc-linux-gnu.tar.gz",
+"1ae1b87de26487075cd2fd22e0d4ead87d969bd55c44f2f1d873ecdc6147ebb3": "bitcoin-0.19.1-osx64.tar.gz",
+"aa7a9563b48aa79252c8e7b6a41c07a5441bd9f14c5e4562cc72720ea6cb0ee5": "bitcoin-0.19.1-riscv64-linux-gnu.tar.gz",
+"5fcac9416e486d4960e1a946145566350ca670f9aaba99de6542080851122e4c": "bitcoin-0.19.1-x86_64-linux-gnu.tar.gz"
+}
+
+@contextlib.contextmanager
+def pushd(new_dir) -> None:
+ previous_dir = os.getcwd()
+ os.chdir(new_dir)
+ try:
+ yield
+ finally:
+ os.chdir(previous_dir)
+
+
+def download_binary(tag, args) -> int:
+ if Path(tag).is_dir():
+ if not args.remove_dir:
+ print('Using cached {}'.format(tag))
+ return 0
+ shutil.rmtree(tag)
+ Path(tag).mkdir()
+ bin_path = 'bin/bitcoin-core-{}'.format(tag[1:])
+ match = re.compile('v(.*)(rc[0-9]+)$').search(tag)
+ if match:
+ bin_path = 'bin/bitcoin-core-{}/test.{}'.format(
+ match.group(1), match.group(2))
+ tarball = 'bitcoin-{tag}-{platform}.tar.gz'.format(
+ tag=tag[1:], platform=args.platform)
+ tarballUrl = 'https://bitcoincore.org/{bin_path}/{tarball}'.format(
+ bin_path=bin_path, tarball=tarball)
+
+ print('Fetching: {tarballUrl}'.format(tarballUrl=tarballUrl))
+
+ header, status = subprocess.Popen(
+ ['curl', '--head', tarballUrl], stdout=subprocess.PIPE).communicate()
+ if re.search("404 Not Found", header.decode("utf-8")):
+ print("Binary tag was not found")
+ return 1
+
+ curlCmds = [
+ ['curl', '--remote-name', tarballUrl]
+ ]
+
+ for cmd in curlCmds:
+ ret = subprocess.run(cmd).returncode
+ if ret:
+ return ret
+
+ hasher = hashlib.sha256()
+ with open(tarball, "rb") as afile:
+ hasher.update(afile.read())
+ tarballHash = hasher.hexdigest()
+
+ if tarballHash not in SHA256_SUMS or SHA256_SUMS[tarballHash] != tarball:
+ print("Checksum did not match")
+ return 1
+ print("Checksum matched")
+
+ # Extract tarball
+ ret = subprocess.run(['tar', '-zxf', tarball, '-C', tag,
+ '--strip-components=1',
+ 'bitcoin-{tag}'.format(tag=tag[1:])]).returncode
+ if ret:
+ return ret
+
+ Path(tarball).unlink()
+ return 0
+
+
+def build_release(tag, args) -> int:
+ githubUrl = "https://github.com/bitcoin/bitcoin"
+ if args.remove_dir:
+ if Path(tag).is_dir():
+ shutil.rmtree(tag)
+ if not Path(tag).is_dir():
+ # fetch new tags
+ subprocess.run(
+ ["git", "fetch", githubUrl, "--tags"])
+ output = subprocess.check_output(['git', 'tag', '-l', tag])
+ if not output:
+ print('Tag {} not found'.format(tag))
+ return 1
+ ret = subprocess.run([
+ 'git', 'clone', githubUrl, tag
+ ]).returncode
+ if ret:
+ return ret
+ with pushd(tag):
+ ret = subprocess.run(['git', 'checkout', tag]).returncode
+ if ret:
+ return ret
+ host = args.host
+ if args.depends:
+ with pushd('depends'):
+ ret = subprocess.run(['make', 'NO_QT=1']).returncode
+ if ret:
+ return ret
+ host = os.environ.get(
+ 'HOST', subprocess.check_output(['./config.guess']))
+ config_flags = '--prefix={pwd}/depends/{host} '.format(
+ pwd=os.getcwd(),
+ host=host) + args.config_flags
+ cmds = [
+ './autogen.sh',
+ './configure {}'.format(config_flags),
+ 'make',
+ ]
+ for cmd in cmds:
+ ret = subprocess.run(cmd.split()).returncode
+ if ret:
+ return ret
+ # Move binaries, so they're in the same place as in the
+ # release download
+ Path('bin').mkdir(exist_ok=True)
+ files = ['bitcoind', 'bitcoin-cli', 'bitcoin-tx']
+ for f in files:
+ Path('src/'+f).rename('bin/'+f)
+ return 0
+
+
+def check_host(args) -> int:
+ args.host = os.environ.get('HOST', subprocess.check_output(
+ './depends/config.guess').decode())
+ if args.download_binary:
+ platforms = {
+ 'x86_64-*-linux*': 'x86_64-linux-gnu',
+ 'x86_64-apple-darwin*': 'osx64',
+ }
+ args.platform = ''
+ for pattern, target in platforms.items():
+ if fnmatch(args.host, pattern):
+ args.platform = target
+ if not args.platform:
+ print('Not sure which binary to download for {}'.format(args.host))
+ return 1
+ return 0
+
+
+def main(args) -> int:
+ Path(args.target_dir).mkdir(exist_ok=True, parents=True)
+ print("Releases directory: {}".format(args.target_dir))
+ ret = check_host(args)
+ if ret:
+ return ret
+ if args.download_binary:
+ with pushd(args.target_dir):
+ for tag in args.tags:
+ ret = download_binary(tag, args)
+ if ret:
+ return ret
+ return 0
+ args.config_flags = os.environ.get('CONFIG_FLAGS', '')
+ args.config_flags += ' --without-gui --disable-tests --disable-bench'
+ with pushd(args.target_dir):
+ for tag in args.tags:
+ ret = build_release(tag, args)
+ if ret:
+ return ret
+ return 0
+
+
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser(
+ formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+ parser.add_argument('-r', '--remove-dir', action='store_true',
+ help='remove existing directory.')
+ parser.add_argument('-d', '--depends', action='store_true',
+ help='use depends.')
+ parser.add_argument('-b', '--download-binary', action='store_true',
+ help='download release binary.')
+ parser.add_argument('-t', '--target-dir', action='store',
+ help='target directory.', default='releases')
+ parser.add_argument('tags', nargs='+',
+ help="release tags. e.g.: v0.18.1 v0.20.0rc2")
+ args = parser.parse_args()
+ sys.exit(main(args))
diff --git a/test/lint/check-doc.py b/test/lint/check-doc.py
index bd947d194c..f77242d335 100755
--- a/test/lint/check-doc.py
+++ b/test/lint/check-doc.py
@@ -23,7 +23,7 @@ CMD_GREP_WALLET_ARGS = r"git grep --function-context 'void WalletInit::AddWallet
CMD_GREP_WALLET_HIDDEN_ARGS = r"git grep --function-context 'void DummyWalletInit::AddWalletOptions' -- {}".format(CMD_ROOT_DIR)
CMD_GREP_DOCS = r"git grep --perl-regexp '{}' {}".format(REGEX_DOC, CMD_ROOT_DIR)
# list unsupported, deprecated and duplicate args as they need no documentation
-SET_DOC_OPTIONAL = set(['-h', '-help', '-dbcrashratio', '-forcecompactdb'])
+SET_DOC_OPTIONAL = set(['-h', '-help', '-dbcrashratio', '-forcecompactdb', '-zapwallettxes'])
def lint_missing_argument_documentation():
diff --git a/test/lint/commit-script-check.sh b/test/lint/commit-script-check.sh
index ff3f784437..827c978bed 100755
--- a/test/lint/commit-script-check.sh
+++ b/test/lint/commit-script-check.sh
@@ -37,7 +37,7 @@ for commit in $(git rev-list --reverse $1); do
git reset --quiet --hard HEAD
else
if git rev-list "--format=%b" -n1 $commit | grep -q '^-\(BEGIN\|END\)[ a-zA-Z]*-$'; then
- echo "Error: script block marker but no scripted-diff in title"
+ echo "Error: script block marker but no scripted-diff in title of commit $commit"
echo "Failed"
RET=1
fi
diff --git a/test/lint/lint-cpp.sh b/test/lint/lint-cpp.sh
new file mode 100755
index 0000000000..cac57b968d
--- /dev/null
+++ b/test/lint/lint-cpp.sh
@@ -0,0 +1,21 @@
+#!/usr/bin/env bash
+#
+# Copyright (c) 2020 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+#
+# Check for various C++ code patterns we want to avoid.
+
+export LC_ALL=C
+
+EXIT_CODE=0
+
+OUTPUT=$(git grep -E "boost::bind\(" -- "*.cpp" "*.h")
+if [[ ${OUTPUT} != "" ]]; then
+ echo "Use of boost::bind detected. Use std::bind instead."
+ echo
+ echo "${OUTPUT}"
+ EXIT_CODE=1
+fi
+
+exit ${EXIT_CODE}
\ No newline at end of file
diff --git a/test/lint/lint-git-commit-check.sh b/test/lint/lint-git-commit-check.sh
index 8947f67bf6..ecaad215c4 100755
--- a/test/lint/lint-git-commit-check.sh
+++ b/test/lint/lint-git-commit-check.sh
@@ -23,10 +23,18 @@ while getopts "?" opt; do
esac
done
+# TRAVIS_BRANCH will be present in a Travis environment. For builds triggered
+# by a pull request this is the name of the branch targeted by the pull request.
+# https://docs.travis-ci.com/user/environment-variables/
+if [ -n "${TRAVIS_BRANCH}" ]; then
+ COMMIT_RANGE="$TRAVIS_BRANCH..HEAD"
+fi
+
if [ -z "${COMMIT_RANGE}" ]; then
if [ -n "$1" ]; then
COMMIT_RANGE="HEAD~$1...HEAD"
else
+ # This assumes that the target branch of the pull request will be master.
MERGE_BASE=$(git merge-base HEAD master)
COMMIT_RANGE="$MERGE_BASE..HEAD"
fi
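Editor's note: the fallback logic added here (use TRAVIS_BRANCH when Travis provides it, an explicit commit count when one is passed, otherwise the merge base with master) is mirrored in lint-whitespace.sh below. A hedged Python sketch of the same derivation, for illustration only:

    import os
    import subprocess

    def commit_range(num_commits=None):
        # Prefer the branch Travis says the pull request targets.
        travis_branch = os.environ.get('TRAVIS_BRANCH')
        if travis_branch:
            return f'{travis_branch}..HEAD'
        # Otherwise check the last N commits if a count was supplied.
        if num_commits is not None:
            return f'HEAD~{num_commits}...HEAD'
        # Fall back to everything since the merge base with master
        # (assumes the pull request targets master).
        merge_base = subprocess.check_output(
            ['git', 'merge-base', 'HEAD', 'master'], text=True).strip()
        return f'{merge_base}..HEAD'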
diff --git a/test/lint/lint-locale-dependence.sh b/test/lint/lint-locale-dependence.sh
index 2e5b801849..e5657f7555 100755
--- a/test/lint/lint-locale-dependence.sh
+++ b/test/lint/lint-locale-dependence.sh
@@ -4,6 +4,39 @@
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
export LC_ALL=C
+
+# Be aware that bitcoind and bitcoin-qt differ in terms of localization: Qt
+# opts in to POSIX localization by running setlocale(LC_ALL, "") on startup,
+# whereas no such call is made in bitcoind.
+#
+# Qt runs setlocale(LC_ALL, "") on initialization. This installs the locale
+# specified by the user's LC_ALL (or LC_*) environment variable as the new
+# C locale.
+#
+# In contrast, bitcoind does not opt in to localization -- no call to
+# setlocale(LC_ALL, "") is made and the environment variables LC_* are
+# thus ignored.
+#
+# This results in situations where bitcoind is guaranteed to be running
+# with the classic locale ("C") whereas the locale of bitcoin-qt will vary
+# depending on the user's environment variables.
+#
+# An example: Assuming the environment variable LC_ALL=de_DE then the
+# call std::to_string(1.23) will return "1.230000" in bitcoind but
+# "1,230000" in bitcoin-qt.
+#
+# From the Qt documentation:
+# "On Unix/Linux Qt is configured to use the system locale settings by default.
+# This can cause a conflict when using POSIX functions, for instance, when
+# converting between data types such as floats and strings, since the notation
+# may differ between locales. To get around this problem, call the POSIX function
+# setlocale(LC_NUMERIC,"C") right after initializing QApplication, QGuiApplication
+# or QCoreApplication to reset the locale that is used for number formatting to
+# "C"-locale."
+#
+# See https://doc.qt.io/qt-5/qcoreapplication.html#locale-settings and
+# https://stackoverflow.com/a/34878283 for more details.
+
KNOWN_VIOLATIONS=(
"src/bitcoin-tx.cpp.*stoul"
"src/bitcoin-tx.cpp.*trim_right"
diff --git a/test/lint/lint-whitespace.sh b/test/lint/lint-whitespace.sh
index d8bdb0a8d7..80af0a439d 100755
--- a/test/lint/lint-whitespace.sh
+++ b/test/lint/lint-whitespace.sh
@@ -13,32 +13,41 @@ while getopts "?" opt; do
case $opt in
?)
echo "Usage: $0 [N]"
- echo " TRAVIS_COMMIT_RANGE='<commit range>' $0"
+ echo " COMMIT_RANGE='<commit range>' $0"
echo " $0 -?"
echo "Checks unstaged changes, the previous N commits, or a commit range."
- echo "TRAVIS_COMMIT_RANGE='47ba2c3...ee50c9e' $0"
+ echo "COMMIT_RANGE='47ba2c3...ee50c9e' $0"
exit 0
;;
esac
done
-if [ -z "${TRAVIS_COMMIT_RANGE}" ]; then
+# TRAVIS_BRANCH will be present in a Travis environment. For builds triggered
+# by a pull request this is the name of the branch targeted by the pull request.
+# https://docs.travis-ci.com/user/environment-variables/
+if [ -n "${TRAVIS_BRANCH}" ]; then
+ COMMIT_RANGE="$TRAVIS_BRANCH..HEAD"
+fi
+
+if [ -z "${COMMIT_RANGE}" ]; then
if [ -n "$1" ]; then
- TRAVIS_COMMIT_RANGE="HEAD~$1...HEAD"
+ COMMIT_RANGE="HEAD~$1...HEAD"
else
- TRAVIS_COMMIT_RANGE="HEAD"
+ # This assumes that the target branch of the pull request will be master.
+ MERGE_BASE=$(git merge-base HEAD master)
+ COMMIT_RANGE="$MERGE_BASE..HEAD"
fi
fi
showdiff() {
- if ! git diff -U0 "${TRAVIS_COMMIT_RANGE}" -- "." ":(exclude)depends/patches/" ":(exclude)src/leveldb/" ":(exclude)src/crc32c/" ":(exclude)src/secp256k1/" ":(exclude)src/univalue/" ":(exclude)doc/release-notes/" ":(exclude)src/qt/locale/"; then
+ if ! git diff -U0 "${COMMIT_RANGE}" -- "." ":(exclude)depends/patches/" ":(exclude)src/leveldb/" ":(exclude)src/crc32c/" ":(exclude)src/secp256k1/" ":(exclude)src/univalue/" ":(exclude)doc/release-notes/" ":(exclude)src/qt/locale/"; then
echo "Failed to get a diff"
exit 1
fi
}
showcodediff() {
- if ! git diff -U0 "${TRAVIS_COMMIT_RANGE}" -- *.cpp *.h *.md *.py *.sh ":(exclude)src/leveldb/" ":(exclude)src/crc32c/" ":(exclude)src/secp256k1/" ":(exclude)src/univalue/" ":(exclude)doc/release-notes/" ":(exclude)src/qt/locale/"; then
+ if ! git diff -U0 "${COMMIT_RANGE}" -- *.cpp *.h *.md *.py *.sh ":(exclude)src/leveldb/" ":(exclude)src/crc32c/" ":(exclude)src/secp256k1/" ":(exclude)src/univalue/" ":(exclude)doc/release-notes/" ":(exclude)src/qt/locale/"; then
echo "Failed to get a diff"
exit 1
fi
diff --git a/test/sanitizer_suppressions/tsan b/test/sanitizer_suppressions/tsan
index 3d0ac7f995..625085c55b 100644
--- a/test/sanitizer_suppressions/tsan
+++ b/test/sanitizer_suppressions/tsan
@@ -11,7 +11,7 @@ mutex:CConnman::ThreadOpenConnections
mutex:CConnman::ThreadOpenAddedConnections
mutex:CConnman::SocketHandler
mutex:UpdateTip
-mutex:PeerLogicValidation::UpdatedBlockTip
+mutex:PeerManager::UpdatedBlockTip
mutex:g_best_block_mutex
# race (TODO fix)
race:CConnman::WakeMessageHandler
@@ -24,6 +24,7 @@ race:WalletBatch::WriteHDChain
race:BerkeleyBatch
race:BerkeleyDatabase
race:DatabaseBatch
+race:leveldb::DBImpl::DeleteObsoleteFiles
race:zmq::*
race:bitcoin-qt
# deadlock (TODO fix)
diff --git a/test/sanitizer_suppressions/ubsan b/test/sanitizer_suppressions/ubsan
index b3d9b9e6ec..75257d886b 100644
--- a/test/sanitizer_suppressions/ubsan
+++ b/test/sanitizer_suppressions/ubsan
@@ -1,6 +1,5 @@
# -fsanitize=undefined suppressions
# =================================
-float-divide-by-zero:policy/fees.cpp
float-divide-by-zero:validation.cpp
float-divide-by-zero:wallet/wallet.cpp