Diffstat (limited to 'test')
-rw-r--r--  test/README.md | 4
-rwxr-xr-x  test/functional/example_test.py | 4
-rwxr-xr-x  test/functional/feature_backwards_compatibility.py | 127
-rwxr-xr-x  test/functional/feature_loadblock.py | 8
-rwxr-xr-x  test/functional/feature_maxuploadtarget.py | 12
-rwxr-xr-x  test/functional/feature_notifications.py | 68
-rwxr-xr-x  test/functional/feature_reindex.py | 6
-rwxr-xr-x  test/functional/feature_segwit.py | 8
-rwxr-xr-x  test/functional/framework_test_script.py | 44
-rwxr-xr-x  test/functional/mempool_packages.py | 10
-rwxr-xr-x  test/functional/mempool_persist.py | 50
-rwxr-xr-x  test/functional/mempool_reorg.py | 12
-rwxr-xr-x  test/functional/mempool_unbroadcast.py | 111
-rwxr-xr-x  test/functional/mempool_updatefromblock.py | 123
-rwxr-xr-x  test/functional/p2p_blockfilters.py | 134
-rwxr-xr-x  test/functional/p2p_blocksonly.py | 23
-rwxr-xr-x  test/functional/p2p_compactblocks.py | 15
-rwxr-xr-x  test/functional/p2p_feefilter.py | 4
-rwxr-xr-x  test/functional/p2p_filter.py | 31
-rwxr-xr-x  test/functional/p2p_fingerprint.py | 4
-rwxr-xr-x  test/functional/p2p_getdata.py | 51
-rwxr-xr-x  test/functional/p2p_invalid_messages.py | 23
-rwxr-xr-x  test/functional/p2p_leak.py | 7
-rwxr-xr-x  test/functional/p2p_leak_tx.py | 4
-rwxr-xr-x  test/functional/p2p_node_network_limited.py | 4
-rwxr-xr-x  test/functional/p2p_segwit.py | 16
-rwxr-xr-x  test/functional/p2p_sendheaders.py | 5
-rwxr-xr-x  test/functional/p2p_tx_download.py | 10
-rwxr-xr-x  test/functional/p2p_unrequested_blocks.py | 4
-rwxr-xr-x  test/functional/rpc_createmultisig.py | 41
-rwxr-xr-x  test/functional/rpc_psbt.py | 42
-rwxr-xr-x  test/functional/rpc_users.py | 32
-rw-r--r--  test/functional/test_framework/key.py | 13
-rwxr-xr-x  test/functional/test_framework/messages.py | 58
-rwxr-xr-x  test/functional/test_framework/mininode.py | 65
-rw-r--r--  test/functional/test_framework/script.py | 31
-rwxr-xr-x  test/functional/test_framework/test_framework.py | 85
-rwxr-xr-x  test/functional/test_framework/test_node.py | 148
-rw-r--r--  test/functional/test_framework/util.py | 18
-rwxr-xr-x  test/functional/test_framework/wallet_util.py | 23
-rwxr-xr-x  test/functional/test_runner.py | 34
-rwxr-xr-x  test/functional/tool_wallet.py | 14
-rwxr-xr-x  test/functional/wallet_abandonconflict.py | 7
-rwxr-xr-x  test/functional/wallet_avoidreuse.py | 77
-rwxr-xr-x  test/functional/wallet_basic.py | 95
-rwxr-xr-x  test/functional/wallet_bumpfee.py | 28
-rwxr-xr-x  test/functional/wallet_descriptor.py | 144
-rwxr-xr-x  test/functional/wallet_encryption.py | 24
-rwxr-xr-x  test/functional/wallet_hd.py | 209
-rwxr-xr-x  test/functional/wallet_importdescriptors.py | 445
-rwxr-xr-x  test/functional/wallet_importmulti.py | 9
-rwxr-xr-x  test/functional/wallet_keypool.py | 78
-rwxr-xr-x  test/functional/wallet_keypool_topup.py | 10
-rwxr-xr-x  test/functional/wallet_labels.py | 19
-rwxr-xr-x  test/functional/wallet_multiwallet.py | 6
-rwxr-xr-x  test/functional/wallet_resendwallettransactions.py | 28
-rwxr-xr-x  test/functional/wallet_txn_clone.py | 2
-rwxr-xr-x  test/functional/wallet_upgradewallet.py | 143
-rwxr-xr-x  test/fuzz/test_runner.py | 74
-rwxr-xr-x  test/lint/lint-shell.sh | 12
60 files changed, 2437 insertions, 499 deletions
diff --git a/test/README.md b/test/README.md
index e1dab92a06..0210907878 100644
--- a/test/README.md
+++ b/test/README.md
@@ -225,6 +225,10 @@ gdb /home/example/bitcoind <pid>
Note: gdb attach step may require ptrace_scope to be modified, or `sudo` preceding the `gdb`.
See this link for considerations: https://www.kernel.org/doc/Documentation/security/Yama.txt
+Often while debugging RPC calls from functional tests, the test might time out before the
+process can return a response. Use `--timeout-factor 0` to disable all RPC timeouts for that particular
+functional test. Ex: `test/functional/wallet_hd.py --timeout-factor 0`.
+
##### Profiling
An easy way to profile node performance during functional tests is provided
diff --git a/test/functional/example_test.py b/test/functional/example_test.py
index 70dfe81d4e..5d782026dc 100755
--- a/test/functional/example_test.py
+++ b/test/functional/example_test.py
@@ -15,7 +15,7 @@ from collections import defaultdict
# Avoid wildcard * imports
from test_framework.blocktools import (create_block, create_coinbase)
-from test_framework.messages import CInv
+from test_framework.messages import CInv, MSG_BLOCK
from test_framework.mininode import (
P2PInterface,
mininode_lock,
@@ -198,7 +198,7 @@ class ExampleTest(BitcoinTestFramework):
getdata_request = msg_getdata()
for block in blocks:
- getdata_request.inv.append(CInv(2, block))
+ getdata_request.inv.append(CInv(MSG_BLOCK, block))
self.nodes[2].p2p.send_message(getdata_request)
# wait_until() will loop until a predicate condition is met. Use it to test properties of the
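A minimal sketch of the pattern this hunk adopts: requesting blocks by the named MSG_BLOCK inv type rather than the magic number 2. It assumes only the CInv/msg_getdata signatures already used in the test and is illustrative, not part of the patch.

    from test_framework.messages import CInv, MSG_BLOCK, msg_getdata

    def build_block_getdata(block_hashes):
        # Request each block hash (an int) by its named inv type rather than "2".
        request = msg_getdata()
        for block_hash in block_hashes:
            request.inv.append(CInv(MSG_BLOCK, block_hash))
        return request
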
diff --git a/test/functional/feature_backwards_compatibility.py b/test/functional/feature_backwards_compatibility.py
index 75e0b93c83..596ff206f2 100755
--- a/test/functional/feature_backwards_compatibility.py
+++ b/test/functional/feature_backwards_compatibility.py
@@ -6,7 +6,11 @@
Test various backwards compatibility scenarios. Download the previous node binaries:
-contrib/devtools/previous_release.sh -b v0.19.0.1 v0.18.1 v0.17.1
+contrib/devtools/previous_release.sh -b v0.19.1 v0.18.1 v0.17.1 v0.16.3 v0.15.2
+
+v0.15.2 is not required by this test, but it is used in wallet_upgradewallet.py.
+Due to a hardfork in regtest, it can't be used to sync nodes.
+
Due to RPC changes introduced in various versions the below tests
won't work for older versions without some patches or workarounds.
@@ -18,60 +22,46 @@ needs an older patch version.
import os
import shutil
-from test_framework.test_framework import BitcoinTestFramework, SkipTest
+from test_framework.test_framework import BitcoinTestFramework
from test_framework.descriptors import descsum_create
from test_framework.util import (
+ adjust_bitcoin_conf_for_pre_17,
assert_equal,
sync_blocks,
- sync_mempools
+ sync_mempools,
)
+
class BackwardsCompatibilityTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
- self.num_nodes = 5
+ self.num_nodes = 6
# Add new version after each release:
self.extra_args = [
["-addresstype=bech32"], # Pre-release: use to mine blocks
["-nowallet", "-walletrbf=1", "-addresstype=bech32"], # Pre-release: use to receive coins, swap wallets, etc
- ["-nowallet", "-walletrbf=1", "-addresstype=bech32"], # v0.19.0.1
+ ["-nowallet", "-walletrbf=1", "-addresstype=bech32"], # v0.19.1
["-nowallet", "-walletrbf=1", "-addresstype=bech32"], # v0.18.1
- ["-nowallet", "-walletrbf=1", "-addresstype=bech32"] # v0.17.1
+ ["-nowallet", "-walletrbf=1", "-addresstype=bech32"], # v0.17.1
+ ["-nowallet", "-walletrbf=1", "-addresstype=bech32"], # v0.16.3
]
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
+ self.skip_if_no_previous_releases()
def setup_nodes(self):
- if os.getenv("TEST_PREVIOUS_RELEASES") == "false":
- raise SkipTest("backwards compatibility tests")
-
- releases_path = os.getenv("PREVIOUS_RELEASES_DIR") or os.getcwd() + "/releases"
- if not os.path.isdir(releases_path):
- if os.getenv("TEST_PREVIOUS_RELEASES") == "true":
- raise AssertionError("TEST_PREVIOUS_RELEASES=1 but releases missing: " + releases_path)
- raise SkipTest("This test requires binaries for previous releases")
-
self.add_nodes(self.num_nodes, extra_args=self.extra_args, versions=[
None,
None,
- 190000,
+ 190100,
180100,
- 170100
- ], binary=[
- self.options.bitcoind,
- self.options.bitcoind,
- releases_path + "/v0.19.0.1/bin/bitcoind",
- releases_path + "/v0.18.1/bin/bitcoind",
- releases_path + "/v0.17.1/bin/bitcoind"
- ], binary_cli=[
- self.options.bitcoincli,
- self.options.bitcoincli,
- releases_path + "/v0.19.0.1/bin/bitcoin-cli",
- releases_path + "/v0.18.1/bin/bitcoin-cli",
- releases_path + "/v0.17.1/bin/bitcoin-cli"
+ 170100,
+ 160300,
])
+ # adapt bitcoin.conf, because older bitcoind versions don't recognize config sections
+ adjust_bitcoin_conf_for_pre_17(self.nodes[5].bitcoinconf)
self.start_nodes()
@@ -84,17 +74,18 @@ class BackwardsCompatibilityTest(BitcoinTestFramework):
res = self.nodes[self.num_nodes - 1].getblockchaininfo()
assert_equal(res['blocks'], 101)
- node_master = self.nodes[self.num_nodes - 4]
- node_v19 = self.nodes[self.num_nodes - 3]
- node_v18 = self.nodes[self.num_nodes - 2]
- node_v17 = self.nodes[self.num_nodes - 1]
+ node_master = self.nodes[self.num_nodes - 5]
+ node_v19 = self.nodes[self.num_nodes - 4]
+ node_v18 = self.nodes[self.num_nodes - 3]
+ node_v17 = self.nodes[self.num_nodes - 2]
+ node_v16 = self.nodes[self.num_nodes - 1]
self.log.info("Test wallet backwards compatibility...")
# Create a number of wallets and open them in older versions:
# w1: regular wallet, created on master: update this test when default
# wallets can no longer be opened by older versions.
- node_master.createwallet(wallet_name="w1")
+ node_master.rpc.createwallet(wallet_name="w1")
wallet = node_master.get_wallet_rpc("w1")
info = wallet.getwalletinfo()
assert info['private_keys_enabled']
@@ -120,17 +111,17 @@ class BackwardsCompatibilityTest(BitcoinTestFramework):
self.nodes[1].abandontransaction(tx3_id)
# w1_v19: regular wallet, created with v0.19
- node_v19.createwallet(wallet_name="w1_v19")
+ node_v19.rpc.createwallet(wallet_name="w1_v19")
wallet = node_v19.get_wallet_rpc("w1_v19")
info = wallet.getwalletinfo()
assert info['private_keys_enabled']
assert info['keypoolsize'] > 0
# Use addmultisigaddress (see #18075)
- address_18075 = wallet.addmultisigaddress(1, ["0296b538e853519c726a2c91e61ec11600ae1390813a627c66fb8be7947be63c52", "037211a824f55b505228e4c3d5194c1fcfaa15a456abdf37f9b9d97a4040afc073"], "", "legacy")["address"]
+ address_18075 = wallet.rpc.addmultisigaddress(1, ["0296b538e853519c726a2c91e61ec11600ae1390813a627c66fb8be7947be63c52", "037211a824f55b505228e4c3d5194c1fcfaa15a456abdf37f9b9d97a4040afc073"], "", "legacy")["address"]
assert wallet.getaddressinfo(address_18075)["solvable"]
# w1_v18: regular wallet, created with v0.18
- node_v18.createwallet(wallet_name="w1_v18")
+ node_v18.rpc.createwallet(wallet_name="w1_v18")
wallet = node_v18.get_wallet_rpc("w1_v18")
info = wallet.getwalletinfo()
assert info['private_keys_enabled']
@@ -139,21 +130,21 @@ class BackwardsCompatibilityTest(BitcoinTestFramework):
# w2: wallet with private keys disabled, created on master: update this
# test when default wallets private keys disabled can no longer be
# opened by older versions.
- node_master.createwallet(wallet_name="w2", disable_private_keys=True)
+ node_master.rpc.createwallet(wallet_name="w2", disable_private_keys=True)
wallet = node_master.get_wallet_rpc("w2")
info = wallet.getwalletinfo()
assert info['private_keys_enabled'] == False
assert info['keypoolsize'] == 0
# w2_v19: wallet with private keys disabled, created with v0.19
- node_v19.createwallet(wallet_name="w2_v19", disable_private_keys=True)
+ node_v19.rpc.createwallet(wallet_name="w2_v19", disable_private_keys=True)
wallet = node_v19.get_wallet_rpc("w2_v19")
info = wallet.getwalletinfo()
assert info['private_keys_enabled'] == False
assert info['keypoolsize'] == 0
# w2_v18: wallet with private keys disabled, created with v0.18
- node_v18.createwallet(wallet_name="w2_v18", disable_private_keys=True)
+ node_v18.rpc.createwallet(wallet_name="w2_v18", disable_private_keys=True)
wallet = node_v18.get_wallet_rpc("w2_v18")
info = wallet.getwalletinfo()
assert info['private_keys_enabled'] == False
@@ -161,21 +152,21 @@ class BackwardsCompatibilityTest(BitcoinTestFramework):
# w3: blank wallet, created on master: update this
# test when default blank wallets can no longer be opened by older versions.
- node_master.createwallet(wallet_name="w3", blank=True)
+ node_master.rpc.createwallet(wallet_name="w3", blank=True)
wallet = node_master.get_wallet_rpc("w3")
info = wallet.getwalletinfo()
assert info['private_keys_enabled']
assert info['keypoolsize'] == 0
# w3_v19: blank wallet, created with v0.19
- node_v19.createwallet(wallet_name="w3_v19", blank=True)
+ node_v19.rpc.createwallet(wallet_name="w3_v19", blank=True)
wallet = node_v19.get_wallet_rpc("w3_v19")
info = wallet.getwalletinfo()
assert info['private_keys_enabled']
assert info['keypoolsize'] == 0
# w3_v18: blank wallet, created with v0.18
- node_v18.createwallet(wallet_name="w3_v18", blank=True)
+ node_v18.rpc.createwallet(wallet_name="w3_v18", blank=True)
wallet = node_v18.get_wallet_rpc("w3_v18")
info = wallet.getwalletinfo()
assert info['private_keys_enabled']
@@ -186,6 +177,7 @@ class BackwardsCompatibilityTest(BitcoinTestFramework):
node_v19_wallets_dir = os.path.join(node_v19.datadir, "regtest/wallets")
node_v18_wallets_dir = os.path.join(node_v18.datadir, "regtest/wallets")
node_v17_wallets_dir = os.path.join(node_v17.datadir, "regtest/wallets")
+ node_v16_wallets_dir = os.path.join(node_v16.datadir, "regtest")
node_master.unloadwallet("w1")
node_master.unloadwallet("w2")
node_v19.unloadwallet("w1_v19")
@@ -193,6 +185,13 @@ class BackwardsCompatibilityTest(BitcoinTestFramework):
node_v18.unloadwallet("w1_v18")
node_v18.unloadwallet("w2_v18")
+ # Copy wallets to v0.16
+ for wallet in os.listdir(node_master_wallets_dir):
+ shutil.copytree(
+ os.path.join(node_master_wallets_dir, wallet),
+ os.path.join(node_v16_wallets_dir, wallet)
+ )
+
# Copy wallets to v0.17
for wallet in os.listdir(node_master_wallets_dir):
shutil.copytree(
@@ -311,19 +310,26 @@ class BackwardsCompatibilityTest(BitcoinTestFramework):
# assert_raises_rpc_error(-4, "Wallet loading failed.", node_v17.loadwallet, 'w3_v18')
# Instead, we stop node and try to launch it with the wallet:
- self.stop_node(self.num_nodes - 1)
+ self.stop_node(4)
node_v17.assert_start_raises_init_error(["-wallet=w3_v18"], "Error: Error loading w3_v18: Wallet requires newer version of Bitcoin Core")
node_v17.assert_start_raises_init_error(["-wallet=w3"], "Error: Error loading w3: Wallet requires newer version of Bitcoin Core")
- self.start_node(self.num_nodes - 1)
+ self.start_node(4)
+
+ # Open most recent wallet in v0.16 (no loadwallet RPC)
+ self.stop_node(5)
+ self.start_node(5, extra_args=["-wallet=w2"])
+ wallet = node_v16.get_wallet_rpc("w2")
+ info = wallet.getwalletinfo()
+ assert info['keypoolsize'] == 1
self.log.info("Test wallet upgrade path...")
# u1: regular wallet, created with v0.17
- node_v17.createwallet(wallet_name="u1_v17")
+ node_v17.rpc.createwallet(wallet_name="u1_v17")
wallet = node_v17.get_wallet_rpc("u1_v17")
address = wallet.getnewaddress("bech32")
- info = wallet.getaddressinfo(address)
- hdkeypath = info["hdkeypath"]
- pubkey = info["pubkey"]
+ v17_info = wallet.getaddressinfo(address)
+ hdkeypath = v17_info["hdkeypath"]
+ pubkey = v17_info["pubkey"]
# Copy the 0.17 wallet to the last Bitcoin Core version and open it:
node_v17.unloadwallet("u1_v17")
@@ -337,6 +343,18 @@ class BackwardsCompatibilityTest(BitcoinTestFramework):
descriptor = "wpkh([" + info["hdmasterfingerprint"] + hdkeypath[1:] + "]" + pubkey + ")"
assert_equal(info["desc"], descsum_create(descriptor))
+ # Now copy that same wallet back to 0.17 to make sure no automatic upgrade breaks it
+ node_master.unloadwallet("u1_v17")
+ shutil.rmtree(os.path.join(node_v17_wallets_dir, "u1_v17"))
+ shutil.copytree(
+ os.path.join(node_master_wallets_dir, "u1_v17"),
+ os.path.join(node_v17_wallets_dir, "u1_v17")
+ )
+ node_v17.loadwallet("u1_v17")
+ wallet = node_v17.get_wallet_rpc("u1_v17")
+ info = wallet.getaddressinfo(address)
+ assert_equal(info, v17_info)
+
# Copy the 0.19 wallet to the last Bitcoin Core version and open it:
shutil.copytree(
os.path.join(node_v19_wallets_dir, "w1_v19"),
@@ -346,5 +364,16 @@ class BackwardsCompatibilityTest(BitcoinTestFramework):
wallet = node_master.get_wallet_rpc("w1_v19")
assert wallet.getaddressinfo(address_18075)["solvable"]
+ # Now copy that same wallet back to 0.19 to make sure no automatic upgrade breaks it
+ node_master.unloadwallet("w1_v19")
+ shutil.rmtree(os.path.join(node_v19_wallets_dir, "w1_v19"))
+ shutil.copytree(
+ os.path.join(node_master_wallets_dir, "w1_v19"),
+ os.path.join(node_v19_wallets_dir, "w1_v19")
+ )
+ node_v19.loadwallet("w1_v19")
+ wallet = node_v19.get_wallet_rpc("w1_v19")
+ assert wallet.getaddressinfo(address_18075)["solvable"]
+
if __name__ == '__main__':
BackwardsCompatibilityTest().main()
diff --git a/test/functional/feature_loadblock.py b/test/functional/feature_loadblock.py
index 1138b0f0ea..82f1331685 100755
--- a/test/functional/feature_loadblock.py
+++ b/test/functional/feature_loadblock.py
@@ -16,10 +16,8 @@ import sys
import tempfile
import urllib
-from test_framework.test_framework import (
- BitcoinTestFramework,
-)
-from test_framework.util import assert_equal, wait_until
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import assert_equal
class LoadblockTest(BitcoinTestFramework):
@@ -75,7 +73,7 @@ class LoadblockTest(BitcoinTestFramework):
self.log.info("Restart second, unsynced node with bootstrap file")
self.stop_node(1)
self.start_node(1, ["-loadblock=" + bootstrap_file])
- wait_until(lambda: self.nodes[1].getblockcount() == 100)
+ assert_equal(self.nodes[1].getblockcount(), 100) # start_node is blocking on all block files being imported
assert_equal(self.nodes[1].getblockchaininfo()['blocks'], 100)
assert_equal(self.nodes[0].getbestblockhash(), self.nodes[1].getbestblockhash())
diff --git a/test/functional/feature_maxuploadtarget.py b/test/functional/feature_maxuploadtarget.py
index d4a8f8a715..9579a1715d 100755
--- a/test/functional/feature_maxuploadtarget.py
+++ b/test/functional/feature_maxuploadtarget.py
@@ -13,7 +13,7 @@ if uploadtarget has been reached.
from collections import defaultdict
import time
-from test_framework.messages import CInv, msg_getdata
+from test_framework.messages import CInv, MSG_BLOCK, msg_getdata
from test_framework.mininode import P2PInterface
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, mine_large_block
@@ -84,7 +84,7 @@ class MaxUploadTest(BitcoinTestFramework):
# the same big old block too many times (expect: disconnect)
getdata_request = msg_getdata()
- getdata_request.inv.append(CInv(2, big_old_block))
+ getdata_request.inv.append(CInv(MSG_BLOCK, big_old_block))
max_bytes_per_day = 800*1024*1024
daily_buffer = 144 * 4000000
@@ -109,7 +109,7 @@ class MaxUploadTest(BitcoinTestFramework):
# Requesting the current block on p2p_conns[1] should succeed indefinitely,
# even when over the max upload target.
# We'll try 800 times
- getdata_request.inv = [CInv(2, big_new_block)]
+ getdata_request.inv = [CInv(MSG_BLOCK, big_new_block)]
for i in range(800):
p2p_conns[1].send_and_ping(getdata_request)
assert_equal(p2p_conns[1].block_receive_map[big_new_block], i+1)
@@ -117,7 +117,7 @@ class MaxUploadTest(BitcoinTestFramework):
self.log.info("Peer 1 able to repeatedly download new block")
# But if p2p_conns[1] tries for an old block, it gets disconnected too.
- getdata_request.inv = [CInv(2, big_old_block)]
+ getdata_request.inv = [CInv(MSG_BLOCK, big_old_block)]
p2p_conns[1].send_message(getdata_request)
p2p_conns[1].wait_for_disconnect()
assert_equal(len(self.nodes[0].getpeerinfo()), 1)
@@ -145,12 +145,12 @@ class MaxUploadTest(BitcoinTestFramework):
self.nodes[0].add_p2p_connection(TestP2PConn())
#retrieve 20 blocks which should be enough to break the 1MB limit
- getdata_request.inv = [CInv(2, big_new_block)]
+ getdata_request.inv = [CInv(MSG_BLOCK, big_new_block)]
for i in range(20):
self.nodes[0].p2p.send_and_ping(getdata_request)
assert_equal(self.nodes[0].p2p.block_receive_map[big_new_block], i+1)
- getdata_request.inv = [CInv(2, big_old_block)]
+ getdata_request.inv = [CInv(MSG_BLOCK, big_old_block)]
self.nodes[0].p2p.send_and_ping(getdata_request)
assert_equal(len(self.nodes[0].getpeerinfo()), 1) #node is still connected because of the whitelist
diff --git a/test/functional/feature_notifications.py b/test/functional/feature_notifications.py
index b110a559c0..47200b6cc6 100755
--- a/test/functional/feature_notifications.py
+++ b/test/functional/feature_notifications.py
@@ -5,12 +5,14 @@
"""Test the -alertnotify, -blocknotify and -walletnotify options."""
import os
-from test_framework.address import ADDRESS_BCRT1_UNSPENDABLE
+from test_framework.address import ADDRESS_BCRT1_UNSPENDABLE, keyhash_to_p2pkh
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
wait_until,
connect_nodes,
+ disconnect_nodes,
+ hex_str_to_bytes,
)
# Linux allow all characters other than \x00
@@ -81,8 +83,72 @@ class NotificationsTest(BitcoinTestFramework):
# directory content should equal the generated transaction hashes
txids_rpc = list(map(lambda t: notify_outputname(self.wallet, t['txid']), self.nodes[1].listtransactions("*", block_count)))
assert_equal(sorted(txids_rpc), sorted(os.listdir(self.walletnotify_dir)))
+ for tx_file in os.listdir(self.walletnotify_dir):
+ os.remove(os.path.join(self.walletnotify_dir, tx_file))
+
+ # Conflicting transactions tests. Give node 0 same wallet seed as
+ # node 1, generate spends from node 0, and check notifications
+ # triggered by node 1
+ self.log.info("test -walletnotify with conflicting transactions")
+ self.nodes[0].sethdseed(seed=self.nodes[1].dumpprivkey(keyhash_to_p2pkh(hex_str_to_bytes(self.nodes[1].getwalletinfo()['hdseedid'])[::-1])))
+ self.nodes[0].rescanblockchain()
+ self.nodes[0].generatetoaddress(100, ADDRESS_BCRT1_UNSPENDABLE)
+
+ # Generate transaction on node 0, sync mempools, and check for
+ # notification on node 1.
+ tx1 = self.nodes[0].sendtoaddress(address=ADDRESS_BCRT1_UNSPENDABLE, amount=1, replaceable=True)
+ assert_equal(tx1 in self.nodes[0].getrawmempool(), True)
+ self.sync_mempools()
+ self.expect_wallet_notify([tx1])
+
+ # Generate bump transaction, sync mempools, and check for bump1
+ # notification. In the future, per
+ # https://github.com/bitcoin/bitcoin/pull/9371, it might be better
+ # to have notifications for both tx1 and bump1.
+ bump1 = self.nodes[0].bumpfee(tx1)["txid"]
+ assert_equal(bump1 in self.nodes[0].getrawmempool(), True)
+ self.sync_mempools()
+ self.expect_wallet_notify([bump1])
+
+ # Add bump1 transaction to new block, checking for a notification
+ # and the correct number of confirmations.
+ self.nodes[0].generatetoaddress(1, ADDRESS_BCRT1_UNSPENDABLE)
+ self.sync_blocks()
+ self.expect_wallet_notify([bump1])
+ assert_equal(self.nodes[1].gettransaction(bump1)["confirmations"], 1)
+
+ # Generate a second transaction to be bumped.
+ tx2 = self.nodes[0].sendtoaddress(address=ADDRESS_BCRT1_UNSPENDABLE, amount=1, replaceable=True)
+ assert_equal(tx2 in self.nodes[0].getrawmempool(), True)
+ self.sync_mempools()
+ self.expect_wallet_notify([tx2])
+
+ # Bump tx2 as bump2 and generate a block on node 0 while
+ # disconnected, then reconnect and check for notifications on node 1
+ # about newly confirmed bump2 and newly conflicted tx2. Currently
+ # only the bump2 notification is sent. Ideally, notifications would
+ # be sent both for bump2 and tx2, which was the previous behavior
+ # before being broken by an accidental change in PR
+ # https://github.com/bitcoin/bitcoin/pull/16624. The bug is reported
+ # in issue https://github.com/bitcoin/bitcoin/issues/18325.
+ disconnect_nodes(self.nodes[0], 1)
+ bump2 = self.nodes[0].bumpfee(tx2)["txid"]
+ self.nodes[0].generatetoaddress(1, ADDRESS_BCRT1_UNSPENDABLE)
+ assert_equal(self.nodes[0].gettransaction(bump2)["confirmations"], 1)
+ assert_equal(tx2 in self.nodes[1].getrawmempool(), True)
+ connect_nodes(self.nodes[0], 1)
+ self.sync_blocks()
+ self.expect_wallet_notify([bump2])
+ assert_equal(self.nodes[1].gettransaction(bump2)["confirmations"], 1)
# TODO: add test for `-alertnotify` large fork notifications
+ def expect_wallet_notify(self, tx_ids):
+ wait_until(lambda: len(os.listdir(self.walletnotify_dir)) >= len(tx_ids), timeout=10)
+ assert_equal(sorted(notify_outputname(self.wallet, tx_id) for tx_id in tx_ids), sorted(os.listdir(self.walletnotify_dir)))
+ for tx_file in os.listdir(self.walletnotify_dir):
+ os.remove(os.path.join(self.walletnotify_dir, tx_file))
+
+
if __name__ == '__main__':
NotificationsTest().main()
diff --git a/test/functional/feature_reindex.py b/test/functional/feature_reindex.py
index 940b403f9c..31cea8d1b7 100755
--- a/test/functional/feature_reindex.py
+++ b/test/functional/feature_reindex.py
@@ -10,10 +10,10 @@
"""
from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import wait_until
+from test_framework.util import assert_equal
-class ReindexTest(BitcoinTestFramework):
+class ReindexTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
@@ -24,7 +24,7 @@ class ReindexTest(BitcoinTestFramework):
self.stop_nodes()
extra_args = [["-reindex-chainstate" if justchainstate else "-reindex"]]
self.start_nodes(extra_args)
- wait_until(lambda: self.nodes[0].getblockcount() == blockcount)
+ assert_equal(self.nodes[0].getblockcount(), blockcount) # start_node is blocking on reindex
self.log.info("Success")
def run_test(self):
diff --git a/test/functional/feature_segwit.py b/test/functional/feature_segwit.py
index fdd86310c0..24c357091f 100755
--- a/test/functional/feature_segwit.py
+++ b/test/functional/feature_segwit.py
@@ -108,12 +108,7 @@ class SegWitTest(BitcoinTestFramework):
assert tmpl['sigoplimit'] == 20000
assert tmpl['transactions'][0]['hash'] == txid
assert tmpl['transactions'][0]['sigops'] == 2
- tmpl = self.nodes[0].getblocktemplate({'rules': ['segwit']})
- assert tmpl['sizelimit'] == 1000000
- assert 'weightlimit' not in tmpl
- assert tmpl['sigoplimit'] == 20000
- assert tmpl['transactions'][0]['hash'] == txid
- assert tmpl['transactions'][0]['sigops'] == 2
+ assert '!segwit' not in tmpl['rules']
self.nodes[0].generate(1) # block 162
balance_presetup = self.nodes[0].getbalance()
@@ -213,6 +208,7 @@ class SegWitTest(BitcoinTestFramework):
assert tmpl['sigoplimit'] == 80000
assert tmpl['transactions'][0]['txid'] == txid
assert tmpl['transactions'][0]['sigops'] == 8
+ assert '!segwit' in tmpl['rules']
self.nodes[0].generate(1) # Mine a block to clear the gbt cache
diff --git a/test/functional/framework_test_script.py b/test/functional/framework_test_script.py
deleted file mode 100755
index 9d916c0022..0000000000
--- a/test/functional/framework_test_script.py
+++ /dev/null
@@ -1,44 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (c) 2020 The Bitcoin Core developers
-# Distributed under the MIT software license, see the accompanying
-# file COPYING or http://www.opensource.org/licenses/mit-license.php.
-"""Tests for test_framework.script."""
-
-from test_framework.test_framework import BitcoinTestFramework
-from test_framework.script import bn2vch
-from test_framework.util import assert_equal
-
-def test_bn2vch():
- assert_equal(bn2vch(0), bytes([]))
- assert_equal(bn2vch(1), bytes([0x01]))
- assert_equal(bn2vch(-1), bytes([0x81]))
- assert_equal(bn2vch(0x7F), bytes([0x7F]))
- assert_equal(bn2vch(-0x7F), bytes([0xFF]))
- assert_equal(bn2vch(0x80), bytes([0x80, 0x00]))
- assert_equal(bn2vch(-0x80), bytes([0x80, 0x80]))
- assert_equal(bn2vch(0xFF), bytes([0xFF, 0x00]))
- assert_equal(bn2vch(-0xFF), bytes([0xFF, 0x80]))
- assert_equal(bn2vch(0x100), bytes([0x00, 0x01]))
- assert_equal(bn2vch(-0x100), bytes([0x00, 0x81]))
- assert_equal(bn2vch(0x7FFF), bytes([0xFF, 0x7F]))
- assert_equal(bn2vch(-0x8000), bytes([0x00, 0x80, 0x80]))
- assert_equal(bn2vch(-0x7FFFFF), bytes([0xFF, 0xFF, 0xFF]))
- assert_equal(bn2vch(0x80000000), bytes([0x00, 0x00, 0x00, 0x80, 0x00]))
- assert_equal(bn2vch(-0x80000000), bytes([0x00, 0x00, 0x00, 0x80, 0x80]))
- assert_equal(bn2vch(0xFFFFFFFF), bytes([0xFF, 0xFF, 0xFF, 0xFF, 0x00]))
-
- assert_equal(bn2vch(123456789), bytes([0x15, 0xCD, 0x5B, 0x07]))
- assert_equal(bn2vch(-54321), bytes([0x31, 0xD4, 0x80]))
-
-class FrameworkTestScript(BitcoinTestFramework):
- def setup_network(self):
- pass
-
- def set_test_params(self):
- self.num_nodes = 0
-
- def run_test(self):
- test_bn2vch()
-
-if __name__ == '__main__':
- FrameworkTestScript().main()
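For reference, the deleted vectors above exercise bn2vch, the minimally-encoded script-number serialization: little-endian magnitude with the sign carried in the top bit. A sketch consistent with those vectors (an illustration, not necessarily the exact test_framework.script implementation):

    def bn2vch(v):
        # 0 encodes as the empty vector.
        if v == 0:
            return bytes([])
        neg = v < 0
        absv = abs(v)
        result = []
        while absv:
            result.append(absv & 0xFF)  # little-endian magnitude
            absv >>= 8
        if result[-1] & 0x80:
            # Top bit already used by the magnitude: append a dedicated sign byte.
            result.append(0x80 if neg else 0x00)
        elif neg:
            # Otherwise carry the sign in the top bit of the last byte.
            result[-1] |= 0x80
        return bytes(result)

    assert bn2vch(-0x8000) == bytes([0x00, 0x80, 0x80])
    assert bn2vch(123456789) == bytes([0x15, 0xCD, 0x5B, 0x07])
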
diff --git a/test/functional/mempool_packages.py b/test/functional/mempool_packages.py
index a07dad18d6..5b7216b253 100755
--- a/test/functional/mempool_packages.py
+++ b/test/functional/mempool_packages.py
@@ -7,6 +7,7 @@
from decimal import Decimal
from test_framework.messages import COIN
+from test_framework.mininode import P2PTxInvStore
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
@@ -58,6 +59,7 @@ class MempoolPackagesTest(BitcoinTestFramework):
def run_test(self):
# Mine some blocks and have them mature.
+ self.nodes[0].add_p2p_connection(P2PTxInvStore()) # keep track of invs
self.nodes[0].generate(101)
utxo = self.nodes[0].listunspent(10)
txid = utxo[0]['txid']
@@ -72,6 +74,10 @@ class MempoolPackagesTest(BitcoinTestFramework):
value = sent_value
chain.append(txid)
+ # Wait until mempool transactions have passed initial broadcast (sent inv and received getdata)
+ # Otherwise, getrawmempool may be inconsistent with getmempoolentry if unbroadcast changes in between
+ self.nodes[0].p2p.wait_for_broadcast(chain)
+
# Check mempool has MAX_ANCESTORS transactions in it, and descendant and ancestor
# count and fees should look correct
mempool = self.nodes[0].getrawmempool(True)
@@ -212,6 +218,10 @@ class MempoolPackagesTest(BitcoinTestFramework):
for tx in chain[:MAX_ANCESTORS_CUSTOM]:
assert tx in mempool1
# TODO: more detailed check of node1's mempool (fees etc.)
+ # check transaction unbroadcast info (should be false if in both mempools)
+ mempool = self.nodes[0].getrawmempool(True)
+ for tx in mempool:
+ assert_equal(mempool[tx]['unbroadcast'], False)
# TODO: test ancestor size limits
diff --git a/test/functional/mempool_persist.py b/test/functional/mempool_persist.py
index e1671624a8..3969da2eb0 100755
--- a/test/functional/mempool_persist.py
+++ b/test/functional/mempool_persist.py
@@ -40,10 +40,13 @@ import os
import time
from test_framework.test_framework import BitcoinTestFramework
+from test_framework.mininode import P2PTxInvStore
from test_framework.util import (
assert_equal,
assert_greater_than_or_equal,
assert_raises_rpc_error,
+ connect_nodes,
+ disconnect_nodes,
wait_until,
)
@@ -80,6 +83,11 @@ class MempoolPersistTest(BitcoinTestFramework):
assert_greater_than_or_equal(tx_creation_time, tx_creation_time_lower)
assert_greater_than_or_equal(tx_creation_time_higher, tx_creation_time)
+ # disconnect nodes & make a txn that remains in the unbroadcast set.
+ disconnect_nodes(self.nodes[0], 2)
+ self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), Decimal("12"))
+ connect_nodes(self.nodes[0], 2)
+
self.log.debug("Stop-start the nodes. Verify that node0 has the transactions in its mempool and node1 does not. Verify that node2 calculates its balance correctly after loading wallet transactions.")
self.stop_nodes()
# Give this node a head-start, so we can be "extra-sure" that it didn't load anything later
@@ -87,9 +95,9 @@ class MempoolPersistTest(BitcoinTestFramework):
self.start_node(1, extra_args=["-persistmempool=0"])
self.start_node(0)
self.start_node(2)
- wait_until(lambda: self.nodes[0].getmempoolinfo()["loaded"], timeout=1)
- wait_until(lambda: self.nodes[2].getmempoolinfo()["loaded"], timeout=1)
- assert_equal(len(self.nodes[0].getrawmempool()), 5)
+ assert self.nodes[0].getmempoolinfo()["loaded"] # start_node is blocking on the mempool being loaded
+ assert self.nodes[2].getmempoolinfo()["loaded"]
+ assert_equal(len(self.nodes[0].getrawmempool()), 6)
assert_equal(len(self.nodes[2].getrawmempool()), 5)
# The others have loaded their mempool. If node_1 loaded anything, we'd probably notice by now:
assert_equal(len(self.nodes[1].getrawmempool()), 0)
@@ -105,17 +113,18 @@ class MempoolPersistTest(BitcoinTestFramework):
self.nodes[2].syncwithvalidationinterfacequeue() # Flush mempool to wallet
assert_equal(node2_balance, self.nodes[2].getbalance())
+ # start node0 with wallet disabled so wallet transactions don't get resubmitted
self.log.debug("Stop-start node0 with -persistmempool=0. Verify that it doesn't load its mempool.dat file.")
self.stop_nodes()
- self.start_node(0, extra_args=["-persistmempool=0"])
- wait_until(lambda: self.nodes[0].getmempoolinfo()["loaded"])
+ self.start_node(0, extra_args=["-persistmempool=0", "-disablewallet"])
+ assert self.nodes[0].getmempoolinfo()["loaded"]
assert_equal(len(self.nodes[0].getrawmempool()), 0)
self.log.debug("Stop-start node0. Verify that it has the transactions in its mempool.")
self.stop_nodes()
self.start_node(0)
- wait_until(lambda: self.nodes[0].getmempoolinfo()["loaded"])
- assert_equal(len(self.nodes[0].getrawmempool()), 5)
+ assert self.nodes[0].getmempoolinfo()["loaded"]
+ assert_equal(len(self.nodes[0].getrawmempool()), 6)
mempooldat0 = os.path.join(self.nodes[0].datadir, self.chain, 'mempool.dat')
mempooldat1 = os.path.join(self.nodes[1].datadir, self.chain, 'mempool.dat')
@@ -124,12 +133,12 @@ class MempoolPersistTest(BitcoinTestFramework):
self.nodes[0].savemempool()
assert os.path.isfile(mempooldat0)
- self.log.debug("Stop nodes, make node1 use mempool.dat from node0. Verify it has 5 transactions")
+ self.log.debug("Stop nodes, make node1 use mempool.dat from node0. Verify it has 6 transactions")
os.rename(mempooldat0, mempooldat1)
self.stop_nodes()
self.start_node(1, extra_args=[])
- wait_until(lambda: self.nodes[1].getmempoolinfo()["loaded"])
- assert_equal(len(self.nodes[1].getrawmempool()), 5)
+ assert self.nodes[1].getmempoolinfo()["loaded"]
+ assert_equal(len(self.nodes[1].getrawmempool()), 6)
self.log.debug("Prevent bitcoind from writing mempool.dat to disk. Verify that `savemempool` fails")
# to test the exception we are creating a tmp folder called mempool.dat.new
@@ -139,6 +148,27 @@ class MempoolPersistTest(BitcoinTestFramework):
assert_raises_rpc_error(-1, "Unable to dump mempool to disk", self.nodes[1].savemempool)
os.rmdir(mempooldotnew1)
+ self.test_persist_unbroadcast()
+
+ def test_persist_unbroadcast(self):
+ node0 = self.nodes[0]
+ self.start_node(0)
+
+ # clear out mempool
+ node0.generate(1)
+
+ # disconnect nodes to make a txn that remains in the unbroadcast set.
+ disconnect_nodes(node0, 1)
+ node0.sendtoaddress(self.nodes[1].getnewaddress(), Decimal("12"))
+
+ # shutdown, then startup with wallet disabled
+ self.stop_nodes()
+ self.start_node(0, extra_args=["-disablewallet"])
+
+ # check that txn gets broadcast due to unbroadcast logic
+ conn = node0.add_p2p_connection(P2PTxInvStore())
+ node0.mockscheduler(16*60) # 15 min + 1 for buffer
+ wait_until(lambda: len(conn.get_invs()) == 1)
if __name__ == '__main__':
MempoolPersistTest().main()
diff --git a/test/functional/mempool_reorg.py b/test/functional/mempool_reorg.py
index 8edfdc7a2a..8e1f87e42c 100755
--- a/test/functional/mempool_reorg.py
+++ b/test/functional/mempool_reorg.py
@@ -51,10 +51,14 @@ class MempoolCoinbaseTest(BitcoinTestFramework):
spend_103_raw = create_raw_transaction(self.nodes[0], coinbase_txids[3], node0_address, amount=49.99)
# Create a transaction which is time-locked to two blocks in the future
- timelock_tx = self.nodes[0].createrawtransaction([{"txid": coinbase_txids[0], "vout": 0}], {node0_address: 49.99})
- # Set the time lock
- timelock_tx = timelock_tx.replace("ffffffff", "11111191", 1)
- timelock_tx = timelock_tx[:-8] + hex(self.nodes[0].getblockcount() + 2)[2:] + "000000"
+ timelock_tx = self.nodes[0].createrawtransaction(
+ inputs=[{
+ "txid": coinbase_txids[0],
+ "vout": 0,
+ }],
+ outputs={node0_address: 49.99},
+ locktime=self.nodes[0].getblockcount() + 2,
+ )
timelock_tx = self.nodes[0].signrawtransactionwithwallet(timelock_tx)["hex"]
# This will raise an exception because the timelock transaction is too immature to spend
assert_raises_rpc_error(-26, "non-final", self.nodes[0].sendrawtransaction, timelock_tx)
diff --git a/test/functional/mempool_unbroadcast.py b/test/functional/mempool_unbroadcast.py
new file mode 100755
index 0000000000..dedf5b8a47
--- /dev/null
+++ b/test/functional/mempool_unbroadcast.py
@@ -0,0 +1,111 @@
+#!/usr/bin/env python3
+# Copyright (c) 2017-2020 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test that the mempool ensures transaction delivery by periodically sending
+to peers until a GETDATA is received."""
+
+import time
+
+from test_framework.mininode import P2PTxInvStore
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import (
+ assert_equal,
+ connect_nodes,
+ create_confirmed_utxos,
+ disconnect_nodes,
+)
+
+
+class MempoolUnbroadcastTest(BitcoinTestFramework):
+ def set_test_params(self):
+ self.num_nodes = 2
+
+ def skip_test_if_missing_module(self):
+ self.skip_if_no_wallet()
+
+ def run_test(self):
+ self.test_broadcast()
+ self.test_txn_removal()
+
+ def test_broadcast(self):
+ self.log.info("Test that mempool reattempts delivery of locally submitted transaction")
+ node = self.nodes[0]
+
+ min_relay_fee = node.getnetworkinfo()["relayfee"]
+ utxos = create_confirmed_utxos(min_relay_fee, node, 10)
+
+ disconnect_nodes(node, 1)
+
+ self.log.info("Generate transactions that only node 0 knows about")
+
+ # generate a wallet txn
+ addr = node.getnewaddress()
+ wallet_tx_hsh = node.sendtoaddress(addr, 0.0001)
+
+ # generate a txn using sendrawtransaction
+ us0 = utxos.pop()
+ inputs = [{"txid": us0["txid"], "vout": us0["vout"]}]
+ outputs = {addr: 0.0001}
+ tx = node.createrawtransaction(inputs, outputs)
+ node.settxfee(min_relay_fee)
+ txF = node.fundrawtransaction(tx)
+ txFS = node.signrawtransactionwithwallet(txF["hex"])
+ rpc_tx_hsh = node.sendrawtransaction(txFS["hex"])
+
+ # check transactions are in unbroadcast using rpc
+ mempoolinfo = self.nodes[0].getmempoolinfo()
+ assert_equal(mempoolinfo['unbroadcastcount'], 2)
+ mempool = self.nodes[0].getrawmempool(True)
+ for tx in mempool:
+ assert_equal(mempool[tx]['unbroadcast'], True)
+
+ # check that second node doesn't have these two txns
+ mempool = self.nodes[1].getrawmempool()
+ assert rpc_tx_hsh not in mempool
+ assert wallet_tx_hsh not in mempool
+
+ # ensure that unbroadcast txs are persisted to mempool.dat
+ self.restart_node(0)
+
+ self.log.info("Reconnect nodes & check if they are sent to node 1")
+ connect_nodes(node, 1)
+
+ # fast forward into the future & ensure that the second node has the txns
+ node.mockscheduler(15 * 60) # 15 min in seconds
+ self.sync_mempools(timeout=30)
+ mempool = self.nodes[1].getrawmempool()
+ assert rpc_tx_hsh in mempool
+ assert wallet_tx_hsh in mempool
+
+ # check that transactions are no longer in first node's unbroadcast set
+ mempool = self.nodes[0].getrawmempool(True)
+ for tx in mempool:
+ assert_equal(mempool[tx]['unbroadcast'], False)
+
+ self.log.info("Add another connection & ensure transactions aren't broadcast again")
+
+ conn = node.add_p2p_connection(P2PTxInvStore())
+ node.mockscheduler(15 * 60)
+ time.sleep(5)
+ assert_equal(len(conn.get_invs()), 0)
+
+ def test_txn_removal(self):
+ self.log.info("Test that transactions removed from mempool are removed from unbroadcast set")
+ node = self.nodes[0]
+ disconnect_nodes(node, 1)
+ node.disconnect_p2ps()
+
+ # since the node doesn't have any connections, it will not receive
+ # any GETDATAs & thus the transaction will remain in the unbroadcast set.
+ addr = node.getnewaddress()
+ txhsh = node.sendtoaddress(addr, 0.0001)
+
+ # check transaction was removed from unbroadcast set due to presence in
+ # a block
+ removal_reason = "Removed {} from set of unbroadcast txns before confirmation that txn was sent out".format(txhsh)
+ with node.assert_debug_log([removal_reason]):
+ node.generate(1)
+
+if __name__ == "__main__":
+ MempoolUnbroadcastTest().main()
diff --git a/test/functional/mempool_updatefromblock.py b/test/functional/mempool_updatefromblock.py
new file mode 100755
index 0000000000..8a703ef009
--- /dev/null
+++ b/test/functional/mempool_updatefromblock.py
@@ -0,0 +1,123 @@
+#!/usr/bin/env python3
+# Copyright (c) 2020 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test mempool descendants/ancestors information update.
+
+Test mempool update of transaction descendants/ancestors information (count, size)
+when transactions have been re-added from a disconnected block to the mempool.
+"""
+import time
+
+from decimal import Decimal
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import assert_equal
+
+
+class MempoolUpdateFromBlockTest(BitcoinTestFramework):
+ def set_test_params(self):
+ self.num_nodes = 1
+ self.extra_args = [['-limitdescendantsize=1000', '-limitancestorsize=1000']]
+
+ def skip_test_if_missing_module(self):
+ self.skip_if_no_wallet()
+
+ def transaction_graph_test(self, size, n_tx_to_mine=None, start_input_txid='', end_address='', fee=Decimal(0.00100000)):
+ """Create an acyclic tournament (a type of directed graph) of transactions and use it for testing.
+
+ Keyword arguments:
+ size -- the order N of the tournament which is equal to the number of the created transactions
+ n_tx_to_mine -- the number of transactions that should be mined into a block
+
+ If all of the N created transactions tx[0]..tx[N-1] reside in the mempool,
+ the following holds:
+ the tx[K] transaction:
+ - has N-K descendants (including this one), and
+ - has K+1 ancestors (including this one)
+
+ More details: https://en.wikipedia.org/wiki/Tournament_(graph_theory)
+ """
+
+ if not start_input_txid:
+ start_input_txid = self.nodes[0].getblock(self.nodes[0].getblockhash(1))['tx'][0]
+
+ if not end_address:
+ end_address = self.nodes[0].getnewaddress()
+
+ first_block_hash = ''
+ tx_id = []
+ tx_size = []
+ self.log.info('Creating {} transactions...'.format(size))
+ for i in range(0, size):
+ self.log.debug('Preparing transaction #{}...'.format(i))
+ # Prepare inputs.
+ if i == 0:
+ inputs = [{'txid': start_input_txid, 'vout': 0}]
+ inputs_value = self.nodes[0].gettxout(start_input_txid, 0)['value']
+ else:
+ inputs = []
+ inputs_value = 0
+ for j, tx in enumerate(tx_id[0:i]):
+ # Transaction tx[K] is a child of each of previous transactions tx[0]..tx[K-1] at their output K-1.
+ vout = i - j - 1
+ inputs.append({'txid': tx_id[j], 'vout': vout})
+ inputs_value += self.nodes[0].gettxout(tx, vout)['value']
+
+ self.log.debug('inputs={}'.format(inputs))
+ self.log.debug('inputs_value={}'.format(inputs_value))
+
+ # Prepare outputs.
+ tx_count = i + 1
+ if tx_count < size:
+ # Transaction tx[K] is an ancestor of each of subsequent transactions tx[K+1]..tx[N-1].
+ n_outputs = size - tx_count
+ output_value = ((inputs_value - fee) / Decimal(n_outputs)).quantize(Decimal('0.00000001'))
+ outputs = {}
+ for n in range(0, n_outputs):
+ outputs[self.nodes[0].getnewaddress()] = output_value
+ else:
+ output_value = (inputs_value - fee).quantize(Decimal('0.00000001'))
+ outputs = {end_address: output_value}
+
+ self.log.debug('output_value={}'.format(output_value))
+ self.log.debug('outputs={}'.format(outputs))
+
+ # Create a new transaction.
+ unsigned_raw_tx = self.nodes[0].createrawtransaction(inputs, outputs)
+ signed_raw_tx = self.nodes[0].signrawtransactionwithwallet(unsigned_raw_tx)
+ tx_id.append(self.nodes[0].sendrawtransaction(signed_raw_tx['hex']))
+ tx_size.append(self.nodes[0].getrawmempool(True)[tx_id[-1]]['vsize'])
+
+ if tx_count in n_tx_to_mine:
+ # The created transactions are mined into blocks by batches.
+ self.log.info('The batch of {} transactions has been accepted into the mempool.'.format(len(self.nodes[0].getrawmempool())))
+ block_hash = self.nodes[0].generate(1)[0]
+ if not first_block_hash:
+ first_block_hash = block_hash
+ assert_equal(len(self.nodes[0].getrawmempool()), 0)
+ self.log.info('All of the transactions from the current batch have been mined into a block.')
+ elif tx_count == size:
+ # At the end all of the mined blocks are invalidated, and all of the created
+ # transactions should be re-added from disconnected blocks to the mempool.
+ self.log.info('The last batch of {} transactions has been accepted into the mempool.'.format(len(self.nodes[0].getrawmempool())))
+ start = time.time()
+ self.nodes[0].invalidateblock(first_block_hash)
+ end = time.time()
+ assert_equal(len(self.nodes[0].getrawmempool()), size)
+ self.log.info('All of the recently mined transactions have been re-added into the mempool in {} seconds.'.format(end - start))
+
+ self.log.info('Checking descendants/ancestors properties of all of the in-mempool transactions...')
+ for k, tx in enumerate(tx_id):
+ self.log.debug('Check transaction #{}.'.format(k))
+ assert_equal(self.nodes[0].getrawmempool(True)[tx]['descendantcount'], size - k)
+ assert_equal(self.nodes[0].getrawmempool(True)[tx]['descendantsize'], sum(tx_size[k:size]))
+ assert_equal(self.nodes[0].getrawmempool(True)[tx]['ancestorcount'], k + 1)
+ assert_equal(self.nodes[0].getrawmempool(True)[tx]['ancestorsize'], sum(tx_size[0:(k + 1)]))
+
+ def run_test(self):
+ # Use batch size limited by DEFAULT_ANCESTOR_LIMIT = 25 to avoid triggering the "too many unconfirmed parents" error.
+ self.transaction_graph_test(size=100, n_tx_to_mine=[25, 50, 75])
+
+
+if __name__ == '__main__':
+ MempoolUpdateFromBlockTest().main()
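A small worked example of the tournament property stated in the new test's docstring (tx[K] has N-K descendants and K+1 ancestors, both counts including tx[K] itself), shown here for an assumed N = 4:

    N = 4
    for k in range(N):
        descendants = N - k  # tx[k] plus every later transaction, each of which spends one of its outputs
        ancestors = k + 1    # tx[k] plus every earlier transaction it spends from
        print("tx[%d]: %d descendants, %d ancestors" % (k, descendants, ancestors))
    # prints tx[0]: 4 descendants, 1 ancestors ... tx[3]: 1 descendants, 4 ancestors
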
diff --git a/test/functional/p2p_blockfilters.py b/test/functional/p2p_blockfilters.py
new file mode 100755
index 0000000000..4d00a6dc07
--- /dev/null
+++ b/test/functional/p2p_blockfilters.py
@@ -0,0 +1,134 @@
+#!/usr/bin/env python3
+# Copyright (c) 2019 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Tests NODE_COMPACT_FILTERS (BIP 157/158).
+
+Tests that a node configured with -blockfilterindex and -peerblockfilters can serve
+cfcheckpts.
+"""
+
+from test_framework.messages import (
+ FILTER_TYPE_BASIC,
+ msg_getcfcheckpt,
+)
+from test_framework.mininode import P2PInterface
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import (
+ assert_equal,
+ connect_nodes,
+ disconnect_nodes,
+ wait_until,
+)
+
+class CompactFiltersTest(BitcoinTestFramework):
+ def set_test_params(self):
+ self.setup_clean_chain = True
+ self.rpc_timeout = 480
+ self.num_nodes = 2
+ self.extra_args = [
+ ["-blockfilterindex", "-peerblockfilters"],
+ ["-blockfilterindex"],
+ ]
+
+ def run_test(self):
+ # Node 0 supports COMPACT_FILTERS, node 1 does not.
+ node0 = self.nodes[0].add_p2p_connection(P2PInterface())
+ node1 = self.nodes[1].add_p2p_connection(P2PInterface())
+
+ # Nodes 0 & 1 share the same first 999 blocks in the chain.
+ self.nodes[0].generate(999)
+ self.sync_blocks(timeout=600)
+
+ # Stale blocks by disconnecting nodes 0 & 1, mining, then reconnecting
+ disconnect_nodes(self.nodes[0], 1)
+
+ self.nodes[0].generate(1)
+ wait_until(lambda: self.nodes[0].getblockcount() == 1000)
+ stale_block_hash = self.nodes[0].getblockhash(1000)
+
+ self.nodes[1].generate(1001)
+ wait_until(lambda: self.nodes[1].getblockcount() == 2000)
+
+ self.log.info("get cfcheckpt on chain to be re-orged out.")
+ request = msg_getcfcheckpt(
+ filter_type=FILTER_TYPE_BASIC,
+ stop_hash=int(stale_block_hash, 16)
+ )
+ node0.send_and_ping(message=request)
+ response = node0.last_message['cfcheckpt']
+ assert_equal(response.filter_type, request.filter_type)
+ assert_equal(response.stop_hash, request.stop_hash)
+ assert_equal(len(response.headers), 1)
+
+ self.log.info("Reorg node 0 to a new chain.")
+ connect_nodes(self.nodes[0], 1)
+ self.sync_blocks(timeout=600)
+
+ main_block_hash = self.nodes[0].getblockhash(1000)
+ assert main_block_hash != stale_block_hash, "node 0 chain did not reorganize"
+
+ self.log.info("Check that peers can fetch cfcheckpt on active chain.")
+ tip_hash = self.nodes[0].getbestblockhash()
+ request = msg_getcfcheckpt(
+ filter_type=FILTER_TYPE_BASIC,
+ stop_hash=int(tip_hash, 16)
+ )
+ node0.send_and_ping(request)
+ response = node0.last_message['cfcheckpt']
+ assert_equal(response.filter_type, request.filter_type)
+ assert_equal(response.stop_hash, request.stop_hash)
+
+ main_cfcheckpt = self.nodes[0].getblockfilter(main_block_hash, 'basic')['header']
+ tip_cfcheckpt = self.nodes[0].getblockfilter(tip_hash, 'basic')['header']
+ assert_equal(
+ response.headers,
+ [int(header, 16) for header in (main_cfcheckpt, tip_cfcheckpt)]
+ )
+
+ self.log.info("Check that peers can fetch cfcheckpt on stale chain.")
+ request = msg_getcfcheckpt(
+ filter_type=FILTER_TYPE_BASIC,
+ stop_hash=int(stale_block_hash, 16)
+ )
+ node0.send_and_ping(request)
+ response = node0.last_message['cfcheckpt']
+
+ stale_cfcheckpt = self.nodes[0].getblockfilter(stale_block_hash, 'basic')['header']
+ assert_equal(
+ response.headers,
+ [int(header, 16) for header in (stale_cfcheckpt,)]
+ )
+
+ self.log.info("Requests to node 1 without NODE_COMPACT_FILTERS results in disconnection.")
+ requests = [
+ msg_getcfcheckpt(
+ filter_type=FILTER_TYPE_BASIC,
+ stop_hash=int(main_block_hash, 16)
+ ),
+ ]
+ for request in requests:
+ node1 = self.nodes[1].add_p2p_connection(P2PInterface())
+ node1.send_message(request)
+ node1.wait_for_disconnect()
+
+ self.log.info("Check that invalid requests result in disconnection.")
+ requests = [
+ # Requesting unknown filter type results in disconnection.
+ msg_getcfcheckpt(
+ filter_type=255,
+ stop_hash=int(main_block_hash, 16)
+ ),
+ # Requesting unknown hash results in disconnection.
+ msg_getcfcheckpt(
+ filter_type=FILTER_TYPE_BASIC,
+ stop_hash=123456789,
+ ),
+ ]
+ for request in requests:
+ node0 = self.nodes[0].add_p2p_connection(P2PInterface())
+ node0.send_message(request)
+ node0.wait_for_disconnect()
+
+if __name__ == '__main__':
+ CompactFiltersTest().main()
diff --git a/test/functional/p2p_blocksonly.py b/test/functional/p2p_blocksonly.py
index 3258a38e3c..c155dda664 100755
--- a/test/functional/p2p_blocksonly.py
+++ b/test/functional/p2p_blocksonly.py
@@ -57,6 +57,29 @@ class P2PBlocksOnly(BitcoinTestFramework):
self.nodes[0].p2p.wait_for_tx(txid)
assert_equal(self.nodes[0].getmempoolinfo()['size'], 1)
+ self.log.info('Check that txs from whitelisted peers are not rejected and relayed to others')
+ self.log.info("Restarting node 0 with whitelist permission and blocksonly")
+ self.restart_node(0, ["-persistmempool=0", "-whitelist=127.0.0.1", "-whitelistforcerelay", "-blocksonly"])
+ assert_equal(self.nodes[0].getrawmempool(),[])
+ first_peer = self.nodes[0].add_p2p_connection(P2PInterface())
+ second_peer = self.nodes[0].add_p2p_connection(P2PInterface())
+ peer_1_info = self.nodes[0].getpeerinfo()[0]
+ assert_equal(peer_1_info['whitelisted'], True)
+ assert_equal(peer_1_info['permissions'], ['noban', 'forcerelay', 'relay', 'mempool'])
+ peer_2_info = self.nodes[0].getpeerinfo()[1]
+ assert_equal(peer_2_info['whitelisted'], True)
+ assert_equal(peer_2_info['permissions'], ['noban', 'forcerelay', 'relay', 'mempool'])
+ assert_equal(self.nodes[0].testmempoolaccept([sigtx])[0]['allowed'], True)
+ txid = self.nodes[0].testmempoolaccept([sigtx])[0]['txid']
+
+ self.log.info('Check that the tx from whitelisted first_peer is relayed to others (i.e. second_peer)')
+ with self.nodes[0].assert_debug_log(["received getdata"]):
+ first_peer.send_message(msg_tx(FromHex(CTransaction(), sigtx)))
+ self.log.info('Check that the whitelisted peer is still connected after sending the transaction')
+ assert_equal(first_peer.is_connected, True)
+ second_peer.wait_for_tx(txid)
+ assert_equal(self.nodes[0].getmempoolinfo()['size'], 1)
+ self.log.info("Whitelisted peer's transaction is accepted and relayed")
if __name__ == '__main__':
P2PBlocksOnly().main()
diff --git a/test/functional/p2p_compactblocks.py b/test/functional/p2p_compactblocks.py
index 66e6f8c424..d77a744758 100755
--- a/test/functional/p2p_compactblocks.py
+++ b/test/functional/p2p_compactblocks.py
@@ -10,7 +10,7 @@ Version 2 compact blocks are post-segwit (wtxids)
import random
from test_framework.blocktools import create_block, create_coinbase, add_witness_commitment
-from test_framework.messages import BlockTransactions, BlockTransactionsRequest, calculate_shortid, CBlock, CBlockHeader, CInv, COutPoint, CTransaction, CTxIn, CTxInWitness, CTxOut, FromHex, HeaderAndShortIDs, msg_no_witness_block, msg_no_witness_blocktxn, msg_cmpctblock, msg_getblocktxn, msg_getdata, msg_getheaders, msg_headers, msg_inv, msg_sendcmpct, msg_sendheaders, msg_tx, msg_block, msg_blocktxn, MSG_WITNESS_FLAG, NODE_NETWORK, P2PHeaderAndShortIDs, PrefilledTransaction, ser_uint256, ToHex
+from test_framework.messages import BlockTransactions, BlockTransactionsRequest, calculate_shortid, CBlock, CBlockHeader, CInv, COutPoint, CTransaction, CTxIn, CTxInWitness, CTxOut, FromHex, HeaderAndShortIDs, msg_no_witness_block, msg_no_witness_blocktxn, msg_cmpctblock, msg_getblocktxn, msg_getdata, msg_getheaders, msg_headers, msg_inv, msg_sendcmpct, msg_sendheaders, msg_tx, msg_block, msg_blocktxn, MSG_BLOCK, MSG_CMPCT_BLOCK, MSG_WITNESS_FLAG, NODE_NETWORK, P2PHeaderAndShortIDs, PrefilledTransaction, ser_uint256, ToHex
from test_framework.mininode import mininode_lock, P2PInterface
from test_framework.script import CScript, OP_TRUE, OP_DROP
from test_framework.test_framework import BitcoinTestFramework
@@ -44,7 +44,7 @@ class TestP2PConn(P2PInterface):
def on_inv(self, message):
for x in self.last_message["inv"].inv:
- if x.type == 2:
+ if x.type == MSG_BLOCK:
self.block_announced = True
self.announced_blockhashes.add(x.hash)
@@ -307,7 +307,7 @@ class CompactBlocksTest(BitcoinTestFramework):
# Now fetch the compact block using a normal non-announce getdata
with mininode_lock:
test_node.clear_block_announcement()
- inv = CInv(4, block_hash) # 4 == "CompactBlock"
+ inv = CInv(MSG_CMPCT_BLOCK, block_hash)
test_node.send_message(msg_getdata([inv]))
wait_until(test_node.received_block_announcement, timeout=30, lock=mininode_lock)
@@ -380,7 +380,7 @@ class CompactBlocksTest(BitcoinTestFramework):
block = self.build_block_on_tip(node, segwit=segwit)
if announce == "inv":
- test_node.send_message(msg_inv([CInv(2, block.sha256)]))
+ test_node.send_message(msg_inv([CInv(MSG_BLOCK, block.sha256)]))
wait_until(lambda: "getheaders" in test_node.last_message, timeout=30, lock=mininode_lock)
test_node.send_header_for_blocks([block])
else:
@@ -564,7 +564,8 @@ class CompactBlocksTest(BitcoinTestFramework):
# We should receive a getdata request
test_node.wait_for_getdata([block.sha256], timeout=10)
- assert test_node.last_message["getdata"].inv[0].type == 2 or test_node.last_message["getdata"].inv[0].type == 2 | MSG_WITNESS_FLAG
+ assert test_node.last_message["getdata"].inv[0].type == MSG_BLOCK or \
+ test_node.last_message["getdata"].inv[0].type == MSG_BLOCK | MSG_WITNESS_FLAG
# Deliver the block
if version == 2:
@@ -633,7 +634,7 @@ class CompactBlocksTest(BitcoinTestFramework):
wait_until(test_node.received_block_announcement, timeout=30, lock=mininode_lock)
test_node.clear_block_announcement()
- test_node.send_message(msg_getdata([CInv(4, int(new_blocks[0], 16))]))
+ test_node.send_message(msg_getdata([CInv(MSG_CMPCT_BLOCK, int(new_blocks[0], 16))]))
wait_until(lambda: "cmpctblock" in test_node.last_message, timeout=30, lock=mininode_lock)
test_node.clear_block_announcement()
@@ -642,7 +643,7 @@ class CompactBlocksTest(BitcoinTestFramework):
test_node.clear_block_announcement()
with mininode_lock:
test_node.last_message.pop("block", None)
- test_node.send_message(msg_getdata([CInv(4, int(new_blocks[0], 16))]))
+ test_node.send_message(msg_getdata([CInv(MSG_CMPCT_BLOCK, int(new_blocks[0], 16))]))
wait_until(lambda: "block" in test_node.last_message, timeout=30, lock=mininode_lock)
with mininode_lock:
test_node.last_message["block"].block.calc_sha256()
diff --git a/test/functional/p2p_feefilter.py b/test/functional/p2p_feefilter.py
index 4f242bd94a..805cb1e84f 100755
--- a/test/functional/p2p_feefilter.py
+++ b/test/functional/p2p_feefilter.py
@@ -7,7 +7,7 @@
from decimal import Decimal
import time
-from test_framework.messages import msg_feefilter
+from test_framework.messages import MSG_TX, msg_feefilter
from test_framework.mininode import mininode_lock, P2PInterface
from test_framework.test_framework import BitcoinTestFramework
@@ -31,7 +31,7 @@ class TestP2PConn(P2PInterface):
def on_inv(self, message):
for i in message.inv:
- if (i.type == 1):
+ if (i.type == MSG_TX):
self.txinvs.append(hashToHex(i.hash))
def clear_invs(self):
diff --git a/test/functional/p2p_filter.py b/test/functional/p2p_filter.py
index a8b768c144..15955a938c 100755
--- a/test/functional/p2p_filter.py
+++ b/test/functional/p2p_filter.py
@@ -64,19 +64,40 @@ class FilterTest(BitcoinTestFramework):
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
- def run_test(self):
- filter_node = self.nodes[0].add_p2p_connection(FilterNode())
-
+ def test_size_limits(self, filter_node):
self.log.info('Check that too large filter is rejected')
with self.nodes[0].assert_debug_log(['Misbehaving']):
- filter_node.send_and_ping(msg_filterload(data=b'\xaa', nHashFuncs=MAX_BLOOM_HASH_FUNCS+1))
- with self.nodes[0].assert_debug_log(['Misbehaving']):
filter_node.send_and_ping(msg_filterload(data=b'\xbb'*(MAX_BLOOM_FILTER_SIZE+1)))
+ self.log.info('Check that max size filter is accepted')
+ with self.nodes[0].assert_debug_log([], unexpected_msgs=['Misbehaving']):
+ filter_node.send_and_ping(msg_filterload(data=b'\xbb'*(MAX_BLOOM_FILTER_SIZE)))
+ filter_node.send_and_ping(msg_filterclear())
+
+ self.log.info('Check that filter with too many hash functions is rejected')
+ with self.nodes[0].assert_debug_log(['Misbehaving']):
+ filter_node.send_and_ping(msg_filterload(data=b'\xaa', nHashFuncs=MAX_BLOOM_HASH_FUNCS+1))
+
+ self.log.info('Check that filter with max hash functions is accepted')
+ with self.nodes[0].assert_debug_log([], unexpected_msgs=['Misbehaving']):
+ filter_node.send_and_ping(msg_filterload(data=b'\xaa', nHashFuncs=MAX_BLOOM_HASH_FUNCS))
+ # Don't send filterclear until next two filteradd checks are done
+
+ self.log.info('Check that max size data element to add to the filter is accepted')
+ with self.nodes[0].assert_debug_log([], unexpected_msgs=['Misbehaving']):
+ filter_node.send_and_ping(msg_filteradd(data=b'\xcc'*(MAX_SCRIPT_ELEMENT_SIZE)))
+
self.log.info('Check that too large data element to add to the filter is rejected')
with self.nodes[0].assert_debug_log(['Misbehaving']):
filter_node.send_and_ping(msg_filteradd(data=b'\xcc'*(MAX_SCRIPT_ELEMENT_SIZE+1)))
+ filter_node.send_and_ping(msg_filterclear())
+
+ def run_test(self):
+ filter_node = self.nodes[0].add_p2p_connection(FilterNode())
+
+ self.test_size_limits(filter_node)
+
self.log.info('Add filtered P2P connection to the node')
filter_node.send_and_ping(filter_node.watch_filter_init)
filter_address = self.nodes[0].decodescript(filter_node.watch_script_pubkey)['addresses'][0]
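The new test_size_limits() helper probes the BIP 37 maxima from both sides. A minimal sketch of the boundary cases, assuming the usual constant values (MAX_BLOOM_FILTER_SIZE = 36000 bytes, MAX_BLOOM_HASH_FUNCS = 50, MAX_SCRIPT_ELEMENT_SIZE = 520 bytes):

    from test_framework.messages import msg_filteradd, msg_filterload

    # Assumed protocol limits; the test imports them rather than redefining them.
    MAX_BLOOM_FILTER_SIZE = 36000
    MAX_BLOOM_HASH_FUNCS = 50
    MAX_SCRIPT_ELEMENT_SIZE = 520

    # Largest filter a peer may load without being flagged as misbehaving.
    ok_load = msg_filterload(data=b'\xbb' * MAX_BLOOM_FILTER_SIZE, nHashFuncs=MAX_BLOOM_HASH_FUNCS)
    # One extra byte (or one extra hash function) crosses the limit and logs 'Misbehaving'.
    bad_load = msg_filterload(data=b'\xbb' * (MAX_BLOOM_FILTER_SIZE + 1))
    # filteradd data elements are capped at the script element size.
    ok_add = msg_filteradd(data=b'\xcc' * MAX_SCRIPT_ELEMENT_SIZE)
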
diff --git a/test/functional/p2p_fingerprint.py b/test/functional/p2p_fingerprint.py
index c9fbb830c8..d743abe681 100755
--- a/test/functional/p2p_fingerprint.py
+++ b/test/functional/p2p_fingerprint.py
@@ -11,7 +11,7 @@ the node should pretend that it does not have it to avoid fingerprinting.
import time
from test_framework.blocktools import (create_block, create_coinbase)
-from test_framework.messages import CInv
+from test_framework.messages import CInv, MSG_BLOCK
from test_framework.mininode import (
P2PInterface,
msg_headers,
@@ -48,7 +48,7 @@ class P2PFingerprintTest(BitcoinTestFramework):
# Send a getdata request for a given block hash
def send_block_request(self, block_hash, node):
msg = msg_getdata()
- msg.inv.append(CInv(2, block_hash)) # 2 == "Block"
+ msg.inv.append(CInv(MSG_BLOCK, block_hash))
node.send_message(msg)
# Send a getheaders request for a given single block hash
diff --git a/test/functional/p2p_getdata.py b/test/functional/p2p_getdata.py
new file mode 100755
index 0000000000..fd94a09d80
--- /dev/null
+++ b/test/functional/p2p_getdata.py
@@ -0,0 +1,51 @@
+#!/usr/bin/env python3
+# Copyright (c) 2020 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test GETDATA processing behavior"""
+from collections import defaultdict
+
+from test_framework.messages import (
+ CInv,
+ msg_getdata,
+)
+from test_framework.mininode import (
+ mininode_lock,
+ P2PInterface,
+)
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import wait_until
+
+class P2PStoreBlock(P2PInterface):
+
+ def __init__(self):
+ super().__init__()
+ self.blocks = defaultdict(int)
+
+ def on_block(self, message):
+ message.block.calc_sha256()
+ self.blocks[message.block.sha256] += 1
+
+class GetdataTest(BitcoinTestFramework):
+ def set_test_params(self):
+ self.num_nodes = 1
+
+ def run_test(self):
+ self.nodes[0].add_p2p_connection(P2PStoreBlock())
+
+ self.log.info("test that an invalid GETDATA doesn't prevent processing of future messages")
+
+ # Send invalid message and verify that node responds to later ping
+ invalid_getdata = msg_getdata()
+ invalid_getdata.inv.append(CInv(t=0, h=0)) # INV type 0 is invalid.
+ self.nodes[0].p2ps[0].send_and_ping(invalid_getdata)
+
+ # Check getdata still works by fetching tip block
+ best_block = int(self.nodes[0].getbestblockhash(), 16)
+ good_getdata = msg_getdata()
+ good_getdata.inv.append(CInv(t=2, h=best_block))
+ self.nodes[0].p2ps[0].send_and_ping(good_getdata)
+ wait_until(lambda: self.nodes[0].p2ps[0].blocks[best_block] == 1, timeout=30, lock=mininode_lock)
+
+if __name__ == '__main__':
+ GetdataTest().main()
diff --git a/test/functional/p2p_invalid_messages.py b/test/functional/p2p_invalid_messages.py
index 4bd832e8f7..81302374c9 100755
--- a/test/functional/p2p_invalid_messages.py
+++ b/test/functional/p2p_invalid_messages.py
@@ -7,7 +7,16 @@ import asyncio
import struct
import sys
-from test_framework import messages
+from test_framework.messages import (
+ CBlockHeader,
+ CInv,
+ msg_getdata,
+ msg_headers,
+ msg_inv,
+ msg_ping,
+ MSG_TX,
+ ser_string,
+)
from test_framework.mininode import (
NetworkThread,
P2PDataStore,
@@ -25,7 +34,7 @@ class msg_unrecognized:
self.str_data = str_data.encode() if not isinstance(str_data, bytes) else str_data
def serialize(self):
- return messages.ser_string(self.str_data)
+ return ser_string(self.str_data)
def __repr__(self):
return "{}(data={})".format(self.msgtype, self.str_data)
@@ -135,7 +144,7 @@ class InvalidMessagesTest(BitcoinTestFramework):
# For some reason unknown to me, we sometimes have to push additional data to the
# peer in order for it to realize a disconnect.
try:
- node.p2p.send_message(messages.msg_ping(nonce=123123))
+ node.p2p.send_message(msg_ping(nonce=123123))
except IOError:
pass
@@ -158,7 +167,7 @@ class InvalidMessagesTest(BitcoinTestFramework):
asyncio.run_coroutine_threadsafe(swap_magic_bytes(), NetworkThread.network_event_loop).result()
with self.nodes[0].assert_debug_log(['PROCESSMESSAGE: INVALID MESSAGESTART ping']):
- conn.send_message(messages.msg_ping(nonce=0xff))
+ conn.send_message(msg_ping(nonce=0xff))
conn.wait_for_disconnect(timeout=1)
self.nodes[0].disconnect_p2ps()
@@ -206,13 +215,13 @@ class InvalidMessagesTest(BitcoinTestFramework):
def test_large_inv(self):
conn = self.nodes[0].add_p2p_connection(P2PInterface())
with self.nodes[0].assert_debug_log(['Misbehaving', 'peer=4 (0 -> 20): message inv size() = 50001']):
- msg = messages.msg_inv([messages.CInv(1, 1)] * 50001)
+ msg = msg_inv([CInv(MSG_TX, 1)] * 50001)
conn.send_and_ping(msg)
with self.nodes[0].assert_debug_log(['Misbehaving', 'peer=4 (20 -> 40): message getdata size() = 50001']):
- msg = messages.msg_getdata([messages.CInv(1, 1)] * 50001)
+ msg = msg_getdata([CInv(MSG_TX, 1)] * 50001)
conn.send_and_ping(msg)
with self.nodes[0].assert_debug_log(['Misbehaving', 'peer=4 (40 -> 60): headers message size = 2001']):
- msg = messages.msg_headers([messages.CBlockHeader()] * 2001)
+ msg = msg_headers([CBlockHeader()] * 2001)
conn.send_and_ping(msg)
self.nodes[0].disconnect_p2ps()
diff --git a/test/functional/p2p_leak.py b/test/functional/p2p_leak.py
index 7f7430d04e..157af68203 100755
--- a/test/functional/p2p_leak.py
+++ b/test/functional/p2p_leak.py
@@ -141,12 +141,11 @@ class P2PLeakTest(BitcoinTestFramework):
assert no_verack_idlenode.unexpected_msg == False
self.log.info('Check that the version message does not leak the local address of the node')
- time_begin = int(time.time())
p2p_version_store = self.nodes[0].add_p2p_connection(P2PVersionStore())
- time_end = time.time()
ver = p2p_version_store.version_received
- assert_greater_than_or_equal(ver.nTime, time_begin)
- assert_greater_than_or_equal(time_end, ver.nTime)
+ # Check that received time is within one hour of now
+ assert_greater_than_or_equal(ver.nTime, time.time() - 3600)
+ assert_greater_than_or_equal(time.time() + 3600, ver.nTime)
assert_equal(ver.addrFrom.port, 0)
assert_equal(ver.addrFrom.ip, '0.0.0.0')
assert_equal(ver.nStartingHeight, 201)
diff --git a/test/functional/p2p_leak_tx.py b/test/functional/p2p_leak_tx.py
index 6b3436fa5f..da30ad5977 100755
--- a/test/functional/p2p_leak_tx.py
+++ b/test/functional/p2p_leak_tx.py
@@ -4,7 +4,7 @@
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test that we don't leak txs to inbound peers that we haven't yet announced to"""
-from test_framework.messages import msg_getdata, CInv
+from test_framework.messages import msg_getdata, CInv, MSG_TX
from test_framework.mininode import P2PDataStore
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
@@ -37,7 +37,7 @@ class P2PLeakTxTest(BitcoinTestFramework):
txid = gen_node.sendtoaddress(gen_node.getnewaddress(), 0.01)
want_tx = msg_getdata()
- want_tx.inv.append(CInv(t=1, h=int(txid, 16)))
+ want_tx.inv.append(CInv(t=MSG_TX, h=int(txid, 16)))
inbound_peer.last_message.pop('notfound', None)
inbound_peer.send_and_ping(want_tx)
diff --git a/test/functional/p2p_node_network_limited.py b/test/functional/p2p_node_network_limited.py
index e6451d9f18..9c8c36c89e 100755
--- a/test/functional/p2p_node_network_limited.py
+++ b/test/functional/p2p_node_network_limited.py
@@ -8,7 +8,7 @@ Tests that a node configured with -prune=550 signals NODE_NETWORK_LIMITED correc
and that it responds to getdata requests for blocks correctly:
- send a block within 288 + 2 of the tip
- disconnect peers who request blocks older than that."""
-from test_framework.messages import CInv, msg_getdata, msg_verack, NODE_NETWORK_LIMITED, NODE_WITNESS
+from test_framework.messages import CInv, MSG_BLOCK, msg_getdata, msg_verack, NODE_NETWORK_LIMITED, NODE_WITNESS
from test_framework.mininode import P2PInterface, mininode_lock
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
@@ -31,7 +31,7 @@ class P2PIgnoreInv(P2PInterface):
wait_until(test_function, timeout=timeout, lock=mininode_lock)
def send_getdata_for_block(self, blockhash):
getdata_request = msg_getdata()
- getdata_request.inv.append(CInv(2, int(blockhash, 16)))
+ getdata_request.inv.append(CInv(MSG_BLOCK, int(blockhash, 16)))
self.send_message(getdata_request)
class NodeNetworkLimitedTest(BitcoinTestFramework):
diff --git a/test/functional/p2p_segwit.py b/test/functional/p2p_segwit.py
index dbdce6552a..8a989097b4 100755
--- a/test/functional/p2p_segwit.py
+++ b/test/functional/p2p_segwit.py
@@ -22,6 +22,8 @@ from test_framework.messages import (
CTxOut,
CTxWitness,
MAX_BLOCK_BASE_SIZE,
+ MSG_BLOCK,
+ MSG_TX,
MSG_WITNESS_FLAG,
NODE_NETWORK,
NODE_WITNESS,
@@ -157,7 +159,7 @@ class TestP2PConn(P2PInterface):
def announce_tx_and_wait_for_getdata(self, tx, timeout=60, success=True):
with mininode_lock:
self.last_message.pop("getdata", None)
- self.send_message(msg_inv(inv=[CInv(1, tx.sha256)]))
+ self.send_message(msg_inv(inv=[CInv(MSG_TX, tx.sha256)]))
if success:
self.wait_for_getdata([tx.sha256], timeout)
else:
@@ -173,7 +175,7 @@ class TestP2PConn(P2PInterface):
if use_header:
self.send_message(msg)
else:
- self.send_message(msg_inv(inv=[CInv(2, block.sha256)]))
+ self.send_message(msg_inv(inv=[CInv(MSG_BLOCK, block.sha256)]))
self.wait_for_getheaders()
self.send_message(msg)
self.wait_for_getdata([block.sha256])
@@ -576,7 +578,7 @@ class SegWitTest(BitcoinTestFramework):
# Verify that if a peer doesn't set nServices to include NODE_WITNESS,
# the getdata is just for the non-witness portion.
self.old_node.announce_tx_and_wait_for_getdata(tx)
- assert self.old_node.last_message["getdata"].inv[0].type == 1
+ assert self.old_node.last_message["getdata"].inv[0].type == MSG_TX
# Since we haven't delivered the tx yet, inv'ing the same tx from
# a witness transaction ought not result in a getdata.
@@ -1310,9 +1312,9 @@ class SegWitTest(BitcoinTestFramework):
tx3.wit.vtxinwit[0].scriptWitness.stack = [witness_program]
# Also check that old_node gets a tx announcement, even though this is
# a witness transaction.
- self.old_node.wait_for_inv([CInv(1, tx2.sha256)]) # wait until tx2 was inv'ed
+ self.old_node.wait_for_inv([CInv(MSG_TX, tx2.sha256)]) # wait until tx2 was inv'ed
test_transaction_acceptance(self.nodes[0], self.test_node, tx3, with_witness=True, accepted=True)
- self.old_node.wait_for_inv([CInv(1, tx3.sha256)])
+ self.old_node.wait_for_inv([CInv(MSG_TX, tx3.sha256)])
# Test that getrawtransaction returns correct witness information
# hash, size, vsize
@@ -1896,12 +1898,12 @@ class SegWitTest(BitcoinTestFramework):
def test_upgrade_after_activation(self):
"""Test the behavior of starting up a segwit-aware node after the softfork has activated."""
- # Restart with the new binary
self.stop_node(2)
self.start_node(2, extra_args=["-segwitheight={}".format(SEGWIT_HEIGHT)])
connect_nodes(self.nodes[0], 2)
- self.sync_blocks()
+ # More than 100 blocks are synced after reconnecting, so give it plenty of time

+ self.sync_blocks(timeout=240)
# Make sure that this peer thinks segwit has activated.
assert softfork_active(self.nodes[2], 'segwit')
diff --git a/test/functional/p2p_sendheaders.py b/test/functional/p2p_sendheaders.py
index a8fba306a7..481b1c1841 100755
--- a/test/functional/p2p_sendheaders.py
+++ b/test/functional/p2p_sendheaders.py
@@ -92,6 +92,7 @@ from test_framework.mininode import (
NODE_WITNESS,
P2PInterface,
mininode_lock,
+ MSG_BLOCK,
msg_block,
msg_getblocks,
msg_getdata,
@@ -120,7 +121,7 @@ class BaseNode(P2PInterface):
"""Request data for a list of block hashes."""
msg = msg_getdata()
for x in block_hashes:
- msg.inv.append(CInv(2, x))
+ msg.inv.append(CInv(MSG_BLOCK, x))
self.send_message(msg)
def send_get_headers(self, locator, hashstop):
@@ -131,7 +132,7 @@ class BaseNode(P2PInterface):
def send_block_inv(self, blockhash):
msg = msg_inv()
- msg.inv = [CInv(2, blockhash)]
+ msg.inv = [CInv(MSG_BLOCK, blockhash)]
self.send_message(msg)
def send_header_for_blocks(self, new_blocks):
diff --git a/test/functional/p2p_tx_download.py b/test/functional/p2p_tx_download.py
index a999fba818..10f5eea0e5 100755
--- a/test/functional/p2p_tx_download.py
+++ b/test/functional/p2p_tx_download.py
@@ -63,7 +63,7 @@ class TxDownloadTest(BitcoinTestFramework):
txid = 0xdeadbeef
self.log.info("Announce the txid from each incoming peer to node 0")
- msg = msg_inv([CInv(t=1, h=txid)])
+ msg = msg_inv([CInv(t=MSG_TX, h=txid)])
for p in self.nodes[0].p2ps:
p.send_and_ping(msg)
@@ -104,7 +104,7 @@ class TxDownloadTest(BitcoinTestFramework):
self.log.info(
"Announce the transaction to all nodes from all {} incoming peers, but never send it".format(NUM_INBOUND))
- msg = msg_inv([CInv(t=1, h=txid)])
+ msg = msg_inv([CInv(t=MSG_TX, h=txid)])
for p in self.peers:
p.send_and_ping(msg)
@@ -135,13 +135,13 @@ class TxDownloadTest(BitcoinTestFramework):
with mininode_lock:
p.tx_getdata_count = 0
- p.send_message(msg_inv([CInv(t=1, h=i) for i in txids]))
+ p.send_message(msg_inv([CInv(t=MSG_TX, h=i) for i in txids]))
wait_until(lambda: p.tx_getdata_count >= MAX_GETDATA_IN_FLIGHT, lock=mininode_lock)
with mininode_lock:
assert_equal(p.tx_getdata_count, MAX_GETDATA_IN_FLIGHT)
self.log.info("Now check that if we send a NOTFOUND for a transaction, we'll get one more request")
- p.send_message(msg_notfound(vec=[CInv(t=1, h=txids[0])]))
+ p.send_message(msg_notfound(vec=[CInv(t=MSG_TX, h=txids[0])]))
wait_until(lambda: p.tx_getdata_count >= MAX_GETDATA_IN_FLIGHT + 1, timeout=10, lock=mininode_lock)
with mininode_lock:
assert_equal(p.tx_getdata_count, MAX_GETDATA_IN_FLIGHT + 1)
@@ -154,7 +154,7 @@ class TxDownloadTest(BitcoinTestFramework):
def test_spurious_notfound(self):
self.log.info('Check that spurious notfound is ignored')
- self.nodes[0].p2ps[0].send_message(msg_notfound(vec=[CInv(1, 1)]))
+ self.nodes[0].p2ps[0].send_message(msg_notfound(vec=[CInv(MSG_TX, 1)]))
def run_test(self):
# Setup the p2p connections
diff --git a/test/functional/p2p_unrequested_blocks.py b/test/functional/p2p_unrequested_blocks.py
index 3aaf4b9977..c323168848 100755
--- a/test/functional/p2p_unrequested_blocks.py
+++ b/test/functional/p2p_unrequested_blocks.py
@@ -54,7 +54,7 @@ Node1 is unused in tests 3-7:
import time
from test_framework.blocktools import create_block, create_coinbase, create_tx_with_script
-from test_framework.messages import CBlockHeader, CInv, msg_block, msg_headers, msg_inv
+from test_framework.messages import CBlockHeader, CInv, MSG_BLOCK, msg_block, msg_headers, msg_inv
from test_framework.mininode import mininode_lock, P2PInterface
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
@@ -210,7 +210,7 @@ class AcceptBlockTest(BitcoinTestFramework):
with mininode_lock:
# Clear state so we can check the getdata request
test_node.last_message.pop("getdata", None)
- test_node.send_message(msg_inv([CInv(2, block_h3.sha256)]))
+ test_node.send_message(msg_inv([CInv(MSG_BLOCK, block_h3.sha256)]))
test_node.sync_with_ping()
with mininode_lock:
diff --git a/test/functional/rpc_createmultisig.py b/test/functional/rpc_createmultisig.py
index a983716177..56e9ecfcc2 100755
--- a/test/functional/rpc_createmultisig.py
+++ b/test/functional/rpc_createmultisig.py
@@ -4,13 +4,14 @@
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test multisig RPCs"""
+from test_framework.authproxy import JSONRPCException
from test_framework.descriptors import descsum_create, drop_origins
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_raises_rpc_error,
assert_equal,
)
-from test_framework.key import ECPubKey
+from test_framework.key import ECPubKey, ECKey, bytes_to_wif
import binascii
import decimal
@@ -28,10 +29,14 @@ class RpcCreateMultiSigTest(BitcoinTestFramework):
self.skip_if_no_wallet()
def get_keys(self):
+ self.pub = []
+ self.priv = []
node0, node1, node2 = self.nodes
- add = [node1.getnewaddress() for _ in range(self.nkeys)]
- self.pub = [node1.getaddressinfo(a)["pubkey"] for a in add]
- self.priv = [node1.dumpprivkey(a) for a in add]
+ for _ in range(self.nkeys):
+ k = ECKey()
+ k.generate()
+ self.pub.append(k.get_pubkey().get_bytes().hex())
+ self.priv.append(bytes_to_wif(k.get_bytes(), k.is_compressed))
self.final = node2.getnewaddress()
def run_test(self):
@@ -64,17 +69,20 @@ class RpcCreateMultiSigTest(BitcoinTestFramework):
pk_obj.compressed = False
pk2 = binascii.hexlify(pk_obj.get_bytes()).decode()
+ node0.createwallet(wallet_name='wmulti0', disable_private_keys=True)
+ wmulti0 = node0.get_wallet_rpc('wmulti0')
+
# Check all permutations of keys because order matters apparently
for keys in itertools.permutations([pk0, pk1, pk2]):
# Results should be the same as this legacy one
legacy_addr = node0.createmultisig(2, keys, 'legacy')['address']
- assert_equal(legacy_addr, node0.addmultisigaddress(2, keys, '', 'legacy')['address'])
+ assert_equal(legacy_addr, wmulti0.addmultisigaddress(2, keys, '', 'legacy')['address'])
# Generate addresses with the segwit types. These should all make legacy addresses
- assert_equal(legacy_addr, node0.createmultisig(2, keys, 'bech32')['address'])
- assert_equal(legacy_addr, node0.createmultisig(2, keys, 'p2sh-segwit')['address'])
- assert_equal(legacy_addr, node0.addmultisigaddress(2, keys, '', 'bech32')['address'])
- assert_equal(legacy_addr, node0.addmultisigaddress(2, keys, '', 'p2sh-segwit')['address'])
+ assert_equal(legacy_addr, wmulti0.createmultisig(2, keys, 'bech32')['address'])
+ assert_equal(legacy_addr, wmulti0.createmultisig(2, keys, 'p2sh-segwit')['address'])
+ assert_equal(legacy_addr, wmulti0.addmultisigaddress(2, keys, '', 'bech32')['address'])
+ assert_equal(legacy_addr, wmulti0.addmultisigaddress(2, keys, '', 'p2sh-segwit')['address'])
self.log.info('Testing sortedmulti descriptors with BIP 67 test vectors')
with open(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data/rpc_bip67.json'), encoding='utf-8') as f:
@@ -89,6 +97,8 @@ class RpcCreateMultiSigTest(BitcoinTestFramework):
assert_equal(self.nodes[0].deriveaddresses(sorted_key_desc)[0], t['address'])
def check_addmultisigaddress_errors(self):
+ if self.options.descriptors:
+ return
self.log.info('Check that addmultisigaddress fails when the private keys are missing')
addresses = [self.nodes[1].getnewaddress(address_type='legacy') for _ in range(2)]
assert_raises_rpc_error(-5, 'no full public key for address', lambda: self.nodes[0].addmultisigaddress(nrequired=1, keys=addresses))
@@ -115,6 +125,15 @@ class RpcCreateMultiSigTest(BitcoinTestFramework):
def do_multisig(self):
node0, node1, node2 = self.nodes
+ if 'wmulti' not in node1.listwallets():
+ try:
+ node1.loadwallet('wmulti')
+ except JSONRPCException as e:
+ if e.error['code'] == -18 and 'Wallet wmulti not found' in e.error['message']:
+ node1.createwallet(wallet_name='wmulti', disable_private_keys=True)
+ else:
+ raise
+ wmulti = node1.get_wallet_rpc('wmulti')
# Construct the expected descriptor
desc = 'multi({},{})'.format(self.nsigs, ','.join(self.pub))
@@ -134,7 +153,7 @@ class RpcCreateMultiSigTest(BitcoinTestFramework):
assert madd[0:4] == "bcrt" # actually a bech32 address
# compare against addmultisigaddress
- msigw = node1.addmultisigaddress(self.nsigs, self.pub, None, self.output_type)
+ msigw = wmulti.addmultisigaddress(self.nsigs, self.pub, None, self.output_type)
maddw = msigw["address"]
mredeemw = msigw["redeemScript"]
assert_equal(desc, drop_origins(msigw['descriptor']))
@@ -194,6 +213,8 @@ class RpcCreateMultiSigTest(BitcoinTestFramework):
txinfo = node0.getrawtransaction(tx, True, blk)
self.log.info("n/m=%d/%d %s size=%d vsize=%d weight=%d" % (self.nsigs, self.nkeys, self.output_type, txinfo["size"], txinfo["vsize"], txinfo["weight"]))
+ wmulti.unloadwallet()
+
if __name__ == '__main__':
RpcCreateMultiSigTest().main()
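The do_multisig() change above uses a load-or-create pattern so the watch-only wallet is reused across repeated calls. A minimal standalone sketch of that pattern (the wallet name and node are illustrative):

    from test_framework.authproxy import JSONRPCException

    def load_or_create_wallet(node, name):
        # Reuse the wallet if it is already loaded; otherwise try to load it from
        # disk and only create it when the load reports "not found" (code -18).
        if name in node.listwallets():
            return node.get_wallet_rpc(name)
        try:
            node.loadwallet(name)
        except JSONRPCException as e:
            if e.error['code'] == -18:
                node.createwallet(wallet_name=name, disable_private_keys=True)
            else:
                raise
        return node.get_wallet_rpc(name)
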
diff --git a/test/functional/rpc_psbt.py b/test/functional/rpc_psbt.py
index ea8510f92b..51d136d26a 100755
--- a/test/functional/rpc_psbt.py
+++ b/test/functional/rpc_psbt.py
@@ -48,18 +48,23 @@ class PSBTTest(BitcoinTestFramework):
disconnect_nodes(offline_node, 2)
disconnect_nodes(mining_node, 0)
+ # Create watchonly on online_node
+ online_node.createwallet(wallet_name='wonline', disable_private_keys=True)
+ wonline = online_node.get_wallet_rpc('wonline')
+ w2 = online_node.get_wallet_rpc('')
+
# Mine a transaction that credits the offline address
offline_addr = offline_node.getnewaddress(address_type="p2sh-segwit")
- online_addr = online_node.getnewaddress(address_type="p2sh-segwit")
- online_node.importaddress(offline_addr, "", False)
+ online_addr = w2.getnewaddress(address_type="p2sh-segwit")
+ wonline.importaddress(offline_addr, "", False)
mining_node.sendtoaddress(address=offline_addr, amount=1.0)
mining_node.generate(nblocks=1)
self.sync_blocks([mining_node, online_node])
# Construct an unsigned PSBT on the online node (who doesn't know the output is Segwit, so will include a non-witness UTXO)
- utxos = online_node.listunspent(addresses=[offline_addr])
- raw = online_node.createrawtransaction([{"txid":utxos[0]["txid"], "vout":utxos[0]["vout"]}],[{online_addr:0.9999}])
- psbt = online_node.walletprocesspsbt(online_node.converttopsbt(raw))["psbt"]
+ utxos = wonline.listunspent(addresses=[offline_addr])
+ raw = wonline.createrawtransaction([{"txid":utxos[0]["txid"], "vout":utxos[0]["vout"]}],[{online_addr:0.9999}])
+ psbt = wonline.walletprocesspsbt(online_node.converttopsbt(raw))["psbt"]
assert "non_witness_utxo" in mining_node.decodepsbt(psbt)["inputs"][0]
# Have the offline node sign the PSBT (which will update the UTXO to segwit)
@@ -72,6 +77,8 @@ class PSBTTest(BitcoinTestFramework):
self.sync_blocks([mining_node, online_node])
assert_equal(online_node.gettxout(txid,0)["confirmations"], 1)
+ wonline.unloadwallet()
+
# Reconnect
connect_nodes(self.nodes[0], 1)
connect_nodes(self.nodes[0], 2)
@@ -89,13 +96,23 @@ class PSBTTest(BitcoinTestFramework):
final_tx = self.nodes[0].finalizepsbt(signed_tx)['hex']
self.nodes[0].sendrawtransaction(final_tx)
- # Create p2sh, p2wpkh, and p2wsh addresses
+ # Get pubkeys
pubkey0 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())['pubkey']
pubkey1 = self.nodes[1].getaddressinfo(self.nodes[1].getnewaddress())['pubkey']
pubkey2 = self.nodes[2].getaddressinfo(self.nodes[2].getnewaddress())['pubkey']
- p2sh = self.nodes[1].addmultisigaddress(2, [pubkey0, pubkey1, pubkey2], "", "legacy")['address']
- p2wsh = self.nodes[1].addmultisigaddress(2, [pubkey0, pubkey1, pubkey2], "", "bech32")['address']
- p2sh_p2wsh = self.nodes[1].addmultisigaddress(2, [pubkey0, pubkey1, pubkey2], "", "p2sh-segwit")['address']
+
+ # Setup watchonly wallets
+ self.nodes[2].createwallet(wallet_name='wmulti', disable_private_keys=True)
+ wmulti = self.nodes[2].get_wallet_rpc('wmulti')
+
+ # Create all the addresses
+ p2sh = wmulti.addmultisigaddress(2, [pubkey0, pubkey1, pubkey2], "", "legacy")['address']
+ p2wsh = wmulti.addmultisigaddress(2, [pubkey0, pubkey1, pubkey2], "", "bech32")['address']
+ p2sh_p2wsh = wmulti.addmultisigaddress(2, [pubkey0, pubkey1, pubkey2], "", "p2sh-segwit")['address']
+ if not self.options.descriptors:
+ wmulti.importaddress(p2sh)
+ wmulti.importaddress(p2wsh)
+ wmulti.importaddress(p2sh_p2wsh)
p2wpkh = self.nodes[1].getnewaddress("", "bech32")
p2pkh = self.nodes[1].getnewaddress("", "legacy")
p2sh_p2wpkh = self.nodes[1].getnewaddress("", "p2sh-segwit")
@@ -146,11 +163,14 @@ class PSBTTest(BitcoinTestFramework):
assert_raises_rpc_error(-4, "Fee exceeds maximum configured by -maxtxfee", self.nodes[1].walletcreatefundedpsbt, [{"txid":txid,"vout":p2wpkh_pos},{"txid":txid,"vout":p2sh_p2wpkh_pos},{"txid":txid,"vout":p2pkh_pos}], {self.nodes[1].getnewaddress():29.99}, 0, {"feeRate": 10})
# partially sign multisig things with node 1
- psbtx = self.nodes[1].walletcreatefundedpsbt([{"txid":txid,"vout":p2wsh_pos},{"txid":txid,"vout":p2sh_pos},{"txid":txid,"vout":p2sh_p2wsh_pos}], {self.nodes[1].getnewaddress():29.99})['psbt']
+ psbtx = wmulti.walletcreatefundedpsbt(inputs=[{"txid":txid,"vout":p2wsh_pos},{"txid":txid,"vout":p2sh_pos},{"txid":txid,"vout":p2sh_p2wsh_pos}], outputs={self.nodes[1].getnewaddress():29.99}, options={'changeAddress': self.nodes[1].getrawchangeaddress()})['psbt']
walletprocesspsbt_out = self.nodes[1].walletprocesspsbt(psbtx)
psbtx = walletprocesspsbt_out['psbt']
assert_equal(walletprocesspsbt_out['complete'], False)
+ # Unload wmulti, we don't need it anymore
+ wmulti.unloadwallet()
+
# partially sign with node 2. This should be complete and sendable
walletprocesspsbt_out = self.nodes[2].walletprocesspsbt(psbtx)
assert_equal(walletprocesspsbt_out['complete'], True)
@@ -297,7 +317,7 @@ class PSBTTest(BitcoinTestFramework):
# Signer tests
for i, signer in enumerate(signers):
- self.nodes[2].createwallet("wallet{}".format(i))
+ self.nodes[2].createwallet(wallet_name="wallet{}".format(i))
wrpc = self.nodes[2].get_wallet_rpc("wallet{}".format(i))
for key in signer['privkeys']:
wrpc.importprivkey(key)
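The walletcreatefundedpsbt call above now runs against the watch-only wmulti wallet, which has private keys disabled and so cannot derive its own change address; that is why the explicit changeAddress option is passed. A minimal sketch of that shape (wallet and variable names are illustrative):

    # wmulti: watch-only wallet holding the multisig scripts; node1: a node with a normal wallet
    psbt = wmulti.walletcreatefundedpsbt(
        inputs=[{"txid": txid, "vout": p2wsh_pos}],
        outputs={node1.getnewaddress(): 29.99},
        options={"changeAddress": node1.getrawchangeaddress()},  # watch-only wallet cannot generate change
    )["psbt"]
    # Signing then happens in the wallets that actually hold the keys.
    psbt = node1.walletprocesspsbt(psbt)["psbt"]
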
diff --git a/test/functional/rpc_users.py b/test/functional/rpc_users.py
index b75ce15f2e..daf02fc4f3 100755
--- a/test/functional/rpc_users.py
+++ b/test/functional/rpc_users.py
@@ -20,6 +20,7 @@ import string
import configparser
import sys
+
def call_with_auth(node, user, password):
url = urllib.parse.urlparse(node.url)
headers = {"Authorization": "Basic " + str_to_b64str('{}:{}'.format(user, password))}
@@ -64,9 +65,9 @@ class HTTPBasicsTest(BitcoinTestFramework):
self.password = lines[3]
with open(os.path.join(get_datadir_path(self.options.tmpdir, 0), "bitcoin.conf"), 'a', encoding='utf8') as f:
- f.write(rpcauth+"\n")
- f.write(rpcauth2+"\n")
- f.write(rpcauth3+"\n")
+ f.write(rpcauth + "\n")
+ f.write(rpcauth2 + "\n")
+ f.write(rpcauth3 + "\n")
with open(os.path.join(get_datadir_path(self.options.tmpdir, 1), "bitcoin.conf"), 'a', encoding='utf8') as f:
f.write("rpcuser={}\n".format(self.rpcuser))
f.write("rpcpassword={}\n".format(self.rpcpassword))
@@ -76,19 +77,16 @@ class HTTPBasicsTest(BitcoinTestFramework):
assert_equal(200, call_with_auth(node, user, password).status)
self.log.info('Wrong...')
- assert_equal(401, call_with_auth(node, user, password+'wrong').status)
+ assert_equal(401, call_with_auth(node, user, password + 'wrong').status)
self.log.info('Wrong...')
- assert_equal(401, call_with_auth(node, user+'wrong', password).status)
+ assert_equal(401, call_with_auth(node, user + 'wrong', password).status)
self.log.info('Wrong...')
- assert_equal(401, call_with_auth(node, user+'wrong', password+'wrong').status)
+ assert_equal(401, call_with_auth(node, user + 'wrong', password + 'wrong').status)
def run_test(self):
-
- ##################################################
- # Check correctness of the rpcauth config option #
- ##################################################
+ self.log.info('Check correctness of the rpcauth config option')
url = urllib.parse.urlparse(self.nodes[0].url)
self.test_auth(self.nodes[0], url.username, url.password)
@@ -96,12 +94,18 @@ class HTTPBasicsTest(BitcoinTestFramework):
self.test_auth(self.nodes[0], 'rt2', self.rt2password)
self.test_auth(self.nodes[0], self.user, self.password)
- ###############################################################
- # Check correctness of the rpcuser/rpcpassword config options #
- ###############################################################
+ self.log.info('Check correctness of the rpcuser/rpcpassword config options')
url = urllib.parse.urlparse(self.nodes[1].url)
self.test_auth(self.nodes[1], self.rpcuser, self.rpcpassword)
+ self.log.info('Check that failure to write cookie file will abort the node gracefully')
+ self.stop_node(0)
+ cookie_file = os.path.join(get_datadir_path(self.options.tmpdir, 0), self.chain, '.cookie.tmp')
+ os.mkdir(cookie_file)
+ init_error = 'Error: Unable to start HTTP server. See debug log for details.'
+ self.nodes[0].assert_start_raises_init_error(expected_msg=init_error)
+
+
if __name__ == '__main__':
- HTTPBasicsTest ().main ()
+ HTTPBasicsTest().main()
diff --git a/test/functional/test_framework/key.py b/test/functional/test_framework/key.py
index 912c0ca978..f2d6fba4a6 100644
--- a/test/functional/test_framework/key.py
+++ b/test/functional/test_framework/key.py
@@ -8,6 +8,8 @@ keys, and is trivially vulnerable to side channel attacks. Do not use for
anything but tests."""
import random
+from .address import byte_to_base58
+
def modinv(a, n):
"""Compute the modular inverse of a modulo n
@@ -384,3 +386,14 @@ class ECKey():
rb = r.to_bytes((r.bit_length() + 8) // 8, 'big')
sb = s.to_bytes((s.bit_length() + 8) // 8, 'big')
return b'\x30' + bytes([4 + len(rb) + len(sb), 2, len(rb)]) + rb + bytes([2, len(sb)]) + sb
+
+def bytes_to_wif(b, compressed=True):
+ if compressed:
+ b += b'\x01'
+ return byte_to_base58(b, 239)
+
+def generate_wif_key():
+ # Makes a WIF privkey for imports
+ k = ECKey()
+ k.generate()
+ return bytes_to_wif(k.get_bytes(), k.is_compressed)
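A short usage sketch of the new key helpers, as rpc_createmultisig.py now uses them: generate a fresh key and hand its WIF encoding to a wallet RPC (the 239 version byte targets regtest/testnet; this key generation is test-only and not secure):

    from test_framework.key import ECKey, bytes_to_wif

    k = ECKey()
    k.generate()                                      # random test-only private key
    pubkey_hex = k.get_pubkey().get_bytes().hex()     # hex pubkey for createmultisig
    wif = bytes_to_wif(k.get_bytes(), k.is_compressed)
    # wif can now be passed to a wallet RPC such as importprivkey(wif)
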
diff --git a/test/functional/test_framework/messages.py b/test/functional/test_framework/messages.py
index 33fba1c69a..6c9c8a7397 100755
--- a/test/functional/test_framework/messages.py
+++ b/test/functional/test_framework/messages.py
@@ -54,9 +54,12 @@ NODE_NETWORK_LIMITED = (1 << 10)
MSG_TX = 1
MSG_BLOCK = 2
MSG_FILTERED_BLOCK = 3
+MSG_CMPCT_BLOCK = 4
MSG_WITNESS_FLAG = 1 << 30
MSG_TYPE_MASK = 0xffffffff >> 2
+FILTER_TYPE_BASIC = 0
+
# Serialization/deserialization tools
def sha256(s):
return hashlib.new('sha256', s).digest()
@@ -603,16 +606,16 @@ class CBlock(CBlockHeader):
__slots__ = ("vtx",)
def __init__(self, header=None):
- super(CBlock, self).__init__(header)
+ super().__init__(header)
self.vtx = []
def deserialize(self, f):
- super(CBlock, self).deserialize(f)
+ super().deserialize(f)
self.vtx = deser_vector(f, CTransaction)
def serialize(self, with_witness=True):
r = b""
- r += super(CBlock, self).serialize()
+ r += super().serialize()
if with_witness:
r += ser_vector(self.vtx, "serialize_with_witness")
else:
@@ -752,7 +755,7 @@ class P2PHeaderAndShortIDs:
class P2PHeaderAndShortWitnessIDs(P2PHeaderAndShortIDs):
__slots__ = ()
def serialize(self):
- return super(P2PHeaderAndShortWitnessIDs, self).serialize(with_witness=True)
+ return super().serialize(with_witness=True)
# Calculate the BIP 152-compact blocks shortid for a given transaction hash
def calculate_shortid(k0, k1, tx_hash):
@@ -1512,3 +1515,50 @@ class msg_no_witness_blocktxn(msg_blocktxn):
def serialize(self):
return self.block_transactions.serialize(with_witness=False)
+
+class msg_getcfcheckpt:
+ __slots__ = ("filter_type", "stop_hash")
+ msgtype = b"getcfcheckpt"
+
+ def __init__(self, filter_type, stop_hash):
+ self.filter_type = filter_type
+ self.stop_hash = stop_hash
+
+ def deserialize(self, f):
+ self.filter_type = struct.unpack("<B", f.read(1))[0]
+ self.stop_hash = deser_uint256(f)
+
+ def serialize(self):
+ r = b""
+ r += struct.pack("<B", self.filter_type)
+ r += ser_uint256(self.stop_hash)
+ return r
+
+ def __repr__(self):
+ return "msg_getcfcheckpt(filter_type={:#x}, stop_hash={:x})".format(
+ self.filter_type, self.stop_hash)
+
+class msg_cfcheckpt:
+ __slots__ = ("filter_type", "stop_hash", "headers")
+ msgtype = b"cfcheckpt"
+
+ def __init__(self, filter_type=None, stop_hash=None, headers=None):
+ self.filter_type = filter_type
+ self.stop_hash = stop_hash
+ self.headers = headers
+
+ def deserialize(self, f):
+ self.filter_type = struct.unpack("<B", f.read(1))[0]
+ self.stop_hash = deser_uint256(f)
+ self.headers = deser_uint256_vector(f)
+
+ def serialize(self):
+ r = b""
+ r += struct.pack("<B", self.filter_type)
+ r += ser_uint256(self.stop_hash)
+ r += ser_uint256_vector(self.headers)
+ return r
+
+ def __repr__(self):
+ return "msg_cfcheckpt(filter_type={:#x}, stop_hash={:x})".format(
+ self.filter_type, self.stop_hash)
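A minimal sketch of how a test might exercise the new BIP 157 messages: request the checkpoint headers for the basic filter type up to a stop hash and read the reply from last_message (the node and peer objects are illustrative, and the reply is parsed as msg_cfcheckpt via the handler added to mininode.py below):

    from test_framework.messages import FILTER_TYPE_BASIC, msg_getcfcheckpt

    stop_hash = int(node.getbestblockhash(), 16)    # assumes a running test node
    request = msg_getcfcheckpt(filter_type=FILTER_TYPE_BASIC, stop_hash=stop_hash)
    peer.send_and_ping(request)                     # peer: a connected P2PInterface
    response = peer.last_message['cfcheckpt']
    assert response.filter_type == FILTER_TYPE_BASIC
    assert response.stop_hash == stop_hash
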
diff --git a/test/functional/test_framework/mininode.py b/test/functional/test_framework/mininode.py
index 6aa73623e6..95a63717d6 100755
--- a/test/functional/test_framework/mininode.py
+++ b/test/functional/test_framework/mininode.py
@@ -12,7 +12,10 @@ found in the mini-node branch of http://github.com/jgarzik/pynode.
P2PConnection: A low-level connection object to a node's P2P interface
P2PInterface: A high-level interface object for communicating to a node over P2P
P2PDataStore: A p2p interface class that keeps a store of transactions and blocks
- and can respond correctly to getdata and getheaders messages"""
+ and can respond correctly to getdata and getheaders messages
+P2PTxInvStore: A p2p interface class that inherits from P2PInterface, and keeps
+ a count of how many times each txid has been announced."""
+
import asyncio
from collections import defaultdict
from io import BytesIO
@@ -28,6 +31,7 @@ from test_framework.messages import (
msg_block,
MSG_BLOCK,
msg_blocktxn,
+ msg_cfcheckpt,
msg_cmpctblock,
msg_feefilter,
msg_filteradd,
@@ -64,6 +68,7 @@ MESSAGEMAP = {
b"addr": msg_addr,
b"block": msg_block,
b"blocktxn": msg_blocktxn,
+ b"cfcheckpt": msg_cfcheckpt,
b"cmpctblock": msg_cmpctblock,
b"feefilter": msg_feefilter,
b"filteradd": msg_filteradd,
@@ -117,8 +122,9 @@ class P2PConnection(asyncio.Protocol):
def is_connected(self):
return self._transport is not None
- def peer_connect(self, dstaddr, dstport, *, net):
+ def peer_connect(self, dstaddr, dstport, *, net, timeout_factor):
assert not self.is_connected
+ self.timeout_factor = timeout_factor
self.dstaddr = dstaddr
self.dstport = dstport
# The initial message to send after the connection was made:
@@ -324,6 +330,7 @@ class P2PInterface(P2PConnection):
def on_addr(self, message): pass
def on_block(self, message): pass
def on_blocktxn(self, message): pass
+ def on_cfcheckpt(self, message): pass
def on_cmpctblock(self, message): pass
def on_feefilter(self, message): pass
def on_filteradd(self, message): pass
@@ -364,9 +371,12 @@ class P2PInterface(P2PConnection):
# Connection helper methods
+ def wait_until(self, test_function, timeout):
+ wait_until(test_function, timeout=timeout, lock=mininode_lock, timeout_factor=self.timeout_factor)
+
def wait_for_disconnect(self, timeout=60):
test_function = lambda: not self.is_connected
- wait_until(test_function, timeout=timeout, lock=mininode_lock)
+ self.wait_until(test_function, timeout=timeout)
# Message receiving helper methods
@@ -377,14 +387,14 @@ class P2PInterface(P2PConnection):
return False
return self.last_message['tx'].tx.rehash() == txid
- wait_until(test_function, timeout=timeout, lock=mininode_lock)
+ self.wait_until(test_function, timeout=timeout)
def wait_for_block(self, blockhash, timeout=60):
def test_function():
assert self.is_connected
return self.last_message.get("block") and self.last_message["block"].block.rehash() == blockhash
- wait_until(test_function, timeout=timeout, lock=mininode_lock)
+ self.wait_until(test_function, timeout=timeout)
def wait_for_header(self, blockhash, timeout=60):
def test_function():
@@ -394,7 +404,7 @@ class P2PInterface(P2PConnection):
return False
return last_headers.headers[0].rehash() == int(blockhash, 16)
- wait_until(test_function, timeout=timeout, lock=mininode_lock)
+ self.wait_until(test_function, timeout=timeout)
def wait_for_merkleblock(self, blockhash, timeout=60):
def test_function():
@@ -404,7 +414,7 @@ class P2PInterface(P2PConnection):
return False
return last_filtered_block.merkleblock.header.rehash() == int(blockhash, 16)
- wait_until(test_function, timeout=timeout, lock=mininode_lock)
+ self.wait_until(test_function, timeout=timeout)
def wait_for_getdata(self, hash_list, timeout=60):
"""Waits for a getdata message.
@@ -418,7 +428,7 @@ class P2PInterface(P2PConnection):
return False
return [x.hash for x in last_data.inv] == hash_list
- wait_until(test_function, timeout=timeout, lock=mininode_lock)
+ self.wait_until(test_function, timeout=timeout)
def wait_for_getheaders(self, timeout=60):
"""Waits for a getheaders message.
@@ -432,7 +442,7 @@ class P2PInterface(P2PConnection):
assert self.is_connected
return self.last_message.get("getheaders")
- wait_until(test_function, timeout=timeout, lock=mininode_lock)
+ self.wait_until(test_function, timeout=timeout)
def wait_for_inv(self, expected_inv, timeout=60):
"""Waits for an INV message and checks that the first inv object in the message was as expected."""
@@ -445,13 +455,13 @@ class P2PInterface(P2PConnection):
self.last_message["inv"].inv[0].type == expected_inv[0].type and \
self.last_message["inv"].inv[0].hash == expected_inv[0].hash
- wait_until(test_function, timeout=timeout, lock=mininode_lock)
+ self.wait_until(test_function, timeout=timeout)
def wait_for_verack(self, timeout=60):
def test_function():
return self.message_count["verack"]
- wait_until(test_function, timeout=timeout, lock=mininode_lock)
+ self.wait_until(test_function, timeout=timeout)
# Message sending helper functions
@@ -467,7 +477,7 @@ class P2PInterface(P2PConnection):
assert self.is_connected
return self.last_message.get("pong") and self.last_message["pong"].nonce == self.ping_counter
- wait_until(test_function, timeout=timeout, lock=mininode_lock)
+ self.wait_until(test_function, timeout=timeout)
self.ping_counter += 1
@@ -583,7 +593,7 @@ class P2PDataStore(P2PInterface):
self.send_message(msg_block(block=b))
else:
self.send_message(msg_headers([CBlockHeader(block) for block in blocks]))
- wait_until(lambda: blocks[-1].sha256 in self.getdata_requests, timeout=timeout, lock=mininode_lock)
+ self.wait_until(lambda: blocks[-1].sha256 in self.getdata_requests, timeout=timeout)
if expect_disconnect:
self.wait_for_disconnect(timeout=timeout)
@@ -591,7 +601,7 @@ class P2PDataStore(P2PInterface):
self.sync_with_ping(timeout=timeout)
if success:
- wait_until(lambda: node.getbestblockhash() == blocks[-1].hash, timeout=timeout)
+ self.wait_until(lambda: node.getbestblockhash() == blocks[-1].hash, timeout=timeout)
else:
assert node.getbestblockhash() != blocks[-1].hash
@@ -627,3 +637,30 @@ class P2PDataStore(P2PInterface):
# Check that none of the txs are now in the mempool
for tx in txs:
assert tx.hash not in raw_mempool, "{} tx found in mempool".format(tx.hash)
+
+class P2PTxInvStore(P2PInterface):
+ """A P2PInterface which stores a count of how many times each txid has been announced."""
+ def __init__(self):
+ super().__init__()
+ self.tx_invs_received = defaultdict(int)
+
+ def on_inv(self, message):
+ super().on_inv(message) # Send getdata in response.
+ # Store how many times invs have been received for each tx.
+ for i in message.inv:
+ if i.type == MSG_TX:
+ # save txid
+ self.tx_invs_received[i.hash] += 1
+
+ def get_invs(self):
+ with mininode_lock:
+ return list(self.tx_invs_received.keys())
+
+ def wait_for_broadcast(self, txns, timeout=60):
+ """Waits for the txns (list of txids) to complete initial broadcast.
+ The mempool should mark unbroadcast=False for these transactions.
+ """
+ # Wait until invs have been received (and getdatas sent) for each txid.
+ self.wait_until(lambda: set(self.get_invs()) == set([int(tx, 16) for tx in txns]), timeout)
+ # Flush messages and wait for the getdatas to be processed
+ self.sync_with_ping()
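A short usage sketch for the new P2PTxInvStore helper, as a test such as mempool_unbroadcast.py might use it (the node and transaction are illustrative):

    conn = node.add_p2p_connection(P2PTxInvStore())

    txid = node.sendtoaddress(node.getnewaddress(), 1)
    # Wait until the node has inv'ed the transaction to our peer, i.e. initial
    # broadcast is complete and the mempool's unbroadcast flag should clear.
    conn.wait_for_broadcast([txid])
    assert int(txid, 16) in conn.get_invs()
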
diff --git a/test/functional/test_framework/script.py b/test/functional/test_framework/script.py
index 016a2b4f0f..9102266456 100644
--- a/test/functional/test_framework/script.py
+++ b/test/functional/test_framework/script.py
@@ -8,6 +8,7 @@ This file is modified from python-bitcoinlib.
"""
import hashlib
import struct
+import unittest
from .messages import (
CTransaction,
@@ -97,7 +98,7 @@ class CScriptOp(int):
return _opcode_instances[n]
except IndexError:
assert len(_opcode_instances) == n
- _opcode_instances.append(super(CScriptOp, cls).__new__(cls, n))
+ _opcode_instances.append(super().__new__(cls, n))
return _opcode_instances[n]
# Populate opcode instance table
@@ -372,7 +373,7 @@ class CScriptTruncatedPushDataError(CScriptInvalidError):
"""Invalid pushdata due to truncation"""
def __init__(self, msg, data):
self.data = data
- super(CScriptTruncatedPushDataError, self).__init__(msg)
+ super().__init__(msg)
# This is used, eg, for blockchain heights in coinbase scripts (bip34)
@@ -458,14 +459,14 @@ class CScript(bytes):
def __new__(cls, value=b''):
if isinstance(value, bytes) or isinstance(value, bytearray):
- return super(CScript, cls).__new__(cls, value)
+ return super().__new__(cls, value)
else:
def coerce_iterable(iterable):
for instance in iterable:
yield cls.__coerce_instance(instance)
# Annoyingly on both python2 and python3 bytes.join() always
# returns a bytes instance even when subclassed.
- return super(CScript, cls).__new__(cls, b''.join(coerce_iterable(value)))
+ return super().__new__(cls, b''.join(coerce_iterable(value)))
def raw_iter(self):
"""Raw iteration
@@ -708,3 +709,25 @@ def SegwitV0SignatureHash(script, txTo, inIdx, hashtype, amount):
ss += struct.pack("<I", hashtype)
return hash256(ss)
+
+class TestFrameworkScript(unittest.TestCase):
+ def test_bn2vch(self):
+ self.assertEqual(bn2vch(0), bytes([]))
+ self.assertEqual(bn2vch(1), bytes([0x01]))
+ self.assertEqual(bn2vch(-1), bytes([0x81]))
+ self.assertEqual(bn2vch(0x7F), bytes([0x7F]))
+ self.assertEqual(bn2vch(-0x7F), bytes([0xFF]))
+ self.assertEqual(bn2vch(0x80), bytes([0x80, 0x00]))
+ self.assertEqual(bn2vch(-0x80), bytes([0x80, 0x80]))
+ self.assertEqual(bn2vch(0xFF), bytes([0xFF, 0x00]))
+ self.assertEqual(bn2vch(-0xFF), bytes([0xFF, 0x80]))
+ self.assertEqual(bn2vch(0x100), bytes([0x00, 0x01]))
+ self.assertEqual(bn2vch(-0x100), bytes([0x00, 0x81]))
+ self.assertEqual(bn2vch(0x7FFF), bytes([0xFF, 0x7F]))
+ self.assertEqual(bn2vch(-0x8000), bytes([0x00, 0x80, 0x80]))
+ self.assertEqual(bn2vch(-0x7FFFFF), bytes([0xFF, 0xFF, 0xFF]))
+ self.assertEqual(bn2vch(0x80000000), bytes([0x00, 0x00, 0x00, 0x80, 0x00]))
+ self.assertEqual(bn2vch(-0x80000000), bytes([0x00, 0x00, 0x00, 0x80, 0x80]))
+ self.assertEqual(bn2vch(0xFFFFFFFF), bytes([0xFF, 0xFF, 0xFF, 0xFF, 0x00]))
+ self.assertEqual(bn2vch(123456789), bytes([0x15, 0xCD, 0x5B, 0x07]))
+ self.assertEqual(bn2vch(-54321), bytes([0x31, 0xD4, 0x80]))
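The unit tests above pin down bn2vch(), the minimally-encoded script-number serialization: little-endian magnitude with the sign carried in the top bit of the last byte, and an extra sign byte appended when that bit is already occupied by the magnitude. A sketch consistent with those vectors (not the framework's implementation, just an equivalent reading of it):

    def bn2vch(v):
        """Encode an integer as a minimally-encoded script number (sketch)."""
        if v == 0:
            return bytes([])
        neg = v < 0
        absv = abs(v)
        out = []
        while absv:
            out.append(absv & 0xFF)
            absv >>= 8
        if out[-1] & 0x80:
            # Top bit already used by the magnitude: add a dedicated sign byte.
            out.append(0x80 if neg else 0x00)
        elif neg:
            out[-1] |= 0x80
        return bytes(out)

    assert bn2vch(0x80) == bytes([0x80, 0x00])
    assert bn2vch(-54321) == bytes([0x31, 0xD4, 0x80])
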
diff --git a/test/functional/test_framework/test_framework.py b/test/functional/test_framework/test_framework.py
index d4cf5f8896..716fa1d845 100755
--- a/test/functional/test_framework/test_framework.py
+++ b/test/functional/test_framework/test_framework.py
@@ -6,11 +6,12 @@
import configparser
from enum import Enum
-import logging
import argparse
+import logging
import os
import pdb
import random
+import re
import shutil
import subprocess
import sys
@@ -101,6 +102,9 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
self.bind_to_localhost_only = True
self.set_test_params()
self.parse_args()
+ if self.options.timeout_factor == 0:
+ self.options.timeout_factor = 99999
+ self.rpc_timeout = int(self.rpc_timeout * self.options.timeout_factor) # optionally, increase timeout by a factor
def main(self):
"""Main function. This should not be overridden by the subclass test scripts."""
@@ -136,6 +140,7 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
sys.exit(exit_code)
def parse_args(self):
+ previous_releases_path = os.getenv("PREVIOUS_RELEASES_DIR") or os.getcwd() + "/releases"
parser = argparse.ArgumentParser(usage="%(prog)s [options]")
parser.add_argument("--nocleanup", dest="nocleanup", default=False, action="store_true",
help="Leave bitcoinds and test.* datadir on exit or error")
@@ -150,6 +155,9 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
help="Print out all RPC calls as they are made")
parser.add_argument("--portseed", dest="port_seed", default=os.getpid(), type=int,
help="The seed to use for assigning port numbers (default: current process id)")
+ parser.add_argument("--previous-releases", dest="prev_releases", action="store_true",
+ default=os.path.isdir(previous_releases_path) and bool(os.listdir(previous_releases_path)),
+ help="Force test of previous releases (default: %(default)s)")
parser.add_argument("--coveragedir", dest="coveragedir",
help="Write tested RPC commands into this directory")
parser.add_argument("--configfile", dest="configfile",
@@ -165,8 +173,12 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
help="run nodes under the valgrind memory error detector: expect at least a ~10x slowdown, valgrind 3.14 or later required")
parser.add_argument("--randomseed", type=int,
help="set a random seed for deterministically reproducing a previous test run")
+ parser.add_argument("--descriptors", default=False, action="store_true",
+ help="Run test using a descriptor wallet")
+ parser.add_argument('--timeout-factor', dest="timeout_factor", type=float, default=1.0, help='adjust test timeouts by a factor. Setting it to 0 disables all timeouts')
self.add_options(parser)
self.options = parser.parse_args()
+ self.options.previous_releases_path = previous_releases_path
def setup(self):
"""Call this method to start up the test framework object with options set."""
@@ -180,13 +192,22 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
config = configparser.ConfigParser()
config.read_file(open(self.options.configfile))
self.config = config
- self.options.bitcoind = os.getenv("BITCOIND", default=config["environment"]["BUILDDIR"] + '/src/bitcoind' + config["environment"]["EXEEXT"])
- self.options.bitcoincli = os.getenv("BITCOINCLI", default=config["environment"]["BUILDDIR"] + '/src/bitcoin-cli' + config["environment"]["EXEEXT"])
+ fname_bitcoind = os.path.join(
+ config["environment"]["BUILDDIR"],
+ "src",
+ "bitcoind" + config["environment"]["EXEEXT"],
+ )
+ fname_bitcoincli = os.path.join(
+ config["environment"]["BUILDDIR"],
+ "src",
+ "bitcoin-cli" + config["environment"]["EXEEXT"],
+ )
+ self.options.bitcoind = os.getenv("BITCOIND", default=fname_bitcoind)
+ self.options.bitcoincli = os.getenv("BITCOINCLI", default=fname_bitcoincli)
os.environ['PATH'] = os.pathsep.join([
os.path.join(config['environment']['BUILDDIR'], 'src'),
- os.path.join(config['environment']['BUILDDIR'], 'src', 'qt'),
- os.environ['PATH']
+ os.path.join(config['environment']['BUILDDIR'], 'src', 'qt'), os.environ['PATH']
])
# Set up temp directory and start logging
@@ -333,11 +354,23 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
def setup_nodes(self):
"""Override this method to customize test node setup"""
- extra_args = None
+ extra_args = [[]] * self.num_nodes
+ wallets = [[]] * self.num_nodes
if hasattr(self, "extra_args"):
extra_args = self.extra_args
+ wallets = [[x for x in eargs if x.startswith('-wallet=')] for eargs in extra_args]
+ extra_args = [x + ['-nowallet'] for x in extra_args]
self.add_nodes(self.num_nodes, extra_args)
self.start_nodes()
+ for i, n in enumerate(self.nodes):
+ n.extra_args.pop()
+ if '-wallet=0' in n.extra_args or '-nowallet' in n.extra_args or '-disablewallet' in n.extra_args or not self.is_wallet_compiled():
+ continue
+ if '-wallet=' not in wallets[i] and not any([x.startswith('-wallet=') for x in wallets[i]]):
+ wallets[i].append('-wallet=')
+ for w in wallets[i]:
+ wallet_name = w.split('=', 1)[1]
+ n.createwallet(wallet_name=wallet_name, descriptors=self.options.descriptors)
self.import_deterministic_coinbase_privkeys()
if not self.setup_clean_chain:
for n in self.nodes:
@@ -374,6 +407,25 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
Should only be called once after the nodes have been specified in
set_test_params()."""
+ def get_bin_from_version(version, bin_name, bin_default):
+ if not version:
+ return bin_default
+ return os.path.join(
+ self.options.previous_releases_path,
+ re.sub(
+ r'\.0$',
+ '', # remove trailing .0 for point releases
+ 'v{}.{}.{}.{}'.format(
+ (version % 100000000) // 1000000,
+ (version % 1000000) // 10000,
+ (version % 10000) // 100,
+ (version % 100) // 1,
+ ),
+ ),
+ 'bin',
+ bin_name,
+ )
+
if self.bind_to_localhost_only:
extra_confs = [["bind=127.0.0.1"]] * num_nodes
else:
@@ -383,9 +435,9 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
if versions is None:
versions = [None] * num_nodes
if binary is None:
- binary = [self.options.bitcoind] * num_nodes
+ binary = [get_bin_from_version(v, 'bitcoind', self.options.bitcoind) for v in versions]
if binary_cli is None:
- binary_cli = [self.options.bitcoincli] * num_nodes
+ binary_cli = [get_bin_from_version(v, 'bitcoin-cli', self.options.bitcoincli) for v in versions]
assert_equal(len(extra_confs), num_nodes)
assert_equal(len(extra_args), num_nodes)
assert_equal(len(versions), num_nodes)
@@ -398,6 +450,7 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
chain=self.chain,
rpchost=rpchost,
timewait=self.rpc_timeout,
+ timeout_factor=self.options.timeout_factor,
bitcoind=binary[i],
bitcoin_cli=binary_cli[i],
version=versions[i],
@@ -408,6 +461,7 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
use_cli=self.options.usecli,
start_perf=self.options.perf,
use_valgrind=self.options.valgrind,
+ descriptors=self.options.descriptors,
))
def start_node(self, i, *args, **kwargs):
@@ -543,10 +597,12 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
extra_args=['-disablewallet'],
rpchost=None,
timewait=self.rpc_timeout,
+ timeout_factor=self.options.timeout_factor,
bitcoind=self.options.bitcoind,
bitcoin_cli=self.options.bitcoincli,
coverage_dir=None,
cwd=self.options.tmpdir,
+ descriptors=self.options.descriptors,
))
self.start_node(CACHE_NODE_ID)
cache_node = self.nodes[CACHE_NODE_ID]
@@ -624,6 +680,19 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
if not self.is_cli_compiled():
raise SkipTest("bitcoin-cli has not been compiled.")
+ def skip_if_no_previous_releases(self):
+ """Skip the running test if previous releases are not available."""
+ if not self.has_previous_releases():
+ raise SkipTest("previous releases not available or disabled")
+
+ def has_previous_releases(self):
+ """Checks whether previous releases are present and enabled."""
+ if not os.path.isdir(self.options.previous_releases_path):
+ if self.options.prev_releases:
+ raise AssertionError("Force test of previous releases but releases missing: {}".format(
+ self.options.previous_releases_path))
+ return self.options.prev_releases
+
def is_cli_compiled(self):
"""Checks whether bitcoin-cli was compiled."""
return self.config["components"].getboolean("ENABLE_CLI")
diff --git a/test/functional/test_framework/test_node.py b/test/functional/test_framework/test_node.py
index 507a0cff60..ebc0501e11 100755
--- a/test/functional/test_framework/test_node.py
+++ b/test/functional/test_framework/test_node.py
@@ -22,6 +22,7 @@ import shlex
import sys
from .authproxy import JSONRPCException
+from .descriptors import descsum_create
from .util import (
MAX_NODES,
append_config,
@@ -61,7 +62,7 @@ class TestNode():
To make things easier for the test writer, any unrecognised messages will
be dispatched to the RPC connection."""
- def __init__(self, i, datadir, *, chain, rpchost, timewait, bitcoind, bitcoin_cli, coverage_dir, cwd, extra_conf=None, extra_args=None, use_cli=False, start_perf=False, use_valgrind=False, version=None):
+ def __init__(self, i, datadir, *, chain, rpchost, timewait, timeout_factor, bitcoind, bitcoin_cli, coverage_dir, cwd, extra_conf=None, extra_args=None, use_cli=False, start_perf=False, use_valgrind=False, version=None, descriptors=False):
"""
Kwargs:
start_perf (bool): If True, begin profiling the node with `perf` as soon as
@@ -79,6 +80,7 @@ class TestNode():
self.binary = bitcoind
self.coverage_dir = coverage_dir
self.cwd = cwd
+ self.descriptors = descriptors
if extra_conf is not None:
append_config(datadir, extra_conf)
# Most callers will just need to add extra args to the standard list below.
@@ -108,7 +110,7 @@ class TestNode():
"--gen-suppressions=all", "--exit-on-first-error=yes",
"--error-exitcode=1", "--quiet"] + self.args
- if self.version is None or self.version >= 190000:
+ if self.version_is_at_least(190000):
self.args.append("-logthreadnames")
self.cli = TestNodeCLI(bitcoin_cli, self.datadir)
@@ -126,6 +128,7 @@ class TestNode():
self.perf_subprocesses = {}
self.p2ps = []
+ self.timeout_factor = timeout_factor
AddressKeyPair = collections.namedtuple('AddressKeyPair', ['address', 'key'])
PRIV_KEYS = [
@@ -170,10 +173,10 @@ class TestNode():
def __getattr__(self, name):
"""Dispatches any unrecognised messages to the RPC connection or a CLI instance."""
if self.use_cli:
- return getattr(self.cli, name)
+ return getattr(RPCOverloadWrapper(self.cli, True, self.descriptors), name)
else:
assert self.rpc_connected and self.rpc is not None, self._node_msg("Error: no RPC connection")
- return getattr(self.rpc, name)
+ return getattr(RPCOverloadWrapper(self.rpc, descriptors=self.descriptors), name)
def start(self, extra_args=None, *, cwd=None, stdout=None, stderr=None, **kwargs):
"""Start the node."""
@@ -216,9 +219,35 @@ class TestNode():
raise FailedToStartError(self._node_msg(
'bitcoind exited with status {} during initialization'.format(self.process.returncode)))
try:
- rpc = get_rpc_proxy(rpc_url(self.datadir, self.index, self.chain, self.rpchost), self.index, timeout=self.rpc_timeout, coveragedir=self.coverage_dir)
+ rpc = get_rpc_proxy(
+ rpc_url(self.datadir, self.index, self.chain, self.rpchost),
+ self.index,
+ timeout=self.rpc_timeout // 2, # Shorter timeout to allow for one retry in case of ETIMEDOUT
+ coveragedir=self.coverage_dir,
+ )
rpc.getblockcount()
# If the call to getblockcount() succeeds then the RPC connection is up
+ if self.version_is_at_least(190000):
+ # getmempoolinfo.loaded is available since commit
+ # bb8ae2c (version 0.19.0)
+ wait_until(lambda: rpc.getmempoolinfo()['loaded'])
+ # Wait for the node to finish reindex, block import, and
+ # loading the mempool. Usually importing happens fast or
+ # even "immediate" when the node is started. However, there
+ # is no guarantee and sometimes ThreadImport might finish
+ # later. This is going to cause intermittent test failures,
+ # because generally the tests assume the node is fully
+ # ready after being started.
+ #
+ # For example, the node will reject block messages from p2p
+ # when it is still importing with the error "Unexpected
+ # block message received"
+ #
+ # The wait is done here to make tests as robust as possible
+ # and prevent racy tests and intermittent failures as much
+ # as possible. Some tests might not need this, but the
+ # overhead is trivial, and the added guarantees are worth
+ # the minimal performance cost.
self.log.debug("RPC successfully started")
if self.use_cli:
return
@@ -236,7 +265,11 @@ class TestNode():
# succeeds. Try again to properly raise the FailedToStartError
pass
except OSError as e:
- if e.errno != errno.ECONNREFUSED: # Port not yet open?
+ if e.errno == errno.ETIMEDOUT:
+ pass # Treat identical to ConnectionResetError
+ elif e.errno == errno.ECONNREFUSED:
+ pass # Port not yet open?
+ else:
raise # unknown OS error
except ValueError as e: # cookie file not found and no rpcuser or rpcpassword; bitcoind is still starting
if "No RPC credentials" not in str(e):
@@ -265,11 +298,14 @@ class TestNode():
def get_wallet_rpc(self, wallet_name):
if self.use_cli:
- return self.cli("-rpcwallet={}".format(wallet_name))
+ return RPCOverloadWrapper(self.cli("-rpcwallet={}".format(wallet_name)), True, self.descriptors)
else:
assert self.rpc_connected and self.rpc, self._node_msg("RPC not connected")
wallet_path = "wallet/{}".format(urllib.parse.quote(wallet_name))
- return self.rpc / wallet_path
+ return RPCOverloadWrapper(self.rpc / wallet_path, descriptors=self.descriptors)
+
+ def version_is_at_least(self, ver):
+ return self.version is None or self.version >= ver
def stop_node(self, expected_stderr='', wait=0):
"""Stop the node."""
@@ -278,7 +314,7 @@ class TestNode():
self.log.debug("Stopping node")
try:
# Do not use wait argument when testing older nodes, e.g. in feature_backwards_compatibility.py
- if self.version is None or self.version >= 180000:
+ if self.version_is_at_least(180000):
self.stop(wait=wait)
else:
self.stop()
@@ -322,13 +358,13 @@ class TestNode():
return True
def wait_until_stopped(self, timeout=BITCOIND_PROC_WAIT_TIMEOUT):
- wait_until(self.is_node_stopped, timeout=timeout)
+ wait_until(self.is_node_stopped, timeout=timeout, timeout_factor=self.timeout_factor)
@contextlib.contextmanager
def assert_debug_log(self, expected_msgs, unexpected_msgs=None, timeout=2):
if unexpected_msgs is None:
unexpected_msgs = []
- time_end = time.time() + timeout
+ time_end = time.time() + timeout * self.timeout_factor
debug_log = os.path.join(self.datadir, self.chain, 'debug.log')
with open(debug_log, encoding='utf-8') as dl:
dl.seek(0, 2)
@@ -485,7 +521,7 @@ class TestNode():
if 'dstaddr' not in kwargs:
kwargs['dstaddr'] = '127.0.0.1'
- p2p_conn.peer_connect(**kwargs, net=self.chain)()
+ p2p_conn.peer_connect(**kwargs, net=self.chain, timeout_factor=self.timeout_factor)()
self.p2ps.append(p2p_conn)
if wait_for_verack:
# Wait for the node to send us the version and verack
@@ -499,7 +535,7 @@ class TestNode():
# transaction that will be added to the mempool as soon as we return here.
#
# So syncing here is redundant when we only want to send a message, but the cost is low (a few milliseconds)
- # in comparision to the upside of making tests less fragile and unexpected intermittent errors less likely.
+ # in comparison to the upside of making tests less fragile and unexpected intermittent errors less likely.
p2p_conn.sync_with_ping()
return p2p_conn
@@ -535,6 +571,8 @@ class TestNodeCLIAttr:
def arg_to_cli(arg):
if isinstance(arg, bool):
return str(arg).lower()
+ elif arg is None:
+ return 'null'
elif isinstance(arg, dict) or isinstance(arg, list):
return json.dumps(arg, default=EncodeDecimal)
else:
@@ -595,3 +633,87 @@ class TestNodeCLI():
return json.loads(cli_stdout, parse_float=decimal.Decimal)
except json.JSONDecodeError:
return cli_stdout.rstrip("\n")
+
+class RPCOverloadWrapper():
+ def __init__(self, rpc, cli=False, descriptors=False):
+ self.rpc = rpc
+ self.is_cli = cli
+ self.descriptors = descriptors
+
+ def __getattr__(self, name):
+ return getattr(self.rpc, name)
+
+ def createwallet(self, wallet_name, disable_private_keys=None, blank=None, passphrase='', avoid_reuse=None, descriptors=None):
+ if descriptors is None:
+ descriptors = self.descriptors
+ return self.__getattr__('createwallet')(wallet_name, disable_private_keys, blank, passphrase, avoid_reuse, descriptors)
+
+ def importprivkey(self, privkey, label=None, rescan=None):
+ wallet_info = self.getwalletinfo()
+ if 'descriptors' not in wallet_info or ('descriptors' in wallet_info and not wallet_info['descriptors']):
+ return self.__getattr__('importprivkey')(privkey, label, rescan)
+ desc = descsum_create('combo(' + privkey + ')')
+ req = [{
+ 'desc': desc,
+ 'timestamp': 0 if rescan else 'now',
+ 'label': label if label else ''
+ }]
+ import_res = self.importdescriptors(req)
+ if not import_res[0]['success']:
+ raise JSONRPCException(import_res[0]['error'])
+
+ def addmultisigaddress(self, nrequired, keys, label=None, address_type=None):
+ wallet_info = self.getwalletinfo()
+ if 'descriptors' not in wallet_info or ('descriptors' in wallet_info and not wallet_info['descriptors']):
+ return self.__getattr__('addmultisigaddress')(nrequired, keys, label, address_type)
+ cms = self.createmultisig(nrequired, keys, address_type)
+ req = [{
+ 'desc': cms['descriptor'],
+ 'timestamp': 0,
+ 'label': label if label else ''
+ }]
+ import_res = self.importdescriptors(req)
+ if not import_res[0]['success']:
+ raise JSONRPCException(import_res[0]['error'])
+ return cms
+
+ def importpubkey(self, pubkey, label=None, rescan=None):
+ wallet_info = self.getwalletinfo()
+ if 'descriptors' not in wallet_info or ('descriptors' in wallet_info and not wallet_info['descriptors']):
+ return self.__getattr__('importpubkey')(pubkey, label, rescan)
+ desc = descsum_create('combo(' + pubkey + ')')
+ req = [{
+ 'desc': desc,
+ 'timestamp': 0 if rescan else 'now',
+ 'label': label if label else ''
+ }]
+ import_res = self.importdescriptors(req)
+ if not import_res[0]['success']:
+ raise JSONRPCException(import_res[0]['error'])
+
+ def importaddress(self, address, label=None, rescan=None, p2sh=None):
+ wallet_info = self.getwalletinfo()
+ if 'descriptors' not in wallet_info or ('descriptors' in wallet_info and not wallet_info['descriptors']):
+ return self.__getattr__('importaddress')(address, label, rescan, p2sh)
+ is_hex = False
+ try:
+            int(address, 16)
+ is_hex = True
+ desc = descsum_create('raw(' + address + ')')
+        except ValueError:
+ desc = descsum_create('addr(' + address + ')')
+ reqs = [{
+ 'desc': desc,
+ 'timestamp': 0 if rescan else 'now',
+ 'label': label if label else ''
+ }]
+ if is_hex and p2sh:
+ reqs.append({
+ 'desc': descsum_create('p2sh(raw(' + address + '))'),
+ 'timestamp': 0 if rescan else 'now',
+ 'label': label if label else ''
+ })
+ import_res = self.importdescriptors(reqs)
+ for res in import_res:
+ if not res['success']:
+ raise JSONRPCException(res['error'])
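Note: the overloads above all follow the same pattern -- a legacy key/address import call is rewritten as an importdescriptors request built from a checksummed descriptor. A minimal sketch of that translation, assuming the test_framework.descriptors module is importable (as it is for any functional test); the helper name is illustrative only, the wrapper performs the same construction inline:

    from test_framework.descriptors import descsum_create

    def legacy_privkey_to_import_request(privkey_wif, label='', rescan=True):
        # combo() expands a single key into the standard P2PK/P2PKH/P2WPKH/P2SH-P2WPKH scripts;
        # descsum_create appends the descriptor checksum required by importdescriptors.
        return [{
            'desc': descsum_create('combo(' + privkey_wif + ')'),
            'timestamp': 0 if rescan else 'now',
            'label': label,
        }]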
diff --git a/test/functional/test_framework/util.py b/test/functional/test_framework/util.py
index 64e1aa3bbc..6dfea7efd2 100644
--- a/test/functional/test_framework/util.py
+++ b/test/functional/test_framework/util.py
@@ -208,9 +208,10 @@ def str_to_b64str(string):
def satoshi_round(amount):
return Decimal(amount).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)
-def wait_until(predicate, *, attempts=float('inf'), timeout=float('inf'), lock=None):
+def wait_until(predicate, *, attempts=float('inf'), timeout=float('inf'), lock=None, timeout_factor=1.0):
if attempts == float('inf') and timeout == float('inf'):
timeout = 60
+ timeout = timeout * timeout_factor
attempt = 0
time_end = time.time() + timeout
@@ -265,7 +266,7 @@ def get_rpc_proxy(url, node_number, *, timeout=None, coveragedir=None):
"""
proxy_kwargs = {}
if timeout is not None:
- proxy_kwargs['timeout'] = timeout
+ proxy_kwargs['timeout'] = int(timeout)
proxy = AuthServiceProxy(url, **proxy_kwargs)
proxy.url = url # store URL on proxy for info
@@ -326,6 +327,13 @@ def initialize_datadir(dirname, n, chain):
os.makedirs(os.path.join(datadir, 'stdout'), exist_ok=True)
return datadir
+def adjust_bitcoin_conf_for_pre_17(conf_file):
+    with open(conf_file, 'r', encoding='utf8') as conf:
+ conf_data = conf.read()
+ with open(conf_file, 'w', encoding='utf8') as conf:
+ conf_data_changed = conf_data.replace('[regtest]', '')
+ conf.write(conf_data_changed)
+
def get_datadir_path(dirname, n):
return os.path.join(dirname, "node" + str(n))
@@ -391,7 +399,11 @@ def connect_nodes(from_connection, node_num):
from_connection.addnode(ip_port, "onetry")
# poll until version handshake complete to avoid race conditions
# with transaction relaying
- wait_until(lambda: all(peer['version'] != 0 for peer in from_connection.getpeerinfo()))
+ # See comments in net_processing:
+ # * Must have a version message before anything else
+ # * Must have a verack message before anything else
+ wait_until(lambda: all(peer['version'] != 0 for peer in from_connection.getpeerinfo()))
+ wait_until(lambda: all(peer['bytesrecv_per_msg'].pop('verack', 0) == 24 for peer in from_connection.getpeerinfo()))
def sync_blocks(rpc_connections, *, wait=1, timeout=60):
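Note: the second wait added to connect_nodes checks that exactly 24 bytes were received for the peer's verack. A verack message carries no payload, so its wire size is just the fixed 24-byte P2P message header (4-byte magic, 12-byte command, 4-byte length, 4-byte checksum). A quick sketch confirming the empty payload, assuming the test_framework.messages module:

    from test_framework.messages import msg_verack
    # verack serializes to an empty payload; only the 24-byte header goes over the wire.
    assert len(msg_verack().serialize()) == 0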
diff --git a/test/functional/test_framework/wallet_util.py b/test/functional/test_framework/wallet_util.py
index eb537015fb..1b6686ff45 100755
--- a/test/functional/test_framework/wallet_util.py
+++ b/test/functional/test_framework/wallet_util.py
@@ -13,6 +13,10 @@ from test_framework.address import (
script_to_p2sh_p2wsh,
script_to_p2wsh,
)
+from test_framework.key import (
+ bytes_to_wif,
+ ECKey,
+)
from test_framework.script import (
CScript,
OP_0,
@@ -66,6 +70,25 @@ def get_key(node):
p2sh_p2wpkh_redeem_script=CScript([OP_0, pkh]).hex(),
p2sh_p2wpkh_addr=key_to_p2sh_p2wpkh(pubkey))
+def get_generate_key():
+ """Generate a fresh key
+
+    Returns a named tuple of privkey, pubkey and all addresses and scripts."""
+ eckey = ECKey()
+ eckey.generate()
+ privkey = bytes_to_wif(eckey.get_bytes())
+ pubkey = eckey.get_pubkey().get_bytes().hex()
+ pkh = hash160(hex_str_to_bytes(pubkey))
+ return Key(privkey=privkey,
+ pubkey=pubkey,
+ p2pkh_script=CScript([OP_DUP, OP_HASH160, pkh, OP_EQUALVERIFY, OP_CHECKSIG]).hex(),
+ p2pkh_addr=key_to_p2pkh(pubkey),
+ p2wpkh_script=CScript([OP_0, pkh]).hex(),
+ p2wpkh_addr=key_to_p2wpkh(pubkey),
+ p2sh_p2wpkh_script=CScript([OP_HASH160, hash160(CScript([OP_0, pkh])), OP_EQUAL]).hex(),
+ p2sh_p2wpkh_redeem_script=CScript([OP_0, pkh]).hex(),
+ p2sh_p2wpkh_addr=key_to_p2sh_p2wpkh(pubkey))
+
def get_multisig(node):
"""Generate a fresh 2-of-3 multisig on node
diff --git a/test/functional/test_runner.py b/test/functional/test_runner.py
index 4f902717c3..7821355e29 100755
--- a/test/functional/test_runner.py
+++ b/test/functional/test_runner.py
@@ -24,6 +24,7 @@ import sys
import tempfile
import re
import logging
+import unittest
# Formatting. Default colors to empty strings.
BOLD, GREEN, RED, GREY = ("", ""), ("", ""), ("", ""), ("", "")
@@ -65,6 +66,10 @@ if os.name != 'nt' or sys.getwindowsversion() >= (10, 0, 14393):
TEST_EXIT_PASSED = 0
TEST_EXIT_SKIPPED = 77
+TEST_FRAMEWORK_MODULES = [
+ "script",
+]
+
EXTENDED_SCRIPTS = [
# These tests are not run by default.
# Longest test should go first, to favor running tests in parallel
@@ -76,6 +81,7 @@ BASE_SCRIPTS = [
# Scripts that are run by default.
# Longest test should go first, to favor running tests in parallel
'wallet_hd.py',
+ 'wallet_hd.py --descriptors',
'wallet_backup.py',
# vv Tests less than 5m vv
'mining_getblocktemplate_longpoll.py',
@@ -86,10 +92,13 @@ BASE_SCRIPTS = [
'feature_segwit.py',
# vv Tests less than 2m vv
'wallet_basic.py',
+ 'wallet_basic.py --descriptors',
'wallet_labels.py',
+ 'wallet_labels.py --descriptors',
'p2p_segwit.py',
'p2p_timeouts.py',
'p2p_tx_download.py',
+ 'mempool_updatefromblock.py',
'wallet_dump.py',
'wallet_listtransactions.py',
# vv Tests less than 60s vv
@@ -109,6 +118,7 @@ BASE_SCRIPTS = [
'feature_abortnode.py',
# vv Tests less than 30s vv
'wallet_keypool_topup.py',
+ 'wallet_keypool_topup.py --descriptors',
'feature_fee_estimation.py',
'interface_zmq.py',
'interface_bitcoin_cli.py',
@@ -122,6 +132,7 @@ BASE_SCRIPTS = [
'interface_rest.py',
'mempool_spend_coinbase.py',
'wallet_avoidreuse.py',
+ 'wallet_avoidreuse.py --descriptors',
'mempool_reorg.py',
'mempool_persist.py',
'wallet_multiwallet.py',
@@ -134,6 +145,7 @@ BASE_SCRIPTS = [
'interface_http.py',
'interface_rpc.py',
'rpc_psbt.py',
+ 'rpc_psbt.py --descriptors',
'rpc_users.py',
'rpc_whitelist.py',
'feature_proxy.py',
@@ -145,8 +157,11 @@ BASE_SCRIPTS = [
'rpc_deprecated.py',
'wallet_disable.py',
'p2p_addr_relay.py',
+ 'p2p_getdata.py',
'rpc_net.py',
'wallet_keypool.py',
+ 'wallet_keypool.py --descriptors',
+ 'wallet_descriptor.py',
'p2p_mempool.py',
'p2p_filter.py',
'rpc_setban.py',
@@ -168,6 +183,7 @@ BASE_SCRIPTS = [
'mempool_packages.py',
'mempool_package_onemore.py',
'rpc_createmultisig.py',
+ 'rpc_createmultisig.py --descriptors',
'feature_versionbits_warning.py',
'rpc_preciousblock.py',
'wallet_importprunedfunds.py',
@@ -180,6 +196,8 @@ BASE_SCRIPTS = [
'mempool_expiry.py',
'wallet_import_rescan.py',
'wallet_import_with_label.py',
+ 'wallet_importdescriptors.py',
+ 'wallet_upgradewallet.py',
'rpc_bind.py --ipv4',
'rpc_bind.py --ipv6',
'rpc_bind.py --nonloopback',
@@ -190,6 +208,7 @@ BASE_SCRIPTS = [
'wallet_listsinceblock.py',
'p2p_leak.py',
'wallet_encryption.py',
+ 'wallet_encryption.py --descriptors',
'feature_dersig.py',
'feature_cltv.py',
'rpc_uptime.py',
@@ -207,8 +226,10 @@ BASE_SCRIPTS = [
'feature_loadblock.py',
'p2p_dos_header_tree.py',
'p2p_unrequested_blocks.py',
+ 'p2p_blockfilters.py',
'feature_includeconf.py',
'feature_asmap.py',
+ 'mempool_unbroadcast.py',
'rpc_deriveaddresses.py',
'rpc_deriveaddresses.py --usecli',
'rpc_scantxoutset.py',
@@ -223,7 +244,6 @@ BASE_SCRIPTS = [
'rpc_help.py',
'feature_help.py',
'feature_shutdown.py',
- 'framework_test_script.py',
# Don't append tests at the end to avoid merge conflicts
# Put them in a random line within the section that fits their approximate run-time
]
@@ -386,6 +406,16 @@ def run_tests(*, test_list, src_dir, build_dir, tmpdir, jobs=1, enable_coverage=
if os.path.isdir(cache_dir):
print("%sWARNING!%s There is a cache directory here: %s. If tests fail unexpectedly, try deleting the cache directory." % (BOLD[1], BOLD[0], cache_dir))
+ # Test Framework Tests
+ print("Running Unit Tests for Test Framework Modules")
+ test_framework_tests = unittest.TestSuite()
+ for module in TEST_FRAMEWORK_MODULES:
+ test_framework_tests.addTest(unittest.TestLoader().loadTestsFromName("test_framework.{}".format(module)))
+ result = unittest.TextTestRunner(verbosity=1, failfast=True).run(test_framework_tests)
+ if not result.wasSuccessful():
+ logging.debug("Early exiting after failure in TestFramework unit tests")
+ sys.exit(False)
+
tests_dir = src_dir + '/test/functional/'
flags = ['--cachedir={}'.format(cache_dir)] + args
@@ -609,7 +639,7 @@ class TestResult():
def check_script_prefixes():
"""Check that test scripts start with one of the allowed name prefixes."""
- good_prefixes_re = re.compile("^(example|feature|interface|mempool|mining|p2p|rpc|wallet|tool|framework_test)_")
+ good_prefixes_re = re.compile("^(example|feature|interface|mempool|mining|p2p|rpc|wallet|tool)_")
bad_script_names = [script for script in ALL_SCRIPTS if good_prefixes_re.match(script) is None]
if bad_script_names:
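Note: the framework unit tests wired into run_tests above can also be exercised on their own; a sketch of an equivalent standalone invocation, assuming the working directory is test/functional and the module list mirrors TEST_FRAMEWORK_MODULES:

    import unittest
    suite = unittest.TestSuite()
    for module in ["script"]:
        suite.addTest(unittest.TestLoader().loadTestsFromName("test_framework.{}".format(module)))
    unittest.TextTestRunner(verbosity=1, failfast=True).run(suite)

Alternatively, `python3 -m unittest test_framework.script` from test/functional should run the same cases.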
diff --git a/test/functional/tool_wallet.py b/test/functional/tool_wallet.py
index b3d496dd51..039ce7daee 100755
--- a/test/functional/tool_wallet.py
+++ b/test/functional/tool_wallet.py
@@ -15,6 +15,7 @@ from test_framework.util import assert_equal
BUFFER_SIZE = 16 * 1024
+
class ToolWalletTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
@@ -48,7 +49,7 @@ class ToolWalletTest(BitcoinTestFramework):
h = hashlib.sha1()
mv = memoryview(bytearray(BUFFER_SIZE))
with open(self.wallet_path, 'rb', buffering=0) as f:
- for n in iter(lambda : f.readinto(mv), 0):
+ for n in iter(lambda: f.readinto(mv), 0):
h.update(mv[:n])
return h.hexdigest()
@@ -69,7 +70,12 @@ class ToolWalletTest(BitcoinTestFramework):
self.assert_raises_tool_error('Invalid command: help', 'help')
self.assert_raises_tool_error('Error: two methods provided (info and create). Only one method should be provided.', 'info', 'create')
self.assert_raises_tool_error('Error parsing command line arguments: Invalid parameter -foo', '-foo')
- self.assert_raises_tool_error('Error loading wallet.dat. Is wallet being used by other process?', '-wallet=wallet.dat', 'info')
+ self.assert_raises_tool_error(
+ 'Error initializing wallet database environment "{}"!\nError loading wallet.dat. Is wallet being used by other process?'
+ .format(os.path.join(self.nodes[0].datadir, self.chain, 'wallets')),
+ '-wallet=wallet.dat',
+ 'info',
+ )
self.assert_raises_tool_error('Error: no wallet file at nonexistent.dat', '-wallet=nonexistent.dat', 'info')
def test_tool_wallet_info(self):
@@ -84,7 +90,7 @@ class ToolWalletTest(BitcoinTestFramework):
#
# self.log.debug('Setting wallet file permissions to 400 (read-only)')
# os.chmod(self.wallet_path, stat.S_IRUSR)
- # assert(self.wallet_permissions() in ['400', '666']) # Sanity check. 666 because Appveyor.
+ # assert self.wallet_permissions() in ['400', '666'] # Sanity check. 666 because Appveyor.
# shasum_before = self.wallet_shasum()
timestamp_before = self.wallet_timestamp()
self.log.debug('Wallet file timestamp before calling info: {}'.format(timestamp_before))
@@ -103,7 +109,7 @@ class ToolWalletTest(BitcoinTestFramework):
self.log_wallet_timestamp_comparison(timestamp_before, timestamp_after)
self.log.debug('Setting wallet file permissions back to 600 (read/write)')
os.chmod(self.wallet_path, stat.S_IRUSR | stat.S_IWUSR)
- assert(self.wallet_permissions() in ['600', '666']) # Sanity check. 666 because Appveyor.
+ assert self.wallet_permissions() in ['600', '666'] # Sanity check. 666 because Appveyor.
#
# TODO: Wallet tool info should not write to the wallet file.
# The following lines should be uncommented and the tests still succeed:
diff --git a/test/functional/wallet_abandonconflict.py b/test/functional/wallet_abandonconflict.py
index c7b19081c8..90d17a806c 100755
--- a/test/functional/wallet_abandonconflict.py
+++ b/test/functional/wallet_abandonconflict.py
@@ -18,7 +18,6 @@ from test_framework.util import (
assert_raises_rpc_error,
connect_nodes,
disconnect_nodes,
- wait_until,
)
@@ -98,7 +97,7 @@ class AbandonConflictTest(BitcoinTestFramework):
# TODO: redo with eviction
self.stop_node(0)
self.start_node(0, extra_args=["-minrelaytxfee=0.0001"])
- wait_until(lambda: self.nodes[0].getmempoolinfo()['loaded'])
+ assert self.nodes[0].getmempoolinfo()['loaded']
# Verify txs no longer in either node's mempool
assert_equal(len(self.nodes[0].getrawmempool()), 0)
@@ -126,7 +125,7 @@ class AbandonConflictTest(BitcoinTestFramework):
# Verify that even with a low min relay fee, the tx is not reaccepted from wallet on startup once abandoned
self.stop_node(0)
self.start_node(0, extra_args=["-minrelaytxfee=0.00001"])
- wait_until(lambda: self.nodes[0].getmempoolinfo()['loaded'])
+ assert self.nodes[0].getmempoolinfo()['loaded']
assert_equal(len(self.nodes[0].getrawmempool()), 0)
assert_equal(self.nodes[0].getbalance(), balance)
@@ -148,7 +147,7 @@ class AbandonConflictTest(BitcoinTestFramework):
# Remove using high relay fee again
self.stop_node(0)
self.start_node(0, extra_args=["-minrelaytxfee=0.0001"])
- wait_until(lambda: self.nodes[0].getmempoolinfo()['loaded'])
+ assert self.nodes[0].getmempoolinfo()['loaded']
assert_equal(len(self.nodes[0].getrawmempool()), 0)
newbalance = self.nodes[0].getbalance()
assert_equal(newbalance, balance - Decimal("24.9996"))
diff --git a/test/functional/wallet_avoidreuse.py b/test/functional/wallet_avoidreuse.py
index 78a51a1d5f..780cce9d02 100755
--- a/test/functional/wallet_avoidreuse.py
+++ b/test/functional/wallet_avoidreuse.py
@@ -133,7 +133,7 @@ class AvoidReuseTest(BitcoinTestFramework):
tempwallet = ".wallet_avoidreuse.py_test_immutable_wallet.dat"
# Create a wallet with disable_private_keys set; this should work
- self.nodes[1].createwallet(tempwallet, True)
+ self.nodes[1].createwallet(wallet_name=tempwallet, disable_private_keys=True)
w = self.nodes[1].get_wallet_rpc(tempwallet)
# Attempt to unset the disable_private_keys flag; this should not work
@@ -249,43 +249,44 @@ class AvoidReuseTest(BitcoinTestFramework):
# getbalances should show no used, 5 btc trusted
assert_balances(self.nodes[1], mine={"used": 0, "trusted": 5})
- # For the second send, we transmute it to a related single-key address
- # to make sure it's also detected as re-use
- fund_spk = self.nodes[0].getaddressinfo(fundaddr)["scriptPubKey"]
- fund_decoded = self.nodes[0].decodescript(fund_spk)
- if second_addr_type == "p2sh-segwit":
- new_fundaddr = fund_decoded["segwit"]["p2sh-segwit"]
- elif second_addr_type == "bech32":
- new_fundaddr = fund_decoded["segwit"]["addresses"][0]
- else:
- new_fundaddr = fundaddr
- assert_equal(second_addr_type, "legacy")
-
- self.nodes[0].sendtoaddress(new_fundaddr, 10)
- self.nodes[0].generate(1)
- self.sync_all()
-
- # listunspent should show 2 total outputs (5, 10 btc), one unused (5), one reused (10)
- assert_unspent(self.nodes[1], total_count=2, total_sum=15, reused_count=1, reused_sum=10)
- # getbalances should show 10 used, 5 btc trusted
- assert_balances(self.nodes[1], mine={"used": 10, "trusted": 5})
-
- # node 1 should now have a balance of 5 (no dirty) or 15 (including dirty)
- assert_approx(self.nodes[1].getbalance(), 5, 0.001)
- assert_approx(self.nodes[1].getbalance(avoid_reuse=False), 15, 0.001)
-
- assert_raises_rpc_error(-6, "Insufficient funds", self.nodes[1].sendtoaddress, retaddr, 10)
-
- self.nodes[1].sendtoaddress(retaddr, 4)
-
- # listunspent should show 2 total outputs (1, 10 btc), one unused (1), one reused (10)
- assert_unspent(self.nodes[1], total_count=2, total_sum=11, reused_count=1, reused_sum=10)
- # getbalances should show 10 used, 1 btc trusted
- assert_balances(self.nodes[1], mine={"used": 10, "trusted": 1})
-
- # node 1 should now have about 1 btc left (no dirty) and 11 (including dirty)
- assert_approx(self.nodes[1].getbalance(), 1, 0.001)
- assert_approx(self.nodes[1].getbalance(avoid_reuse=False), 11, 0.001)
+ if not self.options.descriptors:
+ # For the second send, we transmute it to a related single-key address
+ # to make sure it's also detected as re-use
+ fund_spk = self.nodes[0].getaddressinfo(fundaddr)["scriptPubKey"]
+ fund_decoded = self.nodes[0].decodescript(fund_spk)
+ if second_addr_type == "p2sh-segwit":
+ new_fundaddr = fund_decoded["segwit"]["p2sh-segwit"]
+ elif second_addr_type == "bech32":
+ new_fundaddr = fund_decoded["segwit"]["addresses"][0]
+ else:
+ new_fundaddr = fundaddr
+ assert_equal(second_addr_type, "legacy")
+
+ self.nodes[0].sendtoaddress(new_fundaddr, 10)
+ self.nodes[0].generate(1)
+ self.sync_all()
+
+ # listunspent should show 2 total outputs (5, 10 btc), one unused (5), one reused (10)
+ assert_unspent(self.nodes[1], total_count=2, total_sum=15, reused_count=1, reused_sum=10)
+ # getbalances should show 10 used, 5 btc trusted
+ assert_balances(self.nodes[1], mine={"used": 10, "trusted": 5})
+
+ # node 1 should now have a balance of 5 (no dirty) or 15 (including dirty)
+ assert_approx(self.nodes[1].getbalance(), 5, 0.001)
+ assert_approx(self.nodes[1].getbalance(avoid_reuse=False), 15, 0.001)
+
+ assert_raises_rpc_error(-6, "Insufficient funds", self.nodes[1].sendtoaddress, retaddr, 10)
+
+ self.nodes[1].sendtoaddress(retaddr, 4)
+
+ # listunspent should show 2 total outputs (1, 10 btc), one unused (1), one reused (10)
+ assert_unspent(self.nodes[1], total_count=2, total_sum=11, reused_count=1, reused_sum=10)
+ # getbalances should show 10 used, 1 btc trusted
+ assert_balances(self.nodes[1], mine={"used": 10, "trusted": 1})
+
+ # node 1 should now have about 1 btc left (no dirty) and 11 (including dirty)
+ assert_approx(self.nodes[1].getbalance(), 1, 0.001)
+ assert_approx(self.nodes[1].getbalance(avoid_reuse=False), 11, 0.001)
def test_getbalances_used(self):
'''
diff --git a/test/functional/wallet_basic.py b/test/functional/wallet_basic.py
index 15746d312c..9e295af330 100755
--- a/test/functional/wallet_basic.py
+++ b/test/functional/wallet_basic.py
@@ -4,7 +4,6 @@
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the wallet."""
from decimal import Decimal
-import time
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
@@ -49,6 +48,7 @@ class WalletTest(BitcoinTestFramework):
return self.nodes[0].decoderawtransaction(txn)['vsize']
def run_test(self):
+
# Check that there's no UTXO on none of the nodes
assert_equal(len(self.nodes[0].listunspent()), 0)
assert_equal(len(self.nodes[1].listunspent()), 0)
@@ -219,7 +219,7 @@ class WalletTest(BitcoinTestFramework):
assert_equal(self.nodes[2].getbalance(), node_2_bal)
node_0_bal = self.check_fee_amount(self.nodes[0].getbalance(), node_0_bal + Decimal('10'), fee_per_byte, self.get_vsize(self.nodes[2].gettransaction(txid)['hex']))
- self.start_node(3)
+ self.start_node(3, self.nodes[3].extra_args)
connect_nodes(self.nodes[0], 3)
self.sync_all()
@@ -315,57 +315,59 @@ class WalletTest(BitcoinTestFramework):
# This will raise an exception since generate does not accept a string
assert_raises_rpc_error(-1, "not an integer", self.nodes[0].generate, "2")
- # This will raise an exception for the invalid private key format
- assert_raises_rpc_error(-5, "Invalid private key encoding", self.nodes[0].importprivkey, "invalid")
+ if not self.options.descriptors:
- # This will raise an exception for importing an address with the PS2H flag
- temp_address = self.nodes[1].getnewaddress("", "p2sh-segwit")
- assert_raises_rpc_error(-5, "Cannot use the p2sh flag with an address - use a script instead", self.nodes[0].importaddress, temp_address, "label", False, True)
+ # This will raise an exception for the invalid private key format
+ assert_raises_rpc_error(-5, "Invalid private key encoding", self.nodes[0].importprivkey, "invalid")
- # This will raise an exception for attempting to dump the private key of an address you do not own
- assert_raises_rpc_error(-3, "Address does not refer to a key", self.nodes[0].dumpprivkey, temp_address)
+ # This will raise an exception for importing an address with the PS2H flag
+ temp_address = self.nodes[1].getnewaddress("", "p2sh-segwit")
+ assert_raises_rpc_error(-5, "Cannot use the p2sh flag with an address - use a script instead", self.nodes[0].importaddress, temp_address, "label", False, True)
- # This will raise an exception for attempting to get the private key of an invalid Bitcoin address
- assert_raises_rpc_error(-5, "Invalid Bitcoin address", self.nodes[0].dumpprivkey, "invalid")
+ # This will raise an exception for attempting to dump the private key of an address you do not own
+ assert_raises_rpc_error(-3, "Address does not refer to a key", self.nodes[0].dumpprivkey, temp_address)
- # This will raise an exception for attempting to set a label for an invalid Bitcoin address
- assert_raises_rpc_error(-5, "Invalid Bitcoin address", self.nodes[0].setlabel, "invalid address", "label")
+ # This will raise an exception for attempting to get the private key of an invalid Bitcoin address
+ assert_raises_rpc_error(-5, "Invalid Bitcoin address", self.nodes[0].dumpprivkey, "invalid")
- # This will raise an exception for importing an invalid address
- assert_raises_rpc_error(-5, "Invalid Bitcoin address or script", self.nodes[0].importaddress, "invalid")
+ # This will raise an exception for attempting to set a label for an invalid Bitcoin address
+ assert_raises_rpc_error(-5, "Invalid Bitcoin address", self.nodes[0].setlabel, "invalid address", "label")
- # This will raise an exception for attempting to import a pubkey that isn't in hex
- assert_raises_rpc_error(-5, "Pubkey must be a hex string", self.nodes[0].importpubkey, "not hex")
+ # This will raise an exception for importing an invalid address
+ assert_raises_rpc_error(-5, "Invalid Bitcoin address or script", self.nodes[0].importaddress, "invalid")
- # This will raise an exception for importing an invalid pubkey
- assert_raises_rpc_error(-5, "Pubkey is not a valid public key", self.nodes[0].importpubkey, "5361746f736869204e616b616d6f746f")
+ # This will raise an exception for attempting to import a pubkey that isn't in hex
+ assert_raises_rpc_error(-5, "Pubkey must be a hex string", self.nodes[0].importpubkey, "not hex")
- # Import address and private key to check correct behavior of spendable unspents
- # 1. Send some coins to generate new UTXO
- address_to_import = self.nodes[2].getnewaddress()
- txid = self.nodes[0].sendtoaddress(address_to_import, 1)
- self.nodes[0].generate(1)
- self.sync_all(self.nodes[0:3])
+ # This will raise an exception for importing an invalid pubkey
+ assert_raises_rpc_error(-5, "Pubkey is not a valid public key", self.nodes[0].importpubkey, "5361746f736869204e616b616d6f746f")
- # 2. Import address from node2 to node1
- self.nodes[1].importaddress(address_to_import)
+ # Import address and private key to check correct behavior of spendable unspents
+ # 1. Send some coins to generate new UTXO
+ address_to_import = self.nodes[2].getnewaddress()
+ txid = self.nodes[0].sendtoaddress(address_to_import, 1)
+ self.nodes[0].generate(1)
+ self.sync_all(self.nodes[0:3])
- # 3. Validate that the imported address is watch-only on node1
- assert self.nodes[1].getaddressinfo(address_to_import)["iswatchonly"]
+ # 2. Import address from node2 to node1
+ self.nodes[1].importaddress(address_to_import)
- # 4. Check that the unspents after import are not spendable
- assert_array_result(self.nodes[1].listunspent(),
- {"address": address_to_import},
- {"spendable": False})
+ # 3. Validate that the imported address is watch-only on node1
+ assert self.nodes[1].getaddressinfo(address_to_import)["iswatchonly"]
- # 5. Import private key of the previously imported address on node1
- priv_key = self.nodes[2].dumpprivkey(address_to_import)
- self.nodes[1].importprivkey(priv_key)
+ # 4. Check that the unspents after import are not spendable
+ assert_array_result(self.nodes[1].listunspent(),
+ {"address": address_to_import},
+ {"spendable": False})
- # 6. Check that the unspents are now spendable on node1
- assert_array_result(self.nodes[1].listunspent(),
- {"address": address_to_import},
- {"spendable": True})
+ # 5. Import private key of the previously imported address on node1
+ priv_key = self.nodes[2].dumpprivkey(address_to_import)
+ self.nodes[1].importprivkey(priv_key)
+
+ # 6. Check that the unspents are now spendable on node1
+ assert_array_result(self.nodes[1].listunspent(),
+ {"address": address_to_import},
+ {"spendable": True})
# Mine a block from node0 to an address from node1
coinbase_addr = self.nodes[1].getnewaddress()
@@ -460,14 +462,11 @@ class WalletTest(BitcoinTestFramework):
# Try with walletrejectlongchains
# Double chain limit but require combining inputs, so we pass SelectCoinsMinConf
self.stop_node(0)
- self.start_node(0, extra_args=["-walletrejectlongchains", "-limitancestorcount=" + str(2 * chainlimit)])
-
- # wait for loadmempool
- timeout = 10
- while (timeout > 0 and len(self.nodes[0].getrawmempool()) < chainlimit * 2):
- time.sleep(0.5)
- timeout -= 0.5
- assert_equal(len(self.nodes[0].getrawmempool()), chainlimit * 2)
+ extra_args = ["-walletrejectlongchains", "-limitancestorcount=" + str(2 * chainlimit)]
+ self.start_node(0, extra_args=extra_args)
+
+ # wait until the wallet has submitted all transactions to the mempool
+ wait_until(lambda: len(self.nodes[0].getrawmempool()) == chainlimit * 2)
node0_balance = self.nodes[0].getbalance()
# With walletrejectlongchains we will not create the tx and store it in our wallet.
diff --git a/test/functional/wallet_bumpfee.py b/test/functional/wallet_bumpfee.py
index c09ca8854f..27197e3b6d 100755
--- a/test/functional/wallet_bumpfee.py
+++ b/test/functional/wallet_bumpfee.py
@@ -23,7 +23,6 @@ from test_framework.util import (
assert_equal,
assert_greater_than,
assert_raises_rpc_error,
- connect_nodes,
hex_str_to_bytes,
)
@@ -37,6 +36,7 @@ NORMAL = 0.00100000
HIGH = 0.00500000
TOO_HIGH = 1.00000000
+
class BumpFeeTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 2
@@ -55,9 +55,6 @@ class BumpFeeTest(BitcoinTestFramework):
self.nodes[1].encryptwallet(WALLET_PASSPHRASE)
self.nodes[1].walletpassphrase(WALLET_PASSPHRASE, WALLET_PASSPHRASE_TIMEOUT)
- connect_nodes(self.nodes[0], 1)
- self.sync_all()
-
peer_node, rbf_node = self.nodes
rbf_node_address = rbf_node.getnewaddress()
@@ -94,7 +91,6 @@ class BumpFeeTest(BitcoinTestFramework):
# These tests wipe out a number of utxos that are expected in other tests
test_small_output_with_feerate_succeeds(self, rbf_node, dest_address)
test_no_more_inputs_fails(self, rbf_node, dest_address)
- self.log.info("Success")
def test_simple_bumpfee_succeeds(self, mode, rbf_node, peer_node, dest_address):
@@ -124,6 +120,7 @@ def test_simple_bumpfee_succeeds(self, mode, rbf_node, peer_node, dest_address):
assert_equal(oldwtx["replaced_by_txid"], bumped_tx["txid"])
assert_equal(bumpedwtx["replaces_txid"], rbfid)
+
def test_feerate_args(self, rbf_node, peer_node, dest_address):
self.log.info('Test fee_rate args')
rbfid = spend_one_input(rbf_node, dest_address)
@@ -137,7 +134,7 @@ def test_feerate_args(self, rbf_node, peer_node, dest_address):
# Bumping to just above minrelay should fail to increase total fee enough, at least
assert_raises_rpc_error(-8, "Insufficient total fee", rbf_node.bumpfee, rbfid, {"fee_rate": INSUFFICIENT})
- assert_raises_rpc_error(-3, "Amount out of range", rbf_node.bumpfee, rbfid, {"fee_rate":-1})
+ assert_raises_rpc_error(-3, "Amount out of range", rbf_node.bumpfee, rbfid, {"fee_rate": -1})
assert_raises_rpc_error(-4, "is too high (cannot be higher than", rbf_node.bumpfee, rbfid, {"fee_rate": TOO_HIGH})
@@ -209,6 +206,7 @@ def test_bumpfee_with_descendant_fails(self, rbf_node, rbf_node_address, dest_ad
rbf_node.sendrawtransaction(tx["hex"])
assert_raises_rpc_error(-8, "Transaction has descendants in the wallet", rbf_node.bumpfee, parent_id)
+
def test_small_output_with_feerate_succeeds(self, rbf_node, dest_address):
self.log.info('Testing small output with feerate bump succeeds')
@@ -249,6 +247,7 @@ def test_small_output_with_feerate_succeeds(self, rbf_node, dest_address):
rbf_node.generatetoaddress(1, rbf_node.getnewaddress())
assert_equal(rbf_node.gettransaction(rbfid)["confirmations"], 1)
+
def test_dust_to_fee(self, rbf_node, dest_address):
self.log.info('Test that bumped output that is dust is dropped to fee')
rbfid = spend_one_input(rbf_node, dest_address)
@@ -301,10 +300,11 @@ def test_maxtxfee_fails(self, rbf_node, dest_address):
self.restart_node(1, ['-maxtxfee=0.000025'] + self.extra_args[1])
rbf_node.walletpassphrase(WALLET_PASSPHRASE, WALLET_PASSPHRASE_TIMEOUT)
rbfid = spend_one_input(rbf_node, dest_address)
- assert_raises_rpc_error(-4, "Unable to create transaction: Fee exceeds maximum configured by -maxtxfee", rbf_node.bumpfee, rbfid)
+ assert_raises_rpc_error(-4, "Unable to create transaction. Fee exceeds maximum configured by -maxtxfee", rbf_node.bumpfee, rbfid)
self.restart_node(1, self.extra_args[1])
rbf_node.walletpassphrase(WALLET_PASSPHRASE, WALLET_PASSPHRASE_TIMEOUT)
+
def test_watchonly_psbt(self, peer_node, rbf_node, dest_address):
self.log.info('Test that PSBT is returned for bumpfee in watchonly wallets')
priv_rec_desc = "wpkh([00000001/84'/1'/0']tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0/*)#rweraev0"
@@ -339,12 +339,11 @@ def test_watchonly_psbt(self, peer_node, rbf_node, dest_address):
result = watcher.importmulti([{
"desc": pub_rec_desc,
"timestamp": 0,
- "range": [0,10],
+ "range": [0, 10],
"internal": False,
"keypool": True,
"watchonly": True
- },
- {
+ }, {
"desc": pub_change_desc,
"timestamp": 0,
"range": [0, 10],
@@ -361,7 +360,7 @@ def test_watchonly_psbt(self, peer_node, rbf_node, dest_address):
self.sync_all()
# Create single-input PSBT for transaction to be bumped
- psbt = watcher.walletcreatefundedpsbt([], {dest_address:0.0005}, 0, {"feeRate": 0.00001}, True)['psbt']
+ psbt = watcher.walletcreatefundedpsbt([], {dest_address: 0.0005}, 0, {"feeRate": 0.00001}, True)['psbt']
psbt_signed = signer.walletprocesspsbt(psbt=psbt, sign=True, sighashtype="ALL", bip32derivs=True)
psbt_final = watcher.finalizepsbt(psbt_signed["psbt"])
original_txid = watcher.sendrawtransaction(psbt_final["hex"])
@@ -387,6 +386,7 @@ def test_watchonly_psbt(self, peer_node, rbf_node, dest_address):
rbf_node.unloadwallet("watcher")
rbf_node.unloadwallet("signer")
+
def test_rebumping(self, rbf_node, dest_address):
self.log.info('Test that re-bumping the original tx fails, but bumping successor works')
rbfid = spend_one_input(rbf_node, dest_address)
@@ -461,6 +461,7 @@ def test_locked_wallet_fails(self, rbf_node, dest_address):
rbf_node.bumpfee, rbfid)
rbf_node.walletpassphrase(WALLET_PASSPHRASE, WALLET_PASSPHRASE_TIMEOUT)
+
def test_change_script_match(self, rbf_node, dest_address):
self.log.info('Test that the same change addresses is used for the replacement transaction when possible')
@@ -480,6 +481,7 @@ def test_change_script_match(self, rbf_node, dest_address):
bumped_rate_tx = rbf_node.bumpfee(bumped_total_tx["txid"])
assert_equal(change_addresses, get_change_address(bumped_rate_tx['txid']))
+
def spend_one_input(node, dest_address, change_size=Decimal("0.00049000")):
tx_input = dict(
sequence=BIP125_SEQUENCE_NUMBER, **next(u for u in node.listunspent() if u["amount"] == Decimal("0.00100000")))
@@ -491,6 +493,7 @@ def spend_one_input(node, dest_address, change_size=Decimal("0.00049000")):
txid = node.sendrawtransaction(signedtx["hex"])
return txid
+
def submit_block_with_tx(node, tx):
ctx = CTransaction()
ctx.deserialize(io.BytesIO(hex_str_to_bytes(tx)))
@@ -507,13 +510,14 @@ def submit_block_with_tx(node, tx):
node.submitblock(block.serialize().hex())
return block
+
def test_no_more_inputs_fails(self, rbf_node, dest_address):
self.log.info('Test that bumpfee fails when there are no available confirmed outputs')
# feerate rbf requires confirmed outputs when change output doesn't exist or is insufficient
rbf_node.generatetoaddress(1, dest_address)
# spend all funds, no change output
rbfid = rbf_node.sendtoaddress(rbf_node.getnewaddress(), rbf_node.getbalance(), "", "", True)
- assert_raises_rpc_error(-4, "Unable to create transaction: Insufficient funds", rbf_node.bumpfee, rbfid)
+ assert_raises_rpc_error(-4, "Unable to create transaction. Insufficient funds", rbf_node.bumpfee, rbfid)
if __name__ == "__main__":
diff --git a/test/functional/wallet_descriptor.py b/test/functional/wallet_descriptor.py
new file mode 100755
index 0000000000..289ccf43ec
--- /dev/null
+++ b/test/functional/wallet_descriptor.py
@@ -0,0 +1,144 @@
+#!/usr/bin/env python3
+# Copyright (c) 2019 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test descriptor wallet function."""
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import (
+ assert_equal,
+ assert_raises_rpc_error
+)
+
+
+class WalletDescriptorTest(BitcoinTestFramework):
+ def set_test_params(self):
+ self.setup_clean_chain = True
+ self.num_nodes = 1
+ self.extra_args = [['-keypool=100']]
+
+ def skip_test_if_missing_module(self):
+ self.skip_if_no_wallet()
+
+ def run_test(self):
+ # Make a descriptor wallet
+ self.log.info("Making a descriptor wallet")
+ self.nodes[0].createwallet(wallet_name="desc1", descriptors=True)
+ self.nodes[0].unloadwallet("")
+
+ # A descriptor wallet should have 100 addresses * 3 types = 300 keys
+ self.log.info("Checking wallet info")
+ wallet_info = self.nodes[0].getwalletinfo()
+ assert_equal(wallet_info['keypoolsize'], 300)
+ assert_equal(wallet_info['keypoolsize_hd_internal'], 300)
+ assert 'keypoololdest' not in wallet_info
+
+ # Check that getnewaddress works
+ self.log.info("Test that getnewaddress and getrawchangeaddress work")
+ addr = self.nodes[0].getnewaddress("", "legacy")
+ addr_info = self.nodes[0].getaddressinfo(addr)
+ assert addr_info['desc'].startswith('pkh(')
+ assert_equal(addr_info['hdkeypath'], 'm/44\'/1\'/0\'/0/0')
+
+ addr = self.nodes[0].getnewaddress("", "p2sh-segwit")
+ addr_info = self.nodes[0].getaddressinfo(addr)
+ assert addr_info['desc'].startswith('sh(wpkh(')
+ assert_equal(addr_info['hdkeypath'], 'm/49\'/1\'/0\'/0/0')
+
+ addr = self.nodes[0].getnewaddress("", "bech32")
+ addr_info = self.nodes[0].getaddressinfo(addr)
+ assert addr_info['desc'].startswith('wpkh(')
+ assert_equal(addr_info['hdkeypath'], 'm/84\'/1\'/0\'/0/0')
+
+ # Check that getrawchangeaddress works
+ addr = self.nodes[0].getrawchangeaddress("legacy")
+ addr_info = self.nodes[0].getaddressinfo(addr)
+ assert addr_info['desc'].startswith('pkh(')
+ assert_equal(addr_info['hdkeypath'], 'm/44\'/1\'/0\'/1/0')
+
+ addr = self.nodes[0].getrawchangeaddress("p2sh-segwit")
+ addr_info = self.nodes[0].getaddressinfo(addr)
+ assert addr_info['desc'].startswith('sh(wpkh(')
+ assert_equal(addr_info['hdkeypath'], 'm/49\'/1\'/0\'/1/0')
+
+ addr = self.nodes[0].getrawchangeaddress("bech32")
+ addr_info = self.nodes[0].getaddressinfo(addr)
+ assert addr_info['desc'].startswith('wpkh(')
+ assert_equal(addr_info['hdkeypath'], 'm/84\'/1\'/0\'/1/0')
+
+ # Make a wallet to receive coins at
+ self.nodes[0].createwallet(wallet_name="desc2", descriptors=True)
+ recv_wrpc = self.nodes[0].get_wallet_rpc("desc2")
+ send_wrpc = self.nodes[0].get_wallet_rpc("desc1")
+
+ # Generate some coins
+ send_wrpc.generatetoaddress(101, send_wrpc.getnewaddress())
+
+ # Make transactions
+ self.log.info("Test sending and receiving")
+ addr = recv_wrpc.getnewaddress()
+ send_wrpc.sendtoaddress(addr, 10)
+
+ # Make sure things are disabled
+ self.log.info("Test disabled RPCs")
+ assert_raises_rpc_error(-4, "This type of wallet does not support this command", recv_wrpc.rpc.importprivkey, "cVpF924EspNh8KjYsfhgY96mmxvT6DgdWiTYMtMjuM74hJaU5psW")
+ assert_raises_rpc_error(-4, "This type of wallet does not support this command", recv_wrpc.rpc.importpubkey, send_wrpc.getaddressinfo(send_wrpc.getnewaddress()))
+ assert_raises_rpc_error(-4, "This type of wallet does not support this command", recv_wrpc.rpc.importaddress, recv_wrpc.getnewaddress())
+ assert_raises_rpc_error(-4, "This type of wallet does not support this command", recv_wrpc.rpc.importmulti, [])
+ assert_raises_rpc_error(-4, "This type of wallet does not support this command", recv_wrpc.rpc.addmultisigaddress, 1, [recv_wrpc.getnewaddress()])
+ assert_raises_rpc_error(-4, "This type of wallet does not support this command", recv_wrpc.rpc.dumpprivkey, recv_wrpc.getnewaddress())
+ assert_raises_rpc_error(-4, "This type of wallet does not support this command", recv_wrpc.rpc.dumpwallet, 'wallet.dump')
+ assert_raises_rpc_error(-4, "This type of wallet does not support this command", recv_wrpc.rpc.importwallet, 'wallet.dump')
+ assert_raises_rpc_error(-4, "This type of wallet does not support this command", recv_wrpc.rpc.sethdseed)
+
+ self.log.info("Test encryption")
+ # Get the master fingerprint before encrypt
+ info1 = send_wrpc.getaddressinfo(send_wrpc.getnewaddress())
+
+ # Encrypt wallet 0
+ send_wrpc.encryptwallet('pass')
+ send_wrpc.walletpassphrase('pass', 10)
+ addr = send_wrpc.getnewaddress()
+ info2 = send_wrpc.getaddressinfo(addr)
+ assert info1['hdmasterfingerprint'] != info2['hdmasterfingerprint']
+ send_wrpc.walletlock()
+ assert 'hdmasterfingerprint' in send_wrpc.getaddressinfo(send_wrpc.getnewaddress())
+ info3 = send_wrpc.getaddressinfo(addr)
+ assert_equal(info2['desc'], info3['desc'])
+
+ self.log.info("Test that getnewaddress still works after keypool is exhausted in an encrypted wallet")
+ for i in range(0, 500):
+ send_wrpc.getnewaddress()
+
+ self.log.info("Test that unlock is needed when deriving only hardened keys in an encrypted wallet")
+ send_wrpc.walletpassphrase('pass', 10)
+ send_wrpc.importdescriptors([{
+ "desc": "wpkh(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/*h)#y4dfsj7n",
+ "timestamp": "now",
+ "range": [0,10],
+ "active": True
+ }])
+ send_wrpc.walletlock()
+ # Exhaust keypool of 100
+ for i in range(0, 100):
+ send_wrpc.getnewaddress(address_type='bech32')
+ # This should now error
+ assert_raises_rpc_error(-12, "Keypool ran out, please call keypoolrefill first", send_wrpc.getnewaddress, '', 'bech32')
+
+ self.log.info("Test born encrypted wallets")
+ self.nodes[0].createwallet('desc_enc', False, False, 'pass', False, True)
+ enc_rpc = self.nodes[0].get_wallet_rpc('desc_enc')
+ enc_rpc.getnewaddress() # Makes sure that we can get a new address from a born encrypted wallet
+
+ self.log.info("Test blank descriptor wallets")
+ self.nodes[0].createwallet(wallet_name='desc_blank', blank=True, descriptors=True)
+ blank_rpc = self.nodes[0].get_wallet_rpc('desc_blank')
+ assert_raises_rpc_error(-4, 'This wallet has no available keys', blank_rpc.getnewaddress)
+
+ self.log.info("Test descriptor wallet with disabled private keys")
+ self.nodes[0].createwallet(wallet_name='desc_no_priv', disable_private_keys=True, descriptors=True)
+ nopriv_rpc = self.nodes[0].get_wallet_rpc('desc_no_priv')
+ assert_raises_rpc_error(-4, 'This wallet has no available keys', nopriv_rpc.getnewaddress)
+
+if __name__ == '__main__':
+    WalletDescriptorTest().main()
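Note: the born-encrypted wallet in this new test is created with positional arguments; for reference, a keyword-argument sketch of the same call, matching the createwallet signature used by the RPC wrapper earlier in this diff (node is a placeholder handle):

    node.createwallet(wallet_name='desc_enc', disable_private_keys=False, blank=False,
                      passphrase='pass', avoid_reuse=False, descriptors=True)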
diff --git a/test/functional/wallet_encryption.py b/test/functional/wallet_encryption.py
index bc7e3cca59..6cd82ad250 100755
--- a/test/functional/wallet_encryption.py
+++ b/test/functional/wallet_encryption.py
@@ -8,7 +8,6 @@ import time
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
- assert_equal,
assert_raises_rpc_error,
assert_greater_than,
assert_greater_than_or_equal,
@@ -27,10 +26,10 @@ class WalletEncryptionTest(BitcoinTestFramework):
passphrase2 = "SecondWalletPassphrase"
# Make sure the wallet isn't encrypted first
- address = self.nodes[0].getnewaddress()
- privkey = self.nodes[0].dumpprivkey(address)
- assert_equal(privkey[:1], "c")
- assert_equal(len(privkey), 52)
+ msg = "test message"
+ address = self.nodes[0].getnewaddress(address_type='legacy')
+ sig = self.nodes[0].signmessage(address, msg)
+ assert self.nodes[0].verifymessage(address, sig, msg)
assert_raises_rpc_error(-15, "Error: running with an unencrypted wallet, but walletpassphrase was called", self.nodes[0].walletpassphrase, 'ff', 1)
assert_raises_rpc_error(-15, "Error: running with an unencrypted wallet, but walletpassphrasechange was called.", self.nodes[0].walletpassphrasechange, 'ff', 'ff')
@@ -39,33 +38,36 @@ class WalletEncryptionTest(BitcoinTestFramework):
self.nodes[0].encryptwallet(passphrase)
# Test that the wallet is encrypted
- assert_raises_rpc_error(-13, "Please enter the wallet passphrase with walletpassphrase first", self.nodes[0].dumpprivkey, address)
+ assert_raises_rpc_error(-13, "Please enter the wallet passphrase with walletpassphrase first", self.nodes[0].signmessage, address, msg)
assert_raises_rpc_error(-15, "Error: running with an encrypted wallet, but encryptwallet was called.", self.nodes[0].encryptwallet, 'ff')
assert_raises_rpc_error(-8, "passphrase can not be empty", self.nodes[0].walletpassphrase, '', 1)
assert_raises_rpc_error(-8, "passphrase can not be empty", self.nodes[0].walletpassphrasechange, '', 'ff')
# Check that walletpassphrase works
self.nodes[0].walletpassphrase(passphrase, 2)
- assert_equal(privkey, self.nodes[0].dumpprivkey(address))
+ sig = self.nodes[0].signmessage(address, msg)
+ assert self.nodes[0].verifymessage(address, sig, msg)
# Check that the timeout is right
time.sleep(3)
- assert_raises_rpc_error(-13, "Please enter the wallet passphrase with walletpassphrase first", self.nodes[0].dumpprivkey, address)
+ assert_raises_rpc_error(-13, "Please enter the wallet passphrase with walletpassphrase first", self.nodes[0].signmessage, address, msg)
# Test wrong passphrase
assert_raises_rpc_error(-14, "wallet passphrase entered was incorrect", self.nodes[0].walletpassphrase, passphrase + "wrong", 10)
# Test walletlock
self.nodes[0].walletpassphrase(passphrase, 84600)
- assert_equal(privkey, self.nodes[0].dumpprivkey(address))
+ sig = self.nodes[0].signmessage(address, msg)
+ assert self.nodes[0].verifymessage(address, sig, msg)
self.nodes[0].walletlock()
- assert_raises_rpc_error(-13, "Please enter the wallet passphrase with walletpassphrase first", self.nodes[0].dumpprivkey, address)
+ assert_raises_rpc_error(-13, "Please enter the wallet passphrase with walletpassphrase first", self.nodes[0].signmessage, address, msg)
# Test passphrase changes
self.nodes[0].walletpassphrasechange(passphrase, passphrase2)
assert_raises_rpc_error(-14, "wallet passphrase entered was incorrect", self.nodes[0].walletpassphrase, passphrase, 10)
self.nodes[0].walletpassphrase(passphrase2, 10)
- assert_equal(privkey, self.nodes[0].dumpprivkey(address))
+ sig = self.nodes[0].signmessage(address, msg)
+ assert self.nodes[0].verifymessage(address, sig, msg)
self.nodes[0].walletlock()
# Test timeout bounds
diff --git a/test/functional/wallet_hd.py b/test/functional/wallet_hd.py
index e4328f2b0e..5b083a5398 100755
--- a/test/functional/wallet_hd.py
+++ b/test/functional/wallet_hd.py
@@ -27,17 +27,21 @@ class WalletHDTest(BitcoinTestFramework):
def run_test(self):
# Make sure we use hd, keep masterkeyid
- masterkeyid = self.nodes[1].getwalletinfo()['hdseedid']
- assert_equal(len(masterkeyid), 40)
+ hd_fingerprint = self.nodes[1].getaddressinfo(self.nodes[1].getnewaddress())['hdmasterfingerprint']
+ assert_equal(len(hd_fingerprint), 8)
# create an internal key
change_addr = self.nodes[1].getrawchangeaddress()
change_addrV= self.nodes[1].getaddressinfo(change_addr)
- assert_equal(change_addrV["hdkeypath"], "m/0'/1'/0'") #first internal child key
+ if self.options.descriptors:
+ assert_equal(change_addrV["hdkeypath"], "m/84'/1'/0'/1/0")
+ else:
+ assert_equal(change_addrV["hdkeypath"], "m/0'/1'/0'") #first internal child key
# Import a non-HD private key in the HD wallet
- non_hd_add = self.nodes[0].getnewaddress()
- self.nodes[1].importprivkey(self.nodes[0].dumpprivkey(non_hd_add))
+ non_hd_add = 'bcrt1qmevj8zfx0wdvp05cqwkmr6mxkfx60yezwjksmt'
+ non_hd_key = 'cS9umN9w6cDMuRVYdbkfE4c7YUFLJRoXMfhQ569uY4odiQbVN8Rt'
+ self.nodes[1].importprivkey(non_hd_key)
# This should be enough to keep the master key and the non-HD key
self.nodes[1].backupwallet(os.path.join(self.nodes[1].datadir, "hd.bak"))
@@ -48,11 +52,14 @@ class WalletHDTest(BitcoinTestFramework):
self.nodes[0].generate(101)
hd_add = None
NUM_HD_ADDS = 10
- for i in range(NUM_HD_ADDS):
+ for i in range(1, NUM_HD_ADDS + 1):
hd_add = self.nodes[1].getnewaddress()
hd_info = self.nodes[1].getaddressinfo(hd_add)
- assert_equal(hd_info["hdkeypath"], "m/0'/0'/"+str(i)+"'")
- assert_equal(hd_info["hdseedid"], masterkeyid)
+ if self.options.descriptors:
+ assert_equal(hd_info["hdkeypath"], "m/84'/1'/0'/0/" + str(i))
+ else:
+ assert_equal(hd_info["hdkeypath"], "m/0'/0'/"+str(i)+"'")
+ assert_equal(hd_info["hdmasterfingerprint"], hd_fingerprint)
self.nodes[0].sendtoaddress(hd_add, 1)
self.nodes[0].generate(1)
self.nodes[0].sendtoaddress(non_hd_add, 1)
@@ -61,7 +68,10 @@ class WalletHDTest(BitcoinTestFramework):
# create an internal key (again)
change_addr = self.nodes[1].getrawchangeaddress()
change_addrV= self.nodes[1].getaddressinfo(change_addr)
- assert_equal(change_addrV["hdkeypath"], "m/0'/1'/1'") #second internal child key
+ if self.options.descriptors:
+ assert_equal(change_addrV["hdkeypath"], "m/84'/1'/0'/1/1")
+ else:
+ assert_equal(change_addrV["hdkeypath"], "m/0'/1'/1'") #second internal child key
self.sync_all()
assert_equal(self.nodes[1].getbalance(), NUM_HD_ADDS + 1)
@@ -72,16 +82,19 @@ class WalletHDTest(BitcoinTestFramework):
# otherwise node1 would auto-recover all funds and flag the keypool keys as used
shutil.rmtree(os.path.join(self.nodes[1].datadir, self.chain, "blocks"))
shutil.rmtree(os.path.join(self.nodes[1].datadir, self.chain, "chainstate"))
- shutil.copyfile(os.path.join(self.nodes[1].datadir, "hd.bak"), os.path.join(self.nodes[1].datadir, self.chain, "wallets", "wallet.dat"))
+ shutil.copyfile(os.path.join(self.nodes[1].datadir, "hd.bak"), os.path.join(self.nodes[1].datadir, self.chain, 'wallets', "wallet.dat"))
self.start_node(1)
# Assert that derivation is deterministic
hd_add_2 = None
- for i in range(NUM_HD_ADDS):
+ for i in range(1, NUM_HD_ADDS + 1):
hd_add_2 = self.nodes[1].getnewaddress()
hd_info_2 = self.nodes[1].getaddressinfo(hd_add_2)
- assert_equal(hd_info_2["hdkeypath"], "m/0'/0'/"+str(i)+"'")
- assert_equal(hd_info_2["hdseedid"], masterkeyid)
+ if self.options.descriptors:
+ assert_equal(hd_info_2["hdkeypath"], "m/84'/1'/0'/0/" + str(i))
+ else:
+ assert_equal(hd_info_2["hdkeypath"], "m/0'/0'/"+str(i)+"'")
+ assert_equal(hd_info_2["hdmasterfingerprint"], hd_fingerprint)
assert_equal(hd_add, hd_add_2)
connect_nodes(self.nodes[0], 1)
self.sync_all()
@@ -117,41 +130,141 @@ class WalletHDTest(BitcoinTestFramework):
if out['value'] != 1:
keypath = self.nodes[1].getaddressinfo(out['scriptPubKey']['addresses'][0])['hdkeypath']
- assert_equal(keypath[0:7], "m/0'/1'")
-
- # Generate a new HD seed on node 1 and make sure it is set
- orig_masterkeyid = self.nodes[1].getwalletinfo()['hdseedid']
- self.nodes[1].sethdseed()
- new_masterkeyid = self.nodes[1].getwalletinfo()['hdseedid']
- assert orig_masterkeyid != new_masterkeyid
- addr = self.nodes[1].getnewaddress()
- assert_equal(self.nodes[1].getaddressinfo(addr)['hdkeypath'], 'm/0\'/0\'/0\'') # Make sure the new address is the first from the keypool
- self.nodes[1].keypoolrefill(1) # Fill keypool with 1 key
-
- # Set a new HD seed on node 1 without flushing the keypool
- new_seed = self.nodes[0].dumpprivkey(self.nodes[0].getnewaddress())
- orig_masterkeyid = new_masterkeyid
- self.nodes[1].sethdseed(False, new_seed)
- new_masterkeyid = self.nodes[1].getwalletinfo()['hdseedid']
- assert orig_masterkeyid != new_masterkeyid
- addr = self.nodes[1].getnewaddress()
- assert_equal(orig_masterkeyid, self.nodes[1].getaddressinfo(addr)['hdseedid'])
- assert_equal(self.nodes[1].getaddressinfo(addr)['hdkeypath'], 'm/0\'/0\'/1\'') # Make sure the new address continues previous keypool
-
- # Check that the next address is from the new seed
- self.nodes[1].keypoolrefill(1)
- next_addr = self.nodes[1].getnewaddress()
- assert_equal(new_masterkeyid, self.nodes[1].getaddressinfo(next_addr)['hdseedid'])
- assert_equal(self.nodes[1].getaddressinfo(next_addr)['hdkeypath'], 'm/0\'/0\'/0\'') # Make sure the new address is not from previous keypool
- assert next_addr != addr
-
- # Sethdseed parameter validity
- assert_raises_rpc_error(-1, 'sethdseed', self.nodes[0].sethdseed, False, new_seed, 0)
- assert_raises_rpc_error(-5, "Invalid private key", self.nodes[1].sethdseed, False, "not_wif")
- assert_raises_rpc_error(-1, "JSON value is not a boolean as expected", self.nodes[1].sethdseed, "Not_bool")
- assert_raises_rpc_error(-1, "JSON value is not a string as expected", self.nodes[1].sethdseed, False, True)
- assert_raises_rpc_error(-5, "Already have this key", self.nodes[1].sethdseed, False, new_seed)
- assert_raises_rpc_error(-5, "Already have this key", self.nodes[1].sethdseed, False, self.nodes[1].dumpprivkey(self.nodes[1].getnewaddress()))
+ if self.options.descriptors:
+ assert_equal(keypath[0:14], "m/84'/1'/0'/1/")
+ else:
+ assert_equal(keypath[0:7], "m/0'/1'")
+
+ if not self.options.descriptors:
+ # Generate a new HD seed on node 1 and make sure it is set
+ orig_masterkeyid = self.nodes[1].getwalletinfo()['hdseedid']
+ self.nodes[1].sethdseed()
+ new_masterkeyid = self.nodes[1].getwalletinfo()['hdseedid']
+ assert orig_masterkeyid != new_masterkeyid
+ addr = self.nodes[1].getnewaddress()
+ assert_equal(self.nodes[1].getaddressinfo(addr)['hdkeypath'], 'm/0\'/0\'/0\'') # Make sure the new address is the first from the keypool
+ self.nodes[1].keypoolrefill(1) # Fill keypool with 1 key
+
+ # Set a new HD seed on node 1 without flushing the keypool
+ new_seed = self.nodes[0].dumpprivkey(self.nodes[0].getnewaddress())
+ orig_masterkeyid = new_masterkeyid
+ self.nodes[1].sethdseed(False, new_seed)
+ new_masterkeyid = self.nodes[1].getwalletinfo()['hdseedid']
+ assert orig_masterkeyid != new_masterkeyid
+ addr = self.nodes[1].getnewaddress()
+ assert_equal(orig_masterkeyid, self.nodes[1].getaddressinfo(addr)['hdseedid'])
+ assert_equal(self.nodes[1].getaddressinfo(addr)['hdkeypath'], 'm/0\'/0\'/1\'') # Make sure the new address continues previous keypool
+
+ # Check that the next address is from the new seed
+ self.nodes[1].keypoolrefill(1)
+ next_addr = self.nodes[1].getnewaddress()
+ assert_equal(new_masterkeyid, self.nodes[1].getaddressinfo(next_addr)['hdseedid'])
+ assert_equal(self.nodes[1].getaddressinfo(next_addr)['hdkeypath'], 'm/0\'/0\'/0\'') # Make sure the new address is not from previous keypool
+ assert next_addr != addr
+
+ # Sethdseed parameter validity
+ assert_raises_rpc_error(-1, 'sethdseed', self.nodes[0].sethdseed, False, new_seed, 0)
+ assert_raises_rpc_error(-5, "Invalid private key", self.nodes[1].sethdseed, False, "not_wif")
+ assert_raises_rpc_error(-1, "JSON value is not a boolean as expected", self.nodes[1].sethdseed, "Not_bool")
+ assert_raises_rpc_error(-1, "JSON value is not a string as expected", self.nodes[1].sethdseed, False, True)
+ assert_raises_rpc_error(-5, "Already have this key", self.nodes[1].sethdseed, False, new_seed)
+ assert_raises_rpc_error(-5, "Already have this key", self.nodes[1].sethdseed, False, self.nodes[1].dumpprivkey(self.nodes[1].getnewaddress()))
+
+ self.log.info('Test sethdseed restoring with keys outside of the initial keypool')
+ self.nodes[0].generate(10)
+ # Restart node 1 with keypool of 3 and a different wallet
+ self.nodes[1].createwallet(wallet_name='origin', blank=True)
+ self.stop_node(1)
+ self.start_node(1, extra_args=['-keypool=3', '-wallet=origin'])
+ connect_nodes(self.nodes[0], 1)
+
+ # sethdseed restoring and seeing txs to addresses out of the keypool
+ origin_rpc = self.nodes[1].get_wallet_rpc('origin')
+ seed = self.nodes[0].dumpprivkey(self.nodes[0].getnewaddress())
+ origin_rpc.sethdseed(True, seed)
+
+ self.nodes[1].createwallet(wallet_name='restore', blank=True)
+ restore_rpc = self.nodes[1].get_wallet_rpc('restore')
+ restore_rpc.sethdseed(True, seed) # Set to be the same seed as origin_rpc
+ restore_rpc.sethdseed(True) # Rotate to a new seed, making original `seed` inactive
+
+ self.nodes[1].createwallet(wallet_name='restore2', blank=True)
+ restore2_rpc = self.nodes[1].get_wallet_rpc('restore2')
+ restore2_rpc.sethdseed(True, seed) # Set to be the same seed as origin_rpc
+ restore2_rpc.sethdseed(True) # Rotate to a new seed, making original `seed` inactive
+
+ # Check persistence of inactive seed by reloading restore. restore2 is still loaded to test the case where the wallet is not reloaded
+ restore_rpc.unloadwallet()
+ self.nodes[1].loadwallet('restore')
+ restore_rpc = self.nodes[1].get_wallet_rpc('restore')
+
+ # Empty origin keypool and get an address that is beyond the initial keypool
+ origin_rpc.getnewaddress()
+ origin_rpc.getnewaddress()
+ last_addr = origin_rpc.getnewaddress() # Last address of initial keypool
+ addr = origin_rpc.getnewaddress() # First address beyond initial keypool
+
+ # Check that the restored seed has last_addr but does not have addr
+ info = restore_rpc.getaddressinfo(last_addr)
+ assert_equal(info['ismine'], True)
+ info = restore_rpc.getaddressinfo(addr)
+ assert_equal(info['ismine'], False)
+ info = restore2_rpc.getaddressinfo(last_addr)
+ assert_equal(info['ismine'], True)
+ info = restore2_rpc.getaddressinfo(addr)
+ assert_equal(info['ismine'], False)
+ # Check that the origin seed has addr
+ info = origin_rpc.getaddressinfo(addr)
+ assert_equal(info['ismine'], True)
+
+ # Send a transaction to addr, which is out of the initial keypool.
+ # The wallet that has set a new seed (restore_rpc) should not detect this transaction.
+ txid = self.nodes[0].sendtoaddress(addr, 1)
+ origin_rpc.sendrawtransaction(self.nodes[0].gettransaction(txid)['hex'])
+ self.nodes[0].generate(1)
+ origin_rpc.gettransaction(txid)
+ assert_raises_rpc_error(-5, 'Invalid or non-wallet transaction id', restore_rpc.gettransaction, txid)
+ out_of_kp_txid = txid
+
+ # Send a transaction to last_addr, which is in the initial keypool.
+ # The wallet that has set a new seed (restore_rpc) should detect this transaction and generate 3 new keys from the initial seed.
+ # The previous transaction (out_of_kp_txid) should still not be detected as a rescan is required.
+ txid = self.nodes[0].sendtoaddress(last_addr, 1)
+ origin_rpc.sendrawtransaction(self.nodes[0].gettransaction(txid)['hex'])
+ self.nodes[0].generate(1)
+ origin_rpc.gettransaction(txid)
+ restore_rpc.gettransaction(txid)
+ assert_raises_rpc_error(-5, 'Invalid or non-wallet transaction id', restore_rpc.gettransaction, out_of_kp_txid)
+ restore2_rpc.gettransaction(txid)
+ assert_raises_rpc_error(-5, 'Invalid or non-wallet transaction id', restore2_rpc.gettransaction, out_of_kp_txid)
+
+ # After rescanning, restore_rpc should now see out_of_kp_txid and generate an additional key.
+ # addr should now be part of restore_rpc and be ismine
+ restore_rpc.rescanblockchain()
+ restore_rpc.gettransaction(out_of_kp_txid)
+ info = restore_rpc.getaddressinfo(addr)
+ assert_equal(info['ismine'], True)
+ restore2_rpc.rescanblockchain()
+ restore2_rpc.gettransaction(out_of_kp_txid)
+ info = restore2_rpc.getaddressinfo(addr)
+ assert_equal(info['ismine'], True)
+
+ # Check again that 3 keys were derived.
+ # Empty keypool and get an address that is beyond the initial keypool
+ origin_rpc.getnewaddress()
+ origin_rpc.getnewaddress()
+ last_addr = origin_rpc.getnewaddress()
+ addr = origin_rpc.getnewaddress()
+
+ # Check that the restored seed has last_addr but does not have addr
+ info = restore_rpc.getaddressinfo(last_addr)
+ assert_equal(info['ismine'], True)
+ info = restore_rpc.getaddressinfo(addr)
+ assert_equal(info['ismine'], False)
+ info = restore2_rpc.getaddressinfo(last_addr)
+ assert_equal(info['ismine'], True)
+ info = restore2_rpc.getaddressinfo(addr)
+ assert_equal(info['ismine'], False)
if __name__ == '__main__':
     WalletHDTest().main()
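The new wallet_hd.py checks above exercise seed rotation: after sethdseed replaces the active seed, keys derived from the old seed beyond the original keypool are only recognised once a rescan finds transactions paying them. A minimal sketch of that recovery sequence, assuming node is an RPC handle to a regtest bitcoind with a legacy (non-descriptor) wallet loaded and wif_seed/txid come from elsewhere (both hypothetical names here):

def recover_after_seed_rotation(node, wif_seed, txid):
    # Replace the active HD seed; passing True flushes the keypool so
    # fresh addresses come from the new seed immediately.
    node.sethdseed(True, wif_seed)
    # A transaction paying an address derived past the old keypool is
    # only picked up after a rescan, which also tops up keys from the
    # now-inactive seed.
    node.rescanblockchain()
    return node.gettransaction(txid)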
diff --git a/test/functional/wallet_importdescriptors.py b/test/functional/wallet_importdescriptors.py
new file mode 100755
index 0000000000..fc5d653a91
--- /dev/null
+++ b/test/functional/wallet_importdescriptors.py
@@ -0,0 +1,445 @@
+#!/usr/bin/env python3
+# Copyright (c) 2019 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test the importdescriptors RPC.
+
+Test importdescriptors by generating keys on node0, importing the corresponding
+descriptors on node1 and then testing the address info for the different address
+variants.
+
+- `get_generate_key()` is called to generate keys and return the privkeys,
+ pubkeys and all variants of scriptPubKey and address.
+- `test_importdesc()` is called to send an importdescriptors call to node1, test
+ success, and (if unsuccessful) test the error code and error message returned.
+- `test_address()` is called to call getaddressinfo for an address on node1
+ and test the values returned."""
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.descriptors import descsum_create
+from test_framework.util import (
+ assert_equal,
+ assert_raises_rpc_error,
+ find_vout_for_address,
+)
+from test_framework.wallet_util import (
+ get_generate_key,
+ test_address,
+)
+
+class ImportDescriptorsTest(BitcoinTestFramework):
+ def set_test_params(self):
+ self.num_nodes = 2
+ self.extra_args = [["-addresstype=legacy"],
+ ["-addresstype=bech32", "-keypool=5"]
+ ]
+ self.setup_clean_chain = True
+
+ def skip_test_if_missing_module(self):
+ self.skip_if_no_wallet()
+
+ def test_importdesc(self, req, success, error_code=None, error_message=None, warnings=None, wallet=None):
+ """Run importdescriptors and assert success"""
+ if warnings is None:
+ warnings = []
+ wrpc = self.nodes[1].get_wallet_rpc('w1')
+ if wallet is not None:
+ wrpc = wallet
+
+ result = wrpc.importdescriptors([req])
+ observed_warnings = []
+ if 'warnings' in result[0]:
+ observed_warnings = result[0]['warnings']
+ assert_equal("\n".join(sorted(warnings)), "\n".join(sorted(observed_warnings)))
+ assert_equal(result[0]['success'], success)
+ if error_code is not None:
+ assert_equal(result[0]['error']['code'], error_code)
+ assert_equal(result[0]['error']['message'], error_message)
+
+ def run_test(self):
+ self.log.info('Setting up wallets')
+ self.nodes[0].createwallet(wallet_name='w0', disable_private_keys=False)
+ w0 = self.nodes[0].get_wallet_rpc('w0')
+
+ self.nodes[1].createwallet(wallet_name='w1', disable_private_keys=True, blank=True, descriptors=True)
+ w1 = self.nodes[1].get_wallet_rpc('w1')
+ assert_equal(w1.getwalletinfo()['keypoolsize'], 0)
+
+ self.nodes[1].createwallet(wallet_name="wpriv", disable_private_keys=False, blank=True, descriptors=True)
+ wpriv = self.nodes[1].get_wallet_rpc("wpriv")
+ assert_equal(wpriv.getwalletinfo()['keypoolsize'], 0)
+
+ self.log.info('Mining coins')
+ w0.generatetoaddress(101, w0.getnewaddress())
+
+ # RPC importdescriptors -----------------------------------------------
+
+ # # Test import fails if no descriptor present
+ key = get_generate_key()
+ self.log.info("Import should fail if a descriptor is not provided")
+ self.test_importdesc({"timestamp": "now"},
+ success=False,
+ error_code=-8,
+ error_message='Descriptor not found.')
+
+ # # Test importing of a P2PKH descriptor
+ key = get_generate_key()
+ self.log.info("Should import a p2pkh descriptor")
+ self.test_importdesc({"desc": descsum_create("pkh(" + key.pubkey + ")"),
+ "timestamp": "now",
+ "label": "Descriptor import test"},
+ success=True)
+ test_address(w1,
+ key.p2pkh_addr,
+ solvable=True,
+ ismine=True,
+ labels=["Descriptor import test"])
+ assert_equal(w1.getwalletinfo()['keypoolsize'], 0)
+
+ self.log.info("Internal addresses cannot have labels")
+ self.test_importdesc({"desc": descsum_create("pkh(" + key.pubkey + ")"),
+ "timestamp": "now",
+ "internal": True,
+ "label": "Descriptor import test"},
+ success=False,
+ error_code=-8,
+ error_message="Internal addresses should not have a label")
+
+ # # Test importing of a P2SH-P2WPKH descriptor
+ key = get_generate_key()
+ self.log.info("Should not import a p2sh-p2wpkh descriptor without checksum")
+ self.test_importdesc({"desc": "sh(wpkh(" + key.pubkey + "))",
+ "timestamp": "now"
+ },
+ success=False,
+ error_code=-5,
+ error_message="Missing checksum")
+
+ self.log.info("Should not import a p2sh-p2wpkh descriptor that has range specified")
+ self.test_importdesc({"desc": descsum_create("sh(wpkh(" + key.pubkey + "))"),
+ "timestamp": "now",
+ "range": 1,
+ },
+ success=False,
+ error_code=-8,
+ error_message="Range should not be specified for an un-ranged descriptor")
+
+ self.log.info("Should not import a p2sh-p2wpkh descriptor and have it set to active")
+ self.test_importdesc({"desc": descsum_create("sh(wpkh(" + key.pubkey + "))"),
+ "timestamp": "now",
+ "active": True,
+ },
+ success=False,
+ error_code=-8,
+ error_message="Active descriptors must be ranged")
+
+ self.log.info("Should import a (non-active) p2sh-p2wpkh descriptor")
+ self.test_importdesc({"desc": descsum_create("sh(wpkh(" + key.pubkey + "))"),
+ "timestamp": "now",
+ "active": False,
+ },
+ success=True)
+ assert_equal(w1.getwalletinfo()['keypoolsize'], 0)
+
+ test_address(w1,
+ key.p2sh_p2wpkh_addr,
+ ismine=True,
+ solvable=True)
+
+ # # Test importing of a multisig descriptor
+ key1 = get_generate_key()
+ key2 = get_generate_key()
+ self.log.info("Should import a 1-of-2 bare multisig from descriptor")
+ self.test_importdesc({"desc": descsum_create("multi(1," + key1.pubkey + "," + key2.pubkey + ")"),
+ "timestamp": "now"},
+ success=True)
+ self.log.info("Should not treat individual keys from the imported bare multisig as watchonly")
+ test_address(w1,
+ key1.p2pkh_addr,
+ ismine=False)
+
+ # # Test ranged descriptors
+ xpriv = "tprv8ZgxMBicQKsPeuVhWwi6wuMQGfPKi9Li5GtX35jVNknACgqe3CY4g5xgkfDDJcmtF7o1QnxWDRYw4H5P26PXq7sbcUkEqeR4fg3Kxp2tigg"
+ xpub = "tpubD6NzVbkrYhZ4YNXVQbNhMK1WqguFsUXceaVJKbmno2aZ3B6QfbMeraaYvnBSGpV3vxLyTTK9DYT1yoEck4XUScMzXoQ2U2oSmE2JyMedq3H"
+ addresses = ["2N7yv4p8G8yEaPddJxY41kPihnWvs39qCMf", "2MsHxyb2JS3pAySeNUsJ7mNnurtpeenDzLA"] # hdkeypath=m/0'/0'/0' and 1'
+ addresses += ["bcrt1qrd3n235cj2czsfmsuvqqpr3lu6lg0ju7scl8gn", "bcrt1qfqeppuvj0ww98r6qghmdkj70tv8qpchehegrg8"] # wpkh subscripts corresponding to the above addresses
+ desc = "sh(wpkh(" + xpub + "/0/0/*" + "))"
+
+ self.log.info("Ranged descriptors cannot have labels")
+ self.test_importdesc({"desc":descsum_create(desc),
+ "timestamp": "now",
+ "range": [0, 100],
+ "label": "test"},
+ success=False,
+ error_code=-8,
+ error_message='Ranged descriptors should not have a label')
+
+ self.log.info("Private keys required for private keys enabled wallet")
+ self.test_importdesc({"desc":descsum_create(desc),
+ "timestamp": "now",
+ "range": [0, 100]},
+ success=False,
+ error_code=-4,
+ error_message='Cannot import descriptor without private keys to a wallet with private keys enabled',
+ wallet=wpriv)
+
+ self.log.info("Ranged descriptor import should warn without a specified range")
+ self.test_importdesc({"desc": descsum_create(desc),
+ "timestamp": "now"},
+ success=True,
+ warnings=['Range not given, using default keypool range'])
+ assert_equal(w1.getwalletinfo()['keypoolsize'], 0)
+
+ # # Test importing of a ranged descriptor with xpriv
+ self.log.info("Should not import a ranged descriptor that includes xpriv into a watch-only wallet")
+ desc = "sh(wpkh(" + xpriv + "/0'/0'/*'" + "))"
+ self.test_importdesc({"desc": descsum_create(desc),
+ "timestamp": "now",
+ "range": 1},
+ success=False,
+ error_code=-4,
+ error_message='Cannot import private keys to a wallet with private keys disabled')
+ for address in addresses:
+ test_address(w1,
+ address,
+ ismine=False,
+ solvable=False)
+
+ self.test_importdesc({"desc": descsum_create(desc), "timestamp": "now", "range": -1},
+ success=False, error_code=-8, error_message='End of range is too high')
+
+ self.test_importdesc({"desc": descsum_create(desc), "timestamp": "now", "range": [-1, 10]},
+ success=False, error_code=-8, error_message='Range should be greater or equal than 0')
+
+ self.test_importdesc({"desc": descsum_create(desc), "timestamp": "now", "range": [(2 << 31 + 1) - 1000000, (2 << 31 + 1)]},
+ success=False, error_code=-8, error_message='End of range is too high')
+
+ self.test_importdesc({"desc": descsum_create(desc), "timestamp": "now", "range": [2, 1]},
+ success=False, error_code=-8, error_message='Range specified as [begin,end] must not have begin after end')
+
+ self.test_importdesc({"desc": descsum_create(desc), "timestamp": "now", "range": [0, 1000001]},
+ success=False, error_code=-8, error_message='Range is too large')
+
+ # Make sure ranged imports import keys in order
+ w1 = self.nodes[1].get_wallet_rpc('w1')
+ self.log.info('Key ranges should be imported in order')
+ xpub = "tpubDAXcJ7s7ZwicqjprRaEWdPoHKrCS215qxGYxpusRLLmJuT69ZSicuGdSfyvyKpvUNYBW1s2U3NSrT6vrCYB9e6nZUEvrqnwXPF8ArTCRXMY"
+ addresses = [
+ 'bcrt1qtmp74ayg7p24uslctssvjm06q5phz4yrxucgnv', # m/0'/0'/0
+ 'bcrt1q8vprchan07gzagd5e6v9wd7azyucksq2xc76k8', # m/0'/0'/1
+ 'bcrt1qtuqdtha7zmqgcrr26n2rqxztv5y8rafjp9lulu', # m/0'/0'/2
+ 'bcrt1qau64272ymawq26t90md6an0ps99qkrse58m640', # m/0'/0'/3
+ 'bcrt1qsg97266hrh6cpmutqen8s4s962aryy77jp0fg0', # m/0'/0'/4
+ ]
+
+ self.test_importdesc({'desc': descsum_create('wpkh([80002067/0h/0h]' + xpub + '/*)'),
+ 'active': True,
+ 'range' : [0, 2],
+ 'timestamp': 'now'
+ },
+ success=True)
+ self.test_importdesc({'desc': descsum_create('sh(wpkh([abcdef12/0h/0h]' + xpub + '/*))'),
+ 'active': True,
+ 'range' : [0, 2],
+ 'timestamp': 'now'
+ },
+ success=True)
+ self.test_importdesc({'desc': descsum_create('pkh([12345678/0h/0h]' + xpub + '/*)'),
+ 'active': True,
+ 'range' : [0, 2],
+ 'timestamp': 'now'
+ },
+ success=True)
+
+ assert_equal(w1.getwalletinfo()['keypoolsize'], 5 * 3)
+ for i, expected_addr in enumerate(addresses):
+ received_addr = w1.getnewaddress('', 'bech32')
+ assert_raises_rpc_error(-4, 'This wallet has no available keys', w1.getrawchangeaddress, 'bech32')
+ assert_equal(received_addr, expected_addr)
+ bech32_addr_info = w1.getaddressinfo(received_addr)
+ assert_equal(bech32_addr_info['desc'][:23], 'wpkh([80002067/0\'/0\'/{}]'.format(i))
+
+ shwpkh_addr = w1.getnewaddress('', 'p2sh-segwit')
+ shwpkh_addr_info = w1.getaddressinfo(shwpkh_addr)
+ assert_equal(shwpkh_addr_info['desc'][:26], 'sh(wpkh([abcdef12/0\'/0\'/{}]'.format(i))
+
+ pkh_addr = w1.getnewaddress('', 'legacy')
+ pkh_addr_info = w1.getaddressinfo(pkh_addr)
+ assert_equal(pkh_addr_info['desc'][:22], 'pkh([12345678/0\'/0\'/{}]'.format(i))
+
+ assert_equal(w1.getwalletinfo()['keypoolsize'], 4 * 3) # After retrieving a key, we don't refill the keypool again, so it's one less for each address type
+ w1.keypoolrefill()
+ assert_equal(w1.getwalletinfo()['keypoolsize'], 5 * 3)
+
+ # Check active=False default
+ self.log.info('Check imported descriptors are not active by default')
+ self.test_importdesc({'desc': descsum_create('pkh([12345678/0h/0h]' + xpub + '/*)'),
+ 'range' : [0, 2],
+ 'timestamp': 'now',
+ 'internal': True
+ },
+ success=True)
+ assert_raises_rpc_error(-4, 'This wallet has no available keys', w1.getrawchangeaddress, 'legacy')
+
+ # # Test importing a descriptor containing a WIF private key
+ wif_priv = "cTe1f5rdT8A8DFgVWTjyPwACsDPJM9ff4QngFxUixCSvvbg1x6sh"
+ address = "2MuhcG52uHPknxDgmGPsV18jSHFBnnRgjPg"
+ desc = "sh(wpkh(" + wif_priv + "))"
+ self.log.info("Should import a descriptor with a WIF private key as spendable")
+ self.test_importdesc({"desc": descsum_create(desc),
+ "timestamp": "now"},
+ success=True,
+ wallet=wpriv)
+ test_address(wpriv,
+ address,
+ solvable=True,
+ ismine=True)
+ txid = w0.sendtoaddress(address, 49.99995540)
+ w0.generatetoaddress(6, w0.getnewaddress())
+ self.sync_blocks()
+ tx = wpriv.createrawtransaction([{"txid": txid, "vout": 0}], {w0.getnewaddress(): 49.999})
+ signed_tx = wpriv.signrawtransactionwithwallet(tx)
+ w1.sendrawtransaction(signed_tx['hex'])
+
+ # Make sure that we can use import and use multisig as addresses
+ self.log.info('Test that multisigs can be imported, signed for, and getnewaddress\'d')
+ self.nodes[1].createwallet(wallet_name="wmulti_priv", disable_private_keys=False, blank=True, descriptors=True)
+ wmulti_priv = self.nodes[1].get_wallet_rpc("wmulti_priv")
+ assert_equal(wmulti_priv.getwalletinfo()['keypoolsize'], 0)
+
+ self.test_importdesc({"desc":"wsh(multi(2,tprv8ZgxMBicQKsPevADjDCWsa6DfhkVXicu8NQUzfibwX2MexVwW4tCec5mXdCW8kJwkzBRRmAay1KZya4WsehVvjTGVW6JLqiqd8DdZ4xSg52/84h/0h/0h/*,tprv8ZgxMBicQKsPdSNWUhDiwTScDr6JfkZuLshTRwzvZGnMSnGikV6jxpmdDkC3YRc4T3GD6Nvg9uv6hQg73RVv1EiTXDZwxVbsLugVHU8B1aq/84h/0h/0h/*,tprv8ZgxMBicQKsPeonDt8Ka2mrQmHa61hQ5FQCsvWBTpSNzBFgM58cV2EuXNAHF14VawVpznnme3SuTbA62sGriwWyKifJmXntfNeK7zeqMCj1/84h/0h/0h/*))#m2sr93jn",
+ "active": True,
+ "range": 1000,
+ "next_index": 0,
+ "timestamp": "now"},
+ success=True,
+ wallet=wmulti_priv)
+ self.test_importdesc({"desc":"wsh(multi(2,tprv8ZgxMBicQKsPevADjDCWsa6DfhkVXicu8NQUzfibwX2MexVwW4tCec5mXdCW8kJwkzBRRmAay1KZya4WsehVvjTGVW6JLqiqd8DdZ4xSg52/84h/1h/0h/*,tprv8ZgxMBicQKsPdSNWUhDiwTScDr6JfkZuLshTRwzvZGnMSnGikV6jxpmdDkC3YRc4T3GD6Nvg9uv6hQg73RVv1EiTXDZwxVbsLugVHU8B1aq/84h/1h/0h/*,tprv8ZgxMBicQKsPeonDt8Ka2mrQmHa61hQ5FQCsvWBTpSNzBFgM58cV2EuXNAHF14VawVpznnme3SuTbA62sGriwWyKifJmXntfNeK7zeqMCj1/84h/1h/0h/*))#q3sztvx5",
+ "active": True,
+ "internal" : True,
+ "range": 1000,
+ "next_index": 0,
+ "timestamp": "now"},
+ success=True,
+ wallet=wmulti_priv)
+
+ assert_equal(wmulti_priv.getwalletinfo()['keypoolsize'], 1001) # Range end (1000) is inclusive, so 1001 addresses generated
+ addr = wmulti_priv.getnewaddress('', 'bech32')
+ assert_equal(addr, 'bcrt1qdt0qy5p7dzhxzmegnn4ulzhard33s2809arjqgjndx87rv5vd0fq2czhy8') # Derived at m/84'/0'/0'/0
+ change_addr = wmulti_priv.getrawchangeaddress('bech32')
+ assert_equal(change_addr, 'bcrt1qt9uhe3a9hnq7vajl7a094z4s3crm9ttf8zw3f5v9gr2nyd7e3lnsy44n8e')
+ assert_equal(wmulti_priv.getwalletinfo()['keypoolsize'], 1000)
+ txid = w0.sendtoaddress(addr, 10)
+ self.nodes[0].generate(6)
+ self.sync_all()
+ send_txid = wmulti_priv.sendtoaddress(w0.getnewaddress(), 8)
+ decoded = wmulti_priv.decoderawtransaction(wmulti_priv.gettransaction(send_txid)['hex'])
+ assert_equal(len(decoded['vin'][0]['txinwitness']), 4)
+ self.nodes[0].generate(6)
+ self.sync_all()
+
+ self.nodes[1].createwallet(wallet_name="wmulti_pub", disable_private_keys=True, blank=True, descriptors=True)
+ wmulti_pub = self.nodes[1].get_wallet_rpc("wmulti_pub")
+ assert_equal(wmulti_pub.getwalletinfo()['keypoolsize'], 0)
+
+ self.test_importdesc({"desc":"wsh(multi(2,[7b2d0242/84h/0h/0h]tpubDCJtdt5dgJpdhW4MtaVYDhG4T4tF6jcLR1PxL43q9pq1mxvXgMS9Mzw1HnXG15vxUGQJMMSqCQHMTy3F1eW5VkgVroWzchsPD5BUojrcWs8/*,[59b09cd6/84h/0h/0h]tpubDDBF2BTR6s8drwrfDei8WxtckGuSm1cyoKxYY1QaKSBFbHBYQArWhHPA6eJrzZej6nfHGLSURYSLHr7GuYch8aY5n61tGqgn8b4cXrMuoPH/*,[e81a0532/84h/0h/0h]tpubDCsWoW1kuQB9kG5MXewHqkbjPtqPueRnXju7uM2NK7y3JYb2ajAZ9EiuZXNNuE4661RAfriBWhL8UsnAPpk8zrKKnZw1Ug7X4oHgMdZiU4E/*))#tsry0s5e",
+ "active": True,
+ "range": 1000,
+ "next_index": 0,
+ "timestamp": "now"},
+ success=True,
+ wallet=wmulti_pub)
+ self.test_importdesc({"desc":"wsh(multi(2,[7b2d0242/84h/1h/0h]tpubDCXqdwWZcszwqYJSnZp8eARkxGJfHAk23KDxbztV4BbschfaTfYLTcSkSJ3TN64dRqwa1rnFUScsYormKkGqNbbPwkorQimVevXjxzUV9Gf/*,[59b09cd6/84h/1h/0h]tpubDCYfZY2ceyHzYzMMVPt9MNeiqtQ2T7Uyp9QSFwYXh8Vi9iJFYXcuphJaGXfF3jUQJi5Y3GMNXvM11gaL4txzZgNGK22BFAwMXynnzv4z2Jh/*,[e81a0532/84h/1h/0h]tpubDC6UGqnsQStngYuGD4MKsMy7eD1Yg9NTJfPdvjdG2JE5oZ7EsSL3WHg4Gsw2pR5K39ZwJ46M1wZayhedVdQtMGaUhq5S23PH6fnENK3V1sb/*))#c08a2rzv",
+ "active": True,
+ "internal" : True,
+ "range": 1000,
+ "next_index": 0,
+ "timestamp": "now"},
+ success=True,
+ wallet=wmulti_pub)
+
+ assert_equal(wmulti_pub.getwalletinfo()['keypoolsize'], 1000) # The first one was already consumed by previous import and is detected as used
+ addr = wmulti_pub.getnewaddress('', 'bech32')
+ assert_equal(addr, 'bcrt1qp8s25ckjl7gr6x2q3dx3tn2pytwp05upkjztk6ey857tt50r5aeqn6mvr9') # Derived at m/84'/0'/0'/1
+ change_addr = wmulti_pub.getrawchangeaddress('bech32')
+ assert_equal(change_addr, 'bcrt1qt9uhe3a9hnq7vajl7a094z4s3crm9ttf8zw3f5v9gr2nyd7e3lnsy44n8e')
+ assert_equal(wmulti_pub.getwalletinfo()['keypoolsize'], 999)
+ txid = w0.sendtoaddress(addr, 10)
+ vout = find_vout_for_address(self.nodes[0], txid, addr)
+ self.nodes[0].generate(6)
+ self.sync_all()
+ assert_equal(wmulti_pub.getbalance(), wmulti_priv.getbalance())
+
+ self.log.info("Multisig with distributed keys")
+ self.nodes[1].createwallet(wallet_name="wmulti_priv1", descriptors=True)
+ wmulti_priv1 = self.nodes[1].get_wallet_rpc("wmulti_priv1")
+ res = wmulti_priv1.importdescriptors([
+ {
+ "desc": descsum_create("wsh(multi(2,tprv8ZgxMBicQKsPevADjDCWsa6DfhkVXicu8NQUzfibwX2MexVwW4tCec5mXdCW8kJwkzBRRmAay1KZya4WsehVvjTGVW6JLqiqd8DdZ4xSg52/84h/0h/0h/*,[59b09cd6/84h/0h/0h]tpubDDBF2BTR6s8drwrfDei8WxtckGuSm1cyoKxYY1QaKSBFbHBYQArWhHPA6eJrzZej6nfHGLSURYSLHr7GuYch8aY5n61tGqgn8b4cXrMuoPH/*,[e81a0532/84h/0h/0h]tpubDCsWoW1kuQB9kG5MXewHqkbjPtqPueRnXju7uM2NK7y3JYb2ajAZ9EiuZXNNuE4661RAfriBWhL8UsnAPpk8zrKKnZw1Ug7X4oHgMdZiU4E/*))"),
+ "active": True,
+ "range": 1000,
+ "next_index": 0,
+ "timestamp": "now"
+ },
+ {
+ "desc": descsum_create("wsh(multi(2,tprv8ZgxMBicQKsPevADjDCWsa6DfhkVXicu8NQUzfibwX2MexVwW4tCec5mXdCW8kJwkzBRRmAay1KZya4WsehVvjTGVW6JLqiqd8DdZ4xSg52/84h/1h/0h/*,[59b09cd6/84h/1h/0h]tpubDCYfZY2ceyHzYzMMVPt9MNeiqtQ2T7Uyp9QSFwYXh8Vi9iJFYXcuphJaGXfF3jUQJi5Y3GMNXvM11gaL4txzZgNGK22BFAwMXynnzv4z2Jh/*,[e81a0532/84h/1h/0h]tpubDC6UGqnsQStngYuGD4MKsMy7eD1Yg9NTJfPdvjdG2JE5oZ7EsSL3WHg4Gsw2pR5K39ZwJ46M1wZayhedVdQtMGaUhq5S23PH6fnENK3V1sb/*))"),
+ "active": True,
+ "internal" : True,
+ "range": 1000,
+ "next_index": 0,
+ "timestamp": "now"
+ }])
+ assert_equal(res[0]['success'], True)
+ assert_equal(res[0]['warnings'][0], 'Not all private keys provided. Some wallet functionality may return unexpected errors')
+ assert_equal(res[1]['success'], True)
+ assert_equal(res[1]['warnings'][0], 'Not all private keys provided. Some wallet functionality may return unexpected errors')
+
+ self.nodes[1].createwallet(wallet_name='wmulti_priv2', blank=True, descriptors=True)
+ wmulti_priv2 = self.nodes[1].get_wallet_rpc('wmulti_priv2')
+ res = wmulti_priv2.importdescriptors([
+ {
+ "desc": descsum_create("wsh(multi(2,[7b2d0242/84h/0h/0h]tpubDCJtdt5dgJpdhW4MtaVYDhG4T4tF6jcLR1PxL43q9pq1mxvXgMS9Mzw1HnXG15vxUGQJMMSqCQHMTy3F1eW5VkgVroWzchsPD5BUojrcWs8/*,tprv8ZgxMBicQKsPdSNWUhDiwTScDr6JfkZuLshTRwzvZGnMSnGikV6jxpmdDkC3YRc4T3GD6Nvg9uv6hQg73RVv1EiTXDZwxVbsLugVHU8B1aq/84h/0h/0h/*,[e81a0532/84h/0h/0h]tpubDCsWoW1kuQB9kG5MXewHqkbjPtqPueRnXju7uM2NK7y3JYb2ajAZ9EiuZXNNuE4661RAfriBWhL8UsnAPpk8zrKKnZw1Ug7X4oHgMdZiU4E/*))"),
+ "active": True,
+ "range": 1000,
+ "next_index": 0,
+ "timestamp": "now"
+ },
+ {
+ "desc": descsum_create("wsh(multi(2,[7b2d0242/84h/1h/0h]tpubDCXqdwWZcszwqYJSnZp8eARkxGJfHAk23KDxbztV4BbschfaTfYLTcSkSJ3TN64dRqwa1rnFUScsYormKkGqNbbPwkorQimVevXjxzUV9Gf/*,tprv8ZgxMBicQKsPdSNWUhDiwTScDr6JfkZuLshTRwzvZGnMSnGikV6jxpmdDkC3YRc4T3GD6Nvg9uv6hQg73RVv1EiTXDZwxVbsLugVHU8B1aq/84h/1h/0h/*,[e81a0532/84h/1h/0h]tpubDC6UGqnsQStngYuGD4MKsMy7eD1Yg9NTJfPdvjdG2JE5oZ7EsSL3WHg4Gsw2pR5K39ZwJ46M1wZayhedVdQtMGaUhq5S23PH6fnENK3V1sb/*))"),
+ "active": True,
+ "internal" : True,
+ "range": 1000,
+ "next_index": 0,
+ "timestamp": "now"
+ }])
+ assert_equal(res[0]['success'], True)
+ assert_equal(res[0]['warnings'][0], 'Not all private keys provided. Some wallet functionality may return unexpected errors')
+ assert_equal(res[1]['success'], True)
+ assert_equal(res[1]['warnings'][0], 'Not all private keys provided. Some wallet functionality may return unexpected errors')
+
+ rawtx = self.nodes[1].createrawtransaction([{'txid': txid, 'vout': vout}], {w0.getnewaddress(): 9.999})
+ tx_signed_1 = wmulti_priv1.signrawtransactionwithwallet(rawtx)
+ assert_equal(tx_signed_1['complete'], False)
+ tx_signed_2 = wmulti_priv2.signrawtransactionwithwallet(tx_signed_1['hex'])
+ assert_equal(tx_signed_2['complete'], True)
+ self.nodes[1].sendrawtransaction(tx_signed_2['hex'])
+
+ self.log.info("Combo descriptors cannot be active")
+ self.test_importdesc({"desc": descsum_create("combo(tpubDCJtdt5dgJpdhW4MtaVYDhG4T4tF6jcLR1PxL43q9pq1mxvXgMS9Mzw1HnXG15vxUGQJMMSqCQHMTy3F1eW5VkgVroWzchsPD5BUojrcWs8/*)"),
+ "active": True,
+ "range": 1,
+ "timestamp": "now"},
+ success=False,
+ error_code=-4,
+ error_message="Combo descriptors cannot be set to active")
+
+ self.log.info("Descriptors with no type cannot be active")
+ self.test_importdesc({"desc": descsum_create("pk(tpubDCJtdt5dgJpdhW4MtaVYDhG4T4tF6jcLR1PxL43q9pq1mxvXgMS9Mzw1HnXG15vxUGQJMMSqCQHMTy3F1eW5VkgVroWzchsPD5BUojrcWs8/*)"),
+ "active": True,
+ "range": 1,
+ "timestamp": "now"},
+ success=True,
+ warnings=["Unknown output type, cannot set descriptor to active."])
+
+if __name__ == '__main__':
+ ImportDescriptorsTest().main()
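For context on the request shape used throughout this test: each importdescriptors entry is a dict whose required keys are desc (with its checksum) and timestamp; range, active, internal, next_index and label are optional and validated as shown above. A small sketch of building one such request with the framework helper, reusing the ranged xpub from the test and assuming test_framework is importable (as it is for the tests themselves):

from test_framework.descriptors import descsum_create

xpub = "tpubD6NzVbkrYhZ4YNXVQbNhMK1WqguFsUXceaVJKbmno2aZ3B6QfbMeraaYvnBSGpV3vxLyTTK9DYT1yoEck4XUScMzXoQ2U2oSmE2JyMedq3H"
# descsum_create appends the '#'-separated checksum that importdescriptors requires.
req = {
    "desc": descsum_create("wpkh(" + xpub + "/0/*)"),
    "timestamp": "now",   # or a UNIX timestamp to rescan from that point
    "range": [0, 99],     # active descriptors must be ranged
    "active": True,       # let the wallet hand out new addresses from this descriptor
}
# wallet.importdescriptors([req]) returns one {'success': ..., 'warnings': ..., 'error': ...}
# entry per request, which is what test_importdesc() above inspects.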
diff --git a/test/functional/wallet_importmulti.py b/test/functional/wallet_importmulti.py
index f152fcd1a4..bd4fcdabcf 100755
--- a/test/functional/wallet_importmulti.py
+++ b/test/functional/wallet_importmulti.py
@@ -32,6 +32,7 @@ from test_framework.wallet_util import (
test_address,
)
+
class ImportMultiTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 2
@@ -51,7 +52,7 @@ class ImportMultiTest(BitcoinTestFramework):
result = self.nodes[1].importmulti([req])
observed_warnings = []
if 'warnings' in result[0]:
- observed_warnings = result[0]['warnings']
+ observed_warnings = result[0]['warnings']
assert_equal("\n".join(sorted(warnings)), "\n".join(sorted(observed_warnings)))
assert_equal(result[0]['success'], success)
if error_code is not None:
@@ -63,6 +64,7 @@ class ImportMultiTest(BitcoinTestFramework):
self.nodes[0].generate(1)
self.nodes[1].generate(1)
timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
+ self.nodes[1].syncwithvalidationinterfacequeue()
node0_address1 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
@@ -257,6 +259,7 @@ class ImportMultiTest(BitcoinTestFramework):
self.nodes[1].sendtoaddress(multisig.p2sh_addr, 10.00)
self.nodes[1].generate(1)
timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
+ self.nodes[1].syncwithvalidationinterfacequeue()
self.log.info("Should import a p2sh")
self.test_importmulti({"scriptPubKey": {"address": multisig.p2sh_addr},
@@ -277,6 +280,7 @@ class ImportMultiTest(BitcoinTestFramework):
self.nodes[1].sendtoaddress(multisig.p2sh_addr, 10.00)
self.nodes[1].generate(1)
timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
+ self.nodes[1].syncwithvalidationinterfacequeue()
self.log.info("Should import a p2sh with respective redeem script")
self.test_importmulti({"scriptPubKey": {"address": multisig.p2sh_addr},
@@ -297,6 +301,7 @@ class ImportMultiTest(BitcoinTestFramework):
self.nodes[1].sendtoaddress(multisig.p2sh_addr, 10.00)
self.nodes[1].generate(1)
timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
+ self.nodes[1].syncwithvalidationinterfacequeue()
self.log.info("Should import a p2sh with respective redeem script and private keys")
self.test_importmulti({"scriptPubKey": {"address": multisig.p2sh_addr},
@@ -322,6 +327,7 @@ class ImportMultiTest(BitcoinTestFramework):
self.nodes[1].sendtoaddress(multisig.p2sh_addr, 10.00)
self.nodes[1].generate(1)
timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
+ self.nodes[1].syncwithvalidationinterfacequeue()
self.log.info("Should import a p2sh with respective redeem script and private keys")
self.test_importmulti({"scriptPubKey": {"address": multisig.p2sh_addr},
@@ -851,5 +857,6 @@ class ImportMultiTest(BitcoinTestFramework):
addr = wrpc.getnewaddress('', 'bech32')
assert_equal(addr, addresses[i])
+
if __name__ == '__main__':
ImportMultiTest().main()
diff --git a/test/functional/wallet_keypool.py b/test/functional/wallet_keypool.py
index 9e2f00e62f..40a2b3ab6a 100755
--- a/test/functional/wallet_keypool.py
+++ b/test/functional/wallet_keypool.py
@@ -22,16 +22,63 @@ class KeyPoolTest(BitcoinTestFramework):
addr_before_encrypting = nodes[0].getnewaddress()
addr_before_encrypting_data = nodes[0].getaddressinfo(addr_before_encrypting)
wallet_info_old = nodes[0].getwalletinfo()
- assert addr_before_encrypting_data['hdseedid'] == wallet_info_old['hdseedid']
+ if not self.options.descriptors:
+ assert addr_before_encrypting_data['hdseedid'] == wallet_info_old['hdseedid']
# Encrypt wallet and wait to terminate
nodes[0].encryptwallet('test')
+ if self.options.descriptors:
+ # Import hardened derivation only descriptors
+ nodes[0].walletpassphrase('test', 10)
+ nodes[0].importdescriptors([
+ {
+ "desc": "wpkh(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/*h)#y4dfsj7n",
+ "timestamp": "now",
+ "range": [0,0],
+ "active": True
+ },
+ {
+ "desc": "pkh(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1h/*h)#a0nyvl0k",
+ "timestamp": "now",
+ "range": [0,0],
+ "active": True
+ },
+ {
+ "desc": "sh(wpkh(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/2h/*h))#lmeu2axg",
+ "timestamp": "now",
+ "range": [0,0],
+ "active": True
+ },
+ {
+ "desc": "wpkh(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/3h/*h)#jkl636gm",
+ "timestamp": "now",
+ "range": [0,0],
+ "active": True,
+ "internal": True
+ },
+ {
+ "desc": "pkh(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/4h/*h)#l3crwaus",
+ "timestamp": "now",
+ "range": [0,0],
+ "active": True,
+ "internal": True
+ },
+ {
+ "desc": "sh(wpkh(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/5h/*h))#qg8wa75f",
+ "timestamp": "now",
+ "range": [0,0],
+ "active": True,
+ "internal": True
+ }
+ ])
+ nodes[0].walletlock()
# Keep creating keys
addr = nodes[0].getnewaddress()
addr_data = nodes[0].getaddressinfo(addr)
wallet_info = nodes[0].getwalletinfo()
- assert addr_before_encrypting_data['hdseedid'] != wallet_info['hdseedid']
- assert addr_data['hdseedid'] == wallet_info['hdseedid']
+ assert addr_before_encrypting_data['hdmasterfingerprint'] != addr_data['hdmasterfingerprint']
+ if not self.options.descriptors:
+ assert addr_data['hdseedid'] == wallet_info['hdseedid']
assert_raises_rpc_error(-12, "Error: Keypool ran out, please call keypoolrefill first", nodes[0].getnewaddress)
# put six (plus 2) new keys in the keypool (100% external-, +100% internal-keys, 1 in min)
@@ -39,8 +86,12 @@ class KeyPoolTest(BitcoinTestFramework):
nodes[0].keypoolrefill(6)
nodes[0].walletlock()
wi = nodes[0].getwalletinfo()
- assert_equal(wi['keypoolsize_hd_internal'], 6)
- assert_equal(wi['keypoolsize'], 6)
+ if self.options.descriptors:
+ assert_equal(wi['keypoolsize_hd_internal'], 18)
+ assert_equal(wi['keypoolsize'], 18)
+ else:
+ assert_equal(wi['keypoolsize_hd_internal'], 6)
+ assert_equal(wi['keypoolsize'], 6)
# drain the internal keys
nodes[0].getrawchangeaddress()
@@ -80,11 +131,15 @@ class KeyPoolTest(BitcoinTestFramework):
nodes[0].walletpassphrase('test', 100)
nodes[0].keypoolrefill(100)
wi = nodes[0].getwalletinfo()
- assert_equal(wi['keypoolsize_hd_internal'], 100)
- assert_equal(wi['keypoolsize'], 100)
+ if self.options.descriptors:
+ assert_equal(wi['keypoolsize_hd_internal'], 300)
+ assert_equal(wi['keypoolsize'], 300)
+ else:
+ assert_equal(wi['keypoolsize_hd_internal'], 100)
+ assert_equal(wi['keypoolsize'], 100)
# create a blank wallet
- nodes[0].createwallet(wallet_name='w2', blank=True)
+ nodes[0].createwallet(wallet_name='w2', blank=True, disable_private_keys=True)
w2 = nodes[0].get_wallet_rpc('w2')
# refer to initial wallet as w1
@@ -92,8 +147,11 @@ class KeyPoolTest(BitcoinTestFramework):
# import private key and fund it
address = addr.pop()
- privkey = w1.dumpprivkey(address)
- res = w2.importmulti([{'scriptPubKey': {'address': address}, 'keys': [privkey], 'timestamp': 'now'}])
+ desc = w1.getaddressinfo(address)['desc']
+ if self.options.descriptors:
+ res = w2.importdescriptors([{'desc': desc, 'timestamp': 'now'}])
+ else:
+ res = w2.importmulti([{'desc': desc, 'timestamp': 'now'}])
assert_equal(res[0]['success'], True)
w1.walletpassphrase('test', 100)
diff --git a/test/functional/wallet_keypool_topup.py b/test/functional/wallet_keypool_topup.py
index 829633a050..102ed23fba 100755
--- a/test/functional/wallet_keypool_topup.py
+++ b/test/functional/wallet_keypool_topup.py
@@ -79,7 +79,15 @@ class KeypoolRestoreTest(BitcoinTestFramework):
assert_equal(self.nodes[idx].getbalance(), 15)
assert_equal(self.nodes[idx].listtransactions()[0]['category'], "receive")
# Check that we have marked all keys up to the used keypool key as used
- assert_equal(self.nodes[idx].getaddressinfo(self.nodes[idx].getnewaddress())['hdkeypath'], "m/0'/0'/110'")
+ if self.options.descriptors:
+ if output_type == 'legacy':
+ assert_equal(self.nodes[idx].getaddressinfo(self.nodes[idx].getnewaddress(address_type=output_type))['hdkeypath'], "m/44'/1'/0'/0/110")
+ elif output_type == 'p2sh-segwit':
+ assert_equal(self.nodes[idx].getaddressinfo(self.nodes[idx].getnewaddress(address_type=output_type))['hdkeypath'], "m/49'/1'/0'/0/110")
+ elif output_type == 'bech32':
+ assert_equal(self.nodes[idx].getaddressinfo(self.nodes[idx].getnewaddress(address_type=output_type))['hdkeypath'], "m/84'/1'/0'/0/110")
+ else:
+ assert_equal(self.nodes[idx].getaddressinfo(self.nodes[idx].getnewaddress(address_type=output_type))['hdkeypath'], "m/0'/0'/110'")
if __name__ == '__main__':
diff --git a/test/functional/wallet_labels.py b/test/functional/wallet_labels.py
index 337d2e55d9..f8d1720469 100755
--- a/test/functional/wallet_labels.py
+++ b/test/functional/wallet_labels.py
@@ -115,15 +115,16 @@ class WalletLabelsTest(BitcoinTestFramework):
assert_raises_rpc_error(-11, "No addresses with label", node.getaddressesbylabel, "")
# Check that addmultisigaddress can assign labels.
- for label in labels:
- addresses = []
- for x in range(10):
- addresses.append(node.getnewaddress())
- multisig_address = node.addmultisigaddress(5, addresses, label.name)['address']
- label.add_address(multisig_address)
- label.purpose[multisig_address] = "send"
- label.verify(node)
- node.generate(101)
+ if not self.options.descriptors:
+ for label in labels:
+ addresses = []
+ for x in range(10):
+ addresses.append(node.getnewaddress())
+ multisig_address = node.addmultisigaddress(5, addresses, label.name)['address']
+ label.add_address(multisig_address)
+ label.purpose[multisig_address] = "send"
+ label.verify(node)
+ node.generate(101)
# Check that setlabel can change the label of an address from a
# different label.
diff --git a/test/functional/wallet_multiwallet.py b/test/functional/wallet_multiwallet.py
index c569416292..580a61f9f3 100755
--- a/test/functional/wallet_multiwallet.py
+++ b/test/functional/wallet_multiwallet.py
@@ -227,10 +227,10 @@ class MultiWalletTest(BitcoinTestFramework):
assert_raises_rpc_error(-18, 'Wallet wallets not found.', self.nodes[0].loadwallet, 'wallets')
# Fail to load duplicate wallets
- assert_raises_rpc_error(-4, 'Wallet file verification failed: Error loading wallet w1. Duplicate -wallet filename specified.', self.nodes[0].loadwallet, wallet_names[0])
+ assert_raises_rpc_error(-4, 'Wallet file verification failed. Error loading wallet w1. Duplicate -wallet filename specified.', self.nodes[0].loadwallet, wallet_names[0])
# Fail to load duplicate wallets by different ways (directory and filepath)
- assert_raises_rpc_error(-4, "Wallet file verification failed: Error loading wallet wallet.dat. Duplicate -wallet filename specified.", self.nodes[0].loadwallet, 'wallet.dat')
+ assert_raises_rpc_error(-4, "Wallet file verification failed. Error loading wallet wallet.dat. Duplicate -wallet filename specified.", self.nodes[0].loadwallet, 'wallet.dat')
# Fail to load if one wallet is a copy of another
assert_raises_rpc_error(-4, "BerkeleyBatch: Can't open database w8_copy (duplicates fileid", self.nodes[0].loadwallet, 'w8_copy')
@@ -240,7 +240,7 @@ class MultiWalletTest(BitcoinTestFramework):
# Fail to load if wallet file is a symlink
- assert_raises_rpc_error(-4, "Wallet file verification failed: Invalid -wallet path 'w8_symlink'", self.nodes[0].loadwallet, 'w8_symlink')
+ assert_raises_rpc_error(-4, "Wallet file verification failed. Invalid -wallet path 'w8_symlink'", self.nodes[0].loadwallet, 'w8_symlink')
# Fail to load if a directory is specified that doesn't contain a wallet
os.mkdir(wallet_dir('empty_wallet_dir'))
diff --git a/test/functional/wallet_resendwallettransactions.py b/test/functional/wallet_resendwallettransactions.py
index db5902f820..b384998d56 100755
--- a/test/functional/wallet_resendwallettransactions.py
+++ b/test/functional/wallet_resendwallettransactions.py
@@ -3,29 +3,14 @@
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test that the wallet resends transactions periodically."""
-from collections import defaultdict
import time
from test_framework.blocktools import create_block, create_coinbase
from test_framework.messages import ToHex
-from test_framework.mininode import P2PInterface, mininode_lock
+from test_framework.mininode import P2PTxInvStore, mininode_lock
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, wait_until
-
-class P2PStoreTxInvs(P2PInterface):
- def __init__(self):
- super().__init__()
- self.tx_invs_received = defaultdict(int)
-
- def on_inv(self, message):
- # Store how many times invs have been received for each tx.
- for i in message.inv:
- if i.type == 1:
- # save txid
- self.tx_invs_received[i.hash] += 1
-
-
class ResendWalletTransactionsTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
@@ -36,7 +21,7 @@ class ResendWalletTransactionsTest(BitcoinTestFramework):
def run_test(self):
node = self.nodes[0] # alias
- node.add_p2p_connection(P2PStoreTxInvs())
+ node.add_p2p_connection(P2PTxInvStore())
self.log.info("Create a new transaction and wait until it's broadcast")
txid = int(node.sendtoaddress(node.getnewaddress(), 1), 16)
@@ -51,7 +36,7 @@ class ResendWalletTransactionsTest(BitcoinTestFramework):
wait_until(lambda: node.p2p.tx_invs_received[txid] >= 1, lock=mininode_lock)
# Add a second peer since txs aren't rebroadcast to the same peer (see filterInventoryKnown)
- node.add_p2p_connection(P2PStoreTxInvs())
+ node.add_p2p_connection(P2PTxInvStore())
self.log.info("Create a block")
# Create and submit a block without the transaction.
@@ -69,9 +54,10 @@ class ResendWalletTransactionsTest(BitcoinTestFramework):
node.p2ps[1].sync_with_ping()
assert_equal(node.p2ps[1].tx_invs_received[txid], 0)
- self.log.info("Transaction should be rebroadcast after 30 minutes")
- # Use mocktime and give an extra 5 minutes to be sure.
- rebroadcast_time = int(time.time()) + 41 * 60
+ self.log.info("Bump time & check that transaction is rebroadcast")
+ # Transaction should be rebroadcast approximately 24 hours in the future,
+ # but can range from 12-36. So bump 36 hours to be sure.
+ rebroadcast_time = int(time.time()) + 36 * 60 * 60
node.setmocktime(rebroadcast_time)
wait_until(lambda: node.p2ps[1].tx_invs_received[txid] >= 1, lock=mininode_lock)
diff --git a/test/functional/wallet_txn_clone.py b/test/functional/wallet_txn_clone.py
index 99559090ee..ad23206c90 100755
--- a/test/functional/wallet_txn_clone.py
+++ b/test/functional/wallet_txn_clone.py
@@ -29,7 +29,7 @@ class TxnMallTest(BitcoinTestFramework):
def setup_network(self):
# Start with split network:
- super(TxnMallTest, self).setup_network()
+ super().setup_network()
disconnect_nodes(self.nodes[1], 2)
disconnect_nodes(self.nodes[2], 1)
diff --git a/test/functional/wallet_upgradewallet.py b/test/functional/wallet_upgradewallet.py
new file mode 100755
index 0000000000..bb81746715
--- /dev/null
+++ b/test/functional/wallet_upgradewallet.py
@@ -0,0 +1,143 @@
+#!/usr/bin/env python3
+# Copyright (c) 2018-2020 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""upgradewallet RPC functional test
+
+Test upgradewallet RPC. Download node binaries:
+
+contrib/devtools/previous_release.sh -b v0.19.1 v0.18.1 v0.17.1 v0.16.3 v0.15.2
+
+Only v0.15.2 and v0.16.3 are required by this test. The others are used in feature_backwards_compatibility.py
+"""
+
+import os
+import shutil
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import (
+ adjust_bitcoin_conf_for_pre_17,
+ assert_equal,
+ assert_greater_than,
+ assert_is_hex_string,
+)
+
+
+class UpgradeWalletTest(BitcoinTestFramework):
+ def set_test_params(self):
+ self.setup_clean_chain = True
+ self.num_nodes = 3
+ self.extra_args = [
+ ["-addresstype=bech32"], # current wallet version
+ ["-usehd=1"], # v0.16.3 wallet
+ ["-usehd=0"] # v0.15.2 wallet
+ ]
+
+ def skip_test_if_missing_module(self):
+ self.skip_if_no_wallet()
+ self.skip_if_no_previous_releases()
+
+ def setup_network(self):
+ self.setup_nodes()
+
+ def setup_nodes(self):
+ self.add_nodes(self.num_nodes, extra_args=self.extra_args, versions=[
+ None,
+ 160300,
+ 150200,
+ ])
+ # adapt bitcoin.conf, because older bitcoind's don't recognize config sections
+ adjust_bitcoin_conf_for_pre_17(self.nodes[1].bitcoinconf)
+ adjust_bitcoin_conf_for_pre_17(self.nodes[2].bitcoinconf)
+ self.start_nodes()
+
+ def dumb_sync_blocks(self):
+ """
+ Little helper to sync older wallets.
+ Notice that v0.15.2's regtest is hardforked, so there is
+ no sync for it.
+ v0.15.2 is only being used to test for version upgrade
+ and master hash key presence.
+ v0.16.3 is being used to test for version upgrade and balances.
+ Further info: https://github.com/bitcoin/bitcoin/pull/18774#discussion_r416967844
+ """
+ node_from = self.nodes[0]
+ v16_3_node = self.nodes[1]
+ to_height = node_from.getblockcount()
+ height = self.nodes[1].getblockcount()
+ for i in range(height, to_height+1):
+ b = node_from.getblock(blockhash=node_from.getblockhash(i), verbose=0)
+ v16_3_node.submitblock(b)
+ assert_equal(v16_3_node.getblockcount(), to_height)
+
+ def run_test(self):
+ self.nodes[0].generatetoaddress(101, self.nodes[0].getnewaddress())
+ self.dumb_sync_blocks()
+ # # Sanity check the test framework:
+ res = self.nodes[0].getblockchaininfo()
+ assert_equal(res['blocks'], 101)
+ node_master = self.nodes[0]
+ v16_3_node = self.nodes[1]
+ v15_2_node = self.nodes[2]
+
+ # Send coins to old wallets for later conversion checks.
+ v16_3_wallet = v16_3_node.get_wallet_rpc('wallet.dat')
+ v16_3_address = v16_3_wallet.getnewaddress()
+ node_master.generatetoaddress(101, v16_3_address)
+ self.dumb_sync_blocks()
+ v16_3_balance = v16_3_wallet.getbalance()
+
+ self.log.info("Test upgradewallet RPC...")
+ # Prepare for copying of the older wallet
+ node_master_wallet_dir = os.path.join(node_master.datadir, "regtest/wallets")
+ v16_3_wallet = os.path.join(v16_3_node.datadir, "regtest/wallets/wallet.dat")
+ v15_2_wallet = os.path.join(v15_2_node.datadir, "regtest/wallet.dat")
+ self.stop_nodes()
+
+ # Copy the 0.16.3 wallet to the last Bitcoin Core version and open it:
+ shutil.rmtree(node_master_wallet_dir)
+ os.mkdir(node_master_wallet_dir)
+ shutil.copy(
+ v16_3_wallet,
+ node_master_wallet_dir
+ )
+ self.restart_node(0, ['-nowallet'])
+ node_master.loadwallet('')
+
+ wallet = node_master.get_wallet_rpc('')
+ old_version = wallet.getwalletinfo()["walletversion"]
+
+ # calling upgradewallet without version arguments
+ # should return nothing if successful
+ assert_equal(wallet.upgradewallet(), "")
+ new_version = wallet.getwalletinfo()["walletversion"]
+ # upgraded wallet version should be greater than older one
+ assert_greater_than(new_version, old_version)
+ # wallet should still contain the same balance
+ assert_equal(wallet.getbalance(), v16_3_balance)
+
+ self.stop_node(0)
+ # Copy the 0.15.2 wallet to the last Bitcoin Core version and open it:
+ shutil.rmtree(node_master_wallet_dir)
+ os.mkdir(node_master_wallet_dir)
+ shutil.copy(
+ v15_2_wallet,
+ node_master_wallet_dir
+ )
+ self.restart_node(0, ['-nowallet'])
+ node_master.loadwallet('')
+
+ wallet = node_master.get_wallet_rpc('')
+ # should have no master key hash before conversion
+ assert_equal('hdseedid' in wallet.getwalletinfo(), False)
+ # calling upgradewallet with explicit version number
+ # should return nothing if successful
+ assert_equal(wallet.upgradewallet(169900), "")
+ new_version = wallet.getwalletinfo()["walletversion"]
+ # upgraded wallet should have version 169900
+ assert_equal(new_version, 169900)
+ # after conversion master key hash should be present
+ assert_is_hex_string(wallet.getwalletinfo()['hdseedid'])
+
+if __name__ == '__main__':
+ UpgradeWalletTest().main()
diff --git a/test/fuzz/test_runner.py b/test/fuzz/test_runner.py
index 2455d3a3c3..56b18752ec 100755
--- a/test/fuzz/test_runner.py
+++ b/test/fuzz/test_runner.py
@@ -5,12 +5,13 @@
"""Run fuzz test targets.
"""
+from concurrent.futures import ThreadPoolExecutor, as_completed
import argparse
import configparser
+import logging
import os
-import sys
import subprocess
-import logging
+import sys
def main():
@@ -36,6 +37,13 @@ def main():
help="A comma-separated list of targets to exclude",
)
parser.add_argument(
+ '--par',
+ '-j',
+ type=int,
+ default=4,
+ help='How many targets to merge or execute in parallel.',
+ )
+ parser.add_argument(
'seed_dir',
help='The seed corpus to run on (must contain subfolders for each fuzz target).',
)
@@ -124,25 +132,29 @@ def main():
logging.error("subprocess timed out: Currently only libFuzzer is supported")
sys.exit(1)
- if args.m_dir:
- merge_inputs(
+ with ThreadPoolExecutor(max_workers=args.par) as fuzz_pool:
+ if args.m_dir:
+ merge_inputs(
+ fuzz_pool=fuzz_pool,
+ corpus=args.seed_dir,
+ test_list=test_list_selection,
+ build_dir=config["environment"]["BUILDDIR"],
+ merge_dir=args.m_dir,
+ )
+ return
+
+ run_once(
+ fuzz_pool=fuzz_pool,
corpus=args.seed_dir,
test_list=test_list_selection,
build_dir=config["environment"]["BUILDDIR"],
- merge_dir=args.m_dir,
+ use_valgrind=args.valgrind,
)
- return
-
- run_once(
- corpus=args.seed_dir,
- test_list=test_list_selection,
- build_dir=config["environment"]["BUILDDIR"],
- use_valgrind=args.valgrind,
- )
-def merge_inputs(*, corpus, test_list, build_dir, merge_dir):
+def merge_inputs(*, fuzz_pool, corpus, test_list, build_dir, merge_dir):
logging.info("Merge the inputs in the passed dir into the seed_dir. Passed dir {}".format(merge_dir))
+ jobs = []
for t in test_list:
args = [
os.path.join(build_dir, 'src', 'test', 'fuzz', t),
@@ -153,12 +165,20 @@ def merge_inputs(*, corpus, test_list, build_dir, merge_dir):
]
os.makedirs(os.path.join(corpus, t), exist_ok=True)
os.makedirs(os.path.join(merge_dir, t), exist_ok=True)
- logging.debug('Run {} with args {}'.format(t, args))
- output = subprocess.run(args, check=True, stderr=subprocess.PIPE, universal_newlines=True).stderr
- logging.debug('Output: {}'.format(output))
+ def job(t, args):
+ output = 'Run {} with args {}\n'.format(t, " ".join(args))
+ output += subprocess.run(args, check=True, stderr=subprocess.PIPE, universal_newlines=True).stderr
+ logging.debug(output)
+
+ jobs.append(fuzz_pool.submit(job, t, args))
+
+ for future in as_completed(jobs):
+ future.result()
-def run_once(*, corpus, test_list, build_dir, use_valgrind):
+
+def run_once(*, fuzz_pool, corpus, test_list, build_dir, use_valgrind):
+ jobs = []
for t in test_list:
corpus_path = os.path.join(corpus, t)
os.makedirs(corpus_path, exist_ok=True)
@@ -169,10 +189,18 @@ def run_once(*, corpus, test_list, build_dir, use_valgrind):
]
if use_valgrind:
args = ['valgrind', '--quiet', '--error-exitcode=1'] + args
- logging.debug('Run {} with args {}'.format(t, args))
- result = subprocess.run(args, stderr=subprocess.PIPE, universal_newlines=True)
- output = result.stderr
- logging.debug('Output: {}'.format(output))
+
+ def job(t, args):
+ output = 'Run {} with args {}'.format(t, args)
+ result = subprocess.run(args, stderr=subprocess.PIPE, universal_newlines=True)
+ output += result.stderr
+ return output, result
+
+ jobs.append(fuzz_pool.submit(job, t, args))
+
+ for future in as_completed(jobs):
+ output, result = future.result()
+ logging.debug(output)
try:
result.check_returncode()
except subprocess.CalledProcessError as e:
@@ -180,7 +208,7 @@ def run_once(*, corpus, test_list, build_dir, use_valgrind):
logging.info(e.stdout)
if e.stderr:
logging.info(e.stderr)
- logging.info("Target \"{}\" failed with exit code {}: {}".format(t, e.returncode, " ".join(args)))
+ logging.info("Target \"{}\" failed with exit code {}".format(" ".join(result.args), e.returncode))
sys.exit(1)
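The reworked runner fans the per-target subprocess calls out to a ThreadPoolExecutor and drains them with as_completed, so --par/-j simply caps how many targets run at once. A standalone sketch of the same pattern, with a harmless placeholder command standing in for the fuzz binary:

from concurrent.futures import ThreadPoolExecutor, as_completed
import subprocess

targets = ['target_a', 'target_b', 'target_c']  # placeholder target names

def job(target):
    # Placeholder command; the real runner invokes the per-target fuzz binary here.
    result = subprocess.run(['echo', 'ran ' + target], stdout=subprocess.PIPE, universal_newlines=True)
    return target, result.stdout.strip()

with ThreadPoolExecutor(max_workers=4) as pool:
    jobs = [pool.submit(job, t) for t in targets]
    for future in as_completed(jobs):
        target, output = future.result()
        print(target, output)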
diff --git a/test/lint/lint-shell.sh b/test/lint/lint-shell.sh
index 5540a0f74f..563e076b35 100755
--- a/test/lint/lint-shell.sh
+++ b/test/lint/lint-shell.sh
@@ -35,8 +35,9 @@ if ! command -v shellcheck > /dev/null; then
exit $EXIT_CODE
fi
+SHELLCHECK_CMD=(shellcheck --external-sources --check-sourced)
EXCLUDE="--exclude=$(IFS=','; echo "${disabled[*]}")"
-if ! shellcheck "$EXCLUDE" $(git ls-files -- '*.sh' | grep -vE 'src/(leveldb|secp256k1|univalue)/'); then
+if ! "${SHELLCHECK_CMD[@]}" "$EXCLUDE" $(git ls-files -- '*.sh' | grep -vE 'src/(leveldb|secp256k1|univalue)/'); then
EXIT_CODE=1
fi
@@ -48,13 +49,14 @@ fi
EXCLUDE_GITIAN=${EXCLUDE}",$(IFS=','; echo "${disabled_gitian[*]}")"
for descriptor in $(git ls-files -- 'contrib/gitian-descriptors/*.yml')
do
- echo
- echo "$descriptor"
+ script=$(basename "$descriptor")
# Use #!/bin/bash as gitian-builder/bin/gbuild does to complete a script.
- SCRIPT=$'#!/bin/bash\n'$(yq -r .script "$descriptor")
- if ! echo "$SCRIPT" | shellcheck "$EXCLUDE_GITIAN" -; then
+ echo "#!/bin/bash" > $script
+ yq -r .script "$descriptor" >> $script
+ if ! "${SHELLCHECK_CMD[@]}" "$EXCLUDE_GITIAN" $script; then
EXIT_CODE=1
fi
+ rm $script
done
exit $EXIT_CODE