Diffstat (limited to 'test')
-rw-r--r--  test/README.md | 11
-rw-r--r--  test/config.ini.in | 1
-rw-r--r--  test/functional/README.md | 6
-rw-r--r--  test/functional/data/invalid_txs.py | 5
-rwxr-xr-x  test/functional/example_test.py | 4
-rwxr-xr-x  test/functional/feature_abortnode.py | 2
-rwxr-xr-x  test/functional/feature_assumevalid.py | 2
-rwxr-xr-x  test/functional/feature_backwards_compatibility.py | 167
-rwxr-xr-x  test/functional/feature_bip68_sequence.py | 4
-rwxr-xr-x  test/functional/feature_block.py | 2
-rwxr-xr-x  test/functional/feature_cltv.py | 2
-rwxr-xr-x  test/functional/feature_config_args.py | 2
-rwxr-xr-x  test/functional/feature_csv_activation.py | 2
-rwxr-xr-x  test/functional/feature_dbcrash.py | 7
-rwxr-xr-x  test/functional/feature_dersig.py | 2
-rwxr-xr-x  test/functional/feature_fee_estimation.py | 2
-rwxr-xr-x  test/functional/feature_filelock.py | 2
-rwxr-xr-x  test/functional/feature_help.py | 2
-rwxr-xr-x  test/functional/feature_loadblock.py | 13
-rwxr-xr-x  test/functional/feature_logging.py | 5
-rwxr-xr-x  test/functional/feature_maxuploadtarget.py | 21
-rwxr-xr-x  test/functional/feature_notifications.py | 68
-rwxr-xr-x  test/functional/feature_nulldummy.py | 4
-rwxr-xr-x  test/functional/feature_pruning.py | 14
-rwxr-xr-x  test/functional/feature_rbf.py | 4
-rwxr-xr-x  test/functional/feature_reindex.py | 6
-rwxr-xr-x  test/functional/feature_segwit.py | 70
-rwxr-xr-x  test/functional/framework_test_script.py | 44
-rwxr-xr-x  test/functional/interface_bitcoin_cli.py | 200
-rwxr-xr-x  test/functional/interface_rpc.py | 2
-rwxr-xr-x  test/functional/mempool_accept.py | 2
-rwxr-xr-x  test/functional/mempool_compatibility.py | 75
-rwxr-xr-x  test/functional/mempool_packages.py | 10
-rwxr-xr-x  test/functional/mempool_persist.py | 56
-rwxr-xr-x  test/functional/mempool_reorg.py | 36
-rwxr-xr-x  test/functional/mempool_unbroadcast.py | 113
-rwxr-xr-x  test/functional/mempool_updatefromblock.py | 123
-rwxr-xr-x  test/functional/mining_basic.py | 40
-rwxr-xr-x  test/functional/p2p_addr_relay.py | 4
-rwxr-xr-x  test/functional/p2p_blockfilters.py | 250
-rwxr-xr-x  test/functional/p2p_blocksonly.py | 23
-rwxr-xr-x  test/functional/p2p_compactblocks.py | 30
-rwxr-xr-x  test/functional/p2p_disconnect_ban.py | 3
-rwxr-xr-x  test/functional/p2p_dos_header_tree.py | 2
-rwxr-xr-x  test/functional/p2p_eviction.py | 129
-rwxr-xr-x  test/functional/p2p_feefilter.py | 65
-rwxr-xr-x  test/functional/p2p_filter.py | 191
-rwxr-xr-x  test/functional/p2p_fingerprint.py | 6
-rwxr-xr-x  test/functional/p2p_getdata.py | 49
-rwxr-xr-x  test/functional/p2p_invalid_block.py | 2
-rwxr-xr-x  test/functional/p2p_invalid_locator.py | 2
-rwxr-xr-x  test/functional/p2p_invalid_messages.py | 284
-rwxr-xr-x  test/functional/p2p_invalid_tx.py | 19
-rwxr-xr-x  test/functional/p2p_leak.py | 30
-rwxr-xr-x  test/functional/p2p_leak_tx.py | 6
-rwxr-xr-x  test/functional/p2p_mempool.py | 34
-rwxr-xr-x  test/functional/p2p_nobloomfilter_messages.py | 48
-rwxr-xr-x  test/functional/p2p_node_network_limited.py | 8
-rwxr-xr-x  test/functional/p2p_permissions.py | 8
-rwxr-xr-x  test/functional/p2p_segwit.py | 87
-rwxr-xr-x  test/functional/p2p_sendheaders.py | 14
-rwxr-xr-x  test/functional/p2p_timeouts.py | 2
-rwxr-xr-x  test/functional/p2p_tx_download.py | 16
-rwxr-xr-x  test/functional/p2p_unrequested_blocks.py | 6
-rwxr-xr-x  test/functional/rpc_blockchain.py | 2
-rwxr-xr-x  test/functional/rpc_createmultisig.py | 53
-rwxr-xr-x  test/functional/rpc_dumptxoutset.py | 2
-rwxr-xr-x  test/functional/rpc_estimatefee.py | 2
-rwxr-xr-x  test/functional/rpc_fundrawtransaction.py | 25
-rwxr-xr-x  test/functional/rpc_generateblock.py | 7
-rwxr-xr-x  test/functional/rpc_getaddressinfo_label_deprecation.py | 43
-rwxr-xr-x  test/functional/rpc_getaddressinfo_labels_purpose_deprecation.py | 48
-rwxr-xr-x  test/functional/rpc_getblockfilter.py | 4
-rwxr-xr-x  test/functional/rpc_help.py | 7
-rwxr-xr-x  test/functional/rpc_net.py | 13
-rwxr-xr-x  test/functional/rpc_psbt.py | 84
-rwxr-xr-x  test/functional/rpc_scantxoutset.py | 2
-rwxr-xr-x  test/functional/rpc_signrawtransaction.py | 2
-rwxr-xr-x  test/functional/rpc_users.py | 32
-rw-r--r--  test/functional/test_framework/address.py | 51
-rw-r--r--  test/functional/test_framework/authproxy.py | 2
-rw-r--r--  test/functional/test_framework/blocktools.py | 8
-rwxr-xr-x  test/functional/test_framework/messages.py | 228
-rwxr-xr-x  test/functional/test_framework/mininode.py | 113
-rw-r--r--  test/functional/test_framework/script.py | 55
-rwxr-xr-x  test/functional/test_framework/script_util.py | 1
-rwxr-xr-x  test/functional/test_framework/test_framework.py | 191
-rwxr-xr-x  test/functional/test_framework/test_node.py | 183
-rw-r--r--  test/functional/test_framework/util.py | 126
-rwxr-xr-x  test/functional/test_framework/wallet_util.py | 32
-rwxr-xr-x  test/functional/test_runner.py | 53
-rwxr-xr-x  test/functional/tool_wallet.py | 26
-rwxr-xr-x  test/functional/wallet_abandonconflict.py | 18
-rwxr-xr-x  test/functional/wallet_address_types.py | 2
-rwxr-xr-x  test/functional/wallet_avoidreuse.py | 166
-rwxr-xr-x  test/functional/wallet_backup.py | 2
-rwxr-xr-x  test/functional/wallet_balance.py | 5
-rwxr-xr-x  test/functional/wallet_basic.py | 245
-rwxr-xr-x  test/functional/wallet_bumpfee.py | 66
-rwxr-xr-x  test/functional/wallet_createwallet.py | 2
-rwxr-xr-x  test/functional/wallet_descriptor.py | 144
-rwxr-xr-x  test/functional/wallet_dump.py | 5
-rwxr-xr-x  test/functional/wallet_encryption.py | 24
-rwxr-xr-x  test/functional/wallet_groups.py | 2
-rwxr-xr-x  test/functional/wallet_hd.py | 235
-rwxr-xr-x  test/functional/wallet_import_rescan.py | 3
-rwxr-xr-x  test/functional/wallet_importdescriptors.py | 445
-rwxr-xr-x  test/functional/wallet_importmulti.py | 9
-rwxr-xr-x  test/functional/wallet_keypool.py | 132
-rwxr-xr-x  test/functional/wallet_keypool_topup.py | 10
-rwxr-xr-x  test/functional/wallet_labels.py | 46
-rwxr-xr-x  test/functional/wallet_listtransactions.py | 2
-rwxr-xr-x  test/functional/wallet_multiwallet.py | 63
-rwxr-xr-x  test/functional/wallet_reorgsrestore.py | 3
-rwxr-xr-x  test/functional/wallet_resendwallettransactions.py | 45
-rwxr-xr-x  test/functional/wallet_txn_clone.py | 3
-rwxr-xr-x  test/functional/wallet_txn_doublespend.py | 1
-rwxr-xr-x  test/functional/wallet_upgradewallet.py | 139
-rwxr-xr-x  test/functional/wallet_zapwallettxes.py | 15
-rwxr-xr-x  test/fuzz/test_runner.py | 76
-rwxr-xr-x  test/lint/extended-lint-all.sh | 2
-rwxr-xr-x  test/lint/extended-lint-cppcheck.sh | 2
-rwxr-xr-x  test/lint/lint-circular-dependencies.sh | 2
-rwxr-xr-x  test/lint/lint-format-strings.sh | 2
-rwxr-xr-x  test/lint/lint-includes.sh | 6
-rwxr-xr-x  test/lint/lint-locale-dependence.sh | 2
-rwxr-xr-x  test/lint/lint-python-utf8-encoding.sh | 2
-rwxr-xr-x  test/lint/lint-python.sh | 16
-rwxr-xr-x  test/lint/lint-shebang.sh | 2
-rwxr-xr-x  test/lint/lint-shell.sh | 15
-rw-r--r--  test/lint/lint-spelling.ignore-words.txt | 1
-rw-r--r--  test/sanitizer_suppressions/tsan | 30
132 files changed, 4487 insertions, 1347 deletions
diff --git a/test/README.md b/test/README.md
index e1dab92a06..2341eef00d 100644
--- a/test/README.md
+++ b/test/README.md
@@ -225,6 +225,10 @@ gdb /home/example/bitcoind <pid>
Note: gdb attach step may require ptrace_scope to be modified, or `sudo` preceding the `gdb`.
See this link for considerations: https://www.kernel.org/doc/Documentation/security/Yama.txt
+Often while debugging RPC calls from functional tests, the test might time out before the
+process can return a response. Use `--timeout-factor 0` to disable all RPC timeouts for that particular
+functional test. Ex: `test/functional/wallet_hd.py --timeout-factor 0`.
+
##### Profiling
An easy way to profile node performance during functional tests is provided
@@ -256,10 +260,11 @@ Use the `-v` option for verbose output.
| Lint test | Dependency | Version [used by CI](../ci/lint/04_install.sh) | Installation
|-----------|:----------:|:-------------------------------------------:|--------------
-| [`lint-python.sh`](lint/lint-python.sh) | [flake8](https://gitlab.com/pycqa/flake8) | [3.7.8](https://github.com/bitcoin/bitcoin/pull/15257) | `pip3 install flake8==3.7.8`
-| [`lint-shell.sh`](lint/lint-shell.sh) | [ShellCheck](https://github.com/koalaman/shellcheck) | [0.6.0](https://github.com/bitcoin/bitcoin/pull/15166) | [details...](https://github.com/koalaman/shellcheck#installing)
+| [`lint-python.sh`](lint/lint-python.sh) | [flake8](https://gitlab.com/pycqa/flake8) | [3.8.3](https://github.com/bitcoin/bitcoin/pull/19348) | `pip3 install flake8==3.8.3`
+| [`lint-python.sh`](lint/lint-python.sh) | [mypy](https://github.com/python/mypy) | [0.781](https://github.com/bitcoin/bitcoin/pull/19348) | `pip3 install mypy==0.781`
+| [`lint-shell.sh`](lint/lint-shell.sh) | [ShellCheck](https://github.com/koalaman/shellcheck) | [0.7.1](https://github.com/bitcoin/bitcoin/pull/19348) | [details...](https://github.com/koalaman/shellcheck#installing)
| [`lint-shell.sh`](lint/lint-shell.sh) | [yq](https://github.com/kislyuk/yq) | default | `pip3 install yq`
-| [`lint-spelling.sh`](lint/lint-spelling.sh) | [codespell](https://github.com/codespell-project/codespell) | [1.15.0](https://github.com/bitcoin/bitcoin/pull/16186) | `pip3 install codespell==1.15.0`
+| [`lint-spelling.sh`](lint/lint-spelling.sh) | [codespell](https://github.com/codespell-project/codespell) | [1.17.1](https://github.com/bitcoin/bitcoin/pull/19348) | `pip3 install codespell==1.17.1`
Please be aware that on Linux distributions all dependencies are usually available as packages, but could be outdated.
diff --git a/test/config.ini.in b/test/config.ini.in
index 9687206ee1..be1bfe8752 100644
--- a/test/config.ini.in
+++ b/test/config.ini.in
@@ -7,6 +7,7 @@
[environment]
PACKAGE_NAME=@PACKAGE_NAME@
+PACKAGE_BUGREPORT=@PACKAGE_BUGREPORT@
SRCDIR=@abs_top_srcdir@
BUILDDIR=@abs_top_builddir@
EXEEXT=@EXEEXT@
diff --git a/test/functional/README.md b/test/functional/README.md
index 004e0afb1d..aff5f714f2 100644
--- a/test/functional/README.md
+++ b/test/functional/README.md
@@ -26,10 +26,12 @@ don't have test cases for.
The Travis linter also checks this, but [possibly not in all cases](https://github.com/bitcoin/bitcoin/pull/14884#discussion_r239585126).
- See [the python lint script](/test/lint/lint-python.sh) that checks for violations that
could lead to bugs and issues in the test code.
+- Use [type hints](https://docs.python.org/3/library/typing.html) in your code to improve code readability
+ and to detect possible bugs earlier.
- Avoid wildcard imports
- Use a module-level docstring to describe what the test is testing, and how it
is testing it.
-- When subclassing the BitcoinTestFramwork, place overrides for the
+- When subclassing the BitcoinTestFramework, place overrides for the
`set_test_params()`, `add_options()` and `setup_xxxx()` methods at the top of
the subclass, then locally-defined helper methods, then the `run_test()` method.
- Use `'{}'.format(x)` for string formatting, not `'%s' % x`.
@@ -45,7 +47,7 @@ don't have test cases for.
- `rpc` for tests for individual RPC methods or features, eg `rpc_listtransactions.py`
- `tool` for tests for tools, eg `tool_wallet.py`
- `wallet` for tests for wallet features, eg `wallet_keypool.py`
-- use an underscore to separate words
+- Use an underscore to separate words
- exception: for tests for specific RPCs or command line options which don't include underscores, name the test after the exact RPC or argument name, eg `rpc_decodescript.py`, not `rpc_decode_script.py`
- Don't use the redundant word `test` in the name, eg `interface_zmq.py`, not `interface_zmq_test.py`
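
As a quick illustration of the type-hints guideline added above, here is a minimal sketch of an annotated helper; the function and all names in it are hypothetical, not code from the test suite:

```python
from typing import Dict, List, Optional


def summarize_fee_rates(fee_rates: List[float], floor: Optional[float] = None) -> Dict[str, float]:
    """Return min/max/average of a list of fee rates, optionally clamped to a floor."""
    if floor is not None:
        fee_rates = [max(rate, floor) for rate in fee_rates]
    return {
        "min": min(fee_rates),
        "max": max(fee_rates),
        "avg": sum(fee_rates) / len(fee_rates),
    }


print(summarize_fee_rates([0.00001, 0.00002, 0.00005], floor=0.00001))
```

Annotations like these let `mypy` (added to `lint-python.sh` in this change) flag type mismatches before a test is ever run.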
diff --git a/test/functional/data/invalid_txs.py b/test/functional/data/invalid_txs.py
index ce14998fd1..6e72db1d96 100644
--- a/test/functional/data/invalid_txs.py
+++ b/test/functional/data/invalid_txs.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2015-2019 The Bitcoin Core developers
+# Copyright (c) 2015-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""
@@ -21,6 +21,7 @@ Invalid tx cases not covered here can be found by running:
"""
import abc
+from typing import Optional
from test_framework.messages import (
COutPoint,
CTransaction,
@@ -56,7 +57,7 @@ class BadTxTemplate:
__metaclass__ = abc.ABCMeta
# The expected error code given by bitcoind upon submission of the tx.
- reject_reason = ""
+ reject_reason = "" # type: Optional[str]
# Only specified if it differs from mempool acceptance error.
block_reject_reason = ""
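
The `# type: Optional[str]` comment above is one of the two spellings mypy accepts for annotating a class attribute; a short sketch of both forms, using a hypothetical class and attribute names for comparison only:

```python
from typing import Optional


class ExampleTemplate:
    # Comment-style annotation, as used in invalid_txs.py above: valid on any
    # Python 3 version and picked up by mypy.
    reject_reason = ""  # type: Optional[str]

    # Equivalent PEP 526 variable annotation (Python 3.6+), shown for comparison.
    other_reason: Optional[str] = None
```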
diff --git a/test/functional/example_test.py b/test/functional/example_test.py
index 70dfe81d4e..5d782026dc 100755
--- a/test/functional/example_test.py
+++ b/test/functional/example_test.py
@@ -15,7 +15,7 @@ from collections import defaultdict
# Avoid wildcard * imports
from test_framework.blocktools import (create_block, create_coinbase)
-from test_framework.messages import CInv
+from test_framework.messages import CInv, MSG_BLOCK
from test_framework.mininode import (
P2PInterface,
mininode_lock,
@@ -198,7 +198,7 @@ class ExampleTest(BitcoinTestFramework):
getdata_request = msg_getdata()
for block in blocks:
- getdata_request.inv.append(CInv(2, block))
+ getdata_request.inv.append(CInv(MSG_BLOCK, block))
self.nodes[2].p2p.send_message(getdata_request)
# wait_until() will loop until a predicate condition is met. Use it to test properties of the
diff --git a/test/functional/feature_abortnode.py b/test/functional/feature_abortnode.py
index e47e709431..75267de80b 100755
--- a/test/functional/feature_abortnode.py
+++ b/test/functional/feature_abortnode.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2019 The Bitcoin Core developers
+# Copyright (c) 2019-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test bitcoind aborts if can't disconnect a block.
diff --git a/test/functional/feature_assumevalid.py b/test/functional/feature_assumevalid.py
index ef4d9411c5..79777f5582 100755
--- a/test/functional/feature_assumevalid.py
+++ b/test/functional/feature_assumevalid.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2014-2019 The Bitcoin Core developers
+# Copyright (c) 2014-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test logic for skipping signature validation on old blocks.
diff --git a/test/functional/feature_backwards_compatibility.py b/test/functional/feature_backwards_compatibility.py
index adf66243b7..5cddd6527e 100755
--- a/test/functional/feature_backwards_compatibility.py
+++ b/test/functional/feature_backwards_compatibility.py
@@ -1,12 +1,16 @@
#!/usr/bin/env python3
-# Copyright (c) 2018-2019 The Bitcoin Core developers
+# Copyright (c) 2018-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Backwards compatibility functional test
Test various backwards compatibility scenarios. Download the previous node binaries:
-contrib/devtools/previous_release.sh -b v0.19.0.1 v0.18.1 v0.17.1
+contrib/devtools/previous_release.sh -b v0.19.1 v0.18.1 v0.17.1 v0.16.3 v0.15.2
+
+v0.15.2 is not required by this test, but it is used in wallet_upgradewallet.py.
+Due to a hardfork in regtest, it can't be used to sync nodes.
+
Due to RPC changes introduced in various versions the below tests
won't work for older versions without some patches or workarounds.
@@ -18,59 +22,40 @@ needs an older patch version.
import os
import shutil
-from test_framework.test_framework import BitcoinTestFramework, SkipTest
+from test_framework.test_framework import BitcoinTestFramework
from test_framework.descriptors import descsum_create
from test_framework.util import (
assert_equal,
- sync_blocks,
- sync_mempools
)
+
class BackwardsCompatibilityTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
- self.num_nodes = 5
+ self.num_nodes = 6
# Add new version after each release:
self.extra_args = [
["-addresstype=bech32"], # Pre-release: use to mine blocks
["-nowallet", "-walletrbf=1", "-addresstype=bech32"], # Pre-release: use to receive coins, swap wallets, etc
- ["-nowallet", "-walletrbf=1", "-addresstype=bech32"], # v0.19.0.1
+ ["-nowallet", "-walletrbf=1", "-addresstype=bech32"], # v0.19.1
["-nowallet", "-walletrbf=1", "-addresstype=bech32"], # v0.18.1
- ["-nowallet", "-walletrbf=1", "-addresstype=bech32"] # v0.17.1
+ ["-nowallet", "-walletrbf=1", "-addresstype=bech32"], # v0.17.1
+ ["-nowallet", "-walletrbf=1", "-addresstype=bech32"], # v0.16.3
]
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
+ self.skip_if_no_previous_releases()
def setup_nodes(self):
- if os.getenv("TEST_PREVIOUS_RELEASES") == "false":
- raise SkipTest("backwards compatibility tests")
-
- releases_path = os.getenv("PREVIOUS_RELEASES_DIR") or os.getcwd() + "/releases"
- if not os.path.isdir(releases_path):
- if os.getenv("TEST_PREVIOUS_RELEASES") == "true":
- raise AssertionError("TEST_PREVIOUS_RELEASES=1 but releases missing: " + releases_path)
- raise SkipTest("This test requires binaries for previous releases")
-
self.add_nodes(self.num_nodes, extra_args=self.extra_args, versions=[
None,
None,
- 190000,
+ 190100,
180100,
- 170100
- ], binary=[
- self.options.bitcoind,
- self.options.bitcoind,
- releases_path + "/v0.19.0.1/bin/bitcoind",
- releases_path + "/v0.18.1/bin/bitcoind",
- releases_path + "/v0.17.1/bin/bitcoind"
- ], binary_cli=[
- self.options.bitcoincli,
- self.options.bitcoincli,
- releases_path + "/v0.19.0.1/bin/bitcoin-cli",
- releases_path + "/v0.18.1/bin/bitcoin-cli",
- releases_path + "/v0.17.1/bin/bitcoin-cli"
+ 170100,
+ 160300,
])
self.start_nodes()
@@ -78,23 +63,24 @@ class BackwardsCompatibilityTest(BitcoinTestFramework):
def run_test(self):
self.nodes[0].generatetoaddress(101, self.nodes[0].getnewaddress())
- sync_blocks(self.nodes)
+ self.sync_blocks()
# Sanity check the test framework:
res = self.nodes[self.num_nodes - 1].getblockchaininfo()
assert_equal(res['blocks'], 101)
- node_master = self.nodes[self.num_nodes - 4]
- node_v19 = self.nodes[self.num_nodes - 3]
- node_v18 = self.nodes[self.num_nodes - 2]
- node_v17 = self.nodes[self.num_nodes - 1]
+ node_master = self.nodes[self.num_nodes - 5]
+ node_v19 = self.nodes[self.num_nodes - 4]
+ node_v18 = self.nodes[self.num_nodes - 3]
+ node_v17 = self.nodes[self.num_nodes - 2]
+ node_v16 = self.nodes[self.num_nodes - 1]
self.log.info("Test wallet backwards compatibility...")
# Create a number of wallets and open them in older versions:
# w1: regular wallet, created on master: update this test when default
# wallets can no longer be opened by older versions.
- node_master.createwallet(wallet_name="w1")
+ node_master.rpc.createwallet(wallet_name="w1")
wallet = node_master.get_wallet_rpc("w1")
info = wallet.getwalletinfo()
assert info['private_keys_enabled']
@@ -102,17 +88,17 @@ class BackwardsCompatibilityTest(BitcoinTestFramework):
# Create a confirmed transaction, receiving coins
address = wallet.getnewaddress()
self.nodes[0].sendtoaddress(address, 10)
- sync_mempools(self.nodes)
+ self.sync_mempools()
self.nodes[0].generate(1)
- sync_blocks(self.nodes)
+ self.sync_blocks()
# Create a conflicting transaction using RBF
return_address = self.nodes[0].getnewaddress()
tx1_id = self.nodes[1].sendtoaddress(return_address, 1)
tx2_id = self.nodes[1].bumpfee(tx1_id)["txid"]
# Confirm the transaction
- sync_mempools(self.nodes)
+ self.sync_mempools()
self.nodes[0].generate(1)
- sync_blocks(self.nodes)
+ self.sync_blocks()
# Create another conflicting transaction using RBF
tx3_id = self.nodes[1].sendtoaddress(return_address, 1)
tx4_id = self.nodes[1].bumpfee(tx3_id)["txid"]
@@ -120,17 +106,17 @@ class BackwardsCompatibilityTest(BitcoinTestFramework):
self.nodes[1].abandontransaction(tx3_id)
# w1_v19: regular wallet, created with v0.19
- node_v19.createwallet(wallet_name="w1_v19")
+ node_v19.rpc.createwallet(wallet_name="w1_v19")
wallet = node_v19.get_wallet_rpc("w1_v19")
info = wallet.getwalletinfo()
assert info['private_keys_enabled']
assert info['keypoolsize'] > 0
# Use addmultisigaddress (see #18075)
- address_18075 = wallet.addmultisigaddress(1, ["0296b538e853519c726a2c91e61ec11600ae1390813a627c66fb8be7947be63c52", "037211a824f55b505228e4c3d5194c1fcfaa15a456abdf37f9b9d97a4040afc073"], "", "legacy")["address"]
+ address_18075 = wallet.rpc.addmultisigaddress(1, ["0296b538e853519c726a2c91e61ec11600ae1390813a627c66fb8be7947be63c52", "037211a824f55b505228e4c3d5194c1fcfaa15a456abdf37f9b9d97a4040afc073"], "", "legacy")["address"]
assert wallet.getaddressinfo(address_18075)["solvable"]
# w1_v18: regular wallet, created with v0.18
- node_v18.createwallet(wallet_name="w1_v18")
+ node_v18.rpc.createwallet(wallet_name="w1_v18")
wallet = node_v18.get_wallet_rpc("w1_v18")
info = wallet.getwalletinfo()
assert info['private_keys_enabled']
@@ -139,21 +125,21 @@ class BackwardsCompatibilityTest(BitcoinTestFramework):
# w2: wallet with private keys disabled, created on master: update this
# test when default wallets private keys disabled can no longer be
# opened by older versions.
- node_master.createwallet(wallet_name="w2", disable_private_keys=True)
+ node_master.rpc.createwallet(wallet_name="w2", disable_private_keys=True)
wallet = node_master.get_wallet_rpc("w2")
info = wallet.getwalletinfo()
assert info['private_keys_enabled'] == False
assert info['keypoolsize'] == 0
# w2_v19: wallet with private keys disabled, created with v0.19
- node_v19.createwallet(wallet_name="w2_v19", disable_private_keys=True)
+ node_v19.rpc.createwallet(wallet_name="w2_v19", disable_private_keys=True)
wallet = node_v19.get_wallet_rpc("w2_v19")
info = wallet.getwalletinfo()
assert info['private_keys_enabled'] == False
assert info['keypoolsize'] == 0
# w2_v18: wallet with private keys disabled, created with v0.18
- node_v18.createwallet(wallet_name="w2_v18", disable_private_keys=True)
+ node_v18.rpc.createwallet(wallet_name="w2_v18", disable_private_keys=True)
wallet = node_v18.get_wallet_rpc("w2_v18")
info = wallet.getwalletinfo()
assert info['private_keys_enabled'] == False
@@ -161,21 +147,21 @@ class BackwardsCompatibilityTest(BitcoinTestFramework):
# w3: blank wallet, created on master: update this
# test when default blank wallets can no longer be opened by older versions.
- node_master.createwallet(wallet_name="w3", blank=True)
+ node_master.rpc.createwallet(wallet_name="w3", blank=True)
wallet = node_master.get_wallet_rpc("w3")
info = wallet.getwalletinfo()
assert info['private_keys_enabled']
assert info['keypoolsize'] == 0
# w3_v19: blank wallet, created with v0.19
- node_v19.createwallet(wallet_name="w3_v19", blank=True)
+ node_v19.rpc.createwallet(wallet_name="w3_v19", blank=True)
wallet = node_v19.get_wallet_rpc("w3_v19")
info = wallet.getwalletinfo()
assert info['private_keys_enabled']
assert info['keypoolsize'] == 0
# w3_v18: blank wallet, created with v0.18
- node_v18.createwallet(wallet_name="w3_v18", blank=True)
+ node_v18.rpc.createwallet(wallet_name="w3_v18", blank=True)
wallet = node_v18.get_wallet_rpc("w3_v18")
info = wallet.getwalletinfo()
assert info['private_keys_enabled']
@@ -186,6 +172,7 @@ class BackwardsCompatibilityTest(BitcoinTestFramework):
node_v19_wallets_dir = os.path.join(node_v19.datadir, "regtest/wallets")
node_v18_wallets_dir = os.path.join(node_v18.datadir, "regtest/wallets")
node_v17_wallets_dir = os.path.join(node_v17.datadir, "regtest/wallets")
+ node_v16_wallets_dir = os.path.join(node_v16.datadir, "regtest")
node_master.unloadwallet("w1")
node_master.unloadwallet("w2")
node_v19.unloadwallet("w1_v19")
@@ -193,6 +180,13 @@ class BackwardsCompatibilityTest(BitcoinTestFramework):
node_v18.unloadwallet("w1_v18")
node_v18.unloadwallet("w2_v18")
+ # Copy wallets to v0.16
+ for wallet in os.listdir(node_master_wallets_dir):
+ shutil.copytree(
+ os.path.join(node_master_wallets_dir, wallet),
+ os.path.join(node_v16_wallets_dir, wallet)
+ )
+
# Copy wallets to v0.17
for wallet in os.listdir(node_master_wallets_dir):
shutil.copytree(
@@ -311,19 +305,57 @@ class BackwardsCompatibilityTest(BitcoinTestFramework):
# assert_raises_rpc_error(-4, "Wallet loading failed.", node_v17.loadwallet, 'w3_v18')
# Instead, we stop node and try to launch it with the wallet:
- self.stop_node(self.num_nodes - 1)
+ self.stop_node(4)
node_v17.assert_start_raises_init_error(["-wallet=w3_v18"], "Error: Error loading w3_v18: Wallet requires newer version of Bitcoin Core")
node_v17.assert_start_raises_init_error(["-wallet=w3"], "Error: Error loading w3: Wallet requires newer version of Bitcoin Core")
- self.start_node(self.num_nodes - 1)
+ self.start_node(4)
+
+ # Open most recent wallet in v0.16 (no loadwallet RPC)
+ self.restart_node(5, extra_args=["-wallet=w2"])
+ wallet = node_v16.get_wallet_rpc("w2")
+ info = wallet.getwalletinfo()
+ assert info['keypoolsize'] == 1
+
+ # Create upgrade wallet in v0.16
+ self.restart_node(-1, extra_args=["-wallet=u1_v16"])
+ wallet = node_v16.get_wallet_rpc("u1_v16")
+ v16_addr = wallet.getnewaddress('', "bech32")
+ v16_info = wallet.validateaddress(v16_addr)
+ v16_pubkey = v16_info['pubkey']
+ self.stop_node(-1)
self.log.info("Test wallet upgrade path...")
# u1: regular wallet, created with v0.17
- node_v17.createwallet(wallet_name="u1_v17")
+ node_v17.rpc.createwallet(wallet_name="u1_v17")
wallet = node_v17.get_wallet_rpc("u1_v17")
address = wallet.getnewaddress("bech32")
- info = wallet.getaddressinfo(address)
- hdkeypath = info["hdkeypath"]
- pubkey = info["pubkey"]
+ v17_info = wallet.getaddressinfo(address)
+ hdkeypath = v17_info["hdkeypath"]
+ pubkey = v17_info["pubkey"]
+
+ # Copy the 0.16 wallet to the last Bitcoin Core version and open it:
+ shutil.copyfile(
+ os.path.join(node_v16_wallets_dir, "wallets/u1_v16"),
+ os.path.join(node_master_wallets_dir, "u1_v16")
+ )
+ load_res = node_master.loadwallet("u1_v16")
+ # Make sure this wallet opens without warnings. See https://github.com/bitcoin/bitcoin/pull/19054
+ assert_equal(load_res['warning'], '')
+ wallet = node_master.get_wallet_rpc("u1_v16")
+ info = wallet.getaddressinfo(v16_addr)
+ descriptor = "wpkh([" + info["hdmasterfingerprint"] + hdkeypath[1:] + "]" + v16_pubkey + ")"
+ assert_equal(info["desc"], descsum_create(descriptor))
+
+ # Now copy that same wallet back to 0.16 to make sure no automatic upgrade breaks it
+ os.remove(os.path.join(node_v16_wallets_dir, "wallets/u1_v16"))
+ shutil.copyfile(
+ os.path.join(node_master_wallets_dir, "u1_v16"),
+ os.path.join(node_v16_wallets_dir, "wallets/u1_v16")
+ )
+ self.start_node(-1, extra_args=["-wallet=u1_v16"])
+ wallet = node_v16.get_wallet_rpc("u1_v16")
+ info = wallet.validateaddress(v16_addr)
+ assert_equal(info, v16_info)
# Copy the 0.17 wallet to the last Bitcoin Core version and open it:
node_v17.unloadwallet("u1_v17")
@@ -337,6 +369,18 @@ class BackwardsCompatibilityTest(BitcoinTestFramework):
descriptor = "wpkh([" + info["hdmasterfingerprint"] + hdkeypath[1:] + "]" + pubkey + ")"
assert_equal(info["desc"], descsum_create(descriptor))
+ # Now copy that same wallet back to 0.17 to make sure no automatic upgrade breaks it
+ node_master.unloadwallet("u1_v17")
+ shutil.rmtree(os.path.join(node_v17_wallets_dir, "u1_v17"))
+ shutil.copytree(
+ os.path.join(node_master_wallets_dir, "u1_v17"),
+ os.path.join(node_v17_wallets_dir, "u1_v17")
+ )
+ node_v17.loadwallet("u1_v17")
+ wallet = node_v17.get_wallet_rpc("u1_v17")
+ info = wallet.getaddressinfo(address)
+ assert_equal(info, v17_info)
+
# Copy the 0.19 wallet to the last Bitcoin Core version and open it:
shutil.copytree(
os.path.join(node_v19_wallets_dir, "w1_v19"),
@@ -346,5 +390,16 @@ class BackwardsCompatibilityTest(BitcoinTestFramework):
wallet = node_master.get_wallet_rpc("w1_v19")
assert wallet.getaddressinfo(address_18075)["solvable"]
+ # Now copy that same wallet back to 0.19 to make sure no automatic upgrade breaks it
+ node_master.unloadwallet("w1_v19")
+ shutil.rmtree(os.path.join(node_v19_wallets_dir, "w1_v19"))
+ shutil.copytree(
+ os.path.join(node_master_wallets_dir, "w1_v19"),
+ os.path.join(node_v19_wallets_dir, "w1_v19")
+ )
+ node_v19.loadwallet("w1_v19")
+ wallet = node_v19.get_wallet_rpc("w1_v19")
+ assert wallet.getaddressinfo(address_18075)["solvable"]
+
if __name__ == '__main__':
BackwardsCompatibilityTest().main()
diff --git a/test/functional/feature_bip68_sequence.py b/test/functional/feature_bip68_sequence.py
index fe45ed34b5..549e8b2029 100755
--- a/test/functional/feature_bip68_sequence.py
+++ b/test/functional/feature_bip68_sequence.py
@@ -324,7 +324,7 @@ class BIP68Test(BitcoinTestFramework):
block.solve()
tip = block.sha256
height += 1
- self.nodes[0].submitblock(ToHex(block))
+ assert_equal(None if i == 1 else 'inconclusive', self.nodes[0].submitblock(ToHex(block)))
cur_time += 1
mempool = self.nodes[0].getrawmempool()
@@ -381,7 +381,7 @@ class BIP68Test(BitcoinTestFramework):
add_witness_commitment(block)
block.solve()
- self.nodes[0].submitblock(block.serialize().hex())
+ assert_equal(None, self.nodes[0].submitblock(block.serialize().hex()))
assert_equal(self.nodes[0].getbestblockhash(), block.hash)
def activateCSV(self):
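
The new assertions rely on `submitblock` returning `None` when a block is accepted and a short reason string (such as `'inconclusive'` or `'block-validation-failed'`) otherwise; a minimal sketch of that pattern, assuming a `node` handle from the test framework:

```python
def submit_and_check(node, block):
    """Submit a block and fail loudly unless it was accepted (illustrative helper)."""
    result = node.submitblock(block.serialize().hex())
    # submitblock returns None on acceptance, otherwise a rejection reason string.
    assert result is None, 'block rejected: {}'.format(result)
```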
diff --git a/test/functional/feature_block.py b/test/functional/feature_block.py
index 0c591de869..6619d83dc4 100755
--- a/test/functional/feature_block.py
+++ b/test/functional/feature_block.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2015-2019 The Bitcoin Core developers
+# Copyright (c) 2015-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test block processing."""
diff --git a/test/functional/feature_cltv.py b/test/functional/feature_cltv.py
index 073ed8d7c7..fd0330924d 100755
--- a/test/functional/feature_cltv.py
+++ b/test/functional/feature_cltv.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2015-2019 The Bitcoin Core developers
+# Copyright (c) 2015-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test BIP65 (CHECKLOCKTIMEVERIFY).
diff --git a/test/functional/feature_config_args.py b/test/functional/feature_config_args.py
index 1a7c656274..a4dc455d57 100755
--- a/test/functional/feature_config_args.py
+++ b/test/functional/feature_config_args.py
@@ -71,7 +71,7 @@ class ConfArgsTest(BitcoinTestFramework):
with open(inc_conf_file2_path, 'w', encoding='utf-8') as conf:
conf.write('[testnet]\n')
self.restart_node(0)
- self.nodes[0].stop_node(expected_stderr='Warning: ' + inc_conf_file_path + ':1 Section [testnot] is not recognized.' + os.linesep + 'Warning: ' + inc_conf_file2_path + ':1 Section [testnet] is not recognized.')
+ self.nodes[0].stop_node(expected_stderr='Warning: ' + inc_conf_file_path + ':1 Section [testnot] is not recognized.' + os.linesep + inc_conf_file2_path + ':1 Section [testnet] is not recognized.')
with open(inc_conf_file_path, 'w', encoding='utf-8') as conf:
conf.write('') # clear
diff --git a/test/functional/feature_csv_activation.py b/test/functional/feature_csv_activation.py
index a98480a6dd..c6852ef017 100755
--- a/test/functional/feature_csv_activation.py
+++ b/test/functional/feature_csv_activation.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2015-2019 The Bitcoin Core developers
+# Copyright (c) 2015-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test CSV soft fork activation.
diff --git a/test/functional/feature_dbcrash.py b/test/functional/feature_dbcrash.py
index 5bbdb8cda1..7b38e09bf9 100755
--- a/test/functional/feature_dbcrash.py
+++ b/test/functional/feature_dbcrash.py
@@ -256,7 +256,11 @@ class ChainstateWriteCrashTest(BitcoinTestFramework):
self.log.debug("Mining longer tip")
block_hashes = []
while current_height + 1 > self.nodes[3].getblockcount():
- block_hashes.extend(self.nodes[3].generate(min(10, current_height + 1 - self.nodes[3].getblockcount())))
+ block_hashes.extend(self.nodes[3].generatetoaddress(
+ nblocks=min(10, current_height + 1 - self.nodes[3].getblockcount()),
+ # new address to avoid mining a block that has just been invalidated
+ address=self.nodes[3].getnewaddress(),
+ ))
self.log.debug("Syncing %d new blocks...", len(block_hashes))
self.sync_node3blocks(block_hashes)
utxo_list = self.nodes[3].listunspent()
@@ -281,5 +285,6 @@ class ChainstateWriteCrashTest(BitcoinTestFramework):
if self.restart_counts[i] == 0:
self.log.warning("Node %d never crashed during utxo flush!", i)
+
if __name__ == "__main__":
ChainstateWriteCrashTest().main()
diff --git a/test/functional/feature_dersig.py b/test/functional/feature_dersig.py
index 38cdf0501e..05fdacd451 100755
--- a/test/functional/feature_dersig.py
+++ b/test/functional/feature_dersig.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2015-2019 The Bitcoin Core developers
+# Copyright (c) 2015-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test BIP66 (DER SIG).
diff --git a/test/functional/feature_fee_estimation.py b/test/functional/feature_fee_estimation.py
index 5128485ec0..3cf0fb8f7b 100755
--- a/test/functional/feature_fee_estimation.py
+++ b/test/functional/feature_fee_estimation.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2014-2019 The Bitcoin Core developers
+# Copyright (c) 2014-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test fee estimation code."""
diff --git a/test/functional/feature_filelock.py b/test/functional/feature_filelock.py
index be1c044aa5..b56ffe179f 100755
--- a/test/functional/feature_filelock.py
+++ b/test/functional/feature_filelock.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2018-2019 The Bitcoin Core developers
+# Copyright (c) 2018-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Check that it's not possible to start a second bitcoind instance using the same datadir or wallet."""
diff --git a/test/functional/feature_help.py b/test/functional/feature_help.py
index e3e2456183..9f18413255 100755
--- a/test/functional/feature_help.py
+++ b/test/functional/feature_help.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2018 The Bitcoin Core developers
+# Copyright (c) 2018-2019 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Verify that starting bitcoin with -h works as expected."""
diff --git a/test/functional/feature_loadblock.py b/test/functional/feature_loadblock.py
index 7f1e8f5bae..0a457ca17f 100755
--- a/test/functional/feature_loadblock.py
+++ b/test/functional/feature_loadblock.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2017-2019 The Bitcoin Core developers
+# Copyright (c) 2017-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test loadblock option
@@ -16,10 +16,8 @@ import sys
import tempfile
import urllib
-from test_framework.test_framework import (
- BitcoinTestFramework,
-)
-from test_framework.util import assert_equal, wait_until
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import assert_equal
class LoadblockTest(BitcoinTestFramework):
@@ -73,9 +71,8 @@ class LoadblockTest(BitcoinTestFramework):
check=True)
self.log.info("Restart second, unsynced node with bootstrap file")
- self.stop_node(1)
- self.start_node(1, ["-loadblock=" + bootstrap_file])
- wait_until(lambda: self.nodes[1].getblockcount() == 100)
+ self.restart_node(1, extra_args=["-loadblock=" + bootstrap_file])
+ assert_equal(self.nodes[1].getblockcount(), 100) # start_node is blocking on all block files being imported
assert_equal(self.nodes[1].getblockchaininfo()['blocks'], 100)
assert_equal(self.nodes[0].getbestblockhash(), self.nodes[1].getbestblockhash())
diff --git a/test/functional/feature_logging.py b/test/functional/feature_logging.py
index 1bae29b302..afcbcf099a 100755
--- a/test/functional/feature_logging.py
+++ b/test/functional/feature_logging.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2017-2019 The Bitcoin Core developers
+# Copyright (c) 2017-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test debug logging."""
@@ -67,8 +67,7 @@ class LoggingTest(BitcoinTestFramework):
assert not os.path.isfile(default_log_path)
# just sanity check no crash here
- self.stop_node(0)
- self.start_node(0, ["-debuglogfile=%s" % os.devnull])
+ self.restart_node(0, ["-debuglogfile=%s" % os.devnull])
if __name__ == '__main__':
diff --git a/test/functional/feature_maxuploadtarget.py b/test/functional/feature_maxuploadtarget.py
index dc00e03fe7..7eabf86cad 100755
--- a/test/functional/feature_maxuploadtarget.py
+++ b/test/functional/feature_maxuploadtarget.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2015-2019 The Bitcoin Core developers
+# Copyright (c) 2015-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test behavior of -maxuploadtarget.
@@ -13,7 +13,7 @@ if uploadtarget has been reached.
from collections import defaultdict
import time
-from test_framework.messages import CInv, msg_getdata
+from test_framework.messages import CInv, MSG_BLOCK, msg_getdata
from test_framework.mininode import P2PInterface
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, mine_large_block
@@ -84,7 +84,7 @@ class MaxUploadTest(BitcoinTestFramework):
# the same big old block too many times (expect: disconnect)
getdata_request = msg_getdata()
- getdata_request.inv.append(CInv(2, big_old_block))
+ getdata_request.inv.append(CInv(MSG_BLOCK, big_old_block))
max_bytes_per_day = 800*1024*1024
daily_buffer = 144 * 4000000
@@ -109,7 +109,7 @@ class MaxUploadTest(BitcoinTestFramework):
# Requesting the current block on p2p_conns[1] should succeed indefinitely,
# even when over the max upload target.
# We'll try 800 times
- getdata_request.inv = [CInv(2, big_new_block)]
+ getdata_request.inv = [CInv(MSG_BLOCK, big_new_block)]
for i in range(800):
p2p_conns[1].send_and_ping(getdata_request)
assert_equal(p2p_conns[1].block_receive_map[big_new_block], i+1)
@@ -117,7 +117,7 @@ class MaxUploadTest(BitcoinTestFramework):
self.log.info("Peer 1 able to repeatedly download new block")
# But if p2p_conns[1] tries for an old block, it gets disconnected too.
- getdata_request.inv = [CInv(2, big_old_block)]
+ getdata_request.inv = [CInv(MSG_BLOCK, big_old_block)]
p2p_conns[1].send_message(getdata_request)
p2p_conns[1].wait_for_disconnect()
assert_equal(len(self.nodes[0].getpeerinfo()), 1)
@@ -138,23 +138,22 @@ class MaxUploadTest(BitcoinTestFramework):
self.nodes[0].disconnect_p2ps()
self.log.info("Restarting node 0 with noban permission and 1MB maxuploadtarget")
- self.stop_node(0)
- self.start_node(0, ["-whitelist=noban@127.0.0.1", "-maxuploadtarget=1"])
+ self.restart_node(0, ["-whitelist=noban@127.0.0.1", "-maxuploadtarget=1"])
# Reconnect to self.nodes[0]
self.nodes[0].add_p2p_connection(TestP2PConn())
#retrieve 20 blocks which should be enough to break the 1MB limit
- getdata_request.inv = [CInv(2, big_new_block)]
+ getdata_request.inv = [CInv(MSG_BLOCK, big_new_block)]
for i in range(20):
self.nodes[0].p2p.send_and_ping(getdata_request)
assert_equal(self.nodes[0].p2p.block_receive_map[big_new_block], i+1)
- getdata_request.inv = [CInv(2, big_old_block)]
+ getdata_request.inv = [CInv(MSG_BLOCK, big_old_block)]
self.nodes[0].p2p.send_and_ping(getdata_request)
- assert_equal(len(self.nodes[0].getpeerinfo()), 1) #node is still connected because of the whitelist
+ assert_equal(len(self.nodes[0].getpeerinfo()), 1) #node is still connected because of the noban permission
- self.log.info("Peer still connected after trying to download old block (whitelisted)")
+ self.log.info("Peer still connected after trying to download old block (noban permission)")
if __name__ == '__main__':
MaxUploadTest().main()
diff --git a/test/functional/feature_notifications.py b/test/functional/feature_notifications.py
index b110a559c0..dd4c318cee 100755
--- a/test/functional/feature_notifications.py
+++ b/test/functional/feature_notifications.py
@@ -5,19 +5,21 @@
"""Test the -alertnotify, -blocknotify and -walletnotify options."""
import os
-from test_framework.address import ADDRESS_BCRT1_UNSPENDABLE
+from test_framework.address import ADDRESS_BCRT1_UNSPENDABLE, keyhash_to_p2pkh
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
wait_until,
connect_nodes,
+ disconnect_nodes,
+ hex_str_to_bytes,
)
# Linux allow all characters other than \x00
# Windows disallow control characters (0-31) and /\?%:|"<>
FILE_CHAR_START = 32 if os.name == 'nt' else 1
FILE_CHAR_END = 128
-FILE_CHAR_BLACKLIST = '/\\?%*:|"<>' if os.name == 'nt' else '/'
+FILE_CHAR_BLOCKLIST = '/\\?%*:|"<>' if os.name == 'nt' else '/'
def notify_outputname(walletname, txid):
@@ -30,7 +32,7 @@ class NotificationsTest(BitcoinTestFramework):
self.setup_clean_chain = True
def setup_network(self):
- self.wallet = ''.join(chr(i) for i in range(FILE_CHAR_START, FILE_CHAR_END) if chr(i) not in FILE_CHAR_BLACKLIST)
+ self.wallet = ''.join(chr(i) for i in range(FILE_CHAR_START, FILE_CHAR_END) if chr(i) not in FILE_CHAR_BLOCKLIST)
self.alertnotify_dir = os.path.join(self.options.tmpdir, "alertnotify")
self.blocknotify_dir = os.path.join(self.options.tmpdir, "blocknotify")
self.walletnotify_dir = os.path.join(self.options.tmpdir, "walletnotify")
@@ -81,8 +83,68 @@ class NotificationsTest(BitcoinTestFramework):
# directory content should equal the generated transaction hashes
txids_rpc = list(map(lambda t: notify_outputname(self.wallet, t['txid']), self.nodes[1].listtransactions("*", block_count)))
assert_equal(sorted(txids_rpc), sorted(os.listdir(self.walletnotify_dir)))
+ for tx_file in os.listdir(self.walletnotify_dir):
+ os.remove(os.path.join(self.walletnotify_dir, tx_file))
+
+ # Conflicting transactions tests. Give node 0 same wallet seed as
+ # node 1, generate spends from node 0, and check notifications
+ # triggered by node 1
+ self.log.info("test -walletnotify with conflicting transactions")
+ self.nodes[0].sethdseed(seed=self.nodes[1].dumpprivkey(keyhash_to_p2pkh(hex_str_to_bytes(self.nodes[1].getwalletinfo()['hdseedid'])[::-1])))
+ self.nodes[0].rescanblockchain()
+ self.nodes[0].generatetoaddress(100, ADDRESS_BCRT1_UNSPENDABLE)
+ self.sync_blocks()
+
+ # Generate transaction on node 0, sync mempools, and check for
+ # notification on node 1.
+ tx1 = self.nodes[0].sendtoaddress(address=ADDRESS_BCRT1_UNSPENDABLE, amount=1, replaceable=True)
+ assert_equal(tx1 in self.nodes[0].getrawmempool(), True)
+ self.sync_mempools()
+ self.expect_wallet_notify([tx1])
+
+ # Generate bump transaction, sync mempools, and check for bump1
+ # notification. In the future, per
+ # https://github.com/bitcoin/bitcoin/pull/9371, it might be better
+ # to have notifications for both tx1 and bump1.
+ bump1 = self.nodes[0].bumpfee(tx1)["txid"]
+ assert_equal(bump1 in self.nodes[0].getrawmempool(), True)
+ self.sync_mempools()
+ self.expect_wallet_notify([bump1])
+
+ # Add bump1 transaction to new block, checking for a notification
+ # and the correct number of confirmations.
+ self.nodes[0].generatetoaddress(1, ADDRESS_BCRT1_UNSPENDABLE)
+ self.sync_blocks()
+ self.expect_wallet_notify([bump1])
+ assert_equal(self.nodes[1].gettransaction(bump1)["confirmations"], 1)
+
+ # Generate a second transaction to be bumped.
+ tx2 = self.nodes[0].sendtoaddress(address=ADDRESS_BCRT1_UNSPENDABLE, amount=1, replaceable=True)
+ assert_equal(tx2 in self.nodes[0].getrawmempool(), True)
+ self.sync_mempools()
+ self.expect_wallet_notify([tx2])
+
+ # Bump tx2 as bump2 and generate a block on node 0 while
+ # disconnected, then reconnect and check for notifications on node 1
+ # about newly confirmed bump2 and newly conflicted tx2.
+ disconnect_nodes(self.nodes[0], 1)
+ bump2 = self.nodes[0].bumpfee(tx2)["txid"]
+ self.nodes[0].generatetoaddress(1, ADDRESS_BCRT1_UNSPENDABLE)
+ assert_equal(self.nodes[0].gettransaction(bump2)["confirmations"], 1)
+ assert_equal(tx2 in self.nodes[1].getrawmempool(), True)
+ connect_nodes(self.nodes[0], 1)
+ self.sync_blocks()
+ self.expect_wallet_notify([bump2, tx2])
+ assert_equal(self.nodes[1].gettransaction(bump2)["confirmations"], 1)
# TODO: add test for `-alertnotify` large fork notifications
+ def expect_wallet_notify(self, tx_ids):
+ wait_until(lambda: len(os.listdir(self.walletnotify_dir)) >= len(tx_ids), timeout=10)
+ assert_equal(sorted(notify_outputname(self.wallet, tx_id) for tx_id in tx_ids), sorted(os.listdir(self.walletnotify_dir)))
+ for tx_file in os.listdir(self.walletnotify_dir):
+ os.remove(os.path.join(self.walletnotify_dir, tx_file))
+
+
if __name__ == '__main__':
NotificationsTest().main()
diff --git a/test/functional/feature_nulldummy.py b/test/functional/feature_nulldummy.py
index 9c92ee7f90..ff55cb76d9 100755
--- a/test/functional/feature_nulldummy.py
+++ b/test/functional/feature_nulldummy.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2016-2019 The Bitcoin Core developers
+# Copyright (c) 2016-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test NULLDUMMY softfork.
@@ -111,7 +111,7 @@ class NULLDUMMYTest(BitcoinTestFramework):
witness and add_witness_commitment(block)
block.rehash()
block.solve()
- node.submitblock(block.serialize().hex())
+ assert_equal(None if accept else 'block-validation-failed', node.submitblock(block.serialize().hex()))
if (accept):
assert_equal(node.getbestblockhash(), block.hash)
self.tip = block.sha256
diff --git a/test/functional/feature_pruning.py b/test/functional/feature_pruning.py
index 21b6b299f6..e46e5aacc8 100755
--- a/test/functional/feature_pruning.py
+++ b/test/functional/feature_pruning.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2014-2019 The Bitcoin Core developers
+# Copyright (c) 2014-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the pruning code.
@@ -263,8 +263,7 @@ class PruneTest(BitcoinTestFramework):
assert_raises_rpc_error(-1, "not in prune mode", node.pruneblockchain, 500)
# now re-start in manual pruning mode
- self.stop_node(node_number)
- self.start_node(node_number, extra_args=["-prune=1"])
+ self.restart_node(node_number, extra_args=["-prune=1"])
node = self.nodes[node_number]
assert_equal(node.getblockcount(), 995)
@@ -326,16 +325,14 @@ class PruneTest(BitcoinTestFramework):
assert not has_block(3), "blk00003.dat is still there, should be pruned by now"
# stop node, start back up with auto-prune at 550 MiB, make sure still runs
- self.stop_node(node_number)
- self.start_node(node_number, extra_args=["-prune=550"])
+ self.restart_node(node_number, extra_args=["-prune=550"])
self.log.info("Success")
def wallet_test(self):
# check that the pruning node's wallet is still in good shape
self.log.info("Stop and start pruning node to trigger wallet rescan")
- self.stop_node(2)
- self.start_node(2, extra_args=["-prune=550"])
+ self.restart_node(2, extra_args=["-prune=550"])
self.log.info("Success")
# check that wallet loads successfully when restarting a pruned node after IBD.
@@ -344,8 +341,7 @@ class PruneTest(BitcoinTestFramework):
connect_nodes(self.nodes[0], 5)
nds = [self.nodes[0], self.nodes[5]]
self.sync_blocks(nds, wait=5, timeout=300)
- self.stop_node(5) # stop and start to trigger rescan
- self.start_node(5, extra_args=["-prune=550"])
+ self.restart_node(5, extra_args=["-prune=550"]) # restart to trigger rescan
self.log.info("Success")
def run_test(self):
diff --git a/test/functional/feature_rbf.py b/test/functional/feature_rbf.py
index 9e578f0026..acf551ef69 100755
--- a/test/functional/feature_rbf.py
+++ b/test/functional/feature_rbf.py
@@ -10,7 +10,7 @@ from test_framework.messages import COIN, COutPoint, CTransaction, CTxIn, CTxOut
from test_framework.script import CScript, OP_DROP
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, assert_raises_rpc_error, satoshi_round
-from test_framework.script_util import DUMMY_P2WPKH_SCRIPT
+from test_framework.script_util import DUMMY_P2WPKH_SCRIPT, DUMMY_2_P2WPKH_SCRIPT
MAX_REPLACEMENT_LIMIT = 100
@@ -142,7 +142,7 @@ class ReplaceByFeeTest(BitcoinTestFramework):
# Should fail because we haven't changed the fee
tx1b = CTransaction()
tx1b.vin = [CTxIn(tx0_outpoint, nSequence=0)]
- tx1b.vout = [CTxOut(1 * COIN, DUMMY_P2WPKH_SCRIPT + b'a')]
+ tx1b.vout = [CTxOut(1 * COIN, DUMMY_2_P2WPKH_SCRIPT)]
tx1b_hex = txToHex(tx1b)
# This will raise an exception due to insufficient fee
diff --git a/test/functional/feature_reindex.py b/test/functional/feature_reindex.py
index 940b403f9c..31cea8d1b7 100755
--- a/test/functional/feature_reindex.py
+++ b/test/functional/feature_reindex.py
@@ -10,10 +10,10 @@
"""
from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import wait_until
+from test_framework.util import assert_equal
-class ReindexTest(BitcoinTestFramework):
+class ReindexTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
@@ -24,7 +24,7 @@ class ReindexTest(BitcoinTestFramework):
self.stop_nodes()
extra_args = [["-reindex-chainstate" if justchainstate else "-reindex"]]
self.start_nodes(extra_args)
- wait_until(lambda: self.nodes[0].getblockcount() == blockcount)
+ assert_equal(self.nodes[0].getblockcount(), blockcount) # start_node is blocking on reindex
self.log.info("Success")
def run_test(self):
diff --git a/test/functional/feature_segwit.py b/test/functional/feature_segwit.py
index 909a43c8d9..5195d20dcb 100755
--- a/test/functional/feature_segwit.py
+++ b/test/functional/feature_segwit.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2016-2019 The Bitcoin Core developers
+# Copyright (c) 2016-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the SegWit changeover logic."""
@@ -20,6 +20,7 @@ from test_framework.script import CScript, OP_HASH160, OP_CHECKSIG, OP_0, hash16
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
+ assert_is_hex_string,
assert_raises_rpc_error,
connect_nodes,
hex_str_to_bytes,
@@ -28,8 +29,8 @@ from test_framework.util import (
NODE_0 = 0
NODE_2 = 2
-WIT_V0 = 0
-WIT_V1 = 1
+P2WPKH = 0
+P2WSH = 1
def getutxo(txid):
utxo = {}
@@ -108,18 +109,13 @@ class SegWitTest(BitcoinTestFramework):
assert tmpl['sigoplimit'] == 20000
assert tmpl['transactions'][0]['hash'] == txid
assert tmpl['transactions'][0]['sigops'] == 2
- tmpl = self.nodes[0].getblocktemplate({'rules': ['segwit']})
- assert tmpl['sizelimit'] == 1000000
- assert 'weightlimit' not in tmpl
- assert tmpl['sigoplimit'] == 20000
- assert tmpl['transactions'][0]['hash'] == txid
- assert tmpl['transactions'][0]['sigops'] == 2
+ assert '!segwit' not in tmpl['rules']
self.nodes[0].generate(1) # block 162
balance_presetup = self.nodes[0].getbalance()
self.pubkey = []
- p2sh_ids = [] # p2sh_ids[NODE][VER] is an array of txids that spend to a witness version VER pkscript to an address for NODE embedded in p2sh
- wit_ids = [] # wit_ids[NODE][VER] is an array of txids that spend to a witness version VER pkscript to an address for NODE via bare witness
+ p2sh_ids = [] # p2sh_ids[NODE][TYPE] is an array of txids that spend to P2WPKH (TYPE=0) or P2WSH (TYPE=1) scripts to an address for NODE embedded in p2sh
+ wit_ids = [] # wit_ids[NODE][TYPE] is an array of txids that spend to P2WPKH (TYPE=0) or P2WSH (TYPE=1) scripts to an address for NODE via bare witness
for i in range(3):
newaddress = self.nodes[i].getnewaddress()
self.pubkey.append(self.nodes[i].getaddressinfo(newaddress)["pubkey"])
@@ -152,14 +148,14 @@ class SegWitTest(BitcoinTestFramework):
self.sync_blocks()
self.log.info("Verify witness txs are skipped for mining before the fork")
- self.skip_mine(self.nodes[2], wit_ids[NODE_2][WIT_V0][0], True) # block 424
- self.skip_mine(self.nodes[2], wit_ids[NODE_2][WIT_V1][0], True) # block 425
- self.skip_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V0][0], True) # block 426
- self.skip_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V1][0], True) # block 427
+ self.skip_mine(self.nodes[2], wit_ids[NODE_2][P2WPKH][0], True) # block 424
+ self.skip_mine(self.nodes[2], wit_ids[NODE_2][P2WSH][0], True) # block 425
+ self.skip_mine(self.nodes[2], p2sh_ids[NODE_2][P2WPKH][0], True) # block 426
+ self.skip_mine(self.nodes[2], p2sh_ids[NODE_2][P2WSH][0], True) # block 427
self.log.info("Verify unsigned p2sh witness txs without a redeem script are invalid")
- self.fail_accept(self.nodes[2], "mandatory-script-verify-flag", p2sh_ids[NODE_2][WIT_V0][1], False)
- self.fail_accept(self.nodes[2], "mandatory-script-verify-flag", p2sh_ids[NODE_2][WIT_V1][1], False)
+ self.fail_accept(self.nodes[2], "mandatory-script-verify-flag-failed (Operation not valid with the current stack size)", p2sh_ids[NODE_2][P2WPKH][1], sign=False)
+ self.fail_accept(self.nodes[2], "mandatory-script-verify-flag-failed (Operation not valid with the current stack size)", p2sh_ids[NODE_2][P2WSH][1], sign=False)
self.nodes[2].generate(4) # blocks 428-431
@@ -173,13 +169,13 @@ class SegWitTest(BitcoinTestFramework):
self.log.info("Verify default node can't accept txs with missing witness")
# unsigned, no scriptsig
- self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", wit_ids[NODE_0][WIT_V0][0], False)
- self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", wit_ids[NODE_0][WIT_V1][0], False)
- self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", p2sh_ids[NODE_0][WIT_V0][0], False)
- self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", p2sh_ids[NODE_0][WIT_V1][0], False)
+ self.fail_accept(self.nodes[0], "non-mandatory-script-verify-flag (Witness program hash mismatch)", wit_ids[NODE_0][P2WPKH][0], sign=False)
+ self.fail_accept(self.nodes[0], "non-mandatory-script-verify-flag (Witness program was passed an empty witness)", wit_ids[NODE_0][P2WSH][0], sign=False)
+ self.fail_accept(self.nodes[0], "mandatory-script-verify-flag-failed (Operation not valid with the current stack size)", p2sh_ids[NODE_0][P2WPKH][0], sign=False)
+ self.fail_accept(self.nodes[0], "mandatory-script-verify-flag-failed (Operation not valid with the current stack size)", p2sh_ids[NODE_0][P2WSH][0], sign=False)
# unsigned with redeem script
- self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", p2sh_ids[NODE_0][WIT_V0][0], False, witness_script(False, self.pubkey[0]))
- self.fail_accept(self.nodes[0], "mandatory-script-verify-flag", p2sh_ids[NODE_0][WIT_V1][0], False, witness_script(True, self.pubkey[0]))
+ self.fail_accept(self.nodes[0], "non-mandatory-script-verify-flag (Witness program hash mismatch)", p2sh_ids[NODE_0][P2WPKH][0], sign=False, redeem_script=witness_script(False, self.pubkey[0]))
+ self.fail_accept(self.nodes[0], "non-mandatory-script-verify-flag (Witness program was passed an empty witness)", p2sh_ids[NODE_0][P2WSH][0], sign=False, redeem_script=witness_script(True, self.pubkey[0]))
self.log.info("Verify block and transaction serialization rpcs return differing serializations depending on rpc serialization flag")
assert self.nodes[2].getblock(blockhash, False) != self.nodes[0].getblock(blockhash, False)
@@ -193,17 +189,25 @@ class SegWitTest(BitcoinTestFramework):
assert self.nodes[1].getrawtransaction(tx_id, False, blockhash) == self.nodes[2].gettransaction(tx_id)["hex"]
assert self.nodes[0].getrawtransaction(tx_id, False, blockhash) == tx.serialize_without_witness().hex()
+ # Coinbase contains the witness commitment nonce, check that RPC shows us
+ coinbase_txid = self.nodes[2].getblock(blockhash)['tx'][0]
+ coinbase_tx = self.nodes[2].gettransaction(txid=coinbase_txid, verbose=True)
+ witnesses = coinbase_tx["decoded"]["vin"][0]["txinwitness"]
+ assert_equal(len(witnesses), 1)
+ assert_is_hex_string(witnesses[0])
+ assert_equal(witnesses[0], '00'*32)
+
self.log.info("Verify witness txs without witness data are invalid after the fork")
- self.fail_accept(self.nodes[2], 'non-mandatory-script-verify-flag (Witness program hash mismatch)', wit_ids[NODE_2][WIT_V0][2], sign=False)
- self.fail_accept(self.nodes[2], 'non-mandatory-script-verify-flag (Witness program was passed an empty witness)', wit_ids[NODE_2][WIT_V1][2], sign=False)
- self.fail_accept(self.nodes[2], 'non-mandatory-script-verify-flag (Witness program hash mismatch)', p2sh_ids[NODE_2][WIT_V0][2], sign=False, redeem_script=witness_script(False, self.pubkey[2]))
- self.fail_accept(self.nodes[2], 'non-mandatory-script-verify-flag (Witness program was passed an empty witness)', p2sh_ids[NODE_2][WIT_V1][2], sign=False, redeem_script=witness_script(True, self.pubkey[2]))
+ self.fail_accept(self.nodes[2], 'non-mandatory-script-verify-flag (Witness program hash mismatch)', wit_ids[NODE_2][P2WPKH][2], sign=False)
+ self.fail_accept(self.nodes[2], 'non-mandatory-script-verify-flag (Witness program was passed an empty witness)', wit_ids[NODE_2][P2WSH][2], sign=False)
+ self.fail_accept(self.nodes[2], 'non-mandatory-script-verify-flag (Witness program hash mismatch)', p2sh_ids[NODE_2][P2WPKH][2], sign=False, redeem_script=witness_script(False, self.pubkey[2]))
+ self.fail_accept(self.nodes[2], 'non-mandatory-script-verify-flag (Witness program was passed an empty witness)', p2sh_ids[NODE_2][P2WSH][2], sign=False, redeem_script=witness_script(True, self.pubkey[2]))
self.log.info("Verify default node can now use witness txs")
- self.success_mine(self.nodes[0], wit_ids[NODE_0][WIT_V0][0], True) # block 432
- self.success_mine(self.nodes[0], wit_ids[NODE_0][WIT_V1][0], True) # block 433
- self.success_mine(self.nodes[0], p2sh_ids[NODE_0][WIT_V0][0], True) # block 434
- self.success_mine(self.nodes[0], p2sh_ids[NODE_0][WIT_V1][0], True) # block 435
+ self.success_mine(self.nodes[0], wit_ids[NODE_0][P2WPKH][0], True) # block 432
+ self.success_mine(self.nodes[0], wit_ids[NODE_0][P2WSH][0], True) # block 433
+ self.success_mine(self.nodes[0], p2sh_ids[NODE_0][P2WPKH][0], True) # block 434
+ self.success_mine(self.nodes[0], p2sh_ids[NODE_0][P2WSH][0], True) # block 435
self.log.info("Verify sigops are counted in GBT with BIP141 rules after the fork")
txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1)
@@ -213,6 +217,7 @@ class SegWitTest(BitcoinTestFramework):
assert tmpl['sigoplimit'] == 80000
assert tmpl['transactions'][0]['txid'] == txid
assert tmpl['transactions'][0]['sigops'] == 8
+ assert '!segwit' in tmpl['rules']
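+ # A '!'-prefixed rule in getblocktemplate is one the template consumer must support
+ # (BIP 9 semantics), so '!segwit' indicates segwit is enforced for this template.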
self.nodes[0].generate(1) # Mine a block to clear the gbt cache
@@ -554,8 +559,7 @@ class SegWitTest(BitcoinTestFramework):
assert_equal(self.nodes[1].listtransactions("*", 1, 0, True)[0]["txid"], txid)
# Assert it is properly saved
- self.stop_node(1)
- self.start_node(1)
+ self.restart_node(1)
assert_equal(self.nodes[1].gettransaction(txid, True)["txid"], txid)
assert_equal(self.nodes[1].listtransactions("*", 1, 0, True)[0]["txid"], txid)
diff --git a/test/functional/framework_test_script.py b/test/functional/framework_test_script.py
deleted file mode 100755
index 9d916c0022..0000000000
--- a/test/functional/framework_test_script.py
+++ /dev/null
@@ -1,44 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (c) 2020 The Bitcoin Core developers
-# Distributed under the MIT software license, see the accompanying
-# file COPYING or http://www.opensource.org/licenses/mit-license.php.
-"""Tests for test_framework.script."""
-
-from test_framework.test_framework import BitcoinTestFramework
-from test_framework.script import bn2vch
-from test_framework.util import assert_equal
-
-def test_bn2vch():
- assert_equal(bn2vch(0), bytes([]))
- assert_equal(bn2vch(1), bytes([0x01]))
- assert_equal(bn2vch(-1), bytes([0x81]))
- assert_equal(bn2vch(0x7F), bytes([0x7F]))
- assert_equal(bn2vch(-0x7F), bytes([0xFF]))
- assert_equal(bn2vch(0x80), bytes([0x80, 0x00]))
- assert_equal(bn2vch(-0x80), bytes([0x80, 0x80]))
- assert_equal(bn2vch(0xFF), bytes([0xFF, 0x00]))
- assert_equal(bn2vch(-0xFF), bytes([0xFF, 0x80]))
- assert_equal(bn2vch(0x100), bytes([0x00, 0x01]))
- assert_equal(bn2vch(-0x100), bytes([0x00, 0x81]))
- assert_equal(bn2vch(0x7FFF), bytes([0xFF, 0x7F]))
- assert_equal(bn2vch(-0x8000), bytes([0x00, 0x80, 0x80]))
- assert_equal(bn2vch(-0x7FFFFF), bytes([0xFF, 0xFF, 0xFF]))
- assert_equal(bn2vch(0x80000000), bytes([0x00, 0x00, 0x00, 0x80, 0x00]))
- assert_equal(bn2vch(-0x80000000), bytes([0x00, 0x00, 0x00, 0x80, 0x80]))
- assert_equal(bn2vch(0xFFFFFFFF), bytes([0xFF, 0xFF, 0xFF, 0xFF, 0x00]))
-
- assert_equal(bn2vch(123456789), bytes([0x15, 0xCD, 0x5B, 0x07]))
- assert_equal(bn2vch(-54321), bytes([0x31, 0xD4, 0x80]))
-
-class FrameworkTestScript(BitcoinTestFramework):
- def setup_network(self):
- pass
-
- def set_test_params(self):
- self.num_nodes = 0
-
- def run_test(self):
- test_bn2vch()
-
-if __name__ == '__main__':
- FrameworkTestScript().main()
diff --git a/test/functional/interface_bitcoin_cli.py b/test/functional/interface_bitcoin_cli.py
index 2e1e84b707..80003aca0d 100755
--- a/test/functional/interface_bitcoin_cli.py
+++ b/test/functional/interface_bitcoin_cli.py
@@ -1,10 +1,17 @@
#!/usr/bin/env python3
-# Copyright (c) 2017-2019 The Bitcoin Core developers
+# Copyright (c) 2017-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test bitcoin-cli"""
+
+from decimal import Decimal
from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import assert_equal, assert_raises_process_error, get_auth_cookie
+from test_framework.util import (
+ assert_equal,
+ assert_raises_process_error,
+ assert_raises_rpc_error,
+ get_auth_cookie,
+)
# The block reward of coinbaseoutput.nValue (50) BTC/block matures after
# COINBASE_MATURITY (100) blocks. Therefore, after mining 101 blocks we expect
@@ -12,8 +19,13 @@ from test_framework.util import assert_equal, assert_raises_process_error, get_a
BLOCKS = 101
BALANCE = (BLOCKS - 100) * 50
-class TestBitcoinCli(BitcoinTestFramework):
+JSON_PARSING_ERROR = 'error: Error parsing JSON: foo'
+BLOCKS_VALUE_OF_ZERO = 'error: the first argument (number of blocks to generate, default: 1) must be an integer value greater than zero'
+TOO_MANY_ARGS = 'error: too many arguments (maximum 2 for nblocks and maxtries)'
+WALLET_NOT_LOADED = 'Requested wallet does not exist or is not loaded'
+WALLET_NOT_SPECIFIED = 'Wallet file not specified'
+class TestBitcoinCli(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
@@ -25,15 +37,6 @@ class TestBitcoinCli(BitcoinTestFramework):
"""Main test logic"""
self.nodes[0].generate(BLOCKS)
- cli_response = self.nodes[0].cli("-version").send_cli()
- assert "{} RPC client version".format(self.config['environment']['PACKAGE_NAME']) in cli_response
-
- self.log.info("Compare responses from getwalletinfo RPC and `bitcoin-cli getwalletinfo`")
- if self.is_wallet_compiled():
- cli_response = self.nodes[0].cli.getwalletinfo()
- rpc_response = self.nodes[0].getwalletinfo()
- assert_equal(cli_response, rpc_response)
-
self.log.info("Compare responses from getblockchaininfo RPC and `bitcoin-cli getblockchaininfo`")
cli_response = self.nodes[0].cli.getblockchaininfo()
rpc_response = self.nodes[0].getblockchaininfo()
@@ -42,12 +45,12 @@ class TestBitcoinCli(BitcoinTestFramework):
user, password = get_auth_cookie(self.nodes[0].datadir, self.chain)
self.log.info("Test -stdinrpcpass option")
- assert_equal(BLOCKS, self.nodes[0].cli('-rpcuser=%s' % user, '-stdinrpcpass', input=password).getblockcount())
- assert_raises_process_error(1, "Incorrect rpcuser or rpcpassword", self.nodes[0].cli('-rpcuser=%s' % user, '-stdinrpcpass', input="foo").echo)
+ assert_equal(BLOCKS, self.nodes[0].cli('-rpcuser={}'.format(user), '-stdinrpcpass', input=password).getblockcount())
+ assert_raises_process_error(1, 'Incorrect rpcuser or rpcpassword', self.nodes[0].cli('-rpcuser={}'.format(user), '-stdinrpcpass', input='foo').echo)
self.log.info("Test -stdin and -stdinrpcpass")
- assert_equal(["foo", "bar"], self.nodes[0].cli('-rpcuser=%s' % user, '-stdin', '-stdinrpcpass', input=password + "\nfoo\nbar").echo())
- assert_raises_process_error(1, "Incorrect rpcuser or rpcpassword", self.nodes[0].cli('-rpcuser=%s' % user, '-stdin', '-stdinrpcpass', input="foo").echo)
+ assert_equal(['foo', 'bar'], self.nodes[0].cli('-rpcuser={}'.format(user), '-stdin', '-stdinrpcpass', input=password + '\nfoo\nbar').echo())
+ assert_raises_process_error(1, 'Incorrect rpcuser or rpcpassword', self.nodes[0].cli('-rpcuser={}'.format(user), '-stdin', '-stdinrpcpass', input='foo').echo)
self.log.info("Test connecting to a non-existing server")
assert_raises_process_error(1, "Could not connect to the server", self.nodes[0].cli('-rpcport=1').echo)
@@ -55,31 +58,184 @@ class TestBitcoinCli(BitcoinTestFramework):
self.log.info("Test connecting with non-existing RPC cookie file")
assert_raises_process_error(1, "Could not locate RPC credentials", self.nodes[0].cli('-rpccookiefile=does-not-exist', '-rpcpassword=').echo)
- self.log.info("Make sure that -getinfo with arguments fails")
+ self.log.info("Test -getinfo with arguments fails")
assert_raises_process_error(1, "-getinfo takes no arguments", self.nodes[0].cli('-getinfo').help)
- self.log.info("Test that -getinfo returns the expected network and blockchain info")
+ self.log.info("Test -getinfo returns expected network and blockchain info")
+ if self.is_wallet_compiled():
+ self.nodes[0].encryptwallet(password)
cli_get_info = self.nodes[0].cli('-getinfo').send_cli()
network_info = self.nodes[0].getnetworkinfo()
blockchain_info = self.nodes[0].getblockchaininfo()
-
assert_equal(cli_get_info['version'], network_info['version'])
assert_equal(cli_get_info['blocks'], blockchain_info['blocks'])
+ assert_equal(cli_get_info['headers'], blockchain_info['headers'])
assert_equal(cli_get_info['timeoffset'], network_info['timeoffset'])
assert_equal(cli_get_info['connections'], network_info['connections'])
assert_equal(cli_get_info['proxy'], network_info['networks'][0]['proxy'])
assert_equal(cli_get_info['difficulty'], blockchain_info['difficulty'])
assert_equal(cli_get_info['chain'], blockchain_info['chain'])
+
if self.is_wallet_compiled():
- self.log.info("Test that -getinfo returns the expected wallet info")
+ self.log.info("Test -getinfo and bitcoin-cli getwalletinfo return expected wallet info")
assert_equal(cli_get_info['balance'], BALANCE)
+ assert 'balances' not in cli_get_info.keys()
wallet_info = self.nodes[0].getwalletinfo()
assert_equal(cli_get_info['keypoolsize'], wallet_info['keypoolsize'])
+ assert_equal(cli_get_info['unlocked_until'], wallet_info['unlocked_until'])
assert_equal(cli_get_info['paytxfee'], wallet_info['paytxfee'])
assert_equal(cli_get_info['relayfee'], network_info['relayfee'])
- # unlocked_until is not tested because the wallet is not encrypted
+ assert_equal(self.nodes[0].cli.getwalletinfo(), wallet_info)
+
+ # Setup to test -getinfo, -generate, and -rpcwallet= with multiple wallets.
+ wallets = ['', 'Encrypted', 'secret']
+ amounts = [BALANCE + Decimal('9.999928'), Decimal(9), Decimal(31)]
+ self.nodes[0].createwallet(wallet_name=wallets[1])
+ self.nodes[0].createwallet(wallet_name=wallets[2])
+ w1 = self.nodes[0].get_wallet_rpc(wallets[0])
+ w2 = self.nodes[0].get_wallet_rpc(wallets[1])
+ w3 = self.nodes[0].get_wallet_rpc(wallets[2])
+ rpcwallet2 = '-rpcwallet={}'.format(wallets[1])
+ rpcwallet3 = '-rpcwallet={}'.format(wallets[2])
+ w1.walletpassphrase(password, self.rpc_timeout)
+ w2.encryptwallet(password)
+ w1.sendtoaddress(w2.getnewaddress(), amounts[1])
+ w1.sendtoaddress(w3.getnewaddress(), amounts[2])
+
+ # Mine a block to confirm; adds a block reward (50 BTC) to the default wallet.
+ self.nodes[0].generate(1)
+
+ self.log.info("Test -getinfo with multiple wallets and -rpcwallet returns specified wallet balance")
+ for i in range(len(wallets)):
+ cli_get_info = self.nodes[0].cli('-getinfo', '-rpcwallet={}'.format(wallets[i])).send_cli()
+ assert 'balances' not in cli_get_info.keys()
+ assert_equal(cli_get_info['balance'], amounts[i])
+
+ self.log.info("Test -getinfo with multiple wallets and -rpcwallet=non-existing-wallet returns no balances")
+ cli_get_info_keys = self.nodes[0].cli('-getinfo', '-rpcwallet=does-not-exist').send_cli().keys()
+ assert 'balance' not in cli_get_info_keys
+ assert 'balances' not in cli_get_info_keys
+
+ self.log.info("Test -getinfo with multiple wallets returns all loaded wallet names and balances")
+ assert_equal(set(self.nodes[0].listwallets()), set(wallets))
+ cli_get_info = self.nodes[0].cli('-getinfo').send_cli()
+ assert 'balance' not in cli_get_info.keys()
+ assert_equal(cli_get_info['balances'], {k: v for k, v in zip(wallets, amounts)})
+
+ # Unload the default wallet and re-verify.
+ self.nodes[0].unloadwallet(wallets[0])
+ assert wallets[0] not in self.nodes[0].listwallets()
+ cli_get_info = self.nodes[0].cli('-getinfo').send_cli()
+ assert 'balance' not in cli_get_info.keys()
+ assert_equal(cli_get_info['balances'], {k: v for k, v in zip(wallets[1:], amounts[1:])})
+
+ self.log.info("Test -getinfo after unloading all wallets except a non-default one returns its balance")
+ self.nodes[0].unloadwallet(wallets[2])
+ assert_equal(self.nodes[0].listwallets(), [wallets[1]])
+ cli_get_info = self.nodes[0].cli('-getinfo').send_cli()
+ assert 'balances' not in cli_get_info.keys()
+ assert_equal(cli_get_info['balance'], amounts[1])
+
+ self.log.info("Test -getinfo with -rpcwallet=remaining-non-default-wallet returns only its balance")
+ cli_get_info = self.nodes[0].cli('-getinfo', rpcwallet2).send_cli()
+ assert 'balances' not in cli_get_info.keys()
+ assert_equal(cli_get_info['balance'], amounts[1])
+
+ self.log.info("Test -getinfo with -rpcwallet=unloaded wallet returns no balances")
+ cli_get_info = self.nodes[0].cli('-getinfo', rpcwallet3).send_cli()
+ assert 'balance' not in cli_get_info.keys()
+ assert 'balances' not in cli_get_info.keys()
+
+ # Test bitcoin-cli -generate.
+ n1 = 3
+ n2 = 4
+ w2.walletpassphrase(password, self.rpc_timeout)
+ blocks = self.nodes[0].getblockcount()
+
+ self.log.info('Test -generate with no args')
+ generate = self.nodes[0].cli('-generate').send_cli()
+ assert_equal(set(generate.keys()), {'address', 'blocks'})
+ assert_equal(len(generate["blocks"]), 1)
+ assert_equal(self.nodes[0].getblockcount(), blocks + 1)
+
+ self.log.info('Test -generate with bad args')
+ assert_raises_process_error(1, JSON_PARSING_ERROR, self.nodes[0].cli('-generate', 'foo').echo)
+ assert_raises_process_error(1, BLOCKS_VALUE_OF_ZERO, self.nodes[0].cli('-generate', 0).echo)
+ assert_raises_process_error(1, TOO_MANY_ARGS, self.nodes[0].cli('-generate', 1, 2, 3).echo)
+
+ self.log.info('Test -generate with nblocks')
+ generate = self.nodes[0].cli('-generate', n1).send_cli()
+ assert_equal(set(generate.keys()), {'address', 'blocks'})
+ assert_equal(len(generate["blocks"]), n1)
+ assert_equal(self.nodes[0].getblockcount(), blocks + 1 + n1)
+
+ self.log.info('Test -generate with nblocks and maxtries')
+ generate = self.nodes[0].cli('-generate', n2, 1000000).send_cli()
+ assert_equal(set(generate.keys()), {'address', 'blocks'})
+ assert_equal(len(generate["blocks"]), n2)
+ assert_equal(self.nodes[0].getblockcount(), blocks + 1 + n1 + n2)
+
+ self.log.info('Test -generate -rpcwallet in single-wallet mode')
+ generate = self.nodes[0].cli(rpcwallet2, '-generate').send_cli()
+ assert_equal(set(generate.keys()), {'address', 'blocks'})
+ assert_equal(len(generate["blocks"]), 1)
+ assert_equal(self.nodes[0].getblockcount(), blocks + 2 + n1 + n2)
+
+ self.log.info('Test -generate -rpcwallet=unloaded wallet raises RPC error')
+ assert_raises_rpc_error(-18, WALLET_NOT_LOADED, self.nodes[0].cli(rpcwallet3, '-generate').echo)
+ assert_raises_rpc_error(-18, WALLET_NOT_LOADED, self.nodes[0].cli(rpcwallet3, '-generate', 'foo').echo)
+ assert_raises_rpc_error(-18, WALLET_NOT_LOADED, self.nodes[0].cli(rpcwallet3, '-generate', 0).echo)
+ assert_raises_rpc_error(-18, WALLET_NOT_LOADED, self.nodes[0].cli(rpcwallet3, '-generate', 1, 2, 3).echo)
+
+ # Test bitcoin-cli -generate with -rpcwallet in multiwallet mode.
+ self.nodes[0].loadwallet(wallets[2])
+ n3 = 4
+ n4 = 10
+ blocks = self.nodes[0].getblockcount()
+
+ self.log.info('Test -generate -rpcwallet with no args')
+ generate = self.nodes[0].cli(rpcwallet2, '-generate').send_cli()
+ assert_equal(set(generate.keys()), {'address', 'blocks'})
+ assert_equal(len(generate["blocks"]), 1)
+ assert_equal(self.nodes[0].getblockcount(), blocks + 1)
+
+ self.log.info('Test -generate -rpcwallet with bad args')
+ assert_raises_process_error(1, JSON_PARSING_ERROR, self.nodes[0].cli(rpcwallet2, '-generate', 'foo').echo)
+ assert_raises_process_error(1, BLOCKS_VALUE_OF_ZERO, self.nodes[0].cli(rpcwallet2, '-generate', 0).echo)
+ assert_raises_process_error(1, TOO_MANY_ARGS, self.nodes[0].cli(rpcwallet2, '-generate', 1, 2, 3).echo)
+
+ self.log.info('Test -generate -rpcwallet with nblocks')
+ generate = self.nodes[0].cli(rpcwallet2, '-generate', n3).send_cli()
+ assert_equal(set(generate.keys()), {'address', 'blocks'})
+ assert_equal(len(generate["blocks"]), n3)
+ assert_equal(self.nodes[0].getblockcount(), blocks + 1 + n3)
+
+ self.log.info('Test -generate -rpcwallet with nblocks and maxtries')
+ generate = self.nodes[0].cli(rpcwallet2, '-generate', n4, 1000000).send_cli()
+ assert_equal(set(generate.keys()), {'address', 'blocks'})
+ assert_equal(len(generate["blocks"]), n4)
+ assert_equal(self.nodes[0].getblockcount(), blocks + 1 + n3 + n4)
+
+ self.log.info('Test -generate without -rpcwallet in multiwallet mode raises RPC error')
+ assert_raises_rpc_error(-19, WALLET_NOT_SPECIFIED, self.nodes[0].cli('-generate').echo)
+ assert_raises_rpc_error(-19, WALLET_NOT_SPECIFIED, self.nodes[0].cli('-generate', 'foo').echo)
+ assert_raises_rpc_error(-19, WALLET_NOT_SPECIFIED, self.nodes[0].cli('-generate', 0).echo)
+ assert_raises_rpc_error(-19, WALLET_NOT_SPECIFIED, self.nodes[0].cli('-generate', 1, 2, 3).echo)
else:
- self.log.info("*** Wallet not compiled; -getinfo wallet tests skipped")
+ self.log.info("*** Wallet not compiled; cli getwalletinfo and -getinfo wallet tests skipped")
+ self.nodes[0].generate(25) # maintain block parity with the wallet_compiled conditional branch
+
+ self.log.info("Test -version with node stopped")
+ self.stop_node(0)
+ cli_response = self.nodes[0].cli('-version').send_cli()
+ assert "{} RPC client version".format(self.config['environment']['PACKAGE_NAME']) in cli_response
+
+ self.log.info("Test -rpcwait option successfully waits for RPC connection")
+ self.nodes[0].start() # start node without RPC connection
+ self.nodes[0].wait_for_cookie_credentials() # ensure cookie file is available to avoid race condition
+ blocks = self.nodes[0].cli('-rpcwait').send_cli('getblockcount')
+ self.nodes[0].wait_for_rpc_connection()
+ assert_equal(blocks, BLOCKS + 25)
if __name__ == '__main__':
diff --git a/test/functional/interface_rpc.py b/test/functional/interface_rpc.py
index 3b3b5bb0c1..ac2e73fc5c 100755
--- a/test/functional/interface_rpc.py
+++ b/test/functional/interface_rpc.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2018-2019 The Bitcoin Core developers
+# Copyright (c) 2018-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Tests some generic aspects of the RPC interface."""
diff --git a/test/functional/mempool_accept.py b/test/functional/mempool_accept.py
index 9ade22a7eb..6df8f1c3ec 100755
--- a/test/functional/mempool_accept.py
+++ b/test/functional/mempool_accept.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2017-2019 The Bitcoin Core developers
+# Copyright (c) 2017-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test mempool acceptance of raw transactions."""
diff --git a/test/functional/mempool_compatibility.py b/test/functional/mempool_compatibility.py
new file mode 100755
index 0000000000..999399dec0
--- /dev/null
+++ b/test/functional/mempool_compatibility.py
@@ -0,0 +1,75 @@
+#!/usr/bin/env python3
+# Copyright (c) 2017-2020 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test that mempool.dat is both backward and forward compatible between versions
+
+NOTE: The test is designed to catch cases where compatibility is accidentally broken.
+If we ever need to break mempool compatibility deliberately, the test can be kept by simply bumping the version number.
+
+Download node binaries:
+contrib/devtools/previous_release.sh -b v0.19.1 v0.18.1 v0.17.1 v0.16.3 v0.15.2
+
+Only v0.15.2 is required by this test. The rest is used in other backwards compatibility tests.
+"""
+
+import os
+
+from test_framework.test_framework import BitcoinTestFramework
+
+
+class MempoolCompatibilityTest(BitcoinTestFramework):
+ def set_test_params(self):
+ self.num_nodes = 2
+
+ def skip_test_if_missing_module(self):
+ self.skip_if_no_wallet()
+ self.skip_if_no_previous_releases()
+
+ def setup_network(self):
+ self.add_nodes(self.num_nodes, versions=[
+ 150200, # oldest version supported by the test framework
+ None,
+ ])
+ self.start_nodes()
+ self.import_deterministic_coinbase_privkeys()
+
+ def run_test(self):
+ self.log.info("Test that mempool.dat is compatible between versions")
+
+ old_node = self.nodes[0]
+ new_node = self.nodes[1]
+ recipient = old_node.getnewaddress()
+ self.stop_node(1)
+
+ self.log.info("Add a transaction to mempool on old node and shutdown")
+ old_tx_hash = old_node.sendtoaddress(recipient, 0.0001)
+ assert old_tx_hash in old_node.getrawmempool()
+ self.stop_node(0)
+
+ self.log.info("Move mempool.dat from old to new node")
+ old_node_mempool = os.path.join(old_node.datadir, self.chain, 'mempool.dat')
+ new_node_mempool = os.path.join(new_node.datadir, self.chain, 'mempool.dat')
+ os.rename(old_node_mempool, new_node_mempool)
+
+ self.log.info("Start new node and verify mempool contains the tx")
+ self.start_node(1)
+ assert old_tx_hash in new_node.getrawmempool()
+
+ self.log.info("Add unbroadcasted tx to mempool on new node and shutdown")
+ unbroadcasted_tx_hash = new_node.sendtoaddress(recipient, 0.0001)
+ assert unbroadcasted_tx_hash in new_node.getrawmempool()
+ mempool = new_node.getrawmempool(True)
+ assert mempool[unbroadcasted_tx_hash]['unbroadcast']
+ self.stop_node(1)
+
+ self.log.info("Move mempool.dat from new to old node")
+ os.rename(new_node_mempool, old_node_mempool)
+
+ self.log.info("Start old node again and verify mempool contains both txs")
+ self.start_node(0, ['-nowallet'])
+ assert old_tx_hash in old_node.getrawmempool()
+ assert unbroadcasted_tx_hash in old_node.getrawmempool()
+
+if __name__ == "__main__":
+ MempoolCompatibilityTest().main()
diff --git a/test/functional/mempool_packages.py b/test/functional/mempool_packages.py
index a07dad18d6..5b7216b253 100755
--- a/test/functional/mempool_packages.py
+++ b/test/functional/mempool_packages.py
@@ -7,6 +7,7 @@
from decimal import Decimal
from test_framework.messages import COIN
+from test_framework.mininode import P2PTxInvStore
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
@@ -58,6 +59,7 @@ class MempoolPackagesTest(BitcoinTestFramework):
def run_test(self):
# Mine some blocks and have them mature.
+ self.nodes[0].add_p2p_connection(P2PTxInvStore()) # keep track of invs
self.nodes[0].generate(101)
utxo = self.nodes[0].listunspent(10)
txid = utxo[0]['txid']
@@ -72,6 +74,10 @@ class MempoolPackagesTest(BitcoinTestFramework):
value = sent_value
chain.append(txid)
+ # Wait until mempool transactions have passed initial broadcast (sent inv and received getdata)
+ # Otherwise, getrawmempool may be inconsistent with getmempoolentry if unbroadcast changes in between
+ self.nodes[0].p2p.wait_for_broadcast(chain)
+
# Check mempool has MAX_ANCESTORS transactions in it, and descendant and ancestor
# count and fees should look correct
mempool = self.nodes[0].getrawmempool(True)
@@ -212,6 +218,10 @@ class MempoolPackagesTest(BitcoinTestFramework):
for tx in chain[:MAX_ANCESTORS_CUSTOM]:
assert tx in mempool1
# TODO: more detailed check of node1's mempool (fees etc.)
+ # check transaction unbroadcast info (should be false if in both mempools)
+ mempool = self.nodes[0].getrawmempool(True)
+ for tx in mempool:
+ assert_equal(mempool[tx]['unbroadcast'], False)
# TODO: test ancestor size limits
diff --git a/test/functional/mempool_persist.py b/test/functional/mempool_persist.py
index d3690b245e..5d00648aed 100755
--- a/test/functional/mempool_persist.py
+++ b/test/functional/mempool_persist.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2014-2019 The Bitcoin Core developers
+# Copyright (c) 2014-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test mempool persistence.
@@ -40,10 +40,13 @@ import os
import time
from test_framework.test_framework import BitcoinTestFramework
+from test_framework.mininode import P2PTxInvStore
from test_framework.util import (
assert_equal,
assert_greater_than_or_equal,
assert_raises_rpc_error,
+ connect_nodes,
+ disconnect_nodes,
wait_until,
)
@@ -80,6 +83,13 @@ class MempoolPersistTest(BitcoinTestFramework):
assert_greater_than_or_equal(tx_creation_time, tx_creation_time_lower)
assert_greater_than_or_equal(tx_creation_time_higher, tx_creation_time)
+ # disconnect nodes & make a txn that remains in the unbroadcast set.
+ disconnect_nodes(self.nodes[0], 1)
+ assert len(self.nodes[0].getpeerinfo()) == 0
+ assert len(self.nodes[0].p2ps) == 0
+ self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), Decimal("12"))
+ connect_nodes(self.nodes[0], 2)
+
self.log.debug("Stop-start the nodes. Verify that node0 has the transactions in its mempool and node1 does not. Verify that node2 calculates its balance correctly after loading wallet transactions.")
self.stop_nodes()
# Give this node a head-start, so we can be "extra-sure" that it didn't load anything later
@@ -87,9 +97,9 @@ class MempoolPersistTest(BitcoinTestFramework):
self.start_node(1, extra_args=["-persistmempool=0"])
self.start_node(0)
self.start_node(2)
- wait_until(lambda: self.nodes[0].getmempoolinfo()["loaded"], timeout=1)
- wait_until(lambda: self.nodes[2].getmempoolinfo()["loaded"], timeout=1)
- assert_equal(len(self.nodes[0].getrawmempool()), 5)
+ assert self.nodes[0].getmempoolinfo()["loaded"] # start_node is blocking on the mempool being loaded
+ assert self.nodes[2].getmempoolinfo()["loaded"]
+ assert_equal(len(self.nodes[0].getrawmempool()), 6)
assert_equal(len(self.nodes[2].getrawmempool()), 5)
# The others have loaded their mempool. If node_1 loaded anything, we'd probably notice by now:
assert_equal(len(self.nodes[1].getrawmempool()), 0)
@@ -105,17 +115,18 @@ class MempoolPersistTest(BitcoinTestFramework):
self.nodes[2].syncwithvalidationinterfacequeue() # Flush mempool to wallet
assert_equal(node2_balance, self.nodes[2].getbalance())
+ # start node0 with wallet disabled so wallet transactions don't get resubmitted
self.log.debug("Stop-start node0 with -persistmempool=0. Verify that it doesn't load its mempool.dat file.")
self.stop_nodes()
- self.start_node(0, extra_args=["-persistmempool=0"])
- wait_until(lambda: self.nodes[0].getmempoolinfo()["loaded"])
+ self.start_node(0, extra_args=["-persistmempool=0", "-disablewallet"])
+ assert self.nodes[0].getmempoolinfo()["loaded"]
assert_equal(len(self.nodes[0].getrawmempool()), 0)
self.log.debug("Stop-start node0. Verify that it has the transactions in its mempool.")
self.stop_nodes()
self.start_node(0)
- wait_until(lambda: self.nodes[0].getmempoolinfo()["loaded"])
- assert_equal(len(self.nodes[0].getrawmempool()), 5)
+ assert self.nodes[0].getmempoolinfo()["loaded"]
+ assert_equal(len(self.nodes[0].getrawmempool()), 6)
mempooldat0 = os.path.join(self.nodes[0].datadir, self.chain, 'mempool.dat')
mempooldat1 = os.path.join(self.nodes[1].datadir, self.chain, 'mempool.dat')
@@ -124,12 +135,12 @@ class MempoolPersistTest(BitcoinTestFramework):
self.nodes[0].savemempool()
assert os.path.isfile(mempooldat0)
- self.log.debug("Stop nodes, make node1 use mempool.dat from node0. Verify it has 5 transactions")
+ self.log.debug("Stop nodes, make node1 use mempool.dat from node0. Verify it has 6 transactions")
os.rename(mempooldat0, mempooldat1)
self.stop_nodes()
self.start_node(1, extra_args=[])
- wait_until(lambda: self.nodes[1].getmempoolinfo()["loaded"])
- assert_equal(len(self.nodes[1].getrawmempool()), 5)
+ assert self.nodes[1].getmempoolinfo()["loaded"]
+ assert_equal(len(self.nodes[1].getrawmempool()), 6)
self.log.debug("Prevent bitcoind from writing mempool.dat to disk. Verify that `savemempool` fails")
# To test the exception, create a temporary folder called mempool.dat.new
@@ -139,6 +150,29 @@ class MempoolPersistTest(BitcoinTestFramework):
assert_raises_rpc_error(-1, "Unable to dump mempool to disk", self.nodes[1].savemempool)
os.rmdir(mempooldotnew1)
+ self.test_persist_unbroadcast()
+
+ def test_persist_unbroadcast(self):
+ node0 = self.nodes[0]
+ self.start_node(0)
+
+ # clear out mempool
+ node0.generate(1)
+
+ # ensure node0 doesn't have any connections
+ # make a transaction that will remain in the unbroadcast set
+ assert len(node0.getpeerinfo()) == 0
+ assert len(node0.p2ps) == 0
+ node0.sendtoaddress(self.nodes[1].getnewaddress(), Decimal("12"))
+
+ # shutdown, then startup with wallet disabled
+ self.stop_nodes()
+ self.start_node(0, extra_args=["-disablewallet"])
+
+ # check that txn gets broadcast due to unbroadcast logic
+ conn = node0.add_p2p_connection(P2PTxInvStore())
+ node0.mockscheduler(16*60) # 15 min + 1 for buffer
+ wait_until(lambda: len(conn.get_invs()) == 1)
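+ # P2PTxInvStore records the tx invs announced to it, so a single stored inv here means
+ # the unbroadcast transaction was re-announced after the restart.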
if __name__ == '__main__':
MempoolPersistTest().main()
diff --git a/test/functional/mempool_reorg.py b/test/functional/mempool_reorg.py
index d797dff134..8e1f87e42c 100755
--- a/test/functional/mempool_reorg.py
+++ b/test/functional/mempool_reorg.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2014-2018 The Bitcoin Core developers
+# Copyright (c) 2014-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test mempool re-org scenarios.
@@ -16,12 +16,16 @@ from test_framework.util import assert_equal, assert_raises_rpc_error
class MempoolCoinbaseTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 2
+ self.extra_args = [
+ [
+ '-whitelist=noban@127.0.0.1', # immediate tx relay
+ ],
+ []
+ ]
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
- alert_filename = None # Set by setup_network
-
def run_test(self):
# Start with a 200 block chain
assert_equal(self.nodes[0].getblockcount(), 200)
@@ -40,17 +44,21 @@ class MempoolCoinbaseTest(BitcoinTestFramework):
# 3. Indirect (coinbase and child both in chain) : spend_103 and spend_103_1
# Use invalidateblock to make all of the above coinbase spends invalid (immature coinbase),
# and make sure the mempool code behaves correctly.
- b = [ self.nodes[0].getblockhash(n) for n in range(101, 105) ]
- coinbase_txids = [ self.nodes[0].getblock(h)['tx'][0] for h in b ]
+ b = [self.nodes[0].getblockhash(n) for n in range(101, 105)]
+ coinbase_txids = [self.nodes[0].getblock(h)['tx'][0] for h in b]
spend_101_raw = create_raw_transaction(self.nodes[0], coinbase_txids[1], node1_address, amount=49.99)
spend_102_raw = create_raw_transaction(self.nodes[0], coinbase_txids[2], node0_address, amount=49.99)
spend_103_raw = create_raw_transaction(self.nodes[0], coinbase_txids[3], node0_address, amount=49.99)
# Create a transaction which is time-locked to two blocks in the future
- timelock_tx = self.nodes[0].createrawtransaction([{"txid": coinbase_txids[0], "vout": 0}], {node0_address: 49.99})
- # Set the time lock
- timelock_tx = timelock_tx.replace("ffffffff", "11111191", 1)
- timelock_tx = timelock_tx[:-8] + hex(self.nodes[0].getblockcount() + 2)[2:] + "000000"
+ timelock_tx = self.nodes[0].createrawtransaction(
+ inputs=[{
+ "txid": coinbase_txids[0],
+ "vout": 0,
+ }],
+ outputs={node0_address: 49.99},
+ locktime=self.nodes[0].getblockcount() + 2,
+ )
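+ # Note: with a non-zero locktime, createrawtransaction selects a non-final nSequence
+ # for the inputs, so the locktime is actually enforced (the old code patched the
+ # sequence bytes by hand for the same reason).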
timelock_tx = self.nodes[0].signrawtransactionwithwallet(timelock_tx)["hex"]
# This will raise an exception because the timelock transaction is too immature to spend
assert_raises_rpc_error(-26, "non-final", self.nodes[0].sendrawtransaction, timelock_tx)
@@ -69,6 +77,10 @@ class MempoolCoinbaseTest(BitcoinTestFramework):
# Broadcast and mine 103_1:
spend_103_1_id = self.nodes[0].sendrawtransaction(spend_103_1_raw)
last_block = self.nodes[0].generate(1)
+ # Sync blocks, so that peer 1 gets the block before timelock_tx
+ # Otherwise, peer 1 would put the timelock_tx in recentRejects
+ self.sync_all()
+
# Time-locked transaction can now be spent
timelock_tx_id = self.nodes[0].sendrawtransaction(timelock_tx)
@@ -76,9 +88,8 @@ class MempoolCoinbaseTest(BitcoinTestFramework):
spend_101_id = self.nodes[0].sendrawtransaction(spend_101_raw)
spend_102_1_id = self.nodes[0].sendrawtransaction(spend_102_1_raw)
- self.sync_all(timeout=720)
-
assert_equal(set(self.nodes[0].getrawmempool()), {spend_101_id, spend_102_1_id, timelock_tx_id})
+ self.sync_all()
for node in self.nodes:
node.invalidateblock(last_block[0])
@@ -91,10 +102,9 @@ class MempoolCoinbaseTest(BitcoinTestFramework):
for node in self.nodes:
node.invalidateblock(new_blocks[0])
- self.sync_all(timeout=720)
-
# mempool should be empty.
assert_equal(set(self.nodes[0].getrawmempool()), set())
+ self.sync_all()
if __name__ == '__main__':
diff --git a/test/functional/mempool_unbroadcast.py b/test/functional/mempool_unbroadcast.py
new file mode 100755
index 0000000000..365d011157
--- /dev/null
+++ b/test/functional/mempool_unbroadcast.py
@@ -0,0 +1,113 @@
+#!/usr/bin/env python3
+# Copyright (c) 2017-2020 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test that the mempool ensures transaction delivery by periodically sending
+to peers until a GETDATA is received."""
+
+import time
+
+from test_framework.mininode import P2PTxInvStore
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import (
+ assert_equal,
+ connect_nodes,
+ create_confirmed_utxos,
+ disconnect_nodes,
+)
+
+MAX_INITIAL_BROADCAST_DELAY = 15 * 60 # 15 minutes in seconds
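+# Rather than waiting out this delay in real time, the tests below call
+# node.mockscheduler(MAX_INITIAL_BROADCAST_DELAY) to fast-forward the node's scheduler
+# and trigger the rebroadcast attempt immediately.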
+
+class MempoolUnbroadcastTest(BitcoinTestFramework):
+ def set_test_params(self):
+ self.num_nodes = 2
+
+ def skip_test_if_missing_module(self):
+ self.skip_if_no_wallet()
+
+ def run_test(self):
+ self.test_broadcast()
+ self.test_txn_removal()
+
+ def test_broadcast(self):
+ self.log.info("Test that mempool reattempts delivery of locally submitted transaction")
+ node = self.nodes[0]
+
+ min_relay_fee = node.getnetworkinfo()["relayfee"]
+ utxos = create_confirmed_utxos(min_relay_fee, node, 10)
+
+ disconnect_nodes(node, 1)
+
+ self.log.info("Generate transactions that only node 0 knows about")
+
+ # generate a wallet txn
+ addr = node.getnewaddress()
+ wallet_tx_hsh = node.sendtoaddress(addr, 0.0001)
+
+ # generate a txn using sendrawtransaction
+ us0 = utxos.pop()
+ inputs = [{"txid": us0["txid"], "vout": us0["vout"]}]
+ outputs = {addr: 0.0001}
+ tx = node.createrawtransaction(inputs, outputs)
+ node.settxfee(min_relay_fee)
+ txF = node.fundrawtransaction(tx)
+ txFS = node.signrawtransactionwithwallet(txF["hex"])
+ rpc_tx_hsh = node.sendrawtransaction(txFS["hex"])
+
+ # check that the transactions are in the unbroadcast set via RPC
+ mempoolinfo = self.nodes[0].getmempoolinfo()
+ assert_equal(mempoolinfo['unbroadcastcount'], 2)
+ mempool = self.nodes[0].getrawmempool(True)
+ for tx in mempool:
+ assert_equal(mempool[tx]['unbroadcast'], True)
+
+ # check that second node doesn't have these two txns
+ mempool = self.nodes[1].getrawmempool()
+ assert rpc_tx_hsh not in mempool
+ assert wallet_tx_hsh not in mempool
+
+ # ensure that unbroadcast txs are persisted to mempool.dat
+ self.restart_node(0)
+
+ self.log.info("Reconnect nodes & check if they are sent to node 1")
+ connect_nodes(node, 1)
+
+ # fast forward into the future & ensure that the second node has the txns
+ node.mockscheduler(MAX_INITIAL_BROADCAST_DELAY)
+ self.sync_mempools(timeout=30)
+ mempool = self.nodes[1].getrawmempool()
+ assert rpc_tx_hsh in mempool
+ assert wallet_tx_hsh in mempool
+
+ # check that transactions are no longer in first node's unbroadcast set
+ mempool = self.nodes[0].getrawmempool(True)
+ for tx in mempool:
+ assert_equal(mempool[tx]['unbroadcast'], False)
+
+ self.log.info("Add another connection & ensure transactions aren't broadcast again")
+
+ conn = node.add_p2p_connection(P2PTxInvStore())
+ node.mockscheduler(MAX_INITIAL_BROADCAST_DELAY)
+ time.sleep(2) # allow sufficient time for a broadcast to occur, if one were going to happen
+ assert_equal(len(conn.get_invs()), 0)
+
+ disconnect_nodes(node, 1)
+ node.disconnect_p2ps()
+
+ def test_txn_removal(self):
+ self.log.info("Test that transactions removed from mempool are removed from unbroadcast set")
+ node = self.nodes[0]
+
+ # since the node doesn't have any connections, it will not receive
+ # any GETDATAs & thus the transaction will remain in the unbroadcast set.
+ addr = node.getnewaddress()
+ txhsh = node.sendtoaddress(addr, 0.0001)
+
+ # check transaction was removed from unbroadcast set due to presence in
+ # a block
+ removal_reason = "Removed {} from set of unbroadcast txns before confirmation that txn was sent out".format(txhsh)
+ with node.assert_debug_log([removal_reason]):
+ node.generate(1)
+
+if __name__ == "__main__":
+ MempoolUnbroadcastTest().main()
diff --git a/test/functional/mempool_updatefromblock.py b/test/functional/mempool_updatefromblock.py
new file mode 100755
index 0000000000..8a703ef009
--- /dev/null
+++ b/test/functional/mempool_updatefromblock.py
@@ -0,0 +1,123 @@
+#!/usr/bin/env python3
+# Copyright (c) 2020 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test mempool descendants/ancestors information update.
+
+Test mempool update of transaction descendants/ancestors information (count, size)
+when transactions have been re-added from a disconnected block to the mempool.
+"""
+import time
+
+from decimal import Decimal
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import assert_equal
+
+
+class MempoolUpdateFromBlockTest(BitcoinTestFramework):
+ def set_test_params(self):
+ self.num_nodes = 1
+ self.extra_args = [['-limitdescendantsize=1000', '-limitancestorsize=1000']]
+
+ def skip_test_if_missing_module(self):
+ self.skip_if_no_wallet()
+
+ def transaction_graph_test(self, size, n_tx_to_mine=None, start_input_txid='', end_address='', fee=Decimal("0.00100000")):
+ """Create an acyclic tournament (a type of directed graph) of transactions and use it for testing.
+
+ Keyword arguments:
+ size -- the order N of the tournament, which is equal to the number of created transactions
+ n_tx_to_mine -- the transaction counts at which a batch of transactions is mined into a block
+
+ If all of the N created transactions tx[0]..tx[N-1] reside in the mempool,
+ the following holds:
+ the tx[K] transaction:
+ - has N-K descendants (including this one), and
+ - has K+1 ancestors (including this one)
+
+ More details: https://en.wikipedia.org/wiki/Tournament_(graph_theory)
+ """
+
+ if not start_input_txid:
+ start_input_txid = self.nodes[0].getblock(self.nodes[0].getblockhash(1))['tx'][0]
+
+ if not end_address:
+ end_address = self.nodes[0].getnewaddress()
+
+ first_block_hash = ''
+ tx_id = []
+ tx_size = []
+ self.log.info('Creating {} transactions...'.format(size))
+ for i in range(0, size):
+ self.log.debug('Preparing transaction #{}...'.format(i))
+ # Prepare inputs.
+ if i == 0:
+ inputs = [{'txid': start_input_txid, 'vout': 0}]
+ inputs_value = self.nodes[0].gettxout(start_input_txid, 0)['value']
+ else:
+ inputs = []
+ inputs_value = 0
+ for j, tx in enumerate(tx_id[0:i]):
+ # Transaction tx[K] spends output K-J-1 of each previous transaction tx[J], J = 0..K-1.
+ vout = i - j - 1
+ inputs.append({'txid': tx_id[j], 'vout': vout})
+ inputs_value += self.nodes[0].gettxout(tx, vout)['value']
+
+ self.log.debug('inputs={}'.format(inputs))
+ self.log.debug('inputs_value={}'.format(inputs_value))
+
+ # Prepare outputs.
+ tx_count = i + 1
+ if tx_count < size:
+ # Transaction tx[K] is an ancestor of each of subsequent transactions tx[K+1]..tx[N-1].
+ n_outputs = size - tx_count
+ output_value = ((inputs_value - fee) / Decimal(n_outputs)).quantize(Decimal('0.00000001'))
+ outputs = {}
+ for n in range(0, n_outputs):
+ outputs[self.nodes[0].getnewaddress()] = output_value
+ else:
+ output_value = (inputs_value - fee).quantize(Decimal('0.00000001'))
+ outputs = {end_address: output_value}
+
+ self.log.debug('output_value={}'.format(output_value))
+ self.log.debug('outputs={}'.format(outputs))
+
+ # Create a new transaction.
+ unsigned_raw_tx = self.nodes[0].createrawtransaction(inputs, outputs)
+ signed_raw_tx = self.nodes[0].signrawtransactionwithwallet(unsigned_raw_tx)
+ tx_id.append(self.nodes[0].sendrawtransaction(signed_raw_tx['hex']))
+ tx_size.append(self.nodes[0].getrawmempool(True)[tx_id[-1]]['vsize'])
+
+ if tx_count in n_tx_to_mine:
+ # The created transactions are mined into blocks in batches.
+ self.log.info('The batch of {} transactions has been accepted into the mempool.'.format(len(self.nodes[0].getrawmempool())))
+ block_hash = self.nodes[0].generate(1)[0]
+ if not first_block_hash:
+ first_block_hash = block_hash
+ assert_equal(len(self.nodes[0].getrawmempool()), 0)
+ self.log.info('All of the transactions from the current batch have been mined into a block.')
+ elif tx_count == size:
+ # At the end all of the mined blocks are invalidated, and all of the created
+ # transactions should be re-added from disconnected blocks to the mempool.
+ self.log.info('The last batch of {} transactions has been accepted into the mempool.'.format(len(self.nodes[0].getrawmempool())))
+ start = time.time()
+ self.nodes[0].invalidateblock(first_block_hash)
+ end = time.time()
+ assert_equal(len(self.nodes[0].getrawmempool()), size)
+ self.log.info('All of the recently mined transactions have been re-added into the mempool in {} seconds.'.format(end - start))
+
+ self.log.info('Checking descendants/ancestors properties of all of the in-mempool transactions...')
+ for k, tx in enumerate(tx_id):
+ self.log.debug('Check transaction #{}.'.format(k))
+ assert_equal(self.nodes[0].getrawmempool(True)[tx]['descendantcount'], size - k)
+ assert_equal(self.nodes[0].getrawmempool(True)[tx]['descendantsize'], sum(tx_size[k:size]))
+ assert_equal(self.nodes[0].getrawmempool(True)[tx]['ancestorcount'], k + 1)
+ assert_equal(self.nodes[0].getrawmempool(True)[tx]['ancestorsize'], sum(tx_size[0:(k + 1)]))
+
+ def run_test(self):
+ # Keep the batch size at or below DEFAULT_ANCESTOR_LIMIT = 25 to avoid triggering a "too many unconfirmed parents" error.
+ self.transaction_graph_test(size=100, n_tx_to_mine=[25, 50, 75])
+
+
+if __name__ == '__main__':
+ MempoolUpdateFromBlockTest().main()
diff --git a/test/functional/mining_basic.py b/test/functional/mining_basic.py
index 8262e30592..63d1ccfb36 100755
--- a/test/functional/mining_basic.py
+++ b/test/functional/mining_basic.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2014-2019 The Bitcoin Core developers
+# Copyright (c) 2014-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test mining RPCs
@@ -18,24 +18,25 @@ from test_framework.blocktools import (
from test_framework.messages import (
CBlock,
CBlockHeader,
- BLOCK_HEADER_SIZE
-)
-from test_framework.mininode import (
- P2PDataStore,
+ BLOCK_HEADER_SIZE,
)
+from test_framework.mininode import P2PDataStore
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
connect_nodes,
)
-from test_framework.script import CScriptNum
def assert_template(node, block, expect, rehash=True):
if rehash:
block.hashMerkleRoot = block.calc_merkle_root()
- rsp = node.getblocktemplate(template_request={'data': block.serialize().hex(), 'mode': 'proposal', 'rules': ['segwit']})
+ rsp = node.getblocktemplate(template_request={
+ 'data': block.serialize().hex(),
+ 'mode': 'proposal',
+ 'rules': ['segwit'],
+ })
assert_equal(rsp, expect)
@@ -87,16 +88,9 @@ class MiningTest(BitcoinTestFramework):
next_height = int(tmpl["height"])
coinbase_tx = create_coinbase(height=next_height)
# sequence numbers must not be max for nLockTime to have effect
- coinbase_tx.vin[0].nSequence = 2 ** 32 - 2
+ coinbase_tx.vin[0].nSequence = 2**32 - 2
coinbase_tx.rehash()
- # round-trip the encoded bip34 block height commitment
- assert_equal(CScriptNum.decode(coinbase_tx.vin[0].scriptSig), next_height)
- # round-trip negative and multi-byte CScriptNums to catch python regression
- assert_equal(CScriptNum.decode(CScriptNum.encode(CScriptNum(1500))), 1500)
- assert_equal(CScriptNum.decode(CScriptNum.encode(CScriptNum(-1500))), -1500)
- assert_equal(CScriptNum.decode(CScriptNum.encode(CScriptNum(-1))), -1)
-
block = CBlock()
block.nVersion = tmpl["version"]
block.hashPrevBlock = int(tmpl["previousblockhash"], 16)
@@ -124,7 +118,11 @@ class MiningTest(BitcoinTestFramework):
assert_raises_rpc_error(-22, "Block does not start with a coinbase", node.submitblock, bad_block.serialize().hex())
self.log.info("getblocktemplate: Test truncated final transaction")
- assert_raises_rpc_error(-22, "Block decode failed", node.getblocktemplate, {'data': block.serialize()[:-1].hex(), 'mode': 'proposal', 'rules': ['segwit']})
+ assert_raises_rpc_error(-22, "Block decode failed", node.getblocktemplate, {
+ 'data': block.serialize()[:-1].hex(),
+ 'mode': 'proposal',
+ 'rules': ['segwit'],
+ })
self.log.info("getblocktemplate: Test duplicate transaction")
bad_block = copy.deepcopy(block)
@@ -143,7 +141,7 @@ class MiningTest(BitcoinTestFramework):
self.log.info("getblocktemplate: Test nonfinal transaction")
bad_block = copy.deepcopy(block)
- bad_block.vtx[0].nLockTime = 2 ** 32 - 1
+ bad_block.vtx[0].nLockTime = 2**32 - 1
bad_block.vtx[0].rehash()
assert_template(node, bad_block, 'bad-txns-nonfinal')
assert_submitblock(bad_block, 'bad-txns-nonfinal')
@@ -153,7 +151,11 @@ class MiningTest(BitcoinTestFramework):
bad_block_sn = bytearray(block.serialize())
assert_equal(bad_block_sn[BLOCK_HEADER_SIZE], 1)
bad_block_sn[BLOCK_HEADER_SIZE] += 1
- assert_raises_rpc_error(-22, "Block decode failed", node.getblocktemplate, {'data': bad_block_sn.hex(), 'mode': 'proposal', 'rules': ['segwit']})
+ assert_raises_rpc_error(-22, "Block decode failed", node.getblocktemplate, {
+ 'data': bad_block_sn.hex(),
+ 'mode': 'proposal',
+ 'rules': ['segwit'],
+ })
self.log.info("getblocktemplate: Test bad bits")
bad_block = copy.deepcopy(block)
@@ -168,7 +170,7 @@ class MiningTest(BitcoinTestFramework):
self.log.info("getblocktemplate: Test bad timestamps")
bad_block = copy.deepcopy(block)
- bad_block.nTime = 2 ** 31 - 1
+ bad_block.nTime = 2**31 - 1
assert_template(node, bad_block, 'time-too-new')
assert_submitblock(bad_block, 'time-too-new', 'time-too-new')
bad_block.nTime = 0
diff --git a/test/functional/p2p_addr_relay.py b/test/functional/p2p_addr_relay.py
index 6046237101..5c7e27a3a8 100755
--- a/test/functional/p2p_addr_relay.py
+++ b/test/functional/p2p_addr_relay.py
@@ -49,9 +49,9 @@ class AddrTest(BitcoinTestFramework):
addr_source = self.nodes[0].add_p2p_connection(P2PInterface())
msg = msg_addr()
- self.log.info('Send too large addr message')
+ self.log.info('Send too-large addr message')
msg.addrs = ADDRS * 101
- with self.nodes[0].assert_debug_log(['message addr size() = 1010']):
+ with self.nodes[0].assert_debug_log(['addr message size = 1010']):
addr_source.send_and_ping(msg)
self.log.info('Check that addr message content is relayed and added to addrman')
diff --git a/test/functional/p2p_blockfilters.py b/test/functional/p2p_blockfilters.py
new file mode 100755
index 0000000000..6d947ac660
--- /dev/null
+++ b/test/functional/p2p_blockfilters.py
@@ -0,0 +1,250 @@
+#!/usr/bin/env python3
+# Copyright (c) 2019 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Tests NODE_COMPACT_FILTERS (BIP 157/158).
+
+Tests that a node configured with -blockfilterindex and -peerblockfilters can serve
+cfheaders and cfcheckpts.
+"""
+
+from test_framework.messages import (
+ FILTER_TYPE_BASIC,
+ hash256,
+ msg_getcfcheckpt,
+ msg_getcfheaders,
+ msg_getcfilters,
+ ser_uint256,
+ uint256_from_str,
+)
+from test_framework.mininode import P2PInterface
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import (
+ assert_equal,
+ connect_nodes,
+ disconnect_nodes,
+ wait_until,
+)
+
+class CFiltersClient(P2PInterface):
+ def __init__(self):
+ super().__init__()
+ # Store the cfilters received.
+ self.cfilters = []
+
+ def pop_cfilters(self):
+ cfilters = self.cfilters
+ self.cfilters = []
+ return cfilters
+
+ def on_cfilter(self, message):
+ """Store cfilters received in a list."""
+ self.cfilters.append(message)
+
+class CompactFiltersTest(BitcoinTestFramework):
+ def set_test_params(self):
+ self.setup_clean_chain = True
+ self.rpc_timeout = 480
+ self.num_nodes = 2
+ self.extra_args = [
+ ["-blockfilterindex", "-peerblockfilters"],
+ ["-blockfilterindex"],
+ ]
+
+ def run_test(self):
+ # Node 0 supports COMPACT_FILTERS, node 1 does not.
+ node0 = self.nodes[0].add_p2p_connection(CFiltersClient())
+ node1 = self.nodes[1].add_p2p_connection(CFiltersClient())
+
+ # Nodes 0 & 1 share the same first 999 blocks in the chain.
+ self.nodes[0].generate(999)
+ self.sync_blocks(timeout=600)
+
+ # Create stale blocks by disconnecting nodes 0 & 1, mining, then reconnecting
+ disconnect_nodes(self.nodes[0], 1)
+
+ self.nodes[0].generate(1)
+ wait_until(lambda: self.nodes[0].getblockcount() == 1000)
+ stale_block_hash = self.nodes[0].getblockhash(1000)
+
+ self.nodes[1].generate(1001)
+ wait_until(lambda: self.nodes[1].getblockcount() == 2000)
+
+ self.log.info("get cfcheckpt on chain to be re-orged out.")
+ request = msg_getcfcheckpt(
+ filter_type=FILTER_TYPE_BASIC,
+ stop_hash=int(stale_block_hash, 16)
+ )
+ node0.send_and_ping(message=request)
+ response = node0.last_message['cfcheckpt']
+ assert_equal(response.filter_type, request.filter_type)
+ assert_equal(response.stop_hash, request.stop_hash)
+ assert_equal(len(response.headers), 1)
+
+ self.log.info("Reorg node 0 to a new chain.")
+ connect_nodes(self.nodes[0], 1)
+ self.sync_blocks(timeout=600)
+
+ main_block_hash = self.nodes[0].getblockhash(1000)
+ assert main_block_hash != stale_block_hash, "node 0 chain did not reorganize"
+
+ self.log.info("Check that peers can fetch cfcheckpt on active chain.")
+ tip_hash = self.nodes[0].getbestblockhash()
+ request = msg_getcfcheckpt(
+ filter_type=FILTER_TYPE_BASIC,
+ stop_hash=int(tip_hash, 16)
+ )
+ node0.send_and_ping(request)
+ response = node0.last_message['cfcheckpt']
+ assert_equal(response.filter_type, request.filter_type)
+ assert_equal(response.stop_hash, request.stop_hash)
+
+ main_cfcheckpt = self.nodes[0].getblockfilter(main_block_hash, 'basic')['header']
+ tip_cfcheckpt = self.nodes[0].getblockfilter(tip_hash, 'basic')['header']
+ assert_equal(
+ response.headers,
+ [int(header, 16) for header in (main_cfcheckpt, tip_cfcheckpt)]
+ )
+
+ self.log.info("Check that peers can fetch cfcheckpt on stale chain.")
+ request = msg_getcfcheckpt(
+ filter_type=FILTER_TYPE_BASIC,
+ stop_hash=int(stale_block_hash, 16)
+ )
+ node0.send_and_ping(request)
+ response = node0.last_message['cfcheckpt']
+
+ stale_cfcheckpt = self.nodes[0].getblockfilter(stale_block_hash, 'basic')['header']
+ assert_equal(
+ response.headers,
+ [int(header, 16) for header in (stale_cfcheckpt,)]
+ )
+
+ self.log.info("Check that peers can fetch cfheaders on active chain.")
+ request = msg_getcfheaders(
+ filter_type=FILTER_TYPE_BASIC,
+ start_height=1,
+ stop_hash=int(main_block_hash, 16)
+ )
+ node0.send_and_ping(request)
+ response = node0.last_message['cfheaders']
+ main_cfhashes = response.hashes
+ assert_equal(len(main_cfhashes), 1000)
+ assert_equal(
+ compute_last_header(response.prev_header, response.hashes),
+ int(main_cfcheckpt, 16)
+ )
+
+ self.log.info("Check that peers can fetch cfheaders on stale chain.")
+ request = msg_getcfheaders(
+ filter_type=FILTER_TYPE_BASIC,
+ start_height=1,
+ stop_hash=int(stale_block_hash, 16)
+ )
+ node0.send_and_ping(request)
+ response = node0.last_message['cfheaders']
+ stale_cfhashes = response.hashes
+ assert_equal(len(stale_cfhashes), 1000)
+ assert_equal(
+ compute_last_header(response.prev_header, response.hashes),
+ int(stale_cfcheckpt, 16)
+ )
+
+ self.log.info("Check that peers can fetch cfilters.")
+ stop_hash = self.nodes[0].getblockhash(10)
+ request = msg_getcfilters(
+ filter_type=FILTER_TYPE_BASIC,
+ start_height=1,
+ stop_hash=int(stop_hash, 16)
+ )
+ node0.send_message(request)
+ node0.sync_with_ping()
+ response = node0.pop_cfilters()
+ assert_equal(len(response), 10)
+
+ self.log.info("Check that cfilter responses are correct.")
+ for cfilter, cfhash, height in zip(response, main_cfhashes, range(1, 11)):
+ block_hash = self.nodes[0].getblockhash(height)
+ assert_equal(cfilter.filter_type, FILTER_TYPE_BASIC)
+ assert_equal(cfilter.block_hash, int(block_hash, 16))
+ computed_cfhash = uint256_from_str(hash256(cfilter.filter_data))
+ assert_equal(computed_cfhash, cfhash)
+
+ self.log.info("Check that peers can fetch cfilters for stale blocks.")
+ request = msg_getcfilters(
+ filter_type=FILTER_TYPE_BASIC,
+ start_height=1000,
+ stop_hash=int(stale_block_hash, 16)
+ )
+ node0.send_message(request)
+ node0.sync_with_ping()
+ response = node0.pop_cfilters()
+ assert_equal(len(response), 1)
+
+ cfilter = response[0]
+ assert_equal(cfilter.filter_type, FILTER_TYPE_BASIC)
+ assert_equal(cfilter.block_hash, int(stale_block_hash, 16))
+ computed_cfhash = uint256_from_str(hash256(cfilter.filter_data))
+ assert_equal(computed_cfhash, stale_cfhashes[999])
+
+ self.log.info("Requests to node 1 without NODE_COMPACT_FILTERS results in disconnection.")
+ requests = [
+ msg_getcfcheckpt(
+ filter_type=FILTER_TYPE_BASIC,
+ stop_hash=int(main_block_hash, 16)
+ ),
+ msg_getcfheaders(
+ filter_type=FILTER_TYPE_BASIC,
+ start_height=1000,
+ stop_hash=int(main_block_hash, 16)
+ ),
+ msg_getcfilters(
+ filter_type=FILTER_TYPE_BASIC,
+ start_height=1000,
+ stop_hash=int(main_block_hash, 16)
+ ),
+ ]
+ for request in requests:
+ node1 = self.nodes[1].add_p2p_connection(P2PInterface())
+ node1.send_message(request)
+ node1.wait_for_disconnect()
+
+ self.log.info("Check that invalid requests result in disconnection.")
+ requests = [
+ # Requesting too many filters results in disconnection.
+ msg_getcfilters(
+ filter_type=FILTER_TYPE_BASIC,
+ start_height=0,
+ stop_hash=int(main_block_hash, 16)
+ ),
+ # Requesting too many filter headers results in disconnection.
+ msg_getcfheaders(
+ filter_type=FILTER_TYPE_BASIC,
+ start_height=0,
+ stop_hash=int(tip_hash, 16)
+ ),
+ # Requesting unknown filter type results in disconnection.
+ msg_getcfcheckpt(
+ filter_type=255,
+ stop_hash=int(main_block_hash, 16)
+ ),
+ # Requesting unknown hash results in disconnection.
+ msg_getcfcheckpt(
+ filter_type=FILTER_TYPE_BASIC,
+ stop_hash=123456789,
+ ),
+ ]
+ for request in requests:
+ node0 = self.nodes[0].add_p2p_connection(P2PInterface())
+ node0.send_message(request)
+ node0.wait_for_disconnect()
+
+def compute_last_header(prev_header, hashes):
+ """Compute the last filter header from a starting header and a sequence of filter hashes."""
+ header = ser_uint256(prev_header)
+ for filter_hash in hashes:
+ header = hash256(ser_uint256(filter_hash) + header)
+ return uint256_from_str(header)
+
+if __name__ == '__main__':
+ CompactFiltersTest().main()
diff --git a/test/functional/p2p_blocksonly.py b/test/functional/p2p_blocksonly.py
index 3258a38e3c..c155dda664 100755
--- a/test/functional/p2p_blocksonly.py
+++ b/test/functional/p2p_blocksonly.py
@@ -57,6 +57,29 @@ class P2PBlocksOnly(BitcoinTestFramework):
self.nodes[0].p2p.wait_for_tx(txid)
assert_equal(self.nodes[0].getmempoolinfo()['size'], 1)
+ self.log.info('Check that txs from whitelisted peers are not rejected and are relayed to others')
+ self.log.info("Restarting node 0 with whitelist permission and blocksonly")
+ self.restart_node(0, ["-persistmempool=0", "-whitelist=127.0.0.1", "-whitelistforcerelay", "-blocksonly"])
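+ # -whitelist with -whitelistforcerelay is the legacy way of granting localhost peers the permissions asserted via getpeerinfo below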
+ assert_equal(self.nodes[0].getrawmempool(), [])
+ first_peer = self.nodes[0].add_p2p_connection(P2PInterface())
+ second_peer = self.nodes[0].add_p2p_connection(P2PInterface())
+ peer_1_info = self.nodes[0].getpeerinfo()[0]
+ assert_equal(peer_1_info['whitelisted'], True)
+ assert_equal(peer_1_info['permissions'], ['noban', 'forcerelay', 'relay', 'mempool'])
+ peer_2_info = self.nodes[0].getpeerinfo()[1]
+ assert_equal(peer_2_info['whitelisted'], True)
+ assert_equal(peer_2_info['permissions'], ['noban', 'forcerelay', 'relay', 'mempool'])
+ assert_equal(self.nodes[0].testmempoolaccept([sigtx])[0]['allowed'], True)
+ txid = self.nodes[0].testmempoolaccept([sigtx])[0]['txid']
+
+ self.log.info('Check that the tx from the whitelisted first_peer is relayed to others (i.e. second_peer)')
+ with self.nodes[0].assert_debug_log(["received getdata"]):
+ first_peer.send_message(msg_tx(FromHex(CTransaction(), sigtx)))
+ self.log.info('Check that the whitelisted peer is still connected after sending the transaction')
+ assert_equal(first_peer.is_connected, True)
+ second_peer.wait_for_tx(txid)
+ assert_equal(self.nodes[0].getmempoolinfo()['size'], 1)
+ self.log.info("Whitelisted peer's transaction is accepted and relayed")
if __name__ == '__main__':
P2PBlocksOnly().main()
diff --git a/test/functional/p2p_compactblocks.py b/test/functional/p2p_compactblocks.py
index 7905cf5018..0b3738b572 100755
--- a/test/functional/p2p_compactblocks.py
+++ b/test/functional/p2p_compactblocks.py
@@ -10,7 +10,7 @@ Version 2 compact blocks are post-segwit (wtxids)
import random
from test_framework.blocktools import create_block, create_coinbase, add_witness_commitment
-from test_framework.messages import BlockTransactions, BlockTransactionsRequest, calculate_shortid, CBlock, CBlockHeader, CInv, COutPoint, CTransaction, CTxIn, CTxInWitness, CTxOut, FromHex, HeaderAndShortIDs, msg_no_witness_block, msg_no_witness_blocktxn, msg_cmpctblock, msg_getblocktxn, msg_getdata, msg_getheaders, msg_headers, msg_inv, msg_sendcmpct, msg_sendheaders, msg_tx, msg_block, msg_blocktxn, MSG_WITNESS_FLAG, NODE_NETWORK, P2PHeaderAndShortIDs, PrefilledTransaction, ser_uint256, ToHex
+from test_framework.messages import BlockTransactions, BlockTransactionsRequest, calculate_shortid, CBlock, CBlockHeader, CInv, COutPoint, CTransaction, CTxIn, CTxInWitness, CTxOut, FromHex, HeaderAndShortIDs, msg_no_witness_block, msg_no_witness_blocktxn, msg_cmpctblock, msg_getblocktxn, msg_getdata, msg_getheaders, msg_headers, msg_inv, msg_sendcmpct, msg_sendheaders, msg_tx, msg_block, msg_blocktxn, MSG_BLOCK, MSG_CMPCT_BLOCK, MSG_WITNESS_FLAG, NODE_NETWORK, P2PHeaderAndShortIDs, PrefilledTransaction, ser_uint256, ToHex
from test_framework.mininode import mininode_lock, P2PInterface
from test_framework.script import CScript, OP_TRUE, OP_DROP
from test_framework.test_framework import BitcoinTestFramework
@@ -44,7 +44,7 @@ class TestP2PConn(P2PInterface):
def on_inv(self, message):
for x in self.last_message["inv"].inv:
- if x.type == 2:
+ if x.type == MSG_BLOCK:
self.block_announced = True
self.announced_blockhashes.add(x.hash)
@@ -305,10 +305,9 @@ class CompactBlocksTest(BitcoinTestFramework):
self.check_compactblock_construction_from_block(version, header_and_shortids, block_hash, block)
# Now fetch the compact block using a normal non-announce getdata
- with mininode_lock:
- test_node.clear_block_announcement()
- inv = CInv(4, block_hash) # 4 == "CompactBlock"
- test_node.send_message(msg_getdata([inv]))
+ test_node.clear_block_announcement()
+ inv = CInv(MSG_CMPCT_BLOCK, block_hash)
+ test_node.send_message(msg_getdata([inv]))
wait_until(test_node.received_block_announcement, timeout=30, lock=mininode_lock)
@@ -378,19 +377,15 @@ class CompactBlocksTest(BitcoinTestFramework):
# request
for announce in ["inv", "header"]:
block = self.build_block_on_tip(node, segwit=segwit)
- with mininode_lock:
- test_node.last_message.pop("getdata", None)
if announce == "inv":
- test_node.send_message(msg_inv([CInv(2, block.sha256)]))
+ test_node.send_message(msg_inv([CInv(MSG_BLOCK, block.sha256)]))
wait_until(lambda: "getheaders" in test_node.last_message, timeout=30, lock=mininode_lock)
test_node.send_header_for_blocks([block])
else:
test_node.send_header_for_blocks([block])
- wait_until(lambda: "getdata" in test_node.last_message, timeout=30, lock=mininode_lock)
- assert_equal(len(test_node.last_message["getdata"].inv), 1)
+ test_node.wait_for_getdata([block.sha256], timeout=30)
assert_equal(test_node.last_message["getdata"].inv[0].type, 4)
- assert_equal(test_node.last_message["getdata"].inv[0].hash, block.sha256)
# Send back a compactblock message that omits the coinbase
comp_block = HeaderAndShortIDs()
@@ -567,10 +562,9 @@ class CompactBlocksTest(BitcoinTestFramework):
assert_equal(int(node.getbestblockhash(), 16), block.hashPrevBlock)
# We should receive a getdata request
- wait_until(lambda: "getdata" in test_node.last_message, timeout=10, lock=mininode_lock)
- assert_equal(len(test_node.last_message["getdata"].inv), 1)
- assert test_node.last_message["getdata"].inv[0].type == 2 or test_node.last_message["getdata"].inv[0].type == 2 | MSG_WITNESS_FLAG
- assert_equal(test_node.last_message["getdata"].inv[0].hash, block.sha256)
+ test_node.wait_for_getdata([block.sha256], timeout=10)
+ assert test_node.last_message["getdata"].inv[0].type == MSG_BLOCK or \
+ test_node.last_message["getdata"].inv[0].type == MSG_BLOCK | MSG_WITNESS_FLAG
# Deliver the block
if version == 2:
@@ -639,7 +633,7 @@ class CompactBlocksTest(BitcoinTestFramework):
wait_until(test_node.received_block_announcement, timeout=30, lock=mininode_lock)
test_node.clear_block_announcement()
- test_node.send_message(msg_getdata([CInv(4, int(new_blocks[0], 16))]))
+ test_node.send_message(msg_getdata([CInv(MSG_CMPCT_BLOCK, int(new_blocks[0], 16))]))
wait_until(lambda: "cmpctblock" in test_node.last_message, timeout=30, lock=mininode_lock)
test_node.clear_block_announcement()
@@ -648,7 +642,7 @@ class CompactBlocksTest(BitcoinTestFramework):
test_node.clear_block_announcement()
with mininode_lock:
test_node.last_message.pop("block", None)
- test_node.send_message(msg_getdata([CInv(4, int(new_blocks[0], 16))]))
+ test_node.send_message(msg_getdata([CInv(MSG_CMPCT_BLOCK, int(new_blocks[0], 16))]))
wait_until(lambda: "block" in test_node.last_message, timeout=30, lock=mininode_lock)
with mininode_lock:
test_node.last_message["block"].block.calc_sha256()
diff --git a/test/functional/p2p_disconnect_ban.py b/test/functional/p2p_disconnect_ban.py
index 9047fc6828..09b9ebeb2d 100755
--- a/test/functional/p2p_disconnect_ban.py
+++ b/test/functional/p2p_disconnect_ban.py
@@ -69,8 +69,7 @@ class DisconnectBanTest(BitcoinTestFramework):
self.nodes[1].setmocktime(old_time + 3)
assert_equal(len(self.nodes[1].listbanned()), 3)
- self.stop_node(1)
- self.start_node(1)
+ self.restart_node(1)
listAfterShutdown = self.nodes[1].listbanned()
assert_equal("127.0.0.0/24", listAfterShutdown[0]['address'])
diff --git a/test/functional/p2p_dos_header_tree.py b/test/functional/p2p_dos_header_tree.py
index 28dd809ca5..f8552cf53d 100755
--- a/test/functional/p2p_dos_header_tree.py
+++ b/test/functional/p2p_dos_header_tree.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2019 The Bitcoin Core developers
+# Copyright (c) 2019-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test that we reject low difficulty headers to prevent our block tree from filling up with useless bloat"""
diff --git a/test/functional/p2p_eviction.py b/test/functional/p2p_eviction.py
new file mode 100755
index 0000000000..b2b3a89aab
--- /dev/null
+++ b/test/functional/p2p_eviction.py
@@ -0,0 +1,129 @@
+#!/usr/bin/env python3
+# Copyright (c) 2019 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+
+""" Test node eviction logic
+
+When the number of peers has reached the limit of maximum connections,
+the next connecting inbound peer will trigger the eviction mechanism.
+We cannot currently test the parts of the eviction logic that are based on
+address/netgroup since, in the current framework, all peers are connecting from
+the same local address. See Issue #14210 for more info.
+Therefore, this test is limited to the remaining protection criteria.
+"""
+
+import time
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.mininode import P2PInterface, P2PDataStore
+from test_framework.util import assert_equal, wait_until
+from test_framework.blocktools import create_block, create_coinbase
+from test_framework.messages import CTransaction, FromHex, msg_pong, msg_tx
+
+
+class SlowP2PDataStore(P2PDataStore):
+ def on_ping(self, message):
+ time.sleep(0.1)
+ self.send_message(msg_pong(message.nonce))
+
+class SlowP2PInterface(P2PInterface):
+ def on_ping(self, message):
+ time.sleep(0.1)
+ self.send_message(msg_pong(message.nonce))
+
+class P2PEvict(BitcoinTestFramework):
+ def set_test_params(self):
+ self.setup_clean_chain = True
+ self.num_nodes = 1
+ # The choice of maxconnections=32 results in a maximum of 21 inbound connections
+ # (32 - 10 outbound - 1 feeler). 20 inbound peers are protected from eviction:
+ # 4 by netgroup, 4 that sent us blocks, 4 that sent us transactions and 8 via lowest ping time
+ self.extra_args = [['-maxconnections=32']]
+
+ def run_test(self):
+ protected_peers = set() # peers that we expect to be protected from eviction
+ current_peer = -1
+ node = self.nodes[0]
+ node.generatetoaddress(101, node.get_deterministic_priv_key().address)
+
+ self.log.info("Create 4 peers and protect them from eviction by sending us a block")
+ for _ in range(4):
+ block_peer = node.add_p2p_connection(SlowP2PDataStore())
+ current_peer += 1
+ block_peer.sync_with_ping()
+ best_block = node.getbestblockhash()
+ tip = int(best_block, 16)
+ best_block_time = node.getblock(best_block)['time']
+ block = create_block(tip, create_coinbase(node.getblockcount() + 1), best_block_time + 1)
+ block.solve()
+ block_peer.send_blocks_and_test([block], node, success=True)
+ protected_peers.add(current_peer)
+
+ self.log.info("Create 5 slow-pinging peers, making them eviction candidates")
+ for _ in range(5):
+ node.add_p2p_connection(SlowP2PInterface())
+ current_peer += 1
+
+ self.log.info("Create 4 peers and protect them from eviction by sending us a tx")
+ for i in range(4):
+ txpeer = node.add_p2p_connection(SlowP2PInterface())
+ current_peer += 1
+ txpeer.sync_with_ping()
+
+ prevtx = node.getblock(node.getblockhash(i + 1), 2)['tx'][0]
+ rawtx = node.createrawtransaction(
+ inputs=[{'txid': prevtx['txid'], 'vout': 0}],
+ outputs=[{node.get_deterministic_priv_key().address: 50 - 0.00125}],
+ )
+ sigtx = node.signrawtransactionwithkey(
+ hexstring=rawtx,
+ privkeys=[node.get_deterministic_priv_key().key],
+ prevtxs=[{
+ 'txid': prevtx['txid'],
+ 'vout': 0,
+ 'scriptPubKey': prevtx['vout'][0]['scriptPubKey']['hex'],
+ }],
+ )['hex']
+ txpeer.send_message(msg_tx(FromHex(CTransaction(), sigtx)))
+ protected_peers.add(current_peer)
+
+ self.log.info("Create 8 peers and protect them from eviction by having faster pings")
+ for _ in range(8):
+ fastpeer = node.add_p2p_connection(P2PInterface())
+ current_peer += 1
+ wait_until(lambda: "ping" in fastpeer.last_message, timeout=10)
+
+ # Confirm the fastest peers by asking the node for the actual min ping times
+ peerinfo = node.getpeerinfo()
+ pings = {}
+ for i in range(len(peerinfo)):
+ pings[i] = peerinfo[i]['minping'] if 'minping' in peerinfo[i] else 1000000
+ sorted_pings = sorted(pings.items(), key=lambda x: x[1])
+
+ # Usually the 8 fast peers are protected. In the rare case of unreliable pings,
+ # one of the slower peers might have a faster min ping though.
+ for i in range(8):
+ protected_peers.add(sorted_pings[i][0])
+
+ self.log.info("Create peer that triggers the eviction mechanism")
+ node.add_p2p_connection(SlowP2PInterface())
+
+ # One of the non-protected peers must be evicted. We can't be sure which one because
+ # 4 peers are protected via netgroup, which is identical for all peers,
+ # and the eviction mechanism doesn't preserve the order of identical elements.
+ evicted_peers = []
+ for i in range(len(node.p2ps)):
+ if not node.p2ps[i].is_connected:
+ evicted_peers.append(i)
+
+ self.log.info("Test that one peer was evicted")
+ self.log.debug("{} evicted peer: {}".format(len(evicted_peers), set(evicted_peers)))
+ assert_equal(len(evicted_peers), 1)
+
+ self.log.info("Test that no peer expected to be protected was evicted")
+ self.log.debug("{} protected peers: {}".format(len(protected_peers), protected_peers))
+ assert evicted_peers[0] not in protected_peers
+
+if __name__ == '__main__':
+ P2PEvict().main()
diff --git a/test/functional/p2p_feefilter.py b/test/functional/p2p_feefilter.py
index 4f242bd94a..f939ea965c 100755
--- a/test/functional/p2p_feefilter.py
+++ b/test/functional/p2p_feefilter.py
@@ -7,14 +7,16 @@
from decimal import Decimal
import time
-from test_framework.messages import msg_feefilter
+from test_framework.messages import MSG_TX, msg_feefilter
from test_framework.mininode import mininode_lock, P2PInterface
from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import assert_equal
def hashToHex(hash):
return format(hash, '064x')
+
# Wait up to 60 secs to see if the testnode has received all the expected invs
def allInvsMatch(invsExpected, testnode):
for x in range(60):
@@ -24,6 +26,18 @@ def allInvsMatch(invsExpected, testnode):
time.sleep(1)
return False
+
+class FeefilterConn(P2PInterface):
+ feefilter_received = False
+
+ def on_feefilter(self, message):
+ self.feefilter_received = True
+
+ def assert_feefilter_received(self, recv: bool):
+ with mininode_lock:
+ assert_equal(self.feefilter_received, recv)
+
+
class TestP2PConn(P2PInterface):
def __init__(self):
super().__init__()
@@ -31,13 +45,14 @@ class TestP2PConn(P2PInterface):
def on_inv(self, message):
for i in message.inv:
- if (i.type == 1):
+ if (i.type == MSG_TX):
self.txinvs.append(hashToHex(i.hash))
def clear_invs(self):
with mininode_lock:
self.txinvs = []
+
class FeeFilterTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 2
@@ -46,41 +61,54 @@ class FeeFilterTest(BitcoinTestFramework):
# mempool and wallet feerate calculation based on GetFee
# rounding down 3 places, leading to stranded transactions.
# See issue #16499
- self.extra_args = [["-minrelaytxfee=0.00000100", "-mintxfee=0.00000100"]]*self.num_nodes
+ self.extra_args = [["-minrelaytxfee=0.00000100", "-mintxfee=0.00000100"]] * self.num_nodes
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def run_test(self):
+ self.test_feefilter_forcerelay()
+ self.test_feefilter()
+
+ def test_feefilter_forcerelay(self):
+ self.log.info('Check that peers without forcerelay permission (default) get a feefilter message')
+ self.nodes[0].add_p2p_connection(FeefilterConn()).assert_feefilter_received(True)
+
+ self.log.info('Check that peers with forcerelay permission do not get a feefilter message')
+ self.restart_node(0, extra_args=['-whitelist=forcerelay@127.0.0.1'])
+ self.nodes[0].add_p2p_connection(FeefilterConn()).assert_feefilter_received(False)
+
+ # Restart to disconnect peers and load default extra_args
+ self.restart_node(0)
+ self.connect_nodes(1, 0)
+
+ def test_feefilter(self):
node1 = self.nodes[1]
node0 = self.nodes[0]
- # Get out of IBD
- node1.generate(1)
- self.sync_blocks()
- self.nodes[0].add_p2p_connection(TestP2PConn())
+ conn = self.nodes[0].add_p2p_connection(TestP2PConn())
# Test that invs are received by test connection for all txs at
# feerate of .2 sat/byte
node1.settxfee(Decimal("0.00000200"))
txids = [node1.sendtoaddress(node1.getnewaddress(), 1) for x in range(3)]
- assert allInvsMatch(txids, self.nodes[0].p2p)
- self.nodes[0].p2p.clear_invs()
+ assert allInvsMatch(txids, conn)
+ conn.clear_invs()
# Set a filter of .15 sat/byte on test connection
- self.nodes[0].p2p.send_and_ping(msg_feefilter(150))
+ conn.send_and_ping(msg_feefilter(150))
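+ # The feefilter value is a feerate in satoshis per 1000 bytes (BIP 133), so 150 corresponds to 0.15 sat/byte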
# Test that txs are still being received by test connection (paying .15 sat/byte)
node1.settxfee(Decimal("0.00000150"))
txids = [node1.sendtoaddress(node1.getnewaddress(), 1) for x in range(3)]
- assert allInvsMatch(txids, self.nodes[0].p2p)
- self.nodes[0].p2p.clear_invs()
+ assert allInvsMatch(txids, conn)
+ conn.clear_invs()
# Change tx fee rate to .1 sat/byte and test they are no longer received
# by the test connection
node1.settxfee(Decimal("0.00000100"))
[node1.sendtoaddress(node1.getnewaddress(), 1) for x in range(3)]
- self.sync_mempools() # must be sure node 0 has received all txs
+ self.sync_mempools() # must be sure node 0 has received all txs
# Send one transaction from node0 that should be received, so that we
# can sync the test on receipt (if node1's txs were relayed, they'd
@@ -91,14 +119,15 @@ class FeeFilterTest(BitcoinTestFramework):
# as well.
node0.settxfee(Decimal("0.00020000"))
txids = [node0.sendtoaddress(node0.getnewaddress(), 1)]
- assert allInvsMatch(txids, self.nodes[0].p2p)
- self.nodes[0].p2p.clear_invs()
+ assert allInvsMatch(txids, conn)
+ conn.clear_invs()
# Remove fee filter and check that txs are received again
- self.nodes[0].p2p.send_and_ping(msg_feefilter(0))
+ conn.send_and_ping(msg_feefilter(0))
txids = [node1.sendtoaddress(node1.getnewaddress(), 1) for x in range(3)]
- assert allInvsMatch(txids, self.nodes[0].p2p)
- self.nodes[0].p2p.clear_invs()
+ assert allInvsMatch(txids, conn)
+ conn.clear_invs()
+
if __name__ == '__main__':
FeeFilterTest().main()
diff --git a/test/functional/p2p_filter.py b/test/functional/p2p_filter.py
index b73d5784aa..741da3be31 100755
--- a/test/functional/p2p_filter.py
+++ b/test/functional/p2p_filter.py
@@ -7,18 +7,24 @@ Test BIP 37
"""
from test_framework.messages import (
+ CInv,
+ MAX_BLOOM_FILTER_SIZE,
+ MAX_BLOOM_HASH_FUNCS,
MSG_BLOCK,
MSG_FILTERED_BLOCK,
- msg_getdata,
- msg_filterload,
msg_filteradd,
msg_filterclear,
+ msg_filterload,
+ msg_getdata,
+ msg_mempool,
+ msg_version,
)
-from test_framework.mininode import P2PInterface
+from test_framework.mininode import P2PInterface, mininode_lock
+from test_framework.script import MAX_SCRIPT_ELEMENT_SIZE
from test_framework.test_framework import BitcoinTestFramework
-class FilterNode(P2PInterface):
+class P2PBloomFilter(P2PInterface):
# This is a P2SH watch-only wallet
watch_script_pubkey = 'a914ffffffffffffffffffffffffffffffffffffffff87'
# The initial filter (n=10, fp=0.000001) with just the above scriptPubKey added
@@ -30,21 +36,47 @@ class FilterNode(P2PInterface):
nFlags=1,
)
+ def __init__(self):
+ super().__init__()
+ self._tx_received = False
+ self._merkleblock_received = False
+
def on_inv(self, message):
want = msg_getdata()
for i in message.inv:
# inv messages can only contain TX or BLOCK, so translate BLOCK to FILTERED_BLOCK
if i.type == MSG_BLOCK:
- i.type = MSG_FILTERED_BLOCK
- want.inv.append(i)
+ want.inv.append(CInv(MSG_FILTERED_BLOCK, i.hash))
+ else:
+ want.inv.append(i)
if len(want.inv):
self.send_message(want)
def on_merkleblock(self, message):
- self.merkleblock_received = True
+ self._merkleblock_received = True
def on_tx(self, message):
- self.tx_received = True
+ self._tx_received = True
+
+ @property
+ def tx_received(self):
+ with mininode_lock:
+ return self._tx_received
+
+ @tx_received.setter
+ def tx_received(self, value):
+ with mininode_lock:
+ self._tx_received = value
+
+ @property
+ def merkleblock_received(self):
+ with mininode_lock:
+ return self._merkleblock_received
+
+ @merkleblock_received.setter
+ def merkleblock_received(self, value):
+ with mininode_lock:
+ self._merkleblock_received = value
class FilterTest(BitcoinTestFramework):
@@ -59,48 +91,143 @@ class FilterTest(BitcoinTestFramework):
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
- def run_test(self):
- self.log.info('Add filtered P2P connection to the node')
- filter_node = self.nodes[0].add_p2p_connection(FilterNode())
- filter_node.send_and_ping(filter_node.watch_filter_init)
- filter_address = self.nodes[0].decodescript(filter_node.watch_script_pubkey)['addresses'][0]
+ def test_size_limits(self, filter_peer):
+ self.log.info('Check that a too-large filter is rejected')
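+ # BIP 37 caps bloom filters at 36,000 bytes (MAX_BLOOM_FILTER_SIZE) and 50 hash functions (MAX_BLOOM_HASH_FUNCS)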
+ with self.nodes[0].assert_debug_log(['Misbehaving']):
+ filter_peer.send_and_ping(msg_filterload(data=b'\xbb'*(MAX_BLOOM_FILTER_SIZE+1)))
+
+ self.log.info('Check that max size filter is accepted')
+ with self.nodes[0].assert_debug_log([], unexpected_msgs=['Misbehaving']):
+ filter_peer.send_and_ping(msg_filterload(data=b'\xbb'*(MAX_BLOOM_FILTER_SIZE)))
+ filter_peer.send_and_ping(msg_filterclear())
+
+ self.log.info('Check that filter with too many hash functions is rejected')
+ with self.nodes[0].assert_debug_log(['Misbehaving']):
+ filter_peer.send_and_ping(msg_filterload(data=b'\xaa', nHashFuncs=MAX_BLOOM_HASH_FUNCS+1))
+
+ self.log.info('Check that filter with max hash functions is accepted')
+ with self.nodes[0].assert_debug_log([], unexpected_msgs=['Misbehaving']):
+ filter_peer.send_and_ping(msg_filterload(data=b'\xaa', nHashFuncs=MAX_BLOOM_HASH_FUNCS))
+ # Don't send filterclear until next two filteradd checks are done
+
+ self.log.info('Check that a data element of max size can be added to the filter')
+ with self.nodes[0].assert_debug_log([], unexpected_msgs=['Misbehaving']):
+ filter_peer.send_and_ping(msg_filteradd(data=b'\xcc'*(MAX_SCRIPT_ELEMENT_SIZE)))
+
+ self.log.info('Check that a too-large data element cannot be added to the filter')
+ with self.nodes[0].assert_debug_log(['Misbehaving']):
+ filter_peer.send_and_ping(msg_filteradd(data=b'\xcc'*(MAX_SCRIPT_ELEMENT_SIZE+1)))
+
+ filter_peer.send_and_ping(msg_filterclear())
+
+ def test_msg_mempool(self):
+ self.log.info("Check that a node with bloom filters enabled services p2p mempool messages")
+ filter_peer = P2PBloomFilter()
+
+ self.log.debug("Create a tx relevant to the peer before connecting")
+ filter_address = self.nodes[0].decodescript(filter_peer.watch_script_pubkey)['addresses'][0]
+ txid = self.nodes[0].sendtoaddress(filter_address, 90)
+
+ self.log.debug("Send a mempool msg after connecting and check that the tx is received")
+ self.nodes[0].add_p2p_connection(filter_peer)
+ filter_peer.send_and_ping(filter_peer.watch_filter_init)
+ self.nodes[0].p2p.send_message(msg_mempool())
+ filter_peer.wait_for_tx(txid)
+
+ def test_frelay_false(self, filter_peer):
+ self.log.info("Check that a node with fRelay set to false does not receive invs until the filter is set")
+ filter_peer.tx_received = False
+ filter_address = self.nodes[0].decodescript(filter_peer.watch_script_pubkey)['addresses'][0]
+ self.nodes[0].sendtoaddress(filter_address, 90)
+ # Sync to make sure the reason filter_peer doesn't receive the tx is not p2p delays
+ filter_peer.sync_with_ping()
+ assert not filter_peer.tx_received
+
+ # Clear the mempool so that this transaction does not impact subsequent tests
+ self.nodes[0].generate(1)
+
+ def test_filter(self, filter_peer):
+ # Set the bloomfilter using filterload
+ filter_peer.send_and_ping(filter_peer.watch_filter_init)
+ # If fRelay is not already True, sending filterload sets it to True
+ assert self.nodes[0].getpeerinfo()[0]['relaytxes']
+ filter_address = self.nodes[0].decodescript(filter_peer.watch_script_pubkey)['addresses'][0]
self.log.info('Check that we receive merkleblock and tx if the filter matches a tx in a block')
block_hash = self.nodes[0].generatetoaddress(1, filter_address)[0]
txid = self.nodes[0].getblock(block_hash)['tx'][0]
- filter_node.wait_for_merkleblock(block_hash)
- filter_node.wait_for_tx(txid)
+ filter_peer.wait_for_merkleblock(block_hash)
+ filter_peer.wait_for_tx(txid)
self.log.info('Check that we only receive a merkleblock if the filter does not match a tx in a block')
- filter_node.tx_received = False
+ filter_peer.tx_received = False
block_hash = self.nodes[0].generatetoaddress(1, self.nodes[0].getnewaddress())[0]
- filter_node.wait_for_merkleblock(block_hash)
- assert not filter_node.tx_received
+ filter_peer.wait_for_merkleblock(block_hash)
+ assert not filter_peer.tx_received
self.log.info('Check that we do not receive a tx if the filter does not match a mempool tx')
- filter_node.merkleblock_received = False
- filter_node.tx_received = False
+ filter_peer.merkleblock_received = False
+ filter_peer.tx_received = False
self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 90)
- filter_node.sync_with_ping()
- filter_node.sync_with_ping()
- assert not filter_node.merkleblock_received
- assert not filter_node.tx_received
+ filter_peer.sync_with_ping()
+ filter_peer.sync_with_ping()
+ assert not filter_peer.merkleblock_received
+ assert not filter_peer.tx_received
- self.log.info('Check that we receive a tx in reply to a mempool msg if the filter matches a mempool tx')
- filter_node.merkleblock_received = False
+ self.log.info('Check that we receive a tx if the filter matches a mempool tx')
+ filter_peer.merkleblock_received = False
txid = self.nodes[0].sendtoaddress(filter_address, 90)
- filter_node.wait_for_tx(txid)
- assert not filter_node.merkleblock_received
+ filter_peer.wait_for_tx(txid)
+ assert not filter_peer.merkleblock_received
self.log.info('Check that after deleting filter all txs get relayed again')
- filter_node.send_and_ping(msg_filterclear())
+ filter_peer.send_and_ping(msg_filterclear())
for _ in range(5):
txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 7)
- filter_node.wait_for_tx(txid)
+ filter_peer.wait_for_tx(txid)
+
+ self.log.info('Check that a request for filtered blocks is ignored if no filter is set')
+ filter_peer.merkleblock_received = False
+ filter_peer.tx_received = False
+ with self.nodes[0].assert_debug_log(expected_msgs=['received getdata']):
+ block_hash = self.nodes[0].generatetoaddress(1, self.nodes[0].getnewaddress())[0]
+ filter_peer.wait_for_inv([CInv(MSG_BLOCK, int(block_hash, 16))])
+ filter_peer.sync_with_ping()
+ assert not filter_peer.merkleblock_received
+ assert not filter_peer.tx_received
+
+ self.log.info('Check that sending "filteradd" if no filter is set is treated as misbehavior')
+ with self.nodes[0].assert_debug_log(['Misbehaving']):
+ filter_peer.send_and_ping(msg_filteradd(data=b'letsmisbehave'))
self.log.info("Check that division-by-zero remote crash bug [CVE-2013-5700] is fixed")
- filter_node.send_and_ping(msg_filterload(data=b'', nHashFuncs=1))
- filter_node.send_and_ping(msg_filteradd(data=b'letstrytocrashthisnode'))
+ filter_peer.send_and_ping(msg_filterload(data=b'', nHashFuncs=1))
+ filter_peer.send_and_ping(msg_filteradd(data=b'letstrytocrashthisnode'))
+ self.nodes[0].disconnect_p2ps()
+
+ def run_test(self):
+ filter_peer = self.nodes[0].add_p2p_connection(P2PBloomFilter())
+ self.log.info('Test filter size limits')
+ self.test_size_limits(filter_peer)
+
+ self.log.info('Test BIP 37 for a node with fRelay = True (default)')
+ self.test_filter(filter_peer)
+ self.nodes[0].disconnect_p2ps()
+
+ self.log.info('Test BIP 37 for a node with fRelay = False')
+ # Add peer but do not send version yet
+ filter_peer_without_nrelay = self.nodes[0].add_p2p_connection(P2PBloomFilter(), send_version=False, wait_for_verack=False)
+ filter_peer_without_nrelay.wait_until(lambda: filter_peer_without_nrelay.is_connected, timeout=10)
+ # Send version with fRelay=False
+ version_without_fRelay = msg_version()
+ version_without_fRelay.nRelay = 0
+ filter_peer_without_nrelay.send_message(version_without_fRelay)
+ filter_peer_without_nrelay.wait_for_verack()
+ assert not self.nodes[0].getpeerinfo()[0]['relaytxes']
+ self.test_frelay_false(filter_peer_without_nrelay)
+ self.test_filter(filter_peer_without_nrelay)
+
+ self.test_msg_mempool()
if __name__ == '__main__':
diff --git a/test/functional/p2p_fingerprint.py b/test/functional/p2p_fingerprint.py
index fab0887197..d743abe681 100755
--- a/test/functional/p2p_fingerprint.py
+++ b/test/functional/p2p_fingerprint.py
@@ -11,7 +11,7 @@ the node should pretend that it does not have it to avoid fingerprinting.
import time
from test_framework.blocktools import (create_block, create_coinbase)
-from test_framework.messages import CInv
+from test_framework.messages import CInv, MSG_BLOCK
from test_framework.mininode import (
P2PInterface,
msg_headers,
@@ -48,7 +48,7 @@ class P2PFingerprintTest(BitcoinTestFramework):
# Send a getdata request for a given block hash
def send_block_request(self, block_hash, node):
msg = msg_getdata()
- msg.inv.append(CInv(2, block_hash)) # 2 == "Block"
+ msg.inv.append(CInv(MSG_BLOCK, block_hash))
node.send_message(msg)
# Send a getheaders request for a given single block hash
@@ -90,7 +90,7 @@ class P2PFingerprintTest(BitcoinTestFramework):
# Force reorg to a longer chain
node0.send_message(msg_headers(new_blocks))
- node0.wait_for_getdata()
+ node0.wait_for_getdata([x.sha256 for x in new_blocks])
for block in new_blocks:
node0.send_and_ping(msg_block(block))
diff --git a/test/functional/p2p_getdata.py b/test/functional/p2p_getdata.py
new file mode 100755
index 0000000000..d1b11c2c61
--- /dev/null
+++ b/test/functional/p2p_getdata.py
@@ -0,0 +1,49 @@
+#!/usr/bin/env python3
+# Copyright (c) 2020 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test GETDATA processing behavior"""
+from collections import defaultdict
+
+from test_framework.messages import (
+ CInv,
+ msg_getdata,
+)
+from test_framework.mininode import P2PInterface
+from test_framework.test_framework import BitcoinTestFramework
+
+
+class P2PStoreBlock(P2PInterface):
+ def __init__(self):
+ super().__init__()
+ self.blocks = defaultdict(int)
+
+ def on_block(self, message):
+ message.block.calc_sha256()
+ self.blocks[message.block.sha256] += 1
+
+
+class GetdataTest(BitcoinTestFramework):
+ def set_test_params(self):
+ self.num_nodes = 1
+
+ def run_test(self):
+ p2p_block_store = self.nodes[0].add_p2p_connection(P2PStoreBlock())
+
+ self.log.info("test that an invalid GETDATA doesn't prevent processing of future messages")
+
+ # Send invalid message and verify that node responds to later ping
+ invalid_getdata = msg_getdata()
+ invalid_getdata.inv.append(CInv(t=0, h=0)) # INV type 0 is invalid.
+ p2p_block_store.send_and_ping(invalid_getdata)
+
+ # Check getdata still works by fetching tip block
+ best_block = int(self.nodes[0].getbestblockhash(), 16)
+ good_getdata = msg_getdata()
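+ # inv type 2 is MSG_BLOCK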
+ good_getdata.inv.append(CInv(t=2, h=best_block))
+ p2p_block_store.send_and_ping(good_getdata)
+ p2p_block_store.wait_until(lambda: self.nodes[0].p2ps[0].blocks[best_block] == 1)
+
+
+if __name__ == '__main__':
+ GetdataTest().main()
diff --git a/test/functional/p2p_invalid_block.py b/test/functional/p2p_invalid_block.py
index 801407757f..e280a62997 100755
--- a/test/functional/p2p_invalid_block.py
+++ b/test/functional/p2p_invalid_block.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2015-2018 The Bitcoin Core developers
+# Copyright (c) 2015-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test node responses to invalid blocks.
diff --git a/test/functional/p2p_invalid_locator.py b/test/functional/p2p_invalid_locator.py
index 58be703bf6..0155eb21f0 100755
--- a/test/functional/p2p_invalid_locator.py
+++ b/test/functional/p2p_invalid_locator.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2015-2018 The Bitcoin Core developers
+# Copyright (c) 2015-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test node responses to invalid locators.
diff --git a/test/functional/p2p_invalid_messages.py b/test/functional/p2p_invalid_messages.py
index 759f111e69..d9a9ae5188 100755
--- a/test/functional/p2p_invalid_messages.py
+++ b/test/functional/p2p_invalid_messages.py
@@ -1,30 +1,47 @@
#!/usr/bin/env python3
-# Copyright (c) 2015-2019 The Bitcoin Core developers
+# Copyright (c) 2015-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test node responses to invalid network messages."""
-import asyncio
-import struct
-import sys
-from test_framework import messages
-from test_framework.mininode import P2PDataStore, NetworkThread
+from test_framework.messages import (
+ CBlockHeader,
+ CInv,
+ MAX_HEADERS_RESULTS,
+ MAX_INV_SIZE,
+ MAX_PROTOCOL_MESSAGE_LENGTH,
+ msg_getdata,
+ msg_headers,
+ msg_inv,
+ msg_ping,
+ MSG_TX,
+ ser_string,
+)
+from test_framework.mininode import (
+ P2PDataStore,
+ P2PInterface,
+)
from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import (
+ assert_equal,
+ wait_until,
+)
+VALID_DATA_LIMIT = MAX_PROTOCOL_MESSAGE_LENGTH - 5 # Account for the 5-byte length prefix
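+# (ser_string prefixes the ~4MB payload with a compactsize length, which is 5 bytes here: the 0xfe marker plus a 4-byte length)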
class msg_unrecognized:
"""Nonsensical message. Modeled after similar types in test_framework.messages."""
- command = b'badmsg'
+ msgtype = b'badmsg'
def __init__(self, *, str_data):
self.str_data = str_data.encode() if not isinstance(str_data, bytes) else str_data
def serialize(self):
- return messages.ser_string(self.str_data)
+ return ser_string(self.str_data)
def __repr__(self):
- return "{}(data={})".format(self.command, self.str_data)
+ return "{}(data={})".format(self.msgtype, self.str_data)
class InvalidMessagesTest(BitcoinTestFramework):
@@ -33,190 +50,125 @@ class InvalidMessagesTest(BitcoinTestFramework):
self.setup_clean_chain = True
def run_test(self):
- """
- . Test msg header
- 0. Send a bunch of large (4MB) messages of an unrecognized type. Check to see
- that it isn't an effective DoS against the node.
-
- 1. Send an oversized (4MB+) message and check that we're disconnected.
-
- 2. Send a few messages with an incorrect data size in the header, ensure the
- messages are ignored.
- """
+ self.test_buffer()
self.test_magic_bytes()
self.test_checksum()
self.test_size()
- self.test_command()
-
- node = self.nodes[0]
- self.node = node
- node.add_p2p_connection(P2PDataStore())
- conn2 = node.add_p2p_connection(P2PDataStore())
-
- msg_limit = 4 * 1000 * 1000 # 4MB, per MAX_PROTOCOL_MESSAGE_LENGTH
- valid_data_limit = msg_limit - 5 # Account for the 4-byte length prefix
-
- #
- # 0.
- #
- # Send as large a message as is valid, ensure we aren't disconnected but
- # also can't exhaust resources.
- #
- msg_at_size = msg_unrecognized(str_data="b" * valid_data_limit)
- assert len(msg_at_size.serialize()) == msg_limit
-
- self.log.info("Sending a bunch of large, junk messages to test memory exhaustion. May take a bit...")
-
- # Run a bunch of times to test for memory exhaustion.
- for _ in range(80):
- node.p2p.send_message(msg_at_size)
-
- # Check that, even though the node is being hammered by nonsense from one
- # connection, it can still service other peers in a timely way.
- for _ in range(20):
- conn2.sync_with_ping(timeout=2)
-
- # Peer 1, despite serving up a bunch of nonsense, should still be connected.
- self.log.info("Waiting for node to drop junk messages.")
- node.p2p.sync_with_ping(timeout=400)
- assert node.p2p.is_connected
-
- #
- # 1.
- #
- # Send an oversized message, ensure we're disconnected.
- #
- # Under macOS this test is skipped due to an unexpected error code
- # returned from the closing socket which python/asyncio does not
- # yet know how to handle.
- #
- if sys.platform != 'darwin':
- msg_over_size = msg_unrecognized(str_data="b" * (valid_data_limit + 1))
- assert len(msg_over_size.serialize()) == (msg_limit + 1)
-
- # An unknown message type (or *any* message type) over
- # MAX_PROTOCOL_MESSAGE_LENGTH should result in a disconnect.
- node.p2p.send_message(msg_over_size)
- node.p2p.wait_for_disconnect(timeout=4)
-
- node.disconnect_p2ps()
- conn = node.add_p2p_connection(P2PDataStore())
- conn.wait_for_verack()
- else:
- self.log.info("Skipping test p2p_invalid_messages/1 (oversized message) under macOS")
-
- #
- # 2.
- #
- # Send messages with an incorrect data size in the header.
- #
- actual_size = 100
- msg = msg_unrecognized(str_data="b" * actual_size)
-
- # TODO: handle larger-than cases. I haven't been able to pin down what behavior to expect.
- for wrong_size in (2, 77, 78, 79):
- self.log.info("Sending a message with incorrect size of {}".format(wrong_size))
-
- # Unmodified message should submit okay.
- node.p2p.send_and_ping(msg)
-
- # A message lying about its data size results in a disconnect when the incorrect
- # data size is less than the actual size.
- #
- # TODO: why does behavior change at 78 bytes?
- #
- node.p2p.send_raw_message(self._tweak_msg_data_size(msg, wrong_size))
-
- # For some reason unknown to me, we sometimes have to push additional data to the
- # peer in order for it to realize a disconnect.
- try:
- node.p2p.send_message(messages.msg_ping(nonce=123123))
- except IOError:
- pass
-
- node.p2p.wait_for_disconnect(timeout=10)
- node.disconnect_p2ps()
- node.add_p2p_connection(P2PDataStore())
-
- # Node is still up.
- conn = node.add_p2p_connection(P2PDataStore())
+ self.test_msgtype()
+ self.test_oversized_inv_msg()
+ self.test_oversized_getdata_msg()
+ self.test_oversized_headers_msg()
+ self.test_resource_exhaustion()
+
+ def test_buffer(self):
+ self.log.info("Test message with header split across two buffers is received")
+ conn = self.nodes[0].add_p2p_connection(P2PDataStore())
+ # Create valid message
+ msg = conn.build_message(msg_ping(nonce=12345))
+ cut_pos = 12 # Chosen at an arbitrary position within the header
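+ # (a P2P message header is 24 bytes: 4-byte magic, 12-byte command, 4-byte length, 4-byte checksum)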
+ # Send message in two pieces
+ before = int(self.nodes[0].getnettotals()['totalbytesrecv'])
+ conn.send_raw_message(msg[:cut_pos])
+ # Wait until node has processed the first half of the message
+ wait_until(lambda: int(self.nodes[0].getnettotals()['totalbytesrecv']) != before)
+ middle = int(self.nodes[0].getnettotals()['totalbytesrecv'])
+ # If this assert fails, we've hit an unlikely race
+ # where the test framework sent a message in between the two halves
+ assert_equal(middle, before + cut_pos)
+ conn.send_raw_message(msg[cut_pos:])
+ conn.sync_with_ping(timeout=1)
+ self.nodes[0].disconnect_p2ps()
def test_magic_bytes(self):
+ self.log.info("Test message with invalid magic bytes disconnects peer")
conn = self.nodes[0].add_p2p_connection(P2PDataStore())
-
- async def swap_magic_bytes():
- conn._on_data = lambda: None # Need to ignore all incoming messages from now, since they come with "invalid" magic bytes
- conn.magic_bytes = b'\x00\x11\x22\x32'
-
- # Call .result() to block until the atomic swap is complete, otherwise
- # we might run into races later on
- asyncio.run_coroutine_threadsafe(swap_magic_bytes(), NetworkThread.network_event_loop).result()
-
- with self.nodes[0].assert_debug_log(['PROCESSMESSAGE: INVALID MESSAGESTART ping']):
- conn.send_message(messages.msg_ping(nonce=0xff))
+ with self.nodes[0].assert_debug_log(['PROCESSMESSAGE: INVALID MESSAGESTART badmsg']):
+ msg = conn.build_message(msg_unrecognized(str_data="d"))
+ # modify magic bytes
+ msg = b'\xff' * 4 + msg[4:]
+ conn.send_raw_message(msg)
conn.wait_for_disconnect(timeout=1)
- self.nodes[0].disconnect_p2ps()
+ self.nodes[0].disconnect_p2ps()
def test_checksum(self):
+ self.log.info("Test message with invalid checksum logs an error")
conn = self.nodes[0].add_p2p_connection(P2PDataStore())
with self.nodes[0].assert_debug_log(['CHECKSUM ERROR (badmsg, 2 bytes), expected 78df0a04 was ffffffff']):
msg = conn.build_message(msg_unrecognized(str_data="d"))
- cut_len = (
- 4 + # magic
- 12 + # command
- 4 #len
- )
+ # Checksum is after start bytes (4B), message type (12B), len (4B)
+ cut_len = 4 + 12 + 4
# modify checksum
msg = msg[:cut_len] + b'\xff' * 4 + msg[cut_len + 4:]
- self.nodes[0].p2p.send_raw_message(msg)
+ conn.send_raw_message(msg)
conn.sync_with_ping(timeout=1)
- self.nodes[0].disconnect_p2ps()
+ self.nodes[0].disconnect_p2ps()
def test_size(self):
+ self.log.info("Test message with oversized payload disconnects peer")
conn = self.nodes[0].add_p2p_connection(P2PDataStore())
with self.nodes[0].assert_debug_log(['']):
- msg = conn.build_message(msg_unrecognized(str_data="d"))
- cut_len = (
- 4 + # magic
- 12 # command
- )
- # modify len to MAX_SIZE + 1
- msg = msg[:cut_len] + struct.pack("<I", 0x02000000 + 1) + msg[cut_len + 4:]
- self.nodes[0].p2p.send_raw_message(msg)
+ msg = msg_unrecognized(str_data="d" * (VALID_DATA_LIMIT + 1))
+ msg = conn.build_message(msg)
+ conn.send_raw_message(msg)
conn.wait_for_disconnect(timeout=1)
- self.nodes[0].disconnect_p2ps()
+ self.nodes[0].disconnect_p2ps()
- def test_command(self):
+ def test_msgtype(self):
+ self.log.info("Test message with invalid message type logs an error")
conn = self.nodes[0].add_p2p_connection(P2PDataStore())
with self.nodes[0].assert_debug_log(['PROCESSMESSAGE: ERRORS IN HEADER']):
msg = msg_unrecognized(str_data="d")
- msg.command = b'\xff' * 12
+ msg.msgtype = b'\xff' * 12
msg = conn.build_message(msg)
- # Modify command
+ # Modify msgtype
msg = msg[:7] + b'\x00' + msg[7 + 1:]
- self.nodes[0].p2p.send_raw_message(msg)
+ conn.send_raw_message(msg)
conn.sync_with_ping(timeout=1)
- self.nodes[0].disconnect_p2ps()
-
- def _tweak_msg_data_size(self, message, wrong_size):
- """
- Return a raw message based on another message but with an incorrect data size in
- the message header.
- """
- raw_msg = self.node.p2p.build_message(message)
-
- bad_size_bytes = struct.pack("<I", wrong_size)
- num_header_bytes_before_size = 4 + 12
-
- # Replace the correct data size in the message with an incorrect one.
- raw_msg_with_wrong_size = (
- raw_msg[:num_header_bytes_before_size] +
- bad_size_bytes +
- raw_msg[(num_header_bytes_before_size + len(bad_size_bytes)):]
- )
- assert len(raw_msg) == len(raw_msg_with_wrong_size)
-
- return raw_msg_with_wrong_size
+ self.nodes[0].disconnect_p2ps()
+
+ def test_oversized_msg(self, msg, size):
+ msg_type = msg.msgtype.decode('ascii')
+ self.log.info("Test {} message of size {} is logged as misbehaving".format(msg_type, size))
+ with self.nodes[0].assert_debug_log(['Misbehaving', '{} message size = {}'.format(msg_type, size)]):
+ self.nodes[0].add_p2p_connection(P2PInterface()).send_and_ping(msg)
+ self.nodes[0].disconnect_p2ps()
+
+ def test_oversized_inv_msg(self):
+ size = MAX_INV_SIZE + 1
+ self.test_oversized_msg(msg_inv([CInv(MSG_TX, 1)] * size), size)
+
+ def test_oversized_getdata_msg(self):
+ size = MAX_INV_SIZE + 1
+ self.test_oversized_msg(msg_getdata([CInv(MSG_TX, 1)] * size), size)
+
+ def test_oversized_headers_msg(self):
+ size = MAX_HEADERS_RESULTS + 1
+ self.test_oversized_msg(msg_headers([CBlockHeader()] * size), size)
+
+ def test_resource_exhaustion(self):
+ self.log.info("Test node stays up despite many large junk messages")
+ conn = self.nodes[0].add_p2p_connection(P2PDataStore())
+ conn2 = self.nodes[0].add_p2p_connection(P2PDataStore())
+ msg_at_size = msg_unrecognized(str_data="b" * VALID_DATA_LIMIT)
+ assert len(msg_at_size.serialize()) == MAX_PROTOCOL_MESSAGE_LENGTH
+
+ self.log.info("(a) Send 80 messages, each of maximum valid data size (4MB)")
+ for _ in range(80):
+ conn.send_message(msg_at_size)
+
+ # Check that, even though the node is being hammered by nonsense from one
+ # connection, it can still service other peers in a timely way.
+ self.log.info("(b) Check node still services peers in a timely way")
+ for _ in range(20):
+ conn2.sync_with_ping(timeout=2)
+
+ self.log.info("(c) Wait for node to drop junk messages, while remaining connected")
+ conn.sync_with_ping(timeout=400)
+
+ # Despite being served up a bunch of nonsense, the peers should still be connected.
+ assert conn.is_connected
+ assert conn2.is_connected
+ self.nodes[0].disconnect_p2ps()
if __name__ == '__main__':
diff --git a/test/functional/p2p_invalid_tx.py b/test/functional/p2p_invalid_tx.py
index 5975a52b2a..c70a892463 100755
--- a/test/functional/p2p_invalid_tx.py
+++ b/test/functional/p2p_invalid_tx.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2015-2019 The Bitcoin Core developers
+# Copyright (c) 2015-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test node responses to invalid transactions.
@@ -118,6 +118,7 @@ class InvalidTxRequestTest(BitcoinTestFramework):
tx_orphan_2_invalid = CTransaction()
tx_orphan_2_invalid.vin.append(CTxIn(outpoint=COutPoint(tx_orphan_1.sha256, 2)))
tx_orphan_2_invalid.vout.append(CTxOut(nValue=11 * COIN, scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE))
+ tx_orphan_2_invalid.calc_sha256()
self.log.info('Send the orphans ... ')
# Send valid orphan txs from p2ps[0]
@@ -148,6 +149,22 @@ class InvalidTxRequestTest(BitcoinTestFramework):
wait_until(lambda: 1 == len(node.getpeerinfo()), timeout=12) # p2ps[1] is no longer connected
assert_equal(expected_mempool, set(node.getrawmempool()))
+ self.log.info('Test orphan pool overflow')
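+ # The node keeps at most 100 orphan txs by default (-maxorphantx), so the 101st forces one to be evicted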
+ orphan_tx_pool = [CTransaction() for _ in range(101)]
+ for i in range(len(orphan_tx_pool)):
+ orphan_tx_pool[i].vin.append(CTxIn(outpoint=COutPoint(i, 333)))
+ orphan_tx_pool[i].vout.append(CTxOut(nValue=11 * COIN, scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE))
+
+ with node.assert_debug_log(['mapOrphan overflow, removed 1 tx']):
+ node.p2p.send_txs_and_test(orphan_tx_pool, node, success=False)
+
+ rejected_parent = CTransaction()
+ rejected_parent.vin.append(CTxIn(outpoint=COutPoint(tx_orphan_2_invalid.sha256, 0)))
+ rejected_parent.vout.append(CTxOut(nValue=11 * COIN, scriptPubKey=SCRIPT_PUB_KEY_OP_TRUE))
+ rejected_parent.rehash()
+ with node.assert_debug_log(['not keeping orphan with rejected parents {}'.format(rejected_parent.hash)]):
+ node.p2p.send_txs_and_test([rejected_parent], node, success=False)
+
if __name__ == '__main__':
InvalidTxRequestTest().main()
diff --git a/test/functional/p2p_leak.py b/test/functional/p2p_leak.py
index c574c1ab9f..3b3dbd08f2 100755
--- a/test/functional/p2p_leak.py
+++ b/test/functional/p2p_leak.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2017-2019 The Bitcoin Core developers
+# Copyright (c) 2017-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test message sending before handshake completion.
@@ -12,7 +12,12 @@ into sending us something it shouldn't."""
import time
-from test_framework.messages import msg_getaddr, msg_ping, msg_verack
+from test_framework.messages import (
+ msg_getaddr,
+ msg_ping,
+ msg_verack,
+ msg_version,
+)
from test_framework.mininode import mininode_lock, P2PInterface
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
@@ -32,7 +37,7 @@ class CLazyNode(P2PInterface):
def bad_message(self, message):
self.unexpected_msg = True
- self.log.info("should not have received message: %s" % message.command)
+ self.log.info("should not have received message: %s" % message.msgtype)
def on_open(self):
self.ever_connected = True
@@ -127,26 +132,31 @@ class P2PLeakTest(BitcoinTestFramework):
self.nodes[0].disconnect_p2ps()
- # Wait until all connections are closed
- wait_until(lambda: len(self.nodes[0].getpeerinfo()) == 0)
-
# Make sure no unexpected messages came in
assert no_version_bannode.unexpected_msg == False
assert no_version_idlenode.unexpected_msg == False
assert no_verack_idlenode.unexpected_msg == False
self.log.info('Check that the version message does not leak the local address of the node')
- time_begin = int(time.time())
p2p_version_store = self.nodes[0].add_p2p_connection(P2PVersionStore())
- time_end = time.time()
ver = p2p_version_store.version_received
- assert_greater_than_or_equal(ver.nTime, time_begin)
- assert_greater_than_or_equal(time_end, ver.nTime)
+ # Check that received time is within one hour of now
+ assert_greater_than_or_equal(ver.nTime, time.time() - 3600)
+ assert_greater_than_or_equal(time.time() + 3600, ver.nTime)
assert_equal(ver.addrFrom.port, 0)
assert_equal(ver.addrFrom.ip, '0.0.0.0')
assert_equal(ver.nStartingHeight, 201)
assert_equal(ver.nRelay, 1)
+ self.log.info('Check that old nodes are disconnected')
+ p2p_old_node = self.nodes[0].add_p2p_connection(P2PInterface(), send_version=False, wait_for_verack=False)
+ old_version_msg = msg_version()
+ old_version_msg.nVersion = 31799
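+ # 31799 is below the node's minimum accepted peer protocol version, so the node should disconnect this peer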
+ wait_until(lambda: p2p_old_node.is_connected)
+ with self.nodes[0].assert_debug_log(['peer=4 using obsolete version 31799; disconnecting']):
+ p2p_old_node.send_message(old_version_msg)
+ p2p_old_node.wait_for_disconnect()
+
if __name__ == '__main__':
P2PLeakTest().main()
diff --git a/test/functional/p2p_leak_tx.py b/test/functional/p2p_leak_tx.py
index c6fcc5e200..da30ad5977 100755
--- a/test/functional/p2p_leak_tx.py
+++ b/test/functional/p2p_leak_tx.py
@@ -1,10 +1,10 @@
#!/usr/bin/env python3
-# Copyright (c) 2017-2018 The Bitcoin Core developers
+# Copyright (c) 2017-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test that we don't leak txs to inbound peers that we haven't yet announced to"""
-from test_framework.messages import msg_getdata, CInv
+from test_framework.messages import msg_getdata, CInv, MSG_TX
from test_framework.mininode import P2PDataStore
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
@@ -37,7 +37,7 @@ class P2PLeakTxTest(BitcoinTestFramework):
txid = gen_node.sendtoaddress(gen_node.getnewaddress(), 0.01)
want_tx = msg_getdata()
- want_tx.inv.append(CInv(t=1, h=int(txid, 16)))
+ want_tx.inv.append(CInv(t=MSG_TX, h=int(txid, 16)))
inbound_peer.last_message.pop('notfound', None)
inbound_peer.send_and_ping(want_tx)
diff --git a/test/functional/p2p_mempool.py b/test/functional/p2p_mempool.py
deleted file mode 100755
index a8fcb181e6..0000000000
--- a/test/functional/p2p_mempool.py
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (c) 2015-2018 The Bitcoin Core developers
-# Distributed under the MIT software license, see the accompanying
-# file COPYING or http://www.opensource.org/licenses/mit-license.php.
-"""Test p2p mempool message.
-
-Test that nodes are disconnected if they send mempool messages when bloom
-filters are not enabled.
-"""
-
-from test_framework.messages import msg_mempool
-from test_framework.mininode import P2PInterface
-from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import assert_equal
-
-class P2PMempoolTests(BitcoinTestFramework):
- def set_test_params(self):
- self.setup_clean_chain = True
- self.num_nodes = 1
- self.extra_args = [["-peerbloomfilters=0"]]
-
- def run_test(self):
- # Add a p2p connection
- self.nodes[0].add_p2p_connection(P2PInterface())
-
- #request mempool
- self.nodes[0].p2p.send_message(msg_mempool())
- self.nodes[0].p2p.wait_for_disconnect()
-
- #mininode must be disconnected at this point
- assert_equal(len(self.nodes[0].getpeerinfo()), 0)
-
-if __name__ == '__main__':
- P2PMempoolTests().main()
diff --git a/test/functional/p2p_nobloomfilter_messages.py b/test/functional/p2p_nobloomfilter_messages.py
new file mode 100755
index 0000000000..accc5dc23c
--- /dev/null
+++ b/test/functional/p2p_nobloomfilter_messages.py
@@ -0,0 +1,48 @@
+#!/usr/bin/env python3
+# Copyright (c) 2015-2018 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test invalid p2p messages for nodes with bloom filters disabled.
+
+Test that, when bloom filters are not enabled, peers are disconnected if:
+1. They send a p2p mempool message
+2. They send a p2p filterload message
+3. They send a p2p filteradd message
+4. They send a p2p filterclear message
+"""
+
+from test_framework.messages import msg_mempool, msg_filteradd, msg_filterload, msg_filterclear
+from test_framework.mininode import P2PInterface
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import assert_equal
+
+
+class P2PNoBloomFilterMessages(BitcoinTestFramework):
+ def set_test_params(self):
+ self.setup_clean_chain = True
+ self.num_nodes = 1
+ self.extra_args = [["-peerbloomfilters=0"]]
+
+ def test_message_causes_disconnect(self, message):
+ """Add a p2p connection that sends a message and check that it disconnects."""
+ peer = self.nodes[0].add_p2p_connection(P2PInterface())
+ peer.send_message(message)
+ peer.wait_for_disconnect()
+ assert_equal(self.nodes[0].getconnectioncount(), 0)
+
+ def run_test(self):
+ self.log.info("Test that peer is disconnected if it sends mempool message")
+ self.test_message_causes_disconnect(msg_mempool())
+
+ self.log.info("Test that peer is disconnected if it sends filterload message")
+ self.test_message_causes_disconnect(msg_filterload())
+
+ self.log.info("Test that peer is disconnected if it sends filteradd message")
+ self.test_message_causes_disconnect(msg_filteradd(data=b'\xcc'))
+
+ self.log.info("Test that peer is disconnected if it sends a filterclear message")
+ self.test_message_causes_disconnect(msg_filterclear())
+
+
+if __name__ == '__main__':
+ P2PNoBloomFilterMessages().main()
diff --git a/test/functional/p2p_node_network_limited.py b/test/functional/p2p_node_network_limited.py
index e6451d9f18..a2f6ea538c 100755
--- a/test/functional/p2p_node_network_limited.py
+++ b/test/functional/p2p_node_network_limited.py
@@ -8,7 +8,7 @@ Tests that a node configured with -prune=550 signals NODE_NETWORK_LIMITED correc
and that it responds to getdata requests for blocks correctly:
- send a block within 288 + 2 of the tip
- disconnect peers who request blocks older than that."""
-from test_framework.messages import CInv, msg_getdata, msg_verack, NODE_NETWORK_LIMITED, NODE_WITNESS
+from test_framework.messages import CInv, MSG_BLOCK, msg_getdata, msg_verack, NODE_NETWORK_LIMITED, NODE_WITNESS
from test_framework.mininode import P2PInterface, mininode_lock
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
@@ -31,7 +31,7 @@ class P2PIgnoreInv(P2PInterface):
wait_until(test_function, timeout=timeout, lock=mininode_lock)
def send_getdata_for_block(self, blockhash):
getdata_request = msg_getdata()
- getdata_request.inv.append(CInv(2, int(blockhash, 16)))
+ getdata_request.inv.append(CInv(MSG_BLOCK, int(blockhash, 16)))
self.send_message(getdata_request)
class NodeNetworkLimitedTest(BitcoinTestFramework):
@@ -42,9 +42,6 @@ class NodeNetworkLimitedTest(BitcoinTestFramework):
def disconnect_all(self):
disconnect_nodes(self.nodes[0], 1)
- disconnect_nodes(self.nodes[1], 0)
- disconnect_nodes(self.nodes[2], 1)
- disconnect_nodes(self.nodes[2], 0)
disconnect_nodes(self.nodes[0], 2)
disconnect_nodes(self.nodes[1], 2)
@@ -86,7 +83,6 @@ class NodeNetworkLimitedTest(BitcoinTestFramework):
assert_equal(node1.firstAddrnServices, expected_services)
self.nodes[0].disconnect_p2ps()
- node1.wait_for_disconnect()
# connect unsynced node 2 with pruned NODE_NETWORK_LIMITED peer
# because node 2 is in IBD and node 0 is a NODE_NETWORK_LIMITED peer, sync must not be possible
diff --git a/test/functional/p2p_permissions.py b/test/functional/p2p_permissions.py
index 3a7bf4bfc3..bea202855d 100755
--- a/test/functional/p2p_permissions.py
+++ b/test/functional/p2p_permissions.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2015-2019 The Bitcoin Core developers
+# Copyright (c) 2015-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test p2p permission message.
@@ -43,6 +43,12 @@ class P2PPermissionsTests(BitcoinTestFramework):
True)
self.checkpermission(
+ # no permission (even with forcerelay)
+ ["-whitelist=@127.0.0.1", "-whitelistforcerelay=1"],
+ [],
+ False)
+
+ self.checkpermission(
# relay permission removed (no specific permissions)
["-whitelist=127.0.0.1", "-whitelistrelay=0"],
["noban", "mempool"],
diff --git a/test/functional/p2p_segwit.py b/test/functional/p2p_segwit.py
index d8dce7fe56..25dd765442 100755
--- a/test/functional/p2p_segwit.py
+++ b/test/functional/p2p_segwit.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2016-2019 The Bitcoin Core developers
+# Copyright (c) 2016-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test segwit transactions and blocks on P2P network."""
@@ -22,6 +22,8 @@ from test_framework.messages import (
CTxOut,
CTxWitness,
MAX_BLOCK_BASE_SIZE,
+ MSG_BLOCK,
+ MSG_TX,
MSG_WITNESS_FLAG,
NODE_NETWORK,
NODE_WITNESS,
@@ -157,9 +159,9 @@ class TestP2PConn(P2PInterface):
def announce_tx_and_wait_for_getdata(self, tx, timeout=60, success=True):
with mininode_lock:
self.last_message.pop("getdata", None)
- self.send_message(msg_inv(inv=[CInv(1, tx.sha256)]))
+ self.send_message(msg_inv(inv=[CInv(MSG_TX, tx.sha256)]))
if success:
- self.wait_for_getdata(timeout)
+ self.wait_for_getdata([tx.sha256], timeout)
else:
time.sleep(timeout)
assert not self.last_message.get("getdata")
@@ -173,10 +175,10 @@ class TestP2PConn(P2PInterface):
if use_header:
self.send_message(msg)
else:
- self.send_message(msg_inv(inv=[CInv(2, block.sha256)]))
+ self.send_message(msg_inv(inv=[CInv(MSG_BLOCK, block.sha256)]))
self.wait_for_getheaders()
self.send_message(msg)
- self.wait_for_getdata()
+ self.wait_for_getdata([block.sha256])
def request_block(self, blockhash, inv_type, timeout=60):
with mininode_lock:
@@ -293,7 +295,7 @@ class SegWitTest(BitcoinTestFramework):
return func_wrapper
- @subtest
+ @subtest # type: ignore
def test_non_witness_transaction(self):
"""See if sending a regular transaction works, and create a utxo to use in later tests."""
# Mine a block with an anyone-can-spend coinbase,
@@ -322,7 +324,7 @@ class SegWitTest(BitcoinTestFramework):
self.utxo.append(UTXO(tx.sha256, 0, 49 * 100000000))
self.nodes[0].generate(1)
- @subtest
+ @subtest # type: ignore
def test_unnecessary_witness_before_segwit_activation(self):
"""Verify that blocks with witnesses are rejected before activation."""
@@ -353,7 +355,7 @@ class SegWitTest(BitcoinTestFramework):
self.utxo.pop(0)
self.utxo.append(UTXO(tx.sha256, 0, tx.vout[0].nValue))
- @subtest
+ @subtest # type: ignore
def test_block_relay(self):
"""Test that block requests to NODE_WITNESS peer are with MSG_WITNESS_FLAG.
@@ -449,7 +451,7 @@ class SegWitTest(BitcoinTestFramework):
self.old_node.announce_tx_and_wait_for_getdata(block4.vtx[0])
assert block4.sha256 not in self.old_node.getdataset
- @subtest
+ @subtest # type: ignore
def test_v0_outputs_arent_spendable(self):
"""Test that v0 outputs aren't spendable before segwit activation.
@@ -531,7 +533,7 @@ class SegWitTest(BitcoinTestFramework):
self.utxo.pop(0)
self.utxo.append(UTXO(txid, 2, value))
- @subtest
+ @subtest # type: ignore
def test_getblocktemplate_before_lockin(self):
txid = int(self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1), 16)
@@ -557,7 +559,7 @@ class SegWitTest(BitcoinTestFramework):
self.nodes[0].generate(1)
self.sync_blocks()
- @subtest
+ @subtest # type: ignore
def test_witness_tx_relay_before_segwit_activation(self):
# Generate a transaction that doesn't require a witness, but send it
@@ -576,7 +578,7 @@ class SegWitTest(BitcoinTestFramework):
# Verify that if a peer doesn't set nServices to include NODE_WITNESS,
# the getdata is just for the non-witness portion.
self.old_node.announce_tx_and_wait_for_getdata(tx)
- assert self.old_node.last_message["getdata"].inv[0].type == 1
+ assert self.old_node.last_message["getdata"].inv[0].type == MSG_TX
# Since we haven't delivered the tx yet, inv'ing the same tx from
# a witness transaction ought not result in a getdata.
@@ -599,7 +601,7 @@ class SegWitTest(BitcoinTestFramework):
self.utxo.pop(0)
self.utxo.append(UTXO(tx_hash, 0, tx_value))
- @subtest
+ @subtest # type: ignore
def test_standardness_v0(self):
"""Test V0 txout standardness.
@@ -677,7 +679,7 @@ class SegWitTest(BitcoinTestFramework):
self.utxo.append(UTXO(tx3.sha256, 0, tx3.vout[0].nValue))
assert_equal(len(self.nodes[1].getrawmempool()), 0)
- @subtest
+ @subtest # type: ignore
def advance_to_segwit_active(self):
"""Mine enough blocks to activate segwit."""
assert not softfork_active(self.nodes[0], 'segwit')
@@ -688,7 +690,7 @@ class SegWitTest(BitcoinTestFramework):
assert softfork_active(self.nodes[0], 'segwit')
self.segwit_active = True
- @subtest
+ @subtest # type: ignore
def test_p2sh_witness(self):
"""Test P2SH wrapped witness programs."""
@@ -757,7 +759,7 @@ class SegWitTest(BitcoinTestFramework):
self.utxo.pop(0)
self.utxo.append(UTXO(spend_tx.sha256, 0, spend_tx.vout[0].nValue))
- @subtest
+ @subtest # type: ignore
def test_witness_commitments(self):
"""Test witness commitments.
@@ -847,7 +849,7 @@ class SegWitTest(BitcoinTestFramework):
self.utxo.pop(0)
self.utxo.append(UTXO(tx3.sha256, 0, tx3.vout[0].nValue))
- @subtest
+ @subtest # type: ignore
def test_block_malleability(self):
# Make sure that a block that has too big a virtual size
@@ -862,13 +864,13 @@ class SegWitTest(BitcoinTestFramework):
# We can't send over the p2p network, because this is too big to relay
# TODO: repeat this test with a block that can be relayed
- self.nodes[0].submitblock(block.serialize().hex())
+ assert_equal('bad-witness-nonce-size', self.nodes[0].submitblock(block.serialize().hex()))
assert self.nodes[0].getbestblockhash() != block.hash
block.vtx[0].wit.vtxinwit[0].scriptWitness.stack.pop()
assert get_virtual_size(block) < MAX_BLOCK_BASE_SIZE
- self.nodes[0].submitblock(block.serialize().hex())
+ assert_equal(None, self.nodes[0].submitblock(block.serialize().hex()))
assert self.nodes[0].getbestblockhash() == block.hash
@@ -887,7 +889,7 @@ class SegWitTest(BitcoinTestFramework):
block.vtx[0].wit.vtxinwit[0].scriptWitness.stack = [ser_uint256(0)]
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
- @subtest
+ @subtest # type: ignore
def test_witness_block_size(self):
# TODO: Test that non-witness carrying blocks can't exceed 1MB
# Skipping this test for now; this is covered in p2p-fullblocktest.py
@@ -965,7 +967,7 @@ class SegWitTest(BitcoinTestFramework):
self.utxo.pop(0)
self.utxo.append(UTXO(block.vtx[-1].sha256, 0, block.vtx[-1].vout[0].nValue))
- @subtest
+ @subtest # type: ignore
def test_submit_block(self):
"""Test that submitblock adds the nonce automatically when possible."""
block = self.build_next_block()
@@ -975,14 +977,14 @@ class SegWitTest(BitcoinTestFramework):
add_witness_commitment(block, nonce=1)
block.vtx[0].wit = CTxWitness() # drop the nonce
block.solve()
- self.nodes[0].submitblock(block.serialize().hex())
+ assert_equal('bad-witness-merkle-match', self.nodes[0].submitblock(block.serialize().hex()))
assert self.nodes[0].getbestblockhash() != block.hash
# Now redo commitment with the standard nonce, but let bitcoind fill it in.
add_witness_commitment(block, nonce=0)
block.vtx[0].wit = CTxWitness()
block.solve()
- self.nodes[0].submitblock(block.serialize().hex())
+ assert_equal(None, self.nodes[0].submitblock(block.serialize().hex()))
assert_equal(self.nodes[0].getbestblockhash(), block.hash)
# This time, add a tx with non-empty witness, but don't supply
@@ -997,11 +999,11 @@ class SegWitTest(BitcoinTestFramework):
block_2.vtx[0].vout.pop()
block_2.vtx[0].wit = CTxWitness()
- self.nodes[0].submitblock(block_2.serialize().hex())
+ assert_equal('bad-txnmrklroot', self.nodes[0].submitblock(block_2.serialize().hex()))
# Tip should not advance!
assert self.nodes[0].getbestblockhash() != block_2.hash
- @subtest
+ @subtest # type: ignore
def test_extra_witness_data(self):
"""Test extra witness data in a transaction."""
@@ -1074,7 +1076,7 @@ class SegWitTest(BitcoinTestFramework):
self.utxo.pop(0)
self.utxo.append(UTXO(tx2.sha256, 0, tx2.vout[0].nValue))
- @subtest
+ @subtest # type: ignore
def test_max_witness_push_length(self):
"""Test that witness stack can only allow up to 520 byte pushes."""
@@ -1111,7 +1113,7 @@ class SegWitTest(BitcoinTestFramework):
self.utxo.pop()
self.utxo.append(UTXO(tx2.sha256, 0, tx2.vout[0].nValue))
- @subtest
+ @subtest # type: ignore
def test_max_witness_program_length(self):
"""Test that witness outputs greater than 10kB can't be spent."""
@@ -1159,7 +1161,7 @@ class SegWitTest(BitcoinTestFramework):
self.utxo.pop()
self.utxo.append(UTXO(tx2.sha256, 0, tx2.vout[0].nValue))
- @subtest
+ @subtest # type: ignore
def test_witness_input_length(self):
"""Test that vin length must match vtxinwit length."""
@@ -1241,7 +1243,7 @@ class SegWitTest(BitcoinTestFramework):
self.utxo.pop()
self.utxo.append(UTXO(tx2.sha256, 0, tx2.vout[0].nValue))
- @subtest
+ @subtest # type: ignore
def test_tx_relay_after_segwit_activation(self):
"""Test transaction relay after segwit activation.
@@ -1310,9 +1312,9 @@ class SegWitTest(BitcoinTestFramework):
tx3.wit.vtxinwit[0].scriptWitness.stack = [witness_program]
# Also check that old_node gets a tx announcement, even though this is
# a witness transaction.
- self.old_node.wait_for_inv([CInv(1, tx2.sha256)]) # wait until tx2 was inv'ed
+ self.old_node.wait_for_inv([CInv(MSG_TX, tx2.sha256)]) # wait until tx2 was inv'ed
test_transaction_acceptance(self.nodes[0], self.test_node, tx3, with_witness=True, accepted=True)
- self.old_node.wait_for_inv([CInv(1, tx3.sha256)])
+ self.old_node.wait_for_inv([CInv(MSG_TX, tx3.sha256)])
# Test that getrawtransaction returns correct witness information
# hash, size, vsize
@@ -1334,7 +1336,7 @@ class SegWitTest(BitcoinTestFramework):
self.utxo.pop(0)
self.utxo.append(UTXO(tx3.sha256, 0, tx3.vout[0].nValue))
- @subtest
+ @subtest # type: ignore
def test_segwit_versions(self):
"""Test validity of future segwit version transactions.
@@ -1416,7 +1418,7 @@ class SegWitTest(BitcoinTestFramework):
# Add utxo to our list
self.utxo.append(UTXO(tx3.sha256, 0, tx3.vout[0].nValue))
- @subtest
+ @subtest # type: ignore
def test_premature_coinbase_witness_spend(self):
block = self.build_next_block()
@@ -1451,7 +1453,7 @@ class SegWitTest(BitcoinTestFramework):
test_witness_block(self.nodes[0], self.test_node, block2, accepted=True)
self.sync_blocks()
- @subtest
+ @subtest # type: ignore
def test_uncompressed_pubkey(self):
"""Test uncompressed pubkey validity in segwit transactions.
@@ -1556,7 +1558,7 @@ class SegWitTest(BitcoinTestFramework):
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
self.utxo.append(UTXO(tx5.sha256, 0, tx5.vout[0].nValue))
- @subtest
+ @subtest # type: ignore
def test_signature_version_1(self):
key = ECKey()
@@ -1738,7 +1740,7 @@ class SegWitTest(BitcoinTestFramework):
for i in range(len(tx.vout)):
self.utxo.append(UTXO(tx.sha256, i, tx.vout[i].nValue))
- @subtest
+ @subtest # type: ignore
def test_non_standard_witness_blinding(self):
"""Test behavior of unnecessary witnesses in transactions does not blind the node for the transaction"""
@@ -1792,7 +1794,7 @@ class SegWitTest(BitcoinTestFramework):
self.utxo.pop(0)
self.utxo.append(UTXO(tx3.sha256, 0, tx3.vout[0].nValue))
- @subtest
+ @subtest # type: ignore
def test_non_standard_witness(self):
"""Test detection of non-standard P2WSH witness"""
pad = chr(1).encode('latin-1')
@@ -1892,16 +1894,15 @@ class SegWitTest(BitcoinTestFramework):
self.utxo.pop(0)
- @subtest
+ @subtest # type: ignore
def test_upgrade_after_activation(self):
"""Test the behavior of starting up a segwit-aware node after the softfork has activated."""
- # Restart with the new binary
- self.stop_node(2)
- self.start_node(2, extra_args=["-segwitheight={}".format(SEGWIT_HEIGHT)])
+ self.restart_node(2, extra_args=["-segwitheight={}".format(SEGWIT_HEIGHT)])
connect_nodes(self.nodes[0], 2)
- self.sync_blocks()
+ # We reconnect more than 100 blocks, give it plenty of time
+ self.sync_blocks(timeout=240)
# Make sure that this peer thinks segwit has activated.
assert softfork_active(self.nodes[2], 'segwit')
@@ -1914,7 +1915,7 @@ class SegWitTest(BitcoinTestFramework):
assert_equal(self.nodes[0].getblock(block_hash), self.nodes[2].getblock(block_hash))
height -= 1
- @subtest
+ @subtest # type: ignore
def test_witness_sigops(self):
"""Test sigop counting is correct inside witnesses."""
diff --git a/test/functional/p2p_sendheaders.py b/test/functional/p2p_sendheaders.py
index 84d818400a..481b1c1841 100755
--- a/test/functional/p2p_sendheaders.py
+++ b/test/functional/p2p_sendheaders.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2014-2019 The Bitcoin Core developers
+# Copyright (c) 2014-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test behavior of headers messages to announce blocks.
@@ -92,6 +92,7 @@ from test_framework.mininode import (
NODE_WITNESS,
P2PInterface,
mininode_lock,
+ MSG_BLOCK,
msg_block,
msg_getblocks,
msg_getdata,
@@ -120,7 +121,7 @@ class BaseNode(P2PInterface):
"""Request data for a list of block hashes."""
msg = msg_getdata()
for x in block_hashes:
- msg.inv.append(CInv(2, x))
+ msg.inv.append(CInv(MSG_BLOCK, x))
self.send_message(msg)
def send_get_headers(self, locator, hashstop):
@@ -131,7 +132,7 @@ class BaseNode(P2PInterface):
def send_block_inv(self, blockhash):
msg = msg_inv()
- msg.inv = [CInv(2, blockhash)]
+ msg.inv = [CInv(MSG_BLOCK, blockhash)]
self.send_message(msg)
def send_header_for_blocks(self, new_blocks):
@@ -144,13 +145,6 @@ class BaseNode(P2PInterface):
getblocks_message.locator.vHave = locator
self.send_message(getblocks_message)
- def wait_for_getdata(self, hash_list, timeout=60):
- if hash_list == []:
- return
-
- test_function = lambda: "getdata" in self.last_message and [x.hash for x in self.last_message["getdata"].inv] == hash_list
- wait_until(test_function, timeout=timeout, lock=mininode_lock)
-
def wait_for_block_announcement(self, block_hash, timeout=60):
test_function = lambda: self.last_blockhash_announced == block_hash
wait_until(test_function, timeout=timeout, lock=mininode_lock)
diff --git a/test/functional/p2p_timeouts.py b/test/functional/p2p_timeouts.py
index c1235f8a6b..5a4fa42988 100755
--- a/test/functional/p2p_timeouts.py
+++ b/test/functional/p2p_timeouts.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2016-2018 The Bitcoin Core developers
+# Copyright (c) 2016-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test various net timeouts.
diff --git a/test/functional/p2p_tx_download.py b/test/functional/p2p_tx_download.py
index b56dc994e7..10f5eea0e5 100755
--- a/test/functional/p2p_tx_download.py
+++ b/test/functional/p2p_tx_download.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2019 The Bitcoin Core developers
+# Copyright (c) 2019-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""
@@ -63,7 +63,7 @@ class TxDownloadTest(BitcoinTestFramework):
txid = 0xdeadbeef
self.log.info("Announce the txid from each incoming peer to node 0")
- msg = msg_inv([CInv(t=1, h=txid)])
+ msg = msg_inv([CInv(t=MSG_TX, h=txid)])
for p in self.nodes[0].p2ps:
p.send_and_ping(msg)
@@ -104,7 +104,7 @@ class TxDownloadTest(BitcoinTestFramework):
self.log.info(
"Announce the transaction to all nodes from all {} incoming peers, but never send it".format(NUM_INBOUND))
- msg = msg_inv([CInv(t=1, h=txid)])
+ msg = msg_inv([CInv(t=MSG_TX, h=txid)])
for p in self.peers:
p.send_and_ping(msg)
@@ -135,13 +135,13 @@ class TxDownloadTest(BitcoinTestFramework):
with mininode_lock:
p.tx_getdata_count = 0
- p.send_message(msg_inv([CInv(t=1, h=i) for i in txids]))
+ p.send_message(msg_inv([CInv(t=MSG_TX, h=i) for i in txids]))
wait_until(lambda: p.tx_getdata_count >= MAX_GETDATA_IN_FLIGHT, lock=mininode_lock)
with mininode_lock:
assert_equal(p.tx_getdata_count, MAX_GETDATA_IN_FLIGHT)
self.log.info("Now check that if we send a NOTFOUND for a transaction, we'll get one more request")
- p.send_message(msg_notfound(vec=[CInv(t=1, h=txids[0])]))
+ p.send_message(msg_notfound(vec=[CInv(t=MSG_TX, h=txids[0])]))
wait_until(lambda: p.tx_getdata_count >= MAX_GETDATA_IN_FLIGHT + 1, timeout=10, lock=mininode_lock)
with mininode_lock:
assert_equal(p.tx_getdata_count, MAX_GETDATA_IN_FLIGHT + 1)
@@ -152,6 +152,10 @@ class TxDownloadTest(BitcoinTestFramework):
wait_until(lambda: p.tx_getdata_count == MAX_GETDATA_IN_FLIGHT + 2)
self.nodes[0].setmocktime(0)
+ def test_spurious_notfound(self):
+ self.log.info('Check that spurious notfound is ignored')
+ self.nodes[0].p2ps[0].send_message(msg_notfound(vec=[CInv(MSG_TX, 1)]))
+
def run_test(self):
# Setup the p2p connections
self.peers = []
@@ -161,6 +165,8 @@ class TxDownloadTest(BitcoinTestFramework):
self.log.info("Nodes are setup with {} incoming connections each".format(NUM_INBOUND))
+ self.test_spurious_notfound()
+
# Test the in-flight max first, because we want no transactions in
# flight ahead of this test.
self.test_in_flight_max()
diff --git a/test/functional/p2p_unrequested_blocks.py b/test/functional/p2p_unrequested_blocks.py
index d18d4b069a..c323168848 100755
--- a/test/functional/p2p_unrequested_blocks.py
+++ b/test/functional/p2p_unrequested_blocks.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2015-2019 The Bitcoin Core developers
+# Copyright (c) 2015-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test processing of unrequested blocks.
@@ -54,7 +54,7 @@ Node1 is unused in tests 3-7:
import time
from test_framework.blocktools import create_block, create_coinbase, create_tx_with_script
-from test_framework.messages import CBlockHeader, CInv, msg_block, msg_headers, msg_inv
+from test_framework.messages import CBlockHeader, CInv, MSG_BLOCK, msg_block, msg_headers, msg_inv
from test_framework.mininode import mininode_lock, P2PInterface
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
@@ -210,7 +210,7 @@ class AcceptBlockTest(BitcoinTestFramework):
with mininode_lock:
# Clear state so we can check the getdata request
test_node.last_message.pop("getdata", None)
- test_node.send_message(msg_inv([CInv(2, block_h3.sha256)]))
+ test_node.send_message(msg_inv([CInv(MSG_BLOCK, block_h3.sha256)]))
test_node.sync_with_ping()
with mininode_lock:
diff --git a/test/functional/rpc_blockchain.py b/test/functional/rpc_blockchain.py
index a6eaaa4539..6273c229ae 100755
--- a/test/functional/rpc_blockchain.py
+++ b/test/functional/rpc_blockchain.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2014-2019 The Bitcoin Core developers
+# Copyright (c) 2014-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test RPCs related to blockchainstate.
diff --git a/test/functional/rpc_createmultisig.py b/test/functional/rpc_createmultisig.py
index a983716177..3c81a4a4e2 100755
--- a/test/functional/rpc_createmultisig.py
+++ b/test/functional/rpc_createmultisig.py
@@ -3,20 +3,21 @@
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test multisig RPCs"""
+import binascii
+import decimal
+import itertools
+import json
+import os
+from test_framework.authproxy import JSONRPCException
from test_framework.descriptors import descsum_create, drop_origins
+from test_framework.key import ECPubKey, ECKey
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_raises_rpc_error,
assert_equal,
)
-from test_framework.key import ECPubKey
-
-import binascii
-import decimal
-import itertools
-import json
-import os
+from test_framework.wallet_util import bytes_to_wif
class RpcCreateMultiSigTest(BitcoinTestFramework):
def set_test_params(self):
@@ -28,10 +29,14 @@ class RpcCreateMultiSigTest(BitcoinTestFramework):
self.skip_if_no_wallet()
def get_keys(self):
+ self.pub = []
+ self.priv = []
node0, node1, node2 = self.nodes
- add = [node1.getnewaddress() for _ in range(self.nkeys)]
- self.pub = [node1.getaddressinfo(a)["pubkey"] for a in add]
- self.priv = [node1.dumpprivkey(a) for a in add]
+ for _ in range(self.nkeys):
+ k = ECKey()
+ k.generate()
+ self.pub.append(k.get_pubkey().get_bytes().hex())
+ self.priv.append(bytes_to_wif(k.get_bytes(), k.is_compressed))
self.final = node2.getnewaddress()
def run_test(self):
@@ -64,17 +69,20 @@ class RpcCreateMultiSigTest(BitcoinTestFramework):
pk_obj.compressed = False
pk2 = binascii.hexlify(pk_obj.get_bytes()).decode()
+ node0.createwallet(wallet_name='wmulti0', disable_private_keys=True)
+ wmulti0 = node0.get_wallet_rpc('wmulti0')
+
# Check all permutations of keys because order matters apparently
for keys in itertools.permutations([pk0, pk1, pk2]):
# Results should be the same as this legacy one
legacy_addr = node0.createmultisig(2, keys, 'legacy')['address']
- assert_equal(legacy_addr, node0.addmultisigaddress(2, keys, '', 'legacy')['address'])
+ assert_equal(legacy_addr, wmulti0.addmultisigaddress(2, keys, '', 'legacy')['address'])
# Generate addresses with the segwit types. These should all make legacy addresses
- assert_equal(legacy_addr, node0.createmultisig(2, keys, 'bech32')['address'])
- assert_equal(legacy_addr, node0.createmultisig(2, keys, 'p2sh-segwit')['address'])
- assert_equal(legacy_addr, node0.addmultisigaddress(2, keys, '', 'bech32')['address'])
- assert_equal(legacy_addr, node0.addmultisigaddress(2, keys, '', 'p2sh-segwit')['address'])
+ assert_equal(legacy_addr, wmulti0.createmultisig(2, keys, 'bech32')['address'])
+ assert_equal(legacy_addr, wmulti0.createmultisig(2, keys, 'p2sh-segwit')['address'])
+ assert_equal(legacy_addr, wmulti0.addmultisigaddress(2, keys, '', 'bech32')['address'])
+ assert_equal(legacy_addr, wmulti0.addmultisigaddress(2, keys, '', 'p2sh-segwit')['address'])
self.log.info('Testing sortedmulti descriptors with BIP 67 test vectors')
with open(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data/rpc_bip67.json'), encoding='utf-8') as f:
@@ -89,6 +97,8 @@ class RpcCreateMultiSigTest(BitcoinTestFramework):
assert_equal(self.nodes[0].deriveaddresses(sorted_key_desc)[0], t['address'])
def check_addmultisigaddress_errors(self):
+ if self.options.descriptors:
+ return
self.log.info('Check that addmultisigaddress fails when the private keys are missing')
addresses = [self.nodes[1].getnewaddress(address_type='legacy') for _ in range(2)]
assert_raises_rpc_error(-5, 'no full public key for address', lambda: self.nodes[0].addmultisigaddress(nrequired=1, keys=addresses))
@@ -115,6 +125,15 @@ class RpcCreateMultiSigTest(BitcoinTestFramework):
def do_multisig(self):
node0, node1, node2 = self.nodes
+ if 'wmulti' not in node1.listwallets():
+ try:
+ node1.loadwallet('wmulti')
+ except JSONRPCException as e:
+ if e.error['code'] == -18 and 'Wallet wmulti not found' in e.error['message']:
+ node1.createwallet(wallet_name='wmulti', disable_private_keys=True)
+ else:
+ raise
+ wmulti = node1.get_wallet_rpc('wmulti')
# Construct the expected descriptor
desc = 'multi({},{})'.format(self.nsigs, ','.join(self.pub))
@@ -134,7 +153,7 @@ class RpcCreateMultiSigTest(BitcoinTestFramework):
assert madd[0:4] == "bcrt" # actually a bech32 address
# compare against addmultisigaddress
- msigw = node1.addmultisigaddress(self.nsigs, self.pub, None, self.output_type)
+ msigw = wmulti.addmultisigaddress(self.nsigs, self.pub, None, self.output_type)
maddw = msigw["address"]
mredeemw = msigw["redeemScript"]
assert_equal(desc, drop_origins(msigw['descriptor']))
@@ -194,6 +213,8 @@ class RpcCreateMultiSigTest(BitcoinTestFramework):
txinfo = node0.getrawtransaction(tx, True, blk)
self.log.info("n/m=%d/%d %s size=%d vsize=%d weight=%d" % (self.nsigs, self.nkeys, self.output_type, txinfo["size"], txinfo["vsize"], txinfo["weight"]))
+ wmulti.unloadwallet()
+
if __name__ == '__main__':
RpcCreateMultiSigTest().main()
diff --git a/test/functional/rpc_dumptxoutset.py b/test/functional/rpc_dumptxoutset.py
index 438e7f68e0..e65787ce08 100755
--- a/test/functional/rpc_dumptxoutset.py
+++ b/test/functional/rpc_dumptxoutset.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2019 The Bitcoin Core developers
+# Copyright (c) 2019-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the generation of UTXO snapshots using `dumptxoutset`.
diff --git a/test/functional/rpc_estimatefee.py b/test/functional/rpc_estimatefee.py
index 8bdecfc8cd..1fff9e1512 100755
--- a/test/functional/rpc_estimatefee.py
+++ b/test/functional/rpc_estimatefee.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2018 The Bitcoin Core developers
+# Copyright (c) 2018-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the estimatefee RPCs.
diff --git a/test/functional/rpc_fundrawtransaction.py b/test/functional/rpc_fundrawtransaction.py
index c435ef24ce..57c8f511ac 100755
--- a/test/functional/rpc_fundrawtransaction.py
+++ b/test/functional/rpc_fundrawtransaction.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2014-2019 The Bitcoin Core developers
+# Copyright (c) 2014-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the fundrawtransaction RPC."""
@@ -271,7 +271,11 @@ class RawTransactionsTest(BitcoinTestFramework):
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
assert_equal("00", dec_tx['vin'][0]['scriptSig']['hex'])
+ # Should fail without add_inputs:
+ assert_raises_rpc_error(-4, "Insufficient funds", self.nodes[2].fundrawtransaction, rawtx, {"add_inputs": False})
+ # add_inputs is enabled by default
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
+
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
matchingOuts = 0
@@ -299,7 +303,10 @@ class RawTransactionsTest(BitcoinTestFramework):
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
- rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
+ # Should fail without add_inputs:
+ assert_raises_rpc_error(-4, "Insufficient funds", self.nodes[2].fundrawtransaction, rawtx, {"add_inputs": False})
+ rawtxfund = self.nodes[2].fundrawtransaction(rawtx, {"add_inputs": True})
+
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
matchingOuts = 0
@@ -330,7 +337,10 @@ class RawTransactionsTest(BitcoinTestFramework):
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
- rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
+ # Should fail without add_inputs:
+ assert_raises_rpc_error(-4, "Insufficient funds", self.nodes[2].fundrawtransaction, rawtx, {"add_inputs": False})
+ rawtxfund = self.nodes[2].fundrawtransaction(rawtx, {"add_inputs": True})
+
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
matchingOuts = 0
@@ -500,11 +510,16 @@ class RawTransactionsTest(BitcoinTestFramework):
self.nodes[1].getnewaddress()
self.nodes[1].getrawchangeaddress()
inputs = []
- outputs = {self.nodes[0].getnewaddress():1.1}
+ outputs = {self.nodes[0].getnewaddress():1.09999500}
rawtx = self.nodes[1].createrawtransaction(inputs, outputs)
+ # fund a transaction that does not require a new key for the change output
+ self.nodes[1].fundrawtransaction(rawtx)
+
# fund a transaction that requires a new key for the change output
# creating the key must be impossible because the wallet is locked
- assert_raises_rpc_error(-4, "Keypool ran out, please call keypoolrefill first", self.nodes[1].fundrawtransaction, rawtx)
+ outputs = {self.nodes[0].getnewaddress():1.1}
+ rawtx = self.nodes[1].createrawtransaction(inputs, outputs)
+ assert_raises_rpc_error(-4, "Transaction needs a change address, but we can't generate it. Please call keypoolrefill first.", self.nodes[1].fundrawtransaction, rawtx)
# Refill the keypool.
self.nodes[1].walletpassphrase("test", 100)
diff --git a/test/functional/rpc_generateblock.py b/test/functional/rpc_generateblock.py
index f23d9ec556..aa58c0af9d 100755
--- a/test/functional/rpc_generateblock.py
+++ b/test/functional/rpc_generateblock.py
@@ -11,6 +11,7 @@ from test_framework.util import (
assert_raises_rpc_error,
)
+
class GenerateBlockTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
@@ -23,14 +24,14 @@ class GenerateBlockTest(BitcoinTestFramework):
self.log.info('Generate an empty block to address')
address = node.getnewaddress()
- hash = node.generateblock(address, [])['hash']
- block = node.getblock(hash, 2)
+ hash = node.generateblock(output=address, transactions=[])['hash']
+ block = node.getblock(blockhash=hash, verbose=2)
assert_equal(len(block['tx']), 1)
assert_equal(block['tx'][0]['vout'][0]['scriptPubKey']['addresses'][0], address)
self.log.info('Generate an empty block to a descriptor')
hash = node.generateblock('addr(' + address + ')', [])['hash']
- block = node.getblock(hash, 2)
+ block = node.getblock(blockhash=hash, verbosity=2)
assert_equal(len(block['tx']), 1)
assert_equal(block['tx'][0]['vout'][0]['scriptPubKey']['addresses'][0], address)
diff --git a/test/functional/rpc_getaddressinfo_label_deprecation.py b/test/functional/rpc_getaddressinfo_label_deprecation.py
deleted file mode 100755
index 5e739ebede..0000000000
--- a/test/functional/rpc_getaddressinfo_label_deprecation.py
+++ /dev/null
@@ -1,43 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (c) 2020 The Bitcoin Core developers
-# Distributed under the MIT software license, see the accompanying
-# file COPYING or http://www.opensource.org/licenses/mit-license.php.
-"""
-Test deprecation of the RPC getaddressinfo `label` field. It has been
-superceded by the `labels` field.
-
-"""
-from test_framework.test_framework import BitcoinTestFramework
-
-class GetAddressInfoLabelDeprecationTest(BitcoinTestFramework):
- def set_test_params(self):
- self.num_nodes = 2
- self.setup_clean_chain = False
- # Start node[0] with -deprecatedrpc=label, and node[1] without.
- self.extra_args = [["-deprecatedrpc=label"], []]
-
- def skip_test_if_missing_module(self):
- self.skip_if_no_wallet()
-
- def test_label_with_deprecatedrpc_flag(self):
- self.log.info("Test getaddressinfo label with -deprecatedrpc flag")
- node = self.nodes[0]
- address = node.getnewaddress()
- info = node.getaddressinfo(address)
- assert "label" in info
-
- def test_label_without_deprecatedrpc_flag(self):
- self.log.info("Test getaddressinfo label without -deprecatedrpc flag")
- node = self.nodes[1]
- address = node.getnewaddress()
- info = node.getaddressinfo(address)
- assert "label" not in info
-
- def run_test(self):
- """Test getaddressinfo label with and without -deprecatedrpc flag."""
- self.test_label_with_deprecatedrpc_flag()
- self.test_label_without_deprecatedrpc_flag()
-
-
-if __name__ == '__main__':
- GetAddressInfoLabelDeprecationTest().main()
diff --git a/test/functional/rpc_getaddressinfo_labels_purpose_deprecation.py b/test/functional/rpc_getaddressinfo_labels_purpose_deprecation.py
deleted file mode 100755
index 3f2e8dee18..0000000000
--- a/test/functional/rpc_getaddressinfo_labels_purpose_deprecation.py
+++ /dev/null
@@ -1,48 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (c) 2020-2019 The Bitcoin Core developers
-# Distributed under the MIT software license, see the accompanying
-# file COPYING or http://www.opensource.org/licenses/mit-license.php.
-"""
-Test deprecation of RPC getaddressinfo `labels` returning an array
-containing a JSON object of `name` and purpose` key-value pairs. It now
-returns an array containing only the label name.
-
-"""
-from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import assert_equal
-
-LABELS_TO_TEST = frozenset({"" , "New 𝅘𝅥𝅯 $<#>&!рыба Label"})
-
-class GetAddressInfoLabelsPurposeDeprecationTest(BitcoinTestFramework):
- def set_test_params(self):
- self.num_nodes = 2
- self.setup_clean_chain = False
- # Start node[0] with -deprecatedrpc=labelspurpose and node[1] without.
- self.extra_args = [["-deprecatedrpc=labelspurpose"], []]
-
- def skip_test_if_missing_module(self):
- self.skip_if_no_wallet()
-
- def test_labels(self, node_num, label_name, expected_value):
- node = self.nodes[node_num]
- address = node.getnewaddress()
- if label_name != "":
- node.setlabel(address, label_name)
- self.log.info(" set label to {}".format(label_name))
- labels = node.getaddressinfo(address)["labels"]
- self.log.info(" labels = {}".format(labels))
- assert_equal(labels, expected_value)
-
- def run_test(self):
- """Test getaddressinfo labels with and without -deprecatedrpc flag."""
- self.log.info("Test getaddressinfo labels with -deprecatedrpc flag")
- for label in LABELS_TO_TEST:
- self.test_labels(node_num=0, label_name=label, expected_value=[{"name": label, "purpose": "receive"}])
-
- self.log.info("Test getaddressinfo labels without -deprecatedrpc flag")
- for label in LABELS_TO_TEST:
- self.test_labels(node_num=1, label_name=label, expected_value=[label])
-
-
-if __name__ == '__main__':
- GetAddressInfoLabelsPurposeDeprecationTest().main()
diff --git a/test/functional/rpc_getblockfilter.py b/test/functional/rpc_getblockfilter.py
index bd93b6f7a4..8fa36445cd 100755
--- a/test/functional/rpc_getblockfilter.py
+++ b/test/functional/rpc_getblockfilter.py
@@ -7,7 +7,7 @@
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal, assert_is_hex_string, assert_raises_rpc_error,
- connect_nodes, disconnect_nodes, sync_blocks
+ connect_nodes, disconnect_nodes
)
FILTER_TYPES = ["basic"]
@@ -30,7 +30,7 @@ class GetBlockFilterTest(BitcoinTestFramework):
# Reorg node 0 to a new chain
connect_nodes(self.nodes[0], 1)
- sync_blocks(self.nodes)
+ self.sync_blocks()
assert_equal(self.nodes[0].getblockcount(), 4)
chain1_hashes = [self.nodes[0].getblockhash(block_height) for block_height in range(4)]
diff --git a/test/functional/rpc_help.py b/test/functional/rpc_help.py
index 027ae368e7..9b981b864e 100755
--- a/test/functional/rpc_help.py
+++ b/test/functional/rpc_help.py
@@ -18,6 +18,8 @@ class HelpRpcTest(BitcoinTestFramework):
def run_test(self):
self.test_categories()
self.dump_help()
+ if self.is_wallet_compiled():
+ self.wallet_help()
def test_categories(self):
node = self.nodes[0]
@@ -53,6 +55,11 @@ class HelpRpcTest(BitcoinTestFramework):
# Make sure the node can generate the help at runtime without crashing
f.write(self.nodes[0].help(call))
+ def wallet_help(self):
+ assert 'getnewaddress ( "label" "address_type" )' in self.nodes[0].help('getnewaddress')
+ self.restart_node(0, extra_args=['-nowallet=1'])
+ assert 'getnewaddress ( "label" "address_type" )' in self.nodes[0].help('getnewaddress')
+
if __name__ == '__main__':
HelpRpcTest().main()
diff --git a/test/functional/rpc_net.py b/test/functional/rpc_net.py
index 376bb35f07..ca26152e7e 100755
--- a/test/functional/rpc_net.py
+++ b/test/functional/rpc_net.py
@@ -28,6 +28,7 @@ from test_framework.messages import (
NODE_WITNESS,
)
+
def assert_net_servicesnames(servicesflag, servicenames):
"""Utility that checks if all flags are correctly decoded in
`getpeerinfo` and `getnetworkinfo`.
@@ -40,14 +41,17 @@ def assert_net_servicesnames(servicesflag, servicenames):
servicesflag_generated |= getattr(test_framework.messages, 'NODE_' + servicename)
assert servicesflag_generated == servicesflag
+
class NetTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 2
- self.extra_args = [["-minrelaytxfee=0.00001000"],["-minrelaytxfee=0.00000500"]]
+ self.extra_args = [["-minrelaytxfee=0.00001000"], ["-minrelaytxfee=0.00000500"]]
self.supports_cli = False
def run_test(self):
+ self.log.info('Get out of IBD for the minfeefilter test')
+ self.nodes[0].generate(1)
self.log.info('Connect nodes both way')
connect_nodes(self.nodes[0], 1)
connect_nodes(self.nodes[1], 0)
@@ -57,6 +61,7 @@ class NetTest(BitcoinTestFramework):
self._test_getnetworkinfo()
self._test_getaddednodeinfo()
self._test_getpeerinfo()
+ self.test_service_flags()
self._test_getnodeaddresses()
def _test_connection_count(self):
@@ -139,6 +144,11 @@ class NetTest(BitcoinTestFramework):
for info in peer_info:
assert_net_servicesnames(int(info[0]["services"], 0x10), info[0]["servicesnames"])
+ def test_service_flags(self):
+ self.nodes[0].add_p2p_connection(P2PInterface(), services=(1 << 4) | (1 << 63))
+ assert_equal(['UNKNOWN[2^4]', 'UNKNOWN[2^63]'], self.nodes[0].getpeerinfo()[-1]['servicesnames'])
+ self.nodes[0].disconnect_p2ps()
+
def _test_getnodeaddresses(self):
self.nodes[0].add_p2p_connection(P2PInterface())
@@ -174,5 +184,6 @@ class NetTest(BitcoinTestFramework):
node_addresses = self.nodes[0].getnodeaddresses(LARGE_REQUEST_COUNT)
assert_greater_than(LARGE_REQUEST_COUNT, len(node_addresses))
+
if __name__ == '__main__':
NetTest().main()
diff --git a/test/functional/rpc_psbt.py b/test/functional/rpc_psbt.py
index 3a63377545..e5e62fd646 100755
--- a/test/functional/rpc_psbt.py
+++ b/test/functional/rpc_psbt.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2018-2019 The Bitcoin Core developers
+# Copyright (c) 2018-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the Partially Signed Transaction RPCs.
@@ -8,6 +8,7 @@
from decimal import Decimal
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
+ assert_approx,
assert_equal,
assert_greater_than,
assert_raises_rpc_error,
@@ -37,29 +38,33 @@ class PSBTTest(BitcoinTestFramework):
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
+ # TODO: Re-enable this test with segwit v1
def test_utxo_conversion(self):
mining_node = self.nodes[2]
offline_node = self.nodes[0]
online_node = self.nodes[1]
# Disconnect offline node from others
+ # Topology of test network is linear, so this one call is enough
disconnect_nodes(offline_node, 1)
- disconnect_nodes(online_node, 0)
- disconnect_nodes(offline_node, 2)
- disconnect_nodes(mining_node, 0)
+
+ # Create watchonly on online_node
+ online_node.createwallet(wallet_name='wonline', disable_private_keys=True)
+ wonline = online_node.get_wallet_rpc('wonline')
+ w2 = online_node.get_wallet_rpc('')
# Mine a transaction that credits the offline address
offline_addr = offline_node.getnewaddress(address_type="p2sh-segwit")
- online_addr = online_node.getnewaddress(address_type="p2sh-segwit")
- online_node.importaddress(offline_addr, "", False)
+ online_addr = w2.getnewaddress(address_type="p2sh-segwit")
+ wonline.importaddress(offline_addr, "", False)
mining_node.sendtoaddress(address=offline_addr, amount=1.0)
mining_node.generate(nblocks=1)
self.sync_blocks([mining_node, online_node])
# Construct an unsigned PSBT on the online node (who doesn't know the output is Segwit, so will include a non-witness UTXO)
- utxos = online_node.listunspent(addresses=[offline_addr])
- raw = online_node.createrawtransaction([{"txid":utxos[0]["txid"], "vout":utxos[0]["vout"]}],[{online_addr:0.9999}])
- psbt = online_node.walletprocesspsbt(online_node.converttopsbt(raw))["psbt"]
+ utxos = wonline.listunspent(addresses=[offline_addr])
+ raw = wonline.createrawtransaction([{"txid":utxos[0]["txid"], "vout":utxos[0]["vout"]}],[{online_addr:0.9999}])
+ psbt = wonline.walletprocesspsbt(online_node.converttopsbt(raw))["psbt"]
assert "non_witness_utxo" in mining_node.decodepsbt(psbt)["inputs"][0]
# Have the offline node sign the PSBT (which will update the UTXO to segwit)
@@ -72,6 +77,8 @@ class PSBTTest(BitcoinTestFramework):
self.sync_blocks([mining_node, online_node])
assert_equal(online_node.gettxout(txid,0)["confirmations"], 1)
+ wonline.unloadwallet()
+
# Reconnect
connect_nodes(self.nodes[0], 1)
connect_nodes(self.nodes[0], 2)
@@ -80,6 +87,13 @@ class PSBTTest(BitcoinTestFramework):
# Create and fund a raw tx for sending 10 BTC
psbtx1 = self.nodes[0].walletcreatefundedpsbt([], {self.nodes[2].getnewaddress():10})['psbt']
+ # If inputs are specified, do not automatically add more:
+ utxo1 = self.nodes[0].listunspent()[0]
+ assert_raises_rpc_error(-4, "Insufficient funds", self.nodes[0].walletcreatefundedpsbt, [{"txid": utxo1['txid'], "vout": utxo1['vout']}], {self.nodes[2].getnewaddress():90})
+
+ psbtx1 = self.nodes[0].walletcreatefundedpsbt([{"txid": utxo1['txid'], "vout": utxo1['vout']}], {self.nodes[2].getnewaddress():90}, 0, {"add_inputs": True})['psbt']
+ assert_equal(len(self.nodes[0].decodepsbt(psbtx1)['tx']['vin']), 2)
+
# Node 1 should not be able to add anything to it but still return the psbtx same as before
psbtx = self.nodes[1].walletprocesspsbt(psbtx1)['psbt']
assert_equal(psbtx1, psbtx)
@@ -89,13 +103,23 @@ class PSBTTest(BitcoinTestFramework):
final_tx = self.nodes[0].finalizepsbt(signed_tx)['hex']
self.nodes[0].sendrawtransaction(final_tx)
- # Create p2sh, p2wpkh, and p2wsh addresses
+ # Get pubkeys
pubkey0 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())['pubkey']
pubkey1 = self.nodes[1].getaddressinfo(self.nodes[1].getnewaddress())['pubkey']
pubkey2 = self.nodes[2].getaddressinfo(self.nodes[2].getnewaddress())['pubkey']
- p2sh = self.nodes[1].addmultisigaddress(2, [pubkey0, pubkey1, pubkey2], "", "legacy")['address']
- p2wsh = self.nodes[1].addmultisigaddress(2, [pubkey0, pubkey1, pubkey2], "", "bech32")['address']
- p2sh_p2wsh = self.nodes[1].addmultisigaddress(2, [pubkey0, pubkey1, pubkey2], "", "p2sh-segwit")['address']
+
+ # Setup watchonly wallets
+ self.nodes[2].createwallet(wallet_name='wmulti', disable_private_keys=True)
+ wmulti = self.nodes[2].get_wallet_rpc('wmulti')
+
+ # Create all the addresses
+ p2sh = wmulti.addmultisigaddress(2, [pubkey0, pubkey1, pubkey2], "", "legacy")['address']
+ p2wsh = wmulti.addmultisigaddress(2, [pubkey0, pubkey1, pubkey2], "", "bech32")['address']
+ p2sh_p2wsh = wmulti.addmultisigaddress(2, [pubkey0, pubkey1, pubkey2], "", "p2sh-segwit")['address']
+ if not self.options.descriptors:
+ wmulti.importaddress(p2sh)
+ wmulti.importaddress(p2wsh)
+ wmulti.importaddress(p2sh_p2wsh)
p2wpkh = self.nodes[1].getnewaddress("", "bech32")
p2pkh = self.nodes[1].getnewaddress("", "legacy")
p2sh_p2wpkh = self.nodes[1].getnewaddress("", "p2sh-segwit")
@@ -133,24 +157,31 @@ class PSBTTest(BitcoinTestFramework):
# spend single key from node 1
rawtx = self.nodes[1].walletcreatefundedpsbt([{"txid":txid,"vout":p2wpkh_pos},{"txid":txid,"vout":p2sh_p2wpkh_pos},{"txid":txid,"vout":p2pkh_pos}], {self.nodes[1].getnewaddress():29.99})['psbt']
walletprocesspsbt_out = self.nodes[1].walletprocesspsbt(rawtx)
+ # Make sure it has both types of UTXOs
+ decoded = self.nodes[1].decodepsbt(walletprocesspsbt_out['psbt'])
+ assert 'non_witness_utxo' in decoded['inputs'][0]
+ assert 'witness_utxo' in decoded['inputs'][0]
assert_equal(walletprocesspsbt_out['complete'], True)
self.nodes[1].sendrawtransaction(self.nodes[1].finalizepsbt(walletprocesspsbt_out['psbt'])['hex'])
# feeRate of 0.1 BTC / KB produces a total fee slightly below -maxtxfee (~0.05280000):
- res = self.nodes[1].walletcreatefundedpsbt([{"txid":txid,"vout":p2wpkh_pos},{"txid":txid,"vout":p2sh_p2wpkh_pos},{"txid":txid,"vout":p2pkh_pos}], {self.nodes[1].getnewaddress():29.99}, 0, {"feeRate": 0.1})
- assert_greater_than(res["fee"], 0.05)
- assert_greater_than(0.06, res["fee"])
+ res = self.nodes[1].walletcreatefundedpsbt([{"txid":txid,"vout":p2wpkh_pos},{"txid":txid,"vout":p2sh_p2wpkh_pos},{"txid":txid,"vout":p2pkh_pos}], {self.nodes[1].getnewaddress():29.99}, 0, {"feeRate": 0.1, "add_inputs": True})
+ assert_approx(res["fee"], 0.055, 0.005)
# feeRate of 10 BTC / KB produces a total fee well above -maxtxfee
# previously this was silently capped at -maxtxfee
- assert_raises_rpc_error(-4, "Fee exceeds maximum configured by -maxtxfee", self.nodes[1].walletcreatefundedpsbt, [{"txid":txid,"vout":p2wpkh_pos},{"txid":txid,"vout":p2sh_p2wpkh_pos},{"txid":txid,"vout":p2pkh_pos}], {self.nodes[1].getnewaddress():29.99}, 0, {"feeRate": 10})
+ assert_raises_rpc_error(-4, "Fee exceeds maximum configured by -maxtxfee", self.nodes[1].walletcreatefundedpsbt, [{"txid":txid,"vout":p2wpkh_pos},{"txid":txid,"vout":p2sh_p2wpkh_pos},{"txid":txid,"vout":p2pkh_pos}], {self.nodes[1].getnewaddress():29.99}, 0, {"feeRate": 10, "add_inputs": True})
+ assert_raises_rpc_error(-4, "Fee exceeds maximum configured by -maxtxfee", self.nodes[1].walletcreatefundedpsbt, [{"txid":txid,"vout":p2wpkh_pos},{"txid":txid,"vout":p2sh_p2wpkh_pos},{"txid":txid,"vout":p2pkh_pos}], {self.nodes[1].getnewaddress():1}, 0, {"feeRate": 10, "add_inputs": False})
# partially sign multisig things with node 1
- psbtx = self.nodes[1].walletcreatefundedpsbt([{"txid":txid,"vout":p2wsh_pos},{"txid":txid,"vout":p2sh_pos},{"txid":txid,"vout":p2sh_p2wsh_pos}], {self.nodes[1].getnewaddress():29.99})['psbt']
+ psbtx = wmulti.walletcreatefundedpsbt(inputs=[{"txid":txid,"vout":p2wsh_pos},{"txid":txid,"vout":p2sh_pos},{"txid":txid,"vout":p2sh_p2wsh_pos}], outputs={self.nodes[1].getnewaddress():29.99}, options={'changeAddress': self.nodes[1].getrawchangeaddress()})['psbt']
walletprocesspsbt_out = self.nodes[1].walletprocesspsbt(psbtx)
psbtx = walletprocesspsbt_out['psbt']
assert_equal(walletprocesspsbt_out['complete'], False)
+ # Unload wmulti, we don't need it anymore
+ wmulti.unloadwallet()
+
# partially sign with node 2. This should be complete and sendable
walletprocesspsbt_out = self.nodes[2].walletprocesspsbt(psbtx)
assert_equal(walletprocesspsbt_out['complete'], True)
@@ -221,7 +252,7 @@ class PSBTTest(BitcoinTestFramework):
# replaceable arg
block_height = self.nodes[0].getblockcount()
unspent = self.nodes[0].listunspent()[0]
- psbtx_info = self.nodes[0].walletcreatefundedpsbt([{"txid":unspent["txid"], "vout":unspent["vout"]}], [{self.nodes[2].getnewaddress():unspent["amount"]+1}], block_height+2, {"replaceable": False}, False)
+ psbtx_info = self.nodes[0].walletcreatefundedpsbt([{"txid":unspent["txid"], "vout":unspent["vout"]}], [{self.nodes[2].getnewaddress():unspent["amount"]+1}], block_height+2, {"replaceable": False, "add_inputs": True}, False)
decoded_psbt = self.nodes[0].decodepsbt(psbtx_info["psbt"])
for tx_in, psbt_in in zip(decoded_psbt["tx"]["vin"], decoded_psbt["inputs"]):
assert_greater_than(tx_in["sequence"], MAX_BIP125_RBF_SEQUENCE)
@@ -229,7 +260,7 @@ class PSBTTest(BitcoinTestFramework):
assert_equal(decoded_psbt["tx"]["locktime"], block_height+2)
# Same construction with only locktime set and RBF explicitly enabled
- psbtx_info = self.nodes[0].walletcreatefundedpsbt([{"txid":unspent["txid"], "vout":unspent["vout"]}], [{self.nodes[2].getnewaddress():unspent["amount"]+1}], block_height, {"replaceable": True}, True)
+ psbtx_info = self.nodes[0].walletcreatefundedpsbt([{"txid":unspent["txid"], "vout":unspent["vout"]}], [{self.nodes[2].getnewaddress():unspent["amount"]+1}], block_height, {"replaceable": True, "add_inputs": True}, True)
decoded_psbt = self.nodes[0].decodepsbt(psbtx_info["psbt"])
for tx_in, psbt_in in zip(decoded_psbt["tx"]["vin"], decoded_psbt["inputs"]):
assert_equal(tx_in["sequence"], MAX_BIP125_RBF_SEQUENCE)
@@ -237,7 +268,7 @@ class PSBTTest(BitcoinTestFramework):
assert_equal(decoded_psbt["tx"]["locktime"], block_height)
# Same construction without optional arguments
- psbtx_info = self.nodes[0].walletcreatefundedpsbt([{"txid":unspent["txid"], "vout":unspent["vout"]}], [{self.nodes[2].getnewaddress():unspent["amount"]+1}])
+ psbtx_info = self.nodes[0].walletcreatefundedpsbt([], [{self.nodes[2].getnewaddress():unspent["amount"]+1}])
decoded_psbt = self.nodes[0].decodepsbt(psbtx_info["psbt"])
for tx_in, psbt_in in zip(decoded_psbt["tx"]["vin"], decoded_psbt["inputs"]):
assert_equal(tx_in["sequence"], MAX_BIP125_RBF_SEQUENCE)
@@ -246,7 +277,7 @@ class PSBTTest(BitcoinTestFramework):
# Same construction without optional arguments, for a node with -walletrbf=0
unspent1 = self.nodes[1].listunspent()[0]
- psbtx_info = self.nodes[1].walletcreatefundedpsbt([{"txid":unspent1["txid"], "vout":unspent1["vout"]}], [{self.nodes[2].getnewaddress():unspent1["amount"]+1}], block_height)
+ psbtx_info = self.nodes[1].walletcreatefundedpsbt([{"txid":unspent1["txid"], "vout":unspent1["vout"]}], [{self.nodes[2].getnewaddress():unspent1["amount"]+1}], block_height, {"add_inputs": True})
decoded_psbt = self.nodes[1].decodepsbt(psbtx_info["psbt"])
for tx_in, psbt_in in zip(decoded_psbt["tx"]["vin"], decoded_psbt["inputs"]):
assert_greater_than(tx_in["sequence"], MAX_BIP125_RBF_SEQUENCE)
@@ -257,7 +288,7 @@ class PSBTTest(BitcoinTestFramework):
self.nodes[0].walletcreatefundedpsbt([], [{self.nodes[2].getnewaddress():unspent["amount"]+1}], block_height+2, {"changeAddress":self.nodes[1].getnewaddress()}, False)
# Regression test for 14473 (mishandling of already-signed witness transaction):
- psbtx_info = self.nodes[0].walletcreatefundedpsbt([{"txid":unspent["txid"], "vout":unspent["vout"]}], [{self.nodes[2].getnewaddress():unspent["amount"]+1}])
+ psbtx_info = self.nodes[0].walletcreatefundedpsbt([{"txid":unspent["txid"], "vout":unspent["vout"]}], [{self.nodes[2].getnewaddress():unspent["amount"]+1}], 0, {"add_inputs": True})
complete_psbt = self.nodes[0].walletprocesspsbt(psbtx_info["psbt"])
double_processed_psbt = self.nodes[0].walletprocesspsbt(complete_psbt["psbt"])
assert_equal(complete_psbt, double_processed_psbt)
@@ -297,7 +328,7 @@ class PSBTTest(BitcoinTestFramework):
# Signer tests
for i, signer in enumerate(signers):
- self.nodes[2].createwallet("wallet{}".format(i))
+ self.nodes[2].createwallet(wallet_name="wallet{}".format(i))
wrpc = self.nodes[2].get_wallet_rpc("wallet{}".format(i))
for key in signer['privkeys']:
wrpc.importprivkey(key)
@@ -326,7 +357,8 @@ class PSBTTest(BitcoinTestFramework):
for i, signer in enumerate(signers):
self.nodes[2].unloadwallet("wallet{}".format(i))
- self.test_utxo_conversion()
+ # TODO: Re-enable this for segwit v1
+ # self.test_utxo_conversion()
# Test that psbts with p2pkh outputs are created properly
p2pkh = self.nodes[0].getnewaddress(address_type='legacy')
@@ -449,7 +481,7 @@ class PSBTTest(BitcoinTestFramework):
assert_equal(analysis['next'], 'creator')
assert_equal(analysis['error'], 'PSBT is not valid. Input 0 specifies invalid prevout')
- assert_raises_rpc_error(-25, 'Missing inputs', self.nodes[0].walletprocesspsbt, 'cHNidP8BAJoCAAAAAkvEW8NnDtdNtDpsmze+Ht2LH35IJcKv00jKAlUs21RrAwAAAAD/////S8Rbw2cO1020OmybN74e3Ysffkglwq/TSMoCVSzbVGsBAAAAAP7///8CwLYClQAAAAAWABSNJKzjaUb3uOxixsvh1GGE3fW7zQD5ApUAAAAAFgAUKNw0x8HRctAgmvoevm4u1SbN7XIAAAAAAAEAnQIAAAACczMa321tVHuN4GKWKRncycI22aX3uXgwSFUKM2orjRsBAAAAAP7///9zMxrfbW1Ue43gYpYpGdzJwjbZpfe5eDBIVQozaiuNGwAAAAAA/v///wIA+QKVAAAAABl2qRT9zXUVA8Ls5iVqynLHe5/vSe1XyYisQM0ClQAAAAAWABRmWQUcjSjghQ8/uH4Bn/zkakwLtAAAAAAAAQEfQM0ClQAAAAAWABRmWQUcjSjghQ8/uH4Bn/zkakwLtAAAAA==')
+ assert_raises_rpc_error(-25, 'Inputs missing or spent', self.nodes[0].walletprocesspsbt, 'cHNidP8BAJoCAAAAAkvEW8NnDtdNtDpsmze+Ht2LH35IJcKv00jKAlUs21RrAwAAAAD/////S8Rbw2cO1020OmybN74e3Ysffkglwq/TSMoCVSzbVGsBAAAAAP7///8CwLYClQAAAAAWABSNJKzjaUb3uOxixsvh1GGE3fW7zQD5ApUAAAAAFgAUKNw0x8HRctAgmvoevm4u1SbN7XIAAAAAAAEAnQIAAAACczMa321tVHuN4GKWKRncycI22aX3uXgwSFUKM2orjRsBAAAAAP7///9zMxrfbW1Ue43gYpYpGdzJwjbZpfe5eDBIVQozaiuNGwAAAAAA/v///wIA+QKVAAAAABl2qRT9zXUVA8Ls5iVqynLHe5/vSe1XyYisQM0ClQAAAAAWABRmWQUcjSjghQ8/uH4Bn/zkakwLtAAAAAAAAQEfQM0ClQAAAAAWABRmWQUcjSjghQ8/uH4Bn/zkakwLtAAAAA==')
if __name__ == '__main__':
PSBTTest().main()
diff --git a/test/functional/rpc_scantxoutset.py b/test/functional/rpc_scantxoutset.py
index c3d34be0dd..861b394e70 100755
--- a/test/functional/rpc_scantxoutset.py
+++ b/test/functional/rpc_scantxoutset.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2018-2019 The Bitcoin Core developers
+# Copyright (c) 2018-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the scantxoutset rpc call."""
diff --git a/test/functional/rpc_signrawtransaction.py b/test/functional/rpc_signrawtransaction.py
index 3c7b398d2d..3d08202724 100755
--- a/test/functional/rpc_signrawtransaction.py
+++ b/test/functional/rpc_signrawtransaction.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2015-2019 The Bitcoin Core developers
+# Copyright (c) 2015-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test transaction signing using the signrawtransaction* RPCs."""
diff --git a/test/functional/rpc_users.py b/test/functional/rpc_users.py
index b75ce15f2e..daf02fc4f3 100755
--- a/test/functional/rpc_users.py
+++ b/test/functional/rpc_users.py
@@ -20,6 +20,7 @@ import string
import configparser
import sys
+
def call_with_auth(node, user, password):
url = urllib.parse.urlparse(node.url)
headers = {"Authorization": "Basic " + str_to_b64str('{}:{}'.format(user, password))}
@@ -64,9 +65,9 @@ class HTTPBasicsTest(BitcoinTestFramework):
self.password = lines[3]
with open(os.path.join(get_datadir_path(self.options.tmpdir, 0), "bitcoin.conf"), 'a', encoding='utf8') as f:
- f.write(rpcauth+"\n")
- f.write(rpcauth2+"\n")
- f.write(rpcauth3+"\n")
+ f.write(rpcauth + "\n")
+ f.write(rpcauth2 + "\n")
+ f.write(rpcauth3 + "\n")
with open(os.path.join(get_datadir_path(self.options.tmpdir, 1), "bitcoin.conf"), 'a', encoding='utf8') as f:
f.write("rpcuser={}\n".format(self.rpcuser))
f.write("rpcpassword={}\n".format(self.rpcpassword))
@@ -76,19 +77,16 @@ class HTTPBasicsTest(BitcoinTestFramework):
assert_equal(200, call_with_auth(node, user, password).status)
self.log.info('Wrong...')
- assert_equal(401, call_with_auth(node, user, password+'wrong').status)
+ assert_equal(401, call_with_auth(node, user, password + 'wrong').status)
self.log.info('Wrong...')
- assert_equal(401, call_with_auth(node, user+'wrong', password).status)
+ assert_equal(401, call_with_auth(node, user + 'wrong', password).status)
self.log.info('Wrong...')
- assert_equal(401, call_with_auth(node, user+'wrong', password+'wrong').status)
+ assert_equal(401, call_with_auth(node, user + 'wrong', password + 'wrong').status)
def run_test(self):
-
- ##################################################
- # Check correctness of the rpcauth config option #
- ##################################################
+ self.log.info('Check correctness of the rpcauth config option')
url = urllib.parse.urlparse(self.nodes[0].url)
self.test_auth(self.nodes[0], url.username, url.password)
@@ -96,12 +94,18 @@ class HTTPBasicsTest(BitcoinTestFramework):
self.test_auth(self.nodes[0], 'rt2', self.rt2password)
self.test_auth(self.nodes[0], self.user, self.password)
- ###############################################################
- # Check correctness of the rpcuser/rpcpassword config options #
- ###############################################################
+ self.log.info('Check correctness of the rpcuser/rpcpassword config options')
url = urllib.parse.urlparse(self.nodes[1].url)
self.test_auth(self.nodes[1], self.rpcuser, self.rpcpassword)
+ self.log.info('Check that failure to write cookie file will abort the node gracefully')
+ self.stop_node(0)
+ cookie_file = os.path.join(get_datadir_path(self.options.tmpdir, 0), self.chain, '.cookie.tmp')
+ os.mkdir(cookie_file)
+ init_error = 'Error: Unable to start HTTP server. See debug log for details.'
+ self.nodes[0].assert_start_raises_init_error(expected_msg=init_error)
+
+
if __name__ == '__main__':
- HTTPBasicsTest ().main ()
+ HTTPBasicsTest().main()
diff --git a/test/functional/test_framework/address.py b/test/functional/test_framework/address.py
index 6a7e91216a..9506b63f82 100644
--- a/test/functional/test_framework/address.py
+++ b/test/functional/test_framework/address.py
@@ -1,16 +1,19 @@
#!/usr/bin/env python3
-# Copyright (c) 2016-2019 The Bitcoin Core developers
+# Copyright (c) 2016-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Encode and decode BASE58, P2PKH and P2SH addresses."""
import enum
+import unittest
from .script import hash256, hash160, sha256, CScript, OP_0
from .util import hex_str_to_bytes
from . import segwit_addr
+from test_framework.util import assert_equal
+
ADDRESS_BCRT1_UNSPENDABLE = 'bcrt1qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq3xueyj'
ADDRESS_BCRT1_UNSPENDABLE_DESCRIPTOR = 'addr(bcrt1qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq3xueyj)#juyq9d97'
# Coins sent to this address can be spent with a witness stack of just OP_TRUE
@@ -41,7 +44,32 @@ def byte_to_base58(b, version):
str = str[2:]
return result
-# TODO: def base58_decode
+
+def base58_to_byte(s, verify_checksum=True):
+ if not s:
+ return b''
+ n = 0
+ for c in s:
+ n *= 58
+ assert c in chars
+ digit = chars.index(c)
+ n += digit
+ h = '%x' % n
+ if len(h) % 2:
+ h = '0' + h
+ res = n.to_bytes((n.bit_length() + 7) // 8, 'big')
+ pad = 0
+ for c in s:
+ if c == chars[0]:
+ pad += 1
+ else:
+ break
+ res = b'\x00' * pad + res
+ if verify_checksum:
+ assert_equal(hash256(res[:-4])[:4], res[-4:])
+
+ return res[1:-4], int(res[0])
+
def keyhash_to_p2pkh(hash, main = False):
assert len(hash) == 20
@@ -100,3 +128,22 @@ def check_script(script):
if (type(script) is bytes or type(script) is CScript):
return script
assert False
+
+
+class TestFrameworkScript(unittest.TestCase):
+ def test_base58encodedecode(self):
+ def check_base58(data, version):
+ self.assertEqual(base58_to_byte(byte_to_base58(data, version)), (data, version))
+
+ check_base58(b'\x1f\x8e\xa1p*{\xd4\x94\x1b\xca\tA\xb8R\xc4\xbb\xfe\xdb.\x05', 111)
+ check_base58(b':\x0b\x05\xf4\xd7\xf6l;\xa7\x00\x9fE50)l\x84\\\xc9\xcf', 111)
+ check_base58(b'A\xc1\xea\xf1\x11\x80%Y\xba\xd6\x1b`\xd6+\x1f\x89|c\x92\x8a', 111)
+ check_base58(b'\0A\xc1\xea\xf1\x11\x80%Y\xba\xd6\x1b`\xd6+\x1f\x89|c\x92\x8a', 111)
+ check_base58(b'\0\0A\xc1\xea\xf1\x11\x80%Y\xba\xd6\x1b`\xd6+\x1f\x89|c\x92\x8a', 111)
+ check_base58(b'\0\0\0A\xc1\xea\xf1\x11\x80%Y\xba\xd6\x1b`\xd6+\x1f\x89|c\x92\x8a', 111)
+ check_base58(b'\x1f\x8e\xa1p*{\xd4\x94\x1b\xca\tA\xb8R\xc4\xbb\xfe\xdb.\x05', 0)
+ check_base58(b':\x0b\x05\xf4\xd7\xf6l;\xa7\x00\x9fE50)l\x84\\\xc9\xcf', 0)
+ check_base58(b'A\xc1\xea\xf1\x11\x80%Y\xba\xd6\x1b`\xd6+\x1f\x89|c\x92\x8a', 0)
+ check_base58(b'\0A\xc1\xea\xf1\x11\x80%Y\xba\xd6\x1b`\xd6+\x1f\x89|c\x92\x8a', 0)
+ check_base58(b'\0\0A\xc1\xea\xf1\x11\x80%Y\xba\xd6\x1b`\xd6+\x1f\x89|c\x92\x8a', 0)
+ check_base58(b'\0\0\0A\xc1\xea\xf1\x11\x80%Y\xba\xd6\x1b`\xd6+\x1f\x89|c\x92\x8a', 0)
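The verify_checksum branch in base58_to_byte applies the Base58Check rule: the last four decoded bytes must equal the first four bytes of a double-SHA256 over everything preceding them. A self-contained illustration using only hashlib (version byte 111 marks a testnet/regtest P2PKH payload, matching the first test vector above; the local hash256 is a stand-in for the one imported from script.py):

import hashlib

def hash256(s):
    # Double SHA-256, the hash used for the 4-byte Base58Check checksum.
    return hashlib.sha256(hashlib.sha256(s).digest()).digest()

payload = bytes([111]) + b'\x1f\x8e\xa1p*{\xd4\x94\x1b\xca\tA\xb8R\xc4\xbb\xfe\xdb.\x05'
checksum = hash256(payload)[:4]
# byte_to_base58 appends this checksum before encoding; base58_to_byte
# recomputes it and asserts it equals the trailing four decoded bytes.
encoded_payload = payload + checksum
assert hash256(encoded_payload[:-4])[:4] == encoded_payload[-4:]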
diff --git a/test/functional/test_framework/authproxy.py b/test/functional/test_framework/authproxy.py
index 05308931e3..81eb881234 100644
--- a/test/functional/test_framework/authproxy.py
+++ b/test/functional/test_framework/authproxy.py
@@ -115,6 +115,8 @@ class AuthServiceProxy():
except OSError as e:
retry = (
'[WinError 10053] An established connection was aborted by the software in your host machine' in str(e))
+ # Workaround for a bug on macOS. See https://bugs.python.org/issue33450
+ retry = retry or ('[Errno 41] Protocol wrong type for socket' in str(e))
if retry:
self.__conn.close()
self.__conn.request(method, path, postdata, headers)
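The added branch mirrors the existing Windows-specific retry: when a kept-alive HTTP connection has gone stale, macOS can surface the failure as OSError with errno 41 ("Protocol wrong type for socket"), and the proxy recovers by closing the connection and re-issuing the request once. The pattern in isolation (a sketch of the idea, not the proxy's exact code):

import http.client

def request_with_one_retry(conn: http.client.HTTPConnection, method, path, body, headers):
    # Retry a single time if the persistent connection died in a known
    # transient way (connection aborted on Windows, errno 41 on macOS).
    try:
        conn.request(method, path, body, headers)
    except OSError as e:
        transient = ('An established connection was aborted' in str(e)
                     or '[Errno 41] Protocol wrong type for socket' in str(e))
        if not transient:
            raise
        conn.close()
        conn.request(method, path, body, headers)
    return conn.getresponse()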
diff --git a/test/functional/test_framework/blocktools.py b/test/functional/test_framework/blocktools.py
index d741b00ba0..afc1995009 100644
--- a/test/functional/test_framework/blocktools.py
+++ b/test/functional/test_framework/blocktools.py
@@ -4,6 +4,8 @@
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Utilities for manipulating blocks and transactions."""
+import unittest
+
from .address import (
key_to_p2sh_p2wpkh,
key_to_p2wpkh,
@@ -217,3 +219,9 @@ def send_to_witness(use_p2wsh, node, utxo, pubkey, encode_p2sh, amount, sign=Tru
tx_to_witness = ToHex(tx)
return node.sendrawtransaction(tx_to_witness)
+
+class TestFrameworkBlockTools(unittest.TestCase):
+ def test_create_coinbase(self):
+ height = 20
+ coinbase_tx = create_coinbase(height=height)
+ assert_equal(CScriptNum.decode(coinbase_tx.vin[0].scriptSig), height)
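The new unit test works because create_coinbase() pushes the BIP34 block height into the coinbase scriptSig as a CScriptNum, so decoding that push recovers the height. A quick round-trip check of the encoding itself (assuming test/functional is on the Python path so the framework package is importable):

from test_framework.messages import CScriptNum

height = 20
encoded = CScriptNum.encode(CScriptNum(height))
# decode() reverses encode(), which is what lets the unit test above read
# the height back out of coinbase_tx.vin[0].scriptSig.
assert CScriptNum.decode(encoded) == height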
diff --git a/test/functional/test_framework/messages.py b/test/functional/test_framework/messages.py
index 45b49bcf9e..eb1244035f 100755
--- a/test/functional/test_framework/messages.py
+++ b/test/functional/test_framework/messages.py
@@ -1,7 +1,7 @@
#!/usr/bin/env python3
# Copyright (c) 2010 ArtForz -- public domain half-a-node
# Copyright (c) 2012 Jeff Garzik
-# Copyright (c) 2010-2019 The Bitcoin Core developers
+# Copyright (c) 2010-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Bitcoin test framework primitive and message structures
@@ -37,12 +37,18 @@ MY_RELAY = 1 # from version 70001 onwards, fRelay should be appended to version
MAX_LOCATOR_SZ = 101
MAX_BLOCK_BASE_SIZE = 1000000
+MAX_BLOOM_FILTER_SIZE = 36000
+MAX_BLOOM_HASH_FUNCS = 50
COIN = 100000000 # 1 btc in satoshis
MAX_MONEY = 21000000 * COIN
BIP125_SEQUENCE_NUMBER = 0xfffffffd # Sequence number that is BIP 125 opt-in and BIP 68-opt-out
+MAX_PROTOCOL_MESSAGE_LENGTH = 4000000 # Maximum length of incoming protocol messages
+MAX_HEADERS_RESULTS = 2000 # Number of headers sent in one getheaders result
+MAX_INV_SIZE = 50000 # Maximum number of entries in an 'inv' protocol message
+
NODE_NETWORK = (1 << 0)
NODE_GETUTXO = (1 << 1)
NODE_BLOOM = (1 << 2)
@@ -52,9 +58,12 @@ NODE_NETWORK_LIMITED = (1 << 10)
MSG_TX = 1
MSG_BLOCK = 2
MSG_FILTERED_BLOCK = 3
+MSG_CMPCT_BLOCK = 4
MSG_WITNESS_FLAG = 1 << 30
MSG_TYPE_MASK = 0xffffffff >> 2
+FILTER_TYPE_BASIC = 0
+
# Serialization/deserialization tools
def sha256(s):
return hashlib.new('sha256', s).digest()
@@ -601,16 +610,16 @@ class CBlock(CBlockHeader):
__slots__ = ("vtx",)
def __init__(self, header=None):
- super(CBlock, self).__init__(header)
+ super().__init__(header)
self.vtx = []
def deserialize(self, f):
- super(CBlock, self).deserialize(f)
+ super().deserialize(f)
self.vtx = deser_vector(f, CTransaction)
def serialize(self, with_witness=True):
r = b""
- r += super(CBlock, self).serialize()
+ r += super().serialize()
if with_witness:
r += ser_vector(self.vtx, "serialize_with_witness")
else:
@@ -750,7 +759,7 @@ class P2PHeaderAndShortIDs:
class P2PHeaderAndShortWitnessIDs(P2PHeaderAndShortIDs):
__slots__ = ()
def serialize(self):
- return super(P2PHeaderAndShortWitnessIDs, self).serialize(with_witness=True)
+ return super().serialize(with_witness=True)
# Calculate the BIP 152-compact blocks shortid for a given transaction hash
def calculate_shortid(k0, k1, tx_hash):
@@ -946,7 +955,7 @@ class CMerkleBlock:
class msg_version:
__slots__ = ("addrFrom", "addrTo", "nNonce", "nRelay", "nServices",
"nStartingHeight", "nTime", "nVersion", "strSubVer")
- command = b"version"
+ msgtype = b"version"
def __init__(self):
self.nVersion = MY_VERSION
@@ -1004,7 +1013,7 @@ class msg_version:
class msg_verack:
__slots__ = ()
- command = b"verack"
+ msgtype = b"verack"
def __init__(self):
pass
@@ -1021,7 +1030,7 @@ class msg_verack:
class msg_addr:
__slots__ = ("addrs",)
- command = b"addr"
+ msgtype = b"addr"
def __init__(self):
self.addrs = []
@@ -1038,7 +1047,7 @@ class msg_addr:
class msg_inv:
__slots__ = ("inv",)
- command = b"inv"
+ msgtype = b"inv"
def __init__(self, inv=None):
if inv is None:
@@ -1058,7 +1067,7 @@ class msg_inv:
class msg_getdata:
__slots__ = ("inv",)
- command = b"getdata"
+ msgtype = b"getdata"
def __init__(self, inv=None):
self.inv = inv if inv is not None else []
@@ -1075,7 +1084,7 @@ class msg_getdata:
class msg_getblocks:
__slots__ = ("locator", "hashstop")
- command = b"getblocks"
+ msgtype = b"getblocks"
def __init__(self):
self.locator = CBlockLocator()
@@ -1099,7 +1108,7 @@ class msg_getblocks:
class msg_tx:
__slots__ = ("tx",)
- command = b"tx"
+ msgtype = b"tx"
def __init__(self, tx=CTransaction()):
self.tx = tx
@@ -1123,7 +1132,7 @@ class msg_no_witness_tx(msg_tx):
class msg_block:
__slots__ = ("block",)
- command = b"block"
+ msgtype = b"block"
def __init__(self, block=None):
if block is None:
@@ -1142,12 +1151,12 @@ class msg_block:
# for cases where a user needs tighter control over what is sent over the wire
-# note that the user must supply the name of the command, and the data
+# note that the user must supply the name of the msgtype, and the data
class msg_generic:
- __slots__ = ("command", "data")
+ __slots__ = ("msgtype", "data")
- def __init__(self, command, data=None):
- self.command = command
+ def __init__(self, msgtype, data=None):
+ self.msgtype = msgtype
self.data = data
def serialize(self):
@@ -1165,7 +1174,7 @@ class msg_no_witness_block(msg_block):
class msg_getaddr:
__slots__ = ()
- command = b"getaddr"
+ msgtype = b"getaddr"
def __init__(self):
pass
@@ -1182,7 +1191,7 @@ class msg_getaddr:
class msg_ping:
__slots__ = ("nonce",)
- command = b"ping"
+ msgtype = b"ping"
def __init__(self, nonce=0):
self.nonce = nonce
@@ -1201,7 +1210,7 @@ class msg_ping:
class msg_pong:
__slots__ = ("nonce",)
- command = b"pong"
+ msgtype = b"pong"
def __init__(self, nonce=0):
self.nonce = nonce
@@ -1220,7 +1229,7 @@ class msg_pong:
class msg_mempool:
__slots__ = ()
- command = b"mempool"
+ msgtype = b"mempool"
def __init__(self):
pass
@@ -1237,7 +1246,7 @@ class msg_mempool:
class msg_notfound:
__slots__ = ("vec", )
- command = b"notfound"
+ msgtype = b"notfound"
def __init__(self, vec=None):
self.vec = vec or []
@@ -1254,7 +1263,7 @@ class msg_notfound:
class msg_sendheaders:
__slots__ = ()
- command = b"sendheaders"
+ msgtype = b"sendheaders"
def __init__(self):
pass
@@ -1275,7 +1284,7 @@ class msg_sendheaders:
# hash_stop (hash of last desired block header, 0 to get as many as possible)
class msg_getheaders:
__slots__ = ("hashstop", "locator",)
- command = b"getheaders"
+ msgtype = b"getheaders"
def __init__(self):
self.locator = CBlockLocator()
@@ -1301,7 +1310,7 @@ class msg_getheaders:
# <count> <vector of block headers>
class msg_headers:
__slots__ = ("headers",)
- command = b"headers"
+ msgtype = b"headers"
def __init__(self, headers=None):
self.headers = headers if headers is not None else []
@@ -1322,7 +1331,7 @@ class msg_headers:
class msg_merkleblock:
__slots__ = ("merkleblock",)
- command = b"merkleblock"
+ msgtype = b"merkleblock"
def __init__(self, merkleblock=None):
if merkleblock is None:
@@ -1342,7 +1351,7 @@ class msg_merkleblock:
class msg_filterload:
__slots__ = ("data", "nHashFuncs", "nTweak", "nFlags")
- command = b"filterload"
+ msgtype = b"filterload"
def __init__(self, data=b'00', nHashFuncs=0, nTweak=0, nFlags=0):
self.data = data
@@ -1371,7 +1380,7 @@ class msg_filterload:
class msg_filteradd:
__slots__ = ("data")
- command = b"filteradd"
+ msgtype = b"filteradd"
def __init__(self, data):
self.data = data
@@ -1390,7 +1399,7 @@ class msg_filteradd:
class msg_filterclear:
__slots__ = ()
- command = b"filterclear"
+ msgtype = b"filterclear"
def __init__(self):
pass
@@ -1407,7 +1416,7 @@ class msg_filterclear:
class msg_feefilter:
__slots__ = ("feerate",)
- command = b"feefilter"
+ msgtype = b"feefilter"
def __init__(self, feerate=0):
self.feerate = feerate
@@ -1426,7 +1435,7 @@ class msg_feefilter:
class msg_sendcmpct:
__slots__ = ("announce", "version")
- command = b"sendcmpct"
+ msgtype = b"sendcmpct"
def __init__(self):
self.announce = False
@@ -1448,7 +1457,7 @@ class msg_sendcmpct:
class msg_cmpctblock:
__slots__ = ("header_and_shortids",)
- command = b"cmpctblock"
+ msgtype = b"cmpctblock"
def __init__(self, header_and_shortids = None):
self.header_and_shortids = header_and_shortids
@@ -1468,7 +1477,7 @@ class msg_cmpctblock:
class msg_getblocktxn:
__slots__ = ("block_txn_request",)
- command = b"getblocktxn"
+ msgtype = b"getblocktxn"
def __init__(self):
self.block_txn_request = None
@@ -1488,7 +1497,7 @@ class msg_getblocktxn:
class msg_blocktxn:
__slots__ = ("block_transactions",)
- command = b"blocktxn"
+ msgtype = b"blocktxn"
def __init__(self):
self.block_transactions = BlockTransactions()
@@ -1510,3 +1519,154 @@ class msg_no_witness_blocktxn(msg_blocktxn):
def serialize(self):
return self.block_transactions.serialize(with_witness=False)
+
+
+class msg_getcfilters:
+ __slots__ = ("filter_type", "start_height", "stop_hash")
+ msgtype = b"getcfilters"
+
+ def __init__(self, filter_type, start_height, stop_hash):
+ self.filter_type = filter_type
+ self.start_height = start_height
+ self.stop_hash = stop_hash
+
+ def deserialize(self, f):
+ self.filter_type = struct.unpack("<B", f.read(1))[0]
+ self.start_height = struct.unpack("<I", f.read(4))[0]
+ self.stop_hash = deser_uint256(f)
+
+ def serialize(self):
+ r = b""
+ r += struct.pack("<B", self.filter_type)
+ r += struct.pack("<I", self.start_height)
+ r += ser_uint256(self.stop_hash)
+ return r
+
+ def __repr__(self):
+ return "msg_getcfilters(filter_type={:#x}, start_height={}, stop_hash={:x})".format(
+ self.filter_type, self.start_height, self.stop_hash)
+
+class msg_cfilter:
+ __slots__ = ("filter_type", "block_hash", "filter_data")
+ msgtype = b"cfilter"
+
+ def __init__(self, filter_type=None, block_hash=None, filter_data=None):
+ self.filter_type = filter_type
+ self.block_hash = block_hash
+ self.filter_data = filter_data
+
+ def deserialize(self, f):
+ self.filter_type = struct.unpack("<B", f.read(1))[0]
+ self.block_hash = deser_uint256(f)
+ self.filter_data = deser_string(f)
+
+ def serialize(self):
+ r = b""
+ r += struct.pack("<B", self.filter_type)
+ r += ser_uint256(self.block_hash)
+ r += ser_string(self.filter_data)
+ return r
+
+ def __repr__(self):
+ return "msg_cfilter(filter_type={:#x}, block_hash={:x})".format(
+ self.filter_type, self.block_hash)
+
+class msg_getcfheaders:
+ __slots__ = ("filter_type", "start_height", "stop_hash")
+ msgtype = b"getcfheaders"
+
+ def __init__(self, filter_type, start_height, stop_hash):
+ self.filter_type = filter_type
+ self.start_height = start_height
+ self.stop_hash = stop_hash
+
+ def deserialize(self, f):
+ self.filter_type = struct.unpack("<B", f.read(1))[0]
+ self.start_height = struct.unpack("<I", f.read(4))[0]
+ self.stop_hash = deser_uint256(f)
+
+ def serialize(self):
+ r = b""
+ r += struct.pack("<B", self.filter_type)
+ r += struct.pack("<I", self.start_height)
+ r += ser_uint256(self.stop_hash)
+ return r
+
+ def __repr__(self):
+ return "msg_getcfheaders(filter_type={:#x}, start_height={}, stop_hash={:x})".format(
+ self.filter_type, self.start_height, self.stop_hash)
+
+class msg_cfheaders:
+ __slots__ = ("filter_type", "stop_hash", "prev_header", "hashes")
+ msgtype = b"cfheaders"
+
+ def __init__(self, filter_type=None, stop_hash=None, prev_header=None, hashes=None):
+ self.filter_type = filter_type
+ self.stop_hash = stop_hash
+ self.prev_header = prev_header
+ self.hashes = hashes
+
+ def deserialize(self, f):
+ self.filter_type = struct.unpack("<B", f.read(1))[0]
+ self.stop_hash = deser_uint256(f)
+ self.prev_header = deser_uint256(f)
+ self.hashes = deser_uint256_vector(f)
+
+ def serialize(self):
+ r = b""
+ r += struct.pack("<B", self.filter_type)
+ r += ser_uint256(self.stop_hash)
+ r += ser_uint256(self.prev_header)
+ r += ser_uint256_vector(self.hashes)
+ return r
+
+ def __repr__(self):
+ return "msg_cfheaders(filter_type={:#x}, stop_hash={:x})".format(
+ self.filter_type, self.stop_hash)
+
+class msg_getcfcheckpt:
+ __slots__ = ("filter_type", "stop_hash")
+ msgtype = b"getcfcheckpt"
+
+ def __init__(self, filter_type, stop_hash):
+ self.filter_type = filter_type
+ self.stop_hash = stop_hash
+
+ def deserialize(self, f):
+ self.filter_type = struct.unpack("<B", f.read(1))[0]
+ self.stop_hash = deser_uint256(f)
+
+ def serialize(self):
+ r = b""
+ r += struct.pack("<B", self.filter_type)
+ r += ser_uint256(self.stop_hash)
+ return r
+
+ def __repr__(self):
+ return "msg_getcfcheckpt(filter_type={:#x}, stop_hash={:x})".format(
+ self.filter_type, self.stop_hash)
+
+class msg_cfcheckpt:
+ __slots__ = ("filter_type", "stop_hash", "headers")
+ msgtype = b"cfcheckpt"
+
+ def __init__(self, filter_type=None, stop_hash=None, headers=None):
+ self.filter_type = filter_type
+ self.stop_hash = stop_hash
+ self.headers = headers
+
+ def deserialize(self, f):
+ self.filter_type = struct.unpack("<B", f.read(1))[0]
+ self.stop_hash = deser_uint256(f)
+ self.headers = deser_uint256_vector(f)
+
+ def serialize(self):
+ r = b""
+ r += struct.pack("<B", self.filter_type)
+ r += ser_uint256(self.stop_hash)
+ r += ser_uint256_vector(self.headers)
+ return r
+
+ def __repr__(self):
+ return "msg_cfcheckpt(filter_type={:#x}, stop_hash={:x})".format(
+ self.filter_type, self.stop_hash)
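As the serialize() methods above show, the BIP157 request messages are fixed-layout: a 1-byte filter type, an optional 4-byte little-endian start height, and a 32-byte stop hash (ser_uint256 writes the integer little-endian). A self-contained sketch of the getcfheaders payload, using a made-up stop hash:

import struct

FILTER_TYPE_BASIC = 0
start_height = 1000
stop_hash = 0x1234  # placeholder; a real value would be a block hash as an integer

payload = struct.pack("<B", FILTER_TYPE_BASIC)    # filter_type
payload += struct.pack("<I", start_height)        # start_height, little-endian
payload += stop_hash.to_bytes(32, 'little')       # stop_hash (ser_uint256 is little-endian)
assert len(payload) == 1 + 4 + 32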
diff --git a/test/functional/test_framework/mininode.py b/test/functional/test_framework/mininode.py
index 9f51bef946..e6da33763d 100755
--- a/test/functional/test_framework/mininode.py
+++ b/test/functional/test_framework/mininode.py
@@ -1,7 +1,7 @@
#!/usr/bin/env python3
# Copyright (c) 2010 ArtForz -- public domain half-a-node
# Copyright (c) 2012 Jeff Garzik
-# Copyright (c) 2010-2019 The Bitcoin Core developers
+# Copyright (c) 2010-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Bitcoin P2P network half-a-node.
@@ -12,7 +12,10 @@ found in the mini-node branch of http://github.com/jgarzik/pynode.
P2PConnection: A low-level connection object to a node's P2P interface
P2PInterface: A high-level interface object for communicating to a node over P2P
P2PDataStore: A p2p interface class that keeps a store of transactions and blocks
- and can respond correctly to getdata and getheaders messages"""
+ and can respond correctly to getdata and getheaders messages
+P2PTxInvStore: A p2p interface class that inherits from P2PDataStore, and keeps
+ a count of how many times each txid has been announced."""
+
import asyncio
from collections import defaultdict
from io import BytesIO
@@ -23,11 +26,15 @@ import threading
from test_framework.messages import (
CBlockHeader,
+ MAX_HEADERS_RESULTS,
MIN_VERSION_SUPPORTED,
msg_addr,
msg_block,
MSG_BLOCK,
msg_blocktxn,
+ msg_cfcheckpt,
+ msg_cfheaders,
+ msg_cfilter,
msg_cmpctblock,
msg_feefilter,
msg_filteradd,
@@ -64,6 +71,9 @@ MESSAGEMAP = {
b"addr": msg_addr,
b"block": msg_block,
b"blocktxn": msg_blocktxn,
+ b"cfcheckpt": msg_cfcheckpt,
+ b"cfheaders": msg_cfheaders,
+ b"cfilter": msg_cfilter,
b"cmpctblock": msg_cmpctblock,
b"feefilter": msg_feefilter,
b"filteradd": msg_filteradd,
@@ -117,8 +127,9 @@ class P2PConnection(asyncio.Protocol):
def is_connected(self):
return self._transport is not None
- def peer_connect(self, dstaddr, dstport, *, net):
+ def peer_connect(self, dstaddr, dstport, *, net, timeout_factor):
assert not self.is_connected
+ self.timeout_factor = timeout_factor
self.dstaddr = dstaddr
self.dstport = dstport
# The initial message to send after the connection was made:
@@ -180,7 +191,7 @@ class P2PConnection(asyncio.Protocol):
raise ValueError("magic bytes mismatch: {} != {}".format(repr(self.magic_bytes), repr(self.recvbuf)))
if len(self.recvbuf) < 4 + 12 + 4 + 4:
return
- command = self.recvbuf[4:4+12].split(b"\x00", 1)[0]
+ msgtype = self.recvbuf[4:4+12].split(b"\x00", 1)[0]
msglen = struct.unpack("<i", self.recvbuf[4+12:4+12+4])[0]
checksum = self.recvbuf[4+12+4:4+12+4+4]
if len(self.recvbuf) < 4 + 12 + 4 + 4 + msglen:
@@ -191,10 +202,10 @@ class P2PConnection(asyncio.Protocol):
if checksum != h[:4]:
raise ValueError("got bad checksum " + repr(self.recvbuf))
self.recvbuf = self.recvbuf[4+12+4+4+msglen:]
- if command not in MESSAGEMAP:
- raise ValueError("Received unknown command from %s:%d: '%s' %s" % (self.dstaddr, self.dstport, command, repr(msg)))
+ if msgtype not in MESSAGEMAP:
+ raise ValueError("Received unknown msgtype from %s:%d: '%s' %s" % (self.dstaddr, self.dstport, msgtype, repr(msg)))
f = BytesIO(msg)
- t = MESSAGEMAP[command]()
+ t = MESSAGEMAP[msgtype]()
t.deserialize(f)
self._log_message("receive", t)
self.on_message(t)
@@ -233,11 +244,11 @@ class P2PConnection(asyncio.Protocol):
def build_message(self, message):
"""Build a serialized P2P message"""
- command = message.command
+ msgtype = message.msgtype
data = message.serialize()
tmsg = self.magic_bytes
- tmsg += command
- tmsg += b"\x00" * (12 - len(command))
+ tmsg += msgtype
+ tmsg += b"\x00" * (12 - len(msgtype))
tmsg += struct.pack("<I", len(data))
th = sha256(data)
h = sha256(th)
@@ -304,10 +315,10 @@ class P2PInterface(P2PConnection):
and the most recent message of each type."""
with mininode_lock:
try:
- command = message.command.decode('ascii')
- self.message_count[command] += 1
- self.last_message[command] = message
- getattr(self, 'on_' + command)(message)
+ msgtype = message.msgtype.decode('ascii')
+ self.message_count[msgtype] += 1
+ self.last_message[msgtype] = message
+ getattr(self, 'on_' + msgtype)(message)
except:
print("ERROR delivering %s (%s)" % (repr(message), sys.exc_info()[0]))
raise
@@ -324,6 +335,9 @@ class P2PInterface(P2PConnection):
def on_addr(self, message): pass
def on_block(self, message): pass
def on_blocktxn(self, message): pass
+ def on_cfcheckpt(self, message): pass
+ def on_cfheaders(self, message): pass
+ def on_cfilter(self, message): pass
def on_cmpctblock(self, message): pass
def on_feefilter(self, message): pass
def on_filteradd(self, message): pass
@@ -364,9 +378,12 @@ class P2PInterface(P2PConnection):
# Connection helper methods
+ def wait_until(self, test_function, timeout=60):
+ wait_until(test_function, timeout=timeout, lock=mininode_lock, timeout_factor=self.timeout_factor)
+
def wait_for_disconnect(self, timeout=60):
test_function = lambda: not self.is_connected
- wait_until(test_function, timeout=timeout, lock=mininode_lock)
+ self.wait_until(test_function, timeout=timeout)
# Message receiving helper methods
@@ -377,14 +394,14 @@ class P2PInterface(P2PConnection):
return False
return self.last_message['tx'].tx.rehash() == txid
- wait_until(test_function, timeout=timeout, lock=mininode_lock)
+ self.wait_until(test_function, timeout=timeout)
def wait_for_block(self, blockhash, timeout=60):
def test_function():
assert self.is_connected
return self.last_message.get("block") and self.last_message["block"].block.rehash() == blockhash
- wait_until(test_function, timeout=timeout, lock=mininode_lock)
+ self.wait_until(test_function, timeout=timeout)
def wait_for_header(self, blockhash, timeout=60):
def test_function():
@@ -394,7 +411,7 @@ class P2PInterface(P2PConnection):
return False
return last_headers.headers[0].rehash() == int(blockhash, 16)
- wait_until(test_function, timeout=timeout, lock=mininode_lock)
+ self.wait_until(test_function, timeout=timeout)
def wait_for_merkleblock(self, blockhash, timeout=60):
def test_function():
@@ -404,21 +421,21 @@ class P2PInterface(P2PConnection):
return False
return last_filtered_block.merkleblock.header.rehash() == int(blockhash, 16)
- wait_until(test_function, timeout=timeout, lock=mininode_lock)
+ self.wait_until(test_function, timeout=timeout)
- def wait_for_getdata(self, timeout=60):
+ def wait_for_getdata(self, hash_list, timeout=60):
"""Waits for a getdata message.
- Receiving any getdata message will satisfy the predicate. the last_message["getdata"]
- value must be explicitly cleared before calling this method, or this will return
- immediately with success. TODO: change this method to take a hash value and only
- return true if the correct block/tx has been requested."""
+ The object hashes in the inventory vector must match the provided hash_list."""
def test_function():
assert self.is_connected
- return self.last_message.get("getdata")
+ last_data = self.last_message.get("getdata")
+ if not last_data:
+ return False
+ return [x.hash for x in last_data.inv] == hash_list
- wait_until(test_function, timeout=timeout, lock=mininode_lock)
+ self.wait_until(test_function, timeout=timeout)
def wait_for_getheaders(self, timeout=60):
"""Waits for a getheaders message.
@@ -432,7 +449,7 @@ class P2PInterface(P2PConnection):
assert self.is_connected
return self.last_message.get("getheaders")
- wait_until(test_function, timeout=timeout, lock=mininode_lock)
+ self.wait_until(test_function, timeout=timeout)
def wait_for_inv(self, expected_inv, timeout=60):
"""Waits for an INV message and checks that the first inv object in the message was as expected."""
@@ -445,13 +462,13 @@ class P2PInterface(P2PConnection):
self.last_message["inv"].inv[0].type == expected_inv[0].type and \
self.last_message["inv"].inv[0].hash == expected_inv[0].hash
- wait_until(test_function, timeout=timeout, lock=mininode_lock)
+ self.wait_until(test_function, timeout=timeout)
def wait_for_verack(self, timeout=60):
def test_function():
return self.message_count["verack"]
- wait_until(test_function, timeout=timeout, lock=mininode_lock)
+ self.wait_until(test_function, timeout=timeout)
# Message sending helper functions
@@ -467,7 +484,7 @@ class P2PInterface(P2PConnection):
assert self.is_connected
return self.last_message.get("pong") and self.last_message["pong"].nonce == self.ping_counter
- wait_until(test_function, timeout=timeout, lock=mininode_lock)
+ self.wait_until(test_function, timeout=timeout)
self.ping_counter += 1
@@ -476,7 +493,7 @@ class P2PInterface(P2PConnection):
# P2PConnection acquires this lock whenever delivering a message to a P2PInterface.
# This lock should be acquired in the thread running the test logic to synchronize
# access to any data shared with the P2PInterface or P2PConnection.
-mininode_lock = threading.RLock()
+mininode_lock = threading.Lock()
class NetworkThread(threading.Thread):
@@ -537,7 +554,6 @@ class P2PDataStore(P2PInterface):
return
headers_list = [self.block_store[self.last_block_hash]]
- maxheaders = 2000
while headers_list[-1].sha256 not in locator.vHave:
# Walk back through the block store, adding headers to headers_list
# as we go.
@@ -553,7 +569,7 @@ class P2PDataStore(P2PInterface):
break
# Truncate the list if there are too many headers
- headers_list = headers_list[:-maxheaders - 1:-1]
+ headers_list = headers_list[:-MAX_HEADERS_RESULTS - 1:-1]
response = msg_headers(headers_list)
if response is not None:
@@ -583,7 +599,7 @@ class P2PDataStore(P2PInterface):
self.send_message(msg_block(block=b))
else:
self.send_message(msg_headers([CBlockHeader(block) for block in blocks]))
- wait_until(lambda: blocks[-1].sha256 in self.getdata_requests, timeout=timeout, lock=mininode_lock)
+ self.wait_until(lambda: blocks[-1].sha256 in self.getdata_requests, timeout=timeout)
if expect_disconnect:
self.wait_for_disconnect(timeout=timeout)
@@ -591,7 +607,7 @@ class P2PDataStore(P2PInterface):
self.sync_with_ping(timeout=timeout)
if success:
- wait_until(lambda: node.getbestblockhash() == blocks[-1].hash, timeout=timeout)
+ self.wait_until(lambda: node.getbestblockhash() == blocks[-1].hash, timeout=timeout)
else:
assert node.getbestblockhash() != blocks[-1].hash
@@ -627,3 +643,30 @@ class P2PDataStore(P2PInterface):
# Check that none of the txs are now in the mempool
for tx in txs:
assert tx.hash not in raw_mempool, "{} tx found in mempool".format(tx.hash)
+
+class P2PTxInvStore(P2PInterface):
+ """A P2PInterface which stores a count of how many times each txid has been announced."""
+ def __init__(self):
+ super().__init__()
+ self.tx_invs_received = defaultdict(int)
+
+ def on_inv(self, message):
+ super().on_inv(message) # Send getdata in response.
+ # Store how many times invs have been received for each tx.
+ for i in message.inv:
+ if i.type == MSG_TX:
+ # save txid
+ self.tx_invs_received[i.hash] += 1
+
+ def get_invs(self):
+ with mininode_lock:
+ return list(self.tx_invs_received.keys())
+
+ def wait_for_broadcast(self, txns, timeout=60):
+ """Waits for the txns (list of txids) to complete initial broadcast.
+ The mempool should mark unbroadcast=False for these transactions.
+ """
+ # Wait until invs have been received (and getdatas sent) for each txid.
+ self.wait_until(lambda: set(self.tx_invs_received.keys()) == set([int(tx, 16) for tx in txns]), timeout)
+ # Flush messages and wait for the getdatas to be processed
+ self.sync_with_ping()
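In a functional test, the new P2PTxInvStore is typically attached to a node and then used to confirm that a transaction has completed its initial broadcast. A rough sketch (the wallet calls are placeholders for whatever creates the transaction under test):

from test_framework.mininode import P2PTxInvStore

# Within a BitcoinTestFramework subclass's run_test():
peer = self.nodes[0].add_p2p_connection(P2PTxInvStore())
txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1)
# Block until the node has announced the txid to our peer (and our getdata
# has been processed), i.e. the transaction is no longer "unbroadcast".
peer.wait_for_broadcast([txid])
assert int(txid, 16) in peer.get_invs()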
diff --git a/test/functional/test_framework/script.py b/test/functional/test_framework/script.py
index 92725dfcf4..cc5f8307d3 100644
--- a/test/functional/test_framework/script.py
+++ b/test/functional/test_framework/script.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2015-2019 The Bitcoin Core developers
+# Copyright (c) 2015-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Functionality to build scripts, as well as signature hash functions.
@@ -8,6 +8,8 @@ This file is modified from python-bitcoinlib.
"""
import hashlib
import struct
+import unittest
+from typing import List, Dict
from .messages import (
CTransaction,
@@ -20,7 +22,7 @@ from .messages import (
)
MAX_SCRIPT_ELEMENT_SIZE = 520
-OPCODE_NAMES = {}
+OPCODE_NAMES = {} # type: Dict[CScriptOp, str]
def hash160(s):
return hashlib.new('ripemd160', sha256(s)).digest()
@@ -36,7 +38,7 @@ def bn2vch(v):
# Serialize to bytes
return encoded_v.to_bytes(n_bytes, 'little')
-_opcode_instances = []
+_opcode_instances = [] # type: List[CScriptOp]
class CScriptOp(int):
"""A single script opcode"""
__slots__ = ()
@@ -97,7 +99,7 @@ class CScriptOp(int):
return _opcode_instances[n]
except IndexError:
assert len(_opcode_instances) == n
- _opcode_instances.append(super(CScriptOp, cls).__new__(cls, n))
+ _opcode_instances.append(super().__new__(cls, n))
return _opcode_instances[n]
# Populate opcode instance table
@@ -372,7 +374,7 @@ class CScriptTruncatedPushDataError(CScriptInvalidError):
"""Invalid pushdata due to truncation"""
def __init__(self, msg, data):
self.data = data
- super(CScriptTruncatedPushDataError, self).__init__(msg)
+ super().__init__(msg)
# This is used, eg, for blockchain heights in coinbase scripts (bip34)
@@ -449,15 +451,8 @@ class CScript(bytes):
return other
def __add__(self, other):
- # Do the coercion outside of the try block so that errors in it are
- # noticed.
- other = self.__coerce_instance(other)
-
- try:
- # bytes.__add__ always returns bytes instances unfortunately
- return CScript(super(CScript, self).__add__(other))
- except TypeError:
- raise TypeError('Can not add a %r instance to a CScript' % other.__class__)
+ # add makes no sense for a CScript()
+ raise NotImplementedError
def join(self, iterable):
# join makes no sense for a CScript()
@@ -465,14 +460,14 @@ class CScript(bytes):
def __new__(cls, value=b''):
if isinstance(value, bytes) or isinstance(value, bytearray):
- return super(CScript, cls).__new__(cls, value)
+ return super().__new__(cls, value)
else:
def coerce_iterable(iterable):
for instance in iterable:
yield cls.__coerce_instance(instance)
# Annoyingly on both python2 and python3 bytes.join() always
# returns a bytes instance even when subclassed.
- return super(CScript, cls).__new__(cls, b''.join(coerce_iterable(value)))
+ return super().__new__(cls, b''.join(coerce_iterable(value)))
def raw_iter(self):
"""Raw iteration
@@ -715,3 +710,31 @@ def SegwitV0SignatureHash(script, txTo, inIdx, hashtype, amount):
ss += struct.pack("<I", hashtype)
return hash256(ss)
+
+class TestFrameworkScript(unittest.TestCase):
+ def test_bn2vch(self):
+ self.assertEqual(bn2vch(0), bytes([]))
+ self.assertEqual(bn2vch(1), bytes([0x01]))
+ self.assertEqual(bn2vch(-1), bytes([0x81]))
+ self.assertEqual(bn2vch(0x7F), bytes([0x7F]))
+ self.assertEqual(bn2vch(-0x7F), bytes([0xFF]))
+ self.assertEqual(bn2vch(0x80), bytes([0x80, 0x00]))
+ self.assertEqual(bn2vch(-0x80), bytes([0x80, 0x80]))
+ self.assertEqual(bn2vch(0xFF), bytes([0xFF, 0x00]))
+ self.assertEqual(bn2vch(-0xFF), bytes([0xFF, 0x80]))
+ self.assertEqual(bn2vch(0x100), bytes([0x00, 0x01]))
+ self.assertEqual(bn2vch(-0x100), bytes([0x00, 0x81]))
+ self.assertEqual(bn2vch(0x7FFF), bytes([0xFF, 0x7F]))
+ self.assertEqual(bn2vch(-0x8000), bytes([0x00, 0x80, 0x80]))
+ self.assertEqual(bn2vch(-0x7FFFFF), bytes([0xFF, 0xFF, 0xFF]))
+ self.assertEqual(bn2vch(0x80000000), bytes([0x00, 0x00, 0x00, 0x80, 0x00]))
+ self.assertEqual(bn2vch(-0x80000000), bytes([0x00, 0x00, 0x00, 0x80, 0x80]))
+ self.assertEqual(bn2vch(0xFFFFFFFF), bytes([0xFF, 0xFF, 0xFF, 0xFF, 0x00]))
+ self.assertEqual(bn2vch(123456789), bytes([0x15, 0xCD, 0x5B, 0x07]))
+ self.assertEqual(bn2vch(-54321), bytes([0x31, 0xD4, 0x80]))
+
+ def test_cscriptnum_encoding(self):
+ # round-trip negative and multi-byte CScriptNums
+ values = [0, 1, -1, -2, 127, 128, -255, 256, (1 << 15) - 1, -(1 << 16), (1 << 24) - 1, (1 << 31), 1 - (1 << 32), 1 << 40, 1500, -1500]
+ for value in values:
+ self.assertEqual(CScriptNum.decode(CScriptNum.encode(CScriptNum(value))), value)
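Both bn2vch and CScriptNum use the same little-endian sign-and-magnitude encoding that the new vectors exercise: magnitude bytes first, with the high bit of the final byte carrying the sign, and an extra byte appended when the magnitude already occupies that bit. A standalone sketch of the rule, checked against a few of the vectors from the test above:

def encode_scriptnum(v):
    # Little-endian sign-and-magnitude, matching the bn2vch test vectors.
    if v == 0:
        return b''
    out = bytearray()
    magnitude = abs(v)
    while magnitude:
        out.append(magnitude & 0xFF)
        magnitude >>= 8
    if out[-1] & 0x80:
        out.append(0x80 if v < 0 else 0x00)
    elif v < 0:
        out[-1] |= 0x80
    return bytes(out)

assert encode_scriptnum(-0x7F) == bytes([0xFF])
assert encode_scriptnum(0x80) == bytes([0x80, 0x00])
assert encode_scriptnum(123456789) == bytes([0x15, 0xCD, 0x5B, 0x07])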
diff --git a/test/functional/test_framework/script_util.py b/test/functional/test_framework/script_util.py
index 5ef67226c4..80fbae70bf 100755
--- a/test/functional/test_framework/script_util.py
+++ b/test/functional/test_framework/script_util.py
@@ -23,3 +23,4 @@ from test_framework.script import CScript
# scriptPubKeys are needed, to guarantee that the minimum transaction size is
# met.
DUMMY_P2WPKH_SCRIPT = CScript([b'a' * 21])
+DUMMY_2_P2WPKH_SCRIPT = CScript([b'b' * 21])
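The second constant gives tests a distinct scriptPubKey of the same size, presumably for cases where two outputs (or two otherwise-identical transactions) must not collide. Since a push of 21 bytes serializes as the length opcode 0x15 followed by the data, the two constants differ only in the pushed byte:

# Serialized form of CScript([b'a' * 21]) and CScript([b'b' * 21]):
DUMMY_P2WPKH_SCRIPT = bytes([21]) + b'a' * 21
DUMMY_2_P2WPKH_SCRIPT = bytes([21]) + b'b' * 21
assert len(DUMMY_P2WPKH_SCRIPT) == len(DUMMY_2_P2WPKH_SCRIPT) == 22
assert DUMMY_P2WPKH_SCRIPT != DUMMY_2_P2WPKH_SCRIPT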
diff --git a/test/functional/test_framework/test_framework.py b/test/functional/test_framework/test_framework.py
index e36fb350c6..9d9e065158 100755
--- a/test/functional/test_framework/test_framework.py
+++ b/test/functional/test_framework/test_framework.py
@@ -6,11 +6,12 @@
import configparser
from enum import Enum
-import logging
import argparse
+import logging
import os
import pdb
import random
+import re
import shutil
import subprocess
import sys
@@ -30,8 +31,6 @@ from .util import (
disconnect_nodes,
get_datadir_path,
initialize_datadir,
- sync_blocks,
- sync_mempools,
)
@@ -90,6 +89,9 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
This class also contains various public and private helper methods."""
+ chain = None # type: str
+ setup_clean_chain = None # type: bool
+
def __init__(self):
"""Sets test framework defaults. Do not override this method. Instead, override the set_test_params() method"""
self.chain = 'regtest'
@@ -101,6 +103,9 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
self.bind_to_localhost_only = True
self.set_test_params()
self.parse_args()
+ if self.options.timeout_factor == 0 :
+ self.options.timeout_factor = 99999
+ self.rpc_timeout = int(self.rpc_timeout * self.options.timeout_factor) # optionally, increase timeout by a factor
def main(self):
"""Main function. This should not be overridden by the subclass test scripts."""
@@ -136,6 +141,7 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
sys.exit(exit_code)
def parse_args(self):
+ previous_releases_path = os.getenv("PREVIOUS_RELEASES_DIR") or os.getcwd() + "/releases"
parser = argparse.ArgumentParser(usage="%(prog)s [options]")
parser.add_argument("--nocleanup", dest="nocleanup", default=False, action="store_true",
help="Leave bitcoinds and test.* datadir on exit or error")
@@ -150,6 +156,9 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
help="Print out all RPC calls as they are made")
parser.add_argument("--portseed", dest="port_seed", default=os.getpid(), type=int,
help="The seed to use for assigning port numbers (default: current process id)")
+ parser.add_argument("--previous-releases", dest="prev_releases", action="store_true",
+ default=os.path.isdir(previous_releases_path) and bool(os.listdir(previous_releases_path)),
+ help="Force test of previous releases (default: %(default)s)")
parser.add_argument("--coveragedir", dest="coveragedir",
help="Write tested RPC commands into this directory")
parser.add_argument("--configfile", dest="configfile",
@@ -165,8 +174,12 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
help="run nodes under the valgrind memory error detector: expect at least a ~10x slowdown, valgrind 3.14 or later required")
parser.add_argument("--randomseed", type=int,
help="set a random seed for deterministically reproducing a previous test run")
+ parser.add_argument("--descriptors", default=False, action="store_true",
+ help="Run test using a descriptor wallet")
+ parser.add_argument('--timeout-factor', dest="timeout_factor", type=float, default=1.0, help='adjust test timeouts by a factor. Setting it to 0 disables all timeouts')
self.add_options(parser)
self.options = parser.parse_args()
+ self.options.previous_releases_path = previous_releases_path
def setup(self):
"""Call this method to start up the test framework object with options set."""
@@ -180,13 +193,22 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
config = configparser.ConfigParser()
config.read_file(open(self.options.configfile))
self.config = config
- self.options.bitcoind = os.getenv("BITCOIND", default=config["environment"]["BUILDDIR"] + '/src/bitcoind' + config["environment"]["EXEEXT"])
- self.options.bitcoincli = os.getenv("BITCOINCLI", default=config["environment"]["BUILDDIR"] + '/src/bitcoin-cli' + config["environment"]["EXEEXT"])
+ fname_bitcoind = os.path.join(
+ config["environment"]["BUILDDIR"],
+ "src",
+ "bitcoind" + config["environment"]["EXEEXT"],
+ )
+ fname_bitcoincli = os.path.join(
+ config["environment"]["BUILDDIR"],
+ "src",
+ "bitcoin-cli" + config["environment"]["EXEEXT"],
+ )
+ self.options.bitcoind = os.getenv("BITCOIND", default=fname_bitcoind)
+ self.options.bitcoincli = os.getenv("BITCOINCLI", default=fname_bitcoincli)
os.environ['PATH'] = os.pathsep.join([
os.path.join(config['environment']['BUILDDIR'], 'src'),
- os.path.join(config['environment']['BUILDDIR'], 'src', 'qt'),
- os.environ['PATH']
+ os.path.join(config['environment']['BUILDDIR'], 'src', 'qt'), os.environ['PATH']
])
# Set up temp directory and start logging
@@ -269,7 +291,12 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
exit_code = TEST_EXIT_SKIPPED
else:
self.log.error("Test failed. Test logging available at %s/test_framework.log", self.options.tmpdir)
+ self.log.error("")
self.log.error("Hint: Call {} '{}' to consolidate all logs".format(os.path.normpath(os.path.dirname(os.path.realpath(__file__)) + "/../combine_logs.py"), self.options.tmpdir))
+ self.log.error("")
+ self.log.error("If this failure happened unexpectedly or intermittently, please file a bug and provide a link or upload of the combined log.")
+ self.log.error(self.config['environment']['PACKAGE_BUGREPORT'])
+ self.log.error("")
exit_code = TEST_EXIT_FAILED
# Logging.shutdown will not remove stream- and filehandlers, so we must
# do it explicitly. Handlers are removed so the next test run can apply
@@ -326,18 +353,30 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
# See fPreferredDownload in net_processing.
#
# If further outbound connections are needed, they can be added at the beginning of the test with e.g.
- # connect_nodes(self.nodes[1], 2)
+ # self.connect_nodes(1, 2)
for i in range(self.num_nodes - 1):
- connect_nodes(self.nodes[i + 1], i)
+ self.connect_nodes(i + 1, i)
self.sync_all()
def setup_nodes(self):
"""Override this method to customize test node setup"""
- extra_args = None
+ extra_args = [[]] * self.num_nodes
+ wallets = [[]] * self.num_nodes
if hasattr(self, "extra_args"):
extra_args = self.extra_args
+ wallets = [[x for x in eargs if x.startswith('-wallet=')] for eargs in extra_args]
+ extra_args = [x + ['-nowallet'] for x in extra_args]
self.add_nodes(self.num_nodes, extra_args)
self.start_nodes()
+ for i, n in enumerate(self.nodes):
+ n.extra_args.pop()
+ if '-wallet=0' in n.extra_args or '-nowallet' in n.extra_args or '-disablewallet' in n.extra_args or not self.is_wallet_compiled():
+ continue
+ if '-wallet=' not in wallets[i] and not any([x.startswith('-wallet=') for x in wallets[i]]):
+ wallets[i].append('-wallet=')
+ for w in wallets[i]:
+ wallet_name = w.split('=', 1)[1]
+ n.createwallet(wallet_name=wallet_name, descriptors=self.options.descriptors)
self.import_deterministic_coinbase_privkeys()
if not self.setup_clean_chain:
for n in self.nodes:
@@ -369,11 +408,30 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
# Public helper methods. These can be accessed by the subclass test scripts.
- def add_nodes(self, num_nodes, extra_args=None, *, rpchost=None, binary=None, binary_cli=None, versions=None):
+ def add_nodes(self, num_nodes: int, extra_args=None, *, rpchost=None, binary=None, binary_cli=None, versions=None):
"""Instantiate TestNode objects.
Should only be called once after the nodes have been specified in
set_test_params()."""
+ def get_bin_from_version(version, bin_name, bin_default):
+ if not version:
+ return bin_default
+ return os.path.join(
+ self.options.previous_releases_path,
+ re.sub(
+ r'\.0$',
+ '', # remove trailing .0 for point releases
+ 'v{}.{}.{}.{}'.format(
+ (version % 100000000) // 1000000,
+ (version % 1000000) // 10000,
+ (version % 10000) // 100,
+ (version % 100) // 1,
+ ),
+ ),
+ 'bin',
+ bin_name,
+ )
+
if self.bind_to_localhost_only:
extra_confs = [["bind=127.0.0.1"]] * num_nodes
else:
@@ -383,21 +441,22 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
if versions is None:
versions = [None] * num_nodes
if binary is None:
- binary = [self.options.bitcoind] * num_nodes
+ binary = [get_bin_from_version(v, 'bitcoind', self.options.bitcoind) for v in versions]
if binary_cli is None:
- binary_cli = [self.options.bitcoincli] * num_nodes
+ binary_cli = [get_bin_from_version(v, 'bitcoin-cli', self.options.bitcoincli) for v in versions]
assert_equal(len(extra_confs), num_nodes)
assert_equal(len(extra_args), num_nodes)
assert_equal(len(versions), num_nodes)
assert_equal(len(binary), num_nodes)
assert_equal(len(binary_cli), num_nodes)
for i in range(num_nodes):
- self.nodes.append(TestNode(
+ test_node_i = TestNode(
i,
get_datadir_path(self.options.tmpdir, i),
chain=self.chain,
rpchost=rpchost,
timewait=self.rpc_timeout,
+ timeout_factor=self.options.timeout_factor,
bitcoind=binary[i],
bitcoin_cli=binary_cli[i],
version=versions[i],
@@ -408,7 +467,16 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
use_cli=self.options.usecli,
start_perf=self.options.perf,
use_valgrind=self.options.valgrind,
- ))
+ descriptors=self.options.descriptors,
+ )
+ self.nodes.append(test_node_i)
+ if not test_node_i.version_is_at_least(170000):
+ # adjust conf for pre 17
+ conf_file = test_node_i.bitcoinconf
+ with open(conf_file, 'r', encoding='utf8') as conf:
+ conf_data = conf.read()
+ with open(conf_file, 'w', encoding='utf8') as conf:
+ conf.write(conf_data.replace('[regtest]', ''))
def start_node(self, i, *args, **kwargs):
"""Start a bitcoind"""
@@ -464,12 +532,17 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
def wait_for_node_exit(self, i, timeout):
self.nodes[i].process.wait(timeout)
+ def connect_nodes(self, a, b):
+ connect_nodes(self.nodes[a], b)
+
+ def disconnect_nodes(self, a, b):
+ disconnect_nodes(self.nodes[a], b)
+
def split_network(self):
"""
Split the network of four nodes into nodes 0/1 and 2/3.
"""
- disconnect_nodes(self.nodes[1], 2)
- disconnect_nodes(self.nodes[2], 1)
+ self.disconnect_nodes(1, 2)
self.sync_all(self.nodes[:2])
self.sync_all(self.nodes[2:])
@@ -477,18 +550,57 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
"""
Join the (previously split) network halves together.
"""
- connect_nodes(self.nodes[1], 2)
+ self.connect_nodes(1, 2)
self.sync_all()
- def sync_blocks(self, nodes=None, **kwargs):
- sync_blocks(nodes or self.nodes, **kwargs)
-
- def sync_mempools(self, nodes=None, **kwargs):
- sync_mempools(nodes or self.nodes, **kwargs)
-
- def sync_all(self, nodes=None, **kwargs):
- self.sync_blocks(nodes, **kwargs)
- self.sync_mempools(nodes, **kwargs)
+ def sync_blocks(self, nodes=None, wait=1, timeout=60):
+ """
+ Wait until everybody has the same tip.
+        sync_blocks needs to be called with an rpc_connections set that has at least
+ one node already synced to the latest, stable tip, otherwise there's a
+ chance it might return before all nodes are stably synced.
+ """
+ rpc_connections = nodes or self.nodes
+ timeout = int(timeout * self.options.timeout_factor)
+ stop_time = time.time() + timeout
+ while time.time() <= stop_time:
+ best_hash = [x.getbestblockhash() for x in rpc_connections]
+ if best_hash.count(best_hash[0]) == len(rpc_connections):
+ return
+ # Check that each peer has at least one connection
+ assert (all([len(x.getpeerinfo()) for x in rpc_connections]))
+ time.sleep(wait)
+ raise AssertionError("Block sync timed out after {}s:{}".format(
+ timeout,
+ "".join("\n {!r}".format(b) for b in best_hash),
+ ))
+
+ def sync_mempools(self, nodes=None, wait=1, timeout=60, flush_scheduler=True):
+ """
+ Wait until everybody has the same transactions in their memory
+ pools
+ """
+ rpc_connections = nodes or self.nodes
+ timeout = int(timeout * self.options.timeout_factor)
+ stop_time = time.time() + timeout
+ while time.time() <= stop_time:
+ pool = [set(r.getrawmempool()) for r in rpc_connections]
+ if pool.count(pool[0]) == len(rpc_connections):
+ if flush_scheduler:
+ for r in rpc_connections:
+ r.syncwithvalidationinterfacequeue()
+ return
+ # Check that each peer has at least one connection
+ assert (all([len(x.getpeerinfo()) for x in rpc_connections]))
+ time.sleep(wait)
+ raise AssertionError("Mempool sync timed out after {}s:{}".format(
+ timeout,
+ "".join("\n {!r}".format(m) for m in pool),
+ ))
+
+ def sync_all(self, nodes=None):
+ self.sync_blocks(nodes)
+ self.sync_mempools(nodes)
# Private helper methods. These should not be accessed by the subclass test scripts.
@@ -543,15 +655,21 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
extra_args=['-disablewallet'],
rpchost=None,
timewait=self.rpc_timeout,
+ timeout_factor=self.options.timeout_factor,
bitcoind=self.options.bitcoind,
bitcoin_cli=self.options.bitcoincli,
coverage_dir=None,
cwd=self.options.tmpdir,
+ descriptors=self.options.descriptors,
))
self.start_node(CACHE_NODE_ID)
+ cache_node = self.nodes[CACHE_NODE_ID]
# Wait for RPC connections to be ready
- self.nodes[CACHE_NODE_ID].wait_for_rpc_connection()
+ cache_node.wait_for_rpc_connection()
+
+ # Set a time in the past, so that blocks don't end up in the future
+ cache_node.setmocktime(cache_node.getblockheader(cache_node.getbestblockhash())['time'])
# Create a 199-block-long chain; each of the 4 first nodes
# gets 25 mature blocks and 25 immature.
@@ -560,12 +678,12 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
# This is needed so that we are out of IBD when the test starts,
# see the tip age check in IsInitialBlockDownload().
for i in range(8):
- self.nodes[CACHE_NODE_ID].generatetoaddress(
+ cache_node.generatetoaddress(
nblocks=25 if i != 7 else 24,
address=TestNode.PRIV_KEYS[i % 4].address,
)
- assert_equal(self.nodes[CACHE_NODE_ID].getblockchaininfo()["blocks"], 199)
+ assert_equal(cache_node.getblockchaininfo()["blocks"], 199)
# Shut it down, and clean up cache directories:
self.stop_nodes()
@@ -620,6 +738,19 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
if not self.is_cli_compiled():
raise SkipTest("bitcoin-cli has not been compiled.")
+ def skip_if_no_previous_releases(self):
+ """Skip the running test if previous releases are not available."""
+ if not self.has_previous_releases():
+ raise SkipTest("previous releases not available or disabled")
+
+ def has_previous_releases(self):
+ """Checks whether previous releases are present and enabled."""
+ if not os.path.isdir(self.options.previous_releases_path):
+ if self.options.prev_releases:
+ raise AssertionError("Force test of previous releases but releases missing: {}".format(
+ self.options.previous_releases_path))
+ return self.options.prev_releases
+
def is_cli_compiled(self):
"""Checks whether bitcoin-cli was compiled."""
return self.config["components"].getboolean("ENABLE_CLI")
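The version numbers passed to add_nodes() are the usual integer versions (e.g. 170100 for 0.17.1); get_bin_from_version() splits them into dotted form and looks under the previous-releases directory. A worked example of the same path derivation (the "releases" directory name is just the default assumed by the option parsing above):

import os
import re

def bin_path_for(version, bin_name, releases_dir='releases'):
    # 170100 -> "v0.17.1.0" -> "v0.17.1" (the trailing ".0" is stripped for point releases).
    tag = 'v{}.{}.{}.{}'.format(
        (version % 100000000) // 1000000,
        (version % 1000000) // 10000,
        (version % 10000) // 100,
        version % 100,
    )
    return os.path.join(releases_dir, re.sub(r'\.0$', '', tag), 'bin', bin_name)

assert bin_path_for(170100, 'bitcoind') == os.path.join('releases', 'v0.17.1', 'bin', 'bitcoind')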
diff --git a/test/functional/test_framework/test_node.py b/test/functional/test_framework/test_node.py
index 53bc5ca9e7..66bb2c89b5 100755
--- a/test/functional/test_framework/test_node.py
+++ b/test/functional/test_framework/test_node.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2017-2019 The Bitcoin Core developers
+# Copyright (c) 2017-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Class for bitcoind node under test"""
@@ -22,10 +22,13 @@ import shlex
import sys
from .authproxy import JSONRPCException
+from .descriptors import descsum_create
+from .messages import MY_SUBVERSION
from .util import (
MAX_NODES,
append_config,
delete_cookie_file,
+ get_auth_cookie,
get_rpc_proxy,
rpc_url,
wait_until,
@@ -60,7 +63,7 @@ class TestNode():
To make things easier for the test writer, any unrecognised messages will
be dispatched to the RPC connection."""
- def __init__(self, i, datadir, *, chain, rpchost, timewait, bitcoind, bitcoin_cli, coverage_dir, cwd, extra_conf=None, extra_args=None, use_cli=False, start_perf=False, use_valgrind=False, version=None):
+ def __init__(self, i, datadir, *, chain, rpchost, timewait, timeout_factor, bitcoind, bitcoin_cli, coverage_dir, cwd, extra_conf=None, extra_args=None, use_cli=False, start_perf=False, use_valgrind=False, version=None, descriptors=False):
"""
Kwargs:
start_perf (bool): If True, begin profiling the node with `perf` as soon as
@@ -78,6 +81,7 @@ class TestNode():
self.binary = bitcoind
self.coverage_dir = coverage_dir
self.cwd = cwd
+ self.descriptors = descriptors
if extra_conf is not None:
append_config(datadir, extra_conf)
# Most callers will just need to add extra args to the standard list below.
@@ -107,7 +111,7 @@ class TestNode():
"--gen-suppressions=all", "--exit-on-first-error=yes",
"--error-exitcode=1", "--quiet"] + self.args
- if self.version is None or self.version >= 190000:
+ if self.version_is_at_least(190000):
self.args.append("-logthreadnames")
self.cli = TestNodeCLI(bitcoin_cli, self.datadir)
@@ -125,6 +129,7 @@ class TestNode():
self.perf_subprocesses = {}
self.p2ps = []
+ self.timeout_factor = timeout_factor
AddressKeyPair = collections.namedtuple('AddressKeyPair', ['address', 'key'])
PRIV_KEYS = [
@@ -169,10 +174,10 @@ class TestNode():
def __getattr__(self, name):
"""Dispatches any unrecognised messages to the RPC connection or a CLI instance."""
if self.use_cli:
- return getattr(self.cli, name)
+ return getattr(RPCOverloadWrapper(self.cli, True, self.descriptors), name)
else:
assert self.rpc_connected and self.rpc is not None, self._node_msg("Error: no RPC connection")
- return getattr(self.rpc, name)
+ return getattr(RPCOverloadWrapper(self.rpc, descriptors=self.descriptors), name)
def start(self, extra_args=None, *, cwd=None, stdout=None, stderr=None, **kwargs):
"""Start the node."""
@@ -215,9 +220,35 @@ class TestNode():
raise FailedToStartError(self._node_msg(
'bitcoind exited with status {} during initialization'.format(self.process.returncode)))
try:
- rpc = get_rpc_proxy(rpc_url(self.datadir, self.index, self.chain, self.rpchost), self.index, timeout=self.rpc_timeout, coveragedir=self.coverage_dir)
+ rpc = get_rpc_proxy(
+ rpc_url(self.datadir, self.index, self.chain, self.rpchost),
+ self.index,
+ timeout=self.rpc_timeout // 2, # Shorter timeout to allow for one retry in case of ETIMEDOUT
+ coveragedir=self.coverage_dir,
+ )
rpc.getblockcount()
# If the call to getblockcount() succeeds then the RPC connection is up
+ if self.version_is_at_least(190000):
+ # getmempoolinfo.loaded is available since commit
+ # bb8ae2c (version 0.19.0)
+ wait_until(lambda: rpc.getmempoolinfo()['loaded'])
+ # Wait for the node to finish reindex, block import, and
+ # loading the mempool. Usually importing happens fast or
+ # even "immediate" when the node is started. However, there
+ # is no guarantee and sometimes ThreadImport might finish
+ # later. This is going to cause intermittent test failures,
+ # because generally the tests assume the node is fully
+ # ready after being started.
+ #
+ # For example, the node will reject block messages from p2p
+ # when it is still importing with the error "Unexpected
+ # block message received"
+ #
+ # The wait is done here to make tests as robust as possible
+ # and prevent racy tests and intermittent failures as much
+ # as possible. Some tests might not need this, but the
+ # overhead is trivial, and the added guarantees are worth
+ # the minimal performance cost.
self.log.debug("RPC successfully started")
if self.use_cli:
return
@@ -225,9 +256,6 @@ class TestNode():
self.rpc_connected = True
self.url = self.rpc.url
return
- except IOError as e:
- if e.errno != errno.ECONNREFUSED: # Port not yet open?
- raise # unknown IO error
except JSONRPCException as e: # Initialization phase
# -28 RPC in warmup
# -342 Service unavailable, RPC server started but is shutting down due to error
@@ -237,11 +265,33 @@ class TestNode():
# This might happen when the RPC server is in warmup, but shut down before the call to getblockcount
# succeeds. Try again to properly raise the FailedToStartError
pass
- except ValueError as e: # cookie file not found and no rpcuser or rpcassword. bitcoind still starting
+ except OSError as e:
+ if e.errno == errno.ETIMEDOUT:
+ pass # Treat identical to ConnectionResetError
+ elif e.errno == errno.ECONNREFUSED:
+ pass # Port not yet open?
+ else:
+ raise # unknown OS error
+ except ValueError as e: # cookie file not found and no rpcuser or rpcpassword; bitcoind is still starting
if "No RPC credentials" not in str(e):
raise
time.sleep(1.0 / poll_per_s)
- self._raise_assertion_error("Unable to connect to bitcoind")
+ self._raise_assertion_error("Unable to connect to bitcoind after {}s".format(self.rpc_timeout))
+
+ def wait_for_cookie_credentials(self):
+ """Ensures auth cookie credentials can be read, e.g. for testing CLI with -rpcwait before RPC connection is up."""
+ self.log.debug("Waiting for cookie credentials")
+ # Poll at a rate of four times per second.
+ poll_per_s = 4
+ for _ in range(poll_per_s * self.rpc_timeout):
+ try:
+ get_auth_cookie(self.datadir, self.chain)
+ self.log.debug("Cookie credentials successfully retrieved")
+ return
+ except ValueError: # cookie file not found and no rpcuser or rpcpassword; bitcoind is still starting
+ pass # so we continue polling until RPC credentials are retrieved
+ time.sleep(1.0 / poll_per_s)
+ self._raise_assertion_error("Unable to retrieve cookie credentials after {}s".format(self.rpc_timeout))
def generate(self, nblocks, maxtries=1000000):
self.log.debug("TestNode.generate() dispatches `generate` call to `generatetoaddress`")
@@ -249,11 +299,14 @@ class TestNode():
def get_wallet_rpc(self, wallet_name):
if self.use_cli:
- return self.cli("-rpcwallet={}".format(wallet_name))
+ return RPCOverloadWrapper(self.cli("-rpcwallet={}".format(wallet_name)), True, self.descriptors)
else:
assert self.rpc_connected and self.rpc, self._node_msg("RPC not connected")
wallet_path = "wallet/{}".format(urllib.parse.quote(wallet_name))
- return self.rpc / wallet_path
+ return RPCOverloadWrapper(self.rpc / wallet_path, descriptors=self.descriptors)
+
+ def version_is_at_least(self, ver):
+ return self.version is None or self.version >= ver
def stop_node(self, expected_stderr='', wait=0):
"""Stop the node."""
@@ -262,7 +315,7 @@ class TestNode():
self.log.debug("Stopping node")
try:
# Do not use wait argument when testing older nodes, e.g. in feature_backwards_compatibility.py
- if self.version is None or self.version >= 180000:
+ if self.version_is_at_least(180000):
self.stop(wait=wait)
else:
self.stop()
@@ -306,13 +359,13 @@ class TestNode():
return True
def wait_until_stopped(self, timeout=BITCOIND_PROC_WAIT_TIMEOUT):
- wait_until(self.is_node_stopped, timeout=timeout)
+ wait_until(self.is_node_stopped, timeout=timeout, timeout_factor=self.timeout_factor)
@contextlib.contextmanager
def assert_debug_log(self, expected_msgs, unexpected_msgs=None, timeout=2):
if unexpected_msgs is None:
unexpected_msgs = []
- time_end = time.time() + timeout
+ time_end = time.time() + timeout * self.timeout_factor
debug_log = os.path.join(self.datadir, self.chain, 'debug.log')
with open(debug_log, encoding='utf-8') as dl:
dl.seek(0, 2)
@@ -469,7 +522,7 @@ class TestNode():
if 'dstaddr' not in kwargs:
kwargs['dstaddr'] = '127.0.0.1'
- p2p_conn.peer_connect(**kwargs, net=self.chain)()
+ p2p_conn.peer_connect(**kwargs, net=self.chain, timeout_factor=self.timeout_factor)()
self.p2ps.append(p2p_conn)
if wait_for_verack:
# Wait for the node to send us the version and verack
@@ -483,7 +536,7 @@ class TestNode():
# transaction that will be added to the mempool as soon as we return here.
#
# So syncing here is redundant when we only want to send a message, but the cost is low (a few milliseconds)
- # in comparision to the upside of making tests less fragile and unexpected intermittent errors less likely.
+ # in comparison to the upside of making tests less fragile and unexpected intermittent errors less likely.
p2p_conn.sync_with_ping()
return p2p_conn
@@ -497,11 +550,16 @@ class TestNode():
assert self.p2ps, self._node_msg("No p2p connection")
return self.p2ps[0]
+ def num_connected_mininodes(self):
+ """Return number of test framework p2p connections to the node."""
+ return len([peer for peer in self.getpeerinfo() if peer['subver'] == MY_SUBVERSION])
+
def disconnect_p2ps(self):
"""Close all p2p connections to the node."""
for p in self.p2ps:
p.peer_disconnect()
del self.p2ps[:]
+ wait_until(lambda: self.num_connected_mininodes() == 0)
class TestNodeCLIAttr:
@@ -519,6 +577,8 @@ class TestNodeCLIAttr:
def arg_to_cli(arg):
if isinstance(arg, bool):
return str(arg).lower()
+ elif arg is None:
+ return 'null'
elif isinstance(arg, dict) or isinstance(arg, list):
return json.dumps(arg, default=EncodeDecimal)
else:
@@ -527,7 +587,6 @@ def arg_to_cli(arg):
class TestNodeCLI():
"""Interface to bitcoin-cli for an individual node"""
-
def __init__(self, binary, datadir):
self.options = []
self.binary = binary
@@ -565,7 +624,7 @@ class TestNodeCLI():
if command is not None:
p_args += [command]
p_args += pos_args + named_args
- self.log.debug("Running bitcoin-cli command: %s" % command)
+ self.log.debug("Running bitcoin-cli {}".format(p_args[2:]))
process = subprocess.Popen(p_args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
cli_stdout, cli_stderr = process.communicate(input=self.input)
returncode = process.poll()
@@ -580,3 +639,87 @@ class TestNodeCLI():
return json.loads(cli_stdout, parse_float=decimal.Decimal)
except json.JSONDecodeError:
return cli_stdout.rstrip("\n")
+
+class RPCOverloadWrapper():
+ def __init__(self, rpc, cli=False, descriptors=False):
+ self.rpc = rpc
+ self.is_cli = cli
+ self.descriptors = descriptors
+
+ def __getattr__(self, name):
+ return getattr(self.rpc, name)
+
+ def createwallet(self, wallet_name, disable_private_keys=None, blank=None, passphrase='', avoid_reuse=None, descriptors=None):
+ if descriptors is None:
+ descriptors = self.descriptors
+ return self.__getattr__('createwallet')(wallet_name, disable_private_keys, blank, passphrase, avoid_reuse, descriptors)
+
+ def importprivkey(self, privkey, label=None, rescan=None):
+ wallet_info = self.getwalletinfo()
+ if 'descriptors' not in wallet_info or ('descriptors' in wallet_info and not wallet_info['descriptors']):
+ return self.__getattr__('importprivkey')(privkey, label, rescan)
+ desc = descsum_create('combo(' + privkey + ')')
+ req = [{
+ 'desc': desc,
+ 'timestamp': 0 if rescan else 'now',
+ 'label': label if label else ''
+ }]
+ import_res = self.importdescriptors(req)
+ if not import_res[0]['success']:
+ raise JSONRPCException(import_res[0]['error'])
+
+ def addmultisigaddress(self, nrequired, keys, label=None, address_type=None):
+ wallet_info = self.getwalletinfo()
+ if 'descriptors' not in wallet_info or ('descriptors' in wallet_info and not wallet_info['descriptors']):
+ return self.__getattr__('addmultisigaddress')(nrequired, keys, label, address_type)
+ cms = self.createmultisig(nrequired, keys, address_type)
+ req = [{
+ 'desc': cms['descriptor'],
+ 'timestamp': 0,
+ 'label': label if label else ''
+ }]
+ import_res = self.importdescriptors(req)
+ if not import_res[0]['success']:
+ raise JSONRPCException(import_res[0]['error'])
+ return cms
+
+ def importpubkey(self, pubkey, label=None, rescan=None):
+ wallet_info = self.getwalletinfo()
+ if 'descriptors' not in wallet_info or ('descriptors' in wallet_info and not wallet_info['descriptors']):
+ return self.__getattr__('importpubkey')(pubkey, label, rescan)
+ desc = descsum_create('combo(' + pubkey + ')')
+ req = [{
+ 'desc': desc,
+ 'timestamp': 0 if rescan else 'now',
+ 'label': label if label else ''
+ }]
+ import_res = self.importdescriptors(req)
+ if not import_res[0]['success']:
+ raise JSONRPCException(import_res[0]['error'])
+
+ def importaddress(self, address, label=None, rescan=None, p2sh=None):
+ wallet_info = self.getwalletinfo()
+ if 'descriptors' not in wallet_info or ('descriptors' in wallet_info and not wallet_info['descriptors']):
+ return self.__getattr__('importaddress')(address, label, rescan, p2sh)
+ is_hex = False
+ try:
+            int(address, 16)
+ is_hex = True
+ desc = descsum_create('raw(' + address + ')')
+        except ValueError:  # not raw hex, assume it is an address
+ desc = descsum_create('addr(' + address + ')')
+ reqs = [{
+ 'desc': desc,
+ 'timestamp': 0 if rescan else 'now',
+ 'label': label if label else ''
+ }]
+ if is_hex and p2sh:
+ reqs.append({
+ 'desc': descsum_create('p2sh(raw(' + address + '))'),
+ 'timestamp': 0 if rescan else 'now',
+ 'label': label if label else ''
+ })
+ import_res = self.importdescriptors(reqs)
+ for res in import_res:
+ if not res['success']:
+ raise JSONRPCException(res['error'])
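
For reference, the wrapper's legacy-to-descriptor translation amounts to building a single importdescriptors request around a checksummed combo() descriptor. A minimal sketch, not part of the change itself (the helper name is illustrative; descsum_create is the framework helper already used above):

    from test_framework.descriptors import descsum_create

    def privkey_import_request(privkey, label=None, rescan=None):
        # Mirrors RPCOverloadWrapper.importprivkey: combo() covers every address
        # type derived from the key; timestamp 0 forces a rescan from genesis,
        # while 'now' skips rescanning.
        return [{
            'desc': descsum_create('combo(' + privkey + ')'),
            'timestamp': 0 if rescan else 'now',
            'label': label if label else '',
        }]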
diff --git a/test/functional/test_framework/util.py b/test/functional/test_framework/util.py
index e89b4e9879..506057f1fa 100644
--- a/test/functional/test_framework/util.py
+++ b/test/functional/test_framework/util.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2014-2019 The Bitcoin Core developers
+# Copyright (c) 2014-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Helpful routines for regression testing."""
@@ -25,6 +25,7 @@ logger = logging.getLogger("TestFramework.utils")
# Assert functions
##################
+
def assert_approx(v, vexp, vspan=0.00001):
"""Assert that `v` is within `vspan` of `vexp`"""
if v < vexp - vspan:
@@ -32,6 +33,7 @@ def assert_approx(v, vexp, vspan=0.00001):
if v > vexp + vspan:
raise AssertionError("%s > [%s..%s]" % (str(v), str(vexp - vspan), str(vexp + vspan)))
+
def assert_fee_amount(fee, tx_size, fee_per_kB):
"""Assert the fee was in range"""
target_fee = round(tx_size * fee_per_kB / 1000, 8)
@@ -41,21 +43,26 @@ def assert_fee_amount(fee, tx_size, fee_per_kB):
if fee > (tx_size + 2) * fee_per_kB / 1000:
raise AssertionError("Fee of %s BTC too high! (Should be %s BTC)" % (str(fee), str(target_fee)))
+
def assert_equal(thing1, thing2, *args):
if thing1 != thing2 or any(thing1 != arg for arg in args):
raise AssertionError("not(%s)" % " == ".join(str(arg) for arg in (thing1, thing2) + args))
+
def assert_greater_than(thing1, thing2):
if thing1 <= thing2:
raise AssertionError("%s <= %s" % (str(thing1), str(thing2)))
+
def assert_greater_than_or_equal(thing1, thing2):
if thing1 < thing2:
raise AssertionError("%s < %s" % (str(thing1), str(thing2)))
+
def assert_raises(exc, fun, *args, **kwds):
assert_raises_message(exc, None, fun, *args, **kwds)
+
def assert_raises_message(exc, message, fun, *args, **kwds):
try:
fun(*args, **kwds)
@@ -71,6 +78,7 @@ def assert_raises_message(exc, message, fun, *args, **kwds):
else:
raise AssertionError("No exception raised")
+
def assert_raises_process_error(returncode, output, fun, *args, **kwds):
"""Execute a process and asserts the process return code and output.
@@ -95,6 +103,7 @@ def assert_raises_process_error(returncode, output, fun, *args, **kwds):
else:
raise AssertionError("No exception raised")
+
def assert_raises_rpc_error(code, message, fun, *args, **kwds):
"""Run an RPC and verify that a specific JSONRPC exception code and message is raised.
@@ -113,6 +122,7 @@ def assert_raises_rpc_error(code, message, fun, *args, **kwds):
"""
assert try_rpc(code, message, fun, *args, **kwds), "No exception raised"
+
def try_rpc(code, message, fun, *args, **kwds):
"""Tries to run an rpc command.
@@ -134,22 +144,22 @@ def try_rpc(code, message, fun, *args, **kwds):
else:
return False
+
def assert_is_hex_string(string):
try:
int(string, 16)
except Exception as e:
- raise AssertionError(
- "Couldn't interpret %r as hexadecimal; raised: %s" % (string, e))
+ raise AssertionError("Couldn't interpret %r as hexadecimal; raised: %s" % (string, e))
+
def assert_is_hash_string(string, length=64):
if not isinstance(string, str):
raise AssertionError("Expected a string, got type %r" % type(string))
elif length and len(string) != length:
- raise AssertionError(
- "String of length %d expected; got %d" % (length, len(string)))
+ raise AssertionError("String of length %d expected; got %d" % (length, len(string)))
elif not re.match('[abcdef0-9]+$', string):
- raise AssertionError(
- "String %r contains invalid characters for a hash." % string)
+ raise AssertionError("String %r contains invalid characters for a hash." % string)
+
def assert_array_result(object_array, to_match, expected, should_not_find=False):
"""
@@ -180,9 +190,11 @@ def assert_array_result(object_array, to_match, expected, should_not_find=False)
if num_matched > 0 and should_not_find:
raise AssertionError("Objects were found %s" % (str(to_match)))
+
# Utility functions
###################
+
def check_json_precision():
"""Make sure json library being used does not lose precision converting BTC values"""
n = Decimal("20000000.00000003")
@@ -190,11 +202,13 @@ def check_json_precision():
if satoshis != 2000000000000003:
raise RuntimeError("JSON encode/decode loses precision")
+
def EncodeDecimal(o):
if isinstance(o, Decimal):
return str(o)
raise TypeError(repr(o) + " is not JSON serializable")
+
def count_bytes(hex_string):
return len(bytearray.fromhex(hex_string))
@@ -202,15 +216,19 @@ def count_bytes(hex_string):
def hex_str_to_bytes(hex_str):
return unhexlify(hex_str.encode('ascii'))
+
def str_to_b64str(string):
return b64encode(string.encode('utf-8')).decode('ascii')
+
def satoshi_round(amount):
return Decimal(amount).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)
-def wait_until(predicate, *, attempts=float('inf'), timeout=float('inf'), lock=None):
+
+def wait_until(predicate, *, attempts=float('inf'), timeout=float('inf'), lock=None, timeout_factor=1.0):
if attempts == float('inf') and timeout == float('inf'):
timeout = 60
+ timeout = timeout * timeout_factor
attempt = 0
time_end = time.time() + timeout
@@ -234,6 +252,7 @@ def wait_until(predicate, *, attempts=float('inf'), timeout=float('inf'), lock=N
raise AssertionError("Predicate {} not true after {} seconds".format(predicate_source, timeout))
raise RuntimeError('Unreachable')
+
# RPC/P2P connection constants and functions
############################################
@@ -249,6 +268,7 @@ class PortSeed:
# Must be initialized with a unique integer for each process
n = None
+
def get_rpc_proxy(url, node_number, *, timeout=None, coveragedir=None):
"""
Args:
@@ -265,23 +285,25 @@ def get_rpc_proxy(url, node_number, *, timeout=None, coveragedir=None):
"""
proxy_kwargs = {}
if timeout is not None:
- proxy_kwargs['timeout'] = timeout
+ proxy_kwargs['timeout'] = int(timeout)
proxy = AuthServiceProxy(url, **proxy_kwargs)
proxy.url = url # store URL on proxy for info
- coverage_logfile = coverage.get_filename(
- coveragedir, node_number) if coveragedir else None
+ coverage_logfile = coverage.get_filename(coveragedir, node_number) if coveragedir else None
return coverage.AuthServiceProxyWrapper(proxy, coverage_logfile)
+
def p2p_port(n):
assert n <= MAX_NODES
return PORT_MIN + n + (MAX_NODES * PortSeed.n) % (PORT_RANGE - 1 - MAX_NODES)
+
def rpc_port(n):
return PORT_MIN + PORT_RANGE + n + (MAX_NODES * PortSeed.n) % (PORT_RANGE - 1 - MAX_NODES)
+
def rpc_url(datadir, i, chain, rpchost):
rpc_u, rpc_p = get_auth_cookie(datadir, chain)
host = '127.0.0.1'
@@ -294,9 +316,11 @@ def rpc_url(datadir, i, chain, rpchost):
host = rpchost
return "http://%s:%s@%s:%d" % (rpc_u, rpc_p, host, int(port))
+
# Node functions
################
+
def initialize_datadir(dirname, n, chain):
datadir = get_datadir_path(dirname, n)
if not os.path.isdir(datadir):
@@ -326,14 +350,17 @@ def initialize_datadir(dirname, n, chain):
os.makedirs(os.path.join(datadir, 'stdout'), exist_ok=True)
return datadir
+
def get_datadir_path(dirname, n):
return os.path.join(dirname, "node" + str(n))
+
def append_config(datadir, options):
with open(os.path.join(datadir, "bitcoin.conf"), 'a', encoding='utf8') as f:
for option in options:
f.write(option + "\n")
+
def get_auth_cookie(datadir, chain):
user = None
password = None
@@ -358,82 +385,69 @@ def get_auth_cookie(datadir, chain):
raise ValueError("No RPC credentials")
return user, password
+
# If a cookie file exists in the given datadir, delete it.
def delete_cookie_file(datadir, chain):
if os.path.isfile(os.path.join(datadir, chain, ".cookie")):
logger.debug("Deleting leftover cookie file")
os.remove(os.path.join(datadir, chain, ".cookie"))
+
def softfork_active(node, key):
"""Return whether a softfork is active."""
return node.getblockchaininfo()['softforks'][key]['active']
+
def set_node_times(nodes, t):
for node in nodes:
node.setmocktime(t)
+
def disconnect_nodes(from_connection, node_num):
- for peer_id in [peer['id'] for peer in from_connection.getpeerinfo() if "testnode%d" % node_num in peer['subver']]:
+ def get_peer_ids():
+ result = []
+ for peer in from_connection.getpeerinfo():
+ if "testnode{}".format(node_num) in peer['subver']:
+ result.append(peer['id'])
+ return result
+
+ peer_ids = get_peer_ids()
+ if not peer_ids:
+ logger.warning("disconnect_nodes: {} and {} were not connected".format(
+ from_connection.index,
+ node_num,
+ ))
+ return
+ for peer_id in peer_ids:
try:
from_connection.disconnectnode(nodeid=peer_id)
except JSONRPCException as e:
# If this node is disconnected between calculating the peer id
# and issuing the disconnect, don't worry about it.
# This avoids a race condition if we're mass-disconnecting peers.
- if e.error['code'] != -29: # RPC_CLIENT_NODE_NOT_CONNECTED
+ if e.error['code'] != -29: # RPC_CLIENT_NODE_NOT_CONNECTED
raise
# wait to disconnect
- wait_until(lambda: [peer['id'] for peer in from_connection.getpeerinfo() if "testnode%d" % node_num in peer['subver']] == [], timeout=5)
+ wait_until(lambda: not get_peer_ids(), timeout=5)
+
def connect_nodes(from_connection, node_num):
ip_port = "127.0.0.1:" + str(p2p_port(node_num))
from_connection.addnode(ip_port, "onetry")
# poll until version handshake complete to avoid race conditions
# with transaction relaying
- wait_until(lambda: all(peer['version'] != 0 for peer in from_connection.getpeerinfo()))
+ # See comments in net_processing:
+ # * Must have a version message before anything else
+ # * Must have a verack message before anything else
+ wait_until(lambda: all(peer['version'] != 0 for peer in from_connection.getpeerinfo()))
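+    # A bare verack is 24 bytes on the wire: only the message header (4-byte magic, 12-byte command, 4-byte length, 4-byte checksum) with an empty payload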
+ wait_until(lambda: all(peer['bytesrecv_per_msg'].pop('verack', 0) == 24 for peer in from_connection.getpeerinfo()))
-def sync_blocks(rpc_connections, *, wait=1, timeout=60):
- """
- Wait until everybody has the same tip.
-
- sync_blocks needs to be called with an rpc_connections set that has least
- one node already synced to the latest, stable tip, otherwise there's a
- chance it might return before all nodes are stably synced.
- """
- stop_time = time.time() + timeout
- while time.time() <= stop_time:
- best_hash = [x.getbestblockhash() for x in rpc_connections]
- if best_hash.count(best_hash[0]) == len(rpc_connections):
- return
- # Check that each peer has at least one connection
- assert (all([len(x.getpeerinfo()) for x in rpc_connections]))
- time.sleep(wait)
- raise AssertionError("Block sync timed out:{}".format("".join("\n {!r}".format(b) for b in best_hash)))
-
-
-def sync_mempools(rpc_connections, *, wait=1, timeout=60, flush_scheduler=True):
- """
- Wait until everybody has the same transactions in their memory
- pools
- """
- stop_time = time.time() + timeout
- while time.time() <= stop_time:
- pool = [set(r.getrawmempool()) for r in rpc_connections]
- if pool.count(pool[0]) == len(rpc_connections):
- if flush_scheduler:
- for r in rpc_connections:
- r.syncwithvalidationinterfacequeue()
- return
- # Check that each peer has at least one connection
- assert (all([len(x.getpeerinfo()) for x in rpc_connections]))
- time.sleep(wait)
- raise AssertionError("Mempool sync timed out:{}".format("".join("\n {!r}".format(m) for m in pool)))
-
# Transaction/Block functions
#############################
+
def find_output(node, txid, amount, *, blockhash=None):
"""
Return index to output of txid with value amount
@@ -445,6 +459,7 @@ def find_output(node, txid, amount, *, blockhash=None):
return i
raise RuntimeError("find_output txid %s : %s not found" % (txid, str(amount)))
+
def gather_inputs(from_node, amount_needed, confirmations_required=1):
"""
Return a random set of unspent txouts that are enough to pay amount_needed
@@ -462,6 +477,7 @@ def gather_inputs(from_node, amount_needed, confirmations_required=1):
raise RuntimeError("Insufficient funds: need %d, have %d" % (amount_needed, total_in))
return (total_in, inputs)
+
def make_change(from_node, amount_in, amount_out, fee):
"""
Create change output(s), return them
@@ -479,6 +495,7 @@ def make_change(from_node, amount_in, amount_out, fee):
outputs[from_node.getnewaddress()] = change
return outputs
+
def random_transaction(nodes, amount, min_fee, fee_increment, fee_variants):
"""
Create a random transaction.
@@ -498,6 +515,7 @@ def random_transaction(nodes, amount, min_fee, fee_increment, fee_variants):
return (txid, signresult["hex"], fee)
+
# Helper to create at least "count" utxos
# Pass in a fee that is sufficient for relay and mining new transactions.
def create_confirmed_utxos(fee, node, count):
@@ -530,6 +548,7 @@ def create_confirmed_utxos(fee, node, count):
assert len(utxos) >= count
return utxos
+
# Create large OP_RETURN txouts that can be appended to a transaction
# to make it large (helper for constructing large transactions).
def gen_return_txouts():
@@ -549,6 +568,7 @@ def gen_return_txouts():
txouts.append(txout)
return txouts
+
# Create a spend of each passed-in utxo, splicing in "txouts" to each raw
# transaction to make it large. See gen_return_txouts() above.
def create_lots_of_big_transactions(node, txouts, utxos, num, fee):
@@ -572,6 +592,7 @@ def create_lots_of_big_transactions(node, txouts, utxos, num, fee):
txids.append(txid)
return txids
+
def mine_large_block(node, utxos=None):
# generate a 66k transaction,
# and 14 of them is close to the 1MB block limit
@@ -585,6 +606,7 @@ def mine_large_block(node, utxos=None):
create_lots_of_big_transactions(node, txouts, utxos, num, fee=fee)
node.generate(1)
+
def find_vout_for_address(node, txid, addr):
"""
Locate the vout index of the given transaction sending to the
diff --git a/test/functional/test_framework/wallet_util.py b/test/functional/test_framework/wallet_util.py
index eb537015fb..b9c0fb6691 100755
--- a/test/functional/test_framework/wallet_util.py
+++ b/test/functional/test_framework/wallet_util.py
@@ -6,6 +6,7 @@
from collections import namedtuple
from test_framework.address import (
+ byte_to_base58,
key_to_p2pkh,
key_to_p2sh_p2wpkh,
key_to_p2wpkh,
@@ -13,6 +14,7 @@ from test_framework.address import (
script_to_p2sh_p2wsh,
script_to_p2wsh,
)
+from test_framework.key import ECKey
from test_framework.script import (
CScript,
OP_0,
@@ -66,6 +68,25 @@ def get_key(node):
p2sh_p2wpkh_redeem_script=CScript([OP_0, pkh]).hex(),
p2sh_p2wpkh_addr=key_to_p2sh_p2wpkh(pubkey))
+def get_generate_key():
+ """Generate a fresh key
+
+    Returns a named tuple of privkey, pubkey and all addresses and scripts."""
+ eckey = ECKey()
+ eckey.generate()
+ privkey = bytes_to_wif(eckey.get_bytes())
+ pubkey = eckey.get_pubkey().get_bytes().hex()
+ pkh = hash160(hex_str_to_bytes(pubkey))
+ return Key(privkey=privkey,
+ pubkey=pubkey,
+ p2pkh_script=CScript([OP_DUP, OP_HASH160, pkh, OP_EQUALVERIFY, OP_CHECKSIG]).hex(),
+ p2pkh_addr=key_to_p2pkh(pubkey),
+ p2wpkh_script=CScript([OP_0, pkh]).hex(),
+ p2wpkh_addr=key_to_p2wpkh(pubkey),
+ p2sh_p2wpkh_script=CScript([OP_HASH160, hash160(CScript([OP_0, pkh])), OP_EQUAL]).hex(),
+ p2sh_p2wpkh_redeem_script=CScript([OP_0, pkh]).hex(),
+ p2sh_p2wpkh_addr=key_to_p2sh_p2wpkh(pubkey))
+
def get_multisig(node):
"""Generate a fresh 2-of-3 multisig on node
@@ -97,3 +118,14 @@ def test_address(node, address, **kwargs):
raise AssertionError("key {} unexpectedly returned in getaddressinfo.".format(key))
elif addr_info[key] != value:
raise AssertionError("key {} value {} did not match expected value {}".format(key, addr_info[key], value))
+
+def bytes_to_wif(b, compressed=True):
+ if compressed:
+ b += b'\x01'
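+    # 239 (0xEF) is the base58check version byte for testnet/regtest private keys (mainnet uses 128)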
+ return byte_to_base58(b, 239)
+
+def generate_wif_key():
+ # Makes a WIF privkey for imports
+ k = ECKey()
+ k.generate()
+ return bytes_to_wif(k.get_bytes(), k.is_compressed)
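
For orientation, a short usage sketch of the new helpers (assumes test_framework is importable; not part of the diff itself):

    from test_framework.key import ECKey
    from test_framework.wallet_util import bytes_to_wif

    k = ECKey()
    k.generate()
    wif = bytes_to_wif(k.get_bytes())              # secret + 0x01 suffix, base58check with prefix 239
    pubkey_hex = k.get_pubkey().get_bytes().hex()  # hex pubkey, e.g. for importpubkey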
diff --git a/test/functional/test_runner.py b/test/functional/test_runner.py
index ee71de3310..2fa48006f4 100755
--- a/test/functional/test_runner.py
+++ b/test/functional/test_runner.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2014-2019 The Bitcoin Core developers
+# Copyright (c) 2014-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Run regression test suite.
@@ -24,6 +24,7 @@ import sys
import tempfile
import re
import logging
+import unittest
# Formatting. Default colors to empty strings.
BOLD, GREEN, RED, GREY = ("", ""), ("", ""), ("", ""), ("", "")
@@ -41,7 +42,7 @@ except UnicodeDecodeError:
if os.name != 'nt' or sys.getwindowsversion() >= (10, 0, 14393):
if os.name == 'nt':
import ctypes
- kernel32 = ctypes.windll.kernel32
+ kernel32 = ctypes.windll.kernel32 # type: ignore
ENABLE_VIRTUAL_TERMINAL_PROCESSING = 4
STD_OUTPUT_HANDLE = -11
STD_ERROR_HANDLE = -12
@@ -65,6 +66,12 @@ if os.name != 'nt' or sys.getwindowsversion() >= (10, 0, 14393):
TEST_EXIT_PASSED = 0
TEST_EXIT_SKIPPED = 77
+TEST_FRAMEWORK_MODULES = [
+ "address",
+ "blocktools",
+ "script",
+]
+
EXTENDED_SCRIPTS = [
# These tests are not run by default.
# Longest test should go first, to favor running tests in parallel
@@ -76,6 +83,7 @@ BASE_SCRIPTS = [
# Scripts that are run by default.
# Longest test should go first, to favor running tests in parallel
'wallet_hd.py',
+ 'wallet_hd.py --descriptors',
'wallet_backup.py',
# vv Tests less than 5m vv
'mining_getblocktemplate_longpoll.py',
@@ -86,10 +94,13 @@ BASE_SCRIPTS = [
'feature_segwit.py',
# vv Tests less than 2m vv
'wallet_basic.py',
+ 'wallet_basic.py --descriptors',
'wallet_labels.py',
+ 'wallet_labels.py --descriptors',
'p2p_segwit.py',
'p2p_timeouts.py',
'p2p_tx_download.py',
+ 'mempool_updatefromblock.py',
'wallet_dump.py',
'wallet_listtransactions.py',
# vv Tests less than 60s vv
@@ -109,6 +120,7 @@ BASE_SCRIPTS = [
'feature_abortnode.py',
# vv Tests less than 30s vv
'wallet_keypool_topup.py',
+ 'wallet_keypool_topup.py --descriptors',
'feature_fee_estimation.py',
'interface_zmq.py',
'interface_bitcoin_cli.py',
@@ -122,6 +134,7 @@ BASE_SCRIPTS = [
'interface_rest.py',
'mempool_spend_coinbase.py',
'wallet_avoidreuse.py',
+ 'wallet_avoidreuse.py --descriptors',
'mempool_reorg.py',
'mempool_persist.py',
'wallet_multiwallet.py',
@@ -134,6 +147,7 @@ BASE_SCRIPTS = [
'interface_http.py',
'interface_rpc.py',
'rpc_psbt.py',
+ 'rpc_psbt.py --descriptors',
'rpc_users.py',
'rpc_whitelist.py',
'feature_proxy.py',
@@ -145,9 +159,12 @@ BASE_SCRIPTS = [
'rpc_deprecated.py',
'wallet_disable.py',
'p2p_addr_relay.py',
+ 'p2p_getdata.py',
'rpc_net.py',
'wallet_keypool.py',
- 'p2p_mempool.py',
+ 'wallet_keypool.py --descriptors',
+ 'wallet_descriptor.py',
+ 'p2p_nobloomfilter_messages.py',
'p2p_filter.py',
'rpc_setban.py',
'p2p_blocksonly.py',
@@ -168,10 +185,12 @@ BASE_SCRIPTS = [
'mempool_packages.py',
'mempool_package_onemore.py',
'rpc_createmultisig.py',
+ 'rpc_createmultisig.py --descriptors',
'feature_versionbits_warning.py',
'rpc_preciousblock.py',
'wallet_importprunedfunds.py',
'p2p_leak_tx.py',
+ 'p2p_eviction.py',
'rpc_signmessage.py',
'rpc_generateblock.py',
'wallet_balance.py',
@@ -180,6 +199,8 @@ BASE_SCRIPTS = [
'mempool_expiry.py',
'wallet_import_rescan.py',
'wallet_import_with_label.py',
+ 'wallet_importdescriptors.py',
+ 'wallet_upgradewallet.py',
'rpc_bind.py --ipv4',
'rpc_bind.py --ipv6',
'rpc_bind.py --nonloopback',
@@ -190,6 +211,7 @@ BASE_SCRIPTS = [
'wallet_listsinceblock.py',
'p2p_leak.py',
'wallet_encryption.py',
+ 'wallet_encryption.py --descriptors',
'feature_dersig.py',
'feature_cltv.py',
'rpc_uptime.py',
@@ -207,8 +229,11 @@ BASE_SCRIPTS = [
'feature_loadblock.py',
'p2p_dos_header_tree.py',
'p2p_unrequested_blocks.py',
+ 'p2p_blockfilters.py',
'feature_includeconf.py',
'feature_asmap.py',
+ 'mempool_unbroadcast.py',
+ 'mempool_compatibility.py',
'rpc_deriveaddresses.py',
'rpc_deriveaddresses.py --usecli',
'rpc_scantxoutset.py',
@@ -217,13 +242,10 @@ BASE_SCRIPTS = [
'p2p_permissions.py',
'feature_blocksdir.py',
'feature_config_args.py',
- 'rpc_getaddressinfo_labels_purpose_deprecation.py',
- 'rpc_getaddressinfo_label_deprecation.py',
'rpc_getdescriptorinfo.py',
'rpc_help.py',
'feature_help.py',
'feature_shutdown.py',
- 'framework_test_script.py',
# Don't append tests at the end to avoid merge conflicts
# Put them in a random line within the section that fits their approximate run-time
]
@@ -374,11 +396,12 @@ def run_tests(*, test_list, src_dir, build_dir, tmpdir, jobs=1, enable_coverage=
args = args or []
# Warn if bitcoind is already running
- # pidof might fail or return an empty string if bitcoind is not running
try:
- if subprocess.check_output(["pidof", "bitcoind"]) not in [b'']:
+        # pgrep exits with code zero when one or more matching processes are found
+ if subprocess.run(["pgrep", "-x", "bitcoind"], stdout=subprocess.DEVNULL).returncode == 0:
print("%sWARNING!%s There is already a bitcoind process running on this system. Tests may fail unexpectedly due to resource contention!" % (BOLD[1], BOLD[0]))
- except (OSError, subprocess.SubprocessError):
+ except OSError:
+ # pgrep not supported
pass
# Warn if there is a cache directory
@@ -386,6 +409,16 @@ def run_tests(*, test_list, src_dir, build_dir, tmpdir, jobs=1, enable_coverage=
if os.path.isdir(cache_dir):
print("%sWARNING!%s There is a cache directory here: %s. If tests fail unexpectedly, try deleting the cache directory." % (BOLD[1], BOLD[0], cache_dir))
+ # Test Framework Tests
+ print("Running Unit Tests for Test Framework Modules")
+ test_framework_tests = unittest.TestSuite()
+ for module in TEST_FRAMEWORK_MODULES:
+ test_framework_tests.addTest(unittest.TestLoader().loadTestsFromName("test_framework.{}".format(module)))
+ result = unittest.TextTestRunner(verbosity=1, failfast=True).run(test_framework_tests)
+ if not result.wasSuccessful():
+ logging.debug("Early exiting after failure in TestFramework unit tests")
+        sys.exit(1)
+
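
For reference, loadTestsFromName simply imports each listed test_framework module and collects the unittest.TestCase subclasses defined there; a module only needs something like the following (class and test names are illustrative, not the actual framework contents):

    import unittest

    class TestFrameworkExample(unittest.TestCase):
        def test_hex_round_trip(self):
            # ordinary unittest assertions; failfast=True above stops at the first failure
            self.assertEqual(int("ff", 16), 255)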
tests_dir = src_dir + '/test/functional/'
flags = ['--cachedir={}'.format(cache_dir)] + args
@@ -609,7 +642,7 @@ class TestResult():
def check_script_prefixes():
"""Check that test scripts start with one of the allowed name prefixes."""
- good_prefixes_re = re.compile("^(example|feature|interface|mempool|mining|p2p|rpc|wallet|tool|framework_test)_")
+ good_prefixes_re = re.compile("^(example|feature|interface|mempool|mining|p2p|rpc|wallet|tool)_")
bad_script_names = [script for script in ALL_SCRIPTS if good_prefixes_re.match(script) is None]
if bad_script_names:
diff --git a/test/functional/tool_wallet.py b/test/functional/tool_wallet.py
index d2629ff1ed..524e1593ba 100755
--- a/test/functional/tool_wallet.py
+++ b/test/functional/tool_wallet.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2018-2019 The Bitcoin Core developers
+# Copyright (c) 2018-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test bitcoin-wallet."""
@@ -15,6 +15,7 @@ from test_framework.util import assert_equal
BUFFER_SIZE = 16 * 1024
+
class ToolWalletTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
@@ -48,7 +49,7 @@ class ToolWalletTest(BitcoinTestFramework):
h = hashlib.sha1()
mv = memoryview(bytearray(BUFFER_SIZE))
with open(self.wallet_path, 'rb', buffering=0) as f:
- for n in iter(lambda : f.readinto(mv), 0):
+ for n in iter(lambda: f.readinto(mv), 0):
h.update(mv[:n])
return h.hexdigest()
@@ -69,7 +70,12 @@ class ToolWalletTest(BitcoinTestFramework):
self.assert_raises_tool_error('Invalid command: help', 'help')
self.assert_raises_tool_error('Error: two methods provided (info and create). Only one method should be provided.', 'info', 'create')
self.assert_raises_tool_error('Error parsing command line arguments: Invalid parameter -foo', '-foo')
- self.assert_raises_tool_error('Error loading wallet.dat. Is wallet being used by other process?', '-wallet=wallet.dat', 'info')
+ self.assert_raises_tool_error(
+ 'Error initializing wallet database environment "{}"!\nError loading wallet.dat. Is wallet being used by other process?'
+ .format(os.path.join(self.nodes[0].datadir, self.chain, 'wallets')),
+ '-wallet=wallet.dat',
+ 'info',
+ )
self.assert_raises_tool_error('Error: no wallet file at nonexistent.dat', '-wallet=nonexistent.dat', 'info')
def test_tool_wallet_info(self):
@@ -84,7 +90,7 @@ class ToolWalletTest(BitcoinTestFramework):
#
# self.log.debug('Setting wallet file permissions to 400 (read-only)')
# os.chmod(self.wallet_path, stat.S_IRUSR)
- # assert(self.wallet_permissions() in ['400', '666']) # Sanity check. 666 because Appveyor.
+ # assert self.wallet_permissions() in ['400', '666'] # Sanity check. 666 because Appveyor.
# shasum_before = self.wallet_shasum()
timestamp_before = self.wallet_timestamp()
self.log.debug('Wallet file timestamp before calling info: {}'.format(timestamp_before))
@@ -103,7 +109,7 @@ class ToolWalletTest(BitcoinTestFramework):
self.log_wallet_timestamp_comparison(timestamp_before, timestamp_after)
self.log.debug('Setting wallet file permissions back to 600 (read/write)')
os.chmod(self.wallet_path, stat.S_IRUSR | stat.S_IWUSR)
- assert(self.wallet_permissions() in ['600', '666']) # Sanity check. 666 because Appveyor.
+ assert self.wallet_permissions() in ['600', '666'] # Sanity check. 666 because Appveyor.
#
# TODO: Wallet tool info should not write to the wallet file.
# The following lines should be uncommented and the tests still succeed:
@@ -197,6 +203,14 @@ class ToolWalletTest(BitcoinTestFramework):
assert_equal(shasum_after, shasum_before)
self.log.debug('Wallet file shasum unchanged\n')
+ def test_salvage(self):
+ # TODO: Check salvage actually salvages and doesn't break things. https://github.com/bitcoin/bitcoin/issues/7463
+ self.log.info('Check salvage')
+ self.start_node(0, ['-wallet=salvage'])
+ self.stop_node(0)
+
+ self.assert_tool_output('', '-wallet=salvage', 'salvage')
+
def run_test(self):
self.wallet_path = os.path.join(self.nodes[0].datadir, self.chain, 'wallets', 'wallet.dat')
self.test_invalid_tool_commands_and_args()
@@ -205,7 +219,7 @@ class ToolWalletTest(BitcoinTestFramework):
self.test_tool_wallet_info_after_transaction()
self.test_tool_wallet_create_on_existing_wallet()
self.test_getwalletinfo_on_different_wallet()
-
+ self.test_salvage()
if __name__ == '__main__':
ToolWalletTest().main()
diff --git a/test/functional/wallet_abandonconflict.py b/test/functional/wallet_abandonconflict.py
index d63ab28d61..8837e13005 100755
--- a/test/functional/wallet_abandonconflict.py
+++ b/test/functional/wallet_abandonconflict.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2014-2019 The Bitcoin Core developers
+# Copyright (c) 2014-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the abandontransaction RPC.
@@ -18,7 +18,6 @@ from test_framework.util import (
assert_raises_rpc_error,
connect_nodes,
disconnect_nodes,
- wait_until,
)
@@ -96,9 +95,8 @@ class AbandonConflictTest(BitcoinTestFramework):
# Restart the node with a higher min relay fee so the parent tx is no longer in mempool
# TODO: redo with eviction
- self.stop_node(0)
- self.start_node(0, extra_args=["-minrelaytxfee=0.0001"])
- wait_until(lambda: self.nodes[0].getmempoolinfo()['loaded'])
+ self.restart_node(0, extra_args=["-minrelaytxfee=0.0001"])
+ assert self.nodes[0].getmempoolinfo()['loaded']
# Verify txs no longer in either node's mempool
assert_equal(len(self.nodes[0].getrawmempool()), 0)
@@ -124,9 +122,8 @@ class AbandonConflictTest(BitcoinTestFramework):
balance = newbalance
# Verify that even with a low min relay fee, the tx is not reaccepted from wallet on startup once abandoned
- self.stop_node(0)
- self.start_node(0, extra_args=["-minrelaytxfee=0.00001"])
- wait_until(lambda: self.nodes[0].getmempoolinfo()['loaded'])
+ self.restart_node(0, extra_args=["-minrelaytxfee=0.00001"])
+ assert self.nodes[0].getmempoolinfo()['loaded']
assert_equal(len(self.nodes[0].getrawmempool()), 0)
assert_equal(self.nodes[0].getbalance(), balance)
@@ -146,9 +143,8 @@ class AbandonConflictTest(BitcoinTestFramework):
balance = newbalance
# Remove using high relay fee again
- self.stop_node(0)
- self.start_node(0, extra_args=["-minrelaytxfee=0.0001"])
- wait_until(lambda: self.nodes[0].getmempoolinfo()['loaded'])
+ self.restart_node(0, extra_args=["-minrelaytxfee=0.0001"])
+ assert self.nodes[0].getmempoolinfo()['loaded']
assert_equal(len(self.nodes[0].getrawmempool()), 0)
newbalance = self.nodes[0].getbalance()
assert_equal(newbalance, balance - Decimal("24.9996"))
diff --git a/test/functional/wallet_address_types.py b/test/functional/wallet_address_types.py
index 065764dcac..68e22b7e86 100755
--- a/test/functional/wallet_address_types.py
+++ b/test/functional/wallet_address_types.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2017-2019 The Bitcoin Core developers
+# Copyright (c) 2017-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test that the wallet can send and receive using all combinations of address types.
diff --git a/test/functional/wallet_avoidreuse.py b/test/functional/wallet_avoidreuse.py
index 2ce8d459c6..eddd938847 100755
--- a/test/functional/wallet_avoidreuse.py
+++ b/test/functional/wallet_avoidreuse.py
@@ -85,15 +85,19 @@ class AvoidReuseTest(BitcoinTestFramework):
self.sync_all()
self.test_change_remains_change(self.nodes[1])
reset_balance(self.nodes[1], self.nodes[0].getnewaddress())
- self.test_fund_send_fund_senddirty()
+ self.test_sending_from_reused_address_without_avoid_reuse()
reset_balance(self.nodes[1], self.nodes[0].getnewaddress())
- self.test_fund_send_fund_send("legacy")
+ self.test_sending_from_reused_address_fails("legacy")
reset_balance(self.nodes[1], self.nodes[0].getnewaddress())
- self.test_fund_send_fund_send("p2sh-segwit")
+ self.test_sending_from_reused_address_fails("p2sh-segwit")
reset_balance(self.nodes[1], self.nodes[0].getnewaddress())
- self.test_fund_send_fund_send("bech32")
+ self.test_sending_from_reused_address_fails("bech32")
reset_balance(self.nodes[1], self.nodes[0].getnewaddress())
self.test_getbalances_used()
+ reset_balance(self.nodes[1], self.nodes[0].getnewaddress())
+ self.test_full_destination_group_is_preferred()
+ reset_balance(self.nodes[1], self.nodes[0].getnewaddress())
+ self.test_all_destination_groups_are_used()
def test_persistence(self):
'''Test that wallet files persist the avoid_reuse flag.'''
@@ -106,9 +110,7 @@ class AvoidReuseTest(BitcoinTestFramework):
assert_equal(self.nodes[0].getwalletinfo()["avoid_reuse"], False)
assert_equal(self.nodes[1].getwalletinfo()["avoid_reuse"], True)
- # Stop and restart node 1
- self.stop_node(1)
- self.start_node(1)
+ self.restart_node(1)
connect_nodes(self.nodes[0], 1)
# Flags should still be node1.avoid_reuse=false, node2.avoid_reuse=true
@@ -129,7 +131,7 @@ class AvoidReuseTest(BitcoinTestFramework):
tempwallet = ".wallet_avoidreuse.py_test_immutable_wallet.dat"
# Create a wallet with disable_private_keys set; this should work
- self.nodes[1].createwallet(tempwallet, True)
+ self.nodes[1].createwallet(wallet_name=tempwallet, disable_private_keys=True)
w = self.nodes[1].get_wallet_rpc(tempwallet)
# Attempt to unset the disable_private_keys flag; this should not work
@@ -162,13 +164,13 @@ class AvoidReuseTest(BitcoinTestFramework):
for logical_tx in node.listtransactions():
assert logical_tx.get('address') != changeaddr
- def test_fund_send_fund_senddirty(self):
+ def test_sending_from_reused_address_without_avoid_reuse(self):
'''
- Test the same as test_fund_send_fund_send, except send the 10 BTC with
+ Test the same as test_sending_from_reused_address_fails, except send the 10 BTC with
the avoid_reuse flag set to false. This means the 10 BTC send should succeed,
- where it fails in test_fund_send_fund_send.
+ where it fails in test_sending_from_reused_address_fails.
'''
- self.log.info("Test fund send fund send dirty")
+ self.log.info("Test sending from reused address with avoid_reuse=false")
fundaddr = self.nodes[1].getnewaddress()
retaddr = self.nodes[0].getnewaddress()
@@ -213,7 +215,7 @@ class AvoidReuseTest(BitcoinTestFramework):
assert_approx(self.nodes[1].getbalance(), 5, 0.001)
assert_approx(self.nodes[1].getbalance(avoid_reuse=False), 5, 0.001)
- def test_fund_send_fund_send(self, second_addr_type):
+ def test_sending_from_reused_address_fails(self, second_addr_type):
'''
Test the simple case where [1] generates a new address A, then
[0] sends 10 BTC to A.
@@ -222,7 +224,7 @@ class AvoidReuseTest(BitcoinTestFramework):
[1] tries to spend 10 BTC (fails; dirty).
[1] tries to spend 4 BTC (succeeds; change address sufficient)
'''
- self.log.info("Test fund send fund send")
+ self.log.info("Test sending from reused {} address fails".format(second_addr_type))
fundaddr = self.nodes[1].getnewaddress(label="", address_type="legacy")
retaddr = self.nodes[0].getnewaddress()
@@ -245,43 +247,44 @@ class AvoidReuseTest(BitcoinTestFramework):
# getbalances should show no used, 5 btc trusted
assert_balances(self.nodes[1], mine={"used": 0, "trusted": 5})
- # For the second send, we transmute it to a related single-key address
- # to make sure it's also detected as re-use
- fund_spk = self.nodes[0].getaddressinfo(fundaddr)["scriptPubKey"]
- fund_decoded = self.nodes[0].decodescript(fund_spk)
- if second_addr_type == "p2sh-segwit":
- new_fundaddr = fund_decoded["segwit"]["p2sh-segwit"]
- elif second_addr_type == "bech32":
- new_fundaddr = fund_decoded["segwit"]["addresses"][0]
- else:
- new_fundaddr = fundaddr
- assert_equal(second_addr_type, "legacy")
-
- self.nodes[0].sendtoaddress(new_fundaddr, 10)
- self.nodes[0].generate(1)
- self.sync_all()
-
- # listunspent should show 2 total outputs (5, 10 btc), one unused (5), one reused (10)
- assert_unspent(self.nodes[1], total_count=2, total_sum=15, reused_count=1, reused_sum=10)
- # getbalances should show 10 used, 5 btc trusted
- assert_balances(self.nodes[1], mine={"used": 10, "trusted": 5})
-
- # node 1 should now have a balance of 5 (no dirty) or 15 (including dirty)
- assert_approx(self.nodes[1].getbalance(), 5, 0.001)
- assert_approx(self.nodes[1].getbalance(avoid_reuse=False), 15, 0.001)
-
- assert_raises_rpc_error(-6, "Insufficient funds", self.nodes[1].sendtoaddress, retaddr, 10)
-
- self.nodes[1].sendtoaddress(retaddr, 4)
-
- # listunspent should show 2 total outputs (1, 10 btc), one unused (1), one reused (10)
- assert_unspent(self.nodes[1], total_count=2, total_sum=11, reused_count=1, reused_sum=10)
- # getbalances should show 10 used, 1 btc trusted
- assert_balances(self.nodes[1], mine={"used": 10, "trusted": 1})
-
- # node 1 should now have about 1 btc left (no dirty) and 11 (including dirty)
- assert_approx(self.nodes[1].getbalance(), 1, 0.001)
- assert_approx(self.nodes[1].getbalance(avoid_reuse=False), 11, 0.001)
+ if not self.options.descriptors:
+ # For the second send, we transmute it to a related single-key address
+ # to make sure it's also detected as re-use
+ fund_spk = self.nodes[0].getaddressinfo(fundaddr)["scriptPubKey"]
+ fund_decoded = self.nodes[0].decodescript(fund_spk)
+ if second_addr_type == "p2sh-segwit":
+ new_fundaddr = fund_decoded["segwit"]["p2sh-segwit"]
+ elif second_addr_type == "bech32":
+ new_fundaddr = fund_decoded["segwit"]["addresses"][0]
+ else:
+ new_fundaddr = fundaddr
+ assert_equal(second_addr_type, "legacy")
+
+ self.nodes[0].sendtoaddress(new_fundaddr, 10)
+ self.nodes[0].generate(1)
+ self.sync_all()
+
+ # listunspent should show 2 total outputs (5, 10 btc), one unused (5), one reused (10)
+ assert_unspent(self.nodes[1], total_count=2, total_sum=15, reused_count=1, reused_sum=10)
+ # getbalances should show 10 used, 5 btc trusted
+ assert_balances(self.nodes[1], mine={"used": 10, "trusted": 5})
+
+ # node 1 should now have a balance of 5 (no dirty) or 15 (including dirty)
+ assert_approx(self.nodes[1].getbalance(), 5, 0.001)
+ assert_approx(self.nodes[1].getbalance(avoid_reuse=False), 15, 0.001)
+
+ assert_raises_rpc_error(-6, "Insufficient funds", self.nodes[1].sendtoaddress, retaddr, 10)
+
+ self.nodes[1].sendtoaddress(retaddr, 4)
+
+ # listunspent should show 2 total outputs (1, 10 btc), one unused (1), one reused (10)
+ assert_unspent(self.nodes[1], total_count=2, total_sum=11, reused_count=1, reused_sum=10)
+ # getbalances should show 10 used, 1 btc trusted
+ assert_balances(self.nodes[1], mine={"used": 10, "trusted": 1})
+
+ # node 1 should now have about 1 btc left (no dirty) and 11 (including dirty)
+ assert_approx(self.nodes[1].getbalance(), 1, 0.001)
+ assert_approx(self.nodes[1].getbalance(avoid_reuse=False), 11, 0.001)
def test_getbalances_used(self):
'''
@@ -313,5 +316,66 @@ class AvoidReuseTest(BitcoinTestFramework):
assert_unspent(self.nodes[1], total_count=2, total_sum=6, reused_count=1, reused_sum=1)
assert_balances(self.nodes[1], mine={"used": 1, "trusted": 5})
+ def test_full_destination_group_is_preferred(self):
+ '''
+ Test the case where [1] only has 11 outputs of 1 BTC in the same reused
+ address and tries to send a small payment of 0.5 BTC. The wallet
+ should use 10 outputs from the reused address as inputs and not a
+ single 1 BTC input, in order to join several outputs from the reused
+ address.
+ '''
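+        # The wallet groups at most 10 outputs per destination, so a small payment is expected to consume one full group rather than a single output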
+ self.log.info("Test that full destination groups are preferred in coin selection")
+
+ # Node under test should be empty
+ assert_equal(self.nodes[1].getbalance(avoid_reuse=False), 0)
+
+ new_addr = self.nodes[1].getnewaddress()
+ ret_addr = self.nodes[0].getnewaddress()
+
+ # Send 11 outputs of 1 BTC to the same, reused address in the wallet
+ for _ in range(11):
+ self.nodes[0].sendtoaddress(new_addr, 1)
+
+ self.nodes[0].generate(1)
+ self.sync_all()
+
+ # Sending a transaction that is smaller than each one of the
+ # available outputs
+ txid = self.nodes[1].sendtoaddress(address=ret_addr, amount=0.5)
+ inputs = self.nodes[1].getrawtransaction(txid, 1)["vin"]
+
+ # The transaction should use 10 inputs exactly
+ assert_equal(len(inputs), 10)
+
+ def test_all_destination_groups_are_used(self):
+ '''
+ Test the case where [1] only has 22 outputs of 1 BTC in the same reused
+ address and tries to send a payment of 20.5 BTC. The wallet
+ should use all 22 outputs from the reused address as inputs.
+ '''
+ self.log.info("Test that all destination groups are used")
+
+ # Node under test should be empty
+ assert_equal(self.nodes[1].getbalance(avoid_reuse=False), 0)
+
+ new_addr = self.nodes[1].getnewaddress()
+ ret_addr = self.nodes[0].getnewaddress()
+
+ # Send 22 outputs of 1 BTC to the same, reused address in the wallet
+ for _ in range(22):
+ self.nodes[0].sendtoaddress(new_addr, 1)
+
+ self.nodes[0].generate(1)
+ self.sync_all()
+
+ # Sending a transaction that needs to use the full groups
+ # of 10 inputs but also the incomplete group of 2 inputs.
+ txid = self.nodes[1].sendtoaddress(address=ret_addr, amount=20.5)
+ inputs = self.nodes[1].getrawtransaction(txid, 1)["vin"]
+
+ # The transaction should use 22 inputs exactly
+ assert_equal(len(inputs), 22)
+
+
if __name__ == '__main__':
AvoidReuseTest().main()
diff --git a/test/functional/wallet_backup.py b/test/functional/wallet_backup.py
index fb80a06433..9dd91b2495 100755
--- a/test/functional/wallet_backup.py
+++ b/test/functional/wallet_backup.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2014-2019 The Bitcoin Core developers
+# Copyright (c) 2014-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the wallet backup features.
diff --git a/test/functional/wallet_balance.py b/test/functional/wallet_balance.py
index 58eb34e66c..31829a18b3 100755
--- a/test/functional/wallet_balance.py
+++ b/test/functional/wallet_balance.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2018-2019 The Bitcoin Core developers
+# Copyright (c) 2018-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the wallet balance RPC methods."""
@@ -12,7 +12,6 @@ from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
connect_nodes,
- sync_blocks,
)
@@ -264,7 +263,7 @@ class WalletTest(BitcoinTestFramework):
# Now confirm tx_orig
self.restart_node(1, ['-persistmempool=0'])
connect_nodes(self.nodes[0], 1)
- sync_blocks(self.nodes)
+ self.sync_blocks()
self.nodes[1].sendrawtransaction(tx_orig)
self.nodes[1].generatetoaddress(1, ADDRESS_WATCHONLY)
self.sync_all()
diff --git a/test/functional/wallet_basic.py b/test/functional/wallet_basic.py
index 15746d312c..8962362276 100755
--- a/test/functional/wallet_basic.py
+++ b/test/functional/wallet_basic.py
@@ -4,7 +4,6 @@
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the wallet."""
from decimal import Decimal
-import time
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
@@ -49,6 +48,7 @@ class WalletTest(BitcoinTestFramework):
return self.nodes[0].decoderawtransaction(txn)['vsize']
def run_test(self):
+
# Check that there's no UTXO on none of the nodes
assert_equal(len(self.nodes[0].listunspent()), 0)
assert_equal(len(self.nodes[1].listunspent()), 0)
@@ -219,7 +219,61 @@ class WalletTest(BitcoinTestFramework):
assert_equal(self.nodes[2].getbalance(), node_2_bal)
node_0_bal = self.check_fee_amount(self.nodes[0].getbalance(), node_0_bal + Decimal('10'), fee_per_byte, self.get_vsize(self.nodes[2].gettransaction(txid)['hex']))
- self.start_node(3)
+ # Sendmany with explicit fee (BTC/kB)
+ # Throw if no conf_target provided
+ assert_raises_rpc_error(-8, "Selected estimate_mode requires a fee rate",
+ self.nodes[2].sendmany,
+ amounts={ address: 10 },
+ estimate_mode='bTc/kB')
+ # Throw if negative feerate
+ assert_raises_rpc_error(-3, "Amount out of range",
+ self.nodes[2].sendmany,
+ amounts={ address: 10 },
+ conf_target=-1,
+ estimate_mode='bTc/kB')
+ fee_per_kb = 0.0002500
+ explicit_fee_per_byte = Decimal(fee_per_kb) / 1000
+ txid = self.nodes[2].sendmany(
+ amounts={ address: 10 },
+ conf_target=fee_per_kb,
+ estimate_mode='bTc/kB',
+ )
+ self.nodes[2].generate(1)
+ self.sync_all(self.nodes[0:3])
+ node_2_bal = self.check_fee_amount(self.nodes[2].getbalance(), node_2_bal - Decimal('10'), explicit_fee_per_byte, self.get_vsize(self.nodes[2].gettransaction(txid)['hex']))
+ assert_equal(self.nodes[2].getbalance(), node_2_bal)
+ node_0_bal += Decimal('10')
+ assert_equal(self.nodes[0].getbalance(), node_0_bal)
+
+ # Sendmany with explicit fee (SAT/B)
+ # Throw if no conf_target provided
+ assert_raises_rpc_error(-8, "Selected estimate_mode requires a fee rate",
+ self.nodes[2].sendmany,
+ amounts={ address: 10 },
+ estimate_mode='sat/b')
+ # Throw if negative feerate
+ assert_raises_rpc_error(-3, "Amount out of range",
+ self.nodes[2].sendmany,
+ amounts={ address: 10 },
+ conf_target=-1,
+ estimate_mode='sat/b')
+ fee_sat_per_b = 2
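+        # sat/B to BTC/kB: multiply by 1e-5 (1 sat = 1e-8 BTC, 1 kB = 1000 B), so 2 sat/B == 0.00002 BTC/kB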
+ fee_per_kb = fee_sat_per_b / 100000.0
+ explicit_fee_per_byte = Decimal(fee_per_kb) / 1000
+ txid = self.nodes[2].sendmany(
+ amounts={ address: 10 },
+ conf_target=fee_sat_per_b,
+ estimate_mode='sAT/b',
+ )
+ self.nodes[2].generate(1)
+ self.sync_all(self.nodes[0:3])
+ balance = self.nodes[2].getbalance()
+ node_2_bal = self.check_fee_amount(balance, node_2_bal - Decimal('10'), explicit_fee_per_byte, self.get_vsize(self.nodes[2].gettransaction(txid)['hex']))
+ assert_equal(balance, node_2_bal)
+ node_0_bal += Decimal('10')
+ assert_equal(self.nodes[0].getbalance(), node_0_bal)
+
+ self.start_node(3, self.nodes[3].extra_args)
connect_nodes(self.nodes[0], 3)
self.sync_all()
@@ -315,57 +369,127 @@ class WalletTest(BitcoinTestFramework):
# This will raise an exception since generate does not accept a string
assert_raises_rpc_error(-1, "not an integer", self.nodes[0].generate, "2")
- # This will raise an exception for the invalid private key format
- assert_raises_rpc_error(-5, "Invalid private key encoding", self.nodes[0].importprivkey, "invalid")
-
- # This will raise an exception for importing an address with the PS2H flag
- temp_address = self.nodes[1].getnewaddress("", "p2sh-segwit")
- assert_raises_rpc_error(-5, "Cannot use the p2sh flag with an address - use a script instead", self.nodes[0].importaddress, temp_address, "label", False, True)
-
- # This will raise an exception for attempting to dump the private key of an address you do not own
- assert_raises_rpc_error(-3, "Address does not refer to a key", self.nodes[0].dumpprivkey, temp_address)
-
- # This will raise an exception for attempting to get the private key of an invalid Bitcoin address
- assert_raises_rpc_error(-5, "Invalid Bitcoin address", self.nodes[0].dumpprivkey, "invalid")
-
- # This will raise an exception for attempting to set a label for an invalid Bitcoin address
- assert_raises_rpc_error(-5, "Invalid Bitcoin address", self.nodes[0].setlabel, "invalid address", "label")
-
- # This will raise an exception for importing an invalid address
- assert_raises_rpc_error(-5, "Invalid Bitcoin address or script", self.nodes[0].importaddress, "invalid")
-
- # This will raise an exception for attempting to import a pubkey that isn't in hex
- assert_raises_rpc_error(-5, "Pubkey must be a hex string", self.nodes[0].importpubkey, "not hex")
-
- # This will raise an exception for importing an invalid pubkey
- assert_raises_rpc_error(-5, "Pubkey is not a valid public key", self.nodes[0].importpubkey, "5361746f736869204e616b616d6f746f")
-
- # Import address and private key to check correct behavior of spendable unspents
- # 1. Send some coins to generate new UTXO
- address_to_import = self.nodes[2].getnewaddress()
- txid = self.nodes[0].sendtoaddress(address_to_import, 1)
- self.nodes[0].generate(1)
- self.sync_all(self.nodes[0:3])
-
- # 2. Import address from node2 to node1
- self.nodes[1].importaddress(address_to_import)
-
- # 3. Validate that the imported address is watch-only on node1
- assert self.nodes[1].getaddressinfo(address_to_import)["iswatchonly"]
-
- # 4. Check that the unspents after import are not spendable
- assert_array_result(self.nodes[1].listunspent(),
- {"address": address_to_import},
- {"spendable": False})
-
- # 5. Import private key of the previously imported address on node1
- priv_key = self.nodes[2].dumpprivkey(address_to_import)
- self.nodes[1].importprivkey(priv_key)
-
- # 6. Check that the unspents are now spendable on node1
- assert_array_result(self.nodes[1].listunspent(),
- {"address": address_to_import},
- {"spendable": True})
+ if not self.options.descriptors:
+
+ # This will raise an exception for the invalid private key format
+ assert_raises_rpc_error(-5, "Invalid private key encoding", self.nodes[0].importprivkey, "invalid")
+
+ # This will raise an exception for importing an address with the PS2H flag
+ temp_address = self.nodes[1].getnewaddress("", "p2sh-segwit")
+ assert_raises_rpc_error(-5, "Cannot use the p2sh flag with an address - use a script instead", self.nodes[0].importaddress, temp_address, "label", False, True)
+
+ # This will raise an exception for attempting to dump the private key of an address you do not own
+ assert_raises_rpc_error(-3, "Address does not refer to a key", self.nodes[0].dumpprivkey, temp_address)
+
+ # This will raise an exception for attempting to get the private key of an invalid Bitcoin address
+ assert_raises_rpc_error(-5, "Invalid Bitcoin address", self.nodes[0].dumpprivkey, "invalid")
+
+ # This will raise an exception for attempting to set a label for an invalid Bitcoin address
+ assert_raises_rpc_error(-5, "Invalid Bitcoin address", self.nodes[0].setlabel, "invalid address", "label")
+
+ # This will raise an exception for importing an invalid address
+ assert_raises_rpc_error(-5, "Invalid Bitcoin address or script", self.nodes[0].importaddress, "invalid")
+
+ # This will raise an exception for attempting to import a pubkey that isn't in hex
+ assert_raises_rpc_error(-5, "Pubkey must be a hex string", self.nodes[0].importpubkey, "not hex")
+
+ # This will raise an exception for importing an invalid pubkey
+ assert_raises_rpc_error(-5, "Pubkey is not a valid public key", self.nodes[0].importpubkey, "5361746f736869204e616b616d6f746f")
+
+ # Import address and private key to check correct behavior of spendable unspents
+ # 1. Send some coins to generate new UTXO
+ address_to_import = self.nodes[2].getnewaddress()
+ txid = self.nodes[0].sendtoaddress(address_to_import, 1)
+ self.nodes[0].generate(1)
+ self.sync_all(self.nodes[0:3])
+
+ # send with explicit btc/kb fee
+ self.log.info("test explicit fee (sendtoaddress as btc/kb)")
+ self.nodes[0].generate(1)
+ self.sync_all(self.nodes[0:3])
+ prebalance = self.nodes[2].getbalance()
+ assert prebalance > 2
+ address = self.nodes[1].getnewaddress()
+ # Throw if no conf_target provided
+ assert_raises_rpc_error(-8, "Selected estimate_mode requires a fee rate",
+ self.nodes[2].sendtoaddress,
+ address=address,
+ amount=1.0,
+ estimate_mode='BTc/Kb')
+ # Throw if negative feerate
+ assert_raises_rpc_error(-3, "Amount out of range",
+ self.nodes[2].sendtoaddress,
+ address=address,
+ amount=1.0,
+ conf_target=-1,
+ estimate_mode='btc/kb')
+ txid = self.nodes[2].sendtoaddress(
+ address=address,
+ amount=1.0,
+ conf_target=0.00002500,
+ estimate_mode='btc/kb',
+ )
+ tx_size = self.get_vsize(self.nodes[2].gettransaction(txid)['hex'])
+ self.sync_all(self.nodes[0:3])
+ self.nodes[0].generate(1)
+ self.sync_all(self.nodes[0:3])
+ postbalance = self.nodes[2].getbalance()
+ fee = prebalance - postbalance - Decimal('1')
+ assert_fee_amount(fee, tx_size, Decimal('0.00002500'))
+
+ # send with explicit sat/b fee
+ self.sync_all(self.nodes[0:3])
+ self.log.info("test explicit fee (sendtoaddress as sat/b)")
+ self.nodes[0].generate(1)
+ prebalance = self.nodes[2].getbalance()
+ assert prebalance > 2
+ address = self.nodes[1].getnewaddress()
+ # Throw if no conf_target provided
+ assert_raises_rpc_error(-8, "Selected estimate_mode requires a fee rate",
+ self.nodes[2].sendtoaddress,
+ address=address,
+ amount=1.0,
+ estimate_mode='SAT/b')
+ # Throw if negative feerate
+ assert_raises_rpc_error(-3, "Amount out of range",
+ self.nodes[2].sendtoaddress,
+ address=address,
+ amount=1.0,
+ conf_target=-1,
+ estimate_mode='SAT/b')
+ txid = self.nodes[2].sendtoaddress(
+ address=address,
+ amount=1.0,
+ conf_target=2,
+ estimate_mode='SAT/B',
+ )
+ tx_size = self.get_vsize(self.nodes[2].gettransaction(txid)['hex'])
+ self.sync_all(self.nodes[0:3])
+ self.nodes[0].generate(1)
+ self.sync_all(self.nodes[0:3])
+ postbalance = self.nodes[2].getbalance()
+ fee = prebalance - postbalance - Decimal('1')
+ assert_fee_amount(fee, tx_size, Decimal('0.00002000'))
+
+ # 2. Import address from node2 to node1
+ self.nodes[1].importaddress(address_to_import)
+
+ # 3. Validate that the imported address is watch-only on node1
+ assert self.nodes[1].getaddressinfo(address_to_import)["iswatchonly"]
+
+ # 4. Check that the unspents after import are not spendable
+ assert_array_result(self.nodes[1].listunspent(),
+ {"address": address_to_import},
+ {"spendable": False})
+
+ # 5. Import private key of the previously imported address on node1
+ priv_key = self.nodes[2].dumpprivkey(address_to_import)
+ self.nodes[1].importprivkey(priv_key)
+
+ # 6. Check that the unspents are now spendable on node1
+ assert_array_result(self.nodes[1].listunspent(),
+ {"address": address_to_import},
+ {"spendable": True})
# Mine a block from node0 to an address from node1
coinbase_addr = self.nodes[1].getnewaddress()
@@ -402,8 +526,6 @@ class WalletTest(BitcoinTestFramework):
'-reindex',
'-zapwallettxes=1',
'-zapwallettxes=2',
- # disabled until issue is fixed: https://github.com/bitcoin/bitcoin/issues/7463
- # '-salvagewallet',
]
chainlimit = 6
for m in maintenance:
@@ -460,14 +582,11 @@ class WalletTest(BitcoinTestFramework):
# Try with walletrejectlongchains
# Double chain limit but require combining inputs, so we pass SelectCoinsMinConf
self.stop_node(0)
- self.start_node(0, extra_args=["-walletrejectlongchains", "-limitancestorcount=" + str(2 * chainlimit)])
-
- # wait for loadmempool
- timeout = 10
- while (timeout > 0 and len(self.nodes[0].getrawmempool()) < chainlimit * 2):
- time.sleep(0.5)
- timeout -= 0.5
- assert_equal(len(self.nodes[0].getrawmempool()), chainlimit * 2)
+ extra_args = ["-walletrejectlongchains", "-limitancestorcount=" + str(2 * chainlimit)]
+ self.start_node(0, extra_args=extra_args)
+
+ # wait until the wallet has submitted all transactions to the mempool
+ wait_until(lambda: len(self.nodes[0].getrawmempool()) == chainlimit * 2)
node0_balance = self.nodes[0].getbalance()
# With walletrejectlongchains we will not create the tx and store it in our wallet.
diff --git a/test/functional/wallet_bumpfee.py b/test/functional/wallet_bumpfee.py
index 0b3dea94d5..72c85b8832 100755
--- a/test/functional/wallet_bumpfee.py
+++ b/test/functional/wallet_bumpfee.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2016-2019 The Bitcoin Core developers
+# Copyright (c) 2016-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the bumpfee RPC.
@@ -23,7 +23,6 @@ from test_framework.util import (
assert_equal,
assert_greater_than,
assert_raises_rpc_error,
- connect_nodes,
hex_str_to_bytes,
)
@@ -37,6 +36,7 @@ NORMAL = 0.00100000
HIGH = 0.00500000
TOO_HIGH = 1.00000000
+
class BumpFeeTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 2
@@ -55,9 +55,6 @@ class BumpFeeTest(BitcoinTestFramework):
self.nodes[1].encryptwallet(WALLET_PASSPHRASE)
self.nodes[1].walletpassphrase(WALLET_PASSPHRASE, WALLET_PASSPHRASE_TIMEOUT)
- connect_nodes(self.nodes[0], 1)
- self.sync_all()
-
peer_node, rbf_node = self.nodes
rbf_node_address = rbf_node.getnewaddress()
@@ -74,6 +71,7 @@ class BumpFeeTest(BitcoinTestFramework):
self.log.info("Running tests")
dest_address = peer_node.getnewaddress()
+ test_invalid_parameters(rbf_node, dest_address)
test_simple_bumpfee_succeeds(self, "default", rbf_node, peer_node, dest_address)
test_simple_bumpfee_succeeds(self, "fee_rate", rbf_node, peer_node, dest_address)
test_feerate_args(self, rbf_node, peer_node, dest_address)
@@ -82,7 +80,6 @@ class BumpFeeTest(BitcoinTestFramework):
test_notmine_bumpfee_fails(self, rbf_node, peer_node, dest_address)
test_bumpfee_with_descendant_fails(self, rbf_node, rbf_node_address, dest_address)
test_dust_to_fee(self, rbf_node, dest_address)
- test_settxfee(self, rbf_node, dest_address)
test_watchonly_psbt(self, peer_node, rbf_node, dest_address)
test_rebumping(self, rbf_node, dest_address)
test_rebumping_not_replaceable(self, rbf_node, dest_address)
@@ -90,12 +87,34 @@ class BumpFeeTest(BitcoinTestFramework):
test_bumpfee_metadata(self, rbf_node, dest_address)
test_locked_wallet_fails(self, rbf_node, dest_address)
test_change_script_match(self, rbf_node, dest_address)
+ test_settxfee(self, rbf_node, dest_address)
test_maxtxfee_fails(self, rbf_node, dest_address)
# These tests wipe out a number of utxos that are expected in other tests
test_small_output_with_feerate_succeeds(self, rbf_node, dest_address)
test_no_more_inputs_fails(self, rbf_node, dest_address)
- self.log.info("Success")
+def test_invalid_parameters(node, dest_address):
+ txid = spend_one_input(node, dest_address)
+ # invalid estimate mode
+ assert_raises_rpc_error(-8, "Invalid estimate_mode parameter", node.bumpfee, txid, {
+ "estimate_mode": "moo",
+ })
+ assert_raises_rpc_error(-3, "Expected type string", node.bumpfee, txid, {
+ "estimate_mode": 38,
+ })
+ assert_raises_rpc_error(-3, "Expected type string", node.bumpfee, txid, {
+ "estimate_mode": {
+ "foo": "bar",
+ },
+ })
+ assert_raises_rpc_error(-8, "Invalid estimate_mode parameter", node.bumpfee, txid, {
+ "estimate_mode": Decimal("3.141592"),
+ })
+ # confTarget and conf_target
+ assert_raises_rpc_error(-8, "confTarget and conf_target options should not both be set", node.bumpfee, txid, {
+ "confTarget": 123,
+ "conf_target": 456,
+ })
def test_simple_bumpfee_succeeds(self, mode, rbf_node, peer_node, dest_address):
self.log.info('Test simple bumpfee: {}'.format(mode))
@@ -124,20 +143,22 @@ def test_simple_bumpfee_succeeds(self, mode, rbf_node, peer_node, dest_address):
assert_equal(oldwtx["replaced_by_txid"], bumped_tx["txid"])
assert_equal(bumpedwtx["replaces_txid"], rbfid)
+
def test_feerate_args(self, rbf_node, peer_node, dest_address):
self.log.info('Test fee_rate args')
rbfid = spend_one_input(rbf_node, dest_address)
self.sync_mempools((rbf_node, peer_node))
assert rbfid in rbf_node.getrawmempool() and rbfid in peer_node.getrawmempool()
- assert_raises_rpc_error(-8, "confTarget can't be set with fee_rate. Please provide either a confirmation target in blocks for automatic fee estimation, or an explicit fee rate.", rbf_node.bumpfee, rbfid, {"fee_rate": NORMAL, "confTarget": 1})
+ assert_raises_rpc_error(-8, "conf_target can't be set with fee_rate. Please provide either a confirmation target in blocks for automatic fee estimation, or an explicit fee rate.", rbf_node.bumpfee, rbfid, {"fee_rate": NORMAL, "confTarget": 1})
assert_raises_rpc_error(-3, "Unexpected key totalFee", rbf_node.bumpfee, rbfid, {"totalFee": NORMAL})
+ assert_raises_rpc_error(-8, "conf_target can't be set with fee_rate. Please provide either a confirmation target in blocks for automatic fee estimation, or an explicit fee rate.", rbf_node.bumpfee, rbfid, {"fee_rate":0.00001, "confTarget": 1})
# Bumping to just above minrelay should fail to increase total fee enough, at least
assert_raises_rpc_error(-8, "Insufficient total fee", rbf_node.bumpfee, rbfid, {"fee_rate": INSUFFICIENT})
- assert_raises_rpc_error(-3, "Amount out of range", rbf_node.bumpfee, rbfid, {"fee_rate":-1})
+ assert_raises_rpc_error(-3, "Amount out of range", rbf_node.bumpfee, rbfid, {"fee_rate": -1})
assert_raises_rpc_error(-4, "is too high (cannot be higher than", rbf_node.bumpfee, rbfid, {"fee_rate": TOO_HIGH})
@@ -209,6 +230,7 @@ def test_bumpfee_with_descendant_fails(self, rbf_node, rbf_node_address, dest_ad
rbf_node.sendrawtransaction(tx["hex"])
assert_raises_rpc_error(-8, "Transaction has descendants in the wallet", rbf_node.bumpfee, parent_id)
+
def test_small_output_with_feerate_succeeds(self, rbf_node, dest_address):
self.log.info('Testing small output with feerate bump succeeds')
@@ -249,6 +271,7 @@ def test_small_output_with_feerate_succeeds(self, rbf_node, dest_address):
rbf_node.generatetoaddress(1, rbf_node.getnewaddress())
assert_equal(rbf_node.gettransaction(rbfid)["confirmations"], 1)
+
def test_dust_to_fee(self, rbf_node, dest_address):
self.log.info('Test that bumped output that is dust is dropped to fee')
rbfid = spend_one_input(rbf_node, dest_address)
@@ -286,19 +309,26 @@ def test_settxfee(self, rbf_node, dest_address):
assert_greater_than(Decimal("0.00001000"), abs(requested_feerate - actual_feerate))
rbf_node.settxfee(Decimal("0.00000000")) # unset paytxfee
+ # check that settxfee respects -maxtxfee
+ self.restart_node(1, ['-maxtxfee=0.000025'] + self.extra_args[1])
+ assert_raises_rpc_error(-8, "txfee cannot be more than wallet max tx fee", rbf_node.settxfee, Decimal('0.00003'))
+ self.restart_node(1, self.extra_args[1])
+ rbf_node.walletpassphrase(WALLET_PASSPHRASE, WALLET_PASSPHRASE_TIMEOUT)
+
def test_maxtxfee_fails(self, rbf_node, dest_address):
- self.log.info('Test that bumpfee fails when it hits -matxfee')
+ self.log.info('Test that bumpfee fails when it hits -maxtxfee')
# size of bumped transaction (p2wpkh, 1 input, 2 outputs): 141 vbytes
# expected bump fee of 141 vbytes * 0.00200000 BTC / 1000 vbytes = 0.00002820 BTC
# which exceeds maxtxfee and is expected to raise
self.restart_node(1, ['-maxtxfee=0.000025'] + self.extra_args[1])
rbf_node.walletpassphrase(WALLET_PASSPHRASE, WALLET_PASSPHRASE_TIMEOUT)
rbfid = spend_one_input(rbf_node, dest_address)
- assert_raises_rpc_error(-4, "Unable to create transaction: Fee exceeds maximum configured by -maxtxfee", rbf_node.bumpfee, rbfid)
+ assert_raises_rpc_error(-4, "Unable to create transaction. Fee exceeds maximum configured by -maxtxfee", rbf_node.bumpfee, rbfid)
self.restart_node(1, self.extra_args[1])
rbf_node.walletpassphrase(WALLET_PASSPHRASE, WALLET_PASSPHRASE_TIMEOUT)
+
def test_watchonly_psbt(self, peer_node, rbf_node, dest_address):
self.log.info('Test that PSBT is returned for bumpfee in watchonly wallets')
priv_rec_desc = "wpkh([00000001/84'/1'/0']tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0/*)#rweraev0"
@@ -333,12 +363,11 @@ def test_watchonly_psbt(self, peer_node, rbf_node, dest_address):
result = watcher.importmulti([{
"desc": pub_rec_desc,
"timestamp": 0,
- "range": [0,10],
+ "range": [0, 10],
"internal": False,
"keypool": True,
"watchonly": True
- },
- {
+ }, {
"desc": pub_change_desc,
"timestamp": 0,
"range": [0, 10],
@@ -355,7 +384,7 @@ def test_watchonly_psbt(self, peer_node, rbf_node, dest_address):
self.sync_all()
# Create single-input PSBT for transaction to be bumped
- psbt = watcher.walletcreatefundedpsbt([], {dest_address:0.0005}, 0, {"feeRate": 0.00001}, True)['psbt']
+ psbt = watcher.walletcreatefundedpsbt([], {dest_address: 0.0005}, 0, {"feeRate": 0.00001}, True)['psbt']
psbt_signed = signer.walletprocesspsbt(psbt=psbt, sign=True, sighashtype="ALL", bip32derivs=True)
psbt_final = watcher.finalizepsbt(psbt_signed["psbt"])
original_txid = watcher.sendrawtransaction(psbt_final["hex"])
@@ -381,6 +410,7 @@ def test_watchonly_psbt(self, peer_node, rbf_node, dest_address):
rbf_node.unloadwallet("watcher")
rbf_node.unloadwallet("signer")
+
def test_rebumping(self, rbf_node, dest_address):
self.log.info('Test that re-bumping the original tx fails, but bumping successor works')
rbfid = spend_one_input(rbf_node, dest_address)
@@ -455,6 +485,7 @@ def test_locked_wallet_fails(self, rbf_node, dest_address):
rbf_node.bumpfee, rbfid)
rbf_node.walletpassphrase(WALLET_PASSPHRASE, WALLET_PASSPHRASE_TIMEOUT)
+
def test_change_script_match(self, rbf_node, dest_address):
self.log.info('Test that the same change addresses is used for the replacement transaction when possible')
@@ -474,6 +505,7 @@ def test_change_script_match(self, rbf_node, dest_address):
bumped_rate_tx = rbf_node.bumpfee(bumped_total_tx["txid"])
assert_equal(change_addresses, get_change_address(bumped_rate_tx['txid']))
+
def spend_one_input(node, dest_address, change_size=Decimal("0.00049000")):
tx_input = dict(
sequence=BIP125_SEQUENCE_NUMBER, **next(u for u in node.listunspent() if u["amount"] == Decimal("0.00100000")))
@@ -485,6 +517,7 @@ def spend_one_input(node, dest_address, change_size=Decimal("0.00049000")):
txid = node.sendrawtransaction(signedtx["hex"])
return txid
+
def submit_block_with_tx(node, tx):
ctx = CTransaction()
ctx.deserialize(io.BytesIO(hex_str_to_bytes(tx)))
@@ -501,13 +534,14 @@ def submit_block_with_tx(node, tx):
node.submitblock(block.serialize().hex())
return block
+
def test_no_more_inputs_fails(self, rbf_node, dest_address):
self.log.info('Test that bumpfee fails when there are no available confirmed outputs')
# feerate rbf requires confirmed outputs when change output doesn't exist or is insufficient
rbf_node.generatetoaddress(1, dest_address)
# spend all funds, no change output
rbfid = rbf_node.sendtoaddress(rbf_node.getnewaddress(), rbf_node.getbalance(), "", "", True)
- assert_raises_rpc_error(-4, "Unable to create transaction: Insufficient funds", rbf_node.bumpfee, rbfid)
+ assert_raises_rpc_error(-4, "Unable to create transaction. Insufficient funds", rbf_node.bumpfee, rbfid)
if __name__ == "__main__":
diff --git a/test/functional/wallet_createwallet.py b/test/functional/wallet_createwallet.py
index b24d312e27..48c0fcb731 100755
--- a/test/functional/wallet_createwallet.py
+++ b/test/functional/wallet_createwallet.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2018-2019 The Bitcoin Core developers
+# Copyright (c) 2018-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test createwallet arguments.
diff --git a/test/functional/wallet_descriptor.py b/test/functional/wallet_descriptor.py
new file mode 100755
index 0000000000..289ccf43ec
--- /dev/null
+++ b/test/functional/wallet_descriptor.py
@@ -0,0 +1,144 @@
+#!/usr/bin/env python3
+# Copyright (c) 2019 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test descriptor wallet function."""
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import (
+ assert_equal,
+ assert_raises_rpc_error
+)
+
+
+class WalletDescriptorTest(BitcoinTestFramework):
+ def set_test_params(self):
+ self.setup_clean_chain = True
+ self.num_nodes = 1
+ self.extra_args = [['-keypool=100']]
+
+ def skip_test_if_missing_module(self):
+ self.skip_if_no_wallet()
+
+ def run_test(self):
+ # Make a descriptor wallet
+ self.log.info("Making a descriptor wallet")
+ self.nodes[0].createwallet(wallet_name="desc1", descriptors=True)
+ self.nodes[0].unloadwallet("")
+
+ # With -keypool=100, a descriptor wallet keeps 100 keys * 3 address types = 300 keys in each of the external and internal keypools
+ self.log.info("Checking wallet info")
+ wallet_info = self.nodes[0].getwalletinfo()
+ assert_equal(wallet_info['keypoolsize'], 300)
+ assert_equal(wallet_info['keypoolsize_hd_internal'], 300)
+ assert 'keypoololdest' not in wallet_info
+
+ # Check that getnewaddress works
+ self.log.info("Test that getnewaddress and getrawchangeaddress work")
+ addr = self.nodes[0].getnewaddress("", "legacy")
+ addr_info = self.nodes[0].getaddressinfo(addr)
+ assert addr_info['desc'].startswith('pkh(')
+ assert_equal(addr_info['hdkeypath'], 'm/44\'/1\'/0\'/0/0')
+
+ addr = self.nodes[0].getnewaddress("", "p2sh-segwit")
+ addr_info = self.nodes[0].getaddressinfo(addr)
+ assert addr_info['desc'].startswith('sh(wpkh(')
+ assert_equal(addr_info['hdkeypath'], 'm/49\'/1\'/0\'/0/0')
+
+ addr = self.nodes[0].getnewaddress("", "bech32")
+ addr_info = self.nodes[0].getaddressinfo(addr)
+ assert addr_info['desc'].startswith('wpkh(')
+ assert_equal(addr_info['hdkeypath'], 'm/84\'/1\'/0\'/0/0')
+
+ # Check that getrawchangeaddress works
+ addr = self.nodes[0].getrawchangeaddress("legacy")
+ addr_info = self.nodes[0].getaddressinfo(addr)
+ assert addr_info['desc'].startswith('pkh(')
+ assert_equal(addr_info['hdkeypath'], 'm/44\'/1\'/0\'/1/0')
+
+ addr = self.nodes[0].getrawchangeaddress("p2sh-segwit")
+ addr_info = self.nodes[0].getaddressinfo(addr)
+ assert addr_info['desc'].startswith('sh(wpkh(')
+ assert_equal(addr_info['hdkeypath'], 'm/49\'/1\'/0\'/1/0')
+
+ addr = self.nodes[0].getrawchangeaddress("bech32")
+ addr_info = self.nodes[0].getaddressinfo(addr)
+ assert addr_info['desc'].startswith('wpkh(')
+ assert_equal(addr_info['hdkeypath'], 'm/84\'/1\'/0\'/1/0')
+
+ # Make a wallet to receive coins at
+ self.nodes[0].createwallet(wallet_name="desc2", descriptors=True)
+ recv_wrpc = self.nodes[0].get_wallet_rpc("desc2")
+ send_wrpc = self.nodes[0].get_wallet_rpc("desc1")
+
+ # Generate some coins
+ send_wrpc.generatetoaddress(101, send_wrpc.getnewaddress())
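+ # 101 blocks so that the first coinbase reward has matured and can be spent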
+
+ # Make transactions
+ self.log.info("Test sending and receiving")
+ addr = recv_wrpc.getnewaddress()
+ send_wrpc.sendtoaddress(addr, 10)
+
+ # Make sure things are disabled
+ self.log.info("Test disabled RPCs")
+ assert_raises_rpc_error(-4, "This type of wallet does not support this command", recv_wrpc.rpc.importprivkey, "cVpF924EspNh8KjYsfhgY96mmxvT6DgdWiTYMtMjuM74hJaU5psW")
+ assert_raises_rpc_error(-4, "This type of wallet does not support this command", recv_wrpc.rpc.importpubkey, send_wrpc.getaddressinfo(send_wrpc.getnewaddress()))
+ assert_raises_rpc_error(-4, "This type of wallet does not support this command", recv_wrpc.rpc.importaddress, recv_wrpc.getnewaddress())
+ assert_raises_rpc_error(-4, "This type of wallet does not support this command", recv_wrpc.rpc.importmulti, [])
+ assert_raises_rpc_error(-4, "This type of wallet does not support this command", recv_wrpc.rpc.addmultisigaddress, 1, [recv_wrpc.getnewaddress()])
+ assert_raises_rpc_error(-4, "This type of wallet does not support this command", recv_wrpc.rpc.dumpprivkey, recv_wrpc.getnewaddress())
+ assert_raises_rpc_error(-4, "This type of wallet does not support this command", recv_wrpc.rpc.dumpwallet, 'wallet.dump')
+ assert_raises_rpc_error(-4, "This type of wallet does not support this command", recv_wrpc.rpc.importwallet, 'wallet.dump')
+ assert_raises_rpc_error(-4, "This type of wallet does not support this command", recv_wrpc.rpc.sethdseed)
+
+ self.log.info("Test encryption")
+ # Get the master fingerprint before encrypt
+ info1 = send_wrpc.getaddressinfo(send_wrpc.getnewaddress())
+
+ # Encrypt wallet 0
+ send_wrpc.encryptwallet('pass')
+ send_wrpc.walletpassphrase('pass', 10)
+ addr = send_wrpc.getnewaddress()
+ info2 = send_wrpc.getaddressinfo(addr)
+ assert info1['hdmasterfingerprint'] != info2['hdmasterfingerprint']
+ send_wrpc.walletlock()
+ assert 'hdmasterfingerprint' in send_wrpc.getaddressinfo(send_wrpc.getnewaddress())
+ info3 = send_wrpc.getaddressinfo(addr)
+ assert_equal(info2['desc'], info3['desc'])
+
+ self.log.info("Test that getnewaddress still works after keypool is exhausted in an encrypted wallet")
+ for i in range(0, 500):
+ send_wrpc.getnewaddress()
+
+ self.log.info("Test that unlock is needed when deriving only hardened keys in an encrypted wallet")
+ send_wrpc.walletpassphrase('pass', 10)
+ send_wrpc.importdescriptors([{
+ "desc": "wpkh(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/*h)#y4dfsj7n",
+ "timestamp": "now",
+ "range": [0,10],
+ "active": True
+ }])
+ send_wrpc.walletlock()
+ # Exhaust keypool of 100
+ for i in range(0, 100):
+ send_wrpc.getnewaddress(address_type='bech32')
+ # This should now error
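+ # (topping up the keypool requires hardened derivation, which needs the private key, unavailable while the wallet is locked)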
+ assert_raises_rpc_error(-12, "Keypool ran out, please call keypoolrefill first", send_wrpc.getnewaddress, '', 'bech32')
+
+ self.log.info("Test born encrypted wallets")
+ self.nodes[0].createwallet('desc_enc', False, False, 'pass', False, True)
+ enc_rpc = self.nodes[0].get_wallet_rpc('desc_enc')
+ enc_rpc.getnewaddress() # Makes sure that we can get a new address from a born encrypted wallet
+
+ self.log.info("Test blank descriptor wallets")
+ self.nodes[0].createwallet(wallet_name='desc_blank', blank=True, descriptors=True)
+ blank_rpc = self.nodes[0].get_wallet_rpc('desc_blank')
+ assert_raises_rpc_error(-4, 'This wallet has no available keys', blank_rpc.getnewaddress)
+
+ self.log.info("Test descriptor wallet with disabled private keys")
+ self.nodes[0].createwallet(wallet_name='desc_no_priv', disable_private_keys=True, descriptors=True)
+ nopriv_rpc = self.nodes[0].get_wallet_rpc('desc_no_priv')
+ assert_raises_rpc_error(-4, 'This wallet has no available keys', nopriv_rpc.getnewaddress)
+
+if __name__ == '__main__':
+ WalletDescriptorTest().main()
diff --git a/test/functional/wallet_dump.py b/test/functional/wallet_dump.py
index f10719fba2..ba1e494d9a 100755
--- a/test/functional/wallet_dump.py
+++ b/test/functional/wallet_dump.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2016-2019 The Bitcoin Core developers
+# Copyright (c) 2016-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the dumpwallet RPC."""
@@ -190,8 +190,7 @@ class WalletDumpTest(BitcoinTestFramework):
assert_raises_rpc_error(-8, "already exists", lambda: self.nodes[0].dumpwallet(wallet_enc_dump))
# Restart node with new wallet, and test importwallet
- self.stop_node(0)
- self.start_node(0, ['-wallet=w2'])
+ self.restart_node(0, ['-wallet=w2'])
# Make sure the address is not IsMine before import
result = self.nodes[0].getaddressinfo(multisig_addr)
diff --git a/test/functional/wallet_encryption.py b/test/functional/wallet_encryption.py
index bc7e3cca59..6cd82ad250 100755
--- a/test/functional/wallet_encryption.py
+++ b/test/functional/wallet_encryption.py
@@ -8,7 +8,6 @@ import time
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
- assert_equal,
assert_raises_rpc_error,
assert_greater_than,
assert_greater_than_or_equal,
@@ -27,10 +26,10 @@ class WalletEncryptionTest(BitcoinTestFramework):
passphrase2 = "SecondWalletPassphrase"
# Make sure the wallet isn't encrypted first
- address = self.nodes[0].getnewaddress()
- privkey = self.nodes[0].dumpprivkey(address)
- assert_equal(privkey[:1], "c")
- assert_equal(len(privkey), 52)
+ msg = "test message"
+ address = self.nodes[0].getnewaddress(address_type='legacy')
+ sig = self.nodes[0].signmessage(address, msg)
+ assert self.nodes[0].verifymessage(address, sig, msg)
assert_raises_rpc_error(-15, "Error: running with an unencrypted wallet, but walletpassphrase was called", self.nodes[0].walletpassphrase, 'ff', 1)
assert_raises_rpc_error(-15, "Error: running with an unencrypted wallet, but walletpassphrasechange was called.", self.nodes[0].walletpassphrasechange, 'ff', 'ff')
@@ -39,33 +38,36 @@ class WalletEncryptionTest(BitcoinTestFramework):
self.nodes[0].encryptwallet(passphrase)
# Test that the wallet is encrypted
- assert_raises_rpc_error(-13, "Please enter the wallet passphrase with walletpassphrase first", self.nodes[0].dumpprivkey, address)
+ assert_raises_rpc_error(-13, "Please enter the wallet passphrase with walletpassphrase first", self.nodes[0].signmessage, address, msg)
assert_raises_rpc_error(-15, "Error: running with an encrypted wallet, but encryptwallet was called.", self.nodes[0].encryptwallet, 'ff')
assert_raises_rpc_error(-8, "passphrase can not be empty", self.nodes[0].walletpassphrase, '', 1)
assert_raises_rpc_error(-8, "passphrase can not be empty", self.nodes[0].walletpassphrasechange, '', 'ff')
# Check that walletpassphrase works
self.nodes[0].walletpassphrase(passphrase, 2)
- assert_equal(privkey, self.nodes[0].dumpprivkey(address))
+ sig = self.nodes[0].signmessage(address, msg)
+ assert self.nodes[0].verifymessage(address, sig, msg)
# Check that the timeout is right
time.sleep(3)
- assert_raises_rpc_error(-13, "Please enter the wallet passphrase with walletpassphrase first", self.nodes[0].dumpprivkey, address)
+ assert_raises_rpc_error(-13, "Please enter the wallet passphrase with walletpassphrase first", self.nodes[0].signmessage, address, msg)
# Test wrong passphrase
assert_raises_rpc_error(-14, "wallet passphrase entered was incorrect", self.nodes[0].walletpassphrase, passphrase + "wrong", 10)
# Test walletlock
self.nodes[0].walletpassphrase(passphrase, 84600)
- assert_equal(privkey, self.nodes[0].dumpprivkey(address))
+ sig = self.nodes[0].signmessage(address, msg)
+ assert self.nodes[0].verifymessage(address, sig, msg)
self.nodes[0].walletlock()
- assert_raises_rpc_error(-13, "Please enter the wallet passphrase with walletpassphrase first", self.nodes[0].dumpprivkey, address)
+ assert_raises_rpc_error(-13, "Please enter the wallet passphrase with walletpassphrase first", self.nodes[0].signmessage, address, msg)
# Test passphrase changes
self.nodes[0].walletpassphrasechange(passphrase, passphrase2)
assert_raises_rpc_error(-14, "wallet passphrase entered was incorrect", self.nodes[0].walletpassphrase, passphrase, 10)
self.nodes[0].walletpassphrase(passphrase2, 10)
- assert_equal(privkey, self.nodes[0].dumpprivkey(address))
+ sig = self.nodes[0].signmessage(address, msg)
+ assert self.nodes[0].verifymessage(address, sig, msg)
self.nodes[0].walletlock()
# Test timeout bounds
diff --git a/test/functional/wallet_groups.py b/test/functional/wallet_groups.py
index 261a43472b..9dd55b4ab1 100755
--- a/test/functional/wallet_groups.py
+++ b/test/functional/wallet_groups.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2018-2019 The Bitcoin Core developers
+# Copyright (c) 2018-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test wallet group functionality."""
diff --git a/test/functional/wallet_hd.py b/test/functional/wallet_hd.py
index 7497475b67..3c336623e2 100755
--- a/test/functional/wallet_hd.py
+++ b/test/functional/wallet_hd.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2016-2019 The Bitcoin Core developers
+# Copyright (c) 2016-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test Hierarchical Deterministic wallet function."""
@@ -11,7 +11,7 @@ from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
connect_nodes,
- assert_raises_rpc_error
+ assert_raises_rpc_error,
)
@@ -27,17 +27,21 @@ class WalletHDTest(BitcoinTestFramework):
def run_test(self):
# Make sure we use hd, keep masterkeyid
- masterkeyid = self.nodes[1].getwalletinfo()['hdseedid']
- assert_equal(len(masterkeyid), 40)
+ hd_fingerprint = self.nodes[1].getaddressinfo(self.nodes[1].getnewaddress())['hdmasterfingerprint']
+ assert_equal(len(hd_fingerprint), 8)
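+ # the BIP32 master fingerprint is 4 bytes, i.e. 8 hex characters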
# create an internal key
change_addr = self.nodes[1].getrawchangeaddress()
- change_addrV= self.nodes[1].getaddressinfo(change_addr)
- assert_equal(change_addrV["hdkeypath"], "m/0'/1'/0'") #first internal child key
+ change_addrV = self.nodes[1].getaddressinfo(change_addr)
+ if self.options.descriptors:
+ assert_equal(change_addrV["hdkeypath"], "m/84'/1'/0'/1/0")
+ else:
+ assert_equal(change_addrV["hdkeypath"], "m/0'/1'/0'") #first internal child key
# Import a non-HD private key in the HD wallet
- non_hd_add = self.nodes[0].getnewaddress()
- self.nodes[1].importprivkey(self.nodes[0].dumpprivkey(non_hd_add))
+ non_hd_add = 'bcrt1qmevj8zfx0wdvp05cqwkmr6mxkfx60yezwjksmt'
+ non_hd_key = 'cS9umN9w6cDMuRVYdbkfE4c7YUFLJRoXMfhQ569uY4odiQbVN8Rt'
+ self.nodes[1].importprivkey(non_hd_key)
# This should be enough to keep the master key and the non-HD key
self.nodes[1].backupwallet(os.path.join(self.nodes[1].datadir, "hd.bak"))
@@ -48,11 +52,14 @@ class WalletHDTest(BitcoinTestFramework):
self.nodes[0].generate(101)
hd_add = None
NUM_HD_ADDS = 10
- for i in range(NUM_HD_ADDS):
+ for i in range(1, NUM_HD_ADDS + 1):
hd_add = self.nodes[1].getnewaddress()
hd_info = self.nodes[1].getaddressinfo(hd_add)
- assert_equal(hd_info["hdkeypath"], "m/0'/0'/"+str(i)+"'")
- assert_equal(hd_info["hdseedid"], masterkeyid)
+ if self.options.descriptors:
+ assert_equal(hd_info["hdkeypath"], "m/84'/1'/0'/0/" + str(i))
+ else:
+ assert_equal(hd_info["hdkeypath"], "m/0'/0'/" + str(i) + "'")
+ assert_equal(hd_info["hdmasterfingerprint"], hd_fingerprint)
self.nodes[0].sendtoaddress(hd_add, 1)
self.nodes[0].generate(1)
self.nodes[0].sendtoaddress(non_hd_add, 1)
@@ -60,8 +67,11 @@ class WalletHDTest(BitcoinTestFramework):
# create an internal key (again)
change_addr = self.nodes[1].getrawchangeaddress()
- change_addrV= self.nodes[1].getaddressinfo(change_addr)
- assert_equal(change_addrV["hdkeypath"], "m/0'/1'/1'") #second internal child key
+ change_addrV = self.nodes[1].getaddressinfo(change_addr)
+ if self.options.descriptors:
+ assert_equal(change_addrV["hdkeypath"], "m/84'/1'/0'/1/1")
+ else:
+ assert_equal(change_addrV["hdkeypath"], "m/0'/1'/1'") #second internal child key
self.sync_all()
assert_equal(self.nodes[1].getbalance(), NUM_HD_ADDS + 1)
@@ -72,30 +82,38 @@ class WalletHDTest(BitcoinTestFramework):
# otherwise node1 would auto-recover all funds and flag the keypool keys as used
shutil.rmtree(os.path.join(self.nodes[1].datadir, self.chain, "blocks"))
shutil.rmtree(os.path.join(self.nodes[1].datadir, self.chain, "chainstate"))
- shutil.copyfile(os.path.join(self.nodes[1].datadir, "hd.bak"), os.path.join(self.nodes[1].datadir, self.chain, "wallets", "wallet.dat"))
+ shutil.copyfile(
+ os.path.join(self.nodes[1].datadir, "hd.bak"),
+ os.path.join(self.nodes[1].datadir, self.chain, 'wallets', "wallet.dat"),
+ )
self.start_node(1)
# Assert that derivation is deterministic
hd_add_2 = None
- for i in range(NUM_HD_ADDS):
+ for i in range(1, NUM_HD_ADDS + 1):
hd_add_2 = self.nodes[1].getnewaddress()
hd_info_2 = self.nodes[1].getaddressinfo(hd_add_2)
- assert_equal(hd_info_2["hdkeypath"], "m/0'/0'/"+str(i)+"'")
- assert_equal(hd_info_2["hdseedid"], masterkeyid)
+ if self.options.descriptors:
+ assert_equal(hd_info_2["hdkeypath"], "m/84'/1'/0'/0/" + str(i))
+ else:
+ assert_equal(hd_info_2["hdkeypath"], "m/0'/0'/" + str(i) + "'")
+ assert_equal(hd_info_2["hdmasterfingerprint"], hd_fingerprint)
assert_equal(hd_add, hd_add_2)
connect_nodes(self.nodes[0], 1)
self.sync_all()
# Needs rescan
- self.stop_node(1)
- self.start_node(1, extra_args=self.extra_args[1] + ['-rescan'])
+ self.restart_node(1, extra_args=self.extra_args[1] + ['-rescan'])
assert_equal(self.nodes[1].getbalance(), NUM_HD_ADDS + 1)
# Try a RPC based rescan
self.stop_node(1)
shutil.rmtree(os.path.join(self.nodes[1].datadir, self.chain, "blocks"))
shutil.rmtree(os.path.join(self.nodes[1].datadir, self.chain, "chainstate"))
- shutil.copyfile(os.path.join(self.nodes[1].datadir, "hd.bak"), os.path.join(self.nodes[1].datadir, self.chain, "wallets", "wallet.dat"))
+ shutil.copyfile(
+ os.path.join(self.nodes[1].datadir, "hd.bak"),
+ os.path.join(self.nodes[1].datadir, self.chain, "wallets", "wallet.dat"),
+ )
self.start_node(1, extra_args=self.extra_args[1])
connect_nodes(self.nodes[0], 1)
self.sync_all()
@@ -117,41 +135,146 @@ class WalletHDTest(BitcoinTestFramework):
if out['value'] != 1:
keypath = self.nodes[1].getaddressinfo(out['scriptPubKey']['addresses'][0])['hdkeypath']
- assert_equal(keypath[0:7], "m/0'/1'")
-
- # Generate a new HD seed on node 1 and make sure it is set
- orig_masterkeyid = self.nodes[1].getwalletinfo()['hdseedid']
- self.nodes[1].sethdseed()
- new_masterkeyid = self.nodes[1].getwalletinfo()['hdseedid']
- assert orig_masterkeyid != new_masterkeyid
- addr = self.nodes[1].getnewaddress()
- assert_equal(self.nodes[1].getaddressinfo(addr)['hdkeypath'], 'm/0\'/0\'/0\'') # Make sure the new address is the first from the keypool
- self.nodes[1].keypoolrefill(1) # Fill keypool with 1 key
-
- # Set a new HD seed on node 1 without flushing the keypool
- new_seed = self.nodes[0].dumpprivkey(self.nodes[0].getnewaddress())
- orig_masterkeyid = new_masterkeyid
- self.nodes[1].sethdseed(False, new_seed)
- new_masterkeyid = self.nodes[1].getwalletinfo()['hdseedid']
- assert orig_masterkeyid != new_masterkeyid
- addr = self.nodes[1].getnewaddress()
- assert_equal(orig_masterkeyid, self.nodes[1].getaddressinfo(addr)['hdseedid'])
- assert_equal(self.nodes[1].getaddressinfo(addr)['hdkeypath'], 'm/0\'/0\'/1\'') # Make sure the new address continues previous keypool
-
- # Check that the next address is from the new seed
- self.nodes[1].keypoolrefill(1)
- next_addr = self.nodes[1].getnewaddress()
- assert_equal(new_masterkeyid, self.nodes[1].getaddressinfo(next_addr)['hdseedid'])
- assert_equal(self.nodes[1].getaddressinfo(next_addr)['hdkeypath'], 'm/0\'/0\'/0\'') # Make sure the new address is not from previous keypool
- assert next_addr != addr
-
- # Sethdseed parameter validity
- assert_raises_rpc_error(-1, 'sethdseed', self.nodes[0].sethdseed, False, new_seed, 0)
- assert_raises_rpc_error(-5, "Invalid private key", self.nodes[1].sethdseed, False, "not_wif")
- assert_raises_rpc_error(-1, "JSON value is not a boolean as expected", self.nodes[1].sethdseed, "Not_bool")
- assert_raises_rpc_error(-1, "JSON value is not a string as expected", self.nodes[1].sethdseed, False, True)
- assert_raises_rpc_error(-5, "Already have this key", self.nodes[1].sethdseed, False, new_seed)
- assert_raises_rpc_error(-5, "Already have this key", self.nodes[1].sethdseed, False, self.nodes[1].dumpprivkey(self.nodes[1].getnewaddress()))
+ if self.options.descriptors:
+ assert_equal(keypath[0:14], "m/84'/1'/0'/1/")
+ else:
+ assert_equal(keypath[0:7], "m/0'/1'")
+
+ if not self.options.descriptors:
+ # Generate a new HD seed on node 1 and make sure it is set
+ orig_masterkeyid = self.nodes[1].getwalletinfo()['hdseedid']
+ self.nodes[1].sethdseed()
+ new_masterkeyid = self.nodes[1].getwalletinfo()['hdseedid']
+ assert orig_masterkeyid != new_masterkeyid
+ addr = self.nodes[1].getnewaddress()
+ # Make sure the new address is the first from the keypool
+ assert_equal(self.nodes[1].getaddressinfo(addr)['hdkeypath'], 'm/0\'/0\'/0\'')
+ self.nodes[1].keypoolrefill(1) # Fill keypool with 1 key
+
+ # Set a new HD seed on node 1 without flushing the keypool
+ new_seed = self.nodes[0].dumpprivkey(self.nodes[0].getnewaddress())
+ orig_masterkeyid = new_masterkeyid
+ self.nodes[1].sethdseed(False, new_seed)
+ new_masterkeyid = self.nodes[1].getwalletinfo()['hdseedid']
+ assert orig_masterkeyid != new_masterkeyid
+ addr = self.nodes[1].getnewaddress()
+ assert_equal(orig_masterkeyid, self.nodes[1].getaddressinfo(addr)['hdseedid'])
+ # Make sure the new address continues previous keypool
+ assert_equal(self.nodes[1].getaddressinfo(addr)['hdkeypath'], 'm/0\'/0\'/1\'')
+
+ # Check that the next address is from the new seed
+ self.nodes[1].keypoolrefill(1)
+ next_addr = self.nodes[1].getnewaddress()
+ assert_equal(new_masterkeyid, self.nodes[1].getaddressinfo(next_addr)['hdseedid'])
+ # Make sure the new address is not from previous keypool
+ assert_equal(self.nodes[1].getaddressinfo(next_addr)['hdkeypath'], 'm/0\'/0\'/0\'')
+ assert next_addr != addr
+
+ # Sethdseed parameter validity
+ assert_raises_rpc_error(-1, 'sethdseed', self.nodes[0].sethdseed, False, new_seed, 0)
+ assert_raises_rpc_error(-5, "Invalid private key", self.nodes[1].sethdseed, False, "not_wif")
+ assert_raises_rpc_error(-1, "JSON value is not a boolean as expected", self.nodes[1].sethdseed, "Not_bool")
+ assert_raises_rpc_error(-1, "JSON value is not a string as expected", self.nodes[1].sethdseed, False, True)
+ assert_raises_rpc_error(-5, "Already have this key", self.nodes[1].sethdseed, False, new_seed)
+ assert_raises_rpc_error(-5, "Already have this key", self.nodes[1].sethdseed, False, self.nodes[1].dumpprivkey(self.nodes[1].getnewaddress()))
+
+ self.log.info('Test sethdseed restoring with keys outside of the initial keypool')
+ self.nodes[0].generate(10)
+ # Restart node 1 with keypool of 3 and a different wallet
+ self.nodes[1].createwallet(wallet_name='origin', blank=True)
+ self.restart_node(1, extra_args=['-keypool=3', '-wallet=origin'])
+ connect_nodes(self.nodes[0], 1)
+
+ # sethdseed restoring and seeing txs to addresses out of the keypool
+ origin_rpc = self.nodes[1].get_wallet_rpc('origin')
+ seed = self.nodes[0].dumpprivkey(self.nodes[0].getnewaddress())
+ origin_rpc.sethdseed(True, seed)
+
+ self.nodes[1].createwallet(wallet_name='restore', blank=True)
+ restore_rpc = self.nodes[1].get_wallet_rpc('restore')
+ restore_rpc.sethdseed(True, seed) # Set to be the same seed as origin_rpc
+ restore_rpc.sethdseed(True) # Rotate to a new seed, making original `seed` inactive
+
+ self.nodes[1].createwallet(wallet_name='restore2', blank=True)
+ restore2_rpc = self.nodes[1].get_wallet_rpc('restore2')
+ restore2_rpc.sethdseed(True, seed) # Set to be the same seed as origin_rpc
+ restore2_rpc.sethdseed(True) # Rotate to a new seed, making original `seed` inactive
+
+ # Check persistence of inactive seed by reloading restore. restore2 is still loaded to test the case where the wallet is not reloaded
+ restore_rpc.unloadwallet()
+ self.nodes[1].loadwallet('restore')
+ restore_rpc = self.nodes[1].get_wallet_rpc('restore')
+
+ # Empty origin keypool and get an address that is beyond the initial keypool
+ origin_rpc.getnewaddress()
+ origin_rpc.getnewaddress()
+ last_addr = origin_rpc.getnewaddress() # Last address of initial keypool
+ addr = origin_rpc.getnewaddress() # First address beyond initial keypool
+
+ # Check that the restored seed has last_addr but does not have addr
+ info = restore_rpc.getaddressinfo(last_addr)
+ assert_equal(info['ismine'], True)
+ info = restore_rpc.getaddressinfo(addr)
+ assert_equal(info['ismine'], False)
+ info = restore2_rpc.getaddressinfo(last_addr)
+ assert_equal(info['ismine'], True)
+ info = restore2_rpc.getaddressinfo(addr)
+ assert_equal(info['ismine'], False)
+ # Check that the origin seed has addr
+ info = origin_rpc.getaddressinfo(addr)
+ assert_equal(info['ismine'], True)
+
+ # Send a transaction to addr, which is out of the initial keypool.
+ # The wallet that has set a new seed (restore_rpc) should not detect this transaction.
+ txid = self.nodes[0].sendtoaddress(addr, 1)
+ origin_rpc.sendrawtransaction(self.nodes[0].gettransaction(txid)['hex'])
+ self.nodes[0].generate(1)
+ self.sync_blocks()
+ origin_rpc.gettransaction(txid)
+ assert_raises_rpc_error(-5, 'Invalid or non-wallet transaction id', restore_rpc.gettransaction, txid)
+ out_of_kp_txid = txid
+
+ # Send a transaction to last_addr, which is in the initial keypool.
+ # The wallet that has set a new seed (restore_rpc) should detect this transaction and generate 3 new keys from the initial seed.
+ # The previous transaction (out_of_kp_txid) should still not be detected as a rescan is required.
+ txid = self.nodes[0].sendtoaddress(last_addr, 1)
+ origin_rpc.sendrawtransaction(self.nodes[0].gettransaction(txid)['hex'])
+ self.nodes[0].generate(1)
+ self.sync_blocks()
+ origin_rpc.gettransaction(txid)
+ restore_rpc.gettransaction(txid)
+ assert_raises_rpc_error(-5, 'Invalid or non-wallet transaction id', restore_rpc.gettransaction, out_of_kp_txid)
+ restore2_rpc.gettransaction(txid)
+ assert_raises_rpc_error(-5, 'Invalid or non-wallet transaction id', restore2_rpc.gettransaction, out_of_kp_txid)
+
+ # After rescanning, restore_rpc should now see out_of_kp_txid and generate an additional key.
+ # addr should now be part of restore_rpc and be ismine
+ restore_rpc.rescanblockchain()
+ restore_rpc.gettransaction(out_of_kp_txid)
+ info = restore_rpc.getaddressinfo(addr)
+ assert_equal(info['ismine'], True)
+ restore2_rpc.rescanblockchain()
+ restore2_rpc.gettransaction(out_of_kp_txid)
+ info = restore2_rpc.getaddressinfo(addr)
+ assert_equal(info['ismine'], True)
+
+ # Check again that 3 keys were derived.
+ # Empty keypool and get an address that is beyond the initial keypool
+ origin_rpc.getnewaddress()
+ origin_rpc.getnewaddress()
+ last_addr = origin_rpc.getnewaddress()
+ addr = origin_rpc.getnewaddress()
+
+ # Check that the restored seed has last_addr but does not have addr
+ info = restore_rpc.getaddressinfo(last_addr)
+ assert_equal(info['ismine'], True)
+ info = restore_rpc.getaddressinfo(addr)
+ assert_equal(info['ismine'], False)
+ info = restore2_rpc.getaddressinfo(last_addr)
+ assert_equal(info['ismine'], True)
+ info = restore2_rpc.getaddressinfo(addr)
+ assert_equal(info['ismine'], False)
+
if __name__ == '__main__':
- WalletHDTest().main ()
+ WalletHDTest().main()
diff --git a/test/functional/wallet_import_rescan.py b/test/functional/wallet_import_rescan.py
index b8b85b7a19..4ff7f1d525 100755
--- a/test/functional/wallet_import_rescan.py
+++ b/test/functional/wallet_import_rescan.py
@@ -40,7 +40,6 @@ Rescan = enum.Enum("Rescan", "no yes late_timestamp")
class Variant(collections.namedtuple("Variant", "call data address_type rescan prune")):
"""Helper for importing one key and verifying scanned transactions."""
-
def do_import(self, timestamp):
"""Call one key import RPC."""
rescan = self.rescan == Rescan.yes
@@ -146,6 +145,7 @@ class ImportRescanTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 2 + len(IMPORT_NODES)
self.supports_cli = False
+ self.rpc_timeout = 120
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
@@ -226,5 +226,6 @@ class ImportRescanTest(BitcoinTestFramework):
variant.expected_txs += 1
variant.check(variant.sent_txid, variant.sent_amount, variant.confirmation_height)
+
if __name__ == "__main__":
ImportRescanTest().main()
diff --git a/test/functional/wallet_importdescriptors.py b/test/functional/wallet_importdescriptors.py
new file mode 100755
index 0000000000..fc5d653a91
--- /dev/null
+++ b/test/functional/wallet_importdescriptors.py
@@ -0,0 +1,445 @@
+#!/usr/bin/env python3
+# Copyright (c) 2019 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test the importdescriptors RPC.
+
+Test importdescriptors by generating keys on node0, importing the corresponding
+descriptors on node1 and then testing the address info for the different address
+variants.
+
+- `get_generate_key()` is called to generate keys and return the privkeys,
+ pubkeys and all variants of scriptPubKey and address.
+- `test_importdesc()` is called to send an importdescriptors call to node1, test
+ success, and (if unsuccessful) test the error code and error message returned.
+- `test_address()` is called to call getaddressinfo for an address on node1
+ and test the values returned."""
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.descriptors import descsum_create
+from test_framework.util import (
+ assert_equal,
+ assert_raises_rpc_error,
+ find_vout_for_address,
+)
+from test_framework.wallet_util import (
+ get_generate_key,
+ test_address,
+)
+
+class ImportDescriptorsTest(BitcoinTestFramework):
+ def set_test_params(self):
+ self.num_nodes = 2
+ self.extra_args = [["-addresstype=legacy"],
+ ["-addresstype=bech32", "-keypool=5"]
+ ]
+ self.setup_clean_chain = True
+
+ def skip_test_if_missing_module(self):
+ self.skip_if_no_wallet()
+
+ def test_importdesc(self, req, success, error_code=None, error_message=None, warnings=None, wallet=None):
+ """Run importdescriptors and assert success"""
+ if warnings is None:
+ warnings = []
+ wrpc = self.nodes[1].get_wallet_rpc('w1')
+ if wallet is not None:
+ wrpc = wallet
+
+ result = wrpc.importdescriptors([req])
+ observed_warnings = []
+ if 'warnings' in result[0]:
+ observed_warnings = result[0]['warnings']
+ assert_equal("\n".join(sorted(warnings)), "\n".join(sorted(observed_warnings)))
+ assert_equal(result[0]['success'], success)
+ if error_code is not None:
+ assert_equal(result[0]['error']['code'], error_code)
+ assert_equal(result[0]['error']['message'], error_message)
+
+ def run_test(self):
+ self.log.info('Setting up wallets')
+ self.nodes[0].createwallet(wallet_name='w0', disable_private_keys=False)
+ w0 = self.nodes[0].get_wallet_rpc('w0')
+
+ self.nodes[1].createwallet(wallet_name='w1', disable_private_keys=True, blank=True, descriptors=True)
+ w1 = self.nodes[1].get_wallet_rpc('w1')
+ assert_equal(w1.getwalletinfo()['keypoolsize'], 0)
+
+ self.nodes[1].createwallet(wallet_name="wpriv", disable_private_keys=False, blank=True, descriptors=True)
+ wpriv = self.nodes[1].get_wallet_rpc("wpriv")
+ assert_equal(wpriv.getwalletinfo()['keypoolsize'], 0)
+
+ self.log.info('Mining coins')
+ w0.generatetoaddress(101, w0.getnewaddress())
+
+ # RPC importdescriptors -----------------------------------------------
+
+ # # Test import fails if no descriptor present
+ key = get_generate_key()
+ self.log.info("Import should fail if a descriptor is not provided")
+ self.test_importdesc({"timestamp": "now"},
+ success=False,
+ error_code=-8,
+ error_message='Descriptor not found.')
+
+ # # Test importing of a P2PKH descriptor
+ key = get_generate_key()
+ self.log.info("Should import a p2pkh descriptor")
+ self.test_importdesc({"desc": descsum_create("pkh(" + key.pubkey + ")"),
+ "timestamp": "now",
+ "label": "Descriptor import test"},
+ success=True)
+ test_address(w1,
+ key.p2pkh_addr,
+ solvable=True,
+ ismine=True,
+ labels=["Descriptor import test"])
+ assert_equal(w1.getwalletinfo()['keypoolsize'], 0)
+
+ self.log.info("Internal addresses cannot have labels")
+ self.test_importdesc({"desc": descsum_create("pkh(" + key.pubkey + ")"),
+ "timestamp": "now",
+ "internal": True,
+ "label": "Descriptor import test"},
+ success=False,
+ error_code=-8,
+ error_message="Internal addresses should not have a label")
+
+ # # Test importing of a P2SH-P2WPKH descriptor
+ key = get_generate_key()
+ self.log.info("Should not import a p2sh-p2wpkh descriptor without checksum")
+ self.test_importdesc({"desc": "sh(wpkh(" + key.pubkey + "))",
+ "timestamp": "now"
+ },
+ success=False,
+ error_code=-5,
+ error_message="Missing checksum")
+
+ self.log.info("Should not import a p2sh-p2wpkh descriptor that has range specified")
+ self.test_importdesc({"desc": descsum_create("sh(wpkh(" + key.pubkey + "))"),
+ "timestamp": "now",
+ "range": 1,
+ },
+ success=False,
+ error_code=-8,
+ error_message="Range should not be specified for an un-ranged descriptor")
+
+ self.log.info("Should not import a p2sh-p2wpkh descriptor and have it set to active")
+ self.test_importdesc({"desc": descsum_create("sh(wpkh(" + key.pubkey + "))"),
+ "timestamp": "now",
+ "active": True,
+ },
+ success=False,
+ error_code=-8,
+ error_message="Active descriptors must be ranged")
+
+ self.log.info("Should import a (non-active) p2sh-p2wpkh descriptor")
+ self.test_importdesc({"desc": descsum_create("sh(wpkh(" + key.pubkey + "))"),
+ "timestamp": "now",
+ "active": False,
+ },
+ success=True)
+ assert_equal(w1.getwalletinfo()['keypoolsize'], 0)
+
+ test_address(w1,
+ key.p2sh_p2wpkh_addr,
+ ismine=True,
+ solvable=True)
+
+ # # Test importing of a multisig descriptor
+ key1 = get_generate_key()
+ key2 = get_generate_key()
+ self.log.info("Should import a 1-of-2 bare multisig from descriptor")
+ self.test_importdesc({"desc": descsum_create("multi(1," + key1.pubkey + "," + key2.pubkey + ")"),
+ "timestamp": "now"},
+ success=True)
+ self.log.info("Should not treat individual keys from the imported bare multisig as watchonly")
+ test_address(w1,
+ key1.p2pkh_addr,
+ ismine=False)
+
+ # # Test ranged descriptors
+ xpriv = "tprv8ZgxMBicQKsPeuVhWwi6wuMQGfPKi9Li5GtX35jVNknACgqe3CY4g5xgkfDDJcmtF7o1QnxWDRYw4H5P26PXq7sbcUkEqeR4fg3Kxp2tigg"
+ xpub = "tpubD6NzVbkrYhZ4YNXVQbNhMK1WqguFsUXceaVJKbmno2aZ3B6QfbMeraaYvnBSGpV3vxLyTTK9DYT1yoEck4XUScMzXoQ2U2oSmE2JyMedq3H"
+ addresses = ["2N7yv4p8G8yEaPddJxY41kPihnWvs39qCMf", "2MsHxyb2JS3pAySeNUsJ7mNnurtpeenDzLA"] # hdkeypath=m/0'/0'/0' and 1'
+ addresses += ["bcrt1qrd3n235cj2czsfmsuvqqpr3lu6lg0ju7scl8gn", "bcrt1qfqeppuvj0ww98r6qghmdkj70tv8qpchehegrg8"] # wpkh subscripts corresponding to the above addresses
+ desc = "sh(wpkh(" + xpub + "/0/0/*" + "))"
+
+ self.log.info("Ranged descriptors cannot have labels")
+ self.test_importdesc({"desc":descsum_create(desc),
+ "timestamp": "now",
+ "range": [0, 100],
+ "label": "test"},
+ success=False,
+ error_code=-8,
+ error_message='Ranged descriptors should not have a label')
+
+ self.log.info("Private keys required for private keys enabled wallet")
+ self.test_importdesc({"desc":descsum_create(desc),
+ "timestamp": "now",
+ "range": [0, 100]},
+ success=False,
+ error_code=-4,
+ error_message='Cannot import descriptor without private keys to a wallet with private keys enabled',
+ wallet=wpriv)
+
+ self.log.info("Ranged descriptor import should warn without a specified range")
+ self.test_importdesc({"desc": descsum_create(desc),
+ "timestamp": "now"},
+ success=True,
+ warnings=['Range not given, using default keypool range'])
+ assert_equal(w1.getwalletinfo()['keypoolsize'], 0)
+
+ # # Test importing of a ranged descriptor with xpriv
+ self.log.info("Should not import a ranged descriptor that includes xpriv into a watch-only wallet")
+ desc = "sh(wpkh(" + xpriv + "/0'/0'/*'" + "))"
+ self.test_importdesc({"desc": descsum_create(desc),
+ "timestamp": "now",
+ "range": 1},
+ success=False,
+ error_code=-4,
+ error_message='Cannot import private keys to a wallet with private keys disabled')
+ for address in addresses:
+ test_address(w1,
+ address,
+ ismine=False,
+ solvable=False)
+
+ self.test_importdesc({"desc": descsum_create(desc), "timestamp": "now", "range": -1},
+ success=False, error_code=-8, error_message='End of range is too high')
+
+ self.test_importdesc({"desc": descsum_create(desc), "timestamp": "now", "range": [-1, 10]},
+ success=False, error_code=-8, error_message='Range should be greater or equal than 0')
+
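+ # Note: 2 << 31 + 1 evaluates as 2 << 32 because + binds tighter than <<, so both bounds are far above the allowed maximum range end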
+ self.test_importdesc({"desc": descsum_create(desc), "timestamp": "now", "range": [(2 << 31 + 1) - 1000000, (2 << 31 + 1)]},
+ success=False, error_code=-8, error_message='End of range is too high')
+
+ self.test_importdesc({"desc": descsum_create(desc), "timestamp": "now", "range": [2, 1]},
+ success=False, error_code=-8, error_message='Range specified as [begin,end] must not have begin after end')
+
+ self.test_importdesc({"desc": descsum_create(desc), "timestamp": "now", "range": [0, 1000001]},
+ success=False, error_code=-8, error_message='Range is too large')
+
+ # Make sure ranged imports import keys in order
+ w1 = self.nodes[1].get_wallet_rpc('w1')
+ self.log.info('Key ranges should be imported in order')
+ xpub = "tpubDAXcJ7s7ZwicqjprRaEWdPoHKrCS215qxGYxpusRLLmJuT69ZSicuGdSfyvyKpvUNYBW1s2U3NSrT6vrCYB9e6nZUEvrqnwXPF8ArTCRXMY"
+ addresses = [
+ 'bcrt1qtmp74ayg7p24uslctssvjm06q5phz4yrxucgnv', # m/0'/0'/0
+ 'bcrt1q8vprchan07gzagd5e6v9wd7azyucksq2xc76k8', # m/0'/0'/1
+ 'bcrt1qtuqdtha7zmqgcrr26n2rqxztv5y8rafjp9lulu', # m/0'/0'/2
+ 'bcrt1qau64272ymawq26t90md6an0ps99qkrse58m640', # m/0'/0'/3
+ 'bcrt1qsg97266hrh6cpmutqen8s4s962aryy77jp0fg0', # m/0'/0'/4
+ ]
+
+ self.test_importdesc({'desc': descsum_create('wpkh([80002067/0h/0h]' + xpub + '/*)'),
+ 'active': True,
+ 'range' : [0, 2],
+ 'timestamp': 'now'
+ },
+ success=True)
+ self.test_importdesc({'desc': descsum_create('sh(wpkh([abcdef12/0h/0h]' + xpub + '/*))'),
+ 'active': True,
+ 'range' : [0, 2],
+ 'timestamp': 'now'
+ },
+ success=True)
+ self.test_importdesc({'desc': descsum_create('pkh([12345678/0h/0h]' + xpub + '/*)'),
+ 'active': True,
+ 'range' : [0, 2],
+ 'timestamp': 'now'
+ },
+ success=True)
+
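+ # With -keypool=5 on node1, each of the three active ranged descriptors is topped up to 5 keys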
+ assert_equal(w1.getwalletinfo()['keypoolsize'], 5 * 3)
+ for i, expected_addr in enumerate(addresses):
+ received_addr = w1.getnewaddress('', 'bech32')
+ assert_raises_rpc_error(-4, 'This wallet has no available keys', w1.getrawchangeaddress, 'bech32')
+ assert_equal(received_addr, expected_addr)
+ bech32_addr_info = w1.getaddressinfo(received_addr)
+ assert_equal(bech32_addr_info['desc'][:23], 'wpkh([80002067/0\'/0\'/{}]'.format(i))
+
+ shwpkh_addr = w1.getnewaddress('', 'p2sh-segwit')
+ shwpkh_addr_info = w1.getaddressinfo(shwpkh_addr)
+ assert_equal(shwpkh_addr_info['desc'][:26], 'sh(wpkh([abcdef12/0\'/0\'/{}]'.format(i))
+
+ pkh_addr = w1.getnewaddress('', 'legacy')
+ pkh_addr_info = w1.getaddressinfo(pkh_addr)
+ assert_equal(pkh_addr_info['desc'][:22], 'pkh([12345678/0\'/0\'/{}]'.format(i))
+
+ assert_equal(w1.getwalletinfo()['keypoolsize'], 4 * 3) # After retrieving a key, we don't refill the keypool again, so it's one less for each address type
+ w1.keypoolrefill()
+ assert_equal(w1.getwalletinfo()['keypoolsize'], 5 * 3)
+
+ # Check active=False default
+ self.log.info('Check imported descriptors are not active by default')
+ self.test_importdesc({'desc': descsum_create('pkh([12345678/0h/0h]' + xpub + '/*)'),
+ 'range' : [0, 2],
+ 'timestamp': 'now',
+ 'internal': True
+ },
+ success=True)
+ assert_raises_rpc_error(-4, 'This wallet has no available keys', w1.getrawchangeaddress, 'legacy')
+
+ # # Test importing a descriptor containing a WIF private key
+ wif_priv = "cTe1f5rdT8A8DFgVWTjyPwACsDPJM9ff4QngFxUixCSvvbg1x6sh"
+ address = "2MuhcG52uHPknxDgmGPsV18jSHFBnnRgjPg"
+ desc = "sh(wpkh(" + wif_priv + "))"
+ self.log.info("Should import a descriptor with a WIF private key as spendable")
+ self.test_importdesc({"desc": descsum_create(desc),
+ "timestamp": "now"},
+ success=True,
+ wallet=wpriv)
+ test_address(wpriv,
+ address,
+ solvable=True,
+ ismine=True)
+ txid = w0.sendtoaddress(address, 49.99995540)
+ w0.generatetoaddress(6, w0.getnewaddress())
+ self.sync_blocks()
+ tx = wpriv.createrawtransaction([{"txid": txid, "vout": 0}], {w0.getnewaddress(): 49.999})
+ signed_tx = wpriv.signrawtransactionwithwallet(tx)
+ w1.sendrawtransaction(signed_tx['hex'])
+
+ # Make sure that we can use import and use multisig as addresses
+ self.log.info('Test that multisigs can be imported, signed for, and getnewaddress\'d')
+ self.nodes[1].createwallet(wallet_name="wmulti_priv", disable_private_keys=False, blank=True, descriptors=True)
+ wmulti_priv = self.nodes[1].get_wallet_rpc("wmulti_priv")
+ assert_equal(wmulti_priv.getwalletinfo()['keypoolsize'], 0)
+
+ self.test_importdesc({"desc":"wsh(multi(2,tprv8ZgxMBicQKsPevADjDCWsa6DfhkVXicu8NQUzfibwX2MexVwW4tCec5mXdCW8kJwkzBRRmAay1KZya4WsehVvjTGVW6JLqiqd8DdZ4xSg52/84h/0h/0h/*,tprv8ZgxMBicQKsPdSNWUhDiwTScDr6JfkZuLshTRwzvZGnMSnGikV6jxpmdDkC3YRc4T3GD6Nvg9uv6hQg73RVv1EiTXDZwxVbsLugVHU8B1aq/84h/0h/0h/*,tprv8ZgxMBicQKsPeonDt8Ka2mrQmHa61hQ5FQCsvWBTpSNzBFgM58cV2EuXNAHF14VawVpznnme3SuTbA62sGriwWyKifJmXntfNeK7zeqMCj1/84h/0h/0h/*))#m2sr93jn",
+ "active": True,
+ "range": 1000,
+ "next_index": 0,
+ "timestamp": "now"},
+ success=True,
+ wallet=wmulti_priv)
+ self.test_importdesc({"desc":"wsh(multi(2,tprv8ZgxMBicQKsPevADjDCWsa6DfhkVXicu8NQUzfibwX2MexVwW4tCec5mXdCW8kJwkzBRRmAay1KZya4WsehVvjTGVW6JLqiqd8DdZ4xSg52/84h/1h/0h/*,tprv8ZgxMBicQKsPdSNWUhDiwTScDr6JfkZuLshTRwzvZGnMSnGikV6jxpmdDkC3YRc4T3GD6Nvg9uv6hQg73RVv1EiTXDZwxVbsLugVHU8B1aq/84h/1h/0h/*,tprv8ZgxMBicQKsPeonDt8Ka2mrQmHa61hQ5FQCsvWBTpSNzBFgM58cV2EuXNAHF14VawVpznnme3SuTbA62sGriwWyKifJmXntfNeK7zeqMCj1/84h/1h/0h/*))#q3sztvx5",
+ "active": True,
+ "internal" : True,
+ "range": 1000,
+ "next_index": 0,
+ "timestamp": "now"},
+ success=True,
+ wallet=wmulti_priv)
+
+ assert_equal(wmulti_priv.getwalletinfo()['keypoolsize'], 1001) # Range end (1000) is inclusive, so 1001 addresses generated
+ addr = wmulti_priv.getnewaddress('', 'bech32')
+ assert_equal(addr, 'bcrt1qdt0qy5p7dzhxzmegnn4ulzhard33s2809arjqgjndx87rv5vd0fq2czhy8') # Derived at m/84'/0'/0'/0
+ change_addr = wmulti_priv.getrawchangeaddress('bech32')
+ assert_equal(change_addr, 'bcrt1qt9uhe3a9hnq7vajl7a094z4s3crm9ttf8zw3f5v9gr2nyd7e3lnsy44n8e')
+ assert_equal(wmulti_priv.getwalletinfo()['keypoolsize'], 1000)
+ txid = w0.sendtoaddress(addr, 10)
+ self.nodes[0].generate(6)
+ self.sync_all()
+ send_txid = wmulti_priv.sendtoaddress(w0.getnewaddress(), 8)
+ decoded = wmulti_priv.decoderawtransaction(wmulti_priv.gettransaction(send_txid)['hex'])
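+ # P2WSH 2-of-3 spend: empty dummy element, two signatures, and the witness script -> 4 witness stack items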
+ assert_equal(len(decoded['vin'][0]['txinwitness']), 4)
+ self.nodes[0].generate(6)
+ self.sync_all()
+
+ self.nodes[1].createwallet(wallet_name="wmulti_pub", disable_private_keys=True, blank=True, descriptors=True)
+ wmulti_pub = self.nodes[1].get_wallet_rpc("wmulti_pub")
+ assert_equal(wmulti_pub.getwalletinfo()['keypoolsize'], 0)
+
+ self.test_importdesc({"desc":"wsh(multi(2,[7b2d0242/84h/0h/0h]tpubDCJtdt5dgJpdhW4MtaVYDhG4T4tF6jcLR1PxL43q9pq1mxvXgMS9Mzw1HnXG15vxUGQJMMSqCQHMTy3F1eW5VkgVroWzchsPD5BUojrcWs8/*,[59b09cd6/84h/0h/0h]tpubDDBF2BTR6s8drwrfDei8WxtckGuSm1cyoKxYY1QaKSBFbHBYQArWhHPA6eJrzZej6nfHGLSURYSLHr7GuYch8aY5n61tGqgn8b4cXrMuoPH/*,[e81a0532/84h/0h/0h]tpubDCsWoW1kuQB9kG5MXewHqkbjPtqPueRnXju7uM2NK7y3JYb2ajAZ9EiuZXNNuE4661RAfriBWhL8UsnAPpk8zrKKnZw1Ug7X4oHgMdZiU4E/*))#tsry0s5e",
+ "active": True,
+ "range": 1000,
+ "next_index": 0,
+ "timestamp": "now"},
+ success=True,
+ wallet=wmulti_pub)
+ self.test_importdesc({"desc":"wsh(multi(2,[7b2d0242/84h/1h/0h]tpubDCXqdwWZcszwqYJSnZp8eARkxGJfHAk23KDxbztV4BbschfaTfYLTcSkSJ3TN64dRqwa1rnFUScsYormKkGqNbbPwkorQimVevXjxzUV9Gf/*,[59b09cd6/84h/1h/0h]tpubDCYfZY2ceyHzYzMMVPt9MNeiqtQ2T7Uyp9QSFwYXh8Vi9iJFYXcuphJaGXfF3jUQJi5Y3GMNXvM11gaL4txzZgNGK22BFAwMXynnzv4z2Jh/*,[e81a0532/84h/1h/0h]tpubDC6UGqnsQStngYuGD4MKsMy7eD1Yg9NTJfPdvjdG2JE5oZ7EsSL3WHg4Gsw2pR5K39ZwJ46M1wZayhedVdQtMGaUhq5S23PH6fnENK3V1sb/*))#c08a2rzv",
+ "active": True,
+ "internal" : True,
+ "range": 1000,
+ "next_index": 0,
+ "timestamp": "now"},
+ success=True,
+ wallet=wmulti_pub)
+
+ assert_equal(wmulti_pub.getwalletinfo()['keypoolsize'], 1000) # The first one was already consumed by previous import and is detected as used
+ addr = wmulti_pub.getnewaddress('', 'bech32')
+ assert_equal(addr, 'bcrt1qp8s25ckjl7gr6x2q3dx3tn2pytwp05upkjztk6ey857tt50r5aeqn6mvr9') # Derived at m/84'/0'/0'/1
+ change_addr = wmulti_pub.getrawchangeaddress('bech32')
+ assert_equal(change_addr, 'bcrt1qt9uhe3a9hnq7vajl7a094z4s3crm9ttf8zw3f5v9gr2nyd7e3lnsy44n8e')
+ assert_equal(wmulti_pub.getwalletinfo()['keypoolsize'], 999)
+ txid = w0.sendtoaddress(addr, 10)
+ vout = find_vout_for_address(self.nodes[0], txid, addr)
+ self.nodes[0].generate(6)
+ self.sync_all()
+ assert_equal(wmulti_pub.getbalance(), wmulti_priv.getbalance())
+
+ self.log.info("Multisig with distributed keys")
+ self.nodes[1].createwallet(wallet_name="wmulti_priv1", descriptors=True)
+ wmulti_priv1 = self.nodes[1].get_wallet_rpc("wmulti_priv1")
+ res = wmulti_priv1.importdescriptors([
+ {
+ "desc": descsum_create("wsh(multi(2,tprv8ZgxMBicQKsPevADjDCWsa6DfhkVXicu8NQUzfibwX2MexVwW4tCec5mXdCW8kJwkzBRRmAay1KZya4WsehVvjTGVW6JLqiqd8DdZ4xSg52/84h/0h/0h/*,[59b09cd6/84h/0h/0h]tpubDDBF2BTR6s8drwrfDei8WxtckGuSm1cyoKxYY1QaKSBFbHBYQArWhHPA6eJrzZej6nfHGLSURYSLHr7GuYch8aY5n61tGqgn8b4cXrMuoPH/*,[e81a0532/84h/0h/0h]tpubDCsWoW1kuQB9kG5MXewHqkbjPtqPueRnXju7uM2NK7y3JYb2ajAZ9EiuZXNNuE4661RAfriBWhL8UsnAPpk8zrKKnZw1Ug7X4oHgMdZiU4E/*))"),
+ "active": True,
+ "range": 1000,
+ "next_index": 0,
+ "timestamp": "now"
+ },
+ {
+ "desc": descsum_create("wsh(multi(2,tprv8ZgxMBicQKsPevADjDCWsa6DfhkVXicu8NQUzfibwX2MexVwW4tCec5mXdCW8kJwkzBRRmAay1KZya4WsehVvjTGVW6JLqiqd8DdZ4xSg52/84h/1h/0h/*,[59b09cd6/84h/1h/0h]tpubDCYfZY2ceyHzYzMMVPt9MNeiqtQ2T7Uyp9QSFwYXh8Vi9iJFYXcuphJaGXfF3jUQJi5Y3GMNXvM11gaL4txzZgNGK22BFAwMXynnzv4z2Jh/*,[e81a0532/84h/1h/0h]tpubDC6UGqnsQStngYuGD4MKsMy7eD1Yg9NTJfPdvjdG2JE5oZ7EsSL3WHg4Gsw2pR5K39ZwJ46M1wZayhedVdQtMGaUhq5S23PH6fnENK3V1sb/*))"),
+ "active": True,
+ "internal" : True,
+ "range": 1000,
+ "next_index": 0,
+ "timestamp": "now"
+ }])
+ assert_equal(res[0]['success'], True)
+ assert_equal(res[0]['warnings'][0], 'Not all private keys provided. Some wallet functionality may return unexpected errors')
+ assert_equal(res[1]['success'], True)
+ assert_equal(res[1]['warnings'][0], 'Not all private keys provided. Some wallet functionality may return unexpected errors')
+
+ self.nodes[1].createwallet(wallet_name='wmulti_priv2', blank=True, descriptors=True)
+ wmulti_priv2 = self.nodes[1].get_wallet_rpc('wmulti_priv2')
+ res = wmulti_priv2.importdescriptors([
+ {
+ "desc": descsum_create("wsh(multi(2,[7b2d0242/84h/0h/0h]tpubDCJtdt5dgJpdhW4MtaVYDhG4T4tF6jcLR1PxL43q9pq1mxvXgMS9Mzw1HnXG15vxUGQJMMSqCQHMTy3F1eW5VkgVroWzchsPD5BUojrcWs8/*,tprv8ZgxMBicQKsPdSNWUhDiwTScDr6JfkZuLshTRwzvZGnMSnGikV6jxpmdDkC3YRc4T3GD6Nvg9uv6hQg73RVv1EiTXDZwxVbsLugVHU8B1aq/84h/0h/0h/*,[e81a0532/84h/0h/0h]tpubDCsWoW1kuQB9kG5MXewHqkbjPtqPueRnXju7uM2NK7y3JYb2ajAZ9EiuZXNNuE4661RAfriBWhL8UsnAPpk8zrKKnZw1Ug7X4oHgMdZiU4E/*))"),
+ "active": True,
+ "range": 1000,
+ "next_index": 0,
+ "timestamp": "now"
+ },
+ {
+ "desc": descsum_create("wsh(multi(2,[7b2d0242/84h/1h/0h]tpubDCXqdwWZcszwqYJSnZp8eARkxGJfHAk23KDxbztV4BbschfaTfYLTcSkSJ3TN64dRqwa1rnFUScsYormKkGqNbbPwkorQimVevXjxzUV9Gf/*,tprv8ZgxMBicQKsPdSNWUhDiwTScDr6JfkZuLshTRwzvZGnMSnGikV6jxpmdDkC3YRc4T3GD6Nvg9uv6hQg73RVv1EiTXDZwxVbsLugVHU8B1aq/84h/1h/0h/*,[e81a0532/84h/1h/0h]tpubDC6UGqnsQStngYuGD4MKsMy7eD1Yg9NTJfPdvjdG2JE5oZ7EsSL3WHg4Gsw2pR5K39ZwJ46M1wZayhedVdQtMGaUhq5S23PH6fnENK3V1sb/*))"),
+ "active": True,
+ "internal" : True,
+ "range": 1000,
+ "next_index": 0,
+ "timestamp": "now"
+ }])
+ assert_equal(res[0]['success'], True)
+ assert_equal(res[0]['warnings'][0], 'Not all private keys provided. Some wallet functionality may return unexpected errors')
+ assert_equal(res[1]['success'], True)
+ assert_equal(res[1]['warnings'][0], 'Not all private keys provided. Some wallet functionality may return unexpected errors')
+
+ rawtx = self.nodes[1].createrawtransaction([{'txid': txid, 'vout': vout}], {w0.getnewaddress(): 9.999})
+ tx_signed_1 = wmulti_priv1.signrawtransactionwithwallet(rawtx)
+ assert_equal(tx_signed_1['complete'], False)
+ tx_signed_2 = wmulti_priv2.signrawtransactionwithwallet(tx_signed_1['hex'])
+ assert_equal(tx_signed_2['complete'], True)
+ self.nodes[1].sendrawtransaction(tx_signed_2['hex'])
+
+ self.log.info("Combo descriptors cannot be active")
+ self.test_importdesc({"desc": descsum_create("combo(tpubDCJtdt5dgJpdhW4MtaVYDhG4T4tF6jcLR1PxL43q9pq1mxvXgMS9Mzw1HnXG15vxUGQJMMSqCQHMTy3F1eW5VkgVroWzchsPD5BUojrcWs8/*)"),
+ "active": True,
+ "range": 1,
+ "timestamp": "now"},
+ success=False,
+ error_code=-4,
+ error_message="Combo descriptors cannot be set to active")
+
+ self.log.info("Descriptors with no type cannot be active")
+ self.test_importdesc({"desc": descsum_create("pk(tpubDCJtdt5dgJpdhW4MtaVYDhG4T4tF6jcLR1PxL43q9pq1mxvXgMS9Mzw1HnXG15vxUGQJMMSqCQHMTy3F1eW5VkgVroWzchsPD5BUojrcWs8/*)"),
+ "active": True,
+ "range": 1,
+ "timestamp": "now"},
+ success=True,
+ warnings=["Unknown output type, cannot set descriptor to active."])
+
+if __name__ == '__main__':
+ ImportDescriptorsTest().main()
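Note on the distributed-keys case above: the same raw transaction is signed by two wallets in turn, and it only becomes complete once the 2-of-3 threshold is met. A minimal sketch of that threshold logic, with a hypothetical sign() helper standing in for signrawtransactionwithwallet:

    # Minimal sketch of the 2-of-3 flow above; sign() is a hypothetical stand-in
    # for signrawtransactionwithwallet, which each wallet calls with its one key.
    THRESHOLD = 2

    def sign(partial_sigs, signer):
        # Record a signature from `signer` and report whether enough have been collected.
        sigs = partial_sigs | {signer}
        return sigs, len(sigs) >= THRESHOLD

    sigs, complete = sign(set(), "wmulti_priv1")
    assert not complete                      # mirrors tx_signed_1['complete'] == False
    sigs, complete = sign(sigs, "wmulti_priv2")
    assert complete                          # mirrors tx_signed_2['complete'] == True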
diff --git a/test/functional/wallet_importmulti.py b/test/functional/wallet_importmulti.py
index f152fcd1a4..bd4fcdabcf 100755
--- a/test/functional/wallet_importmulti.py
+++ b/test/functional/wallet_importmulti.py
@@ -32,6 +32,7 @@ from test_framework.wallet_util import (
test_address,
)
+
class ImportMultiTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 2
@@ -51,7 +52,7 @@ class ImportMultiTest(BitcoinTestFramework):
result = self.nodes[1].importmulti([req])
observed_warnings = []
if 'warnings' in result[0]:
- observed_warnings = result[0]['warnings']
+ observed_warnings = result[0]['warnings']
assert_equal("\n".join(sorted(warnings)), "\n".join(sorted(observed_warnings)))
assert_equal(result[0]['success'], success)
if error_code is not None:
@@ -63,6 +64,7 @@ class ImportMultiTest(BitcoinTestFramework):
self.nodes[0].generate(1)
self.nodes[1].generate(1)
timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
+ self.nodes[1].syncwithvalidationinterfacequeue()
node0_address1 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
@@ -257,6 +259,7 @@ class ImportMultiTest(BitcoinTestFramework):
self.nodes[1].sendtoaddress(multisig.p2sh_addr, 10.00)
self.nodes[1].generate(1)
timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
+ self.nodes[1].syncwithvalidationinterfacequeue()
self.log.info("Should import a p2sh")
self.test_importmulti({"scriptPubKey": {"address": multisig.p2sh_addr},
@@ -277,6 +280,7 @@ class ImportMultiTest(BitcoinTestFramework):
self.nodes[1].sendtoaddress(multisig.p2sh_addr, 10.00)
self.nodes[1].generate(1)
timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
+ self.nodes[1].syncwithvalidationinterfacequeue()
self.log.info("Should import a p2sh with respective redeem script")
self.test_importmulti({"scriptPubKey": {"address": multisig.p2sh_addr},
@@ -297,6 +301,7 @@ class ImportMultiTest(BitcoinTestFramework):
self.nodes[1].sendtoaddress(multisig.p2sh_addr, 10.00)
self.nodes[1].generate(1)
timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
+ self.nodes[1].syncwithvalidationinterfacequeue()
self.log.info("Should import a p2sh with respective redeem script and private keys")
self.test_importmulti({"scriptPubKey": {"address": multisig.p2sh_addr},
@@ -322,6 +327,7 @@ class ImportMultiTest(BitcoinTestFramework):
self.nodes[1].sendtoaddress(multisig.p2sh_addr, 10.00)
self.nodes[1].generate(1)
timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
+ self.nodes[1].syncwithvalidationinterfacequeue()
self.log.info("Should import a p2sh with respective redeem script and private keys")
self.test_importmulti({"scriptPubKey": {"address": multisig.p2sh_addr},
@@ -851,5 +857,6 @@ class ImportMultiTest(BitcoinTestFramework):
addr = wrpc.getnewaddress('', 'bech32')
assert_equal(addr, addresses[i])
+
if __name__ == '__main__':
ImportMultiTest().main()
diff --git a/test/functional/wallet_keypool.py b/test/functional/wallet_keypool.py
index e3aeb61197..40a2b3ab6a 100755
--- a/test/functional/wallet_keypool.py
+++ b/test/functional/wallet_keypool.py
@@ -5,6 +5,7 @@
"""Test the wallet keypool and interaction with wallet encryption/locking."""
import time
+from decimal import Decimal
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, assert_raises_rpc_error
@@ -21,16 +22,63 @@ class KeyPoolTest(BitcoinTestFramework):
addr_before_encrypting = nodes[0].getnewaddress()
addr_before_encrypting_data = nodes[0].getaddressinfo(addr_before_encrypting)
wallet_info_old = nodes[0].getwalletinfo()
- assert addr_before_encrypting_data['hdseedid'] == wallet_info_old['hdseedid']
+ if not self.options.descriptors:
+ assert addr_before_encrypting_data['hdseedid'] == wallet_info_old['hdseedid']
# Encrypt wallet and wait to terminate
nodes[0].encryptwallet('test')
+ if self.options.descriptors:
+ # Import hardened derivation only descriptors
+ nodes[0].walletpassphrase('test', 10)
+ nodes[0].importdescriptors([
+ {
+ "desc": "wpkh(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/*h)#y4dfsj7n",
+ "timestamp": "now",
+ "range": [0,0],
+ "active": True
+ },
+ {
+ "desc": "pkh(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1h/*h)#a0nyvl0k",
+ "timestamp": "now",
+ "range": [0,0],
+ "active": True
+ },
+ {
+ "desc": "sh(wpkh(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/2h/*h))#lmeu2axg",
+ "timestamp": "now",
+ "range": [0,0],
+ "active": True
+ },
+ {
+ "desc": "wpkh(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/3h/*h)#jkl636gm",
+ "timestamp": "now",
+ "range": [0,0],
+ "active": True,
+ "internal": True
+ },
+ {
+ "desc": "pkh(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/4h/*h)#l3crwaus",
+ "timestamp": "now",
+ "range": [0,0],
+ "active": True,
+ "internal": True
+ },
+ {
+ "desc": "sh(wpkh(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/5h/*h))#qg8wa75f",
+ "timestamp": "now",
+ "range": [0,0],
+ "active": True,
+ "internal": True
+ }
+ ])
+ nodes[0].walletlock()
# Keep creating keys
addr = nodes[0].getnewaddress()
addr_data = nodes[0].getaddressinfo(addr)
wallet_info = nodes[0].getwalletinfo()
- assert addr_before_encrypting_data['hdseedid'] != wallet_info['hdseedid']
- assert addr_data['hdseedid'] == wallet_info['hdseedid']
+ assert addr_before_encrypting_data['hdmasterfingerprint'] != addr_data['hdmasterfingerprint']
+ if not self.options.descriptors:
+ assert addr_data['hdseedid'] == wallet_info['hdseedid']
assert_raises_rpc_error(-12, "Error: Keypool ran out, please call keypoolrefill first", nodes[0].getnewaddress)
# put six (plus 2) new keys in the keypool (100% external-, +100% internal-keys, 1 in min)
@@ -38,8 +86,12 @@ class KeyPoolTest(BitcoinTestFramework):
nodes[0].keypoolrefill(6)
nodes[0].walletlock()
wi = nodes[0].getwalletinfo()
- assert_equal(wi['keypoolsize_hd_internal'], 6)
- assert_equal(wi['keypoolsize'], 6)
+ if self.options.descriptors:
+ assert_equal(wi['keypoolsize_hd_internal'], 18)
+ assert_equal(wi['keypoolsize'], 18)
+ else:
+ assert_equal(wi['keypoolsize_hd_internal'], 6)
+ assert_equal(wi['keypoolsize'], 6)
# drain the internal keys
nodes[0].getrawchangeaddress()
@@ -53,12 +105,12 @@ class KeyPoolTest(BitcoinTestFramework):
assert_raises_rpc_error(-12, "Keypool ran out", nodes[0].getrawchangeaddress)
# drain the external keys
- addr.add(nodes[0].getnewaddress())
- addr.add(nodes[0].getnewaddress())
- addr.add(nodes[0].getnewaddress())
- addr.add(nodes[0].getnewaddress())
- addr.add(nodes[0].getnewaddress())
- addr.add(nodes[0].getnewaddress())
+ addr.add(nodes[0].getnewaddress(address_type="bech32"))
+ addr.add(nodes[0].getnewaddress(address_type="bech32"))
+ addr.add(nodes[0].getnewaddress(address_type="bech32"))
+ addr.add(nodes[0].getnewaddress(address_type="bech32"))
+ addr.add(nodes[0].getnewaddress(address_type="bech32"))
+ addr.add(nodes[0].getnewaddress(address_type="bech32"))
assert len(addr) == 6
# the next one should fail
assert_raises_rpc_error(-12, "Error: Keypool ran out, please call keypoolrefill first", nodes[0].getnewaddress)
@@ -79,8 +131,62 @@ class KeyPoolTest(BitcoinTestFramework):
nodes[0].walletpassphrase('test', 100)
nodes[0].keypoolrefill(100)
wi = nodes[0].getwalletinfo()
- assert_equal(wi['keypoolsize_hd_internal'], 100)
- assert_equal(wi['keypoolsize'], 100)
+ if self.options.descriptors:
+ assert_equal(wi['keypoolsize_hd_internal'], 300)
+ assert_equal(wi['keypoolsize'], 300)
+ else:
+ assert_equal(wi['keypoolsize_hd_internal'], 100)
+ assert_equal(wi['keypoolsize'], 100)
+
+ # create a blank wallet
+ nodes[0].createwallet(wallet_name='w2', blank=True, disable_private_keys=True)
+ w2 = nodes[0].get_wallet_rpc('w2')
+
+ # refer to initial wallet as w1
+ w1 = nodes[0].get_wallet_rpc('')
+
+ # import private key and fund it
+ address = addr.pop()
+ desc = w1.getaddressinfo(address)['desc']
+ if self.options.descriptors:
+ res = w2.importdescriptors([{'desc': desc, 'timestamp': 'now'}])
+ else:
+ res = w2.importmulti([{'desc': desc, 'timestamp': 'now'}])
+ assert_equal(res[0]['success'], True)
+ w1.walletpassphrase('test', 100)
+
+ res = w1.sendtoaddress(address=address, amount=0.00010000)
+ nodes[0].generate(1)
+ destination = addr.pop()
+
+ # Using a fee rate (10 sat/byte) well above the minimum relay rate,
+ # creating a 5,000 sat transaction with change should not be possible
+ assert_raises_rpc_error(-4, "Transaction needs a change address, but we can't generate it. Please call keypoolrefill first.", w2.walletcreatefundedpsbt, inputs=[], outputs=[{addr.pop(): 0.00005000}], options={"subtractFeeFromOutputs": [0], "feeRate": 0.00010})
+
+ # creating a 10,000 sat transaction without change, with a manual input, should still be possible
+ res = w2.walletcreatefundedpsbt(inputs=w2.listunspent(), outputs=[{destination: 0.00010000}], options={"subtractFeeFromOutputs": [0], "feeRate": 0.00010})
+ assert_equal("psbt" in res, True)
+
+ # creating a 10,000 sat transaction without change should still be possible
+ res = w2.walletcreatefundedpsbt(inputs=[], outputs=[{destination: 0.00010000}], options={"subtractFeeFromOutputs": [0], "feeRate": 0.00010})
+ assert_equal("psbt" in res, True)
+ # should work without subtractFeeFromOutputs if the exact fee is subtracted from the amount
+ res = w2.walletcreatefundedpsbt(inputs=[], outputs=[{destination: 0.00008900}], options={"feeRate": 0.00010})
+ assert_equal("psbt" in res, True)
+
+ # dust change should be removed
+ res = w2.walletcreatefundedpsbt(inputs=[], outputs=[{destination: 0.00008800}], options={"feeRate": 0.00010})
+ assert_equal("psbt" in res, True)
+
+ # create a transaction without change at the maximum fee rate, such that the output is still spendable:
+ res = w2.walletcreatefundedpsbt(inputs=[], outputs=[{destination: 0.00010000}], options={"subtractFeeFromOutputs": [0], "feeRate": 0.0008824})
+ assert_equal("psbt" in res, True)
+ assert_equal(res["fee"], Decimal("0.00009706"))
+
+ # creating a 10,000 sat transaction with a manual change address should be possible
+ res = w2.walletcreatefundedpsbt(inputs=[], outputs=[{destination: 0.00010000}], options={"subtractFeeFromOutputs": [0], "feeRate": 0.00010, "changeAddress": addr.pop()})
+ assert_equal("psbt" in res, True)
+
if __name__ == '__main__':
KeyPoolTest().main()
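Note on the descriptor-wallet branch above: it imports three external and three internal active descriptors, which is why the keypool asserts expect 18 and 300 rather than 6 and 100. A short sketch of that arithmetic, with the 3+3 split taken from the importdescriptors call above:

    # Each keypoolrefill(n) target applies per active descriptor, so the reported
    # totals scale with the number of descriptors imported (3 external, 3 internal).
    ACTIVE_EXTERNAL = 3
    ACTIVE_INTERNAL = 3

    def expected_keypool(target):
        return ACTIVE_EXTERNAL * target, ACTIVE_INTERNAL * target

    assert expected_keypool(6) == (18, 18)      # keypoolsize / keypoolsize_hd_internal
    assert expected_keypool(100) == (300, 300)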
diff --git a/test/functional/wallet_keypool_topup.py b/test/functional/wallet_keypool_topup.py
index 829633a050..102ed23fba 100755
--- a/test/functional/wallet_keypool_topup.py
+++ b/test/functional/wallet_keypool_topup.py
@@ -79,7 +79,15 @@ class KeypoolRestoreTest(BitcoinTestFramework):
assert_equal(self.nodes[idx].getbalance(), 15)
assert_equal(self.nodes[idx].listtransactions()[0]['category'], "receive")
# Check that we have marked all keys up to the used keypool key as used
- assert_equal(self.nodes[idx].getaddressinfo(self.nodes[idx].getnewaddress())['hdkeypath'], "m/0'/0'/110'")
+ if self.options.descriptors:
+ if output_type == 'legacy':
+ assert_equal(self.nodes[idx].getaddressinfo(self.nodes[idx].getnewaddress(address_type=output_type))['hdkeypath'], "m/44'/1'/0'/0/110")
+ elif output_type == 'p2sh-segwit':
+ assert_equal(self.nodes[idx].getaddressinfo(self.nodes[idx].getnewaddress(address_type=output_type))['hdkeypath'], "m/49'/1'/0'/0/110")
+ elif output_type == 'bech32':
+ assert_equal(self.nodes[idx].getaddressinfo(self.nodes[idx].getnewaddress(address_type=output_type))['hdkeypath'], "m/84'/1'/0'/0/110")
+ else:
+ assert_equal(self.nodes[idx].getaddressinfo(self.nodes[idx].getnewaddress(address_type=output_type))['hdkeypath'], "m/0'/0'/110'")
if __name__ == '__main__':
diff --git a/test/functional/wallet_labels.py b/test/functional/wallet_labels.py
index 337d2e55d9..fb4a1f9792 100755
--- a/test/functional/wallet_labels.py
+++ b/test/functional/wallet_labels.py
@@ -115,15 +115,16 @@ class WalletLabelsTest(BitcoinTestFramework):
assert_raises_rpc_error(-11, "No addresses with label", node.getaddressesbylabel, "")
# Check that addmultisigaddress can assign labels.
- for label in labels:
- addresses = []
- for x in range(10):
- addresses.append(node.getnewaddress())
- multisig_address = node.addmultisigaddress(5, addresses, label.name)['address']
- label.add_address(multisig_address)
- label.purpose[multisig_address] = "send"
- label.verify(node)
- node.generate(101)
+ if not self.options.descriptors:
+ for label in labels:
+ addresses = []
+ for x in range(10):
+ addresses.append(node.getnewaddress())
+ multisig_address = node.addmultisigaddress(5, addresses, label.name)['address']
+ label.add_address(multisig_address)
+ label.purpose[multisig_address] = "send"
+ label.verify(node)
+ node.generate(101)
# Check that setlabel can change the label of an address from a
# different label.
@@ -133,6 +134,33 @@ class WalletLabelsTest(BitcoinTestFramework):
# in the label. This is a no-op.
change_label(node, labels[2].addresses[0], labels[2], labels[2])
+ self.log.info('Check watchonly labels')
+ node.createwallet(wallet_name='watch_only', disable_private_keys=True, descriptors=False)
+ wallet_watch_only = node.get_wallet_rpc('watch_only')
+ BECH32_VALID = {
+ '✔️_VER15_PROG40': 'bcrt10qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqn2cjv3',
+ '✔️_VER16_PROG03': 'bcrt1sqqqqqjq8pdp',
+ '✔️_VER16_PROG02': 'bcrt1sqqqqqjq8pv',
+ }
+ BECH32_INVALID = {
+ '❌_VER15_PROG41': 'bcrt10qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqzc7xyq',
+ '❌_VER16_PROG01': 'bcrt1sqqpl9r5c',
+ }
+ for l in BECH32_VALID:
+ ad = BECH32_VALID[l]
+ wallet_watch_only.importaddress(label=l, rescan=False, address=ad)
+ node.generatetoaddress(1, ad)
+ assert_equal(wallet_watch_only.getaddressesbylabel(label=l), {ad: {'purpose': 'receive'}})
+ assert_equal(wallet_watch_only.getreceivedbylabel(label=l), 0)
+ for l in BECH32_INVALID:
+ ad = BECH32_INVALID[l]
+ assert_raises_rpc_error(
+ -5,
+ "Invalid Bitcoin address or script",
+ lambda: wallet_watch_only.importaddress(label=l, rescan=False, address=ad),
+ )
+
+
class Label:
def __init__(self, name):
# Label name
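Note on the watch-only bech32 checks above: the valid/invalid split follows the segwit witness-program size rule, so the 40-byte, 3-byte, and 2-byte programs import while the 41-byte and 1-byte ones are rejected. A small sketch of that rule, assuming only the generic program-length bound is being exercised here:

    # Witness programs for versions 1-16 must be 2-40 bytes (BIP141); version 0
    # has stricter 20/32-byte rules not exercised by the addresses above.
    def plausible_witness_program(version, program_len):
        return 1 <= version <= 16 and 2 <= program_len <= 40

    assert plausible_witness_program(15, 40)       # valid: 40-byte program
    assert plausible_witness_program(16, 3)        # valid: 3-byte program
    assert plausible_witness_program(16, 2)        # valid: 2-byte program
    assert not plausible_witness_program(15, 41)   # invalid: 41-byte program
    assert not plausible_witness_program(16, 1)    # invalid: 1-byte program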
diff --git a/test/functional/wallet_listtransactions.py b/test/functional/wallet_listtransactions.py
index 8c44a070b8..8ff663ccd2 100755
--- a/test/functional/wallet_listtransactions.py
+++ b/test/functional/wallet_listtransactions.py
@@ -97,6 +97,8 @@ class ListTransactionsTest(BitcoinTestFramework):
txid = self.nodes[1].sendtoaddress(multisig["address"], 0.1)
self.nodes[1].generate(1)
self.sync_all()
+ assert_equal(len(self.nodes[0].listtransactions(label="watchonly", include_watchonly=True)), 1)
+ assert_equal(len(self.nodes[0].listtransactions(dummy="watchonly", include_watchonly=True)), 1)
assert len(self.nodes[0].listtransactions(label="watchonly", count=100, include_watchonly=False)) == 0
assert_array_result(self.nodes[0].listtransactions(label="watchonly", count=100, include_watchonly=True),
{"category": "receive", "amount": Decimal("0.1")},
diff --git a/test/functional/wallet_multiwallet.py b/test/functional/wallet_multiwallet.py
index a2c502f280..88beef1034 100755
--- a/test/functional/wallet_multiwallet.py
+++ b/test/functional/wallet_multiwallet.py
@@ -1,24 +1,42 @@
#!/usr/bin/env python3
-# Copyright (c) 2017-2019 The Bitcoin Core developers
+# Copyright (c) 2017-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test multiwallet.
Verify that a bitcoind node can load multiple wallet files
"""
+from decimal import Decimal
+from threading import Thread
import os
import shutil
import time
+from test_framework.authproxy import JSONRPCException
from test_framework.test_framework import BitcoinTestFramework
from test_framework.test_node import ErrorMatch
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
+ get_rpc_proxy,
)
FEATURE_LATEST = 169900
+got_loading_error = False
+def test_load_unload(node, name):
+ global got_loading_error
+ for i in range(10):
+ if got_loading_error:
+ return
+ try:
+ node.loadwallet(name)
+ node.unloadwallet(name)
+ except JSONRPCException as e:
+ if e.error['code'] == -4 and 'Wallet already being loading' in e.error['message']:
+ got_loading_error = True
+ return
+
class MultiWalletTest(BitcoinTestFramework):
def set_test_params(self):
@@ -121,14 +139,6 @@ class MultiWalletTest(BitcoinTestFramework):
self.nodes[0].assert_start_raises_init_error(['-zapwallettxes=1', '-wallet=w1', '-wallet=w2'], "Error: -zapwallettxes is only allowed with a single wallet file")
self.nodes[0].assert_start_raises_init_error(['-zapwallettxes=2', '-wallet=w1', '-wallet=w2'], "Error: -zapwallettxes is only allowed with a single wallet file")
- self.log.info("Do not allow -salvagewallet with multiwallet")
- self.nodes[0].assert_start_raises_init_error(['-salvagewallet', '-wallet=w1', '-wallet=w2'], "Error: -salvagewallet is only allowed with a single wallet file")
- self.nodes[0].assert_start_raises_init_error(['-salvagewallet=1', '-wallet=w1', '-wallet=w2'], "Error: -salvagewallet is only allowed with a single wallet file")
-
- self.log.info("Do not allow -upgradewallet with multiwallet")
- self.nodes[0].assert_start_raises_init_error(['-upgradewallet', '-wallet=w1', '-wallet=w2'], "Error: -upgradewallet is only allowed with a single wallet file")
- self.nodes[0].assert_start_raises_init_error(['-upgradewallet=1', '-wallet=w1', '-wallet=w2'], "Error: -upgradewallet is only allowed with a single wallet file")
-
# if wallets/ doesn't exist, datadir should be the default wallet dir
wallet_dir2 = data_dir('walletdir')
os.rename(wallet_dir(), wallet_dir2)
@@ -193,9 +203,9 @@ class MultiWalletTest(BitcoinTestFramework):
self.log.info('Check for per-wallet settxfee call')
assert_equal(w1.getwalletinfo()['paytxfee'], 0)
assert_equal(w2.getwalletinfo()['paytxfee'], 0)
- w2.settxfee(4.0)
+ w2.settxfee(0.001)
assert_equal(w1.getwalletinfo()['paytxfee'], 0)
- assert_equal(w2.getwalletinfo()['paytxfee'], 4.0)
+ assert_equal(w2.getwalletinfo()['paytxfee'], Decimal('0.00100000'))
self.log.info("Test dynamic wallet loading")
@@ -219,6 +229,18 @@ class MultiWalletTest(BitcoinTestFramework):
w2 = node.get_wallet_rpc(wallet_names[1])
w2.getwalletinfo()
+ self.log.info("Concurrent wallet loading")
+ threads = []
+ for _ in range(3):
+ n = node.cli if self.options.usecli else get_rpc_proxy(node.url, 1, timeout=600, coveragedir=node.coverage_dir)
+ t = Thread(target=test_load_unload, args=(n, wallet_names[2], ))
+ t.start()
+ threads.append(t)
+ for t in threads:
+ t.join()
+ global got_loading_error
+ assert_equal(got_loading_error, True)
+
self.log.info("Load remaining wallets")
for wallet_name in wallet_names[2:]:
loadwallet_name = self.nodes[0].loadwallet(wallet_name)
@@ -230,10 +252,10 @@ class MultiWalletTest(BitcoinTestFramework):
assert_raises_rpc_error(-18, 'Wallet wallets not found.', self.nodes[0].loadwallet, 'wallets')
# Fail to load duplicate wallets
- assert_raises_rpc_error(-4, 'Wallet file verification failed: Error loading wallet w1. Duplicate -wallet filename specified.', self.nodes[0].loadwallet, wallet_names[0])
+ assert_raises_rpc_error(-4, 'Wallet file verification failed. Error loading wallet w1. Duplicate -wallet filename specified.', self.nodes[0].loadwallet, wallet_names[0])
# Fail to load duplicate wallets by different ways (directory and filepath)
- assert_raises_rpc_error(-4, "Wallet file verification failed: Error loading wallet wallet.dat. Duplicate -wallet filename specified.", self.nodes[0].loadwallet, 'wallet.dat')
+ assert_raises_rpc_error(-4, "Wallet file verification failed. Error loading wallet wallet.dat. Duplicate -wallet filename specified.", self.nodes[0].loadwallet, 'wallet.dat')
# Fail to load if one wallet is a copy of another
assert_raises_rpc_error(-4, "BerkeleyBatch: Can't open database w8_copy (duplicates fileid", self.nodes[0].loadwallet, 'w8_copy')
@@ -243,7 +265,7 @@ class MultiWalletTest(BitcoinTestFramework):
# Fail to load if wallet file is a symlink
- assert_raises_rpc_error(-4, "Wallet file verification failed: Invalid -wallet path 'w8_symlink'", self.nodes[0].loadwallet, 'w8_symlink')
+ assert_raises_rpc_error(-4, "Wallet file verification failed. Invalid -wallet path 'w8_symlink'", self.nodes[0].loadwallet, 'w8_symlink')
# Fail to load if a directory is specified that doesn't contain a wallet
os.mkdir(wallet_dir('empty_wallet_dir'))
@@ -332,18 +354,5 @@ class MultiWalletTest(BitcoinTestFramework):
self.nodes[0].unloadwallet(wallet)
self.nodes[1].loadwallet(wallet)
- # Fail to load if wallet is downgraded
- shutil.copytree(os.path.join(self.options.data_wallets_dir, 'high_minversion'), wallet_dir('high_minversion'))
- self.restart_node(0, extra_args=['-upgradewallet={}'.format(FEATURE_LATEST)])
- assert {'name': 'high_minversion'} in self.nodes[0].listwalletdir()['wallets']
- self.log.info("Fail -upgradewallet that results in downgrade")
- assert_raises_rpc_error(
- -4,
- 'Wallet loading failed: Error loading {}: Wallet requires newer version of {}'.format(
- wallet_dir('high_minversion', 'wallet.dat'), self.config['environment']['PACKAGE_NAME']),
- lambda: self.nodes[0].loadwallet(filename='high_minversion'),
- )
-
-
if __name__ == '__main__':
MultiWalletTest().main()
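Note on the concurrent-loading test above: each thread gets its own RPC connection (get_rpc_proxy), and all workers stop once any one of them has observed the "already being loaded" error. A minimal sketch of that shared-flag pattern, using a threading.Event in place of the module-level got_loading_error:

    # Each worker retries until the shared flag is set; in the test the flag is
    # set by the worker that hits the expected JSONRPCException from loadwallet.
    from threading import Event, Thread

    def load_unload(stop, attempt):
        for _ in range(10):
            if stop.is_set():
                return
            if attempt():            # stand-in for loadwallet()/unloadwallet() raising
                stop.set()
                return

    stop = Event()
    threads = [Thread(target=load_unload, args=(stop, lambda: True)) for _ in range(3)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()
    assert stop.is_set()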
diff --git a/test/functional/wallet_reorgsrestore.py b/test/functional/wallet_reorgsrestore.py
index 497a5dd95e..455f1fc5e8 100755
--- a/test/functional/wallet_reorgsrestore.py
+++ b/test/functional/wallet_reorgsrestore.py
@@ -77,8 +77,7 @@ class ReorgsRestoreTest(BitcoinTestFramework):
assert_equal(conflicted["walletconflicts"][0], conflicting["txid"])
# Node0 wallet is shutdown
- self.stop_node(0)
- self.start_node(0)
+ self.restart_node(0)
# The block chain re-orgs and the tx is included in a different block
self.nodes[1].generate(9)
diff --git a/test/functional/wallet_resendwallettransactions.py b/test/functional/wallet_resendwallettransactions.py
index d122e3db52..3417616d77 100755
--- a/test/functional/wallet_resendwallettransactions.py
+++ b/test/functional/wallet_resendwallettransactions.py
@@ -1,31 +1,16 @@
#!/usr/bin/env python3
-# Copyright (c) 2017-2019 The Bitcoin Core developers
+# Copyright (c) 2017-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test that the wallet resends transactions periodically."""
-from collections import defaultdict
import time
from test_framework.blocktools import create_block, create_coinbase
from test_framework.messages import ToHex
-from test_framework.mininode import P2PInterface, mininode_lock
+from test_framework.mininode import P2PTxInvStore, mininode_lock
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, wait_until
-
-class P2PStoreTxInvs(P2PInterface):
- def __init__(self):
- super().__init__()
- self.tx_invs_received = defaultdict(int)
-
- def on_inv(self, message):
- # Store how many times invs have been received for each tx.
- for i in message.inv:
- if i.type == 1:
- # save txid
- self.tx_invs_received[i.hash] += 1
-
-
class ResendWalletTransactionsTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
@@ -36,7 +21,7 @@ class ResendWalletTransactionsTest(BitcoinTestFramework):
def run_test(self):
node = self.nodes[0] # alias
- node.add_p2p_connection(P2PStoreTxInvs())
+ node.add_p2p_connection(P2PTxInvStore())
self.log.info("Create a new transaction and wait until it's broadcast")
txid = int(node.sendtoaddress(node.getnewaddress(), 1), 16)
@@ -51,7 +36,7 @@ class ResendWalletTransactionsTest(BitcoinTestFramework):
wait_until(lambda: node.p2p.tx_invs_received[txid] >= 1, lock=mininode_lock)
# Add a second peer since txs aren't rebroadcast to the same peer (see filterInventoryKnown)
- node.add_p2p_connection(P2PStoreTxInvs())
+ node.add_p2p_connection(P2PTxInvStore())
self.log.info("Create a block")
# Create and submit a block without the transaction.
@@ -64,15 +49,21 @@ class ResendWalletTransactionsTest(BitcoinTestFramework):
block.solve()
node.submitblock(ToHex(block))
- # Transaction should not be rebroadcast
node.syncwithvalidationinterfacequeue()
- node.p2ps[1].sync_with_ping()
- assert_equal(node.p2ps[1].tx_invs_received[txid], 0)
-
- self.log.info("Transaction should be rebroadcast after 30 minutes")
- # Use mocktime and give an extra 5 minutes to be sure.
- rebroadcast_time = int(time.time()) + 41 * 60
- node.setmocktime(rebroadcast_time)
+ now = int(time.time())
+
+ # Transaction should not be rebroadcast within the first 12 hours
+ # Leave 2 mins for buffer
+ twelve_hrs = 12 * 60 * 60
+ two_min = 2 * 60
+ node.setmocktime(now + twelve_hrs - two_min)
+ time.sleep(2) # ensure enough time has passed for rebroadcast attempt to occur
+ assert_equal(txid in node.p2ps[1].get_invs(), False)
+
+ self.log.info("Bump time & check that transaction is rebroadcast")
+ # Transaction should be rebroadcast approximately 24 hours in the future,
+ # but can range from 12-36 hours. So bump 36 hours to be sure.
+ node.setmocktime(now + 36 * 60 * 60)
wait_until(lambda: node.p2ps[1].tx_invs_received[txid] >= 1, lock=mininode_lock)
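Note on the mocktime bumps above: they bracket the wallet's rebroadcast window, so no inv is expected just before the 12-hour mark and one is expected by 36 hours, since the rebroadcast delay can fall anywhere in the 12-36 hour range. A tiny sketch of that arithmetic (values in seconds):

    import time

    now = int(time.time())
    TWELVE_HRS = 12 * 60 * 60
    TWO_MIN = 2 * 60

    before_window = now + TWELVE_HRS - TWO_MIN   # still inside the "no rebroadcast" period
    after_window = now + 36 * 60 * 60            # past the whole 12-36 hour window
    assert before_window < now + TWELVE_HRS <= after_window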
diff --git a/test/functional/wallet_txn_clone.py b/test/functional/wallet_txn_clone.py
index 99559090ee..5e1a804d33 100755
--- a/test/functional/wallet_txn_clone.py
+++ b/test/functional/wallet_txn_clone.py
@@ -29,9 +29,8 @@ class TxnMallTest(BitcoinTestFramework):
def setup_network(self):
# Start with split network:
- super(TxnMallTest, self).setup_network()
+ super().setup_network()
disconnect_nodes(self.nodes[1], 2)
- disconnect_nodes(self.nodes[2], 1)
def run_test(self):
if self.options.segwit:
diff --git a/test/functional/wallet_txn_doublespend.py b/test/functional/wallet_txn_doublespend.py
index 1891cd9190..cac58aeaf2 100755
--- a/test/functional/wallet_txn_doublespend.py
+++ b/test/functional/wallet_txn_doublespend.py
@@ -29,7 +29,6 @@ class TxnMallTest(BitcoinTestFramework):
# Start with split network:
super().setup_network()
disconnect_nodes(self.nodes[1], 2)
- disconnect_nodes(self.nodes[2], 1)
def run_test(self):
# All nodes should start with 1,250 BTC:
diff --git a/test/functional/wallet_upgradewallet.py b/test/functional/wallet_upgradewallet.py
new file mode 100755
index 0000000000..cc2139a027
--- /dev/null
+++ b/test/functional/wallet_upgradewallet.py
@@ -0,0 +1,139 @@
+#!/usr/bin/env python3
+# Copyright (c) 2018-2020 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""upgradewallet RPC functional test
+
+Test upgradewallet RPC. Download node binaries:
+
+contrib/devtools/previous_release.sh -b v0.19.1 v0.18.1 v0.17.1 v0.16.3 v0.15.2
+
+Only v0.15.2 and v0.16.3 are required by this test. The others are used in feature_backwards_compatibility.py
+"""
+
+import os
+import shutil
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import (
+ assert_equal,
+ assert_greater_than,
+ assert_is_hex_string,
+)
+
+
+class UpgradeWalletTest(BitcoinTestFramework):
+ def set_test_params(self):
+ self.setup_clean_chain = True
+ self.num_nodes = 3
+ self.extra_args = [
+ ["-addresstype=bech32"], # current wallet version
+ ["-usehd=1"], # v0.16.3 wallet
+ ["-usehd=0"] # v0.15.2 wallet
+ ]
+
+ def skip_test_if_missing_module(self):
+ self.skip_if_no_wallet()
+ self.skip_if_no_previous_releases()
+
+ def setup_network(self):
+ self.setup_nodes()
+
+ def setup_nodes(self):
+ self.add_nodes(self.num_nodes, extra_args=self.extra_args, versions=[
+ None,
+ 160300,
+ 150200,
+ ])
+ self.start_nodes()
+
+ def dumb_sync_blocks(self):
+ """
+ Little helper to sync older wallets.
+ Notice that v0.15.2's regtest is hardforked, so there is
+ no sync for it.
+ v0.15.2 is only being used to test for version upgrade
+ and master key hash presence.
+ v0.16.3 is being used to test for version upgrade and balances.
+ Further info: https://github.com/bitcoin/bitcoin/pull/18774#discussion_r416967844
+ """
+ node_from = self.nodes[0]
+ v16_3_node = self.nodes[1]
+ to_height = node_from.getblockcount()
+ height = self.nodes[1].getblockcount()
+ for i in range(height, to_height+1):
+ b = node_from.getblock(blockhash=node_from.getblockhash(i), verbose=0)
+ v16_3_node.submitblock(b)
+ assert_equal(v16_3_node.getblockcount(), to_height)
+
+ def run_test(self):
+ self.nodes[0].generatetoaddress(101, self.nodes[0].getnewaddress())
+ self.dumb_sync_blocks()
+ # Sanity check the test framework:
+ res = self.nodes[0].getblockchaininfo()
+ assert_equal(res['blocks'], 101)
+ node_master = self.nodes[0]
+ v16_3_node = self.nodes[1]
+ v15_2_node = self.nodes[2]
+
+ # Send coins to old wallets for later conversion checks.
+ v16_3_wallet = v16_3_node.get_wallet_rpc('wallet.dat')
+ v16_3_address = v16_3_wallet.getnewaddress()
+ node_master.generatetoaddress(101, v16_3_address)
+ self.dumb_sync_blocks()
+ v16_3_balance = v16_3_wallet.getbalance()
+
+ self.log.info("Test upgradewallet RPC...")
+ # Prepare for copying of the older wallet
+ node_master_wallet_dir = os.path.join(node_master.datadir, "regtest/wallets")
+ v16_3_wallet = os.path.join(v16_3_node.datadir, "regtest/wallets/wallet.dat")
+ v15_2_wallet = os.path.join(v15_2_node.datadir, "regtest/wallet.dat")
+ self.stop_nodes()
+
+ # Copy the 0.16.3 wallet to the latest Bitcoin Core version and open it:
+ shutil.rmtree(node_master_wallet_dir)
+ os.mkdir(node_master_wallet_dir)
+ shutil.copy(
+ v16_3_wallet,
+ node_master_wallet_dir
+ )
+ self.restart_node(0, ['-nowallet'])
+ node_master.loadwallet('')
+
+ wallet = node_master.get_wallet_rpc('')
+ old_version = wallet.getwalletinfo()["walletversion"]
+
+ # calling upgradewallet without version arguments
+ # should return nothing if successful
+ assert_equal(wallet.upgradewallet(), "")
+ new_version = wallet.getwalletinfo()["walletversion"]
+ # upgraded wallet version should be greater than older one
+ assert_greater_than(new_version, old_version)
+ # wallet should still contain the same balance
+ assert_equal(wallet.getbalance(), v16_3_balance)
+
+ self.stop_node(0)
+ # Copy the 0.15.2 wallet to the latest Bitcoin Core version and open it:
+ shutil.rmtree(node_master_wallet_dir)
+ os.mkdir(node_master_wallet_dir)
+ shutil.copy(
+ v15_2_wallet,
+ node_master_wallet_dir
+ )
+ self.restart_node(0, ['-nowallet'])
+ node_master.loadwallet('')
+
+ wallet = node_master.get_wallet_rpc('')
+ # should have no master key hash before conversion
+ assert_equal('hdseedid' in wallet.getwalletinfo(), False)
+ # calling upgradewallet with explicit version number
+ # should return nothing if successful
+ assert_equal(wallet.upgradewallet(169900), "")
+ new_version = wallet.getwalletinfo()["walletversion"]
+ # upgraded wallet should have version 169900
+ assert_equal(new_version, 169900)
+ # after conversion master key hash should be present
+ assert_is_hex_string(wallet.getwalletinfo()['hdseedid'])
+
+if __name__ == '__main__':
+ UpgradeWalletTest().main()
diff --git a/test/functional/wallet_zapwallettxes.py b/test/functional/wallet_zapwallettxes.py
index adebff360a..7f1cdbd20b 100755
--- a/test/functional/wallet_zapwallettxes.py
+++ b/test/functional/wallet_zapwallettxes.py
@@ -49,17 +49,15 @@ class ZapWalletTXesTest (BitcoinTestFramework):
assert_equal(self.nodes[0].gettransaction(txid1)['txid'], txid1)
assert_equal(self.nodes[0].gettransaction(txid2)['txid'], txid2)
- # Stop-start node0. Both confirmed and unconfirmed transactions remain in the wallet.
- self.stop_node(0)
- self.start_node(0)
+ # Restart node0. Both confirmed and unconfirmed transactions remain in the wallet.
+ self.restart_node(0)
assert_equal(self.nodes[0].gettransaction(txid1)['txid'], txid1)
assert_equal(self.nodes[0].gettransaction(txid2)['txid'], txid2)
- # Stop node0 and restart with zapwallettxes and persistmempool. The unconfirmed
+ # Restart node0 with zapwallettxes and persistmempool. The unconfirmed
# transaction is zapped from the wallet, but is re-added when the mempool is reloaded.
- self.stop_node(0)
- self.start_node(0, ["-persistmempool=1", "-zapwallettxes=2"])
+ self.restart_node(0, ["-persistmempool=1", "-zapwallettxes=2"])
wait_until(lambda: self.nodes[0].getmempoolinfo()['size'] == 1, timeout=3)
self.nodes[0].syncwithvalidationinterfacequeue() # Flush mempool to wallet
@@ -67,10 +65,9 @@ class ZapWalletTXesTest (BitcoinTestFramework):
assert_equal(self.nodes[0].gettransaction(txid1)['txid'], txid1)
assert_equal(self.nodes[0].gettransaction(txid2)['txid'], txid2)
- # Stop node0 and restart with zapwallettxes, but not persistmempool.
+ # Restart node0 with zapwallettxes, but not persistmempool.
# The unconfirmed transaction is zapped and is no longer in the wallet.
- self.stop_node(0)
- self.start_node(0, ["-zapwallettxes=2"])
+ self.restart_node(0, ["-zapwallettxes=2"])
# tx1 is still available because it was confirmed
assert_equal(self.nodes[0].gettransaction(txid1)['txid'], txid1)
diff --git a/test/fuzz/test_runner.py b/test/fuzz/test_runner.py
index 87f4255a6f..56b18752ec 100755
--- a/test/fuzz/test_runner.py
+++ b/test/fuzz/test_runner.py
@@ -1,16 +1,17 @@
#!/usr/bin/env python3
-# Copyright (c) 2019 The Bitcoin Core developers
+# Copyright (c) 2019-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Run fuzz test targets.
"""
+from concurrent.futures import ThreadPoolExecutor, as_completed
import argparse
import configparser
+import logging
import os
-import sys
import subprocess
-import logging
+import sys
def main():
@@ -36,6 +37,13 @@ def main():
help="A comma-separated list of targets to exclude",
)
parser.add_argument(
+ '--par',
+ '-j',
+ type=int,
+ default=4,
+ help='How many targets to merge or execute in parallel.',
+ )
+ parser.add_argument(
'seed_dir',
help='The seed corpus to run on (must contain subfolders for each fuzz target).',
)
@@ -124,25 +132,29 @@ def main():
logging.error("subprocess timed out: Currently only libFuzzer is supported")
sys.exit(1)
- if args.m_dir:
- merge_inputs(
+ with ThreadPoolExecutor(max_workers=args.par) as fuzz_pool:
+ if args.m_dir:
+ merge_inputs(
+ fuzz_pool=fuzz_pool,
+ corpus=args.seed_dir,
+ test_list=test_list_selection,
+ build_dir=config["environment"]["BUILDDIR"],
+ merge_dir=args.m_dir,
+ )
+ return
+
+ run_once(
+ fuzz_pool=fuzz_pool,
corpus=args.seed_dir,
test_list=test_list_selection,
build_dir=config["environment"]["BUILDDIR"],
- merge_dir=args.m_dir,
+ use_valgrind=args.valgrind,
)
- return
-
- run_once(
- corpus=args.seed_dir,
- test_list=test_list_selection,
- build_dir=config["environment"]["BUILDDIR"],
- use_valgrind=args.valgrind,
- )
-def merge_inputs(*, corpus, test_list, build_dir, merge_dir):
+def merge_inputs(*, fuzz_pool, corpus, test_list, build_dir, merge_dir):
logging.info("Merge the inputs in the passed dir into the seed_dir. Passed dir {}".format(merge_dir))
+ jobs = []
for t in test_list:
args = [
os.path.join(build_dir, 'src', 'test', 'fuzz', t),
@@ -153,12 +165,20 @@ def merge_inputs(*, corpus, test_list, build_dir, merge_dir):
]
os.makedirs(os.path.join(corpus, t), exist_ok=True)
os.makedirs(os.path.join(merge_dir, t), exist_ok=True)
- logging.debug('Run {} with args {}'.format(t, args))
- output = subprocess.run(args, check=True, stderr=subprocess.PIPE, universal_newlines=True).stderr
- logging.debug('Output: {}'.format(output))
+ def job(t, args):
+ output = 'Run {} with args {}\n'.format(t, " ".join(args))
+ output += subprocess.run(args, check=True, stderr=subprocess.PIPE, universal_newlines=True).stderr
+ logging.debug(output)
+
+ jobs.append(fuzz_pool.submit(job, t, args))
+
+ for future in as_completed(jobs):
+ future.result()
-def run_once(*, corpus, test_list, build_dir, use_valgrind):
+
+def run_once(*, fuzz_pool, corpus, test_list, build_dir, use_valgrind):
+ jobs = []
for t in test_list:
corpus_path = os.path.join(corpus, t)
os.makedirs(corpus_path, exist_ok=True)
@@ -169,10 +189,18 @@ def run_once(*, corpus, test_list, build_dir, use_valgrind):
]
if use_valgrind:
args = ['valgrind', '--quiet', '--error-exitcode=1'] + args
- logging.debug('Run {} with args {}'.format(t, args))
- result = subprocess.run(args, stderr=subprocess.PIPE, universal_newlines=True)
- output = result.stderr
- logging.debug('Output: {}'.format(output))
+
+ def job(t, args):
+ output = 'Run {} with args {}'.format(t, args)
+ result = subprocess.run(args, stderr=subprocess.PIPE, universal_newlines=True)
+ output += result.stderr
+ return output, result
+
+ jobs.append(fuzz_pool.submit(job, t, args))
+
+ for future in as_completed(jobs):
+ output, result = future.result()
+ logging.debug(output)
try:
result.check_returncode()
except subprocess.CalledProcessError as e:
@@ -180,7 +208,7 @@ def run_once(*, corpus, test_list, build_dir, use_valgrind):
logging.info(e.stdout)
if e.stderr:
logging.info(e.stderr)
- logging.info("Target \"{}\" failed with exit code {}: {}".format(t, e.returncode, " ".join(args)))
+ logging.info("Target \"{}\" failed with exit code {}".format(" ".join(result.args), e.returncode))
sys.exit(1)
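Note on the --par option above: both merge_inputs and run_once now submit one job per fuzz target to a ThreadPoolExecutor and collect output as each job finishes. A minimal sketch of that submit/as_completed pattern; the target names and the job body are placeholders for the real subprocess.run invocations:

    from concurrent.futures import ThreadPoolExecutor, as_completed

    def job(target):
        return 'Run {}'.format(target)          # stand-in for the fuzz binary invocation

    targets = ['target_a', 'target_b', 'target_c']   # hypothetical names
    with ThreadPoolExecutor(max_workers=4) as fuzz_pool:
        jobs = [fuzz_pool.submit(job, t) for t in targets]
        for future in as_completed(jobs):
            print(future.result())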
diff --git a/test/lint/extended-lint-all.sh b/test/lint/extended-lint-all.sh
index 65c51e02f5..be5d9db4a9 100755
--- a/test/lint/extended-lint-all.sh
+++ b/test/lint/extended-lint-all.sh
@@ -1,6 +1,6 @@
#!/usr/bin/env bash
#
-# Copyright (c) 2019 The Bitcoin Core developers
+# Copyright (c) 2019-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
diff --git a/test/lint/extended-lint-cppcheck.sh b/test/lint/extended-lint-cppcheck.sh
index ae18d74ebf..20021d8605 100755
--- a/test/lint/extended-lint-cppcheck.sh
+++ b/test/lint/extended-lint-cppcheck.sh
@@ -1,6 +1,6 @@
#!/usr/bin/env bash
#
-# Copyright (c) 2019 The Bitcoin Core developers
+# Copyright (c) 2019-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
diff --git a/test/lint/lint-circular-dependencies.sh b/test/lint/lint-circular-dependencies.sh
index bbd94dd6c7..6bd02d45ac 100755
--- a/test/lint/lint-circular-dependencies.sh
+++ b/test/lint/lint-circular-dependencies.sh
@@ -1,6 +1,6 @@
#!/usr/bin/env bash
#
-# Copyright (c) 2018-2019 The Bitcoin Core developers
+# Copyright (c) 2018-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
diff --git a/test/lint/lint-format-strings.sh b/test/lint/lint-format-strings.sh
index 184c3682c8..cb3ec09ae6 100755
--- a/test/lint/lint-format-strings.sh
+++ b/test/lint/lint-format-strings.sh
@@ -1,6 +1,6 @@
#!/usr/bin/env bash
#
-# Copyright (c) 2018-2019 The Bitcoin Core developers
+# Copyright (c) 2018-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
diff --git a/test/lint/lint-includes.sh b/test/lint/lint-includes.sh
index 1cece6a525..611bd4a8c4 100755
--- a/test/lint/lint-includes.sh
+++ b/test/lint/lint-includes.sh
@@ -1,6 +1,6 @@
#!/usr/bin/env bash
#
-# Copyright (c) 2018-2019 The Bitcoin Core developers
+# Copyright (c) 2018-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
@@ -64,12 +64,12 @@ EXPECTED_BOOST_INCLUDES=(
boost/preprocessor/cat.hpp
boost/preprocessor/stringize.hpp
boost/signals2/connection.hpp
- boost/signals2/last_value.hpp
+ boost/signals2/optional_last_value.hpp
boost/signals2/signal.hpp
boost/test/unit_test.hpp
- boost/thread.hpp
boost/thread/condition_variable.hpp
boost/thread/mutex.hpp
+ boost/thread/shared_mutex.hpp
boost/thread/thread.hpp
boost/variant.hpp
boost/variant/apply_visitor.hpp
diff --git a/test/lint/lint-locale-dependence.sh b/test/lint/lint-locale-dependence.sh
index 3a28c286fc..2e5b801849 100755
--- a/test/lint/lint-locale-dependence.sh
+++ b/test/lint/lint-locale-dependence.sh
@@ -1,5 +1,5 @@
#!/usr/bin/env bash
-# Copyright (c) 2018-2019 The Bitcoin Core developers
+# Copyright (c) 2018-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
diff --git a/test/lint/lint-python-utf8-encoding.sh b/test/lint/lint-python-utf8-encoding.sh
index 773855bed1..7257919c98 100755
--- a/test/lint/lint-python-utf8-encoding.sh
+++ b/test/lint/lint-python-utf8-encoding.sh
@@ -1,6 +1,6 @@
#!/usr/bin/env bash
#
-# Copyright (c) 2018 The Bitcoin Core developers
+# Copyright (c) 2018-2019 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
diff --git a/test/lint/lint-python.sh b/test/lint/lint-python.sh
index 86ac5a930f..72e8ef7c7d 100755
--- a/test/lint/lint-python.sh
+++ b/test/lint/lint-python.sh
@@ -7,6 +7,7 @@
# Check for specified flake8 warnings in python files.
export LC_ALL=C
+export MYPY_CACHE_DIR="${BASE_ROOT_DIR}/test/.mypy_cache"
enabled=(
E101 # indentation contains mixed spaces and tabs
@@ -38,7 +39,6 @@ enabled=(
E711 # comparison to None should be 'if cond is None:'
E714 # test for object identity should be "is not"
E721 # do not compare types, use "isinstance()"
- E741 # do not use variables named "l", "O", or "I"
E742 # do not define classes named "l", "O", or "I"
E743 # do not define functions named "l", "O", or "I"
E901 # SyntaxError: invalid syntax
@@ -89,10 +89,20 @@ elif PYTHONWARNINGS="ignore" flake8 --version | grep -q "Python 2"; then
exit 0
fi
-PYTHONWARNINGS="ignore" flake8 --ignore=B,C,E,F,I,N,W --select=$(IFS=","; echo "${enabled[*]}") $(
+EXIT_CODE=0
+
+if ! PYTHONWARNINGS="ignore" flake8 --ignore=B,C,E,F,I,N,W --select=$(IFS=","; echo "${enabled[*]}") $(
if [[ $# == 0 ]]; then
git ls-files "*.py"
else
echo "$@"
fi
-)
+); then
+ EXIT_CODE=1
+fi
+
+if ! mypy --ignore-missing-imports $(git ls-files "test/functional/*.py"); then
+ EXIT_CODE=1
+fi
+
+exit $EXIT_CODE
diff --git a/test/lint/lint-shebang.sh b/test/lint/lint-shebang.sh
index a666fdfecf..a5c8aa42b2 100755
--- a/test/lint/lint-shebang.sh
+++ b/test/lint/lint-shebang.sh
@@ -1,5 +1,5 @@
#!/usr/bin/env bash
-# Copyright (c) 2018 The Bitcoin Core developers
+# Copyright (c) 2018-2019 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
diff --git a/test/lint/lint-shell.sh b/test/lint/lint-shell.sh
index f59b2c9945..9a26cd9c02 100755
--- a/test/lint/lint-shell.sh
+++ b/test/lint/lint-shell.sh
@@ -1,6 +1,6 @@
#!/usr/bin/env bash
#
-# Copyright (c) 2018-2019 The Bitcoin Core developers
+# Copyright (c) 2018-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
@@ -25,7 +25,6 @@ disabled=(
disabled_gitian=(
SC2094 # Make sure not to read and write the same file in the same pipeline.
SC2129 # Consider using { cmd1; cmd2; } >> file instead of individual redirects.
- SC2230 # which is non-standard. Use builtin 'command -v' instead.
)
EXIT_CODE=0
@@ -35,8 +34,9 @@ if ! command -v shellcheck > /dev/null; then
exit $EXIT_CODE
fi
+SHELLCHECK_CMD=(shellcheck --external-sources --check-sourced)
EXCLUDE="--exclude=$(IFS=','; echo "${disabled[*]}")"
-if ! shellcheck "$EXCLUDE" $(git ls-files -- '*.sh' | grep -vE 'src/(leveldb|secp256k1|univalue)/'); then
+if ! "${SHELLCHECK_CMD[@]}" "$EXCLUDE" $(git ls-files -- '*.sh' | grep -vE 'src/(leveldb|secp256k1|univalue)/'); then
EXIT_CODE=1
fi
@@ -48,13 +48,14 @@ fi
EXCLUDE_GITIAN=${EXCLUDE}",$(IFS=','; echo "${disabled_gitian[*]}")"
for descriptor in $(git ls-files -- 'contrib/gitian-descriptors/*.yml')
do
- echo
- echo "$descriptor"
+ script=$(basename "$descriptor")
# Use #!/bin/bash as gitian-builder/bin/gbuild does to complete a script.
- SCRIPT=$'#!/bin/bash\n'$(yq -r .script "$descriptor")
- if ! echo "$SCRIPT" | shellcheck "$EXCLUDE_GITIAN" -; then
+ echo "#!/bin/bash" > $script
+ yq -r .script "$descriptor" >> $script
+ if ! "${SHELLCHECK_CMD[@]}" "$EXCLUDE_GITIAN" $script; then
EXIT_CODE=1
fi
+ rm $script
done
exit $EXIT_CODE
diff --git a/test/lint/lint-spelling.ignore-words.txt b/test/lint/lint-spelling.ignore-words.txt
index a7a97eb41f..34f54325b3 100644
--- a/test/lint/lint-spelling.ignore-words.txt
+++ b/test/lint/lint-spelling.ignore-words.txt
@@ -14,3 +14,4 @@ setban
hist
ser
unselect
+lowercased
diff --git a/test/sanitizer_suppressions/tsan b/test/sanitizer_suppressions/tsan
index 70eea34363..16a0bf8ea8 100644
--- a/test/sanitizer_suppressions/tsan
+++ b/test/sanitizer_suppressions/tsan
@@ -1,6 +1,36 @@
# ThreadSanitizer suppressions
# ============================
+# double locks (TODO fix)
+mutex:g_genesis_wait_mutex
+mutex:Interrupt
+mutex:CThreadInterrupt
+mutex:CConnman::Interrupt
+mutex:CConnman::WakeMessageHandler
+mutex:CConnman::ThreadOpenConnections
+mutex:CConnman::ThreadOpenAddedConnections
+mutex:CConnman::SocketHandler
+mutex:UpdateTip
+mutex:PeerLogicValidation::UpdatedBlockTip
+mutex:g_best_block_mutex
+# race (TODO fix)
+race:CConnman::WakeMessageHandler
+race:CConnman::ThreadMessageHandler
+race:fHaveGenesis
+race:ProcessNewBlock
+race:ThreadImport
+race:LoadWallet
+race:WalletBatch::WriteHDChain
+race:BerkeleyDatabase
+race:zmq::*
+race:bitcoin-qt
+# deadlock (TODO fix)
+deadlock:CConnman::ForNode
+deadlock:CConnman::GetNodeStats
+deadlock:CChainState::ConnectTip
+deadlock:UpdateTip
+deadlock:wallet_tests::CreateWalletFromFile
+
# WalletBatch (unidentified deadlock)
deadlock:WalletBatch