Diffstat (limited to 'test/functional')
-rw-r--r--  test/functional/README.md | 4
-rwxr-xr-x  test/functional/combine_logs.py | 56
-rw-r--r--  test/functional/data/invalid_txs.py | 180
-rwxr-xr-x  test/functional/example_test.py | 4
-rwxr-xr-x  test/functional/feature_assumevalid.py | 2
-rwxr-xr-x  test/functional/feature_block.py | 136
-rwxr-xr-x  test/functional/feature_cltv.py | 1
-rwxr-xr-x  test/functional/feature_config_args.py | 17
-rwxr-xr-x  test/functional/feature_dbcrash.py | 2
-rwxr-xr-x  test/functional/feature_dersig.py | 1
-rwxr-xr-x  test/functional/feature_pruning.py | 6
-rwxr-xr-x  test/functional/feature_segwit.py | 12
-rwxr-xr-x  test/functional/feature_shutdown.py | 34
-rwxr-xr-x  test/functional/interface_http.py | 12
-rwxr-xr-x  test/functional/interface_rpc.py | 57
-rwxr-xr-x  test/functional/interface_zmq.py | 8
-rwxr-xr-x  test/functional/mempool_accept.py | 28
-rwxr-xr-x  test/functional/mempool_persist.py | 14
-rwxr-xr-x  test/functional/mempool_resurrect.py | 5
-rwxr-xr-x  test/functional/mining_basic.py | 24
-rwxr-xr-x  test/functional/mining_getblocktemplate_longpoll.py | 8
-rwxr-xr-x  test/functional/mining_prioritisetransaction.py | 6
-rwxr-xr-x  test/functional/p2p_disconnect_ban.py | 2
-rwxr-xr-x  test/functional/p2p_invalid_block.py | 9
-rwxr-xr-x  test/functional/p2p_invalid_locator.py | 2
-rwxr-xr-x  test/functional/p2p_invalid_messages.py | 179
-rwxr-xr-x  test/functional/p2p_invalid_tx.py | 24
-rwxr-xr-x  test/functional/p2p_segwit.py | 24
-rwxr-xr-x  test/functional/p2p_timeouts.py | 30
-rwxr-xr-x  test/functional/rpc_bind.py | 13
-rwxr-xr-x  test/functional/rpc_blockchain.py | 6
-rwxr-xr-x  test/functional/rpc_fundrawtransaction.py | 2
-rwxr-xr-x  test/functional/rpc_help.py | 15
-rwxr-xr-x  test/functional/rpc_invalidateblock.py | 69
-rwxr-xr-x  test/functional/rpc_net.py | 10
-rwxr-xr-x  test/functional/rpc_psbt.py | 18
-rwxr-xr-x  test/functional/rpc_rawtransaction.py | 15
-rwxr-xr-x  test/functional/rpc_scantxoutset.py | 8
-rw-r--r--  test/functional/test_framework/blocktools.py | 5
-rwxr-xr-x  test/functional/test_framework/messages.py | 17
-rwxr-xr-x  test/functional/test_framework/mininode.py | 31
-rw-r--r--  test/functional/test_framework/script.py | 20
-rw-r--r--  test/functional/test_framework/socks5.py | 7
-rwxr-xr-x  test/functional/test_framework/test_framework.py | 69
-rwxr-xr-x  test/functional/test_framework/test_node.py | 52
-rw-r--r--  test/functional/test_framework/util.py | 2
-rwxr-xr-x  test/functional/test_framework/wallet_util.py | 99
-rwxr-xr-x  test/functional/test_runner.py | 15
-rwxr-xr-x  test/functional/wallet_address_types.py | 56
-rwxr-xr-x  test/functional/wallet_backup.py | 2
-rwxr-xr-x  test/functional/wallet_balance.py | 133
-rwxr-xr-x  test/functional/wallet_basic.py | 13
-rwxr-xr-x  test/functional/wallet_coinbase_category.py | 59
-rwxr-xr-x  test/functional/wallet_create_tx.py | 35
-rwxr-xr-x  test/functional/wallet_dump.py | 2
-rwxr-xr-x  test/functional/wallet_encryption.py | 6
-rwxr-xr-x  test/functional/wallet_groups.py | 2
-rwxr-xr-x  test/functional/wallet_import_rescan.py | 25
-rwxr-xr-x  test/functional/wallet_import_with_label.py | 135
-rwxr-xr-x  test/functional/wallet_importmulti.py | 856
-rwxr-xr-x  test/functional/wallet_importprunedfunds.py | 2
-rwxr-xr-x  test/functional/wallet_keypool_topup.py | 63
-rwxr-xr-x  test/functional/wallet_listtransactions.py | 10
-rwxr-xr-x  test/functional/wallet_multiwallet.py | 3
-rwxr-xr-x  test/functional/wallet_txn_clone.py | 2
65 files changed, 2001 insertions, 763 deletions
diff --git a/test/functional/README.md b/test/functional/README.md
index d40052ac93..bce0d5db2e 100644
--- a/test/functional/README.md
+++ b/test/functional/README.md
@@ -20,6 +20,10 @@ don't have test cases for.
- Where possible, try to adhere to [PEP-8 guidelines](https://www.python.org/dev/peps/pep-0008/)
- Use a python linter like flake8 before submitting PRs to catch common style
nits (eg trailing whitespace, unused imports, etc)
+- The oldest supported Python version is specified in [doc/dependencies.md](/doc/dependencies.md).
+ Consider using [pyenv](https://github.com/pyenv/pyenv), which checks [.python-version](/.python-version),
+ to prevent accidentally introducing modern syntax from an unsupported Python version.
+ The Travis linter also checks this, but [possibly not in all cases](https://github.com/bitcoin/bitcoin/pull/14884#discussion_r239585126).
- See [the python lint script](/test/lint/lint-python.sh) that checks for violations that
could lead to bugs and issues in the test code.
- Avoid wildcard imports where possible
diff --git a/test/functional/combine_logs.py b/test/functional/combine_logs.py
index 3230d5cb6b..5bb3b5c094 100755
--- a/test/functional/combine_logs.py
+++ b/test/functional/combine_logs.py
@@ -2,7 +2,9 @@
"""Combine logs from multiple bitcoin nodes as well as the test_framework log.
This streams the combined log output to stdout. Use combine_logs.py > outputfile
-to write to an outputfile."""
+to write to an outputfile.
+
+If no argument is provided, the most recent test directory will be used."""
import argparse
from collections import defaultdict, namedtuple
@@ -11,6 +13,13 @@ import itertools
import os
import re
import sys
+import tempfile
+
+# N.B.: don't import any local modules here - this script must remain executable
+# without the parent module installed.
+
+# Should match same symbol in `test_framework.test_framework`.
+TMPDIR_PREFIX = "bitcoin_func_test_"
# Matches on the date format at the start of the log event
TIMESTAMP_PATTERN = re.compile(r"^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d{6})?Z")
@@ -19,22 +28,30 @@ LogEvent = namedtuple('LogEvent', ['timestamp', 'source', 'event'])
def main():
"""Main function. Parses args, reads the log files and renders them as text or html."""
-
- parser = argparse.ArgumentParser(usage='%(prog)s [options] <test temporary directory>', description=__doc__)
+ parser = argparse.ArgumentParser(
+ description=__doc__, formatter_class=argparse.RawTextHelpFormatter)
+ parser.add_argument(
+ 'testdir', nargs='?', default='',
+ help=('temporary test directory to combine logs from. '
+ 'Defaults to the most recent'))
parser.add_argument('-c', '--color', dest='color', action='store_true', help='outputs the combined log with events colored by source (requires posix terminal colors. Use less -r for viewing)')
parser.add_argument('--html', dest='html', action='store_true', help='outputs the combined log as html. Requires jinja2. pip install jinja2')
- args, unknown_args = parser.parse_known_args()
+ args = parser.parse_args()
if args.html and args.color:
print("Only one out of --color or --html should be specified")
sys.exit(1)
- # There should only be one unknown argument - the path of the temporary test directory
- if len(unknown_args) != 1:
- print("Unexpected arguments" + str(unknown_args))
+ testdir = args.testdir or find_latest_test_dir()
+
+ if not testdir:
+ print("No test directories found")
sys.exit(1)
- log_events = read_logs(unknown_args[0])
+ if not args.testdir:
+ print("Opening latest test directory: {}".format(testdir), file=sys.stderr)
+
+ log_events = read_logs(testdir)
print_logs(log_events, color=args.color, html=args.html)
@@ -53,6 +70,29 @@ def read_logs(tmp_dir):
return heapq.merge(*[get_log_events(source, f) for source, f in files])
+
+def find_latest_test_dir():
+ """Returns the latest tmpfile test directory prefix."""
+ tmpdir = tempfile.gettempdir()
+
+ def join_tmp(basename):
+ return os.path.join(tmpdir, basename)
+
+ def is_valid_test_tmpdir(basename):
+ fullpath = join_tmp(basename)
+ return (
+ os.path.isdir(fullpath)
+ and basename.startswith(TMPDIR_PREFIX)
+ and os.access(fullpath, os.R_OK)
+ )
+
+ testdir_paths = [
+ join_tmp(name) for name in os.listdir(tmpdir) if is_valid_test_tmpdir(name)
+ ]
+
+ return max(testdir_paths, key=os.path.getmtime) if testdir_paths else None
+
+
def get_log_events(source, logfile):
"""Generator function that returns individual log events.
diff --git a/test/functional/data/invalid_txs.py b/test/functional/data/invalid_txs.py
new file mode 100644
index 0000000000..02deae92f3
--- /dev/null
+++ b/test/functional/data/invalid_txs.py
@@ -0,0 +1,180 @@
+#!/usr/bin/env python3
+# Copyright (c) 2015-2018 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""
+Templates for constructing various sorts of invalid transactions.
+
+These templates (or an iterator over all of them) can be reused in different
+contexts to test using a number of invalid transaction types.
+
+Hopefully this makes it easier to get coverage of a full variety of tx
+validation checks through different interfaces (AcceptBlock, AcceptToMemPool,
+etc.) without repeating ourselves.
+
+Invalid tx cases not covered here can be found by running:
+
+ $ diff \
+ <(grep -IREho "bad-txns[a-zA-Z-]+" src | sort -u) \
+ <(grep -IEho "bad-txns[a-zA-Z-]+" test/functional/data/invalid_txs.py | sort -u)
+
+"""
+import abc
+
+from test_framework.messages import CTransaction, CTxIn, CTxOut, COutPoint
+from test_framework import script as sc
+from test_framework.blocktools import create_tx_with_script, MAX_BLOCK_SIGOPS
+
+basic_p2sh = sc.CScript([sc.OP_HASH160, sc.hash160(sc.CScript([sc.OP_0])), sc.OP_EQUAL])
+
+
+class BadTxTemplate:
+ """Allows simple construction of a certain kind of invalid tx. Base class to be subclassed."""
+ __metaclass__ = abc.ABCMeta
+
+ # The expected error code given by bitcoind upon submission of the tx.
+ reject_reason = ""
+
+ # Only specified if it differs from mempool acceptance error.
+ block_reject_reason = ""
+
+ # Do we expect to be disconnected after submitting this tx?
+ expect_disconnect = False
+
+ # Is this tx considered valid when included in a block, but not for acceptance into
+ # the mempool (i.e. does it violate policy but not consensus)?
+ valid_in_block = False
+
+ def __init__(self, *, spend_tx=None, spend_block=None):
+ self.spend_tx = spend_block.vtx[0] if spend_block else spend_tx
+ self.spend_avail = sum(o.nValue for o in self.spend_tx.vout)
+ self.valid_txin = CTxIn(COutPoint(self.spend_tx.sha256, 0), b"", 0xffffffff)
+
+ @abc.abstractmethod
+ def get_tx(self, *args, **kwargs):
+ """Return a CTransaction that is invalid per the subclass."""
+ pass
+
+
+class OutputMissing(BadTxTemplate):
+ reject_reason = "bad-txns-vout-empty"
+ expect_disconnect = False
+
+ def get_tx(self):
+ tx = CTransaction()
+ tx.vin.append(self.valid_txin)
+ tx.calc_sha256()
+ return tx
+
+
+class InputMissing(BadTxTemplate):
+ reject_reason = "bad-txns-vin-empty"
+ expect_disconnect = False
+
+ def get_tx(self):
+ tx = CTransaction()
+ tx.vout.append(CTxOut(0, sc.CScript([sc.OP_TRUE] * 100)))
+ tx.calc_sha256()
+ return tx
+
+
+class SizeTooSmall(BadTxTemplate):
+ reject_reason = "tx-size-small"
+ expect_disconnect = False
+ valid_in_block = True
+
+ def get_tx(self):
+ tx = CTransaction()
+ tx.vin.append(self.valid_txin)
+ tx.vout.append(CTxOut(0, sc.CScript([sc.OP_TRUE])))
+ tx.calc_sha256()
+ return tx
+
+
+class BadInputOutpointIndex(BadTxTemplate):
+ # Won't be rejected - nonexistent outpoint index is treated as an orphan since the coins
+ # database can't distinguish between spent outpoints and outpoints which never existed.
+ reject_reason = None
+ expect_disconnect = False
+
+ def get_tx(self):
+ num_indices = len(self.spend_tx.vin)
+ bad_idx = num_indices + 100
+
+ tx = CTransaction()
+ tx.vin.append(CTxIn(COutPoint(self.spend_tx.sha256, bad_idx), b"", 0xffffffff))
+ tx.vout.append(CTxOut(0, basic_p2sh))
+ tx.calc_sha256()
+ return tx
+
+
+class DuplicateInput(BadTxTemplate):
+ reject_reason = 'bad-txns-inputs-duplicate'
+ expect_disconnect = True
+
+ def get_tx(self):
+ tx = CTransaction()
+ tx.vin.append(self.valid_txin)
+ tx.vin.append(self.valid_txin)
+ tx.vout.append(CTxOut(1, basic_p2sh))
+ tx.calc_sha256()
+ return tx
+
+
+class NonexistentInput(BadTxTemplate):
+ reject_reason = None # Added as an orphan tx.
+ expect_disconnect = False
+
+ def get_tx(self):
+ tx = CTransaction()
+ tx.vin.append(CTxIn(COutPoint(self.spend_tx.sha256 + 1, 0), b"", 0xffffffff))
+ tx.vin.append(self.valid_txin)
+ tx.vout.append(CTxOut(1, basic_p2sh))
+ tx.calc_sha256()
+ return tx
+
+
+class SpendTooMuch(BadTxTemplate):
+ reject_reason = 'bad-txns-in-belowout'
+ expect_disconnect = True
+
+ def get_tx(self):
+ return create_tx_with_script(
+ self.spend_tx, 0, script_pub_key=basic_p2sh, amount=(self.spend_avail + 1))
+
+
+class SpendNegative(BadTxTemplate):
+ reject_reason = 'bad-txns-vout-negative'
+ expect_disconnect = True
+
+ def get_tx(self):
+ return create_tx_with_script(self.spend_tx, 0, amount=-1)
+
+
+class InvalidOPIFConstruction(BadTxTemplate):
+ reject_reason = "mandatory-script-verify-flag-failed (Invalid OP_IF construction)"
+ expect_disconnect = True
+ valid_in_block = True
+
+ def get_tx(self):
+ return create_tx_with_script(
+ self.spend_tx, 0, script_sig=b'\x64' * 35,
+ amount=(self.spend_avail // 2))
+
+
+class TooManySigops(BadTxTemplate):
+ reject_reason = "bad-txns-too-many-sigops"
+ block_reject_reason = "bad-blk-sigops, out-of-bounds SigOpCount"
+ expect_disconnect = False
+
+ def get_tx(self):
+ lotsa_checksigs = sc.CScript([sc.OP_CHECKSIG] * (MAX_BLOCK_SIGOPS))
+ return create_tx_with_script(
+ self.spend_tx, 0,
+ script_pub_key=lotsa_checksigs,
+ amount=1)
+
+
+def iter_all_templates():
+ """Iterate through all bad transaction template types."""
+ return BadTxTemplate.__subclasses__()
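As orientation for the new module, a minimal sketch of the intended consumption pattern, mirroring the feature_block.py loop added later in this diff. `spendable_tx` is a stand-in for any confirmed transaction a test controls, and `submit_and_assert` is a hypothetical callback, not part of the patch:

from data import invalid_txs

def check_rejections(spendable_tx, submit_and_assert):
    # submit_and_assert is a hypothetical callback that submits a tx and checks
    # the node's response against the expected reject reason / disconnect flag.
    for TxTemplate in invalid_txs.iter_all_templates():
        template = TxTemplate(spend_tx=spendable_tx)
        if template.valid_in_block:
            continue  # policy-only failures are skipped when testing block acceptance
        submit_and_assert(
            template.get_tx(),
            reject_reason=template.block_reject_reason or template.reject_reason,
            expect_disconnect=template.expect_disconnect,
        )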
diff --git a/test/functional/example_test.py b/test/functional/example_test.py
index 3f15367a75..be3544ee74 100755
--- a/test/functional/example_test.py
+++ b/test/functional/example_test.py
@@ -164,13 +164,13 @@ class ExampleTest(BitcoinTestFramework):
self.tip = int(self.nodes[0].getbestblockhash(), 16)
self.block_time = self.nodes[0].getblock(self.nodes[0].getbestblockhash())['time'] + 1
- height = 1
+ height = self.nodes[0].getblockcount()
for i in range(10):
# Use the mininode and blocktools functionality to manually build a block
# Calling the generate() rpc is easier, but this allows us to exactly
# control the blocks and transactions.
- block = create_block(self.tip, create_coinbase(height), self.block_time)
+ block = create_block(self.tip, create_coinbase(height+1), self.block_time)
block.solve()
block_message = msg_block(block)
# Send message is used to send a P2P message to the node over our P2PInterface
diff --git a/test/functional/feature_assumevalid.py b/test/functional/feature_assumevalid.py
index 3d0467038d..12a4ce9aff 100755
--- a/test/functional/feature_assumevalid.py
+++ b/test/functional/feature_assumevalid.py
@@ -180,7 +180,7 @@ class AssumeValidTest(BitcoinTestFramework):
for i in range(2202):
p2p1.send_message(msg_block(self.blocks[i]))
# Syncing 2200 blocks can take a while on slow systems. Give it plenty of time to sync.
- p2p1.sync_with_ping(120)
+ p2p1.sync_with_ping(150)
assert_equal(self.nodes[1].getblock(self.nodes[1].getbestblockhash())['height'], 2202)
# Send blocks to node2. Block 102 will be rejected.
diff --git a/test/functional/feature_block.py b/test/functional/feature_block.py
index 628cefb76d..5253ff7aaa 100755
--- a/test/functional/feature_block.py
+++ b/test/functional/feature_block.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2015-2018 The Bitcoin Core developers
+# Copyright (c) 2015-2019 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test block processing."""
@@ -7,7 +7,13 @@ import copy
import struct
import time
-from test_framework.blocktools import create_block, create_coinbase, create_tx_with_script, get_legacy_sigopcount_block
+from test_framework.blocktools import (
+ create_block,
+ create_coinbase,
+ create_tx_with_script,
+ get_legacy_sigopcount_block,
+ MAX_BLOCK_SIGOPS,
+)
from test_framework.key import CECKey
from test_framework.messages import (
CBlock,
@@ -45,8 +51,7 @@ from test_framework.script import (
)
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal
-
-MAX_BLOCK_SIGOPS = 20000
+from data import invalid_txs
# Use this class for tests that require behavior other than normal "mininode" behavior.
# For now, it is used to serialize a bloated varint (b64).
@@ -95,16 +100,21 @@ class FullBlockTest(BitcoinTestFramework):
self.save_spendable_output()
self.sync_blocks([b0])
+ # These constants chosen specifically to trigger an immature coinbase spend
+ # at a certain time below.
+ NUM_BUFFER_BLOCKS_TO_GENERATE = 99
+ NUM_OUTPUTS_TO_COLLECT = 33
+
# Allow the block to mature
blocks = []
- for i in range(99):
- blocks.append(self.next_block(5000 + i))
+ for i in range(NUM_BUFFER_BLOCKS_TO_GENERATE):
+ blocks.append(self.next_block("maturitybuffer.{}".format(i)))
self.save_spendable_output()
self.sync_blocks(blocks)
# collect spendable outputs now to avoid cluttering the code later on
out = []
- for i in range(33):
+ for i in range(NUM_OUTPUTS_TO_COLLECT):
out.append(self.get_spendable_output())
# Start by building a couple of blocks on top (which output is spent is
@@ -116,7 +126,48 @@ class FullBlockTest(BitcoinTestFramework):
b2 = self.next_block(2, spend=out[1])
self.save_spendable_output()
- self.sync_blocks([b1, b2])
+ self.sync_blocks([b1, b2], timeout=4)
+
+ # Select a txn with an output eligible for spending. This won't actually be spent,
+ # since we're testing submission of a series of blocks with invalid txns.
+ attempt_spend_tx = out[2]
+
+ # Submit blocks for rejection, each of which contains a single transaction
+ # (aside from coinbase) which should be considered invalid.
+ for TxTemplate in invalid_txs.iter_all_templates():
+ template = TxTemplate(spend_tx=attempt_spend_tx)
+
+ if template.valid_in_block:
+ continue
+
+ self.log.info("Reject block with invalid tx: %s", TxTemplate.__name__)
+ blockname = "for_invalid.%s" % TxTemplate.__name__
+ badblock = self.next_block(blockname)
+ badtx = template.get_tx()
+ if TxTemplate != invalid_txs.InputMissing:
+ self.sign_tx(badtx, attempt_spend_tx)
+ else:
+ # Segwit is active in regtest at this point, so to deserialize a
+ # transaction without any inputs correctly, we set the outputs
+ # to an empty list. This is a hack, as the serialization of an
+ # empty list of outputs is deserialized as flags==0 and thus
+ # deserialization of the outputs is skipped.
+ # A policy check requires "loose" txs to be of a minimum size,
+ # so vtx is not set to be empty in the TxTemplate class and we
+ # only apply the workaround where txs are not "loose", i.e. in
+ # blocks.
+ #
+ # The workaround has the purpose that both sides calculate
+ # the same tx hash in the merkle tree
+ badtx.vout = []
+ badtx.rehash()
+ badblock = self.update_block(blockname, [badtx])
+ self.sync_blocks(
+ [badblock], success=False,
+ reject_reason=(template.block_reject_reason or template.reject_reason),
+ reconnect=True, timeout=2)
+
+ self.move_tip(2)
# Fork like this:
#
@@ -579,14 +630,14 @@ class FullBlockTest(BitcoinTestFramework):
while b47.sha256 < target:
b47.nNonce += 1
b47.rehash()
- self.sync_blocks([b47], False, request_block=False)
+ self.sync_blocks([b47], False, force_send=True, reject_reason='high-hash')
self.log.info("Reject a block with a timestamp >2 hours in the future")
self.move_tip(44)
b48 = self.next_block(48, solve=False)
b48.nTime = int(time.time()) + 60 * 60 * 3
b48.solve()
- self.sync_blocks([b48], False, request_block=False)
+ self.sync_blocks([b48], False, force_send=True, reject_reason='time-too-new')
self.log.info("Reject a block with invalid merkle hash")
self.move_tip(44)
@@ -600,7 +651,7 @@ class FullBlockTest(BitcoinTestFramework):
b50 = self.next_block(50)
b50.nBits = b50.nBits - 1
b50.solve()
- self.sync_blocks([b50], False, request_block=False, reconnect=True)
+ self.sync_blocks([b50], False, force_send=True, reject_reason='bad-diffbits', reconnect=True)
self.log.info("Reject a block with two coinbase transactions")
self.move_tip(44)
@@ -630,7 +681,7 @@ class FullBlockTest(BitcoinTestFramework):
b54 = self.next_block(54, spend=out[15])
b54.nTime = b35.nTime - 1
b54.solve()
- self.sync_blocks([b54], False, request_block=False)
+ self.sync_blocks([b54], False, force_send=True, reject_reason='time-too-old')
# valid timestamp
self.move_tip(53)
@@ -824,7 +875,7 @@ class FullBlockTest(BitcoinTestFramework):
tx.vin.append(CTxIn(COutPoint(b64a.vtx[1].sha256, 0)))
b64a = self.update_block("64a", [tx])
assert_equal(len(b64a.serialize()), MAX_BLOCK_BASE_SIZE + 8)
- self.sync_blocks([b64a], success=False, reject_reason='non-canonical ReadCompactSize():')
+ self.sync_blocks([b64a], success=False, reject_reason='non-canonical ReadCompactSize()')
# bitcoind doesn't disconnect us for sending a bloated block, but if we subsequently
# resend the header message, it won't send us the getdata message again. Just
@@ -874,7 +925,7 @@ class FullBlockTest(BitcoinTestFramework):
# \-> b67 (20)
#
#
- self.log.info("Reject a block with a transaction double spending a transaction creted in the same block")
+ self.log.info("Reject a block with a transaction double spending a transaction created in the same block")
self.move_tip(65)
b67 = self.next_block(67)
tx1 = self.create_and_sign_transaction(out[20], out[20].vout[0].nValue)
@@ -1078,11 +1129,11 @@ class FullBlockTest(BitcoinTestFramework):
self.move_tip(77)
b80 = self.next_block(80, spend=out[25])
- self.sync_blocks([b80], False, request_block=False)
+ self.sync_blocks([b80], False, force_send=True)
self.save_spendable_output()
b81 = self.next_block(81, spend=out[26])
- self.sync_blocks([b81], False, request_block=False) # other chain is same length
+ self.sync_blocks([b81], False, force_send=True) # other chain is same length
self.save_spendable_output()
b82 = self.next_block(82, spend=out[27])
@@ -1169,7 +1220,7 @@ class FullBlockTest(BitcoinTestFramework):
blocks = []
spend = out[32]
for i in range(89, LARGE_REORG_SIZE + 89):
- b = self.next_block(i, spend)
+ b = self.next_block(i, spend, version=4)
tx = CTransaction()
script_length = MAX_BLOCK_BASE_SIZE - len(b.serialize()) - 69
script_output = CScript([b'\x00' * script_length])
@@ -1181,26 +1232,38 @@ class FullBlockTest(BitcoinTestFramework):
self.save_spendable_output()
spend = self.get_spendable_output()
- self.sync_blocks(blocks, True, timeout=180)
+ self.sync_blocks(blocks, True, timeout=480)
chain1_tip = i
# now create alt chain of same length
self.move_tip(88)
blocks2 = []
for i in range(89, LARGE_REORG_SIZE + 89):
- blocks2.append(self.next_block("alt" + str(i)))
- self.sync_blocks(blocks2, False, request_block=False)
+ blocks2.append(self.next_block("alt" + str(i), version=4))
+ self.sync_blocks(blocks2, False, force_send=True)
# extend alt chain to trigger re-org
- block = self.next_block("alt" + str(chain1_tip + 1))
- self.sync_blocks([block], True, timeout=180)
+ block = self.next_block("alt" + str(chain1_tip + 1), version=4)
+ self.sync_blocks([block], True, timeout=480)
# ... and re-org back to the first chain
self.move_tip(chain1_tip)
- block = self.next_block(chain1_tip + 1)
- self.sync_blocks([block], False, request_block=False)
- block = self.next_block(chain1_tip + 2)
- self.sync_blocks([block], True, timeout=180)
+ block = self.next_block(chain1_tip + 1, version=4)
+ self.sync_blocks([block], False, force_send=True)
+ block = self.next_block(chain1_tip + 2, version=4)
+ self.sync_blocks([block], True, timeout=480)
+
+ self.log.info("Reject a block with an invalid block header version")
+ b_v1 = self.next_block('b_v1', version=1)
+ self.sync_blocks([b_v1], success=False, force_send=True, reject_reason='bad-version(0x00000001)')
+
+ self.move_tip(chain1_tip + 2)
+ b_cb34 = self.next_block('b_cb34', version=4)
+ b_cb34.vtx[0].vin[0].scriptSig = b_cb34.vtx[0].vin[0].scriptSig[:-1]
+ b_cb34.vtx[0].rehash()
+ b_cb34.hashMerkleRoot = b_cb34.calc_merkle_root()
+ b_cb34.solve()
+ self.sync_blocks([b_cb34], success=False, reject_reason='bad-cb-height', reconnect=True)
# Helper methods
################
@@ -1229,7 +1292,7 @@ class FullBlockTest(BitcoinTestFramework):
tx.rehash()
return tx
- def next_block(self, number, spend=None, additional_coinbase_value=0, script=CScript([OP_TRUE]), solve=True):
+ def next_block(self, number, spend=None, additional_coinbase_value=0, script=CScript([OP_TRUE]), solve=True, *, version=1):
if self.tip is None:
base_block_hash = self.genesis_hash
block_time = int(time.time()) + 1
@@ -1242,11 +1305,11 @@ class FullBlockTest(BitcoinTestFramework):
coinbase.vout[0].nValue += additional_coinbase_value
coinbase.rehash()
if spend is None:
- block = create_block(base_block_hash, coinbase, block_time)
+ block = create_block(base_block_hash, coinbase, block_time, version=version)
else:
coinbase.vout[0].nValue += spend.vout[0].nValue - 1 # all but one satoshi to fees
coinbase.rehash()
- block = create_block(base_block_hash, coinbase, block_time)
+ block = create_block(base_block_hash, coinbase, block_time, version=version)
tx = self.create_tx(spend, 0, 1, script) # spend 1 satoshi
self.sign_tx(tx, spend)
self.add_transactions_to_block(block, [tx])
@@ -1288,7 +1351,7 @@ class FullBlockTest(BitcoinTestFramework):
self.blocks[block_number] = block
return block
- def bootstrap_p2p(self):
+ def bootstrap_p2p(self, timeout=10):
"""Add a P2P connection to the node.
Helper to connect and wait for version handshake."""
@@ -1299,24 +1362,25 @@ class FullBlockTest(BitcoinTestFramework):
# an INV for the next block and receive two getheaders - one for the
# IBD and one for the INV. We'd respond to both and could get
# unexpectedly disconnected if the DoS score for that error is 50.
- self.nodes[0].p2p.wait_for_getheaders(timeout=5)
+ self.nodes[0].p2p.wait_for_getheaders(timeout=timeout)
- def reconnect_p2p(self):
+ def reconnect_p2p(self, timeout=60):
"""Tear down and bootstrap the P2P connection to the node.
The node gets disconnected several times in this test. This helper
method reconnects the p2p and restarts the network thread."""
self.nodes[0].disconnect_p2ps()
- self.bootstrap_p2p()
+ self.bootstrap_p2p(timeout=timeout)
- def sync_blocks(self, blocks, success=True, reject_reason=None, request_block=True, reconnect=False, timeout=60):
+ def sync_blocks(self, blocks, success=True, reject_reason=None, force_send=False, reconnect=False, timeout=60):
"""Sends blocks to test node. Syncs and verifies that tip has advanced to most recent block.
Call with success = False if the tip shouldn't advance to the most recent block."""
- self.nodes[0].p2p.send_blocks_and_test(blocks, self.nodes[0], success=success, reject_reason=reject_reason, request_block=request_block, timeout=timeout, expect_disconnect=reconnect)
+ self.nodes[0].p2p.send_blocks_and_test(blocks, self.nodes[0], success=success, reject_reason=reject_reason, force_send=force_send, timeout=timeout, expect_disconnect=reconnect)
if reconnect:
- self.reconnect_p2p()
+ self.reconnect_p2p(timeout=timeout)
+
if __name__ == '__main__':
FullBlockTest().main()
diff --git a/test/functional/feature_cltv.py b/test/functional/feature_cltv.py
index f84b08a199..302a5ec1cb 100755
--- a/test/functional/feature_cltv.py
+++ b/test/functional/feature_cltv.py
@@ -25,7 +25,6 @@ CLTV_HEIGHT = 1351
# Reject codes that we might receive in this test
REJECT_INVALID = 16
-REJECT_OBSOLETE = 17
REJECT_NONSTANDARD = 64
def cltv_invalidate(tx):
diff --git a/test/functional/feature_config_args.py b/test/functional/feature_config_args.py
index 492772d5e3..4b3f6603a2 100755
--- a/test/functional/feature_config_args.py
+++ b/test/functional/feature_config_args.py
@@ -30,6 +30,23 @@ class ConfArgsTest(BitcoinTestFramework):
self.nodes[0].assert_start_raises_init_error(expected_msg='Error reading configuration file: parse error on line 1: nono, if you intended to specify a negated option, use nono=1 instead')
with open(inc_conf_file_path, 'w', encoding='utf-8') as conf:
+ conf.write('server=1\nrpcuser=someuser\nrpcpassword=some#pass')
+ self.nodes[0].assert_start_raises_init_error(expected_msg='Error reading configuration file: parse error on line 3, using # in rpcpassword can be ambiguous and should be avoided')
+
+ with open(inc_conf_file_path, 'w', encoding='utf-8') as conf:
+ conf.write('server=1\nrpcuser=someuser\nmain.rpcpassword=some#pass')
+ self.nodes[0].assert_start_raises_init_error(expected_msg='Error reading configuration file: parse error on line 3, using # in rpcpassword can be ambiguous and should be avoided')
+
+ with open(inc_conf_file_path, 'w', encoding='utf-8') as conf:
+ conf.write('server=1\nrpcuser=someuser\n[main]\nrpcpassword=some#pass')
+ self.nodes[0].assert_start_raises_init_error(expected_msg='Error reading configuration file: parse error on line 4, using # in rpcpassword can be ambiguous and should be avoided')
+
+ with open(inc_conf_file_path, 'w', encoding='utf-8') as conf:
+ conf.write('testnot.datadir=1\n[testnet]\n')
+ self.restart_node(0)
+ self.nodes[0].stop_node(expected_stderr='Warning: Section [testnet] is not recognized.' + os.linesep + 'Warning: Section [testnot] is not recognized.')
+
+ with open(inc_conf_file_path, 'w', encoding='utf-8') as conf:
conf.write('') # clear
def run_test(self):
diff --git a/test/functional/feature_dbcrash.py b/test/functional/feature_dbcrash.py
index 70d67aa53a..8b06cc7372 100755
--- a/test/functional/feature_dbcrash.py
+++ b/test/functional/feature_dbcrash.py
@@ -46,7 +46,7 @@ class ChainstateWriteCrashTest(BitcoinTestFramework):
self.num_nodes = 4
self.setup_clean_chain = False
# Need a bit of extra time for the nodes to start up for this test
- self.rpc_timewait = 90
+ self.rpc_timeout = 90
# Set -maxmempool=0 to turn off mempool memory sharing with dbcache
# Set -rpcservertimeout=900 to reduce socket disconnects in this
diff --git a/test/functional/feature_dersig.py b/test/functional/feature_dersig.py
index 16272877e7..9cbc1b39bd 100755
--- a/test/functional/feature_dersig.py
+++ b/test/functional/feature_dersig.py
@@ -22,7 +22,6 @@ DERSIG_HEIGHT = 1251
# Reject codes that we might receive in this test
REJECT_INVALID = 16
-REJECT_OBSOLETE = 17
REJECT_NONSTANDARD = 64
# A canonical signature consists of:
diff --git a/test/functional/feature_pruning.py b/test/functional/feature_pruning.py
index c820ca33e2..9a3f4fae45 100755
--- a/test/functional/feature_pruning.py
+++ b/test/functional/feature_pruning.py
@@ -29,7 +29,7 @@ class PruneTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 6
- self.rpc_timewait = 900
+ self.rpc_timeout = 900
# Create nodes 0 and 1 to mine.
# Create node 2 to test pruning.
@@ -191,6 +191,8 @@ class PruneTest(BitcoinTestFramework):
def reorg_back(self):
# Verify that a block on the old main chain fork has been pruned away
assert_raises_rpc_error(-1, "Block not available (pruned data)", self.nodes[2].getblock, self.forkhash)
+ with self.nodes[2].assert_debug_log(expected_msgs=['block verification stopping at height', '(pruning, no data)']):
+ self.nodes[2].verifychain(checklevel=4, nblocks=0)
self.log.info("Will need to redownload block %d" % self.forkheight)
# Verify that we have enough history to reorg back to the fork point
@@ -249,7 +251,7 @@ class PruneTest(BitcoinTestFramework):
return index
def prune(index, expected_ret=None):
- ret = node.pruneblockchain(height(index))
+ ret = node.pruneblockchain(height=height(index))
# Check the return value. When use_timestamp is True, just check
# that the return value is less than or equal to the expected
# value, because when more than one block is generated per second,
diff --git a/test/functional/feature_segwit.py b/test/functional/feature_segwit.py
index 7098a03f1e..4bcdf9af55 100755
--- a/test/functional/feature_segwit.py
+++ b/test/functional/feature_segwit.py
@@ -90,7 +90,7 @@ class SegWitTest(BitcoinTestFramework):
self.log.info("Verify sigops are counted in GBT with pre-BIP141 rules before the fork")
txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1)
- tmpl = self.nodes[0].getblocktemplate({})
+ tmpl = self.nodes[0].getblocktemplate({'rules': ['segwit']})
assert(tmpl['sizelimit'] == 1000000)
assert('weightlimit' not in tmpl)
assert(tmpl['sigoplimit'] == 20000)
@@ -232,15 +232,7 @@ class SegWitTest(BitcoinTestFramework):
assert(tx.wit.is_null())
assert(txid3 in self.nodes[0].getrawmempool())
- # Now try calling getblocktemplate() without segwit support.
- template = self.nodes[0].getblocktemplate()
-
- # Check that tx1 is the only transaction of the 3 in the template.
- template_txids = [t['txid'] for t in template['transactions']]
- assert(txid2 not in template_txids and txid3 not in template_txids)
- assert(txid1 in template_txids)
-
- # Check that running with segwit support results in all 3 being included.
+ # Check that getblocktemplate includes all transactions.
template = self.nodes[0].getblocktemplate({"rules": ["segwit"]})
template_txids = [t['txid'] for t in template['transactions']]
assert(txid1 in template_txids)
diff --git a/test/functional/feature_shutdown.py b/test/functional/feature_shutdown.py
new file mode 100755
index 0000000000..5084cb1322
--- /dev/null
+++ b/test/functional/feature_shutdown.py
@@ -0,0 +1,34 @@
+#!/usr/bin/env python3
+# Copyright (c) 2018 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test bitcoind shutdown."""
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import assert_equal, get_rpc_proxy, wait_until
+from threading import Thread
+
+def test_long_call(node):
+ block = node.waitfornewblock()
+ assert_equal(block['height'], 0)
+
+class ShutdownTest(BitcoinTestFramework):
+
+ def set_test_params(self):
+ self.setup_clean_chain = True
+ self.num_nodes = 1
+
+ def run_test(self):
+ node = get_rpc_proxy(self.nodes[0].url, 1, timeout=600, coveragedir=self.nodes[0].coverage_dir)
+ # Force connection establishment by executing a dummy command.
+ node.getblockcount()
+ Thread(target=test_long_call, args=(node,)).start()
+ # Wait until the server is executing the above `waitfornewblock`.
+ wait_until(lambda: len(self.nodes[0].getrpcinfo()['active_commands']) == 2)
+ # Wait 1 second after requesting shutdown but not before the `stop` call
+ # finishes. This is to ensure event loop waits for current connections
+ # to close.
+ self.stop_node(0, wait=1000)
+
+if __name__ == '__main__':
+ ShutdownTest().main()
diff --git a/test/functional/interface_http.py b/test/functional/interface_http.py
index e4b86f9e1e..20889366e5 100755
--- a/test/functional/interface_http.py
+++ b/test/functional/interface_http.py
@@ -31,13 +31,13 @@ class HTTPBasicsTest (BitcoinTestFramework):
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
out1 = conn.getresponse().read()
assert(b'"error":null' in out1)
- assert(conn.sock!=None) #according to http/1.1 connection must still be open!
+ assert(conn.sock is not None) #according to http/1.1 connection must still be open!
#send 2nd request without closing connection
conn.request('POST', '/', '{"method": "getchaintips"}', headers)
out1 = conn.getresponse().read()
assert(b'"error":null' in out1) #must also response with a correct json-rpc message
- assert(conn.sock!=None) #according to http/1.1 connection must still be open!
+ assert(conn.sock is not None) #according to http/1.1 connection must still be open!
conn.close()
#same should be if we add keep-alive because this should be the std. behaviour
@@ -48,13 +48,13 @@ class HTTPBasicsTest (BitcoinTestFramework):
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
out1 = conn.getresponse().read()
assert(b'"error":null' in out1)
- assert(conn.sock!=None) #according to http/1.1 connection must still be open!
+ assert(conn.sock is not None) #according to http/1.1 connection must still be open!
#send 2nd request without closing connection
conn.request('POST', '/', '{"method": "getchaintips"}', headers)
out1 = conn.getresponse().read()
assert(b'"error":null' in out1) #must also response with a correct json-rpc message
- assert(conn.sock!=None) #according to http/1.1 connection must still be open!
+ assert(conn.sock is not None) #according to http/1.1 connection must still be open!
conn.close()
#now do the same with "Connection: close"
@@ -65,7 +65,7 @@ class HTTPBasicsTest (BitcoinTestFramework):
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
out1 = conn.getresponse().read()
assert(b'"error":null' in out1)
- assert(conn.sock==None) #now the connection must be closed after the response
+ assert(conn.sock is None) #now the connection must be closed after the response
#node1 (2nd node) is running with disabled keep-alive option
urlNode1 = urllib.parse.urlparse(self.nodes[1].url)
@@ -88,7 +88,7 @@ class HTTPBasicsTest (BitcoinTestFramework):
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
out1 = conn.getresponse().read()
assert(b'"error":null' in out1)
- assert(conn.sock!=None) #connection must be closed because bitcoind should use keep-alive by default
+ assert(conn.sock is not None) #connection must be closed because bitcoind should use keep-alive by default
# Check excessive request size
conn = http.client.HTTPConnection(urlNode2.hostname, urlNode2.port)
diff --git a/test/functional/interface_rpc.py b/test/functional/interface_rpc.py
new file mode 100755
index 0000000000..b6955d4492
--- /dev/null
+++ b/test/functional/interface_rpc.py
@@ -0,0 +1,57 @@
+#!/usr/bin/env python3
+# Copyright (c) 2018 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Tests some generic aspects of the RPC interface."""
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import assert_equal, assert_greater_than_or_equal
+
+class RPCInterfaceTest(BitcoinTestFramework):
+ def set_test_params(self):
+ self.num_nodes = 1
+ self.setup_clean_chain = True
+
+ def test_getrpcinfo(self):
+ self.log.info("Testing getrpcinfo...")
+
+ info = self.nodes[0].getrpcinfo()
+ assert_equal(len(info['active_commands']), 1)
+
+ command = info['active_commands'][0]
+ assert_equal(command['method'], 'getrpcinfo')
+ assert_greater_than_or_equal(command['duration'], 0)
+
+ def test_batch_request(self):
+ self.log.info("Testing basic JSON-RPC batch request...")
+
+ results = self.nodes[0].batch([
+ # A basic request that will work fine.
+ {"method": "getblockcount", "id": 1},
+ # Request that will fail. The whole batch request should still
+ # work fine.
+ {"method": "invalidmethod", "id": 2},
+ # Another call that should succeed.
+ {"method": "getbestblockhash", "id": 3},
+ ])
+
+ result_by_id = {}
+ for res in results:
+ result_by_id[res["id"]] = res
+
+ assert_equal(result_by_id[1]['error'], None)
+ assert_equal(result_by_id[1]['result'], 0)
+
+ assert_equal(result_by_id[2]['error']['code'], -32601)
+ assert_equal(result_by_id[2]['result'], None)
+
+ assert_equal(result_by_id[3]['error'], None)
+ assert result_by_id[3]['result'] is not None
+
+ def run_test(self):
+ self.test_getrpcinfo()
+ self.test_batch_request()
+
+
+if __name__ == '__main__':
+ RPCInterfaceTest().main()
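For reference, a sketch of what the batch exercised above looks like at the JSON-RPC layer. The shape follows the JSON-RPC spec; the test itself goes through the framework's batch() helper rather than raw HTTP:

import json

# Requests are sent as one JSON array; each entry carries its own "id".
batch_request = json.dumps([
    {"method": "getblockcount", "id": 1},
    {"method": "invalidmethod", "id": 2},    # expected to fail with error code -32601 (method not found)
    {"method": "getbestblockhash", "id": 3},
])
# The response is also a JSON array, and entries are matched back to requests by
# "id", which is why the test builds result_by_id instead of relying on ordering.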
diff --git a/test/functional/interface_zmq.py b/test/functional/interface_zmq.py
index 12d484d7c2..94fea37090 100755
--- a/test/functional/interface_zmq.py
+++ b/test/functional/interface_zmq.py
@@ -122,10 +122,10 @@ class ZMQTest (BitcoinTestFramework):
self.log.info("Test the getzmqnotifications RPC")
assert_equal(self.nodes[0].getzmqnotifications(), [
- {"type": "pubhashblock", "address": ADDRESS},
- {"type": "pubhashtx", "address": ADDRESS},
- {"type": "pubrawblock", "address": ADDRESS},
- {"type": "pubrawtx", "address": ADDRESS},
+ {"type": "pubhashblock", "address": ADDRESS, "hwm": 1000},
+ {"type": "pubhashtx", "address": ADDRESS, "hwm": 1000},
+ {"type": "pubrawblock", "address": ADDRESS, "hwm": 1000},
+ {"type": "pubrawtx", "address": ADDRESS, "hwm": 1000},
])
assert_equal(self.nodes[1].getzmqnotifications(), [])
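The new "hwm" field corresponds to ZeroMQ's high-water mark, i.e. how many notifications may queue per endpoint before messages are dropped. A minimal subscriber sketch, assuming the pyzmq package this test already depends on and a hypothetical publisher address:

import zmq

ctx = zmq.Context()
sub = ctx.socket(zmq.SUB)
sub.set(zmq.RCVHWM, 1000)                  # match the default hwm reported above
sub.setsockopt(zmq.SUBSCRIBE, b"hashblock")
sub.connect("tcp://127.0.0.1:28332")       # hypothetical -zmqpubhashblock endpoint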
diff --git a/test/functional/mempool_accept.py b/test/functional/mempool_accept.py
index 8847777ba7..e2a219b85a 100755
--- a/test/functional/mempool_accept.py
+++ b/test/functional/mempool_accept.py
@@ -1,10 +1,12 @@
#!/usr/bin/env python3
-# Copyright (c) 2017 The Bitcoin Core developers
+# Copyright (c) 2017-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test mempool acceptance of raw transactions."""
from io import BytesIO
+import math
+
from test_framework.test_framework import BitcoinTestFramework
from test_framework.messages import (
BIP125_SEQUENCE_NUMBER,
@@ -56,6 +58,7 @@ class MempoolAcceptanceTest(BitcoinTestFramework):
self.mempool_size = 0
wait_until(lambda: node.getblockcount() == 200)
assert_equal(node.getmempoolinfo()['size'], self.mempool_size)
+ coins = node.listunspent()
self.log.info('Should not accept garbage to testmempoolaccept')
assert_raises_rpc_error(-3, 'Expected type array, got string', lambda: node.testmempoolaccept(rawtxs='ff00baar'))
@@ -63,13 +66,14 @@ class MempoolAcceptanceTest(BitcoinTestFramework):
assert_raises_rpc_error(-22, 'TX decode failed', lambda: node.testmempoolaccept(rawtxs=['ff00baar']))
self.log.info('A transaction already in the blockchain')
- coin = node.listunspent()[0] # Pick a random coin(base) to spend
+ coin = coins.pop() # Pick a random coin(base) to spend
raw_tx_in_block = node.signrawtransactionwithwallet(node.createrawtransaction(
inputs=[{'txid': coin['txid'], 'vout': coin['vout']}],
outputs=[{node.getnewaddress(): 0.3}, {node.getnewaddress(): 49}],
))['hex']
txid_in_block = node.sendrawtransaction(hexstring=raw_tx_in_block, allowhighfees=True)
node.generate(1)
+ self.mempool_size = 0
self.check_mempool_result(
result_expected=[{'txid': txid_in_block, 'allowed': False, 'reject-reason': '18: txn-already-known'}],
rawtxs=[raw_tx_in_block],
@@ -89,9 +93,25 @@ class MempoolAcceptanceTest(BitcoinTestFramework):
rawtxs=[raw_tx_0],
)
+ self.log.info('A final transaction not in the mempool')
+ coin = coins.pop() # Pick a random coin(base) to spend
+ raw_tx_final = node.signrawtransactionwithwallet(node.createrawtransaction(
+ inputs=[{'txid': coin['txid'], 'vout': coin['vout'], "sequence": 0xffffffff}], # SEQUENCE_FINAL
+ outputs=[{node.getnewaddress(): 0.025}],
+ locktime=node.getblockcount() + 2000, # Can be anything
+ ))['hex']
+ tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_final)))
+ self.check_mempool_result(
+ result_expected=[{'txid': tx.rehash(), 'allowed': True}],
+ rawtxs=[bytes_to_hex_str(tx.serialize())],
+ allowhighfees=True,
+ )
+ node.sendrawtransaction(hexstring=raw_tx_final, allowhighfees=True)
+ self.mempool_size += 1
+
self.log.info('A transaction in the mempool')
node.sendrawtransaction(hexstring=raw_tx_0)
- self.mempool_size = 1
+ self.mempool_size += 1
self.check_mempool_result(
result_expected=[{'txid': txid_0, 'allowed': False, 'reject-reason': '18: txn-already-in-mempool'}],
rawtxs=[raw_tx_0],
@@ -181,7 +201,7 @@ class MempoolAcceptanceTest(BitcoinTestFramework):
self.log.info('A really large transaction')
tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
- tx.vin = [tx.vin[0]] * (MAX_BLOCK_BASE_SIZE // len(tx.vin[0].serialize()))
+ tx.vin = [tx.vin[0]] * math.ceil(MAX_BLOCK_BASE_SIZE / len(tx.vin[0].serialize()))
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '16: bad-txns-oversize'}],
rawtxs=[bytes_to_hex_str(tx.serialize())],
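The switch from floor to ceiling division above guarantees that the repeated inputs alone exceed MAX_BLOCK_BASE_SIZE, so the bad-txns-oversize rejection no longer depends on the rest of the transaction. A quick check of the arithmetic, with a placeholder input size standing in for len(tx.vin[0].serialize()):

import math

MAX_BLOCK_BASE_SIZE = 1000000
vin_size = 148  # placeholder; any size that does not divide the limit evenly behaves the same way
assert (MAX_BLOCK_BASE_SIZE // vin_size) * vin_size <= MAX_BLOCK_BASE_SIZE          # floor can land at or under the limit
assert math.ceil(MAX_BLOCK_BASE_SIZE / vin_size) * vin_size > MAX_BLOCK_BASE_SIZE   # ceiling lands strictly over it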
diff --git a/test/functional/mempool_persist.py b/test/functional/mempool_persist.py
index b4e9d967fd..d74d4eaaf1 100755
--- a/test/functional/mempool_persist.py
+++ b/test/functional/mempool_persist.py
@@ -42,6 +42,7 @@ import time
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, assert_raises_rpc_error, wait_until
+
class MempoolPersistTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 3
@@ -60,7 +61,7 @@ class MempoolPersistTest(BitcoinTestFramework):
self.log.debug("Send 5 transactions from node2 (to its own address)")
for i in range(5):
- self.nodes[2].sendtoaddress(self.nodes[2].getnewaddress(), Decimal("10"))
+ last_txid = self.nodes[2].sendtoaddress(self.nodes[2].getnewaddress(), Decimal("10"))
node2_balance = self.nodes[2].getbalance()
self.sync_all()
@@ -68,6 +69,13 @@ class MempoolPersistTest(BitcoinTestFramework):
assert_equal(len(self.nodes[0].getrawmempool()), 5)
assert_equal(len(self.nodes[1].getrawmempool()), 5)
+ self.log.debug("Prioritize a transaction on node0")
+ fees = self.nodes[0].getmempoolentry(txid=last_txid)['fees']
+ assert_equal(fees['base'], fees['modified'])
+ self.nodes[0].prioritisetransaction(txid=last_txid, fee_delta=1000)
+ fees = self.nodes[0].getmempoolentry(txid=last_txid)['fees']
+ assert_equal(fees['base'] + Decimal('0.00001000'), fees['modified'])
+
self.log.debug("Stop-start the nodes. Verify that node0 has the transactions in its mempool and node1 does not. Verify that node2 calculates its balance correctly after loading wallet transactions.")
self.stop_nodes()
# Give this node a head-start, so we can be "extra-sure" that it didn't load anything later
@@ -81,6 +89,10 @@ class MempoolPersistTest(BitcoinTestFramework):
# The others have loaded their mempool. If node_1 loaded anything, we'd probably notice by now:
assert_equal(len(self.nodes[1].getrawmempool()), 0)
+ self.log.debug('Verify prioritization is loaded correctly')
+ fees = self.nodes[0].getmempoolentry(txid=last_txid)['fees']
+ assert_equal(fees['base'] + Decimal('0.00001000'), fees['modified'])
+
# Verify accounting of mempool transactions after restart is correct
self.nodes[2].syncwithvalidationinterfacequeue() # Flush mempool to wallet
assert_equal(node2_balance, self.nodes[2].getbalance())
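The fee assertions above hinge on prioritisetransaction's fee_delta being denominated in satoshis, so a delta of 1000 is exactly the 0.00001000 BTC added to the modified fee:

from decimal import Decimal

COIN = 100000000    # satoshis per BTC
fee_delta = 1000    # value passed to prioritisetransaction above
assert Decimal(fee_delta) / COIN == Decimal('0.00001000')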
diff --git a/test/functional/mempool_resurrect.py b/test/functional/mempool_resurrect.py
index d035ca907a..845beb551e 100755
--- a/test/functional/mempool_resurrect.py
+++ b/test/functional/mempool_resurrect.py
@@ -47,12 +47,11 @@ class MempoolCoinbaseTest(BitcoinTestFramework):
tx = self.nodes[0].gettransaction(txid)
assert(tx["confirmations"] > 0)
- # Use invalidateblock to re-org back; all transactions should
- # end up unconfirmed and back in the mempool
+ # Use invalidateblock to re-org back
for node in self.nodes:
node.invalidateblock(blocks[0])
- # mempool should be empty, all txns confirmed
+ # All txns should be back in mempool with 0 confirmations
assert_equal(set(self.nodes[0].getrawmempool()), set(spends1_id+spends2_id))
for txid in spends1_id+spends2_id:
tx = self.nodes[0].gettransaction(txid)
diff --git a/test/functional/mining_basic.py b/test/functional/mining_basic.py
index ff55ea5528..661d9f4c97 100755
--- a/test/functional/mining_basic.py
+++ b/test/functional/mining_basic.py
@@ -25,14 +25,15 @@ from test_framework.util import (
assert_raises_rpc_error,
bytes_to_hex_str as b2x,
)
-
+from test_framework.script import CScriptNum
def assert_template(node, block, expect, rehash=True):
if rehash:
block.hashMerkleRoot = block.calc_merkle_root()
- rsp = node.getblocktemplate({'data': b2x(block.serialize()), 'mode': 'proposal'})
+ rsp = node.getblocktemplate(template_request={'data': b2x(block.serialize()), 'mode': 'proposal', 'rules': ['segwit']})
assert_equal(rsp, expect)
+
class MiningTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 2
@@ -59,16 +60,24 @@ class MiningTest(BitcoinTestFramework):
# Mine a block to leave initial block download
node.generatetoaddress(1, node.get_deterministic_priv_key().address)
- tmpl = node.getblocktemplate()
+ tmpl = node.getblocktemplate({'rules': ['segwit']})
self.log.info("getblocktemplate: Test capability advertised")
assert 'proposal' in tmpl['capabilities']
assert 'coinbasetxn' not in tmpl
- coinbase_tx = create_coinbase(height=int(tmpl["height"]) + 1)
+ next_height = int(tmpl["height"])
+ coinbase_tx = create_coinbase(height=next_height)
# sequence numbers must not be max for nLockTime to have effect
coinbase_tx.vin[0].nSequence = 2 ** 32 - 2
coinbase_tx.rehash()
+ # round-trip the encoded bip34 block height commitment
+ assert_equal(CScriptNum.decode(coinbase_tx.vin[0].scriptSig), next_height)
+ # round-trip negative and multi-byte CScriptNums to catch python regression
+ assert_equal(CScriptNum.decode(CScriptNum.encode(CScriptNum(1500))), 1500)
+ assert_equal(CScriptNum.decode(CScriptNum.encode(CScriptNum(-1500))), -1500)
+ assert_equal(CScriptNum.decode(CScriptNum.encode(CScriptNum(-1))), -1)
+
block = CBlock()
block.nVersion = tmpl["version"]
block.hashPrevBlock = int(tmpl["previousblockhash"], 16)
@@ -77,6 +86,9 @@ class MiningTest(BitcoinTestFramework):
block.nNonce = 0
block.vtx = [coinbase_tx]
+ self.log.info("getblocktemplate: segwit rule must be set")
+ assert_raises_rpc_error(-8, "getblocktemplate must be called with the segwit rule set", node.getblocktemplate)
+
self.log.info("getblocktemplate: Test valid block")
assert_template(node, block, None)
@@ -93,7 +105,7 @@ class MiningTest(BitcoinTestFramework):
assert_raises_rpc_error(-22, "Block does not start with a coinbase", node.submitblock, b2x(bad_block.serialize()))
self.log.info("getblocktemplate: Test truncated final transaction")
- assert_raises_rpc_error(-22, "Block decode failed", node.getblocktemplate, {'data': b2x(block.serialize()[:-1]), 'mode': 'proposal'})
+ assert_raises_rpc_error(-22, "Block decode failed", node.getblocktemplate, {'data': b2x(block.serialize()[:-1]), 'mode': 'proposal', 'rules': ['segwit']})
self.log.info("getblocktemplate: Test duplicate transaction")
bad_block = copy.deepcopy(block)
@@ -123,7 +135,7 @@ class MiningTest(BitcoinTestFramework):
bad_block_sn = bytearray(block.serialize())
assert_equal(bad_block_sn[TX_COUNT_OFFSET], 1)
bad_block_sn[TX_COUNT_OFFSET] += 1
- assert_raises_rpc_error(-22, "Block decode failed", node.getblocktemplate, {'data': b2x(bad_block_sn), 'mode': 'proposal'})
+ assert_raises_rpc_error(-22, "Block decode failed", node.getblocktemplate, {'data': b2x(bad_block_sn), 'mode': 'proposal', 'rules': ['segwit']})
self.log.info("getblocktemplate: Test bad bits")
bad_block = copy.deepcopy(block)
diff --git a/test/functional/mining_getblocktemplate_longpoll.py b/test/functional/mining_getblocktemplate_longpoll.py
index 9a3c15a4a7..72cde8e811 100755
--- a/test/functional/mining_getblocktemplate_longpoll.py
+++ b/test/functional/mining_getblocktemplate_longpoll.py
@@ -15,14 +15,14 @@ class LongpollThread(threading.Thread):
def __init__(self, node):
threading.Thread.__init__(self)
# query current longpollid
- template = node.getblocktemplate()
+ template = node.getblocktemplate({'rules': ['segwit']})
self.longpollid = template['longpollid']
# create a new connection to the node, we can't use the same
# connection from two threads
self.node = get_rpc_proxy(node.url, 1, timeout=600, coveragedir=node.coverage_dir)
def run(self):
- self.node.getblocktemplate({'longpollid':self.longpollid})
+ self.node.getblocktemplate({'longpollid': self.longpollid, 'rules': ['segwit']})
class GetBlockTemplateLPTest(BitcoinTestFramework):
def set_test_params(self):
@@ -34,10 +34,10 @@ class GetBlockTemplateLPTest(BitcoinTestFramework):
def run_test(self):
self.log.info("Warning: this test will take about 70 seconds in the best case. Be patient.")
self.nodes[0].generate(10)
- template = self.nodes[0].getblocktemplate()
+ template = self.nodes[0].getblocktemplate({'rules': ['segwit']})
longpollid = template['longpollid']
# longpollid should not change between successive invocations if nothing else happens
- template2 = self.nodes[0].getblocktemplate()
+ template2 = self.nodes[0].getblocktemplate({'rules': ['segwit']})
assert(template2['longpollid'] == longpollid)
# Test 1: test that the longpolling wait if we do nothing
diff --git a/test/functional/mining_prioritisetransaction.py b/test/functional/mining_prioritisetransaction.py
index c5ddee56f1..ca4b621a78 100755
--- a/test/functional/mining_prioritisetransaction.py
+++ b/test/functional/mining_prioritisetransaction.py
@@ -84,7 +84,7 @@ class PrioritiseTransactionTest(BitcoinTestFramework):
high_fee_tx = x
# Something high-fee should have been mined!
- assert(high_fee_tx != None)
+ assert(high_fee_tx is not None)
# Add a prioritisation before a tx is in the mempool (de-prioritising a
# high-fee transaction so that it's now low fee).
@@ -142,10 +142,10 @@ class PrioritiseTransactionTest(BitcoinTestFramework):
# getblocktemplate to (eventually) return a new block.
mock_time = int(time.time())
self.nodes[0].setmocktime(mock_time)
- template = self.nodes[0].getblocktemplate()
+ template = self.nodes[0].getblocktemplate({'rules': ['segwit']})
self.nodes[0].prioritisetransaction(txid=tx_id, fee_delta=-int(self.relayfee*COIN))
self.nodes[0].setmocktime(mock_time+10)
- new_template = self.nodes[0].getblocktemplate()
+ new_template = self.nodes[0].getblocktemplate({'rules': ['segwit']})
assert(template != new_template)
diff --git a/test/functional/p2p_disconnect_ban.py b/test/functional/p2p_disconnect_ban.py
index 67f24d6bff..1b11a2a294 100755
--- a/test/functional/p2p_disconnect_ban.py
+++ b/test/functional/p2p_disconnect_ban.py
@@ -22,7 +22,7 @@ class DisconnectBanTest(BitcoinTestFramework):
self.log.info("setban: successfully ban single IP address")
assert_equal(len(self.nodes[1].getpeerinfo()), 2) # node1 should have 2 connections to node0 at this point
- self.nodes[1].setban("127.0.0.1", "add")
+ self.nodes[1].setban(subnet="127.0.0.1", command="add")
wait_until(lambda: len(self.nodes[1].getpeerinfo()) == 0, timeout=10)
assert_equal(len(self.nodes[1].getpeerinfo()), 0) # all nodes must be disconnected at this point
assert_equal(len(self.nodes[1].listbanned()), 1)
diff --git a/test/functional/p2p_invalid_block.py b/test/functional/p2p_invalid_block.py
index 0678b1a651..1e0b876593 100755
--- a/test/functional/p2p_invalid_block.py
+++ b/test/functional/p2p_invalid_block.py
@@ -77,9 +77,9 @@ class InvalidBlockRequestTest(BitcoinTestFramework):
block2.vtx.append(tx2)
assert_equal(block2.hashMerkleRoot, block2.calc_merkle_root())
assert_equal(orig_hash, block2.rehash())
- assert(block2_orig.vtx != block2.vtx)
+ assert block2_orig.vtx != block2.vtx
- node.p2p.send_blocks_and_test([block2], node, success=False, request_block=False, reject_reason='bad-txns-duplicate')
+ node.p2p.send_blocks_and_test([block2], node, success=False, reject_reason='bad-txns-duplicate')
# Check transactions for duplicate inputs
self.log.info("Test duplicate input block.")
@@ -89,7 +89,7 @@ class InvalidBlockRequestTest(BitcoinTestFramework):
block2_orig.hashMerkleRoot = block2_orig.calc_merkle_root()
block2_orig.rehash()
block2_orig.solve()
- node.p2p.send_blocks_and_test([block2_orig], node, success=False, request_block=False, reject_reason='bad-txns-inputs-duplicate')
+ node.p2p.send_blocks_and_test([block2_orig], node, success=False, reject_reason='bad-txns-inputs-duplicate')
self.log.info("Test very broken block.")
@@ -102,7 +102,8 @@ class InvalidBlockRequestTest(BitcoinTestFramework):
block3.rehash()
block3.solve()
- node.p2p.send_blocks_and_test([block3], node, success=False, request_block=False, reject_reason='bad-cb-amount')
+ node.p2p.send_blocks_and_test([block3], node, success=False, reject_reason='bad-cb-amount')
+
if __name__ == '__main__':
InvalidBlockRequestTest().main()
diff --git a/test/functional/p2p_invalid_locator.py b/test/functional/p2p_invalid_locator.py
index c8c752d1f7..33b7060060 100755
--- a/test/functional/p2p_invalid_locator.py
+++ b/test/functional/p2p_invalid_locator.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2015-2017 The Bitcoin Core developers
+# Copyright (c) 2015-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test node responses to invalid locators.
diff --git a/test/functional/p2p_invalid_messages.py b/test/functional/p2p_invalid_messages.py
new file mode 100755
index 0000000000..dbc5c5fff6
--- /dev/null
+++ b/test/functional/p2p_invalid_messages.py
@@ -0,0 +1,179 @@
+#!/usr/bin/env python3
+# Copyright (c) 2015-2018 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test node responses to invalid network messages."""
+import os
+import struct
+
+from test_framework import messages
+from test_framework.mininode import P2PDataStore
+from test_framework.test_framework import BitcoinTestFramework
+
+
+class msg_unrecognized:
+ """Nonsensical message. Modeled after similar types in test_framework.messages."""
+
+ command = b'badmsg'
+
+ def __init__(self, str_data):
+ self.str_data = str_data.encode() if not isinstance(str_data, bytes) else str_data
+
+ def serialize(self):
+ return messages.ser_string(self.str_data)
+
+ def __repr__(self):
+ return "{}(data={})".format(self.command, self.str_data)
+
+
+class msg_nametoolong(msg_unrecognized):
+
+ command = b'thisnameiswayyyyyyyyytoolong'
+
+
+class InvalidMessagesTest(BitcoinTestFramework):
+
+ def set_test_params(self):
+ self.num_nodes = 1
+ self.setup_clean_chain = True
+
+ def run_test(self):
+ """
+ 0. Send a bunch of large (4MB) messages of an unrecognized type. Check to see
+ that it isn't an effective DoS against the node.
+
+ 1. Send an oversized (4MB+) message and check that we're disconnected.
+
+ 2. Send a few messages with an incorrect data size in the header, ensure the
+ messages are ignored.
+
+ 3. Send an unrecognized message with a command name longer than 12 characters.
+
+ """
+ node = self.nodes[0]
+ self.node = node
+ node.add_p2p_connection(P2PDataStore())
+ conn2 = node.add_p2p_connection(P2PDataStore())
+
+ msg_limit = 4 * 1000 * 1000 # 4MB, per MAX_PROTOCOL_MESSAGE_LENGTH
+ valid_data_limit = msg_limit - 5  # Account for the 5-byte compact-size length prefix (0xFE marker + 4 length bytes)
+
+ #
+ # 0.
+ #
+ # Send as large a message as is valid, ensure we aren't disconnected but
+ # also can't exhaust resources.
+ #
+ msg_at_size = msg_unrecognized("b" * valid_data_limit)
+ assert len(msg_at_size.serialize()) == msg_limit
+
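+ # Allow the node's resident memory (RSS) to grow by at most 50% while it absorbs
+ # the junk messages. AddressSanitizer builds carry a much larger and noisier
+ # memory overhead, so relax the bound when BITCOIN_CONFIG indicates such a build.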
+ increase_allowed = 0.5
+ if [s for s in os.environ.get("BITCOIN_CONFIG", "").split(" ") if "--with-sanitizers" in s and "address" in s]:
+ increase_allowed = 3.5
+ with node.assert_memory_usage_stable(increase_allowed=increase_allowed):
+ self.log.info(
+ "Sending a bunch of large, junk messages to test "
+ "memory exhaustion. May take a bit...")
+
+ # Run a bunch of times to test for memory exhaustion.
+ for _ in range(80):
+ node.p2p.send_message(msg_at_size)
+
+ # Check that, even though the node is being hammered by nonsense from one
+ # connection, it can still service other peers in a timely way.
+ for _ in range(20):
+ conn2.sync_with_ping(timeout=2)
+
+ # Peer 1, despite serving up a bunch of nonsense, should still be connected.
+ self.log.info("Waiting for node to drop junk messages.")
+ node.p2p.sync_with_ping(timeout=120)
+ assert node.p2p.is_connected
+
+ #
+ # 1.
+ #
+ # Send an oversized message, ensure we're disconnected.
+ #
+ msg_over_size = msg_unrecognized("b" * (valid_data_limit + 1))
+ assert len(msg_over_size.serialize()) == (msg_limit + 1)
+
+ with node.assert_debug_log(["Oversized message from peer=0, disconnecting"]):
+ # An unknown message type (or *any* message type) over
+ # MAX_PROTOCOL_MESSAGE_LENGTH should result in a disconnect.
+ node.p2p.send_message(msg_over_size)
+ node.p2p.wait_for_disconnect(timeout=4)
+
+ node.disconnect_p2ps()
+ conn = node.add_p2p_connection(P2PDataStore())
+ conn.wait_for_verack()
+
+ #
+ # 2.
+ #
+ # Send messages with an incorrect data size in the header.
+ #
+ actual_size = 100
+ msg = msg_unrecognized("b" * actual_size)
+
+ # TODO: handle sizes larger than the actual payload; the expected behavior for those cases has not yet been pinned down.
+ for wrong_size in (2, 77, 78, 79):
+ self.log.info("Sending a message with incorrect size of {}".format(wrong_size))
+
+ # Unmodified message should submit okay.
+ node.p2p.send_and_ping(msg)
+
+ # A message lying about its data size results in a disconnect when the incorrect
+ # data size is less than the actual size.
+ #
+ # TODO: why does behavior change at 78 bytes?
+ #
+ node.p2p.send_raw_message(self._tweak_msg_data_size(msg, wrong_size))
+
+ # Sometimes we have to push additional data to the peer before it registers the
+ # disconnect; the exact reason is not fully understood.
+ try:
+ node.p2p.send_message(messages.msg_ping(nonce=123123))
+ except IOError:
+ pass
+
+ node.p2p.wait_for_disconnect(timeout=10)
+ node.disconnect_p2ps()
+ node.add_p2p_connection(P2PDataStore())
+
+ #
+ # 3.
+ #
+ # Send a message with a too-long command name.
+ #
+ node.p2p.send_message(msg_nametoolong("foobar"))
+ node.p2p.wait_for_disconnect(timeout=4)
+
+ # Node is still up.
+ conn = node.add_p2p_connection(P2PDataStore())
+ conn.sync_with_ping()
+
+ def _tweak_msg_data_size(self, message, wrong_size):
+ """
+ Return a raw message based on another message but with an incorrect data size in
+ the message header.
+ """
+ raw_msg = self.node.p2p.build_message(message)
+
+ bad_size_bytes = struct.pack("<I", wrong_size)
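+ # The P2P message header is: 4-byte network magic, 12-byte command name,
+ # 4-byte payload size, 4-byte checksum; the size field therefore starts at
+ # byte offset 4 + 12 = 16.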
+ num_header_bytes_before_size = 4 + 12
+
+ # Replace the correct data size in the message with an incorrect one.
+ raw_msg_with_wrong_size = (
+ raw_msg[:num_header_bytes_before_size] +
+ bad_size_bytes +
+ raw_msg[(num_header_bytes_before_size + len(bad_size_bytes)):]
+ )
+ assert len(raw_msg) == len(raw_msg_with_wrong_size)
+
+ return raw_msg_with_wrong_size
+
+
+if __name__ == '__main__':
+ InvalidMessagesTest().main()
diff --git a/test/functional/p2p_invalid_tx.py b/test/functional/p2p_invalid_tx.py
index 58e129b57d..1b18dd3e58 100755
--- a/test/functional/p2p_invalid_tx.py
+++ b/test/functional/p2p_invalid_tx.py
@@ -5,7 +5,7 @@
"""Test node responses to invalid transactions.
In this test we connect to one node over p2p, and test tx requests."""
-from test_framework.blocktools import create_block, create_coinbase, create_tx_with_script
+from test_framework.blocktools import create_block, create_coinbase
from test_framework.messages import (
COIN,
COutPoint,
@@ -19,6 +19,7 @@ from test_framework.util import (
assert_equal,
wait_until,
)
+from data import invalid_txs
class InvalidTxRequestTest(BitcoinTestFramework):
@@ -63,12 +64,21 @@ class InvalidTxRequestTest(BitcoinTestFramework):
self.log.info("Mature the block.")
self.nodes[0].generatetoaddress(100, self.nodes[0].get_deterministic_priv_key().address)
- # b'\x64' is OP_NOTIF
- # Transaction will be rejected with code 16 (REJECT_INVALID)
- # and we get disconnected immediately
- self.log.info('Test a transaction that is rejected')
- tx1 = create_tx_with_script(block1.vtx[0], 0, script_sig=b'\x64' * 35, amount=50 * COIN - 12000)
- node.p2p.send_txs_and_test([tx1], node, success=False, expect_disconnect=True)
+ # Iterate through a list of known invalid transaction types, ensuring each is
+ # rejected. Some are consensus invalid and some just violate policy.
+ for BadTxTemplate in invalid_txs.iter_all_templates():
+ self.log.info("Testing invalid transaction: %s", BadTxTemplate.__name__)
+ template = BadTxTemplate(spend_block=block1)
+ tx = template.get_tx()
+ node.p2p.send_txs_and_test(
+ [tx], node, success=False,
+ expect_disconnect=template.expect_disconnect,
+ reject_reason=template.reject_reason,
+ )
+
+ if template.expect_disconnect:
+ self.log.info("Reconnecting to peer")
+ self.reconnect_p2p()
# Make two p2p connections to provide the node with orphans
# * p2ps[0] will send valid orphan txs (one with low fee)
diff --git a/test/functional/p2p_segwit.py b/test/functional/p2p_segwit.py
index afbbfa8992..d95da227e5 100755
--- a/test/functional/p2p_segwit.py
+++ b/test/functional/p2p_segwit.py
@@ -545,31 +545,13 @@ class SegWitTest(BitcoinTestFramework):
@subtest
def test_getblocktemplate_before_lockin(self):
- # Node0 is segwit aware, node2 is not.
- for node in [self.nodes[0], self.nodes[2]]:
- gbt_results = node.getblocktemplate()
- block_version = gbt_results['version']
- # If we're not indicating segwit support, we will still be
- # signalling for segwit activation.
- assert_equal((block_version & (1 << VB_WITNESS_BIT) != 0), node == self.nodes[0])
- # If we don't specify the segwit rule, then we won't get a default
- # commitment.
- assert('default_witness_commitment' not in gbt_results)
-
- # Workaround:
- # Can either change the tip, or change the mempool and wait 5 seconds
- # to trigger a recomputation of getblocktemplate.
txid = int(self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1), 16)
- # Using mocktime lets us avoid sleep()
- sync_mempools(self.nodes)
- self.nodes[0].setmocktime(int(time.time()) + 10)
- self.nodes[2].setmocktime(int(time.time()) + 10)
for node in [self.nodes[0], self.nodes[2]]:
gbt_results = node.getblocktemplate({"rules": ["segwit"]})
block_version = gbt_results['version']
if node == self.nodes[2]:
- # If this is a non-segwit node, we should still not get a witness
+ # If this is a non-segwit node, we should not get a witness
# commitment, nor a version bit signalling segwit.
assert_equal(block_version & (1 << VB_WITNESS_BIT), 0)
assert('default_witness_commitment' not in gbt_results)
@@ -586,10 +568,6 @@ class SegWitTest(BitcoinTestFramework):
script = get_witness_script(witness_root, 0)
assert_equal(witness_commitment, bytes_to_hex_str(script))
- # undo mocktime
- self.nodes[0].setmocktime(0)
- self.nodes[2].setmocktime(0)
-
@subtest
def advance_to_segwit_lockin(self):
"""Mine enough blocks to lock in segwit, but don't activate."""
diff --git a/test/functional/p2p_timeouts.py b/test/functional/p2p_timeouts.py
index 2459a9f243..02ceec3dc1 100755
--- a/test/functional/p2p_timeouts.py
+++ b/test/functional/p2p_timeouts.py
@@ -14,11 +14,11 @@
- Wait 1 second
- Assert that we're connected
- Send a ping to no_verack_node and no_version_node
-- Wait 30 seconds
+- Wait 1 second
- Assert that we're still connected
- Send a ping to no_verack_node and no_version_node
-- Wait 31 seconds
-- Assert that we're no longer connected (timeout to receive version/verack is 60 seconds)
+- Wait 2 seconds
+- Assert that we're no longer connected (timeout to receive version/verack is 3 seconds)
"""
from time import sleep
@@ -36,6 +36,8 @@ class TimeoutsTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
+ # set timeout to receive version/verack to 3 seconds
+ self.extra_args = [["-peertimeout=3"]]
def run_test(self):
# Setup the p2p connections
@@ -52,7 +54,7 @@ class TimeoutsTest(BitcoinTestFramework):
no_verack_node.send_message(msg_ping())
no_version_node.send_message(msg_ping())
- sleep(30)
+ sleep(1)
assert "version" in no_verack_node.last_message
@@ -63,11 +65,21 @@ class TimeoutsTest(BitcoinTestFramework):
no_verack_node.send_message(msg_ping())
no_version_node.send_message(msg_ping())
- sleep(31)
-
- assert not no_verack_node.is_connected
- assert not no_version_node.is_connected
- assert not no_send_node.is_connected
+ expected_timeout_logs = [
+ "version handshake timeout from 0",
+ "socket no message in first 3 seconds, 1 0 from 1",
+ "socket no message in first 3 seconds, 0 0 from 2",
+ ]
+
+ with self.nodes[0].assert_debug_log(expected_msgs=expected_timeout_logs):
+ sleep(3)
+ # By now, we waited a total of 5 seconds. Off-by-two for two
+ # reasons:
+ # * The internal precision is one second
+ # * Account for network delay
+ assert not no_verack_node.is_connected
+ assert not no_version_node.is_connected
+ assert not no_send_node.is_connected
if __name__ == '__main__':
TimeoutsTest().main()
diff --git a/test/functional/rpc_bind.py b/test/functional/rpc_bind.py
index 53916d5290..3938ca98dd 100755
--- a/test/functional/rpc_bind.py
+++ b/test/functional/rpc_bind.py
@@ -48,9 +48,12 @@ class RPCBindTest(BitcoinTestFramework):
at a non-localhost IP.
'''
self.log.info("Allow IP test for %s:%d" % (rpchost, rpcport))
- base_args = ['-disablewallet', '-nolisten'] + ['-rpcallowip='+x for x in allow_ips]
+ node_args = \
+ ['-disablewallet', '-nolisten'] + \
+ ['-rpcallowip='+x for x in allow_ips] + \
+ ['-rpcbind='+addr for addr in ['127.0.0.1', "%s:%d" % (rpchost, rpcport)]] # Bind to localhost as well so start_nodes doesn't hang
self.nodes[0].rpchost = None
- self.start_nodes([base_args])
+ self.start_nodes([node_args])
# connect to node through non-loopback interface
node = get_rpc_proxy(rpc_url(self.nodes[0].datadir, 0, "%s:%d" % (rpchost, rpcport)), 0, coveragedir=self.options.coveragedir)
node.getnetworkinfo()
@@ -67,7 +70,7 @@ class RPCBindTest(BitcoinTestFramework):
self.log.info("Check for ipv6")
have_ipv6 = test_ipv6_local()
- if not have_ipv6 and not self.options.run_ipv4:
+ if not have_ipv6 and not (self.options.run_ipv4 or self.options.run_nonloopback):
raise SkipTest("This test requires ipv6 support.")
self.log.info("Check for non-loopback interface")
@@ -101,9 +104,9 @@ class RPCBindTest(BitcoinTestFramework):
# check default without rpcallowip (IPv4 and IPv6 localhost)
self.run_bind_test(None, '127.0.0.1', [],
[('127.0.0.1', self.defaultport), ('::1', self.defaultport)])
- # check default with rpcallowip (IPv6 any)
+ # check default with rpcallowip (IPv4 and IPv6 localhost)
self.run_bind_test(['127.0.0.1'], '127.0.0.1', [],
- [('::0', self.defaultport)])
+ [('127.0.0.1', self.defaultport), ('::1', self.defaultport)])
# check only IPv6 localhost (explicit)
self.run_bind_test(['[::1]'], '[::1]', ['[::1]'],
[('::1', self.defaultport)])
diff --git a/test/functional/rpc_blockchain.py b/test/functional/rpc_blockchain.py
index 92b690176d..31e60f1cea 100755
--- a/test/functional/rpc_blockchain.py
+++ b/test/functional/rpc_blockchain.py
@@ -133,7 +133,7 @@ class BlockchainTest(BitcoinTestFramework):
assert_raises_rpc_error(-8, "Block is not in main chain", self.nodes[0].getchaintxstats, blockhash=blockhash)
self.nodes[0].reconsiderblock(blockhash)
- chaintxstats = self.nodes[0].getchaintxstats(1)
+ chaintxstats = self.nodes[0].getchaintxstats(nblocks=1)
# 200 txs plus genesis tx
assert_equal(chaintxstats['txcount'], 201)
# tx rate should be 1 per 10 minutes, or 1/600
@@ -211,7 +211,7 @@ class BlockchainTest(BitcoinTestFramework):
besthash = node.getbestblockhash()
secondbesthash = node.getblockhash(199)
- header = node.getblockheader(besthash)
+ header = node.getblockheader(blockhash=besthash)
assert_equal(header['hash'], besthash)
assert_equal(header['height'], 200)
@@ -287,7 +287,7 @@ class BlockchainTest(BitcoinTestFramework):
def assert_waitforheight(height, timeout=2):
assert_equal(
- node.waitforblockheight(height, timeout)['height'],
+ node.waitforblockheight(height=height, timeout=timeout)['height'],
current_height)
assert_waitforheight(0)
diff --git a/test/functional/rpc_fundrawtransaction.py b/test/functional/rpc_fundrawtransaction.py
index 0c61e9ab62..4f350953b2 100755
--- a/test/functional/rpc_fundrawtransaction.py
+++ b/test/functional/rpc_fundrawtransaction.py
@@ -32,7 +32,7 @@ class RawTransactionsTest(BitcoinTestFramework):
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
- def setup_network(self, split=False):
+ def setup_network(self):
self.setup_nodes()
connect_nodes_bi(self.nodes, 0, 1)
diff --git a/test/functional/rpc_help.py b/test/functional/rpc_help.py
index be096af892..78d6e78aed 100755
--- a/test/functional/rpc_help.py
+++ b/test/functional/rpc_help.py
@@ -7,12 +7,18 @@
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, assert_raises_rpc_error
+import os
+
class HelpRpcTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
def run_test(self):
+ self.test_categories()
+ self.dump_help()
+
+ def test_categories(self):
node = self.nodes[0]
# wrong argument count
@@ -37,6 +43,15 @@ class HelpRpcTest(BitcoinTestFramework):
assert_equal(titles, components)
+ def dump_help(self):
+ dump_dir = os.path.join(self.options.tmpdir, 'rpc_help_dump')
+ os.mkdir(dump_dir)
+ calls = [line.split(' ', 1)[0] for line in self.nodes[0].help().splitlines() if line and not line.startswith('==')]
+ for call in calls:
+ with open(os.path.join(dump_dir, call), 'w', encoding='utf-8') as f:
+ # Make sure the node can generate the help at runtime without crashing
+ f.write(self.nodes[0].help(call))
+
if __name__ == '__main__':
HelpRpcTest().main()
diff --git a/test/functional/rpc_invalidateblock.py b/test/functional/rpc_invalidateblock.py
index d8ecdd573a..d8a1deb2a3 100755
--- a/test/functional/rpc_invalidateblock.py
+++ b/test/functional/rpc_invalidateblock.py
@@ -4,10 +4,15 @@
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the invalidateblock RPC."""
-import time
-
from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import assert_equal, connect_nodes_bi, sync_blocks
+from test_framework.address import ADDRESS_BCRT1_UNSPENDABLE
+from test_framework.util import (
+ assert_equal,
+ connect_nodes_bi,
+ sync_blocks,
+ wait_until,
+)
+
class InvalidateTest(BitcoinTestFramework):
def set_test_params(self):
@@ -21,46 +26,66 @@ class InvalidateTest(BitcoinTestFramework):
self.log.info("Make sure we repopulate setBlockIndexCandidates after InvalidateBlock:")
self.log.info("Mine 4 blocks on Node 0")
self.nodes[0].generatetoaddress(4, self.nodes[0].get_deterministic_priv_key().address)
- assert(self.nodes[0].getblockcount() == 4)
- besthash = self.nodes[0].getbestblockhash()
+ assert_equal(self.nodes[0].getblockcount(), 4)
+ besthash_n0 = self.nodes[0].getbestblockhash()
self.log.info("Mine competing 6 blocks on Node 1")
self.nodes[1].generatetoaddress(6, self.nodes[1].get_deterministic_priv_key().address)
- assert(self.nodes[1].getblockcount() == 6)
+ assert_equal(self.nodes[1].getblockcount(), 6)
self.log.info("Connect nodes to force a reorg")
- connect_nodes_bi(self.nodes,0,1)
+ connect_nodes_bi(self.nodes, 0, 1)
sync_blocks(self.nodes[0:2])
- assert(self.nodes[0].getblockcount() == 6)
+ assert_equal(self.nodes[0].getblockcount(), 6)
badhash = self.nodes[1].getblockhash(2)
self.log.info("Invalidate block 2 on node 0 and verify we reorg to node 0's original chain")
self.nodes[0].invalidateblock(badhash)
- newheight = self.nodes[0].getblockcount()
- newhash = self.nodes[0].getbestblockhash()
- if (newheight != 4 or newhash != besthash):
- raise AssertionError("Wrong tip for node0, hash %s, height %d"%(newhash,newheight))
+ assert_equal(self.nodes[0].getblockcount(), 4)
+ assert_equal(self.nodes[0].getbestblockhash(), besthash_n0)
self.log.info("Make sure we won't reorg to a lower work chain:")
- connect_nodes_bi(self.nodes,1,2)
+ connect_nodes_bi(self.nodes, 1, 2)
self.log.info("Sync node 2 to node 1 so both have 6 blocks")
sync_blocks(self.nodes[1:3])
- assert(self.nodes[2].getblockcount() == 6)
+ assert_equal(self.nodes[2].getblockcount(), 6)
self.log.info("Invalidate block 5 on node 1 so its tip is now at 4")
self.nodes[1].invalidateblock(self.nodes[1].getblockhash(5))
- assert(self.nodes[1].getblockcount() == 4)
+ assert_equal(self.nodes[1].getblockcount(), 4)
self.log.info("Invalidate block 3 on node 2, so its tip is now 2")
self.nodes[2].invalidateblock(self.nodes[2].getblockhash(3))
- assert(self.nodes[2].getblockcount() == 2)
+ assert_equal(self.nodes[2].getblockcount(), 2)
self.log.info("..and then mine a block")
self.nodes[2].generatetoaddress(1, self.nodes[2].get_deterministic_priv_key().address)
self.log.info("Verify all nodes are at the right height")
- time.sleep(5)
- assert_equal(self.nodes[2].getblockcount(), 3)
- assert_equal(self.nodes[0].getblockcount(), 4)
- node1height = self.nodes[1].getblockcount()
- if node1height < 4:
- raise AssertionError("Node 1 reorged to a lower height: %d"%node1height)
+ wait_until(lambda: self.nodes[2].getblockcount() == 3, timeout=5)
+ wait_until(lambda: self.nodes[0].getblockcount() == 4, timeout=5)
+ wait_until(lambda: self.nodes[1].getblockcount() == 4, timeout=5)
+
+ self.log.info("Verify that we reconsider all ancestors as well")
+ blocks = self.nodes[1].generatetoaddress(10, ADDRESS_BCRT1_UNSPENDABLE)
+ assert_equal(self.nodes[1].getbestblockhash(), blocks[-1])
+ # Invalidate the two blocks at the tip
+ self.nodes[1].invalidateblock(blocks[-1])
+ self.nodes[1].invalidateblock(blocks[-2])
+ assert_equal(self.nodes[1].getbestblockhash(), blocks[-3])
+ # Reconsider only the previous tip
+ self.nodes[1].reconsiderblock(blocks[-1])
+ # Should be back at the tip by now
+ assert_equal(self.nodes[1].getbestblockhash(), blocks[-1])
+
+ self.log.info("Verify that we reconsider all descendants")
+ blocks = self.nodes[1].generatetoaddress(10, ADDRESS_BCRT1_UNSPENDABLE)
+ assert_equal(self.nodes[1].getbestblockhash(), blocks[-1])
+ # Invalidate the two blocks at the tip
+ self.nodes[1].invalidateblock(blocks[-2])
+ self.nodes[1].invalidateblock(blocks[-4])
+ assert_equal(self.nodes[1].getbestblockhash(), blocks[-5])
+ # Reconsider only the previous tip
+ self.nodes[1].reconsiderblock(blocks[-4])
+ # Should be back at the tip by now
+ assert_equal(self.nodes[1].getbestblockhash(), blocks[-1])
+
if __name__ == '__main__':
InvalidateTest().main()
diff --git a/test/functional/rpc_net.py b/test/functional/rpc_net.py
index 1e525214fa..b12eb1d9ec 100755
--- a/test/functional/rpc_net.py
+++ b/test/functional/rpc_net.py
@@ -67,19 +67,19 @@ class NetTest(BitcoinTestFramework):
peer_info_after_ping = self.nodes[0].getpeerinfo()
for before, after in zip(peer_info, peer_info_after_ping):
- assert_greater_than_or_equal(after['bytesrecv_per_msg']['pong'], before['bytesrecv_per_msg']['pong'] + 32)
- assert_greater_than_or_equal(after['bytessent_per_msg']['ping'], before['bytessent_per_msg']['ping'] + 32)
+ assert_greater_than_or_equal(after['bytesrecv_per_msg'].get('pong', 0), before['bytesrecv_per_msg'].get('pong', 0) + 32)
+ assert_greater_than_or_equal(after['bytessent_per_msg'].get('ping', 0), before['bytessent_per_msg'].get('ping', 0) + 32)
def _test_getnetworkinginfo(self):
assert_equal(self.nodes[0].getnetworkinfo()['networkactive'], True)
assert_equal(self.nodes[0].getnetworkinfo()['connections'], 2)
- self.nodes[0].setnetworkactive(False)
+ self.nodes[0].setnetworkactive(state=False)
assert_equal(self.nodes[0].getnetworkinfo()['networkactive'], False)
# Wait a bit for all sockets to close
wait_until(lambda: self.nodes[0].getnetworkinfo()['connections'] == 0, timeout=3)
- self.nodes[0].setnetworkactive(True)
+ self.nodes[0].setnetworkactive(state=True)
connect_nodes_bi(self.nodes, 0, 1)
assert_equal(self.nodes[0].getnetworkinfo()['networkactive'], True)
assert_equal(self.nodes[0].getnetworkinfo()['connections'], 2)
@@ -88,7 +88,7 @@ class NetTest(BitcoinTestFramework):
assert_equal(self.nodes[0].getaddednodeinfo(), [])
# add a node (node2) to node0
ip_port = "127.0.0.1:{}".format(p2p_port(2))
- self.nodes[0].addnode(ip_port, 'add')
+ self.nodes[0].addnode(node=ip_port, command='add')
# check that the node has indeed been added
added_nodes = self.nodes[0].getaddednodeinfo(ip_port)
assert_equal(len(added_nodes), 1)
diff --git a/test/functional/rpc_psbt.py b/test/functional/rpc_psbt.py
index fca910bf64..272ebe65cb 100755
--- a/test/functional/rpc_psbt.py
+++ b/test/functional/rpc_psbt.py
@@ -146,6 +146,9 @@ class PSBTTest(BitcoinTestFramework):
# Make sure that a psbt with signatures cannot be converted
signedtx = self.nodes[0].signrawtransactionwithwallet(rawtx['hex'])
assert_raises_rpc_error(-22, "TX decode failed", self.nodes[0].converttopsbt, signedtx['hex'])
+ assert_raises_rpc_error(-22, "TX decode failed", self.nodes[0].converttopsbt, signedtx['hex'], False)
+ # Unless we allow it to convert and strip signatures
+ self.nodes[0].converttopsbt(signedtx['hex'], True)
# Explicitly allow converting non-empty txs
new_psbt = self.nodes[0].converttopsbt(rawtx['hex'])
@@ -207,6 +210,17 @@ class PSBTTest(BitcoinTestFramework):
assert tx_in["sequence"] > MAX_BIP125_RBF_SEQUENCE
assert_equal(decoded_psbt["tx"]["locktime"], 0)
+ # Make sure that, even though the change-address wallet does not have P2SH inner-script
+ # access, the call results in success when attempting BnB coin selection
+ self.nodes[0].walletcreatefundedpsbt([], [{self.nodes[2].getnewaddress():unspent["amount"]+1}], block_height+2, {"changeAddress":self.nodes[1].getnewaddress()}, False)
+
+ # Regression test for 14473 (mishandling of already-signed witness transaction):
+ psbtx_info = self.nodes[0].walletcreatefundedpsbt([{"txid":unspent["txid"], "vout":unspent["vout"]}], [{self.nodes[2].getnewaddress():unspent["amount"]+1}])
+ complete_psbt = self.nodes[0].walletprocesspsbt(psbtx_info["psbt"])
+ double_processed_psbt = self.nodes[0].walletprocesspsbt(complete_psbt["psbt"])
+ assert_equal(complete_psbt, double_processed_psbt)
+ # We don't care about the decode result, but decoding must succeed.
+ self.nodes[0].decodepsbt(double_processed_psbt["psbt"])
# BIP 174 Test Vectors
@@ -269,6 +283,10 @@ class PSBTTest(BitcoinTestFramework):
self.test_utxo_conversion()
+ # Test that psbts with p2pkh outputs are created properly
+ p2pkh = self.nodes[0].getnewaddress(address_type='legacy')
+ psbt = self.nodes[1].walletcreatefundedpsbt([], [{p2pkh : 1}], 0, {"includeWatching" : True}, True)
+ self.nodes[0].decodepsbt(psbt['psbt'])
if __name__ == '__main__':
PSBTTest().main()
diff --git a/test/functional/rpc_rawtransaction.py b/test/functional/rpc_rawtransaction.py
index 8ed490f552..5b9dbef68d 100755
--- a/test/functional/rpc_rawtransaction.py
+++ b/test/functional/rpc_rawtransaction.py
@@ -47,7 +47,7 @@ class RawTransactionsTest(BitcoinTestFramework):
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
- def setup_network(self, split=False):
+ def setup_network(self):
super().setup_network()
connect_nodes_bi(self.nodes, 0, 2)
@@ -100,6 +100,8 @@ class RawTransactionsTest(BitcoinTestFramework):
assert_raises_rpc_error(-3, "Amount out of range", self.nodes[0].createrawtransaction, [], {address: -1})
assert_raises_rpc_error(-8, "Invalid parameter, duplicated address: %s" % address, self.nodes[0].createrawtransaction, [], multidict([(address, 1), (address, 1)]))
assert_raises_rpc_error(-8, "Invalid parameter, duplicated address: %s" % address, self.nodes[0].createrawtransaction, [], [{address: 1}, {address: 1}])
+ assert_raises_rpc_error(-8, "Invalid parameter, duplicate key: data", self.nodes[0].createrawtransaction, [], [{"data": 'aa'}, {"data": "bb"}])
+ assert_raises_rpc_error(-8, "Invalid parameter, duplicate key: data", self.nodes[0].createrawtransaction, [], multidict([("data", 'aa'), ("data", "bb")]))
assert_raises_rpc_error(-8, "Invalid parameter, key-value pair must contain exactly one key", self.nodes[0].createrawtransaction, [], [{'a': 1, 'b': 2}])
assert_raises_rpc_error(-8, "Invalid parameter, key-value pair not an object as expected", self.nodes[0].createrawtransaction, [], [['key-value pair1'], ['2']])
@@ -127,19 +129,12 @@ class RawTransactionsTest(BitcoinTestFramework):
bytes_to_hex_str(tx.serialize()),
self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=[{address: 99}, {address2: 99}]),
)
- # Two data outputs
- tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=multidict([('data', '99'), ('data', '99')])))))
- assert_equal(len(tx.vout), 2)
- assert_equal(
- bytes_to_hex_str(tx.serialize()),
- self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=[{'data': '99'}, {'data': '99'}]),
- )
# Multiple mixed outputs
- tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=multidict([(address, 99), ('data', '99'), ('data', '99')])))))
+ tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=multidict([(address, 99), (address2, 99), ('data', '99')])))))
assert_equal(len(tx.vout), 3)
assert_equal(
bytes_to_hex_str(tx.serialize()),
- self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=[{address: 99}, {'data': '99'}, {'data': '99'}]),
+ self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=[{address: 99}, {address2: 99}, {'data': '99'}]),
)
for type in ["bech32", "p2sh-segwit", "legacy"]:
diff --git a/test/functional/rpc_scantxoutset.py b/test/functional/rpc_scantxoutset.py
index 881b839a4e..11b4db6ec5 100755
--- a/test/functional/rpc_scantxoutset.py
+++ b/test/functional/rpc_scantxoutset.py
@@ -10,6 +10,9 @@ from decimal import Decimal
import shutil
import os
+def descriptors(out):
+ return sorted(u['desc'] for u in out['unspents'])
+
class ScantxoutsetTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
@@ -93,5 +96,10 @@ class ScantxoutsetTest(BitcoinTestFramework):
assert_equal(self.nodes[0].scantxoutset("start", [ {"desc": "combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/*)", "range": 1499}])['total_amount'], Decimal("12.288"))
assert_equal(self.nodes[0].scantxoutset("start", [ {"desc": "combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/*)", "range": 1500}])['total_amount'], Decimal("28.672"))
+ # Test the reported descriptors for a few matches
+ assert_equal(descriptors(self.nodes[0].scantxoutset("start", [ {"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/0'/*)", "range": 1499}])), ["pkh([0c5f9a1e/0'/0'/0]026dbd8b2315f296d36e6b6920b1579ca75569464875c7ebe869b536a7d9503c8c)", "pkh([0c5f9a1e/0'/0'/1]033e6f25d76c00bedb3a8993c7d5739ee806397f0529b1b31dda31ef890f19a60c)"])
+ assert_equal(descriptors(self.nodes[0].scantxoutset("start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/0)"])), ["pkh([0c5f9a1e/1/1/0]03e1c5b6e650966971d7e71ef2674f80222752740fc1dfd63bbbd220d2da9bd0fb)"])
+ assert_equal(descriptors(self.nodes[0].scantxoutset("start", [ {"desc": "combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/*)", "range": 1500}])), ['pkh([0c5f9a1e/1/1/0]03e1c5b6e650966971d7e71ef2674f80222752740fc1dfd63bbbd220d2da9bd0fb)', 'pkh([0c5f9a1e/1/1/1500]03832901c250025da2aebae2bfb38d5c703a57ab66ad477f9c578bfbcd78abca6f)', 'pkh([0c5f9a1e/1/1/1]030d820fc9e8211c4169be8530efbc632775d8286167afd178caaf1089b77daba7)'])
+
if __name__ == '__main__':
ScantxoutsetTest().main()
diff --git a/test/functional/test_framework/blocktools.py b/test/functional/test_framework/blocktools.py
index 81cce1167b..6b47cae4c3 100644
--- a/test/functional/test_framework/blocktools.py
+++ b/test/functional/test_framework/blocktools.py
@@ -41,12 +41,15 @@ from .script import (
from .util import assert_equal
from io import BytesIO
+MAX_BLOCK_SIGOPS = 20000
+
# From BIP141
WITNESS_COMMITMENT_HEADER = b"\xaa\x21\xa9\xed"
-def create_block(hashprev, coinbase, ntime=None):
+def create_block(hashprev, coinbase, ntime=None, *, version=1):
"""Create a block (with regtest difficulty)."""
block = CBlock()
+ block.nVersion = version
if ntime is None:
import time
block.nTime = int(time.time() + 600)
diff --git a/test/functional/test_framework/messages.py b/test/functional/test_framework/messages.py
index 92acbb9a09..356a45d6d0 100755
--- a/test/functional/test_framework/messages.py
+++ b/test/functional/test_framework/messages.py
@@ -35,7 +35,6 @@ MY_VERSION = 70014 # past bip-31 for ping/pong
MY_SUBVERSION = b"/python-mininode-tester:0.0.3/"
MY_RELAY = 1 # from version 70001 onwards, fRelay should be appended to version messages (BIP37)
-MAX_INV_SZ = 50000
MAX_LOCATOR_SZ = 101
MAX_BLOCK_BASE_SIZE = 1000000
@@ -58,9 +57,6 @@ MSG_TYPE_MASK = 0xffffffff >> 2
def sha256(s):
return hashlib.new('sha256', s).digest()
-def ripemd160(s):
- return hashlib.new('ripemd160', s).digest()
-
def hash256(s):
return sha256(sha256(s))
@@ -454,6 +450,8 @@ class CTransaction:
if flags != 0:
self.wit.vtxinwit = [CTxInWitness() for i in range(len(self.vin))]
self.wit.deserialize(f)
+ else:
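+ # No witness flag: clear any witness data left over from a previous
+ # deserialization into this object.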
+ self.wit = CTxWitness()
self.nLockTime = struct.unpack("<I", f.read(4))[0]
self.sha256 = None
self.hash = None
@@ -768,7 +766,7 @@ class HeaderAndShortIDs:
self.prefilled_txn = []
self.use_witness = False
- if p2pheaders_and_shortids != None:
+ if p2pheaders_and_shortids is not None:
self.header = p2pheaders_and_shortids.header
self.nonce = p2pheaders_and_shortids.nonce
self.shortids = p2pheaders_and_shortids.shortids
@@ -826,7 +824,7 @@ class BlockTransactionsRequest:
def __init__(self, blockhash=0, indexes = None):
self.blockhash = blockhash
- self.indexes = indexes if indexes != None else []
+ self.indexes = indexes if indexes is not None else []
def deserialize(self, f):
self.blockhash = deser_uint256(f)
@@ -867,7 +865,7 @@ class BlockTransactions:
def __init__(self, blockhash=0, transactions = None):
self.blockhash = blockhash
- self.transactions = transactions if transactions != None else []
+ self.transactions = transactions if transactions is not None else []
def deserialize(self, f):
self.blockhash = deser_uint256(f)
@@ -887,13 +885,12 @@ class BlockTransactions:
class CPartialMerkleTree:
- __slots__ = ("fBad", "nTransactions", "vBits", "vHash")
+ __slots__ = ("nTransactions", "vBits", "vHash")
def __init__(self):
self.nTransactions = 0
self.vHash = []
self.vBits = []
- self.fBad = False
def deserialize(self, f):
self.nTransactions = struct.unpack("<i", f.read(4))[0]
@@ -1057,7 +1054,7 @@ class msg_getdata:
command = b"getdata"
def __init__(self, inv=None):
- self.inv = inv if inv != None else []
+ self.inv = inv if inv is not None else []
def deserialize(self, f):
self.inv = deser_vector(f, CInv)
diff --git a/test/functional/test_framework/mininode.py b/test/functional/test_framework/mininode.py
index 91fde136de..ca5734d67d 100755
--- a/test/functional/test_framework/mininode.py
+++ b/test/functional/test_framework/mininode.py
@@ -207,10 +207,13 @@ class P2PConnection(asyncio.Protocol):
This method takes a P2P payload, builds the P2P header and adds
the message to the send buffer to be sent over the socket."""
+ tmsg = self.build_message(message)
+ self._log_message("send", message)
+ return self.send_raw_message(tmsg)
+
+ def send_raw_message(self, raw_message_bytes):
if not self.is_connected:
raise IOError('Not connected')
- self._log_message("send", message)
- tmsg = self._build_message(message)
def maybe_write():
if not self._transport:
@@ -220,12 +223,12 @@ class P2PConnection(asyncio.Protocol):
# Python 3.4 versions.
if hasattr(self._transport, 'is_closing') and self._transport.is_closing():
return
- self._transport.write(tmsg)
+ self._transport.write(raw_message_bytes)
NetworkThread.network_event_loop.call_soon_threadsafe(maybe_write)
# Class utility methods
- def _build_message(self, message):
+ def build_message(self, message):
"""Build a serialized P2P message"""
command = message.command
data = message.serialize()
@@ -346,7 +349,7 @@ class P2PInterface(P2PConnection):
self.send_message(msg_pong(message.nonce))
def on_verack(self, message):
- self.verack_received = True
+ pass
def on_version(self, message):
assert message.nVersion >= MIN_VERSION_SUPPORTED, "Version {} received. Test framework only supports versions greater than {}".format(message.nVersion, MIN_VERSION_SUPPORTED)
@@ -409,9 +412,9 @@ class P2PInterface(P2PConnection):
# Message sending helper functions
- def send_and_ping(self, message):
+ def send_and_ping(self, message, timeout=60):
self.send_message(message)
- self.sync_with_ping()
+ self.sync_with_ping(timeout=timeout)
# Sync up with the node
def sync_with_ping(self, timeout=60):
@@ -508,14 +511,14 @@ class P2PDataStore(P2PInterface):
if response is not None:
self.send_message(response)
- def send_blocks_and_test(self, blocks, node, *, success=True, request_block=True, reject_reason=None, expect_disconnect=False, timeout=60):
+ def send_blocks_and_test(self, blocks, node, *, success=True, force_send=False, reject_reason=None, expect_disconnect=False, timeout=60):
"""Send blocks to test node and test whether the tip advances.
- add all blocks to our block_store
- send a headers message for the final block
- the on_getheaders handler will ensure that any getheaders are responded to
- - if request_block is True: wait for getdata for each of the blocks. The on_getdata handler will
- ensure that any getdata messages are responded to
+ - if force_send is False: wait for getdata for each of the blocks. The on_getdata handler will
+ ensure that any getdata messages are responded to. Otherwise, send the full blocks unsolicited.
- if success is True: assert that the node's tip advances to the most recent block
- if success is False: assert that the node's tip doesn't advance
- if reject_reason is set: assert that the correct reject message is logged"""
@@ -527,9 +530,11 @@ class P2PDataStore(P2PInterface):
reject_reason = [reject_reason] if reject_reason else []
with node.assert_debug_log(expected_msgs=reject_reason):
- self.send_message(msg_headers([CBlockHeader(blocks[-1])]))
-
- if request_block:
+ if force_send:
+ for b in blocks:
+ self.send_message(msg_block(block=b))
+ else:
+ self.send_message(msg_headers([CBlockHeader(blocks[-1])]))
wait_until(lambda: blocks[-1].sha256 in self.getdata_requests, timeout=timeout, lock=mininode_lock)
if expect_disconnect:
diff --git a/test/functional/test_framework/script.py b/test/functional/test_framework/script.py
index 2fe44010ba..012c80a1be 100644
--- a/test/functional/test_framework/script.py
+++ b/test/functional/test_framework/script.py
@@ -385,6 +385,22 @@ class CScriptNum:
r[-1] |= 0x80
return bytes([len(r)]) + r
+ @staticmethod
+ def decode(vch):
+ result = 0
+ # We assume valid push_size and minimal encoding
+ value = vch[1:]
+ if len(value) == 0:
+ return result
+ for i, byte in enumerate(value):
+ result |= int(byte) << 8*i
+ if value[-1] >= 0x80:
+ # Mask for all but the highest result bit
+ num_mask = (2**(len(value)*8) - 1) >> 1
+ result &= num_mask
+ result *= -1
+ return result
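+ # Illustrative round trip (a sketch, not exercised by the framework itself):
+ # CScriptNum.decode(CScriptNum.encode(CScriptNum(-5))) == -5, since encode()
+ # prepends the one-byte push size that decode() strips off via vch[1:].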
+
class CScript(bytes):
"""Serialized script
@@ -434,6 +450,10 @@ class CScript(bytes):
# join makes no sense for a CScript()
raise NotImplementedError
+ # Python 3.4 compatibility
+ def hex(self):
+ return hexlify(self).decode('ascii')
+
def __new__(cls, value=b''):
if isinstance(value, bytes) or isinstance(value, bytearray):
return super(CScript, cls).__new__(cls, value)
diff --git a/test/functional/test_framework/socks5.py b/test/functional/test_framework/socks5.py
index dd0f209268..a21c864e75 100644
--- a/test/functional/test_framework/socks5.py
+++ b/test/functional/test_framework/socks5.py
@@ -54,10 +54,9 @@ class Socks5Command():
return 'Socks5Command(%s,%s,%s,%s,%s,%s)' % (self.cmd, self.atyp, self.addr, self.port, self.username, self.password)
class Socks5Connection():
- def __init__(self, serv, conn, peer):
+ def __init__(self, serv, conn):
self.serv = serv
self.conn = conn
- self.peer = peer
def handle(self):
"""Handle socks5 request according to RFC192."""
@@ -137,9 +136,9 @@ class Socks5Server():
def run(self):
while self.running:
- (sockconn, peer) = self.s.accept()
+ (sockconn, _) = self.s.accept()
if self.running:
- conn = Socks5Connection(self, sockconn, peer)
+ conn = Socks5Connection(self, sockconn)
thread = threading.Thread(None, conn.handle)
thread.daemon = True
thread.start()
diff --git a/test/functional/test_framework/test_framework.py b/test/functional/test_framework/test_framework.py
index 44fc185e6d..352fa32b5b 100755
--- a/test/functional/test_framework/test_framework.py
+++ b/test/functional/test_framework/test_framework.py
@@ -43,6 +43,8 @@ TEST_EXIT_PASSED = 0
TEST_EXIT_FAILED = 1
TEST_EXIT_SKIPPED = 77
+TMPDIR_PREFIX = "bitcoin_func_test_"
+
class SkipTest(Exception):
"""This exception is raised to skip a test"""
@@ -93,7 +95,7 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
self.nodes = []
self.network_thread = None
self.mocktime = 0
- self.rpc_timewait = 60 # Wait for up to 60 seconds for the RPC server to respond
+ self.rpc_timeout = 60 # Wait for up to 60 seconds for the RPC server to respond
self.supports_cli = False
self.bind_to_localhost_only = True
self.set_test_params()
@@ -151,7 +153,7 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
self.options.tmpdir = os.path.abspath(self.options.tmpdir)
os.makedirs(self.options.tmpdir, exist_ok=False)
else:
- self.options.tmpdir = tempfile.mkdtemp(prefix="test")
+ self.options.tmpdir = tempfile.mkdtemp(prefix=TMPDIR_PREFIX)
self._start_logging()
self.log.debug('Setting up network thread')
@@ -279,7 +281,10 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
# Public helper methods. These can be accessed by the subclass test scripts.
def add_nodes(self, num_nodes, extra_args=None, *, rpchost=None, binary=None):
- """Instantiate TestNode objects"""
+ """Instantiate TestNode objects.
+
+ Should only be called once after the nodes have been specified in
+ set_test_params()."""
if self.bind_to_localhost_only:
extra_confs = [["bind=127.0.0.1"]] * num_nodes
else:
@@ -292,7 +297,19 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
assert_equal(len(extra_args), num_nodes)
assert_equal(len(binary), num_nodes)
for i in range(num_nodes):
- self.nodes.append(TestNode(i, get_datadir_path(self.options.tmpdir, i), rpchost=rpchost, timewait=self.rpc_timewait, bitcoind=binary[i], bitcoin_cli=self.options.bitcoincli, mocktime=self.mocktime, coverage_dir=self.options.coveragedir, extra_conf=extra_confs[i], extra_args=extra_args[i], use_cli=self.options.usecli))
+ self.nodes.append(TestNode(
+ i,
+ get_datadir_path(self.options.tmpdir, i),
+ rpchost=rpchost,
+ timewait=self.rpc_timeout,
+ bitcoind=binary[i],
+ bitcoin_cli=self.options.bitcoincli,
+ mocktime=self.mocktime,
+ coverage_dir=self.options.coveragedir,
+ extra_conf=extra_confs[i],
+ extra_args=extra_args[i],
+ use_cli=self.options.usecli,
+ ))
def start_node(self, i, *args, **kwargs):
"""Start a bitcoind"""
@@ -325,16 +342,16 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
for node in self.nodes:
coverage.write_all_rpc_commands(self.options.coveragedir, node.rpc)
- def stop_node(self, i, expected_stderr=''):
+ def stop_node(self, i, expected_stderr='', wait=0):
"""Stop a bitcoind test node"""
- self.nodes[i].stop_node(expected_stderr)
+ self.nodes[i].stop_node(expected_stderr, wait=wait)
self.nodes[i].wait_until_stopped()
- def stop_nodes(self):
+ def stop_nodes(self, wait=0):
"""Stop multiple bitcoind test nodes"""
for node in self.nodes:
# Issue RPC to stop nodes
- node.stop_node()
+ node.stop_node(wait=wait)
for node in self.nodes:
# Wait for nodes to stop
@@ -371,21 +388,6 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
sync_blocks(group)
sync_mempools(group)
- def enable_mocktime(self):
- """Enable mocktime for the script.
-
- mocktime may be needed for scripts that use the cached version of the
- blockchain. If the cached version of the blockchain is used without
- mocktime then the mempools will not sync due to IBD.
-
- For backward compatibility of the python scripts with previous
- versions of the cache, this helper function sets mocktime to Jan 1,
- 2014 + (201 * 10 * 60)"""
- self.mocktime = 1388534400 + (201 * 10 * 60)
-
- def disable_mocktime(self):
- self.mocktime = 0
-
# Private helper methods. These should not be accessed by the subclass test scripts.
def _start_logging(self):
@@ -443,7 +445,18 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
args = [self.options.bitcoind, "-datadir=" + datadir, '-disablewallet']
if i > 0:
args.append("-connect=127.0.0.1:" + str(p2p_port(0)))
- self.nodes.append(TestNode(i, get_datadir_path(self.options.cachedir, i), extra_conf=["bind=127.0.0.1"], extra_args=[], rpchost=None, timewait=self.rpc_timewait, bitcoind=self.options.bitcoind, bitcoin_cli=self.options.bitcoincli, mocktime=self.mocktime, coverage_dir=None))
+ self.nodes.append(TestNode(
+ i,
+ get_datadir_path(self.options.cachedir, i),
+ extra_conf=["bind=127.0.0.1"],
+ extra_args=[],
+ rpchost=None,
+ timewait=self.rpc_timeout,
+ bitcoind=self.options.bitcoind,
+ bitcoin_cli=self.options.bitcoincli,
+ mocktime=self.mocktime,
+ coverage_dir=None,
+ ))
self.nodes[i].args = args
self.start_node(i)
@@ -451,6 +464,11 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
for node in self.nodes:
node.wait_for_rpc_connection()
+ # For backward compatibility of the python scripts with previous
+ # versions of the cache, set mocktime to Jan 1,
+ # 2014 + (201 * 10 * 60)
+ self.mocktime = 1388534400 + (201 * 10 * 60)
+
# Create a 200-block-long chain; each of the 4 first nodes
# gets 25 mature blocks and 25 immature.
# Note: To preserve compatibility with older versions of
@@ -458,7 +476,6 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
#
# blocks are created with timestamps 10 minutes apart
# starting from 2010 minutes in the past
- self.enable_mocktime()
block_time = self.mocktime - (201 * 10 * 60)
for i in range(2):
for peer in range(4):
@@ -472,7 +489,7 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
# Shut them down, and clean up cache directories:
self.stop_nodes()
self.nodes = []
- self.disable_mocktime()
+ self.mocktime = 0
def cache_path(n, *paths):
return os.path.join(get_datadir_path(self.options.cachedir, n), "regtest", *paths)
diff --git a/test/functional/test_framework/test_node.py b/test/functional/test_framework/test_node.py
index 3a6107bb37..031a8824b1 100755
--- a/test/functional/test_framework/test_node.py
+++ b/test/functional/test_framework/test_node.py
@@ -68,7 +68,7 @@ class TestNode():
self.rpc_timeout = timewait
self.binary = bitcoind
self.coverage_dir = coverage_dir
- if extra_conf != None:
+ if extra_conf is not None:
append_config(datadir, extra_conf)
# Most callers will just need to add extra args to the standard list below.
# For those callers that need more flexibility, they can just set the args property directly.
@@ -115,6 +115,25 @@ class TestNode():
]
return PRIV_KEYS[self.index]
+ def get_mem_rss_kilobytes(self):
+ """Get the memory usage (RSS) per `ps`.
+
+ Returns None if `ps` is unavailable.
+ """
+ assert self.running
+
+ try:
+ return int(subprocess.check_output(
+ ["ps", "h", "-o", "rss", "{}".format(self.process.pid)],
+ stderr=subprocess.DEVNULL).split()[-1])
+
+ # Avoid failing on platforms where ps isn't installed.
+ #
+ # We could later use something like `psutil` to work across platforms.
+ except (FileNotFoundError, subprocess.SubprocessError):
+ self.log.exception("Unable to get memory usage")
+ return None
+
def _node_msg(self, msg: str) -> str:
"""Return a modified msg that identifies this node by its index as a debugging aid."""
return "[node %d] %s" % (self.index, msg)
@@ -209,13 +228,13 @@ class TestNode():
wallet_path = "wallet/{}".format(urllib.parse.quote(wallet_name))
return self.rpc / wallet_path
- def stop_node(self, expected_stderr=''):
+ def stop_node(self, expected_stderr='', wait=0):
"""Stop the node."""
if not self.running:
return
self.log.debug("Stopping node")
try:
- self.stop()
+ self.stop(wait=wait)
except http.client.CannotSendRequest:
self.log.exception("Unable to stop node.")
@@ -271,6 +290,33 @@ class TestNode():
if re.search(re.escape(expected_msg), log, flags=re.MULTILINE) is None:
self._raise_assertion_error('Expected message "{}" does not partially match log:\n\n{}\n\n'.format(expected_msg, print_log))
+ @contextlib.contextmanager
+ def assert_memory_usage_stable(self, *, increase_allowed=0.03):
+ """Context manager that allows the user to assert that a node's memory usage (RSS)
+ hasn't increased beyond some threshold percentage.
+
+ Args:
+ increase_allowed (float): the fractional increase in memory allowed until failure;
+ e.g. `0.12` for up to 12% increase allowed.
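+
+ Example (illustrative usage only; the workload name is hypothetical):
+ with node.assert_memory_usage_stable(increase_allowed=0.5):
+ send_lots_of_big_messages()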
+ """
+ before_memory_usage = self.get_mem_rss_kilobytes()
+
+ yield
+
+ after_memory_usage = self.get_mem_rss_kilobytes()
+
+ if not (before_memory_usage and after_memory_usage):
+ self.log.warning("Unable to detect memory usage (RSS) - skipping memory check.")
+ return
+
+ perc_increase_memory_usage = (after_memory_usage / before_memory_usage) - 1
+
+ if perc_increase_memory_usage > increase_allowed:
+ self._raise_assertion_error(
+ "Memory usage increased over threshold of {:.3f}% from {} to {} ({:.3f}%)".format(
+ increase_allowed * 100, before_memory_usage, after_memory_usage,
+ perc_increase_memory_usage * 100))
+
def assert_start_raises_init_error(self, extra_args=None, expected_msg=None, match=ErrorMatch.FULL_TEXT, *args, **kwargs):
"""Attempt to start the node and expect it to raise an error.
diff --git a/test/functional/test_framework/util.py b/test/functional/test_framework/util.py
index b355816d8b..d0a78d8dfd 100644
--- a/test/functional/test_framework/util.py
+++ b/test/functional/test_framework/util.py
@@ -326,7 +326,7 @@ def get_auth_cookie(datadir):
if line.startswith("rpcpassword="):
assert password is None # Ensure that there is only one rpcpassword line
password = line.split("=")[1].strip("\n")
- if os.path.isfile(os.path.join(datadir, "regtest", ".cookie")):
+ if os.path.isfile(os.path.join(datadir, "regtest", ".cookie")) and os.access(os.path.join(datadir, "regtest", ".cookie"), os.R_OK):
with open(os.path.join(datadir, "regtest", ".cookie"), 'r', encoding="ascii") as f:
userpass = f.read()
split_userpass = userpass.split(':')
diff --git a/test/functional/test_framework/wallet_util.py b/test/functional/test_framework/wallet_util.py
new file mode 100755
index 0000000000..c0dfa4c3f0
--- /dev/null
+++ b/test/functional/test_framework/wallet_util.py
@@ -0,0 +1,99 @@
+#!/usr/bin/env python3
+# Copyright (c) 2018 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Useful util functions for testing the wallet"""
+from collections import namedtuple
+
+from test_framework.address import (
+ key_to_p2pkh,
+ key_to_p2sh_p2wpkh,
+ key_to_p2wpkh,
+ script_to_p2sh,
+ script_to_p2sh_p2wsh,
+ script_to_p2wsh,
+)
+from test_framework.script import (
+ CScript,
+ OP_0,
+ OP_2,
+ OP_3,
+ OP_CHECKMULTISIG,
+ OP_CHECKSIG,
+ OP_DUP,
+ OP_EQUAL,
+ OP_EQUALVERIFY,
+ OP_HASH160,
+ hash160,
+ sha256,
+)
+from test_framework.util import hex_str_to_bytes
+
+Key = namedtuple('Key', ['privkey',
+ 'pubkey',
+ 'p2pkh_script',
+ 'p2pkh_addr',
+ 'p2wpkh_script',
+ 'p2wpkh_addr',
+ 'p2sh_p2wpkh_script',
+ 'p2sh_p2wpkh_redeem_script',
+ 'p2sh_p2wpkh_addr'])
+
+Multisig = namedtuple('Multisig', ['privkeys',
+ 'pubkeys',
+ 'p2sh_script',
+ 'p2sh_addr',
+ 'redeem_script',
+ 'p2wsh_script',
+ 'p2wsh_addr',
+ 'p2sh_p2wsh_script',
+ 'p2sh_p2wsh_addr'])
+
+def get_key(node):
+ """Generate a fresh key on node
+
+ Returns a named tuple of privkey, pubkey and all address and scripts."""
+ addr = node.getnewaddress()
+ pubkey = node.getaddressinfo(addr)['pubkey']
+ pkh = hash160(hex_str_to_bytes(pubkey))
+ return Key(privkey=node.dumpprivkey(addr),
+ pubkey=pubkey,
+ p2pkh_script=CScript([OP_DUP, OP_HASH160, pkh, OP_EQUALVERIFY, OP_CHECKSIG]).hex(),
+ p2pkh_addr=key_to_p2pkh(pubkey),
+ p2wpkh_script=CScript([OP_0, pkh]).hex(),
+ p2wpkh_addr=key_to_p2wpkh(pubkey),
+ p2sh_p2wpkh_script=CScript([OP_HASH160, hash160(CScript([OP_0, pkh])), OP_EQUAL]).hex(),
+ p2sh_p2wpkh_redeem_script=CScript([OP_0, pkh]).hex(),
+ p2sh_p2wpkh_addr=key_to_p2sh_p2wpkh(pubkey))
+
+def get_multisig(node):
+ """Generate a fresh 2-of-3 multisig on node
+
+ Returns a named tuple of privkeys, pubkeys and all address and scripts."""
+ addrs = []
+ pubkeys = []
+ for _ in range(3):
+ addr = node.getaddressinfo(node.getnewaddress())
+ addrs.append(addr['address'])
+ pubkeys.append(addr['pubkey'])
+ script_code = CScript([OP_2] + [hex_str_to_bytes(pubkey) for pubkey in pubkeys] + [OP_3, OP_CHECKMULTISIG])
+ witness_script = CScript([OP_0, sha256(script_code)])
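+ # Note: script_code above is the bare 2-of-3 CHECKMULTISIG redeem script; witness_script
+ # as built here is the corresponding P2WSH scriptPubKey (OP_0 <sha256(redeem script)>).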
+ return Multisig(privkeys=[node.dumpprivkey(addr) for addr in addrs],
+ pubkeys=pubkeys,
+ p2sh_script=CScript([OP_HASH160, hash160(script_code), OP_EQUAL]).hex(),
+ p2sh_addr=script_to_p2sh(script_code),
+ redeem_script=script_code.hex(),
+ p2wsh_script=witness_script.hex(),
+ p2wsh_addr=script_to_p2wsh(script_code),
+ p2sh_p2wsh_script=CScript([OP_HASH160, witness_script, OP_EQUAL]).hex(),
+ p2sh_p2wsh_addr=script_to_p2sh_p2wsh(script_code))
+
+def test_address(node, address, **kwargs):
+ """Get address info for `address` and test whether the returned values are as expected."""
+ addr_info = node.getaddressinfo(address)
+ for key, value in kwargs.items():
+ if value is None:
+ if key in addr_info.keys():
+ raise AssertionError("key {} unexpectedly returned in getaddressinfo.".format(key))
+ elif addr_info[key] != value:
+ raise AssertionError("key {} value {} did not match expected value {}".format(key, addr_info[key], value))
diff --git a/test/functional/test_runner.py b/test/functional/test_runner.py
index 8cbc9655c6..8c6f6706e7 100755
--- a/test/functional/test_runner.py
+++ b/test/functional/test_runner.py
@@ -120,6 +120,7 @@ BASE_SCRIPTS = [
'wallet_disableprivatekeys.py',
'wallet_disableprivatekeys.py --usecli',
'interface_http.py',
+ 'interface_rpc.py',
'rpc_psbt.py',
'rpc_users.py',
'feature_proxy.py',
@@ -136,6 +137,7 @@ BASE_SCRIPTS = [
'mining_prioritisetransaction.py',
'p2p_invalid_locator.py',
'p2p_invalid_block.py',
+ 'p2p_invalid_messages.py',
'p2p_invalid_tx.py',
'feature_assumevalid.py',
'example_test.py',
@@ -151,9 +153,11 @@ BASE_SCRIPTS = [
'wallet_importprunedfunds.py',
'p2p_leak_tx.py',
'rpc_signmessage.py',
+ 'wallet_balance.py',
'feature_nulldummy.py',
'mempool_accept.py',
'wallet_import_rescan.py',
+ 'wallet_import_with_label.py',
'rpc_bind.py --ipv4',
'rpc_bind.py --ipv6',
'rpc_bind.py --nonloopback',
@@ -170,8 +174,10 @@ BASE_SCRIPTS = [
'wallet_fallbackfee.py',
'feature_minchainwork.py',
'rpc_getblockstats.py',
+ 'wallet_create_tx.py',
'p2p_fingerprint.py',
'feature_uacomment.py',
+ 'wallet_coinbase_category.py',
'feature_filelock.py',
'p2p_unrequested_blocks.py',
'feature_includeconf.py',
@@ -182,6 +188,7 @@ BASE_SCRIPTS = [
'feature_config_args.py',
'rpc_help.py',
'feature_help.py',
+ 'feature_shutdown.py',
# Don't append tests at the end to avoid merge conflicts
# Put them in a random line within the section that fits their approximate run-time
]
@@ -211,7 +218,7 @@ def main():
epilog='''
Help text and arguments for individual test script:''',
formatter_class=argparse.RawTextHelpFormatter)
- parser.add_argument('--combinedlogslen', '-c', type=int, default=0, help='print a combined log (of length n lines) from all test nodes and test framework to the console on failure.')
+ parser.add_argument('--combinedlogslen', '-c', type=int, default=0, metavar='n', help='On failure, print a log (of length n lines) to the console, combined from the test framework and all test nodes.')
parser.add_argument('--coverage', action='store_true', help='generate a basic coverage report for the RPC interface')
parser.add_argument('--ci', action='store_true', help='Run checks and code that are usually only enabled in a continuous integration environment')
parser.add_argument('--exclude', '-x', help='specify a comma-separated-list of scripts to exclude.')
@@ -269,7 +276,7 @@ def main():
if tests:
# Individual tests have been specified. Run specified tests that exist
# in the ALL_SCRIPTS list. Accept the name with or without .py extension.
- tests = [re.sub("\.py$", "", test) + ".py" for test in tests]
+ tests = [test + ".py" if ".py" not in test else test for test in tests]
for test in tests:
if test in ALL_SCRIPTS:
test_list.append(test)
@@ -364,7 +371,7 @@ def run_tests(*, test_list, src_dir, build_dir, tmpdir, jobs=1, enable_coverage=
tmpdir=tmpdir,
test_list=test_list,
flags=flags,
- timeout_duration=20 * 60 if runs_ci else float('inf'), # in seconds
+ timeout_duration=40 * 60 if runs_ci else float('inf'), # in seconds
)
start_time = time.time()
test_results = []
@@ -634,7 +641,7 @@ class RPCCoverage():
with open(coverage_ref_filename, 'r', encoding="utf8") as coverage_ref_file:
all_cmds.update([line.strip() for line in coverage_ref_file.readlines()])
- for root, dirs, files in os.walk(self.dir):
+ for root, _, files in os.walk(self.dir):
for filename in files:
if filename.startswith(coverage_file_prefix):
coverage_filenames.add(os.path.join(root, filename))
diff --git a/test/functional/wallet_address_types.py b/test/functional/wallet_address_types.py
index 0f75045c9d..bafa556aad 100755
--- a/test/functional/wallet_address_types.py
+++ b/test/functional/wallet_address_types.py
@@ -99,6 +99,8 @@ class AddressTypeTest(BitcoinTestFramework):
"""Run sanity checks on an address."""
info = self.nodes[node].getaddressinfo(address)
assert(self.nodes[node].validateaddress(address)['isvalid'])
+ assert_equal(info.get('solvable'), True)
+
if not multisig and typ == 'legacy':
# P2PKH
assert(not info['isscript'])
@@ -146,6 +148,47 @@ class AddressTypeTest(BitcoinTestFramework):
# Unknown type
assert(False)
+ def test_desc(self, node, address, multisig, typ, utxo):
+ """Run sanity checks on a descriptor reported by getaddressinfo."""
+ info = self.nodes[node].getaddressinfo(address)
+ assert('desc' in info)
+ assert_equal(info['desc'], utxo['desc'])
+ assert(self.nodes[node].validateaddress(address)['isvalid'])
+
+ # Use a ridiculously roundabout way to find the key origin info through
+ # the PSBT logic. However, this does test consistency between the PSBT reported
+ # fingerprints/paths and the descriptor logic.
+ psbt = self.nodes[node].createpsbt([{'txid':utxo['txid'], 'vout':utxo['vout']}],[{address:0.00010000}])
+ psbt = self.nodes[node].walletprocesspsbt(psbt, False, "ALL", True)
+ decode = self.nodes[node].decodepsbt(psbt['psbt'])
+ key_descs = {}
+ for deriv in decode['inputs'][0]['bip32_derivs']:
+ assert_equal(len(deriv['master_fingerprint']), 8)
+ assert_equal(deriv['path'][0], 'm')
+ key_descs[deriv['pubkey']] = '[' + deriv['master_fingerprint'] + deriv['path'][1:] + ']' + deriv['pubkey']
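+ # For illustration: each key_descs entry is a descriptor key-origin expression of the
+ # form "[<fingerprint><path>]<pubkey>", e.g. (hypothetical values)
+ # "[d34db33f/0'/0'/0']03a1b2..." -- the fingerprint and pubkey shown here are made up.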
+
+ if not multisig and typ == 'legacy':
+ # P2PKH
+ assert_equal(info['desc'], "pkh(%s)" % key_descs[info['pubkey']])
+ elif not multisig and typ == 'p2sh-segwit':
+ # P2SH-P2WPKH
+ assert_equal(info['desc'], "sh(wpkh(%s))" % key_descs[info['pubkey']])
+ elif not multisig and typ == 'bech32':
+ # P2WPKH
+ assert_equal(info['desc'], "wpkh(%s)" % key_descs[info['pubkey']])
+ elif typ == 'legacy':
+ # P2SH-multisig
+ assert_equal(info['desc'], "sh(multi(2,%s,%s))" % (key_descs[info['pubkeys'][0]], key_descs[info['pubkeys'][1]]))
+ elif typ == 'p2sh-segwit':
+ # P2SH-P2WSH-multisig
+ assert_equal(info['desc'], "sh(wsh(multi(2,%s,%s)))" % (key_descs[info['embedded']['pubkeys'][0]], key_descs[info['embedded']['pubkeys'][1]]))
+ elif typ == 'bech32':
+ # P2WSH-multisig
+ assert_equal(info['desc'], "wsh(multi(2,%s,%s))" % (key_descs[info['pubkeys'][0]], key_descs[info['pubkeys'][1]]))
+ else:
+ # Unknown type
+ assert(False)
+
def test_change_output_type(self, node_sender, destinations, expected_type):
txid = self.nodes[node_sender].sendmany(dummy="", amounts=dict.fromkeys(destinations, 0.001))
raw_tx = self.nodes[node_sender].getrawtransaction(txid)
@@ -198,6 +241,7 @@ class AddressTypeTest(BitcoinTestFramework):
self.log.debug("Old balances are {}".format(old_balances))
to_send = (old_balances[from_node] / 101).quantize(Decimal("0.00000001"))
sends = {}
+ addresses = {}
self.log.debug("Prepare sends")
for n, to_node in enumerate(range(from_node, from_node + 4)):
@@ -228,6 +272,7 @@ class AddressTypeTest(BitcoinTestFramework):
# Output entry
sends[address] = to_send * 10 * (1 + n)
+ addresses[to_node] = (address, typ)
self.log.debug("Sending: {}".format(sends))
self.nodes[from_node].sendmany("", sends)
@@ -244,6 +289,17 @@ class AddressTypeTest(BitcoinTestFramework):
self.nodes[5].generate(1)
sync_blocks(self.nodes)
+ # Verify that the receiving wallet contains a UTXO with the expected address, and expected descriptor
+ for n, to_node in enumerate(range(from_node, from_node + 4)):
+ to_node %= 4
+ found = False
+ for utxo in self.nodes[to_node].listunspent():
+ if utxo['address'] == addresses[to_node][0]:
+ found = True
+ self.test_desc(to_node, addresses[to_node][0], multisig, addresses[to_node][1], utxo)
+ break
+ assert found
+
new_balances = self.get_balances()
self.log.debug("Check new balances: {}".format(new_balances))
# We don't know what fee was set, so we can only check bounds on the balance of the sending node
diff --git a/test/functional/wallet_backup.py b/test/functional/wallet_backup.py
index 32ec385fa1..dd3750203a 100755
--- a/test/functional/wallet_backup.py
+++ b/test/functional/wallet_backup.py
@@ -48,7 +48,7 @@ class WalletBackupTest(BitcoinTestFramework):
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
- def setup_network(self, split=False):
+ def setup_network(self):
self.setup_nodes()
connect_nodes(self.nodes[0], 3)
connect_nodes(self.nodes[1], 3)
diff --git a/test/functional/wallet_balance.py b/test/functional/wallet_balance.py
new file mode 100755
index 0000000000..05c97e0340
--- /dev/null
+++ b/test/functional/wallet_balance.py
@@ -0,0 +1,133 @@
+#!/usr/bin/env python3
+# Copyright (c) 2018 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test the wallet balance RPC methods."""
+from decimal import Decimal
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import (
+ assert_equal,
+ assert_raises_rpc_error,
+)
+
+RANDOM_COINBASE_ADDRESS = 'mneYUmWYsuk7kySiURxCi3AGxrAqZxLgPZ'
+
+def create_transactions(node, address, amt, fees):
+ # Create and sign raw transactions from node to address for amt.
+ # Creates a transaction for each fee and returns an array
+ # of the raw transactions.
+ utxos = node.listunspent(0)
+
+ # Create transactions
+ inputs = []
+ ins_total = 0
+ for utxo in utxos:
+ inputs.append({"txid": utxo["txid"], "vout": utxo["vout"]})
+ ins_total += utxo['amount']
+ if ins_total > amt:
+ break
+
+ txs = []
+ for fee in fees:
+ outputs = {address: amt, node.getrawchangeaddress(): ins_total - amt - fee}
+ raw_tx = node.createrawtransaction(inputs, outputs, 0, True)
+ raw_tx = node.signrawtransactionwithwallet(raw_tx)
+ txs.append(raw_tx)
+
+ return txs
+
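+# Usage sketch (illustrative amounts, not the ones used in the test below):
+#   txs = create_transactions(node, addr, Decimal('1'), [Decimal('0.001'), Decimal('0.002')])
+# Every returned transaction spends the same inputs and differs only in fee, which is what
+# lets the test later broadcast the higher-fee variant as a BIP 125 replacement.
+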
+class WalletTest(BitcoinTestFramework):
+ def set_test_params(self):
+ self.num_nodes = 2
+ self.setup_clean_chain = True
+
+ def skip_test_if_missing_module(self):
+ self.skip_if_no_wallet()
+
+ def run_test(self):
+ # Check that nodes don't own any UTXOs
+ assert_equal(len(self.nodes[0].listunspent()), 0)
+ assert_equal(len(self.nodes[1].listunspent()), 0)
+
+ self.log.info("Mining one block for each node")
+
+ self.nodes[0].generate(1)
+ self.sync_all()
+ self.nodes[1].generate(1)
+ self.nodes[1].generatetoaddress(100, RANDOM_COINBASE_ADDRESS)
+ self.sync_all()
+
+ assert_equal(self.nodes[0].getbalance(), 50)
+ assert_equal(self.nodes[1].getbalance(), 50)
+
+ self.log.info("Test getbalance with different arguments")
+ assert_equal(self.nodes[0].getbalance("*"), 50)
+ assert_equal(self.nodes[0].getbalance("*", 1), 50)
+ assert_equal(self.nodes[0].getbalance("*", 1, True), 50)
+ assert_equal(self.nodes[0].getbalance(minconf=1), 50)
+
+ # Send 40 BTC from 0 to 1 and 60 BTC from 1 to 0.
+ txs = create_transactions(self.nodes[0], self.nodes[1].getnewaddress(), 40, [Decimal('0.01')])
+ self.nodes[0].sendrawtransaction(txs[0]['hex'])
+ self.nodes[1].sendrawtransaction(txs[0]['hex']) # sending on both nodes is faster than waiting for propagation
+
+ self.sync_all()
+ txs = create_transactions(self.nodes[1], self.nodes[0].getnewaddress(), 60, [Decimal('0.01'), Decimal('0.02')])
+ self.nodes[1].sendrawtransaction(txs[0]['hex'])
+ self.nodes[0].sendrawtransaction(txs[0]['hex']) # sending on both nodes is faster than waiting for propagation
+ self.sync_all()
+
+ # First argument of getbalance must be set to "*"
+ assert_raises_rpc_error(-32, "dummy first argument must be excluded or set to \"*\"", self.nodes[1].getbalance, "")
+
+ self.log.info("Test getbalance and getunconfirmedbalance with unconfirmed inputs")
+
+ # getbalance without any arguments includes unconfirmed transactions, but not untrusted transactions
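+ # ("untrusted" here means unconfirmed outputs received from other wallets; unconfirmed
+ # change from this wallet's own transactions still counts towards the balance)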
+ assert_equal(self.nodes[0].getbalance(), Decimal('9.99')) # change from node 0's send
+ assert_equal(self.nodes[1].getbalance(), Decimal('29.99')) # change from node 1's send
+ # Same with minconf=0
+ assert_equal(self.nodes[0].getbalance(minconf=0), Decimal('9.99'))
+ assert_equal(self.nodes[1].getbalance(minconf=0), Decimal('29.99'))
+ # getbalance with a minconf argument incorrectly excludes coins that have been spent more recently than minconf blocks ago
+ # TODO: fix getbalance tracking of coin spentness depth
+ assert_equal(self.nodes[0].getbalance(minconf=1), Decimal('0'))
+ assert_equal(self.nodes[1].getbalance(minconf=1), Decimal('0'))
+ # getunconfirmedbalance
+ assert_equal(self.nodes[0].getunconfirmedbalance(), Decimal('60')) # output of node 1's spend
+ assert_equal(self.nodes[1].getunconfirmedbalance(), Decimal('0')) # Doesn't include output of node 0's send since it was spent
+
+ # Node 1 bumps the transaction fee and resends
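+ # txs[0] and txs[1] spend the same inputs (see create_transactions) and were created
+ # with replaceable=True, so the higher-fee txs[1] replaces txs[0] via BIP 125.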
+ self.nodes[1].sendrawtransaction(txs[1]['hex'])
+ self.sync_all()
+
+ self.log.info("Test getbalance and getunconfirmedbalance with conflicted unconfirmed inputs")
+
+ assert_equal(self.nodes[0].getwalletinfo()["unconfirmed_balance"], Decimal('60')) # output of node 1's send
+ assert_equal(self.nodes[0].getunconfirmedbalance(), Decimal('60'))
+ assert_equal(self.nodes[1].getwalletinfo()["unconfirmed_balance"], Decimal('0')) # Doesn't include output of node 0's send since it was spent
+ assert_equal(self.nodes[1].getunconfirmedbalance(), Decimal('0'))
+
+ self.nodes[1].generatetoaddress(1, RANDOM_COINBASE_ADDRESS)
+ self.sync_all()
+
+ # balances are correct after the transactions are confirmed
+ assert_equal(self.nodes[0].getbalance(), Decimal('69.99')) # node 1's send plus change from node 0's send
+ assert_equal(self.nodes[1].getbalance(), Decimal('29.98')) # change from node 1's send
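+ # Worked out: node 0 = 50 (coinbase) - 40 (sent) - 0.01 (fee) + 60 (received) = 69.99,
+ # node 1 = 50 (coinbase) + 40 (received) - 60 (sent) - 0.02 (bumped fee) = 29.98.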
+
+ # Send total balance away from node 1
+ txs = create_transactions(self.nodes[1], self.nodes[0].getnewaddress(), Decimal('29.97'), [Decimal('0.01')])
+ self.nodes[1].sendrawtransaction(txs[0]['hex'])
+ self.nodes[1].generatetoaddress(2, RANDOM_COINBASE_ADDRESS)
+ self.sync_all()
+
+ # getbalance with a minconf argument incorrectly excludes coins that have been spent more recently than minconf blocks ago
+ # TODO: fix getbalance tracking of coin spentness depth
+ # getbalance with minconf=3 should still show the old balance
+ assert_equal(self.nodes[1].getbalance(minconf=3), Decimal('0'))
+
+ # getbalance with minconf=2 will show the new balance.
+ assert_equal(self.nodes[1].getbalance(minconf=2), Decimal('0'))
+
+if __name__ == '__main__':
+ WalletTest().main()
diff --git a/test/functional/wallet_basic.py b/test/functional/wallet_basic.py
index c9b40905f0..7184bb8cb6 100755
--- a/test/functional/wallet_basic.py
+++ b/test/functional/wallet_basic.py
@@ -67,15 +67,6 @@ class WalletTest(BitcoinTestFramework):
assert_equal(self.nodes[1].getbalance(), 50)
assert_equal(self.nodes[2].getbalance(), 0)
- # Check getbalance with different arguments
- assert_equal(self.nodes[0].getbalance("*"), 50)
- assert_equal(self.nodes[0].getbalance("*", 1), 50)
- assert_equal(self.nodes[0].getbalance("*", 1, True), 50)
- assert_equal(self.nodes[0].getbalance(minconf=1), 50)
-
- # first argument of getbalance must be excluded or set to "*"
- assert_raises_rpc_error(-32, "dummy first argument must be excluded or set to \"*\"", self.nodes[0].getbalance, "")
-
# Check that only first and second nodes have UTXOs
utxos = self.nodes[0].listunspent()
assert_equal(len(utxos), 1)
@@ -248,10 +239,6 @@ class WalletTest(BitcoinTestFramework):
assert(txid1 in self.nodes[3].getrawmempool())
- # Exercise balance rpcs
- assert_equal(self.nodes[0].getwalletinfo()["unconfirmed_balance"], 1)
- assert_equal(self.nodes[0].getunconfirmedbalance(), 1)
-
# check if we can list zero value tx as available coins
# 1. create raw_tx
# 2. hex-changed one output to 0.0
diff --git a/test/functional/wallet_coinbase_category.py b/test/functional/wallet_coinbase_category.py
new file mode 100755
index 0000000000..7aa8b44ebd
--- /dev/null
+++ b/test/functional/wallet_coinbase_category.py
@@ -0,0 +1,59 @@
+#!/usr/bin/env python3
+# Copyright (c) 2014-2018 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test coinbase transactions return the correct categories.
+
+Tests listtransactions, listsinceblock, and gettransaction.
+"""
+
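+# The categories checked below follow coinbase maturity: the output is reported as
+# "immature" until it has more than 100 confirmations (101 in this test), then its
+# category becomes "generate", and it becomes "orphan" once its block is invalidated.
+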
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import (
+ assert_array_result
+)
+
+class CoinbaseCategoryTest(BitcoinTestFramework):
+ def set_test_params(self):
+ self.num_nodes = 1
+
+ def skip_test_if_missing_module(self):
+ self.skip_if_no_wallet()
+
+ def assert_category(self, category, address, txid, skip):
+ assert_array_result(self.nodes[0].listtransactions(skip=skip),
+ {"address": address},
+ {"category": category})
+ assert_array_result(self.nodes[0].listsinceblock()["transactions"],
+ {"address": address},
+ {"category": category})
+ assert_array_result(self.nodes[0].gettransaction(txid)["details"],
+ {"address": address},
+ {"category": category})
+
+ def run_test(self):
+ # Generate one block to an address
+ address = self.nodes[0].getnewaddress()
+ self.nodes[0].generatetoaddress(1, address)
+ hash = self.nodes[0].getbestblockhash()
+ txid = self.nodes[0].getblock(hash)["tx"][0]
+
+ # Coinbase transaction is immature after 1 confirmation
+ self.assert_category("immature", address, txid, 0)
+
+ # Mine another 99 blocks on top
+ self.nodes[0].generate(99)
+ # Coinbase transaction is still immature after 100 confirmations
+ self.assert_category("immature", address, txid, 99)
+
+ # Mine one more block
+ self.nodes[0].generate(1)
+ # Coinbase transaction is now matured, so category is "generate"
+ self.assert_category("generate", address, txid, 100)
+
+ # Orphan block that paid to address
+ self.nodes[0].invalidateblock(hash)
+ # Coinbase transaction is now orphaned
+ self.assert_category("orphan", address, txid, 100)
+
+if __name__ == '__main__':
+ CoinbaseCategoryTest().main()
diff --git a/test/functional/wallet_create_tx.py b/test/functional/wallet_create_tx.py
new file mode 100755
index 0000000000..27dc0fb279
--- /dev/null
+++ b/test/functional/wallet_create_tx.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python3
+# Copyright (c) 2018 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import (
+ assert_equal,
+)
+
+
+class CreateTxWalletTest(BitcoinTestFramework):
+ def set_test_params(self):
+ self.setup_clean_chain = False
+ self.num_nodes = 1
+
+ def skip_test_if_missing_module(self):
+ self.skip_if_no_wallet()
+
+ def run_test(self):
+ self.log.info('Check that we have some (old) blocks and that anti-fee-sniping is disabled')
+ assert_equal(self.nodes[0].getblockchaininfo()['blocks'], 200)
+ txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1)
+ tx = self.nodes[0].decoderawtransaction(self.nodes[0].gettransaction(txid)['hex'])
+ assert_equal(tx['locktime'], 0)
+
+ self.log.info('Check that anti-fee-sniping is enabled when we mine a recent block')
+ self.nodes[0].generate(1)
+ txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1)
+ tx = self.nodes[0].decoderawtransaction(self.nodes[0].gettransaction(txid)['hex'])
+ assert 0 < tx['locktime'] <= 201
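+ # Anti-fee-sniping sets nLockTime to roughly the current tip height (201 after the
+ # block mined above), occasionally somewhat lower, hence the bound checked above.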
+
+
+if __name__ == '__main__':
+ CreateTxWalletTest().main()
diff --git a/test/functional/wallet_dump.py b/test/functional/wallet_dump.py
index 20cb816ee8..3f39654bb8 100755
--- a/test/functional/wallet_dump.py
+++ b/test/functional/wallet_dump.py
@@ -94,7 +94,7 @@ class WalletDumpTest(BitcoinTestFramework):
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
- def setup_network(self, split=False):
+ def setup_network(self):
self.add_nodes(self.num_nodes, extra_args=self.extra_args)
self.start_nodes()
diff --git a/test/functional/wallet_encryption.py b/test/functional/wallet_encryption.py
index ab9ebed8d4..c514b7e0b4 100755
--- a/test/functional/wallet_encryption.py
+++ b/test/functional/wallet_encryption.py
@@ -31,12 +31,18 @@ class WalletEncryptionTest(BitcoinTestFramework):
privkey = self.nodes[0].dumpprivkey(address)
assert_equal(privkey[:1], "c")
assert_equal(len(privkey), 52)
+ assert_raises_rpc_error(-15, "Error: running with an unencrypted wallet, but walletpassphrase was called", self.nodes[0].walletpassphrase, 'ff', 1)
+ assert_raises_rpc_error(-15, "Error: running with an unencrypted wallet, but walletpassphrasechange was called.", self.nodes[0].walletpassphrasechange, 'ff', 'ff')
# Encrypt the wallet
+ assert_raises_rpc_error(-8, "passphrase can not be empty", self.nodes[0].encryptwallet, '')
self.nodes[0].encryptwallet(passphrase)
# Test that the wallet is encrypted
assert_raises_rpc_error(-13, "Please enter the wallet passphrase with walletpassphrase first", self.nodes[0].dumpprivkey, address)
+ assert_raises_rpc_error(-15, "Error: running with an encrypted wallet, but encryptwallet was called.", self.nodes[0].encryptwallet, 'ff')
+ assert_raises_rpc_error(-8, "passphrase can not be empty", self.nodes[0].walletpassphrase, '', 1)
+ assert_raises_rpc_error(-8, "passphrase can not be empty", self.nodes[0].walletpassphrasechange, '', 'ff')
# Check that walletpassphrase works
self.nodes[0].walletpassphrase(passphrase, 2)
diff --git a/test/functional/wallet_groups.py b/test/functional/wallet_groups.py
index 9d61483868..5452433acf 100755
--- a/test/functional/wallet_groups.py
+++ b/test/functional/wallet_groups.py
@@ -21,7 +21,7 @@ class WalletGroupTest(BitcoinTestFramework):
self.setup_clean_chain = True
self.num_nodes = 3
self.extra_args = [[], [], ['-avoidpartialspends']]
- self.rpc_timewait = 120
+ self.rpc_timeout = 120
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
diff --git a/test/functional/wallet_import_rescan.py b/test/functional/wallet_import_rescan.py
index 08809a688a..46462a16f3 100755
--- a/test/functional/wallet_import_rescan.py
+++ b/test/functional/wallet_import_rescan.py
@@ -46,11 +46,11 @@ class Variant(collections.namedtuple("Variant", "call data rescan prune")):
if self.call == Call.single:
if self.data == Data.address:
- response = self.try_rpc(self.node.importaddress, address=self.address["address"], rescan=rescan)
+ response = self.try_rpc(self.node.importaddress, address=self.address["address"], label=self.label, rescan=rescan)
elif self.data == Data.pub:
- response = self.try_rpc(self.node.importpubkey, pubkey=self.address["pubkey"], rescan=rescan)
+ response = self.try_rpc(self.node.importpubkey, pubkey=self.address["pubkey"], label=self.label, rescan=rescan)
elif self.data == Data.priv:
- response = self.try_rpc(self.node.importprivkey, privkey=self.key, rescan=rescan)
+ response = self.try_rpc(self.node.importprivkey, privkey=self.key, label=self.label, rescan=rescan)
assert_equal(response, None)
elif self.call in (Call.multiaddress, Call.multiscript):
@@ -61,18 +61,32 @@ class Variant(collections.namedtuple("Variant", "call data rescan prune")):
"timestamp": timestamp + TIMESTAMP_WINDOW + (1 if self.rescan == Rescan.late_timestamp else 0),
"pubkeys": [self.address["pubkey"]] if self.data == Data.pub else [],
"keys": [self.key] if self.data == Data.priv else [],
+ "label": self.label,
"watchonly": self.data != Data.priv
}], {"rescan": self.rescan in (Rescan.yes, Rescan.late_timestamp)})
assert_equal(response, [{"success": True}])
def check(self, txid=None, amount=None, confirmations=None):
- """Verify that listreceivedbyaddress returns expected values."""
+ """Verify that listtransactions/listreceivedbyaddress return expected values."""
+
+ txs = self.node.listtransactions(label=self.label, count=10000, include_watchonly=True)
+ assert_equal(len(txs), self.expected_txs)
addresses = self.node.listreceivedbyaddress(minconf=0, include_watchonly=True, address_filter=self.address['address'])
if self.expected_txs:
assert_equal(len(addresses[0]["txids"]), self.expected_txs)
if txid is not None:
+ tx, = [tx for tx in txs if tx["txid"] == txid]
+ assert_equal(tx["label"], self.label)
+ assert_equal(tx["address"], self.address["address"])
+ assert_equal(tx["amount"], amount)
+ assert_equal(tx["category"], "receive")
+ assert_equal(tx["label"], self.label)
+ assert_equal(tx["txid"], txid)
+ assert_equal(tx["confirmations"], confirmations)
+ assert_equal("trusted" not in tx, True)
+
address, = [ad for ad in addresses if txid in ad["txids"]]
assert_equal(address["address"], self.address["address"])
assert_equal(address["amount"], self.expected_balance)
@@ -134,7 +148,8 @@ class ImportRescanTest(BitcoinTestFramework):
# Create one transaction on node 0 with a unique amount for
# each possible type of wallet import RPC.
for i, variant in enumerate(IMPORT_VARIANTS):
- variant.address = self.nodes[1].getaddressinfo(self.nodes[1].getnewaddress())
+ variant.label = "label {} {}".format(i, variant)
+ variant.address = self.nodes[1].getaddressinfo(self.nodes[1].getnewaddress(variant.label))
variant.key = self.nodes[1].dumpprivkey(variant.address["address"])
variant.initial_amount = 1 - (i + 1) / 64
variant.initial_txid = self.nodes[0].sendtoaddress(variant.address["address"], variant.initial_amount)
diff --git a/test/functional/wallet_import_with_label.py b/test/functional/wallet_import_with_label.py
new file mode 100755
index 0000000000..a623b75606
--- /dev/null
+++ b/test/functional/wallet_import_with_label.py
@@ -0,0 +1,135 @@
+#!/usr/bin/env python3
+# Copyright (c) 2018 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test the behavior of RPC importprivkey on set and unset labels of
+addresses.
+
+It tests different cases in which an address is imported with importaddress
+with or without a label and then its private key is imported with importprivkey
+with and without a label.
+"""
+
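+# Cases exercised below and the label each imported address ends up with:
+#   importaddress(label) then importprivkey()       -> keeps the importaddress label
+#   importaddress()      then importprivkey(label)  -> takes the importprivkey label
+#   importaddress(label) then importprivkey(label2) -> takes the importprivkey label
+#   importaddress(label) then importprivkey()       -> keeps its label, while other
+#       destinations derived from the same key get an empty label
+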
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.wallet_util import test_address
+
+
+class ImportWithLabel(BitcoinTestFramework):
+ def set_test_params(self):
+ self.num_nodes = 2
+ self.setup_clean_chain = True
+
+ def skip_test_if_missing_module(self):
+ self.skip_if_no_wallet()
+
+ def run_test(self):
+ """Main test logic"""
+
+ self.log.info(
+ "Test importaddress with label and importprivkey without label."
+ )
+ self.log.info("Import a watch-only address with a label.")
+ address = self.nodes[0].getnewaddress()
+ label = "Test Label"
+ self.nodes[1].importaddress(address, label)
+ test_address(self.nodes[1],
+ address,
+ iswatchonly=True,
+ ismine=False,
+ label=label)
+
+ self.log.info(
+ "Import the watch-only address's private key without a "
+ "label and the address should keep its label."
+ )
+ priv_key = self.nodes[0].dumpprivkey(address)
+ self.nodes[1].importprivkey(priv_key)
+
+ test_address(self.nodes[1],
+ address,
+ label=label)
+
+ self.log.info(
+ "Test importaddress without label and importprivkey with label."
+ )
+ self.log.info("Import a watch-only address without a label.")
+ address2 = self.nodes[0].getnewaddress()
+ self.nodes[1].importaddress(address2)
+ test_address(self.nodes[1],
+ address2,
+ iswatchonly=True,
+ ismine=False,
+ label="")
+
+ self.log.info(
+ "Import the watch-only address's private key with a "
+ "label and the address should have its label updated."
+ )
+ priv_key2 = self.nodes[0].dumpprivkey(address2)
+ label2 = "Test Label 2"
+ self.nodes[1].importprivkey(priv_key2, label2)
+
+ test_address(self.nodes[1],
+ address2,
+ label=label2)
+
+ self.log.info("Test importaddress with label and importprivkey with label.")
+ self.log.info("Import a watch-only address with a label.")
+ address3 = self.nodes[0].getnewaddress()
+ label3_addr = "Test Label 3 for importaddress"
+ self.nodes[1].importaddress(address3, label3_addr)
+ test_address(self.nodes[1],
+ address3,
+ iswatchonly=True,
+ ismine=False,
+ label=label3_addr)
+
+ self.log.info(
+ "Import the watch-only address's private key with a "
+ "label and the address should have its label updated."
+ )
+ priv_key3 = self.nodes[0].dumpprivkey(address3)
+ label3_priv = "Test Label 3 for importprivkey"
+ self.nodes[1].importprivkey(priv_key3, label3_priv)
+
+ test_address(self.nodes[1],
+ address3,
+ label=label3_priv)
+
+ self.log.info(
+ "Test importprivkey won't label new dests with the same "
+ "label as others labeled dests for the same key."
+ )
+ self.log.info("Import a watch-only legacy address with a label.")
+ address4 = self.nodes[0].getnewaddress()
+ label4_addr = "Test Label 4 for importaddress"
+ self.nodes[1].importaddress(address4, label4_addr)
+ test_address(self.nodes[1],
+ address4,
+ iswatchonly=True,
+ ismine=False,
+ label=label4_addr,
+ embedded=None)
+
+ self.log.info(
+ "Import the watch-only address's private key without a "
+ "label and new destinations for the key should have an "
+ "empty label while the 'old' destination should keep "
+ "its label."
+ )
+ priv_key4 = self.nodes[0].dumpprivkey(address4)
+ self.nodes[1].importprivkey(priv_key4)
+ embedded_addr = self.nodes[1].getaddressinfo(address4)['embedded']['address']
+
+ test_address(self.nodes[1],
+ embedded_addr,
+ label="")
+ test_address(self.nodes[1],
+ address4,
+ label=label4_addr)
+
+ self.stop_nodes()
+
+
+if __name__ == "__main__":
+ ImportWithLabel().main()
diff --git a/test/functional/wallet_importmulti.py b/test/functional/wallet_importmulti.py
index 9ba6860306..f122f19e3a 100755
--- a/test/functional/wallet_importmulti.py
+++ b/test/functional/wallet_importmulti.py
@@ -2,23 +2,35 @@
# Copyright (c) 2014-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
-"""Test the importmulti RPC."""
+"""Test the importmulti RPC.
-from test_framework import script
+Test importmulti by generating keys on node0, importing the scriptPubKeys and
+addresses on node1 and then testing the address info for the different address
+variants.
+
+- `get_key()` and `get_multisig()` are called to generate keys on node0 and
+ return the privkeys, pubkeys and all variants of scriptPubKey and address.
+- `test_importmulti()` is called to send an importmulti call to node1, test
+ success, and (if unsuccessful) test the error code and error message returned.
+- `test_address()` is called to call getaddressinfo for an address on node1
+ and test the values returned."""
+
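+# A request passed to test_importmulti() below is a dict of the form (placeholder values):
+#   {"scriptPubKey": {"address": "<addr>"} or "<hex scriptPubKey>", "timestamp": "now", ...}
+# with optional "pubkeys", "keys", "redeemscript", "witnessscript", "internal", "label"
+# and "watchonly" fields controlling how the import is performed.
+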
+from test_framework.script import (
+ CScript,
+ OP_NOP,
+)
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_greater_than,
assert_raises_rpc_error,
bytes_to_hex_str,
- hex_str_to_bytes
)
-from test_framework.script import (
- CScript,
- OP_0,
- hash160
+from test_framework.wallet_util import (
+ get_key,
+ get_multisig,
+ test_address,
)
-from test_framework.messages import sha256
class ImportMultiTest(BitcoinTestFramework):
def set_test_params(self):
@@ -32,7 +44,19 @@ class ImportMultiTest(BitcoinTestFramework):
def setup_network(self):
self.setup_nodes()
- def run_test (self):
+ def test_importmulti(self, req, success, error_code=None, error_message=None, warnings=[]):
+ """Run importmulti and assert success"""
+ result = self.nodes[1].importmulti([req])
+ observed_warnings = []
+ if 'warnings' in result[0]:
+ observed_warnings = result[0]['warnings']
+ assert_equal("\n".join(sorted(warnings)), "\n".join(sorted(observed_warnings)))
+ assert_equal(result[0]['success'], success)
+ if error_code is not None:
+ assert_equal(result[0]['error']['code'], error_code)
+ assert_equal(result[0]['error']['message'], error_message)
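+ # The 'error' object is only present (and only checked) when the caller passes an
+ # error_code, i.e. for requests that are expected to fail.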
+
+ def run_test(self):
self.log.info("Mining blocks...")
self.nodes[0].generate(1)
self.nodes[1].generate(1)
@@ -40,584 +64,484 @@ class ImportMultiTest(BitcoinTestFramework):
node0_address1 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
- #Check only one address
+ # Check only one address
assert_equal(node0_address1['ismine'], True)
- #Node 1 sync test
- assert_equal(self.nodes[1].getblockcount(),1)
+ # Node 1 sync test
+ assert_equal(self.nodes[1].getblockcount(), 1)
- #Address Test - before import
+ # Address Test - before import
address_info = self.nodes[1].getaddressinfo(node0_address1['address'])
assert_equal(address_info['iswatchonly'], False)
assert_equal(address_info['ismine'], False)
-
# RPC importmulti -----------------------------------------------
- # Bitcoin Address
+ # Bitcoin Address (implicit non-internal)
self.log.info("Should import an address")
- address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
- result = self.nodes[1].importmulti([{
- "scriptPubKey": {
- "address": address['address']
- },
- "timestamp": "now",
- }])
- assert_equal(result[0]['success'], True)
- address_assert = self.nodes[1].getaddressinfo(address['address'])
- assert_equal(address_assert['iswatchonly'], True)
- assert_equal(address_assert['ismine'], False)
- assert_equal(address_assert['timestamp'], timestamp)
- watchonly_address = address['address']
+ key = get_key(self.nodes[0])
+ self.test_importmulti({"scriptPubKey": {"address": key.p2pkh_addr},
+ "timestamp": "now"},
+ success=True)
+ test_address(self.nodes[1],
+ key.p2pkh_addr,
+ iswatchonly=True,
+ ismine=False,
+ timestamp=timestamp,
+ ischange=False)
+ watchonly_address = key.p2pkh_addr
watchonly_timestamp = timestamp
self.log.info("Should not import an invalid address")
- result = self.nodes[1].importmulti([{
- "scriptPubKey": {
- "address": "not valid address",
- },
- "timestamp": "now",
- }])
- assert_equal(result[0]['success'], False)
- assert_equal(result[0]['error']['code'], -5)
- assert_equal(result[0]['error']['message'], 'Invalid address')
+ self.test_importmulti({"scriptPubKey": {"address": "not valid address"},
+ "timestamp": "now"},
+ success=False,
+ error_code=-5,
+ error_message='Invalid address \"not valid address\"')
# ScriptPubKey + internal
self.log.info("Should import a scriptPubKey with internal flag")
- address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
- result = self.nodes[1].importmulti([{
- "scriptPubKey": address['scriptPubKey'],
- "timestamp": "now",
- "internal": True
- }])
- assert_equal(result[0]['success'], True)
- address_assert = self.nodes[1].getaddressinfo(address['address'])
- assert_equal(address_assert['iswatchonly'], True)
- assert_equal(address_assert['ismine'], False)
- assert_equal(address_assert['timestamp'], timestamp)
+ key = get_key(self.nodes[0])
+ self.test_importmulti({"scriptPubKey": key.p2pkh_script,
+ "timestamp": "now",
+ "internal": True},
+ success=True)
+ test_address(self.nodes[1],
+ key.p2pkh_addr,
+ iswatchonly=True,
+ ismine=False,
+ timestamp=timestamp,
+ ischange=True)
# ScriptPubKey + internal + label
self.log.info("Should not allow a label to be specified when internal is true")
- address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
- result = self.nodes[1].importmulti([{
- "scriptPubKey": address['scriptPubKey'],
- "timestamp": "now",
- "internal": True,
- "label": "Example label"
- }])
- assert_equal(result[0]['success'], False)
- assert_equal(result[0]['error']['code'], -8)
- assert_equal(result[0]['error']['message'], 'Internal addresses should not have a label')
+ key = get_key(self.nodes[0])
+ self.test_importmulti({"scriptPubKey": key.p2pkh_script,
+ "timestamp": "now",
+ "internal": True,
+ "label": "Example label"},
+ success=False,
+ error_code=-8,
+ error_message='Internal addresses should not have a label')
# Nonstandard scriptPubKey + !internal
self.log.info("Should not import a nonstandard scriptPubKey without internal flag")
- nonstandardScriptPubKey = address['scriptPubKey'] + bytes_to_hex_str(script.CScript([script.OP_NOP]))
- address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
- result = self.nodes[1].importmulti([{
- "scriptPubKey": nonstandardScriptPubKey,
- "timestamp": "now",
- }])
- assert_equal(result[0]['success'], False)
- assert_equal(result[0]['error']['code'], -8)
- assert_equal(result[0]['error']['message'], 'Internal must be set to true for nonstandard scriptPubKey imports.')
- address_assert = self.nodes[1].getaddressinfo(address['address'])
- assert_equal(address_assert['iswatchonly'], False)
- assert_equal(address_assert['ismine'], False)
- assert_equal('timestamp' in address_assert, False)
-
-
- # Address + Public key + !Internal
+ nonstandardScriptPubKey = key.p2pkh_script + bytes_to_hex_str(CScript([OP_NOP]))
+ key = get_key(self.nodes[0])
+ self.test_importmulti({"scriptPubKey": nonstandardScriptPubKey,
+ "timestamp": "now"},
+ success=False,
+ error_code=-8,
+ error_message='Internal must be set to true for nonstandard scriptPubKey imports.')
+ test_address(self.nodes[1],
+ key.p2pkh_addr,
+ iswatchonly=False,
+ ismine=False,
+ timestamp=None)
+
+ # Address + Public key + !Internal(explicit)
self.log.info("Should import an address with public key")
- address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
- result = self.nodes[1].importmulti([{
- "scriptPubKey": {
- "address": address['address']
- },
- "timestamp": "now",
- "pubkeys": [ address['pubkey'] ]
- }])
- assert_equal(result[0]['success'], True)
- address_assert = self.nodes[1].getaddressinfo(address['address'])
- assert_equal(address_assert['iswatchonly'], True)
- assert_equal(address_assert['ismine'], False)
- assert_equal(address_assert['timestamp'], timestamp)
-
+ key = get_key(self.nodes[0])
+ self.test_importmulti({"scriptPubKey": {"address": key.p2pkh_addr},
+ "timestamp": "now",
+ "pubkeys": [key.pubkey],
+ "internal": False},
+ success=True,
+ warnings=["Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
+ test_address(self.nodes[1],
+ key.p2pkh_addr,
+ iswatchonly=True,
+ ismine=False,
+ timestamp=timestamp)
# ScriptPubKey + Public key + internal
self.log.info("Should import a scriptPubKey with internal and with public key")
- address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
- request = [{
- "scriptPubKey": address['scriptPubKey'],
- "timestamp": "now",
- "pubkeys": [ address['pubkey'] ],
- "internal": True
- }]
- result = self.nodes[1].importmulti(request)
- assert_equal(result[0]['success'], True)
- address_assert = self.nodes[1].getaddressinfo(address['address'])
- assert_equal(address_assert['iswatchonly'], True)
- assert_equal(address_assert['ismine'], False)
- assert_equal(address_assert['timestamp'], timestamp)
+ key = get_key(self.nodes[0])
+ self.test_importmulti({"scriptPubKey": key.p2pkh_script,
+ "timestamp": "now",
+ "pubkeys": [key.pubkey],
+ "internal": True},
+ success=True,
+ warnings=["Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
+ test_address(self.nodes[1],
+ key.p2pkh_addr,
+ iswatchonly=True,
+ ismine=False,
+ timestamp=timestamp)
# Nonstandard scriptPubKey + Public key + !internal
self.log.info("Should not import a nonstandard scriptPubKey without internal and with public key")
- address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
- request = [{
- "scriptPubKey": nonstandardScriptPubKey,
- "timestamp": "now",
- "pubkeys": [ address['pubkey'] ]
- }]
- result = self.nodes[1].importmulti(request)
- assert_equal(result[0]['success'], False)
- assert_equal(result[0]['error']['code'], -8)
- assert_equal(result[0]['error']['message'], 'Internal must be set to true for nonstandard scriptPubKey imports.')
- address_assert = self.nodes[1].getaddressinfo(address['address'])
- assert_equal(address_assert['iswatchonly'], False)
- assert_equal(address_assert['ismine'], False)
- assert_equal('timestamp' in address_assert, False)
+ key = get_key(self.nodes[0])
+ self.test_importmulti({"scriptPubKey": nonstandardScriptPubKey,
+ "timestamp": "now",
+ "pubkeys": [key.pubkey]},
+ success=False,
+ error_code=-8,
+ error_message='Internal must be set to true for nonstandard scriptPubKey imports.')
+ test_address(self.nodes[1],
+ key.p2pkh_addr,
+ iswatchonly=False,
+ ismine=False,
+ timestamp=None)
# Address + Private key + !watchonly
self.log.info("Should import an address with private key")
- address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
- result = self.nodes[1].importmulti([{
- "scriptPubKey": {
- "address": address['address']
- },
- "timestamp": "now",
- "keys": [ self.nodes[0].dumpprivkey(address['address']) ]
- }])
- assert_equal(result[0]['success'], True)
- address_assert = self.nodes[1].getaddressinfo(address['address'])
- assert_equal(address_assert['iswatchonly'], False)
- assert_equal(address_assert['ismine'], True)
- assert_equal(address_assert['timestamp'], timestamp)
+ key = get_key(self.nodes[0])
+ self.test_importmulti({"scriptPubKey": {"address": key.p2pkh_addr},
+ "timestamp": "now",
+ "keys": [key.privkey]},
+ success=True)
+ test_address(self.nodes[1],
+ key.p2pkh_addr,
+ iswatchonly=False,
+ ismine=True,
+ timestamp=timestamp)
self.log.info("Should not import an address with private key if is already imported")
- result = self.nodes[1].importmulti([{
- "scriptPubKey": {
- "address": address['address']
- },
- "timestamp": "now",
- "keys": [ self.nodes[0].dumpprivkey(address['address']) ]
- }])
- assert_equal(result[0]['success'], False)
- assert_equal(result[0]['error']['code'], -4)
- assert_equal(result[0]['error']['message'], 'The wallet already contains the private key for this address or script')
+ self.test_importmulti({"scriptPubKey": {"address": key.p2pkh_addr},
+ "timestamp": "now",
+ "keys": [key.privkey]},
+ success=False,
+ error_code=-4,
+ error_message='The wallet already contains the private key for this address or script')
# Address + Private key + watchonly
- self.log.info("Should not import an address with private key and with watchonly")
- address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
- result = self.nodes[1].importmulti([{
- "scriptPubKey": {
- "address": address['address']
- },
- "timestamp": "now",
- "keys": [ self.nodes[0].dumpprivkey(address['address']) ],
- "watchonly": True
- }])
- assert_equal(result[0]['success'], False)
- assert_equal(result[0]['error']['code'], -8)
- assert_equal(result[0]['error']['message'], 'Watch-only addresses should not include private keys')
- address_assert = self.nodes[1].getaddressinfo(address['address'])
- assert_equal(address_assert['iswatchonly'], False)
- assert_equal(address_assert['ismine'], False)
- assert_equal('timestamp' in address_assert, False)
+ self.log.info("Should import an address with private key and with watchonly")
+ key = get_key(self.nodes[0])
+ self.test_importmulti({"scriptPubKey": {"address": key.p2pkh_addr},
+ "timestamp": "now",
+ "keys": [key.privkey],
+ "watchonly": True},
+ success=True,
+ warnings=["All private keys are provided, outputs will be considered spendable. If this is intentional, do not specify the watchonly flag."])
+ test_address(self.nodes[1],
+ key.p2pkh_addr,
+ iswatchonly=False,
+ ismine=True,
+ timestamp=timestamp)
# ScriptPubKey + Private key + internal
self.log.info("Should import a scriptPubKey with internal and with private key")
- address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
- result = self.nodes[1].importmulti([{
- "scriptPubKey": address['scriptPubKey'],
- "timestamp": "now",
- "keys": [ self.nodes[0].dumpprivkey(address['address']) ],
- "internal": True
- }])
- assert_equal(result[0]['success'], True)
- address_assert = self.nodes[1].getaddressinfo(address['address'])
- assert_equal(address_assert['iswatchonly'], False)
- assert_equal(address_assert['ismine'], True)
- assert_equal(address_assert['timestamp'], timestamp)
+ key = get_key(self.nodes[0])
+ self.test_importmulti({"scriptPubKey": key.p2pkh_script,
+ "timestamp": "now",
+ "keys": [key.privkey],
+ "internal": True},
+ success=True)
+ test_address(self.nodes[1],
+ key.p2pkh_addr,
+ iswatchonly=False,
+ ismine=True,
+ timestamp=timestamp)
# Nonstandard scriptPubKey + Private key + !internal
self.log.info("Should not import a nonstandard scriptPubKey without internal and with private key")
- address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
- result = self.nodes[1].importmulti([{
- "scriptPubKey": nonstandardScriptPubKey,
- "timestamp": "now",
- "keys": [ self.nodes[0].dumpprivkey(address['address']) ]
- }])
- assert_equal(result[0]['success'], False)
- assert_equal(result[0]['error']['code'], -8)
- assert_equal(result[0]['error']['message'], 'Internal must be set to true for nonstandard scriptPubKey imports.')
- address_assert = self.nodes[1].getaddressinfo(address['address'])
- assert_equal(address_assert['iswatchonly'], False)
- assert_equal(address_assert['ismine'], False)
- assert_equal('timestamp' in address_assert, False)
-
+ key = get_key(self.nodes[0])
+ self.test_importmulti({"scriptPubKey": nonstandardScriptPubKey,
+ "timestamp": "now",
+ "keys": [key.privkey]},
+ success=False,
+ error_code=-8,
+ error_message='Internal must be set to true for nonstandard scriptPubKey imports.')
+ test_address(self.nodes[1],
+ key.p2pkh_addr,
+ iswatchonly=False,
+ ismine=False,
+ timestamp=None)
# P2SH address
- sig_address_1 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
- sig_address_2 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
- sig_address_3 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
- multi_sig_script = self.nodes[0].createmultisig(2, [sig_address_1['pubkey'], sig_address_2['pubkey'], sig_address_3['pubkey']])
+ multisig = get_multisig(self.nodes[0])
self.nodes[1].generate(100)
- self.nodes[1].sendtoaddress(multi_sig_script['address'], 10.00)
+ self.nodes[1].sendtoaddress(multisig.p2sh_addr, 10.00)
self.nodes[1].generate(1)
timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
self.log.info("Should import a p2sh")
- result = self.nodes[1].importmulti([{
- "scriptPubKey": {
- "address": multi_sig_script['address']
- },
- "timestamp": "now",
- }])
- assert_equal(result[0]['success'], True)
- address_assert = self.nodes[1].getaddressinfo(multi_sig_script['address'])
- assert_equal(address_assert['isscript'], True)
- assert_equal(address_assert['iswatchonly'], True)
- assert_equal(address_assert['timestamp'], timestamp)
- p2shunspent = self.nodes[1].listunspent(0,999999, [multi_sig_script['address']])[0]
+ self.test_importmulti({"scriptPubKey": {"address": multisig.p2sh_addr},
+ "timestamp": "now"},
+ success=True)
+ test_address(self.nodes[1],
+ multisig.p2sh_addr,
+ isscript=True,
+ iswatchonly=True,
+ timestamp=timestamp)
+ p2shunspent = self.nodes[1].listunspent(0, 999999, [multisig.p2sh_addr])[0]
assert_equal(p2shunspent['spendable'], False)
assert_equal(p2shunspent['solvable'], False)
-
# P2SH + Redeem script
- sig_address_1 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
- sig_address_2 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
- sig_address_3 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
- multi_sig_script = self.nodes[0].createmultisig(2, [sig_address_1['pubkey'], sig_address_2['pubkey'], sig_address_3['pubkey']])
+ multisig = get_multisig(self.nodes[0])
self.nodes[1].generate(100)
- self.nodes[1].sendtoaddress(multi_sig_script['address'], 10.00)
+ self.nodes[1].sendtoaddress(multisig.p2sh_addr, 10.00)
self.nodes[1].generate(1)
timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
self.log.info("Should import a p2sh with respective redeem script")
- result = self.nodes[1].importmulti([{
- "scriptPubKey": {
- "address": multi_sig_script['address']
- },
- "timestamp": "now",
- "redeemscript": multi_sig_script['redeemScript']
- }])
- assert_equal(result[0]['success'], True)
- address_assert = self.nodes[1].getaddressinfo(multi_sig_script['address'])
- assert_equal(address_assert['timestamp'], timestamp)
-
- p2shunspent = self.nodes[1].listunspent(0,999999, [multi_sig_script['address']])[0]
+ self.test_importmulti({"scriptPubKey": {"address": multisig.p2sh_addr},
+ "timestamp": "now",
+ "redeemscript": multisig.redeem_script},
+ success=True,
+ warnings=["Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
+ test_address(self.nodes[1],
+ multisig.p2sh_addr, timestamp=timestamp, iswatchonly=True, ismine=False, solvable=True)
+
+ p2shunspent = self.nodes[1].listunspent(0, 999999, [multisig.p2sh_addr])[0]
assert_equal(p2shunspent['spendable'], False)
assert_equal(p2shunspent['solvable'], True)
-
# P2SH + Redeem script + Private Keys + !Watchonly
- sig_address_1 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
- sig_address_2 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
- sig_address_3 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
- multi_sig_script = self.nodes[0].createmultisig(2, [sig_address_1['pubkey'], sig_address_2['pubkey'], sig_address_3['pubkey']])
+ multisig = get_multisig(self.nodes[0])
self.nodes[1].generate(100)
- self.nodes[1].sendtoaddress(multi_sig_script['address'], 10.00)
+ self.nodes[1].sendtoaddress(multisig.p2sh_addr, 10.00)
self.nodes[1].generate(1)
timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
self.log.info("Should import a p2sh with respective redeem script and private keys")
- result = self.nodes[1].importmulti([{
- "scriptPubKey": {
- "address": multi_sig_script['address']
- },
- "timestamp": "now",
- "redeemscript": multi_sig_script['redeemScript'],
- "keys": [ self.nodes[0].dumpprivkey(sig_address_1['address']), self.nodes[0].dumpprivkey(sig_address_2['address'])]
- }])
- assert_equal(result[0]['success'], True)
- address_assert = self.nodes[1].getaddressinfo(multi_sig_script['address'])
- assert_equal(address_assert['timestamp'], timestamp)
-
- p2shunspent = self.nodes[1].listunspent(0,999999, [multi_sig_script['address']])[0]
+ self.test_importmulti({"scriptPubKey": {"address": multisig.p2sh_addr},
+ "timestamp": "now",
+ "redeemscript": multisig.redeem_script,
+ "keys": multisig.privkeys[0:2]},
+ success=True,
+ warnings=["Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
+ test_address(self.nodes[1],
+ multisig.p2sh_addr,
+ timestamp=timestamp,
+ ismine=False,
+ iswatchonly=True,
+ solvable=True)
+
+ p2shunspent = self.nodes[1].listunspent(0, 999999, [multisig.p2sh_addr])[0]
assert_equal(p2shunspent['spendable'], False)
assert_equal(p2shunspent['solvable'], True)
# P2SH + Redeem script + Private Keys + Watchonly
- sig_address_1 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
- sig_address_2 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
- sig_address_3 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
- multi_sig_script = self.nodes[0].createmultisig(2, [sig_address_1['pubkey'], sig_address_2['pubkey'], sig_address_3['pubkey']])
+ multisig = get_multisig(self.nodes[0])
self.nodes[1].generate(100)
- self.nodes[1].sendtoaddress(multi_sig_script['address'], 10.00)
+ self.nodes[1].sendtoaddress(multisig.p2sh_addr, 10.00)
self.nodes[1].generate(1)
timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
self.log.info("Should import a p2sh with respective redeem script and private keys")
- result = self.nodes[1].importmulti([{
- "scriptPubKey": {
- "address": multi_sig_script['address']
- },
- "timestamp": "now",
- "redeemscript": multi_sig_script['redeemScript'],
- "keys": [ self.nodes[0].dumpprivkey(sig_address_1['address']), self.nodes[0].dumpprivkey(sig_address_2['address'])],
- "watchonly": True
- }])
- assert_equal(result[0]['success'], False)
- assert_equal(result[0]['error']['code'], -8)
- assert_equal(result[0]['error']['message'], 'Watch-only addresses should not include private keys')
-
+ self.test_importmulti({"scriptPubKey": {"address": multisig.p2sh_addr},
+ "timestamp": "now",
+ "redeemscript": multisig.redeem_script,
+ "keys": multisig.privkeys[0:2],
+ "watchonly": True},
+ success=True)
+ test_address(self.nodes[1],
+ multisig.p2sh_addr,
+ iswatchonly=True,
+ ismine=False,
+ solvable=True,
+ timestamp=timestamp)
# Address + Public key + !Internal + Wrong pubkey
- self.log.info("Should not import an address with a wrong public key")
- address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
- address2 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
- result = self.nodes[1].importmulti([{
- "scriptPubKey": {
- "address": address['address']
- },
- "timestamp": "now",
- "pubkeys": [ address2['pubkey'] ]
- }])
- assert_equal(result[0]['success'], False)
- assert_equal(result[0]['error']['code'], -5)
- assert_equal(result[0]['error']['message'], 'Key does not match address destination')
- address_assert = self.nodes[1].getaddressinfo(address['address'])
- assert_equal(address_assert['iswatchonly'], False)
- assert_equal(address_assert['ismine'], False)
- assert_equal('timestamp' in address_assert, False)
-
+ self.log.info("Should not import an address with the wrong public key as non-solvable")
+ key = get_key(self.nodes[0])
+ wrong_key = get_key(self.nodes[0]).pubkey
+ self.test_importmulti({"scriptPubKey": {"address": key.p2pkh_addr},
+ "timestamp": "now",
+ "pubkeys": [wrong_key]},
+ success=True,
+ warnings=["Importing as non-solvable: some required keys are missing. If this is intentional, don't provide any keys, pubkeys, witnessscript, or redeemscript.", "Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
+ test_address(self.nodes[1],
+ key.p2pkh_addr,
+ iswatchonly=True,
+ ismine=False,
+ solvable=False,
+ timestamp=timestamp)
# ScriptPubKey + Public key + internal + Wrong pubkey
- self.log.info("Should not import a scriptPubKey with internal and with a wrong public key")
- address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
- address2 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
- request = [{
- "scriptPubKey": address['scriptPubKey'],
- "timestamp": "now",
- "pubkeys": [ address2['pubkey'] ],
- "internal": True
- }]
- result = self.nodes[1].importmulti(request)
- assert_equal(result[0]['success'], False)
- assert_equal(result[0]['error']['code'], -5)
- assert_equal(result[0]['error']['message'], 'Key does not match address destination')
- address_assert = self.nodes[1].getaddressinfo(address['address'])
- assert_equal(address_assert['iswatchonly'], False)
- assert_equal(address_assert['ismine'], False)
- assert_equal('timestamp' in address_assert, False)
-
+ self.log.info("Should import a scriptPubKey with internal and with a wrong public key as non-solvable")
+ key = get_key(self.nodes[0])
+ wrong_key = get_key(self.nodes[0]).pubkey
+ self.test_importmulti({"scriptPubKey": key.p2pkh_script,
+ "timestamp": "now",
+ "pubkeys": [wrong_key],
+ "internal": True},
+ success=True,
+ warnings=["Importing as non-solvable: some required keys are missing. If this is intentional, don't provide any keys, pubkeys, witnessscript, or redeemscript.", "Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
+ test_address(self.nodes[1],
+ key.p2pkh_addr,
+ iswatchonly=True,
+ ismine=False,
+ solvable=False,
+ timestamp=timestamp)
# Address + Private key + !watchonly + Wrong private key
- self.log.info("Should not import an address with a wrong private key")
- address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
- address2 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
- result = self.nodes[1].importmulti([{
- "scriptPubKey": {
- "address": address['address']
- },
- "timestamp": "now",
- "keys": [ self.nodes[0].dumpprivkey(address2['address']) ]
- }])
- assert_equal(result[0]['success'], False)
- assert_equal(result[0]['error']['code'], -5)
- assert_equal(result[0]['error']['message'], 'Key does not match address destination')
- address_assert = self.nodes[1].getaddressinfo(address['address'])
- assert_equal(address_assert['iswatchonly'], False)
- assert_equal(address_assert['ismine'], False)
- assert_equal('timestamp' in address_assert, False)
-
+ self.log.info("Should import an address with a wrong private key as non-solvable")
+ key = get_key(self.nodes[0])
+ wrong_privkey = get_key(self.nodes[0]).privkey
+ self.test_importmulti({"scriptPubKey": {"address": key.p2pkh_addr},
+ "timestamp": "now",
+ "keys": [wrong_privkey]},
+ success=True,
+ warnings=["Importing as non-solvable: some required keys are missing. If this is intentional, don't provide any keys, pubkeys, witnessscript, or redeemscript.", "Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
+ test_address(self.nodes[1],
+ key.p2pkh_addr,
+ iswatchonly=True,
+ ismine=False,
+ solvable=False,
+ timestamp=timestamp)
# ScriptPubKey + Private key + internal + Wrong private key
- self.log.info("Should not import a scriptPubKey with internal and with a wrong private key")
- address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
- address2 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
- result = self.nodes[1].importmulti([{
- "scriptPubKey": address['scriptPubKey'],
- "timestamp": "now",
- "keys": [ self.nodes[0].dumpprivkey(address2['address']) ],
- "internal": True
- }])
- assert_equal(result[0]['success'], False)
- assert_equal(result[0]['error']['code'], -5)
- assert_equal(result[0]['error']['message'], 'Key does not match address destination')
- address_assert = self.nodes[1].getaddressinfo(address['address'])
- assert_equal(address_assert['iswatchonly'], False)
- assert_equal(address_assert['ismine'], False)
- assert_equal('timestamp' in address_assert, False)
-
+ self.log.info("Should import a scriptPubKey with internal and with a wrong private key as non-solvable")
+ key = get_key(self.nodes[0])
+ wrong_privkey = get_key(self.nodes[0]).privkey
+ self.test_importmulti({"scriptPubKey": key.p2pkh_script,
+ "timestamp": "now",
+ "keys": [wrong_privkey],
+ "internal": True},
+ success=True,
+ warnings=["Importing as non-solvable: some required keys are missing. If this is intentional, don't provide any keys, pubkeys, witnessscript, or redeemscript.", "Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
+ test_address(self.nodes[1],
+ key.p2pkh_addr,
+ iswatchonly=True,
+ ismine=False,
+ solvable=False,
+ timestamp=timestamp)
# Importing existing watch only address with new timestamp should replace saved timestamp.
assert_greater_than(timestamp, watchonly_timestamp)
self.log.info("Should replace previously saved watch only timestamp.")
- result = self.nodes[1].importmulti([{
- "scriptPubKey": {
- "address": watchonly_address,
- },
- "timestamp": "now",
- }])
- assert_equal(result[0]['success'], True)
- address_assert = self.nodes[1].getaddressinfo(watchonly_address)
- assert_equal(address_assert['iswatchonly'], True)
- assert_equal(address_assert['ismine'], False)
- assert_equal(address_assert['timestamp'], timestamp)
+ self.test_importmulti({"scriptPubKey": {"address": watchonly_address},
+ "timestamp": "now"},
+ success=True)
+ test_address(self.nodes[1],
+ watchonly_address,
+ iswatchonly=True,
+ ismine=False,
+ timestamp=timestamp)
watchonly_timestamp = timestamp
-
# restart nodes to check for proper serialization/deserialization of watch only address
self.stop_nodes()
self.start_nodes()
- address_assert = self.nodes[1].getaddressinfo(watchonly_address)
- assert_equal(address_assert['iswatchonly'], True)
- assert_equal(address_assert['ismine'], False)
- assert_equal(address_assert['timestamp'], watchonly_timestamp)
+ test_address(self.nodes[1],
+ watchonly_address,
+ iswatchonly=True,
+ ismine=False,
+ timestamp=watchonly_timestamp)
# Bad or missing timestamps
self.log.info("Should throw on invalid or missing timestamp values")
assert_raises_rpc_error(-3, 'Missing required timestamp field for key',
- self.nodes[1].importmulti, [{
- "scriptPubKey": address['scriptPubKey'],
- }])
+ self.nodes[1].importmulti, [{"scriptPubKey": key.p2pkh_script}])
assert_raises_rpc_error(-3, 'Expected number or "now" timestamp value for key. got type string',
- self.nodes[1].importmulti, [{
- "scriptPubKey": address['scriptPubKey'],
- "timestamp": "",
- }])
+ self.nodes[1].importmulti, [{
+ "scriptPubKey": key.p2pkh_script,
+ "timestamp": ""
+ }])
# Import P2WPKH address as watch only
self.log.info("Should import a P2WPKH address as watch only")
- address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress(address_type="bech32"))
- result = self.nodes[1].importmulti([{
- "scriptPubKey": {
- "address": address['address']
- },
- "timestamp": "now",
- }])
- assert_equal(result[0]['success'], True)
- address_assert = self.nodes[1].getaddressinfo(address['address'])
- assert_equal(address_assert['iswatchonly'], True)
- assert_equal(address_assert['solvable'], False)
+ key = get_key(self.nodes[0])
+ self.test_importmulti({"scriptPubKey": {"address": key.p2wpkh_addr},
+ "timestamp": "now"},
+ success=True)
+ test_address(self.nodes[1],
+ key.p2wpkh_addr,
+ iswatchonly=True,
+ solvable=False)
# Import P2WPKH address with public key but no private key
self.log.info("Should import a P2WPKH address and public key as solvable but not spendable")
- address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress(address_type="bech32"))
- result = self.nodes[1].importmulti([{
- "scriptPubKey": {
- "address": address['address']
- },
- "timestamp": "now",
- "pubkeys": [ address['pubkey'] ]
- }])
- assert_equal(result[0]['success'], True)
- address_assert = self.nodes[1].getaddressinfo(address['address'])
- assert_equal(address_assert['ismine'], False)
- assert_equal(address_assert['solvable'], True)
+ key = get_key(self.nodes[0])
+ self.test_importmulti({"scriptPubKey": {"address": key.p2wpkh_addr},
+ "timestamp": "now",
+ "pubkeys": [key.pubkey]},
+ success=True,
+ warnings=["Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
+ test_address(self.nodes[1],
+ key.p2wpkh_addr,
+ ismine=False,
+ solvable=True)
# Import P2WPKH address with key and check it is spendable
self.log.info("Should import a P2WPKH address with key")
- address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress(address_type="bech32"))
- result = self.nodes[1].importmulti([{
- "scriptPubKey": {
- "address": address['address']
- },
- "timestamp": "now",
- "keys": [self.nodes[0].dumpprivkey(address['address'])]
- }])
- assert_equal(result[0]['success'], True)
- address_assert = self.nodes[1].getaddressinfo(address['address'])
- assert_equal(address_assert['iswatchonly'], False)
- assert_equal(address_assert['ismine'], True)
+ key = get_key(self.nodes[0])
+ self.test_importmulti({"scriptPubKey": {"address": key.p2wpkh_addr},
+ "timestamp": "now",
+ "keys": [key.privkey]},
+ success=True)
+ test_address(self.nodes[1],
+ key.p2wpkh_addr,
+ iswatchonly=False,
+ ismine=True)
# P2WSH multisig address without scripts or keys
- sig_address_1 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
- sig_address_2 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
- multi_sig_script = self.nodes[0].addmultisigaddress(2, [sig_address_1['pubkey'], sig_address_2['pubkey']], "", "bech32")
+ multisig = get_multisig(self.nodes[0])
self.log.info("Should import a p2wsh multisig as watch only without respective redeem script and private keys")
- result = self.nodes[1].importmulti([{
- "scriptPubKey": {
- "address": multi_sig_script['address']
- },
- "timestamp": "now"
- }])
- assert_equal(result[0]['success'], True)
- address_assert = self.nodes[1].getaddressinfo(multi_sig_script['address'])
- assert_equal(address_assert['solvable'], False)
+ self.test_importmulti({"scriptPubKey": {"address": multisig.p2wsh_addr},
+ "timestamp": "now"},
+ success=True)
+ test_address(self.nodes[1],
+ multisig.p2wsh_addr,
+ solvable=False)
# Same P2WSH multisig address as above, but now with witnessscript + private keys
- self.log.info("Should import a p2wsh with respective redeem script and private keys")
- result = self.nodes[1].importmulti([{
- "scriptPubKey": {
- "address": multi_sig_script['address']
- },
- "timestamp": "now",
- "witnessscript": multi_sig_script['redeemScript'],
- "keys": [ self.nodes[0].dumpprivkey(sig_address_1['address']), self.nodes[0].dumpprivkey(sig_address_2['address']) ]
- }])
- assert_equal(result[0]['success'], True)
- address_assert = self.nodes[1].getaddressinfo(multi_sig_script['address'])
- assert_equal(address_assert['solvable'], True)
- assert_equal(address_assert['ismine'], True)
- assert_equal(address_assert['sigsrequired'], 2)
+ self.log.info("Should import a p2wsh with respective witness script and private keys")
+ self.test_importmulti({"scriptPubKey": {"address": multisig.p2wsh_addr},
+ "timestamp": "now",
+ "witnessscript": multisig.redeem_script,
+ "keys": multisig.privkeys},
+ success=True)
+ test_address(self.nodes[1],
+ multisig.p2sh_addr,
+ solvable=True,
+ ismine=True,
+ sigsrequired=2)
# P2SH-P2WPKH address with no redeemscript or public or private key
- sig_address_1 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress(address_type="p2sh-segwit"))
- pubkeyhash = hash160(hex_str_to_bytes(sig_address_1['pubkey']))
- pkscript = CScript([OP_0, pubkeyhash])
+ key = get_key(self.nodes[0])
self.log.info("Should import a p2sh-p2wpkh without redeem script or keys")
- result = self.nodes[1].importmulti([{
- "scriptPubKey": {
- "address": sig_address_1['address']
- },
- "timestamp": "now"
- }])
- assert_equal(result[0]['success'], True)
- address_assert = self.nodes[1].getaddressinfo(sig_address_1['address'])
- assert_equal(address_assert['solvable'], False)
- assert_equal(address_assert['ismine'], False)
+ self.test_importmulti({"scriptPubKey": {"address": key.p2sh_p2wpkh_addr},
+ "timestamp": "now"},
+ success=True)
+ test_address(self.nodes[1],
+ key.p2sh_p2wpkh_addr,
+ solvable=False,
+ ismine=False)
# P2SH-P2WPKH address + redeemscript + public key with no private key
self.log.info("Should import a p2sh-p2wpkh with respective redeem script and pubkey as solvable")
- result = self.nodes[1].importmulti([{
- "scriptPubKey": {
- "address": sig_address_1['address']
- },
- "timestamp": "now",
- "redeemscript": bytes_to_hex_str(pkscript),
- "pubkeys": [ sig_address_1['pubkey'] ]
- }])
- assert_equal(result[0]['success'], True)
- address_assert = self.nodes[1].getaddressinfo(sig_address_1['address'])
- assert_equal(address_assert['solvable'], True)
- assert_equal(address_assert['ismine'], False)
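+ # For P2SH-P2WPKH the redeem script is the P2WPKH script: OP_0 <hash160(pubkey)>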
+ self.test_importmulti({"scriptPubKey": {"address": key.p2sh_p2wpkh_addr},
+ "timestamp": "now",
+ "redeemscript": key.p2sh_p2wpkh_redeem_script,
+ "pubkeys": [key.pubkey]},
+ success=True,
+ warnings=["Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
+ test_address(self.nodes[1],
+ key.p2sh_p2wpkh_addr,
+ solvable=True,
+ ismine=False)
# P2SH-P2WPKH address + redeemscript + private key
- sig_address_1 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress(address_type="p2sh-segwit"))
- pubkeyhash = hash160(hex_str_to_bytes(sig_address_1['pubkey']))
- pkscript = CScript([OP_0, pubkeyhash])
+ key = get_key(self.nodes[0])
self.log.info("Should import a p2sh-p2wpkh with respective redeem script and private keys")
- result = self.nodes[1].importmulti([{
- "scriptPubKey": {
- "address": sig_address_1['address']
- },
- "timestamp": "now",
- "redeemscript": bytes_to_hex_str(pkscript),
- "keys": [ self.nodes[0].dumpprivkey(sig_address_1['address'])]
- }])
- assert_equal(result[0]['success'], True)
- address_assert = self.nodes[1].getaddressinfo(sig_address_1['address'])
- assert_equal(address_assert['solvable'], True)
- assert_equal(address_assert['ismine'], True)
-
- # P2SH-P2WSH 1-of-1 multisig + redeemscript with no private key
- sig_address_1 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
- multi_sig_script = self.nodes[0].addmultisigaddress(1, [sig_address_1['pubkey']], "", "p2sh-segwit")
- scripthash = sha256(hex_str_to_bytes(multi_sig_script['redeemScript']))
- redeem_script = CScript([OP_0, scripthash])
+ self.test_importmulti({"scriptPubKey": {"address": key.p2sh_p2wpkh_addr},
+ "timestamp": "now",
+ "redeemscript": key.p2sh_p2wpkh_redeem_script,
+ "keys": [key.privkey]},
+ success=True)
+ test_address(self.nodes[1],
+ key.p2sh_p2wpkh_addr,
+ solvable=True,
+ ismine=True)
+
+ # P2SH-P2WSH multisig + redeemscript with no private key
+ multisig = get_multisig(self.nodes[0])
self.log.info("Should import a p2sh-p2wsh with respective redeem script but no private key")
- result = self.nodes[1].importmulti([{
- "scriptPubKey": {
- "address": multi_sig_script['address']
- },
- "timestamp": "now",
- "redeemscript": bytes_to_hex_str(redeem_script),
- "witnessscript": multi_sig_script['redeemScript']
- }])
- assert_equal(result[0]['success'], True)
- address_assert = self.nodes[1].getaddressinfo(multi_sig_script['address'])
- assert_equal(address_assert['solvable'], True)
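+ # For P2SH-P2WSH the "redeemscript" is the P2WSH scriptPubKey and the "witnessscript" is the underlying multisig script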
+ self.test_importmulti({"scriptPubKey": {"address": multisig.p2sh_p2wsh_addr},
+ "timestamp": "now",
+ "redeemscript": multisig.p2wsh_script,
+ "witnessscript": multisig.redeem_script},
+ success=True,
+ warnings=["Some private keys are missing, outputs will be considered watchonly. If this is intentional, specify the watchonly flag."])
+ test_address(self.nodes[1],
+ multisig.p2sh_p2wsh_addr,
+ solvable=True,
+ ismine=False)
if __name__ == '__main__':
- ImportMultiTest ().main ()
+ ImportMultiTest().main()
diff --git a/test/functional/wallet_importprunedfunds.py b/test/functional/wallet_importprunedfunds.py
index 26b181db33..78426018ef 100755
--- a/test/functional/wallet_importprunedfunds.py
+++ b/test/functional/wallet_importprunedfunds.py
@@ -81,7 +81,7 @@ class ImportPrunedFundsTest(BitcoinTestFramework):
# Import with affiliated address with no rescan
self.nodes[1].importaddress(address=address2, rescan=False)
- self.nodes[1].importprunedfunds(rawtxn2, proof2)
+ self.nodes[1].importprunedfunds(rawtransaction=rawtxn2, txoutproof=proof2)
assert [tx for tx in self.nodes[1].listtransactions(include_watchonly=True) if tx['txid'] == txnid2]
# Import with private key with no rescan
diff --git a/test/functional/wallet_keypool_topup.py b/test/functional/wallet_keypool_topup.py
index f1a441c399..b7c8d3098d 100755
--- a/test/functional/wallet_keypool_topup.py
+++ b/test/functional/wallet_keypool_topup.py
@@ -24,8 +24,8 @@ from test_framework.util import (
class KeypoolRestoreTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
- self.num_nodes = 2
- self.extra_args = [[], ['-keypool=100']]
+ self.num_nodes = 4
+ self.extra_args = [[], ['-keypool=100'], ['-keypool=100'], ['-keypool=100']]
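+ # Node 0 funds the others; nodes 1-3 each exercise keypool restore with a different address type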
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
@@ -40,32 +40,47 @@ class KeypoolRestoreTest(BitcoinTestFramework):
shutil.copyfile(wallet_path, wallet_backup_path)
self.start_node(1, self.extra_args[1])
connect_nodes_bi(self.nodes, 0, 1)
+ connect_nodes_bi(self.nodes, 0, 2)
+ connect_nodes_bi(self.nodes, 0, 3)
- self.log.info("Generate keys for wallet")
- for _ in range(90):
- addr_oldpool = self.nodes[1].getnewaddress()
- for _ in range(20):
- addr_extpool = self.nodes[1].getnewaddress()
+ for i, output_type in enumerate(["legacy", "p2sh-segwit", "bech32"]):
- self.log.info("Send funds to wallet")
- self.nodes[0].sendtoaddress(addr_oldpool, 10)
- self.nodes[0].generate(1)
- self.nodes[0].sendtoaddress(addr_extpool, 5)
- self.nodes[0].generate(1)
- sync_blocks(self.nodes)
+ self.log.info("Generate keys for wallet with address type: {}".format(output_type))
+ idx = i + 1
+ for _ in range(90):
+ addr_oldpool = self.nodes[idx].getnewaddress(address_type=output_type)
+ for _ in range(20):
+ addr_extpool = self.nodes[idx].getnewaddress(address_type=output_type)
- self.log.info("Restart node with wallet backup")
- self.stop_node(1)
- shutil.copyfile(wallet_backup_path, wallet_path)
- self.start_node(1, self.extra_args[1])
- connect_nodes_bi(self.nodes, 0, 1)
- self.sync_all()
+ # Make sure we're creating the outputs we expect
+ address_details = self.nodes[idx].validateaddress(addr_extpool)
+ if i == 0:
+ assert not address_details["isscript"] and not address_details["iswitness"]
+ elif i == 1:
+ assert address_details["isscript"] and not address_details["iswitness"]
+ else:
+ assert not address_details["isscript"] and address_details["iswitness"]
+
+ self.log.info("Send funds to wallet")
+ self.nodes[0].sendtoaddress(addr_oldpool, 10)
+ self.nodes[0].generate(1)
+ self.nodes[0].sendtoaddress(addr_extpool, 5)
+ self.nodes[0].generate(1)
+ sync_blocks(self.nodes)
+
+ self.log.info("Restart node with wallet backup")
+ self.stop_node(idx)
+ shutil.copyfile(wallet_backup_path, wallet_path)
+ self.start_node(idx, self.extra_args[idx])
+ connect_nodes_bi(self.nodes, 0, idx)
+ self.sync_all()
- self.log.info("Verify keypool is restored and balance is correct")
- assert_equal(self.nodes[1].getbalance(), 15)
- assert_equal(self.nodes[1].listtransactions()[0]['category'], "receive")
- # Check that we have marked all keys up to the used keypool key as used
- assert_equal(self.nodes[1].getaddressinfo(self.nodes[1].getnewaddress())['hdkeypath'], "m/0'/0'/110'")
+ self.log.info("Verify keypool is restored and balance is correct")
+ assert_equal(self.nodes[idx].getbalance(), 15)
+ assert_equal(self.nodes[idx].listtransactions()[0]['category'], "receive")
+ # Check that we have marked all keys up to the used keypool key as used
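+ # (90 + 20 = 110 addresses were handed out above, so the next derived key is at index 110)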
+ assert_equal(self.nodes[idx].getaddressinfo(self.nodes[idx].getnewaddress())['hdkeypath'], "m/0'/0'/110'")
if __name__ == '__main__':
diff --git a/test/functional/wallet_listtransactions.py b/test/functional/wallet_listtransactions.py
index 5a17395abd..17f044bf65 100755
--- a/test/functional/wallet_listtransactions.py
+++ b/test/functional/wallet_listtransactions.py
@@ -25,12 +25,13 @@ def tx_from_hex(hexstring):
class ListTransactionsTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 2
- self.enable_mocktime()
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def run_test(self):
+ self.nodes[0].generate(1) # Get out of IBD
+ self.sync_all()
# Simple send, 0 to 1:
txid = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.1)
self.sync_all()
@@ -97,9 +98,10 @@ class ListTransactionsTest(BitcoinTestFramework):
txid = self.nodes[1].sendtoaddress(multisig["address"], 0.1)
self.nodes[1].generate(1)
self.sync_all()
- assert not [tx for tx in self.nodes[0].listtransactions(dummy="*", count=100, skip=0, include_watchonly=False) if "label" in tx and tx["label"] == "watchonly"]
- txs = [tx for tx in self.nodes[0].listtransactions(dummy="*", count=100, skip=0, include_watchonly=True) if "label" in tx and tx['label'] == 'watchonly']
- assert_array_result(txs, {"category": "receive", "amount": Decimal("0.1")}, {"txid": txid})
+ assert len(self.nodes[0].listtransactions(label="watchonly", count=100, include_watchonly=False)) == 0
+ assert_array_result(self.nodes[0].listtransactions(label="watchonly", count=100, include_watchonly=True),
+ {"category": "receive", "amount": Decimal("0.1")},
+ {"txid": txid, "label": "watchonly"})
self.run_rbf_opt_in_test()
diff --git a/test/functional/wallet_multiwallet.py b/test/functional/wallet_multiwallet.py
index 4f663c82c7..8ab569a3c3 100755
--- a/test/functional/wallet_multiwallet.py
+++ b/test/functional/wallet_multiwallet.py
@@ -223,6 +223,9 @@ class MultiWalletTest(BitcoinTestFramework):
# Fail to load duplicate wallets
assert_raises_rpc_error(-4, 'Wallet file verification failed: Error loading wallet w1. Duplicate -wallet filename specified.', self.nodes[0].loadwallet, wallet_names[0])
+ # Fail to load duplicate wallets by different ways (directory and filepath)
+ assert_raises_rpc_error(-4, "Wallet file verification failed: Error loading wallet wallet.dat. Duplicate -wallet filename specified.", self.nodes[0].loadwallet, 'wallet.dat')
+
# Fail to load if one wallet is a copy of another
assert_raises_rpc_error(-1, "BerkeleyBatch: Can't open database w8_copy (duplicates fileid", self.nodes[0].loadwallet, 'w8_copy')
diff --git a/test/functional/wallet_txn_clone.py b/test/functional/wallet_txn_clone.py
index d78c105c17..1c2e0a9cb7 100755
--- a/test/functional/wallet_txn_clone.py
+++ b/test/functional/wallet_txn_clone.py
@@ -65,7 +65,7 @@ class TxnMallTest(BitcoinTestFramework):
# Construct a clone of tx1, to be malleated
rawtx1 = self.nodes[0].getrawtransaction(txid1, 1)
- clone_inputs = [{"txid": rawtx1["vin"][0]["txid"], "vout": rawtx1["vin"][0]["vout"]}]
+ clone_inputs = [{"txid": rawtx1["vin"][0]["txid"], "vout": rawtx1["vin"][0]["vout"], "sequence": rawtx1["vin"][0]["sequence"]}]
clone_outputs = {rawtx1["vout"][0]["scriptPubKey"]["addresses"][0]: rawtx1["vout"][0]["value"],
rawtx1["vout"][1]["scriptPubKey"]["addresses"][0]: rawtx1["vout"][1]["value"]}
clone_locktime = rawtx1["locktime"]