Diffstat (limited to 'test/functional')
-rw-r--r--  test/functional/README.md | 52
-rwxr-xr-x  test/functional/combine_logs.py | 2
-rwxr-xr-x  test/functional/example_test.py | 2
-rwxr-xr-x  test/functional/feature_bip68_sequence.py | 18
-rwxr-xr-x  test/functional/feature_bip9_softforks.py | 283
-rwxr-xr-x  test/functional/feature_block.py | 1314
-rwxr-xr-x  test/functional/feature_blocksdir.py | 36
-rwxr-xr-x  test/functional/feature_cltv.py | 4
-rwxr-xr-x  test/functional/feature_config_args.py | 19
-rwxr-xr-x  test/functional/feature_csv_activation.py | 566
-rwxr-xr-x  test/functional/feature_dbcrash.py | 2
-rwxr-xr-x  test/functional/feature_dersig.py | 2
-rwxr-xr-x  test/functional/feature_fee_estimation.py | 18
-rwxr-xr-x  test/functional/feature_help.py | 46
-rwxr-xr-x  test/functional/feature_logging.py | 30
-rwxr-xr-x  test/functional/feature_maxuploadtarget.py | 14
-rwxr-xr-x  test/functional/feature_notifications.py | 6
-rwxr-xr-x  test/functional/feature_nulldummy.py | 2
-rwxr-xr-x  test/functional/feature_proxy.py | 2
-rwxr-xr-x  test/functional/feature_pruning.py | 37
-rwxr-xr-x  test/functional/feature_rbf.py | 2
-rwxr-xr-x  test/functional/feature_reindex.py | 7
-rwxr-xr-x  test/functional/feature_segwit.py | 42
-rwxr-xr-x  test/functional/feature_uacomment.py | 13
-rwxr-xr-x  test/functional/interface_bitcoin_cli.py | 10
-rwxr-xr-x  test/functional/interface_rest.py | 426
-rwxr-xr-x  test/functional/interface_zmq.py | 3
-rwxr-xr-x  test/functional/mempool_accept.py | 293
-rwxr-xr-x  test/functional/mempool_limit.py | 6
-rwxr-xr-x  test/functional/mempool_packages.py | 59
-rwxr-xr-x  test/functional/mempool_persist.py | 18
-rwxr-xr-x  test/functional/mempool_reorg.py | 2
-rwxr-xr-x  test/functional/mining_prioritisetransaction.py | 6
-rwxr-xr-x  test/functional/p2p_compactblocks.py | 12
-rwxr-xr-x  test/functional/p2p_feefilter.py | 4
-rwxr-xr-x  test/functional/p2p_invalid_block.py | 116
-rwxr-xr-x  test/functional/p2p_invalid_tx.py | 7
-rwxr-xr-x  test/functional/p2p_leak.py | 2
-rwxr-xr-x  test/functional/p2p_mempool.py | 2
-rwxr-xr-x  test/functional/p2p_node_network_limited.py | 80
-rwxr-xr-x  test/functional/p2p_segwit.py | 116
-rwxr-xr-x  test/functional/p2p_sendheaders.py | 9
-rwxr-xr-x  test/functional/p2p_timeouts.py | 8
-rwxr-xr-x  test/functional/p2p_unrequested_blocks.py | 8
-rwxr-xr-x  test/functional/rpc_bind.py | 105
-rwxr-xr-x  test/functional/rpc_blockchain.py | 75
-rwxr-xr-x  test/functional/rpc_deprecated.py | 93
-rwxr-xr-x  test/functional/rpc_fundrawtransaction.py | 30
-rwxr-xr-x  test/functional/rpc_net.py | 48
-rwxr-xr-x  test/functional/rpc_preciousblock.py | 3
-rwxr-xr-x  test/functional/rpc_rawtransaction.py | 111
-rwxr-xr-x  test/functional/rpc_signrawtransaction.py | 30
-rwxr-xr-x  test/functional/rpc_txoutproof.py | 6
-rwxr-xr-x  test/functional/rpc_users.py | 62
-rw-r--r--  test/functional/test_framework/authproxy.py | 2
-rw-r--r--  test/functional/test_framework/blockstore.py | 160
-rw-r--r--  test/functional/test_framework/blocktools.py | 116
-rwxr-xr-x  test/functional/test_framework/comptool.py | 397
-rw-r--r--  test/functional/test_framework/key.py | 8
-rwxr-xr-x [-rw-r--r--]  test/functional/test_framework/messages.py | 24
-rwxr-xr-x  test/functional/test_framework/mininode.py | 8
-rw-r--r--  test/functional/test_framework/netutil.py | 3
-rw-r--r--  test/functional/test_framework/script.py | 35
-rw-r--r--  test/functional/test_framework/socks5.py | 12
-rwxr-xr-x  test/functional/test_framework/test_framework.py | 131
-rwxr-xr-x  test/functional/test_framework/test_node.py | 128
-rw-r--r--  test/functional/test_framework/util.py | 96
-rwxr-xr-x  test/functional/test_runner.py | 106
-rwxr-xr-x  test/functional/wallet_abandonconflict.py | 10
-rwxr-xr-x  test/functional/wallet_accounts.py | 206
-rwxr-xr-x  test/functional/wallet_address_types.py | 4
-rwxr-xr-x  test/functional/wallet_backup.py | 48
-rwxr-xr-x  test/functional/wallet_basic.py | 36
-rwxr-xr-x  test/functional/wallet_bumpfee.py | 21
-rwxr-xr-x  test/functional/wallet_dump.py | 34
-rwxr-xr-x  test/functional/wallet_encryption.py | 9
-rwxr-xr-x  test/functional/wallet_fallbackfee.py | 28
-rwxr-xr-x  test/functional/wallet_hd.py | 54
-rwxr-xr-x  test/functional/wallet_import_rescan.py | 8
-rwxr-xr-x  test/functional/wallet_importmulti.py | 110
-rwxr-xr-x  test/functional/wallet_importprunedfunds.py | 17
-rwxr-xr-x  test/functional/wallet_keypool.py | 6
-rwxr-xr-x  test/functional/wallet_keypool_topup.py | 25
-rwxr-xr-x  test/functional/wallet_labels.py | 241
-rwxr-xr-x  test/functional/wallet_listreceivedby.py | 93
-rwxr-xr-x  test/functional/wallet_listsinceblock.py | 9
-rwxr-xr-x  test/functional/wallet_listtransactions.py (renamed from test/functional/rpc_listtransactions.py) | 11
-rwxr-xr-x  test/functional/wallet_multiwallet.py | 131
-rwxr-xr-x  test/functional/wallet_txn_clone.py | 8
-rwxr-xr-x  test/functional/wallet_txn_doublespend.py | 12
90 files changed, 3397 insertions, 3219 deletions
diff --git a/test/functional/README.md b/test/functional/README.md
index 662b4b44d5..21050cc2fa 100644
--- a/test/functional/README.md
+++ b/test/functional/README.md
@@ -89,52 +89,6 @@ thread.)
- Can be used to write tests where specific P2P protocol behavior is tested.
Example tests are `p2p_unrequested_blocks.py`, `p2p_compactblocks.py`.
-#### Comptool
-
-- Comptool is a Testing framework for writing tests that compare the block/tx acceptance
-behavior of a bitcoind against 1 or more other bitcoind instances. It should not be used
-to write static tests with known outcomes, since that type of test is easier to write and
-maintain using the standard BitcoinTestFramework.
-
-- Set the `num_nodes` variable (defined in `ComparisonTestFramework`) to start up
-1 or more nodes. If using 1 node, then `--testbinary` can be used as a command line
-option to change the bitcoind binary used by the test. If using 2 or more nodes,
-then `--refbinary` can be optionally used to change the bitcoind that will be used
-on nodes 2 and up.
-
-- Implement a (generator) function called `get_tests()` which yields `TestInstance`s.
-Each `TestInstance` consists of:
- - A list of `[object, outcome, hash]` entries
- * `object` is a `CBlock`, `CTransaction`, or
- `CBlockHeader`. `CBlock`'s and `CTransaction`'s are tested for
- acceptance. `CBlockHeader`s can be used so that the test runner can deliver
- complete headers-chains when requested from the bitcoind, to allow writing
- tests where blocks can be delivered out of order but still processed by
- headers-first bitcoind's.
- * `outcome` is `True`, `False`, or `None`. If `True`
- or `False`, the tip is compared with the expected tip -- either the
- block passed in, or the hash specified as the optional 3rd entry. If
- `None` is specified, then the test will compare all the bitcoind's
- being tested to see if they all agree on what the best tip is.
- * `hash` is the block hash of the tip to compare against. Optional to
- specify; if left out then the hash of the block passed in will be used as
- the expected tip. This allows for specifying an expected tip while testing
- the handling of either invalid blocks or blocks delivered out of order,
- which complete a longer chain.
- - `sync_every_block`: `True/False`. If `False`, then all blocks
- are inv'ed together, and the test runner waits until the node receives the
- last one, and tests only the last block for tip acceptance using the
- outcome and specified tip. If `True`, then each block is tested in
- sequence and synced (this is slower when processing many blocks).
- - `sync_every_transaction`: `True/False`. Analogous to
- `sync_every_block`, except if the outcome on the last tx is "None",
- then the contents of the entire mempool are compared across all bitcoind
- connections. If `True` or `False`, then only the last tx's
- acceptance is tested against the given outcome.
-
-- For examples of tests written in this framework, see
- `p2p_invalid_block.py` and `feature_block.py`.
-
### test-framework modules
#### [test_framework/authproxy.py](test_framework/authproxy.py)
@@ -149,15 +103,9 @@ Generally useful functions.
#### [test_framework/mininode.py](test_framework/mininode.py)
Basic code to support P2P connectivity to a bitcoind.
-#### [test_framework/comptool.py](test_framework/comptool.py)
-Framework for comparison-tool style, P2P tests.
-
#### [test_framework/script.py](test_framework/script.py)
Utilities for manipulating transaction scripts (originally from python-bitcoinlib)
-#### [test_framework/blockstore.py](test_framework/blockstore.py)
-Implements disk-backed block and tx storage.
-
#### [test_framework/key.py](test_framework/key.py)
Wrapper around OpenSSL EC_Key (originally from python-bitcoinlib)
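
For context now that the documentation above is removed, here is a minimal sketch of the comptool pattern it describes, modelled on the deleted `feature_bip9_softforks.py` further down in this diff. It targets the pre-removal API (`ComparisonTestFramework`, `TestManager`, `TestInstance`) and is purely illustrative:

```python
#!/usr/bin/env python3
# Illustrative sketch of the removed comptool pattern (pre-removal API).
import time

from test_framework.test_framework import ComparisonTestFramework
from test_framework.comptool import TestManager, TestInstance
from test_framework.mininode import network_thread_start
from test_framework.blocktools import create_block, create_coinbase

class ExampleComptoolTest(ComparisonTestFramework):
    def set_test_params(self):
        self.num_nodes = 1
        self.setup_clean_chain = True

    def run_test(self):
        # The TestManager drives get_tests() and compares the nodes' tips
        self.test = TestManager(self, self.options.tmpdir)
        self.test.add_all_connections(self.nodes)
        network_thread_start()
        self.test.run()

    def get_tests(self):
        # Build one block on top of the current tip and expect it to be accepted
        tip = int(self.nodes[0].getbestblockhash(), 16)
        height = self.nodes[0].getblockcount() + 1
        block = create_block(tip, create_coinbase(height), int(time.time()) + 1)
        block.solve()
        # Each entry is [object, outcome]; True means "this block should become the tip"
        yield TestInstance([[block, True]], sync_every_block=True)

if __name__ == '__main__':
    ExampleComptoolTest().main()
```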
diff --git a/test/functional/combine_logs.py b/test/functional/combine_logs.py
index 3ca74ea35e..d1bf9206b2 100755
--- a/test/functional/combine_logs.py
+++ b/test/functional/combine_logs.py
@@ -13,7 +13,7 @@ import re
import sys
# Matches on the date format at the start of the log event
-TIMESTAMP_PATTERN = re.compile(r"^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d{6}")
+TIMESTAMP_PATTERN = re.compile(r"^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{6}Z")
LogEvent = namedtuple('LogEvent', ['timestamp', 'source', 'event'])
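
As a quick, self-contained check of the updated pattern (the sample log lines below are hypothetical), only ISO 8601 timestamps with a trailing `Z` now match:

```python
import re

# Updated pattern from combine_logs.py: ISO 8601 date, 'T' separator, microseconds, trailing Z
TIMESTAMP_PATTERN = re.compile(r"^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{6}Z")

sample_new = "2018-03-20T14:12:05.123456Z Bitcoin Core version v0.16.99.0"  # hypothetical line
sample_old = "2018-03-20 14:12:05.123456 Bitcoin Core version v0.16.99.0"   # old space-separated form

assert TIMESTAMP_PATTERN.match(sample_new)
assert not TIMESTAMP_PATTERN.match(sample_old)
```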
diff --git a/test/functional/example_test.py b/test/functional/example_test.py
index 12be685ecf..05d1c1bf4e 100755
--- a/test/functional/example_test.py
+++ b/test/functional/example_test.py
@@ -38,7 +38,7 @@ class BaseNode(P2PInterface):
def __init__(self):
"""Initialize the P2PInterface
- Used to inialize custom properties for the Node that aren't
+ Used to initialize custom properties for the Node that aren't
included by default in the base class. Be aware that the P2PInterface
base class already stores a counter for each P2P message type and the
last received message of each type, which should be sufficient for the
diff --git a/test/functional/feature_bip68_sequence.py b/test/functional/feature_bip68_sequence.py
index 94b13653b9..eee38ce648 100755
--- a/test/functional/feature_bip68_sequence.py
+++ b/test/functional/feature_bip68_sequence.py
@@ -14,7 +14,7 @@ SEQUENCE_LOCKTIME_GRANULARITY = 9 # this is a bit-shift
SEQUENCE_LOCKTIME_MASK = 0x0000ffff
# RPC error for non-BIP68 final transactions
-NOT_FINAL_ERROR = "64: non-BIP68-final"
+NOT_FINAL_ERROR = "non-BIP68-final (code 64)"
class BIP68Test(BitcoinTestFramework):
def set_test_params(self):
@@ -70,7 +70,7 @@ class BIP68Test(BitcoinTestFramework):
tx1.vin = [CTxIn(COutPoint(int(utxo["txid"], 16), utxo["vout"]), nSequence=sequence_value)]
tx1.vout = [CTxOut(value, CScript([b'a']))]
- tx1_signed = self.nodes[0].signrawtransaction(ToHex(tx1))["hex"]
+ tx1_signed = self.nodes[0].signrawtransactionwithwallet(ToHex(tx1))["hex"]
tx1_id = self.nodes[0].sendrawtransaction(tx1_signed)
tx1_id = int(tx1_id, 16)
@@ -129,7 +129,7 @@ class BIP68Test(BitcoinTestFramework):
# Track whether any sequence locks used should fail
should_pass = True
-
+
# Track whether this transaction was built with sequence locks
using_sequence_locks = False
@@ -176,7 +176,7 @@ class BIP68Test(BitcoinTestFramework):
# Overestimate the size of the tx - signatures should be less than 120 bytes, and leave 50 for the output
tx_size = len(ToHex(tx))//2 + 120*num_inputs + 50
tx.vout.append(CTxOut(int(value-self.relayfee*tx_size*COIN/1000), CScript([b'a'])))
- rawtx = self.nodes[0].signrawtransaction(ToHex(tx))["hex"]
+ rawtx = self.nodes[0].signrawtransactionwithwallet(ToHex(tx))["hex"]
if (using_sequence_locks and not should_pass):
# This transaction should be rejected
@@ -205,7 +205,7 @@ class BIP68Test(BitcoinTestFramework):
tx2.nVersion = 2
tx2.vin = [CTxIn(COutPoint(tx1.sha256, 0), nSequence=0)]
tx2.vout = [CTxOut(int(tx1.vout[0].nValue - self.relayfee*COIN), CScript([b'a']))]
- tx2_raw = self.nodes[0].signrawtransaction(ToHex(tx2))["hex"]
+ tx2_raw = self.nodes[0].signrawtransactionwithwallet(ToHex(tx2))["hex"]
tx2 = FromHex(tx2, tx2_raw)
tx2.rehash()
@@ -278,7 +278,7 @@ class BIP68Test(BitcoinTestFramework):
utxos = self.nodes[0].listunspent()
tx5.vin.append(CTxIn(COutPoint(int(utxos[0]["txid"], 16), utxos[0]["vout"]), nSequence=1))
tx5.vout[0].nValue += int(utxos[0]["amount"]*COIN)
- raw_tx5 = self.nodes[0].signrawtransaction(ToHex(tx5))["hex"]
+ raw_tx5 = self.nodes[0].signrawtransactionwithwallet(ToHex(tx5))["hex"]
assert_raises_rpc_error(-26, NOT_FINAL_ERROR, self.nodes[0].sendrawtransaction, raw_tx5)
@@ -338,12 +338,12 @@ class BIP68Test(BitcoinTestFramework):
tx2.vout = [CTxOut(int(tx1.vout[0].nValue - self.relayfee*COIN), CScript([b'a']))]
# sign tx2
- tx2_raw = self.nodes[0].signrawtransaction(ToHex(tx2))["hex"]
+ tx2_raw = self.nodes[0].signrawtransactionwithwallet(ToHex(tx2))["hex"]
tx2 = FromHex(tx2, tx2_raw)
tx2.rehash()
self.nodes[0].sendrawtransaction(ToHex(tx2))
-
+
# Now make an invalid spend of tx2 according to BIP68
sequence_value = 100 # 100 block relative locktime
@@ -388,7 +388,7 @@ class BIP68Test(BitcoinTestFramework):
rawtxfund = self.nodes[1].fundrawtransaction(rawtx)['hex']
tx = FromHex(CTransaction(), rawtxfund)
tx.nVersion = 2
- tx_signed = self.nodes[1].signrawtransaction(ToHex(tx))["hex"]
+ tx_signed = self.nodes[1].signrawtransactionwithwallet(ToHex(tx))["hex"]
self.nodes[1].sendrawtransaction(tx_signed)
if __name__ == '__main__':
diff --git a/test/functional/feature_bip9_softforks.py b/test/functional/feature_bip9_softforks.py
deleted file mode 100755
index ae92e9f07c..0000000000
--- a/test/functional/feature_bip9_softforks.py
+++ /dev/null
@@ -1,283 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (c) 2015-2017 The Bitcoin Core developers
-# Distributed under the MIT software license, see the accompanying
-# file COPYING or http://www.opensource.org/licenses/mit-license.php.
-"""Test BIP 9 soft forks.
-
-Connect to a single node.
-regtest lock-in with 108/144 block signalling
-activation after a further 144 blocks
-mine 2 block and save coinbases for later use
-mine 141 blocks to transition from DEFINED to STARTED
-mine 100 blocks signalling readiness and 44 not in order to fail to change state this period
-mine 108 blocks signalling readiness and 36 blocks not signalling readiness (STARTED->LOCKED_IN)
-mine a further 143 blocks (LOCKED_IN)
-test that enforcement has not triggered (which triggers ACTIVE)
-test that enforcement has triggered
-"""
-from io import BytesIO
-import shutil
-import time
-import itertools
-
-from test_framework.test_framework import ComparisonTestFramework
-from test_framework.util import *
-from test_framework.mininode import CTransaction, network_thread_start
-from test_framework.blocktools import create_coinbase, create_block
-from test_framework.comptool import TestInstance, TestManager
-from test_framework.script import CScript, OP_1NEGATE, OP_CHECKSEQUENCEVERIFY, OP_DROP
-
-class BIP9SoftForksTest(ComparisonTestFramework):
- def set_test_params(self):
- self.num_nodes = 1
- self.extra_args = [['-whitelist=127.0.0.1']]
- self.setup_clean_chain = True
-
- def run_test(self):
- self.test = TestManager(self, self.options.tmpdir)
- self.test.add_all_connections(self.nodes)
- network_thread_start()
- self.test.run()
-
- def create_transaction(self, node, coinbase, to_address, amount):
- from_txid = node.getblock(coinbase)['tx'][0]
- inputs = [{ "txid" : from_txid, "vout" : 0}]
- outputs = { to_address : amount }
- rawtx = node.createrawtransaction(inputs, outputs)
- tx = CTransaction()
- f = BytesIO(hex_str_to_bytes(rawtx))
- tx.deserialize(f)
- tx.nVersion = 2
- return tx
-
- def sign_transaction(self, node, tx):
- signresult = node.signrawtransaction(bytes_to_hex_str(tx.serialize()))
- tx = CTransaction()
- f = BytesIO(hex_str_to_bytes(signresult['hex']))
- tx.deserialize(f)
- return tx
-
- def generate_blocks(self, number, version, test_blocks = []):
- for i in range(number):
- block = create_block(self.tip, create_coinbase(self.height), self.last_block_time + 1)
- block.nVersion = version
- block.rehash()
- block.solve()
- test_blocks.append([block, True])
- self.last_block_time += 1
- self.tip = block.sha256
- self.height += 1
- return test_blocks
-
- def get_bip9_status(self, key):
- info = self.nodes[0].getblockchaininfo()
- return info['bip9_softforks'][key]
-
- def test_BIP(self, bipName, activated_version, invalidate, invalidatePostSignature, bitno):
- assert_equal(self.get_bip9_status(bipName)['status'], 'defined')
- assert_equal(self.get_bip9_status(bipName)['since'], 0)
-
- # generate some coins for later
- self.coinbase_blocks = self.nodes[0].generate(2)
- self.height = 3 # height of the next block to build
- self.tip = int("0x" + self.nodes[0].getbestblockhash(), 0)
- self.nodeaddress = self.nodes[0].getnewaddress()
- self.last_block_time = int(time.time())
-
- assert_equal(self.get_bip9_status(bipName)['status'], 'defined')
- assert_equal(self.get_bip9_status(bipName)['since'], 0)
- tmpl = self.nodes[0].getblocktemplate({})
- assert(bipName not in tmpl['rules'])
- assert(bipName not in tmpl['vbavailable'])
- assert_equal(tmpl['vbrequired'], 0)
- assert_equal(tmpl['version'], 0x20000000)
-
- # Test 1
- # Advance from DEFINED to STARTED
- test_blocks = self.generate_blocks(141, 4)
- yield TestInstance(test_blocks, sync_every_block=False)
-
- assert_equal(self.get_bip9_status(bipName)['status'], 'started')
- assert_equal(self.get_bip9_status(bipName)['since'], 144)
- assert_equal(self.get_bip9_status(bipName)['statistics']['elapsed'], 0)
- assert_equal(self.get_bip9_status(bipName)['statistics']['count'], 0)
- tmpl = self.nodes[0].getblocktemplate({})
- assert(bipName not in tmpl['rules'])
- assert_equal(tmpl['vbavailable'][bipName], bitno)
- assert_equal(tmpl['vbrequired'], 0)
- assert(tmpl['version'] & activated_version)
-
- # Test 1-A
- # check stats after max number of "signalling not" blocks such that LOCKED_IN still possible this period
- test_blocks = self.generate_blocks(36, 4, test_blocks) # 0x00000004 (signalling not)
- test_blocks = self.generate_blocks(10, activated_version) # 0x20000001 (signalling ready)
- yield TestInstance(test_blocks, sync_every_block=False)
-
- assert_equal(self.get_bip9_status(bipName)['statistics']['elapsed'], 46)
- assert_equal(self.get_bip9_status(bipName)['statistics']['count'], 10)
- assert_equal(self.get_bip9_status(bipName)['statistics']['possible'], True)
-
- # Test 1-B
- # check stats after one additional "signalling not" block -- LOCKED_IN no longer possible this period
- test_blocks = self.generate_blocks(1, 4, test_blocks) # 0x00000004 (signalling not)
- yield TestInstance(test_blocks, sync_every_block=False)
-
- assert_equal(self.get_bip9_status(bipName)['statistics']['elapsed'], 47)
- assert_equal(self.get_bip9_status(bipName)['statistics']['count'], 10)
- assert_equal(self.get_bip9_status(bipName)['statistics']['possible'], False)
-
- # Test 1-C
- # finish period with "ready" blocks, but soft fork will still fail to advance to LOCKED_IN
- test_blocks = self.generate_blocks(97, activated_version) # 0x20000001 (signalling ready)
- yield TestInstance(test_blocks, sync_every_block=False)
-
- assert_equal(self.get_bip9_status(bipName)['statistics']['elapsed'], 0)
- assert_equal(self.get_bip9_status(bipName)['statistics']['count'], 0)
- assert_equal(self.get_bip9_status(bipName)['statistics']['possible'], True)
- assert_equal(self.get_bip9_status(bipName)['status'], 'started')
-
- # Test 2
- # Fail to achieve LOCKED_IN 100 out of 144 signal bit 1
- # using a variety of bits to simulate multiple parallel softforks
- test_blocks = self.generate_blocks(50, activated_version) # 0x20000001 (signalling ready)
- test_blocks = self.generate_blocks(20, 4, test_blocks) # 0x00000004 (signalling not)
- test_blocks = self.generate_blocks(50, activated_version, test_blocks) # 0x20000101 (signalling ready)
- test_blocks = self.generate_blocks(24, 4, test_blocks) # 0x20010000 (signalling not)
- yield TestInstance(test_blocks, sync_every_block=False)
-
- assert_equal(self.get_bip9_status(bipName)['status'], 'started')
- assert_equal(self.get_bip9_status(bipName)['since'], 144)
- assert_equal(self.get_bip9_status(bipName)['statistics']['elapsed'], 0)
- assert_equal(self.get_bip9_status(bipName)['statistics']['count'], 0)
- tmpl = self.nodes[0].getblocktemplate({})
- assert(bipName not in tmpl['rules'])
- assert_equal(tmpl['vbavailable'][bipName], bitno)
- assert_equal(tmpl['vbrequired'], 0)
- assert(tmpl['version'] & activated_version)
-
- # Test 3
- # 108 out of 144 signal bit 1 to achieve LOCKED_IN
- # using a variety of bits to simulate multiple parallel softforks
- test_blocks = self.generate_blocks(57, activated_version) # 0x20000001 (signalling ready)
- test_blocks = self.generate_blocks(26, 4, test_blocks) # 0x00000004 (signalling not)
- test_blocks = self.generate_blocks(50, activated_version, test_blocks) # 0x20000101 (signalling ready)
- test_blocks = self.generate_blocks(10, 4, test_blocks) # 0x20010000 (signalling not)
- yield TestInstance(test_blocks, sync_every_block=False)
-
- # check counting stats and "possible" flag before last block of this period achieves LOCKED_IN...
- assert_equal(self.get_bip9_status(bipName)['statistics']['elapsed'], 143)
- assert_equal(self.get_bip9_status(bipName)['statistics']['count'], 107)
- assert_equal(self.get_bip9_status(bipName)['statistics']['possible'], True)
- assert_equal(self.get_bip9_status(bipName)['status'], 'started')
-
- # ...continue with Test 3
- test_blocks = self.generate_blocks(1, activated_version) # 0x20000001 (signalling ready)
- yield TestInstance(test_blocks, sync_every_block=False)
-
- assert_equal(self.get_bip9_status(bipName)['status'], 'locked_in')
- assert_equal(self.get_bip9_status(bipName)['since'], 576)
- tmpl = self.nodes[0].getblocktemplate({})
- assert(bipName not in tmpl['rules'])
-
- # Test 4
- # 143 more version 536870913 blocks (waiting period-1)
- test_blocks = self.generate_blocks(143, 4)
- yield TestInstance(test_blocks, sync_every_block=False)
-
- assert_equal(self.get_bip9_status(bipName)['status'], 'locked_in')
- assert_equal(self.get_bip9_status(bipName)['since'], 576)
- tmpl = self.nodes[0].getblocktemplate({})
- assert(bipName not in tmpl['rules'])
-
- # Test 5
- # Check that the new rule is enforced
- spendtx = self.create_transaction(self.nodes[0],
- self.coinbase_blocks[0], self.nodeaddress, 1.0)
- invalidate(spendtx)
- spendtx = self.sign_transaction(self.nodes[0], spendtx)
- spendtx.rehash()
- invalidatePostSignature(spendtx)
- spendtx.rehash()
- block = create_block(self.tip, create_coinbase(self.height), self.last_block_time + 1)
- block.nVersion = activated_version
- block.vtx.append(spendtx)
- block.hashMerkleRoot = block.calc_merkle_root()
- block.rehash()
- block.solve()
-
- self.last_block_time += 1
- self.tip = block.sha256
- self.height += 1
- yield TestInstance([[block, True]])
-
- assert_equal(self.get_bip9_status(bipName)['status'], 'active')
- assert_equal(self.get_bip9_status(bipName)['since'], 720)
- tmpl = self.nodes[0].getblocktemplate({})
- assert(bipName in tmpl['rules'])
- assert(bipName not in tmpl['vbavailable'])
- assert_equal(tmpl['vbrequired'], 0)
- assert(not (tmpl['version'] & (1 << bitno)))
-
- # Test 6
- # Check that the new sequence lock rules are enforced
- spendtx = self.create_transaction(self.nodes[0],
- self.coinbase_blocks[1], self.nodeaddress, 1.0)
- invalidate(spendtx)
- spendtx = self.sign_transaction(self.nodes[0], spendtx)
- spendtx.rehash()
- invalidatePostSignature(spendtx)
- spendtx.rehash()
-
- block = create_block(self.tip, create_coinbase(self.height), self.last_block_time + 1)
- block.nVersion = 5
- block.vtx.append(spendtx)
- block.hashMerkleRoot = block.calc_merkle_root()
- block.rehash()
- block.solve()
- self.last_block_time += 1
- yield TestInstance([[block, False]])
-
- # Restart all
- self.test.clear_all_connections()
- self.stop_nodes()
- self.nodes = []
- shutil.rmtree(self.options.tmpdir + "/node0")
- self.setup_chain()
- self.setup_network()
- self.test.add_all_connections(self.nodes)
- network_thread_start()
- self.test.p2p_connections[0].wait_for_verack()
-
- def get_tests(self):
- for test in itertools.chain(
- self.test_BIP('csv', 0x20000001, self.sequence_lock_invalidate, self.donothing, 0),
- self.test_BIP('csv', 0x20000001, self.mtp_invalidate, self.donothing, 0),
- self.test_BIP('csv', 0x20000001, self.donothing, self.csv_invalidate, 0)
- ):
- yield test
-
- def donothing(self, tx):
- return
-
- def csv_invalidate(self, tx):
- """Modify the signature in vin 0 of the tx to fail CSV
- Prepends -1 CSV DROP in the scriptSig itself.
- """
- tx.vin[0].scriptSig = CScript([OP_1NEGATE, OP_CHECKSEQUENCEVERIFY, OP_DROP] +
- list(CScript(tx.vin[0].scriptSig)))
-
- def sequence_lock_invalidate(self, tx):
- """Modify the nSequence to make it fails once sequence lock rule is
- activated (high timespan).
- """
- tx.vin[0].nSequence = 0x00FFFFFF
- tx.nLockTime = 0
-
- def mtp_invalidate(self, tx):
- """Modify the nLockTime to make it fails once MTP rule is activated."""
- # Disable Sequence lock, Activate nLockTime
- tx.vin[0].nSequence = 0x90FFFFFF
- tx.nLockTime = self.last_block_time
-
-if __name__ == '__main__':
- BIP9SoftForksTest().main()
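
For reference, the signalling arithmetic in the deleted test's docstring (144-block windows, 108-block threshold on regtest) can be sanity-checked directly; the constants below are taken from that docstring, not from any remaining test:

```python
# Regtest BIP9 parameters as described in the removed docstring
PERIOD = 144      # blocks per BIP9 evaluation window on regtest
THRESHOLD = 108   # signalling blocks required within one window for LOCKED_IN

# 100 signalling + 44 non-signalling blocks fall short of the threshold
assert 100 + 44 == PERIOD and 100 < THRESHOLD
# 108 signalling + 36 non-signalling blocks reach it exactly -> STARTED -> LOCKED_IN
assert 108 + 36 == PERIOD and 108 >= THRESHOLD
# One further full window (144 blocks) elapses before LOCKED_IN -> ACTIVE
```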
diff --git a/test/functional/feature_block.py b/test/functional/feature_block.py
index fe9bbda14b..38b76239e5 100755
--- a/test/functional/feature_block.py
+++ b/test/functional/feature_block.py
@@ -2,36 +2,59 @@
# Copyright (c) 2015-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
-"""Test block processing.
-
-This reimplements tests from the bitcoinj/FullBlockTestGenerator used
-by the pull-tester.
-
-We use the testing framework in which we expect a particular answer from
-each test.
-"""
-
-from test_framework.test_framework import ComparisonTestFramework
-from test_framework.util import *
-from test_framework.comptool import TestManager, TestInstance, RejectResult
-from test_framework.blocktools import *
+"""Test block processing."""
+import copy
+import struct
import time
+
+from test_framework.blocktools import create_block, create_coinbase, create_transaction, get_legacy_sigopcount_block
from test_framework.key import CECKey
-from test_framework.script import *
-from test_framework.mininode import network_thread_start
-import struct
+from test_framework.messages import (
+ CBlock,
+ COIN,
+ COutPoint,
+ CTransaction,
+ CTxIn,
+ CTxOut,
+ MAX_BLOCK_BASE_SIZE,
+ uint256_from_compact,
+ uint256_from_str,
+)
+from test_framework.mininode import P2PDataStore, network_thread_start, network_thread_join
+from test_framework.script import (
+ CScript,
+ MAX_SCRIPT_ELEMENT_SIZE,
+ OP_2DUP,
+ OP_CHECKMULTISIG,
+ OP_CHECKMULTISIGVERIFY,
+ OP_CHECKSIG,
+ OP_CHECKSIGVERIFY,
+ OP_ELSE,
+ OP_ENDIF,
+ OP_EQUAL,
+ OP_FALSE,
+ OP_HASH160,
+ OP_IF,
+ OP_INVALIDOPCODE,
+ OP_RETURN,
+ OP_TRUE,
+ SIGHASH_ALL,
+ SignatureHash,
+ hash160,
+)
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import assert_equal
+
+MAX_BLOCK_SIGOPS = 20000
class PreviousSpendableOutput():
- def __init__(self, tx = CTransaction(), n = -1):
+ def __init__(self, tx=CTransaction(), n=-1):
self.tx = tx
self.n = n # the output we're spending
# Use this class for tests that require behavior other than normal "mininode" behavior.
# For now, it is used to serialize a bloated varint (b64).
class CBrokenBlock(CBlock):
- def __init__(self, header=None):
- super(CBrokenBlock, self).__init__(header)
-
def initialize(self, base_block):
self.vtx = copy.deepcopy(base_block.vtx)
self.hashMerkleRoot = self.calc_merkle_root()
@@ -48,381 +71,272 @@ class CBrokenBlock(CBlock):
return r
def normal_serialize(self):
- r = b""
- r += super(CBrokenBlock, self).serialize()
- return r
+ return super().serialize()
-class FullBlockTest(ComparisonTestFramework):
- # Can either run this test as 1 node with expected answers, or two and compare them.
- # Change the "outcome" variable from each TestInstance object to only do the comparison.
+class FullBlockTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.setup_clean_chain = True
+ self.extra_args = [[]]
+
+ def run_test(self):
+ node = self.nodes[0] # convenience reference to the node
+
+ # reconnect_p2p() expects the network thread to be running
+ network_thread_start()
+
+ self.reconnect_p2p()
+
self.block_heights = {}
self.coinbase_key = CECKey()
self.coinbase_key.set_secretbytes(b"horsebattery")
self.coinbase_pubkey = self.coinbase_key.get_pubkey()
self.tip = None
self.blocks = {}
-
- def add_options(self, parser):
- super().add_options(parser)
- parser.add_option("--runbarelyexpensive", dest="runbarelyexpensive", default=True)
-
- def run_test(self):
- self.test = TestManager(self, self.options.tmpdir)
- self.test.add_all_connections(self.nodes)
- network_thread_start()
- self.test.run()
-
- def add_transactions_to_block(self, block, tx_list):
- [ tx.rehash() for tx in tx_list ]
- block.vtx.extend(tx_list)
-
- # this is a little handier to use than the version in blocktools.py
- def create_tx(self, spend_tx, n, value, script=CScript([OP_TRUE])):
- tx = create_transaction(spend_tx, n, b"", value, script)
- return tx
-
- # sign a transaction, using the key we know about
- # this signs input 0 in tx, which is assumed to be spending output n in spend_tx
- def sign_tx(self, tx, spend_tx, n):
- scriptPubKey = bytearray(spend_tx.vout[n].scriptPubKey)
- if (scriptPubKey[0] == OP_TRUE): # an anyone-can-spend
- tx.vin[0].scriptSig = CScript()
- return
- (sighash, err) = SignatureHash(spend_tx.vout[n].scriptPubKey, tx, 0, SIGHASH_ALL)
- tx.vin[0].scriptSig = CScript([self.coinbase_key.sign(sighash) + bytes(bytearray([SIGHASH_ALL]))])
-
- def create_and_sign_transaction(self, spend_tx, n, value, script=CScript([OP_TRUE])):
- tx = self.create_tx(spend_tx, n, value, script)
- self.sign_tx(tx, spend_tx, n)
- tx.rehash()
- return tx
-
- def next_block(self, number, spend=None, additional_coinbase_value=0, script=CScript([OP_TRUE]), solve=True):
- if self.tip == None:
- base_block_hash = self.genesis_hash
- block_time = int(time.time())+1
- else:
- base_block_hash = self.tip.sha256
- block_time = self.tip.nTime + 1
- # First create the coinbase
- height = self.block_heights[base_block_hash] + 1
- coinbase = create_coinbase(height, self.coinbase_pubkey)
- coinbase.vout[0].nValue += additional_coinbase_value
- coinbase.rehash()
- if spend == None:
- block = create_block(base_block_hash, coinbase, block_time)
- else:
- coinbase.vout[0].nValue += spend.tx.vout[spend.n].nValue - 1 # all but one satoshi to fees
- coinbase.rehash()
- block = create_block(base_block_hash, coinbase, block_time)
- tx = create_transaction(spend.tx, spend.n, b"", 1, script) # spend 1 satoshi
- self.sign_tx(tx, spend.tx, spend.n)
- self.add_transactions_to_block(block, [tx])
- block.hashMerkleRoot = block.calc_merkle_root()
- if solve:
- block.solve()
- self.tip = block
- self.block_heights[block.sha256] = height
- assert number not in self.blocks
- self.blocks[number] = block
- return block
-
- def get_tests(self):
self.genesis_hash = int(self.nodes[0].getbestblockhash(), 16)
self.block_heights[self.genesis_hash] = 0
- spendable_outputs = []
-
- # save the current tip so it can be spent by a later block
- def save_spendable_output():
- spendable_outputs.append(self.tip)
-
- # get an output that we previously marked as spendable
- def get_spendable_output():
- return PreviousSpendableOutput(spendable_outputs.pop(0).vtx[0], 0)
-
- # returns a test case that asserts that the current tip was accepted
- def accepted():
- return TestInstance([[self.tip, True]])
-
- # returns a test case that asserts that the current tip was rejected
- def rejected(reject = None):
- if reject is None:
- return TestInstance([[self.tip, False]])
- else:
- return TestInstance([[self.tip, reject]])
-
- # move the tip back to a previous block
- def tip(number):
- self.tip = self.blocks[number]
-
- # adds transactions to the block and updates state
- def update_block(block_number, new_transactions):
- block = self.blocks[block_number]
- self.add_transactions_to_block(block, new_transactions)
- old_sha256 = block.sha256
- block.hashMerkleRoot = block.calc_merkle_root()
- block.solve()
- # Update the internal state just like in next_block
- self.tip = block
- if block.sha256 != old_sha256:
- self.block_heights[block.sha256] = self.block_heights[old_sha256]
- del self.block_heights[old_sha256]
- self.blocks[block_number] = block
- return block
-
- # shorthand for functions
- block = self.next_block
- create_tx = self.create_tx
- create_and_sign_tx = self.create_and_sign_transaction
-
- # these must be updated if consensus changes
- MAX_BLOCK_SIGOPS = 20000
-
+ self.spendable_outputs = []
# Create a new block
- block(0)
- save_spendable_output()
- yield accepted()
+ b0 = self.next_block(0)
+ self.save_spendable_output()
+ self.sync_blocks([b0])
-
- # Now we need that block to mature so we can spend the coinbase.
- test = TestInstance(sync_every_block=False)
+ # Allow the block to mature
+ blocks = []
for i in range(99):
- block(5000 + i)
- test.blocks_and_transactions.append([self.tip, True])
- save_spendable_output()
- yield test
+ blocks.append(self.next_block(5000 + i))
+ self.save_spendable_output()
+ self.sync_blocks(blocks)
# collect spendable outputs now to avoid cluttering the code later on
out = []
for i in range(33):
- out.append(get_spendable_output())
+ out.append(self.get_spendable_output())
# Start by building a couple of blocks on top (which output is spent is
# in parentheses):
# genesis -> b1 (0) -> b2 (1)
- block(1, spend=out[0])
- save_spendable_output()
- yield accepted()
+ b1 = self.next_block(1, spend=out[0])
+ self.save_spendable_output()
+
+ b2 = self.next_block(2, spend=out[1])
+ self.save_spendable_output()
- block(2, spend=out[1])
- yield accepted()
- save_spendable_output()
+ self.sync_blocks([b1, b2])
- # so fork like this:
+ # Fork like this:
#
# genesis -> b1 (0) -> b2 (1)
# \-> b3 (1)
#
# Nothing should happen at this point. We saw b2 first so it takes priority.
- tip(1)
- b3 = block(3, spend=out[1])
+ self.log.info("Don't reorg to a chain of the same length")
+ self.move_tip(1)
+ b3 = self.next_block(3, spend=out[1])
txout_b3 = PreviousSpendableOutput(b3.vtx[1], 0)
- yield rejected()
-
+ self.sync_blocks([b3], False)
# Now we add another block to make the alternative chain longer.
#
# genesis -> b1 (0) -> b2 (1)
# \-> b3 (1) -> b4 (2)
- block(4, spend=out[2])
- yield accepted()
-
+ self.log.info("Reorg to a longer chain")
+ b4 = self.next_block(4, spend=out[2])
+ self.sync_blocks([b4])
# ... and back to the first chain.
# genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
# \-> b3 (1) -> b4 (2)
- tip(2)
- block(5, spend=out[2])
- save_spendable_output()
- yield rejected()
+ self.move_tip(2)
+ b5 = self.next_block(5, spend=out[2])
+ self.save_spendable_output()
+ self.sync_blocks([b5], False)
- block(6, spend=out[3])
- yield accepted()
+ self.log.info("Reorg back to the original chain")
+ b6 = self.next_block(6, spend=out[3])
+ self.sync_blocks([b6], True)
# Try to create a fork that double-spends
# genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
# \-> b7 (2) -> b8 (4)
# \-> b3 (1) -> b4 (2)
- tip(5)
- block(7, spend=out[2])
- yield rejected()
+ self.log.info("Reject a chain with a double spend, even if it is longer")
+ self.move_tip(5)
+ b7 = self.next_block(7, spend=out[2])
+ self.sync_blocks([b7], False)
- block(8, spend=out[4])
- yield rejected()
+ b8 = self.next_block(8, spend=out[4])
+ self.sync_blocks([b8], False, reconnect=True)
# Try to create a block that has too much fee
# genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
# \-> b9 (4)
# \-> b3 (1) -> b4 (2)
- tip(6)
- block(9, spend=out[4], additional_coinbase_value=1)
- yield rejected(RejectResult(16, b'bad-cb-amount'))
+ self.log.info("Reject a block where the miner creates too much coinbase reward")
+ self.move_tip(6)
+ b9 = self.next_block(9, spend=out[4], additional_coinbase_value=1)
+ self.sync_blocks([b9], False, 16, b'bad-cb-amount', reconnect=True)
# Create a fork that ends in a block with too much fee (the one that causes the reorg)
# genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
# \-> b10 (3) -> b11 (4)
# \-> b3 (1) -> b4 (2)
- tip(5)
- block(10, spend=out[3])
- yield rejected()
-
- block(11, spend=out[4], additional_coinbase_value=1)
- yield rejected(RejectResult(16, b'bad-cb-amount'))
+ self.log.info("Reject a chain where the miner creates too much coinbase reward, even if the chain is longer")
+ self.move_tip(5)
+ b10 = self.next_block(10, spend=out[3])
+ self.sync_blocks([b10], False)
+ b11 = self.next_block(11, spend=out[4], additional_coinbase_value=1)
+ self.sync_blocks([b11], False, 16, b'bad-cb-amount', reconnect=True)
# Try again, but with a valid fork first
# genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
# \-> b12 (3) -> b13 (4) -> b14 (5)
- # (b12 added last)
# \-> b3 (1) -> b4 (2)
- tip(5)
- b12 = block(12, spend=out[3])
- save_spendable_output()
- b13 = block(13, spend=out[4])
- # Deliver the block header for b12, and the block b13.
- # b13 should be accepted but the tip won't advance until b12 is delivered.
- yield TestInstance([[CBlockHeader(b12), None], [b13, False]])
-
- save_spendable_output()
- # b14 is invalid, but the node won't know that until it tries to connect
- # Tip still can't advance because b12 is missing
- block(14, spend=out[5], additional_coinbase_value=1)
- yield rejected()
-
- yield TestInstance([[b12, True, b13.sha256]]) # New tip should be b13.
+ self.log.info("Reject a chain where the miner creates too much coinbase reward, even if the chain is longer (on a forked chain)")
+ self.move_tip(5)
+ b12 = self.next_block(12, spend=out[3])
+ self.save_spendable_output()
+ b13 = self.next_block(13, spend=out[4])
+ self.save_spendable_output()
+ b14 = self.next_block(14, spend=out[5], additional_coinbase_value=1)
+ self.sync_blocks([b12, b13, b14], False, 16, b'bad-cb-amount', reconnect=True)
+
+ # New tip should be b13.
+ assert_equal(node.getbestblockhash(), b13.hash)
# Add a block with MAX_BLOCK_SIGOPS and one with one more sigop
# genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
# \-> b12 (3) -> b13 (4) -> b15 (5) -> b16 (6)
# \-> b3 (1) -> b4 (2)
-
- # Test that a block with a lot of checksigs is okay
+ self.log.info("Accept a block with lots of checksigs")
lots_of_checksigs = CScript([OP_CHECKSIG] * (MAX_BLOCK_SIGOPS - 1))
- tip(13)
- block(15, spend=out[5], script=lots_of_checksigs)
- yield accepted()
- save_spendable_output()
-
+ self.move_tip(13)
+ b15 = self.next_block(15, spend=out[5], script=lots_of_checksigs)
+ self.save_spendable_output()
+ self.sync_blocks([b15], True)
- # Test that a block with too many checksigs is rejected
+ self.log.info("Reject a block with too many checksigs")
too_many_checksigs = CScript([OP_CHECKSIG] * (MAX_BLOCK_SIGOPS))
- block(16, spend=out[6], script=too_many_checksigs)
- yield rejected(RejectResult(16, b'bad-blk-sigops'))
-
+ b16 = self.next_block(16, spend=out[6], script=too_many_checksigs)
+ self.sync_blocks([b16], False, 16, b'bad-blk-sigops', reconnect=True)
# Attempt to spend a transaction created on a different fork
# genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
# \-> b12 (3) -> b13 (4) -> b15 (5) -> b17 (b3.vtx[1])
# \-> b3 (1) -> b4 (2)
- tip(15)
- block(17, spend=txout_b3)
- yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent'))
+ self.log.info("Reject a block with a spend from a re-org'ed out tx")
+ self.move_tip(15)
+ b17 = self.next_block(17, spend=txout_b3)
+ self.sync_blocks([b17], False, 16, b'bad-txns-inputs-missingorspent', reconnect=True)
# Attempt to spend a transaction created on a different fork (on a fork this time)
# genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
# \-> b12 (3) -> b13 (4) -> b15 (5)
# \-> b18 (b3.vtx[1]) -> b19 (6)
# \-> b3 (1) -> b4 (2)
- tip(13)
- block(18, spend=txout_b3)
- yield rejected()
+ self.log.info("Reject a block with a spend from a re-org'ed out tx (on a forked chain)")
+ self.move_tip(13)
+ b18 = self.next_block(18, spend=txout_b3)
+ self.sync_blocks([b18], False)
- block(19, spend=out[6])
- yield rejected()
+ b19 = self.next_block(19, spend=out[6])
+ self.sync_blocks([b19], False, 16, b'bad-txns-inputs-missingorspent', reconnect=True)
# Attempt to spend a coinbase at depth too low
# genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
# \-> b12 (3) -> b13 (4) -> b15 (5) -> b20 (7)
# \-> b3 (1) -> b4 (2)
- tip(15)
- block(20, spend=out[7])
- yield rejected(RejectResult(16, b'bad-txns-premature-spend-of-coinbase'))
+ self.log.info("Reject a block spending an immature coinbase.")
+ self.move_tip(15)
+ b20 = self.next_block(20, spend=out[7])
+ self.sync_blocks([b20], False, 16, b'bad-txns-premature-spend-of-coinbase')
# Attempt to spend a coinbase at depth too low (on a fork this time)
# genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
# \-> b12 (3) -> b13 (4) -> b15 (5)
# \-> b21 (6) -> b22 (5)
# \-> b3 (1) -> b4 (2)
- tip(13)
- block(21, spend=out[6])
- yield rejected()
+ self.log.info("Reject a block spending an immature coinbase (on a forked chain)")
+ self.move_tip(13)
+ b21 = self.next_block(21, spend=out[6])
+ self.sync_blocks([b21], False)
- block(22, spend=out[5])
- yield rejected()
+ b22 = self.next_block(22, spend=out[5])
+ self.sync_blocks([b22], False, 16, b'bad-txns-premature-spend-of-coinbase')
# Create a block on either side of MAX_BLOCK_BASE_SIZE and make sure its accepted/rejected
# genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
# \-> b12 (3) -> b13 (4) -> b15 (5) -> b23 (6)
# \-> b24 (6) -> b25 (7)
# \-> b3 (1) -> b4 (2)
- tip(15)
- b23 = block(23, spend=out[6])
+ self.log.info("Accept a block of size MAX_BLOCK_BASE_SIZE")
+ self.move_tip(15)
+ b23 = self.next_block(23, spend=out[6])
tx = CTransaction()
script_length = MAX_BLOCK_BASE_SIZE - len(b23.serialize()) - 69
script_output = CScript([b'\x00' * script_length])
tx.vout.append(CTxOut(0, script_output))
tx.vin.append(CTxIn(COutPoint(b23.vtx[1].sha256, 0)))
- b23 = update_block(23, [tx])
+ b23 = self.update_block(23, [tx])
# Make sure the math above worked out to produce a max-sized block
assert_equal(len(b23.serialize()), MAX_BLOCK_BASE_SIZE)
- yield accepted()
- save_spendable_output()
+ self.sync_blocks([b23], True)
+ self.save_spendable_output()
- # Make the next block one byte bigger and check that it fails
- tip(15)
- b24 = block(24, spend=out[6])
+ self.log.info("Reject a block of size MAX_BLOCK_BASE_SIZE + 1")
+ self.move_tip(15)
+ b24 = self.next_block(24, spend=out[6])
script_length = MAX_BLOCK_BASE_SIZE - len(b24.serialize()) - 69
- script_output = CScript([b'\x00' * (script_length+1)])
+ script_output = CScript([b'\x00' * (script_length + 1)])
tx.vout = [CTxOut(0, script_output)]
- b24 = update_block(24, [tx])
- assert_equal(len(b24.serialize()), MAX_BLOCK_BASE_SIZE+1)
- yield rejected(RejectResult(16, b'bad-blk-length'))
+ b24 = self.update_block(24, [tx])
+ assert_equal(len(b24.serialize()), MAX_BLOCK_BASE_SIZE + 1)
+ self.sync_blocks([b24], False, 16, b'bad-blk-length', reconnect=True)
- block(25, spend=out[7])
- yield rejected()
+ b25 = self.next_block(25, spend=out[7])
+ self.sync_blocks([b25], False)
# Create blocks with a coinbase input script size out of range
# genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
# \-> b12 (3) -> b13 (4) -> b15 (5) -> b23 (6) -> b30 (7)
# \-> ... (6) -> ... (7)
# \-> b3 (1) -> b4 (2)
- tip(15)
- b26 = block(26, spend=out[6])
+ self.log.info("Reject a block with coinbase input script size out of range")
+ self.move_tip(15)
+ b26 = self.next_block(26, spend=out[6])
b26.vtx[0].vin[0].scriptSig = b'\x00'
b26.vtx[0].rehash()
# update_block causes the merkle root to get updated, even with no new
# transactions, and updates the required state.
- b26 = update_block(26, [])
- yield rejected(RejectResult(16, b'bad-cb-length'))
+ b26 = self.update_block(26, [])
+ self.sync_blocks([b26], False, 16, b'bad-cb-length', reconnect=True)
# Extend the b26 chain to make sure bitcoind isn't accepting b26
- block(27, spend=out[7])
- yield rejected(False)
+ b27 = self.next_block(27, spend=out[7])
+ self.sync_blocks([b27], False)
# Now try a too-large-coinbase script
- tip(15)
- b28 = block(28, spend=out[6])
+ self.move_tip(15)
+ b28 = self.next_block(28, spend=out[6])
b28.vtx[0].vin[0].scriptSig = b'\x00' * 101
b28.vtx[0].rehash()
- b28 = update_block(28, [])
- yield rejected(RejectResult(16, b'bad-cb-length'))
+ b28 = self.update_block(28, [])
+ self.sync_blocks([b28], False, 16, b'bad-cb-length', reconnect=True)
# Extend the b28 chain to make sure bitcoind isn't accepting b28
- block(29, spend=out[7])
- yield rejected(False)
+ b29 = self.next_block(29, spend=out[7])
+ self.sync_blocks([b29], False)
# b30 has a max-sized coinbase scriptSig.
- tip(23)
- b30 = block(30)
+ self.move_tip(23)
+ b30 = self.next_block(30)
b30.vtx[0].vin[0].scriptSig = b'\x00' * 100
b30.vtx[0].rehash()
- b30 = update_block(30, [])
- yield accepted()
- save_spendable_output()
+ b30 = self.update_block(30, [])
+ self.sync_blocks([b30], True)
+ self.save_spendable_output()
# b31 - b35 - check sigops of OP_CHECKMULTISIG / OP_CHECKMULTISIGVERIFY / OP_CHECKSIGVERIFY
#
@@ -433,42 +347,45 @@ class FullBlockTest(ComparisonTestFramework):
#
# MULTISIG: each op code counts as 20 sigops. To create the edge case, pack another 19 sigops at the end.
- lots_of_multisigs = CScript([OP_CHECKMULTISIG] * ((MAX_BLOCK_SIGOPS-1) // 20) + [OP_CHECKSIG] * 19)
- b31 = block(31, spend=out[8], script=lots_of_multisigs)
+ self.log.info("Accept a block with the max number of OP_CHECKMULTISIG sigops")
+ lots_of_multisigs = CScript([OP_CHECKMULTISIG] * ((MAX_BLOCK_SIGOPS - 1) // 20) + [OP_CHECKSIG] * 19)
+ b31 = self.next_block(31, spend=out[8], script=lots_of_multisigs)
assert_equal(get_legacy_sigopcount_block(b31), MAX_BLOCK_SIGOPS)
- yield accepted()
- save_spendable_output()
+ self.sync_blocks([b31], True)
+ self.save_spendable_output()
# this goes over the limit because the coinbase has one sigop
+ self.log.info("Reject a block with too many OP_CHECKMULTISIG sigops")
too_many_multisigs = CScript([OP_CHECKMULTISIG] * (MAX_BLOCK_SIGOPS // 20))
- b32 = block(32, spend=out[9], script=too_many_multisigs)
+ b32 = self.next_block(32, spend=out[9], script=too_many_multisigs)
assert_equal(get_legacy_sigopcount_block(b32), MAX_BLOCK_SIGOPS + 1)
- yield rejected(RejectResult(16, b'bad-blk-sigops'))
-
+ self.sync_blocks([b32], False, 16, b'bad-blk-sigops', reconnect=True)
# CHECKMULTISIGVERIFY
- tip(31)
- lots_of_multisigs = CScript([OP_CHECKMULTISIGVERIFY] * ((MAX_BLOCK_SIGOPS-1) // 20) + [OP_CHECKSIG] * 19)
- block(33, spend=out[9], script=lots_of_multisigs)
- yield accepted()
- save_spendable_output()
-
+ self.log.info("Accept a block with the max number of OP_CHECKMULTISIGVERIFY sigops")
+ self.move_tip(31)
+ lots_of_multisigs = CScript([OP_CHECKMULTISIGVERIFY] * ((MAX_BLOCK_SIGOPS - 1) // 20) + [OP_CHECKSIG] * 19)
+ b33 = self.next_block(33, spend=out[9], script=lots_of_multisigs)
+ self.sync_blocks([b33], True)
+ self.save_spendable_output()
+
+ self.log.info("Reject a block with too many OP_CHECKMULTISIGVERIFY sigops")
too_many_multisigs = CScript([OP_CHECKMULTISIGVERIFY] * (MAX_BLOCK_SIGOPS // 20))
- block(34, spend=out[10], script=too_many_multisigs)
- yield rejected(RejectResult(16, b'bad-blk-sigops'))
-
+ b34 = self.next_block(34, spend=out[10], script=too_many_multisigs)
+ self.sync_blocks([b34], False, 16, b'bad-blk-sigops', reconnect=True)
# CHECKSIGVERIFY
- tip(33)
+ self.log.info("Accept a block with the max number of OP_CHECKSIGVERIFY sigops")
+ self.move_tip(33)
lots_of_checksigs = CScript([OP_CHECKSIGVERIFY] * (MAX_BLOCK_SIGOPS - 1))
- b35 = block(35, spend=out[10], script=lots_of_checksigs)
- yield accepted()
- save_spendable_output()
+ b35 = self.next_block(35, spend=out[10], script=lots_of_checksigs)
+ self.sync_blocks([b35], True)
+ self.save_spendable_output()
+ self.log.info("Reject a block with too many OP_CHECKSIGVERIFY sigops")
too_many_checksigs = CScript([OP_CHECKSIGVERIFY] * (MAX_BLOCK_SIGOPS))
- block(36, spend=out[11], script=too_many_checksigs)
- yield rejected(RejectResult(16, b'bad-blk-sigops'))
-
+ b36 = self.next_block(36, spend=out[11], script=too_many_checksigs)
+ self.sync_blocks([b36], False, 16, b'bad-blk-sigops', reconnect=True)
# Check spending of a transaction in a block which failed to connect
#
@@ -479,17 +396,18 @@ class FullBlockTest(ComparisonTestFramework):
#
# save 37's spendable output, but then double-spend out11 to invalidate the block
- tip(35)
- b37 = block(37, spend=out[11])
+ self.log.info("Reject a block spending transaction from a block which failed to connect")
+ self.move_tip(35)
+ b37 = self.next_block(37, spend=out[11])
txout_b37 = PreviousSpendableOutput(b37.vtx[1], 0)
- tx = create_and_sign_tx(out[11].tx, out[11].n, 0)
- b37 = update_block(37, [tx])
- yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent'))
+ tx = self.create_and_sign_transaction(out[11].tx, out[11].n, 0)
+ b37 = self.update_block(37, [tx])
+ self.sync_blocks([b37], False, 16, b'bad-txns-inputs-missingorspent', reconnect=True)
# attempt to spend b37's first non-coinbase tx, at which point b37 was still considered valid
- tip(35)
- block(38, spend=txout_b37)
- yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent'))
+ self.move_tip(35)
+ b38 = self.next_block(38, spend=txout_b37)
+ self.sync_blocks([b38], False, 16, b'bad-txns-inputs-missingorspent', reconnect=True)
# Check P2SH SigOp counting
#
@@ -502,45 +420,45 @@ class FullBlockTest(ComparisonTestFramework):
# redeem_script = COINBASE_PUBKEY, (OP_2DUP+OP_CHECKSIGVERIFY) * 5, OP_CHECKSIG
# p2sh_script = OP_HASH160, ripemd160(sha256(script)), OP_EQUAL
#
- tip(35)
- b39 = block(39)
+ self.log.info("Check P2SH SIGOPS are correctly counted")
+ self.move_tip(35)
+ b39 = self.next_block(39)
b39_outputs = 0
b39_sigops_per_output = 6
# Build the redeem script, hash it, use hash to create the p2sh script
- redeem_script = CScript([self.coinbase_pubkey] + [OP_2DUP, OP_CHECKSIGVERIFY]*5 + [OP_CHECKSIG])
+ redeem_script = CScript([self.coinbase_pubkey] + [OP_2DUP, OP_CHECKSIGVERIFY] * 5 + [OP_CHECKSIG])
redeem_script_hash = hash160(redeem_script)
p2sh_script = CScript([OP_HASH160, redeem_script_hash, OP_EQUAL])
# Create a transaction that spends one satoshi to the p2sh_script, the rest to OP_TRUE
# This must be signed because it is spending a coinbase
spend = out[11]
- tx = create_tx(spend.tx, spend.n, 1, p2sh_script)
+ tx = self.create_tx(spend.tx, spend.n, 1, p2sh_script)
tx.vout.append(CTxOut(spend.tx.vout[spend.n].nValue - 1, CScript([OP_TRUE])))
self.sign_tx(tx, spend.tx, spend.n)
tx.rehash()
- b39 = update_block(39, [tx])
+ b39 = self.update_block(39, [tx])
b39_outputs += 1
# Until block is full, add tx's with 1 satoshi to p2sh_script, the rest to OP_TRUE
tx_new = None
tx_last = tx
- total_size=len(b39.serialize())
+ total_size = len(b39.serialize())
while(total_size < MAX_BLOCK_BASE_SIZE):
- tx_new = create_tx(tx_last, 1, 1, p2sh_script)
+ tx_new = self.create_tx(tx_last, 1, 1, p2sh_script)
tx_new.vout.append(CTxOut(tx_last.vout[1].nValue - 1, CScript([OP_TRUE])))
tx_new.rehash()
total_size += len(tx_new.serialize())
if total_size >= MAX_BLOCK_BASE_SIZE:
break
- b39.vtx.append(tx_new) # add tx to block
+ b39.vtx.append(tx_new) # add tx to block
tx_last = tx_new
b39_outputs += 1
- b39 = update_block(39, [])
- yield accepted()
- save_spendable_output()
-
+ b39 = self.update_block(39, [])
+ self.sync_blocks([b39], True)
+ self.save_spendable_output()
# Test sigops in P2SH redeem scripts
#
@@ -549,15 +467,16 @@ class FullBlockTest(ComparisonTestFramework):
#
# b41 does the same, less one, so it has the maximum sigops permitted.
#
- tip(39)
- b40 = block(40, spend=out[12])
+ self.log.info("Reject a block with too many P2SH sigops")
+ self.move_tip(39)
+ b40 = self.next_block(40, spend=out[12])
sigops = get_legacy_sigopcount_block(b40)
numTxes = (MAX_BLOCK_SIGOPS - sigops) // b39_sigops_per_output
assert_equal(numTxes <= b39_outputs, True)
lastOutpoint = COutPoint(b40.vtx[1].sha256, 0)
new_txs = []
- for i in range(1, numTxes+1):
+ for i in range(1, numTxes + 1):
tx = CTransaction()
tx.vout.append(CTxOut(1, CScript([OP_TRUE])))
tx.vin.append(CTxIn(lastOutpoint, b''))
@@ -579,35 +498,34 @@ class FullBlockTest(ComparisonTestFramework):
tx.vout.append(CTxOut(1, CScript([OP_CHECKSIG] * b40_sigops_to_fill)))
tx.rehash()
new_txs.append(tx)
- update_block(40, new_txs)
- yield rejected(RejectResult(16, b'bad-blk-sigops'))
+ self.update_block(40, new_txs)
+ self.sync_blocks([b40], False, 16, b'bad-blk-sigops', reconnect=True)
# same as b40, but one less sigop
- tip(39)
- block(41, spend=None)
- update_block(41, b40.vtx[1:-1])
+ self.log.info("Accept a block with the max number of P2SH sigops")
+ self.move_tip(39)
+ b41 = self.next_block(41, spend=None)
+ self.update_block(41, b40.vtx[1:-1])
b41_sigops_to_fill = b40_sigops_to_fill - 1
tx = CTransaction()
tx.vin.append(CTxIn(lastOutpoint, b''))
tx.vout.append(CTxOut(1, CScript([OP_CHECKSIG] * b41_sigops_to_fill)))
tx.rehash()
- update_block(41, [tx])
- yield accepted()
+ self.update_block(41, [tx])
+ self.sync_blocks([b41], True)
# Fork off of b39 to create a constant base again
#
# b23 (6) -> b30 (7) -> b31 (8) -> b33 (9) -> b35 (10) -> b39 (11) -> b42 (12) -> b43 (13)
# \-> b41 (12)
#
- tip(39)
- block(42, spend=out[12])
- yield rejected()
- save_spendable_output()
-
- block(43, spend=out[13])
- yield accepted()
- save_spendable_output()
+ self.move_tip(39)
+ b42 = self.next_block(42, spend=out[12])
+ self.save_spendable_output()
+ b43 = self.next_block(43, spend=out[13])
+ self.save_spendable_output()
+ self.sync_blocks([b42, b43], True)
# Test a number of really invalid scenarios
#
@@ -616,6 +534,7 @@ class FullBlockTest(ComparisonTestFramework):
# The next few blocks are going to be created "by hand" since they'll do funky things, such as having
# the first transaction be non-coinbase, etc. The purpose of b44 is to make sure this works.
+ self.log.info("Build block 44 manually")
height = self.block_heights[self.tip.sha256] + 1
coinbase = create_coinbase(height, self.coinbase_pubkey)
b44 = CBlock()
@@ -628,10 +547,10 @@ class FullBlockTest(ComparisonTestFramework):
self.tip = b44
self.block_heights[b44.sha256] = height
self.blocks[44] = b44
- yield accepted()
+ self.sync_blocks([b44], True)
- # A block with a non-coinbase as the first tx
- non_coinbase = create_tx(out[15].tx, out[15].n, 1)
+ self.log.info("Reject a block with a non-coinbase as the first tx")
+ non_coinbase = self.create_tx(out[15].tx, out[15].n, 1)
b45 = CBlock()
b45.nTime = self.tip.nTime + 1
b45.hashPrevBlock = self.tip.sha256
@@ -640,104 +559,102 @@ class FullBlockTest(ComparisonTestFramework):
b45.hashMerkleRoot = b45.calc_merkle_root()
b45.calc_sha256()
b45.solve()
- self.block_heights[b45.sha256] = self.block_heights[self.tip.sha256]+1
+ self.block_heights[b45.sha256] = self.block_heights[self.tip.sha256] + 1
self.tip = b45
self.blocks[45] = b45
- yield rejected(RejectResult(16, b'bad-cb-missing'))
+ self.sync_blocks([b45], False, 16, b'bad-cb-missing', reconnect=True)
- # A block with no txns
- tip(44)
+ self.log.info("Reject a block with no transactions")
+ self.move_tip(44)
b46 = CBlock()
- b46.nTime = b44.nTime+1
+ b46.nTime = b44.nTime + 1
b46.hashPrevBlock = b44.sha256
b46.nBits = 0x207fffff
b46.vtx = []
b46.hashMerkleRoot = 0
b46.solve()
- self.block_heights[b46.sha256] = self.block_heights[b44.sha256]+1
+ self.block_heights[b46.sha256] = self.block_heights[b44.sha256] + 1
self.tip = b46
assert 46 not in self.blocks
self.blocks[46] = b46
- s = ser_uint256(b46.hashMerkleRoot)
- yield rejected(RejectResult(16, b'bad-blk-length'))
+ self.sync_blocks([b46], False, 16, b'bad-blk-length', reconnect=True)
- # A block with invalid work
- tip(44)
- b47 = block(47, solve=False)
+ self.log.info("Reject a block with invalid work")
+ self.move_tip(44)
+ b47 = self.next_block(47, solve=False)
target = uint256_from_compact(b47.nBits)
- while b47.sha256 < target: #changed > to <
+ while b47.sha256 < target:
b47.nNonce += 1
b47.rehash()
- yield rejected(RejectResult(16, b'high-hash'))
+ self.sync_blocks([b47], False, request_block=False)
- # A block with timestamp > 2 hrs in the future
- tip(44)
- b48 = block(48, solve=False)
+ self.log.info("Reject a block with a timestamp >2 hours in the future")
+ self.move_tip(44)
+ b48 = self.next_block(48, solve=False)
b48.nTime = int(time.time()) + 60 * 60 * 3
b48.solve()
- yield rejected(RejectResult(16, b'time-too-new'))
+ self.sync_blocks([b48], False, request_block=False)
- # A block with an invalid merkle hash
- tip(44)
- b49 = block(49)
+ self.log.info("Reject a block with invalid merkle hash")
+ self.move_tip(44)
+ b49 = self.next_block(49)
b49.hashMerkleRoot += 1
b49.solve()
- yield rejected(RejectResult(16, b'bad-txnmrklroot'))
+ self.sync_blocks([b49], False, 16, b'bad-txnmrklroot', reconnect=True)
- # A block with an incorrect POW limit
- tip(44)
- b50 = block(50)
+ self.log.info("Reject a block with incorrect POW limit")
+ self.move_tip(44)
+ b50 = self.next_block(50)
b50.nBits = b50.nBits - 1
b50.solve()
- yield rejected(RejectResult(16, b'bad-diffbits'))
+ self.sync_blocks([b50], False, request_block=False, reconnect=True)
- # A block with two coinbase txns
- tip(44)
- b51 = block(51)
+ self.log.info("Reject a block with two coinbase transactions")
+ self.move_tip(44)
+ b51 = self.next_block(51)
cb2 = create_coinbase(51, self.coinbase_pubkey)
- b51 = update_block(51, [cb2])
- yield rejected(RejectResult(16, b'bad-cb-multiple'))
+ b51 = self.update_block(51, [cb2])
+ self.sync_blocks([b51], False, 16, b'bad-cb-multiple', reconnect=True)
- # A block w/ duplicate txns
+ self.log.info("Reject a block with duplicate transactions")
# Note: txns have to be in the right position in the merkle tree to trigger this error
- tip(44)
- b52 = block(52, spend=out[15])
- tx = create_tx(b52.vtx[1], 0, 1)
- b52 = update_block(52, [tx, tx])
- yield rejected(RejectResult(16, b'bad-txns-duplicate'))
+ self.move_tip(44)
+ b52 = self.next_block(52, spend=out[15])
+ tx = self.create_tx(b52.vtx[1], 0, 1)
+ b52 = self.update_block(52, [tx, tx])
+ self.sync_blocks([b52], False, 16, b'bad-txns-duplicate', reconnect=True)
# Test block timestamps
# -> b31 (8) -> b33 (9) -> b35 (10) -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15)
# \-> b54 (15)
#
- tip(43)
- block(53, spend=out[14])
- yield rejected() # rejected since b44 is at same height
- save_spendable_output()
+ self.move_tip(43)
+ b53 = self.next_block(53, spend=out[14])
+ self.sync_blocks([b53], False)
+ self.save_spendable_output()
- # invalid timestamp (b35 is 5 blocks back, so its time is MedianTimePast)
- b54 = block(54, spend=out[15])
+ self.log.info("Reject a block with timestamp before MedianTimePast")
+ b54 = self.next_block(54, spend=out[15])
b54.nTime = b35.nTime - 1
b54.solve()
- yield rejected(RejectResult(16, b'time-too-old'))
+ self.sync_blocks([b54], False, request_block=False)
# valid timestamp
- tip(53)
- b55 = block(55, spend=out[15])
+ self.move_tip(53)
+ b55 = self.next_block(55, spend=out[15])
b55.nTime = b35.nTime
- update_block(55, [])
- yield accepted()
- save_spendable_output()
-
+ self.update_block(55, [])
+ self.sync_blocks([b55], True)
+ self.save_spendable_output()
- # Test CVE-2012-2459
+ # Test Merkle tree malleability
#
# -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57p2 (16)
# \-> b57 (16)
# \-> b56p2 (16)
# \-> b56 (16)
#
- # Merkle tree malleability (CVE-2012-2459): repeating sequences of transactions in a block without
+ # Merkle tree malleability (CVE-2012-2459): repeating sequences of transactions in a block without
# affecting the merkle root of a block, while still invalidating it.
# See: src/consensus/merkle.h
#
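For readers who haven't met CVE-2012-2459 before: the b56/b57 pairs below rely on the duplicate-last-hash rule in Bitcoin's merkle computation. A minimal stand-alone sketch (not the framework's implementation, and ignoring txid byte order, which doesn't affect the point) showing that appending a copy of the final transaction leaves the root unchanged:

# Sketch only: Bitcoin's merkle algorithm duplicates the last hash of any
# odd-length level, so [a, b, c] and [a, b, c, c] share the same root.
import hashlib

def double_sha256(data):
    return hashlib.sha256(hashlib.sha256(data).digest()).digest()

def merkle_root(txids):
    hashes = list(txids)
    while len(hashes) > 1:
        if len(hashes) % 2:              # odd level: duplicate the last entry
            hashes.append(hashes[-1])
        hashes = [double_sha256(hashes[i] + hashes[i + 1])
                  for i in range(0, len(hashes), 2)]
    return hashes[0]

txids = [double_sha256(bytes([i])) for i in range(3)]          # three dummy txids
assert merkle_root(txids) == merkle_root(txids + txids[-1:])   # same root, different tx list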
@@ -758,46 +675,48 @@ class FullBlockTest(ComparisonTestFramework):
# that the error was caught early, avoiding a DOS vulnerability.)
# b57 - a good block with 2 txs, don't submit until end
- tip(55)
- b57 = block(57)
- tx = create_and_sign_tx(out[16].tx, out[16].n, 1)
- tx1 = create_tx(tx, 0, 1)
- b57 = update_block(57, [tx, tx1])
+ self.move_tip(55)
+ b57 = self.next_block(57)
+ tx = self.create_and_sign_transaction(out[16].tx, out[16].n, 1)
+ tx1 = self.create_tx(tx, 0, 1)
+ b57 = self.update_block(57, [tx, tx1])
# b56 - copy b57, add a duplicate tx
- tip(55)
+ self.log.info("Reject a block with a duplicate transaction in the Merkle Tree (but with a valid Merkle Root)")
+ self.move_tip(55)
b56 = copy.deepcopy(b57)
self.blocks[56] = b56
- assert_equal(len(b56.vtx),3)
- b56 = update_block(56, [tx1])
+ assert_equal(len(b56.vtx), 3)
+ b56 = self.update_block(56, [tx1])
assert_equal(b56.hash, b57.hash)
- yield rejected(RejectResult(16, b'bad-txns-duplicate'))
+ self.sync_blocks([b56], False, 16, b'bad-txns-duplicate', reconnect=True)
# b57p2 - a good block with 6 tx'es, don't submit until end
- tip(55)
- b57p2 = block("57p2")
- tx = create_and_sign_tx(out[16].tx, out[16].n, 1)
- tx1 = create_tx(tx, 0, 1)
- tx2 = create_tx(tx1, 0, 1)
- tx3 = create_tx(tx2, 0, 1)
- tx4 = create_tx(tx3, 0, 1)
- b57p2 = update_block("57p2", [tx, tx1, tx2, tx3, tx4])
+ self.move_tip(55)
+ b57p2 = self.next_block("57p2")
+ tx = self.create_and_sign_transaction(out[16].tx, out[16].n, 1)
+ tx1 = self.create_tx(tx, 0, 1)
+ tx2 = self.create_tx(tx1, 0, 1)
+ tx3 = self.create_tx(tx2, 0, 1)
+ tx4 = self.create_tx(tx3, 0, 1)
+ b57p2 = self.update_block("57p2", [tx, tx1, tx2, tx3, tx4])
# b56p2 - copy b57p2, duplicate two non-consecutive tx's
- tip(55)
+ self.log.info("Reject a block with two duplicate transactions in the Merkle Tree (but with a valid Merkle Root)")
+ self.move_tip(55)
b56p2 = copy.deepcopy(b57p2)
self.blocks["b56p2"] = b56p2
assert_equal(b56p2.hash, b57p2.hash)
- assert_equal(len(b56p2.vtx),6)
- b56p2 = update_block("b56p2", [tx3, tx4])
- yield rejected(RejectResult(16, b'bad-txns-duplicate'))
+ assert_equal(len(b56p2.vtx), 6)
+ b56p2 = self.update_block("b56p2", [tx3, tx4])
+ self.sync_blocks([b56p2], False, 16, b'bad-txns-duplicate', reconnect=True)
- tip("57p2")
- yield accepted()
+ self.move_tip("57p2")
+ self.sync_blocks([b57p2], True)
- tip(57)
- yield rejected() #rejected because 57p2 seen first
- save_spendable_output()
+ self.move_tip(57)
+ self.sync_blocks([b57], False) # The tip is not updated because 57p2 seen first
+ self.save_spendable_output()
# Test a few invalid tx types
#
@@ -806,28 +725,30 @@ class FullBlockTest(ComparisonTestFramework):
#
# tx with prevout.n out of range
- tip(57)
- b58 = block(58, spend=out[17])
+ self.log.info("Reject a block with a transaction with prevout.n out of range")
+ self.move_tip(57)
+ b58 = self.next_block(58, spend=out[17])
tx = CTransaction()
assert(len(out[17].tx.vout) < 42)
tx.vin.append(CTxIn(COutPoint(out[17].tx.sha256, 42), CScript([OP_TRUE]), 0xffffffff))
tx.vout.append(CTxOut(0, b""))
tx.calc_sha256()
- b58 = update_block(58, [tx])
- yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent'))
+ b58 = self.update_block(58, [tx])
+ self.sync_blocks([b58], False, 16, b'bad-txns-inputs-missingorspent', reconnect=True)
- # tx with output value > input value out of range
- tip(57)
- b59 = block(59)
- tx = create_and_sign_tx(out[17].tx, out[17].n, 51*COIN)
- b59 = update_block(59, [tx])
- yield rejected(RejectResult(16, b'bad-txns-in-belowout'))
+ # tx with output value > input value
+ self.log.info("Reject a block with a transaction with outputs > inputs")
+ self.move_tip(57)
+ b59 = self.next_block(59)
+ tx = self.create_and_sign_transaction(out[17].tx, out[17].n, 51 * COIN)
+ b59 = self.update_block(59, [tx])
+ self.sync_blocks([b59], False, 16, b'bad-txns-in-belowout', reconnect=True)
# reset to good chain
- tip(57)
- b60 = block(60, spend=out[17])
- yield accepted()
- save_spendable_output()
+ self.move_tip(57)
+ b60 = self.next_block(60, spend=out[17])
+ self.sync_blocks([b60], True)
+ self.save_spendable_output()
# Test BIP30
#
@@ -838,46 +759,46 @@ class FullBlockTest(ComparisonTestFramework):
# not-fully-spent transaction in the same chain. To test, make identical coinbases;
# the second one should be rejected.
#
- tip(60)
- b61 = block(61, spend=out[18])
- b61.vtx[0].vin[0].scriptSig = b60.vtx[0].vin[0].scriptSig #equalize the coinbases
+ self.log.info("Reject a block with a transaction with a duplicate hash of a previous transaction (BIP30)")
+ self.move_tip(60)
+ b61 = self.next_block(61, spend=out[18])
+ b61.vtx[0].vin[0].scriptSig = b60.vtx[0].vin[0].scriptSig # Equalize the coinbases
b61.vtx[0].rehash()
- b61 = update_block(61, [])
+ b61 = self.update_block(61, [])
assert_equal(b60.vtx[0].serialize(), b61.vtx[0].serialize())
- yield rejected(RejectResult(16, b'bad-txns-BIP30'))
-
+ self.sync_blocks([b61], False, 16, b'bad-txns-BIP30', reconnect=True)
# Test tx.isFinal is properly rejected (not an exhaustive tx.isFinal test, that should be in data-driven transaction tests)
#
# -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17)
# \-> b62 (18)
#
- tip(60)
- b62 = block(62)
+ self.log.info("Reject a block with a transaction with a nonfinal locktime")
+ self.move_tip(60)
+ b62 = self.next_block(62)
tx = CTransaction()
- tx.nLockTime = 0xffffffff #this locktime is non-final
+ tx.nLockTime = 0xffffffff # this locktime is non-final
assert(out[18].n < len(out[18].tx.vout))
- tx.vin.append(CTxIn(COutPoint(out[18].tx.sha256, out[18].n))) # don't set nSequence
+ tx.vin.append(CTxIn(COutPoint(out[18].tx.sha256, out[18].n))) # don't set nSequence
tx.vout.append(CTxOut(0, CScript([OP_TRUE])))
assert(tx.vin[0].nSequence < 0xffffffff)
tx.calc_sha256()
- b62 = update_block(62, [tx])
- yield rejected(RejectResult(16, b'bad-txns-nonfinal'))
-
+ b62 = self.update_block(62, [tx])
+ self.sync_blocks([b62], False, 16, b'bad-txns-nonfinal')
# Test a non-final coinbase is also rejected
#
# -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17)
# \-> b63 (-)
#
- tip(60)
- b63 = block(63)
+ self.log.info("Reject a block with a coinbase transaction with a nonfinal locktime")
+ self.move_tip(60)
+ b63 = self.next_block(63)
b63.vtx[0].nLockTime = 0xffffffff
b63.vtx[0].vin[0].nSequence = 0xDEADBEEF
b63.vtx[0].rehash()
- b63 = update_block(63, [])
- yield rejected(RejectResult(16, b'bad-txns-nonfinal'))
-
+ b63 = self.update_block(63, [])
+ self.sync_blocks([b63], False, 16, b'bad-txns-nonfinal')
# This checks that a block with a bloated VARINT between the block_header and the array of tx such that
# the block is > MAX_BLOCK_BASE_SIZE with the bloated varint, but <= MAX_BLOCK_BASE_SIZE without the bloated varint,
@@ -893,8 +814,9 @@ class FullBlockTest(ComparisonTestFramework):
# b64a is a bloated block (non-canonical varint)
# b64 is a good block (same as b64 but w/ canonical varint)
#
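The non-canonical encoding in question is the CompactSize transaction count that sits between the header and the tx array. CBrokenBlock (defined earlier in this file) presumably re-serializes that count in the widest form, which is consistent with b64a below coming out exactly 8 bytes larger than b64. A hedged sketch of canonical versus bloated CompactSize encoding, assuming the standard Bitcoin serialization rules:

# Sketch only: canonical CompactSize picks the shortest form; a bloated
# encoding uses the 9-byte form regardless, adding 8 bytes for small counts.
import struct

def ser_compact_size_canonical(n):
    if n < 0xfd:
        return struct.pack("<B", n)
    elif n <= 0xffff:
        return b"\xfd" + struct.pack("<H", n)
    elif n <= 0xffffffff:
        return b"\xfe" + struct.pack("<I", n)
    return b"\xff" + struct.pack("<Q", n)

def ser_compact_size_bloated(n):
    return b"\xff" + struct.pack("<Q", n)   # always the widest (non-canonical) form

assert len(ser_compact_size_bloated(2)) - len(ser_compact_size_canonical(2)) == 8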
- tip(60)
- regular_block = block("64a", spend=out[18])
+ self.log.info("Accept a valid block even if a bloated version of the block has previously been sent")
+ self.move_tip(60)
+ regular_block = self.next_block("64a", spend=out[18])
# make it a "broken_block," with non-canonical serialization
b64a = CBrokenBlock(regular_block)
@@ -908,45 +830,51 @@ class FullBlockTest(ComparisonTestFramework):
script_output = CScript([b'\x00' * script_length])
tx.vout.append(CTxOut(0, script_output))
tx.vin.append(CTxIn(COutPoint(b64a.vtx[1].sha256, 0)))
- b64a = update_block("64a", [tx])
+ b64a = self.update_block("64a", [tx])
assert_equal(len(b64a.serialize()), MAX_BLOCK_BASE_SIZE + 8)
- yield TestInstance([[self.tip, None]])
+ self.sync_blocks([b64a], False, 1, b'error parsing message')
- # comptool workaround: to make sure b64 is delivered, manually erase b64a from blockstore
- self.test.block_store.erase(b64a.sha256)
+ # bitcoind doesn't disconnect us for sending a bloated block, but if we subsequently
+ # resend the header message, it won't send us the getdata message again. Just
+ # disconnect and reconnect and then call sync_blocks.
+ # TODO: improve this test to be less dependent on P2P DOS behaviour.
+ node.disconnect_p2ps()
+ self.reconnect_p2p()
- tip(60)
+ self.move_tip(60)
b64 = CBlock(b64a)
b64.vtx = copy.deepcopy(b64a.vtx)
assert_equal(b64.hash, b64a.hash)
assert_equal(len(b64.serialize()), MAX_BLOCK_BASE_SIZE)
self.blocks[64] = b64
- update_block(64, [])
- yield accepted()
- save_spendable_output()
+ b64 = self.update_block(64, [])
+ self.sync_blocks([b64], True)
+ self.save_spendable_output()
# Spend an output created in the block itself
#
# -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19)
#
- tip(64)
- block(65)
- tx1 = create_and_sign_tx(out[19].tx, out[19].n, out[19].tx.vout[0].nValue)
- tx2 = create_and_sign_tx(tx1, 0, 0)
- update_block(65, [tx1, tx2])
- yield accepted()
- save_spendable_output()
+ self.log.info("Accept a block with a transaction spending an output created in the same block")
+ self.move_tip(64)
+ b65 = self.next_block(65)
+ tx1 = self.create_and_sign_transaction(out[19].tx, out[19].n, out[19].tx.vout[0].nValue)
+ tx2 = self.create_and_sign_transaction(tx1, 0, 0)
+ b65 = self.update_block(65, [tx1, tx2])
+ self.sync_blocks([b65], True)
+ self.save_spendable_output()
# Attempt to spend an output created later in the same block
#
# -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19)
# \-> b66 (20)
- tip(65)
- block(66)
- tx1 = create_and_sign_tx(out[20].tx, out[20].n, out[20].tx.vout[0].nValue)
- tx2 = create_and_sign_tx(tx1, 0, 1)
- update_block(66, [tx2, tx1])
- yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent'))
+ self.log.info("Reject a block with a transaction spending an output created later in the same block")
+ self.move_tip(65)
+ b66 = self.next_block(66)
+ tx1 = self.create_and_sign_transaction(out[20].tx, out[20].n, out[20].tx.vout[0].nValue)
+ tx2 = self.create_and_sign_transaction(tx1, 0, 1)
+ b66 = self.update_block(66, [tx2, tx1])
+ self.sync_blocks([b66], False, 16, b'bad-txns-inputs-missingorspent', reconnect=True)
# Attempt to double-spend a transaction created in a block
#
@@ -954,13 +882,14 @@ class FullBlockTest(ComparisonTestFramework):
# \-> b67 (20)
#
#
- tip(65)
- block(67)
- tx1 = create_and_sign_tx(out[20].tx, out[20].n, out[20].tx.vout[0].nValue)
- tx2 = create_and_sign_tx(tx1, 0, 1)
- tx3 = create_and_sign_tx(tx1, 0, 2)
- update_block(67, [tx1, tx2, tx3])
- yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent'))
+ self.log.info("Reject a block with a transaction double spending a transaction creted in the same block")
+ self.move_tip(65)
+ b67 = self.next_block(67)
+ tx1 = self.create_and_sign_transaction(out[20].tx, out[20].n, out[20].tx.vout[0].nValue)
+ tx2 = self.create_and_sign_transaction(tx1, 0, 1)
+ tx3 = self.create_and_sign_transaction(tx1, 0, 2)
+ b67 = self.update_block(67, [tx1, tx2, tx3])
+ self.sync_blocks([b67], False, 16, b'bad-txns-inputs-missingorspent', reconnect=True)
# More tests of block subsidy
#
@@ -974,34 +903,36 @@ class FullBlockTest(ComparisonTestFramework):
# b69 - coinbase with extra 10 satoshis, and a tx that gives a 10 satoshi fee
# this succeeds
#
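To make the arithmetic behind the b68/b69 pair explicit: a coinbase may claim at most the subsidy plus the fees actually paid by the block's transactions. A tiny sketch with a placeholder input value (the real test spends out[20]):

# Sketch only: `spend_value` is a placeholder; only the differences matter.
spend_value = 10_000                        # satoshis available in the spent output
extra_coinbase = 10                         # additional_coinbase_value=10 in b68 and b69
fee_b68 = spend_value - (spend_value - 9)   # tx returns all but 9 satoshis -> fee of 9
fee_b69 = spend_value - (spend_value - 10)  # tx returns all but 10 satoshis -> fee of 10
assert fee_b68 < extra_coinbase             # b68 claims more than it earned -> bad-cb-amount
assert fee_b69 == extra_coinbase            # b69's claim is fully backed -> accepted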
- tip(65)
- block(68, additional_coinbase_value=10)
- tx = create_and_sign_tx(out[20].tx, out[20].n, out[20].tx.vout[0].nValue-9)
- update_block(68, [tx])
- yield rejected(RejectResult(16, b'bad-cb-amount'))
-
- tip(65)
- b69 = block(69, additional_coinbase_value=10)
- tx = create_and_sign_tx(out[20].tx, out[20].n, out[20].tx.vout[0].nValue-10)
- update_block(69, [tx])
- yield accepted()
- save_spendable_output()
+ self.log.info("Reject a block trying to claim too much subsidy in the coinbase transaction")
+ self.move_tip(65)
+ b68 = self.next_block(68, additional_coinbase_value=10)
+ tx = self.create_and_sign_transaction(out[20].tx, out[20].n, out[20].tx.vout[0].nValue - 9)
+ b68 = self.update_block(68, [tx])
+ self.sync_blocks([b68], False, 16, b'bad-cb-amount', reconnect=True)
+
+ self.log.info("Accept a block claiming the correct subsidy in the coinbase transaction")
+ self.move_tip(65)
+ b69 = self.next_block(69, additional_coinbase_value=10)
+ tx = self.create_and_sign_transaction(out[20].tx, out[20].n, out[20].tx.vout[0].nValue - 10)
+ self.update_block(69, [tx])
+ self.sync_blocks([b69], True)
+ self.save_spendable_output()
# Test spending the outpoint of a non-existent transaction
#
# -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19) -> b69 (20)
# \-> b70 (21)
#
- tip(69)
- block(70, spend=out[21])
+ self.log.info("Reject a block containing a transaction spending from a non-existent input")
+ self.move_tip(69)
+ b70 = self.next_block(70, spend=out[21])
bogus_tx = CTransaction()
bogus_tx.sha256 = uint256_from_str(b"23c70ed7c0506e9178fc1a987f40a33946d4ad4c962b5ae3a52546da53af0c5c")
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(bogus_tx.sha256, 0), b"", 0xffffffff))
tx.vout.append(CTxOut(1, b""))
- update_block(70, [tx])
- yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent'))
-
+ b70 = self.update_block(70, [tx])
+ self.sync_blocks([b70], False, 16, b'bad-txns-inputs-missingorspent', reconnect=True)
# Test accepting an invalid block which has the same hash as a valid one (via merkle tree tricks)
#
@@ -1009,13 +940,13 @@ class FullBlockTest(ComparisonTestFramework):
# \-> b71 (21)
#
# b72 is a good block.
- # b71 is a copy of 72, but re-adds one of its transactions. However, it has the same hash as b71.
- #
- tip(69)
- b72 = block(72)
- tx1 = create_and_sign_tx(out[21].tx, out[21].n, 2)
- tx2 = create_and_sign_tx(tx1, 0, 1)
- b72 = update_block(72, [tx1, tx2]) # now tip is 72
+ # b71 is a copy of 72, but re-adds one of its transactions. However, it has the same hash as b72.
+ self.log.info("Reject a block containing a duplicate transaction but with the same Merkle root (Merkle tree malleability")
+ self.move_tip(69)
+ b72 = self.next_block(72)
+ tx1 = self.create_and_sign_transaction(out[21].tx, out[21].n, 2)
+ tx2 = self.create_and_sign_transaction(tx1, 0, 1)
+ b72 = self.update_block(72, [tx1, tx2]) # now tip is 72
b71 = copy.deepcopy(b72)
b71.vtx.append(tx2) # add duplicate tx2
self.block_heights[b71.sha256] = self.block_heights[b69.sha256] + 1 # b71 builds off b69
@@ -1025,12 +956,12 @@ class FullBlockTest(ComparisonTestFramework):
assert_equal(len(b72.vtx), 3)
assert_equal(b72.sha256, b71.sha256)
- tip(71)
- yield rejected(RejectResult(16, b'bad-txns-duplicate'))
- tip(72)
- yield accepted()
- save_spendable_output()
+ self.move_tip(71)
+ self.sync_blocks([b71], False, 16, b'bad-txns-duplicate', reconnect=True)
+ self.move_tip(72)
+ self.sync_blocks([b72], True)
+ self.save_spendable_output()
# Test some invalid scripts and MAX_BLOCK_SIGOPS
#
@@ -1048,23 +979,23 @@ class FullBlockTest(ComparisonTestFramework):
# bytearray[20,000-20,003]: 521 (max_script_element_size+1, in little-endian format)
# bytearray[20,004-20,525]: unread data (script_element)
# bytearray[20,526] : OP_CHECKSIG (this puts us over the limit)
- #
- tip(72)
- b73 = block(73)
+ self.log.info("Reject a block containing too many sigops after a large script element")
+ self.move_tip(72)
+ b73 = self.next_block(73)
size = MAX_BLOCK_SIGOPS - 1 + MAX_SCRIPT_ELEMENT_SIZE + 1 + 5 + 1
a = bytearray([OP_CHECKSIG] * size)
- a[MAX_BLOCK_SIGOPS - 1] = int("4e",16) # OP_PUSHDATA4
+ a[MAX_BLOCK_SIGOPS - 1] = int("4e", 16) # OP_PUSHDATA4
element_size = MAX_SCRIPT_ELEMENT_SIZE + 1
a[MAX_BLOCK_SIGOPS] = element_size % 256
- a[MAX_BLOCK_SIGOPS+1] = element_size // 256
- a[MAX_BLOCK_SIGOPS+2] = 0
- a[MAX_BLOCK_SIGOPS+3] = 0
+ a[MAX_BLOCK_SIGOPS + 1] = element_size // 256
+ a[MAX_BLOCK_SIGOPS + 2] = 0
+ a[MAX_BLOCK_SIGOPS + 3] = 0
- tx = create_and_sign_tx(out[22].tx, 0, 1, CScript(a))
- b73 = update_block(73, [tx])
- assert_equal(get_legacy_sigopcount_block(b73), MAX_BLOCK_SIGOPS+1)
- yield rejected(RejectResult(16, b'bad-blk-sigops'))
+ tx = self.create_and_sign_transaction(out[22].tx, 0, 1, CScript(a))
+ b73 = self.update_block(73, [tx])
+ assert_equal(get_legacy_sigopcount_block(b73), MAX_BLOCK_SIGOPS + 1)
+ self.sync_blocks([b73], False, 16, b'bad-blk-sigops', reconnect=True)
# b74/75 - if we push an invalid script element, all previous sigops are counted,
# but sigops after the element are not counted.
@@ -1076,45 +1007,44 @@ class FullBlockTest(ComparisonTestFramework):
#
# b74 fails because we put MAX_BLOCK_SIGOPS+1 before the element
# b75 succeeds because we put MAX_BLOCK_SIGOPS before the element
- #
- #
- tip(72)
- b74 = block(74)
- size = MAX_BLOCK_SIGOPS - 1 + MAX_SCRIPT_ELEMENT_SIZE + 42 # total = 20,561
+ self.log.info("Check sigops are counted correctly after an invalid script element")
+ self.move_tip(72)
+ b74 = self.next_block(74)
+ size = MAX_BLOCK_SIGOPS - 1 + MAX_SCRIPT_ELEMENT_SIZE + 42 # total = 20,561
a = bytearray([OP_CHECKSIG] * size)
a[MAX_BLOCK_SIGOPS] = 0x4e
- a[MAX_BLOCK_SIGOPS+1] = 0xfe
- a[MAX_BLOCK_SIGOPS+2] = 0xff
- a[MAX_BLOCK_SIGOPS+3] = 0xff
- a[MAX_BLOCK_SIGOPS+4] = 0xff
- tx = create_and_sign_tx(out[22].tx, 0, 1, CScript(a))
- b74 = update_block(74, [tx])
- yield rejected(RejectResult(16, b'bad-blk-sigops'))
-
- tip(72)
- b75 = block(75)
+ a[MAX_BLOCK_SIGOPS + 1] = 0xfe
+ a[MAX_BLOCK_SIGOPS + 2] = 0xff
+ a[MAX_BLOCK_SIGOPS + 3] = 0xff
+ a[MAX_BLOCK_SIGOPS + 4] = 0xff
+ tx = self.create_and_sign_transaction(out[22].tx, 0, 1, CScript(a))
+ b74 = self.update_block(74, [tx])
+ self.sync_blocks([b74], False, 16, b'bad-blk-sigops', reconnect=True)
+
+ self.move_tip(72)
+ b75 = self.next_block(75)
size = MAX_BLOCK_SIGOPS - 1 + MAX_SCRIPT_ELEMENT_SIZE + 42
a = bytearray([OP_CHECKSIG] * size)
- a[MAX_BLOCK_SIGOPS-1] = 0x4e
+ a[MAX_BLOCK_SIGOPS - 1] = 0x4e
a[MAX_BLOCK_SIGOPS] = 0xff
- a[MAX_BLOCK_SIGOPS+1] = 0xff
- a[MAX_BLOCK_SIGOPS+2] = 0xff
- a[MAX_BLOCK_SIGOPS+3] = 0xff
- tx = create_and_sign_tx(out[22].tx, 0, 1, CScript(a))
- b75 = update_block(75, [tx])
- yield accepted()
- save_spendable_output()
+ a[MAX_BLOCK_SIGOPS + 1] = 0xff
+ a[MAX_BLOCK_SIGOPS + 2] = 0xff
+ a[MAX_BLOCK_SIGOPS + 3] = 0xff
+ tx = self.create_and_sign_transaction(out[22].tx, 0, 1, CScript(a))
+ b75 = self.update_block(75, [tx])
+ self.sync_blocks([b75], True)
+ self.save_spendable_output()
# Check that if we push an element filled with CHECKSIGs, they are not counted
- tip(75)
- b76 = block(76)
+ self.move_tip(75)
+ b76 = self.next_block(76)
size = MAX_BLOCK_SIGOPS - 1 + MAX_SCRIPT_ELEMENT_SIZE + 1 + 5
a = bytearray([OP_CHECKSIG] * size)
- a[MAX_BLOCK_SIGOPS-1] = 0x4e # PUSHDATA4, but leave the following bytes as just checksigs
- tx = create_and_sign_tx(out[23].tx, 0, 1, CScript(a))
- b76 = update_block(76, [tx])
- yield accepted()
- save_spendable_output()
+ a[MAX_BLOCK_SIGOPS - 1] = 0x4e # PUSHDATA4, but leave the following bytes as just checksigs
+ tx = self.create_and_sign_transaction(out[23].tx, 0, 1, CScript(a))
+ b76 = self.update_block(76, [tx])
+ self.sync_blocks([b76], True)
+ self.save_spendable_output()
# Test transaction resurrection
#
@@ -1133,39 +1063,39 @@ class FullBlockTest(ComparisonTestFramework):
# To get around this issue, we construct transactions which are not signed and which
# spend to OP_TRUE. If the standard-ness rules change, this test would need to be
# updated. (Perhaps to spend to a P2SH OP_TRUE script)
- #
- tip(76)
- block(77)
- tx77 = create_and_sign_tx(out[24].tx, out[24].n, 10*COIN)
- update_block(77, [tx77])
- yield accepted()
- save_spendable_output()
-
- block(78)
- tx78 = create_tx(tx77, 0, 9*COIN)
- update_block(78, [tx78])
- yield accepted()
-
- block(79)
- tx79 = create_tx(tx78, 0, 8*COIN)
- update_block(79, [tx79])
- yield accepted()
+ self.log.info("Test transaction resurrection during a re-org")
+ self.move_tip(76)
+ b77 = self.next_block(77)
+ tx77 = self.create_and_sign_transaction(out[24].tx, out[24].n, 10 * COIN)
+ b77 = self.update_block(77, [tx77])
+ self.sync_blocks([b77], True)
+ self.save_spendable_output()
+
+ b78 = self.next_block(78)
+ tx78 = self.create_tx(tx77, 0, 9 * COIN)
+ b78 = self.update_block(78, [tx78])
+ self.sync_blocks([b78], True)
+
+ b79 = self.next_block(79)
+ tx79 = self.create_tx(tx78, 0, 8 * COIN)
+ b79 = self.update_block(79, [tx79])
+ self.sync_blocks([b79], True)
# mempool should be empty
assert_equal(len(self.nodes[0].getrawmempool()), 0)
- tip(77)
- block(80, spend=out[25])
- yield rejected()
- save_spendable_output()
+ self.move_tip(77)
+ b80 = self.next_block(80, spend=out[25])
+ self.sync_blocks([b80], False, request_block=False)
+ self.save_spendable_output()
- block(81, spend=out[26])
- yield rejected() # other chain is same length
- save_spendable_output()
+ b81 = self.next_block(81, spend=out[26])
+ self.sync_blocks([b81], False, request_block=False) # other chain is same length
+ self.save_spendable_output()
- block(82, spend=out[27])
- yield accepted() # now this chain is longer, triggers re-org
- save_spendable_output()
+ b82 = self.next_block(82, spend=out[27])
+ self.sync_blocks([b82], True) # now this chain is longer, triggers re-org
+ self.save_spendable_output()
# now check that tx78 and tx79 have been put back into the peer's mempool
mempool = self.nodes[0].getrawmempool()
@@ -1173,33 +1103,32 @@ class FullBlockTest(ComparisonTestFramework):
assert(tx78.hash in mempool)
assert(tx79.hash in mempool)
-
# Test invalid opcodes in dead execution paths.
#
# -> b81 (26) -> b82 (27) -> b83 (28)
#
- block(83)
+ self.log.info("Accept a block with invalid opcodes in dead execution paths")
+ b83 = self.next_block(83)
op_codes = [OP_IF, OP_INVALIDOPCODE, OP_ELSE, OP_TRUE, OP_ENDIF]
script = CScript(op_codes)
- tx1 = create_and_sign_tx(out[28].tx, out[28].n, out[28].tx.vout[0].nValue, script)
+ tx1 = self.create_and_sign_transaction(out[28].tx, out[28].n, out[28].tx.vout[0].nValue, script)
- tx2 = create_and_sign_tx(tx1, 0, 0, CScript([OP_TRUE]))
+ tx2 = self.create_and_sign_transaction(tx1, 0, 0, CScript([OP_TRUE]))
tx2.vin[0].scriptSig = CScript([OP_FALSE])
tx2.rehash()
- update_block(83, [tx1, tx2])
- yield accepted()
- save_spendable_output()
-
+ b83 = self.update_block(83, [tx1, tx2])
+ self.sync_blocks([b83], True)
+ self.save_spendable_output()
# Reorg on/off blocks that have OP_RETURN in them (and try to spend them)
#
# -> b81 (26) -> b82 (27) -> b83 (28) -> b84 (29) -> b87 (30) -> b88 (31)
# \-> b85 (29) -> b86 (30) \-> b89a (32)
#
- #
- block(84)
- tx1 = create_tx(out[29].tx, out[29].n, 0, CScript([OP_RETURN]))
+ self.log.info("Test re-orging blocks with OP_RETURN in them")
+ b84 = self.next_block(84)
+ tx1 = self.create_tx(out[29].tx, out[29].n, 0, CScript([OP_RETURN]))
tx1.vout.append(CTxOut(0, CScript([OP_TRUE])))
tx1.vout.append(CTxOut(0, CScript([OP_TRUE])))
tx1.vout.append(CTxOut(0, CScript([OP_TRUE])))
@@ -1207,87 +1136,192 @@ class FullBlockTest(ComparisonTestFramework):
tx1.calc_sha256()
self.sign_tx(tx1, out[29].tx, out[29].n)
tx1.rehash()
- tx2 = create_tx(tx1, 1, 0, CScript([OP_RETURN]))
+ tx2 = self.create_tx(tx1, 1, 0, CScript([OP_RETURN]))
tx2.vout.append(CTxOut(0, CScript([OP_RETURN])))
- tx3 = create_tx(tx1, 2, 0, CScript([OP_RETURN]))
+ tx3 = self.create_tx(tx1, 2, 0, CScript([OP_RETURN]))
tx3.vout.append(CTxOut(0, CScript([OP_TRUE])))
- tx4 = create_tx(tx1, 3, 0, CScript([OP_TRUE]))
+ tx4 = self.create_tx(tx1, 3, 0, CScript([OP_TRUE]))
tx4.vout.append(CTxOut(0, CScript([OP_RETURN])))
- tx5 = create_tx(tx1, 4, 0, CScript([OP_RETURN]))
+ tx5 = self.create_tx(tx1, 4, 0, CScript([OP_RETURN]))
- update_block(84, [tx1,tx2,tx3,tx4,tx5])
- yield accepted()
- save_spendable_output()
+ b84 = self.update_block(84, [tx1, tx2, tx3, tx4, tx5])
+ self.sync_blocks([b84], True)
+ self.save_spendable_output()
- tip(83)
- block(85, spend=out[29])
- yield rejected()
+ self.move_tip(83)
+ b85 = self.next_block(85, spend=out[29])
+ self.sync_blocks([b85], False) # other chain is same length
- block(86, spend=out[30])
- yield accepted()
+ b86 = self.next_block(86, spend=out[30])
+ self.sync_blocks([b86], True)
- tip(84)
- block(87, spend=out[30])
- yield rejected()
- save_spendable_output()
+ self.move_tip(84)
+ b87 = self.next_block(87, spend=out[30])
+ self.sync_blocks([b87], False) # other chain is same length
+ self.save_spendable_output()
- block(88, spend=out[31])
- yield accepted()
- save_spendable_output()
+ b88 = self.next_block(88, spend=out[31])
+ self.sync_blocks([b88], True)
+ self.save_spendable_output()
# trying to spend the OP_RETURN output is rejected
- block("89a", spend=out[32])
- tx = create_tx(tx1, 0, 0, CScript([OP_TRUE]))
- update_block("89a", [tx])
- yield rejected()
-
-
- # Test re-org of a week's worth of blocks (1088 blocks)
- # This test takes a minute or two and can be accomplished in memory
- #
- if self.options.runbarelyexpensive:
- tip(88)
- LARGE_REORG_SIZE = 1088
- test1 = TestInstance(sync_every_block=False)
- spend=out[32]
- for i in range(89, LARGE_REORG_SIZE + 89):
- b = block(i, spend)
- tx = CTransaction()
- script_length = MAX_BLOCK_BASE_SIZE - len(b.serialize()) - 69
- script_output = CScript([b'\x00' * script_length])
- tx.vout.append(CTxOut(0, script_output))
- tx.vin.append(CTxIn(COutPoint(b.vtx[1].sha256, 0)))
- b = update_block(i, [tx])
- assert_equal(len(b.serialize()), MAX_BLOCK_BASE_SIZE)
- test1.blocks_and_transactions.append([self.tip, True])
- save_spendable_output()
- spend = get_spendable_output()
-
- yield test1
- chain1_tip = i
-
- # now create alt chain of same length
- tip(88)
- test2 = TestInstance(sync_every_block=False)
- for i in range(89, LARGE_REORG_SIZE + 89):
- block("alt"+str(i))
- test2.blocks_and_transactions.append([self.tip, False])
- yield test2
-
- # extend alt chain to trigger re-org
- block("alt" + str(chain1_tip + 1))
- yield accepted()
-
- # ... and re-org back to the first chain
- tip(chain1_tip)
- block(chain1_tip + 1)
- yield rejected()
- block(chain1_tip + 2)
- yield accepted()
-
- chain1_tip += 2
+ b89a = self.next_block("89a", spend=out[32])
+ tx = self.create_tx(tx1, 0, 0, CScript([OP_TRUE]))
+ b89a = self.update_block("89a", [tx])
+ self.sync_blocks([b89a], False, 16, b'bad-txns-inputs-missingorspent', reconnect=True)
+
+ self.log.info("Test a re-org of one week's worth of blocks (1088 blocks)")
+
+ self.move_tip(88)
+ LARGE_REORG_SIZE = 1088
+ blocks = []
+ spend = out[32]
+ for i in range(89, LARGE_REORG_SIZE + 89):
+ b = self.next_block(i, spend)
+ tx = CTransaction()
+ script_length = MAX_BLOCK_BASE_SIZE - len(b.serialize()) - 69
+ script_output = CScript([b'\x00' * script_length])
+ tx.vout.append(CTxOut(0, script_output))
+ tx.vin.append(CTxIn(COutPoint(b.vtx[1].sha256, 0)))
+ b = self.update_block(i, [tx])
+ assert_equal(len(b.serialize()), MAX_BLOCK_BASE_SIZE)
+ blocks.append(b)
+ self.save_spendable_output()
+ spend = self.get_spendable_output()
+
+ self.sync_blocks(blocks, True, timeout=180)
+ chain1_tip = i
+
+ # now create alt chain of same length
+ self.move_tip(88)
+ blocks2 = []
+ for i in range(89, LARGE_REORG_SIZE + 89):
+ blocks2.append(self.next_block("alt" + str(i)))
+ self.sync_blocks(blocks2, False, request_block=False)
+
+ # extend alt chain to trigger re-org
+ block = self.next_block("alt" + str(chain1_tip + 1))
+ self.sync_blocks([block], True, timeout=180)
+
+ # ... and re-org back to the first chain
+ self.move_tip(chain1_tip)
+ block = self.next_block(chain1_tip + 1)
+ self.sync_blocks([block], False, request_block=False)
+ block = self.next_block(chain1_tip + 2)
+ self.sync_blocks([block], True, timeout=180)
+
+ # Helper methods
+ ################
+
+ def add_transactions_to_block(self, block, tx_list):
+ [tx.rehash() for tx in tx_list]
+ block.vtx.extend(tx_list)
+
+ # this is a little handier to use than the version in blocktools.py
+ def create_tx(self, spend_tx, n, value, script=CScript([OP_TRUE])):
+ return create_transaction(spend_tx, n, b"", value, script)
+
+ # sign a transaction, using the key we know about
+ # this signs input 0 in tx, which is assumed to be spending output n in spend_tx
+ def sign_tx(self, tx, spend_tx, n):
+ scriptPubKey = bytearray(spend_tx.vout[n].scriptPubKey)
+ if (scriptPubKey[0] == OP_TRUE): # an anyone-can-spend
+ tx.vin[0].scriptSig = CScript()
+ return
+ (sighash, err) = SignatureHash(spend_tx.vout[n].scriptPubKey, tx, 0, SIGHASH_ALL)
+ tx.vin[0].scriptSig = CScript([self.coinbase_key.sign(sighash) + bytes(bytearray([SIGHASH_ALL]))])
+ def create_and_sign_transaction(self, spend_tx, n, value, script=CScript([OP_TRUE])):
+ tx = self.create_tx(spend_tx, n, value, script)
+ self.sign_tx(tx, spend_tx, n)
+ tx.rehash()
+ return tx
+ def next_block(self, number, spend=None, additional_coinbase_value=0, script=CScript([OP_TRUE]), solve=True):
+ if self.tip is None:
+ base_block_hash = self.genesis_hash
+ block_time = int(time.time()) + 1
+ else:
+ base_block_hash = self.tip.sha256
+ block_time = self.tip.nTime + 1
+ # First create the coinbase
+ height = self.block_heights[base_block_hash] + 1
+ coinbase = create_coinbase(height, self.coinbase_pubkey)
+ coinbase.vout[0].nValue += additional_coinbase_value
+ coinbase.rehash()
+ if spend is None:
+ block = create_block(base_block_hash, coinbase, block_time)
+ else:
+ coinbase.vout[0].nValue += spend.tx.vout[spend.n].nValue - 1 # all but one satoshi to fees
+ coinbase.rehash()
+ block = create_block(base_block_hash, coinbase, block_time)
+ tx = create_transaction(spend.tx, spend.n, b"", 1, script) # spend 1 satoshi
+ self.sign_tx(tx, spend.tx, spend.n)
+ self.add_transactions_to_block(block, [tx])
+ block.hashMerkleRoot = block.calc_merkle_root()
+ if solve:
+ block.solve()
+ self.tip = block
+ self.block_heights[block.sha256] = height
+ assert number not in self.blocks
+ self.blocks[number] = block
+ return block
+
+ # save the current tip so it can be spent by a later block
+ def save_spendable_output(self):
+ self.log.debug("saving spendable output %s" % self.tip.vtx[0])
+ self.spendable_outputs.append(self.tip)
+
+ # get an output that we previously marked as spendable
+ def get_spendable_output(self):
+ self.log.debug("getting spendable output %s" % self.spendable_outputs[0].vtx[0])
+ return PreviousSpendableOutput(self.spendable_outputs.pop(0).vtx[0], 0)
+
+ # move the tip back to a previous block
+ def move_tip(self, number):
+ self.tip = self.blocks[number]
+
+ # adds transactions to the block and updates state
+ def update_block(self, block_number, new_transactions):
+ block = self.blocks[block_number]
+ self.add_transactions_to_block(block, new_transactions)
+ old_sha256 = block.sha256
+ block.hashMerkleRoot = block.calc_merkle_root()
+ block.solve()
+ # Update the internal state just like in next_block
+ self.tip = block
+ if block.sha256 != old_sha256:
+ self.block_heights[block.sha256] = self.block_heights[old_sha256]
+ del self.block_heights[old_sha256]
+ self.blocks[block_number] = block
+ return block
+
+ def reconnect_p2p(self):
+ """Add a P2P connection to the node.
+
+ The node gets disconnected several times in this test. This helper
+ method reconnects the p2p and restarts the network thread."""
+
+ network_thread_join()
+ self.nodes[0].disconnect_p2ps()
+ self.nodes[0].add_p2p_connection(P2PDataStore())
+ network_thread_start()
+ # We need to wait for the initial getheaders from the peer before we
+ # start populating our blockstore. If we don't, then we may run ahead
+ # to the next subtest before we receive the getheaders. We'd then send
+ # an INV for the next block and receive two getheaders - one for the
+ # IBD and one for the INV. We'd respond to both and could get
+ # unexpectedly disconnected if the DoS score for that error is 50.
+ self.nodes[0].p2p.wait_for_getheaders(timeout=5)
+
+ def sync_blocks(self, blocks, success=True, reject_code=None, reject_reason=None, request_block=True, reconnect=False, timeout=60):
+ """Sends blocks to test node. Syncs and verifies that tip has advanced to most recent block.
+
+ Call with success = False if the tip shouldn't advance to the most recent block."""
+ self.nodes[0].p2p.send_blocks_and_test(blocks, self.nodes[0], success=success, reject_code=reject_code, reject_reason=reject_reason, request_block=request_block, timeout=timeout)
+
+ if reconnect:
+ self.reconnect_p2p()
if __name__ == '__main__':
FullBlockTest().main()
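The comptool/TestManager yield-based harness is gone from this test; blocks are now pushed straight to the node through a P2PDataStore connection and checked with the sync_blocks() helper above. As an illustrative, self-contained sketch of that minimal shape (MinimalBlockTest is a hypothetical name; it reuses only framework calls visible in this diff and builds one trivially valid block instead of the test's hand-crafted ones):

#!/usr/bin/env python3
# Illustrative sketch only, not part of the patch.
import time

from test_framework.blocktools import create_block, create_coinbase
from test_framework.mininode import P2PDataStore, network_thread_start
from test_framework.test_framework import BitcoinTestFramework

class MinimalBlockTest(BitcoinTestFramework):
    def set_test_params(self):
        self.num_nodes = 1
        self.setup_clean_chain = True

    def run_test(self):
        node = self.nodes[0]
        node.add_p2p_connection(P2PDataStore())
        network_thread_start()
        node.p2p.wait_for_verack()

        # Build one valid block on top of the current tip.
        tip = int(node.getbestblockhash(), 16)
        height = node.getblockcount() + 1
        block = create_block(tip, create_coinbase(height), int(time.time()) + 1)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.solve()

        # success=True asserts that the node's tip advances to this block;
        # for expected rejections the rewritten test passes success=False plus
        # a reject_code/reject_reason, exactly as sync_blocks() does above.
        node.p2p.send_blocks_and_test([block], node, success=True)

if __name__ == '__main__':
    MinimalBlockTest().main()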
diff --git a/test/functional/feature_blocksdir.py b/test/functional/feature_blocksdir.py
new file mode 100755
index 0000000000..56f91651a8
--- /dev/null
+++ b/test/functional/feature_blocksdir.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env python3
+# Copyright (c) 2018 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test the blocksdir option.
+"""
+
+import os
+import shutil
+
+from test_framework.test_framework import BitcoinTestFramework, initialize_datadir
+
+
+class BlocksdirTest(BitcoinTestFramework):
+ def set_test_params(self):
+ self.setup_clean_chain = True
+ self.num_nodes = 1
+
+ def run_test(self):
+ self.stop_node(0)
+ shutil.rmtree(self.nodes[0].datadir)
+ initialize_datadir(self.options.tmpdir, 0)
+ self.log.info("Starting with non exiting blocksdir ...")
+ blocksdir_path = os.path.join(self.options.tmpdir, 'blocksdir')
+ self.nodes[0].assert_start_raises_init_error(["-blocksdir=" + blocksdir_path], 'Error: Specified blocks directory "{}" does not exist.'.format(blocksdir_path))
+ os.mkdir(blocksdir_path)
+ self.log.info("Starting with exiting blocksdir ...")
+ self.start_node(0, ["-blocksdir=" + blocksdir_path])
+ self.log.info("mining blocks..")
+ self.nodes[0].generate(10)
+ assert os.path.isfile(os.path.join(blocksdir_path, "regtest", "blocks", "blk00000.dat"))
+ assert os.path.isdir(os.path.join(self.nodes[0].datadir, "regtest", "blocks", "index"))
+
+
+if __name__ == '__main__':
+ BlocksdirTest().main()
diff --git a/test/functional/feature_cltv.py b/test/functional/feature_cltv.py
index f62ae31654..e9a8945e76 100755
--- a/test/functional/feature_cltv.py
+++ b/test/functional/feature_cltv.py
@@ -41,7 +41,7 @@ def cltv_validate(node, tx, height):
tx.nLockTime = height
# Need to re-sign, since nSequence and nLockTime changed
- signed_result = node.signrawtransaction(ToHex(tx))
+ signed_result = node.signrawtransactionwithwallet(ToHex(tx))
new_tx = CTransaction()
new_tx.deserialize(BytesIO(hex_str_to_bytes(signed_result['hex'])))
@@ -54,7 +54,7 @@ def create_transaction(node, coinbase, to_address, amount):
inputs = [{ "txid" : from_txid, "vout" : 0}]
outputs = { to_address : amount }
rawtx = node.createrawtransaction(inputs, outputs)
- signresult = node.signrawtransaction(rawtx)
+ signresult = node.signrawtransactionwithwallet(rawtx)
tx = CTransaction()
tx.deserialize(BytesIO(hex_str_to_bytes(signresult['hex'])))
return tx
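Both hunks above only swap the deprecated signrawtransaction RPC for signrawtransactionwithwallet; the returned object still carries the signed 'hex'. For reference, a hedged fragment of the new call, where node, txid and address are placeholders rather than names from this diff:

# Fragment only: assumes `node` is a test node with a funded wallet and that
# `txid`/`address` name a spendable outpoint and a destination address.
inputs = [{"txid": txid, "vout": 0}]
outputs = {address: 49.99}
rawtx = node.createrawtransaction(inputs, outputs)
signresult = node.signrawtransactionwithwallet(rawtx)  # renamed wallet-signing RPC
assert signresult["complete"]
signed_hex = signresult["hex"]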
diff --git a/test/functional/feature_config_args.py b/test/functional/feature_config_args.py
index 61abba8082..e9924451d1 100755
--- a/test/functional/feature_config_args.py
+++ b/test/functional/feature_config_args.py
@@ -7,7 +7,7 @@
import os
from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import get_datadir_path
+
class ConfArgsTest(BitcoinTestFramework):
def set_test_params(self):
@@ -19,31 +19,36 @@ class ConfArgsTest(BitcoinTestFramework):
# Remove the -datadir argument so it doesn't override the config file
self.nodes[0].args = [arg for arg in self.nodes[0].args if not arg.startswith("-datadir")]
- default_data_dir = get_datadir_path(self.options.tmpdir, 0)
+ default_data_dir = self.nodes[0].datadir
new_data_dir = os.path.join(default_data_dir, 'newdatadir')
new_data_dir_2 = os.path.join(default_data_dir, 'newdatadir2')
# Check that using -datadir argument on non-existent directory fails
self.nodes[0].datadir = new_data_dir
- self.assert_start_raises_init_error(0, ['-datadir='+new_data_dir], 'Error: Specified data directory "' + new_data_dir + '" does not exist.')
+ self.nodes[0].assert_start_raises_init_error(['-datadir=' + new_data_dir], 'Error: Specified data directory "' + new_data_dir + '" does not exist.')
# Check that using non-existent datadir in conf file fails
conf_file = os.path.join(default_data_dir, "bitcoin.conf")
- with open(conf_file, 'a', encoding='utf8') as f:
+
+ # datadir needs to be set before [regtest] section
+ conf_file_contents = open(conf_file, encoding='utf8').read()
+ with open(conf_file, 'w', encoding='utf8') as f:
f.write("datadir=" + new_data_dir + "\n")
- self.assert_start_raises_init_error(0, ['-conf='+conf_file], 'Error reading configuration file: specified data directory "' + new_data_dir + '" does not exist.')
+ f.write(conf_file_contents)
+
+ self.nodes[0].assert_start_raises_init_error(['-conf=' + conf_file], 'Error reading configuration file: specified data directory "' + new_data_dir + '" does not exist.')
# Create the directory and ensure the config file now works
os.mkdir(new_data_dir)
self.start_node(0, ['-conf='+conf_file, '-wallet=w1'])
self.stop_node(0)
- assert os.path.isfile(os.path.join(new_data_dir, 'regtest', 'wallets', 'w1'))
+ assert os.path.exists(os.path.join(new_data_dir, 'regtest', 'wallets', 'w1'))
# Ensure command line argument overrides datadir in conf
os.mkdir(new_data_dir_2)
self.nodes[0].datadir = new_data_dir_2
self.start_node(0, ['-datadir='+new_data_dir_2, '-conf='+conf_file, '-wallet=w2'])
- assert os.path.isfile(os.path.join(new_data_dir_2, 'regtest', 'wallets', 'w2'))
+ assert os.path.exists(os.path.join(new_data_dir_2, 'regtest', 'wallets', 'w2'))
if __name__ == '__main__':
ConfArgsTest().main()
diff --git a/test/functional/feature_csv_activation.py b/test/functional/feature_csv_activation.py
index 82aa0ff891..37d60aad61 100755
--- a/test/functional/feature_csv_activation.py
+++ b/test/functional/feature_csv_activation.py
@@ -42,98 +42,131 @@ bip112txs_vary_OP_CSV - 16 txs with nSequence = 10 evaluated against varying {re
bip112txs_vary_OP_CSV_9 - 16 txs with nSequence = 9 evaluated against varying {relative_locktimes of 10} OP_CSV OP_DROP
bip112tx_special - test negative argument to OP_CSV
"""
-
-from test_framework.test_framework import ComparisonTestFramework
-from test_framework.util import *
-from test_framework.mininode import ToHex, CTransaction, network_thread_start
-from test_framework.blocktools import create_coinbase, create_block
-from test_framework.comptool import TestInstance, TestManager
-from test_framework.script import *
+from decimal import Decimal
+from itertools import product
from io import BytesIO
import time
-base_relative_locktime = 10
-seq_disable_flag = 1<<31
-seq_random_high_bit = 1<<25
-seq_type_flag = 1<<22
-seq_random_low_bit = 1<<18
-
-# b31,b25,b22,b18 represent the 31st, 25th, 22nd and 18th bits respectively in the nSequence field
-# relative_locktimes[b31][b25][b22][b18] is a base_relative_locktime with the indicated bits set if their indices are 1
-relative_locktimes = []
-for b31 in range(2):
- b25times = []
- for b25 in range(2):
- b22times = []
- for b22 in range(2):
- b18times = []
- for b18 in range(2):
- rlt = base_relative_locktime
- if (b31):
- rlt = rlt | seq_disable_flag
- if (b25):
- rlt = rlt | seq_random_high_bit
- if (b22):
- rlt = rlt | seq_type_flag
- if (b18):
- rlt = rlt | seq_random_low_bit
- b18times.append(rlt)
- b22times.append(b18times)
- b25times.append(b22times)
- relative_locktimes.append(b25times)
-
-def all_rlt_txs(txarray):
+from test_framework.blocktools import create_coinbase, create_block
+from test_framework.messages import ToHex, CTransaction
+from test_framework.mininode import network_thread_start, P2PDataStore
+from test_framework.script import (
+ CScript,
+ OP_CHECKSEQUENCEVERIFY,
+ OP_DROP,
+)
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import (
+ assert_equal,
+ get_bip9_status,
+ hex_str_to_bytes,
+)
+
+BASE_RELATIVE_LOCKTIME = 10
+SEQ_DISABLE_FLAG = 1 << 31
+SEQ_RANDOM_HIGH_BIT = 1 << 25
+SEQ_TYPE_FLAG = 1 << 22
+SEQ_RANDOM_LOW_BIT = 1 << 18
+
+def relative_locktime(sdf, srhb, stf, srlb):
+ """Returns a locktime with certain bits set."""
+
+ locktime = BASE_RELATIVE_LOCKTIME
+ if sdf:
+ locktime |= SEQ_DISABLE_FLAG
+ if srhb:
+ locktime |= SEQ_RANDOM_HIGH_BIT
+ if stf:
+ locktime |= SEQ_TYPE_FLAG
+ if srlb:
+ locktime |= SEQ_RANDOM_LOW_BIT
+ return locktime
+
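Every BIP68/BIP112 case below varies these four bits, but only two of them carry BIP68 meaning (the other two are deliberately ignored "random" bits). A quick sanity sketch of the flag arithmetic in plain Python:

# Check the nSequence bit layout used by relative_locktime() above.
BASE_RELATIVE_LOCKTIME = 10
SEQ_DISABLE_FLAG = 1 << 31   # bit 31: relative lock-time disabled (BIP68)
SEQ_TYPE_FLAG = 1 << 22      # bit 22: lock-time counted in 512-second units rather than blocks
assert BASE_RELATIVE_LOCKTIME | SEQ_DISABLE_FLAG == 0x8000000a
assert BASE_RELATIVE_LOCKTIME | SEQ_TYPE_FLAG == 0x0040000a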
+def all_rlt_txs(txs):
+ return [tx['tx'] for tx in txs]
+
+def create_transaction(node, txid, to_address, amount):
+ inputs = [{"txid": txid, "vout": 0}]
+ outputs = {to_address: amount}
+ rawtx = node.createrawtransaction(inputs, outputs)
+ tx = CTransaction()
+ f = BytesIO(hex_str_to_bytes(rawtx))
+ tx.deserialize(f)
+ return tx
+
+def sign_transaction(node, unsignedtx):
+ rawtx = ToHex(unsignedtx)
+ signresult = node.signrawtransactionwithwallet(rawtx)
+ tx = CTransaction()
+ f = BytesIO(hex_str_to_bytes(signresult['hex']))
+ tx.deserialize(f)
+ return tx
+
+def create_bip112special(node, input, txversion, address):
+ tx = create_transaction(node, input, address, Decimal("49.98"))
+ tx.nVersion = txversion
+ signtx = sign_transaction(node, tx)
+ signtx.vin[0].scriptSig = CScript([-1, OP_CHECKSEQUENCEVERIFY, OP_DROP] + list(CScript(signtx.vin[0].scriptSig)))
+ return signtx
+
+def send_generic_input_tx(node, coinbases, address):
+ amount = Decimal("49.99")
+ return node.sendrawtransaction(ToHex(sign_transaction(node, create_transaction(node, node.getblock(coinbases.pop())['tx'][0], address, amount))))
+
+def create_bip68txs(node, bip68inputs, txversion, address, locktime_delta=0):
+ """Returns a list of bip68 transactions with different bits set."""
txs = []
- for b31 in range(2):
- for b25 in range(2):
- for b22 in range(2):
- for b18 in range(2):
- txs.append(txarray[b31][b25][b22][b18])
+ assert(len(bip68inputs) >= 16)
+ for i, (sdf, srhb, stf, srlb) in enumerate(product(*[[True, False]] * 4)):
+ locktime = relative_locktime(sdf, srhb, stf, srlb)
+ tx = create_transaction(node, bip68inputs[i], address, Decimal("49.98"))
+ tx.nVersion = txversion
+ tx.vin[0].nSequence = locktime + locktime_delta
+ tx = sign_transaction(node, tx)
+ tx.rehash()
+ txs.append({'tx': tx, 'sdf': sdf, 'stf': stf})
+
return txs
-class BIP68_112_113Test(ComparisonTestFramework):
+def create_bip112txs(node, bip112inputs, varyOP_CSV, txversion, address, locktime_delta=0):
+ """Returns a list of bip68 transactions with different bits set."""
+ txs = []
+ assert(len(bip112inputs) >= 16)
+ for i, (sdf, srhb, stf, srlb) in enumerate(product(*[[True, False]] * 4)):
+ locktime = relative_locktime(sdf, srhb, stf, srlb)
+ tx = create_transaction(node, bip112inputs[i], address, Decimal("49.98"))
+ if (varyOP_CSV): # if varying OP_CSV, nSequence is fixed
+ tx.vin[0].nSequence = BASE_RELATIVE_LOCKTIME + locktime_delta
+ else: # vary nSequence instead, OP_CSV is fixed
+ tx.vin[0].nSequence = locktime + locktime_delta
+ tx.nVersion = txversion
+ signtx = sign_transaction(node, tx)
+ if (varyOP_CSV):
+ signtx.vin[0].scriptSig = CScript([locktime, OP_CHECKSEQUENCEVERIFY, OP_DROP] + list(CScript(signtx.vin[0].scriptSig)))
+ else:
+ signtx.vin[0].scriptSig = CScript([BASE_RELATIVE_LOCKTIME, OP_CHECKSEQUENCEVERIFY, OP_DROP] + list(CScript(signtx.vin[0].scriptSig)))
+ tx.rehash()
+ txs.append({'tx': signtx, 'sdf': sdf, 'stf': stf})
+ return txs
+
+class BIP68_112_113Test(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.setup_clean_chain = True
self.extra_args = [['-whitelist=127.0.0.1', '-blockversion=4', '-addresstype=legacy']]
- def run_test(self):
- test = TestManager(self, self.options.tmpdir)
- test.add_all_connections(self.nodes)
- network_thread_start()
- test.run()
-
- def send_generic_input_tx(self, node, coinbases):
- amount = Decimal("49.99")
- return node.sendrawtransaction(ToHex(self.sign_transaction(node, self.create_transaction(node, node.getblock(coinbases.pop())['tx'][0], self.nodeaddress, amount))))
-
- def create_transaction(self, node, txid, to_address, amount):
- inputs = [{ "txid" : txid, "vout" : 0}]
- outputs = { to_address : amount }
- rawtx = node.createrawtransaction(inputs, outputs)
- tx = CTransaction()
- f = BytesIO(hex_str_to_bytes(rawtx))
- tx.deserialize(f)
- return tx
-
- def sign_transaction(self, node, unsignedtx):
- rawtx = ToHex(unsignedtx)
- signresult = node.signrawtransaction(rawtx)
- tx = CTransaction()
- f = BytesIO(hex_str_to_bytes(signresult['hex']))
- tx.deserialize(f)
- return tx
-
- def generate_blocks(self, number, version, test_blocks = []):
+ def generate_blocks(self, number, version, test_blocks=None):
+ if test_blocks is None:
+ test_blocks = []
for i in range(number):
block = self.create_test_block([], version)
- test_blocks.append([block, True])
+ test_blocks.append(block)
self.last_block_time += 600
self.tip = block.sha256
self.tipheight += 1
return test_blocks
- def create_test_block(self, txs, version = 536870912):
+ def create_test_block(self, txs, version=536870912):
block = create_block(self.tip, create_coinbase(self.tipheight + 1), self.last_block_time + 600)
block.nVersion = version
block.vtx.extend(txs)
@@ -142,184 +175,148 @@ class BIP68_112_113Test(ComparisonTestFramework):
block.solve()
return block
- def create_bip68txs(self, bip68inputs, txversion, locktime_delta = 0):
- txs = []
- assert(len(bip68inputs) >= 16)
- i = 0
- for b31 in range(2):
- b25txs = []
- for b25 in range(2):
- b22txs = []
- for b22 in range(2):
- b18txs = []
- for b18 in range(2):
- tx = self.create_transaction(self.nodes[0], bip68inputs[i], self.nodeaddress, Decimal("49.98"))
- i += 1
- tx.nVersion = txversion
- tx.vin[0].nSequence = relative_locktimes[b31][b25][b22][b18] + locktime_delta
- b18txs.append(self.sign_transaction(self.nodes[0], tx))
- b22txs.append(b18txs)
- b25txs.append(b22txs)
- txs.append(b25txs)
- return txs
-
- def create_bip112special(self, input, txversion):
- tx = self.create_transaction(self.nodes[0], input, self.nodeaddress, Decimal("49.98"))
- tx.nVersion = txversion
- signtx = self.sign_transaction(self.nodes[0], tx)
- signtx.vin[0].scriptSig = CScript([-1, OP_CHECKSEQUENCEVERIFY, OP_DROP] + list(CScript(signtx.vin[0].scriptSig)))
- return signtx
-
- def create_bip112txs(self, bip112inputs, varyOP_CSV, txversion, locktime_delta = 0):
- txs = []
- assert(len(bip112inputs) >= 16)
- i = 0
- for b31 in range(2):
- b25txs = []
- for b25 in range(2):
- b22txs = []
- for b22 in range(2):
- b18txs = []
- for b18 in range(2):
- tx = self.create_transaction(self.nodes[0], bip112inputs[i], self.nodeaddress, Decimal("49.98"))
- i += 1
- if (varyOP_CSV): # if varying OP_CSV, nSequence is fixed
- tx.vin[0].nSequence = base_relative_locktime + locktime_delta
- else: # vary nSequence instead, OP_CSV is fixed
- tx.vin[0].nSequence = relative_locktimes[b31][b25][b22][b18] + locktime_delta
- tx.nVersion = txversion
- signtx = self.sign_transaction(self.nodes[0], tx)
- if (varyOP_CSV):
- signtx.vin[0].scriptSig = CScript([relative_locktimes[b31][b25][b22][b18], OP_CHECKSEQUENCEVERIFY, OP_DROP] + list(CScript(signtx.vin[0].scriptSig)))
- else:
- signtx.vin[0].scriptSig = CScript([base_relative_locktime, OP_CHECKSEQUENCEVERIFY, OP_DROP] + list(CScript(signtx.vin[0].scriptSig)))
- b18txs.append(signtx)
- b22txs.append(b18txs)
- b25txs.append(b22txs)
- txs.append(b25txs)
- return txs
-
- def get_tests(self):
- long_past_time = int(time.time()) - 600 * 1000 # enough to build up to 1000 blocks 10 minutes apart without worrying about getting into the future
- self.nodes[0].setmocktime(long_past_time - 100) # enough so that the generated blocks will still all be before long_past_time
- self.coinbase_blocks = self.nodes[0].generate(1 + 16 + 2*32 + 1) # 82 blocks generated for inputs
- self.nodes[0].setmocktime(0) # set time back to present so yielded blocks aren't in the future as we advance last_block_time
- self.tipheight = 82 # height of the next block to build
+ def sync_blocks(self, blocks, success=True, reject_code=None, reject_reason=None, request_block=True):
+ """Sends blocks to test node. Syncs and verifies that tip has advanced to most recent block.
+
+ Call with success = False if the tip shouldn't advance to the most recent block."""
+ self.nodes[0].p2p.send_blocks_and_test(blocks, self.nodes[0], success=success, reject_code=reject_code, reject_reason=reject_reason, request_block=request_block)
+
+ def run_test(self):
+ self.nodes[0].add_p2p_connection(P2PDataStore())
+ network_thread_start()
+ self.nodes[0].p2p.wait_for_verack()
+
+ self.log.info("Generate blocks in the past for coinbase outputs.")
+ long_past_time = int(time.time()) - 600 * 1000 # enough to build up to 1000 blocks 10 minutes apart without worrying about getting into the future
+ self.nodes[0].setmocktime(long_past_time - 100) # enough so that the generated blocks will still all be before long_past_time
+ self.coinbase_blocks = self.nodes[0].generate(1 + 16 + 2 * 32 + 1) # 82 blocks generated for inputs
+ self.nodes[0].setmocktime(0) # set time back to present so the blocks we send aren't in the future as we advance last_block_time
+ self.tipheight = 82 # height of the next block to build
self.last_block_time = long_past_time
- self.tip = int("0x" + self.nodes[0].getbestblockhash(), 0)
+ self.tip = int(self.nodes[0].getbestblockhash(), 16)
self.nodeaddress = self.nodes[0].getnewaddress()
+ self.log.info("Test that the csv softfork is DEFINED")
assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'defined')
test_blocks = self.generate_blocks(61, 4)
- yield TestInstance(test_blocks, sync_every_block=False) # 1
- # Advanced from DEFINED to STARTED, height = 143
+ self.sync_blocks(test_blocks)
+
+ self.log.info("Advance from DEFINED to STARTED, height = 143")
assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'started')
- # Fail to achieve LOCKED_IN 100 out of 144 signal bit 0
- # using a variety of bits to simulate multiple parallel softforks
- test_blocks = self.generate_blocks(50, 536870913) # 0x20000001 (signalling ready)
- test_blocks = self.generate_blocks(20, 4, test_blocks) # 0x00000004 (signalling not)
- test_blocks = self.generate_blocks(50, 536871169, test_blocks) # 0x20000101 (signalling ready)
- test_blocks = self.generate_blocks(24, 536936448, test_blocks) # 0x20010000 (signalling not)
- yield TestInstance(test_blocks, sync_every_block=False) # 2
- # Failed to advance past STARTED, height = 287
+ self.log.info("Fail to achieve LOCKED_IN")
+ # 100 out of 144 signal bit 0. Use a variety of bits to simulate multiple parallel softforks
+
+ test_blocks = self.generate_blocks(50, 536870913) # 0x20000001 (signalling ready)
+ test_blocks = self.generate_blocks(20, 4, test_blocks) # 0x00000004 (signalling not)
+ test_blocks = self.generate_blocks(50, 536871169, test_blocks) # 0x20000101 (signalling ready)
+ test_blocks = self.generate_blocks(24, 536936448, test_blocks) # 0x20010000 (signalling not)
+ self.sync_blocks(test_blocks)
+
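The magic nVersion values in the comments above are BIP9 version-bits encodings: the top three bits are 001 (0x20000000) and the low bits are the individual soft-fork signals. A quick check of the hex annotations:

# Decode the block versions used for the signalling batches above.
VERSIONBITS_TOP = 0x20000000
assert 536870913 == VERSIONBITS_TOP | (1 << 0)             # signals bit 0 (csv)
assert 536871169 == VERSIONBITS_TOP | (1 << 0) | (1 << 8)  # signals bits 0 and 8
assert 536936448 == VERSIONBITS_TOP | (1 << 16)            # version-bits block, not signalling bit 0
assert 4 == 0x00000004                                     # plain version-4 block, also not signalling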
+ self.log.info("Failed to advance past STARTED, height = 287")
assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'started')
+ self.log.info("Generate blocks to achieve LOCK-IN")
# 108 out of 144 signal bit 0 to achieve lock-in
# using a variety of bits to simulate multiple parallel softforks
- test_blocks = self.generate_blocks(58, 536870913) # 0x20000001 (signalling ready)
- test_blocks = self.generate_blocks(26, 4, test_blocks) # 0x00000004 (signalling not)
- test_blocks = self.generate_blocks(50, 536871169, test_blocks) # 0x20000101 (signalling ready)
- test_blocks = self.generate_blocks(10, 536936448, test_blocks) # 0x20010000 (signalling not)
- yield TestInstance(test_blocks, sync_every_block=False) # 3
- # Advanced from STARTED to LOCKED_IN, height = 431
+ test_blocks = self.generate_blocks(58, 536870913) # 0x20000001 (signalling ready)
+ test_blocks = self.generate_blocks(26, 4, test_blocks) # 0x00000004 (signalling not)
+ test_blocks = self.generate_blocks(50, 536871169, test_blocks) # 0x20000101 (signalling ready)
+ test_blocks = self.generate_blocks(10, 536936448, test_blocks) # 0x20010000 (signalling not)
+ self.sync_blocks(test_blocks)
+
+ self.log.info("Advanced from STARTED to LOCKED_IN, height = 431")
assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'locked_in')
- # 140 more version 4 blocks
+ # Generate 140 more version 4 blocks
test_blocks = self.generate_blocks(140, 4)
- yield TestInstance(test_blocks, sync_every_block=False) # 4
+ self.sync_blocks(test_blocks)
- ### Inputs at height = 572
+ # Inputs at height = 572
+ #
# Put inputs for all tests in the chain at height 572 (tip now = 571) (time increases by 600s per block)
# Note we reuse inputs for v1 and v2 txs so must test these separately
# 16 normal inputs
bip68inputs = []
for i in range(16):
- bip68inputs.append(self.send_generic_input_tx(self.nodes[0], self.coinbase_blocks))
+ bip68inputs.append(send_generic_input_tx(self.nodes[0], self.coinbase_blocks, self.nodeaddress))
+
# 2 sets of 16 inputs with 10 OP_CSV OP_DROP (actually will be prepended to spending scriptSig)
bip112basicinputs = []
for j in range(2):
inputs = []
for i in range(16):
- inputs.append(self.send_generic_input_tx(self.nodes[0], self.coinbase_blocks))
+ inputs.append(send_generic_input_tx(self.nodes[0], self.coinbase_blocks, self.nodeaddress))
bip112basicinputs.append(inputs)
+
# 2 sets of 16 varied inputs with (relative_lock_time) OP_CSV OP_DROP (actually will be prepended to spending scriptSig)
bip112diverseinputs = []
for j in range(2):
inputs = []
for i in range(16):
- inputs.append(self.send_generic_input_tx(self.nodes[0], self.coinbase_blocks))
+ inputs.append(send_generic_input_tx(self.nodes[0], self.coinbase_blocks, self.nodeaddress))
bip112diverseinputs.append(inputs)
+
# 1 special input with -1 OP_CSV OP_DROP (actually will be prepended to spending scriptSig)
- bip112specialinput = self.send_generic_input_tx(self.nodes[0], self.coinbase_blocks)
+ bip112specialinput = send_generic_input_tx(self.nodes[0], self.coinbase_blocks, self.nodeaddress)
+
# 1 normal input
- bip113input = self.send_generic_input_tx(self.nodes[0], self.coinbase_blocks)
+ bip113input = send_generic_input_tx(self.nodes[0], self.coinbase_blocks, self.nodeaddress)
self.nodes[0].setmocktime(self.last_block_time + 600)
- inputblockhash = self.nodes[0].generate(1)[0] # 1 block generated for inputs to be in chain at height 572
+ inputblockhash = self.nodes[0].generate(1)[0] # 1 block generated for inputs to be in chain at height 572
self.nodes[0].setmocktime(0)
- self.tip = int("0x" + inputblockhash, 0)
+ self.tip = int(inputblockhash, 16)
self.tipheight += 1
self.last_block_time += 600
- assert_equal(len(self.nodes[0].getblock(inputblockhash,True)["tx"]), 82+1)
+ assert_equal(len(self.nodes[0].getblock(inputblockhash, True)["tx"]), 82 + 1)
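# The tip update above now parses the block-hash hex directly; int(h, 16) is equivalent
# to the older int("0x" + h, 0) form without building a temporary string:
h = "00" * 31 + "ff"                      # placeholder 64-character block hash
assert int(h, 16) == int("0x" + h, 0) == 255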
# 2 more version 4 blocks
test_blocks = self.generate_blocks(2, 4)
- yield TestInstance(test_blocks, sync_every_block=False) # 5
- # Not yet advanced to ACTIVE, height = 574 (will activate for block 576, not 575)
+ self.sync_blocks(test_blocks)
+
+ self.log.info("Not yet advanced to ACTIVE, height = 574 (will activate for block 576, not 575)")
assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'locked_in')
# Test both version 1 and version 2 transactions for all tests
# BIP113 test transaction will be modified before each use to put in appropriate block time
- bip113tx_v1 = self.create_transaction(self.nodes[0], bip113input, self.nodeaddress, Decimal("49.98"))
+ bip113tx_v1 = create_transaction(self.nodes[0], bip113input, self.nodeaddress, Decimal("49.98"))
bip113tx_v1.vin[0].nSequence = 0xFFFFFFFE
bip113tx_v1.nVersion = 1
- bip113tx_v2 = self.create_transaction(self.nodes[0], bip113input, self.nodeaddress, Decimal("49.98"))
+ bip113tx_v2 = create_transaction(self.nodes[0], bip113input, self.nodeaddress, Decimal("49.98"))
bip113tx_v2.vin[0].nSequence = 0xFFFFFFFE
bip113tx_v2.nVersion = 2
# For BIP68 test all 16 relative sequence locktimes
- bip68txs_v1 = self.create_bip68txs(bip68inputs, 1)
- bip68txs_v2 = self.create_bip68txs(bip68inputs, 2)
+ bip68txs_v1 = create_bip68txs(self.nodes[0], bip68inputs, 1, self.nodeaddress)
+ bip68txs_v2 = create_bip68txs(self.nodes[0], bip68inputs, 2, self.nodeaddress)
# For BIP112 test:
# 16 relative sequence locktimes of 10 against 10 OP_CSV OP_DROP inputs
- bip112txs_vary_nSequence_v1 = self.create_bip112txs(bip112basicinputs[0], False, 1)
- bip112txs_vary_nSequence_v2 = self.create_bip112txs(bip112basicinputs[0], False, 2)
+ bip112txs_vary_nSequence_v1 = create_bip112txs(self.nodes[0], bip112basicinputs[0], False, 1, self.nodeaddress)
+ bip112txs_vary_nSequence_v2 = create_bip112txs(self.nodes[0], bip112basicinputs[0], False, 2, self.nodeaddress)
# 16 relative sequence locktimes of 9 against 10 OP_CSV OP_DROP inputs
- bip112txs_vary_nSequence_9_v1 = self.create_bip112txs(bip112basicinputs[1], False, 1, -1)
- bip112txs_vary_nSequence_9_v2 = self.create_bip112txs(bip112basicinputs[1], False, 2, -1)
+ bip112txs_vary_nSequence_9_v1 = create_bip112txs(self.nodes[0], bip112basicinputs[1], False, 1, self.nodeaddress, -1)
+ bip112txs_vary_nSequence_9_v2 = create_bip112txs(self.nodes[0], bip112basicinputs[1], False, 2, self.nodeaddress, -1)
# sequence lock time of 10 against 16 (relative_lock_time) OP_CSV OP_DROP inputs
- bip112txs_vary_OP_CSV_v1 = self.create_bip112txs(bip112diverseinputs[0], True, 1)
- bip112txs_vary_OP_CSV_v2 = self.create_bip112txs(bip112diverseinputs[0], True, 2)
+ bip112txs_vary_OP_CSV_v1 = create_bip112txs(self.nodes[0], bip112diverseinputs[0], True, 1, self.nodeaddress)
+ bip112txs_vary_OP_CSV_v2 = create_bip112txs(self.nodes[0], bip112diverseinputs[0], True, 2, self.nodeaddress)
# sequence lock time of 9 against 16 (relative_lock_time) OP_CSV OP_DROP inputs
- bip112txs_vary_OP_CSV_9_v1 = self.create_bip112txs(bip112diverseinputs[1], True, 1, -1)
- bip112txs_vary_OP_CSV_9_v2 = self.create_bip112txs(bip112diverseinputs[1], True, 2, -1)
+ bip112txs_vary_OP_CSV_9_v1 = create_bip112txs(self.nodes[0], bip112diverseinputs[1], True, 1, self.nodeaddress, -1)
+ bip112txs_vary_OP_CSV_9_v2 = create_bip112txs(self.nodes[0], bip112diverseinputs[1], True, 2, self.nodeaddress, -1)
# -1 OP_CSV OP_DROP input
- bip112tx_special_v1 = self.create_bip112special(bip112specialinput, 1)
- bip112tx_special_v2 = self.create_bip112special(bip112specialinput, 2)
+ bip112tx_special_v1 = create_bip112special(self.nodes[0], bip112specialinput, 1, self.nodeaddress)
+ bip112tx_special_v2 = create_bip112special(self.nodes[0], bip112specialinput, 2, self.nodeaddress)
+
+ self.log.info("TESTING")
+ self.log.info("Pre-Soft Fork Tests. All txs should pass.")
+ self.log.info("Test version 1 txs")
- ### TESTING ###
- ##################################
- ### Before Soft Forks Activate ###
- ##################################
- # All txs should pass
- ### Version 1 txs ###
success_txs = []
# add BIP113 tx and -1 CSV tx
- bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block
- bip113signed1 = self.sign_transaction(self.nodes[0], bip113tx_v1)
+ bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block
+ bip113signed1 = sign_transaction(self.nodes[0], bip113tx_v1)
success_txs.append(bip113signed1)
success_txs.append(bip112tx_special_v1)
# add BIP 68 txs
@@ -330,14 +327,15 @@ class BIP68_112_113Test(ComparisonTestFramework):
# try BIP 112 with seq=9 txs
success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v1))
success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_9_v1))
- yield TestInstance([[self.create_test_block(success_txs), True]]) # 6
+ self.sync_blocks([self.create_test_block(success_txs)])
self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
- ### Version 2 txs ###
+ self.log.info("Test version 2 txs")
+
success_txs = []
# add BIP113 tx and -1 CSV tx
- bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block
- bip113signed2 = self.sign_transaction(self.nodes[0], bip113tx_v2)
+ bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block
+ bip113signed2 = sign_transaction(self.nodes[0], bip113tx_v2)
success_txs.append(bip113signed2)
success_txs.append(bip112tx_special_v2)
# add BIP 68 txs
@@ -348,187 +346,149 @@ class BIP68_112_113Test(ComparisonTestFramework):
# try BIP 112 with seq=9 txs
success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v2))
success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_9_v2))
- yield TestInstance([[self.create_test_block(success_txs), True]]) # 7
+ self.sync_blocks([self.create_test_block(success_txs)])
self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
-
# 1 more version 4 block to get us to height 575 so the fork should now be active for the next block
test_blocks = self.generate_blocks(1, 4)
- yield TestInstance(test_blocks, sync_every_block=False) # 8
+ self.sync_blocks(test_blocks)
assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'active')
+ self.log.info("Post-Soft Fork Tests.")
- #################################
- ### After Soft Forks Activate ###
- #################################
- ### BIP 113 ###
+ self.log.info("BIP 113 tests")
# BIP 113 tests should now fail regardless of version number if nLockTime isn't satisfied by new rules
- bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block
- bip113signed1 = self.sign_transaction(self.nodes[0], bip113tx_v1)
- bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block
- bip113signed2 = self.sign_transaction(self.nodes[0], bip113tx_v2)
+ bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block
+ bip113signed1 = sign_transaction(self.nodes[0], bip113tx_v1)
+ bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block
+ bip113signed2 = sign_transaction(self.nodes[0], bip113tx_v2)
for bip113tx in [bip113signed1, bip113signed2]:
- yield TestInstance([[self.create_test_block([bip113tx]), False]]) # 9,10
+ self.sync_blocks([self.create_test_block([bip113tx])], success=False)
# BIP 113 tests should now pass if the locktime is < MTP
- bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 - 1 # < MTP of prior block
- bip113signed1 = self.sign_transaction(self.nodes[0], bip113tx_v1)
- bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 - 1 # < MTP of prior block
- bip113signed2 = self.sign_transaction(self.nodes[0], bip113tx_v2)
+ bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 - 1 # < MTP of prior block
+ bip113signed1 = sign_transaction(self.nodes[0], bip113tx_v1)
+ bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 - 1 # < MTP of prior block
+ bip113signed2 = sign_transaction(self.nodes[0], bip113tx_v2)
for bip113tx in [bip113signed1, bip113signed2]:
- yield TestInstance([[self.create_test_block([bip113tx]), True]]) # 11,12
+ self.sync_blocks([self.create_test_block([bip113tx])])
self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
# Next block height = 580 after 4 blocks of random version
test_blocks = self.generate_blocks(4, 1234)
- yield TestInstance(test_blocks, sync_every_block=False) # 13
+ self.sync_blocks(test_blocks)
+
+ self.log.info("BIP 68 tests")
+ self.log.info("Test version 1 txs - all should still pass")
- ### BIP 68 ###
- ### Version 1 txs ###
- # All still pass
success_txs = []
success_txs.extend(all_rlt_txs(bip68txs_v1))
- yield TestInstance([[self.create_test_block(success_txs), True]]) # 14
+ self.sync_blocks([self.create_test_block(success_txs)])
self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
- ### Version 2 txs ###
- bip68success_txs = []
+ self.log.info("Test version 2 txs")
+
# All txs with SEQUENCE_LOCKTIME_DISABLE_FLAG set pass
- for b25 in range(2):
- for b22 in range(2):
- for b18 in range(2):
- bip68success_txs.append(bip68txs_v2[1][b25][b22][b18])
- yield TestInstance([[self.create_test_block(bip68success_txs), True]]) # 15
+ bip68success_txs = [tx['tx'] for tx in bip68txs_v2 if tx['sdf']]
+ self.sync_blocks([self.create_test_block(bip68success_txs)])
self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
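# The bip68/bip112 helpers now return flat lists of dicts instead of nested 2x2x2 arrays.
# Judging by the filters used above, each entry is assumed to hold the signed tx plus two
# booleans describing its lock: 'sdf' (SEQUENCE_LOCKTIME_DISABLE_FLAG set) and 'stf'
# (type flag set, i.e. a time-based rather than height-based lock). A filtering sketch
# with an illustrative helper name:
def txs_with_flags(tx_dicts, sdf=None, stf=None):
    """Select raw txs whose flag fields match the requested values (None = don't care)."""
    return [entry['tx'] for entry in tx_dicts
            if (sdf is None or entry['sdf'] == sdf)
            and (stf is None or entry['stf'] == stf)]
# e.g. the subset mined above: bip68success_txs = txs_with_flags(bip68txs_v2, sdf=True)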
+
# All txs without flag fail as we are at delta height = 8 < 10 and delta time = 8 * 600 < 10 * 512
- bip68timetxs = []
- for b25 in range(2):
- for b18 in range(2):
- bip68timetxs.append(bip68txs_v2[0][b25][1][b18])
+ bip68timetxs = [tx['tx'] for tx in bip68txs_v2 if not tx['sdf'] and tx['stf']]
for tx in bip68timetxs:
- yield TestInstance([[self.create_test_block([tx]), False]]) # 16 - 19
- bip68heighttxs = []
- for b25 in range(2):
- for b18 in range(2):
- bip68heighttxs.append(bip68txs_v2[0][b25][0][b18])
+ self.sync_blocks([self.create_test_block([tx])], success=False)
+
+ bip68heighttxs = [tx['tx'] for tx in bip68txs_v2 if not tx['sdf'] and not tx['stf']]
for tx in bip68heighttxs:
- yield TestInstance([[self.create_test_block([tx]), False]]) # 20 - 23
+ self.sync_blocks([self.create_test_block([tx])], success=False)
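# The arithmetic behind the two failure loops above, assuming 600s of mocktime per block
# and the 512s BIP68 time granularity: inputs confirmed at height 572, tip currently at 580.
input_height, tip_height, relative_lock = 572, 580, 10
delta_height = tip_height - input_height         # 8 confirmations so far
assert delta_height < relative_lock              # height-based locks of 10 not yet met
assert delta_height * 600 < relative_lock * 512  # 4800s elapsed < 5120s demanded by time locks
# After one more block (581): 9 * 600 = 5400 > 5120, so time locks pass while height locks still fail.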
# Advance one block to 581
test_blocks = self.generate_blocks(1, 1234)
- yield TestInstance(test_blocks, sync_every_block=False) # 24
+ self.sync_blocks(test_blocks)
# Height txs should fail and time txs should now pass 9 * 600 > 10 * 512
bip68success_txs.extend(bip68timetxs)
- yield TestInstance([[self.create_test_block(bip68success_txs), True]]) # 25
+ self.sync_blocks([self.create_test_block(bip68success_txs)])
self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
for tx in bip68heighttxs:
- yield TestInstance([[self.create_test_block([tx]), False]]) # 26 - 29
+ self.sync_blocks([self.create_test_block([tx])], success=False)
# Advance one block to 582
test_blocks = self.generate_blocks(1, 1234)
- yield TestInstance(test_blocks, sync_every_block=False) # 30
+ self.sync_blocks(test_blocks)
# All BIP 68 txs should pass
bip68success_txs.extend(bip68heighttxs)
- yield TestInstance([[self.create_test_block(bip68success_txs), True]]) # 31
+ self.sync_blocks([self.create_test_block(bip68success_txs)])
self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
+ self.log.info("BIP 112 tests")
+ self.log.info("Test version 1 txs")
- ### BIP 112 ###
- ### Version 1 txs ###
# -1 OP_CSV tx should fail
- yield TestInstance([[self.create_test_block([bip112tx_special_v1]), False]]) #32
+ self.sync_blocks([self.create_test_block([bip112tx_special_v1])], success=False)
# If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in argument to OP_CSV, version 1 txs should still pass
- success_txs = []
- for b25 in range(2):
- for b22 in range(2):
- for b18 in range(2):
- success_txs.append(bip112txs_vary_OP_CSV_v1[1][b25][b22][b18])
- success_txs.append(bip112txs_vary_OP_CSV_9_v1[1][b25][b22][b18])
- yield TestInstance([[self.create_test_block(success_txs), True]]) # 33
+
+ success_txs = [tx['tx'] for tx in bip112txs_vary_OP_CSV_v1 if tx['sdf']]
+ success_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v1 if tx['sdf']]
+ self.sync_blocks([self.create_test_block(success_txs)])
self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
# If SEQUENCE_LOCKTIME_DISABLE_FLAG is unset in argument to OP_CSV, version 1 txs should now fail
- fail_txs = []
- fail_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_v1))
- fail_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v1))
- for b25 in range(2):
- for b22 in range(2):
- for b18 in range(2):
- fail_txs.append(bip112txs_vary_OP_CSV_v1[0][b25][b22][b18])
- fail_txs.append(bip112txs_vary_OP_CSV_9_v1[0][b25][b22][b18])
-
+ fail_txs = all_rlt_txs(bip112txs_vary_nSequence_v1)
+ fail_txs += all_rlt_txs(bip112txs_vary_nSequence_9_v1)
+ fail_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_v1 if not tx['sdf']]
+ fail_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v1 if not tx['sdf']]
for tx in fail_txs:
- yield TestInstance([[self.create_test_block([tx]), False]]) # 34 - 81
+ self.sync_blocks([self.create_test_block([tx])], success=False)
+
+ self.log.info("Test version 2 txs")
- ### Version 2 txs ###
# -1 OP_CSV tx should fail
- yield TestInstance([[self.create_test_block([bip112tx_special_v2]), False]]) #82
+ self.sync_blocks([self.create_test_block([bip112tx_special_v2])], success=False)
# If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in argument to OP_CSV, version 2 txs should pass (all sequence locks are met)
- success_txs = []
- for b25 in range(2):
- for b22 in range(2):
- for b18 in range(2):
- success_txs.append(bip112txs_vary_OP_CSV_v2[1][b25][b22][b18]) # 8/16 of vary_OP_CSV
- success_txs.append(bip112txs_vary_OP_CSV_9_v2[1][b25][b22][b18]) # 8/16 of vary_OP_CSV_9
+ success_txs = [tx['tx'] for tx in bip112txs_vary_OP_CSV_v2 if tx['sdf']]
+ success_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v2 if tx['sdf']]
- yield TestInstance([[self.create_test_block(success_txs), True]]) # 83
+ self.sync_blocks([self.create_test_block(success_txs)])
self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
- ## SEQUENCE_LOCKTIME_DISABLE_FLAG is unset in argument to OP_CSV for all remaining txs ##
- # All txs with nSequence 9 should fail either due to earlier mismatch or failing the CSV check
- fail_txs = []
- fail_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v2)) # 16/16 of vary_nSequence_9
- for b25 in range(2):
- for b22 in range(2):
- for b18 in range(2):
- fail_txs.append(bip112txs_vary_OP_CSV_9_v2[0][b25][b22][b18]) # 16/16 of vary_OP_CSV_9
+ # SEQUENCE_LOCKTIME_DISABLE_FLAG is unset in argument to OP_CSV for all remaining txs
+ # All txs with nSequence 9 should fail either due to earlier mismatch or failing the CSV check
+ fail_txs = all_rlt_txs(bip112txs_vary_nSequence_9_v2)
+ fail_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v2 if not tx['sdf']]
for tx in fail_txs:
- yield TestInstance([[self.create_test_block([tx]), False]]) # 84 - 107
+ self.sync_blocks([self.create_test_block([tx])], success=False)
# If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in nSequence, tx should fail
- fail_txs = []
- for b25 in range(2):
- for b22 in range(2):
- for b18 in range(2):
- fail_txs.append(bip112txs_vary_nSequence_v2[1][b25][b22][b18]) # 8/16 of vary_nSequence
+ fail_txs = [tx['tx'] for tx in bip112txs_vary_nSequence_v2 if tx['sdf']]
for tx in fail_txs:
- yield TestInstance([[self.create_test_block([tx]), False]]) # 108-115
+ self.sync_blocks([self.create_test_block([tx])], success=False)
# If sequencelock types mismatch, tx should fail
- fail_txs = []
- for b25 in range(2):
- for b18 in range(2):
- fail_txs.append(bip112txs_vary_nSequence_v2[0][b25][1][b18]) # 12/16 of vary_nSequence
- fail_txs.append(bip112txs_vary_OP_CSV_v2[0][b25][1][b18]) # 12/16 of vary_OP_CSV
+ fail_txs = [tx['tx'] for tx in bip112txs_vary_nSequence_v2 if not tx['sdf'] and tx['stf']]
+ fail_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_v2 if not tx['sdf'] and tx['stf']]
for tx in fail_txs:
- yield TestInstance([[self.create_test_block([tx]), False]]) # 116-123
+ self.sync_blocks([self.create_test_block([tx])], success=False)
# Remaining txs should pass, just test masking works properly
- success_txs = []
- for b25 in range(2):
- for b18 in range(2):
- success_txs.append(bip112txs_vary_nSequence_v2[0][b25][0][b18]) # 16/16 of vary_nSequence
- success_txs.append(bip112txs_vary_OP_CSV_v2[0][b25][0][b18]) # 16/16 of vary_OP_CSV
- yield TestInstance([[self.create_test_block(success_txs), True]]) # 124
+ success_txs = [tx['tx'] for tx in bip112txs_vary_nSequence_v2 if not tx['sdf'] and not tx['stf']]
+ success_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_v2 if not tx['sdf'] and not tx['stf']]
+ self.sync_blocks([self.create_test_block(success_txs)])
self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
# Additional test, of checking that comparison of two time types works properly
time_txs = []
- for b25 in range(2):
- for b18 in range(2):
- tx = bip112txs_vary_OP_CSV_v2[0][b25][1][b18]
- tx.vin[0].nSequence = base_relative_locktime | seq_type_flag
- signtx = self.sign_transaction(self.nodes[0], tx)
- time_txs.append(signtx)
- yield TestInstance([[self.create_test_block(time_txs), True]]) # 125
- self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
+ for tx in [tx['tx'] for tx in bip112txs_vary_OP_CSV_v2 if not tx['sdf'] and tx['stf']]:
+ tx.vin[0].nSequence = BASE_RELATIVE_LOCKTIME | SEQ_TYPE_FLAG
+ signtx = sign_transaction(self.nodes[0], tx)
+ time_txs.append(signtx)
- ### Missing aspects of test
- ## Testing empty stack fails
+ self.sync_blocks([self.create_test_block(time_txs)])
+ self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
+ # TODO: Test empty stack fails
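# The constants referenced above (BASE_RELATIVE_LOCKTIME, SEQ_TYPE_FLAG) are assumed to
# follow the BIP68 nSequence layout: bit 31 disables the lock, bit 22 selects time-based
# (512-second units) rather than height-based locks, and the low 16 bits hold the value.
SEQUENCE_LOCKTIME_DISABLE_FLAG = 1 << 31
SEQ_TYPE_FLAG = 1 << 22            # set => time-based lock
BASE_RELATIVE_LOCKTIME = 10        # the lock value used throughout this test (assumed)

nseq = BASE_RELATIVE_LOCKTIME | SEQ_TYPE_FLAG
assert nseq & SEQUENCE_LOCKTIME_DISABLE_FLAG == 0   # lock is enforced
assert nseq & SEQ_TYPE_FLAG                         # interpreted as 10 * 512 seconds
assert nseq & 0x0000ffff == 10                      # masked lock value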
if __name__ == '__main__':
BIP68_112_113Test().main()
diff --git a/test/functional/feature_dbcrash.py b/test/functional/feature_dbcrash.py
index 24b9765b4e..cef257cf9b 100755
--- a/test/functional/feature_dbcrash.py
+++ b/test/functional/feature_dbcrash.py
@@ -206,7 +206,7 @@ class ChainstateWriteCrashTest(BitcoinTestFramework):
tx.vout.append(CTxOut(output_amount, hex_str_to_bytes(utxo['scriptPubKey'])))
# Sign and send the transaction to get into the mempool
- tx_signed_hex = node.signrawtransaction(ToHex(tx))['hex']
+ tx_signed_hex = node.signrawtransactionwithwallet(ToHex(tx))['hex']
node.sendrawtransaction(tx_signed_hex)
num_transactions += 1
diff --git a/test/functional/feature_dersig.py b/test/functional/feature_dersig.py
index 3414571678..02dcc3e55d 100755
--- a/test/functional/feature_dersig.py
+++ b/test/functional/feature_dersig.py
@@ -42,7 +42,7 @@ def create_transaction(node, coinbase, to_address, amount):
inputs = [{ "txid" : from_txid, "vout" : 0}]
outputs = { to_address : amount }
rawtx = node.createrawtransaction(inputs, outputs)
- signresult = node.signrawtransaction(rawtx)
+ signresult = node.signrawtransactionwithwallet(rawtx)
tx = CTransaction()
tx.deserialize(BytesIO(hex_str_to_bytes(signresult['hex'])))
return tx
diff --git a/test/functional/feature_fee_estimation.py b/test/functional/feature_fee_estimation.py
index e1263414bd..32a6bd5d59 100755
--- a/test/functional/feature_fee_estimation.py
+++ b/test/functional/feature_fee_estimation.py
@@ -91,7 +91,7 @@ def split_inputs(from_node, txins, txouts, initial_split=False):
# If this is the initial split we actually need to sign the transaction
# Otherwise we just need to insert the proper ScriptSig
if (initial_split):
- completetx = from_node.signrawtransaction(ToHex(tx))["hex"]
+ completetx = from_node.signrawtransactionwithwallet(ToHex(tx))["hex"]
else:
tx.vin[0].scriptSig = SCRIPT_SIG[prevtxout["vout"]]
completetx = ToHex(tx)
@@ -99,7 +99,7 @@ def split_inputs(from_node, txins, txouts, initial_split=False):
txouts.append({"txid": txid, "vout": 0, "amount": half_change})
txouts.append({"txid": txid, "vout": 1, "amount": rem_change})
-def check_estimates(node, fees_seen, max_invalid):
+def check_estimates(node, fees_seen):
"""Call estimatesmartfee and verify that the estimates meet certain invariants."""
delta = 1.0e-6 # account for rounding error
@@ -133,12 +133,12 @@ class EstimateFeeTest(BitcoinTestFramework):
which we will use to generate our transactions.
"""
self.add_nodes(3, extra_args=[["-maxorphantx=1000", "-whitelist=127.0.0.1"],
- ["-blockmaxsize=17000", "-maxorphantx=1000"],
- ["-blockmaxsize=8000", "-maxorphantx=1000"]])
+ ["-blockmaxweight=68000", "-maxorphantx=1000"],
+ ["-blockmaxweight=32000", "-maxorphantx=1000"]])
# Use node0 to mine blocks for input splitting
# Node1 mines small blocks, but with capacity just above the expected transaction rate.
- # NOTE: the CreateNewBlock code starts counting block size at 1,000 bytes,
- # (17k is room enough for 110 or so transactions)
+ # NOTE: the CreateNewBlock code starts counting block weight at 4,000 weight,
+ # (68k weight is room enough for 120 or so transactions)
# Node2 is a stingy miner that
# produces too small blocks (room for only 55 or so transactions)
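# The -blockmaxsize values were translated to -blockmaxweight at the segwit ratio of
# 4 weight units per non-witness byte, so mining capacity is roughly unchanged:
assert 17000 * 4 == 68000   # node1: blocks just big enough to keep up
assert 8000 * 4 == 32000    # node2: deliberately undersized blocks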
@@ -219,13 +219,13 @@ class EstimateFeeTest(BitcoinTestFramework):
self.log.info("Creating transactions and mining them with a block size that can't keep up")
# Create transactions and mine 10 small blocks with node 2, but create txs faster than we can mine
self.transact_and_mine(10, self.nodes[2])
- check_estimates(self.nodes[1], self.fees_per_kb, 14)
+ check_estimates(self.nodes[1], self.fees_per_kb)
self.log.info("Creating transactions and mining them at a block size that is just big enough")
# Generate transactions while mining 10 more blocks, this time with node1
# which mines blocks with capacity just above the rate that transactions are being created
self.transact_and_mine(10, self.nodes[1])
- check_estimates(self.nodes[1], self.fees_per_kb, 2)
+ check_estimates(self.nodes[1], self.fees_per_kb)
# Finish by mining a normal-sized block:
while len(self.nodes[1].getrawmempool()) > 0:
@@ -233,7 +233,7 @@ class EstimateFeeTest(BitcoinTestFramework):
sync_blocks(self.nodes[0:3], wait=.1)
self.log.info("Final estimates after emptying mempools")
- check_estimates(self.nodes[1], self.fees_per_kb, 2)
+ check_estimates(self.nodes[1], self.fees_per_kb)
if __name__ == '__main__':
EstimateFeeTest().main()
diff --git a/test/functional/feature_help.py b/test/functional/feature_help.py
new file mode 100755
index 0000000000..fd4a72f628
--- /dev/null
+++ b/test/functional/feature_help.py
@@ -0,0 +1,46 @@
+#!/usr/bin/env python3
+# Copyright (c) 2018 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Verify that starting bitcoin with -h works as expected."""
+import subprocess
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import assert_equal
+
+class HelpTest(BitcoinTestFramework):
+ def set_test_params(self):
+ self.setup_clean_chain = True
+ self.num_nodes = 1
+
+ def setup_network(self):
+ self.add_nodes(self.num_nodes)
+ # Don't start the node
+
+ def run_test(self):
+ self.log.info("Start bitcoin with -h for help text")
+ self.nodes[0].start(extra_args=['-h'], stderr=subprocess.PIPE, stdout=subprocess.PIPE)
+ # Node should exit immediately and output help to stdout.
+ ret_code = self.nodes[0].process.wait(timeout=1)
+ assert_equal(ret_code, 0)
+ output = self.nodes[0].process.stdout.read()
+ assert b'Options' in output
+ self.log.info("Help text received: {} (...)".format(output[0:60]))
+ self.nodes[0].running = False
+
+ self.log.info("Start bitcoin with -version for version information")
+ self.nodes[0].start(extra_args=['-version'], stderr=subprocess.PIPE, stdout=subprocess.PIPE)
+ # Node should exit immediately and output version to stdout.
+ ret_code = self.nodes[0].process.wait(timeout=1)
+ assert_equal(ret_code, 0)
+ output = self.nodes[0].process.stdout.read()
+ assert b'version' in output
+ self.log.info("Version text received: {} (...)".format(output[0:60]))
+ # Clean up TestNode state
+ self.nodes[0].running = False
+ self.nodes[0].process = None
+ self.nodes[0].rpc_connected = False
+ self.nodes[0].rpc = None
+
+if __name__ == '__main__':
+ HelpTest().main()
diff --git a/test/functional/feature_logging.py b/test/functional/feature_logging.py
index da4e7b0398..166f8f8694 100755
--- a/test/functional/feature_logging.py
+++ b/test/functional/feature_logging.py
@@ -7,19 +7,25 @@
import os
from test_framework.test_framework import BitcoinTestFramework
+from test_framework.test_node import ErrorMatch
+
class LoggingTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.setup_clean_chain = True
+ def relative_log_path(self, name):
+ return os.path.join(self.nodes[0].datadir, "regtest", name)
+
def run_test(self):
# test default log file name
- assert os.path.isfile(os.path.join(self.nodes[0].datadir, "regtest", "debug.log"))
+ default_log_path = self.relative_log_path("debug.log")
+ assert os.path.isfile(default_log_path)
# test alternative log file name in datadir
self.restart_node(0, ["-debuglogfile=foo.log"])
- assert os.path.isfile(os.path.join(self.nodes[0].datadir, "regtest", "foo.log"))
+ assert os.path.isfile(self.relative_log_path("foo.log"))
# test alternative log file name outside datadir
tempname = os.path.join(self.options.tmpdir, "foo.log")
@@ -27,11 +33,11 @@ class LoggingTest(BitcoinTestFramework):
assert os.path.isfile(tempname)
# check that invalid log (relative) will cause error
- invdir = os.path.join(self.nodes[0].datadir, "regtest", "foo")
+ invdir = self.relative_log_path("foo")
invalidname = os.path.join("foo", "foo.log")
self.stop_node(0)
- self.assert_start_raises_init_error(0, ["-debuglogfile=%s" % (invalidname)],
- "Error: Could not open debug log file")
+ exp_stderr = "Error: Could not open debug log file \S+$"
+ self.nodes[0].assert_start_raises_init_error(["-debuglogfile=%s" % (invalidname)], exp_stderr, match=ErrorMatch.FULL_REGEX)
assert not os.path.isfile(os.path.join(invdir, "foo.log"))
# check that invalid log (relative) works after path exists
@@ -44,8 +50,7 @@ class LoggingTest(BitcoinTestFramework):
self.stop_node(0)
invdir = os.path.join(self.options.tmpdir, "foo")
invalidname = os.path.join(invdir, "foo.log")
- self.assert_start_raises_init_error(0, ["-debuglogfile=%s" % invalidname],
- "Error: Could not open debug log file")
+ self.nodes[0].assert_start_raises_init_error(["-debuglogfile=%s" % invalidname], exp_stderr, match=ErrorMatch.FULL_REGEX)
assert not os.path.isfile(os.path.join(invdir, "foo.log"))
# check that invalid log (absolute) works after path exists
@@ -54,6 +59,17 @@ class LoggingTest(BitcoinTestFramework):
self.start_node(0, ["-debuglogfile=%s" % (invalidname)])
assert os.path.isfile(os.path.join(invdir, "foo.log"))
+ # check that -nodebuglogfile disables logging
+ self.stop_node(0)
+ os.unlink(default_log_path)
+ assert not os.path.isfile(default_log_path)
+ self.start_node(0, ["-nodebuglogfile"])
+ assert not os.path.isfile(default_log_path)
+
+ # just sanity check no crash here
+ self.stop_node(0)
+ self.start_node(0, ["-debuglogfile=%s" % os.devnull])
+
if __name__ == '__main__':
LoggingTest().main()
diff --git a/test/functional/feature_maxuploadtarget.py b/test/functional/feature_maxuploadtarget.py
index 45336ee801..072ba6c7c7 100755
--- a/test/functional/feature_maxuploadtarget.py
+++ b/test/functional/feature_maxuploadtarget.py
@@ -6,7 +6,7 @@
* Verify that getdata requests for old blocks (>1week) are dropped
if uploadtarget has been reached.
-* Verify that getdata requests for recent blocks are respecteved even
+* Verify that getdata requests for recent blocks are respected even
if uploadtarget has been reached.
* Verify that the upload counters are reset after 24 hours.
"""
@@ -17,7 +17,7 @@ from test_framework.mininode import *
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
-class TestNode(P2PInterface):
+class TestP2PConn(P2PInterface):
def __init__(self):
super().__init__()
self.block_receive_map = defaultdict(int)
@@ -30,11 +30,11 @@ class TestNode(P2PInterface):
self.block_receive_map[message.block.sha256] += 1
class MaxUploadTest(BitcoinTestFramework):
-
+
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
- self.extra_args = [["-maxuploadtarget=800", "-blockmaxsize=999000"]]
+ self.extra_args = [["-maxuploadtarget=800"]]
# Cache for utxos, as the listunspent may take a long time later in the test
self.utxo_cache = []
@@ -55,7 +55,7 @@ class MaxUploadTest(BitcoinTestFramework):
p2p_conns = []
for _ in range(3):
- p2p_conns.append(self.nodes[0].add_p2p_connection(TestNode()))
+ p2p_conns.append(self.nodes[0].add_p2p_connection(TestP2PConn()))
network_thread_start()
for p2pc in p2p_conns:
@@ -144,10 +144,10 @@ class MaxUploadTest(BitcoinTestFramework):
#stop and start node 0 with 1MB maxuploadtarget, whitelist 127.0.0.1
self.log.info("Restarting nodes with -whitelist=127.0.0.1")
self.stop_node(0)
- self.start_node(0, ["-whitelist=127.0.0.1", "-maxuploadtarget=1", "-blockmaxsize=999000"])
+ self.start_node(0, ["-whitelist=127.0.0.1", "-maxuploadtarget=1"])
# Reconnect to self.nodes[0]
- self.nodes[0].add_p2p_connection(TestNode())
+ self.nodes[0].add_p2p_connection(TestP2PConn())
network_thread_start()
self.nodes[0].p2p.wait_for_verack()
diff --git a/test/functional/feature_notifications.py b/test/functional/feature_notifications.py
index 980bef5fc8..8964c8d64b 100755
--- a/test/functional/feature_notifications.py
+++ b/test/functional/feature_notifications.py
@@ -37,7 +37,7 @@ class NotificationsTest(BitcoinTestFramework):
# file content should equal the generated blocks hashes
with open(self.block_filename, 'r') as f:
- assert_equal(sorted(blocks), sorted(f.read().splitlines()))
+ assert_equal(sorted(blocks), sorted(l.strip() for l in f.read().splitlines()))
self.log.info("test -walletnotify")
# wait at most 10 seconds for expected file size before reading the content
@@ -46,7 +46,7 @@ class NotificationsTest(BitcoinTestFramework):
# file content should equal the generated transaction hashes
txids_rpc = list(map(lambda t: t['txid'], self.nodes[1].listtransactions("*", block_count)))
with open(self.tx_filename, 'r') as f:
- assert_equal(sorted(txids_rpc), sorted(f.read().splitlines()))
+ assert_equal(sorted(txids_rpc), sorted(l.strip() for l in f.read().splitlines()))
os.remove(self.tx_filename)
self.log.info("test -walletnotify after rescan")
@@ -59,7 +59,7 @@ class NotificationsTest(BitcoinTestFramework):
# file content should equal the generated transaction hashes
txids_rpc = list(map(lambda t: t['txid'], self.nodes[1].listtransactions("*", block_count)))
with open(self.tx_filename, 'r') as f:
- assert_equal(sorted(txids_rpc), sorted(f.read().splitlines()))
+ assert_equal(sorted(txids_rpc), sorted(l.strip() for l in f.read().splitlines()))
# Mine another 41 up-version blocks. -alertnotify should trigger on the 51st.
self.log.info("test -alertnotify")
diff --git a/test/functional/feature_nulldummy.py b/test/functional/feature_nulldummy.py
index 740c498ce6..7db6a03b45 100755
--- a/test/functional/feature_nulldummy.py
+++ b/test/functional/feature_nulldummy.py
@@ -102,7 +102,7 @@ class NULLDUMMYTest(BitcoinTestFramework):
inputs = [{ "txid" : txid, "vout" : 0}]
outputs = { to_address : amount }
rawtx = node.createrawtransaction(inputs, outputs)
- signresult = node.signrawtransaction(rawtx)
+ signresult = node.signrawtransactionwithwallet(rawtx)
tx = CTransaction()
f = BytesIO(hex_str_to_bytes(signresult['hex']))
tx.deserialize(f)
diff --git a/test/functional/feature_proxy.py b/test/functional/feature_proxy.py
index 2eb1be47a5..60859de7a5 100755
--- a/test/functional/feature_proxy.py
+++ b/test/functional/feature_proxy.py
@@ -182,7 +182,7 @@ class ProxyTest(BitcoinTestFramework):
assert_equal(n1['onion']['proxy'], '%s:%i' % (self.conf2.addr))
assert_equal(n1['onion']['proxy_randomize_credentials'], False)
assert_equal(n1['onion']['reachable'], True)
-
+
n2 = networks_dict(self.nodes[2].getnetworkinfo())
for net in ['ipv4','ipv6','onion']:
assert_equal(n2[net]['proxy'], '%s:%i' % (self.conf2.addr))
diff --git a/test/functional/feature_pruning.py b/test/functional/feature_pruning.py
index 49ad7f838c..3adde8dd73 100755
--- a/test/functional/feature_pruning.py
+++ b/test/functional/feature_pruning.py
@@ -11,7 +11,6 @@ This test takes 30 mins or more (up to 2 hours)
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
-import time
import os
MIN_BLOCKS_TO_KEEP = 288
@@ -23,7 +22,7 @@ TIMESTAMP_WINDOW = 2 * 60 * 60
def calc_usage(blockdir):
- return sum(os.path.getsize(blockdir+f) for f in os.listdir(blockdir) if os.path.isfile(blockdir+f)) / (1024. * 1024.)
+ return sum(os.path.getsize(blockdir+f) for f in os.listdir(blockdir) if os.path.isfile(os.path.join(blockdir, f))) / (1024. * 1024.)
class PruneTest(BitcoinTestFramework):
def set_test_params(self):
@@ -32,20 +31,20 @@ class PruneTest(BitcoinTestFramework):
# Create nodes 0 and 1 to mine.
# Create node 2 to test pruning.
- self.full_node_default_args = ["-maxreceivebuffer=20000","-blockmaxsize=999000", "-checkblocks=5", "-limitdescendantcount=100", "-limitdescendantsize=5000", "-limitancestorcount=100", "-limitancestorsize=5000" ]
+ self.full_node_default_args = ["-maxreceivebuffer=20000", "-checkblocks=5", "-limitdescendantcount=100", "-limitdescendantsize=5000", "-limitancestorcount=100", "-limitancestorsize=5000" ]
# Create nodes 3 and 4 to test manual pruning (they will be re-started with manual pruning later)
# Create nodes 5 to test wallet in prune mode, but do not connect
self.extra_args = [self.full_node_default_args,
self.full_node_default_args,
["-maxreceivebuffer=20000", "-prune=550"],
- ["-maxreceivebuffer=20000", "-blockmaxsize=999000"],
- ["-maxreceivebuffer=20000", "-blockmaxsize=999000"],
+ ["-maxreceivebuffer=20000"],
+ ["-maxreceivebuffer=20000"],
["-prune=550"]]
def setup_network(self):
self.setup_nodes()
- self.prunedir = self.options.tmpdir + "/node2/regtest/blocks/"
+ self.prunedir = os.path.join(self.nodes[2].datadir, 'regtest', 'blocks', '')
connect_nodes(self.nodes[0], 1)
connect_nodes(self.nodes[1], 2)
@@ -70,7 +69,7 @@ class PruneTest(BitcoinTestFramework):
sync_blocks(self.nodes[0:5])
def test_height_min(self):
- if not os.path.isfile(self.prunedir+"blk00000.dat"):
+ if not os.path.isfile(os.path.join(self.prunedir, "blk00000.dat")):
raise AssertionError("blk00000.dat is missing, pruning too early")
self.log.info("Success")
self.log.info("Though we're already using more than 550MiB, current usage: %d" % calc_usage(self.prunedir))
@@ -79,11 +78,8 @@ class PruneTest(BitcoinTestFramework):
for i in range(25):
mine_large_block(self.nodes[0], self.utxo_cache_0)
- waitstart = time.time()
- while os.path.isfile(self.prunedir+"blk00000.dat"):
- time.sleep(0.1)
- if time.time() - waitstart > 30:
- raise AssertionError("blk00000.dat not pruned when it should be")
+ # Wait for blk00000.dat to be pruned
+ wait_until(lambda: not os.path.isfile(os.path.join(self.prunedir, "blk00000.dat")), timeout=30)
self.log.info("Success")
usage = calc_usage(self.prunedir)
@@ -128,7 +124,7 @@ class PruneTest(BitcoinTestFramework):
# Reboot node 1 to clear its mempool (hopefully make the invalidate faster)
# Lower the block max size so we don't keep mining all our big mempool transactions (from disconnected blocks)
self.stop_node(1)
- self.start_node(1, extra_args=["-maxreceivebuffer=20000","-blockmaxsize=5000", "-checkblocks=5", "-disablesafemode"])
+ self.start_node(1, extra_args=["-maxreceivebuffer=20000","-blockmaxweight=20000", "-checkblocks=5", "-disablesafemode"])
height = self.nodes[1].getblockcount()
self.log.info("Current block height: %d" % height)
@@ -151,7 +147,7 @@ class PruneTest(BitcoinTestFramework):
# Reboot node1 to clear those giant tx's from mempool
self.stop_node(1)
- self.start_node(1, extra_args=["-maxreceivebuffer=20000","-blockmaxsize=5000", "-checkblocks=5", "-disablesafemode"])
+ self.start_node(1, extra_args=["-maxreceivebuffer=20000","-blockmaxweight=20000", "-checkblocks=5", "-disablesafemode"])
self.log.info("Generating new longer chain of 300 more blocks")
self.nodes[1].generate(300)
@@ -190,8 +186,8 @@ class PruneTest(BitcoinTestFramework):
# Verify that we have enough history to reorg back to the fork point
# Although this is more than 288 blocks, because this chain was written more recently
- # and only its other 299 small and 220 large block are in the block files after it,
- # its expected to still be retained
+ # and only its other 299 small and 220 large blocks are in the block files after it,
+ # it is expected to still be retained
self.nodes[2].getblock(self.nodes[2].getblockhash(self.forkheight))
first_reorg_height = self.nodes[2].getblockcount()
@@ -218,11 +214,8 @@ class PruneTest(BitcoinTestFramework):
goalbestheight = first_reorg_height + 1
self.log.info("Verify node 2 reorged back to the main chain, some blocks of which it had to redownload")
- waitstart = time.time()
- while self.nodes[2].getblockcount() < goalbestheight:
- time.sleep(0.1)
- if time.time() - waitstart > 900:
- raise AssertionError("Node 2 didn't reorg to proper height")
+ # Wait for Node 2 to reorg to proper height
+ wait_until(lambda: self.nodes[2].getblockcount() >= goalbestheight, timeout=900)
assert(self.nodes[2].getbestblockhash() == goalbesthash)
# Verify we can now have the data for a block previously pruned
assert(self.nodes[2].getblock(self.forkhash)["height"] == self.forkheight)
@@ -262,7 +255,7 @@ class PruneTest(BitcoinTestFramework):
assert_equal(ret, expected_ret)
def has_block(index):
- return os.path.isfile(self.options.tmpdir + "/node{}/regtest/blocks/blk{:05}.dat".format(node_number, index))
+ return os.path.isfile(os.path.join(self.nodes[node_number].datadir, "regtest", "blocks", "blk{:05}.dat".format(index)))
# should not prune because chain tip of node 3 (995) < PruneAfterHeight (1000)
assert_raises_rpc_error(-1, "Blockchain is too short for pruning", node.pruneblockchain, height(500))
diff --git a/test/functional/feature_rbf.py b/test/functional/feature_rbf.py
index 6b7ab0f43e..d6ab5ecc37 100755
--- a/test/functional/feature_rbf.py
+++ b/test/functional/feature_rbf.py
@@ -42,7 +42,7 @@ def make_utxo(node, amount, confirmed=True, scriptPubKey=CScript([1])):
tx2.vout = [CTxOut(amount, scriptPubKey)]
tx2.rehash()
- signed_tx = node.signrawtransaction(txToHex(tx2))
+ signed_tx = node.signrawtransactionwithwallet(txToHex(tx2))
txid = node.sendrawtransaction(signed_tx['hex'], True)
diff --git a/test/functional/feature_reindex.py b/test/functional/feature_reindex.py
index ac67e6e9ba..d1d3f1d7f1 100755
--- a/test/functional/feature_reindex.py
+++ b/test/functional/feature_reindex.py
@@ -10,8 +10,7 @@
"""
from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import assert_equal
-import time
+from test_framework.util import wait_until
class ReindexTest(BitcoinTestFramework):
@@ -25,9 +24,7 @@ class ReindexTest(BitcoinTestFramework):
self.stop_nodes()
extra_args = [["-reindex-chainstate" if justchainstate else "-reindex", "-checkblockindex=1"]]
self.start_nodes(extra_args)
- while self.nodes[0].getblockcount() < blockcount:
- time.sleep(0.1)
- assert_equal(self.nodes[0].getblockcount(), blockcount)
+ wait_until(lambda: self.nodes[0].getblockcount() == blockcount)
self.log.info("Success")
def run_test(self):
diff --git a/test/functional/feature_segwit.py b/test/functional/feature_segwit.py
index 7db05077c9..e835b9d777 100755
--- a/test/functional/feature_segwit.py
+++ b/test/functional/feature_segwit.py
@@ -96,7 +96,7 @@ class SegWitTest(BitcoinTestFramework):
wit_ids = [] # wit_ids[NODE][VER] is an array of txids that spend to a witness version VER pkscript to an address for NODE via bare witness
for i in range(3):
newaddress = self.nodes[i].getnewaddress()
- self.pubkey.append(self.nodes[i].validateaddress(newaddress)["pubkey"])
+ self.pubkey.append(self.nodes[i].getaddressinfo(newaddress)["pubkey"])
multiscript = CScript([OP_1, hex_str_to_bytes(self.pubkey[-1]), OP_1, OP_CHECKMULTISIG])
p2sh_addr = self.nodes[i].addwitnessaddress(newaddress)
bip173_addr = self.nodes[i].addwitnessaddress(newaddress, False)
@@ -150,19 +150,11 @@ class SegWitTest(BitcoinTestFramework):
self.skip_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V0][0], True) #block 426
self.skip_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V1][0], True) #block 427
- # TODO: An old node would see these txs without witnesses and be able to mine them
-
- self.log.info("Verify unsigned bare witness txs in versionbits-setting blocks are valid before the fork")
- self.success_mine(self.nodes[2], wit_ids[NODE_2][WIT_V0][1], False) #block 428
- self.success_mine(self.nodes[2], wit_ids[NODE_2][WIT_V1][1], False) #block 429
-
self.log.info("Verify unsigned p2sh witness txs without a redeem script are invalid")
self.fail_accept(self.nodes[2], "mandatory-script-verify-flag", p2sh_ids[NODE_2][WIT_V0][1], False)
self.fail_accept(self.nodes[2], "mandatory-script-verify-flag", p2sh_ids[NODE_2][WIT_V1][1], False)
- self.log.info("Verify unsigned p2sh witness txs with a redeem script in versionbits-settings blocks are valid before the fork")
- self.success_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V0][1], False, witness_script(False, self.pubkey[2])) #block 430
- self.success_mine(self.nodes[2], p2sh_ids[NODE_2][WIT_V1][1], False, witness_script(True, self.pubkey[2])) #block 431
+ self.nodes[2].generate(4) # blocks 428-431
self.log.info("Verify previous witness txs skipped for mining can now be mined")
assert_equal(len(self.nodes[2].getrawmempool()), 4)
@@ -221,7 +213,7 @@ class SegWitTest(BitcoinTestFramework):
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(int(txid1, 16), 0), b''))
tx.vout.append(CTxOut(int(49.99*COIN), CScript([OP_TRUE])))
- tx2_hex = self.nodes[0].signrawtransaction(ToHex(tx))['hex']
+ tx2_hex = self.nodes[0].signrawtransactionwithwallet(ToHex(tx))['hex']
txid2 = self.nodes[0].sendrawtransaction(tx2_hex)
tx = FromHex(CTransaction(), tx2_hex)
assert(not tx.wit.is_null())
@@ -274,8 +266,8 @@ class SegWitTest(BitcoinTestFramework):
uncompressed_spendable_address = ["mvozP4UwyGD2mGZU4D2eMvMLPB9WkMmMQu"]
self.nodes[0].importprivkey("cNC8eQ5dg3mFAVePDX4ddmPYpPbw41r9bm2jd1nLJT77e6RrzTRR")
compressed_spendable_address = ["mmWQubrDomqpgSYekvsU7HWEVjLFHAakLe"]
- assert ((self.nodes[0].validateaddress(uncompressed_spendable_address[0])['iscompressed'] == False))
- assert ((self.nodes[0].validateaddress(compressed_spendable_address[0])['iscompressed'] == True))
+ assert ((self.nodes[0].getaddressinfo(uncompressed_spendable_address[0])['iscompressed'] == False))
+ assert ((self.nodes[0].getaddressinfo(compressed_spendable_address[0])['iscompressed'] == True))
self.nodes[0].importpubkey(pubkeys[0])
compressed_solvable_address = [key_to_p2pkh(pubkeys[0])]
@@ -308,7 +300,7 @@ class SegWitTest(BitcoinTestFramework):
solvable_after_importaddress.append(CScript([OP_HASH160, hash160(script), OP_EQUAL]))
for i in compressed_spendable_address:
- v = self.nodes[0].validateaddress(i)
+ v = self.nodes[0].getaddressinfo(i)
if (v['isscript']):
[bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
# bare and p2sh multisig with compressed keys should always be spendable
@@ -325,7 +317,7 @@ class SegWitTest(BitcoinTestFramework):
spendable_anytime.extend([p2wpkh, p2sh_p2wpkh])
for i in uncompressed_spendable_address:
- v = self.nodes[0].validateaddress(i)
+ v = self.nodes[0].getaddressinfo(i)
if (v['isscript']):
[bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
# bare and p2sh multisig with uncompressed keys should always be spendable
@@ -342,7 +334,7 @@ class SegWitTest(BitcoinTestFramework):
unseen_anytime.extend([p2wpkh, p2sh_p2wpkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh])
for i in compressed_solvable_address:
- v = self.nodes[0].validateaddress(i)
+ v = self.nodes[0].getaddressinfo(i)
if (v['isscript']):
# Multisig without private is not seen after addmultisigaddress, but seen after importaddress
[bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
@@ -355,7 +347,7 @@ class SegWitTest(BitcoinTestFramework):
solvable_after_importaddress.extend([p2sh_p2pk, p2sh_p2pkh, p2wsh_p2pk, p2wsh_p2pkh, p2sh_p2wsh_p2pk, p2sh_p2wsh_p2pkh])
for i in uncompressed_solvable_address:
- v = self.nodes[0].validateaddress(i)
+ v = self.nodes[0].getaddressinfo(i)
if (v['isscript']):
[bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
# Base uncompressed multisig without private is not seen after addmultisigaddress, but seen after importaddress
@@ -395,7 +387,7 @@ class SegWitTest(BitcoinTestFramework):
importlist = []
for i in compressed_spendable_address + uncompressed_spendable_address + compressed_solvable_address + uncompressed_solvable_address:
- v = self.nodes[0].validateaddress(i)
+ v = self.nodes[0].getaddressinfo(i)
if (v['isscript']):
bare = hex_str_to_bytes(v['hex'])
importlist.append(bytes_to_hex_str(bare))
@@ -473,7 +465,7 @@ class SegWitTest(BitcoinTestFramework):
premature_witaddress = []
for i in compressed_spendable_address:
- v = self.nodes[0].validateaddress(i)
+ v = self.nodes[0].getaddressinfo(i)
if (v['isscript']):
[bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
# P2WSH and P2SH(P2WSH) multisig with compressed keys are spendable after addwitnessaddress
@@ -485,7 +477,7 @@ class SegWitTest(BitcoinTestFramework):
spendable_anytime.extend([p2wpkh, p2sh_p2wpkh])
for i in uncompressed_spendable_address + uncompressed_solvable_address:
- v = self.nodes[0].validateaddress(i)
+ v = self.nodes[0].getaddressinfo(i)
if (v['isscript']):
[bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
# P2WSH and P2SH(P2WSH) multisig with uncompressed keys are never seen
@@ -496,7 +488,7 @@ class SegWitTest(BitcoinTestFramework):
unseen_anytime.extend([p2wpkh, p2sh_p2wpkh])
for i in compressed_solvable_address:
- v = self.nodes[0].validateaddress(i)
+ v = self.nodes[0].getaddressinfo(i)
if (v['isscript']):
# P2WSH multisig without private key are seen after addwitnessaddress
[bare, p2sh, p2wsh, p2sh_p2wsh] = self.p2sh_address_to_script(v)
@@ -519,7 +511,7 @@ class SegWitTest(BitcoinTestFramework):
assert_raises_rpc_error(-4, "Public key or redeemscript not known to wallet, or the key is uncompressed", self.nodes[0].addwitnessaddress, i)
# after importaddress it should pass addwitnessaddress
- v = self.nodes[0].validateaddress(compressed_solvable_address[1])
+ v = self.nodes[0].getaddressinfo(compressed_solvable_address[1])
self.nodes[0].importaddress(v['hex'],"",False,True)
for i in compressed_spendable_address + compressed_solvable_address + premature_witaddress:
witaddress = self.nodes[0].addwitnessaddress(i)
@@ -559,7 +551,7 @@ class SegWitTest(BitcoinTestFramework):
self.nodes[1].importaddress(scriptPubKey, "", False)
rawtxfund = self.nodes[1].fundrawtransaction(transaction)['hex']
- rawtxfund = self.nodes[1].signrawtransaction(rawtxfund)["hex"]
+ rawtxfund = self.nodes[1].signrawtransactionwithwallet(rawtxfund)["hex"]
txid = self.nodes[1].sendrawtransaction(rawtxfund)
assert_equal(self.nodes[1].gettransaction(txid, True)["txid"], txid)
@@ -578,7 +570,7 @@ class SegWitTest(BitcoinTestFramework):
for i in script_list:
tx.vout.append(CTxOut(10000000, i))
tx.rehash()
- signresults = self.nodes[0].signrawtransaction(bytes_to_hex_str(tx.serialize_without_witness()))['hex']
+ signresults = self.nodes[0].signrawtransactionwithwallet(bytes_to_hex_str(tx.serialize_without_witness()))['hex']
txid = self.nodes[0].sendrawtransaction(signresults, True)
self.nodes[0].generate(1)
sync_blocks(self.nodes)
@@ -630,7 +622,7 @@ class SegWitTest(BitcoinTestFramework):
tx.vin.append(CTxIn(COutPoint(int('0x'+i,0), j)))
tx.vout.append(CTxOut(0, CScript()))
tx.rehash()
- signresults = self.nodes[0].signrawtransaction(bytes_to_hex_str(tx.serialize_without_witness()))['hex']
+ signresults = self.nodes[0].signrawtransactionwithwallet(bytes_to_hex_str(tx.serialize_without_witness()))['hex']
self.nodes[0].sendrawtransaction(signresults, True)
self.nodes[0].generate(1)
sync_blocks(self.nodes)
diff --git a/test/functional/feature_uacomment.py b/test/functional/feature_uacomment.py
index bc3791508a..80bd7ff29f 100755
--- a/test/functional/feature_uacomment.py
+++ b/test/functional/feature_uacomment.py
@@ -4,9 +4,13 @@
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the -uacomment option."""
+import re
+
from test_framework.test_framework import BitcoinTestFramework
+from test_framework.test_node import ErrorMatch
from test_framework.util import assert_equal
+
class UacommentTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
@@ -23,13 +27,14 @@ class UacommentTest(BitcoinTestFramework):
self.log.info("test -uacomment max length")
self.stop_node(0)
- expected = "exceeds maximum length (256). Reduce the number or size of uacomments."
- self.assert_start_raises_init_error(0, ["-uacomment=" + 'a' * 256], expected)
+ expected = "Error: Total length of network version string \([0-9]+\) exceeds maximum length \(256\). Reduce the number or size of uacomments."
+ self.nodes[0].assert_start_raises_init_error(["-uacomment=" + 'a' * 256], expected, match=ErrorMatch.FULL_REGEX)
self.log.info("test -uacomment unsafe characters")
for unsafe_char in ['/', ':', '(', ')']:
- expected = "User Agent comment (" + unsafe_char + ") contains unsafe characters"
- self.assert_start_raises_init_error(0, ["-uacomment=" + unsafe_char], expected)
+ expected = "Error: User Agent comment \(" + re.escape(unsafe_char) + "\) contains unsafe characters."
+ self.nodes[0].assert_start_raises_init_error(["-uacomment=" + unsafe_char], expected, match=ErrorMatch.FULL_REGEX)
+
if __name__ == '__main__':
UacommentTest().main()
diff --git a/test/functional/interface_bitcoin_cli.py b/test/functional/interface_bitcoin_cli.py
index d8c80ab34f..e29fdc84e7 100755
--- a/test/functional/interface_bitcoin_cli.py
+++ b/test/functional/interface_bitcoin_cli.py
@@ -29,11 +29,17 @@ class TestBitcoinCli(BitcoinTestFramework):
self.log.info("Test -stdinrpcpass option")
assert_equal(0, self.nodes[0].cli('-rpcuser=%s' % user, '-stdinrpcpass', input=password).getblockcount())
- assert_raises_process_error(1, "incorrect rpcuser or rpcpassword", self.nodes[0].cli('-rpcuser=%s' % user, '-stdinrpcpass', input="foo").echo)
+ assert_raises_process_error(1, "Incorrect rpcuser or rpcpassword", self.nodes[0].cli('-rpcuser=%s' % user, '-stdinrpcpass', input="foo").echo)
self.log.info("Test -stdin and -stdinrpcpass")
assert_equal(["foo", "bar"], self.nodes[0].cli('-rpcuser=%s' % user, '-stdin', '-stdinrpcpass', input=password + "\nfoo\nbar").echo())
- assert_raises_process_error(1, "incorrect rpcuser or rpcpassword", self.nodes[0].cli('-rpcuser=%s' % user, '-stdin', '-stdinrpcpass', input="foo").echo)
+ assert_raises_process_error(1, "Incorrect rpcuser or rpcpassword", self.nodes[0].cli('-rpcuser=%s' % user, '-stdin', '-stdinrpcpass', input="foo").echo)
+
+ self.log.info("Test connecting to a non-existing server")
+ assert_raises_process_error(1, "Could not connect to the server", self.nodes[0].cli('-rpcport=1').echo)
+
+ self.log.info("Test connecting with non-existing RPC cookie file")
+ assert_raises_process_error(1, "Could not locate RPC credentials", self.nodes[0].cli('-rpccookiefile=does-not-exist', '-rpcpassword=').echo)
self.log.info("Make sure that -getinfo with arguments fails")
assert_raises_process_error(1, "-getinfo takes no arguments", self.nodes[0].cli('-getinfo').help)
diff --git a/test/functional/interface_rest.py b/test/functional/interface_rest.py
index 9006e27cbe..a48939d2e0 100755
--- a/test/functional/interface_rest.py
+++ b/test/functional/interface_rest.py
@@ -4,323 +4,297 @@
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the REST API."""
-from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import *
-from struct import *
+import binascii
+from decimal import Decimal
+from enum import Enum
from io import BytesIO
-from codecs import encode
+import json
+from struct import pack, unpack
import http.client
import urllib.parse
-def deser_uint256(f):
- r = 0
- for i in range(8):
- t = unpack(b"<I", f.read(4))[0]
- r += t << (i * 32)
- return r
-
-#allows simple http get calls
-def http_get_call(host, port, path, response_object = 0):
- conn = http.client.HTTPConnection(host, port)
- conn.request('GET', path)
-
- if response_object:
- return conn.getresponse()
-
- return conn.getresponse().read().decode('utf-8')
-
-#allows simple http post calls with a request body
-def http_post_call(host, port, path, requestdata = '', response_object = 0):
- conn = http.client.HTTPConnection(host, port)
- conn.request('POST', path, requestdata)
-
- if response_object:
- return conn.getresponse()
-
- return conn.getresponse().read()
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import (
+ assert_equal,
+ assert_greater_than,
+ assert_greater_than_or_equal,
+ hex_str_to_bytes,
+)
+
+class ReqType(Enum):
+ JSON = 1
+ BIN = 2
+ HEX = 3
+
+class RetType(Enum):
+ OBJ = 1
+ BYTES = 2
+ JSON = 3
+
+def filter_output_indices_by_value(vouts, value):
+ for vout in vouts:
+ if vout['value'] == value:
+ yield vout['n']
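# The generator above is consumed later with single-element tuple unpacking, which also
# asserts that exactly one output has the requested value; for example:
_example_vouts = [{'value': Decimal('49.9'), 'n': 0}, {'value': Decimal('0.1'), 'n': 1}]
_n, = filter_output_indices_by_value(_example_vouts, Decimal('0.1'))
assert _n == 1   # unpacking raises ValueError if zero or more than one output matches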
class RESTTest (BitcoinTestFramework):
- FORMAT_SEPARATOR = "."
-
def set_test_params(self):
self.setup_clean_chain = True
- self.num_nodes = 3
-
- def setup_network(self, split=False):
- super().setup_network()
- connect_nodes_bi(self.nodes, 0, 2)
+ self.num_nodes = 2
+ self.extra_args = [["-rest"], []]
+
+ def test_rest_request(self, uri, http_method='GET', req_type=ReqType.JSON, body='', status=200, ret_type=RetType.JSON):
+ rest_uri = '/rest' + uri
+ if req_type == ReqType.JSON:
+ rest_uri += '.json'
+ elif req_type == ReqType.BIN:
+ rest_uri += '.bin'
+ elif req_type == ReqType.HEX:
+ rest_uri += '.hex'
+
+ conn = http.client.HTTPConnection(self.url.hostname, self.url.port)
+ self.log.debug('%s %s %s', http_method, rest_uri, body)
+ if http_method == 'GET':
+ conn.request('GET', rest_uri)
+ elif http_method == 'POST':
+ conn.request('POST', rest_uri, body)
+ resp = conn.getresponse()
+
+ assert_equal(resp.status, status)
+
+ if ret_type == RetType.OBJ:
+ return resp
+ elif ret_type == RetType.BYTES:
+ return resp.read()
+ elif ret_type == RetType.JSON:
+ return json.loads(resp.read().decode('utf-8'), parse_float=Decimal)
def run_test(self):
- url = urllib.parse.urlparse(self.nodes[0].url)
- self.log.info("Mining blocks...")
+ self.url = urllib.parse.urlparse(self.nodes[0].url)
+ self.log.info("Mine blocks and send Bitcoin to node 1")
+
+ # Random address so node1's balance doesn't increase
+ not_related_address = "2MxqoHEdNQTyYeX1mHcbrrpzgojbosTpCvJ"
self.nodes[0].generate(1)
self.sync_all()
- self.nodes[2].generate(100)
+ self.nodes[1].generatetoaddress(100, not_related_address)
self.sync_all()
assert_equal(self.nodes[0].getbalance(), 50)
txid = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.1)
self.sync_all()
- self.nodes[2].generate(1)
+ self.nodes[1].generatetoaddress(1, not_related_address)
self.sync_all()
bb_hash = self.nodes[0].getbestblockhash()
- assert_equal(self.nodes[1].getbalance(), Decimal("0.1")) #balance now should be 0.1 on node 1
+ assert_equal(self.nodes[1].getbalance(), Decimal("0.1"))
+
+ self.log.info("Load the transaction using the /tx URI")
- # load the latest 0.1 tx over the REST API
- json_string = http_get_call(url.hostname, url.port, '/rest/tx/'+txid+self.FORMAT_SEPARATOR+"json")
- json_obj = json.loads(json_string)
- vintx = json_obj['vin'][0]['txid'] # get the vin to later check for utxo (should be spent by then)
+ json_obj = self.test_rest_request("/tx/{}".format(txid))
+ spent = (json_obj['vin'][0]['txid'], json_obj['vin'][0]['vout']) # get the vin to later check for utxo (should be spent by then)
# get n of 0.1 outpoint
- n = 0
- for vout in json_obj['vout']:
- if vout['value'] == 0.1:
- n = vout['n']
+ n, = filter_output_indices_by_value(json_obj['vout'], Decimal('0.1'))
+ spending = (txid, n)
+ self.log.info("Query an unspent TXO using the /getutxos URI")
- #######################################
- # GETUTXOS: query an unspent outpoint #
- #######################################
- json_request = '/checkmempool/'+txid+'-'+str(n)
- json_string = http_get_call(url.hostname, url.port, '/rest/getutxos'+json_request+self.FORMAT_SEPARATOR+'json')
- json_obj = json.loads(json_string)
+ json_obj = self.test_rest_request("/getutxos/{}-{}".format(*spending))
- #check chainTip response
+ # Check chainTip response
assert_equal(json_obj['chaintipHash'], bb_hash)
- #make sure there is one utxo
+ # Make sure there is one utxo
assert_equal(len(json_obj['utxos']), 1)
- assert_equal(json_obj['utxos'][0]['value'], 0.1)
+ assert_equal(json_obj['utxos'][0]['value'], Decimal('0.1'))
+ self.log.info("Query a spent TXO using the /getutxos URI")
- #################################################
- # GETUTXOS: now query an already spent outpoint #
- #################################################
- json_request = '/checkmempool/'+vintx+'-0'
- json_string = http_get_call(url.hostname, url.port, '/rest/getutxos'+json_request+self.FORMAT_SEPARATOR+'json')
- json_obj = json.loads(json_string)
+ json_obj = self.test_rest_request("/getutxos/{}-{}".format(*spent))
- #check chainTip response
+ # Check chainTip response
assert_equal(json_obj['chaintipHash'], bb_hash)
- #make sure there is no utox in the response because this oupoint has been spent
+ # Make sure there is no utxo in the response because this outpoint has been spent
assert_equal(len(json_obj['utxos']), 0)
- #check bitmap
+ # Check bitmap
assert_equal(json_obj['bitmap'], "0")
+ self.log.info("Query two TXOs using the /getutxos URI")
+
+ json_obj = self.test_rest_request("/getutxos/{}-{}/{}-{}".format(*(spending + spent)))
- ##################################################
- # GETUTXOS: now check both with the same request #
- ##################################################
- json_request = '/checkmempool/'+txid+'-'+str(n)+'/'+vintx+'-0'
- json_string = http_get_call(url.hostname, url.port, '/rest/getutxos'+json_request+self.FORMAT_SEPARATOR+'json')
- json_obj = json.loads(json_string)
assert_equal(len(json_obj['utxos']), 1)
assert_equal(json_obj['bitmap'], "10")
- #test binary response
- bb_hash = self.nodes[0].getbestblockhash()
-
- binaryRequest = b'\x01\x02'
- binaryRequest += hex_str_to_bytes(txid)
- binaryRequest += pack("i", n)
- binaryRequest += hex_str_to_bytes(vintx)
- binaryRequest += pack("i", 0)
+ self.log.info("Query the TXOs using the /getutxos URI with a binary response")
- bin_response = http_post_call(url.hostname, url.port, '/rest/getutxos'+self.FORMAT_SEPARATOR+'bin', binaryRequest)
- output = BytesIO()
- output.write(bin_response)
- output.seek(0)
- chainHeight = unpack("i", output.read(4))[0]
- hashFromBinResponse = hex(deser_uint256(output))[2:].zfill(64)
+ bin_request = b'\x01\x02'
+ for txid, n in [spending, spent]:
+ bin_request += hex_str_to_bytes(txid)
+ bin_request += pack("i", n)
- assert_equal(bb_hash, hashFromBinResponse) #check if getutxo's chaintip during calculation was fine
- assert_equal(chainHeight, 102) #chain height must be 102
+ bin_response = self.test_rest_request("/getutxos", http_method='POST', req_type=ReqType.BIN, body=bin_request, ret_type=RetType.BYTES)
+ output = BytesIO(bin_response)
+ chain_height, = unpack("i", output.read(4))
+ response_hash = binascii.hexlify(output.read(32)[::-1]).decode('ascii')
+        assert_equal(bb_hash, response_hash)  # the chain tip reported by getutxos must match the current best block hash
+ assert_equal(chain_height, 102) # chain height must be 102
- ############################
- # GETUTXOS: mempool checks #
- ############################
+ self.log.info("Test the /getutxos URI with and without /checkmempool")
+ # Create a transaction, check that it's found with /checkmempool, but
+ # not found without. Then confirm the transaction and check that it's
+ # found with or without /checkmempool.
# do a tx and don't sync
txid = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.1)
- json_string = http_get_call(url.hostname, url.port, '/rest/tx/'+txid+self.FORMAT_SEPARATOR+"json")
- json_obj = json.loads(json_string)
- vintx = json_obj['vin'][0]['txid'] # get the vin to later check for utxo (should be spent by then)
+ json_obj = self.test_rest_request("/tx/{}".format(txid))
+ # get the spent output to later check for utxo (should be spent by then)
+ spent = (json_obj['vin'][0]['txid'], json_obj['vin'][0]['vout'])
# get n of 0.1 outpoint
- n = 0
- for vout in json_obj['vout']:
- if vout['value'] == 0.1:
- n = vout['n']
-
- json_request = '/'+txid+'-'+str(n)
- json_string = http_get_call(url.hostname, url.port, '/rest/getutxos'+json_request+self.FORMAT_SEPARATOR+'json')
- json_obj = json.loads(json_string)
- assert_equal(len(json_obj['utxos']), 0) #there should be an outpoint because it has just added to the mempool
-
- json_request = '/checkmempool/'+txid+'-'+str(n)
- json_string = http_get_call(url.hostname, url.port, '/rest/getutxos'+json_request+self.FORMAT_SEPARATOR+'json')
- json_obj = json.loads(json_string)
- assert_equal(len(json_obj['utxos']), 1) #there should be an outpoint because it has just added to the mempool
-
- #do some invalid requests
- json_request = '{"checkmempool'
- response = http_post_call(url.hostname, url.port, '/rest/getutxos'+self.FORMAT_SEPARATOR+'json', json_request, True)
- assert_equal(response.status, 400) #must be a 400 because we send an invalid json request
-
- json_request = '{"checkmempool'
- response = http_post_call(url.hostname, url.port, '/rest/getutxos'+self.FORMAT_SEPARATOR+'bin', json_request, True)
- assert_equal(response.status, 400) #must be a 400 because we send an invalid bin request
-
- response = http_post_call(url.hostname, url.port, '/rest/getutxos/checkmempool'+self.FORMAT_SEPARATOR+'bin', '', True)
- assert_equal(response.status, 400) #must be a 400 because we send an invalid bin request
-
- #test limits
- json_request = '/checkmempool/'
- for x in range(0, 20):
- json_request += txid+'-'+str(n)+'/'
- json_request = json_request.rstrip("/")
- response = http_post_call(url.hostname, url.port, '/rest/getutxos'+json_request+self.FORMAT_SEPARATOR+'json', '', True)
- assert_equal(response.status, 400) #must be a 400 because we exceeding the limits
-
- json_request = '/checkmempool/'
- for x in range(0, 15):
- json_request += txid+'-'+str(n)+'/'
- json_request = json_request.rstrip("/")
- response = http_post_call(url.hostname, url.port, '/rest/getutxos'+json_request+self.FORMAT_SEPARATOR+'json', '', True)
- assert_equal(response.status, 200) #must be a 200 because we are within the limits
-
- self.nodes[0].generate(1) #generate block to not affect upcoming tests
+ n, = filter_output_indices_by_value(json_obj['vout'], Decimal('0.1'))
+ spending = (txid, n)
+
+ json_obj = self.test_rest_request("/getutxos/{}-{}".format(*spending))
+ assert_equal(len(json_obj['utxos']), 0)
+
+ json_obj = self.test_rest_request("/getutxos/checkmempool/{}-{}".format(*spending))
+ assert_equal(len(json_obj['utxos']), 1)
+
+ json_obj = self.test_rest_request("/getutxos/{}-{}".format(*spent))
+ assert_equal(len(json_obj['utxos']), 1)
+
+ json_obj = self.test_rest_request("/getutxos/checkmempool/{}-{}".format(*spent))
+ assert_equal(len(json_obj['utxos']), 0)
+
+ self.nodes[0].generate(1)
self.sync_all()
- ################
- # /rest/block/ #
- ################
+ json_obj = self.test_rest_request("/getutxos/{}-{}".format(*spending))
+ assert_equal(len(json_obj['utxos']), 1)
+
+ json_obj = self.test_rest_request("/getutxos/checkmempool/{}-{}".format(*spending))
+ assert_equal(len(json_obj['utxos']), 1)
+
+ # Do some invalid requests
+ self.test_rest_request("/getutxos", http_method='POST', req_type=ReqType.JSON, body='{"checkmempool', status=400, ret_type=RetType.OBJ)
+ self.test_rest_request("/getutxos", http_method='POST', req_type=ReqType.BIN, body='{"checkmempool', status=400, ret_type=RetType.OBJ)
+ self.test_rest_request("/getutxos/checkmempool", http_method='POST', req_type=ReqType.JSON, status=400, ret_type=RetType.OBJ)
+
+ # Test limits
+ long_uri = '/'.join(["{}-{}".format(txid, n_) for n_ in range(20)])
+ self.test_rest_request("/getutxos/checkmempool/{}".format(long_uri), http_method='POST', status=400, ret_type=RetType.OBJ)
- # check binary format
- response = http_get_call(url.hostname, url.port, '/rest/block/'+bb_hash+self.FORMAT_SEPARATOR+"bin", True)
- assert_equal(response.status, 200)
+ long_uri = '/'.join(['{}-{}'.format(txid, n_) for n_ in range(15)])
+ self.test_rest_request("/getutxos/checkmempool/{}".format(long_uri), http_method='POST', status=200)
+
+ self.nodes[0].generate(1) # generate block to not affect upcoming tests
+ self.sync_all()
+
+ self.log.info("Test the /block and /headers URIs")
+ bb_hash = self.nodes[0].getbestblockhash()
+
+ # Check binary format
+ response = self.test_rest_request("/block/{}".format(bb_hash), req_type=ReqType.BIN, ret_type=RetType.OBJ)
assert_greater_than(int(response.getheader('content-length')), 80)
- response_str = response.read()
+ response_bytes = response.read()
- # compare with block header
- response_header = http_get_call(url.hostname, url.port, '/rest/headers/1/'+bb_hash+self.FORMAT_SEPARATOR+"bin", True)
- assert_equal(response_header.status, 200)
+ # Compare with block header
+ response_header = self.test_rest_request("/headers/1/{}".format(bb_hash), req_type=ReqType.BIN, ret_type=RetType.OBJ)
assert_equal(int(response_header.getheader('content-length')), 80)
- response_header_str = response_header.read()
- assert_equal(response_str[0:80], response_header_str)
+ response_header_bytes = response_header.read()
+ assert_equal(response_bytes[:80], response_header_bytes)
- # check block hex format
- response_hex = http_get_call(url.hostname, url.port, '/rest/block/'+bb_hash+self.FORMAT_SEPARATOR+"hex", True)
- assert_equal(response_hex.status, 200)
+ # Check block hex format
+ response_hex = self.test_rest_request("/block/{}".format(bb_hash), req_type=ReqType.HEX, ret_type=RetType.OBJ)
assert_greater_than(int(response_hex.getheader('content-length')), 160)
- response_hex_str = response_hex.read()
- assert_equal(encode(response_str, "hex_codec")[0:160], response_hex_str[0:160])
+ response_hex_bytes = response_hex.read().strip(b'\n')
+ assert_equal(binascii.hexlify(response_bytes), response_hex_bytes)
- # compare with hex block header
- response_header_hex = http_get_call(url.hostname, url.port, '/rest/headers/1/'+bb_hash+self.FORMAT_SEPARATOR+"hex", True)
- assert_equal(response_header_hex.status, 200)
+ # Compare with hex block header
+ response_header_hex = self.test_rest_request("/headers/1/{}".format(bb_hash), req_type=ReqType.HEX, ret_type=RetType.OBJ)
assert_greater_than(int(response_header_hex.getheader('content-length')), 160)
- response_header_hex_str = response_header_hex.read()
- assert_equal(response_hex_str[0:160], response_header_hex_str[0:160])
- assert_equal(encode(response_header_str, "hex_codec")[0:160], response_header_hex_str[0:160])
+ response_header_hex_bytes = response_header_hex.read(160)
+ assert_equal(binascii.hexlify(response_bytes[:80]), response_header_hex_bytes)
- # check json format
- block_json_string = http_get_call(url.hostname, url.port, '/rest/block/'+bb_hash+self.FORMAT_SEPARATOR+'json')
- block_json_obj = json.loads(block_json_string)
+ # Check json format
+ block_json_obj = self.test_rest_request("/block/{}".format(bb_hash))
assert_equal(block_json_obj['hash'], bb_hash)
- # compare with json block header
- response_header_json = http_get_call(url.hostname, url.port, '/rest/headers/1/'+bb_hash+self.FORMAT_SEPARATOR+"json", True)
- assert_equal(response_header_json.status, 200)
- response_header_json_str = response_header_json.read().decode('utf-8')
- json_obj = json.loads(response_header_json_str, parse_float=Decimal)
- assert_equal(len(json_obj), 1) #ensure that there is one header in the json response
- assert_equal(json_obj[0]['hash'], bb_hash) #request/response hash should be the same
+ # Compare with json block header
+ json_obj = self.test_rest_request("/headers/1/{}".format(bb_hash))
+ assert_equal(len(json_obj), 1) # ensure that there is one header in the json response
+ assert_equal(json_obj[0]['hash'], bb_hash) # request/response hash should be the same
- #compare with normal RPC block response
+ # Compare with normal RPC block response
rpc_block_json = self.nodes[0].getblock(bb_hash)
- assert_equal(json_obj[0]['hash'], rpc_block_json['hash'])
- assert_equal(json_obj[0]['confirmations'], rpc_block_json['confirmations'])
- assert_equal(json_obj[0]['height'], rpc_block_json['height'])
- assert_equal(json_obj[0]['version'], rpc_block_json['version'])
- assert_equal(json_obj[0]['merkleroot'], rpc_block_json['merkleroot'])
- assert_equal(json_obj[0]['time'], rpc_block_json['time'])
- assert_equal(json_obj[0]['nonce'], rpc_block_json['nonce'])
- assert_equal(json_obj[0]['bits'], rpc_block_json['bits'])
- assert_equal(json_obj[0]['difficulty'], rpc_block_json['difficulty'])
- assert_equal(json_obj[0]['chainwork'], rpc_block_json['chainwork'])
- assert_equal(json_obj[0]['previousblockhash'], rpc_block_json['previousblockhash'])
-
- #see if we can get 5 headers in one response
+ for key in ['hash', 'confirmations', 'height', 'version', 'merkleroot', 'time', 'nonce', 'bits', 'difficulty', 'chainwork', 'previousblockhash']:
+ assert_equal(json_obj[0][key], rpc_block_json[key])
+
+ # See if we can get 5 headers in one response
self.nodes[1].generate(5)
self.sync_all()
- response_header_json = http_get_call(url.hostname, url.port, '/rest/headers/5/'+bb_hash+self.FORMAT_SEPARATOR+"json", True)
- assert_equal(response_header_json.status, 200)
- response_header_json_str = response_header_json.read().decode('utf-8')
- json_obj = json.loads(response_header_json_str)
- assert_equal(len(json_obj), 5) #now we should have 5 header objects
+ json_obj = self.test_rest_request("/headers/5/{}".format(bb_hash))
+ assert_equal(len(json_obj), 5) # now we should have 5 header objects
+
+ self.log.info("Test the /tx URI")
- # do tx test
tx_hash = block_json_obj['tx'][0]['txid']
- json_string = http_get_call(url.hostname, url.port, '/rest/tx/'+tx_hash+self.FORMAT_SEPARATOR+"json")
- json_obj = json.loads(json_string)
+ json_obj = self.test_rest_request("/tx/{}".format(tx_hash))
assert_equal(json_obj['txid'], tx_hash)
- # check hex format response
- hex_string = http_get_call(url.hostname, url.port, '/rest/tx/'+tx_hash+self.FORMAT_SEPARATOR+"hex", True)
- assert_equal(hex_string.status, 200)
- assert_greater_than(int(response.getheader('content-length')), 10)
+ # Check hex format response
+ hex_response = self.test_rest_request("/tx/{}".format(tx_hash), req_type=ReqType.HEX, ret_type=RetType.OBJ)
+ assert_greater_than_or_equal(int(hex_response.getheader('content-length')),
+ json_obj['size']*2)
+ self.log.info("Test tx inclusion in the /mempool and /block URIs")
- # check block tx details
- # let's make 3 tx and mine them on node 1
+ # Make 3 tx and mine them on node 1
txs = []
- txs.append(self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 11))
- txs.append(self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 11))
- txs.append(self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 11))
+ txs.append(self.nodes[0].sendtoaddress(not_related_address, 11))
+ txs.append(self.nodes[0].sendtoaddress(not_related_address, 11))
+ txs.append(self.nodes[0].sendtoaddress(not_related_address, 11))
self.sync_all()
- # check that there are exactly 3 transactions in the TX memory pool before generating the block
- json_string = http_get_call(url.hostname, url.port, '/rest/mempool/info'+self.FORMAT_SEPARATOR+'json')
- json_obj = json.loads(json_string)
+ # Check that there are exactly 3 transactions in the TX memory pool before generating the block
+ json_obj = self.test_rest_request("/mempool/info")
assert_equal(json_obj['size'], 3)
# the size of the memory pool should be greater than 3x ~100 bytes
assert_greater_than(json_obj['bytes'], 300)
- # check that there are our submitted transactions in the TX memory pool
- json_string = http_get_call(url.hostname, url.port, '/rest/mempool/contents'+self.FORMAT_SEPARATOR+'json')
- json_obj = json.loads(json_string)
- for tx in txs:
- assert_equal(tx in json_obj, True)
+ # Check that there are our submitted transactions in the TX memory pool
+ json_obj = self.test_rest_request("/mempool/contents")
+ for i, tx in enumerate(txs):
+ assert tx in json_obj
+ assert_equal(json_obj[tx]['spentby'], txs[i + 1:i + 2])
+ assert_equal(json_obj[tx]['depends'], txs[i - 1:i])
- # now mine the transactions
+ # Now mine the transactions
newblockhash = self.nodes[1].generate(1)
self.sync_all()
- #check if the 3 tx show up in the new block
- json_string = http_get_call(url.hostname, url.port, '/rest/block/'+newblockhash[0]+self.FORMAT_SEPARATOR+'json')
- json_obj = json.loads(json_string)
- for tx in json_obj['tx']:
- if not 'coinbase' in tx['vin'][0]: #exclude coinbase
- assert_equal(tx['txid'] in txs, True)
+ # Check if the 3 tx show up in the new block
+ json_obj = self.test_rest_request("/block/{}".format(newblockhash[0]))
+ non_coinbase_txs = {tx['txid'] for tx in json_obj['tx']
+ if 'coinbase' not in tx['vin'][0]}
+ assert_equal(non_coinbase_txs, set(txs))
- #check the same but without tx details
- json_string = http_get_call(url.hostname, url.port, '/rest/block/notxdetails/'+newblockhash[0]+self.FORMAT_SEPARATOR+'json')
- json_obj = json.loads(json_string)
+ # Check the same but without tx details
+ json_obj = self.test_rest_request("/block/notxdetails/{}".format(newblockhash[0]))
for tx in txs:
- assert_equal(tx in json_obj['tx'], True)
+ assert tx in json_obj['tx']
+
+ self.log.info("Test the /chaininfo URI")
- #test rest bestblock
bb_hash = self.nodes[0].getbestblockhash()
- json_string = http_get_call(url.hostname, url.port, '/rest/chaininfo.json')
- json_obj = json.loads(json_string)
+ json_obj = self.test_rest_request("/chaininfo")
assert_equal(json_obj['bestblockhash'], bb_hash)
if __name__ == '__main__':
- RESTTest ().main ()
+ RESTTest().main()
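A note on the binary /getutxos exchange above: each outpoint in the POST body is the raw txid bytes followed by a 32-bit vout index packed with struct, and the reply begins with a 4-byte chain height followed by the 32-byte tip hash in reversed byte order. Below is a minimal standalone sketch of that header decoding, mirroring the test; parse_getutxos_bin_header is a hypothetical helper name and only the two fields the test checks are parsed.

    from io import BytesIO
    from struct import unpack
    import binascii

    def parse_getutxos_bin_header(bin_response):
        """Return (chain_height, tip_hash_hex) from a binary getutxos reply."""
        stream = BytesIO(bin_response)
        chain_height, = unpack("i", stream.read(4))  # same packing as struct.pack("i", ...) in the request
        tip_hash_hex = binascii.hexlify(stream.read(32)[::-1]).decode('ascii')  # byte-reversed vs RPC hex
        return chain_height, tip_hash_hex

Applied to the response in the test above, this would yield a chain height of 102 and a tip hash equal to getbestblockhash().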
diff --git a/test/functional/interface_zmq.py b/test/functional/interface_zmq.py
index 86ccea4394..af2e752b7a 100755
--- a/test/functional/interface_zmq.py
+++ b/test/functional/interface_zmq.py
@@ -4,7 +4,6 @@
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the ZMQ notification interface."""
import configparser
-import os
import struct
from test_framework.test_framework import BitcoinTestFramework, SkipTest
@@ -47,8 +46,6 @@ class ZMQTest (BitcoinTestFramework):
# Check that bitcoin has been built with ZMQ enabled.
config = configparser.ConfigParser()
- if not self.options.configfile:
- self.options.configfile = os.path.abspath(os.path.join(os.path.dirname(__file__), "../config.ini"))
config.read_file(open(self.options.configfile))
if not config["components"].getboolean("ENABLE_ZMQ"):
diff --git a/test/functional/mempool_accept.py b/test/functional/mempool_accept.py
new file mode 100755
index 0000000000..7cdb24c6a5
--- /dev/null
+++ b/test/functional/mempool_accept.py
@@ -0,0 +1,293 @@
+#!/usr/bin/env python3
+# Copyright (c) 2017 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test mempool acceptance of raw transactions."""
+
+from io import BytesIO
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.messages import (
+ BIP125_SEQUENCE_NUMBER,
+ COIN,
+ COutPoint,
+ CTransaction,
+ CTxOut,
+ MAX_BLOCK_BASE_SIZE,
+)
+from test_framework.script import (
+ hash160,
+ CScript,
+ OP_0,
+ OP_EQUAL,
+ OP_HASH160,
+ OP_RETURN,
+)
+from test_framework.util import (
+ assert_equal,
+ assert_raises_rpc_error,
+ bytes_to_hex_str,
+ hex_str_to_bytes,
+ wait_until,
+)
+
+
+class MempoolAcceptanceTest(BitcoinTestFramework):
+ def set_test_params(self):
+ self.num_nodes = 1
+ self.extra_args = [[
+ '-checkmempool',
+ '-txindex',
+ '-reindex', # Need reindex for txindex
+ '-acceptnonstdtxn=0', # Try to mimic main-net
+ ]] * self.num_nodes
+
+ def check_mempool_result(self, result_expected, *args, **kwargs):
+ """Wrapper to check result of testmempoolaccept on node_0's mempool"""
+ result_test = self.nodes[0].testmempoolaccept(*args, **kwargs)
+ assert_equal(result_expected, result_test)
+ assert_equal(self.nodes[0].getmempoolinfo()['size'], self.mempool_size) # Must not change mempool state
+
+ def run_test(self):
+ node = self.nodes[0]
+
+ self.log.info('Start with empty mempool, and 200 blocks')
+ self.mempool_size = 0
+ wait_until(lambda: node.getblockcount() == 200)
+ assert_equal(node.getmempoolinfo()['size'], self.mempool_size)
+
+ self.log.info('Should not accept garbage to testmempoolaccept')
+ assert_raises_rpc_error(-3, 'Expected type array, got string', lambda: node.testmempoolaccept(rawtxs='ff00baar'))
+ assert_raises_rpc_error(-8, 'Array must contain exactly one raw transaction for now', lambda: node.testmempoolaccept(rawtxs=['ff00baar', 'ff22']))
+ assert_raises_rpc_error(-22, 'TX decode failed', lambda: node.testmempoolaccept(rawtxs=['ff00baar']))
+
+ self.log.info('A transaction already in the blockchain')
+ coin = node.listunspent()[0] # Pick a random coin(base) to spend
+ raw_tx_in_block = node.signrawtransactionwithwallet(node.createrawtransaction(
+ inputs=[{'txid': coin['txid'], 'vout': coin['vout']}],
+ outputs=[{node.getnewaddress(): 0.3}, {node.getnewaddress(): 49}],
+ ))['hex']
+ txid_in_block = node.sendrawtransaction(hexstring=raw_tx_in_block, allowhighfees=True)
+ node.generate(1)
+ self.check_mempool_result(
+ result_expected=[{'txid': txid_in_block, 'allowed': False, 'reject-reason': '18: txn-already-known'}],
+ rawtxs=[raw_tx_in_block],
+ )
+
+ self.log.info('A transaction not in the mempool')
+ fee = 0.00000700
+ raw_tx_0 = node.signrawtransactionwithwallet(node.createrawtransaction(
+ inputs=[{"txid": txid_in_block, "vout": 0, "sequence": BIP125_SEQUENCE_NUMBER}], # RBF is used later
+ outputs=[{node.getnewaddress(): 0.3 - fee}],
+ ))['hex']
+ tx = CTransaction()
+ tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0)))
+ txid_0 = tx.rehash()
+ self.check_mempool_result(
+ result_expected=[{'txid': txid_0, 'allowed': True}],
+ rawtxs=[raw_tx_0],
+ )
+
+ self.log.info('A transaction in the mempool')
+ node.sendrawtransaction(hexstring=raw_tx_0)
+ self.mempool_size = 1
+ self.check_mempool_result(
+ result_expected=[{'txid': txid_0, 'allowed': False, 'reject-reason': '18: txn-already-in-mempool'}],
+ rawtxs=[raw_tx_0],
+ )
+
+ self.log.info('A transaction that replaces a mempool transaction')
+ tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0)))
+ tx.vout[0].nValue -= int(fee * COIN) # Double the fee
+ tx.vin[0].nSequence = BIP125_SEQUENCE_NUMBER + 1 # Now, opt out of RBF
+ raw_tx_0 = node.signrawtransactionwithwallet(bytes_to_hex_str(tx.serialize()))['hex']
+ tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0)))
+ txid_0 = tx.rehash()
+ self.check_mempool_result(
+ result_expected=[{'txid': txid_0, 'allowed': True}],
+ rawtxs=[raw_tx_0],
+ )
+
+ self.log.info('A transaction that conflicts with an unconfirmed tx')
+ # Send the transaction that replaces the mempool transaction and opts out of replaceability
+ node.sendrawtransaction(hexstring=bytes_to_hex_str(tx.serialize()), allowhighfees=True)
+ # take original raw_tx_0
+ tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0)))
+ tx.vout[0].nValue -= int(4 * fee * COIN) # Set more fee
+ # skip re-signing the tx
+ self.check_mempool_result(
+ result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '18: txn-mempool-conflict'}],
+ rawtxs=[bytes_to_hex_str(tx.serialize())],
+ allowhighfees=True,
+ )
+
+ self.log.info('A transaction with missing inputs, that never existed')
+ tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0)))
+ tx.vin[0].prevout = COutPoint(hash=int('ff' * 32, 16), n=14)
+ # skip re-signing the tx
+ self.check_mempool_result(
+ result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'missing-inputs'}],
+ rawtxs=[bytes_to_hex_str(tx.serialize())],
+ )
+
+ self.log.info('A transaction with missing inputs, that existed once in the past')
+ tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_0)))
+ tx.vin[0].prevout.n = 1 # Set vout to 1, to spend the other outpoint (49 coins) of the in-chain-tx we want to double spend
+ raw_tx_1 = node.signrawtransactionwithwallet(bytes_to_hex_str(tx.serialize()))['hex']
+ txid_1 = node.sendrawtransaction(hexstring=raw_tx_1, allowhighfees=True)
+        # Now spend both to "clearly hide" the outputs, i.e. remove the coins from the utxo set by spending them
+ raw_tx_spend_both = node.signrawtransactionwithwallet(node.createrawtransaction(
+ inputs=[
+ {'txid': txid_0, 'vout': 0},
+ {'txid': txid_1, 'vout': 0},
+ ],
+ outputs=[{node.getnewaddress(): 0.1}]
+ ))['hex']
+ txid_spend_both = node.sendrawtransaction(hexstring=raw_tx_spend_both, allowhighfees=True)
+ node.generate(1)
+ self.mempool_size = 0
+ # Now see if we can add the coins back to the utxo set by sending the exact txs again
+ self.check_mempool_result(
+ result_expected=[{'txid': txid_0, 'allowed': False, 'reject-reason': 'missing-inputs'}],
+ rawtxs=[raw_tx_0],
+ )
+ self.check_mempool_result(
+ result_expected=[{'txid': txid_1, 'allowed': False, 'reject-reason': 'missing-inputs'}],
+ rawtxs=[raw_tx_1],
+ )
+
+ self.log.info('Create a signed "reference" tx for later use')
+ raw_tx_reference = node.signrawtransactionwithwallet(node.createrawtransaction(
+ inputs=[{'txid': txid_spend_both, 'vout': 0}],
+ outputs=[{node.getnewaddress(): 0.05}],
+ ))['hex']
+ tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
+        # The reference tx should be valid on its own
+ self.check_mempool_result(
+ result_expected=[{'txid': tx.rehash(), 'allowed': True}],
+ rawtxs=[bytes_to_hex_str(tx.serialize())],
+ )
+
+ self.log.info('A transaction with no outputs')
+ tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
+ tx.vout = []
+ # Skip re-signing the transaction for context independent checks from now on
+ # tx.deserialize(BytesIO(hex_str_to_bytes(node.signrawtransactionwithwallet(bytes_to_hex_str(tx.serialize()))['hex'])))
+ self.check_mempool_result(
+ result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '16: bad-txns-vout-empty'}],
+ rawtxs=[bytes_to_hex_str(tx.serialize())],
+ )
+
+ self.log.info('A really large transaction')
+ tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
+ tx.vin = [tx.vin[0]] * (MAX_BLOCK_BASE_SIZE // len(tx.vin[0].serialize()))
+ self.check_mempool_result(
+ result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '16: bad-txns-oversize'}],
+ rawtxs=[bytes_to_hex_str(tx.serialize())],
+ )
+
+ self.log.info('A transaction with negative output value')
+ tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
+ tx.vout[0].nValue *= -1
+ self.check_mempool_result(
+ result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '16: bad-txns-vout-negative'}],
+ rawtxs=[bytes_to_hex_str(tx.serialize())],
+ )
+
+ self.log.info('A transaction with too large output value')
+ tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
+ tx.vout[0].nValue = 21000000 * COIN + 1
+ self.check_mempool_result(
+ result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '16: bad-txns-vout-toolarge'}],
+ rawtxs=[bytes_to_hex_str(tx.serialize())],
+ )
+
+ self.log.info('A transaction with too large sum of output values')
+ tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
+ tx.vout = [tx.vout[0]] * 2
+ tx.vout[0].nValue = 21000000 * COIN
+ self.check_mempool_result(
+ result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '16: bad-txns-txouttotal-toolarge'}],
+ rawtxs=[bytes_to_hex_str(tx.serialize())],
+ )
+
+ self.log.info('A transaction with duplicate inputs')
+ tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
+ tx.vin = [tx.vin[0]] * 2
+ self.check_mempool_result(
+ result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '16: bad-txns-inputs-duplicate'}],
+ rawtxs=[bytes_to_hex_str(tx.serialize())],
+ )
+
+ self.log.info('A coinbase transaction')
+ # Pick the input of the first tx we signed, so it has to be a coinbase tx
+ raw_tx_coinbase_spent = node.getrawtransaction(txid=node.decoderawtransaction(hexstring=raw_tx_in_block)['vin'][0]['txid'])
+ tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_coinbase_spent)))
+ self.check_mempool_result(
+ result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '16: coinbase'}],
+ rawtxs=[bytes_to_hex_str(tx.serialize())],
+ )
+
+ self.log.info('Some nonstandard transactions')
+ tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
+ tx.nVersion = 3 # A version currently non-standard
+ self.check_mempool_result(
+ result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '64: version'}],
+ rawtxs=[bytes_to_hex_str(tx.serialize())],
+ )
+ tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
+ tx.vout[0].scriptPubKey = CScript([OP_0]) # Some non-standard script
+ self.check_mempool_result(
+ result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '64: scriptpubkey'}],
+ rawtxs=[bytes_to_hex_str(tx.serialize())],
+ )
+ tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
+ tx.vin[0].scriptSig = CScript([OP_HASH160]) # Some not-pushonly scriptSig
+ self.check_mempool_result(
+ result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '64: scriptsig-not-pushonly'}],
+ rawtxs=[bytes_to_hex_str(tx.serialize())],
+ )
+ tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
+ output_p2sh_burn = CTxOut(nValue=540, scriptPubKey=CScript([OP_HASH160, hash160(b'burn'), OP_EQUAL]))
+ num_scripts = 100000 // len(output_p2sh_burn.serialize()) # Use enough outputs to make the tx too large for our policy
+ tx.vout = [output_p2sh_burn] * num_scripts
+ self.check_mempool_result(
+ result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '64: tx-size'}],
+ rawtxs=[bytes_to_hex_str(tx.serialize())],
+ )
+ tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
+ tx.vout[0] = output_p2sh_burn
+ tx.vout[0].nValue -= 1 # Make output smaller, such that it is dust for our policy
+ self.check_mempool_result(
+ result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '64: dust'}],
+ rawtxs=[bytes_to_hex_str(tx.serialize())],
+ )
+ tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
+ tx.vout[0].scriptPubKey = CScript([OP_RETURN, b'\xff'])
+ tx.vout = [tx.vout[0]] * 2
+ self.check_mempool_result(
+ result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '64: multi-op-return'}],
+ rawtxs=[bytes_to_hex_str(tx.serialize())],
+ )
+
+ self.log.info('A timelocked transaction')
+ tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
+ tx.vin[0].nSequence -= 1 # Should be non-max, so locktime is not ignored
+ tx.nLockTime = node.getblockcount() + 1
+ self.check_mempool_result(
+ result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '64: non-final'}],
+ rawtxs=[bytes_to_hex_str(tx.serialize())],
+ )
+
+ self.log.info('A transaction that is locked by BIP68 sequence logic')
+ tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx_reference)))
+ tx.vin[0].nSequence = 2 # We could include it in the second block mined from now, but not the very next one
+ # Can skip re-signing the tx because of early rejection
+ self.check_mempool_result(
+ result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': '64: non-BIP68-final'}],
+ rawtxs=[bytes_to_hex_str(tx.serialize())],
+ allowhighfees=True,
+ )
+
+
+if __name__ == '__main__':
+ MempoolAcceptanceTest().main()
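The replace-by-fee cases above hinge on BIP125 signalling: a transaction signals replaceability when any of its inputs has nSequence below 0xfffffffe, which is why the test first uses BIP125_SEQUENCE_NUMBER (0xfffffffd in test_framework.messages) and later bumps it by one to opt out. A minimal sketch of that predicate under those assumptions; is_bip125_replaceable is a hypothetical helper and covers only direct signalling, not signalling inherited from unconfirmed ancestors.

    BIP125_SEQUENCE_NUMBER = 0xfffffffd  # highest nSequence value that still signals replaceability

    def is_bip125_replaceable(tx):
        """True if the transaction itself signals BIP125 replaceability."""
        return any(txin.nSequence <= BIP125_SEQUENCE_NUMBER for txin in tx.vin)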
diff --git a/test/functional/mempool_limit.py b/test/functional/mempool_limit.py
index 7e01663c96..5382fe439e 100755
--- a/test/functional/mempool_limit.py
+++ b/test/functional/mempool_limit.py
@@ -32,7 +32,7 @@ class MempoolLimitTest(BitcoinTestFramework):
self.nodes[0].settxfee(relayfee) # specifically fund this tx with low fee
txF = self.nodes[0].fundrawtransaction(tx)
self.nodes[0].settxfee(0) # return to automatic fee selection
- txFS = self.nodes[0].signrawtransaction(txF['hex'])
+ txFS = self.nodes[0].signrawtransactionwithwallet(txF['hex'])
txid = self.nodes[0].sendrawtransaction(txFS['hex'])
relayfee = self.nodes[0].getnetworkinfo()['relayfee']
@@ -57,8 +57,8 @@ class MempoolLimitTest(BitcoinTestFramework):
tx = self.nodes[0].createrawtransaction(inputs, outputs)
# specifically fund this tx with a fee < mempoolminfee, >= than minrelaytxfee
txF = self.nodes[0].fundrawtransaction(tx, {'feeRate': relayfee})
- txFS = self.nodes[0].signrawtransaction(txF['hex'])
- assert_raises_rpc_error(-26, "mempool min fee not met, 166 < 411 (code 66)", self.nodes[0].sendrawtransaction, txFS['hex'])
+ txFS = self.nodes[0].signrawtransactionwithwallet(txF['hex'])
+ assert_raises_rpc_error(-26, "mempool min fee not met", self.nodes[0].sendrawtransaction, txFS['hex'])
if __name__ == '__main__':
MempoolLimitTest().main()
diff --git a/test/functional/mempool_packages.py b/test/functional/mempool_packages.py
index a3e872a8c6..79cf159f43 100755
--- a/test/functional/mempool_packages.py
+++ b/test/functional/mempool_packages.py
@@ -25,7 +25,7 @@ class MempoolPackagesTest(BitcoinTestFramework):
for i in range(num_outputs):
outputs[node.getnewaddress()] = send_value
rawtx = node.createrawtransaction(inputs, outputs)
- signedtx = node.signrawtransaction(rawtx)
+ signedtx = node.signrawtransactionwithwallet(rawtx)
txid = node.sendrawtransaction(signedtx['hex'])
fulltx = node.getrawtransaction(txid, 1)
assert(len(fulltx['vout']) == num_outputs) # make sure we didn't generate a change output
@@ -47,7 +47,7 @@ class MempoolPackagesTest(BitcoinTestFramework):
value = sent_value
chain.append(txid)
- # Check mempool has MAX_ANCESTORS transactions in it, and descendant
+ # Check mempool has MAX_ANCESTORS transactions in it, and descendant and ancestor
# count and fees should look correct
mempool = self.nodes[0].getrawmempool(True)
assert_equal(len(mempool), MAX_ANCESTORS)
@@ -55,6 +55,10 @@ class MempoolPackagesTest(BitcoinTestFramework):
descendant_fees = 0
descendant_size = 0
+ ancestor_size = sum([mempool[tx]['size'] for tx in mempool])
+ ancestor_count = MAX_ANCESTORS
+ ancestor_fees = sum([mempool[tx]['fee'] for tx in mempool])
+
descendants = []
ancestors = list(chain)
for x in reversed(chain):
@@ -66,19 +70,51 @@ class MempoolPackagesTest(BitcoinTestFramework):
assert_equal(mempool[x]['descendantcount'], descendant_count)
descendant_fees += mempool[x]['fee']
assert_equal(mempool[x]['modifiedfee'], mempool[x]['fee'])
+ assert_equal(mempool[x]['fees']['base'], mempool[x]['fee'])
+ assert_equal(mempool[x]['fees']['modified'], mempool[x]['modifiedfee'])
assert_equal(mempool[x]['descendantfees'], descendant_fees * COIN)
+ assert_equal(mempool[x]['fees']['descendant'], descendant_fees)
descendant_size += mempool[x]['size']
assert_equal(mempool[x]['descendantsize'], descendant_size)
descendant_count += 1
+ # Check that ancestor calculations are correct
+ assert_equal(mempool[x]['ancestorcount'], ancestor_count)
+ assert_equal(mempool[x]['ancestorfees'], ancestor_fees * COIN)
+ assert_equal(mempool[x]['ancestorsize'], ancestor_size)
+ ancestor_size -= mempool[x]['size']
+ ancestor_fees -= mempool[x]['fee']
+ ancestor_count -= 1
+
+ # Check that parent/child list is correct
+ assert_equal(mempool[x]['spentby'], descendants[-1:])
+ assert_equal(mempool[x]['depends'], ancestors[-2:-1])
+
# Check that getmempooldescendants is correct
assert_equal(sorted(descendants), sorted(self.nodes[0].getmempooldescendants(x)))
+
+ # Check getmempooldescendants verbose output is correct
+ for descendant, dinfo in self.nodes[0].getmempooldescendants(x, True).items():
+ assert_equal(dinfo['depends'], [chain[chain.index(descendant)-1]])
+ if dinfo['descendantcount'] > 1:
+ assert_equal(dinfo['spentby'], [chain[chain.index(descendant)+1]])
+ else:
+ assert_equal(dinfo['spentby'], [])
descendants.append(x)
# Check that getmempoolancestors is correct
ancestors.remove(x)
assert_equal(sorted(ancestors), sorted(self.nodes[0].getmempoolancestors(x)))
+ # Check that getmempoolancestors verbose output is correct
+ for ancestor, ainfo in self.nodes[0].getmempoolancestors(x, True).items():
+ assert_equal(ainfo['spentby'], [chain[chain.index(ancestor)+1]])
+ if ainfo['ancestorcount'] > 1:
+ assert_equal(ainfo['depends'], [chain[chain.index(ancestor)-1]])
+ else:
+ assert_equal(ainfo['depends'], [])
+
+
# Check that getmempoolancestors/getmempooldescendants correctly handle verbose=true
v_ancestors = self.nodes[0].getmempoolancestors(chain[-1], True)
assert_equal(len(v_ancestors), len(chain)-1)
@@ -99,8 +135,9 @@ class MempoolPackagesTest(BitcoinTestFramework):
ancestor_fees = 0
for x in chain:
ancestor_fees += mempool[x]['fee']
+ assert_equal(mempool[x]['fees']['ancestor'], ancestor_fees + Decimal('0.00001'))
assert_equal(mempool[x]['ancestorfees'], ancestor_fees * COIN + 1000)
-
+
# Undo the prioritisetransaction for later tests
self.nodes[0].prioritisetransaction(txid=chain[0], fee_delta=-1000)
@@ -112,6 +149,7 @@ class MempoolPackagesTest(BitcoinTestFramework):
descendant_fees = 0
for x in reversed(chain):
descendant_fees += mempool[x]['fee']
+ assert_equal(mempool[x]['fees']['descendant'], descendant_fees + Decimal('0.00001'))
assert_equal(mempool[x]['descendantfees'], descendant_fees * COIN + 1000)
# Adding one more transaction on to the chain should fail.
@@ -137,7 +175,9 @@ class MempoolPackagesTest(BitcoinTestFramework):
descendant_fees += mempool[x]['fee']
if (x == chain[-1]):
assert_equal(mempool[x]['modifiedfee'], mempool[x]['fee']+satoshi_round(0.00002))
+ assert_equal(mempool[x]['fees']['modified'], mempool[x]['fee']+satoshi_round(0.00002))
assert_equal(mempool[x]['descendantfees'], descendant_fees * COIN + 2000)
+ assert_equal(mempool[x]['fees']['descendant'], descendant_fees+satoshi_round(0.00002))
# TODO: check that node1's mempool is as expected
@@ -149,6 +189,7 @@ class MempoolPackagesTest(BitcoinTestFramework):
vout = utxo[1]['vout']
transaction_package = []
+ tx_children = []
# First create one parent tx with 10 children
(txid, sent_value) = self.chain_transaction(self.nodes[0], txid, vout, value, fee, 10)
parent_transaction = txid
@@ -159,11 +200,17 @@ class MempoolPackagesTest(BitcoinTestFramework):
for i in range(MAX_DESCENDANTS - 1):
utxo = transaction_package.pop(0)
(txid, sent_value) = self.chain_transaction(self.nodes[0], utxo['txid'], utxo['vout'], utxo['amount'], fee, 10)
+            if utxo['txid'] == parent_transaction:
+ tx_children.append(txid)
for j in range(10):
transaction_package.append({'txid': txid, 'vout': j, 'amount': sent_value})
mempool = self.nodes[0].getrawmempool(True)
assert_equal(mempool[parent_transaction]['descendantcount'], MAX_DESCENDANTS)
+ assert_equal(sorted(mempool[parent_transaction]['spentby']), sorted(tx_children))
+
+ for child in tx_children:
+ assert_equal(mempool[child]['depends'], [parent_transaction])
# Sending one more chained transaction will fail
utxo = transaction_package.pop(0)
@@ -205,7 +252,7 @@ class MempoolPackagesTest(BitcoinTestFramework):
for i in range(2):
outputs[self.nodes[0].getnewaddress()] = send_value
rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
- signedtx = self.nodes[0].signrawtransaction(rawtx)
+ signedtx = self.nodes[0].signrawtransactionwithwallet(rawtx)
txid = self.nodes[0].sendrawtransaction(signedtx['hex'])
tx0_id = txid
value = send_value
@@ -229,10 +276,10 @@ class MempoolPackagesTest(BitcoinTestFramework):
inputs = [ {'txid' : tx1_id, 'vout': 0}, {'txid' : txid, 'vout': 0} ]
outputs = { self.nodes[0].getnewaddress() : send_value + value - 4*fee }
rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
- signedtx = self.nodes[0].signrawtransaction(rawtx)
+ signedtx = self.nodes[0].signrawtransactionwithwallet(rawtx)
txid = self.nodes[0].sendrawtransaction(signedtx['hex'])
sync_mempools(self.nodes)
-
+
# Now try to disconnect the tip on each node...
self.nodes[1].invalidateblock(self.nodes[1].getbestblockhash())
self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
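The new ancestor assertions above walk the chain newest-first and peel one transaction off the running totals at each step; equivalently, for a linear chain the i-th transaction counting from the oldest has ancestorcount i and ancestor size/fees equal to the running sums up to and including itself (getrawmempool reports 'ancestorfees' in satoshis, hence the COIN factor in the test). A minimal sketch of that bookkeeping; expected_ancestor_stats is a hypothetical helper, with fees kept in BTC.

    def expected_ancestor_stats(chain, mempool):
        """chain: txids oldest-first; mempool: getrawmempool(True) output."""
        stats = {}
        count, size, fees = 0, 0, 0
        for txid in chain:
            count += 1
            size += mempool[txid]['size']
            fees += mempool[txid]['fee']
            stats[txid] = {'ancestorcount': count, 'ancestorsize': size, 'ancestorfees': fees}
        return stats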
diff --git a/test/functional/mempool_persist.py b/test/functional/mempool_persist.py
index 17f0967219..83dffb0521 100755
--- a/test/functional/mempool_persist.py
+++ b/test/functional/mempool_persist.py
@@ -29,7 +29,7 @@ Test is as follows:
transactions in its mempool. This tests that -persistmempool=0
does not overwrite a previously valid mempool stored on disk.
- Remove node0 mempool.dat and verify savemempool RPC recreates it
- and verify that node1 can load it and has 5 transaction in its
+ and verify that node1 can load it and has 5 transactions in its
mempool.
- Verify that savemempool throws when the RPC is called if
node1 can't write to disk.
@@ -66,7 +66,9 @@ class MempoolPersistTest(BitcoinTestFramework):
self.log.debug("Stop-start the nodes. Verify that node0 has the transactions in its mempool and node1 does not. Verify that node2 calculates its balance correctly after loading wallet transactions.")
self.stop_nodes()
- self.start_node(1) # Give this one a head-start, so we can be "extra-sure" that it didn't load anything later
+ # Give this node a head-start, so we can be "extra-sure" that it didn't load anything later
+ # Also don't store the mempool, to keep the datadir clean
+ self.start_node(1, extra_args=["-persistmempool=0"])
self.start_node(0)
self.start_node(2)
# Give bitcoind a second to reload the mempool
@@ -91,8 +93,8 @@ class MempoolPersistTest(BitcoinTestFramework):
self.start_node(0)
wait_until(lambda: len(self.nodes[0].getrawmempool()) == 5)
- mempooldat0 = os.path.join(self.options.tmpdir, 'node0', 'regtest', 'mempool.dat')
- mempooldat1 = os.path.join(self.options.tmpdir, 'node1', 'regtest', 'mempool.dat')
+ mempooldat0 = os.path.join(self.nodes[0].datadir, 'regtest', 'mempool.dat')
+ mempooldat1 = os.path.join(self.nodes[1].datadir, 'regtest', 'mempool.dat')
self.log.debug("Remove the mempool.dat file. Verify that savemempool to disk via RPC re-creates it")
os.remove(mempooldat0)
self.nodes[0].savemempool()
@@ -105,13 +107,13 @@ class MempoolPersistTest(BitcoinTestFramework):
wait_until(lambda: len(self.nodes[1].getrawmempool()) == 5)
self.log.debug("Prevent bitcoind from writing mempool.dat to disk. Verify that `savemempool` fails")
- # to test the exception we are setting bad permissions on a tmp file called mempool.dat.new
+ # to test the exception we are creating a tmp folder called mempool.dat.new
# which is an implementation detail that could change and break this test
mempooldotnew1 = mempooldat1 + '.new'
- with os.fdopen(os.open(mempooldotnew1, os.O_CREAT, 0o000), 'w'):
- pass
+ os.mkdir(mempooldotnew1)
assert_raises_rpc_error(-1, "Unable to dump mempool to disk", self.nodes[1].savemempool)
- os.remove(mempooldotnew1)
+ os.rmdir(mempooldotnew1)
+
if __name__ == '__main__':
MempoolPersistTest().main()
diff --git a/test/functional/mempool_reorg.py b/test/functional/mempool_reorg.py
index d6bb292a58..eabed5d633 100755
--- a/test/functional/mempool_reorg.py
+++ b/test/functional/mempool_reorg.py
@@ -48,7 +48,7 @@ class MempoolCoinbaseTest(BitcoinTestFramework):
# Set the time lock
timelock_tx = timelock_tx.replace("ffffffff", "11111191", 1)
timelock_tx = timelock_tx[:-8] + hex(self.nodes[0].getblockcount() + 2)[2:] + "000000"
- timelock_tx = self.nodes[0].signrawtransaction(timelock_tx)["hex"]
+ timelock_tx = self.nodes[0].signrawtransactionwithwallet(timelock_tx)["hex"]
# This will raise an exception because the timelock transaction is too immature to spend
assert_raises_rpc_error(-26, "non-final", self.nodes[0].sendrawtransaction, timelock_tx)
diff --git a/test/functional/mining_prioritisetransaction.py b/test/functional/mining_prioritisetransaction.py
index 8cea9c2783..85f1af6682 100755
--- a/test/functional/mining_prioritisetransaction.py
+++ b/test/functional/mining_prioritisetransaction.py
@@ -116,15 +116,15 @@ class PrioritiseTransactionTest(BitcoinTestFramework):
inputs.append({"txid" : utxo["txid"], "vout" : utxo["vout"]})
outputs[self.nodes[0].getnewaddress()] = utxo["amount"]
raw_tx = self.nodes[0].createrawtransaction(inputs, outputs)
- tx_hex = self.nodes[0].signrawtransaction(raw_tx)["hex"]
+ tx_hex = self.nodes[0].signrawtransactionwithwallet(raw_tx)["hex"]
tx_id = self.nodes[0].decoderawtransaction(tx_hex)["txid"]
# This will raise an exception due to min relay fee not being met
- assert_raises_rpc_error(-26, "min relay fee not met (code 66)", self.nodes[0].sendrawtransaction, tx_hex)
+ assert_raises_rpc_error(-26, "min relay fee not met", self.nodes[0].sendrawtransaction, tx_hex)
assert(tx_id not in self.nodes[0].getrawmempool())
# This is a less than 1000-byte transaction, so just set the fee
- # to be the minimum for a 1000 byte transaction and check that it is
+ # to be the minimum for a 1000-byte transaction and check that it is
# accepted.
self.nodes[0].prioritisetransaction(txid=tx_id, fee_delta=int(self.relayfee*COIN))
diff --git a/test/functional/p2p_compactblocks.py b/test/functional/p2p_compactblocks.py
index d9f461a049..1657d97281 100755
--- a/test/functional/p2p_compactblocks.py
+++ b/test/functional/p2p_compactblocks.py
@@ -14,8 +14,8 @@ from test_framework.util import *
from test_framework.blocktools import create_block, create_coinbase, add_witness_commitment
from test_framework.script import CScript, OP_TRUE
-# TestNode: A peer we use to send messages to bitcoind, and store responses.
-class TestNode(P2PInterface):
+# TestP2PConn: A peer we use to send messages to bitcoind, and store responses.
+class TestP2PConn(P2PInterface):
def __init__(self):
super().__init__()
self.last_sendcmpct = []
@@ -548,7 +548,7 @@ class CompactBlocksTest(BitcoinTestFramework):
# Note that it's possible for bitcoind to be smart enough to know we're
# lying, since it could check to see if the shortid matches what we're
# sending, and eg disconnect us for misbehavior. If that behavior
- # change were made, we could just modify this test by having a
+ # change was made, we could just modify this test by having a
# different peer provide the block further down, so that we're still
# verifying that the block isn't marked bad permanently. This is good
# enough for now.
@@ -788,9 +788,9 @@ class CompactBlocksTest(BitcoinTestFramework):
def run_test(self):
# Setup the p2p connections and start up the network thread.
- self.test_node = self.nodes[0].add_p2p_connection(TestNode())
- self.segwit_node = self.nodes[1].add_p2p_connection(TestNode(), services=NODE_NETWORK|NODE_WITNESS)
- self.old_node = self.nodes[1].add_p2p_connection(TestNode(), services=NODE_NETWORK)
+ self.test_node = self.nodes[0].add_p2p_connection(TestP2PConn())
+ self.segwit_node = self.nodes[1].add_p2p_connection(TestP2PConn(), services=NODE_NETWORK|NODE_WITNESS)
+ self.old_node = self.nodes[1].add_p2p_connection(TestP2PConn(), services=NODE_NETWORK)
network_thread_start()
diff --git a/test/functional/p2p_feefilter.py b/test/functional/p2p_feefilter.py
index 47d9c55160..7c954cdca2 100755
--- a/test/functional/p2p_feefilter.py
+++ b/test/functional/p2p_feefilter.py
@@ -22,7 +22,7 @@ def allInvsMatch(invsExpected, testnode):
time.sleep(1)
return False
-class TestNode(P2PInterface):
+class TestP2PConn(P2PInterface):
def __init__(self):
super().__init__()
self.txinvs = []
@@ -48,7 +48,7 @@ class FeeFilterTest(BitcoinTestFramework):
sync_blocks(self.nodes)
# Setup the p2p connections and start up the network thread.
- self.nodes[0].add_p2p_connection(TestNode())
+ self.nodes[0].add_p2p_connection(TestP2PConn())
network_thread_start()
self.nodes[0].p2p.wait_for_verack()
diff --git a/test/functional/p2p_invalid_block.py b/test/functional/p2p_invalid_block.py
index edcade63c1..e1f328ba77 100755
--- a/test/functional/p2p_invalid_block.py
+++ b/test/functional/p2p_invalid_block.py
@@ -10,75 +10,63 @@ In this test we connect to one node over p2p, and test block requests:
3) Invalid block with bad coinbase value should be rejected and not
re-requested.
"""
-
-from test_framework.test_framework import ComparisonTestFramework
-from test_framework.util import *
-from test_framework.comptool import TestManager, TestInstance, RejectResult
-from test_framework.blocktools import *
-from test_framework.mininode import network_thread_start
import copy
-import time
-# Use the ComparisonTestFramework with 1 node: only use --testbinary.
-class InvalidBlockRequestTest(ComparisonTestFramework):
+from test_framework.blocktools import create_block, create_coinbase, create_transaction
+from test_framework.messages import COIN
+from test_framework.mininode import network_thread_start, P2PDataStore
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import assert_equal
- ''' Can either run this test as 1 node with expected answers, or two and compare them.
- Change the "outcome" variable from each TestInstance object to only do the comparison. '''
+class InvalidBlockRequestTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.setup_clean_chain = True
+ self.extra_args = [["-whitelist=127.0.0.1"]]
def run_test(self):
- test = TestManager(self, self.options.tmpdir)
- test.add_all_connections(self.nodes)
- self.tip = None
- self.block_time = None
+ # Add p2p connection to node0
+ node = self.nodes[0] # convenience reference to the node
+ node.add_p2p_connection(P2PDataStore())
+
network_thread_start()
- test.run()
+ node.p2p.wait_for_verack()
+
+ best_block = node.getblock(node.getbestblockhash())
+ tip = int(node.getbestblockhash(), 16)
+ height = best_block["height"] + 1
+ block_time = best_block["time"] + 1
- def get_tests(self):
- if self.tip is None:
- self.tip = int("0x" + self.nodes[0].getbestblockhash(), 0)
- self.block_time = int(time.time())+1
+ self.log.info("Create a new block with an anyone-can-spend coinbase")
- '''
- Create a new block with an anyone-can-spend coinbase
- '''
height = 1
- block = create_block(self.tip, create_coinbase(height), self.block_time)
- self.block_time += 1
+ block = create_block(tip, create_coinbase(height), block_time)
block.solve()
# Save the coinbase for later
- self.block1 = block
- self.tip = block.sha256
- height += 1
- yield TestInstance([[block, True]])
-
- '''
- Now we need that block to mature so we can spend the coinbase.
- '''
- test = TestInstance(sync_every_block=False)
- for i in range(100):
- block = create_block(self.tip, create_coinbase(height), self.block_time)
- block.solve()
- self.tip = block.sha256
- self.block_time += 1
- test.blocks_and_transactions.append([block, True])
- height += 1
- yield test
-
- '''
- Now we use merkle-root malleability to generate an invalid block with
- same blockheader.
- Manufacture a block with 3 transactions (coinbase, spend of prior
- coinbase, spend of that spend). Duplicate the 3rd transaction to
- leave merkle root and blockheader unchanged but invalidate the block.
- '''
- block2 = create_block(self.tip, create_coinbase(height), self.block_time)
- self.block_time += 1
+ block1 = block
+ tip = block.sha256
+ node.p2p.send_blocks_and_test([block1], node, True)
+
+ self.log.info("Mature the block.")
+ node.generate(100)
+
+ best_block = node.getblock(node.getbestblockhash())
+ tip = int(node.getbestblockhash(), 16)
+ height = best_block["height"] + 1
+ block_time = best_block["time"] + 1
+
+ # Use merkle-root malleability to generate an invalid block with
+ # same blockheader.
+ # Manufacture a block with 3 transactions (coinbase, spend of prior
+ # coinbase, spend of that spend). Duplicate the 3rd transaction to
+ # leave merkle root and blockheader unchanged but invalidate the block.
+ self.log.info("Test merkle root malleability.")
+
+ block2 = create_block(tip, create_coinbase(height), block_time)
+ block_time += 1
# b'0x51' is OP_TRUE
- tx1 = create_transaction(self.block1.vtx[0], 0, b'\x51', 50 * COIN)
+ tx1 = create_transaction(block1.vtx[0], 0, b'\x51', 50 * COIN)
tx2 = create_transaction(tx1, 0, b'\x51', 50 * COIN)
block2.vtx.extend([tx1, tx2])
@@ -94,24 +82,20 @@ class InvalidBlockRequestTest(ComparisonTestFramework):
assert_equal(orig_hash, block2.rehash())
assert(block2_orig.vtx != block2.vtx)
- self.tip = block2.sha256
- yield TestInstance([[block2, RejectResult(16, b'bad-txns-duplicate')], [block2_orig, True]])
- height += 1
-
- '''
- Make sure that a totally screwed up block is not valid.
- '''
- block3 = create_block(self.tip, create_coinbase(height), self.block_time)
- self.block_time += 1
- block3.vtx[0].vout[0].nValue = 100 * COIN # Too high!
- block3.vtx[0].sha256=None
+ node.p2p.send_blocks_and_test([block2], node, False, False, 16, b'bad-txns-duplicate')
+
+ self.log.info("Test very broken block.")
+
+ block3 = create_block(tip, create_coinbase(height), block_time)
+ block_time += 1
+ block3.vtx[0].vout[0].nValue = 100 * COIN # Too high!
+ block3.vtx[0].sha256 = None
block3.vtx[0].calc_sha256()
block3.hashMerkleRoot = block3.calc_merkle_root()
block3.rehash()
block3.solve()
- yield TestInstance([[block3, RejectResult(16, b'bad-cb-amount')]])
-
+ node.p2p.send_blocks_and_test([block3], node, False, False, 16, b'bad-cb-amount')
if __name__ == '__main__':
InvalidBlockRequestTest().main()
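The malleability trick behind block2 works because Bitcoin's merkle tree duplicates the last hash of any odd-length level, so appending a copy of the final transaction leaves the root (and therefore the block header) unchanged while making the block invalid with bad-txns-duplicate. A small self-contained sketch of that property, using stand-in leaf hashes rather than real transactions.

    from hashlib import sha256

    def dsha256(data):
        return sha256(sha256(data).digest()).digest()

    def merkle_root(hashes):
        """Bitcoin-style merkle root: odd-length levels duplicate their last entry."""
        hashes = list(hashes)
        while len(hashes) > 1:
            if len(hashes) % 2:
                hashes.append(hashes[-1])
            hashes = [dsha256(hashes[i] + hashes[i + 1]) for i in range(0, len(hashes), 2)]
        return hashes[0]

    leaves = [dsha256(bytes([i])) for i in range(3)]  # stand-ins for [coinbase, tx1, tx2]
    assert merkle_root(leaves) == merkle_root(leaves + [leaves[-1]])  # duplicated last tx, same root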
diff --git a/test/functional/p2p_invalid_tx.py b/test/functional/p2p_invalid_tx.py
index 64fada38e2..69ce529ad6 100755
--- a/test/functional/p2p_invalid_tx.py
+++ b/test/functional/p2p_invalid_tx.py
@@ -33,12 +33,10 @@ class InvalidTxRequestTest(BitcoinTestFramework):
self.log.info("Create a new block with an anyone-can-spend coinbase.")
height = 1
block = create_block(tip, create_coinbase(height), block_time)
- block_time += 1
block.solve()
# Save the coinbase for later
block1 = block
tip = block.sha256
- height += 1
node.p2p.send_blocks_and_test([block], node, success=True)
self.log.info("Mature the block.")
@@ -49,7 +47,10 @@ class InvalidTxRequestTest(BitcoinTestFramework):
tx1 = create_transaction(block1.vtx[0], 0, b'\x64', 50 * COIN - 12000)
node.p2p.send_txs_and_test([tx1], node, success=False, reject_code=16, reject_reason=b'mandatory-script-verify-flag-failed (Invalid OP_IF construction)')
- # TODO: test further transactions...
+ # Verify valid transaction
+ tx1 = create_transaction(block1.vtx[0], 0, b'', 50 * COIN - 12000)
+ node.p2p.send_txs_and_test([tx1], node, success=True)
+
if __name__ == '__main__':
InvalidTxRequestTest().main()
diff --git a/test/functional/p2p_leak.py b/test/functional/p2p_leak.py
index ce4e6e9144..198dcc1490 100755
--- a/test/functional/p2p_leak.py
+++ b/test/functional/p2p_leak.py
@@ -7,7 +7,7 @@
A node should never send anything other than VERSION/VERACK/REJECT until it's
received a VERACK.
-This test connects to a node and sends it a few messages, trying to intice it
+This test connects to a node and sends it a few messages, trying to entice it
into sending us something it shouldn't.
Also test that nodes that send unsupported service bits to bitcoind are disconnected
diff --git a/test/functional/p2p_mempool.py b/test/functional/p2p_mempool.py
index 485a8af3d0..e54843b26f 100755
--- a/test/functional/p2p_mempool.py
+++ b/test/functional/p2p_mempool.py
@@ -30,6 +30,6 @@ class P2PMempoolTests(BitcoinTestFramework):
#mininode must be disconnected at this point
assert_equal(len(self.nodes[0].getpeerinfo()), 0)
-
+
if __name__ == '__main__':
P2PMempoolTests().main()
diff --git a/test/functional/p2p_node_network_limited.py b/test/functional/p2p_node_network_limited.py
index 70415e0168..301d8c181a 100755
--- a/test/functional/p2p_node_network_limited.py
+++ b/test/functional/p2p_node_network_limited.py
@@ -8,16 +8,21 @@ Tests that a node configured with -prune=550 signals NODE_NETWORK_LIMITED correc
and that it responds to getdata requests for blocks correctly:
- send a block within 288 + 2 of the tip
- disconnect peers who request blocks older than that."""
-from test_framework.messages import CInv, msg_getdata
-from test_framework.mininode import NODE_BLOOM, NODE_NETWORK_LIMITED, NODE_WITNESS, NetworkThread, P2PInterface
+from test_framework.messages import CInv, msg_getdata, msg_verack
+from test_framework.mininode import NODE_BLOOM, NODE_NETWORK_LIMITED, NODE_WITNESS, P2PInterface, wait_until, mininode_lock, network_thread_start, network_thread_join
from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import assert_equal
+from test_framework.util import assert_equal, disconnect_nodes, connect_nodes_bi, sync_blocks
class P2PIgnoreInv(P2PInterface):
+ firstAddrnServices = 0
def on_inv(self, message):
# The node will send us invs for other blocks. Ignore them.
pass
-
+ def on_addr(self, message):
+ self.firstAddrnServices = message.addrs[0].nServices
+ def wait_for_addr(self, timeout=5):
+ test_function = lambda: self.last_message.get("addr")
+ wait_until(test_function, timeout=timeout, lock=mininode_lock)
def send_getdata_for_block(self, blockhash):
getdata_request = msg_getdata()
getdata_request.inv.append(CInv(2, int(blockhash, 16)))
@@ -26,12 +31,24 @@ class P2PIgnoreInv(P2PInterface):
class NodeNetworkLimitedTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
- self.num_nodes = 1
- self.extra_args = [['-prune=550']]
+ self.num_nodes = 3
+ self.extra_args = [['-prune=550', '-addrmantest'], [], []]
+
+ def disconnect_all(self):
+ disconnect_nodes(self.nodes[0], 1)
+ disconnect_nodes(self.nodes[1], 0)
+ disconnect_nodes(self.nodes[2], 1)
+ disconnect_nodes(self.nodes[2], 0)
+ disconnect_nodes(self.nodes[0], 2)
+ disconnect_nodes(self.nodes[1], 2)
+
+ def setup_network(self):
+ super(NodeNetworkLimitedTest, self).setup_network()
+ self.disconnect_all()
def run_test(self):
node = self.nodes[0].add_p2p_connection(P2PIgnoreInv())
- NetworkThread().start()
+ network_thread_start()
node.wait_for_verack()
expected_services = NODE_BLOOM | NODE_WITNESS | NODE_NETWORK_LIMITED
@@ -43,9 +60,11 @@ class NodeNetworkLimitedTest(BitcoinTestFramework):
assert_equal(int(self.nodes[0].getnetworkinfo()['localservices'], 16), expected_services)
self.log.info("Mine enough blocks to reach the NODE_NETWORK_LIMITED range.")
- blocks = self.nodes[0].generate(292)
+ connect_nodes_bi(self.nodes, 0, 1)
+ blocks = self.nodes[1].generate(292)
+ sync_blocks([self.nodes[0], self.nodes[1]])
- self.log.info("Make sure we can max retrive block at tip-288.")
+ self.log.info("Make sure we can max retrieve block at tip-288.")
node.send_getdata_for_block(blocks[1]) # last block in valid range
node.wait_for_block(int(blocks[1], 16), timeout=3)
@@ -53,5 +72,48 @@ class NodeNetworkLimitedTest(BitcoinTestFramework):
node.send_getdata_for_block(blocks[0]) # first block outside of the 288+2 limit
node.wait_for_disconnect(5)
+ self.log.info("Check local address relay, do a fresh connection.")
+ self.nodes[0].disconnect_p2ps()
+ network_thread_join()
+ node1 = self.nodes[0].add_p2p_connection(P2PIgnoreInv())
+ network_thread_start()
+ node1.wait_for_verack()
+ node1.send_message(msg_verack())
+
+ node1.wait_for_addr()
+ # must relay address with NODE_NETWORK_LIMITED
+ assert_equal(node1.firstAddrnServices, 1036)
+
+ self.nodes[0].disconnect_p2ps()
+ node1.wait_for_disconnect()
+
+ # connect unsynced node 2 with pruned NODE_NETWORK_LIMITED peer
+ # because node 2 is in IBD and node 0 is a NODE_NETWORK_LIMITED peer, sync must not be possible
+ connect_nodes_bi(self.nodes, 0, 2)
+ try:
+ sync_blocks([self.nodes[0], self.nodes[2]], timeout=5)
+ except:
+ pass
+ # node2 must remain at height 0
+ assert_equal(self.nodes[2].getblockheader(self.nodes[2].getbestblockhash())['height'], 0)
+
+ # now connect also to node 1 (non pruned)
+ connect_nodes_bi(self.nodes, 1, 2)
+
+ # sync must be possible
+ sync_blocks(self.nodes)
+
+ # disconnect all peers
+ self.disconnect_all()
+
+ # mine 10 blocks on node 0 (pruned node)
+ self.nodes[0].generate(10)
+
+ # connect node1 (non pruned) with node0 (pruned) and check if they can sync
+ connect_nodes_bi(self.nodes, 0, 1)
+
+ # sync must be possible; node 1 is no longer in IBD and should therefore sync from node 0 (NODE_NETWORK_LIMITED)
+ sync_blocks([self.nodes[0], self.nodes[1]])
+
if __name__ == '__main__':
NodeNetworkLimitedTest().main()
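The literal 1036 asserted for firstAddrnServices is simply the bitwise OR of the three service flags the test already checks against getnetworkinfo()['localservices']. A minimal sketch, assuming the flag values exported by test_framework.mininode:

    NODE_BLOOM = 1 << 2             # 4
    NODE_WITNESS = 1 << 3           # 8
    NODE_NETWORK_LIMITED = 1 << 10  # 1024

    expected_services = NODE_BLOOM | NODE_WITNESS | NODE_NETWORK_LIMITED
    assert expected_services == 1036  # the value checked in the addr relay assertion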
diff --git a/test/functional/p2p_segwit.py b/test/functional/p2p_segwit.py
index 20e4805df0..4fecd4ffee 100755
--- a/test/functional/p2p_segwit.py
+++ b/test/functional/p2p_segwit.py
@@ -10,6 +10,7 @@ from test_framework.util import *
from test_framework.script import *
from test_framework.blocktools import create_block, create_coinbase, add_witness_commitment, get_witness_script, WITNESS_COMMITMENT_HEADER
from test_framework.key import CECKey, CPubKey
+import math
import time
import random
from binascii import hexlify
@@ -47,7 +48,7 @@ def test_transaction_acceptance(rpc, p2p, tx, with_witness, accepted, reason=Non
with mininode_lock:
assert_equal(p2p.last_message["reject"].reason, reason)
-def test_witness_block(rpc, p2p, block, accepted, with_witness=True):
+def test_witness_block(rpc, p2p, block, accepted, with_witness=True, reason=None):
"""Send a block to the node and check that it's accepted
- Submit the block over the p2p interface
@@ -58,8 +59,12 @@ def test_witness_block(rpc, p2p, block, accepted, with_witness=True):
p2p.send_message(msg_block(block))
p2p.sync_with_ping()
assert_equal(rpc.getbestblockhash() == block.hash, accepted)
+ if reason is not None and not accepted:
+ # Check the rejection reason as well.
+ with mininode_lock:
+ assert_equal(p2p.last_message["reject"].reason, reason)
-class TestNode(P2PInterface):
+class TestP2PConn(P2PInterface):
def __init__(self):
super().__init__()
self.getdataset = set()
@@ -271,6 +276,80 @@ class SegWitTest(BitcoinTestFramework):
self.utxo.pop(0)
self.utxo.append(UTXO(tx4.sha256, 0, tx4.vout[0].nValue))
+ # ~6 months after segwit activation, the SCRIPT_VERIFY_WITNESS flag was
+ # backdated so that it applies to all blocks, going back to the genesis
+ # block.
+ #
+ # Consequently, version 0 witness outputs are never spendable without
+ # witness, and so can't be spent before segwit activation (the point at which
+ # blocks are permitted to contain witnesses).
+ def test_v0_outputs_arent_spendable(self):
+ self.log.info("Testing that v0 witness program outputs aren't spendable before activation")
+
+ assert len(self.utxo), "self.utxo is empty"
+
+ # Create two outputs, a p2wsh and p2sh-p2wsh
+ witness_program = CScript([OP_TRUE])
+ witness_hash = sha256(witness_program)
+ scriptPubKey = CScript([OP_0, witness_hash])
+
+ p2sh_pubkey = hash160(scriptPubKey)
+ p2sh_scriptPubKey = CScript([OP_HASH160, p2sh_pubkey, OP_EQUAL])
+
+ value = self.utxo[0].nValue // 3
+
+ tx = CTransaction()
+ tx.vin = [CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b'')]
+ tx.vout = [CTxOut(value, scriptPubKey), CTxOut(value, p2sh_scriptPubKey)]
+ tx.vout.append(CTxOut(value, CScript([OP_TRUE])))
+ tx.rehash()
+ txid = tx.sha256
+
+ # Add it to a block
+ block = self.build_next_block()
+ self.update_witness_block_with_transactions(block, [tx])
+ # Verify that segwit isn't activated. A block serialized with witness
+ # should be rejected prior to activation.
+ test_witness_block(self.nodes[0], self.test_node, block, accepted=False, with_witness=True, reason=b'unexpected-witness')
+ # Now send the block without witness. It should be accepted
+ test_witness_block(self.nodes[0], self.test_node, block, accepted=True, with_witness=False)
+
+ # Now try to spend the outputs. This should fail since SCRIPT_VERIFY_WITNESS is always enabled.
+ p2wsh_tx = CTransaction()
+ p2wsh_tx.vin = [CTxIn(COutPoint(txid, 0), b'')]
+ p2wsh_tx.vout = [CTxOut(value, CScript([OP_TRUE]))]
+ p2wsh_tx.wit.vtxinwit.append(CTxInWitness())
+ p2wsh_tx.wit.vtxinwit[0].scriptWitness.stack = [CScript([OP_TRUE])]
+ p2wsh_tx.rehash()
+
+ p2sh_p2wsh_tx = CTransaction()
+ p2sh_p2wsh_tx.vin = [CTxIn(COutPoint(txid, 1), CScript([scriptPubKey]))]
+ p2sh_p2wsh_tx.vout = [CTxOut(value, CScript([OP_TRUE]))]
+ p2sh_p2wsh_tx.wit.vtxinwit.append(CTxInWitness())
+ p2sh_p2wsh_tx.wit.vtxinwit[0].scriptWitness.stack = [CScript([OP_TRUE])]
+ p2sh_p2wsh_tx.rehash()
+
+ for tx in [p2wsh_tx, p2sh_p2wsh_tx]:
+
+ block = self.build_next_block()
+ self.update_witness_block_with_transactions(block, [tx])
+
+ # When the block is serialized with a witness, the block will be rejected because witness
+ # data isn't allowed in blocks that don't commit to witness data.
+ test_witness_block(self.nodes[0], self.test_node, block, accepted=False, with_witness=True, reason=b'unexpected-witness')
+
+ # When the block is serialized without witness, validation fails because the transaction is
+ # invalid (transactions are always validated with SCRIPT_VERIFY_WITNESS so a segwit v0 transaction
+ # without a witness is invalid).
+ # Note: The reject reason for this failure could be
+ # 'block-validation-failed' (if script check threads > 1) or
+ # 'non-mandatory-script-verify-flag (Witness program was passed an
+ # empty witness)' (otherwise).
+ # TODO: support multiple acceptable reject reasons.
+ test_witness_block(self.nodes[0], self.test_node, block, accepted=False, with_witness=False)
+
+ self.utxo.pop(0)
+ self.utxo.append(UTXO(txid, 2, value))
# Mine enough blocks for segwit's vb state to be 'started'.
def advance_to_segwit_started(self):
@@ -425,7 +504,7 @@ class SegWitTest(BitcoinTestFramework):
assert(self.nodes[0].getbestblockhash() == block.hash)
- # Now make sure that malleating the witness nonce doesn't
+ # Now make sure that malleating the witness reserved value doesn't
# result in a block permanently marked bad.
block = self.build_next_block()
add_witness_commitment(block)
@@ -436,7 +515,7 @@ class SegWitTest(BitcoinTestFramework):
block.vtx[0].wit.vtxinwit[0].scriptWitness.stack = [ ser_uint256(1) ]
test_witness_block(self.nodes[0].rpc, self.test_node, block, accepted=False)
- # Changing the witness nonce doesn't change the block hash
+ # Changing the witness reserved value doesn't change the block hash
block.vtx[0].wit.vtxinwit[0].scriptWitness.stack = [ ser_uint256(0) ]
test_witness_block(self.nodes[0].rpc, self.test_node, block, accepted=True)
@@ -450,7 +529,7 @@ class SegWitTest(BitcoinTestFramework):
block = self.build_next_block()
assert(len(self.utxo) > 0)
-
+
# Create a P2WSH transaction.
# The witness program will be a bunch of OP_2DROP's, followed by OP_TRUE.
# This should give us plenty of room to tweak the spending tx's
@@ -562,7 +641,7 @@ class SegWitTest(BitcoinTestFramework):
self.log.info("Testing extra witness data in tx")
assert(len(self.utxo) > 0)
-
+
block = self.build_next_block()
witness_program = CScript([OP_DROP, OP_TRUE])
@@ -730,7 +809,7 @@ class SegWitTest(BitcoinTestFramework):
witness_program = CScript([OP_DROP, OP_TRUE])
witness_hash = sha256(witness_program)
scriptPubKey = CScript([OP_0, witness_hash])
-
+
# Create a transaction that splits our utxo into many outputs
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
@@ -833,7 +912,7 @@ class SegWitTest(BitcoinTestFramework):
self.test_node.announce_tx_and_wait_for_getdata(tx, timeout=2)
self.log.error("Error: duplicate tx getdata!")
assert(False)
- except AssertionError as e:
+ except AssertionError:
pass
# Delivering this transaction with witness should fail (no matter who
@@ -930,8 +1009,10 @@ class SegWitTest(BitcoinTestFramework):
raw_tx = self.nodes[0].getrawtransaction(tx3.hash, 1)
assert_equal(int(raw_tx["hash"], 16), tx3.calc_sha256(True))
assert_equal(raw_tx["size"], len(tx3.serialize_with_witness()))
- vsize = (len(tx3.serialize_with_witness()) + 3*len(tx3.serialize_without_witness()) + 3) / 4
+ weight = len(tx3.serialize_with_witness()) + 3*len(tx3.serialize_without_witness())
+ vsize = math.ceil(weight / 4)
assert_equal(raw_tx["vsize"], vsize)
+ assert_equal(raw_tx["weight"], weight)
assert_equal(len(raw_tx["vin"][0]["txinwitness"]), 1)
assert_equal(raw_tx["vin"][0]["txinwitness"][0], hexlify(witness_program).decode('ascii'))
assert(vsize != raw_tx["size"])
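The replacement above computes vsize by rounding the weight up to the next virtual byte instead of returning a float. A small worked example with hypothetical serialization lengths:

    import math

    with_witness = 219     # hypothetical serialized size including witness data
    without_witness = 110  # hypothetical size of the stripped serialization

    weight = with_witness + 3 * without_witness  # 549
    vsize = math.ceil(weight / 4)                # 138 (the old float expression gave 138.0)
    assert (weight, vsize) == (549, 138)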
@@ -1476,9 +1557,10 @@ class SegWitTest(BitcoinTestFramework):
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [spend_tx])
- # If we're before activation, then sending this without witnesses
- # should be valid. If we're after activation, then sending this with
- # witnesses should be valid.
+ # If we're after activation, then sending this with witnesses should be valid.
+ # This no longer works before activation, because SCRIPT_VERIFY_WITNESS
+ # is always set.
+ # TODO: rewrite this test to make clear that it only works after activation.
if segwit_activated:
test_witness_block(self.nodes[0].rpc, self.test_node, block, accepted=True)
else:
@@ -1511,7 +1593,7 @@ class SegWitTest(BitcoinTestFramework):
# Make sure that this peer thinks segwit has activated.
assert(get_bip9_status(self.nodes[node_id], 'segwit')['status'] == "active")
- # Make sure this peers blocks match those of node0.
+ # Make sure this peer's blocks match those of node0.
height = self.nodes[node_id].getblockcount()
while height >= 0:
block_hash = self.nodes[node_id].getblockhash(height)
@@ -1878,11 +1960,11 @@ class SegWitTest(BitcoinTestFramework):
def run_test(self):
# Setup the p2p connections and start up the network thread.
# self.test_node sets NODE_WITNESS|NODE_NETWORK
- self.test_node = self.nodes[0].add_p2p_connection(TestNode(), services=NODE_NETWORK|NODE_WITNESS)
+ self.test_node = self.nodes[0].add_p2p_connection(TestP2PConn(), services=NODE_NETWORK|NODE_WITNESS)
# self.old_node sets only NODE_NETWORK
- self.old_node = self.nodes[0].add_p2p_connection(TestNode(), services=NODE_NETWORK)
+ self.old_node = self.nodes[0].add_p2p_connection(TestP2PConn(), services=NODE_NETWORK)
# self.std_node is for testing node1 (fRequireStandard=true)
- self.std_node = self.nodes[1].add_p2p_connection(TestNode(), services=NODE_NETWORK|NODE_WITNESS)
+ self.std_node = self.nodes[1].add_p2p_connection(TestP2PConn(), services=NODE_NETWORK|NODE_WITNESS)
network_thread_start()
@@ -1897,6 +1979,7 @@ class SegWitTest(BitcoinTestFramework):
self.test_witness_services() # Verifies NODE_WITNESS
self.test_non_witness_transaction() # non-witness tx's are accepted
self.test_unnecessary_witness_before_segwit_activation()
+ self.test_v0_outputs_arent_spendable()
self.test_block_relay(segwit_activated=False)
# Advance to segwit being 'started'
@@ -1914,7 +1997,6 @@ class SegWitTest(BitcoinTestFramework):
self.test_unnecessary_witness_before_segwit_activation()
self.test_witness_tx_relay_before_segwit_activation()
self.test_block_relay(segwit_activated=False)
- self.test_p2sh_witness(segwit_activated=False)
self.test_standardness_v0(segwit_activated=False)
sync_blocks(self.nodes)
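test_v0_outputs_arent_spendable builds a native P2WSH output and its P2SH wrapping from the same one-opcode witness program. A self-contained sketch of those two scriptPubKeys using plain hashlib (assumes hashlib exposes a ripemd160 provider) rather than the framework's sha256/hash160 helpers:

    import hashlib

    def sha256(data):
        return hashlib.sha256(data).digest()

    def hash160(data):
        # RIPEMD160(SHA256(data)); relies on hashlib exposing ripemd160.
        return hashlib.new('ripemd160', sha256(data)).digest()

    OP_0, OP_TRUE, OP_HASH160, OP_EQUAL = 0x00, 0x51, 0xa9, 0x87

    witness_program = bytes([OP_TRUE])
    # Native P2WSH: OP_0 <32-byte sha256(witness program)>
    p2wsh_spk = bytes([OP_0, 0x20]) + sha256(witness_program)
    # P2SH wrapping of that script: OP_HASH160 <20-byte hash160(p2wsh_spk)> OP_EQUAL
    p2sh_p2wsh_spk = bytes([OP_HASH160, 0x14]) + hash160(p2wsh_spk) + bytes([OP_EQUAL])

    assert len(p2wsh_spk) == 34 and len(p2sh_p2wsh_spk) == 23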
diff --git a/test/functional/p2p_sendheaders.py b/test/functional/p2p_sendheaders.py
index 8869aeaaea..4c7d5e65c5 100755
--- a/test/functional/p2p_sendheaders.py
+++ b/test/functional/p2p_sendheaders.py
@@ -411,21 +411,18 @@ class SendHeadersTest(BitcoinTestFramework):
inv_node.check_last_announcement(inv=[tip], headers=[])
test_node.check_last_announcement(inv=[tip], headers=[])
if i == 0:
- # Just get the data -- shouldn't cause headers announcements to resume
+ self.log.debug("Just get the data -- shouldn't cause headers announcements to resume")
test_node.send_get_data([tip])
test_node.wait_for_block(tip)
elif i == 1:
- # Send a getheaders message that shouldn't trigger headers announcements
- # to resume (best header sent will be too old)
+ self.log.debug("Send a getheaders message that shouldn't trigger headers announcements to resume (best header sent will be too old)")
test_node.send_get_headers(locator=[fork_point], hashstop=new_block_hashes[1])
test_node.send_get_data([tip])
test_node.wait_for_block(tip)
elif i == 2:
test_node.send_get_data([tip])
test_node.wait_for_block(tip)
- # This time, try sending either a getheaders to trigger resumption
- # of headers announcements, or mine a new block and inv it, also
- # triggering resumption of headers announcements.
+ self.log.debug("This time, try sending either a getheaders to trigger resumption of headers announcements, or mine a new block and inv it, also triggering resumption of headers announcements.")
if j == 0:
test_node.send_get_headers(locator=[tip], hashstop=0)
test_node.sync_with_ping()
diff --git a/test/functional/p2p_timeouts.py b/test/functional/p2p_timeouts.py
index 6d21095cc6..6a21b693b4 100755
--- a/test/functional/p2p_timeouts.py
+++ b/test/functional/p2p_timeouts.py
@@ -27,7 +27,7 @@ from test_framework.mininode import *
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
-class TestNode(P2PInterface):
+class TestP2PConn(P2PInterface):
def on_version(self, message):
# Don't send a verack in response
pass
@@ -39,9 +39,9 @@ class TimeoutsTest(BitcoinTestFramework):
def run_test(self):
# Setup the p2p connections and start up the network thread.
- no_verack_node = self.nodes[0].add_p2p_connection(TestNode())
- no_version_node = self.nodes[0].add_p2p_connection(TestNode(), send_version=False)
- no_send_node = self.nodes[0].add_p2p_connection(TestNode(), send_version=False)
+ no_verack_node = self.nodes[0].add_p2p_connection(TestP2PConn())
+ no_version_node = self.nodes[0].add_p2p_connection(TestP2PConn(), send_version=False)
+ no_send_node = self.nodes[0].add_p2p_connection(TestP2PConn(), send_version=False)
network_thread_start()
diff --git a/test/functional/p2p_unrequested_blocks.py b/test/functional/p2p_unrequested_blocks.py
index 672626f15b..49c619a4ce 100755
--- a/test/functional/p2p_unrequested_blocks.py
+++ b/test/functional/p2p_unrequested_blocks.py
@@ -57,12 +57,8 @@ from test_framework.util import *
import time
from test_framework.blocktools import create_block, create_coinbase, create_transaction
-class AcceptBlockTest(BitcoinTestFramework):
- def add_options(self, parser):
- parser.add_option("--testbinary", dest="testbinary",
- default=os.getenv("BITCOIND", "bitcoind"),
- help="bitcoind binary to test")
+class AcceptBlockTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 2
@@ -166,7 +162,7 @@ class AcceptBlockTest(BitcoinTestFramework):
self.log.info("Unrequested more-work block accepted")
# 4c. Now mine 288 more blocks and deliver; all should be processed but
- # the last (height-too-high) on node (as long as its not missing any headers)
+ # the last (height-too-high) on node (as long as it is not missing any headers)
tip = block_h3
all_blocks = []
for i in range(288):
diff --git a/test/functional/rpc_bind.py b/test/functional/rpc_bind.py
index 05433c7e24..343e162058 100755
--- a/test/functional/rpc_bind.py
+++ b/test/functional/rpc_bind.py
@@ -4,7 +4,6 @@
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test running bitcoind with the -rpcbind and -rpcallowip options."""
-import socket
import sys
from test_framework.test_framework import BitcoinTestFramework, SkipTest
@@ -14,11 +13,17 @@ from test_framework.netutil import *
class RPCBindTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
+ self.bind_to_localhost_only = False
self.num_nodes = 1
def setup_network(self):
self.add_nodes(self.num_nodes, None)
+ def add_options(self, parser):
+ parser.add_option("--ipv4", action='store_true', dest="run_ipv4", help="Run ipv4 tests only", default=False)
+ parser.add_option("--ipv6", action='store_true', dest="run_ipv6", help="Run ipv6 tests only", default=False)
+ parser.add_option("--nonloopback", action='store_true', dest="run_nonloopback", help="Run non-loopback tests only", default=False)
+
def run_bind_test(self, allow_ips, connect_to, addresses, expected):
'''
Start a node with requested rpcallowip and rpcbind parameters,
@@ -47,61 +52,75 @@ class RPCBindTest(BitcoinTestFramework):
self.nodes[0].rpchost = None
self.start_nodes([base_args])
# connect to node through non-loopback interface
- node = get_rpc_proxy(rpc_url(get_datadir_path(self.options.tmpdir, 0), 0, "%s:%d" % (rpchost, rpcport)), 0, coveragedir=self.options.coveragedir)
+ node = get_rpc_proxy(rpc_url(self.nodes[0].datadir, 0, "%s:%d" % (rpchost, rpcport)), 0, coveragedir=self.options.coveragedir)
node.getnetworkinfo()
self.stop_nodes()
def run_test(self):
# due to OS-specific network stats queries, this test works only on Linux
+ if sum([self.options.run_ipv4, self.options.run_ipv6, self.options.run_nonloopback]) > 1:
+ raise AssertionError("Only one of --ipv4, --ipv6 and --nonloopback can be set")
+
+ self.log.info("Check for linux")
if not sys.platform.startswith('linux'):
raise SkipTest("This test can only be run on linux.")
- # find the first non-loopback interface for testing
- non_loopback_ip = None
+
+ self.log.info("Check for ipv6")
+ have_ipv6 = test_ipv6_local()
+ if not have_ipv6 and not self.options.run_ipv4:
+ raise SkipTest("This test requires ipv6 support.")
+
+ self.log.info("Check for non-loopback interface")
+ self.non_loopback_ip = None
for name,ip in all_interfaces():
if ip != '127.0.0.1':
- non_loopback_ip = ip
+ self.non_loopback_ip = ip
break
- if non_loopback_ip is None:
- raise SkipTest("This test requires at least one non-loopback IPv4 interface.")
- try:
- s = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM)
- s.connect(("::1",1))
- s.close
- except OSError:
- raise SkipTest("This test requires IPv6 support.")
-
- self.log.info("Using interface %s for testing" % non_loopback_ip)
-
- defaultport = rpc_port(0)
-
- # check default without rpcallowip (IPv4 and IPv6 localhost)
- self.run_bind_test(None, '127.0.0.1', [],
- [('127.0.0.1', defaultport), ('::1', defaultport)])
- # check default with rpcallowip (IPv6 any)
- self.run_bind_test(['127.0.0.1'], '127.0.0.1', [],
- [('::0', defaultport)])
- # check only IPv4 localhost (explicit)
- self.run_bind_test(['127.0.0.1'], '127.0.0.1', ['127.0.0.1'],
- [('127.0.0.1', defaultport)])
- # check only IPv4 localhost (explicit) with alternative port
- self.run_bind_test(['127.0.0.1'], '127.0.0.1:32171', ['127.0.0.1:32171'],
- [('127.0.0.1', 32171)])
- # check only IPv4 localhost (explicit) with multiple alternative ports on same host
- self.run_bind_test(['127.0.0.1'], '127.0.0.1:32171', ['127.0.0.1:32171', '127.0.0.1:32172'],
- [('127.0.0.1', 32171), ('127.0.0.1', 32172)])
- # check only IPv6 localhost (explicit)
- self.run_bind_test(['[::1]'], '[::1]', ['[::1]'],
- [('::1', defaultport)])
- # check both IPv4 and IPv6 localhost (explicit)
- self.run_bind_test(['127.0.0.1'], '127.0.0.1', ['127.0.0.1', '[::1]'],
- [('127.0.0.1', defaultport), ('::1', defaultport)])
+ if self.non_loopback_ip is None and self.options.run_nonloopback:
+ raise SkipTest("This test requires a non-loopback ip address.")
+
+ self.defaultport = rpc_port(0)
+
+ if not self.options.run_nonloopback:
+ self._run_loopback_tests()
+ if not self.options.run_ipv4 and not self.options.run_ipv6:
+ self._run_nonloopback_tests()
+
+ def _run_loopback_tests(self):
+ if self.options.run_ipv4:
+ # check only IPv4 localhost (explicit)
+ self.run_bind_test(['127.0.0.1'], '127.0.0.1', ['127.0.0.1'],
+ [('127.0.0.1', self.defaultport)])
+ # check only IPv4 localhost (explicit) with alternative port
+ self.run_bind_test(['127.0.0.1'], '127.0.0.1:32171', ['127.0.0.1:32171'],
+ [('127.0.0.1', 32171)])
+ # check only IPv4 localhost (explicit) with multiple alternative ports on same host
+ self.run_bind_test(['127.0.0.1'], '127.0.0.1:32171', ['127.0.0.1:32171', '127.0.0.1:32172'],
+ [('127.0.0.1', 32171), ('127.0.0.1', 32172)])
+ else:
+ # check default without rpcallowip (IPv4 and IPv6 localhost)
+ self.run_bind_test(None, '127.0.0.1', [],
+ [('127.0.0.1', self.defaultport), ('::1', self.defaultport)])
+ # check default with rpcallowip (IPv6 any)
+ self.run_bind_test(['127.0.0.1'], '127.0.0.1', [],
+ [('::0', self.defaultport)])
+ # check only IPv6 localhost (explicit)
+ self.run_bind_test(['[::1]'], '[::1]', ['[::1]'],
+ [('::1', self.defaultport)])
+ # check both IPv4 and IPv6 localhost (explicit)
+ self.run_bind_test(['127.0.0.1'], '127.0.0.1', ['127.0.0.1', '[::1]'],
+ [('127.0.0.1', self.defaultport), ('::1', self.defaultport)])
+
+ def _run_nonloopback_tests(self):
+ self.log.info("Using interface %s for testing" % self.non_loopback_ip)
+
# check only non-loopback interface
- self.run_bind_test([non_loopback_ip], non_loopback_ip, [non_loopback_ip],
- [(non_loopback_ip, defaultport)])
+ self.run_bind_test([self.non_loopback_ip], self.non_loopback_ip, [self.non_loopback_ip],
+ [(self.non_loopback_ip, self.defaultport)])
# Check that with invalid rpcallowip, we are denied
- self.run_allowip_test([non_loopback_ip], non_loopback_ip, defaultport)
- assert_raises_rpc_error(-342, "non-JSON HTTP response with '403 Forbidden' from server", self.run_allowip_test, ['1.1.1.1'], non_loopback_ip, defaultport)
+ self.run_allowip_test([self.non_loopback_ip], self.non_loopback_ip, self.defaultport)
+ assert_raises_rpc_error(-342, "non-JSON HTTP response with '403 Forbidden' from server", self.run_allowip_test, ['1.1.1.1'], self.non_loopback_ip, self.defaultport)
if __name__ == '__main__':
RPCBindTest().main()
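The three new options are mutually exclusive and select which of the two test groups run; the dispatch in run_test reduces to the following truth table, sketched here for clarity (group labels are illustrative):

    def selected_groups(run_ipv4=False, run_ipv6=False, run_nonloopback=False):
        assert sum([run_ipv4, run_ipv6, run_nonloopback]) <= 1
        groups = []
        if not run_nonloopback:
            groups.append('loopback (ipv4 only)' if run_ipv4 else 'loopback (ipv4 + ipv6)')
        if not run_ipv4 and not run_ipv6:
            groups.append('non-loopback')
        return groups

    assert selected_groups() == ['loopback (ipv4 + ipv6)', 'non-loopback']  # no flag: run everything
    assert selected_groups(run_ipv4=True) == ['loopback (ipv4 only)']
    assert selected_groups(run_ipv6=True) == ['loopback (ipv4 + ipv6)']
    assert selected_groups(run_nonloopback=True) == ['non-loopback']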
diff --git a/test/functional/rpc_blockchain.py b/test/functional/rpc_blockchain.py
index 7cf2abe6f0..17e24453e5 100755
--- a/test/functional/rpc_blockchain.py
+++ b/test/functional/rpc_blockchain.py
@@ -32,6 +32,18 @@ from test_framework.util import (
assert_is_hex_string,
assert_is_hash_string,
)
+from test_framework.blocktools import (
+ create_block,
+ create_coinbase,
+)
+from test_framework.messages import (
+ msg_block,
+)
+from test_framework.mininode import (
+ P2PInterface,
+ network_thread_start,
+)
+
class BlockchainTest(BitcoinTestFramework):
def set_test_params(self):
@@ -46,6 +58,7 @@ class BlockchainTest(BitcoinTestFramework):
self._test_getdifficulty()
self._test_getnetworkhashps()
self._test_stopatheight()
+ self._test_waitforblockheight()
assert self.nodes[0].verifychain(4, 0)
def _test_getblockchaininfo(self):
@@ -102,6 +115,22 @@ class BlockchainTest(BitcoinTestFramework):
def _test_getchaintxstats(self):
self.log.info("Test getchaintxstats")
+ # Test `getchaintxstats` invalid extra parameters
+ assert_raises_rpc_error(-1, 'getchaintxstats', self.nodes[0].getchaintxstats, 0, '', 0)
+
+ # Test `getchaintxstats` invalid `nblocks`
+ assert_raises_rpc_error(-1, "JSON value is not an integer as expected", self.nodes[0].getchaintxstats, '')
+ assert_raises_rpc_error(-8, "Invalid block count: should be between 0 and the block's height - 1", self.nodes[0].getchaintxstats, -1)
+ assert_raises_rpc_error(-8, "Invalid block count: should be between 0 and the block's height - 1", self.nodes[0].getchaintxstats, self.nodes[0].getblockcount())
+
+ # Test `getchaintxstats` invalid `blockhash`
+ assert_raises_rpc_error(-1, "JSON value is not a string as expected", self.nodes[0].getchaintxstats, blockhash=0)
+ assert_raises_rpc_error(-5, "Block not found", self.nodes[0].getchaintxstats, blockhash='0')
+ blockhash = self.nodes[0].getblockhash(200)
+ self.nodes[0].invalidateblock(blockhash)
+ assert_raises_rpc_error(-8, "Block is not in main chain", self.nodes[0].getchaintxstats, blockhash=blockhash)
+ self.nodes[0].reconsiderblock(blockhash)
+
chaintxstats = self.nodes[0].getchaintxstats(1)
# 200 txs plus genesis tx
assert_equal(chaintxstats['txcount'], 201)
@@ -133,8 +162,6 @@ class BlockchainTest(BitcoinTestFramework):
assert('window_interval' not in chaintxstats)
assert('txrate' not in chaintxstats)
- assert_raises_rpc_error(-8, "Invalid block count: should be between 0 and the block's height - 1", self.nodes[0].getchaintxstats, 201)
-
def _test_gettxoutsetinfo(self):
node = self.nodes[0]
res = node.gettxoutsetinfo()
@@ -227,6 +254,50 @@ class BlockchainTest(BitcoinTestFramework):
self.start_node(0)
assert_equal(self.nodes[0].getblockcount(), 207)
+ def _test_waitforblockheight(self):
+ self.log.info("Test waitforblockheight")
+
+ node = self.nodes[0]
+
+ # Start a P2P connection since we'll need to create some blocks.
+ node.add_p2p_connection(P2PInterface())
+ network_thread_start()
+ node.p2p.wait_for_verack()
+
+ current_height = node.getblock(node.getbestblockhash())['height']
+
+ # Create a fork somewhere below our current height, invalidate the tip
+ # of that fork, and then ensure that waitforblockheight still
+ # works as expected.
+ #
+ # (Previously this was broken based on setting
+ # `rpc/blockchain.cpp:latestblock` incorrectly.)
+ #
+ b20hash = node.getblockhash(20)
+ b20 = node.getblock(b20hash)
+
+ def solve_and_send_block(prevhash, height, time):
+ b = create_block(prevhash, create_coinbase(height), time)
+ b.solve()
+ node.p2p.send_message(msg_block(b))
+ node.p2p.sync_with_ping()
+ return b
+
+ b21f = solve_and_send_block(int(b20hash, 16), 21, b20['time'] + 1)
+ b22f = solve_and_send_block(b21f.sha256, 22, b21f.nTime + 1)
+
+ node.invalidateblock(b22f.hash)
+
+ def assert_waitforheight(height, timeout=2):
+ assert_equal(
+ node.waitforblockheight(height, timeout)['height'],
+ current_height)
+
+ assert_waitforheight(0)
+ assert_waitforheight(current_height - 1)
+ assert_waitforheight(current_height)
+ assert_waitforheight(current_height + 1)
+
if __name__ == '__main__':
BlockchainTest().main()
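The new getchaintxstats assertions exercise the bound spelled out in the error message: for a tip at height H, only 0 <= nblocks <= H - 1 is accepted, so on the 200-block regtest chain both -1 and getblockcount() itself are rejected. A minimal sketch of that bound:

    def nblocks_is_valid(nblocks, tip_height):
        return 0 <= nblocks <= tip_height - 1

    tip_height = 200  # height of the regtest chain this test builds
    assert not nblocks_is_valid(-1, tip_height)
    assert nblocks_is_valid(0, tip_height)
    assert nblocks_is_valid(tip_height - 1, tip_height)
    assert not nblocks_is_valid(tip_height, tip_height)  # getblockcount() itself is rejected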
diff --git a/test/functional/rpc_deprecated.py b/test/functional/rpc_deprecated.py
index 90183474bb..7b7c596506 100755
--- a/test/functional/rpc_deprecated.py
+++ b/test/functional/rpc_deprecated.py
@@ -4,12 +4,13 @@
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test deprecation of RPC calls."""
from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import assert_raises_rpc_error
class DeprecatedRpcTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 2
self.setup_clean_chain = True
- self.extra_args = [[], []]
+ self.extra_args = [[], ["-deprecatedrpc=validateaddress", "-deprecatedrpc=accounts"]]
def run_test(self):
# This test should be used to verify correct behaviour of deprecated
@@ -18,10 +19,94 @@ class DeprecatedRpcTest(BitcoinTestFramework):
# self.log.info("Make sure that -deprecatedrpc=createmultisig allows it to take addresses")
# assert_raises_rpc_error(-5, "Invalid public key", self.nodes[0].createmultisig, 1, [self.nodes[0].getnewaddress()])
# self.nodes[1].createmultisig(1, [self.nodes[1].getnewaddress()])
+
+ self.log.info("Test validateaddress deprecation")
+ SOME_ADDRESS = "mnvGjUy3NMj67yJ6gkK5o9e5RS33Z2Vqcu" # This is just some random address to pass as a parameter to validateaddress
+ dep_validate_address = self.nodes[0].validateaddress(SOME_ADDRESS)
+ assert "ismine" not in dep_validate_address
+ not_dep_val = self.nodes[1].validateaddress(SOME_ADDRESS)
+ assert "ismine" in not_dep_val
+
+ self.log.info("Test accounts deprecation")
+ # The following account RPC methods are deprecated:
+ # - getaccount
+ # - getaccountaddress
+ # - getaddressesbyaccount
+ # - getreceivedbyaccount
+ # - listaccounts
+ # - listreceivedbyaccount
+ # - move
+ # - setaccount
+ #
+ # The following 'label' RPC methods are usable both with and without the
+ # -deprecatedrpc=accounts switch enabled.
+ # - getlabeladdress
+ # - getaddressesbylabel
+ # - getreceivedbylabel
+ # - listlabels
+ # - listreceivedbylabel
+ # - setlabel
#
- # There are currently no deprecated RPC methods in master, so this
- # test is currently empty.
- pass
+ address0 = self.nodes[0].getnewaddress()
+ self.nodes[0].generatetoaddress(101, address0)
+ address1 = self.nodes[1].getnewaddress()
+ self.nodes[1].generatetoaddress(101, address1)
+
+ self.log.info("- getaccount")
+ assert_raises_rpc_error(-32, "getaccount is deprecated", self.nodes[0].getaccount, address0)
+ self.nodes[1].getaccount(address1)
+
+ self.log.info("- setaccount")
+ assert_raises_rpc_error(-32, "setaccount is deprecated", self.nodes[0].setaccount, address0, "label0")
+ self.nodes[1].setaccount(address1, "label1")
+
+ self.log.info("- setlabel")
+ self.nodes[0].setlabel(address0, "label0")
+ self.nodes[1].setlabel(address1, "label1")
+
+ self.log.info("- getaccountaddress")
+ assert_raises_rpc_error(-32, "getaccountaddress is deprecated", self.nodes[0].getaccountaddress, "label0")
+ self.nodes[1].getaccountaddress("label1")
+
+ self.log.info("- getlabeladdress")
+ self.nodes[0].getlabeladdress("label0")
+ self.nodes[1].getlabeladdress("label1")
+
+ self.log.info("- getaddressesbyaccount")
+ assert_raises_rpc_error(-32, "getaddressesbyaccount is deprecated", self.nodes[0].getaddressesbyaccount, "label0")
+ self.nodes[1].getaddressesbyaccount("label1")
+
+ self.log.info("- getaddressesbylabel")
+ self.nodes[0].getaddressesbylabel("label0")
+ self.nodes[1].getaddressesbylabel("label1")
+
+ self.log.info("- getreceivedbyaccount")
+ assert_raises_rpc_error(-32, "getreceivedbyaccount is deprecated", self.nodes[0].getreceivedbyaccount, "label0")
+ self.nodes[1].getreceivedbyaccount("label1")
+
+ self.log.info("- getreceivedbylabel")
+ self.nodes[0].getreceivedbylabel("label0")
+ self.nodes[1].getreceivedbylabel("label1")
+
+ self.log.info("- listaccounts")
+ assert_raises_rpc_error(-32, "listaccounts is deprecated", self.nodes[0].listaccounts)
+ self.nodes[1].listaccounts()
+
+ self.log.info("- listlabels")
+ self.nodes[0].listlabels()
+ self.nodes[1].listlabels()
+
+ self.log.info("- listreceivedbyaccount")
+ assert_raises_rpc_error(-32, "listreceivedbyaccount is deprecated", self.nodes[0].listreceivedbyaccount)
+ self.nodes[1].listreceivedbyaccount()
+
+ self.log.info("- listreceivedbylabel")
+ self.nodes[0].listreceivedbylabel()
+ self.nodes[1].listreceivedbylabel()
+
+ self.log.info("- move")
+ assert_raises_rpc_error(-32, "move is deprecated", self.nodes[0].move, "label0", "label0b", 10)
+ self.nodes[1].move("label1", "label1b", 10)
if __name__ == '__main__':
DeprecatedRpcTest().main()
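Every account call above follows the same two-node pattern: the call must fail with RPC error -32 and a "<method> is deprecated" message on the node without the switch, and succeed on the node started with -deprecatedrpc=accounts. A hedged helper capturing that pattern (names are illustrative; assert_raises_rpc_error is the framework helper already imported above):

    def check_deprecated(default_node, optin_node, method, *args):
        # Rejected without the -deprecatedrpc switch ...
        assert_raises_rpc_error(-32, "%s is deprecated" % method,
                                getattr(default_node, method), *args)
        # ... but still usable on the node that opted back in.
        return getattr(optin_node, method)(*args)

    # e.g. the getaccount block above collapses to:
    # check_deprecated(self.nodes[0], self.nodes[1], "getaccount", address0)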
diff --git a/test/functional/rpc_fundrawtransaction.py b/test/functional/rpc_fundrawtransaction.py
index 4d3be18516..5fb9a361d9 100755
--- a/test/functional/rpc_fundrawtransaction.py
+++ b/test/functional/rpc_fundrawtransaction.py
@@ -53,7 +53,7 @@ class RawTransactionsTest(BitcoinTestFramework):
assert_equal(rawmatch["changepos"], -1)
watchonly_address = self.nodes[0].getnewaddress()
- watchonly_pubkey = self.nodes[0].validateaddress(watchonly_address)["pubkey"]
+ watchonly_pubkey = self.nodes[0].getaddressinfo(watchonly_address)["pubkey"]
watchonly_amount = Decimal(200)
self.nodes[3].importpubkey(watchonly_pubkey, "", True)
watchonly_txid = self.nodes[0].sendtoaddress(watchonly_address, watchonly_amount)
@@ -371,8 +371,8 @@ class RawTransactionsTest(BitcoinTestFramework):
addr1 = self.nodes[1].getnewaddress()
addr2 = self.nodes[1].getnewaddress()
- addr1Obj = self.nodes[1].validateaddress(addr1)
- addr2Obj = self.nodes[1].validateaddress(addr2)
+ addr1Obj = self.nodes[1].getaddressinfo(addr1)
+ addr2Obj = self.nodes[1].getaddressinfo(addr2)
mSigObj = self.nodes[1].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])['address']
@@ -401,11 +401,11 @@ class RawTransactionsTest(BitcoinTestFramework):
addr4 = self.nodes[1].getnewaddress()
addr5 = self.nodes[1].getnewaddress()
- addr1Obj = self.nodes[1].validateaddress(addr1)
- addr2Obj = self.nodes[1].validateaddress(addr2)
- addr3Obj = self.nodes[1].validateaddress(addr3)
- addr4Obj = self.nodes[1].validateaddress(addr4)
- addr5Obj = self.nodes[1].validateaddress(addr5)
+ addr1Obj = self.nodes[1].getaddressinfo(addr1)
+ addr2Obj = self.nodes[1].getaddressinfo(addr2)
+ addr3Obj = self.nodes[1].getaddressinfo(addr3)
+ addr4Obj = self.nodes[1].getaddressinfo(addr4)
+ addr5Obj = self.nodes[1].getaddressinfo(addr5)
mSigObj = self.nodes[1].addmultisigaddress(4, [addr1Obj['pubkey'], addr2Obj['pubkey'], addr3Obj['pubkey'], addr4Obj['pubkey'], addr5Obj['pubkey']])['address']
@@ -431,8 +431,8 @@ class RawTransactionsTest(BitcoinTestFramework):
addr1 = self.nodes[2].getnewaddress()
addr2 = self.nodes[2].getnewaddress()
- addr1Obj = self.nodes[2].validateaddress(addr1)
- addr2Obj = self.nodes[2].validateaddress(addr2)
+ addr1Obj = self.nodes[2].getaddressinfo(addr1)
+ addr2Obj = self.nodes[2].getaddressinfo(addr2)
mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])['address']
@@ -449,7 +449,7 @@ class RawTransactionsTest(BitcoinTestFramework):
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[2].fundrawtransaction(rawtx)
- signedTx = self.nodes[2].signrawtransaction(fundedTx['hex'])
+ signedTx = self.nodes[2].signrawtransactionwithwallet(fundedTx['hex'])
txId = self.nodes[2].sendrawtransaction(signedTx['hex'])
self.sync_all()
self.nodes[1].generate(1)
@@ -503,7 +503,7 @@ class RawTransactionsTest(BitcoinTestFramework):
#now we need to unlock
self.nodes[1].walletpassphrase("test", 600)
- signedTx = self.nodes[1].signrawtransaction(fundedTx['hex'])
+ signedTx = self.nodes[1].signrawtransactionwithwallet(fundedTx['hex'])
txId = self.nodes[1].sendrawtransaction(signedTx['hex'])
self.nodes[1].generate(1)
self.sync_all()
@@ -564,7 +564,7 @@ class RawTransactionsTest(BitcoinTestFramework):
outputs = {self.nodes[0].getnewaddress():0.15,self.nodes[0].getnewaddress():0.04}
rawtx = self.nodes[1].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[1].fundrawtransaction(rawtx)
- fundedAndSignedTx = self.nodes[1].signrawtransaction(fundedTx['hex'])
+ fundedAndSignedTx = self.nodes[1].signrawtransactionwithwallet(fundedTx['hex'])
txId = self.nodes[1].sendrawtransaction(fundedAndSignedTx['hex'])
self.sync_all()
self.nodes[0].generate(1)
@@ -622,9 +622,9 @@ class RawTransactionsTest(BitcoinTestFramework):
assert_greater_than(result["changepos"], -1)
assert_equal(result["fee"] + res_dec["vout"][result["changepos"]]["value"], watchonly_amount / 10)
- signedtx = self.nodes[3].signrawtransaction(result["hex"])
+ signedtx = self.nodes[3].signrawtransactionwithwallet(result["hex"])
assert(not signedtx["complete"])
- signedtx = self.nodes[0].signrawtransaction(signedtx["hex"])
+ signedtx = self.nodes[0].signrawtransactionwithwallet(signedtx["hex"])
assert(signedtx["complete"])
self.nodes[0].sendrawtransaction(signedtx["hex"])
self.nodes[0].generate(1)
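The edits in this file are two mechanical RPC migrations: wallet address details now come from getaddressinfo instead of validateaddress, and wallet signing moves from the deprecated signrawtransaction to signrawtransactionwithwallet. A hedged sketch of the fund/sign/send flow the test repeats, with the renamed call in place:

    def fund_sign_send(node, inputs, outputs):
        rawtx = node.createrawtransaction(inputs, outputs)
        funded = node.fundrawtransaction(rawtx)
        # Previously: node.signrawtransaction(funded['hex'])
        signed = node.signrawtransactionwithwallet(funded['hex'])
        assert signed['complete']
        return node.sendrawtransaction(signed['hex'])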
diff --git a/test/functional/rpc_net.py b/test/functional/rpc_net.py
index 16e4f6adb4..72b5f4748f 100755
--- a/test/functional/rpc_net.py
+++ b/test/functional/rpc_net.py
@@ -7,14 +7,14 @@
Tests correspond to code in rpc/net.cpp.
"""
-import time
-
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
+ assert_greater_than_or_equal,
assert_raises_rpc_error,
connect_nodes_bi,
p2p_port,
+ wait_until,
)
class NetTest(BitcoinTestFramework):
@@ -34,27 +34,34 @@ class NetTest(BitcoinTestFramework):
assert_equal(self.nodes[0].getconnectioncount(), 2)
def _test_getnettotals(self):
- # check that getnettotals totalbytesrecv and totalbytessent
- # are consistent with getpeerinfo
+ # getnettotals totalbytesrecv and totalbytessent should be
+ # consistent with getpeerinfo. Since the RPC calls are not atomic,
+ # and messages might have been received or sent between RPC calls, call
+ # getnettotals before and after and verify that the returned values
+ # from getpeerinfo are bounded by those values.
+ net_totals_before = self.nodes[0].getnettotals()
peer_info = self.nodes[0].getpeerinfo()
+ net_totals_after = self.nodes[0].getnettotals()
assert_equal(len(peer_info), 2)
- net_totals = self.nodes[0].getnettotals()
- assert_equal(sum([peer['bytesrecv'] for peer in peer_info]),
- net_totals['totalbytesrecv'])
- assert_equal(sum([peer['bytessent'] for peer in peer_info]),
- net_totals['totalbytessent'])
+ peers_recv = sum([peer['bytesrecv'] for peer in peer_info])
+ peers_sent = sum([peer['bytessent'] for peer in peer_info])
+
+ assert_greater_than_or_equal(peers_recv, net_totals_before['totalbytesrecv'])
+ assert_greater_than_or_equal(net_totals_after['totalbytesrecv'], peers_recv)
+ assert_greater_than_or_equal(peers_sent, net_totals_before['totalbytessent'])
+ assert_greater_than_or_equal(net_totals_after['totalbytessent'], peers_sent)
+
# test getnettotals and getpeerinfo by doing a ping
# the bytes sent/received should change
# note ping and pong are 32 bytes each
self.nodes[0].ping()
- time.sleep(0.1)
+ wait_until(lambda: (self.nodes[0].getnettotals()['totalbytessent'] >= net_totals_after['totalbytessent'] + 32 * 2), timeout=1)
+ wait_until(lambda: (self.nodes[0].getnettotals()['totalbytesrecv'] >= net_totals_after['totalbytesrecv'] + 32 * 2), timeout=1)
+
peer_info_after_ping = self.nodes[0].getpeerinfo()
- net_totals_after_ping = self.nodes[0].getnettotals()
for before, after in zip(peer_info, peer_info_after_ping):
- assert_equal(before['bytesrecv_per_msg']['pong'] + 32, after['bytesrecv_per_msg']['pong'])
- assert_equal(before['bytessent_per_msg']['ping'] + 32, after['bytessent_per_msg']['ping'])
- assert_equal(net_totals['totalbytesrecv'] + 32*2, net_totals_after_ping['totalbytesrecv'])
- assert_equal(net_totals['totalbytessent'] + 32*2, net_totals_after_ping['totalbytessent'])
+ assert_greater_than_or_equal(after['bytesrecv_per_msg']['pong'], before['bytesrecv_per_msg']['pong'] + 32)
+ assert_greater_than_or_equal(after['bytessent_per_msg']['ping'], before['bytessent_per_msg']['ping'] + 32)
def _test_getnetworkinginfo(self):
assert_equal(self.nodes[0].getnetworkinfo()['networkactive'], True)
@@ -62,12 +69,8 @@ class NetTest(BitcoinTestFramework):
self.nodes[0].setnetworkactive(False)
assert_equal(self.nodes[0].getnetworkinfo()['networkactive'], False)
- timeout = 3
- while self.nodes[0].getnetworkinfo()['connections'] != 0:
- # Wait a bit for all sockets to close
- assert timeout > 0, 'not all connections closed in time'
- timeout -= 0.1
- time.sleep(0.1)
+ # Wait a bit for all sockets to close
+ wait_until(lambda: self.nodes[0].getnetworkinfo()['connections'] == 0, timeout=3)
self.nodes[0].setnetworkactive(True)
connect_nodes_bi(self.nodes, 0, 1)
@@ -84,8 +87,7 @@ class NetTest(BitcoinTestFramework):
assert_equal(len(added_nodes), 1)
assert_equal(added_nodes[0]['addednode'], ip_port)
# check that a non-existent node returns an error
- assert_raises_rpc_error(-24, "Node has not been added",
- self.nodes[0].getaddednodeinfo, '1.1.1.1')
+ assert_raises_rpc_error(-24, "Node has not been added", self.nodes[0].getaddednodeinfo, '1.1.1.1')
def _test_getpeerinfo(self):
peer_info = [x.getpeerinfo() for x in self.nodes]
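The rewritten _test_getnettotals leans on a simple monotonicity argument: the byte counters only grow and the three RPC snapshots are taken in order, so the per-peer sums from getpeerinfo must fall between the getnettotals readings taken immediately before and after it. A small self-contained sketch of that invariant with made-up snapshots:

    def check_bounds(net_before, peer_info, net_after):
        peers_recv = sum(peer['bytesrecv'] for peer in peer_info)
        peers_sent = sum(peer['bytessent'] for peer in peer_info)
        assert net_before['totalbytesrecv'] <= peers_recv <= net_after['totalbytesrecv']
        assert net_before['totalbytessent'] <= peers_sent <= net_after['totalbytessent']

    # e.g. with hypothetical snapshots taken around a getpeerinfo call:
    check_bounds({'totalbytesrecv': 100, 'totalbytessent': 80},
                 [{'bytesrecv': 60, 'bytessent': 50}, {'bytesrecv': 45, 'bytessent': 35}],
                 {'totalbytesrecv': 110, 'totalbytessent': 90})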
diff --git a/test/functional/rpc_preciousblock.py b/test/functional/rpc_preciousblock.py
index 960cd0ad12..796a2edbef 100755
--- a/test/functional/rpc_preciousblock.py
+++ b/test/functional/rpc_preciousblock.py
@@ -8,7 +8,6 @@ from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
connect_nodes_bi,
- sync_chain,
sync_blocks,
)
@@ -72,7 +71,7 @@ class PreciousTest(BitcoinTestFramework):
assert_equal(self.nodes[0].getbestblockhash(), hashC)
self.log.info("Make Node1 prefer block C")
self.nodes[1].preciousblock(hashC)
- sync_chain(self.nodes[0:2]) # wait because node 1 may not have downloaded hashC
+ sync_blocks(self.nodes[0:2]) # wait because node 1 may not have downloaded hashC
assert_equal(self.nodes[1].getbestblockhash(), hashC)
self.log.info("Make Node1 prefer block G again")
self.nodes[1].preciousblock(hashG)
diff --git a/test/functional/rpc_rawtransaction.py b/test/functional/rpc_rawtransaction.py
index 92126ef4b7..48b4a4a9db 100755
--- a/test/functional/rpc_rawtransaction.py
+++ b/test/functional/rpc_rawtransaction.py
@@ -6,16 +6,18 @@
Test the following RPCs:
- createrawtransaction
- - signrawtransaction
+ - signrawtransactionwithwallet
- sendrawtransaction
- decoderawtransaction
- getrawtransaction
"""
+from collections import OrderedDict
+from io import BytesIO
+from test_framework.messages import CTransaction, ToHex
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
-
class multidict(dict):
"""Dictionary that allows duplicate keys.
@@ -43,11 +45,10 @@ class RawTransactionsTest(BitcoinTestFramework):
def setup_network(self, split=False):
super().setup_network()
- connect_nodes_bi(self.nodes,0,2)
+ connect_nodes_bi(self.nodes, 0, 2)
def run_test(self):
-
- #prepare some coins for multiple *rawtransaction commands
+ self.log.info('prepare some coins for multiple *rawtransaction commands')
self.nodes[2].generate(1)
self.sync_all()
self.nodes[0].generate(101)
@@ -59,10 +60,11 @@ class RawTransactionsTest(BitcoinTestFramework):
self.nodes[0].generate(5)
self.sync_all()
- # Test getrawtransaction on genesis block coinbase returns an error
+ self.log.info('Test getrawtransaction on genesis block coinbase returns an error')
block = self.nodes[0].getblock(self.nodes[0].getblockhash(0))
assert_raises_rpc_error(-5, "The genesis block coinbase is not considered an ordinary transaction", self.nodes[0].getrawtransaction, block['merkleroot'])
+ self.log.info('Check parameter types and required parameters of createrawtransaction')
# Test `createrawtransaction` required parameters
assert_raises_rpc_error(-1, "createrawtransaction", self.nodes[0].createrawtransaction)
assert_raises_rpc_error(-1, "createrawtransaction", self.nodes[0].createrawtransaction, [])
@@ -83,12 +85,18 @@ class RawTransactionsTest(BitcoinTestFramework):
# Test `createrawtransaction` invalid `outputs`
address = self.nodes[0].getnewaddress()
- assert_raises_rpc_error(-3, "Expected type object", self.nodes[0].createrawtransaction, [], 'foo')
+ address2 = self.nodes[0].getnewaddress()
+ assert_raises_rpc_error(-1, "JSON value is not an array as expected", self.nodes[0].createrawtransaction, [], 'foo')
+ self.nodes[0].createrawtransaction(inputs=[], outputs={}) # Should not throw for backwards compatibility
+ self.nodes[0].createrawtransaction(inputs=[], outputs=[])
assert_raises_rpc_error(-8, "Data must be hexadecimal string", self.nodes[0].createrawtransaction, [], {'data': 'foo'})
assert_raises_rpc_error(-5, "Invalid Bitcoin address", self.nodes[0].createrawtransaction, [], {'foo': 0})
assert_raises_rpc_error(-3, "Invalid amount", self.nodes[0].createrawtransaction, [], {address: 'foo'})
assert_raises_rpc_error(-3, "Amount out of range", self.nodes[0].createrawtransaction, [], {address: -1})
assert_raises_rpc_error(-8, "Invalid parameter, duplicated address: %s" % address, self.nodes[0].createrawtransaction, [], multidict([(address, 1), (address, 1)]))
+ assert_raises_rpc_error(-8, "Invalid parameter, duplicated address: %s" % address, self.nodes[0].createrawtransaction, [], [{address: 1}, {address: 1}])
+ assert_raises_rpc_error(-8, "Invalid parameter, key-value pair must contain exactly one key", self.nodes[0].createrawtransaction, [], [{'a': 1, 'b': 2}])
+ assert_raises_rpc_error(-8, "Invalid parameter, key-value pair not an object as expected", self.nodes[0].createrawtransaction, [], [['key-value pair1'], ['2']])
# Test `createrawtransaction` invalid `locktime`
assert_raises_rpc_error(-3, "Expected type number", self.nodes[0].createrawtransaction, [], {}, 'foo')
@@ -98,13 +106,42 @@ class RawTransactionsTest(BitcoinTestFramework):
# Test `createrawtransaction` invalid `replaceable`
assert_raises_rpc_error(-3, "Expected type bool", self.nodes[0].createrawtransaction, [], {}, 0, 'foo')
- #########################################
- # sendrawtransaction with missing input #
- #########################################
+ self.log.info('Check that createrawtransaction accepts an array and object as outputs')
+ tx = CTransaction()
+ # One output
+ tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs={address: 99}))))
+ assert_equal(len(tx.vout), 1)
+ assert_equal(
+ bytes_to_hex_str(tx.serialize()),
+ self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=[{address: 99}]),
+ )
+ # Two outputs
+ tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=OrderedDict([(address, 99), (address2, 99)])))))
+ assert_equal(len(tx.vout), 2)
+ assert_equal(
+ bytes_to_hex_str(tx.serialize()),
+ self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=[{address: 99}, {address2: 99}]),
+ )
+ # Two data outputs
+ tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=multidict([('data', '99'), ('data', '99')])))))
+ assert_equal(len(tx.vout), 2)
+ assert_equal(
+ bytes_to_hex_str(tx.serialize()),
+ self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=[{'data': '99'}, {'data': '99'}]),
+ )
+ # Multiple mixed outputs
+ tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=multidict([(address, 99), ('data', '99'), ('data', '99')])))))
+ assert_equal(len(tx.vout), 3)
+ assert_equal(
+ bytes_to_hex_str(tx.serialize()),
+ self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=[{address: 99}, {'data': '99'}, {'data': '99'}]),
+ )
+
+ self.log.info('sendrawtransaction with missing input')
inputs = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1}] #won't exists
outputs = { self.nodes[0].getnewaddress() : 4.998 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
- rawtx = self.nodes[2].signrawtransaction(rawtx)
+ rawtx = self.nodes[2].signrawtransactionwithwallet(rawtx)
# This will raise an exception since there are missing inputs
assert_raises_rpc_error(-25, "Missing inputs", self.nodes[2].sendrawtransaction, rawtx['hex'])
@@ -146,8 +183,8 @@ class RawTransactionsTest(BitcoinTestFramework):
addr1 = self.nodes[2].getnewaddress()
addr2 = self.nodes[2].getnewaddress()
- addr1Obj = self.nodes[2].validateaddress(addr1)
- addr2Obj = self.nodes[2].validateaddress(addr2)
+ addr1Obj = self.nodes[2].getaddressinfo(addr1)
+ addr2Obj = self.nodes[2].getaddressinfo(addr2)
# Tests for createmultisig and addmultisigaddress
assert_raises_rpc_error(-5, "Invalid public key", self.nodes[0].createmultisig, 1, ["01020304"])
@@ -173,9 +210,9 @@ class RawTransactionsTest(BitcoinTestFramework):
addr2 = self.nodes[2].getnewaddress()
addr3 = self.nodes[2].getnewaddress()
- addr1Obj = self.nodes[1].validateaddress(addr1)
- addr2Obj = self.nodes[2].validateaddress(addr2)
- addr3Obj = self.nodes[2].validateaddress(addr3)
+ addr1Obj = self.nodes[1].getaddressinfo(addr1)
+ addr2Obj = self.nodes[2].getaddressinfo(addr2)
+ addr3Obj = self.nodes[2].getaddressinfo(addr3)
mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey'], addr3Obj['pubkey']])['address']
@@ -202,10 +239,10 @@ class RawTransactionsTest(BitcoinTestFramework):
inputs = [{ "txid" : txId, "vout" : vout['n'], "scriptPubKey" : vout['scriptPubKey']['hex'], "amount" : vout['value']}]
outputs = { self.nodes[0].getnewaddress() : 2.19 }
rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
- rawTxPartialSigned = self.nodes[1].signrawtransaction(rawTx, inputs)
+ rawTxPartialSigned = self.nodes[1].signrawtransactionwithwallet(rawTx, inputs)
assert_equal(rawTxPartialSigned['complete'], False) #node1 only has one key, can't comp. sign the tx
- rawTxSigned = self.nodes[2].signrawtransaction(rawTx, inputs)
+ rawTxSigned = self.nodes[2].signrawtransactionwithwallet(rawTx, inputs)
assert_equal(rawTxSigned['complete'], True) #node2 can sign the tx compl., own two of three keys
self.nodes[2].sendrawtransaction(rawTxSigned['hex'])
rawTx = self.nodes[0].decoderawtransaction(rawTxSigned['hex'])
@@ -219,12 +256,12 @@ class RawTransactionsTest(BitcoinTestFramework):
addr1 = self.nodes[1].getnewaddress()
addr2 = self.nodes[2].getnewaddress()
- addr1Obj = self.nodes[1].validateaddress(addr1)
- addr2Obj = self.nodes[2].validateaddress(addr2)
+ addr1Obj = self.nodes[1].getaddressinfo(addr1)
+ addr2Obj = self.nodes[2].getaddressinfo(addr2)
self.nodes[1].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])['address']
mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])['address']
- mSigObjValid = self.nodes[2].validateaddress(mSigObj)
+ mSigObjValid = self.nodes[2].getaddressinfo(mSigObj)
txId = self.nodes[0].sendtoaddress(mSigObj, 2.2)
decTx = self.nodes[0].gettransaction(txId)
@@ -247,15 +284,15 @@ class RawTransactionsTest(BitcoinTestFramework):
inputs = [{ "txid" : txId, "vout" : vout['n'], "scriptPubKey" : vout['scriptPubKey']['hex'], "redeemScript" : mSigObjValid['hex'], "amount" : vout['value']}]
outputs = { self.nodes[0].getnewaddress() : 2.19 }
rawTx2 = self.nodes[2].createrawtransaction(inputs, outputs)
- rawTxPartialSigned1 = self.nodes[1].signrawtransaction(rawTx2, inputs)
- self.log.info(rawTxPartialSigned1)
- assert_equal(rawTxPartialSigned['complete'], False) #node1 only has one key, can't comp. sign the tx
+ rawTxPartialSigned1 = self.nodes[1].signrawtransactionwithwallet(rawTx2, inputs)
+ self.log.debug(rawTxPartialSigned1)
+ assert_equal(rawTxPartialSigned1['complete'], False) #node1 only has one key, can't comp. sign the tx
- rawTxPartialSigned2 = self.nodes[2].signrawtransaction(rawTx2, inputs)
- self.log.info(rawTxPartialSigned2)
+ rawTxPartialSigned2 = self.nodes[2].signrawtransactionwithwallet(rawTx2, inputs)
+ self.log.debug(rawTxPartialSigned2)
assert_equal(rawTxPartialSigned2['complete'], False) #node2 only has one key, can't comp. sign the tx
rawTxComb = self.nodes[2].combinerawtransaction([rawTxPartialSigned1['hex'], rawTxPartialSigned2['hex']])
- self.log.info(rawTxComb)
+ self.log.debug(rawTxComb)
self.nodes[2].sendrawtransaction(rawTxComb)
rawTx2 = self.nodes[0].decoderawtransaction(rawTxComb)
self.sync_all()
@@ -273,7 +310,7 @@ class RawTransactionsTest(BitcoinTestFramework):
encrawtx = "01000000010000000000000072c1a6a246ae63f74f931e8365e15a089c68d61900000000000000000000ffffffff0100e1f505000000000000000000"
decrawtx = self.nodes[0].decoderawtransaction(encrawtx, False) # decode as non-witness transaction
assert_equal(decrawtx['vout'][0]['value'], Decimal('1.00000000'))
-
+
# getrawtransaction tests
# 1. valid parameters - only supply txid
txHash = rawTx["hash"]
@@ -323,5 +360,23 @@ class RawTransactionsTest(BitcoinTestFramework):
decrawtx= self.nodes[0].decoderawtransaction(rawtx)
assert_equal(decrawtx['vin'][0]['sequence'], 4294967294)
+ ####################################
+ # TRANSACTION VERSION NUMBER TESTS #
+ ####################################
+
+ # Test the minimum transaction version number that fits in a signed 32-bit integer.
+ tx = CTransaction()
+ tx.nVersion = -0x80000000
+ rawtx = ToHex(tx)
+ decrawtx = self.nodes[0].decoderawtransaction(rawtx)
+ assert_equal(decrawtx['version'], -0x80000000)
+
+ # Test the maximum transaction version number that fits in a signed 32-bit integer.
+ tx = CTransaction()
+ tx.nVersion = 0x7fffffff
+ rawtx = ToHex(tx)
+ decrawtx = self.nodes[0].decoderawtransaction(rawtx)
+ assert_equal(decrawtx['version'], 0x7fffffff)
+
if __name__ == '__main__':
RawTransactionsTest().main()
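The version-number tests at the end pick their constants from the serialization format: nVersion is encoded as a signed 32-bit little-endian integer, so the extremes are -2**31 and 2**31 - 1. A quick check of those bounds against the on-wire encoding:

    import struct

    assert -0x80000000 == -2**31 and 0x7fffffff == 2**31 - 1
    for version in (-0x80000000, 0x7fffffff):
        # "<i" is a signed 32-bit little-endian integer, matching how the
        # framework's CTransaction serializes nVersion.
        assert struct.unpack("<i", struct.pack("<i", version))[0] == version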
diff --git a/test/functional/rpc_signrawtransaction.py b/test/functional/rpc_signrawtransaction.py
index dd0fa6c02c..18829ef4b8 100755
--- a/test/functional/rpc_signrawtransaction.py
+++ b/test/functional/rpc_signrawtransaction.py
@@ -2,7 +2,7 @@
# Copyright (c) 2015-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
-"""Test transaction signing using the signrawtransaction RPC."""
+"""Test transaction signing using the signrawtransaction* RPCs."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
@@ -12,6 +12,7 @@ class SignRawTransactionsTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
+ self.extra_args = [["-deprecatedrpc=signrawtransaction"]]
def successful_signing_test(self):
"""Create and sign a valid raw transaction with one input.
@@ -33,15 +34,18 @@ class SignRawTransactionsTest(BitcoinTestFramework):
outputs = {'mpLQjfK79b7CCV4VMJWEWAj5Mpx8Up5zxB': 0.1}
rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
- rawTxSigned = self.nodes[0].signrawtransaction(rawTx, inputs, privKeys)
+ rawTxSigned = self.nodes[0].signrawtransactionwithkey(rawTx, privKeys, inputs)
# 1) The transaction has a complete set of signatures
- assert 'complete' in rawTxSigned
- assert_equal(rawTxSigned['complete'], True)
+ assert rawTxSigned['complete']
# 2) No script verification error occurred
assert 'errors' not in rawTxSigned
+ # Perform the same test on signrawtransaction
+ rawTxSigned2 = self.nodes[0].signrawtransaction(rawTx, inputs, privKeys)
+ assert_equal(rawTxSigned, rawTxSigned2)
+
def script_verification_error_test(self):
"""Create and sign a raw transaction with valid (vin 0), invalid (vin 1) and one missing (vin 2) input script.
@@ -84,11 +88,10 @@ class SignRawTransactionsTest(BitcoinTestFramework):
# Make sure decoderawtransaction throws if there is extra data
assert_raises_rpc_error(-22, "TX decode failed", self.nodes[0].decoderawtransaction, rawTx + "00")
- rawTxSigned = self.nodes[0].signrawtransaction(rawTx, scripts, privKeys)
+ rawTxSigned = self.nodes[0].signrawtransactionwithkey(rawTx, privKeys, scripts)
# 3) The transaction has no complete set of signatures
- assert 'complete' in rawTxSigned
- assert_equal(rawTxSigned['complete'], False)
+ assert not rawTxSigned['complete']
# 4) Two script verification errors occurred
assert 'errors' in rawTxSigned
@@ -109,14 +112,17 @@ class SignRawTransactionsTest(BitcoinTestFramework):
assert_equal(rawTxSigned['errors'][1]['vout'], inputs[2]['vout'])
assert not rawTxSigned['errors'][0]['witness']
+ # Perform same test with signrawtransaction
+ rawTxSigned2 = self.nodes[0].signrawtransaction(rawTx, scripts, privKeys)
+ assert_equal(rawTxSigned, rawTxSigned2)
+
# Now test signing failure for transaction with input witnesses
p2wpkh_raw_tx = "01000000000102fff7f7881a8099afa6940d42d1e7f6362bec38171ea3edf433541db4e4ad969f00000000494830450221008b9d1dc26ba6a9cb62127b02742fa9d754cd3bebf337f7a55d114c8e5cdd30be022040529b194ba3f9281a99f2b1c0a19c0489bc22ede944ccf4ecbab4cc618ef3ed01eeffffffef51e1b804cc89d182d279655c3aa89e815b1b309fe287d9b2b55d57b90ec68a0100000000ffffffff02202cb206000000001976a9148280b37df378db99f66f85c95a783a76ac7a6d5988ac9093510d000000001976a9143bde42dbee7e4dbe6a21b2d50ce2f0167faa815988ac000247304402203609e17b84f6a7d30c80bfa610b5b4542f32a8a0d5447a12fb1366d7f01cc44a0220573a954c4518331561406f90300e8f3358f51928d43c212a8caed02de67eebee0121025476c2e83188368da1ff3e292e7acafcdb3566bb0ad253f62fc70f07aeee635711000000"
- rawTxSigned = self.nodes[0].signrawtransaction(p2wpkh_raw_tx)
+ rawTxSigned = self.nodes[0].signrawtransactionwithwallet(p2wpkh_raw_tx)
# 7) The transaction has no complete set of signatures
- assert 'complete' in rawTxSigned
- assert_equal(rawTxSigned['complete'], False)
+ assert not rawTxSigned['complete']
# 8) Two script verification errors occurred
assert 'errors' in rawTxSigned
@@ -134,6 +140,10 @@ class SignRawTransactionsTest(BitcoinTestFramework):
assert_equal(rawTxSigned['errors'][1]['witness'], ["304402203609e17b84f6a7d30c80bfa610b5b4542f32a8a0d5447a12fb1366d7f01cc44a0220573a954c4518331561406f90300e8f3358f51928d43c212a8caed02de67eebee01", "025476c2e83188368da1ff3e292e7acafcdb3566bb0ad253f62fc70f07aeee6357"])
assert not rawTxSigned['errors'][0]['witness']
+ # Perform same test with signrawtransaction
+ rawTxSigned2 = self.nodes[0].signrawtransaction(p2wpkh_raw_tx)
+ assert_equal(rawTxSigned, rawTxSigned2)
+
def run_test(self):
self.successful_signing_test()
self.script_verification_error_test()
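For context, a rough sketch (hypothetical snippet, not taken from the commit) of the RPC split this test now exercises: signrawtransactionwithkey takes private keys explicitly, signrawtransactionwithwallet uses the node's wallet, and the legacy signrawtransaction call keeps working only while the node runs with -deprecatedrpc=signrawtransaction.

    raw_tx = node.createrawtransaction(inputs, outputs)
    # Explicit keys: no wallet needed.
    with_key = node.signrawtransactionwithkey(raw_tx, priv_keys, prev_txs)
    # Wallet keys: the node looks up the keys for the inputs itself.
    with_wallet = node.signrawtransactionwithwallet(raw_tx)
    # Deprecated alias, only reachable with -deprecatedrpc=signrawtransaction.
    legacy = node.signrawtransaction(raw_tx, prev_txs, priv_keys)
    assert legacy == with_key
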
diff --git a/test/functional/rpc_txoutproof.py b/test/functional/rpc_txoutproof.py
index 50e0371fdf..c52a7397dc 100755
--- a/test/functional/rpc_txoutproof.py
+++ b/test/functional/rpc_txoutproof.py
@@ -34,9 +34,9 @@ class MerkleBlockTest(BitcoinTestFramework):
node0utxos = self.nodes[0].listunspent(1)
tx1 = self.nodes[0].createrawtransaction([node0utxos.pop()], {self.nodes[1].getnewaddress(): 49.99})
- txid1 = self.nodes[0].sendrawtransaction(self.nodes[0].signrawtransaction(tx1)["hex"])
+ txid1 = self.nodes[0].sendrawtransaction(self.nodes[0].signrawtransactionwithwallet(tx1)["hex"])
tx2 = self.nodes[0].createrawtransaction([node0utxos.pop()], {self.nodes[1].getnewaddress(): 49.99})
- txid2 = self.nodes[0].sendrawtransaction(self.nodes[0].signrawtransaction(tx2)["hex"])
+ txid2 = self.nodes[0].sendrawtransaction(self.nodes[0].signrawtransactionwithwallet(tx2)["hex"])
# This will raise an exception because the transaction is not yet in a block
assert_raises_rpc_error(-5, "Transaction not yet in block", self.nodes[0].gettxoutproof, [txid1])
@@ -55,7 +55,7 @@ class MerkleBlockTest(BitcoinTestFramework):
txin_spent = self.nodes[1].listunspent(1).pop()
tx3 = self.nodes[1].createrawtransaction([txin_spent], {self.nodes[0].getnewaddress(): 49.98})
- txid3 = self.nodes[0].sendrawtransaction(self.nodes[1].signrawtransaction(tx3)["hex"])
+ txid3 = self.nodes[0].sendrawtransaction(self.nodes[1].signrawtransactionwithwallet(tx3)["hex"])
self.nodes[0].generate(1)
self.sync_all()
diff --git a/test/functional/rpc_users.py b/test/functional/rpc_users.py
index 01f68344ae..1ef59da5ad 100755
--- a/test/functional/rpc_users.py
+++ b/test/functional/rpc_users.py
@@ -5,13 +5,22 @@
"""Test multiple RPC users."""
from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import str_to_b64str, assert_equal
+from test_framework.util import (
+ assert_equal,
+ get_datadir_path,
+ str_to_b64str,
+)
import os
import http.client
import urllib.parse
+import subprocess
+from random import SystemRandom
+import string
+import configparser
-class HTTPBasicsTest (BitcoinTestFramework):
+
+class HTTPBasicsTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 2
@@ -22,10 +31,21 @@ class HTTPBasicsTest (BitcoinTestFramework):
rpcauth2 = "rpcauth=rt2:f8607b1a88861fac29dfccf9b52ff9f$ff36a0c23c8c62b4846112e50fa888416e94c17bfd4c42f88fd8f55ec6a3137e"
rpcuser = "rpcuser=rpcuser💻"
rpcpassword = "rpcpassword=rpcpassword🔑"
- with open(os.path.join(self.options.tmpdir+"/node0", "bitcoin.conf"), 'a', encoding='utf8') as f:
+
+ self.user = ''.join(SystemRandom().choice(string.ascii_letters + string.digits) for _ in range(10))
+ config = configparser.ConfigParser()
+ config.read_file(open(self.options.configfile))
+ gen_rpcauth = config['environment']['RPCAUTH']
+ p = subprocess.Popen([gen_rpcauth, self.user], stdout=subprocess.PIPE, universal_newlines=True)
+ lines = p.stdout.read().splitlines()
+ rpcauth3 = lines[1]
+ self.password = lines[3]
+
+ with open(os.path.join(get_datadir_path(self.options.tmpdir, 0), "bitcoin.conf"), 'a', encoding='utf8') as f:
f.write(rpcauth+"\n")
f.write(rpcauth2+"\n")
- with open(os.path.join(self.options.tmpdir+"/node1", "bitcoin.conf"), 'a', encoding='utf8') as f:
+ f.write(rpcauth3+"\n")
+ with open(os.path.join(get_datadir_path(self.options.tmpdir, 1), "bitcoin.conf"), 'a', encoding='utf8') as f:
f.write(rpcuser+"\n")
f.write(rpcpassword+"\n")
@@ -46,6 +66,7 @@ class HTTPBasicsTest (BitcoinTestFramework):
password2 = "8/F3uMDw4KSEbw96U3CA1C4X05dkHDN2BPFjTgZW4KI="
authpairnew = "rt:"+password
+ self.log.info('Correct...')
headers = {"Authorization": "Basic " + str_to_b64str(authpair)}
conn = http.client.HTTPConnection(url.hostname, url.port)
@@ -54,8 +75,9 @@ class HTTPBasicsTest (BitcoinTestFramework):
resp = conn.getresponse()
assert_equal(resp.status, 200)
conn.close()
-
+
#Use new authpair to confirm both work
+ self.log.info('Correct...')
headers = {"Authorization": "Basic " + str_to_b64str(authpairnew)}
conn = http.client.HTTPConnection(url.hostname, url.port)
@@ -66,6 +88,7 @@ class HTTPBasicsTest (BitcoinTestFramework):
conn.close()
#Wrong login name with rt's password
+ self.log.info('Wrong...')
authpairnew = "rtwrong:"+password
headers = {"Authorization": "Basic " + str_to_b64str(authpairnew)}
@@ -77,6 +100,7 @@ class HTTPBasicsTest (BitcoinTestFramework):
conn.close()
#Wrong password for rt
+ self.log.info('Wrong...')
authpairnew = "rt:"+password+"wrong"
headers = {"Authorization": "Basic " + str_to_b64str(authpairnew)}
@@ -88,6 +112,7 @@ class HTTPBasicsTest (BitcoinTestFramework):
conn.close()
#Correct for rt2
+ self.log.info('Correct...')
authpairnew = "rt2:"+password2
headers = {"Authorization": "Basic " + str_to_b64str(authpairnew)}
@@ -99,6 +124,7 @@ class HTTPBasicsTest (BitcoinTestFramework):
conn.close()
#Wrong password for rt2
+ self.log.info('Wrong...')
authpairnew = "rt2:"+password2+"wrong"
headers = {"Authorization": "Basic " + str_to_b64str(authpairnew)}
@@ -109,12 +135,37 @@ class HTTPBasicsTest (BitcoinTestFramework):
assert_equal(resp.status, 401)
conn.close()
+ #Correct for randomly generated user
+ self.log.info('Correct...')
+ authpairnew = self.user+":"+self.password
+ headers = {"Authorization": "Basic " + str_to_b64str(authpairnew)}
+
+ conn = http.client.HTTPConnection(url.hostname, url.port)
+ conn.connect()
+ conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
+ resp = conn.getresponse()
+ assert_equal(resp.status, 200)
+ conn.close()
+
+ #Wrong password for randomly generated user
+ self.log.info('Wrong...')
+ authpairnew = self.user+":"+self.password+"Wrong"
+ headers = {"Authorization": "Basic " + str_to_b64str(authpairnew)}
+
+ conn = http.client.HTTPConnection(url.hostname, url.port)
+ conn.connect()
+ conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
+ resp = conn.getresponse()
+ assert_equal(resp.status, 401)
+ conn.close()
+
###############################################################
# Check correctness of the rpcuser/rpcpassword config options #
###############################################################
url = urllib.parse.urlparse(self.nodes[1].url)
# rpcuser and rpcpassword authpair
+ self.log.info('Correct...')
rpcuserauthpair = "rpcuser💻:rpcpassword🔑"
headers = {"Authorization": "Basic " + str_to_b64str(rpcuserauthpair)}
@@ -138,6 +189,7 @@ class HTTPBasicsTest (BitcoinTestFramework):
conn.close()
#Wrong password for rpcuser
+ self.log.info('Wrong...')
rpcuserauthpair = "rpcuser:rpcpasswordwrong"
headers = {"Authorization": "Basic " + str_to_b64str(rpcuserauthpair)}
diff --git a/test/functional/test_framework/authproxy.py b/test/functional/test_framework/authproxy.py
index bd3a3b3fab..900090bb66 100644
--- a/test/functional/test_framework/authproxy.py
+++ b/test/functional/test_framework/authproxy.py
@@ -151,7 +151,7 @@ class AuthServiceProxy():
req_start_time = time.time()
try:
http_response = self.__conn.getresponse()
- except socket.timeout as e:
+ except socket.timeout:
raise JSONRPCException({
'code': -344,
'message': '%r RPC took longer than %f seconds. Consider '
diff --git a/test/functional/test_framework/blockstore.py b/test/functional/test_framework/blockstore.py
deleted file mode 100644
index 6067a407cc..0000000000
--- a/test/functional/test_framework/blockstore.py
+++ /dev/null
@@ -1,160 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (c) 2015-2017 The Bitcoin Core developers
-# Distributed under the MIT software license, see the accompanying
-# file COPYING or http://www.opensource.org/licenses/mit-license.php.
-"""BlockStore and TxStore helper classes."""
-
-from .mininode import *
-from io import BytesIO
-import dbm.dumb as dbmd
-
-logger = logging.getLogger("TestFramework.blockstore")
-
-class BlockStore():
- """BlockStore helper class.
-
- BlockStore keeps a map of blocks and implements helper functions for
- responding to getheaders and getdata, and for constructing a getheaders
- message.
- """
-
- def __init__(self, datadir):
- self.blockDB = dbmd.open(datadir + "/blocks", 'c')
- self.currentBlock = 0
- self.headers_map = dict()
-
- def close(self):
- self.blockDB.close()
-
- def erase(self, blockhash):
- del self.blockDB[repr(blockhash)]
-
- # lookup an entry and return the item as raw bytes
- def get(self, blockhash):
- value = None
- try:
- value = self.blockDB[repr(blockhash)]
- except KeyError:
- return None
- return value
-
- # lookup an entry and return it as a CBlock
- def get_block(self, blockhash):
- ret = None
- serialized_block = self.get(blockhash)
- if serialized_block is not None:
- f = BytesIO(serialized_block)
- ret = CBlock()
- ret.deserialize(f)
- ret.calc_sha256()
- return ret
-
- def get_header(self, blockhash):
- try:
- return self.headers_map[blockhash]
- except KeyError:
- return None
-
- # Note: this pulls full blocks out of the database just to retrieve
- # the headers -- perhaps we could keep a separate data structure
- # to avoid this overhead.
- def headers_for(self, locator, hash_stop, current_tip=None):
- if current_tip is None:
- current_tip = self.currentBlock
- current_block_header = self.get_header(current_tip)
- if current_block_header is None:
- return None
-
- response = msg_headers()
- headersList = [ current_block_header ]
- maxheaders = 2000
- while (headersList[0].sha256 not in locator.vHave):
- prevBlockHash = headersList[0].hashPrevBlock
- prevBlockHeader = self.get_header(prevBlockHash)
- if prevBlockHeader is not None:
- headersList.insert(0, prevBlockHeader)
- else:
- break
- headersList = headersList[:maxheaders] # truncate if we have too many
- hashList = [x.sha256 for x in headersList]
- index = len(headersList)
- if (hash_stop in hashList):
- index = hashList.index(hash_stop)+1
- response.headers = headersList[:index]
- return response
-
- def add_block(self, block):
- block.calc_sha256()
- try:
- self.blockDB[repr(block.sha256)] = bytes(block.serialize())
- except TypeError as e:
- logger.exception("Unexpected error")
- self.currentBlock = block.sha256
- self.headers_map[block.sha256] = CBlockHeader(block)
-
- def add_header(self, header):
- self.headers_map[header.sha256] = header
-
- # lookup the hashes in "inv", and return p2p messages for delivering
- # blocks found.
- def get_blocks(self, inv):
- responses = []
- for i in inv:
- if (i.type == 2 or i.type == (2 | (1 << 30))): # MSG_BLOCK or MSG_WITNESS_BLOCK
- data = self.get(i.hash)
- if data is not None:
- # Use msg_generic to avoid re-serialization
- responses.append(msg_generic(b"block", data))
- return responses
-
- def get_locator(self, current_tip=None):
- if current_tip is None:
- current_tip = self.currentBlock
- r = []
- counter = 0
- step = 1
- lastBlock = self.get_block(current_tip)
- while lastBlock is not None:
- r.append(lastBlock.hashPrevBlock)
- for i in range(step):
- lastBlock = self.get_block(lastBlock.hashPrevBlock)
- if lastBlock is None:
- break
- counter += 1
- if counter > 10:
- step *= 2
- locator = CBlockLocator()
- locator.vHave = r
- return locator
-
-class TxStore():
- def __init__(self, datadir):
- self.txDB = dbmd.open(datadir + "/transactions", 'c')
-
- def close(self):
- self.txDB.close()
-
- # lookup an entry and return the item as raw bytes
- def get(self, txhash):
- value = None
- try:
- value = self.txDB[repr(txhash)]
- except KeyError:
- return None
- return value
-
- def add_transaction(self, tx):
- tx.calc_sha256()
- try:
- self.txDB[repr(tx.sha256)] = bytes(tx.serialize())
- except TypeError as e:
- logger.exception("Unexpected error")
-
- def get_transactions(self, inv):
- responses = []
- for i in inv:
- if (i.type == 1 or i.type == (1 | (1 << 30))): # MSG_TX or MSG_WITNESS_TX
- tx = self.get(i.hash)
- if tx is not None:
- responses.append(msg_generic(b"tx", tx))
- return responses
diff --git a/test/functional/test_framework/blocktools.py b/test/functional/test_framework/blocktools.py
index 642ef98a27..5c2b1815e5 100644
--- a/test/functional/test_framework/blocktools.py
+++ b/test/functional/test_framework/blocktools.py
@@ -10,7 +10,24 @@ from .address import (
script_to_p2sh_p2wsh,
script_to_p2wsh,
)
-from .mininode import *
+from .messages import (
+ CBlock,
+ COIN,
+ COutPoint,
+ CTransaction,
+ CTxIn,
+ CTxInWitness,
+ CTxOut,
+ FromHex,
+ ToHex,
+ bytes_to_hex_str,
+ hash256,
+ hex_str_to_bytes,
+ ser_string,
+ ser_uint256,
+ sha256,
+ uint256_from_str,
+)
from .script import (
CScript,
OP_0,
@@ -23,34 +40,34 @@ from .script import (
)
from .util import assert_equal
-# Create a block (with regtest difficulty)
-def create_block(hashprev, coinbase, nTime=None):
+# From BIP141
+WITNESS_COMMITMENT_HEADER = b"\xaa\x21\xa9\xed"
+
+def create_block(hashprev, coinbase, ntime=None):
+ """Create a block (with regtest difficulty)."""
block = CBlock()
- if nTime is None:
+ if ntime is None:
import time
- block.nTime = int(time.time()+600)
+ block.nTime = int(time.time() + 600)
else:
- block.nTime = nTime
+ block.nTime = ntime
block.hashPrevBlock = hashprev
- block.nBits = 0x207fffff # Will break after a difficulty adjustment...
+ block.nBits = 0x207fffff # difficulty retargeting is disabled in REGTEST chainparams
block.vtx.append(coinbase)
block.hashMerkleRoot = block.calc_merkle_root()
block.calc_sha256()
return block
-# From BIP141
-WITNESS_COMMITMENT_HEADER = b"\xaa\x21\xa9\xed"
-
-
def get_witness_script(witness_root, witness_nonce):
- witness_commitment = uint256_from_str(hash256(ser_uint256(witness_root)+ser_uint256(witness_nonce)))
+ witness_commitment = uint256_from_str(hash256(ser_uint256(witness_root) + ser_uint256(witness_nonce)))
output_data = WITNESS_COMMITMENT_HEADER + ser_uint256(witness_commitment)
return CScript([OP_RETURN, output_data])
-
-# According to BIP141, blocks with witness rules active must commit to the
-# hash of all in-block transactions including witness.
def add_witness_commitment(block, nonce=0):
+ """Add a witness commitment to the block's coinbase transaction.
+
+ According to BIP141, blocks with witness rules active must commit to the
+ hash of all in-block transactions including witness."""
# First calculate the merkle root of the block's
# transactions, with witnesses.
witness_nonce = nonce
@@ -65,7 +82,6 @@ def add_witness_commitment(block, nonce=0):
block.hashMerkleRoot = block.calc_merkle_root()
block.rehash()
-
def serialize_script_num(value):
r = bytearray(0)
if value == 0:
@@ -81,55 +97,59 @@ def serialize_script_num(value):
r[-1] |= 0x80
return r
-# Create a coinbase transaction, assuming no miner fees.
-# If pubkey is passed in, the coinbase output will be a P2PK output;
-# otherwise an anyone-can-spend output.
-def create_coinbase(height, pubkey = None):
+def create_coinbase(height, pubkey=None):
+ """Create a coinbase transaction, assuming no miner fees.
+
+ If pubkey is passed in, the coinbase output will be a P2PK output;
+ otherwise an anyone-can-spend output."""
coinbase = CTransaction()
- coinbase.vin.append(CTxIn(COutPoint(0, 0xffffffff),
- ser_string(serialize_script_num(height)), 0xffffffff))
+ coinbase.vin.append(CTxIn(COutPoint(0, 0xffffffff),
+ ser_string(serialize_script_num(height)), 0xffffffff))
coinbaseoutput = CTxOut()
coinbaseoutput.nValue = 50 * COIN
- halvings = int(height/150) # regtest
+ halvings = int(height / 150) # regtest
coinbaseoutput.nValue >>= halvings
- if (pubkey != None):
+ if (pubkey is not None):
coinbaseoutput.scriptPubKey = CScript([pubkey, OP_CHECKSIG])
else:
coinbaseoutput.scriptPubKey = CScript([OP_TRUE])
- coinbase.vout = [ coinbaseoutput ]
+ coinbase.vout = [coinbaseoutput]
coinbase.calc_sha256()
return coinbase
-# Create a transaction.
-# If the scriptPubKey is not specified, make it anyone-can-spend.
-def create_transaction(prevtx, n, sig, value, scriptPubKey=CScript()):
+def create_transaction(prevtx, n, sig, value, script_pub_key=CScript()):
+ """Create a transaction.
+
+ If the script_pub_key is not specified, make it anyone-can-spend."""
tx = CTransaction()
assert(n < len(prevtx.vout))
tx.vin.append(CTxIn(COutPoint(prevtx.sha256, n), sig, 0xffffffff))
- tx.vout.append(CTxOut(value, scriptPubKey))
+ tx.vout.append(CTxOut(value, script_pub_key))
tx.calc_sha256()
return tx
-def get_legacy_sigopcount_block(block, fAccurate=True):
+def get_legacy_sigopcount_block(block, accurate=True):
count = 0
for tx in block.vtx:
- count += get_legacy_sigopcount_tx(tx, fAccurate)
+ count += get_legacy_sigopcount_tx(tx, accurate)
return count
-def get_legacy_sigopcount_tx(tx, fAccurate=True):
+def get_legacy_sigopcount_tx(tx, accurate=True):
count = 0
for i in tx.vout:
- count += i.scriptPubKey.GetSigOpCount(fAccurate)
+ count += i.scriptPubKey.GetSigOpCount(accurate)
for j in tx.vin:
# scriptSig might be of type bytes, so convert to CScript for the moment
- count += CScript(j.scriptSig).GetSigOpCount(fAccurate)
+ count += CScript(j.scriptSig).GetSigOpCount(accurate)
return count
-# Create a scriptPubKey corresponding to either a P2WPKH output for the
-# given pubkey, or a P2WSH output of a 1-of-1 multisig for the given
-# pubkey. Returns the hex encoding of the scriptPubKey.
def witness_script(use_p2wsh, pubkey):
- if (use_p2wsh == False):
+    """Create a scriptPubKey for a pay-to-witness TxOut.
+
+ This is either a P2WPKH output for the given pubkey, or a P2WSH output of a
+ 1-of-1 multisig for the given pubkey. Returns the hex encoding of the
+ scriptPubKey."""
+ if not use_p2wsh:
# P2WPKH instead
pubkeyhash = hash160(hex_str_to_bytes(pubkey))
pkscript = CScript([OP_0, pubkeyhash])
@@ -140,27 +160,29 @@ def witness_script(use_p2wsh, pubkey):
pkscript = CScript([OP_0, scripthash])
return bytes_to_hex_str(pkscript)
-# Return a transaction (in hex) that spends the given utxo to a segwit output,
-# optionally wrapping the segwit output using P2SH.
def create_witness_tx(node, use_p2wsh, utxo, pubkey, encode_p2sh, amount):
+ """Return a transaction (in hex) that spends the given utxo to a segwit output.
+
+ Optionally wrap the segwit output using P2SH."""
if use_p2wsh:
program = CScript([OP_1, hex_str_to_bytes(pubkey), OP_1, OP_CHECKMULTISIG])
addr = script_to_p2sh_p2wsh(program) if encode_p2sh else script_to_p2wsh(program)
else:
addr = key_to_p2sh_p2wpkh(pubkey) if encode_p2sh else key_to_p2wpkh(pubkey)
if not encode_p2sh:
- assert_equal(node.validateaddress(addr)['scriptPubKey'], witness_script(use_p2wsh, pubkey))
+ assert_equal(node.getaddressinfo(addr)['scriptPubKey'], witness_script(use_p2wsh, pubkey))
return node.createrawtransaction([utxo], {addr: amount})
-# Create a transaction spending a given utxo to a segwit output corresponding
-# to the given pubkey: use_p2wsh determines whether to use P2WPKH or P2WSH;
-# encode_p2sh determines whether to wrap in P2SH.
-# sign=True will have the given node sign the transaction.
-# insert_redeem_script will be added to the scriptSig, if given.
def send_to_witness(use_p2wsh, node, utxo, pubkey, encode_p2sh, amount, sign=True, insert_redeem_script=""):
+ """Create a transaction spending a given utxo to a segwit output.
+
+ The output corresponds to the given pubkey: use_p2wsh determines whether to
+ use P2WPKH or P2WSH; encode_p2sh determines whether to wrap in P2SH.
+ sign=True will have the given node sign the transaction.
+ insert_redeem_script will be added to the scriptSig, if given."""
tx_to_witness = create_witness_tx(node, use_p2wsh, utxo, pubkey, encode_p2sh, amount)
if (sign):
- signed = node.signrawtransaction(tx_to_witness)
+ signed = node.signrawtransactionwithwallet(tx_to_witness)
assert("errors" not in signed or len(["errors"]) == 0)
return node.sendrawtransaction(signed["hex"])
else:
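To show how the documented helpers compose, here is a small usage sketch (assuming a regtest tip hash and height fetched elsewhere, e.g. via getbestblockhash and getblockcount):

    from test_framework.blocktools import add_witness_commitment, create_block, create_coinbase

    def build_next_block(tip_hash, height, ntime=None, with_witness=False):
        coinbase = create_coinbase(height + 1)                # subsidy-only coinbase for the next height
        block = create_block(int(tip_hash, 16), coinbase, ntime)
        if with_witness:
            add_witness_commitment(block)                     # BIP141 commitment in the coinbase
        block.solve()                                         # grind nonce until the regtest target is met
        return block
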
diff --git a/test/functional/test_framework/comptool.py b/test/functional/test_framework/comptool.py
deleted file mode 100755
index 61ea2280e2..0000000000
--- a/test/functional/test_framework/comptool.py
+++ /dev/null
@@ -1,397 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (c) 2015-2017 The Bitcoin Core developers
-# Distributed under the MIT software license, see the accompanying
-# file COPYING or http://www.opensource.org/licenses/mit-license.php.
-"""Compare two or more bitcoinds to each other.
-
-To use, create a class that implements get_tests(), and pass it in
-as the test generator to TestManager. get_tests() should be a python
-generator that returns TestInstance objects. See below for definition.
-
-TestNode behaves as follows:
- Configure with a BlockStore and TxStore
- on_inv: log the message but don't request
- on_headers: log the chain tip
- on_pong: update ping response map (for synchronization)
- on_getheaders: provide headers via BlockStore
- on_getdata: provide blocks via BlockStore
-"""
-
-from .mininode import *
-from .blockstore import BlockStore, TxStore
-from .util import p2p_port, wait_until
-
-import logging
-
-logger=logging.getLogger("TestFramework.comptool")
-
-global mininode_lock
-
-class RejectResult():
- """Outcome that expects rejection of a transaction or block."""
- def __init__(self, code, reason=b''):
- self.code = code
- self.reason = reason
- def match(self, other):
- if self.code != other.code:
- return False
- return other.reason.startswith(self.reason)
- def __repr__(self):
- return '%i:%s' % (self.code,self.reason or '*')
-
-class TestNode(P2PInterface):
-
- def __init__(self, block_store, tx_store):
- super().__init__()
- self.bestblockhash = None
- self.block_store = block_store
- self.block_request_map = {}
- self.tx_store = tx_store
- self.tx_request_map = {}
- self.block_reject_map = {}
- self.tx_reject_map = {}
-
- # When the pingmap is non-empty we're waiting for
- # a response
- self.pingMap = {}
- self.lastInv = []
- self.closed = False
-
- def on_close(self):
- self.closed = True
-
- def on_headers(self, message):
- if len(message.headers) > 0:
- best_header = message.headers[-1]
- best_header.calc_sha256()
- self.bestblockhash = best_header.sha256
-
- def on_getheaders(self, message):
- response = self.block_store.headers_for(message.locator, message.hashstop)
- if response is not None:
- self.send_message(response)
-
- def on_getdata(self, message):
- [self.send_message(r) for r in self.block_store.get_blocks(message.inv)]
- [self.send_message(r) for r in self.tx_store.get_transactions(message.inv)]
-
- for i in message.inv:
- if i.type == 1 or i.type == 1 | (1 << 30): # MSG_TX or MSG_WITNESS_TX
- self.tx_request_map[i.hash] = True
- elif i.type == 2 or i.type == 2 | (1 << 30): # MSG_BLOCK or MSG_WITNESS_BLOCK
- self.block_request_map[i.hash] = True
-
- def on_inv(self, message):
- self.lastInv = [x.hash for x in message.inv]
-
- def on_pong(self, message):
- try:
- del self.pingMap[message.nonce]
- except KeyError:
- raise AssertionError("Got pong for unknown ping [%s]" % repr(message))
-
- def on_reject(self, message):
- if message.message == b'tx':
- self.tx_reject_map[message.data] = RejectResult(message.code, message.reason)
- if message.message == b'block':
- self.block_reject_map[message.data] = RejectResult(message.code, message.reason)
-
- def send_inv(self, obj):
- mtype = 2 if isinstance(obj, CBlock) else 1
- self.send_message(msg_inv([CInv(mtype, obj.sha256)]))
-
- def send_getheaders(self):
- # We ask for headers from their last tip.
- m = msg_getheaders()
- m.locator = self.block_store.get_locator(self.bestblockhash)
- self.send_message(m)
-
- def send_header(self, header):
- m = msg_headers()
- m.headers.append(header)
- self.send_message(m)
-
- # This assumes BIP31
- def send_ping(self, nonce):
- self.pingMap[nonce] = True
- self.send_message(msg_ping(nonce))
-
- def received_ping_response(self, nonce):
- return nonce not in self.pingMap
-
- def send_mempool(self):
- self.lastInv = []
- self.send_message(msg_mempool())
-
-# TestInstance:
-#
-# Instances of these are generated by the test generator, and fed into the
-# comptool.
-#
-# "blocks_and_transactions" should be an array of
-# [obj, True/False/None, hash/None]:
-# - obj is either a CBlock, CBlockHeader, or a CTransaction, and
-# - the second value indicates whether the object should be accepted
-# into the blockchain or mempool (for tests where we expect a certain
-# answer), or "None" if we don't expect a certain answer and are just
-# comparing the behavior of the nodes being tested.
-# - the third value is the hash to test the tip against (if None or omitted,
-# use the hash of the block)
-# - NOTE: if a block header, no test is performed; instead the header is
-# just added to the block_store. This is to facilitate block delivery
-# when communicating with headers-first clients (when withholding an
-# intermediate block).
-# sync_every_block: if True, then each block will be inv'ed, synced, and
-# nodes will be tested based on the outcome for the block. If False,
-# then inv's accumulate until all blocks are processed (or max inv size
-# is reached) and then sent out in one inv message. Then the final block
-# will be synced across all connections, and the outcome of the final
-# block will be tested.
-# sync_every_tx: analogous to behavior for sync_every_block, except if outcome
-# on the final tx is None, then contents of entire mempool are compared
-# across all connections. (If outcome of final tx is specified as true
-# or false, then only the last tx is tested against outcome.)
-
-class TestInstance():
- def __init__(self, objects=None, sync_every_block=True, sync_every_tx=False):
- self.blocks_and_transactions = objects if objects else []
- self.sync_every_block = sync_every_block
- self.sync_every_tx = sync_every_tx
-
-class TestManager():
-
- def __init__(self, testgen, datadir):
- self.test_generator = testgen
- self.p2p_connections= []
- self.block_store = BlockStore(datadir)
- self.tx_store = TxStore(datadir)
- self.ping_counter = 1
-
- def add_all_connections(self, nodes):
- for i in range(len(nodes)):
- # Create a p2p connection to each node
- node = TestNode(self.block_store, self.tx_store)
- node.peer_connect('127.0.0.1', p2p_port(i))
- self.p2p_connections.append(node)
-
- def clear_all_connections(self):
- self.p2p_connections = []
-
- def wait_for_disconnections(self):
- def disconnected():
- return all(node.closed for node in self.p2p_connections)
- wait_until(disconnected, timeout=10, lock=mininode_lock)
-
- def wait_for_verack(self):
- return all(node.wait_for_verack() for node in self.p2p_connections)
-
- def wait_for_pings(self, counter):
- def received_pongs():
- return all(node.received_ping_response(counter) for node in self.p2p_connections)
- wait_until(received_pongs, lock=mininode_lock)
-
- # sync_blocks: Wait for all connections to request the blockhash given
- # then send get_headers to find out the tip of each node, and synchronize
- # the response by using a ping (and waiting for pong with same nonce).
- def sync_blocks(self, blockhash, num_blocks):
- def blocks_requested():
- return all(
- blockhash in node.block_request_map and node.block_request_map[blockhash]
- for node in self.p2p_connections
- )
-
- # --> error if not requested
- wait_until(blocks_requested, attempts=20*num_blocks, lock=mininode_lock)
-
- # Send getheaders message
- [ c.send_getheaders() for c in self.p2p_connections ]
-
- # Send ping and wait for response -- synchronization hack
- [ c.send_ping(self.ping_counter) for c in self.p2p_connections ]
- self.wait_for_pings(self.ping_counter)
- self.ping_counter += 1
-
- # Analogous to sync_block (see above)
- def sync_transaction(self, txhash, num_events):
- # Wait for nodes to request transaction (50ms sleep * 20 tries * num_events)
- def transaction_requested():
- return all(
- txhash in node.tx_request_map and node.tx_request_map[txhash]
- for node in self.p2p_connections
- )
-
- # --> error if not requested
- wait_until(transaction_requested, attempts=20*num_events, lock=mininode_lock)
-
- # Get the mempool
- [ c.send_mempool() for c in self.p2p_connections ]
-
- # Send ping and wait for response -- synchronization hack
- [ c.send_ping(self.ping_counter) for c in self.p2p_connections ]
- self.wait_for_pings(self.ping_counter)
- self.ping_counter += 1
-
- # Sort inv responses from each node
- with mininode_lock:
- [ c.lastInv.sort() for c in self.p2p_connections ]
-
- # Verify that the tip of each connection all agree with each other, and
- # with the expected outcome (if given)
- def check_results(self, blockhash, outcome):
- with mininode_lock:
- for c in self.p2p_connections:
- if outcome is None:
- if c.bestblockhash != self.p2p_connections[0].bestblockhash:
- return False
- elif isinstance(outcome, RejectResult): # Check that block was rejected w/ code
- if c.bestblockhash == blockhash:
- return False
- if blockhash not in c.block_reject_map:
- logger.error('Block not in reject map: %064x' % (blockhash))
- return False
- if not outcome.match(c.block_reject_map[blockhash]):
- logger.error('Block rejected with %s instead of expected %s: %064x' % (c.block_reject_map[blockhash], outcome, blockhash))
- return False
- elif ((c.bestblockhash == blockhash) != outcome):
- return False
- return True
-
- # Either check that the mempools all agree with each other, or that
- # txhash's presence in the mempool matches the outcome specified.
- # This is somewhat of a strange comparison, in that we're either comparing
- # a particular tx to an outcome, or the entire mempools altogether;
- # perhaps it would be useful to add the ability to check explicitly that
- # a particular tx's existence in the mempool is the same across all nodes.
- def check_mempool(self, txhash, outcome):
- with mininode_lock:
- for c in self.p2p_connections:
- if outcome is None:
- # Make sure the mempools agree with each other
- if c.lastInv != self.p2p_connections[0].lastInv:
- return False
- elif isinstance(outcome, RejectResult): # Check that tx was rejected w/ code
- if txhash in c.lastInv:
- return False
- if txhash not in c.tx_reject_map:
- logger.error('Tx not in reject map: %064x' % (txhash))
- return False
- if not outcome.match(c.tx_reject_map[txhash]):
- logger.error('Tx rejected with %s instead of expected %s: %064x' % (c.tx_reject_map[txhash], outcome, txhash))
- return False
- elif ((txhash in c.lastInv) != outcome):
- return False
- return True
-
- def run(self):
- # Wait until verack is received
- self.wait_for_verack()
-
- test_number = 0
- tests = self.test_generator.get_tests()
- for test_instance in tests:
- test_number += 1
- logger.info("Running test %d: %s line %s" % (test_number, tests.gi_code.co_filename, tests.gi_frame.f_lineno))
- # We use these variables to keep track of the last block
- # and last transaction in the tests, which are used
- # if we're not syncing on every block or every tx.
- [ block, block_outcome, tip ] = [ None, None, None ]
- [ tx, tx_outcome ] = [ None, None ]
- invqueue = []
-
- for test_obj in test_instance.blocks_and_transactions:
- b_or_t = test_obj[0]
- outcome = test_obj[1]
- # Determine if we're dealing with a block or tx
- if isinstance(b_or_t, CBlock): # Block test runner
- block = b_or_t
- block_outcome = outcome
- tip = block.sha256
- # each test_obj can have an optional third argument
- # to specify the tip we should compare with
- # (default is to use the block being tested)
- if len(test_obj) >= 3:
- tip = test_obj[2]
-
- # Add to shared block_store, set as current block
- # If there was an open getdata request for the block
- # previously, and we didn't have an entry in the
- # block_store, then immediately deliver, because the
- # node wouldn't send another getdata request while
- # the earlier one is outstanding.
- first_block_with_hash = True
- if self.block_store.get(block.sha256) is not None:
- first_block_with_hash = False
- with mininode_lock:
- self.block_store.add_block(block)
- for c in self.p2p_connections:
- if first_block_with_hash and block.sha256 in c.block_request_map and c.block_request_map[block.sha256] == True:
- # There was a previous request for this block hash
- # Most likely, we delivered a header for this block
- # but never had the block to respond to the getdata
- c.send_message(msg_block(block))
- else:
- c.block_request_map[block.sha256] = False
- # Either send inv's to each node and sync, or add
- # to invqueue for later inv'ing.
- if (test_instance.sync_every_block):
- # if we expect success, send inv and sync every block
- # if we expect failure, just push the block and see what happens.
- if outcome == True:
- [ c.send_inv(block) for c in self.p2p_connections ]
- self.sync_blocks(block.sha256, 1)
- else:
- [ c.send_message(msg_block(block)) for c in self.p2p_connections ]
- [ c.send_ping(self.ping_counter) for c in self.p2p_connections ]
- self.wait_for_pings(self.ping_counter)
- self.ping_counter += 1
- if (not self.check_results(tip, outcome)):
- raise AssertionError("Test failed at test %d" % test_number)
- else:
- invqueue.append(CInv(2, block.sha256))
- elif isinstance(b_or_t, CBlockHeader):
- block_header = b_or_t
- self.block_store.add_header(block_header)
- [ c.send_header(block_header) for c in self.p2p_connections ]
-
- else: # Tx test runner
- assert(isinstance(b_or_t, CTransaction))
- tx = b_or_t
- tx_outcome = outcome
- # Add to shared tx store and clear map entry
- with mininode_lock:
- self.tx_store.add_transaction(tx)
- for c in self.p2p_connections:
- c.tx_request_map[tx.sha256] = False
- # Again, either inv to all nodes or save for later
- if (test_instance.sync_every_tx):
- [ c.send_inv(tx) for c in self.p2p_connections ]
- self.sync_transaction(tx.sha256, 1)
- if (not self.check_mempool(tx.sha256, outcome)):
- raise AssertionError("Test failed at test %d" % test_number)
- else:
- invqueue.append(CInv(1, tx.sha256))
- # Ensure we're not overflowing the inv queue
- if len(invqueue) == MAX_INV_SZ:
- [ c.send_message(msg_inv(invqueue)) for c in self.p2p_connections ]
- invqueue = []
-
- # Do final sync if we weren't syncing on every block or every tx.
- if (not test_instance.sync_every_block and block is not None):
- if len(invqueue) > 0:
- [ c.send_message(msg_inv(invqueue)) for c in self.p2p_connections ]
- invqueue = []
- self.sync_blocks(block.sha256, len(test_instance.blocks_and_transactions))
- if (not self.check_results(tip, block_outcome)):
- raise AssertionError("Block test failed at test %d" % test_number)
- if (not test_instance.sync_every_tx and tx is not None):
- if len(invqueue) > 0:
- [ c.send_message(msg_inv(invqueue)) for c in self.p2p_connections ]
- invqueue = []
- self.sync_transaction(tx.sha256, len(test_instance.blocks_and_transactions))
- if (not self.check_mempool(tx.sha256, tx_outcome)):
- raise AssertionError("Mempool test failed at test %d" % test_number)
-
- [ c.disconnect_node() for c in self.p2p_connections ]
- self.wait_for_disconnections()
- self.block_store.close()
- self.tx_store.close()
diff --git a/test/functional/test_framework/key.py b/test/functional/test_framework/key.py
index aa91fb5b0d..1b3e510dc4 100644
--- a/test/functional/test_framework/key.py
+++ b/test/functional/test_framework/key.py
@@ -10,7 +10,6 @@ This file is modified from python-bitcoinlib.
import ctypes
import ctypes.util
import hashlib
-import sys
ssl = ctypes.cdll.LoadLibrary(ctypes.util.find_library ('ssl') or 'libeay32')
@@ -223,10 +222,5 @@ class CPubKey(bytes):
return repr(self)
def __repr__(self):
- # Always have represent as b'<secret>' so test cases don't have to
- # change for py2/3
- if sys.version > '3':
- return '%s(%s)' % (self.__class__.__name__, super(CPubKey, self).__repr__())
- else:
- return '%s(b%s)' % (self.__class__.__name__, super(CPubKey, self).__repr__())
+ return '%s(%s)' % (self.__class__.__name__, super(CPubKey, self).__repr__())
diff --git a/test/functional/test_framework/messages.py b/test/functional/test_framework/messages.py
index 46ef7521e0..ca2e425bd6 100644..100755
--- a/test/functional/test_framework/messages.py
+++ b/test/functional/test_framework/messages.py
@@ -4,7 +4,7 @@
# Copyright (c) 2010-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
-"""Bitcoin test framework primitive and message strcutures
+"""Bitcoin test framework primitive and message structures
CBlock, CTransaction, CBlockHeader, CTxIn, CTxOut, etc....:
data structures that should map to corresponding structures in
@@ -34,7 +34,9 @@ MY_RELAY = 1 # from version 70001 onwards, fRelay should be appended to version
MAX_INV_SZ = 50000
MAX_BLOCK_BASE_SIZE = 1000000
-COIN = 100000000 # 1 btc in satoshis
+COIN = 100000000 # 1 btc in satoshis
+
+BIP125_SEQUENCE_NUMBER = 0xfffffffd # Sequence number that is BIP 125 opt-in and BIP 68-opt-out
NODE_NETWORK = (1 << 0)
# NODE_GETUTXO = (1 << 1)
@@ -186,19 +188,24 @@ def ToHex(obj):
class CAddress():
def __init__(self):
+ self.time = 0
self.nServices = 1
self.pchReserved = b"\x00" * 10 + b"\xff" * 2
self.ip = "0.0.0.0"
self.port = 0
- def deserialize(self, f):
+ def deserialize(self, f, with_time=True):
+ if with_time:
+ self.time = struct.unpack("<i", f.read(4))[0]
self.nServices = struct.unpack("<Q", f.read(8))[0]
self.pchReserved = f.read(12)
self.ip = socket.inet_ntoa(f.read(4))
self.port = struct.unpack(">H", f.read(2))[0]
- def serialize(self):
+ def serialize(self, with_time=True):
r = b""
+ if with_time:
+ r += struct.pack("<i", self.time)
r += struct.pack("<Q", self.nServices)
r += self.pchReserved
r += socket.inet_aton(self.ip)
@@ -465,6 +472,7 @@ class CTransaction():
def rehash(self):
self.sha256 = None
self.calc_sha256()
+ return self.hash
# We will only cache the serialization without witness in
# self.sha256 and self.hash -- those are expected to be the txid.
@@ -856,11 +864,11 @@ class msg_version():
self.nServices = struct.unpack("<Q", f.read(8))[0]
self.nTime = struct.unpack("<q", f.read(8))[0]
self.addrTo = CAddress()
- self.addrTo.deserialize(f)
+ self.addrTo.deserialize(f, False)
if self.nVersion >= 106:
self.addrFrom = CAddress()
- self.addrFrom.deserialize(f)
+ self.addrFrom.deserialize(f, False)
self.nNonce = struct.unpack("<Q", f.read(8))[0]
self.strSubVer = deser_string(f)
else:
@@ -888,8 +896,8 @@ class msg_version():
r += struct.pack("<i", self.nVersion)
r += struct.pack("<Q", self.nServices)
r += struct.pack("<q", self.nTime)
- r += self.addrTo.serialize()
- r += self.addrFrom.serialize()
+ r += self.addrTo.serialize(False)
+ r += self.addrFrom.serialize(False)
r += struct.pack("<Q", self.nNonce)
r += ser_string(self.strSubVer)
r += struct.pack("<i", self.nStartingHeight)
diff --git a/test/functional/test_framework/mininode.py b/test/functional/test_framework/mininode.py
index 99d0abc3f9..aba2841682 100755
--- a/test/functional/test_framework/mininode.py
+++ b/test/functional/test_framework/mininode.py
@@ -405,7 +405,7 @@ class P2PInterface(P2PConnection):
# Keep our own socket map for asyncore, so that we can track disconnects
-# ourselves (to workaround an issue with closing an asyncore socket when
+# ourselves (to work around an issue with closing an asyncore socket when
# using select)
mininode_socket_map = dict()
@@ -424,7 +424,7 @@ class NetworkThread(threading.Thread):
def run(self):
while mininode_socket_map:
# We check for whether to disconnect outside of the asyncore
- # loop to workaround the behavior of asyncore when using
+ # loop to work around the behavior of asyncore when using
# select
disconnected = []
for fd, obj in mininode_socket_map.items():
@@ -498,7 +498,7 @@ class P2PDataStore(P2PInterface):
# as we go.
prev_block_hash = headers_list[-1].hashPrevBlock
if prev_block_hash in self.block_store:
- prev_block_header = self.block_store[prev_block_hash]
+ prev_block_header = CBlockHeader(self.block_store[prev_block_hash])
headers_list.append(prev_block_header)
if prev_block_header.sha256 == hash_stop:
# if this is the hashstop header, stop here
@@ -539,7 +539,7 @@ class P2PDataStore(P2PInterface):
self.block_store[block.sha256] = block
self.last_block_hash = block.sha256
- self.send_message(msg_headers([blocks[-1]]))
+ self.send_message(msg_headers([CBlockHeader(blocks[-1])]))
if request_block:
wait_until(lambda: blocks[-1].sha256 in self.getdata_requests, timeout=timeout, lock=mininode_lock)
diff --git a/test/functional/test_framework/netutil.py b/test/functional/test_framework/netutil.py
index 96fe283347..36d1a2f856 100644
--- a/test/functional/test_framework/netutil.py
+++ b/test/functional/test_framework/netutil.py
@@ -9,7 +9,6 @@ Roughly based on http://voorloopnul.com/blog/a-python-netstat-in-less-than-100-l
import sys
import socket
-import fcntl
import struct
import array
import os
@@ -90,6 +89,8 @@ def all_interfaces():
'''
Return all interfaces that are up
'''
+ import fcntl # Linux only, so only import when required
+
is_64bits = sys.maxsize > 2**32
struct_size = 40 if is_64bits else 32
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
diff --git a/test/functional/test_framework/script.py b/test/functional/test_framework/script.py
index dae8a4e569..44650d7584 100644
--- a/test/functional/test_framework/script.py
+++ b/test/functional/test_framework/script.py
@@ -10,15 +10,6 @@ This file is modified from python-bitcoinlib.
from .mininode import CTransaction, CTxOut, sha256, hash256, uint256_from_str, ser_uint256, ser_string
from binascii import hexlify
import hashlib
-
-import sys
-bchr = chr
-bord = ord
-if sys.version > '3':
- long = int
- bchr = lambda x: bytes([x])
- bord = lambda x: x
-
import struct
from .bignum import bn2vch
@@ -40,9 +31,9 @@ class CScriptOp(int):
def encode_op_pushdata(d):
"""Encode a PUSHDATA op, returning bytes"""
if len(d) < 0x4c:
- return b'' + bchr(len(d)) + d # OP_PUSHDATA
+ return b'' + bytes([len(d)]) + d # OP_PUSHDATA
elif len(d) <= 0xff:
- return b'\x4c' + bchr(len(d)) + d # OP_PUSHDATA1
+ return b'\x4c' + bytes([len(d)]) + d # OP_PUSHDATA1
elif len(d) <= 0xffff:
return b'\x4d' + struct.pack(b'<H', len(d)) + d # OP_PUSHDATA2
elif len(d) <= 0xffffffff:
@@ -388,7 +379,7 @@ class CScriptNum():
r.append(0x80 if neg else 0)
elif neg:
r[-1] |= 0x80
- return bytes(bchr(len(r)) + r)
+ return bytes([len(r)]) + r
class CScript(bytes):
@@ -405,17 +396,17 @@ class CScript(bytes):
def __coerce_instance(cls, other):
# Coerce other into bytes
if isinstance(other, CScriptOp):
- other = bchr(other)
+ other = bytes([other])
elif isinstance(other, CScriptNum):
if (other.value == 0):
- other = bchr(CScriptOp(OP_0))
+ other = bytes([CScriptOp(OP_0)])
else:
other = CScriptNum.encode(other)
elif isinstance(other, int):
if 0 <= other <= 16:
- other = bytes(bchr(CScriptOp.encode_op_n(other)))
+ other = bytes([CScriptOp.encode_op_n(other)])
elif other == -1:
- other = bytes(bchr(OP_1NEGATE))
+ other = bytes([OP_1NEGATE])
else:
other = CScriptOp.encode_op_pushdata(bn2vch(other))
elif isinstance(other, (bytes, bytearray)):
@@ -458,7 +449,7 @@ class CScript(bytes):
i = 0
while i < len(self):
sop_idx = i
- opcode = bord(self[i])
+ opcode = self[i]
i += 1
if opcode > OP_PUSHDATA4:
@@ -474,21 +465,21 @@ class CScript(bytes):
pushdata_type = 'PUSHDATA1'
if i >= len(self):
raise CScriptInvalidError('PUSHDATA1: missing data length')
- datasize = bord(self[i])
+ datasize = self[i]
i += 1
elif opcode == OP_PUSHDATA2:
pushdata_type = 'PUSHDATA2'
if i + 1 >= len(self):
raise CScriptInvalidError('PUSHDATA2: missing data length')
- datasize = bord(self[i]) + (bord(self[i+1]) << 8)
+ datasize = self[i] + (self[i+1] << 8)
i += 2
elif opcode == OP_PUSHDATA4:
pushdata_type = 'PUSHDATA4'
if i + 3 >= len(self):
raise CScriptInvalidError('PUSHDATA4: missing data length')
- datasize = bord(self[i]) + (bord(self[i+1]) << 8) + (bord(self[i+2]) << 16) + (bord(self[i+3]) << 24)
+ datasize = self[i] + (self[i+1] << 8) + (self[i+2] << 16) + (self[i+3] << 24)
i += 4
else:
@@ -526,11 +517,9 @@ class CScript(bytes):
yield CScriptOp(opcode)
def __repr__(self):
- # For Python3 compatibility add b before strings so testcases don't
- # need to change
def _repr(o):
if isinstance(o, bytes):
- return b"x('%s')" % hexlify(o).decode('ascii')
+ return "x('%s')" % hexlify(o).decode('ascii')
else:
return repr(o)
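The bchr/bord shims can go because Python 3's bytes type already behaves this way: indexing yields an int and bytes([n]) builds a one-byte string. A condensed restatement of the PUSHDATA encoding in that style (for illustration; it mirrors encode_op_pushdata above rather than adding anything new):

    import struct

    def push_data(d):
        if len(d) < 0x4c:
            return bytes([len(d)]) + d                       # direct push
        elif len(d) <= 0xff:
            return b'\x4c' + bytes([len(d)]) + d             # OP_PUSHDATA1
        elif len(d) <= 0xffff:
            return b'\x4d' + struct.pack('<H', len(d)) + d   # OP_PUSHDATA2
        else:
            return b'\x4e' + struct.pack('<I', len(d)) + d   # OP_PUSHDATA4

    assert push_data(b'\x01\x02')[0] == 2                    # indexing bytes gives an int
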
diff --git a/test/functional/test_framework/socks5.py b/test/functional/test_framework/socks5.py
index 4721809a3b..581de0ed5d 100644
--- a/test/functional/test_framework/socks5.py
+++ b/test/functional/test_framework/socks5.py
@@ -4,12 +4,14 @@
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Dummy Socks5 server for testing."""
-import socket, threading, queue
+import socket
+import threading
+import queue
import logging
logger = logging.getLogger("TestFramework.socks5")
-### Protocol constants
+# Protocol constants
class Command:
CONNECT = 0x01
@@ -18,7 +20,7 @@ class AddressType:
DOMAINNAME = 0x03
IPV6 = 0x04
-### Utility functions
+# Utility functions
def recvall(s, n):
"""Receive n bytes from a socket, or fail."""
rv = bytearray()
@@ -30,7 +32,7 @@ def recvall(s, n):
n -= len(d)
return rv
-### Implementation classes
+# Implementation classes
class Socks5Configuration():
"""Proxy configuration."""
def __init__(self):
@@ -141,7 +143,7 @@ class Socks5Server():
thread = threading.Thread(None, conn.handle)
thread.daemon = True
thread.start()
-
+
def start(self):
assert(not self.running)
self.running = True
diff --git a/test/functional/test_framework/test_framework.py b/test/functional/test_framework/test_framework.py
index a5e66bd959..472664a314 100755
--- a/test/functional/test_framework/test_framework.py
+++ b/test/functional/test_framework/test_framework.py
@@ -4,6 +4,7 @@
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Base class for RPC testing."""
+import configparser
from enum import Enum
import logging
import optparse
@@ -24,8 +25,8 @@ from .util import (
check_json_precision,
connect_nodes_bi,
disconnect_nodes,
+ get_datadir_path,
initialize_datadir,
- log_filename,
p2p_port,
set_node_times,
sync_blocks,
@@ -41,7 +42,28 @@ TEST_EXIT_PASSED = 0
TEST_EXIT_FAILED = 1
TEST_EXIT_SKIPPED = 77
-class BitcoinTestFramework():
+
+class BitcoinTestMetaClass(type):
+ """Metaclass for BitcoinTestFramework.
+
+ Ensures that any attempt to register a subclass of `BitcoinTestFramework`
+ adheres to a standard whereby the subclass overrides `set_test_params` and
+ `run_test` but DOES NOT override either `__init__` or `main`. If any of
+ those standards are violated, a ``TypeError`` is raised."""
+
+ def __new__(cls, clsname, bases, dct):
+ if not clsname == 'BitcoinTestFramework':
+ if not ('run_test' in dct and 'set_test_params' in dct):
+ raise TypeError("BitcoinTestFramework subclasses must override "
+ "'run_test' and 'set_test_params'")
+ if '__init__' in dct or 'main' in dct:
+ raise TypeError("BitcoinTestFramework subclasses may not override "
+ "'__init__' or 'main'")
+
+ return super().__new__(cls, clsname, bases, dct)
+
+
+class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
"""Base class for a bitcoin test script.
Individual bitcoin test scripts should subclass this class and override the set_test_params() and run_test() methods.
@@ -63,6 +85,7 @@ class BitcoinTestFramework():
self.nodes = []
self.mocktime = 0
self.supports_cli = False
+ self.bind_to_localhost_only = True
self.set_test_params()
assert hasattr(self, "num_nodes"), "Test must set self.num_nodes in set_test_params()"
@@ -75,10 +98,10 @@ class BitcoinTestFramework():
help="Leave bitcoinds and test.* datadir on exit or error")
parser.add_option("--noshutdown", dest="noshutdown", default=False, action="store_true",
help="Don't stop bitcoinds after the test execution")
- parser.add_option("--srcdir", dest="srcdir", default=os.path.normpath(os.path.dirname(os.path.realpath(__file__)) + "/../../../src"),
+ parser.add_option("--srcdir", dest="srcdir", default=os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + "/../../../src"),
help="Source directory containing bitcoind/bitcoin-cli (default: %default)")
- parser.add_option("--cachedir", dest="cachedir", default=os.path.normpath(os.path.dirname(os.path.realpath(__file__)) + "/../../cache"),
- help="Directory for caching pregenerated datadirs")
+ parser.add_option("--cachedir", dest="cachedir", default=os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + "/../../cache"),
+ help="Directory for caching pregenerated datadirs (default: %default)")
parser.add_option("--tmpdir", dest="tmpdir", help="Root directory for datadirs")
parser.add_option("-l", "--loglevel", dest="loglevel", default="INFO",
help="log events at this level and higher to the console. Can be set to DEBUG, INFO, WARNING, ERROR or CRITICAL. Passing --loglevel DEBUG will output all logs to console. Note that logs at all levels are always written to the test_framework.log file in the temporary test directory.")
@@ -89,7 +112,8 @@ class BitcoinTestFramework():
parser.add_option("--coveragedir", dest="coveragedir",
help="Write tested RPC commands into this directory")
parser.add_option("--configfile", dest="configfile",
- help="Location of the test framework config file")
+ default=os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + "/../../config.ini"),
+ help="Location of the test framework config file (default: %default)")
parser.add_option("--pdbonfailure", dest="pdbonfailure", default=False, action="store_true",
help="Attach a python debugger if test fails")
parser.add_option("--usecli", dest="usecli", default=False, action="store_true",
@@ -107,6 +131,11 @@ class BitcoinTestFramework():
self.options.cachedir = os.path.abspath(self.options.cachedir)
+ config = configparser.ConfigParser()
+ config.read_file(open(self.options.configfile))
+ self.options.bitcoind = os.getenv("BITCOIND", default=config["environment"]["BUILDDIR"] + '/src/bitcoind' + config["environment"]["EXEEXT"])
+ self.options.bitcoincli = os.getenv("BITCOINCLI", default=config["environment"]["BUILDDIR"] + '/src/bitcoin-cli' + config["environment"]["EXEEXT"])
+
# Set up temp directory and start logging
if self.options.tmpdir:
self.options.tmpdir = os.path.abspath(self.options.tmpdir)
@@ -147,6 +176,8 @@ class BitcoinTestFramework():
if self.nodes:
self.stop_nodes()
else:
+ for node in self.nodes:
+ node.cleanup_on_exit = False
self.log.info("Note: bitcoinds were not stopped and may still be running")
if not self.options.nocleanup and not self.options.noshutdown and success != TestStatus.FAILED:
@@ -215,15 +246,19 @@ class BitcoinTestFramework():
def add_nodes(self, num_nodes, extra_args=None, rpchost=None, timewait=None, binary=None):
"""Instantiate TestNode objects"""
-
+ if self.bind_to_localhost_only:
+ extra_confs = [["bind=127.0.0.1"]] * num_nodes
+ else:
+ extra_confs = [[]] * num_nodes
if extra_args is None:
extra_args = [[]] * num_nodes
if binary is None:
- binary = [None] * num_nodes
+ binary = [self.options.bitcoind] * num_nodes
+ assert_equal(len(extra_confs), num_nodes)
assert_equal(len(extra_args), num_nodes)
assert_equal(len(binary), num_nodes)
for i in range(num_nodes):
- self.nodes.append(TestNode(i, self.options.tmpdir, extra_args[i], rpchost, timewait=timewait, binary=binary[i], stderr=None, mocktime=self.mocktime, coverage_dir=self.options.coveragedir, use_cli=self.options.usecli))
+ self.nodes.append(TestNode(i, get_datadir_path(self.options.tmpdir, i), rpchost=rpchost, timewait=timewait, bitcoind=binary[i], bitcoin_cli=self.options.bitcoincli, stderr=None, mocktime=self.mocktime, coverage_dir=self.options.coveragedir, extra_conf=extra_confs[i], extra_args=extra_args[i], use_cli=self.options.usecli))
def start_node(self, i, *args, **kwargs):
"""Start a bitcoind"""
@@ -276,27 +311,6 @@ class BitcoinTestFramework():
self.stop_node(i)
self.start_node(i, extra_args)
- def assert_start_raises_init_error(self, i, extra_args=None, expected_msg=None, *args, **kwargs):
- with tempfile.SpooledTemporaryFile(max_size=2**16) as log_stderr:
- try:
- self.start_node(i, extra_args, stderr=log_stderr, *args, **kwargs)
- self.stop_node(i)
- except Exception as e:
- assert 'bitcoind exited' in str(e) # node must have shutdown
- self.nodes[i].running = False
- self.nodes[i].process = None
- if expected_msg is not None:
- log_stderr.seek(0)
- stderr = log_stderr.read().decode('utf-8')
- if expected_msg not in stderr:
- raise AssertionError("Expected error \"" + expected_msg + "\" not found in:\n" + stderr)
- else:
- if expected_msg is None:
- assert_msg = "bitcoind should have exited with an error"
- else:
- assert_msg = "bitcoind should have exited with expected error " + expected_msg
- raise AssertionError(assert_msg)
-
def wait_for_node_exit(self, i, timeout):
self.nodes[i].process.wait(timeout)
@@ -330,7 +344,7 @@ class BitcoinTestFramework():
blockchain. If the cached version of the blockchain is used without
mocktime then the mempools will not sync due to IBD.
- For backwared compatibility of the python scripts with previous
+ For backward compatibility of the python scripts with previous
versions of the cache, this helper function sets mocktime to Jan 1,
2014 + (201 * 10 * 60)"""
self.mocktime = 1388534400 + (201 * 10 * 60)
@@ -353,7 +367,7 @@ class BitcoinTestFramework():
ll = int(self.options.loglevel) if self.options.loglevel.isdigit() else self.options.loglevel.upper()
ch.setLevel(ll)
# Format logs the same as bitcoind's debug.log with microprecision (so log files can be concatenated and sorted)
- formatter = logging.Formatter(fmt='%(asctime)s.%(msecs)03d000 %(name)s (%(levelname)s): %(message)s', datefmt='%Y-%m-%d %H:%M:%S')
+ formatter = logging.Formatter(fmt='%(asctime)s.%(msecs)03d000Z %(name)s (%(levelname)s): %(message)s', datefmt='%Y-%m-%dT%H:%M:%S')
formatter.converter = time.gmtime
fh.setFormatter(formatter)
ch.setFormatter(formatter)
@@ -377,7 +391,7 @@ class BitcoinTestFramework():
assert self.num_nodes <= MAX_NODES
create_cache = False
for i in range(MAX_NODES):
- if not os.path.isdir(os.path.join(self.options.cachedir, 'node' + str(i))):
+ if not os.path.isdir(get_datadir_path(self.options.cachedir, i)):
create_cache = True
break
@@ -386,16 +400,16 @@ class BitcoinTestFramework():
# find and delete old cache directories if any exist
for i in range(MAX_NODES):
- if os.path.isdir(os.path.join(self.options.cachedir, "node" + str(i))):
- shutil.rmtree(os.path.join(self.options.cachedir, "node" + str(i)))
+ if os.path.isdir(get_datadir_path(self.options.cachedir, i)):
+ shutil.rmtree(get_datadir_path(self.options.cachedir, i))
# Create cache directories, run bitcoinds:
for i in range(MAX_NODES):
datadir = initialize_datadir(self.options.cachedir, i)
- args = [os.getenv("BITCOIND", "bitcoind"), "-server", "-keypool=1", "-datadir=" + datadir, "-discover=0"]
+ args = [self.options.bitcoind, "-datadir=" + datadir]
if i > 0:
args.append("-connect=127.0.0.1:" + str(p2p_port(0)))
- self.nodes.append(TestNode(i, self.options.cachedir, extra_args=[], rpchost=None, timewait=None, binary=None, stderr=None, mocktime=self.mocktime, coverage_dir=None))
+ self.nodes.append(TestNode(i, get_datadir_path(self.options.cachedir, i), extra_conf=["bind=127.0.0.1"], extra_args=[], rpchost=None, timewait=None, bitcoind=self.options.bitcoind, bitcoin_cli=self.options.bitcoincli, stderr=None, mocktime=self.mocktime, coverage_dir=None))
self.nodes[i].args = args
self.start_node(i)
@@ -425,15 +439,18 @@ class BitcoinTestFramework():
self.stop_nodes()
self.nodes = []
self.disable_mocktime()
+
+ def cache_path(n, *paths):
+ return os.path.join(get_datadir_path(self.options.cachedir, n), "regtest", *paths)
+
for i in range(MAX_NODES):
- os.remove(log_filename(self.options.cachedir, i, "debug.log"))
- os.remove(log_filename(self.options.cachedir, i, "wallets/db.log"))
- os.remove(log_filename(self.options.cachedir, i, "peers.dat"))
- os.remove(log_filename(self.options.cachedir, i, "fee_estimates.dat"))
+ for entry in os.listdir(cache_path(i)):
+ if entry not in ['wallets', 'chainstate', 'blocks']:
+ os.remove(cache_path(i, entry))
for i in range(self.num_nodes):
- from_dir = os.path.join(self.options.cachedir, "node" + str(i))
- to_dir = os.path.join(self.options.tmpdir, "node" + str(i))
+ from_dir = get_datadir_path(self.options.cachedir, i)
+ to_dir = get_datadir_path(self.options.tmpdir, i)
shutil.copytree(from_dir, to_dir)
initialize_datadir(self.options.tmpdir, i) # Overwrite port/rpcport in bitcoin.conf
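A quick sketch of what the path handling above resolves to (cachedir is whatever --cachedir points at); only the wallets/, chainstate/ and blocks/ subdirectories survive into the copied datadir:

    get_datadir_path(self.options.cachedir, 0)   # -> <cachedir>/node0
    cache_path(0, "debug.log")                   # -> <cachedir>/node0/regtest/debug.log (deleted before the copy)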
@@ -445,34 +462,6 @@ class BitcoinTestFramework():
for i in range(self.num_nodes):
initialize_datadir(self.options.tmpdir, i)
-class ComparisonTestFramework(BitcoinTestFramework):
- """Test framework for doing p2p comparison testing
-
- Sets up some bitcoind binaries:
- - 1 binary: test binary
- - 2 binaries: 1 test binary, 1 ref binary
- - n>2 binaries: 1 test binary, n-1 ref binaries"""
-
- def set_test_params(self):
- self.num_nodes = 2
- self.setup_clean_chain = True
-
- def add_options(self, parser):
- parser.add_option("--testbinary", dest="testbinary",
- default=os.getenv("BITCOIND", "bitcoind"),
- help="bitcoind binary to test")
- parser.add_option("--refbinary", dest="refbinary",
- default=os.getenv("BITCOIND", "bitcoind"),
- help="bitcoind binary to use for reference nodes (if any)")
-
- def setup_network(self):
- extra_args = [['-whitelist=127.0.0.1']] * self.num_nodes
- if hasattr(self, "extra_args"):
- extra_args = self.extra_args
- self.add_nodes(self.num_nodes, extra_args,
- binary=[self.options.testbinary] +
- [self.options.refbinary] * (self.num_nodes - 1))
- self.start_nodes()
class SkipTest(Exception):
"""This exception is raised to skip a test"""
diff --git a/test/functional/test_framework/test_node.py b/test/functional/test_framework/test_node.py
index 1054e6d028..5a6a659392 100755
--- a/test/functional/test_framework/test_node.py
+++ b/test/functional/test_framework/test_node.py
@@ -6,17 +6,19 @@
import decimal
import errno
+from enum import Enum
import http.client
import json
import logging
-import os
import re
import subprocess
+import tempfile
import time
from .authproxy import JSONRPCException
from .util import (
- assert_equal,
+ append_config,
+ delete_cookie_file,
get_rpc_proxy,
rpc_url,
wait_until,
@@ -28,6 +30,17 @@ JSONDecodeError = getattr(json, "JSONDecodeError", ValueError)
BITCOIND_PROC_WAIT_TIMEOUT = 60
+
+class FailedToStartError(Exception):
+ """Raised when a node fails to start correctly."""
+
+
+class ErrorMatch(Enum):
+ FULL_TEXT = 1
+ FULL_REGEX = 2
+ PARTIAL_REGEX = 3
+
+
class TestNode():
"""A class for representing a bitcoind node under test.
@@ -42,26 +55,37 @@ class TestNode():
To make things easier for the test writer, any unrecognised messages will
be dispatched to the RPC connection."""
- def __init__(self, i, dirname, extra_args, rpchost, timewait, binary, stderr, mocktime, coverage_dir, use_cli=False):
+ def __init__(self, i, datadir, rpchost, timewait, bitcoind, bitcoin_cli, stderr, mocktime, coverage_dir, extra_conf=None, extra_args=None, use_cli=False):
self.index = i
- self.datadir = os.path.join(dirname, "node" + str(i))
+ self.datadir = datadir
self.rpchost = rpchost
if timewait:
self.rpc_timeout = timewait
else:
# Wait for up to 60 seconds for the RPC server to respond
self.rpc_timeout = 60
- if binary is None:
- self.binary = os.getenv("BITCOIND", "bitcoind")
- else:
- self.binary = binary
+ self.binary = bitcoind
self.stderr = stderr
self.coverage_dir = coverage_dir
- # Most callers will just need to add extra args to the standard list below. For those callers that need more flexibity, they can just set the args property directly.
+ if extra_conf is not None:
+ append_config(datadir, extra_conf)
+ # Most callers will just need to add extra args to the standard list below.
+ # For those callers that need more flexibility, they can just set the args property directly.
+ # Note that common args are set in the config file (see initialize_datadir)
self.extra_args = extra_args
- self.args = [self.binary, "-datadir=" + self.datadir, "-server", "-keypool=1", "-discover=0", "-rest", "-logtimemicros", "-debug", "-debugexclude=libevent", "-debugexclude=leveldb", "-mocktime=" + str(mocktime), "-uacomment=testnode%d" % i]
-
- self.cli = TestNodeCLI(os.getenv("BITCOINCLI", "bitcoin-cli"), self.datadir)
+ self.args = [
+ self.binary,
+ "-datadir=" + self.datadir,
+ "-logtimemicros",
+ "-debug",
+ "-debugexclude=libevent",
+ "-debugexclude=leveldb",
+ "-mocktime=" + str(mocktime),
+ "-uacomment=testnode%d" % i,
+ "-noprinttoconsole"
+ ]
+
+ self.cli = TestNodeCLI(bitcoin_cli, self.datadir)
self.use_cli = use_cli
self.running = False
@@ -70,15 +94,34 @@ class TestNode():
self.rpc = None
self.url = None
self.log = logging.getLogger('TestFramework.node%d' % i)
+ self.cleanup_on_exit = True # Whether to kill the node when this object goes away
self.p2ps = []
+ def _node_msg(self, msg: str) -> str:
+ """Return a modified msg that identifies this node by its index as a debugging aid."""
+ return "[node %d] %s" % (self.index, msg)
+
+ def _raise_assertion_error(self, msg: str):
+ """Raise an AssertionError with msg modified to identify this node."""
+ raise AssertionError(self._node_msg(msg))
+
+ def __del__(self):
+ # Ensure that we don't leave any bitcoind processes lying around after
+ # the test ends
+ if self.process and self.cleanup_on_exit:
+ # Should only happen on test failure
+ # Avoid using logger, as that may have already been shut down when
+ # this destructor is called.
+ print(self._node_msg("Cleaning up leftover process"))
+ self.process.kill()
+
def __getattr__(self, name):
"""Dispatches any unrecognised messages to the RPC connection or a CLI instance."""
if self.use_cli:
return getattr(self.cli, name)
else:
- assert self.rpc_connected and self.rpc is not None, "Error: no RPC connection"
+ assert self.rpc_connected and self.rpc is not None, self._node_msg("Error: no RPC connection")
return getattr(self.rpc, name)
def start(self, extra_args=None, stderr=None, *args, **kwargs):
@@ -87,6 +130,10 @@ class TestNode():
extra_args = self.extra_args
if stderr is None:
stderr = self.stderr
+ # Delete any existing cookie file -- if such a file exists (eg due to
+ # unclean shutdown), it will get overwritten anyway by bitcoind, and
+ # potentially interfere with our attempt to authenticate
+ delete_cookie_file(self.datadir)
self.process = subprocess.Popen(self.args + extra_args, stderr=stderr, *args, **kwargs)
self.running = True
self.log.debug("bitcoind started, waiting for RPC to come up")
@@ -96,7 +143,9 @@ class TestNode():
# Poll at a rate of four times per second
poll_per_s = 4
for _ in range(poll_per_s * self.rpc_timeout):
- assert self.process.poll() is None, "bitcoind exited with status %i during initialization" % self.process.returncode
+ if self.process.poll() is not None:
+ raise FailedToStartError(self._node_msg(
+ 'bitcoind exited with status {} during initialization'.format(self.process.returncode)))
try:
self.rpc = get_rpc_proxy(rpc_url(self.datadir, self.index, self.rpchost), self.index, timeout=self.rpc_timeout, coveragedir=self.coverage_dir)
self.rpc.getblockcount()
@@ -115,14 +164,13 @@ class TestNode():
if "No RPC credentials" not in str(e):
raise
time.sleep(1.0 / poll_per_s)
- raise AssertionError("Unable to connect to bitcoind")
+ self._raise_assertion_error("Unable to connect to bitcoind")
def get_wallet_rpc(self, wallet_name):
if self.use_cli:
return self.cli("-rpcwallet={}".format(wallet_name))
else:
- assert self.rpc_connected
- assert self.rpc
+ assert self.rpc_connected and self.rpc, self._node_msg("RPC not connected")
wallet_path = "wallet/%s" % wallet_name
return self.rpc / wallet_path
@@ -149,7 +197,8 @@ class TestNode():
return False
# process has stopped. Assert that it didn't return an error code.
- assert_equal(return_code, 0)
+ assert return_code == 0, self._node_msg(
+ "Node returned non-zero exit code (%d) when stopping" % return_code)
self.running = False
self.process = None
self.rpc_connected = False
@@ -160,6 +209,47 @@ class TestNode():
def wait_until_stopped(self, timeout=BITCOIND_PROC_WAIT_TIMEOUT):
wait_until(self.is_node_stopped, timeout=timeout)
+ def assert_start_raises_init_error(self, extra_args=None, expected_msg=None, match=ErrorMatch.FULL_TEXT, *args, **kwargs):
+ """Attempt to start the node and expect it to raise an error.
+
+ extra_args: extra arguments to pass through to bitcoind
+ expected_msg: regex that stderr should match when bitcoind fails
+
+ Will throw if bitcoind starts without an error.
+ Will throw if an expected_msg is provided and it does not match bitcoind's stderr."""
+ with tempfile.SpooledTemporaryFile(max_size=2**16) as log_stderr:
+ try:
+ self.start(extra_args, stderr=log_stderr, *args, **kwargs)
+ self.wait_for_rpc_connection()
+ self.stop_node()
+ self.wait_until_stopped()
+ except FailedToStartError as e:
+ self.log.debug('bitcoind failed to start: %s', e)
+ self.running = False
+ self.process = None
+ # Check stderr for expected message
+ if expected_msg is not None:
+ log_stderr.seek(0)
+ stderr = log_stderr.read().decode('utf-8').strip()
+ if match == ErrorMatch.PARTIAL_REGEX:
+ if re.search(expected_msg, stderr, flags=re.MULTILINE) is None:
+ self._raise_assertion_error(
+ 'Expected message "{}" does not partially match stderr:\n"{}"'.format(expected_msg, stderr))
+ elif match == ErrorMatch.FULL_REGEX:
+ if re.fullmatch(expected_msg, stderr) is None:
+ self._raise_assertion_error(
+ 'Expected message "{}" does not fully match stderr:\n"{}"'.format(expected_msg, stderr))
+ elif match == ErrorMatch.FULL_TEXT:
+ if expected_msg != stderr:
+ self._raise_assertion_error(
+ 'Expected message "{}" does not fully match stderr:\n"{}"'.format(expected_msg, stderr))
+ else:
+ if expected_msg is None:
+ assert_msg = "bitcoind should have exited with an error"
+ else:
+ assert_msg = "bitcoind should have exited with expected error " + expected_msg
+ self._raise_assertion_error(assert_msg)
+
def node_encrypt_wallet(self, passphrase):
""""Encrypts the wallet.
@@ -189,7 +279,7 @@ class TestNode():
Convenience property - most tests only use a single p2p connection to each
node, so this saves having to write node.p2ps[0] many times."""
- assert self.p2ps, "No p2p connection"
+ assert self.p2ps, self._node_msg("No p2p connection")
return self.p2ps[0]
def disconnect_p2ps(self):
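A hedged usage sketch of the relocated assert_start_raises_init_error() and the new ErrorMatch modes; the option and error text below are hypothetical, only the method signature and enum come from the diff above:

    # Exact stderr comparison (the default, ErrorMatch.FULL_TEXT):
    self.nodes[0].assert_start_raises_init_error(['-unknownoption'], 'Error: unknown option')
    # Regex search anywhere within stderr:
    self.nodes[0].assert_start_raises_init_error(['-unknownoption'], r'unknown option', match=ErrorMatch.PARTIAL_REGEX)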
diff --git a/test/functional/test_framework/util.py b/test/functional/test_framework/util.py
index 8bf75ca1ae..4ec3175cd6 100644
--- a/test/functional/test_framework/util.py
+++ b/test/functional/test_framework/util.py
@@ -8,6 +8,7 @@ from base64 import b64encode
from binascii import hexlify, unhexlify
from decimal import Decimal, ROUND_DOWN
import hashlib
+import inspect
import json
import logging
import os
@@ -26,7 +27,7 @@ logger = logging.getLogger("TestFramework.utils")
def assert_fee_amount(fee, tx_size, fee_per_kB):
"""Assert the fee was in range"""
- target_fee = tx_size * fee_per_kB / 1000
+ target_fee = round(tx_size * fee_per_kB / 1000, 8)
if fee < target_fee:
raise AssertionError("Fee of %s BTC too low! (Should be %s BTC)" % (str(fee), str(target_fee)))
# allow the wallet's estimation to be at most 2 bytes off
@@ -204,9 +205,9 @@ def wait_until(predicate, *, attempts=float('inf'), timeout=float('inf'), lock=N
if attempts == float('inf') and timeout == float('inf'):
timeout = 60
attempt = 0
- timeout += time.time()
+ time_end = time.time() + timeout
- while attempt < attempts and time.time() < timeout:
+ while attempt < attempts and time.time() < time_end:
if lock:
with lock:
if predicate():
@@ -218,8 +219,12 @@ def wait_until(predicate, *, attempts=float('inf'), timeout=float('inf'), lock=N
time.sleep(0.05)
# Print the cause of the timeout
- assert_greater_than(attempts, attempt)
- assert_greater_than(timeout, time.time())
+ predicate_source = inspect.getsourcelines(predicate)
+ logger.error("wait_until() failed. Predicate: {}".format(predicate_source))
+ if attempt >= attempts:
+ raise AssertionError("Predicate {} not true after {} attempts".format(predicate_source, attempts))
+ elif time.time() >= time_end:
+ raise AssertionError("Predicate {} not true after {} seconds".format(predicate_source, timeout))
raise RuntimeError('Unreachable')
# RPC/P2P connection constants and functions
@@ -284,20 +289,28 @@ def rpc_url(datadir, i, rpchost=None):
################
def initialize_datadir(dirname, n):
- datadir = os.path.join(dirname, "node" + str(n))
+ datadir = get_datadir_path(dirname, n)
if not os.path.isdir(datadir):
os.makedirs(datadir)
with open(os.path.join(datadir, "bitcoin.conf"), 'w', encoding='utf8') as f:
f.write("regtest=1\n")
+ f.write("[regtest]\n")
f.write("port=" + str(p2p_port(n)) + "\n")
f.write("rpcport=" + str(rpc_port(n)) + "\n")
+ f.write("server=1\n")
+ f.write("keypool=1\n")
+ f.write("discover=0\n")
f.write("listenonion=0\n")
- f.write("bind=127.0.0.1\n")
return datadir
def get_datadir_path(dirname, n):
return os.path.join(dirname, "node" + str(n))
+def append_config(datadir, options):
+ with open(os.path.join(datadir, "bitcoin.conf"), 'a', encoding='utf8') as f:
+ for option in options:
+ f.write(option + "\n")
+
def get_auth_cookie(datadir):
user = None
password = None
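Taken together, initialize_datadir() now writes the common node options into bitcoin.conf and TestNode appends per-node extra_conf entries with append_config(); a minimal sketch (the directory is illustrative):

    datadir = initialize_datadir('/tmp/testrun', 0)
    # bitcoin.conf now holds regtest=1, [regtest], port=..., rpcport=..., server=1,
    # keypool=1, discover=0 and listenonion=0 instead of passing them on the command line
    append_config(datadir, ['bind=127.0.0.1'])  # e.g. the extra_conf used for the cached nodes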
@@ -320,8 +333,11 @@ def get_auth_cookie(datadir):
raise ValueError("No RPC credentials")
return user, password
-def log_filename(dirname, n_node, logname):
- return os.path.join(dirname, "node" + str(n_node), "regtest", logname)
+# If a cookie file exists in the given datadir, delete it.
+def delete_cookie_file(datadir):
+ if os.path.isfile(os.path.join(datadir, "regtest", ".cookie")):
+ logger.debug("Deleting leftover cookie file")
+ os.remove(os.path.join(datadir, "regtest", ".cookie"))
def get_bip9_status(node, key):
info = node.getblockchaininfo()
@@ -335,20 +351,15 @@ def disconnect_nodes(from_connection, node_num):
for peer_id in [peer['id'] for peer in from_connection.getpeerinfo() if "testnode%d" % node_num in peer['subver']]:
from_connection.disconnectnode(nodeid=peer_id)
- for _ in range(50):
- if [peer['id'] for peer in from_connection.getpeerinfo() if "testnode%d" % node_num in peer['subver']] == []:
- break
- time.sleep(0.1)
- else:
- raise AssertionError("timed out waiting for disconnect")
+ # wait to disconnect
+ wait_until(lambda: [peer['id'] for peer in from_connection.getpeerinfo() if "testnode%d" % node_num in peer['subver']] == [], timeout=5)
def connect_nodes(from_connection, node_num):
ip_port = "127.0.0.1:" + str(p2p_port(node_num))
from_connection.addnode(ip_port, "onetry")
# poll until version handshake complete to avoid race conditions
# with transaction relaying
- while any(peer['version'] == 0 for peer in from_connection.getpeerinfo()):
- time.sleep(0.1)
+ wait_until(lambda: all(peer['version'] != 0 for peer in from_connection.getpeerinfo()))
def connect_nodes_bi(nodes, a, b):
connect_nodes(nodes[a], b)
@@ -362,54 +373,29 @@ def sync_blocks(rpc_connections, *, wait=1, timeout=60):
one node already synced to the latest, stable tip, otherwise there's a
chance it might return before all nodes are stably synced.
"""
- # Use getblockcount() instead of waitforblockheight() to determine the
- # initial max height because the two RPCs look at different internal global
- # variables (chainActive vs latestBlock) and the former gets updated
- # earlier.
- maxheight = max(x.getblockcount() for x in rpc_connections)
- start_time = cur_time = time.time()
- while cur_time <= start_time + timeout:
- tips = [r.waitforblockheight(maxheight, int(wait * 1000)) for r in rpc_connections]
- if all(t["height"] == maxheight for t in tips):
- if all(t["hash"] == tips[0]["hash"] for t in tips):
- return
- raise AssertionError("Block sync failed, mismatched block hashes:{}".format(
- "".join("\n {!r}".format(tip) for tip in tips)))
- cur_time = time.time()
- raise AssertionError("Block sync to height {} timed out:{}".format(
- maxheight, "".join("\n {!r}".format(tip) for tip in tips)))
-
-def sync_chain(rpc_connections, *, wait=1, timeout=60):
- """
- Wait until everybody has the same best block
- """
- while timeout > 0:
+ stop_time = time.time() + timeout
+ while time.time() <= stop_time:
best_hash = [x.getbestblockhash() for x in rpc_connections]
- if best_hash == [best_hash[0]] * len(best_hash):
+ if best_hash.count(best_hash[0]) == len(rpc_connections):
return
time.sleep(wait)
- timeout -= wait
- raise AssertionError("Chain sync failed: Best block hashes don't match")
+ raise AssertionError("Block sync timed out:{}".format("".join("\n {!r}".format(b) for b in best_hash)))
def sync_mempools(rpc_connections, *, wait=1, timeout=60, flush_scheduler=True):
"""
Wait until everybody has the same transactions in their memory
pools
"""
- while timeout > 0:
- pool = set(rpc_connections[0].getrawmempool())
- num_match = 1
- for i in range(1, len(rpc_connections)):
- if set(rpc_connections[i].getrawmempool()) == pool:
- num_match = num_match + 1
- if num_match == len(rpc_connections):
+ stop_time = time.time() + timeout
+ while time.time() <= stop_time:
+ pool = [set(r.getrawmempool()) for r in rpc_connections]
+ if pool.count(pool[0]) == len(rpc_connections):
if flush_scheduler:
for r in rpc_connections:
r.syncwithvalidationinterfacequeue()
return
time.sleep(wait)
- timeout -= wait
- raise AssertionError("Mempool sync failed")
+ raise AssertionError("Mempool sync timed out:{}".format("".join("\n {!r}".format(m) for m in pool)))
# Transaction/Block functions
#############################
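Both sync helpers now work against a fixed deadline (time.time() + timeout) rather than decrementing the timeout, and they report the mismatching tips or mempools when they give up. Illustrative calls (the node list is hypothetical):

    sync_blocks(self.nodes, timeout=60)              # wait for an identical best block hash on every node
    sync_mempools(self.nodes, flush_scheduler=True)  # wait for identical raw mempools, then flush the validation queue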
@@ -473,7 +459,7 @@ def random_transaction(nodes, amount, min_fee, fee_increment, fee_variants):
outputs[to_node.getnewaddress()] = float(amount)
rawtx = from_node.createrawtransaction(inputs, outputs)
- signresult = from_node.signrawtransaction(rawtx)
+ signresult = from_node.signrawtransactionwithwallet(rawtx)
txid = from_node.sendrawtransaction(signresult["hex"], True)
return (txid, signresult["hex"], fee)
@@ -500,7 +486,7 @@ def create_confirmed_utxos(fee, node, count):
outputs[addr1] = satoshi_round(send_value / 2)
outputs[addr2] = satoshi_round(send_value / 2)
raw_tx = node.createrawtransaction(inputs, outputs)
- signed_tx = node.signrawtransaction(raw_tx)["hex"]
+ signed_tx = node.signrawtransactionwithwallet(raw_tx)["hex"]
node.sendrawtransaction(signed_tx)
while (node.getmempoolinfo()['size'] > 0):
@@ -534,7 +520,7 @@ def create_tx(node, coinbase, to_address, amount):
inputs = [{"txid": coinbase, "vout": 0}]
outputs = {to_address: amount}
rawtx = node.createrawtransaction(inputs, outputs)
- signresult = node.signrawtransaction(rawtx)
+ signresult = node.signrawtransactionwithwallet(rawtx)
assert_equal(signresult["complete"], True)
return signresult["hex"]
@@ -553,7 +539,7 @@ def create_lots_of_big_transactions(node, txouts, utxos, num, fee):
newtx = rawtx[0:92]
newtx = newtx + txouts
newtx = newtx + rawtx[94:]
- signresult = node.signrawtransaction(newtx, None, None, "NONE")
+ signresult = node.signrawtransactionwithwallet(newtx, None, "NONE")
txid = node.sendrawtransaction(signresult["hex"], True)
txids.append(txid)
return txids
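The helpers above all share the same create/sign/send pattern, now going through the wallet-specific signing RPC; condensed, with hypothetical inputs and outputs:

    rawtx = node.createrawtransaction(inputs, outputs)
    signed = node.signrawtransactionwithwallet(rawtx)
    assert signed["complete"]
    txid = node.sendrawtransaction(signed["hex"], True)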
diff --git a/test/functional/test_runner.py b/test/functional/test_runner.py
index 945f645eac..f5e1f3d4f7 100755
--- a/test/functional/test_runner.py
+++ b/test/functional/test_runner.py
@@ -52,7 +52,10 @@ if os.name == 'posix':
TEST_EXIT_PASSED = 0
TEST_EXIT_SKIPPED = 77
-BASE_SCRIPTS= [
+# 20 minutes represented in seconds
+TRAVIS_TIMEOUT_DURATION = 20 * 60
+
+BASE_SCRIPTS = [
# Scripts that are run by the travis build process.
# Longest test should go first, to favor running tests in parallel
'wallet_hd.py',
@@ -64,10 +67,10 @@ BASE_SCRIPTS= [
'feature_segwit.py',
# vv Tests less than 2m vv
'wallet_basic.py',
- 'wallet_accounts.py',
+ 'wallet_labels.py',
'p2p_segwit.py',
'wallet_dump.py',
- 'rpc_listtransactions.py',
+ 'wallet_listtransactions.py',
# vv Tests less than 60s vv
'p2p_sendheaders.py',
'wallet_zapwallettxes.py',
@@ -115,7 +118,11 @@ BASE_SCRIPTS= [
'wallet_importprunedfunds.py',
'rpc_signmessage.py',
'feature_nulldummy.py',
+ 'mempool_accept.py',
'wallet_import_rescan.py',
+ 'rpc_bind.py --ipv4',
+ 'rpc_bind.py --ipv6',
+ 'rpc_bind.py --nonloopback',
'mining_basic.py',
'wallet_bumpfee.py',
'rpc_named_arguments.py',
@@ -126,13 +133,16 @@ BASE_SCRIPTS= [
'feature_cltv.py',
'rpc_uptime.py',
'wallet_resendwallettransactions.py',
+ 'wallet_fallbackfee.py',
'feature_minchainwork.py',
'p2p_fingerprint.py',
'feature_uacomment.py',
'p2p_unrequested_blocks.py',
'feature_logging.py',
'p2p_node_network_limited.py',
+ 'feature_blocksdir.py',
'feature_config_args.py',
+ 'feature_help.py',
# Don't append tests at the end to avoid merge conflicts
# Put them in a random line within the section that fits their approximate run-time
]
@@ -152,9 +162,7 @@ EXTENDED_SCRIPTS = [
'mining_getblocktemplate_longpoll.py',
'p2p_timeouts.py',
# vv Tests less than 60s vv
- 'feature_bip9_softforks.py',
'p2p_feefilter.py',
- 'rpc_bind.py',
# vv Tests less than 30s vv
'feature_assumevalid.py',
'example_test.py',
@@ -232,29 +240,27 @@ def main():
sys.exit(0)
# Build list of tests
+ test_list = []
if tests:
# Individual tests have been specified. Run specified tests that exist
# in the ALL_SCRIPTS list. Accept the name with or without .py extension.
- tests = [re.sub("\.py$", "", t) + ".py" for t in tests]
- test_list = []
- for t in tests:
- if t in ALL_SCRIPTS:
- test_list.append(t)
+ tests = [re.sub("\.py$", "", test) + ".py" for test in tests]
+ for test in tests:
+ if test in ALL_SCRIPTS:
+ test_list.append(test)
else:
- print("{}WARNING!{} Test '{}' not found in full test list.".format(BOLD[1], BOLD[0], t))
+ print("{}WARNING!{} Test '{}' not found in full test list.".format(BOLD[1], BOLD[0], test))
+ elif args.extended:
+ # Include extended tests
+ test_list += ALL_SCRIPTS
else:
- # No individual tests have been specified.
- # Run all base tests, and optionally run extended tests.
- test_list = BASE_SCRIPTS
- if args.extended:
- # place the EXTENDED_SCRIPTS first since the three longest ones
- # are there and the list is shorter
- test_list = EXTENDED_SCRIPTS + test_list
+ # Run base tests only
+ test_list += BASE_SCRIPTS
# Remove the test cases that the user has explicitly asked to exclude.
if args.exclude:
- tests_excl = [re.sub("\.py$", "", t) + ".py" for t in args.exclude.split(',')]
- for exclude_test in tests_excl:
+ exclude_tests = [re.sub("\.py$", "", test) + ".py" for test in args.exclude.split(',')]
+ for exclude_test in exclude_tests:
if exclude_test in test_list:
test_list.remove(exclude_test)
else:
@@ -277,9 +283,9 @@ def main():
if not args.keepcache:
shutil.rmtree("%s/test/cache" % config["environment"]["BUILDDIR"], ignore_errors=True)
- run_tests(test_list, config["environment"]["SRCDIR"], config["environment"]["BUILDDIR"], config["environment"]["EXEEXT"], tmpdir, args.jobs, args.coverage, passon_args, args.combinedlogslen)
+ run_tests(test_list, config["environment"]["SRCDIR"], config["environment"]["BUILDDIR"], tmpdir, args.jobs, args.coverage, passon_args, args.combinedlogslen)
-def run_tests(test_list, src_dir, build_dir, exeext, tmpdir, jobs=1, enable_coverage=False, args=[], combined_logs_len=0):
+def run_tests(test_list, src_dir, build_dir, tmpdir, jobs=1, enable_coverage=False, args=[], combined_logs_len=0):
# Warn if bitcoind is already running (unix only)
try:
if subprocess.check_output(["pidof", "bitcoind"]) is not None:
@@ -292,11 +298,6 @@ def run_tests(test_list, src_dir, build_dir, exeext, tmpdir, jobs=1, enable_cove
if os.path.isdir(cache_dir):
print("%sWARNING!%s There is a cache directory here: %s. If tests fail unexpectedly, try deleting the cache directory." % (BOLD[1], BOLD[0], cache_dir))
- #Set env vars
- if "BITCOIND" not in os.environ:
- os.environ["BITCOIND"] = build_dir + '/src/bitcoind' + exeext
- os.environ["BITCOINCLI"] = build_dir + '/src/bitcoin-cli' + exeext
-
tests_dir = src_dir + '/test/functional/'
flags = ["--srcdir={}/src".format(build_dir)] + args
@@ -319,7 +320,7 @@ def run_tests(test_list, src_dir, build_dir, exeext, tmpdir, jobs=1, enable_cove
#Run Tests
job_queue = TestHandler(jobs, tests_dir, tmpdir, test_list, flags)
- time0 = time.time()
+ start_time = time.time()
test_results = []
max_len_name = len(max(test_list, key=len))
@@ -345,7 +346,7 @@ def run_tests(test_list, src_dir, build_dir, exeext, tmpdir, jobs=1, enable_cove
combined_logs, _ = subprocess.Popen([sys.executable, os.path.join(tests_dir, 'combine_logs.py'), '-c', testdir], universal_newlines=True, stdout=subprocess.PIPE).communicate()
print("\n".join(deque(combined_logs.splitlines(), combined_logs_len)))
- print_results(test_results, max_len_name, (int(time.time() - time0)))
+ print_results(test_results, max_len_name, (int(time.time() - start_time)))
if coverage:
coverage.report_rpc_coverage()
@@ -364,7 +365,7 @@ def run_tests(test_list, src_dir, build_dir, exeext, tmpdir, jobs=1, enable_cove
def print_results(test_results, max_len_name, runtime):
results = "\n" + BOLD[1] + "%s | %s | %s\n\n" % ("TEST".ljust(max_len_name), "STATUS ", "DURATION") + BOLD[0]
- test_results.sort(key=lambda result: result.name.lower())
+ test_results.sort(key=TestResult.sort_key)
all_passed = True
time_sum = 0
@@ -375,7 +376,11 @@ def print_results(test_results, max_len_name, runtime):
results += str(test_result)
status = TICK + "Passed" if all_passed else CROSS + "Failed"
+ if not all_passed:
+ results += RED[1]
results += BOLD[1] + "\n%s | %s | %s s (accumulated) \n" % ("ALL".ljust(max_len_name), status.ljust(9), time_sum) + BOLD[0]
+ if not all_passed:
+ results += RED[0]
results += "Runtime: %s s\n" % (runtime)
print(results)
@@ -402,15 +407,15 @@ class TestHandler:
while self.num_running < self.num_jobs and self.test_list:
# Add tests
self.num_running += 1
- t = self.test_list.pop(0)
+ test = self.test_list.pop(0)
portseed = len(self.test_list) + self.portseed_offset
portseed_arg = ["--portseed={}".format(portseed)]
log_stdout = tempfile.SpooledTemporaryFile(max_size=2**16)
log_stderr = tempfile.SpooledTemporaryFile(max_size=2**16)
- test_argv = t.split()
+ test_argv = test.split()
testdir = "{}/{}_{}".format(self.tmpdir, re.sub(".py$", "", test_argv[0]), portseed)
tmpdir_arg = ["--tmpdir={}".format(testdir)]
- self.jobs.append((t,
+ self.jobs.append((test,
time.time(),
subprocess.Popen([sys.executable, self.tests_dir + test_argv[0]] + test_argv[1:] + self.flags + portseed_arg + tmpdir_arg,
universal_newlines=True,
@@ -424,15 +429,14 @@ class TestHandler:
while True:
# Return first proc that finishes
time.sleep(.5)
- for j in self.jobs:
- (name, time0, proc, testdir, log_out, log_err) = j
- if os.getenv('TRAVIS') == 'true' and int(time.time() - time0) > 20 * 60:
- # In travis, timeout individual tests after 20 minutes (to stop tests hanging and not
- # providing useful output.
+ for job in self.jobs:
+ (name, start_time, proc, testdir, log_out, log_err) = job
+ if os.getenv('TRAVIS') == 'true' and int(time.time() - start_time) > TRAVIS_TIMEOUT_DURATION:
+ # In travis, timeout individual tests (to stop tests hanging and not providing useful output).
proc.send_signal(signal.SIGINT)
if proc.poll() is not None:
log_out.seek(0), log_err.seek(0)
- [stdout, stderr] = [l.read().decode('utf-8') for l in (log_out, log_err)]
+ [stdout, stderr] = [log_file.read().decode('utf-8') for log_file in (log_out, log_err)]
log_out.close(), log_err.close()
if proc.returncode == TEST_EXIT_PASSED and stderr == "":
status = "Passed"
@@ -441,9 +445,9 @@ class TestHandler:
else:
status = "Failed"
self.num_running -= 1
- self.jobs.remove(j)
+ self.jobs.remove(job)
- return TestResult(name, status, int(time.time() - time0)), testdir, stdout, stderr
+ return TestResult(name, status, int(time.time() - start_time)), testdir, stdout, stderr
print('.', end='', flush=True)
class TestResult():
@@ -453,6 +457,14 @@ class TestResult():
self.time = time
self.padding = 0
+ def sort_key(self):
+ if self.status == "Passed":
+ return 0, self.name.lower()
+ elif self.status == "Failed":
+ return 2, self.name.lower()
+ elif self.status == "Skipped":
+ return 1, self.name.lower()
+
def __repr__(self):
if self.status == "Passed":
color = BLUE
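A small sketch of the new summary ordering (constructor arguments as used elsewhere in this file): passed tests sort first, then skipped, then failed, so failures end up at the bottom of the table:

    results = [TestResult('b.py', 'Failed', 1), TestResult('a.py', 'Passed', 2), TestResult('c.py', 'Skipped', 0)]
    results.sort(key=TestResult.sort_key)
    [r.name for r in results]   # ['a.py', 'c.py', 'b.py']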
@@ -489,7 +501,7 @@ def check_script_list(src_dir):
Check that there are no scripts in the functional tests directory which are
not being run by pull-tester.py."""
script_dir = src_dir + '/test/functional/'
- python_files = set([t for t in os.listdir(script_dir) if t[-3:] == ".py"])
+ python_files = set([test_file for test_file in os.listdir(script_dir) if test_file.endswith(".py")])
missed_tests = list(python_files - set(map(lambda x: x.split()[0], ALL_SCRIPTS + NON_SCRIPTS)))
if len(missed_tests) != 0:
print("%sWARNING!%s The following scripts are not being run: %s. Check the test lists in test_runner.py." % (BOLD[1], BOLD[0], str(missed_tests)))
@@ -525,7 +537,7 @@ class RPCCoverage():
if uncovered:
print("Uncovered RPC commands:")
- print("".join((" - %s\n" % i) for i in sorted(uncovered)))
+ print("".join((" - %s\n" % command) for command in sorted(uncovered)))
else:
print("All RPC commands covered.")
@@ -549,8 +561,8 @@ class RPCCoverage():
if not os.path.isfile(coverage_ref_filename):
raise RuntimeError("No coverage reference found")
- with open(coverage_ref_filename, 'r') as f:
- all_cmds.update([i.strip() for i in f.readlines()])
+ with open(coverage_ref_filename, 'r') as coverage_ref_file:
+ all_cmds.update([line.strip() for line in coverage_ref_file.readlines()])
for root, dirs, files in os.walk(self.dir):
for filename in files:
@@ -558,8 +570,8 @@ class RPCCoverage():
coverage_filenames.add(os.path.join(root, filename))
for filename in coverage_filenames:
- with open(filename, 'r') as f:
- covered_cmds.update([i.strip() for i in f.readlines()])
+ with open(filename, 'r') as coverage_file:
+ covered_cmds.update([line.strip() for line in coverage_file.readlines()])
return all_cmds - covered_cmds
diff --git a/test/functional/wallet_abandonconflict.py b/test/functional/wallet_abandonconflict.py
index 8fb860cd7e..d5ef08d782 100755
--- a/test/functional/wallet_abandonconflict.py
+++ b/test/functional/wallet_abandonconflict.py
@@ -55,7 +55,7 @@ class AbandonConflictTest(BitcoinTestFramework):
outputs[self.nodes[0].getnewaddress()] = Decimal("14.99998")
outputs[self.nodes[1].getnewaddress()] = Decimal("5")
- signed = self.nodes[0].signrawtransaction(self.nodes[0].createrawtransaction(inputs, outputs))
+ signed = self.nodes[0].signrawtransactionwithwallet(self.nodes[0].createrawtransaction(inputs, outputs))
txAB1 = self.nodes[0].sendrawtransaction(signed["hex"])
# Identify the 14.99998btc output
@@ -67,7 +67,7 @@ class AbandonConflictTest(BitcoinTestFramework):
inputs.append({"txid":txC, "vout":nC})
outputs = {}
outputs[self.nodes[0].getnewaddress()] = Decimal("24.9996")
- signed2 = self.nodes[0].signrawtransaction(self.nodes[0].createrawtransaction(inputs, outputs))
+ signed2 = self.nodes[0].signrawtransactionwithwallet(self.nodes[0].createrawtransaction(inputs, outputs))
txABC2 = self.nodes[0].sendrawtransaction(signed2["hex"])
# In mempool txs from self should increase balance from change
@@ -109,7 +109,7 @@ class AbandonConflictTest(BitcoinTestFramework):
assert_equal(len(self.nodes[0].getrawmempool()), 0)
assert_equal(self.nodes[0].getbalance(), balance)
- # But if its received again then it is unabandoned
+ # But if it is received again then it is unabandoned
# And since now in mempool, the change is available
# But its child tx remains abandoned
self.nodes[0].sendrawtransaction(signed["hex"])
@@ -117,7 +117,7 @@ class AbandonConflictTest(BitcoinTestFramework):
assert_equal(newbalance, balance - Decimal("20") + Decimal("14.99998"))
balance = newbalance
- # Send child tx again so its unabandoned
+ # Send child tx again so it is unabandoned
self.nodes[0].sendrawtransaction(signed2["hex"])
newbalance = self.nodes[0].getbalance()
assert_equal(newbalance, balance - Decimal("10") - Decimal("14.99998") + Decimal("24.9996"))
@@ -138,7 +138,7 @@ class AbandonConflictTest(BitcoinTestFramework):
outputs = {}
outputs[self.nodes[1].getnewaddress()] = Decimal("9.9999")
tx = self.nodes[0].createrawtransaction(inputs, outputs)
- signed = self.nodes[0].signrawtransaction(tx)
+ signed = self.nodes[0].signrawtransactionwithwallet(tx)
self.nodes[1].sendrawtransaction(signed["hex"])
self.nodes[1].generate(1)
diff --git a/test/functional/wallet_accounts.py b/test/functional/wallet_accounts.py
deleted file mode 100755
index ecd1cfc82b..0000000000
--- a/test/functional/wallet_accounts.py
+++ /dev/null
@@ -1,206 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (c) 2016-2017 The Bitcoin Core developers
-# Distributed under the MIT software license, see the accompanying
-# file COPYING or http://www.opensource.org/licenses/mit-license.php.
-"""Test account RPCs.
-
-RPCs tested are:
- - getaccountaddress
- - getaddressesbyaccount
- - listaddressgroupings
- - setaccount
- - sendfrom (with account arguments)
- - move (with account arguments)
-"""
-
-from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import assert_equal
-
-class WalletAccountsTest(BitcoinTestFramework):
- def set_test_params(self):
- self.setup_clean_chain = True
- self.num_nodes = 1
- self.extra_args = [[]]
-
- def run_test(self):
- node = self.nodes[0]
- # Check that there's no UTXO on any of the nodes
- assert_equal(len(node.listunspent()), 0)
-
- # Note each time we call generate, all generated coins go into
- # the same address, so we call twice to get two addresses w/50 each
- node.generate(1)
- node.generate(101)
- assert_equal(node.getbalance(), 100)
-
- # there should be 2 address groups
- # each with 1 address with a balance of 50 Bitcoins
- address_groups = node.listaddressgroupings()
- assert_equal(len(address_groups), 2)
- # the addresses aren't linked now, but will be after we send to the
- # common address
- linked_addresses = set()
- for address_group in address_groups:
- assert_equal(len(address_group), 1)
- assert_equal(len(address_group[0]), 2)
- assert_equal(address_group[0][1], 50)
- linked_addresses.add(address_group[0][0])
-
- # send 50 from each address to a third address not in this wallet
- # There's some fee that will come back to us when the miner reward
- # matures.
- common_address = "msf4WtN1YQKXvNtvdFYt9JBnUD2FB41kjr"
- txid = node.sendmany(
- fromaccount="",
- amounts={common_address: 100},
- subtractfeefrom=[common_address],
- minconf=1,
- )
- tx_details = node.gettransaction(txid)
- fee = -tx_details['details'][0]['fee']
- # there should be 1 address group, with the previously
- # unlinked addresses now linked (they both have 0 balance)
- address_groups = node.listaddressgroupings()
- assert_equal(len(address_groups), 1)
- assert_equal(len(address_groups[0]), 2)
- assert_equal(set([a[0] for a in address_groups[0]]), linked_addresses)
- assert_equal([a[1] for a in address_groups[0]], [0, 0])
-
- node.generate(1)
-
- # we want to reset so that the "" account has what's expected.
- # otherwise we're off by exactly the fee amount as that's mined
- # and matures in the next 100 blocks
- node.sendfrom("", common_address, fee)
- amount_to_send = 1.0
-
- # Create accounts and make sure subsequent account API calls
- # recognize the account/address associations.
- accounts = [Account(name) for name in ("a", "b", "c", "d", "e")]
- for account in accounts:
- account.add_receive_address(node.getaccountaddress(account.name))
- account.verify(node)
-
- # Send a transaction to each account, and make sure this forces
- # getaccountaddress to generate a new receiving address.
- for account in accounts:
- node.sendtoaddress(account.receive_address, amount_to_send)
- account.add_receive_address(node.getaccountaddress(account.name))
- account.verify(node)
-
- # Check the amounts received.
- node.generate(1)
- for account in accounts:
- assert_equal(
- node.getreceivedbyaddress(account.addresses[0]), amount_to_send)
- assert_equal(node.getreceivedbyaccount(account.name), amount_to_send)
-
- # Check that sendfrom account reduces listaccounts balances.
- for i, account in enumerate(accounts):
- to_account = accounts[(i+1) % len(accounts)]
- node.sendfrom(account.name, to_account.receive_address, amount_to_send)
- node.generate(1)
- for account in accounts:
- account.add_receive_address(node.getaccountaddress(account.name))
- account.verify(node)
- assert_equal(node.getreceivedbyaccount(account.name), 2)
- node.move(account.name, "", node.getbalance(account.name))
- account.verify(node)
- node.generate(101)
- expected_account_balances = {"": 5200}
- for account in accounts:
- expected_account_balances[account.name] = 0
- assert_equal(node.listaccounts(), expected_account_balances)
- assert_equal(node.getbalance(""), 5200)
-
- # Check that setaccount can assign an account to a new unused address.
- for account in accounts:
- address = node.getaccountaddress("")
- node.setaccount(address, account.name)
- account.add_address(address)
- account.verify(node)
- assert(address not in node.getaddressesbyaccount(""))
-
- # Check that addmultisigaddress can assign accounts.
- for account in accounts:
- addresses = []
- for x in range(10):
- addresses.append(node.getnewaddress())
- multisig_address = node.addmultisigaddress(5, addresses, account.name)['address']
- account.add_address(multisig_address)
- account.verify(node)
- node.sendfrom("", multisig_address, 50)
- node.generate(101)
- for account in accounts:
- assert_equal(node.getbalance(account.name), 50)
-
- # Check that setaccount can change the account of an address from a
- # different account.
- change_account(node, accounts[0].addresses[0], accounts[0], accounts[1])
-
- # Check that setaccount can change the account of an address which
- # is the receiving address of a different account.
- change_account(node, accounts[0].receive_address, accounts[0], accounts[1])
-
- # Check that setaccount can set the account of an address already
- # in the account. This is a no-op.
- change_account(node, accounts[2].addresses[0], accounts[2], accounts[2])
-
- # Check that setaccount can set the account of an address which is
- # already the receiving address of the account. It would probably make
- # sense for this to be a no-op, but right now it resets the receiving
- # address, causing getaccountaddress to return a brand new address.
- change_account(node, accounts[2].receive_address, accounts[2], accounts[2])
-
-class Account:
- def __init__(self, name):
- # Account name
- self.name = name
- # Current receiving address associated with this account.
- self.receive_address = None
- # List of all addresses assigned with this account
- self.addresses = []
-
- def add_address(self, address):
- assert_equal(address not in self.addresses, True)
- self.addresses.append(address)
-
- def add_receive_address(self, address):
- self.add_address(address)
- self.receive_address = address
-
- def verify(self, node):
- if self.receive_address is not None:
- assert self.receive_address in self.addresses
- assert_equal(node.getaccountaddress(self.name), self.receive_address)
-
- for address in self.addresses:
- assert_equal(node.getaccount(address), self.name)
-
- assert_equal(
- set(node.getaddressesbyaccount(self.name)), set(self.addresses))
-
-
-def change_account(node, address, old_account, new_account):
- assert_equal(address in old_account.addresses, True)
- node.setaccount(address, new_account.name)
-
- old_account.addresses.remove(address)
- new_account.add_address(address)
-
- # Calling setaccount on an address which was previously the receiving
- # address of a different account should reset the receiving address of
- # the old account, causing getaccountaddress to return a brand new
- # address.
- if address == old_account.receive_address:
- new_address = node.getaccountaddress(old_account.name)
- assert_equal(new_address not in old_account.addresses, True)
- assert_equal(new_address not in new_account.addresses, True)
- old_account.add_receive_address(new_address)
-
- old_account.verify(node)
- new_account.verify(node)
-
-
-if __name__ == '__main__':
- WalletAccountsTest().main()
diff --git a/test/functional/wallet_address_types.py b/test/functional/wallet_address_types.py
index 38a3425214..5d2428e6ef 100755
--- a/test/functional/wallet_address_types.py
+++ b/test/functional/wallet_address_types.py
@@ -93,8 +93,8 @@ class AddressTypeTest(BitcoinTestFramework):
def test_address(self, node, address, multisig, typ):
"""Run sanity checks on an address."""
- info = self.nodes[node].validateaddress(address)
- assert(info['isvalid'])
+ info = self.nodes[node].getaddressinfo(address)
+ assert(self.nodes[node].validateaddress(address)['isvalid'])
if not multisig and typ == 'legacy':
# P2PKH
assert(not info['isscript'])
diff --git a/test/functional/wallet_backup.py b/test/functional/wallet_backup.py
index b4be7debb5..46a72d7e28 100755
--- a/test/functional/wallet_backup.py
+++ b/test/functional/wallet_backup.py
@@ -90,9 +90,9 @@ class WalletBackupTest(BitcoinTestFramework):
self.stop_node(2)
def erase_three(self):
- os.remove(self.options.tmpdir + "/node0/regtest/wallets/wallet.dat")
- os.remove(self.options.tmpdir + "/node1/regtest/wallets/wallet.dat")
- os.remove(self.options.tmpdir + "/node2/regtest/wallets/wallet.dat")
+ os.remove(os.path.join(self.nodes[0].datadir, 'regtest', 'wallets', 'wallet.dat'))
+ os.remove(os.path.join(self.nodes[1].datadir, 'regtest', 'wallets', 'wallet.dat'))
+ os.remove(os.path.join(self.nodes[2].datadir, 'regtest', 'wallets', 'wallet.dat'))
def run_test(self):
self.log.info("Generating initial blockchain")
@@ -116,13 +116,13 @@ class WalletBackupTest(BitcoinTestFramework):
self.do_one_round()
self.log.info("Backing up")
- tmpdir = self.options.tmpdir
- self.nodes[0].backupwallet(tmpdir + "/node0/wallet.bak")
- self.nodes[0].dumpwallet(tmpdir + "/node0/wallet.dump")
- self.nodes[1].backupwallet(tmpdir + "/node1/wallet.bak")
- self.nodes[1].dumpwallet(tmpdir + "/node1/wallet.dump")
- self.nodes[2].backupwallet(tmpdir + "/node2/wallet.bak")
- self.nodes[2].dumpwallet(tmpdir + "/node2/wallet.dump")
+
+ self.nodes[0].backupwallet(os.path.join(self.nodes[0].datadir, 'wallet.bak'))
+ self.nodes[0].dumpwallet(os.path.join(self.nodes[0].datadir, 'wallet.dump'))
+ self.nodes[1].backupwallet(os.path.join(self.nodes[1].datadir, 'wallet.bak'))
+ self.nodes[1].dumpwallet(os.path.join(self.nodes[1].datadir, 'wallet.dump'))
+ self.nodes[2].backupwallet(os.path.join(self.nodes[2].datadir, 'wallet.bak'))
+ self.nodes[2].dumpwallet(os.path.join(self.nodes[2].datadir, 'wallet.dump'))
self.log.info("More transactions")
for i in range(5):
@@ -150,13 +150,13 @@ class WalletBackupTest(BitcoinTestFramework):
self.erase_three()
# Start node2 with no chain
- shutil.rmtree(self.options.tmpdir + "/node2/regtest/blocks")
- shutil.rmtree(self.options.tmpdir + "/node2/regtest/chainstate")
+ shutil.rmtree(os.path.join(self.nodes[2].datadir, 'regtest', 'blocks'))
+ shutil.rmtree(os.path.join(self.nodes[2].datadir, 'regtest', 'chainstate'))
# Restore wallets from backup
- shutil.copyfile(tmpdir + "/node0/wallet.bak", tmpdir + "/node0/regtest/wallets/wallet.dat")
- shutil.copyfile(tmpdir + "/node1/wallet.bak", tmpdir + "/node1/regtest/wallets/wallet.dat")
- shutil.copyfile(tmpdir + "/node2/wallet.bak", tmpdir + "/node2/regtest/wallets/wallet.dat")
+ shutil.copyfile(os.path.join(self.nodes[0].datadir, 'wallet.bak'), os.path.join(self.nodes[0].datadir, 'regtest', 'wallets', 'wallet.dat'))
+ shutil.copyfile(os.path.join(self.nodes[1].datadir, 'wallet.bak'), os.path.join(self.nodes[1].datadir, 'regtest', 'wallets', 'wallet.dat'))
+ shutil.copyfile(os.path.join(self.nodes[2].datadir, 'wallet.bak'), os.path.join(self.nodes[2].datadir, 'regtest', 'wallets', 'wallet.dat'))
self.log.info("Re-starting nodes")
self.start_three()
@@ -171,8 +171,8 @@ class WalletBackupTest(BitcoinTestFramework):
self.erase_three()
#start node2 with no chain
- shutil.rmtree(self.options.tmpdir + "/node2/regtest/blocks")
- shutil.rmtree(self.options.tmpdir + "/node2/regtest/chainstate")
+ shutil.rmtree(os.path.join(self.nodes[2].datadir, 'regtest', 'blocks'))
+ shutil.rmtree(os.path.join(self.nodes[2].datadir, 'regtest', 'chainstate'))
self.start_three()
@@ -180,9 +180,9 @@ class WalletBackupTest(BitcoinTestFramework):
assert_equal(self.nodes[1].getbalance(), 0)
assert_equal(self.nodes[2].getbalance(), 0)
- self.nodes[0].importwallet(tmpdir + "/node0/wallet.dump")
- self.nodes[1].importwallet(tmpdir + "/node1/wallet.dump")
- self.nodes[2].importwallet(tmpdir + "/node2/wallet.dump")
+ self.nodes[0].importwallet(os.path.join(self.nodes[0].datadir, 'wallet.dump'))
+ self.nodes[1].importwallet(os.path.join(self.nodes[1].datadir, 'wallet.dump'))
+ self.nodes[2].importwallet(os.path.join(self.nodes[2].datadir, 'wallet.dump'))
sync_blocks(self.nodes)
@@ -192,10 +192,10 @@ class WalletBackupTest(BitcoinTestFramework):
# Backup to source wallet file must fail
sourcePaths = [
- tmpdir + "/node0/regtest/wallets/wallet.dat",
- tmpdir + "/node0/./regtest/wallets/wallet.dat",
- tmpdir + "/node0/regtest/wallets/",
- tmpdir + "/node0/regtest/wallets"]
+ os.path.join(self.nodes[0].datadir, 'regtest', 'wallets', 'wallet.dat'),
+ os.path.join(self.nodes[0].datadir, 'regtest', '.', 'wallets', 'wallet.dat'),
+ os.path.join(self.nodes[0].datadir, 'regtest', 'wallets', ''),
+ os.path.join(self.nodes[0].datadir, 'regtest', 'wallets')]
for sourcePath in sourcePaths:
assert_raises_rpc_error(-4, "backup failed", self.nodes[0].backupwallet, sourcePath)
diff --git a/test/functional/wallet_basic.py b/test/functional/wallet_basic.py
index a90dbc8adf..bfe90957a7 100755
--- a/test/functional/wallet_basic.py
+++ b/test/functional/wallet_basic.py
@@ -10,9 +10,10 @@ class WalletTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 4
self.setup_clean_chain = True
+ self.extra_args = [['-deprecatedrpc=accounts']] * 4
def setup_network(self):
- self.add_nodes(4)
+ self.add_nodes(4, self.extra_args)
self.start_node(0)
self.start_node(1)
self.start_node(2)
@@ -66,7 +67,7 @@ class WalletTest(BitcoinTestFramework):
assert_equal(txout['value'], 50)
txout = self.nodes[0].gettxout(txid=confirmed_txid, n=confirmed_index, include_mempool=True)
assert_equal(txout['value'], 50)
-
+
# Send 21 BTC from 0 to 2 using sendtoaddress call.
# Locked memory should use at least 32 bytes to sign each transaction
self.log.info("test getmemoryinfo")
@@ -140,7 +141,7 @@ class WalletTest(BitcoinTestFramework):
inputs.append({ "txid" : utxo["txid"], "vout" : utxo["vout"]})
outputs[self.nodes[2].getnewaddress("from1")] = utxo["amount"] - 3
raw_tx = self.nodes[0].createrawtransaction(inputs, outputs)
- txns_to_send.append(self.nodes[0].signrawtransaction(raw_tx))
+ txns_to_send.append(self.nodes[0].signrawtransactionwithwallet(raw_tx))
# Have node 1 (miner) send the transactions
self.nodes[1].sendrawtransaction(txns_to_send[0]["hex"], True)
@@ -225,7 +226,7 @@ class WalletTest(BitcoinTestFramework):
rawTx = self.nodes[1].createrawtransaction(inputs, outputs).replace("c0833842", "00000000") #replace 11.11 with 0.0 (int32)
decRawTx = self.nodes[1].decoderawtransaction(rawTx)
- signedRawTx = self.nodes[1].signrawtransaction(rawTx)
+ signedRawTx = self.nodes[1].signrawtransactionwithwallet(rawTx)
decRawTx = self.nodes[1].decoderawtransaction(signedRawTx['hex'])
zeroValueTxid= decRawTx['txid']
self.nodes[1].sendrawtransaction(signedRawTx['hex'])
@@ -283,7 +284,7 @@ class WalletTest(BitcoinTestFramework):
sync_blocks(self.nodes[0:3])
node_2_bal += 2
- #tx should be added to balance because after restarting the nodes tx should be broadcastet
+ #tx should be added to balance because after restarting the nodes tx should be broadcast
assert_equal(self.nodes[2].getbalance(), node_2_bal)
#send a tx with value in a string (PR#6380 +)
@@ -317,7 +318,7 @@ class WalletTest(BitcoinTestFramework):
self.nodes[1].importaddress(address_to_import)
# 3. Validate that the imported address is watch-only on node1
- assert(self.nodes[1].validateaddress(address_to_import)["iswatchonly"])
+ assert(self.nodes[1].getaddressinfo(address_to_import)["iswatchonly"])
# 4. Check that the unspents after import are not spendable
assert_array_result(self.nodes[1].listunspent(),
@@ -376,12 +377,12 @@ class WalletTest(BitcoinTestFramework):
self.log.info("check " + m)
self.stop_nodes()
# set lower ancestor limit for later
- self.start_node(0, [m, "-limitancestorcount="+str(chainlimit)])
- self.start_node(1, [m, "-limitancestorcount="+str(chainlimit)])
- self.start_node(2, [m, "-limitancestorcount="+str(chainlimit)])
- while m == '-reindex' and [block_count] * 3 != [self.nodes[i].getblockcount() for i in range(3)]:
+ self.start_node(0, [m, "-deprecatedrpc=accounts", "-limitancestorcount="+str(chainlimit)])
+ self.start_node(1, [m, "-deprecatedrpc=accounts", "-limitancestorcount="+str(chainlimit)])
+ self.start_node(2, [m, "-deprecatedrpc=accounts", "-limitancestorcount="+str(chainlimit)])
+ if m == '-reindex':
# reindex will leave rpc warm up "early"; Wait for it to finish
- time.sleep(0.1)
+ wait_until(lambda: [block_count] * 3 == [self.nodes[i].getblockcount() for i in range(3)])
assert_equal(balance_nodes, [self.nodes[i].getbalance() for i in range(3)])
# Exercise listsinceblock with the last two blocks
@@ -400,7 +401,7 @@ class WalletTest(BitcoinTestFramework):
node0_balance = self.nodes[0].getbalance()
# Split into two chains
rawtx = self.nodes[0].createrawtransaction([{"txid":singletxid, "vout":0}], {chain_addrs[0]:node0_balance/2-Decimal('0.01'), chain_addrs[1]:node0_balance/2-Decimal('0.01')})
- signedtx = self.nodes[0].signrawtransaction(rawtx)
+ signedtx = self.nodes[0].signrawtransactionwithwallet(rawtx)
singletxid = self.nodes[0].sendrawtransaction(signedtx["hex"])
self.nodes[0].generate(1)
@@ -426,7 +427,7 @@ class WalletTest(BitcoinTestFramework):
# Try with walletrejectlongchains
# Double chain limit but require combining inputs, so we pass SelectCoinsMinConf
self.stop_node(0)
- self.start_node(0, extra_args=["-walletrejectlongchains", "-limitancestorcount="+str(2*chainlimit)])
+ self.start_node(0, extra_args=["-deprecatedrpc=accounts", "-walletrejectlongchains", "-limitancestorcount="+str(2*chainlimit)])
# wait for loadmempool
timeout = 10
@@ -442,5 +443,14 @@ class WalletTest(BitcoinTestFramework):
# Verify nothing new in wallet
assert_equal(total_txs, len(self.nodes[0].listtransactions("*",99999)))
+ # Test getaddressinfo. Note that these addresses are taken from disablewallet.py
+ assert_raises_rpc_error(-5, "Invalid address", self.nodes[0].getaddressinfo, "3J98t1WpEZ73CNmQviecrnyiWrnqRhWNLy")
+ address_info = self.nodes[0].getaddressinfo("mneYUmWYsuk7kySiURxCi3AGxrAqZxLgPZ")
+ assert_equal(address_info['address'], "mneYUmWYsuk7kySiURxCi3AGxrAqZxLgPZ")
+ assert_equal(address_info["scriptPubKey"], "76a9144e3854046c7bd1594ac904e4793b6a45b36dea0988ac")
+ assert not address_info["ismine"]
+ assert not address_info["iswatchonly"]
+ assert not address_info["isscript"]
+
if __name__ == '__main__':
WalletTest().main()
diff --git a/test/functional/wallet_bumpfee.py b/test/functional/wallet_bumpfee.py
index f621d41b4e..fcc11abce0 100755
--- a/test/functional/wallet_bumpfee.py
+++ b/test/functional/wallet_bumpfee.py
@@ -17,14 +17,12 @@ make assumptions about execution order.
from test_framework.blocktools import send_to_witness
from test_framework.test_framework import BitcoinTestFramework
from test_framework import blocktools
+from test_framework.messages import BIP125_SEQUENCE_NUMBER
from test_framework.mininode import CTransaction
from test_framework.util import *
import io
-# Sequence number that is BIP 125 opt-in and BIP 68-compliant
-BIP125_SEQUENCE_NUMBER = 0xfffffffd
-
WALLET_PASSPHRASE = "test"
WALLET_PASSPHRASE_TIMEOUT = 3600
@@ -104,7 +102,7 @@ def test_segwit_bumpfee_succeeds(rbf_node, dest_address):
# which spends it, and make sure bumpfee can be called on it.
segwit_in = next(u for u in rbf_node.listunspent() if u["amount"] == Decimal("0.001"))
- segwit_out = rbf_node.validateaddress(rbf_node.getnewaddress())
+ segwit_out = rbf_node.getaddressinfo(rbf_node.getnewaddress())
rbf_node.addwitnessaddress(segwit_out["address"])
segwitid = send_to_witness(
use_p2wsh=False,
@@ -121,7 +119,7 @@ def test_segwit_bumpfee_succeeds(rbf_node, dest_address):
"sequence": BIP125_SEQUENCE_NUMBER
}], {dest_address: Decimal("0.0005"),
rbf_node.getrawchangeaddress(): Decimal("0.0003")})
- rbfsigned = rbf_node.signrawtransaction(rbfraw)
+ rbfsigned = rbf_node.signrawtransactionwithwallet(rbfraw)
rbfid = rbf_node.sendrawtransaction(rbfsigned["hex"])
assert rbfid in rbf_node.getrawmempool()
@@ -150,8 +148,8 @@ def test_notmine_bumpfee_fails(rbf_node, peer_node, dest_address):
} for utxo in utxos]
output_val = sum(utxo["amount"] for utxo in utxos) - Decimal("0.001")
rawtx = rbf_node.createrawtransaction(inputs, {dest_address: output_val})
- signedtx = rbf_node.signrawtransaction(rawtx)
- signedtx = peer_node.signrawtransaction(signedtx["hex"])
+ signedtx = rbf_node.signrawtransactionwithwallet(rawtx)
+ signedtx = peer_node.signrawtransactionwithwallet(signedtx["hex"])
rbfid = rbf_node.sendrawtransaction(signedtx["hex"])
assert_raises_rpc_error(-4, "Transaction contains inputs that don't belong to this wallet",
rbf_node.bumpfee, rbfid)
@@ -162,7 +160,7 @@ def test_bumpfee_with_descendant_fails(rbf_node, rbf_node_address, dest_address)
# parent is send-to-self, so we don't have to check which output is change when creating the child tx
parent_id = spend_one_input(rbf_node, rbf_node_address)
tx = rbf_node.createrawtransaction([{"txid": parent_id, "vout": 0}], {dest_address: 0.00020000})
- tx = rbf_node.signrawtransaction(tx)
+ tx = rbf_node.signrawtransactionwithwallet(tx)
rbf_node.sendrawtransaction(tx["hex"])
assert_raises_rpc_error(-8, "Transaction has descendants in the wallet", rbf_node.bumpfee, parent_id)
@@ -181,7 +179,10 @@ def test_dust_to_fee(rbf_node, dest_address):
# the bumped tx sets fee=49,900, but it converts to 50,000
rbfid = spend_one_input(rbf_node, dest_address)
fulltx = rbf_node.getrawtransaction(rbfid, 1)
- bumped_tx = rbf_node.bumpfee(rbfid, {"totalFee": 49900})
+ # (32-byte p2sh-p2wpkh output size + 148 p2pkh spend estimate) * 10k (discard_rate) / 1000 = 1800
+ # P2SH outputs are slightly "over-discarding" due to the IsDust calculation assuming it will
+ # be spent as a P2PKH.
+ bumped_tx = rbf_node.bumpfee(rbfid, {"totalFee": 50000-1800})
full_bumped_tx = rbf_node.getrawtransaction(bumped_tx["txid"], 1)
assert_equal(bumped_tx["fee"], Decimal("0.00050000"))
assert_equal(len(fulltx["vout"]), 2)
@@ -277,7 +278,7 @@ def spend_one_input(node, dest_address):
rawtx = node.createrawtransaction(
[tx_input], {dest_address: Decimal("0.00050000"),
node.getrawchangeaddress(): Decimal("0.00049000")})
- signedtx = node.signrawtransaction(rawtx)
+ signedtx = node.signrawtransactionwithwallet(rawtx)
txid = node.sendrawtransaction(signedtx["hex"])
return txid
diff --git a/test/functional/wallet_dump.py b/test/functional/wallet_dump.py
index 5e943d048d..997f67ec7e 100755
--- a/test/functional/wallet_dump.py
+++ b/test/functional/wallet_dump.py
@@ -7,7 +7,10 @@
import os
from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import (assert_equal, assert_raises_rpc_error)
+from test_framework.util import (
+ assert_equal,
+ assert_raises_rpc_error,
+)
def read_dump(file_name, addrs, script_addrs, hd_master_addr_old):
@@ -49,7 +52,7 @@ def read_dump(file_name, addrs, script_addrs, hd_master_addr_old):
# count key types
for addrObj in addrs:
if addrObj['address'] == addr.split(",")[0] and addrObj['hdkeypath'] == keypath and keytype == "label=":
- # a labled entry in the wallet should contain both a native address
+ # a labeled entry in the wallet should contain both a native address
# and the p2sh-p2wpkh address that was added at wallet setup
if len(addr.split(",")) == 2:
addr_list = addr.split(",")
@@ -84,11 +87,12 @@ class WalletDumpTest(BitcoinTestFramework):
# longer than the default 30 seconds due to an expensive
# CWallet::TopUpKeyPool call, and the encryptwallet RPC made later in
# the test often takes even longer.
- self.add_nodes(self.num_nodes, self.extra_args, timewait=60)
+ self.add_nodes(self.num_nodes, extra_args=self.extra_args, timewait=60)
self.start_nodes()
def run_test (self):
- tmpdir = self.options.tmpdir
+ wallet_unenc_dump = os.path.join(self.nodes[0].datadir, "wallet.unencrypted.dump")
+ wallet_enc_dump = os.path.join(self.nodes[0].datadir, "wallet.encrypted.dump")
# generate 20 addresses to compare against the dump
# but since we add a p2sh-p2wpkh address for the first pubkey in the
@@ -97,7 +101,7 @@ class WalletDumpTest(BitcoinTestFramework):
addrs = []
for i in range(0,test_addr_count):
addr = self.nodes[0].getnewaddress()
- vaddr= self.nodes[0].validateaddress(addr) #required to get hd keypath
+ vaddr= self.nodes[0].getaddressinfo(addr) #required to get hd keypath
addrs.append(vaddr)
# Should be a no-op:
self.nodes[0].keypoolrefill()
@@ -108,11 +112,11 @@ class WalletDumpTest(BitcoinTestFramework):
script_addrs = [witness_addr, multisig_addr]
# dump unencrypted wallet
- result = self.nodes[0].dumpwallet(tmpdir + "/node0/wallet.unencrypted.dump")
- assert_equal(result['filename'], os.path.abspath(tmpdir + "/node0/wallet.unencrypted.dump"))
+ result = self.nodes[0].dumpwallet(wallet_unenc_dump)
+ assert_equal(result['filename'], wallet_unenc_dump)
found_addr, found_script_addr, found_addr_chg, found_addr_rsv, hd_master_addr_unenc, witness_addr_ret = \
- read_dump(tmpdir + "/node0/wallet.unencrypted.dump", addrs, script_addrs, None)
+ read_dump(wallet_unenc_dump, addrs, script_addrs, None)
assert_equal(found_addr, test_addr_count) # all keys must be in the dump
assert_equal(found_script_addr, 2) # all scripts must be in the dump
assert_equal(found_addr_chg, 50)  # 50 blocks were mined
@@ -125,10 +129,10 @@ class WalletDumpTest(BitcoinTestFramework):
self.nodes[0].walletpassphrase('test', 10)
# Should be a no-op:
self.nodes[0].keypoolrefill()
- self.nodes[0].dumpwallet(tmpdir + "/node0/wallet.encrypted.dump")
+ self.nodes[0].dumpwallet(wallet_enc_dump)
found_addr, found_script_addr, found_addr_chg, found_addr_rsv, _, witness_addr_ret = \
- read_dump(tmpdir + "/node0/wallet.encrypted.dump", addrs, script_addrs, hd_master_addr_unenc)
+ read_dump(wallet_enc_dump, addrs, script_addrs, hd_master_addr_unenc)
assert_equal(found_addr, test_addr_count)
assert_equal(found_script_addr, 2)
assert_equal(found_addr_chg, 90*2 + 50) # old reserve keys are marked as change now
@@ -136,21 +140,21 @@ class WalletDumpTest(BitcoinTestFramework):
assert_equal(witness_addr_ret, witness_addr)
# Overwriting should fail
- assert_raises_rpc_error(-8, "already exists", self.nodes[0].dumpwallet, tmpdir + "/node0/wallet.unencrypted.dump")
+ assert_raises_rpc_error(-8, "already exists", lambda: self.nodes[0].dumpwallet(wallet_enc_dump))
# Restart node with new wallet, and test importwallet
self.stop_node(0)
self.start_node(0, ['-wallet=w2'])
# Make sure the address is not IsMine before import
- result = self.nodes[0].validateaddress(multisig_addr)
+ result = self.nodes[0].getaddressinfo(multisig_addr)
assert(result['ismine'] == False)
- self.nodes[0].importwallet(os.path.abspath(tmpdir + "/node0/wallet.unencrypted.dump"))
+ self.nodes[0].importwallet(wallet_unenc_dump)
# Now check IsMine is true
- result = self.nodes[0].validateaddress(multisig_addr)
+ result = self.nodes[0].getaddressinfo(multisig_addr)
assert(result['ismine'] == True)
if __name__ == '__main__':
- WalletDumpTest().main ()
+ WalletDumpTest().main()
diff --git a/test/functional/wallet_encryption.py b/test/functional/wallet_encryption.py
index 3c927ee484..64ee678744 100755
--- a/test/functional/wallet_encryption.py
+++ b/test/functional/wallet_encryption.py
@@ -64,14 +64,15 @@ class WalletEncryptionTest(BitcoinTestFramework):
assert_raises_rpc_error(-8, "Timeout cannot be negative.", self.nodes[0].walletpassphrase, passphrase2, -10)
# Check the timeout
# Check a time less than the limit
- expected_time = int(time.time()) + (1 << 30) - 600
- self.nodes[0].walletpassphrase(passphrase2, (1 << 30) - 600)
+ MAX_VALUE = 100000000
+ expected_time = int(time.time()) + MAX_VALUE - 600
+ self.nodes[0].walletpassphrase(passphrase2, MAX_VALUE - 600)
actual_time = self.nodes[0].getwalletinfo()['unlocked_until']
assert_greater_than_or_equal(actual_time, expected_time)
assert_greater_than(expected_time + 5, actual_time) # 5 second buffer
# Check a time greater than the limit
- expected_time = int(time.time()) + (1 << 30) - 1
- self.nodes[0].walletpassphrase(passphrase2, (1 << 33))
+ expected_time = int(time.time()) + MAX_VALUE - 1
+ self.nodes[0].walletpassphrase(passphrase2, MAX_VALUE + 1000)
actual_time = self.nodes[0].getwalletinfo()['unlocked_until']
assert_greater_than_or_equal(actual_time, expected_time)
assert_greater_than(expected_time + 5, actual_time) # 5 second buffer
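For reference, a small standalone sketch (the helper name is illustrative, not from the test) of the clamping behaviour these assertions encode: unlock timeouts above the cap are limited by the node, so the expected 'unlocked_until' value is computed against MAX_VALUE rather than the requested timeout.

import time

MAX_VALUE = 100000000  # cap on the walletpassphrase timeout, mirroring the constant above

def expected_unlocked_until(requested_timeout, now=None):
    # illustrative lower bound for getwalletinfo()['unlocked_until']
    now = int(time.time()) if now is None else int(now)
    return now + min(requested_timeout, MAX_VALUE)

# a request of MAX_VALUE + 1000 is clamped, which is what the second check above relies on
assert expected_unlocked_until(MAX_VALUE + 1000, now=0) == MAX_VALUE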
diff --git a/test/functional/wallet_fallbackfee.py b/test/functional/wallet_fallbackfee.py
new file mode 100755
index 0000000000..e9cd052344
--- /dev/null
+++ b/test/functional/wallet_fallbackfee.py
@@ -0,0 +1,28 @@
+#!/usr/bin/env python3
+# Copyright (c) 2017 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test wallet replace-by-fee capabilities in conjunction with the fallbackfee."""
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import *
+
+class WalletRBFTest(BitcoinTestFramework):
+ def set_test_params(self):
+ self.num_nodes = 1
+ self.setup_clean_chain = True
+
+ def run_test(self):
+ self.nodes[0].generate(101)
+
+ # sending a transaction when no fee estimate is available must be possible by default on regtest (the fallback fee is used)
+ self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1)
+
+ # test sending a tx with disabled fallback fee (must fail)
+ self.restart_node(0, extra_args=["-fallbackfee=0"])
+ assert_raises_rpc_error(-4, "Fee estimation failed", lambda: self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1))
+ assert_raises_rpc_error(-4, "Fee estimation failed", lambda: self.nodes[0].fundrawtransaction(self.nodes[0].createrawtransaction([], {self.nodes[0].getnewaddress(): 1})))
+ assert_raises_rpc_error(-4, "Fee estimation failed", lambda: self.nodes[0].sendfrom("", self.nodes[0].getnewaddress(), 1))
+ assert_raises_rpc_error(-6, "Fee estimation failed", lambda: self.nodes[0].sendmany("", {self.nodes[0].getnewaddress(): 1}))
+
+if __name__ == '__main__':
+ WalletRBFTest().main()
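The lambdas in this new test exist so assert_raises_rpc_error can invoke the RPC itself and match the error code and message. A hypothetical helper (not part of the framework) capturing that repeated pattern might look like this:

from test_framework.util import assert_raises_rpc_error

def assert_fee_estimation_fails(node, make_call, code=-4):
    # illustrative wrapper: the wrapped call must fail with "Fee estimation failed"
    assert_raises_rpc_error(code, "Fee estimation failed", make_call)

# usage, assuming a regtest node restarted with -fallbackfee=0 as above:
# assert_fee_estimation_fails(node, lambda: node.sendtoaddress(node.getnewaddress(), 1))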
diff --git a/test/functional/wallet_hd.py b/test/functional/wallet_hd.py
index 9f0e9acb47..8c754807e6 100755
--- a/test/functional/wallet_hd.py
+++ b/test/functional/wallet_hd.py
@@ -4,13 +4,15 @@
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test Hierarchical Deterministic wallet function."""
+import os
+import shutil
+
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
connect_nodes_bi,
)
-import shutil
-import os
+
class WalletHDTest(BitcoinTestFramework):
def set_test_params(self):
@@ -18,12 +20,10 @@ class WalletHDTest(BitcoinTestFramework):
self.num_nodes = 2
self.extra_args = [[], ['-keypool=0']]
- def run_test (self):
- tmpdir = self.options.tmpdir
-
+ def run_test(self):
# Make sure we can't switch off usehd after wallet creation
self.stop_node(1)
- self.assert_start_raises_init_error(1, ['-usehd=0'], 'already existing HD wallet')
+ self.nodes[1].assert_start_raises_init_error(['-usehd=0'], "Error: Error loading : You can't disable HD on an already existing HD wallet")
self.start_node(1)
connect_nodes_bi(self.nodes, 0, 1)
@@ -33,7 +33,7 @@ class WalletHDTest(BitcoinTestFramework):
# create an internal key
change_addr = self.nodes[1].getrawchangeaddress()
- change_addrV= self.nodes[1].validateaddress(change_addr)
+ change_addrV= self.nodes[1].getaddressinfo(change_addr)
assert_equal(change_addrV["hdkeypath"], "m/0'/1'/0'") #first internal child key
# Import a non-HD private key in the HD wallet
@@ -41,17 +41,17 @@ class WalletHDTest(BitcoinTestFramework):
self.nodes[1].importprivkey(self.nodes[0].dumpprivkey(non_hd_add))
# This should be enough to keep the master key and the non-HD key
- self.nodes[1].backupwallet(tmpdir + "/hd.bak")
- #self.nodes[1].dumpwallet(tmpdir + "/hd.dump")
+ self.nodes[1].backupwallet(os.path.join(self.nodes[1].datadir, "hd.bak"))
+ #self.nodes[1].dumpwallet(os.path.join(self.nodes[1].datadir, "hd.dump"))
# Derive some HD addresses and remember the last
# Also send funds to each add
self.nodes[0].generate(101)
hd_add = None
- num_hd_adds = 300
- for i in range(num_hd_adds):
+ NUM_HD_ADDS = 10
+ for i in range(NUM_HD_ADDS):
hd_add = self.nodes[1].getnewaddress()
- hd_info = self.nodes[1].validateaddress(hd_add)
+ hd_info = self.nodes[1].getaddressinfo(hd_add)
assert_equal(hd_info["hdkeypath"], "m/0'/0'/"+str(i)+"'")
assert_equal(hd_info["hdmasterkeyid"], masterkeyid)
self.nodes[0].sendtoaddress(hd_add, 1)
@@ -61,27 +61,27 @@ class WalletHDTest(BitcoinTestFramework):
# create an internal key (again)
change_addr = self.nodes[1].getrawchangeaddress()
- change_addrV= self.nodes[1].validateaddress(change_addr)
+ change_addrV= self.nodes[1].getaddressinfo(change_addr)
assert_equal(change_addrV["hdkeypath"], "m/0'/1'/1'") #second internal child key
self.sync_all()
- assert_equal(self.nodes[1].getbalance(), num_hd_adds + 1)
+ assert_equal(self.nodes[1].getbalance(), NUM_HD_ADDS + 1)
self.log.info("Restore backup ...")
self.stop_node(1)
# we need to delete the complete regtest directory
# otherwise node1 would auto-recover all funds and flag the keypool keys as used
- shutil.rmtree(os.path.join(tmpdir, "node1/regtest/blocks"))
- shutil.rmtree(os.path.join(tmpdir, "node1/regtest/chainstate"))
- shutil.copyfile(os.path.join(tmpdir, "hd.bak"), os.path.join(tmpdir, "node1/regtest/wallets/wallet.dat"))
+ shutil.rmtree(os.path.join(self.nodes[1].datadir, "regtest", "blocks"))
+ shutil.rmtree(os.path.join(self.nodes[1].datadir, "regtest", "chainstate"))
+ shutil.copyfile(os.path.join(self.nodes[1].datadir, "hd.bak"), os.path.join(self.nodes[1].datadir, "regtest", "wallets", "wallet.dat"))
self.start_node(1)
# Assert that derivation is deterministic
hd_add_2 = None
- for _ in range(num_hd_adds):
+ for i in range(NUM_HD_ADDS):
hd_add_2 = self.nodes[1].getnewaddress()
- hd_info_2 = self.nodes[1].validateaddress(hd_add_2)
- assert_equal(hd_info_2["hdkeypath"], "m/0'/0'/"+str(_)+"'")
+ hd_info_2 = self.nodes[1].getaddressinfo(hd_add_2)
+ assert_equal(hd_info_2["hdkeypath"], "m/0'/0'/"+str(i)+"'")
assert_equal(hd_info_2["hdmasterkeyid"], masterkeyid)
assert_equal(hd_add, hd_add_2)
connect_nodes_bi(self.nodes, 0, 1)
@@ -90,23 +90,25 @@ class WalletHDTest(BitcoinTestFramework):
# Needs rescan
self.stop_node(1)
self.start_node(1, extra_args=self.extra_args[1] + ['-rescan'])
- assert_equal(self.nodes[1].getbalance(), num_hd_adds + 1)
+ assert_equal(self.nodes[1].getbalance(), NUM_HD_ADDS + 1)
# Try a RPC based rescan
self.stop_node(1)
- shutil.rmtree(os.path.join(tmpdir, "node1/regtest/blocks"))
- shutil.rmtree(os.path.join(tmpdir, "node1/regtest/chainstate"))
- shutil.copyfile(os.path.join(tmpdir, "hd.bak"), os.path.join(tmpdir, "node1/regtest/wallet.dat"))
+ shutil.rmtree(os.path.join(self.nodes[1].datadir, "regtest", "blocks"))
+ shutil.rmtree(os.path.join(self.nodes[1].datadir, "regtest", "chainstate"))
+ shutil.copyfile(os.path.join(self.nodes[1].datadir, "hd.bak"), os.path.join(self.nodes[1].datadir, "regtest", "wallets", "wallet.dat"))
self.start_node(1, extra_args=self.extra_args[1])
connect_nodes_bi(self.nodes, 0, 1)
self.sync_all()
+ # The wallet automatically rescans blocks older than its keys on startup
+ assert_equal(self.nodes[1].getbalance(), NUM_HD_ADDS + 1)
out = self.nodes[1].rescanblockchain(0, 1)
assert_equal(out['start_height'], 0)
assert_equal(out['stop_height'], 1)
out = self.nodes[1].rescanblockchain()
assert_equal(out['start_height'], 0)
assert_equal(out['stop_height'], self.nodes[1].getblockcount())
- assert_equal(self.nodes[1].getbalance(), num_hd_adds + 1)
+ assert_equal(self.nodes[1].getbalance(), NUM_HD_ADDS + 1)
# send a tx and make sure it's using the internal chain for the change output
txid = self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 1)
@@ -114,7 +116,7 @@ class WalletHDTest(BitcoinTestFramework):
keypath = ""
for out in outs:
if out['value'] != 1:
- keypath = self.nodes[1].validateaddress(out['scriptPubKey']['addresses'][0])['hdkeypath']
+ keypath = self.nodes[1].getaddressinfo(out['scriptPubKey']['addresses'][0])['hdkeypath']
assert_equal(keypath[0:7], "m/0'/1'")
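The keypath assertions in this file follow a fixed layout: external receive keys at m/0'/0'/i' and internal change keys under m/0'/1'. A small standalone sketch of those two checks (helper names are illustrative, not from the test):

def expected_receive_keypath(i):
    # external-chain keypath for the i-th receive key, as asserted above
    return "m/0'/0'/%d'" % i

def is_change_keypath(keypath):
    # True if the keypath lies on the internal (change) chain checked above
    return keypath.startswith("m/0'/1'")

assert expected_receive_keypath(0) == "m/0'/0'/0'"
assert is_change_keypath("m/0'/1'/0'")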
diff --git a/test/functional/wallet_import_rescan.py b/test/functional/wallet_import_rescan.py
index d193a99d5b..6775d8b46d 100755
--- a/test/functional/wallet_import_rescan.py
+++ b/test/functional/wallet_import_rescan.py
@@ -78,7 +78,7 @@ class Variant(collections.namedtuple("Variant", "call data rescan prune")):
if txid is not None:
tx, = [tx for tx in txs if tx["txid"] == txid]
- assert_equal(tx["account"], self.label)
+ assert_equal(tx["label"], self.label)
assert_equal(tx["address"], self.address["address"])
assert_equal(tx["amount"], amount)
assert_equal(tx["category"], "receive")
@@ -119,12 +119,12 @@ class ImportRescanTest(BitcoinTestFramework):
self.num_nodes = 2 + len(IMPORT_NODES)
def setup_network(self):
- extra_args = [["-addresstype=legacy"] for _ in range(self.num_nodes)]
+ extra_args = [["-addresstype=legacy", '-deprecatedrpc=accounts'] for _ in range(self.num_nodes)]
for i, import_node in enumerate(IMPORT_NODES, 2):
if import_node.prune:
extra_args[i] += ["-prune=1"]
- self.add_nodes(self.num_nodes, extra_args)
+ self.add_nodes(self.num_nodes, extra_args=extra_args)
self.start_nodes()
for i in range(1, self.num_nodes):
connect_nodes(self.nodes[i], 0)
@@ -134,7 +134,7 @@ class ImportRescanTest(BitcoinTestFramework):
# each possible type of wallet import RPC.
for i, variant in enumerate(IMPORT_VARIANTS):
variant.label = "label {} {}".format(i, variant)
- variant.address = self.nodes[1].validateaddress(self.nodes[1].getnewaddress(variant.label))
+ variant.address = self.nodes[1].getaddressinfo(self.nodes[1].getnewaddress(variant.label))
variant.key = self.nodes[1].dumpprivkey(variant.address["address"])
variant.initial_amount = 10 - (i + 1) / 4.0
variant.initial_txid = self.nodes[0].sendtoaddress(variant.address["address"], variant.initial_amount)
diff --git a/test/functional/wallet_importmulti.py b/test/functional/wallet_importmulti.py
index be9be83839..91acdb01d0 100755
--- a/test/functional/wallet_importmulti.py
+++ b/test/functional/wallet_importmulti.py
@@ -21,7 +21,7 @@ class ImportMultiTest (BitcoinTestFramework):
self.nodes[1].generate(1)
timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
- node0_address1 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
+ node0_address1 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
#Check only one address
assert_equal(node0_address1['ismine'], True)
@@ -30,7 +30,7 @@ class ImportMultiTest (BitcoinTestFramework):
assert_equal(self.nodes[1].getblockcount(),1)
#Address Test - before import
- address_info = self.nodes[1].validateaddress(node0_address1['address'])
+ address_info = self.nodes[1].getaddressinfo(node0_address1['address'])
assert_equal(address_info['iswatchonly'], False)
assert_equal(address_info['ismine'], False)
@@ -39,7 +39,7 @@ class ImportMultiTest (BitcoinTestFramework):
# Bitcoin Address
self.log.info("Should import an address")
- address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
+ address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
result = self.nodes[1].importmulti([{
"scriptPubKey": {
"address": address['address']
@@ -47,7 +47,7 @@ class ImportMultiTest (BitcoinTestFramework):
"timestamp": "now",
}])
assert_equal(result[0]['success'], True)
- address_assert = self.nodes[1].validateaddress(address['address'])
+ address_assert = self.nodes[1].getaddressinfo(address['address'])
assert_equal(address_assert['iswatchonly'], True)
assert_equal(address_assert['ismine'], False)
assert_equal(address_assert['timestamp'], timestamp)
@@ -67,21 +67,21 @@ class ImportMultiTest (BitcoinTestFramework):
# ScriptPubKey + internal
self.log.info("Should import a scriptPubKey with internal flag")
- address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
+ address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
result = self.nodes[1].importmulti([{
"scriptPubKey": address['scriptPubKey'],
"timestamp": "now",
"internal": True
}])
assert_equal(result[0]['success'], True)
- address_assert = self.nodes[1].validateaddress(address['address'])
+ address_assert = self.nodes[1].getaddressinfo(address['address'])
assert_equal(address_assert['iswatchonly'], True)
assert_equal(address_assert['ismine'], False)
assert_equal(address_assert['timestamp'], timestamp)
# ScriptPubKey + !internal
self.log.info("Should not import a scriptPubKey without internal flag")
- address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
+ address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
result = self.nodes[1].importmulti([{
"scriptPubKey": address['scriptPubKey'],
"timestamp": "now",
@@ -89,7 +89,7 @@ class ImportMultiTest (BitcoinTestFramework):
assert_equal(result[0]['success'], False)
assert_equal(result[0]['error']['code'], -8)
assert_equal(result[0]['error']['message'], 'Internal must be set for hex scriptPubKey')
- address_assert = self.nodes[1].validateaddress(address['address'])
+ address_assert = self.nodes[1].getaddressinfo(address['address'])
assert_equal(address_assert['iswatchonly'], False)
assert_equal(address_assert['ismine'], False)
assert_equal('timestamp' in address_assert, False)
@@ -97,7 +97,7 @@ class ImportMultiTest (BitcoinTestFramework):
# Address + Public key + !Internal
self.log.info("Should import an address with public key")
- address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
+ address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
result = self.nodes[1].importmulti([{
"scriptPubKey": {
"address": address['address']
@@ -106,7 +106,7 @@ class ImportMultiTest (BitcoinTestFramework):
"pubkeys": [ address['pubkey'] ]
}])
assert_equal(result[0]['success'], True)
- address_assert = self.nodes[1].validateaddress(address['address'])
+ address_assert = self.nodes[1].getaddressinfo(address['address'])
assert_equal(address_assert['iswatchonly'], True)
assert_equal(address_assert['ismine'], False)
assert_equal(address_assert['timestamp'], timestamp)
@@ -114,7 +114,7 @@ class ImportMultiTest (BitcoinTestFramework):
# ScriptPubKey + Public key + internal
self.log.info("Should import a scriptPubKey with internal and with public key")
- address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
+ address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
request = [{
"scriptPubKey": address['scriptPubKey'],
"timestamp": "now",
@@ -123,14 +123,14 @@ class ImportMultiTest (BitcoinTestFramework):
}]
result = self.nodes[1].importmulti(request)
assert_equal(result[0]['success'], True)
- address_assert = self.nodes[1].validateaddress(address['address'])
+ address_assert = self.nodes[1].getaddressinfo(address['address'])
assert_equal(address_assert['iswatchonly'], True)
assert_equal(address_assert['ismine'], False)
assert_equal(address_assert['timestamp'], timestamp)
# ScriptPubKey + Public key + !internal
self.log.info("Should not import a scriptPubKey without internal and with public key")
- address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
+ address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
request = [{
"scriptPubKey": address['scriptPubKey'],
"timestamp": "now",
@@ -140,14 +140,14 @@ class ImportMultiTest (BitcoinTestFramework):
assert_equal(result[0]['success'], False)
assert_equal(result[0]['error']['code'], -8)
assert_equal(result[0]['error']['message'], 'Internal must be set for hex scriptPubKey')
- address_assert = self.nodes[1].validateaddress(address['address'])
+ address_assert = self.nodes[1].getaddressinfo(address['address'])
assert_equal(address_assert['iswatchonly'], False)
assert_equal(address_assert['ismine'], False)
assert_equal('timestamp' in address_assert, False)
# Address + Private key + !watchonly
self.log.info("Should import an address with private key")
- address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
+ address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
result = self.nodes[1].importmulti([{
"scriptPubKey": {
"address": address['address']
@@ -156,7 +156,7 @@ class ImportMultiTest (BitcoinTestFramework):
"keys": [ self.nodes[0].dumpprivkey(address['address']) ]
}])
assert_equal(result[0]['success'], True)
- address_assert = self.nodes[1].validateaddress(address['address'])
+ address_assert = self.nodes[1].getaddressinfo(address['address'])
assert_equal(address_assert['iswatchonly'], False)
assert_equal(address_assert['ismine'], True)
assert_equal(address_assert['timestamp'], timestamp)
@@ -175,7 +175,7 @@ class ImportMultiTest (BitcoinTestFramework):
# Address + Private key + watchonly
self.log.info("Should not import an address with private key and with watchonly")
- address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
+ address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
result = self.nodes[1].importmulti([{
"scriptPubKey": {
"address": address['address']
@@ -187,14 +187,14 @@ class ImportMultiTest (BitcoinTestFramework):
assert_equal(result[0]['success'], False)
assert_equal(result[0]['error']['code'], -8)
assert_equal(result[0]['error']['message'], 'Incompatibility found between watchonly and keys')
- address_assert = self.nodes[1].validateaddress(address['address'])
+ address_assert = self.nodes[1].getaddressinfo(address['address'])
assert_equal(address_assert['iswatchonly'], False)
assert_equal(address_assert['ismine'], False)
assert_equal('timestamp' in address_assert, False)
# ScriptPubKey + Private key + internal
self.log.info("Should import a scriptPubKey with internal and with private key")
- address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
+ address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
result = self.nodes[1].importmulti([{
"scriptPubKey": address['scriptPubKey'],
"timestamp": "now",
@@ -202,14 +202,14 @@ class ImportMultiTest (BitcoinTestFramework):
"internal": True
}])
assert_equal(result[0]['success'], True)
- address_assert = self.nodes[1].validateaddress(address['address'])
+ address_assert = self.nodes[1].getaddressinfo(address['address'])
assert_equal(address_assert['iswatchonly'], False)
assert_equal(address_assert['ismine'], True)
assert_equal(address_assert['timestamp'], timestamp)
# ScriptPubKey + Private key + !internal
self.log.info("Should not import a scriptPubKey without internal and with private key")
- address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
+ address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
result = self.nodes[1].importmulti([{
"scriptPubKey": address['scriptPubKey'],
"timestamp": "now",
@@ -218,19 +218,19 @@ class ImportMultiTest (BitcoinTestFramework):
assert_equal(result[0]['success'], False)
assert_equal(result[0]['error']['code'], -8)
assert_equal(result[0]['error']['message'], 'Internal must be set for hex scriptPubKey')
- address_assert = self.nodes[1].validateaddress(address['address'])
+ address_assert = self.nodes[1].getaddressinfo(address['address'])
assert_equal(address_assert['iswatchonly'], False)
assert_equal(address_assert['ismine'], False)
assert_equal('timestamp' in address_assert, False)
# P2SH address
- sig_address_1 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
- sig_address_2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
- sig_address_3 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
+ sig_address_1 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
+ sig_address_2 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
+ sig_address_3 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
multi_sig_script = self.nodes[0].createmultisig(2, [sig_address_1['pubkey'], sig_address_2['pubkey'], sig_address_3['pubkey']])
self.nodes[1].generate(100)
- transactionid = self.nodes[1].sendtoaddress(multi_sig_script['address'], 10.00)
+ self.nodes[1].sendtoaddress(multi_sig_script['address'], 10.00)
self.nodes[1].generate(1)
timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
@@ -242,7 +242,7 @@ class ImportMultiTest (BitcoinTestFramework):
"timestamp": "now",
}])
assert_equal(result[0]['success'], True)
- address_assert = self.nodes[1].validateaddress(multi_sig_script['address'])
+ address_assert = self.nodes[1].getaddressinfo(multi_sig_script['address'])
assert_equal(address_assert['isscript'], True)
assert_equal(address_assert['iswatchonly'], True)
assert_equal(address_assert['timestamp'], timestamp)
@@ -252,12 +252,12 @@ class ImportMultiTest (BitcoinTestFramework):
# P2SH + Redeem script
- sig_address_1 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
- sig_address_2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
- sig_address_3 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
+ sig_address_1 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
+ sig_address_2 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
+ sig_address_3 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
multi_sig_script = self.nodes[0].createmultisig(2, [sig_address_1['pubkey'], sig_address_2['pubkey'], sig_address_3['pubkey']])
self.nodes[1].generate(100)
- transactionid = self.nodes[1].sendtoaddress(multi_sig_script['address'], 10.00)
+ self.nodes[1].sendtoaddress(multi_sig_script['address'], 10.00)
self.nodes[1].generate(1)
timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
@@ -270,7 +270,7 @@ class ImportMultiTest (BitcoinTestFramework):
"redeemscript": multi_sig_script['redeemScript']
}])
assert_equal(result[0]['success'], True)
- address_assert = self.nodes[1].validateaddress(multi_sig_script['address'])
+ address_assert = self.nodes[1].getaddressinfo(multi_sig_script['address'])
assert_equal(address_assert['timestamp'], timestamp)
p2shunspent = self.nodes[1].listunspent(0,999999, [multi_sig_script['address']])[0]
@@ -279,12 +279,12 @@ class ImportMultiTest (BitcoinTestFramework):
# P2SH + Redeem script + Private Keys + !Watchonly
- sig_address_1 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
- sig_address_2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
- sig_address_3 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
+ sig_address_1 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
+ sig_address_2 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
+ sig_address_3 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
multi_sig_script = self.nodes[0].createmultisig(2, [sig_address_1['pubkey'], sig_address_2['pubkey'], sig_address_3['pubkey']])
self.nodes[1].generate(100)
- transactionid = self.nodes[1].sendtoaddress(multi_sig_script['address'], 10.00)
+ self.nodes[1].sendtoaddress(multi_sig_script['address'], 10.00)
self.nodes[1].generate(1)
timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
@@ -298,7 +298,7 @@ class ImportMultiTest (BitcoinTestFramework):
"keys": [ self.nodes[0].dumpprivkey(sig_address_1['address']), self.nodes[0].dumpprivkey(sig_address_2['address'])]
}])
assert_equal(result[0]['success'], True)
- address_assert = self.nodes[1].validateaddress(multi_sig_script['address'])
+ address_assert = self.nodes[1].getaddressinfo(multi_sig_script['address'])
assert_equal(address_assert['timestamp'], timestamp)
p2shunspent = self.nodes[1].listunspent(0,999999, [multi_sig_script['address']])[0]
@@ -306,12 +306,12 @@ class ImportMultiTest (BitcoinTestFramework):
assert_equal(p2shunspent['solvable'], True)
# P2SH + Redeem script + Private Keys + Watchonly
- sig_address_1 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
- sig_address_2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
- sig_address_3 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
+ sig_address_1 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
+ sig_address_2 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
+ sig_address_3 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
multi_sig_script = self.nodes[0].createmultisig(2, [sig_address_1['pubkey'], sig_address_2['pubkey'], sig_address_3['pubkey']])
self.nodes[1].generate(100)
- transactionid = self.nodes[1].sendtoaddress(multi_sig_script['address'], 10.00)
+ self.nodes[1].sendtoaddress(multi_sig_script['address'], 10.00)
self.nodes[1].generate(1)
timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
@@ -332,8 +332,8 @@ class ImportMultiTest (BitcoinTestFramework):
# Address + Public key + !Internal + Wrong pubkey
self.log.info("Should not import an address with a wrong public key")
- address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
- address2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
+ address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
+ address2 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
result = self.nodes[1].importmulti([{
"scriptPubKey": {
"address": address['address']
@@ -344,7 +344,7 @@ class ImportMultiTest (BitcoinTestFramework):
assert_equal(result[0]['success'], False)
assert_equal(result[0]['error']['code'], -5)
assert_equal(result[0]['error']['message'], 'Consistency check failed')
- address_assert = self.nodes[1].validateaddress(address['address'])
+ address_assert = self.nodes[1].getaddressinfo(address['address'])
assert_equal(address_assert['iswatchonly'], False)
assert_equal(address_assert['ismine'], False)
assert_equal('timestamp' in address_assert, False)
@@ -352,8 +352,8 @@ class ImportMultiTest (BitcoinTestFramework):
# ScriptPubKey + Public key + internal + Wrong pubkey
self.log.info("Should not import a scriptPubKey with internal and with a wrong public key")
- address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
- address2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
+ address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
+ address2 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
request = [{
"scriptPubKey": address['scriptPubKey'],
"timestamp": "now",
@@ -364,7 +364,7 @@ class ImportMultiTest (BitcoinTestFramework):
assert_equal(result[0]['success'], False)
assert_equal(result[0]['error']['code'], -5)
assert_equal(result[0]['error']['message'], 'Consistency check failed')
- address_assert = self.nodes[1].validateaddress(address['address'])
+ address_assert = self.nodes[1].getaddressinfo(address['address'])
assert_equal(address_assert['iswatchonly'], False)
assert_equal(address_assert['ismine'], False)
assert_equal('timestamp' in address_assert, False)
@@ -372,8 +372,8 @@ class ImportMultiTest (BitcoinTestFramework):
# Address + Private key + !watchonly + Wrong private key
self.log.info("Should not import an address with a wrong private key")
- address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
- address2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
+ address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
+ address2 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
result = self.nodes[1].importmulti([{
"scriptPubKey": {
"address": address['address']
@@ -384,7 +384,7 @@ class ImportMultiTest (BitcoinTestFramework):
assert_equal(result[0]['success'], False)
assert_equal(result[0]['error']['code'], -5)
assert_equal(result[0]['error']['message'], 'Consistency check failed')
- address_assert = self.nodes[1].validateaddress(address['address'])
+ address_assert = self.nodes[1].getaddressinfo(address['address'])
assert_equal(address_assert['iswatchonly'], False)
assert_equal(address_assert['ismine'], False)
assert_equal('timestamp' in address_assert, False)
@@ -392,8 +392,8 @@ class ImportMultiTest (BitcoinTestFramework):
# ScriptPubKey + Private key + internal + Wrong private key
self.log.info("Should not import a scriptPubKey with internal and with a wrong private key")
- address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
- address2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
+ address = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
+ address2 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())
result = self.nodes[1].importmulti([{
"scriptPubKey": address['scriptPubKey'],
"timestamp": "now",
@@ -403,7 +403,7 @@ class ImportMultiTest (BitcoinTestFramework):
assert_equal(result[0]['success'], False)
assert_equal(result[0]['error']['code'], -5)
assert_equal(result[0]['error']['message'], 'Consistency check failed')
- address_assert = self.nodes[1].validateaddress(address['address'])
+ address_assert = self.nodes[1].getaddressinfo(address['address'])
assert_equal(address_assert['iswatchonly'], False)
assert_equal(address_assert['ismine'], False)
assert_equal('timestamp' in address_assert, False)
@@ -419,7 +419,7 @@ class ImportMultiTest (BitcoinTestFramework):
"timestamp": "now",
}])
assert_equal(result[0]['success'], True)
- address_assert = self.nodes[1].validateaddress(watchonly_address)
+ address_assert = self.nodes[1].getaddressinfo(watchonly_address)
assert_equal(address_assert['iswatchonly'], True)
assert_equal(address_assert['ismine'], False)
assert_equal(address_assert['timestamp'], timestamp)
@@ -429,7 +429,7 @@ class ImportMultiTest (BitcoinTestFramework):
# restart nodes to check for proper serialization/deserialization of watch only address
self.stop_nodes()
self.start_nodes()
- address_assert = self.nodes[1].validateaddress(watchonly_address)
+ address_assert = self.nodes[1].getaddressinfo(watchonly_address)
assert_equal(address_assert['iswatchonly'], True)
assert_equal(address_assert['ismine'], False)
assert_equal(address_assert['timestamp'], watchonly_timestamp)
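Every case in this file follows the same shape: call importmulti with a single request, check the success flag, then confirm ownership with getaddressinfo. A hypothetical helper (not in the test) condensing that pattern:

from test_framework.util import assert_equal

def import_and_check(node, request, address, success, iswatchonly=None, ismine=None):
    # illustrative condensation of the repeated importmulti assertions above
    result = node.importmulti([request])
    assert_equal(result[0]['success'], success)
    info = node.getaddressinfo(address)
    if iswatchonly is not None:
        assert_equal(info['iswatchonly'], iswatchonly)
    if ismine is not None:
        assert_equal(info['ismine'], ismine)
    return info

# e.g. import_and_check(node1, {"scriptPubKey": {"address": addr}, "timestamp": "now"},
#                       addr, success=True, iswatchonly=True, ismine=False)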
diff --git a/test/functional/wallet_importprunedfunds.py b/test/functional/wallet_importprunedfunds.py
index 6b2919b5ae..d0ec290f36 100755
--- a/test/functional/wallet_importprunedfunds.py
+++ b/test/functional/wallet_importprunedfunds.py
@@ -10,13 +10,14 @@ class ImportPrunedFundsTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 2
+ self.extra_args = [['-deprecatedrpc=accounts']] * 2
def run_test(self):
self.log.info("Mining blocks...")
self.nodes[0].generate(101)
self.sync_all()
-
+
# address
address1 = self.nodes[0].getnewaddress()
# pubkey
@@ -26,7 +27,7 @@ class ImportPrunedFundsTest(BitcoinTestFramework):
address3_privkey = self.nodes[0].dumpprivkey(address3) # Using privkey
#Check only one address
- address_info = self.nodes[0].validateaddress(address1)
+ address_info = self.nodes[0].getaddressinfo(address1)
assert_equal(address_info['ismine'], True)
self.sync_all()
@@ -35,15 +36,15 @@ class ImportPrunedFundsTest(BitcoinTestFramework):
assert_equal(self.nodes[1].getblockcount(),101)
#Address Test - before import
- address_info = self.nodes[1].validateaddress(address1)
+ address_info = self.nodes[1].getaddressinfo(address1)
assert_equal(address_info['iswatchonly'], False)
assert_equal(address_info['ismine'], False)
- address_info = self.nodes[1].validateaddress(address2)
+ address_info = self.nodes[1].getaddressinfo(address2)
assert_equal(address_info['iswatchonly'], False)
assert_equal(address_info['ismine'], False)
- address_info = self.nodes[1].validateaddress(address3)
+ address_info = self.nodes[1].getaddressinfo(address3)
assert_equal(address_info['iswatchonly'], False)
assert_equal(address_info['ismine'], False)
@@ -86,13 +87,13 @@ class ImportPrunedFundsTest(BitcoinTestFramework):
assert_equal(balance3, Decimal('0.075'))
#Addresses Test - after import
- address_info = self.nodes[1].validateaddress(address1)
+ address_info = self.nodes[1].getaddressinfo(address1)
assert_equal(address_info['iswatchonly'], False)
assert_equal(address_info['ismine'], False)
- address_info = self.nodes[1].validateaddress(address2)
+ address_info = self.nodes[1].getaddressinfo(address2)
assert_equal(address_info['iswatchonly'], True)
assert_equal(address_info['ismine'], False)
- address_info = self.nodes[1].validateaddress(address3)
+ address_info = self.nodes[1].getaddressinfo(address3)
assert_equal(address_info['iswatchonly'], False)
assert_equal(address_info['ismine'], True)
diff --git a/test/functional/wallet_keypool.py b/test/functional/wallet_keypool.py
index 45a5eed8ec..505014e48f 100755
--- a/test/functional/wallet_keypool.py
+++ b/test/functional/wallet_keypool.py
@@ -14,17 +14,17 @@ class KeyPoolTest(BitcoinTestFramework):
def run_test(self):
nodes = self.nodes
addr_before_encrypting = nodes[0].getnewaddress()
- addr_before_encrypting_data = nodes[0].validateaddress(addr_before_encrypting)
+ addr_before_encrypting_data = nodes[0].getaddressinfo(addr_before_encrypting)
wallet_info_old = nodes[0].getwalletinfo()
assert(addr_before_encrypting_data['hdmasterkeyid'] == wallet_info_old['hdmasterkeyid'])
-
+
# Encrypt wallet and wait to terminate
nodes[0].node_encrypt_wallet('test')
# Restart node 0
self.start_node(0)
# Keep creating keys
addr = nodes[0].getnewaddress()
- addr_data = nodes[0].validateaddress(addr)
+ addr_data = nodes[0].getaddressinfo(addr)
wallet_info = nodes[0].getwalletinfo()
assert(addr_before_encrypting_data['hdmasterkeyid'] != wallet_info['hdmasterkeyid'])
assert(addr_data['hdmasterkeyid'] == wallet_info['hdmasterkeyid'])
diff --git a/test/functional/wallet_keypool_topup.py b/test/functional/wallet_keypool_topup.py
index e7af3c3987..ab1493dd04 100755
--- a/test/functional/wallet_keypool_topup.py
+++ b/test/functional/wallet_keypool_topup.py
@@ -10,6 +10,7 @@ Two nodes. Node1 is under test. Node0 is providing transactions and generating b
- Generate 110 keys (enough to drain the keypool). Store key 90 (in the initial keypool) and key 110 (beyond the initial keypool). Send funds to key 90 and key 110.
- Stop node1, clear the datadir, move wallet file back into the datadir and restart node1.
- connect node1 to node0. Verify that they sync and node1 receives its funds."""
+import os
import shutil
from test_framework.test_framework import BitcoinTestFramework
@@ -19,33 +20,31 @@ from test_framework.util import (
sync_blocks,
)
+
class KeypoolRestoreTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 2
- self.extra_args = [[], ['-keypool=100', '-keypoolmin=20']]
+ self.extra_args = [['-deprecatedrpc=accounts'], ['-deprecatedrpc=accounts', '-keypool=100', '-keypoolmin=20']]
def run_test(self):
- self.tmpdir = self.options.tmpdir
+ wallet_path = os.path.join(self.nodes[1].datadir, "regtest", "wallets", "wallet.dat")
+ wallet_backup_path = os.path.join(self.nodes[1].datadir, "wallet.bak")
self.nodes[0].generate(101)
self.log.info("Make backup of wallet")
-
self.stop_node(1)
-
- shutil.copyfile(self.tmpdir + "/node1/regtest/wallets/wallet.dat", self.tmpdir + "/wallet.bak")
+ shutil.copyfile(wallet_path, wallet_backup_path)
self.start_node(1, self.extra_args[1])
connect_nodes_bi(self.nodes, 0, 1)
self.log.info("Generate keys for wallet")
-
for _ in range(90):
addr_oldpool = self.nodes[1].getnewaddress()
for _ in range(20):
addr_extpool = self.nodes[1].getnewaddress()
self.log.info("Send funds to wallet")
-
self.nodes[0].sendtoaddress(addr_oldpool, 10)
self.nodes[0].generate(1)
self.nodes[0].sendtoaddress(addr_extpool, 5)
@@ -53,22 +52,18 @@ class KeypoolRestoreTest(BitcoinTestFramework):
sync_blocks(self.nodes)
self.log.info("Restart node with wallet backup")
-
self.stop_node(1)
-
- shutil.copyfile(self.tmpdir + "/wallet.bak", self.tmpdir + "/node1/regtest/wallets/wallet.dat")
-
- self.log.info("Verify keypool is restored and balance is correct")
-
+ shutil.copyfile(wallet_backup_path, wallet_path)
self.start_node(1, self.extra_args[1])
connect_nodes_bi(self.nodes, 0, 1)
self.sync_all()
+ self.log.info("Verify keypool is restored and balance is correct")
assert_equal(self.nodes[1].getbalance(), 15)
assert_equal(self.nodes[1].listtransactions()[0]['category'], "receive")
-
# Check that we have marked all keys up to the used keypool key as used
- assert_equal(self.nodes[1].validateaddress(self.nodes[1].getnewaddress())['hdkeypath'], "m/0'/0'/110'")
+ assert_equal(self.nodes[1].getaddressinfo(self.nodes[1].getnewaddress())['hdkeypath'], "m/0'/0'/110'")
+
if __name__ == '__main__':
KeypoolRestoreTest().main()
diff --git a/test/functional/wallet_labels.py b/test/functional/wallet_labels.py
new file mode 100755
index 0000000000..705dd8985e
--- /dev/null
+++ b/test/functional/wallet_labels.py
@@ -0,0 +1,241 @@
+#!/usr/bin/env python3
+# Copyright (c) 2016-2017 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test label RPCs.
+
+RPCs tested are:
+ - getlabeladdress
+ - getaddressesbyaccount/getaddressesbylabel
+ - listaddressgroupings
+ - setlabel
+ - sendfrom (with account arguments)
+ - move (with account arguments)
+
+Run the test twice - once using the accounts API and once using the labels API.
+The accounts API test can be removed in V0.18.
+"""
+from collections import defaultdict
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import assert_equal, assert_raises_rpc_error
+
+class WalletLabelsTest(BitcoinTestFramework):
+ def set_test_params(self):
+ self.setup_clean_chain = True
+ self.num_nodes = 2
+ self.extra_args = [['-deprecatedrpc=accounts'], []]
+
+ def setup_network(self):
+ """Don't connect nodes."""
+ self.setup_nodes()
+
+ def run_test(self):
+ """Run the test twice - once using the accounts API and once using the labels API."""
+ self.log.info("Test accounts API")
+ self._run_subtest(True, self.nodes[0])
+ self.log.info("Test labels API")
+ self._run_subtest(False, self.nodes[1])
+
+ def _run_subtest(self, accounts_api, node):
+ # Check that there's no UTXO on any of the nodes
+ assert_equal(len(node.listunspent()), 0)
+
+ # Note each time we call generate, all generated coins go into
+ # the same address, so we call twice to get two addresses w/50 each
+ node.generate(1)
+ node.generate(101)
+ assert_equal(node.getbalance(), 100)
+
+ # there should be 2 address groups
+ # each with 1 address with a balance of 50 Bitcoins
+ address_groups = node.listaddressgroupings()
+ assert_equal(len(address_groups), 2)
+ # the addresses aren't linked now, but will be after we send to the
+ # common address
+ linked_addresses = set()
+ for address_group in address_groups:
+ assert_equal(len(address_group), 1)
+ assert_equal(len(address_group[0]), 2)
+ assert_equal(address_group[0][1], 50)
+ linked_addresses.add(address_group[0][0])
+
+ # send 50 from each address to a third address not in this wallet
+ # There's some fee that will come back to us when the miner reward
+ # matures.
+ common_address = "msf4WtN1YQKXvNtvdFYt9JBnUD2FB41kjr"
+ txid = node.sendmany(
+ fromaccount="",
+ amounts={common_address: 100},
+ subtractfeefrom=[common_address],
+ minconf=1,
+ )
+ tx_details = node.gettransaction(txid)
+ fee = -tx_details['details'][0]['fee']
+ # there should be 1 address group, with the previously
+ # unlinked addresses now linked (they both have 0 balance)
+ address_groups = node.listaddressgroupings()
+ assert_equal(len(address_groups), 1)
+ assert_equal(len(address_groups[0]), 2)
+ assert_equal(set([a[0] for a in address_groups[0]]), linked_addresses)
+ assert_equal([a[1] for a in address_groups[0]], [0, 0])
+
+ node.generate(1)
+
+ # we want to reset so that the "" label has what's expected.
+ # otherwise we're off by exactly the fee amount as that's mined
+ # and matures in the next 100 blocks
+ node.sendfrom("", common_address, fee)
+ amount_to_send = 1.0
+
+ # Create labels and make sure subsequent label API calls
+ # recognize the label/address associations.
+ labels = [Label(name, accounts_api) for name in ("a", "b", "c", "d", "e")]
+ for label in labels:
+ label.add_receive_address(node.getlabeladdress(label=label.name, force=True))
+ label.verify(node)
+
+ # Check all labels are returned by listlabels.
+ assert_equal(node.listlabels(), [label.name for label in labels])
+
+ # Send a transaction to each label, and make sure this forces
+ # getlabeladdress to generate a new receiving address.
+ for label in labels:
+ node.sendtoaddress(label.receive_address, amount_to_send)
+ label.add_receive_address(node.getlabeladdress(label.name))
+ label.verify(node)
+
+ # Check the amounts received.
+ node.generate(1)
+ for label in labels:
+ assert_equal(
+ node.getreceivedbyaddress(label.addresses[0]), amount_to_send)
+ assert_equal(node.getreceivedbylabel(label.name), amount_to_send)
+
+ # Check that sendfrom label reduces listaccounts balances.
+ for i, label in enumerate(labels):
+ to_label = labels[(i + 1) % len(labels)]
+ node.sendfrom(label.name, to_label.receive_address, amount_to_send)
+ node.generate(1)
+ for label in labels:
+ label.add_receive_address(node.getlabeladdress(label.name))
+ label.verify(node)
+ assert_equal(node.getreceivedbylabel(label.name), 2)
+ if accounts_api:
+ node.move(label.name, "", node.getbalance(label.name))
+ label.verify(node)
+ node.generate(101)
+ expected_account_balances = {"": 5200}
+ for label in labels:
+ expected_account_balances[label.name] = 0
+ if accounts_api:
+ assert_equal(node.listaccounts(), expected_account_balances)
+ assert_equal(node.getbalance(""), 5200)
+
+ # Check that setlabel can assign a label to a new unused address.
+ for label in labels:
+ address = node.getlabeladdress(label="", force=True)
+ node.setlabel(address, label.name)
+ label.add_address(address)
+ label.verify(node)
+ if accounts_api:
+ assert(address not in node.getaddressesbyaccount(""))
+ else:
+ assert_raises_rpc_error(-11, "No addresses with label", node.getaddressesbylabel, "")
+
+ # Check that addmultisigaddress can assign labels.
+ for label in labels:
+ addresses = []
+ for x in range(10):
+ addresses.append(node.getnewaddress())
+ multisig_address = node.addmultisigaddress(5, addresses, label.name)['address']
+ label.add_address(multisig_address)
+ label.purpose[multisig_address] = "send"
+ label.verify(node)
+ node.sendfrom("", multisig_address, 50)
+ node.generate(101)
+ if accounts_api:
+ for label in labels:
+ assert_equal(node.getbalance(label.name), 50)
+
+ # Check that setlabel can change the label of an address from a
+ # different label.
+ change_label(node, labels[0].addresses[0], labels[0], labels[1])
+
+ # Check that setlabel can change the label of an address which
+ # is the receiving address of a different label.
+ change_label(node, labels[0].receive_address, labels[0], labels[1])
+
+ # Check that setlabel can set the label of an address already
+ # in the label. This is a no-op.
+ change_label(node, labels[2].addresses[0], labels[2], labels[2])
+
+ # Check that setlabel can set the label of an address which is
+ # already the receiving address of the label. This is a no-op.
+ change_label(node, labels[2].receive_address, labels[2], labels[2])
+
+class Label:
+ def __init__(self, name, accounts_api):
+ # Label name
+ self.name = name
+ self.accounts_api = accounts_api
+ # Current receiving address associated with this label.
+ self.receive_address = None
+ # List of all addresses assigned with this label
+ self.addresses = []
+ # Map of address to address purpose
+ self.purpose = defaultdict(lambda: "receive")
+
+ def add_address(self, address):
+ assert_equal(address not in self.addresses, True)
+ self.addresses.append(address)
+
+ def add_receive_address(self, address):
+ self.add_address(address)
+ self.receive_address = address
+
+ def verify(self, node):
+ if self.receive_address is not None:
+ assert self.receive_address in self.addresses
+ assert_equal(node.getlabeladdress(self.name), self.receive_address)
+
+ for address in self.addresses:
+ assert_equal(
+ node.getaddressinfo(address)['labels'][0],
+ {"name": self.name,
+ "purpose": self.purpose[address]})
+ if self.accounts_api:
+ assert_equal(node.getaccount(address), self.name)
+ else:
+ assert_equal(node.getaddressinfo(address)['label'], self.name)
+
+ assert_equal(
+ node.getaddressesbylabel(self.name),
+ {address: {"purpose": self.purpose[address]} for address in self.addresses})
+ if self.accounts_api:
+ assert_equal(set(node.getaddressesbyaccount(self.name)), set(self.addresses))
+
+
+def change_label(node, address, old_label, new_label):
+ assert_equal(address in old_label.addresses, True)
+ node.setlabel(address, new_label.name)
+
+ old_label.addresses.remove(address)
+ new_label.add_address(address)
+
+ # Calling setlabel on an address which was previously the receiving
+ # address of a different label should reset the receiving address of
+ # the old label, causing getlabeladdress to return a brand new
+ # address.
+ if old_label.name != new_label.name and address == old_label.receive_address:
+ new_address = node.getlabeladdress(old_label.name)
+ assert_equal(new_address not in old_label.addresses, True)
+ assert_equal(new_address not in new_label.addresses, True)
+ old_label.add_receive_address(new_address)
+
+ old_label.verify(node)
+ new_label.verify(node)
+
+
+if __name__ == '__main__':
+ WalletLabelsTest().main()
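For readers following the accounts-to-labels migration exercised here and in wallet_listreceivedby.py below, the RPC renamings used by these tests map roughly as follows (a reference sketch, not an exhaustive list):

# deprecated accounts RPC -> labels replacement, as used in these tests
ACCOUNTS_TO_LABELS = {
    "getaccount":            "getaddressinfo(addr)['label']",
    "getaccountaddress":     "getlabeladdress",
    "getaddressesbyaccount": "getaddressesbylabel",
    "getreceivedbyaccount":  "getreceivedbylabel",
    "listreceivedbyaccount": "listreceivedbylabel",
}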
diff --git a/test/functional/wallet_listreceivedby.py b/test/functional/wallet_listreceivedby.py
index 1f2b3c8aa7..aba5d642bc 100755
--- a/test/functional/wallet_listreceivedby.py
+++ b/test/functional/wallet_listreceivedby.py
@@ -14,6 +14,7 @@ from test_framework.util import (assert_array_result,
class ReceivedByTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 2
+ self.extra_args = [['-deprecatedrpc=accounts']] * 2
def run_test(self):
# Generate block to get out of IBD
@@ -36,19 +37,53 @@ class ReceivedByTest(BitcoinTestFramework):
self.sync_all()
assert_array_result(self.nodes[1].listreceivedbyaddress(),
{"address": addr},
- {"address": addr, "account": "", "amount": Decimal("0.1"), "confirmations": 10, "txids": [txid, ]})
+ {"address": addr, "label": "", "amount": Decimal("0.1"), "confirmations": 10, "txids": [txid, ]})
# With min confirmations < 10
assert_array_result(self.nodes[1].listreceivedbyaddress(5),
{"address": addr},
- {"address": addr, "account": "", "amount": Decimal("0.1"), "confirmations": 10, "txids": [txid, ]})
+ {"address": addr, "label": "", "amount": Decimal("0.1"), "confirmations": 10, "txids": [txid, ]})
# With min confirmations > 10, should not find Tx
assert_array_result(self.nodes[1].listreceivedbyaddress(11), {"address": addr}, {}, True)
# Empty Tx
- addr = self.nodes[1].getnewaddress()
+ empty_addr = self.nodes[1].getnewaddress()
assert_array_result(self.nodes[1].listreceivedbyaddress(0, True),
- {"address": addr},
- {"address": addr, "account": "", "amount": 0, "confirmations": 0, "txids": []})
+ {"address": empty_addr},
+ {"address": empty_addr, "label": "", "amount": 0, "confirmations": 0, "txids": []})
+
+ #Test Address filtering
+ #Only on addr
+ expected = {"address":addr, "label":"", "amount":Decimal("0.1"), "confirmations":10, "txids":[txid,]}
+ res = self.nodes[1].listreceivedbyaddress(minconf=0, include_empty=True, include_watchonly=True, address_filter=addr)
+ assert_array_result(res, {"address":addr}, expected)
+ assert_equal(len(res), 1)
+ #Error on invalid address
+ assert_raises_rpc_error(-4, "address_filter parameter was invalid", self.nodes[1].listreceivedbyaddress, minconf=0, include_empty=True, include_watchonly=True, address_filter="bamboozling")
+ # Another address receives money
+ res = self.nodes[1].listreceivedbyaddress(0, True, True)
+ assert_equal(len(res), 2) #Right now 2 entries
+ other_addr = self.nodes[1].getnewaddress()
+ txid2 = self.nodes[0].sendtoaddress(other_addr, 0.1)
+ self.nodes[0].generate(1)
+ self.sync_all()
+ #Same test as above should still pass
+ expected = {"address":addr, "label":"", "amount":Decimal("0.1"), "confirmations":11, "txids":[txid,]}
+ res = self.nodes[1].listreceivedbyaddress(0, True, True, addr)
+ assert_array_result(res, {"address":addr}, expected)
+ assert_equal(len(res), 1)
+ #Same test as above but with other_addr should still pass
+ expected = {"address":other_addr, "label":"", "amount":Decimal("0.1"), "confirmations":1, "txids":[txid2,]}
+ res = self.nodes[1].listreceivedbyaddress(0, True, True, other_addr)
+ assert_array_result(res, {"address":other_addr}, expected)
+ assert_equal(len(res), 1)
+ # Without a filter the list should now have one more entry (three in total)
+ res = self.nodes[1].listreceivedbyaddress(0, True, True)
+ assert_equal(len(res), 3) #Became 3 entries
+
+ #Not on random addr
+ other_addr = self.nodes[0].getnewaddress() # note on node[0]! just a random addr
+ res = self.nodes[1].listreceivedbyaddress(0, True, True, other_addr)
+ assert_equal(len(res), 0)
self.log.info("getreceivedbyaddress Test")
@@ -74,46 +109,46 @@ class ReceivedByTest(BitcoinTestFramework):
# Trying to getreceivedby for an address the wallet doesn't own should return an error
assert_raises_rpc_error(-4, "Address not found in wallet", self.nodes[0].getreceivedbyaddress, addr)
- self.log.info("listreceivedbyaccount + getreceivedbyaccount Test")
+ self.log.info("listreceivedbylabel + getreceivedbylabel Test")
# set pre-state
addrArr = self.nodes[1].getnewaddress()
- account = self.nodes[1].getaccount(addrArr)
- received_by_account_json = [r for r in self.nodes[1].listreceivedbyaccount() if r["account"] == account][0]
- balance_by_account = self.nodes[1].getreceivedbyaccount(account)
+ label = self.nodes[1].getaccount(addrArr)
+ received_by_label_json = [r for r in self.nodes[1].listreceivedbylabel() if r["label"] == label][0]
+ balance_by_label = self.nodes[1].getreceivedbylabel(label)
txid = self.nodes[0].sendtoaddress(addr, 0.1)
self.sync_all()
- # listreceivedbyaccount should return received_by_account_json because of 0 confirmations
- assert_array_result(self.nodes[1].listreceivedbyaccount(),
- {"account": account},
- received_by_account_json)
+ # listreceivedbylabel should return received_by_label_json because of 0 confirmations
+ assert_array_result(self.nodes[1].listreceivedbylabel(),
+ {"label": label},
+ received_by_label_json)
# getreceivedbylabel should return the same balance because of 0 confirmations
- balance = self.nodes[1].getreceivedbyaccount(account)
- assert_equal(balance, balance_by_account)
+ balance = self.nodes[1].getreceivedbylabel(label)
+ assert_equal(balance, balance_by_label)
self.nodes[1].generate(10)
self.sync_all()
- # listreceivedbyaccount should return updated account balance
- assert_array_result(self.nodes[1].listreceivedbyaccount(),
- {"account": account},
- {"account": received_by_account_json["account"], "amount": (received_by_account_json["amount"] + Decimal("0.1"))})
+ # listreceivedbylabel should return updated received list
+ assert_array_result(self.nodes[1].listreceivedbylabel(),
+ {"label": label},
+ {"label": received_by_label_json["label"], "amount": (received_by_label_json["amount"] + Decimal("0.1"))})
- # getreceivedbyaddress should return updates balance
- balance = self.nodes[1].getreceivedbyaccount(account)
- assert_equal(balance, balance_by_account + Decimal("0.1"))
+ # getreceivedbylabel should return updated receive total
+ balance = self.nodes[1].getreceivedbylabel(label)
+ assert_equal(balance, balance_by_label + Decimal("0.1"))
- # Create a new account named "mynewaccount" that has a 0 balance
- self.nodes[1].getaccountaddress("mynewaccount")
- received_by_account_json = [r for r in self.nodes[1].listreceivedbyaccount(0, True) if r["account"] == "mynewaccount"][0]
+ # Create a new label named "mynewlabel" that has a 0 balance
+ self.nodes[1].getlabeladdress(label="mynewlabel", force=True)
+ received_by_label_json = [r for r in self.nodes[1].listreceivedbylabel(0, True) if r["label"] == "mynewlabel"][0]
- # Test includeempty of listreceivedbyaccount
- assert_equal(received_by_account_json["amount"], Decimal("0.0"))
+ # Test includeempty of listreceivedbylabel
+ assert_equal(received_by_label_json["amount"], Decimal("0.0"))
- # Test getreceivedbyaccount for 0 amount accounts
- balance = self.nodes[1].getreceivedbyaccount("mynewaccount")
+ # Test getreceivedbylabel for 0 amount labels
+ balance = self.nodes[1].getreceivedbylabel("mynewlabel")
assert_equal(balance, Decimal("0.0"))
if __name__ == '__main__':
diff --git a/test/functional/wallet_listsinceblock.py b/test/functional/wallet_listsinceblock.py
index 67e7744bf8..930bfcd7b0 100755
--- a/test/functional/wallet_listsinceblock.py
+++ b/test/functional/wallet_listsinceblock.py
@@ -11,6 +11,7 @@ class ListSinceBlockTest (BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 4
self.setup_clean_chain = True
+ self.extra_args = [['-deprecatedrpc=accounts']] * 4
def run_test(self):
self.nodes[2].generate(101)
@@ -158,7 +159,7 @@ class ListSinceBlockTest (BitcoinTestFramework):
'vout': utxo['vout'],
}]
txid1 = self.nodes[1].sendrawtransaction(
- self.nodes[1].signrawtransaction(
+ self.nodes[1].signrawtransactionwithwallet(
self.nodes[1].createrawtransaction(utxoDicts, recipientDict))['hex'])
# send from nodes[2] using utxo to nodes[3]
@@ -167,7 +168,7 @@ class ListSinceBlockTest (BitcoinTestFramework):
self.nodes[2].getnewaddress(): change,
}
self.nodes[2].sendrawtransaction(
- self.nodes[2].signrawtransaction(
+ self.nodes[2].signrawtransactionwithwallet(
self.nodes[2].createrawtransaction(utxoDicts, recipientDict2))['hex'])
# generate on both sides
@@ -211,7 +212,7 @@ class ListSinceBlockTest (BitcoinTestFramework):
1. tx1 is listed in listsinceblock.
2. It is included in 'removed' as it was removed, even though it is now
present in a different block.
- 3. It is listed with a confirmations count of 2 (bb3, bb4), not
+ 3. It is listed with a confirmation count of 2 (bb3, bb4), not
3 (aa1, aa2, aa3).
'''
@@ -232,7 +233,7 @@ class ListSinceBlockTest (BitcoinTestFramework):
'txid': utxo['txid'],
'vout': utxo['vout'],
}]
- signedtxres = self.nodes[2].signrawtransaction(
+ signedtxres = self.nodes[2].signrawtransactionwithwallet(
self.nodes[2].createrawtransaction(utxoDicts, recipientDict))
assert signedtxres['complete']
diff --git a/test/functional/rpc_listtransactions.py b/test/functional/wallet_listtransactions.py
index ba71ac0967..5466bbf18b 100755
--- a/test/functional/rpc_listtransactions.py
+++ b/test/functional/wallet_listtransactions.py
@@ -18,6 +18,7 @@ def txFromHex(hexstring):
class ListTransactionsTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 2
+ self.extra_args = [['-deprecatedrpc=accounts']] * 2
self.enable_mocktime()
def run_test(self):
@@ -81,7 +82,7 @@ class ListTransactionsTest(BitcoinTestFramework):
{"category":"receive","amount":Decimal("0.44")},
{"txid":txid, "account" : "toself"} )
- pubkey = self.nodes[1].validateaddress(self.nodes[1].getnewaddress())['pubkey']
+ pubkey = self.nodes[1].getaddressinfo(self.nodes[1].getnewaddress())['pubkey']
multisig = self.nodes[1].createmultisig(1, [pubkey])
self.nodes[0].importaddress(multisig["redeemScript"], "watchonly", False, True)
txid = self.nodes[1].sendtoaddress(multisig["address"], 0.1)
@@ -131,7 +132,7 @@ class ListTransactionsTest(BitcoinTestFramework):
inputs = [{"txid":utxo_to_use["txid"], "vout":utxo_to_use["vout"]}]
outputs = {self.nodes[0].getnewaddress(): 0.999}
tx2 = self.nodes[1].createrawtransaction(inputs, outputs)
- tx2_signed = self.nodes[1].signrawtransaction(tx2)["hex"]
+ tx2_signed = self.nodes[1].signrawtransactionwithwallet(tx2)["hex"]
txid_2 = self.nodes[1].sendrawtransaction(tx2_signed)
# ...and check the result
@@ -148,7 +149,7 @@ class ListTransactionsTest(BitcoinTestFramework):
tx3_modified = txFromHex(tx3)
tx3_modified.vin[0].nSequence = 0
tx3 = bytes_to_hex_str(tx3_modified.serialize())
- tx3_signed = self.nodes[0].signrawtransaction(tx3)['hex']
+ tx3_signed = self.nodes[0].signrawtransactionwithwallet(tx3)['hex']
txid_3 = self.nodes[0].sendrawtransaction(tx3_signed)
assert(is_opt_in(self.nodes[0], txid_3))
@@ -162,7 +163,7 @@ class ListTransactionsTest(BitcoinTestFramework):
inputs = [{"txid": txid_3, "vout":utxo_to_use["vout"]}]
outputs = {self.nodes[0].getnewaddress(): 0.997}
tx4 = self.nodes[1].createrawtransaction(inputs, outputs)
- tx4_signed = self.nodes[1].signrawtransaction(tx4)["hex"]
+ tx4_signed = self.nodes[1].signrawtransactionwithwallet(tx4)["hex"]
txid_4 = self.nodes[1].sendrawtransaction(tx4_signed)
assert(not is_opt_in(self.nodes[1], txid_4))
@@ -174,7 +175,7 @@ class ListTransactionsTest(BitcoinTestFramework):
tx3_b = tx3_modified
tx3_b.vout[0].nValue -= int(Decimal("0.004") * COIN) # bump the fee
tx3_b = bytes_to_hex_str(tx3_b.serialize())
- tx3_b_signed = self.nodes[0].signrawtransaction(tx3_b)['hex']
+ tx3_b_signed = self.nodes[0].signrawtransactionwithwallet(tx3_b)['hex']
txid_3b = self.nodes[0].sendrawtransaction(tx3_b_signed, True)
assert(is_opt_in(self.nodes[0], txid_3b))
diff --git a/test/functional/wallet_multiwallet.py b/test/functional/wallet_multiwallet.py
index b07e451667..e0571ea8f9 100755
--- a/test/functional/wallet_multiwallet.py
+++ b/test/functional/wallet_multiwallet.py
@@ -10,13 +10,17 @@ import os
import shutil
from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import assert_equal, assert_raises_rpc_error
+from test_framework.test_node import ErrorMatch
+from test_framework.util import (
+ assert_equal,
+ assert_raises_rpc_error,
+)
+
class MultiWalletTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 2
- self.extra_args = [['-wallet=w1', '-wallet=w2', '-wallet=w3', '-wallet=w'], []]
self.supports_cli = True
def run_test(self):
@@ -26,35 +30,79 @@ class MultiWalletTest(BitcoinTestFramework):
wallet_dir = lambda *p: data_dir('wallets', *p)
wallet = lambda name: node.get_wallet_rpc(name)
- assert_equal(set(node.listwallets()), {"w1", "w2", "w3", "w"})
-
+ # check wallet.dat is created
self.stop_nodes()
-
- self.assert_start_raises_init_error(0, ['-walletdir=wallets'], 'Error: Specified -walletdir "wallets" does not exist')
- self.assert_start_raises_init_error(0, ['-walletdir=wallets'], 'Error: Specified -walletdir "wallets" is a relative path', cwd=data_dir())
- self.assert_start_raises_init_error(0, ['-walletdir=debug.log'], 'Error: Specified -walletdir "debug.log" is not a directory', cwd=data_dir())
+ assert_equal(os.path.isfile(wallet_dir('wallet.dat')), True)
+
+ # create symlink to verify wallet directory path can be referenced
+ # through symlink
+ os.mkdir(wallet_dir('w7'))
+ os.symlink('w7', wallet_dir('w7_symlink'))
+
+ # rename wallet.dat to make sure plain wallet file paths (as opposed to
+ # directory paths) can be loaded
+ os.rename(wallet_dir("wallet.dat"), wallet_dir("w8"))
+
+ # restart node with a mix of wallet names:
+ # w1, w2, w3 - to verify new wallets are created when non-existing paths are specified
+ # w - to verify wallet name matching works when one wallet path is a prefix of another
+ # sub/w5 - to verify relative wallet path is created correctly
+ # extern/w6 - to verify absolute wallet path is created correctly
+ # w7_symlink - to verify symlinked wallet path is initialized correctly
+ # w8 - to verify existing wallet file is loaded correctly
+ # '' - to verify default wallet file is created correctly
+ wallet_names = ['w1', 'w2', 'w3', 'w', 'sub/w5', os.path.join(self.options.tmpdir, 'extern/w6'), 'w7_symlink', 'w8', '']
+ extra_args = ['-wallet={}'.format(n) for n in wallet_names]
+ self.start_node(0, extra_args)
+ assert_equal(set(node.listwallets()), set(wallet_names))
+
+ # check that all requested wallets were created
+ self.stop_node(0)
+ for wallet_name in wallet_names:
+ if os.path.isdir(wallet_dir(wallet_name)):
+ assert_equal(os.path.isfile(wallet_dir(wallet_name, "wallet.dat")), True)
+ else:
+ assert_equal(os.path.isfile(wallet_dir(wallet_name)), True)
+
+ # should not initialize if wallet path can't be created
+ exp_stderr = "boost::filesystem::create_directory: (The system cannot find the path specified|Not a directory):"
+ self.nodes[0].assert_start_raises_init_error(['-wallet=wallet.dat/bad'], exp_stderr, match=ErrorMatch.PARTIAL_REGEX)
+
+ self.nodes[0].assert_start_raises_init_error(['-walletdir=wallets'], 'Error: Specified -walletdir "wallets" does not exist')
+ self.nodes[0].assert_start_raises_init_error(['-walletdir=wallets'], 'Error: Specified -walletdir "wallets" is a relative path', cwd=data_dir())
+ self.nodes[0].assert_start_raises_init_error(['-walletdir=debug.log'], 'Error: Specified -walletdir "debug.log" is not a directory', cwd=data_dir())
# should not initialize if there are duplicate wallets
- self.assert_start_raises_init_error(0, ['-wallet=w1', '-wallet=w1'], 'Error loading wallet w1. Duplicate -wallet filename specified.')
-
- # should not initialize if wallet file is a directory
- os.mkdir(wallet_dir('w11'))
- self.assert_start_raises_init_error(0, ['-wallet=w11'], 'Error loading wallet w11. -wallet filename must be a regular file.')
+ self.nodes[0].assert_start_raises_init_error(['-wallet=w1', '-wallet=w1'], 'Error: Error loading wallet w1. Duplicate -wallet filename specified.')
# should not initialize if one wallet is a copy of another
- shutil.copyfile(wallet_dir('w2'), wallet_dir('w22'))
- self.assert_start_raises_init_error(0, ['-wallet=w2', '-wallet=w22'], 'duplicates fileid')
+ shutil.copyfile(wallet_dir('w8'), wallet_dir('w8_copy'))
+ exp_stderr = "BerkeleyBatch: Can't open database w8_copy \(duplicates fileid \w+ from w8\)"
+ self.nodes[0].assert_start_raises_init_error(['-wallet=w8', '-wallet=w8_copy'], exp_stderr, match=ErrorMatch.PARTIAL_REGEX)
# should not initialize if wallet file is a symlink
- os.symlink(wallet_dir('w1'), wallet_dir('w12'))
- self.assert_start_raises_init_error(0, ['-wallet=w12'], 'Error loading wallet w12. -wallet filename must be a regular file.')
+ os.symlink('w8', wallet_dir('w8_symlink'))
+ self.nodes[0].assert_start_raises_init_error(['-wallet=w8_symlink'], 'Error: Invalid -wallet path \'w8_symlink\'\. .*', match=ErrorMatch.FULL_REGEX)
# should not initialize if the specified walletdir does not exist
- self.assert_start_raises_init_error(0, ['-walletdir=bad'], 'Error: Specified -walletdir "bad" does not exist')
+ self.nodes[0].assert_start_raises_init_error(['-walletdir=bad'], 'Error: Specified -walletdir "bad" does not exist')
# should not initialize if the specified walletdir is not a directory
not_a_dir = wallet_dir('notadir')
open(not_a_dir, 'a').close()
- self.assert_start_raises_init_error(0, ['-walletdir=' + not_a_dir], 'Error: Specified -walletdir "' + not_a_dir + '" is not a directory')
+ self.nodes[0].assert_start_raises_init_error(['-walletdir=' + not_a_dir], 'Error: Specified -walletdir "' + not_a_dir + '" is not a directory')
+
+ self.log.info("Do not allow -zapwallettxes with multiwallet")
+ self.nodes[0].assert_start_raises_init_error(['-zapwallettxes', '-wallet=w1', '-wallet=w2'], "Error: -zapwallettxes is only allowed with a single wallet file")
+ self.nodes[0].assert_start_raises_init_error(['-zapwallettxes=1', '-wallet=w1', '-wallet=w2'], "Error: -zapwallettxes is only allowed with a single wallet file")
+ self.nodes[0].assert_start_raises_init_error(['-zapwallettxes=2', '-wallet=w1', '-wallet=w2'], "Error: -zapwallettxes is only allowed with a single wallet file")
+
+ self.log.info("Do not allow -salvagewallet with multiwallet")
+ self.nodes[0].assert_start_raises_init_error(['-salvagewallet', '-wallet=w1', '-wallet=w2'], "Error: -salvagewallet is only allowed with a single wallet file")
+ self.nodes[0].assert_start_raises_init_error(['-salvagewallet=1', '-wallet=w1', '-wallet=w2'], "Error: -salvagewallet is only allowed with a single wallet file")
+
+ self.log.info("Do not allow -upgradewallet with multiwallet")
+ self.nodes[0].assert_start_raises_init_error(['-upgradewallet', '-wallet=w1', '-wallet=w2'], "Error: -upgradewallet is only allowed with a single wallet file")
+ self.nodes[0].assert_start_raises_init_error(['-upgradewallet=1', '-wallet=w1', '-wallet=w2'], "Error: -upgradewallet is only allowed with a single wallet file")
# if wallets/ doesn't exist, datadir should be the default wallet dir
wallet_dir2 = data_dir('walletdir')
@@ -74,18 +122,21 @@ class MultiWalletTest(BitcoinTestFramework):
competing_wallet_dir = os.path.join(self.options.tmpdir, 'competing_walletdir')
os.mkdir(competing_wallet_dir)
- self.restart_node(0, ['-walletdir='+competing_wallet_dir])
- self.assert_start_raises_init_error(1, ['-walletdir='+competing_wallet_dir], 'Error initializing wallet database environment')
+ self.restart_node(0, ['-walletdir=' + competing_wallet_dir])
+ exp_stderr = "Error: Error initializing wallet database environment \"\S+competing_walletdir\"!"
+ self.nodes[1].assert_start_raises_init_error(['-walletdir=' + competing_wallet_dir], exp_stderr, match=ErrorMatch.PARTIAL_REGEX)
- self.restart_node(0, self.extra_args[0])
+ self.restart_node(0, extra_args)
- w1 = wallet("w1")
- w2 = wallet("w2")
- w3 = wallet("w3")
- w4 = wallet("w")
+ wallets = [wallet(w) for w in wallet_names]
wallet_bad = wallet("bad")
- w1.generate(1)
+ # check wallet names and balances
+ wallets[0].generate(1)
+ for wallet_name, wallet in zip(wallet_names, wallets):
+ info = wallet.getwalletinfo()
+ assert_equal(info['immature_balance'], 50 if wallet is wallets[0] else 0)
+ assert_equal(info['walletname'], wallet_name)
# accessing invalid wallet fails
assert_raises_rpc_error(-18, "Requested wallet does not exist or is not loaded", wallet_bad.getwalletinfo)
@@ -93,24 +144,7 @@ class MultiWalletTest(BitcoinTestFramework):
# accessing wallet RPC without using wallet endpoint fails
assert_raises_rpc_error(-19, "Wallet file not specified", node.getwalletinfo)
- # check w1 wallet balance
- w1_info = w1.getwalletinfo()
- assert_equal(w1_info['immature_balance'], 50)
- w1_name = w1_info['walletname']
- assert_equal(w1_name, "w1")
-
- # check w2 wallet balance
- w2_info = w2.getwalletinfo()
- assert_equal(w2_info['immature_balance'], 0)
- w2_name = w2_info['walletname']
- assert_equal(w2_name, "w2")
-
- w3_name = w3.getwalletinfo()['walletname']
- assert_equal(w3_name, "w3")
-
- w4_name = w4.getwalletinfo()['walletname']
- assert_equal(w4_name, "w")
-
+ w1, w2, w3, w4, *_ = wallets
w1.generate(101)
assert_equal(w1.getbalance(), 100)
assert_equal(w2.getbalance(), 0)
@@ -129,5 +163,12 @@ class MultiWalletTest(BitcoinTestFramework):
assert_equal(batch[0]["result"]["chain"], "regtest")
assert_equal(batch[1]["result"]["walletname"], "w1")
+ self.log.info('Check for per-wallet settxfee call')
+ assert_equal(w1.getwalletinfo()['paytxfee'], 0)
+ assert_equal(w2.getwalletinfo()['paytxfee'], 0)
+ w2.settxfee(4.0)
+ assert_equal(w1.getwalletinfo()['paytxfee'], 0)
+ assert_equal(w2.getwalletinfo()['paytxfee'], 4.0)
+
if __name__ == '__main__':
MultiWalletTest().main()
diff --git a/test/functional/wallet_txn_clone.py b/test/functional/wallet_txn_clone.py
index ce26d6e0ee..aee84f7e90 100755
--- a/test/functional/wallet_txn_clone.py
+++ b/test/functional/wallet_txn_clone.py
@@ -10,6 +10,7 @@ from test_framework.util import *
class TxnMallTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 4
+ self.extra_args = [['-deprecatedrpc=accounts']] * 4
def add_options(self, parser):
parser.add_option("--mineblock", dest="mine_block", default=False, action="store_true",
@@ -78,7 +79,7 @@ class TxnMallTest(BitcoinTestFramework):
# Use a different signature hash type to sign. This creates an equivalent but malleated clone.
# Don't send the clone anywhere yet
- tx1_clone = self.nodes[0].signrawtransaction(clone_raw, None, None, "ALL|ANYONECANPAY")
+ tx1_clone = self.nodes[0].signrawtransactionwithwallet(clone_raw, None, "ALL|ANYONECANPAY")
assert_equal(tx1_clone["complete"], True)
# Have node0 mine a block, if requested:
@@ -92,7 +93,8 @@ class TxnMallTest(BitcoinTestFramework):
# Node0's balance should be starting balance, plus 50BTC for another
# matured block, minus tx1 and tx2 amounts, and minus transaction fees:
expected = starting_balance + fund_foo_tx["fee"] + fund_bar_tx["fee"]
- if self.options.mine_block: expected += 50
+ if self.options.mine_block:
+ expected += 50
expected += tx1["amount"] + tx1["fee"]
expected += tx2["amount"] + tx2["fee"]
assert_equal(self.nodes[0].getbalance(), expected)
@@ -131,7 +133,7 @@ class TxnMallTest(BitcoinTestFramework):
tx1 = self.nodes[0].gettransaction(txid1)
tx1_clone = self.nodes[0].gettransaction(txid1_clone)
tx2 = self.nodes[0].gettransaction(txid2)
-
+
# Verify expected confirmations
assert_equal(tx1["confirmations"], -2)
assert_equal(tx1_clone["confirmations"], 2)
diff --git a/test/functional/wallet_txn_doublespend.py b/test/functional/wallet_txn_doublespend.py
index 01129f3817..d644a94c73 100755
--- a/test/functional/wallet_txn_doublespend.py
+++ b/test/functional/wallet_txn_doublespend.py
@@ -10,6 +10,7 @@ from test_framework.util import *
class TxnMallTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 4
+ self.extra_args = [['-deprecatedrpc=accounts']] * 4
def add_options(self, parser):
parser.add_option("--mineblock", dest="mine_block", default=False, action="store_true",
@@ -27,7 +28,7 @@ class TxnMallTest(BitcoinTestFramework):
for i in range(4):
assert_equal(self.nodes[i].getbalance(), starting_balance)
self.nodes[i].getnewaddress("") # bug workaround, coins generated assigned to first getnewaddress!
-
+
# Assign coins to foo and bar accounts:
node0_address_foo = self.nodes[0].getnewaddress("foo")
fund_foo_txid = self.nodes[0].sendfrom("", node0_address_foo, 1219)
@@ -58,13 +59,13 @@ class TxnMallTest(BitcoinTestFramework):
outputs[node1_address] = 1240
outputs[change_address] = 1248 - 1240 + doublespend_fee
rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
- doublespend = self.nodes[0].signrawtransaction(rawtx)
+ doublespend = self.nodes[0].signrawtransactionwithwallet(rawtx)
assert_equal(doublespend["complete"], True)
# Create two spends using 1 50 BTC coin each
txid1 = self.nodes[0].sendfrom("foo", node1_address, 40, 0)
txid2 = self.nodes[0].sendfrom("bar", node1_address, 20, 0)
-
+
# Have node0 mine a block:
if (self.options.mine_block):
self.nodes[0].generate(1)
@@ -76,7 +77,8 @@ class TxnMallTest(BitcoinTestFramework):
# Node0's balance should be starting balance, plus 50BTC for another
# matured block, minus 40, minus 20, and minus transaction fees:
expected = starting_balance + fund_foo_tx["fee"] + fund_bar_tx["fee"]
- if self.options.mine_block: expected += 50
+ if self.options.mine_block:
+ expected += 50
expected += tx1["amount"] + tx1["fee"]
expected += tx2["amount"] + tx2["fee"]
assert_equal(self.nodes[0].getbalance(), expected)
@@ -93,7 +95,7 @@ class TxnMallTest(BitcoinTestFramework):
else:
assert_equal(tx1["confirmations"], 0)
assert_equal(tx2["confirmations"], 0)
-
+
# Now give doublespend and its parents to miner:
self.nodes[2].sendrawtransaction(fund_foo_tx["hex"])
self.nodes[2].sendrawtransaction(fund_bar_tx["hex"])