Diffstat (limited to 'test')
-rw-r--r--  test/README.md | 20
-rw-r--r--  test/functional/README.md | 18
-rwxr-xr-x  test/functional/abandonconflict.py | 10
-rwxr-xr-x  test/functional/assumevalid.py | 64
-rwxr-xr-x  test/functional/bip65-cltv-p2p.py | 242
-rwxr-xr-x  test/functional/bip65-cltv.py | 82
-rwxr-xr-x  test/functional/bip68-112-113-p2p.py | 4
-rwxr-xr-x  test/functional/bip68-sequence.py | 25
-rwxr-xr-x  test/functional/bip9-softforks.py | 8
-rwxr-xr-x  test/functional/bipdersig-p2p.py | 217
-rwxr-xr-x  test/functional/bipdersig.py | 81
-rwxr-xr-x  test/functional/bitcoin_cli.py | 65
-rwxr-xr-x  test/functional/blockchain.py | 97
-rwxr-xr-x  test/functional/bumpfee.py | 35
-rwxr-xr-x  test/functional/create_cache.py | 7
-rwxr-xr-x  test/functional/dbcrash.py | 120
-rwxr-xr-x  test/functional/decodescript.py | 4
-rwxr-xr-x  test/functional/deprecated_rpc.py | 23
-rwxr-xr-x  test/functional/disablewallet.py | 9
-rwxr-xr-x  test/functional/disconnect_ban.py | 33
-rwxr-xr-x  test/functional/example_test.py | 67
-rwxr-xr-x  test/functional/feature_logging.py | 59
-rwxr-xr-x  test/functional/forknotify.py | 69
-rwxr-xr-x  test/functional/fundrawtransaction.py | 44
-rwxr-xr-x  test/functional/getblocktemplate_longpoll.py | 8
-rwxr-xr-x  test/functional/getblocktemplate_proposals.py | 157
-rwxr-xr-x  test/functional/getchaintips.py | 5
-rwxr-xr-x  test/functional/httpbasics.py | 4
-rwxr-xr-x  test/functional/import-rescan.py | 37
-rwxr-xr-x  test/functional/importmulti.py | 34
-rwxr-xr-x  test/functional/importprunedfunds.py | 16
-rwxr-xr-x  test/functional/invalidateblock.py | 4
-rwxr-xr-x  test/functional/invalidblockrequest.py | 4
-rwxr-xr-x  test/functional/invalidtxrequest.py | 4
-rwxr-xr-x  test/functional/keypool-topup.py | 74
-rwxr-xr-x  test/functional/keypool.py | 20
-rwxr-xr-x  test/functional/listsinceblock.py | 231
-rwxr-xr-x  test/functional/listtransactions.py | 10
-rwxr-xr-x  test/functional/maxuploadtarget.py | 83
-rwxr-xr-x  test/functional/mempool_limit.py | 4
-rwxr-xr-x  test/functional/mempool_packages.py | 10
-rwxr-xr-x  test/functional/mempool_persist.py | 58
-rwxr-xr-x  test/functional/mempool_reorg.py | 8
-rwxr-xr-x  test/functional/mempool_resurrect_test.py | 6
-rwxr-xr-x  test/functional/mempool_spendcoinbase.py | 7
-rwxr-xr-x  test/functional/merkle_blocks.py | 14
-rwxr-xr-x  test/functional/minchainwork.py | 89
-rwxr-xr-x  test/functional/mining.py | 135
-rwxr-xr-x  test/functional/multi_rpc.py | 5
-rwxr-xr-x  test/functional/multiwallet.py | 117
-rwxr-xr-x  test/functional/net.py | 10
-rwxr-xr-x  test/functional/node_network_limited.py | 81
-rwxr-xr-x  test/functional/notifications.py | 86
-rwxr-xr-x  test/functional/nulldummy.py | 13
-rwxr-xr-x  test/functional/p2p-acceptblock.py | 305
-rwxr-xr-x  test/functional/p2p-compactblocks.py | 86
-rwxr-xr-x  test/functional/p2p-feefilter.py | 35
-rwxr-xr-x  test/functional/p2p-fingerprint.py | 152
-rwxr-xr-x  test/functional/p2p-fullblocktest.py | 25
-rwxr-xr-x  test/functional/p2p-leaktests.py | 122
-rwxr-xr-x  test/functional/p2p-mempool.py | 16
-rwxr-xr-x  test/functional/p2p-segwit.py | 304
-rwxr-xr-x  test/functional/p2p-timeouts.py | 50
-rwxr-xr-x  test/functional/p2p-versionbits-warning.py | 32
-rwxr-xr-x  test/functional/preciousblock.py | 9
-rwxr-xr-x  test/functional/prioritise_transaction.py | 6
-rwxr-xr-x  test/functional/proxy_test.py | 8
-rwxr-xr-x  test/functional/pruning.py | 40
-rwxr-xr-x  test/functional/rawtransactions.py | 107
-rwxr-xr-x  test/functional/receivedby.py | 133
-rwxr-xr-x  test/functional/reindex.py | 5
-rwxr-xr-x  test/functional/replace-by-fee.py | 68
-rwxr-xr-x  test/functional/resendwallettransactions.py | 29
-rwxr-xr-x  test/functional/rest.py | 3
-rwxr-xr-x  test/functional/rpcbind_test.py | 24
-rwxr-xr-x  test/functional/rpcnamedargs.py | 17
-rwxr-xr-x  test/functional/segwit.py | 103
-rwxr-xr-x  test/functional/sendheaders.py | 319
-rwxr-xr-x  test/functional/signmessages.py | 25
-rwxr-xr-x  test/functional/signrawtransactions.py | 21
-rwxr-xr-x  test/functional/smartfees.py | 94
-rw-r--r--  test/functional/test_framework/address.py | 18
-rw-r--r--  test/functional/test_framework/authproxy.py | 77
-rw-r--r--  test/functional/test_framework/bignum.py | 39
-rw-r--r--  test/functional/test_framework/blockstore.py | 18
-rwxr-xr-x  test/functional/test_framework/comptool.py | 159
-rw-r--r--  test/functional/test_framework/coverage.py | 26
-rw-r--r--  test/functional/test_framework/key.py | 2
-rw-r--r--  test/functional/test_framework/messages.py | 1304
-rwxr-xr-x  test/functional/test_framework/mininode.py | 1998
-rw-r--r--  test/functional/test_framework/netutil.py | 20
-rw-r--r--  test/functional/test_framework/script.py | 247
-rw-r--r--  test/functional/test_framework/segwit_addr.py | 107
-rw-r--r--  test/functional/test_framework/socks5.py | 10
-rwxr-xr-x  test/functional/test_framework/test_framework.py | 313
-rwxr-xr-x  test/functional/test_framework/test_node.py | 227
-rw-r--r--  test/functional/test_framework/util.py | 84
-rwxr-xr-x  test/functional/test_runner.py | 61
-rwxr-xr-x  test/functional/txn_clone.py | 5
-rwxr-xr-x  test/functional/txn_doublespend.py | 5
-rwxr-xr-x  test/functional/uacomment.py | 35
-rwxr-xr-x  test/functional/uptime.py | 4
-rwxr-xr-x  test/functional/wallet-accounts.py | 141
-rwxr-xr-x  test/functional/wallet-dump.py | 20
-rwxr-xr-x  test/functional/wallet-encryption.py | 23
-rwxr-xr-x  test/functional/wallet-hd.py | 52
-rwxr-xr-x  test/functional/wallet.py | 115
-rwxr-xr-x  test/functional/walletbackup.py | 34
-rwxr-xr-x  test/functional/zapwallettxes.py | 108
-rwxr-xr-x  test/functional/zmq_test.py | 146
-rwxr-xr-x  test/util/bitcoin-util-test.py | 13
-rw-r--r--  test/util/data/bitcoin-util-test.json | 18
-rw-r--r--  test/util/data/tt-delin1-out.json | 42
-rw-r--r--  test/util/data/tt-delout1-out.json | 42
-rw-r--r--  test/util/data/tt-locktime317000-out.json | 44
-rw-r--r--  test/util/data/txcreate1.json | 10
-rw-r--r--  test/util/data/txcreate2.json | 2
-rw-r--r--  test/util/data/txcreatedata1.json | 6
-rw-r--r--  test/util/data/txcreatedata2.json | 6
-rw-r--r--  test/util/data/txcreatedata_seq0.json | 2
-rw-r--r--  test/util/data/txcreatedata_seq1.json | 4
-rw-r--r--  test/util/data/txcreatemultisig1.json | 6
-rw-r--r--  test/util/data/txcreatemultisig2.json | 2
-rw-r--r--  test/util/data/txcreatemultisig3.json | 8
-rw-r--r--  test/util/data/txcreatemultisig4.json | 2
-rw-r--r--  test/util/data/txcreatemultisig5.json | 26
-rw-r--r--  test/util/data/txcreateoutpubkey1.json | 2
-rw-r--r--  test/util/data/txcreateoutpubkey2.json | 8
-rw-r--r--  test/util/data/txcreateoutpubkey3.json | 2
-rw-r--r--  test/util/data/txcreatescript1.json | 2
-rw-r--r--  test/util/data/txcreatescript2.json | 2
-rw-r--r--  test/util/data/txcreatescript3.json | 8
-rw-r--r--  test/util/data/txcreatescript4.json | 2
134 files changed, 5803 insertions, 4731 deletions
diff --git a/test/README.md b/test/README.md
index 15f6df790f..868eb667ae 100644
--- a/test/README.md
+++ b/test/README.md
@@ -155,6 +155,26 @@ import pdb; pdb.set_trace()
anywhere in the test. You will then be able to inspect variables, as well as
call methods that interact with the bitcoind nodes-under-test.
+If further introspection of the bitcoind instances themselves becomes
+necessary, this can be accomplished by first setting a pdb breakpoint
+at an appropriate location, running the test to that point, then using
+`gdb` to attach to the process and debug.
+
+For instance, to attach to `self.nodes[1]` during a run:
+
+```bash
+2017-06-27 14:13:56.686000 TestFramework (INFO): Initializing test directory /tmp/user/1000/testo9vsdjo3
+```
+
+use the directory path to get the pid from the pid file:
+
+```bash
+cat /tmp/user/1000/testo9vsdjo3/node1/regtest/bitcoind.pid
+gdb /home/example/bitcoind <pid>
+```
+
+Note: the gdb attach step may require `sudo`
+
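The pid lookup and the attach can also be combined into a single command (an editor's sketch, not part of the patch above; `gdb -p` attaches to a running process by pid, and the path comes from the log line shown earlier):

```bash
sudo gdb -p "$(cat /tmp/user/1000/testo9vsdjo3/node1/regtest/bitcoind.pid)"
```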
### Util tests
Util tests can be run locally by running `test/util/bitcoin-util-test.py`.
diff --git a/test/functional/README.md b/test/functional/README.md
index 96fe0becce..193ca947bc 100644
--- a/test/functional/README.md
+++ b/test/functional/README.md
@@ -24,8 +24,8 @@ don't have test cases for.
- Use a module-level docstring to describe what the test is testing, and how it
is testing it.
- When subclassing the BitcoinTestFramework, place overrides for the
- `__init__()`, and `setup_xxxx()` methods at the top of the subclass, then
- locally-defined helper methods, then the `run_test()` method.
+ `set_test_params()`, `add_options()` and `setup_xxxx()` methods at the top of
+ the subclass, then locally-defined helper methods, then the `run_test()` method.
#### General test-writing advice
@@ -36,7 +36,7 @@ don't have test cases for.
- Avoid stop-starting the nodes multiple times during the test if possible. A
stop-start takes several seconds, so doing it several times blows up the
runtime of the test.
-- Set the `self.setup_clean_chain` variable in `__init__()` to control whether
+- Set the `self.setup_clean_chain` variable in `set_test_params()` to control whether
or not to use the cached data directories. The cached data directories
contain a 200-block pre-mined blockchain and wallets for four nodes. Each node
has 25 mature blocks (25x50=1250 BTC) in its wallet.
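To make the layout guidance above concrete, here is a minimal sketch of a test that follows it (class and helper names are illustrative only, not taken from this diff):

```python
#!/usr/bin/env python3
"""Sketch of the recommended subclass layout."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal

class LayoutSketchTest(BitcoinTestFramework):
    # Framework overrides come first: set_test_params(), add_options(), setup_xxxx().
    def set_test_params(self):
        self.num_nodes = 2
        self.setup_clean_chain = True  # start from a fresh chain instead of the 200-block cache
        self.extra_args = [[], []]

    # Locally-defined helper methods come next.
    def mine_and_sync(self, node_index, count):
        self.nodes[node_index].generate(count)
        self.sync_all()

    # run_test() comes last.
    def run_test(self):
        self.mine_and_sync(0, 10)
        assert_equal(self.nodes[0].getblockcount(), 10)

if __name__ == '__main__':
    LayoutSketchTest().main()
```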
@@ -63,12 +63,12 @@ wrappers for them, `msg_block`, `msg_tx`, etc).
with the bitcoind(s) being tested (using python's asyncore package); the other
implements the test logic.
-- `NodeConn` is the class used to connect to a bitcoind. If you implement
-a callback class that derives from `NodeConnCB` and pass that to the
-`NodeConn` object, your code will receive the appropriate callbacks when
-events of interest arrive.
+- `P2PConnection` is the class used to connect to a bitcoind. `P2PInterface`
+contains the higher level logic for processing P2P payloads and connecting to
+the Bitcoin Core node application logic. For custom behaviour, subclass the
+P2PInterface object and override the callback methods.
-- Call `NetworkThread.start()` after all `NodeConn` objects are created to
+- Call `NetworkThread.start()` after all `P2PInterface` objects are created to
start the networking thread. (Continue with the test logic in your existing
thread.)
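A minimal sketch of that pattern, lifted almost verbatim from `assumevalid.py` later in this diff (the surrounding `BitcoinTestFramework` subclass and the block list are assumed):

```python
from test_framework.mininode import (CBlockHeader,
                                     NetworkThread,
                                     P2PInterface,
                                     msg_headers)

class BaseNode(P2PInterface):
    """Subclass P2PInterface and add or override methods for custom behaviour."""
    def send_header_for_blocks(self, new_blocks):
        headers_message = msg_headers()
        headers_message.headers = [CBlockHeader(b) for b in new_blocks]
        self.send_message(headers_message)

# Inside run_test() of a BitcoinTestFramework subclass:
#   p2p0 = self.nodes[0].add_p2p_connection(BaseNode())
#   NetworkThread().start()   # start up network handling in another thread
#   p2p0.wait_for_verack()    # ensures the P2P connection is fully up
#   p2p0.send_header_for_blocks(blocks[0:2000])
```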
@@ -90,7 +90,7 @@ on nodes 2 and up.
- Implement a (generator) function called `get_tests()` which yields `TestInstance`s.
Each `TestInstance` consists of:
- - a list of `[object, outcome, hash]` entries
+ - A list of `[object, outcome, hash]` entries
* `object` is a `CBlock`, `CTransaction`, or
`CBlockHeader`. `CBlock`'s and `CTransaction`'s are tested for
acceptance. `CBlockHeader`s can be used so that the test runner can deliver
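For reference, a condensed sketch of the comptool pattern described above, assembled from the comparison tests touched in this diff (block construction is simplified; the class name is illustrative):

```python
import time

from test_framework.blocktools import create_block, create_coinbase
from test_framework.comptool import TestInstance, TestManager
from test_framework.mininode import NetworkThread
from test_framework.test_framework import ComparisonTestFramework

class ComptoolSketchTest(ComparisonTestFramework):
    def set_test_params(self):
        self.num_nodes = 1
        self.setup_clean_chain = True

    def run_test(self):
        test = TestManager(self, self.options.tmpdir)
        test.add_all_connections(self.nodes)
        NetworkThread().start()  # network handling runs in another thread
        test.run()

    def get_tests(self):
        # Build one block on the current tip and expect the node to accept it.
        tip = int(self.nodes[0].getbestblockhash(), 16)
        height = self.nodes[0].getblockcount() + 1
        block = create_block(tip, create_coinbase(height), int(time.time()) + 1)
        block.solve()
        yield TestInstance([[block, True]])

if __name__ == '__main__':
    ComptoolSketchTest().main()
```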
diff --git a/test/functional/abandonconflict.py b/test/functional/abandonconflict.py
index c87c02492d..e8dbc86469 100755
--- a/test/functional/abandonconflict.py
+++ b/test/functional/abandonconflict.py
@@ -14,10 +14,8 @@ from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
class AbandonConflictTest(BitcoinTestFramework):
- def __init__(self):
- super().__init__()
+ def set_test_params(self):
self.num_nodes = 2
- self.setup_clean_chain = False
self.extra_args = [["-minrelaytxfee=0.00001"], []]
def run_test(self):
@@ -74,7 +72,7 @@ class AbandonConflictTest(BitcoinTestFramework):
# Restart the node with a higher min relay fee so the parent tx is no longer in mempool
# TODO: redo with eviction
self.stop_node(0)
- self.nodes[0] = self.start_node(0, self.options.tmpdir, ["-minrelaytxfee=0.0001"])
+ self.start_node(0, extra_args=["-minrelaytxfee=0.0001"])
# Verify txs no longer in either node's mempool
assert_equal(len(self.nodes[0].getrawmempool()), 0)
@@ -101,7 +99,7 @@ class AbandonConflictTest(BitcoinTestFramework):
# Verify that even with a low min relay fee, the tx is not reaccepted from wallet on startup once abandoned
self.stop_node(0)
- self.nodes[0] = self.start_node(0, self.options.tmpdir, ["-minrelaytxfee=0.00001"])
+ self.start_node(0, extra_args=["-minrelaytxfee=0.00001"])
assert_equal(len(self.nodes[0].getrawmempool()), 0)
assert_equal(self.nodes[0].getbalance(), balance)
@@ -121,7 +119,7 @@ class AbandonConflictTest(BitcoinTestFramework):
# Remove using high relay fee again
self.stop_node(0)
- self.nodes[0] = self.start_node(0, self.options.tmpdir, ["-minrelaytxfee=0.0001"])
+ self.start_node(0, extra_args=["-minrelaytxfee=0.0001"])
assert_equal(len(self.nodes[0].getrawmempool()), 0)
newbalance = self.nodes[0].getbalance()
assert_equal(newbalance, balance - Decimal("24.9996"))
diff --git a/test/functional/assumevalid.py b/test/functional/assumevalid.py
index 9d17faac51..13104f71bc 100755
--- a/test/functional/assumevalid.py
+++ b/test/functional/assumevalid.py
@@ -39,37 +39,38 @@ from test_framework.mininode import (CBlockHeader,
CTxIn,
CTxOut,
NetworkThread,
- NodeConn,
- NodeConnCB,
+ P2PInterface,
msg_block,
msg_headers)
from test_framework.script import (CScript, OP_TRUE)
from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import (p2p_port, assert_equal)
+from test_framework.util import assert_equal
-class BaseNode(NodeConnCB):
+class BaseNode(P2PInterface):
def send_header_for_blocks(self, new_blocks):
headers_message = msg_headers()
headers_message.headers = [CBlockHeader(b) for b in new_blocks]
self.send_message(headers_message)
class AssumeValidTest(BitcoinTestFramework):
- def __init__(self):
- super().__init__()
+ def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 3
def setup_network(self):
+ self.add_nodes(3)
# Start node0. We don't start the other nodes yet since
# we need to pre-mine a block with an invalid transaction
# signature so we can pass in the block hash as assumevalid.
- self.nodes = [self.start_node(0, self.options.tmpdir)]
+ self.start_node(0)
- def send_blocks_until_disconnected(self, node):
+ def send_blocks_until_disconnected(self, p2p_conn):
"""Keep sending blocks to the node until we're disconnected."""
for i in range(len(self.blocks)):
+ if p2p_conn.state != "connected":
+ break
try:
- node.send_message(msg_block(self.blocks[i]))
+ p2p_conn.send_message(msg_block(self.blocks[i]))
except IOError as e:
assert str(e) == 'Not connected, no pushbuf'
break
@@ -95,13 +96,10 @@ class AssumeValidTest(BitcoinTestFramework):
def run_test(self):
# Connect to node0
- node0 = BaseNode()
- connections = []
- connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], node0))
- node0.add_connection(connections[0])
+ p2p0 = self.nodes[0].add_p2p_connection(BaseNode())
NetworkThread().start() # Start up network handling in another thread
- node0.wait_for_verack()
+ self.nodes[0].p2p.wait_for_verack()
# Build the blockchain
self.tip = int(self.nodes[0].getbestblockhash(), 16)
@@ -162,40 +160,34 @@ class AssumeValidTest(BitcoinTestFramework):
height += 1
# Start node1 and node2 with assumevalid so they accept a block with a bad signature.
- self.nodes.append(self.start_node(1, self.options.tmpdir,
- ["-assumevalid=" + hex(block102.sha256)]))
- node1 = BaseNode() # connects to node1
- connections.append(NodeConn('127.0.0.1', p2p_port(1), self.nodes[1], node1))
- node1.add_connection(connections[1])
- node1.wait_for_verack()
-
- self.nodes.append(self.start_node(2, self.options.tmpdir,
- ["-assumevalid=" + hex(block102.sha256)]))
- node2 = BaseNode() # connects to node2
- connections.append(NodeConn('127.0.0.1', p2p_port(2), self.nodes[2], node2))
- node2.add_connection(connections[2])
- node2.wait_for_verack()
+ self.start_node(1, extra_args=["-assumevalid=" + hex(block102.sha256)])
+ p2p1 = self.nodes[1].add_p2p_connection(BaseNode())
+ p2p1.wait_for_verack()
+
+ self.start_node(2, extra_args=["-assumevalid=" + hex(block102.sha256)])
+ p2p2 = self.nodes[2].add_p2p_connection(BaseNode())
+ p2p2.wait_for_verack()
# send header lists to all three nodes
- node0.send_header_for_blocks(self.blocks[0:2000])
- node0.send_header_for_blocks(self.blocks[2000:])
- node1.send_header_for_blocks(self.blocks[0:2000])
- node1.send_header_for_blocks(self.blocks[2000:])
- node2.send_header_for_blocks(self.blocks[0:200])
+ p2p0.send_header_for_blocks(self.blocks[0:2000])
+ p2p0.send_header_for_blocks(self.blocks[2000:])
+ p2p1.send_header_for_blocks(self.blocks[0:2000])
+ p2p1.send_header_for_blocks(self.blocks[2000:])
+ p2p2.send_header_for_blocks(self.blocks[0:200])
# Send blocks to node0. Block 102 will be rejected.
- self.send_blocks_until_disconnected(node0)
+ self.send_blocks_until_disconnected(p2p0)
self.assert_blockchain_height(self.nodes[0], 101)
# Send all blocks to node1. All blocks will be accepted.
for i in range(2202):
- node1.send_message(msg_block(self.blocks[i]))
+ p2p1.send_message(msg_block(self.blocks[i]))
# Syncing 2200 blocks can take a while on slow systems. Give it plenty of time to sync.
- node1.sync_with_ping(120)
+ p2p1.sync_with_ping(120)
assert_equal(self.nodes[1].getblock(self.nodes[1].getbestblockhash())['height'], 2202)
# Send blocks to node2. Block 102 will be rejected.
- self.send_blocks_until_disconnected(node2)
+ self.send_blocks_until_disconnected(p2p2)
self.assert_blockchain_height(self.nodes[2], 101)
if __name__ == '__main__':
diff --git a/test/functional/bip65-cltv-p2p.py b/test/functional/bip65-cltv-p2p.py
index bb83042f35..2af5eb275f 100755
--- a/test/functional/bip65-cltv-p2p.py
+++ b/test/functional/bip65-cltv-p2p.py
@@ -4,173 +4,157 @@
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test BIP65 (CHECKLOCKTIMEVERIFY).
-Connect to a single node.
-Mine 2 (version 3) blocks (save the coinbases for later).
-Generate 98 more version 3 blocks, verify the node accepts.
-Mine 749 version 4 blocks, verify the node accepts.
-Check that the new CLTV rules are not enforced on the 750th version 4 block.
-Check that the new CLTV rules are enforced on the 751st version 4 block.
-Mine 199 new version blocks.
-Mine 1 old-version block.
-Mine 1 new version block.
-Mine 1 old version block, see that the node rejects.
+Test that the CHECKLOCKTIMEVERIFY soft-fork activates at (regtest) block height
+1351.
"""
-from test_framework.test_framework import ComparisonTestFramework
+from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
-from test_framework.mininode import CTransaction, NetworkThread
+from test_framework.mininode import *
from test_framework.blocktools import create_coinbase, create_block
-from test_framework.comptool import TestInstance, TestManager
-from test_framework.script import CScript, OP_1NEGATE, OP_CHECKLOCKTIMEVERIFY, OP_DROP
+from test_framework.script import CScript, OP_1NEGATE, OP_CHECKLOCKTIMEVERIFY, OP_DROP, CScriptNum
from io import BytesIO
-import time
+
+CLTV_HEIGHT = 1351
+
+# Reject codes that we might receive in this test
+REJECT_INVALID = 16
+REJECT_OBSOLETE = 17
+REJECT_NONSTANDARD = 64
def cltv_invalidate(tx):
'''Modify the signature in vin 0 of the tx to fail CLTV
Prepends -1 CLTV DROP in the scriptSig itself.
+
+ TODO: test more ways that transactions using CLTV could be invalid (eg
+ locktime requirements fail, sequence time requirements fail, etc).
'''
tx.vin[0].scriptSig = CScript([OP_1NEGATE, OP_CHECKLOCKTIMEVERIFY, OP_DROP] +
list(CScript(tx.vin[0].scriptSig)))
-
-class BIP65Test(ComparisonTestFramework):
-
- def __init__(self):
- super().__init__()
+def cltv_validate(node, tx, height):
+ '''Modify the signature in vin 0 of the tx to pass CLTV
+ Prepends <height> CLTV DROP in the scriptSig, and sets
+ the locktime to height'''
+ tx.vin[0].nSequence = 0
+ tx.nLockTime = height
+
+ # Need to re-sign, since nSequence and nLockTime changed
+ signed_result = node.signrawtransaction(ToHex(tx))
+ new_tx = CTransaction()
+ new_tx.deserialize(BytesIO(hex_str_to_bytes(signed_result['hex'])))
+
+ new_tx.vin[0].scriptSig = CScript([CScriptNum(height), OP_CHECKLOCKTIMEVERIFY, OP_DROP] +
+ list(CScript(new_tx.vin[0].scriptSig)))
+ return new_tx
+
+def create_transaction(node, coinbase, to_address, amount):
+ from_txid = node.getblock(coinbase)['tx'][0]
+ inputs = [{ "txid" : from_txid, "vout" : 0}]
+ outputs = { to_address : amount }
+ rawtx = node.createrawtransaction(inputs, outputs)
+ signresult = node.signrawtransaction(rawtx)
+ tx = CTransaction()
+ tx.deserialize(BytesIO(hex_str_to_bytes(signresult['hex'])))
+ return tx
+
+class BIP65Test(BitcoinTestFramework):
+ def set_test_params(self):
self.num_nodes = 1
- self.extra_args = [['-whitelist=127.0.0.1', '-blockversion=3']]
+ self.extra_args = [['-promiscuousmempoolflags=1', '-whitelist=127.0.0.1']]
+ self.setup_clean_chain = True
def run_test(self):
- test = TestManager(self, self.options.tmpdir)
- test.add_all_connections(self.nodes)
+ self.nodes[0].add_p2p_connection(P2PInterface())
+
NetworkThread().start() # Start up network handling in another thread
- test.run()
-
- def create_transaction(self, node, coinbase, to_address, amount):
- from_txid = node.getblock(coinbase)['tx'][0]
- inputs = [{ "txid" : from_txid, "vout" : 0}]
- outputs = { to_address : amount }
- rawtx = node.createrawtransaction(inputs, outputs)
- signresult = node.signrawtransaction(rawtx)
- tx = CTransaction()
- f = BytesIO(hex_str_to_bytes(signresult['hex']))
- tx.deserialize(f)
- return tx
-
- def get_tests(self):
-
- self.coinbase_blocks = self.nodes[0].generate(2)
- height = 3 # height of the next block to build
- self.tip = int("0x" + self.nodes[0].getbestblockhash(), 0)
+
+ # wait_for_verack ensures that the P2P connection is fully up.
+ self.nodes[0].p2p.wait_for_verack()
+
+ self.log.info("Mining %d blocks", CLTV_HEIGHT - 2)
+ self.coinbase_blocks = self.nodes[0].generate(CLTV_HEIGHT - 2)
self.nodeaddress = self.nodes[0].getnewaddress()
- self.last_block_time = int(time.time())
-
- ''' 398 more version 3 blocks '''
- test_blocks = []
- for i in range(398):
- block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
- block.nVersion = 3
- block.rehash()
- block.solve()
- test_blocks.append([block, True])
- self.last_block_time += 1
- self.tip = block.sha256
- height += 1
- yield TestInstance(test_blocks, sync_every_block=False)
-
- ''' Mine 749 version 4 blocks '''
- test_blocks = []
- for i in range(749):
- block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
- block.nVersion = 4
- block.rehash()
- block.solve()
- test_blocks.append([block, True])
- self.last_block_time += 1
- self.tip = block.sha256
- height += 1
- yield TestInstance(test_blocks, sync_every_block=False)
-
- '''
- Check that the new CLTV rules are not enforced in the 750th
- version 3 block.
- '''
- spendtx = self.create_transaction(self.nodes[0],
- self.coinbase_blocks[0], self.nodeaddress, 1.0)
+
+ self.log.info("Test that an invalid-according-to-CLTV transaction can still appear in a block")
+
+ spendtx = create_transaction(self.nodes[0], self.coinbase_blocks[0],
+ self.nodeaddress, 1.0)
cltv_invalidate(spendtx)
spendtx.rehash()
- block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
- block.nVersion = 4
+ tip = self.nodes[0].getbestblockhash()
+ block_time = self.nodes[0].getblockheader(tip)['mediantime'] + 1
+ block = create_block(int(tip, 16), create_coinbase(CLTV_HEIGHT - 1), block_time)
+ block.nVersion = 3
block.vtx.append(spendtx)
block.hashMerkleRoot = block.calc_merkle_root()
- block.rehash()
block.solve()
- self.last_block_time += 1
- self.tip = block.sha256
- height += 1
- yield TestInstance([[block, True]])
-
- ''' Mine 199 new version blocks on last valid tip '''
- test_blocks = []
- for i in range(199):
- block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
- block.nVersion = 4
- block.rehash()
- block.solve()
- test_blocks.append([block, True])
- self.last_block_time += 1
- self.tip = block.sha256
- height += 1
- yield TestInstance(test_blocks, sync_every_block=False)
-
- ''' Mine 1 old version block '''
- block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
+ self.nodes[0].p2p.send_and_ping(msg_block(block))
+ assert_equal(self.nodes[0].getbestblockhash(), block.hash)
+
+ self.log.info("Test that blocks must now be at least version 4")
+ tip = block.sha256
+ block_time += 1
+ block = create_block(tip, create_coinbase(CLTV_HEIGHT), block_time)
block.nVersion = 3
- block.rehash()
block.solve()
- self.last_block_time += 1
- self.tip = block.sha256
- height += 1
- yield TestInstance([[block, True]])
+ self.nodes[0].p2p.send_and_ping(msg_block(block))
+ assert_equal(int(self.nodes[0].getbestblockhash(), 16), tip)
- ''' Mine 1 new version block '''
- block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
+ wait_until(lambda: "reject" in self.nodes[0].p2p.last_message.keys(), lock=mininode_lock)
+ with mininode_lock:
+ assert_equal(self.nodes[0].p2p.last_message["reject"].code, REJECT_OBSOLETE)
+ assert_equal(self.nodes[0].p2p.last_message["reject"].reason, b'bad-version(0x00000003)')
+ assert_equal(self.nodes[0].p2p.last_message["reject"].data, block.sha256)
+ del self.nodes[0].p2p.last_message["reject"]
+
+ self.log.info("Test that invalid-according-to-cltv transactions cannot appear in a block")
block.nVersion = 4
- block.rehash()
- block.solve()
- self.last_block_time += 1
- self.tip = block.sha256
- height += 1
- yield TestInstance([[block, True]])
-
- '''
- Check that the new CLTV rules are enforced in the 951st version 4
- block.
- '''
- spendtx = self.create_transaction(self.nodes[0],
- self.coinbase_blocks[1], self.nodeaddress, 1.0)
+
+ spendtx = create_transaction(self.nodes[0], self.coinbase_blocks[1],
+ self.nodeaddress, 1.0)
cltv_invalidate(spendtx)
spendtx.rehash()
- block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
- block.nVersion = 4
+ # First we show that this tx is valid except for CLTV by getting it
+ # accepted to the mempool (which we can achieve with
+ # -promiscuousmempoolflags).
+ self.nodes[0].p2p.send_and_ping(msg_tx(spendtx))
+ assert spendtx.hash in self.nodes[0].getrawmempool()
+
+ # Now we verify that a block with this transaction is invalid.
block.vtx.append(spendtx)
block.hashMerkleRoot = block.calc_merkle_root()
- block.rehash()
block.solve()
- self.last_block_time += 1
- yield TestInstance([[block, False]])
- ''' Mine 1 old version block, should be invalid '''
- block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
- block.nVersion = 3
- block.rehash()
+ self.nodes[0].p2p.send_and_ping(msg_block(block))
+ assert_equal(int(self.nodes[0].getbestblockhash(), 16), tip)
+
+ wait_until(lambda: "reject" in self.nodes[0].p2p.last_message.keys(), lock=mininode_lock)
+ with mininode_lock:
+ assert self.nodes[0].p2p.last_message["reject"].code in [REJECT_INVALID, REJECT_NONSTANDARD]
+ assert_equal(self.nodes[0].p2p.last_message["reject"].data, block.sha256)
+ if self.nodes[0].p2p.last_message["reject"].code == REJECT_INVALID:
+ # Generic rejection when a block is invalid
+ assert_equal(self.nodes[0].p2p.last_message["reject"].reason, b'block-validation-failed')
+ else:
+ assert b'Negative locktime' in self.nodes[0].p2p.last_message["reject"].reason
+
+ self.log.info("Test that a version 4 block with a valid-according-to-CLTV transaction is accepted")
+ spendtx = cltv_validate(self.nodes[0], spendtx, CLTV_HEIGHT - 1)
+ spendtx.rehash()
+
+ block.vtx.pop(1)
+ block.vtx.append(spendtx)
+ block.hashMerkleRoot = block.calc_merkle_root()
block.solve()
- self.last_block_time += 1
- yield TestInstance([[block, False]])
+
+ self.nodes[0].p2p.send_and_ping(msg_block(block))
+ assert_equal(int(self.nodes[0].getbestblockhash(), 16), block.sha256)
+
if __name__ == '__main__':
BIP65Test().main()
diff --git a/test/functional/bip65-cltv.py b/test/functional/bip65-cltv.py
deleted file mode 100755
index ddf932c746..0000000000
--- a/test/functional/bip65-cltv.py
+++ /dev/null
@@ -1,82 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (c) 2015-2016 The Bitcoin Core developers
-# Distributed under the MIT software license, see the accompanying
-# file COPYING or http://www.opensource.org/licenses/mit-license.php.
-"""Test the CHECKLOCKTIMEVERIFY (BIP65) soft-fork logic."""
-
-from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import *
-
-class BIP65Test(BitcoinTestFramework):
- def __init__(self):
- super().__init__()
- self.num_nodes = 3
- self.setup_clean_chain = False
- self.extra_args = [[], ["-blockversion=3"], ["-blockversion=4"]]
-
- def setup_network(self):
- self.setup_nodes()
- connect_nodes(self.nodes[1], 0)
- connect_nodes(self.nodes[2], 0)
- self.sync_all()
-
- def run_test(self):
- cnt = self.nodes[0].getblockcount()
-
- # Mine some old-version blocks
- self.nodes[1].generate(200)
- cnt += 100
- self.sync_all()
- if (self.nodes[0].getblockcount() != cnt + 100):
- raise AssertionError("Failed to mine 100 version=3 blocks")
-
- # Mine 750 new-version blocks
- for i in range(15):
- self.nodes[2].generate(50)
- self.sync_all()
- if (self.nodes[0].getblockcount() != cnt + 850):
- raise AssertionError("Failed to mine 750 version=4 blocks")
-
- # TODO: check that new CHECKLOCKTIMEVERIFY rules are not enforced
-
- # Mine 1 new-version block
- self.nodes[2].generate(1)
- self.sync_all()
- if (self.nodes[0].getblockcount() != cnt + 851):
- raise AssertionError("Failed to mine a version=4 blocks")
-
- # TODO: check that new CHECKLOCKTIMEVERIFY rules are enforced
-
- # Mine 198 new-version blocks
- for i in range(2):
- self.nodes[2].generate(99)
- self.sync_all()
- if (self.nodes[0].getblockcount() != cnt + 1049):
- raise AssertionError("Failed to mine 198 version=4 blocks")
-
- # Mine 1 old-version block
- self.nodes[1].generate(1)
- self.sync_all()
- if (self.nodes[0].getblockcount() != cnt + 1050):
- raise AssertionError("Failed to mine a version=3 block after 949 version=4 blocks")
-
- # Mine 1 new-version blocks
- self.nodes[2].generate(1)
- self.sync_all()
- if (self.nodes[0].getblockcount() != cnt + 1051):
- raise AssertionError("Failed to mine a version=4 block")
-
- # Mine 1 old-version blocks. This should fail
- assert_raises_jsonrpc(-1,"CreateNewBlock: TestBlockValidity failed: bad-version(0x00000003)", self.nodes[1].generate, 1)
- self.sync_all()
- if (self.nodes[0].getblockcount() != cnt + 1051):
- raise AssertionError("Accepted a version=3 block after 950 version=4 blocks")
-
- # Mine 1 new-version blocks
- self.nodes[2].generate(1)
- self.sync_all()
- if (self.nodes[0].getblockcount() != cnt + 1052):
- raise AssertionError("Failed to mine a version=4 block")
-
-if __name__ == '__main__':
- BIP65Test().main()
diff --git a/test/functional/bip68-112-113-p2p.py b/test/functional/bip68-112-113-p2p.py
index 5a322e8c0e..7e6a4f4408 100755
--- a/test/functional/bip68-112-113-p2p.py
+++ b/test/functional/bip68-112-113-p2p.py
@@ -92,9 +92,9 @@ def all_rlt_txs(txarray):
return txs
class BIP68_112_113Test(ComparisonTestFramework):
- def __init__(self):
- super().__init__()
+ def set_test_params(self):
self.num_nodes = 1
+ self.setup_clean_chain = True
self.extra_args = [['-whitelist=127.0.0.1', '-blockversion=4']]
def run_test(self):
diff --git a/test/functional/bip68-sequence.py b/test/functional/bip68-sequence.py
index 87a50692f6..5f8f21701f 100755
--- a/test/functional/bip68-sequence.py
+++ b/test/functional/bip68-sequence.py
@@ -17,10 +17,8 @@ SEQUENCE_LOCKTIME_MASK = 0x0000ffff
NOT_FINAL_ERROR = "64: non-BIP68-final"
class BIP68Test(BitcoinTestFramework):
- def __init__(self):
- super().__init__()
+ def set_test_params(self):
self.num_nodes = 2
- self.setup_clean_chain = False
self.extra_args = [[], ["-acceptnonstdtxn=0"]]
def run_test(self):
@@ -85,7 +83,7 @@ class BIP68Test(BitcoinTestFramework):
tx2.vout = [CTxOut(int(value-self.relayfee*COIN), CScript([b'a']))]
tx2.rehash()
- assert_raises_jsonrpc(-26, NOT_FINAL_ERROR, self.nodes[0].sendrawtransaction, ToHex(tx2))
+ assert_raises_rpc_error(-26, NOT_FINAL_ERROR, self.nodes[0].sendrawtransaction, ToHex(tx2))
# Setting the version back down to 1 should disable the sequence lock,
# so this should be accepted.
@@ -182,7 +180,7 @@ class BIP68Test(BitcoinTestFramework):
if (using_sequence_locks and not should_pass):
# This transaction should be rejected
- assert_raises_jsonrpc(-26, NOT_FINAL_ERROR, self.nodes[0].sendrawtransaction, rawtx)
+ assert_raises_rpc_error(-26, NOT_FINAL_ERROR, self.nodes[0].sendrawtransaction, rawtx)
else:
# This raw transaction should be accepted
self.nodes[0].sendrawtransaction(rawtx)
@@ -229,7 +227,7 @@ class BIP68Test(BitcoinTestFramework):
if (orig_tx.hash in node.getrawmempool()):
# sendrawtransaction should fail if the tx is in the mempool
- assert_raises_jsonrpc(-26, NOT_FINAL_ERROR, node.sendrawtransaction, ToHex(tx))
+ assert_raises_rpc_error(-26, NOT_FINAL_ERROR, node.sendrawtransaction, ToHex(tx))
else:
# sendrawtransaction should succeed if the tx is not in the mempool
node.sendrawtransaction(ToHex(tx))
@@ -282,7 +280,7 @@ class BIP68Test(BitcoinTestFramework):
tx5.vout[0].nValue += int(utxos[0]["amount"]*COIN)
raw_tx5 = self.nodes[0].signrawtransaction(ToHex(tx5))["hex"]
- assert_raises_jsonrpc(-26, NOT_FINAL_ERROR, self.nodes[0].sendrawtransaction, raw_tx5)
+ assert_raises_rpc_error(-26, NOT_FINAL_ERROR, self.nodes[0].sendrawtransaction, raw_tx5)
# Test mempool-BIP68 consistency after reorg
#
@@ -355,7 +353,7 @@ class BIP68Test(BitcoinTestFramework):
tx3.vout = [CTxOut(int(tx2.vout[0].nValue - self.relayfee*COIN), CScript([b'a']))]
tx3.rehash()
- assert_raises_jsonrpc(-26, NOT_FINAL_ERROR, self.nodes[0].sendrawtransaction, ToHex(tx3))
+ assert_raises_rpc_error(-26, NOT_FINAL_ERROR, self.nodes[0].sendrawtransaction, ToHex(tx3))
# make a block that violates bip68; ensure that the tip updates
tip = int(self.nodes[0].getbestblockhash(), 16)
@@ -371,11 +369,14 @@ class BIP68Test(BitcoinTestFramework):
def activateCSV(self):
# activation should happen at block height 432 (3 periods)
+ # getblockchaininfo will show CSV as active at block 431 (144 * 3 -1) since it's returning whether CSV is active for the next block.
min_activation_height = 432
height = self.nodes[0].getblockcount()
- assert(height < min_activation_height)
- self.nodes[0].generate(min_activation_height-height)
- assert(get_bip9_status(self.nodes[0], 'csv')['status'] == 'active')
+ assert_greater_than(min_activation_height - height, 2)
+ self.nodes[0].generate(min_activation_height - height - 2)
+ assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], "locked_in")
+ self.nodes[0].generate(1)
+ assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], "active")
sync_blocks(self.nodes)
# Use self.nodes[1] to test that version 2 transactions are standard.
@@ -387,7 +388,7 @@ class BIP68Test(BitcoinTestFramework):
tx = FromHex(CTransaction(), rawtxfund)
tx.nVersion = 2
tx_signed = self.nodes[1].signrawtransaction(ToHex(tx))["hex"]
- tx_id = self.nodes[1].sendrawtransaction(tx_signed)
+ self.nodes[1].sendrawtransaction(tx_signed)
if __name__ == '__main__':
BIP68Test().main()
diff --git a/test/functional/bip9-softforks.py b/test/functional/bip9-softforks.py
index f00232c9ff..ec4d1d9365 100755
--- a/test/functional/bip9-softforks.py
+++ b/test/functional/bip9-softforks.py
@@ -28,11 +28,10 @@ from test_framework.comptool import TestInstance, TestManager
from test_framework.script import CScript, OP_1NEGATE, OP_CHECKSEQUENCEVERIFY, OP_DROP
class BIP9SoftForksTest(ComparisonTestFramework):
-
- def __init__(self):
- super().__init__()
+ def set_test_params(self):
self.num_nodes = 1
self.extra_args = [['-whitelist=127.0.0.1']]
+ self.setup_clean_chain = True
def run_test(self):
self.test = TestManager(self, self.options.tmpdir)
@@ -241,12 +240,13 @@ class BIP9SoftForksTest(ComparisonTestFramework):
# Restart all
self.test.clear_all_connections()
self.stop_nodes()
+ self.nodes = []
shutil.rmtree(self.options.tmpdir + "/node0")
self.setup_chain()
self.setup_network()
self.test.add_all_connections(self.nodes)
NetworkThread().start()
- self.test.test_nodes[0].wait_for_verack()
+ self.test.p2p_connections[0].wait_for_verack()
def get_tests(self):
for test in itertools.chain(
diff --git a/test/functional/bipdersig-p2p.py b/test/functional/bipdersig-p2p.py
index 31c7ebba90..7a3e565e2c 100755
--- a/test/functional/bipdersig-p2p.py
+++ b/test/functional/bipdersig-p2p.py
@@ -4,28 +4,24 @@
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test BIP66 (DER SIG).
-Connect to a single node.
-Mine 2 (version 2) blocks (save the coinbases for later).
-Generate 98 more version 2 blocks, verify the node accepts.
-Mine 749 version 3 blocks, verify the node accepts.
-Check that the new DERSIG rules are not enforced on the 750th version 3 block.
-Check that the new DERSIG rules are enforced on the 751st version 3 block.
-Mine 199 new version blocks.
-Mine 1 old-version block.
-Mine 1 new version block.
-Mine 1 old version block, see that the node rejects.
+Test that the DERSIG soft-fork activates at (regtest) height 1251.
"""
-from test_framework.test_framework import ComparisonTestFramework
+from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
-from test_framework.mininode import CTransaction, NetworkThread
+from test_framework.mininode import *
from test_framework.blocktools import create_coinbase, create_block
-from test_framework.comptool import TestInstance, TestManager
from test_framework.script import CScript
from io import BytesIO
-import time
-# A canonical signature consists of:
+DERSIG_HEIGHT = 1251
+
+# Reject codes that we might receive in this test
+REJECT_INVALID = 16
+REJECT_OBSOLETE = 17
+REJECT_NONSTANDARD = 64
+
+# A canonical signature consists of:
# <30> <total len> <02> <len R> <R> <02> <len S> <S> <hashtype>
def unDERify(tx):
"""
@@ -40,143 +36,118 @@ def unDERify(tx):
else:
newscript.append(i)
tx.vin[0].scriptSig = CScript(newscript)
-
-class BIP66Test(ComparisonTestFramework):
- def __init__(self):
- super().__init__()
+def create_transaction(node, coinbase, to_address, amount):
+ from_txid = node.getblock(coinbase)['tx'][0]
+ inputs = [{ "txid" : from_txid, "vout" : 0}]
+ outputs = { to_address : amount }
+ rawtx = node.createrawtransaction(inputs, outputs)
+ signresult = node.signrawtransaction(rawtx)
+ tx = CTransaction()
+ tx.deserialize(BytesIO(hex_str_to_bytes(signresult['hex'])))
+ return tx
+
+class BIP66Test(BitcoinTestFramework):
+ def set_test_params(self):
self.num_nodes = 1
+ self.extra_args = [['-promiscuousmempoolflags=1', '-whitelist=127.0.0.1']]
+ self.setup_clean_chain = True
def run_test(self):
- test = TestManager(self, self.options.tmpdir)
- test.add_all_connections(self.nodes)
+ self.nodes[0].add_p2p_connection(P2PInterface())
+
NetworkThread().start() # Start up network handling in another thread
- test.run()
-
- def create_transaction(self, node, coinbase, to_address, amount):
- from_txid = node.getblock(coinbase)['tx'][0]
- inputs = [{ "txid" : from_txid, "vout" : 0}]
- outputs = { to_address : amount }
- rawtx = node.createrawtransaction(inputs, outputs)
- signresult = node.signrawtransaction(rawtx)
- tx = CTransaction()
- f = BytesIO(hex_str_to_bytes(signresult['hex']))
- tx.deserialize(f)
- return tx
-
- def get_tests(self):
-
- self.coinbase_blocks = self.nodes[0].generate(2)
- height = 3 # height of the next block to build
- self.tip = int("0x" + self.nodes[0].getbestblockhash(), 0)
+
+ # wait_for_verack ensures that the P2P connection is fully up.
+ self.nodes[0].p2p.wait_for_verack()
+
+ self.log.info("Mining %d blocks", DERSIG_HEIGHT - 2)
+ self.coinbase_blocks = self.nodes[0].generate(DERSIG_HEIGHT - 2)
self.nodeaddress = self.nodes[0].getnewaddress()
- self.last_block_time = int(time.time())
-
- ''' 298 more version 2 blocks '''
- test_blocks = []
- for i in range(298):
- block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
- block.nVersion = 2
- block.rehash()
- block.solve()
- test_blocks.append([block, True])
- self.last_block_time += 1
- self.tip = block.sha256
- height += 1
- yield TestInstance(test_blocks, sync_every_block=False)
-
- ''' Mine 749 version 3 blocks '''
- test_blocks = []
- for i in range(749):
- block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
- block.nVersion = 3
- block.rehash()
- block.solve()
- test_blocks.append([block, True])
- self.last_block_time += 1
- self.tip = block.sha256
- height += 1
- yield TestInstance(test_blocks, sync_every_block=False)
-
- '''
- Check that the new DERSIG rules are not enforced in the 750th
- version 3 block.
- '''
- spendtx = self.create_transaction(self.nodes[0],
- self.coinbase_blocks[0], self.nodeaddress, 1.0)
+
+ self.log.info("Test that a transaction with non-DER signature can still appear in a block")
+
+ spendtx = create_transaction(self.nodes[0], self.coinbase_blocks[0],
+ self.nodeaddress, 1.0)
unDERify(spendtx)
spendtx.rehash()
- block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
- block.nVersion = 3
+ tip = self.nodes[0].getbestblockhash()
+ block_time = self.nodes[0].getblockheader(tip)['mediantime'] + 1
+ block = create_block(int(tip, 16), create_coinbase(DERSIG_HEIGHT - 1), block_time)
+ block.nVersion = 2
block.vtx.append(spendtx)
block.hashMerkleRoot = block.calc_merkle_root()
block.rehash()
block.solve()
- self.last_block_time += 1
- self.tip = block.sha256
- height += 1
- yield TestInstance([[block, True]])
-
- ''' Mine 199 new version blocks on last valid tip '''
- test_blocks = []
- for i in range(199):
- block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
- block.nVersion = 3
- block.rehash()
- block.solve()
- test_blocks.append([block, True])
- self.last_block_time += 1
- self.tip = block.sha256
- height += 1
- yield TestInstance(test_blocks, sync_every_block=False)
-
- ''' Mine 1 old version block '''
- block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
+ self.nodes[0].p2p.send_and_ping(msg_block(block))
+ assert_equal(self.nodes[0].getbestblockhash(), block.hash)
+
+ self.log.info("Test that blocks must now be at least version 3")
+ tip = block.sha256
+ block_time += 1
+ block = create_block(tip, create_coinbase(DERSIG_HEIGHT), block_time)
block.nVersion = 2
block.rehash()
block.solve()
- self.last_block_time += 1
- self.tip = block.sha256
- height += 1
- yield TestInstance([[block, True]])
+ self.nodes[0].p2p.send_and_ping(msg_block(block))
+ assert_equal(int(self.nodes[0].getbestblockhash(), 16), tip)
+
+ wait_until(lambda: "reject" in self.nodes[0].p2p.last_message.keys(), lock=mininode_lock)
+ with mininode_lock:
+ assert_equal(self.nodes[0].p2p.last_message["reject"].code, REJECT_OBSOLETE)
+ assert_equal(self.nodes[0].p2p.last_message["reject"].reason, b'bad-version(0x00000002)')
+ assert_equal(self.nodes[0].p2p.last_message["reject"].data, block.sha256)
+ del self.nodes[0].p2p.last_message["reject"]
- ''' Mine 1 new version block '''
- block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
+ self.log.info("Test that transactions with non-DER signatures cannot appear in a block")
block.nVersion = 3
- block.rehash()
- block.solve()
- self.last_block_time += 1
- self.tip = block.sha256
- height += 1
- yield TestInstance([[block, True]])
-
- '''
- Check that the new DERSIG rules are enforced in the 951st version 3
- block.
- '''
- spendtx = self.create_transaction(self.nodes[0],
- self.coinbase_blocks[1], self.nodeaddress, 1.0)
+
+ spendtx = create_transaction(self.nodes[0], self.coinbase_blocks[1],
+ self.nodeaddress, 1.0)
unDERify(spendtx)
spendtx.rehash()
- block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
- block.nVersion = 3
+ # First we show that this tx is valid except for DERSIG by getting it
+ # accepted to the mempool (which we can achieve with
+ # -promiscuousmempoolflags).
+ self.nodes[0].p2p.send_and_ping(msg_tx(spendtx))
+ assert spendtx.hash in self.nodes[0].getrawmempool()
+
+ # Now we verify that a block with this transaction is invalid.
block.vtx.append(spendtx)
block.hashMerkleRoot = block.calc_merkle_root()
block.rehash()
block.solve()
- self.last_block_time += 1
- yield TestInstance([[block, False]])
- ''' Mine 1 old version block, should be invalid '''
- block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
- block.nVersion = 2
+ self.nodes[0].p2p.send_and_ping(msg_block(block))
+ assert_equal(int(self.nodes[0].getbestblockhash(), 16), tip)
+
+ wait_until(lambda: "reject" in self.nodes[0].p2p.last_message.keys(), lock=mininode_lock)
+ with mininode_lock:
+ # We can receive different reject messages depending on whether
+ # bitcoind is running with multiple script check threads. If script
+ # check threads are not in use, then transaction script validation
+ # happens sequentially, and bitcoind produces more specific reject
+ # reasons.
+ assert self.nodes[0].p2p.last_message["reject"].code in [REJECT_INVALID, REJECT_NONSTANDARD]
+ assert_equal(self.nodes[0].p2p.last_message["reject"].data, block.sha256)
+ if self.nodes[0].p2p.last_message["reject"].code == REJECT_INVALID:
+ # Generic rejection when a block is invalid
+ assert_equal(self.nodes[0].p2p.last_message["reject"].reason, b'block-validation-failed')
+ else:
+ assert b'Non-canonical DER signature' in self.nodes[0].p2p.last_message["reject"].reason
+
+ self.log.info("Test that a version 3 block with a DERSIG-compliant transaction is accepted")
+ block.vtx[1] = create_transaction(self.nodes[0],
+ self.coinbase_blocks[1], self.nodeaddress, 1.0)
+ block.hashMerkleRoot = block.calc_merkle_root()
block.rehash()
block.solve()
- self.last_block_time += 1
- yield TestInstance([[block, False]])
+
+ self.nodes[0].p2p.send_and_ping(msg_block(block))
+ assert_equal(int(self.nodes[0].getbestblockhash(), 16), block.sha256)
if __name__ == '__main__':
BIP66Test().main()
diff --git a/test/functional/bipdersig.py b/test/functional/bipdersig.py
deleted file mode 100755
index 41f88fb664..0000000000
--- a/test/functional/bipdersig.py
+++ /dev/null
@@ -1,81 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (c) 2014-2016 The Bitcoin Core developers
-# Distributed under the MIT software license, see the accompanying
-# file COPYING or http://www.opensource.org/licenses/mit-license.php.
-"""Test the BIP66 changeover logic."""
-
-from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import *
-
-class BIP66Test(BitcoinTestFramework):
- def __init__(self):
- super().__init__()
- self.num_nodes = 3
- self.setup_clean_chain = False
- self.extra_args = [[], ["-blockversion=2"], ["-blockversion=3"]]
-
- def setup_network(self):
- self.setup_nodes()
- connect_nodes(self.nodes[1], 0)
- connect_nodes(self.nodes[2], 0)
- self.sync_all()
-
- def run_test(self):
- cnt = self.nodes[0].getblockcount()
-
- # Mine some old-version blocks
- self.nodes[1].generate(100)
- self.sync_all()
- if (self.nodes[0].getblockcount() != cnt + 100):
- raise AssertionError("Failed to mine 100 version=2 blocks")
-
- # Mine 750 new-version blocks
- for i in range(15):
- self.nodes[2].generate(50)
- self.sync_all()
- if (self.nodes[0].getblockcount() != cnt + 850):
- raise AssertionError("Failed to mine 750 version=3 blocks")
-
- # TODO: check that new DERSIG rules are not enforced
-
- # Mine 1 new-version block
- self.nodes[2].generate(1)
- self.sync_all()
- if (self.nodes[0].getblockcount() != cnt + 851):
- raise AssertionError("Failed to mine a version=3 blocks")
-
- # TODO: check that new DERSIG rules are enforced
-
- # Mine 198 new-version blocks
- for i in range(2):
- self.nodes[2].generate(99)
- self.sync_all()
- if (self.nodes[0].getblockcount() != cnt + 1049):
- raise AssertionError("Failed to mine 198 version=3 blocks")
-
- # Mine 1 old-version block
- self.nodes[1].generate(1)
- self.sync_all()
- if (self.nodes[0].getblockcount() != cnt + 1050):
- raise AssertionError("Failed to mine a version=2 block after 949 version=3 blocks")
-
- # Mine 1 new-version blocks
- self.nodes[2].generate(1)
- self.sync_all()
- if (self.nodes[0].getblockcount() != cnt + 1051):
- raise AssertionError("Failed to mine a version=3 block")
-
- # Mine 1 old-version blocks. This should fail
- assert_raises_jsonrpc(-1, "CreateNewBlock: TestBlockValidity failed: bad-version(0x00000002)", self.nodes[1].generate, 1)
- self.sync_all()
- if (self.nodes[0].getblockcount() != cnt + 1051):
- raise AssertionError("Accepted a version=2 block after 950 version=3 blocks")
-
- # Mine 1 new-version blocks
- self.nodes[2].generate(1)
- self.sync_all()
- if (self.nodes[0].getblockcount() != cnt + 1052):
- raise AssertionError("Failed to mine a version=3 block")
-
-if __name__ == '__main__':
- BIP66Test().main()
diff --git a/test/functional/bitcoin_cli.py b/test/functional/bitcoin_cli.py
new file mode 100755
index 0000000000..d1cd3b3620
--- /dev/null
+++ b/test/functional/bitcoin_cli.py
@@ -0,0 +1,65 @@
+#!/usr/bin/env python3
+# Copyright (c) 2017 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test bitcoin-cli"""
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import assert_equal, assert_raises_process_error, get_auth_cookie
+
+class TestBitcoinCli(BitcoinTestFramework):
+
+ def set_test_params(self):
+ self.setup_clean_chain = True
+ self.num_nodes = 1
+
+ def run_test(self):
+ """Main test logic"""
+
+ self.log.info("Compare responses from getwalletinfo RPC and `bitcoin-cli getwalletinfo`")
+ cli_response = self.nodes[0].cli.getwalletinfo()
+ rpc_response = self.nodes[0].getwalletinfo()
+ assert_equal(cli_response, rpc_response)
+
+ self.log.info("Compare responses from getblockchaininfo RPC and `bitcoin-cli getblockchaininfo`")
+ cli_response = self.nodes[0].cli.getblockchaininfo()
+ rpc_response = self.nodes[0].getblockchaininfo()
+ assert_equal(cli_response, rpc_response)
+
+ user, password = get_auth_cookie(self.nodes[0].datadir)
+
+ self.log.info("Test -stdinrpcpass option")
+ assert_equal(0, self.nodes[0].cli('-rpcuser=%s' % user, '-stdinrpcpass', input=password).getblockcount())
+ assert_raises_process_error(1, "incorrect rpcuser or rpcpassword", self.nodes[0].cli('-rpcuser=%s' % user, '-stdinrpcpass', input="foo").echo)
+
+ self.log.info("Test -stdin and -stdinrpcpass")
+ assert_equal(["foo", "bar"], self.nodes[0].cli('-rpcuser=%s' % user, '-stdin', '-stdinrpcpass', input=password + "\nfoo\nbar").echo())
+ assert_raises_process_error(1, "incorrect rpcuser or rpcpassword", self.nodes[0].cli('-rpcuser=%s' % user, '-stdin', '-stdinrpcpass', input="foo").echo)
+
+ self.log.info("Make sure that -getinfo with arguments fails")
+ assert_raises_process_error(1, "-getinfo takes no arguments", self.nodes[0].cli('-getinfo').help)
+
+ self.log.info("Compare responses from `bitcoin-cli -getinfo` and the RPCs data is retrieved from.")
+ cli_get_info = self.nodes[0].cli().send_cli('-getinfo')
+ wallet_info = self.nodes[0].getwalletinfo()
+ network_info = self.nodes[0].getnetworkinfo()
+ blockchain_info = self.nodes[0].getblockchaininfo()
+
+ assert_equal(cli_get_info['version'], network_info['version'])
+ assert_equal(cli_get_info['protocolversion'], network_info['protocolversion'])
+ assert_equal(cli_get_info['walletversion'], wallet_info['walletversion'])
+ assert_equal(cli_get_info['balance'], wallet_info['balance'])
+ assert_equal(cli_get_info['blocks'], blockchain_info['blocks'])
+ assert_equal(cli_get_info['timeoffset'], network_info['timeoffset'])
+ assert_equal(cli_get_info['connections'], network_info['connections'])
+ assert_equal(cli_get_info['proxy'], network_info['networks'][0]['proxy'])
+ assert_equal(cli_get_info['difficulty'], blockchain_info['difficulty'])
+ assert_equal(cli_get_info['testnet'], blockchain_info['chain'] == "test")
+ assert_equal(cli_get_info['balance'], wallet_info['balance'])
+ assert_equal(cli_get_info['keypoololdest'], wallet_info['keypoololdest'])
+ assert_equal(cli_get_info['keypoolsize'], wallet_info['keypoolsize'])
+ assert_equal(cli_get_info['paytxfee'], wallet_info['paytxfee'])
+ assert_equal(cli_get_info['relayfee'], network_info['relayfee'])
+ # unlocked_until is not tested because the wallet is not encrypted
+
+if __name__ == '__main__':
+ TestBitcoinCli().main()
diff --git a/test/functional/blockchain.py b/test/functional/blockchain.py
index a7034e6bcd..49fafbc9aa 100755
--- a/test/functional/blockchain.py
+++ b/test/functional/blockchain.py
@@ -5,6 +5,7 @@
"""Test RPCs related to blockchainstate.
Test the following RPCs:
+ - getblockchaininfo
- gettxoutsetinfo
- getdifficulty
- getbestblockhash
@@ -21,25 +22,24 @@ from decimal import Decimal
import http.client
import subprocess
-from test_framework.test_framework import (BitcoinTestFramework, BITCOIND_PROC_WAIT_TIMEOUT)
+from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
+ assert_greater_than,
+ assert_greater_than_or_equal,
assert_raises,
- assert_raises_jsonrpc,
+ assert_raises_rpc_error,
assert_is_hex_string,
assert_is_hash_string,
)
-
class BlockchainTest(BitcoinTestFramework):
-
- def __init__(self):
- super().__init__()
- self.setup_clean_chain = False
+ def set_test_params(self):
self.num_nodes = 1
- self.extra_args = [['-stopatheight=207']]
+ self.extra_args = [['-stopatheight=207', '-prune=1']]
def run_test(self):
+ self._test_getblockchaininfo()
self._test_getchaintxstats()
self._test_gettxoutsetinfo()
self._test_getblockheader()
@@ -48,6 +48,57 @@ class BlockchainTest(BitcoinTestFramework):
self._test_stopatheight()
assert self.nodes[0].verifychain(4, 0)
+ def _test_getblockchaininfo(self):
+ self.log.info("Test getblockchaininfo")
+
+ keys = [
+ 'bestblockhash',
+ 'bip9_softforks',
+ 'blocks',
+ 'chain',
+ 'chainwork',
+ 'difficulty',
+ 'headers',
+ 'initialblockdownload',
+ 'mediantime',
+ 'pruned',
+ 'size_on_disk',
+ 'softforks',
+ 'verificationprogress',
+ 'warnings',
+ ]
+ res = self.nodes[0].getblockchaininfo()
+
+ # result should have these additional pruning keys if manual pruning is enabled
+ assert_equal(sorted(res.keys()), sorted(['pruneheight', 'automatic_pruning'] + keys))
+
+ # size_on_disk should be > 0
+ assert_greater_than(res['size_on_disk'], 0)
+
+ # pruneheight should be greater or equal to 0
+ assert_greater_than_or_equal(res['pruneheight'], 0)
+
+ # check other pruning fields given that prune=1
+ assert res['pruned']
+ assert not res['automatic_pruning']
+
+ self.restart_node(0, ['-stopatheight=207'])
+ res = self.nodes[0].getblockchaininfo()
+ # should have exact keys
+ assert_equal(sorted(res.keys()), keys)
+
+ self.restart_node(0, ['-stopatheight=207', '-prune=550'])
+ res = self.nodes[0].getblockchaininfo()
+ # result should have these additional pruning keys if prune=550
+ assert_equal(sorted(res.keys()), sorted(['pruneheight', 'automatic_pruning', 'prune_target_size'] + keys))
+
+ # check related fields
+ assert res['pruned']
+ assert_equal(res['pruneheight'], 0)
+ assert res['automatic_pruning']
+ assert_equal(res['prune_target_size'], 576716800)
+ assert_greater_than(res['size_on_disk'], 0)
+
def _test_getchaintxstats(self):
chaintxstats = self.nodes[0].getchaintxstats(1)
# 200 txs plus genesis tx
@@ -56,6 +107,28 @@ class BlockchainTest(BitcoinTestFramework):
# we have to round because of binary math
assert_equal(round(chaintxstats['txrate'] * 600, 10), Decimal(1))
+ b1 = self.nodes[0].getblock(self.nodes[0].getblockhash(1))
+ b200 = self.nodes[0].getblock(self.nodes[0].getblockhash(200))
+ time_diff = b200['mediantime'] - b1['mediantime']
+
+ chaintxstats = self.nodes[0].getchaintxstats()
+ assert_equal(chaintxstats['time'], b200['time'])
+ assert_equal(chaintxstats['txcount'], 201)
+ assert_equal(chaintxstats['window_block_count'], 199)
+ assert_equal(chaintxstats['window_tx_count'], 199)
+ assert_equal(chaintxstats['window_interval'], time_diff)
+ assert_equal(round(chaintxstats['txrate'] * time_diff, 10), Decimal(199))
+
+ chaintxstats = self.nodes[0].getchaintxstats(blockhash=b1['hash'])
+ assert_equal(chaintxstats['time'], b1['time'])
+ assert_equal(chaintxstats['txcount'], 2)
+ assert_equal(chaintxstats['window_block_count'], 0)
+ assert('window_tx_count' not in chaintxstats)
+ assert('window_interval' not in chaintxstats)
+ assert('txrate' not in chaintxstats)
+
+ assert_raises_rpc_error(-8, "Invalid block count: should be between 0 and the block's height - 1", self.nodes[0].getchaintxstats, 201)
+
def _test_gettxoutsetinfo(self):
node = self.nodes[0]
res = node.gettxoutsetinfo()
@@ -100,7 +173,7 @@ class BlockchainTest(BitcoinTestFramework):
def _test_getblockheader(self):
node = self.nodes[0]
- assert_raises_jsonrpc(-5, "Block not found",
+ assert_raises_rpc_error(-5, "Block not found",
node.getblockheader, "nonsense")
besthash = node.getbestblockhash()
@@ -139,14 +212,14 @@ class BlockchainTest(BitcoinTestFramework):
self.nodes[0].generate(6)
assert_equal(self.nodes[0].getblockcount(), 206)
self.log.debug('Node should not stop at this height')
- assert_raises(subprocess.TimeoutExpired, lambda: self.bitcoind_processes[0].wait(timeout=3))
+ assert_raises(subprocess.TimeoutExpired, lambda: self.nodes[0].process.wait(timeout=3))
try:
self.nodes[0].generate(1)
except (ConnectionError, http.client.BadStatusLine):
pass # The node already shut down before response
self.log.debug('Node should stop at this height...')
- self.bitcoind_processes[0].wait(timeout=BITCOIND_PROC_WAIT_TIMEOUT)
- self.nodes[0] = self.start_node(0, self.options.tmpdir)
+ self.nodes[0].wait_until_stopped()
+ self.start_node(0)
assert_equal(self.nodes[0].getblockcount(), 207)
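# --- illustrative sketch (not part of the patch) -----------------------------
# The blockchain.py assertions above encode two small pieces of arithmetic.
# The helpers below are made-up names used only to spell that arithmetic out;
# the constants (576716800, the 199-tx window) come from the hunks themselves.

def prune_target_bytes(prune_mib):
    # -prune is given in MiB; getblockchaininfo reports the target in bytes.
    return prune_mib * 1024 * 1024

assert prune_target_bytes(550) == 576716800  # matches res['prune_target_size'] for -prune=550

def implied_txrate(window_tx_count, window_interval):
    # getchaintxstats reports txrate as transactions per second over the window,
    # so txrate * window_interval should round back to window_tx_count.
    return window_tx_count / window_interval

assert round(implied_txrate(199, 1234) * 1234, 10) == 199  # 1234 is an arbitrary example interval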
diff --git a/test/functional/bumpfee.py b/test/functional/bumpfee.py
index 9237f09240..008e83d5b2 100755
--- a/test/functional/bumpfee.py
+++ b/test/functional/bumpfee.py
@@ -30,26 +30,21 @@ WALLET_PASSPHRASE_TIMEOUT = 3600
class BumpFeeTest(BitcoinTestFramework):
- def __init__(self):
- super().__init__()
+ def set_test_params(self):
self.num_nodes = 2
self.setup_clean_chain = True
+ self.extra_args = [["-prematurewitness", "-walletprematurewitness", "-walletrbf={}".format(i)]
+ for i in range(self.num_nodes)]
- def setup_network(self, split=False):
- extra_args = [["-prematurewitness", "-walletprematurewitness", "-walletrbf={}".format(i)]
- for i in range(self.num_nodes)]
- self.nodes = self.start_nodes(self.num_nodes, self.options.tmpdir, extra_args)
-
+ def run_test(self):
# Encrypt wallet for test_locked_wallet_fails test
- self.nodes[1].encryptwallet(WALLET_PASSPHRASE)
- self.bitcoind_processes[1].wait()
- self.nodes[1] = self.start_node(1, self.options.tmpdir, extra_args[1])
+ self.nodes[1].node_encrypt_wallet(WALLET_PASSPHRASE)
+ self.start_node(1)
self.nodes[1].walletpassphrase(WALLET_PASSPHRASE, WALLET_PASSPHRASE_TIMEOUT)
connect_nodes_bi(self.nodes, 0, 1)
self.sync_all()
- def run_test(self):
peer_node, rbf_node = self.nodes
rbf_node_address = rbf_node.getnewaddress()
@@ -90,7 +85,7 @@ def test_simple_bumpfee_succeeds(rbf_node, peer_node, dest_address):
bumped_tx = rbf_node.bumpfee(rbfid)
assert_equal(bumped_tx["errors"], [])
assert bumped_tx["fee"] - abs(rbftx["fee"]) > 0
- # check that bumped_tx propogates, original tx was evicted and has a wallet conflict
+ # check that bumped_tx propagates, original tx was evicted and has a wallet conflict
sync_mempools((rbf_node, peer_node))
assert bumped_tx["txid"] in rbf_node.getrawmempool()
assert bumped_tx["txid"] in peer_node.getrawmempool()
@@ -138,7 +133,7 @@ def test_segwit_bumpfee_succeeds(rbf_node, dest_address):
def test_nonrbf_bumpfee_fails(peer_node, dest_address):
# cannot replace a non RBF transaction (from node which did not enable RBF)
not_rbfid = peer_node.sendtoaddress(dest_address, Decimal("0.00090000"))
- assert_raises_jsonrpc(-4, "not BIP 125 replaceable", peer_node.bumpfee, not_rbfid)
+ assert_raises_rpc_error(-4, "not BIP 125 replaceable", peer_node.bumpfee, not_rbfid)
def test_notmine_bumpfee_fails(rbf_node, peer_node, dest_address):
@@ -158,7 +153,7 @@ def test_notmine_bumpfee_fails(rbf_node, peer_node, dest_address):
signedtx = rbf_node.signrawtransaction(rawtx)
signedtx = peer_node.signrawtransaction(signedtx["hex"])
rbfid = rbf_node.sendrawtransaction(signedtx["hex"])
- assert_raises_jsonrpc(-4, "Transaction contains inputs that don't belong to this wallet",
+ assert_raises_rpc_error(-4, "Transaction contains inputs that don't belong to this wallet",
rbf_node.bumpfee, rbfid)
@@ -168,8 +163,8 @@ def test_bumpfee_with_descendant_fails(rbf_node, rbf_node_address, dest_address)
parent_id = spend_one_input(rbf_node, rbf_node_address)
tx = rbf_node.createrawtransaction([{"txid": parent_id, "vout": 0}], {dest_address: 0.00020000})
tx = rbf_node.signrawtransaction(tx)
- txid = rbf_node.sendrawtransaction(tx["hex"])
- assert_raises_jsonrpc(-8, "Transaction has descendants in the wallet", rbf_node.bumpfee, parent_id)
+ rbf_node.sendrawtransaction(tx["hex"])
+ assert_raises_rpc_error(-8, "Transaction has descendants in the wallet", rbf_node.bumpfee, parent_id)
def test_small_output_fails(rbf_node, dest_address):
@@ -178,7 +173,7 @@ def test_small_output_fails(rbf_node, dest_address):
rbf_node.bumpfee(rbfid, {"totalFee": 50000})
rbfid = spend_one_input(rbf_node, dest_address)
- assert_raises_jsonrpc(-4, "Change output is too small", rbf_node.bumpfee, rbfid, {"totalFee": 50001})
+ assert_raises_rpc_error(-4, "Change output is too small", rbf_node.bumpfee, rbfid, {"totalFee": 50001})
def test_dust_to_fee(rbf_node, dest_address):
@@ -210,7 +205,7 @@ def test_rebumping(rbf_node, dest_address):
# check that re-bumping the original tx fails, but bumping the bumper succeeds
rbfid = spend_one_input(rbf_node, dest_address)
bumped = rbf_node.bumpfee(rbfid, {"totalFee": 2000})
- assert_raises_jsonrpc(-4, "already bumped", rbf_node.bumpfee, rbfid, {"totalFee": 3000})
+ assert_raises_rpc_error(-4, "already bumped", rbf_node.bumpfee, rbfid, {"totalFee": 3000})
rbf_node.bumpfee(bumped["txid"], {"totalFee": 3000})
@@ -218,7 +213,7 @@ def test_rebumping_not_replaceable(rbf_node, dest_address):
# check that re-bumping a non-replaceable bump tx fails
rbfid = spend_one_input(rbf_node, dest_address)
bumped = rbf_node.bumpfee(rbfid, {"totalFee": 10000, "replaceable": False})
- assert_raises_jsonrpc(-4, "Transaction is not BIP 125 replaceable", rbf_node.bumpfee, bumped["txid"],
+ assert_raises_rpc_error(-4, "Transaction is not BIP 125 replaceable", rbf_node.bumpfee, bumped["txid"],
{"totalFee": 20000})
@@ -269,7 +264,7 @@ def test_bumpfee_metadata(rbf_node, dest_address):
def test_locked_wallet_fails(rbf_node, dest_address):
rbfid = spend_one_input(rbf_node, dest_address)
rbf_node.walletlock()
- assert_raises_jsonrpc(-13, "Please enter the wallet passphrase with walletpassphrase first.",
+ assert_raises_rpc_error(-13, "Please enter the wallet passphrase with walletpassphrase first.",
rbf_node.bumpfee, rbfid)
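# --- illustrative sketch (not part of the patch) -----------------------------
# This patch renames assert_raises_jsonrpc to assert_raises_rpc_error across
# the suite. The real helper lives in test_framework/util.py; the stand-in
# below (names invented here) only illustrates the call shape used at the call
# sites: an exact error code, a substring of the error message, then the
# callable and its arguments.

class _FakeRPCError(Exception):
    """Stand-in for JSONRPCException: carries a dict with 'code' and 'message'."""
    def __init__(self, code, message):
        super().__init__(message)
        self.error = {"code": code, "message": message}

def assert_raises_rpc_error_sketch(code, message, fun, *args, **kwds):
    # Require fun(*args, **kwds) to raise an RPC error with this exact code
    # and a message containing the expected substring.
    try:
        fun(*args, **kwds)
    except _FakeRPCError as e:
        assert e.error["code"] == code, "wrong error code: %r" % (e.error,)
        assert message in e.error["message"], "wrong error message: %r" % (e.error,)
    else:
        raise AssertionError("No exception raised")

def _stub_bumpfee(txid):
    raise _FakeRPCError(-4, "Transaction is not BIP 125 replaceable")

assert_raises_rpc_error_sketch(-4, "not BIP 125 replaceable", _stub_bumpfee, "deadbeef")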
diff --git a/test/functional/create_cache.py b/test/functional/create_cache.py
index 39c4c0f47e..7d4d1a529b 100755
--- a/test/functional/create_cache.py
+++ b/test/functional/create_cache.py
@@ -12,13 +12,10 @@ tests are being run in parallel.
from test_framework.test_framework import BitcoinTestFramework
class CreateCache(BitcoinTestFramework):
+ # Test network and test nodes are not required:
- def __init__(self):
- super().__init__()
-
- # Test network and test nodes are not required:
+ def set_test_params(self):
self.num_nodes = 0
- self.nodes = []
def setup_network(self):
pass
diff --git a/test/functional/dbcrash.py b/test/functional/dbcrash.py
index 6f877f8362..24b9765b4e 100755
--- a/test/functional/dbcrash.py
+++ b/test/functional/dbcrash.py
@@ -2,21 +2,7 @@
# Copyright (c) 2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
-"""Test recovery from a crash during chainstate writing."""
-
-from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import *
-from test_framework.script import *
-from test_framework.mininode import *
-import random
-try:
- import http.client as httplib
-except ImportError:
- import httplib
-import errno
-
-'''
-Test structure:
+"""Test recovery from a crash during chainstate writing.
- 4 nodes
* node0, node1, and node2 will have different dbcrash ratios, and different
@@ -37,50 +23,62 @@ Test structure:
* submit block to node
* if node crashed on/after submitting:
- restart until recovery succeeds
- - check that utxo matches node3 using gettxoutsetinfo
-'''
+ - check that utxo matches node3 using gettxoutsetinfo"""
-class ChainstateWriteCrashTest(BitcoinTestFramework):
+import errno
+import http.client
+import random
+import sys
+import time
+
+from test_framework.mininode import *
+from test_framework.script import *
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import *
+
+HTTP_DISCONNECT_ERRORS = [http.client.CannotSendRequest]
+try:
+ HTTP_DISCONNECT_ERRORS.append(http.client.RemoteDisconnected)
+except AttributeError:
+ pass
- def __init__(self):
- super().__init__()
+class ChainstateWriteCrashTest(BitcoinTestFramework):
+ def set_test_params(self):
self.num_nodes = 4
self.setup_clean_chain = False
# Set -maxmempool=0 to turn off mempool memory sharing with dbcache
# Set -rpcservertimeout=900 to reduce socket disconnects in this
# long-running test
- self.base_args = ["-limitdescendantsize=0", "-maxmempool=0", "-rpcservertimeout=900"]
+ self.base_args = ["-limitdescendantsize=0", "-maxmempool=0", "-rpcservertimeout=900", "-dbbatchsize=200000"]
# Set different crash ratios and cache sizes. Note that not all of
# -dbcache goes to pcoinsTip.
- self.node0_args = ["-dbcrashratio=8", "-dbcache=4", "-dbbatchsize=200000"] + self.base_args
- self.node1_args = ["-dbcrashratio=16", "-dbcache=8", "-dbbatchsize=200000"] + self.base_args
- self.node2_args = ["-dbcrashratio=24", "-dbcache=16", "-dbbatchsize=200000"] + self.base_args
+ self.node0_args = ["-dbcrashratio=8", "-dbcache=4"] + self.base_args
+ self.node1_args = ["-dbcrashratio=16", "-dbcache=8"] + self.base_args
+ self.node2_args = ["-dbcrashratio=24", "-dbcache=16"] + self.base_args
# Node3 is a normal node with default args, except will mine full blocks
self.node3_args = ["-blockmaxweight=4000000"]
self.extra_args = [self.node0_args, self.node1_args, self.node2_args, self.node3_args]
- # We'll track some test coverage statistics
- self.restart_counts = [0, 0, 0] # Track the restarts for nodes 0-2
- self.crashed_on_restart = 0 # Track count of crashes during recovery
-
def setup_network(self):
- self.setup_nodes()
+ # Need a bit of extra time for the nodes to start up for this test
+ self.add_nodes(self.num_nodes, extra_args=self.extra_args, timewait=90)
+ self.start_nodes()
# Leave them unconnected, we'll use submitblock directly in this test
- # Starts up a given node id, waits for the tip to reach the given block
- # hash, and calculates the utxo hash. Exceptions on startup should
- # indicate node crash (due to -dbcrashratio), in which case we try again.
- # Give up after 60 seconds.
- # Returns the utxo hash of the given node.
def restart_node(self, node_index, expected_tip):
+ """Start up a given node id, wait for the tip to reach the given block hash, and calculate the utxo hash.
+
+ Exceptions on startup should indicate a node crash (due to -dbcrashratio), in which case we try again. Give up
+ after 120 seconds. Returns the utxo hash of the given node."""
+
time_start = time.time()
- while time.time() - time_start < 60:
+ while time.time() - time_start < 120:
try:
# Any of these RPC calls could throw due to node crash
- self.nodes[node_index] = self.start_node(node_index, self.options.tmpdir, self.extra_args[node_index])
+ self.start_node(node_index)
self.nodes[node_index].waitforblock(expected_tip)
utxo_hash = self.nodes[node_index].gettxoutsetinfo()['hash_serialized_2']
return utxo_hash
@@ -99,14 +97,23 @@ class ChainstateWriteCrashTest(BitcoinTestFramework):
# and make sure that recovery happens.
raise AssertionError("Unable to successfully restart node %d in allotted time" % node_index)
- # Try submitting a block to the given node.
- # Catch any exceptions that indicate the node has crashed.
- # Returns true if the block was submitted successfully; false otherwise.
def submit_block_catch_error(self, node_index, block):
+ """Try submitting a block to the given node.
+
+ Catch any exceptions that indicate the node has crashed.
+ Returns true if the block was submitted successfully; false otherwise."""
+
try:
self.nodes[node_index].submitblock(block)
return True
- except (httplib.CannotSendRequest, httplib.RemoteDisconnected) as e:
+ except http.client.BadStatusLine as e:
+ # Prior to 3.5 BadStatusLine('') was raised for a remote disconnect error.
+ if sys.version_info[0] == 3 and sys.version_info[1] < 5 and e.line == "''":
+ self.log.debug("node %d submitblock raised exception: %s", node_index, e)
+ return False
+ else:
+ raise
+ except tuple(HTTP_DISCONNECT_ERRORS) as e:
self.log.debug("node %d submitblock raised exception: %s", node_index, e)
return False
except OSError as e:
@@ -118,11 +125,13 @@ class ChainstateWriteCrashTest(BitcoinTestFramework):
# Unexpected exception, raise
raise
- # Use submitblock to sync node3's chain with the other nodes
- # If submitblock fails, restart the node and get the new utxo hash.
def sync_node3blocks(self, block_hashes):
- # If any nodes crash while updating, we'll compare utxo hashes to
- # ensure recovery was successful.
+ """Use submitblock to sync node3's chain with the other nodes
+
+ If submitblock fails, restart the node and get the new utxo hash.
+ If any nodes crash while updating, we'll compare utxo hashes to
+ ensure recovery was successful."""
+
node3_utxo_hash = self.nodes[3].gettxoutsetinfo()['hash_serialized_2']
# Retrieve all the blocks from node3
@@ -161,9 +170,10 @@ class ChainstateWriteCrashTest(BitcoinTestFramework):
self.log.debug("Checking txoutsetinfo matches for node %d", i)
assert_equal(nodei_utxo_hash, node3_utxo_hash)
- # Verify that the utxo hash of each node matches node3.
- # Restart any nodes that crash while querying.
def verify_utxo_hash(self):
+ """Verify that the utxo hash of each node matches node3.
+
+ Restart any nodes that crash while querying."""
node3_utxo_hash = self.nodes[3].gettxoutsetinfo()['hash_serialized_2']
self.log.info("Verifying utxo hash matches for all nodes")
@@ -175,9 +185,8 @@ class ChainstateWriteCrashTest(BitcoinTestFramework):
nodei_utxo_hash = self.restart_node(i, self.nodes[3].getbestblockhash())
assert_equal(nodei_utxo_hash, node3_utxo_hash)
-
def generate_small_transactions(self, node, count, utxo_list):
- FEE = 1000 # TODO: replace this with node relay fee based calculation
+ FEE = 1000 # TODO: replace this with node relay fee based calculation
num_transactions = 0
random.shuffle(utxo_list)
while len(utxo_list) >= 2 and num_transactions < count:
@@ -186,8 +195,8 @@ class ChainstateWriteCrashTest(BitcoinTestFramework):
for i in range(2):
utxo = utxo_list.pop()
tx.vin.append(CTxIn(COutPoint(int(utxo['txid'], 16), utxo['vout'])))
- input_amount += int(utxo['amount']*COIN)
- output_amount = (input_amount - FEE)//3
+ input_amount += int(utxo['amount'] * COIN)
+ output_amount = (input_amount - FEE) // 3
if output_amount <= 0:
# Sanity check -- if we chose inputs that are too small, skip
@@ -202,6 +211,9 @@ class ChainstateWriteCrashTest(BitcoinTestFramework):
num_transactions += 1
def run_test(self):
+ # Track test coverage statistics
+ self.restart_counts = [0, 0, 0] # Track the restarts for nodes 0-2
+ self.crashed_on_restart = 0 # Track count of crashes during recovery
# Start by creating a lot of utxos on node3
initial_height = self.nodes[3].getblockcount()
@@ -210,7 +222,7 @@ class ChainstateWriteCrashTest(BitcoinTestFramework):
# Sync these blocks with the other nodes
block_hashes_to_sync = []
- for height in range(initial_height+1, self.nodes[3].getblockcount()+1):
+ for height in range(initial_height + 1, self.nodes[3].getblockcount() + 1):
block_hashes_to_sync.append(self.nodes[3].getblockhash(height))
self.log.debug("Syncing %d blocks with other nodes", len(block_hashes_to_sync))
@@ -233,13 +245,15 @@ class ChainstateWriteCrashTest(BitcoinTestFramework):
if random_height > starting_tip_height:
# Randomly reorg from this point with some probability (1/4 for
# tip, 1/5 for tip-1, ...)
- if random.random() < 1.0/(current_height + 4 - random_height):
+ if random.random() < 1.0 / (current_height + 4 - random_height):
self.log.debug("Invalidating block at height %d", random_height)
self.nodes[3].invalidateblock(self.nodes[3].getblockhash(random_height))
# Now generate new blocks until we pass the old tip height
self.log.debug("Mining longer tip")
- block_hashes = self.nodes[3].generate(current_height+1-self.nodes[3].getblockcount())
+ block_hashes = []
+ while current_height + 1 > self.nodes[3].getblockcount():
+ block_hashes.extend(self.nodes[3].generate(min(10, current_height + 1 - self.nodes[3].getblockcount())))
self.log.debug("Syncing %d new blocks...", len(block_hashes))
self.sync_node3blocks(block_hashes)
utxo_list = self.nodes[3].listunspent()
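# --- illustrative sketch (not part of the patch) -----------------------------
# dbcrash.py above treats a node that dies mid-submitblock as an expected
# event. This standalone sketch shows the same version-portable way of
# catching "remote end went away" exceptions: CannotSendRequest always exists,
# RemoteDisconnected only since Python 3.5, and older interpreters raise
# BadStatusLine('') instead. The function name is invented for illustration.

import http.client
import sys

DISCONNECT_ERRORS = [http.client.CannotSendRequest]
try:
    DISCONNECT_ERRORS.append(http.client.RemoteDisconnected)
except AttributeError:
    pass  # interpreter predates RemoteDisconnected

def call_and_note_disconnect(fun, *args):
    """Return (result, True) on success, (None, False) if the peer disconnected."""
    try:
        return fun(*args), True
    except http.client.BadStatusLine as e:
        # Prior to 3.5 a remote disconnect surfaced as BadStatusLine('').
        if sys.version_info[:2] < (3, 5) and e.line == "''":
            return None, False
        raise
    except tuple(DISCONNECT_ERRORS):
        return None, False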
diff --git a/test/functional/decodescript.py b/test/functional/decodescript.py
index 21a9f1223f..6611da8831 100755
--- a/test/functional/decodescript.py
+++ b/test/functional/decodescript.py
@@ -10,9 +10,7 @@ from test_framework.mininode import *
from io import BytesIO
class DecodeScriptTest(BitcoinTestFramework):
-
- def __init__(self):
- super().__init__()
+ def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
diff --git a/test/functional/deprecated_rpc.py b/test/functional/deprecated_rpc.py
new file mode 100755
index 0000000000..19fd24edb9
--- /dev/null
+++ b/test/functional/deprecated_rpc.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python3
+# Copyright (c) 2017 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test deprecation of RPC calls."""
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import assert_raises_rpc_error
+
+class DeprecatedRpcTest(BitcoinTestFramework):
+ def set_test_params(self):
+ self.num_nodes = 2
+ self.setup_clean_chain = True
+ self.extra_args = [[], ["-deprecatedrpc=estimatefee"]]
+
+ def run_test(self):
+ self.log.info("estimatefee: Shows deprecated message")
+ assert_raises_rpc_error(-32, 'estimatefee is deprecated', self.nodes[0].estimatefee, 1)
+
+ self.log.info("Using -deprecatedrpc=estimatefee bypasses the error")
+ self.nodes[1].estimatefee(1)
+
+if __name__ == '__main__':
+ DeprecatedRpcTest().main()
diff --git a/test/functional/disablewallet.py b/test/functional/disablewallet.py
index d344513414..c75ef9b9f1 100755
--- a/test/functional/disablewallet.py
+++ b/test/functional/disablewallet.py
@@ -11,18 +11,15 @@
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
-
class DisableWalletTest (BitcoinTestFramework):
-
- def __init__(self):
- super().__init__()
+ def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
self.extra_args = [["-disablewallet"]]
def run_test (self):
# Make sure wallet is really disabled
- assert_raises_jsonrpc(-32601, 'Method not found', self.nodes[0].getwalletinfo)
+ assert_raises_rpc_error(-32601, 'Method not found', self.nodes[0].getwalletinfo)
x = self.nodes[0].validateaddress('3J98t1WpEZ73CNmQviecrnyiWrnqRhWNLy')
assert(x['isvalid'] == False)
x = self.nodes[0].validateaddress('mneYUmWYsuk7kySiURxCi3AGxrAqZxLgPZ')
@@ -31,7 +28,7 @@ class DisableWalletTest (BitcoinTestFramework):
# Checking mining to an address without a wallet. Generating to a valid address should succeed
# but generating to an invalid address will fail.
self.nodes[0].generatetoaddress(1, 'mneYUmWYsuk7kySiURxCi3AGxrAqZxLgPZ')
- assert_raises_jsonrpc(-5, "Invalid address", self.nodes[0].generatetoaddress, 1, '3J98t1WpEZ73CNmQviecrnyiWrnqRhWNLy')
+ assert_raises_rpc_error(-5, "Invalid address", self.nodes[0].generatetoaddress, 1, '3J98t1WpEZ73CNmQviecrnyiWrnqRhWNLy')
if __name__ == '__main__':
DisableWalletTest ().main ()
diff --git a/test/functional/disconnect_ban.py b/test/functional/disconnect_ban.py
index 89b68aeb25..59655d37fb 100755
--- a/test/functional/disconnect_ban.py
+++ b/test/functional/disconnect_ban.py
@@ -5,18 +5,17 @@
"""Test node disconnect and ban behavior"""
import time
-from test_framework.mininode import wait_until
from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import (assert_equal,
- assert_raises_jsonrpc,
- connect_nodes_bi)
+from test_framework.util import (
+ assert_equal,
+ assert_raises_rpc_error,
+ connect_nodes_bi,
+ wait_until,
+)
class DisconnectBanTest(BitcoinTestFramework):
-
- def __init__(self):
- super().__init__()
+ def set_test_params(self):
self.num_nodes = 2
- self.setup_clean_chain = False
def run_test(self):
self.log.info("Test setban and listbanned RPCs")
@@ -24,7 +23,7 @@ class DisconnectBanTest(BitcoinTestFramework):
self.log.info("setban: successfully ban single IP address")
assert_equal(len(self.nodes[1].getpeerinfo()), 2) # node1 should have 2 connections to node0 at this point
self.nodes[1].setban("127.0.0.1", "add")
- assert wait_until(lambda: len(self.nodes[1].getpeerinfo()) == 0, timeout=10)
+ wait_until(lambda: len(self.nodes[1].getpeerinfo()) == 0, timeout=10)
assert_equal(len(self.nodes[1].getpeerinfo()), 0) # all nodes must be disconnected at this point
assert_equal(len(self.nodes[1].listbanned()), 1)
@@ -35,14 +34,14 @@ class DisconnectBanTest(BitcoinTestFramework):
self.log.info("setban: fail to ban an already banned subnet")
assert_equal(len(self.nodes[1].listbanned()), 1)
- assert_raises_jsonrpc(-23, "IP/Subnet already banned", self.nodes[1].setban, "127.0.0.1", "add")
+ assert_raises_rpc_error(-23, "IP/Subnet already banned", self.nodes[1].setban, "127.0.0.1", "add")
self.log.info("setban: fail to ban an invalid subnet")
- assert_raises_jsonrpc(-30, "Error: Invalid IP/Subnet", self.nodes[1].setban, "127.0.0.1/42", "add")
+ assert_raises_rpc_error(-30, "Error: Invalid IP/Subnet", self.nodes[1].setban, "127.0.0.1/42", "add")
assert_equal(len(self.nodes[1].listbanned()), 1) # still only one banned ip because 127.0.0.1 is within the range of 127.0.0.0/24
self.log.info("setban remove: fail to unban a non-banned subnet")
- assert_raises_jsonrpc(-30, "Error: Unban failed", self.nodes[1].setban, "127.0.0.1", "remove")
+ assert_raises_rpc_error(-30, "Error: Unban failed", self.nodes[1].setban, "127.0.0.1", "remove")
assert_equal(len(self.nodes[1].listbanned()), 1)
self.log.info("setban remove: successfully unban subnet")
@@ -66,8 +65,8 @@ class DisconnectBanTest(BitcoinTestFramework):
assert_equal(len(self.nodes[1].listbanned()), 3)
self.stop_node(1)
+ self.start_node(1)
- self.nodes[1] = self.start_node(1, self.options.tmpdir)
listAfterShutdown = self.nodes[1].listbanned()
assert_equal("127.0.0.0/24", listAfterShutdown[0]['address'])
assert_equal("127.0.0.0/32", listAfterShutdown[1]['address'])
@@ -82,15 +81,15 @@ class DisconnectBanTest(BitcoinTestFramework):
self.log.info("disconnectnode: fail to disconnect when calling with address and nodeid")
address1 = self.nodes[0].getpeerinfo()[0]['addr']
node1 = self.nodes[0].getpeerinfo()[0]['addr']
- assert_raises_jsonrpc(-32602, "Only one of address and nodeid should be provided.", self.nodes[0].disconnectnode, address=address1, nodeid=node1)
+ assert_raises_rpc_error(-32602, "Only one of address and nodeid should be provided.", self.nodes[0].disconnectnode, address=address1, nodeid=node1)
self.log.info("disconnectnode: fail to disconnect when calling with junk address")
- assert_raises_jsonrpc(-29, "Node not found in connected nodes", self.nodes[0].disconnectnode, address="221B Baker Street")
+ assert_raises_rpc_error(-29, "Node not found in connected nodes", self.nodes[0].disconnectnode, address="221B Baker Street")
self.log.info("disconnectnode: successfully disconnect node by address")
address1 = self.nodes[0].getpeerinfo()[0]['addr']
self.nodes[0].disconnectnode(address=address1)
- assert wait_until(lambda: len(self.nodes[0].getpeerinfo()) == 1, timeout=10)
+ wait_until(lambda: len(self.nodes[0].getpeerinfo()) == 1, timeout=10)
assert not [node for node in self.nodes[0].getpeerinfo() if node['addr'] == address1]
self.log.info("disconnectnode: successfully reconnect node")
@@ -101,7 +100,7 @@ class DisconnectBanTest(BitcoinTestFramework):
self.log.info("disconnectnode: successfully disconnect node by node id")
id1 = self.nodes[0].getpeerinfo()[0]['id']
self.nodes[0].disconnectnode(nodeid=id1)
- assert wait_until(lambda: len(self.nodes[0].getpeerinfo()) == 1, timeout=10)
+ wait_until(lambda: len(self.nodes[0].getpeerinfo()) == 1, timeout=10)
assert not [node for node in self.nodes[0].getpeerinfo() if node['id'] == id1]
if __name__ == '__main__':
diff --git a/test/functional/example_test.py b/test/functional/example_test.py
index 1ba5f756cd..35a6bd2673 100755
--- a/test/functional/example_test.py
+++ b/test/functional/example_test.py
@@ -18,29 +18,27 @@ from test_framework.blocktools import (create_block, create_coinbase)
from test_framework.mininode import (
CInv,
NetworkThread,
- NodeConn,
- NodeConnCB,
+ P2PInterface,
mininode_lock,
msg_block,
msg_getdata,
- wait_until,
)
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
connect_nodes,
- p2p_port,
+ wait_until,
)
-# NodeConnCB is a class containing callbacks to be executed when a P2P
-# message is received from the node-under-test. Subclass NodeConnCB and
+# P2PInterface is a class containing callbacks to be executed when a P2P
+# message is received from the node-under-test. Subclass P2PInterface and
# override the on_*() methods if you need custom behaviour.
-class BaseNode(NodeConnCB):
+class BaseNode(P2PInterface):
def __init__(self):
- """Initialize the NodeConnCB
+ """Initialize the P2PInterface
Used to initialize custom properties for the Node that aren't
- included by default in the base class. Be aware that the NodeConnCB
+ included by default in the base class. Be aware that the P2PInterface
base class already stores a counter for each P2P message type and the
last received message of each type, which should be sufficient for the
needs of most tests.
@@ -51,13 +49,17 @@ class BaseNode(NodeConnCB):
# Stores a dictionary of all blocks received
self.block_receive_map = defaultdict(int)
- def on_block(self, conn, message):
+ def on_block(self, message):
"""Override the standard on_block callback
Store the hash of a received block in the dictionary."""
message.block.calc_sha256()
self.block_receive_map[message.block.sha256] += 1
+ def on_inv(self, message):
+ """Override the standard on_inv callback"""
+ pass
+
def custom_function():
"""Do some custom behaviour
@@ -69,21 +71,19 @@ def custom_function():
class ExampleTest(BitcoinTestFramework):
# Each functional test is a subclass of the BitcoinTestFramework class.
- # Override the __init__(), add_options(), setup_chain(), setup_network()
+ # Override the set_test_params(), add_options(), setup_chain(), setup_network()
# and setup_nodes() methods to customize the test setup as required.
- def __init__(self):
- """Initialize the test
+ def set_test_params(self):
+ """Override test parameters for your individual test.
- Call super().__init__() first, and then override any test parameters
- for your individual test."""
- super().__init__()
+ This method must be overridden and num_nodes must be explicitly set."""
self.setup_clean_chain = True
self.num_nodes = 3
# Use self.extra_args to change command-line arguments for the nodes
self.extra_args = [[], ["-logips"], []]
- # self.log.info("I've finished __init__") # Oops! Can't run self.log before run_test()
+ # self.log.info("I've finished set_test_params") # Oops! Can't run self.log before run_test()
# Use add_options() to add specific command-line options for your test.
# In practice this is not used very much, since the tests are mostly written
@@ -132,16 +132,13 @@ class ExampleTest(BitcoinTestFramework):
"""Main test logic"""
# Create a P2P connection to one of the nodes
- node0 = BaseNode()
- connections = []
- connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], node0))
- node0.add_connection(connections[0])
+ self.nodes[0].add_p2p_connection(BaseNode())
# Start up network handling in another thread. This needs to be called
# after the P2P connections have been created.
NetworkThread().start()
# wait_for_verack ensures that the P2P connection is fully up.
- node0.wait_for_verack()
+ self.nodes[0].p2p.wait_for_verack()
# Generating a block on one of the nodes will get us out of IBD
blocks = [int(self.nodes[0].generate(nblocks=1)[0], 16)]
@@ -177,8 +174,8 @@ class ExampleTest(BitcoinTestFramework):
block = create_block(self.tip, create_coinbase(height), self.block_time)
block.solve()
block_message = msg_block(block)
- # Send message is used to send a P2P message to the node over our NodeConn connection
- node0.send_message(block_message)
+ # Send message is used to send a P2P message to the node over our P2PInterface
+ self.nodes[0].p2p.send_message(block_message)
self.tip = block.sha256
blocks.append(self.tip)
self.block_time += 1
@@ -191,28 +188,26 @@ class ExampleTest(BitcoinTestFramework):
connect_nodes(self.nodes[1], 2)
self.log.info("Add P2P connection to node2")
- node2 = BaseNode()
- connections.append(NodeConn('127.0.0.1', p2p_port(2), self.nodes[2], node2))
- node2.add_connection(connections[1])
- node2.wait_for_verack()
+ self.nodes[2].add_p2p_connection(BaseNode())
+ self.nodes[2].p2p.wait_for_verack()
- self.log.info("Wait for node2 reach current tip. Test that it has propogated all the blocks to us")
+ self.log.info("Wait for node2 to reach the current tip. Test that it has propagated all the blocks to us")
+ getdata_request = msg_getdata()
for block in blocks:
- getdata_request = msg_getdata()
getdata_request.inv.append(CInv(2, block))
- node2.send_message(getdata_request)
+ self.nodes[2].p2p.send_message(getdata_request)
# wait_until() will loop until a predicate condition is met. Use it to test properties of the
- # NodeConnCB objects.
- assert wait_until(lambda: sorted(blocks) == sorted(list(node2.block_receive_map.keys())), timeout=5)
+ # P2PInterface objects.
+ wait_until(lambda: sorted(blocks) == sorted(list(self.nodes[2].p2p.block_receive_map.keys())), timeout=5, lock=mininode_lock)
self.log.info("Check that each block was received only once")
- # The network thread uses a global lock on data access to the NodeConn objects when sending and receiving
- # messages. The test thread should acquire the global lock before accessing any NodeConn data to avoid locking
+ # The network thread uses a global lock on data access to the P2PConnection objects when sending and receiving
+ # messages. The test thread should acquire the global lock before accessing any P2PConnection data to avoid locking
# and synchronization issues. Note wait_until() acquires this global lock when testing the predicate.
with mininode_lock:
- for block in node2.block_receive_map.values():
+ for block in self.nodes[2].p2p.block_receive_map.values():
assert_equal(block, 1)
if __name__ == '__main__':
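# --- illustrative sketch (not part of the patch) -----------------------------
# A condensed version of the P2P pattern example_test.py walks through above:
# subclass P2PInterface, attach it with add_p2p_connection(), start the
# NetworkThread, and read anything the callbacks collected under
# mininode_lock. Only APIs that appear in the hunks are used; the class and
# test names here are invented for illustration.

from collections import defaultdict

from test_framework.mininode import NetworkThread, P2PInterface, mininode_lock
from test_framework.test_framework import BitcoinTestFramework

class CountingPeer(P2PInterface):
    def __init__(self):
        super().__init__()
        self.block_receive_map = defaultdict(int)

    def on_block(self, message):
        # Callbacks now receive only the message; the connection is implicit.
        message.block.calc_sha256()
        self.block_receive_map[message.block.sha256] += 1

class P2PPatternSketch(BitcoinTestFramework):
    def set_test_params(self):
        self.num_nodes = 1

    def run_test(self):
        self.nodes[0].add_p2p_connection(CountingPeer())
        NetworkThread().start()
        self.nodes[0].p2p.wait_for_verack()
        self.nodes[0].generate(1)
        # Data owned by the P2P object should be read under mininode_lock.
        with mininode_lock:
            received = dict(self.nodes[0].p2p.block_receive_map)
        self.log.info("peer recorded %d distinct blocks", len(received))

if __name__ == '__main__':
    P2PPatternSketch().main()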
diff --git a/test/functional/feature_logging.py b/test/functional/feature_logging.py
new file mode 100755
index 0000000000..da4e7b0398
--- /dev/null
+++ b/test/functional/feature_logging.py
@@ -0,0 +1,59 @@
+#!/usr/bin/env python3
+# Copyright (c) 2017 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test debug logging."""
+
+import os
+
+from test_framework.test_framework import BitcoinTestFramework
+
+class LoggingTest(BitcoinTestFramework):
+ def set_test_params(self):
+ self.num_nodes = 1
+ self.setup_clean_chain = True
+
+ def run_test(self):
+ # test default log file name
+ assert os.path.isfile(os.path.join(self.nodes[0].datadir, "regtest", "debug.log"))
+
+ # test alternative log file name in datadir
+ self.restart_node(0, ["-debuglogfile=foo.log"])
+ assert os.path.isfile(os.path.join(self.nodes[0].datadir, "regtest", "foo.log"))
+
+ # test alternative log file name outside datadir
+ tempname = os.path.join(self.options.tmpdir, "foo.log")
+ self.restart_node(0, ["-debuglogfile=%s" % tempname])
+ assert os.path.isfile(tempname)
+
+ # check that invalid log (relative) will cause error
+ invdir = os.path.join(self.nodes[0].datadir, "regtest", "foo")
+ invalidname = os.path.join("foo", "foo.log")
+ self.stop_node(0)
+ self.assert_start_raises_init_error(0, ["-debuglogfile=%s" % (invalidname)],
+ "Error: Could not open debug log file")
+ assert not os.path.isfile(os.path.join(invdir, "foo.log"))
+
+ # check that invalid log (relative) works after path exists
+ self.stop_node(0)
+ os.mkdir(invdir)
+ self.start_node(0, ["-debuglogfile=%s" % (invalidname)])
+ assert os.path.isfile(os.path.join(invdir, "foo.log"))
+
+ # check that invalid log (absolute) will cause error
+ self.stop_node(0)
+ invdir = os.path.join(self.options.tmpdir, "foo")
+ invalidname = os.path.join(invdir, "foo.log")
+ self.assert_start_raises_init_error(0, ["-debuglogfile=%s" % invalidname],
+ "Error: Could not open debug log file")
+ assert not os.path.isfile(os.path.join(invdir, "foo.log"))
+
+ # check that invalid log (absolute) works after path exists
+ self.stop_node(0)
+ os.mkdir(invdir)
+ self.start_node(0, ["-debuglogfile=%s" % (invalidname)])
+ assert os.path.isfile(os.path.join(invdir, "foo.log"))
+
+
+if __name__ == '__main__':
+ LoggingTest().main()
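# --- illustrative sketch (not part of the patch) -----------------------------
# A rough model of the -debuglogfile path handling that feature_logging.py
# above implies: a relative name lands under the network data directory, an
# absolute name is used as-is, and a missing parent directory is an error.
# This is an assumption drawn from the test, not bitcoind's actual code.

import os

def resolve_debuglogfile(datadir, network_dir, debuglogfile):
    if os.path.isabs(debuglogfile):
        path = debuglogfile
    else:
        path = os.path.join(datadir, network_dir, debuglogfile)
    if not os.path.isdir(os.path.dirname(path)):
        raise RuntimeError("Error: Could not open debug log file %s" % path)
    return path

# e.g. resolve_debuglogfile("/tmp/node0", "regtest", "foo.log")
#      -> "/tmp/node0/regtest/foo.log"   (provided /tmp/node0/regtest exists)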
diff --git a/test/functional/forknotify.py b/test/functional/forknotify.py
deleted file mode 100755
index 3bcf0a6795..0000000000
--- a/test/functional/forknotify.py
+++ /dev/null
@@ -1,69 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (c) 2014-2016 The Bitcoin Core developers
-# Distributed under the MIT software license, see the accompanying
-# file COPYING or http://www.opensource.org/licenses/mit-license.php.
-"""Test the -alertnotify option."""
-import os
-import time
-
-from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import *
-
-class ForkNotifyTest(BitcoinTestFramework):
-
- def __init__(self):
- super().__init__()
- self.num_nodes = 2
- self.setup_clean_chain = False
-
- def setup_network(self):
- self.nodes = []
- self.alert_filename = os.path.join(self.options.tmpdir, "alert.txt")
- with open(self.alert_filename, 'w', encoding='utf8'):
- pass # Just open then close to create zero-length file
- self.nodes.append(self.start_node(0, self.options.tmpdir,
- ["-blockversion=2", "-alertnotify=echo %s >> \"" + self.alert_filename + "\""]))
- # Node1 mines block.version=211 blocks
- self.nodes.append(self.start_node(1, self.options.tmpdir,
- ["-blockversion=211"]))
- connect_nodes(self.nodes[1], 0)
-
- self.sync_all()
-
- def run_test(self):
- # Mine 51 up-version blocks
- self.nodes[1].generate(51)
- self.sync_all()
- # -alertnotify should trigger on the 51'st,
- # but mine and sync another to give
- # -alertnotify time to write
- self.nodes[1].generate(1)
- self.sync_all()
-
- # Give bitcoind 10 seconds to write the alert notification
- timeout = 10.0
- while timeout > 0:
- if os.path.exists(self.alert_filename) and os.path.getsize(self.alert_filename):
- break
- time.sleep(0.1)
- timeout -= 0.1
- else:
- assert False, "-alertnotify did not warn of up-version blocks"
-
- with open(self.alert_filename, 'r', encoding='utf8') as f:
- alert_text = f.read()
-
- # Mine more up-version blocks, should not get more alerts:
- self.nodes[1].generate(1)
- self.sync_all()
- self.nodes[1].generate(1)
- self.sync_all()
-
- with open(self.alert_filename, 'r', encoding='utf8') as f:
- alert_text2 = f.read()
-
- if alert_text != alert_text2:
- raise AssertionError("-alertnotify excessive warning of up-version blocks")
-
-if __name__ == '__main__':
- ForkNotifyTest().main()
diff --git a/test/functional/fundrawtransaction.py b/test/functional/fundrawtransaction.py
index 0baab6d01c..d446f56d0e 100755
--- a/test/functional/fundrawtransaction.py
+++ b/test/functional/fundrawtransaction.py
@@ -4,7 +4,7 @@
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the fundrawtransaction RPC."""
-from test_framework.test_framework import BitcoinTestFramework, BITCOIND_PROC_WAIT_TIMEOUT
+from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
@@ -14,13 +14,10 @@ def get_unspent(listunspent, amount):
return utx
raise AssertionError('Could not find unspent with amount={}'.format(amount))
-
class RawTransactionsTest(BitcoinTestFramework):
-
- def __init__(self):
- super().__init__()
- self.setup_clean_chain = True
+ def set_test_params(self):
self.num_nodes = 4
+ self.setup_clean_chain = True
def setup_network(self, split=False):
self.setup_nodes()
@@ -182,7 +179,7 @@ class RawTransactionsTest(BitcoinTestFramework):
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
- assert_raises_jsonrpc(-3, "Unexpected key foo", self.nodes[2].fundrawtransaction, rawtx, {'foo':'bar'})
+ assert_raises_rpc_error(-3, "Unexpected key foo", self.nodes[2].fundrawtransaction, rawtx, {'foo':'bar'})
############################################################
# test a fundrawtransaction with an invalid change address #
@@ -195,7 +192,7 @@ class RawTransactionsTest(BitcoinTestFramework):
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
- assert_raises_jsonrpc(-5, "changeAddress must be a valid bitcoin address", self.nodes[2].fundrawtransaction, rawtx, {'changeAddress':'foobar'})
+ assert_raises_rpc_error(-5, "changeAddress must be a valid bitcoin address", self.nodes[2].fundrawtransaction, rawtx, {'changeAddress':'foobar'})
############################################################
# test a fundrawtransaction with a provided change address #
@@ -209,7 +206,7 @@ class RawTransactionsTest(BitcoinTestFramework):
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
change = self.nodes[2].getnewaddress()
- assert_raises_jsonrpc(-8, "changePosition out of bounds", self.nodes[2].fundrawtransaction, rawtx, {'changeAddress':change, 'changePosition':2})
+ assert_raises_rpc_error(-8, "changePosition out of bounds", self.nodes[2].fundrawtransaction, rawtx, {'changeAddress':change, 'changePosition':2})
rawtxfund = self.nodes[2].fundrawtransaction(rawtx, {'changeAddress': change, 'changePosition': 0})
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
out = dec_tx['vout'][0]
@@ -312,13 +309,12 @@ class RawTransactionsTest(BitcoinTestFramework):
##############################################
# test a fundrawtransaction with invalid vin #
##############################################
- listunspent = self.nodes[2].listunspent()
inputs = [ {'txid' : "1c7f966dab21119bac53213a2bc7532bff1fa844c124fd750a7d0b1332440bd1", 'vout' : 0} ] #invalid vin!
outputs = { self.nodes[0].getnewaddress() : 1.0}
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
- assert_raises_jsonrpc(-4, "Insufficient funds", self.nodes[2].fundrawtransaction, rawtx)
+ assert_raises_rpc_error(-4, "Insufficient funds", self.nodes[2].fundrawtransaction, rawtx)
############################################################
#compare fee of a standard pubkeyhash transaction
@@ -449,12 +445,11 @@ class RawTransactionsTest(BitcoinTestFramework):
############################################################
# locked wallet test
self.stop_node(0)
+ self.nodes[1].node_encrypt_wallet("test")
self.stop_node(2)
self.stop_node(3)
- self.nodes[1].encryptwallet("test")
- self.bitcoind_processes[1].wait(timeout=BITCOIND_PROC_WAIT_TIMEOUT)
- self.nodes = self.start_nodes(self.num_nodes, self.options.tmpdir)
+ self.start_nodes()
# This test is not meant to test fee estimation and we'd like
# to be sure all txs are sent at a consistent desired feerate
for node in self.nodes:
@@ -474,14 +469,14 @@ class RawTransactionsTest(BitcoinTestFramework):
rawtx = self.nodes[1].createrawtransaction(inputs, outputs)
# fund a transaction that requires a new key for the change output
# creating the key must be impossible because the wallet is locked
- assert_raises_jsonrpc(-4, "Keypool ran out, please call keypoolrefill first", self.nodes[1].fundrawtransaction, rawtx)
+ assert_raises_rpc_error(-4, "Keypool ran out, please call keypoolrefill first", self.nodes[1].fundrawtransaction, rawtx)
#refill the keypool
self.nodes[1].walletpassphrase("test", 100)
self.nodes[1].keypoolrefill(8) #need to refill the keypool to get an internal change address
self.nodes[1].walletlock()
- assert_raises_jsonrpc(-13, "walletpassphrase", self.nodes[1].sendtoaddress, self.nodes[0].getnewaddress(), 1.2)
+ assert_raises_rpc_error(-13, "walletpassphrase", self.nodes[1].sendtoaddress, self.nodes[0].getnewaddress(), 1.2)
oldBalance = self.nodes[0].getbalance()
@@ -636,20 +631,9 @@ class RawTransactionsTest(BitcoinTestFramework):
assert_fee_amount(result2['fee'], count_bytes(result2['hex']), 2 * result_fee_rate)
assert_fee_amount(result3['fee'], count_bytes(result3['hex']), 10 * result_fee_rate)
- #############################
- # Test address reuse option #
- #############################
-
- result3 = self.nodes[3].fundrawtransaction(rawtx, {"reserveChangeKey": False})
- res_dec = self.nodes[0].decoderawtransaction(result3["hex"])
- changeaddress = ""
- for out in res_dec['vout']:
- if out['value'] > 1.0:
- changeaddress += out['scriptPubKey']['addresses'][0]
- assert(changeaddress != "")
- nextaddr = self.nodes[3].getrawchangeaddress()
- # frt should not have removed the key from the keypool
- assert(changeaddress == nextaddr)
+ ################################
+ # Test no address reuse occurs #
+ ################################
result3 = self.nodes[3].fundrawtransaction(rawtx)
res_dec = self.nodes[0].decoderawtransaction(result3["hex"])
diff --git a/test/functional/getblocktemplate_longpoll.py b/test/functional/getblocktemplate_longpoll.py
index bbe1dda5f7..89768bd2fb 100755
--- a/test/functional/getblocktemplate_longpoll.py
+++ b/test/functional/getblocktemplate_longpoll.py
@@ -17,16 +17,14 @@ class LongpollThread(threading.Thread):
self.longpollid = templat['longpollid']
# create a new connection to the node, we can't use the same
# connection from two threads
- self.node = get_rpc_proxy(node.url, 1, timeout=600)
+ self.node = get_rpc_proxy(node.url, 1, timeout=600, coveragedir=node.coverage_dir)
def run(self):
self.node.getblocktemplate({'longpollid':self.longpollid})
class GetBlockTemplateLPTest(BitcoinTestFramework):
- def __init__(self):
- super().__init__()
- self.num_nodes = 4
- self.setup_clean_chain = False
+ def set_test_params(self):
+ self.num_nodes = 2
def run_test(self):
self.log.info("Warning: this test will take about 70 seconds in the best case. Be patient.")
diff --git a/test/functional/getblocktemplate_proposals.py b/test/functional/getblocktemplate_proposals.py
deleted file mode 100755
index fca99c7df5..0000000000
--- a/test/functional/getblocktemplate_proposals.py
+++ /dev/null
@@ -1,157 +0,0 @@
-#!/usr/bin/env python3
-# Copyright (c) 2014-2016 The Bitcoin Core developers
-# Distributed under the MIT software license, see the accompanying
-# file COPYING or http://www.opensource.org/licenses/mit-license.php.
-"""Test block proposals with getblocktemplate."""
-
-from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import *
-
-from binascii import a2b_hex, b2a_hex
-from hashlib import sha256
-from struct import pack
-
-def b2x(b):
- return b2a_hex(b).decode('ascii')
-
-# NOTE: This does not work for signed numbers (set the high bit) or zero (use b'\0')
-def encodeUNum(n):
- s = bytearray(b'\1')
- while n > 127:
- s[0] += 1
- s.append(n % 256)
- n //= 256
- s.append(n)
- return bytes(s)
-
-def varlenEncode(n):
- if n < 0xfd:
- return pack('<B', n)
- if n <= 0xffff:
- return b'\xfd' + pack('<H', n)
- if n <= 0xffffffff:
- return b'\xfe' + pack('<L', n)
- return b'\xff' + pack('<Q', n)
-
-def dblsha(b):
- return sha256(sha256(b).digest()).digest()
-
-def genmrklroot(leaflist):
- cur = leaflist
- while len(cur) > 1:
- n = []
- if len(cur) & 1:
- cur.append(cur[-1])
- for i in range(0, len(cur), 2):
- n.append(dblsha(cur[i] + cur[i+1]))
- cur = n
- return cur[0]
-
-def template_to_bytearray(tmpl, txlist):
- blkver = pack('<L', tmpl['version'])
- mrklroot = genmrklroot(list(dblsha(a) for a in txlist))
- timestamp = pack('<L', tmpl['curtime'])
- nonce = b'\0\0\0\0'
- blk = blkver + a2b_hex(tmpl['previousblockhash'])[::-1] + mrklroot + timestamp + a2b_hex(tmpl['bits'])[::-1] + nonce
- blk += varlenEncode(len(txlist))
- for tx in txlist:
- blk += tx
- return bytearray(blk)
-
-def template_to_hex(tmpl, txlist):
- return b2x(template_to_bytearray(tmpl, txlist))
-
-def assert_template(node, tmpl, txlist, expect):
- rsp = node.getblocktemplate({'data':template_to_hex(tmpl, txlist),'mode':'proposal'})
- if rsp != expect:
- raise AssertionError('unexpected: %s' % (rsp,))
-
-class GetBlockTemplateProposalTest(BitcoinTestFramework):
-
- def __init__(self):
- super().__init__()
- self.num_nodes = 2
- self.setup_clean_chain = False
-
- def run_test(self):
- node = self.nodes[0]
- node.generate(1) # Mine a block to leave initial block download
- tmpl = node.getblocktemplate()
- if 'coinbasetxn' not in tmpl:
- rawcoinbase = encodeUNum(tmpl['height'])
- rawcoinbase += b'\x01-'
- hexcoinbase = b2x(rawcoinbase)
- hexoutval = b2x(pack('<Q', tmpl['coinbasevalue']))
- tmpl['coinbasetxn'] = {'data': '01000000' + '01' + '0000000000000000000000000000000000000000000000000000000000000000ffffffff' + ('%02x' % (len(rawcoinbase),)) + hexcoinbase + 'fffffffe' + '01' + hexoutval + '00' + '00000000'}
- txlist = list(bytearray(a2b_hex(a['data'])) for a in (tmpl['coinbasetxn'],) + tuple(tmpl['transactions']))
-
- # Test 0: Capability advertised
- assert('proposal' in tmpl['capabilities'])
-
- # NOTE: This test currently FAILS (regtest mode doesn't enforce block height in coinbase)
- ## Test 1: Bad height in coinbase
- #txlist[0][4+1+36+1+1] += 1
- #assert_template(node, tmpl, txlist, 'FIXME')
- #txlist[0][4+1+36+1+1] -= 1
-
- # Test 2: Bad input hash for gen tx
- txlist[0][4+1] += 1
- assert_template(node, tmpl, txlist, 'bad-cb-missing')
- txlist[0][4+1] -= 1
-
- # Test 3: Truncated final tx
- lastbyte = txlist[-1].pop()
- assert_raises_jsonrpc(-22, "Block decode failed", assert_template, node, tmpl, txlist, 'n/a')
- txlist[-1].append(lastbyte)
-
- # Test 4: Add an invalid tx to the end (duplicate of gen tx)
- txlist.append(txlist[0])
- assert_template(node, tmpl, txlist, 'bad-txns-duplicate')
- txlist.pop()
-
- # Test 5: Add an invalid tx to the end (non-duplicate)
- txlist.append(bytearray(txlist[0]))
- txlist[-1][4+1] = 0xff
- assert_template(node, tmpl, txlist, 'bad-txns-inputs-missingorspent')
- txlist.pop()
-
- # Test 6: Future tx lock time
- txlist[0][-4:] = b'\xff\xff\xff\xff'
- assert_template(node, tmpl, txlist, 'bad-txns-nonfinal')
- txlist[0][-4:] = b'\0\0\0\0'
-
- # Test 7: Bad tx count
- txlist.append(b'')
- assert_raises_jsonrpc(-22, 'Block decode failed', assert_template, node, tmpl, txlist, 'n/a')
- txlist.pop()
-
- # Test 8: Bad bits
- realbits = tmpl['bits']
- tmpl['bits'] = '1c0000ff' # impossible in the real world
- assert_template(node, tmpl, txlist, 'bad-diffbits')
- tmpl['bits'] = realbits
-
- # Test 9: Bad merkle root
- rawtmpl = template_to_bytearray(tmpl, txlist)
- rawtmpl[4+32] = (rawtmpl[4+32] + 1) % 0x100
- rsp = node.getblocktemplate({'data':b2x(rawtmpl),'mode':'proposal'})
- if rsp != 'bad-txnmrklroot':
- raise AssertionError('unexpected: %s' % (rsp,))
-
- # Test 10: Bad timestamps
- realtime = tmpl['curtime']
- tmpl['curtime'] = 0x7fffffff
- assert_template(node, tmpl, txlist, 'time-too-new')
- tmpl['curtime'] = 0
- assert_template(node, tmpl, txlist, 'time-too-old')
- tmpl['curtime'] = realtime
-
- # Test 11: Valid block
- assert_template(node, tmpl, txlist, None)
-
- # Test 12: Orphan block
- tmpl['previousblockhash'] = 'ff00' * 16
- assert_template(node, tmpl, txlist, 'inconclusive-not-best-prevblk')
-
-if __name__ == '__main__':
- GetBlockTemplateProposalTest().main()
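# --- illustrative sketch (not part of the patch) -----------------------------
# The deleted getblocktemplate_proposals.py above carried its own compact-size
# ("varint") encoder, varlenEncode. Kept here only as a worked example of its
# boundary cases: values below 0xfd fit in one byte, and the 0xfd/0xfe/0xff
# prefixes introduce 2-, 4- and 8-byte little-endian integers.

from struct import pack

def varlen_encode(n):
    # Same logic as the deleted helper.
    if n < 0xfd:
        return pack('<B', n)
    if n <= 0xffff:
        return b'\xfd' + pack('<H', n)
    if n <= 0xffffffff:
        return b'\xfe' + pack('<L', n)
    return b'\xff' + pack('<Q', n)

assert varlen_encode(0xfc) == b'\xfc'
assert varlen_encode(0xfd) == b'\xfd\xfd\x00'
assert varlen_encode(0x10000) == b'\xfe\x00\x00\x01\x00'
assert varlen_encode(0x100000000) == b'\xff\x00\x00\x00\x00\x01\x00\x00\x00'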
diff --git a/test/functional/getchaintips.py b/test/functional/getchaintips.py
index 15f96c565f..21b67bfc64 100755
--- a/test/functional/getchaintips.py
+++ b/test/functional/getchaintips.py
@@ -14,13 +14,10 @@ from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal
class GetChainTipsTest (BitcoinTestFramework):
- def __init__(self):
- super().__init__()
+ def set_test_params(self):
self.num_nodes = 4
- self.setup_clean_chain = False
def run_test (self):
-
tips = self.nodes[0].getchaintips ()
assert_equal (len (tips), 1)
assert_equal (tips[0]['branchlen'], 0)
diff --git a/test/functional/httpbasics.py b/test/functional/httpbasics.py
index 4b32e8d9ca..c7682cb49d 100755
--- a/test/functional/httpbasics.py
+++ b/test/functional/httpbasics.py
@@ -11,10 +11,8 @@ import http.client
import urllib.parse
class HTTPBasicsTest (BitcoinTestFramework):
- def __init__(self):
- super().__init__()
+ def set_test_params(self):
self.num_nodes = 3
- self.setup_clean_chain = False
def setup_network(self):
self.setup_nodes()
diff --git a/test/functional/import-rescan.py b/test/functional/import-rescan.py
index 4fc5078217..6807fa6696 100755
--- a/test/functional/import-rescan.py
+++ b/test/functional/import-rescan.py
@@ -19,9 +19,8 @@ importing nodes pick up the new transactions regardless of whether rescans
happened previously.
"""
-from test_framework.authproxy import JSONRPCException
from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import (connect_nodes, sync_blocks, assert_equal, set_node_times)
+from test_framework.util import (assert_raises_rpc_error, connect_nodes, sync_blocks, assert_equal, set_node_times)
import collections
import enum
@@ -35,21 +34,26 @@ Rescan = enum.Enum("Rescan", "no yes late_timestamp")
class Variant(collections.namedtuple("Variant", "call data rescan prune")):
"""Helper for importing one key and verifying scanned transactions."""
+ def try_rpc(self, func, *args, **kwargs):
+ if self.expect_disabled:
+ assert_raises_rpc_error(-4, "Rescan is disabled in pruned mode", func, *args, **kwargs)
+ else:
+ return func(*args, **kwargs)
+
def do_import(self, timestamp):
"""Call one key import RPC."""
if self.call == Call.single:
if self.data == Data.address:
- response, error = try_rpc(self.node.importaddress, self.address["address"], self.label,
- self.rescan == Rescan.yes)
+ response = self.try_rpc(self.node.importaddress, self.address["address"], self.label,
+ self.rescan == Rescan.yes)
elif self.data == Data.pub:
- response, error = try_rpc(self.node.importpubkey, self.address["pubkey"], self.label,
- self.rescan == Rescan.yes)
+ response = self.try_rpc(self.node.importpubkey, self.address["pubkey"], self.label,
+ self.rescan == Rescan.yes)
elif self.data == Data.priv:
- response, error = try_rpc(self.node.importprivkey, self.key, self.label, self.rescan == Rescan.yes)
+ response = self.try_rpc(self.node.importprivkey, self.key, self.label, self.rescan == Rescan.yes)
assert_equal(response, None)
- assert_equal(error, {'message': 'Rescan is disabled in pruned mode',
- 'code': -4} if self.expect_disabled else None)
+
elif self.call == Call.multi:
response = self.node.importmulti([{
"scriptPubKey": {
@@ -111,8 +115,7 @@ TIMESTAMP_WINDOW = 2 * 60 * 60
class ImportRescanTest(BitcoinTestFramework):
- def __init__(self):
- super().__init__()
+ def set_test_params(self):
self.num_nodes = 2 + len(IMPORT_NODES)
def setup_network(self):
@@ -121,7 +124,8 @@ class ImportRescanTest(BitcoinTestFramework):
if import_node.prune:
extra_args[i] += ["-prune=1"]
- self.nodes = self.start_nodes(self.num_nodes, self.options.tmpdir, extra_args)
+ self.add_nodes(self.num_nodes, extra_args)
+ self.start_nodes()
for i in range(1, self.num_nodes):
connect_nodes(self.nodes[i], 0)
@@ -161,7 +165,6 @@ class ImportRescanTest(BitcoinTestFramework):
variant.check()
# Create new transactions sending to each address.
- fee = self.nodes[0].getnetworkinfo()["relayfee"]
for i, variant in enumerate(IMPORT_VARIANTS):
variant.sent_amount = 10 - (2 * i + 1) / 8.0
variant.sent_txid = self.nodes[0].sendtoaddress(variant.address["address"], variant.sent_amount)
@@ -180,13 +183,5 @@ class ImportRescanTest(BitcoinTestFramework):
else:
variant.check()
-
-def try_rpc(func, *args, **kwargs):
- try:
- return func(*args, **kwargs), None
- except JSONRPCException as e:
- return None, e.error
-
-
if __name__ == "__main__":
ImportRescanTest().main()
diff --git a/test/functional/importmulti.py b/test/functional/importmulti.py
index e83e85de13..a691595f15 100755
--- a/test/functional/importmulti.py
+++ b/test/functional/importmulti.py
@@ -7,8 +7,7 @@ from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
class ImportMultiTest (BitcoinTestFramework):
- def __init__(self):
- super().__init__()
+ def set_test_params(self):
self.num_nodes = 2
self.setup_clean_chain = True
@@ -21,16 +20,7 @@ class ImportMultiTest (BitcoinTestFramework):
self.nodes[1].generate(1)
timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
- # keyword definition
- PRIV_KEY = 'privkey'
- PUB_KEY = 'pubkey'
- ADDRESS_KEY = 'address'
- SCRIPT_KEY = 'script'
-
-
node0_address1 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
- node0_address2 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
- node0_address3 = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
#Check only one address
assert_equal(node0_address1['ismine'], True)
@@ -170,6 +160,18 @@ class ImportMultiTest (BitcoinTestFramework):
assert_equal(address_assert['ismine'], True)
assert_equal(address_assert['timestamp'], timestamp)
+ self.log.info("Should not import an address with private key if it is already imported")
+ result = self.nodes[1].importmulti([{
+ "scriptPubKey": {
+ "address": address['address']
+ },
+ "timestamp": "now",
+ "keys": [ self.nodes[0].dumpprivkey(address['address']) ]
+ }])
+ assert_equal(result[0]['success'], False)
+ assert_equal(result[0]['error']['code'], -4)
+ assert_equal(result[0]['error']['message'], 'The wallet already contains the private key for this address or script')
+
# Address + Private key + watchonly
self.log.info("Should not import an address with private key and with watchonly")
address = self.nodes[0].validateaddress(self.nodes[0].getnewaddress())
@@ -230,7 +232,6 @@ class ImportMultiTest (BitcoinTestFramework):
transactionid = self.nodes[1].sendtoaddress(multi_sig_script['address'], 10.00)
self.nodes[1].generate(1)
timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
- transaction = self.nodes[1].gettransaction(transactionid)
self.log.info("Should import a p2sh")
result = self.nodes[1].importmulti([{
@@ -258,7 +259,6 @@ class ImportMultiTest (BitcoinTestFramework):
transactionid = self.nodes[1].sendtoaddress(multi_sig_script['address'], 10.00)
self.nodes[1].generate(1)
timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
- transaction = self.nodes[1].gettransaction(transactionid)
self.log.info("Should import a p2sh with respective redeem script")
result = self.nodes[1].importmulti([{
@@ -286,7 +286,6 @@ class ImportMultiTest (BitcoinTestFramework):
transactionid = self.nodes[1].sendtoaddress(multi_sig_script['address'], 10.00)
self.nodes[1].generate(1)
timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
- transaction = self.nodes[1].gettransaction(transactionid)
self.log.info("Should import a p2sh with respective redeem script and private keys")
result = self.nodes[1].importmulti([{
@@ -314,7 +313,6 @@ class ImportMultiTest (BitcoinTestFramework):
transactionid = self.nodes[1].sendtoaddress(multi_sig_script['address'], 10.00)
self.nodes[1].generate(1)
timestamp = self.nodes[1].getblock(self.nodes[1].getbestblockhash())['mediantime']
- transaction = self.nodes[1].gettransaction(transactionid)
self.log.info("Should import a p2sh with respective redeem script and private keys")
result = self.nodes[1].importmulti([{
@@ -429,7 +427,7 @@ class ImportMultiTest (BitcoinTestFramework):
# restart nodes to check for proper serialization/deserialization of watch only address
self.stop_nodes()
- self.nodes = self.start_nodes(2, self.options.tmpdir)
+ self.start_nodes()
address_assert = self.nodes[1].validateaddress(watchonly_address)
assert_equal(address_assert['iswatchonly'], True)
assert_equal(address_assert['ismine'], False)
@@ -437,11 +435,11 @@ class ImportMultiTest (BitcoinTestFramework):
# Bad or missing timestamps
self.log.info("Should throw on invalid or missing timestamp values")
- assert_raises_message(JSONRPCException, 'Missing required timestamp field for key',
+ assert_raises_rpc_error(-3, 'Missing required timestamp field for key',
self.nodes[1].importmulti, [{
"scriptPubKey": address['scriptPubKey'],
}])
- assert_raises_message(JSONRPCException, 'Expected number or "now" timestamp value for key. got type string',
+ assert_raises_rpc_error(-3, 'Expected number or "now" timestamp value for key. got type string',
self.nodes[1].importmulti, [{
"scriptPubKey": address['scriptPubKey'],
"timestamp": "",
diff --git a/test/functional/importprunedfunds.py b/test/functional/importprunedfunds.py
index 94753fe431..068052409a 100755
--- a/test/functional/importprunedfunds.py
+++ b/test/functional/importprunedfunds.py
@@ -6,11 +6,8 @@
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
-
class ImportPrunedFundsTest(BitcoinTestFramework):
-
- def __init__(self):
- super().__init__()
+ def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 2
@@ -24,7 +21,6 @@ class ImportPrunedFundsTest(BitcoinTestFramework):
address1 = self.nodes[0].getnewaddress()
# pubkey
address2 = self.nodes[0].getnewaddress()
- address2_pubkey = self.nodes[0].validateaddress(address2)['pubkey'] # Using pubkey
# privkey
address3 = self.nodes[0].getnewaddress()
address3_privkey = self.nodes[0].dumpprivkey(address3) # Using privkey
@@ -70,20 +66,20 @@ class ImportPrunedFundsTest(BitcoinTestFramework):
self.sync_all()
#Import with no affiliated address
- assert_raises_jsonrpc(-5, "No addresses", self.nodes[1].importprunedfunds, rawtxn1, proof1)
+ assert_raises_rpc_error(-5, "No addresses", self.nodes[1].importprunedfunds, rawtxn1, proof1)
balance1 = self.nodes[1].getbalance("", 0, True)
assert_equal(balance1, Decimal(0))
#Import with affiliated address with no rescan
self.nodes[1].importaddress(address2, "add2", False)
- result2 = self.nodes[1].importprunedfunds(rawtxn2, proof2)
+ self.nodes[1].importprunedfunds(rawtxn2, proof2)
balance2 = self.nodes[1].getbalance("add2", 0, True)
assert_equal(balance2, Decimal('0.05'))
#Import with private key with no rescan
- self.nodes[1].importprivkey(address3_privkey, "add3", False)
- result3 = self.nodes[1].importprunedfunds(rawtxn3, proof3)
+ self.nodes[1].importprivkey(privkey=address3_privkey, label="add3", rescan=False)
+ self.nodes[1].importprunedfunds(rawtxn3, proof3)
balance3 = self.nodes[1].getbalance("add3", 0, False)
assert_equal(balance3, Decimal('0.025'))
balance3 = self.nodes[1].getbalance("*", 0, True)
@@ -101,7 +97,7 @@ class ImportPrunedFundsTest(BitcoinTestFramework):
assert_equal(address_info['ismine'], True)
#Remove transactions
- assert_raises_jsonrpc(-8, "Transaction does not exist in wallet.", self.nodes[1].removeprunedfunds, txnid1)
+ assert_raises_rpc_error(-8, "Transaction does not exist in wallet.", self.nodes[1].removeprunedfunds, txnid1)
balance1 = self.nodes[1].getbalance("*", 0, True)
assert_equal(balance1, Decimal('0.075'))
diff --git a/test/functional/invalidateblock.py b/test/functional/invalidateblock.py
index c499d57b90..dd3daf1e07 100755
--- a/test/functional/invalidateblock.py
+++ b/test/functional/invalidateblock.py
@@ -8,9 +8,7 @@ from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
class InvalidateTest(BitcoinTestFramework):
-
- def __init__(self):
- super().__init__()
+ def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 3
diff --git a/test/functional/invalidblockrequest.py b/test/functional/invalidblockrequest.py
index eabc0db8df..9f44b44927 100755
--- a/test/functional/invalidblockrequest.py
+++ b/test/functional/invalidblockrequest.py
@@ -23,9 +23,9 @@ class InvalidBlockRequestTest(ComparisonTestFramework):
''' Can either run this test as 1 node with expected answers, or two and compare them.
Change the "outcome" variable from each TestInstance object to only do the comparison. '''
- def __init__(self):
- super().__init__()
+ def set_test_params(self):
self.num_nodes = 1
+ self.setup_clean_chain = True
def run_test(self):
test = TestManager(self, self.options.tmpdir)
diff --git a/test/functional/invalidtxrequest.py b/test/functional/invalidtxrequest.py
index a9ac231f09..a22bd8f8cd 100755
--- a/test/functional/invalidtxrequest.py
+++ b/test/functional/invalidtxrequest.py
@@ -19,9 +19,9 @@ class InvalidTxRequestTest(ComparisonTestFramework):
''' Can either run this test as 1 node with expected answers, or two and compare them.
Change the "outcome" variable from each TestInstance object to only do the comparison. '''
- def __init__(self):
- super().__init__()
+ def set_test_params(self):
self.num_nodes = 1
+ self.setup_clean_chain = True
def run_test(self):
test = TestManager(self, self.options.tmpdir)
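
Nearly every file touched by this patch drops the `__init__`/`super().__init__()` boilerplate in favour of a `set_test_params()` override, which the framework calls before starting any nodes; subclasses only assign attributes such as `num_nodes`, `setup_clean_chain` and `extra_args` there. A minimal sketch of the resulting test shape (the class name `ExampleTest` and its parameter values are placeholders, not part of the patch):

    from test_framework.test_framework import BitcoinTestFramework

    class ExampleTest(BitcoinTestFramework):
        def set_test_params(self):
            # Attributes formerly assigned in __init__ now live here; the base
            # class supplies the constructor.
            self.num_nodes = 1
            self.setup_clean_chain = True
            self.extra_args = [[]]

        def run_test(self):
            self.nodes[0].generate(1)

    if __name__ == '__main__':
        ExampleTest().main()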
diff --git a/test/functional/keypool-topup.py b/test/functional/keypool-topup.py
new file mode 100755
index 0000000000..e7af3c3987
--- /dev/null
+++ b/test/functional/keypool-topup.py
@@ -0,0 +1,74 @@
+#!/usr/bin/env python3
+# Copyright (c) 2017 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test HD Wallet keypool restore function.
+
+Two nodes. Node1 is under test. Node0 is providing transactions and generating blocks.
+
+- Start node1, shutdown and backup wallet.
+- Generate 110 keys (enough to drain the keypool). Store key 90 (in the initial keypool) and key 110 (beyond the initial keypool). Send funds to key 90 and key 110.
+- Stop node1, clear the datadir, move wallet file back into the datadir and restart node1.
+- Connect node1 to node0. Verify that they sync and node1 receives its funds."""
+import shutil
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import (
+ assert_equal,
+ connect_nodes_bi,
+ sync_blocks,
+)
+
+class KeypoolRestoreTest(BitcoinTestFramework):
+ def set_test_params(self):
+ self.setup_clean_chain = True
+ self.num_nodes = 2
+ self.extra_args = [[], ['-keypool=100', '-keypoolmin=20']]
+
+ def run_test(self):
+ self.tmpdir = self.options.tmpdir
+ self.nodes[0].generate(101)
+
+ self.log.info("Make backup of wallet")
+
+ self.stop_node(1)
+
+ shutil.copyfile(self.tmpdir + "/node1/regtest/wallets/wallet.dat", self.tmpdir + "/wallet.bak")
+ self.start_node(1, self.extra_args[1])
+ connect_nodes_bi(self.nodes, 0, 1)
+
+ self.log.info("Generate keys for wallet")
+
+ for _ in range(90):
+ addr_oldpool = self.nodes[1].getnewaddress()
+ for _ in range(20):
+ addr_extpool = self.nodes[1].getnewaddress()
+
+ self.log.info("Send funds to wallet")
+
+ self.nodes[0].sendtoaddress(addr_oldpool, 10)
+ self.nodes[0].generate(1)
+ self.nodes[0].sendtoaddress(addr_extpool, 5)
+ self.nodes[0].generate(1)
+ sync_blocks(self.nodes)
+
+ self.log.info("Restart node with wallet backup")
+
+ self.stop_node(1)
+
+ shutil.copyfile(self.tmpdir + "/wallet.bak", self.tmpdir + "/node1/regtest/wallets/wallet.dat")
+
+ self.log.info("Verify keypool is restored and balance is correct")
+
+ self.start_node(1, self.extra_args[1])
+ connect_nodes_bi(self.nodes, 0, 1)
+ self.sync_all()
+
+ assert_equal(self.nodes[1].getbalance(), 15)
+ assert_equal(self.nodes[1].listtransactions()[0]['category'], "receive")
+
+ # Check that we have marked all keys up to the used keypool key as used
+ assert_equal(self.nodes[1].validateaddress(self.nodes[1].getnewaddress())['hdkeypath'], "m/0'/0'/110'")
+
+if __name__ == '__main__':
+ KeypoolRestoreTest().main()
diff --git a/test/functional/keypool.py b/test/functional/keypool.py
index e8be559918..f2701c36bd 100755
--- a/test/functional/keypool.py
+++ b/test/functional/keypool.py
@@ -8,6 +8,8 @@ from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
class KeyPoolTest(BitcoinTestFramework):
+ def set_test_params(self):
+ self.num_nodes = 1
def run_test(self):
nodes = self.nodes
@@ -17,17 +19,16 @@ class KeyPoolTest(BitcoinTestFramework):
assert(addr_before_encrypting_data['hdmasterkeyid'] == wallet_info_old['hdmasterkeyid'])
# Encrypt wallet and wait to terminate
- nodes[0].encryptwallet('test')
- self.bitcoind_processes[0].wait()
+ nodes[0].node_encrypt_wallet('test')
# Restart node 0
- nodes[0] = self.start_node(0, self.options.tmpdir)
+ self.start_node(0)
# Keep creating keys
addr = nodes[0].getnewaddress()
addr_data = nodes[0].validateaddress(addr)
wallet_info = nodes[0].getwalletinfo()
assert(addr_before_encrypting_data['hdmasterkeyid'] != wallet_info['hdmasterkeyid'])
assert(addr_data['hdmasterkeyid'] == wallet_info['hdmasterkeyid'])
- assert_raises_jsonrpc(-12, "Error: Keypool ran out, please call keypoolrefill first", nodes[0].getnewaddress)
+ assert_raises_rpc_error(-12, "Error: Keypool ran out, please call keypoolrefill first", nodes[0].getnewaddress)
# put six (plus 2) new keys in the keypool (100% external-, +100% internal-keys, 1 in min)
nodes[0].walletpassphrase('test', 12000)
@@ -46,7 +47,7 @@ class KeyPoolTest(BitcoinTestFramework):
nodes[0].getrawchangeaddress()
addr = set()
# the next one should fail
- assert_raises_jsonrpc(-12, "Keypool ran out", nodes[0].getrawchangeaddress)
+ assert_raises_rpc_error(-12, "Keypool ran out", nodes[0].getrawchangeaddress)
# drain the external keys
addr.add(nodes[0].getnewaddress())
@@ -57,7 +58,7 @@ class KeyPoolTest(BitcoinTestFramework):
addr.add(nodes[0].getnewaddress())
assert(len(addr) == 6)
# the next one should fail
- assert_raises_jsonrpc(-12, "Error: Keypool ran out, please call keypoolrefill first", nodes[0].getnewaddress)
+ assert_raises_rpc_error(-12, "Error: Keypool ran out, please call keypoolrefill first", nodes[0].getnewaddress)
# refill keypool with three new addresses
nodes[0].walletpassphrase('test', 1)
@@ -71,7 +72,7 @@ class KeyPoolTest(BitcoinTestFramework):
nodes[0].generate(1)
nodes[0].generate(1)
nodes[0].generate(1)
- assert_raises_jsonrpc(-12, "Keypool ran out", nodes[0].generate, 1)
+ assert_raises_rpc_error(-12, "Keypool ran out", nodes[0].generate, 1)
nodes[0].walletpassphrase('test', 100)
nodes[0].keypoolrefill(100)
@@ -79,10 +80,5 @@ class KeyPoolTest(BitcoinTestFramework):
assert_equal(wi['keypoolsize_hd_internal'], 100)
assert_equal(wi['keypoolsize'], 100)
- def __init__(self):
- super().__init__()
- self.setup_clean_chain = False
- self.num_nodes = 1
-
if __name__ == '__main__':
KeyPoolTest().main()
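
The encrypt-then-restart sequence above is now wrapped in the `node_encrypt_wallet()` helper: encrypting the wallet shuts bitcoind down, and the helper waits for that shutdown instead of the test polling `bitcoind_processes[0].wait()` itself, so only a plain `start_node(0)` is needed afterwards. A hedged sketch of the pattern as used above (assumed to run as a method inside a `BitcoinTestFramework` test with at least one node; the 60-second unlock is arbitrary):

    def encrypt_and_restart(self, passphrase='test'):
        # node_encrypt_wallet encrypts the wallet and waits for the node to stop.
        self.nodes[0].node_encrypt_wallet(passphrase)
        self.start_node(0)
        # Unlock so subsequent key-generating calls succeed.
        self.nodes[0].walletpassphrase(passphrase, 60)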
diff --git a/test/functional/listsinceblock.py b/test/functional/listsinceblock.py
index f3d41e573e..67e7744bf8 100755
--- a/test/functional/listsinceblock.py
+++ b/test/functional/listsinceblock.py
@@ -5,16 +5,55 @@
"""Test the listsincelast RPC."""
from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import assert_equal
+from test_framework.util import assert_equal, assert_array_result, assert_raises_rpc_error
class ListSinceBlockTest (BitcoinTestFramework):
-
- def __init__(self):
- super().__init__()
- self.setup_clean_chain = True
+ def set_test_params(self):
self.num_nodes = 4
+ self.setup_clean_chain = True
+
+ def run_test(self):
+ self.nodes[2].generate(101)
+ self.sync_all()
+
+ self.test_no_blockhash()
+ self.test_invalid_blockhash()
+ self.test_reorg()
+ self.test_double_spend()
+ self.test_double_send()
+
+ def test_no_blockhash(self):
+ txid = self.nodes[2].sendtoaddress(self.nodes[0].getnewaddress(), 1)
+ blockhash, = self.nodes[2].generate(1)
+ self.sync_all()
+
+ txs = self.nodes[0].listtransactions()
+ assert_array_result(txs, {"txid": txid}, {
+ "category": "receive",
+ "amount": 1,
+ "blockhash": blockhash,
+ "confirmations": 1,
+ })
+ assert_equal(
+ self.nodes[0].listsinceblock(),
+ {"lastblock": blockhash,
+ "removed": [],
+ "transactions": txs})
+ assert_equal(
+ self.nodes[0].listsinceblock(""),
+ {"lastblock": blockhash,
+ "removed": [],
+ "transactions": txs})
+
+ def test_invalid_blockhash(self):
+ assert_raises_rpc_error(-5, "Block not found", self.nodes[0].listsinceblock,
+ "42759cde25462784395a337460bde75f58e73d3f08bd31fdc3507cbac856a2c4")
+ assert_raises_rpc_error(-5, "Block not found", self.nodes[0].listsinceblock,
+ "0000000000000000000000000000000000000000000000000000000000000000")
+ assert_raises_rpc_error(-5, "Block not found", self.nodes[0].listsinceblock,
+ "invalid-hex")
- def run_test (self):
+ def test_reorg(self):
'''
`listsinceblock` did not behave correctly when handed a block that was
no longer in the main chain:
@@ -43,14 +82,6 @@ class ListSinceBlockTest (BitcoinTestFramework):
This test only checks that [tx0] is present.
'''
- self.nodes[2].generate(101)
- self.sync_all()
-
- assert_equal(self.nodes[0].getbalance(), 0)
- assert_equal(self.nodes[1].getbalance(), 0)
- assert_equal(self.nodes[2].getbalance(), 50)
- assert_equal(self.nodes[3].getbalance(), 0)
-
# Split network into two
self.split_network()
@@ -73,7 +104,177 @@ class ListSinceBlockTest (BitcoinTestFramework):
if tx['txid'] == senttx:
found = True
break
- assert_equal(found, True)
+ assert found
+
+ def test_double_spend(self):
+ '''
+ This tests the case where the same UTXO is spent twice on two separate
+ blocks as part of a reorg.
+
+ ab0
+ / \
+ aa1 [tx1] bb1 [tx2]
+ | |
+ aa2 bb2
+ | |
+ aa3 bb3
+ |
+ bb4
+
+ Problematic case:
+
+ 1. User 1 receives BTC in tx1 from utxo1 in block aa1.
+ 2. User 2 receives BTC in tx2 from utxo1 (the same UTXO) in block bb1.
+ 3. User 1 sees 2 confirmations at block aa3.
+ 4. Reorg into bb chain.
+ 5. User 1 asks `listsinceblock aa3` and does not see that tx1 is now
+ invalidated.
+
+ The current solution is to detect when listsinceblock is handed a
+ reorged-out block, iterate back over existing blocks up to the fork
+ point, and include every transaction that relates to the node's wallet.
+ '''
+
+ self.sync_all()
+
+ # Split network into two
+ self.split_network()
+
+ # share utxo between nodes[1] and nodes[2]
+ utxos = self.nodes[2].listunspent()
+ utxo = utxos[0]
+ privkey = self.nodes[2].dumpprivkey(utxo['address'])
+ self.nodes[1].importprivkey(privkey)
+
+ # send from nodes[1] using utxo to nodes[0]
+ change = '%.8f' % (float(utxo['amount']) - 1.0003)
+ recipientDict = {
+ self.nodes[0].getnewaddress(): 1,
+ self.nodes[1].getnewaddress(): change,
+ }
+ utxoDicts = [{
+ 'txid': utxo['txid'],
+ 'vout': utxo['vout'],
+ }]
+ txid1 = self.nodes[1].sendrawtransaction(
+ self.nodes[1].signrawtransaction(
+ self.nodes[1].createrawtransaction(utxoDicts, recipientDict))['hex'])
+
+ # send from nodes[2] using utxo to nodes[3]
+ recipientDict2 = {
+ self.nodes[3].getnewaddress(): 1,
+ self.nodes[2].getnewaddress(): change,
+ }
+ self.nodes[2].sendrawtransaction(
+ self.nodes[2].signrawtransaction(
+ self.nodes[2].createrawtransaction(utxoDicts, recipientDict2))['hex'])
+
+ # generate on both sides
+ lastblockhash = self.nodes[1].generate(3)[2]
+ self.nodes[2].generate(4)
+
+ self.join_network()
+
+ self.sync_all()
+
+ # gettransaction should work for txid1
+ assert self.nodes[0].gettransaction(txid1)['txid'] == txid1, "gettransaction failed to find txid1"
+
+ # listsinceblock(lastblockhash) should now include txid1, as seen from nodes[0]
+ lsbres = self.nodes[0].listsinceblock(lastblockhash)
+ assert any(tx['txid'] == txid1 for tx in lsbres['removed'])
+
+ # but it should not include 'removed' if include_removed=false
+ lsbres2 = self.nodes[0].listsinceblock(blockhash=lastblockhash, include_removed=False)
+ assert 'removed' not in lsbres2
+
+ def test_double_send(self):
+ '''
+ This tests the case where the same transaction is submitted twice on two
+ separate blocks as part of a reorg. The former will vanish and the
+ latter will appear as the true transaction (with confirmations dropping
+ as a result).
+
+ ab0
+ / \
+ aa1 [tx1] bb1
+ | |
+ aa2 bb2
+ | |
+ aa3 bb3 [tx1]
+ |
+ bb4
+
+ Asserted:
+
+ 1. tx1 is listed in listsinceblock.
+ 2. It is included in 'removed' as it was removed, even though it is now
+ present in a different block.
+ 3. It is listed with a confirmations count of 2 (bb3, bb4), not
+ 3 (aa1, aa2, aa3).
+ '''
+
+ self.sync_all()
+
+ # Split network into two
+ self.split_network()
+
+ # create and sign a transaction
+ utxos = self.nodes[2].listunspent()
+ utxo = utxos[0]
+ change = '%.8f' % (float(utxo['amount']) - 1.0003)
+ recipientDict = {
+ self.nodes[0].getnewaddress(): 1,
+ self.nodes[2].getnewaddress(): change,
+ }
+ utxoDicts = [{
+ 'txid': utxo['txid'],
+ 'vout': utxo['vout'],
+ }]
+ signedtxres = self.nodes[2].signrawtransaction(
+ self.nodes[2].createrawtransaction(utxoDicts, recipientDict))
+ assert signedtxres['complete']
+
+ signedtx = signedtxres['hex']
+
+ # send from nodes[1]; this will end up in aa1
+ txid1 = self.nodes[1].sendrawtransaction(signedtx)
+
+ # generate bb1-bb2 on right side
+ self.nodes[2].generate(2)
+
+ # send from nodes[2]; this will end up in bb3
+ txid2 = self.nodes[2].sendrawtransaction(signedtx)
+
+ assert_equal(txid1, txid2)
+
+ # generate on both sides
+ lastblockhash = self.nodes[1].generate(3)[2]
+ self.nodes[2].generate(2)
+
+ self.join_network()
+
+ self.sync_all()
+
+ # gettransaction should work for txid1
+ self.nodes[0].gettransaction(txid1)
+
+ # listsinceblock(lastblockhash) should now include txid1 in transactions
+ # as well as in removed
+ lsbres = self.nodes[0].listsinceblock(lastblockhash)
+ assert any(tx['txid'] == txid1 for tx in lsbres['transactions'])
+ assert any(tx['txid'] == txid1 for tx in lsbres['removed'])
+
+ # find transaction and ensure confirmations is valid
+ for tx in lsbres['transactions']:
+ if tx['txid'] == txid1:
+ assert_equal(tx['confirmations'], 2)
+
+ # the same check for the removed array; confirmations should STILL be 2
+ for tx in lsbres['removed']:
+ if tx['txid'] == txid1:
+ assert_equal(tx['confirmations'], 2)
if __name__ == '__main__':
ListSinceBlockTest().main()
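
test_double_spend and test_double_send above exercise the new `removed` array and the `include_removed` parameter of `listsinceblock`. A hedged sketch of how a wallet consumer might use them to spot transactions knocked out by a reorg (`node` is assumed to be an RPC proxy and `last_seen_block` a block hash the caller stored earlier; neither name comes from the patch):

    def find_reorged_out_txids(node, last_seen_block):
        # When last_seen_block is no longer on the main chain, listsinceblock
        # walks back to the fork point and reports reorged-out wallet
        # transactions under 'removed' (omit that key with include_removed=False).
        result = node.listsinceblock(last_seen_block)
        confirmed = {tx['txid'] for tx in result['transactions']
                     if tx.get('confirmations', 0) > 0}
        # A txid that only appears in 'removed' was not re-confirmed on the new chain.
        return [tx['txid'] for tx in result['removed'] if tx['txid'] not in confirmed]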
diff --git a/test/functional/listtransactions.py b/test/functional/listtransactions.py
index f75a8e29cc..e4522cc3b5 100755
--- a/test/functional/listtransactions.py
+++ b/test/functional/listtransactions.py
@@ -16,15 +16,9 @@ def txFromHex(hexstring):
return tx
class ListTransactionsTest(BitcoinTestFramework):
- def __init__(self):
- super().__init__()
- self.num_nodes = 4
- self.setup_clean_chain = False
-
- def setup_nodes(self):
- #This test requires mocktime
+ def set_test_params(self):
+ self.num_nodes = 2
self.enable_mocktime()
- self.nodes = self.start_nodes(self.num_nodes, self.options.tmpdir)
def run_test(self):
# Simple send, 0 to 1:
diff --git a/test/functional/maxuploadtarget.py b/test/functional/maxuploadtarget.py
index 66e5bd29e6..5ef71c93cf 100755
--- a/test/functional/maxuploadtarget.py
+++ b/test/functional/maxuploadtarget.py
@@ -17,22 +17,21 @@ from test_framework.mininode import *
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
-class TestNode(NodeConnCB):
+class TestNode(P2PInterface):
def __init__(self):
super().__init__()
self.block_receive_map = defaultdict(int)
- def on_inv(self, conn, message):
+ def on_inv(self, message):
pass
- def on_block(self, conn, message):
+ def on_block(self, message):
message.block.calc_sha256()
self.block_receive_map[message.block.sha256] += 1
class MaxUploadTest(BitcoinTestFramework):
- def __init__(self):
- super().__init__()
+ def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
self.extra_args = [["-maxuploadtarget=800", "-blockmaxsize=999000"]]
@@ -50,19 +49,17 @@ class MaxUploadTest(BitcoinTestFramework):
# Generate some old blocks
self.nodes[0].generate(130)
- # test_nodes[0] will only request old blocks
- # test_nodes[1] will only request new blocks
- # test_nodes[2] will test resetting the counters
- test_nodes = []
- connections = []
+ # p2p_conns[0] will only request old blocks
+ # p2p_conns[1] will only request new blocks
+ # p2p_conns[2] will test resetting the counters
+ p2p_conns = []
- for i in range(3):
- test_nodes.append(TestNode())
- connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], test_nodes[i]))
- test_nodes[i].add_connection(connections[i])
+ for _ in range(3):
+ p2p_conns.append(self.nodes[0].add_p2p_connection(TestNode()))
NetworkThread().start() # Start up network handling in another thread
- [x.wait_for_verack() for x in test_nodes]
+ for p2pc in p2p_conns:
+ p2pc.wait_for_verack()
# Test logic begins here
@@ -84,7 +81,7 @@ class MaxUploadTest(BitcoinTestFramework):
big_new_block = self.nodes[0].getbestblockhash()
big_new_block = int(big_new_block, 16)
- # test_nodes[0] will test what happens if we just keep requesting the
+ # p2p_conns[0] will test what happens if we just keep requesting the
# the same big old block too many times (expect: disconnect)
getdata_request = msg_getdata()
@@ -98,34 +95,34 @@ class MaxUploadTest(BitcoinTestFramework):
# 576MB will be reserved for relaying new blocks, so expect this to
# succeed for ~235 tries.
for i in range(success_count):
- test_nodes[0].send_message(getdata_request)
- test_nodes[0].sync_with_ping()
- assert_equal(test_nodes[0].block_receive_map[big_old_block], i+1)
+ p2p_conns[0].send_message(getdata_request)
+ p2p_conns[0].sync_with_ping()
+ assert_equal(p2p_conns[0].block_receive_map[big_old_block], i+1)
assert_equal(len(self.nodes[0].getpeerinfo()), 3)
# At most a couple more tries should succeed (depending on how long
# the test has been running so far).
for i in range(3):
- test_nodes[0].send_message(getdata_request)
- test_nodes[0].wait_for_disconnect()
+ p2p_conns[0].send_message(getdata_request)
+ p2p_conns[0].wait_for_disconnect()
assert_equal(len(self.nodes[0].getpeerinfo()), 2)
self.log.info("Peer 0 disconnected after downloading old block too many times")
- # Requesting the current block on test_nodes[1] should succeed indefinitely,
+ # Requesting the current block on p2p_conns[1] should succeed indefinitely,
# even when over the max upload target.
# We'll try 800 times
getdata_request.inv = [CInv(2, big_new_block)]
for i in range(800):
- test_nodes[1].send_message(getdata_request)
- test_nodes[1].sync_with_ping()
- assert_equal(test_nodes[1].block_receive_map[big_new_block], i+1)
+ p2p_conns[1].send_message(getdata_request)
+ p2p_conns[1].sync_with_ping()
+ assert_equal(p2p_conns[1].block_receive_map[big_new_block], i+1)
self.log.info("Peer 1 able to repeatedly download new block")
- # But if test_nodes[1] tries for an old block, it gets disconnected too.
+ # But if p2p_conns[1] tries for an old block, it gets disconnected too.
getdata_request.inv = [CInv(2, big_old_block)]
- test_nodes[1].send_message(getdata_request)
- test_nodes[1].wait_for_disconnect()
+ p2p_conns[1].send_message(getdata_request)
+ p2p_conns[1].wait_for_disconnect()
assert_equal(len(self.nodes[0].getpeerinfo()), 1)
self.log.info("Peer 1 disconnected after trying to download old block")
@@ -133,39 +130,37 @@ class MaxUploadTest(BitcoinTestFramework):
self.log.info("Advancing system time on node to clear counters...")
# If we advance the time by 24 hours, then the counters should reset,
- # and test_nodes[2] should be able to retrieve the old block.
+ # and p2p_conns[2] should be able to retrieve the old block.
self.nodes[0].setmocktime(int(time.time()))
- test_nodes[2].sync_with_ping()
- test_nodes[2].send_message(getdata_request)
- test_nodes[2].sync_with_ping()
- assert_equal(test_nodes[2].block_receive_map[big_old_block], 1)
+ p2p_conns[2].sync_with_ping()
+ p2p_conns[2].send_message(getdata_request)
+ p2p_conns[2].sync_with_ping()
+ assert_equal(p2p_conns[2].block_receive_map[big_old_block], 1)
self.log.info("Peer 2 able to download old block")
- [c.disconnect_node() for c in connections]
+ self.nodes[0].disconnect_p2ps()
#stop and start node 0 with 1MB maxuploadtarget, whitelist 127.0.0.1
self.log.info("Restarting nodes with -whitelist=127.0.0.1")
self.stop_node(0)
- self.nodes[0] = self.start_node(0, self.options.tmpdir, ["-whitelist=127.0.0.1", "-maxuploadtarget=1", "-blockmaxsize=999000"])
+ self.start_node(0, ["-whitelist=127.0.0.1", "-maxuploadtarget=1", "-blockmaxsize=999000"])
- #recreate/reconnect a test node
- test_nodes = [TestNode()]
- connections = [NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], test_nodes[0])]
- test_nodes[0].add_connection(connections[0])
+ # Reconnect to self.nodes[0]
+ self.nodes[0].add_p2p_connection(TestNode())
NetworkThread().start() # Start up network handling in another thread
- test_nodes[0].wait_for_verack()
+ self.nodes[0].p2p.wait_for_verack()
#retrieve 20 blocks which should be enough to break the 1MB limit
getdata_request.inv = [CInv(2, big_new_block)]
for i in range(20):
- test_nodes[0].send_message(getdata_request)
- test_nodes[0].sync_with_ping()
- assert_equal(test_nodes[0].block_receive_map[big_new_block], i+1)
+ self.nodes[0].p2p.send_message(getdata_request)
+ self.nodes[0].p2p.sync_with_ping()
+ assert_equal(self.nodes[0].p2p.block_receive_map[big_new_block], i+1)
getdata_request.inv = [CInv(2, big_old_block)]
- test_nodes[0].send_and_ping(getdata_request)
+ self.nodes[0].p2p.send_and_ping(getdata_request)
assert_equal(len(self.nodes[0].getpeerinfo()), 1) #node is still connected because of the whitelist
self.log.info("Peer still connected after trying to download old block (whitelisted)")
diff --git a/test/functional/mempool_limit.py b/test/functional/mempool_limit.py
index 2777291dd0..e24dc5a464 100755
--- a/test/functional/mempool_limit.py
+++ b/test/functional/mempool_limit.py
@@ -8,9 +8,7 @@ from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
class MempoolLimitTest(BitcoinTestFramework):
-
- def __init__(self):
- super().__init__()
+ def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
self.extra_args = [["-maxmempool=5", "-spendzeroconfchange=0"]]
diff --git a/test/functional/mempool_packages.py b/test/functional/mempool_packages.py
index e225493816..b845c75681 100755
--- a/test/functional/mempool_packages.py
+++ b/test/functional/mempool_packages.py
@@ -12,10 +12,8 @@ MAX_ANCESTORS = 25
MAX_DESCENDANTS = 25
class MempoolPackagesTest(BitcoinTestFramework):
- def __init__(self):
- super().__init__()
+ def set_test_params(self):
self.num_nodes = 2
- self.setup_clean_chain = False
self.extra_args = [["-maxorphantx=1000"], ["-maxorphantx=1000", "-limitancestorcount=5"]]
# Build a transaction that spends parent_txid:vout
@@ -117,7 +115,7 @@ class MempoolPackagesTest(BitcoinTestFramework):
assert_equal(mempool[x]['descendantfees'], descendant_fees * COIN + 1000)
# Adding one more transaction on to the chain should fail.
- assert_raises_jsonrpc(-26, "too-long-mempool-chain", self.chain_transaction, self.nodes[0], txid, vout, value, fee, 1)
+ assert_raises_rpc_error(-26, "too-long-mempool-chain", self.chain_transaction, self.nodes[0], txid, vout, value, fee, 1)
# Check that prioritising a tx before it's added to the mempool works
# First clear the mempool by mining a block.
@@ -169,7 +167,7 @@ class MempoolPackagesTest(BitcoinTestFramework):
# Sending one more chained transaction will fail
utxo = transaction_package.pop(0)
- assert_raises_jsonrpc(-26, "too-long-mempool-chain", self.chain_transaction, self.nodes[0], utxo['txid'], utxo['vout'], utxo['amount'], fee, 10)
+ assert_raises_rpc_error(-26, "too-long-mempool-chain", self.chain_transaction, self.nodes[0], utxo['txid'], utxo['vout'], utxo['amount'], fee, 10)
# TODO: check that node1's mempool is as expected
@@ -213,7 +211,7 @@ class MempoolPackagesTest(BitcoinTestFramework):
value = send_value
# Create tx1
- (tx1_id, tx1_value) = self.chain_transaction(self.nodes[0], tx0_id, 0, value, fee, 1)
+ tx1_id, _ = self.chain_transaction(self.nodes[0], tx0_id, 0, value, fee, 1)
# Create tx2-7
vout = 1
diff --git a/test/functional/mempool_persist.py b/test/functional/mempool_persist.py
index e0889fd5e9..31a96ec60e 100755
--- a/test/functional/mempool_persist.py
+++ b/test/functional/mempool_persist.py
@@ -28,21 +28,22 @@ Test is as follows:
- Restart node0 with -persistmempool. Verify that it has 5
transactions in its mempool. This tests that -persistmempool=0
does not overwrite a previously valid mempool stored on disk.
+ - Remove node0's mempool.dat and verify that the savemempool RPC recreates it,
+ and that node1 can load it and ends up with 5 transactions in its
+ mempool.
+ - Verify that the savemempool RPC throws an error if node1 can't
+ write to disk.
"""
+import os
import time
-from test_framework.mininode import wait_until
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
class MempoolPersistTest(BitcoinTestFramework):
-
- def __init__(self):
- super().__init__()
- # We need 3 nodes for this test. Node1 does not have a persistent mempool.
+ def set_test_params(self):
self.num_nodes = 3
- self.setup_clean_chain = False
self.extra_args = [[], ["-persistmempool=0"], []]
def run_test(self):
@@ -56,35 +57,60 @@ class MempoolPersistTest(BitcoinTestFramework):
self.log.debug("Send 5 transactions from node2 (to its own address)")
for i in range(5):
self.nodes[2].sendtoaddress(self.nodes[2].getnewaddress(), Decimal("10"))
+ node2_balance = self.nodes[2].getbalance()
self.sync_all()
self.log.debug("Verify that node0 and node1 have 5 transactions in their mempools")
assert_equal(len(self.nodes[0].getrawmempool()), 5)
assert_equal(len(self.nodes[1].getrawmempool()), 5)
- self.log.debug("Stop-start node0 and node1. Verify that node0 has the transactions in its mempool and node1 does not.")
+ self.log.debug("Stop-start the nodes. Verify that node0 has the transactions in its mempool and node1 does not. Verify that node2 calculates its balance correctly after loading wallet transactions.")
self.stop_nodes()
- self.nodes = []
- self.nodes.append(self.start_node(0, self.options.tmpdir))
- self.nodes.append(self.start_node(1, self.options.tmpdir))
+ self.start_node(0)
+ self.start_node(1)
+ self.start_node(2)
# Give bitcoind a second to reload the mempool
time.sleep(1)
- assert wait_until(lambda: len(self.nodes[0].getrawmempool()) == 5)
+ wait_until(lambda: len(self.nodes[0].getrawmempool()) == 5)
+ wait_until(lambda: len(self.nodes[2].getrawmempool()) == 5)
assert_equal(len(self.nodes[1].getrawmempool()), 0)
+ # Verify accounting of mempool transactions after restart is correct
+ assert_equal(node2_balance, self.nodes[2].getbalance())
+
self.log.debug("Stop-start node0 with -persistmempool=0. Verify that it doesn't load its mempool.dat file.")
self.stop_nodes()
- self.nodes = []
- self.nodes.append(self.start_node(0, self.options.tmpdir, ["-persistmempool=0"]))
+ self.start_node(0, extra_args=["-persistmempool=0"])
# Give bitcoind a second to reload the mempool
time.sleep(1)
assert_equal(len(self.nodes[0].getrawmempool()), 0)
self.log.debug("Stop-start node0. Verify that it has the transactions in its mempool.")
self.stop_nodes()
- self.nodes = []
- self.nodes.append(self.start_node(0, self.options.tmpdir))
- assert wait_until(lambda: len(self.nodes[0].getrawmempool()) == 5)
+ self.start_node(0)
+ wait_until(lambda: len(self.nodes[0].getrawmempool()) == 5)
+
+ mempooldat0 = os.path.join(self.options.tmpdir, 'node0', 'regtest', 'mempool.dat')
+ mempooldat1 = os.path.join(self.options.tmpdir, 'node1', 'regtest', 'mempool.dat')
+ self.log.debug("Remove the mempool.dat file. Verify that savemempool to disk via RPC re-creates it")
+ os.remove(mempooldat0)
+ self.nodes[0].savemempool()
+ assert os.path.isfile(mempooldat0)
+
+ self.log.debug("Stop nodes, make node1 use mempool.dat from node0. Verify it has 5 transactions")
+ os.rename(mempooldat0, mempooldat1)
+ self.stop_nodes()
+ self.start_node(1, extra_args=[])
+ wait_until(lambda: len(self.nodes[1].getrawmempool()) == 5)
+
+ self.log.debug("Prevent bitcoind from writing mempool.dat to disk. Verify that `savemempool` fails")
+ # To test the exception we set bad permissions on a temporary file called mempool.dat.new.
+ # Relying on that filename is an implementation detail that could change and break this test.
+ mempooldotnew1 = mempooldat1 + '.new'
+ with os.fdopen(os.open(mempooldotnew1, os.O_CREAT, 0o000), 'w'):
+ pass
+ assert_raises_rpc_error(-1, "Unable to dump mempool to disk", self.nodes[1].savemempool)
+ os.remove(mempooldotnew1)
if __name__ == '__main__':
MempoolPersistTest().main()
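
The new `savemempool` failure case above works by pre-creating `mempool.dat.new` with no permission bits, so the node cannot write its temporary dump file. A standalone sketch of just that trick, outside the framework (the temporary directory is an assumption of the sketch):

    import os
    import tempfile

    # Mirror the permission trick from the hunk above: an empty, mode-000
    # mempool.dat.new that a non-root process cannot open for writing.
    workdir = tempfile.mkdtemp()
    blocker = os.path.join(workdir, 'mempool.dat.new')
    with os.fdopen(os.open(blocker, os.O_CREAT, 0o000), 'w'):
        pass
    assert os.stat(blocker).st_mode & 0o777 == 0
    os.remove(blocker)
    os.rmdir(workdir)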
diff --git a/test/functional/mempool_reorg.py b/test/functional/mempool_reorg.py
index 937bf4bab5..2803371f5b 100755
--- a/test/functional/mempool_reorg.py
+++ b/test/functional/mempool_reorg.py
@@ -13,10 +13,8 @@ from test_framework.util import *
# Create one-input, one-output, no-fee transaction:
class MempoolCoinbaseTest(BitcoinTestFramework):
- def __init__(self):
- super().__init__()
+ def set_test_params(self):
self.num_nodes = 2
- self.setup_clean_chain = False
self.extra_args = [["-checkmempool"]] * 2
alert_filename = None # Set by setup_network
@@ -52,14 +50,14 @@ class MempoolCoinbaseTest(BitcoinTestFramework):
timelock_tx = timelock_tx[:-8] + hex(self.nodes[0].getblockcount() + 2)[2:] + "000000"
timelock_tx = self.nodes[0].signrawtransaction(timelock_tx)["hex"]
# This will raise an exception because the timelock transaction is too immature to spend
- assert_raises_jsonrpc(-26, "non-final", self.nodes[0].sendrawtransaction, timelock_tx)
+ assert_raises_rpc_error(-26, "non-final", self.nodes[0].sendrawtransaction, timelock_tx)
# Broadcast and mine spend_102 and 103:
spend_102_id = self.nodes[0].sendrawtransaction(spend_102_raw)
spend_103_id = self.nodes[0].sendrawtransaction(spend_103_raw)
self.nodes[0].generate(1)
# Time-locked transaction is still too immature to spend
- assert_raises_jsonrpc(-26,'non-final', self.nodes[0].sendrawtransaction, timelock_tx)
+ assert_raises_rpc_error(-26,'non-final', self.nodes[0].sendrawtransaction, timelock_tx)
# Create 102_1 and 103_1:
spend_102_1_raw = create_tx(self.nodes[0], spend_102_id, node1_address, 49.98)
diff --git a/test/functional/mempool_resurrect_test.py b/test/functional/mempool_resurrect_test.py
index a2f6228df9..1263c9306b 100755
--- a/test/functional/mempool_resurrect_test.py
+++ b/test/functional/mempool_resurrect_test.py
@@ -9,12 +9,8 @@ from test_framework.util import *
# Create one-input, one-output, no-fee transaction:
class MempoolCoinbaseTest(BitcoinTestFramework):
-
- def __init__(self):
- super().__init__()
+ def set_test_params(self):
self.num_nodes = 1
- self.setup_clean_chain = False
- # Just need one node for this test
self.extra_args = [["-checkmempool"]]
def run_test(self):
diff --git a/test/functional/mempool_spendcoinbase.py b/test/functional/mempool_spendcoinbase.py
index 277ea45ad5..6e8a635a76 100755
--- a/test/functional/mempool_spendcoinbase.py
+++ b/test/functional/mempool_spendcoinbase.py
@@ -17,11 +17,8 @@ from test_framework.util import *
# Create one-input, one-output, no-fee transaction:
class MempoolSpendCoinbaseTest(BitcoinTestFramework):
-
- def __init__(self):
- super().__init__()
+ def set_test_params(self):
self.num_nodes = 1
- self.setup_clean_chain = False
self.extra_args = [["-checkmempool"]]
def run_test(self):
@@ -39,7 +36,7 @@ class MempoolSpendCoinbaseTest(BitcoinTestFramework):
spend_101_id = self.nodes[0].sendrawtransaction(spends_raw[0])
# coinbase at height 102 should be too immature to spend
- assert_raises_jsonrpc(-26,"bad-txns-premature-spend-of-coinbase", self.nodes[0].sendrawtransaction, spends_raw[1])
+ assert_raises_rpc_error(-26,"bad-txns-premature-spend-of-coinbase", self.nodes[0].sendrawtransaction, spends_raw[1])
# mempool should have just spend_101:
assert_equal(self.nodes[0].getrawmempool(), [ spend_101_id ])
diff --git a/test/functional/merkle_blocks.py b/test/functional/merkle_blocks.py
index bcc65c8408..b3989a4c54 100755
--- a/test/functional/merkle_blocks.py
+++ b/test/functional/merkle_blocks.py
@@ -8,11 +8,9 @@ from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
class MerkleBlockTest(BitcoinTestFramework):
-
- def __init__(self):
- super().__init__()
- self.setup_clean_chain = True
+ def set_test_params(self):
self.num_nodes = 4
+ self.setup_clean_chain = True
# Nodes 0/1 are "wallet" nodes, Nodes 2/3 are used for testing
self.extra_args = [[], [], [], ["-txindex"]]
@@ -40,7 +38,7 @@ class MerkleBlockTest(BitcoinTestFramework):
tx2 = self.nodes[0].createrawtransaction([node0utxos.pop()], {self.nodes[1].getnewaddress(): 49.99})
txid2 = self.nodes[0].sendrawtransaction(self.nodes[0].signrawtransaction(tx2)["hex"])
# This will raise an exception because the transaction is not yet in a block
- assert_raises_jsonrpc(-5, "Transaction not yet in block", self.nodes[0].gettxoutproof, [txid1])
+ assert_raises_rpc_error(-5, "Transaction not yet in block", self.nodes[0].gettxoutproof, [txid1])
self.nodes[0].generate(1)
blockhash = self.nodes[0].getblockhash(chain_height + 1)
@@ -65,11 +63,11 @@ class MerkleBlockTest(BitcoinTestFramework):
txid_unspent = txid1 if txin_spent["txid"] != txid1 else txid2
# We can't find the block from a fully-spent tx
- assert_raises_jsonrpc(-5, "Transaction not yet in block", self.nodes[2].gettxoutproof, [txid_spent])
+ assert_raises_rpc_error(-5, "Transaction not yet in block", self.nodes[2].gettxoutproof, [txid_spent])
# We can get the proof if we specify the block
assert_equal(self.nodes[2].verifytxoutproof(self.nodes[2].gettxoutproof([txid_spent], blockhash)), [txid_spent])
# We can't get the proof if we specify a non-existent block
- assert_raises_jsonrpc(-5, "Block not found", self.nodes[2].gettxoutproof, [txid_spent], "00000000000000000000000000000000")
+ assert_raises_rpc_error(-5, "Block not found", self.nodes[2].gettxoutproof, [txid_spent], "00000000000000000000000000000000")
# We can get the proof if the transaction is unspent
assert_equal(self.nodes[2].verifytxoutproof(self.nodes[2].gettxoutproof([txid_unspent])), [txid_unspent])
# We can get the proof if we provide a list of transactions and one of them is unspent. The ordering of the list should not matter.
@@ -78,7 +76,7 @@ class MerkleBlockTest(BitcoinTestFramework):
# We can always get a proof if we have a -txindex
assert_equal(self.nodes[2].verifytxoutproof(self.nodes[3].gettxoutproof([txid_spent])), [txid_spent])
# We can't get a proof if we specify transactions from different blocks
- assert_raises_jsonrpc(-5, "Not all transactions found in specified or retrieved block", self.nodes[2].gettxoutproof, [txid1, txid3])
+ assert_raises_rpc_error(-5, "Not all transactions found in specified or retrieved block", self.nodes[2].gettxoutproof, [txid1, txid3])
if __name__ == '__main__':
diff --git a/test/functional/minchainwork.py b/test/functional/minchainwork.py
new file mode 100755
index 0000000000..90a3de0e0d
--- /dev/null
+++ b/test/functional/minchainwork.py
@@ -0,0 +1,89 @@
+#!/usr/bin/env python3
+# Copyright (c) 2017 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test logic for setting nMinimumChainWork on command line.
+
+Nodes don't consider themselves out of "initial block download" until
+their active chain has more work than nMinimumChainWork.
+
+Nodes don't download blocks from a peer unless the peer's best known block
+has more work than nMinimumChainWork.
+
+While in initial block download, nodes won't relay blocks to their peers, so
+test that this parameter functions as intended by verifying that block relay
+only succeeds past a given node once its nMinimumChainWork has been exceeded.
+"""
+
+import time
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import connect_nodes, assert_equal
+
+# 2 hashes required per regtest block (with no difficulty adjustment)
+REGTEST_WORK_PER_BLOCK = 2
+
+class MinimumChainWorkTest(BitcoinTestFramework):
+ def set_test_params(self):
+ self.setup_clean_chain = True
+ self.num_nodes = 3
+
+ self.extra_args = [[], ["-minimumchainwork=0x65"], ["-minimumchainwork=0x65"]]
+ self.node_min_work = [0, 101, 101]
+
+ def setup_network(self):
+ # This test relies on the chain setup being:
+ # node0 <- node1 <- node2
+ # Before leaving IBD, nodes prefer to download blocks from outbound
+ # peers, so ensure that we're mining on an outbound peer and testing
+ # block relay to inbound peers.
+ self.setup_nodes()
+ for i in range(self.num_nodes-1):
+ connect_nodes(self.nodes[i+1], i)
+
+ def run_test(self):
+ # Start building a chain on node0. node2 shouldn't be able to sync until node1's
+ # minchainwork is exceeded
+ starting_chain_work = REGTEST_WORK_PER_BLOCK # Genesis block's work
+ self.log.info("Testing relay across node %d (minChainWork = %d)", 1, self.node_min_work[1])
+
+ starting_blockcount = self.nodes[2].getblockcount()
+
+ num_blocks_to_generate = int((self.node_min_work[1] - starting_chain_work) / REGTEST_WORK_PER_BLOCK)
+ self.log.info("Generating %d blocks on node0", num_blocks_to_generate)
+ hashes = self.nodes[0].generate(num_blocks_to_generate)
+
+ self.log.info("Node0 current chain work: %s", self.nodes[0].getblockheader(hashes[-1])['chainwork'])
+
+ # Sleep a few seconds and verify that node2 didn't get any new blocks
+ # or headers. We sleep, rather than sync_blocks(node0, node1) because
+ # it's reasonable either way for node1 to get the blocks, or not get
+ # them (since they're below node1's minchainwork).
+ time.sleep(3)
+
+ self.log.info("Verifying node 2 has no more blocks than before")
+ self.log.info("Blockcounts: %s", [n.getblockcount() for n in self.nodes])
+ # Node2 shouldn't have any new headers yet, because node1 should not
+ # have relayed anything.
+ assert_equal(len(self.nodes[2].getchaintips()), 1)
+ assert_equal(self.nodes[2].getchaintips()[0]['height'], 0)
+
+ assert self.nodes[1].getbestblockhash() != self.nodes[0].getbestblockhash()
+ assert_equal(self.nodes[2].getblockcount(), starting_blockcount)
+
+ self.log.info("Generating one more block")
+ self.nodes[0].generate(1)
+
+ self.log.info("Verifying nodes are all synced")
+
+ # Because nodes in regtest are all manual connections (eg using
+ # addnode), node1 should not have disconnected node0. If not for that,
+ # we'd expect node1 to have disconnected node0 for serving an
+ # insufficient work chain, in which case we'd need to reconnect them to
+ # continue the test.
+
+ self.sync_all()
+ self.log.info("Blockcounts: %s", [n.getblockcount() for n in self.nodes])
+
+if __name__ == '__main__':
+ MinimumChainWorkTest().main()
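
The block-count arithmetic in `run_test` above is worth spelling out: `-minimumchainwork=0x65` is 101, each regtest block contributes 2 units of work (genesis included), so node0 first mines enough blocks to sit just below the threshold and a single extra block then pushes the chain past it. A standalone recomputation of those numbers (no framework required):

    REGTEST_WORK_PER_BLOCK = 2                    # work per regtest block, as in the test
    min_chain_work = 0x65                         # 101, the -minimumchainwork value above
    starting_chain_work = REGTEST_WORK_PER_BLOCK  # genesis block's work

    blocks_to_generate = int((min_chain_work - starting_chain_work) / REGTEST_WORK_PER_BLOCK)
    assert blocks_to_generate == 49

    work_after_generation = starting_chain_work + blocks_to_generate * REGTEST_WORK_PER_BLOCK
    assert work_after_generation == 100           # still below 101: node1 withholds the chain
    assert work_after_generation + REGTEST_WORK_PER_BLOCK > min_chain_work  # one more block crosses it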
diff --git a/test/functional/mining.py b/test/functional/mining.py
new file mode 100755
index 0000000000..9aee06864e
--- /dev/null
+++ b/test/functional/mining.py
@@ -0,0 +1,135 @@
+#!/usr/bin/env python3
+# Copyright (c) 2014-2016 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test mining RPCs
+
+- getmininginfo
+- getblocktemplate proposal mode
+- submitblock"""
+
+import copy
+from binascii import b2a_hex
+from decimal import Decimal
+
+from test_framework.blocktools import create_coinbase
+from test_framework.mininode import CBlock
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import assert_equal, assert_raises_rpc_error
+
+def b2x(b):
+ return b2a_hex(b).decode('ascii')
+
+def assert_template(node, block, expect, rehash=True):
+ if rehash:
+ block.hashMerkleRoot = block.calc_merkle_root()
+ rsp = node.getblocktemplate({'data': b2x(block.serialize()), 'mode': 'proposal'})
+ assert_equal(rsp, expect)
+
+class MiningTest(BitcoinTestFramework):
+ def set_test_params(self):
+ self.num_nodes = 2
+ self.setup_clean_chain = False
+
+ def run_test(self):
+ node = self.nodes[0]
+
+ self.log.info('getmininginfo')
+ mining_info = node.getmininginfo()
+ assert_equal(mining_info['blocks'], 200)
+ assert_equal(mining_info['chain'], 'regtest')
+ assert_equal(mining_info['currentblocktx'], 0)
+ assert_equal(mining_info['currentblockweight'], 0)
+ assert_equal(mining_info['difficulty'], Decimal('4.656542373906925E-10'))
+ assert_equal(mining_info['networkhashps'], Decimal('0.003333333333333334'))
+ assert_equal(mining_info['pooledtx'], 0)
+
+ # Mine a block to leave initial block download
+ node.generate(1)
+ tmpl = node.getblocktemplate()
+ self.log.info("getblocktemplate: Test capability advertised")
+ assert 'proposal' in tmpl['capabilities']
+ assert 'coinbasetxn' not in tmpl
+
+ coinbase_tx = create_coinbase(height=int(tmpl["height"]) + 1)
+ # sequence numbers must not be max for nLockTime to have effect
+ coinbase_tx.vin[0].nSequence = 2 ** 32 - 2
+ coinbase_tx.rehash()
+
+ block = CBlock()
+ block.nVersion = tmpl["version"]
+ block.hashPrevBlock = int(tmpl["previousblockhash"], 16)
+ block.nTime = tmpl["curtime"]
+ block.nBits = int(tmpl["bits"], 16)
+ block.nNonce = 0
+ block.vtx = [coinbase_tx]
+
+ self.log.info("getblocktemplate: Test valid block")
+ assert_template(node, block, None)
+
+ self.log.info("submitblock: Test block decode failure")
+ assert_raises_rpc_error(-22, "Block decode failed", node.submitblock, b2x(block.serialize()[:-15]))
+
+ self.log.info("getblocktemplate: Test bad input hash for coinbase transaction")
+ bad_block = copy.deepcopy(block)
+ bad_block.vtx[0].vin[0].prevout.hash += 1
+ bad_block.vtx[0].rehash()
+ assert_template(node, bad_block, 'bad-cb-missing')
+
+ self.log.info("submitblock: Test invalid coinbase transaction")
+ assert_raises_rpc_error(-22, "Block does not start with a coinbase", node.submitblock, b2x(bad_block.serialize()))
+
+ self.log.info("getblocktemplate: Test truncated final transaction")
+ assert_raises_rpc_error(-22, "Block decode failed", node.getblocktemplate, {'data': b2x(block.serialize()[:-1]), 'mode': 'proposal'})
+
+ self.log.info("getblocktemplate: Test duplicate transaction")
+ bad_block = copy.deepcopy(block)
+ bad_block.vtx.append(bad_block.vtx[0])
+ assert_template(node, bad_block, 'bad-txns-duplicate')
+
+ self.log.info("getblocktemplate: Test invalid transaction")
+ bad_block = copy.deepcopy(block)
+ bad_tx = copy.deepcopy(bad_block.vtx[0])
+ bad_tx.vin[0].prevout.hash = 255
+ bad_tx.rehash()
+ bad_block.vtx.append(bad_tx)
+ assert_template(node, bad_block, 'bad-txns-inputs-missingorspent')
+
+ self.log.info("getblocktemplate: Test nonfinal transaction")
+ bad_block = copy.deepcopy(block)
+ bad_block.vtx[0].nLockTime = 2 ** 32 - 1
+ bad_block.vtx[0].rehash()
+ assert_template(node, bad_block, 'bad-txns-nonfinal')
+
+ self.log.info("getblocktemplate: Test bad tx count")
+ # The tx count is immediately after the block header
+ TX_COUNT_OFFSET = 80
+ bad_block_sn = bytearray(block.serialize())
+ assert_equal(bad_block_sn[TX_COUNT_OFFSET], 1)
+ bad_block_sn[TX_COUNT_OFFSET] += 1
+ assert_raises_rpc_error(-22, "Block decode failed", node.getblocktemplate, {'data': b2x(bad_block_sn), 'mode': 'proposal'})
+
+ self.log.info("getblocktemplate: Test bad bits")
+ bad_block = copy.deepcopy(block)
+ bad_block.nBits = 469762303 # impossible in the real world
+ assert_template(node, bad_block, 'bad-diffbits')
+
+ self.log.info("getblocktemplate: Test bad merkle root")
+ bad_block = copy.deepcopy(block)
+ bad_block.hashMerkleRoot += 1
+ assert_template(node, bad_block, 'bad-txnmrklroot', False)
+
+ self.log.info("getblocktemplate: Test bad timestamps")
+ bad_block = copy.deepcopy(block)
+ bad_block.nTime = 2 ** 31 - 1
+ assert_template(node, bad_block, 'time-too-new')
+ bad_block.nTime = 0
+ assert_template(node, bad_block, 'time-too-old')
+
+ self.log.info("getblocktemplate: Test not best block")
+ bad_block = copy.deepcopy(block)
+ bad_block.hashPrevBlock = 123
+ assert_template(node, bad_block, 'inconclusive-not-best-prevblk')
+
+if __name__ == '__main__':
+ MiningTest().main()
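
The `TX_COUNT_OFFSET = 80` used in the bad-tx-count case above follows directly from the serialized block header layout: a 4-byte version, 32-byte previous-block hash, 32-byte merkle root, 4-byte time, 4-byte bits and 4-byte nonce, after which the transaction-count varint begins. A standalone check of that arithmetic:

    # Recompute the offset of the tx-count varint from the header field sizes.
    HEADER_FIELD_SIZES = {
        'nVersion': 4,
        'hashPrevBlock': 32,
        'hashMerkleRoot': 32,
        'nTime': 4,
        'nBits': 4,
        'nNonce': 4,
    }
    TX_COUNT_OFFSET = sum(HEADER_FIELD_SIZES.values())
    assert TX_COUNT_OFFSET == 80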
diff --git a/test/functional/multi_rpc.py b/test/functional/multi_rpc.py
index a30e15ace9..a2b346f274 100755
--- a/test/functional/multi_rpc.py
+++ b/test/functional/multi_rpc.py
@@ -12,10 +12,7 @@ import http.client
import urllib.parse
class HTTPBasicsTest (BitcoinTestFramework):
-
- def __init__(self):
- super().__init__()
- self.setup_clean_chain = False
+ def set_test_params(self):
self.num_nodes = 2
def setup_chain(self):
diff --git a/test/functional/multiwallet.py b/test/functional/multiwallet.py
new file mode 100755
index 0000000000..06409b6f31
--- /dev/null
+++ b/test/functional/multiwallet.py
@@ -0,0 +1,117 @@
+#!/usr/bin/env python3
+# Copyright (c) 2017 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test multiwallet.
+
+Verify that a bitcoind node can load multiple wallet files
+"""
+import os
+import shutil
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import assert_equal, assert_raises_rpc_error
+
+class MultiWalletTest(BitcoinTestFramework):
+ def set_test_params(self):
+ self.setup_clean_chain = True
+ self.num_nodes = 1
+ self.extra_args = [['-wallet=w1', '-wallet=w2', '-wallet=w3', '-wallet=w']]
+
+ def run_test(self):
+ assert_equal(set(self.nodes[0].listwallets()), {"w1", "w2", "w3", "w"})
+
+ self.stop_node(0)
+
+ # should not initialize if there are duplicate wallets
+ self.assert_start_raises_init_error(0, ['-wallet=w1', '-wallet=w1'], 'Error loading wallet w1. Duplicate -wallet filename specified.')
+
+ # should not initialize if wallet file is a directory
+ wallet_dir = os.path.join(self.options.tmpdir, 'node0', 'regtest', 'wallets')
+ os.mkdir(os.path.join(wallet_dir, 'w11'))
+ self.assert_start_raises_init_error(0, ['-wallet=w11'], 'Error loading wallet w11. -wallet filename must be a regular file.')
+
+ # should not initialize if one wallet is a copy of another
+ shutil.copyfile(os.path.join(wallet_dir, 'w2'), os.path.join(wallet_dir, 'w22'))
+ self.assert_start_raises_init_error(0, ['-wallet=w2', '-wallet=w22'], 'duplicates fileid')
+
+ # should not initialize if wallet file is a symlink
+ os.symlink(os.path.join(wallet_dir, 'w1'), os.path.join(wallet_dir, 'w12'))
+ self.assert_start_raises_init_error(0, ['-wallet=w12'], 'Error loading wallet w12. -wallet filename must be a regular file.')
+
+ # should not initialize if the specified walletdir does not exist
+ self.assert_start_raises_init_error(0, ['-walletdir=bad'], 'Error: Specified wallet directory "bad" does not exist.')
+
+ # if wallets/ doesn't exist, datadir should be the default wallet dir
+ wallet_dir2 = os.path.join(self.options.tmpdir, 'node0', 'regtest', 'walletdir')
+ os.rename(wallet_dir, wallet_dir2)
+ self.start_node(0, ['-wallet=w4', '-wallet=w5'])
+ assert_equal(set(self.nodes[0].listwallets()), {"w4", "w5"})
+ w5 = self.nodes[0].get_wallet_rpc("w5")
+ w5.generate(1)
+ self.stop_node(0)
+
+ # now if wallets/ exists again, but the rootdir is specified as the walletdir, w4 and w5 should still be loaded
+ os.rename(wallet_dir2, wallet_dir)
+ self.start_node(0, ['-wallet=w4', '-wallet=w5', '-walletdir=' + os.path.join(self.options.tmpdir, 'node0', 'regtest')])
+ assert_equal(set(self.nodes[0].listwallets()), {"w4", "w5"})
+ w5 = self.nodes[0].get_wallet_rpc("w5")
+ w5_info = w5.getwalletinfo()
+ assert_equal(w5_info['immature_balance'], 50)
+
+ self.stop_node(0)
+
+ self.start_node(0, self.extra_args[0])
+
+ w1 = self.nodes[0].get_wallet_rpc("w1")
+ w2 = self.nodes[0].get_wallet_rpc("w2")
+ w3 = self.nodes[0].get_wallet_rpc("w3")
+ w4 = self.nodes[0].get_wallet_rpc("w")
+ wallet_bad = self.nodes[0].get_wallet_rpc("bad")
+
+ w1.generate(1)
+
+ # accessing invalid wallet fails
+ assert_raises_rpc_error(-18, "Requested wallet does not exist or is not loaded", wallet_bad.getwalletinfo)
+
+ # accessing wallet RPC without using wallet endpoint fails
+ assert_raises_rpc_error(-19, "Wallet file not specified", self.nodes[0].getwalletinfo)
+
+ # check w1 wallet balance
+ w1_info = w1.getwalletinfo()
+ assert_equal(w1_info['immature_balance'], 50)
+ w1_name = w1_info['walletname']
+ assert_equal(w1_name, "w1")
+
+ # check w2 wallet balance
+ w2_info = w2.getwalletinfo()
+ assert_equal(w2_info['immature_balance'], 0)
+ w2_name = w2_info['walletname']
+ assert_equal(w2_name, "w2")
+
+ w3_name = w3.getwalletinfo()['walletname']
+ assert_equal(w3_name, "w3")
+
+ w4_name = w4.getwalletinfo()['walletname']
+ assert_equal(w4_name, "w")
+
+ w1.generate(101)
+ assert_equal(w1.getbalance(), 100)
+ assert_equal(w2.getbalance(), 0)
+ assert_equal(w3.getbalance(), 0)
+ assert_equal(w4.getbalance(), 0)
+
+ w1.sendtoaddress(w2.getnewaddress(), 1)
+ w1.sendtoaddress(w3.getnewaddress(), 2)
+ w1.sendtoaddress(w4.getnewaddress(), 3)
+ w1.generate(1)
+ assert_equal(w2.getbalance(), 1)
+ assert_equal(w3.getbalance(), 2)
+ assert_equal(w4.getbalance(), 3)
+
+ batch = w1.batch([w1.getblockchaininfo.get_request(), w1.getwalletinfo.get_request()])
+ assert_equal(batch[0]["result"]["chain"], "regtest")
+ assert_equal(batch[1]["result"]["walletname"], "w1")
+
+if __name__ == '__main__':
+ MultiWalletTest().main()
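
Wallet-specific calls above go through `get_wallet_rpc()`, which returns an RPC proxy scoped to a single wallet, while calling a wallet RPC on the bare node proxy fails with error -19 as asserted. A hedged sketch of that usage (the function name, and the assumption that the node was started with `-wallet=w1 -wallet=w2`, are not part of the patch):

    from test_framework.util import assert_equal

    def exercise_two_wallets(node):
        # `node` is assumed to be a TestNode started with -wallet=w1 -wallet=w2.
        w1 = node.get_wallet_rpc("w1")
        w2 = node.get_wallet_rpc("w2")
        w1.generate(101)                         # mature one coinbase for w1
        w1.sendtoaddress(w2.getnewaddress(), 1)
        w1.generate(1)
        assert_equal(w2.getbalance(), 1)         # the payment lands in w2, not w1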
diff --git a/test/functional/net.py b/test/functional/net.py
index 3ba3764cf9..16e4f6adb4 100755
--- a/test/functional/net.py
+++ b/test/functional/net.py
@@ -12,15 +12,13 @@ import time
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
- assert_raises_jsonrpc,
+ assert_raises_rpc_error,
connect_nodes_bi,
p2p_port,
)
-
class NetTest(BitcoinTestFramework):
- def __init__(self):
- super().__init__()
+ def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 2
@@ -85,8 +83,8 @@ class NetTest(BitcoinTestFramework):
added_nodes = self.nodes[0].getaddednodeinfo(ip_port)
assert_equal(len(added_nodes), 1)
assert_equal(added_nodes[0]['addednode'], ip_port)
- # check that a non-existant node returns an error
- assert_raises_jsonrpc(-24, "Node has not been added",
+ # check that a non-existent node returns an error
+ assert_raises_rpc_error(-24, "Node has not been added",
self.nodes[0].getaddednodeinfo, '1.1.1.1')
def _test_getpeerinfo(self):
diff --git a/test/functional/node_network_limited.py b/test/functional/node_network_limited.py
new file mode 100755
index 0000000000..6d1bf7ced2
--- /dev/null
+++ b/test/functional/node_network_limited.py
@@ -0,0 +1,81 @@
+#!/usr/bin/env python3
+# Copyright (c) 2017 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import *
+from test_framework.mininode import *
+
+class BaseNode(P2PInterface):
+ nServices = 0
+ firstAddrnServices = 0
+ def on_version(self, message):
+ self.nServices = message.nServices
+
+class NodeNetworkLimitedTest(BitcoinTestFramework):
+ def set_test_params(self):
+ self.setup_clean_chain = True
+ self.num_nodes = 1
+ self.extra_args = [['-prune=550']]
+
+ def getSignaledServiceFlags(self):
+ node = self.nodes[0].add_p2p_connection(BaseNode())
+ NetworkThread().start()
+ node.wait_for_verack()
+ services = node.nServices
+ self.nodes[0].disconnect_p2ps()
+ node.wait_for_disconnect()
+ return services
+
+ def tryGetBlockViaGetData(self, blockhash, must_disconnect):
+ node = self.nodes[0].add_p2p_connection(BaseNode())
+ NetworkThread().start()
+ node.wait_for_verack()
+ node.send_message(msg_verack())
+ getdata_request = msg_getdata()
+ getdata_request.inv.append(CInv(2, int(blockhash, 16)))
+ node.send_message(getdata_request)
+
+ if (must_disconnect):
+ #ensure we get disconnected
+ node.wait_for_disconnect(5)
+ else:
+ # check if the peer sends us the requested block
+ node.wait_for_block(int(blockhash, 16), 3)
+ self.nodes[0].disconnect_p2ps()
+ node.wait_for_disconnect()
+
+ def run_test(self):
+ #NODE_BLOOM & NODE_WITNESS & NODE_NETWORK_LIMITED must now be signaled
+ assert_equal(self.getSignaledServiceFlags(), 1036) #1036 == 0x40C == 0100 0000 1100
+# | ||
+# | |^--- NODE_BLOOM
+# | ^---- NODE_WITNESS
+# ^-- NODE_NETWORK_LIMITED
+
+ # now mine enough blocks to pass the NODE_NETWORK_LIMITED serving window plus a 2-block buffer (to avoid races)
+ firstblock = self.nodes[0].generate(1)[0]
+ blocks = self.nodes[0].generate(292)
+ blockWithinLimitedRange = blocks[-1]
+
+ # make sure the deepest block we can retrieve is at tip-288
+ # requesting the block at height 2 (tip-289) must fail (the request is ignored)
+ self.tryGetBlockViaGetData(firstblock, True) #first block must lead to disconnect
+ self.tryGetBlockViaGetData(blocks[1], False) #last block in valid range
+ self.tryGetBlockViaGetData(blocks[0], True) #first block outside of the 288+2 limit
+
+ #NODE_NETWORK_LIMITED must still be signaled after restart
+ self.restart_node(0)
+ assert_equal(self.getSignaledServiceFlags(), 1036)
+
+ #test the RPC service flags
+ assert_equal(self.nodes[0].getnetworkinfo()['localservices'], "000000000000040c")
+
+ # getdata a block above the NODE_NETWORK_LIMITED threshold must be possible
+ self.tryGetBlockViaGetData(blockWithinLimitedRange, False)
+
+ # getdata a block below the NODE_NETWORK_LIMITED threshold must be ignored
+ self.tryGetBlockViaGetData(firstblock, True)
+
+if __name__ == '__main__':
+ NodeNetworkLimitedTest().main()
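
The magic constant 1036 asserted above is just three service bits OR'd together, matching the inline comments in the test: NODE_BLOOM (bit 2), NODE_WITNESS (bit 3) and NODE_NETWORK_LIMITED (bit 10). A standalone check of the decoding, including the hex string compared against `getnetworkinfo()['localservices']`:

    NODE_BLOOM = 1 << 2             # 4
    NODE_WITNESS = 1 << 3           # 8
    NODE_NETWORK_LIMITED = 1 << 10  # 1024

    expected = NODE_BLOOM | NODE_WITNESS | NODE_NETWORK_LIMITED
    assert expected == 1036 == 0x40C
    # The RPC reports the same value as a zero-padded 16-digit hex string.
    assert format(expected, '016x') == "000000000000040c"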
diff --git a/test/functional/notifications.py b/test/functional/notifications.py
new file mode 100755
index 0000000000..c88972ab91
--- /dev/null
+++ b/test/functional/notifications.py
@@ -0,0 +1,86 @@
+#!/usr/bin/env python3
+# Copyright (c) 2014-2016 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test the -alertnotify, -blocknotify and -walletnotify options."""
+import os
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import assert_equal, wait_until, connect_nodes_bi
+
+class NotificationsTest(BitcoinTestFramework):
+ def set_test_params(self):
+ self.num_nodes = 2
+ self.setup_clean_chain = True
+
+ def setup_network(self):
+ self.alert_filename = os.path.join(self.options.tmpdir, "alert.txt")
+ self.block_filename = os.path.join(self.options.tmpdir, "blocks.txt")
+ self.tx_filename = os.path.join(self.options.tmpdir, "transactions.txt")
+
+ # -alertnotify and -blocknotify on node0, walletnotify on node1
+ self.extra_args = [["-blockversion=2",
+ "-alertnotify=echo %%s >> %s" % self.alert_filename,
+ "-blocknotify=echo %%s >> %s" % self.block_filename],
+ ["-blockversion=211",
+ "-rescan",
+ "-walletnotify=echo %%s >> %s" % self.tx_filename]]
+ super().setup_network()
+
+ def run_test(self):
+ self.log.info("test -blocknotify")
+ block_count = 10
+ blocks = self.nodes[1].generate(block_count)
+
+ # wait at most 10 seconds for expected file size before reading the content
+ wait_until(lambda: os.path.isfile(self.block_filename) and os.stat(self.block_filename).st_size >= (block_count * 65), timeout=10)
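+ # (each -blocknotify invocation appends a 64-character block hash plus a
+ # newline, hence the 65 bytes per block expected above)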
+
+ # file content should equal the generated block hashes
+ with open(self.block_filename, 'r') as f:
+ assert_equal(sorted(blocks), sorted(f.read().splitlines()))
+
+ self.log.info("test -walletnotify")
+ # wait at most 10 seconds for expected file size before reading the content
+ wait_until(lambda: os.path.isfile(self.tx_filename) and os.stat(self.tx_filename).st_size >= (block_count * 65), timeout=10)
+
+ # file content should equal the generated transaction hashes
+ txids_rpc = list(map(lambda t: t['txid'], self.nodes[1].listtransactions("*", block_count)))
+ with open(self.tx_filename, 'r') as f:
+ assert_equal(sorted(txids_rpc), sorted(f.read().splitlines()))
+ os.remove(self.tx_filename)
+
+ self.log.info("test -walletnotify after rescan")
+ # restart node to force a rescan, which re-triggers wallet notifications
+ self.restart_node(1)
+ connect_nodes_bi(self.nodes, 0, 1)
+
+ wait_until(lambda: os.path.isfile(self.tx_filename) and os.stat(self.tx_filename).st_size >= (block_count * 65), timeout=10)
+
+ # file content should equal the generated transaction hashes
+ txids_rpc = list(map(lambda t: t['txid'], self.nodes[1].listtransactions("*", block_count)))
+ with open(self.tx_filename, 'r') as f:
+ assert_equal(sorted(txids_rpc), sorted(f.read().splitlines()))
+
+ # Mine another 41 up-version blocks. -alertnotify should trigger on the 51st.
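+ # (the unknown-version warning fires once 51 of the last 100 blocks carry an
+ # unexpected version, so 10 + 41 up-version blocks are needed here)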
+ self.log.info("test -alertnotify")
+ self.nodes[1].generate(41)
+ self.sync_all()
+
+ # Give bitcoind 10 seconds to write the alert notification
+ wait_until(lambda: os.path.isfile(self.alert_filename) and os.path.getsize(self.alert_filename), timeout=10)
+
+ with open(self.alert_filename, 'r', encoding='utf8') as f:
+ alert_text = f.read()
+
+ # Mine more up-version blocks, should not get more alerts:
+ self.nodes[1].generate(2)
+ self.sync_all()
+
+ with open(self.alert_filename, 'r', encoding='utf8') as f:
+ alert_text2 = f.read()
+
+ self.log.info("-alertnotify should not continue notifying for more unknown version blocks")
+ assert_equal(alert_text, alert_text2)
+
+if __name__ == '__main__':
+ NotificationsTest().main()
diff --git a/test/functional/nulldummy.py b/test/functional/nulldummy.py
index 9717add272..7bc7c168f4 100755
--- a/test/functional/nulldummy.py
+++ b/test/functional/nulldummy.py
@@ -37,11 +37,12 @@ def trueDummy(tx):
class NULLDUMMYTest(BitcoinTestFramework):
- def __init__(self):
- super().__init__()
+ def set_test_params(self):
self.num_nodes = 1
self.setup_clean_chain = True
- self.extra_args = [['-whitelist=127.0.0.1', '-walletprematurewitness']]
+ # This script tests NULLDUMMY activation, which is part of the 'segwit' deployment, so we go through
+ # normal segwit activation here (and don't use the default always-on behaviour).
+ self.extra_args = [['-whitelist=127.0.0.1', '-walletprematurewitness', '-vbparams=segwit:0:999999999999']]
def run_test(self):
self.address = self.nodes[0].getnewaddress()
@@ -72,7 +73,7 @@ class NULLDUMMYTest(BitcoinTestFramework):
self.log.info("Test 2: Non-NULLDUMMY base multisig transaction should not be accepted to mempool before activation")
test2tx = self.create_transaction(self.nodes[0], txid2, self.ms_address, 47)
trueDummy(test2tx)
- assert_raises_jsonrpc(-26, NULLDUMMY_ERROR, self.nodes[0].sendrawtransaction, bytes_to_hex_str(test2tx.serialize_with_witness()), True)
+ assert_raises_rpc_error(-26, NULLDUMMY_ERROR, self.nodes[0].sendrawtransaction, bytes_to_hex_str(test2tx.serialize_with_witness()), True)
self.log.info("Test 3: Non-NULLDUMMY base transactions should be accepted in a block before activation [431]")
self.block_submit(self.nodes[0], [test2tx], False, True)
@@ -81,14 +82,14 @@ class NULLDUMMYTest(BitcoinTestFramework):
test4tx = self.create_transaction(self.nodes[0], test2tx.hash, self.address, 46)
test6txs=[CTransaction(test4tx)]
trueDummy(test4tx)
- assert_raises_jsonrpc(-26, NULLDUMMY_ERROR, self.nodes[0].sendrawtransaction, bytes_to_hex_str(test4tx.serialize_with_witness()), True)
+ assert_raises_rpc_error(-26, NULLDUMMY_ERROR, self.nodes[0].sendrawtransaction, bytes_to_hex_str(test4tx.serialize_with_witness()), True)
self.block_submit(self.nodes[0], [test4tx])
self.log.info("Test 5: Non-NULLDUMMY P2WSH multisig transaction invalid after activation")
test5tx = self.create_transaction(self.nodes[0], txid3, self.wit_address, 48)
test6txs.append(CTransaction(test5tx))
test5tx.wit.vtxinwit[0].scriptWitness.stack[0] = b'\x01'
- assert_raises_jsonrpc(-26, NULLDUMMY_ERROR, self.nodes[0].sendrawtransaction, bytes_to_hex_str(test5tx.serialize_with_witness()), True)
+ assert_raises_rpc_error(-26, NULLDUMMY_ERROR, self.nodes[0].sendrawtransaction, bytes_to_hex_str(test5tx.serialize_with_witness()), True)
self.block_submit(self.nodes[0], [test5tx], True)
self.log.info("Test 6: NULLDUMMY compliant base/witness transactions should be accepted to mempool and in block after activation [432]")
diff --git a/test/functional/p2p-acceptblock.py b/test/functional/p2p-acceptblock.py
index 322cb767db..d9d7c24416 100755
--- a/test/functional/p2p-acceptblock.py
+++ b/test/functional/p2p-acceptblock.py
@@ -4,37 +4,32 @@
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test processing of unrequested blocks.
-Since behavior differs when receiving unrequested blocks from whitelisted peers
-versus non-whitelisted peers, this tests the behavior of both (effectively two
-separate tests running in parallel).
+Setup: two nodes, node0+node1, not connected to each other. Node1 will have
+nMinimumChainWork set to 0x10, so it won't process low-work unrequested blocks.
-Setup: two nodes, node0 and node1, not connected to each other. Node0 does not
-whitelist localhost, but node1 does. They will each be on their own chain for
-this test.
-
-We have one NodeConn connection to each, test_node and white_node respectively.
+We have one P2PInterface connection to node0 called test_node, and one to node1
+called min_work_node.
The test:
1. Generate one block on each node, to leave IBD.
2. Mine a new block on each tip, and deliver to each node from node's peer.
- The tip should advance.
+ The tip should advance for node0, but node1 should skip processing due to
+ nMinimumChainWork.
+
+Node1 is unused in tests 3-7:
-3. Mine a block that forks the previous block, and deliver to each node from
- corresponding peer.
- Node0 should not process this block (just accept the header), because it is
- unrequested and doesn't have more work than the tip.
- Node1 should process because this is coming from a whitelisted peer.
+3. Mine a block that forks from the genesis block, and deliver to test_node.
+ Node0 should not process this block (just accept the header), because it
+ is unrequested and doesn't have more work than, or equal work to, the tip.
-4. Send another block that builds on the forking block.
- Node0 should process this block but be stuck on the shorter chain, because
- it's missing an intermediate block.
- Node1 should reorg to this longer chain.
+4a,b. Send another two blocks that build on the forking block.
+ Node0 should process the second block but be stuck on the shorter chain,
+ because it's missing an intermediate block.
-4b.Send 288 more blocks on the longer chain.
+4c. Send 288 more blocks on the longer chain (the number of blocks ahead
+ we currently store).
Node0 should process all but the last block (too far ahead in height).
- Send all headers to Node1, and then send the last block in that chain.
- Node1 should accept the block because it's coming from a whitelisted peer.
5. Send a duplicate of the block in #3 to Node0.
Node0 should not process the block because it is unrequested, and stay on
@@ -46,13 +41,21 @@ The test:
7. Send Node0 the missing block again.
Node0 should process and the tip should advance.
+
+8. Create a fork which is invalid at a height greater than the current chain
+ height (i.e. one the node will try to reorg to) but which has headers built on top
+ of the invalid block. Check that we get disconnected if we send more headers
+ on the chain the node now knows to be invalid.
+
+9. Test Node1 is able to sync when connected to node0 (which should have sufficient
+ work on its chain).
"""
from test_framework.mininode import *
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
import time
-from test_framework.blocktools import create_block, create_coinbase
+from test_framework.blocktools import create_block, create_coinbase, create_transaction
class AcceptBlockTest(BitcoinTestFramework):
def add_options(self, parser):
@@ -60,41 +63,38 @@ class AcceptBlockTest(BitcoinTestFramework):
default=os.getenv("BITCOIND", "bitcoind"),
help="bitcoind binary to test")
- def __init__(self):
- super().__init__()
+ def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 2
- self.extra_args = [[], ["-whitelist=127.0.0.1"]]
+ self.extra_args = [[], ["-minimumchainwork=0x10"]]
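+ # 0x10 (= 16) is more chain work than the handful of regtest blocks mined in
+ # this test can provide (each regtest block contributes roughly 2 units of
+ # work), so node1 will not process low-work unrequested blocks.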
def setup_network(self):
# Node0 will be used to test behavior of processing unrequested blocks
# from peers which are not whitelisted, while Node1 will be used for
# the whitelisted case.
+ # (node1 is no longer whitelisted here; it is started with -minimumchainwork
+ # instead, to test how unrequested blocks interact with nMinimumChainWork.)
self.setup_nodes()
def run_test(self):
# Setup the p2p connections and start up the network thread.
- test_node = NodeConnCB() # connects to node0 (not whitelisted)
- white_node = NodeConnCB() # connects to node1 (whitelisted)
-
- connections = []
- connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], test_node))
- connections.append(NodeConn('127.0.0.1', p2p_port(1), self.nodes[1], white_node))
- test_node.add_connection(connections[0])
- white_node.add_connection(connections[1])
+ # test_node connects to node0 (not whitelisted)
+ test_node = self.nodes[0].add_p2p_connection(P2PInterface())
+ # min_work_node connects to node1 (which has -minimumchainwork set)
+ min_work_node = self.nodes[1].add_p2p_connection(P2PInterface())
NetworkThread().start() # Start up network handling in another thread
# Test logic begins here
test_node.wait_for_verack()
- white_node.wait_for_verack()
+ min_work_node.wait_for_verack()
- # 1. Have both nodes mine a block (leave IBD)
+ # 1. Have nodes mine a block (leave IBD)
[ n.generate(1) for n in self.nodes ]
tips = [ int("0x" + n.getbestblockhash(), 0) for n in self.nodes ]
# 2. Send one block that builds on each tip.
- # This should be accepted.
+ # This should be accepted by node0, but not processed by node1 (insufficient chain work)
blocks_h2 = [] # the height 2 blocks on each node's chain
block_time = int(time.time()) + 1
for i in range(2):
@@ -102,95 +102,116 @@ class AcceptBlockTest(BitcoinTestFramework):
blocks_h2[i].solve()
block_time += 1
test_node.send_message(msg_block(blocks_h2[0]))
- white_node.send_message(msg_block(blocks_h2[1]))
+ min_work_node.send_message(msg_block(blocks_h2[1]))
- [ x.sync_with_ping() for x in [test_node, white_node] ]
+ for x in [test_node, min_work_node]:
+ x.sync_with_ping()
assert_equal(self.nodes[0].getblockcount(), 2)
- assert_equal(self.nodes[1].getblockcount(), 2)
- self.log.info("First height 2 block accepted by both nodes")
+ assert_equal(self.nodes[1].getblockcount(), 1)
+ self.log.info("First height 2 block accepted by node0; correctly rejected by node1")
- # 3. Send another block that builds on the original tip.
- blocks_h2f = [] # Blocks at height 2 that fork off the main chain
- for i in range(2):
- blocks_h2f.append(create_block(tips[i], create_coinbase(2), blocks_h2[i].nTime+1))
- blocks_h2f[i].solve()
- test_node.send_message(msg_block(blocks_h2f[0]))
- white_node.send_message(msg_block(blocks_h2f[1]))
+ # 3. Send another block that builds on genesis.
+ block_h1f = create_block(int("0x" + self.nodes[0].getblockhash(0), 0), create_coinbase(1), block_time)
+ block_time += 1
+ block_h1f.solve()
+ test_node.send_message(msg_block(block_h1f))
- [ x.sync_with_ping() for x in [test_node, white_node] ]
+ test_node.sync_with_ping()
+ tip_entry_found = False
for x in self.nodes[0].getchaintips():
- if x['hash'] == blocks_h2f[0].hash:
+ if x['hash'] == block_h1f.hash:
assert_equal(x['status'], "headers-only")
+ tip_entry_found = True
+ assert(tip_entry_found)
+ assert_raises_rpc_error(-1, "Block not found on disk", self.nodes[0].getblock, block_h1f.hash)
+
+ # 4. Send another two blocks that build on the fork.
+ block_h2f = create_block(block_h1f.sha256, create_coinbase(2), block_time)
+ block_time += 1
+ block_h2f.solve()
+ test_node.send_message(msg_block(block_h2f))
- for x in self.nodes[1].getchaintips():
- if x['hash'] == blocks_h2f[1].hash:
- assert_equal(x['status'], "valid-headers")
+ test_node.sync_with_ping()
+ # Since the earlier block was not processed by the node, the new block
+ # can't be fully validated.
+ tip_entry_found = False
+ for x in self.nodes[0].getchaintips():
+ if x['hash'] == block_h2f.hash:
+ assert_equal(x['status'], "headers-only")
+ tip_entry_found = True
+ assert(tip_entry_found)
- self.log.info("Second height 2 block accepted only from whitelisted peer")
+ # But this block should be accepted by the node since it has equal work.
+ self.nodes[0].getblock(block_h2f.hash)
+ self.log.info("Second height 2 block accepted, but not reorg'ed to")
- # 4. Now send another block that builds on the forking chain.
- blocks_h3 = []
- for i in range(2):
- blocks_h3.append(create_block(blocks_h2f[i].sha256, create_coinbase(3), blocks_h2f[i].nTime+1))
- blocks_h3[i].solve()
- test_node.send_message(msg_block(blocks_h3[0]))
- white_node.send_message(msg_block(blocks_h3[1]))
+ # 4b. Now send another block that builds on the forking chain.
+ block_h3 = create_block(block_h2f.sha256, create_coinbase(3), block_h2f.nTime+1)
+ block_h3.solve()
+ test_node.send_message(msg_block(block_h3))
- [ x.sync_with_ping() for x in [test_node, white_node] ]
- # Since the earlier block was not processed by node0, the new block
+ test_node.sync_with_ping()
+ # Since the earlier block was not processed by the node, the new block
# can't be fully validated.
+ tip_entry_found = False
for x in self.nodes[0].getchaintips():
- if x['hash'] == blocks_h3[0].hash:
+ if x['hash'] == block_h3.hash:
assert_equal(x['status'], "headers-only")
+ tip_entry_found = True
+ assert(tip_entry_found)
+ self.nodes[0].getblock(block_h3.hash)
+
+ # But this block should be accepted by the node since it has more work.
+ self.nodes[0].getblock(block_h3.hash)
+ self.log.info("Unrequested more-work block accepted")
+
+ # 4c. Now mine 288 more blocks and deliver; all should be processed but
+ # the last (height-too-high) on the node (as long as it's not missing any headers)
+ tip = block_h3
+ all_blocks = []
+ for i in range(288):
+ next_block = create_block(tip.sha256, create_coinbase(i + 4), tip.nTime+1)
+ next_block.solve()
+ all_blocks.append(next_block)
+ tip = next_block
+
+ # Now send the block at height 5 and check that it wasn't accepted (missing header)
+ test_node.send_message(msg_block(all_blocks[1]))
+ test_node.sync_with_ping()
+ assert_raises_rpc_error(-5, "Block not found", self.nodes[0].getblock, all_blocks[1].hash)
+ assert_raises_rpc_error(-5, "Block not found", self.nodes[0].getblockheader, all_blocks[1].hash)
- # But this block should be accepted by node0 since it has more work.
- self.nodes[0].getblock(blocks_h3[0].hash)
- self.log.info("Unrequested more-work block accepted from non-whitelisted peer")
+ # The block at height 5 should be accepted if we provide the missing header, though
+ headers_message = msg_headers()
+ headers_message.headers.append(CBlockHeader(all_blocks[0]))
+ test_node.send_message(headers_message)
+ test_node.send_message(msg_block(all_blocks[1]))
+ test_node.sync_with_ping()
+ self.nodes[0].getblock(all_blocks[1].hash)
- # Node1 should have accepted and reorged.
- assert_equal(self.nodes[1].getblockcount(), 3)
- self.log.info("Successfully reorged to length 3 chain from whitelisted peer")
+ # Now send the blocks in all_blocks
+ for i in range(288):
+ test_node.send_message(msg_block(all_blocks[i]))
+ test_node.sync_with_ping()
- # 4b. Now mine 288 more blocks and deliver; all should be processed but
- # the last (height-too-high) on node0. Node1 should process the tip if
- # we give it the headers chain leading to the tip.
- tips = blocks_h3
- headers_message = msg_headers()
- all_blocks = [] # node0's blocks
- for j in range(2):
- for i in range(288):
- next_block = create_block(tips[j].sha256, create_coinbase(i + 4), tips[j].nTime+1)
- next_block.solve()
- if j==0:
- test_node.send_message(msg_block(next_block))
- all_blocks.append(next_block)
- else:
- headers_message.headers.append(CBlockHeader(next_block))
- tips[j] = next_block
-
- time.sleep(2)
# Blocks 1-287 should be accepted, block 288 should be ignored because it's too far ahead
for x in all_blocks[:-1]:
self.nodes[0].getblock(x.hash)
- assert_raises_jsonrpc(-1, "Block not found on disk", self.nodes[0].getblock, all_blocks[-1].hash)
-
- headers_message.headers.pop() # Ensure the last block is unrequested
- white_node.send_message(headers_message) # Send headers leading to tip
- white_node.send_message(msg_block(tips[1])) # Now deliver the tip
- white_node.sync_with_ping()
- self.nodes[1].getblock(tips[1].hash)
- self.log.info("Unrequested block far ahead of tip accepted from whitelisted peer")
+ assert_raises_rpc_error(-1, "Block not found on disk", self.nodes[0].getblock, all_blocks[-1].hash)
# 5. Test handling of unrequested block on the node that didn't process
# Should still not be processed (even though it has a child that has more
# work).
- test_node.send_message(msg_block(blocks_h2f[0]))
- # Here, if the sleep is too short, the test could falsely succeed (if the
- # node hasn't processed the block by the time the sleep returns, and then
- # the node processes it and incorrectly advances the tip).
- # But this would be caught later on, when we verify that an inv triggers
- # a getdata request for this block.
+ # The node should have requested the blocks at some point, so
+ # disconnect/reconnect first
+
+ self.nodes[0].disconnect_p2ps()
+ test_node = self.nodes[0].add_p2p_connection(P2PInterface())
+
+ test_node.wait_for_verack()
+ test_node.send_message(msg_block(block_h1f))
+
test_node.sync_with_ping()
assert_equal(self.nodes[0].getblockcount(), 2)
self.log.info("Unrequested block that would complete more-work chain was ignored")
@@ -201,24 +222,98 @@ class AcceptBlockTest(BitcoinTestFramework):
with mininode_lock:
# Clear state so we can check the getdata request
test_node.last_message.pop("getdata", None)
- test_node.send_message(msg_inv([CInv(2, blocks_h3[0].sha256)]))
+ test_node.send_message(msg_inv([CInv(2, block_h3.sha256)]))
test_node.sync_with_ping()
with mininode_lock:
getdata = test_node.last_message["getdata"]
# Check that the getdata includes the right block
- assert_equal(getdata.inv[0].hash, blocks_h2f[0].sha256)
+ assert_equal(getdata.inv[0].hash, block_h1f.sha256)
self.log.info("Inv at tip triggered getdata for unprocessed block")
# 7. Send the missing block for the third time (now it is requested)
- test_node.send_message(msg_block(blocks_h2f[0]))
+ test_node.send_message(msg_block(block_h1f))
test_node.sync_with_ping()
assert_equal(self.nodes[0].getblockcount(), 290)
+ self.nodes[0].getblock(all_blocks[286].hash)
+ assert_equal(self.nodes[0].getbestblockhash(), all_blocks[286].hash)
+ assert_raises_rpc_error(-1, "Block not found on disk", self.nodes[0].getblock, all_blocks[287].hash)
self.log.info("Successfully reorged to longer chain from non-whitelisted peer")
- [ c.disconnect_node() for c in connections ]
+ # 8. Create a chain which is invalid at a height longer than the
+ # current chain, but which has more blocks on top of that
+ block_289f = create_block(all_blocks[284].sha256, create_coinbase(289), all_blocks[284].nTime+1)
+ block_289f.solve()
+ block_290f = create_block(block_289f.sha256, create_coinbase(290), block_289f.nTime+1)
+ block_290f.solve()
+ block_291 = create_block(block_290f.sha256, create_coinbase(291), block_290f.nTime+1)
+ # block_291 spends a coinbase below maturity!
+ block_291.vtx.append(create_transaction(block_290f.vtx[0], 0, b"42", 1))
+ block_291.hashMerkleRoot = block_291.calc_merkle_root()
+ block_291.solve()
+ block_292 = create_block(block_291.sha256, create_coinbase(292), block_291.nTime+1)
+ block_292.solve()
+
+ # Now send all the headers on the chain and enough blocks to trigger reorg
+ headers_message = msg_headers()
+ headers_message.headers.append(CBlockHeader(block_289f))
+ headers_message.headers.append(CBlockHeader(block_290f))
+ headers_message.headers.append(CBlockHeader(block_291))
+ headers_message.headers.append(CBlockHeader(block_292))
+ test_node.send_message(headers_message)
+
+ test_node.sync_with_ping()
+ tip_entry_found = False
+ for x in self.nodes[0].getchaintips():
+ if x['hash'] == block_292.hash:
+ assert_equal(x['status'], "headers-only")
+ tip_entry_found = True
+ assert(tip_entry_found)
+ assert_raises_rpc_error(-1, "Block not found on disk", self.nodes[0].getblock, block_292.hash)
+
+ test_node.send_message(msg_block(block_289f))
+ test_node.send_message(msg_block(block_290f))
+
+ test_node.sync_with_ping()
+ self.nodes[0].getblock(block_289f.hash)
+ self.nodes[0].getblock(block_290f.hash)
+
+ test_node.send_message(msg_block(block_291))
+
+ # At this point we've sent an obviously-bogus block, wait for full processing
+ # without assuming whether we will be disconnected or not
+ try:
+ # Only wait a short while so the test doesn't take forever if we do get
+ # disconnected
+ test_node.sync_with_ping(timeout=1)
+ except AssertionError:
+ test_node.wait_for_disconnect()
+
+ self.nodes[0].disconnect_p2ps()
+ test_node = self.nodes[0].add_p2p_connection(P2PInterface())
+
+ NetworkThread().start() # Start up network handling in another thread
+ test_node.wait_for_verack()
+
+ # We should have failed reorg and switched back to 290 (but have block 291)
+ assert_equal(self.nodes[0].getblockcount(), 290)
+ assert_equal(self.nodes[0].getbestblockhash(), all_blocks[286].hash)
+ assert_equal(self.nodes[0].getblock(block_291.hash)["confirmations"], -1)
+
+ # Now send a new header on the invalid chain, indicating we're forked off, and expect to get disconnected
+ block_293 = create_block(block_292.sha256, create_coinbase(293), block_292.nTime+1)
+ block_293.solve()
+ headers_message = msg_headers()
+ headers_message.headers.append(CBlockHeader(block_293))
+ test_node.send_message(headers_message)
+ test_node.wait_for_disconnect()
+
+ # 9. Connect node1 to node0 and ensure it is able to sync
+ connect_nodes(self.nodes[0], 1)
+ sync_blocks([self.nodes[0], self.nodes[1]])
+ self.log.info("Successfully synced nodes 1 and 0")
if __name__ == '__main__':
AcceptBlockTest().main()
diff --git a/test/functional/p2p-compactblocks.py b/test/functional/p2p-compactblocks.py
index ff76e49fba..c43744328c 100755
--- a/test/functional/p2p-compactblocks.py
+++ b/test/functional/p2p-compactblocks.py
@@ -15,7 +15,7 @@ from test_framework.blocktools import create_block, create_coinbase, add_witness
from test_framework.script import CScript, OP_TRUE
# TestNode: A peer we use to send messages to bitcoind, and store responses.
-class TestNode(NodeConnCB):
+class TestNode(P2PInterface):
def __init__(self):
super().__init__()
self.last_sendcmpct = []
@@ -25,21 +25,21 @@ class TestNode(NodeConnCB):
# so we can eg wait until a particular block is announced.
self.announced_blockhashes = set()
- def on_sendcmpct(self, conn, message):
+ def on_sendcmpct(self, message):
self.last_sendcmpct.append(message)
- def on_cmpctblock(self, conn, message):
+ def on_cmpctblock(self, message):
self.block_announced = True
self.last_message["cmpctblock"].header_and_shortids.header.calc_sha256()
self.announced_blockhashes.add(self.last_message["cmpctblock"].header_and_shortids.header.sha256)
- def on_headers(self, conn, message):
+ def on_headers(self, message):
self.block_announced = True
for x in self.last_message["headers"].headers:
x.calc_sha256()
self.announced_blockhashes.add(x.sha256)
- def on_inv(self, conn, message):
+ def on_inv(self, message):
for x in self.last_message["inv"].inv:
if x.type == 2:
self.block_announced = True
@@ -60,7 +60,7 @@ class TestNode(NodeConnCB):
msg = msg_getheaders()
msg.locator.vHave = locator
msg.hashstop = hashstop
- self.connection.send_message(msg)
+ self.send_message(msg)
def send_header_for_blocks(self, new_blocks):
headers_message = msg_headers()
@@ -70,7 +70,7 @@ class TestNode(NodeConnCB):
def request_headers_and_sync(self, locator, hashstop=0):
self.clear_block_announcement()
self.get_headers(locator, hashstop)
- assert wait_until(self.received_block_announcement, timeout=30)
+ wait_until(self.received_block_announcement, timeout=30, lock=mininode_lock)
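+ # (wait_until in this framework raises on timeout and evaluates its predicate
+ # under the given lock, so asserting on its return value is no longer needed)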
self.clear_block_announcement()
# Block until a block announcement for a particular block hash is
@@ -78,7 +78,7 @@ class TestNode(NodeConnCB):
def wait_for_block_announcement(self, block_hash, timeout=30):
def received_hash():
return (block_hash in self.announced_blockhashes)
- return wait_until(received_hash, timeout=timeout)
+ wait_until(received_hash, timeout=timeout, lock=mininode_lock)
def send_await_disconnect(self, message, timeout=30):
"""Sends a message to the node and wait for disconnect.
@@ -86,19 +86,16 @@ class TestNode(NodeConnCB):
This is used when we want to send a message into the node that we expect
will get us disconnected, eg an invalid block."""
self.send_message(message)
- success = wait_until(lambda: not self.connected, timeout=timeout)
- if not success:
- logger.error("send_await_disconnect failed!")
- raise AssertionError("send_await_disconnect failed!")
- return success
+ wait_until(lambda: self.state != "connected", timeout=timeout, lock=mininode_lock)
class CompactBlocksTest(BitcoinTestFramework):
- def __init__(self):
- super().__init__()
+ def set_test_params(self):
self.setup_clean_chain = True
# Node0 = pre-segwit, node1 = segwit-aware
self.num_nodes = 2
- self.extra_args = [["-vbparams=segwit:0:0"], ["-txindex"]]
+ # This test was written assuming SegWit is activated using BIP9 at height 432 (3x confirmation window).
+ # TODO: Rewrite this test to support SegWit being always active.
+ self.extra_args = [["-vbparams=segwit:0:0"], ["-vbparams=segwit:0:999999999999", "-txindex"]]
self.utxos = []
def build_block_on_tip(self, node, segwit=False):
@@ -150,9 +147,7 @@ class CompactBlocksTest(BitcoinTestFramework):
# Make sure we get a SENDCMPCT message from our peer
def received_sendcmpct():
return (len(test_node.last_sendcmpct) > 0)
- got_message = wait_until(received_sendcmpct, timeout=30)
- assert(received_sendcmpct())
- assert(got_message)
+ wait_until(received_sendcmpct, timeout=30, lock=mininode_lock)
with mininode_lock:
# Check that the first version received is the preferred one
assert_equal(test_node.last_sendcmpct[0].version, preferred_version)
@@ -167,7 +162,6 @@ class CompactBlocksTest(BitcoinTestFramework):
block_hash = int(node.generate(1)[0], 16)
peer.wait_for_block_announcement(block_hash, timeout=30)
assert(peer.block_announced)
- assert(got_message)
with mininode_lock:
assert predicate(peer), (
@@ -282,7 +276,7 @@ class CompactBlocksTest(BitcoinTestFramework):
# Wait until we've seen the block announcement for the resulting tip
tip = int(node.getbestblockhash(), 16)
- assert(test_node.wait_for_block_announcement(tip))
+ test_node.wait_for_block_announcement(tip)
# Make sure we will receive a fast-announce compact block
self.request_cb_announcements(test_node, node, version)
@@ -293,12 +287,12 @@ class CompactBlocksTest(BitcoinTestFramework):
# Store the raw block in our internal format.
block = FromHex(CBlock(), node.getblock("%02x" % block_hash, False))
- [tx.calc_sha256() for tx in block.vtx]
+ for tx in block.vtx:
+ tx.calc_sha256()
block.rehash()
# Wait until the block was announced (via compact blocks)
- wait_until(test_node.received_block_announcement, timeout=30)
- assert(test_node.received_block_announcement())
+ wait_until(test_node.received_block_announcement, timeout=30, lock=mininode_lock)
# Now fetch and check the compact block
header_and_shortids = None
@@ -314,8 +308,7 @@ class CompactBlocksTest(BitcoinTestFramework):
inv = CInv(4, block_hash) # 4 == "CompactBlock"
test_node.send_message(msg_getdata([inv]))
- wait_until(test_node.received_block_announcement, timeout=30)
- assert(test_node.received_block_announcement())
+ wait_until(test_node.received_block_announcement, timeout=30, lock=mininode_lock)
# Now fetch and check the compact block
header_and_shortids = None
@@ -386,13 +379,11 @@ class CompactBlocksTest(BitcoinTestFramework):
if announce == "inv":
test_node.send_message(msg_inv([CInv(2, block.sha256)]))
- success = wait_until(lambda: "getheaders" in test_node.last_message, timeout=30)
- assert(success)
+ wait_until(lambda: "getheaders" in test_node.last_message, timeout=30, lock=mininode_lock)
test_node.send_header_for_blocks([block])
else:
test_node.send_header_for_blocks([block])
- success = wait_until(lambda: "getdata" in test_node.last_message, timeout=30)
- assert(success)
+ wait_until(lambda: "getdata" in test_node.last_message, timeout=30, lock=mininode_lock)
assert_equal(len(test_node.last_message["getdata"].inv), 1)
assert_equal(test_node.last_message["getdata"].inv[0].type, 4)
assert_equal(test_node.last_message["getdata"].inv[0].hash, block.sha256)
@@ -571,8 +562,7 @@ class CompactBlocksTest(BitcoinTestFramework):
assert_equal(int(node.getbestblockhash(), 16), block.hashPrevBlock)
# We should receive a getdata request
- success = wait_until(lambda: "getdata" in test_node.last_message, timeout=10)
- assert(success)
+ wait_until(lambda: "getdata" in test_node.last_message, timeout=10, lock=mininode_lock)
assert_equal(len(test_node.last_message["getdata"].inv), 1)
assert(test_node.last_message["getdata"].inv[0].type == 2 or test_node.last_message["getdata"].inv[0].type == 2|MSG_WITNESS_FLAG)
assert_equal(test_node.last_message["getdata"].inv[0].hash, block.sha256)
@@ -599,8 +589,7 @@ class CompactBlocksTest(BitcoinTestFramework):
num_to_request = random.randint(1, len(block.vtx))
msg.block_txn_request.from_absolute(sorted(random.sample(range(len(block.vtx)), num_to_request)))
test_node.send_message(msg)
- success = wait_until(lambda: "blocktxn" in test_node.last_message, timeout=10)
- assert(success)
+ wait_until(lambda: "blocktxn" in test_node.last_message, timeout=10, lock=mininode_lock)
[tx.calc_sha256() for tx in block.vtx]
with mininode_lock:
@@ -639,22 +628,20 @@ class CompactBlocksTest(BitcoinTestFramework):
for i in range(MAX_CMPCTBLOCK_DEPTH + 1):
test_node.clear_block_announcement()
new_blocks.append(node.generate(1)[0])
- wait_until(test_node.received_block_announcement, timeout=30)
+ wait_until(test_node.received_block_announcement, timeout=30, lock=mininode_lock)
test_node.clear_block_announcement()
test_node.send_message(msg_getdata([CInv(4, int(new_blocks[0], 16))]))
- success = wait_until(lambda: "cmpctblock" in test_node.last_message, timeout=30)
- assert(success)
+ wait_until(lambda: "cmpctblock" in test_node.last_message, timeout=30, lock=mininode_lock)
test_node.clear_block_announcement()
node.generate(1)
- wait_until(test_node.received_block_announcement, timeout=30)
+ wait_until(test_node.received_block_announcement, timeout=30, lock=mininode_lock)
test_node.clear_block_announcement()
with mininode_lock:
test_node.last_message.pop("block", None)
test_node.send_message(msg_getdata([CInv(4, int(new_blocks[0], 16))]))
- success = wait_until(lambda: "block" in test_node.last_message, timeout=30)
- assert(success)
+ wait_until(lambda: "block" in test_node.last_message, timeout=30, lock=mininode_lock)
with mininode_lock:
test_node.last_message["block"].block.calc_sha256()
assert_equal(test_node.last_message["block"].block.sha256, int(new_blocks[0], 16))
@@ -705,7 +692,7 @@ class CompactBlocksTest(BitcoinTestFramework):
node.submitblock(ToHex(block))
for l in listeners:
- wait_until(lambda: l.received_block_announcement(), timeout=30)
+ wait_until(lambda: l.received_block_announcement(), timeout=30, lock=mininode_lock)
with mininode_lock:
for l in listeners:
assert "cmpctblock" in l.last_message
@@ -801,23 +788,12 @@ class CompactBlocksTest(BitcoinTestFramework):
def run_test(self):
# Setup the p2p connections and start up the network thread.
- self.test_node = TestNode()
- self.segwit_node = TestNode()
- self.old_node = TestNode() # version 1 peer <--> segwit node
-
- connections = []
- connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], self.test_node))
- connections.append(NodeConn('127.0.0.1', p2p_port(1), self.nodes[1],
- self.segwit_node, services=NODE_NETWORK|NODE_WITNESS))
- connections.append(NodeConn('127.0.0.1', p2p_port(1), self.nodes[1],
- self.old_node, services=NODE_NETWORK))
- self.test_node.add_connection(connections[0])
- self.segwit_node.add_connection(connections[1])
- self.old_node.add_connection(connections[2])
+ self.test_node = self.nodes[0].add_p2p_connection(TestNode())
+ self.segwit_node = self.nodes[1].add_p2p_connection(TestNode(), services=NODE_NETWORK|NODE_WITNESS)
+ self.old_node = self.nodes[1].add_p2p_connection(TestNode(), services=NODE_NETWORK)
NetworkThread().start() # Start up network handling in another thread
- # Test logic begins here
self.test_node.wait_for_verack()
# We will need UTXOs to construct transactions in later tests.
diff --git a/test/functional/p2p-feefilter.py b/test/functional/p2p-feefilter.py
index dbccb633a5..ac55336e3d 100755
--- a/test/functional/p2p-feefilter.py
+++ b/test/functional/p2p-feefilter.py
@@ -22,12 +22,12 @@ def allInvsMatch(invsExpected, testnode):
time.sleep(1)
return False
-class TestNode(NodeConnCB):
+class TestNode(P2PInterface):
def __init__(self):
super().__init__()
self.txinvs = []
- def on_inv(self, conn, message):
+ def on_inv(self, message):
for i in message.inv:
if (i.type == 1):
self.txinvs.append(hashToHex(i.hash))
@@ -37,11 +37,8 @@ class TestNode(NodeConnCB):
self.txinvs = []
class FeeFilterTest(BitcoinTestFramework):
-
- def __init__(self):
- super().__init__()
+ def set_test_params(self):
self.num_nodes = 2
- self.setup_clean_chain = False
def run_test(self):
node1 = self.nodes[1]
@@ -51,25 +48,23 @@ class FeeFilterTest(BitcoinTestFramework):
sync_blocks(self.nodes)
# Setup the p2p connections and start up the network thread.
- test_node = TestNode()
- connection = NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], test_node)
- test_node.add_connection(connection)
+ self.nodes[0].add_p2p_connection(TestNode())
NetworkThread().start()
- test_node.wait_for_verack()
+ self.nodes[0].p2p.wait_for_verack()
# Test that invs are received for all txs at feerate of 20 sat/byte
node1.settxfee(Decimal("0.00020000"))
txids = [node1.sendtoaddress(node1.getnewaddress(), 1) for x in range(3)]
- assert(allInvsMatch(txids, test_node))
- test_node.clear_invs()
+ assert(allInvsMatch(txids, self.nodes[0].p2p))
+ self.nodes[0].p2p.clear_invs()
# Set a filter of 15 sat/byte
- test_node.send_and_ping(msg_feefilter(15000))
+ self.nodes[0].p2p.send_and_ping(msg_feefilter(15000))
# Test that txs are still being received (paying 20 sat/byte)
txids = [node1.sendtoaddress(node1.getnewaddress(), 1) for x in range(3)]
- assert(allInvsMatch(txids, test_node))
- test_node.clear_invs()
+ assert(allInvsMatch(txids, self.nodes[0].p2p))
+ self.nodes[0].p2p.clear_invs()
# Change tx fee rate to 10 sat/byte and test they are no longer received
node1.settxfee(Decimal("0.00010000"))
@@ -85,14 +80,14 @@ class FeeFilterTest(BitcoinTestFramework):
# as well.
node0.settxfee(Decimal("0.00020000"))
txids = [node0.sendtoaddress(node0.getnewaddress(), 1)]
- assert(allInvsMatch(txids, test_node))
- test_node.clear_invs()
+ assert(allInvsMatch(txids, self.nodes[0].p2p))
+ self.nodes[0].p2p.clear_invs()
# Remove fee filter and check that txs are received again
- test_node.send_and_ping(msg_feefilter(0))
+ self.nodes[0].p2p.send_and_ping(msg_feefilter(0))
txids = [node1.sendtoaddress(node1.getnewaddress(), 1) for x in range(3)]
- assert(allInvsMatch(txids, test_node))
- test_node.clear_invs()
+ assert(allInvsMatch(txids, self.nodes[0].p2p))
+ self.nodes[0].p2p.clear_invs()
if __name__ == '__main__':
FeeFilterTest().main()
diff --git a/test/functional/p2p-fingerprint.py b/test/functional/p2p-fingerprint.py
new file mode 100755
index 0000000000..209c789f22
--- /dev/null
+++ b/test/functional/p2p-fingerprint.py
@@ -0,0 +1,152 @@
+#!/usr/bin/env python3
+# Copyright (c) 2017 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test various fingerprinting protections.
+
+If a peer requests a stale block more than a month old, or its header, the node
+should pretend that it does not have it, to avoid fingerprinting.
+"""
+
+import time
+
+from test_framework.blocktools import (create_block, create_coinbase)
+from test_framework.mininode import (
+ CInv,
+ NetworkThread,
+ P2PInterface,
+ msg_headers,
+ msg_block,
+ msg_getdata,
+ msg_getheaders,
+ wait_until,
+)
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import (
+ assert_equal,
+)
+
+class P2PFingerprintTest(BitcoinTestFramework):
+ def set_test_params(self):
+ self.setup_clean_chain = True
+ self.num_nodes = 1
+
+ # Build a chain of blocks on top of given one
+ def build_chain(self, nblocks, prev_hash, prev_height, prev_median_time):
+ blocks = []
+ for _ in range(nblocks):
+ coinbase = create_coinbase(prev_height + 1)
+ block_time = prev_median_time + 1
+ block = create_block(int(prev_hash, 16), coinbase, block_time)
+ block.solve()
+
+ blocks.append(block)
+ prev_hash = block.hash
+ prev_height += 1
+ prev_median_time = block_time
+ return blocks
+
+ # Send a getdata request for a given block hash
+ def send_block_request(self, block_hash, node):
+ msg = msg_getdata()
+ msg.inv.append(CInv(2, block_hash)) # 2 == "Block"
+ node.send_message(msg)
+
+ # Send a getheaders request for a given single block hash
+ def send_header_request(self, block_hash, node):
+ msg = msg_getheaders()
+ msg.hashstop = block_hash
+ node.send_message(msg)
+
+ # Check whether last block received from node has a given hash
+ def last_block_equals(self, expected_hash, node):
+ block_msg = node.last_message.get("block")
+ return block_msg and block_msg.block.rehash() == expected_hash
+
+ # Check whether last block header received from node has a given hash
+ def last_header_equals(self, expected_hash, node):
+ headers_msg = node.last_message.get("headers")
+ return (headers_msg and
+ headers_msg.headers and
+ headers_msg.headers[0].rehash() == expected_hash)
+
+ # Checks that stale blocks timestamped more than a month ago are not served
+ # by the node while recent stale blocks and old active chain blocks are.
+ # This does not currently test that stale blocks timestamped within the
+ # last month but that have over a month's worth of work are also withheld.
+ def run_test(self):
+ node0 = self.nodes[0].add_p2p_connection(P2PInterface())
+
+ NetworkThread().start()
+ node0.wait_for_verack()
+
+ # Set node time to 60 days ago
+ self.nodes[0].setmocktime(int(time.time()) - 60 * 24 * 60 * 60)
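+ # (the blocks mined below are therefore timestamped ~60 days in the past, so
+ # once mock time is reset later, the reorged-away branch is well past the
+ # one-month threshold)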
+
+ # Generating a chain of 10 blocks
+ block_hashes = self.nodes[0].generate(nblocks=10)
+
+ # Create longer chain starting 2 blocks before current tip
+ height = len(block_hashes) - 2
+ block_hash = block_hashes[height - 1]
+ block_time = self.nodes[0].getblockheader(block_hash)["mediantime"] + 1
+ new_blocks = self.build_chain(5, block_hash, height, block_time)
+
+ # Force reorg to a longer chain
+ node0.send_message(msg_headers(new_blocks))
+ node0.wait_for_getdata()
+ for block in new_blocks:
+ node0.send_and_ping(msg_block(block))
+
+ # Check that reorg succeeded
+ assert_equal(self.nodes[0].getblockcount(), 13)
+
+ stale_hash = int(block_hashes[-1], 16)
+
+ # Check that getdata request for stale block succeeds
+ self.send_block_request(stale_hash, node0)
+ test_function = lambda: self.last_block_equals(stale_hash, node0)
+ wait_until(test_function, timeout=3)
+
+ # Check that getheader request for stale block header succeeds
+ self.send_header_request(stale_hash, node0)
+ test_function = lambda: self.last_header_equals(stale_hash, node0)
+ wait_until(test_function, timeout=3)
+
+ # Extend the active chain so the stale block is much older than the chain tip
+ self.nodes[0].setmocktime(0)
+ tip = self.nodes[0].generate(nblocks=1)[0]
+ assert_equal(self.nodes[0].getblockcount(), 14)
+
+ # Send getdata & getheaders to refresh last received getheaders message
+ block_hash = int(tip, 16)
+ self.send_block_request(block_hash, node0)
+ self.send_header_request(block_hash, node0)
+ node0.sync_with_ping()
+
+ # Request for very old stale block should now fail
+ self.send_block_request(stale_hash, node0)
+ time.sleep(3)
+ assert not self.last_block_equals(stale_hash, node0)
+
+ # Request for very old stale block header should now fail
+ self.send_header_request(stale_hash, node0)
+ time.sleep(3)
+ assert not self.last_header_equals(stale_hash, node0)
+
+ # Verify we can fetch very old blocks and headers on the active chain
+ block_hash = int(block_hashes[2], 16)
+ self.send_block_request(block_hash, node0)
+ self.send_header_request(block_hash, node0)
+ node0.sync_with_ping()
+
+ self.send_block_request(block_hash, node0)
+ test_function = lambda: self.last_block_equals(block_hash, node0)
+ wait_until(test_function, timeout=3)
+
+ self.send_header_request(block_hash, node0)
+ test_function = lambda: self.last_header_equals(block_hash, node0)
+ wait_until(test_function, timeout=3)
+
+if __name__ == '__main__':
+ P2PFingerprintTest().main()
diff --git a/test/functional/p2p-fullblocktest.py b/test/functional/p2p-fullblocktest.py
index e7fe7372c8..f19b845a32 100755
--- a/test/functional/p2p-fullblocktest.py
+++ b/test/functional/p2p-fullblocktest.py
@@ -20,7 +20,7 @@ from test_framework.key import CECKey
from test_framework.script import *
import struct
-class PreviousSpendableOutput(object):
+class PreviousSpendableOutput():
def __init__(self, tx = CTransaction(), n = -1):
self.tx = tx
self.n = n # the output we're spending
@@ -49,12 +49,11 @@ class CBrokenBlock(CBlock):
return r
class FullBlockTest(ComparisonTestFramework):
-
# Can either run this test as 1 node with expected answers, or two and compare them.
# Change the "outcome" variable from each TestInstance object to only do the comparison.
- def __init__(self):
- super().__init__()
+ def set_test_params(self):
self.num_nodes = 1
+ self.setup_clean_chain = True
self.block_heights = {}
self.coinbase_key = CECKey()
self.coinbase_key.set_secretbytes(b"horsebattery")
@@ -397,7 +396,7 @@ class FullBlockTest(ComparisonTestFramework):
yield rejected(RejectResult(16, b'bad-cb-length'))
# Extend the b26 chain to make sure bitcoind isn't accepting b26
- b27 = block(27, spend=out[7])
+ block(27, spend=out[7])
yield rejected(False)
# Now try a too-large-coinbase script
@@ -409,7 +408,7 @@ class FullBlockTest(ComparisonTestFramework):
yield rejected(RejectResult(16, b'bad-cb-length'))
# Extend the b28 chain to make sure bitcoind isn't accepting b28
- b29 = block(29, spend=out[7])
+ block(29, spend=out[7])
yield rejected(False)
# b30 has a max-sized coinbase scriptSig.
@@ -581,7 +580,7 @@ class FullBlockTest(ComparisonTestFramework):
# same as b40, but one less sigop
tip(39)
- b41 = block(41, spend=None)
+ block(41, spend=None)
update_block(41, b40.vtx[1:-1])
b41_sigops_to_fill = b40_sigops_to_fill - 1
tx = CTransaction()
@@ -927,7 +926,7 @@ class FullBlockTest(ComparisonTestFramework):
# -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19)
#
tip(64)
- b65 = block(65)
+ block(65)
tx1 = create_and_sign_tx(out[19].tx, out[19].n, out[19].tx.vout[0].nValue)
tx2 = create_and_sign_tx(tx1, 0, 0)
update_block(65, [tx1, tx2])
@@ -939,7 +938,7 @@ class FullBlockTest(ComparisonTestFramework):
# -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19)
# \-> b66 (20)
tip(65)
- b66 = block(66)
+ block(66)
tx1 = create_and_sign_tx(out[20].tx, out[20].n, out[20].tx.vout[0].nValue)
tx2 = create_and_sign_tx(tx1, 0, 1)
update_block(66, [tx2, tx1])
@@ -952,7 +951,7 @@ class FullBlockTest(ComparisonTestFramework):
#
#
tip(65)
- b67 = block(67)
+ block(67)
tx1 = create_and_sign_tx(out[20].tx, out[20].n, out[20].tx.vout[0].nValue)
tx2 = create_and_sign_tx(tx1, 0, 1)
tx3 = create_and_sign_tx(tx1, 0, 2)
@@ -972,7 +971,7 @@ class FullBlockTest(ComparisonTestFramework):
# this succeeds
#
tip(65)
- b68 = block(68, additional_coinbase_value=10)
+ block(68, additional_coinbase_value=10)
tx = create_and_sign_tx(out[20].tx, out[20].n, out[20].tx.vout[0].nValue-9)
update_block(68, [tx])
yield rejected(RejectResult(16, b'bad-cb-amount'))
@@ -1175,7 +1174,7 @@ class FullBlockTest(ComparisonTestFramework):
#
# -> b81 (26) -> b82 (27) -> b83 (28)
#
- b83 = block(83)
+ block(83)
op_codes = [OP_IF, OP_INVALIDOPCODE, OP_ELSE, OP_TRUE, OP_ENDIF]
script = CScript(op_codes)
tx1 = create_and_sign_tx(out[28].tx, out[28].n, out[28].tx.vout[0].nValue, script)
@@ -1195,7 +1194,7 @@ class FullBlockTest(ComparisonTestFramework):
# \-> b85 (29) -> b86 (30) \-> b89a (32)
#
#
- b84 = block(84)
+ block(84)
tx1 = create_tx(out[29].tx, out[29].n, 0, CScript([OP_RETURN]))
tx1.vout.append(CTxOut(0, CScript([OP_TRUE])))
tx1.vout.append(CTxOut(0, CScript([OP_TRUE])))
diff --git a/test/functional/p2p-leaktests.py b/test/functional/p2p-leaktests.py
index 33b57ef33d..b469a9a47a 100755
--- a/test/functional/p2p-leaktests.py
+++ b/test/functional/p2p-leaktests.py
@@ -9,7 +9,10 @@ received a VERACK.
This test connects to a node and sends it a few messages, trying to entice it
into sending us something it shouldn't.
-"""
+
+Also test that nodes that send unsupported service bits to bitcoind are disconnected
+and don't receive a VERACK. Unsupported service bits are currently 1 << 5 and
+1 << 7 (until August 1st 2018)."""
from test_framework.mininode import *
from test_framework.test_framework import BitcoinTestFramework
@@ -17,7 +20,7 @@ from test_framework.util import *
banscore = 10
-class CLazyNode(NodeConnCB):
+class CLazyNode(P2PInterface):
def __init__(self):
super().__init__()
self.unexpected_msg = False
@@ -27,44 +30,42 @@ class CLazyNode(NodeConnCB):
self.unexpected_msg = True
self.log.info("should not have received message: %s" % message.command)
- def on_open(self, conn):
- self.connected = True
+ def on_open(self):
self.ever_connected = True
- def on_version(self, conn, message): self.bad_message(message)
- def on_verack(self, conn, message): self.bad_message(message)
- def on_reject(self, conn, message): self.bad_message(message)
- def on_inv(self, conn, message): self.bad_message(message)
- def on_addr(self, conn, message): self.bad_message(message)
- def on_alert(self, conn, message): self.bad_message(message)
- def on_getdata(self, conn, message): self.bad_message(message)
- def on_getblocks(self, conn, message): self.bad_message(message)
- def on_tx(self, conn, message): self.bad_message(message)
- def on_block(self, conn, message): self.bad_message(message)
- def on_getaddr(self, conn, message): self.bad_message(message)
- def on_headers(self, conn, message): self.bad_message(message)
- def on_getheaders(self, conn, message): self.bad_message(message)
- def on_ping(self, conn, message): self.bad_message(message)
- def on_mempool(self, conn): self.bad_message(message)
- def on_pong(self, conn, message): self.bad_message(message)
- def on_feefilter(self, conn, message): self.bad_message(message)
- def on_sendheaders(self, conn, message): self.bad_message(message)
- def on_sendcmpct(self, conn, message): self.bad_message(message)
- def on_cmpctblock(self, conn, message): self.bad_message(message)
- def on_getblocktxn(self, conn, message): self.bad_message(message)
- def on_blocktxn(self, conn, message): self.bad_message(message)
+ def on_version(self, message): self.bad_message(message)
+ def on_verack(self, message): self.bad_message(message)
+ def on_reject(self, message): self.bad_message(message)
+ def on_inv(self, message): self.bad_message(message)
+ def on_addr(self, message): self.bad_message(message)
+ def on_getdata(self, message): self.bad_message(message)
+ def on_getblocks(self, message): self.bad_message(message)
+ def on_tx(self, message): self.bad_message(message)
+ def on_block(self, message): self.bad_message(message)
+ def on_getaddr(self, message): self.bad_message(message)
+ def on_headers(self, message): self.bad_message(message)
+ def on_getheaders(self, message): self.bad_message(message)
+ def on_ping(self, message): self.bad_message(message)
+ def on_mempool(self, message): self.bad_message(message)
+ def on_pong(self, message): self.bad_message(message)
+ def on_feefilter(self, message): self.bad_message(message)
+ def on_sendheaders(self, message): self.bad_message(message)
+ def on_sendcmpct(self, message): self.bad_message(message)
+ def on_cmpctblock(self, message): self.bad_message(message)
+ def on_getblocktxn(self, message): self.bad_message(message)
+ def on_blocktxn(self, message): self.bad_message(message)
# Node that never sends a version. We'll use this to send a bunch of messages
# anyway, and eventually get disconnected.
class CNodeNoVersionBan(CLazyNode):
# send a bunch of veracks without sending a message. This should get us disconnected.
# NOTE: implementation-specific check here. Remove if bitcoind ban behavior changes
- def on_open(self, conn):
- super().on_open(conn)
+ def on_open(self):
+ super().on_open()
for i in range(banscore):
self.send_message(msg_verack())
- def on_reject(self, conn, message): pass
+ def on_reject(self, message): pass
# Node that never sends a version. This one just sits idle and hopes to receive
# any message (it shouldn't!)
@@ -78,40 +79,37 @@ class CNodeNoVerackIdle(CLazyNode):
self.version_received = False
super().__init__()
- def on_reject(self, conn, message): pass
- def on_verack(self, conn, message): pass
+ def on_reject(self, message): pass
+ def on_verack(self, message): pass
# When version is received, don't reply with a verack. Instead, see if the
# node will give us a message that it shouldn't. This is not an exhaustive
# list!
- def on_version(self, conn, message):
+ def on_version(self, message):
self.version_received = True
- conn.send_message(msg_ping())
- conn.send_message(msg_getaddr())
+ self.send_message(msg_ping())
+ self.send_message(msg_getaddr())
class P2PLeakTest(BitcoinTestFramework):
- def __init__(self):
- super().__init__()
+ def set_test_params(self):
self.num_nodes = 1
self.extra_args = [['-banscore='+str(banscore)]]
def run_test(self):
- no_version_bannode = CNodeNoVersionBan()
- no_version_idlenode = CNodeNoVersionIdle()
- no_verack_idlenode = CNodeNoVerackIdle()
-
- connections = []
- connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], no_version_bannode, send_version=False))
- connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], no_version_idlenode, send_version=False))
- connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], no_verack_idlenode))
- no_version_bannode.add_connection(connections[0])
- no_version_idlenode.add_connection(connections[1])
- no_verack_idlenode.add_connection(connections[2])
+ self.nodes[0].setmocktime(1501545600) # August 1st 2017
+
+ no_version_bannode = self.nodes[0].add_p2p_connection(CNodeNoVersionBan(), send_version=False)
+ no_version_idlenode = self.nodes[0].add_p2p_connection(CNodeNoVersionIdle(), send_version=False)
+ no_verack_idlenode = self.nodes[0].add_p2p_connection(CNodeNoVerackIdle())
+ unsupported_service_bit5_node = self.nodes[0].add_p2p_connection(CLazyNode(), services=NODE_NETWORK|NODE_UNSUPPORTED_SERVICE_BIT_5)
+ unsupported_service_bit7_node = self.nodes[0].add_p2p_connection(CLazyNode(), services=NODE_NETWORK|NODE_UNSUPPORTED_SERVICE_BIT_7)
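+ # (NODE_UNSUPPORTED_SERVICE_BIT_5 == 1 << 5 and NODE_UNSUPPORTED_SERVICE_BIT_7
+ # == 1 << 7, presumably exported via the test_framework.mininode wildcard import)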
NetworkThread().start() # Start up network handling in another thread
- assert wait_until(lambda: no_version_bannode.ever_connected, timeout=10)
- assert wait_until(lambda: no_version_idlenode.ever_connected, timeout=10)
- assert wait_until(lambda: no_verack_idlenode.version_received, timeout=10)
+ wait_until(lambda: no_version_bannode.ever_connected, timeout=10, lock=mininode_lock)
+ wait_until(lambda: no_version_idlenode.ever_connected, timeout=10, lock=mininode_lock)
+ wait_until(lambda: no_verack_idlenode.version_received, timeout=10, lock=mininode_lock)
+ wait_until(lambda: unsupported_service_bit5_node.ever_connected, timeout=10, lock=mininode_lock)
+ wait_until(lambda: unsupported_service_bit7_node.ever_connected, timeout=10, lock=mininode_lock)
# Mine a block and make sure that it's not sent to the connected nodes
self.nodes[0].generate(1)
@@ -120,14 +118,34 @@ class P2PLeakTest(BitcoinTestFramework):
time.sleep(5)
#This node should have been banned
- assert not no_version_bannode.connected
+ assert no_version_bannode.state != "connected"
+
+ # These nodes should have been disconnected
+ assert unsupported_service_bit5_node.state != "connected"
+ assert unsupported_service_bit7_node.state != "connected"
- [conn.disconnect_node() for conn in connections]
+ self.nodes[0].disconnect_p2ps()
+
+ # Wait until all connections are closed
+ wait_until(lambda: len(self.nodes[0].getpeerinfo()) == 0)
# Make sure no unexpected messages came in
assert(no_version_bannode.unexpected_msg == False)
assert(no_version_idlenode.unexpected_msg == False)
assert(no_verack_idlenode.unexpected_msg == False)
+ assert not unsupported_service_bit5_node.unexpected_msg
+ assert not unsupported_service_bit7_node.unexpected_msg
+
+ self.log.info("Service bits 5 and 7 are allowed after August 1st 2018")
+ self.nodes[0].setmocktime(1533168000) # August 2nd 2018
+
+ allowed_service_bit5_node = self.nodes[0].add_p2p_connection(P2PInterface(), services=NODE_NETWORK|NODE_UNSUPPORTED_SERVICE_BIT_5)
+ allowed_service_bit7_node = self.nodes[0].add_p2p_connection(P2PInterface(), services=NODE_NETWORK|NODE_UNSUPPORTED_SERVICE_BIT_7)
+
+ NetworkThread().start() # Network thread stopped when all previous P2PInterfaces disconnected. Restart it
+
+ wait_until(lambda: allowed_service_bit5_node.message_count["verack"], lock=mininode_lock)
+ wait_until(lambda: allowed_service_bit7_node.message_count["verack"], lock=mininode_lock)
if __name__ == '__main__':
P2PLeakTest().main()
diff --git a/test/functional/p2p-mempool.py b/test/functional/p2p-mempool.py
index 34ef249eea..d24dbac51d 100755
--- a/test/functional/p2p-mempool.py
+++ b/test/functional/p2p-mempool.py
@@ -13,24 +13,20 @@ from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
class P2PMempoolTests(BitcoinTestFramework):
-
- def __init__(self):
- super().__init__()
+ def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
self.extra_args = [["-peerbloomfilters=0"]]
def run_test(self):
- #connect a mininode
- aTestNode = NodeConnCB()
- node = NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], aTestNode)
- aTestNode.add_connection(node)
+ # Add a p2p connection
+ self.nodes[0].add_p2p_connection(P2PInterface())
NetworkThread().start()
- aTestNode.wait_for_verack()
+ self.nodes[0].p2p.wait_for_verack()
#request mempool
- aTestNode.send_message(msg_mempool())
- aTestNode.wait_for_disconnect()
+ self.nodes[0].p2p.send_message(msg_mempool())
+ self.nodes[0].p2p.wait_for_disconnect()
#mininode must be disconnected at this point
assert_equal(len(self.nodes[0].getpeerinfo()), 0)
diff --git a/test/functional/p2p-segwit.py b/test/functional/p2p-segwit.py
index 63dfbb8ae6..a240d79013 100755
--- a/test/functional/p2p-segwit.py
+++ b/test/functional/p2p-segwit.py
@@ -17,7 +17,6 @@ from binascii import hexlify
# The versionbit bit used to signal activation of SegWit
VB_WITNESS_BIT = 1
VB_PERIOD = 144
-VB_ACTIVATION_THRESHOLD = 108
VB_TOP_BITS = 0x20000000
MAX_SIGOP_COST = 80000
@@ -32,12 +31,40 @@ def get_virtual_size(witness_block):
vsize = int((3*base_size + total_size + 3)/4)
return vsize
-class TestNode(NodeConnCB):
+def test_transaction_acceptance(rpc, p2p, tx, with_witness, accepted, reason=None):
+ """Send a transaction to the node and check that it's accepted to the mempool
+
+ - Submit the transaction over the p2p interface
+    - Use the getrawmempool rpc to check for acceptance."""
+ tx_message = msg_tx(tx)
+ if with_witness:
+ tx_message = msg_witness_tx(tx)
+ p2p.send_message(tx_message)
+ p2p.sync_with_ping()
+ assert_equal(tx.hash in rpc.getrawmempool(), accepted)
+    if reason is not None and not accepted:
+ # Check the rejection reason as well.
+ with mininode_lock:
+ assert_equal(p2p.last_message["reject"].reason, reason)
+
+def test_witness_block(rpc, p2p, block, accepted, with_witness=True):
+ """Send a block to the node and check that it's accepted
+
+ - Submit the block over the p2p interface
+    - Use the getbestblockhash rpc to check for acceptance."""
+ if with_witness:
+ p2p.send_message(msg_witness_block(block))
+ else:
+ p2p.send_message(msg_block(block))
+ p2p.sync_with_ping()
+ assert_equal(rpc.getbestblockhash() == block.hash, accepted)
+
+class TestNode(P2PInterface):
def __init__(self):
super().__init__()
self.getdataset = set()
- def on_getdata(self, conn, message):
+ def on_getdata(self, message):
for inv in message.inv:
self.getdataset.add(inv.hash)
@@ -68,29 +95,8 @@ class TestNode(NodeConnCB):
self.wait_for_block(blockhash, timeout)
return self.last_message["block"].block
- def test_transaction_acceptance(self, tx, with_witness, accepted, reason=None):
- tx_message = msg_tx(tx)
- if with_witness:
- tx_message = msg_witness_tx(tx)
- self.send_message(tx_message)
- self.sync_with_ping()
- assert_equal(tx.hash in self.connection.rpc.getrawmempool(), accepted)
- if (reason != None and not accepted):
- # Check the rejection reason as well.
- with mininode_lock:
- assert_equal(self.last_message["reject"].reason, reason)
-
- # Test whether a witness block had the correct effect on the tip
- def test_witness_block(self, block, accepted, with_witness=True):
- if with_witness:
- self.send_message(msg_witness_block(block))
- else:
- self.send_message(msg_block(block))
- self.sync_with_ping()
- assert_equal(self.connection.rpc.getbestblockhash() == block.hash, accepted)
-
# Used to keep track of anyone-can-spend outputs that we can use in the tests
-class UTXO(object):
+class UTXO():
def __init__(self, sha256, n, nValue):
self.sha256 = sha256
self.n = n
@@ -109,12 +115,11 @@ def sign_P2PK_witness_input(script, txTo, inIdx, hashtype, value, key):
class SegWitTest(BitcoinTestFramework):
-
- def __init__(self):
- super().__init__()
+ def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 3
- self.extra_args = [["-whitelist=127.0.0.1"], ["-whitelist=127.0.0.1", "-acceptnonstdtxn=0"], ["-whitelist=127.0.0.1", "-vbparams=segwit:0:0"]]
+    # This test covers SegWit both pre- and post-activation, so use the normal BIP9 activation.
+ self.extra_args = [["-whitelist=127.0.0.1", "-vbparams=segwit:0:999999999999"], ["-whitelist=127.0.0.1", "-acceptnonstdtxn=0", "-vbparams=segwit:0:999999999999"], ["-whitelist=127.0.0.1", "-vbparams=segwit:0:0"]]
def setup_network(self):
self.setup_nodes()
@@ -143,7 +148,7 @@ class SegWitTest(BitcoinTestFramework):
''' Individual tests '''
def test_witness_services(self):
self.log.info("Verifying NODE_WITNESS service bit")
- assert((self.test_node.connection.nServices & NODE_WITNESS) != 0)
+ assert((self.test_node.nServices & NODE_WITNESS) != 0)
# See if sending a regular transaction works, and create a utxo
@@ -202,7 +207,7 @@ class SegWitTest(BitcoinTestFramework):
self.update_witness_block_with_transactions(block, [tx])
# Sending witness data before activation is not allowed (anti-spam
# rule).
- self.test_node.test_witness_block(block, accepted=False)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block, accepted=False)
# TODO: fix synchronization so we can test reject reason
# Right now, bitcoind delays sending reject messages for blocks
# until the future, making synchronization here difficult.
@@ -229,7 +234,7 @@ class SegWitTest(BitcoinTestFramework):
tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b""))
tx2.vout.append(CTxOut(tx.vout[0].nValue-1000, scriptPubKey))
tx2.rehash()
- self.test_node.test_transaction_acceptance(tx2, False, True)
+ test_transaction_acceptance(self.nodes[0].rpc, self.test_node, tx2, False, True)
self.nodes[0].generate(1)
sync_blocks(self.nodes)
@@ -246,18 +251,18 @@ class SegWitTest(BitcoinTestFramework):
tx3.rehash()
# Note that this should be rejected for the premature witness reason,
# rather than a policy check, since segwit hasn't activated yet.
- self.std_node.test_transaction_acceptance(tx3, True, False, b'no-witness-yet')
+ test_transaction_acceptance(self.nodes[1].rpc, self.std_node, tx3, True, False, b'no-witness-yet')
# If we send without witness, it should be accepted.
- self.std_node.test_transaction_acceptance(tx3, False, True)
+ test_transaction_acceptance(self.nodes[1].rpc, self.std_node, tx3, False, True)
# Now create a new anyone-can-spend utxo for the next test.
tx4 = CTransaction()
tx4.vin.append(CTxIn(COutPoint(tx3.sha256, 0), CScript([p2sh_program])))
tx4.vout.append(CTxOut(tx3.vout[0].nValue-1000, CScript([OP_TRUE])))
tx4.rehash()
- self.test_node.test_transaction_acceptance(tx3, False, True)
- self.test_node.test_transaction_acceptance(tx4, False, True)
+ test_transaction_acceptance(self.nodes[0].rpc, self.test_node, tx3, False, True)
+ test_transaction_acceptance(self.nodes[0].rpc, self.test_node, tx4, False, True)
self.nodes[0].generate(1)
sync_blocks(self.nodes)
@@ -318,7 +323,7 @@ class SegWitTest(BitcoinTestFramework):
assert(msg_witness_block(block).serialize() != msg_block(block).serialize())
# This empty block should be valid.
- self.test_node.test_witness_block(block, accepted=True)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block, accepted=True)
# Try to tweak the nonce
block_2 = self.build_next_block()
@@ -329,7 +334,7 @@ class SegWitTest(BitcoinTestFramework):
assert(block_2.vtx[0].vout[-1] != block.vtx[0].vout[-1])
# This should also be valid.
- self.test_node.test_witness_block(block_2, accepted=True)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block_2, accepted=True)
# Now test commitments with actual transactions
assert (len(self.utxo) > 0)
@@ -362,7 +367,7 @@ class SegWitTest(BitcoinTestFramework):
block_3.rehash()
block_3.solve()
- self.test_node.test_witness_block(block_3, accepted=False)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block_3, accepted=False)
# Add a different commitment with different nonce, but in the
# right location, and with some funds burned(!).
@@ -376,7 +381,7 @@ class SegWitTest(BitcoinTestFramework):
block_3.rehash()
assert(len(block_3.vtx[0].vout) == 4) # 3 OP_returns
block_3.solve()
- self.test_node.test_witness_block(block_3, accepted=True)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block_3, accepted=True)
# Finally test that a block with no witness transactions can
# omit the commitment.
@@ -388,7 +393,7 @@ class SegWitTest(BitcoinTestFramework):
block_4.vtx.append(tx3)
block_4.hashMerkleRoot = block_4.calc_merkle_root()
block_4.solve()
- self.test_node.test_witness_block(block_4, with_witness=False, accepted=True)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block_4, with_witness=False, accepted=True)
# Update available utxo's for use in later test.
self.utxo.pop(0)
@@ -429,11 +434,11 @@ class SegWitTest(BitcoinTestFramework):
# Change the nonce -- should not cause the block to be permanently
# failed
block.vtx[0].wit.vtxinwit[0].scriptWitness.stack = [ ser_uint256(1) ]
- self.test_node.test_witness_block(block, accepted=False)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block, accepted=False)
# Changing the witness nonce doesn't change the block hash
block.vtx[0].wit.vtxinwit[0].scriptWitness.stack = [ ser_uint256(0) ]
- self.test_node.test_witness_block(block, accepted=True)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block, accepted=True)
def test_witness_block_size(self):
@@ -498,7 +503,7 @@ class SegWitTest(BitcoinTestFramework):
# limit
assert(len(block.serialize(True)) > 2*1024*1024)
- self.test_node.test_witness_block(block, accepted=False)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block, accepted=False)
# Now resize the second transaction to make the block fit.
cur_length = len(block.vtx[-1].wit.vtxinwit[0].scriptWitness.stack[0])
@@ -508,7 +513,7 @@ class SegWitTest(BitcoinTestFramework):
block.solve()
assert(get_virtual_size(block) == MAX_BLOCK_BASE_SIZE)
- self.test_node.test_witness_block(block, accepted=True)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block, accepted=True)
# Update available utxo's
self.utxo.pop(0)
@@ -575,7 +580,7 @@ class SegWitTest(BitcoinTestFramework):
self.update_witness_block_with_transactions(block, [tx])
# Extra witness data should not be allowed.
- self.test_node.test_witness_block(block, accepted=False)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block, accepted=False)
# Try extra signature data. Ok if we're not spending a witness output.
block.vtx[1].wit.vtxinwit = []
@@ -584,7 +589,7 @@ class SegWitTest(BitcoinTestFramework):
add_witness_commitment(block)
block.solve()
- self.test_node.test_witness_block(block, accepted=True)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block, accepted=True)
# Now try extra witness/signature data on an input that DOES require a
# witness
@@ -600,7 +605,7 @@ class SegWitTest(BitcoinTestFramework):
self.update_witness_block_with_transactions(block, [tx2])
# This has extra witness data, so it should fail.
- self.test_node.test_witness_block(block, accepted=False)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block, accepted=False)
# Now get rid of the extra witness, but add extra scriptSig data
tx2.vin[0].scriptSig = CScript([OP_TRUE])
@@ -612,7 +617,7 @@ class SegWitTest(BitcoinTestFramework):
block.solve()
# This has extra signature data for a witness input, so it should fail.
- self.test_node.test_witness_block(block, accepted=False)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block, accepted=False)
# Now get rid of the extra scriptsig on the witness input, and verify
# success (even with extra scriptsig data in the non-witness input)
@@ -621,7 +626,7 @@ class SegWitTest(BitcoinTestFramework):
add_witness_commitment(block)
block.solve()
- self.test_node.test_witness_block(block, accepted=True)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block, accepted=True)
# Update utxo for later tests
self.utxo.pop(0)
@@ -654,14 +659,14 @@ class SegWitTest(BitcoinTestFramework):
tx2.rehash()
self.update_witness_block_with_transactions(block, [tx, tx2])
- self.test_node.test_witness_block(block, accepted=False)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block, accepted=False)
# Now reduce the length of the stack element
tx2.wit.vtxinwit[0].scriptWitness.stack[0] = b'a'*(MAX_SCRIPT_ELEMENT_SIZE)
add_witness_commitment(block)
block.solve()
- self.test_node.test_witness_block(block, accepted=True)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block, accepted=True)
# Update the utxo for later tests
self.utxo.pop()
@@ -696,7 +701,7 @@ class SegWitTest(BitcoinTestFramework):
self.update_witness_block_with_transactions(block, [tx, tx2])
- self.test_node.test_witness_block(block, accepted=False)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block, accepted=False)
# Try again with one less byte in the witness program
witness_program = CScript([b'a'*520]*19 + [OP_DROP]*62 + [OP_TRUE])
@@ -711,7 +716,7 @@ class SegWitTest(BitcoinTestFramework):
tx2.rehash()
block.vtx = [block.vtx[0]]
self.update_witness_block_with_transactions(block, [tx, tx2])
- self.test_node.test_witness_block(block, accepted=True)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block, accepted=True)
self.utxo.pop()
self.utxo.append(UTXO(tx2.sha256, 0, tx2.vout[0].nValue))
@@ -737,7 +742,7 @@ class SegWitTest(BitcoinTestFramework):
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx])
- self.test_node.test_witness_block(block, accepted=True)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block, accepted=True)
# Try various ways to spend tx that should all break.
# This "broken" transaction serializer will not normalize
@@ -772,7 +777,7 @@ class SegWitTest(BitcoinTestFramework):
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx2])
- self.test_node.test_witness_block(block, accepted=False)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block, accepted=False)
# Now try using a too short vtxinwit
tx2.wit.vtxinwit.pop()
@@ -780,7 +785,7 @@ class SegWitTest(BitcoinTestFramework):
block.vtx = [block.vtx[0]]
self.update_witness_block_with_transactions(block, [tx2])
- self.test_node.test_witness_block(block, accepted=False)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block, accepted=False)
# Now make one of the intermediate witnesses be incorrect
tx2.wit.vtxinwit.append(CTxInWitness())
@@ -789,13 +794,13 @@ class SegWitTest(BitcoinTestFramework):
block.vtx = [block.vtx[0]]
self.update_witness_block_with_transactions(block, [tx2])
- self.test_node.test_witness_block(block, accepted=False)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block, accepted=False)
# Fix the broken witness and the block should be accepted.
tx2.wit.vtxinwit[5].scriptWitness.stack = [b'a', witness_program]
block.vtx = [block.vtx[0]]
self.update_witness_block_with_transactions(block, [tx2])
- self.test_node.test_witness_block(block, accepted=True)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block, accepted=True)
self.utxo.pop()
self.utxo.append(UTXO(tx2.sha256, 0, tx2.vout[0].nValue))
@@ -835,11 +840,11 @@ class SegWitTest(BitcoinTestFramework):
# it's from)
assert_equal(len(self.nodes[0].getrawmempool()), 0)
assert_equal(len(self.nodes[1].getrawmempool()), 0)
- self.old_node.test_transaction_acceptance(tx, with_witness=True, accepted=False)
- self.test_node.test_transaction_acceptance(tx, with_witness=True, accepted=False)
+ test_transaction_acceptance(self.nodes[0].rpc, self.old_node, tx, with_witness=True, accepted=False)
+ test_transaction_acceptance(self.nodes[0].rpc, self.test_node, tx, with_witness=True, accepted=False)
# But eliminating the witness should fix it
- self.test_node.test_transaction_acceptance(tx, with_witness=False, accepted=True)
+ test_transaction_acceptance(self.nodes[0].rpc, self.test_node, tx, with_witness=False, accepted=True)
# Cleanup: mine the first transaction and update utxo
self.nodes[0].generate(1)
@@ -871,11 +876,11 @@ class SegWitTest(BitcoinTestFramework):
# Verify that unnecessary witnesses are rejected.
self.test_node.announce_tx_and_wait_for_getdata(tx)
assert_equal(len(self.nodes[0].getrawmempool()), 0)
- self.test_node.test_transaction_acceptance(tx, with_witness=True, accepted=False)
+ test_transaction_acceptance(self.nodes[0].rpc, self.test_node, tx, with_witness=True, accepted=False)
# Verify that removing the witness succeeds.
self.test_node.announce_tx_and_wait_for_getdata(tx)
- self.test_node.test_transaction_acceptance(tx, with_witness=False, accepted=True)
+ test_transaction_acceptance(self.nodes[0].rpc, self.test_node, tx, with_witness=False, accepted=True)
# Now try to add extra witness data to a valid witness tx.
witness_program = CScript([OP_TRUE])
@@ -900,24 +905,24 @@ class SegWitTest(BitcoinTestFramework):
# Node will not be blinded to the transaction
self.std_node.announce_tx_and_wait_for_getdata(tx3)
- self.std_node.test_transaction_acceptance(tx3, True, False, b'tx-size')
+ test_transaction_acceptance(self.nodes[1].rpc, self.std_node, tx3, True, False, b'tx-size')
self.std_node.announce_tx_and_wait_for_getdata(tx3)
- self.std_node.test_transaction_acceptance(tx3, True, False, b'tx-size')
+ test_transaction_acceptance(self.nodes[1].rpc, self.std_node, tx3, True, False, b'tx-size')
# Remove witness stuffing, instead add extra witness push on stack
tx3.vout[0] = CTxOut(tx2.vout[0].nValue-1000, CScript([OP_TRUE]))
tx3.wit.vtxinwit[0].scriptWitness.stack = [CScript([CScriptNum(1)]), witness_program ]
tx3.rehash()
- self.test_node.test_transaction_acceptance(tx2, with_witness=True, accepted=True)
- self.test_node.test_transaction_acceptance(tx3, with_witness=True, accepted=False)
+ test_transaction_acceptance(self.nodes[0].rpc, self.test_node, tx2, with_witness=True, accepted=True)
+ test_transaction_acceptance(self.nodes[0].rpc, self.test_node, tx3, with_witness=True, accepted=False)
# Get rid of the extra witness, and verify acceptance.
tx3.wit.vtxinwit[0].scriptWitness.stack = [ witness_program ]
# Also check that old_node gets a tx announcement, even though this is
# a witness transaction.
self.old_node.wait_for_inv([CInv(1, tx2.sha256)]) # wait until tx2 was inv'ed
- self.test_node.test_transaction_acceptance(tx3, with_witness=True, accepted=True)
+ test_transaction_acceptance(self.nodes[0].rpc, self.test_node, tx3, with_witness=True, accepted=True)
self.old_node.wait_for_inv([CInv(1, tx3.sha256)])
# Test that getrawtransaction returns correct witness information
@@ -956,20 +961,20 @@ class SegWitTest(BitcoinTestFramework):
self.test_node.announce_block_and_wait_for_getdata(block1, use_header=False)
assert(self.test_node.last_message["getdata"].inv[0].type == blocktype)
- self.test_node.test_witness_block(block1, True)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block1, True)
block2 = self.build_next_block(nVersion=4)
block2.solve()
self.test_node.announce_block_and_wait_for_getdata(block2, use_header=True)
assert(self.test_node.last_message["getdata"].inv[0].type == blocktype)
- self.test_node.test_witness_block(block2, True)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block2, True)
block3 = self.build_next_block(nVersion=(VB_TOP_BITS | (1<<15)))
block3.solve()
self.test_node.announce_block_and_wait_for_getdata(block3, use_header=True)
assert(self.test_node.last_message["getdata"].inv[0].type == blocktype)
- self.test_node.test_witness_block(block3, True)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block3, True)
# Check that we can getdata for witness blocks or regular blocks,
# and the right thing happens.
@@ -999,7 +1004,7 @@ class SegWitTest(BitcoinTestFramework):
# This gives us a witness commitment.
assert(len(block.vtx[0].wit.vtxinwit) == 1)
assert(len(block.vtx[0].wit.vtxinwit[0].scriptWitness.stack) == 1)
- self.test_node.test_witness_block(block, accepted=True)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block, accepted=True)
# Now try to retrieve it...
rpc_block = self.nodes[0].getblock(block.hash, False)
non_wit_block = self.test_node.request_block(block.sha256, 2)
@@ -1053,7 +1058,7 @@ class SegWitTest(BitcoinTestFramework):
p2sh_tx.rehash()
# Mine it on test_node to create the confirmed output.
- self.test_node.test_transaction_acceptance(p2sh_tx, with_witness=True, accepted=True)
+ test_transaction_acceptance(self.nodes[0].rpc, self.test_node, p2sh_tx, with_witness=True, accepted=True)
self.nodes[0].generate(1)
sync_blocks(self.nodes)
@@ -1065,7 +1070,7 @@ class SegWitTest(BitcoinTestFramework):
tx.vout.append(CTxOut(8000, scriptPubKey)) # Might burn this later
tx.rehash()
- self.std_node.test_transaction_acceptance(tx, with_witness=True, accepted=segwit_activated)
+ test_transaction_acceptance(self.nodes[1].rpc, self.std_node, tx, with_witness=True, accepted=segwit_activated)
# Now create something that looks like a P2PKH output. This won't be spendable.
scriptPubKey = CScript([OP_0, hash160(witness_hash)])
@@ -1082,7 +1087,7 @@ class SegWitTest(BitcoinTestFramework):
tx2.vout = [CTxOut(p2sh_tx.vout[0].nValue-1000, scriptPubKey)]
tx2.rehash()
- self.std_node.test_transaction_acceptance(tx2, with_witness=True, accepted=segwit_activated)
+ test_transaction_acceptance(self.nodes[1].rpc, self.std_node, tx2, with_witness=True, accepted=segwit_activated)
# Now update self.utxo for later tests.
tx3 = CTransaction()
@@ -1095,13 +1100,13 @@ class SegWitTest(BitcoinTestFramework):
tx3.wit.vtxinwit.append(CTxInWitness())
tx3.wit.vtxinwit[0].scriptWitness.stack = [witness_program]
tx3.rehash()
- self.test_node.test_transaction_acceptance(tx3, with_witness=True, accepted=True)
+ test_transaction_acceptance(self.nodes[0].rpc, self.test_node, tx3, with_witness=True, accepted=True)
else:
# tx and tx2 didn't go anywhere; just clean up the p2sh_tx output.
tx3.vin = [CTxIn(COutPoint(p2sh_tx.sha256, 0), CScript([witness_program]))]
tx3.vout = [CTxOut(p2sh_tx.vout[0].nValue-1000, witness_program)]
tx3.rehash()
- self.test_node.test_transaction_acceptance(tx3, with_witness=True, accepted=True)
+ test_transaction_acceptance(self.nodes[0].rpc, self.test_node, tx3, with_witness=True, accepted=True)
self.nodes[0].generate(1)
sync_blocks(self.nodes)
@@ -1125,7 +1130,7 @@ class SegWitTest(BitcoinTestFramework):
tx.rehash()
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx])
- self.test_node.test_witness_block(block, accepted=True)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block, accepted=True)
self.utxo.pop(0)
for i in range(NUM_TESTS):
self.utxo.append(UTXO(tx.sha256, i, split_value))
@@ -1144,8 +1149,8 @@ class SegWitTest(BitcoinTestFramework):
tx.vin = [CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b"")]
tx.vout = [CTxOut(self.utxo[0].nValue-1000, scriptPubKey)]
tx.rehash()
- self.std_node.test_transaction_acceptance(tx, with_witness=True, accepted=False)
- self.test_node.test_transaction_acceptance(tx, with_witness=True, accepted=True)
+ test_transaction_acceptance(self.nodes[1].rpc, self.std_node, tx, with_witness=True, accepted=False)
+ test_transaction_acceptance(self.nodes[0].rpc, self.test_node, tx, with_witness=True, accepted=True)
self.utxo.pop(0)
temp_utxo.append(UTXO(tx.sha256, 0, tx.vout[0].nValue))
@@ -1164,8 +1169,8 @@ class SegWitTest(BitcoinTestFramework):
tx2.rehash()
# Gets accepted to test_node, because standardness of outputs isn't
# checked with fRequireStandard
- self.test_node.test_transaction_acceptance(tx2, with_witness=True, accepted=True)
- self.std_node.test_transaction_acceptance(tx2, with_witness=True, accepted=False)
+ test_transaction_acceptance(self.nodes[0].rpc, self.test_node, tx2, with_witness=True, accepted=True)
+ test_transaction_acceptance(self.nodes[1].rpc, self.std_node, tx2, with_witness=True, accepted=False)
temp_utxo.pop() # last entry in temp_utxo was the output we just spent
temp_utxo.append(UTXO(tx2.sha256, 0, tx2.vout[0].nValue))
@@ -1181,7 +1186,7 @@ class SegWitTest(BitcoinTestFramework):
tx3.rehash()
# Spending a higher version witness output is not allowed by policy,
# even with fRequireStandard=false.
- self.test_node.test_transaction_acceptance(tx3, with_witness=True, accepted=False)
+ test_transaction_acceptance(self.nodes[0].rpc, self.test_node, tx3, with_witness=True, accepted=False)
self.test_node.sync_with_ping()
with mininode_lock:
assert(b"reserved for soft-fork upgrades" in self.test_node.last_message["reject"].reason)
@@ -1189,7 +1194,7 @@ class SegWitTest(BitcoinTestFramework):
# Building a block with the transaction must be valid, however.
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx2, tx3])
- self.test_node.test_witness_block(block, accepted=True)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block, accepted=True)
sync_blocks(self.nodes)
# Add utxo to our list
@@ -1207,7 +1212,7 @@ class SegWitTest(BitcoinTestFramework):
# This next line will rehash the coinbase and update the merkle
# root, and solve.
self.update_witness_block_with_transactions(block, [])
- self.test_node.test_witness_block(block, accepted=True)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block, accepted=True)
spend_tx = CTransaction()
spend_tx.vin = [CTxIn(COutPoint(block.vtx[0].sha256, 0), b"")]
@@ -1221,13 +1226,13 @@ class SegWitTest(BitcoinTestFramework):
sync_blocks(self.nodes)
block2 = self.build_next_block()
self.update_witness_block_with_transactions(block2, [spend_tx])
- self.test_node.test_witness_block(block2, accepted=False)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block2, accepted=False)
# Advancing one more block should allow the spend.
self.nodes[0].generate(1)
block2 = self.build_next_block()
self.update_witness_block_with_transactions(block2, [spend_tx])
- self.test_node.test_witness_block(block2, accepted=True)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block2, accepted=True)
sync_blocks(self.nodes)
@@ -1248,11 +1253,11 @@ class SegWitTest(BitcoinTestFramework):
tx.vout.append(CTxOut(self.utxo[0].nValue-1000, scriptPubKey))
tx.rehash()
- self.test_node.test_transaction_acceptance(tx, with_witness=True, accepted=True)
+ test_transaction_acceptance(self.nodes[0].rpc, self.test_node, tx, with_witness=True, accepted=True)
# Mine this transaction in preparation for following tests.
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx])
- self.test_node.test_witness_block(block, accepted=True)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block, accepted=True)
sync_blocks(self.nodes)
self.utxo.pop(0)
@@ -1269,19 +1274,19 @@ class SegWitTest(BitcoinTestFramework):
# Too-large input value
sign_P2PK_witness_input(witness_program, tx, 0, hashtype, prev_utxo.nValue+1, key)
self.update_witness_block_with_transactions(block, [tx])
- self.test_node.test_witness_block(block, accepted=False)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block, accepted=False)
# Too-small input value
sign_P2PK_witness_input(witness_program, tx, 0, hashtype, prev_utxo.nValue-1, key)
block.vtx.pop() # remove last tx
self.update_witness_block_with_transactions(block, [tx])
- self.test_node.test_witness_block(block, accepted=False)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block, accepted=False)
# Now try correct value
sign_P2PK_witness_input(witness_program, tx, 0, hashtype, prev_utxo.nValue, key)
block.vtx.pop()
self.update_witness_block_with_transactions(block, [tx])
- self.test_node.test_witness_block(block, accepted=True)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block, accepted=True)
prev_utxo = UTXO(tx.sha256, 0, tx.vout[0].nValue)
@@ -1305,7 +1310,7 @@ class SegWitTest(BitcoinTestFramework):
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx])
- self.test_node.test_witness_block(block, accepted=True)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block, accepted=True)
block = self.build_next_block()
used_sighash_single_out_of_bounds = False
@@ -1347,7 +1352,7 @@ class SegWitTest(BitcoinTestFramework):
# Test the block periodically, if we're close to maxblocksize
if (get_virtual_size(block) > MAX_BLOCK_BASE_SIZE - 1000):
self.update_witness_block_with_transactions(block, [])
- self.test_node.test_witness_block(block, accepted=True)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block, accepted=True)
block = self.build_next_block()
if (not used_sighash_single_out_of_bounds):
@@ -1355,7 +1360,7 @@ class SegWitTest(BitcoinTestFramework):
# Test the transactions we've added to the block
if (len(block.vtx) > 1):
self.update_witness_block_with_transactions(block, [])
- self.test_node.test_witness_block(block, accepted=True)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block, accepted=True)
# Now test witness version 0 P2PKH transactions
pubkeyhash = hash160(pubkey)
@@ -1377,7 +1382,7 @@ class SegWitTest(BitcoinTestFramework):
tx2.vin[0].scriptSig = CScript([signature, pubkey])
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx, tx2])
- self.test_node.test_witness_block(block, accepted=False)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block, accepted=False)
# Move the signature to the witness.
block.vtx.pop()
@@ -1387,7 +1392,7 @@ class SegWitTest(BitcoinTestFramework):
tx2.rehash()
self.update_witness_block_with_transactions(block, [tx2])
- self.test_node.test_witness_block(block, accepted=True)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block, accepted=True)
temp_utxos.pop(0)
@@ -1406,7 +1411,7 @@ class SegWitTest(BitcoinTestFramework):
index += 1
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx])
- self.test_node.test_witness_block(block, accepted=True)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block, accepted=True)
for i in range(len(tx.vout)):
self.utxo.append(UTXO(tx.sha256, i, tx.vout[i].nValue))
@@ -1433,10 +1438,10 @@ class SegWitTest(BitcoinTestFramework):
tx.rehash()
# Verify mempool acceptance and block validity
- self.test_node.test_transaction_acceptance(tx, with_witness=False, accepted=True)
+ test_transaction_acceptance(self.nodes[0].rpc, self.test_node, tx, with_witness=False, accepted=True)
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx])
- self.test_node.test_witness_block(block, accepted=True, with_witness=segwit_activated)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block, accepted=True, with_witness=segwit_activated)
sync_blocks(self.nodes)
# Now test attempts to spend the output.
@@ -1450,12 +1455,12 @@ class SegWitTest(BitcoinTestFramework):
# will require a witness to spend a witness program regardless of
# segwit activation. Note that older bitcoinds that are not
# segwit-aware would also reject this for failing CLEANSTACK.
- self.test_node.test_transaction_acceptance(spend_tx, with_witness=False, accepted=False)
+ test_transaction_acceptance(self.nodes[0].rpc, self.test_node, spend_tx, with_witness=False, accepted=False)
# Try to put the witness script in the scriptSig, should also fail.
spend_tx.vin[0].scriptSig = CScript([p2wsh_pubkey, b'a'])
spend_tx.rehash()
- self.test_node.test_transaction_acceptance(spend_tx, with_witness=False, accepted=False)
+ test_transaction_acceptance(self.nodes[0].rpc, self.test_node, spend_tx, with_witness=False, accepted=False)
# Now put the witness script in the witness, should succeed after
# segwit activates.
@@ -1465,7 +1470,7 @@ class SegWitTest(BitcoinTestFramework):
spend_tx.wit.vtxinwit[0].scriptWitness.stack = [ b'a', witness_program ]
# Verify mempool acceptance
- self.test_node.test_transaction_acceptance(spend_tx, with_witness=True, accepted=segwit_activated)
+ test_transaction_acceptance(self.nodes[0].rpc, self.test_node, spend_tx, with_witness=True, accepted=segwit_activated)
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [spend_tx])
@@ -1473,9 +1478,9 @@ class SegWitTest(BitcoinTestFramework):
# should be valid. If we're after activation, then sending this with
# witnesses should be valid.
if segwit_activated:
- self.test_node.test_witness_block(block, accepted=True)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block, accepted=True)
else:
- self.test_node.test_witness_block(block, accepted=True, with_witness=False)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block, accepted=True, with_witness=False)
# Update self.utxo
self.utxo.pop(0)
@@ -1496,7 +1501,7 @@ class SegWitTest(BitcoinTestFramework):
# Restart with the new binary
self.stop_node(node_id)
- self.nodes[node_id] = self.start_node(node_id, self.options.tmpdir)
+ self.start_node(node_id, extra_args=["-vbparams=segwit:0:999999999999"])
connect_nodes(self.nodes[0], node_id)
sync_blocks(self.nodes)
@@ -1559,7 +1564,7 @@ class SegWitTest(BitcoinTestFramework):
block_1 = self.build_next_block()
self.update_witness_block_with_transactions(block_1, [tx])
- self.test_node.test_witness_block(block_1, accepted=True)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block_1, accepted=True)
tx2 = CTransaction()
# If we try to spend the first n-1 outputs from tx, that should be
@@ -1576,7 +1581,7 @@ class SegWitTest(BitcoinTestFramework):
block_2 = self.build_next_block()
self.update_witness_block_with_transactions(block_2, [tx2])
- self.test_node.test_witness_block(block_2, accepted=False)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block_2, accepted=False)
# Try dropping the last input in tx2, and add an output that has
# too many sigops (contributing to legacy sigop count).
@@ -1589,14 +1594,14 @@ class SegWitTest(BitcoinTestFramework):
tx2.rehash()
block_3 = self.build_next_block()
self.update_witness_block_with_transactions(block_3, [tx2])
- self.test_node.test_witness_block(block_3, accepted=False)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block_3, accepted=False)
# If we drop the last checksig in this output, the tx should succeed.
block_4 = self.build_next_block()
tx2.vout[-1].scriptPubKey = CScript([OP_CHECKSIG]*(checksig_count-1))
tx2.rehash()
self.update_witness_block_with_transactions(block_4, [tx2])
- self.test_node.test_witness_block(block_4, accepted=True)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block_4, accepted=True)
# Reset the tip back down for the next test
sync_blocks(self.nodes)
@@ -1612,7 +1617,7 @@ class SegWitTest(BitcoinTestFramework):
tx2.wit.vtxinwit[-1].scriptWitness.stack = [ witness_program_justright ]
tx2.rehash()
self.update_witness_block_with_transactions(block_5, [tx2])
- self.test_node.test_witness_block(block_5, accepted=True)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block_5, accepted=True)
# TODO: test p2sh sigop counting
@@ -1690,7 +1695,7 @@ class SegWitTest(BitcoinTestFramework):
# Confirm it in a block.
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx])
- self.test_node.test_witness_block(block, accepted=True)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block, accepted=True)
# Now try to spend it. Send it to a P2WSH output, which we'll
# use in the next test.
@@ -1709,11 +1714,11 @@ class SegWitTest(BitcoinTestFramework):
tx2.rehash()
# Should fail policy test.
- self.test_node.test_transaction_acceptance(tx2, True, False, b'non-mandatory-script-verify-flag (Using non-compressed keys in segwit)')
+ test_transaction_acceptance(self.nodes[0].rpc, self.test_node, tx2, True, False, b'non-mandatory-script-verify-flag (Using non-compressed keys in segwit)')
# But passes consensus.
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx2])
- self.test_node.test_witness_block(block, accepted=True)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block, accepted=True)
# Test 2: P2WSH
# Try to spend the P2WSH output created in last test.
@@ -1729,11 +1734,11 @@ class SegWitTest(BitcoinTestFramework):
sign_P2PK_witness_input(witness_program, tx3, 0, SIGHASH_ALL, tx2.vout[0].nValue, key)
# Should fail policy test.
- self.test_node.test_transaction_acceptance(tx3, True, False, b'non-mandatory-script-verify-flag (Using non-compressed keys in segwit)')
+ test_transaction_acceptance(self.nodes[0].rpc, self.test_node, tx3, True, False, b'non-mandatory-script-verify-flag (Using non-compressed keys in segwit)')
# But passes consensus.
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx3])
- self.test_node.test_witness_block(block, accepted=True)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block, accepted=True)
# Test 3: P2SH(P2WSH)
# Try to spend the P2SH output created in the last test.
@@ -1746,10 +1751,10 @@ class SegWitTest(BitcoinTestFramework):
sign_P2PK_witness_input(witness_program, tx4, 0, SIGHASH_ALL, tx3.vout[0].nValue, key)
# Should fail policy test.
- self.test_node.test_transaction_acceptance(tx4, True, False, b'non-mandatory-script-verify-flag (Using non-compressed keys in segwit)')
+ test_transaction_acceptance(self.nodes[0].rpc, self.test_node, tx4, True, False, b'non-mandatory-script-verify-flag (Using non-compressed keys in segwit)')
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx4])
- self.test_node.test_witness_block(block, accepted=True)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block, accepted=True)
# Test 4: Uncompressed pubkeys should still be valid in non-segwit
# transactions.
@@ -1761,10 +1766,10 @@ class SegWitTest(BitcoinTestFramework):
tx5.vin[0].scriptSig = CScript([signature, pubkey])
tx5.rehash()
# Should pass policy and consensus.
- self.test_node.test_transaction_acceptance(tx5, True, True)
+ test_transaction_acceptance(self.nodes[0].rpc, self.test_node, tx5, True, True)
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx5])
- self.test_node.test_witness_block(block, accepted=True)
+ test_witness_block(self.nodes[0].rpc, self.test_node, block, accepted=True)
self.utxo.append(UTXO(tx5.sha256, 0, tx5.vout[0].nValue))
def test_non_standard_witness(self):
@@ -1794,7 +1799,7 @@ class SegWitTest(BitcoinTestFramework):
tx.vout.append(CTxOut(outputvalue, CScript([OP_HASH160, p2sh, OP_EQUAL])))
tx.rehash()
txid = tx.sha256
- self.test_node.test_transaction_acceptance(tx, with_witness=False, accepted=True)
+ test_transaction_acceptance(self.nodes[0].rpc, self.test_node, tx, with_witness=False, accepted=True)
self.nodes[0].generate(1)
sync_blocks(self.nodes)
@@ -1819,45 +1824,45 @@ class SegWitTest(BitcoinTestFramework):
# Testing native P2WSH
# Witness stack size, excluding witnessScript, over 100 is non-standard
p2wsh_txs[0].wit.vtxinwit[0].scriptWitness.stack = [pad] * 101 + [scripts[0]]
- self.std_node.test_transaction_acceptance(p2wsh_txs[0], True, False, b'bad-witness-nonstandard')
+ test_transaction_acceptance(self.nodes[1].rpc, self.std_node, p2wsh_txs[0], True, False, b'bad-witness-nonstandard')
# Non-standard nodes should accept
- self.test_node.test_transaction_acceptance(p2wsh_txs[0], True, True)
+ test_transaction_acceptance(self.nodes[0].rpc, self.test_node, p2wsh_txs[0], True, True)
# Stack element size over 80 bytes is non-standard
p2wsh_txs[1].wit.vtxinwit[0].scriptWitness.stack = [pad * 81] * 100 + [scripts[1]]
- self.std_node.test_transaction_acceptance(p2wsh_txs[1], True, False, b'bad-witness-nonstandard')
+ test_transaction_acceptance(self.nodes[1].rpc, self.std_node, p2wsh_txs[1], True, False, b'bad-witness-nonstandard')
# Non-standard nodes should accept
- self.test_node.test_transaction_acceptance(p2wsh_txs[1], True, True)
+ test_transaction_acceptance(self.nodes[0].rpc, self.test_node, p2wsh_txs[1], True, True)
# Standard nodes should accept if element size is not over 80 bytes
p2wsh_txs[1].wit.vtxinwit[0].scriptWitness.stack = [pad * 80] * 100 + [scripts[1]]
- self.std_node.test_transaction_acceptance(p2wsh_txs[1], True, True)
+ test_transaction_acceptance(self.nodes[1].rpc, self.std_node, p2wsh_txs[1], True, True)
# witnessScript size at 3600 bytes is standard
p2wsh_txs[2].wit.vtxinwit[0].scriptWitness.stack = [pad, pad, scripts[2]]
- self.test_node.test_transaction_acceptance(p2wsh_txs[2], True, True)
- self.std_node.test_transaction_acceptance(p2wsh_txs[2], True, True)
+ test_transaction_acceptance(self.nodes[0].rpc, self.test_node, p2wsh_txs[2], True, True)
+ test_transaction_acceptance(self.nodes[1].rpc, self.std_node, p2wsh_txs[2], True, True)
# witnessScript size at 3601 bytes is non-standard
p2wsh_txs[3].wit.vtxinwit[0].scriptWitness.stack = [pad, pad, pad, scripts[3]]
- self.std_node.test_transaction_acceptance(p2wsh_txs[3], True, False, b'bad-witness-nonstandard')
+ test_transaction_acceptance(self.nodes[1].rpc, self.std_node, p2wsh_txs[3], True, False, b'bad-witness-nonstandard')
# Non-standard nodes should accept
- self.test_node.test_transaction_acceptance(p2wsh_txs[3], True, True)
+ test_transaction_acceptance(self.nodes[0].rpc, self.test_node, p2wsh_txs[3], True, True)
# Repeating the same tests with P2SH-P2WSH
p2sh_txs[0].wit.vtxinwit[0].scriptWitness.stack = [pad] * 101 + [scripts[0]]
- self.std_node.test_transaction_acceptance(p2sh_txs[0], True, False, b'bad-witness-nonstandard')
- self.test_node.test_transaction_acceptance(p2sh_txs[0], True, True)
+ test_transaction_acceptance(self.nodes[1].rpc, self.std_node, p2sh_txs[0], True, False, b'bad-witness-nonstandard')
+ test_transaction_acceptance(self.nodes[0].rpc, self.test_node, p2sh_txs[0], True, True)
p2sh_txs[1].wit.vtxinwit[0].scriptWitness.stack = [pad * 81] * 100 + [scripts[1]]
- self.std_node.test_transaction_acceptance(p2sh_txs[1], True, False, b'bad-witness-nonstandard')
- self.test_node.test_transaction_acceptance(p2sh_txs[1], True, True)
+ test_transaction_acceptance(self.nodes[1].rpc, self.std_node, p2sh_txs[1], True, False, b'bad-witness-nonstandard')
+ test_transaction_acceptance(self.nodes[0].rpc, self.test_node, p2sh_txs[1], True, True)
p2sh_txs[1].wit.vtxinwit[0].scriptWitness.stack = [pad * 80] * 100 + [scripts[1]]
- self.std_node.test_transaction_acceptance(p2sh_txs[1], True, True)
+ test_transaction_acceptance(self.nodes[1].rpc, self.std_node, p2sh_txs[1], True, True)
p2sh_txs[2].wit.vtxinwit[0].scriptWitness.stack = [pad, pad, scripts[2]]
- self.test_node.test_transaction_acceptance(p2sh_txs[2], True, True)
- self.std_node.test_transaction_acceptance(p2sh_txs[2], True, True)
+ test_transaction_acceptance(self.nodes[0].rpc, self.test_node, p2sh_txs[2], True, True)
+ test_transaction_acceptance(self.nodes[1].rpc, self.std_node, p2sh_txs[2], True, True)
p2sh_txs[3].wit.vtxinwit[0].scriptWitness.stack = [pad, pad, pad, scripts[3]]
- self.std_node.test_transaction_acceptance(p2sh_txs[3], True, False, b'bad-witness-nonstandard')
- self.test_node.test_transaction_acceptance(p2sh_txs[3], True, True)
+ test_transaction_acceptance(self.nodes[1].rpc, self.std_node, p2sh_txs[3], True, False, b'bad-witness-nonstandard')
+ test_transaction_acceptance(self.nodes[0].rpc, self.test_node, p2sh_txs[3], True, True)
self.nodes[0].generate(1) # Mine and clean up the mempool of the non-standard node
# Valid but non-standard transactions in a block should be accepted by the standard node
@@ -1870,19 +1875,12 @@ class SegWitTest(BitcoinTestFramework):
def run_test(self):
# Setup the p2p connections and start up the network thread.
- self.test_node = TestNode() # sets NODE_WITNESS|NODE_NETWORK
- self.old_node = TestNode() # only NODE_NETWORK
- self.std_node = TestNode() # for testing node1 (fRequireStandard=true)
-
- self.p2p_connections = [self.test_node, self.old_node]
-
- self.connections = []
- self.connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], self.test_node, services=NODE_NETWORK|NODE_WITNESS))
- self.connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], self.old_node, services=NODE_NETWORK))
- self.connections.append(NodeConn('127.0.0.1', p2p_port(1), self.nodes[1], self.std_node, services=NODE_NETWORK|NODE_WITNESS))
- self.test_node.add_connection(self.connections[0])
- self.old_node.add_connection(self.connections[1])
- self.std_node.add_connection(self.connections[2])
+ # self.test_node sets NODE_WITNESS|NODE_NETWORK
+ self.test_node = self.nodes[0].add_p2p_connection(TestNode(), services=NODE_NETWORK|NODE_WITNESS)
+ # self.old_node sets only NODE_NETWORK
+ self.old_node = self.nodes[0].add_p2p_connection(TestNode(), services=NODE_NETWORK)
+ # self.std_node is for testing node1 (fRequireStandard=true)
+ self.std_node = self.nodes[1].add_p2p_connection(TestNode(), services=NODE_NETWORK|NODE_WITNESS)
NetworkThread().start() # Start up network handling in another thread
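The segwit helpers test_transaction_acceptance() and test_witness_block() are now module-level functions taking the RPC handle and the P2P connection explicitly, which is why every call site above passes self.nodes[n].rpc plus the relevant interface. A sketch of how they compose, mirroring call sites in these hunks (tx and block stand in for CTransaction/CBlock objects the test has already built):

    # Relay to the standard-policy node and expect rejection with a specific reason.
    test_transaction_acceptance(self.nodes[1].rpc, self.std_node, tx,
                                with_witness=True, accepted=False,
                                reason=b'bad-witness-nonstandard')
    # Relay to the non-standard node and expect mempool acceptance.
    test_transaction_acceptance(self.nodes[0].rpc, self.test_node, tx,
                                with_witness=True, accepted=True)
    # Deliver a full block and check the tip via getbestblockhash.
    test_witness_block(self.nodes[0].rpc, self.test_node, block, accepted=True)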
diff --git a/test/functional/p2p-timeouts.py b/test/functional/p2p-timeouts.py
index c3b29c215b..b2f3a861cf 100755
--- a/test/functional/p2p-timeouts.py
+++ b/test/functional/p2p-timeouts.py
@@ -27,59 +27,49 @@ from test_framework.mininode import *
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
-class TestNode(NodeConnCB):
- def on_version(self, conn, message):
+class TestNode(P2PInterface):
+ def on_version(self, message):
# Don't send a verack in response
pass
class TimeoutsTest(BitcoinTestFramework):
- def __init__(self):
- super().__init__()
+ def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
def run_test(self):
# Setup the p2p connections and start up the network thread.
- self.no_verack_node = TestNode() # never send verack
- self.no_version_node = TestNode() # never send version (just ping)
- self.no_send_node = TestNode() # never send anything
-
- connections = []
- connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], self.no_verack_node))
- connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], self.no_version_node, send_version=False))
- connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], self.no_send_node, send_version=False))
- self.no_verack_node.add_connection(connections[0])
- self.no_version_node.add_connection(connections[1])
- self.no_send_node.add_connection(connections[2])
+ no_verack_node = self.nodes[0].add_p2p_connection(TestNode())
+ no_version_node = self.nodes[0].add_p2p_connection(TestNode(), send_version=False)
+ no_send_node = self.nodes[0].add_p2p_connection(TestNode(), send_version=False)
NetworkThread().start() # Start up network handling in another thread
sleep(1)
- assert(self.no_verack_node.connected)
- assert(self.no_version_node.connected)
- assert(self.no_send_node.connected)
+ assert no_verack_node.connected
+ assert no_version_node.connected
+ assert no_send_node.connected
- ping_msg = msg_ping()
- connections[0].send_message(ping_msg)
- connections[1].send_message(ping_msg)
+ no_verack_node.send_message(msg_ping())
+ no_version_node.send_message(msg_ping())
sleep(30)
- assert "version" in self.no_verack_node.last_message
+ assert "version" in no_verack_node.last_message
- assert(self.no_verack_node.connected)
- assert(self.no_version_node.connected)
- assert(self.no_send_node.connected)
+ assert no_verack_node.connected
+ assert no_version_node.connected
+ assert no_send_node.connected
- connections[0].send_message(ping_msg)
- connections[1].send_message(ping_msg)
+ no_verack_node.send_message(msg_ping())
+ no_version_node.send_message(msg_ping())
sleep(31)
- assert(not self.no_verack_node.connected)
- assert(not self.no_version_node.connected)
- assert(not self.no_send_node.connected)
+ assert not no_verack_node.connected
+ assert not no_version_node.connected
+ assert not no_send_node.connected
if __name__ == '__main__':
TimeoutsTest().main()
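In the timeouts test each peer is now driven directly through its P2PInterface: messages are sent on the interface object itself, and send_version=False keeps a connection silent until the test chooses to speak. A compact sketch of that flow (TestNode is the subclass defined in this file; the timing is handled by the sleeps shown above):

    quiet_peer = self.nodes[0].add_p2p_connection(TestNode(), send_version=False)
    NetworkThread().start()
    quiet_peer.send_message(msg_ping())  # sent straight on the interface, no NodeConn object
    # ...once bitcoind's handshake timeout has elapsed (see the sleeps above)...
    assert not quiet_peer.connected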
diff --git a/test/functional/p2p-versionbits-warning.py b/test/functional/p2p-versionbits-warning.py
index df7e8ce5c1..be137381d0 100755
--- a/test/functional/p2p-versionbits-warning.py
+++ b/test/functional/p2p-versionbits-warning.py
@@ -23,13 +23,12 @@ WARN_UNKNOWN_RULES_MINED = "Unknown block versions being mined! It's possible un
WARN_UNKNOWN_RULES_ACTIVE = "unknown new rules activated (versionbit {})".format(VB_UNKNOWN_BIT)
VB_PATTERN = re.compile("^Warning.*versionbit")
-class TestNode(NodeConnCB):
- def on_inv(self, conn, message):
+class TestNode(P2PInterface):
+ def on_inv(self, message):
pass
class VersionBitsWarningTest(BitcoinTestFramework):
- def __init__(self):
- super().__init__()
+ def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
@@ -65,16 +64,12 @@ class VersionBitsWarningTest(BitcoinTestFramework):
def run_test(self):
# Setup the p2p connection and start up the network thread.
- test_node = TestNode()
-
- connections = []
- connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], test_node))
- test_node.add_connection(connections[0])
+ self.nodes[0].add_p2p_connection(TestNode())
NetworkThread().start() # Start up network handling in another thread
# Test logic begins here
- test_node.wait_for_verack()
+ self.nodes[0].p2p.wait_for_verack()
# 1. Have the node mine one period worth of blocks
self.nodes[0].generate(VB_PERIOD)
@@ -82,26 +77,24 @@ class VersionBitsWarningTest(BitcoinTestFramework):
# 2. Now build one period of blocks on the tip, with < VB_THRESHOLD
# blocks signaling some unknown bit.
nVersion = VB_TOP_BITS | (1<<VB_UNKNOWN_BIT)
- self.send_blocks_with_version(test_node, VB_THRESHOLD-1, nVersion)
+ self.send_blocks_with_version(self.nodes[0].p2p, VB_THRESHOLD-1, nVersion)
# Fill rest of period with regular version blocks
self.nodes[0].generate(VB_PERIOD - VB_THRESHOLD + 1)
# Check that we're not getting any versionbit-related errors in
# get*info()
- assert(not VB_PATTERN.match(self.nodes[0].getinfo()["errors"]))
- assert(not VB_PATTERN.match(self.nodes[0].getmininginfo()["errors"]))
+ assert(not VB_PATTERN.match(self.nodes[0].getmininginfo()["warnings"]))
assert(not VB_PATTERN.match(self.nodes[0].getnetworkinfo()["warnings"]))
# 3. Now build one period of blocks with >= VB_THRESHOLD blocks signaling
# some unknown bit
- self.send_blocks_with_version(test_node, VB_THRESHOLD, nVersion)
+ self.send_blocks_with_version(self.nodes[0].p2p, VB_THRESHOLD, nVersion)
self.nodes[0].generate(VB_PERIOD - VB_THRESHOLD)
# Might not get a versionbits-related alert yet, as we should
# have gotten a different alert due to more than 51/100 blocks
# being of unexpected version.
# Check that get*info() shows some kind of error.
- assert(WARN_UNKNOWN_RULES_MINED in self.nodes[0].getinfo()["errors"])
- assert(WARN_UNKNOWN_RULES_MINED in self.nodes[0].getmininginfo()["errors"])
+ assert(WARN_UNKNOWN_RULES_MINED in self.nodes[0].getmininginfo()["warnings"])
assert(WARN_UNKNOWN_RULES_MINED in self.nodes[0].getnetworkinfo()["warnings"])
# Mine a period worth of expected blocks so the generic block-version warning
@@ -112,18 +105,17 @@ class VersionBitsWarningTest(BitcoinTestFramework):
# Empty out the alert file
with open(self.alert_filename, 'w', encoding='utf8') as _:
pass
- self.nodes = self.start_nodes(self.num_nodes, self.options.tmpdir, self.extra_args)
+ self.start_nodes()
# Connecting one block should be enough to generate an error.
self.nodes[0].generate(1)
- assert(WARN_UNKNOWN_RULES_ACTIVE in self.nodes[0].getinfo()["errors"])
- assert(WARN_UNKNOWN_RULES_ACTIVE in self.nodes[0].getmininginfo()["errors"])
+ assert(WARN_UNKNOWN_RULES_ACTIVE in self.nodes[0].getmininginfo()["warnings"])
assert(WARN_UNKNOWN_RULES_ACTIVE in self.nodes[0].getnetworkinfo()["warnings"])
self.stop_nodes()
self.test_versionbits_in_alert_file()
# Test framework expects the node to still be running...
- self.nodes = self.start_nodes(self.num_nodes, self.options.tmpdir, self.extra_args)
+ self.start_nodes()
if __name__ == '__main__':
VersionBitsWarningTest().main()
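The versionbits test now reads its warning text from the "warnings" fields of getmininginfo/getnetworkinfo instead of the "errors" fields used before. The check pattern, reusing the constants defined at the top of that test (a sketch of the two phases, not new assertions):

    # Before the signalling threshold: no versionbit warning expected.
    assert not VB_PATTERN.match(self.nodes[0].getmininginfo()["warnings"])
    # After >= VB_THRESHOLD unknown-bit blocks: the warning shows up in both fields.
    assert WARN_UNKNOWN_RULES_MINED in self.nodes[0].getmininginfo()["warnings"]
    assert WARN_UNKNOWN_RULES_MINED in self.nodes[0].getnetworkinfo()["warnings"]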
diff --git a/test/functional/preciousblock.py b/test/functional/preciousblock.py
index 04b41e76ba..1466f901c0 100755
--- a/test/functional/preciousblock.py
+++ b/test/functional/preciousblock.py
@@ -35,8 +35,7 @@ def node_sync_via_rpc(nodes):
unidirectional_node_sync_via_rpc(node_src, node_dest)
class PreciousTest(BitcoinTestFramework):
- def __init__(self):
- super().__init__()
+ def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 3
@@ -47,16 +46,16 @@ class PreciousTest(BitcoinTestFramework):
self.log.info("Ensure submitblock can in principle reorg to a competing chain")
self.nodes[0].generate(1)
assert_equal(self.nodes[0].getblockcount(), 1)
- (hashY, hashZ) = self.nodes[1].generate(2)
+ hashZ = self.nodes[1].generate(2)[-1]
assert_equal(self.nodes[1].getblockcount(), 2)
node_sync_via_rpc(self.nodes[0:3])
assert_equal(self.nodes[0].getbestblockhash(), hashZ)
self.log.info("Mine blocks A-B-C on Node 0")
- (hashA, hashB, hashC) = self.nodes[0].generate(3)
+ hashC = self.nodes[0].generate(3)[-1]
assert_equal(self.nodes[0].getblockcount(), 5)
self.log.info("Mine competing blocks E-F-G on Node 1")
- (hashE, hashF, hashG) = self.nodes[1].generate(3)
+ hashG = self.nodes[1].generate(3)[-1]
assert_equal(self.nodes[1].getblockcount(), 5)
assert(hashC != hashG)
self.log.info("Connect nodes and check no reorg occurs")
diff --git a/test/functional/prioritise_transaction.py b/test/functional/prioritise_transaction.py
index 4fc03d2547..bb56db9b40 100755
--- a/test/functional/prioritise_transaction.py
+++ b/test/functional/prioritise_transaction.py
@@ -9,9 +9,7 @@ from test_framework.util import *
from test_framework.mininode import COIN, MAX_BLOCK_BASE_SIZE
class PrioritiseTransactionTest(BitcoinTestFramework):
-
- def __init__(self):
- super().__init__()
+ def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 2
self.extra_args = [["-printpriority=1"], ["-printpriority=1"]]
@@ -103,7 +101,7 @@ class PrioritiseTransactionTest(BitcoinTestFramework):
tx_id = self.nodes[0].decoderawtransaction(tx_hex)["txid"]
# This will raise an exception due to min relay fee not being met
- assert_raises_jsonrpc(-26, "66: min relay fee not met", self.nodes[0].sendrawtransaction, tx_hex)
+ assert_raises_rpc_error(-26, "66: min relay fee not met", self.nodes[0].sendrawtransaction, tx_hex)
assert(tx_id not in self.nodes[0].getrawmempool())
# This is a less than 1000-byte transaction, so just set the fee
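assert_raises_jsonrpc has been renamed assert_raises_rpc_error throughout these tests; the call shape appears unchanged: an expected error code, a substring of the expected message, then the callable and its arguments. A sketch mirroring calls in these diffs (tx_hex and node come from the surrounding tests):

    # Fails the test unless the RPC raises an error with this code and message fragment.
    assert_raises_rpc_error(-26, "66: min relay fee not met",
                            self.nodes[0].sendrawtransaction, tx_hex)
    assert_raises_rpc_error(-8, "Negative", node.pruneblockchain, -10)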
diff --git a/test/functional/proxy_test.py b/test/functional/proxy_test.py
index ae6f843ddc..81b99d1bf4 100755
--- a/test/functional/proxy_test.py
+++ b/test/functional/proxy_test.py
@@ -41,12 +41,9 @@ from test_framework.netutil import test_ipv6_local
RANGE_BEGIN = PORT_MIN + 2 * PORT_RANGE # Start after p2p and rpc ports
-
class ProxyTest(BitcoinTestFramework):
- def __init__(self):
- super().__init__()
+ def set_test_params(self):
self.num_nodes = 4
- self.setup_clean_chain = False
def setup_nodes(self):
self.have_ipv6 = test_ipv6_local()
@@ -89,7 +86,8 @@ class ProxyTest(BitcoinTestFramework):
]
if self.have_ipv6:
args[3] = ['-listen', '-proxy=[%s]:%i' % (self.conf3.addr),'-proxyrandomize=0', '-noonion']
- self.nodes = self.start_nodes(self.num_nodes, self.options.tmpdir, extra_args=args)
+ self.add_nodes(self.num_nodes, extra_args=args)
+ self.start_nodes()
def node_test(self, node, proxies, auth, test_onion=True):
rv = []
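proxy_test (and the pruning diff below) switch from passing num_nodes and tmpdir into start_nodes() to the framework-managed lifecycle: nodes are registered with add_nodes(), launched with start_nodes(), and individually restarted with start_node(i, extra_args=...). A sketch of the shape those calls take here (arguments illustrative, taken from these hunks):

    # In setup_nodes(): register the nodes first, then launch them all.
    self.add_nodes(self.num_nodes, extra_args=args)  # args holds one argument list per node
    self.start_nodes()                               # tmpdir is handled by the framework
    # Later, bounce a single node with different arguments:
    self.stop_node(2)
    self.start_node(2, extra_args=["-prune=550"])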
diff --git a/test/functional/pruning.py b/test/functional/pruning.py
index 0af91e0658..0101f61185 100755
--- a/test/functional/pruning.py
+++ b/test/functional/pruning.py
@@ -26,9 +26,7 @@ def calc_usage(blockdir):
return sum(os.path.getsize(blockdir+f) for f in os.listdir(blockdir) if os.path.isfile(blockdir+f)) / (1024. * 1024.)
class PruneTest(BitcoinTestFramework):
-
- def __init__(self):
- super().__init__()
+ def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 6
@@ -56,6 +54,10 @@ class PruneTest(BitcoinTestFramework):
connect_nodes(self.nodes[0], 4)
sync_blocks(self.nodes[0:5])
+ def setup_nodes(self):
+ self.add_nodes(self.num_nodes, self.extra_args, timewait=900)
+ self.start_nodes()
+
def create_big_chain(self):
# Start by creating some coinbases we can spend later
self.nodes[1].generate(200)
@@ -98,7 +100,7 @@ class PruneTest(BitcoinTestFramework):
# Node 2 stays connected, so it hears about the stale blocks and then reorgs when node 0 reconnects
# Stopping node 0 also clears its mempool, so it doesn't have node1's transactions to accidentally mine
self.stop_node(0)
- self.nodes[0]=self.start_node(0, self.options.tmpdir, self.full_node_default_args, timewait=900)
+ self.start_node(0, extra_args=self.full_node_default_args)
# Mine 24 blocks in node 1
for i in range(24):
if j == 0:
@@ -126,7 +128,7 @@ class PruneTest(BitcoinTestFramework):
# Reboot node 1 to clear its mempool (hopefully making the invalidate faster)
# Lower the block max size so we don't keep mining all our big mempool transactions (from disconnected blocks)
self.stop_node(1)
- self.nodes[1] = self.start_node(1, self.options.tmpdir, ["-maxreceivebuffer=20000","-blockmaxsize=5000", "-checkblocks=5", "-disablesafemode"], timewait=900)
+ self.start_node(1, extra_args=["-maxreceivebuffer=20000","-blockmaxsize=5000", "-checkblocks=5", "-disablesafemode"])
height = self.nodes[1].getblockcount()
self.log.info("Current block height: %d" % height)
@@ -136,7 +138,7 @@ class PruneTest(BitcoinTestFramework):
self.log.info("Invalidating block %s at height %d" % (badhash,invalidheight))
self.nodes[1].invalidateblock(badhash)
- # We've now switched to our previously mined-24 block fork on node 1, but thats not what we want
+ # We've now switched to our previously mined-24 block fork on node 1, but that's not what we want
# So invalidate that fork as well, until we're on the same chain as node 0/2 (but at an ancestor 288 blocks ago)
mainchainhash = self.nodes[0].getblockhash(invalidheight - 1)
curhash = self.nodes[1].getblockhash(invalidheight - 1)
@@ -149,7 +151,7 @@ class PruneTest(BitcoinTestFramework):
# Reboot node 1 to clear those giant txs from its mempool
self.stop_node(1)
- self.nodes[1] = self.start_node(1, self.options.tmpdir, ["-maxreceivebuffer=20000","-blockmaxsize=5000", "-checkblocks=5", "-disablesafemode"], timewait=900)
+ self.start_node(1, extra_args=["-maxreceivebuffer=20000","-blockmaxsize=5000", "-checkblocks=5", "-disablesafemode"])
self.log.info("Generating new longer chain of 300 more blocks")
self.nodes[1].generate(300)
@@ -183,7 +185,7 @@ class PruneTest(BitcoinTestFramework):
def reorg_back(self):
# Verify that a block on the old main chain fork has been pruned away
- assert_raises_jsonrpc(-1, "Block not available (pruned data)", self.nodes[2].getblock, self.forkhash)
+ assert_raises_rpc_error(-1, "Block not available (pruned data)", self.nodes[2].getblock, self.forkhash)
self.log.info("Will need to redownload block %d" % self.forkheight)
# Verify that we have enough history to reorg back to the fork point
@@ -199,7 +201,7 @@ class PruneTest(BitcoinTestFramework):
goalbesthash = self.mainchainhash2
# As of 0.10 the current block download logic is not able to reorg to the original chain created in
- # create_chain_with_stale_blocks because it doesn't know of any peer thats on that chain from which to
+ # create_chain_with_stale_blocks because it doesn't know of any peer that's on that chain from which to
# redownload its missing blocks.
# Invalidate the reorg_test chain in node 0 as well, it can successfully switch to the original chain
# because it has all the block data.
@@ -227,13 +229,15 @@ class PruneTest(BitcoinTestFramework):
def manual_test(self, node_number, use_timestamp):
# at this point, node has 995 blocks and has not yet run in prune mode
- node = self.nodes[node_number] = self.start_node(node_number, self.options.tmpdir, timewait=900)
+ self.start_node(node_number)
+ node = self.nodes[node_number]
assert_equal(node.getblockcount(), 995)
- assert_raises_jsonrpc(-1, "not in prune mode", node.pruneblockchain, 500)
- self.stop_node(node_number)
+ assert_raises_rpc_error(-1, "not in prune mode", node.pruneblockchain, 500)
# now re-start in manual pruning mode
- node = self.nodes[node_number] = self.start_node(node_number, self.options.tmpdir, ["-prune=1"], timewait=900)
+ self.stop_node(node_number)
+ self.start_node(node_number, extra_args=["-prune=1"])
+ node = self.nodes[node_number]
assert_equal(node.getblockcount(), 995)
def height(index):
@@ -261,14 +265,14 @@ class PruneTest(BitcoinTestFramework):
return os.path.isfile(self.options.tmpdir + "/node{}/regtest/blocks/blk{:05}.dat".format(node_number, index))
# should not prune because chain tip of node 3 (995) < PruneAfterHeight (1000)
- assert_raises_jsonrpc(-1, "Blockchain is too short for pruning", node.pruneblockchain, height(500))
+ assert_raises_rpc_error(-1, "Blockchain is too short for pruning", node.pruneblockchain, height(500))
# mine 6 blocks so we are at height 1001 (i.e., above PruneAfterHeight)
node.generate(6)
assert_equal(node.getblockchaininfo()["blocks"], 1001)
# negative heights should raise an exception
- assert_raises_jsonrpc(-8, "Negative", node.pruneblockchain, -10)
+ assert_raises_rpc_error(-8, "Negative", node.pruneblockchain, -10)
# height=100 too low to prune first block file so this is a no-op
prune(100)
@@ -307,7 +311,7 @@ class PruneTest(BitcoinTestFramework):
# stop node, start back up with auto-prune at 550MB, make sure still runs
self.stop_node(node_number)
- self.nodes[node_number] = self.start_node(node_number, self.options.tmpdir, ["-prune=550"], timewait=900)
+ self.start_node(node_number, extra_args=["-prune=550"])
self.log.info("Success")
@@ -315,7 +319,7 @@ class PruneTest(BitcoinTestFramework):
# check that the pruning node's wallet is still in good shape
self.log.info("Stop and start pruning node to trigger wallet rescan")
self.stop_node(2)
- self.nodes[2] = self.start_node(2, self.options.tmpdir, ["-prune=550"])
+ self.start_node(2, extra_args=["-prune=550"])
self.log.info("Success")
# check that wallet loads successfully when restarting a pruned node after IBD.
@@ -325,7 +329,7 @@ class PruneTest(BitcoinTestFramework):
nds = [self.nodes[0], self.nodes[5]]
sync_blocks(nds, wait=5, timeout=300)
self.stop_node(5) #stop and start to trigger rescan
- self.nodes[5] = self.start_node(5, self.options.tmpdir, ["-prune=550"])
+ self.start_node(5, extra_args=["-prune=550"])
self.log.info("Success")
def run_test(self):
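The pruning changes above follow the framework's newer start/stop pattern: the topology is declared in set_test_params(), per-node options such as timewait are passed once through add_nodes() in setup_nodes(), and later restarts pass only extra_args. A minimal sketch of that pattern, assuming a single node and illustrative option values (not code taken from the repository):

from test_framework.test_framework import BitcoinTestFramework

class RestartPatternSketch(BitcoinTestFramework):
    def set_test_params(self):
        self.setup_clean_chain = True
        self.num_nodes = 1
        self.extra_args = [["-checkblocks=5"]]  # illustrative option

    def setup_nodes(self):
        # timewait is set once here instead of on every start_node() call
        self.add_nodes(self.num_nodes, self.extra_args, timewait=900)
        self.start_nodes()

    def run_test(self):
        # restarts no longer take tmpdir or timewait, only the new extra_args
        self.stop_node(0)
        self.start_node(0, extra_args=["-prune=550"])

if __name__ == '__main__':
    RestartPatternSketch().main()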
diff --git a/test/functional/rawtransactions.py b/test/functional/rawtransactions.py
index 35debf9cab..79f2a2834e 100755
--- a/test/functional/rawtransactions.py
+++ b/test/functional/rawtransactions.py
@@ -2,7 +2,7 @@
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
-"""Test the rawtranscation RPCs.
+"""Test the rawtransaction RPCs.
Test the following RPCs:
- createrawtransaction
@@ -17,9 +17,7 @@ from test_framework.util import *
# Create one-input, one-output, no-fee transaction:
class RawTransactionsTest(BitcoinTestFramework):
-
- def __init__(self):
- super().__init__()
+ def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 3
@@ -50,7 +48,37 @@ class RawTransactionsTest(BitcoinTestFramework):
rawtx = self.nodes[2].signrawtransaction(rawtx)
# This will raise an exception since there are missing inputs
- assert_raises_jsonrpc(-25, "Missing inputs", self.nodes[2].sendrawtransaction, rawtx['hex'])
+ assert_raises_rpc_error(-25, "Missing inputs", self.nodes[2].sendrawtransaction, rawtx['hex'])
+
+ #####################################
+ # getrawtransaction with block hash #
+ #####################################
+
+    # Make a tx by sending, then generate 2 blocks; block1 has the tx in it
+ tx = self.nodes[2].sendtoaddress(self.nodes[1].getnewaddress(), 1)
+ block1, block2 = self.nodes[2].generate(2)
+ self.sync_all()
+ # We should be able to get the raw transaction by providing the correct block
+ gottx = self.nodes[0].getrawtransaction(tx, True, block1)
+ assert_equal(gottx['txid'], tx)
+ assert_equal(gottx['in_active_chain'], True)
+ # We should not have the 'in_active_chain' flag when we don't provide a block
+ gottx = self.nodes[0].getrawtransaction(tx, True)
+ assert_equal(gottx['txid'], tx)
+ assert 'in_active_chain' not in gottx
+ # We should not get the tx if we provide an unrelated block
+ assert_raises_rpc_error(-5, "No such transaction found", self.nodes[0].getrawtransaction, tx, True, block2)
+ # An invalid block hash should raise the correct errors
+ assert_raises_rpc_error(-8, "parameter 3 must be hexadecimal", self.nodes[0].getrawtransaction, tx, True, True)
+ assert_raises_rpc_error(-8, "parameter 3 must be hexadecimal", self.nodes[0].getrawtransaction, tx, True, "foobar")
+ assert_raises_rpc_error(-8, "parameter 3 must be of length 64", self.nodes[0].getrawtransaction, tx, True, "abcd1234")
+ assert_raises_rpc_error(-5, "Block hash not found", self.nodes[0].getrawtransaction, tx, True, "0000000000000000000000000000000000000000000000000000000000000000")
+ # Undo the blocks and check in_active_chain
+ self.nodes[0].invalidateblock(block1)
+ gottx = self.nodes[0].getrawtransaction(txid=tx, verbose=True, blockhash=block1)
+ assert_equal(gottx['in_active_chain'], False)
+ self.nodes[0].reconsiderblock(block1)
+ assert_equal(self.nodes[0].getbestblockhash(), block2)
#########################
# RAW TX MULTISIG TESTS #
@@ -63,7 +91,6 @@ class RawTransactionsTest(BitcoinTestFramework):
addr2Obj = self.nodes[2].validateaddress(addr2)
mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])
- mSigObjValid = self.nodes[2].validateaddress(mSigObj)
#use balance deltas instead of absolute values
bal = self.nodes[2].getbalance()
@@ -87,7 +114,6 @@ class RawTransactionsTest(BitcoinTestFramework):
addr3Obj = self.nodes[2].validateaddress(addr3)
mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey'], addr3Obj['pubkey']])
- mSigObjValid = self.nodes[2].validateaddress(mSigObj)
txId = self.nodes[0].sendtoaddress(mSigObj, 2.2)
decTx = self.nodes[0].gettransaction(txId)
@@ -114,7 +140,7 @@ class RawTransactionsTest(BitcoinTestFramework):
rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
rawTxPartialSigned = self.nodes[1].signrawtransaction(rawTx, inputs)
assert_equal(rawTxPartialSigned['complete'], False) #node1 only has one key, can't comp. sign the tx
-
+
rawTxSigned = self.nodes[2].signrawtransaction(rawTx, inputs)
assert_equal(rawTxSigned['complete'], True) #node2 can sign the tx compl., own two of three keys
self.nodes[2].sendrawtransaction(rawTxSigned['hex'])
@@ -124,6 +150,55 @@ class RawTransactionsTest(BitcoinTestFramework):
self.sync_all()
assert_equal(self.nodes[0].getbalance(), bal+Decimal('50.00000000')+Decimal('2.19000000')) #block reward + tx
+ # 2of2 test for combining transactions
+ bal = self.nodes[2].getbalance()
+ addr1 = self.nodes[1].getnewaddress()
+ addr2 = self.nodes[2].getnewaddress()
+
+ addr1Obj = self.nodes[1].validateaddress(addr1)
+ addr2Obj = self.nodes[2].validateaddress(addr2)
+
+ self.nodes[1].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])
+ mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])
+ mSigObjValid = self.nodes[2].validateaddress(mSigObj)
+
+ txId = self.nodes[0].sendtoaddress(mSigObj, 2.2)
+ decTx = self.nodes[0].gettransaction(txId)
+ rawTx2 = self.nodes[0].decoderawtransaction(decTx['hex'])
+ self.sync_all()
+ self.nodes[0].generate(1)
+ self.sync_all()
+
+ assert_equal(self.nodes[2].getbalance(), bal) # the funds of a 2of2 multisig tx should not be marked as spendable
+
+ txDetails = self.nodes[0].gettransaction(txId, True)
+ rawTx2 = self.nodes[0].decoderawtransaction(txDetails['hex'])
+ vout = False
+ for outpoint in rawTx2['vout']:
+ if outpoint['value'] == Decimal('2.20000000'):
+ vout = outpoint
+ break
+
+ bal = self.nodes[0].getbalance()
+ inputs = [{ "txid" : txId, "vout" : vout['n'], "scriptPubKey" : vout['scriptPubKey']['hex'], "redeemScript" : mSigObjValid['hex']}]
+ outputs = { self.nodes[0].getnewaddress() : 2.19 }
+ rawTx2 = self.nodes[2].createrawtransaction(inputs, outputs)
+ rawTxPartialSigned1 = self.nodes[1].signrawtransaction(rawTx2, inputs)
+ self.log.info(rawTxPartialSigned1)
+    assert_equal(rawTxPartialSigned1['complete'], False) #node1 only has one key, can't comp. sign the tx
+
+ rawTxPartialSigned2 = self.nodes[2].signrawtransaction(rawTx2, inputs)
+ self.log.info(rawTxPartialSigned2)
+ assert_equal(rawTxPartialSigned2['complete'], False) #node2 only has one key, can't comp. sign the tx
+ rawTxComb = self.nodes[2].combinerawtransaction([rawTxPartialSigned1['hex'], rawTxPartialSigned2['hex']])
+ self.log.info(rawTxComb)
+ self.nodes[2].sendrawtransaction(rawTxComb)
+ rawTx2 = self.nodes[0].decoderawtransaction(rawTxComb)
+ self.sync_all()
+ self.nodes[0].generate(1)
+ self.sync_all()
+ assert_equal(self.nodes[0].getbalance(), bal+Decimal('50.00000000')+Decimal('2.19000000')) #block reward + tx
+
# getrawtransaction tests
# 1. valid parameters - only supply txid
txHash = rawTx["hash"]
@@ -143,30 +218,30 @@ class RawTransactionsTest(BitcoinTestFramework):
assert_equal(self.nodes[0].getrawtransaction(txHash, True)["hex"], rawTxSigned['hex'])
# 6. invalid parameters - supply txid and string "Flase"
- assert_raises_jsonrpc(-3,"Invalid type", self.nodes[0].getrawtransaction, txHash, "Flase")
+ assert_raises_rpc_error(-1, "not a boolean", self.nodes[0].getrawtransaction, txHash, "Flase")
# 7. invalid parameters - supply txid and empty array
- assert_raises_jsonrpc(-3,"Invalid type", self.nodes[0].getrawtransaction, txHash, [])
+ assert_raises_rpc_error(-1, "not a boolean", self.nodes[0].getrawtransaction, txHash, [])
# 8. invalid parameters - supply txid and empty dict
- assert_raises_jsonrpc(-3,"Invalid type", self.nodes[0].getrawtransaction, txHash, {})
+ assert_raises_rpc_error(-1, "not a boolean", self.nodes[0].getrawtransaction, txHash, {})
inputs = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1, 'sequence' : 1000}]
outputs = { self.nodes[0].getnewaddress() : 1 }
rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
decrawtx= self.nodes[0].decoderawtransaction(rawtx)
assert_equal(decrawtx['vin'][0]['sequence'], 1000)
-
+
# 9. invalid parameters - sequence number out of range
inputs = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1, 'sequence' : -1}]
outputs = { self.nodes[0].getnewaddress() : 1 }
- assert_raises_jsonrpc(-8, 'Invalid parameter, sequence number is out of range', self.nodes[0].createrawtransaction, inputs, outputs)
-
+ assert_raises_rpc_error(-8, 'Invalid parameter, sequence number is out of range', self.nodes[0].createrawtransaction, inputs, outputs)
+
# 10. invalid parameters - sequence number out of range
inputs = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1, 'sequence' : 4294967296}]
outputs = { self.nodes[0].getnewaddress() : 1 }
- assert_raises_jsonrpc(-8, 'Invalid parameter, sequence number is out of range', self.nodes[0].createrawtransaction, inputs, outputs)
-
+ assert_raises_rpc_error(-8, 'Invalid parameter, sequence number is out of range', self.nodes[0].createrawtransaction, inputs, outputs)
+
inputs = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1, 'sequence' : 4294967294}]
outputs = { self.nodes[0].getnewaddress() : 1 }
rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
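Throughout this file (and the rest of the diff) assert_raises_jsonrpc is renamed to assert_raises_rpc_error. Based on how the tests call it, the helper verifies that the RPC fails with the given error code and that the given text appears in the error message. A minimal sketch of such a check, with the assumption that the real implementation in test_framework/util.py differs in detail:

from test_framework.authproxy import JSONRPCException

def assert_raises_rpc_error_sketch(code, message, fun, *args, **kwds):
    """Assert that fun(*args, **kwds) fails with the given RPC code/message."""
    try:
        fun(*args, **kwds)
    except JSONRPCException as e:
        # e.error carries the JSON-RPC error object with "code" and "message"
        if code is not None and e.error["code"] != code:
            raise AssertionError("Unexpected error code: %d" % e.error["code"])
        if message is not None and message not in e.error["message"]:
            raise AssertionError("Unexpected error message: %s" % e.error["message"])
    else:
        raise AssertionError("No exception raised")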
diff --git a/test/functional/receivedby.py b/test/functional/receivedby.py
index 19d99c9c9e..97da19546f 100755
--- a/test/functional/receivedby.py
+++ b/test/functional/receivedby.py
@@ -3,104 +3,83 @@
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the listreceivedbyaddress RPC."""
+from decimal import Decimal
from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import *
-
-def get_sub_array_from_array(object_array, to_match):
- '''
- Finds and returns a sub array from an array of arrays.
- to_match should be a unique idetifier of a sub array
- '''
- for item in object_array:
- all_match = True
- for key,value in to_match.items():
- if item[key] != value:
- all_match = False
- if not all_match:
- continue
- return item
- return []
+from test_framework.util import (assert_array_result,
+ assert_equal,
+ assert_raises_rpc_error,
+ )
class ReceivedByTest(BitcoinTestFramework):
+ def set_test_params(self):
+ self.num_nodes = 2
- def __init__(self):
- super().__init__()
- self.num_nodes = 4
- self.setup_clean_chain = False
+ def run_test(self):
+ # Generate block to get out of IBD
+ self.nodes[0].generate(1)
- def setup_nodes(self):
- #This test requires mocktime
- self.enable_mocktime()
- self.nodes = self.start_nodes(self.num_nodes, self.options.tmpdir)
+ self.log.info("listreceivedbyaddress Test")
- def run_test(self):
- '''
- listreceivedbyaddress Test
- '''
# Send from node 0 to 1
addr = self.nodes[1].getnewaddress()
txid = self.nodes[0].sendtoaddress(addr, 0.1)
self.sync_all()
- #Check not listed in listreceivedbyaddress because has 0 confirmations
+        # Check not listed in listreceivedbyaddress because it has 0 confirmations
assert_array_result(self.nodes[1].listreceivedbyaddress(),
- {"address":addr},
- { },
- True)
- #Bury Tx under 10 block so it will be returned by listreceivedbyaddress
+ {"address": addr},
+ {},
+ True)
+        # Bury Tx under 10 blocks so it will be returned by listreceivedbyaddress
self.nodes[1].generate(10)
self.sync_all()
assert_array_result(self.nodes[1].listreceivedbyaddress(),
- {"address":addr},
- {"address":addr, "account":"", "amount":Decimal("0.1"), "confirmations":10, "txids":[txid,]})
- #With min confidence < 10
+ {"address": addr},
+ {"address": addr, "account": "", "amount": Decimal("0.1"), "confirmations": 10, "txids": [txid, ]})
+        # With min confirmations < 10
assert_array_result(self.nodes[1].listreceivedbyaddress(5),
- {"address":addr},
- {"address":addr, "account":"", "amount":Decimal("0.1"), "confirmations":10, "txids":[txid,]})
- #With min confidence > 10, should not find Tx
- assert_array_result(self.nodes[1].listreceivedbyaddress(11),{"address":addr},{ },True)
+ {"address": addr},
+ {"address": addr, "account": "", "amount": Decimal("0.1"), "confirmations": 10, "txids": [txid, ]})
+        # With min confirmations > 10, should not find Tx
+ assert_array_result(self.nodes[1].listreceivedbyaddress(11), {"address": addr}, {}, True)
- #Empty Tx
+ # Empty Tx
addr = self.nodes[1].getnewaddress()
- assert_array_result(self.nodes[1].listreceivedbyaddress(0,True),
- {"address":addr},
- {"address":addr, "account":"", "amount":0, "confirmations":0, "txids":[]})
+ assert_array_result(self.nodes[1].listreceivedbyaddress(0, True),
+ {"address": addr},
+ {"address": addr, "account": "", "amount": 0, "confirmations": 0, "txids": []})
+
+ self.log.info("getreceivedbyaddress Test")
- '''
- getreceivedbyaddress Test
- '''
# Send from node 0 to 1
addr = self.nodes[1].getnewaddress()
txid = self.nodes[0].sendtoaddress(addr, 0.1)
self.sync_all()
- #Check balance is 0 because of 0 confirmations
+ # Check balance is 0 because of 0 confirmations
balance = self.nodes[1].getreceivedbyaddress(addr)
- if balance != Decimal("0.0"):
- raise AssertionError("Wrong balance returned by getreceivedbyaddress, %0.2f"%(balance))
+ assert_equal(balance, Decimal("0.0"))
- #Check balance is 0.1
- balance = self.nodes[1].getreceivedbyaddress(addr,0)
- if balance != Decimal("0.1"):
- raise AssertionError("Wrong balance returned by getreceivedbyaddress, %0.2f"%(balance))
+ # Check balance is 0.1
+ balance = self.nodes[1].getreceivedbyaddress(addr, 0)
+ assert_equal(balance, Decimal("0.1"))
- #Bury Tx under 10 block so it will be returned by the default getreceivedbyaddress
+        # Bury Tx under 10 blocks so it will be returned by the default getreceivedbyaddress
self.nodes[1].generate(10)
self.sync_all()
balance = self.nodes[1].getreceivedbyaddress(addr)
- if balance != Decimal("0.1"):
- raise AssertionError("Wrong balance returned by getreceivedbyaddress, %0.2f"%(balance))
+ assert_equal(balance, Decimal("0.1"))
+
+        # Calling getreceivedbyaddress for an address the wallet doesn't own should return an error
+ assert_raises_rpc_error(-4, "Address not found in wallet", self.nodes[0].getreceivedbyaddress, addr)
+
+ self.log.info("listreceivedbyaccount + getreceivedbyaccount Test")
- '''
- listreceivedbyaccount + getreceivedbyaccount Test
- '''
- #set pre-state
+ # set pre-state
addrArr = self.nodes[1].getnewaddress()
account = self.nodes[1].getaccount(addrArr)
- received_by_account_json = get_sub_array_from_array(self.nodes[1].listreceivedbyaccount(),{"account":account})
- if len(received_by_account_json) == 0:
- raise AssertionError("No accounts found in node")
+ received_by_account_json = [r for r in self.nodes[1].listreceivedbyaccount() if r["account"] == account][0]
balance_by_account = self.nodes[1].getreceivedbyaccount(account)
txid = self.nodes[0].sendtoaddress(addr, 0.1)
@@ -108,40 +87,34 @@ class ReceivedByTest(BitcoinTestFramework):
# listreceivedbyaccount should return received_by_account_json because of 0 confirmations
assert_array_result(self.nodes[1].listreceivedbyaccount(),
- {"account":account},
- received_by_account_json)
+ {"account": account},
+ received_by_account_json)
        # getreceivedbyaccount should return the same balance because of 0 confirmations
balance = self.nodes[1].getreceivedbyaccount(account)
- if balance != balance_by_account:
- raise AssertionError("Wrong balance returned by getreceivedbyaccount, %0.2f"%(balance))
+ assert_equal(balance, balance_by_account)
self.nodes[1].generate(10)
self.sync_all()
# listreceivedbyaccount should return updated account balance
assert_array_result(self.nodes[1].listreceivedbyaccount(),
- {"account":account},
- {"account":received_by_account_json["account"], "amount":(received_by_account_json["amount"] + Decimal("0.1"))})
+ {"account": account},
+ {"account": received_by_account_json["account"], "amount": (received_by_account_json["amount"] + Decimal("0.1"))})
        # getreceivedbyaccount should return the updated balance
balance = self.nodes[1].getreceivedbyaccount(account)
- if balance != balance_by_account + Decimal("0.1"):
- raise AssertionError("Wrong balance returned by getreceivedbyaccount, %0.2f"%(balance))
+ assert_equal(balance, balance_by_account + Decimal("0.1"))
- #Create a new account named "mynewaccount" that has a 0 balance
+ # Create a new account named "mynewaccount" that has a 0 balance
self.nodes[1].getaccountaddress("mynewaccount")
- received_by_account_json = get_sub_array_from_array(self.nodes[1].listreceivedbyaccount(0,True),{"account":"mynewaccount"})
- if len(received_by_account_json) == 0:
- raise AssertionError("No accounts found in node")
+ received_by_account_json = [r for r in self.nodes[1].listreceivedbyaccount(0, True) if r["account"] == "mynewaccount"][0]
# Test includeempty of listreceivedbyaccount
- if received_by_account_json["amount"] != Decimal("0.0"):
- raise AssertionError("Wrong balance returned by listreceivedbyaccount, %0.2f"%(received_by_account_json["amount"]))
+ assert_equal(received_by_account_json["amount"], Decimal("0.0"))
# Test getreceivedbyaccount for 0 amount accounts
balance = self.nodes[1].getreceivedbyaccount("mynewaccount")
- if balance != Decimal("0.0"):
- raise AssertionError("Wrong balance returned by getreceivedbyaccount, %0.2f"%(balance))
+ assert_equal(balance, Decimal("0.0"))
if __name__ == '__main__':
ReceivedByTest().main()
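The rewritten test drops the get_sub_array_from_array helper in favour of an inline list comprehension. An equivalent hedged sketch using next(), with an illustrative name (find_entry is not a framework function):

def find_entry(entries, key, value):
    """Return the first dict in entries whose entry[key] == value, else None."""
    return next((entry for entry in entries if entry[key] == value), None)

# Illustrative use, mirroring the list comprehension in the test above:
# entry = find_entry(node.listreceivedbyaccount(), "account", account)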
diff --git a/test/functional/reindex.py b/test/functional/reindex.py
index b446baa04d..1f684a1afe 100755
--- a/test/functional/reindex.py
+++ b/test/functional/reindex.py
@@ -15,8 +15,7 @@ import time
class ReindexTest(BitcoinTestFramework):
- def __init__(self):
- super().__init__()
+ def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
@@ -25,7 +24,7 @@ class ReindexTest(BitcoinTestFramework):
blockcount = self.nodes[0].getblockcount()
self.stop_nodes()
extra_args = [["-reindex-chainstate" if justchainstate else "-reindex", "-checkblockindex=1"]]
- self.nodes = self.start_nodes(self.num_nodes, self.options.tmpdir, extra_args)
+ self.start_nodes(extra_args)
while self.nodes[0].getblockcount() < blockcount:
time.sleep(0.1)
assert_equal(self.nodes[0].getblockcount(), blockcount)
diff --git a/test/functional/replace-by-fee.py b/test/functional/replace-by-fee.py
index d6bf3ea59f..815e964848 100755
--- a/test/functional/replace-by-fee.py
+++ b/test/functional/replace-by-fee.py
@@ -61,20 +61,25 @@ def make_utxo(node, amount, confirmed=True, scriptPubKey=CScript([1])):
class ReplaceByFeeTest(BitcoinTestFramework):
- def __init__(self):
- super().__init__()
- self.num_nodes = 1
- self.setup_clean_chain = False
+ def set_test_params(self):
+ self.num_nodes = 2
self.extra_args= [["-maxorphantx=1000",
"-whitelist=127.0.0.1",
"-limitancestorcount=50",
"-limitancestorsize=101",
"-limitdescendantcount=200",
- "-limitdescendantsize=101"]]
+ "-limitdescendantsize=101"],
+ ["-mempoolreplacement=0"]]
def run_test(self):
+ # Leave IBD
+ self.nodes[0].generate(1)
+
make_utxo(self.nodes[0], 1*COIN)
+ # Ensure nodes are synced
+ self.sync_all()
+
self.log.info("Running test simple doublespend...")
self.test_simple_doublespend()
@@ -111,12 +116,19 @@ class ReplaceByFeeTest(BitcoinTestFramework):
"""Simple doublespend"""
tx0_outpoint = make_utxo(self.nodes[0], int(1.1*COIN))
+ # make_utxo may have generated a bunch of blocks, so we need to sync
+ # before we can spend the coins generated, or else the resulting
+ # transactions might not be accepted by our peers.
+ self.sync_all()
+
tx1a = CTransaction()
tx1a.vin = [CTxIn(tx0_outpoint, nSequence=0)]
tx1a.vout = [CTxOut(1*COIN, CScript([b'a']))]
tx1a_hex = txToHex(tx1a)
tx1a_txid = self.nodes[0].sendrawtransaction(tx1a_hex, True)
+ self.sync_all()
+
# Should fail because we haven't changed the fee
tx1b = CTransaction()
tx1b.vin = [CTxIn(tx0_outpoint, nSequence=0)]
@@ -124,13 +136,18 @@ class ReplaceByFeeTest(BitcoinTestFramework):
tx1b_hex = txToHex(tx1b)
# This will raise an exception due to insufficient fee
- assert_raises_jsonrpc(-26, "insufficient fee", self.nodes[0].sendrawtransaction, tx1b_hex, True)
+ assert_raises_rpc_error(-26, "insufficient fee", self.nodes[0].sendrawtransaction, tx1b_hex, True)
+ # This will raise an exception due to transaction replacement being disabled
+ assert_raises_rpc_error(-26, "txn-mempool-conflict", self.nodes[1].sendrawtransaction, tx1b_hex, True)
# Extra 0.1 BTC fee
tx1b = CTransaction()
tx1b.vin = [CTxIn(tx0_outpoint, nSequence=0)]
tx1b.vout = [CTxOut(int(0.9*COIN), CScript([b'b']))]
tx1b_hex = txToHex(tx1b)
+ # Replacement still disabled even with "enough fee"
+ assert_raises_rpc_error(-26, "txn-mempool-conflict", self.nodes[1].sendrawtransaction, tx1b_hex, True)
+ # Works when enabled
tx1b_txid = self.nodes[0].sendrawtransaction(tx1b_hex, True)
mempool = self.nodes[0].getrawmempool()
@@ -140,6 +157,11 @@ class ReplaceByFeeTest(BitcoinTestFramework):
assert_equal(tx1b_hex, self.nodes[0].getrawtransaction(tx1b_txid))
+ # Second node is running mempoolreplacement=0, will not replace originally-seen txn
+ mempool = self.nodes[1].getrawmempool()
+ assert tx1a_txid in mempool
+ assert tx1b_txid not in mempool
+
def test_doublespend_chain(self):
"""Doublespend of a long chain"""
@@ -167,7 +189,7 @@ class ReplaceByFeeTest(BitcoinTestFramework):
dbl_tx_hex = txToHex(dbl_tx)
# This will raise an exception due to insufficient fee
- assert_raises_jsonrpc(-26, "insufficient fee", self.nodes[0].sendrawtransaction, dbl_tx_hex, True)
+ assert_raises_rpc_error(-26, "insufficient fee", self.nodes[0].sendrawtransaction, dbl_tx_hex, True)
# Accepted with sufficient fee
dbl_tx = CTransaction()
@@ -228,7 +250,7 @@ class ReplaceByFeeTest(BitcoinTestFramework):
dbl_tx.vout = [CTxOut(initial_nValue - fee*n, CScript([1]))]
dbl_tx_hex = txToHex(dbl_tx)
# This will raise an exception due to insufficient fee
- assert_raises_jsonrpc(-26, "insufficient fee", self.nodes[0].sendrawtransaction, dbl_tx_hex, True)
+ assert_raises_rpc_error(-26, "insufficient fee", self.nodes[0].sendrawtransaction, dbl_tx_hex, True)
# 1 BTC fee is enough
dbl_tx = CTransaction()
@@ -256,7 +278,7 @@ class ReplaceByFeeTest(BitcoinTestFramework):
dbl_tx.vout = [CTxOut(initial_nValue - 2*fee*n, CScript([1]))]
dbl_tx_hex = txToHex(dbl_tx)
# This will raise an exception
- assert_raises_jsonrpc(-26, "too many potential replacements", self.nodes[0].sendrawtransaction, dbl_tx_hex, True)
+ assert_raises_rpc_error(-26, "too many potential replacements", self.nodes[0].sendrawtransaction, dbl_tx_hex, True)
for tx in tree_txs:
tx.rehash()
@@ -270,7 +292,7 @@ class ReplaceByFeeTest(BitcoinTestFramework):
tx1a.vin = [CTxIn(tx0_outpoint, nSequence=0)]
tx1a.vout = [CTxOut(1*COIN, CScript([b'a']))]
tx1a_hex = txToHex(tx1a)
- tx1a_txid = self.nodes[0].sendrawtransaction(tx1a_hex, True)
+ self.nodes[0].sendrawtransaction(tx1a_hex, True)
# Higher fee, but the fee per KB is much lower, so the replacement is
# rejected.
@@ -280,7 +302,7 @@ class ReplaceByFeeTest(BitcoinTestFramework):
tx1b_hex = txToHex(tx1b)
# This will raise an exception due to insufficient fee
- assert_raises_jsonrpc(-26, "insufficient fee", self.nodes[0].sendrawtransaction, tx1b_hex, True)
+ assert_raises_rpc_error(-26, "insufficient fee", self.nodes[0].sendrawtransaction, tx1b_hex, True)
def test_spends_of_conflicting_outputs(self):
"""Replacements that spend conflicting tx outputs are rejected"""
@@ -303,7 +325,7 @@ class ReplaceByFeeTest(BitcoinTestFramework):
tx2_hex = txToHex(tx2)
# This will raise an exception
- assert_raises_jsonrpc(-26, "bad-txns-spends-conflicting-tx", self.nodes[0].sendrawtransaction, tx2_hex, True)
+ assert_raises_rpc_error(-26, "bad-txns-spends-conflicting-tx", self.nodes[0].sendrawtransaction, tx2_hex, True)
# Spend tx1a's output to test the indirect case.
tx1b = CTransaction()
@@ -320,7 +342,7 @@ class ReplaceByFeeTest(BitcoinTestFramework):
tx2_hex = txToHex(tx2)
# This will raise an exception
- assert_raises_jsonrpc(-26, "bad-txns-spends-conflicting-tx", self.nodes[0].sendrawtransaction, tx2_hex, True)
+ assert_raises_rpc_error(-26, "bad-txns-spends-conflicting-tx", self.nodes[0].sendrawtransaction, tx2_hex, True)
def test_new_unconfirmed_inputs(self):
"""Replacements that add new unconfirmed inputs are rejected"""
@@ -331,7 +353,7 @@ class ReplaceByFeeTest(BitcoinTestFramework):
tx1.vin = [CTxIn(confirmed_utxo)]
tx1.vout = [CTxOut(1*COIN, CScript([b'a']))]
tx1_hex = txToHex(tx1)
- tx1_txid = self.nodes[0].sendrawtransaction(tx1_hex, True)
+ self.nodes[0].sendrawtransaction(tx1_hex, True)
tx2 = CTransaction()
tx2.vin = [CTxIn(confirmed_utxo), CTxIn(unconfirmed_utxo)]
@@ -339,7 +361,7 @@ class ReplaceByFeeTest(BitcoinTestFramework):
tx2_hex = txToHex(tx2)
# This will raise an exception
- assert_raises_jsonrpc(-26, "replacement-adds-unconfirmed", self.nodes[0].sendrawtransaction, tx2_hex, True)
+ assert_raises_rpc_error(-26, "replacement-adds-unconfirmed", self.nodes[0].sendrawtransaction, tx2_hex, True)
def test_too_many_replacements(self):
"""Replacements that evict too many transactions are rejected"""
@@ -385,7 +407,7 @@ class ReplaceByFeeTest(BitcoinTestFramework):
double_tx_hex = txToHex(double_tx)
# This will raise an exception
- assert_raises_jsonrpc(-26, "too many potential replacements", self.nodes[0].sendrawtransaction, double_tx_hex, True)
+ assert_raises_rpc_error(-26, "too many potential replacements", self.nodes[0].sendrawtransaction, double_tx_hex, True)
# If we remove an input, it should pass
double_tx = CTransaction()
@@ -412,7 +434,7 @@ class ReplaceByFeeTest(BitcoinTestFramework):
tx1b_hex = txToHex(tx1b)
# This will raise an exception
- assert_raises_jsonrpc(-26, "txn-mempool-conflict", self.nodes[0].sendrawtransaction, tx1b_hex, True)
+ assert_raises_rpc_error(-26, "txn-mempool-conflict", self.nodes[0].sendrawtransaction, tx1b_hex, True)
tx1_outpoint = make_utxo(self.nodes[0], int(1.1*COIN))
@@ -430,7 +452,7 @@ class ReplaceByFeeTest(BitcoinTestFramework):
tx2b_hex = txToHex(tx2b)
# This will raise an exception
- assert_raises_jsonrpc(-26, "txn-mempool-conflict", self.nodes[0].sendrawtransaction, tx2b_hex, True)
+ assert_raises_rpc_error(-26, "txn-mempool-conflict", self.nodes[0].sendrawtransaction, tx2b_hex, True)
# Now create a new transaction that spends from tx1a and tx2a
# opt-in on one of the inputs
@@ -482,7 +504,7 @@ class ReplaceByFeeTest(BitcoinTestFramework):
tx1b_hex = txToHex(tx1b)
# Verify tx1b cannot replace tx1a.
- assert_raises_jsonrpc(-26, "insufficient fee", self.nodes[0].sendrawtransaction, tx1b_hex, True)
+ assert_raises_rpc_error(-26, "insufficient fee", self.nodes[0].sendrawtransaction, tx1b_hex, True)
# Use prioritisetransaction to set tx1a's fee to 0.
self.nodes[0].prioritisetransaction(txid=tx1a_txid, fee_delta=int(-0.1*COIN))
@@ -499,7 +521,7 @@ class ReplaceByFeeTest(BitcoinTestFramework):
tx2a.vin = [CTxIn(tx1_outpoint, nSequence=0)]
tx2a.vout = [CTxOut(1*COIN, CScript([b'a']))]
tx2a_hex = txToHex(tx2a)
- tx2a_txid = self.nodes[0].sendrawtransaction(tx2a_hex, True)
+ self.nodes[0].sendrawtransaction(tx2a_hex, True)
# Lower fee, but we'll prioritise it
tx2b = CTransaction()
@@ -509,7 +531,7 @@ class ReplaceByFeeTest(BitcoinTestFramework):
tx2b_hex = txToHex(tx2b)
# Verify tx2b cannot replace tx2a.
- assert_raises_jsonrpc(-26, "insufficient fee", self.nodes[0].sendrawtransaction, tx2b_hex, True)
+ assert_raises_rpc_error(-26, "insufficient fee", self.nodes[0].sendrawtransaction, tx2b_hex, True)
# Now prioritise tx2b to have a higher modified fee
self.nodes[0].prioritisetransaction(txid=tx2b.hash, fee_delta=int(0.1*COIN))
@@ -531,8 +553,8 @@ class ReplaceByFeeTest(BitcoinTestFramework):
assert_equal(json1["vin"][0]["sequence"], 4294967295)
rawtx2 = self.nodes[0].createrawtransaction([], outs)
- frawtx2a = self.nodes[0].fundrawtransaction(rawtx2, {"optIntoRbf": True})
- frawtx2b = self.nodes[0].fundrawtransaction(rawtx2, {"optIntoRbf": False})
+ frawtx2a = self.nodes[0].fundrawtransaction(rawtx2, {"replaceable": True})
+ frawtx2b = self.nodes[0].fundrawtransaction(rawtx2, {"replaceable": False})
json0 = self.nodes[0].decoderawtransaction(frawtx2a['hex'])
json1 = self.nodes[0].decoderawtransaction(frawtx2b['hex'])
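The replacement tests above signal BIP125 opt-in by setting nSequence=0 on an input (any value below 0xfffffffe opts in), which node1's -mempoolreplacement=0 then refuses to honour. A small sketch of building such an opt-in transaction with the framework primitives used in this file; the helper name and amounts are illustrative:

from test_framework.mininode import COIN, COutPoint, CTransaction, CTxIn, CTxOut
from test_framework.script import CScript

def build_opt_in_tx(txid_int, vout_n, value_btc):
    """Build a transaction whose single input opts in to BIP125 replacement."""
    tx = CTransaction()
    # nSequence=0 (< 0xfffffffe) marks this input as replaceable
    tx.vin = [CTxIn(COutPoint(txid_int, vout_n), nSequence=0)]
    tx.vout = [CTxOut(int(value_btc * COIN), CScript([b'a']))]
    return tx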
diff --git a/test/functional/resendwallettransactions.py b/test/functional/resendwallettransactions.py
new file mode 100755
index 0000000000..d959bb4c38
--- /dev/null
+++ b/test/functional/resendwallettransactions.py
@@ -0,0 +1,29 @@
+#!/usr/bin/env python3
+# Copyright (c) 2017 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test resendwallettransactions RPC."""
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import assert_equal, assert_raises_rpc_error
+
+class ResendWalletTransactionsTest(BitcoinTestFramework):
+ def set_test_params(self):
+ self.num_nodes = 1
+ self.extra_args = [['--walletbroadcast=false']]
+
+ def run_test(self):
+ # Should raise RPC_WALLET_ERROR (-4) if walletbroadcast is disabled.
+ assert_raises_rpc_error(-4, "Error: Wallet transaction broadcasting is disabled with -walletbroadcast", self.nodes[0].resendwallettransactions)
+
+        # Should return an empty array if there are no unconfirmed wallet transactions.
+ self.stop_node(0)
+ self.start_node(0, extra_args=[])
+ assert_equal(self.nodes[0].resendwallettransactions(), [])
+
+ # Should return an array with the unconfirmed wallet transaction.
+ txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1)
+ assert_equal(self.nodes[0].resendwallettransactions(), [txid])
+
+if __name__ == '__main__':
+ ResendWalletTransactionsTest().main()
diff --git a/test/functional/rest.py b/test/functional/rest.py
index a69dbb5013..437111a4d7 100755
--- a/test/functional/rest.py
+++ b/test/functional/rest.py
@@ -43,8 +43,7 @@ def http_post_call(host, port, path, requestdata = '', response_object = 0):
class RESTTest (BitcoinTestFramework):
FORMAT_SEPARATOR = "."
- def __init__(self):
- super().__init__()
+ def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 3
diff --git a/test/functional/rpcbind_test.py b/test/functional/rpcbind_test.py
index 951685aa76..0e8c3fa209 100755
--- a/test/functional/rpcbind_test.py
+++ b/test/functional/rpcbind_test.py
@@ -11,19 +11,13 @@ from test_framework.test_framework import BitcoinTestFramework, SkipTest
from test_framework.util import *
from test_framework.netutil import *
-
class RPCBindTest(BitcoinTestFramework):
-
- def __init__(self):
- super().__init__()
+ def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
def setup_network(self):
- pass
-
- def setup_nodes(self):
- pass
+ self.add_nodes(self.num_nodes, None)
def run_bind_test(self, allow_ips, connect_to, addresses, expected):
'''
@@ -31,13 +25,15 @@ class RPCBindTest(BitcoinTestFramework):
then try to connect, and check if the set of bound addresses
matches the expected set.
'''
+ self.log.info("Bind test for %s" % str(addresses))
expected = [(addr_to_hex(addr), port) for (addr, port) in expected]
base_args = ['-disablewallet', '-nolisten']
if allow_ips:
base_args += ['-rpcallowip=' + x for x in allow_ips]
binds = ['-rpcbind='+addr for addr in addresses]
- self.nodes = self.start_nodes(self.num_nodes, self.options.tmpdir, [base_args + binds], connect_to)
- pid = self.bitcoind_processes[0].pid
+ self.nodes[0].rpchost = connect_to
+ self.start_node(0, base_args + binds)
+ pid = self.nodes[0].process.pid
assert_equal(set(get_bind_addrs(pid)), set(expected))
self.stop_nodes()
@@ -46,10 +42,12 @@ class RPCBindTest(BitcoinTestFramework):
Start a node with rpcallow IP, and request getnetworkinfo
at a non-localhost IP.
'''
+ self.log.info("Allow IP test for %s:%d" % (rpchost, rpcport))
base_args = ['-disablewallet', '-nolisten'] + ['-rpcallowip='+x for x in allow_ips]
- self.nodes = self.start_nodes(self.num_nodes, self.options.tmpdir, [base_args])
+ self.nodes[0].rpchost = None
+ self.start_nodes([base_args])
# connect to node through non-loopback interface
- node = get_rpc_proxy(rpc_url(get_datadir_path(self.options.tmpdir, 0), 0, "%s:%d" % (rpchost, rpcport)), 0)
+ node = get_rpc_proxy(rpc_url(get_datadir_path(self.options.tmpdir, 0), 0, "%s:%d" % (rpchost, rpcport)), 0, coveragedir=self.options.coveragedir)
node.getnetworkinfo()
self.stop_nodes()
@@ -103,7 +101,7 @@ class RPCBindTest(BitcoinTestFramework):
# Check that with invalid rpcallowip, we are denied
self.run_allowip_test([non_loopback_ip], non_loopback_ip, defaultport)
- assert_raises_jsonrpc(-342, "non-JSON HTTP response with '403 Forbidden' from server", self.run_allowip_test, ['1.1.1.1'], non_loopback_ip, defaultport)
+ assert_raises_rpc_error(-342, "non-JSON HTTP response with '403 Forbidden' from server", self.run_allowip_test, ['1.1.1.1'], non_loopback_ip, defaultport)
if __name__ == '__main__':
RPCBindTest().main()
diff --git a/test/functional/rpcnamedargs.py b/test/functional/rpcnamedargs.py
index 3b286000a1..c47212bddb 100755
--- a/test/functional/rpcnamedargs.py
+++ b/test/functional/rpcnamedargs.py
@@ -7,26 +7,19 @@
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
- assert_raises_jsonrpc,
+ assert_raises_rpc_error,
)
-
class NamedArgumentTest(BitcoinTestFramework):
- """
- Test named arguments on RPC calls.
- """
-
- def __init__(self):
- super().__init__()
- self.setup_clean_chain = False
+ def set_test_params(self):
self.num_nodes = 1
def run_test(self):
node = self.nodes[0]
- h = node.help(command='getinfo')
- assert(h.startswith('getinfo\n'))
+ h = node.help(command='getblockchaininfo')
+ assert(h.startswith('getblockchaininfo\n'))
- assert_raises_jsonrpc(-8, 'Unknown named parameter', node.help, random='getinfo')
+ assert_raises_rpc_error(-8, 'Unknown named parameter', node.help, random='getblockchaininfo')
h = node.getblockhash(height=0)
node.getblock(blockhash=h)
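For reference, a tiny hedged sketch of the named-argument calling convention this test exercises: named and positional forms address the same RPC parameters, so the calls below should agree (check_named_args is an illustrative helper, not part of the test):

def check_named_args(node):
    """node is assumed to be a TestNode RPC proxy as used in the test above."""
    blockhash = node.getblockhash(height=0)   # named form
    assert blockhash == node.getblockhash(0)  # positional form, same result
    return node.getblock(blockhash=blockhash)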
diff --git a/test/functional/segwit.py b/test/functional/segwit.py
index ac95d66466..338fa1bc52 100755
--- a/test/functional/segwit.py
+++ b/test/functional/segwit.py
@@ -7,12 +7,11 @@
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
from test_framework.mininode import sha256, CTransaction, CTxIn, COutPoint, CTxOut, COIN, ToHex, FromHex
-from test_framework.address import script_to_p2sh, key_to_p2pkh
+from test_framework.address import script_to_p2sh, key_to_p2pkh, key_to_p2sh_p2wpkh, key_to_p2wpkh, script_to_p2sh_p2wsh, script_to_p2wsh, program_to_witness
from test_framework.script import CScript, OP_HASH160, OP_CHECKSIG, OP_0, hash160, OP_EQUAL, OP_DUP, OP_EQUALVERIFY, OP_1, OP_2, OP_CHECKMULTISIG, OP_TRUE
from io import BytesIO
NODE_0 = 0
-NODE_1 = 1
NODE_2 = 2
WIT_V0 = 0
WIT_V1 = 1
@@ -34,15 +33,15 @@ def witness_script(use_p2wsh, pubkey):
# Return a transaction (in hex) that spends the given utxo to a segwit output,
# optionally wrapping the segwit output using P2SH.
-def create_witnessprogram(use_p2wsh, utxo, pubkey, encode_p2sh, amount):
- pkscript = hex_str_to_bytes(witness_script(use_p2wsh, pubkey))
- if (encode_p2sh):
- p2sh_hash = hash160(pkscript)
- pkscript = CScript([OP_HASH160, p2sh_hash, OP_EQUAL])
- tx = CTransaction()
- tx.vin.append(CTxIn(COutPoint(int(utxo["txid"], 16), utxo["vout"]), b""))
- tx.vout.append(CTxOut(int(amount*COIN), pkscript))
- return ToHex(tx)
+def create_witness_tx(node, use_p2wsh, utxo, pubkey, encode_p2sh, amount):
+ if use_p2wsh:
+ program = CScript([OP_1, hex_str_to_bytes(pubkey), OP_1, OP_CHECKMULTISIG])
+ addr = script_to_p2sh_p2wsh(program) if encode_p2sh else script_to_p2wsh(program)
+ else:
+ addr = key_to_p2sh_p2wpkh(pubkey) if encode_p2sh else key_to_p2wpkh(pubkey)
+ if not encode_p2sh:
+ assert_equal(node.validateaddress(addr)['scriptPubKey'], witness_script(use_p2wsh, pubkey))
+ return node.createrawtransaction([utxo], {addr: amount})
# Create a transaction spending a given utxo to a segwit output corresponding
# to the given pubkey: use_p2wsh determines whether to use P2WPKH or P2WSH;
@@ -50,7 +49,7 @@ def create_witnessprogram(use_p2wsh, utxo, pubkey, encode_p2sh, amount):
# sign=True will have the given node sign the transaction.
# insert_redeem_script will be added to the scriptSig, if given.
def send_to_witness(use_p2wsh, node, utxo, pubkey, encode_p2sh, amount, sign=True, insert_redeem_script=""):
- tx_to_witness = create_witnessprogram(use_p2wsh, utxo, pubkey, encode_p2sh, amount)
+ tx_to_witness = create_witness_tx(node, use_p2wsh, utxo, pubkey, encode_p2sh, amount)
if (sign):
signed = node.signrawtransaction(tx_to_witness)
assert("errors" not in signed or len(["errors"]) == 0)
@@ -75,14 +74,13 @@ def find_unspent(node, min_value):
return utxo
class SegWitTest(BitcoinTestFramework):
-
- def __init__(self):
- super().__init__()
+ def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 3
- self.extra_args = [["-walletprematurewitness", "-rpcserialversion=0"],
- ["-blockversion=4", "-promiscuousmempoolflags=517", "-prematurewitness", "-walletprematurewitness", "-rpcserialversion=1"],
- ["-blockversion=536870915", "-promiscuousmempoolflags=517", "-prematurewitness", "-walletprematurewitness"]]
+        # This test exercises SegWit both pre- and post-activation, so use the normal BIP9 activation.
+ self.extra_args = [["-walletprematurewitness", "-rpcserialversion=0", "-vbparams=segwit:0:999999999999"],
+ ["-blockversion=4", "-promiscuousmempoolflags=517", "-prematurewitness", "-walletprematurewitness", "-rpcserialversion=1", "-vbparams=segwit:0:999999999999"],
+ ["-blockversion=536870915", "-promiscuousmempoolflags=517", "-prematurewitness", "-walletprematurewitness", "-vbparams=segwit:0:999999999999"]]
def setup_network(self):
super().setup_network()
@@ -102,11 +100,11 @@ class SegWitTest(BitcoinTestFramework):
sync_blocks(self.nodes)
def fail_accept(self, node, error_msg, txid, sign, redeem_script=""):
- assert_raises_jsonrpc(-26, error_msg, send_to_witness, 1, node, getutxo(txid), self.pubkey[0], False, Decimal("49.998"), sign, redeem_script)
+ assert_raises_rpc_error(-26, error_msg, send_to_witness, 1, node, getutxo(txid), self.pubkey[0], False, Decimal("49.998"), sign, redeem_script)
def fail_mine(self, node, txid, sign, redeem_script=""):
send_to_witness(1, node, getutxo(txid), self.pubkey[0], False, Decimal("49.998"), sign, redeem_script)
- assert_raises_jsonrpc(-1, "CreateNewBlock: TestBlockValidity failed", node.generate, 1)
+ assert_raises_rpc_error(-1, "CreateNewBlock: TestBlockValidity failed", node.generate, 1)
sync_blocks(self.nodes)
def run_test(self):
@@ -136,8 +134,15 @@ class SegWitTest(BitcoinTestFramework):
newaddress = self.nodes[i].getnewaddress()
self.pubkey.append(self.nodes[i].validateaddress(newaddress)["pubkey"])
multiaddress = self.nodes[i].addmultisigaddress(1, [self.pubkey[-1]])
- self.nodes[i].addwitnessaddress(newaddress)
- self.nodes[i].addwitnessaddress(multiaddress)
+ multiscript = CScript([OP_1, hex_str_to_bytes(self.pubkey[-1]), OP_1, OP_CHECKMULTISIG])
+ p2sh_addr = self.nodes[i].addwitnessaddress(newaddress, True)
+ bip173_addr = self.nodes[i].addwitnessaddress(newaddress, False)
+ p2sh_ms_addr = self.nodes[i].addwitnessaddress(multiaddress, True)
+ bip173_ms_addr = self.nodes[i].addwitnessaddress(multiaddress, False)
+ assert_equal(p2sh_addr, key_to_p2sh_p2wpkh(self.pubkey[-1]))
+ assert_equal(bip173_addr, key_to_p2wpkh(self.pubkey[-1]))
+ assert_equal(p2sh_ms_addr, script_to_p2sh_p2wsh(multiscript))
+ assert_equal(bip173_ms_addr, script_to_p2wsh(multiscript))
p2sh_ids.append([])
wit_ids.append([])
for v in range(2):
@@ -282,6 +287,9 @@ class SegWitTest(BitcoinTestFramework):
assert(txid2 in template_txids)
assert(txid3 in template_txids)
+ # Check that wtxid is properly reported in mempool entry
+ assert_equal(int(self.nodes[0].getmempoolentry(txid3)["wtxid"], 16), tx.calc_sha256(True))
+
# Mine a block to clear the gbt cache again.
self.nodes[0].generate(1)
@@ -445,11 +453,7 @@ class SegWitTest(BitcoinTestFramework):
for i in importlist:
# import all generated addresses. The wallet already has the private keys for some of these, so catch JSON RPC
# exceptions and continue.
- try:
- self.nodes[0].importaddress(i,"",False,True)
- except JSONRPCException as exp:
- assert_equal(exp.error["message"], "The wallet already contains the private key for this address or script")
- assert_equal(exp.error["code"], -4)
+ try_rpc(-4, "The wallet already contains the private key for this address or script", self.nodes[0].importaddress, i, "", False, True)
self.nodes[0].importaddress(script_to_p2sh(op0)) # import OP_0 as address only
self.nodes[0].importaddress(multisig_without_privkey_address) # Test multisig_without_privkey
@@ -459,12 +463,13 @@ class SegWitTest(BitcoinTestFramework):
self.mine_and_test_listunspent(unsolvable_after_importaddress, 1)
self.mine_and_test_listunspent(unseen_anytime, 0)
- # addwitnessaddress should refuse to return a witness address if an uncompressed key is used or the address is
- # not in the wallet
+ # addwitnessaddress should refuse to return a witness address if an uncompressed key is used
# note that no witness address should be returned by unsolvable addresses
- # the multisig_without_privkey_address will fail because its keys were not added with importpubkey
- for i in uncompressed_spendable_address + uncompressed_solvable_address + unknown_address + unsolvable_address + [multisig_without_privkey_address]:
- assert_raises_jsonrpc(-4, "Public key or redeemscript not known to wallet, or the key is uncompressed", self.nodes[0].addwitnessaddress, i)
+ for i in uncompressed_spendable_address + uncompressed_solvable_address + unknown_address + unsolvable_address:
+ assert_raises_rpc_error(-4, "Public key or redeemscript not known to wallet, or the key is uncompressed", self.nodes[0].addwitnessaddress, i)
+
+        # addwitnessaddress should return a witness address even if the keys are not in the wallet
+ self.nodes[0].addwitnessaddress(multisig_without_privkey_address)
for i in compressed_spendable_address + compressed_solvable_address:
witaddress = self.nodes[0].addwitnessaddress(i)
@@ -542,9 +547,9 @@ class SegWitTest(BitcoinTestFramework):
# addwitnessaddress should refuse to return a witness address if an uncompressed key is used
# note that a multisig address returned by addmultisigaddress is not solvable until it is added with importaddress
# premature_witaddress are not accepted until the script is added with addwitnessaddress first
- for i in uncompressed_spendable_address + uncompressed_solvable_address + premature_witaddress + [compressed_solvable_address[1]]:
+ for i in uncompressed_spendable_address + uncompressed_solvable_address + premature_witaddress:
# This will raise an exception
- assert_raises_jsonrpc(-4, "Public key or redeemscript not known to wallet, or the key is uncompressed", self.nodes[0].addwitnessaddress, i)
+ assert_raises_rpc_error(-4, "Public key or redeemscript not known to wallet, or the key is uncompressed", self.nodes[0].addwitnessaddress, i)
# after importaddress it should pass addwitnessaddress
v = self.nodes[0].validateaddress(compressed_solvable_address[1])
@@ -557,6 +562,13 @@ class SegWitTest(BitcoinTestFramework):
solvable_txid.append(self.mine_and_test_listunspent(solvable_after_addwitnessaddress, 1))
self.mine_and_test_listunspent(unseen_anytime, 0)
+ # Check that createrawtransaction/decoderawtransaction with non-v0 Bech32 works
+ v1_addr = program_to_witness(1, [3,5])
+ v1_tx = self.nodes[0].createrawtransaction([getutxo(spendable_txid[0])],{v1_addr: 1})
+ v1_decoded = self.nodes[1].decoderawtransaction(v1_tx)
+ assert_equal(v1_decoded['vout'][0]['scriptPubKey']['addresses'][0], v1_addr)
+ assert_equal(v1_decoded['vout'][0]['scriptPubKey']['hex'], "51020305")
+
# Check that spendable outputs are really spendable
self.create_and_mine_tx_from_txids(spendable_txid)
@@ -569,6 +581,29 @@ class SegWitTest(BitcoinTestFramework):
self.nodes[0].importprivkey("cTW5mR5M45vHxXkeChZdtSPozrFwFgmEvTNnanCW6wrqwaCZ1X7K")
self.create_and_mine_tx_from_txids(solvable_txid)
+ # Test that importing native P2WPKH/P2WSH scripts works
+ for use_p2wsh in [False, True]:
+ if use_p2wsh:
+ scriptPubKey = "00203a59f3f56b713fdcf5d1a57357f02c44342cbf306ffe0c4741046837bf90561a"
+ transaction = "01000000000100e1f505000000002200203a59f3f56b713fdcf5d1a57357f02c44342cbf306ffe0c4741046837bf90561a00000000"
+ else:
+ scriptPubKey = "a9142f8c469c2f0084c48e11f998ffbe7efa7549f26d87"
+ transaction = "01000000000100e1f5050000000017a9142f8c469c2f0084c48e11f998ffbe7efa7549f26d8700000000"
+
+ self.nodes[1].importaddress(scriptPubKey, "", False)
+ rawtxfund = self.nodes[1].fundrawtransaction(transaction)['hex']
+ rawtxfund = self.nodes[1].signrawtransaction(rawtxfund)["hex"]
+ txid = self.nodes[1].sendrawtransaction(rawtxfund)
+
+ assert_equal(self.nodes[1].gettransaction(txid, True)["txid"], txid)
+ assert_equal(self.nodes[1].listtransactions("*", 1, 0, True)[0]["txid"], txid)
+
+ # Assert it is properly saved
+ self.stop_node(1)
+ self.start_node(1)
+ assert_equal(self.nodes[1].gettransaction(txid, True)["txid"], txid)
+ assert_equal(self.nodes[1].listtransactions("*", 1, 0, True)[0]["txid"], txid)
+
def mine_and_test_listunspent(self, script_list, ismine):
utxo = find_unspent(self.nodes[0], 50)
tx = CTransaction()
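The new native-P2WPKH/P2WSH block above boils down to the following flow; this is a hedged summary sketch using only the RPCs the test calls, where test, node_index, script_pub_key and raw_tx stand in for the values the test constructs:

def import_and_spend_witness_script(test, node_index, script_pub_key, raw_tx):
    node = test.nodes[node_index]
    node.importaddress(script_pub_key, "", False)   # watch-only, no rescan
    funded = node.fundrawtransaction(raw_tx)['hex']
    signed = node.signrawtransaction(funded)['hex']
    txid = node.sendrawtransaction(signed)
    # Restart the node to check the imported script survives a wallet reload
    test.stop_node(node_index)
    test.start_node(node_index)
    assert node.gettransaction(txid, True)['txid'] == txid
    return txid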
diff --git a/test/functional/sendheaders.py b/test/functional/sendheaders.py
index 44c357c6db..99b7f6b99e 100755
--- a/test/functional/sendheaders.py
+++ b/test/functional/sendheaders.py
@@ -4,11 +4,24 @@
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test behavior of headers messages to announce blocks.
-Setup:
+Setup:
-- Two nodes, two p2p connections to node0. One p2p connection should only ever
- receive inv's (omitted from testing description below, this is our control).
- Second node is used for creating reorgs.
+- Two nodes:
+ - node0 is the node-under-test. We create two p2p connections to it. The
+ first p2p connection is a control and should only ever receive inv's. The
+ second p2p connection tests the headers sending logic.
+ - node1 is used to create reorgs.
+
+test_null_locators
+==================
+
+Sends two getheaders requests with null locator values. The first request's
+hashstop value refers to a validated block, while the second request's hashstop
+value refers to a block which hasn't been validated. Verifies that only the
+first request returns headers.
+
+test_nonnull_locators
+=====================
Part 1: No headers announcements before "sendheaders"
a. node mines a block [expect: inv]
@@ -72,163 +85,197 @@ d. Announce 49 headers that don't connect.
e. Announce one more that doesn't connect.
Expect: disconnect.
"""
-
-from test_framework.mininode import *
-from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import *
from test_framework.blocktools import create_block, create_coinbase
+from test_framework.mininode import (
+ CBlockHeader,
+ CInv,
+ NODE_WITNESS,
+ NetworkThread,
+ P2PInterface,
+ mininode_lock,
+ msg_block,
+ msg_getblocks,
+ msg_getdata,
+ msg_getheaders,
+ msg_headers,
+ msg_inv,
+ msg_sendheaders,
+)
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import (
+ assert_equal,
+ sync_blocks,
+ wait_until,
+)
+DIRECT_FETCH_RESPONSE_TIME = 0.05
-direct_fetch_response_time = 0.05
-
-class TestNode(NodeConnCB):
+class BaseNode(P2PInterface):
def __init__(self):
super().__init__()
+
self.block_announced = False
self.last_blockhash_announced = None
- def clear_last_announcement(self):
- with mininode_lock:
- self.block_announced = False
- self.last_message.pop("inv", None)
- self.last_message.pop("headers", None)
-
- # Request data for a list of block hashes
- def get_data(self, block_hashes):
+ def send_get_data(self, block_hashes):
+ """Request data for a list of block hashes."""
msg = msg_getdata()
for x in block_hashes:
msg.inv.append(CInv(2, x))
- self.connection.send_message(msg)
+ self.send_message(msg)
- def get_headers(self, locator, hashstop):
+ def send_get_headers(self, locator, hashstop):
msg = msg_getheaders()
msg.locator.vHave = locator
msg.hashstop = hashstop
- self.connection.send_message(msg)
+ self.send_message(msg)
def send_block_inv(self, blockhash):
msg = msg_inv()
msg.inv = [CInv(2, blockhash)]
- self.connection.send_message(msg)
+ self.send_message(msg)
- def on_inv(self, conn, message):
+ def send_header_for_blocks(self, new_blocks):
+ headers_message = msg_headers()
+ headers_message.headers = [CBlockHeader(b) for b in new_blocks]
+ self.send_message(headers_message)
+
+ def send_getblocks(self, locator):
+ getblocks_message = msg_getblocks()
+ getblocks_message.locator.vHave = locator
+ self.send_message(getblocks_message)
+
+ def wait_for_getdata(self, hash_list, timeout=60):
+ if hash_list == []:
+ return
+
+ test_function = lambda: "getdata" in self.last_message and [x.hash for x in self.last_message["getdata"].inv] == hash_list
+ wait_until(test_function, timeout=timeout, lock=mininode_lock)
+
+ def wait_for_block_announcement(self, block_hash, timeout=60):
+ test_function = lambda: self.last_blockhash_announced == block_hash
+ wait_until(test_function, timeout=timeout, lock=mininode_lock)
+
+ def on_inv(self, message):
self.block_announced = True
self.last_blockhash_announced = message.inv[-1].hash
- def on_headers(self, conn, message):
+ def on_headers(self, message):
if len(message.headers):
self.block_announced = True
message.headers[-1].calc_sha256()
self.last_blockhash_announced = message.headers[-1].sha256
- def on_block(self, conn, message):
- self.last_message["block"].calc_sha256()
+ def clear_last_announcement(self):
+ with mininode_lock:
+ self.block_announced = False
+ self.last_message.pop("inv", None)
+ self.last_message.pop("headers", None)
- # Test whether the last announcement we received had the
- # right header or the right inv
- # inv and headers should be lists of block hashes
def check_last_announcement(self, headers=None, inv=None):
- expect_headers = headers if headers != None else []
- expect_inv = inv if inv != None else []
+ """Test whether the last announcement received had the right header or the right inv.
+
+ inv and headers should be lists of block hashes."""
+
test_function = lambda: self.block_announced
- assert(wait_until(test_function, timeout=60))
+ wait_until(test_function, timeout=60, lock=mininode_lock)
+
with mininode_lock:
self.block_announced = False
- success = True
compare_inv = []
if "inv" in self.last_message:
compare_inv = [x.hash for x in self.last_message["inv"].inv]
- if compare_inv != expect_inv:
- success = False
+ if inv is not None:
+ assert_equal(compare_inv, inv)
- hash_headers = []
+ compare_headers = []
if "headers" in self.last_message:
- # treat headers as a list of block hashes
- hash_headers = [ x.sha256 for x in self.last_message["headers"].headers ]
- if hash_headers != expect_headers:
- success = False
+ compare_headers = [x.sha256 for x in self.last_message["headers"].headers]
+ if headers is not None:
+ assert_equal(compare_headers, headers)
self.last_message.pop("inv", None)
self.last_message.pop("headers", None)
- return success
-
- def wait_for_getdata(self, hash_list, timeout=60):
- if hash_list == []:
- return
-
- test_function = lambda: "getdata" in self.last_message and [x.hash for x in self.last_message["getdata"].inv] == hash_list
- assert(wait_until(test_function, timeout=timeout))
- return
-
- def wait_for_block_announcement(self, block_hash, timeout=60):
- test_function = lambda: self.last_blockhash_announced == block_hash
- assert(wait_until(test_function, timeout=timeout))
- return
-
- def send_header_for_blocks(self, new_blocks):
- headers_message = msg_headers()
- headers_message.headers = [ CBlockHeader(b) for b in new_blocks ]
- self.send_message(headers_message)
-
- def send_getblocks(self, locator):
- getblocks_message = msg_getblocks()
- getblocks_message.locator.vHave = locator
- self.send_message(getblocks_message)
class SendHeadersTest(BitcoinTestFramework):
- def __init__(self):
- super().__init__()
+ def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 2
- # mine count blocks and return the new tip
def mine_blocks(self, count):
+ """Mine count blocks and return the new tip."""
+
# Clear out last block announcement from each p2p listener
- [ x.clear_last_announcement() for x in self.p2p_connections ]
+ [x.clear_last_announcement() for x in self.nodes[0].p2ps]
self.nodes[0].generate(count)
return int(self.nodes[0].getbestblockhash(), 16)
- # mine a reorg that invalidates length blocks (replacing them with
- # length+1 blocks).
- # Note: we clear the state of our p2p connections after the
- # to-be-reorged-out blocks are mined, so that we don't break later tests.
- # return the list of block hashes newly mined
def mine_reorg(self, length):
- self.nodes[0].generate(length) # make sure all invalidated blocks are node0's
+ """Mine a reorg that invalidates length blocks (replacing them with # length+1 blocks).
+
+ Note: we clear the state of our p2p connections after the
+ to-be-reorged-out blocks are mined, so that we don't break later tests.
+        Return the list of block hashes newly mined."""
+
+ self.nodes[0].generate(length) # make sure all invalidated blocks are node0's
sync_blocks(self.nodes, wait=0.1)
- for x in self.p2p_connections:
+ for x in self.nodes[0].p2ps:
x.wait_for_block_announcement(int(self.nodes[0].getbestblockhash(), 16))
x.clear_last_announcement()
tip_height = self.nodes[1].getblockcount()
- hash_to_invalidate = self.nodes[1].getblockhash(tip_height-(length-1))
+ hash_to_invalidate = self.nodes[1].getblockhash(tip_height - (length - 1))
self.nodes[1].invalidateblock(hash_to_invalidate)
- all_hashes = self.nodes[1].generate(length+1) # Must be longer than the orig chain
+ all_hashes = self.nodes[1].generate(length + 1) # Must be longer than the orig chain
sync_blocks(self.nodes, wait=0.1)
return [int(x, 16) for x in all_hashes]
def run_test(self):
# Setup the p2p connections and start up the network thread.
- inv_node = TestNode()
- test_node = TestNode()
-
- self.p2p_connections = [inv_node, test_node]
-
- connections = []
- connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], inv_node))
- # Set nServices to 0 for test_node, so no block download will occur outside of
- # direct fetching
- connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], test_node, services=0))
- inv_node.add_connection(connections[0])
- test_node.add_connection(connections[1])
+ inv_node = self.nodes[0].add_p2p_connection(BaseNode())
+ # Make sure NODE_NETWORK is not set for test_node, so no block download
+ # will occur outside of direct fetching
+ test_node = self.nodes[0].add_p2p_connection(BaseNode(), services=NODE_WITNESS)
- NetworkThread().start() # Start up network handling in another thread
+ NetworkThread().start() # Start up network handling in another thread
# Test logic begins here
inv_node.wait_for_verack()
test_node.wait_for_verack()
+ # Ensure verack's have been processed by our peer
+ inv_node.sync_with_ping()
+ test_node.sync_with_ping()
+
+ self.test_null_locators(test_node, inv_node)
+ self.test_nonnull_locators(test_node, inv_node)
+
+ def test_null_locators(self, test_node, inv_node):
+ tip = self.nodes[0].getblockheader(self.nodes[0].generate(1)[0])
+ tip_hash = int(tip["hash"], 16)
+
+ inv_node.check_last_announcement(inv=[tip_hash], headers=[])
+ test_node.check_last_announcement(inv=[tip_hash], headers=[])
+
+ self.log.info("Verify getheaders with null locator and valid hashstop returns headers.")
+ test_node.clear_last_announcement()
+ test_node.send_get_headers(locator=[], hashstop=tip_hash)
+ test_node.check_last_announcement(headers=[tip_hash])
+
+ self.log.info("Verify getheaders with null locator and invalid hashstop does not return headers.")
+ block = create_block(int(tip["hash"], 16), create_coinbase(tip["height"] + 1), tip["mediantime"] + 1)
+ block.solve()
+ test_node.send_header_for_blocks([block])
+ test_node.clear_last_announcement()
+ test_node.send_get_headers(locator=[], hashstop=int(block.hash, 16))
+ test_node.sync_with_ping()
+ assert_equal(test_node.block_announced, False)
+ inv_node.clear_last_announcement()
+ test_node.send_message(msg_block(block))
+ inv_node.check_last_announcement(inv=[int(block.hash, 16)], headers=[])
+
+ def test_nonnull_locators(self, test_node, inv_node):
tip = int(self.nodes[0].getbestblockhash(), 16)
# PART 1
@@ -237,30 +284,30 @@ class SendHeadersTest(BitcoinTestFramework):
for i in range(4):
old_tip = tip
tip = self.mine_blocks(1)
- assert_equal(inv_node.check_last_announcement(inv=[tip]), True)
- assert_equal(test_node.check_last_announcement(inv=[tip]), True)
+ inv_node.check_last_announcement(inv=[tip], headers=[])
+ test_node.check_last_announcement(inv=[tip], headers=[])
# Try a few different responses; none should affect next announcement
if i == 0:
# first request the block
- test_node.get_data([tip])
+ test_node.send_get_data([tip])
test_node.wait_for_block(tip)
elif i == 1:
# next try requesting header and block
- test_node.get_headers(locator=[old_tip], hashstop=tip)
- test_node.get_data([tip])
+ test_node.send_get_headers(locator=[old_tip], hashstop=tip)
+ test_node.send_get_data([tip])
test_node.wait_for_block(tip)
- test_node.clear_last_announcement() # since we requested headers...
+ test_node.clear_last_announcement() # since we requested headers...
elif i == 2:
# this time announce own block via headers
height = self.nodes[0].getblockcount()
last_time = self.nodes[0].getblock(self.nodes[0].getbestblockhash())['time']
block_time = last_time + 1
- new_block = create_block(tip, create_coinbase(height+1), block_time)
+ new_block = create_block(tip, create_coinbase(height + 1), block_time)
new_block.solve()
test_node.send_header_for_blocks([new_block])
test_node.wait_for_getdata([new_block.sha256])
test_node.send_message(msg_block(new_block))
- test_node.sync_with_ping() # make sure this block is processed
+ test_node.sync_with_ping() # make sure this block is processed
inv_node.clear_last_announcement()
test_node.clear_last_announcement()
@@ -271,15 +318,15 @@ class SendHeadersTest(BitcoinTestFramework):
# commence and keep working.
test_node.send_message(msg_sendheaders())
prev_tip = int(self.nodes[0].getbestblockhash(), 16)
- test_node.get_headers(locator=[prev_tip], hashstop=0)
+ test_node.send_get_headers(locator=[prev_tip], hashstop=0)
test_node.sync_with_ping()
# Now that we've synced headers, headers announcements should work
tip = self.mine_blocks(1)
- assert_equal(inv_node.check_last_announcement(inv=[tip]), True)
- assert_equal(test_node.check_last_announcement(headers=[tip]), True)
+ inv_node.check_last_announcement(inv=[tip], headers=[])
+ test_node.check_last_announcement(headers=[tip])
- height = self.nodes[0].getblockcount()+1
+ height = self.nodes[0].getblockcount() + 1
block_time += 10 # Advance far enough ahead
for i in range(10):
# Mine i blocks, and alternate announcing either via
@@ -288,7 +335,7 @@ class SendHeadersTest(BitcoinTestFramework):
# with block header, even though the blocks are never requested
for j in range(2):
blocks = []
- for b in range(i+1):
+ for b in range(i + 1):
blocks.append(create_block(tip, create_coinbase(height), block_time))
blocks[-1].solve()
tip = blocks[-1].sha256
@@ -302,7 +349,7 @@ class SendHeadersTest(BitcoinTestFramework):
test_node.send_header_for_blocks(blocks)
# Test that duplicate inv's won't result in duplicate
# getdata requests, or duplicate headers announcements
- [ inv_node.send_block_inv(x.sha256) for x in blocks ]
+ [inv_node.send_block_inv(x.sha256) for x in blocks]
test_node.wait_for_getdata([x.sha256 for x in blocks])
inv_node.sync_with_ping()
else:
@@ -313,7 +360,7 @@ class SendHeadersTest(BitcoinTestFramework):
# getdata requests (the check is further down)
inv_node.send_header_for_blocks(blocks)
inv_node.sync_with_ping()
- [ test_node.send_message(msg_block(x)) for x in blocks ]
+ [test_node.send_message(msg_block(x)) for x in blocks]
test_node.sync_with_ping()
inv_node.sync_with_ping()
# This block should not be announced to the inv node (since it also
@@ -321,8 +368,8 @@ class SendHeadersTest(BitcoinTestFramework):
assert "inv" not in inv_node.last_message
assert "headers" not in inv_node.last_message
tip = self.mine_blocks(1)
- assert_equal(inv_node.check_last_announcement(inv=[tip]), True)
- assert_equal(test_node.check_last_announcement(headers=[tip]), True)
+ inv_node.check_last_announcement(inv=[tip], headers=[])
+ test_node.check_last_announcement(headers=[tip])
height += 1
block_time += 1
@@ -336,16 +383,16 @@ class SendHeadersTest(BitcoinTestFramework):
# First try mining a reorg that can propagate with header announcement
new_block_hashes = self.mine_reorg(length=7)
tip = new_block_hashes[-1]
- assert_equal(inv_node.check_last_announcement(inv=[tip]), True)
- assert_equal(test_node.check_last_announcement(headers=new_block_hashes), True)
+ inv_node.check_last_announcement(inv=[tip], headers=[])
+ test_node.check_last_announcement(headers=new_block_hashes)
- block_time += 8
+ block_time += 8
# Mine a too-large reorg, which should be announced with a single inv
new_block_hashes = self.mine_reorg(length=8)
tip = new_block_hashes[-1]
- assert_equal(inv_node.check_last_announcement(inv=[tip]), True)
- assert_equal(test_node.check_last_announcement(inv=[tip]), True)
+ inv_node.check_last_announcement(inv=[tip], headers=[])
+ test_node.check_last_announcement(inv=[tip], headers=[])
block_time += 9
@@ -353,42 +400,42 @@ class SendHeadersTest(BitcoinTestFramework):
fork_point = int(fork_point, 16)
# Use getblocks/getdata
- test_node.send_getblocks(locator = [fork_point])
- assert_equal(test_node.check_last_announcement(inv=new_block_hashes), True)
- test_node.get_data(new_block_hashes)
+ test_node.send_getblocks(locator=[fork_point])
+ test_node.check_last_announcement(inv=new_block_hashes, headers=[])
+ test_node.send_get_data(new_block_hashes)
test_node.wait_for_block(new_block_hashes[-1])
for i in range(3):
# Mine another block, still should get only an inv
tip = self.mine_blocks(1)
- assert_equal(inv_node.check_last_announcement(inv=[tip]), True)
- assert_equal(test_node.check_last_announcement(inv=[tip]), True)
+ inv_node.check_last_announcement(inv=[tip], headers=[])
+ test_node.check_last_announcement(inv=[tip], headers=[])
if i == 0:
# Just get the data -- shouldn't cause headers announcements to resume
- test_node.get_data([tip])
+ test_node.send_get_data([tip])
test_node.wait_for_block(tip)
elif i == 1:
# Send a getheaders message that shouldn't trigger headers announcements
# to resume (best header sent will be too old)
- test_node.get_headers(locator=[fork_point], hashstop=new_block_hashes[1])
- test_node.get_data([tip])
+ test_node.send_get_headers(locator=[fork_point], hashstop=new_block_hashes[1])
+ test_node.send_get_data([tip])
test_node.wait_for_block(tip)
elif i == 2:
- test_node.get_data([tip])
+ test_node.send_get_data([tip])
test_node.wait_for_block(tip)
# This time, try sending either a getheaders to trigger resumption
- # of headers announcements, or mine a new block and inv it, also
+ # of headers announcements, or mine a new block and inv it, also
# triggering resumption of headers announcements.
if j == 0:
- test_node.get_headers(locator=[tip], hashstop=0)
+ test_node.send_get_headers(locator=[tip], hashstop=0)
test_node.sync_with_ping()
else:
test_node.send_block_inv(tip)
test_node.sync_with_ping()
# New blocks should now be announced with header
tip = self.mine_blocks(1)
- assert_equal(inv_node.check_last_announcement(inv=[tip]), True)
- assert_equal(test_node.check_last_announcement(headers=[tip]), True)
+ inv_node.check_last_announcement(inv=[tip], headers=[])
+ test_node.check_last_announcement(headers=[tip])
self.log.info("Part 3: success!")
@@ -408,7 +455,7 @@ class SendHeadersTest(BitcoinTestFramework):
height += 1
inv_node.send_message(msg_block(blocks[-1]))
- inv_node.sync_with_ping() # Make sure blocks are processed
+ inv_node.sync_with_ping() # Make sure blocks are processed
test_node.last_message.pop("getdata", None)
test_node.send_header_for_blocks(blocks)
test_node.sync_with_ping()
@@ -427,9 +474,9 @@ class SendHeadersTest(BitcoinTestFramework):
test_node.send_header_for_blocks(blocks)
test_node.sync_with_ping()
- test_node.wait_for_getdata([x.sha256 for x in blocks], timeout=direct_fetch_response_time)
+ test_node.wait_for_getdata([x.sha256 for x in blocks], timeout=DIRECT_FETCH_RESPONSE_TIME)
- [ test_node.send_message(msg_block(x)) for x in blocks ]
+ [test_node.send_message(msg_block(x)) for x in blocks]
test_node.sync_with_ping()
@@ -458,13 +505,13 @@ class SendHeadersTest(BitcoinTestFramework):
# both blocks (same work as tip)
test_node.send_header_for_blocks(blocks[1:2])
test_node.sync_with_ping()
- test_node.wait_for_getdata([x.sha256 for x in blocks[0:2]], timeout=direct_fetch_response_time)
+ test_node.wait_for_getdata([x.sha256 for x in blocks[0:2]], timeout=DIRECT_FETCH_RESPONSE_TIME)
# Announcing 16 more headers should trigger direct fetch for 14 more
# blocks
test_node.send_header_for_blocks(blocks[2:18])
test_node.sync_with_ping()
- test_node.wait_for_getdata([x.sha256 for x in blocks[2:16]], timeout=direct_fetch_response_time)
+ test_node.wait_for_getdata([x.sha256 for x in blocks[2:16]], timeout=DIRECT_FETCH_RESPONSE_TIME)
# Announcing 1 more header should not trigger any response
test_node.last_message.pop("getdata", None)
@@ -476,7 +523,7 @@ class SendHeadersTest(BitcoinTestFramework):
self.log.info("Part 4: success!")
# Now deliver all those blocks we announced.
- [ test_node.send_message(msg_block(x)) for x in blocks ]
+ [test_node.send_message(msg_block(x)) for x in blocks]
self.log.info("Part 5: Testing handling of unconnecting headers")
# First we test that receipt of an unconnecting header doesn't prevent
@@ -498,7 +545,7 @@ class SendHeadersTest(BitcoinTestFramework):
test_node.wait_for_getheaders()
test_node.send_header_for_blocks(blocks)
test_node.wait_for_getdata([x.sha256 for x in blocks])
- [ test_node.send_message(msg_block(x)) for x in blocks ]
+ [test_node.send_message(msg_block(x)) for x in blocks]
test_node.sync_with_ping()
assert_equal(int(self.nodes[0].getbestblockhash(), 16), blocks[1].sha256)
@@ -506,7 +553,7 @@ class SendHeadersTest(BitcoinTestFramework):
# Now we test that if we repeatedly don't send connecting headers, we
# don't go into an infinite loop trying to get them to connect.
MAX_UNCONNECTING_HEADERS = 10
- for j in range(MAX_UNCONNECTING_HEADERS+1):
+ for j in range(MAX_UNCONNECTING_HEADERS + 1):
blocks.append(create_block(tip, create_coinbase(height), block_time))
blocks[-1].solve()
tip = blocks[-1].sha256
@@ -528,11 +575,11 @@ class SendHeadersTest(BitcoinTestFramework):
# Now try to see how many unconnecting headers we can send
# before we get disconnected. Should be 5*MAX_UNCONNECTING_HEADERS
- for i in range(5*MAX_UNCONNECTING_HEADERS - 1):
+ for i in range(5 * MAX_UNCONNECTING_HEADERS - 1):
# Send a header that doesn't connect, check that we get a getheaders.
with mininode_lock:
test_node.last_message.pop("getheaders", None)
- test_node.send_header_for_blocks([blocks[i%len(blocks)]])
+ test_node.send_header_for_blocks([blocks[i % len(blocks)]])
test_node.wait_for_getheaders()
# Eventually this stops working.
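The refactored check_last_announcement above asserts internally (assert_equal) and relies on wait_until taking a lock, so callers no longer wrap it in assert_equal(..., True). A minimal sketch of that predicate-under-lock pattern, assuming the wait_until and mininode_lock helpers referenced in this diff; the wrapper function name is illustrative:

    from test_framework.mininode import mininode_lock
    from test_framework.util import wait_until

    def wait_for_announcement(p2p_conn, timeout=60):
        # The predicate reads shared p2p state, so wait_until acquires the mininode
        # lock around each evaluation and raises on timeout, instead of returning a
        # boolean for the caller to assert on.
        wait_until(lambda: p2p_conn.block_announced, timeout=timeout, lock=mininode_lock)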
diff --git a/test/functional/signmessages.py b/test/functional/signmessages.py
index 42f6a9daaf..52ba6a5ad7 100755
--- a/test/functional/signmessages.py
+++ b/test/functional/signmessages.py
@@ -5,31 +5,34 @@
"""Test RPC commands for signing and verifying messages."""
from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import assert_equal
class SignMessagesTest(BitcoinTestFramework):
-
- def __init__(self):
- super().__init__()
+ def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
def run_test(self):
message = 'This is just a test message'
- # Test the signing with a privkey
- privKey = 'cUeKHd5orzT3mz8P9pxyREHfsWtVfgsfDjiZZBcjUBAaGk1BTj7N'
+ self.log.info('test signing with priv_key')
+ priv_key = 'cUeKHd5orzT3mz8P9pxyREHfsWtVfgsfDjiZZBcjUBAaGk1BTj7N'
address = 'mpLQjfK79b7CCV4VMJWEWAj5Mpx8Up5zxB'
- signature = self.nodes[0].signmessagewithprivkey(privKey, message)
-
- # Verify the message
+ expected_signature = 'INbVnW4e6PeRmsv2Qgu8NuopvrVjkcxob+sX8OcZG0SALhWybUjzMLPdAsXI46YZGb0KQTRii+wWIQzRpG/U+S0='
+ signature = self.nodes[0].signmessagewithprivkey(priv_key, message)
+ assert_equal(expected_signature, signature)
assert(self.nodes[0].verifymessage(address, signature, message))
- # Test the signing with an address with wallet
+ self.log.info('test signing with an address with wallet')
address = self.nodes[0].getnewaddress()
signature = self.nodes[0].signmessage(address, message)
-
- # Verify the message
assert(self.nodes[0].verifymessage(address, signature, message))
+ self.log.info('test verifying with another address should not work')
+ other_address = self.nodes[0].getnewaddress()
+ other_signature = self.nodes[0].signmessage(other_address, message)
+ assert(not self.nodes[0].verifymessage(other_address, signature, message))
+ assert(not self.nodes[0].verifymessage(address, other_signature, message))
+
if __name__ == '__main__':
SignMessagesTest().main()
diff --git a/test/functional/signrawtransactions.py b/test/functional/signrawtransactions.py
index 437905e764..9a45d53cb8 100755
--- a/test/functional/signrawtransactions.py
+++ b/test/functional/signrawtransactions.py
@@ -9,8 +9,7 @@ from test_framework.util import *
class SignRawTransactionsTest(BitcoinTestFramework):
- def __init__(self):
- super().__init__()
+ def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
@@ -43,22 +42,6 @@ class SignRawTransactionsTest(BitcoinTestFramework):
# 2) No script verification error occurred
assert 'errors' not in rawTxSigned
- # Check that signrawtransaction doesn't blow up on garbage merge attempts
- dummyTxInconsistent = self.nodes[0].createrawtransaction([inputs[0]], outputs)
- rawTxUnsigned = self.nodes[0].signrawtransaction(rawTx + dummyTxInconsistent, inputs)
-
- assert 'complete' in rawTxUnsigned
- assert_equal(rawTxUnsigned['complete'], False)
-
- # Check that signrawtransaction properly merges unsigned and signed txn, even with garbage in the middle
- rawTxSigned2 = self.nodes[0].signrawtransaction(rawTxUnsigned["hex"] + dummyTxInconsistent + rawTxSigned["hex"], inputs)
-
- assert 'complete' in rawTxSigned2
- assert_equal(rawTxSigned2['complete'], True)
-
- assert 'errors' not in rawTxSigned2
-
-
def script_verification_error_test(self):
"""Create and sign a raw transaction with valid (vin 0), invalid (vin 1) and one missing (vin 2) input script.
@@ -99,7 +82,7 @@ class SignRawTransactionsTest(BitcoinTestFramework):
assert_equal(decodedRawTx["vin"][i]["vout"], inp["vout"])
# Make sure decoderawtransaction throws if there is extra data
- assert_raises(JSONRPCException, self.nodes[0].decoderawtransaction, rawTx + "00")
+ assert_raises_rpc_error(-22, "TX decode failed", self.nodes[0].decoderawtransaction, rawTx + "00")
rawTxSigned = self.nodes[0].signrawtransaction(rawTx, scripts, privKeys)
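The hunk above replaces the generic assert_raises(JSONRPCException, ...) with assert_raises_rpc_error, which pins both the RPC error code and message. A minimal sketch of the same pattern, assuming the util helper shown in this diff; the node handle and transaction hex are placeholders:

    from test_framework.util import assert_raises_rpc_error

    def check_decode_rejects_trailing_bytes(node, raw_tx_hex):
        # -22 is the deserialization error code; the helper fails unless both the
        # code and the message substring match the raised JSONRPCException.
        assert_raises_rpc_error(-22, "TX decode failed",
                                node.decoderawtransaction, raw_tx_hex + "00")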
diff --git a/test/functional/smartfees.py b/test/functional/smartfees.py
index bc42a319df..986f4546a8 100755
--- a/test/functional/smartfees.py
+++ b/test/functional/smartfees.py
@@ -141,11 +141,8 @@ def check_estimates(node, fees_seen, max_invalid, print_estimates = True):
class EstimateFeeTest(BitcoinTestFramework):
-
- def __init__(self):
- super().__init__()
+ def set_test_params(self):
self.num_nodes = 3
- self.setup_clean_chain = False
def setup_network(self):
"""
@@ -153,57 +150,16 @@ class EstimateFeeTest(BitcoinTestFramework):
But first we need to use one node to create a lot of outputs
which we will use to generate our transactions.
"""
- self.nodes = []
+ self.add_nodes(3, extra_args=[["-maxorphantx=1000", "-whitelist=127.0.0.1"],
+ ["-blockmaxsize=17000", "-maxorphantx=1000", "-deprecatedrpc=estimatefee"],
+ ["-blockmaxsize=8000", "-maxorphantx=1000"]])
# Use node0 to mine blocks for input splitting
- self.nodes.append(self.start_node(0, self.options.tmpdir, ["-maxorphantx=1000",
- "-whitelist=127.0.0.1"]))
-
- self.log.info("This test is time consuming, please be patient")
- self.log.info("Splitting inputs so we can generate tx's")
- self.txouts = []
- self.txouts2 = []
- # Split a coinbase into two transaction puzzle outputs
- split_inputs(self.nodes[0], self.nodes[0].listunspent(0), self.txouts, True)
-
- # Mine
- while (len(self.nodes[0].getrawmempool()) > 0):
- self.nodes[0].generate(1)
-
- # Repeatedly split those 2 outputs, doubling twice for each rep
- # Use txouts to monitor the available utxo, since these won't be tracked in wallet
- reps = 0
- while (reps < 5):
- #Double txouts to txouts2
- while (len(self.txouts)>0):
- split_inputs(self.nodes[0], self.txouts, self.txouts2)
- while (len(self.nodes[0].getrawmempool()) > 0):
- self.nodes[0].generate(1)
- #Double txouts2 to txouts
- while (len(self.txouts2)>0):
- split_inputs(self.nodes[0], self.txouts2, self.txouts)
- while (len(self.nodes[0].getrawmempool()) > 0):
- self.nodes[0].generate(1)
- reps += 1
- self.log.info("Finished splitting")
-
- # Now we can connect the other nodes, didn't want to connect them earlier
- # so the estimates would not be affected by the splitting transactions
# Node1 mines small blocks, but ones that are bigger than the expected transaction rate.
# NOTE: the CreateNewBlock code starts counting block size at 1,000 bytes,
# (17k is room enough for 110 or so transactions)
- self.nodes.append(self.start_node(1, self.options.tmpdir,
- ["-blockmaxsize=17000", "-maxorphantx=1000"]))
- connect_nodes(self.nodes[1], 0)
-
# Node2 is a stingy miner, that
# produces too small blocks (room for only 55 or so transactions)
- node2args = ["-blockmaxsize=8000", "-maxorphantx=1000"]
- self.nodes.append(self.start_node(2, self.options.tmpdir, node2args))
- connect_nodes(self.nodes[0], 2)
- connect_nodes(self.nodes[2], 1)
-
- self.sync_all()
def transact_and_mine(self, numblocks, mining_node):
min_fee = Decimal("0.00001")
@@ -232,9 +188,51 @@ class EstimateFeeTest(BitcoinTestFramework):
self.memutxo = newmem
def run_test(self):
+ self.log.info("This test is time consuming, please be patient")
+ self.log.info("Splitting inputs so we can generate tx's")
+
# Make log handler available to helper functions
global log
log = self.log
+
+ # Start node0
+ self.start_node(0)
+ self.txouts = []
+ self.txouts2 = []
+ # Split a coinbase into two transaction puzzle outputs
+ split_inputs(self.nodes[0], self.nodes[0].listunspent(0), self.txouts, True)
+
+ # Mine
+ while (len(self.nodes[0].getrawmempool()) > 0):
+ self.nodes[0].generate(1)
+
+ # Repeatedly split those 2 outputs, doubling twice for each rep
+ # Use txouts to monitor the available utxo, since these won't be tracked in wallet
+ reps = 0
+ while (reps < 5):
+ # Double txouts to txouts2
+ while (len(self.txouts) > 0):
+ split_inputs(self.nodes[0], self.txouts, self.txouts2)
+ while (len(self.nodes[0].getrawmempool()) > 0):
+ self.nodes[0].generate(1)
+ # Double txouts2 to txouts
+ while (len(self.txouts2) > 0):
+ split_inputs(self.nodes[0], self.txouts2, self.txouts)
+ while (len(self.nodes[0].getrawmempool()) > 0):
+ self.nodes[0].generate(1)
+ reps += 1
+ self.log.info("Finished splitting")
+
+ # Now we can connect the other nodes, didn't want to connect them earlier
+ # so the estimates would not be affected by the splitting transactions
+ self.start_node(1)
+ self.start_node(2)
+ connect_nodes(self.nodes[1], 0)
+ connect_nodes(self.nodes[0], 2)
+ connect_nodes(self.nodes[2], 1)
+
+ self.sync_all()
+
self.fees_per_kb = []
self.memutxo = []
self.confutxo = self.txouts # Start with the set of confirmed txouts after splitting
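The smartfees change above defers node startup: nodes and their arguments are declared in setup_network via add_nodes, then started and connected from run_test once the input-splitting work on node0 is done. A minimal sketch of that pattern, assuming the BitcoinTestFramework helpers used throughout this diff; the extra_args values are illustrative:

    from test_framework.test_framework import BitcoinTestFramework
    from test_framework.util import connect_nodes

    class DeferredStartExample(BitcoinTestFramework):
        def set_test_params(self):
            self.num_nodes = 2

        def setup_network(self):
            # Declare nodes and per-node args without starting any of them.
            self.add_nodes(2, extra_args=[["-whitelist=127.0.0.1"], []])

        def run_test(self):
            self.start_node(0)   # isolated setup work happens on node0 first
            self.start_node(1)   # second node comes up only when needed
            connect_nodes(self.nodes[1], 0)
            self.sync_all()

    if __name__ == '__main__':
        DeferredStartExample().main()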
diff --git a/test/functional/test_framework/address.py b/test/functional/test_framework/address.py
index 96bebe1ea1..2e2db5ffb2 100644
--- a/test/functional/test_framework/address.py
+++ b/test/functional/test_framework/address.py
@@ -7,6 +7,8 @@
from .script import hash256, hash160, sha256, CScript, OP_0
from .util import bytes_to_hex_str, hex_str_to_bytes
+from . import segwit_addr
+
chars = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
def byte_to_base58(b, version):
@@ -49,6 +51,22 @@ def key_to_p2sh_p2wpkh(key, main = False):
p2shscript = CScript([OP_0, hash160(key)])
return script_to_p2sh(p2shscript, main)
+def program_to_witness(version, program, main = False):
+ if (type(program) is str):
+ program = hex_str_to_bytes(program)
+ assert 0 <= version <= 16
+ assert 2 <= len(program) <= 40
+ assert version > 0 or len(program) in [20, 32]
+ return segwit_addr.encode("bc" if main else "bcrt", version, program)
+
+def script_to_p2wsh(script, main = False):
+ script = check_script(script)
+ return program_to_witness(0, sha256(script), main)
+
+def key_to_p2wpkh(key, main = False):
+ key = check_key(key)
+ return program_to_witness(0, hash160(key), main)
+
def script_to_p2sh_p2wsh(script, main = False):
script = check_script(script)
p2shscript = CScript([OP_0, sha256(script)])
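On top of the existing P2SH-wrapped helpers, address.py above gains native segwit helpers (program_to_witness, key_to_p2wpkh, script_to_p2wsh) backed by the segwit_addr bech32 encoder. A minimal usage sketch, assuming those module paths; the pubkey hex is a placeholder value:

    from test_framework.address import key_to_p2wpkh, script_to_p2wsh
    from test_framework.script import CScript, OP_TRUE

    # Placeholder 33-byte compressed pubkey, hex encoded.
    pubkey_hex = "03" + "11" * 32

    # v0 P2WPKH: the witness program is HASH160(pubkey); regtest uses the "bcrt" HRP.
    print(key_to_p2wpkh(pubkey_hex))

    # v0 P2WSH: the witness program is SHA256(witness script).
    print(script_to_p2wsh(CScript([OP_TRUE])))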
diff --git a/test/functional/test_framework/authproxy.py b/test/functional/test_framework/authproxy.py
index dfcc524313..bd3a3b3fab 100644
--- a/test/functional/test_framework/authproxy.py
+++ b/test/functional/test_framework/authproxy.py
@@ -33,24 +33,17 @@ ServiceProxy class:
- uses standard Python json lib
"""
-try:
- import http.client as httplib
-except ImportError:
- import httplib
import base64
import decimal
+import http.client
import json
import logging
import socket
import time
-try:
- import urllib.parse as urlparse
-except ImportError:
- import urlparse
-
-USER_AGENT = "AuthServiceProxy/0.1"
+import urllib.parse
HTTP_TIMEOUT = 30
+USER_AGENT = "AuthServiceProxy/0.1"
log = logging.getLogger("BitcoinRPC")
@@ -60,7 +53,7 @@ class JSONRPCException(Exception):
errmsg = '%(message)s (%(code)i)' % rpc_error
except (KeyError, TypeError):
errmsg = ''
- Exception.__init__(self, errmsg)
+ super().__init__(errmsg)
self.error = rpc_error
@@ -69,28 +62,18 @@ def EncodeDecimal(o):
return str(o)
raise TypeError(repr(o) + " is not JSON serializable")
-class AuthServiceProxy(object):
+class AuthServiceProxy():
__id_count = 0
# ensure_ascii: escape unicode as \uXXXX, passed to json.dumps
def __init__(self, service_url, service_name=None, timeout=HTTP_TIMEOUT, connection=None, ensure_ascii=True):
self.__service_url = service_url
self._service_name = service_name
- self.ensure_ascii = ensure_ascii # can be toggled on the fly by tests
- self.__url = urlparse.urlparse(service_url)
- if self.__url.port is None:
- port = 80
- else:
- port = self.__url.port
- (user, passwd) = (self.__url.username, self.__url.password)
- try:
- user = user.encode('utf8')
- except AttributeError:
- pass
- try:
- passwd = passwd.encode('utf8')
- except AttributeError:
- pass
+ self.ensure_ascii = ensure_ascii # can be toggled on the fly by tests
+ self.__url = urllib.parse.urlparse(service_url)
+ port = 80 if self.__url.port is None else self.__url.port
+ user = None if self.__url.username is None else self.__url.username.encode('utf8')
+ passwd = None if self.__url.password is None else self.__url.password.encode('utf8')
authpair = user + b':' + passwd
self.__auth_header = b'Basic ' + base64.b64encode(authpair)
@@ -98,11 +81,9 @@ class AuthServiceProxy(object):
# Callables re-use the connection of the original proxy
self.__conn = connection
elif self.__url.scheme == 'https':
- self.__conn = httplib.HTTPSConnection(self.__url.hostname, port,
- timeout=timeout)
+ self.__conn = http.client.HTTPSConnection(self.__url.hostname, port, timeout=timeout)
else:
- self.__conn = httplib.HTTPConnection(self.__url.hostname, port,
- timeout=timeout)
+ self.__conn = http.client.HTTPConnection(self.__url.hostname, port, timeout=timeout)
def __getattr__(self, name):
if name.startswith('__') and name.endswith('__'):
@@ -124,31 +105,34 @@ class AuthServiceProxy(object):
try:
self.__conn.request(method, path, postdata, headers)
return self._get_response()
- except httplib.BadStatusLine as e:
- if e.line == "''": # if connection was closed, try again
+ except http.client.BadStatusLine as e:
+ if e.line == "''": # if connection was closed, try again
self.__conn.close()
self.__conn.request(method, path, postdata, headers)
return self._get_response()
else:
raise
- except (BrokenPipeError,ConnectionResetError):
+ except (BrokenPipeError, ConnectionResetError):
# Python 3.5+ raises BrokenPipeError instead of BadStatusLine when the connection was reset
# ConnectionResetError happens on FreeBSD with Python 3.4
self.__conn.close()
self.__conn.request(method, path, postdata, headers)
return self._get_response()
- def __call__(self, *args, **argsn):
+ def get_request(self, *args, **argsn):
AuthServiceProxy.__id_count += 1
- log.debug("-%s-> %s %s"%(AuthServiceProxy.__id_count, self._service_name,
- json.dumps(args, default=EncodeDecimal, ensure_ascii=self.ensure_ascii)))
+ log.debug("-%s-> %s %s" % (AuthServiceProxy.__id_count, self._service_name,
+ json.dumps(args, default=EncodeDecimal, ensure_ascii=self.ensure_ascii)))
if args and argsn:
raise ValueError('Cannot handle both named and positional arguments')
- postdata = json.dumps({'version': '1.1',
- 'method': self._service_name,
- 'params': args or argsn,
- 'id': AuthServiceProxy.__id_count}, default=EncodeDecimal, ensure_ascii=self.ensure_ascii)
+ return {'version': '1.1',
+ 'method': self._service_name,
+ 'params': args or argsn,
+ 'id': AuthServiceProxy.__id_count}
+
+ def __call__(self, *args, **argsn):
+ postdata = json.dumps(self.get_request(*args, **argsn), default=EncodeDecimal, ensure_ascii=self.ensure_ascii)
response = self._request('POST', self.__url.path, postdata.encode('utf-8'))
if response['error'] is not None:
raise JSONRPCException(response['error'])
@@ -158,9 +142,9 @@ class AuthServiceProxy(object):
else:
return response['result']
- def _batch(self, rpc_call_list):
+ def batch(self, rpc_call_list):
postdata = json.dumps(list(rpc_call_list), default=EncodeDecimal, ensure_ascii=self.ensure_ascii)
- log.debug("--> "+postdata)
+ log.debug("--> " + postdata)
return self._request('POST', self.__url.path, postdata.encode('utf-8'))
def _get_response(self):
@@ -187,7 +171,10 @@ class AuthServiceProxy(object):
response = json.loads(responsedata, parse_float=decimal.Decimal)
elapsed = time.time() - req_start_time
if "error" in response and response["error"] is None:
- log.debug("<-%s- [%.6f] %s"%(response["id"], elapsed, json.dumps(response["result"], default=EncodeDecimal, ensure_ascii=self.ensure_ascii)))
+ log.debug("<-%s- [%.6f] %s" % (response["id"], elapsed, json.dumps(response["result"], default=EncodeDecimal, ensure_ascii=self.ensure_ascii)))
else:
- log.debug("<-- [%.6f] %s"%(elapsed,responsedata))
+ log.debug("<-- [%.6f] %s" % (elapsed, responsedata))
return response
+
+ def __truediv__(self, relative_uri):
+ return AuthServiceProxy("{}/{}".format(self.__service_url, relative_uri), self._service_name, connection=self.__conn)
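The AuthServiceProxy changes above split request construction (get_request) from submission, expose batching as a public batch() method, and add a / operator for derived endpoints. A minimal sketch of a batched call, assuming a reachable regtest RPC endpoint; the URL and credentials are placeholders:

    from test_framework.authproxy import AuthServiceProxy

    rpc = AuthServiceProxy("http://user:pass@127.0.0.1:18443")  # placeholder endpoint

    # Build the JSON-RPC request objects without sending them...
    calls = [rpc.getblockcount.get_request(),
             rpc.getbestblockhash.get_request()]

    # ...then submit them in a single HTTP round trip.
    for reply in rpc.batch(calls):
        print(reply["result"])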
diff --git a/test/functional/test_framework/bignum.py b/test/functional/test_framework/bignum.py
index 024611da6e..db5ccd62c2 100644
--- a/test/functional/test_framework/bignum.py
+++ b/test/functional/test_framework/bignum.py
@@ -26,12 +26,6 @@ def bn2bin(v):
i -= 1
return s
-def bin2bn(s):
- l = 0
- for ch in s:
- l = (l << 8) | ch
- return l
-
def bn2mpi(v):
have_ext = False
if v.bit_length() > 0:
@@ -54,30 +48,6 @@ def bn2mpi(v):
v_bin[0] |= 0x80
return s + ext + v_bin
-def mpi2bn(s):
- if len(s) < 4:
- return None
- s_size = bytes(s[:4])
- v_len = struct.unpack(b">I", s_size)[0]
- if len(s) != (v_len + 4):
- return None
- if v_len == 0:
- return 0
-
- v_str = bytearray(s[4:])
- neg = False
- i = v_str[0]
- if i & 0x80:
- neg = True
- i &= ~0x80
- v_str[0] = i
-
- v = bin2bn(v_str)
-
- if neg:
- return -v
- return v
-
# bitcoin-specific little endian format, with implicit size
def mpi2vch(s):
r = s[4:] # strip size
@@ -86,12 +56,3 @@ def mpi2vch(s):
def bn2vch(v):
return bytes(mpi2vch(bn2mpi(v)))
-
-def vch2mpi(s):
- r = struct.pack(b">I", len(s)) # size
- r += s[::-1] # reverse string, converting LE->BE
- return r
-
-def vch2bn(s):
- return mpi2bn(vch2mpi(s))
-
diff --git a/test/functional/test_framework/blockstore.py b/test/functional/test_framework/blockstore.py
index 4cfd682bb5..051c57a6c7 100644
--- a/test/functional/test_framework/blockstore.py
+++ b/test/functional/test_framework/blockstore.py
@@ -10,7 +10,7 @@ import dbm.dumb as dbmd
logger = logging.getLogger("TestFramework.blockstore")
-class BlockStore(object):
+class BlockStore():
"""BlockStore helper class.
BlockStore keeps a map of blocks and implements helper functions for
@@ -100,7 +100,7 @@ class BlockStore(object):
def get_blocks(self, inv):
responses = []
for i in inv:
- if (i.type == 2): # MSG_BLOCK
+ if (i.type == 2 or i.type == (2 | (1 << 30))): # MSG_BLOCK or MSG_WITNESS_BLOCK
data = self.get(i.hash)
if data is not None:
# Use msg_generic to avoid re-serialization
@@ -127,7 +127,7 @@ class BlockStore(object):
locator.vHave = r
return locator
-class TxStore(object):
+class TxStore():
def __init__(self, datadir):
self.txDB = dbmd.open(datadir + "/transactions", 'c')
@@ -143,16 +143,6 @@ class TxStore(object):
return None
return value
- def get_transaction(self, txhash):
- ret = None
- serialized_tx = self.get(txhash)
- if serialized_tx is not None:
- f = BytesIO(serialized_tx)
- ret = CTransaction()
- ret.deserialize(f)
- ret.calc_sha256()
- return ret
-
def add_transaction(self, tx):
tx.calc_sha256()
try:
@@ -163,7 +153,7 @@ class TxStore(object):
def get_transactions(self, inv):
responses = []
for i in inv:
- if (i.type == 1): # MSG_TX
+ if (i.type == 1 or i.type == (1 | (1 << 30))): # MSG_TX or MSG_WITNESS_TX
tx = self.get(i.hash)
if tx is not None:
responses.append(msg_generic(b"tx", tx))
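blockstore.py above now serves witness-flagged inventory types the same way as their base types. A small worked example of the flag arithmetic, using the MSG_WITNESS_FLAG value introduced in messages.py later in this diff; the helper name is illustrative:

    MSG_TX = 1
    MSG_BLOCK = 2
    MSG_WITNESS_FLAG = 1 << 30

    def base_inv_type(inv_type):
        # Strip the witness bit so MSG_WITNESS_TX / MSG_WITNESS_BLOCK map back to
        # MSG_TX / MSG_BLOCK, which is what the block and tx stores key on.
        return inv_type & ~MSG_WITNESS_FLAG

    assert base_inv_type(MSG_TX | MSG_WITNESS_FLAG) == MSG_TX
    assert base_inv_type(MSG_BLOCK | MSG_WITNESS_FLAG) == MSG_BLOCK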
diff --git a/test/functional/test_framework/comptool.py b/test/functional/test_framework/comptool.py
index 9f062865a3..f0f5c847ca 100755
--- a/test/functional/test_framework/comptool.py
+++ b/test/functional/test_framework/comptool.py
@@ -19,7 +19,7 @@ TestNode behaves as follows:
from .mininode import *
from .blockstore import BlockStore, TxStore
-from .util import p2p_port
+from .util import p2p_port, wait_until
import logging
@@ -27,7 +27,7 @@ logger=logging.getLogger("TestFramework.comptool")
global mininode_lock
-class RejectResult(object):
+class RejectResult():
"""Outcome that expects rejection of a transaction or block."""
def __init__(self, code, reason=b''):
self.code = code
@@ -39,11 +39,10 @@ class RejectResult(object):
def __repr__(self):
return '%i:%s' % (self.code,self.reason or '*')
-class TestNode(NodeConnCB):
+class TestNode(P2PInterface):
def __init__(self, block_store, tx_store):
super().__init__()
- self.conn = None
self.bestblockhash = None
self.block_store = block_store
self.block_request_map = {}
@@ -58,43 +57,40 @@ class TestNode(NodeConnCB):
self.lastInv = []
self.closed = False
- def on_close(self, conn):
+ def on_close(self):
self.closed = True
- def add_connection(self, conn):
- self.conn = conn
-
- def on_headers(self, conn, message):
+ def on_headers(self, message):
if len(message.headers) > 0:
best_header = message.headers[-1]
best_header.calc_sha256()
self.bestblockhash = best_header.sha256
- def on_getheaders(self, conn, message):
+ def on_getheaders(self, message):
response = self.block_store.headers_for(message.locator, message.hashstop)
if response is not None:
- conn.send_message(response)
+ self.send_message(response)
- def on_getdata(self, conn, message):
- [conn.send_message(r) for r in self.block_store.get_blocks(message.inv)]
- [conn.send_message(r) for r in self.tx_store.get_transactions(message.inv)]
+ def on_getdata(self, message):
+ [self.send_message(r) for r in self.block_store.get_blocks(message.inv)]
+ [self.send_message(r) for r in self.tx_store.get_transactions(message.inv)]
for i in message.inv:
- if i.type == 1:
+ if i.type == 1 or i.type == 1 | (1 << 30): # MSG_TX or MSG_WITNESS_TX
self.tx_request_map[i.hash] = True
- elif i.type == 2:
+ elif i.type == 2 or i.type == 2 | (1 << 30): # MSG_BLOCK or MSG_WITNESS_BLOCK
self.block_request_map[i.hash] = True
- def on_inv(self, conn, message):
+ def on_inv(self, message):
self.lastInv = [x.hash for x in message.inv]
- def on_pong(self, conn, message):
+ def on_pong(self, message):
try:
del self.pingMap[message.nonce]
except KeyError:
raise AssertionError("Got pong for unknown ping [%s]" % repr(message))
- def on_reject(self, conn, message):
+ def on_reject(self, message):
if message.message == b'tx':
self.tx_reject_map[message.data] = RejectResult(message.code, message.reason)
if message.message == b'block':
@@ -102,30 +98,30 @@ class TestNode(NodeConnCB):
def send_inv(self, obj):
mtype = 2 if isinstance(obj, CBlock) else 1
- self.conn.send_message(msg_inv([CInv(mtype, obj.sha256)]))
+ self.send_message(msg_inv([CInv(mtype, obj.sha256)]))
def send_getheaders(self):
# We ask for headers from their last tip.
m = msg_getheaders()
m.locator = self.block_store.get_locator(self.bestblockhash)
- self.conn.send_message(m)
+ self.send_message(m)
def send_header(self, header):
m = msg_headers()
m.headers.append(header)
- self.conn.send_message(m)
+ self.send_message(m)
# This assumes BIP31
def send_ping(self, nonce):
self.pingMap[nonce] = True
- self.conn.send_message(msg_ping(nonce))
+ self.send_message(msg_ping(nonce))
def received_ping_response(self, nonce):
return nonce not in self.pingMap
def send_mempool(self):
self.lastInv = []
- self.conn.send_message(msg_mempool())
+ self.send_message(msg_mempool())
# TestInstance:
#
@@ -156,18 +152,17 @@ class TestNode(NodeConnCB):
# across all connections. (If outcome of final tx is specified as true
# or false, then only the last tx is tested against outcome.)
-class TestInstance(object):
+class TestInstance():
def __init__(self, objects=None, sync_every_block=True, sync_every_tx=False):
self.blocks_and_transactions = objects if objects else []
self.sync_every_block = sync_every_block
self.sync_every_tx = sync_every_tx
-class TestManager(object):
+class TestManager():
def __init__(self, testgen, datadir):
self.test_generator = testgen
- self.connections = []
- self.test_nodes = []
+ self.p2p_connections = []
self.block_store = BlockStore(datadir)
self.tx_store = TxStore(datadir)
self.ping_counter = 1
@@ -175,29 +170,25 @@ class TestManager(object):
def add_all_connections(self, nodes):
for i in range(len(nodes)):
# Create a p2p connection to each node
- test_node = TestNode(self.block_store, self.tx_store)
- self.test_nodes.append(test_node)
- self.connections.append(NodeConn('127.0.0.1', p2p_port(i), nodes[i], test_node))
- # Make sure the TestNode (callback class) has a reference to its
- # associated NodeConn
- test_node.add_connection(self.connections[-1])
+ node = TestNode(self.block_store, self.tx_store)
+ node.peer_connect('127.0.0.1', p2p_port(i))
+ self.p2p_connections.append(node)
def clear_all_connections(self):
- self.connections = []
- self.test_nodes = []
+ self.p2p_connections = []
def wait_for_disconnections(self):
def disconnected():
- return all(node.closed for node in self.test_nodes)
- return wait_until(disconnected, timeout=10)
+ return all(node.closed for node in self.p2p_connections)
+ wait_until(disconnected, timeout=10, lock=mininode_lock)
def wait_for_verack(self):
- return all(node.wait_for_verack() for node in self.test_nodes)
+ return all(node.wait_for_verack() for node in self.p2p_connections)
def wait_for_pings(self, counter):
def received_pongs():
- return all(node.received_ping_response(counter) for node in self.test_nodes)
- return wait_until(received_pongs)
+ return all(node.received_ping_response(counter) for node in self.p2p_connections)
+ wait_until(received_pongs, lock=mininode_lock)
# sync_blocks: Wait for all connections to request the blockhash given
# then send get_headers to find out the tip of each node, and synchronize
@@ -206,18 +197,17 @@ class TestManager(object):
def blocks_requested():
return all(
blockhash in node.block_request_map and node.block_request_map[blockhash]
- for node in self.test_nodes
+ for node in self.p2p_connections
)
# --> error if not requested
- if not wait_until(blocks_requested, attempts=20*num_blocks):
- raise AssertionError("Not all nodes requested block")
+ wait_until(blocks_requested, attempts=20*num_blocks, lock=mininode_lock)
# Send getheaders message
- [ c.cb.send_getheaders() for c in self.connections ]
+ [ c.send_getheaders() for c in self.p2p_connections ]
# Send ping and wait for response -- synchronization hack
- [ c.cb.send_ping(self.ping_counter) for c in self.connections ]
+ [ c.send_ping(self.ping_counter) for c in self.p2p_connections ]
self.wait_for_pings(self.ping_counter)
self.ping_counter += 1
@@ -227,43 +217,42 @@ class TestManager(object):
def transaction_requested():
return all(
txhash in node.tx_request_map and node.tx_request_map[txhash]
- for node in self.test_nodes
+ for node in self.p2p_connections
)
# --> error if not requested
- if not wait_until(transaction_requested, attempts=20*num_events):
- raise AssertionError("Not all nodes requested transaction")
+ wait_until(transaction_requested, attempts=20*num_events, lock=mininode_lock)
# Get the mempool
- [ c.cb.send_mempool() for c in self.connections ]
+ [ c.send_mempool() for c in self.p2p_connections ]
# Send ping and wait for response -- synchronization hack
- [ c.cb.send_ping(self.ping_counter) for c in self.connections ]
+ [ c.send_ping(self.ping_counter) for c in self.p2p_connections ]
self.wait_for_pings(self.ping_counter)
self.ping_counter += 1
# Sort inv responses from each node
with mininode_lock:
- [ c.cb.lastInv.sort() for c in self.connections ]
+ [ c.lastInv.sort() for c in self.p2p_connections ]
# Verify that the tip of each connection all agree with each other, and
# with the expected outcome (if given)
def check_results(self, blockhash, outcome):
with mininode_lock:
- for c in self.connections:
+ for c in self.p2p_connections:
if outcome is None:
- if c.cb.bestblockhash != self.connections[0].cb.bestblockhash:
+ if c.bestblockhash != self.p2p_connections[0].bestblockhash:
return False
elif isinstance(outcome, RejectResult): # Check that block was rejected w/ code
- if c.cb.bestblockhash == blockhash:
+ if c.bestblockhash == blockhash:
return False
- if blockhash not in c.cb.block_reject_map:
+ if blockhash not in c.block_reject_map:
logger.error('Block not in reject map: %064x' % (blockhash))
return False
- if not outcome.match(c.cb.block_reject_map[blockhash]):
- logger.error('Block rejected with %s instead of expected %s: %064x' % (c.cb.block_reject_map[blockhash], outcome, blockhash))
+ if not outcome.match(c.block_reject_map[blockhash]):
+ logger.error('Block rejected with %s instead of expected %s: %064x' % (c.block_reject_map[blockhash], outcome, blockhash))
return False
- elif ((c.cb.bestblockhash == blockhash) != outcome):
+ elif ((c.bestblockhash == blockhash) != outcome):
return False
return True
@@ -275,21 +264,21 @@ class TestManager(object):
# a particular tx's existence in the mempool is the same across all nodes.
def check_mempool(self, txhash, outcome):
with mininode_lock:
- for c in self.connections:
+ for c in self.p2p_connections:
if outcome is None:
# Make sure the mempools agree with each other
- if c.cb.lastInv != self.connections[0].cb.lastInv:
+ if c.lastInv != self.p2p_connections[0].lastInv:
return False
elif isinstance(outcome, RejectResult): # Check that tx was rejected w/ code
- if txhash in c.cb.lastInv:
+ if txhash in c.lastInv:
return False
- if txhash not in c.cb.tx_reject_map:
+ if txhash not in c.tx_reject_map:
logger.error('Tx not in reject map: %064x' % (txhash))
return False
- if not outcome.match(c.cb.tx_reject_map[txhash]):
- logger.error('Tx rejected with %s instead of expected %s: %064x' % (c.cb.tx_reject_map[txhash], outcome, txhash))
+ if not outcome.match(c.tx_reject_map[txhash]):
+ logger.error('Tx rejected with %s instead of expected %s: %064x' % (c.tx_reject_map[txhash], outcome, txhash))
return False
- elif ((txhash in c.cb.lastInv) != outcome):
+ elif ((txhash in c.lastInv) != outcome):
return False
return True
@@ -297,8 +286,11 @@ class TestManager(object):
# Wait until verack is received
self.wait_for_verack()
- test_number = 1
- for test_instance in self.test_generator.get_tests():
+ test_number = 0
+ tests = self.test_generator.get_tests()
+ for test_instance in tests:
+ test_number += 1
+ logger.info("Running test %d: %s line %s" % (test_number, tests.gi_code.co_filename, tests.gi_frame.f_lineno))
# We use these variables to keep track of the last block
# and last transaction in the tests, which are used
# if we're not syncing on every block or every tx.
@@ -331,25 +323,25 @@ class TestManager(object):
first_block_with_hash = False
with mininode_lock:
self.block_store.add_block(block)
- for c in self.connections:
- if first_block_with_hash and block.sha256 in c.cb.block_request_map and c.cb.block_request_map[block.sha256] == True:
+ for c in self.p2p_connections:
+ if first_block_with_hash and block.sha256 in c.block_request_map and c.block_request_map[block.sha256] == True:
# There was a previous request for this block hash
# Most likely, we delivered a header for this block
# but never had the block to respond to the getdata
c.send_message(msg_block(block))
else:
- c.cb.block_request_map[block.sha256] = False
+ c.block_request_map[block.sha256] = False
# Either send inv's to each node and sync, or add
# to invqueue for later inv'ing.
if (test_instance.sync_every_block):
# if we expect success, send inv and sync every block
# if we expect failure, just push the block and see what happens.
if outcome == True:
- [ c.cb.send_inv(block) for c in self.connections ]
+ [ c.send_inv(block) for c in self.p2p_connections ]
self.sync_blocks(block.sha256, 1)
else:
- [ c.send_message(msg_block(block)) for c in self.connections ]
- [ c.cb.send_ping(self.ping_counter) for c in self.connections ]
+ [ c.send_message(msg_block(block)) for c in self.p2p_connections ]
+ [ c.send_ping(self.ping_counter) for c in self.p2p_connections ]
self.wait_for_pings(self.ping_counter)
self.ping_counter += 1
if (not self.check_results(tip, outcome)):
@@ -359,7 +351,7 @@ class TestManager(object):
elif isinstance(b_or_t, CBlockHeader):
block_header = b_or_t
self.block_store.add_header(block_header)
- [ c.cb.send_header(block_header) for c in self.connections ]
+ [ c.send_header(block_header) for c in self.p2p_connections ]
else: # Tx test runner
assert(isinstance(b_or_t, CTransaction))
@@ -368,11 +360,11 @@ class TestManager(object):
# Add to shared tx store and clear map entry
with mininode_lock:
self.tx_store.add_transaction(tx)
- for c in self.connections:
- c.cb.tx_request_map[tx.sha256] = False
+ for c in self.p2p_connections:
+ c.tx_request_map[tx.sha256] = False
# Again, either inv to all nodes or save for later
if (test_instance.sync_every_tx):
- [ c.cb.send_inv(tx) for c in self.connections ]
+ [ c.send_inv(tx) for c in self.p2p_connections ]
self.sync_transaction(tx.sha256, 1)
if (not self.check_mempool(tx.sha256, outcome)):
raise AssertionError("Test failed at test %d" % test_number)
@@ -380,29 +372,26 @@ class TestManager(object):
invqueue.append(CInv(1, tx.sha256))
# Ensure we're not overflowing the inv queue
if len(invqueue) == MAX_INV_SZ:
- [ c.send_message(msg_inv(invqueue)) for c in self.connections ]
+ [ c.send_message(msg_inv(invqueue)) for c in self.p2p_connections ]
invqueue = []
# Do final sync if we weren't syncing on every block or every tx.
if (not test_instance.sync_every_block and block is not None):
if len(invqueue) > 0:
- [ c.send_message(msg_inv(invqueue)) for c in self.connections ]
+ [ c.send_message(msg_inv(invqueue)) for c in self.p2p_connections ]
invqueue = []
self.sync_blocks(block.sha256, len(test_instance.blocks_and_transactions))
if (not self.check_results(tip, block_outcome)):
raise AssertionError("Block test failed at test %d" % test_number)
if (not test_instance.sync_every_tx and tx is not None):
if len(invqueue) > 0:
- [ c.send_message(msg_inv(invqueue)) for c in self.connections ]
+ [ c.send_message(msg_inv(invqueue)) for c in self.p2p_connections ]
invqueue = []
self.sync_transaction(tx.sha256, len(test_instance.blocks_and_transactions))
if (not self.check_mempool(tx.sha256, tx_outcome)):
raise AssertionError("Mempool test failed at test %d" % test_number)
- logger.info("Test %d: PASS" % test_number)
- test_number += 1
-
- [ c.disconnect_node() for c in self.connections ]
+ [ c.disconnect_node() for c in self.p2p_connections ]
self.wait_for_disconnections()
self.block_store.close()
self.tx_store.close()
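comptool's TestNode now derives from P2PInterface: message callbacks receive only the message, and each peer connects itself with peer_connect instead of being paired with a separate NodeConn. A minimal sketch of that pattern, assuming the P2PInterface, NetworkThread, and p2p_port helpers referenced in this diff; the subclass and its handler are illustrative:

    from test_framework.mininode import NetworkThread, P2PInterface
    from test_framework.util import p2p_port

    class InvLogger(P2PInterface):
        def __init__(self):
            super().__init__()
            self.seen_invs = []

        def on_inv(self, message):
            # No conn argument any more; send_message() goes through the
            # connection owned by this P2PInterface instance.
            self.seen_invs.extend(x.hash for x in message.inv)

    peer = InvLogger()
    peer.peer_connect('127.0.0.1', p2p_port(0))
    NetworkThread().start()  # start the loop that services the connection
    peer.wait_for_verack()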
diff --git a/test/functional/test_framework/coverage.py b/test/functional/test_framework/coverage.py
index 3f87ef91f6..ddc3c515b2 100644
--- a/test/functional/test_framework/coverage.py
+++ b/test/functional/test_framework/coverage.py
@@ -14,7 +14,7 @@ import os
REFERENCE_FILENAME = 'rpc_interface.txt'
-class AuthServiceProxyWrapper(object):
+class AuthServiceProxyWrapper():
"""
An object that wraps AuthServiceProxy to record specific RPC calls.
@@ -31,10 +31,11 @@ class AuthServiceProxyWrapper(object):
self.auth_service_proxy_instance = auth_service_proxy_instance
self.coverage_logfile = coverage_logfile
- def __getattr__(self, *args, **kwargs):
- return_val = self.auth_service_proxy_instance.__getattr__(
- *args, **kwargs)
-
+ def __getattr__(self, name):
+ return_val = getattr(self.auth_service_proxy_instance, name)
+ if not isinstance(return_val, type(self.auth_service_proxy_instance)):
+ # If proxy getattr returned an unwrapped value, do the same here.
+ return return_val
return AuthServiceProxyWrapper(return_val, self.coverage_logfile)
def __call__(self, *args, **kwargs):
@@ -44,18 +45,23 @@ class AuthServiceProxyWrapper(object):
"""
return_val = self.auth_service_proxy_instance.__call__(*args, **kwargs)
+ self._log_call()
+ return return_val
+
+ def _log_call(self):
rpc_method = self.auth_service_proxy_instance._service_name
if self.coverage_logfile:
with open(self.coverage_logfile, 'a+', encoding='utf8') as f:
f.write("%s\n" % rpc_method)
- return return_val
-
- @property
- def url(self):
- return self.auth_service_proxy_instance.url
+ def __truediv__(self, relative_uri):
+ return AuthServiceProxyWrapper(self.auth_service_proxy_instance / relative_uri,
+ self.coverage_logfile)
+ def get_request(self, *args, **kwargs):
+ self._log_call()
+ return self.auth_service_proxy_instance.get_request(*args, **kwargs)
def get_filename(dirname, n_node):
"""
diff --git a/test/functional/test_framework/key.py b/test/functional/test_framework/key.py
index 85a6158a2f..aa91fb5b0d 100644
--- a/test/functional/test_framework/key.py
+++ b/test/functional/test_framework/key.py
@@ -84,7 +84,7 @@ def _check_result(val, func, args):
ssl.EC_KEY_new_by_curve_name.restype = ctypes.c_void_p
ssl.EC_KEY_new_by_curve_name.errcheck = _check_result
-class CECKey(object):
+class CECKey():
"""Wrapper around OpenSSL's EC_KEY"""
POINT_CONVERSION_COMPRESSED = 2
diff --git a/test/functional/test_framework/messages.py b/test/functional/test_framework/messages.py
new file mode 100644
index 0000000000..2ab1bdac0f
--- /dev/null
+++ b/test/functional/test_framework/messages.py
@@ -0,0 +1,1304 @@
+#!/usr/bin/env python3
+# Copyright (c) 2010 ArtForz -- public domain half-a-node
+# Copyright (c) 2012 Jeff Garzik
+# Copyright (c) 2010-2017 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Bitcoin test framework primitive and message strcutures
+
+CBlock, CTransaction, CBlockHeader, CTxIn, CTxOut, etc....:
+ data structures that should map to corresponding structures in
+ bitcoin/primitives
+
+msg_block, msg_tx, msg_headers, etc.:
+ data structures that represent network messages
+
+ser_*, deser_*: functions that handle serialization/deserialization."""
+from codecs import encode
+import copy
+import hashlib
+from io import BytesIO
+import random
+import socket
+import struct
+import time
+
+from test_framework.siphash import siphash256
+from test_framework.util import hex_str_to_bytes, bytes_to_hex_str
+
+MIN_VERSION_SUPPORTED = 60001
+MY_VERSION = 70014 # past bip-31 for ping/pong
+MY_SUBVERSION = b"/python-mininode-tester:0.0.3/"
+MY_RELAY = 1 # from version 70001 onwards, fRelay should be appended to version messages (BIP37)
+
+MAX_INV_SZ = 50000
+MAX_BLOCK_BASE_SIZE = 1000000
+
+COIN = 100000000 # 1 btc in satoshis
+
+NODE_NETWORK = (1 << 0)
+# NODE_GETUTXO = (1 << 1)
+# NODE_BLOOM = (1 << 2)
+NODE_WITNESS = (1 << 3)
+NODE_UNSUPPORTED_SERVICE_BIT_5 = (1 << 5)
+NODE_UNSUPPORTED_SERVICE_BIT_7 = (1 << 7)
+
+# Serialization/deserialization tools
+def sha256(s):
+ return hashlib.new('sha256', s).digest()
+
+def ripemd160(s):
+ return hashlib.new('ripemd160', s).digest()
+
+def hash256(s):
+ return sha256(sha256(s))
+
+def ser_compact_size(l):
+ r = b""
+ if l < 253:
+ r = struct.pack("B", l)
+ elif l < 0x10000:
+ r = struct.pack("<BH", 253, l)
+ elif l < 0x100000000:
+ r = struct.pack("<BI", 254, l)
+ else:
+ r = struct.pack("<BQ", 255, l)
+ return r
+
+def deser_compact_size(f):
+ nit = struct.unpack("<B", f.read(1))[0]
+ if nit == 253:
+ nit = struct.unpack("<H", f.read(2))[0]
+ elif nit == 254:
+ nit = struct.unpack("<I", f.read(4))[0]
+ elif nit == 255:
+ nit = struct.unpack("<Q", f.read(8))[0]
+ return nit
+
+def deser_string(f):
+ nit = deser_compact_size(f)
+ return f.read(nit)
+
+def ser_string(s):
+ return ser_compact_size(len(s)) + s
+
+def deser_uint256(f):
+ r = 0
+ for i in range(8):
+ t = struct.unpack("<I", f.read(4))[0]
+ r += t << (i * 32)
+ return r
+
+
+def ser_uint256(u):
+ rs = b""
+ for i in range(8):
+ rs += struct.pack("<I", u & 0xFFFFFFFF)
+ u >>= 32
+ return rs
+
+
+def uint256_from_str(s):
+ r = 0
+ t = struct.unpack("<IIIIIIII", s[:32])
+ for i in range(8):
+ r += t[i] << (i * 32)
+ return r
+
+
+def uint256_from_compact(c):
+ nbytes = (c >> 24) & 0xFF
+ v = (c & 0xFFFFFF) << (8 * (nbytes - 3))
+ return v
+
+
+def deser_vector(f, c):
+ nit = deser_compact_size(f)
+ r = []
+ for i in range(nit):
+ t = c()
+ t.deserialize(f)
+ r.append(t)
+ return r
+
+
+# ser_function_name: Allow for an alternate serialization function on the
+# entries in the vector (we use this for serializing the vector of transactions
+# for a witness block).
+def ser_vector(l, ser_function_name=None):
+ r = ser_compact_size(len(l))
+ for i in l:
+ if ser_function_name:
+ r += getattr(i, ser_function_name)()
+ else:
+ r += i.serialize()
+ return r
+
+
+def deser_uint256_vector(f):
+ nit = deser_compact_size(f)
+ r = []
+ for i in range(nit):
+ t = deser_uint256(f)
+ r.append(t)
+ return r
+
+
+def ser_uint256_vector(l):
+ r = ser_compact_size(len(l))
+ for i in l:
+ r += ser_uint256(i)
+ return r
+
+
+def deser_string_vector(f):
+ nit = deser_compact_size(f)
+ r = []
+ for i in range(nit):
+ t = deser_string(f)
+ r.append(t)
+ return r
+
+
+def ser_string_vector(l):
+ r = ser_compact_size(len(l))
+ for sv in l:
+ r += ser_string(sv)
+ return r
+
+
+# Deserialize from a hex string representation (eg from RPC)
+def FromHex(obj, hex_string):
+ obj.deserialize(BytesIO(hex_str_to_bytes(hex_string)))
+ return obj
+
+# Convert a binary-serializable object to hex (eg for submission via RPC)
+def ToHex(obj):
+ return bytes_to_hex_str(obj.serialize())
+
+# Objects that map to bitcoind objects, which can be serialized/deserialized
+
+class CAddress():
+ def __init__(self):
+ self.nServices = 1
+ self.pchReserved = b"\x00" * 10 + b"\xff" * 2
+ self.ip = "0.0.0.0"
+ self.port = 0
+
+ def deserialize(self, f):
+ self.nServices = struct.unpack("<Q", f.read(8))[0]
+ self.pchReserved = f.read(12)
+ self.ip = socket.inet_ntoa(f.read(4))
+ self.port = struct.unpack(">H", f.read(2))[0]
+
+ def serialize(self):
+ r = b""
+ r += struct.pack("<Q", self.nServices)
+ r += self.pchReserved
+ r += socket.inet_aton(self.ip)
+ r += struct.pack(">H", self.port)
+ return r
+
+ def __repr__(self):
+ return "CAddress(nServices=%i ip=%s port=%i)" % (self.nServices,
+ self.ip, self.port)
+
+MSG_WITNESS_FLAG = 1<<30
+
+class CInv():
+ typemap = {
+ 0: "Error",
+ 1: "TX",
+ 2: "Block",
+ 1|MSG_WITNESS_FLAG: "WitnessTx",
+ 2|MSG_WITNESS_FLAG: "WitnessBlock",
+ 4: "CompactBlock"
+ }
+
+ def __init__(self, t=0, h=0):
+ self.type = t
+ self.hash = h
+
+ def deserialize(self, f):
+ self.type = struct.unpack("<i", f.read(4))[0]
+ self.hash = deser_uint256(f)
+
+ def serialize(self):
+ r = b""
+ r += struct.pack("<i", self.type)
+ r += ser_uint256(self.hash)
+ return r
+
+ def __repr__(self):
+ return "CInv(type=%s hash=%064x)" \
+ % (self.typemap[self.type], self.hash)
+
+
+class CBlockLocator():
+ def __init__(self):
+ self.nVersion = MY_VERSION
+ self.vHave = []
+
+ def deserialize(self, f):
+ self.nVersion = struct.unpack("<i", f.read(4))[0]
+ self.vHave = deser_uint256_vector(f)
+
+ def serialize(self):
+ r = b""
+ r += struct.pack("<i", self.nVersion)
+ r += ser_uint256_vector(self.vHave)
+ return r
+
+ def __repr__(self):
+ return "CBlockLocator(nVersion=%i vHave=%s)" \
+ % (self.nVersion, repr(self.vHave))
+
+
+class COutPoint():
+ def __init__(self, hash=0, n=0):
+ self.hash = hash
+ self.n = n
+
+ def deserialize(self, f):
+ self.hash = deser_uint256(f)
+ self.n = struct.unpack("<I", f.read(4))[0]
+
+ def serialize(self):
+ r = b""
+ r += ser_uint256(self.hash)
+ r += struct.pack("<I", self.n)
+ return r
+
+ def __repr__(self):
+ return "COutPoint(hash=%064x n=%i)" % (self.hash, self.n)
+
+
+class CTxIn():
+ def __init__(self, outpoint=None, scriptSig=b"", nSequence=0):
+ if outpoint is None:
+ self.prevout = COutPoint()
+ else:
+ self.prevout = outpoint
+ self.scriptSig = scriptSig
+ self.nSequence = nSequence
+
+ def deserialize(self, f):
+ self.prevout = COutPoint()
+ self.prevout.deserialize(f)
+ self.scriptSig = deser_string(f)
+ self.nSequence = struct.unpack("<I", f.read(4))[0]
+
+ def serialize(self):
+ r = b""
+ r += self.prevout.serialize()
+ r += ser_string(self.scriptSig)
+ r += struct.pack("<I", self.nSequence)
+ return r
+
+ def __repr__(self):
+ return "CTxIn(prevout=%s scriptSig=%s nSequence=%i)" \
+ % (repr(self.prevout), bytes_to_hex_str(self.scriptSig),
+ self.nSequence)
+
+
+class CTxOut():
+ def __init__(self, nValue=0, scriptPubKey=b""):
+ self.nValue = nValue
+ self.scriptPubKey = scriptPubKey
+
+ def deserialize(self, f):
+ self.nValue = struct.unpack("<q", f.read(8))[0]
+ self.scriptPubKey = deser_string(f)
+
+ def serialize(self):
+ r = b""
+ r += struct.pack("<q", self.nValue)
+ r += ser_string(self.scriptPubKey)
+ return r
+
+ def __repr__(self):
+ return "CTxOut(nValue=%i.%08i scriptPubKey=%s)" \
+ % (self.nValue // COIN, self.nValue % COIN,
+ bytes_to_hex_str(self.scriptPubKey))
+
+
+class CScriptWitness():
+ def __init__(self):
+ # stack is a vector of strings
+ self.stack = []
+
+ def __repr__(self):
+ return "CScriptWitness(%s)" % \
+ (",".join([bytes_to_hex_str(x) for x in self.stack]))
+
+ def is_null(self):
+ if self.stack:
+ return False
+ return True
+
+
+class CTxInWitness():
+ def __init__(self):
+ self.scriptWitness = CScriptWitness()
+
+ def deserialize(self, f):
+ self.scriptWitness.stack = deser_string_vector(f)
+
+ def serialize(self):
+ return ser_string_vector(self.scriptWitness.stack)
+
+ def __repr__(self):
+ return repr(self.scriptWitness)
+
+ def is_null(self):
+ return self.scriptWitness.is_null()
+
+
+class CTxWitness():
+ def __init__(self):
+ self.vtxinwit = []
+
+ def deserialize(self, f):
+ for i in range(len(self.vtxinwit)):
+ self.vtxinwit[i].deserialize(f)
+
+ def serialize(self):
+ r = b""
+ # This differs from the usual vector serialization --
+ # we omit the vector's length, because it is required to
+ # match the length of the transaction's vin vector.
+ for x in self.vtxinwit:
+ r += x.serialize()
+ return r
+
+ def __repr__(self):
+ return "CTxWitness(%s)" % \
+ (';'.join([repr(x) for x in self.vtxinwit]))
+
+ def is_null(self):
+ for x in self.vtxinwit:
+ if not x.is_null():
+ return False
+ return True
+
+
+class CTransaction():
+ def __init__(self, tx=None):
+ if tx is None:
+ self.nVersion = 1
+ self.vin = []
+ self.vout = []
+ self.wit = CTxWitness()
+ self.nLockTime = 0
+ self.sha256 = None
+ self.hash = None
+ else:
+ self.nVersion = tx.nVersion
+ self.vin = copy.deepcopy(tx.vin)
+ self.vout = copy.deepcopy(tx.vout)
+ self.nLockTime = tx.nLockTime
+ self.sha256 = tx.sha256
+ self.hash = tx.hash
+ self.wit = copy.deepcopy(tx.wit)
+
+ def deserialize(self, f):
+ self.nVersion = struct.unpack("<i", f.read(4))[0]
+ self.vin = deser_vector(f, CTxIn)
+ flags = 0
+ if len(self.vin) == 0:
+ flags = struct.unpack("<B", f.read(1))[0]
+ # Not sure why flags can't be zero, but this
+ # matches the implementation in bitcoind
+ if (flags != 0):
+ self.vin = deser_vector(f, CTxIn)
+ self.vout = deser_vector(f, CTxOut)
+ else:
+ self.vout = deser_vector(f, CTxOut)
+ if flags != 0:
+ self.wit.vtxinwit = [CTxInWitness() for i in range(len(self.vin))]
+ self.wit.deserialize(f)
+ self.nLockTime = struct.unpack("<I", f.read(4))[0]
+ self.sha256 = None
+ self.hash = None
+
+ def serialize_without_witness(self):
+ r = b""
+ r += struct.pack("<i", self.nVersion)
+ r += ser_vector(self.vin)
+ r += ser_vector(self.vout)
+ r += struct.pack("<I", self.nLockTime)
+ return r
+
+ # Only serialize with witness when explicitly called for
+ def serialize_with_witness(self):
+ flags = 0
+ if not self.wit.is_null():
+ flags |= 1
+ r = b""
+ r += struct.pack("<i", self.nVersion)
+ if flags:
+ dummy = []
+ r += ser_vector(dummy)
+ r += struct.pack("<B", flags)
+ r += ser_vector(self.vin)
+ r += ser_vector(self.vout)
+ if flags & 1:
+ if (len(self.wit.vtxinwit) != len(self.vin)):
+ # vtxinwit must have the same length as vin
+ self.wit.vtxinwit = self.wit.vtxinwit[:len(self.vin)]
+ for i in range(len(self.wit.vtxinwit), len(self.vin)):
+ self.wit.vtxinwit.append(CTxInWitness())
+ r += self.wit.serialize()
+ r += struct.pack("<I", self.nLockTime)
+ return r
+
+ # Regular serialization is without witness -- must explicitly
+ # call serialize_with_witness to include witness data.
+ def serialize(self):
+ return self.serialize_without_witness()
+
+ # Recalculate the txid (transaction hash without witness)
+ def rehash(self):
+ self.sha256 = None
+ self.calc_sha256()
+
+ # We will only cache the serialization without witness in
+ # self.sha256 and self.hash -- those are expected to be the txid.
+ def calc_sha256(self, with_witness=False):
+ if with_witness:
+ # Don't cache the result, just return it
+ return uint256_from_str(hash256(self.serialize_with_witness()))
+
+ if self.sha256 is None:
+ self.sha256 = uint256_from_str(hash256(self.serialize_without_witness()))
+ self.hash = encode(hash256(self.serialize())[::-1], 'hex_codec').decode('ascii')
+
+ def is_valid(self):
+ self.calc_sha256()
+ for tout in self.vout:
+ if tout.nValue < 0 or tout.nValue > 21000000 * COIN:
+ return False
+ return True
+
+ def __repr__(self):
+ return "CTransaction(nVersion=%i vin=%s vout=%s wit=%s nLockTime=%i)" \
+ % (self.nVersion, repr(self.vin), repr(self.vout), repr(self.wit), self.nLockTime)
+
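Putting the transaction primitives together, a minimal sketch of constructing and hex-encoding a one-input, one-output transaction (funding_txid is a hypothetical hex txid with a spendable output 0):

    tx = CTransaction()
    tx.vin.append(CTxIn(COutPoint(int(funding_txid, 16), 0)))
    tx.vout.append(CTxOut(int(0.1 * COIN), b"\x51"))   # 0.1 BTC to an anyone-can-spend OP_TRUE script
    tx.rehash()                                        # fills in tx.sha256 / tx.hash (the txid)
    raw_hex = ToHex(tx)                                # suitable for submission over RPC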
+
+class CBlockHeader():
+ def __init__(self, header=None):
+ if header is None:
+ self.set_null()
+ else:
+ self.nVersion = header.nVersion
+ self.hashPrevBlock = header.hashPrevBlock
+ self.hashMerkleRoot = header.hashMerkleRoot
+ self.nTime = header.nTime
+ self.nBits = header.nBits
+ self.nNonce = header.nNonce
+ self.sha256 = header.sha256
+ self.hash = header.hash
+ self.calc_sha256()
+
+ def set_null(self):
+ self.nVersion = 1
+ self.hashPrevBlock = 0
+ self.hashMerkleRoot = 0
+ self.nTime = 0
+ self.nBits = 0
+ self.nNonce = 0
+ self.sha256 = None
+ self.hash = None
+
+ def deserialize(self, f):
+ self.nVersion = struct.unpack("<i", f.read(4))[0]
+ self.hashPrevBlock = deser_uint256(f)
+ self.hashMerkleRoot = deser_uint256(f)
+ self.nTime = struct.unpack("<I", f.read(4))[0]
+ self.nBits = struct.unpack("<I", f.read(4))[0]
+ self.nNonce = struct.unpack("<I", f.read(4))[0]
+ self.sha256 = None
+ self.hash = None
+
+ def serialize(self):
+ r = b""
+ r += struct.pack("<i", self.nVersion)
+ r += ser_uint256(self.hashPrevBlock)
+ r += ser_uint256(self.hashMerkleRoot)
+ r += struct.pack("<I", self.nTime)
+ r += struct.pack("<I", self.nBits)
+ r += struct.pack("<I", self.nNonce)
+ return r
+
+ def calc_sha256(self):
+ if self.sha256 is None:
+ r = b""
+ r += struct.pack("<i", self.nVersion)
+ r += ser_uint256(self.hashPrevBlock)
+ r += ser_uint256(self.hashMerkleRoot)
+ r += struct.pack("<I", self.nTime)
+ r += struct.pack("<I", self.nBits)
+ r += struct.pack("<I", self.nNonce)
+ self.sha256 = uint256_from_str(hash256(r))
+ self.hash = encode(hash256(r)[::-1], 'hex_codec').decode('ascii')
+
+ def rehash(self):
+ self.sha256 = None
+ self.calc_sha256()
+ return self.sha256
+
+ def __repr__(self):
+ return "CBlockHeader(nVersion=%i hashPrevBlock=%064x hashMerkleRoot=%064x nTime=%s nBits=%08x nNonce=%08x)" \
+ % (self.nVersion, self.hashPrevBlock, self.hashMerkleRoot,
+ time.ctime(self.nTime), self.nBits, self.nNonce)
+
+
+class CBlock(CBlockHeader):
+ def __init__(self, header=None):
+ super(CBlock, self).__init__(header)
+ self.vtx = []
+
+ def deserialize(self, f):
+ super(CBlock, self).deserialize(f)
+ self.vtx = deser_vector(f, CTransaction)
+
+ def serialize(self, with_witness=False):
+ r = b""
+ r += super(CBlock, self).serialize()
+ if with_witness:
+ r += ser_vector(self.vtx, "serialize_with_witness")
+ else:
+ r += ser_vector(self.vtx)
+ return r
+
+ # Calculate the merkle root given a vector of transaction hashes
+ @classmethod
+ def get_merkle_root(cls, hashes):
+ while len(hashes) > 1:
+ newhashes = []
+ for i in range(0, len(hashes), 2):
+ i2 = min(i+1, len(hashes)-1)
+ newhashes.append(hash256(hashes[i] + hashes[i2]))
+ hashes = newhashes
+ return uint256_from_str(hashes[0])
+
+ def calc_merkle_root(self):
+ hashes = []
+ for tx in self.vtx:
+ tx.calc_sha256()
+ hashes.append(ser_uint256(tx.sha256))
+ return self.get_merkle_root(hashes)
+
+ def calc_witness_merkle_root(self):
+ # For witness root purposes, the hash of the
+ # coinbase, with witness, is defined to be 0...0
+ hashes = [ser_uint256(0)]
+
+ for tx in self.vtx[1:]:
+ # Calculate the hashes with witness data
+ hashes.append(ser_uint256(tx.calc_sha256(True)))
+
+ return self.get_merkle_root(hashes)
+
+ def is_valid(self):
+ self.calc_sha256()
+ target = uint256_from_compact(self.nBits)
+ if self.sha256 > target:
+ return False
+ for tx in self.vtx:
+ if not tx.is_valid():
+ return False
+ if self.calc_merkle_root() != self.hashMerkleRoot:
+ return False
+ return True
+
+ def solve(self):
+ self.rehash()
+ target = uint256_from_compact(self.nBits)
+ while self.sha256 > target:
+ self.nNonce += 1
+ self.rehash()
+
+ def __repr__(self):
+ return "CBlock(nVersion=%i hashPrevBlock=%064x hashMerkleRoot=%064x nTime=%s nBits=%08x nNonce=%08x vtx=%s)" \
+ % (self.nVersion, self.hashPrevBlock, self.hashMerkleRoot,
+ time.ctime(self.nTime), self.nBits, self.nNonce, repr(self.vtx))
+
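solve() above grinds nNonce until the header hash meets the compact target encoded in nBits. A sketch of assembling and solving a block at minimal difficulty (tip_hash, tip_time and coinbase_tx are assumed to be known from context):

    block = CBlock()
    block.nVersion = 4
    block.hashPrevBlock = int(tip_hash, 16)
    block.nTime = tip_time + 1
    block.nBits = 0x207fffff                    # regtest-style minimal difficulty
    block.vtx = [coinbase_tx]
    block.hashMerkleRoot = block.calc_merkle_root()
    block.solve()                               # after this the header hash satisfies the target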
+
+class PrefilledTransaction():
+ def __init__(self, index=0, tx = None):
+ self.index = index
+ self.tx = tx
+
+ def deserialize(self, f):
+ self.index = deser_compact_size(f)
+ self.tx = CTransaction()
+ self.tx.deserialize(f)
+
+ def serialize(self, with_witness=False):
+ r = b""
+ r += ser_compact_size(self.index)
+ if with_witness:
+ r += self.tx.serialize_with_witness()
+ else:
+ r += self.tx.serialize_without_witness()
+ return r
+
+ def serialize_with_witness(self):
+ return self.serialize(with_witness=True)
+
+ def __repr__(self):
+ return "PrefilledTransaction(index=%d, tx=%s)" % (self.index, repr(self.tx))
+
+# This is what we send on the wire, in a cmpctblock message.
+class P2PHeaderAndShortIDs():
+ def __init__(self):
+ self.header = CBlockHeader()
+ self.nonce = 0
+ self.shortids_length = 0
+ self.shortids = []
+ self.prefilled_txn_length = 0
+ self.prefilled_txn = []
+
+ def deserialize(self, f):
+ self.header.deserialize(f)
+ self.nonce = struct.unpack("<Q", f.read(8))[0]
+ self.shortids_length = deser_compact_size(f)
+ for i in range(self.shortids_length):
+ # shortids are defined to be 6 bytes in the spec, so append
+ # two zero bytes and read it in as an 8-byte number
+ self.shortids.append(struct.unpack("<Q", f.read(6) + b'\x00\x00')[0])
+ self.prefilled_txn = deser_vector(f, PrefilledTransaction)
+ self.prefilled_txn_length = len(self.prefilled_txn)
+
+ # When using version 2 compact blocks, we must serialize with_witness.
+ def serialize(self, with_witness=False):
+ r = b""
+ r += self.header.serialize()
+ r += struct.pack("<Q", self.nonce)
+ r += ser_compact_size(self.shortids_length)
+ for x in self.shortids:
+ # We only want the first 6 bytes
+ r += struct.pack("<Q", x)[0:6]
+ if with_witness:
+ r += ser_vector(self.prefilled_txn, "serialize_with_witness")
+ else:
+ r += ser_vector(self.prefilled_txn)
+ return r
+
+ def __repr__(self):
+ return "P2PHeaderAndShortIDs(header=%s, nonce=%d, shortids_length=%d, shortids=%s, prefilled_txn_length=%d, prefilledtxn=%s" % (repr(self.header), self.nonce, self.shortids_length, repr(self.shortids), self.prefilled_txn_length, repr(self.prefilled_txn))
+
+# P2P version of the above that will use witness serialization (for compact
+# block version 2)
+class P2PHeaderAndShortWitnessIDs(P2PHeaderAndShortIDs):
+ def serialize(self):
+ return super(P2PHeaderAndShortWitnessIDs, self).serialize(with_witness=True)
+
+# Calculate the BIP 152 compact block shortid for a given transaction hash
+def calculate_shortid(k0, k1, tx_hash):
+ expected_shortid = siphash256(k0, k1, tx_hash)
+ expected_shortid &= 0x0000ffffffffffff
+ return expected_shortid
+
+# This version gets rid of the array lengths, and reinterprets the differential
+# encoding into indices that can be used for lookup.
+class HeaderAndShortIDs():
+ def __init__(self, p2pheaders_and_shortids = None):
+ self.header = CBlockHeader()
+ self.nonce = 0
+ self.shortids = []
+ self.prefilled_txn = []
+ self.use_witness = False
+
+ if p2pheaders_and_shortids != None:
+ self.header = p2pheaders_and_shortids.header
+ self.nonce = p2pheaders_and_shortids.nonce
+ self.shortids = p2pheaders_and_shortids.shortids
+ last_index = -1
+ for x in p2pheaders_and_shortids.prefilled_txn:
+ self.prefilled_txn.append(PrefilledTransaction(x.index + last_index + 1, x.tx))
+ last_index = self.prefilled_txn[-1].index
+
+ def to_p2p(self):
+ if self.use_witness:
+ ret = P2PHeaderAndShortWitnessIDs()
+ else:
+ ret = P2PHeaderAndShortIDs()
+ ret.header = self.header
+ ret.nonce = self.nonce
+ ret.shortids_length = len(self.shortids)
+ ret.shortids = self.shortids
+ ret.prefilled_txn_length = len(self.prefilled_txn)
+ ret.prefilled_txn = []
+ last_index = -1
+ for x in self.prefilled_txn:
+ ret.prefilled_txn.append(PrefilledTransaction(x.index - last_index - 1, x.tx))
+ last_index = x.index
+ return ret
+
+ def get_siphash_keys(self):
+ header_nonce = self.header.serialize()
+ header_nonce += struct.pack("<Q", self.nonce)
+ hash_header_nonce_as_str = sha256(header_nonce)
+ key0 = struct.unpack("<Q", hash_header_nonce_as_str[0:8])[0]
+ key1 = struct.unpack("<Q", hash_header_nonce_as_str[8:16])[0]
+ return [ key0, key1 ]
+
+ # Version 2 compact blocks use wtxid in shortids (rather than txid)
+ def initialize_from_block(self, block, nonce=0, prefill_list = [0], use_witness = False):
+ self.header = CBlockHeader(block)
+ self.nonce = nonce
+ self.prefilled_txn = [ PrefilledTransaction(i, block.vtx[i]) for i in prefill_list ]
+ self.shortids = []
+ self.use_witness = use_witness
+ [k0, k1] = self.get_siphash_keys()
+ for i in range(len(block.vtx)):
+ if i not in prefill_list:
+ tx_hash = block.vtx[i].sha256
+ if use_witness:
+ tx_hash = block.vtx[i].calc_sha256(with_witness=True)
+ self.shortids.append(calculate_shortid(k0, k1, tx_hash))
+
+ def __repr__(self):
+ return "HeaderAndShortIDs(header=%s, nonce=%d, shortids=%s, prefilledtxn=%s" % (repr(self.header), self.nonce, repr(self.shortids), repr(self.prefilled_txn))
+
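For the compact-block helpers, a sketch of announcing a solved block as a version-1 cmpctblock (every transaction in block.vtx must already have its sha256 computed, e.g. via rehash(); msg_cmpctblock is defined later in this file):

    comp = HeaderAndShortIDs()
    comp.initialize_from_block(block, nonce=0, prefill_list=[0])  # prefill only the coinbase
    announcement = msg_cmpctblock(comp.to_p2p())                  # indexes re-encoded differentially for the wire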
+
+class BlockTransactionsRequest():
+
+ def __init__(self, blockhash=0, indexes = None):
+ self.blockhash = blockhash
+ self.indexes = indexes if indexes != None else []
+
+ def deserialize(self, f):
+ self.blockhash = deser_uint256(f)
+ indexes_length = deser_compact_size(f)
+ for i in range(indexes_length):
+ self.indexes.append(deser_compact_size(f))
+
+ def serialize(self):
+ r = b""
+ r += ser_uint256(self.blockhash)
+ r += ser_compact_size(len(self.indexes))
+ for x in self.indexes:
+ r += ser_compact_size(x)
+ return r
+
+ # helper to set the differentially encoded indexes from absolute ones
+ def from_absolute(self, absolute_indexes):
+ self.indexes = []
+ last_index = -1
+ for x in absolute_indexes:
+ self.indexes.append(x-last_index-1)
+ last_index = x
+
+ def to_absolute(self):
+ absolute_indexes = []
+ last_index = -1
+ for x in self.indexes:
+ absolute_indexes.append(x+last_index+1)
+ last_index = absolute_indexes[-1]
+ return absolute_indexes
+
+ def __repr__(self):
+ return "BlockTransactionsRequest(hash=%064x indexes=%s)" % (self.blockhash, repr(self.indexes))
+
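The differential index encoding handled by from_absolute()/to_absolute() above can be seen in a short sketch (block is a CBlock whose sha256 is known):

    req = BlockTransactionsRequest(blockhash=block.sha256)
    req.from_absolute([1, 3, 4])          # stored on the wire as the deltas [1, 1, 0]
    assert req.to_absolute() == [1, 3, 4]
    getblocktxn = msg_getblocktxn()
    getblocktxn.block_txn_request = req   # ready to send to the peer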
+
+class BlockTransactions():
+
+ def __init__(self, blockhash=0, transactions = None):
+ self.blockhash = blockhash
+ self.transactions = transactions if transactions != None else []
+
+ def deserialize(self, f):
+ self.blockhash = deser_uint256(f)
+ self.transactions = deser_vector(f, CTransaction)
+
+ def serialize(self, with_witness=False):
+ r = b""
+ r += ser_uint256(self.blockhash)
+ if with_witness:
+ r += ser_vector(self.transactions, "serialize_with_witness")
+ else:
+ r += ser_vector(self.transactions)
+ return r
+
+ def __repr__(self):
+ return "BlockTransactions(hash=%064x transactions=%s)" % (self.blockhash, repr(self.transactions))
+
+
+# Objects that correspond to messages on the wire
+class msg_version():
+ command = b"version"
+
+ def __init__(self):
+ self.nVersion = MY_VERSION
+ self.nServices = NODE_NETWORK | NODE_WITNESS
+ self.nTime = int(time.time())
+ self.addrTo = CAddress()
+ self.addrFrom = CAddress()
+ self.nNonce = random.getrandbits(64)
+ self.strSubVer = MY_SUBVERSION
+ self.nStartingHeight = -1
+ self.nRelay = MY_RELAY
+
+ def deserialize(self, f):
+ self.nVersion = struct.unpack("<i", f.read(4))[0]
+ if self.nVersion == 10300:
+ self.nVersion = 300
+ self.nServices = struct.unpack("<Q", f.read(8))[0]
+ self.nTime = struct.unpack("<q", f.read(8))[0]
+ self.addrTo = CAddress()
+ self.addrTo.deserialize(f)
+
+ if self.nVersion >= 106:
+ self.addrFrom = CAddress()
+ self.addrFrom.deserialize(f)
+ self.nNonce = struct.unpack("<Q", f.read(8))[0]
+ self.strSubVer = deser_string(f)
+ else:
+ self.addrFrom = None
+ self.nNonce = None
+ self.strSubVer = None
+ self.nStartingHeight = None
+
+ if self.nVersion >= 209:
+ self.nStartingHeight = struct.unpack("<i", f.read(4))[0]
+ else:
+ self.nStartingHeight = None
+
+ if self.nVersion >= 70001:
+ # Relay field is optional for version 70001 onwards
+ try:
+ self.nRelay = struct.unpack("<b", f.read(1))[0]
+ except:
+ self.nRelay = 0
+ else:
+ self.nRelay = 0
+
+ def serialize(self):
+ r = b""
+ r += struct.pack("<i", self.nVersion)
+ r += struct.pack("<Q", self.nServices)
+ r += struct.pack("<q", self.nTime)
+ r += self.addrTo.serialize()
+ r += self.addrFrom.serialize()
+ r += struct.pack("<Q", self.nNonce)
+ r += ser_string(self.strSubVer)
+ r += struct.pack("<i", self.nStartingHeight)
+ r += struct.pack("<b", self.nRelay)
+ return r
+
+ def __repr__(self):
+ return 'msg_version(nVersion=%i nServices=%i nTime=%s addrTo=%s addrFrom=%s nNonce=0x%016X strSubVer=%s nStartingHeight=%i nRelay=%i)' \
+ % (self.nVersion, self.nServices, time.ctime(self.nTime),
+ repr(self.addrTo), repr(self.addrFrom), self.nNonce,
+ self.strSubVer, self.nStartingHeight, self.nRelay)
+
+
+class msg_verack():
+ command = b"verack"
+
+ def __init__(self):
+ pass
+
+ def deserialize(self, f):
+ pass
+
+ def serialize(self):
+ return b""
+
+ def __repr__(self):
+ return "msg_verack()"
+
+
+class msg_addr():
+ command = b"addr"
+
+ def __init__(self):
+ self.addrs = []
+
+ def deserialize(self, f):
+ self.addrs = deser_vector(f, CAddress)
+
+ def serialize(self):
+ return ser_vector(self.addrs)
+
+ def __repr__(self):
+ return "msg_addr(addrs=%s)" % (repr(self.addrs))
+
+
+class msg_inv():
+ command = b"inv"
+
+ def __init__(self, inv=None):
+ if inv is None:
+ self.inv = []
+ else:
+ self.inv = inv
+
+ def deserialize(self, f):
+ self.inv = deser_vector(f, CInv)
+
+ def serialize(self):
+ return ser_vector(self.inv)
+
+ def __repr__(self):
+ return "msg_inv(inv=%s)" % (repr(self.inv))
+
+
+class msg_getdata():
+ command = b"getdata"
+
+ def __init__(self, inv=None):
+ self.inv = inv if inv != None else []
+
+ def deserialize(self, f):
+ self.inv = deser_vector(f, CInv)
+
+ def serialize(self):
+ return ser_vector(self.inv)
+
+ def __repr__(self):
+ return "msg_getdata(inv=%s)" % (repr(self.inv))
+
+
+class msg_getblocks():
+ command = b"getblocks"
+
+ def __init__(self):
+ self.locator = CBlockLocator()
+ self.hashstop = 0
+
+ def deserialize(self, f):
+ self.locator = CBlockLocator()
+ self.locator.deserialize(f)
+ self.hashstop = deser_uint256(f)
+
+ def serialize(self):
+ r = b""
+ r += self.locator.serialize()
+ r += ser_uint256(self.hashstop)
+ return r
+
+ def __repr__(self):
+ return "msg_getblocks(locator=%s hashstop=%064x)" \
+ % (repr(self.locator), self.hashstop)
+
+
+class msg_tx():
+ command = b"tx"
+
+ def __init__(self, tx=CTransaction()):
+ self.tx = tx
+
+ def deserialize(self, f):
+ self.tx.deserialize(f)
+
+ def serialize(self):
+ return self.tx.serialize_without_witness()
+
+ def __repr__(self):
+ return "msg_tx(tx=%s)" % (repr(self.tx))
+
+class msg_witness_tx(msg_tx):
+
+ def serialize(self):
+ return self.tx.serialize_with_witness()
+
+
+class msg_block():
+ command = b"block"
+
+ def __init__(self, block=None):
+ if block is None:
+ self.block = CBlock()
+ else:
+ self.block = block
+
+ def deserialize(self, f):
+ self.block.deserialize(f)
+
+ def serialize(self):
+ return self.block.serialize()
+
+ def __repr__(self):
+ return "msg_block(block=%s)" % (repr(self.block))
+
+# for cases where a user needs tighter control over what is sent over the wire
+# note that the user must supply the name of the command, and the data
+class msg_generic():
+ def __init__(self, command, data=None):
+ self.command = command
+ self.data = data
+
+ def serialize(self):
+ return self.data
+
+ def __repr__(self):
+ return "msg_generic()"
+
+class msg_witness_block(msg_block):
+
+ def serialize(self):
+ r = self.block.serialize(with_witness=True)
+ return r
+
+class msg_getaddr():
+ command = b"getaddr"
+
+ def __init__(self):
+ pass
+
+ def deserialize(self, f):
+ pass
+
+ def serialize(self):
+ return b""
+
+ def __repr__(self):
+ return "msg_getaddr()"
+
+
+class msg_ping():
+ command = b"ping"
+
+ def __init__(self, nonce=0):
+ self.nonce = nonce
+
+ def deserialize(self, f):
+ self.nonce = struct.unpack("<Q", f.read(8))[0]
+
+ def serialize(self):
+ r = b""
+ r += struct.pack("<Q", self.nonce)
+ return r
+
+ def __repr__(self):
+ return "msg_ping(nonce=%08x)" % self.nonce
+
+
+class msg_pong():
+ command = b"pong"
+
+ def __init__(self, nonce=0):
+ self.nonce = nonce
+
+ def deserialize(self, f):
+ self.nonce = struct.unpack("<Q", f.read(8))[0]
+
+ def serialize(self):
+ r = b""
+ r += struct.pack("<Q", self.nonce)
+ return r
+
+ def __repr__(self):
+ return "msg_pong(nonce=%08x)" % self.nonce
+
+
+class msg_mempool():
+ command = b"mempool"
+
+ def __init__(self):
+ pass
+
+ def deserialize(self, f):
+ pass
+
+ def serialize(self):
+ return b""
+
+ def __repr__(self):
+ return "msg_mempool()"
+
+class msg_sendheaders():
+ command = b"sendheaders"
+
+ def __init__(self):
+ pass
+
+ def deserialize(self, f):
+ pass
+
+ def serialize(self):
+ return b""
+
+ def __repr__(self):
+ return "msg_sendheaders()"
+
+
+# getheaders message has
+# number of entries
+# vector of hashes
+# hash_stop (hash of last desired block header, 0 to get as many as possible)
+class msg_getheaders():
+ command = b"getheaders"
+
+ def __init__(self):
+ self.locator = CBlockLocator()
+ self.hashstop = 0
+
+ def deserialize(self, f):
+ self.locator = CBlockLocator()
+ self.locator.deserialize(f)
+ self.hashstop = deser_uint256(f)
+
+ def serialize(self):
+ r = b""
+ r += self.locator.serialize()
+ r += ser_uint256(self.hashstop)
+ return r
+
+ def __repr__(self):
+ return "msg_getheaders(locator=%s, stop=%064x)" \
+ % (repr(self.locator), self.hashstop)
+
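A short sketch of the getheaders construction described above (known_hash is a hex block hash the requesting side already has):

    gh = msg_getheaders()
    gh.locator.vHave = [int(known_hash, 16)]   # most-recent known block first
    gh.hashstop = 0                            # 0 = send as many headers as allowed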
+
+# headers message has
+# <count> <vector of block headers>
+class msg_headers():
+ command = b"headers"
+
+ def __init__(self, headers=None):
+ self.headers = headers if headers is not None else []
+
+ def deserialize(self, f):
+ # comment in bitcoind indicates these should be deserialized as blocks
+ blocks = deser_vector(f, CBlock)
+ for x in blocks:
+ self.headers.append(CBlockHeader(x))
+
+ def serialize(self):
+ blocks = [CBlock(x) for x in self.headers]
+ return ser_vector(blocks)
+
+ def __repr__(self):
+ return "msg_headers(headers=%s)" % repr(self.headers)
+
+
+class msg_reject():
+ command = b"reject"
+ REJECT_MALFORMED = 1
+
+ def __init__(self):
+ self.message = b""
+ self.code = 0
+ self.reason = b""
+ self.data = 0
+
+ def deserialize(self, f):
+ self.message = deser_string(f)
+ self.code = struct.unpack("<B", f.read(1))[0]
+ self.reason = deser_string(f)
+ if (self.code != self.REJECT_MALFORMED and
+ (self.message == b"block" or self.message == b"tx")):
+ self.data = deser_uint256(f)
+
+ def serialize(self):
+ r = ser_string(self.message)
+ r += struct.pack("<B", self.code)
+ r += ser_string(self.reason)
+ if (self.code != self.REJECT_MALFORMED and
+ (self.message == b"block" or self.message == b"tx")):
+ r += ser_uint256(self.data)
+ return r
+
+ def __repr__(self):
+ return "msg_reject: %s %d %s [%064x]" \
+ % (self.message, self.code, self.reason, self.data)
+
+class msg_feefilter():
+ command = b"feefilter"
+
+ def __init__(self, feerate=0):
+ self.feerate = feerate
+
+ def deserialize(self, f):
+ self.feerate = struct.unpack("<Q", f.read(8))[0]
+
+ def serialize(self):
+ r = b""
+ r += struct.pack("<Q", self.feerate)
+ return r
+
+ def __repr__(self):
+ return "msg_feefilter(feerate=%08x)" % self.feerate
+
+class msg_sendcmpct():
+ command = b"sendcmpct"
+
+ def __init__(self):
+ self.announce = False
+ self.version = 1
+
+ def deserialize(self, f):
+ self.announce = struct.unpack("<?", f.read(1))[0]
+ self.version = struct.unpack("<Q", f.read(8))[0]
+
+ def serialize(self):
+ r = b""
+ r += struct.pack("<?", self.announce)
+ r += struct.pack("<Q", self.version)
+ return r
+
+ def __repr__(self):
+ return "msg_sendcmpct(announce=%s, version=%lu)" % (self.announce, self.version)
+
+class msg_cmpctblock():
+ command = b"cmpctblock"
+
+ def __init__(self, header_and_shortids = None):
+ self.header_and_shortids = header_and_shortids
+
+ def deserialize(self, f):
+ self.header_and_shortids = P2PHeaderAndShortIDs()
+ self.header_and_shortids.deserialize(f)
+
+ def serialize(self):
+ r = b""
+ r += self.header_and_shortids.serialize()
+ return r
+
+ def __repr__(self):
+ return "msg_cmpctblock(HeaderAndShortIDs=%s)" % repr(self.header_and_shortids)
+
+class msg_getblocktxn():
+ command = b"getblocktxn"
+
+ def __init__(self):
+ self.block_txn_request = None
+
+ def deserialize(self, f):
+ self.block_txn_request = BlockTransactionsRequest()
+ self.block_txn_request.deserialize(f)
+
+ def serialize(self):
+ r = b""
+ r += self.block_txn_request.serialize()
+ return r
+
+ def __repr__(self):
+ return "msg_getblocktxn(block_txn_request=%s)" % (repr(self.block_txn_request))
+
+class msg_blocktxn():
+ command = b"blocktxn"
+
+ def __init__(self):
+ self.block_transactions = BlockTransactions()
+
+ def deserialize(self, f):
+ self.block_transactions.deserialize(f)
+
+ def serialize(self):
+ r = b""
+ r += self.block_transactions.serialize()
+ return r
+
+ def __repr__(self):
+ return "msg_blocktxn(block_transactions=%s)" % (repr(self.block_transactions))
+
+class msg_witness_blocktxn(msg_blocktxn):
+ def serialize(self):
+ r = b""
+ r += self.block_transactions.serialize(with_witness=True)
+ return r
diff --git a/test/functional/test_framework/mininode.py b/test/functional/test_framework/mininode.py
index 688347a68f..9e92a70da1 100755
--- a/test/functional/test_framework/mininode.py
+++ b/test/functional/test_framework/mininode.py
@@ -9,1477 +9,248 @@
This python code was modified from ArtForz' public domain half-a-node, as
found in the mini-node branch of http://github.com/jgarzik/pynode.
-NodeConn: an object which manages p2p connectivity to a bitcoin node
-NodeConnCB: a base class that describes the interface for receiving
- callbacks with network messages from a NodeConn
-CBlock, CTransaction, CBlockHeader, CTxIn, CTxOut, etc....:
- data structures that should map to corresponding structures in
- bitcoin/primitives
-msg_block, msg_tx, msg_headers, etc.:
- data structures that represent network messages
-ser_*, deser_*: functions that handle serialization/deserialization
-"""
-
+P2PConnection: A low-level connection object to a node's P2P interface
+P2PInterface: A high-level interface object for communicating to a node over P2P"""
import asyncore
-from codecs import encode
from collections import defaultdict
-import copy
-import hashlib
from io import BytesIO
import logging
-import random
import socket
import struct
import sys
-import time
from threading import RLock, Thread
-from test_framework.siphash import siphash256
-from test_framework.util import hex_str_to_bytes, bytes_to_hex_str
-
-BIP0031_VERSION = 60000
-MY_VERSION = 70014 # past bip-31 for ping/pong
-MY_SUBVERSION = b"/python-mininode-tester:0.0.3/"
-MY_RELAY = 1 # from version 70001 onwards, fRelay should be appended to version messages (BIP37)
-
-MAX_INV_SZ = 50000
-MAX_BLOCK_BASE_SIZE = 1000000
-
-COIN = 100000000 # 1 btc in satoshis
-
-NODE_NETWORK = (1 << 0)
-NODE_GETUTXO = (1 << 1)
-NODE_BLOOM = (1 << 2)
-NODE_WITNESS = (1 << 3)
+from test_framework.messages import *
+from test_framework.util import wait_until
logger = logging.getLogger("TestFramework.mininode")
-# Keep our own socket map for asyncore, so that we can track disconnects
-# ourselves (to workaround an issue with closing an asyncore socket when
-# using select)
-mininode_socket_map = dict()
-
-# One lock for synchronizing all data access between the networking thread (see
-# NetworkThread below) and the thread running the test logic. For simplicity,
-# NodeConn acquires this lock whenever delivering a message to a NodeConnCB,
-# and whenever adding anything to the send buffer (in send_message()). This
-# lock should be acquired in the thread running the test logic to synchronize
-# access to any data shared with the NodeConnCB or NodeConn.
-mininode_lock = RLock()
-
-# Serialization/deserialization tools
-def sha256(s):
- return hashlib.new('sha256', s).digest()
-
-def ripemd160(s):
- return hashlib.new('ripemd160', s).digest()
-
-def hash256(s):
- return sha256(sha256(s))
-
-def ser_compact_size(l):
- r = b""
- if l < 253:
- r = struct.pack("B", l)
- elif l < 0x10000:
- r = struct.pack("<BH", 253, l)
- elif l < 0x100000000:
- r = struct.pack("<BI", 254, l)
- else:
- r = struct.pack("<BQ", 255, l)
- return r
-
-def deser_compact_size(f):
- nit = struct.unpack("<B", f.read(1))[0]
- if nit == 253:
- nit = struct.unpack("<H", f.read(2))[0]
- elif nit == 254:
- nit = struct.unpack("<I", f.read(4))[0]
- elif nit == 255:
- nit = struct.unpack("<Q", f.read(8))[0]
- return nit
-
-def deser_string(f):
- nit = deser_compact_size(f)
- return f.read(nit)
-
-def ser_string(s):
- return ser_compact_size(len(s)) + s
-
-def deser_uint256(f):
- r = 0
- for i in range(8):
- t = struct.unpack("<I", f.read(4))[0]
- r += t << (i * 32)
- return r
-
-
-def ser_uint256(u):
- rs = b""
- for i in range(8):
- rs += struct.pack("<I", u & 0xFFFFFFFF)
- u >>= 32
- return rs
-
-
-def uint256_from_str(s):
- r = 0
- t = struct.unpack("<IIIIIIII", s[:32])
- for i in range(8):
- r += t[i] << (i * 32)
- return r
-
-
-def uint256_from_compact(c):
- nbytes = (c >> 24) & 0xFF
- v = (c & 0xFFFFFF) << (8 * (nbytes - 3))
- return v
-
-
-def deser_vector(f, c):
- nit = deser_compact_size(f)
- r = []
- for i in range(nit):
- t = c()
- t.deserialize(f)
- r.append(t)
- return r
-
-
-# ser_function_name: Allow for an alternate serialization function on the
-# entries in the vector (we use this for serializing the vector of transactions
-# for a witness block).
-def ser_vector(l, ser_function_name=None):
- r = ser_compact_size(len(l))
- for i in l:
- if ser_function_name:
- r += getattr(i, ser_function_name)()
- else:
- r += i.serialize()
- return r
-
-
-def deser_uint256_vector(f):
- nit = deser_compact_size(f)
- r = []
- for i in range(nit):
- t = deser_uint256(f)
- r.append(t)
- return r
-
-
-def ser_uint256_vector(l):
- r = ser_compact_size(len(l))
- for i in l:
- r += ser_uint256(i)
- return r
-
-
-def deser_string_vector(f):
- nit = deser_compact_size(f)
- r = []
- for i in range(nit):
- t = deser_string(f)
- r.append(t)
- return r
-
-
-def ser_string_vector(l):
- r = ser_compact_size(len(l))
- for sv in l:
- r += ser_string(sv)
- return r
-
-
-def deser_int_vector(f):
- nit = deser_compact_size(f)
- r = []
- for i in range(nit):
- t = struct.unpack("<i", f.read(4))[0]
- r.append(t)
- return r
-
-
-def ser_int_vector(l):
- r = ser_compact_size(len(l))
- for i in l:
- r += struct.pack("<i", i)
- return r
-
-# Deserialize from a hex string representation (eg from RPC)
-def FromHex(obj, hex_string):
- obj.deserialize(BytesIO(hex_str_to_bytes(hex_string)))
- return obj
-
-# Convert a binary-serializable object to hex (eg for submission via RPC)
-def ToHex(obj):
- return bytes_to_hex_str(obj.serialize())
-
-# Objects that map to bitcoind objects, which can be serialized/deserialized
-
-class CAddress(object):
- def __init__(self):
- self.nServices = 1
- self.pchReserved = b"\x00" * 10 + b"\xff" * 2
- self.ip = "0.0.0.0"
- self.port = 0
-
- def deserialize(self, f):
- self.nServices = struct.unpack("<Q", f.read(8))[0]
- self.pchReserved = f.read(12)
- self.ip = socket.inet_ntoa(f.read(4))
- self.port = struct.unpack(">H", f.read(2))[0]
-
- def serialize(self):
- r = b""
- r += struct.pack("<Q", self.nServices)
- r += self.pchReserved
- r += socket.inet_aton(self.ip)
- r += struct.pack(">H", self.port)
- return r
-
- def __repr__(self):
- return "CAddress(nServices=%i ip=%s port=%i)" % (self.nServices,
- self.ip, self.port)
-
-MSG_WITNESS_FLAG = 1<<30
-
-class CInv(object):
- typemap = {
- 0: "Error",
- 1: "TX",
- 2: "Block",
- 1|MSG_WITNESS_FLAG: "WitnessTx",
- 2|MSG_WITNESS_FLAG : "WitnessBlock",
- 4: "CompactBlock"
- }
-
- def __init__(self, t=0, h=0):
- self.type = t
- self.hash = h
-
- def deserialize(self, f):
- self.type = struct.unpack("<i", f.read(4))[0]
- self.hash = deser_uint256(f)
-
- def serialize(self):
- r = b""
- r += struct.pack("<i", self.type)
- r += ser_uint256(self.hash)
- return r
-
- def __repr__(self):
- return "CInv(type=%s hash=%064x)" \
- % (self.typemap[self.type], self.hash)
-
-
-class CBlockLocator(object):
- def __init__(self):
- self.nVersion = MY_VERSION
- self.vHave = []
-
- def deserialize(self, f):
- self.nVersion = struct.unpack("<i", f.read(4))[0]
- self.vHave = deser_uint256_vector(f)
-
- def serialize(self):
- r = b""
- r += struct.pack("<i", self.nVersion)
- r += ser_uint256_vector(self.vHave)
- return r
-
- def __repr__(self):
- return "CBlockLocator(nVersion=%i vHave=%s)" \
- % (self.nVersion, repr(self.vHave))
-
-
-class COutPoint(object):
- def __init__(self, hash=0, n=0):
- self.hash = hash
- self.n = n
-
- def deserialize(self, f):
- self.hash = deser_uint256(f)
- self.n = struct.unpack("<I", f.read(4))[0]
-
- def serialize(self):
- r = b""
- r += ser_uint256(self.hash)
- r += struct.pack("<I", self.n)
- return r
-
- def __repr__(self):
- return "COutPoint(hash=%064x n=%i)" % (self.hash, self.n)
-
-
-class CTxIn(object):
- def __init__(self, outpoint=None, scriptSig=b"", nSequence=0):
- if outpoint is None:
- self.prevout = COutPoint()
- else:
- self.prevout = outpoint
- self.scriptSig = scriptSig
- self.nSequence = nSequence
-
- def deserialize(self, f):
- self.prevout = COutPoint()
- self.prevout.deserialize(f)
- self.scriptSig = deser_string(f)
- self.nSequence = struct.unpack("<I", f.read(4))[0]
-
- def serialize(self):
- r = b""
- r += self.prevout.serialize()
- r += ser_string(self.scriptSig)
- r += struct.pack("<I", self.nSequence)
- return r
-
- def __repr__(self):
- return "CTxIn(prevout=%s scriptSig=%s nSequence=%i)" \
- % (repr(self.prevout), bytes_to_hex_str(self.scriptSig),
- self.nSequence)
-
-
-class CTxOut(object):
- def __init__(self, nValue=0, scriptPubKey=b""):
- self.nValue = nValue
- self.scriptPubKey = scriptPubKey
-
- def deserialize(self, f):
- self.nValue = struct.unpack("<q", f.read(8))[0]
- self.scriptPubKey = deser_string(f)
-
- def serialize(self):
- r = b""
- r += struct.pack("<q", self.nValue)
- r += ser_string(self.scriptPubKey)
- return r
-
- def __repr__(self):
- return "CTxOut(nValue=%i.%08i scriptPubKey=%s)" \
- % (self.nValue // COIN, self.nValue % COIN,
- bytes_to_hex_str(self.scriptPubKey))
-
-
-class CScriptWitness(object):
- def __init__(self):
- # stack is a vector of strings
- self.stack = []
-
- def __repr__(self):
- return "CScriptWitness(%s)" % \
- (",".join([bytes_to_hex_str(x) for x in self.stack]))
-
- def is_null(self):
- if self.stack:
- return False
- return True
-
-
-class CTxInWitness(object):
- def __init__(self):
- self.scriptWitness = CScriptWitness()
-
- def deserialize(self, f):
- self.scriptWitness.stack = deser_string_vector(f)
-
- def serialize(self):
- return ser_string_vector(self.scriptWitness.stack)
-
- def __repr__(self):
- return repr(self.scriptWitness)
-
- def is_null(self):
- return self.scriptWitness.is_null()
-
-
-class CTxWitness(object):
- def __init__(self):
- self.vtxinwit = []
-
- def deserialize(self, f):
- for i in range(len(self.vtxinwit)):
- self.vtxinwit[i].deserialize(f)
-
- def serialize(self):
- r = b""
- # This is different than the usual vector serialization --
- # we omit the length of the vector, which is required to be
- # the same length as the transaction's vin vector.
- for x in self.vtxinwit:
- r += x.serialize()
- return r
-
- def __repr__(self):
- return "CTxWitness(%s)" % \
- (';'.join([repr(x) for x in self.vtxinwit]))
-
- def is_null(self):
- for x in self.vtxinwit:
- if not x.is_null():
- return False
- return True
-
-
-class CTransaction(object):
- def __init__(self, tx=None):
- if tx is None:
- self.nVersion = 1
- self.vin = []
- self.vout = []
- self.wit = CTxWitness()
- self.nLockTime = 0
- self.sha256 = None
- self.hash = None
- else:
- self.nVersion = tx.nVersion
- self.vin = copy.deepcopy(tx.vin)
- self.vout = copy.deepcopy(tx.vout)
- self.nLockTime = tx.nLockTime
- self.sha256 = tx.sha256
- self.hash = tx.hash
- self.wit = copy.deepcopy(tx.wit)
-
- def deserialize(self, f):
- self.nVersion = struct.unpack("<i", f.read(4))[0]
- self.vin = deser_vector(f, CTxIn)
- flags = 0
- if len(self.vin) == 0:
- flags = struct.unpack("<B", f.read(1))[0]
- # Not sure why flags can't be zero, but this
- # matches the implementation in bitcoind
- if (flags != 0):
- self.vin = deser_vector(f, CTxIn)
- self.vout = deser_vector(f, CTxOut)
- else:
- self.vout = deser_vector(f, CTxOut)
- if flags != 0:
- self.wit.vtxinwit = [CTxInWitness() for i in range(len(self.vin))]
- self.wit.deserialize(f)
- self.nLockTime = struct.unpack("<I", f.read(4))[0]
- self.sha256 = None
- self.hash = None
-
- def serialize_without_witness(self):
- r = b""
- r += struct.pack("<i", self.nVersion)
- r += ser_vector(self.vin)
- r += ser_vector(self.vout)
- r += struct.pack("<I", self.nLockTime)
- return r
-
- # Only serialize with witness when explicitly called for
- def serialize_with_witness(self):
- flags = 0
- if not self.wit.is_null():
- flags |= 1
- r = b""
- r += struct.pack("<i", self.nVersion)
- if flags:
- dummy = []
- r += ser_vector(dummy)
- r += struct.pack("<B", flags)
- r += ser_vector(self.vin)
- r += ser_vector(self.vout)
- if flags & 1:
- if (len(self.wit.vtxinwit) != len(self.vin)):
- # vtxinwit must have the same length as vin
- self.wit.vtxinwit = self.wit.vtxinwit[:len(self.vin)]
- for i in range(len(self.wit.vtxinwit), len(self.vin)):
- self.wit.vtxinwit.append(CTxInWitness())
- r += self.wit.serialize()
- r += struct.pack("<I", self.nLockTime)
- return r
-
- # Regular serialization is without witness -- must explicitly
- # call serialize_with_witness to include witness data.
- def serialize(self):
- return self.serialize_without_witness()
-
- # Recalculate the txid (transaction hash without witness)
- def rehash(self):
- self.sha256 = None
- self.calc_sha256()
-
- # We will only cache the serialization without witness in
- # self.sha256 and self.hash -- those are expected to be the txid.
- def calc_sha256(self, with_witness=False):
- if with_witness:
- # Don't cache the result, just return it
- return uint256_from_str(hash256(self.serialize_with_witness()))
-
- if self.sha256 is None:
- self.sha256 = uint256_from_str(hash256(self.serialize_without_witness()))
- self.hash = encode(hash256(self.serialize())[::-1], 'hex_codec').decode('ascii')
-
- def is_valid(self):
- self.calc_sha256()
- for tout in self.vout:
- if tout.nValue < 0 or tout.nValue > 21000000 * COIN:
- return False
- return True
-
- def __repr__(self):
- return "CTransaction(nVersion=%i vin=%s vout=%s wit=%s nLockTime=%i)" \
- % (self.nVersion, repr(self.vin), repr(self.vout), repr(self.wit), self.nLockTime)
-
-
-class CBlockHeader(object):
- def __init__(self, header=None):
- if header is None:
- self.set_null()
- else:
- self.nVersion = header.nVersion
- self.hashPrevBlock = header.hashPrevBlock
- self.hashMerkleRoot = header.hashMerkleRoot
- self.nTime = header.nTime
- self.nBits = header.nBits
- self.nNonce = header.nNonce
- self.sha256 = header.sha256
- self.hash = header.hash
- self.calc_sha256()
-
- def set_null(self):
- self.nVersion = 1
- self.hashPrevBlock = 0
- self.hashMerkleRoot = 0
- self.nTime = 0
- self.nBits = 0
- self.nNonce = 0
- self.sha256 = None
- self.hash = None
-
- def deserialize(self, f):
- self.nVersion = struct.unpack("<i", f.read(4))[0]
- self.hashPrevBlock = deser_uint256(f)
- self.hashMerkleRoot = deser_uint256(f)
- self.nTime = struct.unpack("<I", f.read(4))[0]
- self.nBits = struct.unpack("<I", f.read(4))[0]
- self.nNonce = struct.unpack("<I", f.read(4))[0]
- self.sha256 = None
- self.hash = None
-
- def serialize(self):
- r = b""
- r += struct.pack("<i", self.nVersion)
- r += ser_uint256(self.hashPrevBlock)
- r += ser_uint256(self.hashMerkleRoot)
- r += struct.pack("<I", self.nTime)
- r += struct.pack("<I", self.nBits)
- r += struct.pack("<I", self.nNonce)
- return r
-
- def calc_sha256(self):
- if self.sha256 is None:
- r = b""
- r += struct.pack("<i", self.nVersion)
- r += ser_uint256(self.hashPrevBlock)
- r += ser_uint256(self.hashMerkleRoot)
- r += struct.pack("<I", self.nTime)
- r += struct.pack("<I", self.nBits)
- r += struct.pack("<I", self.nNonce)
- self.sha256 = uint256_from_str(hash256(r))
- self.hash = encode(hash256(r)[::-1], 'hex_codec').decode('ascii')
-
- def rehash(self):
- self.sha256 = None
- self.calc_sha256()
- return self.sha256
-
- def __repr__(self):
- return "CBlockHeader(nVersion=%i hashPrevBlock=%064x hashMerkleRoot=%064x nTime=%s nBits=%08x nNonce=%08x)" \
- % (self.nVersion, self.hashPrevBlock, self.hashMerkleRoot,
- time.ctime(self.nTime), self.nBits, self.nNonce)
-
-
-class CBlock(CBlockHeader):
- def __init__(self, header=None):
- super(CBlock, self).__init__(header)
- self.vtx = []
-
- def deserialize(self, f):
- super(CBlock, self).deserialize(f)
- self.vtx = deser_vector(f, CTransaction)
-
- def serialize(self, with_witness=False):
- r = b""
- r += super(CBlock, self).serialize()
- if with_witness:
- r += ser_vector(self.vtx, "serialize_with_witness")
- else:
- r += ser_vector(self.vtx)
- return r
-
- # Calculate the merkle root given a vector of transaction hashes
- @classmethod
- def get_merkle_root(cls, hashes):
- while len(hashes) > 1:
- newhashes = []
- for i in range(0, len(hashes), 2):
- i2 = min(i+1, len(hashes)-1)
- newhashes.append(hash256(hashes[i] + hashes[i2]))
- hashes = newhashes
- return uint256_from_str(hashes[0])
-
- def calc_merkle_root(self):
- hashes = []
- for tx in self.vtx:
- tx.calc_sha256()
- hashes.append(ser_uint256(tx.sha256))
- return self.get_merkle_root(hashes)
-
- def calc_witness_merkle_root(self):
- # For witness root purposes, the hash of the
- # coinbase, with witness, is defined to be 0...0
- hashes = [ser_uint256(0)]
-
- for tx in self.vtx[1:]:
- # Calculate the hashes with witness data
- hashes.append(ser_uint256(tx.calc_sha256(True)))
-
- return self.get_merkle_root(hashes)
-
- def is_valid(self):
- self.calc_sha256()
- target = uint256_from_compact(self.nBits)
- if self.sha256 > target:
- return False
- for tx in self.vtx:
- if not tx.is_valid():
- return False
- if self.calc_merkle_root() != self.hashMerkleRoot:
- return False
- return True
-
- def solve(self):
- self.rehash()
- target = uint256_from_compact(self.nBits)
- while self.sha256 > target:
- self.nNonce += 1
- self.rehash()
-
- def __repr__(self):
- return "CBlock(nVersion=%i hashPrevBlock=%064x hashMerkleRoot=%064x nTime=%s nBits=%08x nNonce=%08x vtx=%s)" \
- % (self.nVersion, self.hashPrevBlock, self.hashMerkleRoot,
- time.ctime(self.nTime), self.nBits, self.nNonce, repr(self.vtx))
-
-
-class CUnsignedAlert(object):
- def __init__(self):
- self.nVersion = 1
- self.nRelayUntil = 0
- self.nExpiration = 0
- self.nID = 0
- self.nCancel = 0
- self.setCancel = []
- self.nMinVer = 0
- self.nMaxVer = 0
- self.setSubVer = []
- self.nPriority = 0
- self.strComment = b""
- self.strStatusBar = b""
- self.strReserved = b""
-
- def deserialize(self, f):
- self.nVersion = struct.unpack("<i", f.read(4))[0]
- self.nRelayUntil = struct.unpack("<q", f.read(8))[0]
- self.nExpiration = struct.unpack("<q", f.read(8))[0]
- self.nID = struct.unpack("<i", f.read(4))[0]
- self.nCancel = struct.unpack("<i", f.read(4))[0]
- self.setCancel = deser_int_vector(f)
- self.nMinVer = struct.unpack("<i", f.read(4))[0]
- self.nMaxVer = struct.unpack("<i", f.read(4))[0]
- self.setSubVer = deser_string_vector(f)
- self.nPriority = struct.unpack("<i", f.read(4))[0]
- self.strComment = deser_string(f)
- self.strStatusBar = deser_string(f)
- self.strReserved = deser_string(f)
-
- def serialize(self):
- r = b""
- r += struct.pack("<i", self.nVersion)
- r += struct.pack("<q", self.nRelayUntil)
- r += struct.pack("<q", self.nExpiration)
- r += struct.pack("<i", self.nID)
- r += struct.pack("<i", self.nCancel)
- r += ser_int_vector(self.setCancel)
- r += struct.pack("<i", self.nMinVer)
- r += struct.pack("<i", self.nMaxVer)
- r += ser_string_vector(self.setSubVer)
- r += struct.pack("<i", self.nPriority)
- r += ser_string(self.strComment)
- r += ser_string(self.strStatusBar)
- r += ser_string(self.strReserved)
- return r
-
- def __repr__(self):
- return "CUnsignedAlert(nVersion %d, nRelayUntil %d, nExpiration %d, nID %d, nCancel %d, nMinVer %d, nMaxVer %d, nPriority %d, strComment %s, strStatusBar %s, strReserved %s)" \
- % (self.nVersion, self.nRelayUntil, self.nExpiration, self.nID,
- self.nCancel, self.nMinVer, self.nMaxVer, self.nPriority,
- self.strComment, self.strStatusBar, self.strReserved)
-
-
-class CAlert(object):
- def __init__(self):
- self.vchMsg = b""
- self.vchSig = b""
-
- def deserialize(self, f):
- self.vchMsg = deser_string(f)
- self.vchSig = deser_string(f)
-
- def serialize(self):
- r = b""
- r += ser_string(self.vchMsg)
- r += ser_string(self.vchSig)
- return r
-
- def __repr__(self):
- return "CAlert(vchMsg.sz %d, vchSig.sz %d)" \
- % (len(self.vchMsg), len(self.vchSig))
-
-
-class PrefilledTransaction(object):
- def __init__(self, index=0, tx = None):
- self.index = index
- self.tx = tx
-
- def deserialize(self, f):
- self.index = deser_compact_size(f)
- self.tx = CTransaction()
- self.tx.deserialize(f)
-
- def serialize(self, with_witness=False):
- r = b""
- r += ser_compact_size(self.index)
- if with_witness:
- r += self.tx.serialize_with_witness()
- else:
- r += self.tx.serialize_without_witness()
- return r
-
- def serialize_with_witness(self):
- return self.serialize(with_witness=True)
-
- def __repr__(self):
- return "PrefilledTransaction(index=%d, tx=%s)" % (self.index, repr(self.tx))
-
-# This is what we send on the wire, in a cmpctblock message.
-class P2PHeaderAndShortIDs(object):
- def __init__(self):
- self.header = CBlockHeader()
- self.nonce = 0
- self.shortids_length = 0
- self.shortids = []
- self.prefilled_txn_length = 0
- self.prefilled_txn = []
-
- def deserialize(self, f):
- self.header.deserialize(f)
- self.nonce = struct.unpack("<Q", f.read(8))[0]
- self.shortids_length = deser_compact_size(f)
- for i in range(self.shortids_length):
- # shortids are defined to be 6 bytes in the spec, so append
- # two zero bytes and read it in as an 8-byte number
- self.shortids.append(struct.unpack("<Q", f.read(6) + b'\x00\x00')[0])
- self.prefilled_txn = deser_vector(f, PrefilledTransaction)
- self.prefilled_txn_length = len(self.prefilled_txn)
-
- # When using version 2 compact blocks, we must serialize with_witness.
- def serialize(self, with_witness=False):
- r = b""
- r += self.header.serialize()
- r += struct.pack("<Q", self.nonce)
- r += ser_compact_size(self.shortids_length)
- for x in self.shortids:
- # We only want the first 6 bytes
- r += struct.pack("<Q", x)[0:6]
- if with_witness:
- r += ser_vector(self.prefilled_txn, "serialize_with_witness")
- else:
- r += ser_vector(self.prefilled_txn)
- return r
-
- def __repr__(self):
- return "P2PHeaderAndShortIDs(header=%s, nonce=%d, shortids_length=%d, shortids=%s, prefilled_txn_length=%d, prefilledtxn=%s" % (repr(self.header), self.nonce, self.shortids_length, repr(self.shortids), self.prefilled_txn_length, repr(self.prefilled_txn))
-
-# P2P version of the above that will use witness serialization (for compact
-# block version 2)
-class P2PHeaderAndShortWitnessIDs(P2PHeaderAndShortIDs):
- def serialize(self):
- return super(P2PHeaderAndShortWitnessIDs, self).serialize(with_witness=True)
-
-# Calculate the BIP 152-compact blocks shortid for a given transaction hash
-def calculate_shortid(k0, k1, tx_hash):
- expected_shortid = siphash256(k0, k1, tx_hash)
- expected_shortid &= 0x0000ffffffffffff
- return expected_shortid
-
-# This version gets rid of the array lengths, and reinterprets the differential
-# encoding into indices that can be used for lookup.
-class HeaderAndShortIDs(object):
- def __init__(self, p2pheaders_and_shortids = None):
- self.header = CBlockHeader()
- self.nonce = 0
- self.shortids = []
- self.prefilled_txn = []
- self.use_witness = False
-
- if p2pheaders_and_shortids != None:
- self.header = p2pheaders_and_shortids.header
- self.nonce = p2pheaders_and_shortids.nonce
- self.shortids = p2pheaders_and_shortids.shortids
- last_index = -1
- for x in p2pheaders_and_shortids.prefilled_txn:
- self.prefilled_txn.append(PrefilledTransaction(x.index + last_index + 1, x.tx))
- last_index = self.prefilled_txn[-1].index
-
- def to_p2p(self):
- if self.use_witness:
- ret = P2PHeaderAndShortWitnessIDs()
- else:
- ret = P2PHeaderAndShortIDs()
- ret.header = self.header
- ret.nonce = self.nonce
- ret.shortids_length = len(self.shortids)
- ret.shortids = self.shortids
- ret.prefilled_txn_length = len(self.prefilled_txn)
- ret.prefilled_txn = []
- last_index = -1
- for x in self.prefilled_txn:
- ret.prefilled_txn.append(PrefilledTransaction(x.index - last_index - 1, x.tx))
- last_index = x.index
- return ret
-
- def get_siphash_keys(self):
- header_nonce = self.header.serialize()
- header_nonce += struct.pack("<Q", self.nonce)
- hash_header_nonce_as_str = sha256(header_nonce)
- key0 = struct.unpack("<Q", hash_header_nonce_as_str[0:8])[0]
- key1 = struct.unpack("<Q", hash_header_nonce_as_str[8:16])[0]
- return [ key0, key1 ]
-
- # Version 2 compact blocks use wtxid in shortids (rather than txid)
- def initialize_from_block(self, block, nonce=0, prefill_list = [0], use_witness = False):
- self.header = CBlockHeader(block)
- self.nonce = nonce
- self.prefilled_txn = [ PrefilledTransaction(i, block.vtx[i]) for i in prefill_list ]
- self.shortids = []
- self.use_witness = use_witness
- [k0, k1] = self.get_siphash_keys()
- for i in range(len(block.vtx)):
- if i not in prefill_list:
- tx_hash = block.vtx[i].sha256
- if use_witness:
- tx_hash = block.vtx[i].calc_sha256(with_witness=True)
- self.shortids.append(calculate_shortid(k0, k1, tx_hash))
-
- def __repr__(self):
- return "HeaderAndShortIDs(header=%s, nonce=%d, shortids=%s, prefilledtxn=%s" % (repr(self.header), self.nonce, repr(self.shortids), repr(self.prefilled_txn))
-
-
-class BlockTransactionsRequest(object):
-
- def __init__(self, blockhash=0, indexes = None):
- self.blockhash = blockhash
- self.indexes = indexes if indexes != None else []
-
- def deserialize(self, f):
- self.blockhash = deser_uint256(f)
- indexes_length = deser_compact_size(f)
- for i in range(indexes_length):
- self.indexes.append(deser_compact_size(f))
-
- def serialize(self):
- r = b""
- r += ser_uint256(self.blockhash)
- r += ser_compact_size(len(self.indexes))
- for x in self.indexes:
- r += ser_compact_size(x)
- return r
-
- # helper to set the differentially encoded indexes from absolute ones
- def from_absolute(self, absolute_indexes):
- self.indexes = []
- last_index = -1
- for x in absolute_indexes:
- self.indexes.append(x-last_index-1)
- last_index = x
-
- def to_absolute(self):
- absolute_indexes = []
- last_index = -1
- for x in self.indexes:
- absolute_indexes.append(x+last_index+1)
- last_index = absolute_indexes[-1]
- return absolute_indexes
-
- def __repr__(self):
- return "BlockTransactionsRequest(hash=%064x indexes=%s)" % (self.blockhash, repr(self.indexes))
-
-
-class BlockTransactions(object):
-
- def __init__(self, blockhash=0, transactions = None):
- self.blockhash = blockhash
- self.transactions = transactions if transactions != None else []
-
- def deserialize(self, f):
- self.blockhash = deser_uint256(f)
- self.transactions = deser_vector(f, CTransaction)
-
- def serialize(self, with_witness=False):
- r = b""
- r += ser_uint256(self.blockhash)
- if with_witness:
- r += ser_vector(self.transactions, "serialize_with_witness")
- else:
- r += ser_vector(self.transactions)
- return r
-
- def __repr__(self):
- return "BlockTransactions(hash=%064x transactions=%s)" % (self.blockhash, repr(self.transactions))
-
-
-# Objects that correspond to messages on the wire
-class msg_version(object):
- command = b"version"
-
- def __init__(self):
- self.nVersion = MY_VERSION
- self.nServices = 1
- self.nTime = int(time.time())
- self.addrTo = CAddress()
- self.addrFrom = CAddress()
- self.nNonce = random.getrandbits(64)
- self.strSubVer = MY_SUBVERSION
- self.nStartingHeight = -1
- self.nRelay = MY_RELAY
-
- def deserialize(self, f):
- self.nVersion = struct.unpack("<i", f.read(4))[0]
- if self.nVersion == 10300:
- self.nVersion = 300
- self.nServices = struct.unpack("<Q", f.read(8))[0]
- self.nTime = struct.unpack("<q", f.read(8))[0]
- self.addrTo = CAddress()
- self.addrTo.deserialize(f)
-
- if self.nVersion >= 106:
- self.addrFrom = CAddress()
- self.addrFrom.deserialize(f)
- self.nNonce = struct.unpack("<Q", f.read(8))[0]
- self.strSubVer = deser_string(f)
- else:
- self.addrFrom = None
- self.nNonce = None
- self.strSubVer = None
- self.nStartingHeight = None
-
- if self.nVersion >= 209:
- self.nStartingHeight = struct.unpack("<i", f.read(4))[0]
- else:
- self.nStartingHeight = None
-
- if self.nVersion >= 70001:
- # Relay field is optional for version 70001 onwards
- try:
- self.nRelay = struct.unpack("<b", f.read(1))[0]
- except:
- self.nRelay = 0
- else:
- self.nRelay = 0
-
- def serialize(self):
- r = b""
- r += struct.pack("<i", self.nVersion)
- r += struct.pack("<Q", self.nServices)
- r += struct.pack("<q", self.nTime)
- r += self.addrTo.serialize()
- r += self.addrFrom.serialize()
- r += struct.pack("<Q", self.nNonce)
- r += ser_string(self.strSubVer)
- r += struct.pack("<i", self.nStartingHeight)
- r += struct.pack("<b", self.nRelay)
- return r
-
- def __repr__(self):
- return 'msg_version(nVersion=%i nServices=%i nTime=%s addrTo=%s addrFrom=%s nNonce=0x%016X strSubVer=%s nStartingHeight=%i nRelay=%i)' \
- % (self.nVersion, self.nServices, time.ctime(self.nTime),
- repr(self.addrTo), repr(self.addrFrom), self.nNonce,
- self.strSubVer, self.nStartingHeight, self.nRelay)
-
-
-class msg_verack(object):
- command = b"verack"
-
- def __init__(self):
- pass
-
- def deserialize(self, f):
- pass
-
- def serialize(self):
- return b""
-
- def __repr__(self):
- return "msg_verack()"
-
-
-class msg_addr(object):
- command = b"addr"
+MESSAGEMAP = {
+ b"addr": msg_addr,
+ b"block": msg_block,
+ b"blocktxn": msg_blocktxn,
+ b"cmpctblock": msg_cmpctblock,
+ b"feefilter": msg_feefilter,
+ b"getaddr": msg_getaddr,
+ b"getblocks": msg_getblocks,
+ b"getblocktxn": msg_getblocktxn,
+ b"getdata": msg_getdata,
+ b"getheaders": msg_getheaders,
+ b"headers": msg_headers,
+ b"inv": msg_inv,
+ b"mempool": msg_mempool,
+ b"ping": msg_ping,
+ b"pong": msg_pong,
+ b"reject": msg_reject,
+ b"sendcmpct": msg_sendcmpct,
+ b"sendheaders": msg_sendheaders,
+ b"tx": msg_tx,
+ b"verack": msg_verack,
+ b"version": msg_version,
+}
+
+MAGIC_BYTES = {
+ "mainnet": b"\xf9\xbe\xb4\xd9", # mainnet
+ "testnet3": b"\x0b\x11\x09\x07", # testnet3
+ "regtest": b"\xfa\xbf\xb5\xda", # regtest
+}
+
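A hedged sketch of how the MESSAGEMAP lookup is meant to be used when a payload arrives (the command/payload pair here is fabricated for illustration, using the module's existing struct and BytesIO imports):

    command, payload = b"ping", struct.pack("<Q", 17)   # as if read off the wire
    msg = MESSAGEMAP[command]()                          # -> a fresh msg_ping instance
    msg.deserialize(BytesIO(payload))                    # msg.nonce == 17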
+class P2PConnection(asyncore.dispatcher):
+ """A low-level connection object to a node's P2P interface.
+
+ This class is responsible for:
+
+ - opening and closing the TCP connection to the node
+ - reading bytes from and writing bytes to the socket
+ - deserializing and serializing the P2P message header
+ - logging messages as they are sent and received
+
+ This class contains no logic for handling the P2P message payloads. It must be
+ sub-classed and the on_message() callback overridden."""
def __init__(self):
- self.addrs = []
-
- def deserialize(self, f):
- self.addrs = deser_vector(f, CAddress)
-
- def serialize(self):
- return ser_vector(self.addrs)
-
- def __repr__(self):
- return "msg_addr(addrs=%s)" % (repr(self.addrs))
-
-
-class msg_alert(object):
- command = b"alert"
-
- def __init__(self):
- self.alert = CAlert()
-
- def deserialize(self, f):
- self.alert = CAlert()
- self.alert.deserialize(f)
-
- def serialize(self):
- r = b""
- r += self.alert.serialize()
- return r
-
- def __repr__(self):
- return "msg_alert(alert=%s)" % (repr(self.alert), )
-
-
-class msg_inv(object):
- command = b"inv"
-
- def __init__(self, inv=None):
- if inv is None:
- self.inv = []
- else:
- self.inv = inv
-
- def deserialize(self, f):
- self.inv = deser_vector(f, CInv)
-
- def serialize(self):
- return ser_vector(self.inv)
-
- def __repr__(self):
- return "msg_inv(inv=%s)" % (repr(self.inv))
-
-
-class msg_getdata(object):
- command = b"getdata"
-
- def __init__(self, inv=None):
- self.inv = inv if inv != None else []
-
- def deserialize(self, f):
- self.inv = deser_vector(f, CInv)
-
- def serialize(self):
- return ser_vector(self.inv)
-
- def __repr__(self):
- return "msg_getdata(inv=%s)" % (repr(self.inv))
-
-
-class msg_getblocks(object):
- command = b"getblocks"
-
- def __init__(self):
- self.locator = CBlockLocator()
- self.hashstop = 0
-
- def deserialize(self, f):
- self.locator = CBlockLocator()
- self.locator.deserialize(f)
- self.hashstop = deser_uint256(f)
-
- def serialize(self):
- r = b""
- r += self.locator.serialize()
- r += ser_uint256(self.hashstop)
- return r
-
- def __repr__(self):
- return "msg_getblocks(locator=%s hashstop=%064x)" \
- % (repr(self.locator), self.hashstop)
-
-
-class msg_tx(object):
- command = b"tx"
-
- def __init__(self, tx=CTransaction()):
- self.tx = tx
-
- def deserialize(self, f):
- self.tx.deserialize(f)
-
- def serialize(self):
- return self.tx.serialize_without_witness()
-
- def __repr__(self):
- return "msg_tx(tx=%s)" % (repr(self.tx))
-
-class msg_witness_tx(msg_tx):
-
- def serialize(self):
- return self.tx.serialize_with_witness()
-
-
-class msg_block(object):
- command = b"block"
-
- def __init__(self, block=None):
- if block is None:
- self.block = CBlock()
- else:
- self.block = block
-
- def deserialize(self, f):
- self.block.deserialize(f)
-
- def serialize(self):
- return self.block.serialize()
-
- def __repr__(self):
- return "msg_block(block=%s)" % (repr(self.block))
-
-# for cases where a user needs tighter control over what is sent over the wire
-# note that the user must supply the name of the command, and the data
-class msg_generic(object):
- def __init__(self, command, data=None):
- self.command = command
- self.data = data
-
- def serialize(self):
- return self.data
-
- def __repr__(self):
- return "msg_generic()"
-
-class msg_witness_block(msg_block):
-
- def serialize(self):
- r = self.block.serialize(with_witness=True)
- return r
+ super().__init__(map=mininode_socket_map)
-class msg_getaddr(object):
- command = b"getaddr"
-
- def __init__(self):
- pass
-
- def deserialize(self, f):
- pass
-
- def serialize(self):
- return b""
-
- def __repr__(self):
- return "msg_getaddr()"
-
-
-class msg_ping_prebip31(object):
- command = b"ping"
-
- def __init__(self):
- pass
-
- def deserialize(self, f):
- pass
-
- def serialize(self):
- return b""
-
- def __repr__(self):
- return "msg_ping() (pre-bip31)"
-
-
-class msg_ping(object):
- command = b"ping"
-
- def __init__(self, nonce=0):
- self.nonce = nonce
-
- def deserialize(self, f):
- self.nonce = struct.unpack("<Q", f.read(8))[0]
-
- def serialize(self):
- r = b""
- r += struct.pack("<Q", self.nonce)
- return r
-
- def __repr__(self):
- return "msg_ping(nonce=%08x)" % self.nonce
-
-
-class msg_pong(object):
- command = b"pong"
-
- def __init__(self, nonce=0):
- self.nonce = nonce
-
- def deserialize(self, f):
- self.nonce = struct.unpack("<Q", f.read(8))[0]
-
- def serialize(self):
- r = b""
- r += struct.pack("<Q", self.nonce)
- return r
-
- def __repr__(self):
- return "msg_pong(nonce=%08x)" % self.nonce
-
-
-class msg_mempool(object):
- command = b"mempool"
-
- def __init__(self):
- pass
-
- def deserialize(self, f):
- pass
-
- def serialize(self):
- return b""
-
- def __repr__(self):
- return "msg_mempool()"
-
-class msg_sendheaders(object):
- command = b"sendheaders"
-
- def __init__(self):
- pass
-
- def deserialize(self, f):
- pass
-
- def serialize(self):
- return b""
-
- def __repr__(self):
- return "msg_sendheaders()"
-
-
-# getheaders message has
-# number of entries
-# vector of hashes
-# hash_stop (hash of last desired block header, 0 to get as many as possible)
-class msg_getheaders(object):
- command = b"getheaders"
+ def peer_connect(self, dstaddr, dstport, net="regtest"):
+ self.dstaddr = dstaddr
+ self.dstport = dstport
+ self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
+ self.socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
+ self.sendbuf = b""
+ self.recvbuf = b""
+ self.state = "connecting"
+ self.network = net
+ self.disconnect = False
- def __init__(self):
- self.locator = CBlockLocator()
- self.hashstop = 0
+ logger.info('Connecting to Bitcoin Node: %s:%d' % (self.dstaddr, self.dstport))
- def deserialize(self, f):
- self.locator = CBlockLocator()
- self.locator.deserialize(f)
- self.hashstop = deser_uint256(f)
+ try:
+ self.connect((dstaddr, dstport))
+ except:
+ self.handle_close()
- def serialize(self):
- r = b""
- r += self.locator.serialize()
- r += ser_uint256(self.hashstop)
- return r
+ def peer_disconnect(self):
+ # Connection could have already been closed by other end.
+ if self.state == "connected":
+ self.disconnect_node()
- def __repr__(self):
- return "msg_getheaders(locator=%s, stop=%064x)" \
- % (repr(self.locator), self.hashstop)
+ # Connection and disconnection methods
+ def handle_connect(self):
+ """asyncore callback when a connection is opened."""
+ if self.state != "connected":
+ logger.debug("Connected & Listening: %s:%d" % (self.dstaddr, self.dstport))
+ self.state = "connected"
+ self.on_open()
-# headers message has
-# <count> <vector of block headers>
-class msg_headers(object):
- command = b"headers"
+ def handle_close(self):
+ """asyncore callback when a connection is closed."""
+ logger.debug("Closing connection to: %s:%d" % (self.dstaddr, self.dstport))
+ self.state = "closed"
+ self.recvbuf = b""
+ self.sendbuf = b""
+ try:
+ self.close()
+ except:
+ pass
+ self.on_close()
- def __init__(self):
- self.headers = []
+ def disconnect_node(self):
+ """Disconnect the p2p connection.
- def deserialize(self, f):
- # comment in bitcoind indicates these should be deserialized as blocks
- blocks = deser_vector(f, CBlock)
- for x in blocks:
- self.headers.append(CBlockHeader(x))
+ Called by the test logic thread. Causes the p2p connection
+ to be disconnected on the next iteration of the asyncore loop."""
+ self.disconnect = True
- def serialize(self):
- blocks = [CBlock(x) for x in self.headers]
- return ser_vector(blocks)
+ # Socket read methods
- def __repr__(self):
- return "msg_headers(headers=%s)" % repr(self.headers)
+ def handle_read(self):
+ """asyncore callback when data is read from the socket."""
+ t = self.recv(8192)
+ if len(t) > 0:
+ self.recvbuf += t
+ self._on_data()
+
+ def _on_data(self):
+ """Try to read P2P messages from the recv buffer.
+
+ This method reads data from the buffer in a loop. It deserializes,
+ parses and verifies the P2P header, then passes the P2P payload to
+ the on_message callback for processing."""
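+ # A descriptive note on the framing parsed below:
+ #   4 bytes   network magic (MAGIC_BYTES[self.network])
+ #   12 bytes  command name, NUL-padded
+ #   4 bytes   payload length (little-endian)
+ #   4 bytes   checksum = first four bytes of sha256(sha256(payload))
+ # followed by the payload itself, which is deserialized via MESSAGEMAP.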
+ try:
+ while True:
+ if len(self.recvbuf) < 4:
+ return
+ if self.recvbuf[:4] != MAGIC_BYTES[self.network]:
+ raise ValueError("got garbage %s" % repr(self.recvbuf))
+ if len(self.recvbuf) < 4 + 12 + 4 + 4:
+ return
+ command = self.recvbuf[4:4+12].split(b"\x00", 1)[0]
+ msglen = struct.unpack("<i", self.recvbuf[4+12:4+12+4])[0]
+ checksum = self.recvbuf[4+12+4:4+12+4+4]
+ if len(self.recvbuf) < 4 + 12 + 4 + 4 + msglen:
+ return
+ msg = self.recvbuf[4+12+4+4:4+12+4+4+msglen]
+ th = sha256(msg)
+ h = sha256(th)
+ if checksum != h[:4]:
+ raise ValueError("got bad checksum " + repr(self.recvbuf))
+ self.recvbuf = self.recvbuf[4+12+4+4+msglen:]
+ if command not in MESSAGEMAP:
+ raise ValueError("Received unknown command from %s:%d: '%s' %s" % (self.dstaddr, self.dstport, command, repr(msg)))
+ f = BytesIO(msg)
+ t = MESSAGEMAP[command]()
+ t.deserialize(f)
+ self._log_message("receive", t)
+ self.on_message(t)
+ except Exception as e:
+ logger.exception('Error reading message: %s', repr(e))
+ raise
+ def on_message(self, message):
+ """Callback for processing a P2P payload. Must be overridden by derived class."""
+ raise NotImplementedError
-class msg_reject(object):
- command = b"reject"
- REJECT_MALFORMED = 1
+ # Socket write methods
- def __init__(self):
- self.message = b""
- self.code = 0
- self.reason = b""
- self.data = 0
-
- def deserialize(self, f):
- self.message = deser_string(f)
- self.code = struct.unpack("<B", f.read(1))[0]
- self.reason = deser_string(f)
- if (self.code != self.REJECT_MALFORMED and
- (self.message == b"block" or self.message == b"tx")):
- self.data = deser_uint256(f)
-
- def serialize(self):
- r = ser_string(self.message)
- r += struct.pack("<B", self.code)
- r += ser_string(self.reason)
- if (self.code != self.REJECT_MALFORMED and
- (self.message == b"block" or self.message == b"tx")):
- r += ser_uint256(self.data)
- return r
-
- def __repr__(self):
- return "msg_reject: %s %d %s [%064x]" \
- % (self.message, self.code, self.reason, self.data)
-
-# Helper function
-def wait_until(predicate, *, attempts=float('inf'), timeout=float('inf')):
- if attempts == float('inf') and timeout == float('inf'):
- timeout = 60
- attempt = 0
- elapsed = 0
-
- while attempt < attempts and elapsed < timeout:
+ def writable(self):
+ """asyncore method to determine whether the handle_write() callback should be called on the next loop."""
with mininode_lock:
- if predicate():
- return True
- attempt += 1
- elapsed += 0.05
- time.sleep(0.05)
-
- return False
-
-class msg_feefilter(object):
- command = b"feefilter"
-
- def __init__(self, feerate=0):
- self.feerate = feerate
-
- def deserialize(self, f):
- self.feerate = struct.unpack("<Q", f.read(8))[0]
-
- def serialize(self):
- r = b""
- r += struct.pack("<Q", self.feerate)
- return r
-
- def __repr__(self):
- return "msg_feefilter(feerate=%08x)" % self.feerate
-
-class msg_sendcmpct(object):
- command = b"sendcmpct"
-
- def __init__(self):
- self.announce = False
- self.version = 1
-
- def deserialize(self, f):
- self.announce = struct.unpack("<?", f.read(1))[0]
- self.version = struct.unpack("<Q", f.read(8))[0]
-
- def serialize(self):
- r = b""
- r += struct.pack("<?", self.announce)
- r += struct.pack("<Q", self.version)
- return r
-
- def __repr__(self):
- return "msg_sendcmpct(announce=%s, version=%lu)" % (self.announce, self.version)
-
-class msg_cmpctblock(object):
- command = b"cmpctblock"
-
- def __init__(self, header_and_shortids = None):
- self.header_and_shortids = header_and_shortids
-
- def deserialize(self, f):
- self.header_and_shortids = P2PHeaderAndShortIDs()
- self.header_and_shortids.deserialize(f)
-
- def serialize(self):
- r = b""
- r += self.header_and_shortids.serialize()
- return r
-
- def __repr__(self):
- return "msg_cmpctblock(HeaderAndShortIDs=%s)" % repr(self.header_and_shortids)
-
-class msg_getblocktxn(object):
- command = b"getblocktxn"
-
- def __init__(self):
- self.block_txn_request = None
-
- def deserialize(self, f):
- self.block_txn_request = BlockTransactionsRequest()
- self.block_txn_request.deserialize(f)
+ pre_connection = self.state == "connecting"
+ length = len(self.sendbuf)
+ return (length > 0 or pre_connection)
- def serialize(self):
- r = b""
- r += self.block_txn_request.serialize()
- return r
+ def handle_write(self):
+ """asyncore callback when data should be written to the socket."""
+ with mininode_lock:
+ # asyncore does not expose the connection event itself, only the first
+ # read/write event, so we must check the connection state manually here
+ # to know when we have actually connected
+ if self.state == "connecting":
+ self.handle_connect()
+ if not self.writable():
+ return
- def __repr__(self):
- return "msg_getblocktxn(block_txn_request=%s)" % (repr(self.block_txn_request))
+ try:
+ sent = self.send(self.sendbuf)
+ except:
+ self.handle_close()
+ return
+ self.sendbuf = self.sendbuf[sent:]
-class msg_blocktxn(object):
- command = b"blocktxn"
+ def send_message(self, message, pushbuf=False):
+ """Send a P2P message over the socket.
- def __init__(self):
- self.block_transactions = BlockTransactions()
+ This method takes a P2P payload, builds the P2P header and adds
+ the message to the send buffer to be sent over the socket."""
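+ # The header built here mirrors the framing parsed in _on_data(): network magic,
+ # NUL-padded command, payload length, then the first four bytes of
+ # sha256(sha256(payload)) as the checksum, followed by the payload.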
+ if self.state != "connected" and not pushbuf:
+ raise IOError('Not connected, no pushbuf')
+ self._log_message("send", message)
+ command = message.command
+ data = message.serialize()
+ tmsg = MAGIC_BYTES[self.network]
+ tmsg += command
+ tmsg += b"\x00" * (12 - len(command))
+ tmsg += struct.pack("<I", len(data))
+ th = sha256(data)
+ h = sha256(th)
+ tmsg += h[:4]
+ tmsg += data
+ with mininode_lock:
+ if (len(self.sendbuf) == 0 and not pushbuf):
+ try:
+ sent = self.send(tmsg)
+ self.sendbuf = tmsg[sent:]
+ except BlockingIOError:
+ self.sendbuf = tmsg
+ else:
+ self.sendbuf += tmsg
- def deserialize(self, f):
- self.block_transactions.deserialize(f)
+ # Class utility methods
- def serialize(self):
- r = b""
- r += self.block_transactions.serialize()
- return r
+ def _log_message(self, direction, msg):
+ """Logs a message being sent or received over the connection."""
+ if direction == "send":
+ log_message = "Send message to "
+ elif direction == "receive":
+ log_message = "Received message from "
+ log_message += "%s:%d: %s" % (self.dstaddr, self.dstport, repr(msg)[:500])
+ if len(log_message) > 500:
+ log_message += "... (msg truncated)"
+ logger.debug(log_message)
- def __repr__(self):
- return "msg_blocktxn(block_transactions=%s)" % (repr(self.block_transactions))
-class msg_witness_blocktxn(msg_blocktxn):
- def serialize(self):
- r = b""
- r += self.block_transactions.serialize(with_witness=True)
- return r
+class P2PInterface(P2PConnection):
+ """A high-level P2P interface class for communicating with a Bitcoin node.
-class NodeConnCB(object):
- """Callback and helper functions for P2P connection to a bitcoind node.
+ This class provides high-level callbacks for processing P2P message
+ payloads, as well as convenience methods for interacting with the
+ node over P2P.
Individual testcases should subclass this and override the on_* methods
- if they want to alter message handling behaviour.
- """
-
+ if they want to alter message handling behaviour."""
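+ # Illustrative usage sketch (hypothetical test code, not part of this module):
+ #
+ #   class BaseNode(P2PInterface):
+ #       def on_inv(self, message):
+ #           pass  # e.g. ignore invs instead of requesting the announced data
+ #
+ #   peer = BaseNode()
+ #   peer.peer_connect('127.0.0.1', p2p_port(0))
+ #   NetworkThread().start()
+ #   peer.wait_for_verack()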
def __init__(self):
- # Track whether we have a P2P connection open to the node
- self.connected = False
- self.connection = None
+ super().__init__()
# Track number of messages of each type received and the most recent
# message of each type
@@ -1489,121 +260,104 @@ class NodeConnCB(object):
# A count of the number of ping messages we've sent to the node
self.ping_counter = 1
- # deliver_sleep_time is helpful for debugging race conditions in p2p
- # tests; it causes message delivery to sleep for the specified time
- # before acquiring the global lock and delivering the next message.
- self.deliver_sleep_time = None
+ # The network services received from the peer
+ self.nServices = 0
+
+ def peer_connect(self, *args, services=NODE_NETWORK|NODE_WITNESS, send_version=True, **kwargs):
+ super().peer_connect(*args, **kwargs)
- # Remember the services our peer has advertised
- self.peer_services = None
+ if send_version:
+ # Send a version msg
+ vt = msg_version()
+ vt.nServices = services
+ vt.addrTo.ip = self.dstaddr
+ vt.addrTo.port = self.dstport
+ vt.addrFrom.ip = "0.0.0.0"
+ vt.addrFrom.port = 0
+ self.send_message(vt, True)
# Message receiving methods
- def deliver(self, conn, message):
+ def on_message(self, message):
"""Receive message and dispatch message to appropriate callback.
We keep a count of how many of each message type has been received
- and the most recent message of each type.
-
- Optionally waits for deliver_sleep_time before dispatching message.
- """
-
- deliver_sleep = self.get_deliver_sleep_time()
- if deliver_sleep is not None:
- time.sleep(deliver_sleep)
+ and the most recent message of each type."""
with mininode_lock:
try:
command = message.command.decode('ascii')
self.message_count[command] += 1
self.last_message[command] = message
- getattr(self, 'on_' + command)(conn, message)
+ getattr(self, 'on_' + command)(message)
except:
- print("ERROR delivering %s (%s)" % (repr(message),
- sys.exc_info()[0]))
-
- def set_deliver_sleep_time(self, value):
- with mininode_lock:
- self.deliver_sleep_time = value
-
- def get_deliver_sleep_time(self):
- with mininode_lock:
- return self.deliver_sleep_time
+ print("ERROR delivering %s (%s)" % (repr(message), sys.exc_info()[0]))
+ raise
# Callback methods. Can be overridden by subclasses in individual test
# cases to provide custom message handling behaviour.
- def on_open(self, conn):
- self.connected = True
-
- def on_close(self, conn):
- self.connected = False
- self.connection = None
-
- def on_addr(self, conn, message): pass
- def on_alert(self, conn, message): pass
- def on_block(self, conn, message): pass
- def on_blocktxn(self, conn, message): pass
- def on_cmpctblock(self, conn, message): pass
- def on_feefilter(self, conn, message): pass
- def on_getaddr(self, conn, message): pass
- def on_getblocks(self, conn, message): pass
- def on_getblocktxn(self, conn, message): pass
- def on_getdata(self, conn, message): pass
- def on_getheaders(self, conn, message): pass
- def on_headers(self, conn, message): pass
- def on_mempool(self, conn): pass
- def on_pong(self, conn, message): pass
- def on_reject(self, conn, message): pass
- def on_sendcmpct(self, conn, message): pass
- def on_sendheaders(self, conn, message): pass
- def on_tx(self, conn, message): pass
-
- def on_inv(self, conn, message):
+ def on_open(self):
+ pass
+
+ def on_close(self):
+ pass
+
+ def on_addr(self, message): pass
+ def on_block(self, message): pass
+ def on_blocktxn(self, message): pass
+ def on_cmpctblock(self, message): pass
+ def on_feefilter(self, message): pass
+ def on_getaddr(self, message): pass
+ def on_getblocks(self, message): pass
+ def on_getblocktxn(self, message): pass
+ def on_getdata(self, message): pass
+ def on_getheaders(self, message): pass
+ def on_headers(self, message): pass
+ def on_mempool(self, message): pass
+ def on_pong(self, message): pass
+ def on_reject(self, message): pass
+ def on_sendcmpct(self, message): pass
+ def on_sendheaders(self, message): pass
+ def on_tx(self, message): pass
+
+ def on_inv(self, message):
want = msg_getdata()
for i in message.inv:
if i.type != 0:
want.inv.append(i)
if len(want.inv):
- conn.send_message(want)
+ self.send_message(want)
- def on_ping(self, conn, message):
- if conn.ver_send > BIP0031_VERSION:
- conn.send_message(msg_pong(message.nonce))
+ def on_ping(self, message):
+ self.send_message(msg_pong(message.nonce))
- def on_verack(self, conn, message):
- conn.ver_recv = conn.ver_send
+ def on_verack(self, message):
self.verack_received = True
- def on_version(self, conn, message):
- if message.nVersion >= 209:
- conn.send_message(msg_verack())
- conn.ver_send = min(MY_VERSION, message.nVersion)
- if message.nVersion < 209:
- conn.ver_recv = conn.ver_send
- conn.nServices = message.nServices
+ def on_version(self, message):
+ assert message.nVersion >= MIN_VERSION_SUPPORTED, "Version {} received. Test framework only supports versions greater than or equal to {}".format(message.nVersion, MIN_VERSION_SUPPORTED)
+ self.send_message(msg_verack())
+ self.nServices = message.nServices
# Connection helper methods
- def add_connection(self, conn):
- self.connection = conn
-
def wait_for_disconnect(self, timeout=60):
- test_function = lambda: not self.connected
- assert wait_until(test_function, timeout=timeout)
+ test_function = lambda: self.state != "connected"
+ wait_until(test_function, timeout=timeout, lock=mininode_lock)
# Message receiving helper methods
def wait_for_block(self, blockhash, timeout=60):
test_function = lambda: self.last_message.get("block") and self.last_message["block"].block.rehash() == blockhash
- assert wait_until(test_function, timeout=timeout)
+ wait_until(test_function, timeout=timeout, lock=mininode_lock)
def wait_for_getdata(self, timeout=60):
test_function = lambda: self.last_message.get("getdata")
- assert wait_until(test_function, timeout=timeout)
+ wait_until(test_function, timeout=timeout, lock=mininode_lock)
def wait_for_getheaders(self, timeout=60):
test_function = lambda: self.last_message.get("getheaders")
- assert wait_until(test_function, timeout=timeout)
+ wait_until(test_function, timeout=timeout, lock=mininode_lock)
def wait_for_inv(self, expected_inv, timeout=60):
"""Waits for an INV message and checks that the first inv object in the message was as expected."""
@@ -1612,20 +366,14 @@ class NodeConnCB(object):
test_function = lambda: self.last_message.get("inv") and \
self.last_message["inv"].inv[0].type == expected_inv[0].type and \
self.last_message["inv"].inv[0].hash == expected_inv[0].hash
- assert wait_until(test_function, timeout=timeout)
+ wait_until(test_function, timeout=timeout, lock=mininode_lock)
def wait_for_verack(self, timeout=60):
test_function = lambda: self.message_count["verack"]
- assert wait_until(test_function, timeout=timeout)
+ wait_until(test_function, timeout=timeout, lock=mininode_lock)
# Message sending helper functions
- def send_message(self, message):
- if self.connection:
- self.connection.send_message(message)
- else:
- logger.error("Cannot send message. No connection to node!")
-
def send_and_ping(self, message):
self.send_message(message)
self.sync_with_ping()
@@ -1634,211 +382,22 @@ class NodeConnCB(object):
def sync_with_ping(self, timeout=60):
self.send_message(msg_ping(nonce=self.ping_counter))
test_function = lambda: self.last_message.get("pong") and self.last_message["pong"].nonce == self.ping_counter
- assert wait_until(test_function, timeout=timeout)
+ wait_until(test_function, timeout=timeout, lock=mininode_lock)
self.ping_counter += 1
- return True
-
-# The actual NodeConn class
-# This class provides an interface for a p2p connection to a specified node
-class NodeConn(asyncore.dispatcher):
- messagemap = {
- b"version": msg_version,
- b"verack": msg_verack,
- b"addr": msg_addr,
- b"alert": msg_alert,
- b"inv": msg_inv,
- b"getdata": msg_getdata,
- b"getblocks": msg_getblocks,
- b"tx": msg_tx,
- b"block": msg_block,
- b"getaddr": msg_getaddr,
- b"ping": msg_ping,
- b"pong": msg_pong,
- b"headers": msg_headers,
- b"getheaders": msg_getheaders,
- b"reject": msg_reject,
- b"mempool": msg_mempool,
- b"feefilter": msg_feefilter,
- b"sendheaders": msg_sendheaders,
- b"sendcmpct": msg_sendcmpct,
- b"cmpctblock": msg_cmpctblock,
- b"getblocktxn": msg_getblocktxn,
- b"blocktxn": msg_blocktxn
- }
- MAGIC_BYTES = {
- "mainnet": b"\xf9\xbe\xb4\xd9", # mainnet
- "testnet3": b"\x0b\x11\x09\x07", # testnet3
- "regtest": b"\xfa\xbf\xb5\xda", # regtest
- }
-
- def __init__(self, dstaddr, dstport, rpc, callback, net="regtest", services=NODE_NETWORK, send_version=True):
- asyncore.dispatcher.__init__(self, map=mininode_socket_map)
- self.dstaddr = dstaddr
- self.dstport = dstport
- self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
- self.sendbuf = b""
- self.recvbuf = b""
- self.ver_send = 209
- self.ver_recv = 209
- self.last_sent = 0
- self.state = "connecting"
- self.network = net
- self.cb = callback
- self.disconnect = False
- self.nServices = 0
-
- if send_version:
- # stuff version msg into sendbuf
- vt = msg_version()
- vt.nServices = services
- vt.addrTo.ip = self.dstaddr
- vt.addrTo.port = self.dstport
- vt.addrFrom.ip = "0.0.0.0"
- vt.addrFrom.port = 0
- self.send_message(vt, True)
-
- logger.info('Connecting to Bitcoin Node: %s:%d' % (self.dstaddr, self.dstport))
-
- try:
- self.connect((dstaddr, dstport))
- except:
- self.handle_close()
- self.rpc = rpc
-
- def handle_connect(self):
- if self.state != "connected":
- logger.debug("Connected & Listening: %s:%d" % (self.dstaddr, self.dstport))
- self.state = "connected"
- self.cb.on_open(self)
-
- def handle_close(self):
- logger.debug("Closing connection to: %s:%d" % (self.dstaddr, self.dstport))
- self.state = "closed"
- self.recvbuf = b""
- self.sendbuf = b""
- try:
- self.close()
- except:
- pass
- self.cb.on_close(self)
-
- def handle_read(self):
- try:
- t = self.recv(8192)
- if len(t) > 0:
- self.recvbuf += t
- self.got_data()
- except:
- pass
-
- def readable(self):
- return True
-
- def writable(self):
- with mininode_lock:
- pre_connection = self.state == "connecting"
- length = len(self.sendbuf)
- return (length > 0 or pre_connection)
-
- def handle_write(self):
- with mininode_lock:
- # asyncore does not expose socket connection, only the first read/write
- # event, thus we must check connection manually here to know when we
- # actually connect
- if self.state == "connecting":
- self.handle_connect()
- if not self.writable():
- return
-
- try:
- sent = self.send(self.sendbuf)
- except:
- self.handle_close()
- return
- self.sendbuf = self.sendbuf[sent:]
-
- def got_data(self):
- try:
- while True:
- if len(self.recvbuf) < 4:
- return
- if self.recvbuf[:4] != self.MAGIC_BYTES[self.network]:
- raise ValueError("got garbage %s" % repr(self.recvbuf))
- if self.ver_recv < 209:
- if len(self.recvbuf) < 4 + 12 + 4:
- return
- command = self.recvbuf[4:4+12].split(b"\x00", 1)[0]
- msglen = struct.unpack("<i", self.recvbuf[4+12:4+12+4])[0]
- checksum = None
- if len(self.recvbuf) < 4 + 12 + 4 + msglen:
- return
- msg = self.recvbuf[4+12+4:4+12+4+msglen]
- self.recvbuf = self.recvbuf[4+12+4+msglen:]
- else:
- if len(self.recvbuf) < 4 + 12 + 4 + 4:
- return
- command = self.recvbuf[4:4+12].split(b"\x00", 1)[0]
- msglen = struct.unpack("<i", self.recvbuf[4+12:4+12+4])[0]
- checksum = self.recvbuf[4+12+4:4+12+4+4]
- if len(self.recvbuf) < 4 + 12 + 4 + 4 + msglen:
- return
- msg = self.recvbuf[4+12+4+4:4+12+4+4+msglen]
- th = sha256(msg)
- h = sha256(th)
- if checksum != h[:4]:
- raise ValueError("got bad checksum " + repr(self.recvbuf))
- self.recvbuf = self.recvbuf[4+12+4+4+msglen:]
- if command in self.messagemap:
- f = BytesIO(msg)
- t = self.messagemap[command]()
- t.deserialize(f)
- self.got_message(t)
- else:
- logger.warning("Received unknown command from %s:%d: '%s' %s" % (self.dstaddr, self.dstport, command, repr(msg)))
- except Exception as e:
- logger.exception('got_data:', repr(e))
- def send_message(self, message, pushbuf=False):
- if self.state != "connected" and not pushbuf:
- raise IOError('Not connected, no pushbuf')
- self._log_message("send", message)
- command = message.command
- data = message.serialize()
- tmsg = self.MAGIC_BYTES[self.network]
- tmsg += command
- tmsg += b"\x00" * (12 - len(command))
- tmsg += struct.pack("<I", len(data))
- if self.ver_send >= 209:
- th = sha256(data)
- h = sha256(th)
- tmsg += h[:4]
- tmsg += data
- with mininode_lock:
- self.sendbuf += tmsg
- self.last_sent = time.time()
-
- def got_message(self, message):
- if message.command == b"version":
- if message.nVersion <= BIP0031_VERSION:
- self.messagemap[b'ping'] = msg_ping_prebip31
- if self.last_sent + 30 * 60 < time.time():
- self.send_message(self.messagemap[b'ping']())
- self._log_message("receive", message)
- self.cb.deliver(self, message)
- def _log_message(self, direction, msg):
- if direction == "send":
- log_message = "Send message to "
- elif direction == "receive":
- log_message = "Received message from "
- log_message += "%s:%d: %s" % (self.dstaddr, self.dstport, repr(msg)[:500])
- if len(log_message) > 500:
- log_message += "... (msg truncated)"
- logger.debug(log_message)
-
- def disconnect_node(self):
- self.disconnect = True
+# Keep our own socket map for asyncore, so that we can track disconnects
+# ourselves (to workaround an issue with closing an asyncore socket when
+# using select)
+mininode_socket_map = dict()
+# One lock for synchronizing all data access between the networking thread (see
+# NetworkThread below) and the thread running the test logic. For simplicity,
+# P2PConnection acquires this lock whenever delivering a message to a P2PInterface,
+# and whenever adding anything to the send buffer (in send_message()). This
+# lock should be acquired in the thread running the test logic to synchronize
+# access to any data shared with the P2PInterface or P2PConnection.
+mininode_lock = RLock()
class NetworkThread(Thread):
def run(self):
@@ -1850,15 +409,6 @@ class NetworkThread(Thread):
for fd, obj in mininode_socket_map.items():
if obj.disconnect:
disconnected.append(obj)
- [ obj.handle_close() for obj in disconnected ]
+ [obj.handle_close() for obj in disconnected]
asyncore.loop(0.1, use_poll=True, map=mininode_socket_map, count=1)
-
-
-# An exception we can raise if we detect a potential disconnect
-# (p2p or rpc) before the test is complete
-class EarlyDisconnectError(Exception):
- def __init__(self, value):
- self.value = value
-
- def __str__(self):
- return repr(self.value)
+ logger.debug("Network thread closing")
diff --git a/test/functional/test_framework/netutil.py b/test/functional/test_framework/netutil.py
index 45d8e22d22..e5d415788f 100644
--- a/test/functional/test_framework/netutil.py
+++ b/test/functional/test_framework/netutil.py
@@ -15,17 +15,17 @@ import array
import os
from binascii import unhexlify, hexlify
-STATE_ESTABLISHED = '01'
-STATE_SYN_SENT = '02'
-STATE_SYN_RECV = '03'
-STATE_FIN_WAIT1 = '04'
-STATE_FIN_WAIT2 = '05'
-STATE_TIME_WAIT = '06'
-STATE_CLOSE = '07'
-STATE_CLOSE_WAIT = '08'
-STATE_LAST_ACK = '09'
+# STATE_ESTABLISHED = '01'
+# STATE_SYN_SENT = '02'
+# STATE_SYN_RECV = '03'
+# STATE_FIN_WAIT1 = '04'
+# STATE_FIN_WAIT2 = '05'
+# STATE_TIME_WAIT = '06'
+# STATE_CLOSE = '07'
+# STATE_CLOSE_WAIT = '08'
+# STATE_LAST_ACK = '09'
STATE_LISTEN = '0A'
-STATE_CLOSING = '0B'
+# STATE_CLOSING = '0B'
def get_socket_inodes(pid):
'''
diff --git a/test/functional/test_framework/script.py b/test/functional/test_framework/script.py
index 3d9572788e..a4c046bd3d 100644
--- a/test/functional/test_framework/script.py
+++ b/test/functional/test_framework/script.py
@@ -23,9 +23,7 @@ import struct
from .bignum import bn2vch
-MAX_SCRIPT_SIZE = 10000
MAX_SCRIPT_ELEMENT_SIZE = 520
-MAX_SCRIPT_OPCODES = 201
OPCODE_NAMES = {}
@@ -242,131 +240,6 @@ OP_PUBKEY = CScriptOp(0xfe)
OP_INVALIDOPCODE = CScriptOp(0xff)
-VALID_OPCODES = {
- OP_1NEGATE,
- OP_RESERVED,
- OP_1,
- OP_2,
- OP_3,
- OP_4,
- OP_5,
- OP_6,
- OP_7,
- OP_8,
- OP_9,
- OP_10,
- OP_11,
- OP_12,
- OP_13,
- OP_14,
- OP_15,
- OP_16,
-
- OP_NOP,
- OP_VER,
- OP_IF,
- OP_NOTIF,
- OP_VERIF,
- OP_VERNOTIF,
- OP_ELSE,
- OP_ENDIF,
- OP_VERIFY,
- OP_RETURN,
-
- OP_TOALTSTACK,
- OP_FROMALTSTACK,
- OP_2DROP,
- OP_2DUP,
- OP_3DUP,
- OP_2OVER,
- OP_2ROT,
- OP_2SWAP,
- OP_IFDUP,
- OP_DEPTH,
- OP_DROP,
- OP_DUP,
- OP_NIP,
- OP_OVER,
- OP_PICK,
- OP_ROLL,
- OP_ROT,
- OP_SWAP,
- OP_TUCK,
-
- OP_CAT,
- OP_SUBSTR,
- OP_LEFT,
- OP_RIGHT,
- OP_SIZE,
-
- OP_INVERT,
- OP_AND,
- OP_OR,
- OP_XOR,
- OP_EQUAL,
- OP_EQUALVERIFY,
- OP_RESERVED1,
- OP_RESERVED2,
-
- OP_1ADD,
- OP_1SUB,
- OP_2MUL,
- OP_2DIV,
- OP_NEGATE,
- OP_ABS,
- OP_NOT,
- OP_0NOTEQUAL,
-
- OP_ADD,
- OP_SUB,
- OP_MUL,
- OP_DIV,
- OP_MOD,
- OP_LSHIFT,
- OP_RSHIFT,
-
- OP_BOOLAND,
- OP_BOOLOR,
- OP_NUMEQUAL,
- OP_NUMEQUALVERIFY,
- OP_NUMNOTEQUAL,
- OP_LESSTHAN,
- OP_GREATERTHAN,
- OP_LESSTHANOREQUAL,
- OP_GREATERTHANOREQUAL,
- OP_MIN,
- OP_MAX,
-
- OP_WITHIN,
-
- OP_RIPEMD160,
- OP_SHA1,
- OP_SHA256,
- OP_HASH160,
- OP_HASH256,
- OP_CODESEPARATOR,
- OP_CHECKSIG,
- OP_CHECKSIGVERIFY,
- OP_CHECKMULTISIG,
- OP_CHECKMULTISIGVERIFY,
-
- OP_NOP1,
- OP_CHECKLOCKTIMEVERIFY,
- OP_CHECKSEQUENCEVERIFY,
- OP_NOP4,
- OP_NOP5,
- OP_NOP6,
- OP_NOP7,
- OP_NOP8,
- OP_NOP9,
- OP_NOP10,
-
- OP_SMALLINTEGER,
- OP_PUBKEYS,
- OP_PUBKEYHASH,
- OP_PUBKEY,
-}
-
OPCODE_NAMES.update({
OP_0 : 'OP_0',
OP_PUSHDATA1 : 'OP_PUSHDATA1',
@@ -486,124 +359,6 @@ OPCODE_NAMES.update({
OP_INVALIDOPCODE : 'OP_INVALIDOPCODE',
})
-OPCODES_BY_NAME = {
- 'OP_0' : OP_0,
- 'OP_PUSHDATA1' : OP_PUSHDATA1,
- 'OP_PUSHDATA2' : OP_PUSHDATA2,
- 'OP_PUSHDATA4' : OP_PUSHDATA4,
- 'OP_1NEGATE' : OP_1NEGATE,
- 'OP_RESERVED' : OP_RESERVED,
- 'OP_1' : OP_1,
- 'OP_2' : OP_2,
- 'OP_3' : OP_3,
- 'OP_4' : OP_4,
- 'OP_5' : OP_5,
- 'OP_6' : OP_6,
- 'OP_7' : OP_7,
- 'OP_8' : OP_8,
- 'OP_9' : OP_9,
- 'OP_10' : OP_10,
- 'OP_11' : OP_11,
- 'OP_12' : OP_12,
- 'OP_13' : OP_13,
- 'OP_14' : OP_14,
- 'OP_15' : OP_15,
- 'OP_16' : OP_16,
- 'OP_NOP' : OP_NOP,
- 'OP_VER' : OP_VER,
- 'OP_IF' : OP_IF,
- 'OP_NOTIF' : OP_NOTIF,
- 'OP_VERIF' : OP_VERIF,
- 'OP_VERNOTIF' : OP_VERNOTIF,
- 'OP_ELSE' : OP_ELSE,
- 'OP_ENDIF' : OP_ENDIF,
- 'OP_VERIFY' : OP_VERIFY,
- 'OP_RETURN' : OP_RETURN,
- 'OP_TOALTSTACK' : OP_TOALTSTACK,
- 'OP_FROMALTSTACK' : OP_FROMALTSTACK,
- 'OP_2DROP' : OP_2DROP,
- 'OP_2DUP' : OP_2DUP,
- 'OP_3DUP' : OP_3DUP,
- 'OP_2OVER' : OP_2OVER,
- 'OP_2ROT' : OP_2ROT,
- 'OP_2SWAP' : OP_2SWAP,
- 'OP_IFDUP' : OP_IFDUP,
- 'OP_DEPTH' : OP_DEPTH,
- 'OP_DROP' : OP_DROP,
- 'OP_DUP' : OP_DUP,
- 'OP_NIP' : OP_NIP,
- 'OP_OVER' : OP_OVER,
- 'OP_PICK' : OP_PICK,
- 'OP_ROLL' : OP_ROLL,
- 'OP_ROT' : OP_ROT,
- 'OP_SWAP' : OP_SWAP,
- 'OP_TUCK' : OP_TUCK,
- 'OP_CAT' : OP_CAT,
- 'OP_SUBSTR' : OP_SUBSTR,
- 'OP_LEFT' : OP_LEFT,
- 'OP_RIGHT' : OP_RIGHT,
- 'OP_SIZE' : OP_SIZE,
- 'OP_INVERT' : OP_INVERT,
- 'OP_AND' : OP_AND,
- 'OP_OR' : OP_OR,
- 'OP_XOR' : OP_XOR,
- 'OP_EQUAL' : OP_EQUAL,
- 'OP_EQUALVERIFY' : OP_EQUALVERIFY,
- 'OP_RESERVED1' : OP_RESERVED1,
- 'OP_RESERVED2' : OP_RESERVED2,
- 'OP_1ADD' : OP_1ADD,
- 'OP_1SUB' : OP_1SUB,
- 'OP_2MUL' : OP_2MUL,
- 'OP_2DIV' : OP_2DIV,
- 'OP_NEGATE' : OP_NEGATE,
- 'OP_ABS' : OP_ABS,
- 'OP_NOT' : OP_NOT,
- 'OP_0NOTEQUAL' : OP_0NOTEQUAL,
- 'OP_ADD' : OP_ADD,
- 'OP_SUB' : OP_SUB,
- 'OP_MUL' : OP_MUL,
- 'OP_DIV' : OP_DIV,
- 'OP_MOD' : OP_MOD,
- 'OP_LSHIFT' : OP_LSHIFT,
- 'OP_RSHIFT' : OP_RSHIFT,
- 'OP_BOOLAND' : OP_BOOLAND,
- 'OP_BOOLOR' : OP_BOOLOR,
- 'OP_NUMEQUAL' : OP_NUMEQUAL,
- 'OP_NUMEQUALVERIFY' : OP_NUMEQUALVERIFY,
- 'OP_NUMNOTEQUAL' : OP_NUMNOTEQUAL,
- 'OP_LESSTHAN' : OP_LESSTHAN,
- 'OP_GREATERTHAN' : OP_GREATERTHAN,
- 'OP_LESSTHANOREQUAL' : OP_LESSTHANOREQUAL,
- 'OP_GREATERTHANOREQUAL' : OP_GREATERTHANOREQUAL,
- 'OP_MIN' : OP_MIN,
- 'OP_MAX' : OP_MAX,
- 'OP_WITHIN' : OP_WITHIN,
- 'OP_RIPEMD160' : OP_RIPEMD160,
- 'OP_SHA1' : OP_SHA1,
- 'OP_SHA256' : OP_SHA256,
- 'OP_HASH160' : OP_HASH160,
- 'OP_HASH256' : OP_HASH256,
- 'OP_CODESEPARATOR' : OP_CODESEPARATOR,
- 'OP_CHECKSIG' : OP_CHECKSIG,
- 'OP_CHECKSIGVERIFY' : OP_CHECKSIGVERIFY,
- 'OP_CHECKMULTISIG' : OP_CHECKMULTISIG,
- 'OP_CHECKMULTISIGVERIFY' : OP_CHECKMULTISIGVERIFY,
- 'OP_NOP1' : OP_NOP1,
- 'OP_CHECKLOCKTIMEVERIFY' : OP_CHECKLOCKTIMEVERIFY,
- 'OP_CHECKSEQUENCEVERIFY' : OP_CHECKSEQUENCEVERIFY,
- 'OP_NOP4' : OP_NOP4,
- 'OP_NOP5' : OP_NOP5,
- 'OP_NOP6' : OP_NOP6,
- 'OP_NOP7' : OP_NOP7,
- 'OP_NOP8' : OP_NOP8,
- 'OP_NOP9' : OP_NOP9,
- 'OP_NOP10' : OP_NOP10,
- 'OP_SMALLINTEGER' : OP_SMALLINTEGER,
- 'OP_PUBKEYS' : OP_PUBKEYS,
- 'OP_PUBKEYHASH' : OP_PUBKEYHASH,
- 'OP_PUBKEY' : OP_PUBKEY,
-}
-
class CScriptInvalidError(Exception):
"""Base class for CScript exceptions"""
pass
@@ -615,7 +370,7 @@ class CScriptTruncatedPushDataError(CScriptInvalidError):
super(CScriptTruncatedPushDataError, self).__init__(msg)
# This is used, eg, for blockchain heights in coinbase scripts (bip34)
-class CScriptNum(object):
+class CScriptNum():
def __init__(self, d=0):
self.value = d
diff --git a/test/functional/test_framework/segwit_addr.py b/test/functional/test_framework/segwit_addr.py
new file mode 100644
index 0000000000..02368e938f
--- /dev/null
+++ b/test/functional/test_framework/segwit_addr.py
@@ -0,0 +1,107 @@
+#!/usr/bin/env python3
+# Copyright (c) 2017 Pieter Wuille
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Reference implementation for Bech32 and segwit addresses."""
+
+
+CHARSET = "qpzry9x8gf2tvdw0s3jn54khce6mua7l"
+
+
+def bech32_polymod(values):
+ """Internal function that computes the Bech32 checksum."""
+ generator = [0x3b6a57b2, 0x26508e6d, 0x1ea119fa, 0x3d4233dd, 0x2a1462b3]
+ chk = 1
+ for value in values:
+ top = chk >> 25
+ chk = (chk & 0x1ffffff) << 5 ^ value
+ for i in range(5):
+ chk ^= generator[i] if ((top >> i) & 1) else 0
+ return chk
+
+
+def bech32_hrp_expand(hrp):
+ """Expand the HRP into values for checksum computation."""
+ return [ord(x) >> 5 for x in hrp] + [0] + [ord(x) & 31 for x in hrp]
+
+
+def bech32_verify_checksum(hrp, data):
+ """Verify a checksum given HRP and converted data characters."""
+ return bech32_polymod(bech32_hrp_expand(hrp) + data) == 1
+
+
+def bech32_create_checksum(hrp, data):
+ """Compute the checksum values given HRP and data."""
+ values = bech32_hrp_expand(hrp) + data
+ polymod = bech32_polymod(values + [0, 0, 0, 0, 0, 0]) ^ 1
+ return [(polymod >> 5 * (5 - i)) & 31 for i in range(6)]
+
+
+def bech32_encode(hrp, data):
+ """Compute a Bech32 string given HRP and data values."""
+ combined = data + bech32_create_checksum(hrp, data)
+ return hrp + '1' + ''.join([CHARSET[d] for d in combined])
+
+
+def bech32_decode(bech):
+ """Validate a Bech32 string, and determine HRP and data."""
+ if ((any(ord(x) < 33 or ord(x) > 126 for x in bech)) or
+ (bech.lower() != bech and bech.upper() != bech)):
+ return (None, None)
+ bech = bech.lower()
+ pos = bech.rfind('1')
+ if pos < 1 or pos + 7 > len(bech) or len(bech) > 90:
+ return (None, None)
+ if not all(x in CHARSET for x in bech[pos+1:]):
+ return (None, None)
+ hrp = bech[:pos]
+ data = [CHARSET.find(x) for x in bech[pos+1:]]
+ if not bech32_verify_checksum(hrp, data):
+ return (None, None)
+ return (hrp, data[:-6])
+
+
+def convertbits(data, frombits, tobits, pad=True):
+ """General power-of-2 base conversion."""
+ acc = 0
+ bits = 0
+ ret = []
+ maxv = (1 << tobits) - 1
+ max_acc = (1 << (frombits + tobits - 1)) - 1
+ for value in data:
+ if value < 0 or (value >> frombits):
+ return None
+ acc = ((acc << frombits) | value) & max_acc
+ bits += frombits
+ while bits >= tobits:
+ bits -= tobits
+ ret.append((acc >> bits) & maxv)
+ if pad:
+ if bits:
+ ret.append((acc << (tobits - bits)) & maxv)
+ elif bits >= frombits or ((acc << (tobits - bits)) & maxv):
+ return None
+ return ret
+
+
+def decode(hrp, addr):
+ """Decode a segwit address."""
+ hrpgot, data = bech32_decode(addr)
+ if hrpgot != hrp:
+ return (None, None)
+ decoded = convertbits(data[1:], 5, 8, False)
+ if decoded is None or len(decoded) < 2 or len(decoded) > 40:
+ return (None, None)
+ if data[0] > 16:
+ return (None, None)
+ if data[0] == 0 and len(decoded) != 20 and len(decoded) != 32:
+ return (None, None)
+ return (data[0], decoded)
+
+
+def encode(hrp, witver, witprog):
+ """Encode a segwit address."""
+ ret = bech32_encode(hrp, [witver] + convertbits(witprog, 8, 5))
+ if decode(hrp, ret) == (None, None):
+ return None
+ return ret
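+
+# Illustrative usage ('bcrt' is the regtest HRP defined in BIP 173):
+#   addr = encode('bcrt', 0, witness_program)   # witness_program: 20 or 32 bytes
+#   witver, witprog = decode('bcrt', addr)      # returns (None, None) on failure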
diff --git a/test/functional/test_framework/socks5.py b/test/functional/test_framework/socks5.py
index a08b03ed24..7b40c47fbf 100644
--- a/test/functional/test_framework/socks5.py
+++ b/test/functional/test_framework/socks5.py
@@ -31,7 +31,7 @@ def recvall(s, n):
return rv
### Implementation classes
-class Socks5Configuration(object):
+class Socks5Configuration():
"""Proxy configuration."""
def __init__(self):
self.addr = None # Bind address (must be set)
@@ -39,7 +39,7 @@ class Socks5Configuration(object):
self.unauth = False # Support unauthenticated
self.auth = False # Support authentication
-class Socks5Command(object):
+class Socks5Command():
"""Information about an incoming socks5 command."""
def __init__(self, cmd, atyp, addr, port, username, password):
self.cmd = cmd # Command (one of Command.*)
@@ -51,7 +51,7 @@ class Socks5Command(object):
def __repr__(self):
return 'Socks5Command(%s,%s,%s,%s,%s,%s)' % (self.cmd, self.atyp, self.addr, self.port, self.username, self.password)
-class Socks5Connection(object):
+class Socks5Connection():
def __init__(self, serv, conn, peer):
self.serv = serv
self.conn = conn
@@ -91,7 +91,7 @@ class Socks5Connection(object):
self.conn.sendall(bytearray([0x01, 0x00]))
# Read connect request
- (ver,cmd,rsv,atyp) = recvall(self.conn, 4)
+ ver, cmd, _, atyp = recvall(self.conn, 4)
if ver != 0x05:
raise IOError('Invalid socks version %i in connect request' % ver)
if cmd != Command.CONNECT:
@@ -122,7 +122,7 @@ class Socks5Connection(object):
finally:
self.conn.close()
-class Socks5Server(object):
+class Socks5Server():
def __init__(self, conf):
self.conf = conf
self.s = socket.socket(conf.af)
diff --git a/test/functional/test_framework/test_framework.py b/test/functional/test_framework/test_framework.py
index 8d698a7327..a46312d62c 100755
--- a/test/functional/test_framework/test_framework.py
+++ b/test/functional/test_framework/test_framework.py
@@ -4,22 +4,19 @@
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Base class for RPC testing."""
-from collections import deque
-import errno
from enum import Enum
-import http.client
import logging
import optparse
import os
+import pdb
import shutil
-import subprocess
import sys
import tempfile
import time
-import traceback
from .authproxy import JSONRPCException
from . import coverage
+from .test_node import TestNode
from .util import (
MAX_NODES,
PortSeed,
@@ -27,12 +24,9 @@ from .util import (
check_json_precision,
connect_nodes_bi,
disconnect_nodes,
- get_rpc_proxy,
initialize_datadir,
- get_datadir_path,
log_filename,
p2p_port,
- rpc_url,
set_node_times,
sync_blocks,
sync_mempools,
@@ -47,63 +41,33 @@ TEST_EXIT_PASSED = 0
TEST_EXIT_FAILED = 1
TEST_EXIT_SKIPPED = 77
-BITCOIND_PROC_WAIT_TIMEOUT = 60
-
-class BitcoinTestFramework(object):
+class BitcoinTestFramework():
"""Base class for a bitcoin test script.
- Individual bitcoin test scripts should subclass this class and override the following methods:
+ Individual bitcoin test scripts should subclass this class and override the set_test_params() and run_test() methods.
+
+ Individual tests can also override the following methods to customize the test setup:
- - __init__()
- add_options()
- setup_chain()
- setup_network()
- - run_test()
+ - setup_nodes()
- The main() method should not be overridden.
+ The __init__() and main() methods should not be overridden.
This class also contains various public and private helper methods."""
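+ # A minimal subclass might look like this (illustrative sketch only):
+ #
+ #   class ExampleTest(BitcoinTestFramework):
+ #       def set_test_params(self):
+ #           self.num_nodes = 1
+ #       def run_test(self):
+ #           self.log.info("Tip: %s" % self.nodes[0].getbestblockhash())
+ #
+ #   if __name__ == '__main__':
+ #       ExampleTest().main()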
- # Methods to override in subclass test scripts.
def __init__(self):
- self.num_nodes = 4
+ """Sets test framework defaults. Do not override this method. Instead, override the set_test_params() method"""
self.setup_clean_chain = False
self.nodes = []
- self.bitcoind_processes = {}
self.mocktime = 0
+ self.set_test_params()
- def add_options(self, parser):
- pass
-
- def setup_chain(self):
- self.log.info("Initializing test directory " + self.options.tmpdir)
- if self.setup_clean_chain:
- self._initialize_chain_clean(self.options.tmpdir, self.num_nodes)
- else:
- self._initialize_chain(self.options.tmpdir, self.num_nodes, self.options.cachedir)
-
- def setup_network(self):
- self.setup_nodes()
-
- # Connect the nodes as a "chain". This allows us
- # to split the network between nodes 1 and 2 to get
- # two halves that can work on competing chains.
- for i in range(self.num_nodes - 1):
- connect_nodes_bi(self.nodes, i, i + 1)
- self.sync_all()
-
- def setup_nodes(self):
- extra_args = None
- if hasattr(self, "extra_args"):
- extra_args = self.extra_args
- self.nodes = self.start_nodes(self.num_nodes, self.options.tmpdir, extra_args)
-
- def run_test(self):
- raise NotImplementedError
-
- # Main function. This should not be overridden by the subclass test scripts.
+ assert hasattr(self, "num_nodes"), "Test must set self.num_nodes in set_test_params()"
def main(self):
+ """Main function. This should not be overridden by the subclass test scripts."""
parser = optparse.OptionParser(usage="%prog [options]")
parser.add_option("--nocleanup", dest="nocleanup", default=False, action="store_true",
@@ -125,6 +89,8 @@ class BitcoinTestFramework(object):
help="Write tested RPC commands into this directory")
parser.add_option("--configfile", dest="configfile",
help="Location of the test framework config file")
+ parser.add_option("--pdbonfailure", dest="pdbonfailure", default=False, action="store_true",
+ help="Attach a python debugger if test fails")
self.add_options(parser)
(self.options, self.args) = parser.parse_args()
@@ -134,8 +100,11 @@ class BitcoinTestFramework(object):
check_json_precision()
+ self.options.cachedir = os.path.abspath(self.options.cachedir)
+
# Set up temp directory and start logging
if self.options.tmpdir:
+ self.options.tmpdir = os.path.abspath(self.options.tmpdir)
os.makedirs(self.options.tmpdir, exist_ok=False)
else:
self.options.tmpdir = tempfile.mkdtemp(prefix="test")
@@ -162,6 +131,10 @@ class BitcoinTestFramework(object):
except KeyboardInterrupt as e:
self.log.warning("Exiting after keyboard interrupt")
+ if success == TestStatus.FAILED and self.options.pdbonfailure:
+ print("Testcase failed. Attaching python debugger. Enter ? for help")
+ pdb.set_trace()
+
if not self.options.noshutdown:
self.log.info("Stopping nodes")
if self.nodes:
@@ -174,104 +147,134 @@ class BitcoinTestFramework(object):
shutil.rmtree(self.options.tmpdir)
else:
self.log.warning("Not cleaning up dir %s" % self.options.tmpdir)
- if os.getenv("PYTHON_DEBUG", ""):
- # Dump the end of the debug logs, to aid in debugging rare
- # travis failures.
- import glob
- filenames = [self.options.tmpdir + "/test_framework.log"]
- filenames += glob.glob(self.options.tmpdir + "/node*/regtest/debug.log")
- MAX_LINES_TO_PRINT = 1000
- for fn in filenames:
- try:
- with open(fn, 'r') as f:
- print("From", fn, ":")
- print("".join(deque(f, MAX_LINES_TO_PRINT)))
- except OSError:
- print("Opening file %s failed." % fn)
- traceback.print_exc()
if success == TestStatus.PASSED:
self.log.info("Tests successful")
- sys.exit(TEST_EXIT_PASSED)
+ exit_code = TEST_EXIT_PASSED
elif success == TestStatus.SKIPPED:
self.log.info("Test skipped")
- sys.exit(TEST_EXIT_SKIPPED)
+ exit_code = TEST_EXIT_SKIPPED
else:
self.log.error("Test failed. Test logging available at %s/test_framework.log", self.options.tmpdir)
- logging.shutdown()
- sys.exit(TEST_EXIT_FAILED)
+ self.log.error("Hint: Call {} '{}' to consolidate all logs".format(os.path.normpath(os.path.dirname(os.path.realpath(__file__)) + "/../combine_logs.py"), self.options.tmpdir))
+ exit_code = TEST_EXIT_FAILED
+ logging.shutdown()
+ sys.exit(exit_code)
- # Public helper methods. These can be accessed by the subclass test scripts.
+ # Methods to override in subclass test scripts.
+ def set_test_params(self):
+ """Tests must this method to change default values for number of nodes, topology, etc"""
+ raise NotImplementedError
- def start_node(self, i, dirname, extra_args=None, rpchost=None, timewait=None, binary=None, stderr=None):
- """Start a bitcoind and return RPC connection to it"""
+ def add_options(self, parser):
+ """Override this method to add command-line options to the test"""
+ pass
- datadir = os.path.join(dirname, "node" + str(i))
- if binary is None:
- binary = os.getenv("BITCOIND", "bitcoind")
- args = [binary, "-datadir=" + datadir, "-server", "-keypool=1", "-discover=0", "-rest", "-logtimemicros", "-debug", "-debugexclude=libevent", "-debugexclude=leveldb", "-mocktime=" + str(self.mocktime), "-uacomment=testnode%d" % i]
- if extra_args is not None:
- args.extend(extra_args)
- self.bitcoind_processes[i] = subprocess.Popen(args, stderr=stderr)
- self.log.debug("initialize_chain: bitcoind started, waiting for RPC to come up")
- self._wait_for_bitcoind_start(self.bitcoind_processes[i], datadir, i, rpchost)
- self.log.debug("initialize_chain: RPC successfully started")
- proxy = get_rpc_proxy(rpc_url(datadir, i, rpchost), i, timeout=timewait)
+ def setup_chain(self):
+ """Override this method to customize blockchain setup"""
+ self.log.info("Initializing test directory " + self.options.tmpdir)
+ if self.setup_clean_chain:
+ self._initialize_chain_clean()
+ else:
+ self._initialize_chain()
- if self.options.coveragedir:
- coverage.write_all_rpc_commands(self.options.coveragedir, proxy)
+ def setup_network(self):
+ """Override this method to customize test network topology"""
+ self.setup_nodes()
+
+ # Connect the nodes as a "chain". This allows us
+ # to split the network between nodes 1 and 2 to get
+ # two halves that can work on competing chains.
+ for i in range(self.num_nodes - 1):
+ connect_nodes_bi(self.nodes, i, i + 1)
+ self.sync_all()
+
+ def setup_nodes(self):
+ """Override this method to customize test node setup"""
+ extra_args = None
+ if hasattr(self, "extra_args"):
+ extra_args = self.extra_args
+ self.add_nodes(self.num_nodes, extra_args)
+ self.start_nodes()
+
+ def run_test(self):
+ """Tests must override this method to define test logic"""
+ raise NotImplementedError
- return proxy
+ # Public helper methods. These can be accessed by the subclass test scripts.
- def start_nodes(self, num_nodes, dirname, extra_args=None, rpchost=None, timewait=None, binary=None):
- """Start multiple bitcoinds, return RPC connections to them"""
+ def add_nodes(self, num_nodes, extra_args=None, rpchost=None, timewait=None, binary=None):
+ """Instantiate TestNode objects"""
if extra_args is None:
- extra_args = [None] * num_nodes
+ extra_args = [[]] * num_nodes
if binary is None:
binary = [None] * num_nodes
assert_equal(len(extra_args), num_nodes)
assert_equal(len(binary), num_nodes)
- rpcs = []
+ for i in range(num_nodes):
+ self.nodes.append(TestNode(i, self.options.tmpdir, extra_args[i], rpchost, timewait=timewait, binary=binary[i], stderr=None, mocktime=self.mocktime, coverage_dir=self.options.coveragedir))
+
+ def start_node(self, i, extra_args=None, stderr=None):
+ """Start a bitcoind"""
+
+ node = self.nodes[i]
+
+ node.start(extra_args, stderr)
+ node.wait_for_rpc_connection()
+
+ if self.options.coveragedir is not None:
+ coverage.write_all_rpc_commands(self.options.coveragedir, node.rpc)
+
+ def start_nodes(self, extra_args=None):
+ """Start multiple bitcoinds"""
+
+ if extra_args is None:
+ extra_args = [None] * self.num_nodes
+ assert_equal(len(extra_args), self.num_nodes)
try:
- for i in range(num_nodes):
- rpcs.append(self.start_node(i, dirname, extra_args[i], rpchost, timewait=timewait, binary=binary[i]))
+ for i, node in enumerate(self.nodes):
+ node.start(extra_args[i])
+ for node in self.nodes:
+ node.wait_for_rpc_connection()
except:
# If one node failed to start, stop the others
- # TODO: abusing self.nodes in this way is a little hacky.
- # Eventually we should do a better job of tracking nodes
- self.nodes.extend(rpcs)
self.stop_nodes()
- self.nodes = []
raise
- return rpcs
+
+ if self.options.coveragedir is not None:
+ for node in self.nodes:
+ coverage.write_all_rpc_commands(self.options.coveragedir, node.rpc)
def stop_node(self, i):
"""Stop a bitcoind test node"""
-
- self.log.debug("Stopping node %d" % i)
- try:
- self.nodes[i].stop()
- except http.client.CannotSendRequest as e:
- self.log.exception("Unable to stop node")
- return_code = self.bitcoind_processes[i].wait(timeout=BITCOIND_PROC_WAIT_TIMEOUT)
- del self.bitcoind_processes[i]
- assert_equal(return_code, 0)
+ self.nodes[i].stop_node()
+ self.nodes[i].wait_until_stopped()
def stop_nodes(self):
"""Stop multiple bitcoind test nodes"""
+ for node in self.nodes:
+ # Issue RPC to stop nodes
+ node.stop_node()
- for i in range(len(self.nodes)):
- self.stop_node(i)
- assert not self.bitcoind_processes.values() # All connections must be gone now
+ for node in self.nodes:
+ # Wait for nodes to stop
+ node.wait_until_stopped()
- def assert_start_raises_init_error(self, i, dirname, extra_args=None, expected_msg=None):
+ def restart_node(self, i, extra_args=None):
+ """Stop and start a test node"""
+ self.stop_node(i)
+ self.start_node(i, extra_args)
+
+ def assert_start_raises_init_error(self, i, extra_args=None, expected_msg=None):
with tempfile.SpooledTemporaryFile(max_size=2**16) as log_stderr:
try:
- self.start_node(i, dirname, extra_args, stderr=log_stderr)
+ self.start_node(i, extra_args, stderr=log_stderr)
self.stop_node(i)
except Exception as e:
assert 'bitcoind exited' in str(e) # node must have shutdown
+ self.nodes[i].running = False
+ self.nodes[i].process = None
if expected_msg is not None:
log_stderr.seek(0)
stderr = log_stderr.read().decode('utf-8')
@@ -285,7 +288,7 @@ class BitcoinTestFramework(object):
raise AssertionError(assert_msg)
def wait_for_node_exit(self, i, timeout):
- self.bitcoind_processes[i].wait(timeout)
+ self.nodes[i].process.wait(timeout)
def split_network(self):
"""
@@ -355,16 +358,16 @@ class BitcoinTestFramework(object):
rpc_handler.setLevel(logging.DEBUG)
rpc_logger.addHandler(rpc_handler)
- def _initialize_chain(self, test_dir, num_nodes, cachedir):
+ def _initialize_chain(self):
"""Initialize a pre-mined blockchain for use by the test.
Create a cache of a 200-block-long chain (with wallet) for MAX_NODES
Afterward, create num_nodes copies from the cache."""
- assert num_nodes <= MAX_NODES
+ assert self.num_nodes <= MAX_NODES
create_cache = False
for i in range(MAX_NODES):
- if not os.path.isdir(os.path.join(cachedir, 'node' + str(i))):
+ if not os.path.isdir(os.path.join(self.options.cachedir, 'node' + str(i))):
create_cache = True
break
@@ -373,27 +376,22 @@ class BitcoinTestFramework(object):
# find and delete old cache directories if any exist
for i in range(MAX_NODES):
- if os.path.isdir(os.path.join(cachedir, "node" + str(i))):
- shutil.rmtree(os.path.join(cachedir, "node" + str(i)))
+ if os.path.isdir(os.path.join(self.options.cachedir, "node" + str(i))):
+ shutil.rmtree(os.path.join(self.options.cachedir, "node" + str(i)))
# Create cache directories, run bitcoinds:
for i in range(MAX_NODES):
- datadir = initialize_datadir(cachedir, i)
+ datadir = initialize_datadir(self.options.cachedir, i)
args = [os.getenv("BITCOIND", "bitcoind"), "-server", "-keypool=1", "-datadir=" + datadir, "-discover=0"]
if i > 0:
args.append("-connect=127.0.0.1:" + str(p2p_port(0)))
- self.bitcoind_processes[i] = subprocess.Popen(args)
- self.log.debug("initialize_chain: bitcoind started, waiting for RPC to come up")
- self._wait_for_bitcoind_start(self.bitcoind_processes[i], datadir, i)
- self.log.debug("initialize_chain: RPC successfully started")
+ self.nodes.append(TestNode(i, self.options.cachedir, extra_args=[], rpchost=None, timewait=None, binary=None, stderr=None, mocktime=self.mocktime, coverage_dir=None))
+ self.nodes[i].args = args
+ self.start_node(i)
- self.nodes = []
- for i in range(MAX_NODES):
- try:
- self.nodes.append(get_rpc_proxy(rpc_url(get_datadir_path(cachedir, i), i), i))
- except:
- self.log.exception("Error connecting to node %d" % i)
- sys.exit(1)
+ # Wait for RPC connections to be ready
+ for node in self.nodes:
+ node.wait_for_rpc_connection()
# Create a 200-block-long chain; each of the 4 first nodes
# gets 25 mature blocks and 25 immature.
@@ -418,48 +416,24 @@ class BitcoinTestFramework(object):
self.nodes = []
self.disable_mocktime()
for i in range(MAX_NODES):
- os.remove(log_filename(cachedir, i, "debug.log"))
- os.remove(log_filename(cachedir, i, "db.log"))
- os.remove(log_filename(cachedir, i, "peers.dat"))
- os.remove(log_filename(cachedir, i, "fee_estimates.dat"))
-
- for i in range(num_nodes):
- from_dir = os.path.join(cachedir, "node" + str(i))
- to_dir = os.path.join(test_dir, "node" + str(i))
+ os.remove(log_filename(self.options.cachedir, i, "debug.log"))
+ os.remove(log_filename(self.options.cachedir, i, "wallets/db.log"))
+ os.remove(log_filename(self.options.cachedir, i, "peers.dat"))
+ os.remove(log_filename(self.options.cachedir, i, "fee_estimates.dat"))
+
+ for i in range(self.num_nodes):
+ from_dir = os.path.join(self.options.cachedir, "node" + str(i))
+ to_dir = os.path.join(self.options.tmpdir, "node" + str(i))
shutil.copytree(from_dir, to_dir)
- initialize_datadir(test_dir, i) # Overwrite port/rpcport in bitcoin.conf
+ initialize_datadir(self.options.tmpdir, i) # Overwrite port/rpcport in bitcoin.conf
- def _initialize_chain_clean(self, test_dir, num_nodes):
+ def _initialize_chain_clean(self):
"""Initialize empty blockchain for use by the test.
Create an empty blockchain and num_nodes wallets.
Useful if a test case wants complete control over initialization."""
- for i in range(num_nodes):
- initialize_datadir(test_dir, i)
-
- def _wait_for_bitcoind_start(self, process, datadir, i, rpchost=None):
- """Wait for bitcoind to start.
-
- This means that RPC is accessible and fully initialized.
- Raise an exception if bitcoind exits during initialization."""
- while True:
- if process.poll() is not None:
- raise Exception('bitcoind exited with status %i during initialization' % process.returncode)
- try:
- # Check if .cookie file to be created
- rpc = get_rpc_proxy(rpc_url(datadir, i, rpchost), i, coveragedir=self.options.coveragedir)
- rpc.getblockcount()
- break # break out of loop on success
- except IOError as e:
- if e.errno != errno.ECONNREFUSED: # Port not yet open?
- raise # unknown IO error
- except JSONRPCException as e: # Initialization phase
- if e.error['code'] != -28: # RPC in warmup?
- raise # unknown JSON RPC exception
- except ValueError as e: # cookie file not found and no rpcuser or rpcassword. bitcoind still starting
- if "No RPC credentials" not in str(e):
- raise
- time.sleep(0.25)
+ for i in range(self.num_nodes):
+ initialize_datadir(self.options.tmpdir, i)
class ComparisonTestFramework(BitcoinTestFramework):
"""Test framework for doing p2p comparison testing
@@ -469,8 +443,7 @@ class ComparisonTestFramework(BitcoinTestFramework):
- 2 binaries: 1 test binary, 1 ref binary
- n>2 binaries: 1 test binary, n-1 ref binaries"""
- def __init__(self):
- super().__init__()
+ def set_test_params(self):
self.num_nodes = 2
self.setup_clean_chain = True
@@ -483,13 +456,13 @@ class ComparisonTestFramework(BitcoinTestFramework):
help="bitcoind binary to use for reference nodes (if any)")
def setup_network(self):
- extra_args = [['-whitelist=127.0.0.1']]*self.num_nodes
+ extra_args = [['-whitelist=127.0.0.1']] * self.num_nodes
if hasattr(self, "extra_args"):
extra_args = self.extra_args
- self.nodes = self.start_nodes(
- self.num_nodes, self.options.tmpdir, extra_args,
- binary=[self.options.testbinary] +
- [self.options.refbinary] * (self.num_nodes - 1))
+ self.add_nodes(self.num_nodes, extra_args,
+ binary=[self.options.testbinary] +
+ [self.options.refbinary] * (self.num_nodes - 1))
+ self.start_nodes()
class SkipTest(Exception):
"""This exception is raised to skip a test"""
diff --git a/test/functional/test_framework/test_node.py b/test/functional/test_framework/test_node.py
new file mode 100755
index 0000000000..a9248c764e
--- /dev/null
+++ b/test/functional/test_framework/test_node.py
@@ -0,0 +1,227 @@
+#!/usr/bin/env python3
+# Copyright (c) 2017 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Class for bitcoind node under test"""
+
+import decimal
+import errno
+import http.client
+import json
+import logging
+import os
+import subprocess
+import time
+
+from .authproxy import JSONRPCException
+from .util import (
+ assert_equal,
+ get_rpc_proxy,
+ rpc_url,
+ wait_until,
+ p2p_port,
+)
+
+BITCOIND_PROC_WAIT_TIMEOUT = 60
+
+class TestNode():
+ """A class for representing a bitcoind node under test.
+
+ This class contains:
+
+ - state about the node (whether it's running, etc)
+ - a Python subprocess.Popen object representing the running process
+ - an RPC connection to the node
+ - one or more P2P connections to the node
+
+
+ To make things easier for the test writer, any unrecognised method calls are
+ dispatched to the RPC connection.
+
+ def __init__(self, i, dirname, extra_args, rpchost, timewait, binary, stderr, mocktime, coverage_dir):
+ self.index = i
+ self.datadir = os.path.join(dirname, "node" + str(i))
+ self.rpchost = rpchost
+ if timewait:
+ self.rpc_timeout = timewait
+ else:
+ # Wait for up to 60 seconds for the RPC server to respond
+ self.rpc_timeout = 60
+ if binary is None:
+ self.binary = os.getenv("BITCOIND", "bitcoind")
+ else:
+ self.binary = binary
+ self.stderr = stderr
+ self.coverage_dir = coverage_dir
+ # Most callers will just need to add extra args to the standard list below. Callers that need more flexibility can set the args property directly.
+ self.extra_args = extra_args
+ self.args = [self.binary, "-datadir=" + self.datadir, "-server", "-keypool=1", "-discover=0", "-rest", "-logtimemicros", "-debug", "-debugexclude=libevent", "-debugexclude=leveldb", "-mocktime=" + str(mocktime), "-uacomment=testnode%d" % i]
+
+ self.cli = TestNodeCLI(os.getenv("BITCOINCLI", "bitcoin-cli"), self.datadir)
+
+ self.running = False
+ self.process = None
+ self.rpc_connected = False
+ self.rpc = None
+ self.url = None
+ self.log = logging.getLogger('TestFramework.node%d' % i)
+
+ self.p2ps = []
+
+ def __getattr__(self, name):
+ """Dispatches any unrecognised messages to the RPC connection."""
+ assert self.rpc_connected and self.rpc is not None, "Error: no RPC connection"
+ return getattr(self.rpc, name)
+
+ def start(self, extra_args=None, stderr=None):
+ """Start the node."""
+ if extra_args is None:
+ extra_args = self.extra_args
+ if stderr is None:
+ stderr = self.stderr
+ self.process = subprocess.Popen(self.args + extra_args, stderr=stderr)
+ self.running = True
+ self.log.debug("bitcoind started, waiting for RPC to come up")
+
+ def wait_for_rpc_connection(self):
+ """Sets up an RPC connection to the bitcoind process. Returns False if unable to connect."""
+ # Poll at a rate of four times per second
+ poll_per_s = 4
+ for _ in range(poll_per_s * self.rpc_timeout):
+ assert self.process.poll() is None, "bitcoind exited with status %i during initialization" % self.process.returncode
+ try:
+ self.rpc = get_rpc_proxy(rpc_url(self.datadir, self.index, self.rpchost), self.index, timeout=self.rpc_timeout, coveragedir=self.coverage_dir)
+ self.rpc.getblockcount()
+ # If the call to getblockcount() succeeds then the RPC connection is up
+ self.rpc_connected = True
+ self.url = self.rpc.url
+ self.log.debug("RPC successfully started")
+ return
+ except IOError as e:
+ if e.errno != errno.ECONNREFUSED: # Port not yet open?
+ raise # unknown IO error
+ except JSONRPCException as e: # Initialization phase
+ if e.error['code'] != -28: # RPC in warmup?
+ raise # unknown JSON RPC exception
+ except ValueError as e: # cookie file not found and no rpcuser or rpcpassword; bitcoind is still starting
+ if "No RPC credentials" not in str(e):
+ raise
+ time.sleep(1.0 / poll_per_s)
+ raise AssertionError("Unable to connect to bitcoind")
+
+ def get_wallet_rpc(self, wallet_name):
+ assert self.rpc_connected
+ assert self.rpc
+ wallet_path = "wallet/%s" % wallet_name
+ return self.rpc / wallet_path
+
+ def stop_node(self):
+ """Stop the node."""
+ if not self.running:
+ return
+ self.log.debug("Stopping node")
+ try:
+ self.stop()
+ except http.client.CannotSendRequest:
+ self.log.exception("Unable to stop node.")
+ del self.p2ps[:]
+
+ def is_node_stopped(self):
+ """Checks whether the node has stopped.
+
+ Returns True if the node has stopped. False otherwise.
+ This method is responsible for freeing resources (self.process)."""
+ if not self.running:
+ return True
+ return_code = self.process.poll()
+ if return_code is None:
+ return False
+
+ # process has stopped. Assert that it didn't return an error code.
+ assert_equal(return_code, 0)
+ self.running = False
+ self.process = None
+ self.rpc_connected = False
+ self.rpc = None
+ self.log.debug("Node stopped")
+ return True
+
+ def wait_until_stopped(self, timeout=BITCOIND_PROC_WAIT_TIMEOUT):
+ wait_until(self.is_node_stopped, timeout=timeout)
+
+ def node_encrypt_wallet(self, passphrase):
+ """"Encrypts the wallet.
+
+ This causes bitcoind to shutdown, so this method takes
+ care of cleaning up resources."""
+ self.encryptwallet(passphrase)
+ self.wait_until_stopped()
+
+ def add_p2p_connection(self, p2p_conn, *args, **kwargs):
+ """Add a p2p connection to the node.
+
+ This method adds the p2p connection to the self.p2ps list and also
+ returns the connection to the caller."""
+ if 'dstport' not in kwargs:
+ kwargs['dstport'] = p2p_port(self.index)
+ if 'dstaddr' not in kwargs:
+ kwargs['dstaddr'] = '127.0.0.1'
+
+ p2p_conn.peer_connect(*args, **kwargs)
+ self.p2ps.append(p2p_conn)
+
+ return p2p_conn
+
+ @property
+ def p2p(self):
+ """Return the first p2p connection
+
+ Convenience property - most tests only use a single p2p connection to each
+ node, so this saves having to write node.p2ps[0] many times."""
+ assert self.p2ps, "No p2p connection"
+ return self.p2ps[0]
+
+ def disconnect_p2ps(self):
+ """Close all p2p connections to the node."""
+ for p in self.p2ps:
+ p.peer_disconnect()
+ del self.p2ps[:]
+
+
+class TestNodeCLI():
+ """Interface to bitcoin-cli for an individual node"""
+
+ def __init__(self, binary, datadir):
+ self.args = []
+ self.binary = binary
+ self.datadir = datadir
+ self.input = None
+
+ def __call__(self, *args, input=None):
+ # TestNodeCLI is callable with bitcoin-cli command-line args
+ self.args = [str(arg) for arg in args]
+ self.input = input
+ return self
+
+ def __getattr__(self, command):
+ def dispatcher(*args, **kwargs):
+ return self.send_cli(command, *args, **kwargs)
+ return dispatcher
+
+ def send_cli(self, command, *args, **kwargs):
+ """Run bitcoin-cli command. Deserializes returned string as python object."""
+
+ pos_args = [str(arg) for arg in args]
+ named_args = [str(key) + "=" + str(value) for (key, value) in kwargs.items()]
+ assert not (pos_args and named_args), "Cannot use positional arguments and named arguments in the same bitcoin-cli call"
+ p_args = [self.binary, "-datadir=" + self.datadir] + self.args
+ if named_args:
+ p_args += ["-named"]
+ p_args += [command] + pos_args + named_args
+ process = subprocess.Popen(p_args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
+ cli_stdout, cli_stderr = process.communicate(input=self.input)
+ returncode = process.poll()
+ if returncode:
+ # Ignore cli_stdout, raise with cli_stderr
+ raise subprocess.CalledProcessError(returncode, self.binary, output=cli_stderr)
+ return json.loads(cli_stdout, parse_float=decimal.Decimal)
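
The new test_node.py above centralises node management. As a minimal sketch (not part of this diff), a functional test written against it could exercise the RPC attribute dispatch and the TestNodeCLI wrapper as follows; the class name is invented for illustration and the file is assumed to live under test/functional/ so the test_framework imports resolve:

    #!/usr/bin/env python3
    # Illustrative sketch only, assuming it is saved under test/functional/.
    from test_framework.test_framework import BitcoinTestFramework
    from test_framework.util import assert_equal

    class TestNodeSketch(BitcoinTestFramework):
        def set_test_params(self):
            self.num_nodes = 1
            self.setup_clean_chain = True

        def run_test(self):
            node = self.nodes[0]  # a TestNode instance
            # Unrecognised attributes dispatch straight to the RPC proxy.
            node.generate(101)
            assert_equal(node.getblockcount(), 101)
            # The same RPC via the bitcoin-cli wrapper; send_cli deserializes
            # the reply back into a Python object.
            assert_equal(node.cli.getblockcount(), 101)

    if __name__ == '__main__':
        TestNodeSketch().main()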
diff --git a/test/functional/test_framework/util.py b/test/functional/test_framework/util.py
index 3c918b48fb..102c903018 100644
--- a/test/functional/test_framework/util.py
+++ b/test/functional/test_framework/util.py
@@ -7,11 +7,13 @@
from base64 import b64encode
from binascii import hexlify, unhexlify
from decimal import Decimal, ROUND_DOWN
+import hashlib
import json
import logging
import os
import random
import re
+from subprocess import CalledProcessError
import time
from . import coverage
@@ -49,6 +51,8 @@ def assert_raises(exc, fun, *args, **kwds):
def assert_raises_message(exc, message, fun, *args, **kwds):
try:
fun(*args, **kwds)
+ except JSONRPCException:
+ raise AssertionError("Use assert_raises_rpc_error() to test RPC failures")
except exc as e:
if message is not None and message not in e.error['message']:
raise AssertionError("Expected substring not found:" + e.error['message'])
@@ -57,22 +61,53 @@ def assert_raises_message(exc, message, fun, *args, **kwds):
else:
raise AssertionError("No exception raised")
-def assert_raises_jsonrpc(code, message, fun, *args, **kwds):
+def assert_raises_process_error(returncode, output, fun, *args, **kwds):
+ """Execute a process and asserts the process return code and output.
+
+ Calls function `fun` with arguments `args` and `kwds`. Catches a CalledProcessError
+ and verifies that the return code and output are as expected. Throws AssertionError if
+ no CalledProcessError was raised or if the return code and output are not as expected.
+
+ Args:
+ returncode (int): the process return code.
+ output (string): [a substring of] the process output.
+ fun (function): the function to call. This should execute a process.
+ args*: positional arguments for the function.
+ kwds**: named arguments for the function.
+ """
+ try:
+ fun(*args, **kwds)
+ except CalledProcessError as e:
+ if returncode != e.returncode:
+ raise AssertionError("Unexpected returncode %i" % e.returncode)
+ if output not in e.output:
+ raise AssertionError("Expected substring not found:" + e.output)
+ else:
+ raise AssertionError("No exception raised")
+
+def assert_raises_rpc_error(code, message, fun, *args, **kwds):
"""Run an RPC and verify that a specific JSONRPC exception code and message is raised.
Calls function `fun` with arguments `args` and `kwds`. Catches a JSONRPCException
and verifies that the error code and message are as expected. Throws AssertionError if
- no JSONRPCException was returned or if the error code/message are not as expected.
+ no JSONRPCException was raised or if the error code/message are not as expected.
Args:
code (int), optional: the error code returned by the RPC call (defined
in src/rpc/protocol.h). Set to None if checking the error code is not required.
message (string), optional: [a substring of] the error string returned by the
- RPC call. Set to None if checking the error string is not required
+ RPC call. Set to None if checking the error string is not required.
fun (function): the function to call. This should be the name of an RPC.
args*: positional arguments for the function.
kwds**: named arguments for the function.
"""
+ assert try_rpc(code, message, fun, *args, **kwds), "No exception raised"
+
+def try_rpc(code, message, fun, *args, **kwds):
+ """Tries to run an rpc command.
+
+ Test against error code and message if the rpc fails.
+ Returns whether a JSONRPCException was raised."""
try:
fun(*args, **kwds)
except JSONRPCException as e:
@@ -81,10 +116,11 @@ def assert_raises_jsonrpc(code, message, fun, *args, **kwds):
raise AssertionError("Unexpected JSONRPC error code %i" % e.error["code"])
if (message is not None) and (message not in e.error['message']):
raise AssertionError("Expected substring not found:" + e.error['message'])
+ return True
except Exception as e:
raise AssertionError("Unexpected exception raised: " + type(e).__name__)
else:
- raise AssertionError("No exception raised")
+ return False
def assert_is_hex_string(string):
try:
@@ -148,6 +184,13 @@ def count_bytes(hex_string):
def bytes_to_hex_str(byte_str):
return hexlify(byte_str).decode('ascii')
+def hash256(byte_str):
+ sha256 = hashlib.sha256()
+ sha256.update(byte_str)
+ sha256d = hashlib.sha256()
+ sha256d.update(sha256.digest())
+ return sha256d.digest()[::-1]
+
def hex_str_to_bytes(hex_str):
return unhexlify(hex_str.encode('ascii'))
@@ -157,6 +200,28 @@ def str_to_b64str(string):
def satoshi_round(amount):
return Decimal(amount).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)
+def wait_until(predicate, *, attempts=float('inf'), timeout=float('inf'), lock=None):
+ if attempts == float('inf') and timeout == float('inf'):
+ timeout = 60
+ attempt = 0
+ timeout += time.time()
+
+ while attempt < attempts and time.time() < timeout:
+ if lock:
+ with lock:
+ if predicate():
+ return
+ else:
+ if predicate():
+ return
+ attempt += 1
+ time.sleep(0.05)
+
+ # Print the cause of the timeout
+ assert_greater_than(attempts, attempt)
+ assert_greater_than(timeout, time.time())
+ raise RuntimeError('Unreachable')
+
# RPC/P2P connection constants and functions
############################################
@@ -204,7 +269,7 @@ def rpc_port(n):
return PORT_MIN + PORT_RANGE + n + (MAX_NODES * PortSeed.n) % (PORT_RANGE - 1 - MAX_NODES)
def rpc_url(datadir, i, rpchost=None):
- rpc_u, rpc_p = get_auth_cookie(datadir, i)
+ rpc_u, rpc_p = get_auth_cookie(datadir)
host = '127.0.0.1'
port = rpc_port(i)
if rpchost:
@@ -232,11 +297,11 @@ def initialize_datadir(dirname, n):
def get_datadir_path(dirname, n):
return os.path.join(dirname, "node" + str(n))
-def get_auth_cookie(datadir, n):
+def get_auth_cookie(datadir):
user = None
password = None
if os.path.isfile(os.path.join(datadir, "bitcoin.conf")):
- with open(os.path.join(datadir, "bitcoin.conf"), 'r') as f:
+ with open(os.path.join(datadir, "bitcoin.conf"), 'r', encoding='utf8') as f:
for line in f:
if line.startswith("rpcuser="):
assert user is None # Ensure that there is only one rpcuser line
@@ -412,7 +477,10 @@ def random_transaction(nodes, amount, min_fee, fee_increment, fee_variants):
# Helper to create at least "count" utxos
# Pass in a fee that is sufficient for relay and mining new transactions.
def create_confirmed_utxos(fee, node, count):
- node.generate(int(0.5 * count) + 101)
+ to_generate = int(0.5 * count) + 101
+ while to_generate > 0:
+ node.generate(min(25, to_generate))
+ to_generate -= 25
utxos = node.listunspent()
iterations = count - len(utxos)
addr1 = node.getnewaddress()
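
For orientation (also not part of this diff), the renamed assert_raises_rpc_error and the new hash256/wait_until helpers behave roughly as in the standalone sketch below; fake_rpc and the literal error code/message are invented for illustration, and the snippet assumes it is run from test/functional/ so the test_framework package is importable:

    # Sketch of the new/renamed util helpers (run from test/functional/).
    from binascii import hexlify
    import time

    from test_framework.authproxy import JSONRPCException
    from test_framework.util import assert_raises_rpc_error, hash256, wait_until

    # hash256 is double-SHA256 with the digest byte-reversed, i.e. the order in
    # which block and transaction hashes are usually displayed.
    print(hexlify(hash256(b'')).decode())

    # assert_raises_rpc_error passes when the callable raises a JSONRPCException
    # whose code matches and whose message contains the given substring.
    def fake_rpc():
        raise JSONRPCException({'code': -8, 'message': 'Block height out of range'})
    assert_raises_rpc_error(-8, "out of range", fake_rpc)

    # wait_until polls the predicate every 0.05s until it returns True or the
    # timeout expires (in which case it asserts).
    start = time.time()
    wait_until(lambda: time.time() - start > 0.2, timeout=5)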
diff --git a/test/functional/test_runner.py b/test/functional/test_runner.py
index 54f625514b..58faec521d 100755
--- a/test/functional/test_runner.py
+++ b/test/functional/test_runner.py
@@ -15,6 +15,7 @@ For a description of arguments recognized by test scripts, see
"""
import argparse
+from collections import deque
import configparser
import datetime
import os
@@ -79,7 +80,9 @@ BASE_SCRIPTS= [
'rawtransactions.py',
'reindex.py',
# vv Tests less than 30s vv
+ 'keypool-topup.py',
'zmq_test.py',
+ 'bitcoin_cli.py',
'mempool_resurrect_test.py',
'txn_doublespend.py --mineblock',
'txn_clone.py',
@@ -88,6 +91,7 @@ BASE_SCRIPTS= [
'mempool_spendcoinbase.py',
'mempool_reorg.py',
'mempool_persist.py',
+ 'multiwallet.py',
'httpbasics.py',
'multi_rpc.py',
'proxy_test.py',
@@ -95,6 +99,7 @@ BASE_SCRIPTS= [
'disconnect_ban.py',
'decodescript.py',
'blockchain.py',
+ 'deprecated_rpc.py',
'disablewallet.py',
'net.py',
'keypool.py',
@@ -108,12 +113,22 @@ BASE_SCRIPTS= [
'signmessages.py',
'nulldummy.py',
'import-rescan.py',
+ 'mining.py',
'bumpfee.py',
'rpcnamedargs.py',
'listsinceblock.py',
'p2p-leaktests.py',
'wallet-encryption.py',
+ 'bipdersig-p2p.py',
+ 'bip65-cltv-p2p.py',
'uptime.py',
+ 'resendwallettransactions.py',
+ 'minchainwork.py',
+ 'p2p-fingerprint.py',
+ 'uacomment.py',
+ 'p2p-acceptblock.py',
+ 'feature_logging.py',
+ 'node_network_limited.py',
]
EXTENDED_SCRIPTS = [
@@ -136,17 +151,11 @@ EXTENDED_SCRIPTS = [
'rpcbind_test.py',
# vv Tests less than 30s vv
'assumevalid.py',
- 'bip65-cltv.py',
- 'bip65-cltv-p2p.py',
- 'bipdersig-p2p.py',
- 'bipdersig.py',
'example_test.py',
- 'getblocktemplate_proposals.py',
'txn_doublespend.py',
'txn_clone.py --mineblock',
- 'forknotify.py',
+ 'notifications.py',
'invalidateblock.py',
- 'p2p-acceptblock.py',
'replace-by-fee.py',
]
@@ -168,8 +177,9 @@ def main():
epilog='''
Help text and arguments for individual test script:''',
formatter_class=argparse.RawTextHelpFormatter)
+ parser.add_argument('--combinedlogslen', '-c', type=int, default=0, help='print a combined log (of length n lines) from all test nodes and test framework to the console on failure.')
parser.add_argument('--coverage', action='store_true', help='generate a basic coverage report for the RPC interface')
- parser.add_argument('--exclude', '-x', help='specify a comma-seperated-list of scripts to exclude.')
+ parser.add_argument('--exclude', '-x', help='specify a comma-separated list of scripts to exclude.')
parser.add_argument('--extended', action='store_true', help='run the extended test suite in addition to the basic tests')
parser.add_argument('--force', '-f', action='store_true', help='run tests even on platforms where they are disabled by default (e.g. windows).')
parser.add_argument('--help', '-h', '-?', action='store_true', help='print help text and exit')
@@ -260,9 +270,9 @@ def main():
if not args.keepcache:
shutil.rmtree("%s/test/cache" % config["environment"]["BUILDDIR"], ignore_errors=True)
- run_tests(test_list, config["environment"]["SRCDIR"], config["environment"]["BUILDDIR"], config["environment"]["EXEEXT"], tmpdir, args.jobs, args.coverage, passon_args)
+ run_tests(test_list, config["environment"]["SRCDIR"], config["environment"]["BUILDDIR"], config["environment"]["EXEEXT"], tmpdir, args.jobs, args.coverage, passon_args, args.combinedlogslen)
-def run_tests(test_list, src_dir, build_dir, exeext, tmpdir, jobs=1, enable_coverage=False, args=[]):
+def run_tests(test_list, src_dir, build_dir, exeext, tmpdir, jobs=1, enable_coverage=False, args=[], combined_logs_len=0):
# Warn if bitcoind is already running (unix only)
try:
if subprocess.check_output(["pidof", "bitcoind"]) is not None:
@@ -278,6 +288,7 @@ def run_tests(test_list, src_dir, build_dir, exeext, tmpdir, jobs=1, enable_cove
#Set env vars
if "BITCOIND" not in os.environ:
os.environ["BITCOIND"] = build_dir + '/src/bitcoind' + exeext
+ os.environ["BITCOINCLI"] = build_dir + '/src/bitcoin-cli' + exeext
tests_dir = src_dir + '/test/functional/'
@@ -293,7 +304,11 @@ def run_tests(test_list, src_dir, build_dir, exeext, tmpdir, jobs=1, enable_cove
if len(test_list) > 1 and jobs > 1:
# Populate cache
- subprocess.check_output([tests_dir + 'create_cache.py'] + flags + ["--tmpdir=%s/cache" % tmpdir])
+ try:
+ subprocess.check_output([tests_dir + 'create_cache.py'] + flags + ["--tmpdir=%s/cache" % tmpdir])
+ except subprocess.CalledProcessError as e:
+ print(e.output)
+ raise
#Run Tests
job_queue = TestHandler(jobs, tests_dir, tmpdir, test_list, flags)
@@ -303,7 +318,7 @@ def run_tests(test_list, src_dir, build_dir, exeext, tmpdir, jobs=1, enable_cove
max_len_name = len(max(test_list, key=len))
for _ in range(len(test_list)):
- test_result, stdout, stderr = job_queue.get_next()
+ test_result, testdir, stdout, stderr = job_queue.get_next()
test_results.append(test_result)
if test_result.status == "Passed":
@@ -314,6 +329,14 @@ def run_tests(test_list, src_dir, build_dir, exeext, tmpdir, jobs=1, enable_cove
print("\n%s%s%s failed, Duration: %s s\n" % (BOLD[1], test_result.name, BOLD[0], test_result.time))
print(BOLD[1] + 'stdout:\n' + BOLD[0] + stdout + '\n')
print(BOLD[1] + 'stderr:\n' + BOLD[0] + stderr + '\n')
+ if combined_logs_len and os.path.isdir(testdir):
+ # Print the final `combinedlogslen` lines of the combined logs
+ print('{}Combine the logs and print the last {} lines ...{}'.format(BOLD[1], combined_logs_len, BOLD[0]))
+ print('\n============')
+ print('{}Combined log for {}:{}'.format(BOLD[1], testdir, BOLD[0]))
+ print('============\n')
+ combined_logs, _ = subprocess.Popen([os.path.join(tests_dir, 'combine_logs.py'), '-c', testdir], universal_newlines=True, stdout=subprocess.PIPE).communicate()
+ print("\n".join(deque(combined_logs.splitlines(), combined_logs_len)))
print_results(test_results, max_len_name, (int(time.time() - time0)))
@@ -351,7 +374,7 @@ def print_results(test_results, max_len_name, runtime):
class TestHandler:
"""
- Trigger the testscrips passed in via the list.
+ Trigger the test scripts passed in via the list.
"""
def __init__(self, num_tests_parallel, tests_dir, tmpdir, test_list=None, flags=None):
@@ -378,13 +401,15 @@ class TestHandler:
log_stdout = tempfile.SpooledTemporaryFile(max_size=2**16)
log_stderr = tempfile.SpooledTemporaryFile(max_size=2**16)
test_argv = t.split()
- tmpdir = ["--tmpdir=%s/%s_%s" % (self.tmpdir, re.sub(".py$", "", test_argv[0]), portseed)]
+ testdir = "{}/{}_{}".format(self.tmpdir, re.sub(".py$", "", test_argv[0]), portseed)
+ tmpdir_arg = ["--tmpdir={}".format(testdir)]
self.jobs.append((t,
time.time(),
- subprocess.Popen([self.tests_dir + test_argv[0]] + test_argv[1:] + self.flags + portseed_arg + tmpdir,
+ subprocess.Popen([self.tests_dir + test_argv[0]] + test_argv[1:] + self.flags + portseed_arg + tmpdir_arg,
universal_newlines=True,
stdout=log_stdout,
stderr=log_stderr),
+ testdir,
log_stdout,
log_stderr))
if not self.jobs:
@@ -393,7 +418,7 @@ class TestHandler:
# Return first proc that finishes
time.sleep(.5)
for j in self.jobs:
- (name, time0, proc, log_out, log_err) = j
+ (name, time0, proc, testdir, log_out, log_err) = j
if os.getenv('TRAVIS') == 'true' and int(time.time() - time0) > 20 * 60:
# In travis, timeout individual tests after 20 minutes (to stop tests hanging and not
# providing useful output.
@@ -411,7 +436,7 @@ class TestHandler:
self.num_running -= 1
self.jobs.remove(j)
- return TestResult(name, status, int(time.time() - time0)), stdout, stderr
+ return TestResult(name, status, int(time.time() - time0)), testdir, stdout, stderr
print('.', end='', flush=True)
class TestResult():
@@ -453,7 +478,7 @@ def check_script_list(src_dir):
# On travis this warning is an error to prevent merging incomplete commits into master
sys.exit(1)
-class RPCCoverage(object):
+class RPCCoverage():
"""
Coverage reporting utilities for test_runner.
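
As a usage note (not part of the diff itself): the new --combinedlogslen option lets the runner dump the tail of the combined per-node and framework logs when a test fails, e.g. test/functional/test_runner.py --combinedlogslen=200; the testdir value now threaded through TestHandler.get_next() is what allows the combine_logs.py invocation above to locate the failing test's datadirs.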
diff --git a/test/functional/txn_clone.py b/test/functional/txn_clone.py
index 9b81af96cf..740bb2d4c5 100755
--- a/test/functional/txn_clone.py
+++ b/test/functional/txn_clone.py
@@ -8,11 +8,8 @@ from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
class TxnMallTest(BitcoinTestFramework):
-
- def __init__(self):
- super().__init__()
+ def set_test_params(self):
self.num_nodes = 4
- self.setup_clean_chain = False
def add_options(self, parser):
parser.add_option("--mineblock", dest="mine_block", default=False, action="store_true",
diff --git a/test/functional/txn_doublespend.py b/test/functional/txn_doublespend.py
index 1bd3b3271c..69629ef951 100755
--- a/test/functional/txn_doublespend.py
+++ b/test/functional/txn_doublespend.py
@@ -8,11 +8,8 @@ from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
class TxnMallTest(BitcoinTestFramework):
-
- def __init__(self):
- super().__init__()
+ def set_test_params(self):
self.num_nodes = 4
- self.setup_clean_chain = False
def add_options(self, parser):
parser.add_option("--mineblock", dest="mine_block", default=False, action="store_true",
diff --git a/test/functional/uacomment.py b/test/functional/uacomment.py
new file mode 100755
index 0000000000..0b2c64ab69
--- /dev/null
+++ b/test/functional/uacomment.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python3
+# Copyright (c) 2017 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Test the -uacomment option."""
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import assert_equal
+
+class UacommentTest(BitcoinTestFramework):
+ def set_test_params(self):
+ self.num_nodes = 1
+ self.setup_clean_chain = True
+
+ def run_test(self):
+ self.log.info("test multiple -uacomment")
+ test_uacomment = self.nodes[0].getnetworkinfo()["subversion"][-12:-1]
+ assert_equal(test_uacomment, "(testnode0)")
+
+ self.restart_node(0, ["-uacomment=foo"])
+ foo_uacomment = self.nodes[0].getnetworkinfo()["subversion"][-17:-1]
+ assert_equal(foo_uacomment, "(testnode0; foo)")
+
+ self.log.info("test -uacomment max length")
+ self.stop_node(0)
+ expected = "Total length of network version string (286) exceeds maximum length (256). Reduce the number or size of uacomments."
+ self.assert_start_raises_init_error(0, ["-uacomment=" + 'a' * 256], expected)
+
+ self.log.info("test -uacomment unsafe characters")
+ for unsafe_char in ['/', ':', '(', ')']:
+ expected = "User Agent comment (" + unsafe_char + ") contains unsafe characters"
+ self.assert_start_raises_init_error(0, ["-uacomment=" + unsafe_char], expected)
+
+if __name__ == '__main__':
+ UacommentTest().main()
diff --git a/test/functional/uptime.py b/test/functional/uptime.py
index b20d6f5cb6..78236b2393 100755
--- a/test/functional/uptime.py
+++ b/test/functional/uptime.py
@@ -13,9 +13,7 @@ from test_framework.test_framework import BitcoinTestFramework
class UptimeTest(BitcoinTestFramework):
- def __init__(self):
- super().__init__()
-
+ def set_test_params(self):
self.num_nodes = 1
self.setup_clean_chain = True
diff --git a/test/functional/wallet-accounts.py b/test/functional/wallet-accounts.py
index 158aa9ae89..bc1efaee15 100755
--- a/test/functional/wallet-accounts.py
+++ b/test/functional/wallet-accounts.py
@@ -17,9 +17,7 @@ from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal
class WalletAccountsTest(BitcoinTestFramework):
-
- def __init__(self):
- super().__init__()
+ def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
self.extra_args = [[]]
@@ -74,62 +72,135 @@ class WalletAccountsTest(BitcoinTestFramework):
# otherwise we're off by exactly the fee amount as that's mined
# and matures in the next 100 blocks
node.sendfrom("", common_address, fee)
- accounts = ["a", "b", "c", "d", "e"]
amount_to_send = 1.0
- account_addresses = dict()
+
+ # Create accounts and make sure subsequent account API calls
+ # recognize the account/address associations.
+ accounts = [Account(name) for name in ("a", "b", "c", "d", "e")]
for account in accounts:
- address = node.getaccountaddress(account)
- account_addresses[account] = address
-
- node.getnewaddress(account)
- assert_equal(node.getaccount(address), account)
- assert(address in node.getaddressesbyaccount(account))
-
- node.sendfrom("", address, amount_to_send)
-
+ account.add_receive_address(node.getaccountaddress(account.name))
+ account.verify(node)
+
+ # Send a transaction to each account, and make sure this forces
+ # getaccountaddress to generate a new receiving address.
+ for account in accounts:
+ node.sendtoaddress(account.receive_address, amount_to_send)
+ account.add_receive_address(node.getaccountaddress(account.name))
+ account.verify(node)
+
+ # Check the amounts received.
node.generate(1)
+ for account in accounts:
+ assert_equal(
+ node.getreceivedbyaddress(account.addresses[0]), amount_to_send)
+ assert_equal(node.getreceivedbyaccount(account.name), amount_to_send)
- for i in range(len(accounts)):
- from_account = accounts[i]
+ # Check that sendfrom account reduces listaccounts balances.
+ for i, account in enumerate(accounts):
to_account = accounts[(i+1) % len(accounts)]
- to_address = account_addresses[to_account]
- node.sendfrom(from_account, to_address, amount_to_send)
-
+ node.sendfrom(account.name, to_account.receive_address, amount_to_send)
node.generate(1)
-
for account in accounts:
- address = node.getaccountaddress(account)
- assert(address != account_addresses[account])
- assert_equal(node.getreceivedbyaccount(account), 2)
- node.move(account, "", node.getbalance(account))
-
+ account.add_receive_address(node.getaccountaddress(account.name))
+ account.verify(node)
+ assert_equal(node.getreceivedbyaccount(account.name), 2)
+ node.move(account.name, "", node.getbalance(account.name))
+ account.verify(node)
node.generate(101)
-
expected_account_balances = {"": 5200}
for account in accounts:
- expected_account_balances[account] = 0
-
+ expected_account_balances[account.name] = 0
assert_equal(node.listaccounts(), expected_account_balances)
-
assert_equal(node.getbalance(""), 5200)
+ # Check that setaccount can assign an account to a new unused address.
for account in accounts:
address = node.getaccountaddress("")
- node.setaccount(address, account)
- assert(address in node.getaddressesbyaccount(account))
+ node.setaccount(address, account.name)
+ account.add_address(address)
+ account.verify(node)
assert(address not in node.getaddressesbyaccount(""))
+ # Check that addmultisigaddress can assign accounts.
for account in accounts:
addresses = []
for x in range(10):
addresses.append(node.getnewaddress())
- multisig_address = node.addmultisigaddress(5, addresses, account)
+ multisig_address = node.addmultisigaddress(5, addresses, account.name)
+ account.add_address(multisig_address)
+ account.verify(node)
node.sendfrom("", multisig_address, 50)
-
node.generate(101)
-
for account in accounts:
- assert_equal(node.getbalance(account), 50)
+ assert_equal(node.getbalance(account.name), 50)
+
+ # Check that setaccount can change the account of an address from a
+ # different account.
+ change_account(node, accounts[0].addresses[0], accounts[0], accounts[1])
+
+ # Check that setaccount can change the account of an address which
+ # is the receiving address of a different account.
+ change_account(node, accounts[0].receive_address, accounts[0], accounts[1])
+
+ # Check that setaccount can set the account of an address already
+ # in the account. This is a no-op.
+ change_account(node, accounts[2].addresses[0], accounts[2], accounts[2])
+
+ # Check that setaccount can set the account of an address which is
+ # already the receiving address of the account. It would probably make
+ # sense for this to be a no-op, but right now it resets the receiving
+ # address, causing getaccountaddress to return a brand new address.
+ change_account(node, accounts[2].receive_address, accounts[2], accounts[2])
+
+class Account:
+ def __init__(self, name):
+ # Account name
+ self.name = name
+ # Current receiving address associated with this account.
+ self.receive_address = None
+ # List of all addresses assigned to this account
+ self.addresses = []
+
+ def add_address(self, address):
+ assert_equal(address not in self.addresses, True)
+ self.addresses.append(address)
+
+ def add_receive_address(self, address):
+ self.add_address(address)
+ self.receive_address = address
+
+ def verify(self, node):
+ if self.receive_address is not None:
+ assert self.receive_address in self.addresses
+ assert_equal(node.getaccountaddress(self.name), self.receive_address)
+
+ for address in self.addresses:
+ assert_equal(node.getaccount(address), self.name)
+
+ assert_equal(
+ set(node.getaddressesbyaccount(self.name)), set(self.addresses))
+
+
+def change_account(node, address, old_account, new_account):
+ assert_equal(address in old_account.addresses, True)
+ node.setaccount(address, new_account.name)
+
+ old_account.addresses.remove(address)
+ new_account.add_address(address)
+
+ # Calling setaccount on an address which was previously the receiving
+ # address of a different account should reset the receiving address of
+ # the old account, causing getaccountaddress to return a brand new
+ # address.
+ if address == old_account.receive_address:
+ new_address = node.getaccountaddress(old_account.name)
+ assert_equal(new_address not in old_account.addresses, True)
+ assert_equal(new_address not in new_account.addresses, True)
+ old_account.add_receive_address(new_address)
+
+ old_account.verify(node)
+ new_account.verify(node)
+
if __name__ == '__main__':
WalletAccountsTest().main()
diff --git a/test/functional/wallet-dump.py b/test/functional/wallet-dump.py
index 569cc46e6c..47de8777a6 100755
--- a/test/functional/wallet-dump.py
+++ b/test/functional/wallet-dump.py
@@ -7,7 +7,7 @@
import os
from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import assert_equal
+from test_framework.util import (assert_equal, assert_raises_rpc_error)
def read_dump(file_name, addrs, hd_master_addr_old):
@@ -56,10 +56,7 @@ def read_dump(file_name, addrs, hd_master_addr_old):
class WalletDumpTest(BitcoinTestFramework):
-
- def __init__(self):
- super().__init__()
- self.setup_clean_chain = False
+ def set_test_params(self):
self.num_nodes = 1
self.extra_args = [["-keypool=90"]]
@@ -68,7 +65,8 @@ class WalletDumpTest(BitcoinTestFramework):
# longer than the default 30 seconds due to an expensive
# CWallet::TopUpKeyPool call, and the encryptwallet RPC made later in
# the test often takes even longer.
- self.nodes = self.start_nodes(self.num_nodes, self.options.tmpdir, self.extra_args, timewait=60)
+ self.add_nodes(self.num_nodes, self.extra_args, timewait=60)
+ self.start_nodes()
def run_test (self):
tmpdir = self.options.tmpdir
@@ -94,19 +92,21 @@ class WalletDumpTest(BitcoinTestFramework):
assert_equal(found_addr_rsv, 90*2) # 90 keys plus 100% internal keys
#encrypt wallet, restart, unlock and dump
- self.nodes[0].encryptwallet('test')
- self.bitcoind_processes[0].wait()
- self.nodes[0] = self.start_node(0, self.options.tmpdir, self.extra_args[0])
+ self.nodes[0].node_encrypt_wallet('test')
+ self.start_node(0)
self.nodes[0].walletpassphrase('test', 10)
# Should be a no-op:
self.nodes[0].keypoolrefill()
self.nodes[0].dumpwallet(tmpdir + "/node0/wallet.encrypted.dump")
- found_addr, found_addr_chg, found_addr_rsv, hd_master_addr_enc = \
+ found_addr, found_addr_chg, found_addr_rsv, _ = \
read_dump(tmpdir + "/node0/wallet.encrypted.dump", addrs, hd_master_addr_unenc)
assert_equal(found_addr, test_addr_count)
assert_equal(found_addr_chg, 90*2 + 50) # old reserve keys are marked as change now
assert_equal(found_addr_rsv, 90*2)
+ # Overwriting should fail
+ assert_raises_rpc_error(-8, "already exists", self.nodes[0].dumpwallet, tmpdir + "/node0/wallet.unencrypted.dump")
+
if __name__ == '__main__':
WalletDumpTest().main ()
diff --git a/test/functional/wallet-encryption.py b/test/functional/wallet-encryption.py
index ba72918fe1..db62e1e30f 100755
--- a/test/functional/wallet-encryption.py
+++ b/test/functional/wallet-encryption.py
@@ -6,16 +6,14 @@
import time
-from test_framework.test_framework import BitcoinTestFramework, BITCOIND_PROC_WAIT_TIMEOUT
+from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
- assert_raises_jsonrpc,
+ assert_raises_rpc_error,
)
class WalletEncryptionTest(BitcoinTestFramework):
-
- def __init__(self):
- super().__init__()
+ def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
@@ -30,12 +28,11 @@ class WalletEncryptionTest(BitcoinTestFramework):
assert_equal(len(privkey), 52)
# Encrypt the wallet
- self.nodes[0].encryptwallet(passphrase)
- self.bitcoind_processes[0].wait(timeout=BITCOIND_PROC_WAIT_TIMEOUT)
- self.nodes[0] = self.start_node(0, self.options.tmpdir)
+ self.nodes[0].node_encrypt_wallet(passphrase)
+ self.start_node(0)
# Test that the wallet is encrypted
- assert_raises_jsonrpc(-13, "Please enter the wallet passphrase with walletpassphrase first", self.nodes[0].dumpprivkey, address)
+ assert_raises_rpc_error(-13, "Please enter the wallet passphrase with walletpassphrase first", self.nodes[0].dumpprivkey, address)
# Check that walletpassphrase works
self.nodes[0].walletpassphrase(passphrase, 2)
@@ -43,20 +40,20 @@ class WalletEncryptionTest(BitcoinTestFramework):
# Check that the timeout is right
time.sleep(2)
- assert_raises_jsonrpc(-13, "Please enter the wallet passphrase with walletpassphrase first", self.nodes[0].dumpprivkey, address)
+ assert_raises_rpc_error(-13, "Please enter the wallet passphrase with walletpassphrase first", self.nodes[0].dumpprivkey, address)
# Test wrong passphrase
- assert_raises_jsonrpc(-14, "wallet passphrase entered was incorrect", self.nodes[0].walletpassphrase, passphrase + "wrong", 10)
+ assert_raises_rpc_error(-14, "wallet passphrase entered was incorrect", self.nodes[0].walletpassphrase, passphrase + "wrong", 10)
# Test walletlock
self.nodes[0].walletpassphrase(passphrase, 84600)
assert_equal(privkey, self.nodes[0].dumpprivkey(address))
self.nodes[0].walletlock()
- assert_raises_jsonrpc(-13, "Please enter the wallet passphrase with walletpassphrase first", self.nodes[0].dumpprivkey, address)
+ assert_raises_rpc_error(-13, "Please enter the wallet passphrase with walletpassphrase first", self.nodes[0].dumpprivkey, address)
# Test passphrase changes
self.nodes[0].walletpassphrasechange(passphrase, passphrase2)
- assert_raises_jsonrpc(-14, "wallet passphrase entered was incorrect", self.nodes[0].walletpassphrase, passphrase, 10)
+ assert_raises_rpc_error(-14, "wallet passphrase entered was incorrect", self.nodes[0].walletpassphrase, passphrase, 10)
self.nodes[0].walletpassphrase(passphrase2, 10)
assert_equal(privkey, self.nodes[0].dumpprivkey(address))
diff --git a/test/functional/wallet-hd.py b/test/functional/wallet-hd.py
index dfd3dc83c5..d21656a971 100755
--- a/test/functional/wallet-hd.py
+++ b/test/functional/wallet-hd.py
@@ -9,25 +9,22 @@ from test_framework.util import (
assert_equal,
connect_nodes_bi,
)
-import os
import shutil
-
+import os
class WalletHDTest(BitcoinTestFramework):
-
- def __init__(self):
- super().__init__()
+ def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 2
- self.extra_args = [['-usehd=0'], ['-usehd=1', '-keypool=0']]
+ self.extra_args = [[], ['-keypool=0']]
def run_test (self):
tmpdir = self.options.tmpdir
# Make sure can't switch off usehd after wallet creation
self.stop_node(1)
- self.assert_start_raises_init_error(1, self.options.tmpdir, ['-usehd=0'], 'already existing HD wallet')
- self.nodes[1] = self.start_node(1, self.options.tmpdir, self.extra_args[1])
+ self.assert_start_raises_init_error(1, ['-usehd=0'], 'already existing HD wallet')
+ self.start_node(1)
connect_nodes_bi(self.nodes, 0, 1)
# Make sure we use hd, keep masterkeyid
@@ -43,7 +40,7 @@ class WalletHDTest(BitcoinTestFramework):
non_hd_add = self.nodes[0].getnewaddress()
self.nodes[1].importprivkey(self.nodes[0].dumpprivkey(non_hd_add))
- # This should be enough to keep the master key and the non-HD key
+ # This should be enough to keep the master key and the non-HD key
self.nodes[1].backupwallet(tmpdir + "/hd.bak")
#self.nodes[1].dumpwallet(tmpdir + "/hd.dump")
@@ -55,7 +52,7 @@ class WalletHDTest(BitcoinTestFramework):
for i in range(num_hd_adds):
hd_add = self.nodes[1].getnewaddress()
hd_info = self.nodes[1].validateaddress(hd_add)
- assert_equal(hd_info["hdkeypath"], "m/0'/0'/"+str(i+1)+"'")
+ assert_equal(hd_info["hdkeypath"], "m/0'/0'/"+str(i)+"'")
assert_equal(hd_info["hdmasterkeyid"], masterkeyid)
self.nodes[0].sendtoaddress(hd_add, 1)
self.nodes[0].generate(1)
@@ -72,24 +69,43 @@ class WalletHDTest(BitcoinTestFramework):
self.log.info("Restore backup ...")
self.stop_node(1)
- os.remove(self.options.tmpdir + "/node1/regtest/wallet.dat")
- shutil.copyfile(tmpdir + "/hd.bak", tmpdir + "/node1/regtest/wallet.dat")
- self.nodes[1] = self.start_node(1, self.options.tmpdir, self.extra_args[1])
- #connect_nodes_bi(self.nodes, 0, 1)
+ # we need to delete node1's blocks and chainstate directories,
+ # otherwise node1 would auto-recover all funds and flag the keypool keys as used
+ shutil.rmtree(os.path.join(tmpdir, "node1/regtest/blocks"))
+ shutil.rmtree(os.path.join(tmpdir, "node1/regtest/chainstate"))
+ shutil.copyfile(os.path.join(tmpdir, "hd.bak"), os.path.join(tmpdir, "node1/regtest/wallets/wallet.dat"))
+ self.start_node(1)
# Assert that derivation is deterministic
hd_add_2 = None
for _ in range(num_hd_adds):
hd_add_2 = self.nodes[1].getnewaddress()
hd_info_2 = self.nodes[1].validateaddress(hd_add_2)
- assert_equal(hd_info_2["hdkeypath"], "m/0'/0'/"+str(_+1)+"'")
+ assert_equal(hd_info_2["hdkeypath"], "m/0'/0'/"+str(_)+"'")
assert_equal(hd_info_2["hdmasterkeyid"], masterkeyid)
assert_equal(hd_add, hd_add_2)
+ connect_nodes_bi(self.nodes, 0, 1)
+ self.sync_all()
# Needs rescan
self.stop_node(1)
- self.nodes[1] = self.start_node(1, self.options.tmpdir, self.extra_args[1] + ['-rescan'])
- #connect_nodes_bi(self.nodes, 0, 1)
+ self.start_node(1, extra_args=self.extra_args[1] + ['-rescan'])
+ assert_equal(self.nodes[1].getbalance(), num_hd_adds + 1)
+
+ # Try a RPC based rescan
+ self.stop_node(1)
+ shutil.rmtree(os.path.join(tmpdir, "node1/regtest/blocks"))
+ shutil.rmtree(os.path.join(tmpdir, "node1/regtest/chainstate"))
+ shutil.copyfile(os.path.join(tmpdir, "hd.bak"), os.path.join(tmpdir, "node1/regtest/wallet.dat"))
+ self.start_node(1, extra_args=self.extra_args[1])
+ connect_nodes_bi(self.nodes, 0, 1)
+ self.sync_all()
+ out = self.nodes[1].rescanblockchain(0, 1)
+ assert_equal(out['start_height'], 0)
+ assert_equal(out['stop_height'], 1)
+ out = self.nodes[1].rescanblockchain()
+ assert_equal(out['start_height'], 0)
+ assert_equal(out['stop_height'], self.nodes[1].getblockcount())
assert_equal(self.nodes[1].getbalance(), num_hd_adds + 1)
# send a tx and make sure its using the internal chain for the changeoutput
@@ -99,7 +115,7 @@ class WalletHDTest(BitcoinTestFramework):
for out in outs:
if out['value'] != 1:
keypath = self.nodes[1].validateaddress(out['scriptPubKey']['addresses'][0])['hdkeypath']
-
+
assert_equal(keypath[0:7], "m/0'/1'")
if __name__ == '__main__':
diff --git a/test/functional/wallet.py b/test/functional/wallet.py
index 3e3e8fcddb..db60df18ed 100755
--- a/test/functional/wallet.py
+++ b/test/functional/wallet.py
@@ -7,28 +7,27 @@ from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
class WalletTest(BitcoinTestFramework):
-
- def check_fee_amount(self, curr_balance, balance_with_fee, fee_per_byte, tx_size):
- """Return curr_balance after asserting the fee was in range"""
- fee = balance_with_fee - curr_balance
- assert_fee_amount(fee, tx_size, fee_per_byte * 1000)
- return curr_balance
-
- def __init__(self):
- super().__init__()
- self.setup_clean_chain = True
+ def set_test_params(self):
self.num_nodes = 4
- self.extra_args = [['-usehd={:d}'.format(i%2==0)] for i in range(4)]
+ self.setup_clean_chain = True
def setup_network(self):
- self.nodes = self.start_nodes(3, self.options.tmpdir, self.extra_args[:3])
+ self.add_nodes(4)
+ self.start_node(0)
+ self.start_node(1)
+ self.start_node(2)
connect_nodes_bi(self.nodes,0,1)
connect_nodes_bi(self.nodes,1,2)
connect_nodes_bi(self.nodes,0,2)
- self.sync_all()
+ self.sync_all([self.nodes[0:3]])
- def run_test(self):
+ def check_fee_amount(self, curr_balance, balance_with_fee, fee_per_byte, tx_size):
+ """Return curr_balance after asserting the fee was in range"""
+ fee = balance_with_fee - curr_balance
+ assert_fee_amount(fee, tx_size, fee_per_byte * 1000)
+ return curr_balance
+ def run_test(self):
# Check that there's no UTXO on none of the nodes
assert_equal(len(self.nodes[0].listunspent()), 0)
assert_equal(len(self.nodes[1].listunspent()), 0)
@@ -42,9 +41,9 @@ class WalletTest(BitcoinTestFramework):
assert_equal(walletinfo['immature_balance'], 50)
assert_equal(walletinfo['balance'], 0)
- self.sync_all()
+ self.sync_all([self.nodes[0:3]])
self.nodes[1].generate(101)
- self.sync_all()
+ self.sync_all([self.nodes[0:3]])
assert_equal(self.nodes[0].getbalance(), 50)
assert_equal(self.nodes[1].getbalance(), 50)
@@ -56,6 +55,15 @@ class WalletTest(BitcoinTestFramework):
assert_equal(len(self.nodes[1].listunspent()), 1)
assert_equal(len(self.nodes[2].listunspent()), 0)
+ self.log.info("test gettxout")
+ confirmed_txid, confirmed_index = utxos[0]["txid"], utxos[0]["vout"]
+ # First, outputs that are unspent both in the chain and in the
+ # mempool should appear with or without include_mempool
+ txout = self.nodes[0].gettxout(txid=confirmed_txid, n=confirmed_index, include_mempool=False)
+ assert_equal(txout['value'], 50)
+ txout = self.nodes[0].gettxout(txid=confirmed_txid, n=confirmed_index, include_mempool=True)
+ assert_equal(txout['value'], 50)
+
# Send 21 BTC from 0 to 2 using sendtoaddress call.
# Locked memory should use at least 32 bytes to sign each transaction
self.log.info("test getmemoryinfo")
@@ -65,10 +73,9 @@ class WalletTest(BitcoinTestFramework):
memory_after = self.nodes[0].getmemoryinfo()
assert(memory_before['locked']['used'] + 64 <= memory_after['locked']['used'])
- self.log.info("test gettxout")
+ self.log.info("test gettxout (second part)")
# utxo spent in mempool should be visible if you exclude mempool
# but invisible if you include mempool
- confirmed_txid, confirmed_index = utxos[0]["txid"], utxos[0]["vout"]
txout = self.nodes[0].gettxout(confirmed_txid, confirmed_index, False)
assert_equal(txout['value'], 50)
txout = self.nodes[0].gettxout(confirmed_txid, confirmed_index, True)
@@ -88,20 +95,28 @@ class WalletTest(BitcoinTestFramework):
# Have node0 mine a block, thus it will collect its own fee.
self.nodes[0].generate(1)
- self.sync_all()
+ self.sync_all([self.nodes[0:3]])
# Exercise locking of unspent outputs
unspent_0 = self.nodes[2].listunspent()[0]
unspent_0 = {"txid": unspent_0["txid"], "vout": unspent_0["vout"]}
+ assert_raises_rpc_error(-8, "Invalid parameter, expected locked output", self.nodes[2].lockunspent, True, [unspent_0])
self.nodes[2].lockunspent(False, [unspent_0])
- assert_raises_jsonrpc(-4, "Insufficient funds", self.nodes[2].sendtoaddress, self.nodes[2].getnewaddress(), 20)
+ assert_raises_rpc_error(-8, "Invalid parameter, output already locked", self.nodes[2].lockunspent, False, [unspent_0])
+ assert_raises_rpc_error(-4, "Insufficient funds", self.nodes[2].sendtoaddress, self.nodes[2].getnewaddress(), 20)
assert_equal([unspent_0], self.nodes[2].listlockunspent())
self.nodes[2].lockunspent(True, [unspent_0])
assert_equal(len(self.nodes[2].listlockunspent()), 0)
+ assert_raises_rpc_error(-8, "Invalid parameter, unknown transaction",
+ self.nodes[2].lockunspent, False,
+ [{"txid": "0000000000000000000000000000000000", "vout": 0}])
+ assert_raises_rpc_error(-8, "Invalid parameter, vout index out of bounds",
+ self.nodes[2].lockunspent, False,
+ [{"txid": unspent_0["txid"], "vout": 999}])
# Have node1 generate 100 blocks (so node0 can recover the fee)
self.nodes[1].generate(100)
- self.sync_all()
+ self.sync_all([self.nodes[0:3]])
# node0 should end up with 100 btc in block rewards plus fees, but
# minus the 21 plus fees sent to node2
@@ -130,26 +145,30 @@ class WalletTest(BitcoinTestFramework):
# Have node1 mine a block to confirm transactions:
self.nodes[1].generate(1)
- self.sync_all()
+ self.sync_all([self.nodes[0:3]])
assert_equal(self.nodes[0].getbalance(), 0)
assert_equal(self.nodes[2].getbalance(), 94)
assert_equal(self.nodes[2].getbalance("from1"), 94-21)
+ # Verify that a spent output cannot be locked anymore
+ spent_0 = {"txid": node0utxos[0]["txid"], "vout": node0utxos[0]["vout"]}
+ assert_raises_rpc_error(-8, "Invalid parameter, expected unspent output", self.nodes[0].lockunspent, False, [spent_0])
+
# Send 10 BTC normal
address = self.nodes[0].getnewaddress("test")
fee_per_byte = Decimal('0.001') / 1000
self.nodes[2].settxfee(fee_per_byte * 1000)
txid = self.nodes[2].sendtoaddress(address, 10, "", "", False)
self.nodes[2].generate(1)
- self.sync_all()
+ self.sync_all([self.nodes[0:3]])
node_2_bal = self.check_fee_amount(self.nodes[2].getbalance(), Decimal('84'), fee_per_byte, count_bytes(self.nodes[2].getrawtransaction(txid)))
assert_equal(self.nodes[0].getbalance(), Decimal('10'))
# Send 10 BTC with subtract fee from amount
txid = self.nodes[2].sendtoaddress(address, 10, "", "", True)
self.nodes[2].generate(1)
- self.sync_all()
+ self.sync_all([self.nodes[0:3]])
node_2_bal -= Decimal('10')
assert_equal(self.nodes[2].getbalance(), node_2_bal)
node_0_bal = self.check_fee_amount(self.nodes[0].getbalance(), Decimal('20'), fee_per_byte, count_bytes(self.nodes[2].getrawtransaction(txid)))
@@ -157,7 +176,7 @@ class WalletTest(BitcoinTestFramework):
# Sendmany 10 BTC
txid = self.nodes[2].sendmany('from1', {address: 10}, 0, "", [])
self.nodes[2].generate(1)
- self.sync_all()
+ self.sync_all([self.nodes[0:3]])
node_0_bal += Decimal('10')
node_2_bal = self.check_fee_amount(self.nodes[2].getbalance(), node_2_bal - Decimal('10'), fee_per_byte, count_bytes(self.nodes[2].getrawtransaction(txid)))
assert_equal(self.nodes[0].getbalance(), node_0_bal)
@@ -165,7 +184,7 @@ class WalletTest(BitcoinTestFramework):
# Sendmany 10 BTC with subtract fee from amount
txid = self.nodes[2].sendmany('from1', {address: 10}, 0, "", [address])
self.nodes[2].generate(1)
- self.sync_all()
+ self.sync_all([self.nodes[0:3]])
node_2_bal -= Decimal('10')
assert_equal(self.nodes[2].getbalance(), node_2_bal)
node_0_bal = self.check_fee_amount(self.nodes[0].getbalance(), node_0_bal + Decimal('10'), fee_per_byte, count_bytes(self.nodes[2].getrawtransaction(txid)))
@@ -176,9 +195,9 @@ class WalletTest(BitcoinTestFramework):
# EXPECT: nodes[3] should have those transactions in its mempool.
txid1 = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 1)
txid2 = self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 1)
- sync_mempools(self.nodes)
+ sync_mempools(self.nodes[0:2])
- self.nodes.append(self.start_node(3, self.options.tmpdir, self.extra_args[3]))
+ self.start_node(3)
connect_nodes_bi(self.nodes, 0, 3)
sync_blocks(self.nodes)
@@ -206,7 +225,7 @@ class WalletTest(BitcoinTestFramework):
signedRawTx = self.nodes[1].signrawtransaction(rawTx)
decRawTx = self.nodes[1].decoderawtransaction(signedRawTx['hex'])
zeroValueTxid= decRawTx['txid']
- sendResp = self.nodes[1].sendrawtransaction(signedRawTx['hex'])
+ self.nodes[1].sendrawtransaction(signedRawTx['hex'])
self.sync_all()
self.nodes[1].generate(1) #mine a block
@@ -222,22 +241,24 @@ class WalletTest(BitcoinTestFramework):
#do some -walletbroadcast tests
self.stop_nodes()
- self.nodes = self.start_nodes(3, self.options.tmpdir, [["-walletbroadcast=0"],["-walletbroadcast=0"],["-walletbroadcast=0"]])
+ self.start_node(0, ["-walletbroadcast=0"])
+ self.start_node(1, ["-walletbroadcast=0"])
+ self.start_node(2, ["-walletbroadcast=0"])
connect_nodes_bi(self.nodes,0,1)
connect_nodes_bi(self.nodes,1,2)
connect_nodes_bi(self.nodes,0,2)
- self.sync_all()
+ self.sync_all([self.nodes[0:3]])
txIdNotBroadcasted = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 2)
txObjNotBroadcasted = self.nodes[0].gettransaction(txIdNotBroadcasted)
self.nodes[1].generate(1) #mine a block, tx should not be in there
- self.sync_all()
+ self.sync_all([self.nodes[0:3]])
assert_equal(self.nodes[2].getbalance(), node_2_bal) #should not be changed because tx was not broadcasted
#now broadcast from another node, mine a block, sync, and check the balance
self.nodes[1].sendrawtransaction(txObjNotBroadcasted['hex'])
self.nodes[1].generate(1)
- self.sync_all()
+ self.sync_all([self.nodes[0:3]])
node_2_bal += 2
txObjNotBroadcasted = self.nodes[0].gettransaction(txIdNotBroadcasted)
assert_equal(self.nodes[2].getbalance(), node_2_bal)
@@ -247,14 +268,16 @@ class WalletTest(BitcoinTestFramework):
#restart the nodes with -walletbroadcast=1
self.stop_nodes()
- self.nodes = self.start_nodes(3, self.options.tmpdir)
+ self.start_node(0)
+ self.start_node(1)
+ self.start_node(2)
connect_nodes_bi(self.nodes,0,1)
connect_nodes_bi(self.nodes,1,2)
connect_nodes_bi(self.nodes,0,2)
- sync_blocks(self.nodes)
+ sync_blocks(self.nodes[0:3])
self.nodes[0].generate(1)
- sync_blocks(self.nodes)
+ sync_blocks(self.nodes[0:3])
node_2_bal += 2
#tx should be added to balance because after restarting the nodes tx should be broadcast
@@ -275,17 +298,17 @@ class WalletTest(BitcoinTestFramework):
assert_equal(txObj['amount'], Decimal('-0.0001'))
# This will raise an exception because the amount type is wrong
- assert_raises_jsonrpc(-3, "Invalid amount", self.nodes[0].sendtoaddress, self.nodes[2].getnewaddress(), "1f-4")
+ assert_raises_rpc_error(-3, "Invalid amount", self.nodes[0].sendtoaddress, self.nodes[2].getnewaddress(), "1f-4")
# This will raise an exception since generate does not accept a string
- assert_raises_jsonrpc(-1, "not an integer", self.nodes[0].generate, "2")
+ assert_raises_rpc_error(-1, "not an integer", self.nodes[0].generate, "2")
# Import address and private key to check correct behavior of spendable unspents
# 1. Send some coins to generate new UTXO
address_to_import = self.nodes[2].getnewaddress()
txid = self.nodes[0].sendtoaddress(address_to_import, 1)
self.nodes[0].generate(1)
- self.sync_all()
+ self.sync_all([self.nodes[0:3]])
# 2. Import address from node2 to node1
self.nodes[1].importaddress(address_to_import)
@@ -311,15 +334,15 @@ class WalletTest(BitcoinTestFramework):
cbAddr = self.nodes[1].getnewaddress()
blkHash = self.nodes[0].generatetoaddress(1, cbAddr)[0]
cbTxId = self.nodes[0].getblock(blkHash)['tx'][0]
- self.sync_all()
+ self.sync_all([self.nodes[0:3]])
# Check that the txid and balance is found by node1
self.nodes[1].gettransaction(cbTxId)
# check if wallet or blockchain maintenance changes the balance
- self.sync_all()
+ self.sync_all([self.nodes[0:3]])
blocks = self.nodes[0].generate(2)
- self.sync_all()
+ self.sync_all([self.nodes[0:3]])
balance_nodes = [self.nodes[i].getbalance() for i in range(3)]
block_count = self.nodes[0].getblockcount()
@@ -350,7 +373,9 @@ class WalletTest(BitcoinTestFramework):
self.log.info("check " + m)
self.stop_nodes()
# set lower ancestor limit for later
- self.nodes = self.start_nodes(3, self.options.tmpdir, [[m, "-limitancestorcount="+str(chainlimit)]] * 3)
+ self.start_node(0, [m, "-limitancestorcount="+str(chainlimit)])
+ self.start_node(1, [m, "-limitancestorcount="+str(chainlimit)])
+ self.start_node(2, [m, "-limitancestorcount="+str(chainlimit)])
while m == '-reindex' and [block_count] * 3 != [self.nodes[i].getblockcount() for i in range(3)]:
# reindex will leave rpc warm up "early"; Wait for it to finish
time.sleep(0.1)
@@ -398,7 +423,7 @@ class WalletTest(BitcoinTestFramework):
# Try with walletrejectlongchains
# Double chain limit but require combining inputs, so we pass SelectCoinsMinConf
self.stop_node(0)
- self.nodes[0] = self.start_node(0, self.options.tmpdir, ["-walletrejectlongchains", "-limitancestorcount="+str(2*chainlimit)])
+ self.start_node(0, extra_args=["-walletrejectlongchains", "-limitancestorcount="+str(2*chainlimit)])
# wait for loadmempool
timeout = 10
@@ -409,7 +434,7 @@ class WalletTest(BitcoinTestFramework):
node0_balance = self.nodes[0].getbalance()
# With walletrejectlongchains we will not create the tx and store it in our wallet.
- assert_raises_jsonrpc(-4, "Transaction has too long of a mempool chain", self.nodes[0].sendtoaddress, sending_addr, node0_balance - Decimal('0.01'))
+ assert_raises_rpc_error(-4, "Transaction has too long of a mempool chain", self.nodes[0].sendtoaddress, sending_addr, node0_balance - Decimal('0.01'))
# Verify nothing new in wallet
assert_equal(total_txs, len(self.nodes[0].listtransactions("*",99999)))
diff --git a/test/functional/walletbackup.py b/test/functional/walletbackup.py
index ff51cba4b3..8ef5620cd8 100755
--- a/test/functional/walletbackup.py
+++ b/test/functional/walletbackup.py
@@ -37,11 +37,9 @@ from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
class WalletBackupTest(BitcoinTestFramework):
-
- def __init__(self):
- super().__init__()
- self.setup_clean_chain = True
+ def set_test_params(self):
self.num_nodes = 4
+ self.setup_clean_chain = True
# nodes 1, 2,3 are spenders, let's give them a keypool=100
self.extra_args = [["-keypool=100"], ["-keypool=100"], ["-keypool=100"], []]
@@ -78,9 +76,9 @@ class WalletBackupTest(BitcoinTestFramework):
# As above, this mirrors the original bash test.
def start_three(self):
- self.nodes[0] = self.start_node(0, self.options.tmpdir)
- self.nodes[1] = self.start_node(1, self.options.tmpdir)
- self.nodes[2] = self.start_node(2, self.options.tmpdir)
+ self.start_node(0)
+ self.start_node(1)
+ self.start_node(2)
connect_nodes(self.nodes[0], 3)
connect_nodes(self.nodes[1], 3)
connect_nodes(self.nodes[2], 3)
@@ -92,9 +90,9 @@ class WalletBackupTest(BitcoinTestFramework):
self.stop_node(2)
def erase_three(self):
- os.remove(self.options.tmpdir + "/node0/regtest/wallet.dat")
- os.remove(self.options.tmpdir + "/node1/regtest/wallet.dat")
- os.remove(self.options.tmpdir + "/node2/regtest/wallet.dat")
+ os.remove(self.options.tmpdir + "/node0/regtest/wallets/wallet.dat")
+ os.remove(self.options.tmpdir + "/node1/regtest/wallets/wallet.dat")
+ os.remove(self.options.tmpdir + "/node2/regtest/wallets/wallet.dat")
def run_test(self):
self.log.info("Generating initial blockchain")
@@ -156,9 +154,9 @@ class WalletBackupTest(BitcoinTestFramework):
shutil.rmtree(self.options.tmpdir + "/node2/regtest/chainstate")
# Restore wallets from backup
- shutil.copyfile(tmpdir + "/node0/wallet.bak", tmpdir + "/node0/regtest/wallet.dat")
- shutil.copyfile(tmpdir + "/node1/wallet.bak", tmpdir + "/node1/regtest/wallet.dat")
- shutil.copyfile(tmpdir + "/node2/wallet.bak", tmpdir + "/node2/regtest/wallet.dat")
+ shutil.copyfile(tmpdir + "/node0/wallet.bak", tmpdir + "/node0/regtest/wallets/wallet.dat")
+ shutil.copyfile(tmpdir + "/node1/wallet.bak", tmpdir + "/node1/regtest/wallets/wallet.dat")
+ shutil.copyfile(tmpdir + "/node2/wallet.bak", tmpdir + "/node2/regtest/wallets/wallet.dat")
self.log.info("Re-starting nodes")
self.start_three()
@@ -192,6 +190,16 @@ class WalletBackupTest(BitcoinTestFramework):
assert_equal(self.nodes[1].getbalance(), balance1)
assert_equal(self.nodes[2].getbalance(), balance2)
+ # Backup to source wallet file must fail
+ sourcePaths = [
+ tmpdir + "/node0/regtest/wallets/wallet.dat",
+ tmpdir + "/node0/./regtest/wallets/wallet.dat",
+ tmpdir + "/node0/regtest/wallets/",
+ tmpdir + "/node0/regtest/wallets"]
+
+ for sourcePath in sourcePaths:
+ assert_raises_rpc_error(-4, "backup failed", self.nodes[0].backupwallet, sourcePath)
+
if __name__ == '__main__':
WalletBackupTest().main()
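The four sourcePaths added above all point at the live wallet file or at the directory containing it, which is why backupwallet has to refuse every one of them. A small illustrative check of that equivalence in plain Python; the paths are made up and os.path.normpath is only an approximation of the comparison the RPC performs:

    import os.path

    wallet = "/tmp/node0/regtest/wallets/wallet.dat"
    candidates = [
        "/tmp/node0/regtest/wallets/wallet.dat",
        "/tmp/node0/./regtest/wallets/wallet.dat",
        "/tmp/node0/regtest/wallets/",
        "/tmp/node0/regtest/wallets",
    ]
    for path in candidates:
        normalised = os.path.normpath(path)
        # Each candidate collapses to the wallet file itself or to its parent directory.
        assert normalised in (wallet, os.path.dirname(wallet))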
diff --git a/test/functional/zapwallettxes.py b/test/functional/zapwallettxes.py
index e4d40520ef..8cd622dc8e 100755
--- a/test/functional/zapwallettxes.py
+++ b/test/functional/zapwallettxes.py
@@ -4,77 +4,75 @@
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the zapwallettxes functionality.
-- start three bitcoind nodes
-- create four transactions on node 0 - two are confirmed and two are
- unconfirmed.
-- restart node 1 and verify that both the confirmed and the unconfirmed
+- start two bitcoind nodes
+- create two transactions on node 0 - one is confirmed and one is unconfirmed.
+- restart node 0 and verify that both the confirmed and the unconfirmed
transactions are still available.
-- restart node 0 and verify that the confirmed transactions are still
- available, but that the unconfirmed transaction has been zapped.
+- restart node 0 with zapwallettxes and persistmempool, and verify that both
+ the confirmed and the unconfirmed transactions are still available.
+- restart node 0 with just zapwallettxes and verify that the confirmed
+ transactions are still available, but that the unconfirmed transaction has
+ been zapped.
"""
from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import *
-
+from test_framework.util import (
+ assert_equal,
+ assert_raises_rpc_error,
+ wait_until,
+)
class ZapWalletTXesTest (BitcoinTestFramework):
-
- def __init__(self):
- super().__init__()
+ def set_test_params(self):
self.setup_clean_chain = True
- self.num_nodes = 3
+ self.num_nodes = 2
- def setup_network(self):
- super().setup_network()
- connect_nodes_bi(self.nodes,0,2)
-
- def run_test (self):
+ def run_test(self):
self.log.info("Mining blocks...")
self.nodes[0].generate(1)
self.sync_all()
- self.nodes[1].generate(101)
- self.sync_all()
-
- assert_equal(self.nodes[0].getbalance(), 50)
-
- txid0 = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 11)
- txid1 = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 10)
+ self.nodes[1].generate(100)
self.sync_all()
+
+ # This transaction will be confirmed
+ txid1 = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 10)
+
self.nodes[0].generate(1)
self.sync_all()
-
- txid2 = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 11)
- txid3 = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 10)
-
- tx0 = self.nodes[0].gettransaction(txid0)
- assert_equal(tx0['txid'], txid0) #tx0 must be available (confirmed)
-
- tx1 = self.nodes[0].gettransaction(txid1)
- assert_equal(tx1['txid'], txid1) #tx1 must be available (confirmed)
-
- tx2 = self.nodes[0].gettransaction(txid2)
- assert_equal(tx2['txid'], txid2) #tx2 must be available (unconfirmed)
-
- tx3 = self.nodes[0].gettransaction(txid3)
- assert_equal(tx3['txid'], txid3) #tx3 must be available (unconfirmed)
-
- #restart bitcoind
+
+ # This transaction will not be confirmed
+ txid2 = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 20)
+
+ # Confirmed and unconfirmed transactions are now in the wallet.
+ assert_equal(self.nodes[0].gettransaction(txid1)['txid'], txid1)
+ assert_equal(self.nodes[0].gettransaction(txid2)['txid'], txid2)
+
+ # Stop-start node0. Both confirmed and unconfirmed transactions remain in the wallet.
+ self.stop_node(0)
+ self.start_node(0)
+
+ assert_equal(self.nodes[0].gettransaction(txid1)['txid'], txid1)
+ assert_equal(self.nodes[0].gettransaction(txid2)['txid'], txid2)
+
+ # Stop node0 and restart with zapwallettxes and persistmempool. The unconfirmed
+ # transaction is zapped from the wallet, but is re-added when the mempool is reloaded.
self.stop_node(0)
- self.nodes[0] = self.start_node(0,self.options.tmpdir)
-
- tx3 = self.nodes[0].gettransaction(txid3)
- assert_equal(tx3['txid'], txid3) #tx must be available (unconfirmed)
-
+ self.start_node(0, ["-persistmempool=1", "-zapwallettxes=2"])
+
+ wait_until(lambda: self.nodes[0].getmempoolinfo()['size'] == 1, timeout=3)
+
+ assert_equal(self.nodes[0].gettransaction(txid1)['txid'], txid1)
+ assert_equal(self.nodes[0].gettransaction(txid2)['txid'], txid2)
+
+ # Stop node0 and restart with zapwallettxes, but not persistmempool.
+ # The unconfirmed transaction is zapped and is no longer in the wallet.
self.stop_node(0)
-
- #restart bitcoind with zapwallettxes
- self.nodes[0] = self.start_node(0,self.options.tmpdir, ["-zapwallettxes=1"])
-
- assert_raises(JSONRPCException, self.nodes[0].gettransaction, [txid3])
- #there must be an exception because the unconfirmed wallettx0 must be gone by now
+ self.start_node(0, ["-zapwallettxes=2"])
- tx0 = self.nodes[0].gettransaction(txid0)
- assert_equal(tx0['txid'], txid0) #tx0 (confirmed) must still be available because it was confirmed
+ # tx1 is still available because it was confirmed
+ assert_equal(self.nodes[0].gettransaction(txid1)['txid'], txid1)
+ # This will raise an exception because the unconfirmed transaction has been zapped
+ assert_raises_rpc_error(-5, 'Invalid or non-wallet transaction id', self.nodes[0].gettransaction, txid2)
if __name__ == '__main__':
- ZapWalletTXesTest ().main ()
+ ZapWalletTXesTest().main()
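The wait_until call above gives node0 a moment to reload mempool.dat before the wallet is queried again. A minimal sketch of what such a predicate-polling helper does; this is an illustration, not the test_framework implementation:

    import time

    def wait_until_sketch(predicate, timeout=60, interval=0.5):
        """Poll predicate() until it returns True, or raise after timeout seconds."""
        deadline = time.time() + timeout
        while time.time() < deadline:
            if predicate():
                return
            time.sleep(interval)
        raise AssertionError("predicate still false after %s seconds" % timeout)

    # Usage against a hypothetical node handle:
    # wait_until_sketch(lambda: node.getmempoolinfo()['size'] == 1, timeout=3)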
diff --git a/test/functional/zmq_test.py b/test/functional/zmq_test.py
index 26c946d215..fa30318416 100755
--- a/test/functional/zmq_test.py
+++ b/test/functional/zmq_test.py
@@ -2,20 +2,40 @@
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
-"""Test the ZMQ API."""
+"""Test the ZMQ notification interface."""
import configparser
import os
import struct
from test_framework.test_framework import BitcoinTestFramework, SkipTest
+from test_framework.mininode import CTransaction
from test_framework.util import (assert_equal,
bytes_to_hex_str,
- )
+ hash256,
+ )
+from io import BytesIO
+
+class ZMQSubscriber:
+ def __init__(self, socket, topic):
+ self.sequence = 0
+ self.socket = socket
+ self.topic = topic
+
+ import zmq
+ self.socket.setsockopt(zmq.SUBSCRIBE, self.topic)
+
+ def receive(self):
+ topic, body, seq = self.socket.recv_multipart()
+ # Topic should match the subscriber topic.
+ assert_equal(topic, self.topic)
+ # Sequence should be incremental.
+ assert_equal(struct.unpack('<I', seq)[-1], self.sequence)
+ self.sequence += 1
+ return body
-class ZMQTest (BitcoinTestFramework):
- def __init__(self):
- super().__init__()
+class ZMQTest (BitcoinTestFramework):
+ def set_test_params(self):
self.num_nodes = 2
def setup_nodes(self):
@@ -25,90 +45,82 @@ class ZMQTest (BitcoinTestFramework):
except ImportError:
raise SkipTest("python3-zmq module not available.")
- # Check that bitcoin has been built with ZMQ enabled
+ # Check that bitcoin has been built with ZMQ enabled.
config = configparser.ConfigParser()
if not self.options.configfile:
- self.options.configfile = os.path.dirname(__file__) + "/../config.ini"
+ self.options.configfile = os.path.abspath(os.path.join(os.path.dirname(__file__), "../config.ini"))
config.read_file(open(self.options.configfile))
if not config["components"].getboolean("ENABLE_ZMQ"):
raise SkipTest("bitcoind has not been built with zmq enabled.")
- self.zmqContext = zmq.Context()
- self.zmqSubSocket = self.zmqContext.socket(zmq.SUB)
- self.zmqSubSocket.set(zmq.RCVTIMEO, 60000)
- self.zmqSubSocket.setsockopt(zmq.SUBSCRIBE, b"hashblock")
- self.zmqSubSocket.setsockopt(zmq.SUBSCRIBE, b"hashtx")
- ip_address = "tcp://127.0.0.1:28332"
- self.zmqSubSocket.connect(ip_address)
- extra_args = [['-zmqpubhashtx=%s' % ip_address, '-zmqpubhashblock=%s' % ip_address], []]
- self.nodes = self.start_nodes(self.num_nodes, self.options.tmpdir, extra_args)
+ # Initialize ZMQ context and socket.
+ # All messages are received on the same socket, which means
+ # that this test fails if the publishing order changes.
+ # Note that the publishing order is not defined in the documentation and
+ # is subject to change.
+ address = "tcp://127.0.0.1:28332"
+ self.zmq_context = zmq.Context()
+ socket = self.zmq_context.socket(zmq.SUB)
+ socket.set(zmq.RCVTIMEO, 60000)
+ socket.connect(address)
+
+ # Subscribe to all available topics.
+ self.hashblock = ZMQSubscriber(socket, b"hashblock")
+ self.hashtx = ZMQSubscriber(socket, b"hashtx")
+ self.rawblock = ZMQSubscriber(socket, b"rawblock")
+ self.rawtx = ZMQSubscriber(socket, b"rawtx")
+
+ self.extra_args = [["-zmqpub%s=%s" % (sub.topic.decode(), address) for sub in [self.hashblock, self.hashtx, self.rawblock, self.rawtx]], []]
+ self.add_nodes(self.num_nodes, self.extra_args)
+ self.start_nodes()
def run_test(self):
try:
self._zmq_test()
finally:
- # Destroy the zmq context
- self.log.debug("Destroying zmq context")
- self.zmqContext.destroy(linger=None)
+ # Destroy the ZMQ context.
+ self.log.debug("Destroying ZMQ context")
+ self.zmq_context.destroy(linger=None)
def _zmq_test(self):
- genhashes = self.nodes[0].generate(1)
+ num_blocks = 5
+ self.log.info("Generate %(n)d blocks (and %(n)d coinbase txes)" % {"n": num_blocks})
+ genhashes = self.nodes[0].generate(num_blocks)
self.sync_all()
- self.log.info("Wait for tx")
- msg = self.zmqSubSocket.recv_multipart()
- topic = msg[0]
- assert_equal(topic, b"hashtx")
- body = msg[1]
- msgSequence = struct.unpack('<I', msg[-1])[-1]
- assert_equal(msgSequence, 0) # must be sequence 0 on hashtx
-
- self.log.info("Wait for block")
- msg = self.zmqSubSocket.recv_multipart()
- topic = msg[0]
- body = msg[1]
- msgSequence = struct.unpack('<I', msg[-1])[-1]
- assert_equal(msgSequence, 0) # must be sequence 0 on hashblock
- blkhash = bytes_to_hex_str(body)
-
- assert_equal(genhashes[0], blkhash) # blockhash from generate must be equal to the hash received over zmq
-
- self.log.info("Generate 10 blocks (and 10 coinbase txes)")
- n = 10
- genhashes = self.nodes[1].generate(n)
- self.sync_all()
+ for x in range(num_blocks):
+ # Should receive the coinbase txid.
+ txid = self.hashtx.receive()
+
+ # Should receive the coinbase raw transaction.
+ hex = self.rawtx.receive()
+ tx = CTransaction()
+ tx.deserialize(BytesIO(hex))
+ tx.calc_sha256()
+ assert_equal(tx.hash, bytes_to_hex_str(txid))
+
+ # Should receive the generated block hash.
+ hash = bytes_to_hex_str(self.hashblock.receive())
+ assert_equal(genhashes[x], hash)
+ # The block should only have the coinbase txid.
+ assert_equal([bytes_to_hex_str(txid)], self.nodes[1].getblock(hash)["tx"])
- zmqHashes = []
- blockcount = 0
- for x in range(n * 2):
- msg = self.zmqSubSocket.recv_multipart()
- topic = msg[0]
- body = msg[1]
- if topic == b"hashblock":
- zmqHashes.append(bytes_to_hex_str(body))
- msgSequence = struct.unpack('<I', msg[-1])[-1]
- assert_equal(msgSequence, blockcount + 1)
- blockcount += 1
-
- for x in range(n):
- assert_equal(genhashes[x], zmqHashes[x]) # blockhash from generate must be equal to the hash received over zmq
+ # Should receive the generated raw block.
+ block = self.rawblock.receive()
+ assert_equal(genhashes[x], bytes_to_hex_str(hash256(block[:80])))
self.log.info("Wait for tx from second node")
- # test tx from a second node
- hashRPC = self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 1.0)
+ payment_txid = self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 1.0)
self.sync_all()
- # now we should receive a zmq msg because the tx was broadcast
- msg = self.zmqSubSocket.recv_multipart()
- topic = msg[0]
- body = msg[1]
- assert_equal(topic, b"hashtx")
- hashZMQ = bytes_to_hex_str(body)
- msgSequence = struct.unpack('<I', msg[-1])[-1]
- assert_equal(msgSequence, blockcount + 1)
+ # Should receive the broadcasted txid.
+ txid = self.hashtx.receive()
+ assert_equal(payment_txid, bytes_to_hex_str(txid))
- assert_equal(hashRPC, hashZMQ) # txid from sendtoaddress must be equal to the hash received over zmq
+ # Should receive the broadcasted raw transaction.
+ hex = self.rawtx.receive()
+ assert_equal(payment_txid, bytes_to_hex_str(hash256(hex)))
if __name__ == '__main__':
ZMQTest().main()
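Outside the functional test framework, the same notifications can be consumed with a few lines of pyzmq. A minimal sketch, assuming a locally running bitcoind started with -zmqpubhashblock=tcp://127.0.0.1:28332; the endpoint and topic are illustrative, and as the assertions above show, the raw* payloads line up with the hash* ones (e.g. the block hash is hash256 of the 80-byte header):

    import struct
    import zmq

    context = zmq.Context()
    socket = context.socket(zmq.SUB)
    socket.setsockopt(zmq.SUBSCRIBE, b"hashblock")   # topic filter
    socket.connect("tcp://127.0.0.1:28332")

    # Each notification is a three-part message: topic, payload and a
    # little-endian uint32 sequence number.
    topic, body, seq = socket.recv_multipart()
    print("topic    :", topic.decode())
    print("blockhash:", body.hex())
    print("sequence :", struct.unpack('<I', seq)[0])

    context.destroy()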
diff --git a/test/util/bitcoin-util-test.py b/test/util/bitcoin-util-test.py
index d15d6a6011..ef34955d90 100755
--- a/test/util/bitcoin-util-test.py
+++ b/test/util/bitcoin-util-test.py
@@ -9,9 +9,14 @@ Runs automatically during `make check`.
Can also be run manually."""
+from __future__ import division,print_function,unicode_literals
+
import argparse
import binascii
-import configparser
+try:
+ import configparser
+except ImportError:
+ import ConfigParser as configparser
import difflib
import json
import logging
@@ -22,7 +27,9 @@ import sys
def main():
config = configparser.ConfigParser()
- config.read_file(open(os.path.dirname(__file__) + "/../config.ini"))
+ config.optionxform = str
+ config.readfp(open(os.path.join(os.path.dirname(__file__), "../config.ini")))
+ env_conf = dict(config.items('environment'))
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('-v', '--verbose', action='store_true')
@@ -37,7 +44,7 @@ def main():
# Add the format/level to the logger
logging.basicConfig(format=formatter, level=level)
- bctester(config["environment"]["SRCDIR"] + "/test/util/data", "bitcoin-util-test.json", config["environment"])
+ bctester(os.path.join(env_conf["SRCDIR"], "test/util/data"), "bitcoin-util-test.json", env_conf)
def bctester(testDir, input_basename, buildenv):
""" Loads and parses the input file, runs all tests and reports results"""
diff --git a/test/util/data/bitcoin-util-test.json b/test/util/data/bitcoin-util-test.json
index b61a4f7f8f..89b28bba6c 100644
--- a/test/util/data/bitcoin-util-test.json
+++ b/test/util/data/bitcoin-util-test.json
@@ -263,6 +263,13 @@
},
{ "exec": "./bitcoin-tx",
"args":
+ ["-json", "-create", "outpubkey=0:047d1368ba7ae01c94bc32293efd70bd7e3be7aa7912d07d0b1c659c1008d179b8642f5fb90f47580feb29f045e216ff5a4716d3a0fed36da414d332046303c44a:WS", "nversion=1"],
+ "return_code": 1,
+ "error_txt": "error: Uncompressed pubkeys are not useable for SegWit outputs",
+ "description": "Creates a new transaction with a single pay-to-pub-key output, wrapped in P2SH (output as json)"
+ },
+ { "exec": "./bitcoin-tx",
+ "args":
["-create",
"in=5897de6bd6027a475eadd57019d4e6872c396d0716c4875a5f1a6fcfdf385c1f:0",
"outdata=4:badhexdata"],
@@ -388,5 +395,16 @@
"args": ["-json", "-create", "outmultisig=1:2:3:02a5613bd857b7048924264d1e70e08fb2a7e6527d32b7ab1bb993ac59964ff397:021ac43c7ff740014c3b33737ede99c967e4764553d1b2b83db77c83b8715fa72d:02df2089105c77f266fa11a9d33f05c735234075f2e8780824c6b709415f9fb485:WS", "nversion=1"],
"output_cmp": "txcreatemultisig4.json",
"description": "Creates a new transaction with a single 2-of-3 multisig in a P2WSH output, wrapped in P2SH (output in json)"
+ },
+ { "exec": "./bitcoin-tx",
+ "args": ["-json", "-create", "outmultisig=1:2:3:02a5613bd857b7048924264d1e70e08fb2a7e6527d32b7ab1bb993ac59964ff397:021ac43c7ff740014c3b33737ede99c967e4764553d1b2b83db77c83b8715fa72d:047d1368ba7ae01c94bc32293efd70bd7e3be7aa7912d07d0b1c659c1008d179b8642f5fb90f47580feb29f045e216ff5a4716d3a0fed36da414d332046303c44a:S"],
+ "output_cmp": "txcreatemultisig5.json",
+ "description": "Uncompressed pubkeys should work just fine for non-witness outputs"
+ },
+ { "exec": "./bitcoin-tx",
+ "args": ["-json", "-create", "outmultisig=1:2:3:02a5613bd857b7048924264d1e70e08fb2a7e6527d32b7ab1bb993ac59964ff397:021ac43c7ff740014c3b33737ede99c967e4764553d1b2b83db77c83b8715fa72d:047d1368ba7ae01c94bc32293efd70bd7e3be7aa7912d07d0b1c659c1008d179b8642f5fb90f47580feb29f045e216ff5a4716d3a0fed36da414d332046303c44a:WS"],
+ "return_code": 1,
+ "error_txt": "error: Uncompressed pubkeys are not useable for SegWit outputs",
+ "description": "Ensure adding witness outputs with uncompressed pubkeys fails"
}
]
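The new cases turn on the difference between compressed and uncompressed public keys: bitcoin-tx only accepts the 33-byte compressed form (0x02/0x03 prefix) for witness outputs, while the 65-byte 0x04 form remains fine in legacy outputs, as the txcreatemultisig5 case shows. A small illustrative classifier over the two keys used above:

    # First key: compressed (33 bytes, 02/03 prefix). Second key: uncompressed (65 bytes, 04 prefix).
    keys = [
        "02a5613bd857b7048924264d1e70e08fb2a7e6527d32b7ab1bb993ac59964ff397",
        "047d1368ba7ae01c94bc32293efd70bd7e3be7aa7912d07d0b1c659c1008d179b8642f5fb90f47580feb29f045e216ff5a4716d3a0fed36da414d332046303c44a",
    ]

    def usable_for_segwit(pubkey_hex):
        raw = bytes.fromhex(pubkey_hex)
        return len(raw) == 33 and raw[0] in (0x02, 0x03)

    for key in keys:
        print(key[:10] + "...", "ok for witness outputs" if usable_for_segwit(key) else "legacy outputs only")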
diff --git a/test/util/data/tt-delin1-out.json b/test/util/data/tt-delin1-out.json
index f6dfbb51cc..de647f98b6 100644
--- a/test/util/data/tt-delin1-out.json
+++ b/test/util/data/tt-delin1-out.json
@@ -14,7 +14,7 @@
"hex": "493046022100b4251ecd63778a3dde0155abe4cd162947620ae9ee45a874353551092325b116022100db307baf4ff3781ec520bd18f387948cedd15dc27bafe17c894b0fe6ffffcafa012103091137f3ef23f4acfc19a5953a68b2074fae942ad3563ef28c33b0cac9a93adc"
},
"sequence": 4294967295
- },
+ },
{
"txid": "752f7f69b915637dc1c2f7aed1466ad676f6f3e24cf922809705f664e97ab3c1",
"vout": 1,
@@ -23,7 +23,7 @@
"hex": "473044022079bd62ee09621a3be96b760c39e8ef78170101d46313923c6b07ae60a95c90670220238e51ea29fc70b04b65508450523caedbb11cb4dd5aa608c81487de798925ba0121027a759be8df971a6a04fafcb4f6babf75dc811c5cdaa0734cddbe9b942ce75b34"
},
"sequence": 4294967295
- },
+ },
{
"txid": "b0ac9cca2e69cd02410e31b1f4402a25758e71abd1ab06c265ef9077dc05d0ed",
"vout": 209,
@@ -32,7 +32,7 @@
"hex": "48304502207722d6f9038673c86a1019b1c4de2d687ae246477cd4ca7002762be0299de385022100e594a11e3a313942595f7666dcf7078bcb14f1330f4206b95c917e7ec0e82fac012103091137f3ef23f4acfc19a5953a68b2074fae942ad3563ef28c33b0cac9a93adc"
},
"sequence": 4294967295
- },
+ },
{
"txid": "a135eafb595eaf4c1ea59ccb111cdc0eae1b2c979b226a1e5aa8b76fe2d628df",
"vout": 0,
@@ -41,7 +41,7 @@
"hex": "483045022100a63a4788027b79b65c6f9d9e054f68cf3b4eed19efd82a2d53f70dcbe64683390220526f243671425b2bd05745fcf2729361f985cfe84ea80c7cfc817b93d8134374012103a621f08be22d1bbdcbe4e527ee4927006aa555fc65e2aafa767d4ea2fe9dfa52"
},
"sequence": 4294967295
- },
+ },
{
"txid": "a5d6bf53ba21140b8a4d554feb00fe8bb9a62430ff9e4624aa2f58a120232aae",
"vout": 1,
@@ -50,7 +50,7 @@
"hex": "493046022100b200ac6db16842f76dab9abe807ce423c992805879bc50abd46ed8275a59d9cf022100c0d518e85dd345b3c29dd4dc47b9a420d3ce817b18720e94966d2fe23413a408012103091137f3ef23f4acfc19a5953a68b2074fae942ad3563ef28c33b0cac9a93adc"
},
"sequence": 4294967295
- },
+ },
{
"txid": "1b299cf14f1a22e81ea56d71b7affbd7cf386807bf2b4d4b79a18a54125accb3",
"vout": 0,
@@ -59,7 +59,7 @@
"hex": "483045022100ededc441c3103a6f2bd6cab7639421af0f6ec5e60503bce1e603cf34f00aee1c02205cb75f3f519a13fb348783b21db3085cb5ec7552c59e394fdbc3e1feea43f967012103a621f08be22d1bbdcbe4e527ee4927006aa555fc65e2aafa767d4ea2fe9dfa52"
},
"sequence": 4294967295
- },
+ },
{
"txid": "071df1cdcb3f0070f9d6af7b0274f02d0be2324a274727cfd288383167531485",
"vout": 21,
@@ -68,7 +68,7 @@
"hex": "483045022100d9eed5413d2a4b4b98625aa6e3169edc4fb4663e7862316d69224454e70cd8ca022061e506521d5ced51dd0ea36496e75904d756a4c4f9fb111568555075d5f68d9a012103f1575d6124ac78be398c25b31146d08313c6072d23a4d7df5ac6a9f87346c64c"
},
"sequence": 4294967295
- },
+ },
{
"txid": "b012e500eb7adf7a13ed332dd6ece849f94f7a62bb3eac5babab356d1fc19282",
"vout": 9,
@@ -77,7 +77,7 @@
"hex": "48304502207e84b27139c4c19c828cb1e30c349bba88e4d9b59be97286960793b5ddc0a2af0221008cdc7a951e7f31c20953ed5635fbabf228e80b7047f32faaa0313e7693005177012103f1575d6124ac78be398c25b31146d08313c6072d23a4d7df5ac6a9f87346c64c"
},
"sequence": 4294967295
- },
+ },
{
"txid": "58840fee9c833f2f2d40575842f30f4b8d2553094d06ad88b03d06869acf3d88",
"vout": 30,
@@ -86,7 +86,7 @@
"hex": "4730440220426540dfed9c4ab5812e5f06df705b8bcf307dd7d20f7fa6512298b2a6314f420220064055096e3ca62f6c7352c66a5447767c53f946acdf35025ab3807ddb2fa404012103f1575d6124ac78be398c25b31146d08313c6072d23a4d7df5ac6a9f87346c64c"
},
"sequence": 4294967295
- },
+ },
{
"txid": "e69f9cd16946e570a665245354428a3f507ea69f4568b581e4af98edb3db9766",
"vout": 114,
@@ -95,7 +95,7 @@
"hex": "47304402200a5e673996f2fc88e21cc8613611f08a650bc0370338803591d85d0ec5663764022040b6664a0d1ec83a7f01975b8fde5232992b8ca58bf48af6725d2f92a936ab2e012103f1575d6124ac78be398c25b31146d08313c6072d23a4d7df5ac6a9f87346c64c"
},
"sequence": 4294967295
- },
+ },
{
"txid": "595d1257f654ed2cbe5a65421e8aefd2b4d70b5b6c89a03f1d7e518221fc3f02",
"vout": 103,
@@ -104,7 +104,7 @@
"hex": "493046022100d93b30219c5735f673be5c3b4688366d96f545561c74cb62c6958c00f6960806022100ec8200adcb028f2184fa2a4f6faac7f8bb57cb4503bb7584ac11051fece31b3d012103091137f3ef23f4acfc19a5953a68b2074fae942ad3563ef28c33b0cac9a93adc"
},
"sequence": 4294967295
- },
+ },
{
"txid": "06fc818f9555a261248ecd7aad0993eafb5a82ceb2b5c87c3ddfb06671c7f816",
"vout": 1,
@@ -113,7 +113,7 @@
"hex": "483045022100a13934e68d3f5b22b130c4cb33f4da468cffc52323a47fbfbe06b64858162246022047081e0a70ff770e64a2e2d31e5d520d9102268b57a47009a72fe73ec766901801210234b9d9413f247bb78cd3293b7b65a2c38018ba5621ea9ee737f3a6a3523fb4cd"
},
"sequence": 4294967295
- },
+ },
{
"txid": "fb416c8155d6bb1d43f9395466ca90a638a7c2dd3ff617aadf3a7ac8f3967b19",
"vout": 0,
@@ -122,7 +122,7 @@
"hex": "49304602210097f1f35d5bdc1a3a60390a1b015b8e7c4f916aa3847aafd969e04975e15bbe70022100a9052eb25517d481f1fda1b129eb1b534da50ea1a51f3ee012dca3601c11b86a0121027a759be8df971a6a04fafcb4f6babf75dc811c5cdaa0734cddbe9b942ce75b34"
},
"sequence": 4294967295
- },
+ },
{
"txid": "3940b9683bd6104ad24c978e640ba4095993cafdb27d2ed91baa27ee61a2d920",
"vout": 221,
@@ -131,7 +131,7 @@
"hex": "483045022012b3138c591bf7154b6fef457f2c4a3c7162225003788ac0024a99355865ff13022100b71b125ae1ffb2e1d1571f580cd3ebc8cd049a2d7a8a41f138ba94aeb982106f012103091137f3ef23f4acfc19a5953a68b2074fae942ad3563ef28c33b0cac9a93adc"
},
"sequence": 4294967295
- },
+ },
{
"txid": "711b5714d3b5136147c02194cd95bde94a4648c4263ca6f972d86cd1d579f150",
"vout": 1,
@@ -140,7 +140,7 @@
"hex": "483045022100f834ccc8b22ee72712a3e5e6ef4acb8b2fb791b5385b70e2cd4332674d6667f4022024fbda0a997e0c253503f217501f508a4d56edce2c813ecdd9ad796dbeba907401210234b9d9413f247bb78cd3293b7b65a2c38018ba5621ea9ee737f3a6a3523fb4cd"
},
"sequence": 4294967295
- },
+ },
{
"txid": "6364b5c5efe018430789e7fb4e338209546cae5d9c5f5e300aac68155d861b55",
"vout": 27,
@@ -149,7 +149,7 @@
"hex": "48304502203b2fd1e39ae0e469d7a15768f262661b0de41470daf0fe8c4fd0c26542a0870002210081c57e331f9a2d214457d953e3542904727ee412c63028113635d7224da3dccc012103f1575d6124ac78be398c25b31146d08313c6072d23a4d7df5ac6a9f87346c64c"
},
"sequence": 4294967295
- },
+ },
{
"txid": "0bb57f6e38012c86d4c5a28c904f2675082859147921a707d48961015a3e5057",
"vout": 1095,
@@ -158,7 +158,7 @@
"hex": "48304502206947a9c54f0664ece4430fd4ae999891dc50bb6126bc36b6a15a3189f29d25e9022100a86cfc4e2fdd9e39a20e305cfd1b76509c67b3e313e0f118229105caa0e823c9012103f1575d6124ac78be398c25b31146d08313c6072d23a4d7df5ac6a9f87346c64c"
},
"sequence": 4294967295
- },
+ },
{
"txid": "9b34274814a2540bb062107117f8f3e75ef85d953e9372d8261a3e9dfbc1163f",
"vout": 37,
@@ -167,7 +167,7 @@
"hex": "483045022100c7128fe10b2d38744ae8177776054c29fc8ec13f07207723e70766ab7164847402201d2cf09009b9596de74c0183d1ab832e5edddb7a9965880bb400097e850850f8012103f1575d6124ac78be398c25b31146d08313c6072d23a4d7df5ac6a9f87346c64c"
},
"sequence": 4294967295
- },
+ },
{
"txid": "b86b5cc0d8a7374d94e277850b0a249cb26a7b42ddf014f28a49b8859da64241",
"vout": 20,
@@ -176,7 +176,7 @@
"hex": "48304502203b89a71628a28cc3703d170ca3be77786cff6b867e38a18b719705f8a326578f022100b2a9879e1acf621faa6466c207746a7f3eb4c8514c1482969aba3f2a957f1321012103f1575d6124ac78be398c25b31146d08313c6072d23a4d7df5ac6a9f87346c64c"
},
"sequence": 4294967295
- },
+ },
{
"txid": "3d0a2353eeec44d3c10aed259038db321912122cd4150048f7bfa4c0ecfee236",
"vout": 242,
@@ -189,7 +189,7 @@
],
"vout": [
{
- "value": 1.3782,
+ "value": 1.37820000,
"n": 0,
"scriptPubKey": {
"asm": "OP_DUP OP_HASH160 8fd139bb39ced713f231c58a4d07bf6954d1c201 OP_EQUALVERIFY OP_CHECKSIG",
@@ -200,7 +200,7 @@
"1E7SGgAZFCHDnVZLuRViX3gUmxpMfdvd2o"
]
}
- },
+ },
{
"value": 0.01000001,
"n": 1,
diff --git a/test/util/data/tt-delout1-out.json b/test/util/data/tt-delout1-out.json
index 6769ed79ff..067ffe74e7 100644
--- a/test/util/data/tt-delout1-out.json
+++ b/test/util/data/tt-delout1-out.json
@@ -14,7 +14,7 @@
"hex": "493046022100b4251ecd63778a3dde0155abe4cd162947620ae9ee45a874353551092325b116022100db307baf4ff3781ec520bd18f387948cedd15dc27bafe17c894b0fe6ffffcafa012103091137f3ef23f4acfc19a5953a68b2074fae942ad3563ef28c33b0cac9a93adc"
},
"sequence": 4294967295
- },
+ },
{
"txid": "a72ec96bd0d022d1b0c2f9078cdd46b3725b8eecdd001e17b21e3ababad14ecb",
"vout": 0,
@@ -23,7 +23,7 @@
"hex": "493046022100a9b617843b68c284715d3e02fd120479cd0d96a6c43bf01e697fb0a460a21a3a022100ba0a12fbe8b993d4e7911fa3467615765dbe421ddf5c51b57a9c1ee19dcc00ba012103e633b4fa4ceb705c2da712390767199be8ef2448b3095dc01652e11b2b751505"
},
"sequence": 4294967295
- },
+ },
{
"txid": "752f7f69b915637dc1c2f7aed1466ad676f6f3e24cf922809705f664e97ab3c1",
"vout": 1,
@@ -32,7 +32,7 @@
"hex": "473044022079bd62ee09621a3be96b760c39e8ef78170101d46313923c6b07ae60a95c90670220238e51ea29fc70b04b65508450523caedbb11cb4dd5aa608c81487de798925ba0121027a759be8df971a6a04fafcb4f6babf75dc811c5cdaa0734cddbe9b942ce75b34"
},
"sequence": 4294967295
- },
+ },
{
"txid": "b0ac9cca2e69cd02410e31b1f4402a25758e71abd1ab06c265ef9077dc05d0ed",
"vout": 209,
@@ -41,7 +41,7 @@
"hex": "48304502207722d6f9038673c86a1019b1c4de2d687ae246477cd4ca7002762be0299de385022100e594a11e3a313942595f7666dcf7078bcb14f1330f4206b95c917e7ec0e82fac012103091137f3ef23f4acfc19a5953a68b2074fae942ad3563ef28c33b0cac9a93adc"
},
"sequence": 4294967295
- },
+ },
{
"txid": "a135eafb595eaf4c1ea59ccb111cdc0eae1b2c979b226a1e5aa8b76fe2d628df",
"vout": 0,
@@ -50,7 +50,7 @@
"hex": "483045022100a63a4788027b79b65c6f9d9e054f68cf3b4eed19efd82a2d53f70dcbe64683390220526f243671425b2bd05745fcf2729361f985cfe84ea80c7cfc817b93d8134374012103a621f08be22d1bbdcbe4e527ee4927006aa555fc65e2aafa767d4ea2fe9dfa52"
},
"sequence": 4294967295
- },
+ },
{
"txid": "a5d6bf53ba21140b8a4d554feb00fe8bb9a62430ff9e4624aa2f58a120232aae",
"vout": 1,
@@ -59,7 +59,7 @@
"hex": "493046022100b200ac6db16842f76dab9abe807ce423c992805879bc50abd46ed8275a59d9cf022100c0d518e85dd345b3c29dd4dc47b9a420d3ce817b18720e94966d2fe23413a408012103091137f3ef23f4acfc19a5953a68b2074fae942ad3563ef28c33b0cac9a93adc"
},
"sequence": 4294967295
- },
+ },
{
"txid": "1b299cf14f1a22e81ea56d71b7affbd7cf386807bf2b4d4b79a18a54125accb3",
"vout": 0,
@@ -68,7 +68,7 @@
"hex": "483045022100ededc441c3103a6f2bd6cab7639421af0f6ec5e60503bce1e603cf34f00aee1c02205cb75f3f519a13fb348783b21db3085cb5ec7552c59e394fdbc3e1feea43f967012103a621f08be22d1bbdcbe4e527ee4927006aa555fc65e2aafa767d4ea2fe9dfa52"
},
"sequence": 4294967295
- },
+ },
{
"txid": "071df1cdcb3f0070f9d6af7b0274f02d0be2324a274727cfd288383167531485",
"vout": 21,
@@ -77,7 +77,7 @@
"hex": "483045022100d9eed5413d2a4b4b98625aa6e3169edc4fb4663e7862316d69224454e70cd8ca022061e506521d5ced51dd0ea36496e75904d756a4c4f9fb111568555075d5f68d9a012103f1575d6124ac78be398c25b31146d08313c6072d23a4d7df5ac6a9f87346c64c"
},
"sequence": 4294967295
- },
+ },
{
"txid": "b012e500eb7adf7a13ed332dd6ece849f94f7a62bb3eac5babab356d1fc19282",
"vout": 9,
@@ -86,7 +86,7 @@
"hex": "48304502207e84b27139c4c19c828cb1e30c349bba88e4d9b59be97286960793b5ddc0a2af0221008cdc7a951e7f31c20953ed5635fbabf228e80b7047f32faaa0313e7693005177012103f1575d6124ac78be398c25b31146d08313c6072d23a4d7df5ac6a9f87346c64c"
},
"sequence": 4294967295
- },
+ },
{
"txid": "58840fee9c833f2f2d40575842f30f4b8d2553094d06ad88b03d06869acf3d88",
"vout": 30,
@@ -95,7 +95,7 @@
"hex": "4730440220426540dfed9c4ab5812e5f06df705b8bcf307dd7d20f7fa6512298b2a6314f420220064055096e3ca62f6c7352c66a5447767c53f946acdf35025ab3807ddb2fa404012103f1575d6124ac78be398c25b31146d08313c6072d23a4d7df5ac6a9f87346c64c"
},
"sequence": 4294967295
- },
+ },
{
"txid": "e69f9cd16946e570a665245354428a3f507ea69f4568b581e4af98edb3db9766",
"vout": 114,
@@ -104,7 +104,7 @@
"hex": "47304402200a5e673996f2fc88e21cc8613611f08a650bc0370338803591d85d0ec5663764022040b6664a0d1ec83a7f01975b8fde5232992b8ca58bf48af6725d2f92a936ab2e012103f1575d6124ac78be398c25b31146d08313c6072d23a4d7df5ac6a9f87346c64c"
},
"sequence": 4294967295
- },
+ },
{
"txid": "595d1257f654ed2cbe5a65421e8aefd2b4d70b5b6c89a03f1d7e518221fc3f02",
"vout": 103,
@@ -113,7 +113,7 @@
"hex": "493046022100d93b30219c5735f673be5c3b4688366d96f545561c74cb62c6958c00f6960806022100ec8200adcb028f2184fa2a4f6faac7f8bb57cb4503bb7584ac11051fece31b3d012103091137f3ef23f4acfc19a5953a68b2074fae942ad3563ef28c33b0cac9a93adc"
},
"sequence": 4294967295
- },
+ },
{
"txid": "06fc818f9555a261248ecd7aad0993eafb5a82ceb2b5c87c3ddfb06671c7f816",
"vout": 1,
@@ -122,7 +122,7 @@
"hex": "483045022100a13934e68d3f5b22b130c4cb33f4da468cffc52323a47fbfbe06b64858162246022047081e0a70ff770e64a2e2d31e5d520d9102268b57a47009a72fe73ec766901801210234b9d9413f247bb78cd3293b7b65a2c38018ba5621ea9ee737f3a6a3523fb4cd"
},
"sequence": 4294967295
- },
+ },
{
"txid": "fb416c8155d6bb1d43f9395466ca90a638a7c2dd3ff617aadf3a7ac8f3967b19",
"vout": 0,
@@ -131,7 +131,7 @@
"hex": "49304602210097f1f35d5bdc1a3a60390a1b015b8e7c4f916aa3847aafd969e04975e15bbe70022100a9052eb25517d481f1fda1b129eb1b534da50ea1a51f3ee012dca3601c11b86a0121027a759be8df971a6a04fafcb4f6babf75dc811c5cdaa0734cddbe9b942ce75b34"
},
"sequence": 4294967295
- },
+ },
{
"txid": "3940b9683bd6104ad24c978e640ba4095993cafdb27d2ed91baa27ee61a2d920",
"vout": 221,
@@ -140,7 +140,7 @@
"hex": "483045022012b3138c591bf7154b6fef457f2c4a3c7162225003788ac0024a99355865ff13022100b71b125ae1ffb2e1d1571f580cd3ebc8cd049a2d7a8a41f138ba94aeb982106f012103091137f3ef23f4acfc19a5953a68b2074fae942ad3563ef28c33b0cac9a93adc"
},
"sequence": 4294967295
- },
+ },
{
"txid": "711b5714d3b5136147c02194cd95bde94a4648c4263ca6f972d86cd1d579f150",
"vout": 1,
@@ -149,7 +149,7 @@
"hex": "483045022100f834ccc8b22ee72712a3e5e6ef4acb8b2fb791b5385b70e2cd4332674d6667f4022024fbda0a997e0c253503f217501f508a4d56edce2c813ecdd9ad796dbeba907401210234b9d9413f247bb78cd3293b7b65a2c38018ba5621ea9ee737f3a6a3523fb4cd"
},
"sequence": 4294967295
- },
+ },
{
"txid": "6364b5c5efe018430789e7fb4e338209546cae5d9c5f5e300aac68155d861b55",
"vout": 27,
@@ -158,7 +158,7 @@
"hex": "48304502203b2fd1e39ae0e469d7a15768f262661b0de41470daf0fe8c4fd0c26542a0870002210081c57e331f9a2d214457d953e3542904727ee412c63028113635d7224da3dccc012103f1575d6124ac78be398c25b31146d08313c6072d23a4d7df5ac6a9f87346c64c"
},
"sequence": 4294967295
- },
+ },
{
"txid": "0bb57f6e38012c86d4c5a28c904f2675082859147921a707d48961015a3e5057",
"vout": 1095,
@@ -167,7 +167,7 @@
"hex": "48304502206947a9c54f0664ece4430fd4ae999891dc50bb6126bc36b6a15a3189f29d25e9022100a86cfc4e2fdd9e39a20e305cfd1b76509c67b3e313e0f118229105caa0e823c9012103f1575d6124ac78be398c25b31146d08313c6072d23a4d7df5ac6a9f87346c64c"
},
"sequence": 4294967295
- },
+ },
{
"txid": "9b34274814a2540bb062107117f8f3e75ef85d953e9372d8261a3e9dfbc1163f",
"vout": 37,
@@ -176,7 +176,7 @@
"hex": "483045022100c7128fe10b2d38744ae8177776054c29fc8ec13f07207723e70766ab7164847402201d2cf09009b9596de74c0183d1ab832e5edddb7a9965880bb400097e850850f8012103f1575d6124ac78be398c25b31146d08313c6072d23a4d7df5ac6a9f87346c64c"
},
"sequence": 4294967295
- },
+ },
{
"txid": "b86b5cc0d8a7374d94e277850b0a249cb26a7b42ddf014f28a49b8859da64241",
"vout": 20,
@@ -185,7 +185,7 @@
"hex": "48304502203b89a71628a28cc3703d170ca3be77786cff6b867e38a18b719705f8a326578f022100b2a9879e1acf621faa6466c207746a7f3eb4c8514c1482969aba3f2a957f1321012103f1575d6124ac78be398c25b31146d08313c6072d23a4d7df5ac6a9f87346c64c"
},
"sequence": 4294967295
- },
+ },
{
"txid": "3d0a2353eeec44d3c10aed259038db321912122cd4150048f7bfa4c0ecfee236",
"vout": 242,
@@ -198,7 +198,7 @@
],
"vout": [
{
- "value": 1.3782,
+ "value": 1.37820000,
"n": 0,
"scriptPubKey": {
"asm": "OP_DUP OP_HASH160 8fd139bb39ced713f231c58a4d07bf6954d1c201 OP_EQUALVERIFY OP_CHECKSIG",
diff --git a/test/util/data/tt-locktime317000-out.json b/test/util/data/tt-locktime317000-out.json
index 82b64df075..af7903d1dd 100644
--- a/test/util/data/tt-locktime317000-out.json
+++ b/test/util/data/tt-locktime317000-out.json
@@ -14,7 +14,7 @@
"hex": "493046022100b4251ecd63778a3dde0155abe4cd162947620ae9ee45a874353551092325b116022100db307baf4ff3781ec520bd18f387948cedd15dc27bafe17c894b0fe6ffffcafa012103091137f3ef23f4acfc19a5953a68b2074fae942ad3563ef28c33b0cac9a93adc"
},
"sequence": 4294967295
- },
+ },
{
"txid": "a72ec96bd0d022d1b0c2f9078cdd46b3725b8eecdd001e17b21e3ababad14ecb",
"vout": 0,
@@ -23,7 +23,7 @@
"hex": "493046022100a9b617843b68c284715d3e02fd120479cd0d96a6c43bf01e697fb0a460a21a3a022100ba0a12fbe8b993d4e7911fa3467615765dbe421ddf5c51b57a9c1ee19dcc00ba012103e633b4fa4ceb705c2da712390767199be8ef2448b3095dc01652e11b2b751505"
},
"sequence": 4294967295
- },
+ },
{
"txid": "752f7f69b915637dc1c2f7aed1466ad676f6f3e24cf922809705f664e97ab3c1",
"vout": 1,
@@ -32,7 +32,7 @@
"hex": "473044022079bd62ee09621a3be96b760c39e8ef78170101d46313923c6b07ae60a95c90670220238e51ea29fc70b04b65508450523caedbb11cb4dd5aa608c81487de798925ba0121027a759be8df971a6a04fafcb4f6babf75dc811c5cdaa0734cddbe9b942ce75b34"
},
"sequence": 4294967295
- },
+ },
{
"txid": "b0ac9cca2e69cd02410e31b1f4402a25758e71abd1ab06c265ef9077dc05d0ed",
"vout": 209,
@@ -41,7 +41,7 @@
"hex": "48304502207722d6f9038673c86a1019b1c4de2d687ae246477cd4ca7002762be0299de385022100e594a11e3a313942595f7666dcf7078bcb14f1330f4206b95c917e7ec0e82fac012103091137f3ef23f4acfc19a5953a68b2074fae942ad3563ef28c33b0cac9a93adc"
},
"sequence": 4294967295
- },
+ },
{
"txid": "a135eafb595eaf4c1ea59ccb111cdc0eae1b2c979b226a1e5aa8b76fe2d628df",
"vout": 0,
@@ -50,7 +50,7 @@
"hex": "483045022100a63a4788027b79b65c6f9d9e054f68cf3b4eed19efd82a2d53f70dcbe64683390220526f243671425b2bd05745fcf2729361f985cfe84ea80c7cfc817b93d8134374012103a621f08be22d1bbdcbe4e527ee4927006aa555fc65e2aafa767d4ea2fe9dfa52"
},
"sequence": 4294967295
- },
+ },
{
"txid": "a5d6bf53ba21140b8a4d554feb00fe8bb9a62430ff9e4624aa2f58a120232aae",
"vout": 1,
@@ -59,7 +59,7 @@
"hex": "493046022100b200ac6db16842f76dab9abe807ce423c992805879bc50abd46ed8275a59d9cf022100c0d518e85dd345b3c29dd4dc47b9a420d3ce817b18720e94966d2fe23413a408012103091137f3ef23f4acfc19a5953a68b2074fae942ad3563ef28c33b0cac9a93adc"
},
"sequence": 4294967295
- },
+ },
{
"txid": "1b299cf14f1a22e81ea56d71b7affbd7cf386807bf2b4d4b79a18a54125accb3",
"vout": 0,
@@ -68,7 +68,7 @@
"hex": "483045022100ededc441c3103a6f2bd6cab7639421af0f6ec5e60503bce1e603cf34f00aee1c02205cb75f3f519a13fb348783b21db3085cb5ec7552c59e394fdbc3e1feea43f967012103a621f08be22d1bbdcbe4e527ee4927006aa555fc65e2aafa767d4ea2fe9dfa52"
},
"sequence": 4294967295
- },
+ },
{
"txid": "071df1cdcb3f0070f9d6af7b0274f02d0be2324a274727cfd288383167531485",
"vout": 21,
@@ -77,7 +77,7 @@
"hex": "483045022100d9eed5413d2a4b4b98625aa6e3169edc4fb4663e7862316d69224454e70cd8ca022061e506521d5ced51dd0ea36496e75904d756a4c4f9fb111568555075d5f68d9a012103f1575d6124ac78be398c25b31146d08313c6072d23a4d7df5ac6a9f87346c64c"
},
"sequence": 4294967295
- },
+ },
{
"txid": "b012e500eb7adf7a13ed332dd6ece849f94f7a62bb3eac5babab356d1fc19282",
"vout": 9,
@@ -86,7 +86,7 @@
"hex": "48304502207e84b27139c4c19c828cb1e30c349bba88e4d9b59be97286960793b5ddc0a2af0221008cdc7a951e7f31c20953ed5635fbabf228e80b7047f32faaa0313e7693005177012103f1575d6124ac78be398c25b31146d08313c6072d23a4d7df5ac6a9f87346c64c"
},
"sequence": 4294967295
- },
+ },
{
"txid": "58840fee9c833f2f2d40575842f30f4b8d2553094d06ad88b03d06869acf3d88",
"vout": 30,
@@ -95,7 +95,7 @@
"hex": "4730440220426540dfed9c4ab5812e5f06df705b8bcf307dd7d20f7fa6512298b2a6314f420220064055096e3ca62f6c7352c66a5447767c53f946acdf35025ab3807ddb2fa404012103f1575d6124ac78be398c25b31146d08313c6072d23a4d7df5ac6a9f87346c64c"
},
"sequence": 4294967295
- },
+ },
{
"txid": "e69f9cd16946e570a665245354428a3f507ea69f4568b581e4af98edb3db9766",
"vout": 114,
@@ -104,7 +104,7 @@
"hex": "47304402200a5e673996f2fc88e21cc8613611f08a650bc0370338803591d85d0ec5663764022040b6664a0d1ec83a7f01975b8fde5232992b8ca58bf48af6725d2f92a936ab2e012103f1575d6124ac78be398c25b31146d08313c6072d23a4d7df5ac6a9f87346c64c"
},
"sequence": 4294967295
- },
+ },
{
"txid": "595d1257f654ed2cbe5a65421e8aefd2b4d70b5b6c89a03f1d7e518221fc3f02",
"vout": 103,
@@ -113,7 +113,7 @@
"hex": "493046022100d93b30219c5735f673be5c3b4688366d96f545561c74cb62c6958c00f6960806022100ec8200adcb028f2184fa2a4f6faac7f8bb57cb4503bb7584ac11051fece31b3d012103091137f3ef23f4acfc19a5953a68b2074fae942ad3563ef28c33b0cac9a93adc"
},
"sequence": 4294967295
- },
+ },
{
"txid": "06fc818f9555a261248ecd7aad0993eafb5a82ceb2b5c87c3ddfb06671c7f816",
"vout": 1,
@@ -122,7 +122,7 @@
"hex": "483045022100a13934e68d3f5b22b130c4cb33f4da468cffc52323a47fbfbe06b64858162246022047081e0a70ff770e64a2e2d31e5d520d9102268b57a47009a72fe73ec766901801210234b9d9413f247bb78cd3293b7b65a2c38018ba5621ea9ee737f3a6a3523fb4cd"
},
"sequence": 4294967295
- },
+ },
{
"txid": "fb416c8155d6bb1d43f9395466ca90a638a7c2dd3ff617aadf3a7ac8f3967b19",
"vout": 0,
@@ -131,7 +131,7 @@
"hex": "49304602210097f1f35d5bdc1a3a60390a1b015b8e7c4f916aa3847aafd969e04975e15bbe70022100a9052eb25517d481f1fda1b129eb1b534da50ea1a51f3ee012dca3601c11b86a0121027a759be8df971a6a04fafcb4f6babf75dc811c5cdaa0734cddbe9b942ce75b34"
},
"sequence": 4294967295
- },
+ },
{
"txid": "3940b9683bd6104ad24c978e640ba4095993cafdb27d2ed91baa27ee61a2d920",
"vout": 221,
@@ -140,7 +140,7 @@
"hex": "483045022012b3138c591bf7154b6fef457f2c4a3c7162225003788ac0024a99355865ff13022100b71b125ae1ffb2e1d1571f580cd3ebc8cd049a2d7a8a41f138ba94aeb982106f012103091137f3ef23f4acfc19a5953a68b2074fae942ad3563ef28c33b0cac9a93adc"
},
"sequence": 4294967295
- },
+ },
{
"txid": "711b5714d3b5136147c02194cd95bde94a4648c4263ca6f972d86cd1d579f150",
"vout": 1,
@@ -149,7 +149,7 @@
"hex": "483045022100f834ccc8b22ee72712a3e5e6ef4acb8b2fb791b5385b70e2cd4332674d6667f4022024fbda0a997e0c253503f217501f508a4d56edce2c813ecdd9ad796dbeba907401210234b9d9413f247bb78cd3293b7b65a2c38018ba5621ea9ee737f3a6a3523fb4cd"
},
"sequence": 4294967295
- },
+ },
{
"txid": "6364b5c5efe018430789e7fb4e338209546cae5d9c5f5e300aac68155d861b55",
"vout": 27,
@@ -158,7 +158,7 @@
"hex": "48304502203b2fd1e39ae0e469d7a15768f262661b0de41470daf0fe8c4fd0c26542a0870002210081c57e331f9a2d214457d953e3542904727ee412c63028113635d7224da3dccc012103f1575d6124ac78be398c25b31146d08313c6072d23a4d7df5ac6a9f87346c64c"
},
"sequence": 4294967295
- },
+ },
{
"txid": "0bb57f6e38012c86d4c5a28c904f2675082859147921a707d48961015a3e5057",
"vout": 1095,
@@ -167,7 +167,7 @@
"hex": "48304502206947a9c54f0664ece4430fd4ae999891dc50bb6126bc36b6a15a3189f29d25e9022100a86cfc4e2fdd9e39a20e305cfd1b76509c67b3e313e0f118229105caa0e823c9012103f1575d6124ac78be398c25b31146d08313c6072d23a4d7df5ac6a9f87346c64c"
},
"sequence": 4294967295
- },
+ },
{
"txid": "9b34274814a2540bb062107117f8f3e75ef85d953e9372d8261a3e9dfbc1163f",
"vout": 37,
@@ -176,7 +176,7 @@
"hex": "483045022100c7128fe10b2d38744ae8177776054c29fc8ec13f07207723e70766ab7164847402201d2cf09009b9596de74c0183d1ab832e5edddb7a9965880bb400097e850850f8012103f1575d6124ac78be398c25b31146d08313c6072d23a4d7df5ac6a9f87346c64c"
},
"sequence": 4294967295
- },
+ },
{
"txid": "b86b5cc0d8a7374d94e277850b0a249cb26a7b42ddf014f28a49b8859da64241",
"vout": 20,
@@ -185,7 +185,7 @@
"hex": "48304502203b89a71628a28cc3703d170ca3be77786cff6b867e38a18b719705f8a326578f022100b2a9879e1acf621faa6466c207746a7f3eb4c8514c1482969aba3f2a957f1321012103f1575d6124ac78be398c25b31146d08313c6072d23a4d7df5ac6a9f87346c64c"
},
"sequence": 4294967295
- },
+ },
{
"txid": "3d0a2353eeec44d3c10aed259038db321912122cd4150048f7bfa4c0ecfee236",
"vout": 242,
@@ -198,7 +198,7 @@
],
"vout": [
{
- "value": 1.3782,
+ "value": 1.37820000,
"n": 0,
"scriptPubKey": {
"asm": "OP_DUP OP_HASH160 8fd139bb39ced713f231c58a4d07bf6954d1c201 OP_EQUALVERIFY OP_CHECKSIG",
@@ -209,7 +209,7 @@
"1E7SGgAZFCHDnVZLuRViX3gUmxpMfdvd2o"
]
}
- },
+ },
{
"value": 0.01000001,
"n": 1,
diff --git a/test/util/data/txcreate1.json b/test/util/data/txcreate1.json
index 36741044c9..83a86649e0 100644
--- a/test/util/data/txcreate1.json
+++ b/test/util/data/txcreate1.json
@@ -14,7 +14,7 @@
"hex": ""
},
"sequence": 4294967295
- },
+ },
{
"txid": "bf829c6bcf84579331337659d31f89dfd138f7f7785802d5501c92333145ca7c",
"vout": 18,
@@ -23,7 +23,7 @@
"hex": ""
},
"sequence": 4294967295
- },
+ },
{
"txid": "22a6f904655d53ae2ff70e701a0bbd90aa3975c0f40bfc6cc996a9049e31cdfc",
"vout": 1,
@@ -36,7 +36,7 @@
],
"vout": [
{
- "value": 0.18,
+ "value": 0.18000000,
"n": 0,
"scriptPubKey": {
"asm": "OP_DUP OP_HASH160 1fc11f39be1729bf973a7ab6a615ca4729d64574 OP_EQUALVERIFY OP_CHECKSIG",
@@ -47,9 +47,9 @@
"13tuJJDR2RgArmgfv6JScSdreahzgc4T6o"
]
}
- },
+ },
{
- "value": 4.00,
+ "value": 4.00000000,
"n": 1,
"scriptPubKey": {
"asm": "OP_DUP OP_HASH160 f2d4db28cad6502226ee484ae24505c2885cb12d OP_EQUALVERIFY OP_CHECKSIG",
diff --git a/test/util/data/txcreate2.json b/test/util/data/txcreate2.json
index 23fe7ace67..cca00f752b 100644
--- a/test/util/data/txcreate2.json
+++ b/test/util/data/txcreate2.json
@@ -9,7 +9,7 @@
],
"vout": [
{
- "value": 0.00,
+ "value": 0.00000000,
"n": 0,
"scriptPubKey": {
"asm": "",
diff --git a/test/util/data/txcreatedata1.json b/test/util/data/txcreatedata1.json
index e65a1859eb..15a4246ae5 100644
--- a/test/util/data/txcreatedata1.json
+++ b/test/util/data/txcreatedata1.json
@@ -18,7 +18,7 @@
],
"vout": [
{
- "value": 0.18,
+ "value": 0.18000000,
"n": 0,
"scriptPubKey": {
"asm": "OP_DUP OP_HASH160 1fc11f39be1729bf973a7ab6a615ca4729d64574 OP_EQUALVERIFY OP_CHECKSIG",
@@ -29,9 +29,9 @@
"13tuJJDR2RgArmgfv6JScSdreahzgc4T6o"
]
}
- },
+ },
{
- "value": 4.00,
+ "value": 4.00000000,
"n": 1,
"scriptPubKey": {
"asm": "OP_RETURN 54686973204f505f52455455524e207472616e73616374696f6e206f7574707574207761732063726561746564206279206d6f646966696564206372656174657261777472616e73616374696f6e2e",
diff --git a/test/util/data/txcreatedata2.json b/test/util/data/txcreatedata2.json
index 8f1544e1c0..cb93c27971 100644
--- a/test/util/data/txcreatedata2.json
+++ b/test/util/data/txcreatedata2.json
@@ -18,7 +18,7 @@
],
"vout": [
{
- "value": 0.18,
+ "value": 0.18000000,
"n": 0,
"scriptPubKey": {
"asm": "OP_DUP OP_HASH160 1fc11f39be1729bf973a7ab6a615ca4729d64574 OP_EQUALVERIFY OP_CHECKSIG",
@@ -29,9 +29,9 @@
"13tuJJDR2RgArmgfv6JScSdreahzgc4T6o"
]
}
- },
+ },
{
- "value": 0.00,
+ "value": 0.00000000,
"n": 1,
"scriptPubKey": {
"asm": "OP_RETURN 54686973204f505f52455455524e207472616e73616374696f6e206f7574707574207761732063726561746564206279206d6f646966696564206372656174657261777472616e73616374696f6e2e",
diff --git a/test/util/data/txcreatedata_seq0.json b/test/util/data/txcreatedata_seq0.json
index e52401f418..4b5a7cab4a 100644
--- a/test/util/data/txcreatedata_seq0.json
+++ b/test/util/data/txcreatedata_seq0.json
@@ -18,7 +18,7 @@
],
"vout": [
{
- "value": 0.18,
+ "value": 0.18000000,
"n": 0,
"scriptPubKey": {
"asm": "OP_DUP OP_HASH160 1fc11f39be1729bf973a7ab6a615ca4729d64574 OP_EQUALVERIFY OP_CHECKSIG",
diff --git a/test/util/data/txcreatedata_seq1.json b/test/util/data/txcreatedata_seq1.json
index 093ff4a56b..dea48ba373 100644
--- a/test/util/data/txcreatedata_seq1.json
+++ b/test/util/data/txcreatedata_seq1.json
@@ -14,7 +14,7 @@
"hex": ""
},
"sequence": 4294967293
- },
+ },
{
"txid": "5897de6bd6027a475eadd57019d4e6872c396d0716c4875a5f1a6fcfdf385c1f",
"vout": 0,
@@ -27,7 +27,7 @@
],
"vout": [
{
- "value": 0.18,
+ "value": 0.18000000,
"n": 0,
"scriptPubKey": {
"asm": "OP_DUP OP_HASH160 1fc11f39be1729bf973a7ab6a615ca4729d64574 OP_EQUALVERIFY OP_CHECKSIG",
diff --git a/test/util/data/txcreatemultisig1.json b/test/util/data/txcreatemultisig1.json
index 0cc530836a..72e20c8691 100644
--- a/test/util/data/txcreatemultisig1.json
+++ b/test/util/data/txcreatemultisig1.json
@@ -9,7 +9,7 @@
],
"vout": [
{
- "value": 1.00,
+ "value": 1.00000000,
"n": 0,
"scriptPubKey": {
"asm": "2 02a5613bd857b7048924264d1e70e08fb2a7e6527d32b7ab1bb993ac59964ff397 021ac43c7ff740014c3b33737ede99c967e4764553d1b2b83db77c83b8715fa72d 02df2089105c77f266fa11a9d33f05c735234075f2e8780824c6b709415f9fb485 3 OP_CHECKMULTISIG",
@@ -17,8 +17,8 @@
"reqSigs": 2,
"type": "multisig",
"addresses": [
- "1FoG2386FG2tAJS9acMuiDsKy67aGg9MKz",
- "1FXtz9KU8JNmQDyHdiEm5HDiALuP3zdHvV",
+ "1FoG2386FG2tAJS9acMuiDsKy67aGg9MKz",
+ "1FXtz9KU8JNmQDyHdiEm5HDiALuP3zdHvV",
"14LuavcBbXZYJ6Tsz3cAUQj9SuQoL2xCQX"
]
}
diff --git a/test/util/data/txcreatemultisig2.json b/test/util/data/txcreatemultisig2.json
index 8ad2ffdc65..7d94ce7396 100644
--- a/test/util/data/txcreatemultisig2.json
+++ b/test/util/data/txcreatemultisig2.json
@@ -9,7 +9,7 @@
],
"vout": [
{
- "value": 1.00,
+ "value": 1.00000000,
"n": 0,
"scriptPubKey": {
"asm": "OP_HASH160 1c6fbaf46d64221e80cbae182c33ddf81b9294ac OP_EQUAL",
diff --git a/test/util/data/txcreatemultisig3.json b/test/util/data/txcreatemultisig3.json
index 086bf44b8a..6c5b49d876 100644
--- a/test/util/data/txcreatemultisig3.json
+++ b/test/util/data/txcreatemultisig3.json
@@ -9,12 +9,16 @@
],
"vout": [
{
- "value": 1.00,
+ "value": 1.00000000,
"n": 0,
"scriptPubKey": {
"asm": "0 e15a86a23178f433d514dbbce042e87d72662b8b5edcacfd2e37ab7a2d135f05",
"hex": "0020e15a86a23178f433d514dbbce042e87d72662b8b5edcacfd2e37ab7a2d135f05",
- "type": "witness_v0_scripthash"
+ "reqSigs": 1,
+ "type": "witness_v0_scripthash",
+ "addresses": [
+ "bc1qu9dgdg330r6r84g5mw7wqshg04exv2uttmw2elfwx74h5tgntuzs44gyfg"
+ ]
}
}
],
diff --git a/test/util/data/txcreatemultisig4.json b/test/util/data/txcreatemultisig4.json
index d23ccc045e..9a5d2f4a06 100644
--- a/test/util/data/txcreatemultisig4.json
+++ b/test/util/data/txcreatemultisig4.json
@@ -9,7 +9,7 @@
],
"vout": [
{
- "value": 1.00,
+ "value": 1.00000000,
"n": 0,
"scriptPubKey": {
"asm": "OP_HASH160 6edf12858999f0dae74f9c692e6694ee3621b2ac OP_EQUAL",
diff --git a/test/util/data/txcreatemultisig5.json b/test/util/data/txcreatemultisig5.json
new file mode 100644
index 0000000000..20e9bb077b
--- /dev/null
+++ b/test/util/data/txcreatemultisig5.json
@@ -0,0 +1,26 @@
+{
+ "txid": "813cf75e1f08debd242ef7c8192b7d478fb651355209369499a0de779ba7eb2f",
+ "hash": "813cf75e1f08debd242ef7c8192b7d478fb651355209369499a0de779ba7eb2f",
+ "version": 2,
+ "size": 42,
+ "vsize": 42,
+ "locktime": 0,
+ "vin": [
+ ],
+ "vout": [
+ {
+ "value": 1.00000000,
+ "n": 0,
+ "scriptPubKey": {
+ "asm": "OP_HASH160 a4051c02398868af83f28f083208fae99a769263 OP_EQUAL",
+ "hex": "a914a4051c02398868af83f28f083208fae99a76926387",
+ "reqSigs": 1,
+ "type": "scripthash",
+ "addresses": [
+ "3GeGs1eHUxPz5YyuFe9WPpXid2UsUb5Jos"
+ ]
+ }
+ }
+ ],
+ "hex": "02000000000100e1f5050000000017a914a4051c02398868af83f28f083208fae99a7692638700000000"
+}
diff --git a/test/util/data/txcreateoutpubkey1.json b/test/util/data/txcreateoutpubkey1.json
index f10aaecf7a..2704ed7673 100644
--- a/test/util/data/txcreateoutpubkey1.json
+++ b/test/util/data/txcreateoutpubkey1.json
@@ -9,7 +9,7 @@
],
"vout": [
{
- "value": 0.00,
+ "value": 0.00000000,
"n": 0,
"scriptPubKey": {
"asm": "02a5613bd857b7048924264d1e70e08fb2a7e6527d32b7ab1bb993ac59964ff397 OP_CHECKSIG",
diff --git a/test/util/data/txcreateoutpubkey2.json b/test/util/data/txcreateoutpubkey2.json
index 5a473b76c3..4ba5dcb282 100644
--- a/test/util/data/txcreateoutpubkey2.json
+++ b/test/util/data/txcreateoutpubkey2.json
@@ -9,12 +9,16 @@
],
"vout": [
{
- "value": 0.00,
+ "value": 0.00000000,
"n": 0,
"scriptPubKey": {
"asm": "0 a2516e770582864a6a56ed21a102044e388c62e3",
"hex": "0014a2516e770582864a6a56ed21a102044e388c62e3",
- "type": "witness_v0_keyhash"
+ "reqSigs": 1,
+ "type": "witness_v0_keyhash",
+ "addresses": [
+ "bc1q5fgkuac9s2ry56jka5s6zqsyfcugcchry5cwu0"
+ ]
}
}
],
diff --git a/test/util/data/txcreateoutpubkey3.json b/test/util/data/txcreateoutpubkey3.json
index b8389b8f7e..0a5d489e15 100644
--- a/test/util/data/txcreateoutpubkey3.json
+++ b/test/util/data/txcreateoutpubkey3.json
@@ -9,7 +9,7 @@
],
"vout": [
{
- "value": 0.00,
+ "value": 0.00000000,
"n": 0,
"scriptPubKey": {
"asm": "OP_HASH160 a5ab14c9804d0d8bf02f1aea4e82780733ad0a83 OP_EQUAL",
diff --git a/test/util/data/txcreatescript1.json b/test/util/data/txcreatescript1.json
index 823168e9fb..5072452fed 100644
--- a/test/util/data/txcreatescript1.json
+++ b/test/util/data/txcreatescript1.json
@@ -9,7 +9,7 @@
],
"vout": [
{
- "value": 0.00,
+ "value": 0.00000000,
"n": 0,
"scriptPubKey": {
"asm": "OP_DROP",
diff --git a/test/util/data/txcreatescript2.json b/test/util/data/txcreatescript2.json
index d4c7e10c78..94b669ffb6 100644
--- a/test/util/data/txcreatescript2.json
+++ b/test/util/data/txcreatescript2.json
@@ -9,7 +9,7 @@
],
"vout": [
{
- "value": 0.00,
+ "value": 0.00000000,
"n": 0,
"scriptPubKey": {
"asm": "OP_HASH160 71ed53322d470bb96657deb786b94f97dd46fb15 OP_EQUAL",
diff --git a/test/util/data/txcreatescript3.json b/test/util/data/txcreatescript3.json
index 001e69511f..31b6459214 100644
--- a/test/util/data/txcreatescript3.json
+++ b/test/util/data/txcreatescript3.json
@@ -9,12 +9,16 @@
],
"vout": [
{
- "value": 0.00,
+ "value": 0.00000000,
"n": 0,
"scriptPubKey": {
"asm": "0 0bfe935e70c321c7ca3afc75ce0d0ca2f98b5422e008bb31c00c6d7f1f1c0ad6",
"hex": "00200bfe935e70c321c7ca3afc75ce0d0ca2f98b5422e008bb31c00c6d7f1f1c0ad6",
- "type": "witness_v0_scripthash"
+ "reqSigs": 1,
+ "type": "witness_v0_scripthash",
+ "addresses": [
+ "bc1qp0lfxhnscvsu0j36l36uurgv5tuck4pzuqytkvwqp3kh78cupttqyf705v"
+ ]
}
}
],
diff --git a/test/util/data/txcreatescript4.json b/test/util/data/txcreatescript4.json
index 20094bcd44..eecdf858b7 100644
--- a/test/util/data/txcreatescript4.json
+++ b/test/util/data/txcreatescript4.json
@@ -9,7 +9,7 @@
],
"vout": [
{
- "value": 0.00,
+ "value": 0.00000000,
"n": 0,
"scriptPubKey": {
"asm": "OP_HASH160 6a2c482f4985f57e702f325816c90e3723ca81ae OP_EQUAL",
diff --git a/test/util/data/txcreatesignv1.json b/test/util/data/txcreatesignv1.json
index 519d3ab066..92a3f76a07 100644
--- a/test/util/data/txcreatesignv1.json
+++ b/test/util/data/txcreatesignv1.json
@@ -18,7 +18,7 @@
],
"vout": [
{
- "value": 0.001,
+ "value": 0.00100000,
"n": 0,
"scriptPubKey": {
"asm": "OP_DUP OP_HASH160 5834479edbbe0539b31ffd3a8f8ebadc2165ed01 OP_EQUALVERIFY OP_CHECKSIG",