Diffstat (limited to 'test')
-rwxr-xr-x  test/functional/combine_logs.py                    |   2
-rwxr-xr-x  test/functional/feature_pruning.py                 |  21
-rwxr-xr-x  test/functional/feature_reindex.py                 |   7
-rwxr-xr-x  test/functional/p2p_invalid_block.py               | 116
-rwxr-xr-x  test/functional/p2p_invalid_tx.py                  |   7
-rwxr-xr-x  test/functional/rpc_blockchain.py                  |  57
-rwxr-xr-x  test/functional/rpc_net.py                         |  18
-rwxr-xr-x  test/functional/rpc_preciousblock.py               |   3
-rwxr-xr-x  test/functional/rpc_rawtransaction.py              |  64
-rwxr-xr-x  test/functional/test_framework/test_framework.py   |  25
-rw-r--r--  test/functional/test_framework/util.py             |  57
-rwxr-xr-x  test/functional/test_runner.py                     |  68
-rwxr-xr-x  test/functional/wallet_basic.py                    |   4
13 files changed, 242 insertions, 207 deletions
diff --git a/test/functional/combine_logs.py b/test/functional/combine_logs.py
index 3ca74ea35e..d1bf9206b2 100755
--- a/test/functional/combine_logs.py
+++ b/test/functional/combine_logs.py
@@ -13,7 +13,7 @@ import re
import sys
# Matches on the date format at the start of the log event
-TIMESTAMP_PATTERN = re.compile(r"^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d{6}")
+TIMESTAMP_PATTERN = re.compile(r"^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{6}Z")
LogEvent = namedtuple('LogEvent', ['timestamp', 'source', 'event'])
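
The updated pattern matches the ISO 8601, UTC-suffixed timestamps that the framework now writes (see the logging formatter change in test_framework.py further down). A minimal standalone check, with a made-up sample log line:

    import re

    TIMESTAMP_PATTERN = re.compile(r"^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{6}Z")

    # New format: date and time joined by 'T', six-digit microseconds, explicit 'Z' for UTC.
    assert TIMESTAMP_PATTERN.match("2018-03-01T12:34:56.789000Z TestFramework (INFO): sample")
    # The old space-separated format no longer matches.
    assert not TIMESTAMP_PATTERN.match("2018-03-01 12:34:56.789000 TestFramework (INFO): sample")
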
diff --git a/test/functional/feature_pruning.py b/test/functional/feature_pruning.py
index 49ad7f838c..8dfa1be2fa 100755
--- a/test/functional/feature_pruning.py
+++ b/test/functional/feature_pruning.py
@@ -11,7 +11,6 @@ This test takes 30 mins or more (up to 2 hours)
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
-import time
import os
MIN_BLOCKS_TO_KEEP = 288
@@ -23,7 +22,7 @@ TIMESTAMP_WINDOW = 2 * 60 * 60
def calc_usage(blockdir):
- return sum(os.path.getsize(blockdir+f) for f in os.listdir(blockdir) if os.path.isfile(blockdir+f)) / (1024. * 1024.)
+ return sum(os.path.getsize(blockdir+f) for f in os.listdir(blockdir) if os.path.isfile(os.path.join(blockdir, f))) / (1024. * 1024.)
class PruneTest(BitcoinTestFramework):
def set_test_params(self):
@@ -70,7 +69,7 @@ class PruneTest(BitcoinTestFramework):
sync_blocks(self.nodes[0:5])
def test_height_min(self):
- if not os.path.isfile(self.prunedir+"blk00000.dat"):
+ if not os.path.isfile(os.path.join(self.prunedir, "blk00000.dat")):
raise AssertionError("blk00000.dat is missing, pruning too early")
self.log.info("Success")
self.log.info("Though we're already using more than 550MiB, current usage: %d" % calc_usage(self.prunedir))
@@ -79,11 +78,8 @@ class PruneTest(BitcoinTestFramework):
for i in range(25):
mine_large_block(self.nodes[0], self.utxo_cache_0)
- waitstart = time.time()
- while os.path.isfile(self.prunedir+"blk00000.dat"):
- time.sleep(0.1)
- if time.time() - waitstart > 30:
- raise AssertionError("blk00000.dat not pruned when it should be")
+ # Wait for blk00000.dat to be pruned
+ wait_until(lambda: not os.path.isfile(os.path.join(self.prunedir, "blk00000.dat")), timeout=30)
self.log.info("Success")
usage = calc_usage(self.prunedir)
@@ -218,11 +214,8 @@ class PruneTest(BitcoinTestFramework):
goalbestheight = first_reorg_height + 1
self.log.info("Verify node 2 reorged back to the main chain, some blocks of which it had to redownload")
- waitstart = time.time()
- while self.nodes[2].getblockcount() < goalbestheight:
- time.sleep(0.1)
- if time.time() - waitstart > 900:
- raise AssertionError("Node 2 didn't reorg to proper height")
+ # Wait for Node 2 to reorg to proper height
+ wait_until(lambda: self.nodes[2].getblockcount() >= goalbestheight, timeout=900)
assert(self.nodes[2].getbestblockhash() == goalbesthash)
# Verify we can now have the data for a block previously pruned
assert(self.nodes[2].getblock(self.forkhash)["height"] == self.forkheight)
@@ -262,7 +255,7 @@ class PruneTest(BitcoinTestFramework):
assert_equal(ret, expected_ret)
def has_block(index):
- return os.path.isfile(self.options.tmpdir + "/node{}/regtest/blocks/blk{:05}.dat".format(node_number, index))
+ return os.path.isfile(os.path.join(self.nodes[node_number].datadir, "regtest", "blocks", "blk{:05}.dat".format(index)))
# should not prune because chain tip of node 3 (995) < PruneAfterHeight (1000)
assert_raises_rpc_error(-1, "Blockchain is too short for pruning", node.pruneblockchain, height(500))
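
All of the removed poll-and-sleep loops above now go through wait_until from test_framework/util.py. Its implementation is not part of this diff; the pattern it encapsulates is roughly the following sketch (the poll interval and default timeout are assumptions):

    import time

    def wait_until(predicate, *, timeout=60, poll_interval=0.5):
        """Poll predicate() until it returns True, or fail after timeout seconds."""
        stop_time = time.time() + timeout
        while time.time() <= stop_time:
            if predicate():
                return
            time.sleep(poll_interval)
        raise AssertionError("predicate not satisfied within {}s".format(timeout))

    # The pruning wait above then reads:
    # wait_until(lambda: not os.path.isfile(os.path.join(prunedir, "blk00000.dat")), timeout=30)
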
diff --git a/test/functional/feature_reindex.py b/test/functional/feature_reindex.py
index ac67e6e9ba..d1d3f1d7f1 100755
--- a/test/functional/feature_reindex.py
+++ b/test/functional/feature_reindex.py
@@ -10,8 +10,7 @@
"""
from test_framework.test_framework import BitcoinTestFramework
-from test_framework.util import assert_equal
-import time
+from test_framework.util import wait_until
class ReindexTest(BitcoinTestFramework):
@@ -25,9 +24,7 @@ class ReindexTest(BitcoinTestFramework):
self.stop_nodes()
extra_args = [["-reindex-chainstate" if justchainstate else "-reindex", "-checkblockindex=1"]]
self.start_nodes(extra_args)
- while self.nodes[0].getblockcount() < blockcount:
- time.sleep(0.1)
- assert_equal(self.nodes[0].getblockcount(), blockcount)
+ wait_until(lambda: self.nodes[0].getblockcount() == blockcount)
self.log.info("Success")
def run_test(self):
diff --git a/test/functional/p2p_invalid_block.py b/test/functional/p2p_invalid_block.py
index edcade63c1..e1f328ba77 100755
--- a/test/functional/p2p_invalid_block.py
+++ b/test/functional/p2p_invalid_block.py
@@ -10,75 +10,63 @@ In this test we connect to one node over p2p, and test block requests:
3) Invalid block with bad coinbase value should be rejected and not
re-requested.
"""
-
-from test_framework.test_framework import ComparisonTestFramework
-from test_framework.util import *
-from test_framework.comptool import TestManager, TestInstance, RejectResult
-from test_framework.blocktools import *
-from test_framework.mininode import network_thread_start
import copy
-import time
-# Use the ComparisonTestFramework with 1 node: only use --testbinary.
-class InvalidBlockRequestTest(ComparisonTestFramework):
+from test_framework.blocktools import create_block, create_coinbase, create_transaction
+from test_framework.messages import COIN
+from test_framework.mininode import network_thread_start, P2PDataStore
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import assert_equal
- ''' Can either run this test as 1 node with expected answers, or two and compare them.
- Change the "outcome" variable from each TestInstance object to only do the comparison. '''
+class InvalidBlockRequestTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.setup_clean_chain = True
+ self.extra_args = [["-whitelist=127.0.0.1"]]
def run_test(self):
- test = TestManager(self, self.options.tmpdir)
- test.add_all_connections(self.nodes)
- self.tip = None
- self.block_time = None
+ # Add p2p connection to node0
+ node = self.nodes[0] # convenience reference to the node
+ node.add_p2p_connection(P2PDataStore())
+
network_thread_start()
- test.run()
+ node.p2p.wait_for_verack()
+
+ best_block = node.getblock(node.getbestblockhash())
+ tip = int(node.getbestblockhash(), 16)
+ height = best_block["height"] + 1
+ block_time = best_block["time"] + 1
- def get_tests(self):
- if self.tip is None:
- self.tip = int("0x" + self.nodes[0].getbestblockhash(), 0)
- self.block_time = int(time.time())+1
+ self.log.info("Create a new block with an anyone-can-spend coinbase")
- '''
- Create a new block with an anyone-can-spend coinbase
- '''
height = 1
- block = create_block(self.tip, create_coinbase(height), self.block_time)
- self.block_time += 1
+ block = create_block(tip, create_coinbase(height), block_time)
block.solve()
# Save the coinbase for later
- self.block1 = block
- self.tip = block.sha256
- height += 1
- yield TestInstance([[block, True]])
-
- '''
- Now we need that block to mature so we can spend the coinbase.
- '''
- test = TestInstance(sync_every_block=False)
- for i in range(100):
- block = create_block(self.tip, create_coinbase(height), self.block_time)
- block.solve()
- self.tip = block.sha256
- self.block_time += 1
- test.blocks_and_transactions.append([block, True])
- height += 1
- yield test
-
- '''
- Now we use merkle-root malleability to generate an invalid block with
- same blockheader.
- Manufacture a block with 3 transactions (coinbase, spend of prior
- coinbase, spend of that spend). Duplicate the 3rd transaction to
- leave merkle root and blockheader unchanged but invalidate the block.
- '''
- block2 = create_block(self.tip, create_coinbase(height), self.block_time)
- self.block_time += 1
+ block1 = block
+ tip = block.sha256
+ node.p2p.send_blocks_and_test([block1], node, True)
+
+ self.log.info("Mature the block.")
+ node.generate(100)
+
+ best_block = node.getblock(node.getbestblockhash())
+ tip = int(node.getbestblockhash(), 16)
+ height = best_block["height"] + 1
+ block_time = best_block["time"] + 1
+
+ # Use merkle-root malleability to generate an invalid block with
+ # same blockheader.
+ # Manufacture a block with 3 transactions (coinbase, spend of prior
+ # coinbase, spend of that spend). Duplicate the 3rd transaction to
+ # leave merkle root and blockheader unchanged but invalidate the block.
+ self.log.info("Test merkle root malleability.")
+
+ block2 = create_block(tip, create_coinbase(height), block_time)
+ block_time += 1
# b'0x51' is OP_TRUE
- tx1 = create_transaction(self.block1.vtx[0], 0, b'\x51', 50 * COIN)
+ tx1 = create_transaction(block1.vtx[0], 0, b'\x51', 50 * COIN)
tx2 = create_transaction(tx1, 0, b'\x51', 50 * COIN)
block2.vtx.extend([tx1, tx2])
@@ -94,24 +82,20 @@ class InvalidBlockRequestTest(ComparisonTestFramework):
assert_equal(orig_hash, block2.rehash())
assert(block2_orig.vtx != block2.vtx)
- self.tip = block2.sha256
- yield TestInstance([[block2, RejectResult(16, b'bad-txns-duplicate')], [block2_orig, True]])
- height += 1
-
- '''
- Make sure that a totally screwed up block is not valid.
- '''
- block3 = create_block(self.tip, create_coinbase(height), self.block_time)
- self.block_time += 1
- block3.vtx[0].vout[0].nValue = 100 * COIN # Too high!
- block3.vtx[0].sha256=None
+ node.p2p.send_blocks_and_test([block2], node, False, False, 16, b'bad-txns-duplicate')
+
+ self.log.info("Test very broken block.")
+
+ block3 = create_block(tip, create_coinbase(height), block_time)
+ block_time += 1
+ block3.vtx[0].vout[0].nValue = 100 * COIN # Too high!
+ block3.vtx[0].sha256 = None
block3.vtx[0].calc_sha256()
block3.hashMerkleRoot = block3.calc_merkle_root()
block3.rehash()
block3.solve()
- yield TestInstance([[block3, RejectResult(16, b'bad-cb-amount')]])
-
+ node.p2p.send_blocks_and_test([block3], node, False, False, 16, b'bad-cb-amount')
if __name__ == '__main__':
InvalidBlockRequestTest().main()
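
The merkle-root malleability that block2 exercises comes from Bitcoin's merkle construction duplicating an odd trailing hash: with three transactions, appending a copy of the third leaves the root unchanged. A standalone sketch with toy leaf hashes (not the framework's own merkle code):

    import hashlib

    def dsha256(data):
        return hashlib.sha256(hashlib.sha256(data).digest()).digest()

    def merkle_root(hashes):
        hashes = list(hashes)
        while len(hashes) > 1:
            if len(hashes) % 2:
                hashes.append(hashes[-1])   # odd count: the last hash is paired with itself
            hashes = [dsha256(hashes[i] + hashes[i + 1]) for i in range(0, len(hashes), 2)]
        return hashes[0]

    txids = [dsha256(bytes([i])) for i in range(3)]                # coinbase, tx1, tx2
    assert merkle_root(txids) == merkle_root(txids + [txids[-1]])  # duplicated tx2: same root
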
diff --git a/test/functional/p2p_invalid_tx.py b/test/functional/p2p_invalid_tx.py
index 64fada38e2..69ce529ad6 100755
--- a/test/functional/p2p_invalid_tx.py
+++ b/test/functional/p2p_invalid_tx.py
@@ -33,12 +33,10 @@ class InvalidTxRequestTest(BitcoinTestFramework):
self.log.info("Create a new block with an anyone-can-spend coinbase.")
height = 1
block = create_block(tip, create_coinbase(height), block_time)
- block_time += 1
block.solve()
# Save the coinbase for later
block1 = block
tip = block.sha256
- height += 1
node.p2p.send_blocks_and_test([block], node, success=True)
self.log.info("Mature the block.")
@@ -49,7 +47,10 @@ class InvalidTxRequestTest(BitcoinTestFramework):
tx1 = create_transaction(block1.vtx[0], 0, b'\x64', 50 * COIN - 12000)
node.p2p.send_txs_and_test([tx1], node, success=False, reject_code=16, reject_reason=b'mandatory-script-verify-flag-failed (Invalid OP_IF construction)')
- # TODO: test further transactions...
+ # Verify valid transaction
+ tx1 = create_transaction(block1.vtx[0], 0, b'', 50 * COIN - 12000)
+ node.p2p.send_txs_and_test([tx1], node, success=True)
+
if __name__ == '__main__':
InvalidTxRequestTest().main()
diff --git a/test/functional/rpc_blockchain.py b/test/functional/rpc_blockchain.py
index a9e14d3e3c..17e24453e5 100755
--- a/test/functional/rpc_blockchain.py
+++ b/test/functional/rpc_blockchain.py
@@ -32,6 +32,18 @@ from test_framework.util import (
assert_is_hex_string,
assert_is_hash_string,
)
+from test_framework.blocktools import (
+ create_block,
+ create_coinbase,
+)
+from test_framework.messages import (
+ msg_block,
+)
+from test_framework.mininode import (
+ P2PInterface,
+ network_thread_start,
+)
+
class BlockchainTest(BitcoinTestFramework):
def set_test_params(self):
@@ -46,6 +58,7 @@ class BlockchainTest(BitcoinTestFramework):
self._test_getdifficulty()
self._test_getnetworkhashps()
self._test_stopatheight()
+ self._test_waitforblockheight()
assert self.nodes[0].verifychain(4, 0)
def _test_getblockchaininfo(self):
@@ -241,6 +254,50 @@ class BlockchainTest(BitcoinTestFramework):
self.start_node(0)
assert_equal(self.nodes[0].getblockcount(), 207)
+ def _test_waitforblockheight(self):
+ self.log.info("Test waitforblockheight")
+
+ node = self.nodes[0]
+
+ # Start a P2P connection since we'll need to create some blocks.
+ node.add_p2p_connection(P2PInterface())
+ network_thread_start()
+ node.p2p.wait_for_verack()
+
+ current_height = node.getblock(node.getbestblockhash())['height']
+
+ # Create a fork somewhere below our current height, invalidate the tip
+ # of that fork, and then ensure that waitforblockheight still
+ # works as expected.
+ #
+ # (Previously this was broken based on setting
+ # `rpc/blockchain.cpp:latestblock` incorrectly.)
+ #
+ b20hash = node.getblockhash(20)
+ b20 = node.getblock(b20hash)
+
+ def solve_and_send_block(prevhash, height, time):
+ b = create_block(prevhash, create_coinbase(height), time)
+ b.solve()
+ node.p2p.send_message(msg_block(b))
+ node.p2p.sync_with_ping()
+ return b
+
+ b21f = solve_and_send_block(int(b20hash, 16), 21, b20['time'] + 1)
+ b22f = solve_and_send_block(b21f.sha256, 22, b21f.nTime + 1)
+
+ node.invalidateblock(b22f.hash)
+
+ def assert_waitforheight(height, timeout=2):
+ assert_equal(
+ node.waitforblockheight(height, timeout)['height'],
+ current_height)
+
+ assert_waitforheight(0)
+ assert_waitforheight(current_height - 1)
+ assert_waitforheight(current_height)
+ assert_waitforheight(current_height + 1)
+
if __name__ == '__main__':
BlockchainTest().main()
diff --git a/test/functional/rpc_net.py b/test/functional/rpc_net.py
index 16e4f6adb4..d8348432aa 100755
--- a/test/functional/rpc_net.py
+++ b/test/functional/rpc_net.py
@@ -7,14 +7,13 @@
Tests correspond to code in rpc/net.cpp.
"""
-import time
-
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
connect_nodes_bi,
p2p_port,
+ wait_until,
)
class NetTest(BitcoinTestFramework):
@@ -47,14 +46,13 @@ class NetTest(BitcoinTestFramework):
# the bytes sent/received should change
# note ping and pong are 32 bytes each
self.nodes[0].ping()
- time.sleep(0.1)
+ wait_until(lambda: (net_totals['totalbytessent'] + 32*2) == self.nodes[0].getnettotals()['totalbytessent'], timeout=1)
+ wait_until(lambda: (net_totals['totalbytesrecv'] + 32*2) == self.nodes[0].getnettotals()['totalbytesrecv'], timeout=1)
+
peer_info_after_ping = self.nodes[0].getpeerinfo()
- net_totals_after_ping = self.nodes[0].getnettotals()
for before, after in zip(peer_info, peer_info_after_ping):
assert_equal(before['bytesrecv_per_msg']['pong'] + 32, after['bytesrecv_per_msg']['pong'])
assert_equal(before['bytessent_per_msg']['ping'] + 32, after['bytessent_per_msg']['ping'])
- assert_equal(net_totals['totalbytesrecv'] + 32*2, net_totals_after_ping['totalbytesrecv'])
- assert_equal(net_totals['totalbytessent'] + 32*2, net_totals_after_ping['totalbytessent'])
def _test_getnetworkinginfo(self):
assert_equal(self.nodes[0].getnetworkinfo()['networkactive'], True)
@@ -62,12 +60,8 @@ class NetTest(BitcoinTestFramework):
self.nodes[0].setnetworkactive(False)
assert_equal(self.nodes[0].getnetworkinfo()['networkactive'], False)
- timeout = 3
- while self.nodes[0].getnetworkinfo()['connections'] != 0:
- # Wait a bit for all sockets to close
- assert timeout > 0, 'not all connections closed in time'
- timeout -= 0.1
- time.sleep(0.1)
+ # Wait a bit for all sockets to close
+ wait_until(lambda: self.nodes[0].getnetworkinfo()['connections'] == 0, timeout=3)
self.nodes[0].setnetworkactive(True)
connect_nodes_bi(self.nodes, 0, 1)
diff --git a/test/functional/rpc_preciousblock.py b/test/functional/rpc_preciousblock.py
index 960cd0ad12..796a2edbef 100755
--- a/test/functional/rpc_preciousblock.py
+++ b/test/functional/rpc_preciousblock.py
@@ -8,7 +8,6 @@ from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
connect_nodes_bi,
- sync_chain,
sync_blocks,
)
@@ -72,7 +71,7 @@ class PreciousTest(BitcoinTestFramework):
assert_equal(self.nodes[0].getbestblockhash(), hashC)
self.log.info("Make Node1 prefer block C")
self.nodes[1].preciousblock(hashC)
- sync_chain(self.nodes[0:2]) # wait because node 1 may not have downloaded hashC
+ sync_blocks(self.nodes[0:2]) # wait because node 1 may not have downloaded hashC
assert_equal(self.nodes[1].getbestblockhash(), hashC)
self.log.info("Make Node1 prefer block G again")
self.nodes[1].preciousblock(hashG)
diff --git a/test/functional/rpc_rawtransaction.py b/test/functional/rpc_rawtransaction.py
index e074f5bd74..825b897871 100755
--- a/test/functional/rpc_rawtransaction.py
+++ b/test/functional/rpc_rawtransaction.py
@@ -12,7 +12,12 @@ Test the following RPCs:
- getrawtransaction
"""
+from collections import OrderedDict
+from io import BytesIO
from test_framework.test_framework import BitcoinTestFramework
+from test_framework.messages import (
+ CTransaction,
+)
from test_framework.util import *
@@ -43,11 +48,10 @@ class RawTransactionsTest(BitcoinTestFramework):
def setup_network(self, split=False):
super().setup_network()
- connect_nodes_bi(self.nodes,0,2)
+ connect_nodes_bi(self.nodes, 0, 2)
def run_test(self):
-
- #prepare some coins for multiple *rawtransaction commands
+ self.log.info('prepare some coins for multiple *rawtransaction commands')
self.nodes[2].generate(1)
self.sync_all()
self.nodes[0].generate(101)
@@ -59,10 +63,11 @@ class RawTransactionsTest(BitcoinTestFramework):
self.nodes[0].generate(5)
self.sync_all()
- # Test getrawtransaction on genesis block coinbase returns an error
+ self.log.info('Test getrawtransaction on genesis block coinbase returns an error')
block = self.nodes[0].getblock(self.nodes[0].getblockhash(0))
assert_raises_rpc_error(-5, "The genesis block coinbase is not considered an ordinary transaction", self.nodes[0].getrawtransaction, block['merkleroot'])
+ self.log.info('Check parameter types and required parameters of createrawtransaction')
# Test `createrawtransaction` required parameters
assert_raises_rpc_error(-1, "createrawtransaction", self.nodes[0].createrawtransaction)
assert_raises_rpc_error(-1, "createrawtransaction", self.nodes[0].createrawtransaction, [])
@@ -83,12 +88,18 @@ class RawTransactionsTest(BitcoinTestFramework):
# Test `createrawtransaction` invalid `outputs`
address = self.nodes[0].getnewaddress()
- assert_raises_rpc_error(-3, "Expected type object", self.nodes[0].createrawtransaction, [], 'foo')
+ address2 = self.nodes[0].getnewaddress()
+ assert_raises_rpc_error(-1, "JSON value is not an array as expected", self.nodes[0].createrawtransaction, [], 'foo')
+ self.nodes[0].createrawtransaction(inputs=[], outputs={}) # Should not throw for backwards compatibility
+ self.nodes[0].createrawtransaction(inputs=[], outputs=[])
assert_raises_rpc_error(-8, "Data must be hexadecimal string", self.nodes[0].createrawtransaction, [], {'data': 'foo'})
assert_raises_rpc_error(-5, "Invalid Bitcoin address", self.nodes[0].createrawtransaction, [], {'foo': 0})
assert_raises_rpc_error(-3, "Invalid amount", self.nodes[0].createrawtransaction, [], {address: 'foo'})
assert_raises_rpc_error(-3, "Amount out of range", self.nodes[0].createrawtransaction, [], {address: -1})
assert_raises_rpc_error(-8, "Invalid parameter, duplicated address: %s" % address, self.nodes[0].createrawtransaction, [], multidict([(address, 1), (address, 1)]))
+ assert_raises_rpc_error(-8, "Invalid parameter, duplicated address: %s" % address, self.nodes[0].createrawtransaction, [], [{address: 1}, {address: 1}])
+ assert_raises_rpc_error(-8, "Invalid parameter, key-value pair must contain exactly one key", self.nodes[0].createrawtransaction, [], [{'a': 1, 'b': 2}])
+ assert_raises_rpc_error(-8, "Invalid parameter, key-value pair not an object as expected", self.nodes[0].createrawtransaction, [], [['key-value pair1'], ['2']])
# Test `createrawtransaction` invalid `locktime`
assert_raises_rpc_error(-3, "Expected type number", self.nodes[0].createrawtransaction, [], {}, 'foo')
@@ -98,9 +109,38 @@ class RawTransactionsTest(BitcoinTestFramework):
# Test `createrawtransaction` invalid `replaceable`
assert_raises_rpc_error(-3, "Expected type bool", self.nodes[0].createrawtransaction, [], {}, 0, 'foo')
- #########################################
- # sendrawtransaction with missing input #
- #########################################
+ self.log.info('Check that createrawtransaction accepts an array and object as outputs')
+ tx = CTransaction()
+ # One output
+ tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs={address: 99}))))
+ assert_equal(len(tx.vout), 1)
+ assert_equal(
+ bytes_to_hex_str(tx.serialize()),
+ self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=[{address: 99}]),
+ )
+ # Two outputs
+ tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=OrderedDict([(address, 99), (address2, 99)])))))
+ assert_equal(len(tx.vout), 2)
+ assert_equal(
+ bytes_to_hex_str(tx.serialize()),
+ self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=[{address: 99}, {address2: 99}]),
+ )
+ # Two data outputs
+ tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=multidict([('data', '99'), ('data', '99')])))))
+ assert_equal(len(tx.vout), 2)
+ assert_equal(
+ bytes_to_hex_str(tx.serialize()),
+ self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=[{'data': '99'}, {'data': '99'}]),
+ )
+ # Multiple mixed outputs
+ tx.deserialize(BytesIO(hex_str_to_bytes(self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=multidict([(address, 99), ('data', '99'), ('data', '99')])))))
+ assert_equal(len(tx.vout), 3)
+ assert_equal(
+ bytes_to_hex_str(tx.serialize()),
+ self.nodes[2].createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=[{address: 99}, {'data': '99'}, {'data': '99'}]),
+ )
+
+ self.log.info('sendrawtransaction with missing input')
inputs = [ {'txid' : "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000", 'vout' : 1}] #won't exists
outputs = { self.nodes[0].getnewaddress() : 4.998 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
@@ -248,14 +288,14 @@ class RawTransactionsTest(BitcoinTestFramework):
outputs = { self.nodes[0].getnewaddress() : 2.19 }
rawTx2 = self.nodes[2].createrawtransaction(inputs, outputs)
rawTxPartialSigned1 = self.nodes[1].signrawtransactionwithwallet(rawTx2, inputs)
- self.log.info(rawTxPartialSigned1)
+ self.log.debug(rawTxPartialSigned1)
assert_equal(rawTxPartialSigned['complete'], False) #node1 only has one key, can't comp. sign the tx
rawTxPartialSigned2 = self.nodes[2].signrawtransactionwithwallet(rawTx2, inputs)
- self.log.info(rawTxPartialSigned2)
+ self.log.debug(rawTxPartialSigned2)
assert_equal(rawTxPartialSigned2['complete'], False) #node2 only has one key, can't comp. sign the tx
rawTxComb = self.nodes[2].combinerawtransaction([rawTxPartialSigned1['hex'], rawTxPartialSigned2['hex']])
- self.log.info(rawTxComb)
+ self.log.debug(rawTxComb)
self.nodes[2].sendrawtransaction(rawTxComb)
rawTx2 = self.nodes[0].decoderawtransaction(rawTxComb)
self.sync_all()
@@ -273,7 +313,7 @@ class RawTransactionsTest(BitcoinTestFramework):
encrawtx = "01000000010000000000000072c1a6a246ae63f74f931e8365e15a089c68d61900000000000000000000ffffffff0100e1f505000000000000000000"
decrawtx = self.nodes[0].decoderawtransaction(encrawtx, False) # decode as non-witness transaction
assert_equal(decrawtx['vout'][0]['value'], Decimal('1.00000000'))
-
+
# getrawtransaction tests
# 1. valid parameters - only supply txid
txHash = rawTx["hash"]
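
The array form of createrawtransaction outputs tested above complements the legacy object form: a list of single-key objects that preserves output order and allows repeated 'data' outputs. A sketch of the two shapes (placeholder address, not taken from the test):

    address = "<some wallet address>"

    # Legacy object form: one entry per key, so 'data' can appear only once.
    outputs_as_object = {address: 99, "data": "99"}

    # New array form: ordered, and multiple 'data' outputs are allowed.
    # Duplicate *addresses* are still rejected, as the assert_raises_rpc_error cases show.
    outputs_as_array = [{address: 99}, {"data": "99"}, {"data": "99"}]

    # Both shapes are accepted by the RPC:
    # node.createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=outputs_as_object)
    # node.createrawtransaction(inputs=[{'txid': txid, 'vout': 9}], outputs=outputs_as_array)
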
diff --git a/test/functional/test_framework/test_framework.py b/test/functional/test_framework/test_framework.py
index 86c1150abd..8efac9c475 100755
--- a/test/functional/test_framework/test_framework.py
+++ b/test/functional/test_framework/test_framework.py
@@ -24,8 +24,8 @@ from .util import (
check_json_precision,
connect_nodes_bi,
disconnect_nodes,
+ get_datadir_path,
initialize_datadir,
- log_filename,
p2p_port,
set_node_times,
sync_blocks,
@@ -358,7 +358,7 @@ class BitcoinTestFramework():
ll = int(self.options.loglevel) if self.options.loglevel.isdigit() else self.options.loglevel.upper()
ch.setLevel(ll)
# Format logs the same as bitcoind's debug.log with microprecision (so log files can be concatenated and sorted)
- formatter = logging.Formatter(fmt='%(asctime)s.%(msecs)03d000 %(name)s (%(levelname)s): %(message)s', datefmt='%Y-%m-%d %H:%M:%S')
+ formatter = logging.Formatter(fmt='%(asctime)s.%(msecs)03d000Z %(name)s (%(levelname)s): %(message)s', datefmt='%Y-%m-%dT%H:%M:%S')
formatter.converter = time.gmtime
fh.setFormatter(formatter)
ch.setFormatter(formatter)
@@ -382,7 +382,7 @@ class BitcoinTestFramework():
assert self.num_nodes <= MAX_NODES
create_cache = False
for i in range(MAX_NODES):
- if not os.path.isdir(os.path.join(self.options.cachedir, 'node' + str(i))):
+ if not os.path.isdir(get_datadir_path(self.options.cachedir, i)):
create_cache = True
break
@@ -391,8 +391,8 @@ class BitcoinTestFramework():
# find and delete old cache directories if any exist
for i in range(MAX_NODES):
- if os.path.isdir(os.path.join(self.options.cachedir, "node" + str(i))):
- shutil.rmtree(os.path.join(self.options.cachedir, "node" + str(i)))
+ if os.path.isdir(get_datadir_path(self.options.cachedir, i)):
+ shutil.rmtree(get_datadir_path(self.options.cachedir, i))
# Create cache directories, run bitcoinds:
for i in range(MAX_NODES):
@@ -430,15 +430,18 @@ class BitcoinTestFramework():
self.stop_nodes()
self.nodes = []
self.disable_mocktime()
+
+ def cache_path(n, *paths):
+ return os.path.join(get_datadir_path(self.options.cachedir, n), "regtest", *paths)
+
for i in range(MAX_NODES):
- os.remove(log_filename(self.options.cachedir, i, "debug.log"))
- os.remove(log_filename(self.options.cachedir, i, "wallets/db.log"))
- os.remove(log_filename(self.options.cachedir, i, "peers.dat"))
- os.remove(log_filename(self.options.cachedir, i, "fee_estimates.dat"))
+ for entry in os.listdir(cache_path(i)):
+ if entry not in ['wallets', 'chainstate', 'blocks']:
+ os.remove(cache_path(i, entry))
for i in range(self.num_nodes):
- from_dir = os.path.join(self.options.cachedir, "node" + str(i))
- to_dir = os.path.join(self.options.tmpdir, "node" + str(i))
+ from_dir = get_datadir_path(self.options.cachedir, i)
+ to_dir = get_datadir_path(self.options.tmpdir, i)
shutil.copytree(from_dir, to_dir)
initialize_datadir(self.options.tmpdir, i) # Overwrite port/rpcport in bitcoin.conf
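
get_datadir_path lives in test_framework/util.py and is not shown in this diff; judging from the removed log_filename helper (see the util.py hunk below), its shape is assumed to be roughly:

    import os

    def get_datadir_path(dirname, n):
        # assumed: the per-node data directory is simply <dirname>/node<n>
        return os.path.join(dirname, "node" + str(n))
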
diff --git a/test/functional/test_framework/util.py b/test/functional/test_framework/util.py
index a4b8d5af02..68ac97d755 100644
--- a/test/functional/test_framework/util.py
+++ b/test/functional/test_framework/util.py
@@ -328,9 +328,6 @@ def get_auth_cookie(datadir):
raise ValueError("No RPC credentials")
return user, password
-def log_filename(dirname, n_node, logname):
- return os.path.join(dirname, "node" + str(n_node), "regtest", logname)
-
def get_bip9_status(node, key):
info = node.getblockchaininfo()
return info['bip9_softforks'][key]
@@ -343,20 +340,15 @@ def disconnect_nodes(from_connection, node_num):
for peer_id in [peer['id'] for peer in from_connection.getpeerinfo() if "testnode%d" % node_num in peer['subver']]:
from_connection.disconnectnode(nodeid=peer_id)
- for _ in range(50):
- if [peer['id'] for peer in from_connection.getpeerinfo() if "testnode%d" % node_num in peer['subver']] == []:
- break
- time.sleep(0.1)
- else:
- raise AssertionError("timed out waiting for disconnect")
+ # wait to disconnect
+ wait_until(lambda: [peer['id'] for peer in from_connection.getpeerinfo() if "testnode%d" % node_num in peer['subver']] == [], timeout=5)
def connect_nodes(from_connection, node_num):
ip_port = "127.0.0.1:" + str(p2p_port(node_num))
from_connection.addnode(ip_port, "onetry")
# poll until version handshake complete to avoid race conditions
# with transaction relaying
- while any(peer['version'] == 0 for peer in from_connection.getpeerinfo()):
- time.sleep(0.1)
+ wait_until(lambda: all(peer['version'] != 0 for peer in from_connection.getpeerinfo()))
def connect_nodes_bi(nodes, a, b):
connect_nodes(nodes[a], b)
@@ -370,54 +362,29 @@ def sync_blocks(rpc_connections, *, wait=1, timeout=60):
one node already synced to the latest, stable tip, otherwise there's a
chance it might return before all nodes are stably synced.
"""
- # Use getblockcount() instead of waitforblockheight() to determine the
- # initial max height because the two RPCs look at different internal global
- # variables (chainActive vs latestBlock) and the former gets updated
- # earlier.
- maxheight = max(x.getblockcount() for x in rpc_connections)
- start_time = cur_time = time.time()
- while cur_time <= start_time + timeout:
- tips = [r.waitforblockheight(maxheight, int(wait * 1000)) for r in rpc_connections]
- if all(t["height"] == maxheight for t in tips):
- if all(t["hash"] == tips[0]["hash"] for t in tips):
- return
- raise AssertionError("Block sync failed, mismatched block hashes:{}".format(
- "".join("\n {!r}".format(tip) for tip in tips)))
- cur_time = time.time()
- raise AssertionError("Block sync to height {} timed out:{}".format(
- maxheight, "".join("\n {!r}".format(tip) for tip in tips)))
-
-def sync_chain(rpc_connections, *, wait=1, timeout=60):
- """
- Wait until everybody has the same best block
- """
- while timeout > 0:
+ stop_time = time.time() + timeout
+ while time.time() <= stop_time:
best_hash = [x.getbestblockhash() for x in rpc_connections]
- if best_hash == [best_hash[0]] * len(best_hash):
+ if best_hash.count(best_hash[0]) == len(rpc_connections):
return
time.sleep(wait)
- timeout -= wait
- raise AssertionError("Chain sync failed: Best block hashes don't match")
+ raise AssertionError("Block sync timed out:{}".format("".join("\n {!r}".format(b) for b in best_hash)))
def sync_mempools(rpc_connections, *, wait=1, timeout=60, flush_scheduler=True):
"""
Wait until everybody has the same transactions in their memory
pools
"""
- while timeout > 0:
- pool = set(rpc_connections[0].getrawmempool())
- num_match = 1
- for i in range(1, len(rpc_connections)):
- if set(rpc_connections[i].getrawmempool()) == pool:
- num_match = num_match + 1
- if num_match == len(rpc_connections):
+ stop_time = time.time() + timeout
+ while time.time() <= stop_time:
+ pool = [set(r.getrawmempool()) for r in rpc_connections]
+ if pool.count(pool[0]) == len(rpc_connections):
if flush_scheduler:
for r in rpc_connections:
r.syncwithvalidationinterfacequeue()
return
time.sleep(wait)
- timeout -= wait
- raise AssertionError("Mempool sync failed")
+ raise AssertionError("Mempool sync timed out:{}".format("".join("\n {!r}".format(m) for m in pool)))
# Transaction/Block functions
#############################
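
The rewritten sync_blocks and sync_mempools share one absolute-deadline polling shape; a condensed, illustrative generalization (the names here are not framework API):

    import time

    def poll_until_equal(getters, *, wait=1, timeout=60):
        """Poll zero-argument callables until they all return the same value."""
        stop_time = time.time() + timeout
        while time.time() <= stop_time:
            values = [get() for get in getters]
            if values.count(values[0]) == len(values):
                return values[0]
            time.sleep(wait)
        raise AssertionError("values never converged:{}".format(
            "".join("\n  {!r}".format(v) for v in values)))

    # sync_blocks(nodes)   ~ poll_until_equal([n.getbestblockhash for n in nodes])
    # sync_mempools(nodes) ~ poll_until_equal([lambda n=n: set(n.getrawmempool()) for n in nodes])
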
diff --git a/test/functional/test_runner.py b/test/functional/test_runner.py
index 082191098e..09f7f50de0 100755
--- a/test/functional/test_runner.py
+++ b/test/functional/test_runner.py
@@ -52,6 +52,9 @@ if os.name == 'posix':
TEST_EXIT_PASSED = 0
TEST_EXIT_SKIPPED = 77
+# 20 minutes represented in seconds
+TRAVIS_TIMEOUT_DURATION = 20 * 60
+
BASE_SCRIPTS= [
# Scripts that are run by the travis build process.
# Longest test should go first, to favor running tests in parallel
@@ -233,29 +236,27 @@ def main():
sys.exit(0)
# Build list of tests
+ test_list = []
if tests:
# Individual tests have been specified. Run specified tests that exist
# in the ALL_SCRIPTS list. Accept the name with or without .py extension.
- tests = [re.sub("\.py$", "", t) + ".py" for t in tests]
- test_list = []
- for t in tests:
- if t in ALL_SCRIPTS:
- test_list.append(t)
+ tests = [re.sub("\.py$", "", test) + ".py" for test in tests]
+ for test in tests:
+ if test in ALL_SCRIPTS:
+ test_list.append(test)
else:
- print("{}WARNING!{} Test '{}' not found in full test list.".format(BOLD[1], BOLD[0], t))
+ print("{}WARNING!{} Test '{}' not found in full test list.".format(BOLD[1], BOLD[0], test))
+ elif args.extended:
+ # Include extended tests
+ test_list += ALL_SCRIPTS
else:
- # No individual tests have been specified.
- # Run all base tests, and optionally run extended tests.
- test_list = BASE_SCRIPTS
- if args.extended:
- # place the EXTENDED_SCRIPTS first since the three longest ones
- # are there and the list is shorter
- test_list = EXTENDED_SCRIPTS + test_list
+ # Run base tests only
+ test_list += BASE_SCRIPTS
# Remove the test cases that the user has explicitly asked to exclude.
if args.exclude:
- tests_excl = [re.sub("\.py$", "", t) + ".py" for t in args.exclude.split(',')]
- for exclude_test in tests_excl:
+ exclude_tests = [re.sub("\.py$", "", test) + ".py" for test in args.exclude.split(',')]
+ for exclude_test in exclude_tests:
if exclude_test in test_list:
test_list.remove(exclude_test)
else:
@@ -320,7 +321,7 @@ def run_tests(test_list, src_dir, build_dir, exeext, tmpdir, jobs=1, enable_cove
#Run Tests
job_queue = TestHandler(jobs, tests_dir, tmpdir, test_list, flags)
- time0 = time.time()
+ start_time = time.time()
test_results = []
max_len_name = len(max(test_list, key=len))
@@ -346,7 +347,7 @@ def run_tests(test_list, src_dir, build_dir, exeext, tmpdir, jobs=1, enable_cove
combined_logs, _ = subprocess.Popen([sys.executable, os.path.join(tests_dir, 'combine_logs.py'), '-c', testdir], universal_newlines=True, stdout=subprocess.PIPE).communicate()
print("\n".join(deque(combined_logs.splitlines(), combined_logs_len)))
- print_results(test_results, max_len_name, (int(time.time() - time0)))
+ print_results(test_results, max_len_name, (int(time.time() - start_time)))
if coverage:
coverage.report_rpc_coverage()
@@ -403,15 +404,15 @@ class TestHandler:
while self.num_running < self.num_jobs and self.test_list:
# Add tests
self.num_running += 1
- t = self.test_list.pop(0)
+ test = self.test_list.pop(0)
portseed = len(self.test_list) + self.portseed_offset
portseed_arg = ["--portseed={}".format(portseed)]
log_stdout = tempfile.SpooledTemporaryFile(max_size=2**16)
log_stderr = tempfile.SpooledTemporaryFile(max_size=2**16)
- test_argv = t.split()
+ test_argv = test.split()
testdir = "{}/{}_{}".format(self.tmpdir, re.sub(".py$", "", test_argv[0]), portseed)
tmpdir_arg = ["--tmpdir={}".format(testdir)]
- self.jobs.append((t,
+ self.jobs.append((test,
time.time(),
subprocess.Popen([sys.executable, self.tests_dir + test_argv[0]] + test_argv[1:] + self.flags + portseed_arg + tmpdir_arg,
universal_newlines=True,
@@ -425,15 +426,14 @@ class TestHandler:
while True:
# Return first proc that finishes
time.sleep(.5)
- for j in self.jobs:
- (name, time0, proc, testdir, log_out, log_err) = j
- if os.getenv('TRAVIS') == 'true' and int(time.time() - time0) > 20 * 60:
- # In travis, timeout individual tests after 20 minutes (to stop tests hanging and not
- # providing useful output.
+ for job in self.jobs:
+ (name, start_time, proc, testdir, log_out, log_err) = job
+ if os.getenv('TRAVIS') == 'true' and int(time.time() - start_time) > TRAVIS_TIMEOUT_DURATION:
+ # In travis, timeout individual tests (to stop tests hanging and not providing useful output).
proc.send_signal(signal.SIGINT)
if proc.poll() is not None:
log_out.seek(0), log_err.seek(0)
- [stdout, stderr] = [l.read().decode('utf-8') for l in (log_out, log_err)]
+ [stdout, stderr] = [log_file.read().decode('utf-8') for log_file in (log_out, log_err)]
log_out.close(), log_err.close()
if proc.returncode == TEST_EXIT_PASSED and stderr == "":
status = "Passed"
@@ -442,9 +442,9 @@ class TestHandler:
else:
status = "Failed"
self.num_running -= 1
- self.jobs.remove(j)
+ self.jobs.remove(job)
- return TestResult(name, status, int(time.time() - time0)), testdir, stdout, stderr
+ return TestResult(name, status, int(time.time() - start_time)), testdir, stdout, stderr
print('.', end='', flush=True)
class TestResult():
@@ -490,7 +490,7 @@ def check_script_list(src_dir):
Check that there are no scripts in the functional tests directory which are
not being run by pull-tester.py."""
script_dir = src_dir + '/test/functional/'
- python_files = set([t for t in os.listdir(script_dir) if t[-3:] == ".py"])
+ python_files = set([test_file for test_file in os.listdir(script_dir) if test_file.endswith(".py")])
missed_tests = list(python_files - set(map(lambda x: x.split()[0], ALL_SCRIPTS + NON_SCRIPTS)))
if len(missed_tests) != 0:
print("%sWARNING!%s The following scripts are not being run: %s. Check the test lists in test_runner.py." % (BOLD[1], BOLD[0], str(missed_tests)))
@@ -526,7 +526,7 @@ class RPCCoverage():
if uncovered:
print("Uncovered RPC commands:")
- print("".join((" - %s\n" % i) for i in sorted(uncovered)))
+ print("".join((" - %s\n" % command) for command in sorted(uncovered)))
else:
print("All RPC commands covered.")
@@ -550,8 +550,8 @@ class RPCCoverage():
if not os.path.isfile(coverage_ref_filename):
raise RuntimeError("No coverage reference found")
- with open(coverage_ref_filename, 'r') as f:
- all_cmds.update([i.strip() for i in f.readlines()])
+ with open(coverage_ref_filename, 'r') as coverage_ref_file:
+ all_cmds.update([line.strip() for line in coverage_ref_file.readlines()])
for root, dirs, files in os.walk(self.dir):
for filename in files:
@@ -559,8 +559,8 @@ class RPCCoverage():
coverage_filenames.add(os.path.join(root, filename))
for filename in coverage_filenames:
- with open(filename, 'r') as f:
- covered_cmds.update([i.strip() for i in f.readlines()])
+ with open(filename, 'r') as coverage_file:
+ covered_cmds.update([line.strip() for line in coverage_file.readlines()])
return all_cmds - covered_cmds
diff --git a/test/functional/wallet_basic.py b/test/functional/wallet_basic.py
index f686cb6ea5..dcd6c54d97 100755
--- a/test/functional/wallet_basic.py
+++ b/test/functional/wallet_basic.py
@@ -379,9 +379,9 @@ class WalletTest(BitcoinTestFramework):
self.start_node(0, [m, "-limitancestorcount="+str(chainlimit)])
self.start_node(1, [m, "-limitancestorcount="+str(chainlimit)])
self.start_node(2, [m, "-limitancestorcount="+str(chainlimit)])
- while m == '-reindex' and [block_count] * 3 != [self.nodes[i].getblockcount() for i in range(3)]:
+ if m == '-reindex':
# reindex will leave rpc warm up "early"; Wait for it to finish
- time.sleep(0.1)
+ wait_until(lambda: [block_count] * 3 == [self.nodes[i].getblockcount() for i in range(3)])
assert_equal(balance_nodes, [self.nodes[i].getbalance() for i in range(3)])
# Exercise listsinceblock with the last two blocks