Diffstat (limited to 'test')
-rwxr-xr-x  test/functional/feature_csv_activation.py     566
-rwxr-xr-x  test/functional/feature_help.py                 42
-rwxr-xr-x  test/functional/rpc_net.py                      34
-rwxr-xr-x  test/functional/test_framework/test_node.py    14
-rwxr-xr-x  test/functional/test_runner.py                   1
5 files changed, 337 insertions, 320 deletions
diff --git a/test/functional/feature_csv_activation.py b/test/functional/feature_csv_activation.py
index 8b5e5681e4..37d60aad61 100755
--- a/test/functional/feature_csv_activation.py
+++ b/test/functional/feature_csv_activation.py
@@ -42,98 +42,131 @@ bip112txs_vary_OP_CSV - 16 txs with nSequence = 10 evaluated against varying {re
bip112txs_vary_OP_CSV_9 - 16 txs with nSequence = 9 evaluated against varying {relative_locktimes of 10} OP_CSV OP_DROP
bip112tx_special - test negative argument to OP_CSV
"""
-
-from test_framework.test_framework import ComparisonTestFramework
-from test_framework.util import *
-from test_framework.mininode import ToHex, CTransaction, network_thread_start
-from test_framework.blocktools import create_coinbase, create_block
-from test_framework.comptool import TestInstance, TestManager
-from test_framework.script import *
+from decimal import Decimal
+from itertools import product
from io import BytesIO
import time
-base_relative_locktime = 10
-seq_disable_flag = 1<<31
-seq_random_high_bit = 1<<25
-seq_type_flag = 1<<22
-seq_random_low_bit = 1<<18
-
-# b31,b25,b22,b18 represent the 31st, 25th, 22nd and 18th bits respectively in the nSequence field
-# relative_locktimes[b31][b25][b22][b18] is a base_relative_locktime with the indicated bits set if their indices are 1
-relative_locktimes = []
-for b31 in range(2):
- b25times = []
- for b25 in range(2):
- b22times = []
- for b22 in range(2):
- b18times = []
- for b18 in range(2):
- rlt = base_relative_locktime
- if (b31):
- rlt = rlt | seq_disable_flag
- if (b25):
- rlt = rlt | seq_random_high_bit
- if (b22):
- rlt = rlt | seq_type_flag
- if (b18):
- rlt = rlt | seq_random_low_bit
- b18times.append(rlt)
- b22times.append(b18times)
- b25times.append(b22times)
- relative_locktimes.append(b25times)
-
-def all_rlt_txs(txarray):
+from test_framework.blocktools import create_coinbase, create_block
+from test_framework.messages import ToHex, CTransaction
+from test_framework.mininode import network_thread_start, P2PDataStore
+from test_framework.script import (
+ CScript,
+ OP_CHECKSEQUENCEVERIFY,
+ OP_DROP,
+)
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import (
+ assert_equal,
+ get_bip9_status,
+ hex_str_to_bytes,
+)
+
+BASE_RELATIVE_LOCKTIME = 10
+SEQ_DISABLE_FLAG = 1 << 31
+SEQ_RANDOM_HIGH_BIT = 1 << 25
+SEQ_TYPE_FLAG = 1 << 22
+SEQ_RANDOM_LOW_BIT = 1 << 18
+
+def relative_locktime(sdf, srhb, stf, srlb):
+ """Returns a locktime with certain bits set."""
+
+ locktime = BASE_RELATIVE_LOCKTIME
+ if sdf:
+ locktime |= SEQ_DISABLE_FLAG
+ if srhb:
+ locktime |= SEQ_RANDOM_HIGH_BIT
+ if stf:
+ locktime |= SEQ_TYPE_FLAG
+ if srlb:
+ locktime |= SEQ_RANDOM_LOW_BIT
+ return locktime
+
+def all_rlt_txs(txs):
+ return [tx['tx'] for tx in txs]
+
+def create_transaction(node, txid, to_address, amount):
+ inputs = [{"txid": txid, "vout": 0}]
+ outputs = {to_address: amount}
+ rawtx = node.createrawtransaction(inputs, outputs)
+ tx = CTransaction()
+ f = BytesIO(hex_str_to_bytes(rawtx))
+ tx.deserialize(f)
+ return tx
+
+def sign_transaction(node, unsignedtx):
+ rawtx = ToHex(unsignedtx)
+ signresult = node.signrawtransactionwithwallet(rawtx)
+ tx = CTransaction()
+ f = BytesIO(hex_str_to_bytes(signresult['hex']))
+ tx.deserialize(f)
+ return tx
+
+def create_bip112special(node, input, txversion, address):
+ tx = create_transaction(node, input, address, Decimal("49.98"))
+ tx.nVersion = txversion
+ signtx = sign_transaction(node, tx)
+ signtx.vin[0].scriptSig = CScript([-1, OP_CHECKSEQUENCEVERIFY, OP_DROP] + list(CScript(signtx.vin[0].scriptSig)))
+ return signtx
+
+def send_generic_input_tx(node, coinbases, address):
+ amount = Decimal("49.99")
+ return node.sendrawtransaction(ToHex(sign_transaction(node, create_transaction(node, node.getblock(coinbases.pop())['tx'][0], address, amount))))
+
+def create_bip68txs(node, bip68inputs, txversion, address, locktime_delta=0):
+ """Returns a list of bip68 transactions with different bits set."""
txs = []
- for b31 in range(2):
- for b25 in range(2):
- for b22 in range(2):
- for b18 in range(2):
- txs.append(txarray[b31][b25][b22][b18])
+ assert(len(bip68inputs) >= 16)
+ for i, (sdf, srhb, stf, srlb) in enumerate(product(*[[True, False]] * 4)):
+ locktime = relative_locktime(sdf, srhb, stf, srlb)
+ tx = create_transaction(node, bip68inputs[i], address, Decimal("49.98"))
+ tx.nVersion = txversion
+ tx.vin[0].nSequence = locktime + locktime_delta
+ tx = sign_transaction(node, tx)
+ tx.rehash()
+ txs.append({'tx': tx, 'sdf': sdf, 'stf': stf})
+
return txs
-class BIP68_112_113Test(ComparisonTestFramework):
+def create_bip112txs(node, bip112inputs, varyOP_CSV, txversion, address, locktime_delta=0):
+ """Returns a list of bip68 transactions with different bits set."""
+ txs = []
+ assert(len(bip112inputs) >= 16)
+ for i, (sdf, srhb, stf, srlb) in enumerate(product(*[[True, False]] * 4)):
+ locktime = relative_locktime(sdf, srhb, stf, srlb)
+ tx = create_transaction(node, bip112inputs[i], address, Decimal("49.98"))
+ if (varyOP_CSV): # if varying OP_CSV, nSequence is fixed
+ tx.vin[0].nSequence = BASE_RELATIVE_LOCKTIME + locktime_delta
+ else: # vary nSequence instead, OP_CSV is fixed
+ tx.vin[0].nSequence = locktime + locktime_delta
+ tx.nVersion = txversion
+ signtx = sign_transaction(node, tx)
+ if (varyOP_CSV):
+ signtx.vin[0].scriptSig = CScript([locktime, OP_CHECKSEQUENCEVERIFY, OP_DROP] + list(CScript(signtx.vin[0].scriptSig)))
+ else:
+ signtx.vin[0].scriptSig = CScript([BASE_RELATIVE_LOCKTIME, OP_CHECKSEQUENCEVERIFY, OP_DROP] + list(CScript(signtx.vin[0].scriptSig)))
+ tx.rehash()
+ txs.append({'tx': signtx, 'sdf': sdf, 'stf': stf})
+ return txs
+
+class BIP68_112_113Test(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.setup_clean_chain = True
self.extra_args = [['-whitelist=127.0.0.1', '-blockversion=4', '-addresstype=legacy']]
- def run_test(self):
- test = TestManager(self, self.options.tmpdir)
- test.add_all_connections(self.nodes)
- network_thread_start()
- test.run()
-
- def send_generic_input_tx(self, node, coinbases):
- amount = Decimal("49.99")
- return node.sendrawtransaction(ToHex(self.sign_transaction(node, self.create_transaction(node, node.getblock(coinbases.pop())['tx'][0], self.nodeaddress, amount))))
-
- def create_transaction(self, node, txid, to_address, amount):
- inputs = [{ "txid" : txid, "vout" : 0}]
- outputs = { to_address : amount }
- rawtx = node.createrawtransaction(inputs, outputs)
- tx = CTransaction()
- f = BytesIO(hex_str_to_bytes(rawtx))
- tx.deserialize(f)
- return tx
-
- def sign_transaction(self, node, unsignedtx):
- rawtx = ToHex(unsignedtx)
- signresult = node.signrawtransactionwithwallet(rawtx)
- tx = CTransaction()
- f = BytesIO(hex_str_to_bytes(signresult['hex']))
- tx.deserialize(f)
- return tx
-
- def generate_blocks(self, number, version, test_blocks = []):
+ def generate_blocks(self, number, version, test_blocks=None):
+ if test_blocks is None:
+ test_blocks = []
for i in range(number):
block = self.create_test_block([], version)
- test_blocks.append([block, True])
+ test_blocks.append(block)
self.last_block_time += 600
self.tip = block.sha256
self.tipheight += 1
return test_blocks
- def create_test_block(self, txs, version = 536870912):
+ def create_test_block(self, txs, version=536870912):
block = create_block(self.tip, create_coinbase(self.tipheight + 1), self.last_block_time + 600)
block.nVersion = version
block.vtx.extend(txs)
@@ -142,184 +175,148 @@ class BIP68_112_113Test(ComparisonTestFramework):
block.solve()
return block
- def create_bip68txs(self, bip68inputs, txversion, locktime_delta = 0):
- txs = []
- assert(len(bip68inputs) >= 16)
- i = 0
- for b31 in range(2):
- b25txs = []
- for b25 in range(2):
- b22txs = []
- for b22 in range(2):
- b18txs = []
- for b18 in range(2):
- tx = self.create_transaction(self.nodes[0], bip68inputs[i], self.nodeaddress, Decimal("49.98"))
- i += 1
- tx.nVersion = txversion
- tx.vin[0].nSequence = relative_locktimes[b31][b25][b22][b18] + locktime_delta
- b18txs.append(self.sign_transaction(self.nodes[0], tx))
- b22txs.append(b18txs)
- b25txs.append(b22txs)
- txs.append(b25txs)
- return txs
-
- def create_bip112special(self, input, txversion):
- tx = self.create_transaction(self.nodes[0], input, self.nodeaddress, Decimal("49.98"))
- tx.nVersion = txversion
- signtx = self.sign_transaction(self.nodes[0], tx)
- signtx.vin[0].scriptSig = CScript([-1, OP_CHECKSEQUENCEVERIFY, OP_DROP] + list(CScript(signtx.vin[0].scriptSig)))
- return signtx
-
- def create_bip112txs(self, bip112inputs, varyOP_CSV, txversion, locktime_delta = 0):
- txs = []
- assert(len(bip112inputs) >= 16)
- i = 0
- for b31 in range(2):
- b25txs = []
- for b25 in range(2):
- b22txs = []
- for b22 in range(2):
- b18txs = []
- for b18 in range(2):
- tx = self.create_transaction(self.nodes[0], bip112inputs[i], self.nodeaddress, Decimal("49.98"))
- i += 1
- if (varyOP_CSV): # if varying OP_CSV, nSequence is fixed
- tx.vin[0].nSequence = base_relative_locktime + locktime_delta
- else: # vary nSequence instead, OP_CSV is fixed
- tx.vin[0].nSequence = relative_locktimes[b31][b25][b22][b18] + locktime_delta
- tx.nVersion = txversion
- signtx = self.sign_transaction(self.nodes[0], tx)
- if (varyOP_CSV):
- signtx.vin[0].scriptSig = CScript([relative_locktimes[b31][b25][b22][b18], OP_CHECKSEQUENCEVERIFY, OP_DROP] + list(CScript(signtx.vin[0].scriptSig)))
- else:
- signtx.vin[0].scriptSig = CScript([base_relative_locktime, OP_CHECKSEQUENCEVERIFY, OP_DROP] + list(CScript(signtx.vin[0].scriptSig)))
- b18txs.append(signtx)
- b22txs.append(b18txs)
- b25txs.append(b22txs)
- txs.append(b25txs)
- return txs
-
- def get_tests(self):
- long_past_time = int(time.time()) - 600 * 1000 # enough to build up to 1000 blocks 10 minutes apart without worrying about getting into the future
- self.nodes[0].setmocktime(long_past_time - 100) # enough so that the generated blocks will still all be before long_past_time
- self.coinbase_blocks = self.nodes[0].generate(1 + 16 + 2*32 + 1) # 82 blocks generated for inputs
- self.nodes[0].setmocktime(0) # set time back to present so yielded blocks aren't in the future as we advance last_block_time
- self.tipheight = 82 # height of the next block to build
+ def sync_blocks(self, blocks, success=True, reject_code=None, reject_reason=None, request_block=True):
+ """Sends blocks to test node. Syncs and verifies that tip has advanced to most recent block.
+
+ Call with success = False if the tip shouldn't advance to the most recent block."""
+ self.nodes[0].p2p.send_blocks_and_test(blocks, self.nodes[0], success=success, reject_code=reject_code, reject_reason=reject_reason, request_block=request_block)
+
+ def run_test(self):
+ self.nodes[0].add_p2p_connection(P2PDataStore())
+ network_thread_start()
+ self.nodes[0].p2p.wait_for_verack()
+
+ self.log.info("Generate blocks in the past for coinbase outputs.")
+ long_past_time = int(time.time()) - 600 * 1000 # enough to build up to 1000 blocks 10 minutes apart without worrying about getting into the future
+ self.nodes[0].setmocktime(long_past_time - 100) # enough so that the generated blocks will still all be before long_past_time
+ self.coinbase_blocks = self.nodes[0].generate(1 + 16 + 2 * 32 + 1) # 82 blocks generated for inputs
+ self.nodes[0].setmocktime(0) # set time back to present so yielded blocks aren't in the future as we advance last_block_time
+ self.tipheight = 82 # height of the next block to build
self.last_block_time = long_past_time
- self.tip = int("0x" + self.nodes[0].getbestblockhash(), 0)
+ self.tip = int(self.nodes[0].getbestblockhash(), 16)
self.nodeaddress = self.nodes[0].getnewaddress()
+ self.log.info("Test that the csv softfork is DEFINED")
assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'defined')
test_blocks = self.generate_blocks(61, 4)
- yield TestInstance(test_blocks, sync_every_block=False) # 1
- # Advanced from DEFINED to STARTED, height = 143
+ self.sync_blocks(test_blocks)
+
+ self.log.info("Advance from DEFINED to STARTED, height = 143")
assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'started')
- # Fail to achieve LOCKED_IN 100 out of 144 signal bit 0
- # using a variety of bits to simulate multiple parallel softforks
- test_blocks = self.generate_blocks(50, 536870913) # 0x20000001 (signalling ready)
- test_blocks = self.generate_blocks(20, 4, test_blocks) # 0x00000004 (signalling not)
- test_blocks = self.generate_blocks(50, 536871169, test_blocks) # 0x20000101 (signalling ready)
- test_blocks = self.generate_blocks(24, 536936448, test_blocks) # 0x20010000 (signalling not)
- yield TestInstance(test_blocks, sync_every_block=False) # 2
- # Failed to advance past STARTED, height = 287
+ self.log.info("Fail to achieve LOCKED_IN")
+ # 100 out of 144 signal bit 0. Use a variety of bits to simulate multiple parallel softforks
+
+ test_blocks = self.generate_blocks(50, 536870913) # 0x20000001 (signalling ready)
+ test_blocks = self.generate_blocks(20, 4, test_blocks) # 0x00000004 (signalling not)
+ test_blocks = self.generate_blocks(50, 536871169, test_blocks) # 0x20000101 (signalling ready)
+ test_blocks = self.generate_blocks(24, 536936448, test_blocks) # 0x20010000 (signalling not)
+ self.sync_blocks(test_blocks)
+
+ self.log.info("Failed to advance past STARTED, height = 287")
assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'started')
+ self.log.info("Generate blocks to achieve LOCK-IN")
# 108 out of 144 signal bit 0 to achieve lock-in
# using a variety of bits to simulate multiple parallel softforks
- test_blocks = self.generate_blocks(58, 536870913) # 0x20000001 (signalling ready)
- test_blocks = self.generate_blocks(26, 4, test_blocks) # 0x00000004 (signalling not)
- test_blocks = self.generate_blocks(50, 536871169, test_blocks) # 0x20000101 (signalling ready)
- test_blocks = self.generate_blocks(10, 536936448, test_blocks) # 0x20010000 (signalling not)
- yield TestInstance(test_blocks, sync_every_block=False) # 3
- # Advanced from STARTED to LOCKED_IN, height = 431
+ test_blocks = self.generate_blocks(58, 536870913) # 0x20000001 (signalling ready)
+ test_blocks = self.generate_blocks(26, 4, test_blocks) # 0x00000004 (signalling not)
+ test_blocks = self.generate_blocks(50, 536871169, test_blocks) # 0x20000101 (signalling ready)
+ test_blocks = self.generate_blocks(10, 536936448, test_blocks) # 0x20010000 (signalling not)
+ self.sync_blocks(test_blocks)
+
+ self.log.info("Advanced from STARTED to LOCKED_IN, height = 431")
assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'locked_in')
- # 140 more version 4 blocks
+ # Generate 140 more version 4 blocks
test_blocks = self.generate_blocks(140, 4)
- yield TestInstance(test_blocks, sync_every_block=False) # 4
+ self.sync_blocks(test_blocks)
- ### Inputs at height = 572
+ # Inputs at height = 572
+ #
# Put inputs for all tests in the chain at height 572 (tip now = 571) (time increases by 600s per block)
# Note we reuse inputs for v1 and v2 txs so must test these separately
# 16 normal inputs
bip68inputs = []
for i in range(16):
- bip68inputs.append(self.send_generic_input_tx(self.nodes[0], self.coinbase_blocks))
+ bip68inputs.append(send_generic_input_tx(self.nodes[0], self.coinbase_blocks, self.nodeaddress))
+
# 2 sets of 16 inputs with 10 OP_CSV OP_DROP (actually will be prepended to spending scriptSig)
bip112basicinputs = []
for j in range(2):
inputs = []
for i in range(16):
- inputs.append(self.send_generic_input_tx(self.nodes[0], self.coinbase_blocks))
+ inputs.append(send_generic_input_tx(self.nodes[0], self.coinbase_blocks, self.nodeaddress))
bip112basicinputs.append(inputs)
+
# 2 sets of 16 varied inputs with (relative_lock_time) OP_CSV OP_DROP (actually will be prepended to spending scriptSig)
bip112diverseinputs = []
for j in range(2):
inputs = []
for i in range(16):
- inputs.append(self.send_generic_input_tx(self.nodes[0], self.coinbase_blocks))
+ inputs.append(send_generic_input_tx(self.nodes[0], self.coinbase_blocks, self.nodeaddress))
bip112diverseinputs.append(inputs)
+
# 1 special input with -1 OP_CSV OP_DROP (actually will be prepended to spending scriptSig)
- bip112specialinput = self.send_generic_input_tx(self.nodes[0], self.coinbase_blocks)
+ bip112specialinput = send_generic_input_tx(self.nodes[0], self.coinbase_blocks, self.nodeaddress)
+
# 1 normal input
- bip113input = self.send_generic_input_tx(self.nodes[0], self.coinbase_blocks)
+ bip113input = send_generic_input_tx(self.nodes[0], self.coinbase_blocks, self.nodeaddress)
self.nodes[0].setmocktime(self.last_block_time + 600)
- inputblockhash = self.nodes[0].generate(1)[0] # 1 block generated for inputs to be in chain at height 572
+ inputblockhash = self.nodes[0].generate(1)[0] # 1 block generated for inputs to be in chain at height 572
self.nodes[0].setmocktime(0)
- self.tip = int("0x" + inputblockhash, 0)
+ self.tip = int(inputblockhash, 16)
self.tipheight += 1
self.last_block_time += 600
- assert_equal(len(self.nodes[0].getblock(inputblockhash,True)["tx"]), 82+1)
+ assert_equal(len(self.nodes[0].getblock(inputblockhash, True)["tx"]), 82 + 1)
# 2 more version 4 blocks
test_blocks = self.generate_blocks(2, 4)
- yield TestInstance(test_blocks, sync_every_block=False) # 5
- # Not yet advanced to ACTIVE, height = 574 (will activate for block 576, not 575)
+ self.sync_blocks(test_blocks)
+
+ self.log.info("Not yet advanced to ACTIVE, height = 574 (will activate for block 576, not 575)")
assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'locked_in')
# Test both version 1 and version 2 transactions for all tests
# BIP113 test transaction will be modified before each use to put in appropriate block time
- bip113tx_v1 = self.create_transaction(self.nodes[0], bip113input, self.nodeaddress, Decimal("49.98"))
+ bip113tx_v1 = create_transaction(self.nodes[0], bip113input, self.nodeaddress, Decimal("49.98"))
bip113tx_v1.vin[0].nSequence = 0xFFFFFFFE
bip113tx_v1.nVersion = 1
- bip113tx_v2 = self.create_transaction(self.nodes[0], bip113input, self.nodeaddress, Decimal("49.98"))
+ bip113tx_v2 = create_transaction(self.nodes[0], bip113input, self.nodeaddress, Decimal("49.98"))
bip113tx_v2.vin[0].nSequence = 0xFFFFFFFE
bip113tx_v2.nVersion = 2
# For BIP68 test all 16 relative sequence locktimes
- bip68txs_v1 = self.create_bip68txs(bip68inputs, 1)
- bip68txs_v2 = self.create_bip68txs(bip68inputs, 2)
+ bip68txs_v1 = create_bip68txs(self.nodes[0], bip68inputs, 1, self.nodeaddress)
+ bip68txs_v2 = create_bip68txs(self.nodes[0], bip68inputs, 2, self.nodeaddress)
# For BIP112 test:
# 16 relative sequence locktimes of 10 against 10 OP_CSV OP_DROP inputs
- bip112txs_vary_nSequence_v1 = self.create_bip112txs(bip112basicinputs[0], False, 1)
- bip112txs_vary_nSequence_v2 = self.create_bip112txs(bip112basicinputs[0], False, 2)
+ bip112txs_vary_nSequence_v1 = create_bip112txs(self.nodes[0], bip112basicinputs[0], False, 1, self.nodeaddress)
+ bip112txs_vary_nSequence_v2 = create_bip112txs(self.nodes[0], bip112basicinputs[0], False, 2, self.nodeaddress)
# 16 relative sequence locktimes of 9 against 10 OP_CSV OP_DROP inputs
- bip112txs_vary_nSequence_9_v1 = self.create_bip112txs(bip112basicinputs[1], False, 1, -1)
- bip112txs_vary_nSequence_9_v2 = self.create_bip112txs(bip112basicinputs[1], False, 2, -1)
+ bip112txs_vary_nSequence_9_v1 = create_bip112txs(self.nodes[0], bip112basicinputs[1], False, 1, self.nodeaddress, -1)
+ bip112txs_vary_nSequence_9_v2 = create_bip112txs(self.nodes[0], bip112basicinputs[1], False, 2, self.nodeaddress, -1)
# sequence lock time of 10 against 16 (relative_lock_time) OP_CSV OP_DROP inputs
- bip112txs_vary_OP_CSV_v1 = self.create_bip112txs(bip112diverseinputs[0], True, 1)
- bip112txs_vary_OP_CSV_v2 = self.create_bip112txs(bip112diverseinputs[0], True, 2)
+ bip112txs_vary_OP_CSV_v1 = create_bip112txs(self.nodes[0], bip112diverseinputs[0], True, 1, self.nodeaddress)
+ bip112txs_vary_OP_CSV_v2 = create_bip112txs(self.nodes[0], bip112diverseinputs[0], True, 2, self.nodeaddress)
# sequence lock time of 9 against 16 (relative_lock_time) OP_CSV OP_DROP inputs
- bip112txs_vary_OP_CSV_9_v1 = self.create_bip112txs(bip112diverseinputs[1], True, 1, -1)
- bip112txs_vary_OP_CSV_9_v2 = self.create_bip112txs(bip112diverseinputs[1], True, 2, -1)
+ bip112txs_vary_OP_CSV_9_v1 = create_bip112txs(self.nodes[0], bip112diverseinputs[1], True, 1, self.nodeaddress, -1)
+ bip112txs_vary_OP_CSV_9_v2 = create_bip112txs(self.nodes[0], bip112diverseinputs[1], True, 2, self.nodeaddress, -1)
# -1 OP_CSV OP_DROP input
- bip112tx_special_v1 = self.create_bip112special(bip112specialinput, 1)
- bip112tx_special_v2 = self.create_bip112special(bip112specialinput, 2)
+ bip112tx_special_v1 = create_bip112special(self.nodes[0], bip112specialinput, 1, self.nodeaddress)
+ bip112tx_special_v2 = create_bip112special(self.nodes[0], bip112specialinput, 2, self.nodeaddress)
+
+ self.log.info("TESTING")
+ self.log.info("Pre-Soft Fork Tests. All txs should pass.")
+ self.log.info("Test version 1 txs")
- ### TESTING ###
- ##################################
- ### Before Soft Forks Activate ###
- ##################################
- # All txs should pass
- ### Version 1 txs ###
success_txs = []
# add BIP113 tx and -1 CSV tx
- bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block
- bip113signed1 = self.sign_transaction(self.nodes[0], bip113tx_v1)
+ bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block
+ bip113signed1 = sign_transaction(self.nodes[0], bip113tx_v1)
success_txs.append(bip113signed1)
success_txs.append(bip112tx_special_v1)
# add BIP 68 txs
@@ -330,14 +327,15 @@ class BIP68_112_113Test(ComparisonTestFramework):
# try BIP 112 with seq=9 txs
success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v1))
success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_9_v1))
- yield TestInstance([[self.create_test_block(success_txs), True]]) # 6
+ self.sync_blocks([self.create_test_block(success_txs)])
self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
- ### Version 2 txs ###
+ self.log.info("Test version 2 txs")
+
success_txs = []
# add BIP113 tx and -1 CSV tx
- bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block
- bip113signed2 = self.sign_transaction(self.nodes[0], bip113tx_v2)
+ bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block
+ bip113signed2 = sign_transaction(self.nodes[0], bip113tx_v2)
success_txs.append(bip113signed2)
success_txs.append(bip112tx_special_v2)
# add BIP 68 txs
@@ -348,187 +346,149 @@ class BIP68_112_113Test(ComparisonTestFramework):
# try BIP 112 with seq=9 txs
success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v2))
success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_9_v2))
- yield TestInstance([[self.create_test_block(success_txs), True]]) # 7
+ self.sync_blocks([self.create_test_block(success_txs)])
self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
-
# 1 more version 4 block to get us to height 575 so the fork should now be active for the next block
test_blocks = self.generate_blocks(1, 4)
- yield TestInstance(test_blocks, sync_every_block=False) # 8
+ self.sync_blocks(test_blocks)
assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], 'active')
+ self.log.info("Post-Soft Fork Tests.")
- #################################
- ### After Soft Forks Activate ###
- #################################
- ### BIP 113 ###
+ self.log.info("BIP 113 tests")
# BIP 113 tests should now fail regardless of version number if nLockTime isn't satisfied by new rules
- bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block
- bip113signed1 = self.sign_transaction(self.nodes[0], bip113tx_v1)
- bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block
- bip113signed2 = self.sign_transaction(self.nodes[0], bip113tx_v2)
+ bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block
+ bip113signed1 = sign_transaction(self.nodes[0], bip113tx_v1)
+ bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block
+ bip113signed2 = sign_transaction(self.nodes[0], bip113tx_v2)
for bip113tx in [bip113signed1, bip113signed2]:
- yield TestInstance([[self.create_test_block([bip113tx]), False]]) # 9,10
+ self.sync_blocks([self.create_test_block([bip113tx])], success=False)
# BIP 113 tests should now pass if the locktime is < MTP
- bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 - 1 # < MTP of prior block
- bip113signed1 = self.sign_transaction(self.nodes[0], bip113tx_v1)
- bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 - 1 # < MTP of prior block
- bip113signed2 = self.sign_transaction(self.nodes[0], bip113tx_v2)
+ bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 - 1 # < MTP of prior block
+ bip113signed1 = sign_transaction(self.nodes[0], bip113tx_v1)
+ bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 - 1 # < MTP of prior block
+ bip113signed2 = sign_transaction(self.nodes[0], bip113tx_v2)
for bip113tx in [bip113signed1, bip113signed2]:
- yield TestInstance([[self.create_test_block([bip113tx]), True]]) # 11,12
+ self.sync_blocks([self.create_test_block([bip113tx])])
self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
# Next block height = 580 after 4 blocks of random version
test_blocks = self.generate_blocks(4, 1234)
- yield TestInstance(test_blocks, sync_every_block=False) # 13
+ self.sync_blocks(test_blocks)
+
+ self.log.info("BIP 68 tests")
+ self.log.info("Test version 1 txs - all should still pass")
- ### BIP 68 ###
- ### Version 1 txs ###
- # All still pass
success_txs = []
success_txs.extend(all_rlt_txs(bip68txs_v1))
- yield TestInstance([[self.create_test_block(success_txs), True]]) # 14
+ self.sync_blocks([self.create_test_block(success_txs)])
self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
- ### Version 2 txs ###
- bip68success_txs = []
+ self.log.info("Test version 2 txs")
+
# All txs with SEQUENCE_LOCKTIME_DISABLE_FLAG set pass
- for b25 in range(2):
- for b22 in range(2):
- for b18 in range(2):
- bip68success_txs.append(bip68txs_v2[1][b25][b22][b18])
- yield TestInstance([[self.create_test_block(bip68success_txs), True]]) # 15
+ bip68success_txs = [tx['tx'] for tx in bip68txs_v2 if tx['sdf']]
+ self.sync_blocks([self.create_test_block(bip68success_txs)])
self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
+
# All txs without flag fail as we are at delta height = 8 < 10 and delta time = 8 * 600 < 10 * 512
- bip68timetxs = []
- for b25 in range(2):
- for b18 in range(2):
- bip68timetxs.append(bip68txs_v2[0][b25][1][b18])
+ bip68timetxs = [tx['tx'] for tx in bip68txs_v2 if not tx['sdf'] and tx['stf']]
for tx in bip68timetxs:
- yield TestInstance([[self.create_test_block([tx]), False]]) # 16 - 19
- bip68heighttxs = []
- for b25 in range(2):
- for b18 in range(2):
- bip68heighttxs.append(bip68txs_v2[0][b25][0][b18])
+ self.sync_blocks([self.create_test_block([tx])], success=False)
+
+ bip68heighttxs = [tx['tx'] for tx in bip68txs_v2 if not tx['sdf'] and not tx['stf']]
for tx in bip68heighttxs:
- yield TestInstance([[self.create_test_block([tx]), False]]) # 20 - 23
+ self.sync_blocks([self.create_test_block([tx])], success=False)
# Advance one block to 581
test_blocks = self.generate_blocks(1, 1234)
- yield TestInstance(test_blocks, sync_every_block=False) # 24
+ self.sync_blocks(test_blocks)
# Height txs should fail and time txs should now pass 9 * 600 > 10 * 512
bip68success_txs.extend(bip68timetxs)
- yield TestInstance([[self.create_test_block(bip68success_txs), True]]) # 25
+ self.sync_blocks([self.create_test_block(bip68success_txs)])
self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
for tx in bip68heighttxs:
- yield TestInstance([[self.create_test_block([tx]), False]]) # 26 - 29
+ self.sync_blocks([self.create_test_block([tx])], success=False)
# Advance one block to 582
test_blocks = self.generate_blocks(1, 1234)
- yield TestInstance(test_blocks, sync_every_block=False) # 30
+ self.sync_blocks(test_blocks)
# All BIP 68 txs should pass
bip68success_txs.extend(bip68heighttxs)
- yield TestInstance([[self.create_test_block(bip68success_txs), True]]) # 31
+ self.sync_blocks([self.create_test_block(bip68success_txs)])
self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
+ self.log.info("BIP 112 tests")
+ self.log.info("Test version 1 txs")
- ### BIP 112 ###
- ### Version 1 txs ###
# -1 OP_CSV tx should fail
- yield TestInstance([[self.create_test_block([bip112tx_special_v1]), False]]) #32
+ self.sync_blocks([self.create_test_block([bip112tx_special_v1])], success=False)
# If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in argument to OP_CSV, version 1 txs should still pass
- success_txs = []
- for b25 in range(2):
- for b22 in range(2):
- for b18 in range(2):
- success_txs.append(bip112txs_vary_OP_CSV_v1[1][b25][b22][b18])
- success_txs.append(bip112txs_vary_OP_CSV_9_v1[1][b25][b22][b18])
- yield TestInstance([[self.create_test_block(success_txs), True]]) # 33
+
+ success_txs = [tx['tx'] for tx in bip112txs_vary_OP_CSV_v1 if tx['sdf']]
+ success_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v1 if tx['sdf']]
+ self.sync_blocks([self.create_test_block(success_txs)])
self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
# If SEQUENCE_LOCKTIME_DISABLE_FLAG is unset in argument to OP_CSV, version 1 txs should now fail
- fail_txs = []
- fail_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_v1))
- fail_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v1))
- for b25 in range(2):
- for b22 in range(2):
- for b18 in range(2):
- fail_txs.append(bip112txs_vary_OP_CSV_v1[0][b25][b22][b18])
- fail_txs.append(bip112txs_vary_OP_CSV_9_v1[0][b25][b22][b18])
-
+ fail_txs = all_rlt_txs(bip112txs_vary_nSequence_v1)
+ fail_txs += all_rlt_txs(bip112txs_vary_nSequence_9_v1)
+        fail_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_v1 if not tx['sdf']]
+ fail_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v1 if not tx['sdf']]
for tx in fail_txs:
- yield TestInstance([[self.create_test_block([tx]), False]]) # 34 - 81
+ self.sync_blocks([self.create_test_block([tx])], success=False)
+
+ self.log.info("Test version 2 txs")
- ### Version 2 txs ###
# -1 OP_CSV tx should fail
- yield TestInstance([[self.create_test_block([bip112tx_special_v2]), False]]) #82
+ self.sync_blocks([self.create_test_block([bip112tx_special_v2])], success=False)
# If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in argument to OP_CSV, version 2 txs should pass (all sequence locks are met)
- success_txs = []
- for b25 in range(2):
- for b22 in range(2):
- for b18 in range(2):
- success_txs.append(bip112txs_vary_OP_CSV_v2[1][b25][b22][b18]) # 8/16 of vary_OP_CSV
- success_txs.append(bip112txs_vary_OP_CSV_9_v2[1][b25][b22][b18]) # 8/16 of vary_OP_CSV_9
+ success_txs = [tx['tx'] for tx in bip112txs_vary_OP_CSV_v2 if tx['sdf']]
+ success_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v2 if tx['sdf']]
- yield TestInstance([[self.create_test_block(success_txs), True]]) # 83
+ self.sync_blocks([self.create_test_block(success_txs)])
self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
- ## SEQUENCE_LOCKTIME_DISABLE_FLAG is unset in argument to OP_CSV for all remaining txs ##
- # All txs with nSequence 9 should fail either due to earlier mismatch or failing the CSV check
- fail_txs = []
- fail_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v2)) # 16/16 of vary_nSequence_9
- for b25 in range(2):
- for b22 in range(2):
- for b18 in range(2):
- fail_txs.append(bip112txs_vary_OP_CSV_9_v2[0][b25][b22][b18]) # 16/16 of vary_OP_CSV_9
+        # SEQUENCE_LOCKTIME_DISABLE_FLAG is unset in argument to OP_CSV for all remaining txs
+ # All txs with nSequence 9 should fail either due to earlier mismatch or failing the CSV check
+ fail_txs = all_rlt_txs(bip112txs_vary_nSequence_9_v2)
+ fail_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v2 if not tx['sdf']]
for tx in fail_txs:
- yield TestInstance([[self.create_test_block([tx]), False]]) # 84 - 107
+ self.sync_blocks([self.create_test_block([tx])], success=False)
# If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in nSequence, tx should fail
- fail_txs = []
- for b25 in range(2):
- for b22 in range(2):
- for b18 in range(2):
- fail_txs.append(bip112txs_vary_nSequence_v2[1][b25][b22][b18]) # 8/16 of vary_nSequence
+ fail_txs = [tx['tx'] for tx in bip112txs_vary_nSequence_v2 if tx['sdf']]
for tx in fail_txs:
- yield TestInstance([[self.create_test_block([tx]), False]]) # 108-115
+ self.sync_blocks([self.create_test_block([tx])], success=False)
# If sequencelock types mismatch, tx should fail
- fail_txs = []
- for b25 in range(2):
- for b18 in range(2):
- fail_txs.append(bip112txs_vary_nSequence_v2[0][b25][1][b18]) # 12/16 of vary_nSequence
- fail_txs.append(bip112txs_vary_OP_CSV_v2[0][b25][1][b18]) # 12/16 of vary_OP_CSV
+ fail_txs = [tx['tx'] for tx in bip112txs_vary_nSequence_v2 if not tx['sdf'] and tx['stf']]
+ fail_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_v2 if not tx['sdf'] and tx['stf']]
for tx in fail_txs:
- yield TestInstance([[self.create_test_block([tx]), False]]) # 116-123
+ self.sync_blocks([self.create_test_block([tx])], success=False)
# Remaining txs should pass, just test masking works properly
- success_txs = []
- for b25 in range(2):
- for b18 in range(2):
- success_txs.append(bip112txs_vary_nSequence_v2[0][b25][0][b18]) # 16/16 of vary_nSequence
- success_txs.append(bip112txs_vary_OP_CSV_v2[0][b25][0][b18]) # 16/16 of vary_OP_CSV
- yield TestInstance([[self.create_test_block(success_txs), True]]) # 124
+ success_txs = [tx['tx'] for tx in bip112txs_vary_nSequence_v2 if not tx['sdf'] and not tx['stf']]
+ success_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_v2 if not tx['sdf'] and not tx['stf']]
+ self.sync_blocks([self.create_test_block(success_txs)])
self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
# Additional test, of checking that comparison of two time types works properly
time_txs = []
- for b25 in range(2):
- for b18 in range(2):
- tx = bip112txs_vary_OP_CSV_v2[0][b25][1][b18]
- tx.vin[0].nSequence = base_relative_locktime | seq_type_flag
- signtx = self.sign_transaction(self.nodes[0], tx)
- time_txs.append(signtx)
- yield TestInstance([[self.create_test_block(time_txs), True]]) # 125
- self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
+ for tx in [tx['tx'] for tx in bip112txs_vary_OP_CSV_v2 if not tx['sdf'] and tx['stf']]:
+ tx.vin[0].nSequence = BASE_RELATIVE_LOCKTIME | SEQ_TYPE_FLAG
+ signtx = sign_transaction(self.nodes[0], tx)
+ time_txs.append(signtx)
- ### Missing aspects of test
- ## Testing empty stack fails
+ self.sync_blocks([self.create_test_block(time_txs)])
+ self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
+ # TODO: Test empty stack fails
if __name__ == '__main__':
BIP68_112_113Test().main()
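
Note on the conversion above: the old nested b31/b25/b22/b18 loops are collapsed into a single pass over itertools.product, and callers now filter a flat list of dicts on the 'sdf'/'stf' flags instead of indexing a four-level array. A minimal standalone sketch of that enumeration pattern, using only the constants defined in the test:

    from itertools import product

    BASE_RELATIVE_LOCKTIME = 10
    SEQ_DISABLE_FLAG = 1 << 31
    SEQ_RANDOM_HIGH_BIT = 1 << 25
    SEQ_TYPE_FLAG = 1 << 22
    SEQ_RANDOM_LOW_BIT = 1 << 18

    def relative_locktime(sdf, srhb, stf, srlb):
        # Set the disable, random-high, type and random-low bits on top of the base locktime.
        locktime = BASE_RELATIVE_LOCKTIME
        if sdf:
            locktime |= SEQ_DISABLE_FLAG
        if srhb:
            locktime |= SEQ_RANDOM_HIGH_BIT
        if stf:
            locktime |= SEQ_TYPE_FLAG
        if srlb:
            locktime |= SEQ_RANDOM_LOW_BIT
        return locktime

    # product(*[[True, False]] * 4) yields all 16 flag combinations, replacing
    # the old relative_locktimes[b31][b25][b22][b18] nesting with a flat list.
    locktimes = [{'locktime': relative_locktime(sdf, srhb, stf, srlb), 'sdf': sdf, 'stf': stf}
                 for sdf, srhb, stf, srlb in product(*[[True, False]] * 4)]
    assert len(locktimes) == 16
    assert sum(1 for entry in locktimes if entry['sdf']) == 8  # half have the disable flag set

Filtering on these flags is what the BIP 68/112 success/fail selections in run_test rely on.
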
diff --git a/test/functional/feature_help.py b/test/functional/feature_help.py
new file mode 100755
index 0000000000..1e62d7a409
--- /dev/null
+++ b/test/functional/feature_help.py
@@ -0,0 +1,42 @@
+#!/usr/bin/env python3
+# Copyright (c) 2018 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""Verify that starting bitcoin with -h works as expected."""
+import subprocess
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import assert_equal
+
+class HelpTest(BitcoinTestFramework):
+ def set_test_params(self):
+ self.setup_clean_chain = True
+ self.num_nodes = 1
+
+ def setup_network(self):
+ self.add_nodes(self.num_nodes)
+ # Don't start the node
+
+ def run_test(self):
+ self.log.info("Start bitcoin with -h for help text")
+ self.nodes[0].start(extra_args=['-h'], stderr=subprocess.PIPE, stdout=subprocess.PIPE)
+ # Node should exit immediately and output help to stdout.
+ ret_code = self.nodes[0].process.wait(timeout=1)
+ assert_equal(ret_code, 0)
+ output = self.nodes[0].process.stdout.read()
+ assert b'Options' in output
+ self.log.info("Help text received: {} (...)".format(output[0:60]))
+ self.nodes[0].running = False
+
+ self.log.info("Start bitcoin with -version for version information")
+ self.nodes[0].start(extra_args=['-version'], stderr=subprocess.PIPE, stdout=subprocess.PIPE)
+ # Node should exit immediately and output version to stdout.
+ ret_code = self.nodes[0].process.wait(timeout=1)
+ assert_equal(ret_code, 0)
+ output = self.nodes[0].process.stdout.read()
+ assert b'version' in output
+ self.log.info("Version text received: {} (...)".format(output[0:60]))
+ self.nodes[0].running = False
+
+if __name__ == '__main__':
+ HelpTest().main()
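
The new feature_help.py starts the node with stdout and stderr piped and reads the help text straight from the process. Outside the framework, the same smoke check boils down to a plain subprocess call; a minimal sketch, assuming a bitcoind binary is available on PATH:

    import subprocess

    # bitcoind -h should print its option summary to stdout and exit cleanly.
    result = subprocess.run(['bitcoind', '-h'], stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE, timeout=10)
    assert result.returncode == 0
    assert b'Options' in result.stdout

    # bitcoind -version should likewise exit 0 and mention the version string.
    result = subprocess.run(['bitcoind', '-version'], stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE, timeout=10)
    assert result.returncode == 0
    assert b'version' in result.stdout
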
diff --git a/test/functional/rpc_net.py b/test/functional/rpc_net.py
index d8348432aa..72b5f4748f 100755
--- a/test/functional/rpc_net.py
+++ b/test/functional/rpc_net.py
@@ -10,6 +10,7 @@ Tests correspond to code in rpc/net.cpp.
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
+ assert_greater_than_or_equal,
assert_raises_rpc_error,
connect_nodes_bi,
p2p_port,
@@ -33,26 +34,34 @@ class NetTest(BitcoinTestFramework):
assert_equal(self.nodes[0].getconnectioncount(), 2)
def _test_getnettotals(self):
- # check that getnettotals totalbytesrecv and totalbytessent
- # are consistent with getpeerinfo
+ # getnettotals totalbytesrecv and totalbytessent should be
+ # consistent with getpeerinfo. Since the RPC calls are not atomic,
+ # and messages might have been recvd or sent between RPC calls, call
+ # getnettotals before and after and verify that the returned values
+ # from getpeerinfo are bounded by those values.
+ net_totals_before = self.nodes[0].getnettotals()
peer_info = self.nodes[0].getpeerinfo()
+ net_totals_after = self.nodes[0].getnettotals()
assert_equal(len(peer_info), 2)
- net_totals = self.nodes[0].getnettotals()
- assert_equal(sum([peer['bytesrecv'] for peer in peer_info]),
- net_totals['totalbytesrecv'])
- assert_equal(sum([peer['bytessent'] for peer in peer_info]),
- net_totals['totalbytessent'])
+ peers_recv = sum([peer['bytesrecv'] for peer in peer_info])
+ peers_sent = sum([peer['bytessent'] for peer in peer_info])
+
+ assert_greater_than_or_equal(peers_recv, net_totals_before['totalbytesrecv'])
+ assert_greater_than_or_equal(net_totals_after['totalbytesrecv'], peers_recv)
+ assert_greater_than_or_equal(peers_sent, net_totals_before['totalbytessent'])
+ assert_greater_than_or_equal(net_totals_after['totalbytessent'], peers_sent)
+
# test getnettotals and getpeerinfo by doing a ping
# the bytes sent/received should change
# note ping and pong are 32 bytes each
self.nodes[0].ping()
- wait_until(lambda: (net_totals['totalbytessent'] + 32*2) == self.nodes[0].getnettotals()['totalbytessent'], timeout=1)
- wait_until(lambda: (net_totals['totalbytesrecv'] + 32*2) == self.nodes[0].getnettotals()['totalbytesrecv'], timeout=1)
+ wait_until(lambda: (self.nodes[0].getnettotals()['totalbytessent'] >= net_totals_after['totalbytessent'] + 32 * 2), timeout=1)
+ wait_until(lambda: (self.nodes[0].getnettotals()['totalbytesrecv'] >= net_totals_after['totalbytesrecv'] + 32 * 2), timeout=1)
peer_info_after_ping = self.nodes[0].getpeerinfo()
for before, after in zip(peer_info, peer_info_after_ping):
- assert_equal(before['bytesrecv_per_msg']['pong'] + 32, after['bytesrecv_per_msg']['pong'])
- assert_equal(before['bytessent_per_msg']['ping'] + 32, after['bytessent_per_msg']['ping'])
+ assert_greater_than_or_equal(after['bytesrecv_per_msg']['pong'], before['bytesrecv_per_msg']['pong'] + 32)
+ assert_greater_than_or_equal(after['bytessent_per_msg']['ping'], before['bytessent_per_msg']['ping'] + 32)
def _test_getnetworkinginfo(self):
assert_equal(self.nodes[0].getnetworkinfo()['networkactive'], True)
@@ -78,8 +87,7 @@ class NetTest(BitcoinTestFramework):
assert_equal(len(added_nodes), 1)
assert_equal(added_nodes[0]['addednode'], ip_port)
# check that a non-existent node returns an error
- assert_raises_rpc_error(-24, "Node has not been added",
- self.nodes[0].getaddednodeinfo, '1.1.1.1')
+ assert_raises_rpc_error(-24, "Node has not been added", self.nodes[0].getaddednodeinfo, '1.1.1.1')
def _test_getpeerinfo(self):
peer_info = [x.getpeerinfo() for x in self.nodes]
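
The bracketing technique in _test_getnettotals generalizes to any counter that can move between non-atomic RPC reads: snapshot the totals before and after the per-peer read and require the aggregate to fall inside that window. As a standalone sketch (check_net_totals_consistent is a hypothetical helper, not part of the framework):

    def check_net_totals_consistent(node):
        # getnettotals and getpeerinfo are separate RPC calls, so traffic may
        # flow in between; bracket the per-peer read with two snapshots and
        # check the summed peer counters land inside that window.
        before = node.getnettotals()
        peers = node.getpeerinfo()
        after = node.getnettotals()
        recv = sum(peer['bytesrecv'] for peer in peers)
        sent = sum(peer['bytessent'] for peer in peers)
        assert before['totalbytesrecv'] <= recv <= after['totalbytesrecv']
        assert before['totalbytessent'] <= sent <= after['totalbytessent']
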
diff --git a/test/functional/test_framework/test_node.py b/test/functional/test_framework/test_node.py
index 583d07deec..291ac3ee46 100755
--- a/test/functional/test_framework/test_node.py
+++ b/test/functional/test_framework/test_node.py
@@ -30,6 +30,11 @@ JSONDecodeError = getattr(json, "JSONDecodeError", ValueError)
BITCOIND_PROC_WAIT_TIMEOUT = 60
+
+class FailedToStartError(Exception):
+ """Raised when a node fails to start correctly."""
+
+
class TestNode():
"""A class for representing a bitcoind node under test.
@@ -102,7 +107,8 @@ class TestNode():
# Poll at a rate of four times per second
poll_per_s = 4
for _ in range(poll_per_s * self.rpc_timeout):
- assert self.process.poll() is None, "bitcoind exited with status %i during initialization" % self.process.returncode
+ if self.process.poll() is not None:
+ raise FailedToStartError('bitcoind exited with status {} during initialization'.format(self.process.returncode))
try:
self.rpc = get_rpc_proxy(rpc_url(self.datadir, self.index, self.rpchost), self.index, timeout=self.rpc_timeout, coveragedir=self.coverage_dir)
self.rpc.getblockcount()
@@ -179,9 +185,9 @@ class TestNode():
self.start(extra_args, stderr=log_stderr, *args, **kwargs)
self.wait_for_rpc_connection()
self.stop_node()
- self.wait_util_stopped()
- except Exception as e:
- assert 'bitcoind exited' in str(e) # node must have shutdown
+ self.wait_until_stopped()
+ except FailedToStartError as e:
+ self.log.debug('bitcoind failed to start: %s', e)
self.running = False
self.process = None
# Check stderr for expected message
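
The FailedToStartError introduced above lets assert_start_raises_init_error react specifically to a node that dies during startup instead of matching on exception text. The underlying polling pattern looks roughly like this (a sketch; wait_for_startup and probe are illustrative names, not framework code):

    import time

    class FailedToStartError(Exception):
        """Raised when the child process exits during initialization."""

    def wait_for_startup(process, timeout_s, probe):
        # Poll about four times per second: raise FailedToStartError if the
        # process has already exited, otherwise retry the probe (e.g. an RPC
        # call) until it succeeds or the timeout expires.
        for _ in range(4 * timeout_s):
            if process.poll() is not None:
                raise FailedToStartError(
                    'process exited with status {} during initialization'.format(process.returncode))
            try:
                return probe()
            except Exception:
                time.sleep(0.25)
        raise AssertionError('startup probe did not succeed within {}s'.format(timeout_s))
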
diff --git a/test/functional/test_runner.py b/test/functional/test_runner.py
index 39f1180a45..a2e92dce3b 100755
--- a/test/functional/test_runner.py
+++ b/test/functional/test_runner.py
@@ -138,6 +138,7 @@ BASE_SCRIPTS= [
'p2p_node_network_limited.py',
'feature_blocksdir.py',
'feature_config_args.py',
+ 'feature_help.py',
# Don't append tests at the end to avoid merge conflicts
# Put them in a random line within the section that fits their approximate run-time
]