-rwxr-xr-x  qa/pull-tester/rpc-tests.py              |   2
-rwxr-xr-x  qa/rpc-tests/bip68-112-113-p2p.py        | 547
-rwxr-xr-x  qa/rpc-tests/bip9-softforks.py           | 220
-rwxr-xr-x  qa/rpc-tests/test_framework/comptool.py  |   4
-rw-r--r--  src/chainparams.cpp                      |  17
-rw-r--r--  src/consensus/params.h                   |   1
-rw-r--r--  src/init.cpp                             |   2
-rw-r--r--  src/main.cpp                             |  22
-rw-r--r--  src/policy/policy.cpp                    |   2
-rw-r--r--  src/primitives/transaction.h             |   7
-rw-r--r--  src/qt/transactiondesc.cpp               |   3
-rw-r--r--  src/qt/transactionrecord.cpp             |   7
-rw-r--r--  src/qt/transactionrecord.h               |   4
-rw-r--r--  src/rpc/blockchain.cpp                   |   1
-rw-r--r--  src/timedata.cpp                         |   2
-rw-r--r--  src/timedata.h                           |   2
16 files changed, 828 insertions(+), 15 deletions(-)
diff --git a/qa/pull-tester/rpc-tests.py b/qa/pull-tester/rpc-tests.py
index d5d5f10610..d794dbe806 100755
--- a/qa/pull-tester/rpc-tests.py
+++ b/qa/pull-tester/rpc-tests.py
@@ -83,6 +83,7 @@ if EXEEXT == ".exe" and "-win" not in opts:
#Tests
testScripts = [
+ 'bip68-112-113-p2p.py',
'wallet.py',
'listtransactions.py',
'receivedby.py',
@@ -119,6 +120,7 @@ testScripts = [
'importprunedfunds.py',
]
testScriptsExt = [
+ 'bip9-softforks.py',
'bip65-cltv.py',
'bip65-cltv-p2p.py',
'bip68-sequence.py',
diff --git a/qa/rpc-tests/bip68-112-113-p2p.py b/qa/rpc-tests/bip68-112-113-p2p.py
new file mode 100755
index 0000000000..c226f4dad4
--- /dev/null
+++ b/qa/rpc-tests/bip68-112-113-p2p.py
@@ -0,0 +1,547 @@
+#!/usr/bin/env python2
+# Copyright (c) 2015 The Bitcoin Core developers
+# Distributed under the MIT/X11 software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+#
+
+from test_framework.test_framework import ComparisonTestFramework
+from test_framework.util import *
+from test_framework.mininode import ToHex, CTransaction, NetworkThread
+from test_framework.blocktools import create_coinbase, create_block
+from test_framework.comptool import TestInstance, TestManager
+from test_framework.script import *
+from binascii import unhexlify
+import cStringIO
+import time
+
+'''
+This test is meant to exercise activation of the first version bits soft fork
+This soft fork will activate the following BIPS:
+BIP 68 - nSequence relative lock times
+BIP 112 - CHECKSEQUENCEVERIFY
+BIP 113 - MedianTimePast semantics for nLockTime
+
+regtest lock-in with 108/144 block signalling
+activation after a further 144 blocks
+
+mine 82 blocks whose coinbases will be used to generate inputs for our tests
+mine 61 blocks to transition from DEFINED to STARTED
+mine 144 blocks, only 100 of which signal readiness, in order to fail to change state this period
+mine 144 blocks with 108 signaling and verify STARTED->LOCKED_IN
+mine 140 blocks and seed the block chain with the 82 inputs we will use for our tests at height 572
+mine 3 blocks and verify still at LOCKED_IN and test that enforcement has not triggered
+mine 1 block and test that enforcement has triggered (which triggers ACTIVE)
+Test BIP 113 is enforced
+Mine 4 blocks so next height is 580 and test BIP 68 is enforced for time and height
+Mine 1 block so next height is 581 and test BIP 68 now passes time but not height
+Mine 1 block so next height is 582 and test BIP 68 now passes time and height
+Test that BIP 112 is enforced
+
+Various transactions will be used to test that the BIP rules are not enforced before the soft fork activates,
+and that after the soft fork activates transactions pass and fail as they should according to the rules.
+For each BIP, transactions of versions 1 and 2 will be tested.
+----------------
+BIP 113:
+bip113tx - modify the nLocktime variable
+
+BIP 68:
+bip68txs - 16 txs with nSequence relative locktime of 10 with various bits set as per the relative_locktimes below
+
+BIP 112:
+bip112txs_vary_nSequence - 16 txs with nSequence relative_locktimes of 10 evaluated against 10 OP_CSV OP_DROP
+bip112txs_vary_nSequence_9 - 16 txs with nSequence relative_locktimes of 9 evaluated against 10 OP_CSV OP_DROP
+bip112txs_vary_OP_CSV - 16 txs with nSequence = 10 evaluated against varying {relative_locktimes of 10} OP_CSV OP_DROP
+bip112txs_vary_OP_CSV_9 - 16 txs with nSequence = 9 evaluated against varying {relative_locktimes of 10} OP_CSV OP_DROP
+bip112tx_special - test negative argument to OP_CSV
+'''
+
+base_relative_locktime = 10
+seq_disable_flag = 1<<31
+seq_random_high_bit = 1<<25
+seq_type_flag = 1<<22
+seq_random_low_bit = 1<<18
+
+# b31,b25,b22,b18 represent the 31st, 25th, 22nd and 18th bits respectively in the nSequence field
+# relative_locktimes[b31][b25][b22][b18] is a base_relative_locktime with the indicated bits set if their indices are 1
+relative_locktimes = []
+for b31 in xrange(2):
+ b25times = []
+ for b25 in xrange(2):
+ b22times = []
+ for b22 in xrange(2):
+ b18times = []
+ for b18 in xrange(2):
+ rlt = base_relative_locktime
+ if (b31):
+ rlt = rlt | seq_disable_flag
+ if (b25):
+ rlt = rlt | seq_random_high_bit
+ if (b22):
+ rlt = rlt | seq_type_flag
+ if (b18):
+ rlt = rlt | seq_random_low_bit
+ b18times.append(rlt)
+ b22times.append(b18times)
+ b25times.append(b22times)
+ relative_locktimes.append(b25times)
+
+def all_rlt_txs(txarray):
+ txs = []
+ for b31 in xrange(2):
+ for b25 in xrange(2):
+ for b22 in xrange(2):
+ for b18 in xrange(2):
+ txs.append(txarray[b31][b25][b22][b18])
+ return txs
+
+class BIP68_112_113Test(ComparisonTestFramework):
+ def __init__(self):
+ self.num_nodes = 1
+
+ def setup_network(self):
+ # Must set the blockversion for this test
+ self.nodes = start_nodes(1, self.options.tmpdir,
+ extra_args=[['-debug', '-whitelist=127.0.0.1', '-blockversion=4']],
+ binary=[self.options.testbinary])
+
+ def run_test(self):
+ test = TestManager(self, self.options.tmpdir)
+ test.add_all_connections(self.nodes)
+ NetworkThread().start() # Start up network handling in another thread
+ test.run()
+
+ def send_generic_input_tx(self, node, coinbases):
+ amount = Decimal("49.99")
+ return node.sendrawtransaction(ToHex(self.sign_transaction(node, self.create_transaction(node, node.getblock(coinbases.pop())['tx'][0], self.nodeaddress, amount))))
+
+ def create_transaction(self, node, txid, to_address, amount):
+ inputs = [{ "txid" : txid, "vout" : 0}]
+ outputs = { to_address : amount }
+ rawtx = node.createrawtransaction(inputs, outputs)
+ tx = CTransaction()
+ f = cStringIO.StringIO(unhexlify(rawtx))
+ tx.deserialize(f)
+ return tx
+
+ def sign_transaction(self, node, unsignedtx):
+ rawtx = ToHex(unsignedtx)
+ signresult = node.signrawtransaction(rawtx)
+ tx = CTransaction()
+ f = cStringIO.StringIO(unhexlify(signresult['hex']))
+ tx.deserialize(f)
+ return tx
+
+ def generate_blocks(self, number, version, test_blocks = []):
+ for i in xrange(number):
+ block = self.create_test_block([], version)
+ test_blocks.append([block, True])
+ self.last_block_time += 600
+ self.tip = block.sha256
+ self.tipheight += 1
+ return test_blocks
+
+ def create_test_block(self, txs, version = 536870912):
+ block = create_block(self.tip, create_coinbase(self.tipheight + 1), self.last_block_time + 600)
+ block.nVersion = version
+ block.vtx.extend(txs)
+ block.hashMerkleRoot = block.calc_merkle_root()
+ block.rehash()
+ block.solve()
+ return block
+
+ def get_bip9_status(self, key):
+ info = self.nodes[0].getblockchaininfo()
+ for row in info['bip9_softforks']:
+ if row['id'] == key:
+ return row
+ raise IndexError ('key:"%s" not found' % key)
+
+ def create_bip68txs(self, bip68inputs, txversion, locktime_delta = 0):
+ txs = []
+ assert(len(bip68inputs) >= 16)
+ i = 0
+ for b31 in xrange(2):
+ b25txs = []
+ for b25 in xrange(2):
+ b22txs = []
+ for b22 in xrange(2):
+ b18txs = []
+ for b18 in xrange(2):
+ tx = self.create_transaction(self.nodes[0], bip68inputs[i], self.nodeaddress, Decimal("49.98"))
+ i += 1
+ tx.nVersion = txversion
+ tx.vin[0].nSequence = relative_locktimes[b31][b25][b22][b18] + locktime_delta
+ b18txs.append(self.sign_transaction(self.nodes[0], tx))
+ b22txs.append(b18txs)
+ b25txs.append(b22txs)
+ txs.append(b25txs)
+ return txs
+
+ def create_bip112special(self, input, txversion):
+ tx = self.create_transaction(self.nodes[0], input, self.nodeaddress, Decimal("49.98"))
+ tx.nVersion = txversion
+ signtx = self.sign_transaction(self.nodes[0], tx)
+ signtx.vin[0].scriptSig = CScript([-1, OP_NOP3, OP_DROP] + list(CScript(signtx.vin[0].scriptSig)))
+ return signtx
+
+ def create_bip112txs(self, bip112inputs, varyOP_CSV, txversion, locktime_delta = 0):
+ txs = []
+ assert(len(bip112inputs) >= 16)
+ i = 0
+ for b31 in xrange(2):
+ b25txs = []
+ for b25 in xrange(2):
+ b22txs = []
+ for b22 in xrange(2):
+ b18txs = []
+ for b18 in xrange(2):
+ tx = self.create_transaction(self.nodes[0], bip112inputs[i], self.nodeaddress, Decimal("49.98"))
+ i += 1
+ if (varyOP_CSV): # if varying OP_CSV, nSequence is fixed
+ tx.vin[0].nSequence = base_relative_locktime + locktime_delta
+ else: # vary nSequence instead, OP_CSV is fixed
+ tx.vin[0].nSequence = relative_locktimes[b31][b25][b22][b18] + locktime_delta
+ tx.nVersion = txversion
+ signtx = self.sign_transaction(self.nodes[0], tx)
+ if (varyOP_CSV):
+ signtx.vin[0].scriptSig = CScript([relative_locktimes[b31][b25][b22][b18], OP_NOP3, OP_DROP] + list(CScript(signtx.vin[0].scriptSig)))
+ else:
+ signtx.vin[0].scriptSig = CScript([base_relative_locktime, OP_NOP3, OP_DROP] + list(CScript(signtx.vin[0].scriptSig)))
+ b18txs.append(signtx)
+ b22txs.append(b18txs)
+ b25txs.append(b22txs)
+ txs.append(b25txs)
+ return txs
+
+ def get_tests(self):
+ long_past_time = int(time.time()) - 600 * 1000 # enough to build up to 1000 blocks 10 minutes apart without worrying about getting into the future
+ self.nodes[0].setmocktime(long_past_time - 100) # enough so that the generated blocks will still all be before long_past_time
+ self.coinbase_blocks = self.nodes[0].generate(1 + 16 + 2*32 + 1) # 82 blocks generated for inputs
+ self.nodes[0].setmocktime(0) # set time back to present so yielded blocks aren't in the future as we advance last_block_time
+ self.tipheight = 82 # height of the next block to build
+ self.last_block_time = long_past_time
+ self.tip = int ("0x" + self.nodes[0].getbestblockhash() + "L", 0)
+ self.nodeaddress = self.nodes[0].getnewaddress()
+
+ assert_equal(self.get_bip9_status('csv')['status'], 'defined')
+ test_blocks = self.generate_blocks(61, 4)
+ yield TestInstance(test_blocks, sync_every_block=False) # 1
+ # Advanced from DEFINED to STARTED, height = 143
+ assert_equal(self.get_bip9_status('csv')['status'], 'started')
+
+ # Fail to achieve LOCKED_IN 100 out of 144 signal bit 0
+ # using a variety of bits to simulate multiple parallel softforks
+ test_blocks = self.generate_blocks(50, 536870913) # 0x20000001 (signalling ready)
+ test_blocks = self.generate_blocks(20, 4, test_blocks) # 0x00000004 (signalling not)
+ test_blocks = self.generate_blocks(50, 536871169, test_blocks) # 0x20000101 (signalling ready)
+ test_blocks = self.generate_blocks(24, 536936448, test_blocks) # 0x20010000 (signalling not)
+ yield TestInstance(test_blocks, sync_every_block=False) # 2
+ # Failed to advance past STARTED, height = 287
+ assert_equal(self.get_bip9_status('csv')['status'], 'started')
+
+ # 108 out of 144 signal bit 0 to achieve lock-in
+ # using a variety of bits to simulate multiple parallel softforks
+ test_blocks = self.generate_blocks(58, 536870913) # 0x20000001 (signalling ready)
+ test_blocks = self.generate_blocks(26, 4, test_blocks) # 0x00000004 (signalling not)
+ test_blocks = self.generate_blocks(50, 536871169, test_blocks) # 0x20000101 (signalling ready)
+ test_blocks = self.generate_blocks(10, 536936448, test_blocks) # 0x20010000 (signalling not)
+ yield TestInstance(test_blocks, sync_every_block=False) # 3
+ # Advanced from STARTED to LOCKED_IN, height = 431
+ assert_equal(self.get_bip9_status('csv')['status'], 'locked_in')
+
+ # 140 more version 4 blocks
+ test_blocks = self.generate_blocks(140, 4)
+ yield TestInstance(test_blocks, sync_every_block=False) # 4
+
+ ### Inputs at height = 572
+ # Put inputs for all tests in the chain at height 572 (tip now = 571) (time increases by 600s per block)
+ # Note we reuse inputs for v1 and v2 txs, so we must test these separately
+ # 16 normal inputs
+ bip68inputs = []
+ for i in xrange(16):
+ bip68inputs.append(self.send_generic_input_tx(self.nodes[0], self.coinbase_blocks))
+ # 2 sets of 16 inputs with 10 OP_CSV OP_DROP (actually will be prepended to spending scriptSig)
+ bip112basicinputs = []
+ for j in xrange(2):
+ inputs = []
+ for i in xrange(16):
+ inputs.append(self.send_generic_input_tx(self.nodes[0], self.coinbase_blocks))
+ bip112basicinputs.append(inputs)
+ # 2 sets of 16 varied inputs with (relative_lock_time) OP_CSV OP_DROP (actually will be prepended to spending scriptSig)
+ bip112diverseinputs = []
+ for j in xrange(2):
+ inputs = []
+ for i in xrange(16):
+ inputs.append(self.send_generic_input_tx(self.nodes[0], self.coinbase_blocks))
+ bip112diverseinputs.append(inputs)
+ # 1 special input with -1 OP_CSV OP_DROP (actually will be prepended to spending scriptSig)
+ bip112specialinput = self.send_generic_input_tx(self.nodes[0], self.coinbase_blocks)
+ # 1 normal input
+ bip113input = self.send_generic_input_tx(self.nodes[0], self.coinbase_blocks)
+
+ self.nodes[0].setmocktime(self.last_block_time + 600)
+ inputblockhash = self.nodes[0].generate(1)[0] # 1 block generated for inputs to be in chain at height 572
+ self.nodes[0].setmocktime(0)
+ self.tip = int("0x" + inputblockhash + "L", 0)
+ self.tipheight += 1
+ self.last_block_time += 600
+ assert_equal(len(self.nodes[0].getblock(inputblockhash,True)["tx"]), 82+1)
+
+ # 2 more version 4 blocks
+ test_blocks = self.generate_blocks(2, 4)
+ yield TestInstance(test_blocks, sync_every_block=False) # 5
+ # Not yet advanced to ACTIVE, height = 574 (will activate for block 576, not 575)
+ assert_equal(self.get_bip9_status('csv')['status'], 'locked_in')
+
+ # Test both version 1 and version 2 transactions for all tests
+ # BIP113 test transaction will be modified before each use to put in appropriate block time
+ bip113tx_v1 = self.create_transaction(self.nodes[0], bip113input, self.nodeaddress, Decimal("49.98"))
+ bip113tx_v1.vin[0].nSequence = 0xFFFFFFFE
+ bip113tx_v2 = self.create_transaction(self.nodes[0], bip113input, self.nodeaddress, Decimal("49.98"))
+ bip113tx_v2.vin[0].nSequence = 0xFFFFFFFE
+ bip113tx_v2.nVersion = 2
+
+ # For BIP68 test all 16 relative sequence locktimes
+ bip68txs_v1 = self.create_bip68txs(bip68inputs, 1)
+ bip68txs_v2 = self.create_bip68txs(bip68inputs, 2)
+
+ # For BIP112 test:
+ # 16 relative sequence locktimes of 10 against 10 OP_CSV OP_DROP inputs
+ bip112txs_vary_nSequence_v1 = self.create_bip112txs(bip112basicinputs[0], False, 1)
+ bip112txs_vary_nSequence_v2 = self.create_bip112txs(bip112basicinputs[0], False, 2)
+ # 16 relative sequence locktimes of 9 against 10 OP_CSV OP_DROP inputs
+ bip112txs_vary_nSequence_9_v1 = self.create_bip112txs(bip112basicinputs[1], False, 1, -1)
+ bip112txs_vary_nSequence_9_v2 = self.create_bip112txs(bip112basicinputs[1], False, 2, -1)
+ # sequence lock time of 10 against 16 (relative_lock_time) OP_CSV OP_DROP inputs
+ bip112txs_vary_OP_CSV_v1 = self.create_bip112txs(bip112diverseinputs[0], True, 1)
+ bip112txs_vary_OP_CSV_v2 = self.create_bip112txs(bip112diverseinputs[0], True, 2)
+ # sequence lock time of 9 against 16 (relative_lock_time) OP_CSV OP_DROP inputs
+ bip112txs_vary_OP_CSV_9_v1 = self.create_bip112txs(bip112diverseinputs[1], True, 1, -1)
+ bip112txs_vary_OP_CSV_9_v2 = self.create_bip112txs(bip112diverseinputs[1], True, 2, -1)
+ # -1 OP_CSV OP_DROP input
+ bip112tx_special_v1 = self.create_bip112special(bip112specialinput, 1)
+ bip112tx_special_v2 = self.create_bip112special(bip112specialinput, 2)
+
+
+ ### TESTING ###
+ ##################################
+ ### Before Soft Forks Activate ###
+ ##################################
+ # All txs should pass
+ ### Version 1 txs ###
+ success_txs = []
+ # add BIP113 tx and -1 CSV tx
+ bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block
+ bip113signed1 = self.sign_transaction(self.nodes[0], bip113tx_v1)
+ success_txs.append(bip113signed1)
+ success_txs.append(bip112tx_special_v1)
+ # add BIP 68 txs
+ success_txs.extend(all_rlt_txs(bip68txs_v1))
+ # add BIP 112 with seq=10 txs
+ success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_v1))
+ success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_v1))
+ # try BIP 112 with seq=9 txs
+ success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v1))
+ success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_9_v1))
+ yield TestInstance([[self.create_test_block(success_txs), True]]) # 6
+ self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
+
+ ### Version 2 txs ###
+ success_txs = []
+ # add BIP113 tx and -1 CSV tx
+ bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block
+ bip113signed2 = self.sign_transaction(self.nodes[0], bip113tx_v2)
+ success_txs.append(bip113signed2)
+ success_txs.append(bip112tx_special_v2)
+ # add BIP 68 txs
+ success_txs.extend(all_rlt_txs(bip68txs_v2))
+ # add BIP 112 with seq=10 txs
+ success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_v2))
+ success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_v2))
+ # try BIP 112 with seq=9 txs
+ success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v2))
+ success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_9_v2))
+ yield TestInstance([[self.create_test_block(success_txs), True]]) # 7
+ self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
+
+
+ # 1 more version 4 block to get us to height 575 so the fork should now be active for the next block
+ test_blocks = self.generate_blocks(1, 4)
+ yield TestInstance(test_blocks, sync_every_block=False) # 8
+ assert_equal(self.get_bip9_status('csv')['status'], 'active')
+
+
+ #################################
+ ### After Soft Forks Activate ###
+ #################################
+ ### BIP 113 ###
+ # BIP 113 tests should now fail regardless of version number if nLockTime isn't satisfied by new rules
+ bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block
+ bip113signed1 = self.sign_transaction(self.nodes[0], bip113tx_v1)
+ bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block
+ bip113signed2 = self.sign_transaction(self.nodes[0], bip113tx_v2)
+ for bip113tx in [bip113signed1, bip113signed2]:
+ yield TestInstance([[self.create_test_block([bip113tx]), False]]) # 9,10
+ # BIP 113 tests should now pass if the locktime is < MTP
+ bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 - 1 # < MTP of prior block
+ bip113signed1 = self.sign_transaction(self.nodes[0], bip113tx_v1)
+ bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 - 1 # < MTP of prior block
+ bip113signed2 = self.sign_transaction(self.nodes[0], bip113tx_v2)
+ for bip113tx in [bip113signed1, bip113signed2]:
+ yield TestInstance([[self.create_test_block([bip113tx]), True]]) # 11,12
+ self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
+
+ # Next block height = 580 after 4 blocks of random version
+ test_blocks = self.generate_blocks(4, 1234)
+ yield TestInstance(test_blocks, sync_every_block=False) # 13
+
+ ### BIP 68 ###
+ ### Version 1 txs ###
+ # All still pass
+ success_txs = []
+ success_txs.extend(all_rlt_txs(bip68txs_v1))
+ yield TestInstance([[self.create_test_block(success_txs), True]]) # 14
+ self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
+
+ ### Version 2 txs ###
+ bip68success_txs = []
+ # All txs with SEQUENCE_LOCKTIME_DISABLE_FLAG set pass
+ for b25 in xrange(2):
+ for b22 in xrange(2):
+ for b18 in xrange(2):
+ bip68success_txs.append(bip68txs_v2[1][b25][b22][b18])
+ yield TestInstance([[self.create_test_block(bip68success_txs), True]]) # 15
+ self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
+ # All txs without flag fail as we are at delta height = 8 < 10 and delta time = 8 * 600 < 10 * 512
+ bip68timetxs = []
+ for b25 in xrange(2):
+ for b18 in xrange(2):
+ bip68timetxs.append(bip68txs_v2[0][b25][1][b18])
+ for tx in bip68timetxs:
+ yield TestInstance([[self.create_test_block([tx]), False]]) # 16 - 19
+ bip68heighttxs = []
+ for b25 in xrange(2):
+ for b18 in xrange(2):
+ bip68heighttxs.append(bip68txs_v2[0][b25][0][b18])
+ for tx in bip68heighttxs:
+ yield TestInstance([[self.create_test_block([tx]), False]]) # 20 - 23
+
+ # Advance one block to 581
+ test_blocks = self.generate_blocks(1, 1234)
+ yield TestInstance(test_blocks, sync_every_block=False) # 24
+
+ # Height txs should fail and time txs should now pass 9 * 600 > 10 * 512
+ bip68success_txs.extend(bip68timetxs)
+ yield TestInstance([[self.create_test_block(bip68success_txs), True]]) # 25
+ self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
+ for tx in bip68heighttxs:
+ yield TestInstance([[self.create_test_block([tx]), False]]) # 26 - 29
+
+ # Advance one block to 582
+ test_blocks = self.generate_blocks(1, 1234)
+ yield TestInstance(test_blocks, sync_every_block=False) # 30
+
+ # All BIP 68 txs should pass
+ bip68success_txs.extend(bip68heighttxs)
+ yield TestInstance([[self.create_test_block(bip68success_txs), True]]) # 31
+ self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
+
+
+ ### BIP 112 ###
+ ### Version 1 txs ###
+ # -1 OP_CSV tx should fail
+ yield TestInstance([[self.create_test_block([bip112tx_special_v1]), False]]) #32
+ # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in argument to OP_CSV, version 1 txs should still pass
+ success_txs = []
+ for b25 in xrange(2):
+ for b22 in xrange(2):
+ for b18 in xrange(2):
+ success_txs.append(bip112txs_vary_OP_CSV_v1[1][b25][b22][b18])
+ success_txs.append(bip112txs_vary_OP_CSV_9_v1[1][b25][b22][b18])
+ yield TestInstance([[self.create_test_block(success_txs), True]]) # 33
+ self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
+
+ # If SEQUENCE_LOCKTIME_DISABLE_FLAG is unset in argument to OP_CSV, version 1 txs should now fail
+ fail_txs = []
+ fail_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_v1))
+ fail_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v1))
+ for b25 in xrange(2):
+ for b22 in xrange(2):
+ for b18 in xrange(2):
+ fail_txs.append(bip112txs_vary_OP_CSV_v1[0][b25][b22][b18])
+ fail_txs.append(bip112txs_vary_OP_CSV_9_v1[0][b25][b22][b18])
+
+ for tx in fail_txs:
+ yield TestInstance([[self.create_test_block([tx]), False]]) # 34 - 81
+
+ ### Version 2 txs ###
+ # -1 OP_CSV tx should fail
+ yield TestInstance([[self.create_test_block([bip112tx_special_v2]), False]]) #82
+
+ # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in argument to OP_CSV, version 2 txs should pass (all sequence locks are met)
+ success_txs = []
+ for b25 in xrange(2):
+ for b22 in xrange(2):
+ for b18 in xrange(2):
+ success_txs.append(bip112txs_vary_OP_CSV_v2[1][b25][b22][b18]) # 8/16 of vary_OP_CSV
+ success_txs.append(bip112txs_vary_OP_CSV_9_v2[1][b25][b22][b18]) # 8/16 of vary_OP_CSV_9
+
+ yield TestInstance([[self.create_test_block(success_txs), True]]) # 83
+ self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
+
+ ## SEQUENCE_LOCKTIME_DISABLE_FLAG is unset in argument to OP_CSV for all remaining txs ##
+ # All txs with nSequence 9 should fail either due to earlier mismatch or failing the CSV check
+ fail_txs = []
+ fail_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v2)) # 16/16 of vary_nSequence_9
+ for b25 in xrange(2):
+ for b22 in xrange(2):
+ for b18 in xrange(2):
+ fail_txs.append(bip112txs_vary_OP_CSV_9_v2[0][b25][b22][b18]) # 16/16 of vary_OP_CSV_9
+
+ for tx in fail_txs:
+ yield TestInstance([[self.create_test_block([tx]), False]]) # 84 - 107
+
+ # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in nSequence, tx should fail
+ fail_txs = []
+ for b25 in xrange(2):
+ for b22 in xrange(2):
+ for b18 in xrange(2):
+ fail_txs.append(bip112txs_vary_nSequence_v2[1][b25][b22][b18]) # 8/16 of vary_nSequence
+ for tx in fail_txs:
+ yield TestInstance([[self.create_test_block([tx]), False]]) # 108-115
+
+ # If sequencelock types mismatch, tx should fail
+ fail_txs = []
+ for b25 in xrange(2):
+ for b18 in xrange(2):
+ fail_txs.append(bip112txs_vary_nSequence_v2[0][b25][1][b18]) # 12/16 of vary_nSequence
+ fail_txs.append(bip112txs_vary_OP_CSV_v2[0][b25][1][b18]) # 12/16 of vary_OP_CSV
+ for tx in fail_txs:
+ yield TestInstance([[self.create_test_block([tx]), False]]) # 116-123
+
+ # Remaining txs should pass; this just tests that masking works properly
+ success_txs = []
+ for b25 in xrange(2):
+ for b18 in xrange(2):
+ success_txs.append(bip112txs_vary_nSequence_v2[0][b25][0][b18]) # 16/16 of vary_nSequence
+ success_txs.append(bip112txs_vary_OP_CSV_v2[0][b25][0][b18]) # 16/16 of vary_OP_CSV
+ yield TestInstance([[self.create_test_block(success_txs), True]]) # 124
+ self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
+
+ # Additional test: check that comparison of two time types works properly
+ time_txs = []
+ for b25 in xrange(2):
+ for b18 in xrange(2):
+ tx = bip112txs_vary_OP_CSV_v2[0][b25][1][b18]
+ tx.vin[0].nSequence = base_relative_locktime | seq_type_flag
+ signtx = self.sign_transaction(self.nodes[0], tx)
+ time_txs.append(signtx)
+ yield TestInstance([[self.create_test_block(time_txs), True]]) # 125
+ self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
+
+ ### Missing aspects of test
+ ## Testing empty stack fails
+
+
+if __name__ == '__main__':
+ BIP68_112_113Test().main()
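A reading aid for the test above, not part of the patch: the relative_locktimes table toggles bits 31, 25, 22 and 18 of nSequence, but BIP68 only gives meaning to bits 31 and 22 plus the low 16 bits, which is why the "random" bits must not affect the result. The flag names below match Bitcoin Core's consensus constants; the describe_sequence helper is purely illustrative.

# Illustrative sketch of the BIP68 nSequence bit layout exercised above.
SEQUENCE_LOCKTIME_DISABLE_FLAG = 1 << 31  # bit 31: relative lock-time not enforced
SEQUENCE_LOCKTIME_TYPE_FLAG = 1 << 22     # bit 22: value counts 512-second units
SEQUENCE_LOCKTIME_MASK = 0x0000ffff       # low 16 bits carry the lock-time value

def describe_sequence(n_sequence):
    # Hypothetical helper, not part of the test framework.
    if n_sequence & SEQUENCE_LOCKTIME_DISABLE_FLAG:
        return "relative lock-time disabled"
    value = n_sequence & SEQUENCE_LOCKTIME_MASK
    if n_sequence & SEQUENCE_LOCKTIME_TYPE_FLAG:
        return "time lock: %d x 512 seconds" % value
    return "height lock: %d blocks" % value

print(describe_sequence(10))                                   # height lock: 10 blocks
print(describe_sequence(10 | SEQUENCE_LOCKTIME_TYPE_FLAG))     # time lock: 10 x 512 seconds
print(describe_sequence(10 | SEQUENCE_LOCKTIME_DISABLE_FLAG))  # relative lock-time disabled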
diff --git a/qa/rpc-tests/bip9-softforks.py b/qa/rpc-tests/bip9-softforks.py
new file mode 100755
index 0000000000..cbb1b7d4ce
--- /dev/null
+++ b/qa/rpc-tests/bip9-softforks.py
@@ -0,0 +1,220 @@
+#!/usr/bin/env python2
+# Copyright (c) 2015 The Bitcoin Core developers
+# Distributed under the MIT/X11 software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+#
+
+from test_framework.test_framework import ComparisonTestFramework
+from test_framework.util import *
+from test_framework.mininode import CTransaction, NetworkThread
+from test_framework.blocktools import create_coinbase, create_block
+from test_framework.comptool import TestInstance, TestManager
+from test_framework.script import CScript, OP_1NEGATE, OP_NOP3, OP_DROP
+from binascii import hexlify, unhexlify
+import cStringIO
+import time
+import itertools
+
+'''
+This test is meant to exercise BIP forks
+Connect to a single node.
+regtest lock-in with 108/144 block signalling
+activation after a further 144 blocks
+mine 2 blocks and save coinbases for later use
+mine 141 blocks to transition from DEFINED to STARTED
+mine 100 blocks signalling readiness and 44 not, in order to fail to change state this period
+mine 108 blocks signalling readiness and 36 blocks not signalling readiness (STARTED->LOCKED_IN)
+mine a further 143 blocks (LOCKED_IN)
+test that enforcement has not triggered (which triggers ACTIVE)
+test that enforcement has triggered
+'''
+
+
+
+class BIP9SoftForksTest(ComparisonTestFramework):
+
+ def __init__(self):
+ self.num_nodes = 1
+
+ def setup_network(self):
+ self.nodes = start_nodes(1, self.options.tmpdir,
+ extra_args=[['-debug', '-whitelist=127.0.0.1']],
+ binary=[self.options.testbinary])
+
+ def run_test(self):
+ self.test = TestManager(self, self.options.tmpdir)
+ self.test.add_all_connections(self.nodes)
+ NetworkThread().start() # Start up network handling in another thread
+ self.test.run()
+
+ def create_transaction(self, node, coinbase, to_address, amount):
+ from_txid = node.getblock(coinbase)['tx'][0]
+ inputs = [{ "txid" : from_txid, "vout" : 0}]
+ outputs = { to_address : amount }
+ rawtx = node.createrawtransaction(inputs, outputs)
+ tx = CTransaction()
+ f = cStringIO.StringIO(unhexlify(rawtx))
+ tx.deserialize(f)
+ tx.nVersion = 2
+ return tx
+
+ def sign_transaction(self, node, tx):
+ signresult = node.signrawtransaction(hexlify(tx.serialize()))
+ tx = CTransaction()
+ f = cStringIO.StringIO(unhexlify(signresult['hex']))
+ tx.deserialize(f)
+ return tx
+
+ def generate_blocks(self, number, version, test_blocks = []):
+ for i in xrange(number):
+ block = create_block(self.tip, create_coinbase(self.height), self.last_block_time + 1)
+ block.nVersion = version
+ block.rehash()
+ block.solve()
+ test_blocks.append([block, True])
+ self.last_block_time += 1
+ self.tip = block.sha256
+ self.height += 1
+ return test_blocks
+
+ def get_bip9_status(self, key):
+ info = self.nodes[0].getblockchaininfo()
+ for row in info['bip9_softforks']:
+ if row['id'] == key:
+ return row
+ raise IndexError ('key:"%s" not found' % key)
+
+
+ def test_BIP(self, bipName, activated_version, invalidate, invalidatePostSignature):
+ # generate some coins for later
+ self.coinbase_blocks = self.nodes[0].generate(2)
+ self.height = 3 # height of the next block to build
+ self.tip = int ("0x" + self.nodes[0].getbestblockhash() + "L", 0)
+ self.nodeaddress = self.nodes[0].getnewaddress()
+ self.last_block_time = time.time()
+
+ assert_equal(self.get_bip9_status(bipName)['status'], 'defined')
+
+ # Test 1
+ # Advance from DEFINED to STARTED
+ test_blocks = self.generate_blocks(141, 4)
+ yield TestInstance(test_blocks, sync_every_block=False)
+
+ assert_equal(self.get_bip9_status(bipName)['status'], 'started')
+
+ # Test 2
+ # Fail to achieve LOCKED_IN: 100 out of 144 signal the deployment bit
+ # using a variety of bits to simulate multiple parallel softforks
+ test_blocks = self.generate_blocks(50, activated_version) # 0x20000001 (signalling ready)
+ test_blocks = self.generate_blocks(20, 4, test_blocks) # 0x00000004 (signalling not)
+ test_blocks = self.generate_blocks(50, activated_version, test_blocks) # 0x20000001 (signalling ready)
+ test_blocks = self.generate_blocks(24, 4, test_blocks) # 0x00000004 (signalling not)
+ yield TestInstance(test_blocks, sync_every_block=False)
+
+ assert_equal(self.get_bip9_status(bipName)['status'], 'started')
+
+ # Test 3
+ # 108 out of 144 signal the deployment bit to achieve LOCKED_IN
+ # using a variety of bits to simulate multiple parallel softforks
+ test_blocks = self.generate_blocks(58, activated_version) # 0x20000001 (signalling ready)
+ test_blocks = self.generate_blocks(26, 4, test_blocks) # 0x00000004 (signalling not)
+ test_blocks = self.generate_blocks(50, activated_version, test_blocks) # 0x20000001 (signalling ready)
+ test_blocks = self.generate_blocks(10, 4, test_blocks) # 0x00000004 (signalling not)
+ yield TestInstance(test_blocks, sync_every_block=False)
+
+ assert_equal(self.get_bip9_status(bipName)['status'], 'locked_in')
+
+ # Test 4
+ # 143 more version 4 blocks (waiting period - 1)
+ test_blocks = self.generate_blocks(143, 4)
+ yield TestInstance(test_blocks, sync_every_block=False)
+
+ assert_equal(self.get_bip9_status(bipName)['status'], 'locked_in')
+
+ # Test 5
+ # Check that the new rule is enforced
+ spendtx = self.create_transaction(self.nodes[0],
+ self.coinbase_blocks[0], self.nodeaddress, 1.0)
+ invalidate(spendtx)
+ spendtx = self.sign_transaction(self.nodes[0], spendtx)
+ spendtx.rehash()
+ invalidatePostSignature(spendtx)
+ spendtx.rehash()
+ block = create_block(self.tip, create_coinbase(self.height), self.last_block_time + 1)
+ block.nVersion = activated_version
+ block.vtx.append(spendtx)
+ block.hashMerkleRoot = block.calc_merkle_root()
+ block.rehash()
+ block.solve()
+
+ self.last_block_time += 1
+ self.tip = block.sha256
+ self.height += 1
+ yield TestInstance([[block, True]])
+
+ assert_equal(self.get_bip9_status(bipName)['status'], 'active')
+
+ # Test 6
+ # Check that the new sequence lock rules are enforced
+ spendtx = self.create_transaction(self.nodes[0],
+ self.coinbase_blocks[1], self.nodeaddress, 1.0)
+ invalidate(spendtx)
+ spendtx = self.sign_transaction(self.nodes[0], spendtx)
+ spendtx.rehash()
+ invalidatePostSignature(spendtx)
+ spendtx.rehash()
+
+ block = create_block(self.tip, create_coinbase(self.height), self.last_block_time + 1)
+ block.nVersion = 5
+ block.vtx.append(spendtx)
+ block.hashMerkleRoot = block.calc_merkle_root()
+ block.rehash()
+ block.solve()
+ self.last_block_time += 1
+ yield TestInstance([[block, False]])
+
+ # Restart all
+ stop_nodes(self.nodes)
+ wait_bitcoinds()
+ shutil.rmtree(self.options.tmpdir)
+ self.setup_chain()
+ self.setup_network()
+ self.test.clear_all_connections()
+ self.test.add_all_connections(self.nodes)
+ NetworkThread().start() # Start up network handling in another thread
+
+
+
+ def get_tests(self):
+ for test in itertools.chain(
+ self.test_BIP('csv', 536870913, self.sequence_lock_invalidate, self.donothing),
+ self.test_BIP('csv', 536870913, self.mtp_invalidate, self.donothing),
+ self.test_BIP('csv', 536870913, self.donothing, self.csv_invalidate)
+ ):
+ yield test
+
+ def donothing(self, tx):
+ return
+
+ def csv_invalidate(self, tx):
+ '''Modify the signature in vin 0 of the tx to fail CSV
+ Prepends -1 CSV DROP in the scriptSig itself.
+ '''
+ tx.vin[0].scriptSig = CScript([OP_1NEGATE, OP_NOP3, OP_DROP] +
+ list(CScript(tx.vin[0].scriptSig)))
+
+ def sequence_lock_invalidate(self, tx):
+ '''Modify the nSequence to make it fail once the sequence lock rule is activated (high timespan)
+ '''
+ tx.vin[0].nSequence = 0x00FFFFFF
+ tx.nLockTime = 0
+
+ def mtp_invalidate(self, tx):
+ '''Modify the nLockTime to make it fail once the MTP rule is activated
+ '''
+ # Disable Sequence lock, Activate nLockTime
+ tx.vin[0].nSequence = 0x90FFFFFF
+ tx.nLockTime = self.last_block_time
+
+if __name__ == '__main__':
+ BIP9SoftForksTest().main()
\ No newline at end of file
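Both new tests drive the BIP9 state machine with raw block versions such as 536870913 (0x20000001) and 536936448 (0x20010000). Below is a minimal sketch, not part of the patch, of the signalling rule they rely on, using the version-bits top-bits constants from Bitcoin Core; the signals_bit helper itself is illustrative only.

VERSIONBITS_TOP_BITS = 0x20000000
VERSIONBITS_TOP_MASK = 0xE0000000

def signals_bit(n_version, bit):
    # A block counts toward lock-in only if the top bits mark it as a
    # version-bits block and the deployment's bit is set.
    return (n_version & VERSIONBITS_TOP_MASK) == VERSIONBITS_TOP_BITS and \
           (n_version & (1 << bit)) != 0

assert signals_bit(536870913, 0)      # 0x20000001 signals bit 0 (csv)
assert not signals_bit(4, 0)          # plain version-4 blocks do not signal
assert signals_bit(536871169, 0)      # 0x20000101 also signals bit 0
assert not signals_bit(536936448, 0)  # 0x20010000 sets bit 16, not bit 0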
diff --git a/qa/rpc-tests/test_framework/comptool.py b/qa/rpc-tests/test_framework/comptool.py
index e3f9b43235..f19edbf069 100755
--- a/qa/rpc-tests/test_framework/comptool.py
+++ b/qa/rpc-tests/test_framework/comptool.py
@@ -179,6 +179,10 @@ class TestManager(object):
# associated NodeConn
test_node.add_connection(self.connections[-1])
+ def clear_all_connections(self):
+ self.connections = []
+ self.test_nodes = []
+
def wait_for_disconnections(self):
def disconnected():
return all(node.closed for node in self.test_nodes)
diff --git a/src/chainparams.cpp b/src/chainparams.cpp
index 508c4de167..965d131695 100644
--- a/src/chainparams.cpp
+++ b/src/chainparams.cpp
@@ -86,7 +86,13 @@ public:
consensus.vDeployments[Consensus::DEPLOYMENT_TESTDUMMY].bit = 28;
consensus.vDeployments[Consensus::DEPLOYMENT_TESTDUMMY].nStartTime = 1199145601; // January 1, 2008
consensus.vDeployments[Consensus::DEPLOYMENT_TESTDUMMY].nTimeout = 1230767999; // December 31, 2008
- /**
+
+ // Deployment of BIP68, BIP112, and BIP113.
+ consensus.vDeployments[Consensus::DEPLOYMENT_CSV].bit = 0;
+ consensus.vDeployments[Consensus::DEPLOYMENT_CSV].nStartTime = 1462060800; // May 1st, 2016
+ consensus.vDeployments[Consensus::DEPLOYMENT_CSV].nTimeout = 1493596800; // May 1st, 2017
+
+ /**
* The message start string is designed to be unlikely to occur in normal data.
* The characters are rarely used upper ASCII, not valid as UTF-8, and produce
* a large 32-bit integer with any alignment.
@@ -171,6 +177,12 @@ public:
consensus.vDeployments[Consensus::DEPLOYMENT_TESTDUMMY].bit = 28;
consensus.vDeployments[Consensus::DEPLOYMENT_TESTDUMMY].nStartTime = 1199145601; // January 1, 2008
consensus.vDeployments[Consensus::DEPLOYMENT_TESTDUMMY].nTimeout = 1230767999; // December 31, 2008
+
+ // Deployment of BIP68, BIP112, and BIP113.
+ consensus.vDeployments[Consensus::DEPLOYMENT_CSV].bit = 0;
+ consensus.vDeployments[Consensus::DEPLOYMENT_CSV].nStartTime = 1456790400; // March 1st, 2016
+ consensus.vDeployments[Consensus::DEPLOYMENT_CSV].nTimeout = 1493596800; // May 1st, 2017
+
pchMessageStart[0] = 0x0b;
pchMessageStart[1] = 0x11;
pchMessageStart[2] = 0x09;
@@ -238,6 +250,9 @@ public:
consensus.vDeployments[Consensus::DEPLOYMENT_TESTDUMMY].bit = 28;
consensus.vDeployments[Consensus::DEPLOYMENT_TESTDUMMY].nStartTime = 0;
consensus.vDeployments[Consensus::DEPLOYMENT_TESTDUMMY].nTimeout = 999999999999ULL;
+ consensus.vDeployments[Consensus::DEPLOYMENT_CSV].bit = 0;
+ consensus.vDeployments[Consensus::DEPLOYMENT_CSV].nStartTime = 0;
+ consensus.vDeployments[Consensus::DEPLOYMENT_CSV].nTimeout = 999999999999ULL;
pchMessageStart[0] = 0xfa;
pchMessageStart[1] = 0xbf;
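The hunks above only add the csv deployment's bit, start time and timeout for each network; the lock-in window and threshold come from the existing nMinerConfirmationWindow and nRuleChangeActivationThreshold fields (144 and 108 on regtest, as the new tests assume). A simplified sketch of how a window's signalling tally drives the state transitions, with start-time and timeout handling omitted for brevity:

# Simplified BIP9 state machine, assuming regtest parameters
# (window = 144 blocks, threshold = 108 signalling blocks).
WINDOW = 144
THRESHOLD = 108

def next_state(state, signalling_blocks):
    # Start-time and timeout checks are omitted; regtest uses nStartTime = 0.
    if state == "defined":
        return "started"
    if state == "started":
        return "locked_in" if signalling_blocks >= THRESHOLD else "started"
    if state == "locked_in":
        return "active"  # unconditionally, one window later
    return state         # "active" and "failed" are terminal

assert next_state("started", 100) == "started"    # 100/144 is not enough
assert next_state("started", 108) == "locked_in"  # 108/144 reaches the threshold
assert next_state("locked_in", 0) == "active"     # activation regardless of signalling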
diff --git a/src/consensus/params.h b/src/consensus/params.h
index 7c3a8e84c3..4f3480b89b 100644
--- a/src/consensus/params.h
+++ b/src/consensus/params.h
@@ -15,6 +15,7 @@ namespace Consensus {
enum DeploymentPos
{
DEPLOYMENT_TESTDUMMY,
+ DEPLOYMENT_CSV, // Deployment of BIP68, BIP112, and BIP113.
MAX_VERSION_BITS_DEPLOYMENTS
};
diff --git a/src/init.cpp b/src/init.cpp
index 0c371b288a..a872abd16c 100644
--- a/src/init.cpp
+++ b/src/init.cpp
@@ -28,6 +28,7 @@
#include "script/standard.h"
#include "script/sigcache.h"
#include "scheduler.h"
+#include "timedata.h"
#include "txdb.h"
#include "txmempool.h"
#include "torcontrol.h"
@@ -366,6 +367,7 @@ std::string HelpMessage(HelpMessageMode mode)
strUsage += HelpMessageOpt("-maxconnections=<n>", strprintf(_("Maintain at most <n> connections to peers (default: %u)"), DEFAULT_MAX_PEER_CONNECTIONS));
strUsage += HelpMessageOpt("-maxreceivebuffer=<n>", strprintf(_("Maximum per-connection receive buffer, <n>*1000 bytes (default: %u)"), DEFAULT_MAXRECEIVEBUFFER));
strUsage += HelpMessageOpt("-maxsendbuffer=<n>", strprintf(_("Maximum per-connection send buffer, <n>*1000 bytes (default: %u)"), DEFAULT_MAXSENDBUFFER));
+ strUsage += HelpMessageOpt("-maxtimeadjustment", strprintf(_("Maximum allowed median peer time offset adjustment. Local perspective of time may be influenced by peers forward or backward by this amount. (default: %u seconds)"), DEFAULT_MAX_TIME_ADJUSTMENT));
strUsage += HelpMessageOpt("-onion=<ip:port>", strprintf(_("Use separate SOCKS5 proxy to reach peers via Tor hidden services (default: %s)"), "-proxy"));
strUsage += HelpMessageOpt("-onlynet=<net>", _("Only connect to nodes in network <net> (ipv4, ipv6 or onion)"));
strUsage += HelpMessageOpt("-permitbaremultisig", strprintf(_("Relay non-P2SH multisig (default: %u)"), DEFAULT_PERMIT_BAREMULTISIG));
diff --git a/src/main.cpp b/src/main.cpp
index 36189f4ffc..dfd92fa8f0 100644
--- a/src/main.cpp
+++ b/src/main.cpp
@@ -2286,6 +2286,13 @@ bool ConnectBlock(const CBlock& block, CValidationState& state, CBlockIndex* pin
flags |= SCRIPT_VERIFY_CHECKLOCKTIMEVERIFY;
}
+ // Start enforcing BIP68 (sequence locks) and BIP112 (CHECKSEQUENCEVERIFY) using versionbits logic.
+ int nLockTimeFlags = 0;
+ if (VersionBitsState(pindex->pprev, chainparams.GetConsensus(), Consensus::DEPLOYMENT_CSV, versionbitscache) == THRESHOLD_ACTIVE) {
+ flags |= SCRIPT_VERIFY_CHECKSEQUENCEVERIFY;
+ nLockTimeFlags |= LOCKTIME_VERIFY_SEQUENCE;
+ }
+
int64_t nTime2 = GetTimeMicros(); nTimeForks += nTime2 - nTime1;
LogPrint("bench", " - Fork checks: %.2fms [%.2fs]\n", 0.001 * (nTime2 - nTime1), nTimeForks * 0.000001);
@@ -2294,7 +2301,6 @@ bool ConnectBlock(const CBlock& block, CValidationState& state, CBlockIndex* pin
CCheckQueueControl<CScriptCheck> control(fScriptChecks && nScriptCheckThreads ? &scriptcheckqueue : NULL);
std::vector<int> prevheights;
- int nLockTimeFlags = 0;
CAmount nFees = 0;
int nInputs = 0;
unsigned int nSigOps = 0;
@@ -3295,12 +3301,18 @@ bool ContextualCheckBlock(const CBlock& block, CValidationState& state, CBlockIn
const int nHeight = pindexPrev == NULL ? 0 : pindexPrev->nHeight + 1;
const Consensus::Params& consensusParams = Params().GetConsensus();
+ // Start enforcing BIP113 (Median Time Past) using versionbits logic.
+ int nLockTimeFlags = 0;
+ if (VersionBitsState(pindexPrev, consensusParams, Consensus::DEPLOYMENT_CSV, versionbitscache) == THRESHOLD_ACTIVE) {
+ nLockTimeFlags |= LOCKTIME_MEDIAN_TIME_PAST;
+ }
+
+ int64_t nLockTimeCutoff = (nLockTimeFlags & LOCKTIME_MEDIAN_TIME_PAST)
+ ? pindexPrev->GetMedianTimePast()
+ : block.GetBlockTime();
+
// Check that all transactions are finalized
BOOST_FOREACH(const CTransaction& tx, block.vtx) {
- int nLockTimeFlags = 0;
- int64_t nLockTimeCutoff = (nLockTimeFlags & LOCKTIME_MEDIAN_TIME_PAST)
- ? pindexPrev->GetMedianTimePast()
- : block.GetBlockTime();
if (!IsFinalTx(tx, nHeight, nLockTimeCutoff)) {
return state.DoS(10, false, REJECT_INVALID, "bad-txns-nonfinal", false, "non-final transaction");
}
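In the ContextualCheckBlock hunk above, the nLockTime cutoff switches from the block's own timestamp to the previous block's median-time-past once the csv deployment is active (BIP113). A rough sketch of that finality rule, simplified from IsFinalTx and not the actual implementation:

LOCKTIME_THRESHOLD = 500000000  # nLockTime below this is a height, above it a unix time

def is_final(tx_locktime, tx_input_sequences, block_height, lock_time_cutoff):
    if tx_locktime == 0:
        return True
    limit = block_height if tx_locktime < LOCKTIME_THRESHOLD else lock_time_cutoff
    if tx_locktime < limit:
        return True
    # The locktime is ignored if every input opts out with the maximal nSequence.
    return all(seq == 0xFFFFFFFF for seq in tx_input_sequences)

def cutoff_for_block(median_time_past, block_time, bip113_active):
    # After activation the cutoff is MTP, which is why the bip113 test txs
    # (nLockTime equal to the prior block's MTP) start failing.
    return median_time_past if bip113_active else block_time

# e.g. a tx with nSequence 0xFFFFFFFE and nLockTime == cutoff is non-final:
assert not is_final(1000000000, [0xFFFFFFFE], 576, 1000000000)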
diff --git a/src/policy/policy.cpp b/src/policy/policy.cpp
index 332abc430e..e3ed7be000 100644
--- a/src/policy/policy.cpp
+++ b/src/policy/policy.cpp
@@ -55,7 +55,7 @@ bool IsStandard(const CScript& scriptPubKey, txnouttype& whichType)
bool IsStandardTx(const CTransaction& tx, std::string& reason)
{
- if (tx.nVersion > CTransaction::CURRENT_VERSION || tx.nVersion < 1) {
+ if (tx.nVersion > CTransaction::MAX_STANDARD_VERSION || tx.nVersion < 1) {
reason = "version";
return false;
}
diff --git a/src/primitives/transaction.h b/src/primitives/transaction.h
index e124dca365..149816406a 100644
--- a/src/primitives/transaction.h
+++ b/src/primitives/transaction.h
@@ -207,8 +207,15 @@ private:
void UpdateHash() const;
public:
+ // Default transaction version.
static const int32_t CURRENT_VERSION=1;
+ // Changing the default transaction version requires a two-step process: first
+ // adapt relay policy by bumping MAX_STANDARD_VERSION, and then, at a later date,
+ // bump the default CURRENT_VERSION, at which point CURRENT_VERSION and
+ // MAX_STANDARD_VERSION will be equal.
+ static const int32_t MAX_STANDARD_VERSION=2;
+
// The local variables are made const to prevent unintended modification
// without updating the cached hash value. However, CTransaction is not
// actually immutable; deserialization and assignment are implemented,
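The policy.cpp and primitives/transaction.h hunks above raise the relay-standardness ceiling to version 2 while leaving the default creation version at 1. A small sketch of the resulting check; the helper name is illustrative, not an actual function:

CURRENT_VERSION = 1       # default for newly created transactions, unchanged here
MAX_STANDARD_VERSION = 2  # now standard so BIP68-aware v2 transactions can relay

def is_standard_version(n_version):
    return 1 <= n_version <= MAX_STANDARD_VERSION

assert is_standard_version(1)
assert is_standard_version(2)
assert not is_standard_version(3)  # still rejected with reason "version"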
diff --git a/src/qt/transactiondesc.cpp b/src/qt/transactiondesc.cpp
index 5cb4cd5af7..5abefc144e 100644
--- a/src/qt/transactiondesc.cpp
+++ b/src/qt/transactiondesc.cpp
@@ -240,7 +240,8 @@ QString TransactionDesc::toHTML(CWallet *wallet, CWalletTx &wtx, TransactionReco
if (wtx.mapValue.count("comment") && !wtx.mapValue["comment"].empty())
strHTML += "<br><b>" + tr("Comment") + ":</b><br>" + GUIUtil::HtmlEscape(wtx.mapValue["comment"], true) + "<br>";
- strHTML += "<b>" + tr("Transaction ID") + ":</b> " + TransactionRecord::formatSubTxId(wtx.GetHash(), rec->idx) + "<br>";
+ strHTML += "<b>" + tr("Transaction ID") + ":</b> " + rec->getTxID() + "<br>";
+ strHTML += "<b>" + tr("Output index") + ":</b> " + QString::number(rec->getOutputIndex()) + "<br>";
// Message from normal bitcoin:URI (bitcoin:123...?message=example)
Q_FOREACH (const PAIRTYPE(std::string, std::string)& r, wtx.vOrderForm)
diff --git a/src/qt/transactionrecord.cpp b/src/qt/transactionrecord.cpp
index 5b16b108e6..97b77cc93d 100644
--- a/src/qt/transactionrecord.cpp
+++ b/src/qt/transactionrecord.cpp
@@ -260,11 +260,10 @@ bool TransactionRecord::statusUpdateNeeded()
QString TransactionRecord::getTxID() const
{
- return formatSubTxId(hash, idx);
+ return QString::fromStdString(hash.ToString());
}
-QString TransactionRecord::formatSubTxId(const uint256 &hash, int vout)
+int TransactionRecord::getOutputIndex() const
{
- return QString::fromStdString(hash.ToString() + strprintf("-%03d", vout));
+ return idx;
}
-
diff --git a/src/qt/transactionrecord.h b/src/qt/transactionrecord.h
index 49753ee31f..95ab98c10d 100644
--- a/src/qt/transactionrecord.h
+++ b/src/qt/transactionrecord.h
@@ -128,8 +128,8 @@ public:
/** Return the unique identifier for this transaction (part) */
QString getTxID() const;
- /** Format subtransaction id */
- static QString formatSubTxId(const uint256 &hash, int vout);
+ /** Return the output index of the subtransaction */
+ int getOutputIndex() const;
/** Update status from core wallet tx.
*/
diff --git a/src/rpc/blockchain.cpp b/src/rpc/blockchain.cpp
index b947609b1a..f1c9d9f7ab 100644
--- a/src/rpc/blockchain.cpp
+++ b/src/rpc/blockchain.cpp
@@ -681,6 +681,7 @@ UniValue getblockchaininfo(const UniValue& params, bool fHelp)
softforks.push_back(SoftForkDesc("bip34", 2, tip, consensusParams));
softforks.push_back(SoftForkDesc("bip66", 3, tip, consensusParams));
softforks.push_back(SoftForkDesc("bip65", 4, tip, consensusParams));
+ bip9_softforks.push_back(BIP9SoftForkDesc("csv", consensusParams, Consensus::DEPLOYMENT_CSV));
obj.push_back(Pair("softforks", softforks));
obj.push_back(Pair("bip9_softforks", bip9_softforks));
diff --git a/src/timedata.cpp b/src/timedata.cpp
index 4d2f8d1e3b..b6bcf86fbf 100644
--- a/src/timedata.cpp
+++ b/src/timedata.cpp
@@ -83,7 +83,7 @@ void AddTimeData(const CNetAddr& ip, int64_t nOffsetSample)
int64_t nMedian = vTimeOffsets.median();
std::vector<int64_t> vSorted = vTimeOffsets.sorted();
// Only let other nodes change our time by so much
- if (abs64(nMedian) < 70 * 60)
+ if (abs64(nMedian) <= std::max<int64_t>(0, GetArg("-maxtimeadjustment", DEFAULT_MAX_TIME_ADJUSTMENT)))
{
nTimeOffset = nMedian;
}
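The timedata.cpp hunk above replaces the hard-coded 70-minute clamp on the median peer time offset with the new -maxtimeadjustment setting (default defined in timedata.h below). A rough sketch of the resulting behaviour; the real AddTimeData only recomputes on odd-sized sample sets of at least five offsets, which is omitted here:

DEFAULT_MAX_TIME_ADJUSTMENT = 70 * 60  # seconds, matching timedata.h below

def apply_median_offset(peer_offsets, max_adjustment=DEFAULT_MAX_TIME_ADJUSTMENT):
    # Returns the offset applied to adjusted time; 0 means "trust the local clock".
    if not peer_offsets:
        return 0
    srt = sorted(peer_offsets)
    median = srt[len(srt) // 2]
    if abs(median) <= max(0, max_adjustment):
        return median
    return 0

assert apply_median_offset([10, 20, 30]) == 20
assert apply_median_offset([10, 20, 30], max_adjustment=0) == 0  # adjustment disabled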
diff --git a/src/timedata.h b/src/timedata.h
index 2296baf11b..9f2499c85c 100644
--- a/src/timedata.h
+++ b/src/timedata.h
@@ -10,6 +10,8 @@
#include <stdint.h>
#include <vector>
+static const int64_t DEFAULT_MAX_TIME_ADJUSTMENT = 70 * 60;
+
class CNetAddr;
/**