From 65751a3cf2421a9419172949cad9dc49b7383551 Mon Sep 17 00:00:00 2001 From: Pieter Wuille Date: Sat, 20 Feb 2016 23:37:13 +0100 Subject: Add CHECKSEQUENCEVERIFY softfork through BIP9 --- src/chainparams.cpp | 17 ++++++++++++++++- src/consensus/params.h | 1 + src/main.cpp | 5 +++++ src/rpc/blockchain.cpp | 1 + 4 files changed, 23 insertions(+), 1 deletion(-) diff --git a/src/chainparams.cpp b/src/chainparams.cpp index 35e090a0b3..f48937d67d 100644 --- a/src/chainparams.cpp +++ b/src/chainparams.cpp @@ -86,7 +86,13 @@ public: consensus.vDeployments[Consensus::DEPLOYMENT_TESTDUMMY].bit = 28; consensus.vDeployments[Consensus::DEPLOYMENT_TESTDUMMY].nStartTime = 1199145601; // January 1, 2008 consensus.vDeployments[Consensus::DEPLOYMENT_TESTDUMMY].nTimeout = 1230767999; // December 31, 2008 - /** + + // Deployment of BIP68, BIP112, and BIP113. + consensus.vDeployments[Consensus::DEPLOYMENT_CSV].bit = 0; + consensus.vDeployments[Consensus::DEPLOYMENT_CSV].nStartTime = 1462060800; // May 1st, 2016 + consensus.vDeployments[Consensus::DEPLOYMENT_CSV].nTimeout = 1493596800; // May 1st, 2017 + + /** * The message start string is designed to be unlikely to occur in normal data. * The characters are rarely used upper ASCII, not valid as UTF-8, and produce * a large 32-bit integer with any alignment. @@ -172,6 +178,12 @@ public: consensus.vDeployments[Consensus::DEPLOYMENT_TESTDUMMY].bit = 28; consensus.vDeployments[Consensus::DEPLOYMENT_TESTDUMMY].nStartTime = 1199145601; // January 1, 2008 consensus.vDeployments[Consensus::DEPLOYMENT_TESTDUMMY].nTimeout = 1230767999; // December 31, 2008 + + // Deployment of BIP68, BIP112, and BIP113. + consensus.vDeployments[Consensus::DEPLOYMENT_CSV].bit = 0; + consensus.vDeployments[Consensus::DEPLOYMENT_CSV].nStartTime = 1456790400; // March 1st, 2016 + consensus.vDeployments[Consensus::DEPLOYMENT_CSV].nTimeout = 1493596800; // May 1st, 2017 + pchMessageStart[0] = 0x0b; pchMessageStart[1] = 0x11; pchMessageStart[2] = 0x09; @@ -240,6 +252,9 @@ public: consensus.vDeployments[Consensus::DEPLOYMENT_TESTDUMMY].bit = 28; consensus.vDeployments[Consensus::DEPLOYMENT_TESTDUMMY].nStartTime = 0; consensus.vDeployments[Consensus::DEPLOYMENT_TESTDUMMY].nTimeout = 999999999999ULL; + consensus.vDeployments[Consensus::DEPLOYMENT_CSV].bit = 0; + consensus.vDeployments[Consensus::DEPLOYMENT_CSV].nStartTime = 0; + consensus.vDeployments[Consensus::DEPLOYMENT_CSV].nTimeout = 999999999999ULL; pchMessageStart[0] = 0xfa; pchMessageStart[1] = 0xbf; diff --git a/src/consensus/params.h b/src/consensus/params.h index 7c3a8e84c3..4f3480b89b 100644 --- a/src/consensus/params.h +++ b/src/consensus/params.h @@ -15,6 +15,7 @@ namespace Consensus { enum DeploymentPos { DEPLOYMENT_TESTDUMMY, + DEPLOYMENT_CSV, // Deployment of BIP68, BIP112, and BIP113. MAX_VERSION_BITS_DEPLOYMENTS }; diff --git a/src/main.cpp b/src/main.cpp index 1bc88326b6..cfa69817b6 100644 --- a/src/main.cpp +++ b/src/main.cpp @@ -2262,6 +2262,11 @@ bool ConnectBlock(const CBlock& block, CValidationState& state, CBlockIndex* pin flags |= SCRIPT_VERIFY_CHECKLOCKTIMEVERIFY; } + // Start enforcing CHECKSEQUENCEVERIFY using versionbits logic. 
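+ // VersionBitsState() walks the BIP9 state machine for this deployment (DEFINED -> STARTED -> LOCKED_IN -> ACTIVE or FAILED); + // the script flag below is only added once the deployment has reached ACTIVE.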
+ if (VersionBitsState(pindex->pprev, chainparams.GetConsensus(), Consensus::DEPLOYMENT_CSV, versionbitscache) == THRESHOLD_ACTIVE) { + flags |= SCRIPT_VERIFY_CHECKSEQUENCEVERIFY; + } + int64_t nTime2 = GetTimeMicros(); nTimeForks += nTime2 - nTime1; LogPrint("bench", " - Fork checks: %.2fms [%.2fs]\n", 0.001 * (nTime2 - nTime1), nTimeForks * 0.000001); diff --git a/src/rpc/blockchain.cpp b/src/rpc/blockchain.cpp index a110dff0d1..f5d75c20b3 100644 --- a/src/rpc/blockchain.cpp +++ b/src/rpc/blockchain.cpp @@ -681,6 +681,7 @@ UniValue getblockchaininfo(const UniValue& params, bool fHelp) softforks.push_back(SoftForkDesc("bip34", 2, tip, consensusParams)); softforks.push_back(SoftForkDesc("bip66", 3, tip, consensusParams)); softforks.push_back(SoftForkDesc("bip65", 4, tip, consensusParams)); + bip9_softforks.push_back(BIP9SoftForkDesc("csv", consensusParams, Consensus::DEPLOYMENT_CSV)); obj.push_back(Pair("softforks", softforks)); obj.push_back(Pair("bip9_softforks", bip9_softforks)); -- cgit v1.2.3 From 478fba6d5213a3f1ffeca5feeacf28aaf6844fd6 Mon Sep 17 00:00:00 2001 From: BtcDrak Date: Tue, 16 Feb 2016 16:33:31 +0000 Subject: Soft fork logic for BIP113 --- src/main.cpp | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/src/main.cpp b/src/main.cpp index cfa69817b6..857bf218ba 100644 --- a/src/main.cpp +++ b/src/main.cpp @@ -3276,12 +3276,18 @@ bool ContextualCheckBlock(const CBlock& block, CValidationState& state, CBlockIn const int nHeight = pindexPrev == NULL ? 0 : pindexPrev->nHeight + 1; const Consensus::Params& consensusParams = Params().GetConsensus(); + // Start enforcing BIP113 (Median Time Past) using versionbits logic. + int nLockTimeFlags = 0; + if (VersionBitsState(pindexPrev, consensusParams, Consensus::DEPLOYMENT_CSV, versionbitscache) == THRESHOLD_ACTIVE) { + nLockTimeFlags |= LOCKTIME_MEDIAN_TIME_PAST; + } + + int64_t nLockTimeCutoff = (nLockTimeFlags & LOCKTIME_MEDIAN_TIME_PAST) + ? pindexPrev->GetMedianTimePast() + : block.GetBlockTime(); + // Check that all transactions are finalized BOOST_FOREACH(const CTransaction& tx, block.vtx) { - int nLockTimeFlags = 0; - int64_t nLockTimeCutoff = (nLockTimeFlags & LOCKTIME_MEDIAN_TIME_PAST) - ? pindexPrev->GetMedianTimePast() - : block.GetBlockTime(); if (!IsFinalTx(tx, nHeight, nLockTimeCutoff)) { return state.DoS(10, false, REJECT_INVALID, "bad-txns-nonfinal", false, "non-final transaction"); } -- cgit v1.2.3 From 02c243580295a7f1c0298fcd9afc2e76b607e724 Mon Sep 17 00:00:00 2001 From: BtcDrak Date: Tue, 16 Feb 2016 16:37:43 +0000 Subject: Soft fork logic for BIP68 --- src/main.cpp | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/main.cpp b/src/main.cpp index 857bf218ba..f74cb30574 100644 --- a/src/main.cpp +++ b/src/main.cpp @@ -2262,9 +2262,11 @@ bool ConnectBlock(const CBlock& block, CValidationState& state, CBlockIndex* pin flags |= SCRIPT_VERIFY_CHECKLOCKTIMEVERIFY; } - // Start enforcing CHECKSEQUENCEVERIFY using versionbits logic. + // Start enforcing BIP68 (sequence locks) and BIP112 (CHECKSEQUENCEVERIFY) using versionbits logic. 
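+ // A single check of DEPLOYMENT_CSV sets both the script-level flag for BIP112 and the transaction-level lock-time flag for BIP68, + // so these rules (and BIP113, which is keyed on the same deployment) always activate together.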
+ int nLockTimeFlags = 0; if (VersionBitsState(pindex->pprev, chainparams.GetConsensus(), Consensus::DEPLOYMENT_CSV, versionbitscache) == THRESHOLD_ACTIVE) { flags |= SCRIPT_VERIFY_CHECKSEQUENCEVERIFY; + nLockTimeFlags |= LOCKTIME_VERIFY_SEQUENCE; } int64_t nTime2 = GetTimeMicros(); nTimeForks += nTime2 - nTime1; @@ -2275,7 +2277,6 @@ bool ConnectBlock(const CBlock& block, CValidationState& state, CBlockIndex* pin CCheckQueueControl control(fScriptChecks && nScriptCheckThreads ? &scriptcheckqueue : NULL); std::vector prevheights; - int nLockTimeFlags = 0; CAmount nFees = 0; int nInputs = 0; unsigned int nSigOps = 0; -- cgit v1.2.3 From 12c89c918534f8e615e80381b692d89d6b09d174 Mon Sep 17 00:00:00 2001 From: BtcDrak Date: Fri, 19 Feb 2016 19:52:31 +0000 Subject: Policy: allow transaction version 2 relay policy. This commit introduces a way to gracefully bump the default transaction version in a two step process. --- src/policy/policy.cpp | 2 +- src/primitives/transaction.h | 7 +++++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/src/policy/policy.cpp b/src/policy/policy.cpp index 332abc430e..e3ed7be000 100644 --- a/src/policy/policy.cpp +++ b/src/policy/policy.cpp @@ -55,7 +55,7 @@ bool IsStandard(const CScript& scriptPubKey, txnouttype& whichType) bool IsStandardTx(const CTransaction& tx, std::string& reason) { - if (tx.nVersion > CTransaction::CURRENT_VERSION || tx.nVersion < 1) { + if (tx.nVersion > CTransaction::MAX_STANDARD_VERSION || tx.nVersion < 1) { reason = "version"; return false; } diff --git a/src/primitives/transaction.h b/src/primitives/transaction.h index 07ae39e0b4..9f7d6f3943 100644 --- a/src/primitives/transaction.h +++ b/src/primitives/transaction.h @@ -206,8 +206,15 @@ private: void UpdateHash() const; public: + // Default transaction version. static const int32_t CURRENT_VERSION=1; + // Changing the default transaction version requires a two step process: first + // adapting relay policy by bumping MAX_STANDARD_VERSION, and then later date + // bumping the default CURRENT_VERSION at which point both CURRENT_VERSION and + // MAX_STANDARD_VERSION will be equal. + static const int32_t MAX_STANDARD_VERSION=2; + // The local variables are made const to prevent unintended modification // without updating the cached hash value. However, CTransaction is not // actually immutable; deserialization and assignment are implemented, -- cgit v1.2.3 From 19d73d540c8de4a73b5b2a05bebd762e74890a20 Mon Sep 17 00:00:00 2001 From: Alex Morcos Date: Thu, 10 Mar 2016 18:36:55 -0500 Subject: Add RPC test for BIP 68/112/113 soft fork. This RPC test will test both the activation mechanism of the first versionbits soft fork as well as testing many code branches of the consensus logic for BIP's 68, 112, and 113. 
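As background for reading the test, a minimal sketch (illustrative only, not part of the test) of how BIP68 interprets an nSequence value; the flag names mirror the consensus constants in primitives/transaction.h, and the 512-second granularity is why the test compares 9 * 600 seconds against 10 * 512:

    SEQUENCE_LOCKTIME_DISABLE_FLAG = 1 << 31  # set: no relative lock-time for this input
    SEQUENCE_LOCKTIME_TYPE_FLAG = 1 << 22     # set: value counts 512-second units; unset: blocks
    SEQUENCE_LOCKTIME_MASK = 0x0000FFFF       # low 16 bits hold the lock-time value

    def describe_bip68(nsequence):
        if nsequence & SEQUENCE_LOCKTIME_DISABLE_FLAG:
            return "relative lock-time disabled"
        value = nsequence & SEQUENCE_LOCKTIME_MASK
        if nsequence & SEQUENCE_LOCKTIME_TYPE_FLAG:
            return "time-based: %d * 512 seconds" % value
        return "height-based: %d blocks" % value

    # Bits 25 and 18, which the test also toggles, fall outside these masks and are ignored
    # by BIP68; only version 2 transactions are subject to the new sequence-lock rules.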
--- qa/pull-tester/rpc-tests.py | 1 + qa/rpc-tests/bip68-112-113-p2p.py | 547 ++++++++++++++++++++++++++++++++++++++ 2 files changed, 548 insertions(+) create mode 100755 qa/rpc-tests/bip68-112-113-p2p.py diff --git a/qa/pull-tester/rpc-tests.py b/qa/pull-tester/rpc-tests.py index f15eaacbda..cbc10abd21 100755 --- a/qa/pull-tester/rpc-tests.py +++ b/qa/pull-tester/rpc-tests.py @@ -83,6 +83,7 @@ if EXEEXT == ".exe" and "-win" not in opts: #Tests testScripts = [ + 'bip68-112-113-p2p.py', 'wallet.py', 'listtransactions.py', 'receivedby.py', diff --git a/qa/rpc-tests/bip68-112-113-p2p.py b/qa/rpc-tests/bip68-112-113-p2p.py new file mode 100755 index 0000000000..c226f4dad4 --- /dev/null +++ b/qa/rpc-tests/bip68-112-113-p2p.py @@ -0,0 +1,547 @@ +#!/usr/bin/env python2 +# Copyright (c) 2015 The Bitcoin Core developers +# Distributed under the MIT/X11 software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +# + +from test_framework.test_framework import ComparisonTestFramework +from test_framework.util import * +from test_framework.mininode import ToHex, CTransaction, NetworkThread +from test_framework.blocktools import create_coinbase, create_block +from test_framework.comptool import TestInstance, TestManager +from test_framework.script import * +from binascii import unhexlify +import cStringIO +import time + +''' +This test is meant to exercise activation of the first version bits soft fork. +This soft fork will activate the following BIPs: +BIP 68 - nSequence relative lock times +BIP 112 - CHECKSEQUENCEVERIFY +BIP 113 - MedianTimePast semantics for nLockTime + +regtest lock-in with 108/144 block signalling +activation after a further 144 blocks + +mine 82 blocks whose coinbases will be used to generate inputs for our tests +mine 61 blocks to transition from DEFINED to STARTED +mine 144 blocks, only 100 of which signal readiness, in order to fail to change state this period +mine 144 blocks with 108 signaling and verify STARTED->LOCKED_IN +mine 140 blocks and seed the block chain with the 82 inputs we will use for our tests at height 572 +mine 3 blocks and verify still at LOCKED_IN and test that enforcement has not triggered +mine 1 block and test that enforcement has triggered (which triggers ACTIVE) +Test BIP 113 is enforced +Mine 4 blocks so next height is 580 and test BIP 68 is enforced for time and height +Mine 1 block so next height is 581 and test BIP 68 now passes time but not height +Mine 1 block so next height is 582 and test BIP 68 now passes time and height +Test that BIP 112 is enforced + +Various transactions will be used to test that the BIPs' rules are not enforced before the soft fork activates, +and that after the soft fork activates transactions pass and fail as they should according to the rules. +For each BIP, transactions of versions 1 and 2 will be tested.
+---------------- +BIP 113: +bip113tx - modify the nLocktime variable + +BIP 68: +bip68txs - 16 txs with nSequence relative locktime of 10 with various bits set as per the relative_locktimes below + +BIP 112: +bip112txs_vary_nSequence - 16 txs with nSequence relative_locktimes of 10 evaluated against 10 OP_CSV OP_DROP +bip112txs_vary_nSequence_9 - 16 txs with nSequence relative_locktimes of 9 evaluated against 10 OP_CSV OP_DROP +bip112txs_vary_OP_CSV - 16 txs with nSequence = 10 evaluated against varying {relative_locktimes of 10} OP_CSV OP_DROP +bip112txs_vary_OP_CSV_9 - 16 txs with nSequence = 9 evaluated against varying {relative_locktimes of 10} OP_CSV OP_DROP +bip112tx_special - test negative argument to OP_CSV +''' + +base_relative_locktime = 10 +seq_disable_flag = 1<<31 +seq_random_high_bit = 1<<25 +seq_type_flag = 1<<22 +seq_random_low_bit = 1<<18 + +# b31,b25,b22,b18 represent the 31st, 25th, 22nd and 18th bits respectively in the nSequence field +# relative_locktimes[b31][b25][b22][b18] is a base_relative_locktime with the indicated bits set if their indices are 1 +relative_locktimes = [] +for b31 in xrange(2): + b25times = [] + for b25 in xrange(2): + b22times = [] + for b22 in xrange(2): + b18times = [] + for b18 in xrange(2): + rlt = base_relative_locktime + if (b31): + rlt = rlt | seq_disable_flag + if (b25): + rlt = rlt | seq_random_high_bit + if (b22): + rlt = rlt | seq_type_flag + if (b18): + rlt = rlt | seq_random_low_bit + b18times.append(rlt) + b22times.append(b18times) + b25times.append(b22times) + relative_locktimes.append(b25times) + +def all_rlt_txs(txarray): + txs = [] + for b31 in xrange(2): + for b25 in xrange(2): + for b22 in xrange(2): + for b18 in xrange(2): + txs.append(txarray[b31][b25][b22][b18]) + return txs + +class BIP68_112_113Test(ComparisonTestFramework): + def __init__(self): + self.num_nodes = 1 + + def setup_network(self): + # Must set the blockversion for this test + self.nodes = start_nodes(1, self.options.tmpdir, + extra_args=[['-debug', '-whitelist=127.0.0.1', '-blockversion=4']], + binary=[self.options.testbinary]) + + def run_test(self): + test = TestManager(self, self.options.tmpdir) + test.add_all_connections(self.nodes) + NetworkThread().start() # Start up network handling in another thread + test.run() + + def send_generic_input_tx(self, node, coinbases): + amount = Decimal("49.99") + return node.sendrawtransaction(ToHex(self.sign_transaction(node, self.create_transaction(node, node.getblock(coinbases.pop())['tx'][0], self.nodeaddress, amount)))) + + def create_transaction(self, node, txid, to_address, amount): + inputs = [{ "txid" : txid, "vout" : 0}] + outputs = { to_address : amount } + rawtx = node.createrawtransaction(inputs, outputs) + tx = CTransaction() + f = cStringIO.StringIO(unhexlify(rawtx)) + tx.deserialize(f) + return tx + + def sign_transaction(self, node, unsignedtx): + rawtx = ToHex(unsignedtx) + signresult = node.signrawtransaction(rawtx) + tx = CTransaction() + f = cStringIO.StringIO(unhexlify(signresult['hex'])) + tx.deserialize(f) + return tx + + def generate_blocks(self, number, version, test_blocks = []): + for i in xrange(number): + block = self.create_test_block([], version) + test_blocks.append([block, True]) + self.last_block_time += 600 + self.tip = block.sha256 + self.tipheight += 1 + return test_blocks + + def create_test_block(self, txs, version = 536870912): + block = create_block(self.tip, create_coinbase(self.tipheight + 1), self.last_block_time + 600) + block.nVersion = version + block.vtx.extend(txs) + 
block.hashMerkleRoot = block.calc_merkle_root() + block.rehash() + block.solve() + return block + + def get_bip9_status(self, key): + info = self.nodes[0].getblockchaininfo() + for row in info['bip9_softforks']: + if row['id'] == key: + return row + raise IndexError ('key:"%s" not found' % key) + + def create_bip68txs(self, bip68inputs, txversion, locktime_delta = 0): + txs = [] + assert(len(bip68inputs) >= 16) + i = 0 + for b31 in xrange(2): + b25txs = [] + for b25 in xrange(2): + b22txs = [] + for b22 in xrange(2): + b18txs = [] + for b18 in xrange(2): + tx = self.create_transaction(self.nodes[0], bip68inputs[i], self.nodeaddress, Decimal("49.98")) + i += 1 + tx.nVersion = txversion + tx.vin[0].nSequence = relative_locktimes[b31][b25][b22][b18] + locktime_delta + b18txs.append(self.sign_transaction(self.nodes[0], tx)) + b22txs.append(b18txs) + b25txs.append(b22txs) + txs.append(b25txs) + return txs + + def create_bip112special(self, input, txversion): + tx = self.create_transaction(self.nodes[0], input, self.nodeaddress, Decimal("49.98")) + tx.nVersion = txversion + signtx = self.sign_transaction(self.nodes[0], tx) + signtx.vin[0].scriptSig = CScript([-1, OP_NOP3, OP_DROP] + list(CScript(signtx.vin[0].scriptSig))) + return signtx + + def create_bip112txs(self, bip112inputs, varyOP_CSV, txversion, locktime_delta = 0): + txs = [] + assert(len(bip112inputs) >= 16) + i = 0 + for b31 in xrange(2): + b25txs = [] + for b25 in xrange(2): + b22txs = [] + for b22 in xrange(2): + b18txs = [] + for b18 in xrange(2): + tx = self.create_transaction(self.nodes[0], bip112inputs[i], self.nodeaddress, Decimal("49.98")) + i += 1 + if (varyOP_CSV): # if varying OP_CSV, nSequence is fixed + tx.vin[0].nSequence = base_relative_locktime + locktime_delta + else: # vary nSequence instead, OP_CSV is fixed + tx.vin[0].nSequence = relative_locktimes[b31][b25][b22][b18] + locktime_delta + tx.nVersion = txversion + signtx = self.sign_transaction(self.nodes[0], tx) + if (varyOP_CSV): + signtx.vin[0].scriptSig = CScript([relative_locktimes[b31][b25][b22][b18], OP_NOP3, OP_DROP] + list(CScript(signtx.vin[0].scriptSig))) + else: + signtx.vin[0].scriptSig = CScript([base_relative_locktime, OP_NOP3, OP_DROP] + list(CScript(signtx.vin[0].scriptSig))) + b18txs.append(signtx) + b22txs.append(b18txs) + b25txs.append(b22txs) + txs.append(b25txs) + return txs + + def get_tests(self): + long_past_time = int(time.time()) - 600 * 1000 # enough to build up to 1000 blocks 10 minutes apart without worrying about getting into the future + self.nodes[0].setmocktime(long_past_time - 100) # enough so that the generated blocks will still all be before long_past_time + self.coinbase_blocks = self.nodes[0].generate(1 + 16 + 2*32 + 1) # 82 blocks generated for inputs + self.nodes[0].setmocktime(0) # set time back to present so yielded blocks aren't in the future as we advance last_block_time + self.tipheight = 82 # height of the next block to build + self.last_block_time = long_past_time + self.tip = int ("0x" + self.nodes[0].getbestblockhash() + "L", 0) + self.nodeaddress = self.nodes[0].getnewaddress() + + assert_equal(self.get_bip9_status('csv')['status'], 'defined') + test_blocks = self.generate_blocks(61, 4) + yield TestInstance(test_blocks, sync_every_block=False) # 1 + # Advanced from DEFINED to STARTED, height = 143 + assert_equal(self.get_bip9_status('csv')['status'], 'started') + + # Fail to achieve LOCKED_IN 100 out of 144 signal bit 0 + # using a variety of bits to simulate multiple parallel softforks + test_blocks = 
self.generate_blocks(50, 536870913) # 0x20000001 (signalling ready) + test_blocks = self.generate_blocks(20, 4, test_blocks) # 0x00000004 (signalling not) + test_blocks = self.generate_blocks(50, 536871169, test_blocks) # 0x20000101 (signalling ready) + test_blocks = self.generate_blocks(24, 536936448, test_blocks) # 0x20010000 (signalling not) + yield TestInstance(test_blocks, sync_every_block=False) # 2 + # Failed to advance past STARTED, height = 287 + assert_equal(self.get_bip9_status('csv')['status'], 'started') + + # 108 out of 144 signal bit 0 to achieve lock-in + # using a variety of bits to simulate multiple parallel softforks + test_blocks = self.generate_blocks(58, 536870913) # 0x20000001 (signalling ready) + test_blocks = self.generate_blocks(26, 4, test_blocks) # 0x00000004 (signalling not) + test_blocks = self.generate_blocks(50, 536871169, test_blocks) # 0x20000101 (signalling ready) + test_blocks = self.generate_blocks(10, 536936448, test_blocks) # 0x20010000 (signalling not) + yield TestInstance(test_blocks, sync_every_block=False) # 3 + # Advanced from STARTED to LOCKED_IN, height = 431 + assert_equal(self.get_bip9_status('csv')['status'], 'locked_in') + + # 140 more version 4 blocks + test_blocks = self.generate_blocks(140, 4) + yield TestInstance(test_blocks, sync_every_block=False) # 4 + + ### Inputs at height = 572 + # Put inputs for all tests in the chain at height 572 (tip now = 571) (time increases by 600s per block) + # Note we reuse inputs for v1 and v2 txs so must test these separately + # 16 normal inputs + bip68inputs = [] + for i in xrange(16): + bip68inputs.append(self.send_generic_input_tx(self.nodes[0], self.coinbase_blocks)) + # 2 sets of 16 inputs with 10 OP_CSV OP_DROP (actually will be prepended to spending scriptSig) + bip112basicinputs = [] + for j in xrange(2): + inputs = [] + for i in xrange(16): + inputs.append(self.send_generic_input_tx(self.nodes[0], self.coinbase_blocks)) + bip112basicinputs.append(inputs) + # 2 sets of 16 varied inputs with (relative_lock_time) OP_CSV OP_DROP (actually will be prepended to spending scriptSig) + bip112diverseinputs = [] + for j in xrange(2): + inputs = [] + for i in xrange(16): + inputs.append(self.send_generic_input_tx(self.nodes[0], self.coinbase_blocks)) + bip112diverseinputs.append(inputs) + # 1 special input with -1 OP_CSV OP_DROP (actually will be prepended to spending scriptSig) + bip112specialinput = self.send_generic_input_tx(self.nodes[0], self.coinbase_blocks) + # 1 normal input + bip113input = self.send_generic_input_tx(self.nodes[0], self.coinbase_blocks) + + self.nodes[0].setmocktime(self.last_block_time + 600) + inputblockhash = self.nodes[0].generate(1)[0] # 1 block generated for inputs to be in chain at height 572 + self.nodes[0].setmocktime(0) + self.tip = int("0x" + inputblockhash + "L", 0) + self.tipheight += 1 + self.last_block_time += 600 + assert_equal(len(self.nodes[0].getblock(inputblockhash,True)["tx"]), 82+1) + + # 2 more version 4 blocks + test_blocks = self.generate_blocks(2, 4) + yield TestInstance(test_blocks, sync_every_block=False) # 5 + # Not yet advanced to ACTIVE, height = 574 (will activate for block 576, not 575) + assert_equal(self.get_bip9_status('csv')['status'], 'locked_in') + + # Test both version 1 and version 2 transactions for all tests + # BIP113 test transaction will be modified before each use to put in appropriate block time + bip113tx_v1 = self.create_transaction(self.nodes[0], bip113input, self.nodeaddress, Decimal("49.98")) + bip113tx_v1.vin[0].nSequence = 
0xFFFFFFFE + bip113tx_v2 = self.create_transaction(self.nodes[0], bip113input, self.nodeaddress, Decimal("49.98")) + bip113tx_v2.vin[0].nSequence = 0xFFFFFFFE + bip113tx_v2.nVersion = 2 + + # For BIP68 test all 16 relative sequence locktimes + bip68txs_v1 = self.create_bip68txs(bip68inputs, 1) + bip68txs_v2 = self.create_bip68txs(bip68inputs, 2) + + # For BIP112 test: + # 16 relative sequence locktimes of 10 against 10 OP_CSV OP_DROP inputs + bip112txs_vary_nSequence_v1 = self.create_bip112txs(bip112basicinputs[0], False, 1) + bip112txs_vary_nSequence_v2 = self.create_bip112txs(bip112basicinputs[0], False, 2) + # 16 relative sequence locktimes of 9 against 10 OP_CSV OP_DROP inputs + bip112txs_vary_nSequence_9_v1 = self.create_bip112txs(bip112basicinputs[1], False, 1, -1) + bip112txs_vary_nSequence_9_v2 = self.create_bip112txs(bip112basicinputs[1], False, 2, -1) + # sequence lock time of 10 against 16 (relative_lock_time) OP_CSV OP_DROP inputs + bip112txs_vary_OP_CSV_v1 = self.create_bip112txs(bip112diverseinputs[0], True, 1) + bip112txs_vary_OP_CSV_v2 = self.create_bip112txs(bip112diverseinputs[0], True, 2) + # sequence lock time of 9 against 16 (relative_lock_time) OP_CSV OP_DROP inputs + bip112txs_vary_OP_CSV_9_v1 = self.create_bip112txs(bip112diverseinputs[1], True, 1, -1) + bip112txs_vary_OP_CSV_9_v2 = self.create_bip112txs(bip112diverseinputs[1], True, 2, -1) + # -1 OP_CSV OP_DROP input + bip112tx_special_v1 = self.create_bip112special(bip112specialinput, 1) + bip112tx_special_v2 = self.create_bip112special(bip112specialinput, 2) + + + ### TESTING ### + ################################## + ### Before Soft Forks Activate ### + ################################## + # All txs should pass + ### Version 1 txs ### + success_txs = [] + # add BIP113 tx and -1 CSV tx + bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block + bip113signed1 = self.sign_transaction(self.nodes[0], bip113tx_v1) + success_txs.append(bip113signed1) + success_txs.append(bip112tx_special_v1) + # add BIP 68 txs + success_txs.extend(all_rlt_txs(bip68txs_v1)) + # add BIP 112 with seq=10 txs + success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_v1)) + success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_v1)) + # try BIP 112 with seq=9 txs + success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v1)) + success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_9_v1)) + yield TestInstance([[self.create_test_block(success_txs), True]]) # 6 + self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) + + ### Version 2 txs ### + success_txs = [] + # add BIP113 tx and -1 CSV tx + bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block + bip113signed2 = self.sign_transaction(self.nodes[0], bip113tx_v2) + success_txs.append(bip113signed2) + success_txs.append(bip112tx_special_v2) + # add BIP 68 txs + success_txs.extend(all_rlt_txs(bip68txs_v2)) + # add BIP 112 with seq=10 txs + success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_v2)) + success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_v2)) + # try BIP 112 with seq=9 txs + success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v2)) + success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_9_v2)) + yield TestInstance([[self.create_test_block(success_txs), True]]) # 7 + self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) + + + # 1 more version 4 block to get us to height 575 so the fork should now be active for the next block + 
test_blocks = self.generate_blocks(1, 4) + yield TestInstance(test_blocks, sync_every_block=False) # 8 + assert_equal(self.get_bip9_status('csv')['status'], 'active') + + + ################################# + ### After Soft Forks Activate ### + ################################# + ### BIP 113 ### + # BIP 113 tests should now fail regardless of version number if nLockTime isn't satisfied by new rules + bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block + bip113signed1 = self.sign_transaction(self.nodes[0], bip113tx_v1) + bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block + bip113signed2 = self.sign_transaction(self.nodes[0], bip113tx_v2) + for bip113tx in [bip113signed1, bip113signed2]: + yield TestInstance([[self.create_test_block([bip113tx]), False]]) # 9,10 + # BIP 113 tests should now pass if the locktime is < MTP + bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 - 1 # < MTP of prior block + bip113signed1 = self.sign_transaction(self.nodes[0], bip113tx_v1) + bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 - 1 # < MTP of prior block + bip113signed2 = self.sign_transaction(self.nodes[0], bip113tx_v2) + for bip113tx in [bip113signed1, bip113signed2]: + yield TestInstance([[self.create_test_block([bip113tx]), True]]) # 11,12 + self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) + + # Next block height = 580 after 4 blocks of random version + test_blocks = self.generate_blocks(4, 1234) + yield TestInstance(test_blocks, sync_every_block=False) # 13 + + ### BIP 68 ### + ### Version 1 txs ### + # All still pass + success_txs = [] + success_txs.extend(all_rlt_txs(bip68txs_v1)) + yield TestInstance([[self.create_test_block(success_txs), True]]) # 14 + self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) + + ### Version 2 txs ### + bip68success_txs = [] + # All txs with SEQUENCE_LOCKTIME_DISABLE_FLAG set pass + for b25 in xrange(2): + for b22 in xrange(2): + for b18 in xrange(2): + bip68success_txs.append(bip68txs_v2[1][b25][b22][b18]) + yield TestInstance([[self.create_test_block(bip68success_txs), True]]) # 15 + self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) + # All txs without flag fail as we are at delta height = 8 < 10 and delta time = 8 * 600 < 10 * 512 + bip68timetxs = [] + for b25 in xrange(2): + for b18 in xrange(2): + bip68timetxs.append(bip68txs_v2[0][b25][1][b18]) + for tx in bip68timetxs: + yield TestInstance([[self.create_test_block([tx]), False]]) # 16 - 19 + bip68heighttxs = [] + for b25 in xrange(2): + for b18 in xrange(2): + bip68heighttxs.append(bip68txs_v2[0][b25][0][b18]) + for tx in bip68heighttxs: + yield TestInstance([[self.create_test_block([tx]), False]]) # 20 - 23 + + # Advance one block to 581 + test_blocks = self.generate_blocks(1, 1234) + yield TestInstance(test_blocks, sync_every_block=False) # 24 + + # Height txs should fail and time txs should now pass 9 * 600 > 10 * 512 + bip68success_txs.extend(bip68timetxs) + yield TestInstance([[self.create_test_block(bip68success_txs), True]]) # 25 + self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) + for tx in bip68heighttxs: + yield TestInstance([[self.create_test_block([tx]), False]]) # 26 - 29 + + # Advance one block to 582 + test_blocks = self.generate_blocks(1, 1234) + yield TestInstance(test_blocks, sync_every_block=False) # 30 + + # All BIP 68 
txs should pass + bip68success_txs.extend(bip68heighttxs) + yield TestInstance([[self.create_test_block(bip68success_txs), True]]) # 31 + self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) + + + ### BIP 112 ### + ### Version 1 txs ### + # -1 OP_CSV tx should fail + yield TestInstance([[self.create_test_block([bip112tx_special_v1]), False]]) #32 + # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in argument to OP_CSV, version 1 txs should still pass + success_txs = [] + for b25 in xrange(2): + for b22 in xrange(2): + for b18 in xrange(2): + success_txs.append(bip112txs_vary_OP_CSV_v1[1][b25][b22][b18]) + success_txs.append(bip112txs_vary_OP_CSV_9_v1[1][b25][b22][b18]) + yield TestInstance([[self.create_test_block(success_txs), True]]) # 33 + self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) + + # If SEQUENCE_LOCKTIME_DISABLE_FLAG is unset in argument to OP_CSV, version 1 txs should now fail + fail_txs = [] + fail_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_v1)) + fail_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v1)) + for b25 in xrange(2): + for b22 in xrange(2): + for b18 in xrange(2): + fail_txs.append(bip112txs_vary_OP_CSV_v1[0][b25][b22][b18]) + fail_txs.append(bip112txs_vary_OP_CSV_9_v1[0][b25][b22][b18]) + + for tx in fail_txs: + yield TestInstance([[self.create_test_block([tx]), False]]) # 34 - 81 + + ### Version 2 txs ### + # -1 OP_CSV tx should fail + yield TestInstance([[self.create_test_block([bip112tx_special_v2]), False]]) #82 + + # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in argument to OP_CSV, version 2 txs should pass (all sequence locks are met) + success_txs = [] + for b25 in xrange(2): + for b22 in xrange(2): + for b18 in xrange(2): + success_txs.append(bip112txs_vary_OP_CSV_v2[1][b25][b22][b18]) # 8/16 of vary_OP_CSV + success_txs.append(bip112txs_vary_OP_CSV_9_v2[1][b25][b22][b18]) # 8/16 of vary_OP_CSV_9 + + yield TestInstance([[self.create_test_block(success_txs), True]]) # 83 + self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) + + ## SEQUENCE_LOCKTIME_DISABLE_FLAG is unset in argument to OP_CSV for all remaining txs ## + # All txs with nSequence 9 should fail either due to earlier mismatch or failing the CSV check + fail_txs = [] + fail_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v2)) # 16/16 of vary_nSequence_9 + for b25 in xrange(2): + for b22 in xrange(2): + for b18 in xrange(2): + fail_txs.append(bip112txs_vary_OP_CSV_9_v2[0][b25][b22][b18]) # 16/16 of vary_OP_CSV_9 + + for tx in fail_txs: + yield TestInstance([[self.create_test_block([tx]), False]]) # 84 - 107 + + # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in nSequence, tx should fail + fail_txs = [] + for b25 in xrange(2): + for b22 in xrange(2): + for b18 in xrange(2): + fail_txs.append(bip112txs_vary_nSequence_v2[1][b25][b22][b18]) # 8/16 of vary_nSequence + for tx in fail_txs: + yield TestInstance([[self.create_test_block([tx]), False]]) # 108-115 + + # If sequence lock types mismatch, tx should fail + fail_txs = [] + for b25 in xrange(2): + for b18 in xrange(2): + fail_txs.append(bip112txs_vary_nSequence_v2[0][b25][1][b18]) # 12/16 of vary_nSequence + fail_txs.append(bip112txs_vary_OP_CSV_v2[0][b25][1][b18]) # 12/16 of vary_OP_CSV + for tx in fail_txs: + yield TestInstance([[self.create_test_block([tx]), False]]) # 116-123 + + # Remaining txs should pass, just test that masking works properly + success_txs = [] + for b25 in xrange(2): + for b18 in xrange(2): + success_txs.append(bip112txs_vary_nSequence_v2[0][b25][0][b18]) # 16/16 of vary_nSequence + 
success_txs.append(bip112txs_vary_OP_CSV_v2[0][b25][0][b18]) # 16/16 of vary_OP_CSV + yield TestInstance([[self.create_test_block(success_txs), True]]) # 124 + self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) + + # Additional test, of checking that comparison of two time types works properly + time_txs = [] + for b25 in xrange(2): + for b18 in xrange(2): + tx = bip112txs_vary_OP_CSV_v2[0][b25][1][b18] + tx.vin[0].nSequence = base_relative_locktime | seq_type_flag + signtx = self.sign_transaction(self.nodes[0], tx) + time_txs.append(signtx) + yield TestInstance([[self.create_test_block(time_txs), True]]) # 125 + self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash()) + + ### Missing aspects of test + ## Testing empty stack fails + + +if __name__ == '__main__': + BIP68_112_113Test().main() -- cgit v1.2.3 From 71527a0f31ae67edad0a7fcda59c75a6ce5666ca Mon Sep 17 00:00:00 2001 From: NicolasDorier Date: Wed, 16 Mar 2016 14:30:04 +0900 Subject: Test of BIP9 fork activation of mtp, csv, sequence_lock --- qa/pull-tester/rpc-tests.py | 1 + qa/rpc-tests/bip9-softforks.py | 220 ++++++++++++++++++++++++++++++++ qa/rpc-tests/test_framework/comptool.py | 4 + 3 files changed, 225 insertions(+) create mode 100755 qa/rpc-tests/bip9-softforks.py diff --git a/qa/pull-tester/rpc-tests.py b/qa/pull-tester/rpc-tests.py index cbc10abd21..b32d8d93a9 100755 --- a/qa/pull-tester/rpc-tests.py +++ b/qa/pull-tester/rpc-tests.py @@ -119,6 +119,7 @@ testScripts = [ 'p2p-versionbits-warning.py', ] testScriptsExt = [ + 'bip9-softforks.py', 'bip65-cltv.py', 'bip65-cltv-p2p.py', 'bip68-sequence.py', diff --git a/qa/rpc-tests/bip9-softforks.py b/qa/rpc-tests/bip9-softforks.py new file mode 100755 index 0000000000..cbb1b7d4ce --- /dev/null +++ b/qa/rpc-tests/bip9-softforks.py @@ -0,0 +1,220 @@ +#!/usr/bin/env python2 +# Copyright (c) 2015 The Bitcoin Core developers +# Distributed under the MIT/X11 software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +# + +from test_framework.test_framework import ComparisonTestFramework +from test_framework.util import * +from test_framework.mininode import CTransaction, NetworkThread +from test_framework.blocktools import create_coinbase, create_block +from test_framework.comptool import TestInstance, TestManager +from test_framework.script import CScript, OP_1NEGATE, OP_NOP3, OP_DROP +from binascii import hexlify, unhexlify +import cStringIO +import time +import itertools + +''' +This test is meant to exercise BIP forks +Connect to a single node. 
+regtest lock-in with 108/144 block signalling +activation after a further 144 blocks +mine 2 block and save coinbases for later use +mine 141 blocks to transition from DEFINED to STARTED +mine 100 blocks signalling readiness and 44 not in order to fail to change state this period +mine 108 blocks signalling readiness and 36 blocks not signalling readiness (STARTED->LOCKED_IN) +mine a further 143 blocks (LOCKED_IN) +test that enforcement has not triggered (which triggers ACTIVE) +test that enforcement has triggered +''' + + + +class BIP9SoftForksTest(ComparisonTestFramework): + + def __init__(self): + self.num_nodes = 1 + + def setup_network(self): + self.nodes = start_nodes(1, self.options.tmpdir, + extra_args=[['-debug', '-whitelist=127.0.0.1']], + binary=[self.options.testbinary]) + + def run_test(self): + self.test = TestManager(self, self.options.tmpdir) + self.test.add_all_connections(self.nodes) + NetworkThread().start() # Start up network handling in another thread + self.test.run() + + def create_transaction(self, node, coinbase, to_address, amount): + from_txid = node.getblock(coinbase)['tx'][0] + inputs = [{ "txid" : from_txid, "vout" : 0}] + outputs = { to_address : amount } + rawtx = node.createrawtransaction(inputs, outputs) + tx = CTransaction() + f = cStringIO.StringIO(unhexlify(rawtx)) + tx.deserialize(f) + tx.nVersion = 2 + return tx + + def sign_transaction(self, node, tx): + signresult = node.signrawtransaction(hexlify(tx.serialize())) + tx = CTransaction() + f = cStringIO.StringIO(unhexlify(signresult['hex'])) + tx.deserialize(f) + return tx + + def generate_blocks(self, number, version, test_blocks = []): + for i in xrange(number): + block = create_block(self.tip, create_coinbase(self.height), self.last_block_time + 1) + block.nVersion = version + block.rehash() + block.solve() + test_blocks.append([block, True]) + self.last_block_time += 1 + self.tip = block.sha256 + self.height += 1 + return test_blocks + + def get_bip9_status(self, key): + info = self.nodes[0].getblockchaininfo() + for row in info['bip9_softforks']: + if row['id'] == key: + return row + raise IndexError ('key:"%s" not found' % key) + + + def test_BIP(self, bipName, activated_version, invalidate, invalidatePostSignature): + # generate some coins for later + self.coinbase_blocks = self.nodes[0].generate(2) + self.height = 3 # height of the next block to build + self.tip = int ("0x" + self.nodes[0].getbestblockhash() + "L", 0) + self.nodeaddress = self.nodes[0].getnewaddress() + self.last_block_time = time.time() + + assert_equal(self.get_bip9_status(bipName)['status'], 'defined') + + # Test 1 + # Advance from DEFINED to STARTED + test_blocks = self.generate_blocks(141, 4) + yield TestInstance(test_blocks, sync_every_block=False) + + assert_equal(self.get_bip9_status(bipName)['status'], 'started') + + # Test 2 + # Fail to achieve LOCKED_IN 100 out of 144 signal bit 1 + # using a variety of bits to simulate multiple parallel softforks + test_blocks = self.generate_blocks(50, activated_version) # 0x20000001 (signalling ready) + test_blocks = self.generate_blocks(20, 4, test_blocks) # 0x00000004 (signalling not) + test_blocks = self.generate_blocks(50, activated_version, test_blocks) # 0x20000101 (signalling ready) + test_blocks = self.generate_blocks(24, 4, test_blocks) # 0x20010000 (signalling not) + yield TestInstance(test_blocks, sync_every_block=False) + + assert_equal(self.get_bip9_status(bipName)['status'], 'started') + + # Test 3 + # 108 out of 144 signal bit 1 to achieve LOCKED_IN + # using a 
variety of bits to simulate multiple parallel softforks + test_blocks = self.generate_blocks(58, activated_version) # 0x20000001 (signalling ready) + test_blocks = self.generate_blocks(26, 4, test_blocks) # 0x00000004 (signalling not) + test_blocks = self.generate_blocks(50, activated_version, test_blocks) # 0x20000101 (signalling ready) + test_blocks = self.generate_blocks(10, 4, test_blocks) # 0x20010000 (signalling not) + yield TestInstance(test_blocks, sync_every_block=False) + + assert_equal(self.get_bip9_status(bipName)['status'], 'locked_in') + + # Test 4 + # 143 more version 536870913 blocks (waiting period-1) + test_blocks = self.generate_blocks(143, 4) + yield TestInstance(test_blocks, sync_every_block=False) + + assert_equal(self.get_bip9_status(bipName)['status'], 'locked_in') + + # Test 5 + # Check that the new rule is enforced + spendtx = self.create_transaction(self.nodes[0], + self.coinbase_blocks[0], self.nodeaddress, 1.0) + invalidate(spendtx) + spendtx = self.sign_transaction(self.nodes[0], spendtx) + spendtx.rehash() + invalidatePostSignature(spendtx) + spendtx.rehash() + block = create_block(self.tip, create_coinbase(self.height), self.last_block_time + 1) + block.nVersion = activated_version + block.vtx.append(spendtx) + block.hashMerkleRoot = block.calc_merkle_root() + block.rehash() + block.solve() + + self.last_block_time += 1 + self.tip = block.sha256 + self.height += 1 + yield TestInstance([[block, True]]) + + assert_equal(self.get_bip9_status(bipName)['status'], 'active') + + # Test 6 + # Check that the new sequence lock rules are enforced + spendtx = self.create_transaction(self.nodes[0], + self.coinbase_blocks[1], self.nodeaddress, 1.0) + invalidate(spendtx) + spendtx = self.sign_transaction(self.nodes[0], spendtx) + spendtx.rehash() + invalidatePostSignature(spendtx) + spendtx.rehash() + + block = create_block(self.tip, create_coinbase(self.height), self.last_block_time + 1) + block.nVersion = 5 + block.vtx.append(spendtx) + block.hashMerkleRoot = block.calc_merkle_root() + block.rehash() + block.solve() + self.last_block_time += 1 + yield TestInstance([[block, False]]) + + # Restart all + stop_nodes(self.nodes) + wait_bitcoinds() + shutil.rmtree(self.options.tmpdir) + self.setup_chain() + self.setup_network() + self.test.clear_all_connections() + self.test.add_all_connections(self.nodes) + NetworkThread().start() # Start up network handling in another thread + + + + def get_tests(self): + for test in itertools.chain( + self.test_BIP('csv', 536870913, self.sequence_lock_invalidate, self.donothing), + self.test_BIP('csv', 536870913, self.mtp_invalidate, self.donothing), + self.test_BIP('csv', 536870913, self.donothing, self.csv_invalidate) + ): + yield test + + def donothing(self, tx): + return + + def csv_invalidate(self, tx): + '''Modify the signature in vin 0 of the tx to fail CSV + Prepends -1 CSV DROP in the scriptSig itself. 
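+ (BIP112 requires the argument to CHECKSEQUENCEVERIFY to be non-negative, so OP_1NEGATE always fails the check once CSV is enforced.)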
+ ''' + tx.vin[0].scriptSig = CScript([OP_1NEGATE, OP_NOP3, OP_DROP] + + list(CScript(tx.vin[0].scriptSig))) + + def sequence_lock_invalidate(self, tx): + '''Modify the nSequence to make it fail once the sequence lock rule is activated (high timespan) + ''' + tx.vin[0].nSequence = 0x00FFFFFF + tx.nLockTime = 0 + + def mtp_invalidate(self, tx): + '''Modify the nLockTime to make it fail once the MTP rule is activated + ''' + # Disable the sequence lock, activate nLockTime + tx.vin[0].nSequence = 0x90FFFFFF + tx.nLockTime = self.last_block_time + +if __name__ == '__main__': + BIP9SoftForksTest().main() \ No newline at end of file diff --git a/qa/rpc-tests/test_framework/comptool.py b/qa/rpc-tests/test_framework/comptool.py index a4cd4d0a89..d8fcd807f6 100755 --- a/qa/rpc-tests/test_framework/comptool.py +++ b/qa/rpc-tests/test_framework/comptool.py @@ -193,6 +193,10 @@ class TestManager(object): # associated NodeConn test_node.add_connection(self.connections[-1]) + def clear_all_connections(self): + self.connections = [] + self.test_nodes = [] + def wait_for_disconnections(self): def disconnected(): return all(node.closed for node in self.test_nodes) -- cgit v1.2.3
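Both new tests exercise the same regtest BIP9 parameters noted in their docstrings: 144-block signalling periods with a 108-block lock-in threshold. A rough sketch of the per-period counting involved (the helpers below are illustrative and not part of the test framework):

    WINDOW = 144     # regtest nMinerConfirmationWindow
    THRESHOLD = 108  # regtest nRuleChangeActivationThreshold
    CSV_BIT = 0      # the bit assigned to DEPLOYMENT_CSV in chainparams.cpp

    def signals(nversion, bit=CSV_BIT):
        # BIP9: the top three version bits must be 001 and the deployment's bit must be set
        return (nversion & 0xE0000000) == 0x20000000 and ((nversion >> bit) & 1) == 1

    def locks_in(period_versions):
        assert len(period_versions) == WINDOW
        return sum(1 for v in period_versions if signals(v)) >= THRESHOLD

    # First signalling period in bip68-112-113-p2p.py: 100 of 144 blocks signal, so the
    # deployment stays STARTED; the next period reaches 108 and moves to LOCKED_IN, and the
    # deployment becomes ACTIVE at the end of the following 144-block period.
    assert not locks_in([0x20000001] * 100 + [0x00000004] * 44)
    assert locks_in([0x20000001] * 108 + [0x00000004] * 36)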