Diffstat (limited to 'qa/rpc-tests')
-rw-r--r--  qa/rpc-tests/README.md                       115
-rwxr-xr-x  qa/rpc-tests/bip65-cltv-p2p.py               175
-rwxr-xr-x  qa/rpc-tests/bip65-cltv.py                    89
-rwxr-xr-x  qa/rpc-tests/bipdersig-p2p.py                 23
-rwxr-xr-x  qa/rpc-tests/blockchain.py                    52
-rwxr-xr-x  qa/rpc-tests/decodescript.py                  72
-rwxr-xr-x  qa/rpc-tests/fundrawtransaction.py            69
-rwxr-xr-x  qa/rpc-tests/httpbasics.py                    19
-rwxr-xr-x  qa/rpc-tests/invalidblockrequest.py           12
-rwxr-xr-x  qa/rpc-tests/keypool.py                       15
-rwxr-xr-x  qa/rpc-tests/listtransactions.py              10
-rwxr-xr-x  qa/rpc-tests/maxuploadtarget.py              248
-rwxr-xr-x  qa/rpc-tests/mempool_packages.py             178
-rwxr-xr-x  qa/rpc-tests/nodehandling.py                   8
-rwxr-xr-x  qa/rpc-tests/p2p-acceptblock.py               91
-rwxr-xr-x  qa/rpc-tests/p2p-fullblocktest.py            272
-rwxr-xr-x  qa/rpc-tests/pruning.py                        5
-rwxr-xr-x  qa/rpc-tests/rest.py                          60
-rwxr-xr-x  qa/rpc-tests/script_test.py                   20
-rw-r--r--  qa/rpc-tests/test_framework/authproxy.py      36
-rw-r--r--  qa/rpc-tests/test_framework/blockstore.py     23
-rw-r--r--  qa/rpc-tests/test_framework/blocktools.py     20
-rwxr-xr-x  qa/rpc-tests/test_framework/comptool.py      152
-rw-r--r--  qa/rpc-tests/test_framework/key.py           215
-rw-r--r--  qa/rpc-tests/test_framework/script.py          4
-rw-r--r--  qa/rpc-tests/test_framework/util.py           15
-rwxr-xr-x  qa/rpc-tests/wallet.py                        66
-rwxr-xr-x  qa/rpc-tests/zmq_test.py                      91
28 files changed, 1975 insertions(+), 180 deletions(-)
diff --git a/qa/rpc-tests/README.md b/qa/rpc-tests/README.md
index cfda8fe91f..d2db00362f 100644
--- a/qa/rpc-tests/README.md
+++ b/qa/rpc-tests/README.md
@@ -1,5 +1,5 @@
-Regression tests of RPC interface
-=================================
+Regression tests
+================
### [python-bitcoinrpc](https://github.com/jgarzik/python-bitcoinrpc)
Git subtree of [https://github.com/jgarzik/python-bitcoinrpc](https://github.com/jgarzik/python-bitcoinrpc).
@@ -12,12 +12,38 @@ Base class for new regression tests.
### [test_framework/util.py](test_framework/util.py)
Generally useful functions.
+### [test_framework/mininode.py](test_framework/mininode.py)
+Basic code to support p2p connectivity to a bitcoind.
+
+### [test_framework/comptool.py](test_framework/comptool.py)
+Framework for comparison-tool style p2p tests.
+
+### [test_framework/script.py](test_framework/script.py)
+Utilities for manipulating transaction scripts (originally from python-bitcoinlib)
+
+### [test_framework/blockstore.py](test_framework/blockstore.py)
+Implements disk-backed block and tx storage.
+
+### [test_framework/key.py](test_framework/key.py)
+Wrapper around OpenSSL EC_Key (originally from python-bitcoinlib)
+
+### [test_framework/bignum.py](test_framework/bignum.py)
+Helpers for script.py
+
+### [test_framework/blocktools.py](test_framework/blocktools.py)
+Helper functions for creating blocks and transactions.
+
+
Notes
=====
-You can run a single test by calling `qa/pull-tester/rpc-tests.sh <testname>`.
+You can run any single test by calling `qa/pull-tester/rpc-tests.py <testname>`.
+
+Or you can run any combination of tests by calling `qa/pull-tester/rpc-tests.py <testname1> <testname2> <testname3> ...`
+
+Run the regression test suite with `qa/pull-tester/rpc-tests.py`
-Run all possible tests with `qa/pull-tester/rpc-tests.sh -extended`.
+Run all possible tests with `qa/pull-tester/rpc-tests.py -extended`
Possible options:
@@ -31,7 +57,7 @@ Possible options:
--tracerpc Print out all RPC calls as they are made
```
-If you set the environment variable `PYTHON_DEBUG=1` you will get some debug output (example: `PYTHON_DEBUG=1 qa/pull-tester/rpc-tests.sh wallet`).
+If you set the environment variable `PYTHON_DEBUG=1` you will get some debug output (example: `PYTHON_DEBUG=1 qa/pull-tester/rpc-tests.py wallet`).
A 200-block -regtest blockchain and wallets for four nodes
is created the first time a regression test is run and
@@ -49,3 +75,82 @@ to recover with:
rm -rf cache
killall bitcoind
```
+
+P2P test design notes
+---------------------
+
+## Mininode
+
+* ```mininode.py``` contains all the definitions for objects that pass
+over the network (```CBlock```, ```CTransaction```, etc, along with the network-level
+wrappers for them, ```msg_block```, ```msg_tx```, etc).
+
+* P2P tests have two threads. One thread handles all network communication
+with the bitcoind(s) being tested (using python's asyncore package); the other
+implements the test logic.
+
+* ```NodeConn``` is the class used to connect to a bitcoind. If you implement
+a callback class that derives from ```NodeConnCB``` and pass that to the
+```NodeConn``` object, your code will receive the appropriate callbacks when
+events of interest arrive. NOTE: be sure to call
+```self.create_callback_map()``` in your derived classes' ```__init__```
+function, so that the correct mappings are set up between p2p messages and your
+callback functions.
+
+* You can pass the same handler to multiple ```NodeConn```s if you like, or pass
+different ones to each -- whatever makes the most sense for your test.
+
+* Call ```NetworkThread.start()``` after all ```NodeConn``` objects are created to
+start the networking thread. (Continue with the test logic in your existing
+thread.)
+
+* RPC calls are available in p2p tests.
+
+* Can be used to write free-form tests, where specific p2p-protocol behavior
+is tested. Examples: ```p2p-acceptblock.py```, ```maxblocksinflight.py```; a
+minimal connection sketch follows below.
+
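+A minimal mininode-based peer, wired up as described above, might look like the
+sketch below (illustrative only: ```ExampleNode``` is not part of the framework,
+and the calls mirror their use in ```maxuploadtarget.py``` added in this change):
+
+```python
+from test_framework.mininode import NodeConn, NodeConnCB, NetworkThread, msg_ping
+from test_framework.util import p2p_port
+
+class ExampleNode(NodeConnCB):
+    def __init__(self):
+        NodeConnCB.__init__(self)
+        self.create_callback_map()  # map p2p messages to the on_* methods below
+        self.connection = None
+
+    def add_connection(self, conn):
+        self.connection = conn
+
+    def on_inv(self, conn, message):
+        pass  # called whenever the node announces new blocks/transactions
+
+# Inside a test's run_test(), with self.nodes[0] a running bitcoind under test:
+#   peer = ExampleNode()
+#   peer.add_connection(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], peer))
+#   NetworkThread().start()                          # network I/O runs in its own thread
+#   peer.connection.send_message(msg_ping(nonce=1))  # test logic stays in this thread
+```
+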
+## Comptool
+
+* Testing framework for writing tests that compare the block/tx acceptance
+behavior of a bitcoind against 1 or more other bitcoind instances, or against
+known outcomes, or both.
+
+* Set the ```num_nodes``` variable (defined in ```ComparisonTestFramework```) to start up
+1 or more nodes. If using 1 node, then ```--testbinary``` can be used as a command line
+option to change the bitcoind binary used by the test. If using 2 or more nodes,
+then ```--refbinary``` can be optionally used to change the bitcoind that will be used
+on nodes 2 and up.
+
+* Implement a (generator) function called ```get_tests()``` which yields ```TestInstance```s.
+Each ```TestInstance``` consists of:
+ - a list of ```[object, outcome, hash]``` entries
+ * ```object``` is a ```CBlock```, ```CTransaction```, or
+ ```CBlockHeader```. ```CBlock```s and ```CTransaction```s are tested for
+ acceptance. ```CBlockHeader```s can be used so that the test runner can deliver
+ complete headers chains when the bitcoind requests them, to allow writing
+ tests where blocks can be delivered out of order but still processed by
+ headers-first bitcoinds.
+ * ```outcome``` is ```True```, ```False```, or ```None```. If ```True```
+ or ```False```, the tip is compared with the expected tip -- either the
+ block passed in, or the hash specified as the optional 3rd entry. If
+ ```None``` is specified, then the test will compare all the bitcoinds
+ being tested to see if they all agree on what the best tip is.
+ * ```hash``` is the block hash of the tip to compare against. Optional to
+ specify; if left out then the hash of the block passed in will be used as
+ the expected tip. This allows for specifying an expected tip while testing
+ the handling of either invalid blocks or blocks delivered out of order,
+ which complete a longer chain.
+ - ```sync_every_block```: ```True/False```. If ```False```, then all blocks
+ are inv'ed together, and the test runner waits until the node receives the
+ last one, and tests only the last block for tip acceptance using the
+ outcome and specified tip. If ```True```, then each block is tested in
+ sequence and synced (this is slower when processing many blocks).
+ - ```sync_every_transaction```: ```True/False```. Analogous to
+ ```sync_every_block```, except if the outcome on the last tx is "None",
+ then the contents of the entire mempool are compared across all bitcoind
+ connections. If ```True``` or ```False```, then only the last tx's
+ acceptance is tested against the given outcome.
+
+* For examples of tests written in this framework, see
+ ```invalidblockrequest.py``` and ```p2p-fullblocktest.py```; a minimal sketch follows below.
+
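+The sketch below shows the minimal shape of such a test (illustrative only:
+```ExampleCompareTest``` is not an existing test, it assumes the default
+```ComparisonTestFramework``` chain/node setup, and the helper calls mirror
+```bip65-cltv-p2p.py``` added in this change):
+
+```python
+from test_framework.test_framework import ComparisonTestFramework
+from test_framework.comptool import TestManager, TestInstance
+from test_framework.blocktools import create_block, create_coinbase
+from test_framework.mininode import NetworkThread
+import time
+
+class ExampleCompareTest(ComparisonTestFramework):
+    def __init__(self):
+        self.num_nodes = 1
+
+    def run_test(self):
+        test = TestManager(self, self.options.tmpdir)
+        test.add_all_connections(self.nodes)
+        NetworkThread().start()  # p2p handling runs in its own thread
+        test.run()               # drives get_tests() below
+
+    def get_tests(self):
+        self.nodes[0].generate(1)
+        tip = int("0x" + self.nodes[0].getbestblockhash() + "L", 0)
+        # Build one block on the current tip and expect it to become the new tip.
+        block = create_block(tip, create_coinbase(2), int(time.time()) + 1)
+        block.solve()
+        yield TestInstance([[block, True]])
+
+if __name__ == '__main__':
+    ExampleCompareTest().main()
+```
+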
diff --git a/qa/rpc-tests/bip65-cltv-p2p.py b/qa/rpc-tests/bip65-cltv-p2p.py
new file mode 100755
index 0000000000..1f8548c219
--- /dev/null
+++ b/qa/rpc-tests/bip65-cltv-p2p.py
@@ -0,0 +1,175 @@
+#!/usr/bin/env python2
+#
+# Distributed under the MIT/X11 software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+#
+
+from test_framework.test_framework import ComparisonTestFramework
+from test_framework.util import *
+from test_framework.mininode import CTransaction, NetworkThread
+from test_framework.blocktools import create_coinbase, create_block
+from test_framework.comptool import TestInstance, TestManager
+from test_framework.script import CScript, OP_1NEGATE, OP_NOP2, OP_DROP
+from binascii import hexlify, unhexlify
+import cStringIO
+import time
+
+def cltv_invalidate(tx):
+ '''Modify the signature in vin 0 of the tx to fail CLTV
+
+ Prepends -1 CLTV DROP in the scriptSig itself.
+ '''
+ tx.vin[0].scriptSig = CScript([OP_1NEGATE, OP_NOP2, OP_DROP] +
+ list(CScript(tx.vin[0].scriptSig)))
+
+'''
+This test is meant to exercise BIP65 (CHECKLOCKTIMEVERIFY)
+Connect to a single node.
+Mine 2 (version 3) blocks (save the coinbases for later).
+Generate 98 more version 3 blocks, verify the node accepts.
+Mine 749 version 4 blocks, verify the node accepts.
+Check that the new CLTV rules are not enforced on the 750th version 4 block.
+Check that the new CLTV rules are enforced on the 751st version 4 block.
+Mine 199 new version blocks.
+Mine 1 old-version block.
+Mine 1 new version block.
+Mine 1 old version block, see that the node rejects.
+'''
+
+class BIP65Test(ComparisonTestFramework):
+
+ def __init__(self):
+ self.num_nodes = 1
+
+ def setup_network(self):
+ # Must set the blockversion for this test
+ self.nodes = start_nodes(1, self.options.tmpdir,
+ extra_args=[['-debug', '-whitelist=127.0.0.1', '-blockversion=3']],
+ binary=[self.options.testbinary])
+
+ def run_test(self):
+ test = TestManager(self, self.options.tmpdir)
+ test.add_all_connections(self.nodes)
+ NetworkThread().start() # Start up network handling in another thread
+ test.run()
+
+ def create_transaction(self, node, coinbase, to_address, amount):
+ from_txid = node.getblock(coinbase)['tx'][0]
+ inputs = [{ "txid" : from_txid, "vout" : 0}]
+ outputs = { to_address : amount }
+ rawtx = node.createrawtransaction(inputs, outputs)
+ signresult = node.signrawtransaction(rawtx)
+ tx = CTransaction()
+ f = cStringIO.StringIO(unhexlify(signresult['hex']))
+ tx.deserialize(f)
+ return tx
+
+ def get_tests(self):
+
+ self.coinbase_blocks = self.nodes[0].generate(2)
+ self.tip = int ("0x" + self.nodes[0].getbestblockhash() + "L", 0)
+ self.nodeaddress = self.nodes[0].getnewaddress()
+ self.last_block_time = time.time()
+
+ ''' 98 more version 3 blocks '''
+ test_blocks = []
+ for i in xrange(98):
+ block = create_block(self.tip, create_coinbase(2), self.last_block_time + 1)
+ block.nVersion = 3
+ block.rehash()
+ block.solve()
+ test_blocks.append([block, True])
+ self.last_block_time += 1
+ self.tip = block.sha256
+ yield TestInstance(test_blocks, sync_every_block=False)
+
+ ''' Mine 749 version 4 blocks '''
+ test_blocks = []
+ for i in xrange(749):
+ block = create_block(self.tip, create_coinbase(2), self.last_block_time + 1)
+ block.nVersion = 4
+ block.rehash()
+ block.solve()
+ test_blocks.append([block, True])
+ self.last_block_time += 1
+ self.tip = block.sha256
+ yield TestInstance(test_blocks, sync_every_block=False)
+
+ '''
+ Check that the new CLTV rules are not enforced in the 750th
+ version 4 block.
+ '''
+ spendtx = self.create_transaction(self.nodes[0],
+ self.coinbase_blocks[0], self.nodeaddress, 1.0)
+ cltv_invalidate(spendtx)
+ spendtx.rehash()
+
+ block = create_block(self.tip, create_coinbase(2), self.last_block_time + 1)
+ block.nVersion = 4
+ block.vtx.append(spendtx)
+ block.hashMerkleRoot = block.calc_merkle_root()
+ block.rehash()
+ block.solve()
+
+ self.last_block_time += 1
+ self.tip = block.sha256
+ yield TestInstance([[block, True]])
+
+ '''
+ Check that the new CLTV rules are enforced in the 751st version 4
+ block.
+ '''
+ spendtx = self.create_transaction(self.nodes[0],
+ self.coinbase_blocks[1], self.nodeaddress, 1.0)
+ cltv_invalidate(spendtx)
+ spendtx.rehash()
+
+ block = create_block(self.tip, create_coinbase(1), self.last_block_time + 1)
+ block.nVersion = 4
+ block.vtx.append(spendtx)
+ block.hashMerkleRoot = block.calc_merkle_root()
+ block.rehash()
+ block.solve()
+ self.last_block_time += 1
+ yield TestInstance([[block, False]])
+
+ ''' Mine 199 new version blocks on last valid tip '''
+ test_blocks = []
+ for i in xrange(199):
+ block = create_block(self.tip, create_coinbase(1), self.last_block_time + 1)
+ block.nVersion = 4
+ block.rehash()
+ block.solve()
+ test_blocks.append([block, True])
+ self.last_block_time += 1
+ self.tip = block.sha256
+ yield TestInstance(test_blocks, sync_every_block=False)
+
+ ''' Mine 1 old version block '''
+ block = create_block(self.tip, create_coinbase(1), self.last_block_time + 1)
+ block.nVersion = 3
+ block.rehash()
+ block.solve()
+ self.last_block_time += 1
+ self.tip = block.sha256
+ yield TestInstance([[block, True]])
+
+ ''' Mine 1 new version block '''
+ block = create_block(self.tip, create_coinbase(1), self.last_block_time + 1)
+ block.nVersion = 4
+ block.rehash()
+ block.solve()
+ self.last_block_time += 1
+ self.tip = block.sha256
+ yield TestInstance([[block, True]])
+
+ ''' Mine 1 old version block, should be invalid '''
+ block = create_block(self.tip, create_coinbase(1), self.last_block_time + 1)
+ block.nVersion = 3
+ block.rehash()
+ block.solve()
+ self.last_block_time += 1
+ yield TestInstance([[block, False]])
+
+if __name__ == '__main__':
+ BIP65Test().main()
diff --git a/qa/rpc-tests/bip65-cltv.py b/qa/rpc-tests/bip65-cltv.py
new file mode 100755
index 0000000000..e90e11e6a7
--- /dev/null
+++ b/qa/rpc-tests/bip65-cltv.py
@@ -0,0 +1,89 @@
+#!/usr/bin/env python2
+# Copyright (c) 2015 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+
+#
+# Test the CHECKLOCKTIMEVERIFY (BIP65) soft-fork logic
+#
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import *
+import os
+import shutil
+
+class BIP65Test(BitcoinTestFramework):
+
+ def setup_network(self):
+ self.nodes = []
+ self.nodes.append(start_node(0, self.options.tmpdir, []))
+ self.nodes.append(start_node(1, self.options.tmpdir, ["-blockversion=3"]))
+ self.nodes.append(start_node(2, self.options.tmpdir, ["-blockversion=4"]))
+ connect_nodes(self.nodes[1], 0)
+ connect_nodes(self.nodes[2], 0)
+ self.is_network_split = False
+ self.sync_all()
+
+ def run_test(self):
+ cnt = self.nodes[0].getblockcount()
+
+ # Mine some old-version blocks
+ self.nodes[1].generate(100)
+ self.sync_all()
+ if (self.nodes[0].getblockcount() != cnt + 100):
+ raise AssertionError("Failed to mine 100 version=3 blocks")
+
+ # Mine 750 new-version blocks
+ for i in xrange(15):
+ self.nodes[2].generate(50)
+ self.sync_all()
+ if (self.nodes[0].getblockcount() != cnt + 850):
+ raise AssertionError("Failed to mine 750 version=4 blocks")
+
+ # TODO: check that new CHECKLOCKTIMEVERIFY rules are not enforced
+
+ # Mine 1 new-version block
+ self.nodes[2].generate(1)
+ self.sync_all()
+ if (self.nodes[0].getblockcount() != cnt + 851):
+ raise AssertionError("Failed to mine a version=4 block")
+
+ # TODO: check that new CHECKLOCKTIMEVERIFY rules are enforced
+
+ # Mine 198 new-version blocks
+ for i in xrange(2):
+ self.nodes[2].generate(99)
+ self.sync_all()
+ if (self.nodes[0].getblockcount() != cnt + 1049):
+ raise AssertionError("Failed to mine 198 version=4 blocks")
+
+ # Mine 1 old-version block
+ self.nodes[1].generate(1)
+ self.sync_all()
+ if (self.nodes[0].getblockcount() != cnt + 1050):
+ raise AssertionError("Failed to mine a version=3 block after 949 version=4 blocks")
+
+ # Mine 1 new-version block
+ self.nodes[2].generate(1)
+ self.sync_all()
+ if (self.nodes[0].getblockcount() != cnt + 1051):
+ raise AssertionError("Failed to mine a version=4 block")
+
+ # Mine 1 old-version block
+ try:
+ self.nodes[1].generate(1)
+ raise AssertionError("Succeeded to mine a version=3 block after 950 version=4 blocks")
+ except JSONRPCException:
+ pass
+ self.sync_all()
+ if (self.nodes[0].getblockcount() != cnt + 1051):
+ raise AssertionError("Accepted a version=3 block after 950 version=4 blocks")
+
+ # Mine 1 new-version block
+ self.nodes[2].generate(1)
+ self.sync_all()
+ if (self.nodes[0].getblockcount() != cnt + 1052):
+ raise AssertionError("Failed to mine a version=4 block")
+
+if __name__ == '__main__':
+ BIP65Test().main()
diff --git a/qa/rpc-tests/bipdersig-p2p.py b/qa/rpc-tests/bipdersig-p2p.py
index 41717377b2..ec1678cc2c 100755
--- a/qa/rpc-tests/bipdersig-p2p.py
+++ b/qa/rpc-tests/bipdersig-p2p.py
@@ -75,6 +75,7 @@ class BIP66Test(ComparisonTestFramework):
def get_tests(self):
self.coinbase_blocks = self.nodes[0].generate(2)
+ height = 3 # height of the next block to build
self.tip = int ("0x" + self.nodes[0].getbestblockhash() + "L", 0)
self.nodeaddress = self.nodes[0].getnewaddress()
self.last_block_time = time.time()
@@ -82,25 +83,27 @@ class BIP66Test(ComparisonTestFramework):
''' 98 more version 2 blocks '''
test_blocks = []
for i in xrange(98):
- block = create_block(self.tip, create_coinbase(2), self.last_block_time + 1)
+ block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
block.nVersion = 2
block.rehash()
block.solve()
test_blocks.append([block, True])
self.last_block_time += 1
self.tip = block.sha256
+ height += 1
yield TestInstance(test_blocks, sync_every_block=False)
''' Mine 749 version 3 blocks '''
test_blocks = []
for i in xrange(749):
- block = create_block(self.tip, create_coinbase(2), self.last_block_time + 1)
+ block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
block.nVersion = 3
block.rehash()
block.solve()
test_blocks.append([block, True])
self.last_block_time += 1
self.tip = block.sha256
+ height += 1
yield TestInstance(test_blocks, sync_every_block=False)
'''
@@ -112,7 +115,7 @@ class BIP66Test(ComparisonTestFramework):
unDERify(spendtx)
spendtx.rehash()
- block = create_block(self.tip, create_coinbase(2), self.last_block_time + 1)
+ block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
block.nVersion = 3
block.vtx.append(spendtx)
block.hashMerkleRoot = block.calc_merkle_root()
@@ -121,6 +124,7 @@ class BIP66Test(ComparisonTestFramework):
self.last_block_time += 1
self.tip = block.sha256
+ height += 1
yield TestInstance([[block, True]])
'''
@@ -132,7 +136,7 @@ class BIP66Test(ComparisonTestFramework):
unDERify(spendtx)
spendtx.rehash()
- block = create_block(self.tip, create_coinbase(1), self.last_block_time + 1)
+ block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
block.nVersion = 3
block.vtx.append(spendtx)
block.hashMerkleRoot = block.calc_merkle_root()
@@ -144,35 +148,38 @@ class BIP66Test(ComparisonTestFramework):
''' Mine 199 new version blocks on last valid tip '''
test_blocks = []
for i in xrange(199):
- block = create_block(self.tip, create_coinbase(1), self.last_block_time + 1)
+ block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
block.nVersion = 3
block.rehash()
block.solve()
test_blocks.append([block, True])
self.last_block_time += 1
self.tip = block.sha256
+ height += 1
yield TestInstance(test_blocks, sync_every_block=False)
''' Mine 1 old version block '''
- block = create_block(self.tip, create_coinbase(1), self.last_block_time + 1)
+ block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
block.nVersion = 2
block.rehash()
block.solve()
self.last_block_time += 1
self.tip = block.sha256
+ height += 1
yield TestInstance([[block, True]])
''' Mine 1 new version block '''
- block = create_block(self.tip, create_coinbase(1), self.last_block_time + 1)
+ block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
block.nVersion = 3
block.rehash()
block.solve()
self.last_block_time += 1
self.tip = block.sha256
+ height += 1
yield TestInstance([[block, True]])
''' Mine 1 old version block, should be invalid '''
- block = create_block(self.tip, create_coinbase(1), self.last_block_time + 1)
+ block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
block.nVersion = 2
block.rehash()
block.solve()
diff --git a/qa/rpc-tests/blockchain.py b/qa/rpc-tests/blockchain.py
new file mode 100755
index 0000000000..a5c98b777e
--- /dev/null
+++ b/qa/rpc-tests/blockchain.py
@@ -0,0 +1,52 @@
+#!/usr/bin/env python2
+# Copyright (c) 2014 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+
+#
+# Test RPC calls related to blockchain state.
+#
+
+import decimal
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import (
+ initialize_chain,
+ assert_equal,
+ start_nodes,
+ connect_nodes_bi,
+)
+
+class BlockchainTest(BitcoinTestFramework):
+ """
+ Test blockchain-related RPC calls:
+
+ - gettxoutsetinfo
+
+ """
+
+ def setup_chain(self):
+ print("Initializing test directory " + self.options.tmpdir)
+ initialize_chain(self.options.tmpdir)
+
+ def setup_network(self, split=False):
+ self.nodes = start_nodes(2, self.options.tmpdir)
+ connect_nodes_bi(self.nodes, 0, 1)
+ self.is_network_split = False
+ self.sync_all()
+
+ def run_test(self):
+ node = self.nodes[0]
+ res = node.gettxoutsetinfo()
+
+ assert_equal(res[u'total_amount'], decimal.Decimal('8725.00000000'))
+ assert_equal(res[u'transactions'], 200)
+ assert_equal(res[u'height'], 200)
+ assert_equal(res[u'txouts'], 200)
+ assert_equal(res[u'bytes_serialized'], 13000)
+ assert_equal(len(res[u'bestblock']), 64)
+ assert_equal(len(res[u'hash_serialized']), 64)
+
+
+if __name__ == '__main__':
+ BlockchainTest().main()
diff --git a/qa/rpc-tests/decodescript.py b/qa/rpc-tests/decodescript.py
index ce3bc94ef7..4bca623380 100755
--- a/qa/rpc-tests/decodescript.py
+++ b/qa/rpc-tests/decodescript.py
@@ -5,6 +5,9 @@
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
+from test_framework.mininode import *
+from binascii import hexlify, unhexlify
+from cStringIO import StringIO
class DecodeScriptTest(BitcoinTestFramework):
"""Tests decoding scripts via RPC command "decodescript"."""
@@ -107,10 +110,77 @@ class DecodeScriptTest(BitcoinTestFramework):
rpc_result = self.nodes[0].decodescript('63' + push_public_key + 'ad670320a107b17568' + push_public_key + 'ac')
assert_equal('OP_IF ' + public_key + ' OP_CHECKSIGVERIFY OP_ELSE 500000 OP_NOP2 OP_DROP OP_ENDIF ' + public_key + ' OP_CHECKSIG', rpc_result['asm'])
+ def decoderawtransaction_asm_sighashtype(self):
+ """Tests decoding scripts via RPC command "decoderawtransaction".
+
+ This test is in with the "decodescript" tests because they are testing the same "asm" script decodes.
+ """
+
+ # this test case uses a random plain vanilla mainnet transaction with a single P2PKH input and output
+ tx = '0100000001696a20784a2c70143f634e95227dbdfdf0ecd51647052e70854512235f5986ca010000008a47304402207174775824bec6c2700023309a168231ec80b82c6069282f5133e6f11cbb04460220570edc55c7c5da2ca687ebd0372d3546ebc3f810516a002350cac72dfe192dfb014104d3f898e6487787910a690410b7a917ef198905c27fb9d3b0a42da12aceae0544fc7088d239d9a48f2828a15a09e84043001f27cc80d162cb95404e1210161536ffffffff0100e1f505000000001976a914eb6c6e0cdb2d256a32d97b8df1fc75d1920d9bca88ac00000000'
+ rpc_result = self.nodes[0].decoderawtransaction(tx)
+ assert_equal('304402207174775824bec6c2700023309a168231ec80b82c6069282f5133e6f11cbb04460220570edc55c7c5da2ca687ebd0372d3546ebc3f810516a002350cac72dfe192dfb[ALL] 04d3f898e6487787910a690410b7a917ef198905c27fb9d3b0a42da12aceae0544fc7088d239d9a48f2828a15a09e84043001f27cc80d162cb95404e1210161536', rpc_result['vin'][0]['scriptSig']['asm'])
+
+ # this test case uses a mainnet transaction that has a P2SH input and both P2PKH and P2SH outputs.
+ # it's from James D'Angelo's awesome introductory videos about multisig: https://www.youtube.com/watch?v=zIbUSaZBJgU and https://www.youtube.com/watch?v=OSA1pwlaypc
+ # verify that we have not altered scriptPubKey decoding.
+ tx = '01000000018d1f5635abd06e2c7e2ddf58dc85b3de111e4ad6e0ab51bb0dcf5e84126d927300000000fdfe0000483045022100ae3b4e589dfc9d48cb82d41008dc5fa6a86f94d5c54f9935531924602730ab8002202f88cf464414c4ed9fa11b773c5ee944f66e9b05cc1e51d97abc22ce098937ea01483045022100b44883be035600e9328a01b66c7d8439b74db64187e76b99a68f7893b701d5380220225bf286493e4c4adcf928c40f785422572eb232f84a0b83b0dea823c3a19c75014c695221020743d44be989540d27b1b4bbbcfd17721c337cb6bc9af20eb8a32520b393532f2102c0120a1dda9e51a938d39ddd9fe0ebc45ea97e1d27a7cbd671d5431416d3dd87210213820eb3d5f509d7438c9eeecb4157b2f595105e7cd564b3cdbb9ead3da41eed53aeffffffff02611e0000000000001976a914dc863734a218bfe83ef770ee9d41a27f824a6e5688acee2a02000000000017a9142a5edea39971049a540474c6a99edf0aa4074c588700000000'
+ rpc_result = self.nodes[0].decoderawtransaction(tx)
+ assert_equal('8e3730608c3b0bb5df54f09076e196bc292a8e39a78e73b44b6ba08c78f5cbb0', rpc_result['txid'])
+ assert_equal('0 3045022100ae3b4e589dfc9d48cb82d41008dc5fa6a86f94d5c54f9935531924602730ab8002202f88cf464414c4ed9fa11b773c5ee944f66e9b05cc1e51d97abc22ce098937ea[ALL] 3045022100b44883be035600e9328a01b66c7d8439b74db64187e76b99a68f7893b701d5380220225bf286493e4c4adcf928c40f785422572eb232f84a0b83b0dea823c3a19c75[ALL] 5221020743d44be989540d27b1b4bbbcfd17721c337cb6bc9af20eb8a32520b393532f2102c0120a1dda9e51a938d39ddd9fe0ebc45ea97e1d27a7cbd671d5431416d3dd87210213820eb3d5f509d7438c9eeecb4157b2f595105e7cd564b3cdbb9ead3da41eed53ae', rpc_result['vin'][0]['scriptSig']['asm'])
+ assert_equal('OP_DUP OP_HASH160 dc863734a218bfe83ef770ee9d41a27f824a6e56 OP_EQUALVERIFY OP_CHECKSIG', rpc_result['vout'][0]['scriptPubKey']['asm'])
+ assert_equal('OP_HASH160 2a5edea39971049a540474c6a99edf0aa4074c58 OP_EQUAL', rpc_result['vout'][1]['scriptPubKey']['asm'])
+ txSave = CTransaction()
+ txSave.deserialize(StringIO(unhexlify(tx)))
+
+ # make sure that a specifically crafted op_return value will not pass all the IsDERSignature checks and then get decoded as a sighash type
+ tx = '01000000015ded05872fdbda629c7d3d02b194763ce3b9b1535ea884e3c8e765d42e316724020000006b48304502204c10d4064885c42638cbff3585915b322de33762598321145ba033fc796971e2022100bb153ad3baa8b757e30a2175bd32852d2e1cb9080f84d7e32fcdfd667934ef1b012103163c0ff73511ea1743fb5b98384a2ff09dd06949488028fd819f4d83f56264efffffffff0200000000000000000b6a0930060201000201000180380100000000001976a9141cabd296e753837c086da7a45a6c2fe0d49d7b7b88ac00000000'
+ rpc_result = self.nodes[0].decoderawtransaction(tx)
+ assert_equal('OP_RETURN 300602010002010001', rpc_result['vout'][0]['scriptPubKey']['asm'])
+
+ # verify that we have not altered scriptPubKey processing even of a specially crafted P2PKH pubkeyhash and P2SH redeem script hash that is made to pass the der signature checks
+ tx = '01000000018d1f5635abd06e2c7e2ddf58dc85b3de111e4ad6e0ab51bb0dcf5e84126d927300000000fdfe0000483045022100ae3b4e589dfc9d48cb82d41008dc5fa6a86f94d5c54f9935531924602730ab8002202f88cf464414c4ed9fa11b773c5ee944f66e9b05cc1e51d97abc22ce098937ea01483045022100b44883be035600e9328a01b66c7d8439b74db64187e76b99a68f7893b701d5380220225bf286493e4c4adcf928c40f785422572eb232f84a0b83b0dea823c3a19c75014c695221020743d44be989540d27b1b4bbbcfd17721c337cb6bc9af20eb8a32520b393532f2102c0120a1dda9e51a938d39ddd9fe0ebc45ea97e1d27a7cbd671d5431416d3dd87210213820eb3d5f509d7438c9eeecb4157b2f595105e7cd564b3cdbb9ead3da41eed53aeffffffff02611e0000000000001976a914301102070101010101010102060101010101010188acee2a02000000000017a91430110207010101010101010206010101010101018700000000'
+ rpc_result = self.nodes[0].decoderawtransaction(tx)
+ assert_equal('OP_DUP OP_HASH160 3011020701010101010101020601010101010101 OP_EQUALVERIFY OP_CHECKSIG', rpc_result['vout'][0]['scriptPubKey']['asm'])
+ assert_equal('OP_HASH160 3011020701010101010101020601010101010101 OP_EQUAL', rpc_result['vout'][1]['scriptPubKey']['asm'])
+
+ # some more full transaction tests of varying specific scriptSigs. used instead of
+ # tests in decodescript_script_sig because the decodescript RPC is specifically
+ # for working on scriptPubKeys (argh!).
+ push_signature = hexlify(txSave.vin[0].scriptSig)[2:(0x48*2+4)]
+ signature = push_signature[2:]
+ der_signature = signature[:-2]
+ signature_sighash_decoded = der_signature + '[ALL]'
+ signature_2 = der_signature + '82'
+ push_signature_2 = '48' + signature_2
+ signature_2_sighash_decoded = der_signature + '[NONE|ANYONECANPAY]'
+
+ # 1) P2PK scriptSig
+ txSave.vin[0].scriptSig = unhexlify(push_signature)
+ rpc_result = self.nodes[0].decoderawtransaction(hexlify(txSave.serialize()))
+ assert_equal(signature_sighash_decoded, rpc_result['vin'][0]['scriptSig']['asm'])
+
+ # make sure that the sighash decodes come out correctly for a more complex / lesser used case.
+ txSave.vin[0].scriptSig = unhexlify(push_signature_2)
+ rpc_result = self.nodes[0].decoderawtransaction(hexlify(txSave.serialize()))
+ assert_equal(signature_2_sighash_decoded, rpc_result['vin'][0]['scriptSig']['asm'])
+
+ # 2) multisig scriptSig
+ txSave.vin[0].scriptSig = unhexlify('00' + push_signature + push_signature_2)
+ rpc_result = self.nodes[0].decoderawtransaction(hexlify(txSave.serialize()))
+ assert_equal('0 ' + signature_sighash_decoded + ' ' + signature_2_sighash_decoded, rpc_result['vin'][0]['scriptSig']['asm'])
+
+ # 3) test a scriptSig that contains more than push operations.
+ # in fact, it contains an OP_RETURN with data specially crafted to cause improper decode if the code does not catch it.
+ txSave.vin[0].scriptSig = unhexlify('6a143011020701010101010101020601010101010101')
+ rpc_result = self.nodes[0].decoderawtransaction(hexlify(txSave.serialize()))
+ assert_equal('OP_RETURN 3011020701010101010101020601010101010101', rpc_result['vin'][0]['scriptSig']['asm'])
+
def run_test(self):
self.decodescript_script_sig()
self.decodescript_script_pub_key()
+ self.decoderawtransaction_asm_sighashtype()
if __name__ == '__main__':
DecodeScriptTest().main()
-
diff --git a/qa/rpc-tests/fundrawtransaction.py b/qa/rpc-tests/fundrawtransaction.py
index ce52247b2e..93d13faa06 100755
--- a/qa/rpc-tests/fundrawtransaction.py
+++ b/qa/rpc-tests/fundrawtransaction.py
@@ -13,29 +13,47 @@ class RawTransactionsTest(BitcoinTestFramework):
def setup_chain(self):
print("Initializing test directory "+self.options.tmpdir)
- initialize_chain_clean(self.options.tmpdir, 3)
+ initialize_chain_clean(self.options.tmpdir, 4)
def setup_network(self, split=False):
- self.nodes = start_nodes(3, self.options.tmpdir)
+ self.nodes = start_nodes(4, self.options.tmpdir)
connect_nodes_bi(self.nodes,0,1)
connect_nodes_bi(self.nodes,1,2)
connect_nodes_bi(self.nodes,0,2)
+ connect_nodes_bi(self.nodes,0,3)
self.is_network_split=False
self.sync_all()
def run_test(self):
print "Mining blocks..."
- feeTolerance = Decimal(0.00000002) #if the fee's positive delta is higher than this value tests will fail, neg. delta always fail the tests
+
+ min_relay_tx_fee = self.nodes[0].getnetworkinfo()['relayfee']
+ # if the fee's positive delta is higher than this value tests will fail,
+ # neg. delta always fail the tests.
+ # The size of the signature of every input may be at most 2 bytes larger
+ # than a minimum sized signature.
+
+ # = 2 bytes * minRelayTxFeePerByte
+ feeTolerance = 2 * min_relay_tx_fee/1000
self.nodes[2].generate(1)
self.sync_all()
- self.nodes[0].generate(101)
+ self.nodes[0].generate(121)
self.sync_all()
+
+ watchonly_address = self.nodes[0].getnewaddress()
+ watchonly_pubkey = self.nodes[0].validateaddress(watchonly_address)["pubkey"]
+ watchonly_amount = 200
+ self.nodes[3].importpubkey(watchonly_pubkey, "", True)
+ watchonly_txid = self.nodes[0].sendtoaddress(watchonly_address, watchonly_amount)
+ self.nodes[0].sendtoaddress(self.nodes[3].getnewaddress(), watchonly_amount / 10);
+
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),1.5);
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),1.0);
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),5.0);
+
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
@@ -428,11 +446,12 @@ class RawTransactionsTest(BitcoinTestFramework):
stop_nodes(self.nodes)
wait_bitcoinds()
- self.nodes = start_nodes(3, self.options.tmpdir)
+ self.nodes = start_nodes(4, self.options.tmpdir)
connect_nodes_bi(self.nodes,0,1)
connect_nodes_bi(self.nodes,1,2)
connect_nodes_bi(self.nodes,0,2)
+ connect_nodes_bi(self.nodes,0,3)
self.is_network_split=False
self.sync_all()
@@ -541,5 +560,45 @@ class RawTransactionsTest(BitcoinTestFramework):
assert_equal(len(dec_tx['vout']), 2) # one change output added
+ ##################################################
+ # test a fundrawtransaction using only watchonly #
+ ##################################################
+
+ inputs = []
+ outputs = {self.nodes[2].getnewaddress() : watchonly_amount / 2}
+ rawtx = self.nodes[3].createrawtransaction(inputs, outputs)
+
+ result = self.nodes[3].fundrawtransaction(rawtx, True)
+ res_dec = self.nodes[0].decoderawtransaction(result["hex"])
+ assert_equal(len(res_dec["vin"]), 1)
+ assert_equal(res_dec["vin"][0]["txid"], watchonly_txid)
+
+ assert_equal("fee" in result.keys(), True)
+ assert_greater_than(result["changepos"], -1)
+
+ ###############################################################
+ # test fundrawtransaction using the entirety of watched funds #
+ ###############################################################
+
+ inputs = []
+ outputs = {self.nodes[2].getnewaddress() : watchonly_amount}
+ rawtx = self.nodes[3].createrawtransaction(inputs, outputs)
+
+ result = self.nodes[3].fundrawtransaction(rawtx, True)
+ res_dec = self.nodes[0].decoderawtransaction(result["hex"])
+ assert_equal(len(res_dec["vin"]), 2)
+ assert(res_dec["vin"][0]["txid"] == watchonly_txid or res_dec["vin"][1]["txid"] == watchonly_txid)
+
+ assert_greater_than(result["fee"], 0)
+ assert_greater_than(result["changepos"], -1)
+ assert_equal(result["fee"] + res_dec["vout"][result["changepos"]]["value"], watchonly_amount / 10)
+
+ signedtx = self.nodes[3].signrawtransaction(result["hex"])
+ assert(not signedtx["complete"])
+ signedtx = self.nodes[0].signrawtransaction(signedtx["hex"])
+ assert(signedtx["complete"])
+ self.nodes[0].sendrawtransaction(signedtx["hex"])
+
+
if __name__ == '__main__':
RawTransactionsTest().main()
diff --git a/qa/rpc-tests/httpbasics.py b/qa/rpc-tests/httpbasics.py
index 8ccb821286..7888114c54 100755
--- a/qa/rpc-tests/httpbasics.py
+++ b/qa/rpc-tests/httpbasics.py
@@ -22,7 +22,7 @@ except ImportError:
class HTTPBasicsTest (BitcoinTestFramework):
def setup_nodes(self):
- return start_nodes(4, self.options.tmpdir, extra_args=[['-rpckeepalive=1'], ['-rpckeepalive=0'], [], []])
+ return start_nodes(4, self.options.tmpdir)
def run_test(self):
@@ -84,9 +84,8 @@ class HTTPBasicsTest (BitcoinTestFramework):
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
out1 = conn.getresponse().read();
assert_equal('"error":null' in out1, True)
- assert_equal(conn.sock!=None, False) #connection must be closed because keep-alive was set to false
- #node2 (third node) is running with standard keep-alive parameters which means keep-alive is off
+ #node2 (third node) is running with standard keep-alive parameters which means keep-alive is on
urlNode2 = urlparse.urlparse(self.nodes[2].url)
authpair = urlNode2.username + ':' + urlNode2.password
headers = {"Authorization": "Basic " + base64.b64encode(authpair)}
@@ -98,5 +97,19 @@ class HTTPBasicsTest (BitcoinTestFramework):
assert_equal('"error":null' in out1, True)
assert_equal(conn.sock!=None, True) #connection must remain open because bitcoind should use keep-alive by default
+ # Check excessive request size
+ conn = httplib.HTTPConnection(urlNode2.hostname, urlNode2.port)
+ conn.connect()
+ conn.request('GET', '/' + ('x'*1000), '', headers)
+ out1 = conn.getresponse()
+ assert_equal(out1.status, httplib.NOT_FOUND)
+
+ conn = httplib.HTTPConnection(urlNode2.hostname, urlNode2.port)
+ conn.connect()
+ conn.request('GET', '/' + ('x'*10000), '', headers)
+ out1 = conn.getresponse()
+ assert_equal(out1.status, httplib.BAD_REQUEST)
+
+
if __name__ == '__main__':
HTTPBasicsTest ().main ()
diff --git a/qa/rpc-tests/invalidblockrequest.py b/qa/rpc-tests/invalidblockrequest.py
index 64b8e26395..6a7980cd45 100755
--- a/qa/rpc-tests/invalidblockrequest.py
+++ b/qa/rpc-tests/invalidblockrequest.py
@@ -46,12 +46,14 @@ class InvalidBlockRequestTest(ComparisonTestFramework):
'''
Create a new block with an anyone-can-spend coinbase
'''
- block = create_block(self.tip, create_coinbase(), self.block_time)
+ height = 1
+ block = create_block(self.tip, create_coinbase(height), self.block_time)
self.block_time += 1
block.solve()
# Save the coinbase for later
self.block1 = block
self.tip = block.sha256
+ height += 1
yield TestInstance([[block, True]])
'''
@@ -59,11 +61,12 @@ class InvalidBlockRequestTest(ComparisonTestFramework):
'''
test = TestInstance(sync_every_block=False)
for i in xrange(100):
- block = create_block(self.tip, create_coinbase(), self.block_time)
+ block = create_block(self.tip, create_coinbase(height), self.block_time)
block.solve()
self.tip = block.sha256
self.block_time += 1
test.blocks_and_transactions.append([block, True])
+ height += 1
yield test
'''
@@ -73,7 +76,7 @@ class InvalidBlockRequestTest(ComparisonTestFramework):
coinbase, spend of that spend). Duplicate the 3rd transaction to
leave merkle root and blockheader unchanged but invalidate the block.
'''
- block2 = create_block(self.tip, create_coinbase(), self.block_time)
+ block2 = create_block(self.tip, create_coinbase(height), self.block_time)
self.block_time += 1
# chr(81) is OP_TRUE
@@ -95,11 +98,12 @@ class InvalidBlockRequestTest(ComparisonTestFramework):
self.tip = block2.sha256
yield TestInstance([[block2, False], [block2_orig, True]])
+ height += 1
'''
Make sure that a totally screwed up block is not valid.
'''
- block3 = create_block(self.tip, create_coinbase(), self.block_time)
+ block3 = create_block(self.tip, create_coinbase(height), self.block_time)
self.block_time += 1
block3.vtx[0].vout[0].nValue = 100*100000000 # Too high!
block3.vtx[0].sha256=None
diff --git a/qa/rpc-tests/keypool.py b/qa/rpc-tests/keypool.py
index aee29a596a..5a67220021 100755
--- a/qa/rpc-tests/keypool.py
+++ b/qa/rpc-tests/keypool.py
@@ -73,6 +73,21 @@ def run_test(nodes, tmpdir):
except JSONRPCException,e:
assert(e.error['code']==-12)
+ # refill keypool with three new addresses
+ nodes[0].walletpassphrase('test', 12000)
+ nodes[0].keypoolrefill(3)
+ nodes[0].walletlock()
+
+ # drain them by mining
+ nodes[0].generate(1)
+ nodes[0].generate(1)
+ nodes[0].generate(1)
+ nodes[0].generate(1)
+ try:
+ nodes[0].generate(1)
+ raise AssertionError('Keypool should be exhausted after three addresses')
+ except JSONRPCException,e:
+ assert(e.error['code']==-12)
def main():
import optparse
diff --git a/qa/rpc-tests/listtransactions.py b/qa/rpc-tests/listtransactions.py
index eeae2d2fa2..b30a6bc9d1 100755
--- a/qa/rpc-tests/listtransactions.py
+++ b/qa/rpc-tests/listtransactions.py
@@ -93,6 +93,16 @@ class ListTransactionsTest(BitcoinTestFramework):
{"category":"receive","amount":Decimal("0.44")},
{"txid":txid, "account" : "toself"} )
+ multisig = self.nodes[1].createmultisig(1, [self.nodes[1].getnewaddress()])
+ self.nodes[0].importaddress(multisig["redeemScript"], "watchonly", False, True)
+ txid = self.nodes[1].sendtoaddress(multisig["address"], 0.1)
+ self.nodes[1].generate(1)
+ self.sync_all()
+ assert(len(self.nodes[0].listtransactions("watchonly", 100, 0, False)) == 0)
+ check_array_result(self.nodes[0].listtransactions("watchonly", 100, 0, True),
+ {"category":"receive","amount":Decimal("0.1")},
+ {"txid":txid, "account" : "watchonly"} )
+
if __name__ == '__main__':
ListTransactionsTest().main()
diff --git a/qa/rpc-tests/maxuploadtarget.py b/qa/rpc-tests/maxuploadtarget.py
new file mode 100755
index 0000000000..67c4a50985
--- /dev/null
+++ b/qa/rpc-tests/maxuploadtarget.py
@@ -0,0 +1,248 @@
+#!/usr/bin/env python2
+#
+# Distributed under the MIT/X11 software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+#
+
+from test_framework.mininode import *
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import *
+from test_framework.comptool import wait_until
+import time
+
+'''
+Test behavior of -maxuploadtarget.
+
+* Verify that getdata requests for old blocks (>1week) are dropped
+if uploadtarget has been reached.
+* Verify that getdata requests for recent blocks are respected even
+if uploadtarget has been reached.
+* Verify that the upload counters are reset after 24 hours.
+'''
+
+# TestNode: bare-bones "peer". Used mostly as a conduit for a test to send
+# p2p messages to a node; the messages themselves are generated in the main testing logic.
+class TestNode(NodeConnCB):
+ def __init__(self):
+ NodeConnCB.__init__(self)
+ self.create_callback_map()
+ self.connection = None
+ self.ping_counter = 1
+ self.last_pong = msg_pong()
+ self.block_receive_map = {}
+
+ def add_connection(self, conn):
+ self.connection = conn
+ self.peer_disconnected = False
+
+ def on_inv(self, conn, message):
+ pass
+
+ # Track the last getdata message we receive (used in the test)
+ def on_getdata(self, conn, message):
+ self.last_getdata = message
+
+ def on_block(self, conn, message):
+ message.block.calc_sha256()
+ try:
+ self.block_receive_map[message.block.sha256] += 1
+ except KeyError as e:
+ self.block_receive_map[message.block.sha256] = 1
+
+ # Spin until verack message is received from the node.
+ # We use this to signal that our test can begin. This
+ # is called from the testing thread, so it needs to acquire
+ # the global lock.
+ def wait_for_verack(self):
+ def veracked():
+ return self.verack_received
+ return wait_until(veracked, timeout=10)
+
+ def wait_for_disconnect(self):
+ def disconnected():
+ return self.peer_disconnected
+ return wait_until(disconnected, timeout=10)
+
+ # Wrapper for the NodeConn's send_message function
+ def send_message(self, message):
+ self.connection.send_message(message)
+
+ def on_pong(self, conn, message):
+ self.last_pong = message
+
+ def on_close(self, conn):
+ self.peer_disconnected = True
+
+ # Sync up with the node after delivery of a block
+ def sync_with_ping(self, timeout=30):
+ def received_pong():
+ return (self.last_pong.nonce == self.ping_counter)
+ self.connection.send_message(msg_ping(nonce=self.ping_counter))
+ success = wait_until(received_pong, timeout)
+ self.ping_counter += 1
+ return success
+
+class MaxUploadTest(BitcoinTestFramework):
+ def __init__(self):
+ self.utxo = []
+
+ # Some pre-processing to create a bunch of OP_RETURN txouts to insert into transactions we create
+ # So we have big transactions and full blocks to fill up our block files
+ # create one script_pubkey
+ script_pubkey = "6a4d0200" #OP_RETURN OP_PUSHDATA2 512 bytes
+ for i in xrange (512):
+ script_pubkey = script_pubkey + "01"
+ # concatenate 128 txouts of above script_pubkey which we'll insert before the txout for change
+ self.txouts = "81"
+ for k in xrange(128):
+ # add txout value
+ self.txouts = self.txouts + "0000000000000000"
+ # add length of script_pubkey
+ self.txouts = self.txouts + "fd0402"
+ # add script_pubkey
+ self.txouts = self.txouts + script_pubkey
+
+ def add_options(self, parser):
+ parser.add_option("--testbinary", dest="testbinary",
+ default=os.getenv("BITCOIND", "bitcoind"),
+ help="bitcoind binary to test")
+
+ def setup_chain(self):
+ initialize_chain_clean(self.options.tmpdir, 2)
+
+ def setup_network(self):
+ # Start a node with maxuploadtarget of 200 MB (/24h)
+ self.nodes = []
+ self.nodes.append(start_node(0, self.options.tmpdir, ["-debug", "-maxuploadtarget=200", "-blockmaxsize=999000"]))
+
+ def mine_full_block(self, node, address):
+ # Want to create a full block
+ # We'll generate a 66k transaction below, and 14 of them together come close to the 1MB block limit
+ for j in xrange(14):
+ if len(self.utxo) < 14:
+ self.utxo = node.listunspent()
+ inputs=[]
+ outputs = {}
+ t = self.utxo.pop()
+ inputs.append({ "txid" : t["txid"], "vout" : t["vout"]})
+ remchange = t["amount"] - Decimal("0.001000")
+ outputs[address]=remchange
+ # Create a basic transaction that will send change back to ourself after accounting for a fee,
+ # and then insert the 128 generated transaction outs in the middle. rawtx[92] is where the
+ # number of txouts is stored and is the only thing we overwrite from the original transaction
+ rawtx = node.createrawtransaction(inputs, outputs)
+ newtx = rawtx[0:92]
+ newtx = newtx + self.txouts
+ newtx = newtx + rawtx[94:]
+ # Appears to be ever so slightly faster to sign with SIGHASH_NONE
+ signresult = node.signrawtransaction(newtx,None,None,"NONE")
+ txid = node.sendrawtransaction(signresult["hex"], True)
+ # Mine a full-sized block containing the transactions we just created
+ node.generate(1)
+
+ def run_test(self):
+ # Before we connect anything, we first set the time on the node
+ # to be in the past, otherwise things break because the CNode
+ # time counters can't be reset backward after initialization
+ old_time = int(time.time() - 2*60*60*24*7)
+ self.nodes[0].setmocktime(old_time)
+
+ # Generate some old blocks
+ self.nodes[0].generate(130)
+
+ # test_nodes[0] will only request old blocks
+ # test_nodes[1] will only request new blocks
+ # test_nodes[2] will test resetting the counters
+ test_nodes = []
+ connections = []
+
+ for i in xrange(3):
+ test_nodes.append(TestNode())
+ connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], test_nodes[i]))
+ test_nodes[i].add_connection(connections[i])
+
+ NetworkThread().start() # Start up network handling in another thread
+ [x.wait_for_verack() for x in test_nodes]
+
+ # Test logic begins here
+
+ # Now mine a big block
+ self.mine_full_block(self.nodes[0], self.nodes[0].getnewaddress())
+
+ # Store the hash; we'll request this later
+ big_old_block = self.nodes[0].getbestblockhash()
+ old_block_size = self.nodes[0].getblock(big_old_block, True)['size']
+ big_old_block = int(big_old_block, 16)
+
+ # Advance to two days ago
+ self.nodes[0].setmocktime(int(time.time()) - 2*60*60*24)
+
+ # Mine one more block, so that the prior block looks old
+ self.mine_full_block(self.nodes[0], self.nodes[0].getnewaddress())
+
+ # We'll be requesting this new block too
+ big_new_block = self.nodes[0].getbestblockhash()
+ new_block_size = self.nodes[0].getblock(big_new_block)['size']
+ big_new_block = int(big_new_block, 16)
+
+ # test_nodes[0] will test what happens if we just keep requesting the
+ # same big old block too many times (expect: disconnect)
+
+ getdata_request = msg_getdata()
+ getdata_request.inv.append(CInv(2, big_old_block))
+
+ max_bytes_per_day = 200*1024*1024
+ max_bytes_available = max_bytes_per_day - 144*1000000
+ success_count = max_bytes_available / old_block_size
+
+ # 144MB will be reserved for relaying new blocks, so expect this to
+ # succeed for ~70 tries.
+ for i in xrange(success_count):
+ test_nodes[0].send_message(getdata_request)
+ test_nodes[0].sync_with_ping()
+ assert_equal(test_nodes[0].block_receive_map[big_old_block], i+1)
+
+ assert_equal(len(self.nodes[0].getpeerinfo()), 3)
+ # At most a couple more tries should succeed (depending on how long
+ # the test has been running so far).
+ for i in xrange(3):
+ test_nodes[0].send_message(getdata_request)
+ test_nodes[0].wait_for_disconnect()
+ assert_equal(len(self.nodes[0].getpeerinfo()), 2)
+ print "Peer 0 disconnected after downloading old block too many times"
+
+ # Requesting the current block on test_nodes[1] should succeed indefinitely,
+ # even when over the max upload target.
+ # We'll try 200 times
+ getdata_request.inv = [CInv(2, big_new_block)]
+ for i in xrange(200):
+ test_nodes[1].send_message(getdata_request)
+ test_nodes[1].sync_with_ping()
+ assert_equal(test_nodes[1].block_receive_map[big_new_block], i+1)
+
+ print "Peer 1 able to repeatedly download new block"
+
+ # But if test_nodes[1] tries for an old block, it gets disconnected too.
+ getdata_request.inv = [CInv(2, big_old_block)]
+ test_nodes[1].send_message(getdata_request)
+ test_nodes[1].wait_for_disconnect()
+ assert_equal(len(self.nodes[0].getpeerinfo()), 1)
+
+ print "Peer 1 disconnected after trying to download old block"
+
+ print "Advancing system time on node to clear counters..."
+
+ # If we advance the time by 24 hours, then the counters should reset,
+ # and test_nodes[2] should be able to retrieve the old block.
+ self.nodes[0].setmocktime(int(time.time()))
+ test_nodes[2].sync_with_ping()
+ test_nodes[2].send_message(getdata_request)
+ test_nodes[2].sync_with_ping()
+ assert_equal(test_nodes[2].block_receive_map[big_old_block], 1)
+
+ print "Peer 2 able to download old block"
+
+ [c.disconnect_node() for c in connections]
+
+if __name__ == '__main__':
+ MaxUploadTest().main()
diff --git a/qa/rpc-tests/mempool_packages.py b/qa/rpc-tests/mempool_packages.py
new file mode 100755
index 0000000000..6bc6e43f0b
--- /dev/null
+++ b/qa/rpc-tests/mempool_packages.py
@@ -0,0 +1,178 @@
+#!/usr/bin/env python2
+# Copyright (c) 2014-2015 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+
+# Test descendant package tracking code
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import *
+
+def satoshi_round(amount):
+ return Decimal(amount).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)
+
+class MempoolPackagesTest(BitcoinTestFramework):
+
+ def setup_network(self):
+ self.nodes = []
+ self.nodes.append(start_node(0, self.options.tmpdir, ["-maxorphantx=1000", "-relaypriority=0", "-debug"]))
+ self.nodes.append(start_node(1, self.options.tmpdir, ["-maxorphantx=1000", "-relaypriority=0", "-limitancestorcount=5", "-debug"]))
+ connect_nodes(self.nodes[0], 1)
+ self.is_network_split = False
+ self.sync_all()
+
+ # Build a transaction that spends parent_txid:vout
+ # Return amount sent
+ def chain_transaction(self, node, parent_txid, vout, value, fee, num_outputs):
+ send_value = satoshi_round((value - fee)/num_outputs)
+ inputs = [ {'txid' : parent_txid, 'vout' : vout} ]
+ outputs = {}
+ for i in xrange(num_outputs):
+ outputs[node.getnewaddress()] = send_value
+ rawtx = node.createrawtransaction(inputs, outputs)
+ signedtx = node.signrawtransaction(rawtx)
+ txid = node.sendrawtransaction(signedtx['hex'])
+ fulltx = node.getrawtransaction(txid, 1)
+ assert(len(fulltx['vout']) == num_outputs) # make sure we didn't generate a change output
+ return (txid, send_value)
+
+ def run_test(self):
+ ''' Mine some blocks and have them mature. '''
+ self.nodes[0].generate(101)
+ utxo = self.nodes[0].listunspent(10)
+ txid = utxo[0]['txid']
+ vout = utxo[0]['vout']
+ value = utxo[0]['amount']
+
+ fee = Decimal("0.0001")
+ # 100 transactions off a confirmed tx should be fine
+ chain = []
+ for i in xrange(100):
+ (txid, sent_value) = self.chain_transaction(self.nodes[0], txid, 0, value, fee, 1)
+ value = sent_value
+ chain.append(txid)
+
+ # Check mempool has 100 transactions in it, and descendant
+ # count and fees should look correct
+ mempool = self.nodes[0].getrawmempool(True)
+ assert_equal(len(mempool), 100)
+ descendant_count = 1
+ descendant_fees = 0
+ descendant_size = 0
+ SATOSHIS = 100000000
+
+ for x in reversed(chain):
+ assert_equal(mempool[x]['descendantcount'], descendant_count)
+ descendant_fees += mempool[x]['fee']
+ assert_equal(mempool[x]['descendantfees'], SATOSHIS*descendant_fees)
+ descendant_size += mempool[x]['size']
+ assert_equal(mempool[x]['descendantsize'], descendant_size)
+ descendant_count += 1
+
+ # Adding one more transaction on to the chain should fail.
+ try:
+ self.chain_transaction(self.nodes[0], txid, vout, value, fee, 1)
+ except JSONRPCException as e:
+ print "too-long-ancestor-chain successfully rejected"
+
+ # TODO: check that node1's mempool is as expected
+
+ # TODO: test ancestor size limits
+
+ # Now test descendant chain limits
+ txid = utxo[1]['txid']
+ value = utxo[1]['amount']
+ vout = utxo[1]['vout']
+
+ transaction_package = []
+ # First create one parent tx with 10 children
+ (txid, sent_value) = self.chain_transaction(self.nodes[0], txid, vout, value, fee, 10)
+ parent_transaction = txid
+ for i in xrange(10):
+ transaction_package.append({'txid': txid, 'vout': i, 'amount': sent_value})
+
+ for i in xrange(1000):
+ utxo = transaction_package.pop(0)
+ try:
+ (txid, sent_value) = self.chain_transaction(self.nodes[0], utxo['txid'], utxo['vout'], utxo['amount'], fee, 10)
+ for j in xrange(10):
+ transaction_package.append({'txid': txid, 'vout': j, 'amount': sent_value})
+ if i == 998:
+ mempool = self.nodes[0].getrawmempool(True)
+ assert_equal(mempool[parent_transaction]['descendantcount'], 1000)
+ except JSONRPCException as e:
+ print e.error['message']
+ assert_equal(i, 999)
+ print "tx that would create too large descendant package successfully rejected"
+
+ # TODO: check that node1's mempool is as expected
+
+ # TODO: test descendant size limits
+
+ # Test reorg handling
+ # First, the basics:
+ self.nodes[0].generate(1)
+ sync_blocks(self.nodes)
+ self.nodes[1].invalidateblock(self.nodes[0].getbestblockhash())
+ self.nodes[1].reconsiderblock(self.nodes[0].getbestblockhash())
+
+ # Now test the case where node1 has a transaction T in its mempool that
+ # depends on transactions A and B which are in a mined block, and the
+ # block containing A and B is disconnected, AND B is not accepted back
+ # into node1's mempool because its ancestor count is too high.
+
+ # Create 8 transactions, like so:
+ # Tx0 -> Tx1 (vout0)
+ # \--> Tx2 (vout1) -> Tx3 -> Tx4 -> Tx5 -> Tx6 -> Tx7
+ #
+ # Mine them in the next block, then generate a new tx8 that spends
+ # Tx1 and Tx7, and add to node1's mempool, then disconnect the
+ # last block.
+
+ # Create tx0 with 2 outputs
+ utxo = self.nodes[0].listunspent()
+ txid = utxo[0]['txid']
+ value = utxo[0]['amount']
+ vout = utxo[0]['vout']
+
+ send_value = satoshi_round((value - fee)/2)
+ inputs = [ {'txid' : txid, 'vout' : vout} ]
+ outputs = {}
+ for i in xrange(2):
+ outputs[self.nodes[0].getnewaddress()] = send_value
+ rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
+ signedtx = self.nodes[0].signrawtransaction(rawtx)
+ txid = self.nodes[0].sendrawtransaction(signedtx['hex'])
+ tx0_id = txid
+ value = send_value
+
+ # Create tx1
+ (tx1_id, tx1_value) = self.chain_transaction(self.nodes[0], tx0_id, 0, value, fee, 1)
+
+ # Create tx2-7
+ vout = 1
+ txid = tx0_id
+ for i in xrange(6):
+ (txid, sent_value) = self.chain_transaction(self.nodes[0], txid, vout, value, fee, 1)
+ vout = 0
+ value = sent_value
+
+ # Mine these in a block
+ self.nodes[0].generate(1)
+ self.sync_all()
+
+ # Now generate tx8, with a big fee
+ inputs = [ {'txid' : tx1_id, 'vout': 0}, {'txid' : txid, 'vout': 0} ]
+ outputs = { self.nodes[0].getnewaddress() : send_value + value - 4*fee }
+ rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
+ signedtx = self.nodes[0].signrawtransaction(rawtx)
+ txid = self.nodes[0].sendrawtransaction(signedtx['hex'])
+ sync_mempools(self.nodes)
+
+ # Now try to disconnect the tip on each node...
+ self.nodes[1].invalidateblock(self.nodes[1].getbestblockhash())
+ self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
+ sync_blocks(self.nodes)
+
+if __name__ == '__main__':
+ MempoolPackagesTest().main()
diff --git a/qa/rpc-tests/nodehandling.py b/qa/rpc-tests/nodehandling.py
index d89cfcf59b..e383a3a12c 100755
--- a/qa/rpc-tests/nodehandling.py
+++ b/qa/rpc-tests/nodehandling.py
@@ -55,7 +55,7 @@ class NodeHandlingTest (BitcoinTestFramework):
self.nodes[2].setban("192.168.0.1", "add", 1) #ban for 1 seconds
self.nodes[2].setban("2001:4d48:ac57:400:cacf:e9ff:fe1d:9c63/19", "add", 1000) #ban for 1000 seconds
listBeforeShutdown = self.nodes[2].listbanned();
- assert_equal("192.168.0.1/255.255.255.255", listBeforeShutdown[2]['address']) #must be here
+ assert_equal("192.168.0.1/32", listBeforeShutdown[2]['address']) #must be here
time.sleep(2) #make 100% sure we expired 192.168.0.1 node time
#stop node
@@ -63,9 +63,9 @@ class NodeHandlingTest (BitcoinTestFramework):
self.nodes[2] = start_node(2, self.options.tmpdir)
listAfterShutdown = self.nodes[2].listbanned();
- assert_equal("127.0.0.0/255.255.255.0", listAfterShutdown[0]['address'])
- assert_equal("127.0.0.0/255.255.255.255", listAfterShutdown[1]['address'])
- assert_equal("2001:4000::/ffff:e000:0:0:0:0:0:0", listAfterShutdown[2]['address'])
+ assert_equal("127.0.0.0/24", listAfterShutdown[0]['address'])
+ assert_equal("127.0.0.0/32", listAfterShutdown[1]['address'])
+ assert_equal("/19" in listAfterShutdown[2]['address'], True)
###########################
# RPC disconnectnode test #
diff --git a/qa/rpc-tests/p2p-acceptblock.py b/qa/rpc-tests/p2p-acceptblock.py
index fcdd1e1b99..700deab207 100755
--- a/qa/rpc-tests/p2p-acceptblock.py
+++ b/qa/rpc-tests/p2p-acceptblock.py
@@ -40,6 +40,11 @@ The test:
it's missing an intermediate block.
Node1 should reorg to this longer chain.
+4b. Send 288 more blocks on the longer chain.
+ Node0 should process all but the last block (too far ahead in height).
+ Send all headers to Node1, and then send the last block in that chain.
+ Node1 should accept the block because it's coming from a whitelisted peer.
+
5. Send a duplicate of the block in #3 to Node0.
Node0 should not process the block because it is unrequested, and stay on
the shorter chain.
@@ -59,6 +64,8 @@ class TestNode(NodeConnCB):
NodeConnCB.__init__(self)
self.create_callback_map()
self.connection = None
+ self.ping_counter = 1
+ self.last_pong = msg_pong()
def add_connection(self, conn):
self.connection = conn
@@ -82,6 +89,24 @@ class TestNode(NodeConnCB):
def send_message(self, message):
self.connection.send_message(message)
+ def on_pong(self, conn, message):
+ self.last_pong = message
+
+ # Sync up with the node after delivery of a block
+ def sync_with_ping(self, timeout=30):
+ self.connection.send_message(msg_ping(nonce=self.ping_counter))
+ received_pong = False
+ sleep_time = 0.05
+ while not received_pong and timeout > 0:
+ time.sleep(sleep_time)
+ timeout -= sleep_time
+ with mininode_lock:
+ if self.last_pong.nonce == self.ping_counter:
+ received_pong = True
+ self.ping_counter += 1
+ return received_pong
+
+
class AcceptBlockTest(BitcoinTestFramework):
def add_options(self, parser):
parser.add_option("--testbinary", dest="testbinary",
@@ -126,13 +151,15 @@ class AcceptBlockTest(BitcoinTestFramework):
# 2. Send one block that builds on each tip.
# This should be accepted.
blocks_h2 = [] # the height 2 blocks on each node's chain
+ block_time = time.time() + 1
for i in xrange(2):
- blocks_h2.append(create_block(tips[i], create_coinbase(), time.time()+1))
+ blocks_h2.append(create_block(tips[i], create_coinbase(2), block_time))
blocks_h2[i].solve()
+ block_time += 1
test_node.send_message(msg_block(blocks_h2[0]))
white_node.send_message(msg_block(blocks_h2[1]))
- time.sleep(1)
+ [ x.sync_with_ping() for x in [test_node, white_node] ]
assert_equal(self.nodes[0].getblockcount(), 2)
assert_equal(self.nodes[1].getblockcount(), 2)
print "First height 2 block accepted by both nodes"
@@ -140,12 +167,12 @@ class AcceptBlockTest(BitcoinTestFramework):
# 3. Send another block that builds on the original tip.
blocks_h2f = [] # Blocks at height 2 that fork off the main chain
for i in xrange(2):
- blocks_h2f.append(create_block(tips[i], create_coinbase(), blocks_h2[i].nTime+1))
+ blocks_h2f.append(create_block(tips[i], create_coinbase(2), blocks_h2[i].nTime+1))
blocks_h2f[i].solve()
test_node.send_message(msg_block(blocks_h2f[0]))
white_node.send_message(msg_block(blocks_h2f[1]))
- time.sleep(1) # Give time to process the block
+ [ x.sync_with_ping() for x in [test_node, white_node] ]
for x in self.nodes[0].getchaintips():
if x['hash'] == blocks_h2f[0].hash:
assert_equal(x['status'], "headers-only")
@@ -159,12 +186,12 @@ class AcceptBlockTest(BitcoinTestFramework):
# 4. Now send another block that builds on the forking chain.
blocks_h3 = []
for i in xrange(2):
- blocks_h3.append(create_block(blocks_h2f[i].sha256, create_coinbase(), blocks_h2f[i].nTime+1))
+ blocks_h3.append(create_block(blocks_h2f[i].sha256, create_coinbase(3), blocks_h2f[i].nTime+1))
blocks_h3[i].solve()
test_node.send_message(msg_block(blocks_h3[0]))
white_node.send_message(msg_block(blocks_h3[1]))
- time.sleep(1)
+ [ x.sync_with_ping() for x in [test_node, white_node] ]
# Since the earlier block was not processed by node0, the new block
# can't be fully validated.
for x in self.nodes[0].getchaintips():
@@ -182,6 +209,45 @@ class AcceptBlockTest(BitcoinTestFramework):
assert_equal(self.nodes[1].getblockcount(), 3)
print "Successfully reorged to length 3 chain from whitelisted peer"
+ # 4b. Now mine 288 more blocks and deliver; all should be processed but
+ # the last (height-too-high) on node0. Node1 should process the tip if
+ # we give it the headers chain leading to the tip.
+ tips = blocks_h3
+ headers_message = msg_headers()
+ all_blocks = [] # node0's blocks
+ for j in xrange(2):
+ for i in xrange(288):
+ next_block = create_block(tips[j].sha256, create_coinbase(i + 4), tips[j].nTime+1)
+ next_block.solve()
+ if j==0:
+ test_node.send_message(msg_block(next_block))
+ all_blocks.append(next_block)
+ else:
+ headers_message.headers.append(CBlockHeader(next_block))
+ tips[j] = next_block
+
+ time.sleep(2)
+ # node0 should have accepted all but the last block (which is too far ahead)
+ for x in all_blocks:
+ found = True
+ try:
+ self.nodes[0].getblock(x.hash)
+ except:
+ found = False
+ if x == all_blocks[287]:
+ if found:
+ raise AssertionError("Unrequested block too far-ahead should have been ignored")
+ print "Unrequested block too far-ahead not processed"
+ elif not found:
+ raise AssertionError("Unrequested block with more work should have been accepted")
+
+ headers_message.headers.pop() # Ensure the last block is unrequested
+ white_node.send_message(headers_message) # Send headers leading to tip
+ white_node.send_message(msg_block(tips[1])) # Now deliver the tip
+ try:
+ white_node.sync_with_ping()
+ self.nodes[1].getblock(tips[1].hash)
+ print "Unrequested block far ahead of tip accepted from whitelisted peer"
+ except:
+ raise AssertionError("Unrequested block from whitelisted peer not accepted")
+
# 5. Test handling of unrequested block on the node that didn't process
# Should still not be processed (even though it has a child that has more
# work).
@@ -192,7 +258,7 @@ class AcceptBlockTest(BitcoinTestFramework):
# the node processes it and incorrectly advances the tip).
# But this would be caught later on, when we verify that an inv triggers
# a getdata request for this block.
- time.sleep(1)
+ test_node.sync_with_ping()
assert_equal(self.nodes[0].getblockcount(), 2)
print "Unrequested block that would complete more-work chain was ignored"
@@ -204,21 +270,20 @@ class AcceptBlockTest(BitcoinTestFramework):
test_node.last_getdata = None
test_node.send_message(msg_inv([CInv(2, blocks_h3[0].sha256)]))
- time.sleep(1)
+ test_node.sync_with_ping()
with mininode_lock:
getdata = test_node.last_getdata
- # Check that the getdata is for the right block
- assert_equal(len(getdata.inv), 1)
+ # Check that the getdata includes the right block
assert_equal(getdata.inv[0].hash, blocks_h2f[0].sha256)
print "Inv at tip triggered getdata for unprocessed block"
# 7. Send the missing block for the third time (now it is requested)
test_node.send_message(msg_block(blocks_h2f[0]))
- time.sleep(1)
- assert_equal(self.nodes[0].getblockcount(), 3)
- print "Successfully reorged to length 3 chain from non-whitelisted peer"
+ test_node.sync_with_ping()
+ assert_equal(self.nodes[0].getblockcount(), 290)
+ print "Successfully reorged to longer chain from non-whitelisted peer"
[ c.disconnect_node() for c in connections ]
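
The sync_with_ping() helper added above is what replaces the fixed time.sleep() calls; a minimal usage sketch (some_block and expected_height are placeholders, not from the patch):

    # Deliver a block over the mininode connection, then wait until the node has
    # processed it before asserting on RPC state.
    test_node.send_message(msg_block(some_block))
    assert test_node.sync_with_ping(timeout=30)   # returns False if no matching pong arrives
    assert_equal(self.nodes[0].getblockcount(), expected_height)
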
diff --git a/qa/rpc-tests/p2p-fullblocktest.py b/qa/rpc-tests/p2p-fullblocktest.py
new file mode 100755
index 0000000000..9555940cec
--- /dev/null
+++ b/qa/rpc-tests/p2p-fullblocktest.py
@@ -0,0 +1,272 @@
+#!/usr/bin/env python2
+
+#
+# Distributed under the MIT/X11 software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+#
+
+from test_framework.test_framework import ComparisonTestFramework
+from test_framework.util import *
+from test_framework.comptool import TestManager, TestInstance
+from test_framework.mininode import *
+from test_framework.blocktools import *
+import logging
+import copy
+import time
+import numbers
+from test_framework.key import CECKey
+from test_framework.script import CScript, CScriptOp, SignatureHash, SIGHASH_ALL, OP_TRUE
+
+class PreviousSpendableOutput(object):
+ def __init__(self, tx = CTransaction(), n = -1):
+ self.tx = tx
+ self.n = n # the output we're spending
+
+'''
+This reimplements tests from the bitcoinj/FullBlockTestGenerator used
+by the pull-tester.
+
+We use the testing framework in which we expect a particular answer from
+each test.
+'''
+
+class FullBlockTest(ComparisonTestFramework):
+
+ ''' Can either run this test as 1 node with expected answers, or two and compare them.
+ Set the "outcome" variable in each TestInstance object to None to only do the comparison. '''
+ def __init__(self):
+ self.num_nodes = 1
+ self.block_heights = {}
+ self.coinbase_key = CECKey()
+ self.coinbase_key.set_secretbytes(bytes("horsebattery"))
+ self.coinbase_pubkey = self.coinbase_key.get_pubkey()
+ self.block_time = int(time.time())+1
+ self.tip = None
+ self.blocks = {}
+
+ def run_test(self):
+ test = TestManager(self, self.options.tmpdir)
+ test.add_all_connections(self.nodes)
+ NetworkThread().start() # Start up network handling in another thread
+ test.run()
+
+ def add_transactions_to_block(self, block, tx_list):
+ [ tx.rehash() for tx in tx_list ]
+ block.vtx.extend(tx_list)
+ block.hashMerkleRoot = block.calc_merkle_root()
+ block.rehash()
+ return block
+
+ # Create a block on top of self.tip, and advance self.tip to point to the new block
+ # If spend is specified, then 1 satoshi will be spent from it to an anyone-can-spend output,
+ # and the rest will go to fees.
+ def next_block(self, number, spend=None, additional_coinbase_value=0, script=None):
+ if self.tip == None:
+ base_block_hash = self.genesis_hash
+ else:
+ base_block_hash = self.tip.sha256
+ # First create the coinbase
+ height = self.block_heights[base_block_hash] + 1
+ coinbase = create_coinbase(height, self.coinbase_pubkey)
+ coinbase.vout[0].nValue += additional_coinbase_value
+ if (spend != None):
+ coinbase.vout[0].nValue += spend.tx.vout[spend.n].nValue - 1 # all but one satoshi to fees
+ coinbase.rehash()
+ block = create_block(base_block_hash, coinbase, self.block_time)
+ if (spend != None):
+ tx = CTransaction()
+ tx.vin.append(CTxIn(COutPoint(spend.tx.sha256, spend.n), "", 0xffffffff)) # no signature yet
+ # This copies the java comparison tool testing behavior: the first
+ # txout has a garbage scriptPubKey, "to make sure we're not
+ # pre-verifying too much" (?)
+ tx.vout.append(CTxOut(0, CScript([random.randint(0,255), height & 255])))
+ if script == None:
+ tx.vout.append(CTxOut(1, CScript([OP_TRUE])))
+ else:
+ tx.vout.append(CTxOut(1, script))
+ # Now sign it if necessary
+ scriptSig = ""
+ scriptPubKey = bytearray(spend.tx.vout[spend.n].scriptPubKey)
+ if (scriptPubKey[0] == OP_TRUE): # looks like an anyone-can-spend
+ scriptSig = CScript([OP_TRUE])
+ else:
+ # We have to actually sign it
+ (sighash, err) = SignatureHash(spend.tx.vout[spend.n].scriptPubKey, tx, 0, SIGHASH_ALL)
+ scriptSig = CScript([self.coinbase_key.sign(sighash) + bytes(bytearray([SIGHASH_ALL]))])
+ tx.vin[0].scriptSig = scriptSig
+ # Now add the transaction to the block
+ block = self.add_transactions_to_block(block, [tx])
+ block.solve()
+ self.tip = block
+ self.block_heights[block.sha256] = height
+ self.block_time += 1
+ assert number not in self.blocks
+ self.blocks[number] = block
+ return block
+
+ def get_tests(self):
+ self.genesis_hash = int(self.nodes[0].getbestblockhash(), 16)
+ self.block_heights[self.genesis_hash] = 0
+ spendable_outputs = []
+
+ # save the current tip so it can be spent by a later block
+ def save_spendable_output():
+ spendable_outputs.append(self.tip)
+
+ # get an output that we previously marked as spendable
+ def get_spendable_output():
+ return PreviousSpendableOutput(spendable_outputs.pop(0).vtx[0], 0)
+
+ # returns a test case that asserts that the current tip was accepted
+ def accepted():
+ return TestInstance([[self.tip, True]])
+
+ # returns a test case that asserts that the current tip was rejected
+ def rejected():
+ return TestInstance([[self.tip, False]])
+
+ # move the tip back to a previous block
+ def tip(number):
+ self.tip = self.blocks[number]
+
+ # creates a new block and advances the tip to that block
+ block = self.next_block
+
+
+ # Create a new block
+ block(0)
+ save_spendable_output()
+ yield accepted()
+
+
+ # Now we need that block to mature so we can spend the coinbase.
+ test = TestInstance(sync_every_block=False)
+ for i in range(100):
+ block(1000 + i)
+ test.blocks_and_transactions.append([self.tip, True])
+ save_spendable_output()
+ yield test
+
+
+ # Start by building a couple of blocks on top (which output is spent is in parentheses):
+ # genesis -> b1 (0) -> b2 (1)
+ out0 = get_spendable_output()
+ block(1, spend=out0)
+ save_spendable_output()
+ yield accepted()
+
+ out1 = get_spendable_output()
+ block(2, spend=out1)
+ # Inv again, then deliver twice (shouldn't break anything).
+ yield accepted()
+
+
+ # so fork like this:
+ #
+ # genesis -> b1 (0) -> b2 (1)
+ # \-> b3 (1)
+ #
+ # Nothing should happen at this point. We saw b2 first so it takes priority.
+ tip(1)
+ block(3, spend=out1)
+ # Deliver twice (should still not break anything)
+ yield rejected()
+
+
+ # Now we add another block to make the alternative chain longer.
+ #
+ # genesis -> b1 (0) -> b2 (1)
+ # \-> b3 (1) -> b4 (2)
+ out2 = get_spendable_output()
+ block(4, spend=out2)
+ yield accepted()
+
+
+ # ... and back to the first chain.
+ # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
+ # \-> b3 (1) -> b4 (2)
+ tip(2)
+ block(5, spend=out2)
+ save_spendable_output()
+ yield rejected()
+
+ out3 = get_spendable_output()
+ block(6, spend=out3)
+ yield accepted()
+
+
+ # Try to create a fork that double-spends
+ # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
+ # \-> b7 (2) -> b8 (4)
+ # \-> b3 (1) -> b4 (2)
+ tip(5)
+ block(7, spend=out2)
+ yield rejected()
+
+ out4 = get_spendable_output()
+ block(8, spend=out4)
+ yield rejected()
+
+
+ # Try to create a block that has too much fee
+ # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
+ # \-> b9 (4)
+ # \-> b3 (1) -> b4 (2)
+ tip(6)
+ block(9, spend=out4, additional_coinbase_value=1)
+ yield rejected()
+
+
+ # Create a fork that ends in a block with too much fee (the one that causes the reorg)
+ # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
+ # \-> b10 (3) -> b11 (4)
+ # \-> b3 (1) -> b4 (2)
+ tip(5)
+ block(10, spend=out3)
+ yield rejected()
+
+ block(11, spend=out4, additional_coinbase_value=1)
+ yield rejected()
+
+
+ # Try again, but with a valid fork first
+ # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
+ # \-> b12 (3) -> b13 (4) -> b14 (5)
+ # (b12 added last)
+ # \-> b3 (1) -> b4 (2)
+ tip(5)
+ b12 = block(12, spend=out3)
+ save_spendable_output()
+ #yield TestInstance([[b12, False]])
+ b13 = block(13, spend=out4)
+ # Deliver the block header for b12, and the block b13.
+ # b13 should be accepted but the tip won't advance until b12 is delivered.
+ yield TestInstance([[CBlockHeader(b12), None], [b13, False]])
+
+ save_spendable_output()
+ out5 = get_spendable_output()
+ # b14 is invalid, but the node won't know that until it tries to connect
+ # Tip still can't advance because b12 is missing
+ block(14, spend=out5, additional_coinbase_value=1)
+ yield rejected()
+
+ yield TestInstance([[b12, True, b13.sha256]]) # New tip should be b13.
+
+
+ # Test that a block with a lot of checksigs is okay
+ lots_of_checksigs = CScript([OP_CHECKSIG] * (1000000 / 50 - 1))
+ tip(13)
+ block(15, spend=out5, script=lots_of_checksigs)
+ yield accepted()
+
+
+ # Test that a block with too many checksigs is rejected
+ out6 = get_spendable_output()
+ too_many_checksigs = CScript([OP_CHECKSIG] * (1000000 / 50))
+ block(16, spend=out6, script=too_many_checksigs)
+ yield rejected()
+
+
+
+if __name__ == '__main__':
+ FullBlockTest().main()
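
An illustrative breakdown of the two checksig blocks at the end, assuming the legacy per-block sigop limit of 1000000 / 50 = 20000 and that the P2PK coinbase output contributes one sigop:

    MAX_BLOCK_SIGOPS = 1000000 / 50   # 20000 with Python 2 integer division (assumed limit)
    # block 15: 19999 sigops in lots_of_checksigs + 1 in the coinbase P2PK output = 20000 -> accepted
    # block 16: 20000 sigops in too_many_checksigs + 1 in the coinbase P2PK output = 20001 -> rejected
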
diff --git a/qa/rpc-tests/pruning.py b/qa/rpc-tests/pruning.py
index 2824c51ce7..21f8d69382 100755
--- a/qa/rpc-tests/pruning.py
+++ b/qa/rpc-tests/pruning.py
@@ -61,6 +61,9 @@ class PruneTest(BitcoinTestFramework):
self.address[0] = self.nodes[0].getnewaddress()
self.address[1] = self.nodes[1].getnewaddress()
+ # Determine default relay fee
+ self.relayfee = self.nodes[0].getnetworkinfo()["relayfee"]
+
connect_nodes(self.nodes[0], 1)
connect_nodes(self.nodes[1], 2)
connect_nodes(self.nodes[2], 0)
@@ -239,7 +242,7 @@ class PruneTest(BitcoinTestFramework):
outputs = {}
t = self.utxo.pop()
inputs.append({ "txid" : t["txid"], "vout" : t["vout"]})
- remchange = t["amount"] - Decimal("0.001000")
+ remchange = t["amount"] - 100*self.relayfee # Fee must be above min relay rate for 66kb tx
outputs[address]=remchange
# Create a basic transaction that will send change back to ourself after accounting for a fee
# And then insert the 128 generated transaction outs in the middle rawtx[92] is where the #
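
An illustrative fee calculation for the relayfee change above (0.00001 BTC/kB is the usual regtest default, an assumption rather than something stated in the patch):

    relayfee = self.relayfee      # typically Decimal("0.00001") BTC/kB on regtest
    min_fee  = 66 * relayfee      # ~0.00066 BTC needed for a ~66 kB transaction at the minimum relay rate
    reserved = 100 * relayfee     # 0.001 BTC reserved by the test, comfortably above min_fee
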
diff --git a/qa/rpc-tests/rest.py b/qa/rpc-tests/rest.py
index 1a2d326cc3..e084ad55ab 100755
--- a/qa/rpc-tests/rest.py
+++ b/qa/rpc-tests/rest.py
@@ -14,6 +14,7 @@ from struct import *
import binascii
import json
import StringIO
+import decimal
try:
import http.client as httplib
@@ -31,10 +32,20 @@ def deser_uint256(f):
r += t << (i * 32)
return r
-#allows simple http get calls with a request body
-def http_get_call(host, port, path, requestdata = '', response_object = 0):
+#allows simple http get calls
+def http_get_call(host, port, path, response_object = 0):
conn = httplib.HTTPConnection(host, port)
- conn.request('GET', path, requestdata)
+ conn.request('GET', path)
+
+ if response_object:
+ return conn.getresponse()
+
+ return conn.getresponse().read()
+
+#allows simple http post calls with a request body
+def http_post_call(host, port, path, requestdata = '', response_object = 0):
+ conn = httplib.HTTPConnection(host, port)
+ conn.request('POST', path, requestdata)
if response_object:
return conn.getresponse()
@@ -136,7 +147,7 @@ class RESTTest (BitcoinTestFramework):
binaryRequest += binascii.unhexlify(vintx);
binaryRequest += pack("i", 0);
- bin_response = http_get_call(url.hostname, url.port, '/rest/getutxos'+self.FORMAT_SEPARATOR+'bin', binaryRequest)
+ bin_response = http_post_call(url.hostname, url.port, '/rest/getutxos'+self.FORMAT_SEPARATOR+'bin', binaryRequest)
output = StringIO.StringIO()
output.write(bin_response)
output.seek(0)
@@ -174,14 +185,14 @@ class RESTTest (BitcoinTestFramework):
#do some invalid requests
json_request = '{"checkmempool'
- response = http_get_call(url.hostname, url.port, '/rest/getutxos'+self.FORMAT_SEPARATOR+'json', json_request, True)
+ response = http_post_call(url.hostname, url.port, '/rest/getutxos'+self.FORMAT_SEPARATOR+'json', json_request, True)
assert_equal(response.status, 500) #must be a 500 because we send an invalid JSON request
json_request = '{"checkmempool'
- response = http_get_call(url.hostname, url.port, '/rest/getutxos'+self.FORMAT_SEPARATOR+'bin', json_request, True)
+ response = http_post_call(url.hostname, url.port, '/rest/getutxos'+self.FORMAT_SEPARATOR+'bin', json_request, True)
assert_equal(response.status, 500) #must be a 500 because we send an invalid bin request
- response = http_get_call(url.hostname, url.port, '/rest/getutxos/checkmempool'+self.FORMAT_SEPARATOR+'bin', '', True)
+ response = http_post_call(url.hostname, url.port, '/rest/getutxos/checkmempool'+self.FORMAT_SEPARATOR+'bin', '', True)
assert_equal(response.status, 500) #must be a 500 because we send an invalid bin request
#test limits
@@ -189,17 +200,17 @@ class RESTTest (BitcoinTestFramework):
for x in range(0, 20):
json_request += txid+'-'+str(n)+'/'
json_request = json_request.rstrip("/")
- response = http_get_call(url.hostname, url.port, '/rest/getutxos'+json_request+self.FORMAT_SEPARATOR+'json', '', True)
+ response = http_post_call(url.hostname, url.port, '/rest/getutxos'+json_request+self.FORMAT_SEPARATOR+'json', '', True)
assert_equal(response.status, 500) #must be a 500 because we are exceeding the limits
json_request = '/checkmempool/'
for x in range(0, 15):
json_request += txid+'-'+str(n)+'/'
json_request = json_request.rstrip("/");
- response = http_get_call(url.hostname, url.port, '/rest/getutxos'+json_request+self.FORMAT_SEPARATOR+'json', '', True)
+ response = http_post_call(url.hostname, url.port, '/rest/getutxos'+json_request+self.FORMAT_SEPARATOR+'json', '', True)
assert_equal(response.status, 200) #must be a 200 because we are within the limits
- self.nodes[0].generate(1) #generate block to not affect upcomming tests
+ self.nodes[0].generate(1) #generate block to not affect upcoming tests
self.sync_all()
################
@@ -207,27 +218,27 @@ class RESTTest (BitcoinTestFramework):
################
# check binary format
- response = http_get_call(url.hostname, url.port, '/rest/block/'+bb_hash+self.FORMAT_SEPARATOR+"bin", "", True)
+ response = http_get_call(url.hostname, url.port, '/rest/block/'+bb_hash+self.FORMAT_SEPARATOR+"bin", True)
assert_equal(response.status, 200)
assert_greater_than(int(response.getheader('content-length')), 80)
response_str = response.read()
# compare with block header
- response_header = http_get_call(url.hostname, url.port, '/rest/headers/1/'+bb_hash+self.FORMAT_SEPARATOR+"bin", "", True)
+ response_header = http_get_call(url.hostname, url.port, '/rest/headers/1/'+bb_hash+self.FORMAT_SEPARATOR+"bin", True)
assert_equal(response_header.status, 200)
assert_equal(int(response_header.getheader('content-length')), 80)
response_header_str = response_header.read()
assert_equal(response_str[0:80], response_header_str)
# check block hex format
- response_hex = http_get_call(url.hostname, url.port, '/rest/block/'+bb_hash+self.FORMAT_SEPARATOR+"hex", "", True)
+ response_hex = http_get_call(url.hostname, url.port, '/rest/block/'+bb_hash+self.FORMAT_SEPARATOR+"hex", True)
assert_equal(response_hex.status, 200)
assert_greater_than(int(response_hex.getheader('content-length')), 160)
response_hex_str = response_hex.read()
assert_equal(response_str.encode("hex")[0:160], response_hex_str[0:160])
# compare with hex block header
- response_header_hex = http_get_call(url.hostname, url.port, '/rest/headers/1/'+bb_hash+self.FORMAT_SEPARATOR+"hex", "", True)
+ response_header_hex = http_get_call(url.hostname, url.port, '/rest/headers/1/'+bb_hash+self.FORMAT_SEPARATOR+"hex", True)
assert_equal(response_header_hex.status, 200)
assert_greater_than(int(response_header_hex.getheader('content-length')), 160)
response_header_hex_str = response_header_hex.read()
@@ -240,10 +251,10 @@ class RESTTest (BitcoinTestFramework):
assert_equal(block_json_obj['hash'], bb_hash)
# compare with json block header
- response_header_json = http_get_call(url.hostname, url.port, '/rest/headers/1/'+bb_hash+self.FORMAT_SEPARATOR+"json", "", True)
+ response_header_json = http_get_call(url.hostname, url.port, '/rest/headers/1/'+bb_hash+self.FORMAT_SEPARATOR+"json", True)
assert_equal(response_header_json.status, 200)
response_header_json_str = response_header_json.read()
- json_obj = json.loads(response_header_json_str)
+ json_obj = json.loads(response_header_json_str, parse_float=decimal.Decimal)
assert_equal(len(json_obj), 1) #ensure that there is one header in the json response
assert_equal(json_obj[0]['hash'], bb_hash) #request/response hash should be the same
@@ -264,7 +275,7 @@ class RESTTest (BitcoinTestFramework):
#see if we can get 5 headers in one response
self.nodes[1].generate(5)
self.sync_all()
- response_header_json = http_get_call(url.hostname, url.port, '/rest/headers/5/'+bb_hash+self.FORMAT_SEPARATOR+"json", "", True)
+ response_header_json = http_get_call(url.hostname, url.port, '/rest/headers/5/'+bb_hash+self.FORMAT_SEPARATOR+"json", True)
assert_equal(response_header_json.status, 200)
response_header_json_str = response_header_json.read()
json_obj = json.loads(response_header_json_str)
@@ -277,7 +288,7 @@ class RESTTest (BitcoinTestFramework):
assert_equal(json_obj['txid'], tx_hash)
# check hex format response
- hex_string = http_get_call(url.hostname, url.port, '/rest/tx/'+tx_hash+self.FORMAT_SEPARATOR+"hex", "", True)
+ hex_string = http_get_call(url.hostname, url.port, '/rest/tx/'+tx_hash+self.FORMAT_SEPARATOR+"hex", True)
assert_equal(hex_string.status, 200)
assert_greater_than(int(response.getheader('content-length')), 10)
@@ -291,6 +302,19 @@ class RESTTest (BitcoinTestFramework):
txs.append(self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 11))
self.sync_all()
+ # check that there are exactly 3 transactions in the TX memory pool before generating the block
+ json_string = http_get_call(url.hostname, url.port, '/rest/mempool/info'+self.FORMAT_SEPARATOR+'json')
+ json_obj = json.loads(json_string)
+ assert_equal(json_obj['size'], 3)
+ # the size of the memory pool should be greater than 3x ~100 bytes
+ assert_greater_than(json_obj['bytes'], 300)
+
+ # check that our submitted transactions are in the TX memory pool
+ json_string = http_get_call(url.hostname, url.port, '/rest/mempool/contents'+self.FORMAT_SEPARATOR+'json')
+ json_obj = json.loads(json_string)
+ for tx in txs:
+ assert_equal(tx in json_obj, True)
+
# now mine the transactions
newblockhash = self.nodes[1].generate(1)
self.sync_all()
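
A minimal sketch of the split GET/POST helpers above, mirroring the calls in the test (binary_request is a placeholder for a serialized getutxos query):

    # GET: no request body, e.g. the new mempool info endpoint
    json_string = http_get_call(url.hostname, url.port, '/rest/mempool/info'+self.FORMAT_SEPARATOR+'json')
    json_obj = json.loads(json_string)
    # POST: used where a request body is needed, e.g. a binary getutxos query
    response = http_post_call(url.hostname, url.port, '/rest/getutxos'+self.FORMAT_SEPARATOR+'bin', binary_request, True)
    assert_equal(response.status, 200)
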
diff --git a/qa/rpc-tests/script_test.py b/qa/rpc-tests/script_test.py
index 860fa56b64..afc44b51b5 100755
--- a/qa/rpc-tests/script_test.py
+++ b/qa/rpc-tests/script_test.py
@@ -124,10 +124,10 @@ def ParseScript(json_script):
return parsed_script
class TestBuilder(object):
- def create_credit_tx(self, scriptPubKey):
+ def create_credit_tx(self, scriptPubKey, height):
# self.tx1 is a coinbase transaction, modeled after the one created by script_tests.cpp
# This allows us to reuse signatures created in the unit test framework.
- self.tx1 = create_coinbase() # this has a bip34 scriptsig,
+ self.tx1 = create_coinbase(height) # this has a bip34 scriptsig,
self.tx1.vin[0].scriptSig = CScript([0, 0]) # but this matches the unit tests
self.tx1.vout[0].nValue = 0
self.tx1.vout[0].scriptPubKey = scriptPubKey
@@ -168,7 +168,7 @@ class ScriptTest(ComparisonTestFramework):
test = TestInstance(sync_every_block=False)
test_build = TestBuilder()
- test_build.create_credit_tx(scriptpubkey)
+ test_build.create_credit_tx(scriptpubkey, self.height)
test_build.create_spend_tx(scriptsig)
test_build.rehash()
@@ -176,16 +176,18 @@ class ScriptTest(ComparisonTestFramework):
self.block_time += 1
block.solve()
self.tip = block.sha256
+ self.height += 1
test.blocks_and_transactions = [[block, True]]
for i in xrange(100):
- block = create_block(self.tip, create_coinbase(), self.block_time)
+ block = create_block(self.tip, create_coinbase(self.height), self.block_time)
self.block_time += 1
block.solve()
self.tip = block.sha256
+ self.height += 1
test.blocks_and_transactions.append([block, True])
- block = create_block(self.tip, create_coinbase(), self.block_time)
+ block = create_block(self.tip, create_coinbase(self.height), self.block_time)
self.block_time += 1
block.vtx.append(test_build.tx2)
block.hashMerkleRoot = block.calc_merkle_root()
@@ -198,14 +200,16 @@ class ScriptTest(ComparisonTestFramework):
def get_tests(self):
self.tip = int ("0x" + self.nodes[0].getbestblockhash() + "L", 0)
self.block_time = 1333230000 # before the BIP16 switchover
+ self.height = 1
'''
Create a new block with an anyone-can-spend coinbase
'''
- block = create_block(self.tip, create_coinbase(), self.block_time)
+ block = create_block(self.tip, create_coinbase(self.height), self.block_time)
self.block_time += 1
block.solve()
self.tip = block.sha256
+ self.height += 1
yield TestInstance(objects=[[block, True]])
'''
@@ -213,11 +217,12 @@ class ScriptTest(ComparisonTestFramework):
'''
test = TestInstance(objects=[], sync_every_block=False, sync_every_tx=False)
for i in xrange(100):
- b = create_block(self.tip, create_coinbase(), self.block_time)
+ b = create_block(self.tip, create_coinbase(self.height), self.block_time)
b.solve()
test.blocks_and_transactions.append([b, True])
self.tip = b.sha256
self.block_time += 1
+ self.height += 1
yield test
''' Iterate through script tests. '''
@@ -229,6 +234,7 @@ class ScriptTest(ComparisonTestFramework):
self.nodes[1].invalidateblock(self.nodes[1].getblockhash(102))
self.tip = int ("0x" + self.nodes[0].getbestblockhash() + "L", 0)
+ self.height = 102
[scriptsig, scriptpubkey, flags] = script_test[0:3]
flags = ParseScriptFlags(flags)
diff --git a/qa/rpc-tests/test_framework/authproxy.py b/qa/rpc-tests/test_framework/authproxy.py
index bc7d655fdf..33014dc139 100644
--- a/qa/rpc-tests/test_framework/authproxy.py
+++ b/qa/rpc-tests/test_framework/authproxy.py
@@ -106,6 +106,26 @@ class AuthServiceProxy(object):
name = "%s.%s" % (self.__service_name, name)
return AuthServiceProxy(self.__service_url, name, connection=self.__conn)
+ def _request(self, method, path, postdata):
+ '''
+ Do a HTTP request, with retry if we get disconnected (e.g. due to a timeout).
+ This is a workaround for https://bugs.python.org/issue3566 which is fixed in Python 3.5.
+ '''
+ headers = {'Host': self.__url.hostname,
+ 'User-Agent': USER_AGENT,
+ 'Authorization': self.__auth_header,
+ 'Content-type': 'application/json'}
+ try:
+ self.__conn.request(method, path, postdata, headers)
+ return self._get_response()
+ except httplib.BadStatusLine as e:
+ if e.line == "''": # if connection was closed, try again
+ self.__conn.close()
+ self.__conn.request(method, path, postdata, headers)
+ return self._get_response()
+ else:
+ raise
+
def __call__(self, *args):
AuthServiceProxy.__id_count += 1
@@ -115,13 +135,7 @@ class AuthServiceProxy(object):
'method': self.__service_name,
'params': args,
'id': AuthServiceProxy.__id_count}, default=EncodeDecimal)
- self.__conn.request('POST', self.__url.path, postdata,
- {'Host': self.__url.hostname,
- 'User-Agent': USER_AGENT,
- 'Authorization': self.__auth_header,
- 'Content-type': 'application/json'})
-
- response = self._get_response()
+ response = self._request('POST', self.__url.path, postdata)
if response['error'] is not None:
raise JSONRPCException(response['error'])
elif 'result' not in response:
@@ -133,13 +147,7 @@ class AuthServiceProxy(object):
def _batch(self, rpc_call_list):
postdata = json.dumps(list(rpc_call_list), default=EncodeDecimal)
log.debug("--> "+postdata)
- self.__conn.request('POST', self.__url.path, postdata,
- {'Host': self.__url.hostname,
- 'User-Agent': USER_AGENT,
- 'Authorization': self.__auth_header,
- 'Content-type': 'application/json'})
-
- return self._get_response()
+ return self._request('POST', self.__url.path, postdata)
def _get_response(self):
http_response = self.__conn.getresponse()
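
An illustrative effect of the retry in _request() above (URL and idle time are placeholders; the point is that a connection dropped server-side no longer surfaces as httplib.BadStatusLine):

    proxy = AuthServiceProxy("http://user:pass@127.0.0.1:18332")
    proxy.getblockcount()       # opens the underlying HTTP connection
    time.sleep(60)              # connection may be closed server-side while idle
    proxy.getblockcount()       # previously could raise BadStatusLine; now reconnected and retried once
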
diff --git a/qa/rpc-tests/test_framework/blockstore.py b/qa/rpc-tests/test_framework/blockstore.py
index c57b6df81b..b9775b477c 100644
--- a/qa/rpc-tests/test_framework/blockstore.py
+++ b/qa/rpc-tests/test_framework/blockstore.py
@@ -10,6 +10,7 @@ class BlockStore(object):
def __init__(self, datadir):
self.blockDB = dbm.open(datadir + "/blocks", 'c')
self.currentBlock = 0L
+ self.headers_map = dict()
def close(self):
self.blockDB.close()
@@ -26,24 +27,30 @@ class BlockStore(object):
ret.calc_sha256()
return ret
+ def get_header(self, blockhash):
+ try:
+ return self.headers_map[blockhash]
+ except KeyError:
+ return None
+
# Note: headers are kept in a separate headers_map so that headers_for()
# no longer needs to pull full blocks out of the database.
def headers_for(self, locator, hash_stop, current_tip=None):
if current_tip is None:
current_tip = self.currentBlock
- current_block = self.get(current_tip)
- if current_block is None:
+ current_block_header = self.get_header(current_tip)
+ if current_block_header is None:
return None
response = msg_headers()
- headersList = [ CBlockHeader(current_block) ]
+ headersList = [ current_block_header ]
maxheaders = 2000
while (headersList[0].sha256 not in locator.vHave):
prevBlockHash = headersList[0].hashPrevBlock
- prevBlock = self.get(prevBlockHash)
- if prevBlock is not None:
- headersList.insert(0, CBlockHeader(prevBlock))
+ prevBlockHeader = self.get_header(prevBlockHash)
+ if prevBlockHeader is not None:
+ headersList.insert(0, prevBlockHeader)
else:
break
headersList = headersList[:maxheaders] # truncate if we have too many
@@ -61,6 +68,10 @@ class BlockStore(object):
except TypeError as e:
print "Unexpected error: ", sys.exc_info()[0], e.args
self.currentBlock = block.sha256
+ self.headers_map[block.sha256] = CBlockHeader(block)
+
+ def add_header(self, header):
+ self.headers_map[header.sha256] = header
def get_blocks(self, inv):
responses = []
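
A minimal sketch of the header bookkeeping added above (store and withheld_block are placeholders): a header can be served even when the full block was deliberately kept out of the store.

    store.add_header(CBlockHeader(withheld_block))     # record the header only
    assert store.get_header(withheld_block.sha256) is not None
    assert store.get(withheld_block.sha256) is None    # the full block was never add_block()'ed
    # headers_for() can therefore answer getheaders for chains that include withheld blocks
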
diff --git a/qa/rpc-tests/test_framework/blocktools.py b/qa/rpc-tests/test_framework/blocktools.py
index f397fe7cd6..59aa8c15cc 100644
--- a/qa/rpc-tests/test_framework/blocktools.py
+++ b/qa/rpc-tests/test_framework/blocktools.py
@@ -5,7 +5,7 @@
#
from mininode import *
-from script import CScript, CScriptOp
+from script import CScript, CScriptOp, OP_TRUE, OP_CHECKSIG
# Create a block (with regtest difficulty)
def create_block(hashprev, coinbase, nTime=None):
@@ -37,19 +37,21 @@ def serialize_script_num(value):
r[-1] |= 0x80
return r
-counter=1
-# Create an anyone-can-spend coinbase transaction, assuming no miner fees
-def create_coinbase(heightAdjust = 0):
- global counter
+# Create a coinbase transaction, assuming no miner fees.
+# If pubkey is passed in, the coinbase output will be a P2PK output;
+# otherwise an anyone-can-spend output.
+def create_coinbase(height, pubkey = None):
coinbase = CTransaction()
coinbase.vin.append(CTxIn(COutPoint(0, 0xffffffff),
- ser_string(serialize_script_num(counter+heightAdjust)), 0xffffffff))
- counter += 1
+ ser_string(serialize_script_num(height)), 0xffffffff))
coinbaseoutput = CTxOut()
coinbaseoutput.nValue = 50*100000000
- halvings = int((counter+heightAdjust)/150) # regtest
+ halvings = int(height/150) # regtest
coinbaseoutput.nValue >>= halvings
- coinbaseoutput.scriptPubKey = ""
+ if (pubkey != None):
+ coinbaseoutput.scriptPubKey = CScript([pubkey, OP_CHECKSIG])
+ else:
+ coinbaseoutput.scriptPubKey = CScript([OP_TRUE])
coinbase.vout = [ coinbaseoutput ]
coinbase.calc_sha256()
return coinbase
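
A short usage sketch of the new create_coinbase() signature (tip_hash, block_time and coinbase_pubkey are placeholders): height drives both the serialized scriptSig and the regtest subsidy halving, and pubkey switches the output from anyone-can-spend to P2PK.

    cb = create_coinbase(1)                           # OP_TRUE output, 50 BTC subsidy
    cb_p2pk = create_coinbase(151, coinbase_pubkey)   # P2PK output; int(151/150) = 1 halving -> 25 BTC
    block = create_block(tip_hash, cb_p2pk, block_time)
    block.solve()
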
diff --git a/qa/rpc-tests/test_framework/comptool.py b/qa/rpc-tests/test_framework/comptool.py
index 23a979250c..e0b3ce040d 100755
--- a/qa/rpc-tests/test_framework/comptool.py
+++ b/qa/rpc-tests/test_framework/comptool.py
@@ -27,6 +27,20 @@ generator that returns TestInstance objects. See below for definition.
global mininode_lock
+def wait_until(predicate, attempts=float('inf'), timeout=float('inf')):
+ attempt = 0
+ elapsed = 0
+
+ while attempt < attempts and elapsed < timeout:
+ with mininode_lock:
+ if predicate():
+ return True
+ attempt += 1
+ elapsed += 0.05
+ time.sleep(0.05)
+
+ return False
+
class TestNode(NodeConnCB):
def __init__(self, block_store, tx_store):
@@ -43,6 +57,10 @@ class TestNode(NodeConnCB):
# a response
self.pingMap = {}
self.lastInv = []
+ self.closed = False
+
+ def on_close(self, conn):
+ self.closed = True
def add_connection(self, conn):
self.conn = conn
@@ -104,26 +122,33 @@ class TestNode(NodeConnCB):
# Instances of these are generated by the test generator, and fed into the
# comptool.
#
-# "blocks_and_transactions" should be an array of [obj, True/False/None]:
-# - obj is either a CBlock or a CTransaction, and
+# "blocks_and_transactions" should be an array of
+# [obj, True/False/None, hash/None]:
+# - obj is either a CBlock, CBlockHeader, or a CTransaction, and
# - the second value indicates whether the object should be accepted
# into the blockchain or mempool (for tests where we expect a certain
# answer), or "None" if we don't expect a certain answer and are just
# comparing the behavior of the nodes being tested.
+# - the third value is the hash to test the tip against (if None or omitted,
+# use the hash of the block)
+# - NOTE: if a block header, no test is performed; instead the header is
+# just added to the block_store. This is to facilitate block delivery
+# when communicating with headers-first clients (when withholding an
+# intermediate block).
# sync_every_block: if True, then each block will be inv'ed, synced, and
# nodes will be tested based on the outcome for the block. If False,
# then inv's accumulate until all blocks are processed (or max inv size
# is reached) and then sent out in one inv message. Then the final block
# will be synced across all connections, and the outcome of the final
# block will be tested.
-# sync_every_tx: analagous to behavior for sync_every_block, except if outcome
+# sync_every_tx: analogous to behavior for sync_every_block, except if outcome
# on the final tx is None, then contents of entire mempool are compared
# across all connections. (If outcome of final tx is specified as true
# or false, then only the last tx is tested against outcome.)
class TestInstance(object):
- def __init__(self, objects=[], sync_every_block=True, sync_every_tx=False):
- self.blocks_and_transactions = objects
+ def __init__(self, objects=None, sync_every_block=True, sync_every_tx=False):
+ self.blocks_and_transactions = objects if objects else []
self.sync_every_block = sync_every_block
self.sync_every_tx = sync_every_tx
@@ -132,6 +157,7 @@ class TestManager(object):
def __init__(self, testgen, datadir):
self.test_generator = testgen
self.connections = []
+ self.test_nodes = []
self.block_store = BlockStore(datadir)
self.tx_store = TxStore(datadir)
self.ping_counter = 1
@@ -139,57 +165,42 @@ class TestManager(object):
def add_all_connections(self, nodes):
for i in range(len(nodes)):
# Create a p2p connection to each node
- self.connections.append(NodeConn('127.0.0.1', p2p_port(i),
- nodes[i], TestNode(self.block_store, self.tx_store)))
+ test_node = TestNode(self.block_store, self.tx_store)
+ self.test_nodes.append(test_node)
+ self.connections.append(NodeConn('127.0.0.1', p2p_port(i), nodes[i], test_node))
# Make sure the TestNode (callback class) has a reference to its
# associated NodeConn
- self.connections[-1].cb.add_connection(self.connections[-1])
+ test_node.add_connection(self.connections[-1])
+
+ def wait_for_disconnections(self):
+ def disconnected():
+ return all(node.closed for node in self.test_nodes)
+ return wait_until(disconnected, timeout=10)
def wait_for_verack(self):
- sleep_time = 0.05
- max_tries = 10 / sleep_time # Wait at most 10 seconds
- while max_tries > 0:
- done = True
- with mininode_lock:
- for c in self.connections:
- if c.cb.verack_received is False:
- done = False
- break
- if done:
- break
- time.sleep(sleep_time)
+ def veracked():
+ return all(node.verack_received for node in self.test_nodes)
+ return wait_until(veracked, timeout=10)
def wait_for_pings(self, counter):
- received_pongs = False
- while received_pongs is not True:
- time.sleep(0.05)
- received_pongs = True
- with mininode_lock:
- for c in self.connections:
- if c.cb.received_ping_response(counter) is not True:
- received_pongs = False
- break
+ def received_pongs():
+ return all(node.received_ping_response(counter) for node in self.test_nodes)
+ return wait_until(received_pongs)
# sync_blocks: Wait for all connections to request the blockhash given
# then send get_headers to find out the tip of each node, and synchronize
# the response by using a ping (and waiting for pong with same nonce).
def sync_blocks(self, blockhash, num_blocks):
- # Wait for nodes to request block (50ms sleep * 20 tries * num_blocks)
- max_tries = 20*num_blocks
- while max_tries > 0:
- with mininode_lock:
- results = [ blockhash in c.cb.block_request_map and
- c.cb.block_request_map[blockhash] for c in self.connections ]
- if False not in results:
- break
- time.sleep(0.05)
- max_tries -= 1
+ def blocks_requested():
+ return all(
+ blockhash in node.block_request_map and node.block_request_map[blockhash]
+ for node in self.test_nodes
+ )
# --> error if not requested
- if max_tries == 0:
+ if not wait_until(blocks_requested, attempts=20*num_blocks):
# print [ c.cb.block_request_map for c in self.connections ]
raise AssertionError("Not all nodes requested block")
- # --> Answer request (we did this inline!)
# Send getheaders message
[ c.cb.send_getheaders() for c in self.connections ]
@@ -202,21 +213,16 @@ class TestManager(object):
# Analogous to sync_block (see above)
def sync_transaction(self, txhash, num_events):
# Wait for nodes to request transaction (50ms sleep * 20 tries * num_events)
- max_tries = 20*num_events
- while max_tries > 0:
- with mininode_lock:
- results = [ txhash in c.cb.tx_request_map and
- c.cb.tx_request_map[txhash] for c in self.connections ]
- if False not in results:
- break
- time.sleep(0.05)
- max_tries -= 1
+ def transaction_requested():
+ return all(
+ txhash in node.tx_request_map and node.tx_request_map[txhash]
+ for node in self.test_nodes
+ )
# --> error if not requested
- if max_tries == 0:
+ if not wait_until(transaction_requested, attempts=20*num_events):
# print [ c.cb.tx_request_map for c in self.connections ]
raise AssertionError("Not all nodes requested transaction")
- # --> Answer request (we did this inline!)
# Get the mempool
[ c.cb.send_mempool() for c in self.connections ]
@@ -271,29 +277,55 @@ class TestManager(object):
# We use these variables to keep track of the last block
# and last transaction in the tests, which are used
# if we're not syncing on every block or every tx.
- [ block, block_outcome ] = [ None, None ]
+ [ block, block_outcome, tip ] = [ None, None, None ]
[ tx, tx_outcome ] = [ None, None ]
invqueue = []
- for b_or_t, outcome in test_instance.blocks_and_transactions:
+ for test_obj in test_instance.blocks_and_transactions:
+ b_or_t = test_obj[0]
+ outcome = test_obj[1]
# Determine if we're dealing with a block or tx
if isinstance(b_or_t, CBlock): # Block test runner
block = b_or_t
block_outcome = outcome
+ tip = block.sha256
+ # each test_obj can have an optional third argument
+ # to specify the tip we should compare with
+ # (default is to use the block being tested)
+ if len(test_obj) >= 3:
+ tip = test_obj[2]
+
# Add to shared block_store, set as current block
+ # If there was an open getdata request for the block
+ # previously, and we didn't have an entry in the
+ # block_store, then immediately deliver, because the
+ # node wouldn't send another getdata request while
+ # the earlier one is outstanding.
+ first_block_with_hash = True
+ if self.block_store.get(block.sha256) is not None:
+ first_block_with_hash = False
with mininode_lock:
self.block_store.add_block(block)
for c in self.connections:
- c.cb.block_request_map[block.sha256] = False
+ if first_block_with_hash and block.sha256 in c.cb.block_request_map and c.cb.block_request_map[block.sha256] == True:
+ # There was a previous request for this block hash
+ # Most likely, we delivered a header for this block
+ # but never had the block to respond to the getdata
+ c.send_message(msg_block(block))
+ else:
+ c.cb.block_request_map[block.sha256] = False
# Either send inv's to each node and sync, or add
# to invqueue for later inv'ing.
if (test_instance.sync_every_block):
[ c.cb.send_inv(block) for c in self.connections ]
self.sync_blocks(block.sha256, 1)
- if (not self.check_results(block.sha256, outcome)):
+ if (not self.check_results(tip, outcome)):
raise AssertionError("Test failed at test %d" % test_number)
else:
invqueue.append(CInv(2, block.sha256))
+ elif isinstance(b_or_t, CBlockHeader):
+ block_header = b_or_t
+ self.block_store.add_header(block_header)
else: # Tx test runner
assert(isinstance(b_or_t, CTransaction))
tx = b_or_t
@@ -321,9 +353,8 @@ class TestManager(object):
if len(invqueue) > 0:
[ c.send_message(msg_inv(invqueue)) for c in self.connections ]
invqueue = []
- self.sync_blocks(block.sha256,
- len(test_instance.blocks_and_transactions))
- if (not self.check_results(block.sha256, block_outcome)):
+ self.sync_blocks(block.sha256, len(test_instance.blocks_and_transactions))
+ if (not self.check_results(tip, block_outcome)):
raise AssertionError("Block test failed at test %d" % test_number)
if (not test_instance.sync_every_tx and tx is not None):
if len(invqueue) > 0:
@@ -336,6 +367,7 @@ class TestManager(object):
print "Test %d: PASS" % test_number, [ c.rpc.getblockcount() for c in self.connections ]
test_number += 1
+ [ c.disconnect_node() for c in self.connections ]
+ self.wait_for_disconnections()
self.block_store.close()
self.tx_store.close()
- [ c.disconnect_node() for c in self.connections ]
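
The extended [obj, outcome, tip] format described above is what p2p-fullblocktest.py relies on for the withheld-block case; condensed from that test:

    # Deliver only b12's header plus b13: b13 cannot connect yet, so the tip must not move.
    yield TestInstance([[CBlockHeader(b12), None], [b13, False]])
    # Deliver b12 itself; the expected tip is then b13 (the third element overrides the default).
    yield TestInstance([[b12, True, b13.sha256]])
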
diff --git a/qa/rpc-tests/test_framework/key.py b/qa/rpc-tests/test_framework/key.py
new file mode 100644
index 0000000000..ba3038fe04
--- /dev/null
+++ b/qa/rpc-tests/test_framework/key.py
@@ -0,0 +1,215 @@
+# Copyright (c) 2011 Sam Rushing
+#
+# key.py - OpenSSL wrapper
+#
+# This file is modified from python-bitcoinlib.
+#
+
+"""ECC secp256k1 crypto routines
+
+WARNING: This module does not mlock() secrets; your private keys may end up on
+disk in swap! Use with caution!
+"""
+
+import ctypes
+import ctypes.util
+import hashlib
+import sys
+
+ssl = ctypes.cdll.LoadLibrary(ctypes.util.find_library ('ssl') or 'libeay32')
+
+ssl.BN_new.restype = ctypes.c_void_p
+ssl.BN_new.argtypes = []
+
+ssl.BN_bin2bn.restype = ctypes.c_void_p
+ssl.BN_bin2bn.argtypes = [ctypes.c_char_p, ctypes.c_int, ctypes.c_void_p]
+
+ssl.BN_CTX_free.restype = None
+ssl.BN_CTX_free.argtypes = [ctypes.c_void_p]
+
+ssl.BN_CTX_new.restype = ctypes.c_void_p
+ssl.BN_CTX_new.argtypes = []
+
+ssl.ECDH_compute_key.restype = ctypes.c_int
+ssl.ECDH_compute_key.argtypes = [ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p, ctypes.c_void_p]
+
+ssl.ECDSA_sign.restype = ctypes.c_int
+ssl.ECDSA_sign.argtypes = [ctypes.c_int, ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p]
+
+ssl.ECDSA_verify.restype = ctypes.c_int
+ssl.ECDSA_verify.argtypes = [ctypes.c_int, ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p]
+
+ssl.EC_KEY_free.restype = None
+ssl.EC_KEY_free.argtypes = [ctypes.c_void_p]
+
+ssl.EC_KEY_new_by_curve_name.restype = ctypes.c_void_p
+ssl.EC_KEY_new_by_curve_name.argtypes = [ctypes.c_int]
+
+ssl.EC_KEY_get0_group.restype = ctypes.c_void_p
+ssl.EC_KEY_get0_group.argtypes = [ctypes.c_void_p]
+
+ssl.EC_KEY_get0_public_key.restype = ctypes.c_void_p
+ssl.EC_KEY_get0_public_key.argtypes = [ctypes.c_void_p]
+
+ssl.EC_KEY_set_private_key.restype = ctypes.c_int
+ssl.EC_KEY_set_private_key.argtypes = [ctypes.c_void_p, ctypes.c_void_p]
+
+ssl.EC_KEY_set_conv_form.restype = None
+ssl.EC_KEY_set_conv_form.argtypes = [ctypes.c_void_p, ctypes.c_int]
+
+ssl.EC_KEY_set_public_key.restype = ctypes.c_int
+ssl.EC_KEY_set_public_key.argtypes = [ctypes.c_void_p, ctypes.c_void_p]
+
+ssl.i2o_ECPublicKey.restype = ctypes.c_void_p
+ssl.i2o_ECPublicKey.argtypes = [ctypes.c_void_p, ctypes.c_void_p]
+
+ssl.EC_POINT_new.restype = ctypes.c_void_p
+ssl.EC_POINT_new.argtypes = [ctypes.c_void_p]
+
+ssl.EC_POINT_free.restype = None
+ssl.EC_POINT_free.argtypes = [ctypes.c_void_p]
+
+ssl.EC_POINT_mul.restype = ctypes.c_int
+ssl.EC_POINT_mul.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p]
+
+# this specifies the curve used with ECDSA.
+NID_secp256k1 = 714 # from openssl/obj_mac.h
+
+# Thx to Sam Devlin for the ctypes magic 64-bit fix.
+def _check_result(val, func, args):
+ if val == 0:
+ raise ValueError
+ else:
+ return ctypes.c_void_p (val)
+
+ssl.EC_KEY_new_by_curve_name.restype = ctypes.c_void_p
+ssl.EC_KEY_new_by_curve_name.errcheck = _check_result
+
+class CECKey(object):
+ """Wrapper around OpenSSL's EC_KEY"""
+
+ POINT_CONVERSION_COMPRESSED = 2
+ POINT_CONVERSION_UNCOMPRESSED = 4
+
+ def __init__(self):
+ self.k = ssl.EC_KEY_new_by_curve_name(NID_secp256k1)
+
+ def __del__(self):
+ if ssl:
+ ssl.EC_KEY_free(self.k)
+ self.k = None
+
+ def set_secretbytes(self, secret):
+ priv_key = ssl.BN_bin2bn(secret, 32, ssl.BN_new())
+ group = ssl.EC_KEY_get0_group(self.k)
+ pub_key = ssl.EC_POINT_new(group)
+ ctx = ssl.BN_CTX_new()
+ if not ssl.EC_POINT_mul(group, pub_key, priv_key, None, None, ctx):
+ raise ValueError("Could not derive public key from the supplied secret.")
+ ssl.EC_POINT_mul(group, pub_key, priv_key, None, None, ctx)
+ ssl.EC_KEY_set_private_key(self.k, priv_key)
+ ssl.EC_KEY_set_public_key(self.k, pub_key)
+ ssl.EC_POINT_free(pub_key)
+ ssl.BN_CTX_free(ctx)
+ return self.k
+
+ def set_privkey(self, key):
+ self.mb = ctypes.create_string_buffer(key)
+ return ssl.d2i_ECPrivateKey(ctypes.byref(self.k), ctypes.byref(ctypes.pointer(self.mb)), len(key))
+
+ def set_pubkey(self, key):
+ self.mb = ctypes.create_string_buffer(key)
+ return ssl.o2i_ECPublicKey(ctypes.byref(self.k), ctypes.byref(ctypes.pointer(self.mb)), len(key))
+
+ def get_privkey(self):
+ size = ssl.i2d_ECPrivateKey(self.k, 0)
+ mb_pri = ctypes.create_string_buffer(size)
+ ssl.i2d_ECPrivateKey(self.k, ctypes.byref(ctypes.pointer(mb_pri)))
+ return mb_pri.raw
+
+ def get_pubkey(self):
+ size = ssl.i2o_ECPublicKey(self.k, 0)
+ mb = ctypes.create_string_buffer(size)
+ ssl.i2o_ECPublicKey(self.k, ctypes.byref(ctypes.pointer(mb)))
+ return mb.raw
+
+ def get_raw_ecdh_key(self, other_pubkey):
+ ecdh_keybuffer = ctypes.create_string_buffer(32)
+ r = ssl.ECDH_compute_key(ctypes.pointer(ecdh_keybuffer), 32,
+ ssl.EC_KEY_get0_public_key(other_pubkey.k),
+ self.k, 0)
+ if r != 32:
+ raise Exception('CKey.get_ecdh_key(): ECDH_compute_key() failed')
+ return ecdh_keybuffer.raw
+
+ def get_ecdh_key(self, other_pubkey, kdf=lambda k: hashlib.sha256(k).digest()):
+ # FIXME: be warned it's not clear what the kdf should be as a default
+ r = self.get_raw_ecdh_key(other_pubkey)
+ return kdf(r)
+
+ def sign(self, hash):
+ # FIXME: need unit tests for below cases
+ if not isinstance(hash, bytes):
+ raise TypeError('Hash must be bytes instance; got %r' % hash.__class__)
+ if len(hash) != 32:
+ raise ValueError('Hash must be exactly 32 bytes long')
+
+ sig_size0 = ctypes.c_uint32()
+ sig_size0.value = ssl.ECDSA_size(self.k)
+ mb_sig = ctypes.create_string_buffer(sig_size0.value)
+ result = ssl.ECDSA_sign(0, hash, len(hash), mb_sig, ctypes.byref(sig_size0), self.k)
+ assert 1 == result
+ return mb_sig.raw[:sig_size0.value]
+
+ def verify(self, hash, sig):
+ """Verify a DER signature"""
+ return ssl.ECDSA_verify(0, hash, len(hash), sig, len(sig), self.k) == 1
+
+ def set_compressed(self, compressed):
+ if compressed:
+ form = self.POINT_CONVERSION_COMPRESSED
+ else:
+ form = self.POINT_CONVERSION_UNCOMPRESSED
+ ssl.EC_KEY_set_conv_form(self.k, form)
+
+
+class CPubKey(bytes):
+ """An encapsulated public key
+
+ Attributes:
+
+ is_valid - Corresponds to CPubKey.IsValid()
+ is_fullyvalid - Corresponds to CPubKey.IsFullyValid()
+ is_compressed - Corresponds to CPubKey.IsCompressed()
+ """
+
+ def __new__(cls, buf, _cec_key=None):
+ self = super(CPubKey, cls).__new__(cls, buf)
+ if _cec_key is None:
+ _cec_key = CECKey()
+ self._cec_key = _cec_key
+ self.is_fullyvalid = _cec_key.set_pubkey(self) != 0
+ return self
+
+ @property
+ def is_valid(self):
+ return len(self) > 0
+
+ @property
+ def is_compressed(self):
+ return len(self) == 33
+
+ def verify(self, hash, sig):
+ return self._cec_key.verify(hash, sig)
+
+ def __str__(self):
+ return repr(self)
+
+ def __repr__(self):
+ # Always represent as b'<secret>' so test cases don't have to
+ # change for py2/3
+ if sys.version > '3':
+ return '%s(%s)' % (self.__class__.__name__, super(CPubKey, self).__repr__())
+ else:
+ return '%s(b%s)' % (self.__class__.__name__, super(CPubKey, self).__repr__())
+
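
A brief usage sketch of the CECKey wrapper above (the secret and message are arbitrary examples; hashlib is already imported by key.py):

    key = CECKey()
    key.set_secretbytes(hashlib.sha256(b"example secret").digest())  # 32-byte secret
    digest = hashlib.sha256(b"message").digest()
    sig = key.sign(digest)              # DER-encoded ECDSA signature
    assert key.verify(digest, sig)
    pub = CPubKey(key.get_pubkey())     # uncompressed SEC pubkey bytes by default
    assert pub.is_fullyvalid and pub.verify(digest, sig)
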
diff --git a/qa/rpc-tests/test_framework/script.py b/qa/rpc-tests/test_framework/script.py
index e37ab5d45a..0a78cf6fb1 100644
--- a/qa/rpc-tests/test_framework/script.py
+++ b/qa/rpc-tests/test_framework/script.py
@@ -27,7 +27,7 @@ if sys.version > '3':
import copy
import struct
-import test_framework.bignum
+from test_framework.bignum import bn2vch
MAX_SCRIPT_SIZE = 10000
MAX_SCRIPT_ELEMENT_SIZE = 520
@@ -664,7 +664,7 @@ class CScript(bytes):
elif other == -1:
other = bytes(bchr(OP_1NEGATE))
else:
- other = CScriptOp.encode_op_pushdata(bignum.bn2vch(other))
+ other = CScriptOp.encode_op_pushdata(bn2vch(other))
elif isinstance(other, (bytes, bytearray)):
other = CScriptOp.encode_op_pushdata(other)
return other
diff --git a/qa/rpc-tests/test_framework/util.py b/qa/rpc-tests/test_framework/util.py
index c236ec2602..3759cc8162 100644
--- a/qa/rpc-tests/test_framework/util.py
+++ b/qa/rpc-tests/test_framework/util.py
@@ -78,8 +78,17 @@ def initialize_chain(test_dir):
bitcoind and bitcoin-cli must be in search path.
"""
- if not os.path.isdir(os.path.join("cache", "node0")):
- devnull = open("/dev/null", "w+")
+ if (not os.path.isdir(os.path.join("cache","node0"))
+ or not os.path.isdir(os.path.join("cache","node1"))
+ or not os.path.isdir(os.path.join("cache","node2"))
+ or not os.path.isdir(os.path.join("cache","node3"))):
+
+ #find and delete old cache directories if any exist
+ for i in range(4):
+ if os.path.isdir(os.path.join("cache","node"+str(i))):
+ shutil.rmtree(os.path.join("cache","node"+str(i)))
+
+ devnull = open(os.devnull, "w")
# Create cache directories, run bitcoinds:
for i in range(4):
datadir=initialize_datadir("cache", i)
@@ -171,7 +180,7 @@ def start_node(i, dirname, extra_args=None, rpchost=None, timewait=None, binary=
args = [ binary, "-datadir="+datadir, "-keypool=1", "-discover=0", "-rest" ]
if extra_args is not None: args.extend(extra_args)
bitcoind_processes[i] = subprocess.Popen(args)
- devnull = open("/dev/null", "w+")
+ devnull = open(os.devnull, "w")
if os.getenv("PYTHON_DEBUG", ""):
print "start_node: bitcoind started, calling bitcoin-cli -rpcwait getblockcount"
subprocess.check_call([ os.getenv("BITCOINCLI", "bitcoin-cli"), "-datadir="+datadir] +
diff --git a/qa/rpc-tests/wallet.py b/qa/rpc-tests/wallet.py
index 46dc7765b6..f9ec6f429b 100755
--- a/qa/rpc-tests/wallet.py
+++ b/qa/rpc-tests/wallet.py
@@ -4,11 +4,11 @@
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
-# Exercise the wallet. Ported from wallet.sh.
+# Exercise the wallet. Ported from wallet.sh.
# Does the following:
# a) creates 3 nodes, with an empty chain (no blocks).
# b) node0 mines a block
-# c) node1 mines 101 blocks, so now nodes 0 and 1 have 50btc, node2 has none.
+# c) node1 mines 101 blocks, so now nodes 0 and 1 have 50btc, node2 has none.
# d) node0 sends 21 btc to node2, in two transactions (11 btc, then 10 btc).
# e) node0 mines a block, collects the fee on the second transaction
# f) node1 mines 100 blocks, to mature node0's just-mined block
@@ -75,14 +75,14 @@ class WalletTest (BitcoinTestFramework):
assert_equal(self.nodes[2].getbalance(), 21)
# Node0 should have two unspent outputs.
- # Create a couple of transactions to send them to node2, submit them through
- # node1, and make sure both node0 and node2 pick them up properly:
+ # Create a couple of transactions to send them to node2, submit them through
+ # node1, and make sure both node0 and node2 pick them up properly:
node0utxos = self.nodes[0].listunspent(1)
assert_equal(len(node0utxos), 2)
# create both transactions
txns_to_send = []
- for utxo in node0utxos:
+ for utxo in node0utxos:
inputs = []
outputs = {}
inputs.append({ "txid" : utxo["txid"], "vout" : utxo["vout"]})
@@ -149,27 +149,27 @@ class WalletTest (BitcoinTestFramework):
sync_mempools(self.nodes)
assert(txid1 in self.nodes[3].getrawmempool())
-
+
#check if we can list zero value tx as available coins
#1. create rawtx
- #2. hex-changed one output to 0.0
+ #2. hex-changed one output to 0.0
#3. sign and send
#4. check if recipient (node0) can list the zero value tx
usp = self.nodes[1].listunspent()
inputs = [{"txid":usp[0]['txid'], "vout":usp[0]['vout']}]
outputs = {self.nodes[1].getnewaddress(): 49.998, self.nodes[0].getnewaddress(): 11.11}
-
+
rawTx = self.nodes[1].createrawtransaction(inputs, outputs).replace("c0833842", "00000000") #replace 11.11 with 0.0 (int32)
decRawTx = self.nodes[1].decoderawtransaction(rawTx)
signedRawTx = self.nodes[1].signrawtransaction(rawTx)
decRawTx = self.nodes[1].decoderawtransaction(signedRawTx['hex'])
zeroValueTxid= decRawTx['txid']
sendResp = self.nodes[1].sendrawtransaction(signedRawTx['hex'])
-
+
self.sync_all()
self.nodes[1].generate(1) #mine a block
self.sync_all()
-
+
unspentTxs = self.nodes[0].listunspent() #zero value tx must be in listunspents output
found = False
for uTx in unspentTxs:
@@ -177,7 +177,7 @@ class WalletTest (BitcoinTestFramework):
found = True
assert_equal(uTx['amount'], Decimal('0.00000000'));
assert(found)
-
+
#do some -walletbroadcast tests
stop_nodes(self.nodes)
wait_bitcoinds()
@@ -192,17 +192,17 @@ class WalletTest (BitcoinTestFramework):
self.nodes[1].generate(1) #mine a block, tx should not be in there
self.sync_all()
assert_equal(self.nodes[2].getbalance(), Decimal('59.99800000')); #should not be changed because tx was not broadcast
-
+
#now broadcast from another node, mine a block, sync, and check the balance
self.nodes[1].sendrawtransaction(txObjNotBroadcasted['hex'])
self.nodes[1].generate(1)
self.sync_all()
txObjNotBroadcasted = self.nodes[0].gettransaction(txIdNotBroadcasted)
        assert_equal(self.nodes[2].getbalance(), Decimal('61.99800000')); #balance changes, node1 broadcast the tx
-
+
#create another tx
txIdNotBroadcasted = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 2);
-
+
#restart the nodes with -walletbroadcast=1
stop_nodes(self.nodes)
wait_bitcoinds()
@@ -211,12 +211,44 @@ class WalletTest (BitcoinTestFramework):
connect_nodes_bi(self.nodes,1,2)
connect_nodes_bi(self.nodes,0,2)
sync_blocks(self.nodes)
-
+
self.nodes[0].generate(1)
sync_blocks(self.nodes)
-
+
        #tx should be added to the balance because after restarting the nodes the tx should be broadcast
        assert_equal(self.nodes[2].getbalance(), Decimal('63.99800000')); #balance changes, tx was broadcast after the restart
-
+
+        #send a tx with a value given as a string (PR#6380 +)
+ txId = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), "2")
+ txObj = self.nodes[0].gettransaction(txId)
+ assert_equal(txObj['amount'], Decimal('-2.00000000'))
+
+ txId = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), "0.0001")
+ txObj = self.nodes[0].gettransaction(txId)
+ assert_equal(txObj['amount'], Decimal('-0.00010000'))
+
+        #check if the JSON parser can handle scientific notation in strings
+ txId = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), "1e-4")
+ txObj = self.nodes[0].gettransaction(txId)
+ assert_equal(txObj['amount'], Decimal('-0.00010000'))
+
+ #this should fail
+ errorString = ""
+ try:
+ txId = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), "1f-4")
+ except JSONRPCException,e:
+ errorString = e.error['message']
+
+ assert_equal("Invalid amount" in errorString, True);
+
+ errorString = ""
+ try:
+            self.nodes[0].generate("2") #passing the block count as a string must fail because it is not interpreted as an integer
+ except JSONRPCException,e:
+ errorString = e.error['message']
+
+ assert_equal("not an integer" in errorString, True);
+
+
if __name__ == '__main__':
WalletTest ().main ()
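
Note: the string-amount assertions added above rely on "0.0001" and "1e-4" resolving to the same amount while "1f-4" is rejected with "Invalid amount". The following is a minimal sketch of that parsing behaviour using only the standard-library Decimal class; the parse_amount helper is purely illustrative and not part of the test framework or of bitcoind's actual JSON parser.

    from decimal import Decimal, InvalidOperation

    def parse_amount(value):
        # Accept the spellings exercised by the test: "2", "0.0001", "1e-4".
        try:
            return Decimal(value)
        except InvalidOperation:
            raise ValueError("Invalid amount")

    assert parse_amount("2") == Decimal("2.00000000")
    assert parse_amount("0.0001") == parse_amount("1e-4") == Decimal("0.00010000")
    try:
        parse_amount("1f-4")  # malformed, mirrors the RPC's "Invalid amount" error
    except ValueError:
        pass
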
diff --git a/qa/rpc-tests/zmq_test.py b/qa/rpc-tests/zmq_test.py
new file mode 100755
index 0000000000..bcb132321a
--- /dev/null
+++ b/qa/rpc-tests/zmq_test.py
@@ -0,0 +1,91 @@
+#!/usr/bin/env python2
+# Copyright (c) 2015 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+
+#
+# Test ZMQ interface
+#
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import *
+import zmq
+import binascii
+from test_framework.mininode import hash256
+
+try:
+ import http.client as httplib
+except ImportError:
+ import httplib
+try:
+ import urllib.parse as urlparse
+except ImportError:
+ import urlparse
+
+class ZMQTest (BitcoinTestFramework):
+
+ port = 28332
+
+ def setup_nodes(self):
+ self.zmqContext = zmq.Context()
+ self.zmqSubSocket = self.zmqContext.socket(zmq.SUB)
+ self.zmqSubSocket.setsockopt(zmq.SUBSCRIBE, "hashblock")
+ self.zmqSubSocket.setsockopt(zmq.SUBSCRIBE, "hashtx")
+ self.zmqSubSocket.connect("tcp://127.0.0.1:%i" % self.port)
+ return start_nodes(4, self.options.tmpdir, extra_args=[
+ ['-zmqpubhashtx=tcp://127.0.0.1:'+str(self.port), '-zmqpubhashblock=tcp://127.0.0.1:'+str(self.port)],
+ [],
+ [],
+ []
+ ])
+
+ def run_test(self):
+ self.sync_all()
+
+ genhashes = self.nodes[0].generate(1);
+ self.sync_all()
+
+ print "listen..."
+ msg = self.zmqSubSocket.recv_multipart()
+ topic = str(msg[0])
+ body = msg[1]
+
+ msg = self.zmqSubSocket.recv_multipart()
+ topic = str(msg[0])
+ body = msg[1]
+        blkhash = binascii.hexlify(body) # body of the second message is expected to be the block hash
+
+ assert_equal(genhashes[0], blkhash) #blockhash from generate must be equal to the hash received over zmq
+
+ n = 10
+ genhashes = self.nodes[1].generate(n);
+ self.sync_all()
+
+ zmqHashes = []
+        for x in range(0,n*2): # a hashblock and a coinbase hashtx notification are expected per generated block
+ msg = self.zmqSubSocket.recv_multipart()
+ topic = str(msg[0])
+ body = msg[1]
+ if topic == "hashblock":
+ zmqHashes.append(binascii.hexlify(body))
+
+ for x in range(0,n):
+ assert_equal(genhashes[x], zmqHashes[x]) #blockhash from generate must be equal to the hash received over zmq
+
+ #test tx from a second node
+ hashRPC = self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 1.0)
+ self.sync_all()
+
+        #now we should receive a zmq msg because the tx was broadcast
+ msg = self.zmqSubSocket.recv_multipart()
+ topic = str(msg[0])
+ body = msg[1]
+ hashZMQ = ""
+ if topic == "hashtx":
+ hashZMQ = binascii.hexlify(body)
+
+        assert_equal(hashRPC, hashZMQ) #txid from sendtoaddress must be equal to the hash received over zmq
+
+
+if __name__ == '__main__':
+ ZMQTest ().main ()
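
Note: for manual experimentation outside the test framework, a subscriber can be built from the same pyzmq calls the test uses. The sketch below assumes a bitcoind started with -zmqpubhashblock=tcp://127.0.0.1:28332 and -zmqpubhashtx=tcp://127.0.0.1:28332 (the endpoints passed via extra_args above); it is illustrative only and not part of the test suite.

    #!/usr/bin/env python2
    # Print every hashblock/hashtx notification published by a local bitcoind.
    import binascii
    import zmq

    context = zmq.Context()
    sub = context.socket(zmq.SUB)
    sub.setsockopt(zmq.SUBSCRIBE, "hashblock")
    sub.setsockopt(zmq.SUBSCRIBE, "hashtx")
    sub.connect("tcp://127.0.0.1:28332")

    while True:
        msg = sub.recv_multipart()
        topic, body = msg[0], msg[1]
        # topic is "hashblock" or "hashtx"; body is the raw 32-byte hash
        print topic, binascii.hexlify(body)
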