Diffstat (limited to 'qa')
-rw-r--r--  qa/README.md  57
-rwxr-xr-x  qa/pull-tester/rpc-tests.py  226  (was -rw-r--r--)
-rw-r--r--  qa/pull-tester/run-bitcoind-for-test.sh.in  0  (was -rwxr-xr-x)
-rw-r--r--  qa/rpc-tests/README.md  54
-rwxr-xr-x  qa/rpc-tests/abandonconflict.py  153
-rwxr-xr-x  qa/rpc-tests/bip65-cltv-p2p.py  182
-rwxr-xr-x  qa/rpc-tests/bip65-cltv.py  89
-rwxr-xr-x  qa/rpc-tests/bipdersig.py  2
-rwxr-xr-x  qa/rpc-tests/blockchain.py  52
-rwxr-xr-x  qa/rpc-tests/decodescript.py  4
-rwxr-xr-x  qa/rpc-tests/disablewallet.py  32
-rwxr-xr-x  qa/rpc-tests/forknotify.py  2
-rwxr-xr-x  qa/rpc-tests/fundrawtransaction.py  21
-rwxr-xr-x  qa/rpc-tests/getblocktemplate_longpoll.py  4
-rwxr-xr-x  qa/rpc-tests/getblocktemplate_proposals.py  2
-rwxr-xr-x  qa/rpc-tests/getchaintips.py  2
-rwxr-xr-x  qa/rpc-tests/httpbasics.py  16
-rwxr-xr-x  qa/rpc-tests/invalidateblock.py  2
-rwxr-xr-x  qa/rpc-tests/invalidblockrequest.py  6
-rwxr-xr-x  qa/rpc-tests/invalidtxrequest.py  76
-rwxr-xr-x  qa/rpc-tests/keypool.py  165
-rwxr-xr-x  qa/rpc-tests/listtransactions.py  2
-rwxr-xr-x  qa/rpc-tests/maxblocksinflight.py  1
-rwxr-xr-x  qa/rpc-tests/maxuploadtarget.py  266
-rwxr-xr-x  qa/rpc-tests/mempool_limit.py  55
-rwxr-xr-x  qa/rpc-tests/mempool_packages.py  44
-rwxr-xr-x  qa/rpc-tests/mempool_reorg.py  28  (renamed from qa/rpc-tests/mempool_coinbase_spends.py)
-rwxr-xr-x  qa/rpc-tests/mempool_resurrect_test.py  2
-rwxr-xr-x  qa/rpc-tests/mempool_spendcoinbase.py  2
-rwxr-xr-x  qa/rpc-tests/merkle_blocks.py  2
-rwxr-xr-x  qa/rpc-tests/multi_rpc.py  122
-rwxr-xr-x  qa/rpc-tests/nodehandling.py  2
-rwxr-xr-x  qa/rpc-tests/p2p-acceptblock.py  1
-rwxr-xr-x  qa/rpc-tests/p2p-fullblocktest.py  157
-rwxr-xr-x  qa/rpc-tests/prioritise_transaction.py  126
-rwxr-xr-x  qa/rpc-tests/pruning.py  26
-rwxr-xr-x  qa/rpc-tests/rawtransactions.py  2
-rwxr-xr-x  qa/rpc-tests/receivedby.py  2
-rwxr-xr-x  qa/rpc-tests/reindex.py  2
-rwxr-xr-x  qa/rpc-tests/replace-by-fee.py  593
-rwxr-xr-x  qa/rpc-tests/rest.py  2
-rwxr-xr-x  qa/rpc-tests/rpcbind_test.py  4
-rwxr-xr-x  qa/rpc-tests/script_test.py  259
-rwxr-xr-x  qa/rpc-tests/sendheaders.py  518
-rwxr-xr-x  qa/rpc-tests/smartfees.py  55
-rw-r--r--  qa/rpc-tests/test_framework/authproxy.py  10
-rwxr-xr-x  qa/rpc-tests/test_framework/comptool.py  41
-rw-r--r--  qa/rpc-tests/test_framework/coverage.py  101
-rwxr-xr-x  qa/rpc-tests/test_framework/mininode.py  70
-rw-r--r--  qa/rpc-tests/test_framework/netutil.py  2
-rw-r--r--  qa/rpc-tests/test_framework/script.py  8
-rwxr-xr-x  qa/rpc-tests/test_framework/test_framework.py  32
-rw-r--r--  qa/rpc-tests/test_framework/util.py  140
-rwxr-xr-x  qa/rpc-tests/txn_clone.py  4
-rwxr-xr-x  qa/rpc-tests/txn_doublespend.py  11
-rwxr-xr-x  qa/rpc-tests/wallet.py  97
-rwxr-xr-x  qa/rpc-tests/walletbackup.py  2
-rwxr-xr-x  qa/rpc-tests/zapwallettxes.py  2
58 files changed, 3276 insertions(+), 664 deletions(-)
diff --git a/qa/README.md b/qa/README.md
new file mode 100644
index 0000000000..758d1f47e5
--- /dev/null
+++ b/qa/README.md
@@ -0,0 +1,57 @@
+The [pull-tester](/qa/pull-tester/) folder contains a script to call
+multiple tests from the [rpc-tests](/qa/rpc-tests/) folder.
+
+Every pull request to the bitcoin repository is built and run through
+the regression test suite. You can also run all or only individual
+tests locally.
+
+Running tests
+=============
+
+You can run any single test by calling `qa/pull-tester/rpc-tests.py <testname>`.
+
+Or you can run any combination of tests by calling `qa/pull-tester/rpc-tests.py <testname1> <testname2> <testname3> ...`
+
+Run the regression test suite with `qa/pull-tester/rpc-tests.py`
+
+Run all possible tests with `qa/pull-tester/rpc-tests.py -extended`
+
+Possible options:
+
+```
+ -h, --help show this help message and exit
+ --nocleanup Leave bitcoinds and test.* datadir on exit or error
+ --noshutdown Don't stop bitcoinds after the test execution
+ --srcdir=SRCDIR Source directory containing bitcoind/bitcoin-cli
+ (default: ../../src)
+ --tmpdir=TMPDIR Root directory for datadirs
+ --tracerpc Print out all RPC calls as they are made
+ --coveragedir=COVERAGEDIR
+ Write tested RPC commands into this directory
+```
+
+If you set the environment variable `PYTHON_DEBUG=1` you will get some debug
+output (example: `PYTHON_DEBUG=1 qa/pull-tester/rpc-tests.py wallet`).
+
+A 200-block -regtest blockchain and wallets for four nodes
+are created the first time a regression test is run and
+are stored in the cache/ directory. Each node has 25 mature
+blocks (25*50=1250 BTC) in its wallet.
+
+After the first run, the cache/ blockchain and wallets are
+copied into a temporary directory and used as the initial
+test state.
+
+If you get into a bad state, you should be able
+to recover with:
+
+```bash
+rm -rf cache
+killall bitcoind
+```
+
+Writing tests
+=============
+You are encouraged to write tests for new or existing features.
+Further information about the test framework and individual rpc
+tests is found in [qa/rpc-tests](/qa/rpc-tests).
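For readers following the "Writing tests" pointer above, a brand-new test built on this framework would look roughly like the sketch below. It is illustrative only (the class name `ExampleTest` and its two-node setup are not part of this change) and assumes the repository's `test_framework` package is importable, as it is for the scripts under `qa/rpc-tests/`.

```python
#!/usr/bin/env python2
# Minimal sketch of a new regression test (illustrative, not part of this change).
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
    initialize_chain,
    start_nodes,
    connect_nodes_bi,
    assert_equal,
)

class ExampleTest(BitcoinTestFramework):

    def setup_chain(self):
        print("Initializing test directory " + self.options.tmpdir)
        initialize_chain(self.options.tmpdir)

    def setup_network(self):
        self.nodes = start_nodes(2, self.options.tmpdir)
        connect_nodes_bi(self.nodes, 0, 1)
        self.is_network_split = False
        self.sync_all()

    def run_test(self):
        # Mine one block on node 0 and check both nodes agree on the height.
        self.nodes[0].generate(1)
        self.sync_all()
        assert_equal(self.nodes[0].getblockcount(), self.nodes[1].getblockcount())

if __name__ == '__main__':
    ExampleTest().main()
```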
diff --git a/qa/pull-tester/rpc-tests.py b/qa/pull-tester/rpc-tests.py
index 68c1c0471e..e7173fda08 100644..100755
--- a/qa/pull-tester/rpc-tests.py
+++ b/qa/pull-tester/rpc-tests.py
@@ -1,19 +1,35 @@
#!/usr/bin/env python2
-
-# Copyright (c) 2014 The Bitcoin Core developers
+# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
-#
-# Run Regression Test Suite
-#
+"""
+Run Regression Test Suite
+
+This module calls down into individual test cases via subprocess. It will
+forward all unrecognized arguments onto the individual test scripts, other
+than:
+
+ - `-extended`: run the "extended" test suite in addition to the basic one.
+ - `-win`: signal that this is running in a Windows environment, and we
+ should run the tests.
+ - `--coverage`: this generates a basic coverage report for the RPC
+ interface.
+
+For a description of arguments recognized by test scripts, see
+`qa/pull-tester/test_framework/test_framework.py:BitcoinTestFramework.main`.
+
+"""
import os
+import time
+import shutil
import sys
import subprocess
+import tempfile
import re
+
from tests_config import *
-from sets import Set
#If imported values are not defined then set to zero (or disabled)
if not vars().has_key('ENABLE_WALLET'):
@@ -25,21 +41,32 @@ if not vars().has_key('ENABLE_UTILS'):
if not vars().has_key('ENABLE_ZMQ'):
ENABLE_ZMQ=0
+ENABLE_COVERAGE=0
+
#Create a set to store arguments and create the passOn string
-opts = Set()
+opts = set()
passOn = ""
p = re.compile("^--")
-for i in range(1,len(sys.argv)):
- if (p.match(sys.argv[i]) or sys.argv[i] == "-h"):
- passOn += " " + sys.argv[i]
+
+bold = ("","")
+if (os.name == 'posix'):
+ bold = ('\033[0m', '\033[1m')
+
+for arg in sys.argv[1:]:
+ if arg == '--coverage':
+ ENABLE_COVERAGE = 1
+ elif (p.match(arg) or arg == "-h"):
+ passOn += " " + arg
else:
- opts.add(sys.argv[i])
+ opts.add(arg)
#Set env vars
buildDir = BUILDDIR
-os.environ["BITCOIND"] = buildDir + '/src/bitcoind' + EXEEXT
-os.environ["BITCOINCLI"] = buildDir + '/src/bitcoin-cli' + EXEEXT
-
+if "BITCOIND" not in os.environ:
+ os.environ["BITCOIND"] = buildDir + '/src/bitcoind' + EXEEXT
+if "BITCOINCLI" not in os.environ:
+ os.environ["BITCOINCLI"] = buildDir + '/src/bitcoin-cli' + EXEEXT
+
#Disable Windows tests by default
if EXEEXT == ".exe" and "-win" not in opts:
print "Win tests currently disabled. Use -win option to enable"
@@ -49,6 +76,7 @@ if EXEEXT == ".exe" and "-win" not in opts:
testScripts = [
'wallet.py',
'listtransactions.py',
+ 'receivedby.py',
'mempool_resurrect_test.py',
'txn_doublespend.py --mineblock',
'txn_clone.py',
@@ -56,8 +84,10 @@ testScripts = [
'rawtransactions.py',
'rest.py',
'mempool_spendcoinbase.py',
- 'mempool_coinbase_spends.py',
+ 'mempool_reorg.py',
+ 'mempool_limit.py',
'httpbasics.py',
+ 'multi_rpc.py',
'zapwallettxes.py',
'proxy_test.py',
'merkle_blocks.py',
@@ -68,8 +98,18 @@ testScripts = [
'reindex.py',
'decodescript.py',
'p2p-fullblocktest.py',
+ 'blockchain.py',
+ 'disablewallet.py',
+ 'sendheaders.py',
+ 'keypool.py',
+ 'prioritise_transaction.py',
+ 'invalidblockrequest.py',
+ 'invalidtxrequest.py',
+ 'abandonconflict.py',
]
testScriptsExt = [
+ 'bip65-cltv.py',
+ 'bip65-cltv-p2p.py',
'bipdersig-p2p.py',
'bipdersig.py',
'getblocktemplate_longpoll.py',
@@ -79,40 +119,142 @@ testScriptsExt = [
'pruning.py',
'forknotify.py',
'invalidateblock.py',
- 'keypool.py',
- 'receivedby.py',
- 'rpcbind_test.py',
-# 'script_test.py',
+# 'rpcbind_test.py', #temporary, bug in libevent, see #6655
'smartfees.py',
'maxblocksinflight.py',
- 'invalidblockrequest.py',
-# 'forknotify.py',
'p2p-acceptblock.py',
'mempool_packages.py',
+ 'maxuploadtarget.py',
+ 'replace-by-fee.py',
]
#Enable ZMQ tests
if ENABLE_ZMQ == 1:
testScripts.append('zmq_test.py')
-if(ENABLE_WALLET == 1 and ENABLE_UTILS == 1 and ENABLE_BITCOIND == 1):
- rpcTestDir = buildDir + '/qa/rpc-tests/'
- #Run Tests
- for i in range(len(testScripts)):
- if (len(opts) == 0 or (len(opts) == 1 and "-win" in opts ) or '-extended' in opts
- or testScripts[i] in opts or re.sub(".py$", "", testScripts[i]) in opts ):
- print "Running testscript " + testScripts[i] + "..."
- subprocess.call(rpcTestDir + testScripts[i] + " --srcdir " + buildDir + '/src ' + passOn,shell=True)
- #exit if help is called so we print just one set of instructions
- p = re.compile(" -h| --help")
- if p.match(passOn):
- sys.exit(0)
-
- #Run Extended Tests
- for i in range(len(testScriptsExt)):
- if ('-extended' in opts or testScriptsExt[i] in opts
- or re.sub(".py$", "", testScriptsExt[i]) in opts):
- print "Running 2nd level testscript " + testScriptsExt[i] + "..."
- subprocess.call(rpcTestDir + testScriptsExt[i] + " --srcdir " + buildDir + '/src ' + passOn,shell=True)
-else:
- print "No rpc tests to run. Wallet, utils, and bitcoind must all be enabled"
+
+def runtests():
+ coverage = None
+
+ if ENABLE_COVERAGE:
+ coverage = RPCCoverage()
+ print("Initializing coverage directory at %s\n" % coverage.dir)
+
+ if(ENABLE_WALLET == 1 and ENABLE_UTILS == 1 and ENABLE_BITCOIND == 1):
+ rpcTestDir = buildDir + '/qa/rpc-tests/'
+ run_extended = '-extended' in opts
+ cov_flag = coverage.flag if coverage else ''
+ flags = " --srcdir %s/src %s %s" % (buildDir, cov_flag, passOn)
+
+ #Run Tests
+ for i in range(len(testScripts)):
+ if (len(opts) == 0
+ or (len(opts) == 1 and "-win" in opts )
+ or run_extended
+ or testScripts[i] in opts
+ or re.sub(".py$", "", testScripts[i]) in opts ):
+
+ print("Running testscript %s%s%s ..." % (bold[1], testScripts[i], bold[0]))
+ time0 = time.time()
+ subprocess.check_call(
+ rpcTestDir + testScripts[i] + flags, shell=True)
+ print("Duration: %s s\n" % (int(time.time() - time0)))
+
+ # exit if help is called so we print just one set of
+ # instructions
+ p = re.compile(" -h| --help")
+ if p.match(passOn):
+ sys.exit(0)
+
+ # Run Extended Tests
+ for i in range(len(testScriptsExt)):
+ if (run_extended or testScriptsExt[i] in opts
+ or re.sub(".py$", "", testScriptsExt[i]) in opts):
+
+ print(
+ "Running 2nd level testscript "
+ + "%s%s%s ..." % (bold[1], testScriptsExt[i], bold[0]))
+ time0 = time.time()
+ subprocess.check_call(
+ rpcTestDir + testScriptsExt[i] + flags, shell=True)
+ print("Duration: %s s\n" % (int(time.time() - time0)))
+
+ if coverage:
+ coverage.report_rpc_coverage()
+
+ print("Cleaning up coverage data")
+ coverage.cleanup()
+
+ else:
+ print "No rpc tests to run. Wallet, utils, and bitcoind must all be enabled"
+
+
+class RPCCoverage(object):
+ """
+ Coverage reporting utilities for pull-tester.
+
+ Coverage calculation works by having each test script subprocess write
+ coverage files into a particular directory. These files contain the RPC
+ commands invoked during testing, as well as a complete listing of RPC
+ commands per `bitcoin-cli help` (`rpc_interface.txt`).
+
+ After all tests complete, the commands run are combined and diff'd against
+ the complete list to calculate uncovered RPC commands.
+
+ See also: qa/rpc-tests/test_framework/coverage.py
+
+ """
+ def __init__(self):
+ self.dir = tempfile.mkdtemp(prefix="coverage")
+ self.flag = '--coveragedir %s' % self.dir
+
+ def report_rpc_coverage(self):
+ """
+ Print out RPC commands that were unexercised by tests.
+
+ """
+ uncovered = self._get_uncovered_rpc_commands()
+
+ if uncovered:
+ print("Uncovered RPC commands:")
+ print("".join((" - %s\n" % i) for i in sorted(uncovered)))
+ else:
+ print("All RPC commands covered.")
+
+ def cleanup(self):
+ return shutil.rmtree(self.dir)
+
+ def _get_uncovered_rpc_commands(self):
+ """
+ Return a set of currently untested RPC commands.
+
+ """
+        # This is shared from `qa/rpc-tests/test_framework/coverage.py`
+ REFERENCE_FILENAME = 'rpc_interface.txt'
+ COVERAGE_FILE_PREFIX = 'coverage.'
+
+ coverage_ref_filename = os.path.join(self.dir, REFERENCE_FILENAME)
+ coverage_filenames = set()
+ all_cmds = set()
+ covered_cmds = set()
+
+ if not os.path.isfile(coverage_ref_filename):
+ raise RuntimeError("No coverage reference found")
+
+ with open(coverage_ref_filename, 'r') as f:
+ all_cmds.update([i.strip() for i in f.readlines()])
+
+ for root, dirs, files in os.walk(self.dir):
+ for filename in files:
+ if filename.startswith(COVERAGE_FILE_PREFIX):
+ coverage_filenames.add(os.path.join(root, filename))
+
+ for filename in coverage_filenames:
+ with open(filename, 'r') as f:
+ covered_cmds.update([i.strip() for i in f.readlines()])
+
+ return all_cmds - covered_cmds
+
+
+if __name__ == '__main__':
+ runtests()
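The selection logic in `runtests()` above boils down to one rule per list: a script from `testScripts` runs when no names were given on the command line (a lone `-win` does not count), when `-extended` was passed, or when its name appears with or without the `.py` suffix; scripts from `testScriptsExt` drop the "no names given" shortcut. A standalone restatement of that rule, as a sketch of my own rather than code from this change:

```python
import re

def should_run(script, opts, run_extended, extended_list=False):
    """Return True if `script` (e.g. 'wallet.py') would be selected by rpc-tests.py."""
    named = script in opts or re.sub(".py$", "", script) in opts
    if extended_list:
        return run_extended or named
    no_selection = len(opts) == 0 or (len(opts) == 1 and "-win" in opts)
    return no_selection or run_extended or named

# e.g. should_run('wallet.py', {'wallet'}, False) -> True
```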
diff --git a/qa/pull-tester/run-bitcoind-for-test.sh.in b/qa/pull-tester/run-bitcoind-for-test.sh.in
index 14ae08e4e5..14ae08e4e5 100755..100644
--- a/qa/pull-tester/run-bitcoind-for-test.sh.in
+++ b/qa/pull-tester/run-bitcoind-for-test.sh.in
diff --git a/qa/rpc-tests/README.md b/qa/rpc-tests/README.md
index 3391ee03ae..651b01f18a 100644
--- a/qa/rpc-tests/README.md
+++ b/qa/rpc-tests/README.md
@@ -1,10 +1,8 @@
Regression tests
================
-### [python-bitcoinrpc](https://github.com/jgarzik/python-bitcoinrpc)
-Git subtree of [https://github.com/jgarzik/python-bitcoinrpc](https://github.com/jgarzik/python-bitcoinrpc).
-Changes to python-bitcoinrpc should be made upstream, and then
-pulled here using git subtree.
+### [test_framework/authproxy.py](test_framework/authproxy.py)
+Taken from the [python-bitcoinrpc repository](https://github.com/jgarzik/python-bitcoinrpc).
### [test_framework/test_framework.py](test_framework/test_framework.py)
Base class for new regression tests.
@@ -33,49 +31,6 @@ Helpers for script.py
### [test_framework/blocktools.py](test_framework/blocktools.py)
Helper functions for creating blocks and transactions.
-
-Notes
-=====
-
-You can run any single test by calling qa/pull-tester/rpc-tests.py <testname>
-
-Or you can run any combination of tests by calling `qa/pull-tester/rpc-tests.py <testname1> <testname2> <testname3> ...`
-
-Run the regression test suite with `qa/pull-tester/rpc-tests.py'
-
-Run all possible tests with `qa/pull-tester/rpc-tests.py -extended`
-
-Possible options:
-
-```
--h, --help show this help message and exit
- --nocleanup Leave bitcoinds and test.* datadir on exit or error
- --noshutdown Don't stop bitcoinds after the test execution
- --srcdir=SRCDIR Source directory containing bitcoind/bitcoin-cli (default:
- ../../src)
- --tmpdir=TMPDIR Root directory for datadirs
- --tracerpc Print out all RPC calls as they are made
-```
-
-If you set the environment variable `PYTHON_DEBUG=1` you will get some debug output (example: `PYTHON_DEBUG=1 qa/pull-tester/rpc-tests.py wallet`).
-
-A 200-block -regtest blockchain and wallets for four nodes
-is created the first time a regression test is run and
-is stored in the cache/ directory. Each node has 25 mature
-blocks (25*50=1250 BTC) in its wallet.
-
-After the first run, the cache/ blockchain and wallets are
-copied into a temporary directory and used as the initial
-test state.
-
-If you get into a bad state, you should be able
-to recover with:
-
-```bash
-rm -rf cache
-killall bitcoind
-```
-
P2P test design notes
---------------------
@@ -92,10 +47,7 @@ implements the test logic.
* ```NodeConn``` is the class used to connect to a bitcoind. If you implement
a callback class that derives from ```NodeConnCB``` and pass that to the
```NodeConn``` object, your code will receive the appropriate callbacks when
-events of interest arrive. NOTE: be sure to call
-```self.create_callback_map()``` in your derived classes' ```__init__```
-function, so that the correct mappings are set up between p2p messages and your
-callback functions.
+events of interest arrive.
* You can pass the same handler to multiple ```NodeConn```'s if you like, or pass
different ones to each -- whatever makes the most sense for your test.
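To make the callback pattern above concrete, a bare-bones peer could be wired up as in the sketch below. It is modeled on the `TestNode` helpers added elsewhere in this change (for example in `maxuploadtarget.py`); the class name `MiniPeer` is illustrative, and the usage lines assume they run inside a test's `run_test()`.

```python
from test_framework.mininode import NodeConnCB, NodeConn, NetworkThread
from test_framework.util import p2p_port

class MiniPeer(NodeConnCB):
    def __init__(self):
        NodeConnCB.__init__(self)
        self.connection = None

    def add_connection(self, conn):
        self.connection = conn

    # Called whenever the node announces new inventory to this peer.
    def on_inv(self, conn, message):
        print "received inv with %d item(s)" % len(message.inv)

# Inside run_test(), with self.nodes[0] a running bitcoind:
#   peer = MiniPeer()
#   conn = NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], peer)
#   peer.add_connection(conn)
#   NetworkThread().start()  # handle the connection in a background thread
```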
diff --git a/qa/rpc-tests/abandonconflict.py b/qa/rpc-tests/abandonconflict.py
new file mode 100755
index 0000000000..38028df079
--- /dev/null
+++ b/qa/rpc-tests/abandonconflict.py
@@ -0,0 +1,153 @@
+#!/usr/bin/env python2
+# Copyright (c) 2014-2015 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import *
+try:
+ import urllib.parse as urlparse
+except ImportError:
+ import urlparse
+
+class AbandonConflictTest(BitcoinTestFramework):
+
+ def setup_network(self):
+ self.nodes = []
+ self.nodes.append(start_node(0, self.options.tmpdir, ["-debug","-logtimemicros","-minrelaytxfee=0.00001"]))
+ self.nodes.append(start_node(1, self.options.tmpdir, ["-debug","-logtimemicros"]))
+ connect_nodes(self.nodes[0], 1)
+
+ def run_test(self):
+ self.nodes[1].generate(100)
+ sync_blocks(self.nodes)
+ balance = self.nodes[0].getbalance()
+ txA = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), Decimal("10"))
+ txB = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), Decimal("10"))
+ txC = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), Decimal("10"))
+ sync_mempools(self.nodes)
+ self.nodes[1].generate(1)
+
+ sync_blocks(self.nodes)
+ newbalance = self.nodes[0].getbalance()
+ assert(balance - newbalance < Decimal("0.001")) #no more than fees lost
+ balance = newbalance
+
+ url = urlparse.urlparse(self.nodes[1].url)
+ self.nodes[0].disconnectnode(url.hostname+":"+str(p2p_port(1)))
+
+ # Identify the 10btc outputs
+ nA = next(i for i, vout in enumerate(self.nodes[0].getrawtransaction(txA, 1)["vout"]) if vout["value"] == Decimal("10"))
+ nB = next(i for i, vout in enumerate(self.nodes[0].getrawtransaction(txB, 1)["vout"]) if vout["value"] == Decimal("10"))
+ nC = next(i for i, vout in enumerate(self.nodes[0].getrawtransaction(txC, 1)["vout"]) if vout["value"] == Decimal("10"))
+
+ inputs =[]
+ # spend 10btc outputs from txA and txB
+ inputs.append({"txid":txA, "vout":nA})
+ inputs.append({"txid":txB, "vout":nB})
+ outputs = {}
+
+ outputs[self.nodes[0].getnewaddress()] = Decimal("14.99998")
+ outputs[self.nodes[1].getnewaddress()] = Decimal("5")
+ signed = self.nodes[0].signrawtransaction(self.nodes[0].createrawtransaction(inputs, outputs))
+ txAB1 = self.nodes[0].sendrawtransaction(signed["hex"])
+
+ # Identify the 14.99998btc output
+ nAB = next(i for i, vout in enumerate(self.nodes[0].getrawtransaction(txAB1, 1)["vout"]) if vout["value"] == Decimal("14.99998"))
+
+ #Create a child tx spending AB1 and C
+ inputs = []
+ inputs.append({"txid":txAB1, "vout":nAB})
+ inputs.append({"txid":txC, "vout":nC})
+ outputs = {}
+ outputs[self.nodes[0].getnewaddress()] = Decimal("24.9996")
+ signed2 = self.nodes[0].signrawtransaction(self.nodes[0].createrawtransaction(inputs, outputs))
+ txABC2 = self.nodes[0].sendrawtransaction(signed2["hex"])
+
+ # In mempool txs from self should increase balance from change
+ newbalance = self.nodes[0].getbalance()
+ assert(newbalance == balance - Decimal("30") + Decimal("24.9996"))
+ balance = newbalance
+
+ # Restart the node with a higher min relay fee so the parent tx is no longer in mempool
+ # TODO: redo with eviction
+ # Note had to make sure tx did not have AllowFree priority
+ stop_node(self.nodes[0],0)
+ self.nodes[0]=start_node(0, self.options.tmpdir, ["-debug","-logtimemicros","-minrelaytxfee=0.0001"])
+
+ # Verify txs no longer in mempool
+ assert(len(self.nodes[0].getrawmempool()) == 0)
+
+ # Not in mempool txs from self should only reduce balance
+ # inputs are still spent, but change not received
+ newbalance = self.nodes[0].getbalance()
+ assert(newbalance == balance - Decimal("24.9996"))
+ balance = newbalance
+
+ # Abandon original transaction and verify inputs are available again
+ # including that the child tx was also abandoned
+ self.nodes[0].abandontransaction(txAB1)
+ newbalance = self.nodes[0].getbalance()
+ assert(newbalance == balance + Decimal("30"))
+ balance = newbalance
+
+ # Verify that even with a low min relay fee, the tx is not reaccepted from wallet on startup once abandoned
+ stop_node(self.nodes[0],0)
+ self.nodes[0]=start_node(0, self.options.tmpdir, ["-debug","-logtimemicros","-minrelaytxfee=0.00001"])
+ assert(len(self.nodes[0].getrawmempool()) == 0)
+ assert(self.nodes[0].getbalance() == balance)
+
+ # But if its received again then it is unabandoned
+ # And since now in mempool, the change is available
+ # But its child tx remains abandoned
+ self.nodes[0].sendrawtransaction(signed["hex"])
+ newbalance = self.nodes[0].getbalance()
+ assert(newbalance == balance - Decimal("20") + Decimal("14.99998"))
+ balance = newbalance
+
+ # Send child tx again so its unabandoned
+ self.nodes[0].sendrawtransaction(signed2["hex"])
+ newbalance = self.nodes[0].getbalance()
+ assert(newbalance == balance - Decimal("10") - Decimal("14.99998") + Decimal("24.9996"))
+ balance = newbalance
+
+ # Remove using high relay fee again
+ stop_node(self.nodes[0],0)
+ self.nodes[0]=start_node(0, self.options.tmpdir, ["-debug","-logtimemicros","-minrelaytxfee=0.0001"])
+ assert(len(self.nodes[0].getrawmempool()) == 0)
+ newbalance = self.nodes[0].getbalance()
+ assert(newbalance == balance - Decimal("24.9996"))
+ balance = newbalance
+
+ # Create a double spend of AB1 by spending again from only A's 10 output
+ # Mine double spend from node 1
+ inputs =[]
+ inputs.append({"txid":txA, "vout":nA})
+ outputs = {}
+ outputs[self.nodes[1].getnewaddress()] = Decimal("9.9999")
+ tx = self.nodes[0].createrawtransaction(inputs, outputs)
+ signed = self.nodes[0].signrawtransaction(tx)
+ self.nodes[1].sendrawtransaction(signed["hex"])
+ self.nodes[1].generate(1)
+
+ connect_nodes(self.nodes[0], 1)
+ sync_blocks(self.nodes)
+
+ # Verify that B and C's 10 BTC outputs are available for spending again because AB1 is now conflicted
+ newbalance = self.nodes[0].getbalance()
+ assert(newbalance == balance + Decimal("20"))
+ balance = newbalance
+
+ # There is currently a minor bug around this and so this test doesn't work. See Issue #7315
+ # Invalidate the block with the double spend and B's 10 BTC output should no longer be available
+ # Don't think C's should either
+ self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
+ newbalance = self.nodes[0].getbalance()
+ #assert(newbalance == balance - Decimal("10"))
+ print "If balance has not declined after invalidateblock then out of mempool wallet tx which is no longer"
+ print "conflicted has not resumed causing its inputs to be seen as spent. See Issue #7315"
+ print balance , " -> " , newbalance , " ?"
+
+if __name__ == '__main__':
+ AbandonConflictTest().main()
diff --git a/qa/rpc-tests/bip65-cltv-p2p.py b/qa/rpc-tests/bip65-cltv-p2p.py
new file mode 100755
index 0000000000..5bb41df1ad
--- /dev/null
+++ b/qa/rpc-tests/bip65-cltv-p2p.py
@@ -0,0 +1,182 @@
+#!/usr/bin/env python2
+#
+# Distributed under the MIT/X11 software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+#
+
+from test_framework.test_framework import ComparisonTestFramework
+from test_framework.util import *
+from test_framework.mininode import CTransaction, NetworkThread
+from test_framework.blocktools import create_coinbase, create_block
+from test_framework.comptool import TestInstance, TestManager
+from test_framework.script import CScript, OP_1NEGATE, OP_CHECKLOCKTIMEVERIFY, OP_DROP
+from binascii import hexlify, unhexlify
+import cStringIO
+import time
+
+def cltv_invalidate(tx):
+ '''Modify the signature in vin 0 of the tx to fail CLTV
+
+ Prepends -1 CLTV DROP in the scriptSig itself.
+ '''
+ tx.vin[0].scriptSig = CScript([OP_1NEGATE, OP_CHECKLOCKTIMEVERIFY, OP_DROP] +
+ list(CScript(tx.vin[0].scriptSig)))
+
+'''
+This test is meant to exercise BIP65 (CHECKLOCKTIMEVERIFY)
+Connect to a single node.
+Mine 2 (version 3) blocks (save the coinbases for later).
+Generate 98 more version 3 blocks, verify the node accepts.
+Mine 749 version 4 blocks, verify the node accepts.
+Check that the new CLTV rules are not enforced on the 750th version 4 block.
+Check that the new CLTV rules are enforced on the 751st version 4 block.
+Mine 199 new version blocks.
+Mine 1 old-version block.
+Mine 1 new version block.
+Mine 1 old version block, see that the node rejects.
+'''
+
+class BIP65Test(ComparisonTestFramework):
+
+ def __init__(self):
+ self.num_nodes = 1
+
+ def setup_network(self):
+ # Must set the blockversion for this test
+ self.nodes = start_nodes(1, self.options.tmpdir,
+ extra_args=[['-debug', '-whitelist=127.0.0.1', '-blockversion=3']],
+ binary=[self.options.testbinary])
+
+ def run_test(self):
+ test = TestManager(self, self.options.tmpdir)
+ test.add_all_connections(self.nodes)
+ NetworkThread().start() # Start up network handling in another thread
+ test.run()
+
+ def create_transaction(self, node, coinbase, to_address, amount):
+ from_txid = node.getblock(coinbase)['tx'][0]
+ inputs = [{ "txid" : from_txid, "vout" : 0}]
+ outputs = { to_address : amount }
+ rawtx = node.createrawtransaction(inputs, outputs)
+ signresult = node.signrawtransaction(rawtx)
+ tx = CTransaction()
+ f = cStringIO.StringIO(unhexlify(signresult['hex']))
+ tx.deserialize(f)
+ return tx
+
+ def get_tests(self):
+
+ self.coinbase_blocks = self.nodes[0].generate(2)
+ height = 3 # height of the next block to build
+ self.tip = int ("0x" + self.nodes[0].getbestblockhash() + "L", 0)
+ self.nodeaddress = self.nodes[0].getnewaddress()
+ self.last_block_time = time.time()
+
+ ''' 98 more version 3 blocks '''
+ test_blocks = []
+ for i in xrange(98):
+ block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
+ block.nVersion = 3
+ block.rehash()
+ block.solve()
+ test_blocks.append([block, True])
+ self.last_block_time += 1
+ self.tip = block.sha256
+ height += 1
+ yield TestInstance(test_blocks, sync_every_block=False)
+
+ ''' Mine 749 version 4 blocks '''
+ test_blocks = []
+ for i in xrange(749):
+ block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
+ block.nVersion = 4
+ block.rehash()
+ block.solve()
+ test_blocks.append([block, True])
+ self.last_block_time += 1
+ self.tip = block.sha256
+ height += 1
+ yield TestInstance(test_blocks, sync_every_block=False)
+
+ '''
+ Check that the new CLTV rules are not enforced in the 750th
+        version 4 block.
+ '''
+ spendtx = self.create_transaction(self.nodes[0],
+ self.coinbase_blocks[0], self.nodeaddress, 1.0)
+ cltv_invalidate(spendtx)
+ spendtx.rehash()
+
+ block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
+ block.nVersion = 4
+ block.vtx.append(spendtx)
+ block.hashMerkleRoot = block.calc_merkle_root()
+ block.rehash()
+ block.solve()
+
+ self.last_block_time += 1
+ self.tip = block.sha256
+ height += 1
+ yield TestInstance([[block, True]])
+
+ '''
+ Check that the new CLTV rules are enforced in the 751st version 4
+ block.
+ '''
+ spendtx = self.create_transaction(self.nodes[0],
+ self.coinbase_blocks[1], self.nodeaddress, 1.0)
+ cltv_invalidate(spendtx)
+ spendtx.rehash()
+
+ block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
+ block.nVersion = 4
+ block.vtx.append(spendtx)
+ block.hashMerkleRoot = block.calc_merkle_root()
+ block.rehash()
+ block.solve()
+ self.last_block_time += 1
+ yield TestInstance([[block, False]])
+
+ ''' Mine 199 new version blocks on last valid tip '''
+ test_blocks = []
+ for i in xrange(199):
+ block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
+ block.nVersion = 4
+ block.rehash()
+ block.solve()
+ test_blocks.append([block, True])
+ self.last_block_time += 1
+ self.tip = block.sha256
+ height += 1
+ yield TestInstance(test_blocks, sync_every_block=False)
+
+ ''' Mine 1 old version block '''
+ block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
+ block.nVersion = 3
+ block.rehash()
+ block.solve()
+ self.last_block_time += 1
+ self.tip = block.sha256
+ height += 1
+ yield TestInstance([[block, True]])
+
+ ''' Mine 1 new version block '''
+ block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
+ block.nVersion = 4
+ block.rehash()
+ block.solve()
+ self.last_block_time += 1
+ self.tip = block.sha256
+ height += 1
+ yield TestInstance([[block, True]])
+
+ ''' Mine 1 old version block, should be invalid '''
+ block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
+ block.nVersion = 3
+ block.rehash()
+ block.solve()
+ self.last_block_time += 1
+ yield TestInstance([[block, False]])
+
+if __name__ == '__main__':
+ BIP65Test().main()
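The reason `cltv_invalidate()` above works is that `OP_1NEGATE` leaves -1 on the stack, and under BIP65 `CHECKLOCKTIMEVERIFY` fails when the top stack item is negative; before the soft fork is enforced the opcode is still treated as `OP_NOP2`, so the prepended prefix is harmless (push, no-op, drop). A quick sketch of the three-byte prefix it prepends, assuming the repository's `test_framework.script` is importable:

```python
from binascii import hexlify
from test_framework.script import CScript, OP_1NEGATE, OP_CHECKLOCKTIMEVERIFY, OP_DROP

# Serializes to 0x4f 0xb1 0x75: OP_1NEGATE, OP_CHECKLOCKTIMEVERIFY (OP_NOP2), OP_DROP.
prefix = CScript([OP_1NEGATE, OP_CHECKLOCKTIMEVERIFY, OP_DROP])
print hexlify(prefix)  # expected: 4fb175
```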
diff --git a/qa/rpc-tests/bip65-cltv.py b/qa/rpc-tests/bip65-cltv.py
new file mode 100755
index 0000000000..e90e11e6a7
--- /dev/null
+++ b/qa/rpc-tests/bip65-cltv.py
@@ -0,0 +1,89 @@
+#!/usr/bin/env python2
+# Copyright (c) 2015 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+
+#
+# Test the CHECKLOCKTIMEVERIFY (BIP65) soft-fork logic
+#
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import *
+import os
+import shutil
+
+class BIP65Test(BitcoinTestFramework):
+
+ def setup_network(self):
+ self.nodes = []
+ self.nodes.append(start_node(0, self.options.tmpdir, []))
+ self.nodes.append(start_node(1, self.options.tmpdir, ["-blockversion=3"]))
+ self.nodes.append(start_node(2, self.options.tmpdir, ["-blockversion=4"]))
+ connect_nodes(self.nodes[1], 0)
+ connect_nodes(self.nodes[2], 0)
+ self.is_network_split = False
+ self.sync_all()
+
+ def run_test(self):
+ cnt = self.nodes[0].getblockcount()
+
+ # Mine some old-version blocks
+ self.nodes[1].generate(100)
+ self.sync_all()
+ if (self.nodes[0].getblockcount() != cnt + 100):
+ raise AssertionError("Failed to mine 100 version=3 blocks")
+
+ # Mine 750 new-version blocks
+ for i in xrange(15):
+ self.nodes[2].generate(50)
+ self.sync_all()
+ if (self.nodes[0].getblockcount() != cnt + 850):
+ raise AssertionError("Failed to mine 750 version=4 blocks")
+
+ # TODO: check that new CHECKLOCKTIMEVERIFY rules are not enforced
+
+ # Mine 1 new-version block
+ self.nodes[2].generate(1)
+ self.sync_all()
+ if (self.nodes[0].getblockcount() != cnt + 851):
+            raise AssertionError("Failed to mine a version=4 block")
+
+ # TODO: check that new CHECKLOCKTIMEVERIFY rules are enforced
+
+ # Mine 198 new-version blocks
+ for i in xrange(2):
+ self.nodes[2].generate(99)
+ self.sync_all()
+ if (self.nodes[0].getblockcount() != cnt + 1049):
+ raise AssertionError("Failed to mine 198 version=4 blocks")
+
+ # Mine 1 old-version block
+ self.nodes[1].generate(1)
+ self.sync_all()
+ if (self.nodes[0].getblockcount() != cnt + 1050):
+ raise AssertionError("Failed to mine a version=3 block after 949 version=4 blocks")
+
+ # Mine 1 new-version blocks
+ self.nodes[2].generate(1)
+ self.sync_all()
+ if (self.nodes[0].getblockcount() != cnt + 1051):
+ raise AssertionError("Failed to mine a version=4 block")
+
+ # Mine 1 old-version blocks
+ try:
+ self.nodes[1].generate(1)
+ raise AssertionError("Succeeded to mine a version=3 block after 950 version=4 blocks")
+ except JSONRPCException:
+ pass
+ self.sync_all()
+ if (self.nodes[0].getblockcount() != cnt + 1051):
+ raise AssertionError("Accepted a version=3 block after 950 version=4 blocks")
+
+ # Mine 1 new-version blocks
+ self.nodes[2].generate(1)
+ self.sync_all()
+ if (self.nodes[0].getblockcount() != cnt + 1052):
+ raise AssertionError("Failed to mine a version=4 block")
+
+if __name__ == '__main__':
+ BIP65Test().main()
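The block counts in the test above line up with the usual 1000-block supermajority window (750 version-4 blocks to start enforcing the new rules, 950 to reject outdated version-3 blocks); those thresholds are implied by the test's structure rather than stated in it, so treat the figures as an assumption here. The bookkeeping, spelled out:

```python
# Version-4 blocks mined before the last accepted version-3 block:
v4_before_v3 = 750 + 1 + 198      # = 949, still below the 950 rejection threshold
assert v4_before_v3 == 949 and v4_before_v3 < 950

# One more version-4 block brings the window to 950, so the next
# version-3 block is rejected (the JSONRPCException the test expects).
assert v4_before_v3 + 1 >= 950
```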
diff --git a/qa/rpc-tests/bipdersig.py b/qa/rpc-tests/bipdersig.py
index 243f816f65..5afc9ddde8 100755
--- a/qa/rpc-tests/bipdersig.py
+++ b/qa/rpc-tests/bipdersig.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python2
-# Copyright (c) 2014 The Bitcoin Core developers
+# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
diff --git a/qa/rpc-tests/blockchain.py b/qa/rpc-tests/blockchain.py
new file mode 100755
index 0000000000..673f1cc545
--- /dev/null
+++ b/qa/rpc-tests/blockchain.py
@@ -0,0 +1,52 @@
+#!/usr/bin/env python2
+# Copyright (c) 2014-2015 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+
+#
+# Test RPC calls related to blockchain state.
+#
+
+import decimal
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import (
+ initialize_chain,
+ assert_equal,
+ start_nodes,
+ connect_nodes_bi,
+)
+
+class BlockchainTest(BitcoinTestFramework):
+ """
+ Test blockchain-related RPC calls:
+
+ - gettxoutsetinfo
+
+ """
+
+ def setup_chain(self):
+ print("Initializing test directory " + self.options.tmpdir)
+ initialize_chain(self.options.tmpdir)
+
+ def setup_network(self, split=False):
+ self.nodes = start_nodes(2, self.options.tmpdir)
+ connect_nodes_bi(self.nodes, 0, 1)
+ self.is_network_split = False
+ self.sync_all()
+
+ def run_test(self):
+ node = self.nodes[0]
+ res = node.gettxoutsetinfo()
+
+ assert_equal(res[u'total_amount'], decimal.Decimal('8725.00000000'))
+ assert_equal(res[u'transactions'], 200)
+ assert_equal(res[u'height'], 200)
+ assert_equal(res[u'txouts'], 200)
+ assert_equal(res[u'bytes_serialized'], 13924),
+ assert_equal(len(res[u'bestblock']), 64)
+ assert_equal(len(res[u'hash_serialized']), 64)
+
+
+if __name__ == '__main__':
+ BlockchainTest().main()
diff --git a/qa/rpc-tests/decodescript.py b/qa/rpc-tests/decodescript.py
index 4bca623380..490808d49d 100755
--- a/qa/rpc-tests/decodescript.py
+++ b/qa/rpc-tests/decodescript.py
@@ -102,13 +102,13 @@ class DecodeScriptTest(BitcoinTestFramework):
# OP_IF
# <receiver-pubkey> OP_CHECKSIGVERIFY
# OP_ELSE
- # <lock-until> OP_NOP2 OP_DROP
+ # <lock-until> OP_CHECKLOCKTIMEVERIFY OP_DROP
# OP_ENDIF
# <sender-pubkey> OP_CHECKSIG
#
# lock until block 500,000
rpc_result = self.nodes[0].decodescript('63' + push_public_key + 'ad670320a107b17568' + push_public_key + 'ac')
- assert_equal('OP_IF ' + public_key + ' OP_CHECKSIGVERIFY OP_ELSE 500000 OP_NOP2 OP_DROP OP_ENDIF ' + public_key + ' OP_CHECKSIG', rpc_result['asm'])
+ assert_equal('OP_IF ' + public_key + ' OP_CHECKSIGVERIFY OP_ELSE 500000 OP_CHECKLOCKTIMEVERIFY OP_DROP OP_ENDIF ' + public_key + ' OP_CHECKSIG', rpc_result['asm'])
def decoderawtransaction_asm_sighashtype(self):
"""Tests decoding scripts via RPC command "decoderawtransaction".
diff --git a/qa/rpc-tests/disablewallet.py b/qa/rpc-tests/disablewallet.py
new file mode 100755
index 0000000000..2112097af1
--- /dev/null
+++ b/qa/rpc-tests/disablewallet.py
@@ -0,0 +1,32 @@
+#!/usr/bin/env python2
+# Copyright (c) 2014-2015 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+
+#
+# Exercise API with -disablewallet.
+#
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import *
+
+class DisableWalletTest (BitcoinTestFramework):
+
+ def setup_chain(self):
+ print("Initializing test directory "+self.options.tmpdir)
+ initialize_chain_clean(self.options.tmpdir, 1)
+
+ def setup_network(self, split=False):
+ self.nodes = start_nodes(1, self.options.tmpdir, [['-disablewallet']])
+ self.is_network_split = False
+ self.sync_all()
+
+ def run_test (self):
+ # Check regression: https://github.com/bitcoin/bitcoin/issues/6963#issuecomment-154548880
+ x = self.nodes[0].validateaddress('3J98t1WpEZ73CNmQviecrnyiWrnqRhWNLy')
+ assert(x['isvalid'] == False)
+ x = self.nodes[0].validateaddress('mneYUmWYsuk7kySiURxCi3AGxrAqZxLgPZ')
+ assert(x['isvalid'] == True)
+
+if __name__ == '__main__':
+ DisableWalletTest ().main ()
diff --git a/qa/rpc-tests/forknotify.py b/qa/rpc-tests/forknotify.py
index 0acef8e30b..2deede0c38 100755
--- a/qa/rpc-tests/forknotify.py
+++ b/qa/rpc-tests/forknotify.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python2
-# Copyright (c) 2014 The Bitcoin Core developers
+# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
diff --git a/qa/rpc-tests/fundrawtransaction.py b/qa/rpc-tests/fundrawtransaction.py
index fc29789218..dda9166151 100755
--- a/qa/rpc-tests/fundrawtransaction.py
+++ b/qa/rpc-tests/fundrawtransaction.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python2
-# Copyright (c) 2014 The Bitcoin Core developers
+# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
@@ -28,7 +28,20 @@ class RawTransactionsTest(BitcoinTestFramework):
def run_test(self):
print "Mining blocks..."
- feeTolerance = Decimal(0.00000002) #if the fee's positive delta is higher than this value tests will fail, neg. delta always fail the tests
+
+ min_relay_tx_fee = self.nodes[0].getnetworkinfo()['relayfee']
+ # This test is not meant to test fee estimation and we'd like
+ # to be sure all txs are sent at a consistent desired feerate
+ for node in self.nodes:
+ node.settxfee(min_relay_tx_fee)
+
+ # if the fee's positive delta is higher than this value tests will fail,
+ # neg. delta always fail the tests.
+ # The size of the signature of every input may be at most 2 bytes larger
+ # than a minimum sized signature.
+
+ # = 2 bytes * minRelayTxFeePerByte
+ feeTolerance = 2 * min_relay_tx_fee/1000
self.nodes[2].generate(1)
self.sync_all()
@@ -439,6 +452,10 @@ class RawTransactionsTest(BitcoinTestFramework):
wait_bitcoinds()
self.nodes = start_nodes(4, self.options.tmpdir)
+ # This test is not meant to test fee estimation and we'd like
+ # to be sure all txs are sent at a consistent desired feerate
+ for node in self.nodes:
+ node.settxfee(min_relay_tx_fee)
connect_nodes_bi(self.nodes,0,1)
connect_nodes_bi(self.nodes,1,2)
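To put a number on the tolerance introduced above: with the usual regtest relay fee of 0.00001 BTC per 1000 bytes (an assumed default, not stated in the diff), two extra signature bytes cost 2 * 0.00001 / 1000 = 0.00000002 BTC, which is exactly the constant the old hard-coded `feeTolerance` used:

```python
from decimal import Decimal

min_relay_tx_fee = Decimal("0.00001")          # BTC per 1000 bytes (assumed default)
fee_tolerance = 2 * min_relay_tx_fee / 1000    # two bytes of signature slack
assert fee_tolerance == Decimal("0.00000002")  # matches the previously hard-coded value
```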
diff --git a/qa/rpc-tests/getblocktemplate_longpoll.py b/qa/rpc-tests/getblocktemplate_longpoll.py
index aab4562422..3e85957ae2 100755
--- a/qa/rpc-tests/getblocktemplate_longpoll.py
+++ b/qa/rpc-tests/getblocktemplate_longpoll.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python2
-# Copyright (c) 2014 The Bitcoin Core developers
+# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
@@ -38,7 +38,7 @@ class LongpollThread(threading.Thread):
self.longpollid = templat['longpollid']
# create a new connection to the node, we can't use the same
# connection from two threads
- self.node = AuthServiceProxy(node.url, timeout=600)
+ self.node = get_rpc_proxy(node.url, 1, timeout=600)
def run(self):
self.node.getblocktemplate({'longpollid':self.longpollid})
diff --git a/qa/rpc-tests/getblocktemplate_proposals.py b/qa/rpc-tests/getblocktemplate_proposals.py
index aca0cd7495..f83b5f140d 100755
--- a/qa/rpc-tests/getblocktemplate_proposals.py
+++ b/qa/rpc-tests/getblocktemplate_proposals.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python2
-# Copyright (c) 2014 The Bitcoin Core developers
+# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
diff --git a/qa/rpc-tests/getchaintips.py b/qa/rpc-tests/getchaintips.py
index 6a2bcb2969..e8d2d8f3fd 100755
--- a/qa/rpc-tests/getchaintips.py
+++ b/qa/rpc-tests/getchaintips.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python2
-# Copyright (c) 2014 The Bitcoin Core developers
+# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
diff --git a/qa/rpc-tests/httpbasics.py b/qa/rpc-tests/httpbasics.py
index b66533543d..5b9fa00976 100755
--- a/qa/rpc-tests/httpbasics.py
+++ b/qa/rpc-tests/httpbasics.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python2
-# Copyright (c) 2014 The Bitcoin Core developers
+# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
@@ -97,5 +97,19 @@ class HTTPBasicsTest (BitcoinTestFramework):
assert_equal('"error":null' in out1, True)
assert_equal(conn.sock!=None, True) #connection must be closed because bitcoind should use keep-alive by default
+ # Check excessive request size
+ conn = httplib.HTTPConnection(urlNode2.hostname, urlNode2.port)
+ conn.connect()
+ conn.request('GET', '/' + ('x'*1000), '', headers)
+ out1 = conn.getresponse()
+ assert_equal(out1.status, httplib.NOT_FOUND)
+
+ conn = httplib.HTTPConnection(urlNode2.hostname, urlNode2.port)
+ conn.connect()
+ conn.request('GET', '/' + ('x'*10000), '', headers)
+ out1 = conn.getresponse()
+ assert_equal(out1.status, httplib.BAD_REQUEST)
+
+
if __name__ == '__main__':
HTTPBasicsTest ().main ()
diff --git a/qa/rpc-tests/invalidateblock.py b/qa/rpc-tests/invalidateblock.py
index 2b9c8154e0..0e78a3c806 100755
--- a/qa/rpc-tests/invalidateblock.py
+++ b/qa/rpc-tests/invalidateblock.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python2
-# Copyright (c) 2014 The Bitcoin Core developers
+# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
diff --git a/qa/rpc-tests/invalidblockrequest.py b/qa/rpc-tests/invalidblockrequest.py
index 6a7980cd45..a74ecb1288 100755
--- a/qa/rpc-tests/invalidblockrequest.py
+++ b/qa/rpc-tests/invalidblockrequest.py
@@ -6,7 +6,7 @@
from test_framework.test_framework import ComparisonTestFramework
from test_framework.util import *
-from test_framework.comptool import TestManager, TestInstance
+from test_framework.comptool import TestManager, TestInstance, RejectResult
from test_framework.mininode import *
from test_framework.blocktools import *
import logging
@@ -97,7 +97,7 @@ class InvalidBlockRequestTest(ComparisonTestFramework):
assert(block2_orig.vtx != block2.vtx)
self.tip = block2.sha256
- yield TestInstance([[block2, False], [block2_orig, True]])
+ yield TestInstance([[block2, RejectResult(16,'bad-txns-duplicate')], [block2_orig, True]])
height += 1
'''
@@ -112,7 +112,7 @@ class InvalidBlockRequestTest(ComparisonTestFramework):
block3.rehash()
block3.solve()
- yield TestInstance([[block3, False]])
+ yield TestInstance([[block3, RejectResult(16,'bad-cb-amount')]])
if __name__ == '__main__':
diff --git a/qa/rpc-tests/invalidtxrequest.py b/qa/rpc-tests/invalidtxrequest.py
new file mode 100755
index 0000000000..d17b3d0980
--- /dev/null
+++ b/qa/rpc-tests/invalidtxrequest.py
@@ -0,0 +1,76 @@
+#!/usr/bin/env python2
+#
+# Distributed under the MIT/X11 software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+#
+
+from test_framework.test_framework import ComparisonTestFramework
+from test_framework.util import *
+from test_framework.comptool import TestManager, TestInstance, RejectResult
+from test_framework.mininode import *
+from test_framework.blocktools import *
+import logging
+import copy
+import time
+
+
+'''
+In this test we connect to one node over p2p, and test tx requests.
+'''
+
+# Use the ComparisonTestFramework with 1 node: only use --testbinary.
+class InvalidTxRequestTest(ComparisonTestFramework):
+
+ ''' Can either run this test as 1 node with expected answers, or two and compare them.
+ Change the "outcome" variable from each TestInstance object to only do the comparison. '''
+ def __init__(self):
+ self.num_nodes = 1
+
+ def run_test(self):
+ test = TestManager(self, self.options.tmpdir)
+ test.add_all_connections(self.nodes)
+ self.tip = None
+ self.block_time = None
+ NetworkThread().start() # Start up network handling in another thread
+ test.run()
+
+ def get_tests(self):
+ if self.tip is None:
+ self.tip = int ("0x" + self.nodes[0].getbestblockhash() + "L", 0)
+ self.block_time = int(time.time())+1
+
+ '''
+ Create a new block with an anyone-can-spend coinbase
+ '''
+ height = 1
+ block = create_block(self.tip, create_coinbase(height), self.block_time)
+ self.block_time += 1
+ block.solve()
+ # Save the coinbase for later
+ self.block1 = block
+ self.tip = block.sha256
+ height += 1
+ yield TestInstance([[block, True]])
+
+ '''
+ Now we need that block to mature so we can spend the coinbase.
+ '''
+ test = TestInstance(sync_every_block=False)
+ for i in xrange(100):
+ block = create_block(self.tip, create_coinbase(height), self.block_time)
+ block.solve()
+ self.tip = block.sha256
+ self.block_time += 1
+ test.blocks_and_transactions.append([block, True])
+ height += 1
+ yield test
+
+ # chr(100) is OP_NOTIF
+ # Transaction will be rejected with code 16 (REJECT_INVALID)
+ tx1 = create_transaction(self.block1.vtx[0], 0, chr(100), 50*100000000)
+ yield TestInstance([[tx1, RejectResult(16, 'mandatory-script-verify-flag-failed')]])
+
+ # TODO: test further transactions...
+
+if __name__ == '__main__':
+ InvalidTxRequestTest().main()
diff --git a/qa/rpc-tests/keypool.py b/qa/rpc-tests/keypool.py
index 5a67220021..c300bbc57e 100755
--- a/qa/rpc-tests/keypool.py
+++ b/qa/rpc-tests/keypool.py
@@ -1,20 +1,13 @@
#!/usr/bin/env python2
-# Copyright (c) 2014 The Bitcoin Core developers
+# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# Exercise the wallet keypool, and interaction with wallet encryption/locking
# Add python-bitcoinrpc to module search path:
-import os
-import sys
-
-import json
-import shutil
-import subprocess
-import tempfile
-import traceback
+from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
@@ -39,107 +32,65 @@ def check_array_result(object_array, to_match, expected):
if num_matched == 0:
raise AssertionError("No objects matched %s"%(str(to_match)))
-def run_test(nodes, tmpdir):
- # Encrypt wallet and wait to terminate
- nodes[0].encryptwallet('test')
- bitcoind_processes[0].wait()
- # Restart node 0
- nodes[0] = start_node(0, tmpdir)
- # Keep creating keys
- addr = nodes[0].getnewaddress()
- try:
- addr = nodes[0].getnewaddress()
- raise AssertionError('Keypool should be exhausted after one address')
- except JSONRPCException,e:
- assert(e.error['code']==-12)
-
- # put three new keys in the keypool
- nodes[0].walletpassphrase('test', 12000)
- nodes[0].keypoolrefill(3)
- nodes[0].walletlock()
-
- # drain the keys
- addr = set()
- addr.add(nodes[0].getrawchangeaddress())
- addr.add(nodes[0].getrawchangeaddress())
- addr.add(nodes[0].getrawchangeaddress())
- addr.add(nodes[0].getrawchangeaddress())
- # assert that four unique addresses were returned
- assert(len(addr) == 4)
- # the next one should fail
- try:
- addr = nodes[0].getrawchangeaddress()
- raise AssertionError('Keypool should be exhausted after three addresses')
- except JSONRPCException,e:
- assert(e.error['code']==-12)
-
- # refill keypool with three new addresses
- nodes[0].walletpassphrase('test', 12000)
- nodes[0].keypoolrefill(3)
- nodes[0].walletlock()
+class KeyPoolTest(BitcoinTestFramework):
- # drain them by mining
- nodes[0].generate(1)
- nodes[0].generate(1)
- nodes[0].generate(1)
- nodes[0].generate(1)
- try:
+ def run_test(self):
+ nodes = self.nodes
+ # Encrypt wallet and wait to terminate
+ nodes[0].encryptwallet('test')
+ bitcoind_processes[0].wait()
+ # Restart node 0
+ nodes[0] = start_node(0, self.options.tmpdir)
+ # Keep creating keys
+ addr = nodes[0].getnewaddress()
+ try:
+ addr = nodes[0].getnewaddress()
+ raise AssertionError('Keypool should be exhausted after one address')
+ except JSONRPCException,e:
+ assert(e.error['code']==-12)
+
+ # put three new keys in the keypool
+ nodes[0].walletpassphrase('test', 12000)
+ nodes[0].keypoolrefill(3)
+ nodes[0].walletlock()
+
+ # drain the keys
+ addr = set()
+ addr.add(nodes[0].getrawchangeaddress())
+ addr.add(nodes[0].getrawchangeaddress())
+ addr.add(nodes[0].getrawchangeaddress())
+ addr.add(nodes[0].getrawchangeaddress())
+ # assert that four unique addresses were returned
+ assert(len(addr) == 4)
+ # the next one should fail
+ try:
+ addr = nodes[0].getrawchangeaddress()
+ raise AssertionError('Keypool should be exhausted after three addresses')
+ except JSONRPCException,e:
+ assert(e.error['code']==-12)
+
+ # refill keypool with three new addresses
+ nodes[0].walletpassphrase('test', 12000)
+ nodes[0].keypoolrefill(3)
+ nodes[0].walletlock()
+
+ # drain them by mining
nodes[0].generate(1)
- raise AssertionError('Keypool should be exhausted after three addesses')
- except JSONRPCException,e:
- assert(e.error['code']==-12)
-
-def main():
- import optparse
-
- parser = optparse.OptionParser(usage="%prog [options]")
- parser.add_option("--nocleanup", dest="nocleanup", default=False, action="store_true",
- help="Leave bitcoinds and test.* datadir on exit or error")
- parser.add_option("--srcdir", dest="srcdir", default="../../src",
- help="Source directory containing bitcoind/bitcoin-cli (default: %default%)")
- parser.add_option("--tmpdir", dest="tmpdir", default=tempfile.mkdtemp(prefix="test"),
- help="Root directory for datadirs")
- (options, args) = parser.parse_args()
-
- os.environ['PATH'] = options.srcdir+":"+os.environ['PATH']
-
- check_json_precision()
-
- success = False
- nodes = []
- try:
- print("Initializing test directory "+options.tmpdir)
- if not os.path.isdir(options.tmpdir):
- os.makedirs(options.tmpdir)
- initialize_chain(options.tmpdir)
-
- nodes = start_nodes(1, options.tmpdir)
-
- run_test(nodes, options.tmpdir)
-
- success = True
-
- except AssertionError as e:
- print("Assertion failed: "+e.message)
- except JSONRPCException as e:
- print("JSONRPC error: "+e.error['message'])
- traceback.print_tb(sys.exc_info()[2])
- except Exception as e:
- print("Unexpected exception caught during testing: "+str(sys.exc_info()[0]))
- traceback.print_tb(sys.exc_info()[2])
+ nodes[0].generate(1)
+ nodes[0].generate(1)
+ nodes[0].generate(1)
+ try:
+ nodes[0].generate(1)
+            raise AssertionError('Keypool should be exhausted after three addresses')
+ except JSONRPCException,e:
+ assert(e.error['code']==-12)
- if not options.nocleanup:
- print("Cleaning up")
- stop_nodes(nodes)
- wait_bitcoinds()
- shutil.rmtree(options.tmpdir)
+ def setup_chain(self):
+ print("Initializing test directory "+self.options.tmpdir)
+ initialize_chain(self.options.tmpdir)
- if success:
- print("Tests successful")
- sys.exit(0)
- else:
- print("Failed")
- sys.exit(1)
+ def setup_network(self):
+ self.nodes = start_nodes(1, self.options.tmpdir)
if __name__ == '__main__':
- main()
+ KeyPoolTest().main()
diff --git a/qa/rpc-tests/listtransactions.py b/qa/rpc-tests/listtransactions.py
index b30a6bc9d1..8a1e3dc4bc 100755
--- a/qa/rpc-tests/listtransactions.py
+++ b/qa/rpc-tests/listtransactions.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python2
-# Copyright (c) 2014 The Bitcoin Core developers
+# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
diff --git a/qa/rpc-tests/maxblocksinflight.py b/qa/rpc-tests/maxblocksinflight.py
index a601147ce8..1a9ae480ab 100755
--- a/qa/rpc-tests/maxblocksinflight.py
+++ b/qa/rpc-tests/maxblocksinflight.py
@@ -34,7 +34,6 @@ class TestManager(NodeConnCB):
def __init__(self):
NodeConnCB.__init__(self)
self.log = logging.getLogger("BlockRelayTest")
- self.create_callback_map()
def add_new_connection(self, connection):
self.connection = connection
diff --git a/qa/rpc-tests/maxuploadtarget.py b/qa/rpc-tests/maxuploadtarget.py
new file mode 100755
index 0000000000..4d6b343f77
--- /dev/null
+++ b/qa/rpc-tests/maxuploadtarget.py
@@ -0,0 +1,266 @@
+#!/usr/bin/env python2
+#
+# Distributed under the MIT/X11 software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+#
+
+from test_framework.mininode import *
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import *
+from test_framework.comptool import wait_until
+import time
+
+'''
+Test behavior of -maxuploadtarget.
+
+* Verify that getdata requests for old blocks (>1week) are dropped
+if uploadtarget has been reached.
+* Verify that getdata requests for recent blocks are respected even
+if uploadtarget has been reached.
+* Verify that the upload counters are reset after 24 hours.
+'''
+
+# TestNode: bare-bones "peer". Used mostly as a conduit for a test to sending
+# p2p messages to a node, generating the messages in the main testing logic.
+class TestNode(NodeConnCB):
+ def __init__(self):
+ NodeConnCB.__init__(self)
+ self.connection = None
+ self.ping_counter = 1
+ self.last_pong = msg_pong()
+ self.block_receive_map = {}
+
+ def add_connection(self, conn):
+ self.connection = conn
+ self.peer_disconnected = False
+
+ def on_inv(self, conn, message):
+ pass
+
+ # Track the last getdata message we receive (used in the test)
+ def on_getdata(self, conn, message):
+ self.last_getdata = message
+
+ def on_block(self, conn, message):
+ message.block.calc_sha256()
+ try:
+ self.block_receive_map[message.block.sha256] += 1
+ except KeyError as e:
+ self.block_receive_map[message.block.sha256] = 1
+
+ # Spin until verack message is received from the node.
+ # We use this to signal that our test can begin. This
+ # is called from the testing thread, so it needs to acquire
+ # the global lock.
+ def wait_for_verack(self):
+ def veracked():
+ return self.verack_received
+ return wait_until(veracked, timeout=10)
+
+ def wait_for_disconnect(self):
+ def disconnected():
+ return self.peer_disconnected
+ return wait_until(disconnected, timeout=10)
+
+ # Wrapper for the NodeConn's send_message function
+ def send_message(self, message):
+ self.connection.send_message(message)
+
+ def on_pong(self, conn, message):
+ self.last_pong = message
+
+ def on_close(self, conn):
+ self.peer_disconnected = True
+
+ # Sync up with the node after delivery of a block
+ def sync_with_ping(self, timeout=30):
+ def received_pong():
+ return (self.last_pong.nonce == self.ping_counter)
+ self.connection.send_message(msg_ping(nonce=self.ping_counter))
+ success = wait_until(received_pong, timeout)
+ self.ping_counter += 1
+ return success
+
+class MaxUploadTest(BitcoinTestFramework):
+ def __init__(self):
+ self.utxo = []
+ self.txouts = gen_return_txouts()
+
+ def add_options(self, parser):
+ parser.add_option("--testbinary", dest="testbinary",
+ default=os.getenv("BITCOIND", "bitcoind"),
+ help="bitcoind binary to test")
+
+ def setup_chain(self):
+ initialize_chain_clean(self.options.tmpdir, 2)
+
+ def setup_network(self):
+ # Start a node with maxuploadtarget of 200 MB (/24h)
+ self.nodes = []
+ self.nodes.append(start_node(0, self.options.tmpdir, ["-debug", "-maxuploadtarget=200", "-blockmaxsize=999000"]))
+
+ def mine_full_block(self, node, address):
+ # Want to create a full block
+        # We'll generate a 66k transaction below, and 14 of them come close to the 1MB block limit
+ for j in xrange(14):
+ if len(self.utxo) < 14:
+ self.utxo = node.listunspent()
+ inputs=[]
+ outputs = {}
+ t = self.utxo.pop()
+ inputs.append({ "txid" : t["txid"], "vout" : t["vout"]})
+ remchange = t["amount"] - Decimal("0.001000")
+ outputs[address]=remchange
+            # Create a basic transaction that will send change back to ourselves after accounting for a fee,
+            # then insert the 128 generated transaction outputs in the middle. rawtx[92] is where the number
+            # of txouts is stored and is the only thing we overwrite from the original transaction.
+ rawtx = node.createrawtransaction(inputs, outputs)
+ newtx = rawtx[0:92]
+ newtx = newtx + self.txouts
+ newtx = newtx + rawtx[94:]
+ # Appears to be ever so slightly faster to sign with SIGHASH_NONE
+ signresult = node.signrawtransaction(newtx,None,None,"NONE")
+ txid = node.sendrawtransaction(signresult["hex"], True)
+ # Mine a full-sized block containing the transactions we just created
+ node.generate(1)
+
+ def run_test(self):
+ # Before we connect anything, we first set the time on the node
+ # to be in the past, otherwise things break because the CNode
+ # time counters can't be reset backward after initialization
+ old_time = int(time.time() - 2*60*60*24*7)
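+ # (2*60*60*24*7 seconds is two weeks, comfortably older than the one-week cutoff for old blocks described above)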
+ self.nodes[0].setmocktime(old_time)
+
+ # Generate some old blocks
+ self.nodes[0].generate(130)
+
+ # test_nodes[0] will only request old blocks
+ # test_nodes[1] will only request new blocks
+ # test_nodes[2] will test resetting the counters
+ test_nodes = []
+ connections = []
+
+ for i in xrange(3):
+ test_nodes.append(TestNode())
+ connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], test_nodes[i]))
+ test_nodes[i].add_connection(connections[i])
+
+ NetworkThread().start() # Start up network handling in another thread
+ [x.wait_for_verack() for x in test_nodes]
+
+ # Test logic begins here
+
+ # Now mine a big block
+ self.mine_full_block(self.nodes[0], self.nodes[0].getnewaddress())
+
+ # Store the hash; we'll request this later
+ big_old_block = self.nodes[0].getbestblockhash()
+ old_block_size = self.nodes[0].getblock(big_old_block, True)['size']
+ big_old_block = int(big_old_block, 16)
+
+ # Advance to two days ago
+ self.nodes[0].setmocktime(int(time.time()) - 2*60*60*24)
+
+ # Mine one more block, so that the prior block looks old
+ self.mine_full_block(self.nodes[0], self.nodes[0].getnewaddress())
+
+ # We'll be requesting this new block too
+ big_new_block = self.nodes[0].getbestblockhash()
+ new_block_size = self.nodes[0].getblock(big_new_block)['size']
+ big_new_block = int(big_new_block, 16)
+
+ # test_nodes[0] will test what happens if we just keep requesting
+ # the same big old block too many times (expect: disconnect)
+
+ getdata_request = msg_getdata()
+ getdata_request.inv.append(CInv(2, big_old_block))
+
+ max_bytes_per_day = 200*1024*1024
+ daily_buffer = 144 * 1000000
+ max_bytes_available = max_bytes_per_day - daily_buffer
+ success_count = max_bytes_available / old_block_size
+
+ # 144MB will be reserved for relaying new blocks, so expect this to
+ # succeed for ~70 tries.
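+ # Rough arithmetic, assuming the big block is about 950,000 bytes:
+ # (200*1024*1024 - 144*1000000) / 950000 is roughly 69 requests.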
+ for i in xrange(success_count):
+ test_nodes[0].send_message(getdata_request)
+ test_nodes[0].sync_with_ping()
+ assert_equal(test_nodes[0].block_receive_map[big_old_block], i+1)
+
+ assert_equal(len(self.nodes[0].getpeerinfo()), 3)
+ # At most a couple more tries should succeed (depending on how long
+ # the test has been running so far).
+ for i in xrange(3):
+ test_nodes[0].send_message(getdata_request)
+ test_nodes[0].wait_for_disconnect()
+ assert_equal(len(self.nodes[0].getpeerinfo()), 2)
+ print "Peer 0 disconnected after downloading old block too many times"
+
+ # Requesting the current block on test_nodes[1] should succeed indefinitely,
+ # even when over the max upload target.
+ # We'll try 200 times
+ getdata_request.inv = [CInv(2, big_new_block)]
+ for i in xrange(200):
+ test_nodes[1].send_message(getdata_request)
+ test_nodes[1].sync_with_ping()
+ assert_equal(test_nodes[1].block_receive_map[big_new_block], i+1)
+
+ print "Peer 1 able to repeatedly download new block"
+
+ # But if test_nodes[1] tries for an old block, it gets disconnected too.
+ getdata_request.inv = [CInv(2, big_old_block)]
+ test_nodes[1].send_message(getdata_request)
+ test_nodes[1].wait_for_disconnect()
+ assert_equal(len(self.nodes[0].getpeerinfo()), 1)
+
+ print "Peer 1 disconnected after trying to download old block"
+
+ print "Advancing system time on node to clear counters..."
+
+ # If we advance the time by 24 hours, then the counters should reset,
+ # and test_nodes[2] should be able to retrieve the old block.
+ self.nodes[0].setmocktime(int(time.time()))
+ test_nodes[2].sync_with_ping()
+ test_nodes[2].send_message(getdata_request)
+ test_nodes[2].sync_with_ping()
+ assert_equal(test_nodes[2].block_receive_map[big_old_block], 1)
+
+ print "Peer 2 able to download old block"
+
+ [c.disconnect_node() for c in connections]
+
+ #stop and start node 0 with 1MB maxuploadtarget, whitelist 127.0.0.1
+ print "Restarting nodes with -whitelist=127.0.0.1"
+ stop_node(self.nodes[0], 0)
+ self.nodes[0] = start_node(0, self.options.tmpdir, ["-debug", "-whitelist=127.0.0.1", "-maxuploadtarget=1", "-blockmaxsize=999000"])
+
+ #recreate/reconnect 3 test nodes
+ test_nodes = []
+ connections = []
+
+ for i in xrange(3):
+ test_nodes.append(TestNode())
+ connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], test_nodes[i]))
+ test_nodes[i].add_connection(connections[i])
+
+ NetworkThread().start() # Start up network handling in another thread
+ [x.wait_for_verack() for x in test_nodes]
+
+ #retrieve 20 blocks which should be enough to break the 1MB limit
+ getdata_request.inv = [CInv(2, big_new_block)]
+ for i in xrange(20):
+ test_nodes[1].send_message(getdata_request)
+ test_nodes[1].sync_with_ping()
+ assert_equal(test_nodes[1].block_receive_map[big_new_block], i+1)
+
+ getdata_request.inv = [CInv(2, big_old_block)]
+ test_nodes[1].send_message(getdata_request)
+ test_nodes[1].wait_for_disconnect()
+ assert_equal(len(self.nodes[0].getpeerinfo()), 3) #node is still connected because of the whitelist
+
+ print "Peer 1 still connected after trying to download old block (whitelisted)"
+
+ [c.disconnect_node() for c in connections]
+
+if __name__ == '__main__':
+ MaxUploadTest().main()
diff --git a/qa/rpc-tests/mempool_limit.py b/qa/rpc-tests/mempool_limit.py
new file mode 100755
index 0000000000..a8cf6360ee
--- /dev/null
+++ b/qa/rpc-tests/mempool_limit.py
@@ -0,0 +1,55 @@
+#!/usr/bin/env python2
+# Copyright (c) 2014-2015 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+
+# Test mempool limiting and eviction together with the wallet
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import *
+
+class MempoolLimitTest(BitcoinTestFramework):
+
+ def __init__(self):
+ self.txouts = gen_return_txouts()
+
+ def setup_network(self):
+ self.nodes = []
+ self.nodes.append(start_node(0, self.options.tmpdir, ["-maxmempool=5", "-spendzeroconfchange=0", "-debug"]))
+ self.is_network_split = False
+ self.sync_all()
+ self.relayfee = self.nodes[0].getnetworkinfo()['relayfee']
+
+ def setup_chain(self):
+ print("Initializing test directory "+self.options.tmpdir)
+ initialize_chain_clean(self.options.tmpdir, 2)
+
+ def run_test(self):
+ txids = []
+ utxos = create_confirmed_utxos(self.relayfee, self.nodes[0], 90)
+
+ #create a mempool tx that will be evicted
+ us0 = utxos.pop()
+ inputs = [{ "txid" : us0["txid"], "vout" : us0["vout"]}]
+ outputs = {self.nodes[0].getnewaddress() : 0.0001}
+ tx = self.nodes[0].createrawtransaction(inputs, outputs)
+ self.nodes[0].settxfee(self.relayfee) # specifically fund this tx with low fee
+ txF = self.nodes[0].fundrawtransaction(tx)
+ self.nodes[0].settxfee(0) # return to automatic fee selection
+ txFS = self.nodes[0].signrawtransaction(txF['hex'])
+ txid = self.nodes[0].sendrawtransaction(txFS['hex'])
+ self.nodes[0].lockunspent(True, [us0])
+
+ relayfee = self.nodes[0].getnetworkinfo()['relayfee']
+ base_fee = relayfee*100
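+ # (each big transaction created below is a bit under 100kB, so a fee of relayfee*100 keeps its feerate above the relay minimum)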
+ for i in xrange(4):
+ txids.append([])
+ txids[i] = create_lots_of_big_transactions(self.nodes[0], self.txouts, utxos[30*i:30*i+30], (i+1)*base_fee)
+
+ # by now, the tx should be evicted, check confirmation state
+ assert(txid not in self.nodes[0].getrawmempool())
+ txdata = self.nodes[0].gettransaction(txid)
+ assert(txdata['confirmations'] == 0) # confirmation count should still be 0
+
+if __name__ == '__main__':
+ MempoolLimitTest().main()
diff --git a/qa/rpc-tests/mempool_packages.py b/qa/rpc-tests/mempool_packages.py
index 6bc6e43f0b..063308d394 100755
--- a/qa/rpc-tests/mempool_packages.py
+++ b/qa/rpc-tests/mempool_packages.py
@@ -8,8 +8,8 @@
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
-def satoshi_round(amount):
- return Decimal(amount).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)
+MAX_ANCESTORS = 25
+MAX_DESCENDANTS = 25
class MempoolPackagesTest(BitcoinTestFramework):
@@ -45,17 +45,17 @@ class MempoolPackagesTest(BitcoinTestFramework):
value = utxo[0]['amount']
fee = Decimal("0.0001")
- # 100 transactions off a confirmed tx should be fine
+ # MAX_ANCESTORS transactions off a confirmed tx should be fine
chain = []
- for i in xrange(100):
+ for i in xrange(MAX_ANCESTORS):
(txid, sent_value) = self.chain_transaction(self.nodes[0], txid, 0, value, fee, 1)
value = sent_value
chain.append(txid)
- # Check mempool has 100 transactions in it, and descendant
+ # Check mempool has MAX_ANCESTORS transactions in it, and descendant
# count and fees should look correct
mempool = self.nodes[0].getrawmempool(True)
- assert_equal(len(mempool), 100)
+ assert_equal(len(mempool), MAX_ANCESTORS)
descendant_count = 1
descendant_fees = 0
descendant_size = 0
@@ -64,17 +64,41 @@ class MempoolPackagesTest(BitcoinTestFramework):
for x in reversed(chain):
assert_equal(mempool[x]['descendantcount'], descendant_count)
descendant_fees += mempool[x]['fee']
+ assert_equal(mempool[x]['modifiedfee'], mempool[x]['fee'])
assert_equal(mempool[x]['descendantfees'], SATOSHIS*descendant_fees)
descendant_size += mempool[x]['size']
assert_equal(mempool[x]['descendantsize'], descendant_size)
descendant_count += 1
+ # Check that descendant modified fees include fee deltas from
+ # prioritisetransaction
+ self.nodes[0].prioritisetransaction(chain[-1], 0, 1000)
+ mempool = self.nodes[0].getrawmempool(True)
+
+ descendant_fees = 0
+ for x in reversed(chain):
+ descendant_fees += mempool[x]['fee']
+ assert_equal(mempool[x]['descendantfees'], SATOSHIS*descendant_fees+1000)
+
# Adding one more transaction on to the chain should fail.
try:
self.chain_transaction(self.nodes[0], txid, vout, value, fee, 1)
except JSONRPCException as e:
print "too-long-ancestor-chain successfully rejected"
+ # Check that prioritising a tx before it's added to the mempool works
+ self.nodes[0].generate(1)
+ self.nodes[0].prioritisetransaction(chain[-1], 0, 2000)
+ self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
+ mempool = self.nodes[0].getrawmempool(True)
+
+ descendant_fees = 0
+ for x in reversed(chain):
+ descendant_fees += mempool[x]['fee']
+ if (x == chain[-1]):
+ assert_equal(mempool[x]['modifiedfee'], mempool[x]['fee']+satoshi_round(0.00002))
+ assert_equal(mempool[x]['descendantfees'], SATOSHIS*descendant_fees+2000)
+
# TODO: check that node1's mempool is as expected
# TODO: test ancestor size limits
@@ -91,18 +115,18 @@ class MempoolPackagesTest(BitcoinTestFramework):
for i in xrange(10):
transaction_package.append({'txid': txid, 'vout': i, 'amount': sent_value})
- for i in xrange(1000):
+ for i in xrange(MAX_DESCENDANTS):
utxo = transaction_package.pop(0)
try:
(txid, sent_value) = self.chain_transaction(self.nodes[0], utxo['txid'], utxo['vout'], utxo['amount'], fee, 10)
for j in xrange(10):
transaction_package.append({'txid': txid, 'vout': j, 'amount': sent_value})
- if i == 998:
+ if i == MAX_DESCENDANTS - 2:
mempool = self.nodes[0].getrawmempool(True)
- assert_equal(mempool[parent_transaction]['descendantcount'], 1000)
+ assert_equal(mempool[parent_transaction]['descendantcount'], MAX_DESCENDANTS)
except JSONRPCException as e:
print e.error['message']
- assert_equal(i, 999)
+ assert_equal(i, MAX_DESCENDANTS - 1)
print "tx that would create too large descendant package successfully rejected"
# TODO: check that node1's mempool is as expected
diff --git a/qa/rpc-tests/mempool_coinbase_spends.py b/qa/rpc-tests/mempool_reorg.py
index c64a15b9f5..d96a3f8266 100755
--- a/qa/rpc-tests/mempool_coinbase_spends.py
+++ b/qa/rpc-tests/mempool_reorg.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python2
-# Copyright (c) 2014 The Bitcoin Core developers
+# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
@@ -52,16 +52,25 @@ class MempoolCoinbaseTest(BitcoinTestFramework):
# 3. Indirect (coinbase and child both in chain) : spend_103 and spend_103_1
# Use invalidatblock to make all of the above coinbase spends invalid (immature coinbase),
# and make sure the mempool code behaves correctly.
- b = [ self.nodes[0].getblockhash(n) for n in range(102, 105) ]
+ b = [ self.nodes[0].getblockhash(n) for n in range(101, 105) ]
coinbase_txids = [ self.nodes[0].getblock(h)['tx'][0] for h in b ]
- spend_101_raw = self.create_tx(coinbase_txids[0], node1_address, 50)
- spend_102_raw = self.create_tx(coinbase_txids[1], node0_address, 50)
- spend_103_raw = self.create_tx(coinbase_txids[2], node0_address, 50)
+ spend_101_raw = self.create_tx(coinbase_txids[1], node1_address, 50)
+ spend_102_raw = self.create_tx(coinbase_txids[2], node0_address, 50)
+ spend_103_raw = self.create_tx(coinbase_txids[3], node0_address, 50)
+
+ # Create a block-height-locked transaction which will be invalid after reorg
+ timelock_tx = self.nodes[0].createrawtransaction([{"txid": coinbase_txids[0], "vout": 0}], {node0_address: 50})
+ # Set the time lock
+ timelock_tx = timelock_tx.replace("ffffffff", "11111111", 1)
+ timelock_tx = timelock_tx[:-8] + hex(self.nodes[0].getblockcount() + 2)[2:] + "000000"
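+ # (lowering nSequence below 0xffffffff makes nLockTime effective; the last 8 hex characters are the 4-byte nLockTime, set here to the current height plus 2)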
+ timelock_tx = self.nodes[0].signrawtransaction(timelock_tx)["hex"]
+ assert_raises(JSONRPCException, self.nodes[0].sendrawtransaction, timelock_tx)
# Broadcast and mine spend_102 and 103:
spend_102_id = self.nodes[0].sendrawtransaction(spend_102_raw)
spend_103_id = self.nodes[0].sendrawtransaction(spend_103_raw)
self.nodes[0].generate(1)
+ assert_raises(JSONRPCException, self.nodes[0].sendrawtransaction, timelock_tx)
# Create 102_1 and 103_1:
spend_102_1_raw = self.create_tx(spend_102_id, node1_address, 50)
@@ -69,7 +78,8 @@ class MempoolCoinbaseTest(BitcoinTestFramework):
# Broadcast and mine 103_1:
spend_103_1_id = self.nodes[0].sendrawtransaction(spend_103_1_raw)
- self.nodes[0].generate(1)
+ last_block = self.nodes[0].generate(1)
+ timelock_tx_id = self.nodes[0].sendrawtransaction(timelock_tx)
# ... now put spend_101 and spend_102_1 in memory pools:
spend_101_id = self.nodes[0].sendrawtransaction(spend_101_raw)
@@ -77,7 +87,11 @@ class MempoolCoinbaseTest(BitcoinTestFramework):
self.sync_all()
- assert_equal(set(self.nodes[0].getrawmempool()), set([ spend_101_id, spend_102_1_id ]))
+ assert_equal(set(self.nodes[0].getrawmempool()), set([ spend_101_id, spend_102_1_id, timelock_tx_id ]))
+
+ for node in self.nodes:
+ node.invalidateblock(last_block[0])
+ assert_equal(set(self.nodes[0].getrawmempool()), set([ spend_101_id, spend_102_1_id, spend_103_1_id ]))
# Use invalidateblock to re-org back and make all those coinbase spends
# immature/invalid:
diff --git a/qa/rpc-tests/mempool_resurrect_test.py b/qa/rpc-tests/mempool_resurrect_test.py
index 19c74bb751..750953ee5e 100755
--- a/qa/rpc-tests/mempool_resurrect_test.py
+++ b/qa/rpc-tests/mempool_resurrect_test.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python2
-# Copyright (c) 2014 The Bitcoin Core developers
+# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
diff --git a/qa/rpc-tests/mempool_spendcoinbase.py b/qa/rpc-tests/mempool_spendcoinbase.py
index fc17c50692..35ce76e244 100755
--- a/qa/rpc-tests/mempool_spendcoinbase.py
+++ b/qa/rpc-tests/mempool_spendcoinbase.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python2
-# Copyright (c) 2014 The Bitcoin Core developers
+# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
diff --git a/qa/rpc-tests/merkle_blocks.py b/qa/rpc-tests/merkle_blocks.py
index 72a80ce6ca..08e5db45fa 100755
--- a/qa/rpc-tests/merkle_blocks.py
+++ b/qa/rpc-tests/merkle_blocks.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python2
-# Copyright (c) 2014 The Bitcoin Core developers
+# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
diff --git a/qa/rpc-tests/multi_rpc.py b/qa/rpc-tests/multi_rpc.py
new file mode 100755
index 0000000000..62071d426e
--- /dev/null
+++ b/qa/rpc-tests/multi_rpc.py
@@ -0,0 +1,122 @@
+#!/usr/bin/env python2
+# Copyright (c) 2015 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+
+#
+# Test multiple RPC users configured via the rpcauth config option
+#
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import *
+import base64
+
+try:
+ import http.client as httplib
+except ImportError:
+ import httplib
+try:
+ import urllib.parse as urlparse
+except ImportError:
+ import urlparse
+
+class HTTPBasicsTest (BitcoinTestFramework):
+ def setup_nodes(self):
+ return start_nodes(4, self.options.tmpdir)
+
+ def setup_chain(self):
+ print("Initializing test directory "+self.options.tmpdir)
+ initialize_chain(self.options.tmpdir)
+ #Append rpcauth to bitcoin.conf before initialization
+ rpcauth = "rpcauth=rt:93648e835a54c573682c2eb19f882535$7681e9c5b74bdd85e78166031d2058e1069b3ed7ed967c93fc63abba06f31144"
+ rpcauth2 = "rpcauth=rt2:f8607b1a88861fac29dfccf9b52ff9f$ff36a0c23c8c62b4846112e50fa888416e94c17bfd4c42f88fd8f55ec6a3137e"
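+ #rpcauth values have the form <user>:<salt>$<HMAC-SHA256(salt, password)>; these were generated with the contrib/rpcuser tool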
+ with open(os.path.join(self.options.tmpdir+"/node0", "bitcoin.conf"), 'a') as f:
+ f.write(rpcauth+"\n")
+ f.write(rpcauth2+"\n")
+
+ def run_test(self):
+
+ ##################################################
+ # Check correctness of the rpcauth config option #
+ ##################################################
+ url = urlparse.urlparse(self.nodes[0].url)
+
+ #Old authpair
+ authpair = url.username + ':' + url.password
+
+ #New authpair generated via contrib/rpcuser tool
+ rpcauth = "rpcauth=rt:93648e835a54c573682c2eb19f882535$7681e9c5b74bdd85e78166031d2058e1069b3ed7ed967c93fc63abba06f31144"
+ password = "cA773lm788buwYe4g4WT+05pKyNruVKjQ25x3n0DQcM="
+
+ #Second authpair with different username
+ rpcauth2 = "rpcauth=rt2:f8607b1a88861fac29dfccf9b52ff9f$ff36a0c23c8c62b4846112e50fa888416e94c17bfd4c42f88fd8f55ec6a3137e"
+ password2 = "8/F3uMDw4KSEbw96U3CA1C4X05dkHDN2BPFjTgZW4KI="
+ authpairnew = "rt:"+password
+
+ headers = {"Authorization": "Basic " + base64.b64encode(authpair)}
+
+ conn = httplib.HTTPConnection(url.hostname, url.port)
+ conn.connect()
+ conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
+ resp = conn.getresponse()
+ assert_equal(resp.status==401, False)
+ conn.close()
+
+ #Use new authpair to confirm both work
+ headers = {"Authorization": "Basic " + base64.b64encode(authpairnew)}
+
+ conn = httplib.HTTPConnection(url.hostname, url.port)
+ conn.connect()
+ conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
+ resp = conn.getresponse()
+ assert_equal(resp.status==401, False)
+ conn.close()
+
+ #Wrong login name with rt's password
+ authpairnew = "rtwrong:"+password
+ headers = {"Authorization": "Basic " + base64.b64encode(authpairnew)}
+
+ conn = httplib.HTTPConnection(url.hostname, url.port)
+ conn.connect()
+ conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
+ resp = conn.getresponse()
+ assert_equal(resp.status==401, True)
+ conn.close()
+
+ #Wrong password for rt
+ authpairnew = "rt:"+password+"wrong"
+ headers = {"Authorization": "Basic " + base64.b64encode(authpairnew)}
+
+ conn = httplib.HTTPConnection(url.hostname, url.port)
+ conn.connect()
+ conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
+ resp = conn.getresponse()
+ assert_equal(resp.status==401, True)
+ conn.close()
+
+ #Correct for rt2
+ authpairnew = "rt2:"+password2
+ headers = {"Authorization": "Basic " + base64.b64encode(authpairnew)}
+
+ conn = httplib.HTTPConnection(url.hostname, url.port)
+ conn.connect()
+ conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
+ resp = conn.getresponse()
+ assert_equal(resp.status==401, False)
+ conn.close()
+
+ #Wrong password for rt2
+ authpairnew = "rt2:"+password2+"wrong"
+ headers = {"Authorization": "Basic " + base64.b64encode(authpairnew)}
+
+ conn = httplib.HTTPConnection(url.hostname, url.port)
+ conn.connect()
+ conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
+ resp = conn.getresponse()
+ assert_equal(resp.status==401, True)
+ conn.close()
+
+
+
+if __name__ == '__main__':
+ HTTPBasicsTest().main()
diff --git a/qa/rpc-tests/nodehandling.py b/qa/rpc-tests/nodehandling.py
index e383a3a12c..3239dd0339 100755
--- a/qa/rpc-tests/nodehandling.py
+++ b/qa/rpc-tests/nodehandling.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python2
-# Copyright (c) 2014 The Bitcoin Core developers
+# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
diff --git a/qa/rpc-tests/p2p-acceptblock.py b/qa/rpc-tests/p2p-acceptblock.py
index 700deab207..23872d8494 100755
--- a/qa/rpc-tests/p2p-acceptblock.py
+++ b/qa/rpc-tests/p2p-acceptblock.py
@@ -62,7 +62,6 @@ The test:
class TestNode(NodeConnCB):
def __init__(self):
NodeConnCB.__init__(self)
- self.create_callback_map()
self.connection = None
self.ping_counter = 1
self.last_pong = msg_pong()
diff --git a/qa/rpc-tests/p2p-fullblocktest.py b/qa/rpc-tests/p2p-fullblocktest.py
index 9555940cec..a6525e6793 100755
--- a/qa/rpc-tests/p2p-fullblocktest.py
+++ b/qa/rpc-tests/p2p-fullblocktest.py
@@ -7,7 +7,7 @@
from test_framework.test_framework import ComparisonTestFramework
from test_framework.util import *
-from test_framework.comptool import TestManager, TestInstance
+from test_framework.comptool import TestManager, TestInstance, RejectResult
from test_framework.mininode import *
from test_framework.blocktools import *
import logging
@@ -15,7 +15,7 @@ import copy
import time
import numbers
from test_framework.key import CECKey
-from test_framework.script import CScript, CScriptOp, SignatureHash, SIGHASH_ALL, OP_TRUE
+from test_framework.script import CScript, CScriptOp, SignatureHash, SIGHASH_ALL, OP_TRUE, OP_FALSE
class PreviousSpendableOutput(object):
def __init__(self, tx = CTransaction(), n = -1):
@@ -122,13 +122,29 @@ class FullBlockTest(ComparisonTestFramework):
return TestInstance([[self.tip, True]])
# returns a test case that asserts that the current tip was rejected
- def rejected():
- return TestInstance([[self.tip, False]])
+ def rejected(reject = None):
+ if reject is None:
+ return TestInstance([[self.tip, False]])
+ else:
+ return TestInstance([[self.tip, reject]])
# move the tip back to a previous block
def tip(number):
self.tip = self.blocks[number]
+ # add transactions to a block produced by next_block
+ def update_block(block_number, new_transactions):
+ block = self.blocks[block_number]
+ old_hash = block.sha256
+ self.add_transactions_to_block(block, new_transactions)
+ block.solve()
+ # Update the internal state just like in next_block
+ self.tip = block
+ self.block_heights[block.sha256] = self.block_heights[old_hash]
+ del self.block_heights[old_hash]
+ self.blocks[block_number] = block
+ return block
+
# creates a new block and advances the tip to that block
block = self.next_block
@@ -141,14 +157,15 @@ class FullBlockTest(ComparisonTestFramework):
# Now we need that block to mature so we can spend the coinbase.
test = TestInstance(sync_every_block=False)
- for i in range(100):
+ for i in range(99):
block(1000 + i)
test.blocks_and_transactions.append([self.tip, True])
save_spendable_output()
yield test
- # Start by bulding a couple of blocks on top (which output is spent is in parentheses):
+ # Start by building a couple of blocks on top (which output is spent is
+ # in parentheses):
# genesis -> b1 (0) -> b2 (1)
out0 = get_spendable_output()
block(1, spend=out0)
@@ -156,8 +173,7 @@ class FullBlockTest(ComparisonTestFramework):
yield accepted()
out1 = get_spendable_output()
- block(2, spend=out1)
- # Inv again, then deliver twice (shouldn't break anything).
+ b2 = block(2, spend=out1)
yield accepted()
@@ -168,8 +184,8 @@ class FullBlockTest(ComparisonTestFramework):
#
# Nothing should happen at this point. We saw b2 first so it takes priority.
tip(1)
- block(3, spend=out1)
- # Deliver twice (should still not break anything)
+ b3 = block(3, spend=out1)
+ txout_b3 = PreviousSpendableOutput(b3.vtx[1], 1)
yield rejected()
@@ -214,7 +230,7 @@ class FullBlockTest(ComparisonTestFramework):
# \-> b3 (1) -> b4 (2)
tip(6)
block(9, spend=out4, additional_coinbase_value=1)
- yield rejected()
+ yield rejected(RejectResult(16, 'bad-cb-amount'))
# Create a fork that ends in a block with too much fee (the one that causes the reorg)
@@ -226,7 +242,7 @@ class FullBlockTest(ComparisonTestFramework):
yield rejected()
block(11, spend=out4, additional_coinbase_value=1)
- yield rejected()
+ yield rejected(RejectResult(16, 'bad-cb-amount'))
# Try again, but with a valid fork first
@@ -252,6 +268,10 @@ class FullBlockTest(ComparisonTestFramework):
yield TestInstance([[b12, True, b13.sha256]]) # New tip should be b13.
+ # Add a block with MAX_BLOCK_SIGOPS and one with one more sigop
+ # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
+ # \-> b12 (3) -> b13 (4) -> b15 (5) -> b16 (6)
+ # \-> b3 (1) -> b4 (2)
# Test that a block with a lot of checksigs is okay
lots_of_checksigs = CScript([OP_CHECKSIG] * (1000000 / 50 - 1))
@@ -264,8 +284,121 @@ class FullBlockTest(ComparisonTestFramework):
out6 = get_spendable_output()
too_many_checksigs = CScript([OP_CHECKSIG] * (1000000 / 50))
block(16, spend=out6, script=too_many_checksigs)
+ yield rejected(RejectResult(16, 'bad-blk-sigops'))
+
+
+ # Attempt to spend a transaction created on a different fork
+ # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
+ # \-> b12 (3) -> b13 (4) -> b15 (5) -> b17 (b3.vtx[1])
+ # \-> b3 (1) -> b4 (2)
+ tip(15)
+ block(17, spend=txout_b3)
+ yield rejected(RejectResult(16, 'bad-txns-inputs-missingorspent'))
+
+ # Attempt to spend a transaction created on a different fork (on a fork this time)
+ # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
+ # \-> b12 (3) -> b13 (4) -> b15 (5)
+ # \-> b18 (b3.vtx[1]) -> b19 (6)
+ # \-> b3 (1) -> b4 (2)
+ tip(13)
+ block(18, spend=txout_b3)
+ yield rejected()
+
+ block(19, spend=out6)
yield rejected()
+ # Attempt to spend a coinbase at depth too low
+ # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
+ # \-> b12 (3) -> b13 (4) -> b15 (5) -> b20 (7)
+ # \-> b3 (1) -> b4 (2)
+ tip(15)
+ out7 = get_spendable_output()
+ block(20, spend=out7)
+ yield rejected(RejectResult(16, 'bad-txns-premature-spend-of-coinbase'))
+
+ # Attempt to spend a coinbase at depth too low (on a fork this time)
+ # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
+ # \-> b12 (3) -> b13 (4) -> b15 (5)
+ # \-> b21 (6) -> b22 (5)
+ # \-> b3 (1) -> b4 (2)
+ tip(13)
+ block(21, spend=out6)
+ yield rejected()
+
+ block(22, spend=out5)
+ yield rejected()
+
+ # Create a block on either side of MAX_BLOCK_SIZE and make sure it's accepted/rejected
+ # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
+ # \-> b12 (3) -> b13 (4) -> b15 (5) -> b23 (6)
+ # \-> b24 (6) -> b25 (7)
+ # \-> b3 (1) -> b4 (2)
+ tip(15)
+ b23 = block(23, spend=out6)
+ old_hash = b23.sha256
+ tx = CTransaction()
+ script_length = MAX_BLOCK_SIZE - len(b23.serialize()) - 69
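+ # (the 69-byte constant approximates the extra transaction's overhead outside its large script; the assert below confirms the block comes out at exactly MAX_BLOCK_SIZE)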
+ script_output = CScript([chr(0)*script_length])
+ tx.vout.append(CTxOut(0, script_output))
+ tx.vin.append(CTxIn(COutPoint(b23.vtx[1].sha256, 1)))
+ b23 = update_block(23, [tx])
+ # Make sure the math above worked out to produce a max-sized block
+ assert_equal(len(b23.serialize()), MAX_BLOCK_SIZE)
+ yield accepted()
+
+ # Make the next block one byte bigger and check that it fails
+ tip(15)
+ b24 = block(24, spend=out6)
+ script_length = MAX_BLOCK_SIZE - len(b24.serialize()) - 69
+ script_output = CScript([chr(0)*(script_length+1)])
+ tx.vout = [CTxOut(0, script_output)]
+ b24 = update_block(24, [tx])
+ assert_equal(len(b24.serialize()), MAX_BLOCK_SIZE+1)
+ yield rejected(RejectResult(16, 'bad-blk-length'))
+
+ b25 = block(25, spend=out7)
+ yield rejected()
+
+ # Create blocks with a coinbase input script size out of range
+ # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
+ # \-> b12 (3) -> b13 (4) -> b15 (5) -> b23 (6) -> b30 (7)
+ # \-> ... (6) -> ... (7)
+ # \-> b3 (1) -> b4 (2)
+ tip(15)
+ b26 = block(26, spend=out6)
+ b26.vtx[0].vin[0].scriptSig = chr(0)
+ b26.vtx[0].rehash()
+ # update_block causes the merkle root to get updated, even with no new
+ # transactions, and updates the required state.
+ b26 = update_block(26, [])
+ yield rejected(RejectResult(16, 'bad-cb-length'))
+
+ # Extend the b26 chain to make sure bitcoind isn't accepting b26
+ b27 = block(27, spend=out7)
+ yield rejected()
+
+ # Now try a too-large-coinbase script
+ tip(15)
+ b28 = block(28, spend=out6)
+ b28.vtx[0].vin[0].scriptSig = chr(0)*101
+ b28.vtx[0].rehash()
+ b28 = update_block(28, [])
+ yield rejected(RejectResult(16, 'bad-cb-length'))
+
+ # Extend the b28 chain to make sure bitcoind isn't accepting b28
+ b29 = block(29, spend=out7)
+ # TODO: Should get a reject message back with "bad-prevblk", except
+ # there's a bug that prevents this from being detected. Just note
+ # failure for now, and add the reject result later.
+ yield rejected()
+
+ # b30 has a max-sized coinbase scriptSig.
+ tip(23)
+ b30 = block(30)
+ b30.vtx[0].vin[0].scriptSig = chr(0)*100
+ b30.vtx[0].rehash()
+ b30 = update_block(30, [])
+ yield accepted()
if __name__ == '__main__':
diff --git a/qa/rpc-tests/prioritise_transaction.py b/qa/rpc-tests/prioritise_transaction.py
new file mode 100755
index 0000000000..4a79d38da0
--- /dev/null
+++ b/qa/rpc-tests/prioritise_transaction.py
@@ -0,0 +1,126 @@
+#!/usr/bin/env python2
+# Copyright (c) 2015 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+
+#
+# Test PrioritiseTransaction code
+#
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import *
+
+COIN = 100000000
+
+class PrioritiseTransactionTest(BitcoinTestFramework):
+
+ def __init__(self):
+ self.txouts = gen_return_txouts()
+
+ def setup_chain(self):
+ print("Initializing test directory "+self.options.tmpdir)
+ initialize_chain_clean(self.options.tmpdir, 1)
+
+ def setup_network(self):
+ self.nodes = []
+ self.is_network_split = False
+
+ self.nodes.append(start_node(0, self.options.tmpdir, ["-debug", "-printpriority=1"]))
+ self.relayfee = self.nodes[0].getnetworkinfo()['relayfee']
+
+ def run_test(self):
+ utxos = create_confirmed_utxos(self.relayfee, self.nodes[0], 90)
+ base_fee = self.relayfee*100 # our transactions are smaller than 100kb
+ txids = []
+
+ # Create 3 batches of transactions at 3 different fee rate levels
+ for i in xrange(3):
+ txids.append([])
+ txids[i] = create_lots_of_big_transactions(self.nodes[0], self.txouts, utxos[30*i:30*i+30], (i+1)*base_fee)
+
+ # Add a fee delta to something in the cheapest bucket and make sure it gets mined.
+ # Also check that a different entry in the cheapest bucket is NOT mined (lower
+ # the priority to ensure it's not mined due to priority)
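+ # (prioritisetransaction takes a txid, a priority delta, and a fee delta in satoshis)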
+ self.nodes[0].prioritisetransaction(txids[0][0], 0, int(3*base_fee*COIN))
+ self.nodes[0].prioritisetransaction(txids[0][1], -1e15, 0)
+
+ self.nodes[0].generate(1)
+
+ mempool = self.nodes[0].getrawmempool()
+ print "Assert that prioritised transasction was mined"
+ assert(txids[0][0] not in mempool)
+ assert(txids[0][1] in mempool)
+
+ high_fee_tx = None
+ for x in txids[2]:
+ if x not in mempool:
+ high_fee_tx = x
+
+ # Something high-fee should have been mined!
+ assert(high_fee_tx != None)
+
+ # Add a prioritisation before a tx is in the mempool (de-prioritising a
+ # high-fee transaction).
+ self.nodes[0].prioritisetransaction(high_fee_tx, -1e15, -int(2*base_fee*COIN))
+
+ # Add everything back to mempool
+ self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
+
+ # Check to make sure our high fee rate tx is back in the mempool
+ mempool = self.nodes[0].getrawmempool()
+ assert(high_fee_tx in mempool)
+
+ # Now verify the high feerate transaction isn't mined.
+ self.nodes[0].generate(5)
+
+ # High fee transaction should not have been mined, but other high fee rate
+ # transactions should have been.
+ mempool = self.nodes[0].getrawmempool()
+ print "Assert that de-prioritised transaction is still in mempool"
+ assert(high_fee_tx in mempool)
+ for x in txids[2]:
+ if (x != high_fee_tx):
+ assert(x not in mempool)
+
+ # Create a free, low priority transaction. Should be rejected.
+ utxo_list = self.nodes[0].listunspent()
+ assert(len(utxo_list) > 0)
+ utxo = utxo_list[0]
+
+ inputs = []
+ outputs = {}
+ inputs.append({"txid" : utxo["txid"], "vout" : utxo["vout"]})
+ outputs[self.nodes[0].getnewaddress()] = utxo["amount"] - self.relayfee
+ raw_tx = self.nodes[0].createrawtransaction(inputs, outputs)
+ tx_hex = self.nodes[0].signrawtransaction(raw_tx)["hex"]
+ txid = self.nodes[0].sendrawtransaction(tx_hex)
+
+ # A tx that spends an in-mempool tx has 0 priority, so we can use it to
+ # test the effect of using prioritisetransaction on mempool acceptance
+ inputs = []
+ inputs.append({"txid": txid, "vout": 0})
+ outputs = {}
+ outputs[self.nodes[0].getnewaddress()] = utxo["amount"] - self.relayfee
+ raw_tx2 = self.nodes[0].createrawtransaction(inputs, outputs)
+ tx2_hex = self.nodes[0].signrawtransaction(raw_tx2)["hex"]
+ tx2_id = self.nodes[0].decoderawtransaction(tx2_hex)["txid"]
+
+ try:
+ self.nodes[0].sendrawtransaction(tx2_hex)
+ except JSONRPCException as exp:
+ assert_equal(exp.error['code'], -26) # insufficient fee
+ assert(tx2_id not in self.nodes[0].getrawmempool())
+ else:
+ assert(False)
+
+ # This is a less than 1000-byte transaction, so just set the fee
+ # to be the minimum for a 1000 byte transaction and check that it is
+ # accepted.
+ self.nodes[0].prioritisetransaction(tx2_id, 0, int(self.relayfee*COIN))
+
+ print "Assert that prioritised free transaction is accepted to mempool"
+ assert_equal(self.nodes[0].sendrawtransaction(tx2_hex), tx2_id)
+ assert(tx2_id in self.nodes[0].getrawmempool())
+
+if __name__ == '__main__':
+ PrioritiseTransactionTest().main()
diff --git a/qa/rpc-tests/pruning.py b/qa/rpc-tests/pruning.py
index 2824c51ce7..26ae4af010 100755
--- a/qa/rpc-tests/pruning.py
+++ b/qa/rpc-tests/pruning.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python2
-# Copyright (c) 2014 The Bitcoin Core developers
+# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
@@ -23,24 +23,7 @@ class PruneTest(BitcoinTestFramework):
def __init__(self):
self.utxo = []
self.address = ["",""]
-
- # Some pre-processing to create a bunch of OP_RETURN txouts to insert into transactions we create
- # So we have big transactions and full blocks to fill up our block files
-
- # create one script_pubkey
- script_pubkey = "6a4d0200" #OP_RETURN OP_PUSH2 512 bytes
- for i in xrange (512):
- script_pubkey = script_pubkey + "01"
- # concatenate 128 txouts of above script_pubkey which we'll insert before the txout for change
- self.txouts = "81"
- for k in xrange(128):
- # add txout value
- self.txouts = self.txouts + "0000000000000000"
- # add length of script_pubkey
- self.txouts = self.txouts + "fd0402"
- # add script_pubkey
- self.txouts = self.txouts + script_pubkey
-
+ self.txouts = gen_return_txouts()
def setup_chain(self):
print("Initializing test directory "+self.options.tmpdir)
@@ -61,6 +44,9 @@ class PruneTest(BitcoinTestFramework):
self.address[0] = self.nodes[0].getnewaddress()
self.address[1] = self.nodes[1].getnewaddress()
+ # Determine default relay fee
+ self.relayfee = self.nodes[0].getnetworkinfo()["relayfee"]
+
connect_nodes(self.nodes[0], 1)
connect_nodes(self.nodes[1], 2)
connect_nodes(self.nodes[2], 0)
@@ -239,7 +225,7 @@ class PruneTest(BitcoinTestFramework):
outputs = {}
t = self.utxo.pop()
inputs.append({ "txid" : t["txid"], "vout" : t["vout"]})
- remchange = t["amount"] - Decimal("0.001000")
+ remchange = t["amount"] - 100*self.relayfee # Fee must be above min relay rate for 66kb tx
outputs[address]=remchange
# Create a basic transaction that will send change back to ourself after account for a fee
# And then insert the 128 generated transaction outs in the middle rawtx[92] is where the #
diff --git a/qa/rpc-tests/rawtransactions.py b/qa/rpc-tests/rawtransactions.py
index 173faf736e..d77b41979b 100755
--- a/qa/rpc-tests/rawtransactions.py
+++ b/qa/rpc-tests/rawtransactions.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python2
-# Copyright (c) 2014 The Bitcoin Core developers
+# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
diff --git a/qa/rpc-tests/receivedby.py b/qa/rpc-tests/receivedby.py
index 16d6bd4cf1..18af0e8102 100755
--- a/qa/rpc-tests/receivedby.py
+++ b/qa/rpc-tests/receivedby.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python2
-# Copyright (c) 2014 The Bitcoin Core developers
+# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
diff --git a/qa/rpc-tests/reindex.py b/qa/rpc-tests/reindex.py
index f2e3f248ea..d90177a029 100755
--- a/qa/rpc-tests/reindex.py
+++ b/qa/rpc-tests/reindex.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python2
-# Copyright (c) 2014 The Bitcoin Core developers
+# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
diff --git a/qa/rpc-tests/replace-by-fee.py b/qa/rpc-tests/replace-by-fee.py
new file mode 100755
index 0000000000..734db33b51
--- /dev/null
+++ b/qa/rpc-tests/replace-by-fee.py
@@ -0,0 +1,593 @@
+#!/usr/bin/env python2
+# Copyright (c) 2014-2015 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+
+#
+# Test replace by fee code
+#
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import *
+from test_framework.script import *
+from test_framework.mininode import *
+import binascii
+
+COIN = 100000000
+MAX_REPLACEMENT_LIMIT = 100
+
+def satoshi_round(amount):
+ return Decimal(amount).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)
+
+def txToHex(tx):
+ return binascii.hexlify(tx.serialize()).decode('utf-8')
+
+def make_utxo(node, amount, confirmed=True, scriptPubKey=CScript([1])):
+ """Create a txout with a given amount and scriptPubKey
+
+ Mines coins as needed.
+
+ confirmed - txouts created will be confirmed in the blockchain;
+ unconfirmed otherwise.
+ """
+ fee = 1*COIN
+ while node.getbalance() < satoshi_round((amount + fee)/COIN):
+ node.generate(100)
+ #print (node.getbalance(), amount, fee)
+
+ new_addr = node.getnewaddress()
+ #print new_addr
+ txid = node.sendtoaddress(new_addr, satoshi_round((amount+fee)/COIN))
+ tx1 = node.getrawtransaction(txid, 1)
+ txid = int(txid, 16)
+ i = None
+
+ for i, txout in enumerate(tx1['vout']):
+ #print i, txout['scriptPubKey']['addresses']
+ if txout['scriptPubKey']['addresses'] == [new_addr]:
+ #print i
+ break
+ assert i is not None
+
+ tx2 = CTransaction()
+ tx2.vin = [CTxIn(COutPoint(txid, i))]
+ tx2.vout = [CTxOut(amount, scriptPubKey)]
+ tx2.rehash()
+
+ tx2_hex = binascii.hexlify(tx2.serialize()).decode('utf-8')
+ #print tx2_hex
+
+ signed_tx = node.signrawtransaction(binascii.hexlify(tx2.serialize()).decode('utf-8'))
+
+ txid = node.sendrawtransaction(signed_tx['hex'], True)
+
+ # If requested, ensure txouts are confirmed.
+ if confirmed:
+ mempool_size = len(node.getrawmempool())
+ while mempool_size > 0:
+ node.generate(1)
+ new_size = len(node.getrawmempool())
+ # Error out if we have something stuck in the mempool, as this
+ # would likely be a bug.
+ assert(new_size < mempool_size)
+ mempool_size = new_size
+
+ return COutPoint(int(txid, 16), 0)
+
+class ReplaceByFeeTest(BitcoinTestFramework):
+
+ def setup_network(self):
+ self.nodes = []
+ self.nodes.append(start_node(0, self.options.tmpdir, ["-maxorphantx=1000", "-debug",
+ "-relaypriority=0", "-whitelist=127.0.0.1",
+ "-limitancestorcount=50",
+ "-limitancestorsize=101",
+ "-limitdescendantcount=200",
+ "-limitdescendantsize=101"
+ ]))
+ self.is_network_split = False
+
+ def run_test(self):
+ make_utxo(self.nodes[0], 1*COIN)
+
+ print "Running test simple doublespend..."
+ self.test_simple_doublespend()
+
+ print "Running test doublespend chain..."
+ self.test_doublespend_chain()
+
+ print "Running test doublespend tree..."
+ self.test_doublespend_tree()
+
+ print "Running test replacement feeperkb..."
+ self.test_replacement_feeperkb()
+
+ print "Running test spends of conflicting outputs..."
+ self.test_spends_of_conflicting_outputs()
+
+ print "Running test new unconfirmed inputs..."
+ self.test_new_unconfirmed_inputs()
+
+ print "Running test too many replacements..."
+ self.test_too_many_replacements()
+
+ print "Running test opt-in..."
+ self.test_opt_in()
+
+ print "Running test prioritised transactions..."
+ self.test_prioritised_transactions()
+
+ print "Passed\n"
+
+ def test_simple_doublespend(self):
+ """Simple doublespend"""
+ tx0_outpoint = make_utxo(self.nodes[0], 1.1*COIN)
+
+ tx1a = CTransaction()
+ tx1a.vin = [CTxIn(tx0_outpoint, nSequence=0)]
+ tx1a.vout = [CTxOut(1*COIN, CScript([b'a']))]
+ tx1a_hex = txToHex(tx1a)
+ tx1a_txid = self.nodes[0].sendrawtransaction(tx1a_hex, True)
+
+ # Should fail because we haven't changed the fee
+ tx1b = CTransaction()
+ tx1b.vin = [CTxIn(tx0_outpoint, nSequence=0)]
+ tx1b.vout = [CTxOut(1*COIN, CScript([b'b']))]
+ tx1b_hex = txToHex(tx1b)
+
+ try:
+ tx1b_txid = self.nodes[0].sendrawtransaction(tx1b_hex, True)
+ except JSONRPCException as exp:
+ assert_equal(exp.error['code'], -26) # insufficient fee
+ else:
+ assert(False)
+
+ # Extra 0.1 BTC fee
+ tx1b = CTransaction()
+ tx1b.vin = [CTxIn(tx0_outpoint, nSequence=0)]
+ tx1b.vout = [CTxOut(0.9*COIN, CScript([b'b']))]
+ tx1b_hex = txToHex(tx1b)
+ tx1b_txid = self.nodes[0].sendrawtransaction(tx1b_hex, True)
+
+ mempool = self.nodes[0].getrawmempool()
+
+ assert (tx1a_txid not in mempool)
+ assert (tx1b_txid in mempool)
+
+ assert_equal(tx1b_hex, self.nodes[0].getrawtransaction(tx1b_txid))
+
+ def test_doublespend_chain(self):
+ """Doublespend of a long chain"""
+
+ initial_nValue = 50*COIN
+ tx0_outpoint = make_utxo(self.nodes[0], initial_nValue)
+
+ prevout = tx0_outpoint
+ remaining_value = initial_nValue
+ chain_txids = []
+ while remaining_value > 10*COIN:
+ remaining_value -= 1*COIN
+ tx = CTransaction()
+ tx.vin = [CTxIn(prevout, nSequence=0)]
+ tx.vout = [CTxOut(remaining_value, CScript([1]))]
+ tx_hex = txToHex(tx)
+ txid = self.nodes[0].sendrawtransaction(tx_hex, True)
+ chain_txids.append(txid)
+ prevout = COutPoint(int(txid, 16), 0)
+
+ # Whether the double-spend is allowed is judged against the total fees of
+ # all replaced child transactions (40 BTC here), so this attempt is rejected.
+ dbl_tx = CTransaction()
+ dbl_tx.vin = [CTxIn(tx0_outpoint, nSequence=0)]
+ dbl_tx.vout = [CTxOut(initial_nValue - 30*COIN, CScript([1]))]
+ dbl_tx_hex = txToHex(dbl_tx)
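+ # (this replacement pays 30 BTC in fees, less than the 40 BTC paid by the chain it would evict)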
+
+ try:
+ self.nodes[0].sendrawtransaction(dbl_tx_hex, True)
+ except JSONRPCException as exp:
+ assert_equal(exp.error['code'], -26) # insufficient fee
+ else:
+ assert(False) # transaction mistakenly accepted!
+
+ # Accepted with sufficient fee
+ dbl_tx = CTransaction()
+ dbl_tx.vin = [CTxIn(tx0_outpoint, nSequence=0)]
+ dbl_tx.vout = [CTxOut(1*COIN, CScript([1]))]
+ dbl_tx_hex = txToHex(dbl_tx)
+ self.nodes[0].sendrawtransaction(dbl_tx_hex, True)
+
+ mempool = self.nodes[0].getrawmempool()
+ for doublespent_txid in chain_txids:
+ assert(doublespent_txid not in mempool)
+
+ def test_doublespend_tree(self):
+ """Doublespend of a big tree of transactions"""
+
+ initial_nValue = 50*COIN
+ tx0_outpoint = make_utxo(self.nodes[0], initial_nValue)
+
+ def branch(prevout, initial_value, max_txs, tree_width=5, fee=0.0001*COIN, _total_txs=None):
+ if _total_txs is None:
+ _total_txs = [0]
+ if _total_txs[0] >= max_txs:
+ return
+
+ txout_value = (initial_value - fee) // tree_width
+ if txout_value < fee:
+ return
+
+ vout = [CTxOut(txout_value, CScript([i+1]))
+ for i in range(tree_width)]
+ tx = CTransaction()
+ tx.vin = [CTxIn(prevout, nSequence=0)]
+ tx.vout = vout
+ tx_hex = txToHex(tx)
+
+ assert(len(tx.serialize()) < 100000)
+ txid = self.nodes[0].sendrawtransaction(tx_hex, True)
+ yield tx
+ _total_txs[0] += 1
+
+ txid = int(txid, 16)
+
+ for i, txout in enumerate(tx.vout):
+ for x in branch(COutPoint(txid, i), txout_value,
+ max_txs,
+ tree_width=tree_width, fee=fee,
+ _total_txs=_total_txs):
+ yield x
+
+ fee = 0.0001*COIN
+ n = MAX_REPLACEMENT_LIMIT
+ tree_txs = list(branch(tx0_outpoint, initial_nValue, n, fee=fee))
+ assert_equal(len(tree_txs), n)
+
+ # Attempt double-spend, will fail because too little fee paid
+ dbl_tx = CTransaction()
+ dbl_tx.vin = [CTxIn(tx0_outpoint, nSequence=0)]
+ dbl_tx.vout = [CTxOut(initial_nValue - fee*n, CScript([1]))]
+ dbl_tx_hex = txToHex(dbl_tx)
+ try:
+ self.nodes[0].sendrawtransaction(dbl_tx_hex, True)
+ except JSONRPCException as exp:
+ assert_equal(exp.error['code'], -26) # insufficient fee
+ else:
+ assert(False)
+
+ # 1 BTC fee is enough
+ dbl_tx = CTransaction()
+ dbl_tx.vin = [CTxIn(tx0_outpoint, nSequence=0)]
+ dbl_tx.vout = [CTxOut(initial_nValue - fee*n - 1*COIN, CScript([1]))]
+ dbl_tx_hex = txToHex(dbl_tx)
+ self.nodes[0].sendrawtransaction(dbl_tx_hex, True)
+
+ mempool = self.nodes[0].getrawmempool()
+
+ for tx in tree_txs:
+ tx.rehash()
+ assert (tx.hash not in mempool)
+
+ # Try again, but with more total transactions than the "max txs
+ # double-spent at once" anti-DoS limit.
+ for n in (MAX_REPLACEMENT_LIMIT+1, MAX_REPLACEMENT_LIMIT*2):
+ fee = 0.0001*COIN
+ tx0_outpoint = make_utxo(self.nodes[0], initial_nValue)
+ tree_txs = list(branch(tx0_outpoint, initial_nValue, n, fee=fee))
+ assert_equal(len(tree_txs), n)
+
+ dbl_tx = CTransaction()
+ dbl_tx.vin = [CTxIn(tx0_outpoint, nSequence=0)]
+ dbl_tx.vout = [CTxOut(initial_nValue - 2*fee*n, CScript([1]))]
+ dbl_tx_hex = txToHex(dbl_tx)
+ try:
+ self.nodes[0].sendrawtransaction(dbl_tx_hex, True)
+ except JSONRPCException as exp:
+ assert_equal(exp.error['code'], -26)
+ assert_equal("too many potential replacements" in exp.error['message'], True)
+ else:
+ assert(False)
+
+ for tx in tree_txs:
+ tx.rehash()
+ self.nodes[0].getrawtransaction(tx.hash)
+
+ def test_replacement_feeperkb(self):
+ """Replacement requires fee-per-KB to be higher"""
+ tx0_outpoint = make_utxo(self.nodes[0], 1.1*COIN)
+
+ tx1a = CTransaction()
+ tx1a.vin = [CTxIn(tx0_outpoint, nSequence=0)]
+ tx1a.vout = [CTxOut(1*COIN, CScript([b'a']))]
+ tx1a_hex = txToHex(tx1a)
+ tx1a_txid = self.nodes[0].sendrawtransaction(tx1a_hex, True)
+
+ # Higher fee, but the fee per KB is much lower, so the replacement is
+ # rejected.
+ tx1b = CTransaction()
+ tx1b.vin = [CTxIn(tx0_outpoint, nSequence=0)]
+ tx1b.vout = [CTxOut(0.001*COIN, CScript([b'a'*999000]))]
+ tx1b_hex = txToHex(tx1b)
+
+ try:
+ tx1b_txid = self.nodes[0].sendrawtransaction(tx1b_hex, True)
+ except JSONRPCException as exp:
+ assert_equal(exp.error['code'], -26) # insufficient fee
+ else:
+ assert(False)
+
+ def test_spends_of_conflicting_outputs(self):
+ """Replacements that spend conflicting tx outputs are rejected"""
+ utxo1 = make_utxo(self.nodes[0], 1.2*COIN)
+ utxo2 = make_utxo(self.nodes[0], 3.0*COIN)
+
+ tx1a = CTransaction()
+ tx1a.vin = [CTxIn(utxo1, nSequence=0)]
+ tx1a.vout = [CTxOut(1.1*COIN, CScript([b'a']))]
+ tx1a_hex = txToHex(tx1a)
+ tx1a_txid = self.nodes[0].sendrawtransaction(tx1a_hex, True)
+
+ tx1a_txid = int(tx1a_txid, 16)
+
+ # Direct spend an output of the transaction we're replacing.
+ tx2 = CTransaction()
+ tx2.vin = [CTxIn(utxo1, nSequence=0), CTxIn(utxo2, nSequence=0)]
+ tx2.vin.append(CTxIn(COutPoint(tx1a_txid, 0), nSequence=0))
+ tx2.vout = tx1a.vout
+ tx2_hex = txToHex(tx2)
+
+ try:
+ tx2_txid = self.nodes[0].sendrawtransaction(tx2_hex, True)
+ except JSONRPCException as exp:
+ assert_equal(exp.error['code'], -26)
+ else:
+ assert(False)
+
+ # Spend tx1a's output to test the indirect case.
+ tx1b = CTransaction()
+ tx1b.vin = [CTxIn(COutPoint(tx1a_txid, 0), nSequence=0)]
+ tx1b.vout = [CTxOut(1.0*COIN, CScript([b'a']))]
+ tx1b_hex = txToHex(tx1b)
+ tx1b_txid = self.nodes[0].sendrawtransaction(tx1b_hex, True)
+ tx1b_txid = int(tx1b_txid, 16)
+
+ tx2 = CTransaction()
+ tx2.vin = [CTxIn(utxo1, nSequence=0), CTxIn(utxo2, nSequence=0),
+ CTxIn(COutPoint(tx1b_txid, 0))]
+ tx2.vout = tx1a.vout
+ tx2_hex = txToHex(tx2)
+
+ try:
+ tx2_txid = self.nodes[0].sendrawtransaction(tx2_hex, True)
+ except JSONRPCException as exp:
+ assert_equal(exp.error['code'], -26)
+ else:
+ assert(False)
+
+ def test_new_unconfirmed_inputs(self):
+ """Replacements that add new unconfirmed inputs are rejected"""
+ confirmed_utxo = make_utxo(self.nodes[0], 1.1*COIN)
+ unconfirmed_utxo = make_utxo(self.nodes[0], 0.1*COIN, False)
+
+ tx1 = CTransaction()
+ tx1.vin = [CTxIn(confirmed_utxo)]
+ tx1.vout = [CTxOut(1.0*COIN, CScript([b'a']))]
+ tx1_hex = txToHex(tx1)
+ tx1_txid = self.nodes[0].sendrawtransaction(tx1_hex, True)
+
+ tx2 = CTransaction()
+ tx2.vin = [CTxIn(confirmed_utxo), CTxIn(unconfirmed_utxo)]
+ tx2.vout = tx1.vout
+ tx2_hex = txToHex(tx2)
+
+ try:
+ tx2_txid = self.nodes[0].sendrawtransaction(tx2_hex, True)
+ except JSONRPCException as exp:
+ assert_equal(exp.error['code'], -26)
+ else:
+ assert(False)
+
+ def test_too_many_replacements(self):
+ """Replacements that evict too many transactions are rejected"""
+ # Try directly replacing more than MAX_REPLACEMENT_LIMIT
+ # transactions
+
+ # Start by creating a single transaction with many outputs
+ initial_nValue = 10*COIN
+ utxo = make_utxo(self.nodes[0], initial_nValue)
+ fee = 0.0001*COIN
+ split_value = int((initial_nValue-fee)/(MAX_REPLACEMENT_LIMIT+1))
+ actual_fee = initial_nValue - split_value*(MAX_REPLACEMENT_LIMIT+1)
+
+ outputs = []
+ for i in range(MAX_REPLACEMENT_LIMIT+1):
+ outputs.append(CTxOut(split_value, CScript([1])))
+
+ splitting_tx = CTransaction()
+ splitting_tx.vin = [CTxIn(utxo, nSequence=0)]
+ splitting_tx.vout = outputs
+ splitting_tx_hex = txToHex(splitting_tx)
+
+ txid = self.nodes[0].sendrawtransaction(splitting_tx_hex, True)
+ txid = int(txid, 16)
+
+ # Now spend each of those outputs individually
+ for i in range(MAX_REPLACEMENT_LIMIT+1):
+ tx_i = CTransaction()
+ tx_i.vin = [CTxIn(COutPoint(txid, i), nSequence=0)]
+ tx_i.vout = [CTxOut(split_value-fee, CScript([b'a']))]
+ tx_i_hex = txToHex(tx_i)
+ self.nodes[0].sendrawtransaction(tx_i_hex, True)
+
+ # Now create doublespend of the whole lot; should fail.
+ # Need a big enough fee to cover all spending transactions and have
+ # a higher fee rate
+ double_spend_value = (split_value-100*fee)*(MAX_REPLACEMENT_LIMIT+1)
+ inputs = []
+ for i in range(MAX_REPLACEMENT_LIMIT+1):
+ inputs.append(CTxIn(COutPoint(txid, i), nSequence=0))
+ double_tx = CTransaction()
+ double_tx.vin = inputs
+ double_tx.vout = [CTxOut(double_spend_value, CScript([b'a']))]
+ double_tx_hex = txToHex(double_tx)
+
+ try:
+ self.nodes[0].sendrawtransaction(double_tx_hex, True)
+ except JSONRPCException as exp:
+ assert_equal(exp.error['code'], -26)
+ assert_equal("too many potential replacements" in exp.error['message'], True)
+ else:
+ assert(False)
+
+ # If we remove an input, it should pass
+ double_tx = CTransaction()
+ double_tx.vin = inputs[0:-1]
+ double_tx.vout = [CTxOut(double_spend_value, CScript([b'a']))]
+ double_tx_hex = txToHex(double_tx)
+ self.nodes[0].sendrawtransaction(double_tx_hex, True)
+
+ def test_opt_in(self):
+ """ Replacing should only work if orig tx opted in """
+ tx0_outpoint = make_utxo(self.nodes[0], 1.1*COIN)
+
+ # Create a non-opting in transaction
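+ # (an input with nSequence 0xffffffff or 0xfffffffe does not signal replaceability; nSequence of 0xfffffffd or lower opts in, per BIP 125)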
+ tx1a = CTransaction()
+ tx1a.vin = [CTxIn(tx0_outpoint, nSequence=0xffffffff)]
+ tx1a.vout = [CTxOut(1*COIN, CScript([b'a']))]
+ tx1a_hex = txToHex(tx1a)
+ tx1a_txid = self.nodes[0].sendrawtransaction(tx1a_hex, True)
+
+ # Shouldn't be able to double-spend
+ tx1b = CTransaction()
+ tx1b.vin = [CTxIn(tx0_outpoint, nSequence=0)]
+ tx1b.vout = [CTxOut(0.9*COIN, CScript([b'b']))]
+ tx1b_hex = txToHex(tx1b)
+
+ try:
+ tx1b_txid = self.nodes[0].sendrawtransaction(tx1b_hex, True)
+ except JSONRPCException as exp:
+ assert_equal(exp.error['code'], -26)
+ else:
+ print tx1b_txid
+ assert(False)
+
+ tx1_outpoint = make_utxo(self.nodes[0], 1.1*COIN)
+
+ # Create a different non-opting in transaction
+ tx2a = CTransaction()
+ tx2a.vin = [CTxIn(tx1_outpoint, nSequence=0xfffffffe)]
+ tx2a.vout = [CTxOut(1*COIN, CScript([b'a']))]
+ tx2a_hex = txToHex(tx2a)
+ tx2a_txid = self.nodes[0].sendrawtransaction(tx2a_hex, True)
+
+ # Still shouldn't be able to double-spend
+ tx2b = CTransaction()
+ tx2b.vin = [CTxIn(tx1_outpoint, nSequence=0)]
+ tx2b.vout = [CTxOut(0.9*COIN, CScript([b'b']))]
+ tx2b_hex = txToHex(tx2b)
+
+ try:
+ tx2b_txid = self.nodes[0].sendrawtransaction(tx2b_hex, True)
+ except JSONRPCException as exp:
+ assert_equal(exp.error['code'], -26)
+ else:
+ assert(False)
+
+ # Now create a new transaction that spends from tx1a and tx2a,
+ # opting in on one of the inputs.
+ # The transaction should be replaceable via either input.
+
+ tx1a_txid = int(tx1a_txid, 16)
+ tx2a_txid = int(tx2a_txid, 16)
+
+ tx3a = CTransaction()
+ tx3a.vin = [CTxIn(COutPoint(tx1a_txid, 0), nSequence=0xffffffff),
+ CTxIn(COutPoint(tx2a_txid, 0), nSequence=0xfffffffd)]
+ tx3a.vout = [CTxOut(0.9*COIN, CScript([b'c'])), CTxOut(0.9*COIN, CScript([b'd']))]
+ tx3a_hex = txToHex(tx3a)
+
+ self.nodes[0].sendrawtransaction(tx3a_hex, True)
+
+ tx3b = CTransaction()
+ tx3b.vin = [CTxIn(COutPoint(tx1a_txid, 0), nSequence=0)]
+ tx3b.vout = [CTxOut(0.5*COIN, CScript([b'e']))]
+ tx3b_hex = txToHex(tx3b)
+
+ tx3c = CTransaction()
+ tx3c.vin = [CTxIn(COutPoint(tx2a_txid, 0), nSequence=0)]
+ tx3c.vout = [CTxOut(0.5*COIN, CScript([b'f']))]
+ tx3c_hex = txToHex(tx3c)
+
+ self.nodes[0].sendrawtransaction(tx3b_hex, True)
+ # If tx3b was accepted, tx3c won't look like a replacement,
+ # but make sure it is accepted anyway
+ self.nodes[0].sendrawtransaction(tx3c_hex, True)
+
+ def test_prioritised_transactions(self):
+ # Ensure that fee deltas set via prioritisetransaction are
+ # correctly taken into account by the replacement logic
+
+ # 1. Check that feeperkb uses modified fees
+ tx0_outpoint = make_utxo(self.nodes[0], 1.1*COIN)
+
+ tx1a = CTransaction()
+ tx1a.vin = [CTxIn(tx0_outpoint, nSequence=0)]
+ tx1a.vout = [CTxOut(1*COIN, CScript([b'a']))]
+ tx1a_hex = txToHex(tx1a)
+ tx1a_txid = self.nodes[0].sendrawtransaction(tx1a_hex, True)
+
+ # Higher fee, but the actual fee per KB is much lower.
+ tx1b = CTransaction()
+ tx1b.vin = [CTxIn(tx0_outpoint, nSequence=0)]
+ tx1b.vout = [CTxOut(0.001*COIN, CScript([b'a'*740000]))]
+ tx1b_hex = txToHex(tx1b)
+
+ # Verify tx1b cannot replace tx1a.
+ try:
+ tx1b_txid = self.nodes[0].sendrawtransaction(tx1b_hex, True)
+ except JSONRPCException as exp:
+ assert_equal(exp.error['code'], -26)
+ else:
+ assert(False)
+
+ # Use prioritisetransaction to set tx1a's fee to 0.
+ self.nodes[0].prioritisetransaction(tx1a_txid, 0, int(-0.1*COIN))
+
+ # Now tx1b should be able to replace tx1a
+ tx1b_txid = self.nodes[0].sendrawtransaction(tx1b_hex, True)
+
+ assert(tx1b_txid in self.nodes[0].getrawmempool())
+
+ # 2. Check that absolute fee checks use modified fee.
+ tx1_outpoint = make_utxo(self.nodes[0], 1.1*COIN)
+
+ tx2a = CTransaction()
+ tx2a.vin = [CTxIn(tx1_outpoint, nSequence=0)]
+ tx2a.vout = [CTxOut(1*COIN, CScript([b'a']))]
+ tx2a_hex = txToHex(tx2a)
+ tx2a_txid = self.nodes[0].sendrawtransaction(tx2a_hex, True)
+
+ # Lower fee, but we'll prioritise it
+ tx2b = CTransaction()
+ tx2b.vin = [CTxIn(tx1_outpoint, nSequence=0)]
+ tx2b.vout = [CTxOut(1.01*COIN, CScript([b'a']))]
+ tx2b.rehash()
+ tx2b_hex = txToHex(tx2b)
+
+ # Verify tx2b cannot replace tx2a.
+ try:
+ tx2b_txid = self.nodes[0].sendrawtransaction(tx2b_hex, True)
+ except JSONRPCException as exp:
+ assert_equal(exp.error['code'], -26)
+ else:
+ assert(False)
+
+ # Now prioritise tx2b to have a higher modified fee
+ self.nodes[0].prioritisetransaction(tx2b.hash, 0, int(0.1*COIN))
+
+ # tx2b should now be accepted
+ tx2b_txid = self.nodes[0].sendrawtransaction(tx2b_hex, True)
+
+ assert(tx2b_txid in self.nodes[0].getrawmempool())
+
+if __name__ == '__main__':
+ ReplaceByFeeTest().main()
diff --git a/qa/rpc-tests/rest.py b/qa/rpc-tests/rest.py
index e084ad55ab..682c531691 100755
--- a/qa/rpc-tests/rest.py
+++ b/qa/rpc-tests/rest.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python2
-# Copyright (c) 2014 The Bitcoin Core developers
+# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
diff --git a/qa/rpc-tests/rpcbind_test.py b/qa/rpc-tests/rpcbind_test.py
index 04110c2831..5f409ad616 100755
--- a/qa/rpc-tests/rpcbind_test.py
+++ b/qa/rpc-tests/rpcbind_test.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python2
-# Copyright (c) 2014 The Bitcoin Core developers
+# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
@@ -47,7 +47,7 @@ def run_allowip_test(tmpdir, allow_ips, rpchost, rpcport):
try:
# connect to node through non-loopback interface
url = "http://rt:rt@%s:%d" % (rpchost, rpcport,)
- node = AuthServiceProxy(url)
+ node = get_rpc_proxy(url, 1)
node.getinfo()
finally:
node = None # make sure connection will be garbage collected and closed
diff --git a/qa/rpc-tests/script_test.py b/qa/rpc-tests/script_test.py
deleted file mode 100755
index afc44b51b5..0000000000
--- a/qa/rpc-tests/script_test.py
+++ /dev/null
@@ -1,259 +0,0 @@
-#!/usr/bin/env python2
-#
-# Distributed under the MIT/X11 software license, see the accompanying
-# file COPYING or http://www.opensource.org/licenses/mit-license.php.
-#
-
-'''
-Test notes:
-This test uses the script_valid and script_invalid tests from the unittest
-framework to do end-to-end testing where we compare that two nodes agree on
-whether blocks containing a given test script are valid.
-
-We generally ignore the script flags associated with each test (since we lack
-the precision to test each script using those flags in this framework), but
-for tests with SCRIPT_VERIFY_P2SH, we can use a block time after the BIP16
-switchover date to try to test with that flag enabled (and for tests without
-that flag, we use a block time before the switchover date).
-
-NOTE: This test is very slow and may take more than 40 minutes to run.
-'''
-
-from test_framework.test_framework import ComparisonTestFramework
-from test_framework.util import *
-from test_framework.comptool import TestInstance, TestManager
-from test_framework.mininode import *
-from test_framework.blocktools import *
-from test_framework.script import *
-import logging
-import copy
-import json
-
-script_valid_file = "../../src/test/data/script_valid.json"
-script_invalid_file = "../../src/test/data/script_invalid.json"
-
-# Pass in a set of json files to open.
-class ScriptTestFile(object):
-
- def __init__(self, files):
- self.files = files
- self.index = -1
- self.data = []
-
- def load_files(self):
- for f in self.files:
- self.data.extend(json.loads(open(os.path.dirname(os.path.abspath(__file__))+"/"+f).read()))
-
- # Skip over records that are not long enough to be tests
- def get_records(self):
- while (self.index < len(self.data)):
- if len(self.data[self.index]) >= 3:
- yield self.data[self.index]
- self.index += 1
-
-
-# Helper for parsing the flags specified in the .json files
-SCRIPT_VERIFY_NONE = 0
-SCRIPT_VERIFY_P2SH = 1
-SCRIPT_VERIFY_STRICTENC = 1 << 1
-SCRIPT_VERIFY_DERSIG = 1 << 2
-SCRIPT_VERIFY_LOW_S = 1 << 3
-SCRIPT_VERIFY_NULLDUMMY = 1 << 4
-SCRIPT_VERIFY_SIGPUSHONLY = 1 << 5
-SCRIPT_VERIFY_MINIMALDATA = 1 << 6
-SCRIPT_VERIFY_DISCOURAGE_UPGRADABLE_NOPS = 1 << 7
-SCRIPT_VERIFY_CLEANSTACK = 1 << 8
-
-flag_map = {
- "": SCRIPT_VERIFY_NONE,
- "NONE": SCRIPT_VERIFY_NONE,
- "P2SH": SCRIPT_VERIFY_P2SH,
- "STRICTENC": SCRIPT_VERIFY_STRICTENC,
- "DERSIG": SCRIPT_VERIFY_DERSIG,
- "LOW_S": SCRIPT_VERIFY_LOW_S,
- "NULLDUMMY": SCRIPT_VERIFY_NULLDUMMY,
- "SIGPUSHONLY": SCRIPT_VERIFY_SIGPUSHONLY,
- "MINIMALDATA": SCRIPT_VERIFY_MINIMALDATA,
- "DISCOURAGE_UPGRADABLE_NOPS": SCRIPT_VERIFY_DISCOURAGE_UPGRADABLE_NOPS,
- "CLEANSTACK": SCRIPT_VERIFY_CLEANSTACK,
-}
-
-def ParseScriptFlags(flag_string):
- flags = 0
- for x in flag_string.split(","):
- if x in flag_map:
- flags |= flag_map[x]
- else:
- print "Error: unrecognized script flag: ", x
- return flags
-
-'''
-Given a string that is a scriptsig or scriptpubkey from the .json files above,
-convert it to a CScript()
-'''
-# Replicates behavior from core_read.cpp
-def ParseScript(json_script):
- script = json_script.split(" ")
- parsed_script = CScript()
- for x in script:
- if len(x) == 0:
- # Empty string, ignore.
- pass
- elif x.isdigit() or (len(x) >= 1 and x[0] == "-" and x[1:].isdigit()):
- # Number
- n = int(x, 0)
- if (n == -1) or (n >= 1 and n <= 16):
- parsed_script = CScript(bytes(parsed_script) + bytes(CScript([n])))
- else:
- parsed_script += CScriptNum(int(x, 0))
- elif x.startswith("0x"):
- # Raw hex data, inserted NOT pushed onto stack:
- for i in xrange(2, len(x), 2):
- parsed_script = CScript(bytes(parsed_script) + bytes(chr(int(x[i:i+2],16))))
- elif x.startswith("'") and x.endswith("'") and len(x) >= 2:
- # Single-quoted string, pushed as data.
- parsed_script += CScript([x[1:-1]])
- else:
- # opcode, e.g. OP_ADD or ADD:
- tryopname = "OP_" + x
- if tryopname in OPCODES_BY_NAME:
- parsed_script += CScriptOp(OPCODES_BY_NAME["OP_" + x])
- else:
- print "ParseScript: error parsing '%s'" % x
- return ""
- return parsed_script
-
-class TestBuilder(object):
- def create_credit_tx(self, scriptPubKey, height):
- # self.tx1 is a coinbase transaction, modeled after the one created by script_tests.cpp
- # This allows us to reuse signatures created in the unit test framework.
- self.tx1 = create_coinbase(height) # this has a bip34 scriptsig,
- self.tx1.vin[0].scriptSig = CScript([0, 0]) # but this matches the unit tests
- self.tx1.vout[0].nValue = 0
- self.tx1.vout[0].scriptPubKey = scriptPubKey
- self.tx1.rehash()
- def create_spend_tx(self, scriptSig):
- self.tx2 = create_transaction(self.tx1, 0, CScript(), 0)
- self.tx2.vin[0].scriptSig = scriptSig
- self.tx2.vout[0].scriptPubKey = CScript()
- self.tx2.rehash()
- def rehash(self):
- self.tx1.rehash()
- self.tx2.rehash()
-
-# This test uses the (default) two nodes provided by ComparisonTestFramework,
-# specified on the command line with --testbinary and --refbinary.
-# See comptool.py
-class ScriptTest(ComparisonTestFramework):
-
- def run_test(self):
- # Set up the comparison tool TestManager
- test = TestManager(self, self.options.tmpdir)
- test.add_all_connections(self.nodes)
-
- # Load scripts
- self.scripts = ScriptTestFile([script_valid_file, script_invalid_file])
- self.scripts.load_files()
-
- # Some variables we re-use between test instances (to build blocks)
- self.tip = None
- self.block_time = None
-
- NetworkThread().start() # Start up network handling in another thread
- test.run()
-
- def generate_test_instance(self, pubkeystring, scriptsigstring):
- scriptpubkey = ParseScript(pubkeystring)
- scriptsig = ParseScript(scriptsigstring)
-
- test = TestInstance(sync_every_block=False)
- test_build = TestBuilder()
- test_build.create_credit_tx(scriptpubkey, self.height)
- test_build.create_spend_tx(scriptsig)
- test_build.rehash()
-
- block = create_block(self.tip, test_build.tx1, self.block_time)
- self.block_time += 1
- block.solve()
- self.tip = block.sha256
- self.height += 1
- test.blocks_and_transactions = [[block, True]]
-
- for i in xrange(100):
- block = create_block(self.tip, create_coinbase(self.height), self.block_time)
- self.block_time += 1
- block.solve()
- self.tip = block.sha256
- self.height += 1
- test.blocks_and_transactions.append([block, True])
-
- block = create_block(self.tip, create_coinbase(self.height), self.block_time)
- self.block_time += 1
- block.vtx.append(test_build.tx2)
- block.hashMerkleRoot = block.calc_merkle_root()
- block.rehash()
- block.solve()
- test.blocks_and_transactions.append([block, None])
- return test
-
- # This generates the tests for TestManager.
- def get_tests(self):
- self.tip = int ("0x" + self.nodes[0].getbestblockhash() + "L", 0)
- self.block_time = 1333230000 # before the BIP16 switchover
- self.height = 1
-
- '''
- Create a new block with an anyone-can-spend coinbase
- '''
- block = create_block(self.tip, create_coinbase(self.height), self.block_time)
- self.block_time += 1
- block.solve()
- self.tip = block.sha256
- self.height += 1
- yield TestInstance(objects=[[block, True]])
-
- '''
- Build out to 100 blocks total, maturing the coinbase.
- '''
- test = TestInstance(objects=[], sync_every_block=False, sync_every_tx=False)
- for i in xrange(100):
- b = create_block(self.tip, create_coinbase(self.height), self.block_time)
- b.solve()
- test.blocks_and_transactions.append([b, True])
- self.tip = b.sha256
- self.block_time += 1
- self.height += 1
- yield test
-
- ''' Iterate through script tests. '''
- counter = 0
- for script_test in self.scripts.get_records():
- ''' Reset the blockchain to genesis block + 100 blocks. '''
- if self.nodes[0].getblockcount() > 101:
- self.nodes[0].invalidateblock(self.nodes[0].getblockhash(102))
- self.nodes[1].invalidateblock(self.nodes[1].getblockhash(102))
-
- self.tip = int ("0x" + self.nodes[0].getbestblockhash() + "L", 0)
- self.height = 102
-
- [scriptsig, scriptpubkey, flags] = script_test[0:3]
- flags = ParseScriptFlags(flags)
-
- # We can use block time to determine whether the nodes should be
- # enforcing BIP16.
- #
- # We intentionally let the block time grow by 1 each time.
- # This forces the block hashes to differ between tests, so that
- # a call to invalidateblock doesn't interfere with a later test.
- if (flags & SCRIPT_VERIFY_P2SH):
- self.block_time = 1333238400 + counter # Advance to enforcing BIP16
- else:
- self.block_time = 1333230000 + counter # Before the BIP16 switchover
-
- print "Script test: [%s]" % script_test
-
- yield self.generate_test_instance(scriptpubkey, scriptsig)
- counter += 1
-
-if __name__ == '__main__':
- ScriptTest().main()
diff --git a/qa/rpc-tests/sendheaders.py b/qa/rpc-tests/sendheaders.py
new file mode 100755
index 0000000000..7572bc2776
--- /dev/null
+++ b/qa/rpc-tests/sendheaders.py
@@ -0,0 +1,518 @@
+#!/usr/bin/env python2
+#
+# Distributed under the MIT/X11 software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+#
+
+from test_framework.mininode import *
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import *
+import time
+from test_framework.blocktools import create_block, create_coinbase
+
+'''
+SendHeadersTest -- test behavior of headers messages to announce blocks.
+
+Setup:
+
+- Two nodes, two p2p connections to node0. One p2p connection should only ever
+ receive inv's (omitted from testing description below, this is our control).
+ Second node is used for creating reorgs.
+
+Part 1: No headers announcements before "sendheaders"
+a. node mines a block [expect: inv]
+ send getdata for the block [expect: block]
+b. node mines another block [expect: inv]
+ send getheaders and getdata [expect: headers, then block]
+c. node mines another block [expect: inv]
+ peer mines a block, announces with header [expect: getdata]
+d. node mines another block [expect: inv]
+
+Part 2: After "sendheaders", headers announcements should generally work.
+a. peer sends sendheaders [expect: no response]
+ peer sends getheaders with current tip [expect: no response]
+b. node mines a block [expect: tip header]
+c. for N in 1, ..., 10:
+ * for announce-type in {inv, header}
+ - peer mines N blocks, announces with announce-type
+ [ expect: getheaders/getdata or getdata, deliver block(s) ]
+ - node mines a block [ expect: 1 header ]
+
+Part 3: Headers announcements stop after large reorg and resume after getheaders or inv from peer.
+- For response-type in {inv, getheaders}
+  * node mines a 7-block reorg [ expect: headers announcement of 8 blocks ]
+ * node mines an 8-block reorg [ expect: inv at tip ]
+ * peer responds with getblocks/getdata [expect: inv, blocks ]
+ * node mines another block [ expect: inv at tip, peer sends getdata, expect: block ]
+ * node mines another block at tip [ expect: inv ]
+ * peer responds with getheaders with an old hashstop more than 8 blocks back [expect: headers]
+ * peer requests block [ expect: block ]
+ * node mines another block at tip [ expect: inv, peer sends getdata, expect: block ]
+ * peer sends response-type [expect headers if getheaders, getheaders/getdata if mining new block]
+ * node mines 1 block [expect: 1 header, peer responds with getdata]
+
+Part 4: Test direct fetch behavior
+a. Announce 2 old block headers.
+ Expect: no getdata requests.
+b. Announce 3 new blocks via 1 headers message.
+ Expect: one getdata request for all 3 blocks.
+ (Send blocks.)
+c. Announce 1 header that forks off the last two blocks.
+ Expect: no response.
+d. Announce 1 more header that builds on that fork.
+ Expect: one getdata request for two blocks.
+e. Announce 16 more headers that build on that fork.
+ Expect: getdata request for 14 more blocks.
+f. Announce 1 more header that builds on that fork.
+ Expect: no response.
+'''
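+
+# Note: "sendheaders" (BIP 130) asks a peer to announce new blocks with a
+# headers message rather than an inv; that switch in behaviour is what this
+# test exercises.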
+
+class BaseNode(NodeConnCB):
+ def __init__(self):
+ NodeConnCB.__init__(self)
+ self.connection = None
+ self.last_inv = None
+ self.last_headers = None
+ self.last_block = None
+ self.ping_counter = 1
+ self.last_pong = msg_pong(0)
+ self.last_getdata = None
+ self.sleep_time = 0.05
+ self.block_announced = False
+
+ def clear_last_announcement(self):
+ with mininode_lock:
+ self.block_announced = False
+ self.last_inv = None
+ self.last_headers = None
+
+ def add_connection(self, conn):
+ self.connection = conn
+
+ # Request data for a list of block hashes
+ def get_data(self, block_hashes):
+ msg = msg_getdata()
+ for x in block_hashes:
+ msg.inv.append(CInv(2, x))
+ self.connection.send_message(msg)
+
+ def get_headers(self, locator, hashstop):
+ msg = msg_getheaders()
+ msg.locator.vHave = locator
+ msg.hashstop = hashstop
+ self.connection.send_message(msg)
+
+ def send_block_inv(self, blockhash):
+ msg = msg_inv()
+ msg.inv = [CInv(2, blockhash)]
+ self.connection.send_message(msg)
+
+ # Wrapper for the NodeConn's send_message function
+ def send_message(self, message):
+ self.connection.send_message(message)
+
+ def on_inv(self, conn, message):
+ self.last_inv = message
+ self.block_announced = True
+
+ def on_headers(self, conn, message):
+ self.last_headers = message
+ self.block_announced = True
+
+ def on_block(self, conn, message):
+ self.last_block = message.block
+ self.last_block.calc_sha256()
+
+ def on_getdata(self, conn, message):
+ self.last_getdata = message
+
+ def on_pong(self, conn, message):
+ self.last_pong = message
+
+ # Test whether the last announcement we received had the
+ # right header or the right inv
+ # inv and headers should be lists of block hashes
+ def check_last_announcement(self, headers=None, inv=None):
+ expect_headers = headers if headers != None else []
+ expect_inv = inv if inv != None else []
+ test_function = lambda: self.block_announced
+ self.sync(test_function)
+ with mininode_lock:
+ self.block_announced = False
+
+ success = True
+ compare_inv = []
+ if self.last_inv != None:
+ compare_inv = [x.hash for x in self.last_inv.inv]
+ if compare_inv != expect_inv:
+ success = False
+
+ hash_headers = []
+ if self.last_headers != None:
+ # treat headers as a list of block hashes
+ hash_headers = [ x.sha256 for x in self.last_headers.headers ]
+ if hash_headers != expect_headers:
+ success = False
+
+ self.last_inv = None
+ self.last_headers = None
+ return success
+
+ # Syncing helpers
+ def sync(self, test_function, timeout=60):
+ while timeout > 0:
+ with mininode_lock:
+ if test_function():
+ return
+ time.sleep(self.sleep_time)
+ timeout -= self.sleep_time
+ raise AssertionError("Sync failed to complete")
+
+ def sync_with_ping(self, timeout=60):
+ self.send_message(msg_ping(nonce=self.ping_counter))
+ test_function = lambda: self.last_pong.nonce == self.ping_counter
+ self.sync(test_function, timeout)
+ self.ping_counter += 1
+ return
+
+ def wait_for_block(self, blockhash, timeout=60):
+ test_function = lambda: self.last_block != None and self.last_block.sha256 == blockhash
+ self.sync(test_function, timeout)
+ return
+
+ def wait_for_getdata(self, hash_list, timeout=60):
+ if hash_list == []:
+ return
+
+ test_function = lambda: self.last_getdata != None and [x.hash for x in self.last_getdata.inv] == hash_list
+ self.sync(test_function, timeout)
+ return
+
+ def send_header_for_blocks(self, new_blocks):
+ headers_message = msg_headers()
+ headers_message.headers = [ CBlockHeader(b) for b in new_blocks ]
+ self.send_message(headers_message)
+
+ def send_getblocks(self, locator):
+ getblocks_message = msg_getblocks()
+ getblocks_message.locator.vHave = locator
+ self.send_message(getblocks_message)
+
+# InvNode: This peer should only ever receive inv's, because it doesn't ever send a
+# "sendheaders" message.
+class InvNode(BaseNode):
+ def __init__(self):
+ BaseNode.__init__(self)
+
+# TestNode: This peer is the one we use for most of the testing.
+class TestNode(BaseNode):
+ def __init__(self):
+ BaseNode.__init__(self)
+
+class SendHeadersTest(BitcoinTestFramework):
+ def setup_chain(self):
+ initialize_chain_clean(self.options.tmpdir, 2)
+
+ def setup_network(self):
+ self.nodes = []
+ self.nodes = start_nodes(2, self.options.tmpdir, [["-debug", "-logtimemicros=1"]]*2)
+ connect_nodes(self.nodes[0], 1)
+
+ # mine count blocks and return the new tip
+ def mine_blocks(self, count):
+ # Clear out last block announcement from each p2p listener
+ [ x.clear_last_announcement() for x in self.p2p_connections ]
+ self.nodes[0].generate(count)
+ return int(self.nodes[0].getbestblockhash(), 16)
+
+ # mine a reorg that invalidates length blocks (replacing them with
+ # length+1 blocks).
+ # Note: we clear the state of our p2p connections after the
+ # to-be-reorged-out blocks are mined, so that we don't break later tests.
+ # return the list of block hashes newly mined
+ def mine_reorg(self, length):
+ self.nodes[0].generate(length) # make sure all invalidated blocks are node0's
+ sync_blocks(self.nodes, wait=0.1)
+ [x.clear_last_announcement() for x in self.p2p_connections]
+
+ tip_height = self.nodes[1].getblockcount()
+ hash_to_invalidate = self.nodes[1].getblockhash(tip_height-(length-1))
+ self.nodes[1].invalidateblock(hash_to_invalidate)
+ all_hashes = self.nodes[1].generate(length+1) # Must be longer than the orig chain
+ sync_blocks(self.nodes, wait=0.1)
+ return [int(x, 16) for x in all_hashes]
+
+ def run_test(self):
+ # Setup the p2p connections and start up the network thread.
+ inv_node = InvNode()
+ test_node = TestNode()
+
+ self.p2p_connections = [inv_node, test_node]
+
+ connections = []
+ connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], inv_node))
+ # Set nServices to 0 for test_node, so no block download will occur outside of
+ # direct fetching
+ connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], test_node, services=0))
+ inv_node.add_connection(connections[0])
+ test_node.add_connection(connections[1])
+
+ NetworkThread().start() # Start up network handling in another thread
+
+ # Test logic begins here
+ inv_node.wait_for_verack()
+ test_node.wait_for_verack()
+
+ tip = int(self.nodes[0].getbestblockhash(), 16)
+
+ # PART 1
+ # 1. Mine a block; expect inv announcements each time
+ print "Part 1: headers don't start before sendheaders message..."
+ for i in xrange(4):
+ old_tip = tip
+ tip = self.mine_blocks(1)
+ assert_equal(inv_node.check_last_announcement(inv=[tip]), True)
+ assert_equal(test_node.check_last_announcement(inv=[tip]), True)
+ # Try a few different responses; none should affect next announcement
+ if i == 0:
+ # first request the block
+ test_node.get_data([tip])
+ test_node.wait_for_block(tip, timeout=5)
+ elif i == 1:
+ # next try requesting header and block
+ test_node.get_headers(locator=[old_tip], hashstop=tip)
+ test_node.get_data([tip])
+ test_node.wait_for_block(tip)
+ test_node.clear_last_announcement() # since we requested headers...
+ elif i == 2:
+ # this time announce own block via headers
+ height = self.nodes[0].getblockcount()
+ last_time = self.nodes[0].getblock(self.nodes[0].getbestblockhash())['time']
+ block_time = last_time + 1
+ new_block = create_block(tip, create_coinbase(height+1), block_time)
+ new_block.solve()
+ test_node.send_header_for_blocks([new_block])
+ test_node.wait_for_getdata([new_block.sha256], timeout=5)
+ test_node.send_message(msg_block(new_block))
+ test_node.sync_with_ping() # make sure this block is processed
+ inv_node.clear_last_announcement()
+ test_node.clear_last_announcement()
+
+ print "Part 1: success!"
+ print "Part 2: announce blocks with headers after sendheaders message..."
+ # PART 2
+ # 2. Send a sendheaders message and test that headers announcements
+ # commence and keep working.
+ test_node.send_message(msg_sendheaders())
+ prev_tip = int(self.nodes[0].getbestblockhash(), 16)
+ test_node.get_headers(locator=[prev_tip], hashstop=0L)
+ test_node.sync_with_ping()
+
+ # Now that we've synced headers, headers announcements should work
+ tip = self.mine_blocks(1)
+ assert_equal(inv_node.check_last_announcement(inv=[tip]), True)
+ assert_equal(test_node.check_last_announcement(headers=[tip]), True)
+
+ height = self.nodes[0].getblockcount()+1
+ block_time += 10 # Advance far enough ahead
+ for i in xrange(10):
+ # Mine i blocks, and alternate announcing either via
+ # inv (of tip) or via headers. After each, new blocks
+ # mined by the node should successfully be announced
+ # with block header, even though the blocks are never requested
+ for j in xrange(2):
+ blocks = []
+ for b in xrange(i+1):
+ blocks.append(create_block(tip, create_coinbase(height), block_time))
+ blocks[-1].solve()
+ tip = blocks[-1].sha256
+ block_time += 1
+ height += 1
+ if j == 0:
+ # Announce via inv
+ test_node.send_block_inv(tip)
+ test_node.wait_for_getdata([tip], timeout=5)
+ # Test that duplicate inv's won't result in duplicate
+ # getdata requests, or duplicate headers announcements
+ inv_node.send_block_inv(tip)
+ # Should have received a getheaders as well!
+ test_node.send_header_for_blocks(blocks)
+ test_node.wait_for_getdata([x.sha256 for x in blocks[0:-1]], timeout=5)
+ [ inv_node.send_block_inv(x.sha256) for x in blocks[0:-1] ]
+ inv_node.sync_with_ping()
+ else:
+ # Announce via headers
+ test_node.send_header_for_blocks(blocks)
+ test_node.wait_for_getdata([x.sha256 for x in blocks], timeout=5)
+ # Test that duplicate headers won't result in duplicate
+ # getdata requests (the check is further down)
+ inv_node.send_header_for_blocks(blocks)
+ inv_node.sync_with_ping()
+ [ test_node.send_message(msg_block(x)) for x in blocks ]
+ test_node.sync_with_ping()
+ inv_node.sync_with_ping()
+            # These blocks should not be announced to the inv node, since
+            # inv_node itself already announced them to the node
+ assert_equal(inv_node.last_inv, None)
+ assert_equal(inv_node.last_headers, None)
+ tip = self.mine_blocks(1)
+ assert_equal(inv_node.check_last_announcement(inv=[tip]), True)
+ assert_equal(test_node.check_last_announcement(headers=[tip]), True)
+ height += 1
+ block_time += 1
+
+ print "Part 2: success!"
+
+ print "Part 3: headers announcements can stop after large reorg, and resume after headers/inv from peer..."
+
+ # PART 3. Headers announcements can stop after large reorg, and resume after
+ # getheaders or inv from peer.
+ for j in xrange(2):
+ # First try mining a reorg that can propagate with header announcement
+ new_block_hashes = self.mine_reorg(length=7)
+ tip = new_block_hashes[-1]
+ assert_equal(inv_node.check_last_announcement(inv=[tip]), True)
+ assert_equal(test_node.check_last_announcement(headers=new_block_hashes), True)
+
+ block_time += 8
+
+ # Mine a too-large reorg, which should be announced with a single inv
+ new_block_hashes = self.mine_reorg(length=8)
+ tip = new_block_hashes[-1]
+ assert_equal(inv_node.check_last_announcement(inv=[tip]), True)
+ assert_equal(test_node.check_last_announcement(inv=[tip]), True)
+
+ block_time += 9
+
+            fork_point = self.nodes[0].getblock("%064x" % new_block_hashes[0])["previousblockhash"]
+ fork_point = int(fork_point, 16)
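+            # ("%064x" zero-pads the hash to the full 64 hex characters that
+            # getblock expects)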
+
+ # Use getblocks/getdata
+ test_node.send_getblocks(locator = [fork_point])
+ assert_equal(test_node.check_last_announcement(inv=new_block_hashes), True)
+ test_node.get_data(new_block_hashes)
+ test_node.wait_for_block(new_block_hashes[-1])
+
+ for i in xrange(3):
+ # Mine another block, still should get only an inv
+ tip = self.mine_blocks(1)
+ assert_equal(inv_node.check_last_announcement(inv=[tip]), True)
+ assert_equal(test_node.check_last_announcement(inv=[tip]), True)
+ if i == 0:
+ # Just get the data -- shouldn't cause headers announcements to resume
+ test_node.get_data([tip])
+ test_node.wait_for_block(tip)
+ elif i == 1:
+ # Send a getheaders message that shouldn't trigger headers announcements
+ # to resume (best header sent will be too old)
+ test_node.get_headers(locator=[fork_point], hashstop=new_block_hashes[1])
+ test_node.get_data([tip])
+ test_node.wait_for_block(tip)
+ elif i == 2:
+ test_node.get_data([tip])
+ test_node.wait_for_block(tip)
+ # This time, try sending either a getheaders to trigger resumption
+ # of headers announcements, or mine a new block and inv it, also
+ # triggering resumption of headers announcements.
+ if j == 0:
+ test_node.get_headers(locator=[tip], hashstop=0L)
+ test_node.sync_with_ping()
+ else:
+ test_node.send_block_inv(tip)
+ test_node.sync_with_ping()
+ # New blocks should now be announced with header
+ tip = self.mine_blocks(1)
+ assert_equal(inv_node.check_last_announcement(inv=[tip]), True)
+ assert_equal(test_node.check_last_announcement(headers=[tip]), True)
+
+ print "Part 3: success!"
+
+ print "Part 4: Testing direct fetch behavior..."
+ tip = self.mine_blocks(1)
+ height = self.nodes[0].getblockcount() + 1
+ last_time = self.nodes[0].getblock(self.nodes[0].getbestblockhash())['time']
+ block_time = last_time + 1
+
+ # Create 2 blocks. Send the blocks, then send the headers.
+ blocks = []
+ for b in xrange(2):
+ blocks.append(create_block(tip, create_coinbase(height), block_time))
+ blocks[-1].solve()
+ tip = blocks[-1].sha256
+ block_time += 1
+ height += 1
+ inv_node.send_message(msg_block(blocks[-1]))
+
+ inv_node.sync_with_ping() # Make sure blocks are processed
+ test_node.last_getdata = None
+        test_node.send_header_for_blocks(blocks)
+ test_node.sync_with_ping()
+ # should not have received any getdata messages
+ with mininode_lock:
+ assert_equal(test_node.last_getdata, None)
+
+ # This time, direct fetch should work
+ blocks = []
+ for b in xrange(3):
+ blocks.append(create_block(tip, create_coinbase(height), block_time))
+ blocks[-1].solve()
+ tip = blocks[-1].sha256
+ block_time += 1
+ height += 1
+
+ test_node.send_header_for_blocks(blocks)
+ test_node.sync_with_ping()
+ test_node.wait_for_getdata([x.sha256 for x in blocks], timeout=test_node.sleep_time)
+
+ [ test_node.send_message(msg_block(x)) for x in blocks ]
+
+ test_node.sync_with_ping()
+
+ # Now announce a header that forks the last two blocks
+ tip = blocks[0].sha256
+ height -= 1
+ blocks = []
+
+ # Create extra blocks for later
+ for b in xrange(20):
+ blocks.append(create_block(tip, create_coinbase(height), block_time))
+ blocks[-1].solve()
+ tip = blocks[-1].sha256
+ block_time += 1
+ height += 1
+
+ # Announcing one block on fork should not trigger direct fetch
+ # (less work than tip)
+ test_node.last_getdata = None
+ test_node.send_header_for_blocks(blocks[0:1])
+ test_node.sync_with_ping()
+ with mininode_lock:
+ assert_equal(test_node.last_getdata, None)
+
+ # Announcing one more block on fork should trigger direct fetch for
+ # both blocks (same work as tip)
+ test_node.send_header_for_blocks(blocks[1:2])
+ test_node.sync_with_ping()
+ test_node.wait_for_getdata([x.sha256 for x in blocks[0:2]], timeout=test_node.sleep_time)
+
+ # Announcing 16 more headers should trigger direct fetch for 14 more
+ # blocks
+ test_node.send_header_for_blocks(blocks[2:18])
+ test_node.sync_with_ping()
+ test_node.wait_for_getdata([x.sha256 for x in blocks[2:16]], timeout=test_node.sleep_time)
+
+ # Announcing 1 more header should not trigger any response
+ test_node.last_getdata = None
+ test_node.send_header_for_blocks(blocks[18:19])
+ test_node.sync_with_ping()
+ with mininode_lock:
+ assert_equal(test_node.last_getdata, None)
+
+ print "Part 4: success!"
+
+ # Finally, check that the inv node never received a getdata request,
+ # throughout the test
+ assert_equal(inv_node.last_getdata, None)
+
+if __name__ == '__main__':
+ SendHeadersTest().main()
diff --git a/qa/rpc-tests/smartfees.py b/qa/rpc-tests/smartfees.py
index c15c5fda09..b209ae0c16 100755
--- a/qa/rpc-tests/smartfees.py
+++ b/qa/rpc-tests/smartfees.py
@@ -19,9 +19,6 @@ P2SH_2 = "2NBdpwq8Aoo1EEKEXPNrKvr5xQr3M9UfcZA" # P2SH of "OP_2 OP_DROP"
# 4 bytes of OP_TRUE and push 2-byte redeem script of "OP_1 OP_DROP" or "OP_2 OP_DROP"
SCRIPT_SIG = ["0451025175", "0451025275"]
-def satoshi_round(amount):
- return Decimal(amount).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)
-
def small_txpuzzle_randfee(from_node, conflist, unconflist, amount, min_fee, fee_increment):
'''
Create and send a transaction with a random fee.
@@ -120,15 +117,26 @@ def check_estimates(node, fees_seen, max_invalid, print_estimates = True):
last_e = e
valid_estimate = False
invalid_estimates = 0
- for e in all_estimates:
+ for i,e in enumerate(all_estimates): # estimate is for i+1
if e >= 0:
valid_estimate = True
+ # estimatesmartfee should return the same result
+ assert_equal(node.estimatesmartfee(i+1)["feerate"], e)
+
else:
invalid_estimates += 1
- # Once we're at a high enough confirmation count that we can give an estimate
- # We should have estimates for all higher confirmation counts
- if valid_estimate and e < 0:
- raise AssertionError("Invalid estimate appears at higher confirm count than valid estimate")
+
+ # estimatesmartfee should still be valid
+ approx_estimate = node.estimatesmartfee(i+1)["feerate"]
+ answer_found = node.estimatesmartfee(i+1)["blocks"]
+ assert(approx_estimate > 0)
+ assert(answer_found > i+1)
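+            # (estimatesmartfee reports in "blocks" the target it actually
+            # answered for, which will exceed i+1 when no estimate exists yet)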
+
+            # Once we're at a high enough confirmation count that we can give
+            # an estimate, we should have estimates for all higher confirmation counts
+ if valid_estimate:
+ raise AssertionError("Invalid estimate appears at higher confirm count than valid estimate")
+
# Check on the expected number of different confirmation counts
# that we might not have valid estimates for
if invalid_estimates > max_invalid:
@@ -184,13 +192,13 @@ class EstimateFeeTest(BitcoinTestFramework):
# NOTE: the CreateNewBlock code starts counting block size at 1,000 bytes,
# (17k is room enough for 110 or so transactions)
self.nodes.append(start_node(1, self.options.tmpdir,
- ["-blockprioritysize=1500", "-blockmaxsize=18000",
+ ["-blockprioritysize=1500", "-blockmaxsize=17000",
"-maxorphantx=1000", "-relaypriority=0", "-debug=estimatefee"]))
connect_nodes(self.nodes[1], 0)
# Node2 is a stingy miner, that
- # produces too small blocks (room for only 70 or so transactions)
- node2args = ["-blockprioritysize=0", "-blockmaxsize=12000", "-maxorphantx=1000", "-relaypriority=0"]
+ # produces too small blocks (room for only 55 or so transactions)
+ node2args = ["-blockprioritysize=0", "-blockmaxsize=8000", "-maxorphantx=1000", "-relaypriority=0"]
self.nodes.append(start_node(2, self.options.tmpdir, node2args))
connect_nodes(self.nodes[0], 2)
@@ -229,22 +237,19 @@ class EstimateFeeTest(BitcoinTestFramework):
self.fees_per_kb = []
self.memutxo = []
self.confutxo = self.txouts # Start with the set of confirmed txouts after splitting
- print("Checking estimates for 1/2/3/6/15/25 blocks")
- print("Creating transactions and mining them with a huge block size")
- # Create transactions and mine 20 big blocks with node 0 such that the mempool is always emptied
- self.transact_and_mine(30, self.nodes[0])
- check_estimates(self.nodes[1], self.fees_per_kb, 1)
+ print("Will output estimates for 1/2/3/6/15/25 blocks")
- print("Creating transactions and mining them with a block size that can't keep up")
- # Create transactions and mine 30 small blocks with node 2, but create txs faster than we can mine
- self.transact_and_mine(20, self.nodes[2])
- check_estimates(self.nodes[1], self.fees_per_kb, 3)
+ for i in xrange(2):
+ print("Creating transactions and mining them with a block size that can't keep up")
+ # Create transactions and mine 10 small blocks with node 2, but create txs faster than we can mine
+ self.transact_and_mine(10, self.nodes[2])
+ check_estimates(self.nodes[1], self.fees_per_kb, 14)
- print("Creating transactions and mining them at a block size that is just big enough")
- # Generate transactions while mining 40 more blocks, this time with node1
- # which mines blocks with capacity just above the rate that transactions are being created
- self.transact_and_mine(40, self.nodes[1])
- check_estimates(self.nodes[1], self.fees_per_kb, 2)
+ print("Creating transactions and mining them at a block size that is just big enough")
+ # Generate transactions while mining 10 more blocks, this time with node1
+ # which mines blocks with capacity just above the rate that transactions are being created
+ self.transact_and_mine(10, self.nodes[1])
+ check_estimates(self.nodes[1], self.fees_per_kb, 2)
# Finish by mining a normal-sized block:
while len(self.nodes[1].getrawmempool()) > 0:
diff --git a/qa/rpc-tests/test_framework/authproxy.py b/qa/rpc-tests/test_framework/authproxy.py
index 33014dc139..fba469a0dd 100644
--- a/qa/rpc-tests/test_framework/authproxy.py
+++ b/qa/rpc-tests/test_framework/authproxy.py
@@ -69,7 +69,7 @@ class AuthServiceProxy(object):
def __init__(self, service_url, service_name=None, timeout=HTTP_TIMEOUT, connection=None):
self.__service_url = service_url
- self.__service_name = service_name
+ self._service_name = service_name
self.__url = urlparse.urlparse(service_url)
if self.__url.port is None:
port = 80
@@ -102,8 +102,8 @@ class AuthServiceProxy(object):
if name.startswith('__') and name.endswith('__'):
# Python internal stuff
raise AttributeError
- if self.__service_name is not None:
- name = "%s.%s" % (self.__service_name, name)
+ if self._service_name is not None:
+ name = "%s.%s" % (self._service_name, name)
return AuthServiceProxy(self.__service_url, name, connection=self.__conn)
def _request(self, method, path, postdata):
@@ -129,10 +129,10 @@ class AuthServiceProxy(object):
def __call__(self, *args):
AuthServiceProxy.__id_count += 1
- log.debug("-%s-> %s %s"%(AuthServiceProxy.__id_count, self.__service_name,
+ log.debug("-%s-> %s %s"%(AuthServiceProxy.__id_count, self._service_name,
json.dumps(args, default=EncodeDecimal)))
postdata = json.dumps({'version': '1.1',
- 'method': self.__service_name,
+ 'method': self._service_name,
'params': args,
'id': AuthServiceProxy.__id_count}, default=EncodeDecimal)
response = self._request('POST', self.__url.path, postdata)
diff --git a/qa/rpc-tests/test_framework/comptool.py b/qa/rpc-tests/test_framework/comptool.py
index e0b3ce040d..badbc0a1fb 100755
--- a/qa/rpc-tests/test_framework/comptool.py
+++ b/qa/rpc-tests/test_framework/comptool.py
@@ -41,17 +41,32 @@ def wait_until(predicate, attempts=float('inf'), timeout=float('inf')):
return False
+class RejectResult(object):
+ '''
+ Outcome that expects rejection of a transaction or block.
+ '''
+ def __init__(self, code, reason=''):
+ self.code = code
+ self.reason = reason
+ def match(self, other):
+ if self.code != other.code:
+ return False
+ return other.reason.startswith(self.reason)
+ def __repr__(self):
+ return '%i:%s' % (self.code,self.reason or '*')
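+
+# A test can give a RejectResult as the expected outcome for a block or
+# transaction (instead of True/False) to assert that it is rejected with a
+# specific reject code and reason prefix, e.g. RejectResult(16, 'bad-txns').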
+
class TestNode(NodeConnCB):
def __init__(self, block_store, tx_store):
NodeConnCB.__init__(self)
- self.create_callback_map()
self.conn = None
self.bestblockhash = None
self.block_store = block_store
self.block_request_map = {}
self.tx_store = tx_store
self.tx_request_map = {}
+ self.block_reject_map = {}
+ self.tx_reject_map = {}
# When the pingmap is non-empty we're waiting for
# a response
@@ -95,6 +110,12 @@ class TestNode(NodeConnCB):
except KeyError:
raise AssertionError("Got pong for unknown ping [%s]" % repr(message))
+ def on_reject(self, conn, message):
+ if message.message == 'tx':
+ self.tx_reject_map[message.data] = RejectResult(message.code, message.reason)
+ if message.message == 'block':
+ self.block_reject_map[message.data] = RejectResult(message.code, message.reason)
+
def send_inv(self, obj):
mtype = 2 if isinstance(obj, CBlock) else 1
self.conn.send_message(msg_inv([CInv(mtype, obj.sha256)]))
@@ -244,6 +265,15 @@ class TestManager(object):
if outcome is None:
if c.cb.bestblockhash != self.connections[0].cb.bestblockhash:
return False
+ elif isinstance(outcome, RejectResult): # Check that block was rejected w/ code
+ if c.cb.bestblockhash == blockhash:
+ return False
+ if blockhash not in c.cb.block_reject_map:
+ print 'Block not in reject map: %064x' % (blockhash)
+ return False
+ if not outcome.match(c.cb.block_reject_map[blockhash]):
+ print 'Block rejected with %s instead of expected %s: %064x' % (c.cb.block_reject_map[blockhash], outcome, blockhash)
+ return False
elif ((c.cb.bestblockhash == blockhash) != outcome):
# print c.cb.bestblockhash, blockhash, outcome
return False
@@ -263,6 +293,15 @@ class TestManager(object):
if c.cb.lastInv != self.connections[0].cb.lastInv:
# print c.rpc.getrawmempool()
return False
+ elif isinstance(outcome, RejectResult): # Check that tx was rejected w/ code
+ if txhash in c.cb.lastInv:
+ return False
+ if txhash not in c.cb.tx_reject_map:
+ print 'Tx not in reject map: %064x' % (txhash)
+ return False
+ if not outcome.match(c.cb.tx_reject_map[txhash]):
+ print 'Tx rejected with %s instead of expected %s: %064x' % (c.cb.tx_reject_map[txhash], outcome, txhash)
+ return False
elif ((txhash in c.cb.lastInv) != outcome):
# print c.rpc.getrawmempool(), c.cb.lastInv
return False
diff --git a/qa/rpc-tests/test_framework/coverage.py b/qa/rpc-tests/test_framework/coverage.py
new file mode 100644
index 0000000000..50f066a850
--- /dev/null
+++ b/qa/rpc-tests/test_framework/coverage.py
@@ -0,0 +1,101 @@
+"""
+This module contains utilities for doing coverage analysis on the RPC
+interface.
+
+It provides a way to track which RPC commands are exercised during
+testing.
+
+"""
+import os
+
+
+REFERENCE_FILENAME = 'rpc_interface.txt'
+
+
+class AuthServiceProxyWrapper(object):
+ """
+ An object that wraps AuthServiceProxy to record specific RPC calls.
+
+ """
+ def __init__(self, auth_service_proxy_instance, coverage_logfile=None):
+ """
+ Kwargs:
+ auth_service_proxy_instance (AuthServiceProxy): the instance
+ being wrapped.
+ coverage_logfile (str): if specified, write each service_name
+ out to a file when called.
+
+ """
+ self.auth_service_proxy_instance = auth_service_proxy_instance
+ self.coverage_logfile = coverage_logfile
+
+ def __getattr__(self, *args, **kwargs):
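+        # Wrap whatever attribute we get back so that chained lookups
+        # (e.g. wrapper.getblockcount) are also recorded when they are called.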
+ return_val = self.auth_service_proxy_instance.__getattr__(
+ *args, **kwargs)
+
+ return AuthServiceProxyWrapper(return_val, self.coverage_logfile)
+
+ def __call__(self, *args, **kwargs):
+ """
+ Delegates to AuthServiceProxy, then writes the particular RPC method
+ called to a file.
+
+ """
+ return_val = self.auth_service_proxy_instance.__call__(*args, **kwargs)
+ rpc_method = self.auth_service_proxy_instance._service_name
+
+ if self.coverage_logfile:
+ with open(self.coverage_logfile, 'a+') as f:
+ f.write("%s\n" % rpc_method)
+
+ return return_val
+
+ @property
+ def url(self):
+ return self.auth_service_proxy_instance.url
+
+
+def get_filename(dirname, n_node):
+ """
+ Get a filename unique to the test process ID and node.
+
+ This file will contain a list of RPC commands covered.
+ """
+ pid = str(os.getpid())
+ return os.path.join(
+ dirname, "coverage.pid%s.node%s.txt" % (pid, str(n_node)))
+
+
+def write_all_rpc_commands(dirname, node):
+ """
+ Write out a list of all RPC functions available in `bitcoin-cli` for
+ coverage comparison. This will only happen once per coverage
+ directory.
+
+ Args:
+ dirname (str): temporary test dir
+ node (AuthServiceProxy): client
+
+ Returns:
+      bool: True if the RPC interface file was written.
+
+ """
+ filename = os.path.join(dirname, REFERENCE_FILENAME)
+
+ if os.path.isfile(filename):
+ return False
+
+ help_output = node.help().split('\n')
+ commands = set()
+
+ for line in help_output:
+ line = line.strip()
+
+ # Ignore blanks and headers
+ if line and not line.startswith('='):
+ commands.add("%s\n" % line.split()[0])
+
+ with open(filename, 'w') as f:
+ f.writelines(list(commands))
+
+ return True
diff --git a/qa/rpc-tests/test_framework/mininode.py b/qa/rpc-tests/test_framework/mininode.py
index b7d78e74fa..ca65fb6e79 100755
--- a/qa/rpc-tests/test_framework/mininode.py
+++ b/qa/rpc-tests/test_framework/mininode.py
@@ -36,6 +36,7 @@ MY_VERSION = 60001 # past bip-31 for ping/pong
MY_SUBVERSION = "/python-mininode-tester:0.0.1/"
MAX_INV_SZ = 50000
+MAX_BLOCK_SIZE = 1000000
# Keep our own socket map for asyncore, so that we can track disconnects
# ourselves (to workaround an issue with closing an asyncore socket when
@@ -751,8 +752,8 @@ class msg_inv(object):
class msg_getdata(object):
command = "getdata"
- def __init__(self):
- self.inv = []
+ def __init__(self, inv=None):
+ self.inv = inv if inv != None else []
def deserialize(self, f):
self.inv = deser_vector(f, CInv)
@@ -905,6 +906,20 @@ class msg_mempool(object):
def __repr__(self):
return "msg_mempool()"
+class msg_sendheaders(object):
+ command = "sendheaders"
+
+ def __init__(self):
+ pass
+
+ def deserialize(self, f):
+ pass
+
+ def serialize(self):
+ return ""
+
+ def __repr__(self):
+ return "msg_sendheaders()"
# getheaders message has
# number of entries
@@ -989,33 +1004,37 @@ class msg_reject(object):
class NodeConnCB(object):
def __init__(self):
self.verack_received = False
+ # deliver_sleep_time is helpful for debugging race conditions in p2p
+ # tests; it causes message delivery to sleep for the specified time
+ # before acquiring the global lock and delivering the next message.
+ self.deliver_sleep_time = None
+
+ def set_deliver_sleep_time(self, value):
+ with mininode_lock:
+ self.deliver_sleep_time = value
- # Derived classes should call this function once to set the message map
- # which associates the derived classes' functions to incoming messages
- def create_callback_map(self):
- self.cbmap = {
- "version": self.on_version,
- "verack": self.on_verack,
- "addr": self.on_addr,
- "alert": self.on_alert,
- "inv": self.on_inv,
- "getdata": self.on_getdata,
- "getblocks": self.on_getblocks,
- "tx": self.on_tx,
- "block": self.on_block,
- "getaddr": self.on_getaddr,
- "ping": self.on_ping,
- "pong": self.on_pong,
- "headers": self.on_headers,
- "getheaders": self.on_getheaders,
- "reject": self.on_reject,
- "mempool": self.on_mempool
- }
+ def get_deliver_sleep_time(self):
+ with mininode_lock:
+ return self.deliver_sleep_time
+
+ # Spin until verack message is received from the node.
+ # Tests may want to use this as a signal that the test can begin.
+ # This can be called from the testing thread, so it needs to acquire the
+ # global lock.
+ def wait_for_verack(self):
+ while True:
+ with mininode_lock:
+ if self.verack_received:
+ return
+ time.sleep(0.05)
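+
+    # Incoming messages are dispatched dynamically in deliver(): the handler
+    # is looked up as "on_" + command, so subclasses only need to define
+    # on_<command> methods for the messages they care about.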
def deliver(self, conn, message):
+ deliver_sleep = self.get_deliver_sleep_time()
+ if deliver_sleep is not None:
+ time.sleep(deliver_sleep)
with mininode_lock:
try:
- self.cbmap[message.command](conn, message)
+ getattr(self, 'on_' + message.command)(conn, message)
except:
print "ERROR delivering %s (%s)" % (repr(message),
sys.exc_info()[0])
@@ -1084,7 +1103,7 @@ class NodeConn(asyncore.dispatcher):
"regtest": "\xfa\xbf\xb5\xda" # regtest
}
- def __init__(self, dstaddr, dstport, rpc, callback, net="regtest"):
+ def __init__(self, dstaddr, dstport, rpc, callback, net="regtest", services=1):
asyncore.dispatcher.__init__(self, map=mininode_socket_map)
self.log = logging.getLogger("NodeConn(%s:%d)" % (dstaddr, dstport))
self.dstaddr = dstaddr
@@ -1102,6 +1121,7 @@ class NodeConn(asyncore.dispatcher):
# stuff version msg into sendbuf
vt = msg_version()
+ vt.nServices = services
vt.addrTo.ip = self.dstaddr
vt.addrTo.port = self.dstport
vt.addrFrom.ip = "0.0.0.0"
diff --git a/qa/rpc-tests/test_framework/netutil.py b/qa/rpc-tests/test_framework/netutil.py
index b30a88a4f7..50daa87937 100644
--- a/qa/rpc-tests/test_framework/netutil.py
+++ b/qa/rpc-tests/test_framework/netutil.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python2
-# Copyright (c) 2014 The Bitcoin Core developers
+# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
diff --git a/qa/rpc-tests/test_framework/script.py b/qa/rpc-tests/test_framework/script.py
index 0a78cf6fb1..0088876028 100644
--- a/qa/rpc-tests/test_framework/script.py
+++ b/qa/rpc-tests/test_framework/script.py
@@ -226,7 +226,7 @@ OP_CHECKMULTISIGVERIFY = CScriptOp(0xaf)
# expansion
OP_NOP1 = CScriptOp(0xb0)
-OP_NOP2 = CScriptOp(0xb1)
+OP_CHECKLOCKTIMEVERIFY = CScriptOp(0xb1)
OP_NOP3 = CScriptOp(0xb2)
OP_NOP4 = CScriptOp(0xb3)
OP_NOP5 = CScriptOp(0xb4)
@@ -353,7 +353,7 @@ VALID_OPCODES = {
OP_CHECKMULTISIGVERIFY,
OP_NOP1,
- OP_NOP2,
+ OP_CHECKLOCKTIMEVERIFY,
OP_NOP3,
OP_NOP4,
OP_NOP5,
@@ -472,7 +472,7 @@ OPCODE_NAMES.update({
OP_CHECKMULTISIG : 'OP_CHECKMULTISIG',
OP_CHECKMULTISIGVERIFY : 'OP_CHECKMULTISIGVERIFY',
OP_NOP1 : 'OP_NOP1',
- OP_NOP2 : 'OP_NOP2',
+ OP_CHECKLOCKTIMEVERIFY : 'OP_CHECKLOCKTIMEVERIFY',
OP_NOP3 : 'OP_NOP3',
OP_NOP4 : 'OP_NOP4',
OP_NOP5 : 'OP_NOP5',
@@ -591,7 +591,7 @@ OPCODES_BY_NAME = {
'OP_CHECKMULTISIG' : OP_CHECKMULTISIG,
'OP_CHECKMULTISIGVERIFY' : OP_CHECKMULTISIGVERIFY,
'OP_NOP1' : OP_NOP1,
- 'OP_NOP2' : OP_NOP2,
+ 'OP_CHECKLOCKTIMEVERIFY' : OP_CHECKLOCKTIMEVERIFY,
'OP_NOP3' : OP_NOP3,
'OP_NOP4' : OP_NOP4,
'OP_NOP5' : OP_NOP5,
diff --git a/qa/rpc-tests/test_framework/test_framework.py b/qa/rpc-tests/test_framework/test_framework.py
index 5671431f6e..584f318d0b 100755
--- a/qa/rpc-tests/test_framework/test_framework.py
+++ b/qa/rpc-tests/test_framework/test_framework.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python2
-# Copyright (c) 2014 The Bitcoin Core developers
+# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
@@ -13,8 +13,20 @@ import shutil
import tempfile
import traceback
+from .util import (
+ initialize_chain,
+ assert_equal,
+ start_nodes,
+ connect_nodes_bi,
+ sync_blocks,
+ sync_mempools,
+ stop_nodes,
+ wait_bitcoinds,
+ enable_coverage,
+ check_json_precision,
+ initialize_chain_clean,
+)
from authproxy import AuthServiceProxy, JSONRPCException
-from util import *
class BitcoinTestFramework(object):
@@ -96,6 +108,8 @@ class BitcoinTestFramework(object):
help="Root directory for datadirs")
parser.add_option("--tracerpc", dest="trace_rpc", default=False, action="store_true",
help="Print out all RPC calls as they are made")
+ parser.add_option("--coveragedir", dest="coveragedir",
+ help="Write tested RPC commands into this directory")
self.add_options(parser)
(self.options, self.args) = parser.parse_args()
@@ -103,7 +117,10 @@ class BitcoinTestFramework(object):
import logging
logging.basicConfig(level=logging.DEBUG)
- os.environ['PATH'] = self.options.srcdir+":"+os.environ['PATH']
+ if self.options.coveragedir:
+ enable_coverage(self.options.coveragedir)
+
+ os.environ['PATH'] = self.options.srcdir+":"+self.options.srcdir+"/qt:"+os.environ['PATH']
check_json_precision()
@@ -173,7 +190,8 @@ class ComparisonTestFramework(BitcoinTestFramework):
initialize_chain_clean(self.options.tmpdir, self.num_nodes)
def setup_network(self):
- self.nodes = start_nodes(self.num_nodes, self.options.tmpdir,
- extra_args=[['-debug', '-whitelist=127.0.0.1']] * self.num_nodes,
- binary=[self.options.testbinary] +
- [self.options.refbinary]*(self.num_nodes-1))
+ self.nodes = start_nodes(
+ self.num_nodes, self.options.tmpdir,
+ extra_args=[['-debug', '-whitelist=127.0.0.1']] * self.num_nodes,
+ binary=[self.options.testbinary] +
+ [self.options.refbinary]*(self.num_nodes-1))
diff --git a/qa/rpc-tests/test_framework/util.py b/qa/rpc-tests/test_framework/util.py
index 3759cc8162..0388e08115 100644
--- a/qa/rpc-tests/test_framework/util.py
+++ b/qa/rpc-tests/test_framework/util.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2014 The Bitcoin Core developers
+# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
@@ -17,8 +17,43 @@ import subprocess
import time
import re
-from authproxy import AuthServiceProxy, JSONRPCException
-from util import *
+from . import coverage
+from .authproxy import AuthServiceProxy, JSONRPCException
+
+COVERAGE_DIR = None
+
+
+def enable_coverage(dirname):
+ """Maintain a log of which RPC calls are made during testing."""
+ global COVERAGE_DIR
+ COVERAGE_DIR = dirname
+
+
+def get_rpc_proxy(url, node_number, timeout=None):
+ """
+ Args:
+ url (str): URL of the RPC server to call
+        node_number (int): the node number (or id) that this proxy connects to
+
+ Kwargs:
+ timeout (int): HTTP timeout in seconds
+
+ Returns:
+        AuthServiceProxy: a convenience object for making RPC calls.
+
+ """
+ proxy_kwargs = {}
+ if timeout is not None:
+ proxy_kwargs['timeout'] = timeout
+
+ proxy = AuthServiceProxy(url, **proxy_kwargs)
+ proxy.url = url # store URL on proxy for info
+
+ coverage_logfile = coverage.get_filename(
+ COVERAGE_DIR, node_number) if COVERAGE_DIR else None
+
+ return coverage.AuthServiceProxyWrapper(proxy, coverage_logfile)
+
def p2p_port(n):
return 11000 + n + os.getpid()%999
@@ -32,6 +67,9 @@ def check_json_precision():
if satoshis != 2000000000000003:
raise RuntimeError("JSON encode/decode loses precision")
+def count_bytes(hex_string):
+ return len(bytearray.fromhex(hex_string))
+
def sync_blocks(rpc_connections, wait=1):
"""
Wait until everybody has the same block count
@@ -69,6 +107,7 @@ def initialize_datadir(dirname, n):
f.write("rpcpassword=rt\n");
f.write("port="+str(p2p_port(n))+"\n");
f.write("rpcport="+str(rpc_port(n))+"\n");
+ f.write("listenonion=0\n");
return datadir
def initialize_chain(test_dir):
@@ -79,20 +118,20 @@ def initialize_chain(test_dir):
"""
if (not os.path.isdir(os.path.join("cache","node0"))
- or not os.path.isdir(os.path.join("cache","node1"))
- or not os.path.isdir(os.path.join("cache","node2"))
+ or not os.path.isdir(os.path.join("cache","node1"))
+ or not os.path.isdir(os.path.join("cache","node2"))
or not os.path.isdir(os.path.join("cache","node3"))):
#find and delete old cache directories if any exist
for i in range(4):
- if os.path.isdir(os.path.join("cache","node"+str(i))):
+ if os.path.isdir(os.path.join("cache","node"+str(i))):
shutil.rmtree(os.path.join("cache","node"+str(i)))
devnull = open(os.devnull, "w")
# Create cache directories, run bitcoinds:
for i in range(4):
datadir=initialize_datadir("cache", i)
- args = [ os.getenv("BITCOIND", "bitcoind"), "-keypool=1", "-datadir="+datadir, "-discover=0" ]
+ args = [ os.getenv("BITCOIND", "bitcoind"), "-server", "-keypool=1", "-datadir="+datadir, "-discover=0" ]
if i > 0:
args.append("-connect=127.0.0.1:"+str(p2p_port(0)))
bitcoind_processes[i] = subprocess.Popen(args)
@@ -103,11 +142,13 @@ def initialize_chain(test_dir):
if os.getenv("PYTHON_DEBUG", ""):
print "initialize_chain: bitcoin-cli -rpcwait getblockcount completed"
devnull.close()
+
rpcs = []
+
for i in range(4):
try:
- url = "http://rt:rt@127.0.0.1:%d"%(rpc_port(i),)
- rpcs.append(AuthServiceProxy(url))
+ url = "http://rt:rt@127.0.0.1:%d" % (rpc_port(i),)
+ rpcs.append(get_rpc_proxy(url, i))
except:
sys.stderr.write("Error connecting to "+url+"\n")
sys.exit(1)
@@ -177,7 +218,8 @@ def start_node(i, dirname, extra_args=None, rpchost=None, timewait=None, binary=
datadir = os.path.join(dirname, "node"+str(i))
if binary is None:
binary = os.getenv("BITCOIND", "bitcoind")
- args = [ binary, "-datadir="+datadir, "-keypool=1", "-discover=0", "-rest" ]
+ # RPC tests still depend on free transactions
+ args = [ binary, "-datadir="+datadir, "-server", "-keypool=1", "-discover=0", "-rest", "-blockprioritysize=50000" ]
if extra_args is not None: args.extend(extra_args)
bitcoind_processes[i] = subprocess.Popen(args)
devnull = open(os.devnull, "w")
@@ -190,11 +232,12 @@ def start_node(i, dirname, extra_args=None, rpchost=None, timewait=None, binary=
print "start_node: calling bitcoin-cli -rpcwait getblockcount returned"
devnull.close()
url = "http://rt:rt@%s:%d" % (rpchost or '127.0.0.1', rpc_port(i))
- if timewait is not None:
- proxy = AuthServiceProxy(url, timeout=timewait)
- else:
- proxy = AuthServiceProxy(url)
- proxy.url = url # store URL on proxy for info
+
+ proxy = get_rpc_proxy(url, i, timeout=timewait)
+
+ if COVERAGE_DIR:
+ coverage.write_all_rpc_commands(COVERAGE_DIR, proxy)
+
return proxy
def start_nodes(num_nodes, dirname, extra_args=None, rpchost=None, binary=None):
@@ -363,3 +406,70 @@ def assert_raises(exc, fun, *args, **kwds):
raise AssertionError("Unexpected exception raised: "+type(e).__name__)
else:
raise AssertionError("No exception raised")
+
+def satoshi_round(amount):
+ return Decimal(amount).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)
+
+def create_confirmed_utxos(fee, node, count):
+ node.generate(int(0.5*count)+101)
+ utxos = node.listunspent()
+ iterations = count - len(utxos)
+ addr1 = node.getnewaddress()
+ addr2 = node.getnewaddress()
+ if iterations <= 0:
+ return utxos
+ for i in xrange(iterations):
+ t = utxos.pop()
+ inputs = []
+ inputs.append({ "txid" : t["txid"], "vout" : t["vout"]})
+ outputs = {}
+ send_value = t['amount'] - fee
+ outputs[addr1] = satoshi_round(send_value/2)
+ outputs[addr2] = satoshi_round(send_value/2)
+ raw_tx = node.createrawtransaction(inputs, outputs)
+ signed_tx = node.signrawtransaction(raw_tx)["hex"]
+ txid = node.sendrawtransaction(signed_tx)
+
+ while (node.getmempoolinfo()['size'] > 0):
+ node.generate(1)
+
+ utxos = node.listunspent()
+ assert(len(utxos) >= count)
+ return utxos
+
+def gen_return_txouts():
+ # Some pre-processing to create a bunch of OP_RETURN txouts to insert into transactions we create
+ # So we have big transactions (and therefore can't fit very many into each block)
+ # create one script_pubkey
+ script_pubkey = "6a4d0200" #OP_RETURN OP_PUSH2 512 bytes
+ for i in xrange (512):
+ script_pubkey = script_pubkey + "01"
+ # concatenate 128 txouts of above script_pubkey which we'll insert before the txout for change
+ txouts = "81"
+ for k in xrange(128):
+ # add txout value
+ txouts = txouts + "0000000000000000"
+ # add length of script_pubkey
+ txouts = txouts + "fd0402"
+ # add script_pubkey
+ txouts = txouts + script_pubkey
+ return txouts
+
+def create_lots_of_big_transactions(node, txouts, utxos, fee):
+ addr = node.getnewaddress()
+ txids = []
+ for i in xrange(len(utxos)):
+ t = utxos.pop()
+ inputs = []
+ inputs.append({ "txid" : t["txid"], "vout" : t["vout"]})
+ outputs = {}
+ send_value = t['amount'] - fee
+ outputs[addr] = satoshi_round(send_value)
+ rawtx = node.createrawtransaction(inputs, outputs)
+ newtx = rawtx[0:92]
+ newtx = newtx + txouts
+ newtx = newtx + rawtx[94:]
+ signresult = node.signrawtransaction(newtx, None, None, "NONE")
+ txid = node.sendrawtransaction(signresult["hex"], True)
+ txids.append(txid)
+ return txids
diff --git a/qa/rpc-tests/txn_clone.py b/qa/rpc-tests/txn_clone.py
index e8ced0e5bb..bad090bcb4 100755
--- a/qa/rpc-tests/txn_clone.py
+++ b/qa/rpc-tests/txn_clone.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python2
-# Copyright (c) 2014 The Bitcoin Core developers
+# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
@@ -136,7 +136,7 @@ class TxnMallTest(BitcoinTestFramework):
tx2 = self.nodes[0].gettransaction(txid2)
# Verify expected confirmations
- assert_equal(tx1["confirmations"], -1)
+ assert_equal(tx1["confirmations"], -2)
assert_equal(tx1_clone["confirmations"], 2)
assert_equal(tx2["confirmations"], 1)
diff --git a/qa/rpc-tests/txn_doublespend.py b/qa/rpc-tests/txn_doublespend.py
index 36081127b4..05a3a34788 100755
--- a/qa/rpc-tests/txn_doublespend.py
+++ b/qa/rpc-tests/txn_doublespend.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python2
-# Copyright (c) 2014 The Bitcoin Core developers
+# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
@@ -99,7 +99,7 @@ class TxnMallTest(BitcoinTestFramework):
# Now give doublespend and its parents to miner:
self.nodes[2].sendrawtransaction(fund_foo_tx["hex"])
self.nodes[2].sendrawtransaction(fund_bar_tx["hex"])
- self.nodes[2].sendrawtransaction(doublespend["hex"])
+ doublespend_txid = self.nodes[2].sendrawtransaction(doublespend["hex"])
# ... mine a block...
self.nodes[2].generate(1)
@@ -107,14 +107,15 @@ class TxnMallTest(BitcoinTestFramework):
connect_nodes(self.nodes[1], 2)
self.nodes[2].generate(1) # Mine another block to make sure we sync
sync_blocks(self.nodes)
+ assert_equal(self.nodes[0].gettransaction(doublespend_txid)["confirmations"], 2)
# Re-fetch transaction info:
tx1 = self.nodes[0].gettransaction(txid1)
tx2 = self.nodes[0].gettransaction(txid2)
-
+
# Both transactions should be conflicted
- assert_equal(tx1["confirmations"], -1)
- assert_equal(tx2["confirmations"], -1)
+ assert_equal(tx1["confirmations"], -2)
+ assert_equal(tx2["confirmations"], -2)
# Node0's total balance should be starting balance, plus 100BTC for
# two more matured blocks, minus 1240 for the double-spend, plus fees (which are
diff --git a/qa/rpc-tests/wallet.py b/qa/rpc-tests/wallet.py
index f9ec6f429b..43ec621a40 100755
--- a/qa/rpc-tests/wallet.py
+++ b/qa/rpc-tests/wallet.py
@@ -1,29 +1,25 @@
#!/usr/bin/env python2
-# Copyright (c) 2014 The Bitcoin Core developers
+# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
-#
-# Exercise the wallet. Ported from wallet.sh.
-# Does the following:
-# a) creates 3 nodes, with an empty chain (no blocks).
-# b) node0 mines a block
-# c) node1 mines 101 blocks, so now nodes 0 and 1 have 50btc, node2 has none.
-# d) node0 sends 21 btc to node2, in two transactions (11 btc, then 10 btc).
-# e) node0 mines a block, collects the fee on the second transaction
-# f) node1 mines 100 blocks, to mature node0's just-mined block
-# g) check that node0 has 100-21, node2 has 21
-# h) node0 should now have 2 unspent outputs; send these to node2 via raw tx broadcast by node1
-# i) have node1 mine a block
-# j) check balances - node0 should have 0, node2 should have 100
-# k) test ResendWalletTransactions - create transactions, startup fourth node, make sure it syncs
-#
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
class WalletTest (BitcoinTestFramework):
+ def check_fee_amount(self, curr_balance, balance_with_fee, fee_per_byte, tx_size):
+ """Return curr_balance after asserting the fee was in range"""
+ fee = balance_with_fee - curr_balance
+ target_fee = fee_per_byte * tx_size
+ if fee < target_fee:
+ raise AssertionError("Fee of %s BTC too low! (Should be %s BTC)"%(str(fee), str(target_fee)))
+ # allow the node's estimation to be at most 2 bytes off
+ if fee > fee_per_byte * (tx_size + 2):
+ raise AssertionError("Fee of %s BTC too high! (Should be %s BTC)"%(str(fee), str(target_fee)))
+ return curr_balance
+
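check_fee_amount asserts the fee actually paid lies in [fee_per_byte * tx_size, fee_per_byte * (tx_size + 2)] and hands back the post-fee balance for the next assertion. A small worked example of the bounds (the 226-byte size is only an illustrative figure; the rate matches the settxfee call further down):

    from decimal import Decimal
    fee_per_byte = Decimal('0.001') / 1000                         # 0.000001 BTC per byte
    tx_size = 226                                                  # assumed size of a typical 1-in/2-out tx
    assert fee_per_byte * tx_size == Decimal('0.000226')           # minimum acceptable fee
    assert fee_per_byte * (tx_size + 2) == Decimal('0.000228')     # maximum, allowing 2 bytes of slack
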
def setup_chain(self):
print("Initializing test directory "+self.options.tmpdir)
initialize_chain_clean(self.options.tmpdir, 4)
@@ -104,33 +100,37 @@ class WalletTest (BitcoinTestFramework):
# Send 10 BTC normal
address = self.nodes[0].getnewaddress("test")
- self.nodes[2].settxfee(Decimal('0.001'))
+ fee_per_byte = Decimal('0.001') / 1000
+ self.nodes[2].settxfee(fee_per_byte * 1000)
txid = self.nodes[2].sendtoaddress(address, 10, "", "", False)
self.nodes[2].generate(1)
self.sync_all()
- assert_equal(self.nodes[2].getbalance(), Decimal('89.99900000'))
- assert_equal(self.nodes[0].getbalance(), Decimal('10.00000000'))
+ node_2_bal = self.check_fee_amount(self.nodes[2].getbalance(), Decimal('90'), fee_per_byte, count_bytes(self.nodes[2].getrawtransaction(txid)))
+ assert_equal(self.nodes[0].getbalance(), Decimal('10'))
# Send 10 BTC with subtract fee from amount
txid = self.nodes[2].sendtoaddress(address, 10, "", "", True)
self.nodes[2].generate(1)
self.sync_all()
- assert_equal(self.nodes[2].getbalance(), Decimal('79.99900000'))
- assert_equal(self.nodes[0].getbalance(), Decimal('19.99900000'))
+ node_2_bal -= Decimal('10')
+ assert_equal(self.nodes[2].getbalance(), node_2_bal)
+ node_0_bal = self.check_fee_amount(self.nodes[0].getbalance(), Decimal('20'), fee_per_byte, count_bytes(self.nodes[2].getrawtransaction(txid)))
# Sendmany 10 BTC
txid = self.nodes[2].sendmany('from1', {address: 10}, 0, "", [])
self.nodes[2].generate(1)
self.sync_all()
- assert_equal(self.nodes[2].getbalance(), Decimal('69.99800000'))
- assert_equal(self.nodes[0].getbalance(), Decimal('29.99900000'))
+ node_0_bal += Decimal('10')
+ node_2_bal = self.check_fee_amount(self.nodes[2].getbalance(), node_2_bal - Decimal('10'), fee_per_byte, count_bytes(self.nodes[2].getrawtransaction(txid)))
+ assert_equal(self.nodes[0].getbalance(), node_0_bal)
# Sendmany 10 BTC with subtract fee from amount
txid = self.nodes[2].sendmany('from1', {address: 10}, 0, "", [address])
self.nodes[2].generate(1)
self.sync_all()
- assert_equal(self.nodes[2].getbalance(), Decimal('59.99800000'))
- assert_equal(self.nodes[0].getbalance(), Decimal('39.99800000'))
+ node_2_bal -= Decimal('10')
+ assert_equal(self.nodes[2].getbalance(), node_2_bal)
+ node_0_bal = self.check_fee_amount(self.nodes[0].getbalance(), node_0_bal + Decimal('10'), fee_per_byte, count_bytes(self.nodes[2].getrawtransaction(txid)))
# Test ResendWalletTransactions:
# Create a couple of transactions, then start up a fourth
@@ -175,7 +175,7 @@ class WalletTest (BitcoinTestFramework):
for uTx in unspentTxs:
if uTx['txid'] == zeroValueTxid:
found = True
- assert_equal(uTx['amount'], Decimal('0.00000000'));
+ assert_equal(uTx['amount'], Decimal('0'))
assert(found)
#do some -walletbroadcast tests
@@ -187,21 +187,22 @@ class WalletTest (BitcoinTestFramework):
connect_nodes_bi(self.nodes,0,2)
self.sync_all()
- txIdNotBroadcasted = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 2);
+ txIdNotBroadcasted = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 2)
txObjNotBroadcasted = self.nodes[0].gettransaction(txIdNotBroadcasted)
self.nodes[1].generate(1) #mine a block, tx should not be in there
self.sync_all()
- assert_equal(self.nodes[2].getbalance(), Decimal('59.99800000')); #should not be changed because tx was not broadcasted
+ assert_equal(self.nodes[2].getbalance(), node_2_bal) #should not be changed because tx was not broadcasted
#now broadcast from another node, mine a block, sync, and check the balance
self.nodes[1].sendrawtransaction(txObjNotBroadcasted['hex'])
self.nodes[1].generate(1)
self.sync_all()
+ node_2_bal += 2
txObjNotBroadcasted = self.nodes[0].gettransaction(txIdNotBroadcasted)
- assert_equal(self.nodes[2].getbalance(), Decimal('61.99800000')); #should not be
+ assert_equal(self.nodes[2].getbalance(), node_2_bal)
#create another tx
- txIdNotBroadcasted = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 2);
+ txIdNotBroadcasted = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 2)
#restart the nodes with -walletbroadcast=1
stop_nodes(self.nodes)
@@ -214,23 +215,24 @@ class WalletTest (BitcoinTestFramework):
self.nodes[0].generate(1)
sync_blocks(self.nodes)
+ node_2_bal += 2
+ #tx should be added to balance because after restarting the nodes the tx should be broadcast
- assert_equal(self.nodes[2].getbalance(), Decimal('63.99800000')); #should not be
+ assert_equal(self.nodes[2].getbalance(), node_2_bal)
#send a tx with value in a string (PR#6380 +)
txId = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), "2")
txObj = self.nodes[0].gettransaction(txId)
- assert_equal(txObj['amount'], Decimal('-2.00000000'))
+ assert_equal(txObj['amount'], Decimal('-2'))
txId = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), "0.0001")
txObj = self.nodes[0].gettransaction(txId)
- assert_equal(txObj['amount'], Decimal('-0.00010000'))
+ assert_equal(txObj['amount'], Decimal('-0.0001'))
#check if JSON parser can handle scientific notation in strings
txId = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), "1e-4")
txObj = self.nodes[0].gettransaction(txId)
- assert_equal(txObj['amount'], Decimal('-0.00010000'))
+ assert_equal(txObj['amount'], Decimal('-0.0001'))
#this should fail
errorString = ""
@@ -239,7 +241,7 @@ class WalletTest (BitcoinTestFramework):
except JSONRPCException,e:
errorString = e.error['message']
- assert_equal("Invalid amount" in errorString, True);
+ assert_equal("Invalid amount" in errorString, True)
errorString = ""
try:
@@ -247,7 +249,30 @@ class WalletTest (BitcoinTestFramework):
except JSONRPCException,e:
errorString = e.error['message']
- assert_equal("not an integer" in errorString, True);
+ assert_equal("not an integer" in errorString, True)
+
+ #check if wallet or blockchain maintenance changes the balance
+ self.sync_all()
+ self.nodes[0].generate(1)
+ self.sync_all()
+ balance_nodes = [self.nodes[i].getbalance() for i in range(3)]
+
+ maintenance = [
+ '-rescan',
+ '-reindex',
+ '-zapwallettxes=1',
+ '-zapwallettxes=2',
+ '-salvagewallet',
+ ]
+ for m in maintenance:
+ stop_nodes(self.nodes)
+ wait_bitcoinds()
+ self.nodes = start_nodes(3, self.options.tmpdir, [[m]] * 3)
+ connect_nodes_bi(self.nodes,0,1)
+ connect_nodes_bi(self.nodes,1,2)
+ connect_nodes_bi(self.nodes,0,2)
+ self.sync_all()
+ assert_equal(balance_nodes, [self.nodes[i].getbalance() for i in range(3)])
if __name__ == '__main__':
diff --git a/qa/rpc-tests/walletbackup.py b/qa/rpc-tests/walletbackup.py
index da100d7fc0..1221a09116 100755
--- a/qa/rpc-tests/walletbackup.py
+++ b/qa/rpc-tests/walletbackup.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python2
-# Copyright (c) 2014 The Bitcoin Core developers
+# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
diff --git a/qa/rpc-tests/zapwallettxes.py b/qa/rpc-tests/zapwallettxes.py
index 0ec8ec5364..1ee0f79ac0 100755
--- a/qa/rpc-tests/zapwallettxes.py
+++ b/qa/rpc-tests/zapwallettxes.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python2
-# Copyright (c) 2014 The Bitcoin Core developers
+# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.