Diffstat (limited to 'qa/rpc-tests')
-rwxr-xr-x  qa/rpc-tests/mempool_packages.py           107
-rw-r--r--  qa/rpc-tests/test_framework/authproxy.py    36
-rwxr-xr-x  qa/rpc-tests/zmq_test.py                    93
3 files changed, 222 insertions(+), 14 deletions(-)
diff --git a/qa/rpc-tests/mempool_packages.py b/qa/rpc-tests/mempool_packages.py
new file mode 100755
index 0000000000..6041f3a3dd
--- /dev/null
+++ b/qa/rpc-tests/mempool_packages.py
@@ -0,0 +1,107 @@
+#!/usr/bin/env python2
+# Copyright (c) 2014-2015 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+
+# Test descendant package tracking code
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import *
+
+def satoshi_round(amount):
+    return Decimal(amount).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)
+
+class MempoolPackagesTest(BitcoinTestFramework):
+
+    def setup_network(self):
+        self.nodes = []
+        self.nodes.append(start_node(0, self.options.tmpdir, ["-maxorphantx=1000", "-relaypriority=0"]))
+        self.is_network_split = False
+        self.sync_all()
+
+    # Build a transaction that spends parent_txid:vout
+    # Return amount sent
+    def chain_transaction(self, parent_txid, vout, value, fee, num_outputs):
+        send_value = satoshi_round((value - fee)/num_outputs)
+        inputs = [ {'txid' : parent_txid, 'vout' : vout} ]
+        outputs = {}
+        for i in xrange(num_outputs):
+            outputs[self.nodes[0].getnewaddress()] = send_value
+        rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
+        signedtx = self.nodes[0].signrawtransaction(rawtx)
+        txid = self.nodes[0].sendrawtransaction(signedtx['hex'])
+        fulltx = self.nodes[0].getrawtransaction(txid, 1)
+        assert(len(fulltx['vout']) == num_outputs) # make sure we didn't generate a change output
+        return (txid, send_value)
+
+    def run_test(self):
+        ''' Mine some blocks and have them mature. '''
+        self.nodes[0].generate(101)
+        utxo = self.nodes[0].listunspent(10)
+        txid = utxo[0]['txid']
+        vout = utxo[0]['vout']
+        value = utxo[0]['amount']
+
+        fee = Decimal("0.0001")
+        # 100 transactions off a confirmed tx should be fine
+        chain = []
+        for i in xrange(100):
+            (txid, sent_value) = self.chain_transaction(txid, 0, value, fee, 1)
+            value = sent_value
+            chain.append(txid)
+
+        # Check mempool has 100 transactions in it, and descendant
+        # count and fees should look correct
+        mempool = self.nodes[0].getrawmempool(True)
+        assert_equal(len(mempool), 100)
+        descendant_count = 1
+        descendant_fees = 0
+        descendant_size = 0
+        SATOSHIS = 100000000
+
+        for x in reversed(chain):
+            assert_equal(mempool[x]['descendantcount'], descendant_count)
+            descendant_fees += mempool[x]['fee']
+            assert_equal(mempool[x]['descendantfees'], SATOSHIS*descendant_fees)
+            descendant_size += mempool[x]['size']
+            assert_equal(mempool[x]['descendantsize'], descendant_size)
+            descendant_count += 1
+
+        # Adding one more transaction on to the chain should fail.
+        try:
+            self.chain_transaction(txid, vout, value, fee, 1)
+        except JSONRPCException as e:
+            print "too-long-ancestor-chain successfully rejected"
+
+        # TODO: test ancestor size limits
+
+        # Now test descendant chain limits
+        txid = utxo[1]['txid']
+        value = utxo[1]['amount']
+        vout = utxo[1]['vout']
+
+        transaction_package = []
+        # First create one parent tx with 10 children
+        (txid, sent_value) = self.chain_transaction(txid, vout, value, fee, 10)
+        parent_transaction = txid
+        for i in xrange(10):
+            transaction_package.append({'txid': txid, 'vout': i, 'amount': sent_value})
+
+        for i in xrange(1000):
+            utxo = transaction_package.pop(0)
+            try:
+                (txid, sent_value) = self.chain_transaction(utxo['txid'], utxo['vout'], utxo['amount'], fee, 10)
+                for j in xrange(10):
+                    transaction_package.append({'txid': txid, 'vout': j, 'amount': sent_value})
+                if i == 998:
+                    mempool = self.nodes[0].getrawmempool(True)
+                    assert_equal(mempool[parent_transaction]['descendantcount'], 1000)
+            except JSONRPCException as e:
+                print e.error['message']
+                assert_equal(i, 999)
+                print "tx that would create too large descendant package successfully rejected"
+
+        # TODO: test descendant size limits
+
+if __name__ == '__main__':
+    MempoolPackagesTest().main()
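
To make the descendant bookkeeping above concrete, here is a small standalone sketch (illustrative only, not part of the commit; the per-transaction fee matches the test but the sizes are invented) that accumulates descendantcount, descendantfees and descendantsize over a toy three-transaction chain the same way the loop over reversed(chain) checks them:

# Toy model of the verbose getrawmempool() descendant fields verified above.
# Assumptions: a 0.0001 BTC fee per transaction and made-up serialized sizes.
from decimal import Decimal

SATOSHIS = 100000000
fee = Decimal("0.0001")
sizes = [192, 226, 226]            # hypothetical sizes, oldest tx first

expected = []
descendant_count = 1
descendant_fees = Decimal(0)
descendant_size = 0
for size in reversed(sizes):       # walk from the newest tx back to the oldest
    descendant_fees += fee
    descendant_size += size
    expected.append({'descendantcount': descendant_count,
                     'descendantfees': int(SATOSHIS * descendant_fees),
                     'descendantsize': descendant_size})
    descendant_count += 1

for entry in expected:
    print entry

Note that the descendant statistics include the transaction itself, which is why the newest transaction in the chain starts at a count of 1 and a fee total equal to its own fee.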
diff --git a/qa/rpc-tests/test_framework/authproxy.py b/qa/rpc-tests/test_framework/authproxy.py
index bc7d655fdf..33014dc139 100644
--- a/qa/rpc-tests/test_framework/authproxy.py
+++ b/qa/rpc-tests/test_framework/authproxy.py
@@ -106,6 +106,26 @@ class AuthServiceProxy(object):
name = "%s.%s" % (self.__service_name, name)
return AuthServiceProxy(self.__service_url, name, connection=self.__conn)
+ def _request(self, method, path, postdata):
+ '''
+ Do a HTTP request, with retry if we get disconnected (e.g. due to a timeout).
+ This is a workaround for https://bugs.python.org/issue3566 which is fixed in Python 3.5.
+ '''
+ headers = {'Host': self.__url.hostname,
+ 'User-Agent': USER_AGENT,
+ 'Authorization': self.__auth_header,
+ 'Content-type': 'application/json'}
+ try:
+ self.__conn.request(method, path, postdata, headers)
+ return self._get_response()
+ except httplib.BadStatusLine as e:
+ if e.line == "''": # if connection was closed, try again
+ self.__conn.close()
+ self.__conn.request(method, path, postdata, headers)
+ return self._get_response()
+ else:
+ raise
+
def __call__(self, *args):
AuthServiceProxy.__id_count += 1
@@ -115,13 +135,7 @@ class AuthServiceProxy(object):
                                'method': self.__service_name,
                                'params': args,
                                'id': AuthServiceProxy.__id_count}, default=EncodeDecimal)
-        self.__conn.request('POST', self.__url.path, postdata,
-                            {'Host': self.__url.hostname,
-                             'User-Agent': USER_AGENT,
-                             'Authorization': self.__auth_header,
-                             'Content-type': 'application/json'})
-
-        response = self._get_response()
+        response = self._request('POST', self.__url.path, postdata)
         if response['error'] is not None:
             raise JSONRPCException(response['error'])
         elif 'result' not in response:
@@ -133,13 +147,7 @@ class AuthServiceProxy(object):
     def _batch(self, rpc_call_list):
         postdata = json.dumps(list(rpc_call_list), default=EncodeDecimal)
         log.debug("--> "+postdata)
-        self.__conn.request('POST', self.__url.path, postdata,
-                            {'Host': self.__url.hostname,
-                             'User-Agent': USER_AGENT,
-                             'Authorization': self.__auth_header,
-                             'Content-type': 'application/json'})
-
-        return self._get_response()
+        return self._request('POST', self.__url.path, postdata)
     def _get_response(self):
         http_response = self.__conn.getresponse()
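
The refactoring above routes both single calls and batches through the new _request() helper so the BadStatusLine workaround lives in one place. As a rough standalone illustration of that retry pattern (a sketch against a bare httplib.HTTPConnection, with an assumed post_with_retry helper name; it is not the framework's API):

# Minimal sketch of the retry-once-on-BadStatusLine pattern used by _request().
# httplib reports a kept-alive connection that the peer already closed as a
# BadStatusLine whose .line attribute is the two-character string "''".
import httplib

def post_with_retry(conn, path, body, headers):
    try:
        conn.request('POST', path, body, headers)
        return conn.getresponse()
    except httplib.BadStatusLine as e:
        if e.line == "''":          # connection was closed; reconnect and retry once
            conn.close()            # request() reopens the socket automatically
            conn.request('POST', path, body, headers)
            return conn.getresponse()
        raise

# Usage sketch: a real node would answer 401 to this unauthenticated call;
# the point here is only the transport-level retry.
conn = httplib.HTTPConnection('127.0.0.1', 8332)
resp = post_with_retry(conn, '/', '{"method": "getblockcount", "params": [], "id": 1}',
                       {'Content-type': 'application/json'})
print resp.status, resp.read()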
diff --git a/qa/rpc-tests/zmq_test.py b/qa/rpc-tests/zmq_test.py
new file mode 100755
index 0000000000..fffaf677d6
--- /dev/null
+++ b/qa/rpc-tests/zmq_test.py
@@ -0,0 +1,93 @@
+#!/usr/bin/env python2
+# Copyright (c) 2015 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+
+#
+# Test ZMQ interface
+#
+
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import *
+import zmq
+import binascii
+from test_framework.mininode import hash256
+
+try:
+    import http.client as httplib
+except ImportError:
+    import httplib
+try:
+    import urllib.parse as urlparse
+except ImportError:
+    import urlparse
+
+class ZMQTest (BitcoinTestFramework):
+
+    port = 28332
+
+    def setup_nodes(self):
+        self.zmqContext = zmq.Context()
+        self.zmqSubSocket = self.zmqContext.socket(zmq.SUB)
+        self.zmqSubSocket.setsockopt(zmq.SUBSCRIBE, "hashblock")
+        self.zmqSubSocket.setsockopt(zmq.SUBSCRIBE, "hashtx")
+        self.zmqSubSocket.connect("tcp://127.0.0.1:%i" % self.port)
+        # Note: proxies are not used to connect to local nodes;
+        # this is because the proxy to use is based on CService.GetNetwork(), which returns NET_UNROUTABLE for localhost
+        return start_nodes(4, self.options.tmpdir, extra_args=[
+            ['-zmqpubhashtx=tcp://127.0.0.1:'+str(self.port), '-zmqpubhashblock=tcp://127.0.0.1:'+str(self.port)],
+            [],
+            [],
+            []
+            ])
+
+    def run_test(self):
+        self.sync_all()
+
+        genhashes = self.nodes[0].generate(1);
+        self.sync_all()
+
+        print "listen..."
+        msg = self.zmqSubSocket.recv_multipart()
+        topic = str(msg[0])
+        body = msg[1]
+
+        msg = self.zmqSubSocket.recv_multipart()
+        topic = str(msg[0])
+        body = msg[1]
+        blkhash = binascii.hexlify(body)
+
+        assert_equal(genhashes[0], blkhash) #blockhash from generate must be equal to the hash received over zmq
+
+        n = 10
+        genhashes = self.nodes[1].generate(n);
+        self.sync_all()
+
+        zmqHashes = []
+        for x in range(0,n*2):
+            msg = self.zmqSubSocket.recv_multipart()
+            topic = str(msg[0])
+            body = msg[1]
+            if topic == "hashblock":
+                zmqHashes.append(binascii.hexlify(body))
+
+        for x in range(0,n):
+            assert_equal(genhashes[x], zmqHashes[x]) #blockhash from generate must be equal to the hash received over zmq
+
+        #test tx from a second node
+        hashRPC = self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 1.0)
+        self.sync_all()
+
+        #now we should receive a zmq msg because the tx was broadcast
+        msg = self.zmqSubSocket.recv_multipart()
+        topic = str(msg[0])
+        body = msg[1]
+        hashZMQ = ""
+        if topic == "hashtx":
+            hashZMQ = binascii.hexlify(body)
+
+        assert_equal(hashRPC, hashZMQ) #txid from sendtoaddress must be equal to the hash received over zmq
+
+
+if __name__ == '__main__':
+    ZMQTest ().main ()
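
Outside the test harness, consuming these notifications only takes a SUB socket subscribed to the same topics. A minimal sketch, assuming a node started with -zmqpubhashtx=tcp://127.0.0.1:28332 and -zmqpubhashblock=tcp://127.0.0.1:28332 (the port mirrors the test's default and is not otherwise special):

# Minimal standalone listener for the hashblock/hashtx notifications the test
# consumes; the endpoint and port are assumptions matching the test's setup.
import binascii
import zmq

context = zmq.Context()
sock = context.socket(zmq.SUB)
sock.setsockopt(zmq.SUBSCRIBE, "hashblock")
sock.setsockopt(zmq.SUBSCRIBE, "hashtx")
sock.connect("tcp://127.0.0.1:28332")

while True:
    msg = sock.recv_multipart()       # [topic, 32-byte hash, ...]
    topic, body = msg[0], msg[1]
    print topic, binascii.hexlify(body)

The hashes arrive as raw bytes, which is why both the test and this sketch hexlify them before comparing against hex strings returned by RPC.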