diff options
author | Wladimir J. van der Laan <laanwj@gmail.com> | 2015-04-30 14:49:02 +0200 |
---|---|---|
committer | Wladimir J. van der Laan <laanwj@gmail.com> | 2015-04-30 14:49:24 +0200 |
commit | da38dc696c86aeb7394768ab34eb9a6e46bb614c (patch) | |
tree | 94ff73a7242151da59cab821d598a63b33c78965 | |
parent | 9c25397619339c90b018f98bbc081c14186d7623 (diff) | |
parent | 2703412a39c95c811a40c3fff6929e4ce59c3c62 (diff) |
Merge pull request #5981
2703412 Fix default binary in p2p tests to use environment variable (Suhas Daftuar)
29bff0e Add some travis debugging for python scripts (Suhas Daftuar)
d76412b Add script manipulation tools for use in mininode testing framework (Suhas Daftuar)
b93974c Add comparison tool test runner, built on mininode (Suhas Daftuar)
6c1d1ba Python p2p testing framework (Suhas Daftuar)
-rw-r--r-- | .travis.yml | 1 | ||||
-rwxr-xr-x | qa/pull-tester/rpc-tests.sh | 2 | ||||
-rw-r--r-- | qa/rpc-tests/bignum.py | 102 | ||||
-rwxr-xr-x | qa/rpc-tests/bipdersig-p2p.py | 183 | ||||
-rw-r--r-- | qa/rpc-tests/blockstore.py | 127 | ||||
-rw-r--r-- | qa/rpc-tests/blocktools.py | 65 | ||||
-rwxr-xr-x | qa/rpc-tests/comptool.py | 330 | ||||
-rwxr-xr-x | qa/rpc-tests/invalidblockrequest.py | 115 | ||||
-rwxr-xr-x | qa/rpc-tests/maxblocksinflight.py | 100 | ||||
-rwxr-xr-x | qa/rpc-tests/mininode.py | 1247 | ||||
-rw-r--r-- | qa/rpc-tests/script.py | 896 | ||||
-rwxr-xr-x | qa/rpc-tests/script_test.py | 253 | ||||
-rwxr-xr-x | qa/rpc-tests/test_framework.py | 31 | ||||
-rw-r--r-- | qa/rpc-tests/util.py | 19 |
14 files changed, 3467 insertions, 4 deletions
diff --git a/.travis.yml b/.travis.yml index 44ea7b62d7..e6578ee078 100644 --- a/.travis.yml +++ b/.travis.yml @@ -16,6 +16,7 @@ env: - CCACHE_COMPRESS=1 - BASE_OUTDIR=$TRAVIS_BUILD_DIR/out - SDK_URL=https://bitcoincore.org/depends-sources/sdks + - PYTHON_DEBUG=1 cache: apt: true directories: diff --git a/qa/pull-tester/rpc-tests.sh b/qa/pull-tester/rpc-tests.sh index dd2f8d4e5e..ae27a94b8d 100755 --- a/qa/pull-tester/rpc-tests.sh +++ b/qa/pull-tester/rpc-tests.sh @@ -30,6 +30,8 @@ testScripts=( 'proxy_test.py' 'merkle_blocks.py' # 'forknotify.py' + 'maxblocksinflight.py' + 'invalidblockrequest.py' ); if [ "x${ENABLE_BITCOIND}${ENABLE_UTILS}${ENABLE_WALLET}" = "x111" ]; then for (( i = 0; i < ${#testScripts[@]}; i++ )) diff --git a/qa/rpc-tests/bignum.py b/qa/rpc-tests/bignum.py new file mode 100644 index 0000000000..b0c58ccd47 --- /dev/null +++ b/qa/rpc-tests/bignum.py @@ -0,0 +1,102 @@ +# +# +# bignum.py +# +# This file is copied from python-bitcoinlib. +# +# Distributed under the MIT/X11 software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. 
+# + +"""Bignum routines""" + +from __future__ import absolute_import, division, print_function, unicode_literals + +import struct + + +# generic big endian MPI format + +def bn_bytes(v, have_ext=False): + ext = 0 + if have_ext: + ext = 1 + return ((v.bit_length()+7)//8) + ext + +def bn2bin(v): + s = bytearray() + i = bn_bytes(v) + while i > 0: + s.append((v >> ((i-1) * 8)) & 0xff) + i -= 1 + return s + +def bin2bn(s): + l = 0 + for ch in s: + l = (l << 8) | ch + return l + +def bn2mpi(v): + have_ext = False + if v.bit_length() > 0: + have_ext = (v.bit_length() & 0x07) == 0 + + neg = False + if v < 0: + neg = True + v = -v + + s = struct.pack(b">I", bn_bytes(v, have_ext)) + ext = bytearray() + if have_ext: + ext.append(0) + v_bin = bn2bin(v) + if neg: + if have_ext: + ext[0] |= 0x80 + else: + v_bin[0] |= 0x80 + return s + ext + v_bin + +def mpi2bn(s): + if len(s) < 4: + return None + s_size = bytes(s[:4]) + v_len = struct.unpack(b">I", s_size)[0] + if len(s) != (v_len + 4): + return None + if v_len == 0: + return 0 + + v_str = bytearray(s[4:]) + neg = False + i = v_str[0] + if i & 0x80: + neg = True + i &= ~0x80 + v_str[0] = i + + v = bin2bn(v_str) + + if neg: + return -v + return v + +# bitcoin-specific little endian format, with implicit size +def mpi2vch(s): + r = s[4:] # strip size + r = r[::-1] # reverse string, converting BE->LE + return r + +def bn2vch(v): + return bytes(mpi2vch(bn2mpi(v))) + +def vch2mpi(s): + r = struct.pack(b">I", len(s)) # size + r += s[::-1] # reverse string, converting LE->BE + return r + +def vch2bn(s): + return mpi2bn(vch2mpi(s)) + diff --git a/qa/rpc-tests/bipdersig-p2p.py b/qa/rpc-tests/bipdersig-p2p.py new file mode 100755 index 0000000000..ff0c878898 --- /dev/null +++ b/qa/rpc-tests/bipdersig-p2p.py @@ -0,0 +1,183 @@ +#!/usr/bin/env python2 +# +# Distributed under the MIT/X11 software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. 
+# + +from test_framework import ComparisonTestFramework +from util import * +from mininode import CTransaction, NetworkThread +from blocktools import create_coinbase, create_block +from binascii import hexlify, unhexlify +import cStringIO +from comptool import TestInstance, TestManager +from script import CScript +import time + +# A canonical signature consists of: +# <30> <total len> <02> <len R> <R> <02> <len S> <S> <hashtype> +def unDERify(tx): + ''' + Make the signature in vin 0 of a tx non-DER-compliant, + by adding padding after the S-value. + ''' + scriptSig = CScript(tx.vin[0].scriptSig) + newscript = [] + for i in scriptSig: + if (len(newscript) == 0): + newscript.append(i[0:-1] + '\0' + i[-1]) + else: + newscript.append(i) + tx.vin[0].scriptSig = CScript(newscript) + +''' +This test is meant to exercise BIP66 (DER SIG). +Connect to a single node. +Mine 2 (version 2) blocks (save the coinbases for later). +Generate 98 more version 2 blocks, verify the node accepts. +Mine 749 version 3 blocks, verify the node accepts. +Check that the new DERSIG rules are not enforced on the 750th version 3 block. +Check that the new DERSIG rules are enforced on the 751st version 3 block. +Mine 199 new version blocks. +Mine 1 old-version block. +Mine 1 new version block. +Mine 1 old version block, see that the node rejects. 
+''' + +class BIP66Test(ComparisonTestFramework): + + def __init__(self): + self.num_nodes = 1 + + def setup_network(self): + # Must set the blockversion for this test + self.nodes = start_nodes(1, self.options.tmpdir, + extra_args=[['-debug', '-whitelist=127.0.0.1', '-blockversion=2']], + binary=[self.options.testbinary]) + + def run_test(self): + test = TestManager(self, self.options.tmpdir) + test.add_all_connections(self.nodes) + NetworkThread().start() # Start up network handling in another thread + test.run() + + def create_transaction(self, node, coinbase, to_address, amount): + from_txid = node.getblock(coinbase)['tx'][0] + inputs = [{ "txid" : from_txid, "vout" : 0}] + outputs = { to_address : amount } + rawtx = node.createrawtransaction(inputs, outputs) + signresult = node.signrawtransaction(rawtx) + tx = CTransaction() + f = cStringIO.StringIO(unhexlify(signresult['hex'])) + tx.deserialize(f) + return tx + + def get_tests(self): + + self.coinbase_blocks = self.nodes[0].generate(2) + self.tip = int ("0x" + self.nodes[0].getbestblockhash() + "L", 0) + self.nodeaddress = self.nodes[0].getnewaddress() + self.last_block_time = time.time() + + ''' 98 more version 2 blocks ''' + test_blocks = [] + for i in xrange(98): + block = create_block(self.tip, create_coinbase(2), self.last_block_time + 1) + block.nVersion = 2 + block.rehash() + block.solve() + test_blocks.append([block, True]) + self.last_block_time += 1 + self.tip = block.sha256 + yield TestInstance(test_blocks, sync_every_block=False) + + ''' Mine 749 version 3 blocks ''' + test_blocks = [] + for i in xrange(749): + block = create_block(self.tip, create_coinbase(2), self.last_block_time + 1) + block.nVersion = 3 + block.rehash() + block.solve() + test_blocks.append([block, True]) + self.last_block_time += 1 + self.tip = block.sha256 + yield TestInstance(test_blocks, sync_every_block=False) + + ''' + Check that the new DERSIG rules are not enforced in the 750th + version 3 block. 
+ ''' + spendtx = self.create_transaction(self.nodes[0], + self.coinbase_blocks[0], self.nodeaddress, 1.0) + unDERify(spendtx) + spendtx.rehash() + + block = create_block(self.tip, create_coinbase(2), self.last_block_time + 1) + block.nVersion = 3 + block.vtx.append(spendtx) + block.hashMerkleRoot = block.calc_merkle_root() + block.rehash() + block.solve() + + self.last_block_time += 1 + self.tip = block.sha256 + yield TestInstance([[block, True]]) + + ''' + Check that the new DERSIG rules are enforced in the 751st version 3 + block. + ''' + spendtx = self.create_transaction(self.nodes[0], + self.coinbase_blocks[1], self.nodeaddress, 1.0) + unDERify(spendtx) + spendtx.rehash() + + block = create_block(self.tip, create_coinbase(1), self.last_block_time + 1) + block.nVersion = 3 + block.vtx.append(spendtx) + block.hashMerkleRoot = block.calc_merkle_root() + block.rehash() + block.solve() + self.last_block_time += 1 + yield TestInstance([[block, False]]) + + ''' Mine 199 new version blocks on last valid tip ''' + test_blocks = [] + for i in xrange(199): + block = create_block(self.tip, create_coinbase(1), self.last_block_time + 1) + block.nVersion = 3 + block.rehash() + block.solve() + test_blocks.append([block, True]) + self.last_block_time += 1 + self.tip = block.sha256 + yield TestInstance(test_blocks, sync_every_block=False) + + ''' Mine 1 old version block ''' + block = create_block(self.tip, create_coinbase(1), self.last_block_time + 1) + block.nVersion = 2 + block.rehash() + block.solve() + self.last_block_time += 1 + self.tip = block.sha256 + yield TestInstance([[block, True]]) + + ''' Mine 1 new version block ''' + block = create_block(self.tip, create_coinbase(1), self.last_block_time + 1) + block.nVersion = 3 + block.rehash() + block.solve() + self.last_block_time += 1 + self.tip = block.sha256 + yield TestInstance([[block, True]]) + + ''' Mine 1 old version block, should be invalid ''' + block = create_block(self.tip, create_coinbase(1), 
self.last_block_time + 1) + block.nVersion = 2 + block.rehash() + block.solve() + self.last_block_time += 1 + yield TestInstance([[block, False]]) + +if __name__ == '__main__': + BIP66Test().main() diff --git a/qa/rpc-tests/blockstore.py b/qa/rpc-tests/blockstore.py new file mode 100644 index 0000000000..c57b6df81b --- /dev/null +++ b/qa/rpc-tests/blockstore.py @@ -0,0 +1,127 @@ +# BlockStore: a helper class that keeps a map of blocks and implements +# helper functions for responding to getheaders and getdata, +# and for constructing a getheaders message +# + +from mininode import * +import dbm + +class BlockStore(object): + def __init__(self, datadir): + self.blockDB = dbm.open(datadir + "/blocks", 'c') + self.currentBlock = 0L + + def close(self): + self.blockDB.close() + + def get(self, blockhash): + serialized_block = None + try: + serialized_block = self.blockDB[repr(blockhash)] + except KeyError: + return None + f = cStringIO.StringIO(serialized_block) + ret = CBlock() + ret.deserialize(f) + ret.calc_sha256() + return ret + + # Note: this pulls full blocks out of the database just to retrieve + # the headers -- perhaps we could keep a separate data structure + # to avoid this overhead. 
+ def headers_for(self, locator, hash_stop, current_tip=None): + if current_tip is None: + current_tip = self.currentBlock + current_block = self.get(current_tip) + if current_block is None: + return None + + response = msg_headers() + headersList = [ CBlockHeader(current_block) ] + maxheaders = 2000 + while (headersList[0].sha256 not in locator.vHave): + prevBlockHash = headersList[0].hashPrevBlock + prevBlock = self.get(prevBlockHash) + if prevBlock is not None: + headersList.insert(0, CBlockHeader(prevBlock)) + else: + break + headersList = headersList[:maxheaders] # truncate if we have too many + hashList = [x.sha256 for x in headersList] + index = len(headersList) + if (hash_stop in hashList): + index = hashList.index(hash_stop)+1 + response.headers = headersList[:index] + return response + + def add_block(self, block): + block.calc_sha256() + try: + self.blockDB[repr(block.sha256)] = bytes(block.serialize()) + except TypeError as e: + print "Unexpected error: ", sys.exc_info()[0], e.args + self.currentBlock = block.sha256 + + def get_blocks(self, inv): + responses = [] + for i in inv: + if (i.type == 2): # MSG_BLOCK + block = self.get(i.hash) + if block is not None: + responses.append(msg_block(block)) + return responses + + def get_locator(self, current_tip=None): + if current_tip is None: + current_tip = self.currentBlock + r = [] + counter = 0 + step = 1 + lastBlock = self.get(current_tip) + while lastBlock is not None: + r.append(lastBlock.hashPrevBlock) + for i in range(step): + lastBlock = self.get(lastBlock.hashPrevBlock) + if lastBlock is None: + break + counter += 1 + if counter > 10: + step *= 2 + locator = CBlockLocator() + locator.vHave = r + return locator + +class TxStore(object): + def __init__(self, datadir): + self.txDB = dbm.open(datadir + "/transactions", 'c') + + def close(self): + self.txDB.close() + + def get(self, txhash): + serialized_tx = None + try: + serialized_tx = self.txDB[repr(txhash)] + except KeyError: + return None + f = 
cStringIO.StringIO(serialized_tx) + ret = CTransaction() + ret.deserialize(f) + ret.calc_sha256() + return ret + + def add_transaction(self, tx): + tx.calc_sha256() + try: + self.txDB[repr(tx.sha256)] = bytes(tx.serialize()) + except TypeError as e: + print "Unexpected error: ", sys.exc_info()[0], e.args + + def get_transactions(self, inv): + responses = [] + for i in inv: + if (i.type == 1): # MSG_TX + tx = self.get(i.hash) + if tx is not None: + responses.append(msg_tx(tx)) + return responses diff --git a/qa/rpc-tests/blocktools.py b/qa/rpc-tests/blocktools.py new file mode 100644 index 0000000000..f397fe7cd6 --- /dev/null +++ b/qa/rpc-tests/blocktools.py @@ -0,0 +1,65 @@ +# blocktools.py - utilities for manipulating blocks and transactions +# +# Distributed under the MIT/X11 software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +# + +from mininode import * +from script import CScript, CScriptOp + +# Create a block (with regtest difficulty) +def create_block(hashprev, coinbase, nTime=None): + block = CBlock() + if nTime is None: + import time + block.nTime = int(time.time()+600) + else: + block.nTime = nTime + block.hashPrevBlock = hashprev + block.nBits = 0x207fffff # Will break after a difficulty adjustment... 
+ block.vtx.append(coinbase) + block.hashMerkleRoot = block.calc_merkle_root() + block.calc_sha256() + return block + +def serialize_script_num(value): + r = bytearray(0) + if value == 0: + return r + neg = value < 0 + absvalue = -value if neg else value + while (absvalue): + r.append(chr(absvalue & 0xff)) + absvalue >>= 8 + if r[-1] & 0x80: + r.append(0x80 if neg else 0) + elif neg: + r[-1] |= 0x80 + return r + +counter=1 +# Create an anyone-can-spend coinbase transaction, assuming no miner fees +def create_coinbase(heightAdjust = 0): + global counter + coinbase = CTransaction() + coinbase.vin.append(CTxIn(COutPoint(0, 0xffffffff), + ser_string(serialize_script_num(counter+heightAdjust)), 0xffffffff)) + counter += 1 + coinbaseoutput = CTxOut() + coinbaseoutput.nValue = 50*100000000 + halvings = int((counter+heightAdjust)/150) # regtest + coinbaseoutput.nValue >>= halvings + coinbaseoutput.scriptPubKey = "" + coinbase.vout = [ coinbaseoutput ] + coinbase.calc_sha256() + return coinbase + +# Create a transaction with an anyone-can-spend output, that spends the +# nth output of prevtx. +def create_transaction(prevtx, n, sig, value): + tx = CTransaction() + assert(n < len(prevtx.vout)) + tx.vin.append(CTxIn(COutPoint(prevtx.sha256, n), sig, 0xffffffff)) + tx.vout.append(CTxOut(value, "")) + tx.calc_sha256() + return tx diff --git a/qa/rpc-tests/comptool.py b/qa/rpc-tests/comptool.py new file mode 100755 index 0000000000..6125bae51e --- /dev/null +++ b/qa/rpc-tests/comptool.py @@ -0,0 +1,330 @@ +#!/usr/bin/env python2 +# +# Distributed under the MIT/X11 software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +# + +from mininode import * +from blockstore import BlockStore, TxStore +from util import p2p_port + +''' +This is a tool for comparing two or more bitcoinds to each other +using a script provided. + +To use, create a class that implements get_tests(), and pass it in +as the test generator to TestManager. 
get_tests() should be a python +generator that returns TestInstance objects. See below for definition. +''' + +# TestNode behaves as follows: +# Configure with a BlockStore and TxStore +# on_inv: log the message but don't request +# on_headers: log the chain tip +# on_pong: update ping response map (for synchronization) +# on_getheaders: provide headers via BlockStore +# on_getdata: provide blocks via BlockStore + +class TestNode(NodeConnCB): + + def __init__(self, block_store, tx_store): + NodeConnCB.__init__(self) + self.create_callback_map() + self.conn = None + self.bestblockhash = None + self.block_store = block_store + self.block_request_map = {} + self.tx_store = tx_store + self.tx_request_map = {} + + # When the pingmap is non-empty we're waiting for + # a response + self.pingMap = {} + self.lastInv = [] + + def add_connection(self, conn): + self.conn = conn + + def on_headers(self, conn, message): + if len(message.headers) > 0: + best_header = message.headers[-1] + best_header.calc_sha256() + self.bestblockhash = best_header.sha256 + + def on_getheaders(self, conn, message): + response = self.block_store.headers_for(message.locator, message.hashstop) + if response is not None: + conn.send_message(response) + + def on_getdata(self, conn, message): + [conn.send_message(r) for r in self.block_store.get_blocks(message.inv)] + [conn.send_message(r) for r in self.tx_store.get_transactions(message.inv)] + + for i in message.inv: + if i.type == 1: + self.tx_request_map[i.hash] = True + elif i.type == 2: + self.block_request_map[i.hash] = True + + def on_inv(self, conn, message): + self.lastInv = [x.hash for x in message.inv] + + def on_pong(self, conn, message): + try: + del self.pingMap[message.nonce] + except KeyError: + raise AssertionError("Got pong for unknown ping [%s]" % repr(message)) + + def send_inv(self, obj): + mtype = 2 if isinstance(obj, CBlock) else 1 + self.conn.send_message(msg_inv([CInv(mtype, obj.sha256)])) + + def send_getheaders(self): + # We 
ask for headers from their last tip. + m = msg_getheaders() + m.locator = self.block_store.get_locator(self.bestblockhash) + self.conn.send_message(m) + + # This assumes BIP31 + def send_ping(self, nonce): + self.pingMap[nonce] = True + self.conn.send_message(msg_ping(nonce)) + + def received_ping_response(self, nonce): + return nonce not in self.pingMap + + def send_mempool(self): + self.lastInv = [] + self.conn.send_message(msg_mempool()) + +# TestInstance: +# +# Instances of these are generated by the test generator, and fed into the +# comptool. +# +# "blocks_and_transactions" should be an array of [obj, True/False/None]: +# - obj is either a CBlock or a CTransaction, and +# - the second value indicates whether the object should be accepted +# into the blockchain or mempool (for tests where we expect a certain +# answer), or "None" if we don't expect a certain answer and are just +# comparing the behavior of the nodes being tested. +# sync_every_block: if True, then each block will be inv'ed, synced, and +# nodes will be tested based on the outcome for the block. If False, +# then inv's accumulate until all blocks are processed (or max inv size +# is reached) and then sent out in one inv message. Then the final block +# will be synced across all connections, and the outcome of the final +# block will be tested. +# sync_every_tx: analagous to behavior for sync_every_block, except if outcome +# on the final tx is None, then contents of entire mempool are compared +# across all connections. (If outcome of final tx is specified as true +# or false, then only the last tx is tested against outcome.) 
+ +class TestInstance(object): + def __init__(self, objects=[], sync_every_block=True, sync_every_tx=False): + self.blocks_and_transactions = objects + self.sync_every_block = sync_every_block + self.sync_every_tx = sync_every_tx + +class TestManager(object): + + def __init__(self, testgen, datadir): + self.test_generator = testgen + self.connections = [] + self.block_store = BlockStore(datadir) + self.tx_store = TxStore(datadir) + self.ping_counter = 1 + + def add_all_connections(self, nodes): + for i in range(len(nodes)): + # Create a p2p connection to each node + self.connections.append(NodeConn('127.0.0.1', p2p_port(i), + nodes[i], TestNode(self.block_store, self.tx_store))) + # Make sure the TestNode (callback class) has a reference to its + # associated NodeConn + self.connections[-1].cb.add_connection(self.connections[-1]) + + def wait_for_verack(self): + sleep_time = 0.05 + max_tries = 10 / sleep_time # Wait at most 10 seconds + while max_tries > 0: + done = True + for c in self.connections: + if c.cb.verack_received is False: + done = False + break + if done: + break + time.sleep(sleep_time) + + def wait_for_pings(self, counter): + received_pongs = False + while received_pongs is not True: + time.sleep(0.05) + received_pongs = True + for c in self.connections: + if c.cb.received_ping_response(counter) is not True: + received_pongs = False + break + + # sync_blocks: Wait for all connections to request the blockhash given + # then send get_headers to find out the tip of each node, and synchronize + # the response by using a ping (and waiting for pong with same nonce). 
+ def sync_blocks(self, blockhash, num_blocks): + # Wait for nodes to request block (50ms sleep * 20 tries * num_blocks) + max_tries = 20*num_blocks + while max_tries > 0: + results = [ blockhash in c.cb.block_request_map and + c.cb.block_request_map[blockhash] for c in self.connections ] + if False not in results: + break + time.sleep(0.05) + max_tries -= 1 + + # --> error if not requested + if max_tries == 0: + # print [ c.cb.block_request_map for c in self.connections ] + raise AssertionError("Not all nodes requested block") + # --> Answer request (we did this inline!) + + # Send getheaders message + [ c.cb.send_getheaders() for c in self.connections ] + + # Send ping and wait for response -- synchronization hack + [ c.cb.send_ping(self.ping_counter) for c in self.connections ] + self.wait_for_pings(self.ping_counter) + self.ping_counter += 1 + + # Analogous to sync_block (see above) + def sync_transaction(self, txhash, num_events): + # Wait for nodes to request transaction (50ms sleep * 20 tries * num_events) + max_tries = 20*num_events + while max_tries > 0: + results = [ txhash in c.cb.tx_request_map and + c.cb.tx_request_map[txhash] for c in self.connections ] + if False not in results: + break + time.sleep(0.05) + max_tries -= 1 + + # --> error if not requested + if max_tries == 0: + # print [ c.cb.tx_request_map for c in self.connections ] + raise AssertionError("Not all nodes requested transaction") + # --> Answer request (we did this inline!) 
+ + # Get the mempool + [ c.cb.send_mempool() for c in self.connections ] + + # Send ping and wait for response -- synchronization hack + [ c.cb.send_ping(self.ping_counter) for c in self.connections ] + self.wait_for_pings(self.ping_counter) + self.ping_counter += 1 + + # Sort inv responses from each node + [ c.cb.lastInv.sort() for c in self.connections ] + + # Verify that the tip of each connection all agree with each other, and + # with the expected outcome (if given) + def check_results(self, blockhash, outcome): + for c in self.connections: + if outcome is None: + if c.cb.bestblockhash != self.connections[0].cb.bestblockhash: + return False + elif ((c.cb.bestblockhash == blockhash) != outcome): + # print c.cb.bestblockhash, blockhash, outcome + return False + return True + + # Either check that the mempools all agree with each other, or that + # txhash's presence in the mempool matches the outcome specified. + # This is somewhat of a strange comparison, in that we're either comparing + # a particular tx to an outcome, or the entire mempools altogether; + # perhaps it would be useful to add the ability to check explicitly that + # a particular tx's existence in the mempool is the same across all nodes. + def check_mempool(self, txhash, outcome): + for c in self.connections: + if outcome is None: + # Make sure the mempools agree with each other + if c.cb.lastInv != self.connections[0].cb.lastInv: + # print c.rpc.getrawmempool() + return False + elif ((txhash in c.cb.lastInv) != outcome): + # print c.rpc.getrawmempool(), c.cb.lastInv + return False + return True + + def run(self): + # Wait until verack is received + self.wait_for_verack() + + test_number = 1 + for test_instance in self.test_generator.get_tests(): + # We use these variables to keep track of the last block + # and last transaction in the tests, which are used + # if we're not syncing on every block or every tx. 
+ [ block, block_outcome ] = [ None, None ] + [ tx, tx_outcome ] = [ None, None ] + invqueue = [] + + for b_or_t, outcome in test_instance.blocks_and_transactions: + # Determine if we're dealing with a block or tx + if isinstance(b_or_t, CBlock): # Block test runner + block = b_or_t + block_outcome = outcome + # Add to shared block_store, set as current block + self.block_store.add_block(block) + for c in self.connections: + c.cb.block_request_map[block.sha256] = False + # Either send inv's to each node and sync, or add + # to invqueue for later inv'ing. + if (test_instance.sync_every_block): + [ c.cb.send_inv(block) for c in self.connections ] + self.sync_blocks(block.sha256, 1) + if (not self.check_results(block.sha256, outcome)): + raise AssertionError("Test failed at test %d" % test_number) + else: + invqueue.append(CInv(2, block.sha256)) + else: # Tx test runner + assert(isinstance(b_or_t, CTransaction)) + tx = b_or_t + tx_outcome = outcome + # Add to shared tx store + self.tx_store.add_transaction(tx) + for c in self.connections: + c.cb.tx_request_map[tx.sha256] = False + # Again, either inv to all nodes or save for later + if (test_instance.sync_every_tx): + [ c.cb.send_inv(tx) for c in self.connections ] + self.sync_transaction(tx.sha256, 1) + if (not self.check_mempool(tx.sha256, outcome)): + raise AssertionError("Test failed at test %d" % test_number) + else: + invqueue.append(CInv(1, tx.sha256)) + # Ensure we're not overflowing the inv queue + if len(invqueue) == MAX_INV_SZ: + [ c.sb.send_message(msg_inv(invqueue)) for c in self.connections ] + invqueue = [] + + # Do final sync if we weren't syncing on every block or every tx. 
+ if (not test_instance.sync_every_block and block is not None): + if len(invqueue) > 0: + [ c.send_message(msg_inv(invqueue)) for c in self.connections ] + invqueue = [] + self.sync_blocks(block.sha256, + len(test_instance.blocks_and_transactions)) + if (not self.check_results(block.sha256, block_outcome)): + raise AssertionError("Block test failed at test %d" % test_number) + if (not test_instance.sync_every_tx and tx is not None): + if len(invqueue) > 0: + [ c.send_message(msg_inv(invqueue)) for c in self.connections ] + invqueue = [] + self.sync_transaction(tx.sha256, len(test_instance.blocks_and_transactions)) + if (not self.check_mempool(tx.sha256, tx_outcome)): + raise AssertionError("Mempool test failed at test %d" % test_number) + + print "Test %d: PASS" % test_number, [ c.rpc.getblockcount() for c in self.connections ] + test_number += 1 + + self.block_store.close() + self.tx_store.close() + [ c.disconnect_node() for c in self.connections ] diff --git a/qa/rpc-tests/invalidblockrequest.py b/qa/rpc-tests/invalidblockrequest.py new file mode 100755 index 0000000000..8b685ed9b2 --- /dev/null +++ b/qa/rpc-tests/invalidblockrequest.py @@ -0,0 +1,115 @@ +#!/usr/bin/env python2 +# +# Distributed under the MIT/X11 software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +# + +from test_framework import ComparisonTestFramework +from util import * +from comptool import TestManager, TestInstance +from mininode import * +from blocktools import * +import logging +import copy +import time + + +''' +In this test we connect to one node over p2p, and test block requests: +1) Valid blocks should be requested and become chain tip. +2) Invalid block with duplicated transaction should be re-requested. +3) Invalid block with bad coinbase value should be rejected and not +re-requested. +''' + +# Use the ComparisonTestFramework with 1 node: only use --testbinary. 
+class InvalidBlockRequestTest(ComparisonTestFramework): + + ''' Can either run this test as 1 node with expected answers, or two and compare them. + Change the "outcome" variable from each TestInstance object to only do the comparison. ''' + def __init__(self): + self.num_nodes = 1 + + def run_test(self): + test = TestManager(self, self.options.tmpdir) + test.add_all_connections(self.nodes) + self.tip = None + self.block_time = None + NetworkThread().start() # Start up network handling in another thread + test.run() + + def get_tests(self): + if self.tip is None: + self.tip = int ("0x" + self.nodes[0].getbestblockhash() + "L", 0) + self.block_time = int(time.time())+1 + + ''' + Create a new block with an anyone-can-spend coinbase + ''' + block = create_block(self.tip, create_coinbase(), self.block_time) + self.block_time += 1 + block.solve() + # Save the coinbase for later + self.block1 = block + self.tip = block.sha256 + yield TestInstance([[block, True]]) + + ''' + Now we need that block to mature so we can spend the coinbase. + ''' + test = TestInstance(sync_every_block=False) + for i in xrange(100): + block = create_block(self.tip, create_coinbase(), self.block_time) + block.solve() + self.tip = block.sha256 + self.block_time += 1 + test.blocks_and_transactions.append([block, True]) + yield test + + ''' + Now we use merkle-root malleability to generate an invalid block with + same blockheader. + Manufacture a block with 3 transactions (coinbase, spend of prior + coinbase, spend of that spend). Duplicate the 3rd transaction to + leave merkle root and blockheader unchanged but invalidate the block. 
+ ''' + block2 = create_block(self.tip, create_coinbase(), self.block_time) + self.block_time += 1 + + # chr(81) is OP_TRUE + tx1 = create_transaction(self.block1.vtx[0], 0, chr(81), 50*100000000) + tx2 = create_transaction(tx1, 0, chr(81), 50*100000000) + + block2.vtx.extend([tx1, tx2]) + block2.hashMerkleRoot = block2.calc_merkle_root() + block2.rehash() + block2.solve() + orig_hash = block2.sha256 + block2_orig = copy.deepcopy(block2) + + # Mutate block 2 + block2.vtx.append(tx2) + assert_equal(block2.hashMerkleRoot, block2.calc_merkle_root()) + assert_equal(orig_hash, block2.rehash()) + assert(block2_orig.vtx != block2.vtx) + + self.tip = block2.sha256 + yield TestInstance([[block2, False], [block2_orig, True]]) + + ''' + Make sure that a totally screwed up block is not valid. + ''' + block3 = create_block(self.tip, create_coinbase(), self.block_time) + self.block_time += 1 + block3.vtx[0].vout[0].nValue = 100*100000000 # Too high! + block3.vtx[0].sha256=None + block3.vtx[0].calc_sha256() + block3.hashMerkleRoot = block3.calc_merkle_root() + block3.rehash() + block3.solve() + + yield TestInstance([[block3, False]]) + + +if __name__ == '__main__': + InvalidBlockRequestTest().main() diff --git a/qa/rpc-tests/maxblocksinflight.py b/qa/rpc-tests/maxblocksinflight.py new file mode 100755 index 0000000000..94535822d8 --- /dev/null +++ b/qa/rpc-tests/maxblocksinflight.py @@ -0,0 +1,100 @@ +#!/usr/bin/env python2 +# +# Distributed under the MIT/X11 software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +# + +from mininode import * +from test_framework import BitcoinTestFramework +from util import * +import logging + +''' +In this test we connect to one node over p2p, send it numerous inv's, and +compare the resulting number of getdata requests to a max allowed value. We +test for exceeding 128 blocks in flight, which was the limit an 0.9 client will +reach. 
[0.10 clients shouldn't request more than 16 from a single peer.] +''' +MAX_REQUESTS = 128 + +class TestManager(NodeConnCB): + # set up NodeConnCB callbacks, overriding base class + def on_getdata(self, conn, message): + self.log.debug("got getdata %s" % repr(message)) + # Log the requests + for inv in message.inv: + if inv.hash not in self.blockReqCounts: + self.blockReqCounts[inv.hash] = 0 + self.blockReqCounts[inv.hash] += 1 + + def on_close(self, conn): + if not self.disconnectOkay: + raise EarlyDisconnectError(0) + + def __init__(self): + NodeConnCB.__init__(self) + self.log = logging.getLogger("BlockRelayTest") + self.create_callback_map() + + def add_new_connection(self, connection): + self.connection = connection + self.blockReqCounts = {} + self.disconnectOkay = False + + def run(self): + try: + fail = False + self.connection.rpc.generate(1) # Leave IBD + + numBlocksToGenerate = [ 8, 16, 128, 1024 ] + for count in range(len(numBlocksToGenerate)): + current_invs = [] + for i in range(numBlocksToGenerate[count]): + current_invs.append(CInv(2, random.randrange(0, 1<<256))) + if len(current_invs) >= 50000: + self.connection.send_message(msg_inv(current_invs)) + current_invs = [] + if len(current_invs) > 0: + self.connection.send_message(msg_inv(current_invs)) + + # Wait and see how many blocks were requested + time.sleep(2) + + total_requests = 0 + for key in self.blockReqCounts: + total_requests += self.blockReqCounts[key] + if self.blockReqCounts[key] > 1: + raise AssertionError("Error, test failed: block %064x requested more than once" % key) + if total_requests > MAX_REQUESTS: + raise AssertionError("Error, too many blocks (%d) requested" % total_requests) + print "Round %d: success (total requests: %d)" % (count, total_requests) + except AssertionError as e: + print "TEST FAILED: ", e.args + + self.disconnectOkay = True + self.connection.disconnect_node() + + +class MaxBlocksInFlightTest(BitcoinTestFramework): + def add_options(self, parser): + 
parser.add_option("--testbinary", dest="testbinary", + default=os.getenv("BITCOIND", "bitcoind"), + help="Binary to test max block requests behavior") + + def setup_chain(self): + print "Initializing test directory "+self.options.tmpdir + initialize_chain_clean(self.options.tmpdir, 1) + + def setup_network(self): + self.nodes = start_nodes(1, self.options.tmpdir, + extra_args=[['-debug', '-whitelist=127.0.0.1']], + binary=[self.options.testbinary]) + + def run_test(self): + test = TestManager() + test.add_new_connection(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], test)) + NetworkThread().start() # Start up network handling in another thread + test.run() + +if __name__ == '__main__': + MaxBlocksInFlightTest().main() diff --git a/qa/rpc-tests/mininode.py b/qa/rpc-tests/mininode.py new file mode 100755 index 0000000000..c5c1bcfbbe --- /dev/null +++ b/qa/rpc-tests/mininode.py @@ -0,0 +1,1247 @@ +# mininode.py - Bitcoin P2P network half-a-node +# +# Distributed under the MIT/X11 software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +# +# This python code was modified from ArtForz' public domain half-a-node, as +# found in the mini-node branch of http://github.com/jgarzik/pynode. 
#
# NodeConn: an object which manages p2p connectivity to a bitcoin node
# NodeConnCB: a base class that describes the interface for receiving
#             callbacks with network messages from a NodeConn
# CBlock, CTransaction, CBlockHeader, CTxIn, CTxOut, etc....:
#     data structures that should map to corresponding structures in
#     bitcoin/primitives
# msg_block, msg_tx, msg_headers, etc.:
#     data structures that represent network messages
# ser_*, deser_*: functions that handle serialization/deserialization


import struct
import socket
import asyncore
import binascii
import time
import sys
import random
import cStringIO
import hashlib
from threading import Lock
from threading import Thread
import logging
import copy

BIP0031_VERSION = 60000
MY_VERSION = 60001  # past bip-31 for ping/pong
MY_SUBVERSION = "/python-mininode-tester:0.0.1/"

# Maximum number of entries allowed in a single inv message.
MAX_INV_SZ = 50000

# Serialization/deserialization tools
def sha256(s):
    """Single SHA256 of s, returned as the raw 32-byte digest."""
    return hashlib.new('sha256', s).digest()


def hash256(s):
    """Bitcoin's double-SHA256 of s, returned as the raw 32-byte digest."""
    return sha256(sha256(s))


def deser_string(f):
    """Read a var-length string from file-like object f.

    The length prefix is a CompactSize: one byte, or a 253/254/255 marker
    byte followed by a 2/4/8-byte little-endian length.
    """
    nit = struct.unpack("<B", f.read(1))[0]
    if nit == 253:
        nit = struct.unpack("<H", f.read(2))[0]
    elif nit == 254:
        nit = struct.unpack("<I", f.read(4))[0]
    elif nit == 255:
        nit = struct.unpack("<Q", f.read(8))[0]
    return f.read(nit)


def ser_string(s):
    """Serialize s with a CompactSize length prefix (inverse of deser_string)."""
    if len(s) < 253:
        return chr(len(s)) + s
    elif len(s) < 0x10000:
        return chr(253) + struct.pack("<H", len(s)) + s
    elif len(s) < 0x100000000L:
        return chr(254) + struct.pack("<I", len(s)) + s
    return chr(255) + struct.pack("<Q", len(s)) + s


def deser_uint256(f):
    """Read a 256-bit integer serialized as 8 little-endian 32-bit words."""
    r = 0L
    for i in xrange(8):
        t = struct.unpack("<I", f.read(4))[0]
        r += t << (i * 32)
    return r


def ser_uint256(u):
    """Serialize a 256-bit integer as 8 little-endian 32-bit words."""
    rs = ""
    for i in xrange(8):
        rs += struct.pack("<I", u & 0xFFFFFFFFL)
        u >>= 32
    return rs


def uint256_from_str(s):
    """Interpret the first 32 bytes of s as a little-endian 256-bit integer."""
    r = 0L
    t = struct.unpack("<IIIIIIII", s[:32])
    for i in xrange(8):
        r += t[i] << (i * 32)
    return r
def uint256_from_compact(c):
    """Expand a compact-encoded target ("nBits") into the full 256-bit value.

    The top byte of c is the mantissa size in bytes; the low three bytes
    are the mantissa itself.
    """
    nbytes = (c >> 24) & 0xFF
    v = (c & 0xFFFFFF) << (8 * (nbytes - 3))
    return v


def deser_compact_size(f):
    """Read a CompactSize integer from file-like object f.

    One byte, or a 253/254/255 marker byte followed by a 2/4/8-byte
    little-endian value.  Shared by all deser_*_vector helpers below
    (previously this logic was duplicated in each of them).
    """
    nit = struct.unpack("<B", f.read(1))[0]
    if nit == 253:
        nit = struct.unpack("<H", f.read(2))[0]
    elif nit == 254:
        nit = struct.unpack("<I", f.read(4))[0]
    elif nit == 255:
        nit = struct.unpack("<Q", f.read(8))[0]
    return nit


def ser_compact_size(n):
    """Serialize n as a CompactSize integer (inverse of deser_compact_size)."""
    if n < 253:
        return chr(n)
    elif n < 0x10000:
        return chr(253) + struct.pack("<H", n)
    elif n < 0x100000000:
        return chr(254) + struct.pack("<I", n)
    return chr(255) + struct.pack("<Q", n)


def deser_vector(f, c):
    """Deserialize a CompactSize-prefixed vector of objects of class c.

    c() must be default-constructible and expose deserialize(f).
    """
    nit = deser_compact_size(f)
    r = []
    for _ in range(nit):
        t = c()
        t.deserialize(f)
        r.append(t)
    return r


def ser_vector(l):
    """Serialize a vector of objects that expose serialize()."""
    r = ser_compact_size(len(l))
    for i in l:
        r += i.serialize()
    return r


def deser_uint256_vector(f):
    """Deserialize a CompactSize-prefixed vector of 256-bit integers."""
    nit = deser_compact_size(f)
    r = []
    for _ in range(nit):
        r.append(deser_uint256(f))
    return r


def ser_uint256_vector(l):
    """Serialize a vector of 256-bit integers."""
    r = ser_compact_size(len(l))
    for i in l:
        r += ser_uint256(i)
    return r


def deser_string_vector(f):
    """Deserialize a CompactSize-prefixed vector of var-length strings."""
    nit = deser_compact_size(f)
    r = []
    for _ in range(nit):
        r.append(deser_string(f))
    return r


def ser_string_vector(l):
    """Serialize a vector of var-length strings."""
    r = ser_compact_size(len(l))
    for sv in l:
        r += ser_string(sv)
    return r


def deser_int_vector(f):
    """Deserialize a CompactSize-prefixed vector of 32-bit signed ints."""
    nit = deser_compact_size(f)
    r = []
    for _ in range(nit):
        r.append(struct.unpack("<i", f.read(4))[0])
    return r


def ser_int_vector(l):
    """Serialize a vector of 32-bit signed ints."""
    r = ser_compact_size(len(l))
    for i in l:
        r += struct.pack("<i", i)
    return r


# Objects that map to bitcoind objects, which can be serialized/deserialized

class CAddress(object):
    """A network address as carried in version/addr messages."""
    def __init__(self):
        self.nServices = 1
        # 12-byte prefix that turns the 4-byte IPv4 address into an
        # IPv4-mapped IPv6 address on the wire.
        self.pchReserved = "\x00" * 10 + "\xff" * 2
        self.ip = "0.0.0.0"
        self.port = 0

    def deserialize(self, f):
        self.nServices = struct.unpack("<Q", f.read(8))[0]
        self.pchReserved = f.read(12)
        self.ip = socket.inet_ntoa(f.read(4))
        self.port = struct.unpack(">H", f.read(2))[0]  # port is big-endian

    def serialize(self):
        r = ""
        r += struct.pack("<Q", self.nServices)
        r += self.pchReserved
        r += socket.inet_aton(self.ip)
        r += struct.pack(">H", self.port)
        return r

    def __repr__(self):
        return "CAddress(nServices=%i ip=%s port=%i)" % (self.nServices,
                                                         self.ip, self.port)


class CInv(object):
    """An inventory entry (type + hash) as used in inv/getdata messages."""
    typemap = {
        0: "Error",
        1: "TX",
        2: "Block"}

    def __init__(self, t=0, h=0):
        self.type = t
        self.hash = h

    def deserialize(self, f):
        self.type = struct.unpack("<i", f.read(4))[0]
        self.hash = deser_uint256(f)

    def serialize(self):
        r = ""
        r += struct.pack("<i", self.type)
        r += ser_uint256(self.hash)
        return r

    def __repr__(self):
        return "CInv(type=%s hash=%064x)" \
            % (self.typemap[self.type], self.hash)


class CBlockLocator(object):
    """A block locator: list of block hashes, used by getblocks/getheaders."""
    def __init__(self):
        self.nVersion = MY_VERSION
        self.vHave = []

    def deserialize(self, f):
        self.nVersion = struct.unpack("<i", f.read(4))[0]
        self.vHave = deser_uint256_vector(f)

    def serialize(self):
        r = ""
        r += struct.pack("<i", self.nVersion)
        r += ser_uint256_vector(self.vHave)
        return r

    def __repr__(self):
        return "CBlockLocator(nVersion=%i vHave=%s)" \
            % (self.nVersion, repr(self.vHave))


class COutPoint(object):
    """Reference to a transaction output: (txid, output index)."""
    def __init__(self, hash=0, n=0):
        self.hash = hash
        self.n = n

    def deserialize(self, f):
        self.hash = deser_uint256(f)
        self.n = struct.unpack("<I", f.read(4))[0]

    def serialize(self):
        r = ""
        r += ser_uint256(self.hash)
        r += struct.pack("<I", self.n)
        return r

    def __repr__(self):
        return "COutPoint(hash=%064x n=%i)" % (self.hash, self.n)


class CTxIn(object):
    """A transaction input: previous outpoint, scriptSig, and sequence."""
    def __init__(self, outpoint=None, scriptSig="", nSequence=0):
        if outpoint is None:
            self.prevout = COutPoint()
        else:
            self.prevout = outpoint
        self.scriptSig = scriptSig
        self.nSequence = nSequence

    def deserialize(self, f):
        self.prevout = COutPoint()
        self.prevout.deserialize(f)
        self.scriptSig = deser_string(f)
        self.nSequence = struct.unpack("<I", f.read(4))[0]

    def serialize(self):
        r = ""
        r += self.prevout.serialize()
        r += ser_string(self.scriptSig)
        r += struct.pack("<I", self.nSequence)
        return r

    def __repr__(self):
        return "CTxIn(prevout=%s scriptSig=%s nSequence=%i)" \
            % (repr(self.prevout), binascii.hexlify(self.scriptSig),
               self.nSequence)


class CTxOut(object):
    """A transaction output: value in satoshis and scriptPubKey."""
    def __init__(self, nValue=0, scriptPubKey=""):
        self.nValue = nValue
        self.scriptPubKey = scriptPubKey

    def deserialize(self, f):
        self.nValue = struct.unpack("<q", f.read(8))[0]
        self.scriptPubKey = deser_string(f)

    def serialize(self):
        r = ""
        r += struct.pack("<q", self.nValue)
        r += ser_string(self.scriptPubKey)
        return r

    def __repr__(self):
        return "CTxOut(nValue=%i.%08i scriptPubKey=%s)" \
            % (self.nValue // 100000000, self.nValue % 100000000,
               binascii.hexlify(self.scriptPubKey))
class CTransaction(object):
    """A transaction; mirrors bitcoind's CTransaction (bitcoin/primitives)."""
    def __init__(self, tx=None):
        # Copy-construct from tx when given.  Cached hashes always start
        # out cleared and are computed lazily by calc_sha256().
        if tx is None:
            self.nVersion = 1
            self.vin = []
            self.vout = []
            self.nLockTime = 0
            self.sha256 = None
            self.hash = None
        else:
            self.nVersion = tx.nVersion
            self.vin = copy.deepcopy(tx.vin)
            self.vout = copy.deepcopy(tx.vout)
            self.nLockTime = tx.nLockTime
            self.sha256 = None
            self.hash = None

    def deserialize(self, f):
        self.nVersion = struct.unpack("<i", f.read(4))[0]
        self.vin = deser_vector(f, CTxIn)
        self.vout = deser_vector(f, CTxOut)
        self.nLockTime = struct.unpack("<I", f.read(4))[0]
        # Invalidate the cached hashes; they no longer match the new contents.
        self.sha256 = None
        self.hash = None

    def serialize(self):
        r = ""
        r += struct.pack("<i", self.nVersion)
        r += ser_vector(self.vin)
        r += ser_vector(self.vout)
        r += struct.pack("<I", self.nLockTime)
        return r

    def rehash(self):
        """Force recomputation of the cached txid."""
        self.sha256 = None
        self.calc_sha256()

    def calc_sha256(self):
        # sha256 is the txid as an integer; hash is the hex txid in the
        # reversed (RPC display) byte order.  sha256 is cached, hash is
        # recomputed on every call.
        if self.sha256 is None:
            self.sha256 = uint256_from_str(hash256(self.serialize()))
        self.hash = hash256(self.serialize())[::-1].encode('hex_codec')

    def is_valid(self):
        """Range-check the outputs only; scripts/inputs are not validated."""
        self.calc_sha256()
        for tout in self.vout:
            if tout.nValue < 0 or tout.nValue > 21000000L * 100000000L:
                return False
        return True

    def __repr__(self):
        return "CTransaction(nVersion=%i vin=%s vout=%s nLockTime=%i)" \
            % (self.nVersion, repr(self.vin), repr(self.vout), self.nLockTime)


class CBlockHeader(object):
    """An 80-byte block header; mirrors bitcoind's CBlockHeader."""
    def __init__(self, header=None):
        if header is None:
            self.set_null()
        else:
            # Copy-construct and eagerly compute the header hash.
            self.nVersion = header.nVersion
            self.hashPrevBlock = header.hashPrevBlock
            self.hashMerkleRoot = header.hashMerkleRoot
            self.nTime = header.nTime
            self.nBits = header.nBits
            self.nNonce = header.nNonce
            self.sha256 = header.sha256
            self.hash = header.hash
            self.calc_sha256()

    def set_null(self):
        """Reset all fields to the null-header state."""
        self.nVersion = 1
        self.hashPrevBlock = 0
        self.hashMerkleRoot = 0
        self.nTime = 0
        self.nBits = 0
        self.nNonce = 0
        self.sha256 = None
        self.hash = None

    def deserialize(self, f):
        self.nVersion = struct.unpack("<i", f.read(4))[0]
        self.hashPrevBlock = deser_uint256(f)
        self.hashMerkleRoot = deser_uint256(f)
        self.nTime = struct.unpack("<I", f.read(4))[0]
        self.nBits = struct.unpack("<I", f.read(4))[0]
        self.nNonce = struct.unpack("<I", f.read(4))[0]
        self.sha256 = None
        self.hash = None

    def serialize(self):
        r = ""
        r += struct.pack("<i", self.nVersion)
        r += ser_uint256(self.hashPrevBlock)
        r += ser_uint256(self.hashMerkleRoot)
        r += struct.pack("<I", self.nTime)
        r += struct.pack("<I", self.nBits)
        r += struct.pack("<I", self.nNonce)
        return r

    def calc_sha256(self):
        # Hash only the 80 header bytes (not vtx, even for CBlock subclass).
        if self.sha256 is None:
            r = ""
            r += struct.pack("<i", self.nVersion)
            r += ser_uint256(self.hashPrevBlock)
            r += ser_uint256(self.hashMerkleRoot)
            r += struct.pack("<I", self.nTime)
            r += struct.pack("<I", self.nBits)
            r += struct.pack("<I", self.nNonce)
            self.sha256 = uint256_from_str(hash256(r))
            self.hash = hash256(r)[::-1].encode('hex_codec')

    def rehash(self):
        """Force recomputation of the header hash and return it."""
        self.sha256 = None
        self.calc_sha256()
        return self.sha256

    def __repr__(self):
        return "CBlockHeader(nVersion=%i hashPrevBlock=%064x hashMerkleRoot=%064x nTime=%s nBits=%08x nNonce=%08x)" \
            % (self.nVersion, self.hashPrevBlock, self.hashMerkleRoot,
               time.ctime(self.nTime), self.nBits, self.nNonce)


class CBlock(CBlockHeader):
    """A full block: header plus transaction list."""
    def __init__(self, header=None):
        super(CBlock, self).__init__(header)
        self.vtx = []

    def deserialize(self, f):
        super(CBlock, self).deserialize(f)
        self.vtx = deser_vector(f, CTransaction)

    def serialize(self):
        r = ""
        r += super(CBlock, self).serialize()
        r += ser_vector(self.vtx)
        return r

    def calc_merkle_root(self):
        """Compute the merkle root over vtx.

        NOTE(review): odd levels duplicate the last hash (the standard
        Bitcoin rule); an empty vtx would raise IndexError on hashes[0].
        """
        hashes = []
        for tx in self.vtx:
            tx.calc_sha256()
            hashes.append(ser_uint256(tx.sha256))
        while len(hashes) > 1:
            newhashes = []
            for i in xrange(0, len(hashes), 2):
                i2 = min(i+1, len(hashes)-1)
                newhashes.append(hash256(hashes[i] + hashes[i2]))
            hashes = newhashes
        return uint256_from_str(hashes[0])

    def is_valid(self):
        """Check proof-of-work, per-tx value ranges, and the merkle root."""
        self.calc_sha256()
        target = uint256_from_compact(self.nBits)
        if self.sha256 > target:
            return False
        for tx in self.vtx:
            if not tx.is_valid():
                return False
        if self.calc_merkle_root() != self.hashMerkleRoot:
            return False
        return True

    def solve(self):
        """Grind nNonce until the header hash meets the nBits target."""
        self.calc_sha256()
        target = uint256_from_compact(self.nBits)
        while self.sha256 > target:
            self.nNonce += 1
            self.rehash()

    def __repr__(self):
        return "CBlock(nVersion=%i hashPrevBlock=%064x hashMerkleRoot=%064x nTime=%s nBits=%08x nNonce=%08x vtx=%s)" \
            % (self.nVersion, self.hashPrevBlock, self.hashMerkleRoot,
               time.ctime(self.nTime), self.nBits, self.nNonce, repr(self.vtx))


class CUnsignedAlert(object):
    """The unsigned payload of a (legacy) network alert message."""
    def __init__(self):
        self.nVersion = 1
        self.nRelayUntil = 0
        self.nExpiration = 0
        self.nID = 0
        self.nCancel = 0
        self.setCancel = []
        self.nMinVer = 0
        self.nMaxVer = 0
        self.setSubVer = []
        self.nPriority = 0
        self.strComment = ""
        self.strStatusBar = ""
        self.strReserved = ""

    def deserialize(self, f):
        self.nVersion = struct.unpack("<i", f.read(4))[0]
        self.nRelayUntil = struct.unpack("<q", f.read(8))[0]
        self.nExpiration = struct.unpack("<q", f.read(8))[0]
        self.nID = struct.unpack("<i", f.read(4))[0]
        self.nCancel = struct.unpack("<i", f.read(4))[0]
        self.setCancel = deser_int_vector(f)
        self.nMinVer = struct.unpack("<i", f.read(4))[0]
        self.nMaxVer = struct.unpack("<i", f.read(4))[0]
        self.setSubVer = deser_string_vector(f)
        self.nPriority = struct.unpack("<i", f.read(4))[0]
        self.strComment = deser_string(f)
        self.strStatusBar = deser_string(f)
        self.strReserved = deser_string(f)

    def serialize(self):
        r = ""
        r += struct.pack("<i", self.nVersion)
        r += struct.pack("<q", self.nRelayUntil)
        r += struct.pack("<q", self.nExpiration)
        r += struct.pack("<i", self.nID)
        r += struct.pack("<i", self.nCancel)
        r += ser_int_vector(self.setCancel)
        r += struct.pack("<i", self.nMinVer)
        r += struct.pack("<i", self.nMaxVer)
        r += ser_string_vector(self.setSubVer)
        r += struct.pack("<i", self.nPriority)
        r += ser_string(self.strComment)
        r += ser_string(self.strStatusBar)
        r += ser_string(self.strReserved)
        return r

    def __repr__(self):
        return "CUnsignedAlert(nVersion %d, nRelayUntil %d, nExpiration %d, nID %d, nCancel %d, nMinVer %d, nMaxVer %d, nPriority %d, strComment %s, strStatusBar %s, strReserved %s)" \
            % (self.nVersion, self.nRelayUntil, self.nExpiration, self.nID,
               self.nCancel, self.nMinVer, self.nMaxVer, self.nPriority,
               self.strComment, self.strStatusBar, self.strReserved)


class CAlert(object):
    """A signed alert: serialized CUnsignedAlert payload plus signature."""
    def __init__(self):
        self.vchMsg = ""
        self.vchSig = ""

    def deserialize(self, f):
        self.vchMsg = deser_string(f)
        self.vchSig = deser_string(f)

    def serialize(self):
        r = ""
        r += ser_string(self.vchMsg)
        r += ser_string(self.vchSig)
        return r

    def __repr__(self):
        return "CAlert(vchMsg.sz %d, vchSig.sz %d)" \
            % (len(self.vchMsg), len(self.vchSig))


# Objects that correspond to messages on the wire
class msg_version(object):
    """The 'version' handshake message."""
    command = "version"

    def __init__(self):
        self.nVersion = MY_VERSION
        self.nServices = 1
        self.nTime = time.time()
        self.addrTo = CAddress()
        self.addrFrom = CAddress()
        self.nNonce = random.getrandbits(64)
        self.strSubVer = MY_SUBVERSION
        self.nStartingHeight = -1

    def deserialize(self, f):
        self.nVersion = struct.unpack("<i", f.read(4))[0]
        if self.nVersion == 10300:
            # Historical quirk: protocol 10300 is treated as 300.
            self.nVersion = 300
        self.nServices = struct.unpack("<Q", f.read(8))[0]
        self.nTime = struct.unpack("<q", f.read(8))[0]
        self.addrTo = CAddress()
        self.addrTo.deserialize(f)
        # Fields below were added incrementally in very old protocols;
        # absent fields are left as None.
        if self.nVersion >= 106:
            self.addrFrom = CAddress()
            self.addrFrom.deserialize(f)
            self.nNonce = struct.unpack("<Q", f.read(8))[0]
            self.strSubVer = deser_string(f)
            if self.nVersion >= 209:
                self.nStartingHeight = struct.unpack("<i", f.read(4))[0]
            else:
                self.nStartingHeight = None
        else:
            self.addrFrom = None
            self.nNonce = None
            self.strSubVer = None
            self.nStartingHeight = None

    def serialize(self):
        r = ""
        r += struct.pack("<i", self.nVersion)
        r += struct.pack("<Q", self.nServices)
        r += struct.pack("<q", self.nTime)
        r += self.addrTo.serialize()
        r += self.addrFrom.serialize()
        r += struct.pack("<Q", self.nNonce)
        r += ser_string(self.strSubVer)
        r += struct.pack("<i", self.nStartingHeight)
        return r

    def __repr__(self):
        return 'msg_version(nVersion=%i nServices=%i nTime=%s addrTo=%s addrFrom=%s nNonce=0x%016X strSubVer=%s nStartingHeight=%i)' \
            % (self.nVersion, self.nServices, time.ctime(self.nTime),
               repr(self.addrTo), repr(self.addrFrom), self.nNonce,
               self.strSubVer, self.nStartingHeight)
class msg_verack(object):
    """Empty 'verack' acknowledging a peer's version message."""
    command = "verack"

    def __init__(self):
        pass

    def deserialize(self, f):
        pass

    def serialize(self):
        return ""

    def __repr__(self):
        return "msg_verack()"


class msg_addr(object):
    """'addr' message: a vector of known peer addresses."""
    command = "addr"

    def __init__(self):
        self.addrs = []

    def deserialize(self, f):
        self.addrs = deser_vector(f, CAddress)

    def serialize(self):
        return ser_vector(self.addrs)

    def __repr__(self):
        return "msg_addr(addrs=%s)" % (repr(self.addrs))


class msg_alert(object):
    """'alert' message carrying one signed CAlert."""
    command = "alert"

    def __init__(self):
        self.alert = CAlert()

    def deserialize(self, f):
        self.alert = CAlert()
        self.alert.deserialize(f)

    def serialize(self):
        r = ""
        r += self.alert.serialize()
        return r

    def __repr__(self):
        return "msg_alert(alert=%s)" % (repr(self.alert), )


class msg_inv(object):
    """'inv' message: a vector of CInv inventory entries."""
    command = "inv"

    def __init__(self, inv=None):
        if inv is None:
            self.inv = []
        else:
            self.inv = inv

    def deserialize(self, f):
        self.inv = deser_vector(f, CInv)

    def serialize(self):
        return ser_vector(self.inv)

    def __repr__(self):
        return "msg_inv(inv=%s)" % (repr(self.inv))


class msg_getdata(object):
    """'getdata' message: request the items named in a vector of CInv."""
    command = "getdata"

    def __init__(self):
        self.inv = []

    def deserialize(self, f):
        self.inv = deser_vector(f, CInv)

    def serialize(self):
        return ser_vector(self.inv)

    def __repr__(self):
        return "msg_getdata(inv=%s)" % (repr(self.inv))


class msg_getblocks(object):
    """'getblocks': block locator plus an optional stopping hash (0 = none)."""
    command = "getblocks"

    def __init__(self):
        self.locator = CBlockLocator()
        self.hashstop = 0

    def deserialize(self, f):
        self.locator = CBlockLocator()
        self.locator.deserialize(f)
        self.hashstop = deser_uint256(f)

    def serialize(self):
        r = ""
        r += self.locator.serialize()
        r += ser_uint256(self.hashstop)
        return r

    def __repr__(self):
        return "msg_getblocks(locator=%s hashstop=%064x)" \
            % (repr(self.locator), self.hashstop)


class msg_tx(object):
    """'tx' message carrying one transaction."""
    command = "tx"

    def __init__(self, tx=None):
        # BUGFIX: the original signature was (self, tx=CTransaction()),
        # a mutable default evaluated once at def time -- every
        # default-constructed msg_tx shared ONE CTransaction, and
        # deserialize() clobbered it for all of them.  Construct a fresh
        # transaction per message (same pattern msg_block already uses).
        if tx is None:
            tx = CTransaction()
        self.tx = tx

    def deserialize(self, f):
        self.tx.deserialize(f)

    def serialize(self):
        return self.tx.serialize()

    def __repr__(self):
        return "msg_tx(tx=%s)" % (repr(self.tx))


class msg_block(object):
    """'block' message carrying one full block."""
    command = "block"

    def __init__(self, block=None):
        if block is None:
            self.block = CBlock()
        else:
            self.block = block

    def deserialize(self, f):
        self.block.deserialize(f)

    def serialize(self):
        return self.block.serialize()

    def __repr__(self):
        return "msg_block(block=%s)" % (repr(self.block))


class msg_getaddr(object):
    """Empty 'getaddr' message requesting peer addresses."""
    command = "getaddr"

    def __init__(self):
        pass

    def deserialize(self, f):
        pass

    def serialize(self):
        return ""

    def __repr__(self):
        return "msg_getaddr()"


class msg_ping_prebip31(object):
    """Pre-BIP31 'ping': no nonce payload."""
    command = "ping"

    def __init__(self):
        pass

    def deserialize(self, f):
        pass

    def serialize(self):
        return ""

    def __repr__(self):
        return "msg_ping() (pre-bip31)"


class msg_ping(object):
    """BIP31 'ping' carrying a 64-bit nonce."""
    command = "ping"

    def __init__(self, nonce=0):
        self.nonce = nonce

    def deserialize(self, f):
        self.nonce = struct.unpack("<Q", f.read(8))[0]

    def serialize(self):
        r = ""
        r += struct.pack("<Q", self.nonce)
        return r

    def __repr__(self):
        return "msg_ping(nonce=%08x)" % self.nonce


class msg_pong(object):
    """'pong' echoing a ping's 64-bit nonce."""
    command = "pong"

    def __init__(self, nonce=0):
        self.nonce = nonce

    def deserialize(self, f):
        self.nonce = struct.unpack("<Q", f.read(8))[0]

    def serialize(self):
        r = ""
        r += struct.pack("<Q", self.nonce)
        return r

    def __repr__(self):
        return "msg_pong(nonce=%08x)" % self.nonce


class msg_mempool(object):
    """Empty 'mempool' message requesting the node's mempool contents."""
    command = "mempool"

    def __init__(self):
        pass

    def deserialize(self, f):
        pass

    def serialize(self):
        return ""

    def __repr__(self):
        return "msg_mempool()"


# getheaders message has
# number of entries
# vector of hashes
# hash_stop (hash of last desired block header, 0 to get as many as possible)
class msg_getheaders(object):
    """'getheaders': block locator plus an optional stopping hash."""
    command = "getheaders"

    def __init__(self):
        self.locator = CBlockLocator()
        self.hashstop = 0

    def deserialize(self, f):
        self.locator = CBlockLocator()
        self.locator.deserialize(f)
        self.hashstop = deser_uint256(f)

    def serialize(self):
        r = ""
        r += self.locator.serialize()
        r += ser_uint256(self.hashstop)
        return r

    def __repr__(self):
        return "msg_getheaders(locator=%s, stop=%064x)" \
            % (repr(self.locator), self.hashstop)


# headers message has
# <count> <vector of block headers>
class msg_headers(object):
    """'headers' message: a vector of block headers."""
    command = "headers"

    def __init__(self):
        self.headers = []

    def deserialize(self, f):
        # comment in bitcoind indicates these should be deserialized as blocks
        blocks = deser_vector(f, CBlock)
        for x in blocks:
            self.headers.append(CBlockHeader(x))

    def serialize(self):
        blocks = [CBlock(x) for x in self.headers]
        return ser_vector(blocks)

    def __repr__(self):
        return "msg_headers(headers=%s)" % repr(self.headers)


class msg_reject(object):
    """'reject' message: rejected command, code, reason, optional hash."""
    command = "reject"

    def __init__(self):
        self.message = ""
        self.code = ""
        self.reason = ""
        self.data = 0

    def deserialize(self, f):
        self.message = deser_string(f)
        self.code = struct.unpack("<B", f.read(1))[0]
        self.reason = deser_string(f)
        # Only block/tx rejections carry the hash of the rejected object.
        if (self.message == "block" or self.message == "tx"):
            self.data = deser_uint256(f)

    def serialize(self):
        r = ser_string(self.message)
        r += struct.pack("<B", self.code)
        r += ser_string(self.reason)
        if (self.message == "block" or self.message == "tx"):
            r += ser_uint256(self.data)
        return r

    def __repr__(self):
        return "msg_reject: %s %d %s [%064x]" \
            % (self.message, self.code, self.reason, self.data)
self.reason = "" + self.data = 0L + + def deserialize(self, f): + self.message = deser_string(f) + self.code = struct.unpack("<B", f.read(1))[0] + self.reason = deser_string(f) + if (self.message == "block" or self.message == "tx"): + self.data = deser_uint256(f) + + def serialize(self): + r = ser_string(self.message) + r += struct.pack("<B", self.code) + r += ser_string(self.reason) + if (self.message == "block" or self.message == "tx"): + r += ser_uint256(self.data) + return r + + def __repr__(self): + return "msg_reject: %s %d %s [%064x]" \ + % (self.message, self.code, self.reason, self.data) + + +# This is what a callback should look like for NodeConn +# Reimplement the on_* functions to provide handling for events +class NodeConnCB(object): + def __init__(self): + # Acquire on all callbacks -- overkill for now since asyncore is + # single-threaded, but may be useful for synchronizing access to + # member variables in derived classes. + self.cbLock = Lock() + self.verack_received = False + + # Derived classes should call this function once to set the message map + # which associates the derived classes' functions to incoming messages + def create_callback_map(self): + self.cbmap = { + "version": self.on_version, + "verack": self.on_verack, + "addr": self.on_addr, + "alert": self.on_alert, + "inv": self.on_inv, + "getdata": self.on_getdata, + "getblocks": self.on_getblocks, + "tx": self.on_tx, + "block": self.on_block, + "getaddr": self.on_getaddr, + "ping": self.on_ping, + "pong": self.on_pong, + "headers": self.on_headers, + "getheaders": self.on_getheaders, + "reject": self.on_reject, + "mempool": self.on_mempool + } + + def deliver(self, conn, message): + with self.cbLock: + try: + self.cbmap[message.command](conn, message) + except: + print "ERROR delivering %s (%s)" % (repr(message), + sys.exc_info()[0]) + + def on_version(self, conn, message): + if message.nVersion >= 209: + conn.send_message(msg_verack()) + conn.ver_send = min(MY_VERSION, 
message.nVersion) + if message.nVersion < 209: + conn.ver_recv = conn.ver_send + + def on_verack(self, conn, message): + conn.ver_recv = conn.ver_send + self.verack_received = True + + def on_inv(self, conn, message): + want = msg_getdata() + for i in message.inv: + if i.type != 0: + want.inv.append(i) + if len(want.inv): + conn.send_message(want) + + def on_addr(self, conn, message): pass + def on_alert(self, conn, message): pass + def on_getdata(self, conn, message): pass + def on_getblocks(self, conn, message): pass + def on_tx(self, conn, message): pass + def on_block(self, conn, message): pass + def on_getaddr(self, conn, message): pass + def on_headers(self, conn, message): pass + def on_getheaders(self, conn, message): pass + def on_ping(self, conn, message): + if conn.ver_send > BIP0031_VERSION: + conn.send_message(msg_pong(message.nonce)) + def on_reject(self, conn, message): pass + def on_close(self, conn): pass + def on_mempool(self, conn): pass + def on_pong(self, conn, message): pass + + +# The actual NodeConn class +# This class provides an interface for a p2p connection to a specified node +class NodeConn(asyncore.dispatcher): + messagemap = { + "version": msg_version, + "verack": msg_verack, + "addr": msg_addr, + "alert": msg_alert, + "inv": msg_inv, + "getdata": msg_getdata, + "getblocks": msg_getblocks, + "tx": msg_tx, + "block": msg_block, + "getaddr": msg_getaddr, + "ping": msg_ping, + "pong": msg_pong, + "headers": msg_headers, + "getheaders": msg_getheaders, + "reject": msg_reject, + "mempool": msg_mempool + } + MAGIC_BYTES = { + "mainnet": "\xf9\xbe\xb4\xd9", # mainnet + "testnet3": "\x0b\x11\x09\x07", # testnet3 + "regtest": "\xfa\xbf\xb5\xda" # regtest + } + + def __init__(self, dstaddr, dstport, rpc, callback, net="regtest"): + asyncore.dispatcher.__init__(self) + self.log = logging.getLogger("NodeConn(%s:%d)" % (dstaddr, dstport)) + self.dstaddr = dstaddr + self.dstport = dstport + self.create_socket(socket.AF_INET, socket.SOCK_STREAM) + 
self.sendbuf = "" + self.recvbuf = "" + self.ver_send = 209 + self.ver_recv = 209 + self.last_sent = 0 + self.state = "connecting" + self.network = net + self.cb = callback + self.sendbufLock = Lock() # for protecting the sendbuffer + self.disconnect = False + + # stuff version msg into sendbuf + vt = msg_version() + vt.addrTo.ip = self.dstaddr + vt.addrTo.port = self.dstport + vt.addrFrom.ip = "0.0.0.0" + vt.addrFrom.port = 0 + self.send_message(vt, True) + print 'MiniNode: Connecting to Bitcoin Node IP # ' + dstaddr + ':' \ + + str(dstport) + + try: + self.connect((dstaddr, dstport)) + except: + self.handle_close() + self.rpc = rpc + + def show_debug_msg(self, msg): + self.log.debug(msg) + + def handle_connect(self): + self.show_debug_msg("MiniNode: Connected & Listening: \n") + self.state = "connected" + + def handle_close(self): + self.show_debug_msg("MiniNode: Closing Connection to %s:%d... " + % (self.dstaddr, self.dstport)) + self.state = "closed" + self.recvbuf = "" + self.sendbuf = "" + try: + self.close() + except: + pass + self.cb.on_close(self) + + def handle_read(self): + try: + t = self.recv(8192) + if len(t) > 0: + self.recvbuf += t + self.got_data() + except: + pass + + def readable(self): + return True + + def writable(self): + if self.disconnect: + self.handle_close() + return False + else: + self.sendbufLock.acquire() + length = len(self.sendbuf) + self.sendbufLock.release() + return (length > 0) + + def handle_write(self): + self.sendbufLock.acquire() + try: + sent = self.send(self.sendbuf) + except: + self.handle_close() + return + self.sendbuf = self.sendbuf[sent:] + self.sendbufLock.release() + + def got_data(self): + while True: + if len(self.recvbuf) < 4: + return + if self.recvbuf[:4] != self.MAGIC_BYTES[self.network]: + raise ValueError("got garbage %s" % repr(self.recvbuf)) + if self.ver_recv < 209: + if len(self.recvbuf) < 4 + 12 + 4: + return + command = self.recvbuf[4:4+12].split("\x00", 1)[0] + msglen = struct.unpack("<i", 
self.recvbuf[4+12:4+12+4])[0] + checksum = None + if len(self.recvbuf) < 4 + 12 + 4 + msglen: + return + msg = self.recvbuf[4+12+4:4+12+4+msglen] + self.recvbuf = self.recvbuf[4+12+4+msglen:] + else: + if len(self.recvbuf) < 4 + 12 + 4 + 4: + return + command = self.recvbuf[4:4+12].split("\x00", 1)[0] + msglen = struct.unpack("<i", self.recvbuf[4+12:4+12+4])[0] + checksum = self.recvbuf[4+12+4:4+12+4+4] + if len(self.recvbuf) < 4 + 12 + 4 + 4 + msglen: + return + msg = self.recvbuf[4+12+4+4:4+12+4+4+msglen] + th = sha256(msg) + h = sha256(th) + if checksum != h[:4]: + raise ValueError("got bad checksum " + repr(self.recvbuf)) + self.recvbuf = self.recvbuf[4+12+4+4+msglen:] + if command in self.messagemap: + f = cStringIO.StringIO(msg) + t = self.messagemap[command]() + t.deserialize(f) + self.got_message(t) + else: + self.show_debug_msg("Unknown command: '" + command + "' " + + repr(msg)) + + def send_message(self, message, pushbuf=False): + if self.state != "connected" and not pushbuf: + return + self.sendbufLock.acquire() + self.show_debug_msg("Send %s" % repr(message)) + command = message.command + data = message.serialize() + tmsg = self.MAGIC_BYTES[self.network] + tmsg += command + tmsg += "\x00" * (12 - len(command)) + tmsg += struct.pack("<I", len(data)) + if self.ver_send >= 209: + th = sha256(data) + h = sha256(th) + tmsg += h[:4] + tmsg += data + self.sendbuf += tmsg + self.last_sent = time.time() + self.sendbufLock.release() + + def got_message(self, message): + if message.command == "version": + if message.nVersion <= BIP0031_VERSION: + self.messagemap['ping'] = msg_ping_prebip31 + if self.last_sent + 30 * 60 < time.time(): + self.send_message(self.messagemap['ping']()) + self.show_debug_msg("Recv %s" % repr(message)) + self.cb.deliver(self, message) + + def disconnect_node(self): + self.disconnect = True + self.send_message(self.messagemap['ping']()) + + +class NetworkThread(Thread): + def run(self): + asyncore.loop(0.1, True) + + +# An exception we 
class EarlyDisconnectError(Exception):
    """Signals that a p2p or RPC connection may have dropped before the
    test completed, so results downstream cannot be trusted."""

    def __init__(self, value):
        # Arbitrary context describing what disconnected.
        self.value = value

    def __str__(self):
        return "{0!r}".format(self.value)
"""Decode a small integer opcode, returning an integer""" + if self == OP_0: + return 0 + + if not (self == OP_0 or OP_1 <= self <= OP_16): + raise ValueError('op %r is not an OP_N' % self) + + return int(self - OP_1+1) + + def is_small_int(self): + """Return true if the op pushes a small integer to the stack""" + if 0x51 <= self <= 0x60 or self == 0: + return True + else: + return False + + def __str__(self): + return repr(self) + + def __repr__(self): + if self in OPCODE_NAMES: + return OPCODE_NAMES[self] + else: + return 'CScriptOp(0x%x)' % self + + def __new__(cls, n): + try: + return _opcode_instances[n] + except IndexError: + assert len(_opcode_instances) == n + _opcode_instances.append(super(CScriptOp, cls).__new__(cls, n)) + return _opcode_instances[n] + +# Populate opcode instance table +for n in range(0xff+1): + CScriptOp(n) + + +# push value +OP_0 = CScriptOp(0x00) +OP_FALSE = OP_0 +OP_PUSHDATA1 = CScriptOp(0x4c) +OP_PUSHDATA2 = CScriptOp(0x4d) +OP_PUSHDATA4 = CScriptOp(0x4e) +OP_1NEGATE = CScriptOp(0x4f) +OP_RESERVED = CScriptOp(0x50) +OP_1 = CScriptOp(0x51) +OP_TRUE=OP_1 +OP_2 = CScriptOp(0x52) +OP_3 = CScriptOp(0x53) +OP_4 = CScriptOp(0x54) +OP_5 = CScriptOp(0x55) +OP_6 = CScriptOp(0x56) +OP_7 = CScriptOp(0x57) +OP_8 = CScriptOp(0x58) +OP_9 = CScriptOp(0x59) +OP_10 = CScriptOp(0x5a) +OP_11 = CScriptOp(0x5b) +OP_12 = CScriptOp(0x5c) +OP_13 = CScriptOp(0x5d) +OP_14 = CScriptOp(0x5e) +OP_15 = CScriptOp(0x5f) +OP_16 = CScriptOp(0x60) + +# control +OP_NOP = CScriptOp(0x61) +OP_VER = CScriptOp(0x62) +OP_IF = CScriptOp(0x63) +OP_NOTIF = CScriptOp(0x64) +OP_VERIF = CScriptOp(0x65) +OP_VERNOTIF = CScriptOp(0x66) +OP_ELSE = CScriptOp(0x67) +OP_ENDIF = CScriptOp(0x68) +OP_VERIFY = CScriptOp(0x69) +OP_RETURN = CScriptOp(0x6a) + +# stack ops +OP_TOALTSTACK = CScriptOp(0x6b) +OP_FROMALTSTACK = CScriptOp(0x6c) +OP_2DROP = CScriptOp(0x6d) +OP_2DUP = CScriptOp(0x6e) +OP_3DUP = CScriptOp(0x6f) +OP_2OVER = CScriptOp(0x70) +OP_2ROT = CScriptOp(0x71) +OP_2SWAP = 
CScriptOp(0x72) +OP_IFDUP = CScriptOp(0x73) +OP_DEPTH = CScriptOp(0x74) +OP_DROP = CScriptOp(0x75) +OP_DUP = CScriptOp(0x76) +OP_NIP = CScriptOp(0x77) +OP_OVER = CScriptOp(0x78) +OP_PICK = CScriptOp(0x79) +OP_ROLL = CScriptOp(0x7a) +OP_ROT = CScriptOp(0x7b) +OP_SWAP = CScriptOp(0x7c) +OP_TUCK = CScriptOp(0x7d) + +# splice ops +OP_CAT = CScriptOp(0x7e) +OP_SUBSTR = CScriptOp(0x7f) +OP_LEFT = CScriptOp(0x80) +OP_RIGHT = CScriptOp(0x81) +OP_SIZE = CScriptOp(0x82) + +# bit logic +OP_INVERT = CScriptOp(0x83) +OP_AND = CScriptOp(0x84) +OP_OR = CScriptOp(0x85) +OP_XOR = CScriptOp(0x86) +OP_EQUAL = CScriptOp(0x87) +OP_EQUALVERIFY = CScriptOp(0x88) +OP_RESERVED1 = CScriptOp(0x89) +OP_RESERVED2 = CScriptOp(0x8a) + +# numeric +OP_1ADD = CScriptOp(0x8b) +OP_1SUB = CScriptOp(0x8c) +OP_2MUL = CScriptOp(0x8d) +OP_2DIV = CScriptOp(0x8e) +OP_NEGATE = CScriptOp(0x8f) +OP_ABS = CScriptOp(0x90) +OP_NOT = CScriptOp(0x91) +OP_0NOTEQUAL = CScriptOp(0x92) + +OP_ADD = CScriptOp(0x93) +OP_SUB = CScriptOp(0x94) +OP_MUL = CScriptOp(0x95) +OP_DIV = CScriptOp(0x96) +OP_MOD = CScriptOp(0x97) +OP_LSHIFT = CScriptOp(0x98) +OP_RSHIFT = CScriptOp(0x99) + +OP_BOOLAND = CScriptOp(0x9a) +OP_BOOLOR = CScriptOp(0x9b) +OP_NUMEQUAL = CScriptOp(0x9c) +OP_NUMEQUALVERIFY = CScriptOp(0x9d) +OP_NUMNOTEQUAL = CScriptOp(0x9e) +OP_LESSTHAN = CScriptOp(0x9f) +OP_GREATERTHAN = CScriptOp(0xa0) +OP_LESSTHANOREQUAL = CScriptOp(0xa1) +OP_GREATERTHANOREQUAL = CScriptOp(0xa2) +OP_MIN = CScriptOp(0xa3) +OP_MAX = CScriptOp(0xa4) + +OP_WITHIN = CScriptOp(0xa5) + +# crypto +OP_RIPEMD160 = CScriptOp(0xa6) +OP_SHA1 = CScriptOp(0xa7) +OP_SHA256 = CScriptOp(0xa8) +OP_HASH160 = CScriptOp(0xa9) +OP_HASH256 = CScriptOp(0xaa) +OP_CODESEPARATOR = CScriptOp(0xab) +OP_CHECKSIG = CScriptOp(0xac) +OP_CHECKSIGVERIFY = CScriptOp(0xad) +OP_CHECKMULTISIG = CScriptOp(0xae) +OP_CHECKMULTISIGVERIFY = CScriptOp(0xaf) + +# expansion +OP_NOP1 = CScriptOp(0xb0) +OP_NOP2 = CScriptOp(0xb1) +OP_NOP3 = CScriptOp(0xb2) +OP_NOP4 = CScriptOp(0xb3) +OP_NOP5 
= CScriptOp(0xb4) +OP_NOP6 = CScriptOp(0xb5) +OP_NOP7 = CScriptOp(0xb6) +OP_NOP8 = CScriptOp(0xb7) +OP_NOP9 = CScriptOp(0xb8) +OP_NOP10 = CScriptOp(0xb9) + +# template matching params +OP_SMALLINTEGER = CScriptOp(0xfa) +OP_PUBKEYS = CScriptOp(0xfb) +OP_PUBKEYHASH = CScriptOp(0xfd) +OP_PUBKEY = CScriptOp(0xfe) + +OP_INVALIDOPCODE = CScriptOp(0xff) + +VALID_OPCODES = { + OP_1NEGATE, + OP_RESERVED, + OP_1, + OP_2, + OP_3, + OP_4, + OP_5, + OP_6, + OP_7, + OP_8, + OP_9, + OP_10, + OP_11, + OP_12, + OP_13, + OP_14, + OP_15, + OP_16, + + OP_NOP, + OP_VER, + OP_IF, + OP_NOTIF, + OP_VERIF, + OP_VERNOTIF, + OP_ELSE, + OP_ENDIF, + OP_VERIFY, + OP_RETURN, + + OP_TOALTSTACK, + OP_FROMALTSTACK, + OP_2DROP, + OP_2DUP, + OP_3DUP, + OP_2OVER, + OP_2ROT, + OP_2SWAP, + OP_IFDUP, + OP_DEPTH, + OP_DROP, + OP_DUP, + OP_NIP, + OP_OVER, + OP_PICK, + OP_ROLL, + OP_ROT, + OP_SWAP, + OP_TUCK, + + OP_CAT, + OP_SUBSTR, + OP_LEFT, + OP_RIGHT, + OP_SIZE, + + OP_INVERT, + OP_AND, + OP_OR, + OP_XOR, + OP_EQUAL, + OP_EQUALVERIFY, + OP_RESERVED1, + OP_RESERVED2, + + OP_1ADD, + OP_1SUB, + OP_2MUL, + OP_2DIV, + OP_NEGATE, + OP_ABS, + OP_NOT, + OP_0NOTEQUAL, + + OP_ADD, + OP_SUB, + OP_MUL, + OP_DIV, + OP_MOD, + OP_LSHIFT, + OP_RSHIFT, + + OP_BOOLAND, + OP_BOOLOR, + OP_NUMEQUAL, + OP_NUMEQUALVERIFY, + OP_NUMNOTEQUAL, + OP_LESSTHAN, + OP_GREATERTHAN, + OP_LESSTHANOREQUAL, + OP_GREATERTHANOREQUAL, + OP_MIN, + OP_MAX, + + OP_WITHIN, + + OP_RIPEMD160, + OP_SHA1, + OP_SHA256, + OP_HASH160, + OP_HASH256, + OP_CODESEPARATOR, + OP_CHECKSIG, + OP_CHECKSIGVERIFY, + OP_CHECKMULTISIG, + OP_CHECKMULTISIGVERIFY, + + OP_NOP1, + OP_NOP2, + OP_NOP3, + OP_NOP4, + OP_NOP5, + OP_NOP6, + OP_NOP7, + OP_NOP8, + OP_NOP9, + OP_NOP10, + + OP_SMALLINTEGER, + OP_PUBKEYS, + OP_PUBKEYHASH, + OP_PUBKEY, +} + +OPCODE_NAMES.update({ + OP_0 : 'OP_0', + OP_PUSHDATA1 : 'OP_PUSHDATA1', + OP_PUSHDATA2 : 'OP_PUSHDATA2', + OP_PUSHDATA4 : 'OP_PUSHDATA4', + OP_1NEGATE : 'OP_1NEGATE', + OP_RESERVED : 'OP_RESERVED', + OP_1 : 'OP_1', + OP_2 : 
'OP_2', + OP_3 : 'OP_3', + OP_4 : 'OP_4', + OP_5 : 'OP_5', + OP_6 : 'OP_6', + OP_7 : 'OP_7', + OP_8 : 'OP_8', + OP_9 : 'OP_9', + OP_10 : 'OP_10', + OP_11 : 'OP_11', + OP_12 : 'OP_12', + OP_13 : 'OP_13', + OP_14 : 'OP_14', + OP_15 : 'OP_15', + OP_16 : 'OP_16', + OP_NOP : 'OP_NOP', + OP_VER : 'OP_VER', + OP_IF : 'OP_IF', + OP_NOTIF : 'OP_NOTIF', + OP_VERIF : 'OP_VERIF', + OP_VERNOTIF : 'OP_VERNOTIF', + OP_ELSE : 'OP_ELSE', + OP_ENDIF : 'OP_ENDIF', + OP_VERIFY : 'OP_VERIFY', + OP_RETURN : 'OP_RETURN', + OP_TOALTSTACK : 'OP_TOALTSTACK', + OP_FROMALTSTACK : 'OP_FROMALTSTACK', + OP_2DROP : 'OP_2DROP', + OP_2DUP : 'OP_2DUP', + OP_3DUP : 'OP_3DUP', + OP_2OVER : 'OP_2OVER', + OP_2ROT : 'OP_2ROT', + OP_2SWAP : 'OP_2SWAP', + OP_IFDUP : 'OP_IFDUP', + OP_DEPTH : 'OP_DEPTH', + OP_DROP : 'OP_DROP', + OP_DUP : 'OP_DUP', + OP_NIP : 'OP_NIP', + OP_OVER : 'OP_OVER', + OP_PICK : 'OP_PICK', + OP_ROLL : 'OP_ROLL', + OP_ROT : 'OP_ROT', + OP_SWAP : 'OP_SWAP', + OP_TUCK : 'OP_TUCK', + OP_CAT : 'OP_CAT', + OP_SUBSTR : 'OP_SUBSTR', + OP_LEFT : 'OP_LEFT', + OP_RIGHT : 'OP_RIGHT', + OP_SIZE : 'OP_SIZE', + OP_INVERT : 'OP_INVERT', + OP_AND : 'OP_AND', + OP_OR : 'OP_OR', + OP_XOR : 'OP_XOR', + OP_EQUAL : 'OP_EQUAL', + OP_EQUALVERIFY : 'OP_EQUALVERIFY', + OP_RESERVED1 : 'OP_RESERVED1', + OP_RESERVED2 : 'OP_RESERVED2', + OP_1ADD : 'OP_1ADD', + OP_1SUB : 'OP_1SUB', + OP_2MUL : 'OP_2MUL', + OP_2DIV : 'OP_2DIV', + OP_NEGATE : 'OP_NEGATE', + OP_ABS : 'OP_ABS', + OP_NOT : 'OP_NOT', + OP_0NOTEQUAL : 'OP_0NOTEQUAL', + OP_ADD : 'OP_ADD', + OP_SUB : 'OP_SUB', + OP_MUL : 'OP_MUL', + OP_DIV : 'OP_DIV', + OP_MOD : 'OP_MOD', + OP_LSHIFT : 'OP_LSHIFT', + OP_RSHIFT : 'OP_RSHIFT', + OP_BOOLAND : 'OP_BOOLAND', + OP_BOOLOR : 'OP_BOOLOR', + OP_NUMEQUAL : 'OP_NUMEQUAL', + OP_NUMEQUALVERIFY : 'OP_NUMEQUALVERIFY', + OP_NUMNOTEQUAL : 'OP_NUMNOTEQUAL', + OP_LESSTHAN : 'OP_LESSTHAN', + OP_GREATERTHAN : 'OP_GREATERTHAN', + OP_LESSTHANOREQUAL : 'OP_LESSTHANOREQUAL', + OP_GREATERTHANOREQUAL : 'OP_GREATERTHANOREQUAL', + 
OP_MIN : 'OP_MIN', + OP_MAX : 'OP_MAX', + OP_WITHIN : 'OP_WITHIN', + OP_RIPEMD160 : 'OP_RIPEMD160', + OP_SHA1 : 'OP_SHA1', + OP_SHA256 : 'OP_SHA256', + OP_HASH160 : 'OP_HASH160', + OP_HASH256 : 'OP_HASH256', + OP_CODESEPARATOR : 'OP_CODESEPARATOR', + OP_CHECKSIG : 'OP_CHECKSIG', + OP_CHECKSIGVERIFY : 'OP_CHECKSIGVERIFY', + OP_CHECKMULTISIG : 'OP_CHECKMULTISIG', + OP_CHECKMULTISIGVERIFY : 'OP_CHECKMULTISIGVERIFY', + OP_NOP1 : 'OP_NOP1', + OP_NOP2 : 'OP_NOP2', + OP_NOP3 : 'OP_NOP3', + OP_NOP4 : 'OP_NOP4', + OP_NOP5 : 'OP_NOP5', + OP_NOP6 : 'OP_NOP6', + OP_NOP7 : 'OP_NOP7', + OP_NOP8 : 'OP_NOP8', + OP_NOP9 : 'OP_NOP9', + OP_NOP10 : 'OP_NOP10', + OP_SMALLINTEGER : 'OP_SMALLINTEGER', + OP_PUBKEYS : 'OP_PUBKEYS', + OP_PUBKEYHASH : 'OP_PUBKEYHASH', + OP_PUBKEY : 'OP_PUBKEY', + OP_INVALIDOPCODE : 'OP_INVALIDOPCODE', +}) + +OPCODES_BY_NAME = { + 'OP_0' : OP_0, + 'OP_PUSHDATA1' : OP_PUSHDATA1, + 'OP_PUSHDATA2' : OP_PUSHDATA2, + 'OP_PUSHDATA4' : OP_PUSHDATA4, + 'OP_1NEGATE' : OP_1NEGATE, + 'OP_RESERVED' : OP_RESERVED, + 'OP_1' : OP_1, + 'OP_2' : OP_2, + 'OP_3' : OP_3, + 'OP_4' : OP_4, + 'OP_5' : OP_5, + 'OP_6' : OP_6, + 'OP_7' : OP_7, + 'OP_8' : OP_8, + 'OP_9' : OP_9, + 'OP_10' : OP_10, + 'OP_11' : OP_11, + 'OP_12' : OP_12, + 'OP_13' : OP_13, + 'OP_14' : OP_14, + 'OP_15' : OP_15, + 'OP_16' : OP_16, + 'OP_NOP' : OP_NOP, + 'OP_VER' : OP_VER, + 'OP_IF' : OP_IF, + 'OP_NOTIF' : OP_NOTIF, + 'OP_VERIF' : OP_VERIF, + 'OP_VERNOTIF' : OP_VERNOTIF, + 'OP_ELSE' : OP_ELSE, + 'OP_ENDIF' : OP_ENDIF, + 'OP_VERIFY' : OP_VERIFY, + 'OP_RETURN' : OP_RETURN, + 'OP_TOALTSTACK' : OP_TOALTSTACK, + 'OP_FROMALTSTACK' : OP_FROMALTSTACK, + 'OP_2DROP' : OP_2DROP, + 'OP_2DUP' : OP_2DUP, + 'OP_3DUP' : OP_3DUP, + 'OP_2OVER' : OP_2OVER, + 'OP_2ROT' : OP_2ROT, + 'OP_2SWAP' : OP_2SWAP, + 'OP_IFDUP' : OP_IFDUP, + 'OP_DEPTH' : OP_DEPTH, + 'OP_DROP' : OP_DROP, + 'OP_DUP' : OP_DUP, + 'OP_NIP' : OP_NIP, + 'OP_OVER' : OP_OVER, + 'OP_PICK' : OP_PICK, + 'OP_ROLL' : OP_ROLL, + 'OP_ROT' : OP_ROT, + 'OP_SWAP' : OP_SWAP, 
+ 'OP_TUCK' : OP_TUCK, + 'OP_CAT' : OP_CAT, + 'OP_SUBSTR' : OP_SUBSTR, + 'OP_LEFT' : OP_LEFT, + 'OP_RIGHT' : OP_RIGHT, + 'OP_SIZE' : OP_SIZE, + 'OP_INVERT' : OP_INVERT, + 'OP_AND' : OP_AND, + 'OP_OR' : OP_OR, + 'OP_XOR' : OP_XOR, + 'OP_EQUAL' : OP_EQUAL, + 'OP_EQUALVERIFY' : OP_EQUALVERIFY, + 'OP_RESERVED1' : OP_RESERVED1, + 'OP_RESERVED2' : OP_RESERVED2, + 'OP_1ADD' : OP_1ADD, + 'OP_1SUB' : OP_1SUB, + 'OP_2MUL' : OP_2MUL, + 'OP_2DIV' : OP_2DIV, + 'OP_NEGATE' : OP_NEGATE, + 'OP_ABS' : OP_ABS, + 'OP_NOT' : OP_NOT, + 'OP_0NOTEQUAL' : OP_0NOTEQUAL, + 'OP_ADD' : OP_ADD, + 'OP_SUB' : OP_SUB, + 'OP_MUL' : OP_MUL, + 'OP_DIV' : OP_DIV, + 'OP_MOD' : OP_MOD, + 'OP_LSHIFT' : OP_LSHIFT, + 'OP_RSHIFT' : OP_RSHIFT, + 'OP_BOOLAND' : OP_BOOLAND, + 'OP_BOOLOR' : OP_BOOLOR, + 'OP_NUMEQUAL' : OP_NUMEQUAL, + 'OP_NUMEQUALVERIFY' : OP_NUMEQUALVERIFY, + 'OP_NUMNOTEQUAL' : OP_NUMNOTEQUAL, + 'OP_LESSTHAN' : OP_LESSTHAN, + 'OP_GREATERTHAN' : OP_GREATERTHAN, + 'OP_LESSTHANOREQUAL' : OP_LESSTHANOREQUAL, + 'OP_GREATERTHANOREQUAL' : OP_GREATERTHANOREQUAL, + 'OP_MIN' : OP_MIN, + 'OP_MAX' : OP_MAX, + 'OP_WITHIN' : OP_WITHIN, + 'OP_RIPEMD160' : OP_RIPEMD160, + 'OP_SHA1' : OP_SHA1, + 'OP_SHA256' : OP_SHA256, + 'OP_HASH160' : OP_HASH160, + 'OP_HASH256' : OP_HASH256, + 'OP_CODESEPARATOR' : OP_CODESEPARATOR, + 'OP_CHECKSIG' : OP_CHECKSIG, + 'OP_CHECKSIGVERIFY' : OP_CHECKSIGVERIFY, + 'OP_CHECKMULTISIG' : OP_CHECKMULTISIG, + 'OP_CHECKMULTISIGVERIFY' : OP_CHECKMULTISIGVERIFY, + 'OP_NOP1' : OP_NOP1, + 'OP_NOP2' : OP_NOP2, + 'OP_NOP3' : OP_NOP3, + 'OP_NOP4' : OP_NOP4, + 'OP_NOP5' : OP_NOP5, + 'OP_NOP6' : OP_NOP6, + 'OP_NOP7' : OP_NOP7, + 'OP_NOP8' : OP_NOP8, + 'OP_NOP9' : OP_NOP9, + 'OP_NOP10' : OP_NOP10, + 'OP_SMALLINTEGER' : OP_SMALLINTEGER, + 'OP_PUBKEYS' : OP_PUBKEYS, + 'OP_PUBKEYHASH' : OP_PUBKEYHASH, + 'OP_PUBKEY' : OP_PUBKEY, +} + +class CScriptInvalidError(Exception): + """Base class for CScript exceptions""" + pass + +class CScriptTruncatedPushDataError(CScriptInvalidError): + """Invalid pushdata 
# This is used, eg, for blockchain heights in coinbase scripts (bip34)
class CScriptNum(object):
    """Script integer (CScriptNum in the C++ codebase).

    Wraps an integer value; encode() serializes it as a script push:
    a one-byte length prefix followed by the little-endian
    sign-and-magnitude representation used by script arithmetic.
    """

    def __init__(self, d=0):
        # The integer value this script number represents.
        self.value = d

    @staticmethod
    def encode(obj):
        """Return the push encoding (length byte + payload) of obj.value.

        Little-endian magnitude with the sign carried in the high bit of
        the final byte; zero encodes as an empty push.
        """
        r = bytearray(0)
        if obj.value == 0:
            return bytes(r)
        neg = obj.value < 0
        absvalue = -obj.value if neg else obj.value
        while (absvalue):
            # Fix: append the raw integer byte.  The original appended
            # chr(...), which only works on Python 2 and defeats the
            # Python 3 compatibility shims this file otherwise maintains.
            r.append(absvalue & 0xff)
            absvalue >>= 8
        if r[-1] & 0x80:
            # High bit already set: add a sign byte so the value is not
            # misread as negative.
            r.append(0x80 if neg else 0)
        elif neg:
            # Otherwise fold the sign into the high bit of the last byte.
            r[-1] |= 0x80
        # struct.pack("B", ...) yields one length byte on both py2 and py3.
        return bytes(struct.pack("B", len(r)) + r)
    def raw_iter(self):
        """Raw iteration

        Yields tuples of (opcode, data, sop_idx) so that the different possible
        PUSHDATA encodings can be accurately distinguished, as well as
        determining the exact opcode byte indexes. (sop_idx)
        """
        i = 0
        while i < len(self):
            sop_idx = i  # byte index where this script operation starts
            opcode = bord(self[i])
            i += 1

            if opcode > OP_PUSHDATA4:
                # Plain (non-push) opcode: no associated data.
                yield (opcode, None, sop_idx)
            else:
                datasize = None
                pushdata_type = None
                if opcode < OP_PUSHDATA1:
                    # Direct push: the opcode byte itself is the data length.
                    pushdata_type = 'PUSHDATA(%d)' % opcode
                    datasize = opcode

                elif opcode == OP_PUSHDATA1:
                    # One-byte length follows the opcode.
                    pushdata_type = 'PUSHDATA1'
                    if i >= len(self):
                        raise CScriptInvalidError('PUSHDATA1: missing data length')
                    datasize = bord(self[i])
                    i += 1

                elif opcode == OP_PUSHDATA2:
                    # Two-byte little-endian length follows the opcode.
                    pushdata_type = 'PUSHDATA2'
                    if i + 1 >= len(self):
                        raise CScriptInvalidError('PUSHDATA2: missing data length')
                    datasize = bord(self[i]) + (bord(self[i+1]) << 8)
                    i += 2

                elif opcode == OP_PUSHDATA4:
                    # Four-byte little-endian length follows the opcode.
                    pushdata_type = 'PUSHDATA4'
                    if i + 3 >= len(self):
                        raise CScriptInvalidError('PUSHDATA4: missing data length')
                    datasize = bord(self[i]) + (bord(self[i+1]) << 8) + (bord(self[i+2]) << 16) + (bord(self[i+3]) << 24)
                    i += 4

                else:
                    assert False # shouldn't happen


                data = bytes(self[i:i+datasize])

                # Check for truncation
                if len(data) < datasize:
                    raise CScriptTruncatedPushDataError('%s: truncated data' % pushdata_type, data)

                i += datasize

                yield (opcode, data, sop_idx)
+ """ + for (opcode, data, sop_idx) in self.raw_iter(): + if data is not None: + yield data + else: + opcode = CScriptOp(opcode) + + if opcode.is_small_int(): + yield opcode.decode_op_n() + else: + yield CScriptOp(opcode) + + def __repr__(self): + # For Python3 compatibility add b before strings so testcases don't + # need to change + def _repr(o): + if isinstance(o, bytes): + return "x('%s')" % binascii.hexlify(o).decode('utf8') + else: + return repr(o) + + ops = [] + i = iter(self) + while True: + op = None + try: + op = _repr(next(i)) + except CScriptTruncatedPushDataError as err: + op = '%s...<ERROR: %s>' % (_repr(err.data), err) + break + except CScriptInvalidError as err: + op = '<ERROR: %s>' % err + break + except StopIteration: + break + finally: + if op is not None: + ops.append(op) + + return "CScript([%s])" % ', '.join(ops) + + def GetSigOpCount(self, fAccurate): + """Get the SigOp count. + + fAccurate - Accurately count CHECKMULTISIG, see BIP16 for details. + + Note that this is consensus-critical. 
def FindAndDelete(script, sig):
    """Consensus critical, see FindAndDelete() in Satoshi codebase

    Returns a new CScript equal to *script* with operations removed at the
    positions where the script bytes match *sig*.  The initial skip=True is
    copied from the reference implementation; do not "simplify" this logic.
    """
    r = b''
    last_sop_idx = sop_idx = 0
    skip = True
    for (opcode, data, sop_idx) in script.raw_iter():
        if not skip:
            # Previous operation did not match sig: keep its bytes.
            r += script[last_sop_idx:sop_idx]
        last_sop_idx = sop_idx
        # Decide whether the operation starting at this index matches sig
        # and should therefore be dropped on the next iteration.
        if script[sop_idx:sop_idx + len(sig)] == sig:
            skip = True
        else:
            skip = False
    if not skip:
        # Flush the final operation if it was not a match.
        r += script[last_sop_idx:]
    return CScript(r)
def SignatureHash(script, txTo, inIdx, hashtype):
    """Consensus-correct SignatureHash

    Returns (hash, err) to precisely match the consensus-critical behavior of
    the SIGHASH_SINGLE bug. (inIdx is *not* checked for validity)
    """
    # Value hashed when an index is out of range: the number 1 as a 32-byte
    # little-endian quantity (this reproduces the "SIGHASH_SINGLE bug").
    HASH_ONE = b'\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'

    if inIdx >= len(txTo.vin):
        return (HASH_ONE, "inIdx %d out of range (%d)" % (inIdx, len(txTo.vin)))
    # Work on a copy so the caller's transaction is left untouched.
    txtmp = CTransaction(txTo)

    # Blank all input scripts; the input being signed gets the subject
    # script with any OP_CODESEPARATOR occurrences removed.
    for txin in txtmp.vin:
        txin.scriptSig = b''
    txtmp.vin[inIdx].scriptSig = FindAndDelete(script, CScript([OP_CODESEPARATOR]))

    if (hashtype & 0x1f) == SIGHASH_NONE:
        # SIGHASH_NONE: commit to no outputs...
        txtmp.vout = []

        # ...and let other inputs' sequence numbers float.
        for i in range(len(txtmp.vin)):
            if i != inIdx:
                txtmp.vin[i].nSequence = 0

    elif (hashtype & 0x1f) == SIGHASH_SINGLE:
        # SIGHASH_SINGLE: commit only to the output whose index matches the
        # input being signed.
        outIdx = inIdx
        if outIdx >= len(txtmp.vout):
            return (HASH_ONE, "outIdx %d out of range (%d)" % (outIdx, len(txtmp.vout)))

        # Keep vout[outIdx]; earlier outputs become empty placeholder CTxOuts.
        tmp = txtmp.vout[outIdx]
        txtmp.vout = []
        for i in range(outIdx):
            txtmp.vout.append(CTxOut())
        txtmp.vout.append(tmp)

        # Other inputs' sequence numbers are not committed to.
        for i in range(len(txtmp.vin)):
            if i != inIdx:
                txtmp.vin[i].nSequence = 0

    if hashtype & SIGHASH_ANYONECANPAY:
        # Commit only to the input being signed.
        tmp = txtmp.vin[inIdx]
        txtmp.vin = []
        txtmp.vin.append(tmp)

    # Serialize the modified transaction with the hashtype appended, then
    # double-SHA256 the result.
    s = txtmp.serialize()
    s += struct.pack(b"<I", hashtype)

    hash = hash256(s)

    return (hash, None)
# Pass in a set of json files to open.
class ScriptTestFile(object):
    """Loads script test records from a list of .json files and iterates
    over the ones that look like actual tests (at least 3 fields)."""

    def __init__(self, files):
        # Paths of the json files to read in load_files().
        self.files = files
        # Iteration cursor into self.data, advanced by get_records().
        # Fix: start at 0, not -1.  Starting at -1 made the first loop
        # iteration inspect self.data[-1] (the *last* record), so that
        # record was yielded twice over a full pass.
        self.index = 0
        # Flattened list of records from all files.
        self.data = []

    def load_files(self):
        """Read every input file and concatenate its records into self.data."""
        for f in self.files:
            self.data.extend(json.loads(open(f).read()))

    # Skip over records that are not long enough to be tests
    def get_records(self):
        """Yield each record with at least 3 fields, in file order."""
        while (self.index < len(self.data)):
            if len(self.data[self.index]) >= 3:
                yield self.data[self.index]
            self.index += 1
'''
Given a string that is a scriptsig or scriptpubkey from the .json files above,
convert it to a CScript()
'''
# Replicates behavior from core_read.cpp
def ParseScript(json_script):
    """Convert a whitespace-separated test-vector script string to CScript.

    Returns "" (empty string, not an empty CScript) when a token cannot be
    parsed, after printing an error.
    """
    script = json_script.split(" ")
    parsed_script = CScript()
    for x in script:
        if len(x) == 0:
            # Empty string, ignore.
            pass
        elif x.isdigit() or (len(x) >= 1 and x[0] == "-" and x[1:].isdigit()):
            # Number
            n = int(x, 0)
            if (n == -1) or (n >= 1 and n <= 16):
                # Small values use the OP_1NEGATE / OP_N opcodes; appending
                # the raw bytes avoids CScript's pushdata coercion.
                parsed_script = CScript(bytes(parsed_script) + bytes(CScript([n])))
            else:
                # Larger values are pushed as CScriptNum-encoded data.
                parsed_script += CScriptNum(int(x, 0))
        elif x.startswith("0x"):
            # Raw hex data, inserted NOT pushed onto stack:
            for i in xrange(2, len(x), 2):
                parsed_script = CScript(bytes(parsed_script) + bytes(chr(int(x[i:i+2],16))))
        elif x.startswith("'") and x.endswith("'") and len(x) >= 2:
            # Single-quoted string, pushed as data.
            parsed_script += CScript([x[1:-1]])
        else:
            # opcode, e.g. OP_ADD or ADD:
            tryopname = "OP_" + x
            if tryopname in OPCODES_BY_NAME:
                parsed_script += CScriptOp(OPCODES_BY_NAME["OP_" + x])
            else:
                print "ParseScript: error parsing '%s'" % x
                return ""
    return parsed_script
class TestBuilder(object):
    """Builds the (credit, spend) transaction pair for one script test case."""

    def create_credit_tx(self, scriptPubKey):
        # self.tx1 is a coinbase transaction, modeled after the one created by script_tests.cpp
        # This allows us to reuse signatures created in the unit test framework.
        self.tx1 = create_coinbase() # this has a bip34 scriptsig,
        self.tx1.vin[0].scriptSig = CScript([0, 0]) # but this matches the unit tests
        self.tx1.vout[0].nValue = 0
        self.tx1.vout[0].scriptPubKey = scriptPubKey
        self.tx1.rehash()

    def create_spend_tx(self, scriptSig):
        # tx2 spends tx1's output 0 using the scriptSig under test; its own
        # output is empty and worthless.
        self.tx2 = create_transaction(self.tx1, 0, CScript(), 0)
        self.tx2.vin[0].scriptSig = scriptSig
        self.tx2.vout[0].scriptPubKey = CScript()
        self.tx2.rehash()

    def rehash(self):
        # Recompute cached hashes after any direct field mutation.
        self.tx1.rehash()
        self.tx2.rehash()
    # This generates the tests for TestManager.
    def get_tests(self):
        """Yield TestInstances: chain setup first, then one per script test."""
        self.tip = int ("0x" + self.nodes[0].getbestblockhash() + "L", 0)
        self.block_time = 1333230000 # before the BIP16 switchover

        '''
        Create a new block with an anyone-can-spend coinbase
        '''
        block = create_block(self.tip, create_coinbase(), self.block_time)
        self.block_time += 1
        block.solve()
        self.tip = block.sha256
        yield TestInstance(objects=[[block, True]])

        '''
        Build out to 100 blocks total, maturing the coinbase.
        '''
        test = TestInstance(objects=[], sync_every_block=False, sync_every_tx=False)
        for i in xrange(100):
            b = create_block(self.tip, create_coinbase(), self.block_time)
            b.solve()
            test.blocks_and_transactions.append([b, True])
            self.tip = b.sha256
            self.block_time += 1
        yield test

        ''' Iterate through script tests. '''
        counter = 0
        for script_test in self.scripts.get_records():
            ''' Reset the blockchain to genesis block + 100 blocks. '''
            if self.nodes[0].getblockcount() > 101:
                # Roll both nodes back so each test starts from height 101.
                self.nodes[0].invalidateblock(self.nodes[0].getblockhash(102))
                self.nodes[1].invalidateblock(self.nodes[1].getblockhash(102))

            self.tip = int ("0x" + self.nodes[0].getbestblockhash() + "L", 0)

            [scriptsig, scriptpubkey, flags] = script_test[0:3]
            flags = ParseScriptFlags(flags)

            # We can use block time to determine whether the nodes should be
            # enforcing BIP16.
            #
            # We intentionally let the block time grow by 1 each time.
            # This forces the block hashes to differ between tests, so that
            # a call to invalidateblock doesn't interfere with a later test.
            if (flags & SCRIPT_VERIFY_P2SH):
                self.block_time = 1333238400 + counter # Advance to enforcing BIP16
            else:
                self.block_time = 1333230000 + counter # Before the BIP16 switchover

            print "Script test: [%s]" % script_test

            yield self.generate_test_instance(scriptpubkey, scriptsig)
            counter += 1
# Test framework for doing p2p comparison testing, which sets up some bitcoind
# binaries:
# 1 binary: test binary
# 2 binaries: 1 test binary, 1 ref binary
# n>2 binaries: 1 test binary, n-1 ref binaries

class ComparisonTestFramework(BitcoinTestFramework):
    """Framework for tests that compare a test bitcoind against one or more
    reference bitcoinds over p2p (see comptool.py)."""

    # Can override the num_nodes variable to indicate how many nodes to run.
    def __init__(self):
        self.num_nodes = 2

    def add_options(self, parser):
        # Both options default to $BITCOIND so a single-binary run needs
        # no command-line flags.
        parser.add_option("--testbinary", dest="testbinary",
                          default=os.getenv("BITCOIND", "bitcoind"),
                          help="bitcoind binary to test")
        parser.add_option("--refbinary", dest="refbinary",
                          default=os.getenv("BITCOIND", "bitcoind"),
                          help="bitcoind binary to use for reference nodes (if any)")

    def setup_chain(self):
        print "Initializing test directory "+self.options.tmpdir
        initialize_chain_clean(self.options.tmpdir, self.num_nodes)

    def setup_network(self):
        # Node 0 runs the binary under test; all remaining nodes run the
        # reference binary.  Whitelist localhost so relaying is immediate.
        self.nodes = start_nodes(self.num_nodes, self.options.tmpdir,
                                 extra_args=[['-debug', '-whitelist=127.0.0.1']] * self.num_nodes,
                                 binary=[self.options.testbinary] +
                                        [self.options.refbinary]*(self.num_nodes-1))
args.append("-connect=127.0.0.1:"+str(p2p_port(0))) bitcoind_processes[i] = subprocess.Popen(args) + if os.getenv("PYTHON_DEBUG", ""): + print "initialize_chain: bitcoind started, calling bitcoin-cli -rpcwait getblockcount" subprocess.check_call([ os.getenv("BITCOINCLI", "bitcoin-cli"), "-datadir="+datadir, "-rpcwait", "getblockcount"], stdout=devnull) + if os.getenv("PYTHON_DEBUG", ""): + print "initialize_chain: bitcoin-cli -rpcwait getblockcount completed" devnull.close() rpcs = [] for i in range(4): @@ -158,18 +162,24 @@ def _rpchost_to_args(rpchost): rv += ['-rpcport=' + rpcport] return rv -def start_node(i, dirname, extra_args=None, rpchost=None, timewait=None): +def start_node(i, dirname, extra_args=None, rpchost=None, timewait=None, binary=None): """ Start a bitcoind and return RPC connection to it """ datadir = os.path.join(dirname, "node"+str(i)) - args = [ os.getenv("BITCOIND", "bitcoind"), "-datadir="+datadir, "-keypool=1", "-discover=0", "-rest" ] + if binary is None: + binary = os.getenv("BITCOIND", "bitcoind") + args = [ binary, "-datadir="+datadir, "-keypool=1", "-discover=0", "-rest" ] if extra_args is not None: args.extend(extra_args) bitcoind_processes[i] = subprocess.Popen(args) devnull = open("/dev/null", "w+") + if os.getenv("PYTHON_DEBUG", ""): + print "start_node: bitcoind started, calling bitcoin-cli -rpcwait getblockcount" subprocess.check_call([ os.getenv("BITCOINCLI", "bitcoin-cli"), "-datadir="+datadir] + _rpchost_to_args(rpchost) + ["-rpcwait", "getblockcount"], stdout=devnull) + if os.getenv("PYTHON_DEBUG", ""): + print "start_node: calling bitcoin-cli -rpcwait getblockcount returned" devnull.close() url = "http://rt:rt@%s:%d" % (rpchost or '127.0.0.1', rpc_port(i)) if timewait is not None: @@ -179,12 +189,13 @@ def start_node(i, dirname, extra_args=None, rpchost=None, timewait=None): proxy.url = url # store URL on proxy for info return proxy -def start_nodes(num_nodes, dirname, extra_args=None, rpchost=None): +def 
def start_nodes(num_nodes, dirname, extra_args=None, rpchost=None, binary=None):
    """
    Start multiple bitcoinds, return RPC connections to them
    """
    # Fill in per-node defaults when the caller did not supply them.
    if extra_args is None:
        extra_args = [None] * num_nodes
    if binary is None:
        binary = [None] * num_nodes
    nodes = []
    for i in range(num_nodes):
        nodes.append(start_node(i, dirname, extra_args[i], rpchost, binary=binary[i]))
    return nodes

def log_filename(dirname, n_node, logname):
    """Return the path to *logname* inside node n_node's regtest datadir."""
    node_dir = "node" + str(n_node)
    return os.path.join(dirname, node_dir, "regtest", logname)