Diffstat (limited to 'test/functional/test_framework')
-rw-r--r--  test/functional/test_framework/address.py        |   2
-rw-r--r--  test/functional/test_framework/authproxy.py      |  19
-rw-r--r--  test/functional/test_framework/bignum.py         |  58
-rw-r--r--  test/functional/test_framework/descriptors.py    |   9
-rw-r--r--  test/functional/test_framework/key.py            |  13
-rwxr-xr-x  test/functional/test_framework/messages.py       | 157
-rwxr-xr-x  test/functional/test_framework/mininode.py       | 111
-rw-r--r--  test/functional/test_framework/script.py         | 331
-rwxr-xr-x  test/functional/test_framework/script_util.py    |   1
-rwxr-xr-x  test/functional/test_framework/test_framework.py |  91
-rwxr-xr-x  test/functional/test_framework/test_node.py      | 195
-rw-r--r--  test/functional/test_framework/util.py            |  32
-rwxr-xr-x  test/functional/test_framework/wallet_util.py     |  23
13 files changed, 724 insertions(+), 318 deletions(-)
diff --git a/test/functional/test_framework/address.py b/test/functional/test_framework/address.py
index 6a7e91216a..8f410f233e 100644
--- a/test/functional/test_framework/address.py
+++ b/test/functional/test_framework/address.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2016-2019 The Bitcoin Core developers
+# Copyright (c) 2016-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Encode and decode BASE58, P2PKH and P2SH addresses."""
diff --git a/test/functional/test_framework/authproxy.py b/test/functional/test_framework/authproxy.py
index 4ba6ac1db2..05308931e3 100644
--- a/test/functional/test_framework/authproxy.py
+++ b/test/functional/test_framework/authproxy.py
@@ -101,23 +101,26 @@ class AuthServiceProxy():
if os.name == 'nt':
# Windows somehow does not like to re-use connections
# TODO: Find out why the connection would disconnect occasionally and make it reusable on Windows
+ # Avoid "ConnectionAbortedError: [WinError 10053] An established connection was aborted by the software in your host machine"
self._set_conn()
try:
self.__conn.request(method, path, postdata, headers)
return self._get_response()
- except http.client.BadStatusLine as e:
- if e.line == "''": # if connection was closed, try again
+ except (BrokenPipeError, ConnectionResetError):
+ # Python 3.5+ raises BrokenPipeError when the connection was reset
+ # ConnectionResetError happens on FreeBSD
+ self.__conn.close()
+ self.__conn.request(method, path, postdata, headers)
+ return self._get_response()
+ except OSError as e:
+ retry = (
+ '[WinError 10053] An established connection was aborted by the software in your host machine' in str(e))
+ if retry:
self.__conn.close()
self.__conn.request(method, path, postdata, headers)
return self._get_response()
else:
raise
- except (BrokenPipeError, ConnectionResetError):
- # Python 3.5+ raises BrokenPipeError instead of BadStatusLine when the connection was reset
- # ConnectionResetError happens on FreeBSD with Python 3.4
- self.__conn.close()
- self.__conn.request(method, path, postdata, headers)
- return self._get_response()
def get_request(self, *args, **argsn):
AuthServiceProxy.__id_count += 1
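The authproxy hunk above replaces the old BadStatusLine check with a single retry when the kept-alive RPC connection was torn down (BrokenPipeError, ConnectionResetError, or the Windows "WinError 10053" text). A minimal standalone sketch of the same retry pattern, outside the framework's AuthServiceProxy internals and against a hypothetical endpoint:

import http.client

def request_with_retry(conn, method, path, body, headers):
    """Issue an HTTP request, retrying once if the kept-alive socket was dropped
    by the peer (the pattern used in AuthServiceProxy above)."""
    try:
        conn.request(method, path, body, headers)
        return conn.getresponse()
    except (BrokenPipeError, ConnectionResetError):
        # Stale keep-alive connection: close it and retry once on a fresh socket.
        conn.close()
        conn.request(method, path, body, headers)
        return conn.getresponse()

# Usage sketch (hypothetical host/port):
# conn = http.client.HTTPConnection('127.0.0.1', 18443, timeout=30)
# resp = request_with_retry(conn, 'POST', '/', b'{}', {'Content-Type': 'application/json'})
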
diff --git a/test/functional/test_framework/bignum.py b/test/functional/test_framework/bignum.py
deleted file mode 100644
index db5ccd62c2..0000000000
--- a/test/functional/test_framework/bignum.py
+++ /dev/null
@@ -1,58 +0,0 @@
-#!/usr/bin/env python3
-#
-# Distributed under the MIT software license, see the accompanying
-# file COPYING or http://www.opensource.org/licenses/mit-license.php.
-"""Big number routines.
-
-This file is copied from python-bitcoinlib.
-"""
-
-import struct
-
-
-# generic big endian MPI format
-
-def bn_bytes(v, have_ext=False):
- ext = 0
- if have_ext:
- ext = 1
- return ((v.bit_length()+7)//8) + ext
-
-def bn2bin(v):
- s = bytearray()
- i = bn_bytes(v)
- while i > 0:
- s.append((v >> ((i-1) * 8)) & 0xff)
- i -= 1
- return s
-
-def bn2mpi(v):
- have_ext = False
- if v.bit_length() > 0:
- have_ext = (v.bit_length() & 0x07) == 0
-
- neg = False
- if v < 0:
- neg = True
- v = -v
-
- s = struct.pack(b">I", bn_bytes(v, have_ext))
- ext = bytearray()
- if have_ext:
- ext.append(0)
- v_bin = bn2bin(v)
- if neg:
- if have_ext:
- ext[0] |= 0x80
- else:
- v_bin[0] |= 0x80
- return s + ext + v_bin
-
-# bitcoin-specific little endian format, with implicit size
-def mpi2vch(s):
- r = s[4:] # strip size
- r = r[::-1] # reverse string, converting BE->LE
- return r
-
-def bn2vch(v):
- return bytes(mpi2vch(bn2mpi(v)))
diff --git a/test/functional/test_framework/descriptors.py b/test/functional/test_framework/descriptors.py
index 29482ce01e..46b405749b 100644
--- a/test/functional/test_framework/descriptors.py
+++ b/test/functional/test_framework/descriptors.py
@@ -4,6 +4,8 @@
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Utility functions related to output descriptors"""
+import re
+
INPUT_CHARSET = "0123456789()[],'/*abcdefgh@:$%{}IJKLMNOPQRSTUVWXYZ&+-.;<=>?!^_|~ijklmnopqrstuvwxyzABCDEFGH`#\"\\ "
CHECKSUM_CHARSET = "qpzry9x8gf2tvdw0s3jn54khce6mua7l"
GENERATOR = [0xf5dee51989, 0xa9fdca3312, 0x1bab10e32d, 0x3706b1677a, 0x644d626ffd]
@@ -53,3 +55,10 @@ def descsum_check(s, require=True):
return False
symbols = descsum_expand(s[:-9]) + [CHECKSUM_CHARSET.find(x) for x in s[-8:]]
return descsum_polymod(symbols) == 1
+
+def drop_origins(s):
+ '''Drop the key origins from a descriptor'''
+ desc = re.sub(r'\[.+?\]', '', s)
+ if '#' in s:
+ desc = desc[:desc.index('#')]
+ return descsum_create(desc)
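drop_origins(), added above, strips the bracketed key-origin groups and the stale checksum before recomputing one with descsum_create(). A minimal sketch of just the origin-stripping step; the xpub below is a placeholder and the checksum recomputation is omitted:

import re

desc = "wpkh([d34db33f/84h/0h/0h]xpubPLACEHOLDER/0/*)#oldchksum"
stripped = re.sub(r'\[.+?\]', '', desc)    # drop the [fingerprint/path] group
stripped = stripped[:stripped.index('#')]  # drop the stale checksum
print(stripped)                            # wpkh(xpubPLACEHOLDER/0/*)
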
diff --git a/test/functional/test_framework/key.py b/test/functional/test_framework/key.py
index 912c0ca978..f2d6fba4a6 100644
--- a/test/functional/test_framework/key.py
+++ b/test/functional/test_framework/key.py
@@ -8,6 +8,8 @@ keys, and is trivially vulnerable to side channel attacks. Do not use for
anything but tests."""
import random
+from .address import byte_to_base58
+
def modinv(a, n):
"""Compute the modular inverse of a modulo n
@@ -384,3 +386,14 @@ class ECKey():
rb = r.to_bytes((r.bit_length() + 8) // 8, 'big')
sb = s.to_bytes((s.bit_length() + 8) // 8, 'big')
return b'\x30' + bytes([4 + len(rb) + len(sb), 2, len(rb)]) + rb + bytes([2, len(sb)]) + sb
+
+def bytes_to_wif(b, compressed=True):
+ if compressed:
+ b += b'\x01'
+ return byte_to_base58(b, 239)
+
+def generate_wif_key():
+ # Makes a WIF privkey for imports
+ k = ECKey()
+ k.generate()
+ return bytes_to_wif(k.get_bytes(), k.is_compressed)
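bytes_to_wif() above builds a testnet/regtest WIF string by appending 0x01 for compressed keys and Base58Check-encoding with version byte 239 (0xef). A self-contained sketch of that encoding, independent of the framework's byte_to_base58 helper; the 32-byte key is a dummy value for illustration:

import hashlib

B58_CHARS = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'

def base58check(payload):
    """Base58Check: append a 4-byte double-SHA256 checksum, then base58-encode."""
    checksum = hashlib.sha256(hashlib.sha256(payload).digest()).digest()[:4]
    data = payload + checksum
    n = int.from_bytes(data, 'big')
    out = ''
    while n > 0:
        n, rem = divmod(n, 58)
        out = B58_CHARS[rem] + out
    # Preserve leading zero bytes as '1' characters.
    for byte in data:
        if byte == 0:
            out = '1' + out
        else:
            break
    return out

privkey = bytes(32)                                   # dummy all-zero key
wif = base58check(bytes([239]) + privkey + b'\x01')   # 0xef version byte, compressed marker
print(wif)
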
diff --git a/test/functional/test_framework/messages.py b/test/functional/test_framework/messages.py
index 4f7a9a8b13..4855f62a8f 100755
--- a/test/functional/test_framework/messages.py
+++ b/test/functional/test_framework/messages.py
@@ -1,7 +1,7 @@
#!/usr/bin/env python3
# Copyright (c) 2010 ArtForz -- public domain half-a-node
# Copyright (c) 2012 Jeff Garzik
-# Copyright (c) 2010-2019 The Bitcoin Core developers
+# Copyright (c) 2010-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Bitcoin test framework primitive and message structures
@@ -37,8 +37,11 @@ MY_RELAY = 1 # from version 70001 onwards, fRelay should be appended to version
MAX_LOCATOR_SZ = 101
MAX_BLOCK_BASE_SIZE = 1000000
+MAX_BLOOM_FILTER_SIZE = 36000
+MAX_BLOOM_HASH_FUNCS = 50
COIN = 100000000 # 1 btc in satoshis
+MAX_MONEY = 21000000 * COIN
BIP125_SEQUENCE_NUMBER = 0xfffffffd # Sequence number that is BIP 125 opt-in and BIP 68-opt-out
@@ -50,6 +53,7 @@ NODE_NETWORK_LIMITED = (1 << 10)
MSG_TX = 1
MSG_BLOCK = 2
+MSG_FILTERED_BLOCK = 3
MSG_WITNESS_FLAG = 1 << 30
MSG_TYPE_MASK = 0xffffffff >> 2
@@ -224,10 +228,11 @@ class CInv:
typemap = {
0: "Error",
- 1: "TX",
- 2: "Block",
- 1|MSG_WITNESS_FLAG: "WitnessTx",
- 2|MSG_WITNESS_FLAG : "WitnessBlock",
+ MSG_TX: "TX",
+ MSG_BLOCK: "Block",
+ MSG_TX | MSG_WITNESS_FLAG: "WitnessTx",
+ MSG_BLOCK | MSG_WITNESS_FLAG: "WitnessBlock",
+ MSG_FILTERED_BLOCK: "filtered Block",
4: "CompactBlock"
}
@@ -598,16 +603,16 @@ class CBlock(CBlockHeader):
__slots__ = ("vtx",)
def __init__(self, header=None):
- super(CBlock, self).__init__(header)
+ super().__init__(header)
self.vtx = []
def deserialize(self, f):
- super(CBlock, self).deserialize(f)
+ super().deserialize(f)
self.vtx = deser_vector(f, CTransaction)
def serialize(self, with_witness=True):
r = b""
- r += super(CBlock, self).serialize()
+ r += super().serialize()
if with_witness:
r += ser_vector(self.vtx, "serialize_with_witness")
else:
@@ -747,7 +752,7 @@ class P2PHeaderAndShortIDs:
class P2PHeaderAndShortWitnessIDs(P2PHeaderAndShortIDs):
__slots__ = ()
def serialize(self):
- return super(P2PHeaderAndShortWitnessIDs, self).serialize(with_witness=True)
+ return super().serialize(with_witness=True)
# Calculate the BIP 152-compact blocks shortid for a given transaction hash
def calculate_shortid(k0, k1, tx_hash):
@@ -943,7 +948,7 @@ class CMerkleBlock:
class msg_version:
__slots__ = ("addrFrom", "addrTo", "nNonce", "nRelay", "nServices",
"nStartingHeight", "nTime", "nVersion", "strSubVer")
- command = b"version"
+ msgtype = b"version"
def __init__(self):
self.nVersion = MY_VERSION
@@ -1001,7 +1006,7 @@ class msg_version:
class msg_verack:
__slots__ = ()
- command = b"verack"
+ msgtype = b"verack"
def __init__(self):
pass
@@ -1018,7 +1023,7 @@ class msg_verack:
class msg_addr:
__slots__ = ("addrs",)
- command = b"addr"
+ msgtype = b"addr"
def __init__(self):
self.addrs = []
@@ -1035,7 +1040,7 @@ class msg_addr:
class msg_inv:
__slots__ = ("inv",)
- command = b"inv"
+ msgtype = b"inv"
def __init__(self, inv=None):
if inv is None:
@@ -1055,7 +1060,7 @@ class msg_inv:
class msg_getdata:
__slots__ = ("inv",)
- command = b"getdata"
+ msgtype = b"getdata"
def __init__(self, inv=None):
self.inv = inv if inv is not None else []
@@ -1072,7 +1077,7 @@ class msg_getdata:
class msg_getblocks:
__slots__ = ("locator", "hashstop")
- command = b"getblocks"
+ msgtype = b"getblocks"
def __init__(self):
self.locator = CBlockLocator()
@@ -1096,7 +1101,7 @@ class msg_getblocks:
class msg_tx:
__slots__ = ("tx",)
- command = b"tx"
+ msgtype = b"tx"
def __init__(self, tx=CTransaction()):
self.tx = tx
@@ -1120,7 +1125,7 @@ class msg_no_witness_tx(msg_tx):
class msg_block:
__slots__ = ("block",)
- command = b"block"
+ msgtype = b"block"
def __init__(self, block=None):
if block is None:
@@ -1139,12 +1144,12 @@ class msg_block:
# for cases where a user needs tighter control over what is sent over the wire
-# note that the user must supply the name of the command, and the data
+# note that the user must supply the name of the msgtype, and the data
class msg_generic:
- __slots__ = ("command", "data")
+ __slots__ = ("msgtype", "data")
- def __init__(self, command, data=None):
- self.command = command
+ def __init__(self, msgtype, data=None):
+ self.msgtype = msgtype
self.data = data
def serialize(self):
@@ -1162,7 +1167,7 @@ class msg_no_witness_block(msg_block):
class msg_getaddr:
__slots__ = ()
- command = b"getaddr"
+ msgtype = b"getaddr"
def __init__(self):
pass
@@ -1179,7 +1184,7 @@ class msg_getaddr:
class msg_ping:
__slots__ = ("nonce",)
- command = b"ping"
+ msgtype = b"ping"
def __init__(self, nonce=0):
self.nonce = nonce
@@ -1198,7 +1203,7 @@ class msg_ping:
class msg_pong:
__slots__ = ("nonce",)
- command = b"pong"
+ msgtype = b"pong"
def __init__(self, nonce=0):
self.nonce = nonce
@@ -1217,7 +1222,7 @@ class msg_pong:
class msg_mempool:
__slots__ = ()
- command = b"mempool"
+ msgtype = b"mempool"
def __init__(self):
pass
@@ -1234,7 +1239,7 @@ class msg_mempool:
class msg_notfound:
__slots__ = ("vec", )
- command = b"notfound"
+ msgtype = b"notfound"
def __init__(self, vec=None):
self.vec = vec or []
@@ -1251,7 +1256,7 @@ class msg_notfound:
class msg_sendheaders:
__slots__ = ()
- command = b"sendheaders"
+ msgtype = b"sendheaders"
def __init__(self):
pass
@@ -1272,7 +1277,7 @@ class msg_sendheaders:
# hash_stop (hash of last desired block header, 0 to get as many as possible)
class msg_getheaders:
__slots__ = ("hashstop", "locator",)
- command = b"getheaders"
+ msgtype = b"getheaders"
def __init__(self):
self.locator = CBlockLocator()
@@ -1298,7 +1303,7 @@ class msg_getheaders:
# <count> <vector of block headers>
class msg_headers:
__slots__ = ("headers",)
- command = b"headers"
+ msgtype = b"headers"
def __init__(self, headers=None):
self.headers = headers if headers is not None else []
@@ -1317,10 +1322,94 @@ class msg_headers:
return "msg_headers(headers=%s)" % repr(self.headers)
+class msg_merkleblock:
+ __slots__ = ("merkleblock",)
+ msgtype = b"merkleblock"
+
+ def __init__(self, merkleblock=None):
+ if merkleblock is None:
+ self.merkleblock = CMerkleBlock()
+ else:
+ self.merkleblock = merkleblock
+
+ def deserialize(self, f):
+ self.merkleblock.deserialize(f)
+
+ def serialize(self):
+ return self.merkleblock.serialize()
+
+ def __repr__(self):
+ return "msg_merkleblock(merkleblock=%s)" % (repr(self.merkleblock))
+
+
+class msg_filterload:
+ __slots__ = ("data", "nHashFuncs", "nTweak", "nFlags")
+ msgtype = b"filterload"
+
+ def __init__(self, data=b'00', nHashFuncs=0, nTweak=0, nFlags=0):
+ self.data = data
+ self.nHashFuncs = nHashFuncs
+ self.nTweak = nTweak
+ self.nFlags = nFlags
+
+ def deserialize(self, f):
+ self.data = deser_string(f)
+ self.nHashFuncs = struct.unpack("<I", f.read(4))[0]
+ self.nTweak = struct.unpack("<I", f.read(4))[0]
+ self.nFlags = struct.unpack("<B", f.read(1))[0]
+
+ def serialize(self):
+ r = b""
+ r += ser_string(self.data)
+ r += struct.pack("<I", self.nHashFuncs)
+ r += struct.pack("<I", self.nTweak)
+ r += struct.pack("<B", self.nFlags)
+ return r
+
+ def __repr__(self):
+ return "msg_filterload(data={}, nHashFuncs={}, nTweak={}, nFlags={})".format(
+ self.data, self.nHashFuncs, self.nTweak, self.nFlags)
+
+
+class msg_filteradd:
+ __slots__ = ("data")
+ msgtype = b"filteradd"
+
+ def __init__(self, data):
+ self.data = data
+
+ def deserialize(self, f):
+ self.data = deser_string(f)
+
+ def serialize(self):
+ r = b""
+ r += ser_string(self.data)
+ return r
+
+ def __repr__(self):
+ return "msg_filteradd(data={})".format(self.data)
+
+
+class msg_filterclear:
+ __slots__ = ()
+ msgtype = b"filterclear"
+
+ def __init__(self):
+ pass
+
+ def deserialize(self, f):
+ pass
+
+ def serialize(self):
+ return b""
+
+ def __repr__(self):
+ return "msg_filterclear()"
+
class msg_feefilter:
__slots__ = ("feerate",)
- command = b"feefilter"
+ msgtype = b"feefilter"
def __init__(self, feerate=0):
self.feerate = feerate
@@ -1339,7 +1428,7 @@ class msg_feefilter:
class msg_sendcmpct:
__slots__ = ("announce", "version")
- command = b"sendcmpct"
+ msgtype = b"sendcmpct"
def __init__(self):
self.announce = False
@@ -1361,7 +1450,7 @@ class msg_sendcmpct:
class msg_cmpctblock:
__slots__ = ("header_and_shortids",)
- command = b"cmpctblock"
+ msgtype = b"cmpctblock"
def __init__(self, header_and_shortids = None):
self.header_and_shortids = header_and_shortids
@@ -1381,7 +1470,7 @@ class msg_cmpctblock:
class msg_getblocktxn:
__slots__ = ("block_txn_request",)
- command = b"getblocktxn"
+ msgtype = b"getblocktxn"
def __init__(self):
self.block_txn_request = None
@@ -1401,7 +1490,7 @@ class msg_getblocktxn:
class msg_blocktxn:
__slots__ = ("block_transactions",)
- command = b"blocktxn"
+ msgtype = b"blocktxn"
def __init__(self):
self.block_transactions = BlockTransactions()
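The new BIP 37 message classes added to messages.py above (msg_filterload, msg_filteradd, msg_filterclear, msg_merkleblock) follow the framework's usual serialize/deserialize pattern. A standalone sketch of the filterload wire payload for a small filter, mirroring the serialize() method above; the single-byte compact-size prefix only holds for data shorter than 253 bytes:

import struct

def filterload_payload(data, n_hash_funcs, n_tweak, n_flags):
    assert len(data) < 253      # keep the compact-size prefix to one byte in this sketch
    r = bytes([len(data)])      # compact-size length of the filter data
    r += data                   # the bloom filter bit field
    r += struct.pack("<I", n_hash_funcs)
    r += struct.pack("<I", n_tweak)
    r += struct.pack("<B", n_flags)
    return r

payload = filterload_payload(b'\x00' * 10, n_hash_funcs=5, n_tweak=0, n_flags=0)
print(payload.hex())
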
diff --git a/test/functional/test_framework/mininode.py b/test/functional/test_framework/mininode.py
index a9e669fea9..31cec66ee7 100755
--- a/test/functional/test_framework/mininode.py
+++ b/test/functional/test_framework/mininode.py
@@ -1,7 +1,7 @@
#!/usr/bin/env python3
# Copyright (c) 2010 ArtForz -- public domain half-a-node
# Copyright (c) 2012 Jeff Garzik
-# Copyright (c) 2010-2019 The Bitcoin Core developers
+# Copyright (c) 2010-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Bitcoin P2P network half-a-node.
@@ -12,7 +12,10 @@ found in the mini-node branch of http://github.com/jgarzik/pynode.
P2PConnection: A low-level connection object to a node's P2P interface
P2PInterface: A high-level interface object for communicating to a node over P2P
P2PDataStore: A p2p interface class that keeps a store of transactions and blocks
- and can respond correctly to getdata and getheaders messages"""
+ and can respond correctly to getdata and getheaders messages
+P2PTxInvStore: A p2p interface class that inherits from P2PInterface, and keeps
+ a count of how many times each txid has been announced."""
+
import asyncio
from collections import defaultdict
from io import BytesIO
@@ -30,6 +33,9 @@ from test_framework.messages import (
msg_blocktxn,
msg_cmpctblock,
msg_feefilter,
+ msg_filteradd,
+ msg_filterclear,
+ msg_filterload,
msg_getaddr,
msg_getblocks,
msg_getblocktxn,
@@ -38,6 +44,7 @@ from test_framework.messages import (
msg_headers,
msg_inv,
msg_mempool,
+ msg_merkleblock,
msg_notfound,
msg_ping,
msg_pong,
@@ -62,6 +69,9 @@ MESSAGEMAP = {
b"blocktxn": msg_blocktxn,
b"cmpctblock": msg_cmpctblock,
b"feefilter": msg_feefilter,
+ b"filteradd": msg_filteradd,
+ b"filterclear": msg_filterclear,
+ b"filterload": msg_filterload,
b"getaddr": msg_getaddr,
b"getblocks": msg_getblocks,
b"getblocktxn": msg_getblocktxn,
@@ -70,6 +80,7 @@ MESSAGEMAP = {
b"headers": msg_headers,
b"inv": msg_inv,
b"mempool": msg_mempool,
+ b"merkleblock": msg_merkleblock,
b"notfound": msg_notfound,
b"ping": msg_ping,
b"pong": msg_pong,
@@ -109,8 +120,9 @@ class P2PConnection(asyncio.Protocol):
def is_connected(self):
return self._transport is not None
- def peer_connect(self, dstaddr, dstport, *, net):
+ def peer_connect(self, dstaddr, dstport, *, net, factor):
assert not self.is_connected
+ self.factor = factor
self.dstaddr = dstaddr
self.dstport = dstport
# The initial message to send after the connection was made:
@@ -172,7 +184,7 @@ class P2PConnection(asyncio.Protocol):
raise ValueError("magic bytes mismatch: {} != {}".format(repr(self.magic_bytes), repr(self.recvbuf)))
if len(self.recvbuf) < 4 + 12 + 4 + 4:
return
- command = self.recvbuf[4:4+12].split(b"\x00", 1)[0]
+ msgtype = self.recvbuf[4:4+12].split(b"\x00", 1)[0]
msglen = struct.unpack("<i", self.recvbuf[4+12:4+12+4])[0]
checksum = self.recvbuf[4+12+4:4+12+4+4]
if len(self.recvbuf) < 4 + 12 + 4 + 4 + msglen:
@@ -183,10 +195,10 @@ class P2PConnection(asyncio.Protocol):
if checksum != h[:4]:
raise ValueError("got bad checksum " + repr(self.recvbuf))
self.recvbuf = self.recvbuf[4+12+4+4+msglen:]
- if command not in MESSAGEMAP:
- raise ValueError("Received unknown command from %s:%d: '%s' %s" % (self.dstaddr, self.dstport, command, repr(msg)))
+ if msgtype not in MESSAGEMAP:
+ raise ValueError("Received unknown msgtype from %s:%d: '%s' %s" % (self.dstaddr, self.dstport, msgtype, repr(msg)))
f = BytesIO(msg)
- t = MESSAGEMAP[command]()
+ t = MESSAGEMAP[msgtype]()
t.deserialize(f)
self._log_message("receive", t)
self.on_message(t)
@@ -225,11 +237,11 @@ class P2PConnection(asyncio.Protocol):
def build_message(self, message):
"""Build a serialized P2P message"""
- command = message.command
+ msgtype = message.msgtype
data = message.serialize()
tmsg = self.magic_bytes
- tmsg += command
- tmsg += b"\x00" * (12 - len(command))
+ tmsg += msgtype
+ tmsg += b"\x00" * (12 - len(msgtype))
tmsg += struct.pack("<I", len(data))
th = sha256(data)
h = sha256(th)
@@ -296,10 +308,10 @@ class P2PInterface(P2PConnection):
and the most recent message of each type."""
with mininode_lock:
try:
- command = message.command.decode('ascii')
- self.message_count[command] += 1
- self.last_message[command] = message
- getattr(self, 'on_' + command)(message)
+ msgtype = message.msgtype.decode('ascii')
+ self.message_count[msgtype] += 1
+ self.last_message[msgtype] = message
+ getattr(self, 'on_' + msgtype)(message)
except:
print("ERROR delivering %s (%s)" % (repr(message), sys.exc_info()[0]))
raise
@@ -318,6 +330,9 @@ class P2PInterface(P2PConnection):
def on_blocktxn(self, message): pass
def on_cmpctblock(self, message): pass
def on_feefilter(self, message): pass
+ def on_filteradd(self, message): pass
+ def on_filterclear(self, message): pass
+ def on_filterload(self, message): pass
def on_getaddr(self, message): pass
def on_getblocks(self, message): pass
def on_getblocktxn(self, message): pass
@@ -325,9 +340,9 @@ class P2PInterface(P2PConnection):
def on_getheaders(self, message): pass
def on_headers(self, message): pass
def on_mempool(self, message): pass
+ def on_merkleblock(self, message): pass
def on_notfound(self, message): pass
def on_pong(self, message): pass
- def on_reject(self, message): pass
def on_sendcmpct(self, message): pass
def on_sendheaders(self, message): pass
def on_tx(self, message): pass
@@ -353,9 +368,12 @@ class P2PInterface(P2PConnection):
# Connection helper methods
+ def wait_until(self, test_function, timeout):
+ wait_until(test_function, timeout=timeout, lock=mininode_lock, factor=self.factor)
+
def wait_for_disconnect(self, timeout=60):
test_function = lambda: not self.is_connected
- wait_until(test_function, timeout=timeout, lock=mininode_lock)
+ self.wait_until(test_function, timeout=timeout)
# Message receiving helper methods
@@ -366,14 +384,14 @@ class P2PInterface(P2PConnection):
return False
return self.last_message['tx'].tx.rehash() == txid
- wait_until(test_function, timeout=timeout, lock=mininode_lock)
+ self.wait_until(test_function, timeout=timeout)
def wait_for_block(self, blockhash, timeout=60):
def test_function():
assert self.is_connected
return self.last_message.get("block") and self.last_message["block"].block.rehash() == blockhash
- wait_until(test_function, timeout=timeout, lock=mininode_lock)
+ self.wait_until(test_function, timeout=timeout)
def wait_for_header(self, blockhash, timeout=60):
def test_function():
@@ -381,23 +399,33 @@ class P2PInterface(P2PConnection):
last_headers = self.last_message.get('headers')
if not last_headers:
return False
- return last_headers.headers[0].rehash() == blockhash
+ return last_headers.headers[0].rehash() == int(blockhash, 16)
- wait_until(test_function, timeout=timeout, lock=mininode_lock)
+ self.wait_until(test_function, timeout=timeout)
- def wait_for_getdata(self, timeout=60):
+ def wait_for_merkleblock(self, blockhash, timeout=60):
+ def test_function():
+ assert self.is_connected
+ last_filtered_block = self.last_message.get('merkleblock')
+ if not last_filtered_block:
+ return False
+ return last_filtered_block.merkleblock.header.rehash() == int(blockhash, 16)
+
+ self.wait_until(test_function, timeout=timeout)
+
+ def wait_for_getdata(self, hash_list, timeout=60):
"""Waits for a getdata message.
- Receiving any getdata message will satisfy the predicate. the last_message["getdata"]
- value must be explicitly cleared before calling this method, or this will return
- immediately with success. TODO: change this method to take a hash value and only
- return true if the correct block/tx has been requested."""
+ The object hashes in the inventory vector must match the provided hash_list."""
def test_function():
assert self.is_connected
- return self.last_message.get("getdata")
+ last_data = self.last_message.get("getdata")
+ if not last_data:
+ return False
+ return [x.hash for x in last_data.inv] == hash_list
- wait_until(test_function, timeout=timeout, lock=mininode_lock)
+ self.wait_until(test_function, timeout=timeout)
def wait_for_getheaders(self, timeout=60):
"""Waits for a getheaders message.
@@ -411,7 +439,7 @@ class P2PInterface(P2PConnection):
assert self.is_connected
return self.last_message.get("getheaders")
- wait_until(test_function, timeout=timeout, lock=mininode_lock)
+ self.wait_until(test_function, timeout=timeout)
def wait_for_inv(self, expected_inv, timeout=60):
"""Waits for an INV message and checks that the first inv object in the message was as expected."""
@@ -424,13 +452,13 @@ class P2PInterface(P2PConnection):
self.last_message["inv"].inv[0].type == expected_inv[0].type and \
self.last_message["inv"].inv[0].hash == expected_inv[0].hash
- wait_until(test_function, timeout=timeout, lock=mininode_lock)
+ self.wait_until(test_function, timeout=timeout)
def wait_for_verack(self, timeout=60):
def test_function():
return self.message_count["verack"]
- wait_until(test_function, timeout=timeout, lock=mininode_lock)
+ self.wait_until(test_function, timeout=timeout)
# Message sending helper functions
@@ -446,7 +474,7 @@ class P2PInterface(P2PConnection):
assert self.is_connected
return self.last_message.get("pong") and self.last_message["pong"].nonce == self.ping_counter
- wait_until(test_function, timeout=timeout, lock=mininode_lock)
+ self.wait_until(test_function, timeout=timeout)
self.ping_counter += 1
@@ -562,7 +590,7 @@ class P2PDataStore(P2PInterface):
self.send_message(msg_block(block=b))
else:
self.send_message(msg_headers([CBlockHeader(block) for block in blocks]))
- wait_until(lambda: blocks[-1].sha256 in self.getdata_requests, timeout=timeout, lock=mininode_lock)
+ self.wait_until(lambda: blocks[-1].sha256 in self.getdata_requests, timeout=timeout)
if expect_disconnect:
self.wait_for_disconnect(timeout=timeout)
@@ -570,7 +598,7 @@ class P2PDataStore(P2PInterface):
self.sync_with_ping(timeout=timeout)
if success:
- wait_until(lambda: node.getbestblockhash() == blocks[-1].hash, timeout=timeout)
+ self.wait_until(lambda: node.getbestblockhash() == blocks[-1].hash, timeout=timeout)
else:
assert node.getbestblockhash() != blocks[-1].hash
@@ -606,3 +634,20 @@ class P2PDataStore(P2PInterface):
# Check that none of the txs are now in the mempool
for tx in txs:
assert tx.hash not in raw_mempool, "{} tx found in mempool".format(tx.hash)
+
+class P2PTxInvStore(P2PInterface):
+ """A P2PInterface which stores a count of how many times each txid has been announced."""
+ def __init__(self):
+ super().__init__()
+ self.tx_invs_received = defaultdict(int)
+
+ def on_inv(self, message):
+ # Store how many times invs have been received for each tx.
+ for i in message.inv:
+ if i.type == MSG_TX:
+ # save txid
+ self.tx_invs_received[i.hash] += 1
+
+ def get_invs(self):
+ with mininode_lock:
+ return list(self.tx_invs_received.keys())
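The mininode changes above rename "command" to "msgtype" throughout, but build_message() still frames payloads with the same v1 P2P header: 4 magic bytes, the message type NUL-padded to 12 bytes, the payload length, and the first 4 bytes of the double-SHA256 of the payload. A standalone sketch of that framing; the regtest network magic below (fa bf b5 da) is an assumption, not taken from this diff:

import hashlib
import struct

MAGIC_REGTEST = bytes.fromhex("fabfb5da")  # assumed regtest network magic

def build_p2p_message(msgtype, payload):
    """Frame a payload with the v1 P2P header, as in P2PConnection.build_message()."""
    checksum = hashlib.sha256(hashlib.sha256(payload).digest()).digest()[:4]
    header = MAGIC_REGTEST
    header += msgtype + b"\x00" * (12 - len(msgtype))  # message type, NUL-padded to 12 bytes
    header += struct.pack("<I", len(payload))
    header += checksum
    return header + payload

print(build_p2p_message(b"ping", struct.pack("<Q", 0)).hex())
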
diff --git a/test/functional/test_framework/script.py b/test/functional/test_framework/script.py
index 51aa9057f7..9102266456 100644
--- a/test/functional/test_framework/script.py
+++ b/test/functional/test_framework/script.py
@@ -1,26 +1,41 @@
#!/usr/bin/env python3
-# Copyright (c) 2015-2019 The Bitcoin Core developers
+# Copyright (c) 2015-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Functionality to build scripts, as well as signature hash functions.
This file is modified from python-bitcoinlib.
"""
-
-from .messages import CTransaction, CTxOut, sha256, hash256, uint256_from_str, ser_uint256, ser_string
-
import hashlib
import struct
-
-from .bignum import bn2vch
+import unittest
+
+from .messages import (
+ CTransaction,
+ CTxOut,
+ hash256,
+ ser_string,
+ ser_uint256,
+ sha256,
+ uint256_from_str,
+)
MAX_SCRIPT_ELEMENT_SIZE = 520
-
OPCODE_NAMES = {}
def hash160(s):
return hashlib.new('ripemd160', sha256(s)).digest()
+def bn2vch(v):
+ """Convert number to bitcoin-specific little endian format."""
+ # We need v.bit_length() bits, plus a sign bit for every nonzero number.
+ n_bits = v.bit_length() + (v != 0)
+ # The number of bytes for that is:
+ n_bytes = (n_bits + 7) // 8
+ # Convert number to absolute value + sign in top bit.
+ encoded_v = 0 if v == 0 else abs(v) | ((v < 0) << (n_bytes * 8 - 1))
+ # Serialize to bytes
+ return encoded_v.to_bytes(n_bytes, 'little')
_opcode_instances = []
class CScriptOp(int):
@@ -31,13 +46,13 @@ class CScriptOp(int):
def encode_op_pushdata(d):
"""Encode a PUSHDATA op, returning bytes"""
if len(d) < 0x4c:
- return b'' + bytes([len(d)]) + d # OP_PUSHDATA
+ return b'' + bytes([len(d)]) + d # OP_PUSHDATA
elif len(d) <= 0xff:
- return b'\x4c' + bytes([len(d)]) + d # OP_PUSHDATA1
+ return b'\x4c' + bytes([len(d)]) + d # OP_PUSHDATA1
elif len(d) <= 0xffff:
- return b'\x4d' + struct.pack(b'<H', len(d)) + d # OP_PUSHDATA2
+ return b'\x4d' + struct.pack(b'<H', len(d)) + d # OP_PUSHDATA2
elif len(d) <= 0xffffffff:
- return b'\x4e' + struct.pack(b'<I', len(d)) + d # OP_PUSHDATA4
+ return b'\x4e' + struct.pack(b'<I', len(d)) + d # OP_PUSHDATA4
else:
raise ValueError("Data too long to encode in a PUSHDATA op")
@@ -50,7 +65,7 @@ class CScriptOp(int):
if n == 0:
return OP_0
else:
- return CScriptOp(OP_1 + n-1)
+ return CScriptOp(OP_1 + n - 1)
def decode_op_n(self):
"""Decode a small integer opcode, returning an integer"""
@@ -60,7 +75,7 @@ class CScriptOp(int):
if not (self == OP_0 or OP_1 <= self <= OP_16):
raise ValueError('op %r is not an OP_N' % self)
- return int(self - OP_1+1)
+ return int(self - OP_1 + 1)
def is_small_int(self):
"""Return true if the op pushes a small integer to the stack"""
@@ -83,11 +98,11 @@ class CScriptOp(int):
return _opcode_instances[n]
except IndexError:
assert len(_opcode_instances) == n
- _opcode_instances.append(super(CScriptOp, cls).__new__(cls, n))
+ _opcode_instances.append(super().__new__(cls, n))
return _opcode_instances[n]
# Populate opcode instance table
-for n in range(0xff+1):
+for n in range(0xff + 1):
CScriptOp(n)
@@ -100,7 +115,7 @@ OP_PUSHDATA4 = CScriptOp(0x4e)
OP_1NEGATE = CScriptOp(0x4f)
OP_RESERVED = CScriptOp(0x50)
OP_1 = CScriptOp(0x51)
-OP_TRUE=OP_1
+OP_TRUE = OP_1
OP_2 = CScriptOp(0x52)
OP_3 = CScriptOp(0x53)
OP_4 = CScriptOp(0x54)
@@ -232,122 +247,122 @@ OP_PUBKEY = CScriptOp(0xfe)
OP_INVALIDOPCODE = CScriptOp(0xff)
OPCODE_NAMES.update({
- OP_0 : 'OP_0',
- OP_PUSHDATA1 : 'OP_PUSHDATA1',
- OP_PUSHDATA2 : 'OP_PUSHDATA2',
- OP_PUSHDATA4 : 'OP_PUSHDATA4',
- OP_1NEGATE : 'OP_1NEGATE',
- OP_RESERVED : 'OP_RESERVED',
- OP_1 : 'OP_1',
- OP_2 : 'OP_2',
- OP_3 : 'OP_3',
- OP_4 : 'OP_4',
- OP_5 : 'OP_5',
- OP_6 : 'OP_6',
- OP_7 : 'OP_7',
- OP_8 : 'OP_8',
- OP_9 : 'OP_9',
- OP_10 : 'OP_10',
- OP_11 : 'OP_11',
- OP_12 : 'OP_12',
- OP_13 : 'OP_13',
- OP_14 : 'OP_14',
- OP_15 : 'OP_15',
- OP_16 : 'OP_16',
- OP_NOP : 'OP_NOP',
- OP_VER : 'OP_VER',
- OP_IF : 'OP_IF',
- OP_NOTIF : 'OP_NOTIF',
- OP_VERIF : 'OP_VERIF',
- OP_VERNOTIF : 'OP_VERNOTIF',
- OP_ELSE : 'OP_ELSE',
- OP_ENDIF : 'OP_ENDIF',
- OP_VERIFY : 'OP_VERIFY',
- OP_RETURN : 'OP_RETURN',
- OP_TOALTSTACK : 'OP_TOALTSTACK',
- OP_FROMALTSTACK : 'OP_FROMALTSTACK',
- OP_2DROP : 'OP_2DROP',
- OP_2DUP : 'OP_2DUP',
- OP_3DUP : 'OP_3DUP',
- OP_2OVER : 'OP_2OVER',
- OP_2ROT : 'OP_2ROT',
- OP_2SWAP : 'OP_2SWAP',
- OP_IFDUP : 'OP_IFDUP',
- OP_DEPTH : 'OP_DEPTH',
- OP_DROP : 'OP_DROP',
- OP_DUP : 'OP_DUP',
- OP_NIP : 'OP_NIP',
- OP_OVER : 'OP_OVER',
- OP_PICK : 'OP_PICK',
- OP_ROLL : 'OP_ROLL',
- OP_ROT : 'OP_ROT',
- OP_SWAP : 'OP_SWAP',
- OP_TUCK : 'OP_TUCK',
- OP_CAT : 'OP_CAT',
- OP_SUBSTR : 'OP_SUBSTR',
- OP_LEFT : 'OP_LEFT',
- OP_RIGHT : 'OP_RIGHT',
- OP_SIZE : 'OP_SIZE',
- OP_INVERT : 'OP_INVERT',
- OP_AND : 'OP_AND',
- OP_OR : 'OP_OR',
- OP_XOR : 'OP_XOR',
- OP_EQUAL : 'OP_EQUAL',
- OP_EQUALVERIFY : 'OP_EQUALVERIFY',
- OP_RESERVED1 : 'OP_RESERVED1',
- OP_RESERVED2 : 'OP_RESERVED2',
- OP_1ADD : 'OP_1ADD',
- OP_1SUB : 'OP_1SUB',
- OP_2MUL : 'OP_2MUL',
- OP_2DIV : 'OP_2DIV',
- OP_NEGATE : 'OP_NEGATE',
- OP_ABS : 'OP_ABS',
- OP_NOT : 'OP_NOT',
- OP_0NOTEQUAL : 'OP_0NOTEQUAL',
- OP_ADD : 'OP_ADD',
- OP_SUB : 'OP_SUB',
- OP_MUL : 'OP_MUL',
- OP_DIV : 'OP_DIV',
- OP_MOD : 'OP_MOD',
- OP_LSHIFT : 'OP_LSHIFT',
- OP_RSHIFT : 'OP_RSHIFT',
- OP_BOOLAND : 'OP_BOOLAND',
- OP_BOOLOR : 'OP_BOOLOR',
- OP_NUMEQUAL : 'OP_NUMEQUAL',
- OP_NUMEQUALVERIFY : 'OP_NUMEQUALVERIFY',
- OP_NUMNOTEQUAL : 'OP_NUMNOTEQUAL',
- OP_LESSTHAN : 'OP_LESSTHAN',
- OP_GREATERTHAN : 'OP_GREATERTHAN',
- OP_LESSTHANOREQUAL : 'OP_LESSTHANOREQUAL',
- OP_GREATERTHANOREQUAL : 'OP_GREATERTHANOREQUAL',
- OP_MIN : 'OP_MIN',
- OP_MAX : 'OP_MAX',
- OP_WITHIN : 'OP_WITHIN',
- OP_RIPEMD160 : 'OP_RIPEMD160',
- OP_SHA1 : 'OP_SHA1',
- OP_SHA256 : 'OP_SHA256',
- OP_HASH160 : 'OP_HASH160',
- OP_HASH256 : 'OP_HASH256',
- OP_CODESEPARATOR : 'OP_CODESEPARATOR',
- OP_CHECKSIG : 'OP_CHECKSIG',
- OP_CHECKSIGVERIFY : 'OP_CHECKSIGVERIFY',
- OP_CHECKMULTISIG : 'OP_CHECKMULTISIG',
- OP_CHECKMULTISIGVERIFY : 'OP_CHECKMULTISIGVERIFY',
- OP_NOP1 : 'OP_NOP1',
- OP_CHECKLOCKTIMEVERIFY : 'OP_CHECKLOCKTIMEVERIFY',
- OP_CHECKSEQUENCEVERIFY : 'OP_CHECKSEQUENCEVERIFY',
- OP_NOP4 : 'OP_NOP4',
- OP_NOP5 : 'OP_NOP5',
- OP_NOP6 : 'OP_NOP6',
- OP_NOP7 : 'OP_NOP7',
- OP_NOP8 : 'OP_NOP8',
- OP_NOP9 : 'OP_NOP9',
- OP_NOP10 : 'OP_NOP10',
- OP_SMALLINTEGER : 'OP_SMALLINTEGER',
- OP_PUBKEYS : 'OP_PUBKEYS',
- OP_PUBKEYHASH : 'OP_PUBKEYHASH',
- OP_PUBKEY : 'OP_PUBKEY',
- OP_INVALIDOPCODE : 'OP_INVALIDOPCODE',
+ OP_0: 'OP_0',
+ OP_PUSHDATA1: 'OP_PUSHDATA1',
+ OP_PUSHDATA2: 'OP_PUSHDATA2',
+ OP_PUSHDATA4: 'OP_PUSHDATA4',
+ OP_1NEGATE: 'OP_1NEGATE',
+ OP_RESERVED: 'OP_RESERVED',
+ OP_1: 'OP_1',
+ OP_2: 'OP_2',
+ OP_3: 'OP_3',
+ OP_4: 'OP_4',
+ OP_5: 'OP_5',
+ OP_6: 'OP_6',
+ OP_7: 'OP_7',
+ OP_8: 'OP_8',
+ OP_9: 'OP_9',
+ OP_10: 'OP_10',
+ OP_11: 'OP_11',
+ OP_12: 'OP_12',
+ OP_13: 'OP_13',
+ OP_14: 'OP_14',
+ OP_15: 'OP_15',
+ OP_16: 'OP_16',
+ OP_NOP: 'OP_NOP',
+ OP_VER: 'OP_VER',
+ OP_IF: 'OP_IF',
+ OP_NOTIF: 'OP_NOTIF',
+ OP_VERIF: 'OP_VERIF',
+ OP_VERNOTIF: 'OP_VERNOTIF',
+ OP_ELSE: 'OP_ELSE',
+ OP_ENDIF: 'OP_ENDIF',
+ OP_VERIFY: 'OP_VERIFY',
+ OP_RETURN: 'OP_RETURN',
+ OP_TOALTSTACK: 'OP_TOALTSTACK',
+ OP_FROMALTSTACK: 'OP_FROMALTSTACK',
+ OP_2DROP: 'OP_2DROP',
+ OP_2DUP: 'OP_2DUP',
+ OP_3DUP: 'OP_3DUP',
+ OP_2OVER: 'OP_2OVER',
+ OP_2ROT: 'OP_2ROT',
+ OP_2SWAP: 'OP_2SWAP',
+ OP_IFDUP: 'OP_IFDUP',
+ OP_DEPTH: 'OP_DEPTH',
+ OP_DROP: 'OP_DROP',
+ OP_DUP: 'OP_DUP',
+ OP_NIP: 'OP_NIP',
+ OP_OVER: 'OP_OVER',
+ OP_PICK: 'OP_PICK',
+ OP_ROLL: 'OP_ROLL',
+ OP_ROT: 'OP_ROT',
+ OP_SWAP: 'OP_SWAP',
+ OP_TUCK: 'OP_TUCK',
+ OP_CAT: 'OP_CAT',
+ OP_SUBSTR: 'OP_SUBSTR',
+ OP_LEFT: 'OP_LEFT',
+ OP_RIGHT: 'OP_RIGHT',
+ OP_SIZE: 'OP_SIZE',
+ OP_INVERT: 'OP_INVERT',
+ OP_AND: 'OP_AND',
+ OP_OR: 'OP_OR',
+ OP_XOR: 'OP_XOR',
+ OP_EQUAL: 'OP_EQUAL',
+ OP_EQUALVERIFY: 'OP_EQUALVERIFY',
+ OP_RESERVED1: 'OP_RESERVED1',
+ OP_RESERVED2: 'OP_RESERVED2',
+ OP_1ADD: 'OP_1ADD',
+ OP_1SUB: 'OP_1SUB',
+ OP_2MUL: 'OP_2MUL',
+ OP_2DIV: 'OP_2DIV',
+ OP_NEGATE: 'OP_NEGATE',
+ OP_ABS: 'OP_ABS',
+ OP_NOT: 'OP_NOT',
+ OP_0NOTEQUAL: 'OP_0NOTEQUAL',
+ OP_ADD: 'OP_ADD',
+ OP_SUB: 'OP_SUB',
+ OP_MUL: 'OP_MUL',
+ OP_DIV: 'OP_DIV',
+ OP_MOD: 'OP_MOD',
+ OP_LSHIFT: 'OP_LSHIFT',
+ OP_RSHIFT: 'OP_RSHIFT',
+ OP_BOOLAND: 'OP_BOOLAND',
+ OP_BOOLOR: 'OP_BOOLOR',
+ OP_NUMEQUAL: 'OP_NUMEQUAL',
+ OP_NUMEQUALVERIFY: 'OP_NUMEQUALVERIFY',
+ OP_NUMNOTEQUAL: 'OP_NUMNOTEQUAL',
+ OP_LESSTHAN: 'OP_LESSTHAN',
+ OP_GREATERTHAN: 'OP_GREATERTHAN',
+ OP_LESSTHANOREQUAL: 'OP_LESSTHANOREQUAL',
+ OP_GREATERTHANOREQUAL: 'OP_GREATERTHANOREQUAL',
+ OP_MIN: 'OP_MIN',
+ OP_MAX: 'OP_MAX',
+ OP_WITHIN: 'OP_WITHIN',
+ OP_RIPEMD160: 'OP_RIPEMD160',
+ OP_SHA1: 'OP_SHA1',
+ OP_SHA256: 'OP_SHA256',
+ OP_HASH160: 'OP_HASH160',
+ OP_HASH256: 'OP_HASH256',
+ OP_CODESEPARATOR: 'OP_CODESEPARATOR',
+ OP_CHECKSIG: 'OP_CHECKSIG',
+ OP_CHECKSIGVERIFY: 'OP_CHECKSIGVERIFY',
+ OP_CHECKMULTISIG: 'OP_CHECKMULTISIG',
+ OP_CHECKMULTISIGVERIFY: 'OP_CHECKMULTISIGVERIFY',
+ OP_NOP1: 'OP_NOP1',
+ OP_CHECKLOCKTIMEVERIFY: 'OP_CHECKLOCKTIMEVERIFY',
+ OP_CHECKSEQUENCEVERIFY: 'OP_CHECKSEQUENCEVERIFY',
+ OP_NOP4: 'OP_NOP4',
+ OP_NOP5: 'OP_NOP5',
+ OP_NOP6: 'OP_NOP6',
+ OP_NOP7: 'OP_NOP7',
+ OP_NOP8: 'OP_NOP8',
+ OP_NOP9: 'OP_NOP9',
+ OP_NOP10: 'OP_NOP10',
+ OP_SMALLINTEGER: 'OP_SMALLINTEGER',
+ OP_PUBKEYS: 'OP_PUBKEYS',
+ OP_PUBKEYHASH: 'OP_PUBKEYHASH',
+ OP_PUBKEY: 'OP_PUBKEY',
+ OP_INVALIDOPCODE: 'OP_INVALIDOPCODE',
})
class CScriptInvalidError(Exception):
@@ -358,7 +373,7 @@ class CScriptTruncatedPushDataError(CScriptInvalidError):
"""Invalid pushdata due to truncation"""
def __init__(self, msg, data):
self.data = data
- super(CScriptTruncatedPushDataError, self).__init__(msg)
+ super().__init__(msg)
# This is used, eg, for blockchain heights in coinbase scripts (bip34)
@@ -392,10 +407,10 @@ class CScriptNum:
if len(value) == 0:
return result
for i, byte in enumerate(value):
- result |= int(byte) << 8*i
+ result |= int(byte) << 8 * i
if value[-1] >= 0x80:
# Mask for all but the highest result bit
- num_mask = (2**(len(value)*8) - 1) >> 1
+ num_mask = (2**(len(value) * 8) - 1) >> 1
result &= num_mask
result *= -1
return result
@@ -435,15 +450,8 @@ class CScript(bytes):
return other
def __add__(self, other):
- # Do the coercion outside of the try block so that errors in it are
- # noticed.
- other = self.__coerce_instance(other)
-
- try:
- # bytes.__add__ always returns bytes instances unfortunately
- return CScript(super(CScript, self).__add__(other))
- except TypeError:
- raise TypeError('Can not add a %r instance to a CScript' % other.__class__)
+ # add makes no sense for a CScript()
+ raise NotImplementedError
def join(self, iterable):
# join makes no sense for a CScript()
@@ -451,14 +459,14 @@ class CScript(bytes):
def __new__(cls, value=b''):
if isinstance(value, bytes) or isinstance(value, bytearray):
- return super(CScript, cls).__new__(cls, value)
+ return super().__new__(cls, value)
else:
def coerce_iterable(iterable):
for instance in iterable:
yield cls.__coerce_instance(instance)
# Annoyingly on both python2 and python3 bytes.join() always
# returns a bytes instance even when subclassed.
- return super(CScript, cls).__new__(cls, b''.join(coerce_iterable(value)))
+ return super().__new__(cls, b''.join(coerce_iterable(value)))
def raw_iter(self):
"""Raw iteration
@@ -493,21 +501,20 @@ class CScript(bytes):
pushdata_type = 'PUSHDATA2'
if i + 1 >= len(self):
raise CScriptInvalidError('PUSHDATA2: missing data length')
- datasize = self[i] + (self[i+1] << 8)
+ datasize = self[i] + (self[i + 1] << 8)
i += 2
elif opcode == OP_PUSHDATA4:
pushdata_type = 'PUSHDATA4'
if i + 3 >= len(self):
raise CScriptInvalidError('PUSHDATA4: missing data length')
- datasize = self[i] + (self[i+1] << 8) + (self[i+2] << 16) + (self[i+3] << 24)
+ datasize = self[i] + (self[i + 1] << 8) + (self[i + 2] << 16) + (self[i + 3] << 24)
i += 4
else:
- assert False # shouldn't happen
+ assert False # shouldn't happen
-
- data = bytes(self[i:i+datasize])
+ data = bytes(self[i:i + datasize])
# Check for truncation
if len(data) < datasize:
@@ -702,3 +709,25 @@ def SegwitV0SignatureHash(script, txTo, inIdx, hashtype, amount):
ss += struct.pack("<I", hashtype)
return hash256(ss)
+
+class TestFrameworkScript(unittest.TestCase):
+ def test_bn2vch(self):
+ self.assertEqual(bn2vch(0), bytes([]))
+ self.assertEqual(bn2vch(1), bytes([0x01]))
+ self.assertEqual(bn2vch(-1), bytes([0x81]))
+ self.assertEqual(bn2vch(0x7F), bytes([0x7F]))
+ self.assertEqual(bn2vch(-0x7F), bytes([0xFF]))
+ self.assertEqual(bn2vch(0x80), bytes([0x80, 0x00]))
+ self.assertEqual(bn2vch(-0x80), bytes([0x80, 0x80]))
+ self.assertEqual(bn2vch(0xFF), bytes([0xFF, 0x00]))
+ self.assertEqual(bn2vch(-0xFF), bytes([0xFF, 0x80]))
+ self.assertEqual(bn2vch(0x100), bytes([0x00, 0x01]))
+ self.assertEqual(bn2vch(-0x100), bytes([0x00, 0x81]))
+ self.assertEqual(bn2vch(0x7FFF), bytes([0xFF, 0x7F]))
+ self.assertEqual(bn2vch(-0x8000), bytes([0x00, 0x80, 0x80]))
+ self.assertEqual(bn2vch(-0x7FFFFF), bytes([0xFF, 0xFF, 0xFF]))
+ self.assertEqual(bn2vch(0x80000000), bytes([0x00, 0x00, 0x00, 0x80, 0x00]))
+ self.assertEqual(bn2vch(-0x80000000), bytes([0x00, 0x00, 0x00, 0x80, 0x80]))
+ self.assertEqual(bn2vch(0xFFFFFFFF), bytes([0xFF, 0xFF, 0xFF, 0xFF, 0x00]))
+ self.assertEqual(bn2vch(123456789), bytes([0x15, 0xCD, 0x5B, 0x07]))
+ self.assertEqual(bn2vch(-54321), bytes([0x31, 0xD4, 0x80]))
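The TestFrameworkScript unit test above pins down bn2vch()'s behaviour. One case worked by hand, following the new implementation, for -0x8000:

v = -0x8000
n_bits = v.bit_length() + 1                        # 16 value bits plus a sign bit = 17
n_bytes = (n_bits + 7) // 8                        # 3 bytes
encoded = abs(v) | (1 << (n_bytes * 8 - 1))        # 0x8000 | 0x800000 = 0x808000 (sign in top bit)
print(encoded.to_bytes(n_bytes, 'little').hex())   # 008080, i.e. [0x00, 0x80, 0x80]
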
diff --git a/test/functional/test_framework/script_util.py b/test/functional/test_framework/script_util.py
index 5ef67226c4..80fbae70bf 100755
--- a/test/functional/test_framework/script_util.py
+++ b/test/functional/test_framework/script_util.py
@@ -23,3 +23,4 @@ from test_framework.script import CScript
# scriptPubKeys are needed, to guarantee that the minimum transaction size is
# met.
DUMMY_P2WPKH_SCRIPT = CScript([b'a' * 21])
+DUMMY_2_P2WPKH_SCRIPT = CScript([b'b' * 21])
diff --git a/test/functional/test_framework/test_framework.py b/test/functional/test_framework/test_framework.py
index da92c6325a..11c96deefb 100755
--- a/test/functional/test_framework/test_framework.py
+++ b/test/functional/test_framework/test_framework.py
@@ -6,11 +6,12 @@
import configparser
from enum import Enum
-import logging
import argparse
+import logging
import os
import pdb
import random
+import re
import shutil
import subprocess
import sys
@@ -101,6 +102,7 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
self.bind_to_localhost_only = True
self.set_test_params()
self.parse_args()
+ self.rpc_timeout = int(self.rpc_timeout * self.options.factor) # optionally, increase timeout by a factor
def main(self):
"""Main function. This should not be overridden by the subclass test scripts."""
@@ -165,6 +167,9 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
help="run nodes under the valgrind memory error detector: expect at least a ~10x slowdown, valgrind 3.14 or later required")
parser.add_argument("--randomseed", type=int,
help="set a random seed for deterministically reproducing a previous test run")
+ parser.add_argument("--descriptors", default=False, action="store_true",
+ help="Run test using a descriptor wallet")
+ parser.add_argument('--factor', type=float, default=1.0, help='adjust test timeouts by a factor')
self.add_options(parser)
self.options = parser.parse_args()
@@ -183,10 +188,11 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
self.options.bitcoind = os.getenv("BITCOIND", default=config["environment"]["BUILDDIR"] + '/src/bitcoind' + config["environment"]["EXEEXT"])
self.options.bitcoincli = os.getenv("BITCOINCLI", default=config["environment"]["BUILDDIR"] + '/src/bitcoin-cli' + config["environment"]["EXEEXT"])
+ self.options.previous_releases_path = os.getenv("PREVIOUS_RELEASES_DIR") or os.getcwd() + "/releases"
+
os.environ['PATH'] = os.pathsep.join([
os.path.join(config['environment']['BUILDDIR'], 'src'),
- os.path.join(config['environment']['BUILDDIR'], 'src', 'qt'),
- os.environ['PATH']
+ os.path.join(config['environment']['BUILDDIR'], 'src', 'qt'), os.environ['PATH']
])
# Set up temp directory and start logging
@@ -333,11 +339,23 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
def setup_nodes(self):
"""Override this method to customize test node setup"""
- extra_args = None
+ extra_args = [[]] * self.num_nodes
+ wallets = [[]] * self.num_nodes
if hasattr(self, "extra_args"):
extra_args = self.extra_args
+ wallets = [[x for x in eargs if x.startswith('-wallet=')] for eargs in extra_args]
+ extra_args = [x + ['-nowallet'] for x in extra_args]
self.add_nodes(self.num_nodes, extra_args)
self.start_nodes()
+ for i, n in enumerate(self.nodes):
+ n.extra_args.pop()
+ if '-wallet=0' in n.extra_args or '-nowallet' in n.extra_args or '-disablewallet' in n.extra_args or not self.is_wallet_compiled():
+ continue
+ if '-wallet=' not in wallets[i] and not any([x.startswith('-wallet=') for x in wallets[i]]):
+ wallets[i].append('-wallet=')
+ for w in wallets[i]:
+ wallet_name = w.split('=', 1)[1]
+ n.createwallet(wallet_name=wallet_name, descriptors=self.options.descriptors)
self.import_deterministic_coinbase_privkeys()
if not self.setup_clean_chain:
for n in self.nodes:
@@ -369,22 +387,47 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
# Public helper methods. These can be accessed by the subclass test scripts.
- def add_nodes(self, num_nodes, extra_args=None, *, rpchost=None, binary=None):
+ def add_nodes(self, num_nodes, extra_args=None, *, rpchost=None, binary=None, binary_cli=None, versions=None):
"""Instantiate TestNode objects.
Should only be called once after the nodes have been specified in
set_test_params()."""
+ def get_bin_from_version(version, bin_name, bin_default):
+ if not version:
+ return bin_default
+ return os.path.join(
+ self.options.previous_releases_path,
+ re.sub(
+ r'\.0$',
+ '', # remove trailing .0 for point releases
+ 'v{}.{}.{}.{}'.format(
+ (version % 100000000) // 1000000,
+ (version % 1000000) // 10000,
+ (version % 10000) // 100,
+ (version % 100) // 1,
+ ),
+ ),
+ 'bin',
+ bin_name,
+ )
+
if self.bind_to_localhost_only:
extra_confs = [["bind=127.0.0.1"]] * num_nodes
else:
extra_confs = [[]] * num_nodes
if extra_args is None:
extra_args = [[]] * num_nodes
+ if versions is None:
+ versions = [None] * num_nodes
if binary is None:
- binary = [self.options.bitcoind] * num_nodes
+ binary = [get_bin_from_version(v, 'bitcoind', self.options.bitcoind) for v in versions]
+ if binary_cli is None:
+ binary_cli = [get_bin_from_version(v, 'bitcoin-cli', self.options.bitcoincli) for v in versions]
assert_equal(len(extra_confs), num_nodes)
assert_equal(len(extra_args), num_nodes)
+ assert_equal(len(versions), num_nodes)
assert_equal(len(binary), num_nodes)
+ assert_equal(len(binary_cli), num_nodes)
for i in range(num_nodes):
self.nodes.append(TestNode(
i,
@@ -392,8 +435,10 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
chain=self.chain,
rpchost=rpchost,
timewait=self.rpc_timeout,
+ factor=self.options.factor,
bitcoind=binary[i],
- bitcoin_cli=self.options.bitcoincli,
+ bitcoin_cli=binary_cli[i],
+ version=versions[i],
coverage_dir=self.options.coveragedir,
cwd=self.options.tmpdir,
extra_conf=extra_confs[i],
@@ -401,6 +446,7 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
use_cli=self.options.usecli,
start_perf=self.options.perf,
use_valgrind=self.options.valgrind,
+ descriptors=self.options.descriptors,
))
def start_node(self, i, *args, **kwargs):
@@ -536,15 +582,21 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
extra_args=['-disablewallet'],
rpchost=None,
timewait=self.rpc_timeout,
+ factor=self.options.factor,
bitcoind=self.options.bitcoind,
bitcoin_cli=self.options.bitcoincli,
coverage_dir=None,
cwd=self.options.tmpdir,
+ descriptors=self.options.descriptors,
))
self.start_node(CACHE_NODE_ID)
+ cache_node = self.nodes[CACHE_NODE_ID]
# Wait for RPC connections to be ready
- self.nodes[CACHE_NODE_ID].wait_for_rpc_connection()
+ cache_node.wait_for_rpc_connection()
+
+ # Set a time in the past, so that blocks don't end up in the future
+ cache_node.setmocktime(cache_node.getblockheader(cache_node.getbestblockhash())['time'])
# Create a 199-block-long chain; each of the 4 first nodes
# gets 25 mature blocks and 25 immature.
@@ -553,12 +605,12 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
# This is needed so that we are out of IBD when the test starts,
# see the tip age check in IsInitialBlockDownload().
for i in range(8):
- self.nodes[CACHE_NODE_ID].generatetoaddress(
+ cache_node.generatetoaddress(
nblocks=25 if i != 7 else 24,
address=TestNode.PRIV_KEYS[i % 4].address,
)
- assert_equal(self.nodes[CACHE_NODE_ID].getblockchaininfo()["blocks"], 199)
+ assert_equal(cache_node.getblockchaininfo()["blocks"], 199)
# Shut it down, and clean up cache directories:
self.stop_nodes()
@@ -613,6 +665,25 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
if not self.is_cli_compiled():
raise SkipTest("bitcoin-cli has not been compiled.")
+ def skip_if_no_previous_releases(self):
+ """Skip the running test if previous releases are not available."""
+ if not self.has_previous_releases():
+ raise SkipTest("previous releases not available or disabled")
+
+ def has_previous_releases(self):
+ """Checks whether previous releases are present and enabled."""
+ if os.getenv("TEST_PREVIOUS_RELEASES") == "false":
+ # disabled
+ return False
+
+ if not os.path.isdir(self.options.previous_releases_path):
+ if os.getenv("TEST_PREVIOUS_RELEASES") == "true":
+ raise AssertionError("TEST_PREVIOUS_RELEASES=true but releases missing: {}".format(
+ self.options.previous_releases_path))
+ # missing
+ return False
+ return True
+
def is_cli_compiled(self):
"""Checks whether bitcoin-cli was compiled."""
return self.config["components"].getboolean("ENABLE_CLI")
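add_nodes() can now take a versions list, and get_bin_from_version() maps each client-version integer onto a directory under previous_releases_path. A quick sketch of that mapping for version 170100 (the resulting path is illustrative):

import re

version = 170100
tag = 'v{}.{}.{}.{}'.format(
    (version % 100000000) // 1000000,   # 0
    (version % 1000000) // 10000,       # 17
    (version % 10000) // 100,           # 1
    (version % 100) // 1,               # 0
)
tag = re.sub(r'\.0$', '', tag)          # remove trailing .0 for point releases
print(tag)                              # v0.17.1
# -> <previous_releases_path>/v0.17.1/bin/bitcoind (PREVIOUS_RELEASES_DIR, default ./releases)
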
diff --git a/test/functional/test_framework/test_node.py b/test/functional/test_framework/test_node.py
index 0742dbe617..e6ec3c1b2d 100755
--- a/test/functional/test_framework/test_node.py
+++ b/test/functional/test_framework/test_node.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2017-2019 The Bitcoin Core developers
+# Copyright (c) 2017-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Class for bitcoind node under test"""
@@ -22,10 +22,12 @@ import shlex
import sys
from .authproxy import JSONRPCException
+from .descriptors import descsum_create
from .util import (
MAX_NODES,
append_config,
delete_cookie_file,
+ get_auth_cookie,
get_rpc_proxy,
rpc_url,
wait_until,
@@ -60,7 +62,7 @@ class TestNode():
To make things easier for the test writer, any unrecognised messages will
be dispatched to the RPC connection."""
- def __init__(self, i, datadir, *, chain, rpchost, timewait, bitcoind, bitcoin_cli, coverage_dir, cwd, extra_conf=None, extra_args=None, use_cli=False, start_perf=False, use_valgrind=False):
+ def __init__(self, i, datadir, *, chain, rpchost, timewait, factor, bitcoind, bitcoin_cli, coverage_dir, cwd, extra_conf=None, extra_args=None, use_cli=False, start_perf=False, use_valgrind=False, version=None, descriptors=False):
"""
Kwargs:
start_perf (bool): If True, begin profiling the node with `perf` as soon as
@@ -78,12 +80,14 @@ class TestNode():
self.binary = bitcoind
self.coverage_dir = coverage_dir
self.cwd = cwd
+ self.descriptors = descriptors
if extra_conf is not None:
append_config(datadir, extra_conf)
# Most callers will just need to add extra args to the standard list below.
# For those callers that need more flexibility, they can just set the args property directly.
# Note that common args are set in the config file (see initialize_datadir)
self.extra_args = extra_args
+ self.version = version
# Configuration for logging is set as command-line args rather than in the bitcoin.conf file.
# This means that starting a bitcoind using the temp dir to debug a failed test won't
# spam debug.log.
@@ -91,7 +95,6 @@ class TestNode():
self.binary,
"-datadir=" + self.datadir,
"-logtimemicros",
- "-logthreadnames",
"-debug",
"-debugexclude=libevent",
"-debugexclude=leveldb",
@@ -107,6 +110,9 @@ class TestNode():
"--gen-suppressions=all", "--exit-on-first-error=yes",
"--error-exitcode=1", "--quiet"] + self.args
+ if self.version is None or self.version >= 190000:
+ self.args.append("-logthreadnames")
+
self.cli = TestNodeCLI(bitcoin_cli, self.datadir)
self.use_cli = use_cli
self.start_perf = start_perf
@@ -122,6 +128,7 @@ class TestNode():
self.perf_subprocesses = {}
self.p2ps = []
+ self.factor = factor
AddressKeyPair = collections.namedtuple('AddressKeyPair', ['address', 'key'])
PRIV_KEYS = [
@@ -166,10 +173,10 @@ class TestNode():
def __getattr__(self, name):
"""Dispatches any unrecognised messages to the RPC connection or a CLI instance."""
if self.use_cli:
- return getattr(self.cli, name)
+ return getattr(RPCOverloadWrapper(self.cli, True, self.descriptors), name)
else:
assert self.rpc_connected and self.rpc is not None, self._node_msg("Error: no RPC connection")
- return getattr(self.rpc, name)
+ return getattr(RPCOverloadWrapper(self.rpc, descriptors=self.descriptors), name)
def start(self, extra_args=None, *, cwd=None, stdout=None, stderr=None, **kwargs):
"""Start the node."""
@@ -222,19 +229,38 @@ class TestNode():
self.rpc_connected = True
self.url = self.rpc.url
return
- except IOError as e:
- if e.errno != errno.ECONNREFUSED: # Port not yet open?
- raise # unknown IO error
except JSONRPCException as e: # Initialization phase
# -28 RPC in warmup
# -342 Service unavailable, RPC server started but is shutting down due to error
if e.error['code'] != -28 and e.error['code'] != -342:
raise # unknown JSON RPC exception
- except ValueError as e: # cookie file not found and no rpcuser or rpcassword. bitcoind still starting
+ except ConnectionResetError:
+ # This might happen when the RPC server is in warmup, but shut down before the call to getblockcount
+ # succeeds. Try again to properly raise the FailedToStartError
+ pass
+ except OSError as e:
+ if e.errno != errno.ECONNREFUSED: # Port not yet open?
+ raise # unknown OS error
+ except ValueError as e: # cookie file not found and no rpcuser or rpcpassword; bitcoind is still starting
if "No RPC credentials" not in str(e):
raise
time.sleep(1.0 / poll_per_s)
- self._raise_assertion_error("Unable to connect to bitcoind")
+ self._raise_assertion_error("Unable to connect to bitcoind after {}s".format(self.rpc_timeout))
+
+ def wait_for_cookie_credentials(self):
+ """Ensures auth cookie credentials can be read, e.g. for testing CLI with -rpcwait before RPC connection is up."""
+ self.log.debug("Waiting for cookie credentials")
+ # Poll at a rate of four times per second.
+ poll_per_s = 4
+ for _ in range(poll_per_s * self.rpc_timeout):
+ try:
+ get_auth_cookie(self.datadir, self.chain)
+ self.log.debug("Cookie credentials successfully retrieved")
+ return
+ except ValueError: # cookie file not found and no rpcuser or rpcpassword; bitcoind is still starting
+ pass # so we continue polling until RPC credentials are retrieved
+ time.sleep(1.0 / poll_per_s)
+ self._raise_assertion_error("Unable to retrieve cookie credentials after {}s".format(self.rpc_timeout))
def generate(self, nblocks, maxtries=1000000):
self.log.debug("TestNode.generate() dispatches `generate` call to `generatetoaddress`")
@@ -242,11 +268,11 @@ class TestNode():
def get_wallet_rpc(self, wallet_name):
if self.use_cli:
- return self.cli("-rpcwallet={}".format(wallet_name))
+ return RPCOverloadWrapper(self.cli("-rpcwallet={}".format(wallet_name)), True, self.descriptors)
else:
assert self.rpc_connected and self.rpc, self._node_msg("RPC not connected")
wallet_path = "wallet/{}".format(urllib.parse.quote(wallet_name))
- return self.rpc / wallet_path
+ return RPCOverloadWrapper(self.rpc / wallet_path, descriptors=self.descriptors)
def stop_node(self, expected_stderr='', wait=0):
"""Stop the node."""
@@ -254,7 +280,11 @@ class TestNode():
return
self.log.debug("Stopping node")
try:
- self.stop(wait=wait)
+ # Do not use wait argument when testing older nodes, e.g. in feature_backwards_compatibility.py
+ if self.version is None or self.version >= 180000:
+ self.stop(wait=wait)
+ else:
+ self.stop()
except http.client.CannotSendRequest:
self.log.exception("Unable to stop node.")
@@ -295,13 +325,13 @@ class TestNode():
return True
def wait_until_stopped(self, timeout=BITCOIND_PROC_WAIT_TIMEOUT):
- wait_until(self.is_node_stopped, timeout=timeout)
+ wait_until(self.is_node_stopped, timeout=timeout, factor=self.factor)
@contextlib.contextmanager
def assert_debug_log(self, expected_msgs, unexpected_msgs=None, timeout=2):
if unexpected_msgs is None:
unexpected_msgs = []
- time_end = time.time() + timeout
+ time_end = time.time() + timeout * self.factor
debug_log = os.path.join(self.datadir, self.chain, 'debug.log')
with open(debug_log, encoding='utf-8') as dl:
dl.seek(0, 2)
@@ -458,10 +488,22 @@ class TestNode():
if 'dstaddr' not in kwargs:
kwargs['dstaddr'] = '127.0.0.1'
- p2p_conn.peer_connect(**kwargs, net=self.chain)()
+ p2p_conn.peer_connect(**kwargs, net=self.chain, factor=self.factor)()
self.p2ps.append(p2p_conn)
if wait_for_verack:
+ # Wait for the node to send us the version and verack
p2p_conn.wait_for_verack()
+ # At this point we have sent our version message and received the version and verack, however the full node
+ # has not yet received the verack from us (in reply to their version). So, the connection is not yet fully
+ # established (fSuccessfullyConnected).
+ #
+ # This shouldn't lead to any issues when sending messages, since the verack will be in-flight before the
+ # message we send. However, it might lead to races where we are expecting to receive a message. E.g. a
+ # transaction that will be added to the mempool as soon as we return here.
+ #
+ # So syncing here is redundant when we only want to send a message, but the cost is low (a few milliseconds)
+ # in comparison to the upside of making tests less fragile and unexpected intermittent errors less likely.
+ p2p_conn.sync_with_ping()
return p2p_conn
@@ -480,6 +522,7 @@ class TestNode():
p.peer_disconnect()
del self.p2ps[:]
+
class TestNodeCLIAttr:
def __init__(self, cli, command):
self.cli = cli
@@ -491,6 +534,7 @@ class TestNodeCLIAttr:
def get_request(self, *args, **kwargs):
return lambda: self(*args, **kwargs)
+
def arg_to_cli(arg):
if isinstance(arg, bool):
return str(arg).lower()
@@ -499,9 +543,9 @@ def arg_to_cli(arg):
else:
return str(arg)
+
class TestNodeCLI():
"""Interface to bitcoin-cli for an individual node"""
-
def __init__(self, binary, datadir):
self.options = []
self.binary = binary
@@ -539,7 +583,7 @@ class TestNodeCLI():
if command is not None:
p_args += [command]
p_args += pos_args + named_args
- self.log.debug("Running bitcoin-cli command: %s" % command)
+ self.log.debug("Running bitcoin-cli {}".format(p_args[2:]))
process = subprocess.Popen(p_args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
cli_stdout, cli_stderr = process.communicate(input=self.input)
returncode = process.poll()
@@ -554,3 +598,118 @@ class TestNodeCLI():
return json.loads(cli_stdout, parse_float=decimal.Decimal)
except json.JSONDecodeError:
return cli_stdout.rstrip("\n")
+
+class RPCOverloadWrapper():
+ def __init__(self, rpc, cli=False, descriptors=False):
+ self.rpc = rpc
+ self.is_cli = cli
+ self.descriptors = descriptors
+
+ def __getattr__(self, name):
+ return getattr(self.rpc, name)
+
+ def createwallet(self, wallet_name, disable_private_keys=None, blank=None, passphrase=None, avoid_reuse=None, descriptors=None):
+ if self.is_cli:
+ if disable_private_keys is None:
+ disable_private_keys = 'null'
+ if blank is None:
+ blank = 'null'
+ if passphrase is None:
+ passphrase = ''
+ if avoid_reuse is None:
+ avoid_reuse = 'null'
+ if descriptors is None:
+ descriptors = self.descriptors
+ return self.__getattr__('createwallet')(wallet_name, disable_private_keys, blank, passphrase, avoid_reuse, descriptors)
+
+ def importprivkey(self, privkey, label=None, rescan=None):
+ wallet_info = self.getwalletinfo()
+ if self.is_cli:
+ if label is None:
+ label = 'null'
+ if rescan is None:
+ rescan = 'null'
+ if 'descriptors' not in wallet_info or ('descriptors' in wallet_info and not wallet_info['descriptors']):
+ return self.__getattr__('importprivkey')(privkey, label, rescan)
+ desc = descsum_create('combo(' + privkey + ')')
+ req = [{
+ 'desc': desc,
+ 'timestamp': 0 if rescan else 'now',
+ 'label': label if label else ''
+ }]
+ import_res = self.importdescriptors(req)
+ if not import_res[0]['success']:
+ raise JSONRPCException(import_res[0]['error'])
+
+ def addmultisigaddress(self, nrequired, keys, label=None, address_type=None):
+ wallet_info = self.getwalletinfo()
+ if self.is_cli:
+ if label is None:
+ label = 'null'
+ if address_type is None:
+ address_type = 'null'
+ if 'descriptors' not in wallet_info or ('descriptors' in wallet_info and not wallet_info['descriptors']):
+ return self.__getattr__('addmultisigaddress')(nrequired, keys, label, address_type)
+ cms = self.createmultisig(nrequired, keys, address_type)
+ req = [{
+ 'desc': cms['descriptor'],
+ 'timestamp': 0,
+ 'label': label if label else ''
+ }]
+ import_res = self.importdescriptors(req)
+ if not import_res[0]['success']:
+ raise JSONRPCException(import_res[0]['error'])
+ return cms
+
+ def importpubkey(self, pubkey, label=None, rescan=None):
+ wallet_info = self.getwalletinfo()
+ if self.is_cli:
+ if label is None:
+ label = 'null'
+ if rescan is None:
+ rescan = 'null'
+ if 'descriptors' not in wallet_info or ('descriptors' in wallet_info and not wallet_info['descriptors']):
+ return self.__getattr__('importpubkey')(pubkey, label, rescan)
+ desc = descsum_create('combo(' + pubkey + ')')
+ req = [{
+ 'desc': desc,
+ 'timestamp': 0 if rescan else 'now',
+ 'label': label if label else ''
+ }]
+ import_res = self.importdescriptors(req)
+ if not import_res[0]['success']:
+ raise JSONRPCException(import_res[0]['error'])
+
+ def importaddress(self, address, label=None, rescan=None, p2sh=None):
+ wallet_info = self.getwalletinfo()
+ if self.is_cli:
+ if label is None:
+ label = 'null'
+ if rescan is None:
+ rescan = 'null'
+ if p2sh is None:
+ p2sh = 'null'
+ if 'descriptors' not in wallet_info or ('descriptors' in wallet_info and not wallet_info['descriptors']):
+ return self.__getattr__('importaddress')(address, label, rescan, p2sh)
+ is_hex = False
+ try:
+ int(address, 16)
+ is_hex = True
+ desc = descsum_create('raw(' + address + ')')
+ except ValueError:
+ desc = descsum_create('addr(' + address + ')')
+ reqs = [{
+ 'desc': desc,
+ 'timestamp': 0 if rescan else 'now',
+ 'label': label if label else ''
+ }]
+ if is_hex and p2sh:
+ reqs.append({
+ 'desc': descsum_create('p2sh(raw(' + address + '))'),
+ 'timestamp': 0 if rescan else 'now',
+ 'label': label if label else ''
+ })
+ import_res = self.importdescriptors(reqs)
+ for res in import_res:
+ if not res['success']:
+ raise JSONRPCException(res['error'])
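
A minimal sketch of how a functional test might exercise the wrapper above; the test class and wallet name are illustrative, not part of this change. On a legacy wallet the overridden importprivkey falls through to the plain RPC, while on a descriptor wallet it is rewritten into an importdescriptors request for a combo() descriptor:

#!/usr/bin/env python3
# Hedged sketch, not part of the diff: exercising RPCOverloadWrapper from a test.
from test_framework.test_framework import BitcoinTestFramework
from test_framework.wallet_util import get_generate_key

class WrapperUsageExample(BitcoinTestFramework):
    def set_test_params(self):
        self.num_nodes = 1

    def skip_test_if_missing_module(self):
        self.skip_if_no_wallet()

    def run_test(self):
        node = self.nodes[0]
        node.createwallet('w1')          # 'descriptors' falls back to the node's self.descriptors
        w1 = node.get_wallet_rpc('w1')   # returns an RPCOverloadWrapper around the wallet endpoint
        key = get_generate_key()         # fresh key from the wallet_util helper added below
        # Legacy wallet: plain importprivkey. Descriptor wallet: importdescriptors of combo(<WIF key>).
        w1.importprivkey(key.privkey, rescan=False)
        assert w1.getwalletinfo()['walletname'] == 'w1'

if __name__ == '__main__':
    WrapperUsageExample().main()
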
diff --git a/test/functional/test_framework/util.py b/test/functional/test_framework/util.py
index 5bb73aee7e..20ab9ee464 100644
--- a/test/functional/test_framework/util.py
+++ b/test/functional/test_framework/util.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
-# Copyright (c) 2014-2019 The Bitcoin Core developers
+# Copyright (c) 2014-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Helpful routines for regression testing."""
@@ -208,9 +208,10 @@ def str_to_b64str(string):
def satoshi_round(amount):
return Decimal(amount).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)
-def wait_until(predicate, *, attempts=float('inf'), timeout=float('inf'), lock=None):
+def wait_until(predicate, *, attempts=float('inf'), timeout=float('inf'), lock=None, factor=1.0):
if attempts == float('inf') and timeout == float('inf'):
timeout = 60
+ timeout = timeout * factor
attempt = 0
time_end = time.time() + timeout
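
The new factor argument simply scales whatever timeout ends up in effect, including the implicit 60 second default, so a whole test node can have its waits stretched uniformly (as the TestNode changes above suggest). A small illustration with an arbitrary predicate:

# Illustrative only: the predicate becomes true after ~2 seconds, and the
# effective timeout is 10 * 4 = 40 seconds, so this returns well before timing out.
import time
from test_framework.util import wait_until

start = time.time()
wait_until(lambda: time.time() - start > 2, timeout=10, factor=4)
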
@@ -265,7 +266,7 @@ def get_rpc_proxy(url, node_number, *, timeout=None, coveragedir=None):
"""
proxy_kwargs = {}
if timeout is not None:
- proxy_kwargs['timeout'] = timeout
+ proxy_kwargs['timeout'] = int(timeout)
proxy = AuthServiceProxy(url, **proxy_kwargs)
proxy.url = url # store URL on proxy for info
@@ -326,6 +327,13 @@ def initialize_datadir(dirname, n, chain):
os.makedirs(os.path.join(datadir, 'stdout'), exist_ok=True)
return datadir
+def adjust_bitcoin_conf_for_pre_17(conf_file):
+ with open(conf_file, 'r', encoding='utf8') as conf:
+ conf_data = conf.read()
+ with open(conf_file, 'w', encoding='utf8') as conf:
+ conf_data_changed = conf_data.replace('[regtest]', '')
+ conf.write(conf_data_changed)
+
def get_datadir_path(dirname, n):
return os.path.join(dirname, "node" + str(n))
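
Pre-0.17 versions of bitcoind do not understand the [regtest] section header written into the generated config, so this helper flattens it for backwards-compatibility tests. A rough illustration of its effect (the option values are made up):

# Illustrative: the section header is removed, the options themselves are kept.
import tempfile
from test_framework.util import adjust_bitcoin_conf_for_pre_17

with tempfile.NamedTemporaryFile('w', suffix='.conf', delete=False) as f:
    f.write('regtest=1\n[regtest]\nport=11000\n')
adjust_bitcoin_conf_for_pre_17(f.name)
print(open(f.name, encoding='utf8').read())  # 'regtest=1\n\nport=11000\n'
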
@@ -393,6 +401,7 @@ def connect_nodes(from_connection, node_num):
# with transaction relaying
wait_until(lambda: all(peer['version'] != 0 for peer in from_connection.getpeerinfo()))
+
def sync_blocks(rpc_connections, *, wait=1, timeout=60):
"""
Wait until everybody has the same tip.
@@ -406,8 +415,14 @@ def sync_blocks(rpc_connections, *, wait=1, timeout=60):
best_hash = [x.getbestblockhash() for x in rpc_connections]
if best_hash.count(best_hash[0]) == len(rpc_connections):
return
+ # Check that each peer has at least one connection
+ assert all(len(x.getpeerinfo()) for x in rpc_connections)
time.sleep(wait)
- raise AssertionError("Block sync timed out:{}".format("".join("\n {!r}".format(b) for b in best_hash)))
+ raise AssertionError("Block sync timed out after {}s:{}".format(
+ timeout,
+ "".join("\n {!r}".format(b) for b in best_hash),
+ ))
+
def sync_mempools(rpc_connections, *, wait=1, timeout=60, flush_scheduler=True):
"""
@@ -422,12 +437,19 @@ def sync_mempools(rpc_connections, *, wait=1, timeout=60, flush_scheduler=True):
for r in rpc_connections:
r.syncwithvalidationinterfacequeue()
return
+ # Check that each peer has at least one connection
+ assert all(len(x.getpeerinfo()) for x in rpc_connections)
time.sleep(wait)
- raise AssertionError("Mempool sync timed out:{}".format("".join("\n {!r}".format(m) for m in pool)))
+ raise AssertionError("Mempool sync timed out after {}s:{}".format(
+ timeout,
+ "".join("\n {!r}".format(m) for m in pool),
+ ))
+
# Transaction/Block functions
#############################
+
def find_output(node, txid, amount, *, blockhash=None):
"""
Return index to output of txid with value amount
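
Both sync helpers now check that every node still has at least one peer before sleeping again, so a missing connection surfaces immediately rather than as a 60 second timeout that reports only the divergent tips or mempools. A hedged sketch of a test tripping the new check (class name and node setup are illustrative):

#!/usr/bin/env python3
# Hedged sketch, not part of the diff: the connectivity assert fails fast.
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import disconnect_nodes, sync_blocks

class SyncBlocksAssertExample(BitcoinTestFramework):
    def set_test_params(self):
        self.num_nodes = 2

    def run_test(self):
        disconnect_nodes(self.nodes[0], 1)   # node0 now has zero peers
        self.nodes[0].generate(1)
        try:
            sync_blocks(self.nodes, timeout=5)
        except AssertionError:
            # Previously this spun for the full timeout; the new assert
            # fails on the first poll because node0 has no connections.
            self.log.info("sync_blocks failed fast on the disconnected node")

if __name__ == '__main__':
    SyncBlocksAssertExample().main()
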
diff --git a/test/functional/test_framework/wallet_util.py b/test/functional/test_framework/wallet_util.py
index eb537015fb..1b6686ff45 100755
--- a/test/functional/test_framework/wallet_util.py
+++ b/test/functional/test_framework/wallet_util.py
@@ -13,6 +13,10 @@ from test_framework.address import (
script_to_p2sh_p2wsh,
script_to_p2wsh,
)
+from test_framework.key import (
+ bytes_to_wif,
+ ECKey,
+)
from test_framework.script import (
CScript,
OP_0,
@@ -66,6 +70,25 @@ def get_key(node):
p2sh_p2wpkh_redeem_script=CScript([OP_0, pkh]).hex(),
p2sh_p2wpkh_addr=key_to_p2sh_p2wpkh(pubkey))
+def get_generate_key():
+ """Generate a fresh key
+
+ Returns a named tuple of privkey, pubkey and all addresses and scripts."""
+ eckey = ECKey()
+ eckey.generate()
+ privkey = bytes_to_wif(eckey.get_bytes())
+ pubkey = eckey.get_pubkey().get_bytes().hex()
+ pkh = hash160(hex_str_to_bytes(pubkey))
+ return Key(privkey=privkey,
+ pubkey=pubkey,
+ p2pkh_script=CScript([OP_DUP, OP_HASH160, pkh, OP_EQUALVERIFY, OP_CHECKSIG]).hex(),
+ p2pkh_addr=key_to_p2pkh(pubkey),
+ p2wpkh_script=CScript([OP_0, pkh]).hex(),
+ p2wpkh_addr=key_to_p2wpkh(pubkey),
+ p2sh_p2wpkh_script=CScript([OP_HASH160, hash160(CScript([OP_0, pkh])), OP_EQUAL]).hex(),
+ p2sh_p2wpkh_redeem_script=CScript([OP_0, pkh]).hex(),
+ p2sh_p2wpkh_addr=key_to_p2sh_p2wpkh(pubkey))
+
def get_multisig(node):
"""Generate a fresh 2-of-3 multisig on node