Diffstat (limited to 'test/functional/test_framework')
-rw-r--r--  test/functional/test_framework/address.py               |  72
-rw-r--r--  test/functional/test_framework/bdb.py                    | 152
-rw-r--r--  test/functional/test_framework/bip340_test_vectors.csv  |  16
-rw-r--r--  test/functional/test_framework/blocktools.py            |  75
-rw-r--r--  test/functional/test_framework/key.py                   | 200
-rwxr-xr-x  test/functional/test_framework/messages.py              | 103
-rwxr-xr-x  test/functional/test_framework/p2p.py                   |  12
-rw-r--r--  test/functional/test_framework/script.py                | 133
-rwxr-xr-x  test/functional/test_framework/script_util.py           |  59
-rw-r--r--  test/functional/test_framework/segwit_addr.py           |  22
-rwxr-xr-x  test/functional/test_framework/test_framework.py        | 108
-rwxr-xr-x  test/functional/test_framework/test_node.py             |   9
-rw-r--r--  test/functional/test_framework/util.py                  | 107
13 files changed, 840 insertions(+), 228 deletions(-)
diff --git a/test/functional/test_framework/address.py b/test/functional/test_framework/address.py
index 9506b63f82..360962b8da 100644
--- a/test/functional/test_framework/address.py
+++ b/test/functional/test_framework/address.py
@@ -2,17 +2,17 @@
# Copyright (c) 2016-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
-"""Encode and decode BASE58, P2PKH and P2SH addresses."""
+"""Encode and decode Bitcoin addresses.
+
+- base58 P2PKH and P2SH addresses.
+- bech32 segwit v0 P2WPKH and P2WSH addresses."""
import enum
import unittest
from .script import hash256, hash160, sha256, CScript, OP_0
-from .util import hex_str_to_bytes
-
-from . import segwit_addr
-
-from test_framework.util import assert_equal
+from .segwit_addr import encode_segwit_address
+from .util import assert_equal, hex_str_to_bytes
ADDRESS_BCRT1_UNSPENDABLE = 'bcrt1qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq3xueyj'
ADDRESS_BCRT1_UNSPENDABLE_DESCRIPTOR = 'addr(bcrt1qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq3xueyj)#juyq9d97'
@@ -35,7 +35,7 @@ def byte_to_base58(b, version):
str = chr(version).encode('latin-1').hex() + str
checksum = hash256(hex_str_to_bytes(str)).hex()
str += checksum[:8]
- value = int('0x'+str,0)
+ value = int('0x' + str, 0)
while value > 0:
result = chars[value % 58] + result
value //= 58
@@ -45,7 +45,10 @@ def byte_to_base58(b, version):
return result
-def base58_to_byte(s, verify_checksum=True):
+def base58_to_byte(s):
+ """Converts a base58-encoded string to its data and version.
+
+ Throws if the base58 checksum is invalid."""
if not s:
return b''
n = 0
@@ -65,66 +68,67 @@ def base58_to_byte(s, verify_checksum=True):
else:
break
res = b'\x00' * pad + res
- if verify_checksum:
- assert_equal(hash256(res[:-4])[:4], res[-4:])
+
+ # Assert if the checksum is invalid
+ assert_equal(hash256(res[:-4])[:4], res[-4:])
return res[1:-4], int(res[0])
-def keyhash_to_p2pkh(hash, main = False):
+def keyhash_to_p2pkh(hash, main=False):
assert len(hash) == 20
version = 0 if main else 111
return byte_to_base58(hash, version)
-def scripthash_to_p2sh(hash, main = False):
+def scripthash_to_p2sh(hash, main=False):
assert len(hash) == 20
version = 5 if main else 196
return byte_to_base58(hash, version)
-def key_to_p2pkh(key, main = False):
+def key_to_p2pkh(key, main=False):
key = check_key(key)
return keyhash_to_p2pkh(hash160(key), main)
-def script_to_p2sh(script, main = False):
+def script_to_p2sh(script, main=False):
script = check_script(script)
return scripthash_to_p2sh(hash160(script), main)
-def key_to_p2sh_p2wpkh(key, main = False):
+def key_to_p2sh_p2wpkh(key, main=False):
key = check_key(key)
p2shscript = CScript([OP_0, hash160(key)])
return script_to_p2sh(p2shscript, main)
-def program_to_witness(version, program, main = False):
+def program_to_witness(version, program, main=False):
if (type(program) is str):
program = hex_str_to_bytes(program)
assert 0 <= version <= 16
assert 2 <= len(program) <= 40
assert version > 0 or len(program) in [20, 32]
- return segwit_addr.encode("bc" if main else "bcrt", version, program)
+ return encode_segwit_address("bc" if main else "bcrt", version, program)
-def script_to_p2wsh(script, main = False):
+def script_to_p2wsh(script, main=False):
script = check_script(script)
return program_to_witness(0, sha256(script), main)
-def key_to_p2wpkh(key, main = False):
+def key_to_p2wpkh(key, main=False):
key = check_key(key)
return program_to_witness(0, hash160(key), main)
-def script_to_p2sh_p2wsh(script, main = False):
+def script_to_p2sh_p2wsh(script, main=False):
script = check_script(script)
p2shscript = CScript([OP_0, sha256(script)])
return script_to_p2sh(p2shscript, main)
def check_key(key):
if (type(key) is str):
- key = hex_str_to_bytes(key) # Assuming this is hex string
+ key = hex_str_to_bytes(key) # Assuming this is hex string
if (type(key) is bytes and (len(key) == 33 or len(key) == 65)):
return key
assert False
def check_script(script):
if (type(script) is str):
- script = hex_str_to_bytes(script) # Assuming this is hex string
+ script = hex_str_to_bytes(script) # Assuming this is hex string
if (type(script) is bytes or type(script) is CScript):
return script
assert False
@@ -135,15 +139,15 @@ class TestFrameworkScript(unittest.TestCase):
def check_base58(data, version):
self.assertEqual(base58_to_byte(byte_to_base58(data, version)), (data, version))
- check_base58(b'\x1f\x8e\xa1p*{\xd4\x94\x1b\xca\tA\xb8R\xc4\xbb\xfe\xdb.\x05', 111)
- check_base58(b':\x0b\x05\xf4\xd7\xf6l;\xa7\x00\x9fE50)l\x84\\\xc9\xcf', 111)
- check_base58(b'A\xc1\xea\xf1\x11\x80%Y\xba\xd6\x1b`\xd6+\x1f\x89|c\x92\x8a', 111)
- check_base58(b'\0A\xc1\xea\xf1\x11\x80%Y\xba\xd6\x1b`\xd6+\x1f\x89|c\x92\x8a', 111)
- check_base58(b'\0\0A\xc1\xea\xf1\x11\x80%Y\xba\xd6\x1b`\xd6+\x1f\x89|c\x92\x8a', 111)
- check_base58(b'\0\0\0A\xc1\xea\xf1\x11\x80%Y\xba\xd6\x1b`\xd6+\x1f\x89|c\x92\x8a', 111)
- check_base58(b'\x1f\x8e\xa1p*{\xd4\x94\x1b\xca\tA\xb8R\xc4\xbb\xfe\xdb.\x05', 0)
- check_base58(b':\x0b\x05\xf4\xd7\xf6l;\xa7\x00\x9fE50)l\x84\\\xc9\xcf', 0)
- check_base58(b'A\xc1\xea\xf1\x11\x80%Y\xba\xd6\x1b`\xd6+\x1f\x89|c\x92\x8a', 0)
- check_base58(b'\0A\xc1\xea\xf1\x11\x80%Y\xba\xd6\x1b`\xd6+\x1f\x89|c\x92\x8a', 0)
- check_base58(b'\0\0A\xc1\xea\xf1\x11\x80%Y\xba\xd6\x1b`\xd6+\x1f\x89|c\x92\x8a', 0)
- check_base58(b'\0\0\0A\xc1\xea\xf1\x11\x80%Y\xba\xd6\x1b`\xd6+\x1f\x89|c\x92\x8a', 0)
+ check_base58(bytes.fromhex('1f8ea1702a7bd4941bca0941b852c4bbfedb2e05'), 111)
+ check_base58(bytes.fromhex('3a0b05f4d7f66c3ba7009f453530296c845cc9cf'), 111)
+ check_base58(bytes.fromhex('41c1eaf111802559bad61b60d62b1f897c63928a'), 111)
+ check_base58(bytes.fromhex('0041c1eaf111802559bad61b60d62b1f897c63928a'), 111)
+ check_base58(bytes.fromhex('000041c1eaf111802559bad61b60d62b1f897c63928a'), 111)
+ check_base58(bytes.fromhex('00000041c1eaf111802559bad61b60d62b1f897c63928a'), 111)
+ check_base58(bytes.fromhex('1f8ea1702a7bd4941bca0941b852c4bbfedb2e05'), 0)
+ check_base58(bytes.fromhex('3a0b05f4d7f66c3ba7009f453530296c845cc9cf'), 0)
+ check_base58(bytes.fromhex('41c1eaf111802559bad61b60d62b1f897c63928a'), 0)
+ check_base58(bytes.fromhex('0041c1eaf111802559bad61b60d62b1f897c63928a'), 0)
+ check_base58(bytes.fromhex('000041c1eaf111802559bad61b60d62b1f897c63928a'), 0)
+ check_base58(bytes.fromhex('00000041c1eaf111802559bad61b60d62b1f897c63928a'), 0)
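
A minimal round-trip sketch of the helpers changed above (not part of the patch); it assumes the test/functional directory is on PYTHONPATH so the test_framework package imports resolve:

    from test_framework.address import base58_to_byte, byte_to_base58

    # 20-byte key hash with the regtest/testnet P2PKH version byte (111).
    keyhash = bytes.fromhex('1f8ea1702a7bd4941bca0941b852c4bbfedb2e05')
    addr = byte_to_base58(keyhash, 111)

    # base58_to_byte now always verifies the 4-byte hash256 checksum and
    # fails via assert_equal if it does not match.
    data, version = base58_to_byte(addr)
    assert (data, version) == (keyhash, 111)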
diff --git a/test/functional/test_framework/bdb.py b/test/functional/test_framework/bdb.py
new file mode 100644
index 0000000000..9de358aa0a
--- /dev/null
+++ b/test/functional/test_framework/bdb.py
@@ -0,0 +1,152 @@
+#!/usr/bin/env python3
+# Copyright (c) 2020 The Bitcoin Core developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""
+Utilities for working directly with the wallet's BDB database file
+
+This is specific to the configuration of BDB used in this project:
+ - pagesize: 4096 bytes
+ - Outer database contains single subdatabase named 'main'
+ - btree
+ - btree leaf pages
+
+Each key-value pair is two entries in a btree leaf. The first is the key, the one that follows
+is the value. And so on. Note that the entry data is itself not in the correct order. Instead
+entry offsets are stored in the correct order and those offsets are needed to then retrieve
+the data itself.
+
+Page format can be found in BDB source code dbinc/db_page.h
+This only implements the deserialization of btree metadata pages and normal btree pages. Overflow
+pages are not implemented but may be needed in the future if dealing with wallets with large
+transactions.
+
+`db_dump -da wallet.dat` is useful to see the data in a wallet.dat BDB file
+"""
+
+import binascii
+import struct
+
+# Important constants
+PAGESIZE = 4096
+OUTER_META_PAGE = 0
+INNER_META_PAGE = 2
+
+# Page type values
+BTREE_INTERNAL = 3
+BTREE_LEAF = 5
+BTREE_META = 9
+
+# Some magic numbers for sanity checking
+BTREE_MAGIC = 0x053162
+DB_VERSION = 9
+
+# Deserializes a leaf page into a dict.
+# Btree internal pages have the same header, for those, return None.
+# For the btree leaf pages, deserialize them and put all the data into a dict
+def dump_leaf_page(data):
+ page_info = {}
+ page_header = data[0:26]
+ _, pgno, prev_pgno, next_pgno, entries, hf_offset, level, pg_type = struct.unpack('QIIIHHBB', page_header)
+ page_info['pgno'] = pgno
+ page_info['prev_pgno'] = prev_pgno
+ page_info['next_pgno'] = next_pgno
+ page_info['entries'] = entries
+ page_info['hf_offset'] = hf_offset
+ page_info['level'] = level
+ page_info['pg_type'] = pg_type
+ page_info['entry_offsets'] = struct.unpack('{}H'.format(entries), data[26:26 + entries * 2])
+ page_info['entries'] = []
+
+ if pg_type == BTREE_INTERNAL:
+ # Skip internal pages. These are the internal nodes of the btree and don't contain anything relevant to us
+ return None
+
+ assert pg_type == BTREE_LEAF, 'A non-btree leaf page has been encountered while dumping leaves'
+
+ for i in range(0, entries):
+ offset = page_info['entry_offsets'][i]
+ entry = {'offset': offset}
+ page_data_header = data[offset:offset + 3]
+ e_len, pg_type = struct.unpack('HB', page_data_header)
+ entry['len'] = e_len
+ entry['pg_type'] = pg_type
+ entry['data'] = data[offset + 3:offset + 3 + e_len]
+ page_info['entries'].append(entry)
+
+ return page_info
+
+# Deserializes a btree metadata page into a dict.
+# Does a simple sanity check on the magic value, type, and version
+def dump_meta_page(page):
+ # metadata page
+ # general metadata
+ metadata = {}
+ meta_page = page[0:72]
+ _, pgno, magic, version, pagesize, encrypt_alg, pg_type, metaflags, _, free, last_pgno, nparts, key_count, record_count, flags, uid = struct.unpack('QIIIIBBBBIIIIII20s', meta_page)
+ metadata['pgno'] = pgno
+ metadata['magic'] = magic
+ metadata['version'] = version
+ metadata['pagesize'] = pagesize
+ metadata['encrypt_alg'] = encrypt_alg
+ metadata['pg_type'] = pg_type
+ metadata['metaflags'] = metaflags
+ metadata['free'] = free
+ metadata['last_pgno'] = last_pgno
+ metadata['nparts'] = nparts
+ metadata['key_count'] = key_count
+ metadata['record_count'] = record_count
+ metadata['flags'] = flags
+ metadata['uid'] = binascii.hexlify(uid)
+
+ assert magic == BTREE_MAGIC, 'bdb magic does not match bdb btree magic'
+ assert pg_type == BTREE_META, 'Metadata page is not a btree metadata page'
+ assert version == DB_VERSION, 'Database too new'
+
+ # btree metadata
+ btree_meta_page = page[72:512]
+ _, minkey, re_len, re_pad, root, _, crypto_magic, _, iv, chksum = struct.unpack('IIIII368sI12s16s20s', btree_meta_page)
+ metadata['minkey'] = minkey
+ metadata['re_len'] = re_len
+ metadata['re_pad'] = re_pad
+ metadata['root'] = root
+ metadata['crypto_magic'] = crypto_magic
+ metadata['iv'] = binascii.hexlify(iv)
+ metadata['chksum'] = binascii.hexlify(chksum)
+ return metadata
+
+# Given the dict from dump_leaf_page, get the key-value pairs and put them into a dict
+def extract_kv_pairs(page_data):
+ out = {}
+ last_key = None
+ for i, entry in enumerate(page_data['entries']):
+ # By virtue of these all being pairs, even number entries are keys, and odd are values
+ if i % 2 == 0:
+ out[entry['data']] = b''
+ last_key = entry['data']
+ else:
+ out[last_key] = entry['data']
+ return out
+
+# Extract the key-value pairs of the BDB file given in filename
+def dump_bdb_kv(filename):
+ # Read in the BDB file and start deserializing it
+ pages = []
+ with open(filename, 'rb') as f:
+ data = f.read(PAGESIZE)
+ while len(data) > 0:
+ pages.append(data)
+ data = f.read(PAGESIZE)
+
+ # Sanity check the meta pages
+ dump_meta_page(pages[OUTER_META_PAGE])
+ dump_meta_page(pages[INNER_META_PAGE])
+
+ # Fetch the kv pairs from the leaf pages
+ kv = {}
+ for i in range(3, len(pages)):
+ info = dump_leaf_page(pages[i])
+ if info is not None:
+ info_kv = extract_kv_pairs(info)
+ kv = {**kv, **info_kv}
+ return kv
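
A usage sketch for the new module (not part of the patch); the wallet path below is hypothetical and should point at a legacy BDB wallet.dat:

    from test_framework.bdb import dump_bdb_kv

    # Returns a dict mapping raw key bytes to raw value bytes, collected from
    # every btree leaf page in the file.
    kv = dump_bdb_kv('/tmp/test_node0/regtest/wallets/wallet.dat')
    for key, value in kv.items():
        print(key.hex(), value.hex())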
diff --git a/test/functional/test_framework/bip340_test_vectors.csv b/test/functional/test_framework/bip340_test_vectors.csv
new file mode 100644
index 0000000000..e068322deb
--- /dev/null
+++ b/test/functional/test_framework/bip340_test_vectors.csv
@@ -0,0 +1,16 @@
+index,secret key,public key,aux_rand,message,signature,verification result,comment
+0,0000000000000000000000000000000000000000000000000000000000000003,F9308A019258C31049344F85F89D5229B531C845836F99B08601F113BCE036F9,0000000000000000000000000000000000000000000000000000000000000000,0000000000000000000000000000000000000000000000000000000000000000,E907831F80848D1069A5371B402410364BDF1C5F8307B0084C55F1CE2DCA821525F66A4A85EA8B71E482A74F382D2CE5EBEEE8FDB2172F477DF4900D310536C0,TRUE,
+1,B7E151628AED2A6ABF7158809CF4F3C762E7160F38B4DA56A784D9045190CFEF,DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659,0000000000000000000000000000000000000000000000000000000000000001,243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89,6896BD60EEAE296DB48A229FF71DFE071BDE413E6D43F917DC8DCF8C78DE33418906D11AC976ABCCB20B091292BFF4EA897EFCB639EA871CFA95F6DE339E4B0A,TRUE,
+2,C90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B14E5C9,DD308AFEC5777E13121FA72B9CC1B7CC0139715309B086C960E18FD969774EB8,C87AA53824B4D7AE2EB035A2B5BBBCCC080E76CDC6D1692C4B0B62D798E6D906,7E2D58D8B3BCDF1ABADEC7829054F90DDA9805AAB56C77333024B9D0A508B75C,5831AAEED7B44BB74E5EAB94BA9D4294C49BCF2A60728D8B4C200F50DD313C1BAB745879A5AD954A72C45A91C3A51D3C7ADEA98D82F8481E0E1E03674A6F3FB7,TRUE,
+3,0B432B2677937381AEF05BB02A66ECD012773062CF3FA2549E44F58ED2401710,25D1DFF95105F5253C4022F628A996AD3A0D95FBF21D468A1B33F8C160D8F517,FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF,FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF,7EB0509757E246F19449885651611CB965ECC1A187DD51B64FDA1EDC9637D5EC97582B9CB13DB3933705B32BA982AF5AF25FD78881EBB32771FC5922EFC66EA3,TRUE,test fails if msg is reduced modulo p or n
+4,,D69C3509BB99E412E68B0FE8544E72837DFA30746D8BE2AA65975F29D22DC7B9,,4DF3C3F68FCC83B27E9D42C90431A72499F17875C81A599B566C9889B9696703,00000000000000000000003B78CE563F89A0ED9414F5AA28AD0D96D6795F9C6376AFB1548AF603B3EB45C9F8207DEE1060CB71C04E80F593060B07D28308D7F4,TRUE,
+5,,EEFDEA4CDB677750A420FEE807EACF21EB9898AE79B9768766E4FAA04A2D4A34,,243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89,6CFF5C3BA86C69EA4B7376F31A9BCB4F74C1976089B2D9963DA2E5543E17776969E89B4C5564D00349106B8497785DD7D1D713A8AE82B32FA79D5F7FC407D39B,FALSE,public key not on the curve
+6,,DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659,,243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89,FFF97BD5755EEEA420453A14355235D382F6472F8568A18B2F057A14602975563CC27944640AC607CD107AE10923D9EF7A73C643E166BE5EBEAFA34B1AC553E2,FALSE,has_even_y(R) is false
+7,,DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659,,243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89,1FA62E331EDBC21C394792D2AB1100A7B432B013DF3F6FF4F99FCB33E0E1515F28890B3EDB6E7189B630448B515CE4F8622A954CFE545735AAEA5134FCCDB2BD,FALSE,negated message
+8,,DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659,,243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89,6CFF5C3BA86C69EA4B7376F31A9BCB4F74C1976089B2D9963DA2E5543E177769961764B3AA9B2FFCB6EF947B6887A226E8D7C93E00C5ED0C1834FF0D0C2E6DA6,FALSE,negated s value
+9,,DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659,,243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89,0000000000000000000000000000000000000000000000000000000000000000123DDA8328AF9C23A94C1FEECFD123BA4FB73476F0D594DCB65C6425BD186051,FALSE,sG - eP is infinite. Test fails in single verification if has_even_y(inf) is defined as true and x(inf) as 0
+10,,DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659,,243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89,00000000000000000000000000000000000000000000000000000000000000017615FBAF5AE28864013C099742DEADB4DBA87F11AC6754F93780D5A1837CF197,FALSE,sG - eP is infinite. Test fails in single verification if has_even_y(inf) is defined as true and x(inf) as 1
+11,,DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659,,243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89,4A298DACAE57395A15D0795DDBFD1DCB564DA82B0F269BC70A74F8220429BA1D69E89B4C5564D00349106B8497785DD7D1D713A8AE82B32FA79D5F7FC407D39B,FALSE,sig[0:32] is not an X coordinate on the curve
+12,,DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659,,243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89,FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F69E89B4C5564D00349106B8497785DD7D1D713A8AE82B32FA79D5F7FC407D39B,FALSE,sig[0:32] is equal to field size
+13,,DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659,,243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89,6CFF5C3BA86C69EA4B7376F31A9BCB4F74C1976089B2D9963DA2E5543E177769FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141,FALSE,sig[32:64] is equal to curve order
+14,,FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC30,,243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89,6CFF5C3BA86C69EA4B7376F31A9BCB4F74C1976089B2D9963DA2E5543E17776969E89B4C5564D00349106B8497785DD7D1D713A8AE82B32FA79D5F7FC407D39B,FALSE,public key is not a valid X coordinate because it exceeds the field size
diff --git a/test/functional/test_framework/blocktools.py b/test/functional/test_framework/blocktools.py
index afc1995009..6b7214f03a 100644
--- a/test/functional/test_framework/blocktools.py
+++ b/test/functional/test_framework/blocktools.py
@@ -4,6 +4,9 @@
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Utilities for manipulating blocks and transactions."""
+from binascii import a2b_hex
+import struct
+import time
import unittest
from .address import (
@@ -41,9 +44,10 @@ from .script import (
hash160,
)
from .util import assert_equal
-from io import BytesIO
+WITNESS_SCALE_FACTOR = 4
MAX_BLOCK_SIGOPS = 20000
+MAX_BLOCK_SIGOPS_WEIGHT = MAX_BLOCK_SIGOPS * WITNESS_SCALE_FACTOR
# Genesis block time (regtest)
TIME_GENESIS_BLOCK = 1296688602
@@ -51,19 +55,29 @@ TIME_GENESIS_BLOCK = 1296688602
# From BIP141
WITNESS_COMMITMENT_HEADER = b"\xaa\x21\xa9\xed"
+NORMAL_GBT_REQUEST_PARAMS = {"rules": ["segwit"]}
-def create_block(hashprev, coinbase, ntime=None, *, version=1):
+
+def create_block(hashprev=None, coinbase=None, ntime=None, *, version=None, tmpl=None, txlist=None):
"""Create a block (with regtest difficulty)."""
block = CBlock()
- block.nVersion = version
- if ntime is None:
- import time
- block.nTime = int(time.time() + 600)
+ if tmpl is None:
+ tmpl = {}
+ block.nVersion = version or tmpl.get('version') or 1
+ block.nTime = ntime or tmpl.get('curtime') or int(time.time() + 600)
+ block.hashPrevBlock = hashprev or int(tmpl['previousblockhash'], 0x10)
+ if tmpl and not tmpl.get('bits') is None:
+ block.nBits = struct.unpack('>I', a2b_hex(tmpl['bits']))[0]
else:
- block.nTime = ntime
- block.hashPrevBlock = hashprev
- block.nBits = 0x207fffff # difficulty retargeting is disabled in REGTEST chainparams
+ block.nBits = 0x207fffff # difficulty retargeting is disabled in REGTEST chainparams
+ if coinbase is None:
+ coinbase = create_coinbase(height=tmpl['height'])
block.vtx.append(coinbase)
+ if txlist:
+ for tx in txlist:
+ if not hasattr(tx, 'calc_sha256'):
+ tx = FromHex(CTransaction(), tx)
+ block.vtx.append(tx)
block.hashMerkleRoot = block.calc_merkle_root()
block.calc_sha256()
return block
@@ -101,22 +115,31 @@ def script_BIP34_coinbase_height(height):
return CScript([CScriptNum(height)])
-def create_coinbase(height, pubkey=None):
- """Create a coinbase transaction, assuming no miner fees.
+def create_coinbase(height, pubkey=None, extra_output_script=None, fees=0):
+ """Create a coinbase transaction.
If pubkey is passed in, the coinbase output will be a P2PK output;
- otherwise an anyone-can-spend output."""
+ otherwise an anyone-can-spend output.
+
+ If extra_output_script is given, make a 0-value output to that
+ script. This is useful to pad block weight/sigops as needed. """
coinbase = CTransaction()
coinbase.vin.append(CTxIn(COutPoint(0, 0xffffffff), script_BIP34_coinbase_height(height), 0xffffffff))
coinbaseoutput = CTxOut()
coinbaseoutput.nValue = 50 * COIN
halvings = int(height / 150) # regtest
coinbaseoutput.nValue >>= halvings
- if (pubkey is not None):
+ coinbaseoutput.nValue += fees
+ if pubkey is not None:
coinbaseoutput.scriptPubKey = CScript([pubkey, OP_CHECKSIG])
else:
coinbaseoutput.scriptPubKey = CScript([OP_TRUE])
coinbase.vout = [coinbaseoutput]
+ if extra_output_script is not None:
+ coinbaseoutput2 = CTxOut()
+ coinbaseoutput2.nValue = 0
+ coinbaseoutput2.scriptPubKey = extra_output_script
+ coinbase.vout.append(coinbaseoutput2)
coinbase.calc_sha256()
return coinbase
@@ -135,25 +158,27 @@ def create_tx_with_script(prevtx, n, script_sig=b"", *, amount, script_pub_key=C
def create_transaction(node, txid, to_address, *, amount):
""" Return signed transaction spending the first output of the
- input txid. Note that the node must be able to sign for the
- output that is being spent, and the node must not be running
- multiple wallets.
+ input txid. Note that the node must have a wallet that can
+ sign for the output that is being spent.
"""
raw_tx = create_raw_transaction(node, txid, to_address, amount=amount)
- tx = CTransaction()
- tx.deserialize(BytesIO(hex_str_to_bytes(raw_tx)))
+ tx = FromHex(CTransaction(), raw_tx)
return tx
def create_raw_transaction(node, txid, to_address, *, amount):
""" Return raw signed transaction spending the first output of the
- input txid. Note that the node must be able to sign for the
- output that is being spent, and the node must not be running
- multiple wallets.
+ input txid. Note that the node must have a wallet that can sign
+ for the output that is being spent.
"""
- rawtx = node.createrawtransaction(inputs=[{"txid": txid, "vout": 0}], outputs={to_address: amount})
- signresult = node.signrawtransactionwithwallet(rawtx)
- assert_equal(signresult["complete"], True)
- return signresult['hex']
+ psbt = node.createpsbt(inputs=[{"txid": txid, "vout": 0}], outputs={to_address: amount})
+ for _ in range(2):
+ for w in node.listwallets():
+ wrpc = node.get_wallet_rpc(w)
+ signed_psbt = wrpc.walletprocesspsbt(psbt)
+ psbt = signed_psbt['psbt']
+ final_psbt = node.finalizepsbt(psbt)
+ assert_equal(final_psbt["complete"], True)
+ return final_psbt['hex']
def get_legacy_sigopcount_block(block, accurate=True):
count = 0
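
A sketch of the new template-driven create_block path (not part of the patch); `node` stands for a running regtest TestNode and is an assumption here:

    from test_framework.blocktools import (
        NORMAL_GBT_REQUEST_PARAMS,
        add_witness_commitment,
        create_block,
    )

    tmpl = node.getblocktemplate(NORMAL_GBT_REQUEST_PARAMS)
    # With tmpl given, version, nTime, nBits, hashPrevBlock and the coinbase
    # height are all taken from the template; txlist accepts hex strings or
    # CTransaction objects.
    block = create_block(tmpl=tmpl, txlist=[t['data'] for t in tmpl['transactions']])
    add_witness_commitment(block)
    block.solve()
    node.submitblock(block.serialize().hex())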
diff --git a/test/functional/test_framework/key.py b/test/functional/test_framework/key.py
index adbffb7dc7..f3d13c049b 100644
--- a/test/functional/test_framework/key.py
+++ b/test/functional/test_framework/key.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2019 Pieter Wuille
+# Copyright (c) 2019-2020 Pieter Wuille
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test-only secp256k1 elliptic curve implementation
@@ -6,10 +6,24 @@
WARNING: This code is slow, uses bad randomness, does not properly protect
keys, and is trivially vulnerable to side channel attacks. Do not use for
anything but tests."""
+import csv
+import hashlib
+import os
import random
+import unittest
from .util import modinv
+def TaggedHash(tag, data):
+ ss = hashlib.sha256(tag.encode('utf-8')).digest()
+ ss += ss
+ ss += data
+ return hashlib.sha256(ss).digest()
+
+def xor_bytes(b0, b1):
+ assert len(b0) == len(b1)
+ return bytes(x ^ y for (x, y) in zip(b0, b1))
+
def jacobi_symbol(n, k):
"""Compute the Jacobi symbol of n modulo k
@@ -68,6 +82,10 @@ class EllipticCurve:
inv_3 = (inv_2 * inv) % self.p
return ((inv_2 * x1) % self.p, (inv_3 * y1) % self.p, 1)
+ def has_even_y(self, p1):
+ """Whether the point p1 has an even Y coordinate when expressed in affine coordinates."""
+ return not (p1[2] == 0 or self.affine(p1)[1] & 1)
+
def negate(self, p1):
"""Negate a Jacobian point tuple p1."""
x1, y1, z1 = p1
@@ -86,13 +104,13 @@ class EllipticCurve:
return jacobi_symbol(x_3 + self.a * x + self.b, self.p) != -1
def lift_x(self, x):
- """Given an X coordinate on the curve, return a corresponding affine point."""
+ """Given an X coordinate on the curve, return a corresponding affine point for which the Y coordinate is even."""
x_3 = pow(x, 3, self.p)
v = x_3 + self.a * x + self.b
y = modsqrt(v, self.p)
if y is None:
return None
- return (x, y, 1)
+ return (x, self.p - y if y & 1 else y, 1)
def double(self, p1):
"""Double a Jacobian tuple p1
@@ -197,7 +215,8 @@ class EllipticCurve:
r = self.add(r, p)
return r
-SECP256K1 = EllipticCurve(2**256 - 2**32 - 977, 0, 7)
+SECP256K1_FIELD_SIZE = 2**256 - 2**32 - 977
+SECP256K1 = EllipticCurve(SECP256K1_FIELD_SIZE, 0, 7)
SECP256K1_G = (0x79BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F81798, 0x483ADA7726A3C4655DA4FBFC0E1108A8FD17B448A68554199C47D08FFB10D4B8, 1)
SECP256K1_ORDER = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141
SECP256K1_ORDER_HALF = SECP256K1_ORDER // 2
@@ -221,9 +240,9 @@ class ECPubKey():
x = int.from_bytes(data[1:33], 'big')
if SECP256K1.is_x_coord(x):
p = SECP256K1.lift_x(x)
- # if the oddness of the y co-ord isn't correct, find the other
- # valid y
- if (p[1] & 1) != (data[0] & 1):
+ # Make the Y coordinate odd if required (lift_x always produces
+ # a point with an even Y coordinate).
+ if data[0] & 1:
p = SECP256K1.negate(p)
self.p = p
self.valid = True
@@ -303,10 +322,14 @@ class ECPubKey():
u1 = z*w % SECP256K1_ORDER
u2 = r*w % SECP256K1_ORDER
R = SECP256K1.affine(SECP256K1.mul([(SECP256K1_G, u1), (self.p, u2)]))
- if R is None or R[0] != r:
+ if R is None or (R[0] % SECP256K1_ORDER) != r:
return False
return True
+def generate_privkey():
+ """Generate a valid random 32-byte private key."""
+ return random.randrange(1, SECP256K1_ORDER).to_bytes(32, 'big')
+
class ECKey():
"""A secp256k1 private key"""
@@ -324,7 +347,7 @@ class ECKey():
def generate(self, compressed=True):
"""Generate a random private key (compressed or uncompressed)."""
- self.set(random.randrange(1, SECP256K1_ORDER).to_bytes(32, 'big'), compressed)
+ self.set(generate_privkey(), compressed)
def get_bytes(self):
"""Retrieve the 32-byte representation of this key."""
@@ -369,3 +392,162 @@ class ECKey():
rb = r.to_bytes((r.bit_length() + 8) // 8, 'big')
sb = s.to_bytes((s.bit_length() + 8) // 8, 'big')
return b'\x30' + bytes([4 + len(rb) + len(sb), 2, len(rb)]) + rb + bytes([2, len(sb)]) + sb
+
+def compute_xonly_pubkey(key):
+ """Compute an x-only (32 byte) public key from a (32 byte) private key.
+
+ This also returns whether the resulting public key was negated.
+ """
+
+ assert len(key) == 32
+ x = int.from_bytes(key, 'big')
+ if x == 0 or x >= SECP256K1_ORDER:
+ return (None, None)
+ P = SECP256K1.affine(SECP256K1.mul([(SECP256K1_G, x)]))
+ return (P[0].to_bytes(32, 'big'), not SECP256K1.has_even_y(P))
+
+def tweak_add_privkey(key, tweak):
+ """Tweak a private key (after negating it if needed)."""
+
+ assert len(key) == 32
+ assert len(tweak) == 32
+
+ x = int.from_bytes(key, 'big')
+ if x == 0 or x >= SECP256K1_ORDER:
+ return None
+ if not SECP256K1.has_even_y(SECP256K1.mul([(SECP256K1_G, x)])):
+ x = SECP256K1_ORDER - x
+ t = int.from_bytes(tweak, 'big')
+ if t >= SECP256K1_ORDER:
+ return None
+ x = (x + t) % SECP256K1_ORDER
+ if x == 0:
+ return None
+ return x.to_bytes(32, 'big')
+
+def tweak_add_pubkey(key, tweak):
+ """Tweak a public key and return whether the result had to be negated."""
+
+ assert len(key) == 32
+ assert len(tweak) == 32
+
+ x_coord = int.from_bytes(key, 'big')
+ if x_coord >= SECP256K1_FIELD_SIZE:
+ return None
+ P = SECP256K1.lift_x(x_coord)
+ if P is None:
+ return None
+ t = int.from_bytes(tweak, 'big')
+ if t >= SECP256K1_ORDER:
+ return None
+ Q = SECP256K1.affine(SECP256K1.mul([(SECP256K1_G, t), (P, 1)]))
+ if Q is None:
+ return None
+ return (Q[0].to_bytes(32, 'big'), not SECP256K1.has_even_y(Q))
+
+def verify_schnorr(key, sig, msg):
+ """Verify a Schnorr signature (see BIP 340).
+
+ - key is a 32-byte xonly pubkey (computed using compute_xonly_pubkey).
+ - sig is a 64-byte Schnorr signature
+ - msg is a 32-byte message
+ """
+ assert len(key) == 32
+ assert len(msg) == 32
+ assert len(sig) == 64
+
+ x_coord = int.from_bytes(key, 'big')
+ if x_coord == 0 or x_coord >= SECP256K1_FIELD_SIZE:
+ return False
+ P = SECP256K1.lift_x(x_coord)
+ if P is None:
+ return False
+ r = int.from_bytes(sig[0:32], 'big')
+ if r >= SECP256K1_FIELD_SIZE:
+ return False
+ s = int.from_bytes(sig[32:64], 'big')
+ if s >= SECP256K1_ORDER:
+ return False
+ e = int.from_bytes(TaggedHash("BIP0340/challenge", sig[0:32] + key + msg), 'big') % SECP256K1_ORDER
+ R = SECP256K1.mul([(SECP256K1_G, s), (P, SECP256K1_ORDER - e)])
+ if not SECP256K1.has_even_y(R):
+ return False
+ if ((r * R[2] * R[2]) % SECP256K1_FIELD_SIZE) != R[0]:
+ return False
+ return True
+
+def sign_schnorr(key, msg, aux=None, flip_p=False, flip_r=False):
+ """Create a Schnorr signature (see BIP 340)."""
+
+ if aux is None:
+ aux = bytes(32)
+
+ assert len(key) == 32
+ assert len(msg) == 32
+ assert len(aux) == 32
+
+ sec = int.from_bytes(key, 'big')
+ if sec == 0 or sec >= SECP256K1_ORDER:
+ return None
+ P = SECP256K1.affine(SECP256K1.mul([(SECP256K1_G, sec)]))
+ if SECP256K1.has_even_y(P) == flip_p:
+ sec = SECP256K1_ORDER - sec
+ t = (sec ^ int.from_bytes(TaggedHash("BIP0340/aux", aux), 'big')).to_bytes(32, 'big')
+ kp = int.from_bytes(TaggedHash("BIP0340/nonce", t + P[0].to_bytes(32, 'big') + msg), 'big') % SECP256K1_ORDER
+ assert kp != 0
+ R = SECP256K1.affine(SECP256K1.mul([(SECP256K1_G, kp)]))
+ k = kp if SECP256K1.has_even_y(R) != flip_r else SECP256K1_ORDER - kp
+ e = int.from_bytes(TaggedHash("BIP0340/challenge", R[0].to_bytes(32, 'big') + P[0].to_bytes(32, 'big') + msg), 'big') % SECP256K1_ORDER
+ return R[0].to_bytes(32, 'big') + ((k + e * sec) % SECP256K1_ORDER).to_bytes(32, 'big')
+
+class TestFrameworkKey(unittest.TestCase):
+ def test_schnorr(self):
+ """Test the Python Schnorr implementation."""
+ byte_arrays = [generate_privkey() for _ in range(3)] + [v.to_bytes(32, 'big') for v in [0, SECP256K1_ORDER - 1, SECP256K1_ORDER, 2**256 - 1]]
+ keys = {}
+ for privkey in byte_arrays: # build array of key/pubkey pairs
+ pubkey, _ = compute_xonly_pubkey(privkey)
+ if pubkey is not None:
+ keys[privkey] = pubkey
+ for msg in byte_arrays: # test every combination of message, signing key, verification key
+ for sign_privkey, sign_pubkey in keys.items():
+ sig = sign_schnorr(sign_privkey, msg)
+ for verify_privkey, verify_pubkey in keys.items():
+ if verify_privkey == sign_privkey:
+ self.assertTrue(verify_schnorr(verify_pubkey, sig, msg))
+ sig = list(sig)
+ sig[random.randrange(64)] ^= (1 << (random.randrange(8))) # damaging signature should break things
+ sig = bytes(sig)
+ self.assertFalse(verify_schnorr(verify_pubkey, sig, msg))
+
+ def test_schnorr_testvectors(self):
+ """Implement the BIP340 test vectors (read from bip340_test_vectors.csv)."""
+ num_tests = 0
+ vectors_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'bip340_test_vectors.csv')
+ with open(vectors_file, newline='', encoding='utf8') as csvfile:
+ reader = csv.reader(csvfile)
+ next(reader)
+ for row in reader:
+ (i_str, seckey_hex, pubkey_hex, aux_rand_hex, msg_hex, sig_hex, result_str, comment) = row
+ i = int(i_str)
+ pubkey = bytes.fromhex(pubkey_hex)
+ msg = bytes.fromhex(msg_hex)
+ sig = bytes.fromhex(sig_hex)
+ result = result_str == 'TRUE'
+ if seckey_hex != '':
+ seckey = bytes.fromhex(seckey_hex)
+ pubkey_actual = compute_xonly_pubkey(seckey)[0]
+ self.assertEqual(pubkey.hex(), pubkey_actual.hex(), "BIP340 test vector %i (%s): pubkey mismatch" % (i, comment))
+ aux_rand = bytes.fromhex(aux_rand_hex)
+ try:
+ sig_actual = sign_schnorr(seckey, msg, aux_rand)
+ self.assertEqual(sig.hex(), sig_actual.hex(), "BIP340 test vector %i (%s): sig mismatch" % (i, comment))
+ except RuntimeError as e:
+ self.fail("BIP340 test vector %i (%s): signing raised exception %s" % (i, comment, e))
+ result_actual = verify_schnorr(pubkey, sig, msg)
+ if result:
+ self.assertEqual(result, result_actual, "BIP340 test vector %i (%s): verification failed" % (i, comment))
+ else:
+ self.assertEqual(result, result_actual, "BIP340 test vector %i (%s): verification succeeded unexpectedly" % (i, comment))
+ num_tests += 1
+ self.assertTrue(num_tests >= 15) # expect at least 15 test vectors
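
A self-contained sketch of the new BIP340 helpers (not part of the patch); like the module itself, this is test-only code with weak randomness:

    from test_framework.key import (
        compute_xonly_pubkey,
        generate_privkey,
        sign_schnorr,
        verify_schnorr,
    )

    privkey = generate_privkey()
    pubkey, _negated = compute_xonly_pubkey(privkey)

    msg = bytes(32)  # any 32-byte message
    sig = sign_schnorr(privkey, msg)
    assert verify_schnorr(pubkey, sig, msg)

    # Flipping a single bit must invalidate the signature.
    bad_sig = bytes([sig[0] ^ 1]) + sig[1:]
    assert not verify_schnorr(pubkey, bad_sig, msg)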
diff --git a/test/functional/test_framework/messages.py b/test/functional/test_framework/messages.py
index 00cf1ef66d..ff7f73bdf4 100755
--- a/test/functional/test_framework/messages.py
+++ b/test/functional/test_framework/messages.py
@@ -136,12 +136,17 @@ def uint256_from_compact(c):
return v
-def deser_vector(f, c):
+# deser_function_name: Allow for an alternate deserialization function on the
+# entries in the vector.
+def deser_vector(f, c, deser_function_name=None):
nit = deser_compact_size(f)
r = []
for _ in range(nit):
t = c()
- t.deserialize(f)
+ if deser_function_name:
+ getattr(t, deser_function_name)(f)
+ else:
+ t.deserialize(f)
r.append(t)
return r
@@ -204,38 +209,82 @@ def ToHex(obj):
class CAddress:
- __slots__ = ("ip", "nServices", "pchReserved", "port", "time")
+ __slots__ = ("net", "ip", "nServices", "port", "time")
+
+ # see https://github.com/bitcoin/bips/blob/master/bip-0155.mediawiki
+ NET_IPV4 = 1
+
+ ADDRV2_NET_NAME = {
+ NET_IPV4: "IPv4"
+ }
+
+ ADDRV2_ADDRESS_LENGTH = {
+ NET_IPV4: 4
+ }
def __init__(self):
self.time = 0
self.nServices = 1
- self.pchReserved = b"\x00" * 10 + b"\xff" * 2
+ self.net = self.NET_IPV4
self.ip = "0.0.0.0"
self.port = 0
def deserialize(self, f, *, with_time=True):
+ """Deserialize from addrv1 format (pre-BIP155)"""
if with_time:
# VERSION messages serialize CAddress objects without time
- self.time = struct.unpack("<i", f.read(4))[0]
+ self.time = struct.unpack("<I", f.read(4))[0]
self.nServices = struct.unpack("<Q", f.read(8))[0]
- self.pchReserved = f.read(12)
+ # We only support IPv4 which means skip 12 bytes and read the next 4 as IPv4 address.
+ f.read(12)
+ self.net = self.NET_IPV4
self.ip = socket.inet_ntoa(f.read(4))
self.port = struct.unpack(">H", f.read(2))[0]
def serialize(self, *, with_time=True):
+ """Serialize in addrv1 format (pre-BIP155)"""
+ assert self.net == self.NET_IPV4
r = b""
if with_time:
# VERSION messages serialize CAddress objects without time
- r += struct.pack("<i", self.time)
+ r += struct.pack("<I", self.time)
r += struct.pack("<Q", self.nServices)
- r += self.pchReserved
+ r += b"\x00" * 10 + b"\xff" * 2
+ r += socket.inet_aton(self.ip)
+ r += struct.pack(">H", self.port)
+ return r
+
+ def deserialize_v2(self, f):
+ """Deserialize from addrv2 format (BIP155)"""
+ self.time = struct.unpack("<I", f.read(4))[0]
+
+ self.nServices = deser_compact_size(f)
+
+ self.net = struct.unpack("B", f.read(1))[0]
+ assert self.net == self.NET_IPV4
+
+ address_length = deser_compact_size(f)
+ assert address_length == self.ADDRV2_ADDRESS_LENGTH[self.net]
+
+ self.ip = socket.inet_ntoa(f.read(4))
+
+ self.port = struct.unpack(">H", f.read(2))[0]
+
+ def serialize_v2(self):
+ """Serialize in addrv2 format (BIP155)"""
+ assert self.net == self.NET_IPV4
+ r = b""
+ r += struct.pack("<I", self.time)
+ r += ser_compact_size(self.nServices)
+ r += struct.pack("B", self.net)
+ r += ser_compact_size(self.ADDRV2_ADDRESS_LENGTH[self.net])
r += socket.inet_aton(self.ip)
r += struct.pack(">H", self.port)
return r
def __repr__(self):
- return "CAddress(nServices=%i ip=%s port=%i)" % (self.nServices,
- self.ip, self.port)
+ return ("CAddress(nServices=%i net=%s addr=%s port=%i)"
+ % (self.nServices, self.ADDRV2_NET_NAME[self.net], self.ip, self.port))
class CInv:
@@ -1064,6 +1113,40 @@ class msg_addr:
return "msg_addr(addrs=%s)" % (repr(self.addrs))
+class msg_addrv2:
+ __slots__ = ("addrs",)
+ msgtype = b"addrv2"
+
+ def __init__(self):
+ self.addrs = []
+
+ def deserialize(self, f):
+ self.addrs = deser_vector(f, CAddress, "deserialize_v2")
+
+ def serialize(self):
+ return ser_vector(self.addrs, "serialize_v2")
+
+ def __repr__(self):
+ return "msg_addrv2(addrs=%s)" % (repr(self.addrs))
+
+
+class msg_sendaddrv2:
+ __slots__ = ()
+ msgtype = b"sendaddrv2"
+
+ def __init__(self):
+ pass
+
+ def deserialize(self, f):
+ pass
+
+ def serialize(self):
+ return b""
+
+ def __repr__(self):
+ return "msg_sendaddrv2()"
+
+
class msg_inv:
__slots__ = ("inv",)
msgtype = b"inv"
diff --git a/test/functional/test_framework/p2p.py b/test/functional/test_framework/p2p.py
index 5f9b316b18..6846d31221 100755
--- a/test/functional/test_framework/p2p.py
+++ b/test/functional/test_framework/p2p.py
@@ -33,6 +33,7 @@ from test_framework.messages import (
MAX_HEADERS_RESULTS,
MIN_VERSION_SUPPORTED,
msg_addr,
+ msg_addrv2,
msg_block,
MSG_BLOCK,
msg_blocktxn,
@@ -56,6 +57,7 @@ from test_framework.messages import (
msg_notfound,
msg_ping,
msg_pong,
+ msg_sendaddrv2,
msg_sendcmpct,
msg_sendheaders,
msg_tx,
@@ -75,6 +77,7 @@ logger = logging.getLogger("TestFramework.p2p")
MESSAGEMAP = {
b"addr": msg_addr,
+ b"addrv2": msg_addrv2,
b"block": msg_block,
b"blocktxn": msg_blocktxn,
b"cfcheckpt": msg_cfcheckpt,
@@ -97,6 +100,7 @@ MESSAGEMAP = {
b"notfound": msg_notfound,
b"ping": msg_ping,
b"pong": msg_pong,
+ b"sendaddrv2": msg_sendaddrv2,
b"sendcmpct": msg_sendcmpct,
b"sendheaders": msg_sendheaders,
b"tx": msg_tx,
@@ -285,7 +289,7 @@ class P2PInterface(P2PConnection):
Individual testcases should subclass this and override the on_* methods
if they want to alter message handling behaviour."""
- def __init__(self):
+ def __init__(self, support_addrv2=False):
super().__init__()
# Track number of messages of each type received.
@@ -303,6 +307,8 @@ class P2PInterface(P2PConnection):
# The network services received from the peer
self.nServices = 0
+ self.support_addrv2 = support_addrv2
+
def peer_connect(self, *args, services=NODE_NETWORK|NODE_WITNESS, send_version=True, **kwargs):
create_conn = super().peer_connect(*args, **kwargs)
@@ -345,6 +351,7 @@ class P2PInterface(P2PConnection):
pass
def on_addr(self, message): pass
+ def on_addrv2(self, message): pass
def on_block(self, message): pass
def on_blocktxn(self, message): pass
def on_cfcheckpt(self, message): pass
@@ -365,6 +372,7 @@ class P2PInterface(P2PConnection):
def on_merkleblock(self, message): pass
def on_notfound(self, message): pass
def on_pong(self, message): pass
+ def on_sendaddrv2(self, message): pass
def on_sendcmpct(self, message): pass
def on_sendheaders(self, message): pass
def on_tx(self, message): pass
@@ -389,6 +397,8 @@ class P2PInterface(P2PConnection):
if message.nVersion >= 70016:
self.send_message(msg_wtxidrelay())
self.send_message(msg_verack())
+ if self.support_addrv2:
+ self.send_message(msg_sendaddrv2())
self.nServices = message.nServices
# Connection helper methods
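
A sketch of opting a test peer in to addrv2 relay (not part of the patch); the add_p2p_connection call is assumed to run inside a functional test where self.nodes[0] is a started node:

    from test_framework.p2p import P2PInterface

    class AddrReceiver(P2PInterface):
        def __init__(self):
            # Announce sendaddrv2 during the version handshake.
            super().__init__(support_addrv2=True)
            self.addrv2_received = []

        def on_addrv2(self, message):
            # Collect every address announced via addrv2.
            self.addrv2_received.extend(message.addrs)

    # Inside a test's run_test():
    # peer = self.nodes[0].add_p2p_connection(AddrReceiver())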
diff --git a/test/functional/test_framework/script.py b/test/functional/test_framework/script.py
index 5e35ba0fce..8e5848d493 100644
--- a/test/functional/test_framework/script.py
+++ b/test/functional/test_framework/script.py
@@ -6,11 +6,15 @@
This file is modified from python-bitcoinlib.
"""
+
+from collections import namedtuple
import hashlib
import struct
import unittest
from typing import List, Dict
+from .key import TaggedHash, tweak_add_pubkey
+
from .messages import (
CTransaction,
CTxOut,
@@ -22,8 +26,13 @@ from .messages import (
)
MAX_SCRIPT_ELEMENT_SIZE = 520
+LOCKTIME_THRESHOLD = 500000000
+ANNEX_TAG = 0x50
+
OPCODE_NAMES = {} # type: Dict[CScriptOp, str]
+LEAF_VERSION_TAPSCRIPT = 0xc0
+
def hash160(s):
return hashlib.new('ripemd160', sha256(s)).digest()
@@ -239,11 +248,8 @@ OP_NOP8 = CScriptOp(0xb7)
OP_NOP9 = CScriptOp(0xb8)
OP_NOP10 = CScriptOp(0xb9)
-# template matching params
-OP_SMALLINTEGER = CScriptOp(0xfa)
-OP_PUBKEYS = CScriptOp(0xfb)
-OP_PUBKEYHASH = CScriptOp(0xfd)
-OP_PUBKEY = CScriptOp(0xfe)
+# BIP 342 opcodes (Tapscript)
+OP_CHECKSIGADD = CScriptOp(0xba)
OP_INVALIDOPCODE = CScriptOp(0xff)
@@ -359,10 +365,7 @@ OPCODE_NAMES.update({
OP_NOP8: 'OP_NOP8',
OP_NOP9: 'OP_NOP9',
OP_NOP10: 'OP_NOP10',
- OP_SMALLINTEGER: 'OP_SMALLINTEGER',
- OP_PUBKEYS: 'OP_PUBKEYS',
- OP_PUBKEYHASH: 'OP_PUBKEYHASH',
- OP_PUBKEY: 'OP_PUBKEY',
+ OP_CHECKSIGADD: 'OP_CHECKSIGADD',
OP_INVALIDOPCODE: 'OP_INVALIDOPCODE',
})
@@ -593,6 +596,7 @@ class CScript(bytes):
return n
+SIGHASH_DEFAULT = 0 # Taproot-only default, semantics same as SIGHASH_ALL
SIGHASH_ALL = 1
SIGHASH_NONE = 2
SIGHASH_SINGLE = 3
@@ -615,7 +619,6 @@ def FindAndDelete(script, sig):
r += script[last_sop_idx:]
return CScript(r)
-
def LegacySignatureHash(script, txTo, inIdx, hashtype):
"""Consensus-correct SignatureHash
@@ -738,3 +741,113 @@ class TestFrameworkScript(unittest.TestCase):
values = [0, 1, -1, -2, 127, 128, -255, 256, (1 << 15) - 1, -(1 << 16), (1 << 24) - 1, (1 << 31), 1 - (1 << 32), 1 << 40, 1500, -1500]
for value in values:
self.assertEqual(CScriptNum.decode(CScriptNum.encode(CScriptNum(value))), value)
+
+def TaprootSignatureHash(txTo, spent_utxos, hash_type, input_index = 0, scriptpath = False, script = CScript(), codeseparator_pos = -1, annex = None, leaf_ver = LEAF_VERSION_TAPSCRIPT):
+ assert (len(txTo.vin) == len(spent_utxos))
+ assert (input_index < len(txTo.vin))
+ out_type = SIGHASH_ALL if hash_type == 0 else hash_type & 3
+ in_type = hash_type & SIGHASH_ANYONECANPAY
+ spk = spent_utxos[input_index].scriptPubKey
+ ss = bytes([0, hash_type]) # epoch, hash_type
+ ss += struct.pack("<i", txTo.nVersion)
+ ss += struct.pack("<I", txTo.nLockTime)
+ if in_type != SIGHASH_ANYONECANPAY:
+ ss += sha256(b"".join(i.prevout.serialize() for i in txTo.vin))
+ ss += sha256(b"".join(struct.pack("<q", u.nValue) for u in spent_utxos))
+ ss += sha256(b"".join(ser_string(u.scriptPubKey) for u in spent_utxos))
+ ss += sha256(b"".join(struct.pack("<I", i.nSequence) for i in txTo.vin))
+ if out_type == SIGHASH_ALL:
+ ss += sha256(b"".join(o.serialize() for o in txTo.vout))
+ spend_type = 0
+ if annex is not None:
+ spend_type |= 1
+ if (scriptpath):
+ spend_type |= 2
+ ss += bytes([spend_type])
+ if in_type == SIGHASH_ANYONECANPAY:
+ ss += txTo.vin[input_index].prevout.serialize()
+ ss += struct.pack("<q", spent_utxos[input_index].nValue)
+ ss += ser_string(spk)
+ ss += struct.pack("<I", txTo.vin[input_index].nSequence)
+ else:
+ ss += struct.pack("<I", input_index)
+ if (spend_type & 1):
+ ss += sha256(ser_string(annex))
+ if out_type == SIGHASH_SINGLE:
+ if input_index < len(txTo.vout):
+ ss += sha256(txTo.vout[input_index].serialize())
+ else:
+ ss += bytes(0 for _ in range(32))
+ if (scriptpath):
+ ss += TaggedHash("TapLeaf", bytes([leaf_ver]) + ser_string(script))
+ ss += bytes([0])
+ ss += struct.pack("<i", codeseparator_pos)
+ assert len(ss) == 175 - (in_type == SIGHASH_ANYONECANPAY) * 49 - (out_type != SIGHASH_ALL and out_type != SIGHASH_SINGLE) * 32 + (annex is not None) * 32 + scriptpath * 37
+ return TaggedHash("TapSighash", ss)
+
+def taproot_tree_helper(scripts):
+ if len(scripts) == 0:
+ return ([], bytes(0 for _ in range(32)))
+ if len(scripts) == 1:
+ # One entry: treat as a leaf
+ script = scripts[0]
+ assert(not callable(script))
+ if isinstance(script, list):
+ return taproot_tree_helper(script)
+ assert(isinstance(script, tuple))
+ version = LEAF_VERSION_TAPSCRIPT
+ name = script[0]
+ code = script[1]
+ if len(script) == 3:
+ version = script[2]
+ assert version & 1 == 0
+ assert isinstance(code, bytes)
+ h = TaggedHash("TapLeaf", bytes([version]) + ser_string(code))
+ if name is None:
+ return ([], h)
+ return ([(name, version, code, bytes())], h)
+ elif len(scripts) == 2 and callable(scripts[1]):
+ # Two entries, and the right one is a function
+ left, left_h = taproot_tree_helper(scripts[0:1])
+ right_h = scripts[1](left_h)
+ left = [(name, version, script, control + right_h) for name, version, script, control in left]
+ right = []
+ else:
+ # Two or more entries: descend into each side
+ split_pos = len(scripts) // 2
+ left, left_h = taproot_tree_helper(scripts[0:split_pos])
+ right, right_h = taproot_tree_helper(scripts[split_pos:])
+ left = [(name, version, script, control + right_h) for name, version, script, control in left]
+ right = [(name, version, script, control + left_h) for name, version, script, control in right]
+ if right_h < left_h:
+ right_h, left_h = left_h, right_h
+ h = TaggedHash("TapBranch", left_h + right_h)
+ return (left + right, h)
+
+TaprootInfo = namedtuple("TaprootInfo", "scriptPubKey,inner_pubkey,negflag,tweak,leaves")
+TaprootLeafInfo = namedtuple("TaprootLeafInfo", "script,version,merklebranch")
+
+def taproot_construct(pubkey, scripts=None):
+ """Construct a tree of Taproot spending conditions
+
+ pubkey: an ECPubKey object for the internal pubkey
+ scripts: a list of items; each item is either:
+ - a (name, CScript) tuple
+ - a (name, CScript, leaf version) tuple
+ - another list of items (with the same structure)
+ - a function, which specifies how to compute the hashing partner
+ in function of the hash of whatever it is combined with
+
+ Returns: script (sPK or redeemScript), tweak, {name:(script, leaf version, negation flag, innerkey, merklepath), ...}
+ """
+ if scripts is None:
+ scripts = []
+
+ ret, h = taproot_tree_helper(scripts)
+ tweak = TaggedHash("TapTweak", pubkey + h)
+ tweaked, negated = tweak_add_pubkey(pubkey, tweak)
+ leaves = dict((name, TaprootLeafInfo(script, version, merklebranch)) for name, version, script, merklebranch in ret)
+ return TaprootInfo(CScript([OP_1, tweaked]), pubkey, negated + 0, tweak, leaves)
+
+def is_op_success(o):
+ return o == 0x50 or o == 0x62 or o == 0x89 or o == 0x8a or o == 0x8d or o == 0x8e or (o >= 0x7e and o <= 0x81) or (o >= 0x83 and o <= 0x86) or (o >= 0x95 and o <= 0x99) or (o >= 0xbb and o <= 0xfe)
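
A sketch of taproot_construct usage (not part of the patch). The tweaking code above concatenates the internal pubkey bytes directly, so a raw 32-byte x-only key (as produced by compute_xonly_pubkey) is assumed here:

    from test_framework.key import compute_xonly_pubkey, generate_privkey
    from test_framework.script import CScript, OP_CHECKSIG, taproot_construct

    internal_pubkey, _ = compute_xonly_pubkey(generate_privkey())
    leaf_pubkey, _ = compute_xonly_pubkey(generate_privkey())
    script = CScript([leaf_pubkey, OP_CHECKSIG])

    info = taproot_construct(internal_pubkey, [("csig", script)])
    # info.scriptPubKey is the OP_1 <tweaked key> output script;
    # info.leaves["csig"].merklebranch is the merkle path for the control block.
    print(info.scriptPubKey.hex())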
diff --git a/test/functional/test_framework/script_util.py b/test/functional/test_framework/script_util.py
index 80fbae70bf..318a438705 100755
--- a/test/functional/test_framework/script_util.py
+++ b/test/functional/test_framework/script_util.py
@@ -3,7 +3,8 @@
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Useful Script constants and utils."""
-from test_framework.script import CScript
+from test_framework.script import CScript, hash160, sha256, OP_0, OP_DUP, OP_HASH160, OP_CHECKSIG, OP_EQUAL, OP_EQUALVERIFY
+from test_framework.util import hex_str_to_bytes
# To prevent a "tx-size-small" policy rule error, a transaction has to have a
# non-witness size of at least 82 bytes (MIN_STANDARD_TX_NONWITNESS_SIZE in
@@ -24,3 +25,59 @@ from test_framework.script import CScript
# met.
DUMMY_P2WPKH_SCRIPT = CScript([b'a' * 21])
DUMMY_2_P2WPKH_SCRIPT = CScript([b'b' * 21])
+
+def keyhash_to_p2pkh_script(hash, main = False):
+ assert len(hash) == 20
+ return CScript([OP_DUP, OP_HASH160, hash, OP_EQUALVERIFY, OP_CHECKSIG])
+
+def scripthash_to_p2sh_script(hash, main = False):
+ assert len(hash) == 20
+ return CScript([OP_HASH160, hash, OP_EQUAL])
+
+def key_to_p2pkh_script(key, main = False):
+ key = check_key(key)
+ return keyhash_to_p2pkh_script(hash160(key), main)
+
+def script_to_p2sh_script(script, main = False):
+ script = check_script(script)
+ return scripthash_to_p2sh_script(hash160(script), main)
+
+def key_to_p2sh_p2wpkh_script(key, main = False):
+ key = check_key(key)
+ p2shscript = CScript([OP_0, hash160(key)])
+ return script_to_p2sh_script(p2shscript, main)
+
+def program_to_witness_script(version, program, main = False):
+ if isinstance(program, str):
+ program = hex_str_to_bytes(program)
+ assert 0 <= version <= 16
+ assert 2 <= len(program) <= 40
+ assert version > 0 or len(program) in [20, 32]
+ return CScript([version, program])
+
+def script_to_p2wsh_script(script, main = False):
+ script = check_script(script)
+ return program_to_witness_script(0, sha256(script), main)
+
+def key_to_p2wpkh_script(key, main = False):
+ key = check_key(key)
+ return program_to_witness_script(0, hash160(key), main)
+
+def script_to_p2sh_p2wsh_script(script, main = False):
+ script = check_script(script)
+ p2shscript = CScript([OP_0, sha256(script)])
+ return script_to_p2sh_script(p2shscript, main)
+
+def check_key(key):
+ if isinstance(key, str):
+ key = hex_str_to_bytes(key) # Assuming this is hex string
+ if isinstance(key, bytes) and (len(key) == 33 or len(key) == 65):
+ return key
+ assert False
+
+def check_script(script):
+ if isinstance(script, str):
+ script = hex_str_to_bytes(script) # Assuming this is hex string
+ if isinstance(script, bytes) or isinstance(script, CScript):
+ return script
+ assert False
diff --git a/test/functional/test_framework/segwit_addr.py b/test/functional/test_framework/segwit_addr.py
index 02368e938f..00c0d8a919 100644
--- a/test/functional/test_framework/segwit_addr.py
+++ b/test/functional/test_framework/segwit_addr.py
@@ -3,7 +3,7 @@
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Reference implementation for Bech32 and segwit addresses."""
-
+import unittest
CHARSET = "qpzry9x8gf2tvdw0s3jn54khce6mua7l"
@@ -84,7 +84,7 @@ def convertbits(data, frombits, tobits, pad=True):
return ret
-def decode(hrp, addr):
+def decode_segwit_address(hrp, addr):
"""Decode a segwit address."""
hrpgot, data = bech32_decode(addr)
if hrpgot != hrp:
@@ -99,9 +99,23 @@ def decode(hrp, addr):
return (data[0], decoded)
-def encode(hrp, witver, witprog):
+def encode_segwit_address(hrp, witver, witprog):
"""Encode a segwit address."""
ret = bech32_encode(hrp, [witver] + convertbits(witprog, 8, 5))
- if decode(hrp, ret) == (None, None):
+ if decode_segwit_address(hrp, ret) == (None, None):
return None
return ret
+
+class TestFrameworkScript(unittest.TestCase):
+ def test_segwit_encode_decode(self):
+ def test_python_bech32(addr):
+ hrp = addr[:4]
+ self.assertEqual(hrp, "bcrt")
+ (witver, witprog) = decode_segwit_address(hrp, addr)
+ self.assertEqual(encode_segwit_address(hrp, witver, witprog), addr)
+
+ # P2WPKH
+ test_python_bech32('bcrt1qthmht0k2qnh3wy7336z05lu2km7emzfpm3wg46')
+ # P2WSH
+ test_python_bech32('bcrt1qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq3xueyj')
+ test_python_bech32('bcrt1qft5p2uhsdcdc3l2ua4ap5qqfg4pjaqlp250x7us7a8qqhrxrxfsqseac85')
diff --git a/test/functional/test_framework/test_framework.py b/test/functional/test_framework/test_framework.py
index f41f5129b8..831599913d 100755
--- a/test/functional/test_framework/test_framework.py
+++ b/test/functional/test_framework/test_framework.py
@@ -27,10 +27,9 @@ from .util import (
PortSeed,
assert_equal,
check_json_precision,
- connect_nodes,
- disconnect_nodes,
get_datadir_path,
initialize_datadir,
+ p2p_port,
wait_until_helper,
)
@@ -102,8 +101,17 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
self.rpc_timeout = 60 # Wait for up to 60 seconds for the RPC server to respond
self.supports_cli = True
self.bind_to_localhost_only = True
- self.set_test_params()
self.parse_args()
+ self.default_wallet_name = "default_wallet" if self.options.descriptors else ""
+ self.wallet_data_filename = "wallet.dat"
+ # Optional list of wallet names that can be set in set_test_params to
+ # create and import keys to. If unset, default is len(nodes) *
+ # [default_wallet_name]. If wallet names are None, wallet creation is
+ # skipped. If list is truncated, wallet creation is skipped and keys
+ # are not imported.
+ self.wallet_names = None
+ self.set_test_params()
+ assert self.wallet_names is None or len(self.wallet_names) <= self.num_nodes
if self.options.timeout_factor == 0 :
self.options.timeout_factor = 99999
self.rpc_timeout = int(self.rpc_timeout * self.options.timeout_factor) # optionally, increase timeout by a factor
@@ -175,9 +183,14 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
help="run nodes under the valgrind memory error detector: expect at least a ~10x slowdown, valgrind 3.14 or later required")
parser.add_argument("--randomseed", type=int,
help="set a random seed for deterministically reproducing a previous test run")
- parser.add_argument("--descriptors", default=False, action="store_true",
- help="Run test using a descriptor wallet")
parser.add_argument('--timeout-factor', dest="timeout_factor", type=float, default=1.0, help='adjust test timeouts by a factor. Setting it to 0 disables all timeouts')
+
+ group = parser.add_mutually_exclusive_group()
+ group.add_argument("--descriptors", default=False, action="store_true",
+ help="Run test using a descriptor wallet", dest='descriptors')
+ group.add_argument("--legacy-wallet", default=False, action="store_false",
+ help="Run test using legacy wallets", dest='descriptors')
+
self.add_options(parser)
self.options = parser.parse_args()
self.options.previous_releases_path = previous_releases_path
@@ -362,23 +375,12 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
def setup_nodes(self):
"""Override this method to customize test node setup"""
extra_args = [[]] * self.num_nodes
- wallets = [[]] * self.num_nodes
if hasattr(self, "extra_args"):
extra_args = self.extra_args
- wallets = [[x for x in eargs if x.startswith('-wallet=')] for eargs in extra_args]
- extra_args = [x + ['-nowallet'] for x in extra_args]
self.add_nodes(self.num_nodes, extra_args)
self.start_nodes()
- for i, n in enumerate(self.nodes):
- n.extra_args.pop()
- if '-wallet=0' in n.extra_args or '-nowallet' in n.extra_args or '-disablewallet' in n.extra_args or not self.is_wallet_compiled():
- continue
- if '-wallet=' not in wallets[i] and not any([x.startswith('-wallet=') for x in wallets[i]]):
- wallets[i].append('-wallet=')
- for w in wallets[i]:
- wallet_name = w.split('=', 1)[1]
- n.createwallet(wallet_name=wallet_name, descriptors=self.options.descriptors)
- self.import_deterministic_coinbase_privkeys()
+ if self.is_wallet_compiled():
+ self.import_deterministic_coinbase_privkeys()
if not self.setup_clean_chain:
for n in self.nodes:
assert_equal(n.getblockchaininfo()["blocks"], 199)
@@ -394,13 +396,15 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
assert_equal(chain_info["initialblockdownload"], False)
def import_deterministic_coinbase_privkeys(self):
- for n in self.nodes:
- try:
- n.getwalletinfo()
- except JSONRPCException as e:
- assert str(e).startswith('Method not found')
- continue
-
+ for i in range(self.num_nodes):
+ self.init_wallet(i)
+
+ def init_wallet(self, i):
+ wallet_name = self.default_wallet_name if self.wallet_names is None else self.wallet_names[i] if i < len(self.wallet_names) else False
+ if wallet_name is not False:
+ n = self.nodes[i]
+ if wallet_name is not None:
+ n.createwallet(wallet_name=wallet_name, descriptors=self.options.descriptors, load_on_startup=True)
n.importprivkey(privkey=n.get_deterministic_priv_key().key, label='coinbase')
def run_test(self):
@@ -534,10 +538,49 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
self.nodes[i].process.wait(timeout)
def connect_nodes(self, a, b):
- connect_nodes(self.nodes[a], b)
+ def connect_nodes_helper(from_connection, node_num):
+ ip_port = "127.0.0.1:" + str(p2p_port(node_num))
+ from_connection.addnode(ip_port, "onetry")
+ # poll until version handshake complete to avoid race conditions
+ # with transaction relaying
+ # See comments in net_processing:
+ # * Must have a version message before anything else
+ # * Must have a verack message before anything else
+ wait_until_helper(lambda: all(peer['version'] != 0 for peer in from_connection.getpeerinfo()))
+ wait_until_helper(lambda: all(peer['bytesrecv_per_msg'].pop('verack', 0) == 24 for peer in from_connection.getpeerinfo()))
+
+ connect_nodes_helper(self.nodes[a], b)
def disconnect_nodes(self, a, b):
- disconnect_nodes(self.nodes[a], b)
+ def disconnect_nodes_helper(from_connection, node_num):
+ def get_peer_ids():
+ result = []
+ for peer in from_connection.getpeerinfo():
+ if "testnode{}".format(node_num) in peer['subver']:
+ result.append(peer['id'])
+ return result
+
+ peer_ids = get_peer_ids()
+ if not peer_ids:
+ self.log.warning("disconnect_nodes: {} and {} were not connected".format(
+ from_connection.index,
+ node_num,
+ ))
+ return
+ for peer_id in peer_ids:
+ try:
+ from_connection.disconnectnode(nodeid=peer_id)
+ except JSONRPCException as e:
+ # If this node is disconnected between calculating the peer id
+ # and issuing the disconnect, don't worry about it.
+ # This avoids a race condition if we're mass-disconnecting peers.
+ if e.error['code'] != -29: # RPC_CLIENT_NODE_NOT_CONNECTED
+ raise
+
+ # wait to disconnect
+ wait_until_helper(lambda: not get_peer_ids(), timeout=5)
+
+ disconnect_nodes_helper(self.nodes[a], b)
def split_network(self):
"""
@@ -731,6 +774,13 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
"""Skip the running test if wallet has not been compiled."""
if not self.is_wallet_compiled():
raise SkipTest("wallet has not been compiled.")
+ if self.options.descriptors:
+ self.skip_if_no_sqlite()
+
+ def skip_if_no_sqlite(self):
+ """Skip the running test if sqlite has not been compiled."""
+ if not self.is_sqlite_compiled():
+ raise SkipTest("sqlite has not been compiled.")
def skip_if_no_wallet_tool(self):
"""Skip the running test if bitcoin-wallet has not been compiled."""
@@ -770,3 +820,7 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass):
def is_zmq_compiled(self):
"""Checks whether the zmq module was compiled."""
return self.config["components"].getboolean("ENABLE_ZMQ")
+
+ def is_sqlite_compiled(self):
+ """Checks whether the wallet module was compiled."""
+ return self.config["components"].getboolean("USE_SQLITE")
diff --git a/test/functional/test_framework/test_node.py b/test/functional/test_framework/test_node.py
index d034986821..046efe730e 100755
--- a/test/functional/test_framework/test_node.py
+++ b/test/functional/test_framework/test_node.py
@@ -542,15 +542,6 @@ class TestNode():
return p2p_conn
- @property
- def p2p(self):
- """Return the first p2p connection
-
- Convenience property - most tests only use a single p2p connection to each
- node, so this saves having to write node.p2ps[0] many times."""
- assert self.p2ps, self._node_msg("No p2p connection")
- return self.p2ps[0]
-
def num_test_p2p_connections(self):
"""Return number of test framework p2p connections to the node."""
return len([peer for peer in self.getpeerinfo() if peer['subver'] == MY_SUBVERSION])
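With the node.p2p convenience property removed, tests are expected to index node.p2ps or keep the object returned by add_p2p_connection(); a rough migration sketch (P2PInterface and send_and_ping come from the framework's p2p module, msg is hypothetical):

    peer = node.add_p2p_connection(P2PInterface())
    # before: node.p2p.send_and_ping(msg)
    node.p2ps[0].send_and_ping(msg)
    # or, preferably, use the connection returned above:
    peer.send_and_ping(msg)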
diff --git a/test/functional/test_framework/util.py b/test/functional/test_framework/util.py
index af7f0b62f4..62ff5c6e33 100644
--- a/test/functional/test_framework/util.py
+++ b/test/functional/test_framework/util.py
@@ -8,11 +8,11 @@ from base64 import b64encode
from binascii import unhexlify
from decimal import Decimal, ROUND_DOWN
from subprocess import CalledProcessError
+import hashlib
import inspect
import json
import logging
import os
-import random
import re
import time
import unittest
@@ -261,6 +261,14 @@ def wait_until_helper(predicate, *, attempts=float('inf'), timeout=float('inf'),
raise AssertionError("Predicate {} not true after {} seconds".format(predicate_source, timeout))
raise RuntimeError('Unreachable')
+def sha256sum_file(filename):
+ """Return the sha256 digest of a file's contents, read in 4096-byte chunks."""
+ h = hashlib.sha256()
+ with open(filename, 'rb') as f:
+ d = f.read(4096)
+ while len(d) > 0:
+ h.update(d)
+ d = f.read(4096)
+ return h.digest()
# RPC/P2P connection constants and functions
############################################
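An illustrative use of the new sha256sum_file() helper, e.g. checking that a wallet backup is a byte-for-byte copy of the original (both paths are hypothetical):

    from test_framework.util import assert_equal, sha256sum_file

    assert_equal(sha256sum_file(wallet_path), sha256sum_file(backup_path))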
@@ -412,47 +420,6 @@ def set_node_times(nodes, t):
node.setmocktime(t)
-def disconnect_nodes(from_connection, node_num):
- def get_peer_ids():
- result = []
- for peer in from_connection.getpeerinfo():
- if "testnode{}".format(node_num) in peer['subver']:
- result.append(peer['id'])
- return result
-
- peer_ids = get_peer_ids()
- if not peer_ids:
- logger.warning("disconnect_nodes: {} and {} were not connected".format(
- from_connection.index,
- node_num,
- ))
- return
- for peer_id in peer_ids:
- try:
- from_connection.disconnectnode(nodeid=peer_id)
- except JSONRPCException as e:
- # If this node is disconnected between calculating the peer id
- # and issuing the disconnect, don't worry about it.
- # This avoids a race condition if we're mass-disconnecting peers.
- if e.error['code'] != -29: # RPC_CLIENT_NODE_NOT_CONNECTED
- raise
-
- # wait to disconnect
- wait_until_helper(lambda: not get_peer_ids(), timeout=5)
-
-
-def connect_nodes(from_connection, node_num):
- ip_port = "127.0.0.1:" + str(p2p_port(node_num))
- from_connection.addnode(ip_port, "onetry")
- # poll until version handshake complete to avoid race conditions
- # with transaction relaying
- # See comments in net_processing:
- # * Must have a version message before anything else
- # * Must have a verack message before anything else
- wait_until_helper(lambda: all(peer['version'] != 0 for peer in from_connection.getpeerinfo()))
- wait_until_helper(lambda: all(peer['bytesrecv_per_msg'].pop('verack', 0) == 24 for peer in from_connection.getpeerinfo()))
-
-
# Transaction/Block functions
#############################
@@ -469,62 +436,6 @@ def find_output(node, txid, amount, *, blockhash=None):
raise RuntimeError("find_output txid %s : %s not found" % (txid, str(amount)))
-def gather_inputs(from_node, amount_needed, confirmations_required=1):
- """
- Return a random set of unspent txouts that are enough to pay amount_needed
- """
- assert confirmations_required >= 0
- utxo = from_node.listunspent(confirmations_required)
- random.shuffle(utxo)
- inputs = []
- total_in = Decimal("0.00000000")
- while total_in < amount_needed and len(utxo) > 0:
- t = utxo.pop()
- total_in += t["amount"]
- inputs.append({"txid": t["txid"], "vout": t["vout"], "address": t["address"]})
- if total_in < amount_needed:
- raise RuntimeError("Insufficient funds: need %d, have %d" % (amount_needed, total_in))
- return (total_in, inputs)
-
-
-def make_change(from_node, amount_in, amount_out, fee):
- """
- Create change output(s), return them
- """
- outputs = {}
- amount = amount_out + fee
- change = amount_in - amount
- if change > amount * 2:
- # Create an extra change output to break up big inputs
- change_address = from_node.getnewaddress()
- # Split change in two, being careful of rounding:
- outputs[change_address] = Decimal(change / 2).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)
- change = amount_in - amount - outputs[change_address]
- if change > 0:
- outputs[from_node.getnewaddress()] = change
- return outputs
-
-
-def random_transaction(nodes, amount, min_fee, fee_increment, fee_variants):
- """
- Create a random transaction.
- Returns (txid, hex-encoded-transaction-data, fee)
- """
- from_node = random.choice(nodes)
- to_node = random.choice(nodes)
- fee = min_fee + fee_increment * random.randint(0, fee_variants)
-
- (total_in, inputs) = gather_inputs(from_node, amount + fee)
- outputs = make_change(from_node, total_in, amount, fee)
- outputs[to_node.getnewaddress()] = float(amount)
-
- rawtx = from_node.createrawtransaction(inputs, outputs)
- signresult = from_node.signrawtransactionwithwallet(rawtx)
- txid = from_node.sendrawtransaction(signresult["hex"], 0)
-
- return (txid, signresult["hex"], fee)
-
-
# Helper to create at least "count" utxos
# Pass in a fee that is sufficient for relay and mining new transactions.
def create_confirmed_utxos(fee, node, count):