Diffstat (limited to 'test/functional')
107 files changed, 2301 insertions, 831 deletions
diff --git a/test/functional/data/rpc_decodescript.json b/test/functional/data/rpc_decodescript.json index 5f3e725d4c..4a15ae8792 100644 --- a/test/functional/data/rpc_decodescript.json +++ b/test/functional/data/rpc_decodescript.json @@ -69,7 +69,7 @@ "p2sh": "2N34iiGoUUkVSPiaaTFpJjB1FR9TXQu3PGM", "segwit": { "asm": "0 96c2368fc30514a438a8bd909f93c49a1549d77198ccbdb792043b666cb24f42", - "desc": "wsh(raw(02eeee))#gtay4y0z", + "desc": "addr(bcrt1qjmprdr7rq522gw9ghkgfly7yng25n4m3nrxtmdujqsakvm9jfapqk795l5)#5akkdska", "hex": "002096c2368fc30514a438a8bd909f93c49a1549d77198ccbdb792043b666cb24f42", "address": "bcrt1qjmprdr7rq522gw9ghkgfly7yng25n4m3nrxtmdujqsakvm9jfapqk795l5", "type": "witness_v0_scripthash", diff --git a/test/functional/feature_abortnode.py b/test/functional/feature_abortnode.py index 586722aa65..740d3b7f0e 100755 --- a/test/functional/feature_abortnode.py +++ b/test/functional/feature_abortnode.py @@ -9,10 +9,7 @@ - Mine a fork that requires disconnecting the tip. - Verify that bitcoind AbortNode's. """ - from test_framework.test_framework import BitcoinTestFramework -from test_framework.util import get_datadir_path -import os class AbortNodeTest(BitcoinTestFramework): @@ -26,10 +23,9 @@ class AbortNodeTest(BitcoinTestFramework): def run_test(self): self.generate(self.nodes[0], 3, sync_fun=self.no_op) - datadir = get_datadir_path(self.options.tmpdir, 0) # Deleting the undo file will result in reorg failure - os.unlink(os.path.join(datadir, self.chain, 'blocks', 'rev00000.dat')) + (self.nodes[0].blocks_path / "rev00000.dat").unlink() # Connecting to a node with a more work chain will trigger a reorg # attempt. @@ -40,7 +36,7 @@ class AbortNodeTest(BitcoinTestFramework): # Check that node0 aborted self.log.info("Waiting for crash") - self.nodes[0].wait_until_stopped(timeout=5, expect_error=True) + self.nodes[0].wait_until_stopped(timeout=5, expect_error=True, expected_stderr="Error: A fatal internal error occurred, see debug.log for details") self.log.info("Node crashed - now verifying restart fails") self.nodes[0].assert_start_raises_init_error() diff --git a/test/functional/feature_addrman.py b/test/functional/feature_addrman.py index 28c3880513..d901b7bcf9 100755 --- a/test/functional/feature_addrman.py +++ b/test/functional/feature_addrman.py @@ -32,12 +32,12 @@ def serialize_addrman( r += struct.pack("B", format) r += struct.pack("B", INCOMPATIBILITY_BASE + lowest_compatible) r += ser_uint256(bucket_key) - r += struct.pack("i", len_new or len(new)) - r += struct.pack("i", len_tried or len(tried)) + r += struct.pack("<i", len_new or len(new)) + r += struct.pack("<i", len_tried or len(tried)) ADDRMAN_NEW_BUCKET_COUNT = 1 << 10 - r += struct.pack("i", ADDRMAN_NEW_BUCKET_COUNT ^ (1 << 30)) + r += struct.pack("<i", ADDRMAN_NEW_BUCKET_COUNT ^ (1 << 30)) for _ in range(ADDRMAN_NEW_BUCKET_COUNT): - r += struct.pack("i", 0) + r += struct.pack("<i", 0) checksum = hash256(r) r += mock_checksum or checksum return r @@ -53,7 +53,7 @@ class AddrmanTest(BitcoinTestFramework): self.num_nodes = 1 def run_test(self): - peers_dat = os.path.join(self.nodes[0].datadir, self.chain, "peers.dat") + peers_dat = os.path.join(self.nodes[0].chain_path, "peers.dat") init_error = lambda reason: ( f"Error: Invalid or corrupt peers.dat \\({reason}\\). If you believe this " f"is a bug, please report it to {self.config['environment']['PACKAGE_BUGREPORT']}. 
" diff --git a/test/functional/feature_anchors.py b/test/functional/feature_anchors.py index 468ad1eafa..3b75a06d9e 100755 --- a/test/functional/feature_anchors.py +++ b/test/functional/feature_anchors.py @@ -6,12 +6,15 @@ import os -from test_framework.p2p import P2PInterface +from test_framework.p2p import P2PInterface, P2P_SERVICES +from test_framework.socks5 import Socks5Configuration, Socks5Server +from test_framework.messages import CAddress, hash256 from test_framework.test_framework import BitcoinTestFramework -from test_framework.util import check_node_connections +from test_framework.util import check_node_connections, assert_equal, p2p_port INBOUND_CONNECTIONS = 5 BLOCK_RELAY_CONNECTIONS = 2 +ONION_ADDR = "pg6mmjiyjmcrsslvykfwnntlaru7p5svn6y2ymmju6nubxndf4pscryd.onion:8333" class AnchorsTest(BitcoinTestFramework): @@ -20,9 +23,7 @@ class AnchorsTest(BitcoinTestFramework): self.disable_autoconnect = False def run_test(self): - node_anchors_path = os.path.join( - self.nodes[0].datadir, "regtest", "anchors.dat" - ) + node_anchors_path = self.nodes[0].chain_path / "anchors.dat" self.log.info("When node starts, check if anchors.dat doesn't exist") assert not os.path.exists(node_anchors_path) @@ -56,7 +57,7 @@ class AnchorsTest(BitcoinTestFramework): else: inbound_nodes_port.append(hex(int(addr_split[1]))[2:]) - self.log.info("Stop node 0") + self.log.debug("Stop node") self.stop_node(0) # It should contain only the block-relay-only addresses @@ -80,12 +81,64 @@ class AnchorsTest(BitcoinTestFramework): tweaked_contents[20:20] = b'1' out_file_handler.write(bytes(tweaked_contents)) - self.log.info("Start node") + self.log.debug("Start node") self.start_node(0) self.log.info("When node starts, check if anchors.dat doesn't exist anymore") assert not os.path.exists(node_anchors_path) + self.log.info("Ensure addrv2 support") + # Use proxies to catch outbound connections to networks with 256-bit addresses + onion_conf = Socks5Configuration() + onion_conf.auth = True + onion_conf.unauth = True + onion_conf.addr = ('127.0.0.1', p2p_port(self.num_nodes)) + onion_conf.keep_alive = True + onion_proxy = Socks5Server(onion_conf) + onion_proxy.start() + self.restart_node(0, extra_args=[f"-onion={onion_conf.addr[0]}:{onion_conf.addr[1]}"]) + + self.log.info("Add 256-bit-address block-relay-only connections to node") + self.nodes[0].addconnection(ONION_ADDR, 'block-relay-only') + + self.log.debug("Stop node") + with self.nodes[0].assert_debug_log([f"DumpAnchors: Flush 1 outbound block-relay-only peer addresses to anchors.dat"]): + self.stop_node(0) + # Manually close keep_alive proxy connection + onion_proxy.stop() + + self.log.info("Check for addrv2 addresses in anchors.dat") + caddr = CAddress() + caddr.net = CAddress.NET_TORV3 + caddr.ip, port_str = ONION_ADDR.split(":") + caddr.port = int(port_str) + # TorV3 addrv2 serialization: + # time(4) | services(1) | networkID(1) | address length(1) | address(32) + expected_pubkey = caddr.serialize_v2()[7:39].hex() + + # position of services byte of first addr in anchors.dat + # network magic, vector length, version, nTime + services_index = 4 + 1 + 4 + 4 + data = bytes() + with open(node_anchors_path, "rb") as file_handler: + data = file_handler.read() + assert_equal(data[services_index], 0x00) # services == NONE + anchors2 = data.hex() + assert expected_pubkey in anchors2 + + with open(node_anchors_path, "wb") as file_handler: + # Modify service flags for this address even though we never connected to it. 
+ # This is necessary because on restart we will not attempt an anchor connection + # to a host without our required services, even if its address is in the anchors.dat file + new_data = bytearray(data)[:-32] + new_data[services_index] = P2P_SERVICES + new_data_hash = hash256(new_data) + file_handler.write(new_data + new_data_hash) + + self.log.info("Restarting node attempts to reconnect to anchors") + with self.nodes[0].assert_debug_log([f"Trying to make an anchor connection to {ONION_ADDR}"]): + self.start_node(0, extra_args=[f"-onion={onion_conf.addr[0]}:{onion_conf.addr[1]}"]) + if __name__ == "__main__": AnchorsTest().main() diff --git a/test/functional/feature_asmap.py b/test/functional/feature_asmap.py index 9440ba11f5..9cff8042a8 100755 --- a/test/functional/feature_asmap.py +++ b/test/functional/feature_asmap.py @@ -113,7 +113,7 @@ class AsmapTest(BitcoinTestFramework): def run_test(self): self.node = self.nodes[0] - self.datadir = os.path.join(self.node.datadir, self.chain) + self.datadir = self.node.chain_path self.default_asmap = os.path.join(self.datadir, DEFAULT_ASMAP_FILENAME) self.asmap_raw = os.path.join(os.path.dirname(os.path.realpath(__file__)), ASMAP) diff --git a/test/functional/feature_block.py b/test/functional/feature_block.py index 765db97445..58ef1e761d 100755 --- a/test/functional/feature_block.py +++ b/test/functional/feature_block.py @@ -43,8 +43,7 @@ from test_framework.script import ( OP_INVALIDOPCODE, OP_RETURN, OP_TRUE, - SIGHASH_ALL, - LegacySignatureHash, + sign_input_legacy, ) from test_framework.script_util import ( script_to_p2sh_script, @@ -539,12 +538,8 @@ class FullBlockTest(BitcoinTestFramework): # second input is corresponding P2SH output from b39 tx.vin.append(CTxIn(COutPoint(b39.vtx[i].sha256, 0), b'')) # Note: must pass the redeem_script (not p2sh_script) to the signature hash function - (sighash, err) = LegacySignatureHash(redeem_script, tx, 1, SIGHASH_ALL) - sig = self.coinbase_key.sign_ecdsa(sighash) + bytes(bytearray([SIGHASH_ALL])) - scriptSig = CScript([sig, redeem_script]) - - tx.vin[1].scriptSig = scriptSig - tx.rehash() + tx.vin[1].scriptSig = CScript([redeem_script]) + sign_input_legacy(tx, 1, redeem_script, self.coinbase_key) new_txs.append(tx) lastOutpoint = COutPoint(tx.sha256, 0) @@ -1338,8 +1333,7 @@ class FullBlockTest(BitcoinTestFramework): if (scriptPubKey[0] == OP_TRUE): # an anyone-can-spend tx.vin[0].scriptSig = CScript() return - (sighash, err) = LegacySignatureHash(spend_tx.vout[0].scriptPubKey, tx, 0, SIGHASH_ALL) - tx.vin[0].scriptSig = CScript([self.coinbase_key.sign_ecdsa(sighash) + bytes(bytearray([SIGHASH_ALL]))]) + sign_input_legacy(tx, 0, spend_tx.vout[0].scriptPubKey, self.coinbase_key) def create_and_sign_transaction(self, spend_tx, value, script=CScript([OP_TRUE])): tx = self.create_tx(spend_tx, 0, value, script) diff --git a/test/functional/feature_blocksdir.py b/test/functional/feature_blocksdir.py index e8d2ec3676..76b9277e2f 100755 --- a/test/functional/feature_blocksdir.py +++ b/test/functional/feature_blocksdir.py @@ -18,7 +18,7 @@ class BlocksdirTest(BitcoinTestFramework): def run_test(self): self.stop_node(0) - assert os.path.isdir(os.path.join(self.nodes[0].datadir, self.chain, "blocks")) + assert os.path.isdir(os.path.join(self.nodes[0].blocks_path)) assert not os.path.isdir(os.path.join(self.nodes[0].datadir, "blocks")) shutil.rmtree(self.nodes[0].datadir) initialize_datadir(self.options.tmpdir, 0, self.chain) @@ -31,7 +31,7 @@ class BlocksdirTest(BitcoinTestFramework): self.log.info("mining 
blocks..") self.generatetoaddress(self.nodes[0], 10, self.nodes[0].get_deterministic_priv_key().address) assert os.path.isfile(os.path.join(blocksdir_path, self.chain, "blocks", "blk00000.dat")) - assert os.path.isdir(os.path.join(self.nodes[0].datadir, self.chain, "blocks", "index")) + assert os.path.isdir(os.path.join(self.nodes[0].blocks_path, "index")) if __name__ == '__main__': diff --git a/test/functional/feature_cltv.py b/test/functional/feature_cltv.py index 7730db9672..8c45fb5a4d 100755 --- a/test/functional/feature_cltv.py +++ b/test/functional/feature_cltv.py @@ -151,11 +151,11 @@ class BIP65Test(BitcoinTestFramework): cltv_invalidate(spendtx, i) expected_cltv_reject_reason = [ - "non-mandatory-script-verify-flag (Operation not valid with the current stack size)", - "non-mandatory-script-verify-flag (Negative locktime)", - "non-mandatory-script-verify-flag (Locktime requirement not satisfied)", - "non-mandatory-script-verify-flag (Locktime requirement not satisfied)", - "non-mandatory-script-verify-flag (Locktime requirement not satisfied)", + "mandatory-script-verify-flag-failed (Operation not valid with the current stack size)", + "mandatory-script-verify-flag-failed (Negative locktime)", + "mandatory-script-verify-flag-failed (Locktime requirement not satisfied)", + "mandatory-script-verify-flag-failed (Locktime requirement not satisfied)", + "mandatory-script-verify-flag-failed (Locktime requirement not satisfied)", ][i] # First we show that this tx is valid except for CLTV by getting it # rejected from the mempool for exactly that reason. diff --git a/test/functional/feature_config_args.py b/test/functional/feature_config_args.py index 2257605870..97ee9538dc 100755 --- a/test/functional/feature_config_args.py +++ b/test/functional/feature_config_args.py @@ -32,7 +32,7 @@ class ConfArgsTest(BitcoinTestFramework): self.stop_node(0) # Check that startup fails if conf= is set in bitcoin.conf or in an included conf file - bad_conf_file_path = os.path.join(self.options.tmpdir, 'node0', 'bitcoin_bad.conf') + bad_conf_file_path = self.nodes[0].datadir_path / "bitcoin_bad.conf" util.write_config(bad_conf_file_path, n=0, chain='', extra_config=f'conf=some.conf\n') conf_in_config_file_err = 'Error: Error reading configuration file: conf cannot be set in the configuration file; use includeconf= if you want to include additional config files' self.nodes[0].assert_start_raises_init_error( @@ -75,7 +75,7 @@ class ConfArgsTest(BitcoinTestFramework): conf.write("wallet=foo\n") self.nodes[0].assert_start_raises_init_error(expected_msg=f'Error: Config setting for -wallet only applied on {self.chain} network when in [{self.chain}] section.') - main_conf_file_path = os.path.join(self.options.tmpdir, 'node0', 'bitcoin_main.conf') + main_conf_file_path = self.nodes[0].datadir_path / "bitcoin_main.conf" util.write_config(main_conf_file_path, n=0, chain='', extra_config=f'includeconf={inc_conf_file_path}\n') with open(inc_conf_file_path, 'w', encoding='utf-8') as conf: conf.write('acceptnonstdtxn=1\n') @@ -249,28 +249,24 @@ class ConfArgsTest(BitcoinTestFramework): # No peers.dat exists and -dnsseed=0 # We expect the node will fallback immediately to fixed seeds assert not os.path.exists(os.path.join(default_data_dir, "peers.dat")) - start = time.time() with self.nodes[0].assert_debug_log(expected_msgs=[ "Loaded 0 addresses from peers.dat", "DNS seeding disabled", - "Adding fixed seeds as -dnsseed=0 (or IPv4/IPv6 connections are disabled via -onlynet), -addnode is not provided and all -seednode(s) 
attempted\n", + "Adding fixed seeds as -dnsseed=0 (or IPv4/IPv6 connections are disabled via -onlynet) and neither -addnode nor -seednode are provided\n", ]): self.start_node(0, extra_args=['-dnsseed=0', '-fixedseeds=1']) - assert time.time() - start < 60 self.stop_node(0) self.nodes[0].assert_start_raises_init_error(['-dnsseed=1', '-onlynet=i2p', '-i2psam=127.0.0.1:7656'], "Error: Incompatible options: -dnsseed=1 was explicitly specified, but -onlynet forbids connections to IPv4/IPv6") # No peers.dat exists and dns seeds are disabled. # We expect the node will not add fixed seeds when explicitly disabled. assert not os.path.exists(os.path.join(default_data_dir, "peers.dat")) - start = time.time() with self.nodes[0].assert_debug_log(expected_msgs=[ "Loaded 0 addresses from peers.dat", "DNS seeding disabled", "Fixed seeds are disabled", ]): self.start_node(0, extra_args=['-dnsseed=0', '-fixedseeds=0']) - assert time.time() - start < 60 self.stop_node(0) # No peers.dat exists and -dnsseed=0, but a -addnode is provided @@ -371,6 +367,14 @@ class ConfArgsTest(BitcoinTestFramework): f'is being used instead.') + r"[\s\S]*", env=env, match=ErrorMatch.FULL_REGEX) node.args = node_args + def test_acceptstalefeeestimates_arg_support(self): + self.log.info("Test -acceptstalefeeestimates option support") + conf_file = self.nodes[0].datadir_path / "bitcoin.conf" + for chain, chain_name in {("main", ""), ("test", "testnet3"), ("signet", "signet")}: + util.write_config(conf_file, n=0, chain=chain_name, extra_config='acceptstalefeeestimates=1\n') + self.nodes[0].assert_start_raises_init_error(expected_msg=f'Error: acceptstalefeeestimates is not supported on {chain} chain.') + util.write_config(conf_file, n=0, chain="regtest") # Reset to regtest + def run_test(self): self.test_log_buffer() self.test_args_log() @@ -383,6 +387,7 @@ class ConfArgsTest(BitcoinTestFramework): self.test_invalid_command_line_options() self.test_ignored_conf() self.test_ignored_default_conf() + self.test_acceptstalefeeestimates_arg_support() # Remove the -datadir argument so it doesn't override the config file self.nodes[0].args = [arg for arg in self.nodes[0].args if not arg.startswith("-datadir")] diff --git a/test/functional/feature_csv_activation.py b/test/functional/feature_csv_activation.py index a88a97c813..92e4187f3c 100755 --- a/test/functional/feature_csv_activation.py +++ b/test/functional/feature_csv_activation.py @@ -407,9 +407,9 @@ class BIP68_112_113Test(BitcoinTestFramework): # -1 OP_CSV tx and (empty stack) OP_CSV tx should fail self.send_blocks([self.create_test_block([bip112tx_special_v1])], success=False, - reject_reason='non-mandatory-script-verify-flag (Negative locktime)') + reject_reason='mandatory-script-verify-flag-failed (Negative locktime)') self.send_blocks([self.create_test_block([bip112tx_emptystack_v1])], success=False, - reject_reason='non-mandatory-script-verify-flag (Operation not valid with the current stack size)') + reject_reason='mandatory-script-verify-flag-failed (Operation not valid with the current stack size)') # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in argument to OP_CSV, version 1 txs should still pass success_txs = [tx['tx'] for tx in bip112txs_vary_OP_CSV_v1 if tx['sdf']] @@ -424,15 +424,15 @@ class BIP68_112_113Test(BitcoinTestFramework): fail_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v1 if not tx['sdf']] for tx in fail_txs: self.send_blocks([self.create_test_block([tx])], success=False, - reject_reason='non-mandatory-script-verify-flag (Locktime requirement not 
satisfied)') + reject_reason='mandatory-script-verify-flag-failed (Locktime requirement not satisfied)') self.log.info("Test version 2 txs") # -1 OP_CSV tx and (empty stack) OP_CSV tx should fail self.send_blocks([self.create_test_block([bip112tx_special_v2])], success=False, - reject_reason='non-mandatory-script-verify-flag (Negative locktime)') + reject_reason='mandatory-script-verify-flag-failed (Negative locktime)') self.send_blocks([self.create_test_block([bip112tx_emptystack_v2])], success=False, - reject_reason='non-mandatory-script-verify-flag (Operation not valid with the current stack size)') + reject_reason='mandatory-script-verify-flag-failed (Operation not valid with the current stack size)') # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in argument to OP_CSV, version 2 txs should pass (all sequence locks are met) success_txs = [tx['tx'] for tx in bip112txs_vary_OP_CSV_v2 if tx['sdf']] @@ -448,20 +448,20 @@ class BIP68_112_113Test(BitcoinTestFramework): fail_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v2 if not tx['sdf']] for tx in fail_txs: self.send_blocks([self.create_test_block([tx])], success=False, - reject_reason='non-mandatory-script-verify-flag (Locktime requirement not satisfied)') + reject_reason='mandatory-script-verify-flag-failed (Locktime requirement not satisfied)') # If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in nSequence, tx should fail fail_txs = [tx['tx'] for tx in bip112txs_vary_nSequence_v2 if tx['sdf']] for tx in fail_txs: self.send_blocks([self.create_test_block([tx])], success=False, - reject_reason='non-mandatory-script-verify-flag (Locktime requirement not satisfied)') + reject_reason='mandatory-script-verify-flag-failed (Locktime requirement not satisfied)') # If sequencelock types mismatch, tx should fail fail_txs = [tx['tx'] for tx in bip112txs_vary_nSequence_v2 if not tx['sdf'] and tx['stf']] fail_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_v2 if not tx['sdf'] and tx['stf']] for tx in fail_txs: self.send_blocks([self.create_test_block([tx])], success=False, - reject_reason='non-mandatory-script-verify-flag (Locktime requirement not satisfied)') + reject_reason='mandatory-script-verify-flag-failed (Locktime requirement not satisfied)') # Remaining txs should pass, just test masking works properly success_txs = [tx['tx'] for tx in bip112txs_vary_nSequence_v2 if not tx['sdf'] and not tx['stf']] diff --git a/test/functional/feature_dersig.py b/test/functional/feature_dersig.py index 4a66863d91..44c12b2a59 100755 --- a/test/functional/feature_dersig.py +++ b/test/functional/feature_dersig.py @@ -120,7 +120,7 @@ class BIP66Test(BitcoinTestFramework): 'txid': spendtx.hash, 'wtxid': spendtx.getwtxid(), 'allowed': False, - 'reject-reason': 'non-mandatory-script-verify-flag (Non-canonical DER signature)', + 'reject-reason': 'mandatory-script-verify-flag-failed (Non-canonical DER signature)', }], self.nodes[0].testmempoolaccept(rawtxs=[spendtx.serialize().hex()], maxfeerate=0), ) @@ -130,7 +130,7 @@ class BIP66Test(BitcoinTestFramework): block.hashMerkleRoot = block.calc_merkle_root() block.solve() - with self.nodes[0].assert_debug_log(expected_msgs=[f'CheckInputScripts on {block.vtx[-1].hash} failed with non-mandatory-script-verify-flag (Non-canonical DER signature)']): + with self.nodes[0].assert_debug_log(expected_msgs=[f'CheckInputScripts on {block.vtx[-1].hash} failed with mandatory-script-verify-flag-failed (Non-canonical DER signature)']): peer.send_and_ping(msg_block(block)) assert_equal(int(self.nodes[0].getbestblockhash(), 16), tip) 
peer.sync_with_ping() diff --git a/test/functional/feature_dirsymlinks.py b/test/functional/feature_dirsymlinks.py index 288754c04c..96f4aed08a 100755 --- a/test/functional/feature_dirsymlinks.py +++ b/test/functional/feature_dirsymlinks.py @@ -26,7 +26,7 @@ class SymlinkTest(BitcoinTestFramework): self.stop_node(0) rename_and_link( - from_name=self.nodes[0].chain_path / "blocks", + from_name=self.nodes[0].blocks_path, to_name=dir_new_blocks, ) rename_and_link( diff --git a/test/functional/feature_fee_estimation.py b/test/functional/feature_fee_estimation.py index 03970415ac..4f56d585d3 100755 --- a/test/functional/feature_fee_estimation.py +++ b/test/functional/feature_fee_estimation.py @@ -421,7 +421,7 @@ class EstimateFeeTest(BitcoinTestFramework): self.log.info("Restarting node with fresh estimation") self.stop_node(0) - fee_dat = os.path.join(self.nodes[0].datadir, self.chain, "fee_estimates.dat") + fee_dat = os.path.join(self.nodes[0].chain_path, "fee_estimates.dat") os.remove(fee_dat) self.start_node(0) self.connect_nodes(0, 1) diff --git a/test/functional/feature_filelock.py b/test/functional/feature_filelock.py index bb4104bf8e..cf2f21d553 100755 --- a/test/functional/feature_filelock.py +++ b/test/functional/feature_filelock.py @@ -3,7 +3,6 @@ # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Check that it's not possible to start a second bitcoind instance using the same datadir or wallet.""" -import os import random import string @@ -24,7 +23,7 @@ class FilelockTest(BitcoinTestFramework): self.nodes[0].wait_for_rpc_connection() def run_test(self): - datadir = os.path.join(self.nodes[0].datadir, self.chain) + datadir = self.nodes[0].chain_path self.log.info(f"Using datadir {datadir}") self.log.info("Check that we can't start a second bitcoind instance using the same datadir") @@ -35,7 +34,7 @@ class FilelockTest(BitcoinTestFramework): def check_wallet_filelock(descriptors): wallet_name = ''.join([random.choice(string.ascii_lowercase) for _ in range(6)]) self.nodes[0].createwallet(wallet_name=wallet_name, descriptors=descriptors) - wallet_dir = os.path.join(datadir, 'wallets') + wallet_dir = self.nodes[0].wallets_path self.log.info("Check that we can't start a second bitcoind instance using the same wallet") if descriptors: expected_msg = f"Error: SQLiteDatabase: Unable to obtain an exclusive lock on the database, is it being used by another instance of {self.config['environment']['PACKAGE_NAME']}?" diff --git a/test/functional/feature_includeconf.py b/test/functional/feature_includeconf.py index 818e4c923b..58ab063e71 100755 --- a/test/functional/feature_includeconf.py +++ b/test/functional/feature_includeconf.py @@ -14,27 +14,25 @@ Verify that: 4. multiple includeconf arguments can be specified in the main config file. 
""" -import os - from test_framework.test_framework import BitcoinTestFramework + class IncludeConfTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 1 - def setup_chain(self): - super().setup_chain() + def run_test(self): # Create additional config files # - tmpdir/node0/relative.conf - with open(os.path.join(self.options.tmpdir, "node0", "relative.conf"), "w", encoding="utf8") as f: + with open(self.nodes[0].datadir_path / "relative.conf", "w", encoding="utf8") as f: f.write("uacomment=relative\n") # - tmpdir/node0/relative2.conf - with open(os.path.join(self.options.tmpdir, "node0", "relative2.conf"), "w", encoding="utf8") as f: + with open(self.nodes[0].datadir_path / "relative2.conf", "w", encoding="utf8") as f: f.write("uacomment=relative2\n") - with open(os.path.join(self.options.tmpdir, "node0", "bitcoin.conf"), "a", encoding='utf8') as f: + with open(self.nodes[0].datadir_path / "bitcoin.conf", "a", encoding="utf8") as f: f.write("uacomment=main\nincludeconf=relative.conf\n") + self.restart_node(0) - def run_test(self): self.log.info("-includeconf works from config file. subversion should end with 'main; relative)/'") subversion = self.nodes[0].getnetworkinfo()["subversion"] @@ -52,7 +50,7 @@ class IncludeConfTest(BitcoinTestFramework): ) self.log.info("-includeconf cannot be used recursively. subversion should end with 'main; relative)/'") - with open(os.path.join(self.options.tmpdir, "node0", "relative.conf"), "a", encoding="utf8") as f: + with open(self.nodes[0].datadir_path / "relative.conf", "a", encoding="utf8") as f: f.write("includeconf=relative2.conf\n") self.start_node(0) @@ -63,20 +61,20 @@ class IncludeConfTest(BitcoinTestFramework): self.log.info("-includeconf cannot contain invalid arg") # Commented out as long as we ignore invalid arguments in configuration files - #with open(os.path.join(self.options.tmpdir, "node0", "relative.conf"), "w", encoding="utf8") as f: + #with open(self.nodes[0].datadir_path / "relative.conf", "w", encoding="utf8") as f: # f.write("foo=bar\n") #self.nodes[0].assert_start_raises_init_error(expected_msg="Error: Error reading configuration file: Invalid configuration value foo") self.log.info("-includeconf cannot be invalid path") - os.remove(os.path.join(self.options.tmpdir, "node0", "relative.conf")) + (self.nodes[0].datadir_path / "relative.conf").unlink() self.nodes[0].assert_start_raises_init_error(expected_msg="Error: Error reading configuration file: Failed to include configuration file relative.conf") self.log.info("multiple -includeconf args can be used from the base config file. subversion should end with 'main; relative; relative2)/'") - with open(os.path.join(self.options.tmpdir, "node0", "relative.conf"), "w", encoding="utf8") as f: + with open(self.nodes[0].datadir_path / "relative.conf", "w", encoding="utf8") as f: # Restore initial file contents f.write("uacomment=relative\n") - with open(os.path.join(self.options.tmpdir, "node0", "bitcoin.conf"), "a", encoding='utf8') as f: + with open(self.nodes[0].datadir_path / "bitcoin.conf", "a", encoding="utf8") as f: f.write("includeconf=relative2.conf\n") self.start_node(0) diff --git a/test/functional/feature_index_prune.py b/test/functional/feature_index_prune.py index 77a056346a..d6e802b399 100755 --- a/test/functional/feature_index_prune.py +++ b/test/functional/feature_index_prune.py @@ -3,6 +3,7 @@ # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
"""Test indices in conjunction with prune.""" +import os from test_framework.test_framework import BitcoinTestFramework from test_framework.util import ( assert_equal, @@ -127,8 +128,9 @@ class FeatureIndexPruneTest(BitcoinTestFramework): self.log.info("make sure we get an init error when starting the nodes again with the indices") filter_msg = "Error: basic block filter index best block of the index goes beyond pruned data. Please disable the index or reindex (which will download the whole blockchain again)" stats_msg = "Error: coinstatsindex best block of the index goes beyond pruned data. Please disable the index or reindex (which will download the whole blockchain again)" + end_msg = f"{os.linesep}Error: Failed to start indexes, shutting down.." for i, msg in enumerate([filter_msg, stats_msg, filter_msg]): - self.nodes[i].assert_start_raises_init_error(extra_args=self.extra_args[i], expected_msg=msg) + self.nodes[i].assert_start_raises_init_error(extra_args=self.extra_args[i], expected_msg=msg+end_msg) self.log.info("make sure the nodes start again with the indices and an additional -reindex arg") for i in range(3): diff --git a/test/functional/feature_init.py b/test/functional/feature_init.py index 7af67730bd..64ca312b84 100755 --- a/test/functional/feature_init.py +++ b/test/functional/feature_init.py @@ -71,7 +71,7 @@ class InitStressTest(BitcoinTestFramework): b'init message: Starting network threads', b'net thread start', b'addcon thread start', - b'loadblk thread start', + b'initload thread start', b'txindex thread start', b'block filter index thread start', b'coinstatsindex thread start', diff --git a/test/functional/feature_loadblock.py b/test/functional/feature_loadblock.py index 7f030c6773..12d65fde68 100755 --- a/test/functional/feature_loadblock.py +++ b/test/functional/feature_loadblock.py @@ -37,7 +37,7 @@ class LoadblockTest(BitcoinTestFramework): cfg_file = os.path.join(data_dir, "linearize.cfg") bootstrap_file = os.path.join(self.options.tmpdir, "bootstrap.dat") genesis_block = self.nodes[0].getblockhash(0) - blocks_dir = os.path.join(data_dir, self.chain, "blocks") + blocks_dir = self.nodes[0].blocks_path hash_list = tempfile.NamedTemporaryFile(dir=data_dir, mode='w', delete=False, diff --git a/test/functional/feature_logging.py b/test/functional/feature_logging.py index b0788e2a2d..0e9aca358d 100755 --- a/test/functional/feature_logging.py +++ b/test/functional/feature_logging.py @@ -16,7 +16,7 @@ class LoggingTest(BitcoinTestFramework): self.setup_clean_chain = True def relative_log_path(self, name): - return os.path.join(self.nodes[0].datadir, self.chain, name) + return os.path.join(self.nodes[0].chain_path, name) def run_test(self): # test default log file name diff --git a/test/functional/feature_notifications.py b/test/functional/feature_notifications.py index 8cb633d454..adf6c13973 100755 --- a/test/functional/feature_notifications.py +++ b/test/functional/feature_notifications.py @@ -30,9 +30,6 @@ class NotificationsTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 2 self.setup_clean_chain = True - # The experimental syscall sandbox feature (-sandbox) is not compatible with -alertnotify, - # -blocknotify, -walletnotify or -shutdownnotify (which all invoke execve). 
- self.disable_syscall_sandbox = True def setup_network(self): self.wallet = ''.join(chr(i) for i in range(FILE_CHAR_START, FILE_CHAR_END) if chr(i) not in FILE_CHARS_DISALLOWED) diff --git a/test/functional/feature_nulldummy.py b/test/functional/feature_nulldummy.py index 7b2a29bdb4..f896cb6f43 100755 --- a/test/functional/feature_nulldummy.py +++ b/test/functional/feature_nulldummy.py @@ -37,7 +37,7 @@ from test_framework.util import ( from test_framework.wallet import getnewdestination from test_framework.wallet_util import generate_keypair -NULLDUMMY_ERROR = "non-mandatory-script-verify-flag (Dummy CHECKMULTISIG argument must be zero)" +NULLDUMMY_ERROR = "mandatory-script-verify-flag-failed (Dummy CHECKMULTISIG argument must be zero)" def invalidate_nulldummy_tx(tx): diff --git a/test/functional/feature_posix_fs_permissions.py b/test/functional/feature_posix_fs_permissions.py index c5a543e97a..40528779e6 100755 --- a/test/functional/feature_posix_fs_permissions.py +++ b/test/functional/feature_posix_fs_permissions.py @@ -31,11 +31,11 @@ class PosixFsPermissionsTest(BitcoinTestFramework): def run_test(self): self.stop_node(0) - datadir = os.path.join(self.nodes[0].datadir, self.chain) + datadir = self.nodes[0].chain_path self.check_directory_permissions(datadir) - walletsdir = os.path.join(datadir, "wallets") + walletsdir = self.nodes[0].wallets_path self.check_directory_permissions(walletsdir) - debuglog = os.path.join(datadir, "debug.log") + debuglog = self.nodes[0].debug_log_path self.check_file_permissions(debuglog) diff --git a/test/functional/feature_pruning.py b/test/functional/feature_pruning.py index b0c6138bcf..4b548ef0f3 100755 --- a/test/functional/feature_pruning.py +++ b/test/functional/feature_pruning.py @@ -91,7 +91,7 @@ class PruneTest(BitcoinTestFramework): def setup_network(self): self.setup_nodes() - self.prunedir = os.path.join(self.nodes[2].datadir, self.chain, 'blocks', '') + self.prunedir = os.path.join(self.nodes[2].blocks_path, '') self.connect_nodes(0, 1) self.connect_nodes(1, 2) @@ -290,7 +290,7 @@ class PruneTest(BitcoinTestFramework): assert_equal(ret + 1, node.getblockchaininfo()['pruneheight']) def has_block(index): - return os.path.isfile(os.path.join(self.nodes[node_number].datadir, self.chain, "blocks", f"blk{index:05}.dat")) + return os.path.isfile(os.path.join(self.nodes[node_number].blocks_path, f"blk{index:05}.dat")) # should not prune because chain tip of node 3 (995) < PruneAfterHeight (1000) assert_raises_rpc_error(-1, "Blockchain is too short for pruning", node.pruneblockchain, height(500)) diff --git a/test/functional/feature_reindex.py b/test/functional/feature_reindex.py index 0f6a8fd0d2..83f1c5003c 100755 --- a/test/functional/feature_reindex.py +++ b/test/functional/feature_reindex.py @@ -10,7 +10,6 @@ - Verify that out-of-order blocks are correctly processed, see LoadExternalBlockFile() """ -import os from test_framework.test_framework import BitcoinTestFramework from test_framework.p2p import MAGIC_BYTES from test_framework.util import assert_equal @@ -39,7 +38,7 @@ class ReindexTest(BitcoinTestFramework): # In this test environment, blocks will always be in order (since # we're generating them rather than getting them from peers), so to # test out-of-order handling, swap blocks 1 and 2 on disk. 
- blk0 = os.path.join(self.nodes[0].datadir, self.nodes[0].chain, 'blocks', 'blk00000.dat') + blk0 = self.nodes[0].blocks_path / "blk00000.dat" with open(blk0, 'r+b') as bf: # Read at least the first few blocks (including genesis) b = bf.read(2000) diff --git a/test/functional/feature_remove_pruned_files_on_startup.py b/test/functional/feature_remove_pruned_files_on_startup.py index ca0e5ace9f..c128587949 100755 --- a/test/functional/feature_remove_pruned_files_on_startup.py +++ b/test/functional/feature_remove_pruned_files_on_startup.py @@ -20,10 +20,10 @@ class FeatureRemovePrunedFilesOnStartupTest(BitcoinTestFramework): self.sync_blocks() def run_test(self): - blk0 = os.path.join(self.nodes[0].datadir, self.nodes[0].chain, 'blocks', 'blk00000.dat') - rev0 = os.path.join(self.nodes[0].datadir, self.nodes[0].chain, 'blocks', 'rev00000.dat') - blk1 = os.path.join(self.nodes[0].datadir, self.nodes[0].chain, 'blocks', 'blk00001.dat') - rev1 = os.path.join(self.nodes[0].datadir, self.nodes[0].chain, 'blocks', 'rev00001.dat') + blk0 = self.nodes[0].blocks_path / "blk00000.dat" + rev0 = self.nodes[0].blocks_path / "rev00000.dat" + blk1 = self.nodes[0].blocks_path / "blk00001.dat" + rev1 = self.nodes[0].blocks_path / "rev00001.dat" self.mine_batches(800) fo1 = os.open(blk0, os.O_RDONLY) fo2 = os.open(rev1, os.O_RDONLY) diff --git a/test/functional/feature_segwit.py b/test/functional/feature_segwit.py index 77f3e4feda..3ddf21ee5d 100755 --- a/test/functional/feature_segwit.py +++ b/test/functional/feature_segwit.py @@ -215,13 +215,13 @@ class SegWitTest(BitcoinTestFramework): self.log.info("Verify default node can't accept txs with missing witness") # unsigned, no scriptsig - self.fail_accept(self.nodes[0], "non-mandatory-script-verify-flag (Witness program hash mismatch)", wit_ids[NODE_0][P2WPKH][0], sign=False) - self.fail_accept(self.nodes[0], "non-mandatory-script-verify-flag (Witness program was passed an empty witness)", wit_ids[NODE_0][P2WSH][0], sign=False) + self.fail_accept(self.nodes[0], "mandatory-script-verify-flag-failed (Witness program hash mismatch)", wit_ids[NODE_0][P2WPKH][0], sign=False) + self.fail_accept(self.nodes[0], "mandatory-script-verify-flag-failed (Witness program was passed an empty witness)", wit_ids[NODE_0][P2WSH][0], sign=False) self.fail_accept(self.nodes[0], "mandatory-script-verify-flag-failed (Operation not valid with the current stack size)", p2sh_ids[NODE_0][P2WPKH][0], sign=False) self.fail_accept(self.nodes[0], "mandatory-script-verify-flag-failed (Operation not valid with the current stack size)", p2sh_ids[NODE_0][P2WSH][0], sign=False) # unsigned with redeem script - self.fail_accept(self.nodes[0], "non-mandatory-script-verify-flag (Witness program hash mismatch)", p2sh_ids[NODE_0][P2WPKH][0], sign=False, redeem_script=witness_script(False, self.pubkey[0])) - self.fail_accept(self.nodes[0], "non-mandatory-script-verify-flag (Witness program was passed an empty witness)", p2sh_ids[NODE_0][P2WSH][0], sign=False, redeem_script=witness_script(True, self.pubkey[0])) + self.fail_accept(self.nodes[0], "mandatory-script-verify-flag-failed (Witness program hash mismatch)", p2sh_ids[NODE_0][P2WPKH][0], sign=False, redeem_script=witness_script(False, self.pubkey[0])) + self.fail_accept(self.nodes[0], "mandatory-script-verify-flag-failed (Witness program was passed an empty witness)", p2sh_ids[NODE_0][P2WSH][0], sign=False, redeem_script=witness_script(True, self.pubkey[0])) self.log.info("Verify block and transaction serialization rpcs return differing 
serializations depending on rpc serialization flag") assert self.nodes[2].getblock(blockhash, False) != self.nodes[0].getblock(blockhash, False) @@ -244,10 +244,10 @@ class SegWitTest(BitcoinTestFramework): assert_equal(witnesses[0], '00' * 32) self.log.info("Verify witness txs without witness data are invalid after the fork") - self.fail_accept(self.nodes[2], 'non-mandatory-script-verify-flag (Witness program hash mismatch)', wit_ids[NODE_2][P2WPKH][2], sign=False) - self.fail_accept(self.nodes[2], 'non-mandatory-script-verify-flag (Witness program was passed an empty witness)', wit_ids[NODE_2][P2WSH][2], sign=False) - self.fail_accept(self.nodes[2], 'non-mandatory-script-verify-flag (Witness program hash mismatch)', p2sh_ids[NODE_2][P2WPKH][2], sign=False, redeem_script=witness_script(False, self.pubkey[2])) - self.fail_accept(self.nodes[2], 'non-mandatory-script-verify-flag (Witness program was passed an empty witness)', p2sh_ids[NODE_2][P2WSH][2], sign=False, redeem_script=witness_script(True, self.pubkey[2])) + self.fail_accept(self.nodes[2], 'mandatory-script-verify-flag-failed (Witness program hash mismatch)', wit_ids[NODE_2][P2WPKH][2], sign=False) + self.fail_accept(self.nodes[2], 'mandatory-script-verify-flag-failed (Witness program was passed an empty witness)', wit_ids[NODE_2][P2WSH][2], sign=False) + self.fail_accept(self.nodes[2], 'mandatory-script-verify-flag-failed (Witness program hash mismatch)', p2sh_ids[NODE_2][P2WPKH][2], sign=False, redeem_script=witness_script(False, self.pubkey[2])) + self.fail_accept(self.nodes[2], 'mandatory-script-verify-flag-failed (Witness program was passed an empty witness)', p2sh_ids[NODE_2][P2WSH][2], sign=False, redeem_script=witness_script(True, self.pubkey[2])) self.log.info("Verify default node can now use witness txs") self.success_mine(self.nodes[0], wit_ids[NODE_0][P2WPKH][0], True) diff --git a/test/functional/feature_settings.py b/test/functional/feature_settings.py index 20018f010f..bcae963428 100755 --- a/test/functional/feature_settings.py +++ b/test/functional/feature_settings.py @@ -21,7 +21,7 @@ class SettingsTest(BitcoinTestFramework): def run_test(self): node, = self.nodes - settings = Path(node.datadir, self.chain, "settings.json") + settings = Path(node.chain_path, "settings.json") conf = Path(node.datadir, "bitcoin.conf") # Assert empty settings file was created diff --git a/test/functional/feature_startupnotify.py b/test/functional/feature_startupnotify.py index ff5272b281..a8e62c6244 100755 --- a/test/functional/feature_startupnotify.py +++ b/test/functional/feature_startupnotify.py @@ -3,9 +3,6 @@ # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
"""Test -startupnotify.""" - -import os - from test_framework.test_framework import BitcoinTestFramework from test_framework.util import ( assert_equal, @@ -18,15 +15,14 @@ FILE_NAME = "test.txt" class StartupNotifyTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 1 - self.disable_syscall_sandbox = True def run_test(self): - tmpdir_file = os.path.join(self.options.tmpdir, NODE_DIR, FILE_NAME) - assert not os.path.exists(tmpdir_file) + tmpdir_file = self.nodes[0].datadir_path / FILE_NAME + assert not tmpdir_file.exists() self.log.info("Test -startupnotify command is run when node starts") self.restart_node(0, extra_args=[f"-startupnotify=echo '{FILE_NAME}' >> {NODE_DIR}/{FILE_NAME}"]) - self.wait_until(lambda: os.path.exists(tmpdir_file)) + self.wait_until(lambda: tmpdir_file.exists()) self.log.info("Test -startupnotify is executed once") diff --git a/test/functional/feature_syscall_sandbox.py b/test/functional/feature_syscall_sandbox.py deleted file mode 100755 index 2200f6c2e6..0000000000 --- a/test/functional/feature_syscall_sandbox.py +++ /dev/null @@ -1,34 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2021-2022 The Bitcoin Core developers -# Distributed under the MIT software license, see the accompanying -# file COPYING or http://www.opensource.org/licenses/mit-license.php. -"""Test bitcoind aborts if a disallowed syscall is used when compiled with the syscall sandbox.""" - -from test_framework.test_framework import BitcoinTestFramework, SkipTest - - -class SyscallSandboxTest(BitcoinTestFramework): - def set_test_params(self): - self.num_nodes = 1 - - def skip_test_if_missing_module(self): - if not self.is_syscall_sandbox_compiled(): - raise SkipTest("bitcoind has not been built with syscall sandbox enabled.") - if self.disable_syscall_sandbox: - raise SkipTest("--nosandbox passed to test runner.") - - def run_test(self): - disallowed_syscall_terminated_bitcoind = False - expected_log_entry = 'ERROR: The syscall "getgroups" (syscall number 115) is not allowed by the syscall sandbox' - with self.nodes[0].assert_debug_log([expected_log_entry]): - self.log.info("Invoking disallowed syscall") - try: - self.nodes[0].invokedisallowedsyscall() - except ConnectionError: - disallowed_syscall_terminated_bitcoind = True - assert disallowed_syscall_terminated_bitcoind - self.nodes = [] - - -if __name__ == "__main__": - SyscallSandboxTest().main() diff --git a/test/functional/feature_taproot.py b/test/functional/feature_taproot.py index b37bfd28ae..e32319961e 100755 --- a/test/functional/feature_taproot.py +++ b/test/functional/feature_taproot.py @@ -104,8 +104,8 @@ from test_framework.key import ( sign_schnorr, tweak_add_privkey, ECKey, - SECP256K1 ) +from test_framework import secp256k1 from test_framework.address import ( hash160, program_to_witness, @@ -695,7 +695,7 @@ def spenders_taproot_active(): # Generate an invalid public key while True: invalid_pub = random_bytes(32) - if not SECP256K1.is_x_coord(int.from_bytes(invalid_pub, 'big')): + if not secp256k1.GE.is_valid_x(int.from_bytes(invalid_pub, 'big')): break # Implement a test case that detects validation logic which maps invalid public keys to the @@ -739,7 +739,11 @@ def spenders_taproot_active(): scripts = [ ("pk_codesep", CScript(random_checksig_style(pubs[1]) + bytes([OP_CODESEPARATOR]))), # codesep after checksig ("codesep_pk", CScript(bytes([OP_CODESEPARATOR]) + random_checksig_style(pubs[1]))), # codesep before checksig - ("branched_codesep", CScript([random_bytes(random.randrange(511)), OP_DROP, OP_IF, 
OP_CODESEPARATOR, pubs[0], OP_ELSE, OP_CODESEPARATOR, pubs[1], OP_ENDIF, OP_CHECKSIG])), # branch dependent codesep + ("branched_codesep", CScript([random_bytes(random.randrange(2, 511)), OP_DROP, OP_IF, OP_CODESEPARATOR, pubs[0], OP_ELSE, OP_CODESEPARATOR, pubs[1], OP_ENDIF, OP_CHECKSIG])), # branch dependent codesep + # Note that the first data push in the "branched_codesep" script has the purpose of + # randomizing the sighash, both by varying script size and content. In order to + # avoid MINIMALDATA script verification errors caused by not-minimal-encoded data + # pushes (e.g. `OP_PUSH1 1` instead of `OP_1`), we set a minimum data size of 2 bytes. ] random.shuffle(scripts) tap = taproot_construct(pubs[0], scripts) diff --git a/test/functional/feature_txindex_compatibility.py b/test/functional/feature_txindex_compatibility.py index 48fefaa0ba..939271b385 100755 --- a/test/functional/feature_txindex_compatibility.py +++ b/test/functional/feature_txindex_compatibility.py @@ -7,20 +7,19 @@ Previous releases are required by this test, see test/README.md. """ -import os import shutil from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import assert_raises_rpc_error from test_framework.wallet import MiniWallet class TxindexCompatibilityTest(BitcoinTestFramework): def set_test_params(self): - self.num_nodes = 3 + self.num_nodes = 2 self.extra_args = [ ["-reindex", "-txindex"], [], - [], ] def skip_test_if_missing_module(self): @@ -33,12 +32,10 @@ class TxindexCompatibilityTest(BitcoinTestFramework): versions=[ 160300, # Last release with legacy txindex None, # For MiniWallet, without migration code - 220000, # Last release with migration code (0.17.x - 22.x) ], ) self.start_nodes() self.connect_nodes(0, 1) - self.connect_nodes(1, 2) def run_test(self): mini_wallet = MiniWallet(self.nodes[1]) @@ -47,45 +44,23 @@ class TxindexCompatibilityTest(BitcoinTestFramework): self.generate(self.nodes[1], 1) self.log.info("Check legacy txindex") + assert_raises_rpc_error(-5, "Use -txindex", lambda: self.nodes[1].getrawtransaction(txid=spend_utxo["txid"])) self.nodes[0].getrawtransaction(txid=spend_utxo["txid"]) # Requires -txindex self.stop_nodes() - legacy_chain_dir = os.path.join(self.nodes[0].datadir, self.chain) - - self.log.info("Migrate legacy txindex") - migrate_chain_dir = os.path.join(self.nodes[2].datadir, self.chain) - shutil.rmtree(migrate_chain_dir) - shutil.copytree(legacy_chain_dir, migrate_chain_dir) - with self.nodes[2].assert_debug_log([ - "Upgrading txindex database...", - "txindex is enabled at height 200", - ]): - self.start_node(2, extra_args=["-txindex"]) - self.nodes[2].getrawtransaction(txid=spend_utxo["txid"]) # Requires -txindex + legacy_chain_dir = self.nodes[0].chain_path self.log.info("Drop legacy txindex") - drop_index_chain_dir = os.path.join(self.nodes[1].datadir, self.chain) + drop_index_chain_dir = self.nodes[1].chain_path shutil.rmtree(drop_index_chain_dir) shutil.copytree(legacy_chain_dir, drop_index_chain_dir) - self.nodes[1].assert_start_raises_init_error( - extra_args=["-txindex"], - expected_msg="Error: The block index db contains a legacy 'txindex'. To clear the occupied disk space, run a full -reindex, otherwise ignore this error. 
This error message will not be displayed again.", - ) # Build txindex from scratch and check there is no error this time self.start_node(1, extra_args=["-txindex"]) - self.nodes[2].getrawtransaction(txid=spend_utxo["txid"]) # Requires -txindex + self.wait_until(lambda: self.nodes[1].getindexinfo()["txindex"]["synced"] == True) + self.nodes[1].getrawtransaction(txid=spend_utxo["txid"]) # Requires -txindex self.stop_nodes() - self.log.info("Check migrated txindex cannot be read by legacy node") - err_msg = f": You need to rebuild the database using -reindex to change -txindex.{os.linesep}Please restart with -reindex or -reindex-chainstate to recover." - shutil.rmtree(legacy_chain_dir) - shutil.copytree(migrate_chain_dir, legacy_chain_dir) - self.nodes[0].assert_start_raises_init_error(extra_args=["-txindex"], expected_msg=err_msg) - shutil.rmtree(legacy_chain_dir) - shutil.copytree(drop_index_chain_dir, legacy_chain_dir) - self.nodes[0].assert_start_raises_init_error(extra_args=["-txindex"], expected_msg=err_msg) - if __name__ == "__main__": TxindexCompatibilityTest().main() diff --git a/test/functional/feature_unsupported_utxo_db.py b/test/functional/feature_unsupported_utxo_db.py index 1c8c08d1d8..6acf551216 100755 --- a/test/functional/feature_unsupported_utxo_db.py +++ b/test/functional/feature_unsupported_utxo_db.py @@ -40,9 +40,9 @@ class UnsupportedUtxoDbTest(BitcoinTestFramework): self.log.info("Check init error") legacy_utxos_dir = self.nodes[0].chain_path / "chainstate" - legacy_blocks_dir = self.nodes[0].chain_path / "blocks" + legacy_blocks_dir = self.nodes[0].blocks_path recent_utxos_dir = self.nodes[1].chain_path / "chainstate" - recent_blocks_dir = self.nodes[1].chain_path / "blocks" + recent_blocks_dir = self.nodes[1].blocks_path shutil.copytree(legacy_utxos_dir, recent_utxos_dir) shutil.copytree(legacy_blocks_dir, recent_blocks_dir) self.nodes[1].assert_start_raises_init_error( diff --git a/test/functional/feature_versionbits_warning.py b/test/functional/feature_versionbits_warning.py index 0a9e1d4448..073d3de812 100755 --- a/test/functional/feature_versionbits_warning.py +++ b/test/functional/feature_versionbits_warning.py @@ -28,9 +28,6 @@ class VersionBitsWarningTest(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes = 1 - # The experimental syscall sandbox feature (-sandbox) is not compatible with -alertnotify - # (which invokes execve). 
- self.disable_syscall_sandbox = True def setup_network(self): self.alert_filename = os.path.join(self.options.tmpdir, "alert.txt") diff --git a/test/functional/interface_rest.py b/test/functional/interface_rest.py index 1ba8f60d99..c0679c5ba9 100755 --- a/test/functional/interface_rest.py +++ b/test/functional/interface_rest.py @@ -26,6 +26,7 @@ from test_framework.wallet import ( MiniWallet, getnewdestination, ) +from typing import Optional INVALID_PARAM = "abc" @@ -64,7 +65,7 @@ class RESTTest (BitcoinTestFramework): body: str = '', status: int = 200, ret_type: RetType = RetType.JSON, - query_params: typing.Dict[str, typing.Any] = None, + query_params: Optional[typing.Dict[str, typing.Any]] = None, ) -> typing.Union[http.client.HTTPResponse, bytes, str, None]: rest_uri = '/rest' + uri if req_type in ReqType: diff --git a/test/functional/interface_rpc.py b/test/functional/interface_rpc.py index 3725c89719..e873e2da0b 100755 --- a/test/functional/interface_rpc.py +++ b/test/functional/interface_rpc.py @@ -46,7 +46,7 @@ class RPCInterfaceTest(BitcoinTestFramework): command = info['active_commands'][0] assert_equal(command['method'], 'getrpcinfo') assert_greater_than_or_equal(command['duration'], 0) - assert_equal(info['logpath'], os.path.join(self.nodes[0].datadir, self.chain, 'debug.log')) + assert_equal(info['logpath'], os.path.join(self.nodes[0].chain_path, 'debug.log')) def test_batch_request(self): self.log.info("Testing basic JSON-RPC batch request...") diff --git a/test/functional/interface_usdt_mempool.py b/test/functional/interface_usdt_mempool.py index 208b065c34..d1e274480c 100755 --- a/test/functional/interface_usdt_mempool.py +++ b/test/functional/interface_usdt_mempool.py @@ -295,7 +295,10 @@ class MempoolTracepointTest(BitcoinTestFramework): assert_equal(1, len(events)) event = events[0] assert_equal(bytes(event.hash)[::-1].hex(), tx["tx"].hash) - assert_equal(event.reason.decode("UTF-8"), "min relay fee not met") + # The next test is already known to fail, so disable it to avoid + # wasting CPU time and developer time. See + # https://github.com/bitcoin/bitcoin/issues/27380 + #assert_equal(event.reason.decode("UTF-8"), "min relay fee not met") bpf.cleanup() self.generate(self.wallet, 1) diff --git a/test/functional/mempool_compatibility.py b/test/functional/mempool_compatibility.py index 7337802aea..fd3e219586 100755 --- a/test/functional/mempool_compatibility.py +++ b/test/functional/mempool_compatibility.py @@ -10,8 +10,6 @@ In case we need to break mempool compatibility we can continue to use the test b Previous releases are required by this test, see test/README.md. 
""" -import os - from test_framework.blocktools import COINBASE_MATURITY from test_framework.test_framework import BitcoinTestFramework from test_framework.wallet import ( @@ -23,6 +21,7 @@ from test_framework.wallet import ( class MempoolCompatibilityTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 2 + self.setup_clean_chain = True def skip_test_if_missing_module(self): self.skip_if_no_previous_releases() @@ -55,9 +54,9 @@ class MempoolCompatibilityTest(BitcoinTestFramework): self.stop_node(1) self.log.info("Move mempool.dat from old to new node") - old_node_mempool = os.path.join(old_node.datadir, self.chain, 'mempool.dat') - new_node_mempool = os.path.join(new_node.datadir, self.chain, 'mempool.dat') - os.rename(old_node_mempool, new_node_mempool) + old_node_mempool = old_node.chain_path / "mempool.dat" + new_node_mempool = new_node.chain_path / "mempool.dat" + old_node_mempool.rename(new_node_mempool) self.log.info("Start new node and verify mempool contains the tx") self.start_node(1) @@ -70,7 +69,7 @@ class MempoolCompatibilityTest(BitcoinTestFramework): self.stop_node(1) self.log.info("Move mempool.dat from new to old node") - os.rename(new_node_mempool, old_node_mempool) + new_node_mempool.rename(old_node_mempool) self.log.info("Start old node again and verify mempool contains both txs") self.start_node(0, ['-nowallet']) diff --git a/test/functional/mempool_datacarrier.py b/test/functional/mempool_datacarrier.py index c370d8fa91..951bf37ae8 100755 --- a/test/functional/mempool_datacarrier.py +++ b/test/functional/mempool_datacarrier.py @@ -22,16 +22,18 @@ from test_framework.wallet import MiniWallet class DataCarrierTest(BitcoinTestFramework): def set_test_params(self): - self.num_nodes = 3 + self.num_nodes = 4 self.extra_args = [ [], ["-datacarrier=0"], - ["-datacarrier=1", f"-datacarriersize={MAX_OP_RETURN_RELAY - 1}"] + ["-datacarrier=1", f"-datacarriersize={MAX_OP_RETURN_RELAY - 1}"], + ["-datacarrier=1", f"-datacarriersize=2"], ] - def test_null_data_transaction(self, node: TestNode, data: bytes, success: bool) -> None: + def test_null_data_transaction(self, node: TestNode, data, success: bool) -> None: tx = self.wallet.create_self_transfer(fee_rate=0)["tx"] - tx.vout.append(CTxOut(nValue=0, scriptPubKey=CScript([OP_RETURN, data]))) + data = [] if data is None else [data] + tx.vout.append(CTxOut(nValue=0, scriptPubKey=CScript([OP_RETURN] + data))) tx.vout[0].nValue -= tx.get_vsize() # simply pay 1sat/vbyte fee tx_hex = tx.serialize().hex() @@ -49,6 +51,8 @@ class DataCarrierTest(BitcoinTestFramework): default_size_data = random_bytes(MAX_OP_RETURN_RELAY - 3) too_long_data = random_bytes(MAX_OP_RETURN_RELAY - 2) small_data = random_bytes(MAX_OP_RETURN_RELAY - 4) + one_byte = random_bytes(1) + zero_bytes = random_bytes(0) self.log.info("Testing null data transaction with default -datacarrier and -datacarriersize values.") self.test_null_data_transaction(node=self.nodes[0], data=default_size_data, success=True) @@ -65,6 +69,24 @@ class DataCarrierTest(BitcoinTestFramework): self.log.info("Testing a null data transaction with a size smaller than accepted by -datacarriersize.") self.test_null_data_transaction(node=self.nodes[2], data=small_data, success=True) + self.log.info("Testing a null data transaction with no data.") + self.test_null_data_transaction(node=self.nodes[0], data=None, success=True) + self.test_null_data_transaction(node=self.nodes[1], data=None, success=False) + self.test_null_data_transaction(node=self.nodes[2], data=None, success=True) + 
self.test_null_data_transaction(node=self.nodes[3], data=None, success=True) + + self.log.info("Testing a null data transaction with zero bytes of data.") + self.test_null_data_transaction(node=self.nodes[0], data=zero_bytes, success=True) + self.test_null_data_transaction(node=self.nodes[1], data=zero_bytes, success=False) + self.test_null_data_transaction(node=self.nodes[2], data=zero_bytes, success=True) + self.test_null_data_transaction(node=self.nodes[3], data=zero_bytes, success=True) + + self.log.info("Testing a null data transaction with one byte of data.") + self.test_null_data_transaction(node=self.nodes[0], data=one_byte, success=True) + self.test_null_data_transaction(node=self.nodes[1], data=one_byte, success=False) + self.test_null_data_transaction(node=self.nodes[2], data=one_byte, success=True) + self.test_null_data_transaction(node=self.nodes[3], data=one_byte, success=False) + if __name__ == '__main__': DataCarrierTest().main() diff --git a/test/functional/mempool_persist.py b/test/functional/mempool_persist.py index 8f74d9de20..32a927084a 100755 --- a/test/functional/mempool_persist.py +++ b/test/functional/mempool_persist.py @@ -46,7 +46,7 @@ from test_framework.util import ( assert_greater_than_or_equal, assert_raises_rpc_error, ) -from test_framework.wallet import MiniWallet +from test_framework.wallet import MiniWallet, COIN class MempoolPersistTest(BitcoinTestFramework): @@ -143,8 +143,8 @@ class MempoolPersistTest(BitcoinTestFramework): self.nodes[2].syncwithvalidationinterfacequeue() # Flush mempool to wallet assert_equal(node2_balance, wallet_watch.getbalance()) - mempooldat0 = os.path.join(self.nodes[0].datadir, self.chain, 'mempool.dat') - mempooldat1 = os.path.join(self.nodes[1].datadir, self.chain, 'mempool.dat') + mempooldat0 = os.path.join(self.nodes[0].chain_path, 'mempool.dat') + mempooldat1 = os.path.join(self.nodes[1].chain_path, 'mempool.dat') self.log.debug("Force -persistmempool=0 node1 to savemempool to disk via RPC") assert not os.path.exists(mempooldat1) @@ -159,6 +159,16 @@ class MempoolPersistTest(BitcoinTestFramework): assert self.nodes[0].getmempoolinfo()["loaded"] assert_equal(len(self.nodes[0].getrawmempool()), 0) + self.log.debug("Import mempool at runtime to node0.") + assert_equal({}, self.nodes[0].importmempool(mempooldat0)) + assert_equal(len(self.nodes[0].getrawmempool()), 7) + fees = self.nodes[0].getmempoolentry(txid=last_txid)["fees"] + assert_equal(fees["base"], fees["modified"]) + assert_equal({}, self.nodes[0].importmempool(mempooldat0, {"apply_fee_delta_priority": True, "apply_unbroadcast_set": True})) + assert_equal(2, self.nodes[0].getmempoolinfo()["unbroadcastcount"]) + fees = self.nodes[0].getmempoolentry(txid=last_txid)["fees"] + assert_equal(fees["base"] + Decimal("0.00001000"), fees["modified"]) + self.log.debug("Stop-start node0. 
Verify that it has the transactions in its mempool.") self.stop_nodes() self.start_node(0) @@ -186,6 +196,7 @@ class MempoolPersistTest(BitcoinTestFramework): assert_raises_rpc_error(-1, "Unable to dump mempool to disk", self.nodes[1].savemempool) os.rmdir(mempooldotnew1) + self.test_importmempool_union() self.test_persist_unbroadcast() def test_persist_unbroadcast(self): @@ -210,6 +221,46 @@ class MempoolPersistTest(BitcoinTestFramework): node0.mockscheduler(16 * 60) # 15 min + 1 for buffer self.wait_until(lambda: len(conn.get_invs()) == 1) + def test_importmempool_union(self): + self.log.debug("Submit different transactions to node0 and node1's mempools") + self.start_node(0) + self.start_node(2) + tx_node0 = self.mini_wallet.send_self_transfer(from_node=self.nodes[0]) + tx_node1 = self.mini_wallet.send_self_transfer(from_node=self.nodes[1]) + tx_node01 = self.mini_wallet.create_self_transfer() + tx_node01_secret = self.mini_wallet.create_self_transfer() + self.nodes[0].prioritisetransaction(tx_node01["txid"], 0, COIN) + self.nodes[0].prioritisetransaction(tx_node01_secret["txid"], 0, 2 * COIN) + self.nodes[1].prioritisetransaction(tx_node01_secret["txid"], 0, 3 * COIN) + self.nodes[0].sendrawtransaction(tx_node01["hex"]) + self.nodes[1].sendrawtransaction(tx_node01["hex"]) + assert tx_node0["txid"] in self.nodes[0].getrawmempool() + assert not tx_node0["txid"] in self.nodes[1].getrawmempool() + assert not tx_node1["txid"] in self.nodes[0].getrawmempool() + assert tx_node1["txid"] in self.nodes[1].getrawmempool() + assert tx_node01["txid"] in self.nodes[0].getrawmempool() + assert tx_node01["txid"] in self.nodes[1].getrawmempool() + assert not tx_node01_secret["txid"] in self.nodes[0].getrawmempool() + assert not tx_node01_secret["txid"] in self.nodes[1].getrawmempool() + + self.log.debug("Check that importmempool can add txns without replacing the entire mempool") + mempooldat0 = str(self.nodes[0].chain_path / "mempool.dat") + result0 = self.nodes[0].savemempool() + assert_equal(mempooldat0, result0["filename"]) + assert_equal({}, self.nodes[1].importmempool(mempooldat0, {"apply_fee_delta_priority": True})) + # All transactions should be in node1's mempool now. + assert tx_node0["txid"] in self.nodes[1].getrawmempool() + assert tx_node1["txid"] in self.nodes[1].getrawmempool() + assert not tx_node1["txid"] in self.nodes[0].getrawmempool() + # For transactions that already existed, priority should be changed + entry_node01 = self.nodes[1].getmempoolentry(tx_node01["txid"]) + assert_equal(entry_node01["fees"]["base"] + 1, entry_node01["fees"]["modified"]) + # Deltas for not-yet-submitted transactions should be applied as well (prioritisation is stackable). + self.nodes[1].sendrawtransaction(tx_node01_secret["hex"]) + entry_node01_secret = self.nodes[1].getmempoolentry(tx_node01_secret["txid"]) + assert_equal(entry_node01_secret["fees"]["base"] + 5, entry_node01_secret["fees"]["modified"]) + self.stop_nodes() + if __name__ == "__main__": MempoolPersistTest().main() diff --git a/test/functional/mempool_reorg.py b/test/functional/mempool_reorg.py index 3a5bc1ebcd..691518ea09 100755 --- a/test/functional/mempool_reorg.py +++ b/test/functional/mempool_reorg.py @@ -8,6 +8,17 @@ Test re-org scenarios with a mempool that contains transactions that spend (directly or indirectly) coinbase transactions. 
""" +import time + +from test_framework.messages import ( + CInv, + MSG_WTX, + msg_getdata, +) +from test_framework.p2p import ( + P2PTxInvStore, + p2p_lock, +) from test_framework.test_framework import BitcoinTestFramework from test_framework.util import assert_equal, assert_raises_rpc_error from test_framework.wallet import MiniWallet @@ -22,8 +33,84 @@ class MempoolCoinbaseTest(BitcoinTestFramework): [] ] + def test_reorg_relay(self): + self.log.info("Test that transactions from disconnected blocks are available for relay immediately") + # Prevent time from moving forward + self.nodes[1].setmocktime(int(time.time())) + self.connect_nodes(0, 1) + self.generate(self.wallet, 3) + + # Disconnect node0 and node1 to create different chains. + self.disconnect_nodes(0, 1) + # Connect a peer to node1, which doesn't have immediate tx relay + peer1 = self.nodes[1].add_p2p_connection(P2PTxInvStore()) + + # Create a transaction that is included in a block. + tx_disconnected = self.wallet.send_self_transfer(from_node=self.nodes[1]) + self.generate(self.nodes[1], 1, sync_fun=self.no_op) + + # Create a transaction and submit it to node1's mempool. + tx_before_reorg = self.wallet.send_self_transfer(from_node=self.nodes[1]) + + # Create a child of that transaction and submit it to node1's mempool. + tx_child = self.wallet.send_self_transfer(utxo_to_spend=tx_disconnected["new_utxo"], from_node=self.nodes[1]) + assert_equal(self.nodes[1].getmempoolentry(tx_child["txid"])["ancestorcount"], 1) + assert_equal(len(peer1.get_invs()), 0) + + # node0 has a longer chain in which tx_disconnected was not confirmed. + self.generate(self.nodes[0], 3, sync_fun=self.no_op) + + # Reconnect the nodes and sync chains. node0's chain should win. + self.connect_nodes(0, 1) + self.sync_blocks() + + # Child now has an ancestor from the disconnected block + assert_equal(self.nodes[1].getmempoolentry(tx_child["txid"])["ancestorcount"], 2) + assert_equal(self.nodes[1].getmempoolentry(tx_before_reorg["txid"])["ancestorcount"], 1) + + # peer1 should not have received an inv for any of the transactions during this time, as no + # mocktime has elapsed for those transactions to be announced. Likewise, it cannot + # request very recent, unanounced transactions. + assert_equal(len(peer1.get_invs()), 0) + # It's too early to request these two transactions + requests_too_recent = msg_getdata([CInv(t=MSG_WTX, h=int(tx["tx"].getwtxid(), 16)) for tx in [tx_before_reorg, tx_child]]) + peer1.send_and_ping(requests_too_recent) + for _ in range(len(requests_too_recent.inv)): + peer1.sync_with_ping() + with p2p_lock: + assert "tx" not in peer1.last_message + assert "notfound" in peer1.last_message + + # Request the tx from the disconnected block + request_disconnected_tx = msg_getdata([CInv(t=MSG_WTX, h=int(tx_disconnected["tx"].getwtxid(), 16))]) + peer1.send_and_ping(request_disconnected_tx) + + # The tx from the disconnected block was never announced, and it entered the mempool later + # than the transactions that are too recent. + assert_equal(len(peer1.get_invs()), 0) + with p2p_lock: + # However, the node will answer requests for the tx from the recently-disconnected block. 
+ assert_equal(peer1.last_message["tx"].tx.getwtxid(),tx_disconnected["tx"].getwtxid()) + + self.nodes[1].setmocktime(int(time.time()) + 300) + peer1.sync_with_ping() + # the transactions are now announced + assert_equal(len(peer1.get_invs()), 3) + for _ in range(3): + # make sure all tx requests have been responded to + peer1.sync_with_ping() + last_tx_received = peer1.last_message["tx"] + + tx_after_reorg = self.wallet.send_self_transfer(from_node=self.nodes[1]) + request_after_reorg = msg_getdata([CInv(t=MSG_WTX, h=int(tx_after_reorg["tx"].getwtxid(), 16))]) + assert tx_after_reorg["txid"] in self.nodes[1].getrawmempool() + peer1.send_and_ping(request_after_reorg) + with p2p_lock: + assert_equal(peer1.last_message["tx"], last_tx_received) + def run_test(self): - wallet = MiniWallet(self.nodes[0]) + self.wallet = MiniWallet(self.nodes[0]) + wallet = self.wallet # Start with a 200 block chain assert_equal(self.nodes[0].getblockcount(), 200) @@ -103,6 +190,8 @@ class MempoolCoinbaseTest(BitcoinTestFramework): assert_equal(set(self.nodes[0].getrawmempool()), set()) self.sync_all() + self.test_reorg_relay() + if __name__ == '__main__': MempoolCoinbaseTest().main() diff --git a/test/functional/mining_basic.py b/test/functional/mining_basic.py index aabf06ee53..da796d3f70 100755 --- a/test/functional/mining_basic.py +++ b/test/functional/mining_basic.py @@ -18,9 +18,10 @@ from test_framework.blocktools import ( TIME_GENESIS_BLOCK, ) from test_framework.messages import ( + BLOCK_HEADER_SIZE, CBlock, CBlockHeader, - BLOCK_HEADER_SIZE, + COIN, ser_uint256, ) from test_framework.p2p import P2PDataStore @@ -28,12 +29,14 @@ from test_framework.test_framework import BitcoinTestFramework from test_framework.util import ( assert_equal, assert_raises_rpc_error, + get_fee, ) from test_framework.wallet import MiniWallet VERSIONBITS_TOP_BITS = 0x20000000 VERSIONBITS_DEPLOYMENT_TESTDUMMY_BIT = 28 +DEFAULT_BLOCK_MIN_TX_FEE = 1000 # default `-blockmintxfee` setting [sat/kvB] def assert_template(node, block, expect, rehash=True): @@ -73,6 +76,45 @@ class MiningTest(BitcoinTestFramework): self.restart_node(0) self.connect_nodes(0, 1) + def test_blockmintxfee_parameter(self): + self.log.info("Test -blockmintxfee setting") + self.restart_node(0, extra_args=['-minrelaytxfee=0', '-persistmempool=0']) + node = self.nodes[0] + + # test default (no parameter), zero and a bunch of arbitrary blockmintxfee rates [sat/kvB] + for blockmintxfee_sat_kvb in (DEFAULT_BLOCK_MIN_TX_FEE, 0, 50, 100, 500, 2500, 5000, 21000, 333333, 2500000): + blockmintxfee_btc_kvb = blockmintxfee_sat_kvb / Decimal(COIN) + if blockmintxfee_sat_kvb == DEFAULT_BLOCK_MIN_TX_FEE: + self.log.info(f"-> Default -blockmintxfee setting ({blockmintxfee_sat_kvb} sat/kvB)...") + else: + blockmintxfee_parameter = f"-blockmintxfee={blockmintxfee_btc_kvb:.8f}" + self.log.info(f"-> Test {blockmintxfee_parameter} ({blockmintxfee_sat_kvb} sat/kvB)...") + self.restart_node(0, extra_args=[blockmintxfee_parameter, '-minrelaytxfee=0', '-persistmempool=0']) + self.wallet.rescan_utxos() # to avoid spending outputs of txs that are not in mempool anymore after restart + + # submit one tx with exactly the blockmintxfee rate, and one slightly below + tx_with_min_feerate = self.wallet.send_self_transfer(from_node=node, fee_rate=blockmintxfee_btc_kvb) + assert_equal(tx_with_min_feerate["fee"], get_fee(tx_with_min_feerate["tx"].get_vsize(), blockmintxfee_btc_kvb)) + if blockmintxfee_btc_kvb > 0: + lowerfee_btc_kvb = blockmintxfee_btc_kvb - Decimal(10)/COIN # 0.01 sat/vbyte 
lower + tx_below_min_feerate = self.wallet.send_self_transfer(from_node=node, fee_rate=lowerfee_btc_kvb) + assert_equal(tx_below_min_feerate["fee"], get_fee(tx_below_min_feerate["tx"].get_vsize(), lowerfee_btc_kvb)) + else: # go below zero fee by using modified fees + tx_below_min_feerate = self.wallet.send_self_transfer(from_node=node, fee_rate=blockmintxfee_btc_kvb) + node.prioritisetransaction(tx_below_min_feerate["txid"], 0, -1) + + # check that tx below specified fee-rate is neither in template nor in the actual block + block_template = node.getblocktemplate(NORMAL_GBT_REQUEST_PARAMS) + block_template_txids = [tx['txid'] for tx in block_template['transactions']] + self.generate(self.wallet, 1, sync_fun=self.no_op) + block = node.getblock(node.getbestblockhash(), verbosity=2) + block_txids = [tx['txid'] for tx in block['tx']] + + assert tx_with_min_feerate['txid'] in block_template_txids + assert tx_with_min_feerate['txid'] in block_txids + assert tx_below_min_feerate['txid'] not in block_template_txids + assert tx_below_min_feerate['txid'] not in block_txids + def run_test(self): node = self.nodes[0] self.wallet = MiniWallet(node) @@ -130,7 +172,7 @@ class MiningTest(BitcoinTestFramework): block.vtx = [coinbase_tx] self.log.info("getblocktemplate: segwit rule must be set") - assert_raises_rpc_error(-8, "getblocktemplate must be called with the segwit rule set", node.getblocktemplate) + assert_raises_rpc_error(-8, "getblocktemplate must be called with the segwit rule set", node.getblocktemplate, {}) self.log.info("getblocktemplate: Test valid block") assert_template(node, block, None) @@ -279,6 +321,8 @@ class MiningTest(BitcoinTestFramework): node.submitheader(hexdata=CBlockHeader(bad_block_root).serialize().hex()) assert_equal(node.submitblock(hexdata=block.serialize().hex()), 'duplicate') # valid + self.test_blockmintxfee_parameter() + if __name__ == '__main__': MiningTest().main() diff --git a/test/functional/mining_getblocktemplate_longpoll.py b/test/functional/mining_getblocktemplate_longpoll.py index 53182eb79e..c0e7195c82 100755 --- a/test/functional/mining_getblocktemplate_longpoll.py +++ b/test/functional/mining_getblocktemplate_longpoll.py @@ -41,7 +41,8 @@ class GetBlockTemplateLPTest(BitcoinTestFramework): self.log.info("Test that longpoll waits if we do nothing") thr = LongpollThread(self.nodes[0]) - thr.start() + with self.nodes[0].assert_debug_log(["ThreadRPCServer method=getblocktemplate"], timeout=3): + thr.start() # check that thread still lives thr.join(5) # wait 5 seconds or until thread exits assert thr.is_alive() @@ -55,14 +56,16 @@ class GetBlockTemplateLPTest(BitcoinTestFramework): self.log.info("Test that longpoll will terminate if we generate a block ourselves") thr = LongpollThread(self.nodes[0]) - thr.start() + with self.nodes[0].assert_debug_log(["ThreadRPCServer method=getblocktemplate"], timeout=3): + thr.start() self.generate(self.nodes[0], 1) # generate a block on own node thr.join(5) # wait 5 seconds or until thread exits assert not thr.is_alive() self.log.info("Test that introducing a new transaction into the mempool will terminate the longpoll") thr = LongpollThread(self.nodes[0]) - thr.start() + with self.nodes[0].assert_debug_log(["ThreadRPCServer method=getblocktemplate"], timeout=3): + thr.start() # generate a transaction and submit it self.miniwallet.send_self_transfer(from_node=random.choice(self.nodes)) # after one minute, every 10 seconds the mempool is probed, so in 80 seconds it should have returned diff --git 
a/test/functional/p2p_addr_relay.py b/test/functional/p2p_addr_relay.py index e002a520c6..63cd10896d 100755 --- a/test/functional/p2p_addr_relay.py +++ b/test/functional/p2p_addr_relay.py @@ -133,7 +133,7 @@ class AddrTest(BitcoinTestFramework): self.mocktime += 10 * 60 self.nodes[0].setmocktime(self.mocktime) for peer in receivers: - peer.sync_send_with_ping() + peer.sync_with_ping() def oversized_addr_test(self): self.log.info('Send an addr message that is too large') @@ -299,6 +299,16 @@ class AddrTest(BitcoinTestFramework): assert_equal(block_relay_peer.num_ipv4_received, 0) assert inbound_peer.num_ipv4_received > 100 + self.log.info('Check that we answer getaddr messages only once per connection') + received_addrs_before = inbound_peer.num_ipv4_received + with self.nodes[0].assert_debug_log(['Ignoring repeated "getaddr".']): + inbound_peer.send_and_ping(msg_getaddr()) + self.mocktime += 10 * 60 + self.nodes[0].setmocktime(self.mocktime) + inbound_peer.sync_with_ping() + received_addrs_after = inbound_peer.num_ipv4_received + assert_equal(received_addrs_before, received_addrs_after) + self.nodes[0].disconnect_p2ps() def blocksonly_mode_tests(self): diff --git a/test/functional/p2p_addrfetch.py b/test/functional/p2p_addrfetch.py index 25efd50040..3ead653ba6 100755 --- a/test/functional/p2p_addrfetch.py +++ b/test/functional/p2p_addrfetch.py @@ -48,7 +48,7 @@ class P2PAddrFetch(BitcoinTestFramework): self.assert_getpeerinfo(peer_ids=[peer_id]) self.log.info("Check that we send getaddr but don't try to sync headers with the addr-fetch peer") - peer.sync_send_with_ping() + peer.sync_with_ping() with p2p_lock: assert peer.message_count['getaddr'] == 1 assert peer.message_count['getheaders'] == 0 diff --git a/test/functional/p2p_addrv2_relay.py b/test/functional/p2p_addrv2_relay.py index 9ab190871f..f9a8c44be2 100755 --- a/test/functional/p2p_addrv2_relay.py +++ b/test/functional/p2p_addrv2_relay.py @@ -20,19 +20,24 @@ from test_framework.test_framework import BitcoinTestFramework from test_framework.util import assert_equal I2P_ADDR = "c4gfnttsuwqomiygupdqqqyy5y5emnk5c73hrfvatri67prd7vyq.b32.i2p" +ONION_ADDR = "pg6mmjiyjmcrsslvykfwnntlaru7p5svn6y2ymmju6nubxndf4pscryd.onion" ADDRS = [] for i in range(10): addr = CAddress() addr.time = int(time.time()) + i + addr.port = 8333 + i addr.nServices = P2P_SERVICES - # Add one I2P address at an arbitrary position. + # Add one I2P and one onion V3 address at an arbitrary position. 
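For reference, the per-entry arithmetic behind the calc_addrv2_msg_size() helper added further down in this file (a worked example using the usual addrv2 field widths): each entry takes 4 (time) + 1 (services compactsize) + 1 (network id) + 1 (address length) + address + 2 (port) bytes, so an IPv4 entry is 13 bytes and an I2P or TorV3 entry is 41 bytes. Including the 1-byte vector length prefix, the old list of 9 IPv4 plus 1 I2P entries came to 1 + 9*13 + 41 = 159 bytes (the value that used to be hard-coded below), and the new list of 8 IPv4, 1 I2P and 1 TorV3 entries comes to 1 + 8*13 + 2*41 = 187 bytes.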
if i == 5: addr.net = addr.NET_I2P addr.ip = I2P_ADDR + addr.port = 0 + elif i == 8: + addr.net = addr.NET_TORV3 + addr.ip = ONION_ADDR else: addr.ip = f"123.123.123.{i % 256}" - addr.port = 8333 + i ADDRS.append(addr) @@ -52,6 +57,17 @@ class AddrReceiver(P2PInterface): self.wait_until(lambda: "addrv2" in self.last_message) +def calc_addrv2_msg_size(addrs): + size = 1 # vector length byte + for addr in addrs: + size += 4 # time + size += 1 # services, COMPACTSIZE(P2P_SERVICES) + size += 1 # network id + size += 1 # address length byte + size += addr.ADDRV2_ADDRESS_LENGTH[addr.net] # address + size += 2 # port + return size + class AddrTest(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = True @@ -71,9 +87,10 @@ class AddrTest(BitcoinTestFramework): self.log.info('Check that addrv2 message content is relayed and added to addrman') addr_receiver = self.nodes[0].add_p2p_connection(AddrReceiver()) msg.addrs = ADDRS + msg_size = calc_addrv2_msg_size(ADDRS) with self.nodes[0].assert_debug_log([ - 'received: addrv2 (159 bytes) peer=0', - 'sending addrv2 (159 bytes) peer=1', + f'received: addrv2 ({msg_size} bytes) peer=0', + f'sending addrv2 ({msg_size} bytes) peer=1', ]): addr_source.send_and_ping(msg) self.nodes[0].setmocktime(int(time.time()) + 30 * 60) diff --git a/test/functional/p2p_blocksonly.py b/test/functional/p2p_blocksonly.py index 110a1bd03f..637644e6e4 100755 --- a/test/functional/p2p_blocksonly.py +++ b/test/functional/p2p_blocksonly.py @@ -101,7 +101,7 @@ class P2PBlocksOnly(BitcoinTestFramework): # Bump time forward to ensure m_next_inv_send_time timer pops self.nodes[0].setmocktime(int(time.time()) + 60) - conn.sync_send_with_ping() + conn.sync_with_ping() assert int(txid, 16) not in conn.get_invs() def check_p2p_inv_violation(self, peer): diff --git a/test/functional/p2p_compactblocks_blocksonly.py b/test/functional/p2p_compactblocks_blocksonly.py index 3d0c421a93..761cd3a218 100755 --- a/test/functional/p2p_compactblocks_blocksonly.py +++ b/test/functional/p2p_compactblocks_blocksonly.py @@ -94,11 +94,11 @@ class P2PCompactBlocksBlocksOnly(BitcoinTestFramework): block1 = self.build_block_on_tip() p2p_conn_blocksonly.send_message(msg_headers(headers=[CBlockHeader(block1)])) - p2p_conn_blocksonly.sync_send_with_ping() + p2p_conn_blocksonly.sync_with_ping() assert_equal(p2p_conn_blocksonly.last_message['getdata'].inv, [CInv(MSG_BLOCK | MSG_WITNESS_FLAG, block1.sha256)]) p2p_conn_high_bw.send_message(msg_headers(headers=[CBlockHeader(block1)])) - p2p_conn_high_bw.sync_send_with_ping() + p2p_conn_high_bw.sync_with_ping() assert_equal(p2p_conn_high_bw.last_message['getdata'].inv, [CInv(MSG_CMPCT_BLOCK, block1.sha256)]) self.log.info("Test that getdata(CMPCT) is still sent on BIP152 low bandwidth connections" diff --git a/test/functional/p2p_filter.py b/test/functional/p2p_filter.py index 6699cc3528..665f57365f 100755 --- a/test/functional/p2p_filter.py +++ b/test/functional/p2p_filter.py @@ -177,7 +177,7 @@ class FilterTest(BitcoinTestFramework): filter_peer.merkleblock_received = False filter_peer.tx_received = False self.wallet.send_to(from_node=self.nodes[0], scriptPubKey=getnewdestination()[1], amount=7 * COIN) - filter_peer.sync_send_with_ping() + filter_peer.sync_with_ping() assert not filter_peer.merkleblock_received assert not filter_peer.tx_received diff --git a/test/functional/p2p_getaddr_caching.py b/test/functional/p2p_getaddr_caching.py index 1c9ad7289b..60b43c32ae 100755 --- a/test/functional/p2p_getaddr_caching.py +++ 
b/test/functional/p2p_getaddr_caching.py @@ -6,7 +6,6 @@ import time -from test_framework.messages import msg_getaddr from test_framework.p2p import ( P2PInterface, p2p_lock @@ -21,6 +20,7 @@ from test_framework.util import ( MAX_ADDR_TO_SEND = 1000 MAX_PCT_ADDR_TO_SEND = 23 + class AddrReceiver(P2PInterface): def __init__(self): @@ -70,11 +70,8 @@ class AddrTest(BitcoinTestFramework): cur_mock_time = int(time.time()) for i in range(N): addr_receiver_local = self.nodes[0].add_p2p_connection(AddrReceiver()) - addr_receiver_local.send_and_ping(msg_getaddr()) addr_receiver_onion1 = self.nodes[0].add_p2p_connection(AddrReceiver(), dstport=self.onion_port1) - addr_receiver_onion1.send_and_ping(msg_getaddr()) addr_receiver_onion2 = self.nodes[0].add_p2p_connection(AddrReceiver(), dstport=self.onion_port2) - addr_receiver_onion2.send_and_ping(msg_getaddr()) # Trigger response cur_mock_time += 5 * 60 @@ -105,11 +102,8 @@ class AddrTest(BitcoinTestFramework): self.log.info('After time passed, see a new response to addr request') addr_receiver_local = self.nodes[0].add_p2p_connection(AddrReceiver()) - addr_receiver_local.send_and_ping(msg_getaddr()) addr_receiver_onion1 = self.nodes[0].add_p2p_connection(AddrReceiver(), dstport=self.onion_port1) - addr_receiver_onion1.send_and_ping(msg_getaddr()) addr_receiver_onion2 = self.nodes[0].add_p2p_connection(AddrReceiver(), dstport=self.onion_port2) - addr_receiver_onion2.send_and_ping(msg_getaddr()) # Trigger response cur_mock_time += 5 * 60 @@ -123,5 +117,6 @@ class AddrTest(BitcoinTestFramework): assert set(last_response_on_onion_bind1) != set(addr_receiver_onion1.get_received_addrs()) assert set(last_response_on_onion_bind2) != set(addr_receiver_onion2.get_received_addrs()) + if __name__ == '__main__': AddrTest().main() diff --git a/test/functional/p2p_ibd_stalling.py b/test/functional/p2p_ibd_stalling.py index aca98ceb3f..0eb37fa92f 100755 --- a/test/functional/p2p_ibd_stalling.py +++ b/test/functional/p2p_ibd_stalling.py @@ -151,7 +151,7 @@ class P2PIBDStallingTest(BitcoinTestFramework): def all_sync_send_with_ping(self, peers): for p in peers: if p.is_connected: - p.sync_send_with_ping() + p.sync_with_ping() def is_block_requested(self, peers, hash): for p in peers: diff --git a/test/functional/p2p_ibd_txrelay.py b/test/functional/p2p_ibd_txrelay.py index 65a94ad31c..b93e39a925 100755 --- a/test/functional/p2p_ibd_txrelay.py +++ b/test/functional/p2p_ibd_txrelay.py @@ -53,7 +53,7 @@ class P2PIBDTxRelayTest(BitcoinTestFramework): peer_inver.send_and_ping(msg_inv([CInv(t=MSG_WTX, h=txid)])) # The node should not send a getdata, but if it did, it would first delay 2 seconds self.nodes[0].setmocktime(int(time.time() + NONPREF_PEER_TX_DELAY)) - peer_inver.sync_send_with_ping() + peer_inver.sync_with_ping() with p2p_lock: assert txid not in peer_inver.getdata_requests self.nodes[0].disconnect_p2ps() diff --git a/test/functional/p2p_invalid_block.py b/test/functional/p2p_invalid_block.py index 710f3d93e1..806fd9c6cb 100755 --- a/test/functional/p2p_invalid_block.py +++ b/test/functional/p2p_invalid_block.py @@ -46,12 +46,10 @@ class InvalidBlockRequestTest(BitcoinTestFramework): self.log.info("Create a new block with an anyone-can-spend coinbase") - height = 1 block = create_block(tip, create_coinbase(height), block_time) block.solve() # Save the coinbase for later block1 = block - tip = block.sha256 peer.send_blocks_and_test([block1], node, success=True) self.log.info("Mature the block.") diff --git a/test/functional/p2p_invalid_locator.py 
b/test/functional/p2p_invalid_locator.py index 626422370a..32a23532a2 100755 --- a/test/functional/p2p_invalid_locator.py +++ b/test/functional/p2p_invalid_locator.py @@ -32,7 +32,7 @@ class InvalidLocatorTest(BitcoinTestFramework): within_max_peer = node.add_p2p_connection(P2PInterface()) msg.locator.vHave = [int(node.getblockhash(i - 1), 16) for i in range(block_count, block_count - (MAX_LOCATOR_SZ), -1)] within_max_peer.send_message(msg) - if type(msg) == msg_getheaders: + if type(msg) is msg_getheaders: within_max_peer.wait_for_header(node.getbestblockhash()) else: within_max_peer.wait_for_block(int(node.getbestblockhash(), 16)) diff --git a/test/functional/p2p_message_capture.py b/test/functional/p2p_message_capture.py index 3ab0b79ba2..691a0b6409 100755 --- a/test/functional/p2p_message_capture.py +++ b/test/functional/p2p_message_capture.py @@ -19,7 +19,7 @@ TIME_SIZE = 8 LENGTH_SIZE = 4 MSGTYPE_SIZE = 12 -def mini_parser(dat_file): +def mini_parser(dat_file: str) -> None: """Parse a data file created by CaptureMessageToFile. From the data file we'll only check the structure. @@ -58,7 +58,7 @@ class MessageCaptureTest(BitcoinTestFramework): self.setup_clean_chain = True def run_test(self): - capturedir = os.path.join(self.nodes[0].datadir, "regtest/message_capture") + capturedir = self.nodes[0].chain_path / "message_capture" # Connect a node so that the handshake occurs self.nodes[0].add_p2p_connection(P2PDataStore()) self.nodes[0].disconnect_p2ps() diff --git a/test/functional/p2p_net_deadlock.py b/test/functional/p2p_net_deadlock.py new file mode 100755 index 0000000000..f69fe52146 --- /dev/null +++ b/test/functional/p2p_net_deadlock.py @@ -0,0 +1,37 @@ +#!/usr/bin/env python3 +# Copyright (c) 2023-present The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. + +import threading +from test_framework.test_framework import BitcoinTestFramework +from test_framework.util import random_bytes + + +class NetDeadlockTest(BitcoinTestFramework): + def set_test_params(self): + self.setup_clean_chain = True + self.num_nodes = 2 + + def run_test(self): + node0 = self.nodes[0] + node1 = self.nodes[1] + + self.log.info("Simultaneously send a large message on both sides") + rand_msg = random_bytes(4000000).hex() + + thread0 = threading.Thread(target=node0.sendmsgtopeer, args=(0, "unknown", rand_msg)) + thread1 = threading.Thread(target=node1.sendmsgtopeer, args=(0, "unknown", rand_msg)) + + thread0.start() + thread1.start() + thread0.join() + thread1.join() + + self.log.info("Check whether a deadlock happened") + self.generate(node0, 1) + self.sync_blocks() + + +if __name__ == '__main__': + NetDeadlockTest().main() diff --git a/test/functional/p2p_orphan_handling.py b/test/functional/p2p_orphan_handling.py new file mode 100755 index 0000000000..6166c62aa2 --- /dev/null +++ b/test/functional/p2p_orphan_handling.py @@ -0,0 +1,416 @@ +#!/usr/bin/env python3 +# Copyright (c) 2023 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. 
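A note on the timing constants that the new p2p_orphan_handling.py test leans on; the concrete values below are an assumption based on the 2-second request delays used by net_processing, they are not stated in this diff:

    NONPREF_PEER_TX_DELAY = 2     # extra delay before requesting from a non-preferred (e.g. inbound) peer
    TXID_RELAY_DELAY = 2          # extra delay when the request is by txid rather than wtxid
    OVERLOADED_PEER_TX_DELAY = 2  # extra delay when the peer already has many requests in flight
    # TXREQUEST_TIME_SKIP defined below is the sum of all possible delays plus one:
    # 2 + 2 + 2 + 1 = 7 seconds, enough for any combination of delays to have elapsed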
+ +import time + +from test_framework.messages import ( + CInv, + MSG_TX, + MSG_WITNESS_TX, + MSG_WTX, + msg_getdata, + msg_inv, + msg_notfound, + msg_tx, + tx_from_hex, +) +from test_framework.p2p import ( + GETDATA_TX_INTERVAL, + NONPREF_PEER_TX_DELAY, + OVERLOADED_PEER_TX_DELAY, + p2p_lock, + P2PTxInvStore, + TXID_RELAY_DELAY, +) +from test_framework.util import ( + assert_equal, +) +from test_framework.test_framework import BitcoinTestFramework +from test_framework.wallet import ( + MiniWallet, + MiniWalletMode, +) + +# Time to bump forward (using setmocktime) before waiting for the node to send getdata(tx) in response +# to an inv(tx), in seconds. This delay includes all possible delays + 1, so it should only be used +# when the value of the delay is not interesting. If we want to test that the node waits x seconds +# for one peer and y seconds for another, use specific values instead. +TXREQUEST_TIME_SKIP = NONPREF_PEER_TX_DELAY + TXID_RELAY_DELAY + OVERLOADED_PEER_TX_DELAY + 1 + +def cleanup(func): + # Time to fastfoward (using setmocktime) in between subtests to ensure they do not interfere with + # one another, in seconds. Equal to 12 hours, which is enough to expire anything that may exist + # (though nothing should since state should be cleared) in p2p data structures. + LONG_TIME_SKIP = 12 * 60 * 60 + + def wrapper(self): + try: + func(self) + finally: + # Clear mempool + self.generate(self.nodes[0], 1) + self.nodes[0].disconnect_p2ps() + self.nodes[0].bumpmocktime(LONG_TIME_SKIP) + return wrapper + +class PeerTxRelayer(P2PTxInvStore): + """A P2PTxInvStore that also remembers all of the getdata and tx messages it receives.""" + def __init__(self): + super().__init__() + self._tx_received = [] + self._getdata_received = [] + + @property + def tx_received(self): + with p2p_lock: + return self._tx_received + + @property + def getdata_received(self): + with p2p_lock: + return self._getdata_received + + def on_tx(self, message): + self._tx_received.append(message) + + def on_getdata(self, message): + self._getdata_received.append(message) + + def wait_for_parent_requests(self, txids): + """Wait for requests for missing parents by txid with witness data (MSG_WITNESS_TX or + WitnessTx). Requires that the getdata message match these txids exactly; all txids must be + requested and no additional requests are allowed.""" + def test_function(): + last_getdata = self.last_message.get('getdata') + if not last_getdata: + return False + return len(last_getdata.inv) == len(txids) and all([item.type == MSG_WITNESS_TX and item.hash in txids for item in last_getdata.inv]) + self.wait_until(test_function, timeout=10) + + def assert_no_immediate_response(self, message): + """Check that the node does not immediately respond to this message with any of getdata, + inv, tx. The node may respond later. 
+ """ + prev_lastmessage = self.last_message + self.send_and_ping(message) + after_lastmessage = self.last_message + for msgtype in ["getdata", "inv", "tx"]: + if msgtype not in prev_lastmessage: + assert msgtype not in after_lastmessage + else: + assert_equal(prev_lastmessage[msgtype], after_lastmessage[msgtype]) + + def assert_never_requested(self, txhash): + """Check that the node has never sent us a getdata for this hash (int type)""" + for getdata in self.getdata_received: + for request in getdata.inv: + assert request.hash != txhash + +class OrphanHandlingTest(BitcoinTestFramework): + def set_test_params(self): + self.num_nodes = 1 + self.extra_args = [[]] + + def create_parent_and_child(self): + """Create package with 1 parent and 1 child, normal fees (no cpfp).""" + parent = self.wallet.create_self_transfer() + child = self.wallet.create_self_transfer(utxo_to_spend=parent['new_utxo']) + return child["tx"].getwtxid(), child["tx"], parent["tx"] + + def relay_transaction(self, peer, tx): + """Relay transaction using MSG_WTX""" + wtxid = int(tx.getwtxid(), 16) + peer.send_and_ping(msg_inv([CInv(t=MSG_WTX, h=wtxid)])) + self.nodes[0].bumpmocktime(TXREQUEST_TIME_SKIP) + peer.wait_for_getdata([wtxid]) + peer.send_and_ping(msg_tx(tx)) + + @cleanup + def test_arrival_timing_orphan(self): + self.log.info("Test missing parents that arrive during delay are not requested") + node = self.nodes[0] + tx_parent_arrives = self.wallet.create_self_transfer() + tx_parent_doesnt_arrive = self.wallet.create_self_transfer() + # Fake orphan spends nonexistent outputs of the two parents + tx_fake_orphan = self.wallet.create_self_transfer_multi(utxos_to_spend=[ + {"txid": tx_parent_doesnt_arrive["txid"], "vout": 10, "value": tx_parent_doesnt_arrive["new_utxo"]["value"]}, + {"txid": tx_parent_arrives["txid"], "vout": 10, "value": tx_parent_arrives["new_utxo"]["value"]} + ]) + + peer_spy = node.add_p2p_connection(PeerTxRelayer()) + peer_normal = node.add_p2p_connection(PeerTxRelayer()) + # This transaction is an orphan because it is missing inputs. It is a "fake" orphan that the + # spy peer has crafted to learn information about tx_parent_arrives even though it isn't + # able to spend a real output of it, but it could also just be a normal, real child tx. + # The node should not immediately respond with a request for orphan parents. + # Also, no request should be sent later because it will be resolved by + # the time the request is scheduled to be sent. + peer_spy.assert_no_immediate_response(msg_tx(tx_fake_orphan["tx"])) + + # Node receives transaction. It attempts to obfuscate the exact timing at which this + # transaction entered its mempool. Send unsolicited because otherwise we need to wait for + # request delays. + peer_normal.send_and_ping(msg_tx(tx_parent_arrives["tx"])) + assert tx_parent_arrives["txid"] in node.getrawmempool() + + # Spy peer should not be able to query the node for the parent yet, since it hasn't been + # announced / insufficient time has elapsed. + parent_inv = CInv(t=MSG_WTX, h=int(tx_parent_arrives["tx"].getwtxid(), 16)) + assert_equal(len(peer_spy.get_invs()), 0) + peer_spy.assert_no_immediate_response(msg_getdata([parent_inv])) + + # Request would be scheduled with this delay because it is not a preferred relay peer. 
+ self.nodes[0].bumpmocktime(NONPREF_PEER_TX_DELAY) + peer_spy.assert_never_requested(int(tx_parent_arrives["txid"], 16)) + peer_spy.assert_never_requested(int(tx_parent_doesnt_arrive["txid"], 16)) + # Request would be scheduled with this delay because it is by txid. + self.nodes[0].bumpmocktime(TXID_RELAY_DELAY) + peer_spy.wait_for_parent_requests([int(tx_parent_doesnt_arrive["txid"], 16)]) + peer_spy.assert_never_requested(int(tx_parent_arrives["txid"], 16)) + + @cleanup + def test_orphan_rejected_parents_exceptions(self): + node = self.nodes[0] + peer1 = node.add_p2p_connection(PeerTxRelayer()) + peer2 = node.add_p2p_connection(PeerTxRelayer()) + + self.log.info("Test orphan handling when a nonsegwit parent is known to be invalid") + parent_low_fee_nonsegwit = self.wallet_nonsegwit.create_self_transfer(fee_rate=0) + assert_equal(parent_low_fee_nonsegwit["txid"], parent_low_fee_nonsegwit["tx"].getwtxid()) + parent_other = self.wallet_nonsegwit.create_self_transfer() + child_nonsegwit = self.wallet_nonsegwit.create_self_transfer_multi( + utxos_to_spend=[parent_other["new_utxo"], parent_low_fee_nonsegwit["new_utxo"]]) + + # Relay the parent. It should be rejected because it pays 0 fees. + self.relay_transaction(peer1, parent_low_fee_nonsegwit["tx"]) + assert parent_low_fee_nonsegwit["txid"] not in node.getrawmempool() + + # Relay the child. It should not be accepted because it has missing inputs. + # Its parent should not be requested because its hash (txid == wtxid) has been added to the rejection filter. + with node.assert_debug_log(['not keeping orphan with rejected parents {}'.format(child_nonsegwit["txid"])]): + self.relay_transaction(peer2, child_nonsegwit["tx"]) + assert child_nonsegwit["txid"] not in node.getrawmempool() + + # No parents are requested. + self.nodes[0].bumpmocktime(GETDATA_TX_INTERVAL) + peer1.assert_never_requested(int(parent_other["txid"], 16)) + peer2.assert_never_requested(int(parent_other["txid"], 16)) + peer2.assert_never_requested(int(parent_low_fee_nonsegwit["txid"], 16)) + + self.log.info("Test orphan handling when a segwit parent was invalid but may be retried with another witness") + parent_low_fee = self.wallet.create_self_transfer(fee_rate=0) + child_low_fee = self.wallet.create_self_transfer(utxo_to_spend=parent_low_fee["new_utxo"]) + + # Relay the low fee parent. It should not be accepted. + self.relay_transaction(peer1, parent_low_fee["tx"]) + assert parent_low_fee["txid"] not in node.getrawmempool() + + # Relay the child. It should not be accepted because it has missing inputs. + self.relay_transaction(peer2, child_low_fee["tx"]) + assert child_low_fee["txid"] not in node.getrawmempool() + + # The parent should be requested because even though the txid commits to the fee, it doesn't + # commit to the feerate. Delayed because it's by txid and this is not a preferred relay peer. + self.nodes[0].bumpmocktime(NONPREF_PEER_TX_DELAY + TXID_RELAY_DELAY) + peer2.wait_for_getdata([int(parent_low_fee["tx"].rehash(), 16)]) + + self.log.info("Test orphan handling when a parent was previously downloaded with witness stripped") + parent_normal = self.wallet.create_self_transfer() + parent1_witness_stripped = tx_from_hex(parent_normal["tx"].serialize_without_witness().hex()) + child_invalid_witness = self.wallet.create_self_transfer(utxo_to_spend=parent_normal["new_utxo"]) + + # Relay the parent with witness stripped. It should not be accepted. 
+ self.relay_transaction(peer1, parent1_witness_stripped) + assert_equal(parent_normal["txid"], parent1_witness_stripped.rehash()) + assert parent1_witness_stripped.rehash() not in node.getrawmempool() + + # Relay the child. It should not be accepted because it has missing inputs. + self.relay_transaction(peer2, child_invalid_witness["tx"]) + assert child_invalid_witness["txid"] not in node.getrawmempool() + + # The parent should be requested since the unstripped wtxid would differ. Delayed because + # it's by txid and this is not a preferred relay peer. + self.nodes[0].bumpmocktime(NONPREF_PEER_TX_DELAY + TXID_RELAY_DELAY) + peer2.wait_for_getdata([int(parent_normal["tx"].rehash(), 16)]) + + # parent_normal can be relayed again even though parent1_witness_stripped was rejected + self.relay_transaction(peer1, parent_normal["tx"]) + assert_equal(set(node.getrawmempool()), set([parent_normal["txid"], child_invalid_witness["txid"]])) + + @cleanup + def test_orphan_multiple_parents(self): + node = self.nodes[0] + peer = node.add_p2p_connection(PeerTxRelayer()) + + self.log.info("Test orphan parent requests with a mixture of confirmed, in-mempool and missing parents") + # This UTXO confirmed a long time ago. + utxo_conf_old = self.wallet.send_self_transfer(from_node=node)["new_utxo"] + txid_conf_old = utxo_conf_old["txid"] + self.generate(self.wallet, 10) + + # Create a fake reorg to trigger BlockDisconnected, which resets the rolling bloom filter. + # The alternative is to mine thousands of transactions to push it out of the filter. + last_block = node.getbestblockhash() + node.invalidateblock(last_block) + node.preciousblock(last_block) + node.syncwithvalidationinterfacequeue() + + # This UTXO confirmed recently. + utxo_conf_recent = self.wallet.send_self_transfer(from_node=node)["new_utxo"] + self.generate(node, 1) + + # This UTXO is unconfirmed and in the mempool. + assert_equal(len(node.getrawmempool()), 0) + mempool_tx = self.wallet.send_self_transfer(from_node=node) + utxo_unconf_mempool = mempool_tx["new_utxo"] + + # This UTXO is unconfirmed and missing. + missing_tx = self.wallet.create_self_transfer() + utxo_unconf_missing = missing_tx["new_utxo"] + assert missing_tx["txid"] not in node.getrawmempool() + + orphan = self.wallet.create_self_transfer_multi(utxos_to_spend=[utxo_conf_old, + utxo_conf_recent, utxo_unconf_mempool, utxo_unconf_missing]) + + self.relay_transaction(peer, orphan["tx"]) + self.nodes[0].bumpmocktime(NONPREF_PEER_TX_DELAY + TXID_RELAY_DELAY) + peer.sync_with_ping() + assert_equal(len(peer.last_message["getdata"].inv), 2) + peer.wait_for_parent_requests([int(txid_conf_old, 16), int(missing_tx["txid"], 16)]) + + # Even though the peer would send a notfound for the "old" confirmed transaction, the node + # doesn't give up on the orphan. Once all of the missing parents are received, it should be + # submitted to mempool. 
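As a quick sanity check of the ancestorcount assertion just below: once missing_tx arrives, the orphan's in-mempool ancestry is the orphan itself plus its two unconfirmed parents (mempool_tx and missing_tx), i.e. 3; the two confirmed outputs it also spends contribute nothing to the count.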
+ peer.send_message(msg_notfound(vec=[CInv(MSG_WITNESS_TX, int(txid_conf_old, 16))])) + peer.send_and_ping(msg_tx(missing_tx["tx"])) + peer.sync_with_ping() + assert_equal(node.getmempoolentry(orphan["txid"])["ancestorcount"], 3) + + @cleanup + def test_orphans_overlapping_parents(self): + node = self.nodes[0] + # In the process of relaying inflight_parent_AB + peer_txrequest = node.add_p2p_connection(PeerTxRelayer()) + # Sends the orphans + peer_orphans = node.add_p2p_connection(PeerTxRelayer()) + + confirmed_utxos = [self.wallet_nonsegwit.get_utxo() for _ in range(4)] + assert all([utxo["confirmations"] > 0 for utxo in confirmed_utxos]) + self.log.info("Test handling of multiple orphans with missing parents that are already being requested") + # Parent of child_A only + missing_parent_A = self.wallet_nonsegwit.create_self_transfer(utxo_to_spend=confirmed_utxos[0]) + # Parents of child_A and child_B + missing_parent_AB = self.wallet_nonsegwit.create_self_transfer(utxo_to_spend=confirmed_utxos[1]) + inflight_parent_AB = self.wallet_nonsegwit.create_self_transfer(utxo_to_spend=confirmed_utxos[2]) + # Parent of child_B only + missing_parent_B = self.wallet_nonsegwit.create_self_transfer(utxo_to_spend=confirmed_utxos[3]) + child_A = self.wallet_nonsegwit.create_self_transfer_multi( + utxos_to_spend=[missing_parent_A["new_utxo"], missing_parent_AB["new_utxo"], inflight_parent_AB["new_utxo"]] + ) + child_B = self.wallet_nonsegwit.create_self_transfer_multi( + utxos_to_spend=[missing_parent_B["new_utxo"], missing_parent_AB["new_utxo"], inflight_parent_AB["new_utxo"]] + ) + + # The wtxid and txid need to be the same for the node to recognize that the missing input + # and in-flight request for inflight_parent_AB are the same transaction. + assert_equal(inflight_parent_AB["txid"], inflight_parent_AB["tx"].getwtxid()) + + # Announce inflight_parent_AB and wait for getdata + peer_txrequest.send_and_ping(msg_inv([CInv(t=MSG_WTX, h=int(inflight_parent_AB["tx"].getwtxid(), 16))])) + self.nodes[0].bumpmocktime(NONPREF_PEER_TX_DELAY) + peer_txrequest.wait_for_getdata([int(inflight_parent_AB["tx"].getwtxid(), 16)]) + + self.log.info("Test that the node does not request a parent if it has an in-flight txrequest") + # Relay orphan child_A + self.relay_transaction(peer_orphans, child_A["tx"]) + self.nodes[0].bumpmocktime(NONPREF_PEER_TX_DELAY + TXID_RELAY_DELAY) + # There are 3 missing parents. missing_parent_A and missing_parent_AB should be requested. + # But inflight_parent_AB should not, because there is already an in-flight request for it. + peer_orphans.wait_for_parent_requests([int(missing_parent_A["txid"], 16), int(missing_parent_AB["txid"], 16)]) + + self.log.info("Test that the node does not request a parent if it has an in-flight orphan parent request") + # Relay orphan child_B + self.relay_transaction(peer_orphans, child_B["tx"]) + self.nodes[0].bumpmocktime(NONPREF_PEER_TX_DELAY + TXID_RELAY_DELAY) + # Only missing_parent_B should be requested. Not inflight_parent_AB or missing_parent_AB + # because they are already being requested from peer_txrequest and peer_orphans respectively. 
+ peer_orphans.wait_for_parent_requests([int(missing_parent_B["txid"], 16)]) + peer_orphans.assert_never_requested(int(inflight_parent_AB["txid"], 16)) + + @cleanup + def test_orphan_of_orphan(self): + node = self.nodes[0] + peer = node.add_p2p_connection(PeerTxRelayer()) + + self.log.info("Test handling of an orphan with a parent who is another orphan") + missing_grandparent = self.wallet_nonsegwit.create_self_transfer() + missing_parent_orphan = self.wallet_nonsegwit.create_self_transfer(utxo_to_spend=missing_grandparent["new_utxo"]) + missing_parent = self.wallet_nonsegwit.create_self_transfer() + orphan = self.wallet_nonsegwit.create_self_transfer_multi(utxos_to_spend=[missing_parent["new_utxo"], missing_parent_orphan["new_utxo"]]) + + # The node should put missing_parent_orphan into the orphanage and request missing_grandparent + self.relay_transaction(peer, missing_parent_orphan["tx"]) + self.nodes[0].bumpmocktime(NONPREF_PEER_TX_DELAY + TXID_RELAY_DELAY) + peer.wait_for_parent_requests([int(missing_grandparent["txid"], 16)]) + + # The node should put the orphan into the orphanage and request missing_parent, skipping + # missing_parent_orphan because it already has it in the orphanage. + self.relay_transaction(peer, orphan["tx"]) + self.nodes[0].bumpmocktime(NONPREF_PEER_TX_DELAY + TXID_RELAY_DELAY) + peer.wait_for_parent_requests([int(missing_parent["txid"], 16)]) + + @cleanup + def test_orphan_inherit_rejection(self): + node = self.nodes[0] + peer1 = node.add_p2p_connection(PeerTxRelayer()) + peer2 = node.add_p2p_connection(PeerTxRelayer()) + peer3 = node.add_p2p_connection(PeerTxRelayer()) + + self.log.info("Test that an orphan with rejected parents, along with any descendants, cannot be retried with an alternate witness") + parent_low_fee_nonsegwit = self.wallet_nonsegwit.create_self_transfer(fee_rate=0) + assert_equal(parent_low_fee_nonsegwit["txid"], parent_low_fee_nonsegwit["tx"].getwtxid()) + child = self.wallet.create_self_transfer(utxo_to_spend=parent_low_fee_nonsegwit["new_utxo"]) + grandchild = self.wallet.create_self_transfer(utxo_to_spend=child["new_utxo"]) + assert child["txid"] != child["tx"].getwtxid() + assert grandchild["txid"] != grandchild["tx"].getwtxid() + + # Relay the parent. It should be rejected because it pays 0 fees. + self.relay_transaction(peer1, parent_low_fee_nonsegwit["tx"]) + + # Relay the child. It should be rejected for having missing parents, and this rejection is + # cached by txid and wtxid. + with node.assert_debug_log(['not keeping orphan with rejected parents {}'.format(child["txid"])]): + self.relay_transaction(peer1, child["tx"]) + assert_equal(0, len(node.getrawmempool())) + peer1.assert_never_requested(parent_low_fee_nonsegwit["txid"]) + + # Grandchild should also not be kept in orphanage because its parent has been rejected. + with node.assert_debug_log(['not keeping orphan with rejected parents {}'.format(grandchild["txid"])]): + self.relay_transaction(peer2, grandchild["tx"]) + assert_equal(0, len(node.getrawmempool())) + peer2.assert_never_requested(child["txid"]) + peer2.assert_never_requested(child["tx"].getwtxid()) + + # The child should never be requested, even if announced again with potentially different witness. 
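The announcement just below deliberately uses MSG_TX (txid) rather than MSG_WTX: for a segwit spend the txid does not commit to the witness, so the same txid could in principle be re-announced with a different witness. The point of these checks is that the rejection is cached under the txid as well, so even that announcement is never followed up with a getdata.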
+ peer3.send_and_ping(msg_inv([CInv(t=MSG_TX, h=int(child["txid"], 16))])) + self.nodes[0].bumpmocktime(TXREQUEST_TIME_SKIP) + peer3.assert_never_requested(child["txid"]) + + def run_test(self): + self.nodes[0].setmocktime(int(time.time())) + self.wallet_nonsegwit = MiniWallet(self.nodes[0], mode=MiniWalletMode.RAW_P2PK) + self.generate(self.wallet_nonsegwit, 10) + self.wallet = MiniWallet(self.nodes[0]) + self.generate(self.wallet, 160) + self.test_arrival_timing_orphan() + self.test_orphan_rejected_parents_exceptions() + self.test_orphan_multiple_parents() + self.test_orphans_overlapping_parents() + self.test_orphan_of_orphan() + self.test_orphan_inherit_rejection() + + +if __name__ == '__main__': + OrphanHandlingTest().main() diff --git a/test/functional/p2p_permissions.py b/test/functional/p2p_permissions.py index f84bbf67e6..6153e4a156 100755 --- a/test/functional/p2p_permissions.py +++ b/test/functional/p2p_permissions.py @@ -106,7 +106,7 @@ class P2PPermissionsTests(BitcoinTestFramework): self.log.debug("Check that node[1] will send the tx to node[0] even though it is already in the mempool") self.connect_nodes(1, 0) - with self.nodes[1].assert_debug_log(["Force relaying tx {} from peer=0".format(txid)]): + with self.nodes[1].assert_debug_log(["Force relaying tx {} (wtxid={}) from peer=0".format(txid, tx.getwtxid())]): p2p_rebroadcast_wallet.send_txs_and_test([tx], self.nodes[1]) self.wait_until(lambda: txid in self.nodes[0].getrawmempool()) @@ -119,14 +119,14 @@ class P2PPermissionsTests(BitcoinTestFramework): [tx], self.nodes[1], success=False, - reject_reason='{} from peer=0 was not accepted: txn-mempool-conflict'.format(txid) + reject_reason='{} (wtxid={}) from peer=0 was not accepted: txn-mempool-conflict'.format(txid, tx.getwtxid()) ) p2p_rebroadcast_wallet.send_txs_and_test( [tx], self.nodes[1], success=False, - reject_reason='Not relaying non-mempool transaction {} from forcerelay peer=0'.format(txid) + reject_reason='Not relaying non-mempool transaction {} (wtxid={}) from forcerelay peer=0'.format(txid, tx.getwtxid()) ) def checkpermission(self, args, expectedPermissions): diff --git a/test/functional/p2p_segwit.py b/test/functional/p2p_segwit.py index bfae190c66..cfc177574f 100755 --- a/test/functional/p2p_segwit.py +++ b/test/functional/p2p_segwit.py @@ -71,8 +71,8 @@ from test_framework.script import ( SIGHASH_NONE, SIGHASH_SINGLE, SegwitV0SignatureHash, - LegacySignatureHash, hash160, + sign_input_legacy, ) from test_framework.script_util import ( key_to_p2pk_script, @@ -512,10 +512,10 @@ class SegWitTest(BitcoinTestFramework): # without a witness is invalid). # Note: The reject reason for this failure could be # 'block-validation-failed' (if script check threads > 1) or - # 'non-mandatory-script-verify-flag (Witness program was passed an + # 'mandatory-script-verify-flag-failed (Witness program was passed an # empty witness)' (otherwise). test_witness_block(self.nodes[0], self.test_node, block, accepted=False, with_witness=False, - reason='non-mandatory-script-verify-flag (Witness program was passed an empty witness)') + reason='mandatory-script-verify-flag-failed (Witness program was passed an empty witness)') self.utxo.pop(0) self.utxo.append(UTXO(txid, 2, value)) @@ -708,7 +708,7 @@ class SegWitTest(BitcoinTestFramework): # segwit activation. Note that older bitcoind's that are not # segwit-aware would also reject this for failing CLEANSTACK. 
with self.nodes[0].assert_debug_log( - expected_msgs=(spend_tx.hash, 'was not accepted: non-mandatory-script-verify-flag (Witness program was passed an empty witness)')): + expected_msgs=(spend_tx.hash, 'was not accepted: mandatory-script-verify-flag-failed (Witness program was passed an empty witness)')): test_transaction_acceptance(self.nodes[0], self.test_node, spend_tx, with_witness=False, accepted=False) # Try to put the witness script in the scriptSig, should also fail. @@ -999,7 +999,7 @@ class SegWitTest(BitcoinTestFramework): # Extra witness data should not be allowed. test_witness_block(self.nodes[0], self.test_node, block, accepted=False, - reason='non-mandatory-script-verify-flag (Witness provided for non-witness script)') + reason='mandatory-script-verify-flag-failed (Witness provided for non-witness script)') # Try extra signature data. Ok if we're not spending a witness output. block.vtx[1].wit.vtxinwit = [] @@ -1025,7 +1025,7 @@ class SegWitTest(BitcoinTestFramework): # This has extra witness data, so it should fail. test_witness_block(self.nodes[0], self.test_node, block, accepted=False, - reason='non-mandatory-script-verify-flag (Stack size must be exactly one after execution)') + reason='mandatory-script-verify-flag-failed (Stack size must be exactly one after execution)') # Now get rid of the extra witness, but add extra scriptSig data tx2.vin[0].scriptSig = CScript([OP_TRUE]) @@ -1038,7 +1038,7 @@ class SegWitTest(BitcoinTestFramework): # This has extra signature data for a witness input, so it should fail. test_witness_block(self.nodes[0], self.test_node, block, accepted=False, - reason='non-mandatory-script-verify-flag (Witness requires empty scriptSig)') + reason='mandatory-script-verify-flag-failed (Witness requires empty scriptSig)') # Now get rid of the extra scriptsig on the witness input, and verify # success (even with extra scriptsig data in the non-witness input) @@ -1077,7 +1077,7 @@ class SegWitTest(BitcoinTestFramework): self.update_witness_block_with_transactions(block, [tx, tx2]) test_witness_block(self.nodes[0], self.test_node, block, accepted=False, - reason='non-mandatory-script-verify-flag (Push value size limit exceeded)') + reason='mandatory-script-verify-flag-failed (Push value size limit exceeded)') # Now reduce the length of the stack element tx2.wit.vtxinwit[0].scriptWitness.stack[0] = b'a' * (MAX_SCRIPT_ELEMENT_SIZE) @@ -1118,7 +1118,7 @@ class SegWitTest(BitcoinTestFramework): self.update_witness_block_with_transactions(block, [tx, tx2]) test_witness_block(self.nodes[0], self.test_node, block, accepted=False, - reason='non-mandatory-script-verify-flag (Script is too big)') + reason='mandatory-script-verify-flag-failed (Script is too big)') # Try again with one less byte in the witness script witness_script = CScript([b'a' * MAX_SCRIPT_ELEMENT_SIZE] * 19 + [OP_DROP] * 62 + [OP_TRUE]) @@ -1210,7 +1210,7 @@ class SegWitTest(BitcoinTestFramework): block.vtx = [block.vtx[0]] self.update_witness_block_with_transactions(block, [tx2]) test_witness_block(self.nodes[0], self.test_node, block, accepted=False, - reason='non-mandatory-script-verify-flag (Operation not valid with the current stack size)') + reason='mandatory-script-verify-flag-failed (Operation not valid with the current stack size)') # Fix the broken witness and the block should be accepted. 
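One hunk further down, the manual LegacySignatureHash plus sign_ecdsa sequence is replaced by the sign_input_legacy() helper. A note on the order of operations there (this is a reading of the helper, not something spelled out in the diff): the helper computes the legacy sighash, signs it with SIGHASH_ALL, and prepends the resulting signature push to whatever is already in the input's scriptSig, which is why the pubkey is placed into scriptSig before the call and the final script ends up as [signature, pubkey].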
tx2.wit.vtxinwit[5].scriptWitness.stack = [b'a', witness_script] @@ -1529,10 +1529,8 @@ class SegWitTest(BitcoinTestFramework): tx5 = CTransaction() tx5.vin.append(CTxIn(COutPoint(tx4.sha256, 0), b"")) tx5.vout.append(CTxOut(tx4.vout[0].nValue - 1000, CScript([OP_TRUE]))) - (sig_hash, err) = LegacySignatureHash(script_pubkey, tx5, 0, SIGHASH_ALL) - signature = key.sign_ecdsa(sig_hash) + b'\x01' # 0x1 is SIGHASH_ALL - tx5.vin[0].scriptSig = CScript([signature, pubkey]) - tx5.rehash() + tx5.vin[0].scriptSig = CScript([pubkey]) + sign_input_legacy(tx5, 0, script_pubkey, key) # Should pass policy and consensus. test_transaction_acceptance(self.nodes[0], self.test_node, tx5, True, True) block = self.build_next_block() @@ -1574,7 +1572,7 @@ class SegWitTest(BitcoinTestFramework): sign_p2pk_witness_input(witness_script, tx, 0, hashtype, prev_utxo.nValue + 1, key) self.update_witness_block_with_transactions(block, [tx]) test_witness_block(self.nodes[0], self.test_node, block, accepted=False, - reason='non-mandatory-script-verify-flag (Script evaluated without error ' + reason='mandatory-script-verify-flag-failed (Script evaluated without error ' 'but finished with a false/empty top stack element') # Too-small input value @@ -1582,7 +1580,7 @@ class SegWitTest(BitcoinTestFramework): block.vtx.pop() # remove last tx self.update_witness_block_with_transactions(block, [tx]) test_witness_block(self.nodes[0], self.test_node, block, accepted=False, - reason='non-mandatory-script-verify-flag (Script evaluated without error ' + reason='mandatory-script-verify-flag-failed (Script evaluated without error ' 'but finished with a false/empty top stack element') # Now try correct value @@ -1686,7 +1684,7 @@ class SegWitTest(BitcoinTestFramework): block = self.build_next_block() self.update_witness_block_with_transactions(block, [tx, tx2]) test_witness_block(self.nodes[0], self.test_node, block, accepted=False, - reason='non-mandatory-script-verify-flag (Witness requires empty scriptSig)') + reason='mandatory-script-verify-flag-failed (Witness requires empty scriptSig)') # Move the signature to the witness. 
block.vtx.pop() diff --git a/test/functional/rpc_blockchain.py b/test/functional/rpc_blockchain.py index 6022042c11..18a0a0c6cc 100755 --- a/test/functional/rpc_blockchain.py +++ b/test/functional/rpc_blockchain.py @@ -49,7 +49,6 @@ from test_framework.util import ( assert_raises_rpc_error, assert_is_hex_string, assert_is_hash_string, - get_datadir_path, ) from test_framework.wallet import MiniWallet @@ -572,16 +571,15 @@ class BlockchainTest(BitcoinTestFramework): self.log.info("Test that getblock with verbosity 3 includes prevout") assert_vin_contains_prevout(3) - self.log.info("Test that getblock with verbosity 2 and 3 still works with pruned Undo data") - datadir = get_datadir_path(self.options.tmpdir, 0) - self.log.info("Test getblock with invalid verbosity type returns proper error message") assert_raises_rpc_error(-3, "JSON value of type string is not of expected type number", node.getblock, blockhash, "2") + self.log.info("Test that getblock with verbosity 2 and 3 still works with pruned Undo data") + def move_block_file(old, new): - old_path = os.path.join(datadir, self.chain, 'blocks', old) - new_path = os.path.join(datadir, self.chain, 'blocks', new) - os.rename(old_path, new_path) + old_path = self.nodes[0].blocks_path / old + new_path = self.nodes[0].blocks_path / new + old_path.rename(new_path) # Move instead of deleting so we can restore chain state afterwards move_block_file('rev00000.dat', 'rev_wrong') diff --git a/test/functional/rpc_createmultisig.py b/test/functional/rpc_createmultisig.py index 34e60d70f0..65d7b4c422 100755 --- a/test/functional/rpc_createmultisig.py +++ b/test/functional/rpc_createmultisig.py @@ -157,7 +157,7 @@ class RpcCreateMultiSigTest(BitcoinTestFramework): try: node1.loadwallet('wmulti') except JSONRPCException as e: - path = os.path.join(self.options.tmpdir, "node1", "regtest", "wallets", "wmulti") + path = self.nodes[1].wallets_path / "wmulti" if e.error['code'] == -18 and "Wallet file verification failed. Failed to load database path '{}'. Path does not exist.".format(path) in e.error['message']: node1.createwallet(wallet_name='wmulti', disable_private_keys=True) else: diff --git a/test/functional/rpc_decodescript.py b/test/functional/rpc_decodescript.py index 673836bd04..f37e61ab50 100755 --- a/test/functional/rpc_decodescript.py +++ b/test/functional/rpc_decodescript.py @@ -271,7 +271,7 @@ class DecodeScriptTest(BitcoinTestFramework): assert res["segwit"]["desc"] == "wsh(and_v(and_v(v:hash160(ffffffffffffffffffffffffffffffffffffffff),v:pk(0250929b74c1a04954b78b4b6035e97a5e078a5a0f28ec96d547bfee9ace803ac0)),older(1)))#gm8xz4fl" # Miniscript-incompatible offered HTLC res = self.nodes[0].decodescript("82012088a914ffffffffffffffffffffffffffffffffffffffff882102ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffacb2") - assert res["segwit"]["desc"] == "wsh(raw(82012088a914ffffffffffffffffffffffffffffffffffffffff882102ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffacb2))#ra6w2xa7" + assert res["segwit"]["desc"] == "addr(bcrt1q73qyfypp47hvgnkjqnav0j3k2lq3v76wg22dk8tmwuz5sfgv66xsvxg6uu)#9p3q328s" # Miniscript-compatible multisig bigger than 520 byte P2SH limit. 
res = self.nodes[0].decodescript("5b21020e0338c96a8870479f2396c373cc7696ba124e8635d41b0ea581112b678172612102675333a4e4b8fb51d9d4e22fa5a8eaced3fdac8a8cbf9be8c030f75712e6af992102896807d54bc55c24981f24a453c60ad3e8993d693732288068a23df3d9f50d4821029e51a5ef5db3137051de8323b001749932f2ff0d34c82e96a2c2461de96ae56c2102a4e1a9638d46923272c266631d94d36bdb03a64ee0e14c7518e49d2f29bc401021031c41fdbcebe17bec8d49816e00ca1b5ac34766b91c9f2ac37d39c63e5e008afb2103079e252e85abffd3c401a69b087e590a9b86f33f574f08129ccbd3521ecf516b2103111cf405b627e22135b3b3733a4a34aa5723fb0f58379a16d32861bf576b0ec2210318f331b3e5d38156da6633b31929c5b220349859cc9ca3d33fb4e68aa08401742103230dae6b4ac93480aeab26d000841298e3b8f6157028e47b0897c1e025165de121035abff4281ff00660f99ab27bb53e6b33689c2cd8dcd364bc3c90ca5aea0d71a62103bd45cddfacf2083b14310ae4a84e25de61e451637346325222747b157446614c2103cc297026b06c71cbfa52089149157b5ff23de027ac5ab781800a578192d175462103d3bde5d63bdb3a6379b461be64dad45eabff42f758543a9645afd42f6d4248282103ed1e8d5109c9ed66f7941bc53cc71137baa76d50d274bda8d5e8ffbd6e61fe9a5fae736402c00fb269522103aab896d53a8e7d6433137bbba940f9c521e085dd07e60994579b64a6d992cf79210291b7d0b1b692f8f524516ed950872e5da10fb1b808b5a526dedc6fed1cf29807210386aa9372fbab374593466bc5451dc59954e90787f08060964d95c87ef34ca5bb53ae68") assert_equal(res["segwit"]["desc"], "wsh(or_d(multi(11,020e0338c96a8870479f2396c373cc7696ba124e8635d41b0ea581112b67817261,02675333a4e4b8fb51d9d4e22fa5a8eaced3fdac8a8cbf9be8c030f75712e6af99,02896807d54bc55c24981f24a453c60ad3e8993d693732288068a23df3d9f50d48,029e51a5ef5db3137051de8323b001749932f2ff0d34c82e96a2c2461de96ae56c,02a4e1a9638d46923272c266631d94d36bdb03a64ee0e14c7518e49d2f29bc4010,031c41fdbcebe17bec8d49816e00ca1b5ac34766b91c9f2ac37d39c63e5e008afb,03079e252e85abffd3c401a69b087e590a9b86f33f574f08129ccbd3521ecf516b,03111cf405b627e22135b3b3733a4a34aa5723fb0f58379a16d32861bf576b0ec2,0318f331b3e5d38156da6633b31929c5b220349859cc9ca3d33fb4e68aa0840174,03230dae6b4ac93480aeab26d000841298e3b8f6157028e47b0897c1e025165de1,035abff4281ff00660f99ab27bb53e6b33689c2cd8dcd364bc3c90ca5aea0d71a6,03bd45cddfacf2083b14310ae4a84e25de61e451637346325222747b157446614c,03cc297026b06c71cbfa52089149157b5ff23de027ac5ab781800a578192d17546,03d3bde5d63bdb3a6379b461be64dad45eabff42f758543a9645afd42f6d424828,03ed1e8d5109c9ed66f7941bc53cc71137baa76d50d274bda8d5e8ffbd6e61fe9a),and_v(v:older(4032),multi(2,03aab896d53a8e7d6433137bbba940f9c521e085dd07e60994579b64a6d992cf79,0291b7d0b1b692f8f524516ed950872e5da10fb1b808b5a526dedc6fed1cf29807,0386aa9372fbab374593466bc5451dc59954e90787f08060964d95c87ef34ca5bb))))#7jwwklk4") diff --git a/test/functional/rpc_dumptxoutset.py b/test/functional/rpc_dumptxoutset.py index 39a931be03..2cae602cc2 100755 --- a/test/functional/rpc_dumptxoutset.py +++ b/test/functional/rpc_dumptxoutset.py @@ -4,13 +4,15 @@ # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test the generation of UTXO snapshots using `dumptxoutset`. 
""" +from pathlib import Path from test_framework.blocktools import COINBASE_MATURITY from test_framework.test_framework import BitcoinTestFramework -from test_framework.util import assert_equal, assert_raises_rpc_error - -import hashlib -from pathlib import Path +from test_framework.util import ( + assert_equal, + assert_raises_rpc_error, + sha256sum_file, +) class DumptxoutsetTest(BitcoinTestFramework): @@ -39,11 +41,10 @@ class DumptxoutsetTest(BitcoinTestFramework): out['base_hash'], '09abf0e7b510f61ca6cf33bab104e9ee99b3528b371d27a2d4b39abb800fba7e') - with open(str(expected_path), 'rb') as f: - digest = hashlib.sha256(f.read()).hexdigest() - # UTXO snapshot hash should be deterministic based on mocked time. - assert_equal( - digest, 'b1bacb602eacf5fbc9a7c2ef6eeb0d229c04e98bdf0c2ea5929012cd0eae3830') + # UTXO snapshot hash should be deterministic based on mocked time. + assert_equal( + sha256sum_file(str(expected_path)).hex(), + 'b1bacb602eacf5fbc9a7c2ef6eeb0d229c04e98bdf0c2ea5929012cd0eae3830') assert_equal( out['txoutset_hash'], '1f7e3befd45dc13ae198dfbb22869a9c5c4196f8e9ef9735831af1288033f890') @@ -52,7 +53,7 @@ class DumptxoutsetTest(BitcoinTestFramework): # Specifying a path to an existing or invalid file will fail. assert_raises_rpc_error( -8, '{} already exists'.format(FILENAME), node.dumptxoutset, FILENAME) - invalid_path = str(Path(node.datadir) / "invalid" / "path") + invalid_path = node.datadir_path / "invalid" / "path" assert_raises_rpc_error( -8, "Couldn't open file {}.incomplete for writing".format(invalid_path), node.dumptxoutset, invalid_path) diff --git a/test/functional/rpc_misc.py b/test/functional/rpc_misc.py index 43d1e2c731..20485c01d3 100755 --- a/test/functional/rpc_misc.py +++ b/test/functional/rpc_misc.py @@ -27,7 +27,7 @@ class RpcMiscTest(BitcoinTestFramework): self.log.info("test CHECK_NONFATAL") assert_raises_rpc_error( -1, - 'Internal bug detected: "request.params[9].get_str() != "trigger_internal_bug""', + 'Internal bug detected: request.params[9].get_str() != "trigger_internal_bug"', lambda: node.echo(arg9='trigger_internal_bug'), ) diff --git a/test/functional/rpc_net.py b/test/functional/rpc_net.py index 5fdd5daddf..255f5108a2 100755 --- a/test/functional/rpc_net.py +++ b/test/functional/rpc_net.py @@ -65,6 +65,7 @@ class NetTest(BitcoinTestFramework): self.test_service_flags() self.test_getnodeaddresses() self.test_addpeeraddress() + self.test_sendmsgtopeer() def test_connection_count(self): self.log.info("Test getconnectioncount") @@ -328,6 +329,37 @@ class NetTest(BitcoinTestFramework): addrs = node.getnodeaddresses(count=0) # getnodeaddresses re-runs the addrman checks assert_equal(len(addrs), 2) + def test_sendmsgtopeer(self): + node = self.nodes[0] + + self.restart_node(0) + self.connect_nodes(0, 1) + + self.log.info("Test sendmsgtopeer") + self.log.debug("Send a valid message") + with self.nodes[1].assert_debug_log(expected_msgs=["received: addr"]): + node.sendmsgtopeer(peer_id=0, msg_type="addr", msg="FFFFFF") + + self.log.debug("Test error for sending to non-existing peer") + assert_raises_rpc_error(-1, "Error: Could not send message to peer", node.sendmsgtopeer, peer_id=100, msg_type="addr", msg="FF") + + self.log.debug("Test that zero-length msg_type is allowed") + node.sendmsgtopeer(peer_id=0, msg_type="addr", msg="") + + self.log.debug("Test error for msg_type that is too long") + assert_raises_rpc_error(-8, "Error: msg_type too long, max length is 12", node.sendmsgtopeer, peer_id=0, msg_type="long_msg_type", msg="FF") + + 
self.log.debug("Test that unknown msg_type is allowed") + node.sendmsgtopeer(peer_id=0, msg_type="unknown", msg="FF") + + self.log.debug("Test that empty msg is allowed") + node.sendmsgtopeer(peer_id=0, msg_type="addr", msg="FF") + + self.log.debug("Test that oversized messages are allowed, but get us disconnected") + zero_byte_string = b'\x00' * 4000001 + node.sendmsgtopeer(peer_id=0, msg_type="addr", msg=zero_byte_string.hex()) + self.wait_until(lambda: len(self.nodes[0].getpeerinfo()) == 0, timeout=10) + if __name__ == '__main__': NetTest().main() diff --git a/test/functional/rpc_psbt.py b/test/functional/rpc_psbt.py index c4ed4da0f2..b574a370d6 100755 --- a/test/functional/rpc_psbt.py +++ b/test/functional/rpc_psbt.py @@ -327,7 +327,7 @@ class PSBTTest(BitcoinTestFramework): assert_raises_rpc_error(-3, "Invalid amount", self.nodes[1].walletcreatefundedpsbt, inputs, outputs, 0, {param: invalid_value, "add_inputs": True}) # Test fee_rate values that cannot be represented in sat/vB. - for invalid_value in [0.0001, 0.00000001, 0.00099999, 31.99999999, "0.0001", "0.00000001", "0.00099999", "31.99999999"]: + for invalid_value in [0.0001, 0.00000001, 0.00099999, 31.99999999]: assert_raises_rpc_error(-3, "Invalid amount", self.nodes[1].walletcreatefundedpsbt, inputs, outputs, 0, {"fee_rate": invalid_value, "add_inputs": True}) @@ -883,6 +883,9 @@ class PSBTTest(BitcoinTestFramework): comb_psbt = self.nodes[0].combinepsbt([psbt, parsed_psbt.to_base64()]) assert_equal(comb_psbt, psbt) + self.log.info("Test walletprocesspsbt raises if an invalid sighashtype is passed") + assert_raises_rpc_error(-8, "all is not a valid sighash parameter.", self.nodes[0].walletprocesspsbt, psbt, sighashtype="all") + self.log.info("Test decoding PSBT with per-input preimage types") # note that the decodepsbt RPC doesn't check whether preimages and hashes match hash_ripemd160, preimage_ripemd160 = random_bytes(20), random_bytes(50) @@ -982,5 +985,9 @@ class PSBTTest(BitcoinTestFramework): rawtx = self.nodes[2].finalizepsbt(psbt)["hex"] self.nodes[2].sendrawtransaction(rawtx) + self.log.info("Test descriptorprocesspsbt raises if an invalid sighashtype is passed") + assert_raises_rpc_error(-8, "all is not a valid sighash parameter.", self.nodes[2].descriptorprocesspsbt, psbt, [descriptor], sighashtype="all") + + if __name__ == '__main__': PSBTTest().main() diff --git a/test/functional/rpc_signer.py b/test/functional/rpc_signer.py index 4300190387..488682e959 100755 --- a/test/functional/rpc_signer.py +++ b/test/functional/rpc_signer.py @@ -21,15 +21,12 @@ class RPCSignerTest(BitcoinTestFramework): def mock_signer_path(self): path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'mocks', 'signer.py') if platform.system() == "Windows": - return "py " + path + return "py -3 " + path else: return path def set_test_params(self): self.num_nodes = 4 - # The experimental syscall sandbox feature (-sandbox) is not compatible with -signer (which - # invokes execve). 
- self.disable_syscall_sandbox = True self.extra_args = [ [], diff --git a/test/functional/rpc_signrawtransactionwithkey.py b/test/functional/rpc_signrawtransactionwithkey.py index ac7a86704f..0913f5057e 100755 --- a/test/functional/rpc_signrawtransactionwithkey.py +++ b/test/functional/rpc_signrawtransactionwithkey.py @@ -14,6 +14,7 @@ from test_framework.address import ( from test_framework.test_framework import BitcoinTestFramework from test_framework.util import ( assert_equal, + assert_raises_rpc_error, find_vout_for_address, ) from test_framework.script_util import ( @@ -33,6 +34,14 @@ from decimal import ( Decimal, ) +INPUTS = [ + # Valid pay-to-pubkey scripts + {'txid': '9b907ef1e3c26fc71fe4a4b3580bc75264112f95050014157059c736f0202e71', 'vout': 0, + 'scriptPubKey': '76a91460baa0f494b38ce3c940dea67f3804dc52d1fb9488ac'}, + {'txid': '83a4f6a6b73660e13ee6cb3c6063fa3759c50c9b7521d0536022961898f4fb02', 'vout': 0, + 'scriptPubKey': '76a914669b857c03a5ed269d5d85a1ffac9ed5d663072788ac'}, +] +OUTPUTS = {'mpLQjfK79b7CCV4VMJWEWAj5Mpx8Up5zxB': 0.1} class SignRawTransactionWithKeyTest(BitcoinTestFramework): def set_test_params(self): @@ -47,6 +56,11 @@ class SignRawTransactionWithKeyTest(BitcoinTestFramework): txid = self.nodes[0].sendrawtransaction(self.nodes[0].signrawtransactionwithkey(rawtx, [self.nodes[0].get_deterministic_priv_key().key])["hex"], 0) return txid + def assert_signing_completed_successfully(self, signed_tx): + assert 'errors' not in signed_tx + assert 'complete' in signed_tx + assert_equal(signed_tx['complete'], True) + def successful_signing_test(self): """Create and sign a valid raw transaction with one input. @@ -56,25 +70,10 @@ class SignRawTransactionWithKeyTest(BitcoinTestFramework): 2) No script verification error occurred""" self.log.info("Test valid raw transaction with one input") privKeys = ['cUeKHd5orzT3mz8P9pxyREHfsWtVfgsfDjiZZBcjUBAaGk1BTj7N', 'cVKpPfVKSJxKqVpE9awvXNWuLHCa5j5tiE7K6zbUSptFpTEtiFrA'] + rawTx = self.nodes[0].createrawtransaction(INPUTS, OUTPUTS) + rawTxSigned = self.nodes[0].signrawtransactionwithkey(rawTx, privKeys, INPUTS) - inputs = [ - # Valid pay-to-pubkey scripts - {'txid': '9b907ef1e3c26fc71fe4a4b3580bc75264112f95050014157059c736f0202e71', 'vout': 0, - 'scriptPubKey': '76a91460baa0f494b38ce3c940dea67f3804dc52d1fb9488ac'}, - {'txid': '83a4f6a6b73660e13ee6cb3c6063fa3759c50c9b7521d0536022961898f4fb02', 'vout': 0, - 'scriptPubKey': '76a914669b857c03a5ed269d5d85a1ffac9ed5d663072788ac'}, - ] - - outputs = {'mpLQjfK79b7CCV4VMJWEWAj5Mpx8Up5zxB': 0.1} - - rawTx = self.nodes[0].createrawtransaction(inputs, outputs) - rawTxSigned = self.nodes[0].signrawtransactionwithkey(rawTx, privKeys, inputs) - - # 1) The transaction has a complete set of signatures - assert rawTxSigned['complete'] - - # 2) No script verification error occurred - assert 'errors' not in rawTxSigned + self.assert_signing_completed_successfully(rawTxSigned) def witness_script_test(self): self.log.info("Test signing transaction to P2SH-P2WSH addresses without wallet") @@ -95,9 +94,7 @@ class SignRawTransactionWithKeyTest(BitcoinTestFramework): # Now create and sign a transaction spending that output on node[0], which doesn't know the scripts or keys spending_tx = self.nodes[0].createrawtransaction([unspent_output], {getnewdestination()[2]: Decimal("49.998")}) spending_tx_signed = self.nodes[0].signrawtransactionwithkey(spending_tx, [embedded_privkey], [unspent_output]) - # Check the signing completed successfully - assert 'complete' in spending_tx_signed - 
assert_equal(spending_tx_signed['complete'], True) + self.assert_signing_completed_successfully(spending_tx_signed) # Now test with P2PKH and P2PK scripts as the witnessScript for tx_type in ['P2PKH', 'P2PK']: # these tests are order-independent @@ -120,14 +117,19 @@ class SignRawTransactionWithKeyTest(BitcoinTestFramework): # Now create and sign a transaction spending that output on node[0], which doesn't know the scripts or keys spending_tx = self.nodes[0].createrawtransaction([{'txid': txid, 'vout': vout}], {getnewdestination()[2]: Decimal("9.999")}) spending_tx_signed = self.nodes[0].signrawtransactionwithkey(spending_tx, [embedded_privkey], [{'txid': txid, 'vout': vout, 'scriptPubKey': script_pub_key, 'redeemScript': redeem_script, 'witnessScript': witness_script, 'amount': 10}]) - # Check the signing completed successfully - assert 'complete' in spending_tx_signed - assert_equal(spending_tx_signed['complete'], True) + self.assert_signing_completed_successfully(spending_tx_signed) self.nodes[0].sendrawtransaction(spending_tx_signed['hex']) + def invalid_sighashtype_test(self): + self.log.info("Test signing transaction with invalid sighashtype") + tx = self.nodes[0].createrawtransaction(INPUTS, OUTPUTS) + privkeys = [self.nodes[0].get_deterministic_priv_key().key] + assert_raises_rpc_error(-8, "all is not a valid sighash parameter.", self.nodes[0].signrawtransactionwithkey, tx, privkeys, sighashtype="all") + def run_test(self): self.successful_signing_test() self.witness_script_test() + self.invalid_sighashtype_test() if __name__ == '__main__': diff --git a/test/functional/rpc_users.py b/test/functional/rpc_users.py index 8cc3ec401e..66cdd7cf9a 100755 --- a/test/functional/rpc_users.py +++ b/test/functional/rpc_users.py @@ -7,11 +7,9 @@ from test_framework.test_framework import BitcoinTestFramework from test_framework.util import ( assert_equal, - get_datadir_path, str_to_b64str, ) -import os import http.client import urllib.parse import subprocess @@ -38,8 +36,7 @@ class HTTPBasicsTest(BitcoinTestFramework): self.num_nodes = 2 self.supports_cli = False - def setup_chain(self): - super().setup_chain() + def conf_setup(self): #Append rpcauth to bitcoin.conf before initialization self.rtpassword = "cA773lm788buwYe4g4WT+05pKyNruVKjQ25x3n0DQcM=" rpcauth = "rpcauth=rt:93648e835a54c573682c2eb19f882535$7681e9c5b74bdd85e78166031d2058e1069b3ed7ed967c93fc63abba06f31144" @@ -64,13 +61,15 @@ class HTTPBasicsTest(BitcoinTestFramework): rpcauth3 = lines[1] self.password = lines[3] - with open(os.path.join(get_datadir_path(self.options.tmpdir, 0), "bitcoin.conf"), 'a', encoding='utf8') as f: + with open(self.nodes[0].datadir_path / "bitcoin.conf", "a", encoding="utf8") as f: f.write(rpcauth + "\n") f.write(rpcauth2 + "\n") f.write(rpcauth3 + "\n") - with open(os.path.join(get_datadir_path(self.options.tmpdir, 1), "bitcoin.conf"), 'a', encoding='utf8') as f: + with open(self.nodes[1].datadir_path / "bitcoin.conf", "a", encoding="utf8") as f: f.write("rpcuser={}\n".format(self.rpcuser)) f.write("rpcpassword={}\n".format(self.rpcpassword)) + self.restart_node(0) + self.restart_node(1) def test_auth(self, node, user, password): self.log.info('Correct...') @@ -86,6 +85,7 @@ class HTTPBasicsTest(BitcoinTestFramework): assert_equal(401, call_with_auth(node, user + 'wrong', password + 'wrong').status) def run_test(self): + self.conf_setup() self.log.info('Check correctness of the rpcauth config option') url = urllib.parse.urlparse(self.nodes[0].url) @@ -112,8 +112,7 @@ class 
HTTPBasicsTest(BitcoinTestFramework): self.nodes[0].assert_start_raises_init_error(expected_msg=init_error, extra_args=['-rpcauth=foo$bar$baz']) self.log.info('Check that failure to write cookie file will abort the node gracefully') - cookie_file = os.path.join(get_datadir_path(self.options.tmpdir, 0), self.chain, '.cookie.tmp') - os.mkdir(cookie_file) + (self.nodes[0].chain_path / ".cookie.tmp").mkdir() self.nodes[0].assert_start_raises_init_error(expected_msg=init_error) diff --git a/test/functional/rpc_whitelist.py b/test/functional/rpc_whitelist.py index 219132410b..fb404fb479 100755 --- a/test/functional/rpc_whitelist.py +++ b/test/functional/rpc_whitelist.py @@ -6,11 +6,9 @@ A test for RPC users with restricted permissions """ from test_framework.test_framework import BitcoinTestFramework -import os from test_framework.util import ( - get_datadir_path, assert_equal, - str_to_b64str + str_to_b64str, ) import http.client import urllib.parse @@ -30,8 +28,7 @@ class RPCWhitelistTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 1 - def setup_chain(self): - super().setup_chain() + def run_test(self): # 0 => Username # 1 => Password (Hashed) # 2 => Permissions @@ -55,7 +52,7 @@ class RPCWhitelistTest(BitcoinTestFramework): ] # These commands shouldn't be allowed for any user to test failures self.never_allowed = ["getnetworkinfo"] - with open(os.path.join(get_datadir_path(self.options.tmpdir, 0), "bitcoin.conf"), 'a', encoding='utf8') as f: + with open(self.nodes[0].datadir_path / "bitcoin.conf", "a", encoding="utf8") as f: f.write("\nrpcwhitelistdefault=0\n") for user in self.users: f.write("rpcauth=" + user[0] + ":" + user[1] + "\n") @@ -64,9 +61,8 @@ class RPCWhitelistTest(BitcoinTestFramework): for strangedude in self.strange_users: f.write("rpcauth=" + strangedude[0] + ":" + strangedude[1] + "\n") f.write("rpcwhitelist=" + strangedude[0] + strangedude[2] + "\n") + self.restart_node(0) - - def run_test(self): for user in self.users: permissions = user[2].replace(" ", "").split(",") # Pop all empty items diff --git a/test/functional/test_framework/authproxy.py b/test/functional/test_framework/authproxy.py index f7765a9dfa..03042877b2 100644 --- a/test/functional/test_framework/authproxy.py +++ b/test/functional/test_framework/authproxy.py @@ -39,6 +39,7 @@ from http import HTTPStatus import http.client import json import logging +import pathlib import socket import time import urllib.parse @@ -59,9 +60,11 @@ class JSONRPCException(Exception): self.http_status = http_status -def EncodeDecimal(o): +def serialization_fallback(o): if isinstance(o, decimal.Decimal): return str(o) + if isinstance(o, pathlib.Path): + return str(o) raise TypeError(repr(o) + " is not JSON serializable") class AuthServiceProxy(): @@ -108,7 +111,7 @@ class AuthServiceProxy(): log.debug("-{}-> {} {}".format( AuthServiceProxy.__id_count, self._service_name, - json.dumps(args or argsn, default=EncodeDecimal, ensure_ascii=self.ensure_ascii), + json.dumps(args or argsn, default=serialization_fallback, ensure_ascii=self.ensure_ascii), )) if args and argsn: params = dict(args=args, **argsn) @@ -120,7 +123,7 @@ class AuthServiceProxy(): 'id': AuthServiceProxy.__id_count} def __call__(self, *args, **argsn): - postdata = json.dumps(self.get_request(*args, **argsn), default=EncodeDecimal, ensure_ascii=self.ensure_ascii) + postdata = json.dumps(self.get_request(*args, **argsn), default=serialization_fallback, ensure_ascii=self.ensure_ascii) response, status = self._request('POST', self.__url.path, 
postdata.encode('utf-8')) if response['error'] is not None: raise JSONRPCException(response['error'], status) @@ -134,7 +137,7 @@ class AuthServiceProxy(): return response['result'] def batch(self, rpc_call_list): - postdata = json.dumps(list(rpc_call_list), default=EncodeDecimal, ensure_ascii=self.ensure_ascii) + postdata = json.dumps(list(rpc_call_list), default=serialization_fallback, ensure_ascii=self.ensure_ascii) log.debug("--> " + postdata) response, status = self._request('POST', self.__url.path, postdata.encode('utf-8')) if status != HTTPStatus.OK: @@ -167,7 +170,7 @@ class AuthServiceProxy(): response = json.loads(responsedata, parse_float=decimal.Decimal) elapsed = time.time() - req_start_time if "error" in response and response["error"] is None: - log.debug("<-%s- [%.6f] %s" % (response["id"], elapsed, json.dumps(response["result"], default=EncodeDecimal, ensure_ascii=self.ensure_ascii))) + log.debug("<-%s- [%.6f] %s" % (response["id"], elapsed, json.dumps(response["result"], default=serialization_fallback, ensure_ascii=self.ensure_ascii))) else: log.debug("<-- [%.6f] %s" % (elapsed, responsedata)) return response, http_response.status diff --git a/test/functional/test_framework/coverage.py b/test/functional/test_framework/coverage.py index 4fb4f8bb82..912a945d95 100644 --- a/test/functional/test_framework/coverage.py +++ b/test/functional/test_framework/coverage.py @@ -11,6 +11,7 @@ testing. import os from .authproxy import AuthServiceProxy +from typing import Optional REFERENCE_FILENAME = 'rpc_interface.txt' @@ -20,7 +21,7 @@ class AuthServiceProxyWrapper(): An object that wraps AuthServiceProxy to record specific RPC calls. """ - def __init__(self, auth_service_proxy_instance: AuthServiceProxy, rpc_url: str, coverage_logfile: str=None): + def __init__(self, auth_service_proxy_instance: AuthServiceProxy, rpc_url: str, coverage_logfile: Optional[str]=None): """ Kwargs: auth_service_proxy_instance: the instance being wrapped. diff --git a/test/functional/test_framework/ellswift.py b/test/functional/test_framework/ellswift.py new file mode 100644 index 0000000000..97b10118e6 --- /dev/null +++ b/test/functional/test_framework/ellswift.py @@ -0,0 +1,163 @@ +#!/usr/bin/env python3 +# Copyright (c) 2022 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. +"""Test-only Elligator Swift implementation + +WARNING: This code is slow and uses bad randomness. +Do not use for anything but tests.""" + +import csv +import os +import random +import unittest + +from test_framework.secp256k1 import FE, G, GE + +# Precomputed constant square root of -3 (mod p). +MINUS_3_SQRT = FE(-3).sqrt() + +def xswiftec(u, t): + """Decode field elements (u, t) to an X coordinate on the curve.""" + if u == 0: + u = FE(1) + if t == 0: + t = FE(1) + if u**3 + t**2 + 7 == 0: + t = 2 * t + X = (u**3 + 7 - t**2) / (2 * t) + Y = (X + t) / (MINUS_3_SQRT * u) + for x in (u + 4 * Y**2, (-X / Y - u) / 2, (X / Y - u) / 2): + if GE.is_valid_x(x): + return x + assert False + +def xswiftec_inv(x, u, case): + """Given x and u, find t such that xswiftec(u, t) = x, or return None. 
+ + Case selects which of the up to 8 results to return.""" + + if case & 2 == 0: + if GE.is_valid_x(-x - u): + return None + v = x + s = -(u**3 + 7) / (u**2 + u*v + v**2) + else: + s = x - u + if s == 0: + return None + r = (-s * (4 * (u**3 + 7) + 3 * s * u**2)).sqrt() + if r is None: + return None + if case & 1 and r == 0: + return None + v = (-u + r / s) / 2 + w = s.sqrt() + if w is None: + return None + if case & 5 == 0: + return -w * (u * (1 - MINUS_3_SQRT) / 2 + v) + if case & 5 == 1: + return w * (u * (1 + MINUS_3_SQRT) / 2 + v) + if case & 5 == 4: + return w * (u * (1 - MINUS_3_SQRT) / 2 + v) + if case & 5 == 5: + return -w * (u * (1 + MINUS_3_SQRT) / 2 + v) + +def xelligatorswift(x): + """Given a field element X on the curve, find (u, t) that encode them.""" + assert GE.is_valid_x(x) + while True: + u = FE(random.randrange(1, FE.SIZE)) + case = random.randrange(0, 8) + t = xswiftec_inv(x, u, case) + if t is not None: + return u, t + +def ellswift_create(): + """Generate a (privkey, ellswift_pubkey) pair.""" + priv = random.randrange(1, GE.ORDER) + u, t = xelligatorswift((priv * G).x) + return priv.to_bytes(32, 'big'), u.to_bytes() + t.to_bytes() + +def ellswift_ecdh_xonly(pubkey_theirs, privkey): + """Compute X coordinate of shared ECDH point between ellswift pubkey and privkey.""" + u = FE(int.from_bytes(pubkey_theirs[:32], 'big')) + t = FE(int.from_bytes(pubkey_theirs[32:], 'big')) + d = int.from_bytes(privkey, 'big') + return (d * GE.lift_x(xswiftec(u, t))).x.to_bytes() + + +class TestFrameworkEllSwift(unittest.TestCase): + def test_xswiftec(self): + """Verify that xswiftec maps all inputs to the curve.""" + for _ in range(32): + u = FE(random.randrange(0, FE.SIZE)) + t = FE(random.randrange(0, FE.SIZE)) + x = xswiftec(u, t) + self.assertTrue(GE.is_valid_x(x)) + + # Check that inputs which are considered undefined in the original + # SwiftEC paper can also be decoded successfully (by remapping) + undefined_inputs = [ + (FE(0), FE(23)), # u = 0 + (FE(42), FE(0)), # t = 0 + (FE(5), FE(-132).sqrt()), # u^3 + t^2 + 7 = 0 + ] + assert undefined_inputs[-1][0]**3 + undefined_inputs[-1][1]**2 + 7 == 0 + for u, t in undefined_inputs: + x = xswiftec(u, t) + self.assertTrue(GE.is_valid_x(x)) + + def test_elligator_roundtrip(self): + """Verify that encoding using xelligatorswift decodes back using xswiftec.""" + for _ in range(32): + while True: + # Loop until we find a valid X coordinate on the curve. + x = FE(random.randrange(1, FE.SIZE)) + if GE.is_valid_x(x): + break + # Encoding it to (u, t), decode it back, and compare. 
+ u, t = xelligatorswift(x) + x2 = xswiftec(u, t) + self.assertEqual(x2, x) + + def test_ellswift_ecdh_xonly(self): + """Verify that shared secret computed by ellswift_ecdh_xonly match.""" + for _ in range(32): + privkey1, encoding1 = ellswift_create() + privkey2, encoding2 = ellswift_create() + shared_secret1 = ellswift_ecdh_xonly(encoding1, privkey2) + shared_secret2 = ellswift_ecdh_xonly(encoding2, privkey1) + self.assertEqual(shared_secret1, shared_secret2) + + def test_elligator_encode_testvectors(self): + """Implement the BIP324 test vectors for ellswift encoding (read from xswiftec_inv_test_vectors.csv).""" + vectors_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'xswiftec_inv_test_vectors.csv') + with open(vectors_file, newline='', encoding='utf8') as csvfile: + reader = csv.DictReader(csvfile) + for row in reader: + u = FE.from_bytes(bytes.fromhex(row['u'])) + x = FE.from_bytes(bytes.fromhex(row['x'])) + for case in range(8): + ret = xswiftec_inv(x, u, case) + if ret is None: + self.assertEqual(row[f"case{case}_t"], "") + else: + self.assertEqual(row[f"case{case}_t"], ret.to_bytes().hex()) + self.assertEqual(xswiftec(u, ret), x) + + def test_elligator_decode_testvectors(self): + """Implement the BIP324 test vectors for ellswift decoding (read from ellswift_decode_test_vectors.csv).""" + vectors_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'ellswift_decode_test_vectors.csv') + with open(vectors_file, newline='', encoding='utf8') as csvfile: + reader = csv.DictReader(csvfile) + for row in reader: + encoding = bytes.fromhex(row['ellswift']) + assert len(encoding) == 64 + expected_x = FE(int(row['x'], 16)) + u = FE(int.from_bytes(encoding[:32], 'big')) + t = FE(int.from_bytes(encoding[32:], 'big')) + x = xswiftec(u, t) + self.assertEqual(x, expected_x) + self.assertTrue(GE.is_valid_x(x)) diff --git a/test/functional/test_framework/ellswift_decode_test_vectors.csv b/test/functional/test_framework/ellswift_decode_test_vectors.csv new file mode 100644 index 0000000000..1bab96b721 --- /dev/null +++ b/test/functional/test_framework/ellswift_decode_test_vectors.csv @@ -0,0 +1,77 @@ +ellswift,x,comment
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000,edd1fd3e327ce90cc7a3542614289aee9682003e9cf7dcc9cf2ca9743be5aa0c,u%p=0;t%p=0;valid_x(x2)
+000000000000000000000000000000000000000000000000000000000000000001d3475bf7655b0fb2d852921035b2ef607f49069b97454e6795251062741771,b5da00b73cd6560520e7c364086e7cd23a34bf60d0e707be9fc34d4cd5fdfa2c,u%p=0;valid_x(x1)
+000000000000000000000000000000000000000000000000000000000000000082277c4a71f9d22e66ece523f8fa08741a7c0912c66a69ce68514bfd3515b49f,f482f2e241753ad0fb89150d8491dc1e34ff0b8acfbb442cfe999e2e5e6fd1d2,u%p=0;valid_x(x3);valid_x(x2);valid_x(x1)
+00000000000000000000000000000000000000000000000000000000000000008421cc930e77c9f514b6915c3dbe2a94c6d8f690b5b739864ba6789fb8a55dd0,9f59c40275f5085a006f05dae77eb98c6fd0db1ab4a72ac47eae90a4fc9e57e0,u%p=0;valid_x(x2)
+0000000000000000000000000000000000000000000000000000000000000000bde70df51939b94c9c24979fa7dd04ebd9b3572da7802290438af2a681895441,aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa9fffffd6b,u%p=0;(u'^3-t'^2+7)%p=0;valid_x(x3)
+0000000000000000000000000000000000000000000000000000000000000000d19c182d2759cd99824228d94799f8c6557c38a1c0d6779b9d4b729c6f1ccc42,70720db7e238d04121f5b1afd8cc5ad9d18944c6bdc94881f502b7a3af3aecff,u%p=0;valid_x(x3)
+0000000000000000000000000000000000000000000000000000000000000000fffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f,edd1fd3e327ce90cc7a3542614289aee9682003e9cf7dcc9cf2ca9743be5aa0c,u%p=0;t%p=0;valid_x(x2);t>=p
+0000000000000000000000000000000000000000000000000000000000000000ffffffffffffffffffffffffffffffffffffffffffffffffffffffff2664bbd5,50873db31badcc71890e4f67753a65757f97aaa7dd5f1e82b753ace32219064b,u%p=0;valid_x(x3);valid_x(x2);valid_x(x1);t>=p
+0000000000000000000000000000000000000000000000000000000000000000ffffffffffffffffffffffffffffffffffffffffffffffffffffffff7028de7d,1eea9cc59cfcf2fa151ac6c274eea4110feb4f7b68c5965732e9992e976ef68e,u%p=0;valid_x(x2);t>=p
+0000000000000000000000000000000000000000000000000000000000000000ffffffffffffffffffffffffffffffffffffffffffffffffffffffffcbcfb7e7,12303941aedc208880735b1f1795c8e55be520ea93e103357b5d2adb7ed59b8e,u%p=0;valid_x(x1);t>=p
+0000000000000000000000000000000000000000000000000000000000000000fffffffffffffffffffffffffffffffffffffffffffffffffffffffff3113ad9,7eed6b70e7b0767c7d7feac04e57aa2a12fef5e0f48f878fcbb88b3b6b5e0783,u%p=0;valid_x(x3);t>=p
+0a2d2ba93507f1df233770c2a797962cc61f6d15da14ecd47d8d27ae1cd5f8530000000000000000000000000000000000000000000000000000000000000000,532167c11200b08c0e84a354e74dcc40f8b25f4fe686e30869526366278a0688,t%p=0;(u'^3+t'^2+7)%p=0;valid_x(x3);valid_x(x2);valid_x(x1)
+0a2d2ba93507f1df233770c2a797962cc61f6d15da14ecd47d8d27ae1cd5f853fffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f,532167c11200b08c0e84a354e74dcc40f8b25f4fe686e30869526366278a0688,t%p=0;(u'^3+t'^2+7)%p=0;valid_x(x3);valid_x(x2);valid_x(x1);t>=p
+0ffde9ca81d751e9cdaffc1a50779245320b28996dbaf32f822f20117c22fbd6c74d99efceaa550f1ad1c0f43f46e7ff1ee3bd0162b7bf55f2965da9c3450646,74e880b3ffd18fe3cddf7902522551ddf97fa4a35a3cfda8197f947081a57b8f,valid_x(x3)
+0ffde9ca81d751e9cdaffc1a50779245320b28996dbaf32f822f20117c22fbd6ffffffffffffffffffffffffffffffffffffffffffffffffffffffff156ca896,377b643fce2271f64e5c8101566107c1be4980745091783804f654781ac9217c,valid_x(x2);t>=p
+123658444f32be8f02ea2034afa7ef4bbe8adc918ceb49b12773b625f490b368ffffffffffffffffffffffffffffffffffffffffffffffffffffffff8dc5fe11,ed16d65cf3a9538fcb2c139f1ecbc143ee14827120cbc2659e667256800b8142,(u'^3-t'^2+7)%p=0;valid_x(x3);valid_x(x2);valid_x(x1);t>=p
+146f92464d15d36e35382bd3ca5b0f976c95cb08acdcf2d5b3570617990839d7ffffffffffffffffffffffffffffffffffffffffffffffffffffffff3145e93b,0d5cd840427f941f65193079ab8e2e83024ef2ee7ca558d88879ffd879fb6657,(u'^3+t'^2+7)%p=0;valid_x(x3);t>=p
+15fdf5cf09c90759add2272d574d2bb5fe1429f9f3c14c65e3194bf61b82aa73ffffffffffffffffffffffffffffffffffffffffffffffffffffffff04cfd906,16d0e43946aec93f62d57eb8cde68951af136cf4b307938dd1447411e07bffe1,(u'^3+t'^2+7)%p=0;valid_x(x2);t>=p
+1f67edf779a8a649d6def60035f2fa22d022dd359079a1a144073d84f19b92d50000000000000000000000000000000000000000000000000000000000000000,025661f9aba9d15c3118456bbe980e3e1b8ba2e047c737a4eb48a040bb566f6c,t%p=0;valid_x(x2)
+1f67edf779a8a649d6def60035f2fa22d022dd359079a1a144073d84f19b92d5fffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f,025661f9aba9d15c3118456bbe980e3e1b8ba2e047c737a4eb48a040bb566f6c,t%p=0;valid_x(x2);t>=p
+1fe1e5ef3fceb5c135ab7741333ce5a6e80d68167653f6b2b24bcbcfaaaff507fffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f,98bec3b2a351fa96cfd191c1778351931b9e9ba9ad1149f6d9eadca80981b801,t%p=0;(u'^3-t'^2+7)%p=0;valid_x(x3);valid_x(x2);valid_x(x1);t>=p
+4056a34a210eec7892e8820675c860099f857b26aad85470ee6d3cf1304a9dcf375e70374271f20b13c9986ed7d3c17799698cfc435dbed3a9f34b38c823c2b4,868aac2003b29dbcad1a3e803855e078a89d16543ac64392d122417298cec76e,(u'^3-t'^2+7)%p=0;valid_x(x3)
+4197ec3723c654cfdd32ab075506648b2ff5070362d01a4fff14b336b78f963fffffffffffffffffffffffffffffffffffffffffffffffffffffffffb3ab1e95,ba5a6314502a8952b8f456e085928105f665377a8ce27726a5b0eb7ec1ac0286,(u'^3+t'^2+7)%p=0;valid_x(x1);t>=p
+47eb3e208fedcdf8234c9421e9cd9a7ae873bfbdbc393723d1ba1e1e6a8e6b24ffffffffffffffffffffffffffffffffffffffffffffffffffffffff7cd12cb1,d192d52007e541c9807006ed0468df77fd214af0a795fe119359666fdcf08f7c,(u'^3+t'^2+7)%p=0;valid_x(x3);valid_x(x2);valid_x(x1);t>=p
+5eb9696a2336fe2c3c666b02c755db4c0cfd62825c7b589a7b7bb442e141c1d693413f0052d49e64abec6d5831d66c43612830a17df1fe4383db896468100221,ef6e1da6d6c7627e80f7a7234cb08a022c1ee1cf29e4d0f9642ae924cef9eb38,(u'^3+t'^2+7)%p=0;valid_x(x1)
+7bf96b7b6da15d3476a2b195934b690a3a3de3e8ab8474856863b0de3af90b0e0000000000000000000000000000000000000000000000000000000000000000,50851dfc9f418c314a437295b24feeea27af3d0cd2308348fda6e21c463e46ff,t%p=0;valid_x(x1)
+7bf96b7b6da15d3476a2b195934b690a3a3de3e8ab8474856863b0de3af90b0efffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f,50851dfc9f418c314a437295b24feeea27af3d0cd2308348fda6e21c463e46ff,t%p=0;valid_x(x1);t>=p
+851b1ca94549371c4f1f7187321d39bf51c6b7fb61f7cbf027c9da62021b7a65fc54c96837fb22b362eda63ec52ec83d81bedd160c11b22d965d9f4a6d64d251,3e731051e12d33237eb324f2aa5b16bb868eb49a1aa1fadc19b6e8761b5a5f7b,(u'^3+t'^2+7)%p=0;valid_x(x2)
+943c2f775108b737fe65a9531e19f2fc2a197f5603e3a2881d1d83e4008f91250000000000000000000000000000000000000000000000000000000000000000,311c61f0ab2f32b7b1f0223fa72f0a78752b8146e46107f8876dd9c4f92b2942,t%p=0;valid_x(x3);valid_x(x2);valid_x(x1)
+943c2f775108b737fe65a9531e19f2fc2a197f5603e3a2881d1d83e4008f9125fffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f,311c61f0ab2f32b7b1f0223fa72f0a78752b8146e46107f8876dd9c4f92b2942,t%p=0;valid_x(x3);valid_x(x2);valid_x(x1);t>=p
+a0f18492183e61e8063e573606591421b06bc3513631578a73a39c1c3306239f2f32904f0d2a33ecca8a5451705bb537d3bf44e071226025cdbfd249fe0f7ad6,97a09cf1a2eae7c494df3c6f8a9445bfb8c09d60832f9b0b9d5eabe25fbd14b9,valid_x(x1)
+a1ed0a0bd79d8a23cfe4ec5fef5ba5cccfd844e4ff5cb4b0f2e71627341f1c5b17c499249e0ac08d5d11ea1c2c8ca7001616559a7994eadec9ca10fb4b8516dc,65a89640744192cdac64b2d21ddf989cdac7500725b645bef8e2200ae39691f2,valid_x(x2)
+ba94594a432721aa3580b84c161d0d134bc354b690404d7cd4ec57c16d3fbe98ffffffffffffffffffffffffffffffffffffffffffffffffffffffffea507dd7,5e0d76564aae92cb347e01a62afd389a9aa401c76c8dd227543dc9cd0efe685a,valid_x(x1);t>=p
+bcaf7219f2f6fbf55fe5e062dce0e48c18f68103f10b8198e974c184750e1be3932016cbf69c4471bd1f656c6a107f1973de4af7086db897277060e25677f19a,2d97f96cac882dfe73dc44db6ce0f1d31d6241358dd5d74eb3d3b50003d24c2b,valid_x(x3);valid_x(x2);valid_x(x1)
+bcaf7219f2f6fbf55fe5e062dce0e48c18f68103f10b8198e974c184750e1be3ffffffffffffffffffffffffffffffffffffffffffffffffffffffff6507d09a,e7008afe6e8cbd5055df120bd748757c686dadb41cce75e4addcc5e02ec02b44,valid_x(x3);valid_x(x2);valid_x(x1);t>=p
+c5981bae27fd84401c72a155e5707fbb811b2b620645d1028ea270cbe0ee225d4b62aa4dca6506c1acdbecc0552569b4b21436a5692e25d90d3bc2eb7ce24078,948b40e7181713bc018ec1702d3d054d15746c59a7020730dd13ecf985a010d7,(u'^3+t'^2+7)%p=0;valid_x(x3)
+c894ce48bfec433014b931a6ad4226d7dbd8eaa7b6e3faa8d0ef94052bcf8cff336eeb3919e2b4efb746c7f71bbca7e9383230fbbc48ffafe77e8bcc69542471,f1c91acdc2525330f9b53158434a4d43a1c547cff29f15506f5da4eb4fe8fa5a,(u'^3-t'^2+7)%p=0;valid_x(x3);valid_x(x2);valid_x(x1)
+cbb0deab125754f1fdb2038b0434ed9cb3fb53ab735391129994a535d925f6730000000000000000000000000000000000000000000000000000000000000000,872d81ed8831d9998b67cb7105243edbf86c10edfebb786c110b02d07b2e67cd,t%p=0;(u'^3-t'^2+7)%p=0;valid_x(x3);valid_x(x2);valid_x(x1)
+d917b786dac35670c330c9c5ae5971dfb495c8ae523ed97ee2420117b171f41effffffffffffffffffffffffffffffffffffffffffffffffffffffff2001f6f6,e45b71e110b831f2bdad8651994526e58393fde4328b1ec04d59897142584691,valid_x(x3);t>=p
+e28bd8f5929b467eb70e04332374ffb7e7180218ad16eaa46b7161aa679eb4260000000000000000000000000000000000000000000000000000000000000000,66b8c980a75c72e598d383a35a62879f844242ad1e73ff12edaa59f4e58632b5,t%p=0;valid_x(x3)
+e28bd8f5929b467eb70e04332374ffb7e7180218ad16eaa46b7161aa679eb426fffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f,66b8c980a75c72e598d383a35a62879f844242ad1e73ff12edaa59f4e58632b5,t%p=0;valid_x(x3);t>=p
+e7ee5814c1706bf8a89396a9b032bc014c2cac9c121127dbf6c99278f8bb53d1dfd04dbcda8e352466b6fcd5f2dea3e17d5e133115886eda20db8a12b54de71b,e842c6e3529b234270a5e97744edc34a04d7ba94e44b6d2523c9cf0195730a50,(u'^3+t'^2+7)%p=0;valid_x(x3);valid_x(x2);valid_x(x1)
+f292e46825f9225ad23dc057c1d91c4f57fcb1386f29ef10481cb1d22518593fffffffffffffffffffffffffffffffffffffffffffffffffffffffff7011c989,3cea2c53b8b0170166ac7da67194694adacc84d56389225e330134dab85a4d55,(u'^3-t'^2+7)%p=0;valid_x(x3);t>=p
+fffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f0000000000000000000000000000000000000000000000000000000000000000,edd1fd3e327ce90cc7a3542614289aee9682003e9cf7dcc9cf2ca9743be5aa0c,u%p=0;t%p=0;valid_x(x2);u>=p
+fffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f01d3475bf7655b0fb2d852921035b2ef607f49069b97454e6795251062741771,b5da00b73cd6560520e7c364086e7cd23a34bf60d0e707be9fc34d4cd5fdfa2c,u%p=0;valid_x(x1);u>=p
+fffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f4218f20ae6c646b363db68605822fb14264ca8d2587fdd6fbc750d587e76a7ee,aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa9fffffd6b,u%p=0;(u'^3-t'^2+7)%p=0;valid_x(x3);u>=p
+fffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f82277c4a71f9d22e66ece523f8fa08741a7c0912c66a69ce68514bfd3515b49f,f482f2e241753ad0fb89150d8491dc1e34ff0b8acfbb442cfe999e2e5e6fd1d2,u%p=0;valid_x(x3);valid_x(x2);valid_x(x1);u>=p
+fffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f8421cc930e77c9f514b6915c3dbe2a94c6d8f690b5b739864ba6789fb8a55dd0,9f59c40275f5085a006f05dae77eb98c6fd0db1ab4a72ac47eae90a4fc9e57e0,u%p=0;valid_x(x2);u>=p
+fffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2fd19c182d2759cd99824228d94799f8c6557c38a1c0d6779b9d4b729c6f1ccc42,70720db7e238d04121f5b1afd8cc5ad9d18944c6bdc94881f502b7a3af3aecff,u%p=0;valid_x(x3);u>=p
+fffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2ffffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f,edd1fd3e327ce90cc7a3542614289aee9682003e9cf7dcc9cf2ca9743be5aa0c,u%p=0;t%p=0;valid_x(x2);u>=p;t>=p
+fffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2fffffffffffffffffffffffffffffffffffffffffffffffffffffffff2664bbd5,50873db31badcc71890e4f67753a65757f97aaa7dd5f1e82b753ace32219064b,u%p=0;valid_x(x3);valid_x(x2);valid_x(x1);u>=p;t>=p
+fffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2fffffffffffffffffffffffffffffffffffffffffffffffffffffffff7028de7d,1eea9cc59cfcf2fa151ac6c274eea4110feb4f7b68c5965732e9992e976ef68e,u%p=0;valid_x(x2);u>=p;t>=p
+fffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2fffffffffffffffffffffffffffffffffffffffffffffffffffffffffcbcfb7e7,12303941aedc208880735b1f1795c8e55be520ea93e103357b5d2adb7ed59b8e,u%p=0;valid_x(x1);u>=p;t>=p
+fffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2ffffffffffffffffffffffffffffffffffffffffffffffffffffffffff3113ad9,7eed6b70e7b0767c7d7feac04e57aa2a12fef5e0f48f878fcbb88b3b6b5e0783,u%p=0;valid_x(x3);u>=p;t>=p
+ffffffffffffffffffffffffffffffffffffffffffffffffffffffff13cea4a70000000000000000000000000000000000000000000000000000000000000000,649984435b62b4a25d40c6133e8d9ab8c53d4b059ee8a154a3be0fcf4e892edb,t%p=0;valid_x(x1);u>=p
+ffffffffffffffffffffffffffffffffffffffffffffffffffffffff13cea4a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f,649984435b62b4a25d40c6133e8d9ab8c53d4b059ee8a154a3be0fcf4e892edb,t%p=0;valid_x(x1);u>=p;t>=p
+ffffffffffffffffffffffffffffffffffffffffffffffffffffffff15028c590063f64d5a7f1c14915cd61eac886ab295bebd91992504cf77edb028bdd6267f,3fde5713f8282eead7d39d4201f44a7c85a5ac8a0681f35e54085c6b69543374,(u'^3+t'^2+7)%p=0;valid_x(x2);u>=p
+ffffffffffffffffffffffffffffffffffffffffffffffffffffffff2715de860000000000000000000000000000000000000000000000000000000000000000,3524f77fa3a6eb4389c3cb5d27f1f91462086429cd6c0cb0df43ea8f1e7b3fb4,t%p=0;valid_x(x3);valid_x(x2);valid_x(x1);u>=p
+ffffffffffffffffffffffffffffffffffffffffffffffffffffffff2715de86fffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f,3524f77fa3a6eb4389c3cb5d27f1f91462086429cd6c0cb0df43ea8f1e7b3fb4,t%p=0;valid_x(x3);valid_x(x2);valid_x(x1);u>=p;t>=p
+ffffffffffffffffffffffffffffffffffffffffffffffffffffffff2c2c5709e7156c417717f2feab147141ec3da19fb759575cc6e37b2ea5ac9309f26f0f66,d2469ab3e04acbb21c65a1809f39caafe7a77c13d10f9dd38f391c01dc499c52,(u'^3-t'^2+7)%p=0;valid_x(x3);valid_x(x2);valid_x(x1);u>=p
+ffffffffffffffffffffffffffffffffffffffffffffffffffffffff3a08cc1efffffffffffffffffffffffffffffffffffffffffffffffffffffffff760e9f0,38e2a5ce6a93e795e16d2c398bc99f0369202ce21e8f09d56777b40fc512bccc,valid_x(x3);u>=p;t>=p
+ffffffffffffffffffffffffffffffffffffffffffffffffffffffff3e91257d932016cbf69c4471bd1f656c6a107f1973de4af7086db897277060e25677f19a,864b3dc902c376709c10a93ad4bbe29fce0012f3dc8672c6286bba28d7d6d6fc,valid_x(x3);u>=p
+ffffffffffffffffffffffffffffffffffffffffffffffffffffffff795d6c1c322cadf599dbb86481522b3cc55f15a67932db2afa0111d9ed6981bcd124bf44,766dfe4a700d9bee288b903ad58870e3d4fe2f0ef780bcac5c823f320d9a9bef,(u'^3+t'^2+7)%p=0;valid_x(x1);u>=p
+ffffffffffffffffffffffffffffffffffffffffffffffffffffffff8e426f0392389078c12b1a89e9542f0593bc96b6bfde8224f8654ef5d5cda935a3582194,faec7bc1987b63233fbc5f956edbf37d54404e7461c58ab8631bc68e451a0478,valid_x(x1);u>=p
+ffffffffffffffffffffffffffffffffffffffffffffffffffffffff91192139ffffffffffffffffffffffffffffffffffffffffffffffffffffffff45f0f1eb,ec29a50bae138dbf7d8e24825006bb5fc1a2cc1243ba335bc6116fb9e498ec1f,valid_x(x2);u>=p;t>=p
+ffffffffffffffffffffffffffffffffffffffffffffffffffffffff98eb9ab76e84499c483b3bf06214abfe065dddf43b8601de596d63b9e45a166a580541fe,1e0ff2dee9b09b136292a9e910f0d6ac3e552a644bba39e64e9dd3e3bbd3d4d4,(u'^3-t'^2+7)%p=0;valid_x(x3);u>=p
+ffffffffffffffffffffffffffffffffffffffffffffffffffffffff9b77b7f2c74d99efceaa550f1ad1c0f43f46e7ff1ee3bd0162b7bf55f2965da9c3450646,8b7dd5c3edba9ee97b70eff438f22dca9849c8254a2f3345a0a572ffeaae0928,valid_x(x2);u>=p
+ffffffffffffffffffffffffffffffffffffffffffffffffffffffff9b77b7f2ffffffffffffffffffffffffffffffffffffffffffffffffffffffff156ca896,0881950c8f51d6b9a6387465d5f12609ef1bb25412a08a74cb2dfb200c74bfbf,valid_x(x3);valid_x(x2);valid_x(x1);u>=p;t>=p
+ffffffffffffffffffffffffffffffffffffffffffffffffffffffffa2f5cd838816c16c4fe8a1661d606fdb13cf9af04b979a2e159a09409ebc8645d58fde02,2f083207b9fd9b550063c31cd62b8746bd543bdc5bbf10e3a35563e927f440c8,(u'^3+t'^2+7)%p=0;valid_x(x3);valid_x(x2);valid_x(x1);u>=p
+ffffffffffffffffffffffffffffffffffffffffffffffffffffffffb13f75c00000000000000000000000000000000000000000000000000000000000000000,4f51e0be078e0cddab2742156adba7e7a148e73157072fd618cd60942b146bd0,t%p=0;valid_x(x3);u>=p
+ffffffffffffffffffffffffffffffffffffffffffffffffffffffffb13f75c0fffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f,4f51e0be078e0cddab2742156adba7e7a148e73157072fd618cd60942b146bd0,t%p=0;valid_x(x3);u>=p;t>=p
+ffffffffffffffffffffffffffffffffffffffffffffffffffffffffe7bc1f8d0000000000000000000000000000000000000000000000000000000000000000,16c2ccb54352ff4bd794f6efd613c72197ab7082da5b563bdf9cb3edaafe74c2,t%p=0;valid_x(x2);u>=p
+ffffffffffffffffffffffffffffffffffffffffffffffffffffffffe7bc1f8dfffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f,16c2ccb54352ff4bd794f6efd613c72197ab7082da5b563bdf9cb3edaafe74c2,t%p=0;valid_x(x2);u>=p;t>=p
+ffffffffffffffffffffffffffffffffffffffffffffffffffffffffef64d162750546ce42b0431361e52d4f5242d8f24f33e6b1f99b591647cbc808f462af51,d41244d11ca4f65240687759f95ca9efbab767ededb38fd18c36e18cd3b6f6a9,(u'^3+t'^2+7)%p=0;valid_x(x3);u>=p
+fffffffffffffffffffffffffffffffffffffffffffffffffffffffff0e5be52372dd6e894b2a326fc3605a6e8f3c69c710bf27d630dfe2004988b78eb6eab36,64bf84dd5e03670fdb24c0f5d3c2c365736f51db6c92d95010716ad2d36134c8,valid_x(x3);valid_x(x2);valid_x(x1);u>=p
+fffffffffffffffffffffffffffffffffffffffffffffffffffffffffefbb982fffffffffffffffffffffffffffffffffffffffffffffffffffffffff6d6db1f,1c92ccdfcf4ac550c28db57cff0c8515cb26936c786584a70114008d6c33a34b,valid_x(x1);u>=p;t>=p
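As an aside, the new ellswift helpers introduced above compose into a complete X-only ECDH exchange of the kind the decode/inverse test vectors pin down. The following is a minimal, illustrative sketch (not part of this changeset) that assumes test/functional is on the import path so that the new test_framework/ellswift.py and the test_framework.secp256k1 module it imports are available; like the module itself, it uses test-only randomness and must not be used in production.

# Illustrative sketch only (not part of the diff): exercise the new ellswift helpers.
# Assumes test/functional is on sys.path so that test_framework.ellswift and the
# test_framework.secp256k1 module it depends on can be imported.
from test_framework.ellswift import ellswift_create, ellswift_ecdh_xonly, xswiftec
from test_framework.secp256k1 import FE, G

# Each party creates a private key and a 64-byte ElligatorSwift-encoded public key (u || t).
alice_priv, alice_enc = ellswift_create()
bob_priv, bob_enc = ellswift_create()

# Decoding an encoding with xswiftec recovers the X coordinate of the underlying public key.
u = FE(int.from_bytes(alice_enc[:32], 'big'))
t = FE(int.from_bytes(alice_enc[32:], 'big'))
assert xswiftec(u, t) == (int.from_bytes(alice_priv, 'big') * G).x

# Both parties derive the same 32-byte shared X coordinate, mirroring test_ellswift_ecdh_xonly.
assert ellswift_ecdh_xonly(bob_enc, alice_priv) == ellswift_ecdh_xonly(alice_enc, bob_priv)

The BIP324 vectors added here (ellswift_decode_test_vectors.csv above and xswiftec_inv_test_vectors.csv referenced by the unit tests) cover exactly these decode and inverse operations.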
diff --git a/test/functional/test_framework/key.py b/test/functional/test_framework/key.py index efb4934ff0..c250fc6fe8 100644 --- a/test/functional/test_framework/key.py +++ b/test/functional/test_framework/key.py @@ -1,7 +1,7 @@ # Copyright (c) 2019-2020 Pieter Wuille # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. -"""Test-only secp256k1 elliptic curve implementation +"""Test-only secp256k1 elliptic curve protocols implementation WARNING: This code is slow, uses bad randomness, does not properly protect keys, and is trivially vulnerable to side channel attacks. Do not use for @@ -13,9 +13,13 @@ import os import random import unittest +from test_framework import secp256k1 + # Point with no known discrete log. H_POINT = "50929b74c1a04954b78b4b6035e97a5e078a5a0f28ec96d547bfee9ace803ac0" +# Order of the secp256k1 curve +ORDER = secp256k1.GE.ORDER def TaggedHash(tag, data): ss = hashlib.sha256(tag.encode('utf-8')).digest() @@ -23,233 +27,18 @@ def TaggedHash(tag, data): ss += data return hashlib.sha256(ss).digest() -def jacobi_symbol(n, k): - """Compute the Jacobi symbol of n modulo k - - See https://en.wikipedia.org/wiki/Jacobi_symbol - - For our application k is always prime, so this is the same as the Legendre symbol.""" - assert k > 0 and k & 1, "jacobi symbol is only defined for positive odd k" - n %= k - t = 0 - while n != 0: - while n & 1 == 0: - n >>= 1 - r = k & 7 - t ^= (r == 3 or r == 5) - n, k = k, n - t ^= (n & k & 3 == 3) - n = n % k - if k == 1: - return -1 if t else 1 - return 0 - -def modsqrt(a, p): - """Compute the square root of a modulo p when p % 4 = 3. - - The Tonelli-Shanks algorithm can be used. See https://en.wikipedia.org/wiki/Tonelli-Shanks_algorithm - - Limiting this function to only work for p % 4 = 3 means we don't need to - iterate through the loop. The highest n such that p - 1 = 2^n Q with Q odd - is n = 1. Therefore Q = (p-1)/2 and sqrt = a^((Q+1)/2) = a^((p+1)/4) - - secp256k1's is defined over field of size 2**256 - 2**32 - 977, which is 3 mod 4. - """ - if p % 4 != 3: - raise NotImplementedError("modsqrt only implemented for p % 4 = 3") - sqrt = pow(a, (p + 1)//4, p) - if pow(sqrt, 2, p) == a % p: - return sqrt - return None - -class EllipticCurve: - def __init__(self, p, a, b): - """Initialize elliptic curve y^2 = x^3 + a*x + b over GF(p).""" - self.p = p - self.a = a % p - self.b = b % p - - def affine(self, p1): - """Convert a Jacobian point tuple p1 to affine form, or None if at infinity. 
- - An affine point is represented as the Jacobian (x, y, 1)""" - x1, y1, z1 = p1 - if z1 == 0: - return None - inv = pow(z1, -1, self.p) - inv_2 = (inv**2) % self.p - inv_3 = (inv_2 * inv) % self.p - return ((inv_2 * x1) % self.p, (inv_3 * y1) % self.p, 1) - - def has_even_y(self, p1): - """Whether the point p1 has an even Y coordinate when expressed in affine coordinates.""" - return not (p1[2] == 0 or self.affine(p1)[1] & 1) - - def negate(self, p1): - """Negate a Jacobian point tuple p1.""" - x1, y1, z1 = p1 - return (x1, (self.p - y1) % self.p, z1) - - def on_curve(self, p1): - """Determine whether a Jacobian tuple p is on the curve (and not infinity)""" - x1, y1, z1 = p1 - z2 = pow(z1, 2, self.p) - z4 = pow(z2, 2, self.p) - return z1 != 0 and (pow(x1, 3, self.p) + self.a * x1 * z4 + self.b * z2 * z4 - pow(y1, 2, self.p)) % self.p == 0 - - def is_x_coord(self, x): - """Test whether x is a valid X coordinate on the curve.""" - x_3 = pow(x, 3, self.p) - return jacobi_symbol(x_3 + self.a * x + self.b, self.p) != -1 - - def lift_x(self, x): - """Given an X coordinate on the curve, return a corresponding affine point for which the Y coordinate is even.""" - x_3 = pow(x, 3, self.p) - v = x_3 + self.a * x + self.b - y = modsqrt(v, self.p) - if y is None: - return None - return (x, self.p - y if y & 1 else y, 1) - - def double(self, p1): - """Double a Jacobian tuple p1 - - See https://en.wikibooks.org/wiki/Cryptography/Prime_Curve/Jacobian_Coordinates - Point Doubling""" - x1, y1, z1 = p1 - if z1 == 0: - return (0, 1, 0) - y1_2 = (y1**2) % self.p - y1_4 = (y1_2**2) % self.p - x1_2 = (x1**2) % self.p - s = (4*x1*y1_2) % self.p - m = 3*x1_2 - if self.a: - m += self.a * pow(z1, 4, self.p) - m = m % self.p - x2 = (m**2 - 2*s) % self.p - y2 = (m*(s - x2) - 8*y1_4) % self.p - z2 = (2*y1*z1) % self.p - return (x2, y2, z2) - - def add_mixed(self, p1, p2): - """Add a Jacobian tuple p1 and an affine tuple p2 - - See https://en.wikibooks.org/wiki/Cryptography/Prime_Curve/Jacobian_Coordinates - Point Addition (with affine point)""" - x1, y1, z1 = p1 - x2, y2, z2 = p2 - assert z2 == 1 - # Adding to the point at infinity is a no-op - if z1 == 0: - return p2 - z1_2 = (z1**2) % self.p - z1_3 = (z1_2 * z1) % self.p - u2 = (x2 * z1_2) % self.p - s2 = (y2 * z1_3) % self.p - if x1 == u2: - if (y1 != s2): - # p1 and p2 are inverses. Return the point at infinity. - return (0, 1, 0) - # p1 == p2. The formulas below fail when the two points are equal. - return self.double(p1) - h = u2 - x1 - r = s2 - y1 - h_2 = (h**2) % self.p - h_3 = (h_2 * h) % self.p - u1_h_2 = (x1 * h_2) % self.p - x3 = (r**2 - h_3 - 2*u1_h_2) % self.p - y3 = (r*(u1_h_2 - x3) - y1*h_3) % self.p - z3 = (h*z1) % self.p - return (x3, y3, z3) - - def add(self, p1, p2): - """Add two Jacobian tuples p1 and p2 - - See https://en.wikibooks.org/wiki/Cryptography/Prime_Curve/Jacobian_Coordinates - Point Addition""" - x1, y1, z1 = p1 - x2, y2, z2 = p2 - # Adding the point at infinity is a no-op - if z1 == 0: - return p2 - if z2 == 0: - return p1 - # Adding an Affine to a Jacobian is more efficient since we save field multiplications and squarings when z = 1 - if z1 == 1: - return self.add_mixed(p2, p1) - if z2 == 1: - return self.add_mixed(p1, p2) - z1_2 = (z1**2) % self.p - z1_3 = (z1_2 * z1) % self.p - z2_2 = (z2**2) % self.p - z2_3 = (z2_2 * z2) % self.p - u1 = (x1 * z2_2) % self.p - u2 = (x2 * z1_2) % self.p - s1 = (y1 * z2_3) % self.p - s2 = (y2 * z1_3) % self.p - if u1 == u2: - if (s1 != s2): - # p1 and p2 are inverses. Return the point at infinity. 
- return (0, 1, 0) - # p1 == p2. The formulas below fail when the two points are equal. - return self.double(p1) - h = u2 - u1 - r = s2 - s1 - h_2 = (h**2) % self.p - h_3 = (h_2 * h) % self.p - u1_h_2 = (u1 * h_2) % self.p - x3 = (r**2 - h_3 - 2*u1_h_2) % self.p - y3 = (r*(u1_h_2 - x3) - s1*h_3) % self.p - z3 = (h*z1*z2) % self.p - return (x3, y3, z3) - - def mul(self, ps): - """Compute a (multi) point multiplication - - ps is a list of (Jacobian tuple, scalar) pairs. - """ - r = (0, 1, 0) - for i in range(255, -1, -1): - r = self.double(r) - for (p, n) in ps: - if ((n >> i) & 1): - r = self.add(r, p) - return r - -SECP256K1_FIELD_SIZE = 2**256 - 2**32 - 977 -SECP256K1 = EllipticCurve(SECP256K1_FIELD_SIZE, 0, 7) -SECP256K1_G = (0x79BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F81798, 0x483ADA7726A3C4655DA4FBFC0E1108A8FD17B448A68554199C47D08FFB10D4B8, 1) -SECP256K1_ORDER = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141 -SECP256K1_ORDER_HALF = SECP256K1_ORDER // 2 - -class ECPubKey(): + +class ECPubKey: """A secp256k1 public key""" def __init__(self): """Construct an uninitialized public key""" - self.valid = False + self.p = None def set(self, data): """Construct a public key from a serialization in compressed or uncompressed format""" - if (len(data) == 65 and data[0] == 0x04): - p = (int.from_bytes(data[1:33], 'big'), int.from_bytes(data[33:65], 'big'), 1) - self.valid = SECP256K1.on_curve(p) - if self.valid: - self.p = p - self.compressed = False - elif (len(data) == 33 and (data[0] == 0x02 or data[0] == 0x03)): - x = int.from_bytes(data[1:33], 'big') - if SECP256K1.is_x_coord(x): - p = SECP256K1.lift_x(x) - # Make the Y coordinate odd if required (lift_x always produces - # a point with an even Y coordinate). - if data[0] & 1: - p = SECP256K1.negate(p) - self.p = p - self.valid = True - self.compressed = True - else: - self.valid = False - else: - self.valid = False + self.p = secp256k1.GE.from_bytes(data) + self.compressed = len(data) == 33 @property def is_compressed(self): @@ -257,24 +46,21 @@ class ECPubKey(): @property def is_valid(self): - return self.valid + return self.p is not None def get_bytes(self): - assert self.valid - p = SECP256K1.affine(self.p) - if p is None: - return None + assert self.is_valid if self.compressed: - return bytes([0x02 + (p[1] & 1)]) + p[0].to_bytes(32, 'big') + return self.p.to_bytes_compressed() else: - return bytes([0x04]) + p[0].to_bytes(32, 'big') + p[1].to_bytes(32, 'big') + return self.p.to_bytes_uncompressed() def verify_ecdsa(self, sig, msg, low_s=True): """Verify a strictly DER-encoded ECDSA signature against this pubkey. See https://en.wikipedia.org/wiki/Elliptic_Curve_Digital_Signature_Algorithm for the ECDSA verifier algorithm""" - assert self.valid + assert self.is_valid # Extract r and s from the DER formatted signature. Return false for # any DER encoding errors. 
@@ -310,24 +96,22 @@ class ECPubKey(): s = int.from_bytes(sig[6+rlen:6+rlen+slen], 'big') # Verify that r and s are within the group order - if r < 1 or s < 1 or r >= SECP256K1_ORDER or s >= SECP256K1_ORDER: + if r < 1 or s < 1 or r >= ORDER or s >= ORDER: return False - if low_s and s >= SECP256K1_ORDER_HALF: + if low_s and s >= secp256k1.GE.ORDER_HALF: return False z = int.from_bytes(msg, 'big') # Run verifier algorithm on r, s - w = pow(s, -1, SECP256K1_ORDER) - u1 = z*w % SECP256K1_ORDER - u2 = r*w % SECP256K1_ORDER - R = SECP256K1.affine(SECP256K1.mul([(SECP256K1_G, u1), (self.p, u2)])) - if R is None or (R[0] % SECP256K1_ORDER) != r: + w = pow(s, -1, ORDER) + R = secp256k1.GE.mul((z * w, secp256k1.G), (r * w, self.p)) + if R.infinity or (int(R.x) % ORDER) != r: return False return True def generate_privkey(): """Generate a valid random 32-byte private key.""" - return random.randrange(1, SECP256K1_ORDER).to_bytes(32, 'big') + return random.randrange(1, ORDER).to_bytes(32, 'big') def rfc6979_nonce(key): """Compute signing nonce using RFC6979.""" @@ -339,7 +123,7 @@ def rfc6979_nonce(key): v = hmac.new(k, v, 'sha256').digest() return hmac.new(k, v, 'sha256').digest() -class ECKey(): +class ECKey: """A secp256k1 private key""" def __init__(self): @@ -349,7 +133,7 @@ class ECKey(): """Construct a private key object with given 32-byte secret and compressed flag.""" assert len(secret) == 32 secret = int.from_bytes(secret, 'big') - self.valid = (secret > 0 and secret < SECP256K1_ORDER) + self.valid = (secret > 0 and secret < ORDER) if self.valid: self.secret = secret self.compressed = compressed @@ -375,9 +159,7 @@ class ECKey(): """Compute an ECPubKey object for this secret key.""" assert self.valid ret = ECPubKey() - p = SECP256K1.mul([(SECP256K1_G, self.secret)]) - ret.p = p - ret.valid = True + ret.p = self.secret * secp256k1.G ret.compressed = self.compressed return ret @@ -392,12 +174,12 @@ class ECKey(): if rfc6979: k = int.from_bytes(rfc6979_nonce(self.secret.to_bytes(32, 'big') + msg), 'big') else: - k = random.randrange(1, SECP256K1_ORDER) - R = SECP256K1.affine(SECP256K1.mul([(SECP256K1_G, k)])) - r = R[0] % SECP256K1_ORDER - s = (pow(k, -1, SECP256K1_ORDER) * (z + self.secret * r)) % SECP256K1_ORDER - if low_s and s > SECP256K1_ORDER_HALF: - s = SECP256K1_ORDER - s + k = random.randrange(1, ORDER) + R = k * secp256k1.G + r = int(R.x) % ORDER + s = (pow(k, -1, ORDER) * (z + self.secret * r)) % ORDER + if low_s and s > secp256k1.GE.ORDER_HALF: + s = ORDER - s # Represent in DER format. The byte representations of r and s have # length rounded up (255 bits becomes 32 bytes and 256 bits becomes 33 # bytes). 
@@ -413,10 +195,10 @@ def compute_xonly_pubkey(key): assert len(key) == 32 x = int.from_bytes(key, 'big') - if x == 0 or x >= SECP256K1_ORDER: + if x == 0 or x >= ORDER: return (None, None) - P = SECP256K1.affine(SECP256K1.mul([(SECP256K1_G, x)])) - return (P[0].to_bytes(32, 'big'), not SECP256K1.has_even_y(P)) + P = x * secp256k1.G + return (P.to_bytes_xonly(), not P.y.is_even()) def tweak_add_privkey(key, tweak): """Tweak a private key (after negating it if needed).""" @@ -425,14 +207,14 @@ def tweak_add_privkey(key, tweak): assert len(tweak) == 32 x = int.from_bytes(key, 'big') - if x == 0 or x >= SECP256K1_ORDER: + if x == 0 or x >= ORDER: return None - if not SECP256K1.has_even_y(SECP256K1.mul([(SECP256K1_G, x)])): - x = SECP256K1_ORDER - x + if not (x * secp256k1.G).y.is_even(): + x = ORDER - x t = int.from_bytes(tweak, 'big') - if t >= SECP256K1_ORDER: + if t >= ORDER: return None - x = (x + t) % SECP256K1_ORDER + x = (x + t) % ORDER if x == 0: return None return x.to_bytes(32, 'big') @@ -443,19 +225,16 @@ def tweak_add_pubkey(key, tweak): assert len(key) == 32 assert len(tweak) == 32 - x_coord = int.from_bytes(key, 'big') - if x_coord >= SECP256K1_FIELD_SIZE: - return None - P = SECP256K1.lift_x(x_coord) + P = secp256k1.GE.from_bytes_xonly(key) if P is None: return None t = int.from_bytes(tweak, 'big') - if t >= SECP256K1_ORDER: + if t >= ORDER: return None - Q = SECP256K1.affine(SECP256K1.mul([(SECP256K1_G, t), (P, 1)])) - if Q is None: + Q = t * secp256k1.G + P + if Q.infinity: return None - return (Q[0].to_bytes(32, 'big'), not SECP256K1.has_even_y(Q)) + return (Q.to_bytes_xonly(), not Q.y.is_even()) def verify_schnorr(key, sig, msg): """Verify a Schnorr signature (see BIP 340). @@ -468,23 +247,20 @@ def verify_schnorr(key, sig, msg): assert len(msg) == 32 assert len(sig) == 64 - x_coord = int.from_bytes(key, 'big') - if x_coord == 0 or x_coord >= SECP256K1_FIELD_SIZE: - return False - P = SECP256K1.lift_x(x_coord) + P = secp256k1.GE.from_bytes_xonly(key) if P is None: return False r = int.from_bytes(sig[0:32], 'big') - if r >= SECP256K1_FIELD_SIZE: + if r >= secp256k1.FE.SIZE: return False s = int.from_bytes(sig[32:64], 'big') - if s >= SECP256K1_ORDER: + if s >= ORDER: return False - e = int.from_bytes(TaggedHash("BIP0340/challenge", sig[0:32] + key + msg), 'big') % SECP256K1_ORDER - R = SECP256K1.mul([(SECP256K1_G, s), (P, SECP256K1_ORDER - e)]) - if not SECP256K1.has_even_y(R): + e = int.from_bytes(TaggedHash("BIP0340/challenge", sig[0:32] + key + msg), 'big') % ORDER + R = secp256k1.GE.mul((s, secp256k1.G), (-e, P)) + if R.infinity or not R.y.is_even(): return False - if ((r * R[2] * R[2]) % SECP256K1_FIELD_SIZE) != R[0]: + if r != R.x: return False return True @@ -499,23 +275,24 @@ def sign_schnorr(key, msg, aux=None, flip_p=False, flip_r=False): assert len(aux) == 32 sec = int.from_bytes(key, 'big') - if sec == 0 or sec >= SECP256K1_ORDER: + if sec == 0 or sec >= ORDER: return None - P = SECP256K1.affine(SECP256K1.mul([(SECP256K1_G, sec)])) - if SECP256K1.has_even_y(P) == flip_p: - sec = SECP256K1_ORDER - sec + P = sec * secp256k1.G + if P.y.is_even() == flip_p: + sec = ORDER - sec t = (sec ^ int.from_bytes(TaggedHash("BIP0340/aux", aux), 'big')).to_bytes(32, 'big') - kp = int.from_bytes(TaggedHash("BIP0340/nonce", t + P[0].to_bytes(32, 'big') + msg), 'big') % SECP256K1_ORDER + kp = int.from_bytes(TaggedHash("BIP0340/nonce", t + P.to_bytes_xonly() + msg), 'big') % ORDER assert kp != 0 - R = SECP256K1.affine(SECP256K1.mul([(SECP256K1_G, kp)])) - k = kp if 
SECP256K1.has_even_y(R) != flip_r else SECP256K1_ORDER - kp - e = int.from_bytes(TaggedHash("BIP0340/challenge", R[0].to_bytes(32, 'big') + P[0].to_bytes(32, 'big') + msg), 'big') % SECP256K1_ORDER - return R[0].to_bytes(32, 'big') + ((k + e * sec) % SECP256K1_ORDER).to_bytes(32, 'big') + R = kp * secp256k1.G + k = kp if R.y.is_even() != flip_r else ORDER - kp + e = int.from_bytes(TaggedHash("BIP0340/challenge", R.to_bytes_xonly() + P.to_bytes_xonly() + msg), 'big') % ORDER + return R.to_bytes_xonly() + ((k + e * sec) % ORDER).to_bytes(32, 'big') + class TestFrameworkKey(unittest.TestCase): def test_schnorr(self): """Test the Python Schnorr implementation.""" - byte_arrays = [generate_privkey() for _ in range(3)] + [v.to_bytes(32, 'big') for v in [0, SECP256K1_ORDER - 1, SECP256K1_ORDER, 2**256 - 1]] + byte_arrays = [generate_privkey() for _ in range(3)] + [v.to_bytes(32, 'big') for v in [0, ORDER - 1, ORDER, 2**256 - 1]] keys = {} for privkey in byte_arrays: # build array of key/pubkey pairs pubkey, _ = compute_xonly_pubkey(privkey) diff --git a/test/functional/test_framework/messages.py b/test/functional/test_framework/messages.py index a6764365c5..4d635556f4 100755 --- a/test/functional/test_framework/messages.py +++ b/test/functional/test_framework/messages.py @@ -27,6 +27,7 @@ import random import socket import struct import time +import unittest from test_framework.siphash import siphash256 from test_framework.util import assert_equal @@ -77,6 +78,10 @@ def sha256(s): return hashlib.sha256(s).digest() +def sha3(s): + return hashlib.sha3_256(s).digest() + + def hash256(s): return sha256(sha256(s)) @@ -229,16 +234,25 @@ class CAddress: # see https://github.com/bitcoin/bips/blob/master/bip-0155.mediawiki NET_IPV4 = 1 + NET_IPV6 = 2 + NET_TORV3 = 4 NET_I2P = 5 + NET_CJDNS = 6 ADDRV2_NET_NAME = { NET_IPV4: "IPv4", - NET_I2P: "I2P" + NET_IPV6: "IPv6", + NET_TORV3: "TorV3", + NET_I2P: "I2P", + NET_CJDNS: "CJDNS" } ADDRV2_ADDRESS_LENGTH = { NET_IPV4: 4, - NET_I2P: 32 + NET_IPV6: 16, + NET_TORV3: 32, + NET_I2P: 32, + NET_CJDNS: 16 } I2P_PAD = "====" @@ -285,7 +299,7 @@ class CAddress: self.nServices = deser_compact_size(f) self.net = struct.unpack("B", f.read(1))[0] - assert self.net in (self.NET_IPV4, self.NET_I2P) + assert self.net in self.ADDRV2_NET_NAME address_length = deser_compact_size(f) assert address_length == self.ADDRV2_ADDRESS_LENGTH[self.net] @@ -293,14 +307,25 @@ class CAddress: addr_bytes = f.read(address_length) if self.net == self.NET_IPV4: self.ip = socket.inet_ntoa(addr_bytes) - else: + elif self.net == self.NET_IPV6: + self.ip = socket.inet_ntop(socket.AF_INET6, addr_bytes) + elif self.net == self.NET_TORV3: + prefix = b".onion checksum" + version = bytes([3]) + checksum = sha3(prefix + addr_bytes + version)[:2] + self.ip = b32encode(addr_bytes + checksum + version).decode("ascii").lower() + ".onion" + elif self.net == self.NET_I2P: self.ip = b32encode(addr_bytes)[0:-len(self.I2P_PAD)].decode("ascii").lower() + ".b32.i2p" + elif self.net == self.NET_CJDNS: + self.ip = socket.inet_ntop(socket.AF_INET6, addr_bytes) + else: + raise Exception(f"Address type not supported") self.port = struct.unpack(">H", f.read(2))[0] def serialize_v2(self): """Serialize in addrv2 format (BIP155)""" - assert self.net in (self.NET_IPV4, self.NET_I2P) + assert self.net in self.ADDRV2_NET_NAME r = b"" r += struct.pack("<I", self.time) r += ser_compact_size(self.nServices) @@ -308,10 +333,20 @@ class CAddress: r += ser_compact_size(self.ADDRV2_ADDRESS_LENGTH[self.net]) if self.net == 
self.NET_IPV4: r += socket.inet_aton(self.ip) - else: + elif self.net == self.NET_IPV6: + r += socket.inet_pton(socket.AF_INET6, self.ip) + elif self.net == self.NET_TORV3: + sfx = ".onion" + assert self.ip.endswith(sfx) + r += b32decode(self.ip[0:-len(sfx)], True)[0:32] + elif self.net == self.NET_I2P: sfx = ".b32.i2p" assert self.ip.endswith(sfx) r += b32decode(self.ip[0:-len(sfx)] + self.I2P_PAD, True) + elif self.net == self.NET_CJDNS: + r += socket.inet_pton(socket.AF_INET6, self.ip) + else: + raise Exception(f"Address type not supported") r += struct.pack(">H", self.port) return r @@ -1852,3 +1887,19 @@ class msg_sendtxrcncl: def __repr__(self): return "msg_sendtxrcncl(version=%lu, salt=%lu)" %\ (self.version, self.salt) + +class TestFrameworkScript(unittest.TestCase): + def test_addrv2_encode_decode(self): + def check_addrv2(ip, net): + addr = CAddress() + addr.net, addr.ip = net, ip + ser = addr.serialize_v2() + actual = CAddress() + actual.deserialize_v2(BytesIO(ser)) + self.assertEqual(actual, addr) + + check_addrv2("1.65.195.98", CAddress.NET_IPV4) + check_addrv2("2001:41f0::62:6974:636f:696e", CAddress.NET_IPV6) + check_addrv2("2bqghnldu6mcug4pikzprwhtjjnsyederctvci6klcwzepnjd46ikjyd.onion", CAddress.NET_TORV3) + check_addrv2("255fhcp6ajvftnyo7bwz3an3t4a4brhopm3bamyh2iu5r3gnr2rq.b32.i2p", CAddress.NET_I2P) + check_addrv2("fc32:17ea:e415:c3bf:9808:149d:b5a2:c9aa", CAddress.NET_CJDNS) diff --git a/test/functional/test_framework/p2p.py b/test/functional/test_framework/p2p.py index 2433e52671..ceb4bbd7de 100755 --- a/test/functional/test_framework/p2p.py +++ b/test/functional/test_framework/p2p.py @@ -95,6 +95,12 @@ P2P_SUBVERSION = "/python-p2p-tester:0.0.3/" P2P_VERSION_RELAY = 1 # Delay after receiving a tx inv before requesting transactions from non-preferred peers, in seconds NONPREF_PEER_TX_DELAY = 2 +# Delay for requesting transactions via txids if we have wtxid-relaying peers, in seconds +TXID_RELAY_DELAY = 2 +# Delay for requesting transactions if the peer has MAX_PEER_TX_REQUEST_IN_FLIGHT or more requests +OVERLOADED_PEER_TX_DELAY = 2 +# How long to wait before downloading a transaction from an additional peer +GETDATA_TX_INTERVAL = 60 MESSAGEMAP = { b"addr": msg_addr, @@ -552,16 +558,12 @@ class P2PInterface(P2PConnection): self.send_message(message) self.sync_with_ping(timeout=timeout) - def sync_send_with_ping(self, timeout=60): - """Ensure SendMessages is called on this connection""" - # Calling sync_with_ping twice requires that the node calls + def sync_with_ping(self, timeout=60): + """Ensure ProcessMessages and SendMessages is called on this connection""" + # Sending two pings back-to-back, requires that the node calls # `ProcessMessage` twice, and thus ensures `SendMessages` must have # been called at least once - self.sync_with_ping() - self.sync_with_ping() - - def sync_with_ping(self, timeout=60): - """Ensure ProcessMessages is called on this connection""" + self.send_message(msg_ping(nonce=0)) self.send_message(msg_ping(nonce=self.ping_counter)) def test_function(): diff --git a/test/functional/test_framework/script.py b/test/functional/test_framework/script.py index 443cae86a1..78f58cf11f 100644 --- a/test/functional/test_framework/script.py +++ b/test/functional/test_framework/script.py @@ -689,6 +689,16 @@ def LegacySignatureHash(*args, **kwargs): else: return (hash256(msg), err) +def sign_input_legacy(tx, input_index, input_scriptpubkey, privkey, sighash_type=SIGHASH_ALL): + """Add legacy ECDSA signature for a given transaction input. 
Note that the signature + is prepended to the scriptSig field, i.e. additional data pushes necessary for more + complex spends than P2PK (e.g. pubkey for P2PKH) can be already set before.""" + (sighash, err) = LegacySignatureHash(input_scriptpubkey, tx, input_index, sighash_type) + assert err is None + der_sig = privkey.sign_ecdsa(sighash) + tx.vin[input_index].scriptSig = bytes(CScript([der_sig + bytes([sighash_type])])) + tx.vin[input_index].scriptSig + tx.rehash() + # TODO: Allow cached hashPrevouts/hashSequence/hashOutputs to be provided. # Performance optimization probably not necessary for python tests, however. # Note that this corresponds to sigversion == 1 in EvalScript, which is used diff --git a/test/functional/test_framework/secp256k1.py b/test/functional/test_framework/secp256k1.py new file mode 100644 index 0000000000..2e9e419da5 --- /dev/null +++ b/test/functional/test_framework/secp256k1.py @@ -0,0 +1,346 @@ +# Copyright (c) 2022-2023 The Bitcoin Core developers +# Distributed under the MIT software license, see the accompanying +# file COPYING or http://www.opensource.org/licenses/mit-license.php. + +"""Test-only implementation of low-level secp256k1 field and group arithmetic + +It is designed for ease of understanding, not performance. + +WARNING: This code is slow and trivially vulnerable to side channel attacks. Do not use for +anything but tests. + +Exports: +* FE: class for secp256k1 field elements +* GE: class for secp256k1 group elements +* G: the secp256k1 generator point +""" + + +class FE: + """Objects of this class represent elements of the field GF(2**256 - 2**32 - 977). + + They are represented internally in numerator / denominator form, in order to delay inversions. + """ + + # The size of the field (also its modulus and characteristic). 
+ SIZE = 2**256 - 2**32 - 977 + + def __init__(self, a=0, b=1): + """Initialize a field element a/b; both a and b can be ints or field elements.""" + if isinstance(a, FE): + num = a._num + den = a._den + else: + num = a % FE.SIZE + den = 1 + if isinstance(b, FE): + den = (den * b._num) % FE.SIZE + num = (num * b._den) % FE.SIZE + else: + den = (den * b) % FE.SIZE + assert den != 0 + if num == 0: + den = 1 + self._num = num + self._den = den + + def __add__(self, a): + """Compute the sum of two field elements (second may be int).""" + if isinstance(a, FE): + return FE(self._num * a._den + self._den * a._num, self._den * a._den) + return FE(self._num + self._den * a, self._den) + + def __radd__(self, a): + """Compute the sum of an integer and a field element.""" + return FE(a) + self + + def __sub__(self, a): + """Compute the difference of two field elements (second may be int).""" + if isinstance(a, FE): + return FE(self._num * a._den - self._den * a._num, self._den * a._den) + return FE(self._num - self._den * a, self._den) + + def __rsub__(self, a): + """Compute the difference of an integer and a field element.""" + return FE(a) - self + + def __mul__(self, a): + """Compute the product of two field elements (second may be int).""" + if isinstance(a, FE): + return FE(self._num * a._num, self._den * a._den) + return FE(self._num * a, self._den) + + def __rmul__(self, a): + """Compute the product of an integer with a field element.""" + return FE(a) * self + + def __truediv__(self, a): + """Compute the ratio of two field elements (second may be int).""" + return FE(self, a) + + def __pow__(self, a): + """Raise a field element to an integer power.""" + return FE(pow(self._num, a, FE.SIZE), pow(self._den, a, FE.SIZE)) + + def __neg__(self): + """Negate a field element.""" + return FE(-self._num, self._den) + + def __int__(self): + """Convert a field element to an integer in range 0..p-1. The result is cached.""" + if self._den != 1: + self._num = (self._num * pow(self._den, -1, FE.SIZE)) % FE.SIZE + self._den = 1 + return self._num + + def sqrt(self): + """Compute the square root of a field element if it exists (None otherwise). + + Due to the fact that our modulus is of the form (p % 4) == 3, the Tonelli-Shanks + algorithm (https://en.wikipedia.org/wiki/Tonelli-Shanks_algorithm) is simply + raising the argument to the power (p + 1) / 4. + + To see why: (p-1) % 2 = 0, so 2 divides the order of the multiplicative group, + and thus only half of the non-zero field elements are squares. An element a is + a (nonzero) square when Euler's criterion, a^((p-1)/2) = 1 (mod p), holds. We're + looking for x such that x^2 = a (mod p). Given a^((p-1)/2) = 1, that is equivalent + to x^2 = a^(1 + (p-1)/2) mod p. As (1 + (p-1)/2) is even, this is equivalent to + x = a^((1 + (p-1)/2)/2) mod p, or x = a^((p+1)/4) mod p.""" + v = int(self) + s = pow(v, (FE.SIZE + 1) // 4, FE.SIZE) + if s**2 % FE.SIZE == v: + return FE(s) + return None + + def is_square(self): + """Determine if this field element has a square root.""" + # A more efficient algorithm is possible here (Jacobi symbol). 
+ return self.sqrt() is not None + + def is_even(self): + """Determine whether this field element, represented as integer in 0..p-1, is even.""" + return int(self) & 1 == 0 + + def __eq__(self, a): + """Check whether two field elements are equal (second may be an int).""" + if isinstance(a, FE): + return (self._num * a._den - self._den * a._num) % FE.SIZE == 0 + return (self._num - self._den * a) % FE.SIZE == 0 + + def to_bytes(self): + """Convert a field element to a 32-byte array (BE byte order).""" + return int(self).to_bytes(32, 'big') + + @staticmethod + def from_bytes(b): + """Convert a 32-byte array to a field element (BE byte order, no overflow allowed).""" + v = int.from_bytes(b, 'big') + if v >= FE.SIZE: + return None + return FE(v) + + def __str__(self): + """Convert this field element to a 64 character hex string.""" + return f"{int(self):064x}" + + def __repr__(self): + """Get a string representation of this field element.""" + return f"FE(0x{int(self):x})" + + +class GE: + """Objects of this class represent secp256k1 group elements (curve points or infinity) + + Normal points on the curve have fields: + * x: the x coordinate (a field element) + * y: the y coordinate (a field element, satisfying y^2 = x^3 + 7) + * infinity: False + + The point at infinity has field: + * infinity: True + """ + + # Order of the group (number of points on the curve, plus 1 for infinity) + ORDER = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141 + + # Number of valid distinct x coordinates on the curve. + ORDER_HALF = ORDER // 2 + + def __init__(self, x=None, y=None): + """Initialize a group element with specified x and y coordinates, or infinity.""" + if x is None: + # Initialize as infinity. + assert y is None + self.infinity = True + else: + # Initialize as point on the curve (and check that it is). + fx = FE(x) + fy = FE(y) + assert fy**2 == fx**3 + 7 + self.infinity = False + self.x = fx + self.y = fy + + def __add__(self, a): + """Add two group elements together.""" + # Deal with infinity: a + infinity == infinity + a == a. + if self.infinity: + return a + if a.infinity: + return self + if self.x == a.x: + if self.y != a.y: + # A point added to its own negation is infinity. + assert self.y + a.y == 0 + return GE() + else: + # For identical inputs, use the tangent (doubling formula). + lam = (3 * self.x**2) / (2 * self.y) + else: + # For distinct inputs, use the line through both points (adding formula). + lam = (self.y - a.y) / (self.x - a.x) + # Determine point opposite to the intersection of that line with the curve. + x = lam**2 - (self.x + a.x) + y = lam * (self.x - x) - self.y + return GE(x, y) + + @staticmethod + def mul(*aps): + """Compute a (batch) scalar group element multiplication. + + GE.mul((a1, p1), (a2, p2), (a3, p3)) is identical to a1*p1 + a2*p2 + a3*p3, + but more efficient.""" + # Reduce all the scalars modulo order first (so we can deal with negatives etc). + naps = [(a % GE.ORDER, p) for a, p in aps] + # Start with point at infinity. + r = GE() + # Iterate over all bit positions, from high to low. + for i in range(255, -1, -1): + # Double what we have so far. + r = r + r + # Add then add the points for which the corresponding scalar bit is set. 
+ for (a, p) in naps: + if (a >> i) & 1: + r += p + return r + + def __rmul__(self, a): + """Multiply an integer with a group element.""" + if self == G: + return FAST_G.mul(a) + return GE.mul((a, self)) + + def __neg__(self): + """Compute the negation of a group element.""" + if self.infinity: + return self + return GE(self.x, -self.y) + + def to_bytes_compressed(self): + """Convert a non-infinite group element to 33-byte compressed encoding.""" + assert not self.infinity + return bytes([3 - self.y.is_even()]) + self.x.to_bytes() + + def to_bytes_uncompressed(self): + """Convert a non-infinite group element to 65-byte uncompressed encoding.""" + assert not self.infinity + return b'\x04' + self.x.to_bytes() + self.y.to_bytes() + + def to_bytes_xonly(self): + """Convert (the x coordinate of) a non-infinite group element to 32-byte xonly encoding.""" + assert not self.infinity + return self.x.to_bytes() + + @staticmethod + def lift_x(x): + """Return group element with specified field element as x coordinate (and even y).""" + y = (FE(x)**3 + 7).sqrt() + if y is None: + return None + if not y.is_even(): + y = -y + return GE(x, y) + + @staticmethod + def from_bytes(b): + """Convert a compressed or uncompressed encoding to a group element.""" + assert len(b) in (33, 65) + if len(b) == 33: + if b[0] != 2 and b[0] != 3: + return None + x = FE.from_bytes(b[1:]) + if x is None: + return None + r = GE.lift_x(x) + if r is None: + return None + if b[0] == 3: + r = -r + return r + else: + if b[0] != 4: + return None + x = FE.from_bytes(b[1:33]) + y = FE.from_bytes(b[33:]) + if y**2 != x**3 + 7: + return None + return GE(x, y) + + @staticmethod + def from_bytes_xonly(b): + """Convert a point given in xonly encoding to a group element.""" + assert len(b) == 32 + x = FE.from_bytes(b) + if x is None: + return None + return GE.lift_x(x) + + @staticmethod + def is_valid_x(x): + """Determine whether the provided field element is a valid X coordinate.""" + return (FE(x)**3 + 7).is_square() + + def __str__(self): + """Convert this group element to a string.""" + if self.infinity: + return "(inf)" + return f"({self.x},{self.y})" + + def __repr__(self): + """Get a string representation for this group element.""" + if self.infinity: + return "GE()" + return f"GE(0x{int(self.x):x},0x{int(self.y):x})" + +# The secp256k1 generator point +G = GE.lift_x(0x79BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F81798) + + +class FastGEMul: + """Table for fast multiplication with a constant group element. + + Speed up scalar multiplication with a fixed point P by using a precomputed lookup table with + its powers of 2: + + table = [P, 2*P, 4*P, (2^3)*P, (2^4)*P, ..., (2^255)*P] + + During multiplication, the points corresponding to each bit set in the scalar are added up, + i.e. on average ~128 point additions take place. 
+ """ + + def __init__(self, p): + self.table = [p] # table[i] = (2^i) * p + for _ in range(255): + p = p + p + self.table.append(p) + + def mul(self, a): + result = GE() + a = a % GE.ORDER + for bit in range(a.bit_length()): + if a & (1 << bit): + result += self.table[bit] + return result + +# Precomputed table with multiples of G for fast multiplication +FAST_G = FastGEMul(G) diff --git a/test/functional/test_framework/siphash.py b/test/functional/test_framework/siphash.py index 884dbcab46..bd13b2c948 100644 --- a/test/functional/test_framework/siphash.py +++ b/test/functional/test_framework/siphash.py @@ -31,7 +31,7 @@ def siphash_round(v0, v1, v2, v3): def siphash(k0, k1, data): - assert type(data) == bytes + assert type(data) is bytes v0 = 0x736f6d6570736575 ^ k0 v1 = 0x646f72616e646f6d ^ k1 v2 = 0x6c7967656e657261 ^ k0 @@ -61,5 +61,5 @@ def siphash(k0, k1, data): def siphash256(k0, k1, num): - assert type(num) == int + assert type(num) is int return siphash(k0, k1, num.to_bytes(32, 'little')) diff --git a/test/functional/test_framework/socks5.py b/test/functional/test_framework/socks5.py index 799b1c74b8..0ca06a7396 100644 --- a/test/functional/test_framework/socks5.py +++ b/test/functional/test_framework/socks5.py @@ -40,6 +40,7 @@ class Socks5Configuration(): self.af = socket.AF_INET # Bind address family self.unauth = False # Support unauthenticated self.auth = False # Support authentication + self.keep_alive = False # Do not automatically close connections class Socks5Command(): """Information about an incoming socks5 command.""" @@ -115,13 +116,14 @@ class Socks5Connection(): cmdin = Socks5Command(cmd, atyp, addr, port, username, password) self.serv.queue.put(cmdin) - logger.info('Proxy: %s', cmdin) + logger.debug('Proxy: %s', cmdin) # Fall through to disconnect except Exception as e: logger.exception("socks5 request handling failed.") self.serv.queue.put(e) finally: - self.conn.close() + if not self.serv.keep_alive: + self.conn.close() class Socks5Server(): def __init__(self, conf): @@ -133,6 +135,7 @@ class Socks5Server(): self.running = False self.thread = None self.queue = queue.Queue() # report connections and exceptions to client + self.keep_alive = conf.keep_alive def run(self): while self.running: @@ -157,4 +160,3 @@ class Socks5Server(): s.connect(self.conf.addr) s.close() self.thread.join() - diff --git a/test/functional/test_framework/test_framework.py b/test/functional/test_framework/test_framework.py index d4dc90a517..73e7516ea7 100755 --- a/test/functional/test_framework/test_framework.py +++ b/test/functional/test_framework/test_framework.py @@ -92,7 +92,7 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass): This class also contains various public and private helper methods.""" - def __init__(self): + def __init__(self) -> None: """Sets test framework defaults. Do not override this method. 
Instead, override the set_test_params() method""" self.chain: str = 'regtest' self.setup_clean_chain: bool = False @@ -103,7 +103,6 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass): self.supports_cli = True self.bind_to_localhost_only = True self.parse_args() - self.disable_syscall_sandbox = self.options.nosandbox or self.options.valgrind self.default_wallet_name = "default_wallet" if self.options.descriptors else "" self.wallet_data_filename = "wallet.dat" # Optional list of wallet names that can be set in set_test_params to @@ -160,8 +159,6 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass): parser = argparse.ArgumentParser(usage="%(prog)s [options]") parser.add_argument("--nocleanup", dest="nocleanup", default=False, action="store_true", help="Leave bitcoinds and test.* datadir on exit or error") - parser.add_argument("--nosandbox", dest="nosandbox", default=False, action="store_true", - help="Don't use the syscall sandbox") parser.add_argument("--noshutdown", dest="noshutdown", default=False, action="store_true", help="Don't stop bitcoinds after the test execution") parser.add_argument("--cachedir", dest="cachedir", default=os.path.abspath(os.path.dirname(os.path.realpath(__file__)) + "/../../cache"), @@ -188,7 +185,7 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass): parser.add_argument("--perf", dest="perf", default=False, action="store_true", help="profile running nodes with perf for the duration of the test") parser.add_argument("--valgrind", dest="valgrind", default=False, action="store_true", - help="run nodes under the valgrind memory error detector: expect at least a ~10x slowdown. valgrind 3.14 or later required. Forces --nosandbox.") + help="run nodes under the valgrind memory error detector: expect at least a ~10x slowdown. valgrind 3.14 or later required.") parser.add_argument("--randomseed", type=int, help="set a random seed for deterministically reproducing a previous test run") parser.add_argument("--timeout-factor", dest="timeout_factor", type=float, help="adjust test timeouts by a factor. Setting it to 0 disables all timeouts") @@ -497,11 +494,6 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass): extra_args = [[]] * num_nodes if versions is None: versions = [None] * num_nodes - if self.is_syscall_sandbox_compiled() and not self.disable_syscall_sandbox: - for i in range(len(extra_args)): - # The -sandbox argument is not present in the v22.0 release. 
- if versions[i] is None or versions[i] >= 229900: - extra_args[i] = extra_args[i] + ["-sandbox=log-and-abort"] if binary is None: binary = [get_bin_from_version(v, 'bitcoind', self.options.bitcoind) for v in versions] if binary_cli is None: @@ -987,7 +979,3 @@ class BitcoinTestFramework(metaclass=BitcoinTestMetaClass): def is_bdb_compiled(self): """Checks whether the wallet module was compiled with BDB support.""" return self.config["components"].getboolean("USE_BDB") - - def is_syscall_sandbox_compiled(self): - """Checks whether the syscall sandbox was compiled.""" - return self.config["components"].getboolean("ENABLE_SYSCALL_SANDBOX") diff --git a/test/functional/test_framework/test_node.py b/test/functional/test_framework/test_node.py index 9583d6f7d7..6e12f6c964 100755 --- a/test/functional/test_framework/test_node.py +++ b/test/functional/test_framework/test_node.py @@ -22,7 +22,10 @@ import shlex import sys from pathlib import Path -from .authproxy import JSONRPCException +from .authproxy import ( + JSONRPCException, + serialization_fallback, +) from .descriptors import descsum_create from .p2p import P2P_SUBVERSION from .util import ( @@ -35,7 +38,6 @@ from .util import ( rpc_url, wait_until_helper, p2p_port, - EncodeDecimal, ) BITCOIND_PROC_WAIT_TIMEOUT = 60 @@ -142,6 +144,8 @@ class TestNode(): self.p2ps = [] self.timeout_factor = timeout_factor + self.mocktime = None + AddressKeyPair = collections.namedtuple('AddressKeyPair', ['address', 'key']) PRIV_KEYS = [ # address , privkey @@ -248,7 +252,7 @@ class TestNode(): # Wait for the node to finish reindex, block import, and # loading the mempool. Usually importing happens fast or # even "immediate" when the node is started. However, there - # is no guarantee and sometimes ThreadImport might finish + # is no guarantee and sometimes ImportBlocks might finish # later. This is going to cause intermittent test failures, # because generally the tests assume the node is fully # ready after being started. @@ -322,6 +326,15 @@ class TestNode(): assert not invalid_call return self.__getattr__('generatetodescriptor')(*args, **kwargs) + def setmocktime(self, timestamp): + """Wrapper for setmocktime RPC, sets self.mocktime""" + if timestamp == 0: + # setmocktime(0) resets to system time. + self.mocktime = None + else: + self.mocktime = timestamp + return self.__getattr__('setmocktime')(timestamp) + def get_wallet_rpc(self, wallet_name): if self.use_cli: return RPCOverloadWrapper(self.cli("-rpcwallet={}".format(wallet_name)), True, self.descriptors) @@ -351,21 +364,13 @@ class TestNode(): for profile_name in tuple(self.perf_subprocesses.keys()): self._stop_perf(profile_name) - # Check that stderr is as expected - self.stderr.seek(0) - stderr = self.stderr.read().decode('utf-8').strip() - if stderr != expected_stderr: - raise AssertionError("Unexpected stderr {} != {}".format(stderr, expected_stderr)) - - self.stdout.close() - self.stderr.close() - del self.p2ps[:] + assert (not expected_stderr) or wait_until_stopped # Must wait to check stderr if wait_until_stopped: - self.wait_until_stopped() + self.wait_until_stopped(expected_stderr=expected_stderr) - def is_node_stopped(self, expected_ret_code=0): + def is_node_stopped(self, *, expected_stderr="", expected_ret_code=0): """Checks whether the node has stopped. Returns True if the node has stopped. False otherwise. @@ -379,6 +384,15 @@ class TestNode(): # process has stopped. Assert that it didn't return an error code. 
assert return_code == expected_ret_code, self._node_msg( f"Node returned unexpected exit code ({return_code}) vs ({expected_ret_code}) when stopping") + # Check that stderr is as expected + self.stderr.seek(0) + stderr = self.stderr.read().decode('utf-8').strip() + if stderr != expected_stderr: + raise AssertionError("Unexpected stderr {} != {}".format(stderr, expected_stderr)) + + self.stdout.close() + self.stderr.close() + self.running = False self.process = None self.rpc_connected = False @@ -386,9 +400,9 @@ class TestNode(): self.log.debug("Node stopped") return True - def wait_until_stopped(self, timeout=BITCOIND_PROC_WAIT_TIMEOUT, expect_error=False): + def wait_until_stopped(self, *, timeout=BITCOIND_PROC_WAIT_TIMEOUT, expect_error=False, **kwargs): expected_ret_code = 1 if expect_error else 0 # Whether node shutdown return EXIT_FAILURE or EXIT_SUCCESS - wait_until_helper(lambda: self.is_node_stopped(expected_ret_code=expected_ret_code), timeout=timeout, timeout_factor=self.timeout_factor) + wait_until_helper(lambda: self.is_node_stopped(expected_ret_code=expected_ret_code, **kwargs), timeout=timeout, timeout_factor=self.timeout_factor) def replace_in_config(self, replacements): """ @@ -406,15 +420,27 @@ class TestNode(): conf.write(conf_data) @property + def datadir_path(self) -> Path: + return Path(self.datadir) + + @property def chain_path(self) -> Path: - return Path(self.datadir) / self.chain + return self.datadir_path / self.chain @property def debug_log_path(self) -> Path: return self.chain_path / 'debug.log' - def debug_log_bytes(self) -> int: - with open(self.debug_log_path, encoding='utf-8') as dl: + @property + def blocks_path(self) -> Path: + return self.chain_path / "blocks" + + @property + def wallets_path(self) -> Path: + return self.chain_path / "wallets" + + def debug_log_size(self, **kwargs) -> int: + with open(self.debug_log_path, **kwargs) as dl: dl.seek(0, 2) return dl.tell() @@ -423,13 +449,13 @@ class TestNode(): if unexpected_msgs is None: unexpected_msgs = [] time_end = time.time() + timeout * self.timeout_factor - prev_size = self.debug_log_bytes() + prev_size = self.debug_log_size(encoding="utf-8") # Must use same encoding that is used to read() below yield while True: found = True - with open(self.debug_log_path, encoding='utf-8') as dl: + with open(self.debug_log_path, encoding="utf-8", errors="replace") as dl: dl.seek(prev_size) log = dl.read() print_log = " - " + "\n - ".join(log.splitlines()) @@ -454,7 +480,7 @@ class TestNode(): the number of log lines we encountered when matching """ time_end = time.time() + timeout * self.timeout_factor - prev_size = self.debug_log_bytes() + prev_size = self.debug_log_size(mode="rb") # Must use same mode that is used to read() below yield @@ -633,10 +659,14 @@ class TestNode(): # in comparison to the upside of making tests less fragile and unexpected intermittent errors less likely. p2p_conn.sync_with_ping() - # Consistency check that the Bitcoin Core has received our user agent string. This checks the - # node's newest peer. It could be racy if another Bitcoin Core node has connected since we opened - # our connection, but we don't expect that to happen. - assert_equal(self.getpeerinfo()[-1]['subver'], P2P_SUBVERSION) + # Consistency check that the node received our user agent string. + # Find our connection in getpeerinfo by our address:port and theirs, as this combination is unique. 
+ sockname = p2p_conn._transport.get_extra_info("socket").getsockname() + our_addr_and_port = f"{sockname[0]}:{sockname[1]}" + dst_addr_and_port = f"{p2p_conn.dstaddr}:{p2p_conn.dstport}" + info = [peer for peer in self.getpeerinfo() if peer["addr"] == our_addr_and_port and peer["addrbind"] == dst_addr_and_port] + assert_equal(len(info), 1) + assert_equal(info[0]["subver"], P2P_SUBVERSION) return p2p_conn @@ -685,6 +715,13 @@ class TestNode(): wait_until_helper(lambda: self.num_test_p2p_connections() == 0, timeout_factor=self.timeout_factor) + def bumpmocktime(self, seconds): + """Fast forward using setmocktime to self.mocktime + seconds. Requires setmocktime to have + been called at some point in the past.""" + assert self.mocktime + self.mocktime += seconds + self.setmocktime(self.mocktime) + class TestNodeCLIAttr: def __init__(self, cli, command): @@ -704,7 +741,7 @@ def arg_to_cli(arg): elif arg is None: return 'null' elif isinstance(arg, dict) or isinstance(arg, list): - return json.dumps(arg, default=EncodeDecimal) + return json.dumps(arg, default=serialization_fallback) else: return str(arg) diff --git a/test/functional/test_framework/util.py b/test/functional/test_framework/util.py index d3b3e4d536..9143397042 100644 --- a/test/functional/test_framework/util.py +++ b/test/functional/test_framework/util.py @@ -211,12 +211,6 @@ def check_json_precision(): raise RuntimeError("JSON encode/decode loses precision") -def EncodeDecimal(o): - if isinstance(o, Decimal): - return str(o) - raise TypeError(repr(o) + " is not JSON serializable") - - def count_bytes(hex_string): return len(bytearray.fromhex(hex_string)) @@ -315,7 +309,7 @@ class PortSeed: n = None -def get_rpc_proxy(url: str, node_number: int, *, timeout: int=None, coveragedir: str=None) -> coverage.AuthServiceProxyWrapper: +def get_rpc_proxy(url: str, node_number: int, *, timeout: Optional[int]=None, coveragedir: Optional[str]=None) -> coverage.AuthServiceProxyWrapper: """ Args: url: URL of the RPC server to call diff --git a/test/functional/test_framework/wallet.py b/test/functional/test_framework/wallet.py index 271095ea21..4d75194353 100644 --- a/test/functional/test_framework/wallet.py +++ b/test/functional/test_framework/wallet.py @@ -36,12 +36,11 @@ from test_framework.messages import ( ) from test_framework.script import ( CScript, - LegacySignatureHash, LEAF_VERSION_TAPSCRIPT, OP_NOP, OP_RETURN, OP_TRUE, - SIGHASH_ALL, + sign_input_legacy, taproot_construct, ) from test_framework.script_util import ( @@ -166,18 +165,16 @@ class MiniWallet: def sign_tx(self, tx, fixed_length=True): if self._mode == MiniWalletMode.RAW_P2PK: - (sighash, err) = LegacySignatureHash(CScript(self._scriptPubKey), tx, 0, SIGHASH_ALL) - assert err is None # for exact fee calculation, create only signatures with fixed size by default (>49.89% probability): # 65 bytes: high-R val (33 bytes) + low-S val (32 bytes) - # with the DER header/skeleton data of 6 bytes added, this leads to a target size of 71 bytes - der_sig = b'' - while not len(der_sig) == 71: - der_sig = self._priv_key.sign_ecdsa(sighash) + # with the DER header/skeleton data of 6 bytes added, plus 2 bytes scriptSig overhead + # (OP_PUSHn and SIGHASH_ALL), this leads to a scriptSig target size of 73 bytes + tx.vin[0].scriptSig = b'' + while not len(tx.vin[0].scriptSig) == 73: + tx.vin[0].scriptSig = b'' + sign_input_legacy(tx, 0, self._scriptPubKey, self._priv_key) if not fixed_length: break - tx.vin[0].scriptSig = CScript([der_sig + bytes(bytearray([SIGHASH_ALL]))]) - tx.rehash() elif 
self._mode == MiniWalletMode.RAW_OP_TRUE: for i in tx.vin: i.scriptSig = CScript([OP_NOP] * 43) # pad to identical size diff --git a/test/functional/test_framework/xswiftec_inv_test_vectors.csv b/test/functional/test_framework/xswiftec_inv_test_vectors.csv new file mode 100644 index 0000000000..138c4cf85c --- /dev/null +++ b/test/functional/test_framework/xswiftec_inv_test_vectors.csv @@ -0,0 +1,33 @@ +u,x,case0_t,case1_t,case2_t,case3_t,case4_t,case5_t,case6_t,case7_t,comment
+05ff6bdad900fc3261bc7fe34e2fb0f569f06e091ae437d3a52e9da0cbfb9590,80cdf63774ec7022c89a5a8558e373a279170285e0ab27412dbce510bdfe23fc,,,45654798ece071ba79286d04f7f3eb1c3f1d17dd883610f2ad2efd82a287466b,0aeaa886f6b76c7158452418cbf5033adc5747e9e9b5d3b2303db96936528557,,,ba9ab867131f8e4586d792fb080c14e3c0e2e82277c9ef0d52d1027c5d78b5c4,f51557790948938ea7badbe7340afcc523a8b816164a2c4dcfc24695c9ad76d8,case0:bad[valid_x(-x-u)];case1:bad[valid_x(-x-u)];case2:info[v=0]&ok;case3:ok;case4:bad[valid_x(-x-u)];case5:bad[valid_x(-x-u)];case6:info[v=0]&ok;case7:ok
+1737a85f4c8d146cec96e3ffdca76d9903dcf3bd53061868d478c78c63c2aa9e,39e48dd150d2f429be088dfd5b61882e7e8407483702ae9a5ab35927b15f85ea,1be8cc0b04be0c681d0c6a68f733f82c6c896e0c8a262fcd392918e303a7abf4,605b5814bf9b8cb066667c9e5480d22dc5b6c92f14b4af3ee0a9eb83b03685e3,,,e41733f4fb41f397e2f3959708cc07d3937691f375d9d032c6d6e71bfc58503b,9fa4a7eb4064734f99998361ab7f2dd23a4936d0eb4b50c11f56147b4fc9764c,,,case0:ok;case1:ok;case2:info[v=0]&bad[non_square(s)];case3:bad[non_square(s)];case4:ok;case5:ok;case6:info[v=0]&bad[non_square(s)];case7:bad[non_square(s)]
+1aaa1ccebf9c724191033df366b36f691c4d902c228033ff4516d122b2564f68,c75541259d3ba98f207eaa30c69634d187d0b6da594e719e420f4898638fc5b0,,,,,,,,,case0:bad[valid_x(-x-u)];case1:bad[valid_x(-x-u)];case2:bad[non_square(q)];case3:bad[non_square(q)];case4:bad[valid_x(-x-u)];case5:bad[valid_x(-x-u)];case6:bad[non_square(q)];case7:bad[non_square(q)]
+2323a1d079b0fd72fc8bb62ec34230a815cb0596c2bfac998bd6b84260f5dc26,239342dfb675500a34a196310b8d87d54f49dcac9da50c1743ceab41a7b249ff,f63580b8aa49c4846de56e39e1b3e73f171e881eba8c66f614e67e5c975dfc07,b6307b332e699f1cf77841d90af25365404deb7fed5edb3090db49e642a156b6,,,09ca7f4755b63b7b921a91c61e4c18c0e8e177e145739909eb1981a268a20028,49cf84ccd19660e30887be26f50dac9abfb2148012a124cf6f24b618bd5ea579,,,case0:ok;case1:ok;case2:bad[non_square(q)];case3:bad[non_square(q)];case4:ok;case5:ok;case6:bad[non_square(q)];case7:bad[non_square(q)]
+2dc90e640cb646ae9164c0b5a9ef0169febe34dc4437d6e46acb0e27e219d1e8,d236f19bf349b9516e9b3f4a5610fe960141cb23bbc8291b9534f1d71de62a47,e69df7d9c026c36600ebdf588072675847c0c431c8eb730682533e964b6252c9,4f18bbdf7c2d6c5f818c18802fa35cd069eaa79fff74e4fc837c80d93fece2f8,,,196208263fd93c99ff1420a77f8d98a7b83f3bce37148cf97dacc168b49da966,b0e7442083d293a07e73e77fd05ca32f96155860008b1b037c837f25c0131937,,,case0:ok;case1:info[v=0]&ok;case2:bad[non_square(q)];case3:bad[non_square(q)];case4:ok;case5:info[v=0]&ok;case6:bad[non_square(q)];case7:bad[non_square(q)]
+3edd7b3980e2f2f34d1409a207069f881fda5f96f08027ac4465b63dc278d672,053a98de4a27b1961155822b3a3121f03b2a14458bd80eb4a560c4c7a85c149c,,,b3dae4b7dcf858e4c6968057cef2b156465431526538199cf52dc1b2d62fda30,4aa77dd55d6b6d3cfa10cc9d0fe42f79232e4575661049ae36779c1d0c666d88,,,4c251b482307a71b39697fa8310d4ea9b9abcead9ac7e6630ad23e4c29d021ff,b558822aa29492c305ef3362f01bd086dcd1ba8a99efb651c98863e1f3998ea7,case0:bad[valid_x(-x-u)];case1:bad[valid_x(-x-u)];case2:ok;case3:ok;case4:bad[valid_x(-x-u)];case5:bad[valid_x(-x-u)];case6:ok;case7:ok
+4295737efcb1da6fb1d96b9ca7dcd1e320024b37a736c4948b62598173069f70,fa7ffe4f25f88362831c087afe2e8a9b0713e2cac1ddca6a383205a266f14307,,,,,,,,,case0:bad[non_square(s)];case1:bad[non_square(s)];case2:bad[non_square(s)];case3:bad[non_square(s)];case4:bad[non_square(s)];case5:bad[non_square(s)];case6:bad[non_square(s)];case7:bad[non_square(s)]
+587c1a0cee91939e7f784d23b963004a3bf44f5d4e32a0081995ba20b0fca59e,2ea988530715e8d10363907ff25124524d471ba2454d5ce3be3f04194dfd3a3c,cfd5a094aa0b9b8891b76c6ab9438f66aa1c095a65f9f70135e8171292245e74,a89057d7c6563f0d6efa19ae84412b8a7b47e791a191ecdfdf2af84fd97bc339,475d0ae9ef46920df07b34117be5a0817de1023e3cc32689e9be145b406b0aef,a0759178ad80232454f827ef05ea3e72ad8d75418e6d4cc1cd4f5306c5e7c453,302a5f6b55f464776e48939546bc709955e3f6a59a0608feca17e8ec6ddb9dbb,576fa82839a9c0f29105e6517bbed47584b8186e5e6e132020d507af268438f6,b8a2f51610b96df20f84cbee841a5f7e821efdc1c33cd9761641eba3bf94f140,5f8a6e87527fdcdbab07d810fa15c18d52728abe7192b33e32b0acf83a1837dc,case0:ok;case1:ok;case2:ok;case3:ok;case4:ok;case5:ok;case6:ok;case7:ok
+5fa88b3365a635cbbcee003cce9ef51dd1a310de277e441abccdb7be1e4ba249,79461ff62bfcbcac4249ba84dd040f2cec3c63f725204dc7f464c16bf0ff3170,,,6bb700e1f4d7e236e8d193ff4a76c1b3bcd4e2b25acac3d51c8dac653fe909a0,f4c73410633da7f63a4f1d55aec6dd32c4c6d89ee74075edb5515ed90da9e683,,,9448ff1e0b281dc9172e6c00b5893e4c432b1d4da5353c2ae3725399c016f28f,0b38cbef9cc25809c5b0e2aa513922cd3b39276118bf8a124aaea125f25615ac,case0:bad[non_square(s)];case1:bad[non_square(s)];case2:ok;case3:info[v=0]&ok;case4:bad[non_square(s)];case5:bad[non_square(s)];case6:ok;case7:info[v=0]&ok
+6fb31c7531f03130b42b155b952779efbb46087dd9807d241a48eac63c3d96d6,56f81be753e8d4ae4940ea6f46f6ec9fda66a6f96cc95f506cb2b57490e94260,,,59059774795bdb7a837fbe1140a5fa59984f48af8df95d57dd6d1c05437dcec1,22a644db79376ad4e7b3a009e58b3f13137c54fdf911122cc93667c47077d784,,,a6fa688b86a424857c8041eebf5a05a667b0b7507206a2a82292e3f9bc822d6e,dd59bb2486c8952b184c5ff61a74c0ecec83ab0206eeedd336c9983a8f8824ab,case0:bad[valid_x(-x-u)];case1:bad[valid_x(-x-u)];case2:ok;case3:info[v=0]&ok;case4:bad[valid_x(-x-u)];case5:bad[valid_x(-x-u)];case6:ok;case7:info[v=0]&ok
+704cd226e71cb6826a590e80dac90f2d2f5830f0fdf135a3eae3965bff25ff12,138e0afa68936ee670bd2b8db53aedbb7bea2a8597388b24d0518edd22ad66ec,,,,,,,,,case0:bad[non_square(s)];case1:bad[non_square(s)];case2:bad[non_square(q)];case3:bad[non_square(q)];case4:bad[non_square(s)];case5:bad[non_square(s)];case6:bad[non_square(q)];case7:bad[non_square(q)]
+725e914792cb8c8949e7e1168b7cdd8a8094c91c6ec2202ccd53a6a18771edeb,8da16eb86d347376b6181ee9748322757f6b36e3913ddfd332ac595d788e0e44,dd357786b9f6873330391aa5625809654e43116e82a5a5d82ffd1d6624101fc4,a0b7efca01814594c59c9aae8e49700186ca5d95e88bcc80399044d9c2d8613d,,,22ca8879460978cccfc6e55a9da7f69ab1bcee917d5a5a27d002e298dbefdc6b,5f481035fe7eba6b3a63655171b68ffe7935a26a1774337fc66fbb253d279af2,,,case0:ok;case1:info[v=0]&ok;case2:bad[non_square(s)];case3:bad[non_square(s)];case4:ok;case5:info[v=0]&ok;case6:bad[non_square(s)];case7:bad[non_square(s)]
+78fe6b717f2ea4a32708d79c151bf503a5312a18c0963437e865cc6ed3f6ae97,8701948e80d15b5cd8f72863eae40afc5aced5e73f69cbc8179a33902c094d98,,,,,,,,,case0:bad[non_square(s)];case1:info[v=0]&bad[non_square(s)];case2:bad[non_square(q)];case3:bad[non_square(q)];case4:bad[non_square(s)];case5:info[v=0]&bad[non_square(s)];case6:bad[non_square(q)];case7:bad[non_square(q)]
+7c37bb9c5061dc07413f11acd5a34006e64c5c457fdb9a438f217255a961f50d,5c1a76b44568eb59d6789a7442d9ed7cdc6226b7752b4ff8eaf8e1a95736e507,,,b94d30cd7dbff60b64620c17ca0fafaa40b3d1f52d077a60a2e0cafd145086c2,,,,46b2cf32824009f49b9df3e835f05055bf4c2e0ad2f8859f5d1f3501ebaf756d,,case0:bad[non_square(s)];case1:bad[non_square(s)];case2:info[q=0]&info[X=0]&ok;case3:info[q=0]&bad[r=0];case4:bad[non_square(s)];case5:bad[non_square(s)];case6:info[q=0]&info[X=0]&ok;case7:info[q=0]&bad[r=0]
+82388888967f82a6b444438a7d44838e13c0d478b9ca060da95a41fb94303de6,29e9654170628fec8b4972898b113cf98807f4609274f4f3140d0674157c90a0,,,,,,,,,case0:bad[non_square(s)];case1:bad[non_square(s)];case2:bad[non_square(s)];case3:info[v=0]&bad[non_square(s)];case4:bad[non_square(s)];case5:bad[non_square(s)];case6:bad[non_square(s)];case7:info[v=0]&bad[non_square(s)]
+91298f5770af7a27f0a47188d24c3b7bf98ab2990d84b0b898507e3c561d6472,144f4ccbd9a74698a88cbf6fd00ad886d339d29ea19448f2c572cac0a07d5562,e6a0ffa3807f09dadbe71e0f4be4725f2832e76cad8dc1d943ce839375eff248,837b8e68d4917544764ad0903cb11f8615d2823cefbb06d89049dbabc69befda,,,195f005c7f80f6252418e1f0b41b8da0d7cd189352723e26bc317c6b8a1009e7,7c8471972b6e8abb89b52f6fc34ee079ea2d7dc31044f9276fb6245339640c55,,,case0:ok;case1:ok;case2:bad[non_square(s)];case3:info[v=0]&bad[non_square(s)];case4:ok;case5:ok;case6:bad[non_square(s)];case7:info[v=0]&bad[non_square(s)]
+b682f3d03bbb5dee4f54b5ebfba931b4f52f6a191e5c2f483c73c66e9ace97e1,904717bf0bc0cb7873fcdc38aa97f19e3a62630972acff92b24cc6dda197cb96,,,,,,,,,case0:bad[valid_x(-x-u)];case1:bad[valid_x(-x-u)];case2:bad[non_square(s)];case3:bad[non_square(s)];case4:bad[valid_x(-x-u)];case5:bad[valid_x(-x-u)];case6:bad[non_square(s)];case7:bad[non_square(s)]
+c17ec69e665f0fb0dbab48d9c2f94d12ec8a9d7eacb58084833091801eb0b80b,147756e66d96e31c426d3cc85ed0c4cfbef6341dd8b285585aa574ea0204b55e,6f4aea431a0043bdd03134d6d9159119ce034b88c32e50e8e36c4ee45eac7ae9,fd5be16d4ffa2690126c67c3ef7cb9d29b74d397c78b06b3605fda34dc9696a6,5e9c60792a2f000e45c6250f296f875e174efc0e9703e628706103a9dd2d82c7,,90b515bce5ffbc422fcecb2926ea6ee631fcb4773cd1af171c93b11aa1538146,02a41e92b005d96fed93983c1083462d648b2c683874f94c9fa025ca23696589,a1639f86d5d0fff1ba39daf0d69078a1e8b103f168fc19d78f9efc5522d27968,,case0:ok;case1:ok;case2:info[q=0]&info[X=0]&ok;case3:info[q=0]&bad[r=0];case4:ok;case5:ok;case6:info[q=0]&info[X=0]&ok;case7:info[q=0]&bad[r=0]
+c25172fc3f29b6fc4a1155b8575233155486b27464b74b8b260b499a3f53cb14,1ea9cbdb35cf6e0329aa31b0bb0a702a65123ed008655a93b7dcd5280e52e1ab,,,7422edc7843136af0053bb8854448a8299994f9ddcefd3a9a92d45462c59298a,78c7774a266f8b97ea23d05d064f033c77319f923f6b78bce4e20bf05fa5398d,,,8bdd12387bcec950ffac4477abbb757d6666b06223102c5656d2bab8d3a6d2a5,873888b5d990746815dc2fa2f9b0fcc388ce606dc09487431b1df40ea05ac2a2,case0:bad[non_square(s)];case1:bad[non_square(s)];case2:ok;case3:ok;case4:bad[non_square(s)];case5:bad[non_square(s)];case6:ok;case7:ok
+cab6626f832a4b1280ba7add2fc5322ff011caededf7ff4db6735d5026dc0367,2b2bef0852c6f7c95d72ac99a23802b875029cd573b248d1f1b3fc8033788eb6,,,,,,,,,case0:bad[non_square(s)];case1:bad[non_square(s)];case2:info[v=0]&bad[non_square(s)];case3:bad[non_square(s)];case4:bad[non_square(s)];case5:bad[non_square(s)];case6:info[v=0]&bad[non_square(s)];case7:bad[non_square(s)]
+d8621b4ffc85b9ed56e99d8dd1dd24aedcecb14763b861a17112dc771a104fd2,812cabe972a22aa67c7da0c94d8a936296eb9949d70c37cb2b2487574cb3ce58,fbc5febc6fdbc9ae3eb88a93b982196e8b6275a6d5a73c17387e000c711bd0e3,8724c96bd4e5527f2dd195a51c468d2d211ba2fac7cbe0b4b3434253409fb42d,,,043a014390243651c147756c467de691749d8a592a58c3e8c781fff28ee42b4c,78db36942b1aad80d22e6a5ae3b972d2dee45d0538341f4b4cbcbdabbf604802,,,case0:ok;case1:ok;case2:bad[non_square(s)];case3:bad[non_square(s)];case4:ok;case5:ok;case6:bad[non_square(s)];case7:bad[non_square(s)]
+da463164c6f4bf7129ee5f0ec00f65a675a8adf1bd931b39b64806afdcda9a22,25b9ce9b390b408ed611a0f13ff09a598a57520e426ce4c649b7f94f2325620d,,,,,,,,,case0:bad[non_square(s)];case1:info[v=0]&bad[non_square(s)];case2:bad[non_square(s)];case3:bad[non_square(s)];case4:bad[non_square(s)];case5:info[v=0]&bad[non_square(s)];case6:bad[non_square(s)];case7:bad[non_square(s)]
+dafc971e4a3a7b6dcfb42a08d9692d82ad9e7838523fcbda1d4827e14481ae2d,250368e1b5c58492304bd5f72696d27d526187c7adc03425e2b7d81dbb7e4e02,,,370c28f1be665efacde6aa436bf86fe21e6e314c1e53dd040e6c73a46b4c8c49,cd8acee98ffe56531a84d7eb3e48fa4034206ce825ace907d0edf0eaeb5e9ca2,,,c8f3d70e4199a105321955bc9407901de191ceb3e1ac22fbf1938c5a94b36fe6,327531167001a9ace57b2814c1b705bfcbdf9317da5316f82f120f1414a15f8d,case0:bad[non_square(s)];case1:info[v=0]&bad[non_square(s)];case2:ok;case3:ok;case4:bad[non_square(s)];case5:info[v=0]&bad[non_square(s)];case6:ok;case7:ok
+e0294c8bc1a36b4166ee92bfa70a5c34976fa9829405efea8f9cd54dcb29b99e,ae9690d13b8d20a0fbbf37bed8474f67a04e142f56efd78770a76b359165d8a1,,,dcd45d935613916af167b029058ba3a700d37150b9df34728cb05412c16d4182,,,,232ba26ca9ec6e950e984fd6fa745c58ff2c8eaf4620cb8d734fabec3e92baad,,case0:bad[valid_x(-x-u)];case1:bad[valid_x(-x-u)];case2:info[q=0]&info[X=0]&ok;case3:info[q=0]&bad[r=0];case4:bad[valid_x(-x-u)];case5:bad[valid_x(-x-u)];case6:info[q=0]&info[X=0]&ok;case7:info[q=0]&bad[r=0]
+e148441cd7b92b8b0e4fa3bd68712cfd0d709ad198cace611493c10e97f5394e,164a639794d74c53afc4d3294e79cdb3cd25f99f6df45c000f758aba54d699c0,,,,,,,,,case0:bad[valid_x(-x-u)];case1:bad[valid_x(-x-u)];case2:bad[non_square(s)];case3:info[v=0]&bad[non_square(s)];case4:bad[valid_x(-x-u)];case5:bad[valid_x(-x-u)];case6:bad[non_square(s)];case7:info[v=0]&bad[non_square(s)]
+e4b00ec97aadcca97644d3b0c8a931b14ce7bcf7bc8779546d6e35aa5937381c,94e9588d41647b3fcc772dc8d83c67ce3be003538517c834103d2cd49d62ef4d,c88d25f41407376bb2c03a7fffeb3ec7811cc43491a0c3aac0378cdc78357bee,51c02636ce00c2345ecd89adb6089fe4d5e18ac924e3145e6669501cd37a00d4,205b3512db40521cb200952e67b46f67e09e7839e0de44004138329ebd9138c5,58aab390ab6fb55c1d1b80897a207ce94a78fa5b4aa61a33398bcae9adb20d3e,3772da0bebf8c8944d3fc5800014c1387ee33bcb6e5f3c553fc8732287ca8041,ae3fd9c931ff3dcba132765249f7601b2a1e7536db1ceba19996afe22c85fb5b,dfa4caed24bfade34dff6ad1984b90981f6187c61f21bbffbec7cd60426ec36a,a7554c6f54904aa3e2e47f7685df8316b58705a4b559e5ccc6743515524deef1,case0:ok;case1:ok;case2:ok;case3:info[v=0]&ok;case4:ok;case5:ok;case6:ok;case7:info[v=0]&ok
+e5bbb9ef360d0a501618f0067d36dceb75f5be9a620232aa9fd5139d0863fde5,e5bbb9ef360d0a501618f0067d36dceb75f5be9a620232aa9fd5139d0863fde5,,,,,,,,,case0:bad[valid_x(-x-u)];case1:bad[valid_x(-x-u)];case2:bad[s=0];case3:bad[s=0];case4:bad[valid_x(-x-u)];case5:bad[valid_x(-x-u)];case6:bad[s=0];case7:bad[s=0]
+e6bcb5c3d63467d490bfa54fbbc6092a7248c25e11b248dc2964a6e15edb1457,19434a3c29cb982b6f405ab04439f6d58db73da1ee4db723d69b591da124e7d8,67119877832ab8f459a821656d8261f544a553b89ae4f25c52a97134b70f3426,ffee02f5e649c07f0560eff1867ec7b32d0e595e9b1c0ea6e2a4fc70c97cd71f,b5e0c189eb5b4bacd025b7444d74178be8d5246cfa4a9a207964a057ee969992,5746e4591bf7f4c3044609ea372e908603975d279fdef8349f0b08d32f07619d,98ee67887cd5470ba657de9a927d9e0abb5aac47651b0da3ad568eca48f0c809,0011fd0a19b63f80fa9f100e7981384cd2f1a6a164e3f1591d5b038e36832510,4a1f3e7614a4b4532fda48bbb28be874172adb9305b565df869b5fa71169629d,a8b91ba6e4080b3cfbb9f615c8d16f79fc68a2d8602107cb60f4f72bd0f89a92,case0:ok;case1:info[v=0]&ok;case2:ok;case3:ok;case4:ok;case5:info[v=0]&ok;case6:ok;case7:ok
+f28fba64af766845eb2f4302456e2b9f8d80affe57e7aae42738d7cddb1c2ce6,f28fba64af766845eb2f4302456e2b9f8d80affe57e7aae42738d7cddb1c2ce6,4f867ad8bb3d840409d26b67307e62100153273f72fa4b7484becfa14ebe7408,5bbc4f59e452cc5f22a99144b10ce8989a89a995ec3cea1c91ae10e8f721bb5d,,,b079852744c27bfbf62d9498cf819deffeacd8c08d05b48b7b41305db1418827,a443b0a61bad33a0dd566ebb4ef317676576566a13c315e36e51ef1608de40d2,,,case0:ok;case1:ok;case2:bad[s=0];case3:bad[s=0];case4:ok;case5:ok;case6:bad[s=0];case7:bad[s=0]
+f455605bc85bf48e3a908c31023faf98381504c6c6d3aeb9ede55f8dd528924d,d31fbcd5cdb798f6c00db6692f8fe8967fa9c79dd10958f4a194f01374905e99,,,0c00c5715b56fe632d814ad8a77f8e66628ea47a6116834f8c1218f3a03cbd50,df88e44fac84fa52df4d59f48819f18f6a8cd4151d162afaf773166f57c7ff46,,,f3ff3a8ea4a9019cd27eb527588071999d715b859ee97cb073ede70b5fc33edf,20771bb0537b05ad20b2a60b77e60e7095732beae2e9d505088ce98fa837fce9,case0:bad[non_square(s)];case1:bad[non_square(s)];case2:info[v=0]&ok;case3:ok;case4:bad[non_square(s)];case5:bad[non_square(s)];case6:info[v=0]&ok;case7:ok
+f58cd4d9830bad322699035e8246007d4be27e19b6f53621317b4f309b3daa9d,78ec2b3dc0948de560148bbc7c6dc9633ad5df70a5a5750cbed721804f082a3b,6c4c580b76c7594043569f9dae16dc2801c16a1fbe12860881b75f8ef929bce5,94231355e7385c5f25ca436aa64191471aea4393d6e86ab7a35fe2afacaefd0d,dff2a1951ada6db574df834048149da3397a75b829abf58c7e69db1b41ac0989,a52b66d3c907035548028bf804711bf422aba95f1a666fc86f4648e05f29caae,93b3a7f48938a6bfbca9606251e923d7fe3e95e041ed79f77e48a07006d63f4a,6bdcecaa18c7a3a0da35bc9559be6eb8e515bc6c291795485ca01d4f5350ff22,200d5e6ae525924a8b207cbfb7eb625cc6858a47d6540a73819624e3be53f2a6,5ad4992c36f8fcaab7fd7407fb8ee40bdd5456a0e599903790b9b71ea0d63181,case0:ok;case1:ok;case2:info[v=0]&ok;case3:ok;case4:ok;case5:ok;case6:info[v=0]&ok;case7:ok
+fd7d912a40f182a3588800d69ebfb5048766da206fd7ebc8d2436c81cbef6421,8d37c862054debe731694536ff46b273ec122b35a9bf1445ac3c4ff9f262c952,,,,,,,,,case0:bad[valid_x(-x-u)];case1:bad[valid_x(-x-u)];case2:info[v=0]&bad[non_square(s)];case3:bad[non_square(s)];case4:bad[valid_x(-x-u)];case5:bad[valid_x(-x-u)];case6:info[v=0]&bad[non_square(s)];case7:bad[non_square(s)]
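The vector file added above is plain CSV: one row per u/x input pair, the eight case*_t columns each holding a hex-encoded field element (or left empty when that case yields no result), and a trailing comment recording which cases are expected to succeed or fail. A minimal sketch of loading it follows; the path handling is an assumption (the file sits next to the other test_framework modules, as the diff header indicates), and this is not code from the change itself.

import csv
import os

vectors = os.path.join(os.path.dirname(__file__), "xswiftec_inv_test_vectors.csv")
with open(vectors, newline="", encoding="utf-8") as f:
    for row in csv.DictReader(f):
        u = int(row["u"], 16)
        x = int(row["x"], 16)
        # empty cells mean "no result for this case"
        cases = [int(row[f"case{i}_t"], 16) if row[f"case{i}_t"] else None for i in range(8)]
        # row["comment"] explains, per case, why a result is present or absent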
diff --git a/test/functional/test_runner.py b/test/functional/test_runner.py index c834086b6f..db04bb8bdb 100755 --- a/test/functional/test_runner.py +++ b/test/functional/test_runner.py @@ -74,12 +74,13 @@ TEST_EXIT_SKIPPED = 77 TEST_FRAMEWORK_MODULES = [ "address", "blocktools", - "muhash", + "ellswift", "key", + "messages", + "muhash", "ripemd160", "script", "segwit_addr", - "util", ] EXTENDED_SCRIPTS = [ @@ -122,6 +123,7 @@ BASE_SCRIPTS = [ 'feature_abortnode.py', 'wallet_address_types.py --legacy-wallet', 'wallet_address_types.py --descriptors', + 'p2p_orphan_handling.py', 'wallet_basic.py --legacy-wallet', 'wallet_basic.py --descriptors', 'feature_maxtipage.py', @@ -210,7 +212,6 @@ BASE_SCRIPTS = [ 'rpc_users.py', 'rpc_whitelist.py', 'feature_proxy.py', - 'feature_syscall_sandbox.py', 'wallet_signrawtransactionwithwallet.py --legacy-wallet', 'wallet_signrawtransactionwithwallet.py --descriptors', 'rpc_signrawtransactionwithkey.py', @@ -267,6 +268,7 @@ BASE_SCRIPTS = [ 'p2p_leak_tx.py', 'p2p_eviction.py', 'p2p_ibd_stalling.py', + 'p2p_net_deadlock.py', 'wallet_signmessagewithaddress.py', 'rpc_signmessagewithprivkey.py', 'rpc_generate.py', @@ -784,8 +786,8 @@ def check_script_prefixes(): def check_script_list(*, src_dir, fail_on_warn): """Check scripts directory. - Check that there are no scripts in the functional tests directory which are - not being run by pull-tester.py.""" + Check that all python files in this directory are categorized + as a test script or meta script.""" script_dir = src_dir + '/test/functional/' python_files = set([test_file for test_file in os.listdir(script_dir) if test_file.endswith(".py")]) missed_tests = list(python_files - set(map(lambda x: x.split()[0], ALL_SCRIPTS + NON_SCRIPTS))) diff --git a/test/functional/tool_wallet.py b/test/functional/tool_wallet.py index 95999649b4..9d381a2cd2 100755 --- a/test/functional/tool_wallet.py +++ b/test/functional/tool_wallet.py @@ -4,7 +4,6 @@ # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test bitcoin-wallet.""" -import hashlib import os import stat import subprocess @@ -13,9 +12,10 @@ import textwrap from collections import OrderedDict from test_framework.test_framework import BitcoinTestFramework -from test_framework.util import assert_equal - -BUFFER_SIZE = 16 * 1024 +from test_framework.util import ( + assert_equal, + sha256sum_file, +) class ToolWalletTest(BitcoinTestFramework): @@ -53,12 +53,7 @@ class ToolWalletTest(BitcoinTestFramework): assert_equal(p.poll(), 0) def wallet_shasum(self): - h = hashlib.sha1() - mv = memoryview(bytearray(BUFFER_SIZE)) - with open(self.wallet_path, 'rb', buffering=0) as f: - for n in iter(lambda: f.readinto(mv), 0): - h.update(mv[:n]) - return h.hexdigest() + return sha256sum_file(self.wallet_path).hex() def wallet_timestamp(self): return os.path.getmtime(self.wallet_path) @@ -173,12 +168,12 @@ class ToolWalletTest(BitcoinTestFramework): if file_format is not None and file_format != dump_data["format"]: load_output += "Warning: Dumpfile wallet format \"{}\" does not match command line specified format \"{}\".\n".format(dump_data["format"], file_format) self.assert_tool_output(load_output, *args) - assert os.path.isdir(os.path.join(self.nodes[0].datadir, "regtest/wallets", wallet_name)) + assert (self.nodes[0].wallets_path / wallet_name).is_dir() self.assert_tool_output("The dumpfile may contain private keys. 
To ensure the safety of your Bitcoin, do not share the dumpfile.\n", '-wallet={}'.format(wallet_name), '-dumpfile={}'.format(rt_dumppath), 'dump') rt_dump_data = self.read_dump(rt_dumppath) - wallet_dat = os.path.join(self.nodes[0].datadir, "regtest/wallets/", wallet_name, "wallet.dat") + wallet_dat = self.nodes[0].wallets_path / wallet_name / "wallet.dat" if rt_dump_data["format"] == "bdb": self.assert_is_bdb(wallet_dat) else: @@ -193,7 +188,7 @@ class ToolWalletTest(BitcoinTestFramework): self.assert_raises_tool_error('Error parsing command line arguments: Invalid parameter -foo', '-foo') self.assert_raises_tool_error('No method provided. Run `bitcoin-wallet -help` for valid methods.') self.assert_raises_tool_error('Wallet name must be provided when creating a new wallet.', 'create') - locked_dir = os.path.join(self.options.tmpdir, "node0", "regtest", "wallets") + locked_dir = self.nodes[0].wallets_path error = 'Error initializing wallet database environment "{}"!'.format(locked_dir) if self.options.descriptors: error = f"SQLiteDatabase: Unable to obtain an exclusive lock on the database, is it being used by another instance of {self.config['environment']['PACKAGE_NAME']}?" @@ -202,7 +197,7 @@ class ToolWalletTest(BitcoinTestFramework): '-wallet=' + self.default_wallet_name, 'info', ) - path = os.path.join(self.options.tmpdir, "node0", "regtest", "wallets", "nonexistent.dat") + path = self.nodes[0].wallets_path / "nonexistent.dat" self.assert_raises_tool_error("Failed to load database path '{}'. Path does not exist.".format(path), '-wallet=nonexistent.dat', 'info') def test_tool_wallet_info(self): @@ -347,7 +342,7 @@ class ToolWalletTest(BitcoinTestFramework): non_exist_dump = os.path.join(self.nodes[0].datadir, "wallet.nodump") self.assert_raises_tool_error('Unknown wallet file format "notaformat" provided. Please provide one of "bdb" or "sqlite".', '-wallet=todump', '-format=notaformat', '-dumpfile={}'.format(wallet_dump), 'createfromdump') self.assert_raises_tool_error('Dump file {} does not exist.'.format(non_exist_dump), '-wallet=todump', '-dumpfile={}'.format(non_exist_dump), 'createfromdump') - wallet_path = os.path.join(self.nodes[0].datadir, 'regtest', 'wallets', 'todump2') + wallet_path = self.nodes[0].wallets_path / "todump2" self.assert_raises_tool_error('Failed to create database path \'{}\'. Database already exists.'.format(wallet_path), '-wallet=todump2', '-dumpfile={}'.format(wallet_dump), 'createfromdump') self.assert_raises_tool_error("The -descriptors option can only be used with the 'create' command.", '-descriptors', '-wallet=todump2', '-dumpfile={}'.format(wallet_dump), 'createfromdump') @@ -363,18 +358,18 @@ class ToolWalletTest(BitcoinTestFramework): dump_data["BITCOIN_CORE_WALLET_DUMP"] = "0" self.write_dump(dump_data, bad_ver_wallet_dump) self.assert_raises_tool_error('Error: Dumpfile version is not supported. This version of bitcoin-wallet only supports version 1 dumpfiles. Got dumpfile with version 0', '-wallet=badload', '-dumpfile={}'.format(bad_ver_wallet_dump), 'createfromdump') - assert not os.path.isdir(os.path.join(self.nodes[0].datadir, "regtest/wallets", "badload")) + assert not (self.nodes[0].wallets_path / "badload").is_dir() bad_ver_wallet_dump = os.path.join(self.nodes[0].datadir, "wallet-bad_ver2.dump") dump_data["BITCOIN_CORE_WALLET_DUMP"] = "2" self.write_dump(dump_data, bad_ver_wallet_dump) self.assert_raises_tool_error('Error: Dumpfile version is not supported. This version of bitcoin-wallet only supports version 1 dumpfiles. 
Got dumpfile with version 2', '-wallet=badload', '-dumpfile={}'.format(bad_ver_wallet_dump), 'createfromdump') - assert not os.path.isdir(os.path.join(self.nodes[0].datadir, "regtest/wallets", "badload")) + assert not (self.nodes[0].wallets_path / "badload").is_dir() bad_magic_wallet_dump = os.path.join(self.nodes[0].datadir, "wallet-bad_magic.dump") del dump_data["BITCOIN_CORE_WALLET_DUMP"] dump_data["not_the_right_magic"] = "1" self.write_dump(dump_data, bad_magic_wallet_dump, "not_the_right_magic") self.assert_raises_tool_error('Error: Dumpfile identifier record is incorrect. Got "not_the_right_magic", expected "BITCOIN_CORE_WALLET_DUMP".', '-wallet=badload', '-dumpfile={}'.format(bad_magic_wallet_dump), 'createfromdump') - assert not os.path.isdir(os.path.join(self.nodes[0].datadir, "regtest/wallets", "badload")) + assert not (self.nodes[0].wallets_path / "badload").is_dir() self.log.info('Checking createfromdump handling of checksums') bad_sum_wallet_dump = os.path.join(self.nodes[0].datadir, "wallet-bad_sum1.dump") @@ -383,25 +378,25 @@ class ToolWalletTest(BitcoinTestFramework): dump_data["checksum"] = "1" * 64 self.write_dump(dump_data, bad_sum_wallet_dump) self.assert_raises_tool_error('Error: Dumpfile checksum does not match. Computed {}, expected {}'.format(checksum, "1" * 64), '-wallet=bad', '-dumpfile={}'.format(bad_sum_wallet_dump), 'createfromdump') - assert not os.path.isdir(os.path.join(self.nodes[0].datadir, "regtest/wallets", "badload")) + assert not (self.nodes[0].wallets_path / "badload").is_dir() bad_sum_wallet_dump = os.path.join(self.nodes[0].datadir, "wallet-bad_sum2.dump") del dump_data["checksum"] self.write_dump(dump_data, bad_sum_wallet_dump, skip_checksum=True) self.assert_raises_tool_error('Error: Missing checksum', '-wallet=badload', '-dumpfile={}'.format(bad_sum_wallet_dump), 'createfromdump') - assert not os.path.isdir(os.path.join(self.nodes[0].datadir, "regtest/wallets", "badload")) + assert not (self.nodes[0].wallets_path / "badload").is_dir() bad_sum_wallet_dump = os.path.join(self.nodes[0].datadir, "wallet-bad_sum3.dump") dump_data["checksum"] = "2" * 10 self.write_dump(dump_data, bad_sum_wallet_dump) self.assert_raises_tool_error('Error: Checksum is not the correct size', '-wallet=badload', '-dumpfile={}'.format(bad_sum_wallet_dump), 'createfromdump') - assert not os.path.isdir(os.path.join(self.nodes[0].datadir, "regtest/wallets", "badload")) + assert not (self.nodes[0].wallets_path / "badload").is_dir() dump_data["checksum"] = "3" * 66 self.write_dump(dump_data, bad_sum_wallet_dump) self.assert_raises_tool_error('Error: Checksum is not the correct size', '-wallet=badload', '-dumpfile={}'.format(bad_sum_wallet_dump), 'createfromdump') - assert not os.path.isdir(os.path.join(self.nodes[0].datadir, "regtest/wallets", "badload")) + assert not (self.nodes[0].wallets_path / "badload").is_dir() def run_test(self): - self.wallet_path = os.path.join(self.nodes[0].datadir, self.chain, 'wallets', self.default_wallet_name, self.wallet_data_filename) + self.wallet_path = os.path.join(self.nodes[0].wallets_path, self.default_wallet_name, self.wallet_data_filename) self.test_invalid_tool_commands_and_args() # Warning: The following tests are order-dependent. 
self.test_tool_wallet_info() diff --git a/test/functional/wallet_backup.py b/test/functional/wallet_backup.py index 4ad25d964e..9f6f54c7a6 100755 --- a/test/functional/wallet_backup.py +++ b/test/functional/wallet_backup.py @@ -109,16 +109,16 @@ class WalletBackupTest(BitcoinTestFramework): self.stop_node(2) def erase_three(self): - os.remove(os.path.join(self.nodes[0].datadir, self.chain, 'wallets', self.default_wallet_name, self.wallet_data_filename)) - os.remove(os.path.join(self.nodes[1].datadir, self.chain, 'wallets', self.default_wallet_name, self.wallet_data_filename)) - os.remove(os.path.join(self.nodes[2].datadir, self.chain, 'wallets', self.default_wallet_name, self.wallet_data_filename)) + os.remove(os.path.join(self.nodes[0].wallets_path, self.default_wallet_name, self.wallet_data_filename)) + os.remove(os.path.join(self.nodes[1].wallets_path, self.default_wallet_name, self.wallet_data_filename)) + os.remove(os.path.join(self.nodes[2].wallets_path, self.default_wallet_name, self.wallet_data_filename)) def restore_invalid_wallet(self): node = self.nodes[3] invalid_wallet_file = os.path.join(self.nodes[0].datadir, 'invalid_wallet_file.bak') open(invalid_wallet_file, 'a', encoding="utf8").write('invald wallet') wallet_name = "res0" - not_created_wallet_file = os.path.join(node.datadir, self.chain, 'wallets', wallet_name) + not_created_wallet_file = os.path.join(node.wallets_path, wallet_name) error_message = "Wallet file verification failed. Failed to load database path '{}'. Data is not in recognized format.".format(not_created_wallet_file) assert_raises_rpc_error(-18, error_message, node.restorewallet, wallet_name, invalid_wallet_file) assert not os.path.exists(not_created_wallet_file) @@ -128,23 +128,18 @@ class WalletBackupTest(BitcoinTestFramework): nonexistent_wallet_file = os.path.join(self.nodes[0].datadir, 'nonexistent_wallet.bak') wallet_name = "res0" assert_raises_rpc_error(-8, "Backup file does not exist", node.restorewallet, wallet_name, nonexistent_wallet_file) - not_created_wallet_file = os.path.join(node.datadir, self.chain, 'wallets', wallet_name) + not_created_wallet_file = os.path.join(node.wallets_path, wallet_name) assert not os.path.exists(not_created_wallet_file) def restore_wallet_existent_name(self): node = self.nodes[3] backup_file = os.path.join(self.nodes[0].datadir, 'wallet.bak') wallet_name = "res0" - wallet_file = os.path.join(node.datadir, self.chain, 'wallets', wallet_name) + wallet_file = os.path.join(node.wallets_path, wallet_name) error_message = "Failed to create database path '{}'. 
Database already exists.".format(wallet_file) assert_raises_rpc_error(-36, error_message, node.restorewallet, wallet_name, backup_file) assert os.path.exists(wallet_file) - def init_three(self): - self.init_wallet(node=0) - self.init_wallet(node=1) - self.init_wallet(node=2) - def run_test(self): self.log.info("Generating initial blockchain") self.generate(self.nodes[0], 1) @@ -206,9 +201,9 @@ class WalletBackupTest(BitcoinTestFramework): self.nodes[3].restorewallet("res1", backup_file_1) self.nodes[3].restorewallet("res2", backup_file_2) - assert os.path.exists(os.path.join(self.nodes[3].datadir, self.chain, 'wallets', "res0")) - assert os.path.exists(os.path.join(self.nodes[3].datadir, self.chain, 'wallets', "res1")) - assert os.path.exists(os.path.join(self.nodes[3].datadir, self.chain, 'wallets', "res2")) + assert os.path.exists(os.path.join(self.nodes[3].wallets_path, "res0")) + assert os.path.exists(os.path.join(self.nodes[3].wallets_path, "res1")) + assert os.path.exists(os.path.join(self.nodes[3].wallets_path, "res2")) res0_rpc = self.nodes[3].get_wallet_rpc("res0") res1_rpc = self.nodes[3].get_wallet_rpc("res1") @@ -226,11 +221,14 @@ class WalletBackupTest(BitcoinTestFramework): self.erase_three() #start node2 with no chain - shutil.rmtree(os.path.join(self.nodes[2].datadir, self.chain, 'blocks')) - shutil.rmtree(os.path.join(self.nodes[2].datadir, self.chain, 'chainstate')) + shutil.rmtree(os.path.join(self.nodes[2].blocks_path)) + shutil.rmtree(os.path.join(self.nodes[2].chain_path, 'chainstate')) self.start_three(["-nowallet"]) - self.init_three() + # Create new wallets for the three nodes. + # We will use this empty wallets to test the 'importwallet()' RPC command below. + for node_num in range(3): + self.nodes[node_num].createwallet(wallet_name=self.default_wallet_name, descriptors=self.options.descriptors, load_on_startup=True) assert_equal(self.nodes[0].getbalance(), 0) assert_equal(self.nodes[1].getbalance(), 0) @@ -248,10 +246,10 @@ class WalletBackupTest(BitcoinTestFramework): # Backup to source wallet file must fail sourcePaths = [ - os.path.join(self.nodes[0].datadir, self.chain, 'wallets', self.default_wallet_name, self.wallet_data_filename), - os.path.join(self.nodes[0].datadir, self.chain, '.', 'wallets', self.default_wallet_name, self.wallet_data_filename), - os.path.join(self.nodes[0].datadir, self.chain, 'wallets', self.default_wallet_name), - os.path.join(self.nodes[0].datadir, self.chain, 'wallets')] + os.path.join(self.nodes[0].wallets_path, self.default_wallet_name, self.wallet_data_filename), + os.path.join(self.nodes[0].wallets_path, '.', self.default_wallet_name, self.wallet_data_filename), + os.path.join(self.nodes[0].wallets_path, self.default_wallet_name), + os.path.join(self.nodes[0].wallets_path)] for sourcePath in sourcePaths: assert_raises_rpc_error(-4, "backup failed", self.nodes[0].backupwallet, sourcePath) diff --git a/test/functional/wallet_backwards_compatibility.py b/test/functional/wallet_backwards_compatibility.py index 5088e11eda..49e36b21c5 100755 --- a/test/functional/wallet_backwards_compatibility.py +++ b/test/functional/wallet_backwards_compatibility.py @@ -74,8 +74,8 @@ class BackwardsCompatibilityTest(BitcoinTestFramework): def nodes_wallet_dir(self, node): if node.version < 170000: - return os.path.join(node.datadir, "regtest") - return os.path.join(node.datadir, "regtest/wallets") + return node.chain_path + return node.wallets_path def run_test(self): node_miner = self.nodes[0] @@ -157,10 +157,10 @@ class 
BackwardsCompatibilityTest(BitcoinTestFramework): assert info['keypoolsize'] == 0 # Unload wallets and copy to older nodes: - node_master_wallets_dir = os.path.join(node_master.datadir, "regtest/wallets") - node_v19_wallets_dir = os.path.join(node_v19.datadir, "regtest/wallets") - node_v17_wallets_dir = os.path.join(node_v17.datadir, "regtest/wallets") - node_v16_wallets_dir = os.path.join(node_v16.datadir, "regtest") + node_master_wallets_dir = node_master.wallets_path + node_v19_wallets_dir = node_v19.wallets_path + node_v17_wallets_dir = node_v17.wallets_path + node_v16_wallets_dir = node_v16.chain_path node_master.unloadwallet("w1") node_master.unloadwallet("w2") node_master.unloadwallet("w3") @@ -264,10 +264,11 @@ class BackwardsCompatibilityTest(BitcoinTestFramework): os.path.join(node_master_wallets_dir, "u1_v16") ) load_res = node_master.loadwallet("u1_v16") - # Make sure this wallet opens without warnings. See https://github.com/bitcoin/bitcoin/pull/19054 + # Make sure this wallet opens with only the migration warning. See https://github.com/bitcoin/bitcoin/pull/19054 if int(node_master.getnetworkinfo()["version"]) >= 249900: # loadwallet#warnings (added in v25) -- only present if there is a warning - assert "warnings" not in load_res + # Legacy wallets will have only a deprecation warning + assert_equal(load_res["warnings"], ["Wallet loaded successfully. The legacy wallet type is being deprecated and support for creating and opening legacy wallets will be removed in the future. Legacy wallets can be migrated to a descriptor wallet with migratewallet."]) else: # loadwallet#warning (deprecated in v25) -- always present, but empty string if no warning assert_equal(load_res["warning"], '') diff --git a/test/functional/wallet_basic.py b/test/functional/wallet_basic.py index a1b805c09e..01149a0977 100755 --- a/test/functional/wallet_basic.py +++ b/test/functional/wallet_basic.py @@ -310,8 +310,7 @@ class WalletTest(BitcoinTestFramework): node_0_bal += amount assert_equal(self.nodes[0].getbalance(), node_0_bal) - for key in ["totalFee", "feeRate"]: - assert_raises_rpc_error(-8, "Unknown named parameter key", self.nodes[2].sendtoaddress, address=address, amount=1, fee_rate=1, key=1) + assert_raises_rpc_error(-8, "Unknown named parameter feeRate", self.nodes[2].sendtoaddress, address=address, amount=1, fee_rate=1, feeRate=1) # Test setting explicit fee rate just below the minimum. self.log.info("Test sendmany raises 'fee rate too low' if fee_rate of 0.99999999 is passed") @@ -328,7 +327,7 @@ class WalletTest(BitcoinTestFramework): for invalid_value in ["", 0.000000001, 1e-09, 1.111111111, 1111111111111111, "31.999999999999999999999"]: assert_raises_rpc_error(-3, msg, self.nodes[2].sendmany, amounts={address: 1.0}, fee_rate=invalid_value) # Test fee_rate values that cannot be represented in sat/vB. - for invalid_value in [0.0001, 0.00000001, 0.00099999, 31.99999999, "0.0001", "0.00000001", "0.00099999", "31.99999999"]: + for invalid_value in [0.0001, 0.00000001, 0.00099999, 31.99999999]: assert_raises_rpc_error(-3, msg, self.nodes[2].sendmany, amounts={address: 10}, fee_rate=invalid_value) # Test fee_rate out of range (negative number). 
assert_raises_rpc_error(-3, OUT_OF_RANGE, self.nodes[2].sendmany, amounts={address: 10}, fee_rate=-1) @@ -505,9 +504,6 @@ class WalletTest(BitcoinTestFramework): fee = prebalance - postbalance - amount assert_fee_amount(fee, tx_size, Decimal(fee_rate_btc_kvb)) - for key in ["totalFee", "feeRate"]: - assert_raises_rpc_error(-8, "Unknown named parameter key", self.nodes[2].sendtoaddress, address=address, amount=1, fee_rate=1, key=1) - # Test setting explicit fee rate just below the minimum. self.log.info("Test sendtoaddress raises 'fee rate too low' if fee_rate of 0.99999999 is passed") assert_raises_rpc_error(-6, "Fee rate (0.999 sat/vB) is lower than the minimum fee rate setting (1.000 sat/vB)", @@ -523,7 +519,7 @@ class WalletTest(BitcoinTestFramework): for invalid_value in ["", 0.000000001, 1e-09, 1.111111111, 1111111111111111, "31.999999999999999999999"]: assert_raises_rpc_error(-3, msg, self.nodes[2].sendtoaddress, address=address, amount=1.0, fee_rate=invalid_value) # Test fee_rate values that cannot be represented in sat/vB. - for invalid_value in [0.0001, 0.00000001, 0.00099999, 31.99999999, "0.0001", "0.00000001", "0.00099999", "31.99999999"]: + for invalid_value in [0.0001, 0.00000001, 0.00099999, 31.99999999]: assert_raises_rpc_error(-3, msg, self.nodes[2].sendtoaddress, address=address, amount=10, fee_rate=invalid_value) # Test fee_rate out of range (negative number). assert_raises_rpc_error(-3, OUT_OF_RANGE, self.nodes[2].sendtoaddress, address=address, amount=1.0, fee_rate=-1) diff --git a/test/functional/wallet_bumpfee.py b/test/functional/wallet_bumpfee.py index b9ebf64c22..e69c1829ca 100755 --- a/test/functional/wallet_bumpfee.py +++ b/test/functional/wallet_bumpfee.py @@ -24,9 +24,11 @@ from test_framework.messages import ( from test_framework.test_framework import BitcoinTestFramework from test_framework.util import ( assert_equal, + assert_fee_amount, assert_greater_than, assert_raises_rpc_error, get_fee, + find_vout_for_address, ) from test_framework.wallet import MiniWallet @@ -109,6 +111,8 @@ class BumpFeeTest(BitcoinTestFramework): test_small_output_with_feerate_succeeds(self, rbf_node, dest_address) test_no_more_inputs_fails(self, rbf_node, dest_address) self.test_bump_back_to_yourself() + self.test_provided_change_pos(rbf_node) + self.test_single_output() # Context independent tests test_feerate_checks_replaced_outputs(self, rbf_node, peer_node) @@ -137,7 +141,7 @@ class BumpFeeTest(BitcoinTestFramework): for invalid_value in ["", 0.000000001, 1e-09, 1.111111111, 1111111111111111, "31.999999999999999999999"]: assert_raises_rpc_error(-3, msg, rbf_node.bumpfee, rbfid, fee_rate=invalid_value) # Test fee_rate values that cannot be represented in sat/vB. - for invalid_value in [0.0001, 0.00000001, 0.00099999, 31.99999999, "0.0001", "0.00000001", "0.00099999", "31.99999999"]: + for invalid_value in [0.0001, 0.00000001, 0.00099999, 31.99999999]: assert_raises_rpc_error(-3, msg, rbf_node.bumpfee, rbfid, fee_rate=invalid_value) # Test fee_rate out of range (negative number). 
assert_raises_rpc_error(-3, "Amount out of range", rbf_node.bumpfee, rbfid, fee_rate=-1) @@ -174,6 +178,13 @@ class BumpFeeTest(BitcoinTestFramework): assert_raises_rpc_error(-8, "Invalid parameter, duplicate key: data", rbf_node.bumpfee, rbfid, {"outputs": [{"data": "deadbeef"}, {"data": "deadbeef"}]}) + self.log.info("Test reduce_output option") + assert_raises_rpc_error(-1, "JSON integer out of range", rbf_node.bumpfee, rbfid, {"reduce_output": -1}) + assert_raises_rpc_error(-8, "Change position is out of range", rbf_node.bumpfee, rbfid, {"reduce_output": 2}) + + self.log.info("Test outputs and reduce_output cannot both be provided") + assert_raises_rpc_error(-8, "Cannot specify both new outputs to use and an output index to reduce", rbf_node.bumpfee, rbfid, {"reduce_output": 2, "outputs": [{dest_address: 0.1}]}) + self.clear_mempool() def test_bump_back_to_yourself(self): @@ -225,6 +236,72 @@ class BumpFeeTest(BitcoinTestFramework): node.unloadwallet("back_to_yourself") + def test_provided_change_pos(self, rbf_node): + self.log.info("Test the reduce_output option") + + change_addr = rbf_node.getnewaddress() + dest_addr = rbf_node.getnewaddress() + assert_equal(rbf_node.getaddressinfo(change_addr)["ischange"], False) + assert_equal(rbf_node.getaddressinfo(dest_addr)["ischange"], False) + + send_res = rbf_node.send(outputs=[{dest_addr: 1}], options={"change_address": change_addr}) + assert send_res["complete"] + txid = send_res["txid"] + + tx = rbf_node.gettransaction(txid=txid, verbose=True) + assert_equal(len(tx["decoded"]["vout"]), 2) + + change_pos = find_vout_for_address(rbf_node, txid, change_addr) + change_value = tx["decoded"]["vout"][change_pos]["value"] + + bumped = rbf_node.bumpfee(txid, {"reduce_output": change_pos}) + new_txid = bumped["txid"] + + new_tx = rbf_node.gettransaction(txid=new_txid, verbose=True) + assert_equal(len(new_tx["decoded"]["vout"]), 2) + new_change_pos = find_vout_for_address(rbf_node, new_txid, change_addr) + new_change_value = new_tx["decoded"]["vout"][new_change_pos]["value"] + + assert_greater_than(change_value, new_change_value) + + + def test_single_output(self): + self.log.info("Test that single output txs can be bumped") + node = self.nodes[1] + + node.createwallet("single_out_rbf") + wallet = node.get_wallet_rpc("single_out_rbf") + + addr = wallet.getnewaddress() + amount = Decimal("0.001") + # Make 2 UTXOs + self.nodes[0].sendtoaddress(addr, amount) + self.nodes[0].sendtoaddress(addr, amount) + self.generate(self.nodes[0], 1) + utxos = wallet.listunspent() + + tx = wallet.sendall(recipients=[wallet.getnewaddress()], fee_rate=2, options={"inputs": [utxos[0]]}) + + # Reduce the only output with a crazy high feerate, should fail as the output would be dust + assert_raises_rpc_error(-4, "The transaction amount is too small to pay the fee", wallet.bumpfee, txid=tx["txid"], options={"fee_rate": 1100, "reduce_output": 0}) + + # Reduce the only output successfully + bumped = wallet.bumpfee(txid=tx["txid"], options={"fee_rate": 10, "reduce_output": 0}) + bumped_tx = wallet.gettransaction(txid=bumped["txid"], verbose=True) + assert_equal(len(bumped_tx["decoded"]["vout"]), 1) + assert_equal(len(bumped_tx["decoded"]["vin"]), 1) + assert_equal(bumped_tx["decoded"]["vout"][0]["value"] + bumped["fee"], amount) + assert_fee_amount(bumped["fee"], bumped_tx["decoded"]["vsize"], Decimal(10) / Decimal(1e8) * 1000) + + # Bumping without reducing adds a new input and output + bumped = wallet.bumpfee(txid=bumped["txid"], options={"fee_rate": 20}) + bumped_tx = 
wallet.gettransaction(txid=bumped["txid"], verbose=True) + assert_equal(len(bumped_tx["decoded"]["vout"]), 2) + assert_equal(len(bumped_tx["decoded"]["vin"]), 2) + assert_fee_amount(bumped["fee"], bumped_tx["decoded"]["vsize"], Decimal(20) / Decimal(1e8) * 1000) + + wallet.unloadwallet() + def test_simple_bumpfee_succeeds(self, mode, rbf_node, peer_node, dest_address): self.log.info('Test simple bumpfee: {}'.format(mode)) rbfid = spend_one_input(rbf_node, dest_address) diff --git a/test/functional/wallet_descriptor.py b/test/functional/wallet_descriptor.py index 4673eb091c..6f563987cc 100755 --- a/test/functional/wallet_descriptor.py +++ b/test/functional/wallet_descriptor.py @@ -234,10 +234,12 @@ class WalletDescriptorTest(BitcoinTestFramework): self.log.info("Test that loading descriptor wallet containing legacy key types throws error") self.nodes[0].createwallet(wallet_name="crashme", descriptors=True) self.nodes[0].unloadwallet("crashme") - wallet_db = os.path.join(self.nodes[0].datadir, self.chain, "wallets", "crashme", self.wallet_data_filename) - with sqlite3.connect(wallet_db) as conn: + wallet_db = os.path.join(self.nodes[0].wallets_path, "crashme", self.wallet_data_filename) + conn = sqlite3.connect(wallet_db) + with conn: # add "cscript" entry: key type is uint160 (20 bytes), value type is CScript (zero-length here) conn.execute('INSERT INTO main VALUES(?, ?)', (b'\x07cscript' + b'\x00'*20, b'\x00')) + conn.close() assert_raises_rpc_error(-4, "Unexpected legacy entry in descriptor wallet found.", self.nodes[0].loadwallet, "crashme") diff --git a/test/functional/wallet_fundrawtransaction.py b/test/functional/wallet_fundrawtransaction.py index 46706d6ad2..fa4f009f34 100755 --- a/test/functional/wallet_fundrawtransaction.py +++ b/test/functional/wallet_fundrawtransaction.py @@ -23,6 +23,7 @@ from test_framework.util import ( assert_raises_rpc_error, count_bytes, find_vout_for_address, + get_fee, ) from test_framework.wallet_util import generate_keypair @@ -183,7 +184,6 @@ class RawTransactionsTest(BitcoinTestFramework): inputs = [ ] outputs = { self.nodes[0].getnewaddress() : 1.0 } rawtx = self.nodes[2].createrawtransaction(inputs, outputs) - dec_tx = self.nodes[2].decoderawtransaction(rawtx) rawtxfund = self.nodes[2].fundrawtransaction(rawtx) dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex']) assert len(dec_tx['vin']) > 0 #test that we have enough inputs @@ -193,8 +193,6 @@ class RawTransactionsTest(BitcoinTestFramework): inputs = [ ] outputs = { self.nodes[0].getnewaddress() : 2.2 } rawtx = self.nodes[2].createrawtransaction(inputs, outputs) - dec_tx = self.nodes[2].decoderawtransaction(rawtx) - rawtxfund = self.nodes[2].fundrawtransaction(rawtx) dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex']) assert len(dec_tx['vin']) > 0 #test if we have enough inputs @@ -206,13 +204,9 @@ class RawTransactionsTest(BitcoinTestFramework): inputs = [ ] outputs = { self.nodes[0].getnewaddress() : 2.6, self.nodes[1].getnewaddress() : 2.5 } rawtx = self.nodes[2].createrawtransaction(inputs, outputs) - dec_tx = self.nodes[2].decoderawtransaction(rawtx) rawtxfund = self.nodes[2].fundrawtransaction(rawtx) dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex']) - totalOut = 0 - for out in dec_tx['vout']: - totalOut += out['value'] assert len(dec_tx['vin']) > 0 assert_equal(dec_tx['vin'][0]['scriptSig']['hex'], '') @@ -335,10 +329,8 @@ class RawTransactionsTest(BitcoinTestFramework): rawtxfund = self.nodes[2].fundrawtransaction(rawtx) dec_tx = 
self.nodes[2].decoderawtransaction(rawtxfund['hex']) - totalOut = 0 matchingOuts = 0 for i, out in enumerate(dec_tx['vout']): - totalOut += out['value'] if out['scriptPubKey']['address'] in outputs: matchingOuts+=1 else: @@ -364,12 +356,9 @@ class RawTransactionsTest(BitcoinTestFramework): # Should fail without add_inputs: assert_raises_rpc_error(-4, ERR_NOT_ENOUGH_PRESET_INPUTS, self.nodes[2].fundrawtransaction, rawtx, add_inputs=False) rawtxfund = self.nodes[2].fundrawtransaction(rawtx, add_inputs=True) - dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex']) - totalOut = 0 matchingOuts = 0 for out in dec_tx['vout']: - totalOut += out['value'] if out['scriptPubKey']['address'] in outputs: matchingOuts+=1 @@ -400,10 +389,8 @@ class RawTransactionsTest(BitcoinTestFramework): rawtxfund = self.nodes[2].fundrawtransaction(rawtx, add_inputs=True) dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex']) - totalOut = 0 matchingOuts = 0 for out in dec_tx['vout']: - totalOut += out['value'] if out['scriptPubKey']['address'] in outputs: matchingOuts+=1 @@ -581,11 +568,22 @@ class RawTransactionsTest(BitcoinTestFramework): def test_locked_wallet(self): self.log.info("Test fundrawtxn with locked wallet and hardened derivation") - self.nodes[1].encryptwallet("test") + df_wallet = self.nodes[1].get_wallet_rpc(self.default_wallet_name) + self.nodes[1].createwallet(wallet_name="locked_wallet", descriptors=self.options.descriptors) + wallet = self.nodes[1].get_wallet_rpc("locked_wallet") + # This test is not meant to exercise fee estimation. Making sure all txs are sent at a consistent fee rate. + wallet.settxfee(self.min_relay_tx_fee) + + # Add some balance to the wallet (this will be reverted at the end of the test) + df_wallet.sendall(recipients=[wallet.getnewaddress()]) + self.generate(self.nodes[1], 1) + + # Encrypt wallet and import descriptors + wallet.encryptwallet("test") if self.options.descriptors: - self.nodes[1].walletpassphrase('test', 10) - self.nodes[1].importdescriptors([{ + wallet.walletpassphrase('test', 10) + wallet.importdescriptors([{ 'desc': descsum_create('wpkh(tprv8ZgxMBicQKsPdYeeZbPSKd2KYLmeVKtcFA7kqCxDvDR13MQ6us8HopUR2wLcS2ZKPhLyKsqpDL2FtL73LMHcgoCL7DXsciA8eX8nbjCR2eG/0h/*h)'), 'timestamp': 'now', 'active': True @@ -596,49 +594,60 @@ class RawTransactionsTest(BitcoinTestFramework): 'active': True, 'internal': True }]) - self.nodes[1].walletlock() + wallet.walletlock() # Drain the keypool. - self.nodes[1].getnewaddress() - self.nodes[1].getrawchangeaddress() + wallet.getnewaddress() + wallet.getrawchangeaddress() + + # Choose input + inputs = wallet.listunspent() + + # Deduce exact fee to produce a changeless transaction + tx_size = 110 # Total tx size: 110 vbytes, p2wpkh -> p2wpkh. Input 68 vbytes + rest of tx is 42 vbytes. 
+ value = inputs[0]["amount"] - get_fee(tx_size, self.min_relay_tx_fee) - # Choose 2 inputs - inputs = self.nodes[1].listunspent()[0:2] - value = sum(inp["amount"] for inp in inputs) - Decimal("0.00000500") # Pay a 500 sat fee outputs = {self.nodes[0].getnewaddress():value} - rawtx = self.nodes[1].createrawtransaction(inputs, outputs) + rawtx = wallet.createrawtransaction(inputs, outputs) # fund a transaction that does not require a new key for the change output - self.nodes[1].fundrawtransaction(rawtx) + funded_tx = wallet.fundrawtransaction(rawtx) + assert_equal(funded_tx["changepos"], -1) # fund a transaction that requires a new key for the change output # creating the key must be impossible because the wallet is locked outputs = {self.nodes[0].getnewaddress():value - Decimal("0.1")} - rawtx = self.nodes[1].createrawtransaction(inputs, outputs) - assert_raises_rpc_error(-4, "Transaction needs a change address, but we can't generate it.", self.nodes[1].fundrawtransaction, rawtx) + rawtx = wallet.createrawtransaction(inputs, outputs) + assert_raises_rpc_error(-4, "Transaction needs a change address, but we can't generate it.", wallet.fundrawtransaction, rawtx) # Refill the keypool. - self.nodes[1].walletpassphrase("test", 100) - self.nodes[1].keypoolrefill(8) #need to refill the keypool to get an internal change address - self.nodes[1].walletlock() + wallet.walletpassphrase("test", 100) + wallet.keypoolrefill(8) #need to refill the keypool to get an internal change address + wallet.walletlock() - assert_raises_rpc_error(-13, "walletpassphrase", self.nodes[1].sendtoaddress, self.nodes[0].getnewaddress(), 1.2) + assert_raises_rpc_error(-13, "walletpassphrase", wallet.sendtoaddress, self.nodes[0].getnewaddress(), 1.2) oldBalance = self.nodes[0].getbalance() inputs = [] outputs = {self.nodes[0].getnewaddress():1.1} - rawtx = self.nodes[1].createrawtransaction(inputs, outputs) - fundedTx = self.nodes[1].fundrawtransaction(rawtx) + rawtx = wallet.createrawtransaction(inputs, outputs) + fundedTx = wallet.fundrawtransaction(rawtx) + assert fundedTx["changepos"] != -1 # Now we need to unlock. - self.nodes[1].walletpassphrase("test", 600) - signedTx = self.nodes[1].signrawtransactionwithwallet(fundedTx['hex']) - self.nodes[1].sendrawtransaction(signedTx['hex']) + wallet.walletpassphrase("test", 600) + signedTx = wallet.signrawtransactionwithwallet(fundedTx['hex']) + wallet.sendrawtransaction(signedTx['hex']) self.generate(self.nodes[1], 1) # Make sure funds are received at node1. assert_equal(oldBalance+Decimal('51.10000000'), self.nodes[0].getbalance()) + # Restore pre-test wallet state + wallet.sendall(recipients=[df_wallet.getnewaddress(), df_wallet.getnewaddress(), df_wallet.getnewaddress()]) + wallet.unloadwallet() + self.generate(self.nodes[1], 1) + def test_many_inputs_fee(self): """Multiple (~19) inputs tx test | Compare fee.""" self.log.info("Test fundrawtxn fee with many inputs") @@ -829,7 +838,7 @@ class RawTransactionsTest(BitcoinTestFramework): for invalid_value in ["", 0.000000001, 1e-09, 1.111111111, 1111111111111111, "31.999999999999999999999"]: assert_raises_rpc_error(-3, "Invalid amount", node.fundrawtransaction, rawtx, add_inputs=True, **{param: invalid_value}) # Test fee_rate values that cannot be represented in sat/vB. 
- for invalid_value in [0.0001, 0.00000001, 0.00099999, 31.99999999, "0.0001", "0.00000001", "0.00099999", "31.99999999"]: + for invalid_value in [0.0001, 0.00000001, 0.00099999, 31.99999999]: assert_raises_rpc_error(-3, "Invalid amount", node.fundrawtransaction, rawtx, fee_rate=invalid_value, add_inputs=True) diff --git a/test/functional/wallet_hd.py b/test/functional/wallet_hd.py index 8f84d8ed60..62f8301c16 100755 --- a/test/functional/wallet_hd.py +++ b/test/functional/wallet_hd.py @@ -87,11 +87,11 @@ class WalletHDTest(BitcoinTestFramework): self.stop_node(1) # we need to delete the complete chain directory # otherwise node1 would auto-recover all funds in flag the keypool keys as used - shutil.rmtree(os.path.join(self.nodes[1].datadir, self.chain, "blocks")) - shutil.rmtree(os.path.join(self.nodes[1].datadir, self.chain, "chainstate")) + shutil.rmtree(os.path.join(self.nodes[1].blocks_path)) + shutil.rmtree(os.path.join(self.nodes[1].chain_path, "chainstate")) shutil.copyfile( os.path.join(self.nodes[1].datadir, "hd.bak"), - os.path.join(self.nodes[1].datadir, self.chain, 'wallets', self.default_wallet_name, self.wallet_data_filename), + os.path.join(self.nodes[1].wallets_path, self.default_wallet_name, self.wallet_data_filename), ) self.start_node(1) @@ -115,11 +115,11 @@ class WalletHDTest(BitcoinTestFramework): # Try a RPC based rescan self.stop_node(1) - shutil.rmtree(os.path.join(self.nodes[1].datadir, self.chain, "blocks")) - shutil.rmtree(os.path.join(self.nodes[1].datadir, self.chain, "chainstate")) + shutil.rmtree(os.path.join(self.nodes[1].blocks_path)) + shutil.rmtree(os.path.join(self.nodes[1].chain_path, "chainstate")) shutil.copyfile( os.path.join(self.nodes[1].datadir, "hd.bak"), - os.path.join(self.nodes[1].datadir, self.chain, "wallets", self.default_wallet_name, self.wallet_data_filename), + os.path.join(self.nodes[1].wallets_path, self.default_wallet_name, self.wallet_data_filename), ) self.start_node(1, extra_args=self.extra_args[1]) self.connect_nodes(0, 1) diff --git a/test/functional/wallet_inactive_hdchains.py b/test/functional/wallet_inactive_hdchains.py index c0b3fea1c0..c6d22ab90b 100755 --- a/test/functional/wallet_inactive_hdchains.py +++ b/test/functional/wallet_inactive_hdchains.py @@ -5,7 +5,6 @@ """ Test Inactive HD Chains. 
""" -import os import shutil import time @@ -130,8 +129,8 @@ class InactiveHDChainsTest(BitcoinTestFramework): # Copy test wallet to node 0 test_wallet.unloadwallet() - test_wallet_dir = os.path.join(self.nodes[1].datadir, "regtest/wallets/keymeta_test") - new_test_wallet_dir = os.path.join(self.nodes[0].datadir, "regtest/wallets/keymeta_test") + test_wallet_dir = self.nodes[1].wallets_path / "keymeta_test" + new_test_wallet_dir = self.nodes[0].wallets_path / "keymeta_test" shutil.copytree(test_wallet_dir, new_test_wallet_dir) self.nodes[0].loadwallet("keymeta_test") test_wallet = self.nodes[0].get_wallet_rpc("keymeta_test") diff --git a/test/functional/wallet_keypool_topup.py b/test/functional/wallet_keypool_topup.py index f1458bb374..0f1c33a0c2 100755 --- a/test/functional/wallet_keypool_topup.py +++ b/test/functional/wallet_keypool_topup.py @@ -33,7 +33,7 @@ class KeypoolRestoreTest(BitcoinTestFramework): self.skip_if_no_wallet() def run_test(self): - wallet_path = os.path.join(self.nodes[1].datadir, self.chain, "wallets", self.default_wallet_name, self.wallet_data_filename) + wallet_path = os.path.join(self.nodes[1].wallets_path, self.default_wallet_name, self.wallet_data_filename) wallet_backup_path = os.path.join(self.nodes[1].datadir, "wallet.bak") self.generate(self.nodes[0], COINBASE_MATURITY + 1) diff --git a/test/functional/wallet_listtransactions.py b/test/functional/wallet_listtransactions.py index a44c129c87..18bb8a0cd8 100755 --- a/test/functional/wallet_listtransactions.py +++ b/test/functional/wallet_listtransactions.py @@ -234,8 +234,8 @@ class ListTransactionsTest(BitcoinTestFramework): # refill keypool otherwise the second node wouldn't recognize addresses generated on the first nodes self.nodes[0].keypoolrefill(1000) self.stop_nodes() - wallet0 = os.path.join(self.nodes[0].datadir, self.chain, self.default_wallet_name, "wallet.dat") - wallet2 = os.path.join(self.nodes[2].datadir, self.chain, self.default_wallet_name, "wallet.dat") + wallet0 = os.path.join(self.nodes[0].chain_path, self.default_wallet_name, "wallet.dat") + wallet2 = os.path.join(self.nodes[2].chain_path, self.default_wallet_name, "wallet.dat") shutil.copyfile(wallet0, wallet2) self.start_nodes() # reconnect nodes diff --git a/test/functional/wallet_migration.py b/test/functional/wallet_migration.py index 320f5dd9df..c565c879fb 100755 --- a/test/functional/wallet_migration.py +++ b/test/functional/wallet_migration.py @@ -4,11 +4,12 @@ # file COPYING or http://www.opensource.org/licenses/mit-license.php. 
"""Test Migrating a wallet from legacy to descriptor.""" -import os import random import shutil from test_framework.descriptors import descsum_create from test_framework.test_framework import BitcoinTestFramework +from test_framework.messages import COIN, CTransaction, CTxOut +from test_framework.script_util import key_to_p2pkh_script, script_to_p2sh_script, script_to_p2wsh_script from test_framework.util import ( assert_equal, assert_raises_rpc_error, @@ -35,7 +36,7 @@ class WalletMigrationTest(BitcoinTestFramework): self.skip_if_no_bdb() def assert_is_sqlite(self, wallet_name): - wallet_file_path = os.path.join(self.nodes[0].datadir, "regtest/wallets", wallet_name, self.wallet_data_filename) + wallet_file_path = self.nodes[0].wallets_path / wallet_name / self.wallet_data_filename with open(wallet_file_path, 'rb') as f: file_magic = f.read(16) assert_equal(file_magic, b'SQLite format 3\x00') @@ -68,6 +69,15 @@ class WalletMigrationTest(BitcoinTestFramework): del d["parent_descs"] assert_equal(received_list_txs, expected_list_txs) + def check_address(self, wallet, addr, is_mine, is_change, label): + addr_info = wallet.getaddressinfo(addr) + assert_equal(addr_info['ismine'], is_mine) + assert_equal(addr_info['ischange'], is_change) + if label is not None: + assert_equal(addr_info['labels'], [label]), + else: + assert_equal(addr_info['labels'], []), + def test_basic(self): default = self.nodes[0].get_wallet_rpc(self.default_wallet_name) @@ -124,13 +134,22 @@ class WalletMigrationTest(BitcoinTestFramework): self.generate(self.nodes[0], 1) bal = basic1.getbalance() txs = basic1.listtransactions() + addr_gps = basic1.listaddressgroupings() - basic1.migratewallet() + basic1_migrate = basic1.migratewallet() assert_equal(basic1.getwalletinfo()["descriptors"], True) self.assert_is_sqlite("basic1") assert_equal(basic1.getbalance(), bal) self.assert_list_txs_equal(basic1.listtransactions(), txs) + self.log.info("Test backup file can be successfully restored") + self.nodes[0].restorewallet("basic1_restored", basic1_migrate['backup_path']) + basic1_restored = self.nodes[0].get_wallet_rpc("basic1_restored") + basic1_restored_wi = basic1_restored.getwalletinfo() + assert_equal(basic1_restored_wi['balance'], bal) + assert_equal(basic1_restored.listaddressgroupings(), addr_gps) + self.assert_list_txs_equal(basic1_restored.listtransactions(), txs) + # restart node and verify that everything is still there self.restart_node(0) default = self.nodes[0].get_wallet_rpc(self.default_wallet_name) @@ -458,11 +477,11 @@ class WalletMigrationTest(BitcoinTestFramework): wallet.unloadwallet() - wallet_file_path = os.path.join(self.nodes[0].datadir, "regtest", "wallets", "notloaded2") + wallet_file_path = self.nodes[0].wallets_path / "notloaded2" self.nodes[0].migratewallet(wallet_file_path) # Because we gave the name by full path, the loaded wallet's name is that path too. 
- wallet = self.nodes[0].get_wallet_rpc(wallet_file_path) + wallet = self.nodes[0].get_wallet_rpc(str(wallet_file_path)) info = wallet.getwalletinfo() assert_equal(info["descriptors"], True) @@ -485,12 +504,12 @@ class WalletMigrationTest(BitcoinTestFramework): wallet = self.create_legacy_wallet("plainfile") wallet.unloadwallet() - wallets_dir = os.path.join(self.nodes[0].datadir, "regtest", "wallets") - wallet_path = os.path.join(wallets_dir, "plainfile") - wallet_dat_path = os.path.join(wallet_path, "wallet.dat") - shutil.copyfile(wallet_dat_path, os.path.join(wallets_dir, "plainfile.bak")) + wallets_dir = self.nodes[0].wallets_path + wallet_path = wallets_dir / "plainfile" + wallet_dat_path = wallet_path / "wallet.dat" + shutil.copyfile(wallet_dat_path, wallets_dir / "plainfile.bak") shutil.rmtree(wallet_path) - shutil.move(os.path.join(wallets_dir, "plainfile.bak"), wallet_path) + shutil.move(wallets_dir / "plainfile.bak", wallet_path) self.nodes[0].loadwallet("plainfile") info = wallet.getwalletinfo() @@ -502,8 +521,181 @@ class WalletMigrationTest(BitcoinTestFramework): assert_equal(info["descriptors"], True) assert_equal(info["format"], "sqlite") - assert os.path.isdir(wallet_path) - assert os.path.isfile(wallet_dat_path) + assert wallet_path.is_dir() + assert wallet_dat_path.is_file() + + def test_addressbook(self): + df_wallet = self.nodes[0].get_wallet_rpc(self.default_wallet_name) + + self.log.info("Test migration of address book data") + wallet = self.create_legacy_wallet("legacy_addrbook") + df_wallet.sendtoaddress(wallet.getnewaddress(), 3) + + # Import watch-only script to create a watch-only wallet after migration + watch_addr = df_wallet.getnewaddress() + wallet.importaddress(watch_addr) + df_wallet.sendtoaddress(watch_addr, 2) + + # Import solvable script + multi_addr1 = wallet.getnewaddress() + multi_addr2 = wallet.getnewaddress() + multi_addr3 = df_wallet.getnewaddress() + wallet.importpubkey(df_wallet.getaddressinfo(multi_addr3)["pubkey"]) + ms_addr_info = wallet.addmultisigaddress(2, [multi_addr1, multi_addr2, multi_addr3]) + + self.generate(self.nodes[0], 1) + + # Test vectors + addr_external = { + "addr": df_wallet.getnewaddress(), + "is_mine": False, + "is_change": False, + "label": "" + } + addr_external_with_label = { + "addr": df_wallet.getnewaddress(), + "is_mine": False, + "is_change": False, + "label": "external" + } + addr_internal = { + "addr": wallet.getnewaddress(), + "is_mine": True, + "is_change": False, + "label": "" + } + addr_internal_with_label = { + "addr": wallet.getnewaddress(), + "is_mine": True, + "is_change": False, + "label": "internal" + } + change_address = { + "addr": wallet.getrawchangeaddress(), + "is_mine": True, + "is_change": True, + "label": None + } + watch_only_addr = { + "addr": watch_addr, + "is_mine": False, + "is_change": False, + "label": "imported" + } + ms_addr = { + "addr": ms_addr_info['address'], + "is_mine": False, + "is_change": False, + "label": "multisig" + } + + # To store the change address in the addressbook need to send coins to it + wallet.send(outputs=[{wallet.getnewaddress(): 2}], options={"change_address": change_address['addr']}) + self.generate(self.nodes[0], 1) + + # Util wrapper func for 'addr_info' + def check(info, node): + self.check_address(node, info['addr'], info['is_mine'], info['is_change'], info["label"]) + + # Pre-migration: set label and perform initial checks + for addr_info in [addr_external, addr_external_with_label, addr_internal, addr_internal_with_label, change_address, watch_only_addr, 
ms_addr]: + if not addr_info['is_change']: + wallet.setlabel(addr_info['addr'], addr_info["label"]) + check(addr_info, wallet) + + # Migrate wallet + info_migration = wallet.migratewallet() + wallet_wo = self.nodes[0].get_wallet_rpc(info_migration["watchonly_name"]) + wallet_solvables = self.nodes[0].get_wallet_rpc(info_migration["solvables_name"]) + + ######################### + # Post migration checks # + ######################### + + # First check the main wallet + for addr_info in [addr_external, addr_external_with_label, addr_internal, addr_internal_with_label, change_address, ms_addr]: + check(addr_info, wallet) + + # Watch-only wallet will contain the watch-only entry (with 'is_mine=True') and all external addresses ('send') + self.check_address(wallet_wo, watch_only_addr['addr'], is_mine=True, is_change=watch_only_addr['is_change'], label=watch_only_addr["label"]) + for addr_info in [addr_external, addr_external_with_label, ms_addr]: + check(addr_info, wallet_wo) + + # Solvables wallet will contain the multisig entry (with 'is_mine=True') and all external addresses ('send') + self.check_address(wallet_solvables, ms_addr['addr'], is_mine=True, is_change=ms_addr['is_change'], label=ms_addr["label"]) + for addr_info in [addr_external, addr_external_with_label]: + check(addr_info, wallet_solvables) + + ######################################################################################## + # Now restart migrated wallets and verify that the addressbook entries are still there # + ######################################################################################## + + # First the main wallet + self.nodes[0].unloadwallet("legacy_addrbook") + self.nodes[0].loadwallet("legacy_addrbook") + for addr_info in [addr_external, addr_external_with_label, addr_internal, addr_internal_with_label, change_address, ms_addr]: + check(addr_info, wallet) + + # Watch-only wallet + self.nodes[0].unloadwallet(info_migration["watchonly_name"]) + self.nodes[0].loadwallet(info_migration["watchonly_name"]) + self.check_address(wallet_wo, watch_only_addr['addr'], is_mine=True, is_change=watch_only_addr['is_change'], label=watch_only_addr["label"]) + for addr_info in [addr_external, addr_external_with_label, ms_addr]: + check(addr_info, wallet_wo) + + # Solvables wallet + self.nodes[0].unloadwallet(info_migration["solvables_name"]) + self.nodes[0].loadwallet(info_migration["solvables_name"]) + self.check_address(wallet_solvables, ms_addr['addr'], is_mine=True, is_change=ms_addr['is_change'], label=ms_addr["label"]) + for addr_info in [addr_external, addr_external_with_label]: + check(addr_info, wallet_solvables) + + def test_migrate_raw_p2sh(self): + self.log.info("Test migration of watch-only raw p2sh script") + df_wallet = self.nodes[0].get_wallet_rpc(self.default_wallet_name) + wallet = self.create_legacy_wallet("raw_p2sh") + + def send_to_script(script, amount): + tx = CTransaction() + tx.vout.append(CTxOut(nValue=amount*COIN, scriptPubKey=script)) + + hex_tx = df_wallet.fundrawtransaction(tx.serialize().hex())['hex'] + signed_tx = df_wallet.signrawtransactionwithwallet(hex_tx) + df_wallet.sendrawtransaction(signed_tx['hex']) + self.generate(self.nodes[0], 1) + + # Craft sh(pkh(key)) script and send coins to it + pubkey = df_wallet.getaddressinfo(df_wallet.getnewaddress())["pubkey"] + script_pkh = key_to_p2pkh_script(pubkey) + script_sh_pkh = script_to_p2sh_script(script_pkh) + send_to_script(script=script_sh_pkh, amount=2) + + # Import script and check balance + 
wallet.rpc.importaddress(address=script_pkh.hex(), label="raw_spk", rescan=True, p2sh=True) + assert_equal(wallet.getbalances()['watchonly']['trusted'], 2) + + # Craft wsh(pkh(key)) and send coins to it + pubkey = df_wallet.getaddressinfo(df_wallet.getnewaddress())["pubkey"] + script_wsh_pkh = script_to_p2wsh_script(key_to_p2pkh_script(pubkey)) + send_to_script(script=script_wsh_pkh, amount=3) + + # Import script and check balance + wallet.rpc.importaddress(address=script_wsh_pkh.hex(), label="raw_spk2", rescan=True, p2sh=False) + assert_equal(wallet.getbalances()['watchonly']['trusted'], 5) + + # Migrate wallet and re-check balance + info_migration = wallet.migratewallet() + wallet_wo = self.nodes[0].get_wallet_rpc(info_migration["watchonly_name"]) + + # Watch-only balance is under "mine". + assert_equal(wallet_wo.getbalances()['mine']['trusted'], 5) + # The watch-only scripts are no longer part of the main wallet + assert_equal(wallet.getbalances()['mine']['trusted'], 0) + + # Just in case, also verify wallet restart + self.nodes[0].unloadwallet(info_migration["watchonly_name"]) + self.nodes[0].loadwallet(info_migration["watchonly_name"]) + assert_equal(wallet_wo.getbalances()['mine']['trusted'], 5) def run_test(self): self.generate(self.nodes[0], 101) @@ -519,6 +711,8 @@ class WalletMigrationTest(BitcoinTestFramework): self.test_unloaded_by_path() self.test_default_wallet() self.test_direct_file() + self.test_addressbook() + self.test_migrate_raw_p2sh() if __name__ == '__main__': WalletMigrationTest().main() diff --git a/test/functional/wallet_miniscript.py b/test/functional/wallet_miniscript.py index 7bc3424bf4..45f0df1c76 100755 --- a/test/functional/wallet_miniscript.py +++ b/test/functional/wallet_miniscript.py @@ -277,6 +277,18 @@ class WalletMiniscriptTest(BitcoinTestFramework): assert not res["success"] assert "is not sane: witnesses without signature exist" in res["error"]["message"] + # Sanity check we wouldn't let an unspendable Miniscript descriptor in + res = self.ms_wo_wallet.importdescriptors( + [ + { + "desc": descsum_create("wsh(0)"), + "active": False, + "timestamp": "now", + } + ] + )[0] + assert not res["success"] and "is not satisfiable" in res["error"]["message"] + # Test we can track any type of Miniscript for ms in MINISCRIPTS: self.watchonly_test(ms) diff --git a/test/functional/wallet_multiwallet.py b/test/functional/wallet_multiwallet.py index 2faf6cad8b..10bc516d8f 100755 --- a/test/functional/wallet_multiwallet.py +++ b/test/functional/wallet_multiwallet.py @@ -62,7 +62,7 @@ class MultiWalletTest(BitcoinTestFramework): def run_test(self): node = self.nodes[0] - data_dir = lambda *p: os.path.join(node.datadir, self.chain, *p) + data_dir = lambda *p: os.path.join(node.chain_path, *p) wallet_dir = lambda *p: data_dir('wallets', *p) wallet = lambda name: node.get_wallet_rpc(name) @@ -299,7 +299,7 @@ class MultiWalletTest(BitcoinTestFramework): assert_equal(set(self.nodes[0].listwallets()), set(wallet_names)) # Fail to load if wallet doesn't exist - path = os.path.join(self.options.tmpdir, "node0", "regtest", "wallets", "wallets") + path = wallet_dir("wallets") assert_raises_rpc_error(-18, "Wallet file verification failed. Failed to load database path '{}'. 
Path does not exist.".format(path), self.nodes[0].loadwallet, 'wallets') # Fail to load duplicate wallets @@ -307,7 +307,7 @@ class MultiWalletTest(BitcoinTestFramework): if not self.options.descriptors: # This tests the default wallet that BDB makes, so SQLite wallet doesn't need to test this # Fail to load duplicate wallets by different ways (directory and filepath) - path = os.path.join(self.options.tmpdir, "node0", "regtest", "wallets", "wallet.dat") + path = wallet_dir("wallet.dat") assert_raises_rpc_error(-35, "Wallet file verification failed. Refusing to load database. Data file '{}' is already loaded.".format(path), self.nodes[0].loadwallet, 'wallet.dat') # Only BDB doesn't open duplicate wallet files. SQLite does not have this limitation. While this may be desired in the future, it is not necessary @@ -322,13 +322,13 @@ class MultiWalletTest(BitcoinTestFramework): # Fail to load if a directory is specified that doesn't contain a wallet os.mkdir(wallet_dir('empty_wallet_dir')) - path = os.path.join(self.options.tmpdir, "node0", "regtest", "wallets", "empty_wallet_dir") + path = wallet_dir("empty_wallet_dir") assert_raises_rpc_error(-18, "Wallet file verification failed. Failed to load database path '{}'. Data is not in recognized format.".format(path), self.nodes[0].loadwallet, 'empty_wallet_dir') self.log.info("Test dynamic wallet creation.") # Fail to create a wallet if it already exists. - path = os.path.join(self.options.tmpdir, "node0", "regtest", "wallets", "w2") + path = wallet_dir("w2") assert_raises_rpc_error(-4, "Failed to create database path '{}'. Database already exists.".format(path), self.nodes[0].createwallet, 'w2') # Successfully create a wallet with a new name diff --git a/test/functional/wallet_pruning.py b/test/functional/wallet_pruning.py index 1ceceaee93..06bd992da7 100755 --- a/test/functional/wallet_pruning.py +++ b/test/functional/wallet_pruning.py @@ -106,7 +106,7 @@ class WalletPruningTest(BitcoinTestFramework): def has_block(self, block_index): """Checks if the pruned node has the specific blk0000*.dat file""" - return os.path.isfile(os.path.join(self.nodes[1].datadir, self.chain, "blocks", f"blk{block_index:05}.dat")) + return os.path.isfile(os.path.join(self.nodes[1].blocks_path, f"blk{block_index:05}.dat")) def create_wallet(self, wallet_name, *, unload=False): """Creates and dumps a wallet on the non-pruned node0 to be later import by the pruned node""" diff --git a/test/functional/wallet_reorgsrestore.py b/test/functional/wallet_reorgsrestore.py index 1c79c6816c..af01b9439f 100755 --- a/test/functional/wallet_reorgsrestore.py +++ b/test/functional/wallet_reorgsrestore.py @@ -89,7 +89,7 @@ class ReorgsRestoreTest(BitcoinTestFramework): # Node0 wallet file is loaded on longest sync'ed node1 self.stop_node(1) self.nodes[0].backupwallet(os.path.join(self.nodes[0].datadir, 'wallet.bak')) - shutil.copyfile(os.path.join(self.nodes[0].datadir, 'wallet.bak'), os.path.join(self.nodes[1].datadir, self.chain, self.default_wallet_name, self.wallet_data_filename)) + shutil.copyfile(os.path.join(self.nodes[0].datadir, 'wallet.bak'), os.path.join(self.nodes[1].chain_path, self.default_wallet_name, self.wallet_data_filename)) self.start_node(1) tx_after_reorg = self.nodes[1].gettransaction(txid) # Check that normal confirmed tx is confirmed again but with different blockhash diff --git a/test/functional/wallet_resendwallettransactions.py b/test/functional/wallet_resendwallettransactions.py index 7bdb6f5e3a..f36d8efda7 100755 --- 
a/test/functional/wallet_resendwallettransactions.py +++ b/test/functional/wallet_resendwallettransactions.py @@ -108,9 +108,13 @@ class ResendWalletTransactionsTest(BitcoinTestFramework): # Set correct m_best_block_time, which is used in ResubmitWalletTransactions node.syncwithvalidationinterfacequeue() - # Evict these txs from the mempool evict_time = block_time + 60 * 60 * DEFAULT_MEMPOOL_EXPIRY_HOURS + 5 - node.setmocktime(evict_time) + # Flush out currently scheduled resubmit attempt now so that there can't be one right between eviction and check. + with node.assert_debug_log(['resubmit 2 unconfirmed transactions']): + node.setmocktime(evict_time) + node.mockscheduler(60) + + # Evict these txs from the mempool indep_send = node.send(outputs=[{node.getnewaddress(): 1}], inputs=[indep_utxo]) node.getmempoolentry(indep_send["txid"]) assert_raises_rpc_error(-5, "Transaction not in mempool", node.getmempoolentry, txid) diff --git a/test/functional/wallet_send.py b/test/functional/wallet_send.py index d7bb6ab1e7..4728f53be7 100755 --- a/test/functional/wallet_send.py +++ b/test/functional/wallet_send.py @@ -387,7 +387,7 @@ class WalletSendTest(BitcoinTestFramework): self.test_send(from_wallet=w0, to_wallet=w1, amount=1, fee_rate=invalid_value, expect_error=(-3, msg)) self.test_send(from_wallet=w0, to_wallet=w1, amount=1, arg_fee_rate=invalid_value, expect_error=(-3, msg)) # Test fee_rate values that cannot be represented in sat/vB. - for invalid_value in [0.0001, 0.00000001, 0.00099999, 31.99999999, "0.0001", "0.00000001", "0.00099999", "31.99999999"]: + for invalid_value in [0.0001, 0.00000001, 0.00099999, 31.99999999]: self.test_send(from_wallet=w0, to_wallet=w1, amount=1, fee_rate=invalid_value, expect_error=(-3, msg)) self.test_send(from_wallet=w0, to_wallet=w1, amount=1, arg_fee_rate=invalid_value, expect_error=(-3, msg)) # Test fee_rate out of range (negative number). diff --git a/test/functional/wallet_signer.py b/test/functional/wallet_signer.py index c414147c65..2735ec1706 100755 --- a/test/functional/wallet_signer.py +++ b/test/functional/wallet_signer.py @@ -25,29 +25,26 @@ class WalletSignerTest(BitcoinTestFramework): def mock_signer_path(self): path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'mocks', 'signer.py') if platform.system() == "Windows": - return "py " + path + return "py -3 " + path else: return path def mock_invalid_signer_path(self): path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'mocks', 'invalid_signer.py') if platform.system() == "Windows": - return "py " + path + return "py -3 " + path else: return path def mock_multi_signers_path(self): path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'mocks', 'multi_signers.py') if platform.system() == "Windows": - return "py " + path + return "py -3 " + path else: return path def set_test_params(self): self.num_nodes = 2 - # The experimental syscall sandbox feature (-sandbox) is not compatible with -signer (which - # invokes execve). 
- self.disable_syscall_sandbox = True self.extra_args = [ [], diff --git a/test/functional/wallet_signrawtransactionwithwallet.py b/test/functional/wallet_signrawtransactionwithwallet.py index 3d2f41cb83..d560dfdc11 100755 --- a/test/functional/wallet_signrawtransactionwithwallet.py +++ b/test/functional/wallet_signrawtransactionwithwallet.py @@ -33,6 +33,10 @@ from decimal import ( getcontext, ) + +RAW_TX = '020000000156b958f78e3f24e0b2f4e4db1255426b0902027cb37e3ddadb52e37c3557dddb0000000000ffffffff01c0a6b929010000001600149a2ee8c77140a053f36018ac8124a6ececc1668a00000000' + + class SignRawTransactionWithWalletTest(BitcoinTestFramework): def add_options(self, parser): self.add_wallet_options(parser) @@ -47,10 +51,12 @@ class SignRawTransactionWithWalletTest(BitcoinTestFramework): def test_with_lock_outputs(self): self.log.info("Test correct error reporting when trying to sign a locked output") self.nodes[0].encryptwallet("password") + assert_raises_rpc_error(-13, "Please enter the wallet passphrase with walletpassphrase first", self.nodes[0].signrawtransactionwithwallet, RAW_TX) + self.nodes[0].walletpassphrase("password", 9999) - rawTx = '020000000156b958f78e3f24e0b2f4e4db1255426b0902027cb37e3ddadb52e37c3557dddb0000000000ffffffff01c0a6b929010000001600149a2ee8c77140a053f36018ac8124a6ececc1668a00000000' - - assert_raises_rpc_error(-13, "Please enter the wallet passphrase with walletpassphrase first", self.nodes[0].signrawtransactionwithwallet, rawTx) + def test_with_invalid_sighashtype(self): + self.log.info("Test signrawtransactionwithwallet raises if an invalid sighashtype is passed") + assert_raises_rpc_error(-8, "all is not a valid sighash parameter.", self.nodes[0].signrawtransactionwithwallet, hexstring=RAW_TX, sighashtype="all") def script_verification_error_test(self): """Create and sign a raw transaction with valid (vin 0), invalid (vin 1) and one missing (vin 2) input script. @@ -299,6 +305,7 @@ class SignRawTransactionWithWalletTest(BitcoinTestFramework): self.script_verification_error_test() self.OP_1NEGATE_test() self.test_with_lock_outputs() + self.test_with_invalid_sighashtype() self.test_fully_signed_tx() self.test_signing_with_csv() self.test_signing_with_cltv() diff --git a/test/functional/wallet_upgradewallet.py b/test/functional/wallet_upgradewallet.py index 4495a7d778..a4f2a9b74d 100755 --- a/test/functional/wallet_upgradewallet.py +++ b/test/functional/wallet_upgradewallet.py @@ -138,11 +138,11 @@ class UpgradeWalletTest(BitcoinTestFramework): self.log.info("Test upgradewallet RPC...") # Prepare for copying of the older wallet - node_master_wallet_dir = os.path.join(node_master.datadir, "regtest/wallets", self.default_wallet_name) - node_master_wallet = os.path.join(node_master_wallet_dir, self.default_wallet_name, self.wallet_data_filename) - v16_3_wallet = os.path.join(v16_3_node.datadir, "regtest/wallets/wallet.dat") - v15_2_wallet = os.path.join(v15_2_node.datadir, "regtest/wallet.dat") - split_hd_wallet = os.path.join(v15_2_node.datadir, "regtest/splithd") + node_master_wallet_dir = node_master.wallets_path / self.default_wallet_name + node_master_wallet = node_master_wallet_dir / self.default_wallet_name / self.wallet_data_filename + v16_3_wallet = v16_3_node.wallets_path / "wallet.dat" + v15_2_wallet = v15_2_node.chain_path / "wallet.dat" + split_hd_wallet = v15_2_node.chain_path / "splithd" self.stop_nodes() # Make split hd wallet |