# --- bitcoin/test/functional/wallet_dump.py ---
#!/usr/bin/env python3
# Copyright (c) 2016-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the dumpwallet RPC."""
import datetime
import time
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
)
from test_framework.wallet_util import WalletUnlock
def read_dump(file_name, addrs, script_addrs, hd_master_addr_old):
"""
Read the given dump, count the addresses that match, and count change and reserve keys.
Also check that the old hd_master is inactive.
"""
with open(file_name, encoding='utf8') as inputfile:
found_comments = []
found_legacy_addr = 0
found_p2sh_segwit_addr = 0
found_bech32_addr = 0
found_script_addr = 0
found_addr_chg = 0
found_addr_rsv = 0
hd_master_addr_ret = None
for line in inputfile:
line = line.strip()
if not line:
continue
if line[0] == '#':
found_comments.append(line)
else:
# split out some data
key_date_label, comment = line.split("#")
key_date_label = key_date_label.split(" ")
# key = key_date_label[0]
date = key_date_label[1]
keytype = key_date_label[2]
imported_key = date == '1970-01-01T00:00:01Z'
if imported_key:
# Imported keys have a fixed timestamp (1970-01-01T00:00:01Z), may map to
# multiple addresses, and carry no label (keypath), so skip them
continue
addr_keypath = comment.split(" addr=")[1]
addr = addr_keypath.split(" ")[0]
keypath = None
if keytype == "inactivehdseed=1":
# ensure the old master is still available
assert hd_master_addr_old == addr
elif keytype == "hdseed=1":
# ensure we have generated a new hd master key
assert hd_master_addr_old != addr
hd_master_addr_ret = addr
elif keytype == "script=1":
# scripts don't have keypaths
keypath = None
else:
keypath = addr_keypath.rstrip().split("hdkeypath=")[1]
# count key types
for addrObj in addrs:
if addrObj['address'] == addr.split(",")[0] and addrObj['hdkeypath'] == keypath and keytype == "label=":
if addr.startswith('m') or addr.startswith('n'):
# P2PKH address
found_legacy_addr += 1
elif addr.startswith('2'):
# P2SH-segwit address
found_p2sh_segwit_addr += 1
elif addr.startswith('bcrt1'):
found_bech32_addr += 1
break
elif keytype == "change=1":
found_addr_chg += 1
break
elif keytype == "reserve=1":
found_addr_rsv += 1
break
# count scripts
for script_addr in script_addrs:
if script_addr == addr.rstrip() and keytype == "script=1":
found_script_addr += 1
break
return found_comments, found_legacy_addr, found_p2sh_segwit_addr, found_bech32_addr, found_script_addr, found_addr_chg, found_addr_rsv, hd_master_addr_ret
class WalletDumpTest(BitcoinTestFramework):
def add_options(self, parser):
self.add_wallet_options(parser, descriptors=False)
def set_test_params(self):
self.num_nodes = 1
self.extra_args = [["-keypool=90", "-addresstype=legacy"]]
self.rpc_timeout = 120
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def setup_network(self):
self.add_nodes(self.num_nodes, extra_args=self.extra_args)
self.start_nodes()
def run_test(self):
self.nodes[0].createwallet("dump")
wallet_unenc_dump = self.nodes[0].datadir_path / "wallet.unencrypted.dump"
wallet_enc_dump = self.nodes[0].datadir_path / "wallet.encrypted.dump"
# generate 30 addresses to compare against the dump
# - 10 legacy P2PKH
# - 10 P2SH-segwit
# - 10 bech32
test_addr_count = 10
addrs = []
for address_type in ['legacy', 'p2sh-segwit', 'bech32']:
for _ in range(test_addr_count):
addr = self.nodes[0].getnewaddress(address_type=address_type)
vaddr = self.nodes[0].getaddressinfo(addr) # required to get hd keypath
addrs.append(vaddr)
# Test scripts dump by adding a 1-of-1 multisig address
multisig_addr = self.nodes[0].addmultisigaddress(1, [addrs[1]["address"]])["address"]
# Refill the keypool. getnewaddress() refills the keypool *before* taking a key from
# the keypool, so the final call to getnewaddress leaves the keypool with one key below
# its capacity
self.nodes[0].keypoolrefill()
self.log.info('Mine a block one second before the wallet is dumped')
dump_time = int(time.time())
self.nodes[0].setmocktime(dump_time - 1)
self.generate(self.nodes[0], 1)
self.nodes[0].setmocktime(dump_time)
dump_time_str = '# * Created on {}Z'.format(
datetime.datetime.fromtimestamp(
dump_time,
tz=datetime.timezone.utc,
).replace(tzinfo=None).isoformat())
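# e.g. '# * Created on 2024-01-31T12:34:56Z' (hypothetical timestamp, for illustration)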
dump_best_block_1 = '# * Best block at time of backup was {} ({}),'.format(
self.nodes[0].getblockcount(),
self.nodes[0].getbestblockhash(),
)
dump_best_block_2 = '# mined on {}Z'.format(
datetime.datetime.fromtimestamp(
dump_time - 1,
tz=datetime.timezone.utc,
).replace(tzinfo=None).isoformat())
self.log.info('Dump unencrypted wallet')
result = self.nodes[0].dumpwallet(wallet_unenc_dump)
assert_equal(result['filename'], str(wallet_unenc_dump))
found_comments, found_legacy_addr, found_p2sh_segwit_addr, found_bech32_addr, found_script_addr, found_addr_chg, found_addr_rsv, hd_master_addr_unenc = \
read_dump(wallet_unenc_dump, addrs, [multisig_addr], None)
assert '# End of dump' in found_comments # Check that file is not corrupt
assert_equal(dump_time_str, next(c for c in found_comments if c.startswith('# * Created on')))
assert_equal(dump_best_block_1, next(c for c in found_comments if c.startswith('# * Best block')))
assert_equal(dump_best_block_2, next(c for c in found_comments if c.startswith('# mined on')))
assert_equal(found_legacy_addr, test_addr_count) # all keys must be in the dump
assert_equal(found_p2sh_segwit_addr, test_addr_count) # all keys must be in the dump
assert_equal(found_bech32_addr, test_addr_count) # all keys must be in the dump
assert_equal(found_script_addr, 1) # all scripts must be in the dump
assert_equal(found_addr_chg, 0) # 0 blocks were mined
assert_equal(found_addr_rsv, 90 * 2) # keypool of 90 external keys plus 90 internal keys
# encrypt wallet, restart, unlock and dump
self.nodes[0].encryptwallet('test')
with WalletUnlock(self.nodes[0], "test"):
# Should be a no-op:
self.nodes[0].keypoolrefill()
self.nodes[0].dumpwallet(wallet_enc_dump)
found_comments, found_legacy_addr, found_p2sh_segwit_addr, found_bech32_addr, found_script_addr, found_addr_chg, found_addr_rsv, _ = \
read_dump(wallet_enc_dump, addrs, [multisig_addr], hd_master_addr_unenc)
assert '# End of dump' in found_comments # Check that file is not corrupt
assert_equal(dump_time_str, next(c for c in found_comments if c.startswith('# * Created on')))
assert_equal(dump_best_block_1, next(c for c in found_comments if c.startswith('# * Best block')))
assert_equal(dump_best_block_2, next(c for c in found_comments if c.startswith('# mined on')))
assert_equal(found_legacy_addr, test_addr_count) # all keys must be in the dump
assert_equal(found_p2sh_segwit_addr, test_addr_count) # all keys must be in the dump
assert_equal(found_bech32_addr, test_addr_count) # all keys must be in the dump
assert_equal(found_script_addr, 1)
assert_equal(found_addr_chg, 90 * 2) # old reserve keys are marked as change now
assert_equal(found_addr_rsv, 90 * 2)
# Overwriting should fail
assert_raises_rpc_error(-8, "already exists", lambda: self.nodes[0].dumpwallet(wallet_enc_dump))
# Restart node with new wallet, and test importwallet
self.restart_node(0)
self.nodes[0].createwallet("w2")
# Make sure the address is not IsMine before import
result = self.nodes[0].getaddressinfo(multisig_addr)
assert not result['ismine']
self.nodes[0].importwallet(wallet_unenc_dump)
# Now check IsMine is true
result = self.nodes[0].getaddressinfo(multisig_addr)
assert result['ismine']
self.log.info('Check that wallet is flushed')
with self.nodes[0].assert_debug_log(['Flushing wallet.dat'], timeout=20):
self.nodes[0].getnewaddress()
# Make sure that dumpwallet doesn't have a lock order issue when there is an unconfirmed tx and it is reloaded
# See https://github.com/bitcoin/bitcoin/issues/22489
self.nodes[0].createwallet("w3")
w3 = self.nodes[0].get_wallet_rpc("w3")
w3.importprivkey(privkey=self.nodes[0].get_deterministic_priv_key().key, label="coinbase_import")
w3.sendtoaddress(w3.getnewaddress(), 10)
w3.unloadwallet()
self.nodes[0].loadwallet("w3")
w3.dumpwallet(self.nodes[0].datadir_path / "w3.dump")
if __name__ == '__main__':
WalletDumpTest().main()
# --- bitcoin/test/functional/feature_includeconf.py ---
#!/usr/bin/env python3
# Copyright (c) 2018-2021 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Tests the includeconf argument
Verify that:
1. adding includeconf to the configuration file causes the includeconf
file to be loaded in the correct order.
2. includeconf cannot be used as a command line argument.
3. includeconf cannot be used recursively (i.e. includeconf can only
be used from the base config file).
4. multiple includeconf arguments can be specified in the main config
file.
"""
from test_framework.test_framework import BitcoinTestFramework
class IncludeConfTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
def run_test(self):
# Create additional config files
# - tmpdir/node0/relative.conf
with open(self.nodes[0].datadir_path / "relative.conf", "w", encoding="utf8") as f:
f.write("uacomment=relative\n")
# - tmpdir/node0/relative2.conf
with open(self.nodes[0].datadir_path / "relative2.conf", "w", encoding="utf8") as f:
f.write("uacomment=relative2\n")
with open(self.nodes[0].datadir_path / "bitcoin.conf", "a", encoding="utf8") as f:
f.write("uacomment=main\nincludeconf=relative.conf\n")
self.restart_node(0)
self.log.info("-includeconf works from config file. subversion should end with 'main; relative)/'")
subversion = self.nodes[0].getnetworkinfo()["subversion"]
assert subversion.endswith("main; relative)/")
self.log.info("-includeconf cannot be used as command-line arg")
self.stop_node(0)
self.nodes[0].assert_start_raises_init_error(
extra_args=['-noincludeconf=0'],
expected_msg='Error: Error parsing command line arguments: -includeconf cannot be used from commandline; -includeconf=true',
)
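# Note: -noincludeconf=0 double-negates to -includeconf=true, which is why the
# error above reports "-includeconf=true".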
self.nodes[0].assert_start_raises_init_error(
extra_args=['-includeconf=relative2.conf', '-includeconf=no_warn.conf'],
expected_msg='Error: Error parsing command line arguments: -includeconf cannot be used from commandline; -includeconf="relative2.conf"',
)
self.log.info("-includeconf cannot be used recursively. subversion should end with 'main; relative)/'")
with open(self.nodes[0].datadir_path / "relative.conf", "a", encoding="utf8") as f:
f.write("includeconf=relative2.conf\n")
self.start_node(0)
subversion = self.nodes[0].getnetworkinfo()["subversion"]
assert subversion.endswith("main; relative)/")
self.stop_node(0, expected_stderr="warning: -includeconf cannot be used from included files; ignoring -includeconf=relative2.conf")
self.log.info("-includeconf cannot contain invalid arg")
# Commented out as long as we ignore invalid arguments in configuration files
#with open(self.nodes[0].datadir_path / "relative.conf", "w", encoding="utf8") as f:
# f.write("foo=bar\n")
#self.nodes[0].assert_start_raises_init_error(expected_msg="Error: Error reading configuration file: Invalid configuration value foo")
self.log.info("-includeconf cannot be invalid path")
(self.nodes[0].datadir_path / "relative.conf").unlink()
self.nodes[0].assert_start_raises_init_error(expected_msg="Error: Error reading configuration file: Failed to include configuration file relative.conf")
self.log.info("multiple -includeconf args can be used from the base config file. subversion should end with 'main; relative; relative2)/'")
with open(self.nodes[0].datadir_path / "relative.conf", "w", encoding="utf8") as f:
# Restore initial file contents
f.write("uacomment=relative\n")
with open(self.nodes[0].datadir_path / "bitcoin.conf", "a", encoding="utf8") as f:
f.write("includeconf=relative2.conf\n")
self.start_node(0)
subversion = self.nodes[0].getnetworkinfo()["subversion"]
assert subversion.endswith("main; relative; relative2)/")
if __name__ == '__main__':
IncludeConfTest().main()
# --- bitcoin/test/functional/wallet_transactiontime_rescan.py ---
#!/usr/bin/env python3
# Copyright (c) 2018-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test transaction time during old block rescanning
"""
import concurrent.futures
import time
from test_framework.authproxy import JSONRPCException
from test_framework.blocktools import COINBASE_MATURITY
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
set_node_times,
)
from test_framework.wallet_util import (
get_generate_key,
)
class TransactionTimeRescanTest(BitcoinTestFramework):
def add_options(self, parser):
self.add_wallet_options(parser)
def set_test_params(self):
self.setup_clean_chain = False
self.num_nodes = 3
self.extra_args = [["-keypool=400"],
["-keypool=400"],
[]
]
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def run_test(self):
self.log.info('Prepare nodes and wallet')
minernode = self.nodes[0] # node used to mine BTC and create transactions
usernode = self.nodes[1] # user node with correct time
restorenode = self.nodes[2] # node used to restore user wallet and check time determination in ComputeSmartTime (wallet.cpp)
# time constant
cur_time = int(time.time())
ten_days = 10 * 24 * 60 * 60
# synchronize nodes and time
self.sync_all()
set_node_times(self.nodes, cur_time)
# prepare miner wallet
minernode.createwallet(wallet_name='default')
miner_wallet = minernode.get_wallet_rpc('default')
m1 = miner_wallet.getnewaddress()
# prepare the user wallet with 3 watch-only addresses
wo1 = usernode.getnewaddress()
wo2 = usernode.getnewaddress()
wo3 = usernode.getnewaddress()
usernode.createwallet(wallet_name='wo', disable_private_keys=True)
wo_wallet = usernode.get_wallet_rpc('wo')
wo_wallet.importaddress(wo1)
wo_wallet.importaddress(wo2)
wo_wallet.importaddress(wo3)
self.log.info('Start transactions')
# check blockcount
assert_equal(minernode.getblockcount(), 200)
# generate some btc to create transactions and check blockcount
initial_mine = COINBASE_MATURITY + 1
self.generatetoaddress(minernode, initial_mine, m1)
assert_equal(minernode.getblockcount(), initial_mine + 200)
# synchronize nodes and time
self.sync_all()
set_node_times(self.nodes, cur_time + ten_days)
# send 10 btc to user's first watch-only address
self.log.info('Send 10 btc to user')
miner_wallet.sendtoaddress(wo1, 10)
# generate blocks and check blockcount
self.generatetoaddress(minernode, COINBASE_MATURITY, m1)
assert_equal(minernode.getblockcount(), initial_mine + 300)
# synchronize nodes and time
self.sync_all()
set_node_times(self.nodes, cur_time + ten_days + ten_days)
# send 5 btc to our second watch-only address
self.log.info('Send 5 btc to user')
miner_wallet.sendtoaddress(wo2, 5)
# generate blocks and check blockcount
self.generatetoaddress(minernode, COINBASE_MATURITY, m1)
assert_equal(minernode.getblockcount(), initial_mine + 400)
# synchronize nodes and time
self.sync_all()
set_node_times(self.nodes, cur_time + ten_days + ten_days + ten_days)
# send 1 btc to our third watch-only address
self.log.info('Send 1 btc to user')
miner_wallet.sendtoaddress(wo3, 1)
# generate more blocks and check blockcount
self.generatetoaddress(minernode, COINBASE_MATURITY, m1)
assert_equal(minernode.getblockcount(), initial_mine + 500)
self.log.info('Check user\'s final balance and transaction count')
assert_equal(wo_wallet.getbalance(), 16)
assert_equal(len(wo_wallet.listtransactions()), 3)
self.log.info('Check transaction times')
for tx in wo_wallet.listtransactions():
if tx['address'] == wo1:
assert_equal(tx['blocktime'], cur_time + ten_days)
assert_equal(tx['time'], cur_time + ten_days)
elif tx['address'] == wo2:
assert_equal(tx['blocktime'], cur_time + ten_days + ten_days)
assert_equal(tx['time'], cur_time + ten_days + ten_days)
elif tx['address'] == wo3:
assert_equal(tx['blocktime'], cur_time + ten_days + ten_days + ten_days)
assert_equal(tx['time'], cur_time + ten_days + ten_days + ten_days)
# restore user wallet without rescan
self.log.info('Restore user wallet on another node without rescan')
restorenode.createwallet(wallet_name='wo', disable_private_keys=True)
restorewo_wallet = restorenode.get_wallet_rpc('wo')
# for descriptor wallets, the test framework maps the importaddress RPC to the
# importdescriptors RPC (with argument 'timestamp'='now'), which always rescans
# blocks of the past 2 hours, based on the current MTP timestamp; in order to avoid
# importing the last address (wo3), we advance the time further and generate 10 blocks
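# (the "past 2 hours" window corresponds to the wallet's TIMESTAMP_WINDOW
# constant; assumed here to be 2 * 60 * 60 seconds as in current Bitcoin Core)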
if self.options.descriptors:
set_node_times(self.nodes, cur_time + ten_days + ten_days + ten_days + ten_days)
self.generatetoaddress(minernode, 10, m1)
restorewo_wallet.importaddress(wo1, rescan=False)
restorewo_wallet.importaddress(wo2, rescan=False)
restorewo_wallet.importaddress(wo3, rescan=False)
# check user has 0 balance and no transactions
assert_equal(restorewo_wallet.getbalance(), 0)
assert_equal(len(restorewo_wallet.listtransactions()), 0)
# proceed to rescan, first with an incomplete one, then with a full rescan
self.log.info('Rescan last history part')
restorewo_wallet.rescanblockchain(initial_mine + 350)
self.log.info('Rescan all history')
restorewo_wallet.rescanblockchain()
self.log.info('Check user\'s final balance and transaction count after restoration')
assert_equal(restorewo_wallet.getbalance(), 16)
assert_equal(len(restorewo_wallet.listtransactions()), 3)
self.log.info('Check transaction times after restoration')
for tx in restorewo_wallet.listtransactions():
if tx['address'] == wo1:
assert_equal(tx['blocktime'], cur_time + ten_days)
assert_equal(tx['time'], cur_time + ten_days)
elif tx['address'] == wo2:
assert_equal(tx['blocktime'], cur_time + ten_days + ten_days)
assert_equal(tx['time'], cur_time + ten_days + ten_days)
elif tx['address'] == wo3:
assert_equal(tx['blocktime'], cur_time + ten_days + ten_days + ten_days)
assert_equal(tx['time'], cur_time + ten_days + ten_days + ten_days)
self.log.info('Test handling of invalid parameters for rescanblockchain')
assert_raises_rpc_error(-8, "Invalid start_height", restorewo_wallet.rescanblockchain, -1, 10)
assert_raises_rpc_error(-8, "Invalid stop_height", restorewo_wallet.rescanblockchain, 1, -1)
assert_raises_rpc_error(-8, "stop_height must be greater than start_height", restorewo_wallet.rescanblockchain, 20, 10)
self.log.info("Test `rescanblockchain` fails when wallet is encrypted and locked")
usernode.createwallet(wallet_name="enc_wallet", passphrase="passphrase")
enc_wallet = usernode.get_wallet_rpc("enc_wallet")
assert_raises_rpc_error(-13, "Error: Please enter the wallet passphrase with walletpassphrase first.", enc_wallet.rescanblockchain)
if not self.options.descriptors:
self.log.info("Test rescanning an encrypted wallet")
hd_seed = get_generate_key().privkey
usernode.createwallet(wallet_name="temp_wallet", blank=True, descriptors=False)
temp_wallet = usernode.get_wallet_rpc("temp_wallet")
temp_wallet.sethdseed(seed=hd_seed)
for i in range(399):
temp_wallet.getnewaddress()
self.generatetoaddress(usernode, COINBASE_MATURITY + 1, temp_wallet.getnewaddress())
self.generatetoaddress(usernode, COINBASE_MATURITY + 1, temp_wallet.getnewaddress())
minernode.createwallet("encrypted_wallet", blank=True, passphrase="passphrase", descriptors=False)
encrypted_wallet = minernode.get_wallet_rpc("encrypted_wallet")
encrypted_wallet.walletpassphrase("passphrase", 99999)
encrypted_wallet.sethdseed(seed=hd_seed)
with concurrent.futures.ThreadPoolExecutor(max_workers=1) as thread:
with minernode.assert_debug_log(expected_msgs=["Rescan started from block 0f9188f13cb7b2c71f2a335e3a4fc328bf5beb436012afca590b1a11466e2206... (slow variant inspecting all blocks)"], timeout=5):
rescanning = thread.submit(encrypted_wallet.rescanblockchain)
# set the passphrase timeout to 1 to test that the wallet remains unlocked during the rescan
minernode.cli("-rpcwallet=encrypted_wallet").walletpassphrase("passphrase", 1)
try:
minernode.cli("-rpcwallet=encrypted_wallet").walletlock()
except JSONRPCException as e:
assert e.error["code"] == -4 and "Error: the wallet is currently being used to rescan the blockchain for related transactions. Please call `abortrescan` before locking the wallet." in e.error["message"]
try:
minernode.cli("-rpcwallet=encrypted_wallet").walletpassphrasechange("passphrase", "newpassphrase")
except JSONRPCException as e:
assert e.error["code"] == -4 and "Error: the wallet is currently being used to rescan the blockchain for related transactions. Please call `abortrescan` before changing the passphrase." in e.error["message"]
assert_equal(rescanning.result(), {"start_height": 0, "stop_height": 803})
assert_equal(encrypted_wallet.getbalance(), temp_wallet.getbalance())
if __name__ == '__main__':
TransactionTimeRescanTest().main()
# --- bitcoin/test/functional/p2p_compactblocks_hb.py ---
#!/usr/bin/env python3
# Copyright (c) 2021 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test compact blocks HB selection logic."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal
class CompactBlocksConnectionTest(BitcoinTestFramework):
"""Test class for verifying selection of HB peer connections."""
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 6
def peer_info(self, from_node, to_node):
"""Query from_node for its getpeerinfo about to_node."""
for peerinfo in self.nodes[from_node].getpeerinfo():
if "(testnode%i)" % to_node in peerinfo['subver']:
return peerinfo
return None
def setup_network(self):
self.setup_nodes()
# Start network with everyone disconnected
self.sync_all()
def relay_block_through(self, peer):
"""Relay a new block through peer peer, and return HB status between 1 and [2,3,4,5]."""
self.connect_nodes(peer, 0)
self.generate(self.nodes[0], 1)
self.disconnect_nodes(peer, 0)
status_to = [self.peer_info(1, i)['bip152_hb_to'] for i in range(2, 6)]
status_from = [self.peer_info(i, 1)['bip152_hb_from'] for i in range(2, 6)]
assert_equal(status_to, status_from)
return status_to
def run_test(self):
self.log.info("Testing reserved high-bandwidth mode slot for outbound peer...")
# Connect everyone to node 0, and mine some blocks to get all nodes out of IBD.
for i in range(1, 6):
self.connect_nodes(i, 0)
self.generate(self.nodes[0], 2)
for i in range(1, 6):
self.disconnect_nodes(i, 0)
# Construct network topology:
# - Node 0 is the block producer
# - Node 1 is the "target" node being tested
# - Nodes 2-5 are intermediaries.
# - Node 1 has an outbound connection to node 2
# - Node 1 has inbound connections from nodes 3-5
self.connect_nodes(3, 1)
self.connect_nodes(4, 1)
self.connect_nodes(5, 1)
self.connect_nodes(1, 2)
# Mine blocks subsequently relaying through nodes 3,4,5 (inbound to node 1)
for nodeid in range(3, 6):
status = self.relay_block_through(nodeid)
assert_equal(status, [False, nodeid >= 3, nodeid >= 4, nodeid >= 5])
# And again through each. This should not change HB status.
for nodeid in range(3, 6):
status = self.relay_block_through(nodeid)
assert_equal(status, [False, True, True, True])
# Now relay one block through peer 2 (outbound from node 1), so it should take HB status
# from one of the inbounds.
status = self.relay_block_through(2)
assert_equal(status[0], True)
assert_equal(sum(status), 3)
# Now relay again through nodes 3,4,5. Since 2 is outbound, it should remain HB.
for nodeid in range(3, 6):
status = self.relay_block_through(nodeid)
assert status[0]
assert status[nodeid - 2]
assert_equal(sum(status), 3)
# Reconnect peer 2, and retry. Now the three inbounds should be HB again.
self.disconnect_nodes(1, 2)
self.connect_nodes(1, 2)
for nodeid in range(3, 6):
status = self.relay_block_through(nodeid)
assert not status[0]
assert status[nodeid - 2]
assert_equal(status, [False, True, True, True])
if __name__ == '__main__':
CompactBlocksConnectionTest().main()
# --- bitcoin/test/functional/wallet_groups.py ---
#!/usr/bin/env python3
# Copyright (c) 2018-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test wallet group functionality."""
from test_framework.blocktools import COINBASE_MATURITY
from test_framework.test_framework import BitcoinTestFramework
from test_framework.messages import (
tx_from_hex,
)
from test_framework.util import (
assert_approx,
assert_equal,
)
class WalletGroupTest(BitcoinTestFramework):
def add_options(self, parser):
self.add_wallet_options(parser)
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 5
self.extra_args = [
[],
[],
["-avoidpartialspends"],
["-maxapsfee=0.00002719"],
["-maxapsfee=0.00002720"],
]
for args in self.extra_args:
args.append("[email protected]") # whitelist peers to speed up tx relay / mempool sync
args.append(f"-paytxfee={20 * 1e3 / 1e8}") # apply feerate of 20 sats/vB across all nodes
self.rpc_timeout = 480
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def run_test(self):
self.log.info("Setting up")
# To make full use of immediate tx relay, all nodes need to be reachable
# via inbound peers, i.e. connect first to last to close the circle
# (the default test network topology looks like this:
# node0 <-- node1 <-- node2 <-- node3 <-- node4)
self.connect_nodes(0, self.num_nodes - 1)
# Mine some coins
self.generate(self.nodes[0], COINBASE_MATURITY + 1)
# Get some addresses from the two nodes
addr1 = [self.nodes[1].getnewaddress() for _ in range(3)]
addr2 = [self.nodes[2].getnewaddress() for _ in range(3)]
addrs = addr1 + addr2
# Send 1 + 0.5 coin to each address
[self.nodes[0].sendtoaddress(addr, 1.0) for addr in addrs]
[self.nodes[0].sendtoaddress(addr, 0.5) for addr in addrs]
self.generate(self.nodes[0], 1)
# For each node, send 0.2 coins back to 0;
# - node[1] should pick one 0.5 UTXO and leave the rest
# - node[2] should pick one (1.0 + 0.5) UTXO group corresponding to a
# given address, and leave the rest
self.log.info("Test sending transactions picks one UTXO group and leaves the rest")
txid1 = self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 0.2)
tx1 = self.nodes[1].getrawtransaction(txid1, True)
# txid1 should have 1 input and 2 outputs
assert_equal(1, len(tx1["vin"]))
assert_equal(2, len(tx1["vout"]))
# one output should be 0.2, the other should be ~0.3
v = [vout["value"] for vout in tx1["vout"]]
v.sort()
assert_approx(v[0], vexp=0.2, vspan=0.0001)
assert_approx(v[1], vexp=0.3, vspan=0.0001)
txid2 = self.nodes[2].sendtoaddress(self.nodes[0].getnewaddress(), 0.2)
tx2 = self.nodes[2].getrawtransaction(txid2, True)
# txid2 should have 2 inputs and 2 outputs
assert_equal(2, len(tx2["vin"]))
assert_equal(2, len(tx2["vout"]))
# one output should be 0.2, the other should be ~1.3
v = [vout["value"] for vout in tx2["vout"]]
v.sort()
assert_approx(v[0], vexp=0.2, vspan=0.0001)
assert_approx(v[1], vexp=1.3, vspan=0.0001)
self.log.info("Test avoiding partial spends if warranted, even if avoidpartialspends is disabled")
self.sync_all()
self.generate(self.nodes[0], 1)
# Nodes 1-2 now have confirmed UTXOs (letters denote destinations):
# Node #1: Node #2:
# - A 1.0 - D0 1.0
# - B0 1.0 - D1 0.5
# - B1 0.5 - E0 1.0
# - C0 1.0 - E1 0.5
# - C1 0.5 - F ~1.3
# - D ~0.3
assert_approx(self.nodes[1].getbalance(), vexp=4.3, vspan=0.0001)
assert_approx(self.nodes[2].getbalance(), vexp=4.3, vspan=0.0001)
# Sending 1.4 btc should pick one 1.0 + one more. For node #1,
# this could be (A / B0 / C0) + (B1 / C1 / D). We ensure that it is
# B0 + B1 or C0 + C1, because this avoids partial spends while not being
# detrimental to transaction cost
txid3 = self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 1.4)
tx3 = self.nodes[1].getrawtransaction(txid3, True)
# tx3 should have 2 inputs and 2 outputs
assert_equal(2, len(tx3["vin"]))
assert_equal(2, len(tx3["vout"]))
# the accumulated value should be 1.5, so the outputs should be
# ~0.1 and 1.4 and should come from the same destination
values = [vout["value"] for vout in tx3["vout"]]
values.sort()
assert_approx(values[0], vexp=0.1, vspan=0.0001)
assert_approx(values[1], vexp=1.4, vspan=0.0001)
input_txids = [vin["txid"] for vin in tx3["vin"]]
input_addrs = [self.nodes[1].gettransaction(txid)['details'][0]['address'] for txid in input_txids]
assert_equal(input_addrs[0], input_addrs[1])
# Node 2 enforces avoidpartialspends so needs no checking here
tx4_ungrouped_fee = 2820
tx4_grouped_fee = 4160
tx5_6_ungrouped_fee = 5520
tx5_6_grouped_fee = 8240
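# Worked out: node3's -maxapsfee=0.00002719 BTC equals 2719 sat. The grouping
# premium is 4160 - 2820 = 1340 sat for tx4 (<= 2719, so grouping is used) and
# 8240 - 5520 = 2720 sat for tx5 (> 2719, so it is not); node4's limit of
# 0.00002720 BTC (2720 sat) admits the grouped variant again for tx6.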
self.log.info("Test wallet option maxapsfee")
addr_aps = self.nodes[3].getnewaddress()
self.nodes[0].sendtoaddress(addr_aps, 1.0)
self.nodes[0].sendtoaddress(addr_aps, 1.0)
self.generate(self.nodes[0], 1)
with self.nodes[3].assert_debug_log([f'Fee non-grouped = {tx4_ungrouped_fee}, grouped = {tx4_grouped_fee}, using grouped']):
txid4 = self.nodes[3].sendtoaddress(self.nodes[0].getnewaddress(), 0.1)
tx4 = self.nodes[3].getrawtransaction(txid4, True)
# tx4 should have 2 inputs and 2 outputs although one input would
# have been enough, and the grouping caused higher fees
assert_equal(2, len(tx4["vin"]))
assert_equal(2, len(tx4["vout"]))
addr_aps2 = self.nodes[3].getnewaddress()
[self.nodes[0].sendtoaddress(addr_aps2, 1.0) for _ in range(5)]
self.generate(self.nodes[0], 1)
with self.nodes[3].assert_debug_log([f'Fee non-grouped = {tx5_6_ungrouped_fee}, grouped = {tx5_6_grouped_fee}, using non-grouped']):
txid5 = self.nodes[3].sendtoaddress(self.nodes[0].getnewaddress(), 2.95)
tx5 = self.nodes[3].getrawtransaction(txid5, True)
# tx5 should have 3 inputs (1.0, 1.0, 1.0) and 2 outputs
assert_equal(3, len(tx5["vin"]))
assert_equal(2, len(tx5["vout"]))
# Test wallet option maxapsfee with node 4, which sets maxapsfee
# 1 sat higher, crossing the threshold from non-grouped to grouped.
self.log.info("Test wallet option maxapsfee threshold from non-grouped to grouped")
addr_aps3 = self.nodes[4].getnewaddress()
[self.nodes[0].sendtoaddress(addr_aps3, 1.0) for _ in range(5)]
self.generate(self.nodes[0], 1)
with self.nodes[4].assert_debug_log([f'Fee non-grouped = {tx5_6_ungrouped_fee}, grouped = {tx5_6_grouped_fee}, using grouped']):
txid6 = self.nodes[4].sendtoaddress(self.nodes[0].getnewaddress(), 2.95)
tx6 = self.nodes[4].getrawtransaction(txid6, True)
# tx6 should have 5 inputs and 2 outputs
assert_equal(5, len(tx6["vin"]))
assert_equal(2, len(tx6["vout"]))
# Empty out node2's wallet
self.nodes[2].sendall(recipients=[self.nodes[0].getnewaddress()])
self.sync_all()
self.generate(self.nodes[0], 1)
self.log.info("Fill a wallet with 10,000 outputs corresponding to the same scriptPubKey")
for _ in range(5):
raw_tx = self.nodes[0].createrawtransaction([{"txid":"0"*64, "vout":0}], [{addr2[0]: 0.05}])
tx = tx_from_hex(raw_tx)
tx.vin = []
tx.vout = [tx.vout[0]] * 2000
funded_tx = self.nodes[0].fundrawtransaction(tx.serialize().hex())
signed_tx = self.nodes[0].signrawtransactionwithwallet(funded_tx['hex'])
self.nodes[0].sendrawtransaction(signed_tx['hex'])
self.generate(self.nodes[0], 1)
# Check that we can create a transaction that only requires ~100 of our
# utxos, without pulling in all outputs and creating a transaction that
# is way too big.
self.log.info("Test creating txn that only requires ~100 of our UTXOs without pulling in all outputs")
assert self.nodes[2].sendtoaddress(address=addr2[0], amount=5)
if __name__ == '__main__':
WalletGroupTest().main()
# --- bitcoin/test/functional/rpc_getblockfilter.py ---
#!/usr/bin/env python3
# Copyright (c) 2018-2021 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the getblockfilter RPC."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal, assert_is_hex_string, assert_raises_rpc_error,
)
FILTER_TYPES = ["basic"]
class GetBlockFilterTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 2
self.extra_args = [["-blockfilterindex"], []]
def run_test(self):
# Create two chains by disconnecting nodes 0 & 1, mining, then reconnecting
self.disconnect_nodes(0, 1)
self.generate(self.nodes[0], 3, sync_fun=self.no_op)
self.generate(self.nodes[1], 4, sync_fun=self.no_op)
assert_equal(self.nodes[0].getblockcount(), 3)
chain0_hashes = [self.nodes[0].getblockhash(block_height) for block_height in range(4)]
# Reorg node 0 to a new chain
self.connect_nodes(0, 1)
self.sync_blocks()
assert_equal(self.nodes[0].getblockcount(), 4)
chain1_hashes = [self.nodes[0].getblockhash(block_height) for block_height in range(4)]
# Test getblockfilter returns a filter for all blocks and filter types on active chain
for block_hash in chain1_hashes:
for filter_type in FILTER_TYPES:
result = self.nodes[0].getblockfilter(block_hash, filter_type)
assert_is_hex_string(result['filter'])
# Test getblockfilter returns a filter for all blocks and filter types on stale chain
for block_hash in chain0_hashes:
for filter_type in FILTER_TYPES:
result = self.nodes[0].getblockfilter(block_hash, filter_type)
assert_is_hex_string(result['filter'])
# Test getblockfilter with unknown block
bad_block_hash = "0123456789abcdef" * 4
assert_raises_rpc_error(-5, "Block not found", self.nodes[0].getblockfilter, bad_block_hash, "basic")
# Test getblockfilter with undefined filter type
genesis_hash = self.nodes[0].getblockhash(0)
assert_raises_rpc_error(-5, "Unknown filtertype", self.nodes[0].getblockfilter, genesis_hash, "unknown")
# Test getblockfilter fails on node without compact block filter index
self.restart_node(0, extra_args=["-blockfilterindex=0"])
for filter_type in FILTER_TYPES:
assert_raises_rpc_error(-1, "Index is not enabled for filtertype {}".format(filter_type),
self.nodes[0].getblockfilter, genesis_hash, filter_type)
if __name__ == '__main__':
GetBlockFilterTest().main()
# --- bitcoin/test/functional/wallet_conflicts.py ---
#!/usr/bin/env python3
# Copyright (c) 2023 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""
Test that the wallet correctly tracks transactions that have been conflicted by blocks, particularly during reorgs.
"""
from decimal import Decimal
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
)
class TxConflicts(BitcoinTestFramework):
def add_options(self, parser):
self.add_wallet_options(parser)
def set_test_params(self):
self.num_nodes = 3
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def get_utxo_of_value(self, from_tx_id, search_value):
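# (helper: scans gettransaction()['details'] for the output paying exactly
# `search_value` and returns its vout index)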
return next(tx_out["vout"] for tx_out in self.nodes[0].gettransaction(from_tx_id)["details"] if tx_out["amount"] == Decimal(f"{search_value}"))
def run_test(self):
self.log.info("Send tx from which to conflict outputs later")
txid_conflict_from_1 = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), Decimal("10"))
txid_conflict_from_2 = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), Decimal("10"))
self.generate(self.nodes[0], 1)
self.sync_blocks()
self.log.info("Disconnect nodes to broadcast conflicts on their respective chains")
self.disconnect_nodes(0, 1)
self.disconnect_nodes(2, 1)
self.log.info("Create transactions that conflict with each other")
output_A = self.get_utxo_of_value(from_tx_id=txid_conflict_from_1, search_value=10)
output_B = self.get_utxo_of_value(from_tx_id=txid_conflict_from_2, search_value=10)
# First create a transaction that consumes both A and B outputs.
#
# | tx1 | -----> | | | |
# | AB_parent_tx | ----> | Child_Tx |
# | tx2 | -----> | | | |
#
inputs_tx_AB_parent = [{"txid": txid_conflict_from_1, "vout": output_A}, {"txid": txid_conflict_from_2, "vout": output_B}]
tx_AB_parent = self.nodes[0].signrawtransactionwithwallet(self.nodes[0].createrawtransaction(inputs_tx_AB_parent, {self.nodes[0].getnewaddress(): Decimal("19.99998")}))
# Secondly, create two transactions: One consuming output_A, and another one consuming output_B
#
# | tx1 | -----> | Tx_A_1 |
# ----------------
# | tx2 | -----> | Tx_B_1 |
#
inputs_tx_A_1 = [{"txid": txid_conflict_from_1, "vout": output_A}]
inputs_tx_B_1 = [{"txid": txid_conflict_from_2, "vout": output_B}]
tx_A_1 = self.nodes[0].signrawtransactionwithwallet(self.nodes[0].createrawtransaction(inputs_tx_A_1, {self.nodes[0].getnewaddress(): Decimal("9.99998")}))
tx_B_1 = self.nodes[0].signrawtransactionwithwallet(self.nodes[0].createrawtransaction(inputs_tx_B_1, {self.nodes[0].getnewaddress(): Decimal("9.99998")}))
self.log.info("Broadcast conflicted transaction")
txid_AB_parent = self.nodes[0].sendrawtransaction(tx_AB_parent["hex"])
self.generate(self.nodes[0], 1, sync_fun=self.no_op)
# Now that 'AB_parent_tx' was broadcast, build 'Child_Tx'
output_c = self.get_utxo_of_value(from_tx_id=txid_AB_parent, search_value=19.99998)
inputs_tx_C_child = [({"txid": txid_AB_parent, "vout": output_c})]
tx_C_child = self.nodes[0].signrawtransactionwithwallet(self.nodes[0].createrawtransaction(inputs_tx_C_child, {self.nodes[0].getnewaddress() : Decimal("19.99996")}))
tx_C_child_txid = self.nodes[0].sendrawtransaction(tx_C_child["hex"])
self.generate(self.nodes[0], 1, sync_fun=self.no_op)
self.log.info("Broadcast conflicting tx to node 1 and generate a longer chain")
conflicting_txid_A = self.nodes[1].sendrawtransaction(tx_A_1["hex"])
self.generate(self.nodes[1], 4, sync_fun=self.no_op)
conflicting_txid_B = self.nodes[1].sendrawtransaction(tx_B_1["hex"])
self.generate(self.nodes[1], 4, sync_fun=self.no_op)
self.log.info("Connect nodes 0 and 1, trigger reorg and ensure that the tx is effectively conflicted")
self.connect_nodes(0, 1)
self.sync_blocks([self.nodes[0], self.nodes[1]])
conflicted_AB_tx = self.nodes[0].gettransaction(txid_AB_parent)
tx_C_child = self.nodes[0].gettransaction(tx_C_child_txid)
conflicted_A_tx = self.nodes[0].gettransaction(conflicting_txid_A)
self.log.info("Verify, after the reorg, that Tx_A was accepted, and tx_AB and its Child_Tx are conflicting now")
# Tx A was accepted, Tx AB was not.
assert conflicted_AB_tx["confirmations"] < 0
assert conflicted_A_tx["confirmations"] > 0
# A conflicted tx's confirmations are reported as minus the confirmations of its deepest-confirmed conflicting tx
assert_equal(-conflicted_AB_tx["confirmations"], conflicted_A_tx["confirmations"])
# Child should inherit conflicted state from parent
assert_equal(-tx_C_child["confirmations"], conflicted_A_tx["confirmations"])
# Check the confirmations of the conflicting transactions
assert_equal(conflicted_A_tx["confirmations"], 8)
assert_equal(self.nodes[0].gettransaction(conflicting_txid_B)["confirmations"], 4)
self.log.info("Now generate a longer chain that does not contain any tx")
# Node2 chain without conflicts
self.generate(self.nodes[2], 15, sync_fun=self.no_op)
# Connect node0 and node2 and wait reorg
self.connect_nodes(0, 2)
self.sync_blocks()
conflicted = self.nodes[0].gettransaction(txid_AB_parent)
tx_C_child = self.nodes[0].gettransaction(tx_C_child_txid)
self.log.info("Test that formerly conflicted transaction are inactive after reorg")
# Former conflicted tx should be unconfirmed as it hasn't been yet rebroadcast
assert_equal(conflicted["confirmations"], 0)
# Former conflicted child tx should be unconfirmed as it hasn't been rebroadcast
assert_equal(tx_C_child["confirmations"], 0)
# Rebroadcast former conflicted tx and check it confirms smoothly
self.nodes[2].sendrawtransaction(conflicted["hex"])
self.generate(self.nodes[2], 1)
self.sync_blocks()
former_conflicted = self.nodes[0].gettransaction(txid_AB_parent)
assert_equal(former_conflicted["confirmations"], 1)
assert_equal(former_conflicted["blockheight"], 217)
if __name__ == '__main__':
TxConflicts().main()
# --- bitcoin/test/functional/rpc_psbt.py ---
#!/usr/bin/env python3
# Copyright (c) 2018-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the Partially Signed Transaction RPCs.
"""
from decimal import Decimal
from itertools import product
from random import randbytes
from test_framework.descriptors import descsum_create
from test_framework.key import H_POINT
from test_framework.messages import (
COutPoint,
CTransaction,
CTxIn,
CTxOut,
MAX_BIP125_RBF_SEQUENCE,
WITNESS_SCALE_FACTOR,
ser_compact_size,
)
from test_framework.psbt import (
PSBT,
PSBTMap,
PSBT_GLOBAL_UNSIGNED_TX,
PSBT_IN_RIPEMD160,
PSBT_IN_SHA256,
PSBT_IN_HASH160,
PSBT_IN_HASH256,
PSBT_IN_NON_WITNESS_UTXO,
PSBT_IN_WITNESS_UTXO,
PSBT_OUT_TAP_TREE,
)
from test_framework.script import CScript, OP_TRUE
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_approx,
assert_equal,
assert_greater_than,
assert_greater_than_or_equal,
assert_raises_rpc_error,
find_vout_for_address,
)
from test_framework.wallet_util import (
generate_keypair,
get_generate_key,
)
import json
import os
class PSBTTest(BitcoinTestFramework):
def add_options(self, parser):
self.add_wallet_options(parser)
def set_test_params(self):
self.num_nodes = 3
self.extra_args = [
["-walletrbf=1", "-addresstype=bech32", "-changetype=bech32"], #TODO: Remove address type restrictions once taproot has psbt extensions
["-walletrbf=0", "-changetype=legacy"],
[]
]
# whitelist peers to speed up tx relay / mempool sync
for args in self.extra_args:
args.append("[email protected]")
self.supports_cli = False
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def test_utxo_conversion(self):
self.log.info("Check that non-witness UTXOs are removed for segwit v1+ inputs")
mining_node = self.nodes[2]
offline_node = self.nodes[0]
online_node = self.nodes[1]
# Disconnect offline node from others
# Topology of test network is linear, so this one call is enough
self.disconnect_nodes(0, 1)
# Create watchonly on online_node
online_node.createwallet(wallet_name='wonline', disable_private_keys=True)
wonline = online_node.get_wallet_rpc('wonline')
w2 = online_node.get_wallet_rpc(self.default_wallet_name)
# Mine a transaction that credits the offline address
offline_addr = offline_node.getnewaddress(address_type="bech32m")
online_addr = w2.getnewaddress(address_type="bech32m")
wonline.importaddress(offline_addr, "", False)
mining_wallet = mining_node.get_wallet_rpc(self.default_wallet_name)
mining_wallet.sendtoaddress(address=offline_addr, amount=1.0)
self.generate(mining_node, nblocks=1, sync_fun=lambda: self.sync_all([online_node, mining_node]))
# Construct an unsigned PSBT on the online node
utxos = wonline.listunspent(addresses=[offline_addr])
raw = wonline.createrawtransaction([{"txid":utxos[0]["txid"], "vout":utxos[0]["vout"]}],[{online_addr:0.9999}])
psbt = wonline.walletprocesspsbt(online_node.converttopsbt(raw))["psbt"]
assert not "not_witness_utxo" in mining_node.decodepsbt(psbt)["inputs"][0]
# add non-witness UTXO manually
psbt_new = PSBT.from_base64(psbt)
prev_tx = wonline.gettransaction(utxos[0]["txid"])["hex"]
psbt_new.i[0].map[PSBT_IN_NON_WITNESS_UTXO] = bytes.fromhex(prev_tx)
assert "non_witness_utxo" in mining_node.decodepsbt(psbt_new.to_base64())["inputs"][0]
# Have the offline node sign the PSBT (which will remove the non-witness UTXO)
signed_psbt = offline_node.walletprocesspsbt(psbt_new.to_base64())
assert not "non_witness_utxo" in mining_node.decodepsbt(signed_psbt["psbt"])["inputs"][0]
# Make sure we can mine the resulting transaction
txid = mining_node.sendrawtransaction(signed_psbt["hex"])
self.generate(mining_node, nblocks=1, sync_fun=lambda: self.sync_all([online_node, mining_node]))
assert_equal(online_node.gettxout(txid,0)["confirmations"], 1)
wonline.unloadwallet()
# Reconnect
self.connect_nodes(1, 0)
self.connect_nodes(0, 2)
def test_input_confs_control(self):
self.nodes[0].createwallet("minconf")
wallet = self.nodes[0].get_wallet_rpc("minconf")
# Fund the wallet with different chain heights
for _ in range(2):
self.nodes[1].sendmany("", {wallet.getnewaddress():1, wallet.getnewaddress():1})
self.generate(self.nodes[1], 1)
unconfirmed_txid = wallet.sendtoaddress(wallet.getnewaddress(), 0.5)
self.log.info("Crafting PSBT using an unconfirmed input")
target_address = self.nodes[1].getnewaddress()
psbtx1 = wallet.walletcreatefundedpsbt([], {target_address: 0.1}, 0, {'fee_rate': 1, 'maxconf': 0})['psbt']
# Make sure we only had the one input
tx1_inputs = self.nodes[0].decodepsbt(psbtx1)['tx']['vin']
assert_equal(len(tx1_inputs), 1)
utxo1 = tx1_inputs[0]
assert_equal(unconfirmed_txid, utxo1['txid'])
signed_tx1 = wallet.walletprocesspsbt(psbtx1)
txid1 = self.nodes[0].sendrawtransaction(signed_tx1['hex'])
mempool = self.nodes[0].getrawmempool()
assert txid1 in mempool
self.log.info("Fail to craft a new PSBT that sends more funds with add_inputs = False")
assert_raises_rpc_error(-4, "The preselected coins total amount does not cover the transaction target. Please allow other inputs to be automatically selected or include more coins manually", wallet.walletcreatefundedpsbt, [{'txid': utxo1['txid'], 'vout': utxo1['vout']}], {target_address: 1}, 0, {'add_inputs': False})
self.log.info("Fail to craft a new PSBT with minconf above highest one")
assert_raises_rpc_error(-4, "Insufficient funds", wallet.walletcreatefundedpsbt, [{'txid': utxo1['txid'], 'vout': utxo1['vout']}], {target_address: 1}, 0, {'add_inputs': True, 'minconf': 3, 'fee_rate': 10})
self.log.info("Fail to broadcast a new PSBT with maxconf 0 due to BIP125 rules to verify it actually chose unconfirmed outputs")
psbt_invalid = wallet.walletcreatefundedpsbt([{'txid': utxo1['txid'], 'vout': utxo1['vout']}], {target_address: 1}, 0, {'add_inputs': True, 'maxconf': 0, 'fee_rate': 10})['psbt']
signed_invalid = wallet.walletprocesspsbt(psbt_invalid)
assert_raises_rpc_error(-26, "bad-txns-spends-conflicting-tx", self.nodes[0].sendrawtransaction, signed_invalid['hex'])
self.log.info("Craft a replacement adding inputs with highest confs possible")
psbtx2 = wallet.walletcreatefundedpsbt([{'txid': utxo1['txid'], 'vout': utxo1['vout']}], {target_address: 1}, 0, {'add_inputs': True, 'minconf': 2, 'fee_rate': 10})['psbt']
tx2_inputs = self.nodes[0].decodepsbt(psbtx2)['tx']['vin']
assert_greater_than_or_equal(len(tx2_inputs), 2)
for vin in tx2_inputs:
if vin['txid'] != unconfirmed_txid:
assert_greater_than_or_equal(self.nodes[0].gettxout(vin['txid'], vin['vout'])['confirmations'], 2)
signed_tx2 = wallet.walletprocesspsbt(psbtx2)
txid2 = self.nodes[0].sendrawtransaction(signed_tx2['hex'])
mempool = self.nodes[0].getrawmempool()
assert txid1 not in mempool
assert txid2 in mempool
wallet.unloadwallet()
def assert_change_type(self, psbtx, expected_type):
"""Assert that the given PSBT has a change output with the given type."""
# The decodepsbt RPC is stateless and independent of any settings, we can always just call it on the first node
decoded_psbt = self.nodes[0].decodepsbt(psbtx["psbt"])
changepos = psbtx["changepos"]
assert_equal(decoded_psbt["tx"]["vout"][changepos]["scriptPubKey"]["type"], expected_type)
def run_test(self):
# Create and fund a raw tx for sending 10 BTC
psbtx1 = self.nodes[0].walletcreatefundedpsbt([], {self.nodes[2].getnewaddress():10})['psbt']
# If inputs are specified, do not automatically add more:
utxo1 = self.nodes[0].listunspent()[0]
assert_raises_rpc_error(-4, "The preselected coins total amount does not cover the transaction target. "
"Please allow other inputs to be automatically selected or include more coins manually",
self.nodes[0].walletcreatefundedpsbt, [{"txid": utxo1['txid'], "vout": utxo1['vout']}], {self.nodes[2].getnewaddress():90})
psbtx1 = self.nodes[0].walletcreatefundedpsbt([{"txid": utxo1['txid'], "vout": utxo1['vout']}], {self.nodes[2].getnewaddress():90}, 0, {"add_inputs": True})['psbt']
assert_equal(len(self.nodes[0].decodepsbt(psbtx1)['tx']['vin']), 2)
# Inputs argument can be null
self.nodes[0].walletcreatefundedpsbt(None, {self.nodes[2].getnewaddress():10})
# Node 1 should not be able to add anything to it, and should return the psbt unchanged
psbtx = self.nodes[1].walletprocesspsbt(psbtx1)['psbt']
assert_equal(psbtx1, psbtx)
# Node 0 should not be able to sign the transaction while the wallet is locked
self.nodes[0].encryptwallet("password")
assert_raises_rpc_error(-13, "Please enter the wallet passphrase with walletpassphrase first", self.nodes[0].walletprocesspsbt, psbtx)
# Node 0 should be able to process without signing though
unsigned_tx = self.nodes[0].walletprocesspsbt(psbtx, False)
assert_equal(unsigned_tx['complete'], False)
self.nodes[0].walletpassphrase(passphrase="password", timeout=1000000)
# Sign the transaction but don't finalize
processed_psbt = self.nodes[0].walletprocesspsbt(psbt=psbtx, finalize=False)
assert "hex" not in processed_psbt
signed_psbt = processed_psbt['psbt']
# Finalize and send
finalized_hex = self.nodes[0].finalizepsbt(signed_psbt)['hex']
self.nodes[0].sendrawtransaction(finalized_hex)
# Alternative method: sign AND finalize in one command
processed_finalized_psbt = self.nodes[0].walletprocesspsbt(psbt=psbtx, finalize=True)
finalized_psbt = processed_finalized_psbt['psbt']
finalized_psbt_hex = processed_finalized_psbt['hex']
assert signed_psbt != finalized_psbt
assert finalized_psbt_hex == finalized_hex
# Manually selected inputs can be locked:
assert_equal(len(self.nodes[0].listlockunspent()), 0)
utxo1 = self.nodes[0].listunspent()[0]
psbtx1 = self.nodes[0].walletcreatefundedpsbt([{"txid": utxo1['txid'], "vout": utxo1['vout']}], {self.nodes[2].getnewaddress():1}, 0,{"lockUnspents": True})["psbt"]
assert_equal(len(self.nodes[0].listlockunspent()), 1)
# Locks are ignored for manually selected inputs
self.nodes[0].walletcreatefundedpsbt([{"txid": utxo1['txid'], "vout": utxo1['vout']}], {self.nodes[2].getnewaddress():1}, 0)
# Create p2sh, p2wpkh, and p2wsh addresses
pubkey0 = self.nodes[0].getaddressinfo(self.nodes[0].getnewaddress())['pubkey']
pubkey1 = self.nodes[1].getaddressinfo(self.nodes[1].getnewaddress())['pubkey']
pubkey2 = self.nodes[2].getaddressinfo(self.nodes[2].getnewaddress())['pubkey']
# Setup watchonly wallets
self.nodes[2].createwallet(wallet_name='wmulti', disable_private_keys=True)
wmulti = self.nodes[2].get_wallet_rpc('wmulti')
# Create all the addresses
p2sh = wmulti.addmultisigaddress(2, [pubkey0, pubkey1, pubkey2], "", "legacy")['address']
p2wsh = wmulti.addmultisigaddress(2, [pubkey0, pubkey1, pubkey2], "", "bech32")['address']
p2sh_p2wsh = wmulti.addmultisigaddress(2, [pubkey0, pubkey1, pubkey2], "", "p2sh-segwit")['address']
if not self.options.descriptors:
wmulti.importaddress(p2sh)
wmulti.importaddress(p2wsh)
wmulti.importaddress(p2sh_p2wsh)
p2wpkh = self.nodes[1].getnewaddress("", "bech32")
p2pkh = self.nodes[1].getnewaddress("", "legacy")
p2sh_p2wpkh = self.nodes[1].getnewaddress("", "p2sh-segwit")
# fund those addresses
rawtx = self.nodes[0].createrawtransaction([], {p2sh:10, p2wsh:10, p2wpkh:10, p2sh_p2wsh:10, p2sh_p2wpkh:10, p2pkh:10})
rawtx = self.nodes[0].fundrawtransaction(rawtx, {"changePosition":3})
signed_tx = self.nodes[0].signrawtransactionwithwallet(rawtx['hex'])['hex']
txid = self.nodes[0].sendrawtransaction(signed_tx)
self.generate(self.nodes[0], 6)
# Find the output pos
p2sh_pos = -1
p2wsh_pos = -1
p2wpkh_pos = -1
p2pkh_pos = -1
p2sh_p2wsh_pos = -1
p2sh_p2wpkh_pos = -1
decoded = self.nodes[0].decoderawtransaction(signed_tx)
for out in decoded['vout']:
if out['scriptPubKey']['address'] == p2sh:
p2sh_pos = out['n']
elif out['scriptPubKey']['address'] == p2wsh:
p2wsh_pos = out['n']
elif out['scriptPubKey']['address'] == p2wpkh:
p2wpkh_pos = out['n']
elif out['scriptPubKey']['address'] == p2sh_p2wsh:
p2sh_p2wsh_pos = out['n']
elif out['scriptPubKey']['address'] == p2sh_p2wpkh:
p2sh_p2wpkh_pos = out['n']
elif out['scriptPubKey']['address'] == p2pkh:
p2pkh_pos = out['n']
inputs = [{"txid": txid, "vout": p2wpkh_pos}, {"txid": txid, "vout": p2sh_p2wpkh_pos}, {"txid": txid, "vout": p2pkh_pos}]
outputs = [{self.nodes[1].getnewaddress(): 29.99}]
# spend single key from node 1
created_psbt = self.nodes[1].walletcreatefundedpsbt(inputs, outputs)
walletprocesspsbt_out = self.nodes[1].walletprocesspsbt(created_psbt['psbt'])
# Make sure it has both types of UTXOs
decoded = self.nodes[1].decodepsbt(walletprocesspsbt_out['psbt'])
assert 'non_witness_utxo' in decoded['inputs'][0]
assert 'witness_utxo' in decoded['inputs'][0]
# Check decodepsbt fee calculation (input values shall only be counted once per UTXO)
assert_equal(decoded['fee'], created_psbt['fee'])
assert_equal(walletprocesspsbt_out['complete'], True)
self.nodes[1].sendrawtransaction(walletprocesspsbt_out['hex'])
self.log.info("Test walletcreatefundedpsbt fee rate of 10000 sat/vB and 0.1 BTC/kvB produces a total fee at or slightly below -maxtxfee (~0.05290000)")
res1 = self.nodes[1].walletcreatefundedpsbt(inputs, outputs, 0, {"fee_rate": 10000, "add_inputs": True})
assert_approx(res1["fee"], 0.055, 0.005)
res2 = self.nodes[1].walletcreatefundedpsbt(inputs, outputs, 0, {"feeRate": "0.1", "add_inputs": True})
assert_approx(res2["fee"], 0.055, 0.005)
self.log.info("Test min fee rate checks with walletcreatefundedpsbt are bypassed, e.g. a fee_rate under 1 sat/vB is allowed")
res3 = self.nodes[1].walletcreatefundedpsbt(inputs, outputs, 0, {"fee_rate": "0.999", "add_inputs": True})
assert_approx(res3["fee"], 0.00000381, 0.0000001)
res4 = self.nodes[1].walletcreatefundedpsbt(inputs, outputs, 0, {"feeRate": 0.00000999, "add_inputs": True})
assert_approx(res4["fee"], 0.00000381, 0.0000001)
self.log.info("Test min fee rate checks with walletcreatefundedpsbt are bypassed and that funding non-standard 'zero-fee' transactions is valid")
for param, zero_value in product(["fee_rate", "feeRate"], [0, 0.000, 0.00000000, "0", "0.000", "0.00000000"]):
assert_equal(0, self.nodes[1].walletcreatefundedpsbt(inputs, outputs, 0, {param: zero_value, "add_inputs": True})["fee"])
self.log.info("Test invalid fee rate settings")
for param, value in {("fee_rate", 100000), ("feeRate", 1)}:
assert_raises_rpc_error(-4, "Fee exceeds maximum configured by user (e.g. -maxtxfee, maxfeerate)",
self.nodes[1].walletcreatefundedpsbt, inputs, outputs, 0, {param: value, "add_inputs": True})
assert_raises_rpc_error(-3, "Amount out of range",
self.nodes[1].walletcreatefundedpsbt, inputs, outputs, 0, {param: -1, "add_inputs": True})
assert_raises_rpc_error(-3, "Amount is not a number or string",
self.nodes[1].walletcreatefundedpsbt, inputs, outputs, 0, {param: {"foo": "bar"}, "add_inputs": True})
# Test fee rate values that don't pass fixed-point parsing checks.
for invalid_value in ["", 0.000000001, 1e-09, 1.111111111, 1111111111111111, "31.999999999999999999999"]:
assert_raises_rpc_error(-3, "Invalid amount",
self.nodes[1].walletcreatefundedpsbt, inputs, outputs, 0, {param: invalid_value, "add_inputs": True})
# Test fee_rate values that cannot be represented in sat/vB.
for invalid_value in [0.0001, 0.00000001, 0.00099999, 31.99999999]:
assert_raises_rpc_error(-3, "Invalid amount",
self.nodes[1].walletcreatefundedpsbt, inputs, outputs, 0, {"fee_rate": invalid_value, "add_inputs": True})
self.log.info("- raises RPC error if both feeRate and fee_rate are passed")
assert_raises_rpc_error(-8, "Cannot specify both fee_rate (sat/vB) and feeRate (BTC/kvB)",
self.nodes[1].walletcreatefundedpsbt, inputs, outputs, 0, {"fee_rate": 0.1, "feeRate": 0.1, "add_inputs": True})
self.log.info("- raises RPC error if both feeRate and estimate_mode passed")
assert_raises_rpc_error(-8, "Cannot specify both estimate_mode and feeRate",
self.nodes[1].walletcreatefundedpsbt, inputs, outputs, 0, {"estimate_mode": "economical", "feeRate": 0.1, "add_inputs": True})
for param in ["feeRate", "fee_rate"]:
self.log.info("- raises RPC error if both {} and conf_target are passed".format(param))
assert_raises_rpc_error(-8, "Cannot specify both conf_target and {}. Please provide either a confirmation "
"target in blocks for automatic fee estimation, or an explicit fee rate.".format(param),
self.nodes[1].walletcreatefundedpsbt ,inputs, outputs, 0, {param: 1, "conf_target": 1, "add_inputs": True})
self.log.info("- raises RPC error if both fee_rate and estimate_mode are passed")
assert_raises_rpc_error(-8, "Cannot specify both estimate_mode and fee_rate",
self.nodes[1].walletcreatefundedpsbt ,inputs, outputs, 0, {"fee_rate": 1, "estimate_mode": "economical", "add_inputs": True})
self.log.info("- raises RPC error with invalid estimate_mode settings")
for k, v in {"number": 42, "object": {"foo": "bar"}}.items():
assert_raises_rpc_error(-3, f"JSON value of type {k} for field estimate_mode is not of expected type string",
self.nodes[1].walletcreatefundedpsbt, inputs, outputs, 0, {"estimate_mode": v, "conf_target": 0.1, "add_inputs": True})
for mode in ["", "foo", Decimal("3.141592")]:
assert_raises_rpc_error(-8, 'Invalid estimate_mode parameter, must be one of: "unset", "economical", "conservative"',
self.nodes[1].walletcreatefundedpsbt, inputs, outputs, 0, {"estimate_mode": mode, "conf_target": 0.1, "add_inputs": True})
self.log.info("- raises RPC error with invalid conf_target settings")
for mode in ["unset", "economical", "conservative"]:
self.log.debug("{}".format(mode))
for k, v in {"string": "", "object": {"foo": "bar"}}.items():
assert_raises_rpc_error(-3, f"JSON value of type {k} for field conf_target is not of expected type number",
self.nodes[1].walletcreatefundedpsbt, inputs, outputs, 0, {"estimate_mode": mode, "conf_target": v, "add_inputs": True})
for n in [-1, 0, 1009]:
assert_raises_rpc_error(-8, "Invalid conf_target, must be between 1 and 1008", # max value of 1008 per src/policy/fees.h
self.nodes[1].walletcreatefundedpsbt, inputs, outputs, 0, {"estimate_mode": mode, "conf_target": n, "add_inputs": True})
self.log.info("Test walletcreatefundedpsbt with too-high fee rate produces total fee well above -maxtxfee and raises RPC error")
# previously this was silently capped at -maxtxfee
for bool_add, outputs_array in {True: outputs, False: [{self.nodes[1].getnewaddress(): 1}]}.items():
msg = "Fee exceeds maximum configured by user (e.g. -maxtxfee, maxfeerate)"
assert_raises_rpc_error(-4, msg, self.nodes[1].walletcreatefundedpsbt, inputs, outputs_array, 0, {"fee_rate": 1000000, "add_inputs": bool_add})
assert_raises_rpc_error(-4, msg, self.nodes[1].walletcreatefundedpsbt, inputs, outputs_array, 0, {"feeRate": 1, "add_inputs": bool_add})
self.log.info("Test various PSBT operations")
# partially sign multisig things with node 1
psbtx = wmulti.walletcreatefundedpsbt(inputs=[{"txid":txid,"vout":p2wsh_pos},{"txid":txid,"vout":p2sh_pos},{"txid":txid,"vout":p2sh_p2wsh_pos}], outputs={self.nodes[1].getnewaddress():29.99}, changeAddress=self.nodes[1].getrawchangeaddress())['psbt']
walletprocesspsbt_out = self.nodes[1].walletprocesspsbt(psbtx)
psbtx = walletprocesspsbt_out['psbt']
assert_equal(walletprocesspsbt_out['complete'], False)
# Unload wmulti, we don't need it anymore
wmulti.unloadwallet()
# partially sign with node 2. This should be complete and sendable
walletprocesspsbt_out = self.nodes[2].walletprocesspsbt(psbtx)
assert_equal(walletprocesspsbt_out['complete'], True)
self.nodes[2].sendrawtransaction(walletprocesspsbt_out['hex'])
# check that walletprocesspsbt fails to decode a non-psbt
rawtx = self.nodes[1].createrawtransaction([{"txid":txid,"vout":p2wpkh_pos}], {self.nodes[1].getnewaddress():9.99})
assert_raises_rpc_error(-22, "TX decode failed", self.nodes[1].walletprocesspsbt, rawtx)
# Convert a non-psbt to psbt and make sure we can decode it
rawtx = self.nodes[0].createrawtransaction([], {self.nodes[1].getnewaddress():10})
rawtx = self.nodes[0].fundrawtransaction(rawtx)
new_psbt = self.nodes[0].converttopsbt(rawtx['hex'])
self.nodes[0].decodepsbt(new_psbt)
# Make sure that a non-psbt with signatures cannot be converted
signedtx = self.nodes[0].signrawtransactionwithwallet(rawtx['hex'])
assert_raises_rpc_error(-22, "Inputs must not have scriptSigs and scriptWitnesses",
self.nodes[0].converttopsbt, hexstring=signedtx['hex']) # permitsigdata=False by default
assert_raises_rpc_error(-22, "Inputs must not have scriptSigs and scriptWitnesses",
self.nodes[0].converttopsbt, hexstring=signedtx['hex'], permitsigdata=False)
assert_raises_rpc_error(-22, "Inputs must not have scriptSigs and scriptWitnesses",
self.nodes[0].converttopsbt, hexstring=signedtx['hex'], permitsigdata=False, iswitness=True)
# Unless we allow it to convert and strip signatures
self.nodes[0].converttopsbt(hexstring=signedtx['hex'], permitsigdata=True)
# Create outputs to nodes 1 and 2
# (note that we intentionally create two different txs here, as we want
# to check that each node is missing prevout data for one of the two
# utxos, see "should only have data for one input" test below)
node1_addr = self.nodes[1].getnewaddress()
node2_addr = self.nodes[2].getnewaddress()
utxo1 = self.create_outpoints(self.nodes[0], outputs=[{node1_addr: 13}])[0]
utxo2 = self.create_outpoints(self.nodes[0], outputs=[{node2_addr: 13}])[0]
        self.generate(self.nodes[0], 6)
# Create a psbt spending outputs from nodes 1 and 2
psbt_orig = self.nodes[0].createpsbt([utxo1, utxo2], {self.nodes[0].getnewaddress():25.999})
# Update psbts, should only have data for one input and not the other
psbt1 = self.nodes[1].walletprocesspsbt(psbt_orig, False, "ALL")['psbt']
psbt1_decoded = self.nodes[0].decodepsbt(psbt1)
assert psbt1_decoded['inputs'][0] and not psbt1_decoded['inputs'][1]
# Check that BIP32 path was added
assert "bip32_derivs" in psbt1_decoded['inputs'][0]
psbt2 = self.nodes[2].walletprocesspsbt(psbt_orig, False, "ALL", False)['psbt']
psbt2_decoded = self.nodes[0].decodepsbt(psbt2)
assert not psbt2_decoded['inputs'][0] and psbt2_decoded['inputs'][1]
# Check that BIP32 paths were not added
assert "bip32_derivs" not in psbt2_decoded['inputs'][1]
# Sign PSBTs (workaround issue #18039)
psbt1 = self.nodes[1].walletprocesspsbt(psbt_orig)['psbt']
psbt2 = self.nodes[2].walletprocesspsbt(psbt_orig)['psbt']
# Combine, finalize, and send the psbts
combined = self.nodes[0].combinepsbt([psbt1, psbt2])
finalized = self.nodes[0].finalizepsbt(combined)['hex']
self.nodes[0].sendrawtransaction(finalized)
self.generate(self.nodes[0], 6)
# Test additional args in walletcreatepsbt
# Make sure both pre-included and funded inputs
# have the correct sequence numbers based on
# replaceable arg
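        # Per BIP 125, an input signals replaceability iff its nSequence is at most
        # MAX_BIP125_RBF_SEQUENCE (0xfffffffd), so "replaceable": False should make
        # the wallet pick a strictly higher sequence number.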
block_height = self.nodes[0].getblockcount()
unspent = self.nodes[0].listunspent()[0]
psbtx_info = self.nodes[0].walletcreatefundedpsbt([{"txid":unspent["txid"], "vout":unspent["vout"]}], [{self.nodes[2].getnewaddress():unspent["amount"]+1}], block_height+2, {"replaceable": False, "add_inputs": True}, False)
decoded_psbt = self.nodes[0].decodepsbt(psbtx_info["psbt"])
for tx_in, psbt_in in zip(decoded_psbt["tx"]["vin"], decoded_psbt["inputs"]):
assert_greater_than(tx_in["sequence"], MAX_BIP125_RBF_SEQUENCE)
assert "bip32_derivs" not in psbt_in
assert_equal(decoded_psbt["tx"]["locktime"], block_height+2)
# Same construction with only locktime set and RBF explicitly enabled
psbtx_info = self.nodes[0].walletcreatefundedpsbt([{"txid":unspent["txid"], "vout":unspent["vout"]}], [{self.nodes[2].getnewaddress():unspent["amount"]+1}], block_height, {"replaceable": True, "add_inputs": True}, True)
decoded_psbt = self.nodes[0].decodepsbt(psbtx_info["psbt"])
for tx_in, psbt_in in zip(decoded_psbt["tx"]["vin"], decoded_psbt["inputs"]):
assert_equal(tx_in["sequence"], MAX_BIP125_RBF_SEQUENCE)
assert "bip32_derivs" in psbt_in
assert_equal(decoded_psbt["tx"]["locktime"], block_height)
# Same construction without optional arguments
psbtx_info = self.nodes[0].walletcreatefundedpsbt([], [{self.nodes[2].getnewaddress():unspent["amount"]+1}])
decoded_psbt = self.nodes[0].decodepsbt(psbtx_info["psbt"])
for tx_in, psbt_in in zip(decoded_psbt["tx"]["vin"], decoded_psbt["inputs"]):
assert_equal(tx_in["sequence"], MAX_BIP125_RBF_SEQUENCE)
assert "bip32_derivs" in psbt_in
assert_equal(decoded_psbt["tx"]["locktime"], 0)
# Same construction without optional arguments, for a node with -walletrbf=0
unspent1 = self.nodes[1].listunspent()[0]
psbtx_info = self.nodes[1].walletcreatefundedpsbt([{"txid":unspent1["txid"], "vout":unspent1["vout"]}], [{self.nodes[2].getnewaddress():unspent1["amount"]+1}], block_height, {"add_inputs": True})
decoded_psbt = self.nodes[1].decodepsbt(psbtx_info["psbt"])
for tx_in, psbt_in in zip(decoded_psbt["tx"]["vin"], decoded_psbt["inputs"]):
assert_greater_than(tx_in["sequence"], MAX_BIP125_RBF_SEQUENCE)
assert "bip32_derivs" in psbt_in
        # Make sure that a change address the wallet does not have P2SH innerscript access to
        # still results in success when attempting BnB coin selection
self.nodes[0].walletcreatefundedpsbt([], [{self.nodes[2].getnewaddress():unspent["amount"]+1}], block_height+2, {"changeAddress":self.nodes[1].getnewaddress()}, False)
# Make sure the wallet's change type is respected by default
small_output = {self.nodes[0].getnewaddress():0.1}
psbtx_native = self.nodes[0].walletcreatefundedpsbt([], [small_output])
self.assert_change_type(psbtx_native, "witness_v0_keyhash")
psbtx_legacy = self.nodes[1].walletcreatefundedpsbt([], [small_output])
self.assert_change_type(psbtx_legacy, "pubkeyhash")
# Make sure the change type of the wallet can also be overwritten
psbtx_np2wkh = self.nodes[1].walletcreatefundedpsbt([], [small_output], 0, {"change_type":"p2sh-segwit"})
self.assert_change_type(psbtx_np2wkh, "scripthash")
# Make sure the change type cannot be specified if a change address is given
invalid_options = {"change_type":"legacy","changeAddress":self.nodes[0].getnewaddress()}
assert_raises_rpc_error(-8, "both change address and address type options", self.nodes[0].walletcreatefundedpsbt, [], [small_output], 0, invalid_options)
# Regression test for 14473 (mishandling of already-signed witness transaction):
psbtx_info = self.nodes[0].walletcreatefundedpsbt([{"txid":unspent["txid"], "vout":unspent["vout"]}], [{self.nodes[2].getnewaddress():unspent["amount"]+1}], 0, {"add_inputs": True})
complete_psbt = self.nodes[0].walletprocesspsbt(psbtx_info["psbt"])
double_processed_psbt = self.nodes[0].walletprocesspsbt(complete_psbt["psbt"])
assert_equal(complete_psbt, double_processed_psbt)
# We don't care about the decode result, but decoding must succeed.
self.nodes[0].decodepsbt(double_processed_psbt["psbt"])
# Make sure unsafe inputs are included if specified
self.nodes[2].createwallet(wallet_name="unsafe")
wunsafe = self.nodes[2].get_wallet_rpc("unsafe")
self.nodes[0].sendtoaddress(wunsafe.getnewaddress(), 2)
self.sync_mempools()
assert_raises_rpc_error(-4, "Insufficient funds", wunsafe.walletcreatefundedpsbt, [], [{self.nodes[0].getnewaddress(): 1}])
wunsafe.walletcreatefundedpsbt([], [{self.nodes[0].getnewaddress(): 1}], 0, {"include_unsafe": True})
# BIP 174 Test Vectors
# Check that unknown values are just passed through
unknown_psbt = "cHNidP8BAD8CAAAAAf//////////////////////////////////////////AAAAAAD/////AQAAAAAAAAAAA2oBAAAAAAAACg8BAgMEBQYHCAkPAQIDBAUGBwgJCgsMDQ4PAAA="
unknown_out = self.nodes[0].walletprocesspsbt(unknown_psbt)['psbt']
assert_equal(unknown_psbt, unknown_out)
# Open the data file
with open(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data/rpc_psbt.json'), encoding='utf-8') as f:
d = json.load(f)
invalids = d['invalid']
invalid_with_msgs = d["invalid_with_msg"]
valids = d['valid']
creators = d['creator']
signers = d['signer']
combiners = d['combiner']
finalizers = d['finalizer']
extractors = d['extractor']
# Invalid PSBTs
for invalid in invalids:
assert_raises_rpc_error(-22, "TX decode failed", self.nodes[0].decodepsbt, invalid)
for invalid in invalid_with_msgs:
psbt, msg = invalid
assert_raises_rpc_error(-22, f"TX decode failed {msg}", self.nodes[0].decodepsbt, psbt)
# Valid PSBTs
for valid in valids:
self.nodes[0].decodepsbt(valid)
# Creator Tests
for creator in creators:
created_tx = self.nodes[0].createpsbt(inputs=creator['inputs'], outputs=creator['outputs'], replaceable=False)
assert_equal(created_tx, creator['result'])
# Signer tests
for i, signer in enumerate(signers):
self.nodes[2].createwallet(wallet_name="wallet{}".format(i))
wrpc = self.nodes[2].get_wallet_rpc("wallet{}".format(i))
for key in signer['privkeys']:
wrpc.importprivkey(key)
signed_tx = wrpc.walletprocesspsbt(signer['psbt'], True, "ALL")['psbt']
assert_equal(signed_tx, signer['result'])
# Combiner test
for combiner in combiners:
combined = self.nodes[2].combinepsbt(combiner['combine'])
assert_equal(combined, combiner['result'])
# Empty combiner test
assert_raises_rpc_error(-8, "Parameter 'txs' cannot be empty", self.nodes[0].combinepsbt, [])
# Finalizer test
for finalizer in finalizers:
finalized = self.nodes[2].finalizepsbt(finalizer['finalize'], False)['psbt']
assert_equal(finalized, finalizer['result'])
# Extractor test
for extractor in extractors:
extracted = self.nodes[2].finalizepsbt(extractor['extract'], True)['hex']
assert_equal(extracted, extractor['result'])
# Unload extra wallets
for i, signer in enumerate(signers):
self.nodes[2].unloadwallet("wallet{}".format(i))
if self.options.descriptors:
self.test_utxo_conversion()
self.test_input_confs_control()
# Test that psbts with p2pkh outputs are created properly
p2pkh = self.nodes[0].getnewaddress(address_type='legacy')
psbt = self.nodes[1].walletcreatefundedpsbt([], [{p2pkh : 1}], 0, {"includeWatching" : True}, True)
self.nodes[0].decodepsbt(psbt['psbt'])
# Test decoding error: invalid base64
assert_raises_rpc_error(-22, "TX decode failed invalid base64", self.nodes[0].decodepsbt, ";definitely not base64;")
# Send to all types of addresses
addr1 = self.nodes[1].getnewaddress("", "bech32")
addr2 = self.nodes[1].getnewaddress("", "legacy")
addr3 = self.nodes[1].getnewaddress("", "p2sh-segwit")
utxo1, utxo2, utxo3 = self.create_outpoints(self.nodes[1], outputs=[{addr1: 11}, {addr2: 11}, {addr3: 11}])
self.sync_all()
def test_psbt_input_keys(psbt_input, keys):
"""Check that the psbt input has only the expected keys."""
assert_equal(set(keys), set(psbt_input.keys()))
# Create a PSBT. None of the inputs are filled initially
psbt = self.nodes[1].createpsbt([utxo1, utxo2, utxo3], {self.nodes[0].getnewaddress():32.999})
decoded = self.nodes[1].decodepsbt(psbt)
test_psbt_input_keys(decoded['inputs'][0], [])
test_psbt_input_keys(decoded['inputs'][1], [])
test_psbt_input_keys(decoded['inputs'][2], [])
# Update a PSBT with UTXOs from the node
        # The bech32 input should be filled with a witness UTXO (plus the full previous
        # transaction); the other inputs only get the non-witness UTXO
updated = self.nodes[1].utxoupdatepsbt(psbt)
decoded = self.nodes[1].decodepsbt(updated)
test_psbt_input_keys(decoded['inputs'][0], ['witness_utxo', 'non_witness_utxo'])
test_psbt_input_keys(decoded['inputs'][1], ['non_witness_utxo'])
test_psbt_input_keys(decoded['inputs'][2], ['non_witness_utxo'])
# Try again, now while providing descriptors, making P2SH-segwit work, and causing bip32_derivs and redeem_script to be filled in
descs = [self.nodes[1].getaddressinfo(addr)['desc'] for addr in [addr1,addr2,addr3]]
updated = self.nodes[1].utxoupdatepsbt(psbt=psbt, descriptors=descs)
decoded = self.nodes[1].decodepsbt(updated)
test_psbt_input_keys(decoded['inputs'][0], ['witness_utxo', 'non_witness_utxo', 'bip32_derivs'])
test_psbt_input_keys(decoded['inputs'][1], ['non_witness_utxo', 'bip32_derivs'])
test_psbt_input_keys(decoded['inputs'][2], ['non_witness_utxo','witness_utxo', 'bip32_derivs', 'redeem_script'])
# Two PSBTs with a common input should not be joinable
psbt1 = self.nodes[1].createpsbt([utxo1], {self.nodes[0].getnewaddress():Decimal('10.999')})
assert_raises_rpc_error(-8, "exists in multiple PSBTs", self.nodes[1].joinpsbts, [psbt1, updated])
# Join two distinct PSBTs
addr4 = self.nodes[1].getnewaddress("", "p2sh-segwit")
utxo4 = self.create_outpoints(self.nodes[0], outputs=[{addr4: 5}])[0]
self.generate(self.nodes[0], 6)
psbt2 = self.nodes[1].createpsbt([utxo4], {self.nodes[0].getnewaddress():Decimal('4.999')})
psbt2 = self.nodes[1].walletprocesspsbt(psbt2)['psbt']
psbt2_decoded = self.nodes[0].decodepsbt(psbt2)
assert "final_scriptwitness" in psbt2_decoded['inputs'][0] and "final_scriptSig" in psbt2_decoded['inputs'][0]
joined = self.nodes[0].joinpsbts([psbt, psbt2])
joined_decoded = self.nodes[0].decodepsbt(joined)
assert len(joined_decoded['inputs']) == 4 and len(joined_decoded['outputs']) == 2 and "final_scriptwitness" not in joined_decoded['inputs'][3] and "final_scriptSig" not in joined_decoded['inputs'][3]
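        # The join carries over 3 + 1 inputs and 1 + 1 outputs, and (per the assert above)
        # psbt2's previously finalized input no longer exposes its final script fields.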
# Check that joining shuffles the inputs and outputs
# 10 attempts should be enough to get a shuffled join
shuffled = False
for _ in range(10):
shuffled_joined = self.nodes[0].joinpsbts([psbt, psbt2])
shuffled |= joined != shuffled_joined
if shuffled:
break
assert shuffled
# Newly created PSBT needs UTXOs and updating
addr = self.nodes[1].getnewaddress("", "p2sh-segwit")
utxo = self.create_outpoints(self.nodes[0], outputs=[{addr: 7}])[0]
addrinfo = self.nodes[1].getaddressinfo(addr)
        self.generate(self.nodes[0], 6)
psbt = self.nodes[1].createpsbt([utxo], {self.nodes[0].getnewaddress("", "p2sh-segwit"):Decimal('6.999')})
analyzed = self.nodes[0].analyzepsbt(psbt)
assert not analyzed['inputs'][0]['has_utxo'] and not analyzed['inputs'][0]['is_final'] and analyzed['inputs'][0]['next'] == 'updater' and analyzed['next'] == 'updater'
# After update with wallet, only needs signing
updated = self.nodes[1].walletprocesspsbt(psbt, False, 'ALL', True)['psbt']
analyzed = self.nodes[0].analyzepsbt(updated)
assert analyzed['inputs'][0]['has_utxo'] and not analyzed['inputs'][0]['is_final'] and analyzed['inputs'][0]['next'] == 'signer' and analyzed['next'] == 'signer' and analyzed['inputs'][0]['missing']['signatures'][0] == addrinfo['embedded']['witness_program']
# Check fee and size things
assert analyzed['fee'] == Decimal('0.001') and analyzed['estimated_vsize'] == 134 and analyzed['estimated_feerate'] == Decimal('0.00746268')
# After signing and finalizing, needs extracting
signed = self.nodes[1].walletprocesspsbt(updated)['psbt']
analyzed = self.nodes[0].analyzepsbt(signed)
assert analyzed['inputs'][0]['has_utxo'] and analyzed['inputs'][0]['is_final'] and analyzed['next'] == 'extractor'
self.log.info("PSBT spending unspendable outputs should have error message and Creator as next")
analysis = self.nodes[0].analyzepsbt('cHNidP8BAJoCAAAAAljoeiG1ba8MI76OcHBFbDNvfLqlyHV5JPVFiHuyq911AAAAAAD/////g40EJ9DsZQpoqka7CwmK6kQiwHGyyng1Kgd5WdB86h0BAAAAAP////8CcKrwCAAAAAAWAEHYXCtx0AYLCcmIauuBXlCZHdoSTQDh9QUAAAAAFv8/wADXYP/7//////8JxOh0LR2HAI8AAAAAAAEBIADC6wsAAAAAF2oUt/X69ELjeX2nTof+fZ10l+OyAokDAQcJAwEHEAABAACAAAEBIADC6wsAAAAAF2oUt/X69ELjeX2nTof+fZ10l+OyAokDAQcJAwEHENkMak8AAAAA')
assert_equal(analysis['next'], 'creator')
assert_equal(analysis['error'], 'PSBT is not valid. Input 0 spends unspendable output')
self.log.info("PSBT with invalid values should have error message and Creator as next")
analysis = self.nodes[0].analyzepsbt('cHNidP8BAHECAAAAAfA00BFgAm6tp86RowwH6BMImQNL5zXUcTT97XoLGz0BAAAAAAD/////AgD5ApUAAAAAFgAUKNw0x8HRctAgmvoevm4u1SbN7XL87QKVAAAAABYAFPck4gF7iL4NL4wtfRAKgQbghiTUAAAAAAABAR8AgIFq49AHABYAFJUDtxf2PHo641HEOBOAIvFMNTr2AAAA')
assert_equal(analysis['next'], 'creator')
assert_equal(analysis['error'], 'PSBT is not valid. Input 0 has invalid value')
self.log.info("PSBT with signed, but not finalized, inputs should have Finalizer as next")
analysis = self.nodes[0].analyzepsbt('cHNidP8BAHECAAAAAZYezcxdnbXoQCmrD79t/LzDgtUo9ERqixk8wgioAobrAAAAAAD9////AlDDAAAAAAAAFgAUy/UxxZuzZswcmFnN/E9DGSiHLUsuGPUFAAAAABYAFLsH5o0R38wXx+X2cCosTMCZnQ4baAAAAAABAR8A4fUFAAAAABYAFOBI2h5thf3+Lflb2LGCsVSZwsltIgIC/i4dtVARCRWtROG0HHoGcaVklzJUcwo5homgGkSNAnJHMEQCIGx7zKcMIGr7cEES9BR4Kdt/pzPTK3fKWcGyCJXb7MVnAiALOBgqlMH4GbC1HDh/HmylmO54fyEy4lKde7/BT/PWxwEBAwQBAAAAIgYC/i4dtVARCRWtROG0HHoGcaVklzJUcwo5homgGkSNAnIYDwVpQ1QAAIABAACAAAAAgAAAAAAAAAAAAAAiAgL+CIiB59NSCssOJRGiMYQK1chahgAaaJpIXE41Cyir+xgPBWlDVAAAgAEAAIAAAACAAQAAAAAAAAAA')
assert_equal(analysis['next'], 'finalizer')
analysis = self.nodes[0].analyzepsbt('cHNidP8BAHECAAAAAfA00BFgAm6tp86RowwH6BMImQNL5zXUcTT97XoLGz0BAAAAAAD/////AgCAgWrj0AcAFgAUKNw0x8HRctAgmvoevm4u1SbN7XL87QKVAAAAABYAFPck4gF7iL4NL4wtfRAKgQbghiTUAAAAAAABAR8A8gUqAQAAABYAFJUDtxf2PHo641HEOBOAIvFMNTr2AAAA')
assert_equal(analysis['next'], 'creator')
assert_equal(analysis['error'], 'PSBT is not valid. Output amount invalid')
analysis = self.nodes[0].analyzepsbt('cHNidP8BAJoCAAAAAkvEW8NnDtdNtDpsmze+Ht2LH35IJcKv00jKAlUs21RrAwAAAAD/////S8Rbw2cO1020OmybN74e3Ysffkglwq/TSMoCVSzbVGsBAAAAAP7///8CwLYClQAAAAAWABSNJKzjaUb3uOxixsvh1GGE3fW7zQD5ApUAAAAAFgAUKNw0x8HRctAgmvoevm4u1SbN7XIAAAAAAAEAnQIAAAACczMa321tVHuN4GKWKRncycI22aX3uXgwSFUKM2orjRsBAAAAAP7///9zMxrfbW1Ue43gYpYpGdzJwjbZpfe5eDBIVQozaiuNGwAAAAAA/v///wIA+QKVAAAAABl2qRT9zXUVA8Ls5iVqynLHe5/vSe1XyYisQM0ClQAAAAAWABRmWQUcjSjghQ8/uH4Bn/zkakwLtAAAAAAAAQEfQM0ClQAAAAAWABRmWQUcjSjghQ8/uH4Bn/zkakwLtAAAAA==')
assert_equal(analysis['next'], 'creator')
assert_equal(analysis['error'], 'PSBT is not valid. Input 0 specifies invalid prevout')
assert_raises_rpc_error(-25, 'Inputs missing or spent', self.nodes[0].walletprocesspsbt, 'cHNidP8BAJoCAAAAAkvEW8NnDtdNtDpsmze+Ht2LH35IJcKv00jKAlUs21RrAwAAAAD/////S8Rbw2cO1020OmybN74e3Ysffkglwq/TSMoCVSzbVGsBAAAAAP7///8CwLYClQAAAAAWABSNJKzjaUb3uOxixsvh1GGE3fW7zQD5ApUAAAAAFgAUKNw0x8HRctAgmvoevm4u1SbN7XIAAAAAAAEAnQIAAAACczMa321tVHuN4GKWKRncycI22aX3uXgwSFUKM2orjRsBAAAAAP7///9zMxrfbW1Ue43gYpYpGdzJwjbZpfe5eDBIVQozaiuNGwAAAAAA/v///wIA+QKVAAAAABl2qRT9zXUVA8Ls5iVqynLHe5/vSe1XyYisQM0ClQAAAAAWABRmWQUcjSjghQ8/uH4Bn/zkakwLtAAAAAAAAQEfQM0ClQAAAAAWABRmWQUcjSjghQ8/uH4Bn/zkakwLtAAAAA==')
self.log.info("Test that we can fund psbts with external inputs specified")
privkey, _ = generate_keypair(wif=True)
self.nodes[1].createwallet("extfund")
wallet = self.nodes[1].get_wallet_rpc("extfund")
# Make a weird but signable script. sh(wsh(pkh())) descriptor accomplishes this
desc = descsum_create("sh(wsh(pkh({})))".format(privkey))
if self.options.descriptors:
res = self.nodes[0].importdescriptors([{"desc": desc, "timestamp": "now"}])
else:
res = self.nodes[0].importmulti([{"desc": desc, "timestamp": "now"}])
assert res[0]["success"]
addr = self.nodes[0].deriveaddresses(desc)[0]
addr_info = self.nodes[0].getaddressinfo(addr)
self.nodes[0].sendtoaddress(addr, 10)
self.nodes[0].sendtoaddress(wallet.getnewaddress(), 10)
self.generate(self.nodes[0], 6)
ext_utxo = self.nodes[0].listunspent(addresses=[addr])[0]
# An external input without solving data should result in an error
assert_raises_rpc_error(-4, "Not solvable pre-selected input COutPoint(%s, %s)" % (ext_utxo["txid"][0:10], ext_utxo["vout"]), wallet.walletcreatefundedpsbt, [ext_utxo], {self.nodes[0].getnewaddress(): 15})
# But funding should work when the solving data is provided
psbt = wallet.walletcreatefundedpsbt([ext_utxo], {self.nodes[0].getnewaddress(): 15}, 0, {"add_inputs": True, "solving_data": {"pubkeys": [addr_info['pubkey']], "scripts": [addr_info["embedded"]["scriptPubKey"], addr_info["embedded"]["embedded"]["scriptPubKey"]]}})
signed = wallet.walletprocesspsbt(psbt['psbt'])
assert not signed['complete']
signed = self.nodes[0].walletprocesspsbt(signed['psbt'])
assert signed['complete']
psbt = wallet.walletcreatefundedpsbt([ext_utxo], {self.nodes[0].getnewaddress(): 15}, 0, {"add_inputs": True, "solving_data":{"descriptors": [desc]}})
signed = wallet.walletprocesspsbt(psbt['psbt'])
assert not signed['complete']
signed = self.nodes[0].walletprocesspsbt(signed['psbt'])
assert signed['complete']
final = signed['hex']
dec = self.nodes[0].decodepsbt(signed["psbt"])
for i, txin in enumerate(dec["tx"]["vin"]):
if txin["txid"] == ext_utxo["txid"] and txin["vout"] == ext_utxo["vout"]:
input_idx = i
break
psbt_in = dec["inputs"][input_idx]
        # Calculate the input weight:
        #   (prevout + sequence + length of scriptSig + scriptSig + 1 byte buffer) * WITNESS_SCALE_FACTOR
        #   + num scriptWitness stack items
        #   + (length of stack item + stack item) for each of the N stack items
        #   + 1 byte buffer
len_scriptsig = len(psbt_in["final_scriptSig"]["hex"]) // 2 if "final_scriptSig" in psbt_in else 0
len_scriptsig += len(ser_compact_size(len_scriptsig)) + 1
len_scriptwitness = (sum([(len(x) // 2) + len(ser_compact_size(len(x) // 2)) for x in psbt_in["final_scriptwitness"]]) + len(psbt_in["final_scriptwitness"]) + 1) if "final_scriptwitness" in psbt_in else 0
input_weight = ((40 + len_scriptsig) * WITNESS_SCALE_FACTOR) + len_scriptwitness
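        # Rough worked example (illustration only, assuming a typical P2WPKH input with an
        # empty scriptSig, a 72-byte signature and a 33-byte pubkey):
        #   len_scriptsig     = 0 + len(ser_compact_size(0)) + 1      = 2
        #   len_scriptwitness = (72 + 1) + (33 + 1) + 2 items + 1     = 110
        #   input_weight      = (40 + 2) * WITNESS_SCALE_FACTOR + 110 = 278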
low_input_weight = input_weight // 2
high_input_weight = input_weight * 2
# Input weight error conditions
assert_raises_rpc_error(
-8,
"Input weights should be specified in inputs rather than in options.",
wallet.walletcreatefundedpsbt,
inputs=[ext_utxo],
outputs={self.nodes[0].getnewaddress(): 15},
options={"input_weights": [{"txid": ext_utxo["txid"], "vout": ext_utxo["vout"], "weight": 1000}]}
)
# Funding should also work if the input weight is provided
psbt = wallet.walletcreatefundedpsbt(
inputs=[{"txid": ext_utxo["txid"], "vout": ext_utxo["vout"], "weight": input_weight}],
outputs={self.nodes[0].getnewaddress(): 15},
add_inputs=True,
)
signed = wallet.walletprocesspsbt(psbt["psbt"])
signed = self.nodes[0].walletprocesspsbt(signed["psbt"])
final = signed["hex"]
assert self.nodes[0].testmempoolaccept([final])[0]["allowed"]
# Reducing the weight should have a lower fee
psbt2 = wallet.walletcreatefundedpsbt(
inputs=[{"txid": ext_utxo["txid"], "vout": ext_utxo["vout"], "weight": low_input_weight}],
outputs={self.nodes[0].getnewaddress(): 15},
add_inputs=True,
)
assert_greater_than(psbt["fee"], psbt2["fee"])
# Increasing the weight should have a higher fee
psbt2 = wallet.walletcreatefundedpsbt(
inputs=[{"txid": ext_utxo["txid"], "vout": ext_utxo["vout"], "weight": high_input_weight}],
outputs={self.nodes[0].getnewaddress(): 15},
add_inputs=True,
)
assert_greater_than(psbt2["fee"], psbt["fee"])
# The provided weight should override the calculated weight when solving data is provided
psbt3 = wallet.walletcreatefundedpsbt(
inputs=[{"txid": ext_utxo["txid"], "vout": ext_utxo["vout"], "weight": high_input_weight}],
outputs={self.nodes[0].getnewaddress(): 15},
add_inputs=True, solving_data={"descriptors": [desc]},
)
assert_equal(psbt2["fee"], psbt3["fee"])
# Import the external utxo descriptor so that we can sign for it from the test wallet
if self.options.descriptors:
res = wallet.importdescriptors([{"desc": desc, "timestamp": "now"}])
else:
res = wallet.importmulti([{"desc": desc, "timestamp": "now"}])
assert res[0]["success"]
# The provided weight should override the calculated weight for a wallet input
psbt3 = wallet.walletcreatefundedpsbt(
inputs=[{"txid": ext_utxo["txid"], "vout": ext_utxo["vout"], "weight": high_input_weight}],
outputs={self.nodes[0].getnewaddress(): 15},
add_inputs=True,
)
assert_equal(psbt2["fee"], psbt3["fee"])
self.log.info("Test signing inputs that the wallet has keys for but is not watching the scripts")
self.nodes[1].createwallet(wallet_name="scriptwatchonly", disable_private_keys=True)
watchonly = self.nodes[1].get_wallet_rpc("scriptwatchonly")
privkey, pubkey = generate_keypair(wif=True)
desc = descsum_create("wsh(pkh({}))".format(pubkey.hex()))
if self.options.descriptors:
res = watchonly.importdescriptors([{"desc": desc, "timestamp": "now"}])
else:
res = watchonly.importmulti([{"desc": desc, "timestamp": "now"}])
assert res[0]["success"]
addr = self.nodes[0].deriveaddresses(desc)[0]
self.nodes[0].sendtoaddress(addr, 10)
self.generate(self.nodes[0], 1)
self.nodes[0].importprivkey(privkey)
psbt = watchonly.sendall([wallet.getnewaddress()])["psbt"]
signed_tx = self.nodes[0].walletprocesspsbt(psbt)
self.nodes[0].sendrawtransaction(signed_tx["hex"])
# Same test but for taproot
if self.options.descriptors:
privkey, pubkey = generate_keypair(wif=True)
desc = descsum_create("tr({},pk({}))".format(H_POINT, pubkey.hex()))
res = watchonly.importdescriptors([{"desc": desc, "timestamp": "now"}])
assert res[0]["success"]
addr = self.nodes[0].deriveaddresses(desc)[0]
self.nodes[0].sendtoaddress(addr, 10)
self.generate(self.nodes[0], 1)
self.nodes[0].importdescriptors([{"desc": descsum_create("tr({})".format(privkey)), "timestamp":"now"}])
psbt = watchonly.sendall([wallet.getnewaddress(), addr])["psbt"]
processed_psbt = self.nodes[0].walletprocesspsbt(psbt)
txid = self.nodes[0].sendrawtransaction(processed_psbt["hex"])
vout = find_vout_for_address(self.nodes[0], txid, addr)
# Make sure tap tree is in psbt
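            # (PSBT_OUT_TAP_TREE is the per-output taproot tree field defined in BIP 371;
            # decodepsbt exposes it as "taproot_tree".)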
parsed_psbt = PSBT.from_base64(psbt)
assert_greater_than(len(parsed_psbt.o[vout].map[PSBT_OUT_TAP_TREE]), 0)
assert "taproot_tree" in self.nodes[0].decodepsbt(psbt)["outputs"][vout]
parsed_psbt.make_blank()
comb_psbt = self.nodes[0].combinepsbt([psbt, parsed_psbt.to_base64()])
assert_equal(comb_psbt, psbt)
self.log.info("Test that walletprocesspsbt both updates and signs a non-updated psbt containing Taproot inputs")
addr = self.nodes[0].getnewaddress("", "bech32m")
utxo = self.create_outpoints(self.nodes[0], outputs=[{addr: 1}])[0]
psbt = self.nodes[0].createpsbt([utxo], [{self.nodes[0].getnewaddress(): 0.9999}])
signed = self.nodes[0].walletprocesspsbt(psbt)
rawtx = signed["hex"]
self.nodes[0].sendrawtransaction(rawtx)
self.generate(self.nodes[0], 1)
# Make sure tap tree is not in psbt
parsed_psbt = PSBT.from_base64(psbt)
assert PSBT_OUT_TAP_TREE not in parsed_psbt.o[0].map
assert "taproot_tree" not in self.nodes[0].decodepsbt(psbt)["outputs"][0]
parsed_psbt.make_blank()
comb_psbt = self.nodes[0].combinepsbt([psbt, parsed_psbt.to_base64()])
assert_equal(comb_psbt, psbt)
self.log.info("Test walletprocesspsbt raises if an invalid sighashtype is passed")
assert_raises_rpc_error(-8, "all is not a valid sighash parameter.", self.nodes[0].walletprocesspsbt, psbt, sighashtype="all")
self.log.info("Test decoding PSBT with per-input preimage types")
# note that the decodepsbt RPC doesn't check whether preimages and hashes match
hash_ripemd160, preimage_ripemd160 = randbytes(20), randbytes(50)
hash_sha256, preimage_sha256 = randbytes(32), randbytes(50)
hash_hash160, preimage_hash160 = randbytes(20), randbytes(50)
hash_hash256, preimage_hash256 = randbytes(32), randbytes(50)
tx = CTransaction()
tx.vin = [CTxIn(outpoint=COutPoint(hash=int('aa' * 32, 16), n=0), scriptSig=b""),
CTxIn(outpoint=COutPoint(hash=int('bb' * 32, 16), n=0), scriptSig=b""),
CTxIn(outpoint=COutPoint(hash=int('cc' * 32, 16), n=0), scriptSig=b""),
CTxIn(outpoint=COutPoint(hash=int('dd' * 32, 16), n=0), scriptSig=b"")]
tx.vout = [CTxOut(nValue=0, scriptPubKey=b"")]
psbt = PSBT()
psbt.g = PSBTMap({PSBT_GLOBAL_UNSIGNED_TX: tx.serialize()})
psbt.i = [PSBTMap({bytes([PSBT_IN_RIPEMD160]) + hash_ripemd160: preimage_ripemd160}),
PSBTMap({bytes([PSBT_IN_SHA256]) + hash_sha256: preimage_sha256}),
PSBTMap({bytes([PSBT_IN_HASH160]) + hash_hash160: preimage_hash160}),
PSBTMap({bytes([PSBT_IN_HASH256]) + hash_hash256: preimage_hash256})]
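        # Per BIP 174, each preimage record's map key is the one-byte field type
        # followed by the hash itself, and the value is the corresponding preimage.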
psbt.o = [PSBTMap()]
res_inputs = self.nodes[0].decodepsbt(psbt.to_base64())["inputs"]
assert_equal(len(res_inputs), 4)
preimage_keys = ["ripemd160_preimages", "sha256_preimages", "hash160_preimages", "hash256_preimages"]
expected_hashes = [hash_ripemd160, hash_sha256, hash_hash160, hash_hash256]
expected_preimages = [preimage_ripemd160, preimage_sha256, preimage_hash160, preimage_hash256]
for res_input, preimage_key, hash, preimage in zip(res_inputs, preimage_keys, expected_hashes, expected_preimages):
assert preimage_key in res_input
assert_equal(len(res_input[preimage_key]), 1)
assert hash.hex() in res_input[preimage_key]
assert_equal(res_input[preimage_key][hash.hex()], preimage.hex())
self.log.info("Test that combining PSBTs with different transactions fails")
tx = CTransaction()
tx.vin = [CTxIn(outpoint=COutPoint(hash=int('aa' * 32, 16), n=0), scriptSig=b"")]
tx.vout = [CTxOut(nValue=0, scriptPubKey=b"")]
psbt1 = PSBT(g=PSBTMap({PSBT_GLOBAL_UNSIGNED_TX: tx.serialize()}), i=[PSBTMap()], o=[PSBTMap()]).to_base64()
tx.vout[0].nValue += 1 # slightly modify tx
psbt2 = PSBT(g=PSBTMap({PSBT_GLOBAL_UNSIGNED_TX: tx.serialize()}), i=[PSBTMap()], o=[PSBTMap()]).to_base64()
assert_raises_rpc_error(-8, "PSBTs not compatible (different transactions)", self.nodes[0].combinepsbt, [psbt1, psbt2])
assert_equal(self.nodes[0].combinepsbt([psbt1, psbt1]), psbt1)
self.log.info("Test that PSBT inputs are being checked via script execution")
acs_prevout = CTxOut(nValue=0, scriptPubKey=CScript([OP_TRUE]))
tx = CTransaction()
tx.vin = [CTxIn(outpoint=COutPoint(hash=int('dd' * 32, 16), n=0), scriptSig=b"")]
tx.vout = [CTxOut(nValue=0, scriptPubKey=b"")]
psbt = PSBT()
psbt.g = PSBTMap({PSBT_GLOBAL_UNSIGNED_TX: tx.serialize()})
psbt.i = [PSBTMap({bytes([PSBT_IN_WITNESS_UTXO]) : acs_prevout.serialize()})]
psbt.o = [PSBTMap()]
assert_equal(self.nodes[0].finalizepsbt(psbt.to_base64()),
{'hex': '0200000001dddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd0000000000000000000100000000000000000000000000', 'complete': True})
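        # With an anyone-can-spend OP_TRUE prevout, script execution succeeds with an
        # empty scriptSig and witness, so the finalizer can complete the input without
        # any signature data.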
self.log.info("Test we don't crash when making a 0-value funded transaction at 0 fee without forcing an input selection")
assert_raises_rpc_error(-4, "Transaction requires one destination of non-0 value, a non-0 feerate, or a pre-selected input", self.nodes[0].walletcreatefundedpsbt, [], [{"data": "deadbeef"}], 0, {"fee_rate": "0"})
self.log.info("Test descriptorprocesspsbt updates and signs a psbt with descriptors")
self.generate(self.nodes[2], 1)
# Disable the wallet for node 2 since `descriptorprocesspsbt` does not use the wallet
self.restart_node(2, extra_args=["-disablewallet"])
self.connect_nodes(0, 2)
self.connect_nodes(1, 2)
key_info = get_generate_key()
key = key_info.privkey
address = key_info.p2wpkh_addr
descriptor = descsum_create(f"wpkh({key})")
utxo = self.create_outpoints(self.nodes[0], outputs=[{address: 1}])[0]
self.sync_all()
psbt = self.nodes[2].createpsbt([utxo], {self.nodes[0].getnewaddress(): 0.99999})
decoded = self.nodes[2].decodepsbt(psbt)
test_psbt_input_keys(decoded['inputs'][0], [])
# Test that even if the wrong descriptor is given, `witness_utxo` and `non_witness_utxo`
# are still added to the psbt
alt_descriptor = descsum_create(f"wpkh({get_generate_key().privkey})")
alt_psbt = self.nodes[2].descriptorprocesspsbt(psbt=psbt, descriptors=[alt_descriptor], sighashtype="ALL")["psbt"]
decoded = self.nodes[2].decodepsbt(alt_psbt)
test_psbt_input_keys(decoded['inputs'][0], ['witness_utxo', 'non_witness_utxo'])
# Test that the psbt is not finalized and does not have bip32_derivs unless specified
processed_psbt = self.nodes[2].descriptorprocesspsbt(psbt=psbt, descriptors=[descriptor], sighashtype="ALL", bip32derivs=True, finalize=False)
decoded = self.nodes[2].decodepsbt(processed_psbt['psbt'])
test_psbt_input_keys(decoded['inputs'][0], ['witness_utxo', 'non_witness_utxo', 'partial_signatures', 'bip32_derivs'])
# If psbt not finalized, test that result does not have hex
assert "hex" not in processed_psbt
processed_psbt = self.nodes[2].descriptorprocesspsbt(psbt=psbt, descriptors=[descriptor], sighashtype="ALL", bip32derivs=False, finalize=True)
decoded = self.nodes[2].decodepsbt(processed_psbt['psbt'])
test_psbt_input_keys(decoded['inputs'][0], ['witness_utxo', 'non_witness_utxo', 'final_scriptwitness'])
# Test psbt is complete
assert_equal(processed_psbt['complete'], True)
# Broadcast transaction
self.nodes[2].sendrawtransaction(processed_psbt['hex'])
self.log.info("Test descriptorprocesspsbt raises if an invalid sighashtype is passed")
assert_raises_rpc_error(-8, "all is not a valid sighash parameter.", self.nodes[2].descriptorprocesspsbt, psbt, [descriptor], sighashtype="all")
if __name__ == '__main__':
PSBTTest().main()
# File: bitcoin/test/functional/p2p_sendtxrcncl.py
#!/usr/bin/env python3
# Copyright (c) 2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test SENDTXRCNCL message
"""
from test_framework.messages import (
msg_sendtxrcncl,
msg_verack,
msg_version,
msg_wtxidrelay,
NODE_BLOOM,
)
from test_framework.p2p import (
P2PInterface,
P2P_SERVICES,
P2P_SUBVERSION,
P2P_VERSION,
)
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal
class PeerNoVerack(P2PInterface):
def __init__(self, wtxidrelay=True):
super().__init__(wtxidrelay=wtxidrelay)
def on_version(self, message):
# Avoid sending verack in response to version.
# When calling add_p2p_connection, wait_for_verack=False must be set (see
# comment in add_p2p_connection).
if message.nVersion >= 70016 and self.wtxidrelay:
self.send_message(msg_wtxidrelay())
class SendTxrcnclReceiver(P2PInterface):
def __init__(self):
super().__init__()
self.sendtxrcncl_msg_received = None
def on_sendtxrcncl(self, message):
self.sendtxrcncl_msg_received = message
class P2PFeelerReceiver(SendTxrcnclReceiver):
def on_version(self, message):
        pass # feeler connections cannot send any message other than their own version
class PeerTrackMsgOrder(P2PInterface):
def __init__(self):
super().__init__()
self.messages = []
def on_message(self, message):
super().on_message(message)
self.messages.append(message)
def create_sendtxrcncl_msg():
sendtxrcncl_msg = msg_sendtxrcncl()
sendtxrcncl_msg.version = 1
sendtxrcncl_msg.salt = 2
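    # Per BIP 330, SENDTXRCNCL carries a uint32 protocol version and a uint64 salt;
    # the two peers' salts are combined into the shared reconciliation salt.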
return sendtxrcncl_msg
class SendTxRcnclTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.extra_args = [['-txreconciliation']]
def run_test(self):
# Check everything concerning *sending* SENDTXRCNCL
# First, *sending* to *inbound*.
self.log.info('SENDTXRCNCL sent to an inbound')
peer = self.nodes[0].add_p2p_connection(SendTxrcnclReceiver(), send_version=True, wait_for_verack=True)
assert peer.sendtxrcncl_msg_received
assert_equal(peer.sendtxrcncl_msg_received.version, 1)
self.nodes[0].disconnect_p2ps()
self.log.info('SENDTXRCNCL should be sent before VERACK')
peer = self.nodes[0].add_p2p_connection(PeerTrackMsgOrder(), send_version=True, wait_for_verack=True)
peer.wait_for_verack()
verack_index = [i for i, msg in enumerate(peer.messages) if msg.msgtype == b'verack'][0]
sendtxrcncl_index = [i for i, msg in enumerate(peer.messages) if msg.msgtype == b'sendtxrcncl'][0]
assert sendtxrcncl_index < verack_index
self.nodes[0].disconnect_p2ps()
self.log.info('SENDTXRCNCL on pre-WTXID version should not be sent')
peer = self.nodes[0].add_p2p_connection(SendTxrcnclReceiver(), send_version=False, wait_for_verack=False)
pre_wtxid_version_msg = msg_version()
pre_wtxid_version_msg.nVersion = 70015
pre_wtxid_version_msg.strSubVer = P2P_SUBVERSION
pre_wtxid_version_msg.nServices = P2P_SERVICES
pre_wtxid_version_msg.relay = 1
peer.send_message(pre_wtxid_version_msg)
peer.wait_for_verack()
assert not peer.sendtxrcncl_msg_received
self.nodes[0].disconnect_p2ps()
self.log.info('SENDTXRCNCL for fRelay=false should not be sent')
peer = self.nodes[0].add_p2p_connection(SendTxrcnclReceiver(), send_version=False, wait_for_verack=False)
no_txrelay_version_msg = msg_version()
no_txrelay_version_msg.nVersion = P2P_VERSION
no_txrelay_version_msg.strSubVer = P2P_SUBVERSION
no_txrelay_version_msg.nServices = P2P_SERVICES
no_txrelay_version_msg.relay = 0
peer.send_message(no_txrelay_version_msg)
peer.wait_for_verack()
assert not peer.sendtxrcncl_msg_received
self.nodes[0].disconnect_p2ps()
self.log.info('SENDTXRCNCL for fRelay=false should not be sent (with NODE_BLOOM offered)')
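        # An fRelay=false peer that was offered NODE_BLOOM may still enable tx relay
        # later via BIP 37 filterload, but reconciliation should not be offered up front.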
self.restart_node(0, ["-peerbloomfilters", "-txreconciliation"])
peer = self.nodes[0].add_p2p_connection(SendTxrcnclReceiver(), send_version=False, wait_for_verack=False)
no_txrelay_version_msg = msg_version()
no_txrelay_version_msg.nVersion = P2P_VERSION
no_txrelay_version_msg.strSubVer = P2P_SUBVERSION
no_txrelay_version_msg.nServices = P2P_SERVICES
no_txrelay_version_msg.relay = 0
peer.send_message(no_txrelay_version_msg)
peer.wait_for_verack()
assert peer.nServices & NODE_BLOOM != 0
assert not peer.sendtxrcncl_msg_received
self.nodes[0].disconnect_p2ps()
# Now, *sending* to *outbound*.
self.log.info('SENDTXRCNCL sent to an outbound')
peer = self.nodes[0].add_outbound_p2p_connection(
SendTxrcnclReceiver(), wait_for_verack=True, p2p_idx=0, connection_type="outbound-full-relay")
assert peer.sendtxrcncl_msg_received
assert_equal(peer.sendtxrcncl_msg_received.version, 1)
self.nodes[0].disconnect_p2ps()
self.log.info('SENDTXRCNCL should not be sent if block-relay-only')
peer = self.nodes[0].add_outbound_p2p_connection(
SendTxrcnclReceiver(), wait_for_verack=True, p2p_idx=0, connection_type="block-relay-only")
assert not peer.sendtxrcncl_msg_received
self.nodes[0].disconnect_p2ps()
self.log.info("SENDTXRCNCL should not be sent if feeler")
peer = self.nodes[0].add_outbound_p2p_connection(P2PFeelerReceiver(), p2p_idx=0, connection_type="feeler")
assert not peer.sendtxrcncl_msg_received
self.nodes[0].disconnect_p2ps()
self.log.info("SENDTXRCNCL should not be sent if addrfetch")
peer = self.nodes[0].add_outbound_p2p_connection(
SendTxrcnclReceiver(), wait_for_verack=True, p2p_idx=0, connection_type="addr-fetch")
assert not peer.sendtxrcncl_msg_received
self.nodes[0].disconnect_p2ps()
self.log.info('SENDTXRCNCL not sent if -txreconciliation flag is not set')
self.restart_node(0, [])
peer = self.nodes[0].add_p2p_connection(SendTxrcnclReceiver(), send_version=True, wait_for_verack=True)
assert not peer.sendtxrcncl_msg_received
self.nodes[0].disconnect_p2ps()
self.log.info('SENDTXRCNCL not sent if blocksonly is set')
self.restart_node(0, ["-txreconciliation", "-blocksonly"])
peer = self.nodes[0].add_p2p_connection(SendTxrcnclReceiver(), send_version=True, wait_for_verack=True)
assert not peer.sendtxrcncl_msg_received
self.nodes[0].disconnect_p2ps()
# Check everything concerning *receiving* SENDTXRCNCL
# First, receiving from *inbound*.
self.restart_node(0, ["-txreconciliation"])
self.log.info('valid SENDTXRCNCL received')
peer = self.nodes[0].add_p2p_connection(PeerNoVerack(), send_version=True, wait_for_verack=False)
with self.nodes[0].assert_debug_log(["received: sendtxrcncl"]):
peer.send_message(create_sendtxrcncl_msg())
self.log.info('second SENDTXRCNCL triggers a disconnect')
with self.nodes[0].assert_debug_log(["(sendtxrcncl received from already registered peer); disconnecting"]):
peer.send_message(create_sendtxrcncl_msg())
peer.wait_for_disconnect()
self.restart_node(0, [])
        self.log.info('SENDTXRCNCL is ignored when txreconciliation is not enabled locally')
peer = self.nodes[0].add_p2p_connection(PeerNoVerack(), send_version=True, wait_for_verack=False)
with self.nodes[0].assert_debug_log(['ignored, as our node does not have txreconciliation enabled']):
peer.send_message(create_sendtxrcncl_msg())
self.nodes[0].disconnect_p2ps()
self.restart_node(0, ["-txreconciliation"])
self.log.info('SENDTXRCNCL with version=0 triggers a disconnect')
sendtxrcncl_low_version = create_sendtxrcncl_msg()
sendtxrcncl_low_version.version = 0
peer = self.nodes[0].add_p2p_connection(PeerNoVerack(), send_version=True, wait_for_verack=False)
with self.nodes[0].assert_debug_log(["txreconciliation protocol violation"]):
peer.send_message(sendtxrcncl_low_version)
peer.wait_for_disconnect()
self.log.info('SENDTXRCNCL with version=2 is valid')
sendtxrcncl_higher_version = create_sendtxrcncl_msg()
sendtxrcncl_higher_version.version = 2
peer = self.nodes[0].add_p2p_connection(PeerNoVerack(), send_version=True, wait_for_verack=False)
with self.nodes[0].assert_debug_log(['Register peer=1']):
peer.send_message(sendtxrcncl_higher_version)
self.nodes[0].disconnect_p2ps()
self.log.info('unexpected SENDTXRCNCL is ignored')
peer = self.nodes[0].add_p2p_connection(PeerNoVerack(), send_version=False, wait_for_verack=False)
old_version_msg = msg_version()
old_version_msg.nVersion = 70015
old_version_msg.strSubVer = P2P_SUBVERSION
old_version_msg.nServices = P2P_SERVICES
old_version_msg.relay = 1
peer.send_message(old_version_msg)
with self.nodes[0].assert_debug_log(['Ignore unexpected txreconciliation signal']):
peer.send_message(create_sendtxrcncl_msg())
self.nodes[0].disconnect_p2ps()
self.log.info('sending SENDTXRCNCL after sending VERACK triggers a disconnect')
peer = self.nodes[0].add_p2p_connection(P2PInterface())
with self.nodes[0].assert_debug_log(["sendtxrcncl received after verack"]):
peer.send_message(create_sendtxrcncl_msg())
peer.wait_for_disconnect()
self.log.info('SENDTXRCNCL without WTXIDRELAY is ignored (recon state is erased after VERACK)')
peer = self.nodes[0].add_p2p_connection(PeerNoVerack(wtxidrelay=False), send_version=True, wait_for_verack=False)
with self.nodes[0].assert_debug_log(['Forget txreconciliation state of peer']):
peer.send_message(create_sendtxrcncl_msg())
peer.send_message(msg_verack())
self.nodes[0].disconnect_p2ps()
# Now, *receiving* from *outbound*.
        self.log.info('SENDTXRCNCL from a block-relay-only peer triggers a disconnect')
peer = self.nodes[0].add_outbound_p2p_connection(
PeerNoVerack(), wait_for_verack=False, p2p_idx=0, connection_type="block-relay-only")
with self.nodes[0].assert_debug_log(["we indicated no tx relay; disconnecting"]):
peer.send_message(create_sendtxrcncl_msg())
peer.wait_for_disconnect()
if __name__ == '__main__':
SendTxRcnclTest().main()
# File: bitcoin/test/functional/mempool_resurrect.py
#!/usr/bin/env python3
# Copyright (c) 2014-2021 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test resurrection of mined transactions when the blockchain is re-organized."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal
from test_framework.wallet import MiniWallet
class MempoolCoinbaseTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
def run_test(self):
node = self.nodes[0]
wallet = MiniWallet(node)
# Spend block 1/2/3's coinbase transactions
# Mine a block
# Create three more transactions, spending the spends
# Mine another block
# ... make sure all the transactions are confirmed
# Invalidate both blocks
# ... make sure all the transactions are put back in the mempool
# Mine a new block
# ... make sure all the transactions are confirmed again
blocks = []
spends1_ids = [wallet.send_self_transfer(from_node=node)['txid'] for _ in range(3)]
blocks.extend(self.generate(node, 1))
spends2_ids = [wallet.send_self_transfer(from_node=node)['txid'] for _ in range(3)]
blocks.extend(self.generate(node, 1))
spends_ids = set(spends1_ids + spends2_ids)
# mempool should be empty, all txns confirmed
assert_equal(set(node.getrawmempool()), set())
confirmed_txns = set(node.getblock(blocks[0])['tx'] + node.getblock(blocks[1])['tx'])
# Checks that all spend txns are contained in the mined blocks
assert spends_ids < confirmed_txns
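        # "<" on sets is a strict-subset check; it holds here because the blocks also
        # contain coinbase transactions, e.g. {"a"} < {"a", "b"} but not {"a"} < {"a"}.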
# Use invalidateblock to re-org back
node.invalidateblock(blocks[0])
# All txns should be back in mempool with 0 confirmations
assert_equal(set(node.getrawmempool()), spends_ids)
# Generate another block, they should all get mined
blocks = self.generate(node, 1)
# mempool should be empty, all txns confirmed
assert_equal(set(node.getrawmempool()), set())
confirmed_txns = set(node.getblock(blocks[0])['tx'])
assert spends_ids < confirmed_txns
if __name__ == '__main__':
MempoolCoinbaseTest().main()
# File: bitcoin/test/functional/wallet_resendwallettransactions.py
#!/usr/bin/env python3
# Copyright (c) 2017-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test that the wallet resends transactions periodically."""
import time
from decimal import Decimal
from test_framework.blocktools import (
create_block,
create_coinbase,
)
from test_framework.messages import DEFAULT_MEMPOOL_EXPIRY_HOURS
from test_framework.p2p import P2PTxInvStore
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
get_fee,
try_rpc,
)
class ResendWalletTransactionsTest(BitcoinTestFramework):
def add_options(self, parser):
self.add_wallet_options(parser)
def set_test_params(self):
self.num_nodes = 1
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def run_test(self):
node = self.nodes[0] # alias
peer_first = node.add_p2p_connection(P2PTxInvStore())
self.log.info("Create a new transaction and wait until it's broadcast")
parent_utxo, indep_utxo = node.listunspent()[:2]
addr = node.getnewaddress()
txid = node.send(outputs=[{addr: 1}], inputs=[parent_utxo])["txid"]
# Can take a few seconds due to transaction trickling
peer_first.wait_for_broadcast([txid])
# Add a second peer since txs aren't rebroadcast to the same peer (see m_tx_inventory_known_filter)
peer_second = node.add_p2p_connection(P2PTxInvStore())
self.log.info("Create a block")
# Create and submit a block without the transaction.
# Transactions are only rebroadcast if there has been a block at least five minutes
# after the last time we tried to broadcast. Use mocktime and give an extra minute to be sure.
block_time = int(time.time()) + 6 * 60
node.setmocktime(block_time)
block = create_block(int(node.getbestblockhash(), 16), create_coinbase(node.getblockcount() + 1), block_time)
block.solve()
node.submitblock(block.serialize().hex())
# Set correct m_best_block_time, which is used in ResubmitWalletTransactions
node.syncwithvalidationinterfacequeue()
now = int(time.time())
# Transaction should not be rebroadcast within first 12 hours
# Leave 2 mins for buffer
twelve_hrs = 12 * 60 * 60
two_min = 2 * 60
node.setmocktime(now + twelve_hrs - two_min)
node.mockscheduler(60) # Tell scheduler to call MaybeResendWalletTxs now
assert_equal(int(txid, 16) in peer_second.get_invs(), False)
self.log.info("Bump time & check that transaction is rebroadcast")
# Transaction should be rebroadcast approximately 24 hours in the future,
# but can range from 12-36. So bump 36 hours to be sure.
with node.assert_debug_log(['resubmit 1 unconfirmed transactions']):
node.setmocktime(now + 36 * 60 * 60)
# Tell scheduler to call MaybeResendWalletTxs now.
node.mockscheduler(60)
# Give some time for trickle to occur
node.setmocktime(now + 36 * 60 * 60 + 600)
peer_second.wait_for_broadcast([txid])
self.log.info("Chain of unconfirmed not-in-mempool txs are rebroadcast")
# This tests that the node broadcasts the parent transaction before the child transaction.
        # To test that scenario, we need a way to reliably get a child transaction placed
        # in mapWallet before the parent. We cannot predict the position in mapWallet,
        # but we can observe it using listreceivedbyaddress and other related RPCs.
        #
        # So we will create the child transaction, use listreceivedbyaddress to see what the
        # ordering of mapWallet is, and if the child is not before the parent, we will create a
        # new child (via a manual fee bump) and remove the old child (via removeprunedfunds)
        # until the child is ordered before the parent.
child_inputs = [{"txid": txid, "vout": 0}]
child_txid = node.sendall(recipients=[addr], inputs=child_inputs)["txid"]
# Get the child tx's info for manual bumping
child_tx_info = node.gettransaction(txid=child_txid, verbose=True)
child_output_value = child_tx_info["decoded"]["vout"][0]["value"]
# Include an additional 1 vbyte buffer to handle when we have a smaller signature
        additional_child_fee = get_fee(child_tx_info["decoded"]["vsize"] + 1, Decimal("0.00001100"))
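        # get_fee scales the BTC/kvB feerate by the vsize, so each iteration lowers the
        # child's output by roughly 1.1 sat/vB (0.00001100 BTC/kvB) times (vsize + 1).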
while True:
txids = node.listreceivedbyaddress(minconf=0, address_filter=addr)[0]["txids"]
if txids == [child_txid, txid]:
break
# Manually bump the tx
# The inputs and the output address stay the same, just changing the amount for the new fee
child_output_value -= additional_child_fee
bumped_raw = node.createrawtransaction(inputs=child_inputs, outputs=[{addr: child_output_value}])
bumped = node.signrawtransactionwithwallet(bumped_raw)
bumped_txid = node.decoderawtransaction(bumped["hex"])["txid"]
            # Sometimes we will get a signature that is slightly shorter than expected, which makes
            # the feerate a bit higher and the follow-up fee a bit lower, resulting in a replacement
            # that can't be broadcast. We can just skip that and keep grinding.
if try_rpc(-26, "insufficient fee, rejecting replacement", node.sendrawtransaction, bumped["hex"]):
continue
# The scheduler queue creates a copy of the added tx after
# send/bumpfee and re-adds it to the wallet (undoing the next
# removeprunedfunds). So empty the scheduler queue:
node.syncwithvalidationinterfacequeue()
node.removeprunedfunds(child_txid)
child_txid = bumped_txid
entry_time = node.getmempoolentry(child_txid)["time"]
block_time = entry_time + 6 * 60
node.setmocktime(block_time)
block = create_block(int(node.getbestblockhash(), 16), create_coinbase(node.getblockcount() + 1), block_time)
block.solve()
node.submitblock(block.serialize().hex())
# Set correct m_best_block_time, which is used in ResubmitWalletTransactions
node.syncwithvalidationinterfacequeue()
evict_time = block_time + 60 * 60 * DEFAULT_MEMPOOL_EXPIRY_HOURS + 5
# Flush out currently scheduled resubmit attempt now so that there can't be one right between eviction and check.
with node.assert_debug_log(['resubmit 2 unconfirmed transactions']):
node.setmocktime(evict_time)
node.mockscheduler(60)
# Evict these txs from the mempool
indep_send = node.send(outputs=[{node.getnewaddress(): 1}], inputs=[indep_utxo])
node.getmempoolentry(indep_send["txid"])
assert_raises_rpc_error(-5, "Transaction not in mempool", node.getmempoolentry, txid)
assert_raises_rpc_error(-5, "Transaction not in mempool", node.getmempoolentry, child_txid)
# Rebroadcast and check that parent and child are both in the mempool
with node.assert_debug_log(['resubmit 2 unconfirmed transactions']):
node.setmocktime(evict_time + 36 * 60 * 60) # 36 hrs is the upper limit of the resend timer
node.mockscheduler(60)
node.getmempoolentry(txid)
node.getmempoolentry(child_txid)
if __name__ == '__main__':
ResendWalletTransactionsTest().main()
# File: bitcoin/test/functional/rpc_rawtransaction.py
#!/usr/bin/env python3
# Copyright (c) 2014-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the rawtransaction RPCs.
Test the following RPCs:
- getrawtransaction
- createrawtransaction
- signrawtransactionwithwallet
- sendrawtransaction
- decoderawtransaction
"""
from collections import OrderedDict
from decimal import Decimal
from itertools import product
from test_framework.messages import (
MAX_BIP125_RBF_SEQUENCE,
COIN,
CTransaction,
CTxOut,
tx_from_hex,
)
from test_framework.script import (
CScript,
OP_FALSE,
OP_INVALIDOPCODE,
OP_RETURN,
)
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_greater_than,
assert_raises_rpc_error,
)
from test_framework.wallet import (
getnewdestination,
MiniWallet,
)
TXID = "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000"
class multidict(dict):
"""Dictionary that allows duplicate keys.
Constructed with a list of (key, value) tuples. When dumped by the json module,
    will output invalid json with repeated keys, e.g.:
    >>> json.dumps(multidict([(1,2),(1,2)]))
'{"1": 2, "1": 2}'
Used to test calls to rpc methods with repeated keys in the json object."""
def __init__(self, x):
dict.__init__(self, x)
self.x = x
def items(self):
return self.x
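# Illustrative only: json.dumps(multidict([("a", 1), ("a", 2)])) yields '{"a": 1, "a": 2}',
# letting tests exercise the server's handling of duplicated JSON keys.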
class RawTransactionsTest(BitcoinTestFramework):
def add_options(self, parser):
self.add_wallet_options(parser, descriptors=False)
def set_test_params(self):
self.num_nodes = 3
self.extra_args = [
["-txindex"],
["-txindex"],
["-fastprune", "-prune=1"],
]
# whitelist all peers to speed up tx relay / mempool sync
for args in self.extra_args:
args.append("[email protected]")
self.supports_cli = False
def setup_network(self):
super().setup_network()
self.connect_nodes(0, 2)
def run_test(self):
self.wallet = MiniWallet(self.nodes[0])
self.getrawtransaction_tests()
self.createrawtransaction_tests()
self.sendrawtransaction_tests()
self.sendrawtransaction_testmempoolaccept_tests()
self.decoderawtransaction_tests()
self.transaction_version_number_tests()
if self.is_specified_wallet_compiled() and not self.options.descriptors:
self.import_deterministic_coinbase_privkeys()
self.raw_multisig_transaction_legacy_tests()
self.getrawtransaction_verbosity_tests()
def getrawtransaction_tests(self):
tx = self.wallet.send_self_transfer(from_node=self.nodes[0])
self.generate(self.nodes[0], 1)
txId = tx['txid']
err_msg = (
"No such mempool transaction. Use -txindex or provide a block hash to enable"
" blockchain transaction queries. Use gettransaction for wallet transactions."
)
for n in [0, 2]:
self.log.info(f"Test getrawtransaction {'with' if n == 0 else 'without'} -txindex")
if n == 0:
# With -txindex.
# 1. valid parameters - only supply txid
assert_equal(self.nodes[n].getrawtransaction(txId), tx['hex'])
# 2. valid parameters - supply txid and 0 for non-verbose
assert_equal(self.nodes[n].getrawtransaction(txId, 0), tx['hex'])
# 3. valid parameters - supply txid and False for non-verbose
assert_equal(self.nodes[n].getrawtransaction(txId, False), tx['hex'])
# 4. valid parameters - supply txid and 1 for verbose.
# We only check the "hex" field of the output so we don't need to update this test every time the output format changes.
assert_equal(self.nodes[n].getrawtransaction(txId, 1)["hex"], tx['hex'])
assert_equal(self.nodes[n].getrawtransaction(txId, 2)["hex"], tx['hex'])
# 5. valid parameters - supply txid and True for verbose
assert_equal(self.nodes[n].getrawtransaction(txId, True)["hex"], tx['hex'])
else:
# Without -txindex, expect to raise.
for verbose in [None, 0, False, 1, True]:
assert_raises_rpc_error(-5, err_msg, self.nodes[n].getrawtransaction, txId, verbose)
# 6. invalid parameters - supply txid and invalid boolean values (strings) for verbose
for value in ["True", "False"]:
assert_raises_rpc_error(-3, "not of expected type number", self.nodes[n].getrawtransaction, txid=txId, verbose=value)
assert_raises_rpc_error(-3, "not of expected type number", self.nodes[n].getrawtransaction, txid=txId, verbosity=value)
# 7. invalid parameters - supply txid and empty array
assert_raises_rpc_error(-3, "not of expected type number", self.nodes[n].getrawtransaction, txId, [])
# 8. invalid parameters - supply txid and empty dict
assert_raises_rpc_error(-3, "not of expected type number", self.nodes[n].getrawtransaction, txId, {})
# Make a tx by sending, then generate 2 blocks; block1 has the tx in it
tx = self.wallet.send_self_transfer(from_node=self.nodes[2])['txid']
block1, block2 = self.generate(self.nodes[2], 2)
for n in [0, 2]:
self.log.info(f"Test getrawtransaction {'with' if n == 0 else 'without'} -txindex, with blockhash")
# We should be able to get the raw transaction by providing the correct block
gottx = self.nodes[n].getrawtransaction(txid=tx, verbose=True, blockhash=block1)
assert_equal(gottx['txid'], tx)
assert_equal(gottx['in_active_chain'], True)
if n == 0:
self.log.info("Test getrawtransaction with -txindex, without blockhash: 'in_active_chain' should be absent")
for v in [1,2]:
gottx = self.nodes[n].getrawtransaction(txid=tx, verbosity=v)
assert_equal(gottx['txid'], tx)
assert 'in_active_chain' not in gottx
else:
self.log.info("Test getrawtransaction without -txindex, without blockhash: expect the call to raise")
assert_raises_rpc_error(-5, err_msg, self.nodes[n].getrawtransaction, txid=tx, verbose=True)
# We should not get the tx if we provide an unrelated block
assert_raises_rpc_error(-5, "No such transaction found", self.nodes[n].getrawtransaction, txid=tx, blockhash=block2)
# An invalid block hash should raise the correct errors
assert_raises_rpc_error(-3, "JSON value of type bool is not of expected type string", self.nodes[n].getrawtransaction, txid=tx, blockhash=True)
assert_raises_rpc_error(-8, "parameter 3 must be of length 64 (not 6, for 'foobar')", self.nodes[n].getrawtransaction, txid=tx, blockhash="foobar")
assert_raises_rpc_error(-8, "parameter 3 must be of length 64 (not 8, for 'abcd1234')", self.nodes[n].getrawtransaction, txid=tx, blockhash="abcd1234")
foo = "ZZZ0000000000000000000000000000000000000000000000000000000000000"
assert_raises_rpc_error(-8, f"parameter 3 must be hexadecimal string (not '{foo}')", self.nodes[n].getrawtransaction, txid=tx, blockhash=foo)
bar = "0000000000000000000000000000000000000000000000000000000000000000"
assert_raises_rpc_error(-5, "Block hash not found", self.nodes[n].getrawtransaction, txid=tx, blockhash=bar)
# Undo the blocks and verify that "in_active_chain" is false.
self.nodes[n].invalidateblock(block1)
gottx = self.nodes[n].getrawtransaction(txid=tx, verbose=True, blockhash=block1)
assert_equal(gottx['in_active_chain'], False)
self.nodes[n].reconsiderblock(block1)
assert_equal(self.nodes[n].getbestblockhash(), block2)
self.log.info("Test getrawtransaction on genesis block coinbase returns an error")
block = self.nodes[0].getblock(self.nodes[0].getblockhash(0))
assert_raises_rpc_error(-5, "The genesis block coinbase is not considered an ordinary transaction", self.nodes[0].getrawtransaction, block['merkleroot'])
def getrawtransaction_verbosity_tests(self):
tx = self.wallet.send_self_transfer(from_node=self.nodes[1])['txid']
[block1] = self.generate(self.nodes[1], 1)
fields = [
'blockhash',
'blocktime',
'confirmations',
'hash',
'hex',
'in_active_chain',
'locktime',
'size',
'time',
'txid',
'vin',
'vout',
'vsize',
'weight',
]
prevout_fields = [
'generated',
'height',
'value',
'scriptPubKey',
]
script_pub_key_fields = [
'address',
'asm',
'hex',
'type',
]
# node 0 & 2 with verbosity 1 & 2
for n, v in product([0, 2], [1, 2]):
self.log.info(f"Test getrawtransaction_verbosity {v} {'with' if n == 0 else 'without'} -txindex, with blockhash")
gottx = self.nodes[n].getrawtransaction(txid=tx, verbosity=v, blockhash=block1)
missing_fields = set(fields).difference(gottx.keys())
if missing_fields:
raise AssertionError(f"fields {', '.join(missing_fields)} are not in transaction")
assert len(gottx['vin']) > 0
if v == 1:
assert 'fee' not in gottx
assert 'prevout' not in gottx['vin'][0]
if v == 2:
assert isinstance(gottx['fee'], Decimal)
assert 'prevout' in gottx['vin'][0]
prevout = gottx['vin'][0]['prevout']
script_pub_key = prevout['scriptPubKey']
missing_fields = set(prevout_fields).difference(prevout.keys())
if missing_fields:
raise AssertionError(f"fields {', '.join(missing_fields)} are not in transaction")
missing_fields = set(script_pub_key_fields).difference(script_pub_key.keys())
if missing_fields:
raise AssertionError(f"fields {', '.join(missing_fields)} are not in transaction")
# check verbosity 2 without blockhash but with txindex
assert 'fee' in self.nodes[0].getrawtransaction(txid=tx, verbosity=2)
# check that the coinbase has no 'fee' field and that verbosity 2 does not throw any errors
coin_base = self.nodes[1].getblock(block1)['tx'][0]
gottx = self.nodes[1].getrawtransaction(txid=coin_base, verbosity=2, blockhash=block1)
assert 'fee' not in gottx
# check that verbosity 2 for a mempool tx will fall back to verbosity 1
# Do this with a pruned chain, as a regression test for https://github.com/bitcoin/bitcoin/pull/29003
self.generate(self.nodes[2], 400)
assert_greater_than(self.nodes[2].pruneblockchain(250), 0)
mempool_tx = self.wallet.send_self_transfer(from_node=self.nodes[2])['txid']
gottx = self.nodes[2].getrawtransaction(txid=mempool_tx, verbosity=2)
assert 'fee' not in gottx
def createrawtransaction_tests(self):
self.log.info("Test createrawtransaction")
# Test `createrawtransaction` required parameters
assert_raises_rpc_error(-1, "createrawtransaction", self.nodes[0].createrawtransaction)
assert_raises_rpc_error(-1, "createrawtransaction", self.nodes[0].createrawtransaction, [])
# Test `createrawtransaction` invalid extra parameters
assert_raises_rpc_error(-1, "createrawtransaction", self.nodes[0].createrawtransaction, [], {}, 0, False, 'foo')
# Test `createrawtransaction` invalid `inputs`
assert_raises_rpc_error(-3, "JSON value of type string is not of expected type array", self.nodes[0].createrawtransaction, 'foo', {})
assert_raises_rpc_error(-3, "JSON value of type string is not of expected type object", self.nodes[0].createrawtransaction, ['foo'], {})
assert_raises_rpc_error(-3, "JSON value of type null is not of expected type string", self.nodes[0].createrawtransaction, [{}], {})
assert_raises_rpc_error(-8, "txid must be of length 64 (not 3, for 'foo')", self.nodes[0].createrawtransaction, [{'txid': 'foo'}], {})
txid = "ZZZ7bb8b1697ea987f3b223ba7819250cae33efacb068d23dc24859824a77844"
assert_raises_rpc_error(-8, f"txid must be hexadecimal string (not '{txid}')", self.nodes[0].createrawtransaction, [{'txid': txid}], {})
assert_raises_rpc_error(-8, "Invalid parameter, missing vout key", self.nodes[0].createrawtransaction, [{'txid': TXID}], {})
assert_raises_rpc_error(-8, "Invalid parameter, missing vout key", self.nodes[0].createrawtransaction, [{'txid': TXID, 'vout': 'foo'}], {})
assert_raises_rpc_error(-8, "Invalid parameter, vout cannot be negative", self.nodes[0].createrawtransaction, [{'txid': TXID, 'vout': -1}], {})
# sequence number out of range
for invalid_seq in [-1, 4294967296]:
inputs = [{'txid': TXID, 'vout': 1, 'sequence': invalid_seq}]
address = getnewdestination()[2]
outputs = {address: 1}
assert_raises_rpc_error(-8, 'Invalid parameter, sequence number is out of range',
self.nodes[0].createrawtransaction, inputs, outputs)
# with valid sequence number
for valid_seq in [1000, 4294967294]:
inputs = [{'txid': TXID, 'vout': 1, 'sequence': valid_seq}]
address = getnewdestination()[2]
outputs = {address: 1}
rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
decrawtx = self.nodes[0].decoderawtransaction(rawtx)
assert_equal(decrawtx['vin'][0]['sequence'], valid_seq)
# Test `createrawtransaction` invalid `outputs`
address = getnewdestination()[2]
assert_raises_rpc_error(-3, "JSON value of type string is not of expected type array", self.nodes[0].createrawtransaction, [], 'foo')
self.nodes[0].createrawtransaction(inputs=[], outputs={}) # Should not throw for backwards compatibility
self.nodes[0].createrawtransaction(inputs=[], outputs=[])
assert_raises_rpc_error(-8, "Data must be hexadecimal string", self.nodes[0].createrawtransaction, [], {'data': 'foo'})
assert_raises_rpc_error(-5, "Invalid Bitcoin address", self.nodes[0].createrawtransaction, [], {'foo': 0})
assert_raises_rpc_error(-3, "Invalid amount", self.nodes[0].createrawtransaction, [], {address: 'foo'})
assert_raises_rpc_error(-3, "Amount out of range", self.nodes[0].createrawtransaction, [], {address: -1})
assert_raises_rpc_error(-8, "Invalid parameter, duplicated address: %s" % address, self.nodes[0].createrawtransaction, [], multidict([(address, 1), (address, 1)]))
assert_raises_rpc_error(-8, "Invalid parameter, duplicated address: %s" % address, self.nodes[0].createrawtransaction, [], [{address: 1}, {address: 1}])
assert_raises_rpc_error(-8, "Invalid parameter, duplicate key: data", self.nodes[0].createrawtransaction, [], [{"data": 'aa'}, {"data": "bb"}])
assert_raises_rpc_error(-8, "Invalid parameter, duplicate key: data", self.nodes[0].createrawtransaction, [], multidict([("data", 'aa'), ("data", "bb")]))
assert_raises_rpc_error(-8, "Invalid parameter, key-value pair must contain exactly one key", self.nodes[0].createrawtransaction, [], [{'a': 1, 'b': 2}])
assert_raises_rpc_error(-8, "Invalid parameter, key-value pair not an object as expected", self.nodes[0].createrawtransaction, [], [['key-value pair1'], ['2']])
# Test `createrawtransaction` mismatch between sequence number(s) and `replaceable` option
assert_raises_rpc_error(-8, "Invalid parameter combination: Sequence number(s) contradict replaceable option",
self.nodes[0].createrawtransaction, [{'txid': TXID, 'vout': 0, 'sequence': MAX_BIP125_RBF_SEQUENCE+1}], {}, 0, True)
# Test `createrawtransaction` invalid `locktime`
assert_raises_rpc_error(-3, "JSON value of type string is not of expected type number", self.nodes[0].createrawtransaction, [], {}, 'foo')
assert_raises_rpc_error(-8, "Invalid parameter, locktime out of range", self.nodes[0].createrawtransaction, [], {}, -1)
assert_raises_rpc_error(-8, "Invalid parameter, locktime out of range", self.nodes[0].createrawtransaction, [], {}, 4294967296)
# Test `createrawtransaction` invalid `replaceable`
assert_raises_rpc_error(-3, "JSON value of type string is not of expected type bool", self.nodes[0].createrawtransaction, [], {}, 0, 'foo')
# Test that createrawtransaction accepts an array and object as outputs
# One output
tx = tx_from_hex(self.nodes[2].createrawtransaction(inputs=[{'txid': TXID, 'vout': 9}], outputs={address: 99}))
assert_equal(len(tx.vout), 1)
assert_equal(
tx.serialize().hex(),
self.nodes[2].createrawtransaction(inputs=[{'txid': TXID, 'vout': 9}], outputs=[{address: 99}]),
)
# Two outputs
address2 = getnewdestination()[2]
tx = tx_from_hex(self.nodes[2].createrawtransaction(inputs=[{'txid': TXID, 'vout': 9}], outputs=OrderedDict([(address, 99), (address2, 99)])))
assert_equal(len(tx.vout), 2)
assert_equal(
tx.serialize().hex(),
self.nodes[2].createrawtransaction(inputs=[{'txid': TXID, 'vout': 9}], outputs=[{address: 99}, {address2: 99}]),
)
# Multiple mixed outputs
tx = tx_from_hex(self.nodes[2].createrawtransaction(inputs=[{'txid': TXID, 'vout': 9}], outputs=multidict([(address, 99), (address2, 99), ('data', '99')])))
assert_equal(len(tx.vout), 3)
assert_equal(
tx.serialize().hex(),
self.nodes[2].createrawtransaction(inputs=[{'txid': TXID, 'vout': 9}], outputs=[{address: 99}, {address2: 99}, {'data': '99'}]),
)
def sendrawtransaction_tests(self):
self.log.info("Test sendrawtransaction with missing input")
inputs = [{'txid': TXID, 'vout': 1}] # won't exist
address = getnewdestination()[2]
outputs = {address: 4.998}
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
assert_raises_rpc_error(-25, "bad-txns-inputs-missingorspent", self.nodes[2].sendrawtransaction, rawtx)
self.log.info("Test sendrawtransaction exceeding, falling short of, and equaling maxburnamount")
max_burn_exceeded = "Unspendable output exceeds maximum configured by user (maxburnamount)"
# Test that spendable transaction with default maxburnamount (0) gets sent
tx = self.wallet.create_self_transfer()['tx']
tx_hex = tx.serialize().hex()
self.nodes[2].sendrawtransaction(hexstring=tx_hex)
# Test that datacarrier transaction with default maxburnamount (0) does not get sent
tx = self.wallet.create_self_transfer()['tx']
tx_val = 0.001
tx.vout = [CTxOut(int(Decimal(tx_val) * COIN), CScript([OP_RETURN] + [OP_FALSE] * 30))]
tx_hex = tx.serialize().hex()
assert_raises_rpc_error(-25, max_burn_exceeded, self.nodes[2].sendrawtransaction, tx_hex)
# Test that oversized script gets rejected by sendrawtransaction
tx = self.wallet.create_self_transfer()['tx']
tx_val = 0.001
tx.vout = [CTxOut(int(Decimal(tx_val) * COIN), CScript([OP_FALSE] * 10001))]
tx_hex = tx.serialize().hex()
assert_raises_rpc_error(-25, max_burn_exceeded, self.nodes[2].sendrawtransaction, tx_hex)
# Test that script containing invalid opcode gets rejected by sendrawtransaction
tx = self.wallet.create_self_transfer()['tx']
tx_val = 0.01
tx.vout = [CTxOut(int(Decimal(tx_val) * COIN), CScript([OP_INVALIDOPCODE]))]
tx_hex = tx.serialize().hex()
assert_raises_rpc_error(-25, max_burn_exceeded, self.nodes[2].sendrawtransaction, tx_hex)
# Test a transaction where our burn exceeds maxburnamount
tx = self.wallet.create_self_transfer()['tx']
tx_val = 0.001
tx.vout = [CTxOut(int(Decimal(tx_val) * COIN), CScript([OP_RETURN] + [OP_FALSE] * 30))]
tx_hex = tx.serialize().hex()
assert_raises_rpc_error(-25, max_burn_exceeded, self.nodes[2].sendrawtransaction, tx_hex, 0, 0.0009)
# Test a transaction where our burn falls short of maxburnamount
tx = self.wallet.create_self_transfer()['tx']
tx_val = 0.001
tx.vout = [CTxOut(int(Decimal(tx_val) * COIN), CScript([OP_RETURN] + [OP_FALSE] * 30))]
tx_hex = tx.serialize().hex()
self.nodes[2].sendrawtransaction(hexstring=tx_hex, maxfeerate='0', maxburnamount='0.0011')
# Test a transaction where our burn equals maxburnamount
tx = self.wallet.create_self_transfer()['tx']
tx_val = 0.001
tx.vout = [CTxOut(int(Decimal(tx_val) * COIN), CScript([OP_RETURN] + [OP_FALSE] * 30))]
tx_hex = tx.serialize().hex()
self.nodes[2].sendrawtransaction(hexstring=tx_hex, maxfeerate='0', maxburnamount='0.001')
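# Taken together, the cases above pin down the maxburnamount semantics: a
# 0.001 BTC unspendable output is rejected when maxburnamount is 0.0009,
# but accepted at 0.0011 and at exactly 0.001, i.e. the bound is inclusive.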
def sendrawtransaction_testmempoolaccept_tests(self):
self.log.info("Test sendrawtransaction/testmempoolaccept with maxfeerate")
fee_exceeds_max = "Fee exceeds maximum configured by user (e.g. -maxtxfee, maxfeerate)"
# Test a transaction with a small fee.
# Fee rate is 0.00100000 BTC/kvB
tx = self.wallet.create_self_transfer(fee_rate=Decimal('0.00100000'))
# With maxfeerate set below the transaction's fee rate, testmempoolaccept should reject
testres = self.nodes[2].testmempoolaccept([tx['hex']], 0.00001000)[0]
assert_equal(testres['allowed'], False)
assert_equal(testres['reject-reason'], 'max-fee-exceeded')
# and sendrawtransaction should throw
assert_raises_rpc_error(-25, fee_exceeds_max, self.nodes[2].sendrawtransaction, tx['hex'], 0.00001000)
# and the following calls should both succeed
testres = self.nodes[2].testmempoolaccept(rawtxs=[tx['hex']])[0]
assert_equal(testres['allowed'], True)
self.nodes[2].sendrawtransaction(hexstring=tx['hex'])
# Test a transaction with a large fee.
# Fee rate is 0.20000000 BTC/kvB
tx = self.wallet.create_self_transfer(fee_rate=Decimal("0.20000000"))
# The fee rate exceeds the default maxfeerate, so testmempoolaccept should reject
testres = self.nodes[2].testmempoolaccept([tx['hex']])[0]
assert_equal(testres['allowed'], False)
assert_equal(testres['reject-reason'], 'max-fee-exceeded')
# and sendrawtransaction should throw
assert_raises_rpc_error(-25, fee_exceeds_max, self.nodes[2].sendrawtransaction, tx['hex'])
# and the following calls should both succeed
testres = self.nodes[2].testmempoolaccept(rawtxs=[tx['hex']], maxfeerate='0.20000000')[0]
assert_equal(testres['allowed'], True)
self.nodes[2].sendrawtransaction(hexstring=tx['hex'], maxfeerate='0.20000000')
self.log.info("Test sendrawtransaction/testmempoolaccept with tx already in the chain")
self.generate(self.nodes[2], 1)
for node in self.nodes:
testres = node.testmempoolaccept([tx['hex']])[0]
assert_equal(testres['allowed'], False)
assert_equal(testres['reject-reason'], 'txn-already-known')
assert_raises_rpc_error(-27, 'Transaction already in block chain', node.sendrawtransaction, tx['hex'])
def decoderawtransaction_tests(self):
self.log.info("Test decoderawtransaction")
# witness transaction
encrawtx = "010000000001010000000000000072c1a6a246ae63f74f931e8365e15a089c68d61900000000000000000000ffffffff0100e1f50500000000000102616100000000"
decrawtx = self.nodes[0].decoderawtransaction(encrawtx, True) # decode as witness transaction
assert_equal(decrawtx['vout'][0]['value'], Decimal('1.00000000'))
assert_raises_rpc_error(-22, 'TX decode failed', self.nodes[0].decoderawtransaction, encrawtx, False) # force decode as non-witness transaction
# non-witness transaction
encrawtx = "01000000010000000000000072c1a6a246ae63f74f931e8365e15a089c68d61900000000000000000000ffffffff0100e1f505000000000000000000"
decrawtx = self.nodes[0].decoderawtransaction(encrawtx, False) # decode as non-witness transaction
assert_equal(decrawtx['vout'][0]['value'], Decimal('1.00000000'))
# known ambiguous transaction in the chain (see https://github.com/bitcoin/bitcoin/issues/20579)
coinbase = "03c68708046ff8415c622f4254432e434f4d2ffabe6d6de1965d02c68f928e5b244ab1965115a36f56eb997633c7f690124bbf43644e23080000000ca3d3af6d005a65ff0200fd00000000"
encrawtx = f"020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff4b{coinbase}" \
"ffffffff03f4c1fb4b0000000016001497cfc76442fe717f2a3f0cc9c175f7561b6619970000000000000000266a24aa21a9ed957d1036a80343e0d1b659497e1b48a38ebe876a056d45965fac4a85cda84e1900000000000000002952534b424c4f434b3a8e092581ab01986cbadc84f4b43f4fa4bb9e7a2e2a0caf9b7cf64d939028e22c0120000000000000000000000000000000000000000000000000000000000000000000000000"
decrawtx = self.nodes[0].decoderawtransaction(encrawtx)
decrawtx_wit = self.nodes[0].decoderawtransaction(encrawtx, True)
assert_raises_rpc_error(-22, 'TX decode failed', self.nodes[0].decoderawtransaction, encrawtx, False) # fails to decode as non-witness transaction
assert_equal(decrawtx, decrawtx_wit) # the witness interpretation should be chosen
assert_equal(decrawtx['vin'][0]['coinbase'], coinbase)
def transaction_version_number_tests(self):
self.log.info("Test transaction version numbers")
# Test the minimum transaction version number that fits in a signed 32-bit integer.
# As transaction version is unsigned, this should convert to its unsigned equivalent.
tx = CTransaction()
tx.nVersion = -0x80000000
rawtx = tx.serialize().hex()
decrawtx = self.nodes[0].decoderawtransaction(rawtx)
assert_equal(decrawtx['version'], 0x80000000)
# Test the maximum transaction version number that fits in a signed 32-bit integer.
tx = CTransaction()
tx.nVersion = 0x7fffffff
rawtx = tx.serialize().hex()
decrawtx = self.nodes[0].decoderawtransaction(rawtx)
assert_equal(decrawtx['version'], 0x7fffffff)
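# In both cases the signed value is reinterpreted modulo 2**32, e.g.
# -0x80000000 & 0xffffffff == 0x80000000, matching the assertions above.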
def raw_multisig_transaction_legacy_tests(self):
self.log.info("Test raw multisig transactions (legacy)")
# The traditional multisig workflow does not work with descriptor wallets so these are legacy only.
# The multisig workflow with descriptor wallets uses PSBTs and is tested elsewhere, no need to do them here.
# 2of2 test
addr1 = self.nodes[2].getnewaddress()
addr2 = self.nodes[2].getnewaddress()
addr1Obj = self.nodes[2].getaddressinfo(addr1)
addr2Obj = self.nodes[2].getaddressinfo(addr2)
# Tests for createmultisig and addmultisigaddress
assert_raises_rpc_error(-5, "Invalid public key", self.nodes[0].createmultisig, 1, ["01020304"])
# createmultisig can only take public keys
self.nodes[0].createmultisig(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])
# addmultisigaddress can take both pubkeys and addresses so long as they are in the wallet, which is tested here
assert_raises_rpc_error(-5, "Invalid public key", self.nodes[0].createmultisig, 2, [addr1Obj['pubkey'], addr1])
mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr1])['address']
# use balance deltas instead of absolute values
bal = self.nodes[2].getbalance()
# send 1.2 BTC to the multisig address
txId = self.nodes[0].sendtoaddress(mSigObj, 1.2)
self.sync_all()
self.generate(self.nodes[0], 1)
# node2 has both keys of the 2of2 multisig address, so the tx should affect its balance
assert_equal(self.nodes[2].getbalance(), bal + Decimal('1.20000000'))
# 2of3 test from different nodes
bal = self.nodes[2].getbalance()
addr1 = self.nodes[1].getnewaddress()
addr2 = self.nodes[2].getnewaddress()
addr3 = self.nodes[2].getnewaddress()
addr1Obj = self.nodes[1].getaddressinfo(addr1)
addr2Obj = self.nodes[2].getaddressinfo(addr2)
addr3Obj = self.nodes[2].getaddressinfo(addr3)
mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey'], addr3Obj['pubkey']])['address']
txId = self.nodes[0].sendtoaddress(mSigObj, 2.2)
decTx = self.nodes[0].gettransaction(txId)
rawTx = self.nodes[0].decoderawtransaction(decTx['hex'])
self.sync_all()
self.generate(self.nodes[0], 1)
# This is an incomplete feature:
# node2 has two of the three keys and the funds should be spendable and counted in the balance
assert_equal(self.nodes[2].getbalance(), bal) # for now, assume the funds of a 2of3 multisig tx are not marked as spendable
txDetails = self.nodes[0].gettransaction(txId, True)
rawTx = self.nodes[0].decoderawtransaction(txDetails['hex'])
vout = next(o for o in rawTx['vout'] if o['value'] == Decimal('2.20000000'))
bal = self.nodes[0].getbalance()
inputs = [{"txid": txId, "vout": vout['n'], "scriptPubKey": vout['scriptPubKey']['hex'], "amount": vout['value']}]
outputs = {self.nodes[0].getnewaddress(): 2.19}
rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
rawTxPartialSigned = self.nodes[1].signrawtransactionwithwallet(rawTx, inputs)
assert_equal(rawTxPartialSigned['complete'], False) # node1 only has one key, cannot completely sign the tx
rawTxSigned = self.nodes[2].signrawtransactionwithwallet(rawTx, inputs)
assert_equal(rawTxSigned['complete'], True) # node2 can completely sign the tx, as it owns two of the three keys
self.nodes[2].sendrawtransaction(rawTxSigned['hex'])
rawTx = self.nodes[0].decoderawtransaction(rawTxSigned['hex'])
self.sync_all()
self.generate(self.nodes[0], 1)
assert_equal(self.nodes[0].getbalance(), bal + Decimal('50.00000000') + Decimal('2.19000000')) # block reward + tx
# 2of2 test for combining transactions
bal = self.nodes[2].getbalance()
addr1 = self.nodes[1].getnewaddress()
addr2 = self.nodes[2].getnewaddress()
addr1Obj = self.nodes[1].getaddressinfo(addr1)
addr2Obj = self.nodes[2].getaddressinfo(addr2)
self.nodes[1].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])['address']
mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])['address']
mSigObjValid = self.nodes[2].getaddressinfo(mSigObj)
txId = self.nodes[0].sendtoaddress(mSigObj, 2.2)
decTx = self.nodes[0].gettransaction(txId)
rawTx2 = self.nodes[0].decoderawtransaction(decTx['hex'])
self.sync_all()
self.generate(self.nodes[0], 1)
assert_equal(self.nodes[2].getbalance(), bal) # the funds of a 2of2 multisig tx should not be marked as spendable
txDetails = self.nodes[0].gettransaction(txId, True)
rawTx2 = self.nodes[0].decoderawtransaction(txDetails['hex'])
vout = next(o for o in rawTx2['vout'] if o['value'] == Decimal('2.20000000'))
bal = self.nodes[0].getbalance()
inputs = [{"txid": txId, "vout": vout['n'], "scriptPubKey": vout['scriptPubKey']['hex'], "redeemScript": mSigObjValid['hex'], "amount": vout['value']}]
outputs = {self.nodes[0].getnewaddress(): 2.19}
rawTx2 = self.nodes[2].createrawtransaction(inputs, outputs)
rawTxPartialSigned1 = self.nodes[1].signrawtransactionwithwallet(rawTx2, inputs)
self.log.debug(rawTxPartialSigned1)
assert_equal(rawTxPartialSigned1['complete'], False) # node1 only has one key, cannot completely sign the tx
rawTxPartialSigned2 = self.nodes[2].signrawtransactionwithwallet(rawTx2, inputs)
self.log.debug(rawTxPartialSigned2)
assert_equal(rawTxPartialSigned2['complete'], False) # node2 only has one key, cannot completely sign the tx
rawTxComb = self.nodes[2].combinerawtransaction([rawTxPartialSigned1['hex'], rawTxPartialSigned2['hex']])
self.log.debug(rawTxComb)
self.nodes[2].sendrawtransaction(rawTxComb)
rawTx2 = self.nodes[0].decoderawtransaction(rawTxComb)
self.sync_all()
self.generate(self.nodes[0], 1)
assert_equal(self.nodes[0].getbalance(), bal + Decimal('50.00000000') + Decimal('2.19000000')) # block reward + tx
if __name__ == '__main__':
RawTransactionsTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/p2p_unrequested_blocks.py | #!/usr/bin/env python3
# Copyright (c) 2015-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test processing of unrequested blocks.
Setup: two nodes, node0 + node1, not connected to each other. Node1 will have
nMinimumChainWork set to 0x10, so it won't process low-work unrequested blocks.
We have one P2PInterface connection to node0 called test_node, and one to node1
called min_work_node.
The test:
1. Generate one block on each node, to leave IBD.
2. Mine a new block on each tip, and deliver to each node from node's peer.
The tip should advance for node0, but node1 should skip processing due to
nMinimumChainWork.
Node1 is unused in tests 3-7:
3. Mine a block that forks from the genesis block, and deliver to test_node.
Node0 should not process this block (just accept the header), because it
is unrequested and doesn't have at least as much work as the tip.
4a,b. Send another two blocks that build on the forking block.
Node0 should process the second block but be stuck on the shorter chain,
because it's missing an intermediate block.
4c. Send 288 more blocks on the longer chain (the number of blocks ahead
we currently store).
Node0 should process all but the last block (too far ahead in height).
5. Send a duplicate of the block in #3 to Node0.
Node0 should not process the block because it is unrequested, and stay on
the shorter chain.
6. Send Node0 an inv for the height 3 block produced in #4 above.
Node0 should figure out that it is missing the height 2 block and send a
getdata.
7. Send Node0 the missing block again.
Node0 should process and the tip should advance.
8. Create a fork which is invalid at a height longer than the current chain
(ie to which the node will try to reorg) but which has headers built on top
of the invalid block. Check that we get disconnected if we send more headers
on the chain the node now knows to be invalid.
9. Test that Node1 is able to sync when connected to node0 (which should have
sufficient work on its chain).
"""
import time
from test_framework.blocktools import create_block, create_coinbase, create_tx_with_script
from test_framework.messages import CBlockHeader, CInv, MSG_BLOCK, msg_block, msg_headers, msg_inv
from test_framework.p2p import p2p_lock, P2PInterface
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
)
class AcceptBlockTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 2
self.extra_args = [[], ["-minimumchainwork=0x10"]]
def setup_network(self):
self.setup_nodes()
def check_hash_in_chaintips(self, node, blockhash):
tips = node.getchaintips()
for x in tips:
if x["hash"] == blockhash:
return True
return False
def run_test(self):
test_node = self.nodes[0].add_p2p_connection(P2PInterface())
min_work_node = self.nodes[1].add_p2p_connection(P2PInterface())
# 1. Have nodes mine a block (leave IBD)
[self.generate(n, 1, sync_fun=self.no_op) for n in self.nodes]
tips = [int("0x" + n.getbestblockhash(), 0) for n in self.nodes]
# 2. Send one block that builds on each tip.
# This should be accepted by node0
blocks_h2 = [] # the height 2 blocks on each node's chain
block_time = int(time.time()) + 1
for i in range(2):
blocks_h2.append(create_block(tips[i], create_coinbase(2), block_time))
blocks_h2[i].solve()
block_time += 1
test_node.send_and_ping(msg_block(blocks_h2[0]))
with self.nodes[1].assert_debug_log(expected_msgs=[f"AcceptBlockHeader: not adding new block header {blocks_h2[1].hash}, missing anti-dos proof-of-work validation"]):
min_work_node.send_and_ping(msg_block(blocks_h2[1]))
assert_equal(self.nodes[0].getblockcount(), 2)
assert_equal(self.nodes[1].getblockcount(), 1)
# Ensure that the header of the second block was also not accepted by node1
assert_equal(self.check_hash_in_chaintips(self.nodes[1], blocks_h2[1].hash), False)
self.log.info("First height 2 block accepted by node0; correctly rejected by node1")
# 3. Send another block that builds on genesis.
block_h1f = create_block(int("0x" + self.nodes[0].getblockhash(0), 0), create_coinbase(1), block_time)
block_time += 1
block_h1f.solve()
test_node.send_and_ping(msg_block(block_h1f))
tip_entry_found = False
for x in self.nodes[0].getchaintips():
if x['hash'] == block_h1f.hash:
assert_equal(x['status'], "headers-only")
tip_entry_found = True
assert tip_entry_found
assert_raises_rpc_error(-1, "Block not found on disk", self.nodes[0].getblock, block_h1f.hash)
# 4. Send another two blocks that build on the fork.
block_h2f = create_block(block_h1f.sha256, create_coinbase(2), block_time)
block_time += 1
block_h2f.solve()
test_node.send_and_ping(msg_block(block_h2f))
# Since the earlier block was not processed by node, the new block
# can't be fully validated.
tip_entry_found = False
for x in self.nodes[0].getchaintips():
if x['hash'] == block_h2f.hash:
assert_equal(x['status'], "headers-only")
tip_entry_found = True
assert tip_entry_found
# But this block should be accepted by the node since it has equal work.
self.nodes[0].getblock(block_h2f.hash)
self.log.info("Second height 2 block accepted, but not reorg'ed to")
# 4b. Now send another block that builds on the forking chain.
block_h3 = create_block(block_h2f.sha256, create_coinbase(3), block_h2f.nTime+1)
block_h3.solve()
test_node.send_and_ping(msg_block(block_h3))
# Since the earlier block was not processed by node, the new block
# can't be fully validated.
tip_entry_found = False
for x in self.nodes[0].getchaintips():
if x['hash'] == block_h3.hash:
assert_equal(x['status'], "headers-only")
tip_entry_found = True
assert tip_entry_found
# But this block should be accepted by the node since it has more work.
self.nodes[0].getblock(block_h3.hash)
self.log.info("Unrequested more-work block accepted")
# 4c. Now mine 288 more blocks and deliver; the node should process all but
# the last one (its height is too far ahead), as long as no headers are missing
tip = block_h3
all_blocks = []
for i in range(288):
next_block = create_block(tip.sha256, create_coinbase(i + 4), tip.nTime+1)
next_block.solve()
all_blocks.append(next_block)
tip = next_block
# Now send the block at height 5 and check that it wasn't accepted (missing header)
test_node.send_and_ping(msg_block(all_blocks[1]))
assert_raises_rpc_error(-5, "Block not found", self.nodes[0].getblock, all_blocks[1].hash)
assert_raises_rpc_error(-5, "Block not found", self.nodes[0].getblockheader, all_blocks[1].hash)
# The block at height 5 should be accepted if we provide the missing header, though
headers_message = msg_headers()
headers_message.headers.append(CBlockHeader(all_blocks[0]))
test_node.send_message(headers_message)
test_node.send_and_ping(msg_block(all_blocks[1]))
self.nodes[0].getblock(all_blocks[1].hash)
# Now send the blocks in all_blocks
for i in range(288):
test_node.send_message(msg_block(all_blocks[i]))
test_node.sync_with_ping()
# Blocks 1-287 should be accepted, block 288 should be ignored because it's too far ahead
for x in all_blocks[:-1]:
self.nodes[0].getblock(x.hash)
assert_raises_rpc_error(-1, "Block not found on disk", self.nodes[0].getblock, all_blocks[-1].hash)
# 5. Test handling of unrequested block on the node that didn't process
# Should still not be processed (even though it has a child that has more
# work).
# The node should have requested the blocks at some point, so
# disconnect/reconnect first
self.nodes[0].disconnect_p2ps()
self.nodes[1].disconnect_p2ps()
test_node = self.nodes[0].add_p2p_connection(P2PInterface())
test_node.send_and_ping(msg_block(block_h1f))
assert_equal(self.nodes[0].getblockcount(), 2)
self.log.info("Unrequested block that would complete more-work chain was ignored")
# 6. Try to get node to request the missing block.
# Poke the node with an inv for block at height 3 and see if that
# triggers a getdata on block 2 (it should if block 2 is missing).
with p2p_lock:
# Clear state so we can check the getdata request
test_node.last_message.pop("getdata", None)
test_node.send_message(msg_inv([CInv(MSG_BLOCK, block_h3.sha256)]))
test_node.sync_with_ping()
with p2p_lock:
getdata = test_node.last_message["getdata"]
# Check that the getdata includes the right block
assert_equal(getdata.inv[0].hash, block_h1f.sha256)
self.log.info("Inv at tip triggered getdata for unprocessed block")
# 7. Send the missing block for the third time (now it is requested)
test_node.send_and_ping(msg_block(block_h1f))
assert_equal(self.nodes[0].getblockcount(), 290)
self.nodes[0].getblock(all_blocks[286].hash)
assert_equal(self.nodes[0].getbestblockhash(), all_blocks[286].hash)
assert_raises_rpc_error(-1, "Block not found on disk", self.nodes[0].getblock, all_blocks[287].hash)
self.log.info("Successfully reorged to longer chain")
# 8. Create a chain which is invalid at a height longer than the
# current chain, but which has more blocks on top of that
block_289f = create_block(all_blocks[284].sha256, create_coinbase(289), all_blocks[284].nTime+1)
block_289f.solve()
block_290f = create_block(block_289f.sha256, create_coinbase(290), block_289f.nTime+1)
block_290f.solve()
# block_291 spends a coinbase below maturity!
tx_to_add = create_tx_with_script(block_290f.vtx[0], 0, script_sig=b"42", amount=1)
block_291 = create_block(block_290f.sha256, create_coinbase(291), block_290f.nTime+1, txlist=[tx_to_add])
block_291.solve()
block_292 = create_block(block_291.sha256, create_coinbase(292), block_291.nTime+1)
block_292.solve()
# Now send all the headers on the chain and enough blocks to trigger reorg
headers_message = msg_headers()
headers_message.headers.append(CBlockHeader(block_289f))
headers_message.headers.append(CBlockHeader(block_290f))
headers_message.headers.append(CBlockHeader(block_291))
headers_message.headers.append(CBlockHeader(block_292))
test_node.send_and_ping(headers_message)
tip_entry_found = False
for x in self.nodes[0].getchaintips():
if x['hash'] == block_292.hash:
assert_equal(x['status'], "headers-only")
tip_entry_found = True
assert tip_entry_found
assert_raises_rpc_error(-1, "Block not found on disk", self.nodes[0].getblock, block_292.hash)
test_node.send_message(msg_block(block_289f))
test_node.send_and_ping(msg_block(block_290f))
self.nodes[0].getblock(block_289f.hash)
self.nodes[0].getblock(block_290f.hash)
test_node.send_message(msg_block(block_291))
# At this point we've sent an obviously-bogus block, wait for full processing
# and assume disconnection
test_node.wait_for_disconnect()
self.nodes[0].disconnect_p2ps()
test_node = self.nodes[0].add_p2p_connection(P2PInterface())
# We should have failed reorg and switched back to 290 (but have block 291)
assert_equal(self.nodes[0].getblockcount(), 290)
assert_equal(self.nodes[0].getbestblockhash(), all_blocks[286].hash)
assert_equal(self.nodes[0].getblock(block_291.hash)["confirmations"], -1)
# Now send a new header on the invalid chain, indicating we're forked off, and expect to get disconnected
block_293 = create_block(block_292.sha256, create_coinbase(293), block_292.nTime+1)
block_293.solve()
headers_message = msg_headers()
headers_message.headers.append(CBlockHeader(block_293))
test_node.send_message(headers_message)
test_node.wait_for_disconnect()
# 9. Connect node1 to node0 and ensure it is able to sync
self.connect_nodes(0, 1)
self.sync_blocks([self.nodes[0], self.nodes[1]])
self.log.info("Successfully synced nodes 1 and 0")
if __name__ == '__main__':
AcceptBlockTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/mempool_spend_coinbase.py | #!/usr/bin/env python3
# Copyright (c) 2014-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test spending coinbase transactions.
The coinbase transaction in block N can appear in block
N+100... so is valid in the mempool when the best block
height is N+99.
This test makes sure coinbase spends that will be mature
in the next block are accepted into the memory pool,
but less mature coinbase spends are NOT.
"""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, assert_raises_rpc_error
from test_framework.wallet import MiniWallet
class MempoolSpendCoinbaseTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
def run_test(self):
wallet = MiniWallet(self.nodes[0])
# Invalidate two blocks, so that miniwallet has access to a coin that will mature in the next block
chain_height = 198
self.nodes[0].invalidateblock(self.nodes[0].getblockhash(chain_height + 1))
assert_equal(chain_height, self.nodes[0].getblockcount())
# Coinbase at height chain_height-100+1 ok in mempool, should
# get mined. Coinbase at height chain_height-100+2 is
# too immature to spend.
coinbase_txid = lambda h: self.nodes[0].getblock(self.nodes[0].getblockhash(h))['tx'][0]
utxo_mature = wallet.get_utxo(txid=coinbase_txid(chain_height - 100 + 1))
utxo_immature = wallet.get_utxo(txid=coinbase_txid(chain_height - 100 + 2))
spend_mature_id = wallet.send_self_transfer(from_node=self.nodes[0], utxo_to_spend=utxo_mature)["txid"]
# other coinbase should be too immature to spend
immature_tx = wallet.create_self_transfer(utxo_to_spend=utxo_immature)
assert_raises_rpc_error(-26,
"bad-txns-premature-spend-of-coinbase",
lambda: self.nodes[0].sendrawtransaction(immature_tx['hex']))
# mempool should have just the mature one
assert_equal(self.nodes[0].getrawmempool(), [spend_mature_id])
# mine a block, mature one should get confirmed
self.generate(self.nodes[0], 1)
assert_equal(set(self.nodes[0].getrawmempool()), set())
# ... and now previously immature can be spent:
spend_new_id = self.nodes[0].sendrawtransaction(immature_tx['hex'])
assert_equal(self.nodes[0].getrawmempool(), [spend_new_id])
if __name__ == '__main__':
MempoolSpendCoinbaseTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/wallet_avoid_mixing_output_types.py | #!/usr/bin/env python3
# Copyright (c) 2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or https://www.opensource.org/licenses/mit-license.php.
"""Test output type mixing during coin selection
A wallet may have different types of UTXOs to choose from during coin selection,
where output type is one of the following:
- BECH32M
- BECH32
- P2SH-SEGWIT
- LEGACY
This test verifies that mixing different output types is avoided unless
absolutely necessary. Both wallets start with zero funds. Alice mines
enough blocks to have spendable coinbase outputs. Alice sends three
random value payments which sum to 10 BTC for each output type to Bob,
for a total of 40 BTC in Bob's wallet.
Bob then sends random valued payments back to Alice, some of which need
unconfirmed change, and we verify that none of these payments contain mixed
inputs. Finally, Bob sends the remainder of his funds, which requires mixing.
The payment values are random, but chosen such that they sum up to a specified
total. This ensures we are not relying on specific values for the UTXOs,
but still know when to expect mixing due to the wallet being close to empty.
"""
import random
from test_framework.test_framework import BitcoinTestFramework
from test_framework.blocktools import COINBASE_MATURITY
ADDRESS_TYPES = [
"bech32m",
"bech32",
"p2sh-segwit",
"legacy",
]
def is_bech32_address(node, addr):
"""Check if an address contains a bech32 output."""
addr_info = node.getaddressinfo(addr)
return addr_info['desc'].startswith('wpkh(')
def is_bech32m_address(node, addr):
"""Check if an address contains a bech32m output."""
addr_info = node.getaddressinfo(addr)
return addr_info['desc'].startswith('tr(')
def is_p2sh_segwit_address(node, addr):
"""Check if an address contains a P2SH-Segwit output.
Note: this function does not actually determine the type
of P2SH output, but is sufficient for this test in that
we are only generating P2SH-Segwit outputs.
"""
addr_info = node.getaddressinfo(addr)
return addr_info['desc'].startswith('sh(wpkh(')
def is_legacy_address(node, addr):
"""Check if an address contains a legacy output."""
addr_info = node.getaddressinfo(addr)
return addr_info['desc'].startswith('pkh(')
def is_same_type(node, tx):
"""Check that all inputs are of the same OutputType"""
vins = node.getrawtransaction(tx, True)['vin']
inputs = []
for vin in vins:
prev_tx, n = vin['txid'], vin['vout']
inputs.append(
node.getrawtransaction(
prev_tx,
True,
)['vout'][n]['scriptPubKey']['address']
)
has_legacy = False
has_p2sh = False
has_bech32 = False
has_bech32m = False
for addr in inputs:
if is_legacy_address(node, addr):
has_legacy = True
if is_p2sh_segwit_address(node, addr):
has_p2sh = True
if is_bech32_address(node, addr):
has_bech32 = True
if is_bech32m_address(node, addr):
has_bech32m = True
return (sum([has_legacy, has_p2sh, has_bech32, has_bech32m]) == 1)
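# Note: is_same_type returns True only when exactly one of the four type
# flags is set, i.e. every input script resolves to the same OutputType.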
def generate_payment_values(n, m):
"""Return a randomly chosen list of n positive integers summing to m.
Each such list is equally likely to occur."""
dividers = sorted(random.sample(range(1, m), n - 1))
return [a - b for a, b in zip(dividers + [m], [0] + dividers)]
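# For example, generate_payment_values(3, 10) might return [2, 5, 3]: always
# three positive integers summing to 10, with the exact split random.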
class AddressInputTypeGrouping(BitcoinTestFramework):
def add_options(self, parser):
self.add_wallet_options(parser, legacy=False)
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 2
self.extra_args = [
[
"-addresstype=bech32",
"[email protected]",
"-txindex",
],
[
"-addresstype=p2sh-segwit",
"[email protected]",
"-txindex",
],
]
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
self.skip_if_no_sqlite()
def make_payment(self, A, B, v, addr_type):
fee_rate = random.randint(1, 20)
self.log.debug(f"Making payment of {v} BTC at fee_rate {fee_rate}")
tx = B.sendtoaddress(
address=A.getnewaddress(address_type=addr_type),
amount=v,
fee_rate=fee_rate,
)
return tx
def run_test(self):
# alias self.nodes[i] to A, B for readability
A, B = self.nodes[0], self.nodes[1]
self.generate(A, COINBASE_MATURITY + 5)
self.log.info("Creating mixed UTXOs in B's wallet")
for v in generate_payment_values(3, 10):
self.log.debug(f"Making payment of {v} BTC to legacy")
A.sendtoaddress(B.getnewaddress(address_type="legacy"), v)
for v in generate_payment_values(3, 10):
self.log.debug(f"Making payment of {v} BTC to p2sh")
A.sendtoaddress(B.getnewaddress(address_type="p2sh-segwit"), v)
for v in generate_payment_values(3, 10):
self.log.debug(f"Making payment of {v} BTC to bech32")
A.sendtoaddress(B.getnewaddress(address_type="bech32"), v)
for v in generate_payment_values(3, 10):
self.log.debug(f"Making payment of {v} BTC to bech32m")
A.sendtoaddress(B.getnewaddress(address_type="bech32m"), v)
self.generate(A, 1)
self.log.info("Sending payments from B to A")
for v in generate_payment_values(5, 9):
tx = self.make_payment(
A, B, v, random.choice(ADDRESS_TYPES)
)
self.generate(A, 1)
assert is_same_type(B, tx)
tx = self.make_payment(A, B, 30.99, random.choice(ADDRESS_TYPES))
assert not is_same_type(B, tx)
if __name__ == '__main__':
AddressInputTypeGrouping().main()
| 0 |
bitcoin/test | bitcoin/test/functional/wallet_multisig_descriptor_psbt.py | #!/usr/bin/env python3
# Copyright (c) 2021-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test a basic M-of-N multisig setup between multiple people using descriptor wallets and PSBTs, as well as a signing flow.
This is meant to be documentation as much as functional tests, so it is kept as simple and readable as possible.
"""
from test_framework.address import base58_to_byte
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_approx,
assert_equal,
)
class WalletMultisigDescriptorPSBTTest(BitcoinTestFramework):
def add_options(self, parser):
self.add_wallet_options(parser, legacy=False)
def set_test_params(self):
self.num_nodes = 3
self.setup_clean_chain = True
self.wallet_names = []
self.extra_args = [["-keypool=100"]] * self.num_nodes
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
self.skip_if_no_sqlite()
@staticmethod
def _get_xpub(wallet):
"""Extract the wallet's xpubs using `listdescriptors` and pick the one from the `pkh` descriptor since it's least likely to be accidentally reused (legacy addresses)."""
descriptor = next(filter(lambda d: d["desc"].startswith("pkh"), wallet.listdescriptors()["descriptors"]))
return descriptor["desc"].split("]")[-1].split("/")[0]
@staticmethod
def _check_psbt(psbt, to, value, multisig):
"""Helper function for any of the N participants to check the psbt with decodepsbt and verify it is OK before signing."""
tx = multisig.decodepsbt(psbt)["tx"]
amount = 0
for vout in tx["vout"]:
address = vout["scriptPubKey"]["address"]
assert_equal(multisig.getaddressinfo(address)["ischange"], address != to)
if address == to:
amount += vout["value"]
assert_approx(amount, float(value), vspan=0.001)
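# The ischange assertion above guarantees that every output other than the
# payment destination is recognized by the multisig wallet as its own change,
# so no value can silently leak to an unknown script.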
def participants_create_multisigs(self, xpubs):
"""The multisig is created by importing the following descriptors. The resulting wallet is watch-only and every participant can do this."""
# some simple validation
assert_equal(len(xpubs), self.N)
# a sanity-check/assertion: this will throw if the base58 checksum of any of the provided xpubs is invalid
for xpub in xpubs:
base58_to_byte(xpub)
for i, node in enumerate(self.nodes):
node.createwallet(wallet_name=f"{self.name}_{i}", blank=True, descriptors=True, disable_private_keys=True)
multisig = node.get_wallet_rpc(f"{self.name}_{i}")
external = multisig.getdescriptorinfo(f"wsh(sortedmulti({self.M},{f'/0/*,'.join(xpubs)}/0/*))")
internal = multisig.getdescriptorinfo(f"wsh(sortedmulti({self.M},{f'/1/*,'.join(xpubs)}/1/*))")
result = multisig.importdescriptors([
{ # receiving addresses (internal: False)
"desc": external["descriptor"],
"active": True,
"internal": False,
"timestamp": "now",
},
{ # change addresses (internal: True)
"desc": internal["descriptor"],
"active": True,
"internal": True,
"timestamp": "now",
},
])
assert all(r["success"] for r in result)
yield multisig
def run_test(self):
self.M = 2
self.N = self.num_nodes
self.name = f"{self.M}_of_{self.N}_multisig"
self.log.info(f"Testing {self.name}...")
participants = {
# Every participant generates an xpub. The most straightforward way is to create a new descriptor wallet.
# This wallet will be the participant's `signer` for the resulting multisig. Avoid reusing this wallet for any other purpose (for privacy reasons).
"signers": [node.get_wallet_rpc(node.createwallet(wallet_name=f"participant_{self.nodes.index(node)}", descriptors=True)["name"]) for node in self.nodes],
# After participants generate and exchange their xpubs they will each create their own watch-only multisig.
# Note: these multisigs are all the same, this just highlights that each participant can independently verify everything on their own node.
"multisigs": []
}
self.log.info("Generate and exchange xpubs...")
xpubs = [self._get_xpub(signer) for signer in participants["signers"]]
self.log.info("Every participant imports the following descriptors to create the watch-only multisig...")
participants["multisigs"] = list(self.participants_create_multisigs(xpubs))
self.log.info("Check that every participant's multisig generates the same addresses...")
for _ in range(10): # we check that the first 10 generated addresses are the same for all participants' multisigs
receive_addresses = [multisig.getnewaddress() for multisig in participants["multisigs"]]
assert all(address == receive_addresses[0] for address in receive_addresses)
change_addresses = [multisig.getrawchangeaddress() for multisig in participants["multisigs"]]
assert all(address == change_addresses[0] for address in change_addresses)
self.log.info("Get a mature utxo to send to the multisig...")
coordinator_wallet = participants["signers"][0]
self.generatetoaddress(self.nodes[0], 101, coordinator_wallet.getnewaddress())
deposit_amount = 6.15
multisig_receiving_address = participants["multisigs"][0].getnewaddress()
self.log.info("Send funds to the resulting multisig receiving address...")
coordinator_wallet.sendtoaddress(multisig_receiving_address, deposit_amount)
self.generate(self.nodes[0], 1)
for participant in participants["multisigs"]:
assert_approx(participant.getbalance(), deposit_amount, vspan=0.001)
self.log.info("Send a transaction from the multisig!")
to = participants["signers"][self.N - 1].getnewaddress()
value = 1
self.log.info("First, make a sending transaction, created using `walletcreatefundedpsbt` (anyone can initiate this)...")
psbt = participants["multisigs"][0].walletcreatefundedpsbt(inputs=[], outputs={to: value}, feeRate=0.00010)
psbts = []
self.log.info("Now at least M users check the psbt with decodepsbt and (if OK) signs it with walletprocesspsbt...")
for m in range(self.M):
signers_multisig = participants["multisigs"][m]
self._check_psbt(psbt["psbt"], to, value, signers_multisig)
signing_wallet = participants["signers"][m]
partially_signed_psbt = signing_wallet.walletprocesspsbt(psbt["psbt"])
psbts.append(partially_signed_psbt["psbt"])
self.log.info("Finally, collect the signed PSBTs with combinepsbt, finalizepsbt, then broadcast the resulting transaction...")
combined = coordinator_wallet.combinepsbt(psbts)
finalized = coordinator_wallet.finalizepsbt(combined)
coordinator_wallet.sendrawtransaction(finalized["hex"])
self.log.info("Check that balances are correct after the transaction has been included in a block.")
self.generate(self.nodes[0], 1)
assert_approx(participants["multisigs"][0].getbalance(), deposit_amount - value, vspan=0.001)
assert_equal(participants["signers"][self.N - 1].getbalance(), value)
self.log.info("Send another transaction from the multisig, this time with a daisy chained signing flow (one after another in series)!")
psbt = participants["multisigs"][0].walletcreatefundedpsbt(inputs=[], outputs={to: value}, feeRate=0.00010)
for m in range(self.M):
signers_multisig = participants["multisigs"][m]
self._check_psbt(psbt["psbt"], to, value, signers_multisig)
signing_wallet = participants["signers"][m]
psbt = signing_wallet.walletprocesspsbt(psbt["psbt"])
assert_equal(psbt["complete"], m == self.M - 1)
coordinator_wallet.sendrawtransaction(psbt["hex"])
self.log.info("Check that balances are correct after the transaction has been included in a block.")
self.generate(self.nodes[0], 1)
assert_approx(participants["multisigs"][0].getbalance(), deposit_amount - (value * 2), vspan=0.001)
assert_equal(participants["signers"][self.N - 1].getbalance(), value * 2)
if __name__ == "__main__":
WalletMultisigDescriptorPSBTTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/feature_minchainwork.py | #!/usr/bin/env python3
# Copyright (c) 2017-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test logic for setting nMinimumChainWork on command line.
Nodes don't consider themselves out of "initial block download" until
their active chain has more work than nMinimumChainWork.
Nodes don't download blocks from a peer unless the peer's best known block
has more work than nMinimumChainWork.
While in initial block download, nodes won't relay blocks to their peers, so
test that this parameter functions as intended by verifying that block relay
only succeeds past a given node once its nMinimumChainWork has been exceeded.
"""
import time
from test_framework.p2p import P2PInterface, msg_getheaders
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal
# 2 hashes required per regtest block (with no difficulty adjustment)
REGTEST_WORK_PER_BLOCK = 2
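# Worked example for the parameters below: with -minimumchainwork=0x65 (101)
# and genesis contributing 2 units of work, int((101 - 2) / 2) = 49 generated
# blocks leave the chain at 100 units, one short of the threshold; a single
# extra block later pushes it past.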
class MinimumChainWorkTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 3
self.extra_args = [[], ["-minimumchainwork=0x65"], ["-minimumchainwork=0x65"]]
self.node_min_work = [0, 101, 101]
def setup_network(self):
# This test relies on the chain setup being:
# node0 <- node1 <- node2
# Before leaving IBD, nodes prefer to download blocks from outbound
# peers, so ensure that we're mining on an outbound peer and testing
# block relay to inbound peers.
self.setup_nodes()
for i in range(self.num_nodes-1):
self.connect_nodes(i+1, i)
# Set clock of node2 2 days ahead, to keep it in IBD during this test.
self.nodes[2].setmocktime(int(time.time()) + 48*60*60)
def run_test(self):
# Start building a chain on node0. node2 shouldn't be able to sync until node1's
# minchainwork is exceeded
starting_chain_work = REGTEST_WORK_PER_BLOCK # Genesis block's work
self.log.info(f"Testing relay across node 1 (minChainWork = {self.node_min_work[1]})")
starting_blockcount = self.nodes[2].getblockcount()
num_blocks_to_generate = int((self.node_min_work[1] - starting_chain_work) / REGTEST_WORK_PER_BLOCK)
self.log.info(f"Generating {num_blocks_to_generate} blocks on node0")
hashes = self.generate(self.nodes[0], num_blocks_to_generate, sync_fun=self.no_op)
self.log.info(f"Node0 current chain work: {self.nodes[0].getblockheader(hashes[-1])['chainwork']}")
# Sleep a few seconds and verify that node2 didn't get any new blocks
# or headers. We sleep, rather than sync_blocks(node0, node1) because
# it's reasonable either way for node1 to get the blocks, or not get
# them (since they're below node1's minchainwork).
time.sleep(3)
self.log.info("Verifying node 2 has no more blocks than before")
self.log.info(f"Blockcounts: {[n.getblockcount() for n in self.nodes]}")
# Node2 shouldn't have any new headers yet, because node1 should not
# have relayed anything.
assert_equal(len(self.nodes[2].getchaintips()), 1)
assert_equal(self.nodes[2].getchaintips()[0]['height'], 0)
assert self.nodes[1].getbestblockhash() != self.nodes[0].getbestblockhash()
assert_equal(self.nodes[2].getblockcount(), starting_blockcount)
self.log.info("Check that getheaders requests to node2 are ignored")
peer = self.nodes[2].add_p2p_connection(P2PInterface())
msg = msg_getheaders()
msg.locator.vHave = [int(self.nodes[2].getbestblockhash(), 16)]
msg.hashstop = 0
peer.send_and_ping(msg)
time.sleep(5)
assert "headers" not in peer.last_message or len(peer.last_message["headers"].headers) == 0
self.log.info("Generating one more block")
self.generate(self.nodes[0], 1)
self.log.info("Verifying nodes are all synced")
# Because nodes in regtest are all manual connections (eg using
# addnode), node1 should not have disconnected node0. If not for that,
# we'd expect node1 to have disconnected node0 for serving an
# insufficient work chain, in which case we'd need to reconnect them to
# continue the test.
self.sync_all()
self.log.info(f"Blockcounts: {[n.getblockcount() for n in self.nodes]}")
self.log.info("Test that getheaders requests to node2 are not ignored")
peer.send_and_ping(msg)
assert "headers" in peer.last_message
# Verify that node2 is in fact still in IBD (otherwise this test may
# not be exercising the logic we want!)
assert_equal(self.nodes[2].getblockchaininfo()['initialblockdownload'], True)
self.log.info("Test -minimumchainwork with a non-hex value")
self.stop_node(0)
self.nodes[0].assert_start_raises_init_error(
["-minimumchainwork=test"],
expected_msg='Error: Invalid non-hex (test) minimum chain work value specified',
)
if __name__ == '__main__':
MinimumChainWorkTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/feature_addrman.py | #!/usr/bin/env python3
# Copyright (c) 2021-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test addrman functionality"""
import os
import re
import struct
from test_framework.messages import ser_uint256, hash256
from test_framework.netutil import ADDRMAN_NEW_BUCKET_COUNT, ADDRMAN_TRIED_BUCKET_COUNT, ADDRMAN_BUCKET_SIZE
from test_framework.p2p import MAGIC_BYTES
from test_framework.test_framework import BitcoinTestFramework
from test_framework.test_node import ErrorMatch
from test_framework.util import assert_equal
def serialize_addrman(
*,
format=1,
lowest_compatible=4,
net_magic="regtest",
bucket_key=1,
len_new=None,
len_tried=None,
mock_checksum=None,
):
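    """Serialize a minimal addrman (peers.dat) image.

    Layout, mirroring what bitcoind deserializes: network magic, a format
    version byte, a lowest-compatible-version byte (offset by
    INCOMPATIBILITY_BASE), the 32-byte bucket key, the new and tried entry
    counts, the new-bucket matrix, and a hash256 checksum over everything
    that precedes it. The keyword arguments let tests corrupt individual
    fields to hit specific deserialization error paths.
    """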
new = []
tried = []
INCOMPATIBILITY_BASE = 32
r = MAGIC_BYTES[net_magic]
r += struct.pack("B", format)
r += struct.pack("B", INCOMPATIBILITY_BASE + lowest_compatible)
r += ser_uint256(bucket_key)
r += struct.pack("<i", len_new or len(new))
r += struct.pack("<i", len_tried or len(tried))
ADDRMAN_NEW_BUCKET_COUNT = 1 << 10
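    # XOR-ing in (1 << 30) reproduces the marker bit addrman sets on the
    # serialized bucket count to distinguish the current bucketing scheme.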
r += struct.pack("<i", ADDRMAN_NEW_BUCKET_COUNT ^ (1 << 30))
for _ in range(ADDRMAN_NEW_BUCKET_COUNT):
r += struct.pack("<i", 0)
checksum = hash256(r)
r += mock_checksum or checksum
return r
def write_addrman(peers_dat, **kwargs):
with open(peers_dat, "wb") as f:
f.write(serialize_addrman(**kwargs))
class AddrmanTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
def run_test(self):
peers_dat = os.path.join(self.nodes[0].chain_path, "peers.dat")
init_error = lambda reason: (
f"Error: Invalid or corrupt peers.dat \\({reason}\\). If you believe this "
f"is a bug, please report it to {self.config['environment']['PACKAGE_BUGREPORT']}. "
f'As a workaround, you can move the file \\("{re.escape(peers_dat)}"\\) out of the way \\(rename, '
"move, or delete\\) to have a new one created on the next start."
)
self.log.info("Check that mocked addrman is valid")
self.stop_node(0)
write_addrman(peers_dat)
with self.nodes[0].assert_debug_log(["Loaded 0 addresses from peers.dat"]):
self.start_node(0, extra_args=["-checkaddrman=1"])
assert_equal(self.nodes[0].getnodeaddresses(), [])
self.log.info("Check that addrman with negative lowest_compatible cannot be read")
self.stop_node(0)
write_addrman(peers_dat, lowest_compatible=-32)
self.nodes[0].assert_start_raises_init_error(
expected_msg=init_error(
"Corrupted addrman database: The compat value \\(0\\) is lower "
"than the expected minimum value 32.: (.+)"
),
match=ErrorMatch.FULL_REGEX,
)
self.log.info("Check that addrman from future is overwritten with new addrman")
self.stop_node(0)
write_addrman(peers_dat, lowest_compatible=111)
assert_equal(os.path.exists(peers_dat + ".bak"), False)
with self.nodes[0].assert_debug_log([
f'Creating new peers.dat because the file version was not compatible ("{peers_dat}"). Original backed up to peers.dat.bak',
]):
self.start_node(0)
assert_equal(self.nodes[0].getnodeaddresses(), [])
assert_equal(os.path.exists(peers_dat + ".bak"), True)
self.log.info("Check that corrupt addrman cannot be read (EOF)")
self.stop_node(0)
with open(peers_dat, "wb") as f:
f.write(serialize_addrman()[:-1])
self.nodes[0].assert_start_raises_init_error(
expected_msg=init_error("AutoFile::read: end of file.*"),
match=ErrorMatch.FULL_REGEX,
)
self.log.info("Check that corrupt addrman cannot be read (magic)")
self.stop_node(0)
write_addrman(peers_dat, net_magic="signet")
self.nodes[0].assert_start_raises_init_error(
expected_msg=init_error("Invalid network magic number"),
match=ErrorMatch.FULL_REGEX,
)
self.log.info("Check that corrupt addrman cannot be read (checksum)")
self.stop_node(0)
write_addrman(peers_dat, mock_checksum=b"ab" * 32)
self.nodes[0].assert_start_raises_init_error(
expected_msg=init_error("Checksum mismatch, data corrupted"),
match=ErrorMatch.FULL_REGEX,
)
self.log.info("Check that corrupt addrman cannot be read (len_tried)")
self.stop_node(0)
max_len_tried = ADDRMAN_TRIED_BUCKET_COUNT * ADDRMAN_BUCKET_SIZE
write_addrman(peers_dat, len_tried=-1)
self.nodes[0].assert_start_raises_init_error(
expected_msg=init_error(f"Corrupt AddrMan serialization: nTried=-1, should be in \\[0, {max_len_tried}\\]:.*"),
match=ErrorMatch.FULL_REGEX,
)
self.log.info("Check that corrupt addrman cannot be read (large len_tried)")
write_addrman(peers_dat, len_tried=max_len_tried + 1)
self.nodes[0].assert_start_raises_init_error(
expected_msg=init_error(f"Corrupt AddrMan serialization: nTried={max_len_tried + 1}, should be in \\[0, {max_len_tried}\\]:.*"),
match=ErrorMatch.FULL_REGEX,
)
self.log.info("Check that corrupt addrman cannot be read (len_new)")
self.stop_node(0)
max_len_new = ADDRMAN_NEW_BUCKET_COUNT * ADDRMAN_BUCKET_SIZE
write_addrman(peers_dat, len_new=-1)
self.nodes[0].assert_start_raises_init_error(
expected_msg=init_error(f"Corrupt AddrMan serialization: nNew=-1, should be in \\[0, {max_len_new}\\]:.*"),
match=ErrorMatch.FULL_REGEX,
)
self.log.info("Check that corrupt addrman cannot be read (large len_new)")
self.stop_node(0)
write_addrman(peers_dat, len_new=max_len_new + 1)
self.nodes[0].assert_start_raises_init_error(
expected_msg=init_error(f"Corrupt AddrMan serialization: nNew={max_len_new + 1}, should be in \\[0, {max_len_new}\\]:.*"),
match=ErrorMatch.FULL_REGEX,
)
self.log.info("Check that corrupt addrman cannot be read (failed check)")
self.stop_node(0)
write_addrman(peers_dat, bucket_key=0)
self.nodes[0].assert_start_raises_init_error(
expected_msg=init_error("Corrupt data. Consistency check failed with code -16: .*"),
match=ErrorMatch.FULL_REGEX,
)
self.log.info("Check that missing addrman is recreated")
self.stop_node(0)
os.remove(peers_dat)
with self.nodes[0].assert_debug_log([
f'Creating peers.dat because the file was not found ("{peers_dat}")',
]):
self.start_node(0)
assert_equal(self.nodes[0].getnodeaddresses(), [])
if __name__ == "__main__":
AddrmanTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/mining_getblocktemplate_longpoll.py | #!/usr/bin/env python3
# Copyright (c) 2014-2021 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test longpolling with getblocktemplate."""
import random
import threading
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import get_rpc_proxy
from test_framework.wallet import MiniWallet
class LongpollThread(threading.Thread):
def __init__(self, node):
threading.Thread.__init__(self)
# query current longpollid
template = node.getblocktemplate({'rules': ['segwit']})
self.longpollid = template['longpollid']
# create a new connection to the node, we can't use the same
# connection from two threads
self.node = get_rpc_proxy(node.url, 1, timeout=600, coveragedir=node.coverage_dir)
def run(self):
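        # This RPC blocks server-side until the chain tip or mempool changes
        # (or the longpoll times out); the test asserts on when it returns.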
self.node.getblocktemplate({'longpollid': self.longpollid, 'rules': ['segwit']})
class GetBlockTemplateLPTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 2
self.supports_cli = False
def run_test(self):
self.log.info("Warning: this test will take about 70 seconds in the best case. Be patient.")
self.log.info("Test that longpollid doesn't change between successive getblocktemplate() invocations if nothing else happens")
self.generate(self.nodes[0], 10)
template = self.nodes[0].getblocktemplate({'rules': ['segwit']})
longpollid = template['longpollid']
template2 = self.nodes[0].getblocktemplate({'rules': ['segwit']})
assert template2['longpollid'] == longpollid
self.log.info("Test that longpoll waits if we do nothing")
thr = LongpollThread(self.nodes[0])
with self.nodes[0].assert_debug_log(["ThreadRPCServer method=getblocktemplate"], timeout=3):
thr.start()
# check that thread still lives
thr.join(5) # wait 5 seconds or until thread exits
assert thr.is_alive()
self.miniwallet = MiniWallet(self.nodes[0])
self.log.info("Test that longpoll will terminate if another node generates a block")
self.generate(self.nodes[1], 1) # generate a block on another node
        # check that thread will exit now that the new block has arrived
thr.join(5) # wait 5 seconds or until thread exits
assert not thr.is_alive()
self.log.info("Test that longpoll will terminate if we generate a block ourselves")
thr = LongpollThread(self.nodes[0])
with self.nodes[0].assert_debug_log(["ThreadRPCServer method=getblocktemplate"], timeout=3):
thr.start()
self.generate(self.nodes[0], 1) # generate a block on own node
thr.join(5) # wait 5 seconds or until thread exits
assert not thr.is_alive()
self.log.info("Test that introducing a new transaction into the mempool will terminate the longpoll")
thr = LongpollThread(self.nodes[0])
with self.nodes[0].assert_debug_log(["ThreadRPCServer method=getblocktemplate"], timeout=3):
thr.start()
# generate a transaction and submit it
self.miniwallet.send_self_transfer(from_node=random.choice(self.nodes))
# after one minute, every 10 seconds the mempool is probed, so in 80 seconds it should have returned
thr.join(60 + 20)
assert not thr.is_alive()
if __name__ == '__main__':
GetBlockTemplateLPTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/rpc_signrawtransactionwithkey.py | #!/usr/bin/env python3
# Copyright (c) 2015-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test transaction signing using the signrawtransactionwithkey RPC."""
from test_framework.blocktools import (
COINBASE_MATURITY,
)
from test_framework.address import (
address_to_scriptpubkey,
script_to_p2sh,
)
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
find_vout_for_address,
)
from test_framework.script_util import (
key_to_p2pk_script,
key_to_p2pkh_script,
script_to_p2sh_p2wsh_script,
script_to_p2wsh_script,
)
from test_framework.wallet import (
getnewdestination,
)
from test_framework.wallet_util import (
generate_keypair,
)
from decimal import (
Decimal,
)
INPUTS = [
# Valid pay-to-pubkey scripts
{'txid': '9b907ef1e3c26fc71fe4a4b3580bc75264112f95050014157059c736f0202e71', 'vout': 0,
'scriptPubKey': '76a91460baa0f494b38ce3c940dea67f3804dc52d1fb9488ac'},
{'txid': '83a4f6a6b73660e13ee6cb3c6063fa3759c50c9b7521d0536022961898f4fb02', 'vout': 0,
'scriptPubKey': '76a914669b857c03a5ed269d5d85a1ffac9ed5d663072788ac'},
]
OUTPUTS = {'mpLQjfK79b7CCV4VMJWEWAj5Mpx8Up5zxB': 0.1}
class SignRawTransactionWithKeyTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 2
def send_to_address(self, addr, amount):
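        # Fund `addr` by spending the next mature coinbase output, signing with
        # the node's deterministic key so no wallet is required.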
input = {"txid": self.nodes[0].getblock(self.block_hash[self.blk_idx])["tx"][0], "vout": 0}
output = {addr: amount}
self.blk_idx += 1
rawtx = self.nodes[0].createrawtransaction([input], output)
txid = self.nodes[0].sendrawtransaction(self.nodes[0].signrawtransactionwithkey(rawtx, [self.nodes[0].get_deterministic_priv_key().key])["hex"], 0)
return txid
def assert_signing_completed_successfully(self, signed_tx):
assert 'errors' not in signed_tx
assert 'complete' in signed_tx
assert_equal(signed_tx['complete'], True)
def successful_signing_test(self):
"""Create and sign a valid raw transaction with one input.
Expected results:
1) The transaction has a complete set of signatures
2) No script verification error occurred"""
self.log.info("Test valid raw transaction with one input")
privKeys = ['cUeKHd5orzT3mz8P9pxyREHfsWtVfgsfDjiZZBcjUBAaGk1BTj7N', 'cVKpPfVKSJxKqVpE9awvXNWuLHCa5j5tiE7K6zbUSptFpTEtiFrA']
rawTx = self.nodes[0].createrawtransaction(INPUTS, OUTPUTS)
rawTxSigned = self.nodes[0].signrawtransactionwithkey(rawTx, privKeys, INPUTS)
self.assert_signing_completed_successfully(rawTxSigned)
def witness_script_test(self):
self.log.info("Test signing transaction to P2SH-P2WSH addresses without wallet")
# Create a new P2SH-P2WSH 1-of-1 multisig address:
embedded_privkey, embedded_pubkey = generate_keypair(wif=True)
p2sh_p2wsh_address = self.nodes[1].createmultisig(1, [embedded_pubkey.hex()], "p2sh-segwit")
# send transaction to P2SH-P2WSH 1-of-1 multisig address
self.block_hash = self.generate(self.nodes[0], COINBASE_MATURITY + 1)
self.blk_idx = 0
self.send_to_address(p2sh_p2wsh_address["address"], 49.999)
self.generate(self.nodes[0], 1)
# Get the UTXO info from scantxoutset
unspent_output = self.nodes[1].scantxoutset('start', [p2sh_p2wsh_address['descriptor']])['unspents'][0]
spk = script_to_p2sh_p2wsh_script(p2sh_p2wsh_address['redeemScript']).hex()
unspent_output['witnessScript'] = p2sh_p2wsh_address['redeemScript']
unspent_output['redeemScript'] = script_to_p2wsh_script(unspent_output['witnessScript']).hex()
assert_equal(spk, unspent_output['scriptPubKey'])
# Now create and sign a transaction spending that output on node[0], which doesn't know the scripts or keys
spending_tx = self.nodes[0].createrawtransaction([unspent_output], {getnewdestination()[2]: Decimal("49.998")})
spending_tx_signed = self.nodes[0].signrawtransactionwithkey(spending_tx, [embedded_privkey], [unspent_output])
self.assert_signing_completed_successfully(spending_tx_signed)
# Now test with P2PKH and P2PK scripts as the witnessScript
for tx_type in ['P2PKH', 'P2PK']: # these tests are order-independent
self.verify_txn_with_witness_script(tx_type)
def verify_txn_with_witness_script(self, tx_type):
self.log.info("Test with a {} script as the witnessScript".format(tx_type))
embedded_privkey, embedded_pubkey = generate_keypair(wif=True)
witness_script = {
'P2PKH': key_to_p2pkh_script(embedded_pubkey).hex(),
'P2PK': key_to_p2pk_script(embedded_pubkey).hex()
}.get(tx_type, "Invalid tx_type")
redeem_script = script_to_p2wsh_script(witness_script).hex()
addr = script_to_p2sh(redeem_script)
script_pub_key = address_to_scriptpubkey(addr).hex()
# Fund that address
txid = self.send_to_address(addr, 10)
vout = find_vout_for_address(self.nodes[0], txid, addr)
self.generate(self.nodes[0], 1)
# Now create and sign a transaction spending that output on node[0], which doesn't know the scripts or keys
spending_tx = self.nodes[0].createrawtransaction([{'txid': txid, 'vout': vout}], {getnewdestination()[2]: Decimal("9.999")})
spending_tx_signed = self.nodes[0].signrawtransactionwithkey(spending_tx, [embedded_privkey], [{'txid': txid, 'vout': vout, 'scriptPubKey': script_pub_key, 'redeemScript': redeem_script, 'witnessScript': witness_script, 'amount': 10}])
self.assert_signing_completed_successfully(spending_tx_signed)
self.nodes[0].sendrawtransaction(spending_tx_signed['hex'])
def invalid_sighashtype_test(self):
self.log.info("Test signing transaction with invalid sighashtype")
tx = self.nodes[0].createrawtransaction(INPUTS, OUTPUTS)
privkeys = [self.nodes[0].get_deterministic_priv_key().key]
assert_raises_rpc_error(-8, "all is not a valid sighash parameter.", self.nodes[0].signrawtransactionwithkey, tx, privkeys, sighashtype="all")
def run_test(self):
self.successful_signing_test()
self.witness_script_test()
self.invalid_sighashtype_test()
if __name__ == '__main__':
SignRawTransactionWithKeyTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/p2p_node_network_limited.py | #!/usr/bin/env python3
# Copyright (c) 2017-2021 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Tests NODE_NETWORK_LIMITED.
Tests that a node configured with -prune=550 signals NODE_NETWORK_LIMITED correctly
and that it responds to getdata requests for blocks correctly:
- send a block within 288 + 2 of the tip
- disconnect peers who request blocks older than that."""
from test_framework.messages import (
CInv,
MSG_BLOCK,
NODE_NETWORK_LIMITED,
NODE_P2P_V2,
NODE_WITNESS,
msg_getdata,
msg_verack,
)
from test_framework.p2p import P2PInterface
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
)
class P2PIgnoreInv(P2PInterface):
firstAddrnServices = 0
def on_inv(self, message):
# The node will send us invs for other blocks. Ignore them.
pass
def on_addr(self, message):
self.firstAddrnServices = message.addrs[0].nServices
def wait_for_addr(self, timeout=5):
test_function = lambda: self.last_message.get("addr")
self.wait_until(test_function, timeout=timeout)
def send_getdata_for_block(self, blockhash):
getdata_request = msg_getdata()
getdata_request.inv.append(CInv(MSG_BLOCK, int(blockhash, 16)))
self.send_message(getdata_request)
class NodeNetworkLimitedTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 3
self.extra_args = [['-prune=550', '-addrmantest'], [], []]
def disconnect_all(self):
self.disconnect_nodes(0, 1)
self.disconnect_nodes(0, 2)
self.disconnect_nodes(1, 2)
def setup_network(self):
self.add_nodes(self.num_nodes, self.extra_args)
self.start_nodes()
def run_test(self):
node = self.nodes[0].add_p2p_connection(P2PIgnoreInv())
expected_services = NODE_WITNESS | NODE_NETWORK_LIMITED
if self.options.v2transport:
expected_services |= NODE_P2P_V2
self.log.info("Check that node has signalled expected services.")
assert_equal(node.nServices, expected_services)
self.log.info("Check that the localservices is as expected.")
assert_equal(int(self.nodes[0].getnetworkinfo()['localservices'], 16), expected_services)
self.log.info("Mine enough blocks to reach the NODE_NETWORK_LIMITED range.")
self.connect_nodes(0, 1)
blocks = self.generate(self.nodes[1], 292, sync_fun=lambda: self.sync_blocks([self.nodes[0], self.nodes[1]]))
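        # With the tip at height 292, the 288+2 window makes height 2 the
        # oldest servable block: blocks[1] must be served, blocks[0] must not.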
self.log.info("Make sure we can max retrieve block at tip-288.")
node.send_getdata_for_block(blocks[1]) # last block in valid range
node.wait_for_block(int(blocks[1], 16), timeout=3)
self.log.info("Requesting block at height 2 (tip-289) must fail (ignored).")
node.send_getdata_for_block(blocks[0]) # first block outside of the 288+2 limit
node.wait_for_disconnect(5)
self.log.info("Check local address relay, do a fresh connection.")
self.nodes[0].disconnect_p2ps()
node1 = self.nodes[0].add_p2p_connection(P2PIgnoreInv())
node1.send_message(msg_verack())
node1.wait_for_addr()
#must relay address with NODE_NETWORK_LIMITED
assert_equal(node1.firstAddrnServices, expected_services)
self.nodes[0].disconnect_p2ps()
# connect unsynced node 2 with pruned NODE_NETWORK_LIMITED peer
# because node 2 is in IBD and node 0 is a NODE_NETWORK_LIMITED peer, sync must not be possible
self.connect_nodes(0, 2)
try:
self.sync_blocks([self.nodes[0], self.nodes[2]], timeout=5)
except Exception:
pass
# node2 must remain at height 0
assert_equal(self.nodes[2].getblockheader(self.nodes[2].getbestblockhash())['height'], 0)
# now connect also to node 1 (non pruned)
self.connect_nodes(1, 2)
# sync must be possible
self.sync_blocks()
# disconnect all peers
self.disconnect_all()
# mine 10 blocks on node 0 (pruned node)
self.generate(self.nodes[0], 10, sync_fun=self.no_op)
        # connect node1 (non-pruned) with node0 (pruned) and check if they can sync
self.connect_nodes(0, 1)
# sync must be possible, node 1 is no longer in IBD and should therefore connect to node 0 (NODE_NETWORK_LIMITED)
self.sync_blocks([self.nodes[0], self.nodes[1]])
if __name__ == '__main__':
NodeNetworkLimitedTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/feature_assumeutxo.py | #!/usr/bin/env python3
# Copyright (c) 2021-present The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test for assumeutxo, a means of quickly bootstrapping a node using
a serialized version of the UTXO set at a certain height, which corresponds
to a hash that has been compiled into bitcoind.
The assumeutxo value generated and used here is committed to in
`CRegTestParams::m_assumeutxo_data` in `src/chainparams.cpp`.
## Possible test improvements
- TODO: test submitting a transaction and verifying it appears in mempool
- TODO: test what happens with -reindex and -reindex-chainstate before the
snapshot is validated, and make sure it's deleted successfully.
Interesting test cases could be loading an assumeutxo snapshot file with:
- TODO: Valid hash but invalid snapshot file (bad coin height or
bad other serialization)
- TODO: Valid snapshot file, but referencing a snapshot block that turns out to be
invalid, or has an invalid parent
- TODO: Valid snapshot file and snapshot block, but the block is not on the
most-work chain
Interesting starting states could be loading a snapshot when the current chain tip is:
- TODO: An ancestor of snapshot block
- TODO: Not an ancestor of the snapshot block but has less work
- TODO: The snapshot block
- TODO: A descendant of the snapshot block
- TODO: Not an ancestor or a descendant of the snapshot block and has more work
"""
from shutil import rmtree
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
)
START_HEIGHT = 199
SNAPSHOT_BASE_HEIGHT = 299
FINAL_HEIGHT = 399
COMPLETE_IDX = {'synced': True, 'best_block_height': FINAL_HEIGHT}
class AssumeutxoTest(BitcoinTestFramework):
def set_test_params(self):
"""Use the pregenerated, deterministic chain up to height 199."""
self.num_nodes = 3
self.rpc_timeout = 120
self.extra_args = [
[],
["-fastprune", "-prune=1", "-blockfilterindex=1", "-coinstatsindex=1"],
["-txindex=1", "-blockfilterindex=1", "-coinstatsindex=1"],
]
def setup_network(self):
"""Start with the nodes disconnected so that one can generate a snapshot
including blocks the other hasn't yet seen."""
self.add_nodes(3)
self.start_nodes(extra_args=self.extra_args)
def test_invalid_snapshot_scenarios(self, valid_snapshot_path):
self.log.info("Test different scenarios of loading invalid snapshot files")
with open(valid_snapshot_path, 'rb') as f:
valid_snapshot_contents = f.read()
bad_snapshot_path = valid_snapshot_path + '.mod'
def expected_error(log_msg="", rpc_details=""):
with self.nodes[1].assert_debug_log([log_msg]):
assert_raises_rpc_error(-32603, f"Unable to load UTXO snapshot{rpc_details}", self.nodes[1].loadtxoutset, bad_snapshot_path)
self.log.info(" - snapshot file referring to a block that is not in the assumeutxo parameters")
prev_block_hash = self.nodes[0].getblockhash(SNAPSHOT_BASE_HEIGHT - 1)
bogus_block_hash = "0" * 64 # Represents any unknown block hash
for bad_block_hash in [bogus_block_hash, prev_block_hash]:
with open(bad_snapshot_path, 'wb') as f:
# block hash of the snapshot base is stored right at the start (first 32 bytes)
f.write(bytes.fromhex(bad_block_hash)[::-1] + valid_snapshot_contents[32:])
error_details = f", assumeutxo block hash in snapshot metadata not recognized ({bad_block_hash})"
expected_error(rpc_details=error_details)
self.log.info(" - snapshot file with wrong number of coins")
valid_num_coins = int.from_bytes(valid_snapshot_contents[32:32 + 8], "little")
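        # Nudge the declared coin count one off in each direction: one too few
        # leaves unparsed trailing data, one too many hits a truncated stream
        # mid-deserialization.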
for off in [-1, +1]:
with open(bad_snapshot_path, 'wb') as f:
f.write(valid_snapshot_contents[:32])
f.write((valid_num_coins + off).to_bytes(8, "little"))
f.write(valid_snapshot_contents[32 + 8:])
expected_error(log_msg=f"bad snapshot - coins left over after deserializing 298 coins" if off == -1 else f"bad snapshot format or truncated snapshot after deserializing 299 coins")
self.log.info(" - snapshot file with alternated UTXO data")
cases = [
[b"\xff" * 32, 0, "05030e506678f2eca8d624ffed97090ab3beadad1b51ee6e5985ba91c5720e37"], # wrong outpoint hash
[(1).to_bytes(4, "little"), 32, "7d29cfe2c1e242bc6f103878bb70cfffa8b4dac20dbd001ff6ce24b7de2d2399"], # wrong outpoint index
[b"\x81", 36, "f03939a195531f96d5dff983e294a1af62af86049fa7a19a7627246f237c03f1"], # wrong coin code VARINT((coinbase ? 1 : 0) | (height << 1))
[b"\x83", 36, "e4577da84590fb288c0f7967e89575e1b0aa46624669640f6f5dfef028d39930"], # another wrong coin code
]
for content, offset, wrong_hash in cases:
with open(bad_snapshot_path, "wb") as f:
f.write(valid_snapshot_contents[:(32 + 8 + offset)])
f.write(content)
f.write(valid_snapshot_contents[(32 + 8 + offset + len(content)):])
expected_error(log_msg=f"[snapshot] bad snapshot content hash: expected 61d9c2b29a2571a5fe285fe2d8554f91f93309666fc9b8223ee96338de25ff53, got {wrong_hash}")
def test_invalid_chainstate_scenarios(self):
self.log.info("Test different scenarios of invalid snapshot chainstate in datadir")
self.log.info(" - snapshot chainstate referring to a block that is not in the assumeutxo parameters")
self.stop_node(0)
chainstate_snapshot_path = self.nodes[0].chain_path / "chainstate_snapshot"
chainstate_snapshot_path.mkdir()
with open(chainstate_snapshot_path / "base_blockhash", 'wb') as f:
f.write(b'z' * 32)
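        # 32 'z' bytes form a well-formed but unknown block hash, which cannot
        # match any assumeutxo entry in the regtest chain params.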
def expected_error(log_msg="", error_msg=""):
with self.nodes[0].assert_debug_log([log_msg]):
self.nodes[0].assert_start_raises_init_error(expected_msg=error_msg)
expected_error_msg = f"Error: A fatal internal error occurred, see debug.log for details"
error_details = f"Assumeutxo data not found for the given blockhash"
expected_error(log_msg=error_details, error_msg=expected_error_msg)
# resurrect node again
rmtree(chainstate_snapshot_path)
self.start_node(0)
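    # `has_blockfile` is used by run_test below but was missing from this
    # class; a minimal sketch, assuming TestNode exposes a `blocks_path`
    # attribute alongside the `chain_path` used elsewhere in this file.
    @staticmethod
    def has_blockfile(node, filenum: str):
        # Block data is stored on disk in sequentially numbered blk*.dat files.
        return (node.blocks_path / f"blk{filenum}.dat").is_file()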
def run_test(self):
"""
Bring up two (disconnected) nodes, mine some new blocks on the first,
and generate a UTXO snapshot.
Load the snapshot into the second, ensure it syncs to tip and completes
background validation when connected to the first.
"""
n0 = self.nodes[0]
n1 = self.nodes[1]
n2 = self.nodes[2]
# Mock time for a deterministic chain
for n in self.nodes:
n.setmocktime(n.getblockheader(n.getbestblockhash())['time'])
self.sync_blocks()
# Generate a series of blocks that `n0` will have in the snapshot,
# but that n1 doesn't yet see. In order for the snapshot to activate,
# though, we have to ferry over the new headers to n1 so that it
# isn't waiting forever to see the header of the snapshot's base block
# while disconnected from n0.
for i in range(100):
self.generate(n0, nblocks=1, sync_fun=self.no_op)
newblock = n0.getblock(n0.getbestblockhash(), 0)
# make n1 aware of the new header, but don't give it the block.
n1.submitheader(newblock)
n2.submitheader(newblock)
# Ensure everyone is seeing the same headers.
for n in self.nodes:
assert_equal(n.getblockchaininfo()["headers"], SNAPSHOT_BASE_HEIGHT)
self.log.info("-- Testing assumeutxo + some indexes + pruning")
assert_equal(n0.getblockcount(), SNAPSHOT_BASE_HEIGHT)
assert_equal(n1.getblockcount(), START_HEIGHT)
self.log.info(f"Creating a UTXO snapshot at height {SNAPSHOT_BASE_HEIGHT}")
dump_output = n0.dumptxoutset('utxos.dat')
assert_equal(
dump_output['txoutset_hash'],
'61d9c2b29a2571a5fe285fe2d8554f91f93309666fc9b8223ee96338de25ff53')
assert_equal(dump_output['nchaintx'], 300)
assert_equal(n0.getblockchaininfo()["blocks"], SNAPSHOT_BASE_HEIGHT)
# Mine more blocks on top of the snapshot that n1 hasn't yet seen. This
# will allow us to test n1's sync-to-tip on top of a snapshot.
self.generate(n0, nblocks=100, sync_fun=self.no_op)
assert_equal(n0.getblockcount(), FINAL_HEIGHT)
assert_equal(n1.getblockcount(), START_HEIGHT)
assert_equal(n0.getblockchaininfo()["blocks"], FINAL_HEIGHT)
self.test_invalid_snapshot_scenarios(dump_output['path'])
self.test_invalid_chainstate_scenarios()
self.log.info(f"Loading snapshot into second node from {dump_output['path']}")
loaded = n1.loadtxoutset(dump_output['path'])
assert_equal(loaded['coins_loaded'], SNAPSHOT_BASE_HEIGHT)
assert_equal(loaded['base_height'], SNAPSHOT_BASE_HEIGHT)
normal, snapshot = n1.getchainstates()["chainstates"]
assert_equal(normal['blocks'], START_HEIGHT)
assert_equal(normal.get('snapshot_blockhash'), None)
assert_equal(normal['validated'], True)
assert_equal(snapshot['blocks'], SNAPSHOT_BASE_HEIGHT)
assert_equal(snapshot['snapshot_blockhash'], dump_output['base_hash'])
assert_equal(snapshot['validated'], False)
assert_equal(n1.getblockchaininfo()["blocks"], SNAPSHOT_BASE_HEIGHT)
PAUSE_HEIGHT = FINAL_HEIGHT - 40
self.log.info("Restarting node to stop at height %d", PAUSE_HEIGHT)
self.restart_node(1, extra_args=[
f"-stopatheight={PAUSE_HEIGHT}", *self.extra_args[1]])
# Finally connect the nodes and let them sync.
#
# Set `wait_for_connect=False` to avoid a race between performing connection
# assertions and the -stopatheight tripping.
self.connect_nodes(0, 1, wait_for_connect=False)
n1.wait_until_stopped(timeout=5)
self.log.info("Checking that blocks are segmented on disk")
assert self.has_blockfile(n1, "00000"), "normal blockfile missing"
assert self.has_blockfile(n1, "00001"), "assumed blockfile missing"
assert not self.has_blockfile(n1, "00002"), "too many blockfiles"
self.log.info("Restarted node before snapshot validation completed, reloading...")
self.restart_node(1, extra_args=self.extra_args[1])
self.connect_nodes(0, 1)
self.log.info(f"Ensuring snapshot chain syncs to tip. ({FINAL_HEIGHT})")
self.wait_until(lambda: n1.getchainstates()['chainstates'][-1]['blocks'] == FINAL_HEIGHT)
self.sync_blocks(nodes=(n0, n1))
self.log.info("Ensuring background validation completes")
self.wait_until(lambda: len(n1.getchainstates()['chainstates']) == 1)
# Ensure indexes have synced.
completed_idx_state = {
'basic block filter index': COMPLETE_IDX,
'coinstatsindex': COMPLETE_IDX,
}
self.wait_until(lambda: n1.getindexinfo() == completed_idx_state)
for i in (0, 1):
n = self.nodes[i]
self.log.info(f"Restarting node {i} to ensure (Check|Load)BlockIndex passes")
self.restart_node(i, extra_args=self.extra_args[i])
assert_equal(n.getblockchaininfo()["blocks"], FINAL_HEIGHT)
chainstate, = n.getchainstates()['chainstates']
assert_equal(chainstate['blocks'], FINAL_HEIGHT)
if i != 0:
# Ensure indexes have synced for the assumeutxo node
self.wait_until(lambda: n.getindexinfo() == completed_idx_state)
# Node 2: all indexes + reindex
# -----------------------------
self.log.info("-- Testing all indexes + reindex")
assert_equal(n2.getblockcount(), START_HEIGHT)
self.log.info(f"Loading snapshot into third node from {dump_output['path']}")
loaded = n2.loadtxoutset(dump_output['path'])
assert_equal(loaded['coins_loaded'], SNAPSHOT_BASE_HEIGHT)
assert_equal(loaded['base_height'], SNAPSHOT_BASE_HEIGHT)
normal, snapshot = n2.getchainstates()['chainstates']
assert_equal(normal['blocks'], START_HEIGHT)
assert_equal(normal.get('snapshot_blockhash'), None)
assert_equal(normal['validated'], True)
assert_equal(snapshot['blocks'], SNAPSHOT_BASE_HEIGHT)
assert_equal(snapshot['snapshot_blockhash'], dump_output['base_hash'])
assert_equal(snapshot['validated'], False)
self.connect_nodes(0, 2)
self.wait_until(lambda: n2.getchainstates()['chainstates'][-1]['blocks'] == FINAL_HEIGHT)
self.sync_blocks()
self.log.info("Ensuring background validation completes")
self.wait_until(lambda: len(n2.getchainstates()['chainstates']) == 1)
completed_idx_state = {
'basic block filter index': COMPLETE_IDX,
'coinstatsindex': COMPLETE_IDX,
'txindex': COMPLETE_IDX,
}
self.wait_until(lambda: n2.getindexinfo() == completed_idx_state)
for i in (0, 2):
n = self.nodes[i]
self.log.info(f"Restarting node {i} to ensure (Check|Load)BlockIndex passes")
self.restart_node(i, extra_args=self.extra_args[i])
assert_equal(n.getblockchaininfo()["blocks"], FINAL_HEIGHT)
chainstate, = n.getchainstates()['chainstates']
assert_equal(chainstate['blocks'], FINAL_HEIGHT)
if i != 0:
# Ensure indexes have synced for the assumeutxo node
self.wait_until(lambda: n.getindexinfo() == completed_idx_state)
self.log.info("Test -reindex-chainstate of an assumeutxo-synced node")
self.restart_node(2, extra_args=[
'-reindex-chainstate=1', *self.extra_args[2]])
assert_equal(n2.getblockchaininfo()["blocks"], FINAL_HEIGHT)
self.wait_until(lambda: n2.getblockcount() == FINAL_HEIGHT)
self.log.info("Test -reindex of an assumeutxo-synced node")
self.restart_node(2, extra_args=['-reindex=1', *self.extra_args[2]])
self.connect_nodes(0, 2)
self.wait_until(lambda: n2.getblockcount() == FINAL_HEIGHT)
if __name__ == '__main__':
AssumeutxoTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/feature_proxy.py | #!/usr/bin/env python3
# Copyright (c) 2015-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test bitcoind with different proxy configuration.
Test plan:
- Start bitcoind's with different proxy configurations
- Use addnode to initiate connections
- Verify that proxies are connected to, and the right connection command is given
- Proxy configurations to test on bitcoind side:
- `-proxy` (proxy everything)
- `-onion` (proxy just onions)
- `-proxyrandomize` Circuit randomization
- `-cjdnsreachable`
- Proxy configurations to test on proxy side:
- support no authentication (other proxy)
- support no authentication + user/pass authentication (Tor)
- proxy on IPv6
- Create various proxies (as threads)
- Create nodes that connect to them
- Manipulate the peer connections using addnode (onetry) and observe effects
- Test the getpeerinfo `network` field for the peer
  - addnode connect to IPv4
  - addnode connect to IPv6
  - addnode connect to onion
  - addnode connect to generic DNS name
  - addnode connect to a CJDNS address
- Test getnetworkinfo for each node
- Test passing invalid -proxy
- Test passing invalid -onion
- Test passing invalid -i2psam
- Test passing -onlynet=onion without -proxy or -onion
- Test passing -onlynet=onion with -onion=0 and with -noonion
- Test passing unknown -onlynet
"""
import socket
from test_framework.socks5 import Socks5Configuration, Socks5Command, Socks5Server, AddressType
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
p2p_port,
)
from test_framework.netutil import test_ipv6_local
# Networks returned by RPC getpeerinfo.
NET_UNROUTABLE = "not_publicly_routable"
NET_IPV4 = "ipv4"
NET_IPV6 = "ipv6"
NET_ONION = "onion"
NET_I2P = "i2p"
NET_CJDNS = "cjdns"
# Networks returned by RPC getnetworkinfo, defined in src/rpc/net.cpp::GetNetworksInfo()
NETWORKS = frozenset({NET_IPV4, NET_IPV6, NET_ONION, NET_I2P, NET_CJDNS})
class ProxyTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 5
self.setup_clean_chain = True
def setup_nodes(self):
self.have_ipv6 = test_ipv6_local()
# Create two proxies on different ports
# ... one unauthenticated
self.conf1 = Socks5Configuration()
self.conf1.addr = ('127.0.0.1', p2p_port(self.num_nodes))
self.conf1.unauth = True
self.conf1.auth = False
# ... one supporting authenticated and unauthenticated (Tor)
self.conf2 = Socks5Configuration()
self.conf2.addr = ('127.0.0.1', p2p_port(self.num_nodes + 1))
self.conf2.unauth = True
self.conf2.auth = True
if self.have_ipv6:
# ... one on IPv6 with similar configuration
self.conf3 = Socks5Configuration()
self.conf3.af = socket.AF_INET6
self.conf3.addr = ('::1', p2p_port(self.num_nodes + 2))
self.conf3.unauth = True
self.conf3.auth = True
else:
self.log.warning("Testing without local IPv6 support")
self.serv1 = Socks5Server(self.conf1)
self.serv1.start()
self.serv2 = Socks5Server(self.conf2)
self.serv2.start()
if self.have_ipv6:
self.serv3 = Socks5Server(self.conf3)
self.serv3.start()
# We will not try to connect to this.
self.i2p_sam = ('127.0.0.1', 7656)
# Note: proxies are not used to connect to local nodes. This is because the proxy to
# use is based on CService.GetNetwork(), which returns NET_UNROUTABLE for localhost.
args = [
['-listen', f'-proxy={self.conf1.addr[0]}:{self.conf1.addr[1]}','-proxyrandomize=1'],
['-listen', f'-proxy={self.conf1.addr[0]}:{self.conf1.addr[1]}',f'-onion={self.conf2.addr[0]}:{self.conf2.addr[1]}',
f'-i2psam={self.i2p_sam[0]}:{self.i2p_sam[1]}', '-i2pacceptincoming=0', '-proxyrandomize=0'],
['-listen', f'-proxy={self.conf2.addr[0]}:{self.conf2.addr[1]}','-proxyrandomize=1'],
[],
['-listen', f'-proxy={self.conf1.addr[0]}:{self.conf1.addr[1]}','-proxyrandomize=1',
'-cjdnsreachable']
]
if self.have_ipv6:
args[3] = ['-listen', f'-proxy=[{self.conf3.addr[0]}]:{self.conf3.addr[1]}','-proxyrandomize=0', '-noonion']
self.add_nodes(self.num_nodes, extra_args=args)
self.start_nodes()
def network_test(self, node, addr, network):
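        # If the address is still listed as a peer, check that getpeerinfo
        # classified it into the expected network.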
for peer in node.getpeerinfo():
if peer["addr"] == addr:
assert_equal(peer["network"], network)
def node_test(self, node, *, proxies, auth, test_onion, test_cjdns):
rv = []
addr = "15.61.23.23:1234"
self.log.debug(f"Test: outgoing IPv4 connection through node for address {addr}")
node.addnode(addr, "onetry")
cmd = proxies[0].queue.get()
assert isinstance(cmd, Socks5Command)
# Note: bitcoind's SOCKS5 implementation only sends atyp DOMAINNAME, even if connecting directly to IPv4/IPv6
assert_equal(cmd.atyp, AddressType.DOMAINNAME)
assert_equal(cmd.addr, b"15.61.23.23")
assert_equal(cmd.port, 1234)
if not auth:
assert_equal(cmd.username, None)
assert_equal(cmd.password, None)
rv.append(cmd)
self.network_test(node, addr, network=NET_IPV4)
if self.have_ipv6:
addr = "[1233:3432:2434:2343:3234:2345:6546:4534]:5443"
self.log.debug(f"Test: outgoing IPv6 connection through node for address {addr}")
node.addnode(addr, "onetry")
cmd = proxies[1].queue.get()
assert isinstance(cmd, Socks5Command)
# Note: bitcoind's SOCKS5 implementation only sends atyp DOMAINNAME, even if connecting directly to IPv4/IPv6
assert_equal(cmd.atyp, AddressType.DOMAINNAME)
assert_equal(cmd.addr, b"1233:3432:2434:2343:3234:2345:6546:4534")
assert_equal(cmd.port, 5443)
if not auth:
assert_equal(cmd.username, None)
assert_equal(cmd.password, None)
rv.append(cmd)
self.network_test(node, addr, network=NET_IPV6)
if test_onion:
addr = "pg6mmjiyjmcrsslvykfwnntlaru7p5svn6y2ymmju6nubxndf4pscryd.onion:8333"
self.log.debug(f"Test: outgoing onion connection through node for address {addr}")
node.addnode(addr, "onetry")
cmd = proxies[2].queue.get()
assert isinstance(cmd, Socks5Command)
assert_equal(cmd.atyp, AddressType.DOMAINNAME)
assert_equal(cmd.addr, b"pg6mmjiyjmcrsslvykfwnntlaru7p5svn6y2ymmju6nubxndf4pscryd.onion")
assert_equal(cmd.port, 8333)
if not auth:
assert_equal(cmd.username, None)
assert_equal(cmd.password, None)
rv.append(cmd)
self.network_test(node, addr, network=NET_ONION)
if test_cjdns:
addr = "[fc00:1:2:3:4:5:6:7]:8888"
self.log.debug(f"Test: outgoing CJDNS connection through node for address {addr}")
node.addnode(addr, "onetry")
cmd = proxies[1].queue.get()
assert isinstance(cmd, Socks5Command)
assert_equal(cmd.atyp, AddressType.DOMAINNAME)
assert_equal(cmd.addr, b"fc00:1:2:3:4:5:6:7")
assert_equal(cmd.port, 8888)
if not auth:
assert_equal(cmd.username, None)
assert_equal(cmd.password, None)
rv.append(cmd)
self.network_test(node, addr, network=NET_CJDNS)
addr = "node.noumenon:8333"
self.log.debug(f"Test: outgoing DNS name connection through node for address {addr}")
node.addnode(addr, "onetry")
cmd = proxies[3].queue.get()
assert isinstance(cmd, Socks5Command)
assert_equal(cmd.atyp, AddressType.DOMAINNAME)
assert_equal(cmd.addr, b"node.noumenon")
assert_equal(cmd.port, 8333)
if not auth:
assert_equal(cmd.username, None)
assert_equal(cmd.password, None)
rv.append(cmd)
self.network_test(node, addr, network=NET_UNROUTABLE)
return rv
def run_test(self):
# basic -proxy
self.node_test(self.nodes[0],
proxies=[self.serv1, self.serv1, self.serv1, self.serv1],
auth=False, test_onion=True, test_cjdns=False)
# -proxy plus -onion
self.node_test(self.nodes[1],
proxies=[self.serv1, self.serv1, self.serv2, self.serv1],
auth=False, test_onion=True, test_cjdns=False)
# -proxy plus -onion, -proxyrandomize
rv = self.node_test(self.nodes[2],
proxies=[self.serv2, self.serv2, self.serv2, self.serv2],
auth=True, test_onion=True, test_cjdns=False)
# Check that credentials as used for -proxyrandomize connections are unique
credentials = set((x.username,x.password) for x in rv)
assert_equal(len(credentials), len(rv))
if self.have_ipv6:
# proxy on IPv6 localhost
self.node_test(self.nodes[3],
proxies=[self.serv3, self.serv3, self.serv3, self.serv3],
auth=False, test_onion=False, test_cjdns=False)
# -proxy=unauth -proxyrandomize=1 -cjdnsreachable
self.node_test(self.nodes[4],
proxies=[self.serv1, self.serv1, self.serv1, self.serv1],
auth=False, test_onion=True, test_cjdns=True)
def networks_dict(d):
r = {}
for x in d['networks']:
r[x['name']] = x
return r
self.log.info("Test RPC getnetworkinfo")
nodes_network_info = []
self.log.debug("Test that setting -proxy disables local address discovery, i.e. -discover=0")
for node in self.nodes:
network_info = node.getnetworkinfo()
assert_equal(network_info["localaddresses"], [])
nodes_network_info.append(network_info)
n0 = networks_dict(nodes_network_info[0])
assert_equal(NETWORKS, n0.keys())
for net in NETWORKS:
if net == NET_I2P:
expected_proxy = ''
expected_randomize = False
else:
expected_proxy = '%s:%i' % (self.conf1.addr)
expected_randomize = True
assert_equal(n0[net]['proxy'], expected_proxy)
assert_equal(n0[net]['proxy_randomize_credentials'], expected_randomize)
assert_equal(n0['onion']['reachable'], True)
assert_equal(n0['i2p']['reachable'], False)
assert_equal(n0['cjdns']['reachable'], False)
n1 = networks_dict(nodes_network_info[1])
assert_equal(NETWORKS, n1.keys())
for net in ['ipv4', 'ipv6']:
assert_equal(n1[net]['proxy'], f'{self.conf1.addr[0]}:{self.conf1.addr[1]}')
assert_equal(n1[net]['proxy_randomize_credentials'], False)
assert_equal(n1['onion']['proxy'], f'{self.conf2.addr[0]}:{self.conf2.addr[1]}')
assert_equal(n1['onion']['proxy_randomize_credentials'], False)
assert_equal(n1['onion']['reachable'], True)
assert_equal(n1['i2p']['proxy'], f'{self.i2p_sam[0]}:{self.i2p_sam[1]}')
assert_equal(n1['i2p']['proxy_randomize_credentials'], False)
assert_equal(n1['i2p']['reachable'], True)
n2 = networks_dict(nodes_network_info[2])
assert_equal(NETWORKS, n2.keys())
proxy = f'{self.conf2.addr[0]}:{self.conf2.addr[1]}'
for net in NETWORKS:
if net == NET_I2P:
expected_proxy = ''
expected_randomize = False
else:
expected_proxy = proxy
expected_randomize = True
assert_equal(n2[net]['proxy'], expected_proxy)
assert_equal(n2[net]['proxy_randomize_credentials'], expected_randomize)
assert_equal(n2['onion']['reachable'], True)
assert_equal(n2['i2p']['reachable'], False)
assert_equal(n2['cjdns']['reachable'], False)
if self.have_ipv6:
n3 = networks_dict(nodes_network_info[3])
assert_equal(NETWORKS, n3.keys())
proxy = f'[{self.conf3.addr[0]}]:{self.conf3.addr[1]}'
for net in NETWORKS:
expected_proxy = '' if net == NET_I2P or net == NET_ONION else proxy
assert_equal(n3[net]['proxy'], expected_proxy)
assert_equal(n3[net]['proxy_randomize_credentials'], False)
assert_equal(n3['onion']['reachable'], False)
assert_equal(n3['i2p']['reachable'], False)
assert_equal(n3['cjdns']['reachable'], False)
n4 = networks_dict(nodes_network_info[4])
assert_equal(NETWORKS, n4.keys())
for net in NETWORKS:
if net == NET_I2P:
expected_proxy = ''
expected_randomize = False
else:
expected_proxy = '%s:%i' % (self.conf1.addr)
expected_randomize = True
assert_equal(n4[net]['proxy'], expected_proxy)
assert_equal(n4[net]['proxy_randomize_credentials'], expected_randomize)
assert_equal(n4['onion']['reachable'], True)
assert_equal(n4['i2p']['reachable'], False)
assert_equal(n4['cjdns']['reachable'], True)
self.stop_node(1)
self.log.info("Test passing invalid -proxy hostname raises expected init error")
self.nodes[1].extra_args = ["-proxy=abc..abc:23456"]
msg = "Error: Invalid -proxy address or hostname: 'abc..abc:23456'"
self.nodes[1].assert_start_raises_init_error(expected_msg=msg)
self.log.info("Test passing invalid -proxy port raises expected init error")
self.nodes[1].extra_args = ["-proxy=192.0.0.1:def"]
msg = "Error: Invalid port specified in -proxy: '192.0.0.1:def'"
self.nodes[1].assert_start_raises_init_error(expected_msg=msg)
self.log.info("Test passing invalid -onion hostname raises expected init error")
self.nodes[1].extra_args = ["-onion=xyz..xyz:23456"]
msg = "Error: Invalid -onion address or hostname: 'xyz..xyz:23456'"
self.nodes[1].assert_start_raises_init_error(expected_msg=msg)
self.log.info("Test passing invalid -onion port raises expected init error")
self.nodes[1].extra_args = ["-onion=192.0.0.1:def"]
msg = "Error: Invalid port specified in -onion: '192.0.0.1:def'"
self.nodes[1].assert_start_raises_init_error(expected_msg=msg)
self.log.info("Test passing invalid -i2psam hostname raises expected init error")
self.nodes[1].extra_args = ["-i2psam=def..def:23456"]
msg = "Error: Invalid -i2psam address or hostname: 'def..def:23456'"
self.nodes[1].assert_start_raises_init_error(expected_msg=msg)
self.log.info("Test passing invalid -i2psam port raises expected init error")
self.nodes[1].extra_args = ["-i2psam=192.0.0.1:def"]
msg = "Error: Invalid port specified in -i2psam: '192.0.0.1:def'"
self.nodes[1].assert_start_raises_init_error(expected_msg=msg)
self.log.info("Test passing invalid -onlynet=i2p without -i2psam raises expected init error")
self.nodes[1].extra_args = ["-onlynet=i2p"]
msg = "Error: Outbound connections restricted to i2p (-onlynet=i2p) but -i2psam is not provided"
self.nodes[1].assert_start_raises_init_error(expected_msg=msg)
self.log.info("Test passing invalid -onlynet=cjdns without -cjdnsreachable raises expected init error")
self.nodes[1].extra_args = ["-onlynet=cjdns"]
msg = "Error: Outbound connections restricted to CJDNS (-onlynet=cjdns) but -cjdnsreachable is not provided"
self.nodes[1].assert_start_raises_init_error(expected_msg=msg)
self.log.info("Test passing -onlynet=onion with -onion=0/-noonion raises expected init error")
msg = (
"Error: Outbound connections restricted to Tor (-onlynet=onion) but "
"the proxy for reaching the Tor network is explicitly forbidden: -onion=0"
)
for arg in ["-onion=0", "-noonion"]:
self.nodes[1].extra_args = ["-onlynet=onion", arg]
self.nodes[1].assert_start_raises_init_error(expected_msg=msg)
self.log.info("Test passing -onlynet=onion without -proxy, -onion or -listenonion raises expected init error")
self.nodes[1].extra_args = ["-onlynet=onion", "-listenonion=0"]
msg = (
"Error: Outbound connections restricted to Tor (-onlynet=onion) but the proxy for "
"reaching the Tor network is not provided: none of -proxy, -onion or -listenonion is given"
)
self.nodes[1].assert_start_raises_init_error(expected_msg=msg)
self.log.info("Test passing -onlynet=onion without -proxy or -onion but with -listenonion=1 is ok")
self.start_node(1, extra_args=["-onlynet=onion", "-listenonion=1"])
self.stop_node(1)
self.log.info("Test passing unknown network to -onlynet raises expected init error")
self.nodes[1].extra_args = ["-onlynet=abc"]
msg = "Error: Unknown network specified in -onlynet: 'abc'"
self.nodes[1].assert_start_raises_init_error(expected_msg=msg)
if __name__ == '__main__':
ProxyTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/wallet_blank.py | #!/usr/bin/env python3
# Copyright (c) 2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or https://www.opensource.org/licenses/mit-license.php.
from test_framework.test_framework import BitcoinTestFramework
from test_framework.address import (
ADDRESS_BCRT1_UNSPENDABLE,
ADDRESS_BCRT1_UNSPENDABLE_DESCRIPTOR,
)
from test_framework.util import (
assert_equal,
)
from test_framework.wallet_util import generate_keypair
class WalletBlankTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def add_options(self, options):
self.add_wallet_options(options)
def test_importaddress(self):
if self.options.descriptors:
return
self.log.info("Test that importaddress unsets the blank flag")
self.nodes[0].createwallet(wallet_name="iaddr", disable_private_keys=True, blank=True)
wallet = self.nodes[0].get_wallet_rpc("iaddr")
info = wallet.getwalletinfo()
assert_equal(info["descriptors"], False)
assert_equal(info["blank"], True)
wallet.importaddress(ADDRESS_BCRT1_UNSPENDABLE)
assert_equal(wallet.getwalletinfo()["blank"], False)
def test_importpubkey(self):
if self.options.descriptors:
return
self.log.info("Test that importpubkey unsets the blank flag")
for i, comp in enumerate([True, False]):
self.nodes[0].createwallet(wallet_name=f"ipub{i}", disable_private_keys=True, blank=True)
wallet = self.nodes[0].get_wallet_rpc(f"ipub{i}")
info = wallet.getwalletinfo()
assert_equal(info["descriptors"], False)
assert_equal(info["blank"], True)
_, pubkey = generate_keypair(compressed=comp)
wallet.importpubkey(pubkey.hex())
assert_equal(wallet.getwalletinfo()["blank"], False)
def test_importprivkey(self):
if self.options.descriptors:
return
self.log.info("Test that importprivkey unsets the blank flag")
for i, comp in enumerate([True, False]):
self.nodes[0].createwallet(wallet_name=f"ipriv{i}", blank=True)
wallet = self.nodes[0].get_wallet_rpc(f"ipriv{i}")
info = wallet.getwalletinfo()
assert_equal(info["descriptors"], False)
assert_equal(info["blank"], True)
wif, _ = generate_keypair(compressed=comp, wif=True)
wallet.importprivkey(wif)
assert_equal(wallet.getwalletinfo()["blank"], False)
def test_importmulti(self):
if self.options.descriptors:
return
self.log.info("Test that importmulti unsets the blank flag")
self.nodes[0].createwallet(wallet_name="imulti", disable_private_keys=True, blank=True)
wallet = self.nodes[0].get_wallet_rpc("imulti")
info = wallet.getwalletinfo()
assert_equal(info["descriptors"], False)
assert_equal(info["blank"], True)
wallet.importmulti([{
"desc": ADDRESS_BCRT1_UNSPENDABLE_DESCRIPTOR,
"timestamp": "now",
}])
assert_equal(wallet.getwalletinfo()["blank"], False)
def test_importdescriptors(self):
if not self.options.descriptors:
return
self.log.info("Test that importdescriptors preserves the blank flag")
self.nodes[0].createwallet(wallet_name="idesc", disable_private_keys=True, blank=True)
wallet = self.nodes[0].get_wallet_rpc("idesc")
info = wallet.getwalletinfo()
assert_equal(info["descriptors"], True)
assert_equal(info["blank"], True)
wallet.importdescriptors([{
"desc": ADDRESS_BCRT1_UNSPENDABLE_DESCRIPTOR,
"timestamp": "now",
}])
assert_equal(wallet.getwalletinfo()["blank"], True)
def test_importwallet(self):
if self.options.descriptors:
return
self.log.info("Test that importwallet unsets the blank flag")
def_wallet = self.nodes[0].get_wallet_rpc(self.default_wallet_name)
self.nodes[0].createwallet(wallet_name="iwallet", blank=True)
wallet = self.nodes[0].get_wallet_rpc("iwallet")
info = wallet.getwalletinfo()
assert_equal(info["descriptors"], False)
assert_equal(info["blank"], True)
wallet_dump_path = self.nodes[0].datadir_path / "wallet.dump"
def_wallet.dumpwallet(wallet_dump_path)
wallet.importwallet(wallet_dump_path)
assert_equal(wallet.getwalletinfo()["blank"], False)
def test_encrypt_legacy(self):
if self.options.descriptors:
return
self.log.info("Test that encrypting a blank legacy wallet preserves the blank flag and does not generate a seed")
self.nodes[0].createwallet(wallet_name="encblanklegacy", blank=True)
wallet = self.nodes[0].get_wallet_rpc("encblanklegacy")
info = wallet.getwalletinfo()
assert_equal(info["descriptors"], False)
assert_equal(info["blank"], True)
assert "hdseedid" not in info
wallet.encryptwallet("pass")
info = wallet.getwalletinfo()
assert_equal(info["blank"], True)
assert "hdseedid" not in info
def test_encrypt_descriptors(self):
if not self.options.descriptors:
return
self.log.info("Test that encrypting a blank descriptor wallet preserves the blank flag and descriptors remain the same")
self.nodes[0].createwallet(wallet_name="encblankdesc", blank=True)
wallet = self.nodes[0].get_wallet_rpc("encblankdesc")
info = wallet.getwalletinfo()
assert_equal(info["descriptors"], True)
assert_equal(info["blank"], True)
descs = wallet.listdescriptors()
wallet.encryptwallet("pass")
assert_equal(wallet.getwalletinfo()["blank"], True)
assert_equal(descs, wallet.listdescriptors())
def run_test(self):
self.test_importaddress()
self.test_importpubkey()
self.test_importprivkey()
self.test_importmulti()
self.test_importdescriptors()
self.test_importwallet()
self.test_encrypt_legacy()
self.test_encrypt_descriptors()
if __name__ == '__main__':
WalletBlankTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/mempool_accept_wtxid.py | #!/usr/bin/env python3
# Copyright (c) 2021 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""
Test mempool acceptance in case of an already known transaction
with identical non-witness data but different witness.
"""
from copy import deepcopy
from test_framework.messages import (
COIN,
COutPoint,
CTransaction,
CTxIn,
CTxInWitness,
CTxOut,
sha256,
)
from test_framework.p2p import P2PTxInvStore
from test_framework.script import (
CScript,
OP_0,
OP_ELSE,
OP_ENDIF,
OP_EQUAL,
OP_HASH160,
OP_IF,
OP_TRUE,
hash160,
)
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
)
class MempoolWtxidTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.setup_clean_chain = True
def run_test(self):
node = self.nodes[0]
self.log.info('Start with empty mempool and 101 blocks')
        # The last 100 coinbase transactions are still immature (not spendable)
blockhash = self.generate(node, 101)[0]
txid = node.getblock(blockhash=blockhash, verbosity=2)["tx"][0]["txid"]
assert_equal(node.getmempoolinfo()['size'], 0)
self.log.info("Submit parent with multiple script branches to mempool")
hashlock = hash160(b'Preimage')
witness_script = CScript([OP_IF, OP_HASH160, hashlock, OP_EQUAL, OP_ELSE, OP_TRUE, OP_ENDIF])
witness_program = sha256(witness_script)
script_pubkey = CScript([OP_0, witness_program])
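        # Standard P2WSH output: OP_0 <sha256(witness_script)>.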
parent = CTransaction()
parent.vin.append(CTxIn(COutPoint(int(txid, 16), 0), b""))
parent.vout.append(CTxOut(int(9.99998 * COIN), script_pubkey))
parent.rehash()
privkeys = [node.get_deterministic_priv_key().key]
raw_parent = node.signrawtransactionwithkey(hexstring=parent.serialize().hex(), privkeys=privkeys)['hex']
parent_txid = node.sendrawtransaction(hexstring=raw_parent, maxfeerate=0)
self.generate(node, 1)
peer_wtxid_relay = node.add_p2p_connection(P2PTxInvStore())
# Create a new transaction with witness solving first branch
child_witness_script = CScript([OP_TRUE])
child_witness_program = sha256(child_witness_script)
child_script_pubkey = CScript([OP_0, child_witness_program])
child_one = CTransaction()
child_one.vin.append(CTxIn(COutPoint(int(parent_txid, 16), 0), b""))
child_one.vout.append(CTxOut(int(9.99996 * COIN), child_script_pubkey))
child_one.wit.vtxinwit.append(CTxInWitness())
child_one.wit.vtxinwit[0].scriptWitness.stack = [b'Preimage', b'\x01', witness_script]
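        # Witness stack: the preimage satisfies the hashlock, b'\x01' selects
        # the OP_IF branch, and the witness script itself comes last.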
child_one_wtxid = child_one.getwtxid()
child_one_txid = child_one.rehash()
# Create another identical transaction with witness solving second branch
child_two = deepcopy(child_one)
child_two.wit.vtxinwit[0].scriptWitness.stack = [b'', witness_script]
child_two_wtxid = child_two.getwtxid()
child_two_txid = child_two.rehash()
assert_equal(child_one_txid, child_two_txid)
assert child_one_wtxid != child_two_wtxid
self.log.info("Submit child_one to the mempool")
txid_submitted = node.sendrawtransaction(child_one.serialize().hex())
assert_equal(node.getmempoolentry(txid_submitted)['wtxid'], child_one_wtxid)
peer_wtxid_relay.wait_for_broadcast([child_one_wtxid])
assert_equal(node.getmempoolinfo()["unbroadcastcount"], 0)
# testmempoolaccept reports the "already in mempool" error
assert_equal(node.testmempoolaccept([child_one.serialize().hex()]), [{
"txid": child_one_txid,
"wtxid": child_one_wtxid,
"allowed": False,
"reject-reason": "txn-already-in-mempool"
}])
assert_equal(node.testmempoolaccept([child_two.serialize().hex()])[0], {
"txid": child_two_txid,
"wtxid": child_two_wtxid,
"allowed": False,
"reject-reason": "txn-same-nonwitness-data-in-mempool"
})
# sendrawtransaction will not throw but quits early when the exact same transaction is already in mempool
node.sendrawtransaction(child_one.serialize().hex())
self.log.info("Connect another peer that hasn't seen child_one before")
peer_wtxid_relay_2 = node.add_p2p_connection(P2PTxInvStore())
self.log.info("Submit child_two to the mempool")
# sendrawtransaction will not throw but quits early when a transaction with the same non-witness data is already in mempool
node.sendrawtransaction(child_two.serialize().hex())
# The node should rebroadcast the transaction using the wtxid of the correct transaction
# (child_one, which is in its mempool).
peer_wtxid_relay_2.wait_for_broadcast([child_one_wtxid])
assert_equal(node.getmempoolinfo()["unbroadcastcount"], 0)
if __name__ == '__main__':
MempoolWtxidTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/feature_anchors.py | #!/usr/bin/env python3
# Copyright (c) 2020-2021 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test block-relay-only anchors functionality"""
import os
from test_framework.p2p import P2PInterface, P2P_SERVICES
from test_framework.socks5 import Socks5Configuration, Socks5Server
from test_framework.messages import CAddress, hash256
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import check_node_connections, assert_equal, p2p_port
INBOUND_CONNECTIONS = 5
BLOCK_RELAY_CONNECTIONS = 2
ONION_ADDR = "pg6mmjiyjmcrsslvykfwnntlaru7p5svn6y2ymmju6nubxndf4pscryd.onion:8333"
class AnchorsTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.disable_autoconnect = False
def run_test(self):
node_anchors_path = self.nodes[0].chain_path / "anchors.dat"
self.log.info("When node starts, check if anchors.dat doesn't exist")
assert not os.path.exists(node_anchors_path)
self.log.info(f"Add {BLOCK_RELAY_CONNECTIONS} block-relay-only connections to node")
for i in range(BLOCK_RELAY_CONNECTIONS):
self.log.debug(f"block-relay-only: {i}")
self.nodes[0].add_outbound_p2p_connection(
P2PInterface(), p2p_idx=i, connection_type="block-relay-only"
)
self.log.info(f"Add {INBOUND_CONNECTIONS} inbound connections to node")
for i in range(INBOUND_CONNECTIONS):
self.log.debug(f"inbound: {i}")
self.nodes[0].add_p2p_connection(P2PInterface())
self.log.info("Check node connections")
check_node_connections(node=self.nodes[0], num_in=INBOUND_CONNECTIONS, num_out=BLOCK_RELAY_CONNECTIONS)
# 127.0.0.1
ip = "7f000001"
# Since the ip is always 127.0.0.1 for this case,
# we store only the port to identify the peers
block_relay_nodes_port = []
inbound_nodes_port = []
for p in self.nodes[0].getpeerinfo():
addr_split = p["addr"].split(":")
if p["connection_type"] == "block-relay-only":
block_relay_nodes_port.append(hex(int(addr_split[1]))[2:])
else:
inbound_nodes_port.append(hex(int(addr_split[1]))[2:])
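# Note: these ports are later matched as hex substrings of the raw
# anchors.dat contents. The framework's regtest p2p ports are large enough
# that hex() yields no leading zero to strip, an assumption this matching
# relies on rather than a general property of ports.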
self.log.debug("Stop node")
self.stop_node(0)
# It should contain only the block-relay-only addresses
self.log.info("Check the addresses in anchors.dat")
with open(node_anchors_path, "rb") as file_handler:
anchors = file_handler.read()
anchors_hex = anchors.hex()
for port in block_relay_nodes_port:
ip_port = ip + port
assert ip_port in anchors_hex
for port in inbound_nodes_port:
ip_port = ip + port
assert ip_port not in anchors_hex
self.log.info("Perturb anchors.dat to test it doesn't throw an error during initialization")
with self.nodes[0].assert_debug_log(["0 block-relay-only anchors will be tried for connections."]):
with open(node_anchors_path, "wb") as out_file_handler:
tweaked_contents = bytearray(anchors)
tweaked_contents[20:20] = b'1'
out_file_handler.write(bytes(tweaked_contents))
self.log.debug("Start node")
self.start_node(0)
self.log.info("When node starts, check if anchors.dat doesn't exist anymore")
assert not os.path.exists(node_anchors_path)
self.log.info("Ensure addrv2 support")
# Use proxies to catch outbound connections to networks with 256-bit addresses
onion_conf = Socks5Configuration()
onion_conf.auth = True
onion_conf.unauth = True
onion_conf.addr = ('127.0.0.1', p2p_port(self.num_nodes))
onion_conf.keep_alive = True
onion_proxy = Socks5Server(onion_conf)
onion_proxy.start()
self.restart_node(0, extra_args=[f"-onion={onion_conf.addr[0]}:{onion_conf.addr[1]}"])
self.log.info("Add 256-bit-address block-relay-only connections to node")
self.nodes[0].addconnection(ONION_ADDR, 'block-relay-only')
self.log.debug("Stop node")
with self.nodes[0].assert_debug_log([f"DumpAnchors: Flush 1 outbound block-relay-only peer addresses to anchors.dat"]):
self.stop_node(0)
# Manually close keep_alive proxy connection
onion_proxy.stop()
self.log.info("Check for addrv2 addresses in anchors.dat")
caddr = CAddress()
caddr.net = CAddress.NET_TORV3
caddr.ip, port_str = ONION_ADDR.split(":")
caddr.port = int(port_str)
# TorV3 addrv2 serialization:
# time(4) | services(1) | networkID(1) | address length(1) | address(32)
expected_pubkey = caddr.serialize_v2()[7:39].hex()
# position of services byte of first addr in anchors.dat
# network magic, vector length, version, nTime
services_index = 4 + 1 + 4 + 4
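# As a byte-offset map (derived from the field list above, an assumption
# based on this test rather than a formal spec of the file format):
#   bytes 0..3   network magic
#   byte  4      vector length
#   bytes 5..8   version
#   bytes 9..12  nTime of the first address
#   byte  13     services byte checked below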
data = bytes()
with open(node_anchors_path, "rb") as file_handler:
data = file_handler.read()
assert_equal(data[services_index], 0x00) # services == NONE
anchors2 = data.hex()
assert expected_pubkey in anchors2
with open(node_anchors_path, "wb") as file_handler:
# Modify service flags for this address even though we never connected to it.
# This is necessary because on restart we will not attempt an anchor connection
# to a host without our required services, even if its address is in the anchors.dat file
new_data = bytearray(data)[:-32]
new_data[services_index] = P2P_SERVICES
new_data_hash = hash256(new_data)
file_handler.write(new_data + new_data_hash)
self.log.info("Restarting node attempts to reconnect to anchors")
with self.nodes[0].assert_debug_log([f"Trying to make an anchor connection to {ONION_ADDR}"]):
self.start_node(0, extra_args=[f"-onion={onion_conf.addr[0]}:{onion_conf.addr[1]}"])
if __name__ == "__main__":
AnchorsTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/rpc_generate.py | #!/usr/bin/env python3
# Copyright (c) 2020-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test generate* RPCs."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.wallet import MiniWallet
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
)
class RPCGenerateTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
def run_test(self):
self.test_generatetoaddress()
self.test_generate()
self.test_generateblock()
def test_generatetoaddress(self):
self.generatetoaddress(self.nodes[0], 1, 'mneYUmWYsuk7kySiURxCi3AGxrAqZxLgPZ')
assert_raises_rpc_error(-5, "Invalid address", self.generatetoaddress, self.nodes[0], 1, '3J98t1WpEZ73CNmQviecrnyiWrnqRhWNLy')
def test_generateblock(self):
node = self.nodes[0]
miniwallet = MiniWallet(node)
self.log.info('Mine an empty block to address and return the hex')
address = miniwallet.get_address()
generated_block = self.generateblock(node, output=address, transactions=[], submit=False)
node.submitblock(hexdata=generated_block['hex'])
assert_equal(generated_block['hash'], node.getbestblockhash())
self.log.info('Generate an empty block to address')
hash = self.generateblock(node, output=address, transactions=[])['hash']
block = node.getblock(blockhash=hash, verbose=2)
assert_equal(len(block['tx']), 1)
assert_equal(block['tx'][0]['vout'][0]['scriptPubKey']['address'], address)
self.log.info('Generate an empty block to a descriptor')
hash = self.generateblock(node, 'addr(' + address + ')', [])['hash']
block = node.getblock(blockhash=hash, verbosity=2)
assert_equal(len(block['tx']), 1)
assert_equal(block['tx'][0]['vout'][0]['scriptPubKey']['address'], address)
self.log.info('Generate an empty block to a combo descriptor with compressed pubkey')
combo_key = '0279be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798'
combo_address = 'bcrt1qw508d6qejxtdg4y5r3zarvary0c5xw7kygt080'
hash = self.generateblock(node, 'combo(' + combo_key + ')', [])['hash']
block = node.getblock(hash, 2)
assert_equal(len(block['tx']), 1)
assert_equal(block['tx'][0]['vout'][0]['scriptPubKey']['address'], combo_address)
self.log.info('Generate an empty block to a combo descriptor with uncompressed pubkey')
combo_key = '0408ef68c46d20596cc3f6ddf7c8794f71913add807f1dc55949fa805d764d191c0b7ce6894c126fce0babc6663042f3dde9b0cf76467ea315514e5a6731149c67'
combo_address = 'mkc9STceoCcjoXEXe6cm66iJbmjM6zR9B2'
hash = self.generateblock(node, 'combo(' + combo_key + ')', [])['hash']
block = node.getblock(hash, 2)
assert_equal(len(block['tx']), 1)
assert_equal(block['tx'][0]['vout'][0]['scriptPubKey']['address'], combo_address)
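# Contrast with the compressed-key case above: combo() for an uncompressed
# pubkey expands only to pk()/pkh() scripts (segwit outputs require
# compressed keys), which is why the expected address here is legacy P2PKH
# rather than bech32.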
# Generate some extra mempool transactions to verify they don't get mined
for _ in range(10):
miniwallet.send_self_transfer(from_node=node)
self.log.info('Generate block with txid')
txid = miniwallet.send_self_transfer(from_node=node)['txid']
hash = self.generateblock(node, address, [txid])['hash']
block = node.getblock(hash, 1)
assert_equal(len(block['tx']), 2)
assert_equal(block['tx'][1], txid)
self.log.info('Generate block with raw tx')
rawtx = miniwallet.create_self_transfer()['hex']
hash = self.generateblock(node, address, [rawtx])['hash']
block = node.getblock(hash, 1)
assert_equal(len(block['tx']), 2)
txid = block['tx'][1]
assert_equal(node.getrawtransaction(txid=txid, verbose=False, blockhash=hash), rawtx)
self.log.info('Fail to generate block with out of order txs')
txid1 = miniwallet.send_self_transfer(from_node=node)['txid']
utxo1 = miniwallet.get_utxo(txid=txid1)
rawtx2 = miniwallet.create_self_transfer(utxo_to_spend=utxo1)['hex']
assert_raises_rpc_error(-25, 'TestBlockValidity failed: bad-txns-inputs-missingorspent', self.generateblock, node, address, [rawtx2, txid1])
self.log.info('Fail to generate block with txid not in mempool')
missing_txid = '0000000000000000000000000000000000000000000000000000000000000000'
assert_raises_rpc_error(-5, 'Transaction ' + missing_txid + ' not in mempool.', self.generateblock, node, address, [missing_txid])
self.log.info('Fail to generate block with invalid raw tx')
invalid_raw_tx = '0000'
assert_raises_rpc_error(-22, 'Transaction decode failed for ' + invalid_raw_tx, self.generateblock, node, address, [invalid_raw_tx])
self.log.info('Fail to generate block with invalid address/descriptor')
assert_raises_rpc_error(-5, 'Invalid address or descriptor', self.generateblock, node, '1234', [])
self.log.info('Fail to generate block with a ranged descriptor')
ranged_descriptor = 'pkh(tpubD6NzVbkrYhZ4XgiXtGrdW5XDAPFCL9h7we1vwNCpn8tGbBcgfVYjXyhWo4E1xkh56hjod1RhGjxbaTLV3X4FyWuejifB9jusQ46QzG87VKp/0/*)'
assert_raises_rpc_error(-8, 'Ranged descriptor not accepted. Maybe pass through deriveaddresses first?', self.generateblock, node, ranged_descriptor, [])
self.log.info('Fail to generate block with a descriptor missing a private key')
child_descriptor = 'pkh(tpubD6NzVbkrYhZ4XgiXtGrdW5XDAPFCL9h7we1vwNCpn8tGbBcgfVYjXyhWo4E1xkh56hjod1RhGjxbaTLV3X4FyWuejifB9jusQ46QzG87VKp/0\'/0)'
assert_raises_rpc_error(-5, 'Cannot derive script without private keys', self.generateblock, node, child_descriptor, [])
def test_generate(self):
message = (
"generate\n\n"
"has been replaced by the -generate "
"cli option. Refer to -help for more information.\n"
)
self.log.info("Test rpc generate raises with message to use cli option")
assert_raises_rpc_error(-32601, message, self.nodes[0].rpc.generate)
self.log.info("Test rpc generate help prints message to use cli option")
assert_equal(message, self.nodes[0].help("generate"))
self.log.info("Test rpc generate is a hidden command not discoverable in general help")
assert message not in self.nodes[0].help()
if __name__ == "__main__":
RPCGenerateTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/p2p_ibd_txrelay.py | #!/usr/bin/env python3
# Copyright (c) 2020-2021 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test transaction relay behavior during IBD:
- Set fee filters to MAX_MONEY
- Don't request transactions
- Ignore all transaction messages
"""
from decimal import Decimal
import time
from test_framework.messages import (
CInv,
COIN,
CTransaction,
from_hex,
msg_inv,
msg_tx,
MSG_WTX,
)
from test_framework.p2p import (
NONPREF_PEER_TX_DELAY,
P2PDataStore,
P2PInterface,
p2p_lock
)
from test_framework.test_framework import BitcoinTestFramework
MAX_FEE_FILTER = Decimal(9170997) / COIN
NORMAL_FEE_FILTER = Decimal(100) / COIN
class P2PIBDTxRelayTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 2
self.extra_args = [
["-minrelaytxfee={}".format(NORMAL_FEE_FILTER)],
["-minrelaytxfee={}".format(NORMAL_FEE_FILTER)],
]
def run_test(self):
self.log.info("Check that nodes set minfilter to MAX_MONEY while still in IBD")
for node in self.nodes:
assert node.getblockchaininfo()['initialblockdownload']
self.wait_until(lambda: all(peer['minfeefilter'] == MAX_FEE_FILTER for peer in node.getpeerinfo()))
self.log.info("Check that nodes don't send getdatas for transactions while still in IBD")
peer_inver = self.nodes[0].add_p2p_connection(P2PDataStore())
txid = 0xdeadbeef
peer_inver.send_and_ping(msg_inv([CInv(t=MSG_WTX, h=txid)]))
# The node should not send a getdata, but if it did, it would first delay 2 seconds
self.nodes[0].setmocktime(int(time.time() + NONPREF_PEER_TX_DELAY))
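# Jumping mocktime forward by the full delay means any scheduled getdata
# would have become due by now, so the absence of a request below reflects
# IBD behavior rather than the delay simply not having elapsed yet.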
peer_inver.sync_with_ping()
with p2p_lock:
assert txid not in peer_inver.getdata_requests
self.nodes[0].disconnect_p2ps()
self.log.info("Check that nodes don't process unsolicited transactions while still in IBD")
# A transaction hex pulled from tx_valid.json. It cannot actually be valid,
# since no UTXOs exist yet on this clean chain, but it is well-formed.
rawhex = "0100000001b14bdcbc3e01bdaad36cc08e81e69c82e1060bc14e518db2b49aa43ad90ba260000000004a01ff473" + \
"04402203f16c6f40162ab686621ef3000b04e75418a0c0cb2d8aebeac894ae360ac1e780220ddc15ecdfc3507ac48e168" + \
"1a33eb60996631bf6bf5bc0a0682c4db743ce7ca2b01ffffffff0140420f00000000001976a914660d4ef3a743e3e696a" + \
"d990364e555c271ad504b88ac00000000"
assert self.nodes[1].decoderawtransaction(rawhex) # returns a dict, should not throw
tx = from_hex(CTransaction(), rawhex)
peer_txer = self.nodes[0].add_p2p_connection(P2PInterface())
with self.nodes[0].assert_debug_log(expected_msgs=["received: tx"], unexpected_msgs=["was not accepted"]):
peer_txer.send_and_ping(msg_tx(tx))
self.nodes[0].disconnect_p2ps()
# Come out of IBD by generating a block
self.generate(self.nodes[0], 1)
self.log.info("Check that nodes reset minfilter after coming out of IBD")
for node in self.nodes:
assert not node.getblockchaininfo()['initialblockdownload']
self.wait_until(lambda: all(peer['minfeefilter'] == NORMAL_FEE_FILTER for peer in node.getpeerinfo()))
self.log.info("Check that nodes process the same transaction, even when unsolicited, when no longer in IBD")
peer_txer = self.nodes[0].add_p2p_connection(P2PInterface())
with self.nodes[0].assert_debug_log(expected_msgs=["was not accepted"]):
peer_txer.send_and_ping(msg_tx(tx))
if __name__ == '__main__':
P2PIBDTxRelayTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/p2p_i2p_ports.py | #!/usr/bin/env python3
# Copyright (c) 2021-2021 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""
Test port handling for I2P hosts
"""
import re
from test_framework.test_framework import BitcoinTestFramework
class I2PPorts(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
# The test assumes that an I2P SAM proxy is not listening here.
self.extra_args = [["-i2psam=127.0.0.1:60000"]]
def run_test(self):
node = self.nodes[0]
self.log.info("Ensure we don't try to connect if port!=0")
addr = "zsxwyo6qcn3chqzwxnseusqgsnuw3maqnztkiypyfxtya4snkoka.b32.i2p:8333"
raised = False
try:
with node.assert_debug_log(expected_msgs=[f"Error connecting to {addr}"]):
node.addnode(node=addr, command="onetry")
except AssertionError as e:
raised = True
if not re.search(r"Expected messages .* does not partially match log", str(e)):
raise AssertionError(f"Assertion raised as expected, but with an unexpected message: {str(e)}")
if not raised:
raise AssertionError("Assertion should have been raised")
self.log.info("Ensure we try to connect if port=0 and get an error due to missing I2P proxy")
addr = "h3r6bkn46qxftwja53pxiykntegfyfjqtnzbm6iv6r5mungmqgmq.b32.i2p:0"
with node.assert_debug_log(expected_msgs=[f"Error connecting to {addr}"]):
node.addnode(node=addr, command="onetry")
if __name__ == '__main__':
I2PPorts().main()
| 0 |
bitcoin/test | bitcoin/test/functional/wallet_pruning.py | #!/usr/bin/env python3
# Copyright (c) 2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test wallet import on pruned node."""
from test_framework.util import assert_equal, assert_raises_rpc_error
from test_framework.blocktools import (
COINBASE_MATURITY,
create_block
)
from test_framework.blocktools import create_coinbase
from test_framework.test_framework import BitcoinTestFramework
from test_framework.script import (
CScript,
OP_RETURN,
OP_TRUE,
)
class WalletPruningTest(BitcoinTestFramework):
def add_options(self, parser):
self.add_wallet_options(parser, descriptors=False)
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 2
self.wallet_names = []
self.extra_args = [
[], # node dedicated to mining
['-prune=550'], # node dedicated to testing pruning
]
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
self.skip_if_no_bdb()
def mine_large_blocks(self, node, n):
# Get the block parameters for the first block
best_block = node.getblockheader(node.getbestblockhash())
height = int(best_block["height"]) + 1
self.nTime = max(self.nTime, int(best_block["time"])) + 1
previousblockhash = int(best_block["hash"], 16)
big_script = CScript([OP_RETURN] + [OP_TRUE] * 950000)
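# Back-of-envelope sizing: each OP_TRUE serializes to one byte, so this
# scriptPubKey is roughly 950 kB and every block comes close to the
# maximum non-witness block size (~1 MB); ~50 such blocks give the
# "50MB worth" that run_test mentions when filling blk00000.dat.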
# Set mocktime to accept all future blocks
for i in self.nodes:
if i.running:
i.setmocktime(self.nTime + 600 * n)
for _ in range(n):
block = create_block(hashprev=previousblockhash, ntime=self.nTime, coinbase=create_coinbase(height, script_pubkey=big_script))
block.solve()
# Submit to the node
node.submitblock(block.serialize().hex())
previousblockhash = block.sha256
height += 1
# Simulate 10 minutes of work time per block
# Important for matching a timestamp with a block +- some window
self.nTime += 600
self.sync_all()
def test_wallet_import_pruned(self, wallet_name):
self.log.info("Make sure we can import wallet when pruned and required blocks are still available")
wallet_file = wallet_name + ".dat"
wallet_birthheight = self.get_birthheight(wallet_file)
# Verify that the block at wallet's birthheight is available at the pruned node
self.nodes[1].getblock(self.nodes[1].getblockhash(wallet_birthheight))
# Import wallet into pruned node
self.nodes[1].createwallet(wallet_name="wallet_pruned", descriptors=False, load_on_startup=True)
self.nodes[1].importwallet(self.nodes[0].datadir_path / wallet_file)
# Make sure that the pruned node's wallet correctly accounts for balances
assert_equal(self.nodes[1].getbalance(), self.nodes[0].getbalance())
self.log.info("- Done")
def test_wallet_import_pruned_with_missing_blocks(self, wallet_name):
self.log.info("Make sure we cannot import wallet when pruned and required blocks are not available")
wallet_file = wallet_name + ".dat"
wallet_birthheight = self.get_birthheight(wallet_file)
# Verify that the block at the wallet's birthheight is not available on the pruned node
assert_raises_rpc_error(-1, "Block not available (pruned data)", self.nodes[1].getblock, self.nodes[1].getblockhash(wallet_birthheight))
# Make sure wallet cannot be imported because of missing blocks
# This will try to rescan blocks `TIMESTAMP_WINDOW` (2h) before the wallet birthheight.
# There are 6 blocks an hour, so 11 blocks (excluding birthheight).
assert_raises_rpc_error(-4, f"Pruned blocks from height {wallet_birthheight - 11} required to import keys. Use RPC call getblockchaininfo to determine your pruned height.", self.nodes[1].importwallet, self.nodes[0].datadir_path / wallet_file)
self.log.info("- Done")
def get_birthheight(self, wallet_file):
"""Gets birthheight of a wallet on node0"""
with open(self.nodes[0].datadir_path / wallet_file, 'r', encoding="utf8") as f:
for line in f:
if line.startswith('# * Best block at time of backup'):
wallet_birthheight = int(line.split(' ')[9])
return wallet_birthheight
def has_block(self, block_index):
"""Checks if the pruned node has the specific blk0000*.dat file"""
return (self.nodes[1].blocks_path / f"blk{block_index:05}.dat").is_file()
def create_wallet(self, wallet_name, *, unload=False):
"""Creates and dumps a wallet on the non-pruned node0 to be later import by the pruned node"""
self.nodes[0].createwallet(wallet_name=wallet_name, descriptors=False, load_on_startup=True)
self.nodes[0].dumpwallet(self.nodes[0].datadir_path / f"{wallet_name}.dat")
if (unload):
self.nodes[0].unloadwallet(wallet_name)
def run_test(self):
self.nTime = 0
self.log.info("Warning! This test requires ~1.3GB of disk space")
self.log.info("Generating a long chain of blocks...")
# A blk*.dat file is 128MB
# Generate 250 light blocks
self.generate(self.nodes[0], 250)
# Generate 50MB worth of large blocks in the blk00000.dat file
self.mine_large_blocks(self.nodes[0], 50)
# Create a wallet whose birth block is in the blk00000.dat file
wallet_birthheight_1 = "wallet_birthheight_1"
assert_equal(self.has_block(1), False)
self.create_wallet(wallet_birthheight_1, unload=True)
# Generate enough large blocks to reach pruning disk limit
# Not pruning yet because we are still below PruneAfterHeight
self.mine_large_blocks(self.nodes[0], 600)
self.log.info("- Long chain created")
# Create a wallet with birth height > wallet_birthheight_1
wallet_birthheight_2 = "wallet_birthheight_2"
self.create_wallet(wallet_birthheight_2)
# Fund wallet to later verify that importwallet correctly accounts for balances
self.generatetoaddress(self.nodes[0], COINBASE_MATURITY + 1, self.nodes[0].getnewaddress(), sync_fun=self.no_op)
# We've reached pruning storage & height limit but
# pruning doesn't run until another chunk (blk*.dat file) is allocated.
# That's why we are generating another 5 large blocks
self.mine_large_blocks(self.nodes[0], 5)
# blk00000.dat file is now pruned from node1
assert_equal(self.has_block(0), False)
self.test_wallet_import_pruned(wallet_birthheight_2)
self.test_wallet_import_pruned_with_missing_blocks(wallet_birthheight_1)
if __name__ == '__main__':
WalletPruningTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/p2p_addrfetch.py | #!/usr/bin/env python3
# Copyright (c) 2021 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""
Test p2p addr-fetch connections
"""
import time
from test_framework.messages import (
CAddress,
msg_addr,
)
from test_framework.p2p import (
P2PInterface,
p2p_lock,
P2P_SERVICES,
)
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal
ADDR = CAddress()
ADDR.time = int(time.time())
ADDR.nServices = P2P_SERVICES
ADDR.ip = "192.0.0.8"
ADDR.port = 18444
class P2PAddrFetch(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
def assert_getpeerinfo(self, *, peer_ids):
num_peers = len(peer_ids)
info = self.nodes[0].getpeerinfo()
assert_equal(len(info), num_peers)
for n in range(0, num_peers):
assert_equal(info[n]['id'], peer_ids[n])
assert_equal(info[n]['connection_type'], 'addr-fetch')
def run_test(self):
node = self.nodes[0]
self.log.info("Connect to an addr-fetch peer")
peer_id = 0
peer = node.add_outbound_p2p_connection(P2PInterface(), p2p_idx=peer_id, connection_type="addr-fetch")
self.assert_getpeerinfo(peer_ids=[peer_id])
self.log.info("Check that we send getaddr but don't try to sync headers with the addr-fetch peer")
peer.sync_with_ping()
with p2p_lock:
assert peer.message_count['getaddr'] == 1
assert peer.message_count['getheaders'] == 0
self.log.info("Check that answering the getaddr with a single address does not lead to disconnect")
# This prevents disconnecting on self-announcements
msg = msg_addr()
msg.addrs = [ADDR]
peer.send_and_ping(msg)
self.assert_getpeerinfo(peer_ids=[peer_id])
self.log.info("Check that answering with larger addr messages leads to disconnect")
msg.addrs = [ADDR] * 2
peer.send_message(msg)
peer.wait_for_disconnect(timeout=5)
self.log.info("Check timeout for addr-fetch peer that does not send addrs")
peer_id = 1
peer = node.add_outbound_p2p_connection(P2PInterface(), p2p_idx=peer_id, connection_type="addr-fetch")
time_now = int(time.time())
self.assert_getpeerinfo(peer_ids=[peer_id])
# Expect addr-fetch peer connection to be maintained up to 5 minutes.
node.setmocktime(time_now + 295)
self.assert_getpeerinfo(peer_ids=[peer_id])
# Expect addr-fetch peer connection to be disconnected after 5 minutes.
node.setmocktime(time_now + 301)
peer.wait_for_disconnect(timeout=5)
self.assert_getpeerinfo(peer_ids=[])
if __name__ == '__main__':
P2PAddrFetch().main()
| 0 |
bitcoin/test | bitcoin/test/functional/wallet_import_with_label.py | #!/usr/bin/env python3
# Copyright (c) 2018-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the behavior of RPC importprivkey on set and unset labels of
addresses.
It tests different cases in which an address is imported with importaddress
with or without a label and then its private key is imported with importprivkey
with and without a label.
"""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.wallet_util import test_address
class ImportWithLabel(BitcoinTestFramework):
def add_options(self, parser):
self.add_wallet_options(parser, descriptors=False)
def set_test_params(self):
self.num_nodes = 2
self.setup_clean_chain = True
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def run_test(self):
"""Main test logic"""
self.log.info(
"Test importaddress with label and importprivkey without label."
)
self.log.info("Import a watch-only address with a label.")
address = self.nodes[0].getnewaddress()
label = "Test Label"
self.nodes[1].importaddress(address, label)
test_address(self.nodes[1],
address,
iswatchonly=True,
ismine=False,
labels=[label])
self.log.info(
"Import the watch-only address's private key without a "
"label and the address should keep its label."
)
priv_key = self.nodes[0].dumpprivkey(address)
self.nodes[1].importprivkey(priv_key)
test_address(self.nodes[1], address, labels=[label])
self.log.info(
"Test importaddress without label and importprivkey with label."
)
self.log.info("Import a watch-only address without a label.")
address2 = self.nodes[0].getnewaddress()
self.nodes[1].importaddress(address2)
test_address(self.nodes[1],
address2,
iswatchonly=True,
ismine=False,
labels=[""])
self.log.info(
"Import the watch-only address's private key with a "
"label and the address should have its label updated."
)
priv_key2 = self.nodes[0].dumpprivkey(address2)
label2 = "Test Label 2"
self.nodes[1].importprivkey(priv_key2, label2)
test_address(self.nodes[1], address2, labels=[label2])
self.log.info("Test importaddress with label and importprivkey with label.")
self.log.info("Import a watch-only address with a label.")
address3 = self.nodes[0].getnewaddress()
label3_addr = "Test Label 3 for importaddress"
self.nodes[1].importaddress(address3, label3_addr)
test_address(self.nodes[1],
address3,
iswatchonly=True,
ismine=False,
labels=[label3_addr])
self.log.info(
"Import the watch-only address's private key with a "
"label and the address should have its label updated."
)
priv_key3 = self.nodes[0].dumpprivkey(address3)
label3_priv = "Test Label 3 for importprivkey"
self.nodes[1].importprivkey(priv_key3, label3_priv)
test_address(self.nodes[1], address3, labels=[label3_priv])
self.log.info(
"Test importprivkey won't label new dests with the same "
"label as others labeled dests for the same key."
)
self.log.info("Import a watch-only p2sh-segwit address with a label.")
address4 = self.nodes[0].getnewaddress("", "p2sh-segwit")
label4_addr = "Test Label 4 for importaddress"
self.nodes[1].importaddress(address4, label4_addr)
test_address(self.nodes[1],
address4,
iswatchonly=True,
ismine=False,
labels=[label4_addr],
embedded=None)
self.log.info(
"Import the watch-only address's private key without a "
"label and new destinations for the key should have an "
"empty label while the 'old' destination should keep "
"its label."
)
priv_key4 = self.nodes[0].dumpprivkey(address4)
self.nodes[1].importprivkey(priv_key4)
embedded_addr = self.nodes[1].getaddressinfo(address4)['embedded']['address']
test_address(self.nodes[1], embedded_addr, labels=[""])
test_address(self.nodes[1], address4, labels=[label4_addr])
self.stop_nodes()
if __name__ == "__main__":
ImportWithLabel().main()
| 0 |
bitcoin/test | bitcoin/test/functional/feature_nulldummy.py | #!/usr/bin/env python3
# Copyright (c) 2016-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test NULLDUMMY softfork.
Connect to a single node.
Generate 2 blocks (save the coinbases for later).
Generate COINBASE_MATURITY (CB) more blocks to ensure the coinbases are mature.
[Policy/Consensus] Check that NULLDUMMY compliant transactions are accepted in block CB + 3.
[Policy] Check that non-NULLDUMMY transactions are rejected before activation.
[Consensus] Check that the new NULLDUMMY rules are not enforced on block CB + 4.
[Policy/Consensus] Check that the new NULLDUMMY rules are enforced on block CB + 5.
"""
import time
from test_framework.address import address_to_scriptpubkey
from test_framework.blocktools import (
COINBASE_MATURITY,
NORMAL_GBT_REQUEST_PARAMS,
add_witness_commitment,
create_block,
)
from test_framework.messages import (
CTransaction,
tx_from_hex,
)
from test_framework.script import (
OP_0,
OP_TRUE,
)
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
)
from test_framework.wallet import getnewdestination
from test_framework.wallet_util import generate_keypair
NULLDUMMY_ERROR = "mandatory-script-verify-flag-failed (Dummy CHECKMULTISIG argument must be zero)"
def invalidate_nulldummy_tx(tx):
"""Transform a NULLDUMMY compliant tx (i.e. scriptSig starts with OP_0)
to be non-NULLDUMMY compliant by replacing the dummy with OP_TRUE"""
assert_equal(tx.vin[0].scriptSig[0], OP_0)
tx.vin[0].scriptSig = bytes([OP_TRUE]) + tx.vin[0].scriptSig[1:]
tx.rehash()
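# Context: OP_CHECKMULTISIG pops one element more than it uses, a quirk of
# its original implementation. BIP147 (NULLDUMMY) requires that this dummy
# element be the empty vector, i.e. OP_0; swapping in OP_TRUE keeps the
# signatures valid while violating the rule, which is exactly what the
# tests below need.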
class NULLDUMMYTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.setup_clean_chain = True
# This script tests NULLDUMMY activation, which is part of the 'segwit' deployment, so we go through
# normal segwit activation here (and don't use the default always-on behaviour).
self.extra_args = [[
f'-testactivationheight=segwit@{COINBASE_MATURITY + 5}',
'-addresstype=legacy',
'-par=1', # Use only one script thread to get the exact reject reason for testing
]]
def create_transaction(self, *, txid, input_details=None, addr, amount, privkey):
input = {"txid": txid, "vout": 0}
output = {addr: amount}
rawtx = self.nodes[0].createrawtransaction([input], output)
# Details only needed for scripthash or witness spends
input = None if not input_details else [{**input, **input_details}]
signedtx = self.nodes[0].signrawtransactionwithkey(rawtx, [privkey], input)
return tx_from_hex(signedtx["hex"])
def run_test(self):
self.privkey, self.pubkey = generate_keypair(wif=True)
cms = self.nodes[0].createmultisig(1, [self.pubkey.hex()])
wms = self.nodes[0].createmultisig(1, [self.pubkey.hex()], 'p2sh-segwit')
self.ms_address = cms["address"]
ms_unlock_details = {"scriptPubKey": address_to_scriptpubkey(self.ms_address).hex(),
"redeemScript": cms["redeemScript"]}
self.wit_ms_address = wms['address']
self.coinbase_blocks = self.generate(self.nodes[0], 2) # block height = 2
coinbase_txid = []
for i in self.coinbase_blocks:
coinbase_txid.append(self.nodes[0].getblock(i)['tx'][0])
self.generate(self.nodes[0], COINBASE_MATURITY) # block height = COINBASE_MATURITY + 2
self.lastblockhash = self.nodes[0].getbestblockhash()
self.lastblockheight = COINBASE_MATURITY + 2
self.lastblocktime = int(time.time()) + self.lastblockheight
self.log.info(f"Test 1: NULLDUMMY compliant base transactions should be accepted to mempool and mined before activation [{COINBASE_MATURITY + 3}]")
test1txs = [self.create_transaction(txid=coinbase_txid[0], addr=self.ms_address, amount=49,
privkey=self.nodes[0].get_deterministic_priv_key().key)]
txid1 = self.nodes[0].sendrawtransaction(test1txs[0].serialize_with_witness().hex(), 0)
test1txs.append(self.create_transaction(txid=txid1, input_details=ms_unlock_details,
addr=self.ms_address, amount=48,
privkey=self.privkey))
txid2 = self.nodes[0].sendrawtransaction(test1txs[1].serialize_with_witness().hex(), 0)
test1txs.append(self.create_transaction(txid=coinbase_txid[1],
addr=self.wit_ms_address, amount=49,
privkey=self.nodes[0].get_deterministic_priv_key().key))
txid3 = self.nodes[0].sendrawtransaction(test1txs[2].serialize_with_witness().hex(), 0)
self.block_submit(self.nodes[0], test1txs, accept=True)
self.log.info("Test 2: Non-NULLDUMMY base multisig transaction should not be accepted to mempool before activation")
test2tx = self.create_transaction(txid=txid2, input_details=ms_unlock_details,
addr=self.ms_address, amount=47,
privkey=self.privkey)
invalidate_nulldummy_tx(test2tx)
assert_raises_rpc_error(-26, NULLDUMMY_ERROR, self.nodes[0].sendrawtransaction, test2tx.serialize_with_witness().hex(), 0)
self.log.info(f"Test 3: Non-NULLDUMMY base transactions should be accepted in a block before activation [{COINBASE_MATURITY + 4}]")
self.block_submit(self.nodes[0], [test2tx], accept=True)
self.log.info("Test 4: Non-NULLDUMMY base multisig transaction is invalid after activation")
test4tx = self.create_transaction(txid=test2tx.hash, input_details=ms_unlock_details,
addr=getnewdestination()[2], amount=46,
privkey=self.privkey)
test6txs = [CTransaction(test4tx)]
invalidate_nulldummy_tx(test4tx)
assert_raises_rpc_error(-26, NULLDUMMY_ERROR, self.nodes[0].sendrawtransaction, test4tx.serialize_with_witness().hex(), 0)
self.block_submit(self.nodes[0], [test4tx], accept=False)
self.log.info("Test 5: Non-NULLDUMMY P2WSH multisig transaction invalid after activation")
test5tx = self.create_transaction(txid=txid3, input_details={"scriptPubKey": test1txs[2].vout[0].scriptPubKey.hex(),
"amount": 49, "witnessScript": wms["redeemScript"]},
addr=getnewdestination(address_type='p2sh-segwit')[2], amount=48,
privkey=self.privkey)
test6txs.append(CTransaction(test5tx))
test5tx.wit.vtxinwit[0].scriptWitness.stack[0] = b'\x01'
assert_raises_rpc_error(-26, NULLDUMMY_ERROR, self.nodes[0].sendrawtransaction, test5tx.serialize_with_witness().hex(), 0)
self.block_submit(self.nodes[0], [test5tx], with_witness=True, accept=False)
self.log.info(f"Test 6: NULLDUMMY compliant base/witness transactions should be accepted to mempool and in block after activation [{COINBASE_MATURITY + 5}]")
for i in test6txs:
self.nodes[0].sendrawtransaction(i.serialize_with_witness().hex(), 0)
self.block_submit(self.nodes[0], test6txs, with_witness=True, accept=True)
def block_submit(self, node, txs, *, with_witness=False, accept):
tmpl = node.getblocktemplate(NORMAL_GBT_REQUEST_PARAMS)
assert_equal(tmpl['previousblockhash'], self.lastblockhash)
assert_equal(tmpl['height'], self.lastblockheight + 1)
block = create_block(tmpl=tmpl, ntime=self.lastblocktime + 1, txlist=txs)
if with_witness:
add_witness_commitment(block)
block.solve()
assert_equal(None if accept else NULLDUMMY_ERROR, node.submitblock(block.serialize().hex()))
if accept:
assert_equal(node.getbestblockhash(), block.hash)
self.lastblockhash = block.hash
self.lastblocktime += 1
self.lastblockheight += 1
else:
assert_equal(node.getbestblockhash(), self.lastblockhash)
if __name__ == '__main__':
NULLDUMMYTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/p2p_invalid_block.py | #!/usr/bin/env python3
# Copyright (c) 2015-2021 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test node responses to invalid blocks.
In this test we connect to one node over p2p, and test block requests:
1) Valid blocks should be requested and become chain tip.
2) Invalid block with duplicated transaction should be re-requested.
3) Invalid block with bad coinbase value should be rejected and not
re-requested.
4) Invalid block due to future timestamp is later accepted when that timestamp
becomes valid.
"""
import copy
import time
from test_framework.blocktools import (
MAX_FUTURE_BLOCK_TIME,
create_block,
create_coinbase,
create_tx_with_script,
)
from test_framework.messages import COIN
from test_framework.p2p import P2PDataStore
from test_framework.script import OP_TRUE
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal
class InvalidBlockRequestTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.setup_clean_chain = True
self.extra_args = [["[email protected]"]]
def run_test(self):
# Add p2p connection to node0
node = self.nodes[0] # convenience reference to the node
peer = node.add_p2p_connection(P2PDataStore())
best_block = node.getblock(node.getbestblockhash())
tip = int(node.getbestblockhash(), 16)
height = best_block["height"] + 1
block_time = best_block["time"] + 1
self.log.info("Create a new block with an anyone-can-spend coinbase")
block = create_block(tip, create_coinbase(height), block_time)
block.solve()
# Save the coinbase for later
block1 = block
peer.send_blocks_and_test([block1], node, success=True)
self.log.info("Mature the block.")
self.generatetoaddress(node, 100, node.get_deterministic_priv_key().address)
best_block = node.getblock(node.getbestblockhash())
tip = int(node.getbestblockhash(), 16)
height = best_block["height"] + 1
block_time = best_block["time"] + 1
# Use merkle-root malleability to generate an invalid block with
# same blockheader (CVE-2012-2459).
# Manufacture a block with 3 transactions (coinbase, spend of prior
# coinbase, spend of that spend). Duplicate the 3rd transaction to
# leave merkle root and blockheader unchanged but invalidate the block.
# For more information on merkle-root malleability see src/consensus/merkle.cpp.
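# Hedged illustration: with an odd transaction count the last entry is
# paired with itself, so merkle([a, b, c]) equals merkle([a, b, c, c]).
# Duplicating the final tx therefore changes the block body while leaving
# the merkle root, and hence the header and block hash, unchanged.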
self.log.info("Test merkle root malleability.")
tx1 = create_tx_with_script(block1.vtx[0], 0, script_sig=bytes([OP_TRUE]), amount=50 * COIN)
tx2 = create_tx_with_script(tx1, 0, script_sig=bytes([OP_TRUE]), amount=50 * COIN)
block2 = create_block(tip, create_coinbase(height), block_time, txlist=[tx1, tx2])
block_time += 1
block2.solve()
orig_hash = block2.sha256
block2_orig = copy.deepcopy(block2)
# Mutate block 2
block2.vtx.append(tx2)
assert_equal(block2.hashMerkleRoot, block2.calc_merkle_root())
assert_equal(orig_hash, block2.rehash())
assert block2_orig.vtx != block2.vtx
peer.send_blocks_and_test([block2], node, success=False, reject_reason='bad-txns-duplicate')
# Check transactions for duplicate inputs (CVE-2018-17144)
self.log.info("Test duplicate input block.")
block2_dup = copy.deepcopy(block2_orig)
block2_dup.vtx[2].vin.append(block2_dup.vtx[2].vin[0])
block2_dup.vtx[2].rehash()
block2_dup.hashMerkleRoot = block2_dup.calc_merkle_root()
block2_dup.solve()
peer.send_blocks_and_test([block2_dup], node, success=False, reject_reason='bad-txns-inputs-duplicate')
self.log.info("Test very broken block.")
block3 = create_block(tip, create_coinbase(height, nValue=100), block_time)
block_time += 1
block3.solve()
peer.send_blocks_and_test([block3], node, success=False, reject_reason='bad-cb-amount')
# Complete testing of CVE-2012-2459 by sending the original block.
# It should be accepted even though it has the same hash as the mutated one.
self.log.info("Test accepting original block after rejecting its mutated version.")
peer.send_blocks_and_test([block2_orig], node, success=True, timeout=5)
# Update tip info
height += 1
block_time += 1
tip = int(block2_orig.hash, 16)
# Complete testing of CVE-2018-17144, by checking for the inflation bug.
# Create a block that spends the output of a tx in a previous block.
tx3 = create_tx_with_script(tx2, 0, script_sig=bytes([OP_TRUE]), amount=50 * COIN)
tx3.vin.append(tx3.vin[0]) # Duplicates input
tx3.rehash()
block4 = create_block(tip, create_coinbase(height), block_time, txlist=[tx3])
block4.solve()
self.log.info("Test inflation by duplicating input")
peer.send_blocks_and_test([block4], node, success=False, reject_reason='bad-txns-inputs-duplicate')
self.log.info("Test accepting identical block after rejecting it due to a future timestamp.")
t = int(time.time())
node.setmocktime(t)
# Set block time +1 second past max future validity
block = create_block(tip, create_coinbase(height), t + MAX_FUTURE_BLOCK_TIME + 1)
block.solve()
# Need force_send because the block will get rejected without a getdata otherwise
peer.send_blocks_and_test([block], node, force_send=True, success=False, reject_reason='time-too-new')
node.setmocktime(t + 1)
peer.send_blocks_and_test([block], node, success=True)
if __name__ == '__main__':
InvalidBlockRequestTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/wallet_import_rescan.py | #!/usr/bin/env python3
# Copyright (c) 2014-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test wallet import RPCs.
Test rescan behavior of importaddress, importpubkey, importprivkey, and
importmulti RPCs with different types of keys and rescan options.
In the first part of the test, node 0 creates an address for each type of
import RPC call and sends BTC to it. Then other nodes import the addresses,
and the test makes listtransactions and getbalance calls to confirm that the
importing node either did or did not execute rescans picking up the send
transactions.
In the second part of the test, node 0 sends more BTC to each address, and the
test makes more listtransactions and getbalance calls to confirm that the
importing nodes pick up the new transactions regardless of whether rescans
happened previously.
"""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.address import AddressType
from test_framework.util import (
assert_equal,
set_node_times,
)
import collections
from decimal import Decimal
import enum
import itertools
import random
Call = enum.Enum("Call", "single multiaddress multiscript")
Data = enum.Enum("Data", "address pub priv")
Rescan = enum.Enum("Rescan", "no yes late_timestamp")
class Variant(collections.namedtuple("Variant", "call data address_type rescan prune")):
"""Helper for importing one key and verifying scanned transactions."""
def do_import(self, timestamp):
"""Call one key import RPC."""
rescan = self.rescan == Rescan.yes
assert_equal(self.address["solvable"], True)
assert_equal(self.address["isscript"], self.address_type == AddressType.p2sh_segwit)
assert_equal(self.address["iswitness"], self.address_type == AddressType.bech32)
if self.address["isscript"]:
assert_equal(self.address["embedded"]["isscript"], False)
assert_equal(self.address["embedded"]["iswitness"], True)
if self.call == Call.single:
if self.data == Data.address:
response = self.node.importaddress(address=self.address["address"], label=self.label, rescan=rescan)
elif self.data == Data.pub:
response = self.node.importpubkey(pubkey=self.address["pubkey"], label=self.label, rescan=rescan)
elif self.data == Data.priv:
response = self.node.importprivkey(privkey=self.key, label=self.label, rescan=rescan)
assert_equal(response, None)
elif self.call in (Call.multiaddress, Call.multiscript):
request = {
"scriptPubKey": {
"address": self.address["address"]
} if self.call == Call.multiaddress else self.address["scriptPubKey"],
"timestamp": timestamp + TIMESTAMP_WINDOW + (1 if self.rescan == Rescan.late_timestamp else 0),
"pubkeys": [self.address["pubkey"]] if self.data == Data.pub else [],
"keys": [self.key] if self.data == Data.priv else [],
"label": self.label,
"watchonly": self.data != Data.priv
}
if self.address_type == AddressType.p2sh_segwit and self.data != Data.address:
# We need solving data when providing a pubkey or privkey as data
request.update({"redeemscript": self.address['embedded']['scriptPubKey']})
response = self.node.importmulti(
requests=[request],
rescan=self.rescan in (Rescan.yes, Rescan.late_timestamp),
)
assert_equal(response, [{"success": True}])
def check(self, txid=None, amount=None, confirmation_height=None):
"""Verify that listtransactions/listreceivedbyaddress return expected values."""
txs = self.node.listtransactions(label=self.label, count=10000, include_watchonly=True)
current_height = self.node.getblockcount()
assert_equal(len(txs), self.expected_txs)
addresses = self.node.listreceivedbyaddress(minconf=0, include_watchonly=True, address_filter=self.address['address'])
if self.expected_txs:
assert_equal(len(addresses[0]["txids"]), self.expected_txs)
if txid is not None:
tx, = [tx for tx in txs if tx["txid"] == txid]
assert_equal(tx["label"], self.label)
assert_equal(tx["address"], self.address["address"])
assert_equal(tx["amount"], amount)
assert_equal(tx["category"], "receive")
assert_equal(tx["label"], self.label)
assert_equal(tx["txid"], txid)
# If no confirmation height is given, the tx is still in the
# mempool.
confirmations = (1 + current_height - confirmation_height) if confirmation_height else 0
assert_equal(tx["confirmations"], confirmations)
if confirmations:
assert "trusted" not in tx
address, = [ad for ad in addresses if txid in ad["txids"]]
assert_equal(address["address"], self.address["address"])
assert_equal(address["amount"], self.expected_balance)
assert_equal(address["confirmations"], confirmations)
# Verify the transaction is correctly marked watchonly depending on
# whether the transaction pays to an imported public key or
# imported private key. The test setup ensures that transaction
# inputs will not be from watchonly keys (important because
# involvesWatchonly will be true if either the transaction output
# or inputs are watchonly).
if self.data != Data.priv:
assert_equal(address["involvesWatchonly"], True)
else:
assert_equal("involvesWatchonly" not in address, True)
# List of Variants for each way a key or address could be imported.
IMPORT_VARIANTS = [Variant(*variants) for variants in itertools.product(Call, Data, AddressType, Rescan, (False, True))]
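# Sketch of the combinatorics, assuming AddressType has three members
# (legacy, p2sh_segwit, bech32): 3 calls * 3 data kinds * 3 address types
# * 3 rescan modes * 2 prune flags = 162 variants.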
# List of nodes to import keys to. Half the nodes will have pruning disabled,
# half will have it enabled. Different nodes will be used for imports that are
# expected to cause rescans, and imports that are not expected to cause
# rescans, in order to prevent rescans during later imports picking up
# transactions associated with earlier imports. This makes it easier to keep
# track of expected balances and transactions.
ImportNode = collections.namedtuple("ImportNode", "prune rescan")
IMPORT_NODES = [ImportNode(*fields) for fields in itertools.product((False, True), repeat=2)]
# Rescans start at the earliest block up to 2 hours before the key timestamp.
TIMESTAMP_WINDOW = 2 * 60 * 60
AMOUNT_DUST = 0.00000546
def get_rand_amount():
r = random.uniform(AMOUNT_DUST, 1)
return Decimal(str(round(r, 8)))
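# Rounding to 8 decimal places keeps amounts at satoshi precision, and the
# AMOUNT_DUST floor (546 sats, commonly cited as the P2PKH dust threshold)
# keeps the outputs relayable.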
class ImportRescanTest(BitcoinTestFramework):
def add_options(self, parser):
self.add_wallet_options(parser, descriptors=False)
def set_test_params(self):
self.num_nodes = 2 + len(IMPORT_NODES)
self.supports_cli = False
self.rpc_timeout = 120
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def setup_network(self):
self.extra_args = [[] for _ in range(self.num_nodes)]
for i, import_node in enumerate(IMPORT_NODES, 2):
if import_node.prune:
self.extra_args[i] += ["-prune=1"]
self.add_nodes(self.num_nodes, extra_args=self.extra_args)
# Import keys with pruning disabled
self.start_nodes(extra_args=[[]] * self.num_nodes)
self.import_deterministic_coinbase_privkeys()
self.stop_nodes()
self.start_nodes(extra_args=[["[email protected]"]] * self.num_nodes)
for i in range(1, self.num_nodes):
self.connect_nodes(i, 0)
def run_test(self):
# Create one transaction on node 0 with a unique amount for
# each possible type of wallet import RPC.
last_variants = []
for i, variant in enumerate(IMPORT_VARIANTS):
if i % 10 == 0:
blockhash = self.generate(self.nodes[0], 1)[0]
conf_height = self.nodes[0].getblockcount()
timestamp = self.nodes[0].getblockheader(blockhash)["time"]
for var in last_variants:
var.confirmation_height = conf_height
var.timestamp = timestamp
last_variants.clear()
variant.label = "label {} {}".format(i, variant)
variant.address = self.nodes[1].getaddressinfo(self.nodes[1].getnewaddress(
label=variant.label,
address_type=variant.address_type.value,
))
variant.key = self.nodes[1].dumpprivkey(variant.address["address"])
variant.initial_amount = get_rand_amount()
variant.initial_txid = self.nodes[0].sendtoaddress(variant.address["address"], variant.initial_amount)
last_variants.append(variant)
blockhash = self.generate(self.nodes[0], 1)[0]
conf_height = self.nodes[0].getblockcount()
timestamp = self.nodes[0].getblockheader(blockhash)["time"]
for var in last_variants:
var.confirmation_height = conf_height
var.timestamp = timestamp
last_variants.clear()
# Generate a block further in the future (past the rescan window).
assert_equal(self.nodes[0].getrawmempool(), [])
set_node_times(
self.nodes,
self.nodes[0].getblockheader(self.nodes[0].getbestblockhash())["time"] + TIMESTAMP_WINDOW + 1,
)
self.generate(self.nodes[0], 1)
# For each variation of wallet key import, invoke the import RPC and
# check the results from getbalance and listtransactions.
for variant in IMPORT_VARIANTS:
self.log.info('Run import for variant {}'.format(variant))
expect_rescan = variant.rescan == Rescan.yes
variant.node = self.nodes[2 + IMPORT_NODES.index(ImportNode(variant.prune, expect_rescan))]
variant.do_import(variant.timestamp)
if expect_rescan:
variant.expected_balance = variant.initial_amount
variant.expected_txs = 1
variant.check(variant.initial_txid, variant.initial_amount, variant.confirmation_height)
else:
variant.expected_balance = 0
variant.expected_txs = 0
variant.check()
# Create new transactions sending to each address.
for i, variant in enumerate(IMPORT_VARIANTS):
if i % 10 == 0:
self.generate(self.nodes[0], 1)
conf_height = self.nodes[0].getblockcount() + 1
variant.sent_amount = get_rand_amount()
variant.sent_txid = self.nodes[0].sendtoaddress(variant.address["address"], variant.sent_amount)
variant.confirmation_height = conf_height
self.generate(self.nodes[0], 1)
assert_equal(self.nodes[0].getrawmempool(), [])
self.sync_all()
# Check the latest results from getbalance and listtransactions.
for variant in IMPORT_VARIANTS:
self.log.info('Run check for variant {}'.format(variant))
variant.expected_balance += variant.sent_amount
variant.expected_txs += 1
variant.check(variant.sent_txid, variant.sent_amount, variant.confirmation_height)
self.log.info('Test that the mempool is rescanned as well if the rescan parameter is set to true')
# The late timestamp and pruned variants are not necessary when testing mempool rescan
mempool_variants = [variant for variant in IMPORT_VARIANTS if variant.rescan != Rescan.late_timestamp and not variant.prune]
# No further blocks are mined so the timestamp will stay the same
timestamp = self.nodes[0].getblockheader(self.nodes[0].getbestblockhash())["time"]
# Create one transaction on node 0 with a unique amount for
# each possible type of wallet import RPC.
for i, variant in enumerate(mempool_variants):
variant.label = "mempool label {} {}".format(i, variant)
variant.address = self.nodes[1].getaddressinfo(self.nodes[1].getnewaddress(
label=variant.label,
address_type=variant.address_type.value,
))
variant.key = self.nodes[1].dumpprivkey(variant.address["address"])
variant.initial_amount = get_rand_amount()
variant.initial_txid = self.nodes[0].sendtoaddress(variant.address["address"], variant.initial_amount)
variant.confirmation_height = 0
variant.timestamp = timestamp
assert_equal(len(self.nodes[0].getrawmempool()), len(mempool_variants))
self.sync_mempools()
# For each variation of wallet key import, invoke the import RPC and
# check the results from getbalance and listtransactions.
for variant in mempool_variants:
self.log.info('Run import for mempool variant {}'.format(variant))
expect_rescan = variant.rescan == Rescan.yes
variant.node = self.nodes[2 + IMPORT_NODES.index(ImportNode(variant.prune, expect_rescan))]
variant.do_import(variant.timestamp)
if expect_rescan:
variant.expected_balance = variant.initial_amount
variant.expected_txs = 1
variant.check(variant.initial_txid, variant.initial_amount)
else:
variant.expected_balance = 0
variant.expected_txs = 0
variant.check()
if __name__ == "__main__":
ImportRescanTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/p2p_net_deadlock.py | #!/usr/bin/env python3
# Copyright (c) 2023-present The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
import threading
from test_framework.test_framework import BitcoinTestFramework
from random import randbytes
class NetDeadlockTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 2
def run_test(self):
node0 = self.nodes[0]
node1 = self.nodes[1]
self.log.info("Simultaneously send a large message on both sides")
rand_msg = randbytes(4000000).hex()
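# Rationale, hedged: 4 MB comfortably exceeds typical OS socket buffers,
# so both nodes can have their send buffers full at the same time. If the
# net layer blocked on send while its peer did the same, neither side
# would drain the other and the joins below would hang.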
thread0 = threading.Thread(target=node0.sendmsgtopeer, args=(0, "unknown", rand_msg))
thread1 = threading.Thread(target=node1.sendmsgtopeer, args=(0, "unknown", rand_msg))
thread0.start()
thread1.start()
thread0.join()
thread1.join()
self.log.info("Check whether a deadlock happened")
self.generate(node0, 1)
self.sync_blocks()
if __name__ == '__main__':
NetDeadlockTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/rpc_invalid_address_message.py | #!/usr/bin/env python3
# Copyright (c) 2020-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test error messages for 'getaddressinfo' and 'validateaddress' RPC commands."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
)
BECH32_VALID = 'bcrt1qtmp74ayg7p24uslctssvjm06q5phz4yrxucgnv'
BECH32_VALID_CAPITALS = 'BCRT1QPLMTZKC2XHARPPZDLNPAQL78RSHJ68U33RAH7R'
BECH32_VALID_MULTISIG = 'bcrt1qdg3myrgvzw7ml9q0ejxhlkyxm7vl9r56yzkfgvzclrf4hkpx9yfqhpsuks'
BECH32_INVALID_BECH32 = 'bcrt1p0xlxvlhemja6c4dqv22uapctqupfhlxm9h8z3k2e72q4k9hcz7vqdmchcc'
BECH32_INVALID_BECH32M = 'bcrt1qw508d6qejxtdg4y5r3zarvary0c5xw7k35mrzd'
BECH32_INVALID_VERSION = 'bcrt130xlxvlhemja6c4dqv22uapctqupfhlxm9h8z3k2e72q4k9hcz7vqynjegk'
BECH32_INVALID_SIZE = 'bcrt1s0xlxvlhemja6c4dqv22uapctqupfhlxm9h8z3k2e72q4k9hcz7v8n0nx0muaewav25430mtr'
BECH32_INVALID_V0_SIZE = 'bcrt1qw508d6qejxtdg4y5r3zarvary0c5xw7kqqq5k3my'
BECH32_INVALID_PREFIX = 'bc1pw508d6qejxtdg4y5r3zarvary0c5xw7kw508d6qejxtdg4y5r3zarvary0c5xw7k7grplx'
BECH32_TOO_LONG = 'bcrt1q049edschfnwystcqnsvyfpj23mpsg3jcedq9xv049edschfnwystcqnsvyfpj23mpsg3jcedq9xv049edschfnwystcqnsvyfpj23m'
BECH32_ONE_ERROR = 'bcrt1q049edschfnwystcqnsvyfpj23mpsg3jcedq9xv'
BECH32_ONE_ERROR_CAPITALS = 'BCRT1QPLMTZKC2XHARPPZDLNPAQL78RSHJ68U32RAH7R'
BECH32_TWO_ERRORS = 'bcrt1qax9suht3qv95sw33xavx8crpxduefdrsvgsklu' # should be bcrt1qax9suht3qv95sw33wavx8crpxduefdrsvgsklx
BECH32_NO_SEPARATOR = 'bcrtq049ldschfnwystcqnsvyfpj23mpsg3jcedq9xv'
BECH32_INVALID_CHAR = 'bcrt1q04oldschfnwystcqnsvyfpj23mpsg3jcedq9xv'
BECH32_MULTISIG_TWO_ERRORS = 'bcrt1qdg3myrgvzw7ml8q0ejxhlkyxn7vl9r56yzkfgvzclrf4hkpx9yfqhpsuks'
BECH32_WRONG_VERSION = 'bcrt1ptmp74ayg7p24uslctssvjm06q5phz4yrxucgnv'
BASE58_VALID = 'mipcBbFg9gMiCh81Kj8tqqdgoZub1ZJRfn'
BASE58_INVALID_PREFIX = '17VZNX1SN5NtKa8UQFxwQbFeFc3iqRYhem'
BASE58_INVALID_CHECKSUM = 'mipcBbFg9gMiCh81Kj8tqqdgoZub1ZJJfn'
BASE58_INVALID_LENGTH = '2VKf7XKMrp4bVNVmuRbyCewkP8FhGLP2E54LHDPakr9Sq5mtU2'
INVALID_ADDRESS = 'asfah14i8fajz0123f'
INVALID_ADDRESS_2 = '1q049ldschfnwystcqnsvyfpj23mpsg3jcedq9xv'
class InvalidAddressErrorMessageTest(BitcoinTestFramework):
def add_options(self, parser):
self.add_wallet_options(parser)
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
def check_valid(self, addr):
info = self.nodes[0].validateaddress(addr)
assert info['isvalid']
assert 'error' not in info
assert 'error_locations' not in info
def check_invalid(self, addr, error_str, error_locations=None):
res = self.nodes[0].validateaddress(addr)
assert not res['isvalid']
assert_equal(res['error'], error_str)
if error_locations:
assert_equal(res['error_locations'], error_locations)
else:
assert_equal(res['error_locations'], [])
def test_validateaddress(self):
# Invalid Bech32
self.check_invalid(BECH32_INVALID_SIZE, "Invalid Bech32 address program size (41 bytes)")
self.check_invalid(BECH32_INVALID_PREFIX, 'Invalid or unsupported Segwit (Bech32) or Base58 encoding.')
self.check_invalid(BECH32_INVALID_BECH32, 'Version 1+ witness address must use Bech32m checksum')
self.check_invalid(BECH32_INVALID_BECH32M, 'Version 0 witness address must use Bech32 checksum')
self.check_invalid(BECH32_INVALID_VERSION, 'Invalid Bech32 address witness version')
self.check_invalid(BECH32_INVALID_V0_SIZE, "Invalid Bech32 v0 address program size (21 bytes), per BIP141")
self.check_invalid(BECH32_TOO_LONG, 'Bech32 string too long', list(range(90, 108)))
self.check_invalid(BECH32_ONE_ERROR, 'Invalid Bech32 checksum', [9])
self.check_invalid(BECH32_TWO_ERRORS, 'Invalid Bech32 checksum', [22, 43])
self.check_invalid(BECH32_ONE_ERROR_CAPITALS, 'Invalid Bech32 checksum', [38])
self.check_invalid(BECH32_NO_SEPARATOR, 'Missing separator')
self.check_invalid(BECH32_INVALID_CHAR, 'Invalid Base 32 character', [8])
self.check_invalid(BECH32_MULTISIG_TWO_ERRORS, 'Invalid Bech32 checksum', [19, 30])
self.check_invalid(BECH32_WRONG_VERSION, 'Invalid Bech32 checksum', [5])
# Valid Bech32
self.check_valid(BECH32_VALID)
self.check_valid(BECH32_VALID_CAPITALS)
self.check_valid(BECH32_VALID_MULTISIG)
# Invalid Base58
self.check_invalid(BASE58_INVALID_PREFIX, 'Invalid or unsupported Base58-encoded address.')
self.check_invalid(BASE58_INVALID_CHECKSUM, 'Invalid checksum or length of Base58 address (P2PKH or P2SH)')
self.check_invalid(BASE58_INVALID_LENGTH, 'Invalid checksum or length of Base58 address (P2PKH or P2SH)')
# Valid Base58
self.check_valid(BASE58_VALID)
# Invalid address format
self.check_invalid(INVALID_ADDRESS, 'Invalid or unsupported Segwit (Bech32) or Base58 encoding.')
self.check_invalid(INVALID_ADDRESS_2, 'Invalid or unsupported Segwit (Bech32) or Base58 encoding.')
node = self.nodes[0]
# Missing arg returns the help text
assert_raises_rpc_error(-1, "Return information about the given bitcoin address.", node.validateaddress)
# Explicit None is not allowed for required parameters
assert_raises_rpc_error(-3, "JSON value of type null is not of expected type string", node.validateaddress, None)
def test_getaddressinfo(self):
node = self.nodes[0]
assert_raises_rpc_error(-5, "Invalid Bech32 address program size (41 bytes)", node.getaddressinfo, BECH32_INVALID_SIZE)
assert_raises_rpc_error(-5, "Invalid or unsupported Segwit (Bech32) or Base58 encoding.", node.getaddressinfo, BECH32_INVALID_PREFIX)
assert_raises_rpc_error(-5, "Invalid or unsupported Base58-encoded address.", node.getaddressinfo, BASE58_INVALID_PREFIX)
assert_raises_rpc_error(-5, "Invalid or unsupported Segwit (Bech32) or Base58 encoding.", node.getaddressinfo, INVALID_ADDRESS)
def run_test(self):
self.test_validateaddress()
if self.is_wallet_compiled():
self.init_wallet(node=0)
self.test_getaddressinfo()
if __name__ == '__main__':
InvalidAddressErrorMessageTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/wallet_fast_rescan.py | #!/usr/bin/env python3
# Copyright (c) 2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test that fast rescan using block filters for descriptor wallets detects
top-ups correctly and finds the same transactions as the slow variant."""
from test_framework.address import address_to_scriptpubkey
from test_framework.descriptors import descsum_create
from test_framework.test_framework import BitcoinTestFramework
from test_framework.test_node import TestNode
from test_framework.util import assert_equal
from test_framework.wallet import MiniWallet
from test_framework.wallet_util import get_generate_key
KEYPOOL_SIZE = 100 # smaller than default size to speed-up test
NUM_DESCRIPTORS = 9 # number of descriptors (8 default ranged ones + 1 fixed non-ranged one)
NUM_BLOCKS = 6 # number of blocks to mine
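# A freshly created descriptor wallet contains 8 ranged descriptors by default:
# one receiving and one change descriptor for each of the four supported
# address types (legacy, p2sh-segwit, bech32, bech32m).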
class WalletFastRescanTest(BitcoinTestFramework):
def add_options(self, parser):
self.add_wallet_options(parser, legacy=False)
def set_test_params(self):
self.num_nodes = 1
self.extra_args = [[f'-keypool={KEYPOOL_SIZE}', '-blockfilterindex=1']]
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
self.skip_if_no_sqlite()
def get_wallet_txids(self, node: TestNode, wallet_name: str) -> list[str]:
w = node.get_wallet_rpc(wallet_name)
txs = w.listtransactions('*', 1000000)
return [tx['txid'] for tx in txs]
def run_test(self):
node = self.nodes[0]
wallet = MiniWallet(node)
self.log.info("Create descriptor wallet with backup")
WALLET_BACKUP_FILENAME = node.datadir_path / 'wallet.bak'
node.createwallet(wallet_name='topup_test', descriptors=True)
w = node.get_wallet_rpc('topup_test')
fixed_key = get_generate_key()
        res = w.importdescriptors([{"desc": descsum_create(f"wpkh({fixed_key.privkey})"), "timestamp": "now"}])
        assert_equal(res[0]['success'], True)
descriptors = w.listdescriptors()['descriptors']
assert_equal(len(descriptors), NUM_DESCRIPTORS)
w.backupwallet(WALLET_BACKUP_FILENAME)
self.log.info(f"Create txs sending to end range address of each descriptor, triggering top-ups")
for i in range(NUM_BLOCKS):
self.log.info(f"Block {i+1}/{NUM_BLOCKS}")
for desc_info in w.listdescriptors()['descriptors']:
if 'range' in desc_info:
start_range, end_range = desc_info['range']
addr = w.deriveaddresses(desc_info['desc'], [end_range, end_range])[0]
spk = address_to_scriptpubkey(addr)
self.log.info(f"-> range [{start_range},{end_range}], last address {addr}")
else:
spk = bytes.fromhex(fixed_key.p2wpkh_script)
self.log.info(f"-> fixed non-range descriptor address {fixed_key.p2wpkh_addr}")
wallet.send_to(from_node=node, scriptPubKey=spk, amount=10000)
self.generate(node, 1)
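        # Each send above targeted the last index of a descriptor's range, so a
        # restored backup can only find these txs if the rescan keeps topping
        # up the keypool as matches are found.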
self.log.info("Import wallet backup with block filter index")
with node.assert_debug_log(['fast variant using block filters']):
node.restorewallet('rescan_fast', WALLET_BACKUP_FILENAME)
txids_fast = self.get_wallet_txids(node, 'rescan_fast')
self.log.info("Import non-active descriptors with block filter index")
node.createwallet(wallet_name='rescan_fast_nonactive', descriptors=True, disable_private_keys=True, blank=True)
with node.assert_debug_log(['fast variant using block filters']):
w = node.get_wallet_rpc('rescan_fast_nonactive')
w.importdescriptors([{"desc": descriptor['desc'], "timestamp": 0} for descriptor in descriptors])
txids_fast_nonactive = self.get_wallet_txids(node, 'rescan_fast_nonactive')
self.restart_node(0, [f'-keypool={KEYPOOL_SIZE}', '-blockfilterindex=0'])
self.log.info("Import wallet backup w/o block filter index")
with node.assert_debug_log(['slow variant inspecting all blocks']):
node.restorewallet("rescan_slow", WALLET_BACKUP_FILENAME)
txids_slow = self.get_wallet_txids(node, 'rescan_slow')
self.log.info("Import non-active descriptors w/o block filter index")
node.createwallet(wallet_name='rescan_slow_nonactive', descriptors=True, disable_private_keys=True, blank=True)
with node.assert_debug_log(['slow variant inspecting all blocks']):
w = node.get_wallet_rpc('rescan_slow_nonactive')
w.importdescriptors([{"desc": descriptor['desc'], "timestamp": 0} for descriptor in descriptors])
txids_slow_nonactive = self.get_wallet_txids(node, 'rescan_slow_nonactive')
self.log.info("Verify that all rescans found the same txs in slow and fast variants")
assert_equal(len(txids_slow), NUM_DESCRIPTORS * NUM_BLOCKS)
assert_equal(len(txids_fast), NUM_DESCRIPTORS * NUM_BLOCKS)
assert_equal(len(txids_slow_nonactive), NUM_DESCRIPTORS * NUM_BLOCKS)
assert_equal(len(txids_fast_nonactive), NUM_DESCRIPTORS * NUM_BLOCKS)
assert_equal(sorted(txids_slow), sorted(txids_fast))
assert_equal(sorted(txids_slow_nonactive), sorted(txids_fast_nonactive))
if __name__ == '__main__':
WalletFastRescanTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/feature_config_args.py | #!/usr/bin/env python3
# Copyright (c) 2017-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test various command line arguments and configuration file parameters."""
import os
from pathlib import Path
import re
import sys
import tempfile
import time
from test_framework.test_framework import BitcoinTestFramework
from test_framework.test_node import ErrorMatch
from test_framework import util
class ConfArgsTest(BitcoinTestFramework):
def add_options(self, parser):
self.add_wallet_options(parser)
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
self.supports_cli = False
self.wallet_names = []
self.disable_autoconnect = False
def test_config_file_parser(self):
self.log.info('Test config file parser')
self.stop_node(0)
# Check that startup fails if conf= is set in bitcoin.conf or in an included conf file
bad_conf_file_path = self.nodes[0].datadir_path / "bitcoin_bad.conf"
        util.write_config(bad_conf_file_path, n=0, chain='', extra_config='conf=some.conf\n')
conf_in_config_file_err = 'Error: Error reading configuration file: conf cannot be set in the configuration file; use includeconf= if you want to include additional config files'
self.nodes[0].assert_start_raises_init_error(
extra_args=[f'-conf={bad_conf_file_path}'],
expected_msg=conf_in_config_file_err,
)
inc_conf_file_path = self.nodes[0].datadir_path / 'include.conf'
with open(self.nodes[0].datadir_path / 'bitcoin.conf', 'a', encoding='utf-8') as conf:
conf.write(f'includeconf={inc_conf_file_path}\n')
with open(inc_conf_file_path, 'w', encoding='utf-8') as conf:
conf.write('conf=some.conf\n')
self.nodes[0].assert_start_raises_init_error(
expected_msg=conf_in_config_file_err,
)
self.nodes[0].assert_start_raises_init_error(
expected_msg='Error: Error parsing command line arguments: Invalid parameter -dash_cli=1',
extra_args=['-dash_cli=1'],
)
with open(inc_conf_file_path, 'w', encoding='utf-8') as conf:
conf.write('dash_conf=1\n')
with self.nodes[0].assert_debug_log(expected_msgs=['Ignoring unknown configuration value dash_conf']):
self.start_node(0)
self.stop_node(0)
with open(inc_conf_file_path, 'w', encoding='utf-8') as conf:
conf.write('reindex=1\n')
with self.nodes[0].assert_debug_log(expected_msgs=['Warning: reindex=1 is set in the configuration file, which will significantly slow down startup. Consider removing or commenting out this option for better performance, unless there is currently a condition which makes rebuilding the indexes necessary']):
self.start_node(0)
self.stop_node(0)
with open(inc_conf_file_path, 'w', encoding='utf-8') as conf:
conf.write('-dash=1\n')
self.nodes[0].assert_start_raises_init_error(expected_msg='Error: Error reading configuration file: parse error on line 1: -dash=1, options in configuration file must be specified without leading -')
if self.is_wallet_compiled():
with open(inc_conf_file_path, 'w', encoding='utf8') as conf:
conf.write("wallet=foo\n")
self.nodes[0].assert_start_raises_init_error(expected_msg=f'Error: Config setting for -wallet only applied on {self.chain} network when in [{self.chain}] section.')
main_conf_file_path = self.nodes[0].datadir_path / "bitcoin_main.conf"
util.write_config(main_conf_file_path, n=0, chain='', extra_config=f'includeconf={inc_conf_file_path}\n')
with open(inc_conf_file_path, 'w', encoding='utf-8') as conf:
conf.write('acceptnonstdtxn=1\n')
self.nodes[0].assert_start_raises_init_error(extra_args=[f"-conf={main_conf_file_path}", "-allowignoredconf"], expected_msg='Error: acceptnonstdtxn is not currently supported for main chain')
with open(inc_conf_file_path, 'w', encoding='utf-8') as conf:
conf.write('nono\n')
self.nodes[0].assert_start_raises_init_error(expected_msg='Error: Error reading configuration file: parse error on line 1: nono, if you intended to specify a negated option, use nono=1 instead')
with open(inc_conf_file_path, 'w', encoding='utf-8') as conf:
conf.write('server=1\nrpcuser=someuser\nrpcpassword=some#pass')
self.nodes[0].assert_start_raises_init_error(expected_msg='Error: Error reading configuration file: parse error on line 3, using # in rpcpassword can be ambiguous and should be avoided')
with open(inc_conf_file_path, 'w', encoding='utf-8') as conf:
conf.write('server=1\nrpcuser=someuser\nmain.rpcpassword=some#pass')
self.nodes[0].assert_start_raises_init_error(expected_msg='Error: Error reading configuration file: parse error on line 3, using # in rpcpassword can be ambiguous and should be avoided')
with open(inc_conf_file_path, 'w', encoding='utf-8') as conf:
conf.write('server=1\nrpcuser=someuser\n[main]\nrpcpassword=some#pass')
self.nodes[0].assert_start_raises_init_error(expected_msg='Error: Error reading configuration file: parse error on line 4, using # in rpcpassword can be ambiguous and should be avoided')
inc_conf_file2_path = self.nodes[0].datadir_path / 'include2.conf'
with open(self.nodes[0].datadir_path / 'bitcoin.conf', 'a', encoding='utf-8') as conf:
conf.write(f'includeconf={inc_conf_file2_path}\n')
with open(inc_conf_file_path, 'w', encoding='utf-8') as conf:
conf.write('testnot.datadir=1\n')
with open(inc_conf_file2_path, 'w', encoding='utf-8') as conf:
conf.write('[testnet]\n')
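        # Unrecognized sections only produce a warning on stderr at shutdown;
        # unlike the hard config errors tested above, startup still succeeds.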
self.restart_node(0)
self.nodes[0].stop_node(expected_stderr=f'Warning: {inc_conf_file_path}:1 Section [testnot] is not recognized.{os.linesep}{inc_conf_file2_path}:1 Section [testnet] is not recognized.')
with open(inc_conf_file_path, 'w', encoding='utf-8') as conf:
conf.write('') # clear
with open(inc_conf_file2_path, 'w', encoding='utf-8') as conf:
conf.write('') # clear
def test_config_file_log(self):
# Disable this test for windows currently because trying to override
# the default datadir through the environment does not seem to work.
if sys.platform == "win32":
return
        self.log.info('Test that the correct configuration file path is logged when the configuration file changes the datadir')
# Create a temporary directory that will be treated as the default data
# directory by bitcoind.
env, default_datadir = util.get_temp_default_datadir(Path(self.options.tmpdir, "test_config_file_log"))
default_datadir.mkdir(parents=True)
# Write a bitcoin.conf file in the default data directory containing a
# datadir= line pointing at the node datadir.
node = self.nodes[0]
conf_text = node.bitcoinconf.read_text()
conf_path = default_datadir / "bitcoin.conf"
conf_path.write_text(f"datadir={node.datadir_path}\n{conf_text}")
# Drop the node -datadir= argument during this test, because if it is
# specified it would take precedence over the datadir setting in the
# config file.
node_args = node.args
node.args = [arg for arg in node.args if not arg.startswith("-datadir=")]
# Check that correct configuration file path is actually logged
# (conf_path, not node.bitcoinconf)
with self.nodes[0].assert_debug_log(expected_msgs=[f"Config file: {conf_path}"]):
self.start_node(0, ["-allowignoredconf"], env=env)
self.stop_node(0)
# Restore node arguments after the test
node.args = node_args
def test_invalid_command_line_options(self):
self.nodes[0].assert_start_raises_init_error(
expected_msg='Error: Error parsing command line arguments: Can not set -proxy with no value. Please specify value with -proxy=value.',
extra_args=['-proxy'],
)
def test_log_buffer(self):
self.stop_node(0)
with self.nodes[0].assert_debug_log(expected_msgs=['Warning: parsed potentially confusing double-negative -connect=0\n']):
self.start_node(0, extra_args=['-noconnect=0'])
def test_args_log(self):
self.stop_node(0)
self.log.info('Test config args logging')
with self.nodes[0].assert_debug_log(
expected_msgs=[
'Command-line arg: addnode="some.node"',
'Command-line arg: rpcauth=****',
'Command-line arg: rpcpassword=****',
'Command-line arg: rpcuser=****',
'Command-line arg: torpassword=****',
f'Config file arg: {self.chain}="1"',
f'Config file arg: [{self.chain}] server="1"',
],
unexpected_msgs=[
'alice:f7efda5c189b999524f151318c0c86$d5b51b3beffbc0',
'secret-rpcuser',
'secret-torpassword',
'Command-line arg: rpcbind=****',
'Command-line arg: rpcallowip=****',
]):
self.start_node(0, extra_args=[
'-addnode=some.node',
'-rpcauth=alice:f7efda5c189b999524f151318c0c86$d5b51b3beffbc0',
'-rpcbind=127.1.1.1',
'-rpcbind=127.0.0.1',
"-rpcallowip=127.0.0.1",
'-rpcpassword=',
'-rpcuser=secret-rpcuser',
'-torpassword=secret-torpassword',
])
def test_networkactive(self):
self.log.info('Test -networkactive option')
self.stop_node(0)
with self.nodes[0].assert_debug_log(expected_msgs=['SetNetworkActive: true\n']):
self.start_node(0)
self.stop_node(0)
with self.nodes[0].assert_debug_log(expected_msgs=['SetNetworkActive: true\n']):
self.start_node(0, extra_args=['-networkactive'])
self.stop_node(0)
with self.nodes[0].assert_debug_log(expected_msgs=['SetNetworkActive: true\n']):
self.start_node(0, extra_args=['-networkactive=1'])
self.stop_node(0)
with self.nodes[0].assert_debug_log(expected_msgs=['SetNetworkActive: false\n']):
self.start_node(0, extra_args=['-networkactive=0'])
self.stop_node(0)
with self.nodes[0].assert_debug_log(expected_msgs=['SetNetworkActive: false\n']):
self.start_node(0, extra_args=['-nonetworkactive'])
self.stop_node(0)
with self.nodes[0].assert_debug_log(expected_msgs=['SetNetworkActive: false\n']):
self.start_node(0, extra_args=['-nonetworkactive=1'])
def test_seed_peers(self):
self.log.info('Test seed peers')
default_data_dir = self.nodes[0].datadir_path
peer_dat = default_data_dir / 'peers.dat'
# Only regtest has no fixed seeds. To avoid connections to random
# nodes, regtest is the only network where it is safe to enable
# -fixedseeds in tests
        util.assert_equal(self.nodes[0].getblockchaininfo()['chain'], 'regtest')
self.stop_node(0)
# No peers.dat exists and -dnsseed=1
# We expect the node will use DNS Seeds, but Regtest mode does not have
# any valid DNS seeds. So after 60 seconds, the node should fallback to
# fixed seeds
assert not peer_dat.exists()
start = int(time.time())
with self.nodes[0].assert_debug_log(
expected_msgs=[
"Loaded 0 addresses from peers.dat",
"0 addresses found from DNS seeds",
"opencon thread start", # Ensure ThreadOpenConnections::start time is properly set
],
timeout=10,
):
self.start_node(0, extra_args=['-dnsseed=1', '-fixedseeds=1', f'-mocktime={start}'])
with self.nodes[0].assert_debug_log(expected_msgs=[
"Adding fixed seeds as 60 seconds have passed and addrman is empty",
]):
self.nodes[0].setmocktime(start + 65)
self.stop_node(0)
# No peers.dat exists and -dnsseed=0
# We expect the node will fallback immediately to fixed seeds
assert not peer_dat.exists()
with self.nodes[0].assert_debug_log(expected_msgs=[
"Loaded 0 addresses from peers.dat",
"DNS seeding disabled",
"Adding fixed seeds as -dnsseed=0 (or IPv4/IPv6 connections are disabled via -onlynet) and neither -addnode nor -seednode are provided\n",
]):
self.start_node(0, extra_args=['-dnsseed=0', '-fixedseeds=1'])
self.stop_node(0)
self.nodes[0].assert_start_raises_init_error(['-dnsseed=1', '-onlynet=i2p', '-i2psam=127.0.0.1:7656'], "Error: Incompatible options: -dnsseed=1 was explicitly specified, but -onlynet forbids connections to IPv4/IPv6")
# No peers.dat exists and dns seeds are disabled.
# We expect the node will not add fixed seeds when explicitly disabled.
assert not peer_dat.exists()
with self.nodes[0].assert_debug_log(expected_msgs=[
"Loaded 0 addresses from peers.dat",
"DNS seeding disabled",
"Fixed seeds are disabled",
]):
self.start_node(0, extra_args=['-dnsseed=0', '-fixedseeds=0'])
self.stop_node(0)
# No peers.dat exists and -dnsseed=0, but a -addnode is provided
# We expect the node will allow 60 seconds prior to using fixed seeds
assert not peer_dat.exists()
start = int(time.time())
with self.nodes[0].assert_debug_log(
expected_msgs=[
"Loaded 0 addresses from peers.dat",
"DNS seeding disabled",
"opencon thread start", # Ensure ThreadOpenConnections::start time is properly set
],
timeout=10,
):
self.start_node(0, extra_args=['-dnsseed=0', '-fixedseeds=1', '-addnode=fakenodeaddr', f'-mocktime={start}'])
with self.nodes[0].assert_debug_log(expected_msgs=[
"Adding fixed seeds as 60 seconds have passed and addrman is empty",
]):
self.nodes[0].setmocktime(start + 65)
def test_connect_with_seednode(self):
self.log.info('Test -connect with -seednode')
seednode_ignored = ['-seednode is ignored when -connect is used\n']
dnsseed_ignored = ['-dnsseed is ignored when -connect is used and -proxy is specified\n']
addcon_thread_started = ['addcon thread start\n']
self.stop_node(0)
# When -connect is supplied, expanding addrman via getaddr calls to ADDR_FETCH(-seednode)
# nodes is irrelevant and -seednode is ignored.
with self.nodes[0].assert_debug_log(expected_msgs=seednode_ignored):
self.start_node(0, extra_args=['-connect=fakeaddress1', '-seednode=fakeaddress2'])
# With -proxy, an ADDR_FETCH connection is made to a peer that the dns seed resolves to.
# ADDR_FETCH connections are not used when -connect is used.
with self.nodes[0].assert_debug_log(expected_msgs=dnsseed_ignored):
self.restart_node(0, extra_args=['-connect=fakeaddress1', '-dnsseed=1', '-proxy=1.2.3.4'])
# If the user did not disable -dnsseed, but it was soft-disabled because they provided -connect,
# they shouldn't see a warning about -dnsseed being ignored.
with self.nodes[0].assert_debug_log(expected_msgs=addcon_thread_started,
unexpected_msgs=dnsseed_ignored):
self.restart_node(0, extra_args=['-connect=fakeaddress1', '-proxy=1.2.3.4'])
# We have to supply expected_msgs as it's a required argument
# The expected_msg must be something we are confident will be logged after the unexpected_msg
# These cases test for -connect being supplied but only to disable it
for connect_arg in ['-connect=0', '-noconnect']:
with self.nodes[0].assert_debug_log(expected_msgs=addcon_thread_started,
unexpected_msgs=seednode_ignored):
self.restart_node(0, extra_args=[connect_arg, '-seednode=fakeaddress2'])
def test_ignored_conf(self):
self.log.info('Test error is triggered when the datadir in use contains a bitcoin.conf file that would be ignored '
'because a conflicting -conf file argument is passed.')
node = self.nodes[0]
with tempfile.NamedTemporaryFile(dir=self.options.tmpdir, mode="wt", delete=False) as temp_conf:
temp_conf.write(f"datadir={node.datadir_path}\n")
node.assert_start_raises_init_error([f"-conf={temp_conf.name}"], re.escape(
f'Error: Data directory "{node.datadir_path}" contains a "bitcoin.conf" file which is ignored, because a '
f'different configuration file "{temp_conf.name}" from command line argument "-conf={temp_conf.name}" '
f'is being used instead.') + r"[\s\S]*", match=ErrorMatch.FULL_REGEX)
# Test that passing a redundant -conf command line argument pointing to
# the same bitcoin.conf that would be loaded anyway does not trigger an
# error.
self.start_node(0, [f'-conf={node.datadir_path}/bitcoin.conf'])
self.stop_node(0)
def test_ignored_default_conf(self):
# Disable this test for windows currently because trying to override
# the default datadir through the environment does not seem to work.
if sys.platform == "win32":
return
self.log.info('Test error is triggered when bitcoin.conf in the default data directory sets another datadir '
'and it contains a different bitcoin.conf file that would be ignored')
# Create a temporary directory that will be treated as the default data
# directory by bitcoind.
env, default_datadir = util.get_temp_default_datadir(Path(self.options.tmpdir, "home"))
default_datadir.mkdir(parents=True)
# Write a bitcoin.conf file in the default data directory containing a
# datadir= line pointing at the node datadir. This will trigger a
# startup error because the node datadir contains a different
# bitcoin.conf that would be ignored.
node = self.nodes[0]
(default_datadir / "bitcoin.conf").write_text(f"datadir={node.datadir_path}\n")
# Drop the node -datadir= argument during this test, because if it is
# specified it would take precedence over the datadir setting in the
# config file.
node_args = node.args
node.args = [arg for arg in node.args if not arg.startswith("-datadir=")]
node.assert_start_raises_init_error([], re.escape(
f'Error: Data directory "{node.datadir_path}" contains a "bitcoin.conf" file which is ignored, because a '
f'different configuration file "{default_datadir}/bitcoin.conf" from data directory "{default_datadir}" '
f'is being used instead.') + r"[\s\S]*", env=env, match=ErrorMatch.FULL_REGEX)
node.args = node_args
def test_acceptstalefeeestimates_arg_support(self):
self.log.info("Test -acceptstalefeeestimates option support")
conf_file = self.nodes[0].datadir_path / "bitcoin.conf"
for chain, chain_name in {("main", ""), ("test", "testnet3"), ("signet", "signet")}:
util.write_config(conf_file, n=0, chain=chain_name, extra_config='acceptstalefeeestimates=1\n')
self.nodes[0].assert_start_raises_init_error(expected_msg=f'Error: acceptstalefeeestimates is not supported on {chain} chain.')
util.write_config(conf_file, n=0, chain="regtest") # Reset to regtest
def run_test(self):
self.test_log_buffer()
self.test_args_log()
self.test_seed_peers()
self.test_networkactive()
self.test_connect_with_seednode()
self.test_config_file_parser()
self.test_config_file_log()
self.test_invalid_command_line_options()
self.test_ignored_conf()
self.test_ignored_default_conf()
self.test_acceptstalefeeestimates_arg_support()
# Remove the -datadir argument so it doesn't override the config file
self.nodes[0].args = [arg for arg in self.nodes[0].args if not arg.startswith("-datadir")]
default_data_dir = self.nodes[0].datadir_path
new_data_dir = default_data_dir / 'newdatadir'
new_data_dir_2 = default_data_dir / 'newdatadir2'
# Check that using -datadir argument on non-existent directory fails
self.nodes[0].datadir_path = new_data_dir
self.nodes[0].assert_start_raises_init_error([f'-datadir={new_data_dir}'], f'Error: Specified data directory "{new_data_dir}" does not exist.')
# Check that using non-existent datadir in conf file fails
conf_file = default_data_dir / "bitcoin.conf"
# datadir needs to be set before [chain] section
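        # (options placed after a section header like [regtest] apply only to
        # that network, while datadir must remain a global setting)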
with open(conf_file, encoding='utf8') as f:
conf_file_contents = f.read()
with open(conf_file, 'w', encoding='utf8') as f:
f.write(f"datadir={new_data_dir}\n")
f.write(conf_file_contents)
self.nodes[0].assert_start_raises_init_error([f'-conf={conf_file}'], f'Error: Error reading configuration file: specified data directory "{new_data_dir}" does not exist.')
# Check that an explicitly specified config file that cannot be opened fails
        non_existent_conf_file = default_data_dir / "non_existent_bitcoin.conf"
        self.nodes[0].assert_start_raises_init_error([f'-conf={non_existent_conf_file}'], f'Error: Error reading configuration file: specified config file "{non_existent_conf_file}" could not be opened.')
# Create the directory and ensure the config file now works
new_data_dir.mkdir()
self.start_node(0, [f'-conf={conf_file}'])
self.stop_node(0)
assert (new_data_dir / self.chain / 'blocks').exists()
# Ensure command line argument overrides datadir in conf
new_data_dir_2.mkdir()
self.nodes[0].datadir_path = new_data_dir_2
self.start_node(0, [f'-datadir={new_data_dir_2}', f'-conf={conf_file}'])
assert (new_data_dir_2 / self.chain / 'blocks').exists()
if __name__ == '__main__':
ConfArgsTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/mining_basic.py | #!/usr/bin/env python3
# Copyright (c) 2014-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test mining RPCs
- getmininginfo
- getblocktemplate proposal mode
- submitblock"""
import copy
from decimal import Decimal
from test_framework.blocktools import (
create_coinbase,
get_witness_script,
NORMAL_GBT_REQUEST_PARAMS,
TIME_GENESIS_BLOCK,
)
from test_framework.messages import (
BLOCK_HEADER_SIZE,
CBlock,
CBlockHeader,
COIN,
ser_uint256,
)
from test_framework.p2p import P2PDataStore
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
get_fee,
)
from test_framework.wallet import MiniWallet
VERSIONBITS_TOP_BITS = 0x20000000
VERSIONBITS_DEPLOYMENT_TESTDUMMY_BIT = 28
DEFAULT_BLOCK_MIN_TX_FEE = 1000 # default `-blockmintxfee` setting [sat/kvB]
def assert_template(node, block, expect, rehash=True):
if rehash:
block.hashMerkleRoot = block.calc_merkle_root()
rsp = node.getblocktemplate(template_request={
'data': block.serialize().hex(),
'mode': 'proposal',
'rules': ['segwit'],
})
assert_equal(rsp, expect)
class MiningTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 2
self.setup_clean_chain = True
self.supports_cli = False
def mine_chain(self):
self.log.info('Create some old blocks')
for t in range(TIME_GENESIS_BLOCK, TIME_GENESIS_BLOCK + 200 * 600, 600):
self.nodes[0].setmocktime(t)
self.generate(self.wallet, 1, sync_fun=self.no_op)
mining_info = self.nodes[0].getmininginfo()
assert_equal(mining_info['blocks'], 200)
assert_equal(mining_info['currentblocktx'], 0)
assert_equal(mining_info['currentblockweight'], 4000)
self.log.info('test blockversion')
self.restart_node(0, extra_args=[f'-mocktime={t}', '-blockversion=1337'])
self.connect_nodes(0, 1)
assert_equal(1337, self.nodes[0].getblocktemplate(NORMAL_GBT_REQUEST_PARAMS)['version'])
self.restart_node(0, extra_args=[f'-mocktime={t}'])
self.connect_nodes(0, 1)
assert_equal(VERSIONBITS_TOP_BITS + (1 << VERSIONBITS_DEPLOYMENT_TESTDUMMY_BIT), self.nodes[0].getblocktemplate(NORMAL_GBT_REQUEST_PARAMS)['version'])
self.restart_node(0)
self.connect_nodes(0, 1)
def test_blockmintxfee_parameter(self):
self.log.info("Test -blockmintxfee setting")
self.restart_node(0, extra_args=['-minrelaytxfee=0', '-persistmempool=0'])
node = self.nodes[0]
# test default (no parameter), zero and a bunch of arbitrary blockmintxfee rates [sat/kvB]
for blockmintxfee_sat_kvb in (DEFAULT_BLOCK_MIN_TX_FEE, 0, 50, 100, 500, 2500, 5000, 21000, 333333, 2500000):
blockmintxfee_btc_kvb = blockmintxfee_sat_kvb / Decimal(COIN)
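            # -blockmintxfee is given to bitcoind in BTC/kvB, so convert from
            # sat/kvB (1 BTC = 100,000,000 sat), e.g. 1000 sat/kvB -> 0.00001 BTC/kvB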
if blockmintxfee_sat_kvb == DEFAULT_BLOCK_MIN_TX_FEE:
self.log.info(f"-> Default -blockmintxfee setting ({blockmintxfee_sat_kvb} sat/kvB)...")
else:
blockmintxfee_parameter = f"-blockmintxfee={blockmintxfee_btc_kvb:.8f}"
self.log.info(f"-> Test {blockmintxfee_parameter} ({blockmintxfee_sat_kvb} sat/kvB)...")
self.restart_node(0, extra_args=[blockmintxfee_parameter, '-minrelaytxfee=0', '-persistmempool=0'])
self.wallet.rescan_utxos() # to avoid spending outputs of txs that are not in mempool anymore after restart
# submit one tx with exactly the blockmintxfee rate, and one slightly below
tx_with_min_feerate = self.wallet.send_self_transfer(from_node=node, fee_rate=blockmintxfee_btc_kvb)
assert_equal(tx_with_min_feerate["fee"], get_fee(tx_with_min_feerate["tx"].get_vsize(), blockmintxfee_btc_kvb))
if blockmintxfee_btc_kvb > 0:
lowerfee_btc_kvb = blockmintxfee_btc_kvb - Decimal(10)/COIN # 0.01 sat/vbyte lower
tx_below_min_feerate = self.wallet.send_self_transfer(from_node=node, fee_rate=lowerfee_btc_kvb)
assert_equal(tx_below_min_feerate["fee"], get_fee(tx_below_min_feerate["tx"].get_vsize(), lowerfee_btc_kvb))
else: # go below zero fee by using modified fees
tx_below_min_feerate = self.wallet.send_self_transfer(from_node=node, fee_rate=blockmintxfee_btc_kvb)
node.prioritisetransaction(tx_below_min_feerate["txid"], 0, -1)
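                # the -1 sat fee delta makes the modified fee negative, i.e.
                # strictly below the zero -blockmintxfee under test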
# check that tx below specified fee-rate is neither in template nor in the actual block
block_template = node.getblocktemplate(NORMAL_GBT_REQUEST_PARAMS)
block_template_txids = [tx['txid'] for tx in block_template['transactions']]
self.generate(self.wallet, 1, sync_fun=self.no_op)
block = node.getblock(node.getbestblockhash(), verbosity=2)
block_txids = [tx['txid'] for tx in block['tx']]
assert tx_with_min_feerate['txid'] in block_template_txids
assert tx_with_min_feerate['txid'] in block_txids
assert tx_below_min_feerate['txid'] not in block_template_txids
assert tx_below_min_feerate['txid'] not in block_txids
def run_test(self):
node = self.nodes[0]
self.wallet = MiniWallet(node)
self.mine_chain()
def assert_submitblock(block, result_str_1, result_str_2=None):
block.solve()
result_str_2 = result_str_2 or 'duplicate-invalid'
assert_equal(result_str_1, node.submitblock(hexdata=block.serialize().hex()))
assert_equal(result_str_2, node.submitblock(hexdata=block.serialize().hex()))
self.log.info('getmininginfo')
mining_info = node.getmininginfo()
assert_equal(mining_info['blocks'], 200)
assert_equal(mining_info['chain'], self.chain)
assert 'currentblocktx' not in mining_info
assert 'currentblockweight' not in mining_info
assert_equal(mining_info['difficulty'], Decimal('4.656542373906925E-10'))
assert_equal(mining_info['networkhashps'], Decimal('0.003333333333333334'))
assert_equal(mining_info['pooledtx'], 0)
self.log.info("getblocktemplate: Test default witness commitment")
        wtxid = int(self.wallet.send_self_transfer(from_node=node)['wtxid'], 16)
tmpl = node.getblocktemplate(NORMAL_GBT_REQUEST_PARAMS)
# Check that default_witness_commitment is present.
assert 'default_witness_commitment' in tmpl
witness_commitment = tmpl['default_witness_commitment']
# Check that default_witness_commitment is correct.
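        # The witness merkle root is computed over wtxids, with the coinbase's
        # wtxid defined as all zeros, hence the ser_uint256(0) leaf (BIP141).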
witness_root = CBlock.get_merkle_root([ser_uint256(0),
                                               ser_uint256(wtxid)])
script = get_witness_script(witness_root, 0)
assert_equal(witness_commitment, script.hex())
# Mine a block to leave initial block download and clear the mempool
self.generatetoaddress(node, 1, node.get_deterministic_priv_key().address)
tmpl = node.getblocktemplate(NORMAL_GBT_REQUEST_PARAMS)
self.log.info("getblocktemplate: Test capability advertised")
assert 'proposal' in tmpl['capabilities']
assert 'coinbasetxn' not in tmpl
next_height = int(tmpl["height"])
coinbase_tx = create_coinbase(height=next_height)
# sequence numbers must not be max for nLockTime to have effect
coinbase_tx.vin[0].nSequence = 2**32 - 2
coinbase_tx.rehash()
block = CBlock()
block.nVersion = tmpl["version"]
block.hashPrevBlock = int(tmpl["previousblockhash"], 16)
block.nTime = tmpl["curtime"]
block.nBits = int(tmpl["bits"], 16)
block.nNonce = 0
block.vtx = [coinbase_tx]
self.log.info("getblocktemplate: segwit rule must be set")
assert_raises_rpc_error(-8, "getblocktemplate must be called with the segwit rule set", node.getblocktemplate, {})
self.log.info("getblocktemplate: Test valid block")
assert_template(node, block, None)
self.log.info("submitblock: Test block decode failure")
assert_raises_rpc_error(-22, "Block decode failed", node.submitblock, block.serialize()[:-15].hex())
self.log.info("getblocktemplate: Test bad input hash for coinbase transaction")
bad_block = copy.deepcopy(block)
bad_block.vtx[0].vin[0].prevout.hash += 1
bad_block.vtx[0].rehash()
assert_template(node, bad_block, 'bad-cb-missing')
self.log.info("submitblock: Test invalid coinbase transaction")
assert_raises_rpc_error(-22, "Block does not start with a coinbase", node.submitblock, CBlock().serialize().hex())
assert_raises_rpc_error(-22, "Block does not start with a coinbase", node.submitblock, bad_block.serialize().hex())
self.log.info("getblocktemplate: Test truncated final transaction")
assert_raises_rpc_error(-22, "Block decode failed", node.getblocktemplate, {
'data': block.serialize()[:-1].hex(),
'mode': 'proposal',
'rules': ['segwit'],
})
self.log.info("getblocktemplate: Test duplicate transaction")
bad_block = copy.deepcopy(block)
bad_block.vtx.append(bad_block.vtx[0])
assert_template(node, bad_block, 'bad-txns-duplicate')
assert_submitblock(bad_block, 'bad-txns-duplicate', 'bad-txns-duplicate')
self.log.info("getblocktemplate: Test invalid transaction")
bad_block = copy.deepcopy(block)
bad_tx = copy.deepcopy(bad_block.vtx[0])
bad_tx.vin[0].prevout.hash = 255
bad_tx.rehash()
bad_block.vtx.append(bad_tx)
assert_template(node, bad_block, 'bad-txns-inputs-missingorspent')
assert_submitblock(bad_block, 'bad-txns-inputs-missingorspent')
self.log.info("getblocktemplate: Test nonfinal transaction")
bad_block = copy.deepcopy(block)
bad_block.vtx[0].nLockTime = 2**32 - 1
bad_block.vtx[0].rehash()
assert_template(node, bad_block, 'bad-txns-nonfinal')
assert_submitblock(bad_block, 'bad-txns-nonfinal')
self.log.info("getblocktemplate: Test bad tx count")
# The tx count is immediately after the block header
bad_block_sn = bytearray(block.serialize())
assert_equal(bad_block_sn[BLOCK_HEADER_SIZE], 1)
bad_block_sn[BLOCK_HEADER_SIZE] += 1
assert_raises_rpc_error(-22, "Block decode failed", node.getblocktemplate, {
'data': bad_block_sn.hex(),
'mode': 'proposal',
'rules': ['segwit'],
})
self.log.info("getblocktemplate: Test bad bits")
bad_block = copy.deepcopy(block)
bad_block.nBits = 469762303 # impossible in the real world
assert_template(node, bad_block, 'bad-diffbits')
self.log.info("getblocktemplate: Test bad merkle root")
bad_block = copy.deepcopy(block)
bad_block.hashMerkleRoot += 1
assert_template(node, bad_block, 'bad-txnmrklroot', False)
assert_submitblock(bad_block, 'bad-txnmrklroot', 'bad-txnmrklroot')
self.log.info("getblocktemplate: Test bad timestamps")
bad_block = copy.deepcopy(block)
bad_block.nTime = 2**32 - 1
assert_template(node, bad_block, 'time-too-new')
assert_submitblock(bad_block, 'time-too-new', 'time-too-new')
bad_block.nTime = 0
assert_template(node, bad_block, 'time-too-old')
assert_submitblock(bad_block, 'time-too-old', 'time-too-old')
self.log.info("getblocktemplate: Test not best block")
bad_block = copy.deepcopy(block)
bad_block.hashPrevBlock = 123
assert_template(node, bad_block, 'inconclusive-not-best-prevblk')
assert_submitblock(bad_block, 'prev-blk-not-found', 'prev-blk-not-found')
self.log.info('submitheader tests')
assert_raises_rpc_error(-22, 'Block header decode failed', lambda: node.submitheader(hexdata='xx' * BLOCK_HEADER_SIZE))
assert_raises_rpc_error(-22, 'Block header decode failed', lambda: node.submitheader(hexdata='ff' * (BLOCK_HEADER_SIZE-2)))
assert_raises_rpc_error(-25, 'Must submit previous header', lambda: node.submitheader(hexdata=super(CBlock, bad_block).serialize().hex()))
block.nTime += 1
block.solve()
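        # The submitted header sits at height 202: 200 blocks from mine_chain()
        # plus the block generated earlier to leave IBD, plus this one.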
def chain_tip(b_hash, *, status='headers-only', branchlen=1):
return {'hash': b_hash, 'height': 202, 'branchlen': branchlen, 'status': status}
assert chain_tip(block.hash) not in node.getchaintips()
node.submitheader(hexdata=block.serialize().hex())
assert chain_tip(block.hash) in node.getchaintips()
node.submitheader(hexdata=CBlockHeader(block).serialize().hex()) # Noop
assert chain_tip(block.hash) in node.getchaintips()
bad_block_root = copy.deepcopy(block)
bad_block_root.hashMerkleRoot += 2
bad_block_root.solve()
assert chain_tip(bad_block_root.hash) not in node.getchaintips()
node.submitheader(hexdata=CBlockHeader(bad_block_root).serialize().hex())
assert chain_tip(bad_block_root.hash) in node.getchaintips()
# Should still reject invalid blocks, even if we have the header:
assert_equal(node.submitblock(hexdata=bad_block_root.serialize().hex()), 'bad-txnmrklroot')
assert_equal(node.submitblock(hexdata=bad_block_root.serialize().hex()), 'bad-txnmrklroot')
assert chain_tip(bad_block_root.hash) in node.getchaintips()
# We know the header for this invalid block, so should just return early without error:
node.submitheader(hexdata=CBlockHeader(bad_block_root).serialize().hex())
assert chain_tip(bad_block_root.hash) in node.getchaintips()
bad_block_lock = copy.deepcopy(block)
bad_block_lock.vtx[0].nLockTime = 2**32 - 1
bad_block_lock.vtx[0].rehash()
bad_block_lock.hashMerkleRoot = bad_block_lock.calc_merkle_root()
bad_block_lock.solve()
assert_equal(node.submitblock(hexdata=bad_block_lock.serialize().hex()), 'bad-txns-nonfinal')
assert_equal(node.submitblock(hexdata=bad_block_lock.serialize().hex()), 'duplicate-invalid')
# Build a "good" block on top of the submitted bad block
bad_block2 = copy.deepcopy(block)
bad_block2.hashPrevBlock = bad_block_lock.sha256
bad_block2.solve()
assert_raises_rpc_error(-25, 'bad-prevblk', lambda: node.submitheader(hexdata=CBlockHeader(bad_block2).serialize().hex()))
# Should reject invalid header right away
bad_block_time = copy.deepcopy(block)
bad_block_time.nTime = 1
bad_block_time.solve()
assert_raises_rpc_error(-25, 'time-too-old', lambda: node.submitheader(hexdata=CBlockHeader(bad_block_time).serialize().hex()))
# Should ask for the block from a p2p node, if they announce the header as well:
peer = node.add_p2p_connection(P2PDataStore())
peer.wait_for_getheaders(timeout=5) # Drop the first getheaders
peer.send_blocks_and_test(blocks=[block], node=node)
# Must be active now:
assert chain_tip(block.hash, status='active', branchlen=0) in node.getchaintips()
# Building a few blocks should give the same results
self.generatetoaddress(node, 10, node.get_deterministic_priv_key().address)
assert_raises_rpc_error(-25, 'time-too-old', lambda: node.submitheader(hexdata=CBlockHeader(bad_block_time).serialize().hex()))
assert_raises_rpc_error(-25, 'bad-prevblk', lambda: node.submitheader(hexdata=CBlockHeader(bad_block2).serialize().hex()))
node.submitheader(hexdata=CBlockHeader(block).serialize().hex())
node.submitheader(hexdata=CBlockHeader(bad_block_root).serialize().hex())
assert_equal(node.submitblock(hexdata=block.serialize().hex()), 'duplicate') # valid
self.test_blockmintxfee_parameter()
if __name__ == '__main__':
MiningTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/interface_bitcoin_cli.py | #!/usr/bin/env python3
# Copyright (c) 2017-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test bitcoin-cli"""
from decimal import Decimal
import re
from test_framework.blocktools import COINBASE_MATURITY
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_greater_than_or_equal,
assert_raises_process_error,
assert_raises_rpc_error,
get_auth_cookie,
)
import time
# The block reward of coinbaseoutput.nValue (50) BTC/block matures after
# COINBASE_MATURITY (100) blocks. Therefore, after mining 101 blocks we expect
# node 0 to have a balance of (BLOCKS - COINBASE_MATURITY) * 50 BTC/block.
BLOCKS = COINBASE_MATURITY + 1
BALANCE = (BLOCKS - COINBASE_MATURITY) * 50
JSON_PARSING_ERROR = 'error: Error parsing JSON: foo'
BLOCKS_VALUE_OF_ZERO = 'error: the first argument (number of blocks to generate, default: 1) must be an integer value greater than zero'
TOO_MANY_ARGS = 'error: too many arguments (maximum 2 for nblocks and maxtries)'
WALLET_NOT_LOADED = 'Requested wallet does not exist or is not loaded'
WALLET_NOT_SPECIFIED = 'Wallet file not specified'
def cli_get_info_string_to_dict(cli_get_info_string):
"""Helper method to convert human-readable -getinfo into a dictionary"""
cli_get_info = {}
lines = cli_get_info_string.splitlines()
line_idx = 0
ansi_escape = re.compile(r'(\x9B|\x1B\[)[0-?]*[ -\/]*[@-~]')
while line_idx < len(lines):
# Remove ansi colour code
line = ansi_escape.sub('', lines[line_idx])
if "Balances" in line:
# When "Balances" appears in a line, all of the following lines contain "balance: wallet" until an empty line
cli_get_info["Balances"] = {}
            while line_idx + 1 < len(lines) and lines[line_idx + 1] != '':
line_idx += 1
balance, wallet = lines[line_idx].strip().split(" ")
# Remove right justification padding
wallet = wallet.strip()
if wallet == '""':
# Set default wallet("") to empty string
wallet = ''
cli_get_info["Balances"][wallet] = balance.strip()
elif ": " in line:
key, value = line.split(": ")
if key == 'Wallet' and value == '""':
# Set default wallet("") to empty string
value = ''
if key == "Proxies" and value == "n/a":
# Set N/A to empty string to represent no proxy
value = ''
cli_get_info[key.strip()] = value.strip()
line_idx += 1
return cli_get_info
class TestBitcoinCli(BitcoinTestFramework):
def add_options(self, parser):
self.add_wallet_options(parser)
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
def skip_test_if_missing_module(self):
self.skip_if_no_cli()
def run_test(self):
"""Main test logic"""
self.generate(self.nodes[0], BLOCKS)
self.log.info("Compare responses from getblockchaininfo RPC and `bitcoin-cli getblockchaininfo`")
cli_response = self.nodes[0].cli.getblockchaininfo()
rpc_response = self.nodes[0].getblockchaininfo()
assert_equal(cli_response, rpc_response)
self.log.info("Test named arguments")
assert_equal(self.nodes[0].cli.echo(0, 1, arg3=3, arg5=5), ['0', '1', None, '3', None, '5'])
assert_raises_rpc_error(-8, "Parameter arg1 specified twice both as positional and named argument", self.nodes[0].cli.echo, 0, 1, arg1=1)
assert_raises_rpc_error(-8, "Parameter arg1 specified twice both as positional and named argument", self.nodes[0].cli.echo, 0, None, 2, arg1=1)
self.log.info("Test that later cli named arguments values silently overwrite earlier ones")
assert_equal(self.nodes[0].cli("-named", "echo", "arg0=0", "arg1=1", "arg2=2", "arg1=3").send_cli(), ['0', '3', '2'])
assert_raises_rpc_error(-8, "Parameter args specified multiple times", self.nodes[0].cli("-named", "echo", "args=[0,1,2,3]", "4", "5", "6", ).send_cli)
user, password = get_auth_cookie(self.nodes[0].datadir_path, self.chain)
self.log.info("Test -stdinrpcpass option")
assert_equal(BLOCKS, self.nodes[0].cli(f'-rpcuser={user}', '-stdinrpcpass', input=password).getblockcount())
assert_raises_process_error(1, 'Incorrect rpcuser or rpcpassword', self.nodes[0].cli(f'-rpcuser={user}', '-stdinrpcpass', input='foo').echo)
self.log.info("Test -stdin and -stdinrpcpass")
assert_equal(['foo', 'bar'], self.nodes[0].cli(f'-rpcuser={user}', '-stdin', '-stdinrpcpass', input=f'{password}\nfoo\nbar').echo())
assert_raises_process_error(1, 'Incorrect rpcuser or rpcpassword', self.nodes[0].cli(f'-rpcuser={user}', '-stdin', '-stdinrpcpass', input='foo').echo)
self.log.info("Test connecting to a non-existing server")
assert_raises_process_error(1, "Could not connect to the server", self.nodes[0].cli('-rpcport=1').echo)
self.log.info("Test connecting with non-existing RPC cookie file")
assert_raises_process_error(1, "Could not locate RPC credentials", self.nodes[0].cli('-rpccookiefile=does-not-exist', '-rpcpassword=').echo)
self.log.info("Test -getinfo with arguments fails")
assert_raises_process_error(1, "-getinfo takes no arguments", self.nodes[0].cli('-getinfo').help)
self.log.info("Test -getinfo with -color=never does not return ANSI escape codes")
assert "\u001b[0m" not in self.nodes[0].cli('-getinfo', '-color=never').send_cli()
self.log.info("Test -getinfo with -color=always returns ANSI escape codes")
assert "\u001b[0m" in self.nodes[0].cli('-getinfo', '-color=always').send_cli()
self.log.info("Test -getinfo with invalid value for -color option")
assert_raises_process_error(1, "Invalid value for -color option. Valid values: always, auto, never.", self.nodes[0].cli('-getinfo', '-color=foo').send_cli)
self.log.info("Test -getinfo returns expected network and blockchain info")
if self.is_specified_wallet_compiled():
self.import_deterministic_coinbase_privkeys()
self.nodes[0].encryptwallet(password)
cli_get_info_string = self.nodes[0].cli('-getinfo').send_cli()
cli_get_info = cli_get_info_string_to_dict(cli_get_info_string)
network_info = self.nodes[0].getnetworkinfo()
blockchain_info = self.nodes[0].getblockchaininfo()
assert_equal(int(cli_get_info['Version']), network_info['version'])
assert_equal(cli_get_info['Verification progress'], "%.4f%%" % (blockchain_info['verificationprogress'] * 100))
assert_equal(int(cli_get_info['Blocks']), blockchain_info['blocks'])
assert_equal(int(cli_get_info['Headers']), blockchain_info['headers'])
assert_equal(int(cli_get_info['Time offset (s)']), network_info['timeoffset'])
expected_network_info = f"in {network_info['connections_in']}, out {network_info['connections_out']}, total {network_info['connections']}"
assert_equal(cli_get_info["Network"], expected_network_info)
assert_equal(cli_get_info['Proxies'], network_info['networks'][0]['proxy'])
assert_equal(Decimal(cli_get_info['Difficulty']), blockchain_info['difficulty'])
assert_equal(cli_get_info['Chain'], blockchain_info['chain'])
self.log.info("Test -getinfo and bitcoin-cli return all proxies")
self.restart_node(0, extra_args=["-proxy=127.0.0.1:9050", "-i2psam=127.0.0.1:7656"])
network_info = self.nodes[0].getnetworkinfo()
cli_get_info_string = self.nodes[0].cli('-getinfo').send_cli()
cli_get_info = cli_get_info_string_to_dict(cli_get_info_string)
assert_equal(cli_get_info["Proxies"], "127.0.0.1:9050 (ipv4, ipv6, onion, cjdns), 127.0.0.1:7656 (i2p)")
if self.is_specified_wallet_compiled():
self.log.info("Test -getinfo and bitcoin-cli getwalletinfo return expected wallet info")
# Explicitly set the output type in order to have consistent tx vsize / fees
# for both legacy and descriptor wallets (disables the change address type detection algorithm)
self.restart_node(0, extra_args=["-addresstype=bech32", "-changetype=bech32"])
assert_equal(Decimal(cli_get_info['Balance']), BALANCE)
assert 'Balances' not in cli_get_info_string
wallet_info = self.nodes[0].getwalletinfo()
assert_equal(int(cli_get_info['Keypool size']), wallet_info['keypoolsize'])
assert_equal(int(cli_get_info['Unlocked until']), wallet_info['unlocked_until'])
assert_equal(Decimal(cli_get_info['Transaction fee rate (-paytxfee) (BTC/kvB)']), wallet_info['paytxfee'])
assert_equal(Decimal(cli_get_info['Min tx relay fee rate (BTC/kvB)']), network_info['relayfee'])
assert_equal(self.nodes[0].cli.getwalletinfo(), wallet_info)
# Setup to test -getinfo, -generate, and -rpcwallet= with multiple wallets.
wallets = [self.default_wallet_name, 'Encrypted', 'secret']
amounts = [BALANCE + Decimal('9.999928'), Decimal(9), Decimal(31)]
self.nodes[0].createwallet(wallet_name=wallets[1])
self.nodes[0].createwallet(wallet_name=wallets[2])
w1 = self.nodes[0].get_wallet_rpc(wallets[0])
w2 = self.nodes[0].get_wallet_rpc(wallets[1])
w3 = self.nodes[0].get_wallet_rpc(wallets[2])
rpcwallet2 = f'-rpcwallet={wallets[1]}'
rpcwallet3 = f'-rpcwallet={wallets[2]}'
w1.walletpassphrase(password, self.rpc_timeout)
w2.encryptwallet(password)
w1.sendtoaddress(w2.getnewaddress(), amounts[1])
w1.sendtoaddress(w3.getnewaddress(), amounts[2])
# Mine a block to confirm; adds a block reward (50 BTC) to the default wallet.
self.generate(self.nodes[0], 1)
self.log.info("Test -getinfo with multiple wallets and -rpcwallet returns specified wallet balance")
for i in range(len(wallets)):
cli_get_info_string = self.nodes[0].cli('-getinfo', f'-rpcwallet={wallets[i]}').send_cli()
cli_get_info = cli_get_info_string_to_dict(cli_get_info_string)
assert 'Balances' not in cli_get_info_string
assert_equal(cli_get_info["Wallet"], wallets[i])
assert_equal(Decimal(cli_get_info['Balance']), amounts[i])
self.log.info("Test -getinfo with multiple wallets and -rpcwallet=non-existing-wallet returns no balances")
cli_get_info_string = self.nodes[0].cli('-getinfo', '-rpcwallet=does-not-exist').send_cli()
assert 'Balance' not in cli_get_info_string
assert 'Balances' not in cli_get_info_string
self.log.info("Test -getinfo with multiple wallets returns all loaded wallet names and balances")
assert_equal(set(self.nodes[0].listwallets()), set(wallets))
cli_get_info_string = self.nodes[0].cli('-getinfo').send_cli()
cli_get_info = cli_get_info_string_to_dict(cli_get_info_string)
assert 'Balance' not in cli_get_info
for k, v in zip(wallets, amounts):
assert_equal(Decimal(cli_get_info['Balances'][k]), v)
# Unload the default wallet and re-verify.
self.nodes[0].unloadwallet(wallets[0])
assert wallets[0] not in self.nodes[0].listwallets()
cli_get_info_string = self.nodes[0].cli('-getinfo').send_cli()
cli_get_info = cli_get_info_string_to_dict(cli_get_info_string)
assert 'Balance' not in cli_get_info
assert 'Balances' in cli_get_info_string
for k, v in zip(wallets[1:], amounts[1:]):
assert_equal(Decimal(cli_get_info['Balances'][k]), v)
assert wallets[0] not in cli_get_info
self.log.info("Test -getinfo after unloading all wallets except a non-default one returns its balance")
self.nodes[0].unloadwallet(wallets[2])
assert_equal(self.nodes[0].listwallets(), [wallets[1]])
cli_get_info_string = self.nodes[0].cli('-getinfo').send_cli()
cli_get_info = cli_get_info_string_to_dict(cli_get_info_string)
assert 'Balances' not in cli_get_info_string
assert_equal(cli_get_info['Wallet'], wallets[1])
assert_equal(Decimal(cli_get_info['Balance']), amounts[1])
self.log.info("Test -getinfo with -rpcwallet=remaining-non-default-wallet returns only its balance")
cli_get_info_string = self.nodes[0].cli('-getinfo', rpcwallet2).send_cli()
cli_get_info = cli_get_info_string_to_dict(cli_get_info_string)
assert 'Balances' not in cli_get_info_string
assert_equal(cli_get_info['Wallet'], wallets[1])
assert_equal(Decimal(cli_get_info['Balance']), amounts[1])
self.log.info("Test -getinfo with -rpcwallet=unloaded wallet returns no balances")
cli_get_info_string = self.nodes[0].cli('-getinfo', rpcwallet3).send_cli()
cli_get_info_keys = cli_get_info_string_to_dict(cli_get_info_string)
assert 'Balance' not in cli_get_info_keys
assert 'Balances' not in cli_get_info_string
# Test bitcoin-cli -generate.
n1 = 3
n2 = 4
w2.walletpassphrase(password, self.rpc_timeout)
blocks = self.nodes[0].getblockcount()
self.log.info('Test -generate with no args')
generate = self.nodes[0].cli('-generate').send_cli()
assert_equal(set(generate.keys()), {'address', 'blocks'})
assert_equal(len(generate["blocks"]), 1)
assert_equal(self.nodes[0].getblockcount(), blocks + 1)
self.log.info('Test -generate with bad args')
assert_raises_process_error(1, JSON_PARSING_ERROR, self.nodes[0].cli('-generate', 'foo').echo)
assert_raises_process_error(1, BLOCKS_VALUE_OF_ZERO, self.nodes[0].cli('-generate', 0).echo)
assert_raises_process_error(1, TOO_MANY_ARGS, self.nodes[0].cli('-generate', 1, 2, 3).echo)
self.log.info('Test -generate with nblocks')
generate = self.nodes[0].cli('-generate', n1).send_cli()
assert_equal(set(generate.keys()), {'address', 'blocks'})
assert_equal(len(generate["blocks"]), n1)
assert_equal(self.nodes[0].getblockcount(), blocks + 1 + n1)
self.log.info('Test -generate with nblocks and maxtries')
generate = self.nodes[0].cli('-generate', n2, 1000000).send_cli()
assert_equal(set(generate.keys()), {'address', 'blocks'})
assert_equal(len(generate["blocks"]), n2)
assert_equal(self.nodes[0].getblockcount(), blocks + 1 + n1 + n2)
self.log.info('Test -generate -rpcwallet in single-wallet mode')
generate = self.nodes[0].cli(rpcwallet2, '-generate').send_cli()
assert_equal(set(generate.keys()), {'address', 'blocks'})
assert_equal(len(generate["blocks"]), 1)
assert_equal(self.nodes[0].getblockcount(), blocks + 2 + n1 + n2)
self.log.info('Test -generate -rpcwallet=unloaded wallet raises RPC error')
assert_raises_rpc_error(-18, WALLET_NOT_LOADED, self.nodes[0].cli(rpcwallet3, '-generate').echo)
assert_raises_rpc_error(-18, WALLET_NOT_LOADED, self.nodes[0].cli(rpcwallet3, '-generate', 'foo').echo)
assert_raises_rpc_error(-18, WALLET_NOT_LOADED, self.nodes[0].cli(rpcwallet3, '-generate', 0).echo)
assert_raises_rpc_error(-18, WALLET_NOT_LOADED, self.nodes[0].cli(rpcwallet3, '-generate', 1, 2, 3).echo)
# Test bitcoin-cli -generate with -rpcwallet in multiwallet mode.
self.nodes[0].loadwallet(wallets[2])
n3 = 4
n4 = 10
blocks = self.nodes[0].getblockcount()
self.log.info('Test -generate -rpcwallet with no args')
generate = self.nodes[0].cli(rpcwallet2, '-generate').send_cli()
assert_equal(set(generate.keys()), {'address', 'blocks'})
assert_equal(len(generate["blocks"]), 1)
assert_equal(self.nodes[0].getblockcount(), blocks + 1)
self.log.info('Test -generate -rpcwallet with bad args')
assert_raises_process_error(1, JSON_PARSING_ERROR, self.nodes[0].cli(rpcwallet2, '-generate', 'foo').echo)
assert_raises_process_error(1, BLOCKS_VALUE_OF_ZERO, self.nodes[0].cli(rpcwallet2, '-generate', 0).echo)
assert_raises_process_error(1, TOO_MANY_ARGS, self.nodes[0].cli(rpcwallet2, '-generate', 1, 2, 3).echo)
self.log.info('Test -generate -rpcwallet with nblocks')
generate = self.nodes[0].cli(rpcwallet2, '-generate', n3).send_cli()
assert_equal(set(generate.keys()), {'address', 'blocks'})
assert_equal(len(generate["blocks"]), n3)
assert_equal(self.nodes[0].getblockcount(), blocks + 1 + n3)
self.log.info('Test -generate -rpcwallet with nblocks and maxtries')
generate = self.nodes[0].cli(rpcwallet2, '-generate', n4, 1000000).send_cli()
assert_equal(set(generate.keys()), {'address', 'blocks'})
assert_equal(len(generate["blocks"]), n4)
assert_equal(self.nodes[0].getblockcount(), blocks + 1 + n3 + n4)
self.log.info('Test -generate without -rpcwallet in multiwallet mode raises RPC error')
assert_raises_rpc_error(-19, WALLET_NOT_SPECIFIED, self.nodes[0].cli('-generate').echo)
assert_raises_rpc_error(-19, WALLET_NOT_SPECIFIED, self.nodes[0].cli('-generate', 'foo').echo)
assert_raises_rpc_error(-19, WALLET_NOT_SPECIFIED, self.nodes[0].cli('-generate', 0).echo)
assert_raises_rpc_error(-19, WALLET_NOT_SPECIFIED, self.nodes[0].cli('-generate', 1, 2, 3).echo)
else:
self.log.info("*** Wallet not compiled; cli getwalletinfo and -getinfo wallet tests skipped")
self.generate(self.nodes[0], 25) # maintain block parity with the wallet_compiled conditional branch
self.log.info("Test -version with node stopped")
self.stop_node(0)
cli_response = self.nodes[0].cli('-version').send_cli()
assert f"{self.config['environment']['PACKAGE_NAME']} RPC client version" in cli_response
self.log.info("Test -rpcwait option successfully waits for RPC connection")
self.nodes[0].start() # start node without RPC connection
self.nodes[0].wait_for_cookie_credentials() # ensure cookie file is available to avoid race condition
blocks = self.nodes[0].cli('-rpcwait').send_cli('getblockcount')
self.nodes[0].wait_for_rpc_connection()
assert_equal(blocks, BLOCKS + 25)
self.log.info("Test -rpcwait option waits at most -rpcwaittimeout seconds for startup")
self.stop_node(0) # stop the node so we time out
start_time = time.time()
assert_raises_process_error(1, "Could not connect to the server", self.nodes[0].cli('-rpcwait', '-rpcwaittimeout=5').echo)
assert_greater_than_or_equal(time.time(), start_time + 5)
if __name__ == '__main__':
TestBitcoinCli().main()
| 0 |
bitcoin/test | bitcoin/test/functional/wallet_upgradewallet.py | #!/usr/bin/env python3
# Copyright (c) 2018-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""upgradewallet RPC functional test
Test the upgradewallet RPC. Requires previous release binaries; see
test/README.md for how to download them. Only v0.15.2 and v0.16.3 are
needed by this test.
"""
import os
import shutil
import struct
from io import BytesIO
from test_framework.blocktools import COINBASE_MATURITY
from test_framework.bdb import dump_bdb_kv
from test_framework.messages import deser_compact_size, deser_string
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_is_hex_string,
sha256sum_file,
)
UPGRADED_KEYMETA_VERSION = 12
def deser_keymeta(f):
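# Serialized CKeyMetadata layout (inferred from the fields parsed below):
# version (uint32), creation time (int64), HD keypath string, HD seed id
# (20 bytes), key origin fingerprint (4 bytes); version 12 additionally
# stores the derivation path (compact-size count of uint32 indexes) and a
# has_key_origin flag byte.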
ver, create_time = struct.unpack('<Iq', f.read(12))
kp_str = deser_string(f)
seed_id = f.read(20)
fpr = f.read(4)
path_len = 0
path = []
has_key_orig = False
if ver == UPGRADED_KEYMETA_VERSION:
path_len = deser_compact_size(f)
for i in range(0, path_len):
path.append(struct.unpack('<I', f.read(4))[0])
has_key_orig = f.read(1) != b'\x00'  # any non-empty bytes object is truthy, so compare the flag byte explicitly
return ver, create_time, kp_str, seed_id, fpr, path_len, path, has_key_orig
class UpgradeWalletTest(BitcoinTestFramework):
def add_options(self, parser):
self.add_wallet_options(parser, descriptors=False)
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 3
self.extra_args = [
["-addresstype=bech32", "-keypool=2"], # current wallet version
["-usehd=1", "-keypool=2"], # v0.16.3 wallet
["-usehd=0", "-keypool=2"] # v0.15.2 wallet
]
self.wallet_names = [self.default_wallet_name, None, None]
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
self.skip_if_no_bdb()
self.skip_if_no_previous_releases()
def setup_network(self):
self.setup_nodes()
def setup_nodes(self):
self.add_nodes(self.num_nodes, extra_args=self.extra_args, versions=[
None,
160300,
150200,
])
self.start_nodes()
self.import_deterministic_coinbase_privkeys()
def dumb_sync_blocks(self):
"""
Little helper to sync older wallets.
Notice that v0.15.2's regtest is hardforked, so there is
no sync for it.
v0.15.2 is only being used to test for version upgrade
and master hash key presence.
v0.16.3 is being used to test for version upgrade and balances.
Further info: https://github.com/bitcoin/bitcoin/pull/18774#discussion_r416967844
"""
node_from = self.nodes[0]
v16_3_node = self.nodes[1]
to_height = node_from.getblockcount()
height = self.nodes[1].getblockcount()
for i in range(height, to_height+1):
b = node_from.getblock(blockhash=node_from.getblockhash(i), verbose=0)
v16_3_node.submitblock(b)
assert_equal(v16_3_node.getblockcount(), to_height)
def test_upgradewallet(self, wallet, previous_version, requested_version=None, expected_version=None):
unchanged = expected_version == previous_version
new_version = previous_version if unchanged else expected_version if expected_version else requested_version
old_wallet_info = wallet.getwalletinfo()
assert_equal(old_wallet_info["walletversion"], previous_version)
assert_equal(wallet.upgradewallet(requested_version),
{
"wallet_name": old_wallet_info["walletname"],
"previous_version": previous_version,
"current_version": new_version,
"result": "Already at latest version. Wallet version unchanged." if unchanged else "Wallet upgraded successfully from version {} to version {}.".format(previous_version, new_version),
}
)
assert_equal(wallet.getwalletinfo()["walletversion"], new_version)
def test_upgradewallet_error(self, wallet, previous_version, requested_version, msg):
assert_equal(wallet.getwalletinfo()["walletversion"], previous_version)
assert_equal(wallet.upgradewallet(requested_version),
{
"wallet_name": "",
"previous_version": previous_version,
"current_version": previous_version,
"error": msg,
}
)
assert_equal(wallet.getwalletinfo()["walletversion"], previous_version)
def run_test(self):
self.generatetoaddress(self.nodes[0], COINBASE_MATURITY + 1, self.nodes[0].getnewaddress(), sync_fun=lambda: self.dumb_sync_blocks())
# Sanity check the test framework:
res = self.nodes[0].getblockchaininfo()
assert_equal(res['blocks'], COINBASE_MATURITY + 1)
node_master = self.nodes[0]
v16_3_node = self.nodes[1]
v15_2_node = self.nodes[2]
# Send coins to old wallets for later conversion checks.
v16_3_wallet = v16_3_node.get_wallet_rpc('wallet.dat')
v16_3_address = v16_3_wallet.getnewaddress()
self.generatetoaddress(node_master, COINBASE_MATURITY + 1, v16_3_address, sync_fun=lambda: self.dumb_sync_blocks())
v16_3_balance = v16_3_wallet.getbalance()
self.log.info("Test upgradewallet RPC...")
# Prepare for copying of the older wallet
node_master_wallet_dir = node_master.wallets_path / self.default_wallet_name
node_master_wallet = node_master_wallet_dir / self.default_wallet_name / self.wallet_data_filename
v16_3_wallet = v16_3_node.wallets_path / "wallet.dat"
v15_2_wallet = v15_2_node.chain_path / "wallet.dat"
split_hd_wallet = v15_2_node.chain_path / "splithd"
self.stop_nodes()
# Make split hd wallet
self.start_node(2, ['-usehd=1', '-keypool=2', '-wallet=splithd'])
self.stop_node(2)
def copy_v16():
node_master.get_wallet_rpc(self.default_wallet_name).unloadwallet()
# Copy the 0.16.3 wallet to the last Bitcoin Core version and open it:
shutil.rmtree(node_master_wallet_dir)
os.mkdir(node_master_wallet_dir)
shutil.copy(
v16_3_wallet,
node_master_wallet_dir
)
node_master.loadwallet(self.default_wallet_name)
def copy_non_hd():
node_master.get_wallet_rpc(self.default_wallet_name).unloadwallet()
# Copy the 0.15.2 non hd wallet to the last Bitcoin Core version and open it:
shutil.rmtree(node_master_wallet_dir)
os.mkdir(node_master_wallet_dir)
shutil.copy(
v15_2_wallet,
node_master_wallet_dir
)
node_master.loadwallet(self.default_wallet_name)
def copy_split_hd():
node_master.get_wallet_rpc(self.default_wallet_name).unloadwallet()
# Copy the 0.15.2 split hd wallet to the last Bitcoin Core version and open it:
shutil.rmtree(node_master_wallet_dir)
os.mkdir(node_master_wallet_dir)
shutil.copy(
split_hd_wallet,
os.path.join(node_master_wallet_dir, 'wallet.dat')
)
node_master.loadwallet(self.default_wallet_name)
self.restart_node(0)
copy_v16()
wallet = node_master.get_wallet_rpc(self.default_wallet_name)
self.log.info("Test upgradewallet without a version argument")
self.test_upgradewallet(wallet, previous_version=159900, expected_version=169900)
# wallet should still contain the same balance
assert_equal(wallet.getbalance(), v16_3_balance)
copy_non_hd()
wallet = node_master.get_wallet_rpc(self.default_wallet_name)
# should have no master key hash before conversion
assert_equal('hdseedid' in wallet.getwalletinfo(), False)
self.log.info("Test upgradewallet with explicit version number")
self.test_upgradewallet(wallet, previous_version=60000, requested_version=169900)
# after conversion master key hash should be present
assert_is_hex_string(wallet.getwalletinfo()['hdseedid'])
self.log.info("Intermediary versions don't effect anything")
copy_non_hd()
# Wallet starts with 60000
assert_equal(60000, wallet.getwalletinfo()['walletversion'])
wallet.unloadwallet()
before_checksum = sha256sum_file(node_master_wallet)
node_master.loadwallet('')
# Test an "upgrade" from 60000 to 129999 has no effect, as the next version is 130000
self.test_upgradewallet(wallet, previous_version=60000, requested_version=129999, expected_version=60000)
wallet.unloadwallet()
assert_equal(before_checksum, sha256sum_file(node_master_wallet))
node_master.loadwallet('')
self.log.info('Wallets cannot be downgraded')
copy_non_hd()
self.test_upgradewallet_error(wallet, previous_version=60000, requested_version=40000,
msg="Cannot downgrade wallet from version 60000 to version 40000. Wallet version unchanged.")
wallet.unloadwallet()
assert_equal(before_checksum, sha256sum_file(node_master_wallet))
node_master.loadwallet('')
self.log.info('Can upgrade to HD')
# Inspect the old wallet and make sure there is no hdchain
orig_kvs = dump_bdb_kv(node_master_wallet)
assert b'\x07hdchain' not in orig_kvs
# Upgrade to HD, no split
self.test_upgradewallet(wallet, previous_version=60000, requested_version=130000)
# Check that there is now a hd chain and it is version 1, no internal chain counter
new_kvs = dump_bdb_kv(node_master_wallet)
assert b'\x07hdchain' in new_kvs
hd_chain = new_kvs[b'\x07hdchain']
assert_equal(28, len(hd_chain))
hd_chain_version, external_counter, seed_id = struct.unpack('<iI20s', hd_chain)
assert_equal(1, hd_chain_version)
seed_id = bytearray(seed_id)
seed_id.reverse()
# New keys (including change) should be HD (the two old keys have been flushed)
info = wallet.getaddressinfo(wallet.getnewaddress())
assert_equal(seed_id.hex(), info['hdseedid'])
assert_equal('m/0\'/0\'/0\'', info['hdkeypath'])
prev_seed_id = info['hdseedid']
# Change key should be HD and from the same keypool
info = wallet.getaddressinfo(wallet.getrawchangeaddress())
assert_equal(prev_seed_id, info['hdseedid'])
assert_equal('m/0\'/0\'/1\'', info['hdkeypath'])
self.log.info('Cannot upgrade to HD Split, needs Pre Split Keypool')
for version in [139900, 159900, 169899]:
self.test_upgradewallet_error(wallet, previous_version=130000, requested_version=version,
msg="Cannot upgrade a non HD split wallet from version {} to version {} without upgrading to "
"support pre-split keypool. Please use version 169900 or no version specified.".format(130000, version))
self.log.info('Upgrade HD to HD chain split')
self.test_upgradewallet(wallet, previous_version=130000, requested_version=169900)
# Check that the hdchain updated correctly
new_kvs = dump_bdb_kv(node_master_wallet)
hd_chain = new_kvs[b'\x07hdchain']
assert_equal(32, len(hd_chain))
hd_chain_version, external_counter, seed_id, internal_counter = struct.unpack('<iI20sI', hd_chain)
assert_equal(2, hd_chain_version)
assert_equal(0, internal_counter)
seed_id = bytearray(seed_id)
seed_id.reverse()
assert_equal(seed_id.hex(), prev_seed_id)
# Next change address is the same keypool
info = wallet.getaddressinfo(wallet.getrawchangeaddress())
assert_equal(prev_seed_id, info['hdseedid'])
assert_equal('m/0\'/0\'/2\'', info['hdkeypath'])
# Next change address is the new keypool
info = wallet.getaddressinfo(wallet.getrawchangeaddress())
assert_equal(prev_seed_id, info['hdseedid'])
assert_equal('m/0\'/1\'/0\'', info['hdkeypath'])
# External addresses use the same keypool
info = wallet.getaddressinfo(wallet.getnewaddress())
assert_equal(prev_seed_id, info['hdseedid'])
assert_equal('m/0\'/0\'/3\'', info['hdkeypath'])
self.log.info('Upgrade non-HD to HD chain split')
copy_non_hd()
self.test_upgradewallet(wallet, previous_version=60000, requested_version=169900)
# Check that the hdchain updated correctly
new_kvs = dump_bdb_kv(node_master_wallet)
hd_chain = new_kvs[b'\x07hdchain']
assert_equal(32, len(hd_chain))
hd_chain_version, external_counter, seed_id, internal_counter = struct.unpack('<iI20sI', hd_chain)
assert_equal(2, hd_chain_version)
assert_equal(2, internal_counter)
# The next addresses are HD and should be on different HD chains (the one remaining key in each pool should have been flushed)
info = wallet.getaddressinfo(wallet.getnewaddress())
ext_id = info['hdseedid']
assert_equal('m/0\'/0\'/0\'', info['hdkeypath'])
info = wallet.getaddressinfo(wallet.getrawchangeaddress())
assert_equal(ext_id, info['hdseedid'])
assert_equal('m/0\'/1\'/0\'', info['hdkeypath'])
self.log.info('KeyMetadata should upgrade when loading into master')
copy_v16()
old_kvs = dump_bdb_kv(v16_3_wallet)
new_kvs = dump_bdb_kv(node_master_wallet)
for k, old_v in old_kvs.items():
if k.startswith(b'\x07keymeta'):
new_ver, new_create_time, new_kp_str, new_seed_id, new_fpr, new_path_len, new_path, new_has_key_orig = deser_keymeta(BytesIO(new_kvs[k]))
old_ver, old_create_time, old_kp_str, old_seed_id, old_fpr, old_path_len, old_path, old_has_key_orig = deser_keymeta(BytesIO(old_v))
assert_equal(10, old_ver)
if old_kp_str == b"": # imported things that don't have keymeta (i.e. imported coinbase privkeys) won't be upgraded
assert_equal(new_kvs[k], old_v)
continue
assert_equal(12, new_ver)
assert_equal(new_create_time, old_create_time)
assert_equal(new_kp_str, old_kp_str)
assert_equal(new_seed_id, old_seed_id)
assert_equal(0, old_path_len)
assert_equal(new_path_len, len(new_path))
assert_equal([], old_path)
assert_equal(False, old_has_key_orig)
assert_equal(True, new_has_key_orig)
# Check that the path is right
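# A trailing apostrophe in the keypath string marks a hardened index, encoded by setting the high bit (0x80000000)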
built_path = []
for s in new_kp_str.decode().split('/')[1:]:
h = 0
if s[-1] == '\'':
s = s[:-1]
h = 0x80000000
p = int(s) | h
built_path.append(p)
assert_equal(new_path, built_path)
self.log.info('Upgrading to NO_DEFAULT_KEY should not remove the defaultkey')
copy_split_hd()
# Check the wallet has a default key initially
old_kvs = dump_bdb_kv(node_master_wallet)
defaultkey = old_kvs[b'\x0adefaultkey']
self.log.info("Upgrade the wallet. Should still have the same default key.")
self.test_upgradewallet(wallet, previous_version=139900, requested_version=159900)
new_kvs = dump_bdb_kv(node_master_wallet)
up_defaultkey = new_kvs[b'\x0adefaultkey']
assert_equal(defaultkey, up_defaultkey)
# 0.16.3 doesn't have a default key
v16_3_kvs = dump_bdb_kv(v16_3_wallet)
assert b'\x0adefaultkey' not in v16_3_kvs
if self.is_sqlite_compiled():
self.log.info("Checking that descriptor wallets do nothing, successfully")
self.nodes[0].createwallet(wallet_name="desc_upgrade", descriptors=True)
desc_wallet = self.nodes[0].get_wallet_rpc("desc_upgrade")
self.test_upgradewallet(desc_wallet, previous_version=169900, expected_version=169900)
self.log.info("Checking that descriptor wallets without privkeys do nothing, successfully")
self.nodes[0].createwallet(wallet_name="desc_upgrade_nopriv", descriptors=True, disable_private_keys=True)
desc_wallet = self.nodes[0].get_wallet_rpc("desc_upgrade_nopriv")
self.test_upgradewallet(desc_wallet, previous_version=169900, expected_version=169900)
if self.is_bdb_compiled():
self.log.info("Upgrading a wallet with private keys disabled")
self.nodes[0].createwallet(wallet_name="privkeys_disabled_upgrade", disable_private_keys=True, descriptors=False)
disabled_wallet = self.nodes[0].get_wallet_rpc("privkeys_disabled_upgrade")
self.test_upgradewallet(disabled_wallet, previous_version=169900, expected_version=169900)
if __name__ == '__main__':
UpgradeWalletTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/rpc_net.py | #!/usr/bin/env python3
# Copyright (c) 2017-present The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test RPC calls related to net.
Tests correspond to code in rpc/net.cpp.
"""
from decimal import Decimal
from itertools import product
import platform
import time
import test_framework.messages
from test_framework.netutil import ADDRMAN_NEW_BUCKET_COUNT, ADDRMAN_TRIED_BUCKET_COUNT, ADDRMAN_BUCKET_SIZE
from test_framework.p2p import (
P2PInterface,
P2P_SERVICES,
)
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_approx,
assert_equal,
assert_greater_than,
assert_raises_rpc_error,
p2p_port,
)
from test_framework.wallet import MiniWallet
def assert_net_servicesnames(servicesflag, servicenames):
"""Utility that checks if all flags are correctly decoded in
`getpeerinfo` and `getnetworkinfo`.
:param servicesflag: The services as an integer.
:param servicenames: The list of decoded services names, as strings.
"""
servicesflag_generated = 0
for servicename in servicenames:
servicesflag_generated |= getattr(test_framework.messages, 'NODE_' + servicename)
assert servicesflag_generated == servicesflag
class NetTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 2
self.extra_args = [["-minrelaytxfee=0.00001000"], ["-minrelaytxfee=0.00000500"]]
self.supports_cli = False
def run_test(self):
# We need miniwallet to make a transaction
self.wallet = MiniWallet(self.nodes[0])
# By default, the test framework sets up an addnode connection from
# node 1 --> node0. By connecting node0 --> node 1, we're left with
# the two nodes being connected both ways.
# Topology will look like: node0 <--> node1
self.connect_nodes(0, 1)
self.sync_all()
self.test_connection_count()
self.test_getpeerinfo()
self.test_getnettotals()
self.test_getnetworkinfo()
self.test_addnode_getaddednodeinfo()
self.test_service_flags()
self.test_getnodeaddresses()
self.test_addpeeraddress()
self.test_sendmsgtopeer()
self.test_getaddrmaninfo()
self.test_getrawaddrman()
def test_connection_count(self):
self.log.info("Test getconnectioncount")
# After using `connect_nodes` to connect nodes 0 and 1 to each other.
assert_equal(self.nodes[0].getconnectioncount(), 2)
def test_getpeerinfo(self):
self.log.info("Test getpeerinfo")
# Create a few getpeerinfo last_block/last_transaction values.
self.wallet.send_self_transfer(from_node=self.nodes[0]) # Make a transaction so we can see it in the getpeerinfo results
self.generate(self.nodes[1], 1)
time_now = int(time.time())
peer_info = [x.getpeerinfo() for x in self.nodes]
# Verify last_block and last_transaction keys/values.
for node, peer, field in product(range(self.num_nodes), range(2), ['last_block', 'last_transaction']):
assert field in peer_info[node][peer].keys()
if peer_info[node][peer][field] != 0:
assert_approx(peer_info[node][peer][field], time_now, vspan=60)
# check both sides of bidirectional connection between nodes
# the address bound to on one side will be the source address for the other node
assert_equal(peer_info[0][0]['addrbind'], peer_info[1][0]['addr'])
assert_equal(peer_info[1][0]['addrbind'], peer_info[0][0]['addr'])
assert_equal(peer_info[0][0]['minfeefilter'], Decimal("0.00000500"))
assert_equal(peer_info[1][0]['minfeefilter'], Decimal("0.00001000"))
# check the `servicesnames` field
for info in peer_info:
assert_net_servicesnames(int(info[0]["services"], 16), info[0]["servicesnames"])
assert_equal(peer_info[0][0]['connection_type'], 'inbound')
assert_equal(peer_info[0][1]['connection_type'], 'manual')
assert_equal(peer_info[1][0]['connection_type'], 'manual')
assert_equal(peer_info[1][1]['connection_type'], 'inbound')
# Check dynamically generated networks list in getpeerinfo help output.
assert "(ipv4, ipv6, onion, i2p, cjdns, not_publicly_routable)" in self.nodes[0].help("getpeerinfo")
self.log.info("Check getpeerinfo output before a version message was sent")
no_version_peer_id = 2
no_version_peer_conntime = int(time.time())
self.nodes[0].setmocktime(no_version_peer_conntime)
with self.nodes[0].wait_for_new_peer():
no_version_peer = self.nodes[0].add_p2p_connection(P2PInterface(), send_version=False, wait_for_verack=False)
self.nodes[0].setmocktime(0)
peer_info = self.nodes[0].getpeerinfo()[no_version_peer_id]
peer_info.pop("addr")
peer_info.pop("addrbind")
assert_equal(
peer_info,
{
"addr_processed": 0,
"addr_rate_limited": 0,
"addr_relay_enabled": False,
"bip152_hb_from": False,
"bip152_hb_to": False,
"bytesrecv": 0,
"bytesrecv_per_msg": {},
"bytessent": 0,
"bytessent_per_msg": {},
"connection_type": "inbound",
"conntime": no_version_peer_conntime,
"id": no_version_peer_id,
"inbound": True,
"inflight": [],
"last_block": 0,
"last_transaction": 0,
"lastrecv": 0,
"lastsend": 0,
"minfeefilter": Decimal("0E-8"),
"network": "not_publicly_routable",
"permissions": [],
"presynced_headers": -1,
"relaytxes": False,
"services": "0000000000000000",
"servicesnames": [],
"session_id": "",
"startingheight": -1,
"subver": "",
"synced_blocks": -1,
"synced_headers": -1,
"timeoffset": 0,
"transport_protocol_type": "v1" if not self.options.v2transport else "detecting",
"version": 0,
},
)
no_version_peer.peer_disconnect()
self.wait_until(lambda: len(self.nodes[0].getpeerinfo()) == 2)
def test_getnettotals(self):
self.log.info("Test getnettotals")
# Test getnettotals and getpeerinfo by doing a ping. The bytes
# sent/received should increase by at least the size of one ping
# and one pong. Both have a payload size of 8 bytes, but the total
# size depends on the used p2p version:
# - p2p v1: 24 bytes (header) + 8 bytes (payload) = 32 bytes
# - p2p v2: 21 bytes (header/tag with short-id) + 8 bytes (payload) = 29 bytes
ping_size = 32 if not self.options.v2transport else 29
net_totals_before = self.nodes[0].getnettotals()
peer_info_before = self.nodes[0].getpeerinfo()
self.nodes[0].ping()
self.wait_until(lambda: (self.nodes[0].getnettotals()['totalbytessent'] >= net_totals_before['totalbytessent'] + ping_size * 2), timeout=1)
self.wait_until(lambda: (self.nodes[0].getnettotals()['totalbytesrecv'] >= net_totals_before['totalbytesrecv'] + ping_size * 2), timeout=1)
for peer_before in peer_info_before:
peer_after = lambda: next(p for p in self.nodes[0].getpeerinfo() if p['id'] == peer_before['id'])
self.wait_until(lambda: peer_after()['bytesrecv_per_msg'].get('pong', 0) >= peer_before['bytesrecv_per_msg'].get('pong', 0) + ping_size, timeout=1)
self.wait_until(lambda: peer_after()['bytessent_per_msg'].get('ping', 0) >= peer_before['bytessent_per_msg'].get('ping', 0) + ping_size, timeout=1)
def test_getnetworkinfo(self):
self.log.info("Test getnetworkinfo")
info = self.nodes[0].getnetworkinfo()
assert_equal(info['networkactive'], True)
assert_equal(info['connections'], 2)
assert_equal(info['connections_in'], 1)
assert_equal(info['connections_out'], 1)
with self.nodes[0].assert_debug_log(expected_msgs=['SetNetworkActive: false\n']):
self.nodes[0].setnetworkactive(state=False)
assert_equal(self.nodes[0].getnetworkinfo()['networkactive'], False)
# Wait a bit for all sockets to close
for n in self.nodes:
self.wait_until(lambda: n.getnetworkinfo()['connections'] == 0, timeout=3)
with self.nodes[0].assert_debug_log(expected_msgs=['SetNetworkActive: true\n']):
self.nodes[0].setnetworkactive(state=True)
# Connect nodes both ways.
self.connect_nodes(0, 1)
self.connect_nodes(1, 0)
info = self.nodes[0].getnetworkinfo()
assert_equal(info['networkactive'], True)
assert_equal(info['connections'], 2)
assert_equal(info['connections_in'], 1)
assert_equal(info['connections_out'], 1)
# check the `servicesnames` field
network_info = [node.getnetworkinfo() for node in self.nodes]
for info in network_info:
assert_net_servicesnames(int(info["localservices"], 0x10), info["localservicesnames"])
# Check dynamically generated networks list in getnetworkinfo help output.
assert "(ipv4, ipv6, onion, i2p, cjdns)" in self.nodes[0].help("getnetworkinfo")
def test_addnode_getaddednodeinfo(self):
self.log.info("Test addnode and getaddednodeinfo")
assert_equal(self.nodes[0].getaddednodeinfo(), [])
# add a node (node2) to node0
ip_port = "127.0.0.1:{}".format(p2p_port(2))
self.nodes[0].addnode(node=ip_port, command='add')
# try to add an equivalent ip
# (note that OpenBSD doesn't support the IPv4 shorthand notation with omitted zero-bytes)
if platform.system() != "OpenBSD":
ip_port2 = "127.1:{}".format(p2p_port(2))
assert_raises_rpc_error(-23, "Node already added", self.nodes[0].addnode, node=ip_port2, command='add')
# check that the node has indeed been added
added_nodes = self.nodes[0].getaddednodeinfo()
assert_equal(len(added_nodes), 1)
assert_equal(added_nodes[0]['addednode'], ip_port)
# check that node cannot be added again
assert_raises_rpc_error(-23, "Node already added", self.nodes[0].addnode, node=ip_port, command='add')
# check that node can be removed
self.nodes[0].addnode(node=ip_port, command='remove')
assert_equal(self.nodes[0].getaddednodeinfo(), [])
# check that an invalid command returns an error
assert_raises_rpc_error(-1, 'addnode "node" "command"', self.nodes[0].addnode, node=ip_port, command='abc')
# check that trying to remove the node again returns an error
assert_raises_rpc_error(-24, "Node could not be removed", self.nodes[0].addnode, node=ip_port, command='remove')
# check that a non-existent node returns an error
assert_raises_rpc_error(-24, "Node has not been added", self.nodes[0].getaddednodeinfo, '1.1.1.1')
def test_service_flags(self):
self.log.info("Test service flags")
self.nodes[0].add_p2p_connection(P2PInterface(), services=(1 << 4) | (1 << 63))
assert_equal(['UNKNOWN[2^4]', 'UNKNOWN[2^63]'], self.nodes[0].getpeerinfo()[-1]['servicesnames'])
self.nodes[0].disconnect_p2ps()
def test_getnodeaddresses(self):
self.log.info("Test getnodeaddresses")
self.nodes[0].add_p2p_connection(P2PInterface())
# Add an IPv6 address to the address manager.
ipv6_addr = "1233:3432:2434:2343:3234:2345:6546:4534"
self.nodes[0].addpeeraddress(address=ipv6_addr, port=8333)
# Add 10,000 IPv4 addresses to the address manager. Due to the way bucket
# and bucket positions are calculated, some of these addresses will collide.
imported_addrs = []
for i in range(10000):
first_octet = i >> 8
second_octet = i % 256
a = f"{first_octet}.{second_octet}.1.1"
imported_addrs.append(a)
self.nodes[0].addpeeraddress(a, 8333)
# Fetch the addresses via the RPC and test the results.
assert_equal(len(self.nodes[0].getnodeaddresses()), 1) # default count is 1
assert_equal(len(self.nodes[0].getnodeaddresses(count=2)), 2)
assert_equal(len(self.nodes[0].getnodeaddresses(network="ipv4", count=8)), 8)
# Maximum possible addresses in AddrMan is 10000. The actual number will
# usually be less due to bucket and bucket position collisions.
node_addresses = self.nodes[0].getnodeaddresses(0, "ipv4")
assert_greater_than(len(node_addresses), 5000)
assert_greater_than(10000, len(node_addresses))
for a in node_addresses:
assert_greater_than(a["time"], 1527811200) # 1st June 2018
assert_equal(a["services"], P2P_SERVICES)
assert a["address"] in imported_addrs
assert_equal(a["port"], 8333)
assert_equal(a["network"], "ipv4")
# Test the IPv6 address.
res = self.nodes[0].getnodeaddresses(0, "ipv6")
assert_equal(len(res), 1)
assert_equal(res[0]["address"], ipv6_addr)
assert_equal(res[0]["network"], "ipv6")
assert_equal(res[0]["port"], 8333)
assert_equal(res[0]["services"], P2P_SERVICES)
# Test for the absence of onion, I2P and CJDNS addresses.
for network in ["onion", "i2p", "cjdns"]:
assert_equal(self.nodes[0].getnodeaddresses(0, network), [])
# Test invalid arguments.
assert_raises_rpc_error(-8, "Address count out of range", self.nodes[0].getnodeaddresses, -1)
assert_raises_rpc_error(-8, "Network not recognized: Foo", self.nodes[0].getnodeaddresses, 1, "Foo")
def test_addpeeraddress(self):
"""RPC addpeeraddress sets the source address equal to the destination address.
If an address with the same /16 as an existing new entry is passed, it will be
placed in the same new bucket and have a 1/64 chance of the bucket positions
colliding (depending on the value of nKey in the addrman), in which case the
new address won't be added. The probability of collision can be reduced to
1/2^16 = 1/65536 by using an address from a different /16. We avoid this here
by first testing adding a tried table entry before testing adding a new table one.
"""
self.log.info("Test addpeeraddress")
self.restart_node(1, ["-checkaddrman=1"])
node = self.nodes[1]
self.log.debug("Test that addpeerinfo is a hidden RPC")
# It is hidden from general help, but its detailed help may be called directly.
assert "addpeerinfo" not in node.help()
assert "addpeerinfo" in node.help("addpeerinfo")
self.log.debug("Test that adding an empty address fails")
assert_equal(node.addpeeraddress(address="", port=8333), {"success": False})
assert_equal(node.getnodeaddresses(count=0), [])
self.log.debug("Test that non-bool tried fails")
assert_raises_rpc_error(-3, "JSON value of type string is not of expected type bool", self.nodes[0].addpeeraddress, address="1.2.3.4", tried="True", port=1234)
self.log.debug("Test that adding an address with invalid port fails")
assert_raises_rpc_error(-1, "JSON integer out of range", self.nodes[0].addpeeraddress, address="1.2.3.4", port=-1)
assert_raises_rpc_error(-1, "JSON integer out of range", self.nodes[0].addpeeraddress, address="1.2.3.4", port=65536)
self.log.debug("Test that adding a valid address to the tried table succeeds")
self.addr_time = int(time.time())
node.setmocktime(self.addr_time)
assert_equal(node.addpeeraddress(address="1.2.3.4", tried=True, port=8333), {"success": True})
with node.assert_debug_log(expected_msgs=["CheckAddrman: new 0, tried 1, total 1 started"]):
addrs = node.getnodeaddresses(count=0) # getnodeaddresses re-runs the addrman checks
assert_equal(len(addrs), 1)
assert_equal(addrs[0]["address"], "1.2.3.4")
assert_equal(addrs[0]["port"], 8333)
self.log.debug("Test that adding an already-present tried address to the new and tried tables fails")
for value in [True, False]:
assert_equal(node.addpeeraddress(address="1.2.3.4", tried=value, port=8333), {"success": False})
assert_equal(len(node.getnodeaddresses(count=0)), 1)
self.log.debug("Test that adding a second address, this time to the new table, succeeds")
assert_equal(node.addpeeraddress(address="2.0.0.0", port=8333), {"success": True})
with node.assert_debug_log(expected_msgs=["CheckAddrman: new 1, tried 1, total 2 started"]):
addrs = node.getnodeaddresses(count=0) # getnodeaddresses re-runs the addrman checks
assert_equal(len(addrs), 2)
def test_sendmsgtopeer(self):
node = self.nodes[0]
self.restart_node(0)
# we want to use a p2p v1 connection here in order to ensure
# a peer id of zero (a downgrade from v2 to v1 would lead
# to an increase of the peer id)
self.connect_nodes(0, 1, peer_advertises_v2=False)
self.log.info("Test sendmsgtopeer")
self.log.debug("Send a valid message")
with self.nodes[1].assert_debug_log(expected_msgs=["received: addr"]):
node.sendmsgtopeer(peer_id=0, msg_type="addr", msg="FFFFFF")
self.log.debug("Test error for sending to non-existing peer")
assert_raises_rpc_error(-1, "Error: Could not send message to peer", node.sendmsgtopeer, peer_id=100, msg_type="addr", msg="FF")
self.log.debug("Test that zero-length msg_type is allowed")
node.sendmsgtopeer(peer_id=0, msg_type="addr", msg="")
self.log.debug("Test error for msg_type that is too long")
assert_raises_rpc_error(-8, "Error: msg_type too long, max length is 12", node.sendmsgtopeer, peer_id=0, msg_type="long_msg_type", msg="FF")
self.log.debug("Test that unknown msg_type is allowed")
node.sendmsgtopeer(peer_id=0, msg_type="unknown", msg="FF")
self.log.debug("Test that empty msg is allowed")
node.sendmsgtopeer(peer_id=0, msg_type="addr", msg="FF")
self.log.debug("Test that oversized messages are allowed, but get us disconnected")
zero_byte_string = b'\x00' * 4000001
node.sendmsgtopeer(peer_id=0, msg_type="addr", msg=zero_byte_string.hex())
self.wait_until(lambda: len(self.nodes[0].getpeerinfo()) == 0, timeout=10)
def test_getaddrmaninfo(self):
self.log.info("Test getaddrmaninfo")
node = self.nodes[1]
# current count of ipv4 addresses in addrman is {'new':1, 'tried':1}
self.log.info("Test that count of addresses in addrman match expected values")
res = node.getaddrmaninfo()
assert_equal(res["ipv4"]["new"], 1)
assert_equal(res["ipv4"]["tried"], 1)
assert_equal(res["ipv4"]["total"], 2)
assert_equal(res["all_networks"]["new"], 1)
assert_equal(res["all_networks"]["tried"], 1)
assert_equal(res["all_networks"]["total"], 2)
for net in ["ipv6", "onion", "i2p", "cjdns"]:
assert_equal(res[net]["new"], 0)
assert_equal(res[net]["tried"], 0)
assert_equal(res[net]["total"], 0)
def test_getrawaddrman(self):
self.log.info("Test getrawaddrman")
node = self.nodes[1]
self.log.debug("Test that getrawaddrman is a hidden RPC")
# It is hidden from general help, but its detailed help may be called directly.
assert "getrawaddrman" not in node.help()
assert "getrawaddrman" in node.help("getrawaddrman")
def check_addr_information(result, expected):
"""Utility to compare a getrawaddrman result entry with an expected entry"""
assert_equal(result["address"], expected["address"])
assert_equal(result["port"], expected["port"])
assert_equal(result["services"], expected["services"])
assert_equal(result["network"], expected["network"])
assert_equal(result["source"], expected["source"])
assert_equal(result["source_network"], expected["source_network"])
assert_equal(result["time"], self.addr_time)
def check_getrawaddrman_entries(expected):
"""Utility to compare a getrawaddrman result with expected addrman contents"""
getrawaddrman = node.getrawaddrman()
getaddrmaninfo = node.getaddrmaninfo()
for (table_name, table_info) in expected.items():
assert_equal(len(getrawaddrman[table_name]), len(table_info["entries"]))
assert_equal(len(getrawaddrman[table_name]), getaddrmaninfo["all_networks"][table_name])
for bucket_position in getrawaddrman[table_name].keys():
bucket = int(bucket_position.split("/")[0])
position = int(bucket_position.split("/")[1])
# bucket and position can only be sanity-checked here as the
# test-addrman isn't deterministic
assert 0 <= int(bucket) < table_info["bucket_count"]
assert 0 <= int(position) < ADDRMAN_BUCKET_SIZE
entry = getrawaddrman[table_name][bucket_position]
expected_entry = list(filter(lambda e: e["address"] == entry["address"], table_info["entries"]))[0]
check_addr_information(entry, expected_entry)
# we expect one addrman new and tried table entry, which were added in a previous test
expected = {
"new": {
"bucket_count": ADDRMAN_NEW_BUCKET_COUNT,
"entries": [
{
"address": "2.0.0.0",
"port": 8333,
"services": 9,
"network": "ipv4",
"source": "2.0.0.0",
"source_network": "ipv4",
}
]
},
"tried": {
"bucket_count": ADDRMAN_TRIED_BUCKET_COUNT,
"entries": [
{
"address": "1.2.3.4",
"port": 8333,
"services": 9,
"network": "ipv4",
"source": "1.2.3.4",
"source_network": "ipv4",
}
]
}
}
self.log.debug("Test that the getrawaddrman contains information about the addresses added in a previous test")
check_getrawaddrman_entries(expected)
self.log.debug("Add one new address to each addrman table")
expected["new"]["entries"].append({
"address": "2803:0:1234:abcd::1",
"services": 9,
"network": "ipv6",
"source": "2803:0:1234:abcd::1",
"source_network": "ipv6",
"port": -1, # set once addpeeraddress is successful
})
expected["tried"]["entries"].append({
"address": "nrfj6inpyf73gpkyool35hcmne5zwfmse3jl3aw23vk7chdemalyaqad.onion",
"services": 9,
"network": "onion",
"source": "nrfj6inpyf73gpkyool35hcmne5zwfmse3jl3aw23vk7chdemalyaqad.onion",
"source_network": "onion",
"port": -1, # set once addpeeraddress is successful
})
port = 0
for (table_name, table_info) in expected.items():
# There's a slight chance that the to-be-added address collides with an already
# present table entry. To avoid this, we increment the port until an address has been
# added. Incrementing the port changes the position in the new table bucket (bucket
# stays the same) and changes both the bucket and the position in the tried table.
while True:
if node.addpeeraddress(address=table_info["entries"][1]["address"], port=port, tried=table_name == "tried")["success"]:
table_info["entries"][1]["port"] = port
self.log.debug(f"Added {table_info['entries'][1]['address']} to {table_name} table")
break
else:
port += 1
self.log.debug("Test that the newly added addresses appear in getrawaddrman")
check_getrawaddrman_entries(expected)
if __name__ == '__main__':
NetTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/README.md | # Functional tests
### Writing Functional Tests
#### Example test
The file [test/functional/example_test.py](example_test.py) is a heavily commented example
of a test case that uses both the RPC and P2P interfaces. If you are writing your first test, copy
that file and modify to fit your needs.
#### Coverage
Running `test/functional/test_runner.py` with the `--coverage` argument tracks which RPCs are
called by the tests and prints a report of uncovered RPCs in the summary. This
can be used (along with the `--extended` argument) to find out which RPCs we
don't have test cases for.
#### Style guidelines
- Where possible, try to adhere to [PEP-8 guidelines](https://www.python.org/dev/peps/pep-0008/)
- Use a python linter like flake8 before submitting PRs to catch common style
nits (eg trailing whitespace, unused imports, etc)
- The oldest supported Python version is specified in [doc/dependencies.md](/doc/dependencies.md).
Consider using [pyenv](https://github.com/pyenv/pyenv), which checks [.python-version](/.python-version),
to prevent accidentally introducing modern syntax from an unsupported Python version.
The CI linter job also checks this, but [possibly not in all cases](https://github.com/bitcoin/bitcoin/pull/14884#discussion_r239585126).
- See [the python lint script](/test/lint/lint-python.py) that checks for violations that
could lead to bugs and issues in the test code.
- Use [type hints](https://docs.python.org/3/library/typing.html) in your code to improve code readability
and to detect possible bugs earlier.
- Avoid wildcard imports.
- If more than one name from a module is needed, use lexicographically sorted multi-line imports
  in order to reduce the possibility of potential merge conflicts (see the sketch after this list).
- Use a module-level docstring to describe what the test is testing, and how it
is testing it.
- When subclassing the BitcoinTestFramework, place overrides for the
`set_test_params()`, `add_options()` and `setup_xxxx()` methods at the top of
the subclass, then locally-defined helper methods, then the `run_test()` method.
- Use `f'{x}'` for string formatting in preference to `'{}'.format(x)` or `'%s' % x`.
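For instance, a minimal sketch combining the sorted multi-line import and f-string
conventions (the values are made up; the helpers are from [util.py](test_framework/util.py)):

```python
from test_framework.util import (
    assert_equal,
    assert_greater_than,
)

blocks = 101
summary = f"mined {blocks} blocks"  # preferred over 'mined {} blocks'.format(blocks)
assert_equal(summary, "mined 101 blocks")
assert_greater_than(blocks, 100)
```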
#### Naming guidelines
- Name the test `<area>_test.py`, where area can be one of the following:
- `feature` for tests for full features that aren't wallet/mining/mempool, eg `feature_rbf.py`
- `interface` for tests for other interfaces (REST, ZMQ, etc), eg `interface_rest.py`
- `mempool` for tests for mempool behaviour, eg `mempool_reorg.py`
- `mining` for tests for mining features, eg `mining_prioritisetransaction.py`
- `p2p` for tests that explicitly test the p2p interface, eg `p2p_disconnect_ban.py`
- `rpc` for tests for individual RPC methods or features, eg `rpc_listtransactions.py`
- `tool` for tests for tools, eg `tool_wallet.py`
- `wallet` for tests for wallet features, eg `wallet_keypool.py`
- Use an underscore to separate words
- exception: for tests for specific RPCs or command line options which don't include underscores, name the test after the exact RPC or argument name, eg `rpc_decodescript.py`, not `rpc_decode_script.py`
- Don't use the redundant word `test` in the name, eg `interface_zmq.py`, not `interface_zmq_test.py`
#### General test-writing advice
- Instead of inline comments or no test documentation at all, log the comments to the test log, e.g.
`self.log.info('Create enough transactions to fill a block')`. Logs make the test code easier to read and the test
logic easier [to debug](/test/README.md#test-logging).
- Set `self.num_nodes` to the minimum number of nodes necessary for the test.
Having additional unrequired nodes adds to the execution time of the test as
well as memory/CPU/disk requirements (which is important when running tests in
parallel).
- Avoid stop-starting the nodes multiple times during the test if possible. A
stop-start takes several seconds, so doing it several times blows up the
runtime of the test.
- Set the `self.setup_clean_chain` variable in `set_test_params()` to `True` to
initialize an empty blockchain and start from the Genesis block, rather than
load a premined blockchain from cache with the default value of `False`. The
cached data directories contain a 200-block pre-mined blockchain with the
spendable mining rewards being split between four nodes. Each node has 25
mature block subsidies (25x50=1250 BTC) in its wallet. Using them is much more
efficient than mining blocks in your test.
- When calling RPCs with lots of arguments, consider using named keyword
  arguments instead of positional arguments to make the intent of the call
  clear to readers (see the sketch after this list).
- Many of the core test framework classes such as `CBlock` and `CTransaction`
don't allow new attributes to be added to their objects at runtime like
typical Python objects allow. This helps prevent unpredictable side effects
from typographical errors or usage of the objects outside of their intended
purpose.
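As a sketch of the logging and named-keyword-argument advice above (the RPC,
amount, and fee rate shown are illustrative, not prescriptive):

```python
# Inside a BitcoinTestFramework subclass:
def run_test(self):
    self.log.info('Send a payment with an explicit fee rate')
    node = self.nodes[0]
    address = node.getnewaddress()
    # Named keyword arguments make the intent of each value clear to readers.
    txid = node.sendtoaddress(address=address, amount=0.1, fee_rate=25)
    assert txid in node.getrawmempool()
```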
#### RPC and P2P definitions
Test writers may find it helpful to refer to the definitions for the RPC and
P2P messages. These can be found in the following source files:
- `/src/rpc/*` for RPCs
- `/src/wallet/rpc*` for wallet RPCs
- `ProcessMessage()` in `/src/net_processing.cpp` for parsing P2P messages
#### Using the P2P interface
- `P2P`s can be used to test specific P2P protocol behavior.
[p2p.py](test_framework/p2p.py) contains test framework p2p objects and
[messages.py](test_framework/messages.py) contains all the definitions for objects passed
over the network (`CBlock`, `CTransaction`, etc, along with the network-level
wrappers for them, `msg_block`, `msg_tx`, etc).
- P2P tests have two threads. One thread handles all network communication
with the bitcoind(s) being tested in a callback-based event loop; the other
implements the test logic.
- `P2PConnection` is the class used to connect to a bitcoind. `P2PInterface`
contains the higher level logic for processing P2P payloads and connecting to
the Bitcoin Core node application logic. For custom behaviour, subclass the
P2PInterface object and override the callback methods.
`P2PConnection`s can be used as such:
```python
p2p_conn = node.add_p2p_connection(P2PInterface())
p2p_conn.send_and_ping(msg)
```
They can also be referenced by indexing into a `TestNode`'s `p2ps` list, which
contains the list of test framework `p2p` objects connected to itself
(it does not include any `TestNode`s):
```python
node.p2ps[0].sync_with_ping()
```
More examples can be found in [p2p_unrequested_blocks.py](p2p_unrequested_blocks.py),
[p2p_compactblocks.py](p2p_compactblocks.py).
#### Prototyping tests
The [`TestShell`](test-shell.md) class exposes the BitcoinTestFramework
functionality to interactive Python3 environments and can be used to prototype
tests. This may be especially useful in a REPL environment with session logging
utilities, such as
[IPython](https://ipython.readthedocs.io/en/stable/interactive/reference.html#session-logging-and-restoring).
The logs of such interactive sessions can later be adapted into permanent test
cases.
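A typical interactive session might look like the following sketch (see
[test-shell.md](test-shell.md) for authoritative usage):

```python
>>> from test_framework.test_shell import TestShell
>>> test = TestShell().setup(num_nodes=1, setup_clean_chain=True)
>>> test.nodes[0].getblockchaininfo()["blocks"]
0
>>> test.shutdown()
```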
### Test framework modules
The following are useful modules for test developers. They are located in
[test/functional/test_framework/](test_framework).
#### [authproxy.py](test_framework/authproxy.py)
Taken from the [python-bitcoinrpc repository](https://github.com/jgarzik/python-bitcoinrpc).
#### [test_framework.py](test_framework/test_framework.py)
Base class for functional tests.
#### [util.py](test_framework/util.py)
Generally useful functions.
#### [p2p.py](test_framework/p2p.py)
Test objects for interacting with a bitcoind node over the p2p interface.
#### [script.py](test_framework/script.py)
Utilities for manipulating transaction scripts (originally from python-bitcoinlib)
#### [key.py](test_framework/key.py)
Test-only secp256k1 elliptic curve implementation
#### [blocktools.py](test_framework/blocktools.py)
Helper functions for creating blocks and transactions.
### Benchmarking with perf
An easy way to profile node performance during functional tests is provided
for Linux platforms using `perf`.
Perf will sample the running node and will generate profile data in the node's
datadir. The profile data can then be presented using `perf report` or a graphical
tool like [hotspot](https://github.com/KDAB/hotspot).
There are two ways of invoking perf: one is to use the `--perf` flag when
running tests, which will profile each node during the entire test run: perf
begins to profile when the node starts and ends when it shuts down. The other
way is to use the `profile_with_perf` context manager, e.g.
```python
with node.profile_with_perf("send-big-msgs"):
# Perform activity on the node you're interested in profiling, e.g.:
for _ in range(10000):
node.p2ps[0].send_message(some_large_message)
```
To see useful textual output, run
```sh
perf report -i /path/to/datadir/send-big-msgs.perf.data.xxxx --stdio | c++filt | less
```
#### See also:
- [Installing perf](https://askubuntu.com/q/50145)
- [Perf examples](https://www.brendangregg.com/perf.html)
- [Hotspot](https://github.com/KDAB/hotspot): a GUI for perf output analysis
| 0 |
bitcoin/test | bitcoin/test/functional/p2p_getdata.py | #!/usr/bin/env python3
# Copyright (c) 2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test GETDATA processing behavior"""
from collections import defaultdict
from test_framework.messages import (
CInv,
msg_getdata,
)
from test_framework.p2p import P2PInterface
from test_framework.test_framework import BitcoinTestFramework
class P2PStoreBlock(P2PInterface):
def __init__(self):
super().__init__()
self.blocks = defaultdict(int)
def on_block(self, message):
message.block.calc_sha256()
self.blocks[message.block.sha256] += 1
class GetdataTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
def run_test(self):
p2p_block_store = self.nodes[0].add_p2p_connection(P2PStoreBlock())
self.log.info("test that an invalid GETDATA doesn't prevent processing of future messages")
# Send invalid message and verify that node responds to later ping
invalid_getdata = msg_getdata()
invalid_getdata.inv.append(CInv(t=0, h=0)) # INV type 0 is invalid.
p2p_block_store.send_and_ping(invalid_getdata)
# Check getdata still works by fetching tip block
best_block = int(self.nodes[0].getbestblockhash(), 16)
good_getdata = msg_getdata()
good_getdata.inv.append(CInv(t=2, h=best_block))
p2p_block_store.send_and_ping(good_getdata)
p2p_block_store.wait_until(lambda: p2p_block_store.blocks[best_block] == 1)
if __name__ == '__main__':
GetdataTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/feature_versionbits_warning.py | #!/usr/bin/env python3
# Copyright (c) 2016-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test version bits warning system.
Generate chains with block versions that appear to be signalling unknown
soft-forks, and test that warning alerts are generated.
"""
import os
import re
from test_framework.blocktools import create_block, create_coinbase
from test_framework.messages import msg_block
from test_framework.p2p import P2PInterface
from test_framework.test_framework import BitcoinTestFramework
VB_PERIOD = 144 # versionbits period length for regtest
VB_THRESHOLD = 108 # versionbits activation threshold for regtest
VB_TOP_BITS = 0x20000000
VB_UNKNOWN_BIT = 27 # Choose a bit unassigned to any deployment
VB_UNKNOWN_VERSION = VB_TOP_BITS | (1 << VB_UNKNOWN_BIT)
WARN_UNKNOWN_RULES_ACTIVE = f"Unknown new rules activated (versionbit {VB_UNKNOWN_BIT})"
VB_PATTERN = re.compile("Unknown new rules activated.*versionbit")
class VersionBitsWarningTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
def setup_network(self):
self.alert_filename = os.path.join(self.options.tmpdir, "alert.txt")
# Open and close to create zero-length file
with open(self.alert_filename, 'w', encoding='utf8'):
pass
self.extra_args = [[f"-alertnotify=echo %s >> \"{self.alert_filename}\""]]
self.setup_nodes()
def send_blocks_with_version(self, peer, numblocks, version):
"""Send numblocks blocks to peer with version set"""
tip = self.nodes[0].getbestblockhash()
height = self.nodes[0].getblockcount()
block_time = self.nodes[0].getblockheader(tip)["time"] + 1
tip = int(tip, 16)
for _ in range(numblocks):
block = create_block(tip, create_coinbase(height + 1), block_time, version=version)
block.solve()
peer.send_message(msg_block(block))
block_time += 1
height += 1
tip = block.sha256
peer.sync_with_ping()
def versionbits_in_alert_file(self):
"""Test that the versionbits warning has been written to the alert file."""
with open(self.alert_filename, 'r', encoding='utf8') as f:
alert_text = f.read()
return VB_PATTERN.search(alert_text) is not None
def run_test(self):
node = self.nodes[0]
peer = node.add_p2p_connection(P2PInterface())
node_deterministic_address = node.get_deterministic_priv_key().address
# Mine one period worth of blocks
self.generatetoaddress(node, VB_PERIOD, node_deterministic_address)
self.log.info("Check that there is no warning if previous VB_BLOCKS have <VB_THRESHOLD blocks with unknown versionbits version.")
# Build one period of blocks with < VB_THRESHOLD blocks signaling some unknown bit
self.send_blocks_with_version(peer, VB_THRESHOLD - 1, VB_UNKNOWN_VERSION)
self.generatetoaddress(node, VB_PERIOD - VB_THRESHOLD + 1, node_deterministic_address)
# Check that we're not getting any versionbit-related errors in get*info()
assert not VB_PATTERN.match(node.getmininginfo()["warnings"])
assert not VB_PATTERN.match(node.getnetworkinfo()["warnings"])
# Build one period of blocks with VB_THRESHOLD blocks signaling some unknown bit
self.send_blocks_with_version(peer, VB_THRESHOLD, VB_UNKNOWN_VERSION)
self.generatetoaddress(node, VB_PERIOD - VB_THRESHOLD, node_deterministic_address)
self.log.info("Check that there is a warning if previous VB_BLOCKS have >=VB_THRESHOLD blocks with unknown versionbits version.")
# Mine a period worth of expected blocks so the generic block-version warning
# is cleared. This will move the versionbit state to ACTIVE.
self.generatetoaddress(node, VB_PERIOD, node_deterministic_address)
# Stop-start the node. This is required because bitcoind will only warn once about unknown versions or unknown rules activating.
self.restart_node(0)
# Generating one block guarantees that we'll get out of IBD
self.generatetoaddress(node, 1, node_deterministic_address)
self.wait_until(lambda: not node.getblockchaininfo()['initialblockdownload'])
# Generating one more block will be enough to generate an error.
self.generatetoaddress(node, 1, node_deterministic_address)
# Check that get*info() shows the versionbits unknown rules warning
assert WARN_UNKNOWN_RULES_ACTIVE in node.getmininginfo()["warnings"]
assert WARN_UNKNOWN_RULES_ACTIVE in node.getnetworkinfo()["warnings"]
# Check that the alert file shows the versionbits unknown rules warning
self.wait_until(lambda: self.versionbits_in_alert_file())
if __name__ == '__main__':
VersionBitsWarningTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/rpc_deriveaddresses.py | #!/usr/bin/env python3
# Copyright (c) 2018-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the deriveaddresses rpc call."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.descriptors import descsum_create
from test_framework.util import assert_equal, assert_raises_rpc_error
class DeriveaddressesTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
def run_test(self):
assert_raises_rpc_error(-5, "Missing checksum", self.nodes[0].deriveaddresses, "a")
descriptor = "wpkh(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/0)#t6wfjs64"
address = "bcrt1qjqmxmkpmxt80xz4y3746zgt0q3u3ferr34acd5"
assert_equal(self.nodes[0].deriveaddresses(descriptor), [address])
descriptor = descriptor[:-9]
assert_raises_rpc_error(-5, "Missing checksum", self.nodes[0].deriveaddresses, descriptor)
descriptor_pubkey = "wpkh(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/0)#s9ga3alw"
address = "bcrt1qjqmxmkpmxt80xz4y3746zgt0q3u3ferr34acd5"
assert_equal(self.nodes[0].deriveaddresses(descriptor_pubkey), [address])
ranged_descriptor = "wpkh(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*)#kft60nuy"
assert_equal(self.nodes[0].deriveaddresses(ranged_descriptor, [1, 2]), ["bcrt1qhku5rq7jz8ulufe2y6fkcpnlvpsta7rq4442dy", "bcrt1qpgptk2gvshyl0s9lqshsmx932l9ccsv265tvaq"])
assert_equal(self.nodes[0].deriveaddresses(ranged_descriptor, 2), [address, "bcrt1qhku5rq7jz8ulufe2y6fkcpnlvpsta7rq4442dy", "bcrt1qpgptk2gvshyl0s9lqshsmx932l9ccsv265tvaq"])
assert_raises_rpc_error(-8, "Range should not be specified for an un-ranged descriptor", self.nodes[0].deriveaddresses, descsum_create("wpkh(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/0)"), [0, 2])
assert_raises_rpc_error(-8, "Range must be specified for a ranged descriptor", self.nodes[0].deriveaddresses, descsum_create("wpkh(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*)"))
assert_raises_rpc_error(-8, "End of range is too high", self.nodes[0].deriveaddresses, descsum_create("wpkh(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*)"), 10000000000)
assert_raises_rpc_error(-8, "Range is too large", self.nodes[0].deriveaddresses, descsum_create("wpkh(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*)"), [1000000000, 2000000000])
assert_raises_rpc_error(-8, "Range specified as [begin,end] must not have begin after end", self.nodes[0].deriveaddresses, descsum_create("wpkh(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*)"), [2, 0])
assert_raises_rpc_error(-8, "Range should be greater or equal than 0", self.nodes[0].deriveaddresses, descsum_create("wpkh(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*)"), [-1, 0])
combo_descriptor = descsum_create("combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/0)")
assert_equal(self.nodes[0].deriveaddresses(combo_descriptor), ["mtfUoUax9L4tzXARpw1oTGxWyoogp52KhJ", address, "2NDvEwGfpEqJWfybzpKPHF2XH3jwoQV3D7x"])
# P2PK does not have a valid address
assert_raises_rpc_error(-5, "Descriptor does not have a corresponding address", self.nodes[0].deriveaddresses, descsum_create("pk(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK)"))
# Before #26275, bitcoind would crash when deriveaddresses was
# called with derivation index 2147483647, which is the maximum
# positive value of a signed int32, and - currently - the
# maximum value that the deriveaddresses bitcoin RPC call
# accepts as derivation index.
assert_equal(self.nodes[0].deriveaddresses(descsum_create("wpkh(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*)"), [2147483647, 2147483647]), ["bcrt1qtzs23vgzpreks5gtygwxf8tv5rldxvvsyfpdkg"])
hardened_without_privkey_descriptor = descsum_create("wpkh(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1'/1/0)")
assert_raises_rpc_error(-5, "Cannot derive script without private keys", self.nodes[0].deriveaddresses, hardened_without_privkey_descriptor)
bare_multisig_descriptor = descsum_create("multi(1,tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/0,tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/1)")
assert_raises_rpc_error(-5, "Descriptor does not have a corresponding address", self.nodes[0].deriveaddresses, bare_multisig_descriptor)
if __name__ == '__main__':
DeriveaddressesTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/p2p_addrv2_relay.py | #!/usr/bin/env python3
# Copyright (c) 2020-2021 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""
Test addrv2 relay
"""
import time
from test_framework.messages import (
CAddress,
msg_addrv2,
)
from test_framework.p2p import (
P2PInterface,
P2P_SERVICES,
)
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal
I2P_ADDR = "c4gfnttsuwqomiygupdqqqyy5y5emnk5c73hrfvatri67prd7vyq.b32.i2p"
ONION_ADDR = "pg6mmjiyjmcrsslvykfwnntlaru7p5svn6y2ymmju6nubxndf4pscryd.onion"
ADDRS = []
for i in range(10):
addr = CAddress()
addr.time = int(time.time()) + i
addr.port = 8333 + i
addr.nServices = P2P_SERVICES
# Add one I2P and one onion V3 address at an arbitrary position.
if i == 5:
addr.net = addr.NET_I2P
addr.ip = I2P_ADDR
addr.port = 0
elif i == 8:
addr.net = addr.NET_TORV3
addr.ip = ONION_ADDR
else:
addr.ip = f"123.123.123.{i % 256}"
ADDRS.append(addr)
class AddrReceiver(P2PInterface):
addrv2_received_and_checked = False
def __init__(self):
super().__init__(support_addrv2=True)
def on_addrv2(self, message):
expected_set = set((addr.ip, addr.port) for addr in ADDRS)
received_set = set((addr.ip, addr.port) for addr in message.addrs)
if expected_set == received_set:
self.addrv2_received_and_checked = True
def wait_for_addrv2(self):
self.wait_until(lambda: "addrv2" in self.last_message)
def calc_addrv2_msg_size(addrs):
size = 1 # vector length byte
for addr in addrs:
size += 4 # time
size += 1 # services, COMPACTSIZE(P2P_SERVICES)
size += 1 # network id
size += 1 # address length byte
size += addr.ADDRV2_ADDRESS_LENGTH[addr.net] # address
size += 2 # port
return size
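# For the ADDRS above this works out to 1 + 8 * 13 + 2 * 41 = 187 bytes:
# eight IPv4 entries (4 + 1 + 1 + 1 + 4 + 2 = 13 bytes each) plus one I2P
# and one TorV3 entry (4 + 1 + 1 + 1 + 32 + 2 = 41 bytes each).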
class AddrTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
self.extra_args = [["[email protected]"]]
def run_test(self):
self.log.info('Create connection that sends addrv2 messages')
addr_source = self.nodes[0].add_p2p_connection(P2PInterface())
msg = msg_addrv2()
self.log.info('Send too-large addrv2 message')
msg.addrs = ADDRS * 101
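# addr-type messages are capped at 1,000 entries; this 1,010-entry
# message is dropped in its entirety, as the debug log below confirms.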
with self.nodes[0].assert_debug_log(['addrv2 message size = 1010']):
addr_source.send_and_ping(msg)
self.log.info('Check that addrv2 message content is relayed and added to addrman')
addr_receiver = self.nodes[0].add_p2p_connection(AddrReceiver())
msg.addrs = ADDRS
msg_size = calc_addrv2_msg_size(ADDRS)
with self.nodes[0].assert_debug_log([
f'received: addrv2 ({msg_size} bytes) peer=0',
f'sending addrv2 ({msg_size} bytes) peer=1',
]):
addr_source.send_and_ping(msg)
self.nodes[0].setmocktime(int(time.time()) + 30 * 60)
addr_receiver.wait_for_addrv2()
assert addr_receiver.addrv2_received_and_checked
assert_equal(len(self.nodes[0].getnodeaddresses(count=0, network="i2p")), 0)
if __name__ == '__main__':
AddrTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/wallet_disable.py | #!/usr/bin/env python3
# Copyright (c) 2015-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test a node with the -disablewallet option.
- Test that validateaddress RPC works when running with -disablewallet
- Test that it is not possible to mine to an invalid address.
"""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_raises_rpc_error
class DisableWalletTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
self.extra_args = [["-disablewallet"]]
self.wallet_names = []
def run_test(self):
# Make sure wallet is really disabled
assert_raises_rpc_error(-32601, 'Method not found', self.nodes[0].getwalletinfo)
x = self.nodes[0].validateaddress('3J98t1WpEZ73CNmQviecrnyiWrnqRhWNLy')
assert not x['isvalid']
x = self.nodes[0].validateaddress('mneYUmWYsuk7kySiURxCi3AGxrAqZxLgPZ')
assert x['isvalid']
if __name__ == '__main__':
DisableWalletTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/p2p_ping.py | #!/usr/bin/env python3
# Copyright (c) 2020-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test ping message
"""
import time
from test_framework.messages import msg_pong
from test_framework.p2p import P2PInterface
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal
PING_INTERVAL = 2 * 60
TIMEOUT_INTERVAL = 20 * 60
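# These mirror bitcoind's own constants: a ping is sent after
# PING_INTERVAL of peer inactivity, and a peer whose ping has been
# outstanding for more than TIMEOUT_INTERVAL is disconnected.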
class msg_pong_corrupt(msg_pong):
def serialize(self):
return b""
class NodeNoPong(P2PInterface):
def on_ping(self, message):
pass
class PingPongTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
# Set the peer connection timeout low. It does not matter for this
# test, as long as it is less than TIMEOUT_INTERVAL.
self.extra_args = [['-peertimeout=1']]
def check_peer_info(self, *, pingtime, minping, pingwait):
stats = self.nodes[0].getpeerinfo()[0]
assert_equal(stats.pop('pingtime', None), pingtime)
assert_equal(stats.pop('minping', None), minping)
assert_equal(stats.pop('pingwait', None), pingwait)
def mock_forward(self, delta):
self.mock_time += delta
self.nodes[0].setmocktime(self.mock_time)
def run_test(self):
self.mock_time = int(time.time())
self.mock_forward(0)
self.log.info('Check that ping is sent after connection is established')
no_pong_node = self.nodes[0].add_p2p_connection(NodeNoPong())
self.mock_forward(3)
assert no_pong_node.last_message.pop('ping').nonce != 0
self.check_peer_info(pingtime=None, minping=None, pingwait=3)
self.log.info('Reply without nonce cancels ping')
with self.nodes[0].assert_debug_log(['pong peer=0: Short payload']):
no_pong_node.send_and_ping(msg_pong_corrupt())
self.check_peer_info(pingtime=None, minping=None, pingwait=None)
self.log.info('Reply without ping')
with self.nodes[0].assert_debug_log([
'pong peer=0: Unsolicited pong without ping, 0 expected, 0 received, 8 bytes',
]):
no_pong_node.send_and_ping(msg_pong())
self.check_peer_info(pingtime=None, minping=None, pingwait=None)
self.log.info('Reply with wrong nonce does not cancel ping')
assert 'ping' not in no_pong_node.last_message
with self.nodes[0].assert_debug_log(['pong peer=0: Nonce mismatch']):
# mock time PING_INTERVAL ahead to trigger node into sending a ping
self.mock_forward(PING_INTERVAL + 1)
no_pong_node.wait_until(lambda: 'ping' in no_pong_node.last_message)
self.mock_forward(9)
# Send the wrong pong
no_pong_node.send_and_ping(msg_pong(no_pong_node.last_message.pop('ping').nonce - 1))
self.check_peer_info(pingtime=None, minping=None, pingwait=9)
self.log.info('Reply with zero nonce does cancel ping')
with self.nodes[0].assert_debug_log(['pong peer=0: Nonce zero']):
no_pong_node.send_and_ping(msg_pong(0))
self.check_peer_info(pingtime=None, minping=None, pingwait=None)
self.log.info('Check that ping is properly reported on RPC')
assert 'ping' not in no_pong_node.last_message
# mock time PING_INTERVAL ahead to trigger node into sending a ping
self.mock_forward(PING_INTERVAL + 1)
no_pong_node.wait_until(lambda: 'ping' in no_pong_node.last_message)
ping_delay = 29
self.mock_forward(ping_delay)
no_pong_node.wait_until(lambda: 'ping' in no_pong_node.last_message)
no_pong_node.send_and_ping(msg_pong(no_pong_node.last_message.pop('ping').nonce))
self.check_peer_info(pingtime=ping_delay, minping=ping_delay, pingwait=None)
self.log.info('Check that minping is decreased after a fast roundtrip')
# mock time PING_INTERVAL ahead to trigger node into sending a ping
self.mock_forward(PING_INTERVAL + 1)
no_pong_node.wait_until(lambda: 'ping' in no_pong_node.last_message)
ping_delay = 9
self.mock_forward(ping_delay)
no_pong_node.wait_until(lambda: 'ping' in no_pong_node.last_message)
no_pong_node.send_and_ping(msg_pong(no_pong_node.last_message.pop('ping').nonce))
self.check_peer_info(pingtime=ping_delay, minping=ping_delay, pingwait=None)
self.log.info('Check that peer is disconnected after ping timeout')
assert 'ping' not in no_pong_node.last_message
self.nodes[0].ping()
no_pong_node.wait_until(lambda: 'ping' in no_pong_node.last_message)
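# 600s + 601s elapse below while the ping goes unanswered, one second
# past TIMEOUT_INTERVAL (1200s), hence the 1201s in the timeout log.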
with self.nodes[0].assert_debug_log(['ping timeout: 1201.000000s']):
self.mock_forward(TIMEOUT_INTERVAL // 2)
# Check that sending a ping does not prevent the disconnect
no_pong_node.sync_with_ping()
self.mock_forward(TIMEOUT_INTERVAL // 2 + 1)
no_pong_node.wait_for_disconnect()
if __name__ == '__main__':
PingPongTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/wallet_descriptor.py | #!/usr/bin/env python3
# Copyright (c) 2019-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test descriptor wallet function."""
try:
import sqlite3
except ImportError:
pass
from test_framework.blocktools import COINBASE_MATURITY
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_raises_rpc_error
)
from test_framework.wallet_util import WalletUnlock
class WalletDescriptorTest(BitcoinTestFramework):
def add_options(self, parser):
self.add_wallet_options(parser, legacy=False)
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
self.extra_args = [['-keypool=100']]
self.wallet_names = []
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
self.skip_if_no_sqlite()
self.skip_if_no_py_sqlite3()
def run_test(self):
if self.is_bdb_compiled():
# Make a legacy wallet and check it is BDB
self.nodes[0].createwallet(wallet_name="legacy1", descriptors=False)
wallet_info = self.nodes[0].getwalletinfo()
assert_equal(wallet_info['format'], 'bdb')
self.nodes[0].unloadwallet("legacy1")
else:
self.log.warning("Skipping BDB test")
# Make a descriptor wallet
self.log.info("Making a descriptor wallet")
self.nodes[0].createwallet(wallet_name="desc1", descriptors=True)
# A descriptor wallet should have 100 addresses * 4 types = 400 keys
self.log.info("Checking wallet info")
wallet_info = self.nodes[0].getwalletinfo()
assert_equal(wallet_info['format'], 'sqlite')
assert_equal(wallet_info['keypoolsize'], 400)
assert_equal(wallet_info['keypoolsize_hd_internal'], 400)
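# Four active descriptors per chain (legacy, p2sh-segwit, bech32,
# bech32m), each topped up to -keypool=100, give 4 * 100 = 400 keys for
# both the external and the internal pool. keypoololdest is only
# reported for legacy wallets.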
assert 'keypoololdest' not in wallet_info
# Check that getnewaddress works
self.log.info("Test that getnewaddress and getrawchangeaddress work")
addr = self.nodes[0].getnewaddress("", "legacy")
addr_info = self.nodes[0].getaddressinfo(addr)
assert addr_info['desc'].startswith('pkh(')
assert_equal(addr_info['hdkeypath'], 'm/44h/1h/0h/0/0')
addr = self.nodes[0].getnewaddress("", "p2sh-segwit")
addr_info = self.nodes[0].getaddressinfo(addr)
assert addr_info['desc'].startswith('sh(wpkh(')
assert_equal(addr_info['hdkeypath'], 'm/49h/1h/0h/0/0')
addr = self.nodes[0].getnewaddress("", "bech32")
addr_info = self.nodes[0].getaddressinfo(addr)
assert addr_info['desc'].startswith('wpkh(')
assert_equal(addr_info['hdkeypath'], 'm/84h/1h/0h/0/0')
addr = self.nodes[0].getnewaddress("", "bech32m")
addr_info = self.nodes[0].getaddressinfo(addr)
assert addr_info['desc'].startswith('tr(')
assert_equal(addr_info['hdkeypath'], 'm/86h/1h/0h/0/0')
# Check that getrawchangeaddress works
addr = self.nodes[0].getrawchangeaddress("legacy")
addr_info = self.nodes[0].getaddressinfo(addr)
assert addr_info['desc'].startswith('pkh(')
assert_equal(addr_info['hdkeypath'], 'm/44h/1h/0h/1/0')
addr = self.nodes[0].getrawchangeaddress("p2sh-segwit")
addr_info = self.nodes[0].getaddressinfo(addr)
assert addr_info['desc'].startswith('sh(wpkh(')
assert_equal(addr_info['hdkeypath'], 'm/49h/1h/0h/1/0')
addr = self.nodes[0].getrawchangeaddress("bech32")
addr_info = self.nodes[0].getaddressinfo(addr)
assert addr_info['desc'].startswith('wpkh(')
assert_equal(addr_info['hdkeypath'], 'm/84h/1h/0h/1/0')
addr = self.nodes[0].getrawchangeaddress("bech32m")
addr_info = self.nodes[0].getaddressinfo(addr)
assert addr_info['desc'].startswith('tr(')
assert_equal(addr_info['hdkeypath'], 'm/86h/1h/0h/1/0')
# Make a wallet to receive coins at
self.nodes[0].createwallet(wallet_name="desc2", descriptors=True)
recv_wrpc = self.nodes[0].get_wallet_rpc("desc2")
send_wrpc = self.nodes[0].get_wallet_rpc("desc1")
# Generate some coins
self.generatetoaddress(self.nodes[0], COINBASE_MATURITY + 1, send_wrpc.getnewaddress())
# Make transactions
self.log.info("Test sending and receiving")
addr = recv_wrpc.getnewaddress()
send_wrpc.sendtoaddress(addr, 10)
# Make sure things are disabled
self.log.info("Test disabled RPCs")
assert_raises_rpc_error(-4, "Only legacy wallets are supported by this command", recv_wrpc.rpc.importprivkey, "cVpF924EspNh8KjYsfhgY96mmxvT6DgdWiTYMtMjuM74hJaU5psW")
assert_raises_rpc_error(-4, "Only legacy wallets are supported by this command", recv_wrpc.rpc.importpubkey, send_wrpc.getaddressinfo(send_wrpc.getnewaddress())["pubkey"])
assert_raises_rpc_error(-4, "Only legacy wallets are supported by this command", recv_wrpc.rpc.importaddress, recv_wrpc.getnewaddress())
assert_raises_rpc_error(-4, "Only legacy wallets are supported by this command", recv_wrpc.rpc.importmulti, [])
assert_raises_rpc_error(-4, "Only legacy wallets are supported by this command", recv_wrpc.rpc.addmultisigaddress, 1, [recv_wrpc.getnewaddress()])
assert_raises_rpc_error(-4, "Only legacy wallets are supported by this command", recv_wrpc.rpc.dumpprivkey, recv_wrpc.getnewaddress())
assert_raises_rpc_error(-4, "Only legacy wallets are supported by this command", recv_wrpc.rpc.dumpwallet, 'wallet.dump')
assert_raises_rpc_error(-4, "Only legacy wallets are supported by this command", recv_wrpc.rpc.importwallet, 'wallet.dump')
assert_raises_rpc_error(-4, "Only legacy wallets are supported by this command", recv_wrpc.rpc.sethdseed)
self.log.info("Test encryption")
# Get the master fingerprint before encrypt
info1 = send_wrpc.getaddressinfo(send_wrpc.getnewaddress())
# Encrypt wallet 0
send_wrpc.encryptwallet('pass')
with WalletUnlock(send_wrpc, "pass"):
addr = send_wrpc.getnewaddress()
info2 = send_wrpc.getaddressinfo(addr)
assert info1['hdmasterfingerprint'] != info2['hdmasterfingerprint']
assert 'hdmasterfingerprint' in send_wrpc.getaddressinfo(send_wrpc.getnewaddress())
info3 = send_wrpc.getaddressinfo(addr)
assert_equal(info2['desc'], info3['desc'])
self.log.info("Test that getnewaddress still works after keypool is exhausted in an encrypted wallet")
for _ in range(500):
send_wrpc.getnewaddress()
self.log.info("Test that unlock is needed when deriving only hardened keys in an encrypted wallet")
with WalletUnlock(send_wrpc, "pass"):
send_wrpc.importdescriptors([{
"desc": "wpkh(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/*h)#y4dfsj7n",
"timestamp": "now",
"range": [0,10],
"active": True
}])
# Exhaust keypool of 100
for _ in range(100):
send_wrpc.getnewaddress(address_type='bech32')
# This should now error
assert_raises_rpc_error(-12, "Keypool ran out, please call keypoolrefill first", send_wrpc.getnewaddress, '', 'bech32')
self.log.info("Test born encrypted wallets")
self.nodes[0].createwallet('desc_enc', False, False, 'pass', False, True)
enc_rpc = self.nodes[0].get_wallet_rpc('desc_enc')
enc_rpc.getnewaddress() # Makes sure that we can get a new address from a born encrypted wallet
self.log.info("Test blank descriptor wallets")
self.nodes[0].createwallet(wallet_name='desc_blank', blank=True, descriptors=True)
blank_rpc = self.nodes[0].get_wallet_rpc('desc_blank')
assert_raises_rpc_error(-4, 'This wallet has no available keys', blank_rpc.getnewaddress)
self.log.info("Test descriptor wallet with disabled private keys")
self.nodes[0].createwallet(wallet_name='desc_no_priv', disable_private_keys=True, descriptors=True)
nopriv_rpc = self.nodes[0].get_wallet_rpc('desc_no_priv')
assert_raises_rpc_error(-4, 'This wallet has no available keys', nopriv_rpc.getnewaddress)
self.log.info("Test descriptor exports")
self.nodes[0].createwallet(wallet_name='desc_export', descriptors=True)
exp_rpc = self.nodes[0].get_wallet_rpc('desc_export')
self.nodes[0].createwallet(wallet_name='desc_import', disable_private_keys=True, descriptors=True)
imp_rpc = self.nodes[0].get_wallet_rpc('desc_import')
addr_types = [('legacy', False, 'pkh(', '44h/1h/0h', -13),
('p2sh-segwit', False, 'sh(wpkh(', '49h/1h/0h', -14),
('bech32', False, 'wpkh(', '84h/1h/0h', -13),
('bech32m', False, 'tr(', '86h/1h/0h', -13),
('legacy', True, 'pkh(', '44h/1h/0h', -13),
('p2sh-segwit', True, 'sh(wpkh(', '49h/1h/0h', -14),
('bech32', True, 'wpkh(', '84h/1h/0h', -13),
('bech32m', True, 'tr(', '86h/1h/0h', -13)]
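# Each tuple is (address type, internal?, descriptor prefix, derivation
# path, index of the change-level digit in the descriptor string). The
# descriptor ends in "/<0|1>/*)" plus "#" and an 8-char checksum, so the
# digit sits 13 characters from the end; sh(wpkh(...)) closes one extra
# paren, hence -14.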
for addr_type, internal, desc_prefix, deriv_path, int_idx in addr_types:
int_str = 'internal' if internal else 'external'
self.log.info("Testing descriptor address type for {} {}".format(addr_type, int_str))
if internal:
addr = exp_rpc.getrawchangeaddress(address_type=addr_type)
else:
addr = exp_rpc.getnewaddress(address_type=addr_type)
desc = exp_rpc.getaddressinfo(addr)['parent_desc']
assert_equal(desc_prefix, desc[0:len(desc_prefix)])
idx = desc.index('/') + 1
assert_equal(deriv_path, desc[idx:idx + 9])
if internal:
assert_equal('1', desc[int_idx])
else:
assert_equal('0', desc[int_idx])
self.log.info("Testing the same descriptor is returned for address type {} {}".format(addr_type, int_str))
for i in range(0, 10):
if internal:
addr = exp_rpc.getrawchangeaddress(address_type=addr_type)
else:
addr = exp_rpc.getnewaddress(address_type=addr_type)
test_desc = exp_rpc.getaddressinfo(addr)['parent_desc']
assert_equal(desc, test_desc)
self.log.info("Testing import of exported {} descriptor".format(addr_type))
imp_rpc.importdescriptors([{
'desc': desc,
'active': True,
'next_index': 11,
'timestamp': 'now',
'internal': internal
}])
for i in range(0, 10):
if internal:
exp_addr = exp_rpc.getrawchangeaddress(address_type=addr_type)
imp_addr = imp_rpc.getrawchangeaddress(address_type=addr_type)
else:
exp_addr = exp_rpc.getnewaddress(address_type=addr_type)
imp_addr = imp_rpc.getnewaddress(address_type=addr_type)
assert_equal(exp_addr, imp_addr)
self.log.info("Test that loading descriptor wallet containing legacy key types throws error")
self.nodes[0].createwallet(wallet_name="crashme", descriptors=True)
self.nodes[0].unloadwallet("crashme")
wallet_db = self.nodes[0].wallets_path / "crashme" / self.wallet_data_filename
conn = sqlite3.connect(wallet_db)
with conn:
# add "cscript" entry: key type is uint160 (20 bytes), value type is CScript (zero-length here)
conn.execute('INSERT INTO main VALUES(?, ?)', (b'\x07cscript' + b'\x00'*20, b'\x00'))
conn.close()
assert_raises_rpc_error(-4, "Unexpected legacy entry in descriptor wallet found.", self.nodes[0].loadwallet, "crashme")
if __name__ == '__main__':
WalletDescriptorTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/p2p_segwit.py | #!/usr/bin/env python3
# Copyright (c) 2016-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test segwit transactions and blocks on P2P network."""
from decimal import Decimal
import random
import struct
import time
from test_framework.blocktools import (
WITNESS_COMMITMENT_HEADER,
add_witness_commitment,
create_block,
create_coinbase,
)
from test_framework.messages import (
MAX_BIP125_RBF_SEQUENCE,
CBlockHeader,
CInv,
COutPoint,
CTransaction,
CTxIn,
CTxInWitness,
CTxOut,
CTxWitness,
MAX_BLOCK_WEIGHT,
MSG_BLOCK,
MSG_TX,
MSG_WITNESS_FLAG,
MSG_WITNESS_TX,
MSG_WTX,
NODE_NETWORK,
NODE_WITNESS,
msg_no_witness_block,
msg_getdata,
msg_headers,
msg_inv,
msg_tx,
msg_block,
msg_no_witness_tx,
ser_uint256,
ser_vector,
sha256,
)
from test_framework.p2p import (
P2PInterface,
p2p_lock,
P2P_SERVICES,
)
from test_framework.script import (
CScript,
CScriptNum,
CScriptOp,
MAX_SCRIPT_ELEMENT_SIZE,
OP_0,
OP_1,
OP_2,
OP_16,
OP_2DROP,
OP_CHECKMULTISIG,
OP_CHECKSIG,
OP_DROP,
OP_ELSE,
OP_ENDIF,
OP_IF,
OP_RETURN,
OP_TRUE,
SIGHASH_ALL,
SIGHASH_ANYONECANPAY,
SIGHASH_NONE,
SIGHASH_SINGLE,
hash160,
sign_input_legacy,
sign_input_segwitv0,
)
from test_framework.script_util import (
key_to_p2pk_script,
key_to_p2wpkh_script,
keyhash_to_p2pkh_script,
script_to_p2sh_script,
script_to_p2wsh_script,
)
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
softfork_active,
assert_raises_rpc_error,
)
from test_framework.wallet import MiniWallet
from test_framework.wallet_util import generate_keypair
MAX_SIGOP_COST = 80000
SEGWIT_HEIGHT = 120
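# 80,000 is the consensus MAX_BLOCK_SIGOPS_COST: BIP141 scales the
# legacy 20,000-sigop block limit by the witness factor of 4.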
class UTXO():
"""Used to keep track of anyone-can-spend outputs that we can use in the tests."""
def __init__(self, sha256, n, value):
self.sha256 = sha256
self.n = n
self.nValue = value
def subtest(func):
"""Wraps the subtests for logging and state assertions."""
def func_wrapper(self, *args, **kwargs):
self.log.info("Subtest: {} (Segwit active = {})".format(func.__name__, self.segwit_active))
# Assert segwit status is as expected
assert_equal(softfork_active(self.nodes[0], 'segwit'), self.segwit_active)
func(self, *args, **kwargs)
# Each subtest should leave some utxos for the next subtest
assert self.utxo
self.sync_blocks()
# Assert segwit status is as expected at end of subtest
assert_equal(softfork_active(self.nodes[0], 'segwit'), self.segwit_active)
return func_wrapper
def sign_p2pk_witness_input(script, tx_to, in_idx, hashtype, value, key):
"""Add signature for a P2PK witness script."""
tx_to.wit.vtxinwit[in_idx].scriptWitness.stack = [script]
sign_input_segwitv0(tx_to, in_idx, script, value, key, hashtype)
def test_transaction_acceptance(node, p2p, tx, with_witness, accepted, reason=None):
"""Send a transaction to the node and check that it's accepted to the mempool
- Submit the transaction over the p2p interface
- use the getrawmempool rpc to check for acceptance."""
reason = [reason] if reason else []
with node.assert_debug_log(expected_msgs=reason):
p2p.send_and_ping(msg_tx(tx) if with_witness else msg_no_witness_tx(tx))
assert_equal(tx.hash in node.getrawmempool(), accepted)
def test_witness_block(node, p2p, block, accepted, with_witness=True, reason=None):
"""Send a block to the node and check that it's accepted
- Submit the block over the p2p interface
- use the getbestblockhash rpc to check for acceptance."""
reason = [reason] if reason else []
with node.assert_debug_log(expected_msgs=reason):
p2p.send_and_ping(msg_block(block) if with_witness else msg_no_witness_block(block))
assert_equal(node.getbestblockhash() == block.hash, accepted)
class TestP2PConn(P2PInterface):
def __init__(self, wtxidrelay=False):
super().__init__(wtxidrelay=wtxidrelay)
self.getdataset = set()
self.last_wtxidrelay = []
self.lastgetdata = []
self.wtxidrelay = wtxidrelay
# Don't send getdata message replies to invs automatically.
# We'll send the getdata messages explicitly in the test logic.
def on_inv(self, message):
pass
def on_getdata(self, message):
self.lastgetdata = message.inv
for inv in message.inv:
self.getdataset.add(inv.hash)
def on_wtxidrelay(self, message):
self.last_wtxidrelay.append(message)
def announce_tx_and_wait_for_getdata(self, tx, success=True, use_wtxid=False):
if success:
# sanity check
assert (self.wtxidrelay and use_wtxid) or (not self.wtxidrelay and not use_wtxid)
with p2p_lock:
self.last_message.pop("getdata", None)
if use_wtxid:
wtxid = tx.calc_sha256(True)
self.send_message(msg_inv(inv=[CInv(MSG_WTX, wtxid)]))
else:
self.send_message(msg_inv(inv=[CInv(MSG_TX, tx.sha256)]))
if success:
if use_wtxid:
self.wait_for_getdata([wtxid])
else:
self.wait_for_getdata([tx.sha256])
else:
time.sleep(5)
assert not self.last_message.get("getdata")
def announce_block_and_wait_for_getdata(self, block, use_header, timeout=60):
with p2p_lock:
self.last_message.pop("getdata", None)
self.last_message.pop("getheaders", None)
msg = msg_headers()
msg.headers = [CBlockHeader(block)]
if use_header:
self.send_message(msg)
else:
self.send_message(msg_inv(inv=[CInv(MSG_BLOCK, block.sha256)]))
self.wait_for_getheaders()
self.send_message(msg)
self.wait_for_getdata([block.sha256])
def request_block(self, blockhash, inv_type, timeout=60):
with p2p_lock:
self.last_message.pop("block", None)
self.send_message(msg_getdata(inv=[CInv(inv_type, blockhash)]))
self.wait_for_block(blockhash, timeout)
return self.last_message["block"].block
class SegWitTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 2
# This test tests SegWit both pre and post-activation, so use the normal BIP9 activation.
self.extra_args = [
["-acceptnonstdtxn=1", f"-testactivationheight=segwit@{SEGWIT_HEIGHT}", "[email protected]", "-par=1"],
["-acceptnonstdtxn=0", f"-testactivationheight=segwit@{SEGWIT_HEIGHT}"],
]
self.supports_cli = False
# Helper functions
def build_next_block(self):
"""Build a block on top of node0's tip."""
tip = self.nodes[0].getbestblockhash()
height = self.nodes[0].getblockcount() + 1
block_time = self.nodes[0].getblockheader(tip)["mediantime"] + 1
block = create_block(int(tip, 16), create_coinbase(height), block_time)
block.rehash()
return block
def update_witness_block_with_transactions(self, block, tx_list, nonce=0):
"""Add list of transactions to block, adds witness commitment, then solves."""
block.vtx.extend(tx_list)
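# add_witness_commitment (BIP141) appends an OP_RETURN output to the
# coinbase committing to the merkle root of the block's wtxids hashed
# together with the witness nonce.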
add_witness_commitment(block, nonce)
block.solve()
def run_test(self):
# Setup the p2p connections
# self.test_node sets P2P_SERVICES, i.e. NODE_WITNESS | NODE_NETWORK
self.test_node = self.nodes[0].add_p2p_connection(TestP2PConn(), services=P2P_SERVICES)
# self.old_node sets only NODE_NETWORK
self.old_node = self.nodes[0].add_p2p_connection(TestP2PConn(), services=NODE_NETWORK)
# self.std_node is for testing node1 (requires standard txs)
self.std_node = self.nodes[1].add_p2p_connection(TestP2PConn(), services=P2P_SERVICES)
# self.std_wtx_node is for testing node1 with wtxid relay
self.std_wtx_node = self.nodes[1].add_p2p_connection(TestP2PConn(wtxidrelay=True), services=P2P_SERVICES)
assert self.test_node.nServices & NODE_WITNESS != 0
# Keep a place to store utxo's that can be used in later tests
self.utxo = []
self.log.info("Starting tests before segwit activation")
self.segwit_active = False
self.wallet = MiniWallet(self.nodes[0])
self.test_non_witness_transaction()
self.test_v0_outputs_arent_spendable()
self.test_block_relay()
self.test_unnecessary_witness_before_segwit_activation()
self.test_witness_tx_relay_before_segwit_activation()
self.test_standardness_v0()
self.log.info("Advancing to segwit activation")
self.advance_to_segwit_active()
# Segwit status 'active'
self.test_p2sh_witness()
self.test_witness_commitments()
self.test_block_malleability()
self.test_witness_block_size()
self.test_submit_block()
self.test_extra_witness_data()
self.test_max_witness_push_length()
self.test_max_witness_script_length()
self.test_witness_input_length()
self.test_block_relay()
self.test_tx_relay_after_segwit_activation()
self.test_standardness_v0()
self.test_segwit_versions()
self.test_premature_coinbase_witness_spend()
self.test_uncompressed_pubkey()
self.test_signature_version_1()
self.test_non_standard_witness_blinding()
self.test_non_standard_witness()
self.test_witness_sigops()
self.test_superfluous_witness()
self.test_wtxid_relay()
# Individual tests
@subtest
def test_non_witness_transaction(self):
"""See if sending a regular transaction works, and create a utxo to use in later tests."""
# Mine a block with an anyone-can-spend coinbase,
# let it mature, then try to spend it.
block = self.build_next_block()
block.solve()
self.test_node.send_and_ping(msg_no_witness_block(block)) # make sure the block was processed
txid = block.vtx[0].sha256
self.generate(self.wallet, 99) # let the block mature
# Create a transaction that spends the coinbase
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(txid, 0), b""))
tx.vout.append(CTxOut(49 * 100000000, CScript([OP_TRUE, OP_DROP] * 15 + [OP_TRUE])))
tx.calc_sha256()
# Check that serializing it with or without witness is the same
# This is a sanity check of our testing framework.
assert_equal(msg_no_witness_tx(tx).serialize(), msg_tx(tx).serialize())
self.test_node.send_and_ping(msg_tx(tx)) # make sure the transaction was processed
assert tx.hash in self.nodes[0].getrawmempool()
# Save this transaction for later
self.utxo.append(UTXO(tx.sha256, 0, 49 * 100000000))
self.generate(self.nodes[0], 1)
@subtest
def test_unnecessary_witness_before_segwit_activation(self):
"""Verify that blocks with witnesses are rejected before activation."""
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
tx.vout.append(CTxOut(self.utxo[0].nValue - 1000, CScript([OP_TRUE])))
tx.wit.vtxinwit.append(CTxInWitness())
tx.wit.vtxinwit[0].scriptWitness.stack = [CScript([CScriptNum(1)])]
# Verify the hash with witness differs from the txid
# (otherwise our testing framework must be broken!)
tx.rehash()
assert tx.sha256 != tx.calc_sha256(with_witness=True)
# Construct a block that includes the transaction.
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx])
# Sending witness data before activation is not allowed (anti-spam
# rule).
test_witness_block(self.nodes[0], self.test_node, block, accepted=False, reason='unexpected-witness')
# But it should not be permanently marked bad...
# Resend without witness information.
self.test_node.send_and_ping(msg_no_witness_block(block)) # make sure the block was processed
assert_equal(self.nodes[0].getbestblockhash(), block.hash)
# Update our utxo list; we spent the first entry.
self.utxo.pop(0)
self.utxo.append(UTXO(tx.sha256, 0, tx.vout[0].nValue))
@subtest
def test_block_relay(self):
"""Test that block requests to NODE_WITNESS peer are with MSG_WITNESS_FLAG.
This is true regardless of segwit activation.
Also test that we don't ask for blocks from unupgraded peers."""
blocktype = 2 | MSG_WITNESS_FLAG
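# MSG_BLOCK (2) with MSG_WITNESS_FLAG (1 << 30) set, i.e.
# MSG_WITNESS_BLOCK: getdata requests for blocks with witness data.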
# test_node has set NODE_WITNESS, so all getdata requests should be for
# witness blocks.
# Test announcing a block via inv results in a getdata, and that
# announcing a block with a header results in a getdata
block1 = self.build_next_block()
block1.solve()
# Send an empty headers message, to clear out any prior getheaders
# messages that our peer may be waiting for us on.
self.test_node.send_message(msg_headers())
self.test_node.announce_block_and_wait_for_getdata(block1, use_header=False)
assert self.test_node.last_message["getdata"].inv[0].type == blocktype
test_witness_block(self.nodes[0], self.test_node, block1, True)
block2 = self.build_next_block()
block2.solve()
self.test_node.announce_block_and_wait_for_getdata(block2, use_header=True)
assert self.test_node.last_message["getdata"].inv[0].type == blocktype
test_witness_block(self.nodes[0], self.test_node, block2, True)
# Check that we can getdata for witness blocks or regular blocks,
# and the right thing happens.
if not self.segwit_active:
# Before activation, we should be able to request old blocks with
# or without witness, and they should be the same.
chain_height = self.nodes[0].getblockcount()
# Pick 10 random blocks on main chain, and verify that getdata's
# for MSG_BLOCK, MSG_WITNESS_BLOCK, and rpc getblock() are equal.
all_heights = list(range(chain_height + 1))
random.shuffle(all_heights)
all_heights = all_heights[0:10]
for height in all_heights:
block_hash = self.nodes[0].getblockhash(height)
rpc_block = self.nodes[0].getblock(block_hash, False)
block_hash = int(block_hash, 16)
block = self.test_node.request_block(block_hash, 2)
wit_block = self.test_node.request_block(block_hash, 2 | MSG_WITNESS_FLAG)
assert_equal(block.serialize(), wit_block.serialize())
assert_equal(block.serialize(), bytes.fromhex(rpc_block))
else:
# After activation, witness blocks and non-witness blocks should
# be different. Verify rpc getblock() returns witness blocks, while
# getdata respects the requested type.
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [])
# This gives us a witness commitment.
assert len(block.vtx[0].wit.vtxinwit) == 1
assert len(block.vtx[0].wit.vtxinwit[0].scriptWitness.stack) == 1
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
# Now try to retrieve it...
rpc_block = self.nodes[0].getblock(block.hash, False)
non_wit_block = self.test_node.request_block(block.sha256, 2)
wit_block = self.test_node.request_block(block.sha256, 2 | MSG_WITNESS_FLAG)
assert_equal(wit_block.serialize(), bytes.fromhex(rpc_block))
assert_equal(wit_block.serialize(False), non_wit_block.serialize())
assert_equal(wit_block.serialize(), block.serialize())
# Test size, vsize, weight
rpc_details = self.nodes[0].getblock(block.hash, True)
assert_equal(rpc_details["size"], len(block.serialize()))
assert_equal(rpc_details["strippedsize"], len(block.serialize(False)))
assert_equal(rpc_details["weight"], block.get_weight())
# Upgraded node should not ask for blocks from unupgraded
block4 = self.build_next_block()
block4.solve()
self.old_node.getdataset = set()
# Blocks can be requested via direct-fetch (immediately upon processing the announcement)
# or via parallel download (with an indeterminate delay from processing the announcement)
# so to test that a block is NOT requested, we could guess a time period to sleep for,
# and then check. We can avoid the sleep() by taking advantage of transaction getdata's
# being processed after block getdata's, and announce a transaction as well,
# and then check to see if that particular getdata has been received.
# Since 0.14, inv's will only be responded to with a getheaders, so send a header
# to announce this block.
msg = msg_headers()
msg.headers = [CBlockHeader(block4)]
self.old_node.send_message(msg)
self.old_node.announce_tx_and_wait_for_getdata(block4.vtx[0])
assert block4.sha256 not in self.old_node.getdataset
@subtest
def test_v0_outputs_arent_spendable(self):
"""Test that v0 outputs aren't spendable before segwit activation.
~6 months after segwit activation, the SCRIPT_VERIFY_WITNESS flag was
backdated so that it applies to all blocks, going back to the genesis
block.
Consequently, version 0 witness outputs are never spendable without
witness, and so can't be spent before segwit activation (the point at which
blocks are permitted to contain witnesses)."""
# Create two outputs, a p2wsh and p2sh-p2wsh
witness_script = CScript([OP_TRUE])
script_pubkey = script_to_p2wsh_script(witness_script)
p2sh_script_pubkey = script_to_p2sh_script(script_pubkey)
value = self.utxo[0].nValue // 3
tx = CTransaction()
tx.vin = [CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b'')]
tx.vout = [CTxOut(value, script_pubkey), CTxOut(value, p2sh_script_pubkey)]
tx.vout.append(CTxOut(value, CScript([OP_TRUE])))
tx.rehash()
txid = tx.sha256
# Add it to a block
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx])
# Verify that segwit isn't activated. A block serialized with witness
# should be rejected prior to activation.
test_witness_block(self.nodes[0], self.test_node, block, accepted=False, with_witness=True, reason='unexpected-witness')
# Now send the block without witness. It should be accepted
test_witness_block(self.nodes[0], self.test_node, block, accepted=True, with_witness=False)
# Now try to spend the outputs. This should fail since SCRIPT_VERIFY_WITNESS is always enabled.
p2wsh_tx = CTransaction()
p2wsh_tx.vin = [CTxIn(COutPoint(txid, 0), b'')]
p2wsh_tx.vout = [CTxOut(value, CScript([OP_TRUE]))]
p2wsh_tx.wit.vtxinwit.append(CTxInWitness())
p2wsh_tx.wit.vtxinwit[0].scriptWitness.stack = [CScript([OP_TRUE])]
p2wsh_tx.rehash()
p2sh_p2wsh_tx = CTransaction()
p2sh_p2wsh_tx.vin = [CTxIn(COutPoint(txid, 1), CScript([script_pubkey]))]
p2sh_p2wsh_tx.vout = [CTxOut(value, CScript([OP_TRUE]))]
p2sh_p2wsh_tx.wit.vtxinwit.append(CTxInWitness())
p2sh_p2wsh_tx.wit.vtxinwit[0].scriptWitness.stack = [CScript([OP_TRUE])]
p2sh_p2wsh_tx.rehash()
for tx in [p2wsh_tx, p2sh_p2wsh_tx]:
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx])
# When the block is serialized with a witness, the block will be rejected because witness
# data isn't allowed in blocks that don't commit to witness data.
test_witness_block(self.nodes[0], self.test_node, block, accepted=False, with_witness=True, reason='unexpected-witness')
# When the block is serialized without witness, validation fails because the transaction is
# invalid (transactions are always validated with SCRIPT_VERIFY_WITNESS so a segwit v0 transaction
# without a witness is invalid).
# Note: The reject reason for this failure could be
# 'block-validation-failed' (if script check threads > 1) or
# 'mandatory-script-verify-flag-failed (Witness program was passed an
# empty witness)' (otherwise).
test_witness_block(self.nodes[0], self.test_node, block, accepted=False, with_witness=False,
reason='mandatory-script-verify-flag-failed (Witness program was passed an empty witness)')
self.utxo.pop(0)
self.utxo.append(UTXO(txid, 2, value))
@subtest
def test_witness_tx_relay_before_segwit_activation(self):
# Generate a transaction that doesn't require a witness, but send it
# with a witness. Should be rejected for premature-witness, but should
# not be added to recently rejected list.
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
tx.vout.append(CTxOut(self.utxo[0].nValue - 1000, CScript([OP_TRUE, OP_DROP] * 15 + [OP_TRUE])))
tx.wit.vtxinwit.append(CTxInWitness())
tx.wit.vtxinwit[0].scriptWitness.stack = [b'a']
tx.rehash()
tx_hash = tx.sha256
tx_value = tx.vout[0].nValue
# Verify that if a peer doesn't set nServices to include NODE_WITNESS,
# the getdata is just for the non-witness portion.
self.old_node.announce_tx_and_wait_for_getdata(tx)
assert self.old_node.last_message["getdata"].inv[0].type == MSG_TX
# Since we haven't delivered the tx yet, inv'ing the same tx from
# a witness transaction ought not result in a getdata.
self.test_node.announce_tx_and_wait_for_getdata(tx, success=False)
# Delivering this transaction with witness should fail (no matter who
# it's from)
assert_equal(len(self.nodes[0].getrawmempool()), 0)
assert_equal(len(self.nodes[1].getrawmempool()), 0)
test_transaction_acceptance(self.nodes[0], self.old_node, tx, with_witness=True, accepted=False)
test_transaction_acceptance(self.nodes[0], self.test_node, tx, with_witness=True, accepted=False)
# But eliminating the witness should fix it
test_transaction_acceptance(self.nodes[0], self.test_node, tx, with_witness=False, accepted=True)
# Cleanup: mine the first transaction and update utxo
self.generate(self.nodes[0], 1)
assert_equal(len(self.nodes[0].getrawmempool()), 0)
self.utxo.pop(0)
self.utxo.append(UTXO(tx_hash, 0, tx_value))
@subtest
def test_standardness_v0(self):
"""Test V0 txout standardness.
V0 segwit outputs and inputs are always standard.
V0 segwit inputs may only be mined after activation, but not before."""
witness_script = CScript([OP_TRUE])
script_pubkey = script_to_p2wsh_script(witness_script)
p2sh_script_pubkey = script_to_p2sh_script(witness_script)
# First prepare a p2sh output (so that spending it will pass standardness)
p2sh_tx = CTransaction()
p2sh_tx.vin = [CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b"")]
p2sh_tx.vout = [CTxOut(self.utxo[0].nValue - 1000, p2sh_script_pubkey)]
p2sh_tx.rehash()
# Mine it on test_node to create the confirmed output.
test_transaction_acceptance(self.nodes[0], self.test_node, p2sh_tx, with_witness=True, accepted=True)
self.generate(self.nodes[0], 1)
# Now test standardness of v0 P2WSH outputs.
# Start by creating a transaction with two outputs.
tx = CTransaction()
tx.vin = [CTxIn(COutPoint(p2sh_tx.sha256, 0), CScript([witness_script]))]
tx.vout = [CTxOut(p2sh_tx.vout[0].nValue - 10000, script_pubkey)]
tx.vout.append(CTxOut(8000, script_pubkey)) # Might burn this later
tx.vin[0].nSequence = MAX_BIP125_RBF_SEQUENCE # Just to have the option to bump this tx from the mempool
tx.rehash()
# This is always accepted, since the mempool policy is to consider segwit as always active
# and thus allow segwit outputs
test_transaction_acceptance(self.nodes[1], self.std_node, tx, with_witness=True, accepted=True)
# Now create something that looks like a P2PKH output. This won't be spendable.
witness_hash = sha256(witness_script)
script_pubkey = CScript([OP_0, hash160(witness_hash)])
tx2 = CTransaction()
# tx was accepted, so we spend the second output.
tx2.vin = [CTxIn(COutPoint(tx.sha256, 1), b"")]
tx2.vout = [CTxOut(7000, script_pubkey)]
tx2.wit.vtxinwit.append(CTxInWitness())
tx2.wit.vtxinwit[0].scriptWitness.stack = [witness_script]
tx2.rehash()
test_transaction_acceptance(self.nodes[1], self.std_node, tx2, with_witness=True, accepted=True)
# Now update self.utxo for later tests.
tx3 = CTransaction()
# tx and tx2 were both accepted. Don't bother trying to reclaim the
# P2PKH output; just send tx's first output back to an anyone-can-spend.
self.sync_mempools([self.nodes[0], self.nodes[1]])
tx3.vin = [CTxIn(COutPoint(tx.sha256, 0), b"")]
tx3.vout = [CTxOut(tx.vout[0].nValue - 1000, CScript([OP_TRUE, OP_DROP] * 15 + [OP_TRUE]))]
tx3.wit.vtxinwit.append(CTxInWitness())
tx3.wit.vtxinwit[0].scriptWitness.stack = [witness_script]
tx3.rehash()
if not self.segwit_active:
# Just check mempool acceptance, but don't add the transaction to the mempool, since witness is disallowed
# in blocks and the tx is impossible to mine right now.
testres3 = self.nodes[0].testmempoolaccept([tx3.serialize_with_witness().hex()])
testres3[0]["fees"].pop("effective-feerate")
testres3[0]["fees"].pop("effective-includes")
assert_equal(testres3,
[{
'txid': tx3.hash,
'wtxid': tx3.getwtxid(),
'allowed': True,
'vsize': tx3.get_vsize(),
'fees': {
'base': Decimal('0.00001000'),
},
}],
)
# Create the same output as tx3, but by replacing tx
tx3_out = tx3.vout[0]
tx3 = tx
tx3.vout = [tx3_out]
tx3.rehash()
testres3_replaced = self.nodes[0].testmempoolaccept([tx3.serialize_with_witness().hex()])
testres3_replaced[0]["fees"].pop("effective-feerate")
testres3_replaced[0]["fees"].pop("effective-includes")
assert_equal(testres3_replaced,
[{
'txid': tx3.hash,
'wtxid': tx3.getwtxid(),
'allowed': True,
'vsize': tx3.get_vsize(),
'fees': {
'base': Decimal('0.00011000'),
},
}],
)
test_transaction_acceptance(self.nodes[0], self.test_node, tx3, with_witness=True, accepted=True)
self.generate(self.nodes[0], 1)
self.utxo.pop(0)
self.utxo.append(UTXO(tx3.sha256, 0, tx3.vout[0].nValue))
assert_equal(len(self.nodes[1].getrawmempool()), 0)
@subtest
def advance_to_segwit_active(self):
"""Mine enough blocks to activate segwit."""
assert not softfork_active(self.nodes[0], 'segwit')
height = self.nodes[0].getblockcount()
self.generate(self.nodes[0], SEGWIT_HEIGHT - height - 2)
assert not softfork_active(self.nodes[0], 'segwit')
self.generate(self.nodes[0], 1)
assert softfork_active(self.nodes[0], 'segwit')
self.segwit_active = True
@subtest
def test_p2sh_witness(self):
"""Test P2SH wrapped witness programs."""
# Prepare the p2sh-wrapped witness output
witness_script = CScript([OP_DROP, OP_TRUE])
p2wsh_pubkey = script_to_p2wsh_script(witness_script)
script_pubkey = script_to_p2sh_script(p2wsh_pubkey)
script_sig = CScript([p2wsh_pubkey]) # a push of the redeem script
# Fund the P2SH output
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
tx.vout.append(CTxOut(self.utxo[0].nValue - 1000, script_pubkey))
tx.rehash()
# Verify mempool acceptance and block validity
test_transaction_acceptance(self.nodes[0], self.test_node, tx, with_witness=False, accepted=True)
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx])
test_witness_block(self.nodes[0], self.test_node, block, accepted=True, with_witness=True)
self.sync_blocks()
# Now test attempts to spend the output.
spend_tx = CTransaction()
spend_tx.vin.append(CTxIn(COutPoint(tx.sha256, 0), script_sig))
spend_tx.vout.append(CTxOut(tx.vout[0].nValue - 1000, CScript([OP_TRUE])))
spend_tx.rehash()
# This transaction should not be accepted into the mempool pre- or
# post-segwit. Mempool acceptance will use SCRIPT_VERIFY_WITNESS which
# will require a witness to spend a witness program regardless of
# segwit activation. Note that older bitcoind's that are not
# segwit-aware would also reject this for failing CLEANSTACK.
with self.nodes[0].assert_debug_log(
expected_msgs=[spend_tx.hash, 'was not accepted: mandatory-script-verify-flag-failed (Witness program was passed an empty witness)']):
test_transaction_acceptance(self.nodes[0], self.test_node, spend_tx, with_witness=False, accepted=False)
# Try to put the witness script in the scriptSig, should also fail.
spend_tx.vin[0].scriptSig = CScript([p2wsh_pubkey, b'a'])
spend_tx.rehash()
with self.nodes[0].assert_debug_log(
expected_msgs=[spend_tx.hash, 'was not accepted: mandatory-script-verify-flag-failed (Script evaluated without error but finished with a false/empty top stack element)']):
test_transaction_acceptance(self.nodes[0], self.test_node, spend_tx, with_witness=False, accepted=False)
# Now put the witness script in the witness, should succeed after
# segwit activates.
spend_tx.vin[0].scriptSig = script_sig
spend_tx.rehash()
spend_tx.wit.vtxinwit.append(CTxInWitness())
spend_tx.wit.vtxinwit[0].scriptWitness.stack = [b'a', witness_script]
# Verify mempool acceptance
test_transaction_acceptance(self.nodes[0], self.test_node, spend_tx, with_witness=True, accepted=True)
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [spend_tx])
# If we're after activation, then sending this with witnesses should be valid.
# This no longer works before activation, because SCRIPT_VERIFY_WITNESS
# is always set.
# TODO: rewrite this test to make clear that it only works after activation.
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
# Update self.utxo
self.utxo.pop(0)
self.utxo.append(UTXO(spend_tx.sha256, 0, spend_tx.vout[0].nValue))
@subtest
def test_witness_commitments(self):
"""Test witness commitments.
This test can only be run after segwit has activated."""
# First try a correct witness commitment.
block = self.build_next_block()
add_witness_commitment(block)
block.solve()
# Test the test -- witness serialization should be different
assert msg_block(block).serialize() != msg_no_witness_block(block).serialize()
# This empty block should be valid.
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
# Try to tweak the nonce
block_2 = self.build_next_block()
add_witness_commitment(block_2, nonce=28)
block_2.solve()
# The commitment should have changed!
assert block_2.vtx[0].vout[-1] != block.vtx[0].vout[-1]
# This should also be valid.
test_witness_block(self.nodes[0], self.test_node, block_2, accepted=True)
# Now test commitments with actual transactions
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
# Let's construct a witness script
witness_script = CScript([OP_TRUE])
script_pubkey = script_to_p2wsh_script(witness_script)
tx.vout.append(CTxOut(self.utxo[0].nValue - 1000, script_pubkey))
tx.rehash()
# tx2 will spend tx1, and send back to a regular anyone-can-spend address
tx2 = CTransaction()
tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b""))
tx2.vout.append(CTxOut(tx.vout[0].nValue - 1000, witness_script))
tx2.wit.vtxinwit.append(CTxInWitness())
tx2.wit.vtxinwit[0].scriptWitness.stack = [witness_script]
tx2.rehash()
block_3 = self.build_next_block()
self.update_witness_block_with_transactions(block_3, [tx, tx2], nonce=1)
# Add an extra OP_RETURN output that matches the witness commitment template,
# even though it has extra data after the incorrect commitment.
# This block should fail.
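# BIP141: if several coinbase outputs match the commitment pattern, the
# one with the highest index is treated as the commitment, so this bogus
# trailing output overrides the valid commitment.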
block_3.vtx[0].vout.append(CTxOut(0, CScript([OP_RETURN, WITNESS_COMMITMENT_HEADER + ser_uint256(2), 10])))
block_3.vtx[0].rehash()
block_3.hashMerkleRoot = block_3.calc_merkle_root()
block_3.solve()
test_witness_block(self.nodes[0], self.test_node, block_3, accepted=False, reason='bad-witness-merkle-match')
# Add a different commitment with different nonce, but in the
# right location, and with some funds burned(!).
# This should succeed (nValue shouldn't affect finding the
# witness commitment).
add_witness_commitment(block_3, nonce=0)
block_3.vtx[0].vout[0].nValue -= 1
block_3.vtx[0].vout[-1].nValue += 1
block_3.vtx[0].rehash()
block_3.hashMerkleRoot = block_3.calc_merkle_root()
assert len(block_3.vtx[0].vout) == 4 # the block reward output plus 3 OP_RETURNs
block_3.solve()
test_witness_block(self.nodes[0], self.test_node, block_3, accepted=True)
# Finally test that a block with no witness transactions can
# omit the commitment.
block_4 = self.build_next_block()
tx3 = CTransaction()
tx3.vin.append(CTxIn(COutPoint(tx2.sha256, 0), b""))
tx3.vout.append(CTxOut(tx.vout[0].nValue - 1000, witness_script))
tx3.rehash()
block_4.vtx.append(tx3)
block_4.hashMerkleRoot = block_4.calc_merkle_root()
block_4.solve()
test_witness_block(self.nodes[0], self.test_node, block_4, with_witness=False, accepted=True)
# Update available utxo's for use in later test.
self.utxo.pop(0)
self.utxo.append(UTXO(tx3.sha256, 0, tx3.vout[0].nValue))
@subtest
def test_block_malleability(self):
# Make sure that a block that has too big a virtual size
# because of a too-large coinbase witness is not permanently
# marked bad.
block = self.build_next_block()
add_witness_commitment(block)
block.solve()
block.vtx[0].wit.vtxinwit[0].scriptWitness.stack.append(b'a' * 5000000)
assert block.get_weight() > MAX_BLOCK_WEIGHT
# We can't send over the p2p network, because this is too big to relay
# TODO: repeat this test with a block that can be relayed
assert_equal('bad-witness-nonce-size', self.nodes[0].submitblock(block.serialize().hex()))
assert self.nodes[0].getbestblockhash() != block.hash
block.vtx[0].wit.vtxinwit[0].scriptWitness.stack.pop()
assert block.get_weight() < MAX_BLOCK_WEIGHT
assert_equal(None, self.nodes[0].submitblock(block.serialize().hex()))
assert self.nodes[0].getbestblockhash() == block.hash
# Now make sure that malleating the witness reserved value doesn't
# result in a block permanently marked bad.
block = self.build_next_block()
add_witness_commitment(block)
block.solve()
# Change the nonce -- should not cause the block to be permanently
# failed
block.vtx[0].wit.vtxinwit[0].scriptWitness.stack = [ser_uint256(1)]
test_witness_block(self.nodes[0], self.test_node, block, accepted=False, reason='bad-witness-merkle-match')
# Changing the witness reserved value doesn't change the block hash
block.vtx[0].wit.vtxinwit[0].scriptWitness.stack = [ser_uint256(0)]
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
@subtest
def test_witness_block_size(self):
# TODO: Test that non-witness carrying blocks can't exceed 1MB
# Skipping this test for now; this is covered in feature_block.py
# Test that witness-bearing blocks are limited at ceil(base + wit/4) <= 1MB.
block = self.build_next_block()
assert len(self.utxo) > 0
# Create a P2WSH transaction.
# The witness script will be a bunch of OP_2DROP's, followed by OP_TRUE.
# This should give us plenty of room to tweak the spending tx's
# virtual size.
NUM_DROPS = 200 # 201 max ops per script!
NUM_OUTPUTS = 50
witness_script = CScript([OP_2DROP] * NUM_DROPS + [OP_TRUE])
script_pubkey = script_to_p2wsh_script(witness_script)
prevout = COutPoint(self.utxo[0].sha256, self.utxo[0].n)
value = self.utxo[0].nValue
parent_tx = CTransaction()
parent_tx.vin.append(CTxIn(prevout, b""))
child_value = int(value / NUM_OUTPUTS)
for _ in range(NUM_OUTPUTS):
parent_tx.vout.append(CTxOut(child_value, script_pubkey))
parent_tx.vout[0].nValue -= 50000
assert parent_tx.vout[0].nValue > 0
parent_tx.rehash()
child_tx = CTransaction()
for i in range(NUM_OUTPUTS):
child_tx.vin.append(CTxIn(COutPoint(parent_tx.sha256, i), b""))
child_tx.vout = [CTxOut(value - 100000, CScript([OP_TRUE]))]
for _ in range(NUM_OUTPUTS):
child_tx.wit.vtxinwit.append(CTxInWitness())
child_tx.wit.vtxinwit[-1].scriptWitness.stack = [b'a' * 195] * (2 * NUM_DROPS) + [witness_script]
child_tx.rehash()
self.update_witness_block_with_transactions(block, [parent_tx, child_tx])
additional_bytes = MAX_BLOCK_WEIGHT - block.get_weight()
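# Each witness-only byte adds exactly one weight unit (weight counts
# base bytes four times and witness bytes once), so padding the witness
# pushes lets the loop land exactly on MAX_BLOCK_WEIGHT + 1.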
i = 0
while additional_bytes > 0:
# Add some more bytes to each input until we hit MAX_BLOCK_WEIGHT+1
extra_bytes = min(additional_bytes + 1, 55)
block.vtx[-1].wit.vtxinwit[int(i / (2 * NUM_DROPS))].scriptWitness.stack[i % (2 * NUM_DROPS)] = b'a' * (195 + extra_bytes)
additional_bytes -= extra_bytes
i += 1
block.vtx[0].vout.pop() # Remove old commitment
add_witness_commitment(block)
block.solve()
assert_equal(block.get_weight(), MAX_BLOCK_WEIGHT + 1)
# Make sure that our test case would exceed the old max-network-message
# limit
assert len(block.serialize()) > 2 * 1024 * 1024
test_witness_block(self.nodes[0], self.test_node, block, accepted=False, reason='bad-blk-weight')
# Now resize the second transaction to make the block fit.
cur_length = len(block.vtx[-1].wit.vtxinwit[0].scriptWitness.stack[0])
block.vtx[-1].wit.vtxinwit[0].scriptWitness.stack[0] = b'a' * (cur_length - 1)
block.vtx[0].vout.pop()
add_witness_commitment(block)
block.solve()
assert block.get_weight() == MAX_BLOCK_WEIGHT
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
# Update available utxo's
self.utxo.pop(0)
self.utxo.append(UTXO(block.vtx[-1].sha256, 0, block.vtx[-1].vout[0].nValue))
@subtest
def test_submit_block(self):
"""Test that submitblock adds the nonce automatically when possible."""
block = self.build_next_block()
# Try using a custom nonce and then don't supply it.
# This can't work: submitblock fills in the default all-zero witness
# nonce, which won't match a commitment computed with nonce=1.
add_witness_commitment(block, nonce=1)
block.vtx[0].wit = CTxWitness() # drop the nonce
block.solve()
assert_equal('bad-witness-merkle-match', self.nodes[0].submitblock(block.serialize().hex()))
assert self.nodes[0].getbestblockhash() != block.hash
# Now redo commitment with the standard nonce, but let bitcoind fill it in.
add_witness_commitment(block, nonce=0)
block.vtx[0].wit = CTxWitness()
block.solve()
assert_equal(None, self.nodes[0].submitblock(block.serialize().hex()))
assert_equal(self.nodes[0].getbestblockhash(), block.hash)
# This time, add a tx with non-empty witness, but don't supply
# the commitment.
block_2 = self.build_next_block()
add_witness_commitment(block_2)
block_2.solve()
# Drop commitment and nonce -- submitblock should not fill in.
block_2.vtx[0].vout.pop()
block_2.vtx[0].wit = CTxWitness()
assert_equal('bad-txnmrklroot', self.nodes[0].submitblock(block_2.serialize().hex()))
# Tip should not advance!
assert self.nodes[0].getbestblockhash() != block_2.hash
@subtest
def test_extra_witness_data(self):
"""Test extra witness data in a transaction."""
block = self.build_next_block()
witness_script = CScript([OP_DROP, OP_TRUE])
script_pubkey = script_to_p2wsh_script(witness_script)
# First try extra witness data on a tx that doesn't require a witness
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
tx.vout.append(CTxOut(self.utxo[0].nValue - 2000, script_pubkey))
tx.vout.append(CTxOut(1000, CScript([OP_TRUE]))) # non-witness output
tx.wit.vtxinwit.append(CTxInWitness())
tx.wit.vtxinwit[0].scriptWitness.stack = [CScript([])]
tx.rehash()
self.update_witness_block_with_transactions(block, [tx])
# Extra witness data should not be allowed.
test_witness_block(self.nodes[0], self.test_node, block, accepted=False,
reason='mandatory-script-verify-flag-failed (Witness provided for non-witness script)')
# Try extra signature data. Ok if we're not spending a witness output.
block.vtx[1].wit.vtxinwit = []
block.vtx[1].vin[0].scriptSig = CScript([OP_0])
block.vtx[1].rehash()
add_witness_commitment(block)
block.solve()
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
# Now try extra witness/signature data on an input that DOES require a
# witness
tx2 = CTransaction()
tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b"")) # witness output
tx2.vin.append(CTxIn(COutPoint(tx.sha256, 1), b"")) # non-witness
tx2.vout.append(CTxOut(tx.vout[0].nValue, CScript([OP_TRUE])))
tx2.wit.vtxinwit.extend([CTxInWitness(), CTxInWitness()])
tx2.wit.vtxinwit[0].scriptWitness.stack = [CScript([CScriptNum(1)]), CScript([CScriptNum(1)]), witness_script]
tx2.wit.vtxinwit[1].scriptWitness.stack = [CScript([OP_TRUE])]
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx2])
# This has extra witness data, so it should fail.
test_witness_block(self.nodes[0], self.test_node, block, accepted=False,
reason='mandatory-script-verify-flag-failed (Stack size must be exactly one after execution)')
# Now get rid of the extra witness, but add extra scriptSig data
tx2.vin[0].scriptSig = CScript([OP_TRUE])
tx2.vin[1].scriptSig = CScript([OP_TRUE])
tx2.wit.vtxinwit[0].scriptWitness.stack.pop(0)
tx2.wit.vtxinwit[1].scriptWitness.stack = []
tx2.rehash()
add_witness_commitment(block)
block.solve()
# This has extra signature data for a witness input, so it should fail.
test_witness_block(self.nodes[0], self.test_node, block, accepted=False,
reason='mandatory-script-verify-flag-failed (Witness requires empty scriptSig)')
# Now get rid of the extra scriptsig on the witness input, and verify
# success (even with extra scriptsig data in the non-witness input)
tx2.vin[0].scriptSig = b""
tx2.rehash()
add_witness_commitment(block)
block.solve()
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
# Update utxo for later tests
self.utxo.pop(0)
self.utxo.append(UTXO(tx2.sha256, 0, tx2.vout[0].nValue))
@subtest
def test_max_witness_push_length(self):
"""Test that witness stack can only allow up to 520 byte pushes."""
block = self.build_next_block()
witness_script = CScript([OP_DROP, OP_TRUE])
script_pubkey = script_to_p2wsh_script(witness_script)
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
tx.vout.append(CTxOut(self.utxo[0].nValue - 1000, script_pubkey))
tx.rehash()
tx2 = CTransaction()
tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b""))
tx2.vout.append(CTxOut(tx.vout[0].nValue - 1000, CScript([OP_TRUE])))
tx2.wit.vtxinwit.append(CTxInWitness())
# First try a 521-byte stack element
tx2.wit.vtxinwit[0].scriptWitness.stack = [b'a' * (MAX_SCRIPT_ELEMENT_SIZE + 1), witness_script]
tx2.rehash()
self.update_witness_block_with_transactions(block, [tx, tx2])
test_witness_block(self.nodes[0], self.test_node, block, accepted=False,
reason='mandatory-script-verify-flag-failed (Push value size limit exceeded)')
# Now reduce the length of the stack element
tx2.wit.vtxinwit[0].scriptWitness.stack[0] = b'a' * (MAX_SCRIPT_ELEMENT_SIZE)
add_witness_commitment(block)
block.solve()
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
# Update the utxo for later tests
self.utxo.pop()
self.utxo.append(UTXO(tx2.sha256, 0, tx2.vout[0].nValue))
@subtest
def test_max_witness_script_length(self):
"""Test that witness outputs greater than 10kB can't be spent."""
MAX_WITNESS_SCRIPT_LENGTH = 10000
# This script is 19 max pushes (9937 bytes), then 64 more opcode-bytes.
long_witness_script = CScript([b'a' * MAX_SCRIPT_ELEMENT_SIZE] * 19 + [OP_DROP] * 63 + [OP_TRUE])
assert len(long_witness_script) == MAX_WITNESS_SCRIPT_LENGTH + 1
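        # (Arithmetic check: each 520-byte push serializes as OP_PUSHDATA2 plus a
        # 2-byte length plus the data, i.e. 523 bytes; 19 * 523 = 9937, and the
        # 63 OP_DROPs plus OP_TRUE add 64 opcode bytes, for 10001 bytes in total.)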
long_script_pubkey = script_to_p2wsh_script(long_witness_script)
block = self.build_next_block()
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
tx.vout.append(CTxOut(self.utxo[0].nValue - 1000, long_script_pubkey))
tx.rehash()
tx2 = CTransaction()
tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b""))
tx2.vout.append(CTxOut(tx.vout[0].nValue - 1000, CScript([OP_TRUE])))
tx2.wit.vtxinwit.append(CTxInWitness())
tx2.wit.vtxinwit[0].scriptWitness.stack = [b'a'] * 44 + [long_witness_script]
tx2.rehash()
self.update_witness_block_with_transactions(block, [tx, tx2])
test_witness_block(self.nodes[0], self.test_node, block, accepted=False,
reason='mandatory-script-verify-flag-failed (Script is too big)')
# Try again with one less byte in the witness script
witness_script = CScript([b'a' * MAX_SCRIPT_ELEMENT_SIZE] * 19 + [OP_DROP] * 62 + [OP_TRUE])
assert len(witness_script) == MAX_WITNESS_SCRIPT_LENGTH
script_pubkey = script_to_p2wsh_script(witness_script)
tx.vout[0] = CTxOut(tx.vout[0].nValue, script_pubkey)
tx.rehash()
tx2.vin[0].prevout.hash = tx.sha256
tx2.wit.vtxinwit[0].scriptWitness.stack = [b'a'] * 43 + [witness_script]
tx2.rehash()
block.vtx = [block.vtx[0]]
self.update_witness_block_with_transactions(block, [tx, tx2])
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
self.utxo.pop()
self.utxo.append(UTXO(tx2.sha256, 0, tx2.vout[0].nValue))
@subtest
def test_witness_input_length(self):
"""Test that vin length must match vtxinwit length."""
witness_script = CScript([OP_DROP, OP_TRUE])
script_pubkey = script_to_p2wsh_script(witness_script)
# Create a transaction that splits our utxo into many outputs
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
value = self.utxo[0].nValue
for _ in range(10):
tx.vout.append(CTxOut(int(value / 10), script_pubkey))
tx.vout[0].nValue -= 1000
assert tx.vout[0].nValue >= 0
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx])
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
# Try various ways to spend tx that should all break.
# This "broken" transaction serializer will not normalize
# the length of vtxinwit.
class BrokenCTransaction(CTransaction):
def serialize_with_witness(self):
flags = 0
if not self.wit.is_null():
flags |= 1
r = b""
r += struct.pack("<i", self.nVersion)
if flags:
dummy = []
r += ser_vector(dummy)
r += struct.pack("<B", flags)
r += ser_vector(self.vin)
r += ser_vector(self.vout)
if flags & 1:
r += self.wit.serialize()
r += struct.pack("<I", self.nLockTime)
return r
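        # For reference, the BIP 144 wire layout this mimics is:
        #   nVersion | marker (0x00) | flags (0x01) | vin | vout | witness | nLockTime
        # ser_vector(dummy) emits a single 0x00 byte, which doubles as the marker.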
tx2 = BrokenCTransaction()
for i in range(10):
tx2.vin.append(CTxIn(COutPoint(tx.sha256, i), b""))
tx2.vout.append(CTxOut(value - 3000, CScript([OP_TRUE])))
# First try using a too long vtxinwit
for i in range(11):
tx2.wit.vtxinwit.append(CTxInWitness())
tx2.wit.vtxinwit[i].scriptWitness.stack = [b'a', witness_script]
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx2])
test_witness_block(self.nodes[0], self.test_node, block, accepted=False, reason='bad-txnmrklroot')
# Now try using a too short vtxinwit
tx2.wit.vtxinwit.pop()
tx2.wit.vtxinwit.pop()
block.vtx = [block.vtx[0]]
self.update_witness_block_with_transactions(block, [tx2])
# This block doesn't result in a specific reject reason, but an iostream exception:
# "Exception 'CDataStream::read(): end of data: unspecified iostream_category error' (...) caught"
test_witness_block(self.nodes[0], self.test_node, block, accepted=False)
# Now make one of the intermediate witnesses be incorrect
tx2.wit.vtxinwit.append(CTxInWitness())
tx2.wit.vtxinwit[-1].scriptWitness.stack = [b'a', witness_script]
tx2.wit.vtxinwit[5].scriptWitness.stack = [witness_script]
block.vtx = [block.vtx[0]]
self.update_witness_block_with_transactions(block, [tx2])
test_witness_block(self.nodes[0], self.test_node, block, accepted=False,
reason='mandatory-script-verify-flag-failed (Operation not valid with the current stack size)')
# Fix the broken witness and the block should be accepted.
tx2.wit.vtxinwit[5].scriptWitness.stack = [b'a', witness_script]
block.vtx = [block.vtx[0]]
self.update_witness_block_with_transactions(block, [tx2])
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
self.utxo.pop()
self.utxo.append(UTXO(tx2.sha256, 0, tx2.vout[0].nValue))
@subtest
def test_tx_relay_after_segwit_activation(self):
"""Test transaction relay after segwit activation.
After segwit activates, verify that mempool:
- rejects transactions with unnecessary/extra witnesses
- accepts transactions with valid witnesses
and that witness transactions are relayed to non-upgraded peers."""
# Generate a transaction that doesn't require a witness, but send it
# with a witness. Should be rejected because we can't use a witness
# when spending a non-witness output.
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
tx.vout.append(CTxOut(self.utxo[0].nValue - 1000, CScript([OP_TRUE, OP_DROP] * 15 + [OP_TRUE])))
tx.wit.vtxinwit.append(CTxInWitness())
tx.wit.vtxinwit[0].scriptWitness.stack = [b'a']
tx.rehash()
tx_hash = tx.sha256
# Verify that unnecessary witnesses are rejected.
self.test_node.announce_tx_and_wait_for_getdata(tx)
assert_equal(len(self.nodes[0].getrawmempool()), 0)
test_transaction_acceptance(self.nodes[0], self.test_node, tx, with_witness=True, accepted=False)
# Verify that removing the witness succeeds.
test_transaction_acceptance(self.nodes[0], self.test_node, tx, with_witness=False, accepted=True)
# Now try to add extra witness data to a valid witness tx.
witness_script = CScript([OP_TRUE])
script_pubkey = script_to_p2wsh_script(witness_script)
tx2 = CTransaction()
tx2.vin.append(CTxIn(COutPoint(tx_hash, 0), b""))
tx2.vout.append(CTxOut(tx.vout[0].nValue - 1000, script_pubkey))
tx2.rehash()
tx3 = CTransaction()
tx3.vin.append(CTxIn(COutPoint(tx2.sha256, 0), b""))
tx3.wit.vtxinwit.append(CTxInWitness())
        # Add a witness that is too large for IsStandard and check that it does not enter the reject filter
p2sh_script = CScript([OP_TRUE])
witness_script2 = CScript([b'a' * 400000])
tx3.vout.append(CTxOut(tx2.vout[0].nValue - 1000, script_to_p2sh_script(p2sh_script)))
tx3.wit.vtxinwit[0].scriptWitness.stack = [witness_script2]
tx3.rehash()
        # The node is not blinded to the transaction when it is announced by txid:
        # it will keep requesting it any number of times.
        # When announced by wtxid, however, the node does become blinded to it.
self.std_node.announce_tx_and_wait_for_getdata(tx3)
self.std_wtx_node.announce_tx_and_wait_for_getdata(tx3, use_wtxid=True)
test_transaction_acceptance(self.nodes[1], self.std_node, tx3, True, False, 'tx-size')
self.std_node.announce_tx_and_wait_for_getdata(tx3)
self.std_wtx_node.announce_tx_and_wait_for_getdata(tx3, use_wtxid=True, success=False)
# Remove witness stuffing, instead add extra witness push on stack
tx3.vout[0] = CTxOut(tx2.vout[0].nValue - 1000, CScript([OP_TRUE, OP_DROP] * 15 + [OP_TRUE]))
tx3.wit.vtxinwit[0].scriptWitness.stack = [CScript([CScriptNum(1)]), witness_script]
tx3.rehash()
test_transaction_acceptance(self.nodes[0], self.test_node, tx2, with_witness=True, accepted=True)
test_transaction_acceptance(self.nodes[0], self.test_node, tx3, with_witness=True, accepted=False)
# Get rid of the extra witness, and verify acceptance.
tx3.wit.vtxinwit[0].scriptWitness.stack = [witness_script]
# Also check that old_node gets a tx announcement, even though this is
# a witness transaction.
self.old_node.wait_for_inv([CInv(MSG_TX, tx2.sha256)]) # wait until tx2 was inv'ed
test_transaction_acceptance(self.nodes[0], self.test_node, tx3, with_witness=True, accepted=True)
self.old_node.wait_for_inv([CInv(MSG_TX, tx3.sha256)])
# Test that getrawtransaction returns correct witness information
# hash, size, vsize
raw_tx = self.nodes[0].getrawtransaction(tx3.hash, 1)
assert_equal(int(raw_tx["hash"], 16), tx3.calc_sha256(True))
assert_equal(raw_tx["size"], len(tx3.serialize_with_witness()))
vsize = tx3.get_vsize()
assert_equal(raw_tx["vsize"], vsize)
assert_equal(raw_tx["weight"], tx3.get_weight())
assert_equal(len(raw_tx["vin"][0]["txinwitness"]), 1)
assert_equal(raw_tx["vin"][0]["txinwitness"][0], witness_script.hex())
assert vsize != raw_tx["size"]
# Cleanup: mine the transactions and update utxo for next test
self.generate(self.nodes[0], 1)
assert_equal(len(self.nodes[0].getrawmempool()), 0)
self.utxo.pop(0)
self.utxo.append(UTXO(tx3.sha256, 0, tx3.vout[0].nValue))
@subtest
def test_segwit_versions(self):
"""Test validity of future segwit version transactions.
Future segwit versions are non-standard to spend, but valid in blocks.
Sending to future segwit versions is always allowed.
Can run this before and after segwit activation."""
        NUM_SEGWIT_VERSIONS = 17  # will test OP_0, OP_1, ..., OP_16
if len(self.utxo) < NUM_SEGWIT_VERSIONS:
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
split_value = (self.utxo[0].nValue - 4000) // NUM_SEGWIT_VERSIONS
for _ in range(NUM_SEGWIT_VERSIONS):
tx.vout.append(CTxOut(split_value, CScript([OP_TRUE])))
tx.rehash()
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx])
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
self.utxo.pop(0)
for i in range(NUM_SEGWIT_VERSIONS):
self.utxo.append(UTXO(tx.sha256, i, split_value))
self.sync_blocks()
temp_utxo = []
tx = CTransaction()
witness_script = CScript([OP_TRUE])
witness_hash = sha256(witness_script)
assert_equal(len(self.nodes[1].getrawmempool()), 0)
for version in list(range(OP_1, OP_16 + 1)) + [OP_0]:
# First try to spend to a future version segwit script_pubkey.
if version == OP_1:
# Don't use 32-byte v1 witness (used by Taproot; see BIP 341)
script_pubkey = CScript([CScriptOp(version), witness_hash + b'\x00'])
else:
script_pubkey = CScript([CScriptOp(version), witness_hash])
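            # (BIP 341 only assigns meaning to 32-byte v1 programs, so the 33-byte
            # program above remains anyone-can-spend under future-version rules.)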
tx.vin = [CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b"")]
tx.vout = [CTxOut(self.utxo[0].nValue - 1000, script_pubkey)]
tx.rehash()
test_transaction_acceptance(self.nodes[1], self.std_node, tx, with_witness=True, accepted=False)
test_transaction_acceptance(self.nodes[0], self.test_node, tx, with_witness=True, accepted=True)
self.utxo.pop(0)
temp_utxo.append(UTXO(tx.sha256, 0, tx.vout[0].nValue))
self.generate(self.nodes[0], 1) # Mine all the transactions
assert len(self.nodes[0].getrawmempool()) == 0
# Finally, verify that version 0 -> version 2 transactions
# are standard
script_pubkey = CScript([CScriptOp(OP_2), witness_hash])
tx2 = CTransaction()
tx2.vin = [CTxIn(COutPoint(tx.sha256, 0), b"")]
tx2.vout = [CTxOut(tx.vout[0].nValue - 1000, script_pubkey)]
tx2.wit.vtxinwit.append(CTxInWitness())
tx2.wit.vtxinwit[0].scriptWitness.stack = [witness_script]
tx2.rehash()
# Gets accepted to both policy-enforcing nodes and others.
test_transaction_acceptance(self.nodes[0], self.test_node, tx2, with_witness=True, accepted=True)
test_transaction_acceptance(self.nodes[1], self.std_node, tx2, with_witness=True, accepted=True)
temp_utxo.pop() # last entry in temp_utxo was the output we just spent
temp_utxo.append(UTXO(tx2.sha256, 0, tx2.vout[0].nValue))
        # Spend everything in temp_utxo into a segwit v2 output.
tx3 = CTransaction()
total_value = 0
for i in temp_utxo:
tx3.vin.append(CTxIn(COutPoint(i.sha256, i.n), b""))
tx3.wit.vtxinwit.append(CTxInWitness())
total_value += i.nValue
tx3.wit.vtxinwit[-1].scriptWitness.stack = [witness_script]
tx3.vout.append(CTxOut(total_value - 1000, script_pubkey))
tx3.rehash()
# First we test this transaction against std_node
# making sure the txid is added to the reject filter
self.std_node.announce_tx_and_wait_for_getdata(tx3)
test_transaction_acceptance(self.nodes[1], self.std_node, tx3, with_witness=True, accepted=False, reason="bad-txns-nonstandard-inputs")
# Now the node will no longer ask for getdata of this transaction when advertised by same txid
self.std_node.announce_tx_and_wait_for_getdata(tx3, success=False)
# Spending a higher version witness output is not allowed by policy,
# even with the node that accepts non-standard txs.
test_transaction_acceptance(self.nodes[0], self.test_node, tx3, with_witness=True, accepted=False, reason="reserved for soft-fork upgrades")
# Building a block with the transaction must be valid, however.
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx2, tx3])
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
self.sync_blocks()
# Add utxo to our list
self.utxo.append(UTXO(tx3.sha256, 0, tx3.vout[0].nValue))
@subtest
def test_premature_coinbase_witness_spend(self):
block = self.build_next_block()
# Change the output of the block to be a witness output.
witness_script = CScript([OP_TRUE])
script_pubkey = script_to_p2wsh_script(witness_script)
block.vtx[0].vout[0].scriptPubKey = script_pubkey
        # The next call rehashes the coinbase, updates the merkle root, and
        # solves the block.
self.update_witness_block_with_transactions(block, [])
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
spend_tx = CTransaction()
spend_tx.vin = [CTxIn(COutPoint(block.vtx[0].sha256, 0), b"")]
spend_tx.vout = [CTxOut(block.vtx[0].vout[0].nValue, witness_script)]
spend_tx.wit.vtxinwit.append(CTxInWitness())
spend_tx.wit.vtxinwit[0].scriptWitness.stack = [witness_script]
spend_tx.rehash()
# Now test a premature spend.
self.generate(self.nodes[0], 98)
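        # A coinbase needs COINBASE_MATURITY (100) confirmations to be spendable:
        # after these 98 blocks, a spend in the next block is still one short.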
block2 = self.build_next_block()
self.update_witness_block_with_transactions(block2, [spend_tx])
test_witness_block(self.nodes[0], self.test_node, block2, accepted=False, reason='bad-txns-premature-spend-of-coinbase')
# Advancing one more block should allow the spend.
self.generate(self.nodes[0], 1)
block2 = self.build_next_block()
self.update_witness_block_with_transactions(block2, [spend_tx])
test_witness_block(self.nodes[0], self.test_node, block2, accepted=True)
self.sync_blocks()
@subtest
def test_uncompressed_pubkey(self):
"""Test uncompressed pubkey validity in segwit transactions.
Uncompressed pubkeys are no longer supported in default relay policy,
but (for now) are still valid in blocks."""
# Segwit transactions using uncompressed pubkeys are not accepted
# under default policy, but should still pass consensus.
key, pubkey = generate_keypair(compressed=False)
assert_equal(len(pubkey), 65) # This should be an uncompressed pubkey
utxo = self.utxo.pop(0)
# Test 1: P2WPKH
# First create a P2WPKH output that uses an uncompressed pubkey
pubkeyhash = hash160(pubkey)
script_pkh = key_to_p2wpkh_script(pubkey)
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(utxo.sha256, utxo.n), b""))
tx.vout.append(CTxOut(utxo.nValue - 1000, script_pkh))
tx.rehash()
# Confirm it in a block.
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx])
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
# Now try to spend it. Send it to a P2WSH output, which we'll
# use in the next test.
witness_script = key_to_p2pk_script(pubkey)
script_wsh = script_to_p2wsh_script(witness_script)
tx2 = CTransaction()
tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b""))
tx2.vout.append(CTxOut(tx.vout[0].nValue - 1000, script_wsh))
script = keyhash_to_p2pkh_script(pubkeyhash)
tx2.wit.vtxinwit.append(CTxInWitness())
tx2.wit.vtxinwit[0].scriptWitness.stack = [pubkey]
sign_input_segwitv0(tx2, 0, script, tx.vout[0].nValue, key)
# Should fail policy test.
test_transaction_acceptance(self.nodes[0], self.test_node, tx2, True, False, 'non-mandatory-script-verify-flag (Using non-compressed keys in segwit)')
# But passes consensus.
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx2])
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
# Test 2: P2WSH
# Try to spend the P2WSH output created in last test.
# Send it to a P2SH(P2WSH) output, which we'll use in the next test.
script_p2sh = script_to_p2sh_script(script_wsh)
script_sig = CScript([script_wsh])
tx3 = CTransaction()
tx3.vin.append(CTxIn(COutPoint(tx2.sha256, 0), b""))
tx3.vout.append(CTxOut(tx2.vout[0].nValue - 1000, script_p2sh))
tx3.wit.vtxinwit.append(CTxInWitness())
sign_p2pk_witness_input(witness_script, tx3, 0, SIGHASH_ALL, tx2.vout[0].nValue, key)
# Should fail policy test.
test_transaction_acceptance(self.nodes[0], self.test_node, tx3, True, False, 'non-mandatory-script-verify-flag (Using non-compressed keys in segwit)')
# But passes consensus.
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx3])
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
# Test 3: P2SH(P2WSH)
# Try to spend the P2SH output created in the last test.
# Send it to a P2PKH output, which we'll use in the next test.
script_pubkey = keyhash_to_p2pkh_script(pubkeyhash)
tx4 = CTransaction()
tx4.vin.append(CTxIn(COutPoint(tx3.sha256, 0), script_sig))
tx4.vout.append(CTxOut(tx3.vout[0].nValue - 1000, script_pubkey))
tx4.wit.vtxinwit.append(CTxInWitness())
sign_p2pk_witness_input(witness_script, tx4, 0, SIGHASH_ALL, tx3.vout[0].nValue, key)
# Should fail policy test.
test_transaction_acceptance(self.nodes[0], self.test_node, tx4, True, False, 'non-mandatory-script-verify-flag (Using non-compressed keys in segwit)')
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx4])
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
# Test 4: Uncompressed pubkeys should still be valid in non-segwit
# transactions.
tx5 = CTransaction()
tx5.vin.append(CTxIn(COutPoint(tx4.sha256, 0), b""))
tx5.vout.append(CTxOut(tx4.vout[0].nValue - 1000, CScript([OP_TRUE])))
tx5.vin[0].scriptSig = CScript([pubkey])
sign_input_legacy(tx5, 0, script_pubkey, key)
# Should pass policy and consensus.
test_transaction_acceptance(self.nodes[0], self.test_node, tx5, True, True)
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx5])
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
self.utxo.append(UTXO(tx5.sha256, 0, tx5.vout[0].nValue))
@subtest
def test_signature_version_1(self):
key, pubkey = generate_keypair()
witness_script = key_to_p2pk_script(pubkey)
script_pubkey = script_to_p2wsh_script(witness_script)
# First create a witness output for use in the tests.
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
tx.vout.append(CTxOut(self.utxo[0].nValue - 1000, script_pubkey))
tx.rehash()
test_transaction_acceptance(self.nodes[0], self.test_node, tx, with_witness=True, accepted=True)
# Mine this transaction in preparation for following tests.
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx])
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
self.sync_blocks()
self.utxo.pop(0)
# Test each hashtype
prev_utxo = UTXO(tx.sha256, 0, tx.vout[0].nValue)
for sigflag in [0, SIGHASH_ANYONECANPAY]:
for hashtype in [SIGHASH_ALL, SIGHASH_NONE, SIGHASH_SINGLE]:
hashtype |= sigflag
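                # (The sighash byte: SIGHASH_ALL=0x01, NONE=0x02, SINGLE=0x03,
                # optionally OR'd with SIGHASH_ANYONECANPAY=0x80.)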
block = self.build_next_block()
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(prev_utxo.sha256, prev_utxo.n), b""))
tx.vout.append(CTxOut(prev_utxo.nValue - 1000, script_pubkey))
tx.wit.vtxinwit.append(CTxInWitness())
# Too-large input value
sign_p2pk_witness_input(witness_script, tx, 0, hashtype, prev_utxo.nValue + 1, key)
self.update_witness_block_with_transactions(block, [tx])
test_witness_block(self.nodes[0], self.test_node, block, accepted=False,
reason='mandatory-script-verify-flag-failed (Script evaluated without error '
                                          'but finished with a false/empty top stack element)')
# Too-small input value
sign_p2pk_witness_input(witness_script, tx, 0, hashtype, prev_utxo.nValue - 1, key)
block.vtx.pop() # remove last tx
self.update_witness_block_with_transactions(block, [tx])
test_witness_block(self.nodes[0], self.test_node, block, accepted=False,
reason='mandatory-script-verify-flag-failed (Script evaluated without error '
                                          'but finished with a false/empty top stack element)')
# Now try correct value
sign_p2pk_witness_input(witness_script, tx, 0, hashtype, prev_utxo.nValue, key)
block.vtx.pop()
self.update_witness_block_with_transactions(block, [tx])
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
prev_utxo = UTXO(tx.sha256, 0, tx.vout[0].nValue)
# Test combinations of signature hashes.
# Split the utxo into a lot of outputs.
# Randomly choose up to 10 to spend, sign with different hashtypes, and
# output to a random number of outputs. Repeat NUM_SIGHASH_TESTS times.
# Ensure that we've tested a situation where we use SIGHASH_SINGLE with
# an input index > number of outputs.
NUM_SIGHASH_TESTS = 500
temp_utxos = []
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(prev_utxo.sha256, prev_utxo.n), b""))
split_value = prev_utxo.nValue // NUM_SIGHASH_TESTS
for _ in range(NUM_SIGHASH_TESTS):
tx.vout.append(CTxOut(split_value, script_pubkey))
tx.wit.vtxinwit.append(CTxInWitness())
sign_p2pk_witness_input(witness_script, tx, 0, SIGHASH_ALL, prev_utxo.nValue, key)
for i in range(NUM_SIGHASH_TESTS):
temp_utxos.append(UTXO(tx.sha256, i, split_value))
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx])
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
block = self.build_next_block()
used_sighash_single_out_of_bounds = False
for i in range(NUM_SIGHASH_TESTS):
# Ping regularly to keep the connection alive
if (not i % 100):
self.test_node.sync_with_ping()
# Choose random number of inputs to use.
num_inputs = random.randint(1, 10)
# Create a slight bias for producing more utxos
num_outputs = random.randint(1, 11)
random.shuffle(temp_utxos)
assert len(temp_utxos) > num_inputs
tx = CTransaction()
total_value = 0
for i in range(num_inputs):
tx.vin.append(CTxIn(COutPoint(temp_utxos[i].sha256, temp_utxos[i].n), b""))
tx.wit.vtxinwit.append(CTxInWitness())
total_value += temp_utxos[i].nValue
split_value = total_value // num_outputs
for _ in range(num_outputs):
tx.vout.append(CTxOut(split_value, script_pubkey))
for i in range(num_inputs):
# Now try to sign each input, using a random hashtype.
anyonecanpay = 0
if random.randint(0, 1):
anyonecanpay = SIGHASH_ANYONECANPAY
hashtype = random.randint(1, 3) | anyonecanpay
sign_p2pk_witness_input(witness_script, tx, i, hashtype, temp_utxos[i].nValue, key)
if (hashtype == SIGHASH_SINGLE and i >= num_outputs):
used_sighash_single_out_of_bounds = True
tx.rehash()
for i in range(num_outputs):
temp_utxos.append(UTXO(tx.sha256, i, split_value))
temp_utxos = temp_utxos[num_inputs:]
block.vtx.append(tx)
            # Test the block periodically, once we're close to the maximum block weight
if block.get_weight() > MAX_BLOCK_WEIGHT - 4000:
self.update_witness_block_with_transactions(block, [])
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
block = self.build_next_block()
if (not used_sighash_single_out_of_bounds):
self.log.info("WARNING: this test run didn't attempt SIGHASH_SINGLE with out-of-bounds index value")
# Test the transactions we've added to the block
if (len(block.vtx) > 1):
self.update_witness_block_with_transactions(block, [])
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
# Now test witness version 0 P2PKH transactions
pubkeyhash = hash160(pubkey)
script_pkh = key_to_p2wpkh_script(pubkey)
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(temp_utxos[0].sha256, temp_utxos[0].n), b""))
tx.vout.append(CTxOut(temp_utxos[0].nValue, script_pkh))
tx.wit.vtxinwit.append(CTxInWitness())
sign_p2pk_witness_input(witness_script, tx, 0, SIGHASH_ALL, temp_utxos[0].nValue, key)
tx2 = CTransaction()
tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b""))
tx2.vout.append(CTxOut(tx.vout[0].nValue, CScript([OP_TRUE])))
script = keyhash_to_p2pkh_script(pubkeyhash)
tx2.wit.vtxinwit.append(CTxInWitness())
sign_input_segwitv0(tx2, 0, script, tx.vout[0].nValue, key)
signature = tx2.wit.vtxinwit[0].scriptWitness.stack.pop()
# Check that we can't have a scriptSig
tx2.vin[0].scriptSig = CScript([signature, pubkey])
tx2.rehash()
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx, tx2])
test_witness_block(self.nodes[0], self.test_node, block, accepted=False,
reason='mandatory-script-verify-flag-failed (Witness requires empty scriptSig)')
# Move the signature to the witness.
block.vtx.pop()
tx2.wit.vtxinwit.append(CTxInWitness())
tx2.wit.vtxinwit[0].scriptWitness.stack = [signature, pubkey]
tx2.vin[0].scriptSig = b""
tx2.rehash()
self.update_witness_block_with_transactions(block, [tx2])
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
temp_utxos.pop(0)
# Update self.utxos for later tests by creating two outputs
# that consolidate all the coins in temp_utxos.
output_value = sum(i.nValue for i in temp_utxos) // 2
tx = CTransaction()
index = 0
# Just spend to our usual anyone-can-spend output
tx.vout = [CTxOut(output_value, CScript([OP_TRUE]))] * 2
for i in temp_utxos:
# Use SIGHASH_ALL|SIGHASH_ANYONECANPAY so we can build up
# the signatures as we go.
tx.vin.append(CTxIn(COutPoint(i.sha256, i.n), b""))
tx.wit.vtxinwit.append(CTxInWitness())
sign_p2pk_witness_input(witness_script, tx, index, SIGHASH_ALL | SIGHASH_ANYONECANPAY, i.nValue, key)
index += 1
block = self.build_next_block()
self.update_witness_block_with_transactions(block, [tx])
test_witness_block(self.nodes[0], self.test_node, block, accepted=True)
for i in range(len(tx.vout)):
self.utxo.append(UTXO(tx.sha256, i, tx.vout[i].nValue))
@subtest
def test_non_standard_witness_blinding(self):
"""Test behavior of unnecessary witnesses in transactions does not blind the node for the transaction"""
# Create a p2sh output -- this is so we can pass the standardness
# rules (an anyone-can-spend OP_TRUE would be rejected, if not wrapped
# in P2SH).
p2sh_program = CScript([OP_TRUE])
script_pubkey = script_to_p2sh_script(p2sh_program)
# Now check that unnecessary witnesses can't be used to blind a node
# to a transaction, eg by violating standardness checks.
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
tx.vout.append(CTxOut(self.utxo[0].nValue - 1000, script_pubkey))
tx.rehash()
test_transaction_acceptance(self.nodes[0], self.test_node, tx, False, True)
self.generate(self.nodes[0], 1)
# We'll add an unnecessary witness to this transaction that would cause
# it to be non-standard, to test that violating policy with a witness
# doesn't blind a node to a transaction. Transactions
# rejected for having a witness shouldn't be added
# to the rejection cache.
tx2 = CTransaction()
tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), CScript([p2sh_program])))
tx2.vout.append(CTxOut(tx.vout[0].nValue - 1000, script_pubkey))
tx2.wit.vtxinwit.append(CTxInWitness())
tx2.wit.vtxinwit[0].scriptWitness.stack = [b'a' * 400]
tx2.rehash()
# This will be rejected due to a policy check:
# No witness is allowed, since it is not a witness program but a p2sh program
test_transaction_acceptance(self.nodes[1], self.std_node, tx2, True, False, 'bad-witness-nonstandard')
# If we send without witness, it should be accepted.
test_transaction_acceptance(self.nodes[1], self.std_node, tx2, False, True)
# Now create a new anyone-can-spend utxo for the next test.
tx3 = CTransaction()
tx3.vin.append(CTxIn(COutPoint(tx2.sha256, 0), CScript([p2sh_program])))
tx3.vout.append(CTxOut(tx2.vout[0].nValue - 1000, CScript([OP_TRUE, OP_DROP] * 15 + [OP_TRUE])))
tx3.rehash()
test_transaction_acceptance(self.nodes[0], self.test_node, tx2, False, True)
test_transaction_acceptance(self.nodes[0], self.test_node, tx3, False, True)
self.generate(self.nodes[0], 1)
# Update our utxo list; we spent the first entry.
self.utxo.pop(0)
self.utxo.append(UTXO(tx3.sha256, 0, tx3.vout[0].nValue))
@subtest
def test_non_standard_witness(self):
"""Test detection of non-standard P2WSH witness"""
pad = chr(1).encode('latin-1')
# Create scripts for tests
scripts = []
scripts.append(CScript([OP_DROP] * 100))
scripts.append(CScript([OP_DROP] * 99))
scripts.append(CScript([pad * 59] * 59 + [OP_DROP] * 60))
scripts.append(CScript([pad * 59] * 59 + [OP_DROP] * 61))
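        # Size check: scripts[2] serializes to 59 * (1 + 59) + 60 = 3600 bytes and
        # scripts[3] to 3601, straddling the MAX_STANDARD_P2WSH_SCRIPT_SIZE limit.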
p2wsh_scripts = []
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
# For each script, generate a pair of P2WSH and P2SH-P2WSH output.
outputvalue = (self.utxo[0].nValue - 1000) // (len(scripts) * 2)
for i in scripts:
p2wsh = script_to_p2wsh_script(i)
p2wsh_scripts.append(p2wsh)
tx.vout.append(CTxOut(outputvalue, p2wsh))
tx.vout.append(CTxOut(outputvalue, script_to_p2sh_script(p2wsh)))
tx.rehash()
txid = tx.sha256
test_transaction_acceptance(self.nodes[0], self.test_node, tx, with_witness=False, accepted=True)
self.generate(self.nodes[0], 1)
# Creating transactions for tests
p2wsh_txs = []
p2sh_txs = []
for i in range(len(scripts)):
p2wsh_tx = CTransaction()
p2wsh_tx.vin.append(CTxIn(COutPoint(txid, i * 2)))
p2wsh_tx.vout.append(CTxOut(outputvalue - 5000, CScript([OP_0, hash160(b"")])))
p2wsh_tx.wit.vtxinwit.append(CTxInWitness())
p2wsh_tx.rehash()
p2wsh_txs.append(p2wsh_tx)
p2sh_tx = CTransaction()
p2sh_tx.vin.append(CTxIn(COutPoint(txid, i * 2 + 1), CScript([p2wsh_scripts[i]])))
p2sh_tx.vout.append(CTxOut(outputvalue - 5000, CScript([OP_0, hash160(b"")])))
p2sh_tx.wit.vtxinwit.append(CTxInWitness())
p2sh_tx.rehash()
p2sh_txs.append(p2sh_tx)
# Testing native P2WSH
# Witness stack size, excluding witnessScript, over 100 is non-standard
p2wsh_txs[0].wit.vtxinwit[0].scriptWitness.stack = [pad] * 101 + [scripts[0]]
test_transaction_acceptance(self.nodes[1], self.std_node, p2wsh_txs[0], True, False, 'bad-witness-nonstandard')
# Non-standard nodes should accept
test_transaction_acceptance(self.nodes[0], self.test_node, p2wsh_txs[0], True, True)
# Stack element size over 80 bytes is non-standard
p2wsh_txs[1].wit.vtxinwit[0].scriptWitness.stack = [pad * 81] * 100 + [scripts[1]]
test_transaction_acceptance(self.nodes[1], self.std_node, p2wsh_txs[1], True, False, 'bad-witness-nonstandard')
# Non-standard nodes should accept
test_transaction_acceptance(self.nodes[0], self.test_node, p2wsh_txs[1], True, True)
# Standard nodes should accept if element size is not over 80 bytes
p2wsh_txs[1].wit.vtxinwit[0].scriptWitness.stack = [pad * 80] * 100 + [scripts[1]]
test_transaction_acceptance(self.nodes[1], self.std_node, p2wsh_txs[1], True, True)
# witnessScript size at 3600 bytes is standard
p2wsh_txs[2].wit.vtxinwit[0].scriptWitness.stack = [pad, pad, scripts[2]]
test_transaction_acceptance(self.nodes[0], self.test_node, p2wsh_txs[2], True, True)
test_transaction_acceptance(self.nodes[1], self.std_node, p2wsh_txs[2], True, True)
# witnessScript size at 3601 bytes is non-standard
p2wsh_txs[3].wit.vtxinwit[0].scriptWitness.stack = [pad, pad, pad, scripts[3]]
test_transaction_acceptance(self.nodes[1], self.std_node, p2wsh_txs[3], True, False, 'bad-witness-nonstandard')
# Non-standard nodes should accept
test_transaction_acceptance(self.nodes[0], self.test_node, p2wsh_txs[3], True, True)
# Repeating the same tests with P2SH-P2WSH
p2sh_txs[0].wit.vtxinwit[0].scriptWitness.stack = [pad] * 101 + [scripts[0]]
test_transaction_acceptance(self.nodes[1], self.std_node, p2sh_txs[0], True, False, 'bad-witness-nonstandard')
test_transaction_acceptance(self.nodes[0], self.test_node, p2sh_txs[0], True, True)
p2sh_txs[1].wit.vtxinwit[0].scriptWitness.stack = [pad * 81] * 100 + [scripts[1]]
test_transaction_acceptance(self.nodes[1], self.std_node, p2sh_txs[1], True, False, 'bad-witness-nonstandard')
test_transaction_acceptance(self.nodes[0], self.test_node, p2sh_txs[1], True, True)
p2sh_txs[1].wit.vtxinwit[0].scriptWitness.stack = [pad * 80] * 100 + [scripts[1]]
test_transaction_acceptance(self.nodes[1], self.std_node, p2sh_txs[1], True, True)
p2sh_txs[2].wit.vtxinwit[0].scriptWitness.stack = [pad, pad, scripts[2]]
test_transaction_acceptance(self.nodes[0], self.test_node, p2sh_txs[2], True, True)
test_transaction_acceptance(self.nodes[1], self.std_node, p2sh_txs[2], True, True)
p2sh_txs[3].wit.vtxinwit[0].scriptWitness.stack = [pad, pad, pad, scripts[3]]
test_transaction_acceptance(self.nodes[1], self.std_node, p2sh_txs[3], True, False, 'bad-witness-nonstandard')
test_transaction_acceptance(self.nodes[0], self.test_node, p2sh_txs[3], True, True)
self.generate(self.nodes[0], 1) # Mine and clean up the mempool of non-standard node
# Valid but non-standard transactions in a block should be accepted by standard node
self.sync_blocks()
assert_equal(len(self.nodes[0].getrawmempool()), 0)
assert_equal(len(self.nodes[1].getrawmempool()), 0)
self.utxo.pop(0)
@subtest
def test_witness_sigops(self):
"""Test sigop counting is correct inside witnesses."""
# Keep this under MAX_OPS_PER_SCRIPT (201)
witness_script = CScript([OP_TRUE, OP_IF, OP_TRUE, OP_ELSE] + [OP_CHECKMULTISIG] * 5 + [OP_CHECKSIG] * 193 + [OP_ENDIF])
script_pubkey = script_to_p2wsh_script(witness_script)
sigops_per_script = 20 * 5 + 193 * 1
# We'll produce 2 extra outputs, one with a program that would take us
# over max sig ops, and one with a program that would exactly reach max
# sig ops
outputs = (MAX_SIGOP_COST // sigops_per_script) + 2
extra_sigops_available = MAX_SIGOP_COST % sigops_per_script
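        # Concretely: MAX_SIGOP_COST is 80000 and each spend costs 293 witness
        # sigops (20 * 5 + 193), so 273 spends fit with 11 sigops to spare.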
# We chose the number of checkmultisigs/checksigs to make this work:
assert extra_sigops_available < 100 # steer clear of MAX_OPS_PER_SCRIPT
# This script, when spent with the first
# N(=MAX_SIGOP_COST//sigops_per_script) outputs of our transaction,
# would push us just over the block sigop limit.
witness_script_toomany = CScript([OP_TRUE, OP_IF, OP_TRUE, OP_ELSE] + [OP_CHECKSIG] * (extra_sigops_available + 1) + [OP_ENDIF])
script_pubkey_toomany = script_to_p2wsh_script(witness_script_toomany)
# If we spend this script instead, we would exactly reach our sigop
# limit (for witness sigops).
witness_script_justright = CScript([OP_TRUE, OP_IF, OP_TRUE, OP_ELSE] + [OP_CHECKSIG] * (extra_sigops_available) + [OP_ENDIF])
script_pubkey_justright = script_to_p2wsh_script(witness_script_justright)
# First split our available utxo into a bunch of outputs
split_value = self.utxo[0].nValue // outputs
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
for _ in range(outputs):
tx.vout.append(CTxOut(split_value, script_pubkey))
tx.vout[-2].scriptPubKey = script_pubkey_toomany
tx.vout[-1].scriptPubKey = script_pubkey_justright
tx.rehash()
block_1 = self.build_next_block()
self.update_witness_block_with_transactions(block_1, [tx])
test_witness_block(self.nodes[0], self.test_node, block_1, accepted=True)
tx2 = CTransaction()
# If we try to spend the first n-1 outputs from tx, that should be
# too many sigops.
total_value = 0
for i in range(outputs - 1):
tx2.vin.append(CTxIn(COutPoint(tx.sha256, i), b""))
tx2.wit.vtxinwit.append(CTxInWitness())
tx2.wit.vtxinwit[-1].scriptWitness.stack = [witness_script]
total_value += tx.vout[i].nValue
tx2.wit.vtxinwit[-1].scriptWitness.stack = [witness_script_toomany]
tx2.vout.append(CTxOut(total_value, CScript([OP_TRUE])))
tx2.rehash()
block_2 = self.build_next_block()
self.update_witness_block_with_transactions(block_2, [tx2])
test_witness_block(self.nodes[0], self.test_node, block_2, accepted=False, reason='bad-blk-sigops')
# Try dropping the last input in tx2, and add an output that has
# too many sigops (contributing to legacy sigop count).
checksig_count = (extra_sigops_available // 4) + 1
script_pubkey_checksigs = CScript([OP_CHECKSIG] * checksig_count)
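        # (Legacy sigops in a scriptPubKey count at WITNESS_SCALE_FACTOR (4x)
        # toward MAX_SIGOP_COST, hence extra_sigops_available // 4 + 1 bare
        # checksigs are just enough to overflow the remaining budget.)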
tx2.vout.append(CTxOut(0, script_pubkey_checksigs))
tx2.vin.pop()
tx2.wit.vtxinwit.pop()
tx2.vout[0].nValue -= tx.vout[-2].nValue
tx2.rehash()
block_3 = self.build_next_block()
self.update_witness_block_with_transactions(block_3, [tx2])
test_witness_block(self.nodes[0], self.test_node, block_3, accepted=False, reason='bad-blk-sigops')
# If we drop the last checksig in this output, the tx should succeed.
block_4 = self.build_next_block()
tx2.vout[-1].scriptPubKey = CScript([OP_CHECKSIG] * (checksig_count - 1))
tx2.rehash()
self.update_witness_block_with_transactions(block_4, [tx2])
test_witness_block(self.nodes[0], self.test_node, block_4, accepted=True)
# Reset the tip back down for the next test
self.sync_blocks()
for x in self.nodes:
x.invalidateblock(block_4.hash)
# Try replacing the last input of tx2 to be spending the last
# output of tx
block_5 = self.build_next_block()
tx2.vout.pop()
tx2.vin.append(CTxIn(COutPoint(tx.sha256, outputs - 1), b""))
tx2.wit.vtxinwit.append(CTxInWitness())
tx2.wit.vtxinwit[-1].scriptWitness.stack = [witness_script_justright]
tx2.rehash()
self.update_witness_block_with_transactions(block_5, [tx2])
test_witness_block(self.nodes[0], self.test_node, block_5, accepted=True)
# TODO: test p2sh sigop counting
# Cleanup and prep for next test
self.utxo.pop(0)
self.utxo.append(UTXO(tx2.sha256, 0, tx2.vout[0].nValue))
@subtest
def test_superfluous_witness(self):
        # Serialization of a tx that always sets the witness flag byte to 3
def serialize_with_bogus_witness(tx):
flags = 3
r = b""
r += struct.pack("<i", tx.nVersion)
if flags:
dummy = []
r += ser_vector(dummy)
r += struct.pack("<B", flags)
r += ser_vector(tx.vin)
r += ser_vector(tx.vout)
if flags & 1:
if (len(tx.wit.vtxinwit) != len(tx.vin)):
# vtxinwit must have the same length as vin
tx.wit.vtxinwit = tx.wit.vtxinwit[:len(tx.vin)]
for _ in range(len(tx.wit.vtxinwit), len(tx.vin)):
tx.wit.vtxinwit.append(CTxInWitness())
r += tx.wit.serialize()
r += struct.pack("<I", tx.nLockTime)
return r
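        # flags=3 sets the witness bit (1) plus an undefined bit (2). With real
        # witness data the leftover bit 2 triggers "Unknown transaction optional
        # data"; with an all-empty witness the decoder reports "Superfluous
        # witness record" instead -- the two paths exercised below.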
class msg_bogus_tx(msg_tx):
def serialize(self):
return serialize_with_bogus_witness(self.tx)
tx = self.wallet.create_self_transfer()['tx']
assert_raises_rpc_error(-22, "TX decode failed", self.nodes[0].decoderawtransaction, hexstring=serialize_with_bogus_witness(tx).hex(), iswitness=True)
with self.nodes[0].assert_debug_log(['Unknown transaction optional data']):
self.test_node.send_and_ping(msg_bogus_tx(tx))
tx.wit.vtxinwit = [] # drop witness
assert_raises_rpc_error(-22, "TX decode failed", self.nodes[0].decoderawtransaction, hexstring=serialize_with_bogus_witness(tx).hex(), iswitness=True)
with self.nodes[0].assert_debug_log(['Superfluous witness record']):
self.test_node.send_and_ping(msg_bogus_tx(tx))
@subtest
def test_wtxid_relay(self):
# Use brand new nodes to avoid contamination from earlier tests
self.wtx_node = self.nodes[0].add_p2p_connection(TestP2PConn(wtxidrelay=True), services=P2P_SERVICES)
self.tx_node = self.nodes[0].add_p2p_connection(TestP2PConn(wtxidrelay=False), services=P2P_SERVICES)
# Check wtxidrelay feature negotiation message through connecting a new peer
def received_wtxidrelay():
return (len(self.wtx_node.last_wtxidrelay) > 0)
self.wtx_node.wait_until(received_wtxidrelay)
# Create a Segwit output from the latest UTXO
# and announce it to the network
witness_script = CScript([OP_TRUE])
script_pubkey = script_to_p2wsh_script(witness_script)
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
tx.vout.append(CTxOut(self.utxo[0].nValue - 1000, script_pubkey))
tx.rehash()
# Create a Segwit transaction
tx2 = CTransaction()
tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b""))
tx2.vout.append(CTxOut(tx.vout[0].nValue - 1000, script_pubkey))
tx2.wit.vtxinwit.append(CTxInWitness())
tx2.wit.vtxinwit[0].scriptWitness.stack = [witness_script]
tx2.rehash()
# Announce Segwit transaction with wtxid
# and wait for getdata
self.wtx_node.announce_tx_and_wait_for_getdata(tx2, use_wtxid=True)
with p2p_lock:
lgd = self.wtx_node.lastgetdata[:]
assert_equal(lgd, [CInv(MSG_WTX, tx2.calc_sha256(True))])
# Announce Segwit transaction from non wtxidrelay peer
# and wait for getdata
self.tx_node.announce_tx_and_wait_for_getdata(tx2, use_wtxid=False)
with p2p_lock:
lgd = self.tx_node.lastgetdata[:]
assert_equal(lgd, [CInv(MSG_TX|MSG_WITNESS_FLAG, tx2.sha256)])
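        # (MSG_WTX inventories identify a transaction by wtxid -- the hash that
        # commits to the witness -- whereas MSG_TX announcements use the txid.)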
# Send tx2 through; it's an orphan so won't be accepted
with p2p_lock:
self.wtx_node.last_message.pop("getdata", None)
test_transaction_acceptance(self.nodes[0], self.wtx_node, tx2, with_witness=True, accepted=False)
# Expect a request for parent (tx) by txid despite use of WTX peer
self.wtx_node.wait_for_getdata([tx.sha256], 60)
with p2p_lock:
lgd = self.wtx_node.lastgetdata[:]
assert_equal(lgd, [CInv(MSG_WITNESS_TX, tx.sha256)])
# Send tx through
test_transaction_acceptance(self.nodes[0], self.wtx_node, tx, with_witness=False, accepted=True)
# Check tx2 is there now
assert_equal(tx2.hash in self.nodes[0].getrawmempool(), True)
if __name__ == '__main__':
SegWitTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/wallet_listtransactions.py | #!/usr/bin/env python3
# Copyright (c) 2014-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the listtransactions API."""
from decimal import Decimal
import os
import shutil
from test_framework.messages import (
COIN,
tx_from_hex,
)
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_array_result,
assert_equal,
assert_raises_rpc_error,
)
class ListTransactionsTest(BitcoinTestFramework):
def add_options(self, parser):
self.add_wallet_options(parser)
def set_test_params(self):
self.num_nodes = 3
# This test isn't testing txn relay/timing, so set whitelist on the
# peers for instant txn relay. This speeds up the test run time 2-3x.
self.extra_args = [["[email protected]", "-walletrbf=0"]] * self.num_nodes
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def run_test(self):
self.log.info("Test simple send from node0 to node1")
txid = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.1)
self.sync_all()
assert_array_result(self.nodes[0].listtransactions(),
{"txid": txid},
{"category": "send", "amount": Decimal("-0.1"), "confirmations": 0, "trusted": True})
assert_array_result(self.nodes[1].listtransactions(),
{"txid": txid},
{"category": "receive", "amount": Decimal("0.1"), "confirmations": 0, "trusted": False})
self.log.info("Test confirmations change after mining a block")
blockhash = self.generate(self.nodes[0], 1)[0]
blockheight = self.nodes[0].getblockheader(blockhash)['height']
assert_array_result(self.nodes[0].listtransactions(),
{"txid": txid},
{"category": "send", "amount": Decimal("-0.1"), "confirmations": 1, "blockhash": blockhash, "blockheight": blockheight})
assert_array_result(self.nodes[1].listtransactions(),
{"txid": txid},
{"category": "receive", "amount": Decimal("0.1"), "confirmations": 1, "blockhash": blockhash, "blockheight": blockheight})
self.log.info("Test send-to-self on node0")
txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 0.2)
assert_array_result(self.nodes[0].listtransactions(),
{"txid": txid, "category": "send"},
{"amount": Decimal("-0.2")})
assert_array_result(self.nodes[0].listtransactions(),
{"txid": txid, "category": "receive"},
{"amount": Decimal("0.2")})
self.log.info("Test sendmany from node1: twice to self, twice to node0")
send_to = {self.nodes[0].getnewaddress(): 0.11,
self.nodes[1].getnewaddress(): 0.22,
self.nodes[0].getnewaddress(): 0.33,
self.nodes[1].getnewaddress(): 0.44}
txid = self.nodes[1].sendmany("", send_to)
self.sync_all()
assert_array_result(self.nodes[1].listtransactions(),
{"category": "send", "amount": Decimal("-0.11")},
{"txid": txid})
assert_array_result(self.nodes[0].listtransactions(),
{"category": "receive", "amount": Decimal("0.11")},
{"txid": txid})
assert_array_result(self.nodes[1].listtransactions(),
{"category": "send", "amount": Decimal("-0.22")},
{"txid": txid})
assert_array_result(self.nodes[1].listtransactions(),
{"category": "receive", "amount": Decimal("0.22")},
{"txid": txid})
assert_array_result(self.nodes[1].listtransactions(),
{"category": "send", "amount": Decimal("-0.33")},
{"txid": txid})
assert_array_result(self.nodes[0].listtransactions(),
{"category": "receive", "amount": Decimal("0.33")},
{"txid": txid})
assert_array_result(self.nodes[1].listtransactions(),
{"category": "send", "amount": Decimal("-0.44")},
{"txid": txid})
assert_array_result(self.nodes[1].listtransactions(),
{"category": "receive", "amount": Decimal("0.44")},
{"txid": txid})
if not self.options.descriptors:
# include_watchonly is a legacy wallet feature, so don't test it for descriptor wallets
self.log.info("Test 'include_watchonly' feature (legacy wallet)")
pubkey = self.nodes[1].getaddressinfo(self.nodes[1].getnewaddress())['pubkey']
multisig = self.nodes[1].createmultisig(1, [pubkey])
self.nodes[0].importaddress(multisig["redeemScript"], "watchonly", False, True)
txid = self.nodes[1].sendtoaddress(multisig["address"], 0.1)
self.generate(self.nodes[1], 1)
assert_equal(len(self.nodes[0].listtransactions(label="watchonly", include_watchonly=True)), 1)
assert_equal(len(self.nodes[0].listtransactions(dummy="watchonly", include_watchonly=True)), 1)
assert len(self.nodes[0].listtransactions(label="watchonly", count=100, include_watchonly=False)) == 0
assert_array_result(self.nodes[0].listtransactions(label="watchonly", count=100, include_watchonly=True),
{"category": "receive", "amount": Decimal("0.1")},
{"txid": txid, "label": "watchonly"})
self.run_rbf_opt_in_test()
self.run_externally_generated_address_test()
self.run_coinjoin_test()
self.run_invalid_parameters_test()
self.test_op_return()
def run_rbf_opt_in_test(self):
"""Test the opt-in-rbf flag for sent and received transactions."""
def is_opt_in(node, txid):
"""Check whether a transaction signals opt-in RBF itself."""
rawtx = node.getrawtransaction(txid, 1)
for x in rawtx["vin"]:
if x["sequence"] < 0xfffffffe:
return True
return False
def get_unconfirmed_utxo_entry(node, txid_to_match):
"""Find an unconfirmed output matching a certain txid."""
utxo = node.listunspent(0, 0)
for i in utxo:
if i["txid"] == txid_to_match:
return i
return None
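        # Per BIP 125, a tx signals replaceability if any input's nSequence is
        # below 0xfffffffe; unconfirmed descendants of a signaling tx are also
        # replaceable ("inherited" signaling), which the wallet reports as well.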
self.log.info("Test txs w/o opt-in RBF (bip125-replaceable=no)")
# Chain a few transactions that don't opt in.
txid_1 = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 1)
assert not is_opt_in(self.nodes[0], txid_1)
assert_array_result(self.nodes[0].listtransactions(), {"txid": txid_1}, {"bip125-replaceable": "no"})
self.sync_mempools()
assert_array_result(self.nodes[1].listtransactions(), {"txid": txid_1}, {"bip125-replaceable": "no"})
# Tx2 will build off tx1, still not opting in to RBF.
utxo_to_use = get_unconfirmed_utxo_entry(self.nodes[0], txid_1)
assert_equal(utxo_to_use["safe"], True)
utxo_to_use = get_unconfirmed_utxo_entry(self.nodes[1], txid_1)
assert_equal(utxo_to_use["safe"], False)
# Create tx2 using createrawtransaction
inputs = [{"txid": utxo_to_use["txid"], "vout": utxo_to_use["vout"]}]
outputs = {self.nodes[0].getnewaddress(): 0.999}
tx2 = self.nodes[1].createrawtransaction(inputs=inputs, outputs=outputs, replaceable=False)
tx2_signed = self.nodes[1].signrawtransactionwithwallet(tx2)["hex"]
txid_2 = self.nodes[1].sendrawtransaction(tx2_signed)
# ...and check the result
assert not is_opt_in(self.nodes[1], txid_2)
assert_array_result(self.nodes[1].listtransactions(), {"txid": txid_2}, {"bip125-replaceable": "no"})
self.sync_mempools()
assert_array_result(self.nodes[0].listtransactions(), {"txid": txid_2}, {"bip125-replaceable": "no"})
self.log.info("Test txs with opt-in RBF (bip125-replaceable=yes)")
# Tx3 will opt-in to RBF
utxo_to_use = get_unconfirmed_utxo_entry(self.nodes[0], txid_2)
inputs = [{"txid": txid_2, "vout": utxo_to_use["vout"]}]
outputs = {self.nodes[1].getnewaddress(): 0.998}
tx3 = self.nodes[0].createrawtransaction(inputs, outputs)
tx3_modified = tx_from_hex(tx3)
tx3_modified.vin[0].nSequence = 0
tx3 = tx3_modified.serialize().hex()
tx3_signed = self.nodes[0].signrawtransactionwithwallet(tx3)['hex']
txid_3 = self.nodes[0].sendrawtransaction(tx3_signed)
assert is_opt_in(self.nodes[0], txid_3)
assert_array_result(self.nodes[0].listtransactions(), {"txid": txid_3}, {"bip125-replaceable": "yes"})
self.sync_mempools()
assert_array_result(self.nodes[1].listtransactions(), {"txid": txid_3}, {"bip125-replaceable": "yes"})
# Tx4 will chain off tx3. Doesn't signal itself, but depends on one
# that does.
utxo_to_use = get_unconfirmed_utxo_entry(self.nodes[1], txid_3)
inputs = [{"txid": txid_3, "vout": utxo_to_use["vout"]}]
outputs = {self.nodes[0].getnewaddress(): 0.997}
tx4 = self.nodes[1].createrawtransaction(inputs=inputs, outputs=outputs, replaceable=False)
tx4_signed = self.nodes[1].signrawtransactionwithwallet(tx4)["hex"]
txid_4 = self.nodes[1].sendrawtransaction(tx4_signed)
assert not is_opt_in(self.nodes[1], txid_4)
assert_array_result(self.nodes[1].listtransactions(), {"txid": txid_4}, {"bip125-replaceable": "yes"})
self.sync_mempools()
assert_array_result(self.nodes[0].listtransactions(), {"txid": txid_4}, {"bip125-replaceable": "yes"})
self.log.info("Test tx with unknown RBF state (bip125-replaceable=unknown)")
# Replace tx3, and check that tx4 becomes unknown
tx3_b = tx3_modified
tx3_b.vout[0].nValue -= int(Decimal("0.004") * COIN) # bump the fee
tx3_b = tx3_b.serialize().hex()
tx3_b_signed = self.nodes[0].signrawtransactionwithwallet(tx3_b)['hex']
txid_3b = self.nodes[0].sendrawtransaction(tx3_b_signed, 0)
assert is_opt_in(self.nodes[0], txid_3b)
assert_array_result(self.nodes[0].listtransactions(), {"txid": txid_4}, {"bip125-replaceable": "unknown"})
self.sync_mempools()
assert_array_result(self.nodes[1].listtransactions(), {"txid": txid_4}, {"bip125-replaceable": "unknown"})
self.log.info("Test bip125-replaceable status with gettransaction RPC")
for n in self.nodes[0:2]:
assert_equal(n.gettransaction(txid_1)["bip125-replaceable"], "no")
assert_equal(n.gettransaction(txid_2)["bip125-replaceable"], "no")
assert_equal(n.gettransaction(txid_3)["bip125-replaceable"], "yes")
assert_equal(n.gettransaction(txid_3b)["bip125-replaceable"], "yes")
assert_equal(n.gettransaction(txid_4)["bip125-replaceable"], "unknown")
self.log.info("Test bip125-replaceable status with listsinceblock")
for n in self.nodes[0:2]:
txs = {tx['txid']: tx['bip125-replaceable'] for tx in n.listsinceblock()['transactions']}
assert_equal(txs[txid_1], "no")
assert_equal(txs[txid_2], "no")
assert_equal(txs[txid_3], "yes")
assert_equal(txs[txid_3b], "yes")
assert_equal(txs[txid_4], "unknown")
self.log.info("Test mined transactions are no longer bip125-replaceable")
self.generate(self.nodes[0], 1)
assert txid_3b not in self.nodes[0].getrawmempool()
assert_equal(self.nodes[0].gettransaction(txid_3b)["bip125-replaceable"], "no")
assert_equal(self.nodes[0].gettransaction(txid_4)["bip125-replaceable"], "unknown")
def run_externally_generated_address_test(self):
"""Test behavior when receiving address is not in the address book."""
self.log.info("Setup the same wallet on two nodes")
        # refill keypool otherwise the second node wouldn't recognize addresses generated on the first node
self.nodes[0].keypoolrefill(1000)
self.stop_nodes()
wallet0 = os.path.join(self.nodes[0].chain_path, self.default_wallet_name, "wallet.dat")
wallet2 = os.path.join(self.nodes[2].chain_path, self.default_wallet_name, "wallet.dat")
shutil.copyfile(wallet0, wallet2)
self.start_nodes()
# reconnect nodes
self.connect_nodes(0, 1)
self.connect_nodes(1, 2)
self.connect_nodes(2, 0)
addr1 = self.nodes[0].getnewaddress("pizza1", 'legacy')
addr2 = self.nodes[0].getnewaddress("pizza2", 'p2sh-segwit')
addr3 = self.nodes[0].getnewaddress("pizza3", 'bech32')
self.log.info("Send to externally generated addresses")
# send to an address beyond the next to be generated to test the keypool gap
self.nodes[1].sendtoaddress(addr3, "0.001")
self.generate(self.nodes[1], 1)
# send to an address that is already marked as used due to the keypool gap mechanics
self.nodes[1].sendtoaddress(addr2, "0.001")
self.generate(self.nodes[1], 1)
# send to self transaction
self.nodes[0].sendtoaddress(addr1, "0.001")
self.generate(self.nodes[0], 1)
self.log.info("Verify listtransactions is the same regardless of where the address was generated")
transactions0 = self.nodes[0].listtransactions()
transactions2 = self.nodes[2].listtransactions()
# normalize results: remove fields that normally could differ and sort
def normalize_list(txs):
for tx in txs:
tx.pop('label', None)
tx.pop('time', None)
tx.pop('timereceived', None)
txs.sort(key=lambda x: x['txid'])
normalize_list(transactions0)
normalize_list(transactions2)
assert_equal(transactions0, transactions2)
self.log.info("Verify labels are persistent on the node that generated the addresses")
assert_equal(['pizza1'], self.nodes[0].getaddressinfo(addr1)['labels'])
assert_equal(['pizza2'], self.nodes[0].getaddressinfo(addr2)['labels'])
assert_equal(['pizza3'], self.nodes[0].getaddressinfo(addr3)['labels'])
def run_coinjoin_test(self):
self.log.info('Check "coin-join" transaction')
input_0 = next(i for i in self.nodes[0].listunspent(query_options={"minimumAmount": 0.2}, include_unsafe=False))
input_1 = next(i for i in self.nodes[1].listunspent(query_options={"minimumAmount": 0.2}, include_unsafe=False))
raw_hex = self.nodes[0].createrawtransaction(
inputs=[
{
"txid": input_0["txid"],
"vout": input_0["vout"],
},
{
"txid": input_1["txid"],
"vout": input_1["vout"],
},
],
outputs={
self.nodes[0].getnewaddress(): 0.123,
self.nodes[1].getnewaddress(): 0.123,
},
)
raw_hex = self.nodes[0].signrawtransactionwithwallet(raw_hex)["hex"]
raw_hex = self.nodes[1].signrawtransactionwithwallet(raw_hex)["hex"]
txid_join = self.nodes[0].sendrawtransaction(hexstring=raw_hex, maxfeerate=0)
fee_join = self.nodes[0].getmempoolentry(txid_join)["fees"]["base"]
# Fee should be correct: assert_equal(fee_join, self.nodes[0].gettransaction(txid_join)['fee'])
# But it is not, see for example https://github.com/bitcoin/bitcoin/issues/14136:
assert fee_join != self.nodes[0].gettransaction(txid_join)["fee"]
def run_invalid_parameters_test(self):
self.log.info("Test listtransactions RPC parameter validity")
assert_raises_rpc_error(-8, 'Label argument must be a valid label name or "*".', self.nodes[0].listtransactions, label="")
self.nodes[0].listtransactions(label="*")
assert_raises_rpc_error(-8, "Negative count", self.nodes[0].listtransactions, count=-1)
assert_raises_rpc_error(-8, "Negative from", self.nodes[0].listtransactions, skip=-1)
def test_op_return(self):
"""Test if OP_RETURN outputs will be displayed correctly."""
raw_tx = self.nodes[0].createrawtransaction([], [{'data': 'aa'}])
funded_tx = self.nodes[0].fundrawtransaction(raw_tx)
signed_tx = self.nodes[0].signrawtransactionwithwallet(funded_tx['hex'])
tx_id = self.nodes[0].sendrawtransaction(signed_tx['hex'])
op_ret_tx = [tx for tx in self.nodes[0].listtransactions() if tx['txid'] == tx_id][0]
assert 'address' not in op_ret_tx
if __name__ == '__main__':
ListTransactionsTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/p2p_leak.py | #!/usr/bin/env python3
# Copyright (c) 2017-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test message sending before handshake completion.
Before receiving a VERACK, a node should not send anything but VERSION/VERACK
and feature negotiation messages (WTXIDRELAY, SENDADDRV2).
This test connects to a node and sends it a few messages, trying to entice it
into sending us something it shouldn't."""
import time
from test_framework.messages import (
msg_getaddr,
msg_ping,
msg_version,
)
from test_framework.p2p import (
P2PInterface,
P2P_SUBVERSION,
P2P_SERVICES,
P2P_VERSION_RELAY,
)
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_greater_than_or_equal,
)
PEER_TIMEOUT = 3
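# Short -peertimeout so that idle peers are disconnected quickly; the test
# sleeps slightly longer than this to give the timeout a chance to fire.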
class LazyPeer(P2PInterface):
def __init__(self):
super().__init__()
self.unexpected_msg = False
self.ever_connected = False
self.got_wtxidrelay = False
self.got_sendaddrv2 = False
def bad_message(self, message):
self.unexpected_msg = True
print("should not have received message: %s" % message.msgtype)
def on_open(self):
self.ever_connected = True
# Does not respond to "version" with "verack"
def on_version(self, message): self.bad_message(message)
def on_verack(self, message): self.bad_message(message)
def on_inv(self, message): self.bad_message(message)
def on_addr(self, message): self.bad_message(message)
def on_getdata(self, message): self.bad_message(message)
def on_getblocks(self, message): self.bad_message(message)
def on_tx(self, message): self.bad_message(message)
def on_block(self, message): self.bad_message(message)
def on_getaddr(self, message): self.bad_message(message)
def on_headers(self, message): self.bad_message(message)
def on_getheaders(self, message): self.bad_message(message)
def on_ping(self, message): self.bad_message(message)
def on_mempool(self, message): self.bad_message(message)
def on_pong(self, message): self.bad_message(message)
def on_feefilter(self, message): self.bad_message(message)
def on_sendheaders(self, message): self.bad_message(message)
def on_sendcmpct(self, message): self.bad_message(message)
def on_cmpctblock(self, message): self.bad_message(message)
def on_getblocktxn(self, message): self.bad_message(message)
def on_blocktxn(self, message): self.bad_message(message)
def on_wtxidrelay(self, message): self.got_wtxidrelay = True
def on_sendaddrv2(self, message): self.got_sendaddrv2 = True
# Peer that sends a version but not a verack.
class NoVerackIdlePeer(LazyPeer):
def __init__(self):
self.version_received = False
super().__init__()
def on_verack(self, message): pass
# When version is received, don't reply with a verack. Instead, see if the
# node will give us a message that it shouldn't. This is not an exhaustive
# list!
def on_version(self, message):
self.version_received = True
self.send_message(msg_ping())
self.send_message(msg_getaddr())
class P2PVersionStore(P2PInterface):
version_received = None
def on_version(self, msg):
# Responds with an appropriate verack
super().on_version(msg)
self.version_received = msg
class P2PLeakTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.extra_args = [[f"-peertimeout={PEER_TIMEOUT}"]]
def create_old_version(self, nversion):
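        """Return a version message advertising the given (typically obsolete) protocol version."""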
old_version_msg = msg_version()
old_version_msg.nVersion = nversion
old_version_msg.strSubVer = P2P_SUBVERSION
old_version_msg.nServices = P2P_SERVICES
old_version_msg.relay = P2P_VERSION_RELAY
return old_version_msg
def run_test(self):
self.log.info('Check that the node doesn\'t send unexpected messages before handshake completion')
# Peer that never sends a version, nor any other messages. It shouldn't receive anything from the node.
no_version_idle_peer = self.nodes[0].add_p2p_connection(LazyPeer(), send_version=False, wait_for_verack=False)
# Peer that sends a version but not a verack.
no_verack_idle_peer = self.nodes[0].add_p2p_connection(NoVerackIdlePeer(), wait_for_verack=False)
# Pre-wtxidRelay peer that sends a version but not a verack and does not support feature negotiation
# messages which start at nVersion == 70016
pre_wtxidrelay_peer = self.nodes[0].add_p2p_connection(NoVerackIdlePeer(), send_version=False, wait_for_verack=False)
pre_wtxidrelay_peer.send_message(self.create_old_version(70015))
        # Wait until the peers get the verack in response to their version message.
        # Don't wait for the node to receive a verack, since these peers never send one.
no_verack_idle_peer.wait_for_verack()
pre_wtxidrelay_peer.wait_for_verack()
no_version_idle_peer.wait_until(lambda: no_version_idle_peer.ever_connected)
no_verack_idle_peer.wait_until(lambda: no_verack_idle_peer.version_received)
pre_wtxidrelay_peer.wait_until(lambda: pre_wtxidrelay_peer.version_received)
# Mine a block and make sure that it's not sent to the connected peers
self.generate(self.nodes[0], nblocks=1)
# Give the node enough time to possibly leak out a message
time.sleep(PEER_TIMEOUT + 2)
self.log.info("Connect peer to ensure the net thread runs the disconnect logic at least once")
self.nodes[0].add_p2p_connection(P2PInterface())
# Make sure only expected messages came in
assert not no_version_idle_peer.unexpected_msg
assert not no_version_idle_peer.got_wtxidrelay
assert not no_version_idle_peer.got_sendaddrv2
assert not no_verack_idle_peer.unexpected_msg
assert no_verack_idle_peer.got_wtxidrelay
assert no_verack_idle_peer.got_sendaddrv2
assert not pre_wtxidrelay_peer.unexpected_msg
assert not pre_wtxidrelay_peer.got_wtxidrelay
assert not pre_wtxidrelay_peer.got_sendaddrv2
# Expect peers to be disconnected due to timeout
assert not no_version_idle_peer.is_connected
assert not no_verack_idle_peer.is_connected
assert not pre_wtxidrelay_peer.is_connected
self.log.info('Check that the version message does not leak the local address of the node')
p2p_version_store = self.nodes[0].add_p2p_connection(P2PVersionStore())
ver = p2p_version_store.version_received
# Check that received time is within one hour of now
assert_greater_than_or_equal(ver.nTime, time.time() - 3600)
assert_greater_than_or_equal(time.time() + 3600, ver.nTime)
assert_equal(ver.addrFrom.port, 0)
assert_equal(ver.addrFrom.ip, '0.0.0.0')
assert_equal(ver.nStartingHeight, 201)
assert_equal(ver.relay, 1)
self.log.info('Check that old peers are disconnected')
p2p_old_peer = self.nodes[0].add_p2p_connection(P2PInterface(), send_version=False, wait_for_verack=False)
with self.nodes[0].assert_debug_log(["using obsolete version 31799; disconnecting"]):
p2p_old_peer.send_message(self.create_old_version(31799))
p2p_old_peer.wait_for_disconnect()
if __name__ == '__main__':
P2PLeakTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/interface_usdt_validation.py | #!/usr/bin/env python3
# Copyright (c) 2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
""" Tests the validation:* tracepoint API interface.
See https://github.com/bitcoin/bitcoin/blob/master/doc/tracing.md#context-validation
"""
import ctypes
# Test will be skipped if we don't have bcc installed
try:
from bcc import BPF, USDT # type: ignore[import]
except ImportError:
pass
from test_framework.address import ADDRESS_BCRT1_UNSPENDABLE
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal
validation_blockconnected_program = """
#include <uapi/linux/ptrace.h>
typedef signed long long i64;
struct connected_block
{
char hash[32];
int height;
i64 transactions;
int inputs;
i64 sigops;
u64 duration;
};
BPF_PERF_OUTPUT(block_connected);
int trace_block_connected(struct pt_regs *ctx) {
struct connected_block block = {};
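    // Copy the six tracepoint arguments into the struct and submit it to user space.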
bpf_usdt_readarg_p(1, ctx, &block.hash, 32);
bpf_usdt_readarg(2, ctx, &block.height);
bpf_usdt_readarg(3, ctx, &block.transactions);
bpf_usdt_readarg(4, ctx, &block.inputs);
bpf_usdt_readarg(5, ctx, &block.sigops);
bpf_usdt_readarg(6, ctx, &block.duration);
block_connected.perf_submit(ctx, &block, sizeof(block));
return 0;
}
"""
class ValidationTracepointTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
def skip_test_if_missing_module(self):
self.skip_if_platform_not_linux()
self.skip_if_no_bitcoind_tracepoints()
self.skip_if_no_python_bcc()
self.skip_if_no_bpf_permissions()
def run_test(self):
# Tests the validation:block_connected tracepoint by generating blocks
# and comparing the values passed in the tracepoint arguments with the
# blocks.
# See https://github.com/bitcoin/bitcoin/blob/master/doc/tracing.md#tracepoint-validationblock_connected
class Block(ctypes.Structure):
_fields_ = [
("hash", ctypes.c_ubyte * 32),
("height", ctypes.c_int),
("transactions", ctypes.c_int64),
("inputs", ctypes.c_int),
("sigops", ctypes.c_int64),
("duration", ctypes.c_uint64),
]
def __repr__(self):
return "ConnectedBlock(hash=%s height=%d, transactions=%d, inputs=%d, sigops=%d, duration=%d)" % (
bytes(self.hash[::-1]).hex(),
self.height,
self.transactions,
self.inputs,
self.sigops,
self.duration)
BLOCKS_EXPECTED = 2
expected_blocks = dict()
events = []
self.log.info("hook into the validation:block_connected tracepoint")
ctx = USDT(pid=self.nodes[0].process.pid)
ctx.enable_probe(probe="validation:block_connected",
fn_name="trace_block_connected")
bpf = BPF(text=validation_blockconnected_program,
usdt_contexts=[ctx], debug=0, cflags=["-Wno-error=implicit-function-declaration"])
def handle_blockconnected(_, data, __):
event = ctypes.cast(data, ctypes.POINTER(Block)).contents
self.log.info(f"handle_blockconnected(): {event}")
events.append(event)
bpf["block_connected"].open_perf_buffer(
handle_blockconnected)
self.log.info(f"mine {BLOCKS_EXPECTED} blocks")
block_hashes = self.generatetoaddress(
self.nodes[0], BLOCKS_EXPECTED, ADDRESS_BCRT1_UNSPENDABLE)
for block_hash in block_hashes:
expected_blocks[block_hash] = self.nodes[0].getblock(block_hash, 2)
bpf.perf_buffer_poll(timeout=200)
self.log.info(f"check that we correctly traced {BLOCKS_EXPECTED} blocks")
for event in events:
block_hash = bytes(event.hash[::-1]).hex()
block = expected_blocks[block_hash]
assert_equal(block["hash"], block_hash)
assert_equal(block["height"], event.height)
assert_equal(len(block["tx"]), event.transactions)
assert_equal(len([tx["vin"] for tx in block["tx"]]), event.inputs)
assert_equal(0, event.sigops) # no sigops in coinbase tx
# only plausibility checks
assert event.duration > 0
del expected_blocks[block_hash]
assert_equal(BLOCKS_EXPECTED, len(events))
assert_equal(0, len(expected_blocks))
bpf.cleanup()
if __name__ == '__main__':
ValidationTracepointTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/p2p_compactblocks_blocksonly.py | #!/usr/bin/env python3
# Copyright (c) 2021-2021 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test that a node in blocksonly mode does not request compact blocks."""
from test_framework.messages import (
MSG_BLOCK,
MSG_CMPCT_BLOCK,
MSG_WITNESS_FLAG,
CBlock,
CBlockHeader,
CInv,
from_hex,
msg_block,
msg_getdata,
msg_headers,
msg_sendcmpct,
)
from test_framework.p2p import P2PInterface
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal
class P2PCompactBlocksBlocksOnly(BitcoinTestFramework):
def set_test_params(self):
self.extra_args = [["-blocksonly"], [], [], []]
self.num_nodes = 4
def setup_network(self):
self.setup_nodes()
# Start network with everyone disconnected
self.sync_all()
def build_block_on_tip(self):
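        """Mine one block on node2 (the miner) and return it as a CBlock."""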
blockhash = self.generate(self.nodes[2], 1, sync_fun=self.no_op)[0]
block_hex = self.nodes[2].getblock(blockhash=blockhash, verbosity=0)
block = from_hex(CBlock(), block_hex)
block.rehash()
return block
def run_test(self):
        # Nodes will only request high-bandwidth compact block mode when they're out of IBD
for node in self.nodes:
assert not node.getblockchaininfo()['initialblockdownload']
p2p_conn_blocksonly = self.nodes[0].add_p2p_connection(P2PInterface())
p2p_conn_high_bw = self.nodes[1].add_p2p_connection(P2PInterface())
p2p_conn_low_bw = self.nodes[3].add_p2p_connection(P2PInterface())
for conn in [p2p_conn_blocksonly, p2p_conn_high_bw, p2p_conn_low_bw]:
assert_equal(conn.message_count['sendcmpct'], 1)
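            # announce=False keeps each connection in BIP152 low-bandwidth mode to
            # start with; version=2 selects segwit-aware compact blocks.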
conn.send_and_ping(msg_sendcmpct(announce=False, version=2))
# Nodes:
# 0 -> blocksonly
# 1 -> high bandwidth
# 2 -> miner
# 3 -> low bandwidth
#
# Topology:
# p2p_conn_blocksonly ---> node0
# p2p_conn_high_bw ---> node1
# p2p_conn_low_bw ---> node3
# node2 (no connections)
#
# node2 produces blocks that are passed to the rest of the nodes
# through the respective p2p connections.
self.log.info("Test that -blocksonly nodes do not select peers for BIP152 high bandwidth mode")
block0 = self.build_block_on_tip()
# A -blocksonly node should not request BIP152 high bandwidth mode upon
# receiving a new valid block at the tip.
p2p_conn_blocksonly.send_and_ping(msg_block(block0))
assert_equal(int(self.nodes[0].getbestblockhash(), 16), block0.sha256)
assert_equal(p2p_conn_blocksonly.message_count['sendcmpct'], 1)
assert_equal(p2p_conn_blocksonly.last_message['sendcmpct'].announce, False)
# A normal node participating in transaction relay should request BIP152
# high bandwidth mode upon receiving a new valid block at the tip.
p2p_conn_high_bw.send_and_ping(msg_block(block0))
assert_equal(int(self.nodes[1].getbestblockhash(), 16), block0.sha256)
p2p_conn_high_bw.wait_until(lambda: p2p_conn_high_bw.message_count['sendcmpct'] == 2)
assert_equal(p2p_conn_high_bw.last_message['sendcmpct'].announce, True)
# Don't send a block from the p2p_conn_low_bw so the low bandwidth node
# doesn't select it for BIP152 high bandwidth relay.
self.nodes[3].submitblock(block0.serialize().hex())
self.log.info("Test that -blocksonly nodes send getdata(BLOCK) instead"
" of getdata(CMPCT) in BIP152 low bandwidth mode")
block1 = self.build_block_on_tip()
p2p_conn_blocksonly.send_message(msg_headers(headers=[CBlockHeader(block1)]))
p2p_conn_blocksonly.sync_with_ping()
assert_equal(p2p_conn_blocksonly.last_message['getdata'].inv, [CInv(MSG_BLOCK | MSG_WITNESS_FLAG, block1.sha256)])
p2p_conn_high_bw.send_message(msg_headers(headers=[CBlockHeader(block1)]))
p2p_conn_high_bw.sync_with_ping()
assert_equal(p2p_conn_high_bw.last_message['getdata'].inv, [CInv(MSG_CMPCT_BLOCK, block1.sha256)])
self.log.info("Test that getdata(CMPCT) is still sent on BIP152 low bandwidth connections"
" when no -blocksonly nodes are involved")
        p2p_conn_low_bw.send_message(msg_headers(headers=[CBlockHeader(block1)]))
        p2p_conn_low_bw.sync_with_ping()
assert_equal(p2p_conn_low_bw.last_message['getdata'].inv, [CInv(MSG_CMPCT_BLOCK, block1.sha256)])
self.log.info("Test that -blocksonly nodes still serve compact blocks")
def test_for_cmpctblock(block):
if 'cmpctblock' not in p2p_conn_blocksonly.last_message:
return False
return p2p_conn_blocksonly.last_message['cmpctblock'].header_and_shortids.header.rehash() == block.sha256
p2p_conn_blocksonly.send_message(msg_getdata([CInv(MSG_CMPCT_BLOCK, block0.sha256)]))
p2p_conn_blocksonly.wait_until(lambda: test_for_cmpctblock(block0))
# Request BIP152 high bandwidth mode from the -blocksonly node.
p2p_conn_blocksonly.send_and_ping(msg_sendcmpct(announce=True, version=2))
block2 = self.build_block_on_tip()
self.nodes[0].submitblock(block1.serialize().hex())
self.nodes[0].submitblock(block2.serialize().hex())
p2p_conn_blocksonly.wait_until(lambda: test_for_cmpctblock(block2))
if __name__ == '__main__':
P2PCompactBlocksBlocksOnly().main()
| 0 |
bitcoin/test | bitcoin/test/functional/wallet_avoidreuse.py | #!/usr/bin/env python3
# Copyright (c) 2018-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the avoid_reuse and setwalletflag features."""
from test_framework.address import address_to_scriptpubkey
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_approx,
assert_equal,
assert_raises_rpc_error,
)
def reset_balance(node, discardaddr):
'''Throw away all owned coins by the node so it gets a balance of 0.'''
balance = node.getbalance(avoid_reuse=False)
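    # Sweeping is skipped for trivial balances, presumably to avoid burning dust on fees.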
if balance > 0.5:
node.sendtoaddress(address=discardaddr, amount=balance, subtractfeefromamount=True, avoid_reuse=False)
def count_unspent(node):
'''Count the unspent outputs for the given node and return various statistics'''
r = {
"total": {
"count": 0,
"sum": 0,
},
"reused": {
"count": 0,
"sum": 0,
},
}
supports_reused = True
for utxo in node.listunspent(minconf=0):
r["total"]["count"] += 1
r["total"]["sum"] += utxo["amount"]
if supports_reused and "reused" in utxo:
if utxo["reused"]:
r["reused"]["count"] += 1
r["reused"]["sum"] += utxo["amount"]
else:
supports_reused = False
r["reused"]["supported"] = supports_reused
return r
def assert_unspent(node, total_count=None, total_sum=None, reused_supported=None, reused_count=None, reused_sum=None, margin=0.001):
'''Make assertions about a node's unspent output statistics'''
stats = count_unspent(node)
if total_count is not None:
assert_equal(stats["total"]["count"], total_count)
if total_sum is not None:
assert_approx(stats["total"]["sum"], total_sum, margin)
if reused_supported is not None:
assert_equal(stats["reused"]["supported"], reused_supported)
if reused_count is not None:
assert_equal(stats["reused"]["count"], reused_count)
if reused_sum is not None:
assert_approx(stats["reused"]["sum"], reused_sum, margin)
def assert_balances(node, mine, margin=0.001):
'''Make assertions about a node's getbalances output'''
got = node.getbalances()["mine"]
for k,v in mine.items():
assert_approx(got[k], v, margin)
class AvoidReuseTest(BitcoinTestFramework):
def add_options(self, parser):
self.add_wallet_options(parser)
def set_test_params(self):
self.num_nodes = 2
# This test isn't testing txn relay/timing, so set whitelist on the
# peers for instant txn relay. This speeds up the test run time 2-3x.
self.extra_args = [["[email protected]"]] * self.num_nodes
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def run_test(self):
'''Set up initial chain and run tests defined below'''
self.test_persistence()
self.test_immutable()
self.generate(self.nodes[0], 110)
self.test_change_remains_change(self.nodes[1])
reset_balance(self.nodes[1], self.nodes[0].getnewaddress())
self.test_sending_from_reused_address_without_avoid_reuse()
reset_balance(self.nodes[1], self.nodes[0].getnewaddress())
self.test_sending_from_reused_address_fails("legacy")
reset_balance(self.nodes[1], self.nodes[0].getnewaddress())
self.test_sending_from_reused_address_fails("p2sh-segwit")
reset_balance(self.nodes[1], self.nodes[0].getnewaddress())
self.test_sending_from_reused_address_fails("bech32")
reset_balance(self.nodes[1], self.nodes[0].getnewaddress())
self.test_getbalances_used()
reset_balance(self.nodes[1], self.nodes[0].getnewaddress())
self.test_full_destination_group_is_preferred()
reset_balance(self.nodes[1], self.nodes[0].getnewaddress())
self.test_all_destination_groups_are_used()
def test_persistence(self):
'''Test that wallet files persist the avoid_reuse flag.'''
self.log.info("Test wallet files persist avoid_reuse flag")
# Configure node 1 to use avoid_reuse
self.nodes[1].setwalletflag('avoid_reuse')
# Flags should be node1.avoid_reuse=false, node2.avoid_reuse=true
assert_equal(self.nodes[0].getwalletinfo()["avoid_reuse"], False)
assert_equal(self.nodes[1].getwalletinfo()["avoid_reuse"], True)
self.restart_node(1)
self.connect_nodes(0, 1)
# Flags should still be node1.avoid_reuse=false, node2.avoid_reuse=true
assert_equal(self.nodes[0].getwalletinfo()["avoid_reuse"], False)
assert_equal(self.nodes[1].getwalletinfo()["avoid_reuse"], True)
# Attempting to set flag to its current state should throw
assert_raises_rpc_error(-8, "Wallet flag is already set to false", self.nodes[0].setwalletflag, 'avoid_reuse', False)
assert_raises_rpc_error(-8, "Wallet flag is already set to true", self.nodes[1].setwalletflag, 'avoid_reuse', True)
assert_raises_rpc_error(-8, "Unknown wallet flag: abc", self.nodes[0].setwalletflag, 'abc', True)
# Create a wallet with avoid reuse, and test that disabling it afterwards persists
self.nodes[1].createwallet(wallet_name="avoid_reuse_persist", avoid_reuse=True)
w = self.nodes[1].get_wallet_rpc("avoid_reuse_persist")
assert_equal(w.getwalletinfo()["avoid_reuse"], True)
w.setwalletflag("avoid_reuse", False)
assert_equal(w.getwalletinfo()["avoid_reuse"], False)
w.unloadwallet()
self.nodes[1].loadwallet("avoid_reuse_persist")
assert_equal(w.getwalletinfo()["avoid_reuse"], False)
w.unloadwallet()
def test_immutable(self):
'''Test immutable wallet flags'''
self.log.info("Test immutable wallet flags")
# Attempt to set the disable_private_keys flag; this should not work
assert_raises_rpc_error(-8, "Wallet flag is immutable", self.nodes[1].setwalletflag, 'disable_private_keys')
tempwallet = ".wallet_avoidreuse.py_test_immutable_wallet.dat"
# Create a wallet with disable_private_keys set; this should work
self.nodes[1].createwallet(wallet_name=tempwallet, disable_private_keys=True)
w = self.nodes[1].get_wallet_rpc(tempwallet)
# Attempt to unset the disable_private_keys flag; this should not work
assert_raises_rpc_error(-8, "Wallet flag is immutable", w.setwalletflag, 'disable_private_keys', False)
# Unload temp wallet
self.nodes[1].unloadwallet(tempwallet)
def test_change_remains_change(self, node):
self.log.info("Test that change doesn't turn into non-change when spent")
reset_balance(node, node.getnewaddress())
addr = node.getnewaddress()
txid = node.sendtoaddress(addr, 1)
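        # The send-to-self pays 1 BTC and returns the rest as a single large change
        # output; the minimumAmount filter below picks out that change UTXO.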
out = node.listunspent(minconf=0, query_options={'minimumAmount': 2})
assert_equal(len(out), 1)
assert_equal(out[0]['txid'], txid)
changeaddr = out[0]['address']
# Make sure it's starting out as change as expected
assert node.getaddressinfo(changeaddr)['ischange']
for logical_tx in node.listtransactions():
assert logical_tx.get('address') != changeaddr
# Spend it
reset_balance(node, node.getnewaddress())
# It should still be change
assert node.getaddressinfo(changeaddr)['ischange']
for logical_tx in node.listtransactions():
assert logical_tx.get('address') != changeaddr
def test_sending_from_reused_address_without_avoid_reuse(self):
'''
Test the same as test_sending_from_reused_address_fails, except send the 10 BTC with
the avoid_reuse flag set to false. This means the 10 BTC send should succeed,
where it fails in test_sending_from_reused_address_fails.
'''
self.log.info("Test sending from reused address with avoid_reuse=false")
fundaddr = self.nodes[1].getnewaddress()
retaddr = self.nodes[0].getnewaddress()
self.nodes[0].sendtoaddress(fundaddr, 10)
self.generate(self.nodes[0], 1)
# listunspent should show 1 single, unused 10 btc output
assert_unspent(self.nodes[1], total_count=1, total_sum=10, reused_supported=True, reused_count=0)
# getbalances should show no used, 10 btc trusted
assert_balances(self.nodes[1], mine={"used": 0, "trusted": 10})
# node 0 should not show a used entry, as it does not enable avoid_reuse
assert "used" not in self.nodes[0].getbalances()["mine"]
self.nodes[1].sendtoaddress(retaddr, 5)
self.generate(self.nodes[0], 1)
# listunspent should show 1 single, unused 5 btc output
assert_unspent(self.nodes[1], total_count=1, total_sum=5, reused_supported=True, reused_count=0)
# getbalances should show no used, 5 btc trusted
assert_balances(self.nodes[1], mine={"used": 0, "trusted": 5})
self.nodes[0].sendtoaddress(fundaddr, 10)
self.generate(self.nodes[0], 1)
# listunspent should show 2 total outputs (5, 10 btc), one unused (5), one reused (10)
assert_unspent(self.nodes[1], total_count=2, total_sum=15, reused_count=1, reused_sum=10)
# getbalances should show 10 used, 5 btc trusted
assert_balances(self.nodes[1], mine={"used": 10, "trusted": 5})
self.nodes[1].sendtoaddress(address=retaddr, amount=10, avoid_reuse=False)
        # listunspent should show 1 total output (5 btc), unused
assert_unspent(self.nodes[1], total_count=1, total_sum=5, reused_count=0)
# getbalances should show no used, 5 btc trusted
assert_balances(self.nodes[1], mine={"used": 0, "trusted": 5})
# node 1 should now have about 5 btc left (for both cases)
assert_approx(self.nodes[1].getbalance(), 5, 0.001)
assert_approx(self.nodes[1].getbalance(avoid_reuse=False), 5, 0.001)
def test_sending_from_reused_address_fails(self, second_addr_type):
'''
Test the simple case where [1] generates a new address A, then
[0] sends 10 BTC to A.
[1] spends 5 BTC from A. (leaving roughly 5 BTC useable)
[0] sends 10 BTC to A again.
[1] tries to spend 10 BTC (fails; dirty).
[1] tries to spend 4 BTC (succeeds; change address sufficient)
'''
self.log.info("Test sending from reused {} address fails".format(second_addr_type))
fundaddr = self.nodes[1].getnewaddress(label="", address_type="legacy")
retaddr = self.nodes[0].getnewaddress()
self.nodes[0].sendtoaddress(fundaddr, 10)
self.generate(self.nodes[0], 1)
# listunspent should show 1 single, unused 10 btc output
assert_unspent(self.nodes[1], total_count=1, total_sum=10, reused_supported=True, reused_count=0)
# getbalances should show no used, 10 btc trusted
assert_balances(self.nodes[1], mine={"used": 0, "trusted": 10})
self.nodes[1].sendtoaddress(retaddr, 5)
self.generate(self.nodes[0], 1)
# listunspent should show 1 single, unused 5 btc output
assert_unspent(self.nodes[1], total_count=1, total_sum=5, reused_supported=True, reused_count=0)
# getbalances should show no used, 5 btc trusted
assert_balances(self.nodes[1], mine={"used": 0, "trusted": 5})
if not self.options.descriptors:
# For the second send, we transmute it to a related single-key address
# to make sure it's also detected as reuse
fund_spk = address_to_scriptpubkey(fundaddr).hex()
fund_decoded = self.nodes[0].decodescript(fund_spk)
if second_addr_type == "p2sh-segwit":
new_fundaddr = fund_decoded["segwit"]["p2sh-segwit"]
elif second_addr_type == "bech32":
new_fundaddr = fund_decoded["segwit"]["address"]
else:
new_fundaddr = fundaddr
assert_equal(second_addr_type, "legacy")
self.nodes[0].sendtoaddress(new_fundaddr, 10)
self.generate(self.nodes[0], 1)
# listunspent should show 2 total outputs (5, 10 btc), one unused (5), one reused (10)
assert_unspent(self.nodes[1], total_count=2, total_sum=15, reused_count=1, reused_sum=10)
# getbalances should show 10 used, 5 btc trusted
assert_balances(self.nodes[1], mine={"used": 10, "trusted": 5})
# node 1 should now have a balance of 5 (no dirty) or 15 (including dirty)
assert_approx(self.nodes[1].getbalance(), 5, 0.001)
assert_approx(self.nodes[1].getbalance(avoid_reuse=False), 15, 0.001)
assert_raises_rpc_error(-6, "Insufficient funds", self.nodes[1].sendtoaddress, retaddr, 10)
self.nodes[1].sendtoaddress(retaddr, 4)
# listunspent should show 2 total outputs (1, 10 btc), one unused (1), one reused (10)
assert_unspent(self.nodes[1], total_count=2, total_sum=11, reused_count=1, reused_sum=10)
# getbalances should show 10 used, 1 btc trusted
assert_balances(self.nodes[1], mine={"used": 10, "trusted": 1})
# node 1 should now have about 1 btc left (no dirty) and 11 (including dirty)
assert_approx(self.nodes[1].getbalance(), 1, 0.001)
assert_approx(self.nodes[1].getbalance(avoid_reuse=False), 11, 0.001)
def test_getbalances_used(self):
'''
getbalances and listunspent should pick up on reused addresses
immediately, even for address reusing outputs created before the first
transaction was spending from that address
'''
self.log.info("Test getbalances used category")
# node under test should be completely empty
assert_equal(self.nodes[1].getbalance(avoid_reuse=False), 0)
new_addr = self.nodes[1].getnewaddress()
ret_addr = self.nodes[0].getnewaddress()
# send multiple transactions, reusing one address
for _ in range(101):
self.nodes[0].sendtoaddress(new_addr, 1)
self.generate(self.nodes[0], 1)
# send transaction that should not use all the available outputs
# per the current coin selection algorithm
self.nodes[1].sendtoaddress(ret_addr, 5)
# getbalances and listunspent should show the remaining outputs
# in the reused address as used/reused
assert_unspent(self.nodes[1], total_count=2, total_sum=96, reused_count=1, reused_sum=1, margin=0.01)
assert_balances(self.nodes[1], mine={"used": 1, "trusted": 95}, margin=0.01)
def test_full_destination_group_is_preferred(self):
'''
Test the case where [1] only has 101 outputs of 1 BTC in the same reused
address and tries to send a small payment of 0.5 BTC. The wallet
should use 100 outputs from the reused address as inputs and not a
single 1 BTC input, in order to join several outputs from the reused
address.
'''
self.log.info("Test that full destination groups are preferred in coin selection")
# Node under test should be empty
assert_equal(self.nodes[1].getbalance(avoid_reuse=False), 0)
new_addr = self.nodes[1].getnewaddress()
ret_addr = self.nodes[0].getnewaddress()
# Send 101 outputs of 1 BTC to the same, reused address in the wallet
for _ in range(101):
self.nodes[0].sendtoaddress(new_addr, 1)
self.generate(self.nodes[0], 1)
# Sending a transaction that is smaller than each one of the
# available outputs
txid = self.nodes[1].sendtoaddress(address=ret_addr, amount=0.5)
inputs = self.nodes[1].getrawtransaction(txid, 1)["vin"]
# The transaction should use 100 inputs exactly
assert_equal(len(inputs), 100)
def test_all_destination_groups_are_used(self):
'''
Test the case where [1] only has 202 outputs of 1 BTC in the same reused
address and tries to send a payment of 200.5 BTC. The wallet
should use all 202 outputs from the reused address as inputs.
'''
self.log.info("Test that all destination groups are used")
# Node under test should be empty
assert_equal(self.nodes[1].getbalance(avoid_reuse=False), 0)
new_addr = self.nodes[1].getnewaddress()
ret_addr = self.nodes[0].getnewaddress()
# Send 202 outputs of 1 BTC to the same, reused address in the wallet
for _ in range(202):
self.nodes[0].sendtoaddress(new_addr, 1)
self.generate(self.nodes[0], 1)
# Sending a transaction that needs to use the full groups
# of 100 inputs but also the incomplete group of 2 inputs.
txid = self.nodes[1].sendtoaddress(address=ret_addr, amount=200.5)
inputs = self.nodes[1].getrawtransaction(txid, 1)["vin"]
# The transaction should use 202 inputs exactly
assert_equal(len(inputs), 202)
if __name__ == '__main__':
AvoidReuseTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/p2p_i2p_sessions.py | #!/usr/bin/env python3
# Copyright (c) 2022-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""
Test whether persistent or transient I2P sessions are being used, based on `-i2pacceptincoming`.
"""
from test_framework.test_framework import BitcoinTestFramework
class I2PSessions(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 2
# The test assumes that an I2P SAM proxy is not listening here.
self.extra_args = [
["-i2psam=127.0.0.1:60000", "-i2pacceptincoming=1"],
["-i2psam=127.0.0.1:60000", "-i2pacceptincoming=0"],
]
def run_test(self):
addr = "zsxwyo6qcn3chqzwxnseusqgsnuw3maqnztkiypyfxtya4snkoka.b32.i2p"
self.log.info("Ensure we create a persistent session when -i2pacceptincoming=1")
node0 = self.nodes[0]
with node0.assert_debug_log(expected_msgs=["Creating persistent SAM session"]):
node0.addnode(node=addr, command="onetry")
self.log.info("Ensure we create a transient session when -i2pacceptincoming=0")
node1 = self.nodes[1]
with node1.assert_debug_log(expected_msgs=["Creating transient SAM session"]):
node1.addnode(node=addr, command="onetry")
if __name__ == '__main__':
I2PSessions().main()
| 0 |
bitcoin/test | bitcoin/test/functional/wallet_importdescriptors.py | #!/usr/bin/env python3
# Copyright (c) 2019-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the importdescriptors RPC.
Test importdescriptors by generating keys on node0, importing the corresponding
descriptors on node1 and then testing the address info for the different address
variants.
- `get_generate_key()` is called to generate keys and return the privkeys,
pubkeys and all variants of scriptPubKey and address.
- `test_importdesc()` is called to send an importdescriptors call to node1, test
success, and (if unsuccessful) test the error code and error message returned.
- `test_address()` is called to call getaddressinfo for an address on node1
and test the values returned."""
import concurrent.futures
from test_framework.authproxy import JSONRPCException
from test_framework.blocktools import COINBASE_MATURITY
from test_framework.test_framework import BitcoinTestFramework
from test_framework.descriptors import descsum_create
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
)
from test_framework.wallet_util import (
get_generate_key,
test_address,
)
class ImportDescriptorsTest(BitcoinTestFramework):
def add_options(self, parser):
self.add_wallet_options(parser, legacy=False)
def set_test_params(self):
self.num_nodes = 2
self.extra_args = [["-addresstype=legacy"],
["-addresstype=bech32", "-keypool=5"]
]
# whitelist peers to speed up tx relay / mempool sync
for args in self.extra_args:
args.append("[email protected]")
self.setup_clean_chain = True
self.wallet_names = []
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
self.skip_if_no_sqlite()
def test_importdesc(self, req, success, error_code=None, error_message=None, warnings=None, wallet=None):
"""Run importdescriptors and assert success"""
if warnings is None:
warnings = []
wrpc = self.nodes[1].get_wallet_rpc('w1')
if wallet is not None:
wrpc = wallet
result = wrpc.importdescriptors([req])
observed_warnings = []
if 'warnings' in result[0]:
observed_warnings = result[0]['warnings']
assert_equal("\n".join(sorted(warnings)), "\n".join(sorted(observed_warnings)))
assert_equal(result[0]['success'], success)
if error_code is not None:
assert_equal(result[0]['error']['code'], error_code)
assert_equal(result[0]['error']['message'], error_message)
def run_test(self):
self.log.info('Setting up wallets')
self.nodes[0].createwallet(wallet_name='w0', disable_private_keys=False, descriptors=True)
w0 = self.nodes[0].get_wallet_rpc('w0')
self.nodes[1].createwallet(wallet_name='w1', disable_private_keys=True, blank=True, descriptors=True)
w1 = self.nodes[1].get_wallet_rpc('w1')
assert_equal(w1.getwalletinfo()['keypoolsize'], 0)
self.nodes[1].createwallet(wallet_name="wpriv", disable_private_keys=False, blank=True, descriptors=True)
wpriv = self.nodes[1].get_wallet_rpc("wpriv")
assert_equal(wpriv.getwalletinfo()['keypoolsize'], 0)
self.log.info('Mining coins')
self.generatetoaddress(self.nodes[0], COINBASE_MATURITY + 1, w0.getnewaddress())
# RPC importdescriptors -----------------------------------------------
# # Test import fails if no descriptor present
self.log.info("Import should fail if a descriptor is not provided")
self.test_importdesc({"timestamp": "now"},
success=False,
error_code=-8,
error_message='Descriptor not found.')
# # Test importing of a P2PKH descriptor
key = get_generate_key()
self.log.info("Should import a p2pkh descriptor")
import_request = {"desc": descsum_create("pkh(" + key.pubkey + ")"),
"timestamp": "now",
"label": "Descriptor import test"}
self.test_importdesc(import_request, success=True)
test_address(w1,
key.p2pkh_addr,
solvable=True,
ismine=True,
labels=["Descriptor import test"])
assert_equal(w1.getwalletinfo()['keypoolsize'], 0)
self.log.info("Test can import same descriptor with public key twice")
self.test_importdesc(import_request, success=True)
self.log.info("Test can update descriptor label")
self.test_importdesc({**import_request, "label": "Updated label"}, success=True)
test_address(w1, key.p2pkh_addr, solvable=True, ismine=True, labels=["Updated label"])
self.log.info("Internal addresses cannot have labels")
self.test_importdesc({**import_request, "internal": True},
success=False,
error_code=-8,
error_message="Internal addresses should not have a label")
self.log.info("Internal addresses should be detected as such")
key = get_generate_key()
self.test_importdesc({"desc": descsum_create("pkh(" + key.pubkey + ")"),
"timestamp": "now",
"internal": True},
success=True)
info = w1.getaddressinfo(key.p2pkh_addr)
assert_equal(info["ismine"], True)
assert_equal(info["ischange"], True)
# # Test importing of a P2SH-P2WPKH descriptor
key = get_generate_key()
self.log.info("Should not import a p2sh-p2wpkh descriptor without checksum")
self.test_importdesc({"desc": "sh(wpkh(" + key.pubkey + "))",
"timestamp": "now"
},
success=False,
error_code=-5,
error_message="Missing checksum")
self.log.info("Should not import a p2sh-p2wpkh descriptor that has range specified")
self.test_importdesc({"desc": descsum_create("sh(wpkh(" + key.pubkey + "))"),
"timestamp": "now",
"range": 1,
},
success=False,
error_code=-8,
error_message="Range should not be specified for an un-ranged descriptor")
self.log.info("Should not import a p2sh-p2wpkh descriptor and have it set to active")
self.test_importdesc({"desc": descsum_create("sh(wpkh(" + key.pubkey + "))"),
"timestamp": "now",
"active": True,
},
success=False,
error_code=-8,
error_message="Active descriptors must be ranged")
self.log.info("Should import a (non-active) p2sh-p2wpkh descriptor")
self.test_importdesc({"desc": descsum_create("sh(wpkh(" + key.pubkey + "))"),
"timestamp": "now",
"active": False,
},
success=True)
assert_equal(w1.getwalletinfo()['keypoolsize'], 0)
test_address(w1,
key.p2sh_p2wpkh_addr,
ismine=True,
solvable=True)
# Check persistence of data and that loading works correctly
w1.unloadwallet()
self.nodes[1].loadwallet('w1')
test_address(w1,
key.p2sh_p2wpkh_addr,
ismine=True,
solvable=True)
# # Test importing of a multisig descriptor
key1 = get_generate_key()
key2 = get_generate_key()
self.log.info("Should import a 1-of-2 bare multisig from descriptor")
self.test_importdesc({"desc": descsum_create("multi(1," + key1.pubkey + "," + key2.pubkey + ")"),
"timestamp": "now"},
success=True)
self.log.info("Should not treat individual keys from the imported bare multisig as watchonly")
test_address(w1,
key1.p2pkh_addr,
ismine=False)
# # Test ranged descriptors
xpriv = "tprv8ZgxMBicQKsPeuVhWwi6wuMQGfPKi9Li5GtX35jVNknACgqe3CY4g5xgkfDDJcmtF7o1QnxWDRYw4H5P26PXq7sbcUkEqeR4fg3Kxp2tigg"
xpub = "tpubD6NzVbkrYhZ4YNXVQbNhMK1WqguFsUXceaVJKbmno2aZ3B6QfbMeraaYvnBSGpV3vxLyTTK9DYT1yoEck4XUScMzXoQ2U2oSmE2JyMedq3H"
addresses = ["2N7yv4p8G8yEaPddJxY41kPihnWvs39qCMf", "2MsHxyb2JS3pAySeNUsJ7mNnurtpeenDzLA"] # hdkeypath=m/0'/0'/0' and 1'
addresses += ["bcrt1qrd3n235cj2czsfmsuvqqpr3lu6lg0ju7scl8gn", "bcrt1qfqeppuvj0ww98r6qghmdkj70tv8qpchehegrg8"] # wpkh subscripts corresponding to the above addresses
desc = "sh(wpkh(" + xpub + "/0/0/*" + "))"
self.log.info("Ranged descriptors cannot have labels")
self.test_importdesc({"desc":descsum_create(desc),
"timestamp": "now",
"range": [0, 100],
"label": "test"},
success=False,
error_code=-8,
error_message='Ranged descriptors should not have a label')
self.log.info("Private keys required for private keys enabled wallet")
self.test_importdesc({"desc":descsum_create(desc),
"timestamp": "now",
"range": [0, 100]},
success=False,
error_code=-4,
error_message='Cannot import descriptor without private keys to a wallet with private keys enabled',
wallet=wpriv)
self.log.info("Ranged descriptor import should warn without a specified range")
self.test_importdesc({"desc": descsum_create(desc),
"timestamp": "now"},
success=True,
warnings=['Range not given, using default keypool range'])
assert_equal(w1.getwalletinfo()['keypoolsize'], 0)
# # Test importing of a ranged descriptor with xpriv
self.log.info("Should not import a ranged descriptor that includes xpriv into a watch-only wallet")
desc = "sh(wpkh(" + xpriv + "/0'/0'/*'" + "))"
self.test_importdesc({"desc": descsum_create(desc),
"timestamp": "now",
"range": 1},
success=False,
error_code=-4,
error_message='Cannot import private keys to a wallet with private keys disabled')
self.log.info("Should not import a descriptor with hardened derivations when private keys are disabled")
self.test_importdesc({"desc": descsum_create("wpkh(" + xpub + "/1h/*)"),
"timestamp": "now",
"range": 1},
success=False,
error_code=-4,
error_message='Cannot expand descriptor. Probably because of hardened derivations without private keys provided')
for address in addresses:
test_address(w1,
address,
ismine=False,
solvable=False)
self.test_importdesc({"desc": descsum_create(desc), "timestamp": "now", "range": -1},
success=False, error_code=-8, error_message='End of range is too high')
self.test_importdesc({"desc": descsum_create(desc), "timestamp": "now", "range": [-1, 10]},
success=False, error_code=-8, error_message='Range should be greater or equal than 0')
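        # Note: '+' binds tighter than '<<', so (2 << 31 + 1) evaluates to 2 << 32,
        # far beyond the allowed end of range.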
self.test_importdesc({"desc": descsum_create(desc), "timestamp": "now", "range": [(2 << 31 + 1) - 1000000, (2 << 31 + 1)]},
success=False, error_code=-8, error_message='End of range is too high')
self.test_importdesc({"desc": descsum_create(desc), "timestamp": "now", "range": [2, 1]},
success=False, error_code=-8, error_message='Range specified as [begin,end] must not have begin after end')
self.test_importdesc({"desc": descsum_create(desc), "timestamp": "now", "range": [0, 1000001]},
success=False, error_code=-8, error_message='Range is too large')
self.log.info("Verify we can only extend descriptor's range")
range_request = {"desc": descsum_create(desc), "timestamp": "now", "range": [5, 10], 'active': True}
self.test_importdesc(range_request, wallet=wpriv, success=True)
assert_equal(wpriv.getwalletinfo()['keypoolsize'], 6)
self.test_importdesc({**range_request, "range": [0, 10]}, wallet=wpriv, success=True)
assert_equal(wpriv.getwalletinfo()['keypoolsize'], 11)
self.test_importdesc({**range_request, "range": [0, 20]}, wallet=wpriv, success=True)
assert_equal(wpriv.getwalletinfo()['keypoolsize'], 21)
# Can keep range the same
self.test_importdesc({**range_request, "range": [0, 20]}, wallet=wpriv, success=True)
assert_equal(wpriv.getwalletinfo()['keypoolsize'], 21)
self.test_importdesc({**range_request, "range": [5, 10]}, wallet=wpriv, success=False,
error_code=-8, error_message='new range must include current range = [0,20]')
self.test_importdesc({**range_request, "range": [0, 10]}, wallet=wpriv, success=False,
error_code=-8, error_message='new range must include current range = [0,20]')
self.test_importdesc({**range_request, "range": [5, 20]}, wallet=wpriv, success=False,
error_code=-8, error_message='new range must include current range = [0,20]')
assert_equal(wpriv.getwalletinfo()['keypoolsize'], 21)
self.log.info("Check we can change descriptor internal flag")
self.test_importdesc({**range_request, "range": [0, 20], "internal": True}, wallet=wpriv, success=True)
assert_equal(wpriv.getwalletinfo()['keypoolsize'], 0)
assert_raises_rpc_error(-4, 'This wallet has no available keys', wpriv.getnewaddress, '', 'p2sh-segwit')
assert_equal(wpriv.getwalletinfo()['keypoolsize_hd_internal'], 21)
wpriv.getrawchangeaddress('p2sh-segwit')
self.test_importdesc({**range_request, "range": [0, 20], "internal": False}, wallet=wpriv, success=True)
assert_equal(wpriv.getwalletinfo()['keypoolsize'], 21)
wpriv.getnewaddress('', 'p2sh-segwit')
assert_equal(wpriv.getwalletinfo()['keypoolsize_hd_internal'], 0)
assert_raises_rpc_error(-4, 'This wallet has no available keys', wpriv.getrawchangeaddress, 'p2sh-segwit')
# Make sure ranged imports import keys in order
w1 = self.nodes[1].get_wallet_rpc('w1')
self.log.info('Key ranges should be imported in order')
xpub = "tpubDAXcJ7s7ZwicqjprRaEWdPoHKrCS215qxGYxpusRLLmJuT69ZSicuGdSfyvyKpvUNYBW1s2U3NSrT6vrCYB9e6nZUEvrqnwXPF8ArTCRXMY"
addresses = [
'bcrt1qtmp74ayg7p24uslctssvjm06q5phz4yrxucgnv', # m/0'/0'/0
'bcrt1q8vprchan07gzagd5e6v9wd7azyucksq2xc76k8', # m/0'/0'/1
'bcrt1qtuqdtha7zmqgcrr26n2rqxztv5y8rafjp9lulu', # m/0'/0'/2
'bcrt1qau64272ymawq26t90md6an0ps99qkrse58m640', # m/0'/0'/3
'bcrt1qsg97266hrh6cpmutqen8s4s962aryy77jp0fg0', # m/0'/0'/4
]
self.test_importdesc({'desc': descsum_create('wpkh([80002067/0h/0h]' + xpub + '/*)'),
'active': True,
'range' : [0, 2],
'timestamp': 'now'
},
success=True)
self.test_importdesc({'desc': descsum_create('sh(wpkh([abcdef12/0h/0h]' + xpub + '/*))'),
'active': True,
'range' : [0, 2],
'timestamp': 'now'
},
success=True)
self.test_importdesc({'desc': descsum_create('pkh([12345678/0h/0h]' + xpub + '/*)'),
'active': True,
'range' : [0, 2],
'timestamp': 'now'
},
success=True)
assert_equal(w1.getwalletinfo()['keypoolsize'], 5 * 3)
for i, expected_addr in enumerate(addresses):
received_addr = w1.getnewaddress('', 'bech32')
assert_raises_rpc_error(-4, 'This wallet has no available keys', w1.getrawchangeaddress, 'bech32')
assert_equal(received_addr, expected_addr)
bech32_addr_info = w1.getaddressinfo(received_addr)
assert_equal(bech32_addr_info['desc'][:23], 'wpkh([80002067/0h/0h/{}]'.format(i))
shwpkh_addr = w1.getnewaddress('', 'p2sh-segwit')
shwpkh_addr_info = w1.getaddressinfo(shwpkh_addr)
assert_equal(shwpkh_addr_info['desc'][:26], 'sh(wpkh([abcdef12/0h/0h/{}]'.format(i))
pkh_addr = w1.getnewaddress('', 'legacy')
pkh_addr_info = w1.getaddressinfo(pkh_addr)
assert_equal(pkh_addr_info['desc'][:22], 'pkh([12345678/0h/0h/{}]'.format(i))
assert_equal(w1.getwalletinfo()['keypoolsize'], 4 * 3) # After retrieving a key, we don't refill the keypool again, so it's one less for each address type
w1.keypoolrefill()
assert_equal(w1.getwalletinfo()['keypoolsize'], 5 * 3)
self.log.info("Check we can change next_index")
# go back and forth with next_index
for i in [4, 0, 2, 1, 3]:
self.test_importdesc({'desc': descsum_create('wpkh([80002067/0h/0h]' + xpub + '/*)'),
'active': True,
'range': [0, 9],
'next_index': i,
'timestamp': 'now'
},
success=True)
assert_equal(w1.getnewaddress('', 'bech32'), addresses[i])
# Check active=False default
self.log.info('Check imported descriptors are not active by default')
self.test_importdesc({'desc': descsum_create('pkh([12345678/1h]' + xpub + '/*)'),
'range' : [0, 2],
'timestamp': 'now',
'internal': True
},
success=True)
assert_raises_rpc_error(-4, 'This wallet has no available keys', w1.getrawchangeaddress, 'legacy')
self.log.info('Check can activate inactive descriptor')
self.test_importdesc({'desc': descsum_create('pkh([12345678]' + xpub + '/*)'),
'range': [0, 5],
'active': True,
'timestamp': 'now',
'internal': True
},
success=True)
address = w1.getrawchangeaddress('legacy')
assert_equal(address, "mpA2Wh9dvZT7yfELq1UnrUmAoc5qCkMetg")
self.log.info('Check can deactivate active descriptor')
self.test_importdesc({'desc': descsum_create('pkh([12345678]' + xpub + '/*)'),
'range': [0, 5],
'active': False,
'timestamp': 'now',
'internal': True
},
success=True)
assert_raises_rpc_error(-4, 'This wallet has no available keys', w1.getrawchangeaddress, 'legacy')
self.log.info('Verify activation state is persistent')
w1.unloadwallet()
self.nodes[1].loadwallet('w1')
assert_raises_rpc_error(-4, 'This wallet has no available keys', w1.getrawchangeaddress, 'legacy')
# # Test importing a descriptor containing a WIF private key
wif_priv = "cTe1f5rdT8A8DFgVWTjyPwACsDPJM9ff4QngFxUixCSvvbg1x6sh"
address = "2MuhcG52uHPknxDgmGPsV18jSHFBnnRgjPg"
desc = "sh(wpkh(" + wif_priv + "))"
self.log.info("Should import a descriptor with a WIF private key as spendable")
self.test_importdesc({"desc": descsum_create(desc),
"timestamp": "now"},
success=True,
wallet=wpriv)
self.log.info('Test can import same descriptor with private key twice')
self.test_importdesc({"desc": descsum_create(desc), "timestamp": "now"}, success=True, wallet=wpriv)
test_address(wpriv,
address,
solvable=True,
ismine=True)
txid = w0.sendtoaddress(address, 49.99995540)
self.generatetoaddress(self.nodes[0], 6, w0.getnewaddress())
tx = wpriv.createrawtransaction([{"txid": txid, "vout": 0}], {w0.getnewaddress(): 49.999})
signed_tx = wpriv.signrawtransactionwithwallet(tx)
w1.sendrawtransaction(signed_tx['hex'])
# Make sure that we can use import and use multisig as addresses
self.log.info('Test that multisigs can be imported, signed for, and getnewaddress\'d')
self.nodes[1].createwallet(wallet_name="wmulti_priv", disable_private_keys=False, blank=True, descriptors=True)
wmulti_priv = self.nodes[1].get_wallet_rpc("wmulti_priv")
assert_equal(wmulti_priv.getwalletinfo()['keypoolsize'], 0)
xprv1 = 'tprv8ZgxMBicQKsPevADjDCWsa6DfhkVXicu8NQUzfibwX2MexVwW4tCec5mXdCW8kJwkzBRRmAay1KZya4WsehVvjTGVW6JLqiqd8DdZ4xSg52'
acc_xpub1 = 'tpubDCJtdt5dgJpdhW4MtaVYDhG4T4tF6jcLR1PxL43q9pq1mxvXgMS9Mzw1HnXG15vxUGQJMMSqCQHMTy3F1eW5VkgVroWzchsPD5BUojrcWs8' # /84'/0'/0'
chg_xpub1 = 'tpubDCXqdwWZcszwqYJSnZp8eARkxGJfHAk23KDxbztV4BbschfaTfYLTcSkSJ3TN64dRqwa1rnFUScsYormKkGqNbbPwkorQimVevXjxzUV9Gf' # /84'/1'/0'
xprv2 = 'tprv8ZgxMBicQKsPdSNWUhDiwTScDr6JfkZuLshTRwzvZGnMSnGikV6jxpmdDkC3YRc4T3GD6Nvg9uv6hQg73RVv1EiTXDZwxVbsLugVHU8B1aq'
acc_xprv2 = 'tprv8gVCsmRAxVSxyUpsL13Y7ZEWBFPWbgS5E2MmFVNGuANrknvmmn2vWnmHvU8AwEFYzR2ji6EeZLSCLVacsYkvor3Pcb5JY5FGcevqTwYvdYx'
acc_xpub2 = 'tpubDDBF2BTR6s8drwrfDei8WxtckGuSm1cyoKxYY1QaKSBFbHBYQArWhHPA6eJrzZej6nfHGLSURYSLHr7GuYch8aY5n61tGqgn8b4cXrMuoPH'
chg_xpub2 = 'tpubDCYfZY2ceyHzYzMMVPt9MNeiqtQ2T7Uyp9QSFwYXh8Vi9iJFYXcuphJaGXfF3jUQJi5Y3GMNXvM11gaL4txzZgNGK22BFAwMXynnzv4z2Jh'
xprv3 = 'tprv8ZgxMBicQKsPeonDt8Ka2mrQmHa61hQ5FQCsvWBTpSNzBFgM58cV2EuXNAHF14VawVpznnme3SuTbA62sGriwWyKifJmXntfNeK7zeqMCj1'
acc_xpub3 = 'tpubDCsWoW1kuQB9kG5MXewHqkbjPtqPueRnXju7uM2NK7y3JYb2ajAZ9EiuZXNNuE4661RAfriBWhL8UsnAPpk8zrKKnZw1Ug7X4oHgMdZiU4E'
chg_xpub3 = 'tpubDC6UGqnsQStngYuGD4MKsMy7eD1Yg9NTJfPdvjdG2JE5oZ7EsSL3WHg4Gsw2pR5K39ZwJ46M1wZayhedVdQtMGaUhq5S23PH6fnENK3V1sb'
self.test_importdesc({"desc":"wsh(multi(2," + xprv1 + "/84h/0h/0h/*," + xprv2 + "/84h/0h/0h/*," + xprv3 + "/84h/0h/0h/*))#m2sr93jn",
"active": True,
"range": 1000,
"next_index": 0,
"timestamp": "now"},
success=True,
wallet=wmulti_priv)
self.test_importdesc({"desc":"wsh(multi(2," + xprv1 + "/84h/1h/0h/*," + xprv2 + "/84h/1h/0h/*," + xprv3 + "/84h/1h/0h/*))#q3sztvx5",
"active": True,
"internal" : True,
"range": 1000,
"next_index": 0,
"timestamp": "now"},
success=True,
wallet=wmulti_priv)
assert_equal(wmulti_priv.getwalletinfo()['keypoolsize'], 1001) # Range end (1000) is inclusive, so 1001 addresses generated
addr = wmulti_priv.getnewaddress('', 'bech32') # uses receive 0
assert_equal(addr, 'bcrt1qdt0qy5p7dzhxzmegnn4ulzhard33s2809arjqgjndx87rv5vd0fq2czhy8') # Derived at m/84'/0'/0'/0
change_addr = wmulti_priv.getrawchangeaddress('bech32') # uses change 0
assert_equal(change_addr, 'bcrt1qt9uhe3a9hnq7vajl7a094z4s3crm9ttf8zw3f5v9gr2nyd7e3lnsy44n8e') # Derived at m/84'/1'/0'/0
assert_equal(wmulti_priv.getwalletinfo()['keypoolsize'], 1000)
txid = w0.sendtoaddress(addr, 10)
self.generate(self.nodes[0], 6)
send_txid = wmulti_priv.sendtoaddress(w0.getnewaddress(), 8) # uses change 1
decoded = wmulti_priv.gettransaction(txid=send_txid, verbose=True)['decoded']
assert_equal(len(decoded['vin'][0]['txinwitness']), 4)
self.sync_all()
self.nodes[1].createwallet(wallet_name="wmulti_pub", disable_private_keys=True, blank=True, descriptors=True)
wmulti_pub = self.nodes[1].get_wallet_rpc("wmulti_pub")
assert_equal(wmulti_pub.getwalletinfo()['keypoolsize'], 0)
self.test_importdesc({"desc":"wsh(multi(2,[7b2d0242/84h/0h/0h]" + acc_xpub1 + "/*,[59b09cd6/84h/0h/0h]" + acc_xpub2 + "/*,[e81a0532/84h/0h/0h]" + acc_xpub3 +"/*))#tsry0s5e",
"active": True,
"range": 1000,
"next_index": 0,
"timestamp": "now"},
success=True,
wallet=wmulti_pub)
self.test_importdesc({"desc":"wsh(multi(2,[7b2d0242/84h/1h/0h]" + chg_xpub1 + "/*,[59b09cd6/84h/1h/0h]" + chg_xpub2 + "/*,[e81a0532/84h/1h/0h]" + chg_xpub3 + "/*))#c08a2rzv",
"active": True,
"internal" : True,
"range": 1000,
"next_index": 0,
"timestamp": "now"},
success=True,
wallet=wmulti_pub)
assert_equal(wmulti_pub.getwalletinfo()['keypoolsize'], 1000) # The first one was already consumed by previous import and is detected as used
addr = wmulti_pub.getnewaddress('', 'bech32') # uses receive 1
assert_equal(addr, 'bcrt1qp8s25ckjl7gr6x2q3dx3tn2pytwp05upkjztk6ey857tt50r5aeqn6mvr9') # Derived at m/84'/0'/0'/1
change_addr = wmulti_pub.getrawchangeaddress('bech32') # uses change 2
assert_equal(change_addr, 'bcrt1qp6j3jw8yetefte7kw6v5pc89rkgakzy98p6gf7ayslaveaxqyjusnw580c') # Derived at m/84'/1'/0'/2
assert send_txid in self.nodes[0].getrawmempool(True)
assert send_txid in (x['txid'] for x in wmulti_pub.listunspent(0))
assert_equal(wmulti_pub.getwalletinfo()['keypoolsize'], 999)
# generate some utxos for next tests
utxo = self.create_outpoints(w0, outputs=[{addr: 10}])[0]
addr2 = wmulti_pub.getnewaddress('', 'bech32')
utxo2 = self.create_outpoints(w0, outputs=[{addr2: 10}])[0]
self.generate(self.nodes[0], 6)
assert_equal(wmulti_pub.getbalance(), wmulti_priv.getbalance())
# Make sure that descriptor wallets containing multiple xpubs in a single descriptor load correctly
wmulti_pub.unloadwallet()
self.nodes[1].loadwallet('wmulti_pub')
self.log.info("Multisig with distributed keys")
self.nodes[1].createwallet(wallet_name="wmulti_priv1", descriptors=True)
wmulti_priv1 = self.nodes[1].get_wallet_rpc("wmulti_priv1")
res = wmulti_priv1.importdescriptors([
{
"desc": descsum_create("wsh(multi(2," + xprv1 + "/84h/0h/0h/*,[59b09cd6/84h/0h/0h]" + acc_xpub2 + "/*,[e81a0532/84h/0h/0h]" + acc_xpub3 + "/*))"),
"active": True,
"range": 1000,
"next_index": 0,
"timestamp": "now"
},
{
"desc": descsum_create("wsh(multi(2," + xprv1 + "/84h/1h/0h/*,[59b09cd6/84h/1h/0h]" + chg_xpub2 + "/*,[e81a0532/84h/1h/0h]" + chg_xpub3 + "/*))"),
"active": True,
"internal" : True,
"range": 1000,
"next_index": 0,
"timestamp": "now"
}])
assert_equal(res[0]['success'], True)
assert_equal(res[0]['warnings'][0], 'Not all private keys provided. Some wallet functionality may return unexpected errors')
assert_equal(res[1]['success'], True)
assert_equal(res[1]['warnings'][0], 'Not all private keys provided. Some wallet functionality may return unexpected errors')
self.nodes[1].createwallet(wallet_name='wmulti_priv2', blank=True, descriptors=True)
wmulti_priv2 = self.nodes[1].get_wallet_rpc('wmulti_priv2')
res = wmulti_priv2.importdescriptors([
{
"desc": descsum_create("wsh(multi(2,[7b2d0242/84h/0h/0h]" + acc_xpub1 + "/*," + xprv2 + "/84h/0h/0h/*,[e81a0532/84h/0h/0h]" + acc_xpub3 + "/*))"),
"active": True,
"range": 1000,
"next_index": 0,
"timestamp": "now"
},
{
"desc": descsum_create("wsh(multi(2,[7b2d0242/84h/1h/0h]" + chg_xpub1 + "/*," + xprv2 + "/84h/1h/0h/*,[e81a0532/84h/1h/0h]" + chg_xpub3 + "/*))"),
"active": True,
"internal" : True,
"range": 1000,
"next_index": 0,
"timestamp": "now"
}])
assert_equal(res[0]['success'], True)
assert_equal(res[0]['warnings'][0], 'Not all private keys provided. Some wallet functionality may return unexpected errors')
assert_equal(res[1]['success'], True)
assert_equal(res[1]['warnings'][0], 'Not all private keys provided. Some wallet functionality may return unexpected errors')
rawtx = self.nodes[1].createrawtransaction([utxo], {w0.getnewaddress(): 9.999})
tx_signed_1 = wmulti_priv1.signrawtransactionwithwallet(rawtx)
assert_equal(tx_signed_1['complete'], False)
tx_signed_2 = wmulti_priv2.signrawtransactionwithwallet(tx_signed_1['hex'])
assert_equal(tx_signed_2['complete'], True)
self.nodes[1].sendrawtransaction(tx_signed_2['hex'])
self.log.info("We can create and use a huge multisig under P2WSH")
self.nodes[1].createwallet(wallet_name='wmulti_priv_big', blank=True, descriptors=True)
wmulti_priv_big = self.nodes[1].get_wallet_rpc('wmulti_priv_big')
xkey = "tprv8ZgxMBicQKsPeZSeYx7VXDDTs3XrTcmZQpRLbAeSQFCQGgKwR4gKpcxHaKdoTNHniv4EPDJNdzA3KxRrrBHcAgth8fU5X4oCndkkxk39iAt/*"
xkey_int = "tprv8ZgxMBicQKsPeZSeYx7VXDDTs3XrTcmZQpRLbAeSQFCQGgKwR4gKpcxHaKdoTNHniv4EPDJNdzA3KxRrrBHcAgth8fU5X4oCndkkxk39iAt/1/*"
res = wmulti_priv_big.importdescriptors([
{
"desc": descsum_create(f"wsh(sortedmulti(20,{(xkey + ',') * 19}{xkey}))"),
"active": True,
"range": 1000,
"next_index": 0,
"timestamp": "now"
},
{
"desc": descsum_create(f"wsh(sortedmulti(20,{(xkey_int + ',') * 19}{xkey_int}))"),
"active": True,
"internal": True,
"range": 1000,
"next_index": 0,
"timestamp": "now"
}])
assert_equal(res[0]['success'], True)
assert_equal(res[1]['success'], True)
addr = wmulti_priv_big.getnewaddress()
w0.sendtoaddress(addr, 10)
self.generate(self.nodes[0], 1)
# It is standard and would relay.
txid = wmulti_priv_big.sendtoaddress(w0.getnewaddress(), 9.999)
decoded = wmulti_priv_big.gettransaction(txid=txid, verbose=True)['decoded']
# 20 sigs + dummy + witness script
assert_equal(len(decoded['vin'][0]['txinwitness']), 22)
self.log.info("Under P2SH, multisig are standard with up to 15 "
"compressed keys")
self.nodes[1].createwallet(wallet_name='multi_priv_big_legacy',
blank=True, descriptors=True)
multi_priv_big = self.nodes[1].get_wallet_rpc('multi_priv_big_legacy')
res = multi_priv_big.importdescriptors([
{
"desc": descsum_create(f"sh(multi(15,{(xkey + ',') * 14}{xkey}))"),
"active": True,
"range": 1000,
"next_index": 0,
"timestamp": "now"
},
{
"desc": descsum_create(f"sh(multi(15,{(xkey_int + ',') * 14}{xkey_int}))"),
"active": True,
"internal": True,
"range": 1000,
"next_index": 0,
"timestamp": "now"
}])
assert_equal(res[0]['success'], True)
assert_equal(res[1]['success'], True)
addr = multi_priv_big.getnewaddress("", "legacy")
w0.sendtoaddress(addr, 10)
self.generate(self.nodes[0], 6)
# It is standard and would relay.
txid = multi_priv_big.sendtoaddress(w0.getnewaddress(), 10, "", "", True)
decoded = multi_priv_big.gettransaction(txid=txid, verbose=True)['decoded']
self.log.info("Amending multisig with new private keys")
self.nodes[1].createwallet(wallet_name="wmulti_priv3", descriptors=True)
wmulti_priv3 = self.nodes[1].get_wallet_rpc("wmulti_priv3")
res = wmulti_priv3.importdescriptors([
{
"desc": descsum_create("wsh(multi(2," + xprv1 + "/84h/0h/0h/*,[59b09cd6/84h/0h/0h]" + acc_xpub2 + "/*,[e81a0532/84h/0h/0h]" + acc_xpub3 + "/*))"),
"active": True,
"range": 1000,
"next_index": 0,
"timestamp": "now"
}])
assert_equal(res[0]['success'], True)
res = wmulti_priv3.importdescriptors([
{
"desc": descsum_create("wsh(multi(2," + xprv1 + "/84h/0h/0h/*,[59b09cd6/84h/0h/0h]" + acc_xprv2 + "/*,[e81a0532/84h/0h/0h]" + acc_xpub3 + "/*))"),
"active": True,
"range": 1000,
"next_index": 0,
"timestamp": "now"
}])
assert_equal(res[0]['success'], True)
rawtx = self.nodes[1].createrawtransaction([utxo2], {w0.getnewaddress(): 9.999})
tx = wmulti_priv3.signrawtransactionwithwallet(rawtx)
assert_equal(tx['complete'], True)
self.nodes[1].sendrawtransaction(tx['hex'])
self.log.info("Combo descriptors cannot be active")
self.test_importdesc({"desc": descsum_create("combo(tpubDCJtdt5dgJpdhW4MtaVYDhG4T4tF6jcLR1PxL43q9pq1mxvXgMS9Mzw1HnXG15vxUGQJMMSqCQHMTy3F1eW5VkgVroWzchsPD5BUojrcWs8/*)"),
"active": True,
"range": 1,
"timestamp": "now"},
success=False,
error_code=-4,
error_message="Combo descriptors cannot be set to active")
self.log.info("Descriptors with no type cannot be active")
self.test_importdesc({"desc": descsum_create("pk(tpubDCJtdt5dgJpdhW4MtaVYDhG4T4tF6jcLR1PxL43q9pq1mxvXgMS9Mzw1HnXG15vxUGQJMMSqCQHMTy3F1eW5VkgVroWzchsPD5BUojrcWs8/*)"),
"active": True,
"range": 1,
"timestamp": "now"},
success=True,
warnings=["Unknown output type, cannot set descriptor to active."])
self.log.info("Test importing a descriptor to an encrypted wallet")
descriptor = {"desc": descsum_create("pkh(" + xpriv + "/1h/*h)"),
"timestamp": "now",
"active": True,
"range": [0,4000],
"next_index": 4000}
self.nodes[0].createwallet("temp_wallet", blank=True, descriptors=True)
temp_wallet = self.nodes[0].get_wallet_rpc("temp_wallet")
temp_wallet.importdescriptors([descriptor])
self.generatetoaddress(self.nodes[0], COINBASE_MATURITY + 1, temp_wallet.getnewaddress())
self.generatetoaddress(self.nodes[0], COINBASE_MATURITY + 1, temp_wallet.getnewaddress())
self.nodes[0].createwallet("encrypted_wallet", blank=True, descriptors=True, passphrase="passphrase")
encrypted_wallet = self.nodes[0].get_wallet_rpc("encrypted_wallet")
descriptor["timestamp"] = 0
descriptor["next_index"] = 0
encrypted_wallet.walletpassphrase("passphrase", 99999)
with concurrent.futures.ThreadPoolExecutor(max_workers=1) as thread:
with self.nodes[0].assert_debug_log(expected_msgs=["Rescan started from block 0f9188f13cb7b2c71f2a335e3a4fc328bf5beb436012afca590b1a11466e2206... (slow variant inspecting all blocks)"], timeout=5):
importing = thread.submit(encrypted_wallet.importdescriptors, requests=[descriptor])
# Set the passphrase timeout to 1 to test that the wallet remains unlocked during the rescan
self.nodes[0].cli("-rpcwallet=encrypted_wallet").walletpassphrase("passphrase", 1)
try:
self.nodes[0].cli("-rpcwallet=encrypted_wallet").walletlock()
except JSONRPCException as e:
assert e.error["code"] == -4 and "Error: the wallet is currently being used to rescan the blockchain for related transactions. Please call `abortrescan` before locking the wallet." in e.error["message"]
try:
self.nodes[0].cli("-rpcwallet=encrypted_wallet").walletpassphrasechange("passphrase", "newpassphrase")
except JSONRPCException as e:
assert e.error["code"] == -4 and "Error: the wallet is currently being used to rescan the blockchain for related transactions. Please call `abortrescan` before changing the passphrase." in e.error["message"]
assert_equal(importing.result(), [{"success": True}])
assert_equal(temp_wallet.getbalance(), encrypted_wallet.getbalance())
if __name__ == '__main__':
ImportDescriptorsTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/interface_usdt_coinselection.py | #!/usr/bin/env python3
# Copyright (c) 2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
""" Tests the coin_selection:* tracepoint API interface.
See https://github.com/bitcoin/bitcoin/blob/master/doc/tracing.md#context-coin_selection
"""
# Test will be skipped if we don't have bcc installed
try:
from bcc import BPF, USDT # type: ignore[import]
except ImportError:
pass
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_greater_than,
assert_raises_rpc_error,
)
coinselection_tracepoints_program = """
#include <uapi/linux/ptrace.h>
#define WALLET_NAME_LENGTH 16
#define ALGO_NAME_LENGTH 16
struct event_data
{
u8 type;
char wallet_name[WALLET_NAME_LENGTH];
// selected coins event
char algo[ALGO_NAME_LENGTH];
s64 target;
s64 waste;
s64 selected_value;
// create tx event
bool success;
s64 fee;
s32 change_pos;
// aps create tx event
bool use_aps;
};
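// One tagged struct is shared by all four tracepoints; `type` indicates which
// fields are meaningful: 1 = selected_coins, 2 = normal_create_tx_internal,
// 3 = attempting_aps_create_tx, 4 = aps_create_tx_internal.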
BPF_QUEUE(coin_selection_events, struct event_data, 1024);
int trace_selected_coins(struct pt_regs *ctx) {
struct event_data data;
__builtin_memset(&data, 0, sizeof(data));
data.type = 1;
bpf_usdt_readarg_p(1, ctx, &data.wallet_name, WALLET_NAME_LENGTH);
bpf_usdt_readarg_p(2, ctx, &data.algo, ALGO_NAME_LENGTH);
bpf_usdt_readarg(3, ctx, &data.target);
bpf_usdt_readarg(4, ctx, &data.waste);
bpf_usdt_readarg(5, ctx, &data.selected_value);
coin_selection_events.push(&data, 0);
return 0;
}
int trace_normal_create_tx(struct pt_regs *ctx) {
struct event_data data;
__builtin_memset(&data, 0, sizeof(data));
data.type = 2;
bpf_usdt_readarg_p(1, ctx, &data.wallet_name, WALLET_NAME_LENGTH);
bpf_usdt_readarg(2, ctx, &data.success);
bpf_usdt_readarg(3, ctx, &data.fee);
bpf_usdt_readarg(4, ctx, &data.change_pos);
coin_selection_events.push(&data, 0);
return 0;
}
int trace_attempt_aps(struct pt_regs *ctx) {
struct event_data data;
__builtin_memset(&data, 0, sizeof(data));
data.type = 3;
bpf_usdt_readarg_p(1, ctx, &data.wallet_name, WALLET_NAME_LENGTH);
coin_selection_events.push(&data, 0);
return 0;
}
int trace_aps_create_tx(struct pt_regs *ctx) {
struct event_data data;
__builtin_memset(&data, 0, sizeof(data));
data.type = 4;
bpf_usdt_readarg_p(1, ctx, &data.wallet_name, WALLET_NAME_LENGTH);
bpf_usdt_readarg(2, ctx, &data.use_aps);
bpf_usdt_readarg(3, ctx, &data.success);
bpf_usdt_readarg(4, ctx, &data.fee);
bpf_usdt_readarg(5, ctx, &data.change_pos);
coin_selection_events.push(&data, 0);
return 0;
}
"""
class CoinSelectionTracepointTest(BitcoinTestFramework):
def add_options(self, parser):
self.add_wallet_options(parser)
def set_test_params(self):
self.num_nodes = 1
self.setup_clean_chain = True
def skip_test_if_missing_module(self):
self.skip_if_platform_not_linux()
self.skip_if_no_bitcoind_tracepoints()
self.skip_if_no_python_bcc()
self.skip_if_no_bpf_permissions()
self.skip_if_no_wallet()
def get_tracepoints(self, expected_types):
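        """Pop queued coin selection events and check they match expected_types.

        The loop deliberately runs len(expected_types) + 1 times: the extra pop()
        must raise KeyError (empty queue), which proves the node emitted no more
        events than expected.
        """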
events = []
try:
for i in range(0, len(expected_types) + 1):
event = self.bpf["coin_selection_events"].pop()
assert_equal(event.wallet_name.decode(), self.default_wallet_name)
assert_equal(event.type, expected_types[i])
events.append(event)
else:
# If the loop exits successfully instead of throwing a KeyError, then we have had
# more events than expected. There should be no more than len(expected_types) events.
assert False
except KeyError:
assert_equal(len(events), len(expected_types))
return events
def determine_selection_from_usdt(self, events):
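        """Reduce a list of tracepoint events to the final coin selection outcome.

        Event types (see the BPF program above): 1 = selected_coins,
        2 = normal_create_tx_internal, 3 = attempting_aps_create_tx,
        4 = aps_create_tx_internal. If an APS attempt was seen (type 3), the
        second selected_coins event describes the APS selection.
        """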
success = None
use_aps = None
algo = None
waste = None
change_pos = None
is_aps = False
sc_events = []
for event in events:
if event.type == 1:
if not is_aps:
algo = event.algo.decode()
waste = event.waste
sc_events.append(event)
elif event.type == 2:
success = event.success
if not is_aps:
change_pos = event.change_pos
elif event.type == 3:
is_aps = True
elif event.type == 4:
assert is_aps
if event.use_aps:
use_aps = True
assert_equal(len(sc_events), 2)
algo = sc_events[1].algo.decode()
waste = sc_events[1].waste
change_pos = event.change_pos
return success, use_aps, algo, waste, change_pos
def run_test(self):
self.log.info("hook into the coin_selection tracepoints")
ctx = USDT(pid=self.nodes[0].process.pid)
ctx.enable_probe(probe="coin_selection:selected_coins", fn_name="trace_selected_coins")
ctx.enable_probe(probe="coin_selection:normal_create_tx_internal", fn_name="trace_normal_create_tx")
ctx.enable_probe(probe="coin_selection:attempting_aps_create_tx", fn_name="trace_attempt_aps")
ctx.enable_probe(probe="coin_selection:aps_create_tx_internal", fn_name="trace_aps_create_tx")
self.bpf = BPF(text=coinselection_tracepoints_program, usdt_contexts=[ctx], debug=0, cflags=["-Wno-error=implicit-function-declaration"])
self.log.info("Prepare wallets")
self.generate(self.nodes[0], 101)
wallet = self.nodes[0].get_wallet_rpc(self.default_wallet_name)
self.log.info("Sending a transaction should result in all tracepoints")
# We should have 5 tracepoints in the order:
# 1. selected_coins (type 1)
# 2. normal_create_tx_internal (type 2)
# 3. attempting_aps_create_tx (type 3)
# 4. selected_coins (type 1)
# 5. aps_create_tx_internal (type 4)
wallet.sendtoaddress(wallet.getnewaddress(), 10)
events = self.get_tracepoints([1, 2, 3, 1, 4])
success, use_aps, algo, waste, change_pos = self.determine_selection_from_usdt(events)
assert_equal(success, True)
assert_greater_than(change_pos, -1)
self.log.info("Failing to fund results in 1 tracepoint")
        # We should have 1 tracepoint in the order:
# 1. normal_create_tx_internal (type 2)
assert_raises_rpc_error(-6, "Insufficient funds", wallet.sendtoaddress, wallet.getnewaddress(), 102 * 50)
events = self.get_tracepoints([2])
success, use_aps, algo, waste, change_pos = self.determine_selection_from_usdt(events)
assert_equal(success, False)
self.log.info("Explicitly enabling APS results in 2 tracepoints")
        # We should have 2 tracepoints in the order:
# 1. selected_coins (type 1)
# 2. normal_create_tx_internal (type 2)
wallet.setwalletflag("avoid_reuse")
wallet.sendtoaddress(address=wallet.getnewaddress(), amount=10, avoid_reuse=True)
events = self.get_tracepoints([1, 2])
success, use_aps, algo, waste, change_pos = self.determine_selection_from_usdt(events)
assert_equal(success, True)
assert_equal(use_aps, None)
self.bpf.cleanup()
if __name__ == '__main__':
CoinSelectionTracepointTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/p2p_message_capture.py | #!/usr/bin/env python3
# Copyright (c) 2020-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test per-peer message capture capability.
Additionally, the output of contrib/message-capture/message-capture-parser.py should be verified manually.
"""
import glob
from io import BytesIO
import os
from test_framework.p2p import P2PDataStore, MESSAGEMAP
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal
TIME_SIZE = 8
LENGTH_SIZE = 4
MSGTYPE_SIZE = 12
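
# Layout of each captured message, as written by CaptureMessageToFile and
# parsed by mini_parser() below:
#   8-byte little-endian timestamp | 12-byte zero-padded message type |
#   4-byte little-endian payload length | payload
# The timestamp is read and skipped; the rest is validated.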
def mini_parser(dat_file: str) -> None:
"""Parse a data file created by CaptureMessageToFile.
From the data file we'll only check the structure.
We won't care about things like:
- Deserializing the payload of the message
- This is managed by the deserialize methods in test_framework.messages
- The order of the messages
- There's no reason why we can't, say, change the order of the messages in the handshake
- Message Type
- We can add new message types
We're ignoring these because they're simply too brittle to test here.
"""
with open(dat_file, 'rb') as f_in:
# This should have at least one message in it
assert os.fstat(f_in.fileno()).st_size >= TIME_SIZE + LENGTH_SIZE + MSGTYPE_SIZE
while True:
tmp_header_raw = f_in.read(TIME_SIZE + LENGTH_SIZE + MSGTYPE_SIZE)
if not tmp_header_raw:
break
tmp_header = BytesIO(tmp_header_raw)
tmp_header.read(TIME_SIZE) # skip the timestamp field
msgtype = tmp_header.read(MSGTYPE_SIZE).rstrip(b'\x00')
assert msgtype in MESSAGEMAP
length: int = int.from_bytes(tmp_header.read(LENGTH_SIZE), "little")
data = f_in.read(length)
assert_equal(len(data), length)
class MessageCaptureTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.extra_args = [["-capturemessages"]]
self.setup_clean_chain = True
def run_test(self):
capturedir = self.nodes[0].chain_path / "message_capture"
# Connect a node so that the handshake occurs
self.nodes[0].add_p2p_connection(P2PDataStore())
self.nodes[0].disconnect_p2ps()
recv_file = glob.glob(os.path.join(capturedir, "*/msgs_recv.dat"))[0]
mini_parser(recv_file)
sent_file = glob.glob(os.path.join(capturedir, "*/msgs_sent.dat"))[0]
mini_parser(sent_file)
if __name__ == '__main__':
MessageCaptureTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/rpc_txoutproof.py | #!/usr/bin/env python3
# Copyright (c) 2014-2021 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test gettxoutproof and verifytxoutproof RPCs."""
from test_framework.messages import (
CMerkleBlock,
from_hex,
)
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
)
from test_framework.wallet import MiniWallet
class MerkleBlockTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 2
self.extra_args = [
[],
["-txindex"],
]
def run_test(self):
miniwallet = MiniWallet(self.nodes[0])
chain_height = self.nodes[1].getblockcount()
assert_equal(chain_height, 200)
txid1 = miniwallet.send_self_transfer(from_node=self.nodes[0])['txid']
txid2 = miniwallet.send_self_transfer(from_node=self.nodes[0])['txid']
# This will raise an exception because the transaction is not yet in a block
assert_raises_rpc_error(-5, "Transaction not yet in block", self.nodes[0].gettxoutproof, [txid1])
self.generate(self.nodes[0], 1)
blockhash = self.nodes[0].getblockhash(chain_height + 1)
txlist = []
blocktxn = self.nodes[0].getblock(blockhash, True)["tx"]
txlist.append(blocktxn[1])
txlist.append(blocktxn[2])
assert_equal(self.nodes[0].verifytxoutproof(self.nodes[0].gettxoutproof([txid1])), [txid1])
assert_equal(self.nodes[0].verifytxoutproof(self.nodes[0].gettxoutproof([txid1, txid2])), txlist)
assert_equal(self.nodes[0].verifytxoutproof(self.nodes[0].gettxoutproof([txid1, txid2], blockhash)), txlist)
txin_spent = miniwallet.get_utxo(txid=txid2) # Get the change from txid2
tx3 = miniwallet.send_self_transfer(from_node=self.nodes[0], utxo_to_spend=txin_spent)
txid3 = tx3['txid']
self.generate(self.nodes[0], 1)
txid_spent = txin_spent["txid"]
txid_unspent = txid1 # Input was change from txid2, so txid1 should be unspent
# Invalid txids
assert_raises_rpc_error(-8, "txid must be of length 64 (not 32, for '00000000000000000000000000000000')", self.nodes[0].gettxoutproof, ["00000000000000000000000000000000"], blockhash)
assert_raises_rpc_error(-8, "txid must be hexadecimal string (not 'ZZZ0000000000000000000000000000000000000000000000000000000000000')", self.nodes[0].gettxoutproof, ["ZZZ0000000000000000000000000000000000000000000000000000000000000"], blockhash)
# Invalid blockhashes
assert_raises_rpc_error(-8, "blockhash must be of length 64 (not 32, for '00000000000000000000000000000000')", self.nodes[0].gettxoutproof, [txid_spent], "00000000000000000000000000000000")
assert_raises_rpc_error(-8, "blockhash must be hexadecimal string (not 'ZZZ0000000000000000000000000000000000000000000000000000000000000')", self.nodes[0].gettxoutproof, [txid_spent], "ZZZ0000000000000000000000000000000000000000000000000000000000000")
# We can't find the block from a fully-spent tx
assert_raises_rpc_error(-5, "Transaction not yet in block", self.nodes[0].gettxoutproof, [txid_spent])
# We can get the proof if we specify the block
assert_equal(self.nodes[0].verifytxoutproof(self.nodes[0].gettxoutproof([txid_spent], blockhash)), [txid_spent])
# We can't get the proof if we specify a non-existent block
assert_raises_rpc_error(-5, "Block not found", self.nodes[0].gettxoutproof, [txid_spent], "0000000000000000000000000000000000000000000000000000000000000000")
# We can get the proof if the transaction is unspent
assert_equal(self.nodes[0].verifytxoutproof(self.nodes[0].gettxoutproof([txid_unspent])), [txid_unspent])
# We can get the proof if we provide a list of transactions and one of them is unspent. The ordering of the list should not matter.
assert_equal(sorted(self.nodes[0].verifytxoutproof(self.nodes[0].gettxoutproof([txid1, txid2]))), sorted(txlist))
assert_equal(sorted(self.nodes[0].verifytxoutproof(self.nodes[0].gettxoutproof([txid2, txid1]))), sorted(txlist))
# We can always get a proof if we have a -txindex
assert_equal(self.nodes[0].verifytxoutproof(self.nodes[1].gettxoutproof([txid_spent])), [txid_spent])
# We can't get a proof if we specify transactions from different blocks
assert_raises_rpc_error(-5, "Not all transactions found in specified or retrieved block", self.nodes[0].gettxoutproof, [txid1, txid3])
# Test empty list
assert_raises_rpc_error(-8, "Parameter 'txids' cannot be empty", self.nodes[0].gettxoutproof, [])
# Test duplicate txid
assert_raises_rpc_error(-8, 'Invalid parameter, duplicated txid', self.nodes[0].gettxoutproof, [txid1, txid1])
# Now we'll try tweaking a proof.
proof = self.nodes[1].gettxoutproof([txid1, txid2])
assert txid1 in self.nodes[0].verifytxoutproof(proof)
assert txid2 in self.nodes[1].verifytxoutproof(proof)
tweaked_proof = from_hex(CMerkleBlock(), proof)
# Make sure that our serialization/deserialization is working
assert txid1 in self.nodes[0].verifytxoutproof(tweaked_proof.serialize().hex())
# Check to see if we can go up the merkle tree and pass this off as a
# single-transaction block
tweaked_proof.txn.nTransactions = 1
tweaked_proof.txn.vHash = [tweaked_proof.header.hashMerkleRoot]
tweaked_proof.txn.vBits = [True] + [False]*7
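        # The tweak claims a one-transaction block whose lone "txid" is the real
        # merkle root; verifytxoutproof must reject this, since otherwise an
        # internal merkle node could be passed off as a transaction id.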
for n in self.nodes:
assert not n.verifytxoutproof(tweaked_proof.serialize().hex())
# TODO: try more variants, eg transactions at different depths, and
# verify that the proofs are invalid
if __name__ == '__main__':
MerkleBlockTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/p2p_orphan_handling.py | #!/usr/bin/env python3
# Copyright (c) 2023 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
import time
from test_framework.messages import (
CInv,
MSG_TX,
MSG_WITNESS_TX,
MSG_WTX,
msg_getdata,
msg_inv,
msg_notfound,
msg_tx,
tx_from_hex,
)
from test_framework.p2p import (
GETDATA_TX_INTERVAL,
NONPREF_PEER_TX_DELAY,
OVERLOADED_PEER_TX_DELAY,
p2p_lock,
P2PTxInvStore,
TXID_RELAY_DELAY,
)
from test_framework.util import (
assert_equal,
)
from test_framework.test_framework import BitcoinTestFramework
from test_framework.wallet import (
MiniWallet,
MiniWalletMode,
)
# Time to bump forward (using setmocktime) before waiting for the node to send getdata(tx) in response
# to an inv(tx), in seconds. This delay includes all possible delays + 1, so it should only be used
# when the value of the delay is not interesting. If we want to test that the node waits x seconds
# for one peer and y seconds for another, use specific values instead.
TXREQUEST_TIME_SKIP = NONPREF_PEER_TX_DELAY + TXID_RELAY_DELAY + OVERLOADED_PEER_TX_DELAY + 1
def cleanup(func):
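    """Decorator for subtests: always mine a block to clear the mempool, drop
    all P2P connections, and bump mocktime far forward so no orphanage or
    txrequest state leaks into the next subtest."""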
    # Time to fast-forward (using setmocktime) between subtests to ensure they do not interfere with
# one another, in seconds. Equal to 12 hours, which is enough to expire anything that may exist
# (though nothing should since state should be cleared) in p2p data structures.
LONG_TIME_SKIP = 12 * 60 * 60
def wrapper(self):
try:
func(self)
finally:
# Clear mempool
self.generate(self.nodes[0], 1)
self.nodes[0].disconnect_p2ps()
self.nodes[0].bumpmocktime(LONG_TIME_SKIP)
return wrapper
class PeerTxRelayer(P2PTxInvStore):
"""A P2PTxInvStore that also remembers all of the getdata and tx messages it receives."""
def __init__(self):
super().__init__()
self._tx_received = []
self._getdata_received = []
@property
def tx_received(self):
with p2p_lock:
return self._tx_received
@property
def getdata_received(self):
with p2p_lock:
return self._getdata_received
def on_tx(self, message):
self._tx_received.append(message)
def on_getdata(self, message):
self._getdata_received.append(message)
def wait_for_parent_requests(self, txids):
"""Wait for requests for missing parents by txid with witness data (MSG_WITNESS_TX or
WitnessTx). Requires that the getdata message match these txids exactly; all txids must be
requested and no additional requests are allowed."""
def test_function():
last_getdata = self.last_message.get('getdata')
if not last_getdata:
return False
return len(last_getdata.inv) == len(txids) and all([item.type == MSG_WITNESS_TX and item.hash in txids for item in last_getdata.inv])
self.wait_until(test_function, timeout=10)
def assert_no_immediate_response(self, message):
"""Check that the node does not immediately respond to this message with any of getdata,
inv, tx. The node may respond later.
"""
prev_lastmessage = self.last_message
self.send_and_ping(message)
after_lastmessage = self.last_message
for msgtype in ["getdata", "inv", "tx"]:
if msgtype not in prev_lastmessage:
assert msgtype not in after_lastmessage
else:
assert_equal(prev_lastmessage[msgtype], after_lastmessage[msgtype])
def assert_never_requested(self, txhash):
"""Check that the node has never sent us a getdata for this hash (int type)"""
for getdata in self.getdata_received:
for request in getdata.inv:
assert request.hash != txhash
class OrphanHandlingTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.extra_args = [[]]
def create_parent_and_child(self):
"""Create package with 1 parent and 1 child, normal fees (no cpfp)."""
parent = self.wallet.create_self_transfer()
child = self.wallet.create_self_transfer(utxo_to_spend=parent['new_utxo'])
return child["tx"].getwtxid(), child["tx"], parent["tx"]
def relay_transaction(self, peer, tx):
"""Relay transaction using MSG_WTX"""
wtxid = int(tx.getwtxid(), 16)
peer.send_and_ping(msg_inv([CInv(t=MSG_WTX, h=wtxid)]))
self.nodes[0].bumpmocktime(TXREQUEST_TIME_SKIP)
peer.wait_for_getdata([wtxid])
peer.send_and_ping(msg_tx(tx))
@cleanup
def test_arrival_timing_orphan(self):
self.log.info("Test missing parents that arrive during delay are not requested")
node = self.nodes[0]
tx_parent_arrives = self.wallet.create_self_transfer()
tx_parent_doesnt_arrive = self.wallet.create_self_transfer()
# Fake orphan spends nonexistent outputs of the two parents
tx_fake_orphan = self.wallet.create_self_transfer_multi(utxos_to_spend=[
{"txid": tx_parent_doesnt_arrive["txid"], "vout": 10, "value": tx_parent_doesnt_arrive["new_utxo"]["value"]},
{"txid": tx_parent_arrives["txid"], "vout": 10, "value": tx_parent_arrives["new_utxo"]["value"]}
])
peer_spy = node.add_p2p_connection(PeerTxRelayer())
peer_normal = node.add_p2p_connection(PeerTxRelayer())
# This transaction is an orphan because it is missing inputs. It is a "fake" orphan that the
# spy peer has crafted to learn information about tx_parent_arrives even though it isn't
# able to spend a real output of it, but it could also just be a normal, real child tx.
# The node should not immediately respond with a request for orphan parents.
# Also, no request should be sent later because it will be resolved by
# the time the request is scheduled to be sent.
peer_spy.assert_no_immediate_response(msg_tx(tx_fake_orphan["tx"]))
# Node receives transaction. It attempts to obfuscate the exact timing at which this
# transaction entered its mempool. Send unsolicited because otherwise we need to wait for
# request delays.
peer_normal.send_and_ping(msg_tx(tx_parent_arrives["tx"]))
assert tx_parent_arrives["txid"] in node.getrawmempool()
# Spy peer should not be able to query the node for the parent yet, since it hasn't been
# announced / insufficient time has elapsed.
parent_inv = CInv(t=MSG_WTX, h=int(tx_parent_arrives["tx"].getwtxid(), 16))
assert_equal(len(peer_spy.get_invs()), 0)
peer_spy.assert_no_immediate_response(msg_getdata([parent_inv]))
# Request would be scheduled with this delay because it is not a preferred relay peer.
self.nodes[0].bumpmocktime(NONPREF_PEER_TX_DELAY)
peer_spy.assert_never_requested(int(tx_parent_arrives["txid"], 16))
peer_spy.assert_never_requested(int(tx_parent_doesnt_arrive["txid"], 16))
# Request would be scheduled with this delay because it is by txid.
self.nodes[0].bumpmocktime(TXID_RELAY_DELAY)
peer_spy.wait_for_parent_requests([int(tx_parent_doesnt_arrive["txid"], 16)])
peer_spy.assert_never_requested(int(tx_parent_arrives["txid"], 16))
@cleanup
def test_orphan_rejected_parents_exceptions(self):
node = self.nodes[0]
peer1 = node.add_p2p_connection(PeerTxRelayer())
peer2 = node.add_p2p_connection(PeerTxRelayer())
self.log.info("Test orphan handling when a nonsegwit parent is known to be invalid")
parent_low_fee_nonsegwit = self.wallet_nonsegwit.create_self_transfer(fee_rate=0)
assert_equal(parent_low_fee_nonsegwit["txid"], parent_low_fee_nonsegwit["tx"].getwtxid())
parent_other = self.wallet_nonsegwit.create_self_transfer()
child_nonsegwit = self.wallet_nonsegwit.create_self_transfer_multi(
utxos_to_spend=[parent_other["new_utxo"], parent_low_fee_nonsegwit["new_utxo"]])
# Relay the parent. It should be rejected because it pays 0 fees.
self.relay_transaction(peer1, parent_low_fee_nonsegwit["tx"])
assert parent_low_fee_nonsegwit["txid"] not in node.getrawmempool()
# Relay the child. It should not be accepted because it has missing inputs.
# Its parent should not be requested because its hash (txid == wtxid) has been added to the rejection filter.
with node.assert_debug_log(['not keeping orphan with rejected parents {}'.format(child_nonsegwit["txid"])]):
self.relay_transaction(peer2, child_nonsegwit["tx"])
assert child_nonsegwit["txid"] not in node.getrawmempool()
# No parents are requested.
self.nodes[0].bumpmocktime(GETDATA_TX_INTERVAL)
peer1.assert_never_requested(int(parent_other["txid"], 16))
peer2.assert_never_requested(int(parent_other["txid"], 16))
peer2.assert_never_requested(int(parent_low_fee_nonsegwit["txid"], 16))
self.log.info("Test orphan handling when a segwit parent was invalid but may be retried with another witness")
parent_low_fee = self.wallet.create_self_transfer(fee_rate=0)
child_low_fee = self.wallet.create_self_transfer(utxo_to_spend=parent_low_fee["new_utxo"])
# Relay the low fee parent. It should not be accepted.
self.relay_transaction(peer1, parent_low_fee["tx"])
assert parent_low_fee["txid"] not in node.getrawmempool()
# Relay the child. It should not be accepted because it has missing inputs.
self.relay_transaction(peer2, child_low_fee["tx"])
assert child_low_fee["txid"] not in node.getrawmempool()
# The parent should be requested because even though the txid commits to the fee, it doesn't
# commit to the feerate. Delayed because it's by txid and this is not a preferred relay peer.
self.nodes[0].bumpmocktime(NONPREF_PEER_TX_DELAY + TXID_RELAY_DELAY)
peer2.wait_for_getdata([int(parent_low_fee["tx"].rehash(), 16)])
self.log.info("Test orphan handling when a parent was previously downloaded with witness stripped")
parent_normal = self.wallet.create_self_transfer()
parent1_witness_stripped = tx_from_hex(parent_normal["tx"].serialize_without_witness().hex())
child_invalid_witness = self.wallet.create_self_transfer(utxo_to_spend=parent_normal["new_utxo"])
# Relay the parent with witness stripped. It should not be accepted.
self.relay_transaction(peer1, parent1_witness_stripped)
assert_equal(parent_normal["txid"], parent1_witness_stripped.rehash())
assert parent1_witness_stripped.rehash() not in node.getrawmempool()
# Relay the child. It should not be accepted because it has missing inputs.
self.relay_transaction(peer2, child_invalid_witness["tx"])
assert child_invalid_witness["txid"] not in node.getrawmempool()
# The parent should be requested since the unstripped wtxid would differ. Delayed because
# it's by txid and this is not a preferred relay peer.
self.nodes[0].bumpmocktime(NONPREF_PEER_TX_DELAY + TXID_RELAY_DELAY)
peer2.wait_for_getdata([int(parent_normal["tx"].rehash(), 16)])
# parent_normal can be relayed again even though parent1_witness_stripped was rejected
self.relay_transaction(peer1, parent_normal["tx"])
assert_equal(set(node.getrawmempool()), set([parent_normal["txid"], child_invalid_witness["txid"]]))
@cleanup
def test_orphan_multiple_parents(self):
node = self.nodes[0]
peer = node.add_p2p_connection(PeerTxRelayer())
self.log.info("Test orphan parent requests with a mixture of confirmed, in-mempool and missing parents")
# This UTXO confirmed a long time ago.
utxo_conf_old = self.wallet.send_self_transfer(from_node=node)["new_utxo"]
txid_conf_old = utxo_conf_old["txid"]
self.generate(self.wallet, 10)
# Create a fake reorg to trigger BlockDisconnected, which resets the rolling bloom filter.
# The alternative is to mine thousands of transactions to push it out of the filter.
last_block = node.getbestblockhash()
node.invalidateblock(last_block)
node.preciousblock(last_block)
node.syncwithvalidationinterfacequeue()
# This UTXO confirmed recently.
utxo_conf_recent = self.wallet.send_self_transfer(from_node=node)["new_utxo"]
self.generate(node, 1)
# This UTXO is unconfirmed and in the mempool.
assert_equal(len(node.getrawmempool()), 0)
mempool_tx = self.wallet.send_self_transfer(from_node=node)
utxo_unconf_mempool = mempool_tx["new_utxo"]
# This UTXO is unconfirmed and missing.
missing_tx = self.wallet.create_self_transfer()
utxo_unconf_missing = missing_tx["new_utxo"]
assert missing_tx["txid"] not in node.getrawmempool()
orphan = self.wallet.create_self_transfer_multi(utxos_to_spend=[utxo_conf_old,
utxo_conf_recent, utxo_unconf_mempool, utxo_unconf_missing])
self.relay_transaction(peer, orphan["tx"])
self.nodes[0].bumpmocktime(NONPREF_PEER_TX_DELAY + TXID_RELAY_DELAY)
peer.sync_with_ping()
assert_equal(len(peer.last_message["getdata"].inv), 2)
peer.wait_for_parent_requests([int(txid_conf_old, 16), int(missing_tx["txid"], 16)])
# Even though the peer would send a notfound for the "old" confirmed transaction, the node
# doesn't give up on the orphan. Once all of the missing parents are received, it should be
# submitted to mempool.
peer.send_message(msg_notfound(vec=[CInv(MSG_WITNESS_TX, int(txid_conf_old, 16))]))
peer.send_and_ping(msg_tx(missing_tx["tx"]))
peer.sync_with_ping()
assert_equal(node.getmempoolentry(orphan["txid"])["ancestorcount"], 3)
@cleanup
def test_orphans_overlapping_parents(self):
node = self.nodes[0]
# In the process of relaying inflight_parent_AB
peer_txrequest = node.add_p2p_connection(PeerTxRelayer())
# Sends the orphans
peer_orphans = node.add_p2p_connection(PeerTxRelayer())
confirmed_utxos = [self.wallet_nonsegwit.get_utxo() for _ in range(4)]
assert all([utxo["confirmations"] > 0 for utxo in confirmed_utxos])
self.log.info("Test handling of multiple orphans with missing parents that are already being requested")
# Parent of child_A only
missing_parent_A = self.wallet_nonsegwit.create_self_transfer(utxo_to_spend=confirmed_utxos[0])
# Parents of child_A and child_B
missing_parent_AB = self.wallet_nonsegwit.create_self_transfer(utxo_to_spend=confirmed_utxos[1])
inflight_parent_AB = self.wallet_nonsegwit.create_self_transfer(utxo_to_spend=confirmed_utxos[2])
# Parent of child_B only
missing_parent_B = self.wallet_nonsegwit.create_self_transfer(utxo_to_spend=confirmed_utxos[3])
child_A = self.wallet_nonsegwit.create_self_transfer_multi(
utxos_to_spend=[missing_parent_A["new_utxo"], missing_parent_AB["new_utxo"], inflight_parent_AB["new_utxo"]]
)
child_B = self.wallet_nonsegwit.create_self_transfer_multi(
utxos_to_spend=[missing_parent_B["new_utxo"], missing_parent_AB["new_utxo"], inflight_parent_AB["new_utxo"]]
)
# The wtxid and txid need to be the same for the node to recognize that the missing input
# and in-flight request for inflight_parent_AB are the same transaction.
assert_equal(inflight_parent_AB["txid"], inflight_parent_AB["tx"].getwtxid())
# Announce inflight_parent_AB and wait for getdata
peer_txrequest.send_and_ping(msg_inv([CInv(t=MSG_WTX, h=int(inflight_parent_AB["tx"].getwtxid(), 16))]))
self.nodes[0].bumpmocktime(NONPREF_PEER_TX_DELAY)
peer_txrequest.wait_for_getdata([int(inflight_parent_AB["tx"].getwtxid(), 16)])
self.log.info("Test that the node does not request a parent if it has an in-flight txrequest")
# Relay orphan child_A
self.relay_transaction(peer_orphans, child_A["tx"])
self.nodes[0].bumpmocktime(NONPREF_PEER_TX_DELAY + TXID_RELAY_DELAY)
# There are 3 missing parents. missing_parent_A and missing_parent_AB should be requested.
# But inflight_parent_AB should not, because there is already an in-flight request for it.
peer_orphans.wait_for_parent_requests([int(missing_parent_A["txid"], 16), int(missing_parent_AB["txid"], 16)])
self.log.info("Test that the node does not request a parent if it has an in-flight orphan parent request")
# Relay orphan child_B
self.relay_transaction(peer_orphans, child_B["tx"])
self.nodes[0].bumpmocktime(NONPREF_PEER_TX_DELAY + TXID_RELAY_DELAY)
# Only missing_parent_B should be requested. Not inflight_parent_AB or missing_parent_AB
# because they are already being requested from peer_txrequest and peer_orphans respectively.
peer_orphans.wait_for_parent_requests([int(missing_parent_B["txid"], 16)])
peer_orphans.assert_never_requested(int(inflight_parent_AB["txid"], 16))
@cleanup
def test_orphan_of_orphan(self):
node = self.nodes[0]
peer = node.add_p2p_connection(PeerTxRelayer())
self.log.info("Test handling of an orphan with a parent who is another orphan")
missing_grandparent = self.wallet_nonsegwit.create_self_transfer()
missing_parent_orphan = self.wallet_nonsegwit.create_self_transfer(utxo_to_spend=missing_grandparent["new_utxo"])
missing_parent = self.wallet_nonsegwit.create_self_transfer()
orphan = self.wallet_nonsegwit.create_self_transfer_multi(utxos_to_spend=[missing_parent["new_utxo"], missing_parent_orphan["new_utxo"]])
# The node should put missing_parent_orphan into the orphanage and request missing_grandparent
self.relay_transaction(peer, missing_parent_orphan["tx"])
self.nodes[0].bumpmocktime(NONPREF_PEER_TX_DELAY + TXID_RELAY_DELAY)
peer.wait_for_parent_requests([int(missing_grandparent["txid"], 16)])
# The node should put the orphan into the orphanage and request missing_parent, skipping
# missing_parent_orphan because it already has it in the orphanage.
self.relay_transaction(peer, orphan["tx"])
self.nodes[0].bumpmocktime(NONPREF_PEER_TX_DELAY + TXID_RELAY_DELAY)
peer.wait_for_parent_requests([int(missing_parent["txid"], 16)])
@cleanup
def test_orphan_inherit_rejection(self):
node = self.nodes[0]
peer1 = node.add_p2p_connection(PeerTxRelayer())
peer2 = node.add_p2p_connection(PeerTxRelayer())
peer3 = node.add_p2p_connection(PeerTxRelayer())
self.log.info("Test that an orphan with rejected parents, along with any descendants, cannot be retried with an alternate witness")
parent_low_fee_nonsegwit = self.wallet_nonsegwit.create_self_transfer(fee_rate=0)
assert_equal(parent_low_fee_nonsegwit["txid"], parent_low_fee_nonsegwit["tx"].getwtxid())
child = self.wallet.create_self_transfer(utxo_to_spend=parent_low_fee_nonsegwit["new_utxo"])
grandchild = self.wallet.create_self_transfer(utxo_to_spend=child["new_utxo"])
assert child["txid"] != child["tx"].getwtxid()
assert grandchild["txid"] != grandchild["tx"].getwtxid()
# Relay the parent. It should be rejected because it pays 0 fees.
self.relay_transaction(peer1, parent_low_fee_nonsegwit["tx"])
# Relay the child. It should be rejected for having missing parents, and this rejection is
# cached by txid and wtxid.
with node.assert_debug_log(['not keeping orphan with rejected parents {}'.format(child["txid"])]):
self.relay_transaction(peer1, child["tx"])
assert_equal(0, len(node.getrawmempool()))
peer1.assert_never_requested(parent_low_fee_nonsegwit["txid"])
# Grandchild should also not be kept in orphanage because its parent has been rejected.
with node.assert_debug_log(['not keeping orphan with rejected parents {}'.format(grandchild["txid"])]):
self.relay_transaction(peer2, grandchild["tx"])
assert_equal(0, len(node.getrawmempool()))
peer2.assert_never_requested(child["txid"])
peer2.assert_never_requested(child["tx"].getwtxid())
# The child should never be requested, even if announced again with potentially different witness.
peer3.send_and_ping(msg_inv([CInv(t=MSG_TX, h=int(child["txid"], 16))]))
self.nodes[0].bumpmocktime(TXREQUEST_TIME_SKIP)
peer3.assert_never_requested(child["txid"])
def run_test(self):
self.nodes[0].setmocktime(int(time.time()))
self.wallet_nonsegwit = MiniWallet(self.nodes[0], mode=MiniWalletMode.RAW_P2PK)
self.generate(self.wallet_nonsegwit, 10)
self.wallet = MiniWallet(self.nodes[0])
self.generate(self.wallet, 160)
self.test_arrival_timing_orphan()
self.test_orphan_rejected_parents_exceptions()
self.test_orphan_multiple_parents()
self.test_orphans_overlapping_parents()
self.test_orphan_of_orphan()
self.test_orphan_inherit_rejection()
if __name__ == '__main__':
OrphanHandlingTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/interface_usdt_mempool.py | #!/usr/bin/env python3
# Copyright (c) 2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
""" Tests the mempool:* tracepoint API interface.
See https://github.com/bitcoin/bitcoin/blob/master/doc/tracing.md#context-mempool
"""
from decimal import Decimal
# Test will be skipped if we don't have bcc installed
try:
from bcc import BPF, USDT # type: ignore[import]
except ImportError:
pass
from test_framework.blocktools import COINBASE_MATURITY
from test_framework.messages import COIN, DEFAULT_MEMPOOL_EXPIRY_HOURS
from test_framework.p2p import P2PDataStore
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal
from test_framework.wallet import MiniWallet
MEMPOOL_TRACEPOINTS_PROGRAM = """
# include <uapi/linux/ptrace.h>
// The longest rejection reason is 118 chars and is generated in case of SCRIPT_ERR_EVAL_FALSE by
// strprintf("mandatory-script-verify-flag-failed (%s)", ScriptErrorString(check.GetScriptError()))
#define MAX_REJECT_REASON_LENGTH 118
// The longest string returned by RemovalReasonToString() is 'sizelimit'
#define MAX_REMOVAL_REASON_LENGTH 9
#define HASH_LENGTH 32
struct added_event
{
u8 hash[HASH_LENGTH];
s32 vsize;
s64 fee;
};
struct removed_event
{
u8 hash[HASH_LENGTH];
char reason[MAX_REMOVAL_REASON_LENGTH];
s32 vsize;
s64 fee;
u64 entry_time;
};
struct rejected_event
{
u8 hash[HASH_LENGTH];
char reason[MAX_REJECT_REASON_LENGTH];
};
struct replaced_event
{
u8 replaced_hash[HASH_LENGTH];
s32 replaced_vsize;
s64 replaced_fee;
u64 replaced_entry_time;
u8 replacement_hash[HASH_LENGTH];
s32 replacement_vsize;
s64 replacement_fee;
};
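// Note: hashes arrive in internal (reversed) byte order; the Python side flips
// them with bytes(event.hash)[::-1].hex() before comparing against RPC txids.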
// BPF perf buffer to push the data to user space.
BPF_PERF_OUTPUT(added_events);
BPF_PERF_OUTPUT(removed_events);
BPF_PERF_OUTPUT(rejected_events);
BPF_PERF_OUTPUT(replaced_events);
int trace_added(struct pt_regs *ctx) {
struct added_event added = {};
bpf_usdt_readarg_p(1, ctx, &added.hash, HASH_LENGTH);
bpf_usdt_readarg(2, ctx, &added.vsize);
bpf_usdt_readarg(3, ctx, &added.fee);
added_events.perf_submit(ctx, &added, sizeof(added));
return 0;
}
int trace_removed(struct pt_regs *ctx) {
struct removed_event removed = {};
bpf_usdt_readarg_p(1, ctx, &removed.hash, HASH_LENGTH);
bpf_usdt_readarg_p(2, ctx, &removed.reason, MAX_REMOVAL_REASON_LENGTH);
bpf_usdt_readarg(3, ctx, &removed.vsize);
bpf_usdt_readarg(4, ctx, &removed.fee);
bpf_usdt_readarg(5, ctx, &removed.entry_time);
removed_events.perf_submit(ctx, &removed, sizeof(removed));
return 0;
}
int trace_rejected(struct pt_regs *ctx) {
struct rejected_event rejected = {};
bpf_usdt_readarg_p(1, ctx, &rejected.hash, HASH_LENGTH);
bpf_usdt_readarg_p(2, ctx, &rejected.reason, MAX_REJECT_REASON_LENGTH);
rejected_events.perf_submit(ctx, &rejected, sizeof(rejected));
return 0;
}
int trace_replaced(struct pt_regs *ctx) {
struct replaced_event replaced = {};
bpf_usdt_readarg_p(1, ctx, &replaced.replaced_hash, HASH_LENGTH);
bpf_usdt_readarg(2, ctx, &replaced.replaced_vsize);
bpf_usdt_readarg(3, ctx, &replaced.replaced_fee);
bpf_usdt_readarg(4, ctx, &replaced.replaced_entry_time);
bpf_usdt_readarg_p(5, ctx, &replaced.replacement_hash, HASH_LENGTH);
bpf_usdt_readarg(6, ctx, &replaced.replacement_vsize);
bpf_usdt_readarg(7, ctx, &replaced.replacement_fee);
replaced_events.perf_submit(ctx, &replaced, sizeof(replaced));
return 0;
}
"""
class MempoolTracepointTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.setup_clean_chain = True
def skip_test_if_missing_module(self):
self.skip_if_platform_not_linux()
self.skip_if_no_bitcoind_tracepoints()
self.skip_if_no_python_bcc()
self.skip_if_no_bpf_permissions()
def added_test(self):
"""Add a transaction to the mempool and make sure the tracepoint returns
the expected txid, vsize, and fee."""
events = []
self.log.info("Hooking into mempool:added tracepoint...")
node = self.nodes[0]
ctx = USDT(pid=node.process.pid)
ctx.enable_probe(probe="mempool:added", fn_name="trace_added")
bpf = BPF(text=MEMPOOL_TRACEPOINTS_PROGRAM, usdt_contexts=[ctx], debug=0, cflags=["-Wno-error=implicit-function-declaration"])
def handle_added_event(_, data, __):
events.append(bpf["added_events"].event(data))
bpf["added_events"].open_perf_buffer(handle_added_event)
self.log.info("Sending transaction...")
fee = Decimal(31200)
tx = self.wallet.send_self_transfer(from_node=node, fee=fee / COIN)
self.log.info("Polling buffer...")
bpf.perf_buffer_poll(timeout=200)
self.log.info("Cleaning up mempool...")
self.generate(node, 1)
self.log.info("Ensuring mempool:added event was handled successfully...")
assert_equal(1, len(events))
event = events[0]
assert_equal(bytes(event.hash)[::-1].hex(), tx["txid"])
assert_equal(event.vsize, tx["tx"].get_vsize())
assert_equal(event.fee, fee)
bpf.cleanup()
self.generate(self.wallet, 1)
def removed_test(self):
"""Expire a transaction from the mempool and make sure the tracepoint returns
the expected txid, expiry reason, vsize, and fee."""
events = []
self.log.info("Hooking into mempool:removed tracepoint...")
node = self.nodes[0]
ctx = USDT(pid=node.process.pid)
ctx.enable_probe(probe="mempool:removed", fn_name="trace_removed")
bpf = BPF(text=MEMPOOL_TRACEPOINTS_PROGRAM, usdt_contexts=[ctx], debug=0, cflags=["-Wno-error=implicit-function-declaration"])
def handle_removed_event(_, data, __):
events.append(bpf["removed_events"].event(data))
bpf["removed_events"].open_perf_buffer(handle_removed_event)
self.log.info("Sending transaction...")
fee = Decimal(31200)
tx = self.wallet.send_self_transfer(from_node=node, fee=fee / COIN)
txid = tx["txid"]
self.log.info("Fast-forwarding time to mempool expiry...")
entry_time = node.getmempoolentry(txid)["time"]
expiry_time = entry_time + 60 * 60 * DEFAULT_MEMPOOL_EXPIRY_HOURS + 5
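        # +5 seconds pushes mocktime safely past the expiry threshold; expiry is
        # only evaluated lazily, so a fresh transaction is submitted below to
        # trigger the mempool sweep.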
node.setmocktime(expiry_time)
self.log.info("Triggering expiry...")
self.wallet.get_utxo(txid=txid)
self.wallet.send_self_transfer(from_node=node)
self.log.info("Polling buffer...")
bpf.perf_buffer_poll(timeout=200)
self.log.info("Ensuring mempool:removed event was handled successfully...")
assert_equal(1, len(events))
event = events[0]
assert_equal(bytes(event.hash)[::-1].hex(), txid)
assert_equal(event.reason.decode("UTF-8"), "expiry")
assert_equal(event.vsize, tx["tx"].get_vsize())
assert_equal(event.fee, fee)
assert_equal(event.entry_time, entry_time)
bpf.cleanup()
self.generate(self.wallet, 1)
def replaced_test(self):
"""Replace one and two transactions in the mempool and make sure the tracepoint
returns the expected txids, vsizes, and fees."""
events = []
self.log.info("Hooking into mempool:replaced tracepoint...")
node = self.nodes[0]
ctx = USDT(pid=node.process.pid)
ctx.enable_probe(probe="mempool:replaced", fn_name="trace_replaced")
bpf = BPF(text=MEMPOOL_TRACEPOINTS_PROGRAM, usdt_contexts=[ctx], debug=0, cflags=["-Wno-error=implicit-function-declaration"])
def handle_replaced_event(_, data, __):
events.append(bpf["replaced_events"].event(data))
bpf["replaced_events"].open_perf_buffer(handle_replaced_event)
self.log.info("Sending RBF transaction...")
utxo = self.wallet.get_utxo(mark_as_spent=True)
original_fee = Decimal(40000)
original_tx = self.wallet.send_self_transfer(
from_node=node, utxo_to_spend=utxo, fee=original_fee / COIN
)
entry_time = node.getmempoolentry(original_tx["txid"])["time"]
self.log.info("Sending replacement transaction...")
replacement_fee = Decimal(45000)
replacement_tx = self.wallet.send_self_transfer(
from_node=node, utxo_to_spend=utxo, fee=replacement_fee / COIN
)
self.log.info("Polling buffer...")
bpf.perf_buffer_poll(timeout=200)
self.log.info("Ensuring mempool:replaced event was handled successfully...")
assert_equal(1, len(events))
event = events[0]
assert_equal(bytes(event.replaced_hash)[::-1].hex(), original_tx["txid"])
assert_equal(event.replaced_vsize, original_tx["tx"].get_vsize())
assert_equal(event.replaced_fee, original_fee)
assert_equal(event.replaced_entry_time, entry_time)
assert_equal(bytes(event.replacement_hash)[::-1].hex(), replacement_tx["txid"])
assert_equal(event.replacement_vsize, replacement_tx["tx"].get_vsize())
assert_equal(event.replacement_fee, replacement_fee)
bpf.cleanup()
self.generate(self.wallet, 1)
def rejected_test(self):
"""Create an invalid transaction and make sure the tracepoint returns
the expected txid, rejection reason, peer id, and peer address."""
events = []
self.log.info("Adding P2P connection...")
node = self.nodes[0]
node.add_p2p_connection(P2PDataStore())
self.log.info("Hooking into mempool:rejected tracepoint...")
ctx = USDT(pid=node.process.pid)
ctx.enable_probe(probe="mempool:rejected", fn_name="trace_rejected")
bpf = BPF(text=MEMPOOL_TRACEPOINTS_PROGRAM, usdt_contexts=[ctx], debug=0, cflags=["-Wno-error=implicit-function-declaration"])
def handle_rejected_event(_, data, __):
events.append(bpf["rejected_events"].event(data))
bpf["rejected_events"].open_perf_buffer(handle_rejected_event)
self.log.info("Sending invalid transaction...")
tx = self.wallet.create_self_transfer(fee_rate=Decimal(0))
node.p2ps[0].send_txs_and_test([tx["tx"]], node, success=False)
self.log.info("Polling buffer...")
bpf.perf_buffer_poll(timeout=200)
self.log.info("Ensuring mempool:rejected event was handled successfully...")
assert_equal(1, len(events))
event = events[0]
assert_equal(bytes(event.hash)[::-1].hex(), tx["tx"].hash)
# The next test is already known to fail, so disable it to avoid
# wasting CPU time and developer time. See
# https://github.com/bitcoin/bitcoin/issues/27380
#assert_equal(event.reason.decode("UTF-8"), "min relay fee not met")
bpf.cleanup()
self.generate(self.wallet, 1)
def run_test(self):
"""Tests the mempool:added, mempool:removed, mempool:replaced,
and mempool:rejected tracepoints."""
# Create some coinbase transactions and mature them so they can be spent
node = self.nodes[0]
self.wallet = MiniWallet(node)
self.generate(self.wallet, 4)
self.generate(node, COINBASE_MATURITY)
# Test individual tracepoints
self.added_test()
self.removed_test()
self.replaced_test()
self.rejected_test()
if __name__ == "__main__":
MempoolTracepointTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/wallet_send.py | #!/usr/bin/env python3
# Copyright (c) 2020-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the send RPC command."""
from decimal import Decimal, getcontext
from itertools import product
from test_framework.authproxy import JSONRPCException
from test_framework.descriptors import descsum_create
from test_framework.messages import (
ser_compact_size,
WITNESS_SCALE_FACTOR,
)
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_fee_amount,
assert_greater_than,
assert_raises_rpc_error,
count_bytes,
)
from test_framework.wallet_util import generate_keypair
class WalletSendTest(BitcoinTestFramework):
def add_options(self, parser):
self.add_wallet_options(parser)
def set_test_params(self):
self.num_nodes = 2
# whitelist all peers to speed up tx relay / mempool sync
self.extra_args = [
["-whitelist=127.0.0.1","-walletrbf=1"],
["-whitelist=127.0.0.1","-walletrbf=1"],
]
getcontext().prec = 8 # Satoshi precision for Decimal
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def test_send(self, from_wallet, to_wallet=None, amount=None, data=None,
arg_conf_target=None, arg_estimate_mode=None, arg_fee_rate=None,
conf_target=None, estimate_mode=None, fee_rate=None, add_to_wallet=None, psbt=None,
inputs=None, add_inputs=None, include_unsafe=None, change_address=None, change_position=None, change_type=None,
include_watching=None, locktime=None, lock_unspents=None, replaceable=None, subtract_fee_from_outputs=None,
expect_error=None, solving_data=None, minconf=None):
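        """Build and exercise a single `send` RPC call.

        Parameters prefixed with arg_ are passed as top-level RPC arguments;
        most of the rest are collected into the options object. Balances and
        the shape of the result are then checked against what the inputs imply.
        """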
assert (amount is None) != (data is None)
from_balance_before = from_wallet.getbalances()["mine"]["trusted"]
if include_unsafe:
from_balance_before += from_wallet.getbalances()["mine"]["untrusted_pending"]
if to_wallet is None:
assert amount is None
else:
to_untrusted_pending_before = to_wallet.getbalances()["mine"]["untrusted_pending"]
if amount:
dest = to_wallet.getnewaddress()
outputs = {dest: amount}
else:
outputs = {"data": data}
# Construct options dictionary
options = {}
if add_to_wallet is not None:
options["add_to_wallet"] = add_to_wallet
else:
if psbt:
add_to_wallet = False
else:
add_to_wallet = from_wallet.getwalletinfo()["private_keys_enabled"] # Default value
if psbt is not None:
options["psbt"] = psbt
if conf_target is not None:
options["conf_target"] = conf_target
if estimate_mode is not None:
options["estimate_mode"] = estimate_mode
if fee_rate is not None:
options["fee_rate"] = fee_rate
if inputs is not None:
options["inputs"] = inputs
if add_inputs is not None:
options["add_inputs"] = add_inputs
if include_unsafe is not None:
options["include_unsafe"] = include_unsafe
if change_address is not None:
options["change_address"] = change_address
if change_position is not None:
options["change_position"] = change_position
if change_type is not None:
options["change_type"] = change_type
if include_watching is not None:
options["include_watching"] = include_watching
if locktime is not None:
options["locktime"] = locktime
if lock_unspents is not None:
options["lock_unspents"] = lock_unspents
if replaceable is None:
replaceable = True # default
else:
options["replaceable"] = replaceable
if subtract_fee_from_outputs is not None:
options["subtract_fee_from_outputs"] = subtract_fee_from_outputs
if solving_data is not None:
options["solving_data"] = solving_data
if minconf is not None:
options["minconf"] = minconf
if len(options.keys()) == 0:
options = None
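        # Drop the empty dict so the RPC sees options as null/unset rather than
        # an explicit (empty) options object.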
if expect_error is None:
res = from_wallet.send(outputs=outputs, conf_target=arg_conf_target, estimate_mode=arg_estimate_mode, fee_rate=arg_fee_rate, options=options)
else:
try:
assert_raises_rpc_error(expect_error[0], expect_error[1], from_wallet.send,
outputs=outputs, conf_target=arg_conf_target, estimate_mode=arg_estimate_mode, fee_rate=arg_fee_rate, options=options)
except AssertionError:
# Provide debug info if the test fails
self.log.error("Unexpected successful result:")
self.log.error(arg_conf_target)
self.log.error(arg_estimate_mode)
self.log.error(arg_fee_rate)
self.log.error(options)
res = from_wallet.send(outputs=outputs, conf_target=arg_conf_target, estimate_mode=arg_estimate_mode, fee_rate=arg_fee_rate, options=options)
self.log.error(res)
if "txid" in res and add_to_wallet:
self.log.error("Transaction details:")
try:
tx = from_wallet.gettransaction(res["txid"])
self.log.error(tx)
self.log.error("testmempoolaccept (transaction may already be in mempool):")
self.log.error(from_wallet.testmempoolaccept([tx["hex"]]))
except JSONRPCException as exc:
self.log.error(exc)
raise
return
if locktime:
return res
if from_wallet.getwalletinfo()["private_keys_enabled"] and not include_watching:
assert_equal(res["complete"], True)
assert "txid" in res
else:
assert_equal(res["complete"], False)
assert not "txid" in res
assert "psbt" in res
from_balance = from_wallet.getbalances()["mine"]["trusted"]
if include_unsafe:
from_balance += from_wallet.getbalances()["mine"]["untrusted_pending"]
if add_to_wallet and not include_watching:
# Ensure transaction exists in the wallet:
tx = from_wallet.gettransaction(res["txid"])
assert tx
assert_equal(tx["bip125-replaceable"], "yes" if replaceable else "no")
# Ensure transaction exists in the mempool:
tx = from_wallet.getrawtransaction(res["txid"], True)
assert tx
if amount:
if subtract_fee_from_outputs:
assert_equal(from_balance_before - from_balance, amount)
else:
assert_greater_than(from_balance_before - from_balance, amount)
else:
assert next((out for out in tx["vout"] if out["scriptPubKey"]["asm"] == "OP_RETURN 35"), None)
else:
assert_equal(from_balance_before, from_balance)
if to_wallet:
self.sync_mempools()
if add_to_wallet:
if not subtract_fee_from_outputs:
assert_equal(to_wallet.getbalances()["mine"]["untrusted_pending"], to_untrusted_pending_before + Decimal(amount if amount else 0))
else:
assert_equal(to_wallet.getbalances()["mine"]["untrusted_pending"], to_untrusted_pending_before)
return res
def run_test(self):
self.log.info("Setup wallets...")
# w0 is a wallet with coinbase rewards
w0 = self.nodes[0].get_wallet_rpc(self.default_wallet_name)
# w1 is a regular wallet
self.nodes[1].createwallet(wallet_name="w1")
w1 = self.nodes[1].get_wallet_rpc("w1")
# w2 contains the private keys for w3
self.nodes[1].createwallet(wallet_name="w2", blank=True)
w2 = self.nodes[1].get_wallet_rpc("w2")
xpriv = "tprv8ZgxMBicQKsPfHCsTwkiM1KT56RXbGGTqvc2hgqzycpwbHqqpcajQeMRZoBD35kW4RtyCemu6j34Ku5DEspmgjKdt2qe4SvRch5Kk8B8A2v"
xpub = "tpubD6NzVbkrYhZ4YkEfMbRJkQyZe7wTkbTNRECozCtJPtdLRn6cT1QKb8yHjwAPcAr26eHBFYs5iLiFFnCbwPRsncCKUKCfubHDMGKzMVcN1Jg"
if self.options.descriptors:
w2.importdescriptors([{
"desc": descsum_create("wpkh(" + xpriv + "/0/0/*)"),
"timestamp": "now",
"range": [0, 100],
"active": True
},{
"desc": descsum_create("wpkh(" + xpriv + "/0/1/*)"),
"timestamp": "now",
"range": [0, 100],
"active": True,
"internal": True
}])
else:
w2.sethdseed(True)
# w3 is a watch-only wallet, based on w2
self.nodes[1].createwallet(wallet_name="w3", disable_private_keys=True)
w3 = self.nodes[1].get_wallet_rpc("w3")
if self.options.descriptors:
# Match the privkeys in w2 for descriptors
res = w3.importdescriptors([{
"desc": descsum_create("wpkh(" + xpub + "/0/0/*)"),
"timestamp": "now",
"range": [0, 100],
"keypool": True,
"active": True,
"watchonly": True
},{
"desc": descsum_create("wpkh(" + xpub + "/0/1/*)"),
"timestamp": "now",
"range": [0, 100],
"keypool": True,
"active": True,
"internal": True,
"watchonly": True
}])
assert_equal(res, [{"success": True}, {"success": True}])
for _ in range(3):
a2_receive = w2.getnewaddress()
if not self.options.descriptors:
# Because legacy wallets use exclusively hardened derivation, we can't do a ranged import like we do for descriptors
a2_change = w2.getrawchangeaddress() # doesn't actually use change derivation
res = w3.importmulti([{
"desc": w2.getaddressinfo(a2_receive)["desc"],
"timestamp": "now",
"keypool": True,
"watchonly": True
},{
"desc": w2.getaddressinfo(a2_change)["desc"],
"timestamp": "now",
"keypool": True,
"internal": True,
"watchonly": True
}])
assert_equal(res, [{"success": True}, {"success": True}])
w0.sendtoaddress(a2_receive, 10) # fund w3
self.generate(self.nodes[0], 1)
if not self.options.descriptors:
# w4 has private keys enabled, but only contains watch-only keys (from w2)
# This is legacy wallet behavior only as descriptor wallets don't allow watchonly and non-watchonly things in the same wallet.
self.nodes[1].createwallet(wallet_name="w4", disable_private_keys=False)
w4 = self.nodes[1].get_wallet_rpc("w4")
for _ in range(3):
a2_receive = w2.getnewaddress()
res = w4.importmulti([{
"desc": w2.getaddressinfo(a2_receive)["desc"],
"timestamp": "now",
"keypool": False,
"watchonly": True
}])
assert_equal(res, [{"success": True}])
w0.sendtoaddress(a2_receive, 10) # fund w4
self.generate(self.nodes[0], 1)
self.log.info("Send to address...")
self.test_send(from_wallet=w0, to_wallet=w1, amount=1)
self.test_send(from_wallet=w0, to_wallet=w1, amount=1, add_to_wallet=True)
self.log.info("Don't broadcast...")
res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, add_to_wallet=False)
assert res["hex"]
self.log.info("Return PSBT...")
res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, psbt=True)
assert res["psbt"]
self.log.info("Create transaction that spends to address, but don't broadcast...")
self.test_send(from_wallet=w0, to_wallet=w1, amount=1, add_to_wallet=False)
# conf_target & estimate_mode can be set as argument or option
res1 = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, arg_conf_target=1, arg_estimate_mode="economical", add_to_wallet=False)
res2 = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, conf_target=1, estimate_mode="economical", add_to_wallet=False)
assert_equal(self.nodes[1].decodepsbt(res1["psbt"])["fee"],
self.nodes[1].decodepsbt(res2["psbt"])["fee"])
# but not at the same time
for mode in ["unset", "economical", "conservative"]:
self.test_send(from_wallet=w0, to_wallet=w1, amount=1, arg_conf_target=1, arg_estimate_mode="economical",
conf_target=1, estimate_mode=mode, add_to_wallet=False,
expect_error=(-8, "Pass conf_target and estimate_mode either as arguments or in the options object, but not both"))
self.log.info("Create PSBT from watch-only wallet w3, sign with w2...")
res = self.test_send(from_wallet=w3, to_wallet=w1, amount=1)
res = w2.walletprocesspsbt(res["psbt"])
assert res["complete"]
if not self.options.descriptors:
# Descriptor wallets do not allow mixed watch-only and non-watch-only things in the same wallet.
# This is specifically testing that w4 ignores its own private keys and creates a psbt with send
# which is not something that needs to be tested in descriptor wallets.
self.log.info("Create PSBT from wallet w4 with watch-only keys, sign with w2...")
self.test_send(from_wallet=w4, to_wallet=w1, amount=1, expect_error=(-4, "Insufficient funds"))
res = self.test_send(from_wallet=w4, to_wallet=w1, amount=1, include_watching=True, add_to_wallet=False)
res = w2.walletprocesspsbt(res["psbt"])
assert res["complete"]
self.log.info("Create OP_RETURN...")
self.test_send(from_wallet=w0, to_wallet=w1, amount=1)
self.test_send(from_wallet=w0, data="Hello World", expect_error=(-8, "Data must be hexadecimal string (not 'Hello World')"))
self.test_send(from_wallet=w0, data="23")
res = self.test_send(from_wallet=w3, data="23")
res = w2.walletprocesspsbt(res["psbt"])
assert res["complete"]
self.log.info("Test setting explicit fee rate")
res1 = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, arg_fee_rate="1", add_to_wallet=False)
res2 = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, fee_rate="1", add_to_wallet=False)
assert_equal(self.nodes[1].decodepsbt(res1["psbt"])["fee"], self.nodes[1].decodepsbt(res2["psbt"])["fee"])
res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, fee_rate=7, add_to_wallet=False)
fee = self.nodes[1].decodepsbt(res["psbt"])["fee"]
assert_fee_amount(fee, count_bytes(res["hex"]), Decimal("0.00007"))
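        # Note: fee_rate is given in sat/vB while assert_fee_amount expects BTC/kvB; 7 sat/vB == 0.00007 BTC/kvB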
# "unset" and None are treated the same for estimate_mode
res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, fee_rate=2, estimate_mode="unset", add_to_wallet=False)
fee = self.nodes[1].decodepsbt(res["psbt"])["fee"]
assert_fee_amount(fee, count_bytes(res["hex"]), Decimal("0.00002"))
res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, arg_fee_rate=4.531, add_to_wallet=False)
fee = self.nodes[1].decodepsbt(res["psbt"])["fee"]
assert_fee_amount(fee, count_bytes(res["hex"]), Decimal("0.00004531"))
res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, arg_fee_rate=3, add_to_wallet=False)
fee = self.nodes[1].decodepsbt(res["psbt"])["fee"]
assert_fee_amount(fee, count_bytes(res["hex"]), Decimal("0.00003"))
# Test that passing fee_rate as both an argument and an option raises.
self.test_send(from_wallet=w0, to_wallet=w1, amount=1, arg_fee_rate=1, fee_rate=1, add_to_wallet=False,
expect_error=(-8, "Pass the fee_rate either as an argument, or in the options object, but not both"))
assert_raises_rpc_error(-8, "Use fee_rate (sat/vB) instead of feeRate", w0.send, {w1.getnewaddress(): 1}, 6, "conservative", 1, {"feeRate": 0.01})
assert_raises_rpc_error(-3, "Unexpected key totalFee", w0.send, {w1.getnewaddress(): 1}, 6, "conservative", 1, {"totalFee": 0.01})
for target, mode in product([-1, 0, 1009], ["economical", "conservative"]):
self.test_send(from_wallet=w0, to_wallet=w1, amount=1, conf_target=target, estimate_mode=mode,
expect_error=(-8, "Invalid conf_target, must be between 1 and 1008")) # max value of 1008 per src/policy/fees.h
msg = 'Invalid estimate_mode parameter, must be one of: "unset", "economical", "conservative"'
for target, mode in product([-1, 0], ["btc/kb", "sat/b"]):
self.test_send(from_wallet=w0, to_wallet=w1, amount=1, conf_target=target, estimate_mode=mode, expect_error=(-8, msg))
for mode in ["", "foo", Decimal("3.141592")]:
self.test_send(from_wallet=w0, to_wallet=w1, amount=1, conf_target=0.1, estimate_mode=mode, expect_error=(-8, msg))
self.test_send(from_wallet=w0, to_wallet=w1, amount=1, arg_conf_target=0.1, arg_estimate_mode=mode, expect_error=(-8, msg))
assert_raises_rpc_error(-8, msg, w0.send, {w1.getnewaddress(): 1}, 0.1, mode)
for mode in ["economical", "conservative"]:
for k, v in {"string": "true", "bool": True, "object": {"foo": "bar"}}.items():
self.test_send(from_wallet=w0, to_wallet=w1, amount=1, conf_target=v, estimate_mode=mode,
expect_error=(-3, f"JSON value of type {k} for field conf_target is not of expected type number"))
# Test setting explicit fee rate just below the minimum of 1 sat/vB.
self.log.info("Explicit fee rate raises RPC error 'fee rate too low' if fee_rate of 0.99999999 is passed")
msg = "Fee rate (0.999 sat/vB) is lower than the minimum fee rate setting (1.000 sat/vB)"
self.test_send(from_wallet=w0, to_wallet=w1, amount=1, fee_rate=0.999, expect_error=(-4, msg))
self.test_send(from_wallet=w0, to_wallet=w1, amount=1, arg_fee_rate=0.999, expect_error=(-4, msg))
self.log.info("Explicit fee rate raises if invalid fee_rate is passed")
# Test fee_rate with zero values.
msg = "Fee rate (0.000 sat/vB) is lower than the minimum fee rate setting (1.000 sat/vB)"
for zero_value in [0, 0.000, 0.00000000, "0", "0.000", "0.00000000"]:
self.test_send(from_wallet=w0, to_wallet=w1, amount=1, fee_rate=zero_value, expect_error=(-4, msg))
self.test_send(from_wallet=w0, to_wallet=w1, amount=1, arg_fee_rate=zero_value, expect_error=(-4, msg))
msg = "Invalid amount"
# Test fee_rate values that don't pass fixed-point parsing checks.
for invalid_value in ["", 0.000000001, 1e-09, 1.111111111, 1111111111111111, "31.999999999999999999999"]:
self.test_send(from_wallet=w0, to_wallet=w1, amount=1, fee_rate=invalid_value, expect_error=(-3, msg))
self.test_send(from_wallet=w0, to_wallet=w1, amount=1, arg_fee_rate=invalid_value, expect_error=(-3, msg))
# Test fee_rate values that cannot be represented in sat/vB.
for invalid_value in [0.0001, 0.00000001, 0.00099999, 31.99999999]:
self.test_send(from_wallet=w0, to_wallet=w1, amount=1, fee_rate=invalid_value, expect_error=(-3, msg))
self.test_send(from_wallet=w0, to_wallet=w1, amount=1, arg_fee_rate=invalid_value, expect_error=(-3, msg))
# Test fee_rate out of range (negative number).
msg = "Amount out of range"
self.test_send(from_wallet=w0, to_wallet=w1, amount=1, fee_rate=-1, expect_error=(-3, msg))
self.test_send(from_wallet=w0, to_wallet=w1, amount=1, arg_fee_rate=-1, expect_error=(-3, msg))
# Test type error.
msg = "Amount is not a number or string"
for invalid_value in [True, {"foo": "bar"}]:
self.test_send(from_wallet=w0, to_wallet=w1, amount=1, fee_rate=invalid_value, expect_error=(-3, msg))
self.test_send(from_wallet=w0, to_wallet=w1, amount=1, arg_fee_rate=invalid_value, expect_error=(-3, msg))
# TODO: Return hex if fee rate is below -maxmempool
# res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, conf_target=0.1, estimate_mode="sat/b", add_to_wallet=False)
# assert res["hex"]
# hex = res["hex"]
# res = self.nodes[0].testmempoolaccept([hex])
# assert not res[0]["allowed"]
# assert_equal(res[0]["reject-reason"], "...") # low fee
# assert_fee_amount(fee, Decimal(len(res["hex"]) / 2), Decimal("0.000001"))
self.log.info("If inputs are specified, do not automatically add more...")
res = self.test_send(from_wallet=w0, to_wallet=w1, amount=51, inputs=[], add_to_wallet=False)
assert res["complete"]
utxo1 = w0.listunspent()[0]
assert_equal(utxo1["amount"], 50)
ERR_NOT_ENOUGH_PRESET_INPUTS = "The preselected coins total amount does not cover the transaction target. " \
"Please allow other inputs to be automatically selected or include more coins manually"
self.test_send(from_wallet=w0, to_wallet=w1, amount=51, inputs=[utxo1],
expect_error=(-4, ERR_NOT_ENOUGH_PRESET_INPUTS))
self.test_send(from_wallet=w0, to_wallet=w1, amount=51, inputs=[utxo1], add_inputs=False,
expect_error=(-4, ERR_NOT_ENOUGH_PRESET_INPUTS))
res = self.test_send(from_wallet=w0, to_wallet=w1, amount=51, inputs=[utxo1], add_inputs=True, add_to_wallet=False)
assert res["complete"]
self.log.info("Manual change address and position...")
self.test_send(from_wallet=w0, to_wallet=w1, amount=1, change_address="not an address",
expect_error=(-5, "Change address must be a valid bitcoin address"))
change_address = w0.getnewaddress()
        res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, add_to_wallet=False, change_address=change_address)
assert res["complete"]
res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, add_to_wallet=False, change_address=change_address, change_position=0)
assert res["complete"]
assert_equal(self.nodes[0].decodepsbt(res["psbt"])["tx"]["vout"][0]["scriptPubKey"]["address"], change_address)
res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, add_to_wallet=False, change_type="legacy", change_position=0)
assert res["complete"]
change_address = self.nodes[0].decodepsbt(res["psbt"])["tx"]["vout"][0]["scriptPubKey"]["address"]
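        # On regtest, legacy (base58 P2PKH) addresses start with "m" or "n"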
assert change_address[0] == "m" or change_address[0] == "n"
self.log.info("Set lock time...")
height = self.nodes[0].getblockchaininfo()["blocks"]
res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, locktime=height + 1)
assert res["complete"]
assert res["txid"]
txid = res["txid"]
# Although the wallet finishes the transaction, it can't be added to the mempool yet:
hex = self.nodes[0].gettransaction(res["txid"])["hex"]
res = self.nodes[0].testmempoolaccept([hex])
assert not res[0]["allowed"]
assert_equal(res[0]["reject-reason"], "non-final")
# It shouldn't be confirmed in the next block
self.generate(self.nodes[0], 1)
assert_equal(self.nodes[0].gettransaction(txid)["confirmations"], 0)
# The mempool should allow it now:
res = self.nodes[0].testmempoolaccept([hex])
assert res[0]["allowed"]
# Don't wait for wallet to add it to the mempool:
res = self.nodes[0].sendrawtransaction(hex)
self.generate(self.nodes[0], 1)
assert_equal(self.nodes[0].gettransaction(txid)["confirmations"], 1)
self.log.info("Lock unspents...")
utxo1 = w0.listunspent()[0]
assert_greater_than(utxo1["amount"], 1)
res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, inputs=[utxo1], add_to_wallet=False, lock_unspents=True)
assert res["complete"]
locked_coins = w0.listlockunspent()
assert_equal(len(locked_coins), 1)
# Locked coins are automatically unlocked when manually selected
res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, inputs=[utxo1], add_to_wallet=False)
assert res["complete"]
self.log.info("Replaceable...")
res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, add_to_wallet=True, replaceable=True)
assert res["complete"]
assert_equal(self.nodes[0].gettransaction(res["txid"])["bip125-replaceable"], "yes")
res = self.test_send(from_wallet=w0, to_wallet=w1, amount=1, add_to_wallet=True, replaceable=False)
assert res["complete"]
assert_equal(self.nodes[0].gettransaction(res["txid"])["bip125-replaceable"], "no")
self.log.info("Subtract fee from output")
self.test_send(from_wallet=w0, to_wallet=w1, amount=1, subtract_fee_from_outputs=[0])
self.log.info("Include unsafe inputs")
self.nodes[1].createwallet(wallet_name="w5")
w5 = self.nodes[1].get_wallet_rpc("w5")
self.test_send(from_wallet=w0, to_wallet=w5, amount=2)
self.test_send(from_wallet=w5, to_wallet=w0, amount=1, expect_error=(-4, "Insufficient funds"))
res = self.test_send(from_wallet=w5, to_wallet=w0, amount=1, include_unsafe=True)
assert res["complete"]
self.log.info("Minconf")
self.nodes[1].createwallet(wallet_name="minconfw")
        minconfw = self.nodes[1].get_wallet_rpc("minconfw")
self.test_send(from_wallet=w0, to_wallet=minconfw, amount=2)
self.generate(self.nodes[0], 3)
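        # The funding transaction now has exactly 3 confirmations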
self.test_send(from_wallet=minconfw, to_wallet=w0, amount=1, minconf=4, expect_error=(-4, "Insufficient funds"))
self.test_send(from_wallet=minconfw, to_wallet=w0, amount=1, minconf=-4, expect_error=(-8, "Negative minconf"))
res = self.test_send(from_wallet=minconfw, to_wallet=w0, amount=1, minconf=3)
assert res["complete"]
self.log.info("External outputs")
privkey, _ = generate_keypair(wif=True)
self.nodes[1].createwallet("extsend")
ext_wallet = self.nodes[1].get_wallet_rpc("extsend")
self.nodes[1].createwallet("extfund")
ext_fund = self.nodes[1].get_wallet_rpc("extfund")
# Make a weird but signable script. sh(wsh(pkh())) descriptor accomplishes this
desc = descsum_create("sh(wsh(pkh({})))".format(privkey))
if self.options.descriptors:
res = ext_fund.importdescriptors([{"desc": desc, "timestamp": "now"}])
else:
res = ext_fund.importmulti([{"desc": desc, "timestamp": "now"}])
assert res[0]["success"]
addr = self.nodes[0].deriveaddresses(desc)[0]
addr_info = ext_fund.getaddressinfo(addr)
self.nodes[0].sendtoaddress(addr, 10)
self.nodes[0].sendtoaddress(ext_wallet.getnewaddress(), 10)
self.generate(self.nodes[0], 6)
ext_utxo = ext_fund.listunspent(addresses=[addr])[0]
# An external input without solving data should result in an error
self.test_send(from_wallet=ext_wallet, to_wallet=self.nodes[0], amount=15, inputs=[ext_utxo], add_inputs=True, psbt=True, include_watching=True, expect_error=(-4, "Not solvable pre-selected input COutPoint(%s, %s)" % (ext_utxo["txid"][0:10], ext_utxo["vout"])))
# But funding should work when the solving data is provided
res = self.test_send(from_wallet=ext_wallet, to_wallet=self.nodes[0], amount=15, inputs=[ext_utxo], add_inputs=True, psbt=True, include_watching=True, solving_data={"pubkeys": [addr_info['pubkey']], "scripts": [addr_info["embedded"]["scriptPubKey"], addr_info["embedded"]["embedded"]["scriptPubKey"]]})
signed = ext_wallet.walletprocesspsbt(res["psbt"])
        signed = ext_fund.walletprocesspsbt(signed["psbt"])
assert signed["complete"]
res = self.test_send(from_wallet=ext_wallet, to_wallet=self.nodes[0], amount=15, inputs=[ext_utxo], add_inputs=True, psbt=True, include_watching=True, solving_data={"descriptors": [desc]})
signed = ext_wallet.walletprocesspsbt(res["psbt"])
        signed = ext_fund.walletprocesspsbt(signed["psbt"])
assert signed["complete"]
dec = self.nodes[0].decodepsbt(signed["psbt"])
for i, txin in enumerate(dec["tx"]["vin"]):
if txin["txid"] == ext_utxo["txid"] and txin["vout"] == ext_utxo["vout"]:
input_idx = i
break
psbt_in = dec["inputs"][input_idx]
# Calculate the input weight
# (prevout + sequence + length of scriptSig + scriptsig + 1 byte buffer) * WITNESS_SCALE_FACTOR + num scriptWitness stack items + (length of stack item + stack item) * N stack items + 1 byte buffer
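        # For illustration (assuming a 72-byte signature and 33-byte pubkey): a P2WPKH input has
        # an empty scriptSig, so len_scriptsig = 1 (compact size) + 1 (buffer) = 2 and
        # len_scriptwitness = (72 + 1) + (33 + 1) + 2 + 1 = 110, giving (40 + 2) * 4 + 110 = 278 WU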
len_scriptsig = len(psbt_in["final_scriptSig"]["hex"]) // 2 if "final_scriptSig" in psbt_in else 0
len_scriptsig += len(ser_compact_size(len_scriptsig)) + 1
len_scriptwitness = (sum([(len(x) // 2) + len(ser_compact_size(len(x) // 2)) for x in psbt_in["final_scriptwitness"]]) + len(psbt_in["final_scriptwitness"]) + 1) if "final_scriptwitness" in psbt_in else 0
input_weight = ((40 + len_scriptsig) * WITNESS_SCALE_FACTOR) + len_scriptwitness
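        # input_weight is in weight units (WU); the input contributes ceil(input_weight / 4) vbytes to the transaction's vsize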
# Input weight error conditions
assert_raises_rpc_error(
-8,
"Input weights should be specified in inputs rather than in options.",
ext_wallet.send,
outputs={self.nodes[0].getnewaddress(): 15},
options={"inputs": [ext_utxo], "input_weights": [{"txid": ext_utxo["txid"], "vout": ext_utxo["vout"], "weight": 1000}]}
)
# Funding should also work when input weights are provided
res = self.test_send(
from_wallet=ext_wallet,
to_wallet=self.nodes[0],
amount=15,
inputs=[{"txid": ext_utxo["txid"], "vout": ext_utxo["vout"], "weight": input_weight}],
add_inputs=True,
psbt=True,
include_watching=True,
fee_rate=10
)
signed = ext_wallet.walletprocesspsbt(res["psbt"])
        signed = ext_fund.walletprocesspsbt(signed["psbt"])
assert signed["complete"]
testres = self.nodes[0].testmempoolaccept([signed["hex"]])[0]
assert_equal(testres["allowed"], True)
assert_fee_amount(testres["fees"]["base"], testres["vsize"], Decimal(0.0001))
if __name__ == '__main__':
WalletSendTest().main()
bitcoin/test | bitcoin/test/functional/test_runner.py | #!/usr/bin/env python3
# Copyright (c) 2014-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Run regression test suite.
This module calls down into individual test cases via subprocess. It will
forward all unrecognized arguments onto the individual test scripts.
For a description of arguments recognized by test scripts, see
`test/functional/test_framework/test_framework.py:BitcoinTestFramework.main`.
"""
import argparse
from collections import deque
import configparser
import datetime
import os
import time
import shutil
import signal
import subprocess
import sys
import tempfile
import re
import logging
import unittest
os.environ["REQUIRE_WALLET_TYPE_SET"] = "1"
# Formatting. Default colors to empty strings.
DEFAULT, BOLD, GREEN, RED = ("", ""), ("", ""), ("", ""), ("", "")
try:
# Make sure python thinks it can write unicode to its stdout
"\u2713".encode("utf_8").decode(sys.stdout.encoding)
TICK = "✓ "
CROSS = "✖ "
CIRCLE = "○ "
except UnicodeDecodeError:
TICK = "P "
CROSS = "x "
CIRCLE = "o "
if os.name != 'nt' or sys.getwindowsversion() >= (10, 0, 14393): #type:ignore
if os.name == 'nt':
import ctypes
kernel32 = ctypes.windll.kernel32 # type: ignore
ENABLE_VIRTUAL_TERMINAL_PROCESSING = 4
STD_OUTPUT_HANDLE = -11
STD_ERROR_HANDLE = -12
# Enable ascii color control to stdout
stdout = kernel32.GetStdHandle(STD_OUTPUT_HANDLE)
stdout_mode = ctypes.c_int32()
kernel32.GetConsoleMode(stdout, ctypes.byref(stdout_mode))
kernel32.SetConsoleMode(stdout, stdout_mode.value | ENABLE_VIRTUAL_TERMINAL_PROCESSING)
# Enable ascii color control to stderr
stderr = kernel32.GetStdHandle(STD_ERROR_HANDLE)
stderr_mode = ctypes.c_int32()
kernel32.GetConsoleMode(stderr, ctypes.byref(stderr_mode))
kernel32.SetConsoleMode(stderr, stderr_mode.value | ENABLE_VIRTUAL_TERMINAL_PROCESSING)
# primitive formatting on supported
# terminal via ANSI escape sequences:
DEFAULT = ('\033[0m', '\033[0m')
BOLD = ('\033[0m', '\033[1m')
GREEN = ('\033[0m', '\033[0;32m')
RED = ('\033[0m', '\033[0;31m')
TEST_EXIT_PASSED = 0
TEST_EXIT_SKIPPED = 77
# List of framework modules containing unit tests. Should be kept in sync with
# the output of `git grep unittest.TestCase ./test/functional/test_framework`
TEST_FRAMEWORK_MODULES = [
"address",
"crypto.bip324_cipher",
"blocktools",
"crypto.chacha20",
"crypto.ellswift",
"key",
"messages",
"crypto.muhash",
"crypto.poly1305",
"crypto.ripemd160",
"script",
"segwit_addr",
]
EXTENDED_SCRIPTS = [
# These tests are not run by default.
# Longest test should go first, to favor running tests in parallel
'feature_pruning.py',
'feature_dbcrash.py',
'feature_index_prune.py',
'wallet_pruning.py --legacy-wallet',
]
BASE_SCRIPTS = [
# Scripts that are run by default.
# Longest test should go first, to favor running tests in parallel
# vv Tests less than 5m vv
'feature_fee_estimation.py',
'feature_taproot.py',
'feature_block.py',
# vv Tests less than 2m vv
'mining_getblocktemplate_longpoll.py',
'p2p_segwit.py',
'feature_maxuploadtarget.py',
'mempool_updatefromblock.py',
'mempool_persist.py --descriptors',
# vv Tests less than 60s vv
'rpc_psbt.py --legacy-wallet',
'rpc_psbt.py --descriptors',
'wallet_fundrawtransaction.py --legacy-wallet',
'wallet_fundrawtransaction.py --descriptors',
'wallet_bumpfee.py --legacy-wallet',
'wallet_bumpfee.py --descriptors',
'wallet_import_rescan.py --legacy-wallet',
'wallet_backup.py --legacy-wallet',
'wallet_backup.py --descriptors',
'feature_segwit.py --legacy-wallet',
'feature_segwit.py --descriptors',
'feature_segwit.py --descriptors --v2transport',
'p2p_tx_download.py',
'wallet_avoidreuse.py --legacy-wallet',
'wallet_avoidreuse.py --descriptors',
'feature_abortnode.py',
'wallet_address_types.py --legacy-wallet',
'wallet_address_types.py --descriptors',
'p2p_orphan_handling.py',
'wallet_basic.py --legacy-wallet',
'wallet_basic.py --descriptors',
'feature_maxtipage.py',
'wallet_multiwallet.py --legacy-wallet',
'wallet_multiwallet.py --descriptors',
'wallet_multiwallet.py --usecli',
'p2p_dns_seeds.py',
'wallet_groups.py --legacy-wallet',
'wallet_groups.py --descriptors',
'p2p_blockfilters.py',
'feature_assumevalid.py',
'wallet_taproot.py --descriptors',
'feature_bip68_sequence.py',
'rpc_packages.py',
'rpc_bind.py --ipv4',
'rpc_bind.py --ipv6',
'rpc_bind.py --nonloopback',
'p2p_headers_sync_with_minchainwork.py',
'p2p_feefilter.py',
'feature_csv_activation.py',
'p2p_sendheaders.py',
'wallet_listtransactions.py --legacy-wallet',
'wallet_listtransactions.py --descriptors',
'wallet_miniscript.py --descriptors',
# vv Tests less than 30s vv
'p2p_invalid_messages.py',
'rpc_createmultisig.py',
'p2p_timeouts.py',
'p2p_timeouts.py --v2transport',
'wallet_dump.py --legacy-wallet',
'rpc_signer.py',
'wallet_signer.py --descriptors',
'wallet_importmulti.py --legacy-wallet',
'mempool_limit.py',
'rpc_txoutproof.py',
'wallet_listreceivedby.py --legacy-wallet',
'wallet_listreceivedby.py --descriptors',
'wallet_abandonconflict.py --legacy-wallet',
'wallet_abandonconflict.py --descriptors',
'feature_reindex.py',
'feature_reindex_readonly.py',
'wallet_labels.py --legacy-wallet',
'wallet_labels.py --descriptors',
'p2p_compactblocks.py',
'p2p_compactblocks_blocksonly.py',
'wallet_hd.py --legacy-wallet',
'wallet_hd.py --descriptors',
'wallet_blank.py --legacy-wallet',
'wallet_blank.py --descriptors',
'wallet_keypool_topup.py --legacy-wallet',
'wallet_keypool_topup.py --descriptors',
'wallet_fast_rescan.py --descriptors',
'interface_zmq.py',
'rpc_invalid_address_message.py',
'rpc_validateaddress.py',
'interface_bitcoin_cli.py --legacy-wallet',
'interface_bitcoin_cli.py --descriptors',
'feature_bind_extra.py',
'mempool_resurrect.py',
'wallet_txn_doublespend.py --mineblock',
'tool_wallet.py --legacy-wallet',
'tool_wallet.py --descriptors',
'tool_signet_miner.py --legacy-wallet',
'tool_signet_miner.py --descriptors',
'wallet_txn_clone.py',
'wallet_txn_clone.py --segwit',
'rpc_getchaintips.py',
'rpc_misc.py',
'interface_rest.py',
'mempool_spend_coinbase.py',
'wallet_avoid_mixing_output_types.py --descriptors',
'mempool_reorg.py',
'p2p_block_sync.py',
'p2p_block_sync.py --v2transport',
'wallet_createwallet.py --legacy-wallet',
'wallet_createwallet.py --usecli',
'wallet_createwallet.py --descriptors',
'wallet_watchonly.py --legacy-wallet',
'wallet_watchonly.py --usecli --legacy-wallet',
'wallet_reindex.py --legacy-wallet',
'wallet_reindex.py --descriptors',
'wallet_reorgsrestore.py',
'wallet_conflicts.py --legacy-wallet',
'wallet_conflicts.py --descriptors',
'interface_http.py',
'interface_rpc.py',
'interface_usdt_coinselection.py',
'interface_usdt_mempool.py',
'interface_usdt_net.py',
'interface_usdt_utxocache.py',
'interface_usdt_validation.py',
'rpc_users.py',
'rpc_whitelist.py',
'feature_proxy.py',
'wallet_signrawtransactionwithwallet.py --legacy-wallet',
'wallet_signrawtransactionwithwallet.py --descriptors',
'rpc_signrawtransactionwithkey.py',
'rpc_rawtransaction.py --legacy-wallet',
'wallet_transactiontime_rescan.py --descriptors',
'wallet_transactiontime_rescan.py --legacy-wallet',
'p2p_addrv2_relay.py',
'p2p_compactblocks_hb.py',
'p2p_compactblocks_hb.py --v2transport',
'p2p_disconnect_ban.py',
'p2p_disconnect_ban.py --v2transport',
'feature_posix_fs_permissions.py',
'rpc_decodescript.py',
'rpc_blockchain.py',
'rpc_blockchain.py --v2transport',
'rpc_deprecated.py',
'wallet_disable.py',
'wallet_change_address.py --legacy-wallet',
'wallet_change_address.py --descriptors',
'p2p_addr_relay.py',
'p2p_getaddr_caching.py',
'p2p_getdata.py',
'p2p_addrfetch.py',
'rpc_net.py',
'rpc_net.py --v2transport',
'wallet_keypool.py --legacy-wallet',
'wallet_keypool.py --descriptors',
'wallet_descriptor.py --descriptors',
'p2p_nobloomfilter_messages.py',
'p2p_filter.py',
'rpc_setban.py',
'p2p_blocksonly.py',
'mining_prioritisetransaction.py',
'p2p_invalid_locator.py',
'p2p_invalid_block.py',
'p2p_invalid_block.py --v2transport',
'p2p_invalid_tx.py',
'p2p_invalid_tx.py --v2transport',
'p2p_v2_transport.py',
'example_test.py',
'wallet_txn_doublespend.py --legacy-wallet',
'wallet_multisig_descriptor_psbt.py --descriptors',
'wallet_txn_doublespend.py --descriptors',
'wallet_backwards_compatibility.py --legacy-wallet',
'wallet_backwards_compatibility.py --descriptors',
'wallet_txn_clone.py --mineblock',
'feature_notifications.py',
'rpc_getblockfilter.py',
'rpc_getblockfrompeer.py',
'rpc_invalidateblock.py',
'feature_utxo_set_hash.py',
'feature_rbf.py',
'mempool_packages.py',
'mempool_package_onemore.py',
'mempool_package_limits.py',
'feature_versionbits_warning.py',
'rpc_preciousblock.py',
'wallet_importprunedfunds.py --legacy-wallet',
'wallet_importprunedfunds.py --descriptors',
'p2p_leak_tx.py',
'p2p_leak_tx.py --v2transport',
'p2p_eviction.py',
'p2p_ibd_stalling.py',
'p2p_ibd_stalling.py --v2transport',
'p2p_net_deadlock.py',
'p2p_net_deadlock.py --v2transport',
'wallet_signmessagewithaddress.py',
'rpc_signmessagewithprivkey.py',
'rpc_generate.py',
'wallet_balance.py --legacy-wallet',
'wallet_balance.py --descriptors',
'p2p_initial_headers_sync.py',
'feature_nulldummy.py',
'mempool_accept.py',
'mempool_expiry.py',
'wallet_import_with_label.py --legacy-wallet',
'wallet_importdescriptors.py --descriptors',
'wallet_upgradewallet.py --legacy-wallet',
'wallet_crosschain.py',
'mining_basic.py',
'feature_signet.py',
'wallet_implicitsegwit.py --legacy-wallet',
'rpc_named_arguments.py',
'feature_startupnotify.py',
'wallet_simulaterawtx.py --legacy-wallet',
'wallet_simulaterawtx.py --descriptors',
'wallet_listsinceblock.py --legacy-wallet',
'wallet_listsinceblock.py --descriptors',
'wallet_listdescriptors.py --descriptors',
'p2p_leak.py',
'wallet_encryption.py --legacy-wallet',
'wallet_encryption.py --descriptors',
'feature_dersig.py',
'feature_cltv.py',
'rpc_uptime.py',
'feature_discover.py',
'wallet_resendwallettransactions.py --legacy-wallet',
'wallet_resendwallettransactions.py --descriptors',
'wallet_fallbackfee.py --legacy-wallet',
'wallet_fallbackfee.py --descriptors',
'rpc_dumptxoutset.py',
'feature_minchainwork.py',
'rpc_estimatefee.py',
'rpc_getblockstats.py',
'feature_bind_port_externalip.py',
'wallet_create_tx.py --legacy-wallet',
'wallet_send.py --legacy-wallet',
'wallet_send.py --descriptors',
'wallet_sendall.py --legacy-wallet',
'wallet_sendall.py --descriptors',
'wallet_create_tx.py --descriptors',
'wallet_inactive_hdchains.py --legacy-wallet',
'wallet_spend_unconfirmed.py',
'p2p_fingerprint.py',
'feature_uacomment.py',
'feature_init.py',
'wallet_coinbase_category.py --legacy-wallet',
'wallet_coinbase_category.py --descriptors',
'feature_filelock.py',
'feature_loadblock.py',
'feature_assumeutxo.py',
'p2p_dos_header_tree.py',
'p2p_add_connections.py',
'feature_bind_port_discover.py',
'p2p_unrequested_blocks.py',
'p2p_message_capture.py',
'feature_includeconf.py',
'feature_addrman.py',
'feature_asmap.py',
'feature_fastprune.py',
'mempool_unbroadcast.py',
'mempool_compatibility.py',
'mempool_accept_wtxid.py',
'mempool_dust.py',
'mempool_sigoplimit.py',
'rpc_deriveaddresses.py',
'rpc_deriveaddresses.py --usecli',
'p2p_ping.py',
'p2p_tx_privacy.py',
'rpc_scanblocks.py',
'p2p_sendtxrcncl.py',
'rpc_scantxoutset.py',
'feature_unsupported_utxo_db.py',
'feature_logging.py',
'feature_anchors.py',
'mempool_datacarrier.py',
'feature_coinstatsindex.py',
'wallet_orphanedreward.py',
'wallet_timelock.py',
'p2p_node_network_limited.py',
'p2p_node_network_limited.py --v2transport',
'p2p_permissions.py',
'feature_blocksdir.py',
'wallet_startup.py',
'feature_remove_pruned_files_on_startup.py',
'p2p_i2p_ports.py',
'p2p_i2p_sessions.py',
'feature_config_args.py',
'feature_presegwit_node_upgrade.py',
'feature_settings.py',
'rpc_getdescriptorinfo.py',
'rpc_mempool_info.py',
'rpc_help.py',
'feature_dirsymlinks.py',
'feature_help.py',
'feature_shutdown.py',
'wallet_migration.py',
'p2p_ibd_txrelay.py',
# Don't append tests at the end to avoid merge conflicts
# Put them in a random line within the section that fits their approximate run-time
]
# Place EXTENDED_SCRIPTS first since it has the 3 longest running tests
ALL_SCRIPTS = EXTENDED_SCRIPTS + BASE_SCRIPTS
NON_SCRIPTS = [
# These are python files that live in the functional tests directory, but are not test scripts.
"combine_logs.py",
"create_cache.py",
"test_runner.py",
]
def main():
# Parse arguments and pass through unrecognised args
parser = argparse.ArgumentParser(add_help=False,
usage='%(prog)s [test_runner.py options] [script options] [scripts]',
description=__doc__,
epilog='''
Help text and arguments for individual test script:''',
formatter_class=argparse.RawTextHelpFormatter)
parser.add_argument('--ansi', action='store_true', default=sys.stdout.isatty(), help="Use ANSI colors and dots in output (enabled by default when standard output is a TTY)")
parser.add_argument('--combinedlogslen', '-c', type=int, default=0, metavar='n', help='On failure, print a log (of length n lines) to the console, combined from the test framework and all test nodes.')
parser.add_argument('--coverage', action='store_true', help='generate a basic coverage report for the RPC interface')
parser.add_argument('--ci', action='store_true', help='Run checks and code that are usually only enabled in a continuous integration environment')
    parser.add_argument('--exclude', '-x', help='specify a comma-separated list of scripts to exclude.')
parser.add_argument('--extended', action='store_true', help='run the extended test suite in addition to the basic tests')
parser.add_argument('--help', '-h', '-?', action='store_true', help='print help text and exit')
parser.add_argument('--jobs', '-j', type=int, default=4, help='how many test scripts to run in parallel. Default=4.')
parser.add_argument('--keepcache', '-k', action='store_true', help='the default behavior is to flush the cache directory on startup. --keepcache retains the cache from the previous testrun.')
parser.add_argument('--quiet', '-q', action='store_true', help='only print dots, results summary and failure logs')
parser.add_argument('--tmpdirprefix', '-t', default=tempfile.gettempdir(), help="Root directory for datadirs")
parser.add_argument('--failfast', '-F', action='store_true', help='stop execution after the first test failure')
parser.add_argument('--filter', help='filter scripts to run by regular expression')
args, unknown_args = parser.parse_known_args()
if not args.ansi:
global DEFAULT, BOLD, GREEN, RED
DEFAULT = ("", "")
BOLD = ("", "")
GREEN = ("", "")
RED = ("", "")
# args to be passed on always start with two dashes; tests are the remaining unknown args
tests = [arg for arg in unknown_args if arg[:2] != "--"]
passon_args = [arg for arg in unknown_args if arg[:2] == "--"]
# Read config generated by configure.
config = configparser.ConfigParser()
configfile = os.path.abspath(os.path.dirname(__file__)) + "/../config.ini"
config.read_file(open(configfile, encoding="utf8"))
passon_args.append("--configfile=%s" % configfile)
# Set up logging
logging_level = logging.INFO if args.quiet else logging.DEBUG
logging.basicConfig(format='%(message)s', level=logging_level)
# Create base test directory
tmpdir = "%s/test_runner_₿_🏃_%s" % (args.tmpdirprefix, datetime.datetime.now().strftime("%Y%m%d_%H%M%S"))
os.makedirs(tmpdir)
logging.debug("Temporary test directory at %s" % tmpdir)
enable_bitcoind = config["components"].getboolean("ENABLE_BITCOIND")
if not enable_bitcoind:
print("No functional tests to run.")
print("Rerun ./configure with --with-daemon and then make")
sys.exit(0)
# Build list of tests
test_list = []
if tests:
# Individual tests have been specified. Run specified tests that exist
# in the ALL_SCRIPTS list. Accept names with or without a .py extension.
# Specified tests can contain wildcards, but in that case the supplied
# paths should be coherent, e.g. the same path as that provided to call
# test_runner.py. Examples:
# `test/functional/test_runner.py test/functional/wallet*`
# `test/functional/test_runner.py ./test/functional/wallet*`
# `test_runner.py wallet*`
# but not:
# `test/functional/test_runner.py wallet*`
# Multiple wildcards can be passed:
# `test_runner.py tool* mempool*`
for test in tests:
script = test.split("/")[-1]
script = script + ".py" if ".py" not in script else script
matching_scripts = [s for s in ALL_SCRIPTS if s.startswith(script)]
if matching_scripts:
test_list.extend(matching_scripts)
else:
print("{}WARNING!{} Test '{}' not found in full test list.".format(BOLD[1], BOLD[0], test))
elif args.extended:
# Include extended tests
test_list += ALL_SCRIPTS
else:
# Run base tests only
test_list += BASE_SCRIPTS
# Remove the test cases that the user has explicitly asked to exclude.
if args.exclude:
exclude_tests = [test.split('.py')[0] for test in args.exclude.split(',')]
for exclude_test in exclude_tests:
# Remove <test_name>.py and <test_name>.py --arg from the test list
exclude_list = [test for test in test_list if test.split('.py')[0] == exclude_test]
for exclude_item in exclude_list:
test_list.remove(exclude_item)
if not exclude_list:
print("{}WARNING!{} Test '{}' not found in current test list.".format(BOLD[1], BOLD[0], exclude_test))
if args.filter:
test_list = list(filter(re.compile(args.filter).search, test_list))
if not test_list:
print("No valid test scripts specified. Check that your test is in one "
"of the test lists in test_runner.py, or run test_runner.py with no arguments to run all tests")
sys.exit(0)
if args.help:
# Print help for test_runner.py, then print help of the first script (with args removed) and exit.
parser.print_help()
subprocess.check_call([sys.executable, os.path.join(config["environment"]["SRCDIR"], 'test', 'functional', test_list[0].split()[0]), '-h'])
sys.exit(0)
check_script_list(src_dir=config["environment"]["SRCDIR"], fail_on_warn=args.ci)
check_script_prefixes()
if not args.keepcache:
shutil.rmtree("%s/test/cache" % config["environment"]["BUILDDIR"], ignore_errors=True)
run_tests(
test_list=test_list,
src_dir=config["environment"]["SRCDIR"],
build_dir=config["environment"]["BUILDDIR"],
tmpdir=tmpdir,
jobs=args.jobs,
enable_coverage=args.coverage,
args=passon_args,
combined_logs_len=args.combinedlogslen,
failfast=args.failfast,
use_term_control=args.ansi,
)
def run_tests(*, test_list, src_dir, build_dir, tmpdir, jobs=1, enable_coverage=False, args=None, combined_logs_len=0, failfast=False, use_term_control):
args = args or []
# Warn if bitcoind is already running
try:
# pgrep exits with code zero when one or more matching processes found
if subprocess.run(["pgrep", "-x", "bitcoind"], stdout=subprocess.DEVNULL).returncode == 0:
print("%sWARNING!%s There is already a bitcoind process running on this system. Tests may fail unexpectedly due to resource contention!" % (BOLD[1], BOLD[0]))
except OSError:
# pgrep not supported
pass
# Warn if there is a cache directory
cache_dir = "%s/test/cache" % build_dir
if os.path.isdir(cache_dir):
print("%sWARNING!%s There is a cache directory here: %s. If tests fail unexpectedly, try deleting the cache directory." % (BOLD[1], BOLD[0], cache_dir))
# Test Framework Tests
print("Running Unit Tests for Test Framework Modules")
tests_dir = src_dir + '/test/functional/'
# This allows `test_runner.py` to work from an out-of-source build directory using a symlink,
# a hard link or a copy on any platform. See https://github.com/bitcoin/bitcoin/pull/27561.
sys.path.append(tests_dir)
test_framework_tests = unittest.TestSuite()
for module in TEST_FRAMEWORK_MODULES:
test_framework_tests.addTest(unittest.TestLoader().loadTestsFromName("test_framework.{}".format(module)))
result = unittest.TextTestRunner(verbosity=1, failfast=True).run(test_framework_tests)
if not result.wasSuccessful():
sys.exit("Early exiting after failure in TestFramework unit tests")
flags = ['--cachedir={}'.format(cache_dir)] + args
if enable_coverage:
coverage = RPCCoverage()
flags.append(coverage.flag)
logging.debug("Initializing coverage directory at %s" % coverage.dir)
else:
coverage = None
if len(test_list) > 1 and jobs > 1:
# Populate cache
try:
subprocess.check_output([sys.executable, tests_dir + 'create_cache.py'] + flags + ["--tmpdir=%s/cache" % tmpdir])
except subprocess.CalledProcessError as e:
sys.stdout.buffer.write(e.output)
raise
    # Run Tests
job_queue = TestHandler(
num_tests_parallel=jobs,
tests_dir=tests_dir,
tmpdir=tmpdir,
test_list=test_list,
flags=flags,
use_term_control=use_term_control,
)
start_time = time.time()
test_results = []
max_len_name = len(max(test_list, key=len))
test_count = len(test_list)
all_passed = True
i = 0
while i < test_count:
if failfast and not all_passed:
break
for test_result, testdir, stdout, stderr, skip_reason in job_queue.get_next():
test_results.append(test_result)
i += 1
done_str = "{}/{} - {}{}{}".format(i, test_count, BOLD[1], test_result.name, BOLD[0])
if test_result.status == "Passed":
logging.debug("%s passed, Duration: %s s" % (done_str, test_result.time))
elif test_result.status == "Skipped":
logging.debug(f"{done_str} skipped ({skip_reason})")
else:
all_passed = False
print("%s failed, Duration: %s s\n" % (done_str, test_result.time))
print(BOLD[1] + 'stdout:\n' + BOLD[0] + stdout + '\n')
print(BOLD[1] + 'stderr:\n' + BOLD[0] + stderr + '\n')
if combined_logs_len and os.path.isdir(testdir):
# Print the final `combinedlogslen` lines of the combined logs
print('{}Combine the logs and print the last {} lines ...{}'.format(BOLD[1], combined_logs_len, BOLD[0]))
print('\n============')
print('{}Combined log for {}:{}'.format(BOLD[1], testdir, BOLD[0]))
print('============\n')
combined_logs_args = [sys.executable, os.path.join(tests_dir, 'combine_logs.py'), testdir]
if BOLD[0]:
combined_logs_args += ['--color']
combined_logs, _ = subprocess.Popen(combined_logs_args, text=True, stdout=subprocess.PIPE).communicate()
print("\n".join(deque(combined_logs.splitlines(), combined_logs_len)))
if failfast:
logging.debug("Early exiting after test failure")
break
print_results(test_results, max_len_name, (int(time.time() - start_time)))
if coverage:
coverage_passed = coverage.report_rpc_coverage()
logging.debug("Cleaning up coverage data")
coverage.cleanup()
else:
coverage_passed = True
# Clear up the temp directory if all subdirectories are gone
if not os.listdir(tmpdir):
os.rmdir(tmpdir)
all_passed = all_passed and coverage_passed
# Clean up dangling processes if any. This may only happen with --failfast option.
# Killing the process group will also terminate the current process but that is
# not an issue
if not os.getenv("CI_FAILFAST_TEST_LEAVE_DANGLING") and len(job_queue.jobs):
os.killpg(os.getpgid(0), signal.SIGKILL)
sys.exit(not all_passed)
def print_results(test_results, max_len_name, runtime):
results = "\n" + BOLD[1] + "%s | %s | %s\n\n" % ("TEST".ljust(max_len_name), "STATUS ", "DURATION") + BOLD[0]
test_results.sort(key=TestResult.sort_key)
all_passed = True
time_sum = 0
for test_result in test_results:
all_passed = all_passed and test_result.was_successful
time_sum += test_result.time
test_result.padding = max_len_name
results += str(test_result)
status = TICK + "Passed" if all_passed else CROSS + "Failed"
if not all_passed:
results += RED[1]
results += BOLD[1] + "\n%s | %s | %s s (accumulated) \n" % ("ALL".ljust(max_len_name), status.ljust(9), time_sum) + BOLD[0]
if not all_passed:
results += RED[0]
results += "Runtime: %s s\n" % (runtime)
print(results)
class TestHandler:
"""
Trigger the test scripts passed in via the list.
"""
def __init__(self, *, num_tests_parallel, tests_dir, tmpdir, test_list, flags, use_term_control):
assert num_tests_parallel >= 1
self.num_jobs = num_tests_parallel
self.tests_dir = tests_dir
self.tmpdir = tmpdir
self.test_list = test_list
self.flags = flags
self.num_running = 0
self.jobs = []
self.use_term_control = use_term_control
def get_next(self):
while self.num_running < self.num_jobs and self.test_list:
# Add tests
self.num_running += 1
test = self.test_list.pop(0)
portseed = len(self.test_list)
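            # len(self.test_list) shrinks as tests are started, so each concurrently running job
            # gets a unique portseed and therefore a non-colliding set of P2P/RPC ports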
portseed_arg = ["--portseed={}".format(portseed)]
log_stdout = tempfile.SpooledTemporaryFile(max_size=2**16)
log_stderr = tempfile.SpooledTemporaryFile(max_size=2**16)
test_argv = test.split()
testdir = "{}/{}_{}".format(self.tmpdir, re.sub(".py$", "", test_argv[0]), portseed)
tmpdir_arg = ["--tmpdir={}".format(testdir)]
self.jobs.append((test,
time.time(),
subprocess.Popen([sys.executable, self.tests_dir + test_argv[0]] + test_argv[1:] + self.flags + portseed_arg + tmpdir_arg,
text=True,
stdout=log_stdout,
stderr=log_stderr),
testdir,
log_stdout,
log_stderr))
if not self.jobs:
raise IndexError('pop from empty list')
# Print remaining running jobs when all jobs have been started.
if not self.test_list:
print("Remaining jobs: [{}]".format(", ".join(j[0] for j in self.jobs)))
dot_count = 0
while True:
# Return all procs that have finished, if any. Otherwise sleep until there is one.
time.sleep(.5)
ret = []
for job in self.jobs:
(name, start_time, proc, testdir, log_out, log_err) = job
if proc.poll() is not None:
log_out.seek(0), log_err.seek(0)
[stdout, stderr] = [log_file.read().decode('utf-8') for log_file in (log_out, log_err)]
log_out.close(), log_err.close()
skip_reason = None
if proc.returncode == TEST_EXIT_PASSED and stderr == "":
status = "Passed"
elif proc.returncode == TEST_EXIT_SKIPPED:
status = "Skipped"
skip_reason = re.search(r"Test Skipped: (.*)", stdout).group(1)
else:
status = "Failed"
self.num_running -= 1
self.jobs.remove(job)
if self.use_term_control:
clearline = '\r' + (' ' * dot_count) + '\r'
print(clearline, end='', flush=True)
dot_count = 0
ret.append((TestResult(name, status, int(time.time() - start_time)), testdir, stdout, stderr, skip_reason))
if ret:
return ret
if self.use_term_control:
print('.', end='', flush=True)
dot_count += 1
class TestResult():
def __init__(self, name, status, time):
self.name = name
self.status = status
self.time = time
self.padding = 0
def sort_key(self):
if self.status == "Passed":
return 0, self.name.lower()
elif self.status == "Failed":
return 2, self.name.lower()
elif self.status == "Skipped":
return 1, self.name.lower()
def __repr__(self):
if self.status == "Passed":
color = GREEN
glyph = TICK
elif self.status == "Failed":
color = RED
glyph = CROSS
elif self.status == "Skipped":
color = DEFAULT
glyph = CIRCLE
return color[1] + "%s | %s%s | %s s\n" % (self.name.ljust(self.padding), glyph, self.status.ljust(7), self.time) + color[0]
@property
def was_successful(self):
return self.status != "Failed"
def check_script_prefixes():
"""Check that test scripts start with one of the allowed name prefixes."""
good_prefixes_re = re.compile("^(example|feature|interface|mempool|mining|p2p|rpc|wallet|tool)_")
bad_script_names = [script for script in ALL_SCRIPTS if good_prefixes_re.match(script) is None]
if bad_script_names:
print("%sERROR:%s %d tests not meeting naming conventions:" % (BOLD[1], BOLD[0], len(bad_script_names)))
print(" %s" % ("\n ".join(sorted(bad_script_names))))
raise AssertionError("Some tests are not following naming convention!")
def check_script_list(*, src_dir, fail_on_warn):
"""Check scripts directory.
Check that all python files in this directory are categorized
as a test script or meta script."""
script_dir = src_dir + '/test/functional/'
python_files = set([test_file for test_file in os.listdir(script_dir) if test_file.endswith(".py")])
missed_tests = list(python_files - set(map(lambda x: x.split()[0], ALL_SCRIPTS + NON_SCRIPTS)))
if len(missed_tests) != 0:
print("%sWARNING!%s The following scripts are not being run: %s. Check the test lists in test_runner.py." % (BOLD[1], BOLD[0], str(missed_tests)))
if fail_on_warn:
# On CI this warning is an error to prevent merging incomplete commits into master
sys.exit(1)
class RPCCoverage():
"""
Coverage reporting utilities for test_runner.
Coverage calculation works by having each test script subprocess write
coverage files into a particular directory. These files contain the RPC
commands invoked during testing, as well as a complete listing of RPC
commands per `bitcoin-cli help` (`rpc_interface.txt`).
After all tests complete, the commands run are combined and diff'd against
the complete list to calculate uncovered RPC commands.
See also: test/functional/test_framework/coverage.py
"""
def __init__(self):
self.dir = tempfile.mkdtemp(prefix="coverage")
self.flag = '--coveragedir=%s' % self.dir
def report_rpc_coverage(self):
"""
Print out RPC commands that were unexercised by tests.
"""
uncovered = self._get_uncovered_rpc_commands()
if uncovered:
print("Uncovered RPC commands:")
print("".join((" - %s\n" % command) for command in sorted(uncovered)))
return False
else:
print("All RPC commands covered.")
return True
def cleanup(self):
return shutil.rmtree(self.dir)
def _get_uncovered_rpc_commands(self):
"""
Return a set of currently untested RPC commands.
"""
# This is shared from `test/functional/test_framework/coverage.py`
reference_filename = 'rpc_interface.txt'
coverage_file_prefix = 'coverage.'
coverage_ref_filename = os.path.join(self.dir, reference_filename)
coverage_filenames = set()
all_cmds = set()
# Consider RPC generate covered, because it is overloaded in
# test_framework/test_node.py and not seen by the coverage check.
covered_cmds = set({'generate'})
if not os.path.isfile(coverage_ref_filename):
raise RuntimeError("No coverage reference found")
with open(coverage_ref_filename, 'r', encoding="utf8") as coverage_ref_file:
all_cmds.update([line.strip() for line in coverage_ref_file.readlines()])
for root, _, files in os.walk(self.dir):
for filename in files:
if filename.startswith(coverage_file_prefix):
coverage_filenames.add(os.path.join(root, filename))
for filename in coverage_filenames:
with open(filename, 'r', encoding="utf8") as coverage_file:
covered_cmds.update([line.strip() for line in coverage_file.readlines()])
return all_cmds - covered_cmds
if __name__ == '__main__':
main()
bitcoin/test | bitcoin/test/functional/feature_pruning.py | #!/usr/bin/env python3
# Copyright (c) 2014-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the pruning code.
WARNING:
This test uses 4GB of disk space.
This test takes 30 mins or more (up to 2 hours)
"""
import os
from test_framework.blocktools import (
MIN_BLOCKS_TO_KEEP,
create_block,
create_coinbase,
)
from test_framework.script import (
CScript,
OP_NOP,
OP_RETURN,
)
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_greater_than,
assert_raises_rpc_error,
)
# Rescans start at the earliest block up to 2 hours before a key timestamp, so
# the manual prune RPC avoids pruning blocks in the same window to be
# compatible with pruning based on key creation time.
TIMESTAMP_WINDOW = 2 * 60 * 60
def mine_large_blocks(node, n):
# Make a large scriptPubKey for the coinbase transaction. This is OP_RETURN
# followed by 950k of OP_NOP. This would be non-standard in a non-coinbase
# transaction but is consensus valid.
# Set the nTime if this is the first time this function has been called.
    # A static variable ensures that time is monotonically increasing and is therefore
    # different for each block created => blockhash is unique.
    if "nTime" not in mine_large_blocks.__dict__:
        mine_large_blocks.nTime = 0
# Get the block parameters for the first block
big_script = CScript([OP_RETURN] + [OP_NOP] * 950000)
best_block = node.getblock(node.getbestblockhash())
height = int(best_block["height"]) + 1
mine_large_blocks.nTime = max(mine_large_blocks.nTime, int(best_block["time"])) + 1
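    # Start one second past the later of the previous call's last time and the tip's time,
    # so every block mined here gets a strictly increasing timestamp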
previousblockhash = int(best_block["hash"], 16)
for _ in range(n):
block = create_block(hashprev=previousblockhash, ntime=mine_large_blocks.nTime, coinbase=create_coinbase(height, script_pubkey=big_script))
block.solve()
# Submit to the node
node.submitblock(block.serialize().hex())
previousblockhash = block.sha256
height += 1
mine_large_blocks.nTime += 1
def calc_usage(blockdir):
return sum(os.path.getsize(blockdir + f) for f in os.listdir(blockdir) if os.path.isfile(os.path.join(blockdir, f))) / (1024. * 1024.)
class PruneTest(BitcoinTestFramework):
def add_options(self, parser):
self.add_wallet_options(parser)
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 6
self.supports_cli = False
# Create nodes 0 and 1 to mine.
# Create node 2 to test pruning.
self.full_node_default_args = ["-maxreceivebuffer=20000", "-checkblocks=5"]
# Create nodes 3 and 4 to test manual pruning (they will be re-started with manual pruning later)
# Create nodes 5 to test wallet in prune mode, but do not connect
self.extra_args = [
self.full_node_default_args,
self.full_node_default_args,
["-maxreceivebuffer=20000", "-prune=550"],
["-maxreceivebuffer=20000"],
["-maxreceivebuffer=20000"],
["-prune=550", "-blockfilterindex=1"],
]
self.rpc_timeout = 120
def setup_network(self):
self.setup_nodes()
self.prunedir = os.path.join(self.nodes[2].blocks_path, '')
self.connect_nodes(0, 1)
self.connect_nodes(1, 2)
self.connect_nodes(0, 2)
self.connect_nodes(0, 3)
self.connect_nodes(0, 4)
self.sync_blocks(self.nodes[0:5])
def setup_nodes(self):
self.add_nodes(self.num_nodes, self.extra_args)
self.start_nodes()
if self.is_wallet_compiled():
self.import_deterministic_coinbase_privkeys()
def create_big_chain(self):
# Start by creating some coinbases we can spend later
self.generate(self.nodes[1], 200, sync_fun=lambda: self.sync_blocks(self.nodes[0:2]))
self.generate(self.nodes[0], 150, sync_fun=self.no_op)
# Then mine enough full blocks to create more than 550MiB of data
mine_large_blocks(self.nodes[0], 645)
self.sync_blocks(self.nodes[0:5])
def test_invalid_command_line_options(self):
self.stop_node(0)
self.nodes[0].assert_start_raises_init_error(
expected_msg='Error: Prune cannot be configured with a negative value.',
extra_args=['-prune=-1'],
)
self.nodes[0].assert_start_raises_init_error(
expected_msg='Error: Prune configured below the minimum of 550 MiB. Please use a higher number.',
extra_args=['-prune=549'],
)
self.nodes[0].assert_start_raises_init_error(
expected_msg='Error: Prune mode is incompatible with -txindex.',
extra_args=['-prune=550', '-txindex'],
)
self.nodes[0].assert_start_raises_init_error(
expected_msg='Error: Prune mode is incompatible with -reindex-chainstate. Use full -reindex instead.',
extra_args=['-prune=550', '-reindex-chainstate'],
)
def test_rescan_blockchain(self):
self.restart_node(0, ["-prune=550"])
assert_raises_rpc_error(-1, "Can't rescan beyond pruned data. Use RPC call getblockchaininfo to determine your pruned height.", self.nodes[0].rescanblockchain)
def test_height_min(self):
assert os.path.isfile(os.path.join(self.prunedir, "blk00000.dat")), "blk00000.dat is missing, pruning too early"
self.log.info("Success")
self.log.info(f"Though we're already using more than 550MiB, current usage: {calc_usage(self.prunedir)}")
self.log.info("Mining 25 more blocks should cause the first block file to be pruned")
        # Pruning doesn't run until we're allocating another chunk; mining 20 full blocks past the height cutoff will ensure this
mine_large_blocks(self.nodes[0], 25)
# Wait for blk00000.dat to be pruned
self.wait_until(lambda: not os.path.isfile(os.path.join(self.prunedir, "blk00000.dat")), timeout=30)
self.log.info("Success")
usage = calc_usage(self.prunedir)
self.log.info(f"Usage should be below target: {usage}")
assert_greater_than(550, usage)
def create_chain_with_staleblocks(self):
# Create stale blocks in manageable sized chunks
self.log.info("Mine 24 (stale) blocks on Node 1, followed by 25 (main chain) block reorg from Node 0, for 12 rounds")
for _ in range(12):
# Disconnect node 0 so it can mine a longer reorg chain without knowing about node 1's soon-to-be-stale chain
            # Node 2 stays connected, so it hears about the stale blocks and then reorgs when node 0 reconnects
self.disconnect_nodes(0, 1)
self.disconnect_nodes(0, 2)
# Mine 24 blocks in node 1
mine_large_blocks(self.nodes[1], 24)
# Reorg back with 25 block chain from node 0
mine_large_blocks(self.nodes[0], 25)
            # Create connections in this order so both nodes can see the reorg at the same time
self.connect_nodes(0, 1)
self.connect_nodes(0, 2)
self.sync_blocks(self.nodes[0:3])
self.log.info(f"Usage can be over target because of high stale rate: {calc_usage(self.prunedir)}")
def reorg_test(self):
# Node 1 will mine a 300 block chain starting 287 blocks back from Node 0 and Node 2's tip
# This will cause Node 2 to do a reorg requiring 288 blocks of undo data to the reorg_test chain
height = self.nodes[1].getblockcount()
self.log.info(f"Current block height: {height}")
self.forkheight = height - 287
self.forkhash = self.nodes[1].getblockhash(self.forkheight)
self.log.info(f"Invalidating block {self.forkhash} at height {self.forkheight}")
self.nodes[1].invalidateblock(self.forkhash)
        # We've now switched to our previously mined 24-block fork on node 1, but that's not what we want
# So invalidate that fork as well, until we're on the same chain as node 0/2 (but at an ancestor 288 blocks ago)
mainchainhash = self.nodes[0].getblockhash(self.forkheight - 1)
curhash = self.nodes[1].getblockhash(self.forkheight - 1)
while curhash != mainchainhash:
self.nodes[1].invalidateblock(curhash)
curhash = self.nodes[1].getblockhash(self.forkheight - 1)
assert self.nodes[1].getblockcount() == self.forkheight - 1
self.log.info(f"New best height: {self.nodes[1].getblockcount()}")
# Disconnect node1 and generate the new chain
self.disconnect_nodes(0, 1)
self.disconnect_nodes(1, 2)
self.log.info("Generating new longer chain of 300 more blocks")
self.generate(self.nodes[1], 300, sync_fun=self.no_op)
self.log.info("Reconnect nodes")
self.connect_nodes(0, 1)
self.connect_nodes(1, 2)
self.sync_blocks(self.nodes[0:3], timeout=120)
self.log.info(f"Verify height on node 2: {self.nodes[2].getblockcount()}")
self.log.info(f"Usage possibly still high because of stale blocks in block files: {calc_usage(self.prunedir)}")
self.log.info("Mine 220 more large blocks so we have requisite history")
mine_large_blocks(self.nodes[0], 220)
self.sync_blocks(self.nodes[0:3], timeout=120)
usage = calc_usage(self.prunedir)
self.log.info(f"Usage should be below target: {usage}")
assert_greater_than(550, usage)
def reorg_back(self):
# Verify that a block on the old main chain fork has been pruned away
assert_raises_rpc_error(-1, "Block not available (pruned data)", self.nodes[2].getblock, self.forkhash)
with self.nodes[2].assert_debug_log(expected_msgs=['block verification stopping at height', '(no data)']):
assert not self.nodes[2].verifychain(checklevel=4, nblocks=0)
self.log.info(f"Will need to redownload block {self.forkheight}")
# Verify that we have enough history to reorg back to the fork point
# Although this is more than 288 blocks, because this chain was written more recently
# and only its other 299 small and 220 large blocks are in the block files after it,
# it is expected to still be retained
self.nodes[2].getblock(self.nodes[2].getblockhash(self.forkheight))
first_reorg_height = self.nodes[2].getblockcount()
curchainhash = self.nodes[2].getblockhash(self.mainchainheight)
self.nodes[2].invalidateblock(curchainhash)
goalbestheight = self.mainchainheight
goalbesthash = self.mainchainhash2
# As of 0.10 the current block download logic is not able to reorg to the original chain created in
# create_chain_with_stale_blocks because it doesn't know of any peer that's on that chain from which to
# redownload its missing blocks.
# Invalidate the reorg_test chain in node 0 as well, it can successfully switch to the original chain
# because it has all the block data.
# However it must mine enough blocks to have a more work chain than the reorg_test chain in order
# to trigger node 2's block download logic.
# At this point node 2 is within 288 blocks of the fork point so it will preserve its ability to reorg
if self.nodes[2].getblockcount() < self.mainchainheight:
blocks_to_mine = first_reorg_height + 1 - self.mainchainheight
self.log.info(f"Rewind node 0 to prev main chain to mine longer chain to trigger redownload. Blocks needed: {blocks_to_mine}")
self.nodes[0].invalidateblock(curchainhash)
assert_equal(self.nodes[0].getblockcount(), self.mainchainheight)
assert_equal(self.nodes[0].getbestblockhash(), self.mainchainhash2)
goalbesthash = self.generate(self.nodes[0], blocks_to_mine, sync_fun=self.no_op)[-1]
goalbestheight = first_reorg_height + 1
self.log.info("Verify node 2 reorged back to the main chain, some blocks of which it had to redownload")
# Wait for Node 2 to reorg to proper height
self.wait_until(lambda: self.nodes[2].getblockcount() >= goalbestheight, timeout=900)
assert_equal(self.nodes[2].getbestblockhash(), goalbesthash)
# Verify we can now have the data for a block previously pruned
assert_equal(self.nodes[2].getblock(self.forkhash)["height"], self.forkheight)
def manual_test(self, node_number, use_timestamp):
# at this point, the node has 995 blocks and has not yet run in prune mode
self.start_node(node_number)
node = self.nodes[node_number]
assert_equal(node.getblockcount(), 995)
assert_raises_rpc_error(-1, "Cannot prune blocks because node is not in prune mode", node.pruneblockchain, 500)
# now re-start in manual pruning mode
self.restart_node(node_number, extra_args=["-prune=1"])
node = self.nodes[node_number]
assert_equal(node.getblockcount(), 995)
def height(index):
if use_timestamp:
return node.getblockheader(node.getblockhash(index))["time"] + TIMESTAMP_WINDOW
else:
return index
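# When use_timestamp is set, prune targets are expressed as block times instead of
# heights: pruneblockchain treats parameters above ~1e9 as UNIX timestamps, and
# TIMESTAMP_WINDOW (a two-hour allowance for block-time skew) is added so that the
# block at the requested index is covered by the timestamp.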
def prune(index):
ret = node.pruneblockchain(height=height(index))
assert_equal(ret + 1, node.getblockchaininfo()['pruneheight'])
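# Note: pruneblockchain returns the height of the last block pruned, while
# getblockchaininfo()['pruneheight'] reports the lowest-height complete block still
# stored, hence the off-by-one relationship asserted above.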
def has_block(index):
return os.path.isfile(os.path.join(self.nodes[node_number].blocks_path, f"blk{index:05}.dat"))
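# Block files are numbered sequentially (blk00000.dat, blk00001.dat, ...), each
# packing raw blocks up to a maximum file size; pruning removes whole files rather
# than individual blocks, which is exactly what has_block() probes for.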
# should not prune because the node's chain tip (995) < PruneAfterHeight (1000)
assert_raises_rpc_error(-1, "Blockchain is too short for pruning", node.pruneblockchain, height(500))
# Save block transaction count before pruning, assert value
block1_details = node.getblock(node.getblockhash(1))
assert_equal(block1_details["nTx"], len(block1_details["tx"]))
# mine 6 blocks so we are at height 1001 (i.e., above PruneAfterHeight)
self.generate(node, 6, sync_fun=self.no_op)
assert_equal(node.getblockchaininfo()["blocks"], 1001)
# prune parameter in the future (block or timestamp) should raise an exception
future_parameter = height(1001) + 5
if use_timestamp:
assert_raises_rpc_error(-8, "Could not find block with at least the specified timestamp", node.pruneblockchain, future_parameter)
else:
assert_raises_rpc_error(-8, "Blockchain is shorter than the attempted prune height", node.pruneblockchain, future_parameter)
# Pruned block should still know the number of transactions
assert_equal(node.getblockheader(node.getblockhash(1))["nTx"], block1_details["nTx"])
# negative heights should raise an exception
assert_raises_rpc_error(-8, "Negative block height", node.pruneblockchain, -10)
# height=100 too low to prune first block file so this is a no-op
prune(100)
assert has_block(0), "blk00000.dat is missing when it should still be there"
# Does nothing
node.pruneblockchain(height(0))
assert has_block(0), "blk00000.dat is missing when it should still be there"
# height=500 should prune first file
prune(500)
assert not has_block(0), "blk00000.dat is still there, should be pruned by now"
assert has_block(1), "blk00001.dat is missing when it should still be there"
# height=650 should prune second file
prune(650)
assert not has_block(1), "blk00001.dat is still there, should be pruned by now"
# height=1000 should not prune anything more, because tip-288 is in blk00002.dat.
prune(1000)
assert has_block(2), "blk00002.dat is missing when it should still be there"
# advance the tip so blk00002.dat and blk00003.dat can be pruned (the last 288 blocks should now be in blk00004.dat)
self.generate(node, MIN_BLOCKS_TO_KEEP, sync_fun=self.no_op)
prune(1000)
assert not has_block(2), "blk00002.dat is still there, should be pruned by now"
assert not has_block(3), "blk00003.dat is still there, should be pruned by now"
# stop node, start back up with auto-prune at 550 MiB, make sure still runs
self.restart_node(node_number, extra_args=["-prune=550"])
self.log.info("Success")
def wallet_test(self):
# check that the pruning node's wallet is still in good shape
self.log.info("Stop and start pruning node to trigger wallet rescan")
self.restart_node(2, extra_args=["-prune=550"])
self.log.info("Success")
# check that wallet loads successfully when restarting a pruned node after IBD.
# this was reported to fail in #7494.
self.log.info("Syncing node 5 to test wallet")
self.connect_nodes(0, 5)
nds = [self.nodes[0], self.nodes[5]]
self.sync_blocks(nds, wait=5, timeout=300)
self.restart_node(5, extra_args=["-prune=550", "-blockfilterindex=1"]) # restart to trigger rescan
self.log.info("Success")
def run_test(self):
self.log.info("Warning! This test requires 4GB of disk space")
self.log.info("Mining a big blockchain of 995 blocks")
self.create_big_chain()
# Chain diagram key:
# * blocks on main chain
# +,&,$,@ blocks on other forks
# X invalidated block
# N1 Node 1
#
# Start by mining a simple chain that all nodes have
# N0=N1=N2 **...*(995)
# stop manual-pruning node with 995 blocks
self.stop_node(3)
self.stop_node(4)
self.log.info("Check that we haven't started pruning yet because we're below PruneAfterHeight")
self.test_height_min()
# Extend this chain past the PruneAfterHeight
# N0=N1=N2 **...*(1020)
self.log.info("Check that we'll exceed disk space target if we have a very high stale block rate")
self.create_chain_with_staleblocks()
# Disconnect N0
# And mine a 24 block chain on N1 and a separate 25 block chain on N0
# N1=N2 **...*+...+(1044)
# N0 **...**...**(1045)
#
# reconnect nodes causing reorg on N1 and N2
# N1=N2 **...*(1020) *...**(1045)
# \
# +...+(1044)
#
# repeat this process until you have 12 stale forks hanging off the
# main chain on N1 and N2
# N0 *************************...***************************(1320)
#
# N1=N2 **...*(1020) *...**(1045) *.. ..**(1295) *...**(1320)
# \ \ \
# +...+(1044) &.. $...$(1319)
# Save some current chain state for later use
self.mainchainheight = self.nodes[2].getblockcount() # 1320
self.mainchainhash2 = self.nodes[2].getblockhash(self.mainchainheight)
self.log.info("Check that we can survive a 288-block reorg still")
self.reorg_test() # fork point at height 1033; node 1 mines a new 300-block chain on top of 1032
# Now create a 288 block reorg by mining a longer chain on N1
# First disconnect N1
# Then invalidate 1033 on main chain and 1032 on fork so height is 1032 on main chain
# N1 **...*(1020) **...**(1032)X..
# \
# ++...+(1031)X..
#
# Now mine 300 more blocks on N1
# N1 **...*(1020) **...**(1032) @@...@(1332)
# \ \
# \ X...
# \ \
# ++...+(1031)X.. ..
#
# Reconnect nodes and mine 220 more blocks on N1
# N1 **...*(1020) **...**(1032) @@...@@@(1552)
# \ \
# \ X...
# \ \
# ++...+(1031)X.. ..
#
# N2 **...*(1020) **...**(1032) @@...@@@(1552)
# \ \
# \ *...**(1320)
# \ \
# ++...++(1044) ..
#
# N0 ********************(1032) @@...@@@(1552)
# \
# *...**(1320)
self.log.info("Test that we can rerequest a block we previously pruned if needed for a reorg")
self.reorg_back()
# Verify that N2 still has block 1033 on current chain (@), but not on main chain (*)
# Invalidate 1033 on current chain (@) on N2 and we should be able to reorg to
# original main chain (*), but will require redownload of some blocks
# In order to have a peer we think we can download from, we must also perform this
# invalidation on N0 and mine a new longest chain to trigger the redownload.
# Final result:
# N0 ********************(1032) **...****(1553)
# \
# X@...@@@(1552)
#
# N2 **...*(1020) **...**(1032) **...****(1553)
# \ \
# \ X@...@@@(1552)
# \
# +..
#
# N1 doesn't change because 1033 on main chain (*) is invalid
self.log.info("Test manual pruning with block indices")
self.manual_test(3, use_timestamp=False)
self.log.info("Test manual pruning with timestamps")
self.manual_test(4, use_timestamp=True)
if self.is_wallet_compiled():
self.log.info("Test wallet re-scan")
self.wallet_test()
self.log.info("Test it's not possible to rescan beyond pruned data")
self.test_rescan_blockchain()
self.log.info("Test invalid pruning command line options")
self.test_invalid_command_line_options()
self.test_scanblocks_pruned()
self.log.info("Done")
def test_scanblocks_pruned(self):
node = self.nodes[5]
genesis_blockhash = node.getblockhash(0)
false_positive_spk = bytes.fromhex("001400000000000000000000000000000000000cadcb")
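# This scriptPubKey is crafted to trigger a BIP158 blockfilter false positive:
# scanblocks can report the block as relevant from the filter alone, but verifying
# the match requires the (pruned) block data, which the check below trips over.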
assert genesis_blockhash in node.scanblocks(
"start", [{"desc": f"raw({false_positive_spk.hex()})"}], 0, 0)['relevant_blocks']
assert_raises_rpc_error(-1, "Block not available (pruned data)", node.scanblocks,
"start", [{"desc": f"raw({false_positive_spk.hex()})"}], 0, 0, "basic", {"filter_false_positives": True})
if __name__ == '__main__':
PruneTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/wallet_importprunedfunds.py | #!/usr/bin/env python3
# Copyright (c) 2014-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the importprunedfunds and removeprunedfunds RPCs."""
from decimal import Decimal
from test_framework.address import key_to_p2wpkh
from test_framework.blocktools import COINBASE_MATURITY
from test_framework.messages import (
CMerkleBlock,
from_hex,
)
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
)
from test_framework.wallet_util import generate_keypair
class ImportPrunedFundsTest(BitcoinTestFramework):
def add_options(self, parser):
self.add_wallet_options(parser)
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 2
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def run_test(self):
self.log.info("Mining blocks...")
self.generate(self.nodes[0], COINBASE_MATURITY + 1)
# address1: address only; it is never imported to node 1
address1 = self.nodes[0].getnewaddress()
# address2: imported to node 1 as watch-only via importaddress
address2 = self.nodes[0].getnewaddress()
# address3: its private key will be imported to node 1
address3_privkey, address3_pubkey = generate_keypair(wif=True)
address3 = key_to_p2wpkh(address3_pubkey)
self.nodes[0].importprivkey(address3_privkey)
# Check only one address
address_info = self.nodes[0].getaddressinfo(address1)
assert_equal(address_info['ismine'], True)
self.sync_all()
# Node 1 sync test
assert_equal(self.nodes[1].getblockcount(), COINBASE_MATURITY + 1)
# Address Test - before import
address_info = self.nodes[1].getaddressinfo(address1)
assert_equal(address_info['iswatchonly'], False)
assert_equal(address_info['ismine'], False)
address_info = self.nodes[1].getaddressinfo(address2)
assert_equal(address_info['iswatchonly'], False)
assert_equal(address_info['ismine'], False)
address_info = self.nodes[1].getaddressinfo(address3)
assert_equal(address_info['iswatchonly'], False)
assert_equal(address_info['ismine'], False)
# Send funds to self
txnid1 = self.nodes[0].sendtoaddress(address1, 0.1)
self.generate(self.nodes[0], 1)
rawtxn1 = self.nodes[0].gettransaction(txnid1)['hex']
proof1 = self.nodes[0].gettxoutproof([txnid1])
txnid2 = self.nodes[0].sendtoaddress(address2, 0.05)
self.generate(self.nodes[0], 1)
rawtxn2 = self.nodes[0].gettransaction(txnid2)['hex']
proof2 = self.nodes[0].gettxoutproof([txnid2])
txnid3 = self.nodes[0].sendtoaddress(address3, 0.025)
self.generate(self.nodes[0], 1)
rawtxn3 = self.nodes[0].gettransaction(txnid3)['hex']
proof3 = self.nodes[0].gettxoutproof([txnid3])
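# Each (raw transaction, txoutproof) pair collected above is exactly what a pruned
# node needs to accept funds without block data: the proof is a serialized
# merkleblock proving the transaction's inclusion in a block on the active chain.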
self.sync_all()
# Import with no affiliated address
assert_raises_rpc_error(-5, "No addresses", self.nodes[1].importprunedfunds, rawtxn1, proof1)
balance1 = self.nodes[1].getbalance()
assert_equal(balance1, Decimal(0))
# Import with affiliated address with no rescan
self.nodes[1].createwallet('wwatch', disable_private_keys=True)
wwatch = self.nodes[1].get_wallet_rpc('wwatch')
wwatch.importaddress(address=address2, rescan=False)
wwatch.importprunedfunds(rawtransaction=rawtxn2, txoutproof=proof2)
assert [tx for tx in wwatch.listtransactions(include_watchonly=True) if tx['txid'] == txnid2]
# Import with private key with no rescan
w1 = self.nodes[1].get_wallet_rpc(self.default_wallet_name)
w1.importprivkey(privkey=address3_privkey, rescan=False)
w1.importprunedfunds(rawtxn3, proof3)
assert [tx for tx in w1.listtransactions() if tx['txid'] == txnid3]
balance3 = w1.getbalance()
assert_equal(balance3, Decimal('0.025'))
# Addresses Test - after import
address_info = w1.getaddressinfo(address1)
assert_equal(address_info['iswatchonly'], False)
assert_equal(address_info['ismine'], False)
address_info = wwatch.getaddressinfo(address2)
if self.options.descriptors:
assert_equal(address_info['iswatchonly'], False)
assert_equal(address_info['ismine'], True)
else:
assert_equal(address_info['iswatchonly'], True)
assert_equal(address_info['ismine'], False)
address_info = w1.getaddressinfo(address3)
assert_equal(address_info['iswatchonly'], False)
assert_equal(address_info['ismine'], True)
# Remove transactions
assert_raises_rpc_error(-8, "Transaction does not exist in wallet.", w1.removeprunedfunds, txnid1)
assert not [tx for tx in w1.listtransactions(include_watchonly=True) if tx['txid'] == txnid1]
wwatch.removeprunedfunds(txnid2)
assert not [tx for tx in wwatch.listtransactions(include_watchonly=True) if tx['txid'] == txnid2]
w1.removeprunedfunds(txnid3)
assert not [tx for tx in w1.listtransactions(include_watchonly=True) if tx['txid'] == txnid3]
# Check various RPC parameter validation errors
assert_raises_rpc_error(-22, "TX decode failed", w1.importprunedfunds, b'invalid tx'.hex(), proof1)
assert_raises_rpc_error(-5, "Transaction given doesn't exist in proof", w1.importprunedfunds, rawtxn2, proof1)
mb = from_hex(CMerkleBlock(), proof1)
mb.header.hashMerkleRoot = 0xdeadbeef # cause mismatch between merkle root and merkle block
assert_raises_rpc_error(-5, "Something wrong with merkleblock", w1.importprunedfunds, rawtxn1, mb.serialize().hex())
mb = from_hex(CMerkleBlock(), proof1)
mb.header.nTime += 1 # modify arbitrary block header field to change block hash
assert_raises_rpc_error(-5, "Block not found in chain", w1.importprunedfunds, rawtxn1, mb.serialize().hex())
if __name__ == '__main__':
ImportPrunedFundsTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/feature_fastprune.py | #!/usr/bin/env python3
# Copyright (c) 2023 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test fastprune mode."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal
)
from test_framework.wallet import MiniWallet
class FeatureFastpruneTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.extra_args = [["-fastprune"]]
def run_test(self):
self.log.info("ensure that large blocks don't crash or freeze in -fastprune")
wallet = MiniWallet(self.nodes[0])
tx = wallet.create_self_transfer()['tx']
annex = b"\x50" + b"\xff" * 0x10000
tx.wit.vtxinwit[0].scriptWitness.stack.append(annex)
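# Per BIP341, a witness stack element starting with byte 0x50 is the taproot annex;
# padding it with 64 KiB of 0xff makes the serialized block very large, exercising
# the oversized-block path under -fastprune's reduced block file size.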
self.generateblock(self.nodes[0], output="raw(55)", transactions=[tx.serialize().hex()])
assert_equal(self.nodes[0].getblockcount(), 201)
if __name__ == '__main__':
FeatureFastpruneTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/wallet_crosschain.py | #!/usr/bin/env python3
# Copyright (c) 2020-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_raises_rpc_error
class WalletCrossChain(BitcoinTestFramework):
def add_options(self, parser):
self.add_wallet_options(parser)
def set_test_params(self):
self.num_nodes = 2
self.setup_clean_chain = True
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def setup_network(self):
self.add_nodes(self.num_nodes)
# Switch node 1 to testnet before starting it.
self.nodes[1].chain = 'testnet3'
self.nodes[1].extra_args = ['-maxconnections=0', '-prune=550'] # disable testnet sync
self.nodes[1].replace_in_config([('regtest=', 'testnet='), ('[regtest]', '[test]')])
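# The framework writes node 1's options under regtest section markers; rename them
# so the same options take effect under [test], since options placed in a section
# for a different network would otherwise not apply.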
self.start_nodes()
def run_test(self):
self.log.info("Creating wallets")
node0_wallet = self.nodes[0].datadir_path / 'node0_wallet'
node0_wallet_backup = self.nodes[0].datadir_path / 'node0_wallet.bak'
self.nodes[0].createwallet(node0_wallet)
self.nodes[0].backupwallet(node0_wallet_backup)
self.nodes[0].unloadwallet(node0_wallet)
node1_wallet = self.nodes[1].datadir_path / 'node1_wallet'
node1_wallet_backup = self.nodes[0].datadir_path / 'node1_wallet.bak'
self.nodes[1].createwallet(node1_wallet)
self.nodes[1].backupwallet(node1_wallet_backup)
self.nodes[1].unloadwallet(node1_wallet)
self.log.info("Loading/restoring wallets into nodes with a different genesis block")
if self.options.descriptors:
assert_raises_rpc_error(-18, 'Wallet file verification failed.', self.nodes[0].loadwallet, node1_wallet)
assert_raises_rpc_error(-18, 'Wallet file verification failed.', self.nodes[1].loadwallet, node0_wallet)
assert_raises_rpc_error(-18, 'Wallet file verification failed.', self.nodes[0].restorewallet, 'w', node1_wallet_backup)
assert_raises_rpc_error(-18, 'Wallet file verification failed.', self.nodes[1].restorewallet, 'w', node0_wallet_backup)
else:
assert_raises_rpc_error(-4, 'Wallet files should not be reused across chains.', self.nodes[0].loadwallet, node1_wallet)
assert_raises_rpc_error(-4, 'Wallet files should not be reused across chains.', self.nodes[1].loadwallet, node0_wallet)
assert_raises_rpc_error(-4, 'Wallet files should not be reused across chains.', self.nodes[0].restorewallet, 'w', node1_wallet_backup)
assert_raises_rpc_error(-4, 'Wallet files should not be reused across chains.', self.nodes[1].restorewallet, 'w', node0_wallet_backup)
if not self.options.descriptors:
self.log.info("Override cross-chain wallet load protection")
self.stop_nodes()
self.start_nodes([['-walletcrosschain', '-prune=550']] * self.num_nodes)
self.nodes[0].loadwallet(node1_wallet)
self.nodes[1].loadwallet(node0_wallet)
if __name__ == '__main__':
WalletCrossChain().main()
| 0 |
bitcoin/test | bitcoin/test/functional/wallet_inactive_hdchains.py | #!/usr/bin/env python3
# Copyright (c) 2021-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""
Test Inactive HD Chains.
"""
import shutil
import time
from test_framework.authproxy import JSONRPCException
from test_framework.test_framework import BitcoinTestFramework
from test_framework.wallet_util import (
get_generate_key,
)
class InactiveHDChainsTest(BitcoinTestFramework):
def add_options(self, parser):
self.add_wallet_options(parser, descriptors=False)
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 2
self.extra_args = [["-keypool=10"], ["-nowallet", "-keypool=10"]]
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
self.skip_if_no_bdb()
self.skip_if_no_previous_releases()
def setup_nodes(self):
self.add_nodes(self.num_nodes, extra_args=self.extra_args, versions=[
None,
170200, # 0.17.2 Does not have the key metadata upgrade
])
self.start_nodes()
self.init_wallet(node=0)
def prepare_wallets(self, wallet_basename, encrypt=False):
self.nodes[0].createwallet(wallet_name=f"{wallet_basename}_base", descriptors=False, blank=True)
self.nodes[0].createwallet(wallet_name=f"{wallet_basename}_test", descriptors=False, blank=True)
base_wallet = self.nodes[0].get_wallet_rpc(f"{wallet_basename}_base")
test_wallet = self.nodes[0].get_wallet_rpc(f"{wallet_basename}_test")
# Setup both wallets with the same HD seed
seed = get_generate_key()
base_wallet.sethdseed(True, seed.privkey)
test_wallet.sethdseed(True, seed.privkey)
if encrypt:
# Encrypting will generate a new HD seed and flush the keypool
test_wallet.encryptwallet("pass")
else:
# Generate a new HD seed on the test wallet
test_wallet.sethdseed()
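# At this point the shared seed is an "inactive" HD chain of the test wallet: it no
# longer hands out new addresses, but the wallet is still expected to derive keys
# from it (topping up on demand) so that funds sent to the base wallet's addresses
# are detected.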
return base_wallet, test_wallet
def do_inactive_test(self, base_wallet, test_wallet, encrypt=False):
default = self.nodes[0].get_wallet_rpc(self.default_wallet_name)
# The first address should be known by both wallets.
addr1 = base_wallet.getnewaddress()
assert test_wallet.getaddressinfo(addr1)["ismine"]
# The address at index 10 is the first address that the test wallet will not know initially (its keypool of 10 covers indices 0-9)
for _ in range(0, 9):
base_wallet.getnewaddress()
addr2 = base_wallet.getnewaddress()
assert not test_wallet.getaddressinfo(addr2)["ismine"]
# Send to first address on the old seed
txid = default.sendtoaddress(addr1, 10)
self.generate(self.nodes[0], 1)
# Wait for the test wallet to see the transaction
while True:
try:
test_wallet.gettransaction(txid)
break
except JSONRPCException:
time.sleep(0.1)
if encrypt:
# The encrypted test wallet cannot top up the inactive chain's keypool until it is
# unlocked, so it should still not know about the second address
assert not test_wallet.getaddressinfo(addr2)["ismine"]
test_wallet.walletpassphrase("pass", 1)
# The test wallet should now know about the second address as it
# should have generated it in the inactive chain's keypool
assert test_wallet.getaddressinfo(addr2)["ismine"]
# Send to second address on the old seed
txid = default.sendtoaddress(addr2, 10)
self.generate(self.nodes[0], 1)
test_wallet.gettransaction(txid)
def test_basic(self):
self.log.info("Test basic case for inactive HD chains")
self.do_inactive_test(*self.prepare_wallets("basic"))
def test_encrypted_wallet(self):
self.log.info("Test inactive HD chains when wallet is encrypted")
self.do_inactive_test(*self.prepare_wallets("enc", encrypt=True), encrypt=True)
def test_without_upgraded_keymeta(self):
# Test that it is possible to top up inactive hd chains even if there is no key origin
# in CKeyMetadata. This tests for the segfault reported in
# https://github.com/bitcoin/bitcoin/issues/21605
self.log.info("Test that topping up inactive HD chains does not need upgraded key origin")
self.nodes[0].createwallet(wallet_name="keymeta_base", descriptors=False, blank=True)
# createwallet is overridden in the test framework so that the descriptors option can be
# filled in depending on the test's CLI args, but we don't want that when using old nodes
# that do not support descriptors, so we use the createwallet_passthrough function instead.
self.nodes[1].createwallet_passthrough(wallet_name="keymeta_test")
base_wallet = self.nodes[0].get_wallet_rpc("keymeta_base")
test_wallet = self.nodes[1].get_wallet_rpc("keymeta_test")
# Setup both wallets with the same HD seed
seed = get_generate_key()
base_wallet.sethdseed(True, seed.privkey)
test_wallet.sethdseed(True, seed.privkey)
# Encrypting will generate a new HD seed and flush the keypool
test_wallet.encryptwallet("pass")
# Copy test wallet to node 0
test_wallet.unloadwallet()
test_wallet_dir = self.nodes[1].wallets_path / "keymeta_test"
new_test_wallet_dir = self.nodes[0].wallets_path / "keymeta_test"
shutil.copytree(test_wallet_dir, new_test_wallet_dir)
self.nodes[0].loadwallet("keymeta_test")
test_wallet = self.nodes[0].get_wallet_rpc("keymeta_test")
self.do_inactive_test(base_wallet, test_wallet, encrypt=True)
def run_test(self):
self.generate(self.nodes[0], 101)
self.test_basic()
self.test_encrypted_wallet()
self.test_without_upgraded_keymeta()
if __name__ == '__main__':
InactiveHDChainsTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/wallet_fallbackfee.py | #!/usr/bin/env python3
# Copyright (c) 2017-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test wallet replace-by-fee capabilities in conjunction with the fallbackfee."""
from test_framework.blocktools import COINBASE_MATURITY
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_raises_rpc_error
class WalletRBFTest(BitcoinTestFramework):
def add_options(self, parser):
self.add_wallet_options(parser)
def set_test_params(self):
self.num_nodes = 1
self.setup_clean_chain = True
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def run_test(self):
self.generate(self.nodes[0], COINBASE_MATURITY + 1)
# sending a transaction without fee estimations must be possible by default on regtest
self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1)
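# This works because -fallbackfee is non-zero by default on regtest only; on mainnet
# it defaults to 0 (disabled), so a fee estimation failure would be fatal there.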
# test sending a tx with disabled fallback fee (must fail)
self.restart_node(0, extra_args=["-fallbackfee=0"])
assert_raises_rpc_error(-6, "Fee estimation failed", lambda: self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1))
assert_raises_rpc_error(-4, "Fee estimation failed", lambda: self.nodes[0].fundrawtransaction(self.nodes[0].createrawtransaction([], {self.nodes[0].getnewaddress(): 1})))
assert_raises_rpc_error(-6, "Fee estimation failed", lambda: self.nodes[0].sendmany("", {self.nodes[0].getnewaddress(): 1}))
if __name__ == '__main__':
WalletRBFTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/wallet_signrawtransactionwithwallet.py | #!/usr/bin/env python3
# Copyright (c) 2015-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test transaction signing using the signrawtransactionwithwallet RPC."""
from test_framework.blocktools import (
COINBASE_MATURITY,
)
from test_framework.address import (
script_to_p2wsh,
)
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
)
from test_framework.messages import (
CTxInWitness,
tx_from_hex,
)
from test_framework.script import (
CScript,
OP_CHECKLOCKTIMEVERIFY,
OP_CHECKSEQUENCEVERIFY,
OP_DROP,
OP_TRUE,
)
from decimal import (
Decimal,
getcontext,
)
RAW_TX = '020000000156b958f78e3f24e0b2f4e4db1255426b0902027cb37e3ddadb52e37c3557dddb0000000000ffffffff01c0a6b929010000001600149a2ee8c77140a053f36018ac8124a6ececc1668a00000000'
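# A static, well-formed raw transaction used purely to exercise error paths of the
# signing RPC (locked wallet, invalid sighash type); it is never successfully signed.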
class SignRawTransactionWithWalletTest(BitcoinTestFramework):
def add_options(self, parser):
self.add_wallet_options(parser)
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 2
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def test_with_lock_outputs(self):
self.log.info("Test correct error reporting when trying to sign a locked output")
self.nodes[0].encryptwallet("password")
assert_raises_rpc_error(-13, "Please enter the wallet passphrase with walletpassphrase first", self.nodes[0].signrawtransactionwithwallet, RAW_TX)
self.nodes[0].walletpassphrase("password", 9999)
def test_with_invalid_sighashtype(self):
self.log.info("Test signrawtransactionwithwallet raises if an invalid sighashtype is passed")
assert_raises_rpc_error(-8, "all is not a valid sighash parameter.", self.nodes[0].signrawtransactionwithwallet, hexstring=RAW_TX, sighashtype="all")
def script_verification_error_test(self):
"""Create and sign a raw transaction with valid (vin 0), invalid (vin 1) and one missing (vin 2) input script.
Expected results:
1) The transaction has no complete set of signatures
2) Two script verification errors occurred
3) Script verification errors have certain properties ("txid", "vout", "scriptSig", "sequence", "error")
4) The verification errors refer to the invalid (vin 1) and missing input (vin 2)"""
self.log.info("Test script verification errors")
privKeys = ['cUeKHd5orzT3mz8P9pxyREHfsWtVfgsfDjiZZBcjUBAaGk1BTj7N']
inputs = [
# Valid pay-to-pubkey script
{'txid': '9b907ef1e3c26fc71fe4a4b3580bc75264112f95050014157059c736f0202e71', 'vout': 0},
# Invalid script
{'txid': '5b8673686910442c644b1f4993d8f7753c7c8fcb5c87ee40d56eaeef25204547', 'vout': 7},
# Missing scriptPubKey
{'txid': '9b907ef1e3c26fc71fe4a4b3580bc75264112f95050014157059c736f0202e71', 'vout': 1},
]
scripts = [
# Valid pay-to-pubkey script
{'txid': '9b907ef1e3c26fc71fe4a4b3580bc75264112f95050014157059c736f0202e71', 'vout': 0,
'scriptPubKey': '76a91460baa0f494b38ce3c940dea67f3804dc52d1fb9488ac'},
# Invalid script
{'txid': '5b8673686910442c644b1f4993d8f7753c7c8fcb5c87ee40d56eaeef25204547', 'vout': 7,
'scriptPubKey': 'badbadbadbad'}
]
outputs = {'mpLQjfK79b7CCV4VMJWEWAj5Mpx8Up5zxB': 0.1}
rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
# Make sure decoderawtransaction is at least marginally sane
decodedRawTx = self.nodes[0].decoderawtransaction(rawTx)
for i, inp in enumerate(inputs):
assert_equal(decodedRawTx["vin"][i]["txid"], inp["txid"])
assert_equal(decodedRawTx["vin"][i]["vout"], inp["vout"])
# Make sure decoderawtransaction throws if there is extra data
assert_raises_rpc_error(-22, "TX decode failed", self.nodes[0].decoderawtransaction, rawTx + "00")
rawTxSigned = self.nodes[0].signrawtransactionwithkey(rawTx, privKeys, scripts)
# 1) The transaction has no complete set of signatures
assert not rawTxSigned['complete']
# 2) Two script verification errors occurred
assert 'errors' in rawTxSigned
assert_equal(len(rawTxSigned['errors']), 2)
# 3) Script verification errors have certain properties
assert 'txid' in rawTxSigned['errors'][0]
assert 'vout' in rawTxSigned['errors'][0]
assert 'witness' in rawTxSigned['errors'][0]
assert 'scriptSig' in rawTxSigned['errors'][0]
assert 'sequence' in rawTxSigned['errors'][0]
assert 'error' in rawTxSigned['errors'][0]
# 4) The verification errors refer to the invalid (vin 1) and missing input (vin 2)
assert_equal(rawTxSigned['errors'][0]['txid'], inputs[1]['txid'])
assert_equal(rawTxSigned['errors'][0]['vout'], inputs[1]['vout'])
assert_equal(rawTxSigned['errors'][1]['txid'], inputs[2]['txid'])
assert_equal(rawTxSigned['errors'][1]['vout'], inputs[2]['vout'])
assert not rawTxSigned['errors'][0]['witness']
# Now test signing failure for transaction with input witnesses
p2wpkh_raw_tx = "01000000000102fff7f7881a8099afa6940d42d1e7f6362bec38171ea3edf433541db4e4ad969f00000000494830450221008b9d1dc26ba6a9cb62127b02742fa9d754cd3bebf337f7a55d114c8e5cdd30be022040529b194ba3f9281a99f2b1c0a19c0489bc22ede944ccf4ecbab4cc618ef3ed01eeffffffef51e1b804cc89d182d279655c3aa89e815b1b309fe287d9b2b55d57b90ec68a0100000000ffffffff02202cb206000000001976a9148280b37df378db99f66f85c95a783a76ac7a6d5988ac9093510d000000001976a9143bde42dbee7e4dbe6a21b2d50ce2f0167faa815988ac000247304402203609e17b84f6a7d30c80bfa610b5b4542f32a8a0d5447a12fb1366d7f01cc44a0220573a954c4518331561406f90300e8f3358f51928d43c212a8caed02de67eebee0121025476c2e83188368da1ff3e292e7acafcdb3566bb0ad253f62fc70f07aeee635711000000"
rawTxSigned = self.nodes[0].signrawtransactionwithwallet(p2wpkh_raw_tx)
# 5) The transaction has no complete set of signatures
assert not rawTxSigned['complete']
# 6) Two script verification errors occurred
assert 'errors' in rawTxSigned
assert_equal(len(rawTxSigned['errors']), 2)
# 7) Script verification errors have certain properties
assert 'txid' in rawTxSigned['errors'][0]
assert 'vout' in rawTxSigned['errors'][0]
assert 'witness' in rawTxSigned['errors'][0]
assert 'scriptSig' in rawTxSigned['errors'][0]
assert 'sequence' in rawTxSigned['errors'][0]
assert 'error' in rawTxSigned['errors'][0]
# Non-empty witness checked here
assert_equal(rawTxSigned['errors'][1]['witness'], ["304402203609e17b84f6a7d30c80bfa610b5b4542f32a8a0d5447a12fb1366d7f01cc44a0220573a954c4518331561406f90300e8f3358f51928d43c212a8caed02de67eebee01", "025476c2e83188368da1ff3e292e7acafcdb3566bb0ad253f62fc70f07aeee6357"])
assert not rawTxSigned['errors'][0]['witness']
def test_fully_signed_tx(self):
self.log.info("Test signing a fully signed transaction does nothing")
self.nodes[0].walletpassphrase("password", 9999)
self.generate(self.nodes[0], COINBASE_MATURITY + 1)
rawtx = self.nodes[0].createrawtransaction([], [{self.nodes[0].getnewaddress(): 10}])
fundedtx = self.nodes[0].fundrawtransaction(rawtx)
signedtx = self.nodes[0].signrawtransactionwithwallet(fundedtx["hex"])
assert_equal(signedtx["complete"], True)
signedtx2 = self.nodes[0].signrawtransactionwithwallet(signedtx["hex"])
assert_equal(signedtx2["complete"], True)
assert_equal(signedtx["hex"], signedtx2["hex"])
self.nodes[0].walletlock()
def OP_1NEGATE_test(self):
self.log.info("Test OP_1NEGATE (0x4f) satisfies BIP62 minimal push standardness rule")
hex_str = (
"0200000001FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF"
"FFFFFFFF00000000044F024F9CFDFFFFFF01F0B9F5050000000023210277777777"
"77777777777777777777777777777777777777777777777777777777AC66030000"
)
prev_txs = [
{
"txid": "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF",
"vout": 0,
"scriptPubKey": "A914AE44AB6E9AA0B71F1CD2B453B69340E9BFBAEF6087",
"redeemScript": "4F9C",
"amount": 1,
}
]
txn = self.nodes[0].signrawtransactionwithwallet(hex_str, prev_txs)
assert txn["complete"]
def test_signing_with_csv(self):
self.log.info("Test signing a transaction containing a fully signed CSV input")
self.nodes[0].walletpassphrase("password", 9999)
getcontext().prec = 8
# Make sure CSV is active
assert self.nodes[0].getdeploymentinfo()['deployments']['csv']['active']
# Create a P2WSH script with CSV
script = CScript([1, OP_CHECKSEQUENCEVERIFY, OP_DROP])
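# Witness script: <1> OP_CHECKSEQUENCEVERIFY OP_DROP. Per BIP112, CSV fails unless
# the spending input's nSequence encodes a relative locktime of at least 1 block,
# which is why the input below is created with "sequence": 1.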
address = script_to_p2wsh(script)
# Fund that address and make the spend
utxo1 = self.create_outpoints(self.nodes[0], outputs=[{address: 1}])[0]
self.generate(self.nodes[0], 1)
utxo2 = self.nodes[0].listunspent()[0]
amt = Decimal(1) + utxo2["amount"] - Decimal("0.00001")
tx = self.nodes[0].createrawtransaction(
[{**utxo1, "sequence": 1},{"txid": utxo2["txid"], "vout": utxo2["vout"]}],
[{self.nodes[0].getnewaddress(): amt}],
self.nodes[0].getblockcount()
)
# Set the witness script
ctx = tx_from_hex(tx)
ctx.wit.vtxinwit.append(CTxInWitness())
ctx.wit.vtxinwit[0].scriptWitness.stack = [CScript([OP_TRUE]), script]
tx = ctx.serialize_with_witness().hex()
# Sign and send the transaction
signed = self.nodes[0].signrawtransactionwithwallet(tx)
assert_equal(signed["complete"], True)
self.nodes[0].sendrawtransaction(signed["hex"])
def test_signing_with_cltv(self):
self.log.info("Test signing a transaction containing a fully signed CLTV input")
self.nodes[0].walletpassphrase("password", 9999)
getcontext().prec = 8
# Make sure CLTV is active
assert self.nodes[0].getdeploymentinfo()['deployments']['bip65']['active']
# Create a P2WSH script with CLTV
script = CScript([100, OP_CHECKLOCKTIMEVERIFY, OP_DROP])
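# Witness script: <100> OP_CHECKLOCKTIMEVERIFY OP_DROP. Per BIP65, CLTV requires the
# spending transaction's nLockTime to be at least 100; createrawtransaction below
# sets nLockTime to the current block count, which is well above that.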
address = script_to_p2wsh(script)
# Fund that address and make the spend
utxo1 = self.create_outpoints(self.nodes[0], outputs=[{address: 1}])[0]
self.generate(self.nodes[0], 1)
utxo2 = self.nodes[0].listunspent()[0]
amt = Decimal(1) + utxo2["amount"] - Decimal("0.00001")
tx = self.nodes[0].createrawtransaction(
[utxo1, {"txid": utxo2["txid"], "vout": utxo2["vout"]}],
[{self.nodes[0].getnewaddress(): amt}],
self.nodes[0].getblockcount()
)
# Set the witness script
ctx = tx_from_hex(tx)
ctx.wit.vtxinwit.append(CTxInWitness())
ctx.wit.vtxinwit[0].scriptWitness.stack = [CScript([OP_TRUE]), script]
tx = ctx.serialize_with_witness().hex()
# Sign and send the transaction
signed = self.nodes[0].signrawtransactionwithwallet(tx)
assert_equal(signed["complete"], True)
self.nodes[0].sendrawtransaction(signed["hex"])
def test_signing_with_missing_prevtx_info(self):
txid = "1d1d4e24ed99057e84c3f80fd8fbec79ed9e1acee37da269356ecea000000000"
for addr_type in ["bech32", "p2sh-segwit", "legacy"]:
self.log.info(f"Test signing with missing prevtx info ({addr_type})")
addr = self.nodes[0].getnewaddress("", addr_type)
addrinfo = self.nodes[0].getaddressinfo(addr)
pubkey = addrinfo["scriptPubKey"]
inputs = [{'txid': txid, 'vout': 3, 'sequence': 1000}]
outputs = {self.nodes[0].getnewaddress(): 1}
rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
prevtx = dict(txid=txid, scriptPubKey=pubkey, vout=3, amount=1)
succ = self.nodes[0].signrawtransactionwithwallet(rawtx, [prevtx])
assert succ["complete"]
if addr_type == "legacy":
del prevtx["amount"]
succ = self.nodes[0].signrawtransactionwithwallet(rawtx, [prevtx])
assert succ["complete"]
else:
assert_raises_rpc_error(-3, "Missing amount", self.nodes[0].signrawtransactionwithwallet, rawtx, [
{
"txid": txid,
"scriptPubKey": pubkey,
"vout": 3,
}
])
assert_raises_rpc_error(-3, "Missing vout", self.nodes[0].signrawtransactionwithwallet, rawtx, [
{
"txid": txid,
"scriptPubKey": pubkey,
"amount": 1,
}
])
assert_raises_rpc_error(-3, "Missing txid", self.nodes[0].signrawtransactionwithwallet, rawtx, [
{
"scriptPubKey": pubkey,
"vout": 3,
"amount": 1,
}
])
assert_raises_rpc_error(-3, "Missing scriptPubKey", self.nodes[0].signrawtransactionwithwallet, rawtx, [
{
"txid": txid,
"vout": 3,
"amount": 1
}
])
def run_test(self):
self.script_verification_error_test()
self.OP_1NEGATE_test()
self.test_with_lock_outputs()
self.test_with_invalid_sighashtype()
self.test_fully_signed_tx()
self.test_signing_with_csv()
self.test_signing_with_cltv()
self.test_signing_with_missing_prevtx_info()
if __name__ == '__main__':
SignRawTransactionWithWalletTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/wallet_signmessagewithaddress.py | #!/usr/bin/env python3
# Copyright (c) 2016-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test Wallet commands for signing and verifying messages."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_raises_rpc_error,
)
class SignMessagesWithAddressTest(BitcoinTestFramework):
def add_options(self, parser):
self.add_wallet_options(parser)
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
self.extra_args = [["-addresstype=legacy"]]
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def run_test(self):
message = 'This is just a test message'
self.log.info('test signing with an address with wallet')
address = self.nodes[0].getnewaddress()
signature = self.nodes[0].signmessage(address, message)
assert self.nodes[0].verifymessage(address, signature, message)
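# signmessage uses the private key behind the (legacy P2PKH) address to produce a
# compact, recoverable signature; verifymessage recovers the public key from it and
# checks that it hashes to the given address.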
self.log.info('test verifying with another address should not work')
other_address = self.nodes[0].getnewaddress()
other_signature = self.nodes[0].signmessage(other_address, message)
assert not self.nodes[0].verifymessage(other_address, signature, message)
assert not self.nodes[0].verifymessage(address, other_signature, message)
self.log.info('test parameter validity and error codes')
# signmessage has two required parameters
for num_params in [0, 1, 3, 4, 5]:
param_list = ["dummy"]*num_params
assert_raises_rpc_error(-1, "signmessage", self.nodes[0].signmessage, *param_list)
# invalid key or address provided
assert_raises_rpc_error(-5, "Invalid address", self.nodes[0].signmessage, "invalid_addr", message)
if __name__ == '__main__':
SignMessagesWithAddressTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/feature_signet.py | #!/usr/bin/env python3
# Copyright (c) 2019-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test basic signet functionality"""
from decimal import Decimal
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal
signet_blocks = [
'00000020f61eee3b63a380a477a063af32b2bbc97c9ff9f01f2c4225e973988108000000f575c83235984e7dc4afc1f30944c170462e84437ab6f2d52e16878a79e4678bd1914d5fae77031eccf4070001010000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff025151feffffff0200f2052a010000001600149243f727dd5343293eb83174324019ec16c2630f0000000000000000776a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf94c4fecc7daa2490047304402205e423a8754336ca99dbe16509b877ef1bf98d008836c725005b3c787c41ebe46022047246e4467ad7cc7f1ad98662afcaf14c115e0095a227c7b05c5182591c23e7e01000120000000000000000000000000000000000000000000000000000000000000000000000000',
'00000020533b53ded9bff4adc94101d32400a144c54edc5ed492a3b26c63b2d686000000b38fef50592017cfafbcab88eb3d9cf50b2c801711cad8299495d26df5e54812e7914d5fae77031ecfdd0b0001010000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff025251feffffff0200f2052a01000000160014fd09839740f0e0b4fc6d5e2527e4022aa9b89dfa0000000000000000776a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf94c4fecc7daa24900473044022031d64a1692cdad1fc0ced69838169fe19ae01be524d831b95fcf5ea4e6541c3c02204f9dea0801df8b4d0cd0857c62ab35c6c25cc47c930630dc7fe723531daa3e9b01000120000000000000000000000000000000000000000000000000000000000000000000000000',
'000000202960f3752f0bfa8858a3e333294aedc7808025e868c9dc03e71d88bb320000007765fcd3d5b4966beb338bba2675dc2cf2ad28d4ad1d83bdb6f286e7e27ac1f807924d5fae77031e81d60b0001010000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff025351feffffff0200f2052a010000001600141e5fb426042692ae0e87c070e78c39307a5661c20000000000000000776a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf94c4fecc7daa2490047304402205de93694763a42954865bcf1540cb82958bc62d0ec4eee02070fb7937cd037f4022067f333753bce47b10bc25eb6e1f311482e994c862a7e0b2d41ab1c8679fd1b1101000120000000000000000000000000000000000000000000000000000000000000000000000000',
'00000020b06443a13ae1d3d50faef5ecad38c6818194dc46abca3e972e2aacdae800000069a5829097e80fee00ac49a56ea9f82d741a6af84d32b3bc455cf31871e2a8ac27924d5fae77031e9c91050001010000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff025451feffffff0200f2052a0100000016001430db2f8225dcf7751361ab38735de08190318cb70000000000000000776a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf94c4fecc7daa2490047304402200936f5f9872f6df5dd242026ad52241a68423f7f682e79169a8d85a374eab9b802202cd2979c48b321b3453e65e8f92460db3fca93cbea8539b450c959f4fbe630c601000120000000000000000000000000000000000000000000000000000000000000000000000000',
'000000207ed403758a4f228a1939418a155e2ebd4ae6b26e5ffd0ae433123f7694010000542e80b609c5bc58af5bdf492e26d4f60cd43a3966c2e063c50444c29b3757a636924d5fae77031ee8601d0001010000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff025551feffffff0200f2052a01000000160014edc207e014df34fa3885dff97d1129d356e1186a0000000000000000776a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf94c4fecc7daa24900473044022021a3656609f85a66a2c5672ed9322c2158d57251040d2716ed202a1fe14f0c12022057d68bc6611f7a9424a7e00bbf3e27e6ae6b096f60bac624a094bc97a59aa1ff01000120000000000000000000000000000000000000000000000000000000000000000000000000',
'000000205bea0a88d1422c3df08d766ad72df95084d0700e6f873b75dd4e986c7703000002b57516d33ed60c2bdd9f93d6d5614083324c837e68e5ba6e04287a7285633585924d5fae77031ed171960001010000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff025651feffffff0200f2052a010000001600143ae612599cf96f2442ce572633e0251116eaa52f0000000000000000776a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf94c4fecc7daa24900473044022059a7c54de76bfdbb1dd44c78ea2dbd2bb4e97f4abad38965f41e76433e56423c022054bf17f04fe17415c0141f60eebd2b839200f574d8ad8d55a0917b92b0eb913401000120000000000000000000000000000000000000000000000000000000000000000000000000',
'00000020daf3b60d374b19476461f97540498dcfa2eb7016238ec6b1d022f82fb60100007a7ae65b53cb988c2ec92d2384996713821d5645ffe61c9acea60da75cd5edfa1a944d5fae77031e9dbb050001010000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff025751feffffff0200f2052a01000000160014ef2dceae02e35f8137de76768ae3345d99ca68860000000000000000776a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf94c4fecc7daa2490047304402202b3f946d6447f9bf17d00f3696cede7ee70b785495e5498274ee682a493befd5022045fc0bcf9332243168b5d35507175f9f374a8eba2336873885d12aada67ea5f601000120000000000000000000000000000000000000000000000000000000000000000000000000',
'00000020457cc5f3c2e1a5655bc20e20e48d33e1b7ea68786c614032b5c518f0b6000000541f36942d82c6e7248275ff15c8933487fbe1819c67a9ecc0f4b70bb7e6cf672a944d5fae77031e8f39860001010000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff025851feffffff0200f2052a0100000016001472a27906947c06d034b38ba2fa13c6391a4832790000000000000000776a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf94c4fecc7daa2490047304402202d62805ce60cbd60591f97f949b5ea5bd7e2307bcde343e6ea8394da92758e72022053a25370b0aa20da100189b7899a8f8675a0fdc60e38ece6b8a4f98edd94569e01000120000000000000000000000000000000000000000000000000000000000000000000000000',
'00000020a2eb61eb4f3831baa3a3363e1b42db4462663f756f07423e81ed30322102000077224de7dea0f8d0ec22b1d2e2e255f0a987b96fe7200e1a2e6373f48a2f5b7894954d5fae77031e36867e0001010000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff025951feffffff0200f2052a01000000160014aa0ad9f26801258382e0734dceec03a4a75f60240000000000000000776a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf94c4fecc7daa2490047304402206fa0d59990eed369bd7375767c9a6c9369fae209152b8674e520da270605528c0220749eed3b12dbe3f583f505d21803e4aef59c8e24c5831951eafa4f15a8f92c4e01000120000000000000000000000000000000000000000000000000000000000000000000000000',
'00000020a868e8514be5e46dabd6a122132f423f36a43b716a40c394e2a8d063e1010000f4c6c717e99d800c699c25a2006a75a0c5c09f432a936f385e6fce139cdbd1a5e9964d5fae77031e7d026e0001010000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff025a51feffffff0200f2052a01000000160014aaa671c82b138e3b8f510cd801e5f2bd0aa305940000000000000000776a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf94c4fecc7daa24900473044022042309f4c3c7a1a2ac8c24f890f962df1c0086cec10be0868087cfc427520cb2702201dafee8911c269b7e786e242045bb57cef3f5b0f177010c6159abae42f646cc501000120000000000000000000000000000000000000000000000000000000000000000000000000',
]
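# The ten pregenerated blocks above solve the default test signet challenge: nodes
# 2/3 (default challenge) accept them in run_test, while nodes 4/5 (2-of-2
# challenge) must reject the single-signature solution with 'bad-signet-blksig'.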
class SignetBasicTest(BitcoinTestFramework):
def set_test_params(self):
self.chain = "signet"
self.num_nodes = 6
self.setup_clean_chain = True
shared_args1 = ["-signetchallenge=51"] # OP_TRUE
shared_args2 = [] # default challenge
# we use the exact same keys as the default challenge, but as a 2-of-2 multisig, which means block solutions for the other signets must fail here
shared_args3 = ["-signetchallenge=522103ad5e0edad18cb1f0fc0d28a3d4f1f3e445640337489abb10404f2d1e086be430210359ef5021964fe22d6f8e05b2463c9540ce96883fe3b278760f048f5189f2e6c452ae"]
self.extra_args = [
shared_args1, shared_args1,
shared_args2, shared_args2,
shared_args3, shared_args3,
]
def setup_network(self):
self.setup_nodes()
# Setup the three signets, which are incompatible with each other
self.connect_nodes(0, 1)
self.connect_nodes(2, 3)
self.connect_nodes(4, 5)
def run_test(self):
self.log.info("basic tests using OP_TRUE challenge")
self.log.info('getmininginfo')
mining_info = self.nodes[0].getmininginfo()
assert_equal(mining_info['blocks'], 0)
assert_equal(mining_info['chain'], 'signet')
assert 'currentblocktx' not in mining_info
assert 'currentblockweight' not in mining_info
assert_equal(mining_info['networkhashps'], Decimal('0'))
assert_equal(mining_info['pooledtx'], 0)
self.generate(self.nodes[0], 1, sync_fun=self.no_op)
self.log.info("pregenerated signet blocks check")
height = 0
for block in signet_blocks:
assert_equal(self.nodes[2].submitblock(block), None)
height += 1
assert_equal(self.nodes[2].getblockcount(), height)
self.log.info("pregenerated signet blocks check (incompatible solution)")
assert_equal(self.nodes[4].submitblock(signet_blocks[0]), 'bad-signet-blksig')
self.log.info("test that signet logs the network magic on node start")
with self.nodes[0].assert_debug_log(["Signet derived magic (message start)"]):
self.restart_node(0)
self.stop_node(0)
self.nodes[0].assert_start_raises_init_error(extra_args=["-signetchallenge=abc"], expected_msg="Error: -signetchallenge must be hex, not 'abc'.")
self.nodes[0].assert_start_raises_init_error(extra_args=["-signetchallenge=abc"] * 2, expected_msg="Error: -signetchallenge cannot be multiple values.")
if __name__ == '__main__':
SignetBasicTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/mempool_expiry.py | #!/usr/bin/env python3
# Copyright (c) 2020-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Tests that a mempool transaction expires after a given timeout and that its
children are removed as well.
Both the default expiry timeout defined by DEFAULT_MEMPOOL_EXPIRY_HOURS and a
user-definable expiry timeout set via the '-mempoolexpiry=<n>' command line argument
(<n> is the timeout in hours) are tested.
"""
from datetime import timedelta
from test_framework.messages import (
COIN,
DEFAULT_MEMPOOL_EXPIRY_HOURS,
)
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
)
from test_framework.wallet import MiniWallet
CUSTOM_MEMPOOL_EXPIRY = 10 # hours
class MempoolExpiryTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
def test_transaction_expiry(self, timeout):
"""Tests that a transaction expires after the expiry timeout and its
children are removed as well."""
node = self.nodes[0]
# Send a parent transaction that will expire.
parent_txid = self.wallet.send_self_transfer(from_node=node)['txid']
parent_utxo = self.wallet.get_utxo(txid=parent_txid)
independent_utxo = self.wallet.get_utxo()
# Add prioritisation to this transaction to check that it persists after the expiry
node.prioritisetransaction(parent_txid, 0, COIN)
assert_equal(node.getprioritisedtransactions()[parent_txid], { "fee_delta" : COIN, "in_mempool" : True})
# Ensure the transactions we send to trigger the mempool check spend utxos that are independent of
# the transactions being tested for expiration.
trigger_utxo1 = self.wallet.get_utxo()
trigger_utxo2 = self.wallet.get_utxo()
# Set the mocktime to the arrival time of the parent transaction.
entry_time = node.getmempoolentry(parent_txid)['time']
node.setmocktime(entry_time)
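# From here on the test drives the clock with setmocktime, so the expiry window can
# be traversed deterministically instead of waiting in real time.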
# Let half of the timeout elapse and broadcast the child transaction spending the parent transaction.
half_expiry_time = entry_time + int(60 * 60 * timeout/2)
node.setmocktime(half_expiry_time)
child_txid = self.wallet.send_self_transfer(from_node=node, utxo_to_spend=parent_utxo)['txid']
assert_equal(parent_txid, node.getmempoolentry(child_txid)['depends'][0])
self.log.info('Broadcast child transaction after {} hours.'.format(
timedelta(seconds=(half_expiry_time-entry_time))))
# Broadcast another (independent) transaction.
independent_txid = self.wallet.send_self_transfer(from_node=node, utxo_to_spend=independent_utxo)['txid']
# Let most of the timeout elapse and check that the parent tx is still
# in the mempool.
nearly_expiry_time = entry_time + 60 * 60 * timeout - 5
node.setmocktime(nearly_expiry_time)
# Broadcast a transaction as the expiry of transactions in the mempool is only checked
# when a new transaction is added to the mempool.
self.wallet.send_self_transfer(from_node=node, utxo_to_spend=trigger_utxo1)
self.log.info('Test parent tx not expired after {} hours.'.format(
timedelta(seconds=(nearly_expiry_time-entry_time))))
assert_equal(entry_time, node.getmempoolentry(parent_txid)['time'])
# Transaction should be evicted from the mempool after the expiry time
# has passed.
expiry_time = entry_time + 60 * 60 * timeout + 5
node.setmocktime(expiry_time)
# Again, broadcast a transaction so the expiry of transactions in the mempool is checked.
self.wallet.send_self_transfer(from_node=node, utxo_to_spend=trigger_utxo2)
self.log.info('Test parent tx expiry after {} hours.'.format(
timedelta(seconds=(expiry_time-entry_time))))
assert_raises_rpc_error(-5, 'Transaction not in mempool',
node.getmempoolentry, parent_txid)
# Prioritisation does not disappear when transaction expires
assert_equal(node.getprioritisedtransactions()[parent_txid], { "fee_delta" : COIN, "in_mempool" : False})
# The child transaction should be removed from the mempool as well.
self.log.info('Test child tx is evicted as well.')
assert_raises_rpc_error(-5, 'Transaction not in mempool',
node.getmempoolentry, child_txid)
# Check that the independent tx is still in the mempool.
self.log.info('Test the independent tx not expired after {} hours.'.format(
timedelta(seconds=(expiry_time-half_expiry_time))))
assert_equal(half_expiry_time, node.getmempoolentry(independent_txid)['time'])
def run_test(self):
self.wallet = MiniWallet(self.nodes[0])
self.log.info('Test default mempool expiry timeout of %d hours.' %
DEFAULT_MEMPOOL_EXPIRY_HOURS)
self.test_transaction_expiry(DEFAULT_MEMPOOL_EXPIRY_HOURS)
self.log.info('Test custom mempool expiry timeout of %d hours.' %
CUSTOM_MEMPOOL_EXPIRY)
self.restart_node(0, ['-mempoolexpiry=%d' % CUSTOM_MEMPOOL_EXPIRY])
self.test_transaction_expiry(CUSTOM_MEMPOOL_EXPIRY)
if __name__ == '__main__':
MempoolExpiryTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/rpc_scantxoutset.py | #!/usr/bin/env python3
# Copyright (c) 2018-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the scantxoutset rpc call."""
from test_framework.address import address_to_scriptpubkey
from test_framework.messages import COIN
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, assert_raises_rpc_error
from test_framework.wallet import (
MiniWallet,
getnewdestination,
)
from decimal import Decimal
def descriptors(out):
return sorted(u['desc'] for u in out['unspents'])
class ScantxoutsetTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
def sendtodestination(self, destination, amount):
# interpret strings as addresses, assume scriptPubKey otherwise
if isinstance(destination, str):
destination = address_to_scriptpubkey(destination)
self.wallet.send_to(from_node=self.nodes[0], scriptPubKey=destination, amount=int(COIN * amount))
def run_test(self):
self.wallet = MiniWallet(self.nodes[0])
self.log.info("Test if we find coinbase outputs.")
assert_equal(sum(u["coinbase"] for u in self.nodes[0].scantxoutset("start", [self.wallet.get_descriptor()])["unspents"]), 49)
self.log.info("Create UTXOs...")
pubk1, spk_P2SH_SEGWIT, addr_P2SH_SEGWIT = getnewdestination("p2sh-segwit")
pubk2, spk_LEGACY, addr_LEGACY = getnewdestination("legacy")
pubk3, spk_BECH32, addr_BECH32 = getnewdestination("bech32")
self.sendtodestination(spk_P2SH_SEGWIT, 0.001)
self.sendtodestination(spk_LEGACY, 0.002)
self.sendtodestination(spk_BECH32, 0.004)
# send to child keys of tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK
self.sendtodestination("mkHV1C6JLheLoUSSZYk7x3FH5tnx9bu7yc", 0.008) # (m/0'/0'/0')
self.sendtodestination("mipUSRmJAj2KrjSvsPQtnP8ynUon7FhpCR", 0.016) # (m/0'/0'/1')
self.sendtodestination("n37dAGe6Mq1HGM9t4b6rFEEsDGq7Fcgfqg", 0.032) # (m/0'/0'/1500')
self.sendtodestination("mqS9Rpg8nNLAzxFExsgFLCnzHBsoQ3PRM6", 0.064) # (m/0'/0'/0)
self.sendtodestination("mnTg5gVWr3rbhHaKjJv7EEEc76ZqHgSj4S", 0.128) # (m/0'/0'/1)
self.sendtodestination("mketCd6B9U9Uee1iCsppDJJBHfvi6U6ukC", 0.256) # (m/0'/0'/1500)
self.sendtodestination("mj8zFzrbBcdaWXowCQ1oPZ4qioBVzLzAp7", 0.512) # (m/1/1/0')
self.sendtodestination("mfnKpKQEftniaoE1iXuMMePQU3PUpcNisA", 1.024) # (m/1/1/1')
self.sendtodestination("mou6cB1kaP1nNJM1sryW6YRwnd4shTbXYQ", 2.048) # (m/1/1/1500')
self.sendtodestination("mtfUoUax9L4tzXARpw1oTGxWyoogp52KhJ", 4.096) # (m/1/1/0)
self.sendtodestination("mxp7w7j8S1Aq6L8StS2PqVvtt4HGxXEvdy", 8.192) # (m/1/1/1)
self.sendtodestination("mpQ8rokAhp1TAtJQR6F6TaUmjAWkAWYYBq", 16.384) # (m/1/1/1500)
self.generate(self.nodes[0], 1)
scan = self.nodes[0].scantxoutset("start", [])
info = self.nodes[0].gettxoutsetinfo()
assert_equal(scan['success'], True)
assert_equal(scan['height'], info['height'])
assert_equal(scan['txouts'], info['txouts'])
assert_equal(scan['bestblock'], info['bestblock'])
self.log.info("Test if we have found the non HD unspent outputs.")
assert_equal(self.nodes[0].scantxoutset("start", ["pkh(" + pubk1.hex() + ")", "pkh(" + pubk2.hex() + ")", "pkh(" + pubk3.hex() + ")"])['total_amount'], Decimal("0.002"))
assert_equal(self.nodes[0].scantxoutset("start", ["wpkh(" + pubk1.hex() + ")", "wpkh(" + pubk2.hex() + ")", "wpkh(" + pubk3.hex() + ")"])['total_amount'], Decimal("0.004"))
assert_equal(self.nodes[0].scantxoutset("start", ["sh(wpkh(" + pubk1.hex() + "))", "sh(wpkh(" + pubk2.hex() + "))", "sh(wpkh(" + pubk3.hex() + "))"])['total_amount'], Decimal("0.001"))
assert_equal(self.nodes[0].scantxoutset("start", ["combo(" + pubk1.hex() + ")", "combo(" + pubk2.hex() + ")", "combo(" + pubk3.hex() + ")"])['total_amount'], Decimal("0.007"))
assert_equal(self.nodes[0].scantxoutset("start", ["addr(" + addr_P2SH_SEGWIT + ")", "addr(" + addr_LEGACY + ")", "addr(" + addr_BECH32 + ")"])['total_amount'], Decimal("0.007"))
assert_equal(self.nodes[0].scantxoutset("start", ["addr(" + addr_P2SH_SEGWIT + ")", "addr(" + addr_LEGACY + ")", "combo(" + pubk3.hex() + ")"])['total_amount'], Decimal("0.007"))
self.log.info("Test range validation.")
assert_raises_rpc_error(-8, "End of range is too high", self.nodes[0].scantxoutset, "start", [{"desc": "desc", "range": -1}])
assert_raises_rpc_error(-8, "Range should be greater or equal than 0", self.nodes[0].scantxoutset, "start", [{"desc": "desc", "range": [-1, 10]}])
assert_raises_rpc_error(-8, "End of range is too high", self.nodes[0].scantxoutset, "start", [{"desc": "desc", "range": [(2 << 31 + 1) - 1000000, (2 << 31 + 1)]}])
assert_raises_rpc_error(-8, "Range specified as [begin,end] must not have begin after end", self.nodes[0].scantxoutset, "start", [{"desc": "desc", "range": [2, 1]}])
assert_raises_rpc_error(-8, "Range is too large", self.nodes[0].scantxoutset, "start", [{"desc": "desc", "range": [0, 1000001]}])
self.log.info("Test extended key derivation.")
# Run various scans, and verify that the sum of the amounts of the matches corresponds to the expected subset.
# Note that all amounts in the UTXO set are powers of 2 multiplied by 0.001 BTC, so each amounts uniquely identifies a subset.
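        # For example, the first ranged scan below covers only the hardened
        # child keys 0' and 1' (range 1499 excludes index 1500), so it must
        # return exactly 0.008 + 0.016 = 0.024 BTC.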
assert_equal(self.nodes[0].scantxoutset("start", ["combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0h/0h)"])['total_amount'], Decimal("0.008"))
assert_equal(self.nodes[0].scantxoutset("start", ["combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0'/1h)"])['total_amount'], Decimal("0.016"))
assert_equal(self.nodes[0].scantxoutset("start", ["combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/0'/1500')"])['total_amount'], Decimal("0.032"))
assert_equal(self.nodes[0].scantxoutset("start", ["combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/0h/0)"])['total_amount'], Decimal("0.064"))
assert_equal(self.nodes[0].scantxoutset("start", ["combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0h/1)"])['total_amount'], Decimal("0.128"))
assert_equal(self.nodes[0].scantxoutset("start", ["combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/0'/1500)"])['total_amount'], Decimal("0.256"))
assert_equal(self.nodes[0].scantxoutset("start", [{"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0h/*h)", "range": 1499}])['total_amount'], Decimal("0.024"))
assert_equal(self.nodes[0].scantxoutset("start", [{"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0'/*h)", "range": 1500}])['total_amount'], Decimal("0.056"))
assert_equal(self.nodes[0].scantxoutset("start", [{"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/0'/*)", "range": 1499}])['total_amount'], Decimal("0.192"))
assert_equal(self.nodes[0].scantxoutset("start", [{"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0h/*)", "range": 1500}])['total_amount'], Decimal("0.448"))
assert_equal(self.nodes[0].scantxoutset("start", ["combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/0')"])['total_amount'], Decimal("0.512"))
assert_equal(self.nodes[0].scantxoutset("start", ["combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/1')"])['total_amount'], Decimal("1.024"))
assert_equal(self.nodes[0].scantxoutset("start", ["combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/1500h)"])['total_amount'], Decimal("2.048"))
assert_equal(self.nodes[0].scantxoutset("start", ["combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/0)"])['total_amount'], Decimal("4.096"))
assert_equal(self.nodes[0].scantxoutset("start", ["combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/1)"])['total_amount'], Decimal("8.192"))
assert_equal(self.nodes[0].scantxoutset("start", ["combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/1500)"])['total_amount'], Decimal("16.384"))
assert_equal(self.nodes[0].scantxoutset("start", ["combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/0)"])['total_amount'], Decimal("4.096"))
assert_equal(self.nodes[0].scantxoutset("start", ["combo([abcdef88/1/2'/3/4h]tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/1)"])['total_amount'], Decimal("8.192"))
assert_equal(self.nodes[0].scantxoutset("start", ["combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/1500)"])['total_amount'], Decimal("16.384"))
assert_equal(self.nodes[0].scantxoutset("start", [{"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*')", "range": 1499}])['total_amount'], Decimal("1.536"))
assert_equal(self.nodes[0].scantxoutset("start", [{"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*')", "range": 1500}])['total_amount'], Decimal("3.584"))
assert_equal(self.nodes[0].scantxoutset("start", [{"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*)", "range": 1499}])['total_amount'], Decimal("12.288"))
assert_equal(self.nodes[0].scantxoutset("start", [{"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*)", "range": 1500}])['total_amount'], Decimal("28.672"))
assert_equal(self.nodes[0].scantxoutset("start", [{"desc": "combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/*)", "range": 1499}])['total_amount'], Decimal("12.288"))
assert_equal(self.nodes[0].scantxoutset("start", [{"desc": "combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/*)", "range": 1500}])['total_amount'], Decimal("28.672"))
assert_equal(self.nodes[0].scantxoutset("start", [{"desc": "combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/*)", "range": [1500, 1500]}])['total_amount'], Decimal("16.384"))
# Test the reported descriptors for a few matches
assert_equal(descriptors(self.nodes[0].scantxoutset("start", [{"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/0h/*)", "range": 1499}])), ["pkh([0c5f9a1e/0h/0h/0]026dbd8b2315f296d36e6b6920b1579ca75569464875c7ebe869b536a7d9503c8c)#rthll0rg", "pkh([0c5f9a1e/0h/0h/1]033e6f25d76c00bedb3a8993c7d5739ee806397f0529b1b31dda31ef890f19a60c)#mcjajulr"])
assert_equal(descriptors(self.nodes[0].scantxoutset("start", ["combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/0)"])), ["pkh([0c5f9a1e/1/1/0]03e1c5b6e650966971d7e71ef2674f80222752740fc1dfd63bbbd220d2da9bd0fb)#cxmct4w8"])
assert_equal(descriptors(self.nodes[0].scantxoutset("start", [{"desc": "combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/*)", "range": 1500}])), ['pkh([0c5f9a1e/1/1/0]03e1c5b6e650966971d7e71ef2674f80222752740fc1dfd63bbbd220d2da9bd0fb)#cxmct4w8', 'pkh([0c5f9a1e/1/1/1500]03832901c250025da2aebae2bfb38d5c703a57ab66ad477f9c578bfbcd78abca6f)#vchwd07g', 'pkh([0c5f9a1e/1/1/1]030d820fc9e8211c4169be8530efbc632775d8286167afd178caaf1089b77daba7)#z2t3ypsa'])
# Check that status and abort don't need second arg
assert_equal(self.nodes[0].scantxoutset("status"), None)
assert_equal(self.nodes[0].scantxoutset("abort"), False)
        # Check that first arg is needed
assert_raises_rpc_error(-1, "scantxoutset \"action\" ( [scanobjects,...] )", self.nodes[0].scantxoutset)
# Check that second arg is needed for start
assert_raises_rpc_error(-1, "scanobjects argument is required for the start action", self.nodes[0].scantxoutset, "start")
        # Check that an invalid action gives an error
assert_raises_rpc_error(-8, "Invalid action 'invalid_command'", self.nodes[0].scantxoutset, "invalid_command")
if __name__ == "__main__":
ScantxoutsetTest().main()
bitcoin/test | bitcoin/test/functional/rpc_signmessagewithprivkey.py
#!/usr/bin/env python3
# Copyright (c) 2016-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test RPC commands for signing messages with private key."""
from test_framework.descriptors import (
descsum_create,
)
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
)
class SignMessagesWithPrivTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
def addresses_from_privkey(self, priv_key):
'''Return addresses for a given WIF private key in legacy (P2PKH),
nested segwit (P2SH-P2WPKH) and native segwit (P2WPKH) formats.'''
descriptors = f'pkh({priv_key})', f'sh(wpkh({priv_key}))', f'wpkh({priv_key})'
return [self.nodes[0].deriveaddresses(descsum_create(desc))[0] for desc in descriptors]
def run_test(self):
message = 'This is just a test message'
self.log.info('test signing with priv_key')
priv_key = 'cUeKHd5orzT3mz8P9pxyREHfsWtVfgsfDjiZZBcjUBAaGk1BTj7N'
expected_signature = 'INbVnW4e6PeRmsv2Qgu8NuopvrVjkcxob+sX8OcZG0SALhWybUjzMLPdAsXI46YZGb0KQTRii+wWIQzRpG/U+S0='
signature = self.nodes[0].signmessagewithprivkey(priv_key, message)
assert_equal(expected_signature, signature)
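        # The signature is a base64-encoded 65-byte compact recoverable ECDSA
        # signature over the double-SHA256 of the message, hashed together with
        # the "Bitcoin Signed Message:\n" magic prefix.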
self.log.info('test that verifying with P2PKH address succeeds')
addresses = self.addresses_from_privkey(priv_key)
assert_equal(addresses[0], 'mpLQjfK79b7CCV4VMJWEWAj5Mpx8Up5zxB')
assert self.nodes[0].verifymessage(addresses[0], signature, message)
self.log.info('test that verifying with non-P2PKH addresses throws error')
for non_p2pkh_address in addresses[1:]:
assert_raises_rpc_error(-3, "Address does not refer to key", self.nodes[0].verifymessage, non_p2pkh_address, signature, message)
self.log.info('test parameter validity and error codes')
# signmessagewithprivkey has two required parameters
for num_params in [0, 1, 3, 4, 5]:
param_list = ["dummy"]*num_params
assert_raises_rpc_error(-1, "signmessagewithprivkey", self.nodes[0].signmessagewithprivkey, *param_list)
# verifymessage has three required parameters
for num_params in [0, 1, 2, 4, 5]:
param_list = ["dummy"]*num_params
assert_raises_rpc_error(-1, "verifymessage", self.nodes[0].verifymessage, *param_list)
# invalid key or address provided
assert_raises_rpc_error(-5, "Invalid private key", self.nodes[0].signmessagewithprivkey, "invalid_key", message)
assert_raises_rpc_error(-5, "Invalid address", self.nodes[0].verifymessage, "invalid_addr", signature, message)
# malformed signature provided
assert_raises_rpc_error(-3, "Malformed base64 encoding", self.nodes[0].verifymessage, 'mpLQjfK79b7CCV4VMJWEWAj5Mpx8Up5zxB', "invalid_sig", message)
if __name__ == '__main__':
SignMessagesWithPrivTest().main()
bitcoin/test | bitcoin/test/functional/feature_rbf.py
#!/usr/bin/env python3
# Copyright (c) 2014-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the RBF code."""
from decimal import Decimal
from test_framework.messages import (
MAX_BIP125_RBF_SEQUENCE,
COIN,
SEQUENCE_FINAL,
)
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
)
from test_framework.wallet import MiniWallet
from test_framework.address import ADDRESS_BCRT1_UNSPENDABLE
MAX_REPLACEMENT_LIMIT = 100
class ReplaceByFeeTest(BitcoinTestFramework):
def add_options(self, parser):
self.add_wallet_options(parser)
def set_test_params(self):
self.num_nodes = 2
self.extra_args = [
[
"-maxorphantx=1000",
"-limitancestorcount=50",
"-limitancestorsize=101",
"-limitdescendantcount=200",
"-limitdescendantsize=101",
],
# second node has default mempool parameters
[
],
]
self.supports_cli = False
def run_test(self):
self.wallet = MiniWallet(self.nodes[0])
self.log.info("Running test simple doublespend...")
self.test_simple_doublespend()
self.log.info("Running test doublespend chain...")
self.test_doublespend_chain()
self.log.info("Running test doublespend tree...")
self.test_doublespend_tree()
self.log.info("Running test replacement feeperkb...")
self.test_replacement_feeperkb()
self.log.info("Running test spends of conflicting outputs...")
self.test_spends_of_conflicting_outputs()
self.log.info("Running test new unconfirmed inputs...")
self.test_new_unconfirmed_inputs()
self.log.info("Running test too many replacements...")
self.test_too_many_replacements()
self.log.info("Running test too many replacements using default mempool params...")
self.test_too_many_replacements_with_default_mempool_params()
self.log.info("Running test opt-in...")
self.test_opt_in()
self.log.info("Running test RPC...")
self.test_rpc()
self.log.info("Running test prioritised transactions...")
self.test_prioritised_transactions()
self.log.info("Running test no inherited signaling...")
self.test_no_inherited_signaling()
self.log.info("Running test replacement relay fee...")
self.test_replacement_relay_fee()
self.log.info("Running test full replace by fee...")
self.test_fullrbf()
self.log.info("Passed")
def make_utxo(self, node, amount, *, confirmed=True, scriptPubKey=None):
"""Create a txout with a given amount and scriptPubKey
confirmed - txout created will be confirmed in the blockchain;
unconfirmed otherwise.
"""
tx = self.wallet.send_to(from_node=node, scriptPubKey=scriptPubKey or self.wallet.get_scriptPubKey(), amount=amount)
if confirmed:
mempool_size = len(node.getrawmempool())
while mempool_size > 0:
self.generate(node, 1)
new_size = len(node.getrawmempool())
# Error out if we have something stuck in the mempool, as this
# would likely be a bug.
assert new_size < mempool_size
mempool_size = new_size
return self.wallet.get_utxo(txid=tx["txid"], vout=tx["sent_vout"])
def test_simple_doublespend(self):
"""Simple doublespend"""
# we use MiniWallet to create a transaction template with inputs correctly set,
# and modify the output (amount, scriptPubKey) according to our needs
tx = self.wallet.create_self_transfer()["tx"]
tx1a_txid = self.nodes[0].sendrawtransaction(tx.serialize().hex())
        # Should fail because we haven't changed the fee (the script tweak below only changes the txid)
tx.vout[0].scriptPubKey[-1] ^= 1
# This will raise an exception due to insufficient fee
assert_raises_rpc_error(-26, "insufficient fee", self.nodes[0].sendrawtransaction, tx.serialize().hex(), 0)
# Extra 0.1 BTC fee
tx.vout[0].nValue -= int(0.1 * COIN)
tx1b_hex = tx.serialize().hex()
# Works when enabled
tx1b_txid = self.nodes[0].sendrawtransaction(tx1b_hex, 0)
mempool = self.nodes[0].getrawmempool()
assert tx1a_txid not in mempool
assert tx1b_txid in mempool
assert_equal(tx1b_hex, self.nodes[0].getrawtransaction(tx1b_txid))
def test_doublespend_chain(self):
"""Doublespend of a long chain"""
initial_nValue = 5 * COIN
tx0_outpoint = self.make_utxo(self.nodes[0], initial_nValue)
prevout = tx0_outpoint
remaining_value = initial_nValue
chain_txids = []
while remaining_value > 1 * COIN:
remaining_value -= int(0.1 * COIN)
prevout = self.wallet.send_self_transfer(
from_node=self.nodes[0],
utxo_to_spend=prevout,
sequence=0,
fee=Decimal("0.1"),
)["new_utxo"]
chain_txids.append(prevout["txid"])
# Whether the double-spend is allowed is evaluated by including all
# child fees - 4 BTC - so this attempt is rejected.
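        # (The chain above pays 0.1 BTC per step over 40 steps, 4 BTC in total,
        # so a conflicting fee of 3 BTC is below the required absolute fee.)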
dbl_tx = self.wallet.create_self_transfer(
utxo_to_spend=tx0_outpoint,
sequence=0,
fee=Decimal("3"),
)["tx"]
dbl_tx_hex = dbl_tx.serialize().hex()
# This will raise an exception due to insufficient fee
assert_raises_rpc_error(-26, "insufficient fee", self.nodes[0].sendrawtransaction, dbl_tx_hex, 0)
# Accepted with sufficient fee
dbl_tx.vout[0].nValue = int(0.1 * COIN)
dbl_tx_hex = dbl_tx.serialize().hex()
self.nodes[0].sendrawtransaction(dbl_tx_hex, 0)
mempool = self.nodes[0].getrawmempool()
for doublespent_txid in chain_txids:
assert doublespent_txid not in mempool
def test_doublespend_tree(self):
"""Doublespend of a big tree of transactions"""
initial_nValue = 5 * COIN
tx0_outpoint = self.make_utxo(self.nodes[0], initial_nValue)
def branch(prevout, initial_value, max_txs, tree_width=5, fee=0.00001 * COIN, _total_txs=None):
if _total_txs is None:
_total_txs = [0]
if _total_txs[0] >= max_txs:
return
txout_value = (initial_value - fee) // tree_width
if txout_value < fee:
return
tx = self.wallet.send_self_transfer_multi(
utxos_to_spend=[prevout],
from_node=self.nodes[0],
sequence=0,
num_outputs=tree_width,
amount_per_output=txout_value,
)
yield tx["txid"]
_total_txs[0] += 1
for utxo in tx["new_utxos"]:
for x in branch(utxo, txout_value,
max_txs,
tree_width=tree_width, fee=fee,
_total_txs=_total_txs):
yield x
fee = int(0.00001 * COIN)
n = MAX_REPLACEMENT_LIMIT
tree_txs = list(branch(tx0_outpoint, initial_nValue, n, fee=fee))
assert_equal(len(tree_txs), n)
# Attempt double-spend, will fail because too little fee paid
dbl_tx_hex = self.wallet.create_self_transfer(
utxo_to_spend=tx0_outpoint,
sequence=0,
fee=(Decimal(fee) / COIN) * n,
)["hex"]
# This will raise an exception due to insufficient fee
assert_raises_rpc_error(-26, "insufficient fee", self.nodes[0].sendrawtransaction, dbl_tx_hex, 0)
# 0.1 BTC fee is enough
dbl_tx_hex = self.wallet.create_self_transfer(
utxo_to_spend=tx0_outpoint,
sequence=0,
fee=(Decimal(fee) / COIN) * n + Decimal("0.1"),
)["hex"]
self.nodes[0].sendrawtransaction(dbl_tx_hex, 0)
mempool = self.nodes[0].getrawmempool()
for txid in tree_txs:
assert txid not in mempool
# Try again, but with more total transactions than the "max txs
# double-spent at once" anti-DoS limit.
for n in (MAX_REPLACEMENT_LIMIT + 1, MAX_REPLACEMENT_LIMIT * 2):
fee = int(0.00001 * COIN)
tx0_outpoint = self.make_utxo(self.nodes[0], initial_nValue)
tree_txs = list(branch(tx0_outpoint, initial_nValue, n, fee=fee))
assert_equal(len(tree_txs), n)
dbl_tx_hex = self.wallet.create_self_transfer(
utxo_to_spend=tx0_outpoint,
sequence=0,
fee=2 * (Decimal(fee) / COIN) * n,
)["hex"]
# This will raise an exception
assert_raises_rpc_error(-26, "too many potential replacements", self.nodes[0].sendrawtransaction, dbl_tx_hex, 0)
for txid in tree_txs:
self.nodes[0].getrawtransaction(txid)
def test_replacement_feeperkb(self):
"""Replacement requires fee-per-KB to be higher"""
tx0_outpoint = self.make_utxo(self.nodes[0], int(1.1 * COIN))
self.wallet.send_self_transfer(
from_node=self.nodes[0],
utxo_to_spend=tx0_outpoint,
sequence=0,
fee=Decimal("0.1"),
)
# Higher fee, but the fee per KB is much lower, so the replacement is
# rejected.
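        # (Roughly: the replacement pays ~1.099 BTC in fees, i.e. the 1.1 BTC
        # input minus 100 outputs of 1000 sat, but its 100 outputs make it
        # several kvB large, so its feerate ends up below that of the small
        # original paying 0.1 BTC.)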
tx1b_hex = self.wallet.create_self_transfer_multi(
utxos_to_spend=[tx0_outpoint],
sequence=0,
num_outputs=100,
amount_per_output=1000,
)["hex"]
# This will raise an exception due to insufficient fee
assert_raises_rpc_error(-26, "insufficient fee", self.nodes[0].sendrawtransaction, tx1b_hex, 0)
def test_spends_of_conflicting_outputs(self):
"""Replacements that spend conflicting tx outputs are rejected"""
utxo1 = self.make_utxo(self.nodes[0], int(1.2 * COIN))
utxo2 = self.make_utxo(self.nodes[0], 3 * COIN)
tx1a_utxo = self.wallet.send_self_transfer(
from_node=self.nodes[0],
utxo_to_spend=utxo1,
sequence=0,
fee=Decimal("0.1"),
)["new_utxo"]
        # Directly spend an output of the transaction we're replacing.
tx2_hex = self.wallet.create_self_transfer_multi(
utxos_to_spend=[utxo1, utxo2, tx1a_utxo],
sequence=0,
amount_per_output=int(COIN * tx1a_utxo["value"]),
)["hex"]
# This will raise an exception
assert_raises_rpc_error(-26, "bad-txns-spends-conflicting-tx", self.nodes[0].sendrawtransaction, tx2_hex, 0)
# Spend tx1a's output to test the indirect case.
tx1b_utxo = self.wallet.send_self_transfer(
from_node=self.nodes[0],
utxo_to_spend=tx1a_utxo,
sequence=0,
fee=Decimal("0.1"),
)["new_utxo"]
tx2_hex = self.wallet.create_self_transfer_multi(
utxos_to_spend=[utxo1, utxo2, tx1b_utxo],
sequence=0,
amount_per_output=int(COIN * tx1a_utxo["value"]),
)["hex"]
# This will raise an exception
assert_raises_rpc_error(-26, "bad-txns-spends-conflicting-tx", self.nodes[0].sendrawtransaction, tx2_hex, 0)
def test_new_unconfirmed_inputs(self):
"""Replacements that add new unconfirmed inputs are rejected"""
confirmed_utxo = self.make_utxo(self.nodes[0], int(1.1 * COIN))
unconfirmed_utxo = self.make_utxo(self.nodes[0], int(0.1 * COIN), confirmed=False)
self.wallet.send_self_transfer(
from_node=self.nodes[0],
utxo_to_spend=confirmed_utxo,
sequence=0,
fee=Decimal("0.1"),
)
tx2_hex = self.wallet.create_self_transfer_multi(
utxos_to_spend=[confirmed_utxo, unconfirmed_utxo],
sequence=0,
amount_per_output=1 * COIN,
)["hex"]
# This will raise an exception
assert_raises_rpc_error(-26, "replacement-adds-unconfirmed", self.nodes[0].sendrawtransaction, tx2_hex, 0)
def test_too_many_replacements(self):
"""Replacements that evict too many transactions are rejected"""
# Try directly replacing more than MAX_REPLACEMENT_LIMIT
# transactions
# Start by creating a single transaction with many outputs
initial_nValue = 10 * COIN
utxo = self.make_utxo(self.nodes[0], initial_nValue)
fee = int(0.0001 * COIN)
split_value = int((initial_nValue - fee) / (MAX_REPLACEMENT_LIMIT + 1))
splitting_tx_utxos = self.wallet.send_self_transfer_multi(
from_node=self.nodes[0],
utxos_to_spend=[utxo],
sequence=0,
num_outputs=MAX_REPLACEMENT_LIMIT + 1,
amount_per_output=split_value,
)["new_utxos"]
# Now spend each of those outputs individually
for utxo in splitting_tx_utxos:
self.wallet.send_self_transfer(
from_node=self.nodes[0],
utxo_to_spend=utxo,
sequence=0,
fee=Decimal(fee) / COIN,
)
        # Now create a doublespend of the whole lot; it should fail.
# Need a big enough fee to cover all spending transactions and have
# a higher fee rate
double_spend_value = (split_value - 100 * fee) * (MAX_REPLACEMENT_LIMIT + 1)
double_tx = self.wallet.create_self_transfer_multi(
utxos_to_spend=splitting_tx_utxos,
sequence=0,
amount_per_output=double_spend_value,
)["tx"]
double_tx_hex = double_tx.serialize().hex()
# This will raise an exception
assert_raises_rpc_error(-26, "too many potential replacements", self.nodes[0].sendrawtransaction, double_tx_hex, 0)
# If we remove an input, it should pass
double_tx.vin.pop()
double_tx_hex = double_tx.serialize().hex()
self.nodes[0].sendrawtransaction(double_tx_hex, 0)
def test_too_many_replacements_with_default_mempool_params(self):
"""
Test rule 5 (do not allow replacements that cause more than 100
evictions) without having to rely on non-default mempool parameters.
In order to do this, create a number of "root" UTXOs, and then hang
enough transactions off of each root UTXO to exceed the MAX_REPLACEMENT_LIMIT.
Then create a conflicting RBF replacement transaction.
"""
# Clear mempools to avoid cross-node sync failure.
for node in self.nodes:
self.generate(node, 1)
normal_node = self.nodes[1]
wallet = MiniWallet(normal_node)
# This has to be chosen so that the total number of transactions can exceed
# MAX_REPLACEMENT_LIMIT without having any one tx graph run into the descendant
# limit; 10 works.
num_tx_graphs = 10
# (Number of transactions per graph, rule 5 failure expected)
cases = [
# Test the base case of evicting fewer than MAX_REPLACEMENT_LIMIT
# transactions.
((MAX_REPLACEMENT_LIMIT // num_tx_graphs) - 1, False),
# Test hitting the rule 5 eviction limit.
(MAX_REPLACEMENT_LIMIT // num_tx_graphs, True),
]
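        # With num_tx_graphs = 10 this works out to 10 roots + 90 children
        # = 100 evicted txs in the first case (at the limit, allowed) and
        # 10 + 100 = 110 in the second (over the limit, rejected); see the
        # num_txs_invalidated computation below.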
for (txs_per_graph, failure_expected) in cases:
self.log.debug(f"txs_per_graph: {txs_per_graph}, failure: {failure_expected}")
# "Root" utxos of each txn graph that we will attempt to double-spend with
# an RBF replacement.
root_utxos = []
# For each root UTXO, create a package that contains the spend of that
# UTXO and `txs_per_graph` children tx.
for graph_num in range(num_tx_graphs):
root_utxos.append(wallet.get_utxo())
optin_parent_tx = wallet.send_self_transfer_multi(
from_node=normal_node,
sequence=MAX_BIP125_RBF_SEQUENCE,
utxos_to_spend=[root_utxos[graph_num]],
num_outputs=txs_per_graph,
)
assert_equal(True, normal_node.getmempoolentry(optin_parent_tx['txid'])['bip125-replaceable'])
new_utxos = optin_parent_tx['new_utxos']
for utxo in new_utxos:
# Create spends for each output from the "root" of this graph.
child_tx = wallet.send_self_transfer(
from_node=normal_node,
utxo_to_spend=utxo,
)
assert normal_node.getmempoolentry(child_tx['txid'])
num_txs_invalidated = len(root_utxos) + (num_tx_graphs * txs_per_graph)
if failure_expected:
assert num_txs_invalidated > MAX_REPLACEMENT_LIMIT
else:
assert num_txs_invalidated <= MAX_REPLACEMENT_LIMIT
# Now attempt to submit a tx that double-spends all the root tx inputs, which
# would invalidate `num_txs_invalidated` transactions.
tx_hex = wallet.create_self_transfer_multi(
utxos_to_spend=root_utxos,
fee_per_output=10_000_000, # absurdly high feerate
)["hex"]
if failure_expected:
assert_raises_rpc_error(
-26, "too many potential replacements", normal_node.sendrawtransaction, tx_hex, 0)
else:
txid = normal_node.sendrawtransaction(tx_hex, 0)
assert normal_node.getmempoolentry(txid)
# Clear the mempool once finished, and rescan the other nodes' wallet
# to account for the spends we've made on `normal_node`.
self.generate(normal_node, 1)
self.wallet.rescan_utxos()
def test_opt_in(self):
"""Replacing should only work if orig tx opted in"""
tx0_outpoint = self.make_utxo(self.nodes[0], int(1.1 * COIN))
# Create a non-opting in transaction
tx1a_utxo = self.wallet.send_self_transfer(
from_node=self.nodes[0],
utxo_to_spend=tx0_outpoint,
sequence=SEQUENCE_FINAL,
fee=Decimal("0.1"),
)["new_utxo"]
# This transaction isn't shown as replaceable
assert_equal(self.nodes[0].getmempoolentry(tx1a_utxo["txid"])['bip125-replaceable'], False)
# Shouldn't be able to double-spend
tx1b_hex = self.wallet.create_self_transfer(
utxo_to_spend=tx0_outpoint,
sequence=0,
fee=Decimal("0.2"),
)["hex"]
# This will raise an exception
assert_raises_rpc_error(-26, "txn-mempool-conflict", self.nodes[0].sendrawtransaction, tx1b_hex, 0)
tx1_outpoint = self.make_utxo(self.nodes[0], int(1.1 * COIN))
# Create a different non-opting in transaction
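        # (nSequence 0xfffffffe is above MAX_BIP125_RBF_SEQUENCE (0xfffffffd),
        # so this input does not signal replaceability either)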
tx2a_utxo = self.wallet.send_self_transfer(
from_node=self.nodes[0],
utxo_to_spend=tx1_outpoint,
sequence=0xfffffffe,
fee=Decimal("0.1"),
)["new_utxo"]
# Still shouldn't be able to double-spend
tx2b_hex = self.wallet.create_self_transfer(
utxo_to_spend=tx1_outpoint,
sequence=0,
fee=Decimal("0.2"),
)["hex"]
# This will raise an exception
assert_raises_rpc_error(-26, "txn-mempool-conflict", self.nodes[0].sendrawtransaction, tx2b_hex, 0)
        # Now create a new transaction that spends from tx1a and tx2a,
        # opting in on only one of the inputs.
        # Since the transaction signals as a whole, it should be replaceable
        # via a conflict on either input.
tx3a_txid = self.wallet.send_self_transfer_multi(
from_node=self.nodes[0],
utxos_to_spend=[tx1a_utxo, tx2a_utxo],
sequence=[SEQUENCE_FINAL, 0xfffffffd],
fee_per_output=int(0.1 * COIN),
)["txid"]
# This transaction is shown as replaceable
assert_equal(self.nodes[0].getmempoolentry(tx3a_txid)['bip125-replaceable'], True)
self.wallet.send_self_transfer(
from_node=self.nodes[0],
utxo_to_spend=tx1a_utxo,
sequence=0,
fee=Decimal("0.4"),
)
        # tx3a was just replaced by the spend of tx1a's output above (tx3b), so
        # this spend of tx2a's output (tx3c) won't look like a replacement,
        # but make sure it is accepted anyway
self.wallet.send_self_transfer(
from_node=self.nodes[0],
utxo_to_spend=tx2a_utxo,
sequence=0,
fee=Decimal("0.4"),
)
def test_prioritised_transactions(self):
# Ensure that fee deltas used via prioritisetransaction are
# correctly used by replacement logic
# 1. Check that feeperkb uses modified fees
tx0_outpoint = self.make_utxo(self.nodes[0], int(1.1 * COIN))
tx1a_txid = self.wallet.send_self_transfer(
from_node=self.nodes[0],
utxo_to_spend=tx0_outpoint,
sequence=0,
fee=Decimal("0.1"),
)["txid"]
# Higher fee, but the actual fee per KB is much lower.
tx1b_hex = self.wallet.create_self_transfer_multi(
utxos_to_spend=[tx0_outpoint],
sequence=0,
num_outputs=100,
amount_per_output=int(0.00001 * COIN),
)["hex"]
# Verify tx1b cannot replace tx1a.
assert_raises_rpc_error(-26, "insufficient fee", self.nodes[0].sendrawtransaction, tx1b_hex, 0)
# Use prioritisetransaction to set tx1a's fee to 0.
self.nodes[0].prioritisetransaction(txid=tx1a_txid, fee_delta=int(-0.1 * COIN))
# Now tx1b should be able to replace tx1a
tx1b_txid = self.nodes[0].sendrawtransaction(tx1b_hex, 0)
assert tx1b_txid in self.nodes[0].getrawmempool()
# 2. Check that absolute fee checks use modified fee.
tx1_outpoint = self.make_utxo(self.nodes[0], int(1.1 * COIN))
# tx2a
self.wallet.send_self_transfer(
from_node=self.nodes[0],
utxo_to_spend=tx1_outpoint,
sequence=0,
fee=Decimal("0.1"),
)
# Lower fee, but we'll prioritise it
tx2b = self.wallet.create_self_transfer(
utxo_to_spend=tx1_outpoint,
sequence=0,
fee=Decimal("0.09"),
)
# Verify tx2b cannot replace tx2a.
assert_raises_rpc_error(-26, "insufficient fee", self.nodes[0].sendrawtransaction, tx2b["hex"], 0)
# Now prioritise tx2b to have a higher modified fee
self.nodes[0].prioritisetransaction(txid=tx2b["txid"], fee_delta=int(0.1 * COIN))
# tx2b should now be accepted
tx2b_txid = self.nodes[0].sendrawtransaction(tx2b["hex"], 0)
assert tx2b_txid in self.nodes[0].getrawmempool()
def test_rpc(self):
us0 = self.wallet.get_utxo()
ins = [us0]
outs = {ADDRESS_BCRT1_UNSPENDABLE: Decimal(1.0000000)}
rawtx0 = self.nodes[0].createrawtransaction(ins, outs, 0, True)
rawtx1 = self.nodes[0].createrawtransaction(ins, outs, 0, False)
json0 = self.nodes[0].decoderawtransaction(rawtx0)
json1 = self.nodes[0].decoderawtransaction(rawtx1)
assert_equal(json0["vin"][0]["sequence"], 4294967293)
assert_equal(json1["vin"][0]["sequence"], 4294967295)
if self.is_specified_wallet_compiled():
self.init_wallet(node=0)
rawtx2 = self.nodes[0].createrawtransaction([], outs)
frawtx2a = self.nodes[0].fundrawtransaction(rawtx2, {"replaceable": True})
frawtx2b = self.nodes[0].fundrawtransaction(rawtx2, {"replaceable": False})
json0 = self.nodes[0].decoderawtransaction(frawtx2a['hex'])
json1 = self.nodes[0].decoderawtransaction(frawtx2b['hex'])
assert_equal(json0["vin"][0]["sequence"], 4294967293)
assert_equal(json1["vin"][0]["sequence"], 4294967294)
def test_no_inherited_signaling(self):
confirmed_utxo = self.wallet.get_utxo()
# Create an explicitly opt-in parent transaction
optin_parent_tx = self.wallet.send_self_transfer(
from_node=self.nodes[0],
utxo_to_spend=confirmed_utxo,
sequence=MAX_BIP125_RBF_SEQUENCE,
fee_rate=Decimal('0.01'),
)
assert_equal(True, self.nodes[0].getmempoolentry(optin_parent_tx['txid'])['bip125-replaceable'])
replacement_parent_tx = self.wallet.create_self_transfer(
utxo_to_spend=confirmed_utxo,
sequence=MAX_BIP125_RBF_SEQUENCE,
fee_rate=Decimal('0.02'),
)
# Test if parent tx can be replaced.
res = self.nodes[0].testmempoolaccept(rawtxs=[replacement_parent_tx['hex']])[0]
# Parent can be replaced.
assert_equal(res['allowed'], True)
# Create an opt-out child tx spending the opt-in parent
parent_utxo = self.wallet.get_utxo(txid=optin_parent_tx['txid'])
optout_child_tx = self.wallet.send_self_transfer(
from_node=self.nodes[0],
utxo_to_spend=parent_utxo,
sequence=SEQUENCE_FINAL,
fee_rate=Decimal('0.01'),
)
# Reports true due to inheritance
assert_equal(True, self.nodes[0].getmempoolentry(optout_child_tx['txid'])['bip125-replaceable'])
replacement_child_tx = self.wallet.create_self_transfer(
utxo_to_spend=parent_utxo,
sequence=SEQUENCE_FINAL,
fee_rate=Decimal('0.02'),
)
# Broadcast replacement child tx
# BIP 125 :
# 1. The original transactions signal replaceability explicitly or through inheritance as described in the above
# Summary section.
# The original transaction (`optout_child_tx`) doesn't signal RBF but its parent (`optin_parent_tx`) does.
# The replacement transaction (`replacement_child_tx`) should be able to replace the original transaction.
# See CVE-2021-31876 for further explanations.
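        # Note: Bitcoin Core does not implement BIP125 inherited signaling for
        # replacement (the subject of CVE-2021-31876), so the conflicting child
        # is rejected below even though BIP125 would allow the replacement.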
assert_equal(True, self.nodes[0].getmempoolentry(optin_parent_tx['txid'])['bip125-replaceable'])
assert_raises_rpc_error(-26, 'txn-mempool-conflict', self.nodes[0].sendrawtransaction, replacement_child_tx["hex"], 0)
self.log.info('Check that the child tx can still be replaced (via a tx that also replaces the parent)')
replacement_parent_tx = self.wallet.send_self_transfer(
from_node=self.nodes[0],
utxo_to_spend=confirmed_utxo,
sequence=SEQUENCE_FINAL,
fee_rate=Decimal('0.03'),
)
# Check that child is removed and update wallet utxo state
assert_raises_rpc_error(-5, 'Transaction not in mempool', self.nodes[0].getmempoolentry, optout_child_tx['txid'])
self.wallet.get_utxo(txid=optout_child_tx['txid'])
def test_replacement_relay_fee(self):
tx = self.wallet.send_self_transfer(from_node=self.nodes[0])['tx']
# Higher fee, higher feerate, different txid, but the replacement does not provide a relay
# fee conforming to node's `incrementalrelayfee` policy of 1000 sat per KB.
assert_equal(self.nodes[0].getmempoolinfo()["incrementalrelayfee"], Decimal("0.00001"))
tx.vout[0].nValue -= 1
assert_raises_rpc_error(-26, "insufficient fee", self.nodes[0].sendrawtransaction, tx.serialize().hex())
def test_fullrbf(self):
confirmed_utxo = self.make_utxo(self.nodes[0], int(2 * COIN))
self.restart_node(0, extra_args=["-mempoolfullrbf=1"])
assert self.nodes[0].getmempoolinfo()["fullrbf"]
# Create an explicitly opt-out transaction
optout_tx = self.wallet.send_self_transfer(
from_node=self.nodes[0],
utxo_to_spend=confirmed_utxo,
sequence=MAX_BIP125_RBF_SEQUENCE + 1,
fee_rate=Decimal('0.01'),
)
assert_equal(False, self.nodes[0].getmempoolentry(optout_tx['txid'])['bip125-replaceable'])
conflicting_tx = self.wallet.create_self_transfer(
utxo_to_spend=confirmed_utxo,
sequence=SEQUENCE_FINAL,
fee_rate=Decimal('0.02'),
)
# Send the replacement transaction, conflicting with the optout_tx.
self.nodes[0].sendrawtransaction(conflicting_tx['hex'], 0)
        # optout_tx is no longer in the mempool.
assert optout_tx['txid'] not in self.nodes[0].getrawmempool()
assert conflicting_tx['txid'] in self.nodes[0].getrawmempool()
if __name__ == '__main__':
ReplaceByFeeTest().main()
bitcoin/test | bitcoin/test/functional/feature_csv_activation.py
#!/usr/bin/env python3
# Copyright (c) 2015-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test CSV soft fork activation.
This soft fork will activate the following BIPS:
BIP 68 - nSequence relative lock times
BIP 112 - CHECKSEQUENCEVERIFY
BIP 113 - MedianTimePast semantics for nLockTime
mine 83 blocks whose coinbases will be used to generate inputs for our tests
mine 344 blocks to reach height 427, then mine the block containing the 83 inputs used for our tests (height 428)
mine 2 blocks and verify soft fork not yet activated
mine 1 block and test that soft fork is activated (rules enforced for next block)
Test BIP 113 is enforced
Mine 4 blocks so next height is 436 and test BIP 68 is enforced for time and height
Mine 1 block so next height is 437 and test BIP 68 now passes time but not height
Mine 1 block so next height is 438 and test BIP 68 now passes time and height
Test that BIP 112 is enforced
Various transactions are used to test that the BIPs' rules are not enforced before the soft fork activates,
and that after activation transactions pass and fail as the rules require.
For each BIP, transactions of versions 1 and 2 will be tested.
----------------
BIP 113:
bip113tx - modify the nLocktime variable
BIP 68:
bip68txs - 16 txs with nSequence relative locktime of 10 with various bits set as per the relative_locktimes below
BIP 112:
bip112txs_vary_nSequence - 16 txs with nSequence relative_locktimes of 10 evaluated against 10 OP_CSV OP_DROP
bip112txs_vary_nSequence_9 - 16 txs with nSequence relative_locktimes of 9 evaluated against 10 OP_CSV OP_DROP
bip112txs_vary_OP_CSV - 16 txs with nSequence = 10 evaluated against varying {relative_locktimes of 10} OP_CSV OP_DROP
bip112txs_vary_OP_CSV_9 - 16 txs with nSequence = 9 evaluated against varying {relative_locktimes of 10} OP_CSV OP_DROP
bip112tx_special - test negative argument to OP_CSV
bip112tx_emptystack - test empty stack (= no argument) OP_CSV
"""
from itertools import product
import time
from test_framework.blocktools import (
create_block,
create_coinbase,
)
from test_framework.p2p import P2PDataStore
from test_framework.script import (
CScript,
OP_CHECKSEQUENCEVERIFY,
OP_DROP,
)
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
softfork_active,
)
from test_framework.wallet import (
MiniWallet,
MiniWalletMode,
)
TESTING_TX_COUNT = 83 # Number of testing transactions: 1 BIP113 tx, 16 BIP68 txs, 66 BIP112 txs (see comments above)
COINBASE_BLOCK_COUNT = TESTING_TX_COUNT # Number of coinbase blocks we need to generate as inputs for our txs
BASE_RELATIVE_LOCKTIME = 10
SEQ_DISABLE_FLAG = 1 << 31
SEQ_RANDOM_HIGH_BIT = 1 << 25
SEQ_TYPE_FLAG = 1 << 22
SEQ_RANDOM_LOW_BIT = 1 << 18
def relative_locktime(sdf, srhb, stf, srlb):
"""Returns a locktime with certain bits set."""
locktime = BASE_RELATIVE_LOCKTIME
if sdf:
locktime |= SEQ_DISABLE_FLAG
if srhb:
locktime |= SEQ_RANDOM_HIGH_BIT
if stf:
locktime |= SEQ_TYPE_FLAG
if srlb:
locktime |= SEQ_RANDOM_LOW_BIT
return locktime
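# For example (illustration only, not executed by the test): with only the
# type flag set, the result encodes a time-based lock of 10 * 512 seconds,
# while any value with SEQ_DISABLE_FLAG (bit 31) set is ignored by consensus:
#
#   >>> hex(relative_locktime(False, False, True, False))
#   '0x40000a'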
def all_rlt_txs(txs):
return [tx['tx'] for tx in txs]
CSV_ACTIVATION_HEIGHT = 432
class BIP68_112_113Test(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.setup_clean_chain = True
self.extra_args = [[
'[email protected]',
f'-testactivationheight=csv@{CSV_ACTIVATION_HEIGHT}',
'-par=1', # Use only one script thread to get the exact reject reason for testing
]]
self.supports_cli = False
def create_self_transfer_from_utxo(self, input_tx):
utxo = self.miniwallet.get_utxo(txid=input_tx.rehash(), mark_as_spent=False)
tx = self.miniwallet.create_self_transfer(utxo_to_spend=utxo)['tx']
return tx
def create_bip112special(self, input, txversion):
tx = self.create_self_transfer_from_utxo(input)
tx.nVersion = txversion
self.miniwallet.sign_tx(tx)
tx.vin[0].scriptSig = CScript([-1, OP_CHECKSEQUENCEVERIFY, OP_DROP] + list(CScript(tx.vin[0].scriptSig)))
tx.rehash()
return tx
def create_bip112emptystack(self, input, txversion):
tx = self.create_self_transfer_from_utxo(input)
tx.nVersion = txversion
self.miniwallet.sign_tx(tx)
tx.vin[0].scriptSig = CScript([OP_CHECKSEQUENCEVERIFY] + list(CScript(tx.vin[0].scriptSig)))
tx.rehash()
return tx
def send_generic_input_tx(self, coinbases):
input_txid = self.nodes[0].getblock(coinbases.pop(), 2)['tx'][0]['txid']
utxo_to_spend = self.miniwallet.get_utxo(txid=input_txid)
return self.miniwallet.send_self_transfer(from_node=self.nodes[0], utxo_to_spend=utxo_to_spend)['tx']
def create_bip68txs(self, bip68inputs, txversion, locktime_delta=0):
"""Returns a list of bip68 transactions with different bits set."""
txs = []
assert len(bip68inputs) >= 16
for i, (sdf, srhb, stf, srlb) in enumerate(product(*[[True, False]] * 4)):
locktime = relative_locktime(sdf, srhb, stf, srlb)
tx = self.create_self_transfer_from_utxo(bip68inputs[i])
tx.nVersion = txversion
tx.vin[0].nSequence = locktime + locktime_delta
self.miniwallet.sign_tx(tx)
txs.append({'tx': tx, 'sdf': sdf, 'stf': stf})
return txs
def create_bip112txs(self, bip112inputs, varyOP_CSV, txversion, locktime_delta=0):
"""Returns a list of bip68 transactions with different bits set."""
txs = []
assert len(bip112inputs) >= 16
for i, (sdf, srhb, stf, srlb) in enumerate(product(*[[True, False]] * 4)):
locktime = relative_locktime(sdf, srhb, stf, srlb)
tx = self.create_self_transfer_from_utxo(bip112inputs[i])
if varyOP_CSV: # if varying OP_CSV, nSequence is fixed
tx.vin[0].nSequence = BASE_RELATIVE_LOCKTIME + locktime_delta
else: # vary nSequence instead, OP_CSV is fixed
tx.vin[0].nSequence = locktime + locktime_delta
tx.nVersion = txversion
self.miniwallet.sign_tx(tx)
if varyOP_CSV:
tx.vin[0].scriptSig = CScript([locktime, OP_CHECKSEQUENCEVERIFY, OP_DROP] + list(CScript(tx.vin[0].scriptSig)))
else:
tx.vin[0].scriptSig = CScript([BASE_RELATIVE_LOCKTIME, OP_CHECKSEQUENCEVERIFY, OP_DROP] + list(CScript(tx.vin[0].scriptSig)))
tx.rehash()
txs.append({'tx': tx, 'sdf': sdf, 'stf': stf})
return txs
def generate_blocks(self, number):
test_blocks = []
for _ in range(number):
block = self.create_test_block([])
test_blocks.append(block)
self.last_block_time += 600
self.tip = block.sha256
self.tipheight += 1
return test_blocks
def create_test_block(self, txs):
block = create_block(self.tip, create_coinbase(self.tipheight + 1), self.last_block_time + 600, txlist=txs)
block.solve()
return block
def send_blocks(self, blocks, success=True, reject_reason=None):
"""Sends blocks to test node. Syncs and verifies that tip has advanced to most recent block.
Call with success = False if the tip shouldn't advance to the most recent block."""
self.helper_peer.send_blocks_and_test(blocks, self.nodes[0], success=success, reject_reason=reject_reason)
def run_test(self):
self.helper_peer = self.nodes[0].add_p2p_connection(P2PDataStore())
self.miniwallet = MiniWallet(self.nodes[0], mode=MiniWalletMode.RAW_P2PK)
self.log.info("Generate blocks in the past for coinbase outputs.")
long_past_time = int(time.time()) - 600 * 1000 # enough to build up to 1000 blocks 10 minutes apart without worrying about getting into the future
self.nodes[0].setmocktime(long_past_time - 100) # enough so that the generated blocks will still all be before long_past_time
self.coinbase_blocks = self.generate(self.miniwallet, COINBASE_BLOCK_COUNT) # blocks generated for inputs
self.nodes[0].setmocktime(0) # set time back to present so yielded blocks aren't in the future as we advance last_block_time
        self.tipheight = COINBASE_BLOCK_COUNT  # current height of the chain tip
self.last_block_time = long_past_time
self.tip = int(self.nodes[0].getbestblockhash(), 16)
# Activation height is hardcoded
# We advance to block height five below BIP112 activation for the following tests
test_blocks = self.generate_blocks(CSV_ACTIVATION_HEIGHT - 5 - COINBASE_BLOCK_COUNT)
self.send_blocks(test_blocks)
assert not softfork_active(self.nodes[0], 'csv')
        # Inputs at height = 428
        #
        # Put inputs for all tests in the chain at height 428 (tip now = 427) (time increases by 600s per block)
# Note we reuse inputs for v1 and v2 txs so must test these separately
# 16 normal inputs
bip68inputs = []
for _ in range(16):
bip68inputs.append(self.send_generic_input_tx(self.coinbase_blocks))
# 2 sets of 16 inputs with 10 OP_CSV OP_DROP (actually will be prepended to spending scriptSig)
bip112basicinputs = []
for _ in range(2):
inputs = []
for _ in range(16):
inputs.append(self.send_generic_input_tx(self.coinbase_blocks))
bip112basicinputs.append(inputs)
# 2 sets of 16 varied inputs with (relative_lock_time) OP_CSV OP_DROP (actually will be prepended to spending scriptSig)
bip112diverseinputs = []
for _ in range(2):
inputs = []
for _ in range(16):
inputs.append(self.send_generic_input_tx(self.coinbase_blocks))
bip112diverseinputs.append(inputs)
# 1 special input with -1 OP_CSV OP_DROP (actually will be prepended to spending scriptSig)
bip112specialinput = self.send_generic_input_tx(self.coinbase_blocks)
# 1 special input with (empty stack) OP_CSV (actually will be prepended to spending scriptSig)
bip112emptystackinput = self.send_generic_input_tx(self.coinbase_blocks)
# 1 normal input
bip113input = self.send_generic_input_tx(self.coinbase_blocks)
self.nodes[0].setmocktime(self.last_block_time + 600)
        inputblockhash = self.generate(self.nodes[0], 1)[0]  # 1 block generated for inputs to be in chain at height 428
self.nodes[0].setmocktime(0)
self.tip = int(inputblockhash, 16)
self.tipheight += 1
self.last_block_time += 600
assert_equal(len(self.nodes[0].getblock(inputblockhash, True)["tx"]), TESTING_TX_COUNT + 1)
        # 2 more blocks, bringing the tip to height 430
test_blocks = self.generate_blocks(2)
self.send_blocks(test_blocks)
assert_equal(self.tipheight, CSV_ACTIVATION_HEIGHT - 2)
self.log.info(f"Height = {self.tipheight}, CSV not yet active (will activate for block {CSV_ACTIVATION_HEIGHT}, not {CSV_ACTIVATION_HEIGHT - 1})")
assert not softfork_active(self.nodes[0], 'csv')
# Test both version 1 and version 2 transactions for all tests
# BIP113 test transaction will be modified before each use to put in appropriate block time
bip113tx_v1 = self.create_self_transfer_from_utxo(bip113input)
bip113tx_v1.vin[0].nSequence = 0xFFFFFFFE
bip113tx_v1.nVersion = 1
bip113tx_v2 = self.create_self_transfer_from_utxo(bip113input)
bip113tx_v2.vin[0].nSequence = 0xFFFFFFFE
bip113tx_v2.nVersion = 2
# For BIP68 test all 16 relative sequence locktimes
bip68txs_v1 = self.create_bip68txs(bip68inputs, 1)
bip68txs_v2 = self.create_bip68txs(bip68inputs, 2)
# For BIP112 test:
# 16 relative sequence locktimes of 10 against 10 OP_CSV OP_DROP inputs
bip112txs_vary_nSequence_v1 = self.create_bip112txs(bip112basicinputs[0], False, 1)
bip112txs_vary_nSequence_v2 = self.create_bip112txs(bip112basicinputs[0], False, 2)
# 16 relative sequence locktimes of 9 against 10 OP_CSV OP_DROP inputs
bip112txs_vary_nSequence_9_v1 = self.create_bip112txs(bip112basicinputs[1], False, 1, -1)
bip112txs_vary_nSequence_9_v2 = self.create_bip112txs(bip112basicinputs[1], False, 2, -1)
# sequence lock time of 10 against 16 (relative_lock_time) OP_CSV OP_DROP inputs
bip112txs_vary_OP_CSV_v1 = self.create_bip112txs(bip112diverseinputs[0], True, 1)
bip112txs_vary_OP_CSV_v2 = self.create_bip112txs(bip112diverseinputs[0], True, 2)
# sequence lock time of 9 against 16 (relative_lock_time) OP_CSV OP_DROP inputs
bip112txs_vary_OP_CSV_9_v1 = self.create_bip112txs(bip112diverseinputs[1], True, 1, -1)
bip112txs_vary_OP_CSV_9_v2 = self.create_bip112txs(bip112diverseinputs[1], True, 2, -1)
# -1 OP_CSV OP_DROP input
bip112tx_special_v1 = self.create_bip112special(bip112specialinput, 1)
bip112tx_special_v2 = self.create_bip112special(bip112specialinput, 2)
# (empty stack) OP_CSV input
bip112tx_emptystack_v1 = self.create_bip112emptystack(bip112emptystackinput, 1)
bip112tx_emptystack_v2 = self.create_bip112emptystack(bip112emptystackinput, 2)
self.log.info("TESTING")
self.log.info("Pre-Soft Fork Tests. All txs should pass.")
self.log.info("Test version 1 txs")
success_txs = []
# BIP113 tx, -1 CSV tx and empty stack CSV tx should succeed
bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block
self.miniwallet.sign_tx(bip113tx_v1)
success_txs.append(bip113tx_v1)
success_txs.append(bip112tx_special_v1)
success_txs.append(bip112tx_emptystack_v1)
# add BIP 68 txs
success_txs.extend(all_rlt_txs(bip68txs_v1))
# add BIP 112 with seq=10 txs
success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_v1))
success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_v1))
# try BIP 112 with seq=9 txs
success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v1))
success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_9_v1))
self.send_blocks([self.create_test_block(success_txs)])
self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
self.log.info("Test version 2 txs")
success_txs = []
# BIP113 tx, -1 CSV tx and empty stack CSV tx should succeed
bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block
self.miniwallet.sign_tx(bip113tx_v2)
success_txs.append(bip113tx_v2)
success_txs.append(bip112tx_special_v2)
success_txs.append(bip112tx_emptystack_v2)
# add BIP 68 txs
success_txs.extend(all_rlt_txs(bip68txs_v2))
# add BIP 112 with seq=10 txs
success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_v2))
success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_v2))
# try BIP 112 with seq=9 txs
success_txs.extend(all_rlt_txs(bip112txs_vary_nSequence_9_v2))
success_txs.extend(all_rlt_txs(bip112txs_vary_OP_CSV_9_v2))
self.send_blocks([self.create_test_block(success_txs)])
self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
        # 1 more block (tip = 431), so the fork is active for the next block (432)
assert not softfork_active(self.nodes[0], 'csv')
test_blocks = self.generate_blocks(1)
self.send_blocks(test_blocks)
assert softfork_active(self.nodes[0], 'csv')
self.log.info("Post-Soft Fork Tests.")
self.log.info("BIP 113 tests")
# BIP 113 tests should now fail regardless of version number if nLockTime isn't satisfied by new rules
bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block
self.miniwallet.sign_tx(bip113tx_v1)
bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 # = MTP of prior block (not <) but < time put on current block
self.miniwallet.sign_tx(bip113tx_v2)
for bip113tx in [bip113tx_v1, bip113tx_v2]:
self.send_blocks([self.create_test_block([bip113tx])], success=False, reject_reason='bad-txns-nonfinal')
# BIP 113 tests should now pass if the locktime is < MTP
bip113tx_v1.nLockTime = self.last_block_time - 600 * 5 - 1 # < MTP of prior block
self.miniwallet.sign_tx(bip113tx_v1)
bip113tx_v2.nLockTime = self.last_block_time - 600 * 5 - 1 # < MTP of prior block
self.miniwallet.sign_tx(bip113tx_v2)
for bip113tx in [bip113tx_v1, bip113tx_v2]:
self.send_blocks([self.create_test_block([bip113tx])])
self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
        # Mine 4 more blocks so the next block height is 436
test_blocks = self.generate_blocks(4)
self.send_blocks(test_blocks)
self.log.info("BIP 68 tests")
self.log.info("Test version 1 txs - all should still pass")
success_txs = []
success_txs.extend(all_rlt_txs(bip68txs_v1))
self.send_blocks([self.create_test_block(success_txs)])
self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
self.log.info("Test version 2 txs")
# All txs with SEQUENCE_LOCKTIME_DISABLE_FLAG set pass
bip68success_txs = [tx['tx'] for tx in bip68txs_v2 if tx['sdf']]
self.send_blocks([self.create_test_block(bip68success_txs)])
self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
# All txs without flag fail as we are at delta height = 8 < 10 and delta time = 8 * 600 < 10 * 512
bip68timetxs = [tx['tx'] for tx in bip68txs_v2 if not tx['sdf'] and tx['stf']]
for tx in bip68timetxs:
self.send_blocks([self.create_test_block([tx])], success=False, reject_reason='bad-txns-nonfinal')
bip68heighttxs = [tx['tx'] for tx in bip68txs_v2 if not tx['sdf'] and not tx['stf']]
for tx in bip68heighttxs:
self.send_blocks([self.create_test_block([tx])], success=False, reject_reason='bad-txns-nonfinal')
        # Advance one block so the next block height is 437
test_blocks = self.generate_blocks(1)
self.send_blocks(test_blocks)
# Height txs should fail and time txs should now pass 9 * 600 > 10 * 512
bip68success_txs.extend(bip68timetxs)
self.send_blocks([self.create_test_block(bip68success_txs)])
self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
for tx in bip68heighttxs:
self.send_blocks([self.create_test_block([tx])], success=False, reject_reason='bad-txns-nonfinal')
        # Advance one block so the next block height is 438
test_blocks = self.generate_blocks(1)
self.send_blocks(test_blocks)
# All BIP 68 txs should pass
bip68success_txs.extend(bip68heighttxs)
self.send_blocks([self.create_test_block(bip68success_txs)])
self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
self.log.info("BIP 112 tests")
self.log.info("Test version 1 txs")
# -1 OP_CSV tx and (empty stack) OP_CSV tx should fail
self.send_blocks([self.create_test_block([bip112tx_special_v1])], success=False,
reject_reason='mandatory-script-verify-flag-failed (Negative locktime)')
self.send_blocks([self.create_test_block([bip112tx_emptystack_v1])], success=False,
reject_reason='mandatory-script-verify-flag-failed (Operation not valid with the current stack size)')
# If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in argument to OP_CSV, version 1 txs should still pass
success_txs = [tx['tx'] for tx in bip112txs_vary_OP_CSV_v1 if tx['sdf']]
success_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v1 if tx['sdf']]
self.send_blocks([self.create_test_block(success_txs)])
self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
# If SEQUENCE_LOCKTIME_DISABLE_FLAG is unset in argument to OP_CSV, version 1 txs should now fail
fail_txs = all_rlt_txs(bip112txs_vary_nSequence_v1)
fail_txs += all_rlt_txs(bip112txs_vary_nSequence_9_v1)
fail_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_v1 if not tx['sdf']]
fail_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v1 if not tx['sdf']]
for tx in fail_txs:
self.send_blocks([self.create_test_block([tx])], success=False,
reject_reason='mandatory-script-verify-flag-failed (Locktime requirement not satisfied)')
self.log.info("Test version 2 txs")
# -1 OP_CSV tx and (empty stack) OP_CSV tx should fail
self.send_blocks([self.create_test_block([bip112tx_special_v2])], success=False,
reject_reason='mandatory-script-verify-flag-failed (Negative locktime)')
self.send_blocks([self.create_test_block([bip112tx_emptystack_v2])], success=False,
reject_reason='mandatory-script-verify-flag-failed (Operation not valid with the current stack size)')
# If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in argument to OP_CSV, version 2 txs should pass (all sequence locks are met)
success_txs = [tx['tx'] for tx in bip112txs_vary_OP_CSV_v2 if tx['sdf']]
success_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v2 if tx['sdf']]
self.send_blocks([self.create_test_block(success_txs)])
self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
        # SEQUENCE_LOCKTIME_DISABLE_FLAG is unset in argument to OP_CSV for all remaining txs
# All txs with nSequence 9 should fail either due to earlier mismatch or failing the CSV check
fail_txs = all_rlt_txs(bip112txs_vary_nSequence_9_v2)
fail_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_9_v2 if not tx['sdf']]
for tx in fail_txs:
self.send_blocks([self.create_test_block([tx])], success=False,
reject_reason='mandatory-script-verify-flag-failed (Locktime requirement not satisfied)')
# If SEQUENCE_LOCKTIME_DISABLE_FLAG is set in nSequence, tx should fail
fail_txs = [tx['tx'] for tx in bip112txs_vary_nSequence_v2 if tx['sdf']]
for tx in fail_txs:
self.send_blocks([self.create_test_block([tx])], success=False,
reject_reason='mandatory-script-verify-flag-failed (Locktime requirement not satisfied)')
# If sequencelock types mismatch, tx should fail
fail_txs = [tx['tx'] for tx in bip112txs_vary_nSequence_v2 if not tx['sdf'] and tx['stf']]
fail_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_v2 if not tx['sdf'] and tx['stf']]
for tx in fail_txs:
self.send_blocks([self.create_test_block([tx])], success=False,
reject_reason='mandatory-script-verify-flag-failed (Locktime requirement not satisfied)')
# Remaining txs should pass, just test masking works properly
success_txs = [tx['tx'] for tx in bip112txs_vary_nSequence_v2 if not tx['sdf'] and not tx['stf']]
success_txs += [tx['tx'] for tx in bip112txs_vary_OP_CSV_v2 if not tx['sdf'] and not tx['stf']]
self.send_blocks([self.create_test_block(success_txs)])
self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
        # Additional test: check that comparison of two time types works properly
time_txs = []
for tx in [tx['tx'] for tx in bip112txs_vary_OP_CSV_v2 if not tx['sdf'] and tx['stf']]:
tx.vin[0].nSequence = BASE_RELATIVE_LOCKTIME | SEQ_TYPE_FLAG
self.miniwallet.sign_tx(tx)
time_txs.append(tx)
self.send_blocks([self.create_test_block(time_txs)])
self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
if __name__ == '__main__':
BIP68_112_113Test().main()
bitcoin/test | bitcoin/test/functional/p2p_tx_download.py
#!/usr/bin/env python3
# Copyright (c) 2019-2021 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""
Test transaction download behavior
"""
import time
from test_framework.messages import (
CInv,
MSG_TX,
MSG_TYPE_MASK,
MSG_WTX,
msg_inv,
msg_notfound,
)
from test_framework.p2p import (
P2PInterface,
p2p_lock,
)
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
)
from test_framework.wallet import MiniWallet
class TestP2PConn(P2PInterface):
def __init__(self, wtxidrelay=True):
super().__init__(wtxidrelay=wtxidrelay)
self.tx_getdata_count = 0
def on_getdata(self, message):
for i in message.inv:
if i.type & MSG_TYPE_MASK == MSG_TX or i.type & MSG_TYPE_MASK == MSG_WTX:
self.tx_getdata_count += 1
# Constants from net_processing
GETDATA_TX_INTERVAL = 60 # seconds
INBOUND_PEER_TX_DELAY = 2 # seconds
TXID_RELAY_DELAY = 2 # seconds
OVERLOADED_PEER_DELAY = 2 # seconds
MAX_GETDATA_IN_FLIGHT = 100
MAX_PEER_TX_ANNOUNCEMENTS = 5000
NONPREF_PEER_TX_DELAY = 2
# Python test constants
NUM_INBOUND = 10
MAX_GETDATA_INBOUND_WAIT = GETDATA_TX_INTERVAL + INBOUND_PEER_TX_DELAY + TXID_RELAY_DELAY
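# With the constants above, the worst case from an inbound announcement to the
# corresponding getdata is 60 + 2 + 2 = 64 seconds.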
class TxDownloadTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 2
def test_tx_requests(self):
self.log.info("Test that we request transactions from all our peers, eventually")
txid = 0xdeadbeef
self.log.info("Announce the txid from each incoming peer to node 0")
msg = msg_inv([CInv(t=MSG_WTX, h=txid)])
for p in self.nodes[0].p2ps:
p.send_and_ping(msg)
        outstanding_peer_index = list(range(len(self.nodes[0].p2ps)))
def getdata_found(peer_index):
p = self.nodes[0].p2ps[peer_index]
with p2p_lock:
return p.last_message.get("getdata") and p.last_message["getdata"].inv[-1].hash == txid
node_0_mocktime = int(time.time())
while outstanding_peer_index:
node_0_mocktime += MAX_GETDATA_INBOUND_WAIT
self.nodes[0].setmocktime(node_0_mocktime)
self.wait_until(lambda: any(getdata_found(i) for i in outstanding_peer_index))
            # Rebuild the list rather than removing elements while iterating,
            # which would skip the entry after each removal.
            outstanding_peer_index = [i for i in outstanding_peer_index if not getdata_found(i)]
self.nodes[0].setmocktime(0)
self.log.info("All outstanding peers received a getdata")
def test_inv_block(self):
self.log.info("Generate a transaction on node 0")
tx = self.wallet.create_self_transfer()
txid = int(tx['txid'], 16)
self.log.info(
"Announce the transaction to all nodes from all {} incoming peers, but never send it".format(NUM_INBOUND))
msg = msg_inv([CInv(t=MSG_TX, h=txid)])
for p in self.peers:
p.send_and_ping(msg)
self.log.info("Put the tx in node 0's mempool")
self.nodes[0].sendrawtransaction(tx['hex'])
# Since node 1 is connected outbound to an honest peer (node 0), it
# should get the tx within a timeout. (Assuming that node 0
# announced the tx within the timeout)
# The timeout is the sum of
# * the worst case until the tx is first requested from an inbound
# peer, plus
# * the first time it is re-requested from the outbound peer, plus
# * 2 seconds to avoid races
        assert not self.nodes[1].getpeerinfo()[0]['inbound']
timeout = 2 + INBOUND_PEER_TX_DELAY + GETDATA_TX_INTERVAL
self.log.info("Tx should be received at node 1 after {} seconds".format(timeout))
self.sync_mempools(timeout=timeout)
def test_in_flight_max(self):
self.log.info("Test that we don't load peers with more than {} transaction requests immediately".format(MAX_GETDATA_IN_FLIGHT))
        txids = list(range(MAX_GETDATA_IN_FLIGHT + 2))
p = self.nodes[0].p2ps[0]
with p2p_lock:
p.tx_getdata_count = 0
mock_time = int(time.time() + 1)
self.nodes[0].setmocktime(mock_time)
for i in range(MAX_GETDATA_IN_FLIGHT):
p.send_message(msg_inv([CInv(t=MSG_WTX, h=txids[i])]))
p.sync_with_ping()
mock_time += INBOUND_PEER_TX_DELAY
self.nodes[0].setmocktime(mock_time)
p.wait_until(lambda: p.tx_getdata_count >= MAX_GETDATA_IN_FLIGHT)
for i in range(MAX_GETDATA_IN_FLIGHT, len(txids)):
p.send_message(msg_inv([CInv(t=MSG_WTX, h=txids[i])]))
p.sync_with_ping()
self.log.info("No more than {} requests should be seen within {} seconds after announcement".format(MAX_GETDATA_IN_FLIGHT, INBOUND_PEER_TX_DELAY + OVERLOADED_PEER_DELAY - 1))
self.nodes[0].setmocktime(mock_time + INBOUND_PEER_TX_DELAY + OVERLOADED_PEER_DELAY - 1)
p.sync_with_ping()
with p2p_lock:
assert_equal(p.tx_getdata_count, MAX_GETDATA_IN_FLIGHT)
self.log.info("If we wait {} seconds after announcement, we should eventually get more requests".format(INBOUND_PEER_TX_DELAY + OVERLOADED_PEER_DELAY))
self.nodes[0].setmocktime(mock_time + INBOUND_PEER_TX_DELAY + OVERLOADED_PEER_DELAY)
p.wait_until(lambda: p.tx_getdata_count == len(txids))
def test_expiry_fallback(self):
self.log.info('Check that expiry will select another peer for download')
WTXID = 0xffaa
peer1 = self.nodes[0].add_p2p_connection(TestP2PConn())
peer2 = self.nodes[0].add_p2p_connection(TestP2PConn())
for p in [peer1, peer2]:
p.send_message(msg_inv([CInv(t=MSG_WTX, h=WTXID)]))
# One of the peers is asked for the tx
peer2.wait_until(lambda: sum(p.tx_getdata_count for p in [peer1, peer2]) == 1)
with p2p_lock:
peer_expiry, peer_fallback = (peer1, peer2) if peer1.tx_getdata_count == 1 else (peer2, peer1)
assert_equal(peer_fallback.tx_getdata_count, 0)
self.nodes[0].setmocktime(int(time.time()) + GETDATA_TX_INTERVAL + 1) # Wait for request to peer_expiry to expire
peer_fallback.wait_until(lambda: peer_fallback.tx_getdata_count >= 1, timeout=1)
self.restart_node(0) # reset mocktime
def test_disconnect_fallback(self):
self.log.info('Check that disconnect will select another peer for download')
WTXID = 0xffbb
peer1 = self.nodes[0].add_p2p_connection(TestP2PConn())
peer2 = self.nodes[0].add_p2p_connection(TestP2PConn())
for p in [peer1, peer2]:
p.send_message(msg_inv([CInv(t=MSG_WTX, h=WTXID)]))
# One of the peers is asked for the tx
peer2.wait_until(lambda: sum(p.tx_getdata_count for p in [peer1, peer2]) == 1)
with p2p_lock:
peer_disconnect, peer_fallback = (peer1, peer2) if peer1.tx_getdata_count == 1 else (peer2, peer1)
assert_equal(peer_fallback.tx_getdata_count, 0)
peer_disconnect.peer_disconnect()
peer_disconnect.wait_for_disconnect()
peer_fallback.wait_until(lambda: peer_fallback.tx_getdata_count >= 1, timeout=1)
def test_notfound_fallback(self):
self.log.info('Check that notfounds will select another peer for download immediately')
WTXID = 0xffdd
peer1 = self.nodes[0].add_p2p_connection(TestP2PConn())
peer2 = self.nodes[0].add_p2p_connection(TestP2PConn())
for p in [peer1, peer2]:
p.send_message(msg_inv([CInv(t=MSG_WTX, h=WTXID)]))
# One of the peers is asked for the tx
peer2.wait_until(lambda: sum(p.tx_getdata_count for p in [peer1, peer2]) == 1)
with p2p_lock:
peer_notfound, peer_fallback = (peer1, peer2) if peer1.tx_getdata_count == 1 else (peer2, peer1)
assert_equal(peer_fallback.tx_getdata_count, 0)
peer_notfound.send_and_ping(msg_notfound(vec=[CInv(MSG_WTX, WTXID)])) # Send notfound, so that fallback peer is selected
peer_fallback.wait_until(lambda: peer_fallback.tx_getdata_count >= 1, timeout=1)
def test_preferred_inv(self, preferred=False):
if preferred:
self.log.info('Check invs from preferred peers are downloaded immediately')
self.restart_node(0, extra_args=['[email protected]'])
else:
self.log.info('Check invs from non-preferred peers are downloaded after {} s'.format(NONPREF_PEER_TX_DELAY))
mock_time = int(time.time() + 1)
self.nodes[0].setmocktime(mock_time)
peer = self.nodes[0].add_p2p_connection(TestP2PConn())
peer.send_message(msg_inv([CInv(t=MSG_WTX, h=0xff00ff00)]))
peer.sync_with_ping()
if preferred:
peer.wait_until(lambda: peer.tx_getdata_count >= 1, timeout=1)
else:
with p2p_lock:
assert_equal(peer.tx_getdata_count, 0)
self.nodes[0].setmocktime(mock_time + NONPREF_PEER_TX_DELAY)
peer.wait_until(lambda: peer.tx_getdata_count >= 1, timeout=1)
def test_txid_inv_delay(self, glob_wtxid=False):
        self.log.info('Check that invs from txid-relay peers are delayed by {} s (wtxid-relay peer present: {})'.format(TXID_RELAY_DELAY, glob_wtxid))
self.restart_node(0, extra_args=['[email protected]'])
mock_time = int(time.time() + 1)
self.nodes[0].setmocktime(mock_time)
peer = self.nodes[0].add_p2p_connection(TestP2PConn(wtxidrelay=False))
if glob_wtxid:
            # Add a second wtxid-relay connection, otherwise TXID_RELAY_DELAY is
            # waived in the absence of wtxid-relay peers
self.nodes[0].add_p2p_connection(TestP2PConn(wtxidrelay=True))
peer.send_message(msg_inv([CInv(t=MSG_TX, h=0xff11ff11)]))
peer.sync_with_ping()
with p2p_lock:
assert_equal(peer.tx_getdata_count, 0 if glob_wtxid else 1)
self.nodes[0].setmocktime(mock_time + TXID_RELAY_DELAY)
peer.wait_until(lambda: peer.tx_getdata_count >= 1, timeout=1)
def test_large_inv_batch(self):
self.log.info('Test how large inv batches are handled with relay permission')
self.restart_node(0, extra_args=['[email protected]'])
peer = self.nodes[0].add_p2p_connection(TestP2PConn())
peer.send_message(msg_inv([CInv(t=MSG_WTX, h=wtxid) for wtxid in range(MAX_PEER_TX_ANNOUNCEMENTS + 1)]))
peer.wait_until(lambda: peer.tx_getdata_count == MAX_PEER_TX_ANNOUNCEMENTS + 1)
self.log.info('Test how large inv batches are handled without relay permission')
self.restart_node(0)
peer = self.nodes[0].add_p2p_connection(TestP2PConn())
peer.send_message(msg_inv([CInv(t=MSG_WTX, h=wtxid) for wtxid in range(MAX_PEER_TX_ANNOUNCEMENTS + 1)]))
peer.wait_until(lambda: peer.tx_getdata_count == MAX_PEER_TX_ANNOUNCEMENTS)
peer.sync_with_ping()
def test_spurious_notfound(self):
self.log.info('Check that spurious notfound is ignored')
self.nodes[0].p2ps[0].send_message(msg_notfound(vec=[CInv(MSG_TX, 1)]))
def run_test(self):
self.wallet = MiniWallet(self.nodes[0])
# Run tests without mocktime that only need one peer-connection first, to avoid restarting the nodes
self.test_expiry_fallback()
self.test_disconnect_fallback()
self.test_notfound_fallback()
self.test_preferred_inv()
self.test_preferred_inv(True)
self.test_txid_inv_delay()
self.test_txid_inv_delay(True)
self.test_large_inv_batch()
self.test_spurious_notfound()
# Run each test against new bitcoind instances, as setting mocktimes has long-term effects on when
# the next trickle relay event happens.
for test in [self.test_in_flight_max, self.test_inv_block, self.test_tx_requests]:
self.stop_nodes()
self.start_nodes()
self.connect_nodes(1, 0)
            # Set up the p2p connections
self.peers = []
for node in self.nodes:
for _ in range(NUM_INBOUND):
self.peers.append(node.add_p2p_connection(TestP2PConn()))
self.log.info("Nodes are setup with {} incoming connections each".format(NUM_INBOUND))
test()
if __name__ == '__main__':
TxDownloadTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/wallet_taproot.py | #!/usr/bin/env python3
# Copyright (c) 2021-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test generation and spending of P2TR addresses."""
import random
import uuid
from decimal import Decimal
from test_framework.address import output_key_to_p2tr
from test_framework.key import H_POINT
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal
from test_framework.descriptors import descsum_create
from test_framework.script import (
CScript,
MAX_PUBKEYS_PER_MULTI_A,
OP_CHECKSIG,
OP_CHECKSIGADD,
OP_NUMEQUAL,
taproot_construct,
)
from test_framework.segwit_addr import encode_segwit_address
# xprvs/xpubs, and m/* derived x-only pubkeys (created using independent implementation)
KEYS = [
{
"xprv": "tprv8ZgxMBicQKsPeNLUGrbv3b7qhUk1LQJZAGMuk9gVuKh9sd4BWGp1eMsehUni6qGb8bjkdwBxCbgNGdh2bYGACK5C5dRTaif9KBKGVnSezxV",
"xpub": "tpubD6NzVbkrYhZ4XqNGAWGWSzmxGWFwVjVTjZxh2fioKbVYi7Jx8fdbprVWsdW7mHwqjchBVas8TLZG4Xwuz4RKU4iaCqiCvoSkFCzQptqk5Y1",
"pubs": [
"83d8ee77a0f3a32a5cea96fd1624d623b836c1e5d1ac2dcde46814b619320c18",
"a30253b018ea6fca966135bf7dd8026915427f24ccf10d4e03f7870f4128569b",
"a61e5749f2f3db9dc871d7b187e30bfd3297eea2557e9be99897ea8ff7a29a21",
"8110cf482f66dc37125e619d73075af932521724ffc7108309e88f361efe8c8a",
]
},
{
"xprv": "tprv8ZgxMBicQKsPe98QUPieXy5KFPVjuZNpcC9JY7K7buJEm8nWvJogK4kTda7eLjK9U4PnMNbSjEkpjDJazeBZ4rhYNYD7N6GEdaysj1AYSb5",
"xpub": "tpubD6NzVbkrYhZ4XcACN3PEwNjRpR1g4tZjBVk5pdMR2B6dbd3HYhdGVZNKofAiFZd9okBserZvv58A6tBX4pE64UpXGNTSesfUW7PpW36HuKz",
"pubs": [
"f95886b02a84928c5c15bdca32784993105f73de27fa6ad8c1a60389b999267c",
"71522134160685eb779857033bfc84c7626f13556154653a51dd42619064e679",
"48957b4158b2c5c3f4c000f51fd2cf0fd5ff8868ebfb194256f5e9131fc74bd8",
"086dda8139b3a84944010648d2b674b70447be3ae59322c09a4907bc80be62c1",
]
},
{
"xprv": "tprv8ZgxMBicQKsPe3ZJmcj9aJ2EPZJYYCh6Lp3v82p75wspgaXmtDZ2RBtkAtWcGnW2VQDzMHQPBkCKMoYTqh1RfJKjv4PcmWVR7KqTpjsdboN",
"xpub": "tpubD6NzVbkrYhZ4XWb6fGPjyhgLxapUhXszv7ehQYrQWDgDX4nYWcNcbgWcM2RhYo9s2mbZcfZJ8t5LzYcr24FK79zVybsw5Qj3Rtqug8jpJMy",
"pubs": [
"9fa5ffb68821cf559001caa0577eeea4978b29416def328a707b15e91701a2f7",
"8a104c54cd34acba60c97dd8f1f7abc89ba9587afd88dc928e91aca7b1c50d20",
"13ba6b252a4eb5ef31d39cb521724cdab19a698323f5c17093f28fb1821d052f",
"f6c2b4863fd5ba1ba09e3a890caed8b75ffbe013ebab31a06ab87cd6f72506af",
]
},
{
"xprv": "tprv8ZgxMBicQKsPdKziibn63Rm6aNzp7dSjDnufZMStXr71Huz7iihCRpbZZZ6Voy5HyuHCWx6foHMipzMzUq4tZrtkZ24DJwz5EeNWdsuwX5h",
"xpub": "tpubD6NzVbkrYhZ4Wo2WcFSgSqRD9QWkGxddo6WSqsVBx7uQ8QEtM7WncKDRjhFEexK119NigyCsFygA4b7sAPQxqebyFGAZ9XVV1BtcgNzbCRR",
"pubs": [
"03a669ea926f381582ec4a000b9472ba8a17347f5fb159eddd4a07036a6718eb",
"bbf56b14b119bccafb686adec2e3d2a6b51b1626213590c3afa815d1fd36f85d",
"2994519e31bbc238a07d82f85c9832b831705d2ee4a2dbb477ecec8a3f570fe5",
"68991b5c139a4c479f8c89d6254d288c533aefc0c5b91fac6c89019c4de64988",
]
},
{
"xprv": "tprv8ZgxMBicQKsPen4PGtDwURYnCtVMDejyE8vVwMGhQWfVqB2FBPdekhTacDW4vmsKTsgC1wsncVqXiZdX2YFGAnKoLXYf42M78fQJFzuDYFN",
"xpub": "tpubD6NzVbkrYhZ4YF6BAXtXsqCtmv1HNyvsoSXHDsJzpnTtffH1onTEwC5SnLzCHPKPebh2i7Gxvi9kJNADcpuSmH8oM3rCYcHVtdXHjpYoKnX",
"pubs": [
"aba457d16a8d59151c387f24d1eb887efbe24644c1ee64b261282e7baebdb247",
"c8558b7caf198e892032d91f1a48ee9bdc25462b83b4d0ac62bb7fb2a0df630e",
"8a4bcaba0e970685858d133a4d0079c8b55bbc755599e212285691eb779ce3dc",
"b0d68ada13e0d954b3921b88160d4453e9c151131c2b7c724e08f538a666ceb3",
]
},
{
"xprv": "tprv8ZgxMBicQKsPd91vCgRmbzA13wyip2RimYeVEkAyZvsEN5pUSB3T43SEBxPsytkxb42d64W2EiRE9CewpJQkzR8HKHLV8Uhk4dMF5yRPaTv",
"xpub": "tpubD6NzVbkrYhZ4Wc3i6L6N1Pp7cyVeyMcdLrFGXGDGzCfdCa5F4Zs3EY46N72Ws8QDEUYBVwXfDfda2UKSseSdU1fsBegJBhGCZyxkf28bkQ6",
"pubs": [
"9b4d495b74887815a1ff623c055c6eac6b6b2e07d2a016d6526ebac71dd99744",
"8e971b781b7ce7ab742d80278f2dfe7dd330f3efd6d00047f4a2071f2e7553cb",
"b811d66739b9f07435ccda907ec5cd225355321c35e0a7c7791232f24cf10632",
"4cd27a5552c272bc80ba544e9cc6340bb906969f5e7a1510b6cef9592683fbc9",
]
},
{
"xprv": "tprv8ZgxMBicQKsPdEhLRxxwzTv2t18j7ruoffPeqAwVA2qXJ2P66RaMZLUWQ85SjoA7xPxdSgCB9UZ72m65qbnaLPtFTfHVP3MEmkpZk1Bv8RT",
"xpub": "tpubD6NzVbkrYhZ4Whj8KcdYPsa9T2efHC6iExzS7gynaJdv8WdripPwjq6NaH5gQJGrLmvUwHY1smhiakUosXNDTEa6qfKUQdLKV6DJBre6XvQ",
"pubs": [
"d0c19def28bb1b39451c1a814737615983967780d223b79969ba692182c6006b",
"cb1d1b1dc62fec1894d4c3d9a1b6738e5ff9c273a64f74e9ab363095f45e9c47",
"245be588f41acfaeb9481aa132717db56ee1e23eb289729fe2b8bde8f9a00830",
"5bc4ad6d6187fa82728c85a073b428483295288f8aef5722e47305b5872f7169",
]
},
{
"xprv": "tprv8ZgxMBicQKsPcxbqxzcMAwQpiCD8x6qaZEJTxdKxw4w9GuMzDACTD9yhEsHGfqQcfYX4LivosLDDngTykYEp9JnTdcqY7cHqU8PpeFFKyV3",
"xpub": "tpubD6NzVbkrYhZ4WRddreGwaM4wHDj57S2V8XuFF9NGMLjY7PckqZ23PebZR1wGA4w84uX2vZphdZVsnREjij1ibYjEBTaTVQCEZCLs4xUDapx",
"pubs": [
"065cc1b92bd99e5a3e626e8296a366b2d132688eb43aea19bc14fd8f43bf07fb",
"5b95633a7dda34578b6985e6bfd85d83ec38b7ded892a9b74a3d899c85890562",
"dc86d434b9a34495c8e845b969d51f80d19a8df03b400353ffe8036a0c22eb60",
"06c8ffde238745b29ae8a97ae533e1f3edf214bba6ec58b5e7b9451d1d61ec19",
]
},
{
"xprv": "tprv8ZgxMBicQKsPe6zLoU8MTTXgsdJVNBErrYGpoGwHf5VGvwUzdNc7NHeCSzkJkniCxBhZWujXjmD4HZmBBrnr3URgJjM6GxRgMmEhLdqNTWG",
"xpub": "tpubD6NzVbkrYhZ4Xa28h7nwrsBoSepRXWRmRqsc5nyb5MHfmRjmFmRhYnG4d9dC7uxixN5AfsEv1Lz3mCAuWvERyvPgKozHUVjfo8EG6foJGy7",
"pubs": [
"d826a0a53abb6ffc60df25b9c152870578faef4b2eb5a09bdd672bbe32cdd79b",
"939365e0359ff6bc6f6404ee220714c5d4a0d1e36838b9e2081ede217674e2ba",
"4e8767edcf7d3d90258cfbbea01b784f4d2de813c4277b51279cf808bac410a2",
"d42a2c280940bfc6ede971ae72cde2e1df96c6da7dab06a132900c6751ade208",
]
},
{
"xprv": "tprv8ZgxMBicQKsPeB5o5oCsN2dVxM2mtJiYERQEBRc4JNwC1DFGYaEdNkmh8jJYVPU76YhkFoRoWTdh1p3yQGykG8TfDW34dKgrgSx28gswUyL",
"xpub": "tpubD6NzVbkrYhZ4Xe7aySsTmSHcXNYi3duSoj11TweMiejaqhW3Ay4DZFPZJses4sfpk4b9VHRhn8v4cKTMjugMM3hqXcqSSmRdiW8QvASXjfY",
"pubs": [
"e360564b2e0e8d06681b6336a29d0750210e8f34afd9afb5e6fd5fe6dba26c81",
"76b4900f00a1dcce463b6d8e02b768518fce4f9ecd6679a13ad78ea1e4815ad3",
"5575556e263c8ed52e99ab02147cc05a738869afe0039911b5a60a780f4e43d2",
"593b00e2c8d4bd6dda0fd9e238888acf427bb4e128887fd5a40e0e9da78cbc01",
]
},
{
"xprv": "tprv8ZgxMBicQKsPfEH6jHemkGDjZRnAaKFJVGH8pQU638E6SdbX9hxit1tK2sfFPfL6KS7v8FfUKxstbfEpzSymbdfBM9Y5UkrxErF9fJaKLK3",
"xpub": "tpubD6NzVbkrYhZ4YhJtcwKN9fsr8TJ6jeSD4Zsv6vWPTQ2VH7rHn6nK4WWBCzKK7FkdVVwm3iztCU1UmStY4hX6gRbBmp9UzK9C59dQEzeXS12",
"pubs": [
"7631cacec3343052d87ef4d0065f61dde82d7d2db0c1cc02ef61ef3c982ea763",
"c05e44a9e735d1b1bef62e2c0d886e6fb4923b2649b67828290f5cacc51c71b7",
"b33198b20701afe933226c92fd0e3d51d3f266f1113d864dbd026ae3166ef7f2",
"f99643ac3f4072ee4a949301e86963a9ca0ad57f2ef29f6b84fda037d7cac85b",
]
},
{
"xprv": "tprv8ZgxMBicQKsPdNWU38dT6aGxtqJR4oYS5kPpLVBcuKiiu7gqTYqMMqhUG6DP7pPahzPQu36sWSmeLCP1C4AwqcR5FX2RyRoZfd4B8pAnSdX",
"xpub": "tpubD6NzVbkrYhZ4WqYFvnJ3Vyw5TrpME8jLf3zbd1DvKbX7jbwc5wewYLKLSFRzZWV6hZj7XhsXAy7fhE5jB25DiWyNM3ztXbsXHRVCrp5BiPY",
"pubs": [
"2258b1c3160be0864a541854eec9164a572f094f7562628281a8073bb89173a7",
"83df59d0a5c951cdd62b7ab225a62079f48d2a333a86e66c35420d101446e92e",
"2a654bf234d819055312f9ca03fad5836f9163b09cdd24d29678f694842b874a",
"aa0334ab910047387c912a21ec0dab806a47ffa38365060dbc5d47c18c6e66e7",
]
},
{
"xprv": "tprv8mGPkMVz5mZuJDnC2NjjAv7E9Zqa5LCgX4zawbZu5nzTtLb5kGhPwycX4H1gtW1f5ZdTKTNtQJ61hk71F2TdcQ93EFDTpUcPBr98QRji615",
"xpub": "tpubDHxRtmYEE9FaBgoyv2QKaKmLibMWEfPb6NbNE7cCW4nripqrNfWz8UEPEPbHCrakwLvwFfsqoaf4pjX4gWStp4nECRf1QwBKPkLqnY8pHbj",
"pubs": [
"00a9da96087a72258f83b338ef7f0ea8cbbe05da5f18f091eb397d1ecbf7c3d3",
"b2749b74d51a78f5fe3ebb3a7c0ff266a468cade143dfa265c57e325177edf00",
"6b8747a6bbe4440d7386658476da51f6e49a220508a7ec77fe7bccc3e7baa916",
"4674bf4d9ebbe01bf0aceaca2472f63198655ecf2df810f8d69b38421972318e",
]
}
]
def key(hex_key):
"""Construct an x-only pubkey from its hex representation."""
return bytes.fromhex(hex_key)
def pk(hex_key):
"""Construct a script expression for taproot_construct for pk(hex_key)."""
return (None, CScript([bytes.fromhex(hex_key), OP_CHECKSIG]))
def multi_a(k, hex_keys, sort=False):
"""Construct a script expression for taproot_construct for a multi_a script."""
xkeys = [bytes.fromhex(hex_key) for hex_key in hex_keys]
if sort:
xkeys.sort()
ops = [xkeys[0], OP_CHECKSIG]
for i in range(1, len(hex_keys)):
ops += [xkeys[i], OP_CHECKSIGADD]
ops += [k, OP_NUMEQUAL]
return (None, CScript(ops))
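# Example (a sketch): multi_a(2, [a, b, c]) builds the script
#   <a> OP_CHECKSIG <b> OP_CHECKSIGADD <c> OP_CHECKSIGADD 2 OP_NUMEQUAL
# where each CHECKSIGADD adds that key's signature-check result to a running
# count that must equal k at the end.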
def compute_taproot_address(pubkey, scripts):
"""Compute the address for a taproot output with given inner key and scripts."""
return output_key_to_p2tr(taproot_construct(pubkey, scripts).output_pubkey)
def compute_raw_taproot_address(pubkey):
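    """Compute the bech32m address for a raw taproot output key."""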
return encode_segwit_address("bcrt", 1, pubkey)
class WalletTaprootTest(BitcoinTestFramework):
"""Test generation and spending of P2TR address outputs."""
def add_options(self, parser):
self.add_wallet_options(parser, legacy=False)
def set_test_params(self):
self.num_nodes = 2
self.setup_clean_chain = True
self.extra_args = [['-keypool=100'], ['-keypool=100']]
self.supports_cli = False
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
self.skip_if_no_sqlite()
def setup_network(self):
self.setup_nodes()
def init_wallet(self, *, node):
pass
@staticmethod
def make_desc(pattern, privmap, keys, pub_only = False):
pat = pattern.replace("$H", H_POINT)
for i in range(len(privmap)):
if privmap[i] and not pub_only:
pat = pat.replace("$%i" % (i + 1), keys[i]['xprv'])
else:
pat = pat.replace("$%i" % (i + 1), keys[i]['xpub'])
return descsum_create(pat)
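    # Example (hypothetical values): make_desc("tr($1/*)", [True], [KEYS[0]])
    # substitutes KEYS[0]'s xprv (or its xpub when pub_only is set or privmap
    # marks the key as non-private) and appends the checksum, yielding
    # something like "tr(tprv8Zgx.../*)#checksum".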
@staticmethod
def make_addr(treefn, keys, i):
args = []
for j in range(len(keys)):
args.append(keys[j]['pubs'][i])
tree = treefn(*args)
if isinstance(tree, tuple):
return compute_taproot_address(*tree)
if isinstance(tree, bytes):
return compute_raw_taproot_address(tree)
        assert False, "treefn must return a tuple (internal key, scripts) or raw output key bytes"
def do_test_addr(self, comment, pattern, privmap, treefn, keys):
self.log.info("Testing %s address derivation" % comment)
# Create wallets
wallet_uuid = uuid.uuid4().hex
self.nodes[0].createwallet(wallet_name=f"privs_tr_enabled_{wallet_uuid}", descriptors=True, blank=True)
self.nodes[0].createwallet(wallet_name=f"pubs_tr_enabled_{wallet_uuid}", descriptors=True, blank=True, disable_private_keys=True)
self.nodes[0].createwallet(wallet_name=f"addr_gen_{wallet_uuid}", descriptors=True, disable_private_keys=True, blank=True)
privs_tr_enabled = self.nodes[0].get_wallet_rpc(f"privs_tr_enabled_{wallet_uuid}")
pubs_tr_enabled = self.nodes[0].get_wallet_rpc(f"pubs_tr_enabled_{wallet_uuid}")
addr_gen = self.nodes[0].get_wallet_rpc(f"addr_gen_{wallet_uuid}")
desc = self.make_desc(pattern, privmap, keys, False)
desc_pub = self.make_desc(pattern, privmap, keys, True)
assert_equal(self.nodes[0].getdescriptorinfo(desc)['descriptor'], desc_pub)
result = addr_gen.importdescriptors([{"desc": desc_pub, "active": True, "timestamp": "now"}])
assert result[0]['success']
address_type = "bech32m" if "tr" in pattern else "bech32"
for i in range(4):
addr_g = addr_gen.getnewaddress(address_type=address_type)
if treefn is not None:
addr_r = self.make_addr(treefn, keys, i)
assert_equal(addr_g, addr_r)
desc_a = addr_gen.getaddressinfo(addr_g)['desc']
if desc.startswith("tr("):
assert desc_a.startswith("tr(")
rederive = self.nodes[1].deriveaddresses(desc_a)
assert_equal(len(rederive), 1)
assert_equal(rederive[0], addr_g)
# tr descriptors can be imported
result = privs_tr_enabled.importdescriptors([{"desc": desc, "timestamp": "now"}])
assert result[0]['success']
result = pubs_tr_enabled.importdescriptors([{"desc": desc_pub, "timestamp": "now"}])
assert result[0]["success"]
# Cleanup
privs_tr_enabled.unloadwallet()
pubs_tr_enabled.unloadwallet()
addr_gen.unloadwallet()
def do_test_sendtoaddress(self, comment, pattern, privmap, treefn, keys_pay, keys_change):
self.log.info("Testing %s through sendtoaddress" % comment)
# Create wallets
wallet_uuid = uuid.uuid4().hex
self.nodes[0].createwallet(wallet_name=f"rpc_online_{wallet_uuid}", descriptors=True, blank=True)
rpc_online = self.nodes[0].get_wallet_rpc(f"rpc_online_{wallet_uuid}")
desc_pay = self.make_desc(pattern, privmap, keys_pay)
desc_change = self.make_desc(pattern, privmap, keys_change)
desc_pay_pub = self.make_desc(pattern, privmap, keys_pay, True)
desc_change_pub = self.make_desc(pattern, privmap, keys_change, True)
assert_equal(self.nodes[0].getdescriptorinfo(desc_pay)['descriptor'], desc_pay_pub)
assert_equal(self.nodes[0].getdescriptorinfo(desc_change)['descriptor'], desc_change_pub)
result = rpc_online.importdescriptors([{"desc": desc_pay, "active": True, "timestamp": "now"}])
assert result[0]['success']
result = rpc_online.importdescriptors([{"desc": desc_change, "active": True, "timestamp": "now", "internal": True}])
assert result[0]['success']
address_type = "bech32m" if "tr" in pattern else "bech32"
for i in range(4):
addr_g = rpc_online.getnewaddress(address_type=address_type)
if treefn is not None:
addr_r = self.make_addr(treefn, keys_pay, i)
assert_equal(addr_g, addr_r)
boring_balance = int(self.boring.getbalance() * 100000000)
to_amnt = random.randrange(1000000, boring_balance)
self.boring.sendtoaddress(address=addr_g, amount=Decimal(to_amnt) / 100000000, subtractfeefromamount=True)
self.generatetoaddress(self.nodes[0], 1, self.boring.getnewaddress(), sync_fun=self.no_op)
test_balance = int(rpc_online.getbalance() * 100000000)
ret_amnt = random.randrange(100000, test_balance)
# Increase fee_rate to compensate for the wallet's inability to estimate fees for script path spends.
res = rpc_online.sendtoaddress(address=self.boring.getnewaddress(), amount=Decimal(ret_amnt) / 100000000, subtractfeefromamount=True, fee_rate=200)
self.generatetoaddress(self.nodes[0], 1, self.boring.getnewaddress(), sync_fun=self.no_op)
assert rpc_online.gettransaction(res)["confirmations"] > 0
# Cleanup
txid = rpc_online.sendall(recipients=[self.boring.getnewaddress()])["txid"]
self.generatetoaddress(self.nodes[0], 1, self.boring.getnewaddress(), sync_fun=self.no_op)
assert rpc_online.gettransaction(txid)["confirmations"] > 0
rpc_online.unloadwallet()
def do_test_psbt(self, comment, pattern, privmap, treefn, keys_pay, keys_change):
self.log.info("Testing %s through PSBT" % comment)
# Create wallets
wallet_uuid = uuid.uuid4().hex
self.nodes[0].createwallet(wallet_name=f"psbt_online_{wallet_uuid}", descriptors=True, disable_private_keys=True, blank=True)
self.nodes[1].createwallet(wallet_name=f"psbt_offline_{wallet_uuid}", descriptors=True, blank=True)
self.nodes[1].createwallet(f"key_only_wallet_{wallet_uuid}", descriptors=True, blank=True)
psbt_online = self.nodes[0].get_wallet_rpc(f"psbt_online_{wallet_uuid}")
psbt_offline = self.nodes[1].get_wallet_rpc(f"psbt_offline_{wallet_uuid}")
key_only_wallet = self.nodes[1].get_wallet_rpc(f"key_only_wallet_{wallet_uuid}")
desc_pay = self.make_desc(pattern, privmap, keys_pay, False)
desc_change = self.make_desc(pattern, privmap, keys_change, False)
desc_pay_pub = self.make_desc(pattern, privmap, keys_pay, True)
desc_change_pub = self.make_desc(pattern, privmap, keys_change, True)
assert_equal(self.nodes[0].getdescriptorinfo(desc_pay)['descriptor'], desc_pay_pub)
assert_equal(self.nodes[0].getdescriptorinfo(desc_change)['descriptor'], desc_change_pub)
result = psbt_online.importdescriptors([{"desc": desc_pay_pub, "active": True, "timestamp": "now"}])
assert result[0]['success']
result = psbt_online.importdescriptors([{"desc": desc_change_pub, "active": True, "timestamp": "now", "internal": True}])
assert result[0]['success']
result = psbt_offline.importdescriptors([{"desc": desc_pay, "active": True, "timestamp": "now"}])
assert result[0]['success']
result = psbt_offline.importdescriptors([{"desc": desc_change, "active": True, "timestamp": "now", "internal": True}])
assert result[0]['success']
for key in keys_pay + keys_change:
result = key_only_wallet.importdescriptors([{"desc": descsum_create(f"wpkh({key['xprv']}/*)"), "timestamp":"now"}])
assert result[0]["success"]
address_type = "bech32m" if "tr" in pattern else "bech32"
for i in range(4):
addr_g = psbt_online.getnewaddress(address_type=address_type)
if treefn is not None:
addr_r = self.make_addr(treefn, keys_pay, i)
assert_equal(addr_g, addr_r)
boring_balance = int(self.boring.getbalance() * 100000000)
to_amnt = random.randrange(1000000, boring_balance)
self.boring.sendtoaddress(address=addr_g, amount=Decimal(to_amnt) / 100000000, subtractfeefromamount=True)
self.generatetoaddress(self.nodes[0], 1, self.boring.getnewaddress(), sync_fun=self.no_op)
test_balance = int(psbt_online.getbalance() * 100000000)
ret_amnt = random.randrange(100000, test_balance)
# Increase fee_rate to compensate for the wallet's inability to estimate fees for script path spends.
psbt = psbt_online.walletcreatefundedpsbt([], [{self.boring.getnewaddress(): Decimal(ret_amnt) / 100000000}], None, {"subtractFeeFromOutputs":[0], "fee_rate": 200, "change_type": address_type})['psbt']
res = psbt_offline.walletprocesspsbt(psbt=psbt, finalize=False)
for wallet in [psbt_offline, key_only_wallet]:
res = wallet.walletprocesspsbt(psbt=psbt, finalize=False)
decoded = wallet.decodepsbt(res["psbt"])
if pattern.startswith("tr("):
for psbtin in decoded["inputs"]:
assert "non_witness_utxo" not in psbtin
assert "witness_utxo" in psbtin
assert "taproot_internal_key" in psbtin
assert "taproot_bip32_derivs" in psbtin
assert "taproot_key_path_sig" in psbtin or "taproot_script_path_sigs" in psbtin
if "taproot_script_path_sigs" in psbtin:
assert "taproot_merkle_root" in psbtin
assert "taproot_scripts" in psbtin
rawtx = self.nodes[0].finalizepsbt(res['psbt'])['hex']
res = self.nodes[0].testmempoolaccept([rawtx])
assert res[0]["allowed"]
txid = self.nodes[0].sendrawtransaction(rawtx)
self.generatetoaddress(self.nodes[0], 1, self.boring.getnewaddress(), sync_fun=self.no_op)
assert psbt_online.gettransaction(txid)['confirmations'] > 0
# Cleanup
psbt = psbt_online.sendall(recipients=[self.boring.getnewaddress()], psbt=True)["psbt"]
res = psbt_offline.walletprocesspsbt(psbt=psbt, finalize=False)
rawtx = self.nodes[0].finalizepsbt(res['psbt'])['hex']
txid = self.nodes[0].sendrawtransaction(rawtx)
self.generatetoaddress(self.nodes[0], 1, self.boring.getnewaddress(), sync_fun=self.no_op)
assert psbt_online.gettransaction(txid)['confirmations'] > 0
psbt_online.unloadwallet()
psbt_offline.unloadwallet()
def do_test(self, comment, pattern, privmap, treefn):
nkeys = len(privmap)
keys = random.sample(KEYS, nkeys * 4)
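        # Partition the sampled keys: [0:n) payment keys for the address and
        # sendtoaddress tests, [n:2n) change keys for sendtoaddress, and
        # [2n:3n)/[3n:4n) payment/change keys for the PSBT test.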
self.do_test_addr(comment, pattern, privmap, treefn, keys[0:nkeys])
self.do_test_sendtoaddress(comment, pattern, privmap, treefn, keys[0:nkeys], keys[nkeys:2*nkeys])
self.do_test_psbt(comment, pattern, privmap, treefn, keys[2*nkeys:3*nkeys], keys[3*nkeys:4*nkeys])
def run_test(self):
self.nodes[0].createwallet(wallet_name="boring")
self.boring = self.nodes[0].get_wallet_rpc("boring")
self.log.info("Mining blocks...")
gen_addr = self.boring.getnewaddress()
self.generatetoaddress(self.nodes[0], 101, gen_addr, sync_fun=self.no_op)
self.do_test(
"tr(XPRV)",
"tr($1/*)",
[True],
lambda k1: (key(k1), [])
)
self.do_test(
"tr(H,XPRV)",
"tr($H,pk($1/*))",
[True],
lambda k1: (key(H_POINT), [pk(k1)])
)
self.do_test(
"wpkh(XPRV)",
"wpkh($1/*)",
[True],
None
)
self.do_test(
"tr(XPRV,{H,{H,XPUB}})",
"tr($1/*,{pk($H),{pk($H),pk($2/*)}})",
[True, False],
lambda k1, k2: (key(k1), [pk(H_POINT), [pk(H_POINT), pk(k2)]])
)
self.do_test(
"wsh(multi(1,XPRV,XPUB))",
"wsh(multi(1,$1/*,$2/*))",
[True, False],
None
)
self.do_test(
"tr(XPRV,{XPUB,XPUB})",
"tr($1/*,{pk($2/*),pk($2/*)})",
[True, False],
lambda k1, k2: (key(k1), [pk(k2), pk(k2)])
)
self.do_test(
"tr(XPRV,{{XPUB,H},{H,XPUB}})",
"tr($1/*,{{pk($2/*),pk($H)},{pk($H),pk($2/*)}})",
[True, False],
lambda k1, k2: (key(k1), [[pk(k2), pk(H_POINT)], [pk(H_POINT), pk(k2)]])
)
self.do_test(
"tr(XPUB,{{H,{H,XPUB}},{H,{H,{H,XPRV}}}})",
"tr($1/*,{{pk($H),{pk($H),pk($2/*)}},{pk($H),{pk($H),{pk($H),pk($3/*)}}}})",
[False, False, True],
lambda k1, k2, k3: (key(k1), [[pk(H_POINT), [pk(H_POINT), pk(k2)]], [pk(H_POINT), [pk(H_POINT), [pk(H_POINT), pk(k3)]]]])
)
self.do_test(
"tr(XPRV,{XPUB,{{XPUB,{H,H}},{{H,H},XPUB}}})",
"tr($1/*,{pk($2/*),{{pk($2/*),{pk($H),pk($H)}},{{pk($H),pk($H)},pk($2/*)}}})",
[True, False],
lambda k1, k2: (key(k1), [pk(k2), [[pk(k2), [pk(H_POINT), pk(H_POINT)]], [[pk(H_POINT), pk(H_POINT)], pk(k2)]]])
)
self.do_test(
"tr(H,multi_a(1,XPRV))",
"tr($H,multi_a(1,$1/*))",
[True],
lambda k1: (key(H_POINT), [multi_a(1, [k1])])
)
self.do_test(
"tr(H,sortedmulti_a(1,XPRV,XPUB))",
"tr($H,sortedmulti_a(1,$1/*,$2/*))",
[True, False],
lambda k1, k2: (key(H_POINT), [multi_a(1, [k1, k2], True)])
)
self.do_test(
"tr(H,{H,multi_a(1,XPUB,XPRV)})",
"tr($H,{pk($H),multi_a(1,$1/*,$2/*)})",
[False, True],
lambda k1, k2: (key(H_POINT), [pk(H_POINT), [multi_a(1, [k1, k2])]])
)
self.do_test(
"tr(H,sortedmulti_a(1,XPUB,XPRV,XPRV))",
"tr($H,sortedmulti_a(1,$1/*,$2/*,$3/*))",
[False, True, True],
lambda k1, k2, k3: (key(H_POINT), [multi_a(1, [k1, k2, k3], True)])
)
self.do_test(
"tr(H,multi_a(2,XPRV,XPUB,XPRV))",
"tr($H,multi_a(2,$1/*,$2/*,$3/*))",
[True, False, True],
lambda k1, k2, k3: (key(H_POINT), [multi_a(2, [k1, k2, k3])])
)
self.do_test(
"tr(XPUB,{{XPUB,{XPUB,sortedmulti_a(2,XPRV,XPUB,XPRV)}})",
"tr($2/*,{pk($2/*),{pk($2/*),sortedmulti_a(2,$1/*,$2/*,$3/*)}})",
[True, False, True],
lambda k1, k2, k3: (key(k2), [pk(k2), [pk(k2), multi_a(2, [k1, k2, k3], True)]])
)
rnd_pos = random.randrange(MAX_PUBKEYS_PER_MULTI_A)
self.do_test(
"tr(XPUB,multi_a(1,H...,XPRV,H...))",
"tr($2/*,multi_a(1" + (",$H" * rnd_pos) + ",$1/*" + (",$H" * (MAX_PUBKEYS_PER_MULTI_A - 1 - rnd_pos)) + "))",
[True, False],
lambda k1, k2: (key(k2), [multi_a(1, ([H_POINT] * rnd_pos) + [k1] + ([H_POINT] * (MAX_PUBKEYS_PER_MULTI_A - 1 - rnd_pos)))])
)
self.do_test(
"rawtr(XPRV)",
"rawtr($1/*)",
[True],
lambda k1: key(k1)
)
if __name__ == '__main__':
WalletTaprootTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/wallet_encryption.py | #!/usr/bin/env python3
# Copyright (c) 2016-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test Wallet encryption"""
import time
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_raises_rpc_error,
assert_equal,
)
from test_framework.wallet_util import WalletUnlock
class WalletEncryptionTest(BitcoinTestFramework):
def add_options(self, parser):
self.add_wallet_options(parser)
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def run_test(self):
passphrase = "WalletPassphrase"
passphrase2 = "SecondWalletPassphrase"
# Make sure the wallet isn't encrypted first
msg = "test message"
address = self.nodes[0].getnewaddress(address_type='legacy')
sig = self.nodes[0].signmessage(address, msg)
assert self.nodes[0].verifymessage(address, sig, msg)
assert_raises_rpc_error(-15, "Error: running with an unencrypted wallet, but walletpassphrase was called", self.nodes[0].walletpassphrase, 'ff', 1)
assert_raises_rpc_error(-15, "Error: running with an unencrypted wallet, but walletpassphrasechange was called.", self.nodes[0].walletpassphrasechange, 'ff', 'ff')
# Encrypt the wallet
assert_raises_rpc_error(-8, "passphrase cannot be empty", self.nodes[0].encryptwallet, '')
self.nodes[0].encryptwallet(passphrase)
# Test that the wallet is encrypted
assert_raises_rpc_error(-13, "Please enter the wallet passphrase with walletpassphrase first", self.nodes[0].signmessage, address, msg)
assert_raises_rpc_error(-15, "Error: running with an encrypted wallet, but encryptwallet was called.", self.nodes[0].encryptwallet, 'ff')
assert_raises_rpc_error(-8, "passphrase cannot be empty", self.nodes[0].walletpassphrase, '', 1)
assert_raises_rpc_error(-8, "passphrase cannot be empty", self.nodes[0].walletpassphrasechange, '', 'ff')
# Check that walletpassphrase works
self.nodes[0].walletpassphrase(passphrase, 2)
sig = self.nodes[0].signmessage(address, msg)
assert self.nodes[0].verifymessage(address, sig, msg)
# Check that the timeout is right
time.sleep(3)
assert_raises_rpc_error(-13, "Please enter the wallet passphrase with walletpassphrase first", self.nodes[0].signmessage, address, msg)
# Test wrong passphrase
assert_raises_rpc_error(-14, "wallet passphrase entered was incorrect", self.nodes[0].walletpassphrase, passphrase + "wrong", 10)
# Test walletlock
with WalletUnlock(self.nodes[0], passphrase):
sig = self.nodes[0].signmessage(address, msg)
assert self.nodes[0].verifymessage(address, sig, msg)
assert_raises_rpc_error(-13, "Please enter the wallet passphrase with walletpassphrase first", self.nodes[0].signmessage, address, msg)
# Test passphrase changes
self.nodes[0].walletpassphrasechange(passphrase, passphrase2)
assert_raises_rpc_error(-14, "wallet passphrase entered was incorrect", self.nodes[0].walletpassphrase, passphrase, 10)
with WalletUnlock(self.nodes[0], passphrase2):
sig = self.nodes[0].signmessage(address, msg)
assert self.nodes[0].verifymessage(address, sig, msg)
# Test timeout bounds
assert_raises_rpc_error(-8, "Timeout cannot be negative.", self.nodes[0].walletpassphrase, passphrase2, -10)
self.log.info('Check a timeout less than the limit')
MAX_VALUE = 100000000
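        # The RPC clamps any longer timeout down to MAX_VALUE seconds, as
        # verified below.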
now = int(time.time())
self.nodes[0].setmocktime(now)
expected_time = now + MAX_VALUE - 600
self.nodes[0].walletpassphrase(passphrase2, MAX_VALUE - 600)
actual_time = self.nodes[0].getwalletinfo()['unlocked_until']
assert_equal(actual_time, expected_time)
self.log.info('Check a timeout greater than the limit')
expected_time = now + MAX_VALUE
self.nodes[0].walletpassphrase(passphrase2, MAX_VALUE + 1000)
actual_time = self.nodes[0].getwalletinfo()['unlocked_until']
assert_equal(actual_time, expected_time)
self.nodes[0].walletlock()
# Test passphrase with null characters
passphrase_with_nulls = "Phrase\0With\0Nulls"
self.nodes[0].walletpassphrasechange(passphrase2, passphrase_with_nulls)
# walletpassphrasechange should not stop at null characters
assert_raises_rpc_error(-14, "wallet passphrase entered was incorrect", self.nodes[0].walletpassphrase, passphrase_with_nulls.partition("\0")[0], 10)
with WalletUnlock(self.nodes[0], passphrase_with_nulls):
sig = self.nodes[0].signmessage(address, msg)
assert self.nodes[0].verifymessage(address, sig, msg)
if __name__ == '__main__':
WalletEncryptionTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/rpc_getblockstats.py | #!/usr/bin/env python3
# Copyright (c) 2017-2021 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Test getblockstats rpc call
#
from test_framework.blocktools import COINBASE_MATURITY
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
)
import json
import os
TESTSDIR = os.path.dirname(os.path.realpath(__file__))
class GetblockstatsTest(BitcoinTestFramework):
start_height = 101
max_stat_pos = 2
def add_options(self, parser):
parser.add_argument('--gen-test-data', dest='gen_test_data',
default=False, action='store_true',
help='Generate test data')
parser.add_argument('--test-data', dest='test_data',
default='data/rpc_getblockstats.json',
action='store', metavar='FILE',
help='Test data file')
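        # Typical regeneration workflow (a sketch):
        #   test/functional/rpc_getblockstats.py --gen-test-data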
def set_test_params(self):
self.num_nodes = 1
self.setup_clean_chain = True
self.supports_cli = False
def get_stats(self):
return [self.nodes[0].getblockstats(hash_or_height=self.start_height + i) for i in range(self.max_stat_pos+1)]
def generate_test_data(self, filename):
mocktime = 1525107225
self.nodes[0].setmocktime(mocktime)
self.nodes[0].createwallet(wallet_name='test')
privkey = self.nodes[0].get_deterministic_priv_key().key
self.nodes[0].importprivkey(privkey)
self.generate(self.nodes[0], COINBASE_MATURITY + 1)
address = self.nodes[0].get_deterministic_priv_key().address
self.nodes[0].sendtoaddress(address=address, amount=10, subtractfeefromamount=True)
self.generate(self.nodes[0], 1)
self.nodes[0].sendtoaddress(address=address, amount=10, subtractfeefromamount=True)
self.nodes[0].sendtoaddress(address=address, amount=10, subtractfeefromamount=False)
self.nodes[0].settxfee(amount=0.003)
self.nodes[0].sendtoaddress(address=address, amount=1, subtractfeefromamount=True)
        # Send to an OP_RETURN output to test its exclusion from statistics
self.nodes[0].send(outputs={"data": "21"})
self.sync_all()
self.generate(self.nodes[0], 1)
self.expected_stats = self.get_stats()
blocks = []
tip = self.nodes[0].getbestblockhash()
blockhash = None
height = 0
while tip != blockhash:
blockhash = self.nodes[0].getblockhash(height)
blocks.append(self.nodes[0].getblock(blockhash, 0))
height += 1
to_dump = {
'blocks': blocks,
'mocktime': int(mocktime),
'stats': self.expected_stats,
}
with open(filename, 'w', encoding="utf8") as f:
json.dump(to_dump, f, sort_keys=True, indent=2)
def load_test_data(self, filename):
with open(filename, 'r', encoding="utf8") as f:
d = json.load(f)
blocks = d['blocks']
mocktime = d['mocktime']
self.expected_stats = d['stats']
# Set the timestamps from the file so that the nodes can get out of Initial Block Download
self.nodes[0].setmocktime(mocktime)
self.sync_all()
for b in blocks:
self.nodes[0].submitblock(b)
def run_test(self):
test_data = os.path.join(TESTSDIR, self.options.test_data)
if self.options.gen_test_data:
self.generate_test_data(test_data)
else:
self.load_test_data(test_data)
self.sync_all()
stats = self.get_stats()
# Make sure all valid statistics are included but nothing else is
expected_keys = self.expected_stats[0].keys()
assert_equal(set(stats[0].keys()), set(expected_keys))
assert_equal(stats[0]['height'], self.start_height)
assert_equal(stats[self.max_stat_pos]['height'], self.start_height + self.max_stat_pos)
for i in range(self.max_stat_pos+1):
            self.log.info('Checking block %d' % i)
assert_equal(stats[i], self.expected_stats[i])
# Check selecting block by hash too
blockhash = self.expected_stats[i]['blockhash']
stats_by_hash = self.nodes[0].getblockstats(hash_or_height=blockhash)
assert_equal(stats_by_hash, self.expected_stats[i])
# Make sure each stat can be queried on its own
for stat in expected_keys:
for i in range(self.max_stat_pos+1):
result = self.nodes[0].getblockstats(hash_or_height=self.start_height + i, stats=[stat])
assert_equal(list(result.keys()), [stat])
if result[stat] != self.expected_stats[i][stat]:
self.log.info('result[%s] (%d) failed, %r != %r' % (
stat, i, result[stat], self.expected_stats[i][stat]))
assert_equal(result[stat], self.expected_stats[i][stat])
# Make sure only the selected statistics are included (more than one)
some_stats = {'minfee', 'maxfee'}
stats = self.nodes[0].getblockstats(hash_or_height=1, stats=list(some_stats))
assert_equal(set(stats.keys()), some_stats)
# Test invalid parameters raise the proper json exceptions
tip = self.start_height + self.max_stat_pos
assert_raises_rpc_error(-8, 'Target block height %d after current tip %d' % (tip+1, tip),
self.nodes[0].getblockstats, hash_or_height=tip+1)
assert_raises_rpc_error(-8, 'Target block height %d is negative' % (-1),
self.nodes[0].getblockstats, hash_or_height=-1)
        # Make sure invalid stats aren't allowed
inv_sel_stat = 'asdfghjkl'
inv_stats = [
[inv_sel_stat],
['minfee', inv_sel_stat],
[inv_sel_stat, 'minfee'],
['minfee', inv_sel_stat, 'maxfee'],
]
for inv_stat in inv_stats:
assert_raises_rpc_error(-8, f"Invalid selected statistic '{inv_sel_stat}'",
self.nodes[0].getblockstats, hash_or_height=1, stats=inv_stat)
# Make sure we aren't always returning inv_sel_stat as the culprit stat
assert_raises_rpc_error(-8, f"Invalid selected statistic 'aaa{inv_sel_stat}'",
self.nodes[0].getblockstats, hash_or_height=1, stats=['minfee', f'aaa{inv_sel_stat}'])
# Mainchain's genesis block shouldn't be found on regtest
assert_raises_rpc_error(-5, 'Block not found', self.nodes[0].getblockstats,
hash_or_height='000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f')
# Invalid number of args
assert_raises_rpc_error(-1, 'getblockstats hash_or_height ( stats )', self.nodes[0].getblockstats, '00', 1, 2)
assert_raises_rpc_error(-1, 'getblockstats hash_or_height ( stats )', self.nodes[0].getblockstats)
self.log.info('Test block height 0')
genesis_stats = self.nodes[0].getblockstats(0)
assert_equal(genesis_stats["blockhash"], "0f9188f13cb7b2c71f2a335e3a4fc328bf5beb436012afca590b1a11466e2206")
assert_equal(genesis_stats["utxo_increase"], 1)
assert_equal(genesis_stats["utxo_size_inc"], 117)
assert_equal(genesis_stats["utxo_increase_actual"], 0)
assert_equal(genesis_stats["utxo_size_inc_actual"], 0)
self.log.info('Test tip including OP_RETURN')
tip_stats = self.nodes[0].getblockstats(tip)
assert_equal(tip_stats["utxo_increase"], 6)
assert_equal(tip_stats["utxo_size_inc"], 441)
assert_equal(tip_stats["utxo_increase_actual"], 4)
assert_equal(tip_stats["utxo_size_inc_actual"], 300)
if __name__ == '__main__':
GetblockstatsTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/feature_reindex_readonly.py | #!/usr/bin/env python3
# Copyright (c) 2023-present The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test running bitcoind with -reindex from a read-only blockstore
- Start a node, generate blocks, then restart with -reindex after setting blk files to read-only
"""
import os
import stat
import subprocess
from test_framework.test_framework import BitcoinTestFramework
class BlockstoreReindexTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
self.extra_args = [["-fastprune"]]
def reindex_readonly(self):
self.log.debug("Generate block big enough to start second block file")
fastprune_blockfile_size = 0x10000
opreturn = "6a"
nulldata = fastprune_blockfile_size * "ff"
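        # raw() is a raw-script output descriptor: the block gets an OP_RETURN
        # output carrying 0x10000 bytes of 0xff, which overflows the -fastprune
        # block file size and forces blk00001.dat to be created.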
self.generateblock(self.nodes[0], output=f"raw({opreturn}{nulldata})", transactions=[])
self.stop_node(0)
assert (self.nodes[0].chain_path / "blocks" / "blk00000.dat").exists()
assert (self.nodes[0].chain_path / "blocks" / "blk00001.dat").exists()
self.log.debug("Make the first block file read-only")
filename = self.nodes[0].chain_path / "blocks" / "blk00000.dat"
filename.chmod(stat.S_IREAD)
undo_immutable = lambda: None
# Linux
try:
subprocess.run(['chattr'], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
try:
subprocess.run(['chattr', '+i', filename], capture_output=True, check=True)
undo_immutable = lambda: subprocess.check_call(['chattr', '-i', filename])
self.log.info("Made file immutable with chattr")
except subprocess.CalledProcessError as e:
self.log.warning(str(e))
if e.stdout:
self.log.warning(f"stdout: {e.stdout}")
if e.stderr:
self.log.warning(f"stderr: {e.stderr}")
if os.getuid() == 0:
self.log.warning("Return early on Linux under root, because chattr failed.")
self.log.warning("This should only happen due to missing capabilities in a container.")
self.log.warning("Make sure to --cap-add LINUX_IMMUTABLE if you want to run this test.")
undo_immutable = False
except Exception:
# macOS, and *BSD
try:
subprocess.run(['chflags'], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
try:
subprocess.run(['chflags', 'uchg', filename], capture_output=True, check=True)
undo_immutable = lambda: subprocess.check_call(['chflags', 'nouchg', filename])
self.log.info("Made file immutable with chflags")
except subprocess.CalledProcessError as e:
self.log.warning(str(e))
if e.stdout:
self.log.warning(f"stdout: {e.stdout}")
if e.stderr:
self.log.warning(f"stderr: {e.stderr}")
if os.getuid() == 0:
self.log.warning("Return early on BSD under root, because chflags failed.")
undo_immutable = False
except Exception:
pass
if undo_immutable:
self.log.info("Attempt to restart and reindex the node with the unwritable block file")
with self.nodes[0].assert_debug_log(expected_msgs=['FlushStateToDisk', 'failed to open file'], unexpected_msgs=[]):
self.nodes[0].assert_start_raises_init_error(extra_args=['-reindex', '-fastprune'],
expected_msg="Error: A fatal internal error occurred, see debug.log for details")
undo_immutable()
filename.chmod(0o777)
def run_test(self):
self.reindex_readonly()
if __name__ == '__main__':
BlockstoreReindexTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/rpc_invalidateblock.py | #!/usr/bin/env python3
# Copyright (c) 2014-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the invalidateblock RPC."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.address import ADDRESS_BCRT1_UNSPENDABLE_DESCRIPTOR
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
)
class InvalidateTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 3
def setup_network(self):
self.setup_nodes()
def run_test(self):
self.log.info("Make sure we repopulate setBlockIndexCandidates after InvalidateBlock:")
self.log.info("Mine 4 blocks on Node 0")
self.generate(self.nodes[0], 4, sync_fun=self.no_op)
assert_equal(self.nodes[0].getblockcount(), 4)
besthash_n0 = self.nodes[0].getbestblockhash()
self.log.info("Mine competing 6 blocks on Node 1")
self.generate(self.nodes[1], 6, sync_fun=self.no_op)
assert_equal(self.nodes[1].getblockcount(), 6)
self.log.info("Connect nodes to force a reorg")
self.connect_nodes(0, 1)
self.sync_blocks(self.nodes[0:2])
assert_equal(self.nodes[0].getblockcount(), 6)
badhash = self.nodes[1].getblockhash(2)
self.log.info("Invalidate block 2 on node 0 and verify we reorg to node 0's original chain")
self.nodes[0].invalidateblock(badhash)
assert_equal(self.nodes[0].getblockcount(), 4)
assert_equal(self.nodes[0].getbestblockhash(), besthash_n0)
self.log.info("Make sure we won't reorg to a lower work chain:")
self.connect_nodes(1, 2)
self.log.info("Sync node 2 to node 1 so both have 6 blocks")
self.sync_blocks(self.nodes[1:3])
assert_equal(self.nodes[2].getblockcount(), 6)
self.log.info("Invalidate block 5 on node 1 so its tip is now at 4")
self.nodes[1].invalidateblock(self.nodes[1].getblockhash(5))
assert_equal(self.nodes[1].getblockcount(), 4)
self.log.info("Invalidate block 3 on node 2, so its tip is now 2")
self.nodes[2].invalidateblock(self.nodes[2].getblockhash(3))
assert_equal(self.nodes[2].getblockcount(), 2)
self.log.info("..and then mine a block")
self.generate(self.nodes[2], 1, sync_fun=self.no_op)
self.log.info("Verify all nodes are at the right height")
self.wait_until(lambda: self.nodes[2].getblockcount() == 3, timeout=5)
self.wait_until(lambda: self.nodes[0].getblockcount() == 4, timeout=5)
self.wait_until(lambda: self.nodes[1].getblockcount() == 4, timeout=5)
self.log.info("Verify that we reconsider all ancestors as well")
blocks = self.generatetodescriptor(self.nodes[1], 10, ADDRESS_BCRT1_UNSPENDABLE_DESCRIPTOR, sync_fun=self.no_op)
assert_equal(self.nodes[1].getbestblockhash(), blocks[-1])
# Invalidate the two blocks at the tip
self.nodes[1].invalidateblock(blocks[-1])
self.nodes[1].invalidateblock(blocks[-2])
assert_equal(self.nodes[1].getbestblockhash(), blocks[-3])
# Reconsider only the previous tip
self.nodes[1].reconsiderblock(blocks[-1])
# Should be back at the tip by now
assert_equal(self.nodes[1].getbestblockhash(), blocks[-1])
self.log.info("Verify that we reconsider all descendants")
blocks = self.generatetodescriptor(self.nodes[1], 10, ADDRESS_BCRT1_UNSPENDABLE_DESCRIPTOR, sync_fun=self.no_op)
assert_equal(self.nodes[1].getbestblockhash(), blocks[-1])
# Invalidate the two blocks at the tip
self.nodes[1].invalidateblock(blocks[-2])
self.nodes[1].invalidateblock(blocks[-4])
assert_equal(self.nodes[1].getbestblockhash(), blocks[-5])
# Reconsider only the previous tip
self.nodes[1].reconsiderblock(blocks[-4])
# Should be back at the tip by now
assert_equal(self.nodes[1].getbestblockhash(), blocks[-1])
self.log.info("Verify that invalidating an unknown block throws an error")
assert_raises_rpc_error(-5, "Block not found", self.nodes[1].invalidateblock, "00" * 32)
assert_equal(self.nodes[1].getbestblockhash(), blocks[-1])
if __name__ == '__main__':
InvalidateTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/mempool_sigoplimit.py | #!/usr/bin/env python3
# Copyright (c) 2023 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test sigop limit mempool policy (`-bytespersigop` parameter)"""
from decimal import Decimal
from math import ceil
from test_framework.messages import (
COutPoint,
CTransaction,
CTxIn,
CTxInWitness,
CTxOut,
WITNESS_SCALE_FACTOR,
tx_from_hex,
)
from test_framework.script import (
CScript,
OP_CHECKMULTISIG,
OP_CHECKSIG,
OP_ENDIF,
OP_FALSE,
OP_IF,
OP_RETURN,
OP_TRUE,
)
from test_framework.script_util import (
keys_to_multisig_script,
script_to_p2wsh_script,
)
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_greater_than,
assert_greater_than_or_equal,
)
from test_framework.wallet import MiniWallet
from test_framework.wallet_util import generate_keypair
DEFAULT_BYTES_PER_SIGOP = 20 # default setting
class BytesPerSigOpTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
# allow large datacarrier output to pad transactions
self.extra_args = [['-datacarriersize=100000']]
def create_p2wsh_spending_tx(self, witness_script, output_script):
"""Create a 1-input-1-output P2WSH spending transaction with only the
witness script in the witness stack and the given output script."""
# create P2WSH address and fund it via MiniWallet first
fund = self.wallet.send_to(
from_node=self.nodes[0],
scriptPubKey=script_to_p2wsh_script(witness_script),
amount=1000000,
)
# create spending transaction
tx = CTransaction()
tx.vin = [CTxIn(COutPoint(int(fund["txid"], 16), fund["sent_vout"]))]
tx.wit.vtxinwit = [CTxInWitness()]
tx.wit.vtxinwit[0].scriptWitness.stack = [bytes(witness_script)]
tx.vout = [CTxOut(500000, output_script)]
return tx
def test_sigops_limit(self, bytes_per_sigop, num_sigops):
sigop_equivalent_vsize = ceil(num_sigops * bytes_per_sigop / WITNESS_SCALE_FACTOR)
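        # e.g. with the default 20 bytes per sigop and 69 sigops:
        #   ceil(69 * 20 / 4) = 345 vbytes (WITNESS_SCALE_FACTOR = 4)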
self.log.info(f"- {num_sigops} sigops (equivalent size of {sigop_equivalent_vsize} vbytes)")
# create a template tx with the specified sigop cost in the witness script
# (note that the sigops count even though being in a branch that's not executed)
num_multisigops = num_sigops // 20
num_singlesigops = num_sigops % 20
witness_script = CScript(
[OP_FALSE, OP_IF] +
[OP_CHECKMULTISIG]*num_multisigops +
[OP_CHECKSIG]*num_singlesigops +
[OP_ENDIF, OP_TRUE]
)
# use a 256-byte data-push as lower bound in the output script, in order
# to avoid having to compensate for tx size changes caused by varying
# length serialization sizes (both for scriptPubKey and data-push lengths)
tx = self.create_p2wsh_spending_tx(witness_script, CScript([OP_RETURN, b'X'*256]))
# bump the tx to reach the sigop-limit equivalent size by padding the datacarrier output
assert_greater_than_or_equal(sigop_equivalent_vsize, tx.get_vsize())
vsize_to_pad = sigop_equivalent_vsize - tx.get_vsize()
tx.vout[0].scriptPubKey = CScript([OP_RETURN, b'X'*(256+vsize_to_pad)])
assert_equal(sigop_equivalent_vsize, tx.get_vsize())
res = self.nodes[0].testmempoolaccept([tx.serialize().hex()])[0]
assert_equal(res['allowed'], True)
assert_equal(res['vsize'], sigop_equivalent_vsize)
# increase the tx's vsize to be right above the sigop-limit equivalent size
# => tx's vsize in mempool should also grow accordingly
tx.vout[0].scriptPubKey = CScript([OP_RETURN, b'X'*(256+vsize_to_pad+1)])
res = self.nodes[0].testmempoolaccept([tx.serialize().hex()])[0]
assert_equal(res['allowed'], True)
assert_equal(res['vsize'], sigop_equivalent_vsize+1)
# decrease the tx's vsize to be right below the sigop-limit equivalent size
# => tx's vsize in mempool should stick at the sigop-limit equivalent
# bytes level, as it is higher than the tx's serialized vsize
# (the maximum of both is taken)
tx.vout[0].scriptPubKey = CScript([OP_RETURN, b'X'*(256+vsize_to_pad-1)])
res = self.nodes[0].testmempoolaccept([tx.serialize().hex()])[0]
assert_equal(res['allowed'], True)
assert_equal(res['vsize'], sigop_equivalent_vsize)
# check that the ancestor and descendant size calculations in the mempool
# also use the same max(sigop_equivalent_vsize, serialized_vsize) logic
# (to keep it simple, we only test the case here where the sigop vsize
# is much larger than the serialized vsize, i.e. we create a small child
# tx by getting rid of the large padding output)
tx.vout[0].scriptPubKey = CScript([OP_RETURN, b'test123'])
assert_greater_than(sigop_equivalent_vsize, tx.get_vsize())
self.nodes[0].sendrawtransaction(hexstring=tx.serialize().hex(), maxburnamount='1.0')
# fetch parent tx, which doesn't contain any sigops
parent_txid = tx.vin[0].prevout.hash.to_bytes(32, 'big').hex()
parent_tx = tx_from_hex(self.nodes[0].getrawtransaction(txid=parent_txid))
entry_child = self.nodes[0].getmempoolentry(tx.rehash())
assert_equal(entry_child['descendantcount'], 1)
assert_equal(entry_child['descendantsize'], sigop_equivalent_vsize)
assert_equal(entry_child['ancestorcount'], 2)
assert_equal(entry_child['ancestorsize'], sigop_equivalent_vsize + parent_tx.get_vsize())
entry_parent = self.nodes[0].getmempoolentry(parent_tx.rehash())
assert_equal(entry_parent['ancestorcount'], 1)
assert_equal(entry_parent['ancestorsize'], parent_tx.get_vsize())
assert_equal(entry_parent['descendantcount'], 2)
assert_equal(entry_parent['descendantsize'], parent_tx.get_vsize() + sigop_equivalent_vsize)
def test_sigops_package(self):
self.log.info("Test a overly-large sigops-vbyte hits package limits")
# Make a 2-transaction package which fails vbyte checks even though
# separately they would work.
self.restart_node(0, extra_args=["-bytespersigop=5000","-permitbaremultisig=1"] + self.extra_args[0])
def create_bare_multisig_tx(utxo_to_spend=None):
_, pubkey = generate_keypair()
amount_for_bare = 50000
tx_dict = self.wallet.create_self_transfer(fee=Decimal("3"), utxo_to_spend=utxo_to_spend)
tx_utxo = tx_dict["new_utxo"]
tx = tx_dict["tx"]
tx.vout.append(CTxOut(amount_for_bare, keys_to_multisig_script([pubkey], k=1)))
tx.vout[0].nValue -= amount_for_bare
tx_utxo["txid"] = tx.rehash()
tx_utxo["value"] -= Decimal("0.00005000")
return (tx_utxo, tx)
tx_parent_utxo, tx_parent = create_bare_multisig_tx()
tx_child_utxo, tx_child = create_bare_multisig_tx(tx_parent_utxo)
# Separately, the parent tx is ok
parent_individual_testres = self.nodes[0].testmempoolaccept([tx_parent.serialize().hex()])[0]
assert parent_individual_testres["allowed"]
# Multisig is counted as MAX_PUBKEYS_PER_MULTISIG = 20 sigops
assert_equal(parent_individual_testres["vsize"], 5000 * 20)
# But together, they exceed the limits in the *package* context. If the sigops-adjusted vsize
# weren't checked here, validation would get further and return a too-long-mempool-chain error instead.
packet_test = self.nodes[0].testmempoolaccept([tx_parent.serialize().hex(), tx_child.serialize().hex()])
assert_equal([x["package-error"] for x in packet_test], ["package-mempool-limits", "package-mempool-limits"])
# When we actually try to submit, the parent makes it into the mempool, but the child would exceed ancestor vsize limits
res = self.nodes[0].submitpackage([tx_parent.serialize().hex(), tx_child.serialize().hex()])
assert "too-long-mempool-chain" in res["tx-results"][tx_child.getwtxid()]["error"]
assert tx_parent.rehash() in self.nodes[0].getrawmempool()
# Transactions are tiny in weight
assert_greater_than(2000, tx_parent.get_weight() + tx_child.get_weight())
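# Hedged arithmetic for the vsize assertion above: a bare OP_CHECKMULTISIG is
# counted as MAX_PUBKEYS_PER_MULTISIG = 20 legacy sigops, and legacy sigops are
# scaled by the witness factor (4) into sigop cost, so with -bytespersigop=5000
# the sigop-adjusted vsize comes out at the asserted 5000 * 20 vbytes:
assert (20 * 4 * 5000 + 3) // 4 == 5000 * 20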
def run_test(self):
self.wallet = MiniWallet(self.nodes[0])
for bytes_per_sigop in (DEFAULT_BYTES_PER_SIGOP, 43, 81, 165, 327, 649, 1072):
if bytes_per_sigop == DEFAULT_BYTES_PER_SIGOP:
self.log.info(f"Test default sigops limit setting ({bytes_per_sigop} bytes per sigop)...")
else:
bytespersigop_parameter = f"-bytespersigop={bytes_per_sigop}"
self.log.info(f"Test sigops limit setting {bytespersigop_parameter}...")
self.restart_node(0, extra_args=[bytespersigop_parameter] + self.extra_args[0])
for num_sigops in (69, 101, 142, 183, 222):
self.test_sigops_limit(bytes_per_sigop, num_sigops)
self.generate(self.wallet, 1)
self.test_sigops_package()
if __name__ == '__main__':
BytesPerSigOpTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/rpc_decodescript.py | #!/usr/bin/env python3
# Copyright (c) 2015-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test decoding scripts via decodescript RPC command."""
import json
import os
from test_framework.messages import (
sha256,
tx_from_hex,
)
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
)
class DecodeScriptTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
def decodescript_script_sig(self):
signature = '304502207fa7a6d1e0ee81132a269ad84e68d695483745cde8b541e3bf630749894e342a022100c1f7ab20e13e22fb95281a870f3dcf38d782e53023ee313d741ad0cfbc0c509001'
push_signature = '48' + signature
public_key = '03b0da749730dc9b4b1f4a14d6902877a92541f5368778853d9c4a0cb7802dcfb2'
push_public_key = '21' + public_key
# below are test cases for all of the standard transaction types
self.log.info("- P2PK")
# the scriptSig of a public key scriptPubKey simply pushes a signature onto the stack
rpc_result = self.nodes[0].decodescript(push_signature)
assert_equal(signature, rpc_result['asm'])
self.log.info("- P2PKH")
rpc_result = self.nodes[0].decodescript(push_signature + push_public_key)
assert_equal(signature + ' ' + public_key, rpc_result['asm'])
self.log.info("- multisig")
# this also tests the leading portion of a P2SH multisig scriptSig
# OP_0 <A sig> <B sig>
rpc_result = self.nodes[0].decodescript('00' + push_signature + push_signature)
assert_equal('0 ' + signature + ' ' + signature, rpc_result['asm'])
self.log.info("- P2SH")
# an empty P2SH redeemScript is valid and makes for a very simple test case.
# thus, such a spending scriptSig would just need to pass the outer redeemScript
# hash test and leave true on the top of the stack.
rpc_result = self.nodes[0].decodescript('5100')
assert_equal('1 0', rpc_result['asm'])
# null data scriptSig - no such thing because null data scripts cannot be spent.
# thus, no test case for that standard transaction type is here.
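# A hedged helper sketch (illustrative only, not used by the test): pushes of up
# to 75 bytes use the byte length itself as the push opcode, which is why '48'
# (0x48 = 72) prefixes the 72-byte signature and '21' (0x21 = 33) the 33-byte
# compressed public key above.
def direct_push_prefix(data_hex):
    n = len(data_hex) // 2
    assert n <= 75  # longer pushes would need OP_PUSHDATA1/2/4
    return f"{n:02x}" + data_hex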
def decodescript_script_pub_key(self):
public_key = '03b0da749730dc9b4b1f4a14d6902877a92541f5368778853d9c4a0cb7802dcfb2'
push_public_key = '21' + public_key
public_key_hash = '5dd1d3a048119c27b28293056724d9522f26d945'
push_public_key_hash = '14' + public_key_hash
uncompressed_public_key = '04b0da749730dc9b4b1f4a14d6902877a92541f5368778853d9c4a0cb7802dcfb25e01fc8fde47c96c98a4f3a8123e33a38a50cf9025cc8c4494a518f991792bb7'
push_uncompressed_public_key = '41' + uncompressed_public_key
p2wsh_p2pk_script_hash = 'd8590cf8ea0674cf3d49fd7ca249b85ef7485dea62c138468bddeb20cd6519f7'
# below are test cases for all of the standard transaction types
self.log.info("- P2PK")
# <pubkey> OP_CHECKSIG
rpc_result = self.nodes[0].decodescript(push_public_key + 'ac')
assert_equal(public_key + ' OP_CHECKSIG', rpc_result['asm'])
assert_equal('pubkey', rpc_result['type'])
# P2PK is translated to P2WPKH
assert_equal('0 ' + public_key_hash, rpc_result['segwit']['asm'])
self.log.info("- P2PKH")
# OP_DUP OP_HASH160 <PubKeyHash> OP_EQUALVERIFY OP_CHECKSIG
rpc_result = self.nodes[0].decodescript('76a9' + push_public_key_hash + '88ac')
assert_equal('pubkeyhash', rpc_result['type'])
assert_equal('OP_DUP OP_HASH160 ' + public_key_hash + ' OP_EQUALVERIFY OP_CHECKSIG', rpc_result['asm'])
# P2PKH is translated to P2WPKH
assert_equal('witness_v0_keyhash', rpc_result['segwit']['type'])
assert_equal('0 ' + public_key_hash, rpc_result['segwit']['asm'])
self.log.info("- multisig")
# <m> <A pubkey> <B pubkey> <C pubkey> <n> OP_CHECKMULTISIG
# just imagine that the pub keys used below are different.
# for our purposes here it does not matter that they are the same even though it is unrealistic.
multisig_script = '52' + push_public_key + push_public_key + push_public_key + '53ae'
rpc_result = self.nodes[0].decodescript(multisig_script)
assert_equal('multisig', rpc_result['type'])
assert_equal('2 ' + public_key + ' ' + public_key + ' ' + public_key + ' 3 OP_CHECKMULTISIG', rpc_result['asm'])
# multisig in P2WSH
multisig_script_hash = sha256(bytes.fromhex(multisig_script)).hex()
assert_equal('witness_v0_scripthash', rpc_result['segwit']['type'])
assert_equal('0 ' + multisig_script_hash, rpc_result['segwit']['asm'])
self.log.info ("- P2SH")
# OP_HASH160 <Hash160(redeemScript)> OP_EQUAL.
# push_public_key_hash here should actually be the hash of a redeem script.
# but this works the same for purposes of this test.
rpc_result = self.nodes[0].decodescript('a9' + push_public_key_hash + '87')
assert_equal('scripthash', rpc_result['type'])
assert_equal('OP_HASH160 ' + public_key_hash + ' OP_EQUAL', rpc_result['asm'])
# P2SH does not work in segwit scripts. decodescript should not return a result for it.
assert 'segwit' not in rpc_result
self.log.info("- null data")
# use a signature look-alike here to make sure that we do not decode random data as a signature.
# this matters if/when signature sighash decoding comes along.
# would want to make sure that no such decoding takes place in this case.
signature_imposter = '48304502207fa7a6d1e0ee81132a269ad84e68d695483745cde8b541e3bf630749894e342a022100c1f7ab20e13e22fb95281a870f3dcf38d782e53023ee313d741ad0cfbc0c509001'
# OP_RETURN <data>
rpc_result = self.nodes[0].decodescript('6a' + signature_imposter)
assert_equal('nulldata', rpc_result['type'])
assert_equal('OP_RETURN ' + signature_imposter[2:], rpc_result['asm'])
self.log.info("- CLTV redeem script")
# redeem scripts are in-effect scriptPubKey scripts, so adding a test here.
# OP_NOP2 is also known as OP_CHECKLOCKTIMEVERIFY.
# just imagine that the pub keys used below are different.
# for our purposes here it does not matter that they are the same even though it is unrealistic.
#
# OP_IF
# <receiver-pubkey> OP_CHECKSIGVERIFY
# OP_ELSE
# <lock-until> OP_CHECKLOCKTIMEVERIFY OP_DROP
# OP_ENDIF
# <sender-pubkey> OP_CHECKSIG
#
# lock until block 500,000
cltv_script = '63' + push_public_key + 'ad670320a107b17568' + push_public_key + 'ac'
rpc_result = self.nodes[0].decodescript(cltv_script)
assert_equal('nonstandard', rpc_result['type'])
assert_equal('OP_IF ' + public_key + ' OP_CHECKSIGVERIFY OP_ELSE 500000 OP_CHECKLOCKTIMEVERIFY OP_DROP OP_ENDIF ' + public_key + ' OP_CHECKSIG', rpc_result['asm'])
# CLTV script in P2WSH
cltv_script_hash = sha256(bytes.fromhex(cltv_script)).hex()
assert_equal('0 ' + cltv_script_hash, rpc_result['segwit']['asm'])
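# Hedged aside: the '0320a107' fragment of cltv_script pushes the 3-byte
# little-endian scriptnum 20 a1 07, i.e. 0x07a120 = 500000, matching the
# OP_CHECKLOCKTIMEVERIFY height shown in the decoded asm above.
assert int.from_bytes(bytes.fromhex('20a107'), 'little') == 500000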
self.log.info("- P2PK with uncompressed pubkey")
# <pubkey> OP_CHECKSIG
rpc_result = self.nodes[0].decodescript(push_uncompressed_public_key + 'ac')
assert_equal('pubkey', rpc_result['type'])
assert_equal(uncompressed_public_key + ' OP_CHECKSIG', rpc_result['asm'])
# uncompressed pubkeys are invalid for checksigs in segwit scripts.
# decodescript should not return a P2WPKH equivalent.
assert 'segwit' not in rpc_result
self.log.info("- multisig with uncompressed pubkey")
# <m> <A pubkey> <B pubkey> <n> OP_CHECKMULTISIG
# just imagine that the pub keys used below are different.
# the purpose of this test is to check that a segwit script is not returned for bare multisig scripts
# with an uncompressed pubkey in them.
rpc_result = self.nodes[0].decodescript('52' + push_public_key + push_uncompressed_public_key +'52ae')
assert_equal('multisig', rpc_result['type'])
assert_equal('2 ' + public_key + ' ' + uncompressed_public_key + ' 2 OP_CHECKMULTISIG', rpc_result['asm'])
# uncompressed pubkeys are invalid for checksigs in segwit scripts.
# decodescript should not return a P2WPKH equivalent.
assert 'segwit' not in rpc_result
self.log.info("- P2WPKH")
# 0 <PubKeyHash>
rpc_result = self.nodes[0].decodescript('00' + push_public_key_hash)
assert_equal('witness_v0_keyhash', rpc_result['type'])
assert_equal('0 ' + public_key_hash, rpc_result['asm'])
# segwit scripts do not work nested into each other.
# a nested segwit script should not be returned in the results.
assert 'segwit' not in rpc_result
self.log.info("- P2WSH")
# 0 <ScriptHash>
# even though this is the hash of a P2PK script, which would more typically be expressed
# as P2WPKH, that does not matter for the purpose of this test.
rpc_result = self.nodes[0].decodescript('0020' + p2wsh_p2pk_script_hash)
assert_equal('witness_v0_scripthash', rpc_result['type'])
assert_equal('0 ' + p2wsh_p2pk_script_hash, rpc_result['asm'])
# segwit scripts do not work nested into each other.
# a nested segwit script should not be returned in the results.
assert 'segwit' not in rpc_result
self.log.info("- P2TR")
# 1 <x-only pubkey>
xonly_public_key = '01'*32 # first ever P2TR output on mainnet
rpc_result = self.nodes[0].decodescript('5120' + xonly_public_key)
assert_equal('witness_v1_taproot', rpc_result['type'])
assert_equal('1 ' + xonly_public_key, rpc_result['asm'])
assert 'segwit' not in rpc_result
def decoderawtransaction_asm_sighashtype(self):
"""Test decoding scripts via RPC command "decoderawtransaction".
This test lives with the "decodescript" tests because both exercise the same "asm" script decoding.
"""
self.log.info("- various mainnet txs")
# this test case uses a mainnet transaction that has a P2SH input and both P2PKH and P2SH outputs.
tx = '0100000001696a20784a2c70143f634e95227dbdfdf0ecd51647052e70854512235f5986ca010000008a47304402207174775824bec6c2700023309a168231ec80b82c6069282f5133e6f11cbb04460220570edc55c7c5da2ca687ebd0372d3546ebc3f810516a002350cac72dfe192dfb014104d3f898e6487787910a690410b7a917ef198905c27fb9d3b0a42da12aceae0544fc7088d239d9a48f2828a15a09e84043001f27cc80d162cb95404e1210161536ffffffff0100e1f505000000001976a914eb6c6e0cdb2d256a32d97b8df1fc75d1920d9bca88ac00000000'
rpc_result = self.nodes[0].decoderawtransaction(tx)
assert_equal('304402207174775824bec6c2700023309a168231ec80b82c6069282f5133e6f11cbb04460220570edc55c7c5da2ca687ebd0372d3546ebc3f810516a002350cac72dfe192dfb[ALL] 04d3f898e6487787910a690410b7a917ef198905c27fb9d3b0a42da12aceae0544fc7088d239d9a48f2828a15a09e84043001f27cc80d162cb95404e1210161536', rpc_result['vin'][0]['scriptSig']['asm'])
# this test case uses a mainnet transaction that has a P2SH input and both P2PKH and P2SH outputs.
# it's from James D'Angelo's awesome introductory videos about multisig: https://www.youtube.com/watch?v=zIbUSaZBJgU and https://www.youtube.com/watch?v=OSA1pwlaypc
# verify that we have not altered scriptPubKey decoding.
tx = '01000000018d1f5635abd06e2c7e2ddf58dc85b3de111e4ad6e0ab51bb0dcf5e84126d927300000000fdfe0000483045022100ae3b4e589dfc9d48cb82d41008dc5fa6a86f94d5c54f9935531924602730ab8002202f88cf464414c4ed9fa11b773c5ee944f66e9b05cc1e51d97abc22ce098937ea01483045022100b44883be035600e9328a01b66c7d8439b74db64187e76b99a68f7893b701d5380220225bf286493e4c4adcf928c40f785422572eb232f84a0b83b0dea823c3a19c75014c695221020743d44be989540d27b1b4bbbcfd17721c337cb6bc9af20eb8a32520b393532f2102c0120a1dda9e51a938d39ddd9fe0ebc45ea97e1d27a7cbd671d5431416d3dd87210213820eb3d5f509d7438c9eeecb4157b2f595105e7cd564b3cdbb9ead3da41eed53aeffffffff02611e0000000000001976a914dc863734a218bfe83ef770ee9d41a27f824a6e5688acee2a02000000000017a9142a5edea39971049a540474c6a99edf0aa4074c588700000000'
rpc_result = self.nodes[0].decoderawtransaction(tx)
assert_equal('8e3730608c3b0bb5df54f09076e196bc292a8e39a78e73b44b6ba08c78f5cbb0', rpc_result['txid'])
assert_equal('0 3045022100ae3b4e589dfc9d48cb82d41008dc5fa6a86f94d5c54f9935531924602730ab8002202f88cf464414c4ed9fa11b773c5ee944f66e9b05cc1e51d97abc22ce098937ea[ALL] 3045022100b44883be035600e9328a01b66c7d8439b74db64187e76b99a68f7893b701d5380220225bf286493e4c4adcf928c40f785422572eb232f84a0b83b0dea823c3a19c75[ALL] 5221020743d44be989540d27b1b4bbbcfd17721c337cb6bc9af20eb8a32520b393532f2102c0120a1dda9e51a938d39ddd9fe0ebc45ea97e1d27a7cbd671d5431416d3dd87210213820eb3d5f509d7438c9eeecb4157b2f595105e7cd564b3cdbb9ead3da41eed53ae', rpc_result['vin'][0]['scriptSig']['asm'])
assert_equal('OP_DUP OP_HASH160 dc863734a218bfe83ef770ee9d41a27f824a6e56 OP_EQUALVERIFY OP_CHECKSIG', rpc_result['vout'][0]['scriptPubKey']['asm'])
assert_equal('OP_HASH160 2a5edea39971049a540474c6a99edf0aa4074c58 OP_EQUAL', rpc_result['vout'][1]['scriptPubKey']['asm'])
txSave = tx_from_hex(tx)
self.log.info("- tx not passing DER signature checks")
# make sure that a specifically crafted op_return value will not pass all the IsDERSignature checks and then get decoded as a sighash type
tx = '01000000015ded05872fdbda629c7d3d02b194763ce3b9b1535ea884e3c8e765d42e316724020000006b48304502204c10d4064885c42638cbff3585915b322de33762598321145ba033fc796971e2022100bb153ad3baa8b757e30a2175bd32852d2e1cb9080f84d7e32fcdfd667934ef1b012103163c0ff73511ea1743fb5b98384a2ff09dd06949488028fd819f4d83f56264efffffffff0200000000000000000b6a0930060201000201000180380100000000001976a9141cabd296e753837c086da7a45a6c2fe0d49d7b7b88ac00000000'
rpc_result = self.nodes[0].decoderawtransaction(tx)
assert_equal('OP_RETURN 300602010002010001', rpc_result['vout'][0]['scriptPubKey']['asm'])
self.log.info("- tx passing DER signature checks")
# verify that we have not altered scriptPubKey processing even of a specially crafted P2PKH pubkeyhash and P2SH redeem script hash that is made to pass the der signature checks
tx = '01000000018d1f5635abd06e2c7e2ddf58dc85b3de111e4ad6e0ab51bb0dcf5e84126d927300000000fdfe0000483045022100ae3b4e589dfc9d48cb82d41008dc5fa6a86f94d5c54f9935531924602730ab8002202f88cf464414c4ed9fa11b773c5ee944f66e9b05cc1e51d97abc22ce098937ea01483045022100b44883be035600e9328a01b66c7d8439b74db64187e76b99a68f7893b701d5380220225bf286493e4c4adcf928c40f785422572eb232f84a0b83b0dea823c3a19c75014c695221020743d44be989540d27b1b4bbbcfd17721c337cb6bc9af20eb8a32520b393532f2102c0120a1dda9e51a938d39ddd9fe0ebc45ea97e1d27a7cbd671d5431416d3dd87210213820eb3d5f509d7438c9eeecb4157b2f595105e7cd564b3cdbb9ead3da41eed53aeffffffff02611e0000000000001976a914301102070101010101010102060101010101010188acee2a02000000000017a91430110207010101010101010206010101010101018700000000'
rpc_result = self.nodes[0].decoderawtransaction(tx)
assert_equal('OP_DUP OP_HASH160 3011020701010101010101020601010101010101 OP_EQUALVERIFY OP_CHECKSIG', rpc_result['vout'][0]['scriptPubKey']['asm'])
assert_equal('OP_HASH160 3011020701010101010101020601010101010101 OP_EQUAL', rpc_result['vout'][1]['scriptPubKey']['asm'])
# some more full transaction tests of varying specific scriptSigs. These are used instead of
# tests in decodescript_script_sig because the decodescript RPC works specifically
# on scriptPubKeys (argh!).
push_signature = txSave.vin[0].scriptSig.hex()[2:(0x48*2+4)]
signature = push_signature[2:]
der_signature = signature[:-2]
signature_sighash_decoded = der_signature + '[ALL]'
signature_2 = der_signature + '82'
push_signature_2 = '48' + signature_2
signature_2_sighash_decoded = der_signature + '[NONE|ANYONECANPAY]'
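# Hedged note on the bytes chosen above: a signature's final byte encodes the
# sighash type; 0x01 is SIGHASH_ALL, and 0x82 is SIGHASH_NONE (0x02) combined
# with SIGHASH_ANYONECANPAY (0x80), hence the '[ALL]' and '[NONE|ANYONECANPAY]'
# suffixes expected in the decoded asm below.
assert 0x02 | 0x80 == 0x82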
self.log.info("- P2PK scriptSig")
txSave.vin[0].scriptSig = bytes.fromhex(push_signature)
rpc_result = self.nodes[0].decoderawtransaction(txSave.serialize().hex())
assert_equal(signature_sighash_decoded, rpc_result['vin'][0]['scriptSig']['asm'])
# make sure that the sighash decodes come out correctly for a more complex / lesser used case.
txSave.vin[0].scriptSig = bytes.fromhex(push_signature_2)
rpc_result = self.nodes[0].decoderawtransaction(txSave.serialize().hex())
assert_equal(signature_2_sighash_decoded, rpc_result['vin'][0]['scriptSig']['asm'])
self.log.info("- multisig scriptSig")
txSave.vin[0].scriptSig = bytes.fromhex('00' + push_signature + push_signature_2)
rpc_result = self.nodes[0].decoderawtransaction(txSave.serialize().hex())
assert_equal('0 ' + signature_sighash_decoded + ' ' + signature_2_sighash_decoded, rpc_result['vin'][0]['scriptSig']['asm'])
self.log.info("- scriptSig that contains more than push operations")
# in fact, it contains an OP_RETURN with data specially crafted to cause improper decode if the code does not catch it.
txSave.vin[0].scriptSig = bytes.fromhex('6a143011020701010101010101020601010101010101')
rpc_result = self.nodes[0].decoderawtransaction(txSave.serialize().hex())
assert_equal('OP_RETURN 3011020701010101010101020601010101010101', rpc_result['vin'][0]['scriptSig']['asm'])
def decodescript_datadriven_tests(self):
with open(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data/rpc_decodescript.json'), encoding='utf-8') as f:
dd_tests = json.load(f)
for script, result in dd_tests:
rpc_result = self.nodes[0].decodescript(script)
assert_equal(result, rpc_result)
def decodescript_miniscript(self):
"""Check that a Miniscript is decoded when possible under P2WSH context."""
# Sourced from https://github.com/bitcoin/bitcoin/pull/27037#issuecomment-1416151907.
# Miniscript-compatible offered HTLC
res = self.nodes[0].decodescript("82012088a914ffffffffffffffffffffffffffffffffffffffff88210250929b74c1a04954b78b4b6035e97a5e078a5a0f28ec96d547bfee9ace803ac0ad51b2")
assert res["segwit"]["desc"] == "wsh(and_v(and_v(v:hash160(ffffffffffffffffffffffffffffffffffffffff),v:pk(0250929b74c1a04954b78b4b6035e97a5e078a5a0f28ec96d547bfee9ace803ac0)),older(1)))#gm8xz4fl"
# Miniscript-incompatible offered HTLC
res = self.nodes[0].decodescript("82012088a914ffffffffffffffffffffffffffffffffffffffff882102ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffacb2")
assert res["segwit"]["desc"] == "addr(bcrt1q73qyfypp47hvgnkjqnav0j3k2lq3v76wg22dk8tmwuz5sfgv66xsvxg6uu)#9p3q328s"
# Miniscript-compatible multisig bigger than 520 byte P2SH limit.
res = self.nodes[0].decodescript("5b21020e0338c96a8870479f2396c373cc7696ba124e8635d41b0ea581112b678172612102675333a4e4b8fb51d9d4e22fa5a8eaced3fdac8a8cbf9be8c030f75712e6af992102896807d54bc55c24981f24a453c60ad3e8993d693732288068a23df3d9f50d4821029e51a5ef5db3137051de8323b001749932f2ff0d34c82e96a2c2461de96ae56c2102a4e1a9638d46923272c266631d94d36bdb03a64ee0e14c7518e49d2f29bc401021031c41fdbcebe17bec8d49816e00ca1b5ac34766b91c9f2ac37d39c63e5e008afb2103079e252e85abffd3c401a69b087e590a9b86f33f574f08129ccbd3521ecf516b2103111cf405b627e22135b3b3733a4a34aa5723fb0f58379a16d32861bf576b0ec2210318f331b3e5d38156da6633b31929c5b220349859cc9ca3d33fb4e68aa08401742103230dae6b4ac93480aeab26d000841298e3b8f6157028e47b0897c1e025165de121035abff4281ff00660f99ab27bb53e6b33689c2cd8dcd364bc3c90ca5aea0d71a62103bd45cddfacf2083b14310ae4a84e25de61e451637346325222747b157446614c2103cc297026b06c71cbfa52089149157b5ff23de027ac5ab781800a578192d175462103d3bde5d63bdb3a6379b461be64dad45eabff42f758543a9645afd42f6d4248282103ed1e8d5109c9ed66f7941bc53cc71137baa76d50d274bda8d5e8ffbd6e61fe9a5fae736402c00fb269522103aab896d53a8e7d6433137bbba940f9c521e085dd07e60994579b64a6d992cf79210291b7d0b1b692f8f524516ed950872e5da10fb1b808b5a526dedc6fed1cf29807210386aa9372fbab374593466bc5451dc59954e90787f08060964d95c87ef34ca5bb53ae68")
assert_equal(res["segwit"]["desc"], "wsh(or_d(multi(11,020e0338c96a8870479f2396c373cc7696ba124e8635d41b0ea581112b67817261,02675333a4e4b8fb51d9d4e22fa5a8eaced3fdac8a8cbf9be8c030f75712e6af99,02896807d54bc55c24981f24a453c60ad3e8993d693732288068a23df3d9f50d48,029e51a5ef5db3137051de8323b001749932f2ff0d34c82e96a2c2461de96ae56c,02a4e1a9638d46923272c266631d94d36bdb03a64ee0e14c7518e49d2f29bc4010,031c41fdbcebe17bec8d49816e00ca1b5ac34766b91c9f2ac37d39c63e5e008afb,03079e252e85abffd3c401a69b087e590a9b86f33f574f08129ccbd3521ecf516b,03111cf405b627e22135b3b3733a4a34aa5723fb0f58379a16d32861bf576b0ec2,0318f331b3e5d38156da6633b31929c5b220349859cc9ca3d33fb4e68aa0840174,03230dae6b4ac93480aeab26d000841298e3b8f6157028e47b0897c1e025165de1,035abff4281ff00660f99ab27bb53e6b33689c2cd8dcd364bc3c90ca5aea0d71a6,03bd45cddfacf2083b14310ae4a84e25de61e451637346325222747b157446614c,03cc297026b06c71cbfa52089149157b5ff23de027ac5ab781800a578192d17546,03d3bde5d63bdb3a6379b461be64dad45eabff42f758543a9645afd42f6d424828,03ed1e8d5109c9ed66f7941bc53cc71137baa76d50d274bda8d5e8ffbd6e61fe9a),and_v(v:older(4032),multi(2,03aab896d53a8e7d6433137bbba940f9c521e085dd07e60994579b64a6d992cf79,0291b7d0b1b692f8f524516ed950872e5da10fb1b808b5a526dedc6fed1cf29807,0386aa9372fbab374593466bc5451dc59954e90787f08060964d95c87ef34ca5bb))))#7jwwklk4")
def run_test(self):
self.log.info("Test decoding of standard input scripts [scriptSig]")
self.decodescript_script_sig()
self.log.info("Test decoding of standard output scripts [scriptPubKey]")
self.decodescript_script_pub_key()
self.log.info("Test 'asm' script decoding of transactions")
self.decoderawtransaction_asm_sighashtype()
self.log.info("Data-driven tests")
self.decodescript_datadriven_tests()
self.log.info("Miniscript descriptor decoding")
self.decodescript_miniscript()
if __name__ == '__main__':
DecodeScriptTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/p2p_eviction.py | #!/usr/bin/env python3
# Copyright (c) 2019-2021 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
""" Test node eviction logic
When the number of peers has reached the limit of maximum connections,
the next connecting inbound peer will trigger the eviction mechanism.
We cannot currently test the parts of the eviction logic that are based on
address/netgroup since in the current framework, all peers are connecting from
the same local address. See Issue #14210 for more info.
Therefore, this test is limited to the remaining protection criteria.
"""
import time
from test_framework.blocktools import (
create_block,
create_coinbase,
)
from test_framework.messages import (
msg_pong,
msg_tx,
)
from test_framework.p2p import (
P2PDataStore,
P2PInterface,
)
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal
from test_framework.wallet import MiniWallet
class SlowP2PDataStore(P2PDataStore):
def on_ping(self, message):
time.sleep(0.1)
self.send_message(msg_pong(message.nonce))
class SlowP2PInterface(P2PInterface):
def on_ping(self, message):
time.sleep(0.1)
self.send_message(msg_pong(message.nonce))
class P2PEvict(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
# The choice of maxconnections=32 results in a maximum of 21 inbound connections
# (32 - 10 outbound - 1 feeler). 20 inbound peers are protected from eviction:
# 4 by netgroup, 4 that sent us blocks, 4 that sent us transactions and 8 via lowest ping time
self.extra_args = [['-maxconnections=32']]
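# Hedged arithmetic restating the comment above: 32 connections minus the 10
# default outbound slots and the 1 feeler leaves 21 inbound slots, of which
# 4 (netgroup) + 4 (block) + 4 (tx) + 8 (lowest ping) = 20 are protected.
assert 32 - 10 - 1 == 21 and 4 + 4 + 4 + 8 == 20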
def run_test(self):
protected_peers = set() # peers that we expect to be protected from eviction
current_peer = -1
node = self.nodes[0]
self.wallet = MiniWallet(node)
self.log.info("Create 4 peers and protect them from eviction by sending us a block")
for _ in range(4):
block_peer = node.add_p2p_connection(SlowP2PDataStore())
current_peer += 1
block_peer.sync_with_ping()
best_block = node.getbestblockhash()
tip = int(best_block, 16)
best_block_time = node.getblock(best_block)['time']
block = create_block(tip, create_coinbase(node.getblockcount() + 1), best_block_time + 1)
block.solve()
block_peer.send_blocks_and_test([block], node, success=True)
protected_peers.add(current_peer)
self.log.info("Create 5 slow-pinging peers, making them eviction candidates")
for _ in range(5):
node.add_p2p_connection(SlowP2PInterface())
current_peer += 1
self.log.info("Create 4 peers and protect them from eviction by sending us a tx")
for i in range(4):
txpeer = node.add_p2p_connection(SlowP2PInterface())
current_peer += 1
txpeer.sync_with_ping()
tx = self.wallet.create_self_transfer()['tx']
txpeer.send_message(msg_tx(tx))
protected_peers.add(current_peer)
self.log.info("Create 8 peers and protect them from eviction by having faster pings")
for _ in range(8):
fastpeer = node.add_p2p_connection(P2PInterface())
current_peer += 1
self.wait_until(lambda: "ping" in fastpeer.last_message, timeout=10)
# Verify the protection by asking the node for the actual minimum ping times
peerinfo = node.getpeerinfo()
pings = {}
for i in range(len(peerinfo)):
pings[i] = peerinfo[i]['minping'] if 'minping' in peerinfo[i] else 1000000
sorted_pings = sorted(pings.items(), key=lambda x: x[1])
# Usually the 8 fast peers are protected. In rare case of unreliable pings,
# one of the slower peers might have a faster min ping though.
for i in range(8):
protected_peers.add(sorted_pings[i][0])
self.log.info("Create peer that triggers the eviction mechanism")
node.add_p2p_connection(SlowP2PInterface())
# One of the non-protected peers must be evicted. We can't be sure which one because
# 4 peers are protected via netgroup, which is identical for all peers,
# and the eviction mechanism doesn't preserve the order of identical elements.
evicted_peers = []
for i in range(len(node.p2ps)):
if not node.p2ps[i].is_connected:
evicted_peers.append(i)
self.log.info("Test that one peer was evicted")
self.log.debug("{} evicted peer: {}".format(len(evicted_peers), set(evicted_peers)))
assert_equal(len(evicted_peers), 1)
self.log.info("Test that no peer expected to be protected was evicted")
self.log.debug("{} protected peers: {}".format(len(protected_peers), protected_peers))
assert evicted_peers[0] not in protected_peers
if __name__ == '__main__':
P2PEvict().main()
| 0 |
bitcoin/test | bitcoin/test/functional/p2p_blockfilters.py | #!/usr/bin/env python3
# Copyright (c) 2019-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Tests NODE_COMPACT_FILTERS (BIP 157/158).
Tests that a node configured with -blockfilterindex and -peerblockfilters signals
NODE_COMPACT_FILTERS and can serve cfilters, cfheaders and cfcheckpts.
"""
from test_framework.messages import (
FILTER_TYPE_BASIC,
NODE_COMPACT_FILTERS,
hash256,
msg_getcfcheckpt,
msg_getcfheaders,
msg_getcfilters,
ser_uint256,
uint256_from_str,
)
from test_framework.p2p import P2PInterface
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
)
class FiltersClient(P2PInterface):
def __init__(self):
super().__init__()
# Store the cfilters received.
self.cfilters = []
def pop_cfilters(self):
cfilters = self.cfilters
self.cfilters = []
return cfilters
def on_cfilter(self, message):
"""Store cfilters received in a list."""
self.cfilters.append(message)
class CompactFiltersTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.rpc_timeout = 480
self.num_nodes = 2
self.extra_args = [
["-blockfilterindex", "-peerblockfilters"],
["-blockfilterindex"],
]
def run_test(self):
# Node 0 supports COMPACT_FILTERS, node 1 does not.
peer_0 = self.nodes[0].add_p2p_connection(FiltersClient())
peer_1 = self.nodes[1].add_p2p_connection(FiltersClient())
# Nodes 0 & 1 share the same first 999 blocks in the chain.
self.generate(self.nodes[0], 999)
# Stale blocks by disconnecting nodes 0 & 1, mining, then reconnecting
self.disconnect_nodes(0, 1)
stale_block_hash = self.generate(self.nodes[0], 1, sync_fun=self.no_op)[0]
self.nodes[0].syncwithvalidationinterfacequeue()
assert_equal(self.nodes[0].getblockcount(), 1000)
self.generate(self.nodes[1], 1001, sync_fun=self.no_op)
assert_equal(self.nodes[1].getblockcount(), 2000)
# Check that nodes have signalled NODE_COMPACT_FILTERS correctly.
assert peer_0.nServices & NODE_COMPACT_FILTERS != 0
assert peer_1.nServices & NODE_COMPACT_FILTERS == 0
# Check that the localservices is as expected.
assert int(self.nodes[0].getnetworkinfo()['localservices'], 16) & NODE_COMPACT_FILTERS != 0
assert int(self.nodes[1].getnetworkinfo()['localservices'], 16) & NODE_COMPACT_FILTERS == 0
self.log.info("get cfcheckpt on chain to be re-orged out.")
request = msg_getcfcheckpt(
filter_type=FILTER_TYPE_BASIC,
stop_hash=int(stale_block_hash, 16),
)
peer_0.send_and_ping(message=request)
response = peer_0.last_message['cfcheckpt']
assert_equal(response.filter_type, request.filter_type)
assert_equal(response.stop_hash, request.stop_hash)
assert_equal(len(response.headers), 1)
self.log.info("Reorg node 0 to a new chain.")
self.connect_nodes(0, 1)
self.sync_blocks(timeout=600)
self.nodes[0].syncwithvalidationinterfacequeue()
main_block_hash = self.nodes[0].getblockhash(1000)
assert main_block_hash != stale_block_hash, "node 0 chain did not reorganize"
self.log.info("Check that peers can fetch cfcheckpt on active chain.")
tip_hash = self.nodes[0].getbestblockhash()
request = msg_getcfcheckpt(
filter_type=FILTER_TYPE_BASIC,
stop_hash=int(tip_hash, 16),
)
peer_0.send_and_ping(request)
response = peer_0.last_message['cfcheckpt']
assert_equal(response.filter_type, request.filter_type)
assert_equal(response.stop_hash, request.stop_hash)
main_cfcheckpt = self.nodes[0].getblockfilter(main_block_hash, 'basic')['header']
tip_cfcheckpt = self.nodes[0].getblockfilter(tip_hash, 'basic')['header']
assert_equal(
response.headers,
[int(header, 16) for header in (main_cfcheckpt, tip_cfcheckpt)],
)
self.log.info("Check that peers can fetch cfcheckpt on stale chain.")
request = msg_getcfcheckpt(
filter_type=FILTER_TYPE_BASIC,
stop_hash=int(stale_block_hash, 16),
)
peer_0.send_and_ping(request)
response = peer_0.last_message['cfcheckpt']
stale_cfcheckpt = self.nodes[0].getblockfilter(stale_block_hash, 'basic')['header']
assert_equal(
response.headers,
[int(header, 16) for header in (stale_cfcheckpt, )],
)
self.log.info("Check that peers can fetch cfheaders on active chain.")
request = msg_getcfheaders(
filter_type=FILTER_TYPE_BASIC,
start_height=1,
stop_hash=int(main_block_hash, 16),
)
peer_0.send_and_ping(request)
response = peer_0.last_message['cfheaders']
main_cfhashes = response.hashes
assert_equal(len(main_cfhashes), 1000)
assert_equal(
compute_last_header(response.prev_header, response.hashes),
int(main_cfcheckpt, 16),
)
self.log.info("Check that peers can fetch cfheaders on stale chain.")
request = msg_getcfheaders(
filter_type=FILTER_TYPE_BASIC,
start_height=1,
stop_hash=int(stale_block_hash, 16),
)
peer_0.send_and_ping(request)
response = peer_0.last_message['cfheaders']
stale_cfhashes = response.hashes
assert_equal(len(stale_cfhashes), 1000)
assert_equal(
compute_last_header(response.prev_header, response.hashes),
int(stale_cfcheckpt, 16),
)
self.log.info("Check that peers can fetch cfilters.")
stop_hash = self.nodes[0].getblockhash(10)
request = msg_getcfilters(
filter_type=FILTER_TYPE_BASIC,
start_height=1,
stop_hash=int(stop_hash, 16),
)
peer_0.send_and_ping(request)
response = peer_0.pop_cfilters()
assert_equal(len(response), 10)
self.log.info("Check that cfilter responses are correct.")
for cfilter, cfhash, height in zip(response, main_cfhashes, range(1, 11)):
block_hash = self.nodes[0].getblockhash(height)
assert_equal(cfilter.filter_type, FILTER_TYPE_BASIC)
assert_equal(cfilter.block_hash, int(block_hash, 16))
computed_cfhash = uint256_from_str(hash256(cfilter.filter_data))
assert_equal(computed_cfhash, cfhash)
self.log.info("Check that peers can fetch cfilters for stale blocks.")
request = msg_getcfilters(
filter_type=FILTER_TYPE_BASIC,
start_height=1000,
stop_hash=int(stale_block_hash, 16),
)
peer_0.send_and_ping(request)
response = peer_0.pop_cfilters()
assert_equal(len(response), 1)
cfilter = response[0]
assert_equal(cfilter.filter_type, FILTER_TYPE_BASIC)
assert_equal(cfilter.block_hash, int(stale_block_hash, 16))
computed_cfhash = uint256_from_str(hash256(cfilter.filter_data))
assert_equal(computed_cfhash, stale_cfhashes[999])
self.log.info("Requests to node 1 without NODE_COMPACT_FILTERS results in disconnection.")
requests = [
msg_getcfcheckpt(
filter_type=FILTER_TYPE_BASIC,
stop_hash=int(main_block_hash, 16),
),
msg_getcfheaders(
filter_type=FILTER_TYPE_BASIC,
start_height=1000,
stop_hash=int(main_block_hash, 16),
),
msg_getcfilters(
filter_type=FILTER_TYPE_BASIC,
start_height=1000,
stop_hash=int(main_block_hash, 16),
),
]
for request in requests:
peer_1 = self.nodes[1].add_p2p_connection(P2PInterface())
with self.nodes[1].assert_debug_log(expected_msgs=["requested unsupported block filter type"]):
peer_1.send_message(request)
peer_1.wait_for_disconnect()
self.log.info("Check that invalid requests result in disconnection.")
requests = [
# Requesting too many filters results in disconnection.
(
msg_getcfilters(
filter_type=FILTER_TYPE_BASIC,
start_height=0,
stop_hash=int(main_block_hash, 16),
), "requested too many cfilters/cfheaders"
),
# Requesting too many filter headers results in disconnection.
(
msg_getcfheaders(
filter_type=FILTER_TYPE_BASIC,
start_height=0,
stop_hash=int(tip_hash, 16),
), "requested too many cfilters/cfheaders"
),
# Requesting unknown filter type results in disconnection.
(
msg_getcfcheckpt(
filter_type=255,
stop_hash=int(main_block_hash, 16),
), "requested unsupported block filter type"
),
# Requesting unknown hash results in disconnection.
(
msg_getcfcheckpt(
filter_type=FILTER_TYPE_BASIC,
stop_hash=123456789,
), "requested invalid block hash"
),
(
# Request with (start block height > stop block height) results in disconnection.
msg_getcfheaders(
filter_type=FILTER_TYPE_BASIC,
start_height=1000,
stop_hash=int(self.nodes[0].getblockhash(999), 16),
), "sent invalid getcfilters/getcfheaders with start height 1000 and stop height 999"
),
]
for request, expected_log_msg in requests:
peer_0 = self.nodes[0].add_p2p_connection(P2PInterface())
with self.nodes[0].assert_debug_log(expected_msgs=[expected_log_msg]):
peer_0.send_message(request)
peer_0.wait_for_disconnect()
self.log.info("Test -peerblockfilters without -blockfilterindex raises an error")
self.stop_node(0)
self.nodes[0].extra_args = ["-peerblockfilters"]
msg = "Error: Cannot set -peerblockfilters without -blockfilterindex."
self.nodes[0].assert_start_raises_init_error(expected_msg=msg)
self.log.info("Test unknown value to -blockfilterindex raises an error")
self.nodes[0].extra_args = ["-blockfilterindex=abc"]
msg = "Error: Unknown -blockfilterindex value abc."
self.nodes[0].assert_start_raises_init_error(expected_msg=msg)
def compute_last_header(prev_header, hashes):
"""Compute the last filter header from a starting header and a sequence of filter hashes."""
header = ser_uint256(prev_header)
for filter_hash in hashes:
header = hash256(ser_uint256(filter_hash) + header)
return uint256_from_str(header)
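# Hedged sanity sketch for compute_last_header(): per BIP 157 each filter header
# commits to the previous one via header_n = hash256(filter_hash_n || header_{n-1});
# with no hashes, the chain degenerates to the starting header unchanged.
assert compute_last_header(0, []) == 0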
if __name__ == '__main__':
CompactFiltersTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/wallet_keypool.py | #!/usr/bin/env python3
# Copyright (c) 2014-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the wallet keypool and interaction with wallet encryption/locking."""
import time
from decimal import Decimal
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, assert_raises_rpc_error
from test_framework.wallet_util import WalletUnlock
class KeyPoolTest(BitcoinTestFramework):
def add_options(self, parser):
self.add_wallet_options(parser)
def set_test_params(self):
self.num_nodes = 1
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def run_test(self):
nodes = self.nodes
addr_before_encrypting = nodes[0].getnewaddress()
addr_before_encrypting_data = nodes[0].getaddressinfo(addr_before_encrypting)
wallet_info_old = nodes[0].getwalletinfo()
if not self.options.descriptors:
assert addr_before_encrypting_data['hdseedid'] == wallet_info_old['hdseedid']
# Encrypt wallet and wait to terminate
nodes[0].encryptwallet('test')
if self.options.descriptors:
# Import hardened derivation only descriptors
nodes[0].walletpassphrase('test', 10)
nodes[0].importdescriptors([
{
"desc": "wpkh(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/*h)#y4dfsj7n",
"timestamp": "now",
"range": [0,0],
"active": True
},
{
"desc": "pkh(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1h/*h)#a0nyvl0k",
"timestamp": "now",
"range": [0,0],
"active": True
},
{
"desc": "sh(wpkh(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/2h/*h))#lmeu2axg",
"timestamp": "now",
"range": [0,0],
"active": True
},
{
"desc": "wpkh(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/3h/*h)#jkl636gm",
"timestamp": "now",
"range": [0,0],
"active": True,
"internal": True
},
{
"desc": "pkh(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/4h/*h)#l3crwaus",
"timestamp": "now",
"range": [0,0],
"active": True,
"internal": True
},
{
"desc": "sh(wpkh(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/5h/*h))#qg8wa75f",
"timestamp": "now",
"range": [0,0],
"active": True,
"internal": True
}
])
nodes[0].walletlock()
# Keep creating keys
addr = nodes[0].getnewaddress()
addr_data = nodes[0].getaddressinfo(addr)
wallet_info = nodes[0].getwalletinfo()
assert addr_before_encrypting_data['hdmasterfingerprint'] != addr_data['hdmasterfingerprint']
if not self.options.descriptors:
assert addr_data['hdseedid'] == wallet_info['hdseedid']
assert_raises_rpc_error(-12, "Error: Keypool ran out, please call keypoolrefill first", nodes[0].getnewaddress)
# refill the keypool with six new keys per chain (six external plus six internal)
with WalletUnlock(nodes[0], 'test'):
nodes[0].keypoolrefill(6)
wi = nodes[0].getwalletinfo()
if self.options.descriptors:
assert_equal(wi['keypoolsize_hd_internal'], 24)
assert_equal(wi['keypoolsize'], 24)
else:
assert_equal(wi['keypoolsize_hd_internal'], 6)
assert_equal(wi['keypoolsize'], 6)
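# Hedged accounting for the descriptor numbers above (an assumption, not checked
# by the test itself): keypoolrefill(6) tops up every active descriptor, and with
# the three imported descriptors plus the wallet's remaining default bech32m
# descriptor on each chain, that plausibly yields 4 * 6 = 24 keys per side.
assert 4 * 6 == 24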
# drain the internal keys
nodes[0].getrawchangeaddress()
nodes[0].getrawchangeaddress()
nodes[0].getrawchangeaddress()
nodes[0].getrawchangeaddress()
nodes[0].getrawchangeaddress()
nodes[0].getrawchangeaddress()
addr = set()
# the next one should fail
assert_raises_rpc_error(-12, "Keypool ran out", nodes[0].getrawchangeaddress)
# drain the external keys
addr.add(nodes[0].getnewaddress(address_type="bech32"))
addr.add(nodes[0].getnewaddress(address_type="bech32"))
addr.add(nodes[0].getnewaddress(address_type="bech32"))
addr.add(nodes[0].getnewaddress(address_type="bech32"))
addr.add(nodes[0].getnewaddress(address_type="bech32"))
addr.add(nodes[0].getnewaddress(address_type="bech32"))
assert len(addr) == 6
# the next one should fail
assert_raises_rpc_error(-12, "Error: Keypool ran out, please call keypoolrefill first", nodes[0].getnewaddress)
# refill keypool with three new addresses
nodes[0].walletpassphrase('test', 1)
nodes[0].keypoolrefill(3)
# test walletpassphrase timeout
time.sleep(1.1)
assert_equal(nodes[0].getwalletinfo()["unlocked_until"], 0)
# drain the keypool
for _ in range(3):
nodes[0].getnewaddress()
assert_raises_rpc_error(-12, "Keypool ran out", nodes[0].getnewaddress)
with WalletUnlock(nodes[0], 'test'):
nodes[0].keypoolrefill(100)
wi = nodes[0].getwalletinfo()
if self.options.descriptors:
assert_equal(wi['keypoolsize_hd_internal'], 400)
assert_equal(wi['keypoolsize'], 400)
else:
assert_equal(wi['keypoolsize_hd_internal'], 100)
assert_equal(wi['keypoolsize'], 100)
if not self.options.descriptors:
# Check that newkeypool entirely flushes the keypool
start_keypath = nodes[0].getaddressinfo(nodes[0].getnewaddress())['hdkeypath']
start_change_keypath = nodes[0].getaddressinfo(nodes[0].getrawchangeaddress())['hdkeypath']
# flush keypool and get new addresses
nodes[0].newkeypool()
end_keypath = nodes[0].getaddressinfo(nodes[0].getnewaddress())['hdkeypath']
end_change_keypath = nodes[0].getaddressinfo(nodes[0].getrawchangeaddress())['hdkeypath']
# The new keypath index should be 100 more than the old one
new_index = int(start_keypath.rsplit('/', 1)[1][:-1]) + 100
new_change_index = int(start_change_keypath.rsplit('/', 1)[1][:-1]) + 100
assert_equal(end_keypath, "m/0'/0'/" + str(new_index) + "'")
assert_equal(end_change_keypath, "m/0'/1'/" + str(new_change_index) + "'")
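# Hedged illustration of the keypath parsing above: rsplit on the last '/' and
# strip the trailing hardened marker to recover the numeric index.
assert int("m/0'/0'/5'".rsplit('/', 1)[1][:-1]) == 5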
# create a blank wallet
nodes[0].createwallet(wallet_name='w2', blank=True, disable_private_keys=True)
w2 = nodes[0].get_wallet_rpc('w2')
# refer to initial wallet as w1
w1 = nodes[0].get_wallet_rpc(self.default_wallet_name)
# import private key and fund it
address = addr.pop()
desc = w1.getaddressinfo(address)['desc']
if self.options.descriptors:
res = w2.importdescriptors([{'desc': desc, 'timestamp': 'now'}])
else:
res = w2.importmulti([{'desc': desc, 'timestamp': 'now'}])
assert_equal(res[0]['success'], True)
with WalletUnlock(w1, 'test'):
res = w1.sendtoaddress(address=address, amount=0.00010000)
self.generate(nodes[0], 1)
destination = addr.pop()
# Using a fee rate (10 sat / byte) well above the minimum relay rate
# creating a 5,000 sat transaction with change should not be possible
assert_raises_rpc_error(-4, "Transaction needs a change address, but we can't generate it.", w2.walletcreatefundedpsbt, inputs=[], outputs=[{addr.pop(): 0.00005000}], subtractFeeFromOutputs=[0], feeRate=0.00010)
# creating a 10,000 sat transaction without change, with a manual input, should still be possible
res = w2.walletcreatefundedpsbt(inputs=w2.listunspent(), outputs=[{destination: 0.00010000}], subtractFeeFromOutputs=[0], feeRate=0.00010)
assert_equal("psbt" in res, True)
# creating a 10,000 sat transaction without change should still be possible
res = w2.walletcreatefundedpsbt(inputs=[], outputs=[{destination: 0.00010000}], subtractFeeFromOutputs=[0], feeRate=0.00010)
assert_equal("psbt" in res, True)
# should work without subtractFeeFromOutputs if the exact fee is subtracted from the amount
res = w2.walletcreatefundedpsbt(inputs=[], outputs=[{destination: 0.00008900}], feeRate=0.00010)
assert_equal("psbt" in res, True)
# dust change should be removed
res = w2.walletcreatefundedpsbt(inputs=[], outputs=[{destination: 0.00008800}], feeRate=0.00010)
assert_equal("psbt" in res, True)
# create a transaction without change at the maximum fee rate, such that the output is still spendable:
res = w2.walletcreatefundedpsbt(inputs=[], outputs=[{destination: 0.00010000}], subtractFeeFromOutputs=[0], feeRate=0.0008823)
assert_equal("psbt" in res, True)
assert_equal(res["fee"], Decimal("0.00009706"))
# creating a 10,000 sat transaction with a manual change address should be possible
res = w2.walletcreatefundedpsbt(inputs=[], outputs=[{destination: 0.00010000}], subtractFeeFromOutputs=[0], feeRate=0.00010, changeAddress=addr.pop())
assert_equal("psbt" in res, True)
if not self.options.descriptors:
msg = "Error: Private keys are disabled for this wallet"
assert_raises_rpc_error(-4, msg, w2.keypoolrefill, 100)
if __name__ == '__main__':
KeyPoolTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/p2p_feefilter.py | #!/usr/bin/env python3
# Copyright (c) 2016-2021 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test processing of feefilter messages."""
from decimal import Decimal
from test_framework.messages import MSG_TX, MSG_WTX, msg_feefilter
from test_framework.p2p import P2PInterface, p2p_lock
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal
from test_framework.wallet import MiniWallet
class FeefilterConn(P2PInterface):
feefilter_received = False
def on_feefilter(self, message):
self.feefilter_received = True
def assert_feefilter_received(self, recv: bool):
with p2p_lock:
assert_equal(self.feefilter_received, recv)
class TestP2PConn(P2PInterface):
def __init__(self):
super().__init__()
self.txinvs = []
def on_inv(self, message):
for i in message.inv:
if (i.type == MSG_TX) or (i.type == MSG_WTX):
self.txinvs.append('{:064x}'.format(i.hash))
def wait_for_invs_to_match(self, invs_expected):
invs_expected.sort()
self.wait_until(lambda: invs_expected == sorted(self.txinvs))
def clear_invs(self):
with p2p_lock:
self.txinvs = []
class FeeFilterTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 2
# We lower the various required feerates for this test
# to catch a corner-case where feefilter used to slightly undercut
# mempool and wallet feerate calculation based on GetFee
# rounding down 3 places, leading to stranded transactions.
# See issue #16499
# grant noban permission to all peers to speed up tx relay / mempool sync
self.extra_args = [[
"-minrelaytxfee=0.00000100",
"-mintxfee=0.00000100",
"[email protected]",
]] * self.num_nodes
def run_test(self):
self.test_feefilter_forcerelay()
self.test_feefilter()
self.test_feefilter_blocksonly()
def test_feefilter_forcerelay(self):
self.log.info('Check that peers without forcerelay permission (default) get a feefilter message')
self.nodes[0].add_p2p_connection(FeefilterConn()).assert_feefilter_received(True)
self.log.info('Check that peers with forcerelay permission do not get a feefilter message')
self.restart_node(0, extra_args=['[email protected]'])
self.nodes[0].add_p2p_connection(FeefilterConn()).assert_feefilter_received(False)
# Restart to disconnect peers and load default extra_args
self.restart_node(0)
self.connect_nodes(1, 0)
def test_feefilter(self):
node1 = self.nodes[1]
node0 = self.nodes[0]
miniwallet = MiniWallet(node1)
conn = self.nodes[0].add_p2p_connection(TestP2PConn())
self.log.info("Test txs paying 0.2 sat/byte are received by test connection")
txids = [miniwallet.send_self_transfer(fee_rate=Decimal('0.00000200'), from_node=node1)['wtxid'] for _ in range(3)]
conn.wait_for_invs_to_match(txids)
conn.clear_invs()
# Set a fee filter of 0.15 sat/byte on test connection
conn.send_and_ping(msg_feefilter(150))
self.log.info("Test txs paying 0.15 sat/byte are received by test connection")
txids = [miniwallet.send_self_transfer(fee_rate=Decimal('0.00000150'), from_node=node1)['wtxid'] for _ in range(3)]
conn.wait_for_invs_to_match(txids)
conn.clear_invs()
self.log.info("Test txs paying 0.1 sat/byte are no longer received by test connection")
txids = [miniwallet.send_self_transfer(fee_rate=Decimal('0.00000100'), from_node=node1)['wtxid'] for _ in range(3)]
self.sync_mempools() # must be sure node 0 has received all txs
# Send one transaction from node0 that should be received, so that we
# can sync the test on receipt (if node1's txs were relayed, they'd
# be received by the time this node0 tx is received). This is
# unfortunately reliant on the current relay behavior where we batch up
# to 35 entries in an inv, which means that when this next transaction
# is eligible for relay, the prior transactions from node1 are eligible
# as well.
txids = [miniwallet.send_self_transfer(fee_rate=Decimal('0.00020000'), from_node=node0)['wtxid'] for _ in range(1)]
conn.wait_for_invs_to_match(txids)
conn.clear_invs()
self.sync_mempools() # must be sure node 1 has received all txs
self.log.info("Remove fee filter and check txs are received again")
conn.send_and_ping(msg_feefilter(0))
txids = [miniwallet.send_self_transfer(fee_rate=Decimal('0.00020000'), from_node=node1)['wtxid'] for _ in range(3)]
conn.wait_for_invs_to_match(txids)
conn.clear_invs()
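# Hedged unit note: msg_feefilter carries satoshis per 1000 virtual bytes, so
# the filter value 150 used above is 0.15 sat/vbyte, i.e. the 0.00000150 BTC/kvB
# feerate handed to MiniWallet:
assert Decimal("0.00000150") * 100_000_000 == 150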
def test_feefilter_blocksonly(self):
"""Test that we don't send fee filters to block-relay-only peers and when we're in blocksonly mode."""
self.log.info("Check that we don't send fee filters to block-relay-only peers.")
feefilter_peer = self.nodes[0].add_outbound_p2p_connection(FeefilterConn(), p2p_idx=0, connection_type="block-relay-only")
feefilter_peer.sync_with_ping()
feefilter_peer.assert_feefilter_received(False)
self.log.info("Check that we don't send fee filters when in blocksonly mode.")
self.restart_node(0, ["-blocksonly"])
feefilter_peer = self.nodes[0].add_p2p_connection(FeefilterConn())
feefilter_peer.sync_with_ping()
feefilter_peer.assert_feefilter_received(False)
if __name__ == '__main__':
FeeFilterTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/p2p_disconnect_ban.py | #!/usr/bin/env python3
# Copyright (c) 2014-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test node disconnect and ban behavior"""
import time
from pathlib import Path
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
)
class DisconnectBanTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 2
self.supports_cli = False
def run_test(self):
self.log.info("Connect nodes both way")
# By default, the test framework sets up an addnode connection from
# node 1 --> node0. By connecting node0 --> node 1, we're left with
# the two nodes being connected both ways.
# Topology will look like: node0 <--> node1
self.connect_nodes(0, 1)
self.log.info("Test setban and listbanned RPCs")
self.log.info("setban: successfully ban single IP address")
assert_equal(len(self.nodes[1].getpeerinfo()), 2) # node1 should have 2 connections to node0 at this point
self.nodes[1].setban(subnet="127.0.0.1", command="add")
self.wait_until(lambda: len(self.nodes[1].getpeerinfo()) == 0, timeout=10)
assert_equal(len(self.nodes[1].getpeerinfo()), 0) # all nodes must be disconnected at this point
assert_equal(len(self.nodes[1].listbanned()), 1)
self.log.info("clearbanned: successfully clear ban list")
self.nodes[1].clearbanned()
assert_equal(len(self.nodes[1].listbanned()), 0)
self.log.info('Test banlist database recreation')
self.stop_node(1)
target_file = self.nodes[1].chain_path / "banlist.json"
Path.unlink(target_file)
with self.nodes[1].assert_debug_log(["Recreating the banlist database"]):
self.start_node(1)
assert Path.exists(target_file)
assert_equal(self.nodes[1].listbanned(), [])
self.nodes[1].setban("127.0.0.0/24", "add")
self.log.info("setban: fail to ban an already banned subnet")
assert_equal(len(self.nodes[1].listbanned()), 1)
assert_raises_rpc_error(-23, "IP/Subnet already banned", self.nodes[1].setban, "127.0.0.1", "add")
self.log.info("setban: fail to ban an invalid subnet")
assert_raises_rpc_error(-30, "Error: Invalid IP/Subnet", self.nodes[1].setban, "127.0.0.1/42", "add")
assert_equal(len(self.nodes[1].listbanned()), 1) # still only one banned ip because 127.0.0.1 is within the range of 127.0.0.0/24
self.log.info("setban: fail to ban with past absolute timestamp")
assert_raises_rpc_error(-8, "Error: Absolute timestamp is in the past", self.nodes[1].setban, "127.27.0.1", "add", 123, True)
self.log.info("setban remove: fail to unban a non-banned subnet")
assert_raises_rpc_error(-30, "Error: Unban failed", self.nodes[1].setban, "127.0.0.1", "remove")
assert_equal(len(self.nodes[1].listbanned()), 1)
self.log.info("setban remove: successfully unban subnet")
self.nodes[1].setban("127.0.0.0/24", "remove")
assert_equal(len(self.nodes[1].listbanned()), 0)
self.nodes[1].clearbanned()
assert_equal(len(self.nodes[1].listbanned()), 0)
self.log.info("setban: test persistence across node restart")
# Set the mocktime so we can control when bans expire
old_time = int(time.time())
self.nodes[1].setmocktime(old_time)
self.nodes[1].setban("127.0.0.0/32", "add")
self.nodes[1].setban("127.0.0.0/24", "add")
self.nodes[1].setban("192.168.0.1", "add", 1) # ban for 1 seconds
self.nodes[1].setban("2001:4d48:ac57:400:cacf:e9ff:fe1d:9c63/19", "add", 1000) # ban for 1000 seconds
listBeforeShutdown = self.nodes[1].listbanned()
assert_equal("192.168.0.1/32", listBeforeShutdown[2]['address'])
self.log.info("setban: test banning with absolute timestamp")
self.nodes[1].setban("192.168.0.2", "add", old_time + 120, True)
# Move time forward by 3 seconds so the third ban has expired
self.nodes[1].setmocktime(old_time + 3)
assert_equal(len(self.nodes[1].listbanned()), 4)
self.log.info("Test ban_duration and time_remaining")
for ban in self.nodes[1].listbanned():
if ban["address"] in ["127.0.0.0/32", "127.0.0.0/24"]:
assert_equal(ban["ban_duration"], 86400)
assert_equal(ban["time_remaining"], 86397)
elif ban["address"] == "2001:4d48:ac57:400:cacf:e9ff:fe1d:9c63/19":
assert_equal(ban["ban_duration"], 1000)
assert_equal(ban["time_remaining"], 997)
elif ban["address"] == "192.168.0.2/32":
assert_equal(ban["ban_duration"], 120)
assert_equal(ban["time_remaining"], 117)
self.restart_node(1)
listAfterShutdown = self.nodes[1].listbanned()
assert_equal("127.0.0.0/24", listAfterShutdown[0]['address'])
assert_equal("127.0.0.0/32", listAfterShutdown[1]['address'])
assert_equal("192.168.0.2/32", listAfterShutdown[2]['address'])
assert_equal("/19" in listAfterShutdown[3]['address'], True)
# Clear ban lists
self.nodes[1].clearbanned()
self.log.info("Connect nodes both way")
self.connect_nodes(0, 1)
self.connect_nodes(1, 0)
self.log.info("Test disconnectnode RPCs")
self.log.info("disconnectnode: fail to disconnect when calling with address and nodeid")
address1 = self.nodes[0].getpeerinfo()[0]['addr']
node1 = self.nodes[0].getpeerinfo()[0]["id"]
assert_raises_rpc_error(-32602, "Only one of address and nodeid should be provided.", self.nodes[0].disconnectnode, address=address1, nodeid=node1)
self.log.info("disconnectnode: fail to disconnect when calling with junk address")
assert_raises_rpc_error(-29, "Node not found in connected nodes", self.nodes[0].disconnectnode, address="221B Baker Street")
self.log.info("disconnectnode: successfully disconnect node by address")
address1 = self.nodes[0].getpeerinfo()[0]['addr']
self.nodes[0].disconnectnode(address=address1)
self.wait_until(lambda: len(self.nodes[1].getpeerinfo()) == 1, timeout=10)
assert not [node for node in self.nodes[0].getpeerinfo() if node['addr'] == address1]
self.log.info("disconnectnode: successfully reconnect node")
self.connect_nodes(0, 1) # reconnect the node
assert_equal(len(self.nodes[0].getpeerinfo()), 2)
assert [node for node in self.nodes[0].getpeerinfo() if node['addr'] == address1]
self.log.info("disconnectnode: successfully disconnect node by node id")
id1 = self.nodes[0].getpeerinfo()[0]['id']
self.nodes[0].disconnectnode(nodeid=id1)
self.wait_until(lambda: len(self.nodes[1].getpeerinfo()) == 1, timeout=10)
assert not [node for node in self.nodes[0].getpeerinfo() if node['id'] == id1]
if __name__ == '__main__':
DisconnectBanTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/mempool_accept.py | #!/usr/bin/env python3
# Copyright (c) 2017-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test mempool acceptance of raw transactions."""
from copy import deepcopy
from decimal import Decimal
import math
from test_framework.test_framework import BitcoinTestFramework
from test_framework.messages import (
MAX_BIP125_RBF_SEQUENCE,
COIN,
COutPoint,
CTransaction,
CTxIn,
CTxInWitness,
CTxOut,
MAX_BLOCK_WEIGHT,
MAX_MONEY,
SEQUENCE_FINAL,
tx_from_hex,
)
from test_framework.script import (
CScript,
OP_0,
OP_HASH160,
OP_RETURN,
OP_TRUE,
)
from test_framework.script_util import (
DUMMY_MIN_OP_RETURN_SCRIPT,
keys_to_multisig_script,
MIN_PADDING,
MIN_STANDARD_TX_NONWITNESS_SIZE,
script_to_p2sh_script,
script_to_p2wsh_script,
)
from test_framework.util import (
assert_equal,
assert_greater_than,
assert_raises_rpc_error,
)
from test_framework.wallet import MiniWallet
from test_framework.wallet_util import generate_keypair
class MempoolAcceptanceTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.extra_args = [[
'-txindex','-permitbaremultisig=0',
]] * self.num_nodes
self.supports_cli = False
def check_mempool_result(self, result_expected, *args, **kwargs):
"""Wrapper to check result of testmempoolaccept on node_0's mempool"""
result_test = self.nodes[0].testmempoolaccept(*args, **kwargs)
for r in result_test:
# Skip these checks for now
r.pop('wtxid')
if "fees" in r:
r["fees"].pop("effective-feerate")
r["fees"].pop("effective-includes")
assert_equal(result_expected, result_test)
assert_equal(self.nodes[0].getmempoolinfo()['size'], self.mempool_size) # Must not change mempool state
def run_test(self):
node = self.nodes[0]
self.wallet = MiniWallet(node)
self.log.info('Start with empty mempool, and 200 blocks')
self.mempool_size = 0
assert_equal(node.getblockcount(), 200)
assert_equal(node.getmempoolinfo()['size'], self.mempool_size)
self.log.info('Should not accept garbage to testmempoolaccept')
assert_raises_rpc_error(-3, 'JSON value of type string is not of expected type array', lambda: node.testmempoolaccept(rawtxs='ff00baar'))
assert_raises_rpc_error(-8, 'Array must contain between 1 and 25 transactions.', lambda: node.testmempoolaccept(rawtxs=['ff22']*26))
assert_raises_rpc_error(-8, 'Array must contain between 1 and 25 transactions.', lambda: node.testmempoolaccept(rawtxs=[]))
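        # The 1..25 bound on the array mirrors Bitcoin Core's package-size limit
        # (MAX_PACKAGE_COUNT at the time of writing).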
assert_raises_rpc_error(-22, 'TX decode failed', lambda: node.testmempoolaccept(rawtxs=['ff00baar']))
self.log.info('A transaction already in the blockchain')
tx = self.wallet.create_self_transfer()['tx'] # Pick a random coin(base) to spend
tx.vout.append(deepcopy(tx.vout[0]))
tx.vout[0].nValue = int(0.3 * COIN)
tx.vout[1].nValue = int(49 * COIN)
raw_tx_in_block = tx.serialize().hex()
txid_in_block = self.wallet.sendrawtransaction(from_node=node, tx_hex=raw_tx_in_block)
self.generate(node, 1)
self.mempool_size = 0
self.check_mempool_result(
result_expected=[{'txid': txid_in_block, 'allowed': False, 'reject-reason': 'txn-already-known'}],
rawtxs=[raw_tx_in_block],
)
self.log.info('A transaction not in the mempool')
fee = Decimal('0.000007')
utxo_to_spend = self.wallet.get_utxo(txid=txid_in_block) # use 0.3 BTC UTXO
tx = self.wallet.create_self_transfer(utxo_to_spend=utxo_to_spend, sequence=MAX_BIP125_RBF_SEQUENCE)['tx']
tx.vout[0].nValue = int((Decimal('0.3') - fee) * COIN)
raw_tx_0 = tx.serialize().hex()
txid_0 = tx.rehash()
self.check_mempool_result(
result_expected=[{'txid': txid_0, 'allowed': True, 'vsize': tx.get_vsize(), 'fees': {'base': fee}}],
rawtxs=[raw_tx_0],
)
self.log.info('A final transaction not in the mempool')
output_amount = Decimal('0.025')
tx = self.wallet.create_self_transfer(
sequence=SEQUENCE_FINAL,
locktime=node.getblockcount() + 2000, # Can be anything
)['tx']
tx.vout[0].nValue = int(output_amount * COIN)
raw_tx_final = tx.serialize().hex()
tx = tx_from_hex(raw_tx_final)
fee_expected = Decimal('50.0') - output_amount
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': True, 'vsize': tx.get_vsize(), 'fees': {'base': fee_expected}}],
rawtxs=[tx.serialize().hex()],
maxfeerate=0,
)
node.sendrawtransaction(hexstring=raw_tx_final, maxfeerate=0)
self.mempool_size += 1
self.log.info('A transaction in the mempool')
node.sendrawtransaction(hexstring=raw_tx_0)
self.mempool_size += 1
self.check_mempool_result(
result_expected=[{'txid': txid_0, 'allowed': False, 'reject-reason': 'txn-already-in-mempool'}],
rawtxs=[raw_tx_0],
)
self.log.info('A transaction that replaces a mempool transaction')
tx = tx_from_hex(raw_tx_0)
tx.vout[0].nValue -= int(fee * COIN) # Double the fee
tx.vin[0].nSequence = MAX_BIP125_RBF_SEQUENCE + 1 # Now, opt out of RBF
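        # (per BIP125, a transaction signals replaceability only if some input has
        # nSequence <= MAX_BIP125_RBF_SEQUENCE, so bumping past it opts out)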
raw_tx_0 = tx.serialize().hex()
txid_0 = tx.rehash()
self.check_mempool_result(
result_expected=[{'txid': txid_0, 'allowed': True, 'vsize': tx.get_vsize(), 'fees': {'base': (2 * fee)}}],
rawtxs=[raw_tx_0],
)
self.log.info('A transaction that conflicts with an unconfirmed tx')
# Send the transaction that replaces the mempool transaction and opts out of replaceability
node.sendrawtransaction(hexstring=tx.serialize().hex(), maxfeerate=0)
# take original raw_tx_0
tx = tx_from_hex(raw_tx_0)
tx.vout[0].nValue -= int(4 * fee * COIN) # Set more fee
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'txn-mempool-conflict'}],
rawtxs=[tx.serialize().hex()],
maxfeerate=0,
)
self.log.info('A transaction with missing inputs, that never existed')
tx = tx_from_hex(raw_tx_0)
tx.vin[0].prevout = COutPoint(hash=int('ff' * 32, 16), n=14)
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'missing-inputs'}],
rawtxs=[tx.serialize().hex()],
)
self.log.info('A transaction with missing inputs, that existed once in the past')
tx = tx_from_hex(raw_tx_0)
tx.vin[0].prevout.n = 1 # Set vout to 1, to spend the other outpoint (49 coins) of the in-chain-tx we want to double spend
raw_tx_1 = tx.serialize().hex()
txid_1 = node.sendrawtransaction(hexstring=raw_tx_1, maxfeerate=0)
        # Now spend both to "clearly hide" the outputs, i.e. remove the coins from the utxo set by spending them
tx = self.wallet.create_self_transfer()['tx']
tx.vin.append(deepcopy(tx.vin[0]))
tx.wit.vtxinwit.append(deepcopy(tx.wit.vtxinwit[0]))
tx.vin[0].prevout = COutPoint(hash=int(txid_0, 16), n=0)
tx.vin[1].prevout = COutPoint(hash=int(txid_1, 16), n=0)
tx.vout[0].nValue = int(0.1 * COIN)
raw_tx_spend_both = tx.serialize().hex()
txid_spend_both = self.wallet.sendrawtransaction(from_node=node, tx_hex=raw_tx_spend_both)
self.generate(node, 1)
self.mempool_size = 0
# Now see if we can add the coins back to the utxo set by sending the exact txs again
self.check_mempool_result(
result_expected=[{'txid': txid_0, 'allowed': False, 'reject-reason': 'missing-inputs'}],
rawtxs=[raw_tx_0],
)
self.check_mempool_result(
result_expected=[{'txid': txid_1, 'allowed': False, 'reject-reason': 'missing-inputs'}],
rawtxs=[raw_tx_1],
)
self.log.info('Create a "reference" tx for later use')
utxo_to_spend = self.wallet.get_utxo(txid=txid_spend_both)
tx = self.wallet.create_self_transfer(utxo_to_spend=utxo_to_spend, sequence=SEQUENCE_FINAL)['tx']
tx.vout[0].nValue = int(0.05 * COIN)
raw_tx_reference = tx.serialize().hex()
# Reference tx should be valid on itself
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': True, 'vsize': tx.get_vsize(), 'fees': { 'base': Decimal('0.1') - Decimal('0.05')}}],
rawtxs=[tx.serialize().hex()],
maxfeerate=0,
)
self.log.info('A transaction with no outputs')
tx = tx_from_hex(raw_tx_reference)
tx.vout = []
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'bad-txns-vout-empty'}],
rawtxs=[tx.serialize().hex()],
)
self.log.info('A really large transaction')
tx = tx_from_hex(raw_tx_reference)
tx.vin = [tx.vin[0]] * math.ceil(MAX_BLOCK_WEIGHT // 4 / len(tx.vin[0].serialize()))
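        # Repeating the input ceil((MAX_BLOCK_WEIGHT / 4) / input_size) times pushes the
        # serialized base size past the 1 MB consensus limit, hence 'bad-txns-oversize'.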
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'bad-txns-oversize'}],
rawtxs=[tx.serialize().hex()],
)
self.log.info('A transaction with negative output value')
tx = tx_from_hex(raw_tx_reference)
tx.vout[0].nValue *= -1
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'bad-txns-vout-negative'}],
rawtxs=[tx.serialize().hex()],
)
# The following two validations prevent overflow of the output amounts (see CVE-2010-5139).
self.log.info('A transaction with too large output value')
tx = tx_from_hex(raw_tx_reference)
tx.vout[0].nValue = MAX_MONEY + 1
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'bad-txns-vout-toolarge'}],
rawtxs=[tx.serialize().hex()],
)
self.log.info('A transaction with too large sum of output values')
tx = tx_from_hex(raw_tx_reference)
tx.vout = [tx.vout[0]] * 2
tx.vout[0].nValue = MAX_MONEY
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'bad-txns-txouttotal-toolarge'}],
rawtxs=[tx.serialize().hex()],
)
self.log.info('A transaction with duplicate inputs')
tx = tx_from_hex(raw_tx_reference)
tx.vin = [tx.vin[0]] * 2
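        # Duplicate inputs are rejected by CheckTransaction; a skipped duplicate-inputs
        # check in block validation was the root of the CVE-2018-17144 inflation bug.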
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'bad-txns-inputs-duplicate'}],
rawtxs=[tx.serialize().hex()],
)
self.log.info('A non-coinbase transaction with coinbase-like outpoint')
tx = tx_from_hex(raw_tx_reference)
tx.vin.append(CTxIn(COutPoint(hash=0, n=0xffffffff)))
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'bad-txns-prevout-null'}],
rawtxs=[tx.serialize().hex()],
)
self.log.info('A coinbase transaction')
# Pick the input of the first tx we created, so it has to be a coinbase tx
raw_tx_coinbase_spent = node.getrawtransaction(txid=node.decoderawtransaction(hexstring=raw_tx_in_block)['vin'][0]['txid'])
tx = tx_from_hex(raw_tx_coinbase_spent)
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'coinbase'}],
rawtxs=[tx.serialize().hex()],
)
self.log.info('Some nonstandard transactions')
tx = tx_from_hex(raw_tx_reference)
tx.nVersion = 3 # A version currently non-standard
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'version'}],
rawtxs=[tx.serialize().hex()],
)
tx = tx_from_hex(raw_tx_reference)
tx.vout[0].scriptPubKey = CScript([OP_0]) # Some non-standard script
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'scriptpubkey'}],
rawtxs=[tx.serialize().hex()],
)
tx = tx_from_hex(raw_tx_reference)
_, pubkey = generate_keypair()
tx.vout[0].scriptPubKey = keys_to_multisig_script([pubkey] * 3, k=2) # Some bare multisig script (2-of-3)
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'bare-multisig'}],
rawtxs=[tx.serialize().hex()],
)
tx = tx_from_hex(raw_tx_reference)
tx.vin[0].scriptSig = CScript([OP_HASH160]) # Some not-pushonly scriptSig
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'scriptsig-not-pushonly'}],
rawtxs=[tx.serialize().hex()],
)
tx = tx_from_hex(raw_tx_reference)
tx.vin[0].scriptSig = CScript([b'a' * 1648]) # Some too large scriptSig (>1650 bytes)
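        # Pushing 1648 bytes takes OP_PUSHDATA2 plus a 2-byte length, so the serialized
        # scriptSig is 1651 bytes, just over the 1650-byte standardness limit.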
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'scriptsig-size'}],
rawtxs=[tx.serialize().hex()],
)
tx = tx_from_hex(raw_tx_reference)
output_p2sh_burn = CTxOut(nValue=540, scriptPubKey=script_to_p2sh_script(b'burn'))
num_scripts = 100000 // len(output_p2sh_burn.serialize()) # Use enough outputs to make the tx too large for our policy
tx.vout = [output_p2sh_burn] * num_scripts
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'tx-size'}],
rawtxs=[tx.serialize().hex()],
)
tx = tx_from_hex(raw_tx_reference)
tx.vout[0] = output_p2sh_burn
tx.vout[0].nValue -= 1 # Make output smaller, such that it is dust for our policy
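        # With the 148 bytes Core assumes for spending a non-witness output plus this
        # 32-byte P2SH output, the default 3 sat/vB dust relay fee yields a 540-sat
        # threshold, so 539 sats is dust.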
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'dust'}],
rawtxs=[tx.serialize().hex()],
)
tx = tx_from_hex(raw_tx_reference)
tx.vout[0].scriptPubKey = CScript([OP_RETURN, b'\xff'])
tx.vout = [tx.vout[0]] * 2
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'multi-op-return'}],
rawtxs=[tx.serialize().hex()],
)
self.log.info('A timelocked transaction')
tx = tx_from_hex(raw_tx_reference)
tx.vin[0].nSequence -= 1 # Should be non-max, so locktime is not ignored
tx.nLockTime = node.getblockcount() + 1
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'non-final'}],
rawtxs=[tx.serialize().hex()],
)
self.log.info('A transaction that is locked by BIP68 sequence logic')
tx = tx_from_hex(raw_tx_reference)
tx.vin[0].nSequence = 2 # We could include it in the second block mined from now, but not the very next one
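        # (per BIP68, for version >= 2 transactions the low 16 bits of nSequence encode a
        # relative locktime; nSequence=2 means spendable two blocks after confirmation)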
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'non-BIP68-final'}],
rawtxs=[tx.serialize().hex()],
maxfeerate=0,
)
# Prep for tiny-tx tests with wsh(OP_TRUE) output
seed_tx = self.wallet.send_to(from_node=node, scriptPubKey=script_to_p2wsh_script(CScript([OP_TRUE])), amount=COIN)
self.generate(node, 1)
        self.log.info('A tiny transaction (in non-witness bytes) that is disallowed')
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(int(seed_tx["txid"], 16), seed_tx["sent_vout"]), b"", SEQUENCE_FINAL))
tx.wit.vtxinwit = [CTxInWitness()]
tx.wit.vtxinwit[0].scriptWitness.stack = [CScript([OP_TRUE])]
tx.vout.append(CTxOut(0, CScript([OP_RETURN] + ([OP_0] * (MIN_PADDING - 2)))))
# Note it's only non-witness size that matters!
assert_equal(len(tx.serialize_without_witness()), 64)
assert_equal(MIN_STANDARD_TX_NONWITNESS_SIZE - 1, 64)
assert_greater_than(len(tx.serialize()), 64)
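        # 64-byte transactions are rejected as non-standard because they can be mistaken
        # for inner nodes of a block's merkle tree (see the discussion around
        # CVE-2017-12842); hence MIN_STANDARD_TX_NONWITNESS_SIZE is 65.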
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'tx-size-small'}],
rawtxs=[tx.serialize().hex()],
maxfeerate=0,
)
        self.log.info('Minimally-small transaction (in non-witness bytes) that is allowed')
tx.vout[0] = CTxOut(COIN - 1000, DUMMY_MIN_OP_RETURN_SCRIPT)
assert_equal(len(tx.serialize_without_witness()), MIN_STANDARD_TX_NONWITNESS_SIZE)
self.check_mempool_result(
result_expected=[{'txid': tx.rehash(), 'allowed': True, 'vsize': tx.get_vsize(), 'fees': { 'base': Decimal('0.00001000')}}],
rawtxs=[tx.serialize().hex()],
maxfeerate=0,
)
if __name__ == '__main__':
MempoolAcceptanceTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/p2p_invalid_locator.py | #!/usr/bin/env python3
# Copyright (c) 2015-2021 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test node responses to invalid locators.
"""
from test_framework.messages import msg_getheaders, msg_getblocks, MAX_LOCATOR_SZ
from test_framework.p2p import P2PInterface
from test_framework.test_framework import BitcoinTestFramework
class InvalidLocatorTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
def run_test(self):
node = self.nodes[0] # convenience reference to the node
self.generatetoaddress(node, 1, node.get_deterministic_priv_key().address) # Get node out of IBD
self.log.info('Test max locator size')
block_count = node.getblockcount()
for msg in [msg_getheaders(), msg_getblocks()]:
self.log.info('Wait for disconnect when sending {} hashes in locator'.format(MAX_LOCATOR_SZ + 1))
exceed_max_peer = node.add_p2p_connection(P2PInterface())
msg.locator.vHave = [int(node.getblockhash(i - 1), 16) for i in range(block_count, block_count - (MAX_LOCATOR_SZ + 1), -1)]
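            # The locator here simply walks back one block per entry from the tip; a peer
            # sending more than MAX_LOCATOR_SZ entries is expected to be disconnected.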
exceed_max_peer.send_message(msg)
exceed_max_peer.wait_for_disconnect()
node.disconnect_p2ps()
self.log.info('Wait for response when sending {} hashes in locator'.format(MAX_LOCATOR_SZ))
within_max_peer = node.add_p2p_connection(P2PInterface())
msg.locator.vHave = [int(node.getblockhash(i - 1), 16) for i in range(block_count, block_count - (MAX_LOCATOR_SZ), -1)]
within_max_peer.send_message(msg)
if type(msg) is msg_getheaders:
within_max_peer.wait_for_header(node.getbestblockhash())
else:
within_max_peer.wait_for_block(int(node.getbestblockhash(), 16))
if __name__ == '__main__':
InvalidLocatorTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/p2p_filter.py | #!/usr/bin/env python3
# Copyright (c) 2020-2021 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""
Test BIP 37
"""
from test_framework.messages import (
CInv,
COIN,
MAX_BLOOM_FILTER_SIZE,
MAX_BLOOM_HASH_FUNCS,
MSG_WTX,
MSG_BLOCK,
MSG_FILTERED_BLOCK,
msg_filteradd,
msg_filterclear,
msg_filterload,
msg_getdata,
msg_mempool,
msg_version,
)
from test_framework.p2p import (
P2PInterface,
P2P_SERVICES,
P2P_SUBVERSION,
P2P_VERSION,
p2p_lock,
)
from test_framework.script import MAX_SCRIPT_ELEMENT_SIZE
from test_framework.test_framework import BitcoinTestFramework
from test_framework.wallet import (
MiniWallet,
getnewdestination,
)
class P2PBloomFilter(P2PInterface):
# This is a P2SH watch-only wallet
watch_script_pubkey = bytes.fromhex('a914ffffffffffffffffffffffffffffffffffffffff87')
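    # (decodes as OP_HASH160 <20 bytes of 0xff> OP_EQUAL, i.e. a P2SH scriptPubKey)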
# The initial filter (n=10, fp=0.000001) with just the above scriptPubKey added
watch_filter_init = msg_filterload(
data=
b'@\x00\x08\x00\x80\x00\x00 \x00\xc0\x00 \x04\x00\x08$\x00\x04\x80\x00\x00 \x00\x00\x00\x00\x80\x00\x00@\x00\x02@ \x00',
nHashFuncs=19,
nTweak=0,
nFlags=1,
)
def __init__(self):
super().__init__()
self._tx_received = False
self._merkleblock_received = False
def on_inv(self, message):
want = msg_getdata()
for i in message.inv:
# inv messages can only contain TX or BLOCK, so translate BLOCK to FILTERED_BLOCK
if i.type == MSG_BLOCK:
want.inv.append(CInv(MSG_FILTERED_BLOCK, i.hash))
else:
want.inv.append(i)
if len(want.inv):
self.send_message(want)
def on_merkleblock(self, message):
self._merkleblock_received = True
def on_tx(self, message):
self._tx_received = True
@property
def tx_received(self):
with p2p_lock:
return self._tx_received
@tx_received.setter
def tx_received(self, value):
with p2p_lock:
self._tx_received = value
@property
def merkleblock_received(self):
with p2p_lock:
return self._merkleblock_received
@merkleblock_received.setter
def merkleblock_received(self, value):
with p2p_lock:
self._merkleblock_received = value
class FilterTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.extra_args = [[
'-peerbloomfilters',
'[email protected]', # immediate tx relay
]]
def generatetoscriptpubkey(self, scriptpubkey):
"""Helper to generate a single block to the given scriptPubKey."""
return self.generatetodescriptor(self.nodes[0], 1, f'raw({scriptpubkey.hex()})')[0]
def test_size_limits(self, filter_peer):
self.log.info('Check that too large filter is rejected')
with self.nodes[0].assert_debug_log(['Misbehaving']):
filter_peer.send_and_ping(msg_filterload(data=b'\xbb'*(MAX_BLOOM_FILTER_SIZE+1)))
self.log.info('Check that max size filter is accepted')
with self.nodes[0].assert_debug_log([], unexpected_msgs=['Misbehaving']):
filter_peer.send_and_ping(msg_filterload(data=b'\xbb'*(MAX_BLOOM_FILTER_SIZE)))
filter_peer.send_and_ping(msg_filterclear())
self.log.info('Check that filter with too many hash functions is rejected')
with self.nodes[0].assert_debug_log(['Misbehaving']):
filter_peer.send_and_ping(msg_filterload(data=b'\xaa', nHashFuncs=MAX_BLOOM_HASH_FUNCS+1))
self.log.info('Check that filter with max hash functions is accepted')
with self.nodes[0].assert_debug_log([], unexpected_msgs=['Misbehaving']):
filter_peer.send_and_ping(msg_filterload(data=b'\xaa', nHashFuncs=MAX_BLOOM_HASH_FUNCS))
# Don't send filterclear until next two filteradd checks are done
self.log.info('Check that max size data element to add to the filter is accepted')
with self.nodes[0].assert_debug_log([], unexpected_msgs=['Misbehaving']):
filter_peer.send_and_ping(msg_filteradd(data=b'\xcc'*(MAX_SCRIPT_ELEMENT_SIZE)))
self.log.info('Check that too large data element to add to the filter is rejected')
with self.nodes[0].assert_debug_log(['Misbehaving']):
filter_peer.send_and_ping(msg_filteradd(data=b'\xcc'*(MAX_SCRIPT_ELEMENT_SIZE+1)))
filter_peer.send_and_ping(msg_filterclear())
def test_msg_mempool(self):
self.log.info("Check that a node with bloom filters enabled services p2p mempool messages")
filter_peer = P2PBloomFilter()
self.log.info("Create two tx before connecting, one relevant to the node another that is not")
rel_txid = self.wallet.send_to(from_node=self.nodes[0], scriptPubKey=filter_peer.watch_script_pubkey, amount=1 * COIN)["txid"]
irr_result = self.wallet.send_to(from_node=self.nodes[0], scriptPubKey=getnewdestination()[1], amount=2 * COIN)
irr_txid = irr_result["txid"]
irr_wtxid = irr_result["wtxid"]
self.log.info("Send a mempool msg after connecting and check that the relevant tx is announced")
self.nodes[0].add_p2p_connection(filter_peer)
filter_peer.send_and_ping(filter_peer.watch_filter_init)
filter_peer.send_message(msg_mempool())
filter_peer.wait_for_tx(rel_txid)
self.log.info("Request the irrelevant transaction even though it was not announced")
filter_peer.send_message(msg_getdata([CInv(t=MSG_WTX, h=int(irr_wtxid, 16))]))
self.log.info("We should get it anyway because it was in the mempool on connection to peer")
filter_peer.wait_for_tx(irr_txid)
def test_frelay_false(self, filter_peer):
self.log.info("Check that a node with fRelay set to false does not receive invs until the filter is set")
filter_peer.tx_received = False
self.wallet.send_to(from_node=self.nodes[0], scriptPubKey=filter_peer.watch_script_pubkey, amount=9 * COIN)
# Sync to make sure the reason filter_peer doesn't receive the tx is not p2p delays
filter_peer.sync_with_ping()
assert not filter_peer.tx_received
# Clear the mempool so that this transaction does not impact subsequent tests
self.generate(self.nodes[0], 1)
def test_filter(self, filter_peer):
# Set the bloomfilter using filterload
filter_peer.send_and_ping(filter_peer.watch_filter_init)
# If fRelay is not already True, sending filterload sets it to True
assert self.nodes[0].getpeerinfo()[0]['relaytxes']
self.log.info('Check that we receive merkleblock and tx if the filter matches a tx in a block')
block_hash = self.generatetoscriptpubkey(filter_peer.watch_script_pubkey)
txid = self.nodes[0].getblock(block_hash)['tx'][0]
filter_peer.wait_for_merkleblock(block_hash)
filter_peer.wait_for_tx(txid)
self.log.info('Check that we only receive a merkleblock if the filter does not match a tx in a block')
filter_peer.tx_received = False
block_hash = self.generatetoscriptpubkey(getnewdestination()[1])
filter_peer.wait_for_merkleblock(block_hash)
assert not filter_peer.tx_received
        self.log.info('Check that we do not receive a tx if the filter does not match a mempool tx')
filter_peer.merkleblock_received = False
filter_peer.tx_received = False
self.wallet.send_to(from_node=self.nodes[0], scriptPubKey=getnewdestination()[1], amount=7 * COIN)
filter_peer.sync_with_ping()
assert not filter_peer.merkleblock_received
assert not filter_peer.tx_received
self.log.info('Check that we receive a tx if the filter matches a mempool tx')
filter_peer.merkleblock_received = False
txid = self.wallet.send_to(from_node=self.nodes[0], scriptPubKey=filter_peer.watch_script_pubkey, amount=9 * COIN)["txid"]
filter_peer.wait_for_tx(txid)
assert not filter_peer.merkleblock_received
self.log.info('Check that after deleting filter all txs get relayed again')
filter_peer.send_and_ping(msg_filterclear())
for _ in range(5):
txid = self.wallet.send_to(from_node=self.nodes[0], scriptPubKey=getnewdestination()[1], amount=7 * COIN)["txid"]
filter_peer.wait_for_tx(txid)
self.log.info('Check that request for filtered blocks is ignored if no filter is set')
filter_peer.merkleblock_received = False
filter_peer.tx_received = False
with self.nodes[0].assert_debug_log(expected_msgs=['received getdata']):
block_hash = self.generatetoscriptpubkey(getnewdestination()[1])
filter_peer.wait_for_inv([CInv(MSG_BLOCK, int(block_hash, 16))])
filter_peer.sync_with_ping()
assert not filter_peer.merkleblock_received
assert not filter_peer.tx_received
self.log.info('Check that sending "filteradd" if no filter is set is treated as misbehavior')
with self.nodes[0].assert_debug_log(['Misbehaving']):
filter_peer.send_and_ping(msg_filteradd(data=b'letsmisbehave'))
self.log.info("Check that division-by-zero remote crash bug [CVE-2013-5700] is fixed")
filter_peer.send_and_ping(msg_filterload(data=b'', nHashFuncs=1))
filter_peer.send_and_ping(msg_filteradd(data=b'letstrytocrashthisnode'))
self.nodes[0].disconnect_p2ps()
def run_test(self):
self.wallet = MiniWallet(self.nodes[0])
filter_peer = self.nodes[0].add_p2p_connection(P2PBloomFilter())
self.log.info('Test filter size limits')
self.test_size_limits(filter_peer)
self.log.info('Test BIP 37 for a node with fRelay = True (default)')
self.test_filter(filter_peer)
self.nodes[0].disconnect_p2ps()
self.log.info('Test BIP 37 for a node with fRelay = False')
# Add peer but do not send version yet
filter_peer_without_nrelay = self.nodes[0].add_p2p_connection(P2PBloomFilter(), send_version=False, wait_for_verack=False)
# Send version with relay=False
version_without_fRelay = msg_version()
version_without_fRelay.nVersion = P2P_VERSION
version_without_fRelay.strSubVer = P2P_SUBVERSION
version_without_fRelay.nServices = P2P_SERVICES
version_without_fRelay.relay = 0
filter_peer_without_nrelay.send_message(version_without_fRelay)
filter_peer_without_nrelay.wait_for_verack()
assert not self.nodes[0].getpeerinfo()[0]['relaytxes']
self.test_frelay_false(filter_peer_without_nrelay)
self.test_filter(filter_peer_without_nrelay)
self.test_msg_mempool()
if __name__ == '__main__':
FilterTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/rpc_blockchain.py | #!/usr/bin/env python3
# Copyright (c) 2014-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test RPCs related to blockchainstate.
Test the following RPCs:
- getblockchaininfo
- getdeploymentinfo
- getchaintxstats
- gettxoutsetinfo
- getblockheader
- getdifficulty
- getnetworkhashps
- waitforblockheight
- getblock
- getblockhash
- getbestblockhash
- verifychain
Tests correspond to code in rpc/blockchain.cpp.
"""
from decimal import Decimal
import http.client
import os
import subprocess
import textwrap
from test_framework.blocktools import (
MAX_FUTURE_BLOCK_TIME,
TIME_GENESIS_BLOCK,
create_block,
create_coinbase,
)
from test_framework.messages import (
CBlockHeader,
from_hex,
msg_block,
)
from test_framework.p2p import P2PInterface
from test_framework.script import hash256
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_greater_than,
assert_greater_than_or_equal,
assert_raises,
assert_raises_rpc_error,
assert_is_hex_string,
assert_is_hash_string,
)
from test_framework.wallet import MiniWallet
HEIGHT = 200 # blocks mined
TIME_RANGE_STEP = 600 # ten-minute steps
TIME_RANGE_MTP = TIME_GENESIS_BLOCK + (HEIGHT - 6) * TIME_RANGE_STEP
TIME_RANGE_TIP = TIME_GENESIS_BLOCK + (HEIGHT - 1) * TIME_RANGE_STEP
TIME_RANGE_END = TIME_GENESIS_BLOCK + HEIGHT * TIME_RANGE_STEP
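# The median-time-past of the tip is the median of the last 11 block timestamps; with
# one block per TIME_RANGE_STEP that is the timestamp of the block five below the tip,
# i.e. (HEIGHT - 6) steps after the genesis timestamp.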
DIFFICULTY_ADJUSTMENT_INTERVAL = 2016
class BlockchainTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
self.supports_cli = False
def run_test(self):
self.wallet = MiniWallet(self.nodes[0])
self._test_prune_disk_space()
self.mine_chain()
self._test_max_future_block_time()
self.restart_node(
0,
extra_args=[
"-stopatheight=207",
"-checkblocks=-1", # Check all blocks
"-prune=1", # Set pruning after rescan is complete
],
)
self._test_getblockchaininfo()
self._test_getchaintxstats()
self._test_gettxoutsetinfo()
self._test_getblockheader()
self._test_getdifficulty()
self._test_getnetworkhashps()
self._test_stopatheight()
self._test_waitforblockheight()
self._test_getblock()
self._test_getdeploymentinfo()
self._test_y2106()
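        # checklevel=4 is the most thorough check level; nblocks=0 verifies all blocks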
assert self.nodes[0].verifychain(4, 0)
def mine_chain(self):
self.log.info(f"Generate {HEIGHT} blocks after the genesis block in ten-minute steps")
for t in range(TIME_GENESIS_BLOCK, TIME_RANGE_END, TIME_RANGE_STEP):
self.nodes[0].setmocktime(t)
self.generate(self.wallet, 1)
assert_equal(self.nodes[0].getblockchaininfo()['blocks'], HEIGHT)
def _test_prune_disk_space(self):
self.log.info("Test that a manually pruned node does not run into "
"integer overflow on first start up")
self.restart_node(0, extra_args=["-prune=1"])
self.log.info("Avoid warning when assumed chain size is enough")
self.restart_node(0, extra_args=["-prune=123456789"])
def _test_max_future_block_time(self):
self.stop_node(0)
self.log.info("A block tip of more than MAX_FUTURE_BLOCK_TIME in the future raises an error")
self.nodes[0].assert_start_raises_init_error(
extra_args=[f"-mocktime={TIME_RANGE_TIP - MAX_FUTURE_BLOCK_TIME - 1}"],
expected_msg=": The block database contains a block which appears to be from the future."
" This may be due to your computer's date and time being set incorrectly."
f" Only rebuild the block database if you are sure that your computer's date and time are correct.{os.linesep}"
"Please restart with -reindex or -reindex-chainstate to recover.",
)
self.log.info("A block tip of MAX_FUTURE_BLOCK_TIME in the future is fine")
self.start_node(0, extra_args=[f"-mocktime={TIME_RANGE_TIP - MAX_FUTURE_BLOCK_TIME}"])
def _test_getblockchaininfo(self):
self.log.info("Test getblockchaininfo")
keys = [
'bestblockhash',
'blocks',
'chain',
'chainwork',
'difficulty',
'headers',
'initialblockdownload',
'mediantime',
'pruned',
'size_on_disk',
'time',
'verificationprogress',
'warnings',
]
res = self.nodes[0].getblockchaininfo()
assert_equal(res['time'], TIME_RANGE_END - TIME_RANGE_STEP)
assert_equal(res['mediantime'], TIME_RANGE_MTP)
# result should have these additional pruning keys if manual pruning is enabled
assert_equal(sorted(res.keys()), sorted(['pruneheight', 'automatic_pruning'] + keys))
# size_on_disk should be > 0
assert_greater_than(res['size_on_disk'], 0)
# pruneheight should be greater or equal to 0
assert_greater_than_or_equal(res['pruneheight'], 0)
# check other pruning fields given that prune=1
assert res['pruned']
assert not res['automatic_pruning']
self.restart_node(0, ['-stopatheight=207'])
res = self.nodes[0].getblockchaininfo()
# should have exact keys
assert_equal(sorted(res.keys()), keys)
self.stop_node(0)
self.nodes[0].assert_start_raises_init_error(
extra_args=['-testactivationheight=name@2'],
expected_msg='Error: Invalid name (name@2) for -testactivationheight=name@height.',
)
self.nodes[0].assert_start_raises_init_error(
extra_args=['-testactivationheight=bip34@-2'],
expected_msg='Error: Invalid height value (bip34@-2) for -testactivationheight=name@height.',
)
self.nodes[0].assert_start_raises_init_error(
extra_args=['-testactivationheight='],
expected_msg='Error: Invalid format () for -testactivationheight=name@height.',
)
self.start_node(0, extra_args=[
'-stopatheight=207',
'-prune=550',
])
res = self.nodes[0].getblockchaininfo()
# result should have these additional pruning keys if prune=550
assert_equal(sorted(res.keys()), sorted(['pruneheight', 'automatic_pruning', 'prune_target_size'] + keys))
# check related fields
assert res['pruned']
assert_equal(res['pruneheight'], 0)
assert res['automatic_pruning']
assert_equal(res['prune_target_size'], 576716800)
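        # -prune=550 means 550 MiB: 550 * 1024 * 1024 = 576716800 bytes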
assert_greater_than(res['size_on_disk'], 0)
def check_signalling_deploymentinfo_result(self, gdi_result, height, blockhash, status_next):
assert height >= 144 and height <= 287
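        # Regtest BIP9 uses 144-block signalling periods with a 108-block (75%)
        # threshold; testdummy is 'started' only during the second period,
        # heights 144 through 287.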
assert_equal(gdi_result, {
"hash": blockhash,
"height": height,
"deployments": {
'bip34': {'type': 'buried', 'active': True, 'height': 2},
'bip66': {'type': 'buried', 'active': True, 'height': 3},
'bip65': {'type': 'buried', 'active': True, 'height': 4},
'csv': {'type': 'buried', 'active': True, 'height': 5},
'segwit': {'type': 'buried', 'active': True, 'height': 6},
'testdummy': {
'type': 'bip9',
'bip9': {
'bit': 28,
'start_time': 0,
'timeout': 0x7fffffffffffffff, # testdummy does not have a timeout so is set to the max int64 value
'min_activation_height': 0,
'status': 'started',
'status_next': status_next,
'since': 144,
'statistics': {
'period': 144,
'threshold': 108,
'elapsed': height - 143,
'count': height - 143,
'possible': True,
},
'signalling': '#'*(height-143),
},
'active': False
},
'taproot': {
'type': 'bip9',
'bip9': {
'start_time': -1,
'timeout': 9223372036854775807,
'min_activation_height': 0,
'status': 'active',
'status_next': 'active',
'since': 0,
},
'height': 0,
'active': True
}
}
})
def _test_getdeploymentinfo(self):
# Note: continues past -stopatheight height, so must be invoked
# after _test_stopatheight
self.log.info("Test getdeploymentinfo")
self.stop_node(0)
self.start_node(0, extra_args=[
'-testactivationheight=bip34@2',
'-testactivationheight=dersig@3',
'-testactivationheight=cltv@4',
'-testactivationheight=csv@5',
'-testactivationheight=segwit@6',
])
gbci207 = self.nodes[0].getblockchaininfo()
self.check_signalling_deploymentinfo_result(self.nodes[0].getdeploymentinfo(), gbci207["blocks"], gbci207["bestblockhash"], "started")
# block just prior to lock in
self.generate(self.wallet, 287 - gbci207["blocks"])
gbci287 = self.nodes[0].getblockchaininfo()
self.check_signalling_deploymentinfo_result(self.nodes[0].getdeploymentinfo(), gbci287["blocks"], gbci287["bestblockhash"], "locked_in")
# calling with an explicit hash works
self.check_signalling_deploymentinfo_result(self.nodes[0].getdeploymentinfo(gbci207["bestblockhash"]), gbci207["blocks"], gbci207["bestblockhash"], "started")
def _test_y2106(self):
self.log.info("Check that block timestamps work until year 2106")
        self.generate(self.nodes[0], 8)
time_2106 = 2**32 - 1
self.nodes[0].setmocktime(time_2106)
last = self.generate(self.nodes[0], 6)[-1]
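        # With 6 blocks mined at time_2106, the median of the last 11 timestamps is
        # time_2106 itself, so the tip's mediantime reflects the mocked time.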
assert_equal(self.nodes[0].getblockheader(last)["mediantime"], time_2106)
def _test_getchaintxstats(self):
self.log.info("Test getchaintxstats")
# Test `getchaintxstats` invalid extra parameters
assert_raises_rpc_error(-1, 'getchaintxstats', self.nodes[0].getchaintxstats, 0, '', 0)
# Test `getchaintxstats` invalid `nblocks`
assert_raises_rpc_error(-3, "JSON value of type string is not of expected type number", self.nodes[0].getchaintxstats, '')
assert_raises_rpc_error(-8, "Invalid block count: should be between 0 and the block's height - 1", self.nodes[0].getchaintxstats, -1)
assert_raises_rpc_error(-8, "Invalid block count: should be between 0 and the block's height - 1", self.nodes[0].getchaintxstats, self.nodes[0].getblockcount())
# Test `getchaintxstats` invalid `blockhash`
assert_raises_rpc_error(-3, "JSON value of type number is not of expected type string", self.nodes[0].getchaintxstats, blockhash=0)
assert_raises_rpc_error(-8, "blockhash must be of length 64 (not 1, for '0')", self.nodes[0].getchaintxstats, blockhash='0')
assert_raises_rpc_error(-8, "blockhash must be hexadecimal string (not 'ZZZ0000000000000000000000000000000000000000000000000000000000000')", self.nodes[0].getchaintxstats, blockhash='ZZZ0000000000000000000000000000000000000000000000000000000000000')
assert_raises_rpc_error(-5, "Block not found", self.nodes[0].getchaintxstats, blockhash='0000000000000000000000000000000000000000000000000000000000000000')
blockhash = self.nodes[0].getblockhash(HEIGHT)
self.nodes[0].invalidateblock(blockhash)
assert_raises_rpc_error(-8, "Block is not in main chain", self.nodes[0].getchaintxstats, blockhash=blockhash)
self.nodes[0].reconsiderblock(blockhash)
chaintxstats = self.nodes[0].getchaintxstats(nblocks=1)
# 200 txs plus genesis tx
assert_equal(chaintxstats['txcount'], HEIGHT + 1)
# tx rate should be 1 per 10 minutes, or 1/600
# we have to round because of binary math
assert_equal(round(chaintxstats['txrate'] * TIME_RANGE_STEP, 10), Decimal(1))
b1_hash = self.nodes[0].getblockhash(1)
b1 = self.nodes[0].getblock(b1_hash)
b200_hash = self.nodes[0].getblockhash(HEIGHT)
b200 = self.nodes[0].getblock(b200_hash)
time_diff = b200['mediantime'] - b1['mediantime']
chaintxstats = self.nodes[0].getchaintxstats()
assert_equal(chaintxstats['time'], b200['time'])
assert_equal(chaintxstats['txcount'], HEIGHT + 1)
assert_equal(chaintxstats['window_final_block_hash'], b200_hash)
        assert_equal(chaintxstats['window_final_block_height'], HEIGHT)
assert_equal(chaintxstats['window_block_count'], HEIGHT - 1)
assert_equal(chaintxstats['window_tx_count'], HEIGHT - 1)
assert_equal(chaintxstats['window_interval'], time_diff)
assert_equal(round(chaintxstats['txrate'] * time_diff, 10), Decimal(HEIGHT - 1))
chaintxstats = self.nodes[0].getchaintxstats(blockhash=b1_hash)
assert_equal(chaintxstats['time'], b1['time'])
assert_equal(chaintxstats['txcount'], 2)
assert_equal(chaintxstats['window_final_block_hash'], b1_hash)
assert_equal(chaintxstats['window_final_block_height'], 1)
assert_equal(chaintxstats['window_block_count'], 0)
assert 'window_tx_count' not in chaintxstats
assert 'window_interval' not in chaintxstats
assert 'txrate' not in chaintxstats
def _test_gettxoutsetinfo(self):
node = self.nodes[0]
res = node.gettxoutsetinfo()
assert_equal(res['total_amount'], Decimal('8725.00000000'))
assert_equal(res['transactions'], HEIGHT)
assert_equal(res['height'], HEIGHT)
assert_equal(res['txouts'], HEIGHT)
        assert_equal(res['bogosize'], 16800)
assert_equal(res['bestblock'], node.getblockhash(HEIGHT))
size = res['disk_size']
assert size > 6400
assert size < 64000
assert_equal(len(res['bestblock']), 64)
assert_equal(len(res['hash_serialized_3']), 64)
self.log.info("Test gettxoutsetinfo works for blockchain with just the genesis block")
b1hash = node.getblockhash(1)
node.invalidateblock(b1hash)
res2 = node.gettxoutsetinfo()
assert_equal(res2['transactions'], 0)
assert_equal(res2['total_amount'], Decimal('0'))
assert_equal(res2['height'], 0)
assert_equal(res2['txouts'], 0)
        assert_equal(res2['bogosize'], 0)
assert_equal(res2['bestblock'], node.getblockhash(0))
assert_equal(len(res2['hash_serialized_3']), 64)
self.log.info("Test gettxoutsetinfo returns the same result after invalidate/reconsider block")
node.reconsiderblock(b1hash)
res3 = node.gettxoutsetinfo()
# The field 'disk_size' is non-deterministic and can thus not be
# compared between res and res3. Everything else should be the same.
del res['disk_size'], res3['disk_size']
assert_equal(res, res3)
self.log.info("Test gettxoutsetinfo hash_type option")
# Adding hash_type 'hash_serialized_3', which is the default, should
# not change the result.
res4 = node.gettxoutsetinfo(hash_type='hash_serialized_3')
del res4['disk_size']
assert_equal(res, res4)
# hash_type none should not return a UTXO set hash.
res5 = node.gettxoutsetinfo(hash_type='none')
assert 'hash_serialized_3' not in res5
# hash_type muhash should return a different UTXO set hash.
res6 = node.gettxoutsetinfo(hash_type='muhash')
assert 'muhash' in res6
assert res['hash_serialized_3'] != res6['muhash']
# muhash should not be returned unless requested.
for r in [res, res2, res3, res4, res5]:
assert 'muhash' not in r
# Unknown hash_type raises an error
assert_raises_rpc_error(-8, "'foo hash' is not a valid hash_type", node.gettxoutsetinfo, "foo hash")
def _test_getblockheader(self):
self.log.info("Test getblockheader")
node = self.nodes[0]
assert_raises_rpc_error(-8, "hash must be of length 64 (not 8, for 'nonsense')", node.getblockheader, "nonsense")
assert_raises_rpc_error(-8, "hash must be hexadecimal string (not 'ZZZ7bb8b1697ea987f3b223ba7819250cae33efacb068d23dc24859824a77844')", node.getblockheader, "ZZZ7bb8b1697ea987f3b223ba7819250cae33efacb068d23dc24859824a77844")
assert_raises_rpc_error(-5, "Block not found", node.getblockheader, "0cf7bb8b1697ea987f3b223ba7819250cae33efacb068d23dc24859824a77844")
besthash = node.getbestblockhash()
secondbesthash = node.getblockhash(HEIGHT - 1)
header = node.getblockheader(blockhash=besthash)
assert_equal(header['hash'], besthash)
assert_equal(header['height'], HEIGHT)
assert_equal(header['confirmations'], 1)
assert_equal(header['previousblockhash'], secondbesthash)
assert_is_hex_string(header['chainwork'])
assert_equal(header['nTx'], 1)
assert_is_hash_string(header['hash'])
assert_is_hash_string(header['previousblockhash'])
assert_is_hash_string(header['merkleroot'])
assert_is_hash_string(header['bits'], length=None)
assert isinstance(header['time'], int)
assert_equal(header['mediantime'], TIME_RANGE_MTP)
assert isinstance(header['nonce'], int)
assert isinstance(header['version'], int)
assert isinstance(int(header['versionHex'], 16), int)
assert isinstance(header['difficulty'], Decimal)
# Test with verbose=False, which should return the header as hex.
header_hex = node.getblockheader(blockhash=besthash, verbose=False)
assert_is_hex_string(header_hex)
header = from_hex(CBlockHeader(), header_hex)
header.calc_sha256()
assert_equal(header.hash, besthash)
assert 'previousblockhash' not in node.getblockheader(node.getblockhash(0))
assert 'nextblockhash' not in node.getblockheader(node.getbestblockhash())
def _test_getdifficulty(self):
self.log.info("Test getdifficulty")
difficulty = self.nodes[0].getdifficulty()
# 1 hash in 2 should be valid, so difficulty should be 1/2**31
# binary => decimal => binary math is why we do this check
assert abs(difficulty * 2**31 - 1) < 0.0001
def _test_getnetworkhashps(self):
self.log.info("Test getnetworkhashps")
assert_raises_rpc_error(
-3,
textwrap.dedent("""
Wrong type passed:
{
"Position 1 (nblocks)": "JSON value of type string is not of expected type number",
"Position 2 (height)": "JSON value of type array is not of expected type number"
}
""").strip(),
lambda: self.nodes[0].getnetworkhashps("a", []),
)
assert_raises_rpc_error(
-8,
"Block does not exist at specified height",
lambda: self.nodes[0].getnetworkhashps(100, self.nodes[0].getblockcount() + 1),
)
assert_raises_rpc_error(
-8,
"Block does not exist at specified height",
lambda: self.nodes[0].getnetworkhashps(100, -10),
)
assert_raises_rpc_error(
-8,
"Invalid nblocks. Must be a positive number or -1.",
lambda: self.nodes[0].getnetworkhashps(-100),
)
assert_raises_rpc_error(
-8,
"Invalid nblocks. Must be a positive number or -1.",
lambda: self.nodes[0].getnetworkhashps(0),
)
# Genesis block height estimate should return 0
hashes_per_second = self.nodes[0].getnetworkhashps(100, 0)
assert_equal(hashes_per_second, 0)
# This should be 2 hashes every 10 minutes or 1/300
hashes_per_second = self.nodes[0].getnetworkhashps()
assert abs(hashes_per_second * 300 - 1) < 0.0001
# Test setting the first param of getnetworkhashps to -1 returns the average network
# hashes per second from the last difficulty change.
current_block_height = self.nodes[0].getmininginfo()['blocks']
blocks_since_last_diff_change = current_block_height % DIFFICULTY_ADJUSTMENT_INTERVAL + 1
expected_hashes_per_second_since_diff_change = self.nodes[0].getnetworkhashps(blocks_since_last_diff_change)
assert_equal(self.nodes[0].getnetworkhashps(-1), expected_hashes_per_second_since_diff_change)
# Ensure long lookups get truncated to chain length
hashes_per_second = self.nodes[0].getnetworkhashps(self.nodes[0].getblockcount() + 1000)
assert hashes_per_second > 0.003
def _test_stopatheight(self):
self.log.info("Test stopping at height")
assert_equal(self.nodes[0].getblockcount(), HEIGHT)
self.generate(self.wallet, 6)
assert_equal(self.nodes[0].getblockcount(), HEIGHT + 6)
self.log.debug('Node should not stop at this height')
assert_raises(subprocess.TimeoutExpired, lambda: self.nodes[0].process.wait(timeout=3))
try:
self.generatetoaddress(self.nodes[0], 1, self.wallet.get_address(), sync_fun=self.no_op)
except (ConnectionError, http.client.BadStatusLine):
pass # The node already shut down before response
self.log.debug('Node should stop at this height...')
self.nodes[0].wait_until_stopped()
self.start_node(0)
assert_equal(self.nodes[0].getblockcount(), HEIGHT + 7)
def _test_waitforblockheight(self):
self.log.info("Test waitforblockheight")
node = self.nodes[0]
peer = node.add_p2p_connection(P2PInterface())
current_height = node.getblock(node.getbestblockhash())['height']
# Create a fork somewhere below our current height, invalidate the tip
# of that fork, and then ensure that waitforblockheight still
# works as expected.
#
# (Previously this was broken based on setting
# `rpc/blockchain.cpp:latestblock` incorrectly.)
#
fork_height = current_height - 100 # choose something vaguely near our tip
fork_hash = node.getblockhash(fork_height)
fork_block = node.getblock(fork_hash)
def solve_and_send_block(prevhash, height, time):
b = create_block(prevhash, create_coinbase(height), time)
b.solve()
peer.send_and_ping(msg_block(b))
return b
b1 = solve_and_send_block(int(fork_hash, 16), fork_height+1, fork_block['time'] + 1)
b2 = solve_and_send_block(b1.sha256, fork_height+2, b1.nTime + 1)
node.invalidateblock(b2.hash)
def assert_waitforheight(height, timeout=2):
assert_equal(
node.waitforblockheight(height=height, timeout=timeout)['height'],
current_height)
assert_waitforheight(0)
assert_waitforheight(current_height - 1)
assert_waitforheight(current_height)
assert_waitforheight(current_height + 1)
def _test_getblock(self):
node = self.nodes[0]
fee_per_byte = Decimal('0.00000010')
fee_per_kb = 1000 * fee_per_byte
self.wallet.send_self_transfer(fee_rate=fee_per_kb, from_node=node)
blockhash = self.generate(node, 1)[0]
def assert_hexblock_hashes(verbosity):
block = node.getblock(blockhash, verbosity)
assert_equal(blockhash, hash256(bytes.fromhex(block[:160]))[::-1].hex())
def assert_fee_not_in_block(verbosity):
block = node.getblock(blockhash, verbosity)
assert 'fee' not in block['tx'][1]
def assert_fee_in_block(verbosity):
block = node.getblock(blockhash, verbosity)
tx = block['tx'][1]
assert 'fee' in tx
assert_equal(tx['fee'], tx['vsize'] * fee_per_byte)
def assert_vin_contains_prevout(verbosity):
block = node.getblock(blockhash, verbosity)
tx = block["tx"][1]
total_vin = Decimal("0.00000000")
total_vout = Decimal("0.00000000")
for vin in tx["vin"]:
assert "prevout" in vin
assert_equal(set(vin["prevout"].keys()), set(("value", "height", "generated", "scriptPubKey")))
assert_equal(vin["prevout"]["generated"], True)
total_vin += vin["prevout"]["value"]
for vout in tx["vout"]:
total_vout += vout["value"]
assert_equal(total_vin, total_vout + tx["fee"])
def assert_vin_does_not_contain_prevout(verbosity):
block = node.getblock(blockhash, verbosity)
tx = block["tx"][1]
if isinstance(tx, str):
# In verbosity level 1, only the transaction hashes are written
pass
else:
for vin in tx["vin"]:
assert "prevout" not in vin
self.log.info("Test that getblock with verbosity 0 hashes to expected value")
assert_hexblock_hashes(0)
assert_hexblock_hashes(False)
self.log.info("Test that getblock with verbosity 1 doesn't include fee")
assert_fee_not_in_block(1)
assert_fee_not_in_block(True)
self.log.info('Test that getblock with verbosity 2 and 3 includes expected fee')
assert_fee_in_block(2)
assert_fee_in_block(3)
self.log.info("Test that getblock with verbosity 1 and 2 does not include prevout")
assert_vin_does_not_contain_prevout(1)
assert_vin_does_not_contain_prevout(2)
self.log.info("Test that getblock with verbosity 3 includes prevout")
assert_vin_contains_prevout(3)
self.log.info("Test getblock with invalid verbosity type returns proper error message")
assert_raises_rpc_error(-3, "JSON value of type string is not of expected type number", node.getblock, blockhash, "2")
self.log.info("Test that getblock with verbosity 2 and 3 still works with pruned Undo data")
def move_block_file(old, new):
old_path = self.nodes[0].blocks_path / old
new_path = self.nodes[0].blocks_path / new
old_path.rename(new_path)
# Move instead of deleting so we can restore chain state afterwards
move_block_file('rev00000.dat', 'rev_wrong')
assert_fee_not_in_block(2)
assert_fee_not_in_block(3)
assert_vin_does_not_contain_prevout(2)
assert_vin_does_not_contain_prevout(3)
# Restore chain state
move_block_file('rev_wrong', 'rev00000.dat')
assert 'previousblockhash' not in node.getblock(node.getblockhash(0))
assert 'nextblockhash' not in node.getblock(node.getbestblockhash())
if __name__ == '__main__':
BlockchainTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/rpc_preciousblock.py | #!/usr/bin/env python3
# Copyright (c) 2015-2021 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the preciousblock RPC."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
)
def unidirectional_node_sync_via_rpc(node_src, node_dest):
blocks_to_copy = []
blockhash = node_src.getbestblockhash()
while True:
try:
assert len(node_dest.getblock(blockhash, False)) > 0
break
except Exception:
blocks_to_copy.append(blockhash)
blockhash = node_src.getblockheader(blockhash, True)['previousblockhash']
blocks_to_copy.reverse()
for blockhash in blocks_to_copy:
blockdata = node_src.getblock(blockhash, False)
assert node_dest.submitblock(blockdata) in (None, 'inconclusive')
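        # submitblock appears to return None when the block becomes the new tip and
        # 'inconclusive' when it is valid but not on the most-work chain.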
def node_sync_via_rpc(nodes):
for node_src in nodes:
for node_dest in nodes:
if node_src is node_dest:
continue
unidirectional_node_sync_via_rpc(node_src, node_dest)
class PreciousTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 3
self.supports_cli = False
def setup_network(self):
self.setup_nodes()
def run_test(self):
self.log.info("Ensure submitblock can in principle reorg to a competing chain")
self.generate(self.nodes[0], 1, sync_fun=self.no_op)
assert_equal(self.nodes[0].getblockcount(), 1)
hashZ = self.generate(self.nodes[1], 2, sync_fun=self.no_op)[-1]
assert_equal(self.nodes[1].getblockcount(), 2)
node_sync_via_rpc(self.nodes[0:3])
assert_equal(self.nodes[0].getbestblockhash(), hashZ)
self.log.info("Mine blocks A-B-C on Node 0")
hashC = self.generate(self.nodes[0], 3, sync_fun=self.no_op)[-1]
assert_equal(self.nodes[0].getblockcount(), 5)
self.log.info("Mine competing blocks E-F-G on Node 1")
hashG = self.generate(self.nodes[1], 3, sync_fun=self.no_op)[-1]
assert_equal(self.nodes[1].getblockcount(), 5)
assert hashC != hashG
self.log.info("Connect nodes and check no reorg occurs")
# Submit competing blocks via RPC so any reorg should occur before we proceed (no way to wait on inaction for p2p sync)
node_sync_via_rpc(self.nodes[0:2])
self.connect_nodes(0, 1)
assert_equal(self.nodes[0].getbestblockhash(), hashC)
assert_equal(self.nodes[1].getbestblockhash(), hashG)
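        # preciousblock treats a block as if it had been received before any competing
        # block of equal work, so it can flip the tip between the two equal-length
        # forks without any change in total chainwork.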
self.log.info("Make Node0 prefer block G")
self.nodes[0].preciousblock(hashG)
assert_equal(self.nodes[0].getbestblockhash(), hashG)
self.log.info("Make Node0 prefer block C again")
self.nodes[0].preciousblock(hashC)
assert_equal(self.nodes[0].getbestblockhash(), hashC)
self.log.info("Make Node1 prefer block C")
self.nodes[1].preciousblock(hashC)
self.sync_blocks(self.nodes[0:2]) # wait because node 1 may not have downloaded hashC
assert_equal(self.nodes[1].getbestblockhash(), hashC)
self.log.info("Make Node1 prefer block G again")
self.nodes[1].preciousblock(hashG)
assert_equal(self.nodes[1].getbestblockhash(), hashG)
self.log.info("Make Node0 prefer block G again")
self.nodes[0].preciousblock(hashG)
assert_equal(self.nodes[0].getbestblockhash(), hashG)
self.log.info("Make Node1 prefer block C again")
self.nodes[1].preciousblock(hashC)
assert_equal(self.nodes[1].getbestblockhash(), hashC)
self.log.info("Mine another block (E-F-G-)H on Node 0 and reorg Node 1")
self.generate(self.nodes[0], 1, sync_fun=self.no_op)
assert_equal(self.nodes[0].getblockcount(), 6)
self.sync_blocks(self.nodes[0:2])
hashH = self.nodes[0].getbestblockhash()
assert_equal(self.nodes[1].getbestblockhash(), hashH)
self.log.info("Node1 should not be able to prefer block C anymore")
self.nodes[1].preciousblock(hashC)
assert_equal(self.nodes[1].getbestblockhash(), hashH)
self.log.info("Mine competing blocks I-J-K-L on Node 2")
self.generate(self.nodes[2], 4, sync_fun=self.no_op)
assert_equal(self.nodes[2].getblockcount(), 6)
hashL = self.nodes[2].getbestblockhash()
self.log.info("Connect nodes and check no reorg occurs")
node_sync_via_rpc(self.nodes[1:3])
self.connect_nodes(1, 2)
self.connect_nodes(0, 2)
assert_equal(self.nodes[0].getbestblockhash(), hashH)
assert_equal(self.nodes[1].getbestblockhash(), hashH)
assert_equal(self.nodes[2].getbestblockhash(), hashL)
self.log.info("Make Node1 prefer block L")
self.nodes[1].preciousblock(hashL)
assert_equal(self.nodes[1].getbestblockhash(), hashL)
self.log.info("Make Node2 prefer block H")
self.nodes[2].preciousblock(hashH)
assert_equal(self.nodes[2].getbestblockhash(), hashH)
if __name__ == '__main__':
PreciousTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/feature_bind_extra.py | #!/usr/bin/env python3
# Copyright (c) 2014-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""
Test starting bitcoind with -bind and/or -bind=...=onion and confirm
that bind happens on the expected ports.
"""
import sys
from test_framework.netutil import (
addr_to_hex,
get_bind_addrs,
)
from test_framework.test_framework import (
BitcoinTestFramework,
SkipTest,
)
from test_framework.util import (
assert_equal,
p2p_port,
rpc_port,
)
class BindExtraTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
# Avoid any -bind= on the command line. Force the framework to avoid
# adding -bind=127.0.0.1.
self.bind_to_localhost_only = False
self.num_nodes = 2
def setup_network(self):
# Due to OS-specific network stats queries, we only run on Linux.
self.log.info("Checking for Linux")
if not sys.platform.startswith('linux'):
raise SkipTest("This test can only be run on Linux.")
loopback_ipv4 = addr_to_hex("127.0.0.1")
# Start custom ports by reusing unused p2p ports
port = p2p_port(self.num_nodes)
# Array of tuples [command line arguments, expected bind addresses].
self.expected = []
        # Node0: no plain -bind=..., only -bind=...=onion, so only the Tor target gets bound.
self.expected.append(
[
[f"-bind=127.0.0.1:{port}=onion"],
[(loopback_ipv4, port)]
],
)
port += 1
# Node1, both -bind=... and -bind=...=onion.
self.expected.append(
[
[f"-bind=127.0.0.1:{port}", f"-bind=127.0.0.1:{port + 1}=onion"],
[(loopback_ipv4, port), (loopback_ipv4, port + 1)]
],
)
port += 2
self.extra_args = list(map(lambda e: e[0], self.expected))
self.add_nodes(self.num_nodes, self.extra_args)
# Don't start the nodes, as some of them would collide trying to bind on the same port.
def run_test(self):
for i in range(len(self.expected)):
self.log.info(f"Starting node {i} with {self.expected[i][0]}")
self.start_node(i)
pid = self.nodes[i].process.pid
binds = set(get_bind_addrs(pid))
# Remove IPv6 addresses because on some CI environments "::1" is not configured
# on the system (so our test_ipv6_local() would return False), but it is
# possible to bind on "::". This makes it unpredictable whether to expect
# that bitcoind has bound on "::1" (for RPC) and "::" (for P2P).
ipv6_addr_len_bytes = 32
binds = set(filter(lambda e: len(e[0]) != ipv6_addr_len_bytes, binds))
# Remove RPC ports. They are not relevant for this test.
binds = set(filter(lambda e: e[1] != rpc_port(i), binds))
assert_equal(binds, set(self.expected[i][1]))
self.stop_node(i)
self.log.info(f"Stopped node {i}")
if __name__ == '__main__':
BindExtraTest().main()
| 0 |
bitcoin/test | bitcoin/test/functional/wallet_hd.py | #!/usr/bin/env python3
# Copyright (c) 2016-2022 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test Hierarchical Deterministic wallet function."""
import shutil
from test_framework.blocktools import COINBASE_MATURITY
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
)
class WalletHDTest(BitcoinTestFramework):
def add_options(self, parser):
self.add_wallet_options(parser)
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 2
self.extra_args = [[], ['-keypool=0']]
# whitelist peers to speed up tx relay / mempool sync
for args in self.extra_args:
args.append("[email protected]")
self.supports_cli = False
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def run_test(self):
# Make sure we use hd, keep masterkeyid
hd_fingerprint = self.nodes[1].getaddressinfo(self.nodes[1].getnewaddress())['hdmasterfingerprint']
assert_equal(len(hd_fingerprint), 8)
# create an internal key
change_addr = self.nodes[1].getrawchangeaddress()
change_addrV = self.nodes[1].getaddressinfo(change_addr)
if self.options.descriptors:
assert_equal(change_addrV["hdkeypath"], "m/84h/1h/0h/1/0")
else:
assert_equal(change_addrV["hdkeypath"], "m/0'/1'/0'") #first internal child key
# Import a non-HD private key in the HD wallet
non_hd_add = 'bcrt1qmevj8zfx0wdvp05cqwkmr6mxkfx60yezwjksmt'
non_hd_key = 'cS9umN9w6cDMuRVYdbkfE4c7YUFLJRoXMfhQ569uY4odiQbVN8Rt'
self.nodes[1].importprivkey(non_hd_key)
# This should be enough to keep the master key and the non-HD key
self.nodes[1].backupwallet(self.nodes[1].datadir_path / "hd.bak")
#self.nodes[1].dumpwallet(self.nodes[1].datadir_path / "hd.dump")
# Derive some HD addresses and remember the last
# Also send funds to each address
self.generate(self.nodes[0], COINBASE_MATURITY + 1)
hd_add = None
NUM_HD_ADDS = 10
for i in range(1, NUM_HD_ADDS + 1):
hd_add = self.nodes[1].getnewaddress()
hd_info = self.nodes[1].getaddressinfo(hd_add)
if self.options.descriptors:
assert_equal(hd_info["hdkeypath"], "m/84h/1h/0h/0/" + str(i))
else:
assert_equal(hd_info["hdkeypath"], "m/0'/0'/" + str(i) + "'")
assert_equal(hd_info["hdmasterfingerprint"], hd_fingerprint)
self.nodes[0].sendtoaddress(hd_add, 1)
self.generate(self.nodes[0], 1)
self.nodes[0].sendtoaddress(non_hd_add, 1)
self.generate(self.nodes[0], 1)
# create an internal key (again)
change_addr = self.nodes[1].getrawchangeaddress()
change_addrV = self.nodes[1].getaddressinfo(change_addr)
if self.options.descriptors:
assert_equal(change_addrV["hdkeypath"], "m/84h/1h/0h/1/1")
else:
assert_equal(change_addrV["hdkeypath"], "m/0'/1'/1'") # second internal child key
self.sync_all()
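# Expect one coin for each of the NUM_HD_ADDS HD addresses plus one for
# the non-HD address.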
assert_equal(self.nodes[1].getbalance(), NUM_HD_ADDS + 1)
self.log.info("Restore backup ...")
self.stop_node(1)
# we need to delete the complete chain directory
# otherwise node1 would auto-recover all funds and flag the keypool keys as used
shutil.rmtree(self.nodes[1].blocks_path)
shutil.rmtree(self.nodes[1].chain_path / "chainstate")
shutil.copyfile(
self.nodes[1].datadir_path / "hd.bak",
self.nodes[1].wallets_path / self.default_wallet_name / self.wallet_data_filename
)
self.start_node(1)
# Assert that derivation is deterministic
hd_add_2 = None
for i in range(1, NUM_HD_ADDS + 1):
hd_add_2 = self.nodes[1].getnewaddress()
hd_info_2 = self.nodes[1].getaddressinfo(hd_add_2)
if self.options.descriptors:
assert_equal(hd_info_2["hdkeypath"], "m/84h/1h/0h/0/" + str(i))
else:
assert_equal(hd_info_2["hdkeypath"], "m/0'/0'/" + str(i) + "'")
assert_equal(hd_info_2["hdmasterfingerprint"], hd_fingerprint)
assert_equal(hd_add, hd_add_2)
self.connect_nodes(0, 1)
self.sync_all()
# Needs rescan
self.nodes[1].rescanblockchain()
assert_equal(self.nodes[1].getbalance(), NUM_HD_ADDS + 1)
# Try an RPC-based rescan
self.stop_node(1)
shutil.rmtree(self.nodes[1].blocks_path)
shutil.rmtree(self.nodes[1].chain_path / "chainstate")
shutil.copyfile(
self.nodes[1].datadir_path / "hd.bak",
self.nodes[1].wallets_path / self.default_wallet_name / self.wallet_data_filename
)
self.start_node(1, extra_args=self.extra_args[1])
self.connect_nodes(0, 1)
self.sync_all()
# The wallet automatically scans blocks older than the key on startup
assert_equal(self.nodes[1].getbalance(), NUM_HD_ADDS + 1)
out = self.nodes[1].rescanblockchain(0, 1)
assert_equal(out['start_height'], 0)
assert_equal(out['stop_height'], 1)
out = self.nodes[1].rescanblockchain()
assert_equal(out['start_height'], 0)
assert_equal(out['stop_height'], self.nodes[1].getblockcount())
assert_equal(self.nodes[1].getbalance(), NUM_HD_ADDS + 1)
# Send a tx and make sure it uses the internal chain for the change output
txid = self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 1)
outs = self.nodes[1].gettransaction(txid=txid, verbose=True)['decoded']['vout']
keypath = ""
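# The recipient output is exactly 1 BTC, so any other output must be the
# change; record its keypath.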
for out in outs:
if out['value'] != 1:
keypath = self.nodes[1].getaddressinfo(out['scriptPubKey']['address'])['hdkeypath']
if self.options.descriptors:
assert_equal(keypath[0:14], "m/84h/1h/0h/1/")
else:
assert_equal(keypath[0:7], "m/0'/1'")
if not self.options.descriptors:
# Generate a new HD seed on node 1 and make sure it is set
orig_masterkeyid = self.nodes[1].getwalletinfo()['hdseedid']
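# sethdseed's newkeypool argument defaults to true, which flushes the
# keypool so that subsequent addresses derive from the new seed.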
self.nodes[1].sethdseed()
new_masterkeyid = self.nodes[1].getwalletinfo()['hdseedid']
assert orig_masterkeyid != new_masterkeyid
addr = self.nodes[1].getnewaddress()
# Make sure the new address is the first from the keypool
assert_equal(self.nodes[1].getaddressinfo(addr)['hdkeypath'], 'm/0\'/0\'/0\'')
self.nodes[1].keypoolrefill(1) # Fill keypool with 1 key
# Set a new HD seed on node 1 without flushing the keypool
new_seed = self.nodes[0].dumpprivkey(self.nodes[0].getnewaddress())
orig_masterkeyid = new_masterkeyid
self.nodes[1].sethdseed(False, new_seed)
new_masterkeyid = self.nodes[1].getwalletinfo()['hdseedid']
assert orig_masterkeyid != new_masterkeyid
addr = self.nodes[1].getnewaddress()
assert_equal(orig_masterkeyid, self.nodes[1].getaddressinfo(addr)['hdseedid'])
# Make sure the new address continues the previous keypool
assert_equal(self.nodes[1].getaddressinfo(addr)['hdkeypath'], 'm/0\'/0\'/1\'')
# Check that the next address is from the new seed
self.nodes[1].keypoolrefill(1)
next_addr = self.nodes[1].getnewaddress()
assert_equal(new_masterkeyid, self.nodes[1].getaddressinfo(next_addr)['hdseedid'])
# Make sure the next address is not from the previous keypool
assert_equal(self.nodes[1].getaddressinfo(next_addr)['hdkeypath'], 'm/0\'/0\'/0\'')
assert next_addr != addr
# Sethdseed parameter validity
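# (-1: wrong argument count, the RPC returns its help text; -5: invalid
# key; -3: wrong JSON type)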
assert_raises_rpc_error(-1, 'sethdseed', self.nodes[0].sethdseed, False, new_seed, 0)
assert_raises_rpc_error(-5, "Invalid private key", self.nodes[1].sethdseed, False, "not_wif")
assert_raises_rpc_error(-3, "JSON value of type string is not of expected type bool", self.nodes[1].sethdseed, "Not_bool")
assert_raises_rpc_error(-3, "JSON value of type bool is not of expected type string", self.nodes[1].sethdseed, False, True)
assert_raises_rpc_error(-5, "Already have this key", self.nodes[1].sethdseed, False, new_seed)
assert_raises_rpc_error(-5, "Already have this key", self.nodes[1].sethdseed, False, self.nodes[1].dumpprivkey(self.nodes[1].getnewaddress()))
self.log.info('Test sethdseed restoring with keys outside of the initial keypool')
self.generate(self.nodes[0], 10)
# Restart node 1 with keypool of 3 and a different wallet
self.nodes[1].createwallet(wallet_name='origin', blank=True)
self.restart_node(1, extra_args=['-keypool=3', '-wallet=origin'])
self.connect_nodes(0, 1)
# Test sethdseed when restoring a wallet that later sees txs to addresses outside of the initial keypool
origin_rpc = self.nodes[1].get_wallet_rpc('origin')
seed = self.nodes[0].dumpprivkey(self.nodes[0].getnewaddress())
origin_rpc.sethdseed(True, seed)
self.nodes[1].createwallet(wallet_name='restore', blank=True)
restore_rpc = self.nodes[1].get_wallet_rpc('restore')
restore_rpc.sethdseed(True, seed) # Set to be the same seed as origin_rpc
restore_rpc.sethdseed(True) # Rotate to a new seed, making original `seed` inactive
self.nodes[1].createwallet(wallet_name='restore2', blank=True)
restore2_rpc = self.nodes[1].get_wallet_rpc('restore2')
restore2_rpc.sethdseed(True, seed) # Set to be the same seed as origin_rpc
restore2_rpc.sethdseed(True) # Rotate to a new seed, making original `seed` inactive
# Check persistence of inactive seed by reloading restore. restore2 is still loaded to test the case where the wallet is not reloaded
restore_rpc.unloadwallet()
self.nodes[1].loadwallet('restore')
restore_rpc = self.nodes[1].get_wallet_rpc('restore')
# Empty origin keypool and get an address that is beyond the initial keypool
origin_rpc.getnewaddress()
origin_rpc.getnewaddress()
last_addr = origin_rpc.getnewaddress() # Last address of initial keypool
addr = origin_rpc.getnewaddress() # First address beyond initial keypool
# Check that the restored seed has last_addr but does not have addr
info = restore_rpc.getaddressinfo(last_addr)
assert_equal(info['ismine'], True)
info = restore_rpc.getaddressinfo(addr)
assert_equal(info['ismine'], False)
info = restore2_rpc.getaddressinfo(last_addr)
assert_equal(info['ismine'], True)
info = restore2_rpc.getaddressinfo(addr)
assert_equal(info['ismine'], False)
# Check that the origin seed has addr
info = origin_rpc.getaddressinfo(addr)
assert_equal(info['ismine'], True)
# Send a transaction to addr, which is out of the initial keypool.
# The wallet that has set a new seed (restore_rpc) should not detect this transaction.
txid = self.nodes[0].sendtoaddress(addr, 1)
origin_rpc.sendrawtransaction(self.nodes[0].gettransaction(txid)['hex'])
self.generate(self.nodes[0], 1)
origin_rpc.gettransaction(txid)
assert_raises_rpc_error(-5, 'Invalid or non-wallet transaction id', restore_rpc.gettransaction, txid)
out_of_kp_txid = txid
# Send a transaction to last_addr, which is in the initial keypool.
# The wallet that has set a new seed (restore_rpc) should detect this transaction and generate 3 new keys from the initial seed.
# The previous transaction (out_of_kp_txid) should still not be detected as a rescan is required.
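# (topping up by -keypool=3 keys from the now-inactive seed is what makes
# addr ismine after the rescans further below)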
txid = self.nodes[0].sendtoaddress(last_addr, 1)
origin_rpc.sendrawtransaction(self.nodes[0].gettransaction(txid)['hex'])
self.generate(self.nodes[0], 1)
origin_rpc.gettransaction(txid)
restore_rpc.gettransaction(txid)
assert_raises_rpc_error(-5, 'Invalid or non-wallet transaction id', restore_rpc.gettransaction, out_of_kp_txid)
restore2_rpc.gettransaction(txid)
assert_raises_rpc_error(-5, 'Invalid or non-wallet transaction id', restore2_rpc.gettransaction, out_of_kp_txid)
# After rescanning, restore_rpc should now see out_of_kp_txid and generate an additional key.
# addr should now be part of restore_rpc and be ismine
restore_rpc.rescanblockchain()
restore_rpc.gettransaction(out_of_kp_txid)
info = restore_rpc.getaddressinfo(addr)
assert_equal(info['ismine'], True)
restore2_rpc.rescanblockchain()
restore2_rpc.gettransaction(out_of_kp_txid)
info = restore2_rpc.getaddressinfo(addr)
assert_equal(info['ismine'], True)
# Check again that 3 keys were derived.
# Empty keypool and get an address that is beyond the initial keypool
origin_rpc.getnewaddress()
origin_rpc.getnewaddress()
last_addr = origin_rpc.getnewaddress()
addr = origin_rpc.getnewaddress()
# Check that the restored seed has last_addr but does not have addr
info = restore_rpc.getaddressinfo(last_addr)
assert_equal(info['ismine'], True)
info = restore_rpc.getaddressinfo(addr)
assert_equal(info['ismine'], False)
info = restore2_rpc.getaddressinfo(last_addr)
assert_equal(info['ismine'], True)
info = restore2_rpc.getaddressinfo(addr)
assert_equal(info['ismine'], False)
if __name__ == '__main__':
WalletHDTest().main()
| 0 |