From faabb25ea5a2dcfdb1f6202b7aa1b7fa1db7f05a Mon Sep 17 00:00:00 2001 From: zebra-lucky Date: Mon, 28 Jul 2025 18:00:41 +0300 Subject: [PATCH] add new tests for Taproot/FROST --- conftest.py | 95 +++ docs/frost-wallet-dev.md | 8 + scripts/joinmarket-qt.py | 2 +- scripts/tumbler.py | 3 +- src/jmclient/__init__.py | 5 +- src/jmclient/configure.py | 41 +- src/jmclient/cryptoengine.py | 3 +- src/jmclient/frost_clients.py | 11 +- src/jmclient/frost_ipc.py | 26 +- src/jmclient/wallet.py | 66 +- src/jmclient/wallet_rpc.py | 7 +- test/jmclient/test_configure.py | 9 - test/jmclient/test_frost_clients.py | 964 ++++++++++++++++++++++ test/jmclient/test_frost_ipc.py | 341 ++++++++ test/jmclient/test_frost_wallet.py | 410 ++++++++++ test/jmclient/test_taproot_wallet.py | 1134 ++++++++++++++++++++++++++ test/jmclient/test_wallet.py | 21 - test/jmfrost/chilldkg_example.py | 0 test/jmfrost/test_chilldkg_ref.py | 0 test/jmfrost/test_frost_ref.py | 18 +- test/jmfrost/trusted_keygen.py | 152 ++++ test/regtest_frost_joinmarket.cfg | 154 ++++ test/regtest_taproot_joinmarket.cfg | 150 ++++ 23 files changed, 3526 insertions(+), 94 deletions(-) create mode 100644 test/jmclient/test_frost_clients.py create mode 100644 test/jmclient/test_frost_ipc.py create mode 100644 test/jmclient/test_frost_wallet.py create mode 100644 test/jmclient/test_taproot_wallet.py mode change 100755 => 100644 test/jmfrost/chilldkg_example.py mode change 100755 => 100644 test/jmfrost/test_chilldkg_ref.py mode change 100755 => 100644 test/jmfrost/test_frost_ref.py create mode 100644 test/jmfrost/trusted_keygen.py create mode 100644 test/regtest_frost_joinmarket.cfg create mode 100644 test/regtest_taproot_joinmarket.cfg diff --git a/conftest.py b/conftest.py index 6a44a0f..971eb50 100644 --- a/conftest.py +++ b/conftest.py @@ -172,3 +172,98 @@ def setup_regtest_bitcoind(pytestconfig): local_command(stop_cmd) # note, it is better to clean out ~/.bitcoin/regtest but too # dangerous to automate it here perhaps + + 
+@pytest.fixture(scope="session") +def setup_regtest_taproot_bitcoind(pytestconfig): + """ + Setup regtest bitcoind and handle its clean up. + """ + conf = pytestconfig.getoption("--btcconf") + rpcuser = pytestconfig.getoption("--btcuser") + rpcpassword = pytestconfig.getoption("--btcpwd") + bitcoin_path = pytestconfig.getoption("--btcroot") + bitcoind_path = os.path.join(bitcoin_path, "bitcoind") + bitcoincli_path = os.path.join(bitcoin_path, "bitcoin-cli") + start_cmd = f'{bitcoind_path} -regtest -daemon -txindex -conf={conf}' + stop_cmd = f'{bitcoincli_path} -regtest -rpcuser={rpcuser} -rpcpassword={rpcpassword} stop' + + # determine bitcoind version + try: + bitcoind_version = get_bitcoind_version(bitcoind_path, conf) + except RuntimeError as exc: + pytest.exit(f"Cannot setup tests, bitcoind failing.\n{exc}") + + if bitcoind_version[0] >= 26: + start_cmd += ' -allowignoredconf=1' + local_command(start_cmd, bg=True) + root_cmd = f'{bitcoincli_path} -regtest -rpcuser={rpcuser} -rpcpassword={rpcpassword}' + wallet_name = 'jm-test-taproot-wallet' + # Bitcoin Core v0.21+ does not create default wallet + # From Bitcoin Core 0.21.0 there is support for descriptor wallets, which + # are default from 23.x+ (including 22.99.0 development versions). + # We don't support descriptor wallets yet. 
+ if bitcoind_version[0] >= 27: + create_wallet = (f'{root_cmd} -rpcwait -named createwallet ' + f'wallet_name={wallet_name} descriptors=true') + else: + pytest.exit("Cannot setup tests, bitcoind version " + "must be 27 or greater.\n") + local_command(create_wallet) + local_command(f'{root_cmd} loadwallet {wallet_name}') + for i in range(2): + cpe = local_command(f'{root_cmd} -rpcwallet={wallet_name} getnewaddress') + if cpe.returncode != 0: + pytest.exit(f"Cannot setup tests, bitcoin-cli failing.\n{cpe.stdout.decode('utf-8')}") + destn_addr = cpe.stdout[:-1].decode('utf-8') + local_command(f'{root_cmd} -rpcwallet={wallet_name} generatetoaddress 301 {destn_addr}') + sleep(1) + yield + # shut down bitcoind + local_command(stop_cmd) + # note, it is better to clean out ~/.bitcoin/regtest but too + # dangerous to automate it here perhaps + + +@pytest.fixture(scope="session") +def setup_regtest_frost_bitcoind(pytestconfig): + """ + Setup regtest bitcoind and handle its clean up. + """ + conf = pytestconfig.getoption("--btcconf") + rpcuser = pytestconfig.getoption("--btcuser") + rpcpassword = pytestconfig.getoption("--btcpwd") + bitcoin_path = pytestconfig.getoption("--btcroot") + bitcoind_path = os.path.join(bitcoin_path, "bitcoind") + bitcoincli_path = os.path.join(bitcoin_path, "bitcoin-cli") + start_cmd = f'{bitcoind_path} -regtest -daemon -txindex -conf={conf}' + stop_cmd = f'{bitcoincli_path} -regtest -rpcuser={rpcuser} -rpcpassword={rpcpassword} stop' + + # determine bitcoind version + try: + bitcoind_version = get_bitcoind_version(bitcoind_path, conf) + except RuntimeError as exc: + pytest.exit(f"Cannot setup tests, bitcoind failing.\n{exc}") + + if bitcoind_version[0] >= 26: + start_cmd += ' -allowignoredconf=1' + local_command(start_cmd, bg=True) + root_cmd = f'{bitcoincli_path} -regtest -rpcuser={rpcuser} -rpcpassword={rpcpassword}' + wallet_name = 'jm-test-frost-wallet' + # Bitcoin Core v0.21+ does not create default wallet + # From Bitcoin Core 0.21.0 there 
is support for descriptor wallets, which + # are default from 23.x+ (including 22.99.0 development versions). + # We don't support descriptor wallets yet. + if bitcoind_version[0] >= 27: + create_wallet = (f'{root_cmd} -rpcwait -named createwallet ' + f'wallet_name={wallet_name} descriptors=true') + else: + pytest.exit("Cannot setup tests, bitcoind version " + "must be 27 or greater.\n") + local_command(create_wallet) + local_command(f'{root_cmd} loadwallet {wallet_name} true true') + yield + # shut down bitcoind + local_command(stop_cmd) + # note, it is better to clean out ~/.bitcoin/regtest but too + # dangerous to automate it here perhaps diff --git a/docs/frost-wallet-dev.md b/docs/frost-wallet-dev.md index 6a1ba7d..d8d73ef 100644 --- a/docs/frost-wallet-dev.md +++ b/docs/frost-wallet-dev.md @@ -13,6 +13,14 @@ usage. Usual wallet usage interact with FROST/DKG functionality via IPC code in `frost_ipc.py` (currently `AF_UNIX` socket for simplicity). +`jmclient.wallet_utils.open_wallet` has two new parameters: +- `load_dkg=False`: by default do not load `DKGStorage` +- `dkg_read_only=True`: load `DKGStorage` for read only commands + +Additionally `open_wallet` params `read_only` and `dkg_read_only` can not +be mutually unset by design. 
+ + ## Structure of DKG data in the DKGStorage ``` diff --git a/scripts/joinmarket-qt.py b/scripts/joinmarket-qt.py index 52aa7c9..fc3a0c4 100755 --- a/scripts/joinmarket-qt.py +++ b/scripts/joinmarket-qt.py @@ -2462,7 +2462,7 @@ if isinstance(jm_single().bc_interface, RegtestBitcoinCoreInterface): #trigger start with a fake tx jm_single().bc_interface.pushtx(b"\x00"*20) -logsdir = os.path.join(os.path.dirname(jm_single().config_location), "logs") +logsdir = os.path.join(jm_single().datadir, "logs") #tumble log will not always be used, but is made available anyway: tumble_log = get_tumble_log(logsdir) #ignored makers list persisted across entire app run diff --git a/scripts/tumbler.py b/scripts/tumbler.py index 4827886..420d8d3 100755 --- a/scripts/tumbler.py +++ b/scripts/tumbler.py @@ -33,8 +33,7 @@ async def main(): jmprint('Error: Needs a wallet file', "error") sys.exit(EXIT_ARGERROR) load_program_config(config_path=options['datadir']) - logsdir = os.path.join(os.path.dirname( - jm_single().config_location), "logs") + logsdir = os.path.join(jm_single().datadir, "logs") tumble_log = get_tumble_log(logsdir) if jm_single().bc_interface is None: diff --git a/src/jmclient/__init__.py b/src/jmclient/__init__.py index 7e868a6..ad22ade 100644 --- a/src/jmclient/__init__.py +++ b/src/jmclient/__init__.py @@ -26,7 +26,8 @@ from .wallet import (Mnemonic, estimate_tx_fee, WalletError, BaseWallet, ImportW SegwitWallet, SegwitLegacyWallet, FidelityBondMixin, FidelityBondWatchonlyWallet, SegwitWalletFidelityBonds, UTXOManager, WALLET_IMPLEMENTATIONS, compute_tx_locktime, - UnknownAddressForLabel, TaprootWallet, FrostWallet) + UnknownAddressForLabel, TaprootWallet, FrostWallet, + TaprootWalletFidelityBonds, DKGManager) from .storage import (Argon2Hash, Storage, StorageError, RetryableStorageError, StoragePasswordError, VolatileStorage, DKGStorage, DKGRecoveryStorage) @@ -83,7 +84,7 @@ from .websocketserver import JmwalletdWebSocketServerFactory, \ from .wallet_rpc import 
JMWalletDaemon from .bond_calc import get_bond_values from .frost_clients import FROSTClient -from .frost_ipc import FrostIPCClient +from .frost_ipc import FrostIPCServer, FrostIPCClient # Set default logging handler to avoid "No handler found" warnings. try: diff --git a/src/jmclient/configure.py b/src/jmclient/configure.py index 18370ef..490294b 100644 --- a/src/jmclient/configure.py +++ b/src/jmclient/configure.py @@ -46,8 +46,7 @@ class AttributeDict(object): logFormatter = logging.Formatter( ('%(asctime)s [%(threadName)-12.12s] ' '[%(levelname)-5.5s] %(message)s')) - logsdir = os.path.join(os.path.dirname( - global_singleton.config_location), "logs") + logsdir = os.path.join(global_singleton.datadir, "logs") fileHandler = logging.FileHandler( logsdir + '/{}.log'.format(value)) fileHandler.setFormatter(logFormatter) @@ -77,7 +76,7 @@ global_singleton.joinmarket_alert = joinmarket_alert global_singleton.debug_silence = debug_silence global_singleton.config = ConfigParser(strict=False) #This is reset to a full path after load_program_config call -global_singleton.config_location = 'joinmarket.cfg' +global_singleton.config_fname = 'joinmarket.cfg' #as above global_singleton.commit_file_location = 'cmtdata/commitments.json' global_singleton.wait_for_commitments = 0 @@ -698,13 +697,12 @@ def load_program_config(config_path: str = "", bs: Optional[str] = None, os.makedirs(os.path.join(global_singleton.datadir, "logs")) if not os.path.exists(os.path.join(global_singleton.datadir, "cmtdata")): os.makedirs(os.path.join(global_singleton.datadir, "cmtdata")) - global_singleton.config_location = os.path.join( - global_singleton.datadir, global_singleton.config_location) + config_location = os.path.join( + global_singleton.datadir, global_singleton.config_fname) _remove_unwanted_default_settings(global_singleton.config) try: - loadedFiles = global_singleton.config.read( - [global_singleton.config_location]) + loadedFiles = global_singleton.config.read([config_location]) 
except UnicodeDecodeError: jmprint("Error loading `joinmarket.cfg`, invalid file format.", "info") @@ -717,7 +715,7 @@ def load_program_config(config_path: str = "", bs: Optional[str] = None, global_singleton.config.set("BLOCKCHAIN", "blockchain_source", bs) # Create default config file if not found if len(loadedFiles) != 1: - with open(global_singleton.config_location, "w") as configfile: + with open(config_location, "w") as configfile: configfile.write(defaultconfig) jmprint("Created a new `joinmarket.cfg`. Please review and adopt the " "settings and restart joinmarket.", "info") @@ -787,8 +785,7 @@ def load_program_config(config_path: str = "", bs: Optional[str] = None, # and setting that in the plugin object; the plugin # itself will switch on its own logging when ready, # attaching a filehandler to the global log. - plogsdir = os.path.join(os.path.dirname( - global_singleton.config_location), "logs", p.name) + plogsdir = os.path.join(global_singleton.datadir, "logs", p.name) if not os.path.exists(plogsdir): os.makedirs(plogsdir) p.set_log_dir(plogsdir) @@ -851,7 +848,8 @@ def _get_bitcoin_rpc_credentials(_config: ConfigParser) -> Tuple[str, str]: raise ValueError("Invalid RPC auth credentials `rpc_user` and `rpc_password`") return rpc_user, rpc_password -def get_blockchain_interface_instance(_config: ConfigParser): +def get_blockchain_interface_instance(_config: ConfigParser, *, + rpc_wallet_name=None): # todo: refactor joinmarket module to get rid of loops # importing here is necessary to avoid import loops from jmclient.blockchaininterface import BitcoinCoreInterface, \ @@ -876,8 +874,21 @@ def get_blockchain_interface_instance(_config: ConfigParser): else: raise ValueError('wrong network configured: ' + network) rpc_user, rpc_password = _get_bitcoin_rpc_credentials(_config) - rpc_wallet_file = _config.get("BLOCKCHAIN", "rpc_wallet_file") + if rpc_wallet_name is not None: + rpc_wallet_file = rpc_wallet_name + else: + rpc_wallet_file = 
_config.get("BLOCKCHAIN", "rpc_wallet_file") rpc = JsonRpc(rpc_host, rpc_port, rpc_user, rpc_password) + # code for TaprootWallet testing + if rpc_wallet_name and source in ['bitcoin-rpc', 'regtest', + 'bitcoin-rpc-no-history']: + # create wallet with disable_private_keys=True, blank=True + rpc.call('createwallet', [rpc_wallet_name, True, True]) + loaded_wallets = rpc.call("listwallets", []) + if not rpc_wallet_name in loaded_wallets: + log.info(f"Loading Bitcoin RPC wallet {rpc_wallet_name }...") + rpc.call('loadwallet', [rpc_wallet_name]) + log.info("Done.") if source == 'bitcoin-rpc': #pragma: no cover bc_interface = BitcoinCoreInterface(rpc, network, rpc_wallet_file) @@ -933,7 +944,9 @@ def update_persist_config(section: str, name: str, value: Any) -> bool: sectionname = None newlines = [] match_found = False - with open(jm_single().config_location, "r") as f: + config_location = os.path.join(jm_single().datadir, + jm_single().config_fname) + with open(config_location, "r") as f: for line in f.readlines(): newline = line # ignore comment lines @@ -956,7 +969,7 @@ def update_persist_config(section: str, name: str, value: Any) -> bool: return False # success: update in-mem and re-persist jm_single().config.set(section, name, value) - with open(jm_single().config_location, "wb") as f: + with open(config_location, "wb") as f: f.writelines([x.encode("utf-8") for x in newlines]) return True diff --git a/src/jmclient/cryptoengine.py b/src/jmclient/cryptoengine.py index 5ce191c..ce9b77c 100644 --- a/src/jmclient/cryptoengine.py +++ b/src/jmclient/cryptoengine.py @@ -19,7 +19,7 @@ from .configure import get_network, jm_single TYPE_P2PKH, TYPE_P2SH_P2WPKH, TYPE_P2WPKH, TYPE_P2SH_M_N, TYPE_TIMELOCK_P2WSH, \ TYPE_SEGWIT_WALLET_FIDELITY_BONDS, TYPE_WATCHONLY_FIDELITY_BONDS, \ TYPE_WATCHONLY_TIMELOCK_P2WSH, TYPE_WATCHONLY_P2WPKH, TYPE_P2WSH, \ - TYPE_P2TR, TYPE_P2TR_FROST = range(12) + TYPE_P2TR, TYPE_P2TR_FROST, TYPE_TAPROOT_WALLET_FIDELITY_BONDS = range(13) NET_MAINNET, 
NET_TESTNET, NET_SIGNET = range(3) NET_MAP = {'mainnet': NET_MAINNET, 'testnet': NET_TESTNET, 'signet': NET_SIGNET} @@ -532,4 +532,5 @@ ENGINES = { TYPE_SEGWIT_WALLET_FIDELITY_BONDS: BTC_P2WPKH, TYPE_P2TR: BTC_P2TR, TYPE_P2TR_FROST: BTC_P2TR_FROST, + TYPE_TAPROOT_WALLET_FIDELITY_BONDS: BTC_P2TR, } diff --git a/src/jmclient/frost_clients.py b/src/jmclient/frost_clients.py index 81f8de7..cab2925 100644 --- a/src/jmclient/frost_clients.py +++ b/src/jmclient/frost_clients.py @@ -451,8 +451,11 @@ class DKGClient: if ready_list and len(ready_list) == len(self.hostpubkeys) - 1: ext_recovery = coordinator.ext_recovery self.finalize(session_id, coordinator.cmsg2, ext_recovery) + return True + return False except Exception as e: jlog.error(f'on_dkg_finalized: {repr(e)}') + return False async def wait_on_dkg_output(self, session_id): try: @@ -733,7 +736,8 @@ class FROSTClient(DKGClient): if p == hostpubkey: self.my_id = (i+1).to_bytes(32, 'big') break - assert self.my_id is not None + assert self.my_id is not None, (f'unknown hostpubkey ' + f'{hostpubkey.hex()}') hostpubkeyhash = sha256(hostpubkey).digest() session_id = sha256(os.urandom(32)).digest() coordinator = FROSTCoordinator(session_id=session_id, @@ -822,7 +826,7 @@ class FROSTClient(DKGClient): return None, None, None, None, None pubkey = self.find_pubkey_by_pubkeyhash(pubkeyhash) if not pubkey: - raise Exception(f'pubkey for {pubkeyhash.hex()} not found') + raise Exception(f'pubkey for {pubkeyhash} not found') xpubkey = XOnlyPubKey(pubkey[1:]) if not xpubkey.verify_schnorr(session_id, hextobin(sig)): raise Exception(f'signature verification failed') @@ -905,9 +909,6 @@ class FROSTClient(DKGClient): raise Exception(f'secshare not found for ' f'{dkg_session_id.hex()}') _pubshares = dkg._dkg_pubshares.get(dkg_session_id) - if not _pubshares: - raise Exception(f'pubshares not found for ' - f'{dkg_session_id.hex()}') pubshares = [] for i, pubshare in enumerate(_pubshares): if (i+1) not in ids: diff --git 
a/src/jmclient/frost_ipc.py b/src/jmclient/frost_ipc.py index 1d9767e..5d9642f 100644 --- a/src/jmclient/frost_ipc.py +++ b/src/jmclient/frost_ipc.py @@ -100,7 +100,10 @@ class FrostIPCServer(IPCBase): new_pubkey = dkg.find_dkg_pubkey(mixdepth, address_type, index) if new_pubkey: await self.send_dkg_pubkey(msg_id, new_pubkey) + else: + raise Exception('No pubkey found or generated') except Exception as e: + await self.send_dkg_pubkey(msg_id, None) jlog.error(f'FrostIPCServer.on_get_dkg_pubkey: {repr(e)}') async def send_dkg_pubkey(self, msg_id, pubkey): @@ -129,6 +132,7 @@ class FrostIPCServer(IPCBase): await self.send_frost_sig(msg_id, sig, pubkey, tweaked_pubkey) except Exception as e: jlog.error(f'FrostIPCServer.on_frost_sign: {repr(e)}') + await self.send_frost_sig(msg_id, None, None, None) async def send_frost_sig(self, msg_id, sig, pubkey, tweaked_pubkey): try: @@ -215,7 +219,15 @@ class FrostIPCClient(IPCBase): self.msg_futures[self.msg_id] = fut await fut pubkey = fut.result() - jlog.debug('FrostIPCClient.get_dkg_pubkey successfully got pubkey') + if pubkey is None: + jlog.error( + f'FrostIPCClient.get_dkg_pubkey got None pubkey from ' + f'FrostIPCServer for mixdepth={mixdepth}, ' + f'address_type={address_type}, index={index}') + return pubkey + jlog.debug(f'FrostIPCClient.get_dkg_pubkey successfully got ' + f'pubkey for mixdepth={mixdepth}, ' + f'address_type={address_type}, index={index}') return pubkey except Exception as e: jlog.error(f'FrostIPCClient.get_dkg_pubkey: {repr(e)}') @@ -237,7 +249,17 @@ class FrostIPCClient(IPCBase): self.msg_futures[self.msg_id] = fut await fut sig, pubkey, tweaked_pubkey = fut.result() - jlog.debug('FrostIPCClient.frost_sign successfully got signature') + if sig is None: + jlog.error( + f'FrostIPCClient.frost_sign got None sig value from ' + f'FrostIPCServer for mixdepth={mixdepth}, ' + f'address_type={address_type}, index={index}, ' + f'sighash={sighash.hex()}') + return sig, pubkey, tweaked_pubkey + jlog.debug( + 
f'FrostIPCClient.frost_sign successfully got signature ' + f'for mixdepth={mixdepth}, address_type={address_type}, ' + f'index={index}, sighash={sighash.hex()}') return sig, pubkey, tweaked_pubkey except Exception as e: jlog.error(f'FrostIPCClient.frost_sign: {repr(e)}') diff --git a/src/jmclient/wallet.py b/src/jmclient/wallet.py index d5dd475..68dc06a 100644 --- a/src/jmclient/wallet.py +++ b/src/jmclient/wallet.py @@ -30,7 +30,7 @@ from .cryptoengine import TYPE_P2PKH, TYPE_P2SH_P2WPKH, TYPE_P2WSH,\ TYPE_P2WPKH, TYPE_TIMELOCK_P2WSH, TYPE_SEGWIT_WALLET_FIDELITY_BONDS,\ TYPE_WATCHONLY_FIDELITY_BONDS, TYPE_WATCHONLY_TIMELOCK_P2WSH, \ TYPE_WATCHONLY_P2WPKH, TYPE_P2TR, TYPE_P2TR_FROST, ENGINES, \ - detect_script_type, EngineError + detect_script_type, EngineError, TYPE_TAPROOT_WALLET_FIDELITY_BONDS from .storage import DKGRecoveryStorage from .support import get_random_bytes from . import mn_encode, mn_decode @@ -526,7 +526,7 @@ class DKGManager: return self._dkg_pubkey.get(session) def add_party_data(self, *, session_id, dkg_output, hostpubkeys, t, - recovery_data, ext_recovery): + recovery_data, ext_recovery, save_dkg=True): assert isinstance(dkg_output, tuple) assert isinstance(dkg_output.secshare, bytes) assert len(dkg_output.secshare) == 32 @@ -547,12 +547,13 @@ class DKGManager: self._dkg_t[session_id] = t recovery_dkg = self.recovery_storage.data[self.RECOVERY_STORAGE_KEY] - recovery_dkg[session_id] = (ext_recovery, recovery_data) + recovery_dkg[session_id] = [ext_recovery, recovery_data] - self.save() + if save_dkg: + self.save() def add_coordinator_data(self, *, session_id, dkg_output, hostpubkeys, t, - recovery_data, ext_recovery): + recovery_data, ext_recovery, save_dkg=True): assert isinstance(dkg_output, tuple) assert isinstance(dkg_output.secshare, bytes) assert len(dkg_output.secshare) == 32 @@ -583,9 +584,10 @@ class DKGManager: self._dkg_sessions[md_type_idx] = session_id recovery_dkg = self.recovery_storage.data[self.RECOVERY_STORAGE_KEY] - 
recovery_dkg[session_id] = (ext_recovery, recovery_data) + recovery_dkg[session_id] = [ext_recovery, recovery_data] - self.save() + if save_dkg: + self.save() async def dkg_recover(self, dkgrec_path): rec_storage = DKGRecoveryStorage( @@ -660,14 +662,13 @@ class DKGManager: self._dkg_pubkey.pop(c, None) self._dkg_hostpubkeys.pop(c, None) self._dkg_t.pop(c, None) - self.save() - for md_type_idx in list(self._dkg_sessions.keys()): - if self._dkg_sessions[md_type_idx] == c: - self._dkg_sessions.pop(md_type_idx) - if c in self._dkg_secshare: res += f'dkg data for session {sess_id} deleted\n' else: res +=f'not found dkg data for session {sess_id}\n' + for md_type_idx in list(self._dkg_sessions.keys()): + if self._dkg_sessions[md_type_idx] == c: + res += f'session data for session {sess_id} deleted\n' + self._dkg_sessions.pop(md_type_idx) self.save() return res except Exception as e: @@ -706,23 +707,22 @@ class DKGManager: f'\nNot decrypted sesions:\n{json.dumps(enc_res, indent=4)}') def recdkg_rm(self, session_ids: list): - res = '' - rm_sess_ids = [] - not_found_ids = [] - recovery_dkg = self.recovery_storage.data[self.RECOVERY_STORAGE_KEY] - for session_id, (ext_recovery, recovery_data) in recovery_dkg.items(): - if session_id in session_ids: - rm_sess_ids.append(session_id) - else: - not_found_ids.append(session_id) - for session_id in rm_sess_ids: - del recovery_dkg[session_id] - res += (f'dkg recovery data for session {session_id.hex()}' - f' deleted\n') - for session_id in not_found_ids: - res += f'not found dkg data for session {session_id.hex()}\n' - self.save() - return res + try: + res = '' + rec_dkg = self.recovery_storage.data[self.RECOVERY_STORAGE_KEY] + for sess_id in session_ids: + c = hextobin(sess_id) + if c in rec_dkg.keys(): + rec_dkg.pop(c, None) + res += (f'dkg recovery data for session {sess_id}' + f' deleted\n') + else: + res += (f'not found dkg recovery data for session' + f' {sess_id}\n') + self.save() + return res + except Exception as e: + 
jmprint(f'error: {repr(e)}', 'error') class BaseWallet(object): @@ -870,7 +870,8 @@ class BaseWallet(object): return 'p2pkh' elif self.TYPE == TYPE_P2SH_P2WPKH: return 'p2sh-p2wpkh' - elif self.TYPE in (TYPE_P2TR, TYPE_P2TR_FROST): + elif self.TYPE in (TYPE_P2TR, TYPE_TAPROOT_WALLET_FIDELITY_BONDS, + TYPE_P2TR_FROST): return 'p2tr' elif self.TYPE in (TYPE_P2WPKH, TYPE_SEGWIT_WALLET_FIDELITY_BONDS): @@ -3305,9 +3306,12 @@ class SegwitWallet(ImportWalletMixin, BIP39WalletMixin, PSBTWalletMixin, SNICKER class SegwitWalletFidelityBonds(FidelityBondMixin, SegwitWallet): TYPE = TYPE_SEGWIT_WALLET_FIDELITY_BONDS -class TaprootWallet(BIP39WalletMixin, BIP86Wallet): +class TaprootWallet(ImportWalletMixin, BIP39WalletMixin, PSBTWalletMixin, SNICKERWalletMixin, BIP86Wallet): TYPE = TYPE_P2TR +class TaprootWalletFidelityBonds(FidelityBondMixin, TaprootWallet): + TYPE = TYPE_TAPROOT_WALLET_FIDELITY_BONDS + class BIP32FrostMixin(BaseWallet): diff --git a/src/jmclient/wallet_rpc.py b/src/jmclient/wallet_rpc.py index e3df708..4e4c062 100644 --- a/src/jmclient/wallet_rpc.py +++ b/src/jmclient/wallet_rpc.py @@ -526,7 +526,7 @@ class JMWalletDaemon(Service): # We're running the tumbler. 
assert self.tumble_log is not None - logsdir = os.path.join(os.path.dirname(jm_single().config_location), "logs") + logsdir = os.path.join(jm_single().datadir, "logs") sfile = os.path.join(logsdir, self.tumbler_options['schedulefile']) tumbler_taker_finished_update(self.taker, sfile, self.tumble_log, self.tumbler_options, res, fromtx, waittime, txdetails) @@ -1467,8 +1467,7 @@ class JMWalletDaemon(Service): except ScheduleGenerationErrorNoFunds: raise NotEnoughCoinsForTumbler() - logsdir = os.path.join(os.path.dirname(jm_single().config_location), - "logs") + logsdir = os.path.join(jm_single().datadir, "logs") sfile = os.path.join(logsdir, tumbler_options['schedulefile']) with open(sfile, "wb") as f: f.write(schedule_to_text(schedule)) @@ -1521,7 +1520,7 @@ class JMWalletDaemon(Service): if not self.tumbler_options or not self.coinjoin_state == CJ_TAKER_RUNNING: return make_jmwalletd_response(request, status=404) - logsdir = os.path.join(os.path.dirname(jm_single().config_location), "logs") + logsdir = os.path.join(jm_single().datadir, "logs") sfile = os.path.join(logsdir, self.tumbler_options['schedulefile']) res, schedule = get_schedule(sfile) diff --git a/test/jmclient/test_configure.py b/test/jmclient/test_configure.py index 5c9ae93..62c9347 100644 --- a/test/jmclient/test_configure.py +++ b/test/jmclient/test_configure.py @@ -16,15 +16,6 @@ def test_attribute_dict(): assert ad["foo"] == 1 -def test_load_config(tmpdir): - load_test_config(bs="regtest") - jm_single().config_location = "joinmarket.cfg" - with pytest.raises(SystemExit): - load_test_config(config_path=str(tmpdir), bs="regtest") - jm_single().config_location = "joinmarket.cfg" - load_test_config() - - def test_blockchain_sources(): load_test_config() for src in ["dummy"]: diff --git a/test/jmclient/test_frost_clients.py b/test/jmclient/test_frost_clients.py new file mode 100644 index 0000000..c133424 --- /dev/null +++ b/test/jmclient/test_frost_clients.py @@ -0,0 +1,964 @@ +# -*- coding: utf-8 -*- + 
+from pprint import pprint + +from hashlib import sha256 +from unittest import IsolatedAsyncioTestCase + +import jmclient # install asyncioreactor +from twisted.internet import reactor + +import pytest + +import jmbitcoin as btc +from jmbase import get_log +from jmclient import ( + load_test_config, jm_single, get_network, cryptoengine, VolatileStorage, + FrostWallet, WalletService) +from jmclient.frost_clients import DKGClient, FROSTClient +from jmfrost.chilldkg_ref.chilldkg import ( + hostpubkey_gen, ParticipantMsg1, CoordinatorMsg1, ParticipantMsg2, + CoordinatorMsg2) + + +pytestmark = pytest.mark.usefixtures("setup_regtest_frost_bitcoind") + +log = get_log() + + +async def get_populated_wallet(entropy=None): + storage = VolatileStorage() + dkg_storage = VolatileStorage() + recovery_storage = VolatileStorage() + FrostWallet.initialize(storage, dkg_storage, recovery_storage, + get_network(), entropy=entropy) + wlt = FrostWallet(storage, dkg_storage, recovery_storage) + await wlt.async_init(storage) + return wlt + + +async def populate_dkg_session(test_case): + dkgc1 = DKGClient(test_case.wlt_svc1) + dkgc2 = DKGClient(test_case.wlt_svc2) + dkgc3 = DKGClient(test_case.wlt_svc3) + hostpubkeyhash_hex, session_id, sig_hex = dkgc1.dkg_init(0, 0, 0) + + ( + nick1, + hostpubkeyhash2_hex, + session_id2_hex, + sig2_hex, + pmsg1_2 + ) = dkgc2.on_dkg_init( + test_case.nick1, hostpubkeyhash_hex, session_id, sig_hex) + pmsg1_2 = dkgc2.deserialize_pmsg1(pmsg1_2) + + ( + nick1, + hostpubkeyhash3_hex, + session_id3_hex, + sig3_hex, + pmsg1_3 + ) = dkgc3.on_dkg_init( + test_case.nick1, hostpubkeyhash_hex, session_id, sig_hex) + pmsg1_3 = dkgc2.deserialize_pmsg1(pmsg1_3) + + ready_list, cmsg1 = dkgc1.on_dkg_pmsg1( + test_case.nick2, hostpubkeyhash2_hex, session_id, sig2_hex, pmsg1_2) + ready_list, cmsg1 = dkgc1.on_dkg_pmsg1( + test_case.nick3, hostpubkeyhash3_hex, session_id, sig3_hex, pmsg1_3) + cmsg1 = dkgc1.deserialize_cmsg1(cmsg1) + + pmsg2_2 = dkgc2.party_step2(session_id, 
cmsg1) + pmsg2_2 = dkgc2.deserialize_pmsg2(pmsg2_2) + pmsg2_3 = dkgc3.party_step2(session_id, cmsg1) + pmsg2_3 = dkgc3.deserialize_pmsg2(pmsg2_3) + + ready_list, cmsg2, ext_recovery = dkgc1.on_dkg_pmsg2( + test_case.nick2, session_id, pmsg2_2) + ready_list, cmsg2, ext_recovery = dkgc1.on_dkg_pmsg2( + test_case.nick3, session_id, pmsg2_3) + cmsg2 = dkgc3.deserialize_cmsg2(cmsg2) + + assert dkgc2.finalize(session_id, cmsg2, ext_recovery) + assert dkgc3.finalize(session_id, cmsg2, ext_recovery) + dkgc1.on_dkg_finalized(test_case.nick2, session_id) + dkgc1.on_dkg_finalized(test_case.nick3, session_id) + return session_id + + +class DKGClientTestCaseBase(IsolatedAsyncioTestCase): + + def setUp(self): + load_test_config(config_path='./test_frost') + btc.select_chain_params("bitcoin/regtest") + cryptoengine.BTC_P2TR.VBYTE = 100 + jm_single().bc_interface.tick_forward_chain_interval = 2 + + async def asyncSetUp(self): + entropy1 = bytes.fromhex('8e5e5677fb302874a607b63ad03ba434') + entropy2 = bytes.fromhex('38dfa80fbb21b32b2b2740e00a47de9d') + entropy3 = bytes.fromhex('3ad9c77fcd1d537b6ef396952d1221a0') + # entropy4 wor wallet with hospubkey not in joinmarket.cfg + entropy4 = bytes.fromhex('ce88b87f6c85d651e416b8173ab95e57') + self.wlt1 = await get_populated_wallet(entropy1) + self.hostpubkey1 = hostpubkey_gen(self.wlt1._hostseckey[:32]) + self.wlt_svc1 = WalletService(self.wlt1) + self.wlt2 = await get_populated_wallet(entropy2) + self.hostpubkey2 = hostpubkey_gen(self.wlt2._hostseckey[:32]) + self.wlt_svc2 = WalletService(self.wlt2) + self.wlt3 = await get_populated_wallet(entropy3) + self.hostpubkey3 = hostpubkey_gen(self.wlt3._hostseckey[:32]) + self.wlt_svc3 = WalletService(self.wlt3) + self.wlt4= await get_populated_wallet(entropy4) + self.hostpubkey4 = hostpubkey_gen(self.wlt4._hostseckey[:32]) + self.wlt_svc4 = WalletService(self.wlt4) + self.nick1, self.nick2, self.nick3, self.nick4 = [ + 'nick1', 'nick2', 'nick3', 'nick4' + ] + + +class 
DKGClientTestCase(DKGClientTestCaseBase): + + async def test_dkg_init(self): + # test wallet with unknown hostpubkey + dkgc1 = DKGClient(self.wlt_svc4) + hostpubkeyhash_hex, session_id, sig_hex = dkgc1.dkg_init(0, 0, 0) + assert hostpubkeyhash_hex is None + assert session_id is None + assert sig_hex is None + + dkgc1 = DKGClient(self.wlt_svc1) + hostpubkeyhash_hex, session_id, sig_hex = dkgc1.dkg_init(0, 0, 0) + assert hostpubkeyhash_hex and len(hostpubkeyhash_hex) == 64 + assert session_id and len(session_id) == 32 + assert sig_hex and len(sig_hex) == 128 + + async def test_on_dkg_init(self): + dkgc1 = DKGClient(self.wlt_svc1) + dkgc2 = DKGClient(self.wlt_svc2) + hostpubkeyhash_hex, session_id, sig_hex = dkgc1.dkg_init(0, 0, 0) + + # fail with wrong pubkeyhash + hostpubkeyhash4_hex = sha256(self.hostpubkey4).digest() + ( + nick1, + hostpubkeyhash2_hex, + session_id2_hex, + sig2_hex, + pmsg1 + ) = dkgc2.on_dkg_init( + self.nick1, hostpubkeyhash4_hex, session_id, sig_hex) + for v in [nick1, hostpubkeyhash2_hex, session_id2_hex, + sig2_hex, pmsg1]: + assert v is None + + # fail with wrong sig + ( + nick1, + hostpubkeyhash2_hex, + session_id2_hex, + sig2_hex, + pmsg1 + ) = dkgc2.on_dkg_init( + self.nick1, hostpubkeyhash_hex, session_id, '01020304'*16) + for v in [nick1, hostpubkeyhash2_hex, session_id2_hex, + sig2_hex, pmsg1]: + assert v is None + + ( + nick1, + hostpubkeyhash2_hex, + session_id2_hex, + sig2_hex, + pmsg1 + ) = dkgc2.on_dkg_init( + self.nick1, hostpubkeyhash_hex, session_id, sig_hex) + assert nick1 == self.nick1 + assert hostpubkeyhash2_hex and len(hostpubkeyhash2_hex) == 64 + assert session_id2_hex and len(session_id2_hex) == 64 + assert bytes.fromhex(session_id2_hex) == session_id + assert sig_hex and len(sig_hex) == 128 + assert pmsg1 is not None + + # fail on second call with right params + ( + nick1, + hostpubkeyhash2_hex, + session_id2_hex, + sig2_hex, + pmsg1 + ) = dkgc2.on_dkg_init( + self.nick1, hostpubkeyhash_hex, session_id, sig_hex) + for v 
in [nick1, hostpubkeyhash2_hex, session_id2_hex, + sig2_hex, pmsg1]: + assert v is None + + async def test_party_step1(self): + dkgc1 = DKGClient(self.wlt_svc1) + dkgc2 = DKGClient(self.wlt_svc2) + hostpubkeyhash_hex, session_id, sig_hex = dkgc1.dkg_init(0, 0, 0) + + ( + nick1, + hostpubkeyhash2_hex, + session_id2_hex, + sig2_hex, + pmsg1 + ) = dkgc2.on_dkg_init( + self.nick1, hostpubkeyhash_hex, session_id, sig_hex) + + # fail with unknown session_id + pmsg1 = dkgc2.party_step1(b'\x05'*32) + assert pmsg1 is None + + # fail when session.state1 aleready set + pmsg1 = dkgc2.party_step1(session_id) + assert pmsg1 is None + + session = dkgc2.dkg_sessions.get(session_id) + session.state1 = None + pmsg1 = dkgc2.party_step1(session_id) + assert pmsg1 is not None + assert isinstance(pmsg1, bytes) + + session.state1 = None + pmsg1 = dkgc2.party_step1(session_id, serialize=False) + assert pmsg1 is not None + assert isinstance(pmsg1, ParticipantMsg1) + + def test_on_dkg_pmsg1(self): + dkgc1 = DKGClient(self.wlt_svc1) + dkgc2 = DKGClient(self.wlt_svc2) + dkgc3 = DKGClient(self.wlt_svc3) + hostpubkeyhash_hex, session_id, sig_hex = dkgc1.dkg_init(0, 0, 0) + + ( + nick1, + hostpubkeyhash2_hex, + session_id2_hex, + sig2_hex, + pmsg1_2 + ) = dkgc2.on_dkg_init( + self.nick1, hostpubkeyhash_hex, session_id, sig_hex) + pmsg1_2 = dkgc2.deserialize_pmsg1(pmsg1_2) + + ( + nick1, + hostpubkeyhash3_hex, + session_id3_hex, + sig3_hex, + pmsg1_3 + ) = dkgc3.on_dkg_init( + self.nick1, hostpubkeyhash_hex, session_id, sig_hex) + pmsg1_3 = dkgc2.deserialize_pmsg1(pmsg1_3) + + # party2 added pmsg1, no ready_list, no cmsg1 returned yet + ready_list, cmsg1 = dkgc1.on_dkg_pmsg1( + self.nick2, hostpubkeyhash2_hex, session_id, sig2_hex, pmsg1_2) + assert ready_list is None + assert cmsg1 is None + + # unknown coordinator session + ready_list, cmsg1 = dkgc1.on_dkg_pmsg1( + self.nick3, hostpubkeyhash3_hex, b'\xaa'*32, sig3_hex, pmsg1_3) + assert ready_list is None + assert cmsg1 is None + + # unknown 
pubkeyhash + ready_list, cmsg1 = dkgc1.on_dkg_pmsg1( + self.nick3, b'\xaa'*32, session_id, sig3_hex, pmsg1_3) + assert ready_list is None + assert cmsg1 is None + + # wrong sig + ready_list, cmsg1 = dkgc1.on_dkg_pmsg1( + self.nick3, hostpubkeyhash3_hex, session_id, 'aa'*64, pmsg1_3) + assert ready_list is None + assert cmsg1 is None + + ready_list, cmsg1 = dkgc1.on_dkg_pmsg1( + self.nick3, hostpubkeyhash3_hex, session_id, sig3_hex, pmsg1_3) + assert ready_list == set([self.nick2, self.nick3]) + cmsg1 = dkgc1.deserialize_cmsg1(cmsg1) + assert isinstance(cmsg1, CoordinatorMsg1) + + def test_coordinator_step1(self): + dkgc1 = DKGClient(self.wlt_svc1) + dkgc2 = DKGClient(self.wlt_svc2) + dkgc3 = DKGClient(self.wlt_svc3) + hostpubkeyhash_hex, session_id, sig_hex = dkgc1.dkg_init(0, 0, 0) + + ( + nick1, + hostpubkeyhash2_hex, + session_id2_hex, + sig2_hex, + pmsg1_2 + ) = dkgc2.on_dkg_init( + self.nick1, hostpubkeyhash_hex, session_id, sig_hex) + pmsg1_2 = dkgc2.deserialize_pmsg1(pmsg1_2) + + ( + nick1, + hostpubkeyhash3_hex, + session_id3_hex, + sig3_hex, + pmsg1_3 + ) = dkgc3.on_dkg_init( + self.nick1, hostpubkeyhash_hex, session_id, sig_hex) + pmsg1_3 = dkgc2.deserialize_pmsg1(pmsg1_3) + + ready_list, cmsg1 = dkgc1.on_dkg_pmsg1( + self.nick2, hostpubkeyhash2_hex, session_id, sig2_hex, pmsg1_2) + ready_list, cmsg1 = dkgc1.on_dkg_pmsg1( + self.nick3, hostpubkeyhash3_hex, session_id, sig3_hex, pmsg1_3) + + # unknown session_id + cmsg1 = dkgc1.coordinator_step1(b'\xaa'*32) + assert cmsg1 is None + + # coordinator.state already set + cmsg1 = dkgc1.coordinator_step1(session_id) + assert cmsg1 is None + + coordinator = dkgc1.dkg_coordinators.get(session_id) + coordinator.state = None + cmsg1 = dkgc1.coordinator_step1(session_id) + + def test_party_step2(self): + dkgc1 = DKGClient(self.wlt_svc1) + dkgc2 = DKGClient(self.wlt_svc2) + dkgc3 = DKGClient(self.wlt_svc3) + hostpubkeyhash_hex, session_id, sig_hex = dkgc1.dkg_init(0, 0, 0) + + ( + nick1, + hostpubkeyhash2_hex, + 
session_id2_hex, + sig2_hex, + pmsg1_2 + ) = dkgc2.on_dkg_init( + self.nick1, hostpubkeyhash_hex, session_id, sig_hex) + pmsg1_2 = dkgc2.deserialize_pmsg1(pmsg1_2) + + ( + nick1, + hostpubkeyhash3_hex, + session_id3_hex, + sig3_hex, + pmsg1_3 + ) = dkgc3.on_dkg_init( + self.nick1, hostpubkeyhash_hex, session_id, sig_hex) + pmsg1_3 = dkgc2.deserialize_pmsg1(pmsg1_3) + + ready_list, cmsg1 = dkgc1.on_dkg_pmsg1( + self.nick2, hostpubkeyhash2_hex, session_id, sig2_hex, pmsg1_2) + ready_list, cmsg1 = dkgc1.on_dkg_pmsg1( + self.nick3, hostpubkeyhash3_hex, session_id, sig3_hex, pmsg1_3) + cmsg1 = dkgc1.deserialize_cmsg1(cmsg1) + + # unknown session_id + pmsg2 = dkgc2.party_step2(b'\xaa'*32, cmsg1) + assert pmsg2 is None + + pmsg2 = dkgc2.party_step2(session_id, cmsg1) + assert cmsg1 is not None + pmsg2 = dkgc1.deserialize_pmsg2(pmsg2) + assert isinstance(pmsg2, ParticipantMsg2) + + # session.state2 already set + pmsg2 = dkgc2.party_step2(session_id, cmsg1) + assert pmsg2 is None + + def test_on_dkg_pmsg2(self): + dkgc1 = DKGClient(self.wlt_svc1) + dkgc2 = DKGClient(self.wlt_svc2) + dkgc3 = DKGClient(self.wlt_svc3) + hostpubkeyhash_hex, session_id, sig_hex = dkgc1.dkg_init(0, 0, 0) + + ( + nick1, + hostpubkeyhash2_hex, + session_id2_hex, + sig2_hex, + pmsg1_2 + ) = dkgc2.on_dkg_init( + self.nick1, hostpubkeyhash_hex, session_id, sig_hex) + pmsg1_2 = dkgc2.deserialize_pmsg1(pmsg1_2) + + ( + nick1, + hostpubkeyhash3_hex, + session_id3_hex, + sig3_hex, + pmsg1_3 + ) = dkgc3.on_dkg_init( + self.nick1, hostpubkeyhash_hex, session_id, sig_hex) + pmsg1_3 = dkgc2.deserialize_pmsg1(pmsg1_3) + + ready_list, cmsg1 = dkgc1.on_dkg_pmsg1( + self.nick2, hostpubkeyhash2_hex, session_id, sig2_hex, pmsg1_2) + ready_list, cmsg1 = dkgc1.on_dkg_pmsg1( + self.nick3, hostpubkeyhash3_hex, session_id, sig3_hex, pmsg1_3) + cmsg1 = dkgc1.deserialize_cmsg1(cmsg1) + + pmsg2_2 = dkgc2.party_step2(session_id, cmsg1) + pmsg2_2 = dkgc2.deserialize_pmsg2(pmsg2_2) + assert isinstance(pmsg2_2, 
ParticipantMsg2) + pmsg2_3 = dkgc3.party_step2(session_id, cmsg1) + pmsg2_3 = dkgc3.deserialize_pmsg2(pmsg2_3) + assert isinstance(pmsg2_3, ParticipantMsg2) + + # party2 added pmsg2, no ready_list, no cmsg2 returned yet + ready_list, cmsg2, ext_recovery = dkgc1.on_dkg_pmsg2( + self.nick2, session_id, pmsg2_2) + assert ready_list is None + assert cmsg2 is None + assert ext_recovery is None + + # unknown coordinator session + ready_list, cmsg2, ext_recovery = dkgc1.on_dkg_pmsg2( + self.nick3, b'\xaa'*32, pmsg2_3) + assert ready_list is None + assert cmsg2 is None + assert ext_recovery is None + + # unknown party nick + ready_list, cmsg2, ext_recovery = dkgc1.on_dkg_pmsg2( + self.nick4, session_id, pmsg2_3) + assert ready_list is None + assert cmsg2 is None + assert ext_recovery is None + + ready_list, cmsg2, ext_recovery = dkgc1.on_dkg_pmsg2( + self.nick3, session_id, pmsg2_3) + cmsg2 = dkgc1.deserialize_cmsg2(cmsg2) + assert ready_list == set([self.nick2, self.nick3]) + assert isinstance(cmsg2, CoordinatorMsg2) + assert isinstance(ext_recovery, bytes) + + # party pubkey for nick3 not found + coordinator = dkgc1.dkg_coordinators.get(session_id) + session3 = coordinator.sessions.pop(self.hostpubkey3) + ready_list, cmsg2, ext_recovery = dkgc1.on_dkg_pmsg2( + self.nick3, session_id, pmsg2_3) + assert ready_list is None + assert cmsg2 is None + assert ext_recovery is None + coordinator.sessions[self.hostpubkey3] = session3 + + # pmsg2 already set in coordinator sessions + ready_list, cmsg2, ext_recovery = dkgc1.on_dkg_pmsg2( + self.nick3, session_id, pmsg2_3) + assert ready_list is None + assert cmsg2 is None + assert ext_recovery is None + + def test_coordinator_step2(self): + dkgc1 = DKGClient(self.wlt_svc1) + dkgc2 = DKGClient(self.wlt_svc2) + dkgc3 = DKGClient(self.wlt_svc3) + hostpubkeyhash_hex, session_id, sig_hex = dkgc1.dkg_init(0, 0, 0) + + ( + nick1, + hostpubkeyhash2_hex, + session_id2_hex, + sig2_hex, + pmsg1_2 + ) = dkgc2.on_dkg_init( + self.nick1, 
hostpubkeyhash_hex, session_id, sig_hex) + pmsg1_2 = dkgc2.deserialize_pmsg1(pmsg1_2) + + ( + nick1, + hostpubkeyhash3_hex, + session_id3_hex, + sig3_hex, + pmsg1_3 + ) = dkgc3.on_dkg_init( + self.nick1, hostpubkeyhash_hex, session_id, sig_hex) + pmsg1_3 = dkgc2.deserialize_pmsg1(pmsg1_3) + + ready_list, cmsg1 = dkgc1.on_dkg_pmsg1( + self.nick2, hostpubkeyhash2_hex, session_id, sig2_hex, pmsg1_2) + ready_list, cmsg1 = dkgc1.on_dkg_pmsg1( + self.nick3, hostpubkeyhash3_hex, session_id, sig3_hex, pmsg1_3) + cmsg1 = dkgc1.deserialize_cmsg1(cmsg1) + + pmsg2_2 = dkgc2.party_step2(session_id, cmsg1) + pmsg2_2 = dkgc2.deserialize_pmsg2(pmsg2_2) + pmsg2_3 = dkgc3.party_step2(session_id, cmsg1) + pmsg2_3 = dkgc3.deserialize_pmsg2(pmsg2_3) + + ready_list, cmsg2, ext_recovery = dkgc1.on_dkg_pmsg2( + self.nick2, session_id, pmsg2_2) + ready_list, cmsg2, ext_recovery = dkgc1.on_dkg_pmsg2( + self.nick3, session_id, pmsg2_3) + + # unknown session_id + cmsg2 = dkgc1.coordinator_step2(b'\xaa'*32) + assert cmsg2 is None + + # coordinator.cmsg2 already set + cmsg2 = dkgc1.coordinator_step2(session_id) + assert cmsg2 is None + + coordinator = dkgc1.dkg_coordinators.get(session_id) + coordinator.cmsg2 = None + cmsg2 = dkgc1.coordinator_step2(session_id) + cmsg2 = dkgc1.deserialize_cmsg2(cmsg2) + assert isinstance(cmsg2, CoordinatorMsg2) + + def test_dkg_finalize(self): + dkgc1 = DKGClient(self.wlt_svc1) + dkgc2 = DKGClient(self.wlt_svc2) + dkgc3 = DKGClient(self.wlt_svc3) + hostpubkeyhash_hex, session_id, sig_hex = dkgc1.dkg_init(0, 0, 0) + + ( + nick1, + hostpubkeyhash2_hex, + session_id2_hex, + sig2_hex, + pmsg1_2 + ) = dkgc2.on_dkg_init( + self.nick1, hostpubkeyhash_hex, session_id, sig_hex) + pmsg1_2 = dkgc2.deserialize_pmsg1(pmsg1_2) + + ( + nick1, + hostpubkeyhash3_hex, + session_id3_hex, + sig3_hex, + pmsg1_3 + ) = dkgc3.on_dkg_init( + self.nick1, hostpubkeyhash_hex, session_id, sig_hex) + pmsg1_3 = dkgc2.deserialize_pmsg1(pmsg1_3) + + ready_list, cmsg1 = dkgc1.on_dkg_pmsg1( + 
self.nick2, hostpubkeyhash2_hex, session_id, sig2_hex, pmsg1_2) + ready_list, cmsg1 = dkgc1.on_dkg_pmsg1( + self.nick3, hostpubkeyhash3_hex, session_id, sig3_hex, pmsg1_3) + cmsg1 = dkgc1.deserialize_cmsg1(cmsg1) + + pmsg2_2 = dkgc2.party_step2(session_id, cmsg1) + pmsg2_2 = dkgc2.deserialize_pmsg2(pmsg2_2) + pmsg2_3 = dkgc3.party_step2(session_id, cmsg1) + pmsg2_3 = dkgc3.deserialize_pmsg2(pmsg2_3) + + ready_list, cmsg2, ext_recovery = dkgc1.on_dkg_pmsg2( + self.nick2, session_id, pmsg2_2) + ready_list, cmsg2, ext_recovery = dkgc1.on_dkg_pmsg2( + self.nick3, session_id, pmsg2_3) + cmsg2 = dkgc3.deserialize_cmsg2(cmsg2) + + # unknown session_id + assert not dkgc2.finalize(b'\xaa'*32, cmsg2, ext_recovery) + + assert dkgc2.finalize(session_id, cmsg2, ext_recovery) + assert dkgc3.finalize(session_id, cmsg2, ext_recovery) + + # session.dkg_output already set + assert not dkgc2.finalize(session_id, cmsg2, ext_recovery) + assert not dkgc3.finalize(session_id, cmsg2, ext_recovery) + + def test_on_dkg_finalized(self): + dkgc1 = DKGClient(self.wlt_svc1) + dkgc2 = DKGClient(self.wlt_svc2) + dkgc3 = DKGClient(self.wlt_svc3) + hostpubkeyhash_hex, session_id, sig_hex = dkgc1.dkg_init(0, 0, 0) + + ( + nick1, + hostpubkeyhash2_hex, + session_id2_hex, + sig2_hex, + pmsg1_2 + ) = dkgc2.on_dkg_init( + self.nick1, hostpubkeyhash_hex, session_id, sig_hex) + pmsg1_2 = dkgc2.deserialize_pmsg1(pmsg1_2) + + ( + nick1, + hostpubkeyhash3_hex, + session_id3_hex, + sig3_hex, + pmsg1_3 + ) = dkgc3.on_dkg_init( + self.nick1, hostpubkeyhash_hex, session_id, sig_hex) + pmsg1_3 = dkgc2.deserialize_pmsg1(pmsg1_3) + + ready_list, cmsg1 = dkgc1.on_dkg_pmsg1( + self.nick2, hostpubkeyhash2_hex, session_id, sig2_hex, pmsg1_2) + ready_list, cmsg1 = dkgc1.on_dkg_pmsg1( + self.nick3, hostpubkeyhash3_hex, session_id, sig3_hex, pmsg1_3) + cmsg1 = dkgc1.deserialize_cmsg1(cmsg1) + + pmsg2_2 = dkgc2.party_step2(session_id, cmsg1) + pmsg2_2 = dkgc2.deserialize_pmsg2(pmsg2_2) + pmsg2_3 = 
dkgc3.party_step2(session_id, cmsg1) + pmsg2_3 = dkgc3.deserialize_pmsg2(pmsg2_3) + + ready_list, cmsg2, ext_recovery = dkgc1.on_dkg_pmsg2( + self.nick2, session_id, pmsg2_2) + ready_list, cmsg2, ext_recovery = dkgc1.on_dkg_pmsg2( + self.nick3, session_id, pmsg2_3) + cmsg2 = dkgc3.deserialize_cmsg2(cmsg2) + + assert dkgc2.finalize(session_id, cmsg2, ext_recovery) + assert dkgc3.finalize(session_id, cmsg2, ext_recovery) + + # unknown session_id + dkgc1.on_dkg_finalized(self.nick2, b'\xaa'*32) + + assert not dkgc1.on_dkg_finalized(self.nick2, session_id) + assert dkgc1.on_dkg_finalized(self.nick3, session_id) + + +class FROSTClientTestCase(DKGClientTestCaseBase): + + async def asyncSetUp(self): + await super().asyncSetUp() + self.dkg_session_id = await populate_dkg_session(self) + self.fc1 = FROSTClient(self.wlt_svc1) + self.fc2 = FROSTClient(self.wlt_svc2) + self.fc3 = FROSTClient(self.wlt_svc3) + self.fc4 = FROSTClient(self.wlt_svc4) + + async def test_frost_init(self): + msg_bytes = bytes.fromhex('aabb'*16) + # test wallet with unknown hostpubkey + hostpubkeyhash_hex, session_id, sig_hex = self.fc4.frost_init( + self.dkg_session_id, msg_bytes) + assert hostpubkeyhash_hex is None + assert session_id is None + assert sig_hex is None + + hostpubkeyhash_hex, session_id, sig_hex = self.fc1.frost_init( + self.dkg_session_id, msg_bytes) + assert hostpubkeyhash_hex and len(hostpubkeyhash_hex) == 64 + assert session_id and len(session_id) == 32 + assert sig_hex and len(sig_hex) == 128 + + async def test_on_frost_init(self): + msg_bytes = bytes.fromhex('aabb'*16) + hostpubkeyhash_hex, session_id, sig_hex = self.fc1.frost_init( + self.dkg_session_id, msg_bytes) + + # fail with wrong pubkeyhash + hostpubkeyhash4_hex = sha256(self.hostpubkey4).digest() + ( + nick1, + hostpubkeyhash2_hex, + session_id2_hex, + sig2_hex, + pub_nonce + ) = self.fc2.on_frost_init( + self.nick1, hostpubkeyhash4_hex, session_id, sig_hex) + for v in [nick1, hostpubkeyhash2_hex, + session_id2_hex, 
sig2_hex, pub_nonce]: + assert v is None + + # fail with wrong sig + ( + nick1, + hostpubkeyhash2_hex, + session_id2_hex, + sig2_hex, + pub_nonce + ) = self.fc2.on_frost_init( + self.nick1, hostpubkeyhash_hex, session_id, '01020304'*16) + for v in [nick1, hostpubkeyhash2_hex, + session_id2_hex, sig2_hex, pub_nonce]: + assert v is None + + ( + nick1, + hostpubkeyhash2_hex, + session_id2_hex, + sig2_hex, + pub_nonce + ) = self.fc2.on_frost_init( + self.nick1, hostpubkeyhash_hex, session_id, sig_hex) + assert nick1 == self.nick1 + assert hostpubkeyhash2_hex and len(hostpubkeyhash2_hex) == 64 + assert session_id2_hex and len(session_id2_hex) == 64 + assert bytes.fromhex(session_id2_hex) == session_id + assert sig_hex and len(sig_hex) == 128 + assert pub_nonce and len(pub_nonce) == 66 + + # fail on second call with right params + ( + nick1, + hostpubkeyhash2_hex, + session_id2_hex, + sig2_hex, + pub_nonce + ) = self.fc2.on_frost_init( + self.nick1, hostpubkeyhash_hex, session_id, sig_hex) + for v in [nick1, hostpubkeyhash2_hex, + session_id2_hex, sig2_hex, pub_nonce]: + assert v is None + + def test_frost_round1(self): + msg_bytes = bytes.fromhex('aabb'*16) + hostpubkeyhash_hex, session_id, sig_hex = self.fc1.frost_init( + self.dkg_session_id, msg_bytes) + + ( + nick1, + hostpubkeyhash2_hex, + session_id2_hex, + sig2_hex, + pub_nonce + ) = self.fc2.on_frost_init( + self.nick1, hostpubkeyhash_hex, session_id, sig_hex) + + # fail with unknown session_id + pub_nonce = self.fc2.party_step1(b'\x05'*32) + assert pub_nonce is None + + # fail with session.sec_nonce already set + pub_nonce = self.fc2.frost_round1(session_id) + assert pub_nonce is None + + session = self.fc2.frost_sessions.get(session_id) + session.sec_nonce = None + pub_nonce = self.fc2.frost_round1(session_id) + assert pub_nonce and len(pub_nonce) == 66 + + def test_on_frost_round1(self): + msg_bytes = bytes.fromhex('aabb'*16) + hostpubkeyhash_hex, session_id, sig_hex = self.fc1.frost_init( + 
self.dkg_session_id, msg_bytes) + + ( + nick1, + hostpubkeyhash2_hex, + session_id2_hex, + sig2_hex, + pub_nonce2 + ) = self.fc2.on_frost_init( + self.nick1, hostpubkeyhash_hex, session_id, sig_hex) + + ( + nick1, + hostpubkeyhash3_hex, + session_id3_hex, + sig3_hex, + pub_nonce3 + ) = self.fc3.on_frost_init( + self.nick1, hostpubkeyhash_hex, session_id, sig_hex) + + # unknown session_id + ( + ready_list, + nonce_agg, + dkg_session_id, + ids, + msg + ) = self.fc1.on_frost_round1( + self.nick2, hostpubkeyhash2_hex, b'\xaa'*32, + sig2_hex, pub_nonce2) + for v in [ready_list, nonce_agg, dkg_session_id, ids, msg]: + assert v is None + + # unknown pubkeyhash + ( + ready_list, + nonce_agg, + dkg_session_id, + ids, + msg + ) = self.fc1.on_frost_round1( + self.nick2, 'bb'*32, session_id, sig2_hex, pub_nonce2) + for v in [ready_list, nonce_agg, dkg_session_id, ids, msg]: + assert v is None + + # wrong sig + ( + ready_list, + nonce_agg, + dkg_session_id, + ids, + msg + ) = self.fc1.on_frost_round1( + self.nick2, hostpubkeyhash2_hex, session_id, '1234'*32, pub_nonce2) + for v in [ready_list, nonce_agg, dkg_session_id, ids, msg]: + assert v is None + + ( + ready_list, + nonce_agg, + dkg_session_id, + ids, + msg + ) = self.fc1.on_frost_round1( + self.nick2, hostpubkeyhash2_hex, session_id, + sig2_hex, pub_nonce2) + assert ready_list == set([self.nick2]) + assert nonce_agg and len(nonce_agg)== 66 + assert dkg_session_id and dkg_session_id == self.dkg_session_id + assert ids == [1, 2] + assert msg and len(msg) == 32 and msg == msg_bytes + + # miminum pub_nonce set already presented, ignoring additional + ( + ready_list, + nonce_agg, + dkg_session_id, + ids, + msg + ) = self.fc1.on_frost_round1( + self.nick3, hostpubkeyhash3_hex, session_id, sig3_hex, pub_nonce3) + for v in [ready_list, nonce_agg, dkg_session_id, ids, msg]: + assert v is None + + def test_frost_agg1(self): + msg_bytes = bytes.fromhex('aabb'*16) + hostpubkeyhash_hex, session_id, sig_hex = self.fc1.frost_init( + 
self.dkg_session_id, msg_bytes) + + ( + nick1, + hostpubkeyhash2_hex, + session_id2_hex, + sig2_hex, + pub_nonce2 + ) = self.fc2.on_frost_init( + self.nick1, hostpubkeyhash_hex, session_id, sig_hex) + + ( + ready_list, + nonce_agg, + dkg_session_id, + ids, + msg + ) = self.fc1.on_frost_round1( + self.nick2, hostpubkeyhash2_hex, session_id, + sig2_hex, pub_nonce2) + + # fail on unknown session_id + ( + nonce_agg, + dkg_session_id, + ids, + msg + ) = self.fc1.frost_agg1(b'\xaa'*32) + for v in [nonce_agg, dkg_session_id, ids, msg]: + assert v is None + + # fail with coordinator.nonce_agg already set + ( + nonce_agg, + dkg_session_id, + ids, + msg + ) = self.fc1.frost_agg1(session_id) + for v in [nonce_agg, dkg_session_id, ids, msg]: + assert v is None + + coordinator = self.fc1.frost_coordinators.get(session_id) + coordinator.nonce_agg = None + ( + nonce_agg, + dkg_session_id, + ids, + msg + ) = self.fc1.frost_agg1(session_id) + assert nonce_agg and len(nonce_agg)== 66 + assert dkg_session_id and dkg_session_id == self.dkg_session_id + assert ids == [1, 2] + assert msg and len(msg) == 32 and msg == msg_bytes + + def test_frost_round2(self): + msg_bytes = bytes.fromhex('aabb'*16) + hostpubkeyhash_hex, session_id, sig_hex = self.fc1.frost_init( + self.dkg_session_id, msg_bytes) + + ( + nick1, + hostpubkeyhash2_hex, + session_id2_hex, + sig2_hex, + pub_nonce2 + ) = self.fc2.on_frost_init( + self.nick1, hostpubkeyhash_hex, session_id, sig_hex) + + ( + ready_list, + nonce_agg, + dkg_session_id, + ids, + msg + ) = self.fc1.on_frost_round1( + self.nick2, hostpubkeyhash2_hex, session_id, + sig2_hex, pub_nonce2) + + # fail on unknown session_id + partial_sig = self.fc2.frost_round2( + b'\xaa'*32, nonce_agg, self.dkg_session_id, ids, msg) + + # fail on unknown dkg_session_id + partial_sig = self.fc2.frost_round2( + session_id, nonce_agg, b'\xdd'*32, ids, msg) + + partial_sig = self.fc2.frost_round2( + session_id, nonce_agg, self.dkg_session_id, ids, msg) + assert partial_sig 
and len(partial_sig) == 32 + + # session.partial_sig already set + partial_sig = self.fc2.frost_round2( + session_id, nonce_agg, self.dkg_session_id, ids, msg) + assert partial_sig is None + + def test_on_frost_round2(self): + msg_bytes = bytes.fromhex('aabb'*16) + hostpubkeyhash_hex, session_id, sig_hex = self.fc1.frost_init( + self.dkg_session_id, msg_bytes) + + ( + nick1, + hostpubkeyhash2_hex, + session_id2_hex, + sig2_hex, + pub_nonce2 + ) = self.fc2.on_frost_init( + self.nick1, hostpubkeyhash_hex, session_id, sig_hex) + + ( + ready_list, + nonce_agg, + dkg_session_id, + ids, + msg + ) = self.fc1.on_frost_round1( + self.nick2, hostpubkeyhash2_hex, session_id, + sig2_hex, pub_nonce2) + + partial_sig = self.fc2.frost_round2( + session_id, nonce_agg, self.dkg_session_id, ids, msg) + + # unknown party nick + sig = self.fc1.on_frost_round2(self.nick4, session_id, partial_sig) + assert sig is None + + # party pubkey for nick3 not found + coordinator = self.fc1.frost_coordinators.get(session_id) + session2 = coordinator.sessions.pop(self.hostpubkey2) + sig = self.fc1.on_frost_round2(self.nick2, session_id, partial_sig) + assert sig is None + coordinator.sessions[self.hostpubkey2] = session2 + + # fail on unknown session_id + sig = self.fc1.on_frost_round2(self.nick2, b'\xaa'*32, partial_sig) + assert sig is None + + sig = self.fc1.on_frost_round2(self.nick2, session_id, partial_sig) + assert sig and len(sig) == 64 + + # partial_sig already set in coordinator + sig = self.fc1.on_frost_round2(self.nick2, session_id, partial_sig) + assert sig is None diff --git a/test/jmclient/test_frost_ipc.py b/test/jmclient/test_frost_ipc.py new file mode 100644 index 0000000..6ea5b0f --- /dev/null +++ b/test/jmclient/test_frost_ipc.py @@ -0,0 +1,341 @@ +# -*- coding: utf-8 -*- + +import asyncio +import base64 +import time +from pprint import pprint + +from unittest import IsolatedAsyncioTestCase + +import jmclient # install asyncioreactor +from twisted.internet import reactor + 
+import pytest + +import jmbitcoin as btc +from jmbase import get_log +from jmclient import ( + load_test_config, jm_single, get_network, cryptoengine, VolatileStorage, + FrostWallet, WalletService) +from jmclient import FrostIPCServer, FrostIPCClient +from jmclient.frost_clients import FROSTClient + +from test_frost_clients import populate_dkg_session + + +pytestmark = pytest.mark.usefixtures("setup_regtest_frost_bitcoind") + +log = get_log() + + +async def get_populated_wallet(entropy=None): + storage = VolatileStorage() + dkg_storage = VolatileStorage() + recovery_storage = VolatileStorage() + FrostWallet.initialize(storage, dkg_storage, recovery_storage, + get_network(), entropy=entropy) + wlt = FrostWallet(storage, dkg_storage, recovery_storage) + await wlt.async_init(storage) + return wlt + + +class DummyFrostJMClientProtocol: + + def __init__(self, factory, client, nick): + self.nick = nick + self.factory = factory + self.client = client + self.party_clients = {} + + async def dkg_gen(self): + log.debug(f'Coordinator call dkg_gen') + client = self.factory.client + md_type_idx = None + session_id = None + session = None + + while True: + if md_type_idx is None: + md_type_idx = await client.dkg_gen() + if md_type_idx is None: + log.debug('finished dkg_gen execution') + break + + if session_id is None: + session_id, _, session = self.dkg_init(*md_type_idx) + if session_id is None: + log.warn('could not get session_id from dkg_init}') + await asyncio.sleep(5) + continue + + pub = await client.wait_on_dkg_output(session_id) + if not pub: + session_id = None + session = None + continue + + if session.dkg_output: + md_type_idx = None + session_id = None + session = None + client.dkg_gen_list.pop(0) + continue + + def dkg_init(self, mixdepth, address_type, index): + log.debug(f'Coordinator call dkg_init ' + f'({mixdepth}, {address_type}, {index})') + client = self.factory.client + hostpubkeyhash, session_id, sig = client.dkg_init( + mixdepth, address_type, index) + 
coordinator = client.dkg_coordinators.get(session_id) + session = client.dkg_sessions.get(session_id) + if session_id and session and coordinator: + session.dkg_init_sec = time.time() + + for _, pc in self.party_clients.items(): + + async def on_dkg_init(pc, nick, hostpubkeyhash, + session_id, sig): + await pc.on_dkg_init( + nick, hostpubkeyhash, session_id, sig) + + asyncio.create_task(on_dkg_init( + pc, self.nick, hostpubkeyhash, session_id, sig)) + return session_id, coordinator, session + return None, None, None + + async def on_dkg_init(self, nick, hostpubkeyhash, session_id, sig): + client = self.factory.client + nick, hostpubkeyhash, session_id, sig, pmsg1 = client.on_dkg_init( + nick, hostpubkeyhash, session_id, sig) + if pmsg1: + pc = self.party_clients[nick] + session_id = bytes.fromhex(session_id) + await pc.on_dkg_pmsg1( + self.nick, hostpubkeyhash, session_id, sig, pmsg1) + + async def on_dkg_pmsg1(self, nick, hostpubkeyhash, session_id, sig, pmsg1): + client = self.factory.client + pmsg1 = client.deserialize_pmsg1(pmsg1) + ready_nicks, cmsg1 = client.on_dkg_pmsg1( + nick, hostpubkeyhash, session_id, sig, pmsg1) + if ready_nicks and cmsg1: + for party_nick in ready_nicks: + pc = self.party_clients[party_nick] + await pc.on_dkg_cmsg1(self.nick, session_id, cmsg1) + + async def on_dkg_cmsg1(self, nick, session_id, cmsg1): + client = self.factory.client + session = client.dkg_sessions.get(session_id) + if not session: + log.error(f'on_dkg_cmsg1: session {session_id} not found') + return {'accepted': True} + if session and session.coord_nick == nick: + cmsg1 = client.deserialize_cmsg1(cmsg1) + pmsg2 = client.party_step2(session_id, cmsg1) + if pmsg2: + pc = self.party_clients[nick] + await pc.on_dkg_pmsg2(self.nick, session_id, pmsg2) + else: + log.error(f'on_dkg_cmsg1: not coordinator nick {nick}') + + async def on_dkg_pmsg2(self, nick, session_id, pmsg2): + client = self.factory.client + pmsg2 = client.deserialize_pmsg2(pmsg2) + ready_nicks, cmsg2, 
ext_recovery = client.on_dkg_pmsg2( + nick, session_id, pmsg2) + if ready_nicks and cmsg2 and ext_recovery: + for party_nick in ready_nicks: + pc = self.party_clients[party_nick] + await pc.on_dkg_cmsg2( + self.nick, session_id, cmsg2, ext_recovery) + + async def on_dkg_cmsg2(self, nick, session_id, cmsg2, ext_recovery): + client = self.factory.client + session = client.dkg_sessions.get(session_id) + if not session: + log.error(f'on_dkg_cmsg2: session {session_id} not found') + return {'accepted': True} + if session and session.coord_nick == nick: + cmsg2 = client.deserialize_cmsg2(cmsg2) + finalized = client.finalize(session_id, cmsg2, ext_recovery) + if finalized: + pc = self.party_clients[nick] + await pc.on_dkg_finalized(self.nick, session_id) + else: + log.error(f'on_dkg_cmsg2: not coordinator nick {nick}') + + async def on_dkg_finalized(self, nick, session_id): + client = self.factory.client + log.debug(f'Coordinator get dkgfinalized') + client.on_dkg_finalized(nick, session_id) + + def frost_init(self, dkg_session_id, msg_bytes): + log.debug(f'Coordinator call frost_init') + client = self.factory.client + hostpubkeyhash, session_id, sig = client.frost_init( + dkg_session_id, msg_bytes) + coordinator = client.frost_coordinators.get(session_id) + session = client.frost_sessions.get(session_id) + if session_id and session and coordinator: + coordinator.frost_init_sec = time.time() + for _, pc in self.party_clients.items(): + + async def on_frost_init(pc, nick, hostpubkeyhash, + session_id, sig): + await pc.on_frost_init( + nick, hostpubkeyhash, session_id, sig) + + asyncio.create_task(on_frost_init( + pc, self.nick, hostpubkeyhash, session_id, sig)) + return session_id, coordinator, session + + async def on_frost_init(self, nick, hostpubkeyhash, session_id, sig): + client = self.factory.client + ( + nick, + hostpubkeyhash, + session_id, + sig, + pub_nonce + ) = client.on_frost_init(nick, hostpubkeyhash, session_id, sig) + if pub_nonce: + pc = 
self.party_clients[nick] + session_id = bytes.fromhex(session_id) + await pc.on_frost_round1( + self.nick, hostpubkeyhash, session_id, sig, pub_nonce) + + async def on_frost_round1(self, nick, hostpubkeyhash, + session_id, sig, pub_nonce): + client = self.factory.client + ( + ready_nicks, + nonce_agg, + dkg_session_id, + ids, + msg + ) = client.on_frost_round1( + nick, hostpubkeyhash, session_id, sig, pub_nonce) + if ready_nicks and nonce_agg: + for party_nick in ready_nicks: + pc = self.party_clients[nick] + self.frost_agg1(pc, self.nick, session_id, nonce_agg, + dkg_session_id, ids, msg) + + def frost_agg1(self, pc, nick, session_id, + nonce_agg, dkg_session_id, ids, msg): + pc.on_frost_agg1( + self.nick, session_id, nonce_agg, dkg_session_id, ids, msg) + + def on_frost_agg1(self, nick, session_id, + nonce_agg, dkg_session_id, ids, msg): + client = self.factory.client + session = client.frost_sessions.get(session_id) + if not session: + log.error(f'on_frost_agg1: session {session_id} not found') + return + if session and session.coord_nick == nick: + partial_sig = client.frost_round2( + session_id, nonce_agg, dkg_session_id, ids, msg) + if partial_sig: + pc = self.party_clients[nick] + pc.on_frost_round2(self.nick, session_id, partial_sig) + else: + log.error(f'on_frost_agg1: not coordinator nick {nick}') + + def on_frost_round2(self, nick, session_id, partial_sig): + client = self.factory.client + sig = client.on_frost_round2(nick, session_id, partial_sig) + if sig: + log.debug(f'Successfully get signature {sig.hex()[:8]}...') + + +class DummyFrostJMClientProtocolFactory: + + protocol = DummyFrostJMClientProtocol + + def __init__(self, client, nick): + self.client = client + self.proto_client = self.protocol(self, self.client, nick) + + def add_party_client(self, nick, party_client): + self.proto_client.party_clients[nick] = party_client + + def getClient(self): + return self.proto_client + + +class FrostIPCTestCaseBase(IsolatedAsyncioTestCase): + + def 
setUp(self): + load_test_config(config_path='./test_frost') + btc.select_chain_params("bitcoin/regtest") + cryptoengine.BTC_P2TR.VBYTE = 100 + jm_single().bc_interface.tick_forward_chain_interval = 2 + + async def asyncSetUp(self): + self.nick1, self.nick2, self.nick3 = ['nick1', 'nick2', 'nick3'] + entropy1 = bytes.fromhex('8e5e5677fb302874a607b63ad03ba434') + entropy2 = bytes.fromhex('38dfa80fbb21b32b2b2740e00a47de9d') + entropy3 = bytes.fromhex('3ad9c77fcd1d537b6ef396952d1221a0') + self.wlt1 = await get_populated_wallet(entropy1) + self.wlt_svc1 = WalletService(self.wlt1) + self.fc1 = FROSTClient(self.wlt_svc1) + cfactory1 = DummyFrostJMClientProtocolFactory(self.fc1, self.nick1) + self.wlt1.set_client_factory(cfactory1) + + self.wlt2 = await get_populated_wallet(entropy2) + self.wlt_svc2 = WalletService(self.wlt2) + self.fc2 = FROSTClient(self.wlt_svc2) + cfactory2 = DummyFrostJMClientProtocolFactory(self.fc2, self.nick2) + self.wlt2.set_client_factory(cfactory2) + + self.wlt3 = await get_populated_wallet(entropy3) + self.wlt_svc3 = WalletService(self.wlt3) + self.fc3 = FROSTClient(self.wlt_svc3) + cfactory3 = DummyFrostJMClientProtocolFactory(self.fc3, self.nick3) + self.wlt3.set_client_factory(cfactory3) + + cfactory1.add_party_client(self.nick2, cfactory2.proto_client) + cfactory1.add_party_client(self.nick3, cfactory3.proto_client) + + cfactory2.add_party_client(self.nick1, cfactory1.proto_client) + cfactory2.add_party_client(self.nick3, cfactory3.proto_client) + + cfactory3.add_party_client(self.nick1, cfactory1.proto_client) + cfactory3.add_party_client(self.nick2, cfactory2.proto_client) + + await populate_dkg_session(self) + + self.ipcs = FrostIPCServer(self.wlt1) + await self.ipcs.async_init() + self.ipcc = FrostIPCClient(self.wlt1) + await self.ipcc.async_init() + self.wlt1.set_ipc_client(self.ipcc) + + +class FrostIPCClientTestCase(FrostIPCTestCaseBase): + + async def asyncSetUp(self): + await super().asyncSetUp() + self.serve_task = 
asyncio.create_task(self.ipcs.serve_forever()) + + async def asyncTearDown(self): + self.serve_task.cancel("cancel from asyncTearDown") + + async def test_get_dkg_pubkey(self): + pubkey = await self.ipcc.get_dkg_pubkey(0, 0, 0) + dkg = self.wlt1.dkg + pubkeys = list(dkg._dkg_pubkey.values()) + assert pubkey and pubkey in pubkeys + + pubkey = await self.ipcc.get_dkg_pubkey(0, 0, 1) + pubkeys = list(dkg._dkg_pubkey.values()) + assert pubkey and pubkey in pubkeys + + async def test_frost_sign(self): + sighash = bytes.fromhex('01020304'*8) + sig, pubkey, tweaked_pubkey = await self.ipcc.frost_sign(0, 0, 0, sighash) + assert sig and len(sig) == 64 + assert pubkey and len(pubkey) == 33 + assert tweaked_pubkey and len(tweaked_pubkey) == 33 diff --git a/test/jmclient/test_frost_wallet.py b/test/jmclient/test_frost_wallet.py new file mode 100644 index 0000000..db26187 --- /dev/null +++ b/test/jmclient/test_frost_wallet.py @@ -0,0 +1,410 @@ +'''Wallet functionality tests.''' +import os +import json +from pprint import pprint + +from unittest import IsolatedAsyncioTestCase + +import bencoder + +import jmclient # install asyncioreactor +from twisted.internet import reactor + +import pytest +import jmbitcoin as btc +from jmbase import get_log +from jmclient import ( + load_test_config, jm_single, VolatileStorage, get_network, cryptoengine, + create_wallet, open_test_wallet_maybe, FrostWallet, DKGManager) + +from jmfrost.chilldkg_ref.chilldkg import DKGOutput, hostpubkey_gen +from jmclient.frost_clients import ( + serialize_ext_recovery, decrypt_ext_recovery) + +pytestmark = pytest.mark.usefixtures("setup_regtest_frost_bitcoind") + +test_create_wallet_filename = "frost_testwallet_for_create_wallet_test" + +log = get_log() + + +async def get_populated_wallet(): + storage = VolatileStorage() + dkg_storage = VolatileStorage() + recovery_storage = VolatileStorage() + FrostWallet.initialize(storage, dkg_storage, recovery_storage, + get_network()) + wallet = FrostWallet(storage, 
dkg_storage, recovery_storage) + await wallet.async_init(storage) + return wallet + + +def populate_dkg(wlt, add_party=True, add_coordinator=True, save_dkg=True): + pubkey = hostpubkey_gen(wlt._hostseckey[:32]) + md_type_idx = (0, 0, 0) # mixdepth, address_type, index + ext_recovery_bytes = serialize_ext_recovery(*md_type_idx) + ext_recovery = btc.ecies_encrypt(ext_recovery_bytes, pubkey) + if add_party: + wlt.dkg.add_party_data( + session_id=bytes.fromhex('aa'*32), + dkg_output=DKGOutput( + bytes.fromhex('01'*32), # secshare + bytes.fromhex('02'*32 + '01'), # threshold_pubkey + [ # pubshares + bytes.fromhex('03'*32 + '02'), + bytes.fromhex('03'*32 + '03'), + bytes.fromhex('03'*32 + '04'), + ] + ), + hostpubkeys=[ + bytes.fromhex('02'*32 + '05'), + bytes.fromhex('02'*32 + '06'), + bytes.fromhex('02'*32 + '07'), + ], + t=2, + recovery_data=bytes.fromhex('0102030405'*10), + ext_recovery=ext_recovery, + save_dkg=save_dkg + ) + if add_coordinator: + wlt.dkg.add_coordinator_data( + session_id=bytes.fromhex('bb'*32), + dkg_output=DKGOutput( + bytes.fromhex('11'*32), # secshare + bytes.fromhex('02'*32 + '11'), # threshold_pubkey + [ # pubshares + bytes.fromhex('03'*32 + '12'), + bytes.fromhex('03'*32 + '13'), + bytes.fromhex('03'*32 + '14'), + ] + ), + hostpubkeys=[ + bytes.fromhex('02'*32 + '15'), + bytes.fromhex('02'*32 + '16'), + bytes.fromhex('02'*32 + '17'), + ], + t=2, + recovery_data=bytes.fromhex('0102030405'*10), + ext_recovery=ext_recovery, + save_dkg=save_dkg + ) + return ext_recovery + + +def check_dkg(wlt, ext_recovery, check_party=True, check_coordinator=True): + if check_party: + dkg_dict = wlt._dkg_storage.data[DKGManager.STORAGE_KEY] + assert dkg_dict[DKGManager.SECSHARE_SUBKEY] == { + b'\xaa'*32: b'\x01'*32 + } + assert dkg_dict[DKGManager.PUBSHARES_SUBKEY] == { + b'\xaa'*32: [ + bytes.fromhex('03'*32 + '02'), + bytes.fromhex('03'*32 + '03'), + bytes.fromhex('03'*32 + '04'), + ] + } + assert dkg_dict[DKGManager.PUBKEY_SUBKEY] == { + b'\xaa'*32: 
bytes.fromhex('02'*32 + '01'), + } + assert dkg_dict[DKGManager.HOSTPUBKEYS_SUBKEY] == { + b'\xaa'*32: [ + bytes.fromhex('02'*32 + '05'), + bytes.fromhex('02'*32 + '06'), + bytes.fromhex('02'*32 + '07'), + ] + } + assert dkg_dict[DKGManager.T_SUBKEY] == { + b'\xaa'*32: 2, + } + assert dkg_dict[DKGManager.SESSIONS_SUBKEY] == dict() + rec_dict = wlt._recovery_storage.data[DKGManager.RECOVERY_STORAGE_KEY] + assert rec_dict == { + b'\xaa'*32: [ + ext_recovery, + bytes.fromhex('0102030405'*10), + ], + } + if check_coordinator: + ext_recovery_bytes = decrypt_ext_recovery(wlt._hostseckey, + ext_recovery) + dkg_dict = wlt._dkg_storage.data[DKGManager.STORAGE_KEY] + assert dkg_dict[DKGManager.SECSHARE_SUBKEY] == { + b'\xbb'*32: b'\x11'*32 + } + assert dkg_dict[DKGManager.PUBSHARES_SUBKEY] == { + b'\xbb'*32: [ + bytes.fromhex('03'*32 + '12'), + bytes.fromhex('03'*32 + '13'), + bytes.fromhex('03'*32 + '14'), + ] + } + assert dkg_dict[DKGManager.PUBKEY_SUBKEY] == { + b'\xbb'*32: bytes.fromhex('02'*32 + '11'), + } + assert dkg_dict[DKGManager.HOSTPUBKEYS_SUBKEY] == { + b'\xbb'*32: [ + bytes.fromhex('02'*32 + '15'), + bytes.fromhex('02'*32 + '16'), + bytes.fromhex('02'*32 + '17'), + ] + } + assert dkg_dict[DKGManager.T_SUBKEY] == { + b'\xbb'*32: 2, + } + assert dkg_dict[DKGManager.SESSIONS_SUBKEY] == { + ext_recovery_bytes: b'\xbb'*32 + } + rec_dict = wlt._recovery_storage.data[DKGManager.RECOVERY_STORAGE_KEY] + assert rec_dict == { + b'\xbb'*32: [ + ext_recovery, + bytes.fromhex('0102030405'*10), + ], + } + + +class AsyncioTestCase(IsolatedAsyncioTestCase): + + params = { + 'test_is_standard_wallet_script': [FrostWallet] + } + + def setUp(self): + load_test_config(config_path='./test_frost') + btc.select_chain_params("bitcoin/regtest") + #see note in cryptoengine.py: + cryptoengine.BTC_P2TR.VBYTE = 100 + jm_single().bc_interface.tick_forward_chain_interval = 2 + + def tearDown(self): + if os.path.exists(test_create_wallet_filename): + os.remove(test_create_wallet_filename) + 
dkg_filename = f'{test_create_wallet_filename}.dkg' + recovery_filename = f'{test_create_wallet_filename}.dkg_recovery' + if os.path.exists(dkg_filename): + os.remove(dkg_filename) + if os.path.exists(recovery_filename): + os.remove(recovery_filename) + + async def test_create_wallet(self): + password = b"hunter2" + wallet_name = test_create_wallet_filename + # test mainnet (we are not transacting) + btc.select_chain_params("bitcoin") + wallet = await create_wallet(wallet_name, password, 4, FrostWallet) + mnemonic = wallet.get_mnemonic_words()[0] + wallet.close() + # ensure that the wallet file created is openable with the password, + # and has the parameters that were claimed on creation: + new_wallet = await open_test_wallet_maybe( + wallet_name, "", 4, password=password, ask_for_password=False) + assert new_wallet.get_mnemonic_words()[0] == mnemonic + btc.select_chain_params("bitcoin/regtest") + + async def test_dkg_manager_initialized(self): + wlt = await get_populated_wallet() + dkg_dict = wlt._dkg_storage.data[DKGManager.STORAGE_KEY] + assert set(dkg_dict.keys()) == set([ + DKGManager.SECSHARE_SUBKEY, + DKGManager.PUBSHARES_SUBKEY, + DKGManager.PUBKEY_SUBKEY, + DKGManager.HOSTPUBKEYS_SUBKEY, + DKGManager.T_SUBKEY, + DKGManager.SESSIONS_SUBKEY, + ]) + + assert dkg_dict[DKGManager.SECSHARE_SUBKEY] == dict() + assert dkg_dict[DKGManager.PUBSHARES_SUBKEY] == dict() + assert dkg_dict[DKGManager.PUBKEY_SUBKEY] == dict() + assert dkg_dict[DKGManager.HOSTPUBKEYS_SUBKEY] == dict() + assert dkg_dict[DKGManager.T_SUBKEY] == dict() + assert dkg_dict[DKGManager.SESSIONS_SUBKEY] == dict() + rec_dict = wlt._recovery_storage.data[DKGManager.RECOVERY_STORAGE_KEY] + assert rec_dict == dict() + + async def test_dkg_add_party_data(self): + wlt = await get_populated_wallet() + ext_recovery = populate_dkg(wlt, True, False) + check_dkg(wlt, ext_recovery, True, False) + + async def test_dkg_add_coordinator_data(self): + wlt = await get_populated_wallet() + ext_recovery = 
populate_dkg(wlt, False, True) + check_dkg(wlt, ext_recovery, False, True) + + async def test_dkg_save(self): + wlt = await get_populated_wallet() + ext_recovery = populate_dkg(wlt, True, True, save_dkg=False) + ext_recovery_bytes = decrypt_ext_recovery(wlt._hostseckey, + ext_recovery) + + saved_dkg = bencoder.bdecode(wlt._dkg_storage.file_data[8:]) + STORAGE_KEY = DKGManager.STORAGE_KEY + HOSTPUBKEYS_SUBKEY = DKGManager.HOSTPUBKEYS_SUBKEY + PUBKEY_SUBKEY = DKGManager.PUBKEY_SUBKEY + PUBSHARES_SUBKEY = DKGManager.PUBSHARES_SUBKEY + SECSHARE_SUBKEY = DKGManager.SECSHARE_SUBKEY + T_SUBKEY = DKGManager.T_SUBKEY + SESSIONS_SUBKEY = DKGManager.SESSIONS_SUBKEY + + assert saved_dkg[STORAGE_KEY][SECSHARE_SUBKEY] == dict() + assert saved_dkg[STORAGE_KEY][PUBSHARES_SUBKEY] == dict() + assert saved_dkg[STORAGE_KEY][PUBKEY_SUBKEY] == dict() + assert saved_dkg[STORAGE_KEY][HOSTPUBKEYS_SUBKEY] == dict() + assert saved_dkg[STORAGE_KEY][T_SUBKEY] == dict() + assert saved_dkg[STORAGE_KEY][SESSIONS_SUBKEY] == dict() + + saved_rec = bencoder.bdecode(wlt._recovery_storage.file_data[8:]) + assert saved_rec[b'dkg'] == dict() + + wlt.dkg.save() + + saved_dkg = bencoder.bdecode(wlt._dkg_storage.file_data[8:]) + assert set(saved_dkg[STORAGE_KEY][SECSHARE_SUBKEY].keys()) == set([ + b'\xaa'*32, + b'\xbb'*32, + ]) + assert set(saved_dkg[STORAGE_KEY][PUBSHARES_SUBKEY].keys()) == set([ + b'\xaa'*32, + b'\xbb'*32, + ]) + assert set(saved_dkg[STORAGE_KEY][PUBKEY_SUBKEY].keys()) == set([ + b'\xaa'*32, + b'\xbb'*32, + ]) + assert set(saved_dkg[STORAGE_KEY][HOSTPUBKEYS_SUBKEY].keys()) == set([ + b'\xaa'*32, + b'\xbb'*32, + ]) + assert set(saved_dkg[STORAGE_KEY][T_SUBKEY].keys()) == set([ + b'\xaa'*32, + b'\xbb'*32, + ]) + assert set(saved_dkg[STORAGE_KEY][SESSIONS_SUBKEY].keys()) == set([ + ext_recovery_bytes + ]) + + saved_rec = bencoder.bdecode(wlt._recovery_storage.file_data[8:]) + RECOVERY_STORAGE_KEY = DKGManager.RECOVERY_STORAGE_KEY + assert set(saved_rec[RECOVERY_STORAGE_KEY].keys()) == set([ 
+ b'\xaa'*32, + b'\xbb'*32, + ]) + + async def test_dkg_load_storage(self): + password = b"hunter2" + wlt = await create_wallet( + test_create_wallet_filename, password, 4, FrostWallet) + mnemonic = wlt.get_mnemonic_words()[0] + ext_recovery = populate_dkg(wlt, False, True) + check_dkg(wlt, ext_recovery, False, True) + wlt.save() + wlt.close() + + new_wlt = await open_test_wallet_maybe( + test_create_wallet_filename, "", 4, password=password, + ask_for_password=False, + load_dkg=True, dkg_read_only=False, read_only=True) + + dkgman = DKGManager( + new_wlt, new_wlt._dkg_storage, new_wlt._recovery_storage) + new_wlt._dkg = dkgman + check_dkg(new_wlt, ext_recovery, False, True) + + async def test_dkg_find_session(self): + wlt = await get_populated_wallet() + ext_recovery = populate_dkg(wlt, True, True) + assert wlt.dkg.find_session(0, 0, 0) == b'\xbb'*32 + assert wlt.dkg.find_session(0, 0, 1) is None + + async def test_dkg_find_dkg_pubkey(self): + wlt = await get_populated_wallet() + ext_recovery = populate_dkg(wlt, True, True) + assert wlt.dkg.find_dkg_pubkey(0, 0, 0) == b'\x02'*32 + b'\x11' + assert wlt.dkg.find_dkg_pubkey(0, 0, 1) is None + + async def test_dkg_recover(self): + assert 0 # FIXME need to add test + + async def test_dkg_ls(self): + wlt = await get_populated_wallet() + ext_recovery = populate_dkg(wlt, True, True) + ls_data = wlt.dkg.dkg_ls() + ls_title = 'DKG data:\n' + ls_title_len = len(ls_title) + assert ls_data.startswith(ls_title) + ls_data = ls_data[ls_title_len:] + ls_json = json.loads(ls_data) + assert set(ls_json.keys()) == set(['sessions', 'a'*64, 'b'*64]) + assert ls_json['sessions']['0,0,0'] == 'b'*64 + ls_json_a = ls_json['a'*64] + ls_json_b = ls_json['b'*64] + + assert ls_json_a['secshare'] == '01'*32 + assert set(ls_json_a['pubshares']) == set(['03'*32 + '02', + '03'*32 + '03', + '03'*32 + '04']) + assert ls_json_a['pubkey'] == '02'*32 + '01' + assert set(ls_json_a['hostpubkeys']) == set(['02'*32 + '05', + '02'*32 + '06', + '02'*32 + 
'07']) + assert ls_json_a['t'] == 2 + + assert ls_json_b['secshare'] == '11'*32 + assert set(ls_json_b['pubshares']) == set(['03'*32 + '12', + '03'*32 + '13', + '03'*32 + '14']) + assert ls_json_b['pubkey'] == '02'*32 + '11' + assert set(ls_json_b['hostpubkeys']) == set(['02'*32 + '15', + '02'*32 + '16', + '02'*32 + '17']) + assert ls_json_b['t'] == 2 + + async def test_dkg_rm(self): + wlt = await get_populated_wallet() + ext_recovery = populate_dkg(wlt, True, True) + rm_data = wlt.dkg.dkg_rm(['a'*64]) + rm_lines = rm_data.split('\n') + assert rm_lines[0] == f'dkg data for session {"a"*64} deleted' + rm_data = wlt.dkg.dkg_rm(['a'*64]) + rm_lines = rm_data.split('\n') + assert rm_lines[0] == f'not found dkg data for session {"a"*64}' + + rm_data = wlt.dkg.dkg_rm(['b'*64]) + rm_lines = rm_data.split('\n') + assert rm_lines[0] == f'dkg data for session {"b"*64} deleted' + assert rm_lines[1] == f'session data for session {"b"*64} deleted' + rm_data = wlt.dkg.dkg_rm(['b'*64]) + rm_lines = rm_data.split('\n') + assert rm_lines[0] == f'not found dkg data for session {"b"*64}' + + async def test_recdkg_ls(self): + wlt = await get_populated_wallet() + ext_recovery = populate_dkg(wlt, True, True) + ls_data = wlt.dkg.recdkg_ls() + ls_lines = ls_data.split('\n') + assert ls_lines[1] == 'Decrypted sesions:' + assert ls_lines[-2] == 'Not decrypted sesions:' + assert ls_lines[-1] == '[]' + ls_json = json.loads('\n'.join(ls_lines[2:-3])) + assert ls_json[0] == ['a'*64, '0'*12, '0102030405'*10] + assert ls_json[1] == ['b'*64, '0'*12, '0102030405'*10] + + async def test_recdkg_rm(self): + wlt = await get_populated_wallet() + ext_recovery = populate_dkg(wlt, True, True) + rm_data = wlt.dkg.recdkg_rm(['a'*64]) + rm_lines = rm_data.split('\n') + assert rm_lines[0] == f'dkg recovery data for session {"a"*64} deleted' + rm_data = wlt.dkg.recdkg_rm(['a'*64]) + rm_lines = rm_data.split('\n') + assert rm_lines[0] == \ + f'not found dkg recovery data for session {"a"*64}' + rm_data = 
wlt.dkg.recdkg_rm(['b'*64]) + rm_lines = rm_data.split('\n') + assert rm_lines[0] == f'dkg recovery data for session {"b"*64} deleted' + rm_data = wlt.dkg.recdkg_rm(['b'*64]) + rm_lines = rm_data.split('\n') + assert rm_lines[0] == \ + f'not found dkg recovery data for session {"b"*64}' diff --git a/test/jmclient/test_taproot_wallet.py b/test/jmclient/test_taproot_wallet.py new file mode 100644 index 0000000..9665b37 --- /dev/null +++ b/test/jmclient/test_taproot_wallet.py @@ -0,0 +1,1134 @@ +'''Wallet functionality tests.''' +import datetime +import os +import time +import json +from binascii import hexlify, unhexlify + +from unittest import IsolatedAsyncioTestCase + +from unittest_parametrize import parametrize, ParametrizedTestCase + +import jmclient # install asyncioreactor +from twisted.internet import reactor + +import pytest +from _pytest.monkeypatch import MonkeyPatch +import jmbitcoin as btc +from commontest import ensure_bip65_activated +from jmbase import get_log, hextobin, bintohex +from jmclient import ( + load_test_config, jm_single, BaseWallet, BIP32Wallet, VolatileStorage, + get_network, cryptoengine, WalletError, BIP49Wallet, WalletService, + TaprootWalletFidelityBonds, create_wallet, open_test_wallet_maybe, + open_wallet, FidelityBondMixin, FidelityBondWatchonlyWallet, LegacyWallet, + wallet_gettimelockaddress, UnknownAddressForLabel, TaprootWallet, + get_blockchain_interface_instance) +from test_blockchaininterface import sync_test_wallet +from freezegun import freeze_time + +pytestmark = pytest.mark.usefixtures("setup_regtest_taproot_bitcoind") + +testdir = os.path.dirname(os.path.realpath(__file__)) + +test_create_wallet_filename = "taproot_testwallet_for_create_wallet_test" +test_cache_cleared_filename = "taproot_testwallet_for_cache_clear_test" + +log = get_log() + + +def assert_taproot(tx): + assert (tx.has_witness() + and tx.vout[0].scriptPubKey.is_witness_v1_taproot()) + + +async def get_populated_wallet(amount=10**8, num=3): + storage = 
VolatileStorage() + TaprootWallet.initialize(storage, get_network()) + wallet = TaprootWallet(storage) + await wallet.async_init(storage) + + # fund three wallet addresses at mixdepth 0 + for i in range(num): + addr = await wallet.get_internal_addr(0) + fund_wallet_addr(wallet, addr, amount / 10**8) + + return wallet + + +def fund_wallet_addr(wallet, addr, value_btc=1): + # special case, grab_coins returns hex from rpc: + txin_id = hextobin(jm_single().bc_interface.grab_coins(addr, value_btc)) + txinfo = jm_single().bc_interface.get_transaction(txin_id) + txin = btc.CMutableTransaction.deserialize(btc.x(txinfo["hex"])) + utxo_in = wallet.add_new_utxos(txin, 1) + assert len(utxo_in) == 1 + return list(utxo_in.keys())[0] + + +def get_bip39_vectors(): + fh = open(os.path.join(testdir, 'bip39vectors.json')) + data = json.load(fh)['english'] + data_with_tuples = [] + for d in data: + data_with_tuples.append(tuple(d)) + fh.close() + return data_with_tuples + + +class AsyncioTestCase(IsolatedAsyncioTestCase, ParametrizedTestCase): + + params = { + 'test_is_standard_wallet_script': + [TaprootWallet, TaprootWalletFidelityBonds] + } + + def setUp(self): + load_test_config(config_path='./test_taproot') + btc.select_chain_params("bitcoin/regtest") + #see note in cryptoengine.py: + cryptoengine.BTC_P2TR.VBYTE = 100 + jm_single().bc_interface.tick_forward_chain_interval = 2 + TaprootWallet._get_mixdepth_from_path_ = \ + TaprootWallet._get_mixdepth_from_path + TaprootWallet._get_bip32_mixdepth_path_level_ = \ + TaprootWallet._get_bip32_mixdepth_path_level + TaprootWallet._get_bip32_base_path_ = \ + TaprootWallet._get_bip32_base_path + TaprootWallet._create_master_key_ = \ + TaprootWallet._create_master_key + + def tearDown(self): + monkeypatch = MonkeyPatch() + monkeypatch.setattr(TaprootWallet, '_get_mixdepth_from_path', + TaprootWallet._get_mixdepth_from_path_) + monkeypatch.setattr(TaprootWallet, '_get_bip32_mixdepth_path_level', + 
TaprootWallet._get_bip32_mixdepth_path_level_) + monkeypatch.setattr(TaprootWallet, '_get_bip32_base_path', + TaprootWallet._get_bip32_base_path_) + monkeypatch.setattr(TaprootWallet, '_create_master_key', + TaprootWallet._create_master_key_) + + if os.path.exists(test_create_wallet_filename): + os.remove(test_create_wallet_filename) + if os.path.exists(test_cache_cleared_filename): + os.remove(test_cache_cleared_filename) + + @parametrize( + 'entropy,mnemonic,key,xpriv', + get_bip39_vectors()) + async def test_bip39_seeds(self, entropy, mnemonic, key, xpriv): + jm_single().config.set('BLOCKCHAIN', 'network', 'mainnet') + created_entropy = TaprootWallet.entropy_from_mnemonic(mnemonic) + assert entropy == hexlify(created_entropy).decode('ascii') + storage = VolatileStorage() + TaprootWallet.initialize( + storage, get_network(), entropy=created_entropy, + entropy_extension='TREZOR', max_mixdepth=4) + wallet = TaprootWallet(storage) + await wallet.async_init(storage) + assert (mnemonic, b'TREZOR') == wallet.get_mnemonic_words() + assert key == hexlify(wallet._create_master_key()).decode('ascii') + + # need to monkeypatch this, else we'll default to the BIP-49 path + monkeypatch = MonkeyPatch() + monkeypatch.setattr(TaprootWallet, '_get_bip32_base_path', + BIP32Wallet._get_bip32_base_path) + assert xpriv == wallet.get_bip32_priv_export() + + async def test_bip86_seed(self): + jm_single().config.set('BLOCKCHAIN', 'network', 'testnet') + mnemonic = 'abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon about' + master_xpriv = 'tprv8ZgxMBicQKsPe5YMU9gHen4Ez3ApihUfykaqUorj9t6FDqy3nP6eoXiAo2ssvpAjoLroQxHqr3R5nE3a5dU3DHTjTgJDd7zrbniJr6nrCzd' + account0_xpriv = 'tprv8gytrHbFLhE7zLJ6BvZWEDDGJe8aS8VrmFnvqpMv8CEZtUbn2NY5KoRKQNpkcL1yniyCBRi7dAPy4kUxHkcSvd9jzLmLMEG96TPwant2jbX' + addr0_script = '51203b82b2b2a9185315da6f80da5f06d0440d8a5e1457fa93387c2d919c86ec8786' + + entropy = TaprootWallet.entropy_from_mnemonic(mnemonic) + storage = 
VolatileStorage() + TaprootWallet.initialize( + storage, get_network(), entropy=entropy, max_mixdepth=0) + wallet = TaprootWallet(storage) + await wallet.async_init(storage) + assert (mnemonic, None) == wallet.get_mnemonic_words() + assert account0_xpriv == wallet.get_bip32_priv_export(0) + script = await wallet.get_external_script(0) + assert addr0_script == hexlify(script).decode('ascii') + + # FIXME: is this desired behaviour? BIP49 wallet will not return xpriv for + # the root key but only for key after base path + monkeypatch = MonkeyPatch() + monkeypatch.setattr(TaprootWallet, '_get_bip32_base_path', + BIP32Wallet._get_bip32_base_path) + assert master_xpriv == wallet.get_bip32_priv_export() + + async def test_bip32_test_vector_1(self): + jm_single().config.set('BLOCKCHAIN', 'network', 'mainnet') + + entropy = unhexlify('000102030405060708090a0b0c0d0e0f') + storage = VolatileStorage() + TaprootWallet.initialize( + storage, get_network(), entropy=entropy, max_mixdepth=0) + + # test vector 1 is using hardened derivation for the account/mixdepth level + monkeypatch = MonkeyPatch() + monkeypatch.setattr(TaprootWallet, '_get_mixdepth_from_path', + BIP49Wallet._get_mixdepth_from_path) + monkeypatch.setattr(TaprootWallet, '_get_bip32_mixdepth_path_level', + BIP49Wallet._get_bip32_mixdepth_path_level) + monkeypatch.setattr(TaprootWallet, '_get_bip32_base_path', + BIP32Wallet._get_bip32_base_path) + monkeypatch.setattr(TaprootWallet, '_create_master_key', + BIP32Wallet._create_master_key) + + wallet = TaprootWallet(storage) + await wallet.async_init(storage) + + assert wallet.get_bip32_priv_export() == 'xprv9s21ZrQH143K3QTDL4LXw2F7HEK3wJUD2nW2nRk4stbPy6cq3jPPqjiChkVvvNKmPGJxWUtg6LnF5kejMRNNU3TGtRBeJgk33yuGBxrMPHi' + assert wallet.get_bip32_pub_export() == 'xpub661MyMwAqRbcFtXgS5sYJABqqG9YLmC4Q1Rdap9gSE8NqtwybGhePY2gZ29ESFjqJoCu1Rupje8YtGqsefD265TMg7usUDFdp6W1EGMcet8' + assert wallet.get_bip32_priv_export(0) == 
'xprv9uHRZZhk6KAJC1avXpDAp4MDc3sQKNxDiPvvkX8Br5ngLNv1TxvUxt4cV1rGL5hj6KCesnDYUhd7oWgT11eZG7XnxHrnYeSvkzY7d2bhkJ7' + assert wallet.get_bip32_pub_export(0) == 'xpub68Gmy5EdvgibQVfPdqkBBCHxA5htiqg55crXYuXoQRKfDBFA1WEjWgP6LHhwBZeNK1VTsfTFUHCdrfp1bgwQ9xv5ski8PX9rL2dZXvgGDnw' + assert wallet.get_bip32_priv_export(0, 1) == 'xprv9wTYmMFdV23N2TdNG573QoEsfRrWKQgWeibmLntzniatZvR9BmLnvSxqu53Kw1UmYPxLgboyZQaXwTCg8MSY3H2EU4pWcQDnRnrVA1xe8fs' + assert wallet.get_bip32_pub_export(0, 1) == 'xpub6ASuArnXKPbfEwhqN6e3mwBcDTgzisQN1wXN9BJcM47sSikHjJf3UFHKkNAWbWMiGj7Wf5uMash7SyYq527Hqck2AxYysAA7xmALppuCkwQ' + # there are more test vectors but those don't match joinmarket's wallet + # structure, hence they make little sense to test here + + async def test_bip32_test_vector_2(self): + jm_single().config.set('BLOCKCHAIN', 'network', 'mainnet') + + entropy = unhexlify('fffcf9f6f3f0edeae7e4e1dedbd8d5d2cfccc9c6c3c0bdbab7b4b1aeaba8a5a29f9c999693908d8a8784817e7b7875726f6c696663605d5a5754514e4b484542') + storage = VolatileStorage() + LegacyWallet.initialize( + storage, get_network(), entropy=entropy, max_mixdepth=0) + + monkeypatch = MonkeyPatch() + monkeypatch.setattr(LegacyWallet, '_get_bip32_base_path', + BIP32Wallet._get_bip32_base_path) + monkeypatch.setattr(LegacyWallet, '_create_master_key', + BIP32Wallet._create_master_key) + + wallet = LegacyWallet(storage) + await wallet.async_init(storage) + + assert wallet.get_bip32_priv_export() == 'xprv9s21ZrQH143K31xYSDQpPDxsXRTUcvj2iNHm5NUtrGiGG5e2DtALGdso3pGz6ssrdK4PFmM8NSpSBHNqPqm55Qn3LqFtT2emdEXVYsCzC2U' + assert wallet.get_bip32_pub_export() == 'xpub661MyMwAqRbcFW31YEwpkMuc5THy2PSt5bDMsktWQcFF8syAmRUapSCGu8ED9W6oDMSgv6Zz8idoc4a6mr8BDzTJY47LJhkJ8UB7WEGuduB' + assert wallet.get_bip32_priv_export(0) == 'xprv9vHkqa6EV4sPZHYqZznhT2NPtPCjKuDKGY38FBWLvgaDx45zo9WQRUT3dKYnjwih2yJD9mkrocEZXo1ex8G81dwSM1fwqWpWkeS3v86pgKt' + assert wallet.get_bip32_pub_export(0) == 'xpub69H7F5d8KSRgmmdJg2KhpAK8SR3DjMwAdkxj3ZuxV27CprR9LgpeyGmXUbC6wb7ERfvrnKZjXoUmmDznezpbZb7ap6r1D3tgFxHmwMkQTPH' + # there are more test vectors but those don't match joinmarket's wallet + # structure, hence they make little sense to test here + + async def test_bip32_test_vector_3(self): + jm_single().config.set('BLOCKCHAIN', 'network', 'mainnet') + + entropy = unhexlify('4b381541583be4423346c643850da4b320e46a87ae3d2a4e6da11eba819cd4acba45d239319ac14f863b8d5ab5a0d0c64d2e8a1e7d1457df2e5a3c51c73235be') + storage = VolatileStorage() + TaprootWallet.initialize( + storage, get_network(), entropy=entropy, max_mixdepth=0) + + # test vector 3 is using hardened derivation for the account/mixdepth level + monkeypatch = MonkeyPatch() + monkeypatch.setattr(TaprootWallet, '_get_mixdepth_from_path', + BIP49Wallet._get_mixdepth_from_path) + monkeypatch.setattr(TaprootWallet, '_get_bip32_mixdepth_path_level', + BIP49Wallet._get_bip32_mixdepth_path_level) + monkeypatch.setattr(TaprootWallet, '_get_bip32_base_path', + BIP32Wallet._get_bip32_base_path) + monkeypatch.setattr(TaprootWallet, '_create_master_key', + BIP32Wallet._create_master_key) + + wallet = TaprootWallet(storage) + await wallet.async_init(storage) + + assert wallet.get_bip32_priv_export() == 'xprv9s21ZrQH143K25QhxbucbDDuQ4naNntJRi4KUfWT7xo4EKsHt2QJDu7KXp1A3u7Bi1j8ph3EGsZ9Xvz9dGuVrtHHs7pXeTzjuxBrCmmhgC6' + assert wallet.get_bip32_pub_export() == 'xpub661MyMwAqRbcEZVB4dScxMAdx6d4nFc9nvyvH3v4gJL378CSRZiYmhRoP7mBy6gSPSCYk6SzXPTf3ND1cZAceL7SfJ1Z3GC8vBgp2epUt13' + assert wallet.get_bip32_priv_export(0) == 'xprv9uPDJpEQgRQfDcW7BkF7eTya6RPxXeJCqCJGHuCJ4GiRVLzkTXBAJMu2qaMWPrS7AANYqdq6vcBcBUdJCVVFceUvJFjaPdGZ2y9WACViL4L' + assert wallet.get_bip32_pub_export(0) == 'xpub68NZiKmJWnxxS6aaHmn81bvJeTESw724CRDs6HbuccFQN9Ku14VQrADWgqbhhTHBaohPX4CjNLf9fq9MYo6oDaPPLPxSb7gwQN3ih19Zm4Y' + + @parametrize( + 'mixdepth,internal,index,address,wif', + [ + (0, BaseWallet.ADDRESS_TYPE_EXTERNAL, 0, +
'bcrt1pwr88j8y5hs57fktnlxvs8ynzpx2v78vcn3z2wjq3gxjhec8naedsenq84j', + 'cUYX9yfrAnbm7LyjiaYUjVAp83pD6WMffaQNyKUf6ubUuFUcwWGx'), + (0, BaseWallet.ADDRESS_TYPE_EXTERNAL, 5, + 'bcrt1pj9y406c0fwtsj6ntnnpzkwzq3tmsa3t9n6rcwelut8cs48a8sp7qmfylrx', + 'cUxhCGWR7DddkKthD2zFf22RLJzQQfPeMvPxQHfYaPNwQy1fB7TH'), + (0, BaseWallet.ADDRESS_TYPE_INTERNAL, 3, + 'bcrt1plajm8x83lekgnvhkxtm5jehmsvlkdfefnxln7lpka0psgk0vn8nqjhgrhn', + 'cTwM3mu54nJt2DJ51RfJxHAivVUdazNW7nXgwaejHfg86Xd6NHe9'), + (2, BaseWallet.ADDRESS_TYPE_INTERNAL, 2, + 'bcrt1pgfhvh4f699qujwnmd9kylv86uf5shc3ecz0ggvte0rza7rejhvwqz3mnal', + 'cPx3oVxi2Frn54n4uFTpfTEbqpgPpqC7RMrcCbUCSNSv1Y9RyLUA') + ]) + async def test_bip32_addresses_p2tr(self, mixdepth, + internal, index, address, wif): + """ + Test with a random but fixed entropy + """ + jm_single().config.set('BLOCKCHAIN', 'network', 'testnet') + + entropy = unhexlify('2e0339ba89b4a1272cdf78b27ee62669ee01992a59e836e2807051be128ca817') + storage = VolatileStorage() + TaprootWallet.initialize( + storage, get_network(), entropy=entropy, max_mixdepth=3) + + monkeypatch = MonkeyPatch() + monkeypatch.setattr(TaprootWallet, '_get_bip32_base_path', + BIP32Wallet._get_bip32_base_path) + monkeypatch.setattr(TaprootWallet, '_create_master_key', + BIP32Wallet._create_master_key) + + wallet = TaprootWallet(storage) + await wallet.async_init(storage) + + # wallet needs to know about all intermediate keys + for i in range(index + 1): + await wallet.get_new_script(mixdepth, internal) + + assert wif == wallet.get_wif(mixdepth, internal, index) + assert address == await wallet.get_addr(mixdepth, internal, index) + + @parametrize( + 'timenumber,address,wif', + [ + (0, + 'bcrt1qj9ewr9kq0043dj90l9w28znydtzcmqgeqs3gua8c2ph6aj5v2d5s459kxa', + 'cW5MjSamNpGVqwd1xMdUa6bHBdkKxCb8QovCrm44juAAfD6N64Ud'), + (50, + 'bcrt1qjsnz39xvguzxjnydg89zkx25rv2sdnlsa9q6q0s0rkk925xru5mqn6en8c', + 'cVXG11bFA6fiey2nAgBwNe7Y4cL1ZqLJ5uYtDiJsXoUV91phNk8n'), + (1, + 
'bcrt1q249qewynmkhyqzplrezg0xjcughgguzgh7wznagewwxpq3838r9sfw2yks', + 'cTGBzJXiSsTArDFNtpyAgDRuumBK4Gj7S6RjuVYiLHytnLNgHGTw') + ]) + async def test_bip32_timelocked_addresses(self, timenumber, + address, wif): + jm_single().config.set('BLOCKCHAIN', 'network', 'testnet') + + entropy = unhexlify('2e0339ba89b4a1272cdf78b27ee62669ee01992a59e836e2807051be128ca817') + storage = VolatileStorage() + TaprootWalletFidelityBonds.initialize( + storage, get_network(), entropy=entropy, max_mixdepth=1) + wallet = TaprootWalletFidelityBonds(storage) + await wallet.async_init(storage) + mixdepth = FidelityBondMixin.FIDELITY_BOND_MIXDEPTH + address_type = FidelityBondMixin.BIP32_TIMELOCK_ID + + assert address == await wallet.get_addr( + mixdepth, address_type, timenumber) + assert wif == wallet.get_wif_path( + wallet.get_path(mixdepth, address_type, timenumber)) + + @parametrize( + 'timenumber,locktime_string', + [ + (0, "2020-01"), + (20, "2021-09"), + (100, "2028-05"), + (150, "2032-07"), + (350, "2049-03") + ]) + @freeze_time("2019-12") + async def test_gettimelockaddress_method(self, + timenumber, locktime_string): + jm_single().config.set("BLOCKCHAIN", "network", "mainnet") + storage = VolatileStorage() + TaprootWalletFidelityBonds.initialize(storage, get_network()) + wallet = TaprootWalletFidelityBonds(storage) + await wallet.async_init(storage) + + m = FidelityBondMixin.FIDELITY_BOND_MIXDEPTH + address_type = FidelityBondMixin.BIP32_TIMELOCK_ID + script = await wallet.get_script(m, address_type, timenumber) + addr = await wallet.script_to_addr(script) + + addr_from_method = await wallet_gettimelockaddress( + wallet, locktime_string) + + assert addr == addr_from_method + + @freeze_time("2021-01") + async def test_gettimelockaddress_in_past(self): + jm_single().config.set("BLOCKCHAIN", "network", "mainnet") + storage = VolatileStorage() + TaprootWalletFidelityBonds.initialize(storage, get_network()) + wallet = TaprootWalletFidelityBonds(storage) + await 
wallet.async_init(storage) + + assert await wallet_gettimelockaddress(wallet, "2020-01") == "" + assert await wallet_gettimelockaddress(wallet, "2021-01") == "" + assert await wallet_gettimelockaddress(wallet, "2021-02") != "" + + @parametrize( + 'index,wif', + [ + (0, 'cU3iQ73p1mYyJ9aDY4VahGFG8cqK3QAW3VeSiStEXm1sBiFgdiSJ'), + (9, 'cT2X1VVE48NfAiuPzgsc8ogJ19cXWV17S4AkUgzWD61jEd6ZtezZ'), + (50, 'cQrqAeoSVFHUM2wkt11YkCUc3erkVkhr2KaxrbqxKtuA8ztt2qCr') + ]) + async def test_bip32_burn_keys(self, index, wif): + jm_single().config.set('BLOCKCHAIN', 'network', 'testnet') + + entropy = unhexlify('2e0339ba89b4a1272cdf78b27ee62669ee01992a59e836e2807051be128ca817') + storage = VolatileStorage() + TaprootWalletFidelityBonds.initialize( + storage, get_network(), entropy=entropy, max_mixdepth=1) + wallet = TaprootWalletFidelityBonds(storage) + await wallet.async_init(storage) + mixdepth = FidelityBondMixin.FIDELITY_BOND_MIXDEPTH + address_type = FidelityBondMixin.BIP32_BURN_ID + + #advance index_cache enough + wallet.set_next_index(mixdepth, address_type, index, force=True) + + assert wif == wallet.get_wif_path( + wallet.get_path(mixdepth, address_type, index)) + + async def test_import_key(self): + jm_single().config.set('BLOCKCHAIN', 'network', 'testnet') + storage = VolatileStorage() + TaprootWallet.initialize(storage, get_network()) + wallet = TaprootWallet(storage) + await wallet.async_init(storage) + + await wallet.import_private_key( + 0, 'cRAGLvPmhpzJNgdMT4W2gVwEW3fusfaDqdQWM2vnWLgXKzCWKtcM') + await wallet.import_private_key( + 1, 'cVqtSSoVxFyPqTRGfeESi31uCYfgTF4tGWRtGeVs84fzybiX5TPk') + + with pytest.raises(WalletError): + await wallet.import_private_key( + 1, 'cRAGLvPmhpzJNgdMT4W2gVwEW3fusfaDqdQWM2vnWLgXKzCWKtcM') + + # test persist imported keys + wallet.save() + data = storage.file_data + + del wallet + del storage + + storage = VolatileStorage(data=data) + wallet = TaprootWallet(storage) + await wallet.async_init(storage) + + imported_paths_md0 = 
list(wallet.yield_imported_paths(0)) + imported_paths_md1 = list(wallet.yield_imported_paths(1)) + assert len(imported_paths_md0) == 1 + assert len(imported_paths_md1) == 1 + + # verify imported addresses + assert await wallet.get_address_from_path(imported_paths_md0[0]) == \ + 'bcrt1p3e8d2nwlpf6rm0q36auq736cpj5y5uw337kf2nj9yn9tkg48n9dq5zgmdq' + assert await wallet.get_address_from_path(imported_paths_md1[0]) == \ + 'bcrt1ph8wfv0zm42lgvd23xe2070khe285grmum6fm8ehv7e2zkpnvcs6qjjm7nr' + + # test remove key + await wallet.remove_imported_key(path=imported_paths_md0[0]) + assert not list(wallet.yield_imported_paths(0)) + + assert wallet.get_details(imported_paths_md1[0]) == (1, 'imported', 0) + + @parametrize( + 'wif, type_check', + [ + ('cRAGLvPmhpzJNgdMT4W2gVwEW3fusfaDqdQWM2vnWLgXKzCWKtcM', + assert_taproot) + ]) + async def test_signing_imported(self, wif, type_check): + jm_single().config.set('BLOCKCHAIN', 'network', 'testnet') + storage = VolatileStorage() + TaprootWallet.initialize(storage, get_network()) + wallet = TaprootWallet(storage) + await wallet.async_init(storage) + + MIXDEPTH = 0 + path = await wallet.import_private_key(MIXDEPTH, wif) + addr = await wallet.get_address_from_path(path) + utxo = fund_wallet_addr(wallet, addr) + # The dummy output is constructed as an unspendable p2tr: + p2tr_script = btc.CScript(bytes.fromhex('5120' + '00'*32)) + tx = btc.mktx([utxo], + [{"address": + str(btc.CCoinAddress.from_scriptPubKey(p2tr_script)), + "value": 10**8 - 9000}]) + script = await wallet.get_script_from_path(path) + success, msg = await wallet.sign_tx(tx, {0: (script, 10**8)}) + assert success, msg + type_check(tx) + txout = jm_single().bc_interface.pushtx(tx.serialize()) + assert txout + + @parametrize( + 'wallet_cls,type_check', + [ + (TaprootWallet, assert_taproot), + ]) + async def test_signing_simple(self, wallet_cls, type_check): + jm_single().config.set('BLOCKCHAIN', 'network', 'testnet') + storage = VolatileStorage() + wallet_cls.initialize(storage,
get_network(), entropy=b"\xaa"*16) + wallet = wallet_cls(storage) + await wallet.async_init(storage) + addr = await wallet.get_internal_addr(0) + utxo = fund_wallet_addr(wallet, addr) + path = "m/86'/1'/0'/0/0" + privkey, engine = wallet._get_key_from_path( + wallet.path_repr_to_path(path)) + pubkey = engine.privkey_to_pubkey(privkey) + tx = btc.mktx([utxo], + [{"address": str(btc.CCoinAddress.from_scriptPubKey( + btc.pubkey_to_p2tr_script(pubkey))), + "value": 10**8 - 9000}]) + script = await wallet.get_script( + 0, BaseWallet.ADDRESS_TYPE_INTERNAL, 0) + success, msg = await wallet.sign_tx(tx, {0: (script, 10**8)}) + assert success, msg + type_check(tx) + txout = jm_single().bc_interface.pushtx(tx.serialize()) + assert txout + + # note that address validation is tested separately; + # this test functions only to make sure that given a valid + # taproot address, we can actually spend to it + @parametrize( + 'hexspk', + [ + ("512091b64d5324723a985170e4dc5a0f84c041804f2cd12660fa5dec09fc21783605",), + ("5120147c9c57132f6e7ecddba9800bb0c4449251c92a1e60371ee77557b6620f3ea3",), + ("5120712447206d7a5238acc7ff53fbe94a3b64539ad291c7cdbc490b7577e4b17df5",), + ]) + async def test_spend_to_p2traddr(self, hexspk): + storage = VolatileStorage() + TaprootWallet.initialize(storage, get_network(), entropy=b"\xaa"*16) + wallet = TaprootWallet(storage) + await wallet.async_init(storage) + addr = await wallet.get_internal_addr(0) + utxo = fund_wallet_addr(wallet, addr) + sPK = btc.CScript(hextobin(hexspk)) + tx = btc.mktx( + [utxo], + [{"address": str(btc.CCoinAddress.from_scriptPubKey(sPK)), + "value": 10**8 - 9000}]) + script = await wallet.get_script( + 0, BaseWallet.ADDRESS_TYPE_INTERNAL, 0) + success, msg = await wallet.sign_tx(tx, {0: (script, 10**8)}) + assert success, msg + txout = jm_single().bc_interface.pushtx(tx.serialize()) + assert txout + # probably unnecessary, but since we are sanity checking: + # does the output of the in-mempool tx have the sPK we expect? 
+ txid = tx.GetTxid()[::-1] + txres = btc.CTransaction.deserialize(hextobin(jm_single().bc_interface._rpc( + "getrawtransaction", [bintohex(txid), True])["hex"])) + assert txres.vout[0].scriptPubKey == sPK + assert txres.vout[0].nValue == 10**8 - 9000 + + async def test_timelocked_output_signing(self): + jm_single().config.set('BLOCKCHAIN', 'network', 'testnet') + ensure_bip65_activated() + storage = VolatileStorage() + TaprootWalletFidelityBonds.initialize(storage, get_network()) + wallet = TaprootWalletFidelityBonds(storage) + await wallet.async_init(storage) + + timenumber = 0 + script = await wallet.get_script( + FidelityBondMixin.FIDELITY_BOND_MIXDEPTH, + FidelityBondMixin.BIP32_TIMELOCK_ID, timenumber) + utxo = fund_wallet_addr(wallet, await wallet.script_to_addr(script)) + timestamp = wallet._time_number_to_timestamp(timenumber) + + tx = btc.mktx([utxo], [{ + "address": str(btc.CCoinAddress.from_scriptPubKey( + btc.standard_scripthash_scriptpubkey(btc.Hash160(b"\x00")))), + "value":10**8 - 9000}], locktime=timestamp+1) + success, msg = await wallet.sign_tx(tx, {0: (script, 10**8)}) + assert success, msg + txout = jm_single().bc_interface.pushtx(tx.serialize()) + assert txout + + async def test_get_bbm(self): + jm_single().config.set('BLOCKCHAIN', 'network', 'testnet') + amount = 10**8 + num_tx = 3 + wallet = await get_populated_wallet(amount, num_tx) + # disable a utxo and check we can correctly report + # balance with the disabled flag off: + utxos = await wallet._utxos.get_utxos_at_mixdepth(0) + utxo_1 = list(utxos.keys())[0] + wallet.disable_utxo(*utxo_1) + balances = wallet.get_balance_by_mixdepth(include_disabled=True) + assert balances[0] == num_tx * amount + balances = wallet.get_balance_by_mixdepth() + assert balances[0] == (num_tx - 1) * amount + wallet.toggle_disable_utxo(*utxo_1) + balances = wallet.get_balance_by_mixdepth() + assert balances[0] == num_tx * amount + + async def test_add_utxos(self): + jm_single().config.set('BLOCKCHAIN', 
'network', 'testnet') + amount = 10**8 + num_tx = 3 + + wallet = await get_populated_wallet(amount, num_tx) + + balances = wallet.get_balance_by_mixdepth() + assert balances[0] == num_tx * amount + for md in range(1, wallet.max_mixdepth + 1): + assert balances[md] == 0 + + utxos = await wallet.get_utxos_by_mixdepth() + assert len(utxos[0]) == num_tx + for md in range(1, wallet.max_mixdepth + 1): + assert not utxos[md] + + with pytest.raises(Exception): + # no funds in mixdepth + await wallet.select_utxos(1, amount) + + with pytest.raises(Exception): + # not enough funds + await wallet.select_utxos(0, amount * (num_tx + 1)) + + wallet.reset_utxos() + assert wallet.get_balance_by_mixdepth()[0] == 0 + + async def test_select_utxos(self): + jm_single().config.set('BLOCKCHAIN', 'network', 'testnet') + amount = 10**8 + + wallet = await get_populated_wallet(amount) + utxos = await wallet.select_utxos(0, amount // 2) + + assert len(utxos) == 1 + utxos = list(utxos.keys()) + + more_utxos = await wallet.select_utxos( + 0, int(amount * 1.5), utxo_filter=utxos) + assert len(more_utxos) == 2 + assert utxos[0] not in more_utxos + + async def test_add_new_utxos(self): + jm_single().config.set('BLOCKCHAIN', 'network', 'testnet') + wallet = await get_populated_wallet(num=1) + + scripts = [(await wallet.get_new_script( + x, BaseWallet.ADDRESS_TYPE_INTERNAL)) + for x in range(3)] + tx_scripts = list(scripts) + tx = btc.mktx( + [(b"\x00"*32, 2)], + [{"address": await wallet.script_to_addr(s), + "value": 10**8} for s in tx_scripts]) + added = wallet.add_new_utxos(tx, 1) + assert len(added) == len(scripts) + + added_scripts = {x['script'] for x in added.values()} + for s in scripts: + assert s in added_scripts + + balances = wallet.get_balance_by_mixdepth() + assert balances[0] == 2 * 10**8 + assert balances[1] == 10**8 + assert balances[2] == 10**8 + assert len(balances) == wallet.max_mixdepth + 1 + + async def test_remove_old_utxos(self): + jm_single().config.set('BLOCKCHAIN', 
'network', 'testnet') + wallet = await get_populated_wallet() + + # add some more utxos to mixdepth 1 + for i in range(3): + addr = await wallet.get_internal_addr(1) + txin = jm_single().bc_interface.grab_coins(addr, 1) + script = await wallet.get_script( + 1, BaseWallet.ADDRESS_TYPE_INTERNAL, i) + wallet.add_utxo(btc.x(txin), 0, script, 10**8, 1) + + inputs = await wallet.select_utxos(0, 10**8) + inputs.update(await wallet.select_utxos(1, 2 * 10**8)) + assert len(inputs) == 3 + + tx_inputs = list(inputs.keys()) + tx_inputs.append((b'\x12'*32, 6)) + + tx = btc.mktx(tx_inputs, + [{"address": "2N9gfkUsFW7Kkb1Eurue7NzUxUt7aNJiS1U", + "value": 3 * 10**8 - 1000}]) + + removed = await wallet.remove_old_utxos(tx) + assert len(removed) == len(inputs) + + for txid in removed: + assert txid in inputs + + balances = wallet.get_balance_by_mixdepth() + assert balances[0] == 2 * 10**8 + assert balances[1] == 10**8 + assert balances[2] == 0 + assert len(balances) == wallet.max_mixdepth + 1 + + async def test_address_labels(self): + wallet = await get_populated_wallet(num=2) + addr1 = await wallet.get_internal_addr(0) + addr2 = await wallet.get_internal_addr(1) + assert wallet.get_address_label(addr2) is None + assert wallet.get_address_label(addr2) is None + wallet.set_address_label(addr1, "test") + # utf-8 characters here are on purpose, to test utf-8 encoding / decoding + wallet.set_address_label(addr2, "glāžšķūņu rūķīši") + assert wallet.get_address_label(addr1) == "test" + assert wallet.get_address_label(addr2) == "glāžšķūņu rūķīši" + wallet.set_address_label(addr1, "") + wallet.set_address_label(addr2, None) + assert wallet.get_address_label(addr2) is None + assert wallet.get_address_label(addr2) is None + with pytest.raises(UnknownAddressForLabel): + wallet.get_address_label("2MzY5yyonUY7zpHspg7jB7WQs1uJxKafQe4") + wallet.set_address_label("2MzY5yyonUY7zpHspg7jB7WQs1uJxKafQe4", + "test") + # we no longer decode addresses just to see if we know about them, + # so we won't 
get a CCoinAddressError for invalid addresses + #with pytest.raises(CCoinAddressError): + wallet.get_address_label("badaddress") + wallet.set_address_label("badaddress", "test") + + async def test_initialize_twice(self): + wallet = await get_populated_wallet(num=0) + storage = wallet._storage + with pytest.raises(WalletError): + TaprootWallet.initialize(storage, get_network()) + + async def test_is_known(self): + wallet = await get_populated_wallet(num=0) + script = await wallet.get_new_script( + 1, BaseWallet.ADDRESS_TYPE_INTERNAL) + addr = await wallet.get_external_addr(2) + + assert wallet.is_known_script(script) + assert wallet.is_known_addr(addr) + assert wallet.is_known_addr(await wallet.script_to_addr(script)) + assert wallet.is_known_script(wallet.addr_to_script(addr)) + + assert not wallet.is_known_script(b'\x12' * len(script)) + assert not wallet.is_known_addr('2MzY5yyonUY7zpHspg7jB7WQs1uJxKafQe4') + + async def test_wallet_save(self): + wallet = await get_populated_wallet() + + script = await wallet.get_external_script(1) + + wallet.save() + storage = wallet._storage + data = storage.file_data + + del wallet + del storage + + storage = VolatileStorage(data=data) + wallet = TaprootWallet(storage) + await wallet.async_init(storage) + + assert wallet.get_next_unused_index(0, BaseWallet.ADDRESS_TYPE_INTERNAL) == 3 + assert wallet.get_next_unused_index(0, BaseWallet.ADDRESS_TYPE_EXTERNAL) == 0 + assert wallet.get_next_unused_index(1, BaseWallet.ADDRESS_TYPE_INTERNAL) == 0 + assert wallet.get_next_unused_index(1, BaseWallet.ADDRESS_TYPE_EXTERNAL) == 1 + assert wallet.is_known_script(script) + + async def test_set_next_index(self): + wallet = await get_populated_wallet() + + assert wallet.get_next_unused_index(0, + BaseWallet.ADDRESS_TYPE_INTERNAL) == 3 + + with pytest.raises(Exception): + # cannot advance index without force=True + wallet.set_next_index(0, BaseWallet.ADDRESS_TYPE_INTERNAL, 5) + + wallet.set_next_index(0, BaseWallet.ADDRESS_TYPE_INTERNAL, 1) + 
assert wallet.get_next_unused_index(0, BaseWallet.ADDRESS_TYPE_INTERNAL) == 1 + + wallet.set_next_index(0, BaseWallet.ADDRESS_TYPE_INTERNAL, 20, force=True) + assert wallet.get_next_unused_index(0, BaseWallet.ADDRESS_TYPE_INTERNAL) == 20 + + script = await wallet.get_new_script( + 0, BaseWallet.ADDRESS_TYPE_INTERNAL) + path = wallet.script_to_path(script) + index = wallet.get_details(path)[2] + assert index == 20 + + async def test_path_repr(self): + wallet = await get_populated_wallet() + path = wallet.get_path(2, BIP32Wallet.ADDRESS_TYPE_EXTERNAL, 0) + path_repr = wallet.get_path_repr(path) + path_new = wallet.path_repr_to_path(path_repr) + + assert path_new == path + + async def test_path_repr_imported(self): + wallet = await get_populated_wallet(num=0) + path = await wallet.import_private_key( + 0, 'cRAGLvPmhpzJNgdMT4W2gVwEW3fusfaDqdQWM2vnWLgXKzCWKtcM') + path_repr = wallet.get_path_repr(path) + path_new = wallet.path_repr_to_path(path_repr) + + assert path_new == path + + async def test_wrong_wallet_cls(self): + storage = VolatileStorage() + TaprootWallet.initialize(storage, get_network()) + wallet = TaprootWallet(storage) + await wallet.async_init(storage) + + wallet.save() + data = storage.file_data + + del wallet + del storage + + storage = VolatileStorage(data=data) + + with pytest.raises(Exception): + wallet = LegacyWallet(storage) + await wallet.async_init(storage) + + async def test_wallet_id(self): + storage1 = VolatileStorage() + TaprootWallet.initialize(storage1, get_network()) + wallet1 = TaprootWallet(storage1) + await wallet1.async_init(storage1) + + storage2 = VolatileStorage() + LegacyWallet.initialize(storage2, get_network(), + entropy=wallet1._entropy) + wallet2 = LegacyWallet(storage2) + await wallet2.async_init(storage2) + + assert wallet1.get_wallet_id() != wallet2.get_wallet_id() + + storage2 = VolatileStorage() + TaprootWallet.initialize(storage2, get_network(), + entropy=wallet1._entropy) + wallet2 = TaprootWallet(storage2) + await 
wallet2.async_init(storage2) + + assert wallet1.get_wallet_id() == wallet2.get_wallet_id() + + async def test_cache_cleared(self): + orig_bc_interface = jm_single().bc_interface + + def place_back_bc_interface(): + jm_single().bc_interface = orig_bc_interface + + self.addCleanup(place_back_bc_interface) + time_ms = int(time.time() * 1000) + jm_single().bc_interface = get_blockchain_interface_instance( + jm_single().config, + rpc_wallet_name=f'jm-test-taproot-wallet-noprivkeys-{time_ms}') + # test plan: + # 1. create a new wallet and sync from scratch + # 2. read its cache as an object + # 3. close the wallet, reopen it, sync it. + # 4. corrupt its cache and save. + # 5. Re open the wallet with recoversync + # and check that the corrupted data is not present. + if os.path.exists(test_cache_cleared_filename): + os.remove(test_cache_cleared_filename) + wallet = await create_wallet(test_cache_cleared_filename, + b"hunter2", 2, TaprootWallet) + # note: we use the WalletService as an encapsulation + # of the wallet here because we want to be able to sync, + # but we do not actually start the service and go into + # the monitoring loop. 
+ wallet_service = WalletService(wallet) + # default fast sync, no coins, so no loop + await wallet_service.sync_wallet() + wallet_service.update_blockheight() + # to get the cache to save, we need to + # use an address: + addr = await wallet_service.get_new_addr(0,0) + orig_bc_interface.grab_coins(addr, 1.0) + await wallet_service.transaction_monitor() + path_to_corrupt = list(wallet._cache.keys())[0] + # we'll just corrupt the first address and script: + entry_to_corrupt = wallet._cache[path_to_corrupt][b"86'"][b"1'"][b"0'"][b'0'][b'0'] + entry_to_corrupt[b'A'] = "notanaddress" + entry_to_corrupt[b'S'] = "notascript" + wallet_service.wallet.save() + wallet_service.wallet.close() + jm_single().config.set("POLICY", "wallet_caching_disabled", "true") + wallet2 = await open_wallet(test_cache_cleared_filename, + ask_for_password=False, + password=b"hunter2") + jm_single().config.set("POLICY", "wallet_caching_disabled", "false") + wallet_service2 = WalletService(wallet2) + while not wallet_service2.synced: + await wallet_service2.sync_wallet(fast=False) + await wallet_service.transaction_monitor() + # we ignored the corrupt cache? 
+ assert wallet_service2.get_balance_at_mixdepth(0) == 10 ** 8 + + async def test_addr_script_conversion(self): + wallet = await get_populated_wallet(num=1) + + path = wallet.get_path(0, BaseWallet.ADDRESS_TYPE_INTERNAL, 0) + script = await wallet.get_script_from_path(path) + addr = await wallet.script_to_addr(script) + + assert script == wallet.addr_to_script(addr) + addr_path = wallet.addr_to_path(addr) + assert path == addr_path + + async def test_imported_key_removed(self): + wif = 'cRAGLvPmhpzJNgdMT4W2gVwEW3fusfaDqdQWM2vnWLgXKzCWKtcM' + + storage = VolatileStorage() + TaprootWallet.initialize(storage, get_network()) + wallet = TaprootWallet(storage) + await wallet.async_init(storage) + + path = await wallet.import_private_key(1, wif) + script = await wallet.get_script_from_path(path) + assert wallet.is_known_script(script) + + await wallet.remove_imported_key(path=path) + assert not wallet.is_known_script(script) + + with pytest.raises(WalletError): + await wallet.get_script_from_path(path) + + async def test_wallet_mixdepth_simple(self): + wallet = await get_populated_wallet(num=0) + mixdepth = wallet.mixdepth + assert wallet.max_mixdepth == mixdepth + + wallet.close() + storage_data = wallet._storage.file_data + + storage = VolatileStorage(data=storage_data) + new_wallet = type(wallet)(storage) + await new_wallet.async_init(storage) + assert new_wallet.mixdepth == mixdepth + assert new_wallet.max_mixdepth == mixdepth + + async def test_wallet_mixdepth_increase(self): + wallet = await get_populated_wallet(num=0) + mixdepth = wallet.mixdepth + + wallet.close() + storage_data = wallet._storage.file_data + + new_mixdepth = mixdepth + 2 + storage = VolatileStorage(data=storage_data) + new_wallet = type(wallet)(storage, mixdepth=new_mixdepth) + await new_wallet.async_init(storage, mixdepth=new_mixdepth) + assert new_wallet.mixdepth == new_mixdepth + assert new_wallet.max_mixdepth == new_mixdepth + + async def test_wallet_mixdepth_decrease(self): + wallet = await 
get_populated_wallet(num=1) + + # setup + max_mixdepth = wallet.max_mixdepth + assert max_mixdepth >= 1, "bad default value for mixdepth for this test" + addr = await wallet.get_internal_addr(max_mixdepth) + utxo = fund_wallet_addr(wallet, addr, 1) + bci = jm_single().bc_interface + unspent_list = bci.listunspent(0) + # filter on label, but note (a) in certain circumstances (in- + # wallet transfer) it is possible for the utxo to be labeled + # with the external label, and (b) the wallet will know if it + # belongs or not anyway (is_known_addr): + our_unspent_list = [x for x in unspent_list if ( + bci.is_address_labeled(x, wallet.get_wallet_name()))] + assert wallet.get_balance_by_mixdepth()[max_mixdepth] == 10**8 + wallet.close() + storage_data = wallet._storage.file_data + + # actual test + orig_bc_interface = jm_single().bc_interface + + def place_back_bc_interface(): + jm_single().bc_interface = orig_bc_interface + + self.addCleanup(place_back_bc_interface) + time_ms = int(time.time() * 1000) + jm_single().bc_interface = get_blockchain_interface_instance( + jm_single().config, + rpc_wallet_name=f'jm-test-taproot-wallet-noprivkeys-{time_ms}') + + new_mixdepth = max_mixdepth - 1 + storage = VolatileStorage(data=storage_data) + new_wallet = type(wallet)(storage, mixdepth=new_mixdepth) + await new_wallet.async_init(storage, mixdepth=new_mixdepth) + assert new_wallet.max_mixdepth == max_mixdepth + assert new_wallet.mixdepth == new_mixdepth + await sync_test_wallet(True, WalletService(new_wallet)) + + assert max_mixdepth not in new_wallet.get_balance_by_mixdepth() + assert max_mixdepth not in await new_wallet.get_utxos_by_mixdepth() + + # wallet.select_utxos will still return utxos from higher mixdepths + # because we explicitly ask for a specific mixdepth + assert utxo in await new_wallet.select_utxos(max_mixdepth, 10**7) + + async def test_watchonly_wallet(self): + jm_single().config.set('BLOCKCHAIN', 'network', 'testnet') + storage = VolatileStorage() + 
TaprootWalletFidelityBonds.initialize(storage, get_network()) + wallet = TaprootWalletFidelityBonds(storage) + await wallet.async_init(storage) + + paths = [ + "m/86'/1'/0'/0/0", + "m/86'/1'/0'/1/0", + "m/86'/1'/0'/2/0:1577836800", + "m/86'/1'/0'/2/0:2314051200" + ] + burn_path = "m/49'/1'/0'/3/0" + + scripts = [ + await wallet.get_script_from_path(wallet.path_repr_to_path(path)) + for path in paths] + privkey, engine = wallet._get_key_from_path( + wallet.path_repr_to_path(burn_path)) + burn_pubkey = engine.privkey_to_pubkey(privkey) + + master_pub_key = wallet.get_bip32_pub_export( + FidelityBondMixin.FIDELITY_BOND_MIXDEPTH) + watchonly_storage = VolatileStorage() + entropy = FidelityBondMixin.get_xpub_from_fidelity_bond_master_pub_key( + master_pub_key).encode() + FidelityBondWatchonlyWallet.initialize(watchonly_storage, get_network(), + entropy=entropy) + watchonly_wallet = FidelityBondWatchonlyWallet(watchonly_storage) + await watchonly_wallet.async_init(watchonly_storage) + + watchonly_scripts = [ + await watchonly_wallet.get_script_from_path( + watchonly_wallet.path_repr_to_path(path)) for path in paths] + privkey, engine = wallet._get_key_from_path(wallet.path_repr_to_path(burn_path)) + watchonly_burn_pubkey = engine.privkey_to_pubkey(privkey) + + for script, watchonly_script in zip(scripts, watchonly_scripts): + assert script == watchonly_script + assert burn_pubkey == watchonly_burn_pubkey + + async def test_calculate_timelocked_fidelity_bond_value(self): + EPSILON = 0.000001 + YEAR = 60*60*24*356.25 + + # the function should be flat anywhere before the locktime ends + values = [FidelityBondMixin.calculate_timelocked_fidelity_bond_value( + utxo_value=100000000, + confirmation_time=0, + locktime=6*YEAR, + current_time=y*YEAR, + interest_rate=0.01 + ) + for y in range(4) + ] + value_diff = [values[i] - values[i+1] for i in range(len(values)-1)] + for vd in value_diff: + assert abs(vd) < EPSILON + + # after locktime, the value should go down + values = 
[FidelityBondMixin.calculate_timelocked_fidelity_bond_value( + utxo_value=100000000, + confirmation_time=0, + locktime=6*YEAR, + current_time=(6+y)*YEAR, + interest_rate=0.01 + ) + for y in range(5) + ] + value_diff = [values[i+1] - values[i] for i in range(len(values)-1)] + for vrd in value_diff: + assert vrd < 0 + + # value of a bond goes up as the locktime goes up + values = [FidelityBondMixin.calculate_timelocked_fidelity_bond_value( + utxo_value=100000000, + confirmation_time=0, + locktime=y*YEAR, + current_time=0, + interest_rate=0.01 + ) + for y in range(5) + ] + value_ratio = [values[i] / values[i+1] for i in range(len(values)-1)] + value_ratio_diff = [value_ratio[i] - value_ratio[i+1] + for i in range(len(value_ratio)-1)] + for vrd in value_ratio_diff: + assert vrd < 0 + + # value of a bond locked into the far future is constant, + # clamped at the value of burned coins + values = [FidelityBondMixin.calculate_timelocked_fidelity_bond_value( + utxo_value=100000000, + confirmation_time=0, + locktime=(200+y)*YEAR, + current_time=0, + interest_rate=0.01 + ) + for y in range(5) + ] + value_diff = [values[i] - values[i+1] for i in range(len(values)-1)] + for vd in value_diff: + assert abs(vd) < EPSILON + + @parametrize( + 'password, wallet_cls', + [ + ("hunter2", TaprootWallet), + ]) + async def test_create_wallet(self, password, wallet_cls): + wallet_name = test_create_wallet_filename + password = password.encode("utf-8") + # test mainnet (we are not transacting) + btc.select_chain_params("bitcoin") + wallet = await create_wallet(wallet_name, password, 4, wallet_cls) + mnemonic = wallet.get_mnemonic_words()[0] + addr = await wallet.get_addr(0,0,0) + firstkey = wallet.get_key_from_addr(addr) + print("Created mnemonic, firstkey: ", mnemonic, firstkey) + wallet.close() + # ensure that the wallet file created is openable with the password, + # and has the parameters that were claimed on creation: + new_wallet = await open_test_wallet_maybe( + wallet_name, "", 4, 
password=password, ask_for_password=False) + assert new_wallet.get_mnemonic_words()[0] == mnemonic + addr = await new_wallet.get_addr(0,0,0) + assert new_wallet.get_key_from_addr(addr) == firstkey + os.remove(wallet_name) + btc.select_chain_params("bitcoin/regtest") + + @parametrize( + 'wallet_cls', + [ + (TaprootWallet,), + (TaprootWalletFidelityBonds,) + ]) + async def test_is_standard_wallet_script(self, wallet_cls): + storage = VolatileStorage() + wallet_cls.initialize( + storage, get_network(), max_mixdepth=0) + wallet = wallet_cls(storage) + await wallet.async_init(storage) + script = await wallet.get_new_script(0, 1) + assert wallet.is_known_script(script) + path = wallet.script_to_path(script) + assert await wallet.is_standard_wallet_script(path) + + async def test_is_standard_wallet_script_nonstandard(self): + storage = VolatileStorage() + TaprootWalletFidelityBonds.initialize( + storage, get_network(), max_mixdepth=0) + wallet = TaprootWalletFidelityBonds(storage) + await wallet.async_init(storage) + import_path = await wallet.import_private_key( + 0, 'cRAGLvPmhpzJNgdMT4W2gVwEW3fusfaDqdQWM2vnWLgXKzCWKtcM') + assert await wallet.is_standard_wallet_script(import_path) + ts = wallet.datetime_to_time_number( + datetime.datetime.strptime("2021-07", "%Y-%m")) + tl_path = wallet.get_path(0, wallet.BIP32_TIMELOCK_ID, ts) + assert not await wallet.is_standard_wallet_script(tl_path) diff --git a/test/jmclient/test_wallet.py b/test/jmclient/test_wallet.py index baf174d..d09d059 100644 --- a/test/jmclient/test_wallet.py +++ b/test/jmclient/test_wallet.py @@ -530,27 +530,6 @@ class AsyncioTestCase(IsolatedAsyncioTestCase, ParametrizedTestCase): txout = jm_single().bc_interface.pushtx(tx.serialize()) assert txout - async def test_signing_simple_p2tr(self): - jm_single().config.set('BLOCKCHAIN', 'network', 'testnet') - storage = VolatileStorage() - TaprootWallet.initialize(storage, get_network(), entropy=b"\xaa"*16) - wallet = TaprootWallet(storage) - await 
wallet.async_init(storage) - addr = await wallet.get_internal_addr(0) - utxo = fund_wallet_addr(wallet, addr) - # The dummy output is constructed as an unspendable p2sh: - tx = btc.mktx([utxo], - [{"address": str(btc.CCoinAddress.from_scriptPubKey( - btc.CScript(b"\x00").to_p2sh_scriptPubKey())), - "value": 10**8 - 9000}]) - script = await wallet.get_script( - 0, BaseWallet.ADDRESS_TYPE_INTERNAL, 0) - success, msg = await wallet.sign_tx(tx, {0: (script, 10**8)}) - assert success, msg - assert_segwit(tx) - txout = jm_single().bc_interface.pushtx(tx.serialize()) - assert txout - # note that address validation is tested separately; # this test functions only to make sure that given a valid # taproot address, we can actually spend to it diff --git a/test/jmfrost/chilldkg_example.py b/test/jmfrost/chilldkg_example.py old mode 100755 new mode 100644 diff --git a/test/jmfrost/test_chilldkg_ref.py b/test/jmfrost/test_chilldkg_ref.py old mode 100755 new mode 100644 diff --git a/test/jmfrost/test_frost_ref.py b/test/jmfrost/test_frost_ref.py old mode 100755 new mode 100644 index ff58071..b978969 --- a/test/jmfrost/test_frost_ref.py +++ b/test/jmfrost/test_frost_ref.py @@ -2,9 +2,22 @@ import json import os +import secrets import sys +import time +from typing import List, Optional, Tuple -from .trusted_keygen import trusted_dealer_keygen +from jmfrost.frost_ref.reference import ( + PlainPk, XonlyPk, nonce_agg, SessionContext, partial_sig_verify, + partial_sig_agg, get_xonly_pk, group_pubkey_and_tweak, individual_pk, + deterministic_sign, cbytes, sign, InvalidContributionError, + check_frost_key_compatibility, check_pubshares_correctness, + check_group_pubkey_correctness, nonce_gen_internal, AGGREGATOR_ID, + partial_sig_verify_internal, nonce_gen) +from jmfrost.frost_ref.utils.bip340 import ( + schnorr_verify, bytes_from_int, int_from_bytes, point_mul, G, n) + +from trusted_keygen import trusted_dealer_keygen def fromhex_all(l): @@ -402,7 +415,8 @@ def test_sig_agg_vectors(): 
session_ctx = SessionContext(aggnonce_tmp, ids_tmp, pubshares_tmp, tweaks_tmp, tweak_modes_tmp, msg) assert_raises(exception, lambda: partial_sig_agg(psigs_tmp, ids_tmp, session_ctx), except_fn) -def test_sign_and_verify_random(iterations: int) -> None: +def test_sign_and_verify_random() -> None: + iterations = 4 for itr in range(iterations): secure_rng = secrets.SystemRandom() # randomly choose a number: 2 <= number <= 10 diff --git a/test/jmfrost/trusted_keygen.py b/test/jmfrost/trusted_keygen.py new file mode 100644 index 0000000..2cbbbd2 --- /dev/null +++ b/test/jmfrost/trusted_keygen.py @@ -0,0 +1,152 @@ +# Implementation of the Trusted Dealer Key Generation approach for FROST mentioned +# in https://datatracker.ietf.org/doc/draft-irtf-cfrg-frost/15/ (Appendix D). +# +# It's worth noting that this isn't the only compatible method (with BIP FROST Signing), +# there are alternative key generation methods available, such as BIP-FROST-DKG: +# https://github.com/BlockstreamResearch/bip-frost-dkg + +# todo: use the `Scalar` type like BIP-DKG? 
+#todo: this shows mypy error, but the file runs + +from typing import Tuple, List, NewType +import unittest +# todo: replace random module with secrets +import random +# for [1] import functions from reference +# [2] specify path for bip340 when running reference.py +# import sys, os +# script_dir = os.path.dirname(os.path.abspath(__file__)) +# parent_dir = os.path.abspath(os.path.join(script_dir, '..')) +# sys.path.append(parent_dir) +from jmfrost.frost_ref.utils.bip340 import ( + Point, n as curve_order, bytes_from_int, + point_mul, G, has_even_y, x +) + +# point on the secret polynomial, represents a signer's secret share +PolyPoint = Tuple[int, int] +# point on the secp256k1 curve, represents a signer's public share +ECPoint = Point + +# +# The following helper functions and types were copied from reference.py +# +PlainPk = NewType('PlainPk', bytes) + +def xbytes(P: Point) -> bytes: + return bytes_from_int(x(P)) + +def cbytes(P: Point) -> bytes: + a = b'\x02' if has_even_y(P) else b'\x03' + return a + xbytes(P) + +def derive_interpolating_value_internal(L: List[int], x_i: int) -> int: + num, deno = 1, 1 + for x_j in L: + if x_j == x_i: + continue + num *= x_j + deno *= (x_j - x_i) + return num * pow(deno, curve_order - 2, curve_order) % curve_order +# +# End of helper functions and types copied from reference.py. +# + +# evaluates poly using Horner's method, assuming coeff[0] corresponds +# to the coefficient of highest degree term +def polynomial_evaluate(coeffs: List[int], x: int) -> int: + res = 0 + for coeff in coeffs: + res = res * x + coeff + return res % curve_order + + +def secret_share_combine(shares: List[PolyPoint]) -> int: + x_coords = [] + for (x, y) in shares: + x_coords.append(x) + + secret = 0 + for (x, y) in shares: + delta = y * derive_interpolating_value_internal(x_coords, x) + secret += delta + return secret % curve_order + +# coeffs shouldn't include the const term (i.e. 
secret) +def secret_share_shard(secret: int, coeffs: List[int], max_participants: int) -> List[PolyPoint]: + coeffs = coeffs + [secret] + + secshares: List[PolyPoint] = [] + for x_i in range(1, max_participants + 1): + y_i = polynomial_evaluate(coeffs, x_i) + secshare_i = (x_i, y_i) + secshares.append(secshare_i) + return secshares + +def trusted_dealer_keygen(secret_key: int, max_participants: int, min_participants: int) -> Tuple[ECPoint, List[PolyPoint], List[ECPoint]]: + assert (1 <= secret_key <= curve_order - 1) + assert (2 <= min_participants <= max_participants) + # we don't force BIP340 compatibility of group pubkey in keygen + P = point_mul(G, secret_key) + assert P is not None + + coeffs = [] + for i in range(min_participants - 1): + coeffs.append(random.randint(1, curve_order - 1)) + secshares = secret_share_shard(secret_key, coeffs, max_participants) + pubshares = [] + for secshare in secshares: + X = point_mul(G, secshare[1]) + assert X is not None + pubshares.append(X) + return (P, secshares, pubshares) + +# Test vector from RFC draft. 
+# section F.5 of https://datatracker.ietf.org/doc/draft-irtf-cfrg-frost/15/ +class Tests(unittest.TestCase): + def setUp(self) -> None: + self.max_participants = 3 + self.min_participants = 2 + self.poly = [ + 0xfbf85eadae3058ea14f19148bb72b45e4399c0b16028acaf0395c9b03c823579, + 0x0d004150d27c3bf2a42f312683d35fac7394b1e9e318249c1bfe7f0795a83114, + ] + self.shares: List[PolyPoint] = [ + (1, 0x08f89ffe80ac94dcb920c26f3f46140bfc7f95b493f8310f5fc1ea2b01f4254c), + (2, 0x04f0feac2edcedc6ce1253b7fab8c86b856a797f44d83d82a385554e6e401984), + (3, 0x00e95d59dd0d46b0e303e500b62b7ccb0e555d49f5b849f5e748c071da8c0dbc), + ] + self.secret = 0x0d004150d27c3bf2a42f312683d35fac7394b1e9e318249c1bfe7f0795a83114 + + def test_polynomial_evaluate(self) -> None: + coeffs = self.poly.copy() + expected_secret = self.secret + + self.assertEqual(polynomial_evaluate(coeffs, 0), expected_secret) + + def test_secret_share_combine(self) -> None: + shares: List[PolyPoint] = self.shares.copy() + expected_secret = self.secret + + self.assertEqual(secret_share_combine([shares[0], shares[1]]), expected_secret) + self.assertEqual(secret_share_combine([shares[1], shares[2]]), expected_secret) + self.assertEqual(secret_share_combine([shares[0], shares[2]]), expected_secret) + self.assertEqual(secret_share_combine(shares), expected_secret) + + def test_trusted_dealer_keygen(self) -> None: + secret_key = random.randint(1, curve_order - 1) + max_participants = 5 + min_participants = 3 + group_pk, secshares, pubshares = trusted_dealer_keygen(secret_key, max_participants, min_participants) + + # group_pk need not be xonly (i.e., have even y always) + self.assertEqual(group_pk, point_mul(G, secret_key)) + self.assertEqual(secret_share_combine(secshares), secret_key) + self.assertEqual(len(secshares), max_participants) + self.assertEqual(len(pubshares), max_participants) + for i in range(len(pubshares)): + with self.subTest(i=i): + self.assertEqual(pubshares[i], point_mul(G, secshares[i][1])) + +if 
__name__=='__main__': + unittest.main() diff --git a/test/regtest_frost_joinmarket.cfg b/test/regtest_frost_joinmarket.cfg new file mode 100644 index 0000000..3d7b4e7 --- /dev/null +++ b/test/regtest_frost_joinmarket.cfg @@ -0,0 +1,154 @@ +#NOTE: This configuration file is for testing with regtest only +#For mainnet usage, running a JoinMarket script will create the default file +[DAEMON] +no_daemon = 1 +daemon_port = 27183 +daemon_host = localhost +use_ssl = false + +[BLOCKCHAIN] +blockchain_source = regtest +rpc_host = localhost +rpc_port = 18443 +rpc_user = bitcoinrpc +rpc_password = 123456abcdef +network = testnet +rpc_wallet_file = jm-test-frost-wallet + +[MESSAGING:server1] +type = irc +host = localhost +hostid = localhost1 +channel = joinmarket-pit +port = 16667 +usessl = false +socks5 = false +socks5_host = localhost +socks5_port = 9150 + +[MESSAGING:server2] +type = irc +host = localhost +hostid = localhost2 +channel = joinmarket-pit +port = 16668 +usessl = false +socks5 = false +socks5_host = localhost +socks5_port = 9150 + +[MESSAGING:onion] +# onion based message channels must have the exact type 'onion' +# (while the section name above can be MESSAGING:whatever), and there must +# be only ONE such message channel configured (note the directory servers +# can be multiple, below): +type = onion +socks5_host = localhost +socks5_port = 9050 +# the tor control configuration: +tor_control_host = localhost +# or, to use a UNIX socket +# control_host = unix:/var/run/tor/control +tor_control_port = 9051 +# the host/port actually serving the hidden service +# (note the *virtual port*, that the client uses, +# is hardcoded to 80): +onion_serving_host = 127.0.0.1 +onion_serving_port = 8080 +# This is mandatory for directory nodes (who must also set their +# own .onion:port as the only directory in directory_nodes, below), +# but NOT TO BE USED by non-directory nodes (which is you, unless +# you know otherwise!), as it will greatly degrade your privacy. 
+# +# Special handling on regtest, so just ignore and let the code handle it: +hidden_service_dir = "" +# This is a comma separated list (comma can be omitted if only one item). +# Each item has format host:port +# On regtest we are going to increment the port numbers served from, with +# the value used here as the starting value: +directory_nodes = localhost:8081 +# this is not present in default real config +# and is specifically used to flag tests: +# means we use indices 1,2,3,4,5: +regtest_count=1,5 + +[TIMEOUT] +maker_timeout_sec = 10 + +[LOGGING] +console_log_level = DEBUG + +[POLICY] + +segwit = true +native = true +frost = true + +# for dust sweeping, try merge_algorithm = gradual +# for more rapid dust sweeping, try merge_algorithm = greedy +# for most rapid dust sweeping, try merge_algorithm = greediest +# but don't forget to bump your miner fees! +merge_algorithm = default +# the fee estimate is based on a projection of how many satoshis +# per kB are needed to get in one of the next N blocks, N set here +# as the value of 'tx_fees'. This estimate can be extremely high +# if you set N=1, so we choose N=3 for a more reasonable figure, +# as our default. Note that for clients not using a local blockchain +# instance, we retrieve an estimate from the API at cointape.com, currently. +tx_fees = 3 +taker_utxo_retries = 3 +taker_utxo_age = 1 +taker_utxo_amtpercent = 20 +accept_commitment_broadcasts = 1 +#some settings useful for testing scenarios +#laxity for repeated tests; tests on actual +#commitments/maker limit/utxo sourcing logic should obviously reset +taker_utxo_retries = 3 +minimum_makers = 1 +listunspent_args = [0] +max_sats_freeze_reuse = -1 + +# ONLY for testing! +max_cj_fee_abs = 200000 +max_cj_fee_rel = 0.2 + +[PAYJOIN] +# for the majority of situations, the defaults +# need not be altered - they will ensure you don't pay +# a significantly higher fee. +# MODIFICATION OF THESE SETTINGS IS DISADVISED. 
+ +# Payjoin protocol version; currently only '1' is supported. +payjoin_version = 1 + +# servers can change their destination address by default (0). +# if '1', they cannot. Note that servers can explicitly request +# that this is activated, in which case we respect that choice. +disable_output_substitution = 0 + +# "default" here indicates that we will allow the receiver to +# increase the fee we pay by: +# 1.2 * (our_fee_rate_per_vbyte * vsize_of_our_input_type) +# (see https://github.com/bitcoin/bips/blob/master/bip-0078.mediawiki#span_idfeeoutputspanFee_output) +# (and 1.2 to give breathing room) +# which indicates we are allowing roughly one extra input's fee. +# If it is instead set to an integer, then that many satoshis are allowed. +# Additionally, note that we will also set the parameter additionalfeeoutputindex +# to that of our change output, unless there is none in which case this is disabled. +max_additional_fee_contribution = default + +# this is the minimum satoshis per vbyte we allow in the payjoin +# transaction; note it is decimal, not integer. 
+min_fee_rate = 1.1 + +# for payjoins to hidden service endpoints, the socks5 configuration: +onion_socks5_host = localhost +onion_socks5_port = 9050 +# in some exceptional case the HS may be SSL configured, +# this feature is not yet implemented in code, but here for the +# future: +hidden_service_ssl = false + +[FROST] +t = 2 +hostpubkeys = 024cc1ec6fedba593a6cb683b627953b2aa80f8df80f360f78805fc00898697c74,03a8348fe4afd1974d07a50783c5d5c1ef59200eeb8ab97c7d8606534749a7043d,0307952377783138b82b222fd73199c541338a96cf758ed5a27816d6e6a324e77d diff --git a/test/regtest_taproot_joinmarket.cfg b/test/regtest_taproot_joinmarket.cfg new file mode 100644 index 0000000..129ebd9 --- /dev/null +++ b/test/regtest_taproot_joinmarket.cfg @@ -0,0 +1,150 @@ +#NOTE: This configuration file is for testing with regtest only +#For mainnet usage, running a JoinMarket script will create the default file +[DAEMON] +no_daemon = 1 +daemon_port = 27183 +daemon_host = localhost +use_ssl = false + +[BLOCKCHAIN] +blockchain_source = regtest +rpc_host = localhost +rpc_port = 18443 +rpc_user = bitcoinrpc +rpc_password = 123456abcdef +network = testnet +rpc_wallet_file = jm-test-taproot-wallet + +[MESSAGING:server1] +type = irc +host = localhost +hostid = localhost1 +channel = joinmarket-pit +port = 16667 +usessl = false +socks5 = false +socks5_host = localhost +socks5_port = 9150 + +[MESSAGING:server2] +type = irc +host = localhost +hostid = localhost2 +channel = joinmarket-pit +port = 16668 +usessl = false +socks5 = false +socks5_host = localhost +socks5_port = 9150 + +[MESSAGING:onion] +# onion based message channels must have the exact type 'onion' +# (while the section name above can be MESSAGING:whatever), and there must +# be only ONE such message channel configured (note the directory servers +# can be multiple, below): +type = onion +socks5_host = localhost +socks5_port = 9050 +# the tor control configuration: +tor_control_host = localhost +# or, to use a UNIX socket +# control_host = 
unix:/var/run/tor/control +tor_control_port = 9051 +# the host/port actually serving the hidden service +# (note the *virtual port*, that the client uses, +# is hardcoded to 80): +onion_serving_host = 127.0.0.1 +onion_serving_port = 8080 +# This is mandatory for directory nodes (who must also set their +# own .onion:port as the only directory in directory_nodes, below), +# but NOT TO BE USED by non-directory nodes (which is you, unless +# you know otherwise!), as it will greatly degrade your privacy. +# +# Special handling on regtest, so just ignore and let the code handle it: +hidden_service_dir = "" +# This is a comma separated list (comma can be omitted if only one item). +# Each item has format host:port +# On regtest we are going to increment the port numbers served from, with +# the value used here as the starting value: +directory_nodes = localhost:8081 +# this is not present in default real config +# and is specifically used to flag tests: +# means we use indices 1,2,3,4,5: +regtest_count=1,5 + +[TIMEOUT] +maker_timeout_sec = 10 + +[LOGGING] +console_log_level = DEBUG + +[POLICY] + +segwit = true +native = true +taproot = true + +# for dust sweeping, try merge_algorithm = gradual +# for more rapid dust sweeping, try merge_algorithm = greedy +# for most rapid dust sweeping, try merge_algorithm = greediest +# but don't forget to bump your miner fees! +merge_algorithm = default +# the fee estimate is based on a projection of how many satoshis +# per kB are needed to get in one of the next N blocks, N set here +# as the value of 'tx_fees'. This estimate can be extremely high +# if you set N=1, so we choose N=3 for a more reasonable figure, +# as our default. Note that for clients not using a local blockchain +# instance, we retrieve an estimate from the API at cointape.com, currently. 
+tx_fees = 3 +taker_utxo_retries = 3 +taker_utxo_age = 1 +taker_utxo_amtpercent = 20 +accept_commitment_broadcasts = 1 +#some settings useful for testing scenarios +#laxity for repeated tests; tests on actual +#commitments/maker limit/utxo sourcing logic should obviously reset +taker_utxo_retries = 3 +minimum_makers = 1 +listunspent_args = [0] +max_sats_freeze_reuse = -1 + +# ONLY for testing! +max_cj_fee_abs = 200000 +max_cj_fee_rel = 0.2 + +[PAYJOIN] +# for the majority of situations, the defaults +# need not be altered - they will ensure you don't pay +# a significantly higher fee. +# MODIFICATION OF THESE SETTINGS IS DISADVISED. + +# Payjoin protocol version; currently only '1' is supported. +payjoin_version = 1 + +# servers can change their destination address by default (0). +# if '1', they cannot. Note that servers can explicitly request +# that this is activated, in which case we respect that choice. +disable_output_substitution = 0 + +# "default" here indicates that we will allow the receiver to +# increase the fee we pay by: +# 1.2 * (our_fee_rate_per_vbyte * vsize_of_our_input_type) +# (see https://github.com/bitcoin/bips/blob/master/bip-0078.mediawiki#span_idfeeoutputspanFee_output) +# (and 1.2 to give breathing room) +# which indicates we are allowing roughly one extra input's fee. +# If it is instead set to an integer, then that many satoshis are allowed. +# Additionally, note that we will also set the parameter additionalfeeoutputindex +# to that of our change output, unless there is none in which case this is disabled. +max_additional_fee_contribution = default + +# this is the minimum satoshis per vbyte we allow in the payjoin +# transaction; note it is decimal, not integer. 
+min_fee_rate = 1.1 + +# for payjoins to hidden service endpoints, the socks5 configuration: +onion_socks5_host = localhost +onion_socks5_port = 9050 +# in some exceptional case the HS may be SSL configured, +# this feature is not yet implemented in code, but here for the +# future: +hidden_service_ssl = false