diff --git a/.github/workflows/common_check.yaml b/.github/workflows/common_check.yaml index 14fcd2e9..40af8071 100644 --- a/.github/workflows/common_check.yaml +++ b/.github/workflows/common_check.yaml @@ -19,9 +19,9 @@ jobs: fail-fast: false matrix: python-version: - # - "3.9" - "3.10" - poetry-version: ["1.3.2"] + - "3.11" + poetry-version: ["2.0.1"] os: [ubuntu-22.04,] runs-on: ${{ matrix.os }} steps: diff --git a/.github/workflows/dev.yml b/.github/workflows/dev.yml index 407e84ca..92cd9f20 100644 --- a/.github/workflows/dev.yml +++ b/.github/workflows/dev.yml @@ -22,11 +22,10 @@ jobs: strategy: matrix: python-versions: - - 3.8 - - 3.9 + - "3.10" os: - - ubuntu-20.04 - runs-on: ubuntu-20.04 + - ubuntu-24.04 + runs-on: ubuntu-24.04 env: PYTHONPATH: . steps: diff --git a/derive_client/_bridge/client.py b/derive_client/_bridge/client.py index 92daa0ef..e7d2d191 100644 --- a/derive_client/_bridge/client.py +++ b/derive_client/_bridge/client.py @@ -4,100 +4,111 @@ from __future__ import annotations -import copy import functools import json from logging import Logger -from typing import Literal from eth_account import Account -from web3 import Web3 -from web3.contract import Contract +from returns.future import future_safe +from returns.io import IOResult +from web3 import AsyncWeb3 +from web3.contract import AsyncContract +from web3.contract.async_contract import AsyncContractFunction from web3.datastructures import AttributeDict +from web3.types import HexBytes, LogReceipt, TxReceipt -from derive_client._bridge.transaction import ( - _check_gas_balance, - ensure_allowance, - ensure_balance, - prepare_mainnet_to_derive_gas_tx, -) from derive_client.constants import ( + ARBITRUM_DEPOSIT_WRAPPER, + BASE_DEPOSIT_WRAPPER, CONFIGS, CONTROLLER_ABI_PATH, CONTROLLER_V0_ABI_PATH, - DEFAULT_GAS_FUNDING_AMOUNT, + CURRENCY_DECIMALS, DEPOSIT_HELPER_ABI_PATH, DEPOSIT_HOOK_ABI_PATH, DERIVE_ABI_PATH, DERIVE_L2_ABI_PATH, ERC20_ABI_PATH, - L1_STANDARD_BRIDGE_ABI_PATH, + 
ETH_DEPOSIT_WRAPPER, LIGHT_ACCOUNT_ABI_PATH, LYRA_OFT_WITHDRAW_WRAPPER_ABI_PATH, LYRA_OFT_WITHDRAW_WRAPPER_ADDRESS, MSG_GAS_LIMIT, NEW_VAULT_ABI_PATH, OLD_VAULT_ABI_PATH, + OPTIMISM_DEPOSIT_WRAPPER, PAYLOAD_SIZE, SOCKET_ABI_PATH, TARGET_SPEED, + WITHDRAW_WRAPPER_V2, WITHDRAW_WRAPPER_V2_ABI_PATH, ) from derive_client.data_types import ( Address, BridgeContext, + BridgeTxDetails, BridgeTxResult, BridgeType, ChainID, Currency, DeriveTokenAddresses, + Direction, Environment, LayerZeroChainIDv2, MintableTokenData, NonMintableTokenData, + PreparedBridgeTx, SocketAddress, TxResult, - TxStatus, ) -from derive_client.exceptions import AlreadyFinalizedError, BridgeEventParseError, BridgeRouteError, InsufficientGas -from derive_client.utils import ( +from derive_client.exceptions import ( + BridgeEventParseError, + BridgePrimarySignerRequiredError, + BridgeRouteError, + PartialBridgeResult, +) +from derive_client.utils import get_prod_derive_addresses +from derive_client.utils.w3 import to_base_units + +from .w3 import ( build_standard_transaction, + ensure_token_allowance, + ensure_token_balance, get_contract, - get_prod_derive_addresses, - get_w3_connection, + get_w3_connections, make_filter_params, - send_and_confirm_tx, - wait_for_event, - wait_for_tx_receipt, + send_tx, + sign_tx, + wait_for_bridge_event, + wait_for_tx_finality, ) -from derive_client.utils.w3 import simulate_tx -def _load_vault_contract(w3: Web3, token_data: NonMintableTokenData) -> Contract: +def _load_vault_contract(w3: AsyncWeb3, token_data: NonMintableTokenData) -> AsyncContract: path = NEW_VAULT_ABI_PATH if token_data.isNewBridge else OLD_VAULT_ABI_PATH abi = json.loads(path.read_text()) return get_contract(w3=w3, address=token_data.Vault, abi=abi) -def _load_controller_contract(w3: Web3, token_data: MintableTokenData) -> Contract: +def _load_controller_contract(w3: AsyncWeb3, token_data: MintableTokenData) -> AsyncContract: path = CONTROLLER_ABI_PATH if token_data.isNewBridge else 
CONTROLLER_V0_ABI_PATH abi = json.loads(path.read_text()) return get_contract(w3=w3, address=token_data.Controller, abi=abi) -def _load_deposit_contract(w3: Web3, token_data: MintableTokenData) -> Contract: +def _load_deposit_contract(w3: AsyncWeb3, token_data: MintableTokenData) -> AsyncContract: address = token_data.LyraTSAShareHandlerDepositHook abi = json.loads(DEPOSIT_HOOK_ABI_PATH.read_text()) return get_contract(w3=w3, address=address, abi=abi) -def _load_light_account(w3: Web3, wallet: Address) -> Contract: +def _load_light_account(w3: AsyncWeb3, wallet: Address) -> AsyncContract: abi = json.loads(LIGHT_ACCOUNT_ABI_PATH.read_text()) return get_contract(w3=w3, address=wallet, abi=abi) def _get_min_fees( - bridge_contract: Contract, + bridge_contract: AsyncContract, connector: Address, token_data: NonMintableTokenData | MintableTokenData, ) -> int: @@ -108,122 +119,141 @@ def _get_min_fees( if token_data.isNewBridge: params["payloadSize_"] = PAYLOAD_SIZE - return bridge_contract.functions.getMinFees(**params).call() + return bridge_contract.functions.getMinFees(**params) class BridgeClient: - def __init__(self, env: Environment, chain_id: ChainID, account: Account, wallet: Address, logger: Logger): + def __init__(self, env: Environment, account: Account, wallet: Address, logger: Logger): + """Synchronous constructor that performs minimal, non-blocking setup.""" + if not env == Environment.PROD: raise RuntimeError(f"Bridging is not supported in the {env.name} environment.") + self.config = CONFIGS[env] - self.derive_w3 = get_w3_connection(chain_id=ChainID.DERIVE, logger=logger) - self.remote_w3 = get_w3_connection(chain_id=chain_id, logger=logger) self.account = account - self.withdraw_wrapper = self._load_withdraw_wrapper() - self.deposit_helper = self._load_deposit_helper() + self.owner = account.address + self.wallet = wallet self.derive_addresses = get_prod_derive_addresses() - self.light_account = _load_light_account(w3=self.derive_w3, wallet=wallet) + 
self.w3s = get_w3_connections(logger=logger) self.logger = logger - if self.owner != self.account.address: - raise ValueError( - "Bridging disabled for secondary session-key signers: old-style assets " - "(USDC, USDT) on Derive cannot specify a custom receiver. Using a " - "secondary signer routes funds to the session key's contract instead of " - "the primary owner's. Please run all bridge operations with the " - "primary wallet owner." - ) @property - def remote_chain_id(self) -> ChainID: - return ChainID(self.remote_w3.eth.chain_id) + def derive_w3(self) -> AsyncWeb3: + return self.w3s[ChainID.DERIVE] @property - def wallet(self) -> Address: - """Smart contract funding wallet.""" - return self.light_account.address + def private_key(self) -> HexBytes: + """Private key of the owner (EOA) of the smart contract funding account.""" + return self.account._private_key @functools.cached_property - def owner(self) -> Address: - """Owner of smart contract funding wallet, must be the same as self.account.address.""" - return self.light_account.functions.owner().call() + def light_account(self): + """Smart contract funding wallet.""" + return _load_light_account(w3=self.derive_w3, wallet=self.wallet) - @property - def private_key(self): - """Private key of the owner (EOA) of the smart contract funding account.""" - return self.account._private_key + async def verify_owner(self): + """We verify the wallet owner on each prepare_deposit and prepare_withdrawal.""" - def _load_deposit_helper(self) -> Contract: - address = ( - self.config.contracts.DEPOSIT_WRAPPER - if self.remote_chain_id - not in [ - ChainID.ARBITRUM, - ChainID.OPTIMISM, - ] - else getattr( - self.config.contracts, - f"{self.remote_chain_id.name}_DEPOSIT_WRAPPER", + owner = await self.light_account.functions.owner().call() + if owner != self.owner: + raise BridgePrimarySignerRequiredError( + "Bridging disabled for secondary session-key signers: old-style assets " + "(USDC, USDT) on Derive cannot specify a 
custom receiver. Using a " + "secondary signer routes funds to the session key's contract instead of " + "the primary owner's. Please run all bridge operations with the " + "primary wallet owner." ) - ) + + def get_deposit_helper(self, chain_id: ChainID) -> AsyncContract: + + match chain_id: + case ChainID.ARBITRUM: + address = ARBITRUM_DEPOSIT_WRAPPER + case ChainID.OPTIMISM: + address = OPTIMISM_DEPOSIT_WRAPPER + case ChainID.BASE: + address = BASE_DEPOSIT_WRAPPER + case ChainID.ETH: + address = ETH_DEPOSIT_WRAPPER + case _: + raise ValueError(f"Deposit helper not supported on: {chain_id}") + abi = json.loads(DEPOSIT_HELPER_ABI_PATH.read_text()) - return get_contract(w3=self.remote_w3, address=address, abi=abi) + return get_contract(w3=self.w3s[chain_id], address=address, abi=abi) - def _load_withdraw_wrapper(self) -> Contract: - address = self.config.contracts.WITHDRAW_WRAPPER_V2 + @functools.cached_property + def withdraw_wrapper(self) -> AsyncContract: + address = WITHDRAW_WRAPPER_V2 abi = json.loads(WITHDRAW_WRAPPER_V2_ABI_PATH.read_text()) return get_contract(w3=self.derive_w3, address=address, abi=abi) @functools.lru_cache def _make_bridge_context( - self, direction: Literal["deposit", "withdraw"], bridge_type: BridgeType, currency: Currency + self, + direction: Direction, + currency: Currency, + remote_chain_id: ChainID, ) -> BridgeContext: - is_deposit = direction == "deposit" - src_w3, tgt_w3 = (self.remote_w3, self.derive_w3) if is_deposit else (self.derive_w3, self.remote_w3) - src_chain, tgt_chain = ( - (self.remote_chain_id, ChainID.DERIVE) if is_deposit else (ChainID.DERIVE, self.remote_chain_id) - ) - if bridge_type == BridgeType.LAYERZERO and currency is Currency.DRV: + is_deposit = direction == Direction.DEPOSIT + + if is_deposit: + src_w3, tgt_w3 = self.w3s[remote_chain_id], self.derive_w3 + src_chain, tgt_chain = remote_chain_id, ChainID.DERIVE + else: + src_w3, tgt_w3 = self.derive_w3, self.w3s[remote_chain_id] + src_chain, tgt_chain = 
ChainID.DERIVE, remote_chain_id + + if currency is Currency.DRV: src_addr = DeriveTokenAddresses[src_chain.name].value tgt_addr = DeriveTokenAddresses[tgt_chain.name].value derive_abi = json.loads(DERIVE_L2_ABI_PATH.read_text()) - remote_abi_path = DERIVE_ABI_PATH if self.remote_chain_id == ChainID.ETH else DERIVE_L2_ABI_PATH + remote_abi_path = DERIVE_ABI_PATH if remote_chain_id == ChainID.ETH else DERIVE_L2_ABI_PATH remote_abi = json.loads(remote_abi_path.read_text()) src_abi, tgt_abi = (remote_abi, derive_abi) if is_deposit else (derive_abi, remote_abi) src = get_contract(src_w3, src_addr, abi=src_abi) tgt = get_contract(tgt_w3, tgt_addr, abi=tgt_abi) src_event, tgt_event = src.events.OFTSent(), tgt.events.OFTReceived() - return BridgeContext(src_w3, tgt_w3, src, src_event, tgt_event) + context = BridgeContext(currency, src_w3, tgt_w3, src, src_event, tgt_event, src_chain, tgt_chain) + return context + + erc20_abi = json.loads(ERC20_ABI_PATH.read_text()) + socket_abi = json.loads(SOCKET_ABI_PATH.read_text()) + + if is_deposit: + token_data: NonMintableTokenData = self.derive_addresses.chains[src_chain][currency] + token_contract = get_contract(src_w3, token_data.NonMintableToken, abi=erc20_abi) + else: + token_data: MintableTokenData = self.derive_addresses.chains[src_chain][currency] + token_contract = get_contract(src_w3, token_data.MintableToken, abi=erc20_abi) - elif bridge_type == BridgeType.SOCKET and currency is not Currency.DRV: - erc20_abi = json.loads(ERC20_ABI_PATH.read_text()) - socket_abi = json.loads(SOCKET_ABI_PATH.read_text()) + src_addr = SocketAddress[src_chain.name].value + tgt_addr = SocketAddress[tgt_chain.name].value + src_socket = get_contract(src_w3, address=src_addr, abi=socket_abi) + tgt_socket = get_contract(tgt_w3, address=tgt_addr, abi=socket_abi) + src_event, tgt_event = src_socket.events.MessageOutbound(), tgt_socket.events.ExecutionSuccess() + context = BridgeContext(currency, src_w3, tgt_w3, token_contract, src_event, tgt_event, 
src_chain, tgt_chain) + return context - if is_deposit: - token_data: NonMintableTokenData = self.derive_addresses.chains[self.remote_chain_id][currency] - token_contract = get_contract(src_w3, token_data.NonMintableToken, abi=erc20_abi) - else: - token_data: MintableTokenData = self.derive_addresses.chains[ChainID.DERIVE][currency] - token_contract = get_contract(src_w3, token_data.MintableToken, abi=erc20_abi) + def _get_context(self, state: PreparedBridgeTx | BridgeTxResult) -> BridgeContext: - src_addr = SocketAddress[src_chain.name].value - tgt_addr = SocketAddress[tgt_chain.name].value - src_socket = get_contract(src_w3, address=src_addr, abi=socket_abi) - tgt_socket = get_contract(tgt_w3, address=tgt_addr, abi=socket_abi) - src_event, tgt_event = src_socket.events.MessageOutbound(), tgt_socket.events.ExecutionSuccess() - return BridgeContext(src_w3, tgt_w3, token_contract, src_event, tgt_event) + direction = Direction.WITHDRAW if state.source_chain == ChainID.DERIVE else Direction.DEPOSIT + remote_chain_id = state.target_chain if direction == Direction.WITHDRAW else state.source_chain + context = self._make_bridge_context( + direction=direction, + currency=state.currency, + remote_chain_id=remote_chain_id, + ) - raise ValueError(f"Unsupported bridge_type={bridge_type} for currency={currency}.") + return context def _resolve_socket_route( self, - direction: Literal["deposit", "withdraw"], - currency: Currency, + context: BridgeContext, ) -> tuple[MintableTokenData | NonMintableTokenData, Address]: - is_deposit = direction == "deposit" - src_chain, tgt_chain = ( - (self.remote_chain_id, ChainID.DERIVE) if is_deposit else (ChainID.DERIVE, self.remote_chain_id) - ) + + currency = context.currency + src_chain, tgt_chain = context.source_chain, context.target_chain if (src_token_data := self.derive_addresses.chains[src_chain].get(currency)) is None: msg = f"No bridge path for {currency.name} from {src_chain.name} to {tgt_chain.name}." 
@@ -236,25 +266,129 @@ def _resolve_socket_route( msg = f"Target chain {tgt_chain.name} not found in {src_chain.name} connectors." raise BridgeRouteError(msg) if src_chain not in tgt_token_data.connectors: - msg = f"Target chain {src_chain.name} not found in {tgt_chain.name} connectors." + msg = f"Source chain {src_chain.name} not found in {tgt_chain.name} connectors." raise BridgeRouteError(msg) return src_token_data, src_token_data.connectors[tgt_chain][TARGET_SPEED] - def deposit(self, amount: int, currency: Currency) -> BridgeTxResult: - """ - Deposit funds by preparing, signing, and sending a bridging transaction. - """ + async def _prepare_tx( + self, + amount: int, + func: AsyncContractFunction, + value: int, + fee_in_token: int, + context: BridgeContext, + ) -> PreparedBridgeTx: + + onchain_decimals: int = await context.source_token.functions.decimals().call() + if onchain_decimals != (expected_decimals := CURRENCY_DECIMALS[context.currency]): + raise RuntimeError( + f"Decimal mismatch for {context.currency.name} on {context.source_chain.name}: " + f"expected {expected_decimals}, got {onchain_decimals}" + ) - # record on target chain when we start polling - token_data, _connector = self._resolve_socket_route("deposit", currency=currency) - context = self._make_bridge_context("deposit", bridge_type=BridgeType.SOCKET, currency=currency) - target_from_block = context.target_w3.eth.block_number + w3 = context.source_w3 + tx = await build_standard_transaction(func=func, account=self.account, w3=w3, value=value, logger=self.logger) + signed_tx = sign_tx(w3=context.source_w3, tx=tx, private_key=self.private_key) - spender = token_data.Vault if token_data.isNewBridge else self.deposit_helper.address - _check_gas_balance(context.source_w3, self.owner) - ensure_balance(context.source_token, self.owner, amount) - ensure_allowance( + tx_details = BridgeTxDetails( + contract=func.address, + method=func.fn_name, + kwargs=func.kwargs, + tx=tx, + signed_tx=signed_tx, + ) + 
+ prepared_tx = PreparedBridgeTx( + amount=amount, + value=0, + fee_value=value, + fee_in_token=fee_in_token, + currency=context.currency, + source_chain=context.source_chain, + target_chain=context.target_chain, + bridge_type=context.bridge_type, + tx_details=tx_details, + ) + + return prepared_tx + + @future_safe + async def prepare_deposit( + self, + human_amount: float, + currency: Currency, + chain_id: ChainID, + ) -> IOResult[PreparedBridgeTx, Exception]: + + if currency is Currency.ETH: + raise NotImplementedError("ETH deposits are not implemented.") + + amount: int = to_base_units(human_amount=human_amount, currency=currency) + await self.verify_owner() + + direction = Direction.DEPOSIT + + if currency == Currency.DRV: + context = self._make_bridge_context(direction, currency=currency, remote_chain_id=chain_id) + prepared_tx = await self._prepare_layerzero_deposit(amount=amount, context=context) + else: + context = self._make_bridge_context(direction, currency=currency, remote_chain_id=chain_id) + prepared_tx = await self._prepare_socket_deposit(amount=amount, context=context) + + return prepared_tx + + @future_safe + async def prepare_withdrawal( + self, + human_amount: float, + currency: Currency, + chain_id: ChainID, + ) -> IOResult[PreparedBridgeTx, Exception]: + + if currency is Currency.ETH: + raise NotImplementedError("ETH withdrawals are not implemented.") + + amount: int = to_base_units(human_amount=human_amount, currency=currency) + await self.verify_owner() + + direction = Direction.WITHDRAW + + if currency == Currency.DRV: + context = self._make_bridge_context(direction, currency=currency, remote_chain_id=chain_id) + prepared_tx = await self._prepare_layerzero_withdrawal(amount=amount, context=context) + else: + context = self._make_bridge_context(direction, currency=currency, remote_chain_id=chain_id) + prepared_tx = await self._prepare_socket_withdrawal(amount=amount, context=context) + + return prepared_tx + + @future_safe + async def 
submit_bridge_tx(self, prepared_tx: PreparedBridgeTx) -> IOResult[BridgeTxResult, Exception]: + + tx_result = await self._send_bridge_tx(prepared_tx=prepared_tx) + + return tx_result + + @future_safe + async def poll_bridge_progress(self, tx_result: BridgeTxResult) -> IOResult[BridgeTxResult, Exception]: + + try: + tx_result.source_tx.tx_receipt = await self._confirm_source_tx(tx_result=tx_result) + tx_result.target_tx = TxResult(tx_hash=await self._wait_for_target_event(tx_result=tx_result)) + tx_result.target_tx.tx_receipt = await self._confirm_target_tx(tx_result=tx_result) + except Exception as e: + raise PartialBridgeResult(f"Bridge pipeline failed: {e}", tx_result=tx_result) from e + + return tx_result + + async def _prepare_socket_deposit(self, amount: int, context: BridgeContext) -> PreparedBridgeTx: + + token_data, _connector = self._resolve_socket_route(context=context) + + spender = token_data.Vault if token_data.isNewBridge else self.get_deposit_helper(context.source_chain).address + await ensure_token_balance(context.source_token, self.owner, amount=amount) + await ensure_token_allowance( w3=context.source_w3, token_contract=context.source_token, owner=self.owner, @@ -265,38 +399,28 @@ def deposit(self, amount: int, currency: Currency) -> BridgeTxResult: ) if token_data.isNewBridge: - tx = self._prepare_new_style_deposit(token_data, amount) + func, fees_func = self._prepare_new_style_deposit(token_data, amount, context) else: - tx = self._prepare_old_style_deposit(token_data, amount) + func, fees_func = self._prepare_old_style_deposit(token_data, amount, context) - source_tx = send_and_confirm_tx( - w3=context.source_w3, tx=tx, private_key=self.private_key, action="bridge()", logger=self.logger - ) - tx_result = BridgeTxResult( - currency=currency, - bridge=BridgeType.SOCKET, - source_chain=context.source_chain, - target_chain=context.target_chain, - source_tx=source_tx, - target_from_block=target_from_block, - ) + fees = await fees_func.call() + 
prepared_tx = await self._prepare_tx(amount=amount, func=func, value=fees + 1, fee_in_token=0, context=context) - return tx_result + return prepared_tx - def withdraw_with_wrapper(self, amount: int, currency: Currency) -> BridgeTxResult: - """ - Checks if sufficent gas is available in derive, if not funds the wallet. - Prepares, signs, and sends a withdrawal transaction using the withdraw wrapper. - """ + async def _prepare_socket_withdrawal(self, amount: int, context: BridgeContext) -> PreparedBridgeTx: - # record on target chain when we start polling - token_data, connector = self._resolve_socket_route("withdraw", currency=currency) - context = self._make_bridge_context("withdraw", bridge_type=BridgeType.SOCKET, currency=currency) - target_from_block = context.target_w3.eth.block_number + token_data, connector = self._resolve_socket_route(context=context) - ensure_balance(context.source_token, self.wallet, amount) - - self._check_bridge_funds(token_data, connector, amount) + # Get estimated fee in token for a withdrawal + fee_in_token = await self.withdraw_wrapper.functions.getFeeInToken( + token=token_data.MintableToken, + controller=token_data.Controller, + connector=token_data.connectors[context.target_chain][TARGET_SPEED], + gasLimit=MSG_GAS_LIMIT, + ).call() + await ensure_token_balance(context.source_token, self.wallet, amount=amount, fee_in_token=fee_in_token) + await self._check_bridge_funds(token_data, connector, amount) kwargs = { "token": context.source_token.address, @@ -316,45 +440,21 @@ def withdraw_with_wrapper(self, amount: int, currency: Currency) -> BridgeTxResu dest=[context.source_token.address, self.withdraw_wrapper.address], func=[approve_data, bridge_data], ) - - tx = build_standard_transaction(func=func, account=self.account, w3=context.source_w3, value=0) - self._ensure_derive_eth_balance(tx) - simulate_tx( - w3=context.source_w3, - tx=tx, - account=self.account, - ) - source_tx = send_and_confirm_tx( - w3=context.source_w3, - tx=tx, - 
private_key=self.private_key, - action="executeBatch()", - logger=self.logger, - ) - tx_result = BridgeTxResult( - currency=currency, - bridge=BridgeType.SOCKET, - source_chain=context.source_chain, - target_chain=context.target_chain, - source_tx=source_tx, - target_from_block=target_from_block, + prepared_tx = await self._prepare_tx( + amount=amount, + func=func, + value=0, + fee_in_token=fee_in_token, + context=context, ) - return tx_result - - def deposit_drv(self, amount: int, currency: Currency) -> BridgeTxResult: - """ - Deposit funds by preparing, signing, and sending a bridging transaction. - """ + return prepared_tx - # record on target chain when we start polling - context = self._make_bridge_context("deposit", bridge_type=BridgeType.LAYERZERO, currency=currency) - target_from_block = context.target_w3.eth.block_number + async def _prepare_layerzero_deposit(self, amount: int, context: BridgeContext) -> PreparedBridgeTx: # check allowance, if needed approve - _check_gas_balance(context.source_w3, self.owner) - ensure_balance(context.source_token, self.owner, amount) - ensure_allowance( + await ensure_token_balance(context.source_token, self.owner, amount=amount) + await ensure_token_allowance( w3=context.source_w3, token_contract=context.source_token, owner=self.owner, @@ -365,7 +465,7 @@ def deposit_drv(self, amount: int, currency: Currency) -> BridgeTxResult: ) # build the send tx - receiver_bytes32 = Web3.to_bytes(hexstr=self.wallet).rjust(32, b"\x00") + receiver_bytes32 = AsyncWeb3.to_bytes(hexstr=self.wallet).rjust(32, b"\x00") kwargs = { "dstEid": LayerZeroChainIDv2.DERIVE.value, @@ -379,46 +479,33 @@ def deposit_drv(self, amount: int, currency: Currency) -> BridgeTxResult: pay_in_lz_token = False send_params = tuple(kwargs.values()) - fees = context.source_token.functions.quoteSend(send_params, pay_in_lz_token).call() + fees = await context.source_token.functions.quoteSend(send_params, pay_in_lz_token).call() native_fee, lz_token_fee = fees 
refund_address = self.owner func = context.source_token.functions.send(send_params, fees, refund_address) - tx = build_standard_transaction(func=func, account=self.account, w3=context.source_w3, value=native_fee) - - source_tx = send_and_confirm_tx( - w3=context.source_w3, - tx=tx, - private_key=self.private_key, - action="executeBatch()", - logger=self.logger, - ) - tx_result = BridgeTxResult( - currency=currency, - bridge=BridgeType.LAYERZERO, - source_chain=context.source_chain, - target_chain=context.target_chain, - source_tx=source_tx, - target_from_block=target_from_block, + prepared_tx = await self._prepare_tx( + amount=amount, + func=func, + value=native_fee, + fee_in_token=0, + context=context, ) - return tx_result - - def withdraw_drv(self, amount: int, currency: Currency) -> BridgeTxResult: + return prepared_tx - # record on target chain when we start polling - context = self._make_bridge_context("withdraw", bridge_type=BridgeType.LAYERZERO, currency=currency) - target_from_block = context.target_w3.eth.block_number + async def _prepare_layerzero_withdrawal(self, amount: int, context: BridgeContext) -> PreparedBridgeTx: abi = json.loads(LYRA_OFT_WITHDRAW_WRAPPER_ABI_PATH.read_text()) withdraw_wrapper = get_contract(context.source_w3, LYRA_OFT_WITHDRAW_WRAPPER_ADDRESS, abi=abi) - - ensure_balance(context.source_token, self.wallet, amount) - destEID = LayerZeroChainIDv2[context.target_chain.name] - fee = withdraw_wrapper.functions.getFeeInToken(context.source_token.address, amount, destEID).call() - if amount < fee: - raise ValueError(f"Withdraw amount < fee: {amount} < {fee} ({(fee / amount * 100):.2f}%)") + + fee_in_token = await withdraw_wrapper.functions.getFeeInToken( + token=context.source_token.address, + amount=amount, + destEID=destEID, + ).call() + await ensure_token_balance(context.source_token, self.wallet, amount=amount, fee_in_token=fee_in_token) kwargs = { "token": context.source_token.address, @@ -434,34 +521,78 @@ def withdraw_drv(self, 
amount: int, currency: Currency) -> BridgeTxResult: dest=[context.source_token.address, withdraw_wrapper.address], func=[approve_data, bridge_data], ) - - tx = build_standard_transaction(func=func, account=self.account, w3=context.source_w3, value=0) - self._ensure_derive_eth_balance(tx) - simulate_tx( - w3=context.source_w3, - tx=tx, - account=self.account, + prepared_tx = await self._prepare_tx( + amount=amount, + func=func, + value=0, + fee_in_token=fee_in_token, + context=context, ) - source_tx = send_and_confirm_tx( - w3=context.source_w3, - tx=tx, - private_key=self.private_key, - action="executeBatch()", - logger=self.logger, - ) + return prepared_tx + + async def _send_bridge_tx(self, prepared_tx: PreparedBridgeTx) -> BridgeTxResult: + + context = self._get_context(prepared_tx) + + # record on target chain where we should start polling + target_from_block = await context.target_w3.eth.block_number + + signed_tx = prepared_tx.tx_details.signed_tx + tx_hash = await send_tx(w3=context.source_w3, signed_tx=signed_tx) + source_tx = TxResult(tx_hash=tx_hash) + tx_result = BridgeTxResult( - currency=currency, - bridge=BridgeType.LAYERZERO, - source_chain=context.source_chain, - target_chain=context.target_chain, + prepared_tx=prepared_tx, source_tx=source_tx, target_from_block=target_from_block, ) return tx_result - def fetch_lz_event_log(self, tx_result: BridgeTxResult, context: BridgeContext): + async def _confirm_source_tx(self, tx_result: BridgeTxResult) -> TxReceipt: + + context = self._get_context(tx_result) + msg = "⏳ Checking source chain [%s] tx receipt for %s" + self.logger.info(msg, tx_result.source_chain.name, tx_result.source_tx.tx_hash) + tx_receipt = await wait_for_tx_finality( + w3=context.source_w3, + tx_hash=tx_result.source_tx.tx_hash, + logger=self.logger, + ) + + return tx_receipt + + async def _wait_for_target_event(self, tx_result: BridgeTxResult) -> HexBytes: + + bridge_event_fetchers = { + BridgeType.SOCKET: self._fetch_socket_event_log, + 
BridgeType.LAYERZERO: self._fetch_lz_event_log, + } + if (fetch_event := bridge_event_fetchers.get(tx_result.bridge_type)) is None: + raise BridgeRouteError(f"Invalid bridge_type: {tx_result.bridge_type}") + + context = self._get_context(tx_result) + event_log = await fetch_event(tx_result, context) + tx_hash = event_log["transactionHash"] + self.logger.info(f"Target event tx_hash found: {tx_hash.to_0x_hex()}") + + return tx_hash + + async def _confirm_target_tx(self, tx_result: BridgeTxResult) -> TxReceipt: + + context = self._get_context(tx_result) + msg = "⏳ Checking target chain [%s] tx receipt for %s" + self.logger.info(msg, tx_result.target_chain.name, tx_result.target_tx.tx_hash) + tx_receipt = await wait_for_tx_finality( + w3=context.target_w3, + tx_hash=tx_result.target_tx.tx_hash, + logger=self.logger, + ) + + return tx_receipt + + async def _fetch_lz_event_log(self, tx_result: BridgeTxResult, context: BridgeContext) -> LogReceipt: try: source_event = context.source_event.process_log(tx_result.source_tx.tx_receipt.logs[-1]) @@ -481,9 +612,14 @@ def fetch_lz_event_log(self, tx_result: BridgeTxResult, context: BridgeContext): self.logger.info( f"πŸ” Listening for OFTReceived on [{tx_result.target_chain.name}] at {context.target_event.address}" ) - return wait_for_event(context.target_w3, filter_params, logger=self.logger) - def fetch_socket_event_log(self, tx_result: BridgeTxResult, context: BridgeContext): + return await wait_for_bridge_event( + w3=context.target_w3, + filter_params=filter_params, + logger=self.logger, + ) + + async def _fetch_socket_event_log(self, tx_result: BridgeTxResult, context: BridgeContext) -> LogReceipt: try: source_event = context.source_event.process_log(tx_result.source_tx.tx_receipt.logs[-2]) @@ -504,108 +640,24 @@ def matching_message_id(log: AttributeDict) -> bool: self.logger.info( f"πŸ” Listening for ExecutionSuccess on [{tx_result.target_chain.name}] at {context.target_event.address}" ) - return 
wait_for_event(context.target_w3, filter_params, condition=matching_message_id, logger=self.logger) - - def poll_bridge_progress(self, tx_result: BridgeTxResult) -> BridgeTxResult: - if tx_result.status is not TxStatus.PENDING: - raise AlreadyFinalizedError(f"Bridge already in final state: {tx_result.status.name}") - # Do not mutate the input in-place - tx_result = copy.deepcopy(tx_result) - - bridge_event_fetchers = { - BridgeType.SOCKET: self.fetch_socket_event_log, - BridgeType.LAYERZERO: self.fetch_lz_event_log, - } - if (fetch_event := bridge_event_fetchers.get(tx_result.bridge)) is None: - raise ValueError(f"Invalid bridge_type: {tx_result.bridge}") - - direction = "withdraw" if tx_result.source_chain == ChainID.DERIVE else "deposit" - context = self._make_bridge_context( - direction=direction, - bridge_type=tx_result.bridge, - currency=tx_result.currency, + return await wait_for_bridge_event( + w3=context.target_w3, + filter_params=filter_params, + condition=matching_message_id, + logger=self.logger, ) - # 1. TimeoutError as exception during source_tx.tx_receipt - if not tx_result.source_tx.tx_receipt: - self.logger.info( - f"⏳ Checking source chain [{tx_result.source_chain.name}] tx receipt for {tx_result.source_tx.tx_hash}" - ) - tx_result.source_tx.exception = None - try: - tx_result.source_tx.tx_receipt = wait_for_tx_receipt( - w3=context.source_w3, tx_hash=tx_result.source_tx.tx_hash - ) - except TimeoutError as e: - tx_result.source_tx.exception = e - return tx_result - - # 2. target_tx is None (i.e. TimeoutError when waiting for event log on target chain) - if not tx_result.target_tx: - try: - event_log = fetch_event(tx_result, context) - tx_result.target_tx = TxResult(event_log["transactionHash"].to_0x_hex()) - except TimeoutError: - return tx_result - - # 3. 
Timeout waiting for target_tx.tx_receipt - if not tx_result.target_tx.tx_receipt: - self.logger.info( - f"⏳ Checking target chain [{tx_result.target_chain.name}] tx receipt for {tx_result.target_tx.tx_hash}" - ) - tx_result.target_tx.exception = None - try: - tx_result.target_tx.tx_receipt = wait_for_tx_receipt( - w3=context.target_w3, tx_hash=tx_result.target_tx.tx_hash - ) - except TimeoutError as e: - tx_result.target_tx.exception = e - - return tx_result - - def _ensure_derive_eth_balance(self, tx: dict[str, str]): - """Ensure that the Derive EOA wallet has sufficient ETH balance for gas.""" - balance_of_owner = self.derive_w3.eth.get_balance(self.owner) - required_gas = tx['maxFeePerGas'] * tx['gas'] - if balance_of_owner < required_gas + DEFAULT_GAS_FUNDING_AMOUNT: - self.logger.info(f"Funding Derive EOA wallet with {DEFAULT_GAS_FUNDING_AMOUNT} ETH") - self.bridge_mainnet_eth_to_derive(DEFAULT_GAS_FUNDING_AMOUNT) - - def bridge_mainnet_eth_to_derive(self, amount: int) -> TxResult: - """ - Prepares, signs, and sends a transaction to bridge ETH from mainnet to Derive. - This is the "socket superbridge" method; not required when using the withdraw wrapper. - """ - - w3 = get_w3_connection(ChainID.ETH, logger=self.logger) - - address = self.config.contracts.L1_CHUG_SPLASH_PROXY - bridge_abi = json.loads(L1_STANDARD_BRIDGE_ABI_PATH.read_text()) - proxy_contract = get_contract(w3=w3, address=address, abi=bridge_abi) - - tx = prepare_mainnet_to_derive_gas_tx(w3=w3, account=self.account, amount=amount, proxy_contract=proxy_contract) - tx['gas'] = w3.eth.estimate_gas(tx) - tx = simulate_tx( - w3=w3, - tx=tx, - account=self.account, - ) - require_gas = tx['maxFeePerGas'] * tx['gas'] - current_balance = w3.eth.get_balance(self.account.address) - if not current_balance >= (amount + require_gas) * 1.1: - raise InsufficientGas( - f"Insufficient ETH balance for bridging amount {amount} + gas {require_gas}. 
Balance: {current_balance}" - ) - tx_result = send_and_confirm_tx( - w3=w3, tx=tx, private_key=self.private_key, action="bridgeETH()", logger=self.logger - ) - return tx_result + def _prepare_new_style_deposit( + self, + token_data: NonMintableTokenData, + amount: int, + context: BridgeContext, + ) -> tuple[AsyncContractFunction, int]: - def _prepare_new_style_deposit(self, token_data: NonMintableTokenData, amount: int) -> dict: - vault_contract = _load_vault_contract(w3=self.remote_w3, token_data=token_data) + vault_contract = _load_vault_contract(w3=self.w3s[context.source_chain], token_data=token_data) connector = token_data.connectors[ChainID.DERIVE][TARGET_SPEED] - fees = _get_min_fees(bridge_contract=vault_contract, connector=connector, token_data=token_data) + fees_func = _get_min_fees(bridge_contract=vault_contract, connector=connector, token_data=token_data) func = vault_contract.functions.bridge( receiver_=self.wallet, amount_=amount, @@ -614,13 +666,20 @@ def _prepare_new_style_deposit(self, token_data: NonMintableTokenData, amount: i extraData_=b"", options_=b"", ) - return build_standard_transaction(func=func, account=self.account, w3=self.remote_w3, value=fees + 1) - def _prepare_old_style_deposit(self, token_data: NonMintableTokenData, amount: int) -> dict: - vault_contract = _load_vault_contract(w3=self.remote_w3, token_data=token_data) + return func, fees_func + + def _prepare_old_style_deposit( + self, + token_data: NonMintableTokenData, + amount: int, + context: BridgeContext, + ) -> tuple[AsyncContractFunction, int]: + + vault_contract = _load_vault_contract(w3=self.w3s[context.source_chain], token_data=token_data) connector = token_data.connectors[ChainID.DERIVE][TARGET_SPEED] - fees = _get_min_fees(bridge_contract=vault_contract, connector=connector, token_data=token_data) - func = self.deposit_helper.functions.depositToLyra( + fees_func = _get_min_fees(bridge_contract=vault_contract, connector=connector, token_data=token_data) + func = 
self.get_deposit_helper(context.source_chain).functions.depositToLyra( token=token_data.NonMintableToken, socketVault=token_data.Vault, isSCW=True, @@ -628,22 +687,24 @@ def _prepare_old_style_deposit(self, token_data: NonMintableTokenData, amount: i gasLimit=MSG_GAS_LIMIT, connector=connector, ) - return build_standard_transaction(func=func, account=self.account, w3=self.remote_w3, value=fees + 1) - def _check_bridge_funds(self, token_data, connector: Address, amount: int): + return func, fees_func + + async def _check_bridge_funds(self, token_data, connector: Address, amount: int) -> None: + controller = _load_controller_contract(w3=self.derive_w3, token_data=token_data) if token_data.isNewBridge: - deposit_hook = controller.functions.hook__().call() + deposit_hook = await controller.functions.hook__().call() expected_hook = token_data.LyraTSAShareHandlerDepositHook if not deposit_hook == token_data.LyraTSAShareHandlerDepositHook: msg = f"Controller deposit hook {deposit_hook} does not match expected address {expected_hook}" raise ValueError(msg) deposit_contract = _load_deposit_contract(w3=self.derive_w3, token_data=token_data) - pool_id = deposit_contract.functions.connectorPoolIds(connector).call() - locked = deposit_contract.functions.poolLockedAmounts(pool_id).call() + pool_id = await deposit_contract.functions.connectorPoolIds(connector).call() + locked = await deposit_contract.functions.poolLockedAmounts(pool_id).call() else: - pool_id = controller.functions.connectorPoolIds(connector).call() - locked = controller.functions.poolLockedAmounts(pool_id).call() + pool_id = await controller.functions.connectorPoolIds(connector).call() + locked = await controller.functions.poolLockedAmounts(pool_id).call() if amount > locked: raise RuntimeError( diff --git a/derive_client/_bridge/standard_bridge.py b/derive_client/_bridge/standard_bridge.py new file mode 100644 index 00000000..098a5595 --- /dev/null +++ b/derive_client/_bridge/standard_bridge.py @@ -0,0 +1,313 
@@ +import asyncio +import json +from logging import Logger + +from eth_account import Account +from eth_utils import keccak +from returns.future import future_safe +from returns.io import IOResult +from web3 import AsyncWeb3 +from web3.contract import AsyncContract +from web3.types import HexBytes, LogReceipt, TxReceipt + +from derive_client.constants import ( + L1_CHUG_SPLASH_PROXY, + L1_CROSS_DOMAIN_MESSENGER_ABI_PATH, + L1_STANDARD_BRIDGE_ABI_PATH, + L2_CROSS_DOMAIN_MESSENGER_ABI_PATH, + L2_CROSS_DOMAIN_MESSENGER_PROXY, + L2_STANDARD_BRIDGE_ABI_PATH, + L2_STANDARD_BRIDGE_PROXY, + MSG_GAS_LIMIT, + RESOLVED_DELEGATE_PROXY, +) +from derive_client.data_types import ( + Address, + BridgeTxDetails, + BridgeTxResult, + BridgeType, + ChainID, + Currency, + PreparedBridgeTx, + TxResult, +) +from derive_client.exceptions import BridgeEventParseError, PartialBridgeResult, StandardBridgeRelayFailed +from derive_client.utils.w3 import to_base_units + +from .w3 import ( + build_standard_transaction, + encode_abi, + get_contract, + get_w3_connections, + make_filter_params, + send_tx, + sign_tx, + wait_for_bridge_event, + wait_for_tx_finality, +) + + +def _load_l1_contract(w3: AsyncWeb3) -> AsyncContract: + address = L1_CHUG_SPLASH_PROXY + abi = json.loads(L1_STANDARD_BRIDGE_ABI_PATH.read_text()) + return get_contract(w3=w3, address=address, abi=abi) + + +def _load_l2_contract(w3: AsyncWeb3) -> AsyncContract: + address = L2_STANDARD_BRIDGE_PROXY + abi = json.loads(L2_STANDARD_BRIDGE_ABI_PATH.read_text()) + return get_contract(w3=w3, address=address, abi=abi) + + +def _load_l2_contracts(w3s: dict[ChainID, AsyncWeb3]) -> dict[ChainID, AsyncContract]: + return {chain_id: _load_l2_contract(w3) for chain_id, w3 in w3s.items() if chain_id is not ChainID.ETH} + + +def _load_l1_cross_domain_messenger_proxy(w3: AsyncWeb3) -> AsyncContract: + address = RESOLVED_DELEGATE_PROXY + abi = json.loads(L1_CROSS_DOMAIN_MESSENGER_ABI_PATH.read_text()) + return get_contract(w3=w3, address=address, 
abi=abi) + + +def _load_l2_cross_domain_messenger_proxy(w3: AsyncWeb3) -> AsyncContract: + address = L2_CROSS_DOMAIN_MESSENGER_PROXY + abi = json.loads(L2_CROSS_DOMAIN_MESSENGER_ABI_PATH.read_text()) + return get_contract(w3=w3, address=address, abi=abi) + + +class StandardBridge: + + def __init__(self, account: Account, logger: Logger): + + self.account = account + self.logger = logger + self.w3s = get_w3_connections(logger=logger) + self.l1_contract = _load_l1_contract(self.w3s[ChainID.ETH]) + self.l2_contracts = _load_l2_contracts(self.w3s) + self.l1_messenger_proxy = _load_l1_cross_domain_messenger_proxy(self.w3s[ChainID.ETH]) + self.l2_messenger_proxy = _load_l2_cross_domain_messenger_proxy(self.w3s[ChainID.DERIVE]) + + @future_safe + async def prepare_eth_tx( + self, + human_amount: float, + to: Address, + source_chain: ChainID, + target_chain: ChainID, + ) -> IOResult[PreparedBridgeTx, Exception]: + + currency = Currency.ETH + + if source_chain is not ChainID.ETH or target_chain is not ChainID.DERIVE or to != self.account.address: + raise NotImplementedError("Only ETH transfers from Ethereum to Derive EOA are currently supported.") + + value: int = to_base_units(human_amount=human_amount, currency=currency) + prepared_tx = await self._prepare_eth_tx( + value=value, + to=to, + source_chain=source_chain, + target_chain=target_chain, + ) + + return prepared_tx + + @property + def private_key(self) -> str: + """Private key of the owner (EOA).""" + return self.account._private_key + + @future_safe + async def submit_bridge_tx(self, prepared_tx: PreparedBridgeTx) -> IOResult[BridgeTxResult, Exception]: + + tx_result = await self._send_bridge_tx(prepared_tx=prepared_tx) + + return tx_result + + @future_safe + async def poll_bridge_progress(self, tx_result: BridgeTxResult) -> IOResult[BridgeTxResult, Exception]: + + try: + tx_result.source_tx.tx_receipt = await self._confirm_source_tx(tx_result=tx_result) + tx_result.target_tx = TxResult(tx_hash=await 
self._wait_for_target_event(tx_result=tx_result)) + tx_result.target_tx.tx_receipt = await self._confirm_target_tx(tx_result=tx_result) + except Exception as e: + raise PartialBridgeResult(f"Bridge pipeline failed: {e}", tx_result=tx_result) from e + + return tx_result + + async def _prepare_eth_tx( + self, + value: int, + to: Address, + source_chain: ChainID, + target_chain: ChainID, + ) -> PreparedBridgeTx: + + w3 = self.w3s[source_chain] + + proxy_contract = self.l1_contract + func = proxy_contract.functions.bridgeETHTo( + _to=to, + _minGasLimit=MSG_GAS_LIMIT, + _extraData=b"", + ) + + tx = await build_standard_transaction(func=func, account=self.account, w3=w3, value=value, logger=self.logger) + + tx_gas_cost = tx["gas"] * tx["maxFeePerGas"] + if value < tx_gas_cost: + msg = f"⚠️ Bridge tx value {value} is smaller than gas cost {tx_gas_cost} (~{tx_gas_cost/value:.2f}x value)" + self.logger.warning(msg) + + signed_tx = sign_tx(w3=w3, tx=tx, private_key=self.private_key) + + tx_details = BridgeTxDetails( + contract=func.address, + method=func.fn_name, + kwargs=func.kwargs, + tx=tx, + signed_tx=signed_tx, + ) + + prepared_tx = PreparedBridgeTx( + amount=0, + value=value, + fee_value=0, + fee_in_token=0, + currency=Currency.ETH, + source_chain=source_chain, + target_chain=target_chain, + bridge_type=BridgeType.STANDARD, + tx_details=tx_details, + ) + + return prepared_tx + + async def _send_bridge_tx(self, prepared_tx: PreparedBridgeTx) -> BridgeTxResult: + + source_w3 = self.w3s[prepared_tx.source_chain] + target_w3 = self.w3s[prepared_tx.target_chain] + + # record on target chain where we should start polling + target_from_block = await target_w3.eth.block_number + + signed_tx = prepared_tx.tx_details.signed_tx + tx_hash = await send_tx(w3=source_w3, signed_tx=signed_tx) + source_tx = TxResult(tx_hash=tx_hash) + + tx_result = BridgeTxResult( + prepared_tx=prepared_tx, + source_tx=source_tx, + target_from_block=target_from_block, + ) + + return tx_result + + async 
def _confirm_source_tx(self, tx_result: BridgeTxResult) -> TxReceipt: + + msg = "⏳ Checking source chain [%s] tx receipt for %s" + self.logger.info(msg, tx_result.source_chain.name, tx_result.source_tx.tx_hash) + + w3 = self.w3s[tx_result.source_chain] + tx_receipt = await wait_for_tx_finality( + w3=w3, + tx_hash=tx_result.source_tx.tx_hash, + logger=self.logger, + ) + + return tx_receipt + + async def _wait_for_target_event(self, tx_result: BridgeTxResult) -> HexBytes: + + event_log = await self._fetch_standard_event_log(tx_result) + tx_hash = event_log["transactionHash"] + self.logger.info(f"Target event tx_hash found: {tx_hash.to_0x_hex()}") + + return tx_hash + + async def _confirm_target_tx(self, tx_result: BridgeTxResult) -> TxReceipt: + + msg = "⏳ Checking target chain [%s] tx receipt for %s" + self.logger.info(msg, tx_result.target_chain.name, tx_result.target_tx.tx_hash) + + w3 = self.w3s[tx_result.target_chain] + tx_receipt = await wait_for_tx_finality( + w3=w3, + tx_hash=tx_result.target_tx.tx_hash, + logger=self.logger, + ) + + return tx_receipt + + async def _fetch_standard_event_log(self, tx_result: BridgeTxResult) -> LogReceipt: + + source_event = self.l1_messenger_proxy.events.SentMessage() + + target_w3 = self.w3s[tx_result.target_chain] + try: + source_event_log = source_event.process_log(tx_result.source_tx.tx_receipt.logs[3]) + nonce = source_event_log["args"]["messageNonce"] + except Exception as e: + raise BridgeEventParseError(f"Could not decode StandardBridge messageNonce: {e}") from e + + self.logger.info(f"πŸ”– Source [{tx_result.source_chain.name}] messageNonce: {nonce}") + + args = source_event_log["args"] + gas_limit = args["gasLimit"] + sender = AsyncWeb3.to_checksum_address(args["sender"]) + target = AsyncWeb3.to_checksum_address(args["target"]) + message = args["message"] + value = tx_result.amount + + func = self.l1_messenger_proxy.functions.relayMessage( + _nonce=nonce, + _sender=sender, + _target=target, + _value=value, + 
_minGasLimit=gas_limit, + _message=message, + ) + + msg_hash = keccak(encode_abi(func)) + tx_result.event_id = msg_hash.hex() + self.logger.info(f"πŸ—οΈ Computed msgHash: {tx_result.event_id}") + + target_event = self.l2_messenger_proxy.events.RelayedMessage() + failed_target_event = self.l2_messenger_proxy.events.FailedRelayedMessage() + + filter_params = make_filter_params( + event=target_event, + from_block=tx_result.target_from_block, + argument_filters={"msgHash": msg_hash}, + ) + failed_filter_params = make_filter_params( + event=failed_target_event, + from_block=tx_result.target_from_block, + argument_filters={"msgHash": msg_hash}, + ) + + self.logger.info(f"πŸ” Listening for msgHash on [{tx_result.target_chain.name}] at {target_event.address}") + + relayed_task = asyncio.create_task(wait_for_bridge_event(target_w3, filter_params, logger=self.logger)) + failed_task = asyncio.create_task(wait_for_bridge_event(target_w3, failed_filter_params, logger=self.logger)) + done, pending = await asyncio.wait([relayed_task, failed_task], return_when=asyncio.FIRST_COMPLETED) + + for task in pending: + task.cancel() + if failed_task in done: + # reraises Exceptions (i.e. BridgeEventTimeout), and in this scenario not raise StandardBridgeRelayFailed + event_log = done.pop().result() + raise StandardBridgeRelayFailed( + "The relay was attempted but reverted on L2. 
" + "Likely causes are out-of-gas, non-standard token implementation, or target contract reversion.\n" + "Action:\n" + "- Inspect the L2 tx receipt logs for the revert reason.\n" + "- If out-of-gas, resubmit with higher _minGasLimit.\n" + "- If token mismatch, check that the L2 token contract matches the expected bridgeable ERC20.\n" + "- If paused/reverted, retry after resolving the underlying contract state.", + event_log=event_log, + ) + + event_log = done.pop().result() + + return event_log diff --git a/derive_client/_bridge/transaction.py b/derive_client/_bridge/transaction.py deleted file mode 100644 index 10a78fcf..00000000 --- a/derive_client/_bridge/transaction.py +++ /dev/null @@ -1,116 +0,0 @@ -from logging import Logger - -from eth_account import Account -from web3 import Web3 -from web3.contract import Contract - -from derive_client.constants import DEFAULT_GAS_FUNDING_AMOUNT, DEPOSIT_GAS_LIMIT, MSG_GAS_LIMIT -from derive_client.data_types import Address, ChainID, TxStatus -from derive_client.exceptions import InsufficientGas -from derive_client.utils import build_standard_transaction, estimate_fees, exp_backoff_retry, send_and_confirm_tx - - -def _check_gas_balance(w3: Web3, account: Address, gas_limit=DEPOSIT_GAS_LIMIT): - """Check whether the account has sufficient gas balance.""" - balance = w3.eth.get_balance(account) - if balance < gas_limit: - raise InsufficientGas( - f"Insufficient balance for gas: {gas_limit} < {balance} ({(balance / gas_limit * 100):.2f}%)" - ) - - -def ensure_balance(token_contract: Contract, owner: Address, amount: int): - balance = token_contract.functions.balanceOf(owner).call() - if amount > balance: - raise ValueError(f"Not enough tokens to withdraw: {amount} < {balance} ({(balance / amount * 100):.2f}%)") - - -def ensure_allowance( - w3: Web3, - token_contract: Contract, - owner: Address, - spender: Address, - amount: int, - private_key: str, - logger: Logger, -): - allowance = token_contract.functions.allowance(owner, 
spender).call() - if amount > allowance: - logger.info(f"Increasing allowance from {allowance} to {amount}") - increase_allowance( - w3=w3, - from_account=Account.from_key(private_key), - erc20_contract=token_contract, - spender=spender, - amount=amount, - private_key=private_key, - logger=logger, - ) - - -def increase_allowance( - w3: Web3, - from_account: Account, - erc20_contract: Contract, - spender: Address, - amount: int, - private_key: str, - logger: Logger, -) -> None: - func = erc20_contract.functions.approve(spender, amount) - tx = build_standard_transaction(func=func, account=from_account, w3=w3) - tx_result = send_and_confirm_tx(w3=w3, tx=tx, private_key=private_key, action="approve()", logger=logger) - if tx_result.status != TxStatus.SUCCESS: - raise RuntimeError("approve() failed") - - -def prepare_mainnet_to_derive_gas_tx( - w3: Web3, - account: Account, - proxy_contract: Contract, - amount: int = DEFAULT_GAS_FUNDING_AMOUNT, -) -> dict: - """ - Prepares a bridging transaction to move ETH from Ethereum mainnet to Derive. - This function uses fee estimation and simulates the tx. - """ - - # This bridges ETH from EOA -> EOA, *not* to the smart contract funding wallet. - # If the Derive-side recipient must be a smart contract, this must be changed. 
- - if not w3.eth.chain_id == ChainID.ETH: - raise ValueError(f"Connected to chain ID {w3.eth.chain_id}, but expected Ethereum mainnet ({ChainID.ETH}).") - - balance = w3.eth.get_balance(account.address) - nonce = w3.eth.get_transaction_count(account.address) - - @exp_backoff_retry - def simulate_tx(): - fee_estimations = estimate_fees(w3, blocks=10, percentiles=[99]) - max_fee = fee_estimations[0]["maxFeePerGas"] - priority_fee = fee_estimations[0]["maxPriorityFeePerGas"] - - tx = proxy_contract.functions.bridgeETH( - MSG_GAS_LIMIT, # _minGasLimit # Optimism - b"", # _extraData - ).build_transaction( - { - "from": account.address, - "value": amount, - "nonce": nonce, - "maxFeePerGas": max_fee, - "maxPriorityFeePerGas": priority_fee, - "chainId": ChainID.ETH, - } - ) - estimated_gas = w3.eth.estimate_gas(tx) - tx["gas"] = estimated_gas - required = estimated_gas * max_fee + amount - if balance < required: - raise RuntimeError( - f"Insufficient funds: have {balance}, need {required} ({(balance / required * 100):.2f}%" - ) - w3.eth.call(tx) - return tx - - return simulate_tx() diff --git a/derive_client/_bridge/w3.py b/derive_client/_bridge/w3.py new file mode 100644 index 00000000..bb181f32 --- /dev/null +++ b/derive_client/_bridge/w3.py @@ -0,0 +1,538 @@ +import asyncio +import heapq +import json +import statistics +import time +from logging import Logger +from typing import Any, Callable, Generator, Literal + +from eth_abi import encode +from eth_account import Account +from eth_account.datastructures import SignedTransaction +from requests import RequestException +from web3 import AsyncHTTPProvider, AsyncWeb3 +from web3.contract import Contract +from web3.contract.async_contract import AsyncContract, AsyncContractEvent, AsyncContractFunction +from web3.datastructures import AttributeDict + +from derive_client.constants import ( + ABI_DATA_DIR, + ASSUMED_BRIDGE_GAS_LIMIT, + DEFAULT_RPC_ENDPOINTS, + GAS_FEE_BUFFER, + MIN_PRIORITY_FEE, +) +from 
derive_client.data_types import ( + Address, + ChainID, + FeeEstimate, + FeeEstimates, + FeeHistory, + GasPriority, + RPCEndpoints, + TxStatus, + Wei, +) +from derive_client.exceptions import ( + BridgeEventTimeout, + FinalityTimeout, + InsufficientNativeBalance, + InsufficientTokenBalance, + NoAvailableRPC, + TransactionDropped, + TxPendingTimeout, +) +from derive_client.utils.logger import get_logger +from derive_client.utils.retry import exp_backoff_retry +from derive_client.utils.w3 import EndpointState, load_rpc_endpoints + +EVENT_LOG_RETRIES = 10 + + +def make_rotating_provider_middleware( + endpoints: list[AsyncHTTPProvider], + *, + initial_backoff: float = 1.0, + max_backoff: float = 600.0, + logger: Logger, +) -> Callable[[Callable[[str, Any], Any], AsyncWeb3], Callable[[str, Any], Any]]: + """ + v6.11-style middleware: + - round-robin via a min-heap of `next_available` times + - on 429: exponential back-off for that endpoint, capped + """ + + heap: list[EndpointState] = [EndpointState(p) for p in endpoints] + heapq.heapify(heap) + lock = asyncio.Lock() + + async def middleware_factory(make_request: Callable[[str, Any], Any], w3: AsyncWeb3) -> Callable[[str, Any], Any]: + async def rotating_backoff(method: str, params: Any) -> Any: + + now = time.monotonic() + + while True: + # 1) grab the earlies-available endpoint + async with lock: + state = heapq.heappop(heap) + + # 2) if it's not yet ready, push back and error out + if state.next_available > now: + async with lock: + heapq.heappush(heap, state) + msg = "All RPC endpoints are cooling down. Try again in %.2f seconds." 
+ logger.warning(msg, state.next_available - now) + raise NoAvailableRPC(msg) + + try: + # 3) attempt the request + resp = await state.provider.make_request(method, params) + + # Json‑RPC error branch + if isinstance(resp, dict) and (error := resp.get("error")): + state.backoff = state.backoff * 2 if state.backoff else initial_backoff + state.backoff = min(state.backoff, max_backoff) + state.next_available = now + state.backoff + async with lock: + heapq.heappush(heap, state) + err_msg = error.get("message", "") + err_code = error.get("code", "") + msg = "RPC error on %s: %s (code: %s)β†’ backing off %.2fs" + logger.info(msg, state.provider.endpoint_uri, err_msg, err_code, state.backoff) + continue + + # 4) on success, reset its backoff and re-schedule immediately + state.backoff = 0.0 + state.next_available = now + async with lock: + heapq.heappush(heap, state) + return resp + + except RequestException as e: + logger.debug("Endpoint %s failed: %s", state.provider.endpoint_uri, e) + + # We retry on all exceptions + hdr = (e.response and e.response.headers or {}).get("Retry-After") + try: + backoff = float(hdr) + except (ValueError, TypeError): + backoff = state.backoff * 2 if state.backoff > 0 else initial_backoff + + # cap backoff and schedule + state.backoff = min(backoff, max_backoff) + state.next_available = now + state.backoff + async with lock: + heapq.heappush(heap, state) + msg = "Backing off %s for %.2fs" + logger.info(msg, state.provider.endpoint_uri, backoff) + continue + except Exception as e: + msg = "Unexpected error calling %s %s on %s; backing off %.2fs and continuing" + logger.exception(msg, method, params, state.provider.endpoint_uri, max_backoff, exc_info=e) + state.backoff = max_backoff + state.next_available = now + state.backoff + async with lock: + heapq.heappush(heap, state) + continue + + return rotating_backoff + + return middleware_factory + + +def get_w3_connection( + chain_id: ChainID, + *, + rpc_endpoints: RPCEndpoints | None = None, + 
logger: Logger | None = None, +) -> AsyncWeb3: + + rpc_endpoints = rpc_endpoints or load_rpc_endpoints(DEFAULT_RPC_ENDPOINTS) + providers = [AsyncHTTPProvider(str(url)) for url in rpc_endpoints[chain_id]] + + logger = logger or get_logger() + + # NOTE: Initial provider is a no-op once middleware is in place + # NOTE: If you don't set a dummy provider, bad things will happen! + provider = AsyncHTTPProvider() + w3 = AsyncWeb3(provider) + + rotator = make_rotating_provider_middleware( + providers, + initial_backoff=1.0, + max_backoff=600.0, + logger=logger, + ) + w3.middleware_onion.add(rotator, name="rotating_provider") + + return w3 + + +def get_w3_connections(logger) -> dict[ChainID, AsyncWeb3]: + return {chain_id: get_w3_connection(chain_id, logger=logger) for chain_id in ChainID} + + +def get_contract(w3: AsyncWeb3, address: str, abi: list) -> AsyncContract: + return w3.eth.contract(address=AsyncWeb3.to_checksum_address(address), abi=abi) + + +def get_erc20_contract(w3: AsyncWeb3, token_address: str) -> AsyncContract: + erc20_abi_path = ABI_DATA_DIR / "erc20.json" + abi = json.loads(erc20_abi_path.read_text()) + return get_contract(w3=w3, address=token_address, abi=abi) + + +async def ensure_token_balance(token_contract: Contract, owner: Address, amount: int, fee_in_token: int = 0): + balance = await token_contract.functions.balanceOf(owner).call() + required = amount + fee_in_token + if amount > balance: + raise InsufficientTokenBalance( + f"Not enough tokens for withdraw: required={required} (amount={amount} + fee={fee_in_token}), " + f"balance={balance} ({(balance / required * 100):.2f}% of required)" + ) + + +async def ensure_token_allowance( + w3: AsyncWeb3, + token_contract: Contract, + owner: Address, + spender: Address, + amount: int, + private_key: str, + logger: Logger, +): + allowance = await token_contract.functions.allowance(owner, spender).call() + if amount > allowance: + logger.info(f"Increasing allowance from {allowance} to {amount}") + await 
_increase_token_allowance( + w3=w3, + from_account=Account.from_key(private_key), + erc20_contract=token_contract, + spender=spender, + amount=amount, + private_key=private_key, + logger=logger, + ) + + +async def _increase_token_allowance( + w3: AsyncWeb3, + from_account: Account, + erc20_contract: Contract, + spender: Address, + amount: int, + private_key: str, + logger: Logger, +) -> None: + func = erc20_contract.functions.approve(spender, amount) + tx = await build_standard_transaction(func=func, account=from_account, w3=w3, logger=logger) + signed_tx = sign_tx(w3=w3, tx=tx, private_key=private_key) + tx_hash = await send_tx(w3=w3, signed_tx=signed_tx) + tx_receipt = await wait_for_tx_finality(w3=w3, tx_hash=tx_hash, logger=logger) + if tx_receipt.status != TxStatus.SUCCESS: + raise RuntimeError("approve() failed") + + +async def estimate_fees(w3, blocks: int = 20) -> FeeEstimates: + """Estimate EIP-1559 maxFeePerGas and maxPriorityFeePerGas from recent blocks for GasPriority percentiles.""" + + percentiles = tuple(map(int, GasPriority)) + fee_history = FeeHistory(**await w3.eth.fee_history(blocks, "pending", percentiles)) + latest_base_fee = fee_history.base_fee_per_gas[-1] + + percentile_rewards = {p: [] for p in percentiles} + for block_rewards in fee_history.reward: + for percentile, reward in zip(percentiles, block_rewards): + percentile_rewards[percentile].append(reward) + + estimates = {} + for percentile in percentiles: + rewards = percentile_rewards[percentile] + non_zero_rewards = list(filter(lambda x: x, rewards)) + if non_zero_rewards: + estimated_priority_fee = int(statistics.median(non_zero_rewards)) + else: + estimated_priority_fee = MIN_PRIORITY_FEE + + buffered_base_fee = int(latest_base_fee * GAS_FEE_BUFFER) + estimated_max_fee = buffered_base_fee + estimated_priority_fee + estimates[percentile] = FeeEstimate(estimated_max_fee, estimated_priority_fee) + + return FeeEstimates(estimates) + + +async def preflight_native_balance_check( + w3: 
AsyncWeb3, + fee_estimate: FeeEstimate, + account: Account, + value: Wei, +) -> None: + balance = await w3.eth.get_balance(account.address) + max_fee_per_gas = fee_estimate.max_fee_per_gas + + max_gas_cost = ASSUMED_BRIDGE_GAS_LIMIT * max_fee_per_gas + total_cost = max_gas_cost + value + + if not balance >= total_cost: + chain_id = ChainID(await w3.eth.chain_id) + ratio = balance / total_cost * 100 + raise InsufficientNativeBalance( + f"Insufficient funds on {chain_id.name} ({chain_id}): " + f"balance={balance}, required={total_cost} {ratio:.2f}% available " + f"(includes value={value} and assumed gas limit={ASSUMED_BRIDGE_GAS_LIMIT} at {max_fee_per_gas} wei/gas)", + balance=balance, + chain_id=chain_id, + assumed_gas_limit=ASSUMED_BRIDGE_GAS_LIMIT, + fee_estimate=fee_estimate, + ) + + +@exp_backoff_retry +async def build_standard_transaction( + func, + account: Account, + w3: AsyncWeb3, + logger: Logger, + value: int = 0, + gas_blocks: int = 30, + gas_priority: GasPriority = GasPriority.MEDIUM, +) -> dict: + """Standardized transaction building with EIP-1559 and gas estimation""" + + nonce = await w3.eth.get_transaction_count(account.address) + fee_estimations = await estimate_fees(w3, blocks=gas_blocks) + + for percentile, fee_estimation in fee_estimations.items(): + logger.debug(f"{fee_estimation} [{percentile}% Percentile]") + + fee_estimate = fee_estimations[gas_priority] + logger.info(f"Fee estimate: {fee_estimate} [Gas priority {gas_priority.name} | {gas_priority.value}% Percentile]") + + await preflight_native_balance_check(w3=w3, account=account, fee_estimate=fee_estimate, value=value) + + tx = await func.build_transaction( + { + "from": account.address, + "nonce": nonce, + "maxFeePerGas": fee_estimate.max_fee_per_gas, + "maxPriorityFeePerGas": fee_estimate.max_priority_fee_per_gas, + "chainId": await w3.eth.chain_id, + "value": value, + } + ) + + # Warn if actual gas exceeds ASSUMED_BRIDGE_GAS_LIMIT; may indicate the limit is too low + # and could cause 
unhandled RPC errors instead of raising InsufficientNativeBalance + if tx["gas"] > ASSUMED_BRIDGE_GAS_LIMIT: + logger.warning(f"Bridge tx gas {tx['gas']} exceeds assumed limit {ASSUMED_BRIDGE_GAS_LIMIT}") + + # simulate the tx + await w3.eth.call(tx) + return tx + + +async def wait_for_tx_finality( + w3: AsyncWeb3, + tx_hash: str, + logger: Logger, + finality_blocks: int = 10, + timeout: float = 300.0, + poll_interval: float = 1.0, +) -> AttributeDict: + """ + Wait until tx is mined and has `finality_blocks` confirmations. + On timeout this raises one of: + - FinalityTimeout: receipt exists but not enough confirmations + - TxPendingTimeout: no receipt, but tx present and pending in mempool + - TransactionDropped: no receipt and tx not known to node (likely dropped) + + Notes on reorgs and provider inconsistency: + - A chain reorg can cause a previously-seen receipt to disappear (tx becomes "unmined"). + In that case the tx will often reappear as pending in the mempool (TxPendingTimeout), + but it can also be dropped entirely (TransactionDropped) or re-mined later. + - With rotating RPC providers you may observe receipts, tx entries, and block numbers + from different nodes that disagree. This function classifies a timeout based on a + single get_transaction probe and is intentionally conservative; callers should + interpret exceptions as: + * FinalityTimeout: node reports mined or we observed a receipt but not enough confirms: + wait longer; invoke this function again. + * TxPendingTimeout: node knows the tx and reports it pending: + either wait/poll longer or resubmit (reuse the nonce to prevent duplication). + * TransactionDropped: node has no record (likely dropped or node out-of-sync): + either wait/poll longer or resubmit (reuse the nonce to prevent duplication). 
+ """ + + start_time = time.monotonic() + + while True: + try: + receipt = AttributeDict(await w3.eth.get_transaction_receipt(tx_hash)) + # receipt can disappear temporarily during reorgs, or if RPC provider is not synced + except Exception as exc: + receipt = None + logger.debug("No tx receipt for tx_hash=%s", tx_hash, extra={"exc": exc}) + + # blockNumber can change as tx gets reorged into different blocks + try: + if ( + receipt is not None + and (block_number := await w3.eth.block_number) >= receipt.blockNumber + finality_blocks + ): + return receipt + except Exception as exc: + msg = "Failed to fetch block_number trying to assess finality of tx_hash=%s" + logger.debug(msg, tx_hash, extra={"exc": exc}) + + if time.monotonic() - start_time > timeout: + # 1) We have a receipt but did not reach required confirmations + if receipt is not None: + raise FinalityTimeout( + f"Timed out waiting for finality: tx={tx_hash!r}, timeout_s={timeout}r ", + f"required confirmations={finality_blocks}." + f"\nreceipt_block={receipt.blockNumber!r}, current_block={block_number!r}.", + "\nAction: wait longer / poll for finality again.", + ) + # 2) No receipt: check if tx is known to node (mempool) or dropped + try: + tx = AttributeDict(w3.eth.get_transaction(tx_hash)) + except Exception as exc: + tx = None + logger.debug("get_transaction probe failed for tx_hash=%s", tx_hash, extra={"exc": exc}) + + # still pending in mempool + if tx is not None and tx.blockNumber is None: + raise TxPendingTimeout( + f"No receipt within timeout: tx={tx_hash!r}, timeout_s={timeout}.", + "\nNode reports transaction present and pending in mempool.", + "\nAction: either wait/poll longer or resubmit (reuse the nonce to prevent duplication).", + ) + # node reports tx mined, but no receipt + elif tx is not None: + raise FinalityTimeout( + f"Timed out waiting for finality: tx={tx_hash!r}, timeout_s={timeout}, " + f"required confirmations={finality_blocks}." 
+ f"\nNode reports tx mined at block {tx.blockNumber!r} but receipt not observed by this verifier." + "\nAction: wait longer / poll for finality again.", + ) + # tx dropped or node no longer knows about it + else: + raise TransactionDropped( + f"Transaction not found after timeout: tx={tx_hash!r}, timeout_s={timeout}.", + "\nNode does not report a receipt or pending transaction (likely dropped).", + "\nAction: either wait/poll longer or resubmit (reuse the nonce to prevent duplication).", + ) + + logger.debug("Waiting for finality: tx=%s sleeping=%.1fs", tx_hash, poll_interval) + await asyncio.sleep(poll_interval) + + +def sign_tx(w3: AsyncWeb3, tx: dict, private_key: str) -> SignedTransaction: + signed_tx = w3.eth.account.sign_transaction(tx, private_key=private_key) + return signed_tx + + +async def send_tx(w3: AsyncWeb3, signed_tx: SignedTransaction) -> str: + tx_hash = await w3.eth.send_raw_transaction(signed_tx.raw_transaction) + return tx_hash.to_0x_hex() + + +async def iter_events( + w3: AsyncWeb3, + filter_params: dict, + *, + condition: Callable[[AttributeDict], bool] = lambda _: True, + max_block_range: int = 10_000, + poll_interval: float = 5.0, + timeout: float | None = None, + logger: Logger, +) -> Generator[AttributeDict, None, None]: + """Stream matching logs over a fixed or live block window. 
Optionally raises TimeoutError.""" + + original_filter_params = filter_params.copy() # return original in TimeoutError + if (cursor := filter_params["fromBlock"]) == "latest": + cursor = await w3.eth.block_number + + start_block = cursor + filter_params["toBlock"] = filter_params.get("toBlock", "latest") + fixed_ceiling = None if filter_params["toBlock"] == "latest" else filter_params["toBlock"] + + deadline = None if timeout is None else time.monotonic() + timeout + while True: + if deadline and time.monotonic() > deadline: + msg = f"Timed out waiting for events after scanning blocks {start_block}-{cursor}" + logger.warning(msg) + raise TimeoutError(f"{msg}: filter_params: {original_filter_params}") + upper = fixed_ceiling or await w3.eth.block_number + if cursor <= upper: + end = min(upper, cursor + max_block_range - 1) + filter_params["fromBlock"] = hex(cursor) + filter_params["toBlock"] = hex(end) + # For example, when rotating providers are out of sync + retry_get_logs = exp_backoff_retry(w3.eth.get_logs, attempts=EVENT_LOG_RETRIES) + logs = await retry_get_logs(filter_params=filter_params) + logger.debug(f"Scanned {cursor} - {end}: {len(logs)} logs") + for log in filter(condition, logs): + yield log + cursor = end + 1 # bounds are inclusive + + if fixed_ceiling and cursor > fixed_ceiling: + return + + await asyncio.sleep(poll_interval) + + +async def wait_for_bridge_event( + w3: AsyncWeb3, + filter_params: dict, + *, + condition: Callable[[AttributeDict], bool] = lambda _: True, + max_block_range: int = 10_000, + poll_interval: float = 5.0, + timeout: float = 300.0, + logger: Logger, +) -> AttributeDict: + """Wait for the first matching bridge-related log on the target chain or raise BridgeEventTimeout.""" + + try: + return await anext(iter_events(**locals())) + except TimeoutError as e: + raise BridgeEventTimeout("Timed out waiting for target chain bridge event") from e + + +def make_filter_params( + event: AsyncContractEvent, + from_block: int 
| Literal["latest"], + to_block: int | Literal["latest"] = "latest", + argument_filters: dict | None = None, +) -> dict: + """ + Function to create an eth_getLogs compatible filter_params for this event without using .create_filter. + event.create_filter uses eth_newFilter (a "push"), which not all RPC endpoints support. + """ + + argument_filters = argument_filters or {} + filter_params = event._get_event_filter_params( + fromBlock=from_block, + toBlock=to_block, + argument_filters=argument_filters, + abi=event.abi, + ) + filter_params["topics"] = tuple(filter_params["topics"]) + address = filter_params["address"] + if isinstance(address, str): + filter_params["address"] = AsyncWeb3.to_checksum_address(address) + elif isinstance(address, (list, tuple)) and len(address) == 1: + filter_params["address"] = AsyncWeb3.to_checksum_address(address[0]) + else: + raise ValueError(f"Unexpected address filter: {address!r}") + + return filter_params + + +def encode_abi(func: AsyncContractFunction) -> bytes: + """Get the ABI-encoded data (including 4-byte selector).""" + + types = [arg["internalType"] for arg in func.abi["inputs"]] + selector = bytes.fromhex(func.selector.removeprefix("0x")) + + return selector + encode(types, func.arguments) diff --git a/derive_client/cli.py b/derive_client/cli.py index 08999ffe..4494f4c9 100644 --- a/derive_client/cli.py +++ b/derive_client/cli.py @@ -2,6 +2,7 @@ Cli module in order to allow interaction. 
""" +import math import os from pathlib import Path from textwrap import dedent @@ -10,9 +11,9 @@ import rich_click as click from dotenv import load_dotenv from rich import print +from rich.table import Table from derive_client.analyser import PortfolioAnalyser -from derive_client.clients.base_client import BaseClient from derive_client.data_types import ( ChainID, CollateralAsset, @@ -22,17 +23,61 @@ OrderSide, OrderStatus, OrderType, + PreparedBridgeTx, SubaccountType, TxStatus, UnderlyingCurrency, ) from derive_client.derive import DeriveClient -from derive_client.utils import get_logger +from derive_client.utils import from_base_units, get_logger click.rich_click.USE_RICH_MARKUP = True pd.set_option("display.precision", 2) +def fmt_sig_up_to(x: float, sig: int = 4) -> str: + """Format x to up to `sig` significant digits, preserving all necessary decimals.""" + + if x == 0: + return "0" + + order = math.floor(math.log10(abs(x))) + decimals = max(sig - order - 1, 0) + formatted = f"{x:.{decimals}f}" + return formatted.rstrip("0").rstrip(".") + + +def rich_prepared_tx(prepared_tx: PreparedBridgeTx): + + table = Table(title="Prepared Bridge Transaction", show_header=False, box=None) + if prepared_tx.amount > 0: + human_amount = from_base_units(amount=prepared_tx.amount, currency=prepared_tx.currency) + table.add_row("Amount", f"{human_amount} {prepared_tx.currency.name} (base units: {prepared_tx.amount})") + if prepared_tx.fee_in_token > 0: + fee_human = from_base_units(prepared_tx.fee_in_token, prepared_tx.currency) + table.add_row( + "Estimated fee (token)", + f"{fmt_sig_up_to(fee_human)} {prepared_tx.currency.name} (base units: {prepared_tx.fee_in_token})", + ) + if prepared_tx.value and prepared_tx.value > 0: + human_value = prepared_tx.value / 1e18 + table.add_row("Value", f"{human_value} ETH (base units: {prepared_tx.value})") + if prepared_tx.fee_value > 0: + human_fee_value = fmt_sig_up_to(prepared_tx.fee_value / 1e9) + table.add_row("Estimated fee 
(native)", f"{human_fee_value} gwei (base units: {prepared_tx.fee_value})") + + # table.add_row("Value", f"{fmt_sig_up_to(prepared_tx.value / 1e18)} ETH (base units: {prepared_tx.value})") + table.add_row("Source chain", prepared_tx.source_chain.name) + table.add_row("Target chain", prepared_tx.target_chain.name) + table.add_row("Bridge type", prepared_tx.bridge_type.name) + table.add_row("Tx hash", prepared_tx.tx_hash) + table.add_row("Gas limit", str(prepared_tx.gas)) + table.add_row("Max fee/gas", f"{fmt_sig_up_to(prepared_tx.max_fee_per_gas / 1e9)} gwei") + table.add_row("Max total fee", f"{fmt_sig_up_to(prepared_tx.max_total_fee / 1e9)} gwei") + + return table + + def set_logger(ctx, level): """Set the logger.""" if not hasattr(ctx, "logger"): @@ -141,7 +186,7 @@ def bridge(): "-a", type=float, required=True, - help="The amount to deposit in ETH (will be converted to Wei).", + help="The amount to deposit in human units of the selected token (converted to base units internally).", ) @click.pass_context def deposit(ctx, chain_id, currency, amount): @@ -155,10 +200,17 @@ def deposit(ctx, chain_id, currency, amount): chain_id = ChainID[chain_id] currency = Currency[currency] - client: BaseClient = ctx.obj["client"] + client: DeriveClient = ctx.obj["client"] + + prepared_tx = client.prepare_deposit_to_derive(chain_id=chain_id, currency=currency, human_amount=amount) - bridge_tx_result = client.deposit_to_derive(chain_id=chain_id, currency=currency, amount=amount) - bridge_tx_result = client.poll_bridge_progress(bridge_tx_result) + print(rich_prepared_tx(prepared_tx)) + if not click.confirm("Do you want to submit this transaction?", default=False): + print("[yellow]Aborted by user.[/yellow]") + return + + tx_result = client.submit_bridge_tx(prepared_tx=prepared_tx) + bridge_tx_result = client.poll_bridge_progress(tx_result=tx_result) match bridge_tx_result.status: case TxStatus.SUCCESS: @@ -191,7 +243,7 @@ def deposit(ctx, chain_id, currency, amount): "-a", 
type=float, required=True, - help="The amount to deposit in ETH (will be converted to Wei).", + help="The amount to withdraw in human units of the selected token (converted to base units internally).", ) @click.pass_context def withdraw(ctx, chain_id, currency, amount): @@ -207,8 +259,15 @@ def withdraw(ctx, chain_id, currency, amount): client: DeriveClient = ctx.obj["client"] - bridge_tx_result = client.withdraw_from_derive(chain_id=chain_id, currency=currency, amount=amount) - bridge_tx_result = client.poll_bridge_progress(bridge_tx_result) + prepared_tx = client.prepare_withdrawal_from_derive(chain_id=chain_id, currency=currency, human_amount=amount) + + print(rich_prepared_tx(prepared_tx)) + if not click.confirm("Do you want to submit this transaction?", default=False): + print("[yellow]Aborted by user.[/yellow]") + return + + tx_result = client.submit_bridge_tx(prepared_tx=prepared_tx) + bridge_tx_result = client.poll_bridge_progress(tx_result=tx_result) match bridge_tx_result.status: case TxStatus.SUCCESS: @@ -428,7 +487,7 @@ def fetch_tickers(ctx, instrument_name): ) def transfer_collateral(ctx, amount, to, asset): """Transfer collateral.""" - client: BaseClient = ctx.obj["client"] + client: DeriveClient = ctx.obj["client"] result = client.transfer_collateral(amount=amount, to=to, asset=CollateralAsset(asset)) print(result) @@ -439,7 +498,7 @@ def transfer_collateral(ctx, amount, to, asset): def fetch_subaccounts(ctx): """Fetch subaccounts.""" print("Fetching subaccounts") - client = ctx.obj["client"] + client: DeriveClient = ctx.obj["client"] subaccounts = client.fetch_subaccounts() print(subaccounts) @@ -467,7 +526,7 @@ def fetch_subaccount(ctx, subaccount_id, underlying_currency, columns): print("Fetching subaccount") print(f"Subaccount ID: {subaccount_id}") print(f"Underlying currency: {underlying_currency}") - client = ctx.obj["client"] + client: DeriveClient = ctx.obj["client"] subaccount = client.fetch_subaccount(subaccount_id=subaccount_id) analyser 
= PortfolioAnalyser(subaccount) print("Positions") @@ -510,7 +569,7 @@ def create_subaccount(ctx, collateral_asset, underlying_currency, subaccount_typ subaccount_type = SubaccountType(subaccount_type) collateral_asset = CollateralAsset(collateral_asset) print(f"Creating subaccount with collateral asset {collateral_asset} and underlying currency {underlying_currency}") - client = ctx.obj["client"] + client: DeriveClient = ctx.obj["client"] subaccount_id = client.create_subaccount( amount=int(amount * 1e6), subaccount_type=subaccount_type, @@ -561,7 +620,7 @@ def create_subaccount(ctx, collateral_asset, underlying_currency, subaccount_typ def fetch_orders(ctx, instrument_name, label, page, page_size, status, regex): """Fetch orders.""" print("Fetching orders") - client = ctx.obj["client"] + client: DeriveClient = ctx.obj["client"] orders = client.fetch_orders( instrument_name=instrument_name, label=label, @@ -625,7 +684,7 @@ def fetch_orders(ctx, instrument_name, label, page, page_size, status, regex): def cancel_order(ctx, order_id, instrument_name): """Cancel order.""" print("Cancelling order") - client = ctx.obj["client"] + client: DeriveClient = ctx.obj["client"] result = client.cancel(order_id=order_id, instrument_name=instrument_name) print(result) @@ -635,7 +694,7 @@ def cancel_order(ctx, order_id, instrument_name): def cancel_all_orders(ctx): """Cancel all orders.""" print("Cancelling all orders") - client = ctx.obj["client"] + client: DeriveClient = ctx.obj["client"] result = client.cancel_all() print(result) diff --git a/derive_client/clients/async_client.py b/derive_client/clients/async_client.py index 41312125..4d08c33b 100644 --- a/derive_client/clients/async_client.py +++ b/derive_client/clients/async_client.py @@ -3,6 +3,7 @@ """ import asyncio +import functools import json import time from datetime import datetime @@ -11,14 +12,28 @@ import aiohttp from derive_action_signing.utils import sign_ws_login, utc_now_ms +from derive_client._bridge import 
BridgeClient +from derive_client._bridge.standard_bridge import StandardBridge from derive_client.constants import DEFAULT_REFERER, TEST_PRIVATE_KEY -from derive_client.data_types import Environment, InstrumentType, OrderSide, OrderType, TimeInForce, UnderlyingCurrency - -from .base_client import DeriveJSONRPCException -from .ws_client import WsClient - - -class AsyncClient(WsClient): +from derive_client.data_types import ( + Address, + BridgeTxResult, + ChainID, + Currency, + Environment, + InstrumentType, + OrderSide, + OrderType, + PreparedBridgeTx, + TimeInForce, + UnderlyingCurrency, +) +from derive_client.utils import unwrap_or_raise + +from .base_client import BaseClient, DeriveJSONRPCException + + +class AsyncClient(BaseClient): """ We use the async client to make async requests to the derive API We us the ws client to make async requests to the derive ws API @@ -44,12 +59,202 @@ def __init__( logger=logger, verbose=verbose, subaccount_id=subaccount_id, - referral_code=None, ) self.message_queues = {} self.connecting = False - # we make sure to get the event loop + + @functools.cached_property + def _bridge(self) -> BridgeClient: + return BridgeClient(env=self.env, account=self.signer, wallet=self.wallet, logger=self.logger) + + @functools.cached_property + def _standard_bridge(self) -> StandardBridge: + return StandardBridge(self.account, self.logger) + + async def prepare_standard_tx( + self, + human_amount: float, + currency: Currency, + to: Address, + source_chain: ChainID, + target_chain: ChainID, + ) -> PreparedBridgeTx: + """ + Prepare a transaction to bridge tokens to using Standard Bridge. + + This creates a signed transaction ready for submission but does not execute it. + Review the returned PreparedBridgeTx before calling submit_bridge_tx(). 
+ + Args: + human_amount: Amount in token units (e.g., 1.5 USDC, 0.1 ETH) + currency: Currency enum value describing the token to bridge + to: Destination address on the target chain + source_chain: ChainID for the source chain + target_chain: ChainID for the target chain + + Returns: + PreparedBridgeTx: Contains transaction details including: + - tx_hash: Pre-computed transaction hash + - nonce: Transaction nonce for replacement/cancellation + - tx_details: Contract address, method, gas estimates, signed transaction + - currency, amount, source_chain, target_chain, bridge_type: Bridge context + + Use the returned object to: + - Verify contract addresses and gas costs before submission + - Submit with submit_bridge_tx() on approval + """ + + result = await self._standard_bridge.prepare_tx( + human_amount=human_amount, + currency=currency, + to=to, + source_chain=source_chain, + target_chain=target_chain, + ) + + return unwrap_or_raise(result) + + async def prepare_deposit_to_derive( + self, + human_amount: float, + currency: Currency, + chain_id: ChainID, + ) -> PreparedBridgeTx: + """ + Prepare a deposit transaction to bridge tokens to Derive. + + This creates a signed transaction ready for submission but does not execute it. + Review the returned PreparedBridgeTx before calling submit_bridge_tx(). 
+ + Args: + human_amount: Amount in token units (e.g., 1.5 USDC, 0.1 ETH) + currency: Token to bridge + chain_id: Source chain to bridge from + + Returns: + PreparedBridgeTx: Contains transaction details including: + - tx_hash: Pre-computed transaction hash + - nonce: Transaction nonce for replacement/cancellation + - tx_details: Contract address, method, gas estimates, signed transaction + - currency, amount, source_chain, target_chain, bridge_type: Bridge context + + Use the returned object to: + - Verify contract addresses and gas costs before submission + - Submit with submit_bridge_tx() on approval + """ + + if currency is Currency.ETH: + raise NotImplementedError( + "ETH deposits to the funding wallet (Light Account) are not implemented. " + "For gas funding of the owner (EOA) use `prepare_standard_tx`." + ) + + result = await self._bridge.prepare_deposit(human_amount=human_amount, currency=currency, chain_id=chain_id) + return unwrap_or_raise(result) + + async def prepare_withdrawal_from_derive( + self, + human_amount: float, + currency: Currency, + chain_id: ChainID, + ) -> PreparedBridgeTx: + """ + Prepare a withdrawal transaction to bridge tokens from Derive. + + This creates a signed transaction ready for submission but does not execute it. + Review the returned PreparedBridgeTx before calling submit_bridge_tx(). 
+ + Args: + human_amount: Amount in token units (e.g., 1.5 USDC, 0.1 ETH) + currency: Token to bridge + chain_id: Target chain to bridge to + + Returns: + PreparedBridgeTx: Contains transaction details including: + - tx_hash: Pre-computed transaction hash + - nonce: Transaction nonce for replacement/cancellation + - tx_details: Contract address, method, gas estimates, signed transaction + - currency, amount, source_chain, target_chain, bridge_type: Bridge context + + Use the returned object to: + - Verify contract addresses and gas costs before submission + - Submit with submit_bridge_tx() when ready + """ + + result = await self._bridge.prepare_withdrawal(human_amount=human_amount, currency=currency, chain_id=chain_id) + return unwrap_or_raise(result) + + async def submit_bridge_tx(self, prepared_tx: PreparedBridgeTx) -> BridgeTxResult: + """ + Submit a prepared bridge transaction to the blockchain. + + This broadcasts the signed transaction and returns tracking information. + The transaction is submitted but not yet confirmed - use poll_bridge_progress() + to monitor completion. 
+ + Args: + prepared_tx: Transaction prepared by prepare_deposit_to_derive() + or prepare_withdrawal_from_derive() + + Returns: + BridgeTxResult: Initial tracking object containing: + - source_tx: Transaction hash on source chain (unconfirmed) + - target_from_block: Block number to start polling target chain events + - tx_details: Copy of original transaction details + - currency, bridge, source_chain, target_chain: Bridge context + + Next steps: + - Call poll_bridge_progress() to wait for cross-chain completion + """ + + if prepared_tx.currency == Currency.ETH: + result = await self._standard_bridge.submit_bridge_tx(prepared_tx=prepared_tx) + else: + result = await self._bridge.submit_bridge_tx(prepared_tx=prepared_tx) + + return unwrap_or_raise(result) + + async def poll_bridge_progress(self, tx_result: BridgeTxResult) -> BridgeTxResult: + """ + Poll for bridge transaction completion across both chains. + + This monitors the full cross-chain bridge pipeline: + 1. Source chain finality + 2. Target chain event detection + 3. Target chain finality + + Args: + tx_result: Result from submit_bridge_tx() or previous poll attempt + + Returns: + BridgeTxResult: Updated with completed bridge information: + - source_tx.tx_receipt: Source chain transaction receipt (confirmed) + - target_tx.tx_hash: Target chain transaction hash + - target_tx.tx_receipt: Target chain transaction receipt (confirmed) + + Raises: + PartialBridgeResult: Pipeline failed at some step. The exception contains + the partially updated tx_result for inspection and retry. 
Common scenarios: + - FinalityTimeout: Not enough confirmations, wait longer + - TxPendingTimeout: Transaction stuck, consider resubmission + - TransactionDropped: Transaction lost, likely needs resubmission + + Recovery strategies: + - On PartialBridgeResult: inspect the tx_result in the exception + - For FinalityTimeout: call poll_bridge_progress() again with the partial result + - For TransactionDropped: prepare new tx with same nonce to replace + - For TxPendingTimeout: prepare new tx with higher gas using same nonce. + - In case of a nonce collision: verify whether previous transaction got included + or whether the nonce was reused in another tx. + """ + + if tx_result.currency == Currency.ETH: + result = await self._standard_bridge.poll_bridge_progress(tx_result=tx_result) + else: + result = await self._bridge.poll_bridge_progress(tx_result=tx_result) + + return unwrap_or_raise(result) def get_subscription_id(self, instrument_name: str, group: str = "1", depth: str = "100"): return f"orderbook.{instrument_name}.{group}.{depth}" @@ -295,7 +500,7 @@ async def create_order( "order_type": order_type.name.lower(), "mmp": False, "time_in_force": time_in_force.value, - "referral_code": DEFAULT_REFERER if not self.referral_code else self.referral_code, + "referral_code": DEFAULT_REFERER, **signed_action.to_json(), } try: diff --git a/derive_client/clients/base_client.py b/derive_client/clients/base_client.py index fb357b77..d779fd67 100644 --- a/derive_client/clients/base_client.py +++ b/derive_client/clients/base_client.py @@ -20,19 +20,16 @@ ) from derive_action_signing.signed_action import SignedAction from derive_action_signing.utils import MAX_INT_32, get_action_nonce, sign_rest_auth_header, utc_now_ms +from hexbytes import HexBytes from pydantic import validate_call from web3 import Web3 -from derive_client._bridge import BridgeClient from derive_client.constants import CONFIGS, DEFAULT_REFERER, PUBLIC_HEADERS, TOKEN_DECIMALS from derive_client.data_types 
import ( Address, - BridgeTxResult, - ChainID, CollateralAsset, CreateSubAccountData, CreateSubAccountDetails, - Currency, DepositResult, DeriveTxResult, DeriveTxStatus, @@ -63,8 +60,6 @@ def _is_final_tx(res: DeriveTxResult) -> bool: class BaseClient: """Client for the Derive dex.""" - referral_code: str = None - def _create_signature_headers(self): """ Create the signature headers. @@ -79,12 +74,11 @@ def _create_signature_headers(self): def __init__( self, wallet: Address, - private_key: str, + private_key: str | HexBytes, env: Environment, logger: Logger | LoggerAdapter | None = None, verbose: bool = False, subaccount_id: int | None = None, - referral_code: Address | None = None, ): self.verbose = verbose self.env = env @@ -95,7 +89,14 @@ def __init__( self.wallet = wallet self._verify_wallet(wallet) self.subaccount_id = self._determine_subaccount_id(subaccount_id) - self.referral_code = referral_code + + @property + def account(self): + return self.signer + + @property + def private_key(self) -> HexBytes: + return self.account._private_key @property def endpoints(self) -> RestAPI: @@ -120,7 +121,7 @@ def _determine_subaccount_id(self, subaccount_id: int | None) -> int: if subaccount_id is not None and subaccount_id not in subaccount_ids: raise ValueError(f"Provided subaccount {subaccount_id} not among retrieved aubaccounts: {subaccounts!r}") subaccount_id = subaccount_id or subaccount_ids[0] - self.logger.info(f"Selected subaccount_id: {subaccount_id}") + self.logger.debug(f"Selected subaccount_id: {subaccount_id}") return subaccount_id def create_account(self, wallet): @@ -138,59 +139,6 @@ def create_account(self, wallet): raise Exception(result_code["error"]) return True - @validate_call - def deposit_to_derive(self, chain_id: ChainID, currency: Currency, amount: float) -> BridgeTxResult: - """ - Submit a deposit into the Derive chain funding contract and return its initial BridgeTxResult - without waiting for completion. 
- - Parameters: - chain_id (ChainID): The chain you are bridging FROM. - currency (Currency): The asset being bridged. - amount (float): amount to deposit, in human units (will be scaled to Wei). - """ - - amount = int(amount * 10 ** TOKEN_DECIMALS[UnderlyingCurrency[currency.name.upper()]]) - client = BridgeClient(self.env, chain_id, account=self.signer, wallet=self.wallet, logger=self.logger) - - if currency == Currency.DRV: - return client.deposit_drv(amount=amount, currency=currency) - - return client.deposit(amount=amount, currency=currency) - - @validate_call - def withdraw_from_derive(self, chain_id: ChainID, currency: Currency, amount: float) -> BridgeTxResult: - """ - Submit a withdrawal from the Derive chain funding contract and return its initial BridgeTxResult - without waiting for completion. - - Parameters: - chain_id (ChainID): The chain you are bridging TO. - currency (Currency): The asset being bridged. - amount (float): amount to withdraw, in human units (will be scaled to Wei). - """ - - amount = int(amount * 10 ** TOKEN_DECIMALS[UnderlyingCurrency[currency.name.upper()]]) - client = BridgeClient(self.env, chain_id, account=self.signer, wallet=self.wallet, logger=self.logger) - - if currency == Currency.DRV: - return client.withdraw_drv(amount=amount, currency=currency) - - return client.withdraw_with_wrapper(amount=amount, currency=currency) - - def poll_bridge_progress(self, tx_result: BridgeTxResult) -> BridgeTxResult: - """ - Given a pending BridgeTxResult, return a new BridgeTxResult with updated status. - Raises AlreadyFinalizedError if tx_result is not in PENDING status. - - Parameters: - tx_result (BridgeTxResult): the result to refresh. 
- """ - - chain_id = tx_result.source_chain if tx_result.source_chain != ChainID.DERIVE else tx_result.target_chain - client = BridgeClient(self.env, chain_id, account=self.signer, wallet=self.wallet, logger=self.logger) - return client.poll_bridge_progress(tx_result=tx_result) - def fetch_instruments( self, expired=False, @@ -298,7 +246,7 @@ def create_order( "order_type": order_type.name.lower(), "mmp": False, "time_in_force": time_in_force.value, - "referral_code": DEFAULT_REFERER if not self.referral_code else self.referral_code, + "referral_code": DEFAULT_REFERER, **signed_action.to_json(), } diff --git a/derive_client/clients/http_client.py b/derive_client/clients/http_client.py index 5e5c96fe..387e6be1 100644 --- a/derive_client/clients/http_client.py +++ b/derive_client/clients/http_client.py @@ -2,8 +2,106 @@ Base class for HTTP client. """ +import functools +from logging import Logger, LoggerAdapter + +from derive_client.data_types import Address, BridgeTxResult, ChainID, Currency, Environment, PreparedBridgeTx +from derive_client.utils.asyncio_sync import run_coroutine_sync + +from .async_client import AsyncClient from .base_client import BaseClient class HttpClient(BaseClient): """HTTP client class.""" + + def __init__( + self, + wallet: Address, + private_key: str, + env: Environment, + logger: Logger | LoggerAdapter | None = None, + verbose: bool = False, + subaccount_id: int | None = None, + ): + super().__init__( + wallet=wallet, + private_key=private_key, + env=env, + logger=logger, + verbose=verbose, + subaccount_id=subaccount_id, + ) + + @functools.cached_property + def _async_client(self) -> AsyncClient: + return AsyncClient( + wallet=self.wallet, + private_key=self.private_key, + env=self.env, + logger=self.logger, + verbose=self.verbose, + subaccount_id=self.subaccount_id, + ) + + def prepare_standard_tx( + self, + human_amount: float, + currency: Currency, + to: Address, + source_chain: ChainID, + target_chain: ChainID, + ) -> 
PreparedBridgeTx: + """Thin sync wrapper around AsyncClient.prepare_standard_tx.""" + + coroutine = self._async_client.prepare_standard_tx( + human_amount=human_amount, + currency=currency, + to=to, + source_chain=source_chain, + target_chain=target_chain, + ) + + return run_coroutine_sync(coroutine) + + def prepare_deposit_to_derive( + self, + human_amount: float, + currency: Currency, + chain_id: ChainID, + ) -> PreparedBridgeTx: + """Thin sync wrapper around AsyncClient.prepare_deposit_to_derive.""" + + coroutine = self._async_client.prepare_deposit_to_derive( + human_amount=human_amount, + currency=currency, + chain_id=chain_id, + ) + return run_coroutine_sync(coroutine) + + def prepare_withdrawal_from_derive( + self, + human_amount: float, + currency: Currency, + chain_id: ChainID, + ) -> PreparedBridgeTx: + """Thin sync wrapper around AsyncClient.prepare_withdrawal_from_derive.""" + + coroutine = self._async_client.prepare_withdrawal_from_derive( + human_amount=human_amount, + currency=currency, + chain_id=chain_id, + ) + return run_coroutine_sync(coroutine) + + def submit_bridge_tx(self, prepared_tx: PreparedBridgeTx) -> BridgeTxResult: + """Thin sync wrapper around AsyncClient.submit_bridge_tx.""" + + coroutine = self._async_client.submit_bridge_tx(prepared_tx=prepared_tx) + return run_coroutine_sync(coroutine) + + def poll_bridge_progress(self, tx_result: BridgeTxResult) -> BridgeTxResult: + """Thin sync wrapper around AsyncClient.poll_bridge_progress.""" + + coroutine = self._async_client.poll_bridge_progress(tx_result=tx_result) + return run_coroutine_sync(coroutine) diff --git a/derive_client/constants.py b/derive_client/constants.py index 7ec43ef2..26ee9501 100644 --- a/derive_client/constants.py +++ b/derive_client/constants.py @@ -6,7 +6,7 @@ from pydantic import BaseModel -from derive_client.data_types import Environment, UnderlyingCurrency +from derive_client.data_types import Currency, Environment, UnderlyingCurrency class 
ContractAddresses(BaseModel, frozen=True): @@ -23,11 +23,6 @@ class ContractAddresses(BaseModel, frozen=True): DEPOSIT_MODULE: str WITHDRAWAL_MODULE: str TRANSFER_MODULE: str - L1_CHUG_SPLASH_PROXY: str | None - WITHDRAW_WRAPPER_V2: str | None - DEPOSIT_WRAPPER: str | None - ARBITRUM_DEPOSIT_WRAPPER: str | None = None - OPTIMISM_DEPOSIT_WRAPPER: str | None = None def __getitem__(self, key): return getattr(self, key) @@ -74,15 +69,12 @@ class EnvConfig(BaseModel, frozen=True): DEPOSIT_MODULE="0x43223Db33AdA0575D2E100829543f8B04A37a1ec", WITHDRAWAL_MODULE="0xe850641C5207dc5E9423fB15f89ae6031A05fd92", TRANSFER_MODULE="0x0CFC1a4a90741aB242cAfaCD798b409E12e68926", - L1_CHUG_SPLASH_PROXY=None, - WITHDRAW_WRAPPER_V2=None, - DEPOSIT_WRAPPER=None, ), ), Environment.PROD: EnvConfig( base_url="https://api.lyra.finance", ws_address="wss://api.lyra.finance/ws", - rpc_endpoint="https://rpc.lyra.finance", + rpc_endpoint="https://957.rpc.thirdweb.com/", block_explorer="https://explorer.lyra.finance", ACTION_TYPEHASH="0x4d7a9f27c403ff9c0f19bce61d76d82f9aa29f8d6d4b0c5474607d9770d1af17", DOMAIN_SEPARATOR="0xd96e5f90797da7ec8dc4e276260c7f3f87fedf68775fbe1ef116e996fc60441b", @@ -100,11 +92,6 @@ class EnvConfig(BaseModel, frozen=True): DEPOSIT_MODULE="0x9B3FE5E5a3bcEa5df4E08c41Ce89C4e3Ff01Ace3", WITHDRAWAL_MODULE="0x9d0E8f5b25384C7310CB8C6aE32C8fbeb645d083", TRANSFER_MODULE="0x01259207A40925b794C8ac320456F7F6c8FE2636", - L1_CHUG_SPLASH_PROXY="0x61e44dc0dae6888b5a301887732217d5725b0bff", - WITHDRAW_WRAPPER_V2="0xea8E683D8C46ff05B871822a00461995F93df800", - DEPOSIT_WRAPPER="0x9628bba16db41ea7fe1fd84f9ce53bc27c63f59b", - ARBITRUM_DEPOSIT_WRAPPER="0x076BB6117750e80AD570D98891B68da86C203A88", # unknown address - OPTIMISM_DEPOSIT_WRAPPER="0xC65005131Cfdf06622b99E8E17f72Cf694b586cC", # unknown address ), ), } @@ -113,8 +100,10 @@ class EnvConfig(BaseModel, frozen=True): DEFAULT_REFERER = "0x9135BA0f495244dc0A5F029b25CDE95157Db89AD" GAS_FEE_BUFFER = 1.1 # buffer multiplier to pad maxFeePerGas 
+GAS_LIMIT_BUFFER = 1.1 # buffer multiplier to pad gas limit MSG_GAS_LIMIT = 200_000 -DEPOSIT_GAS_LIMIT = 420_000 +ASSUMED_BRIDGE_GAS_LIMIT = 1_000_000 +MIN_PRIORITY_FEE = 10_000 PAYLOAD_SIZE = 161 TARGET_SPEED = "FAST" @@ -136,6 +125,35 @@ class EnvConfig(BaseModel, frozen=True): UnderlyingCurrency.DRV: 18, } +CURRENCY_DECIMALS = { + Currency.ETH: 18, + Currency.weETH: 18, + Currency.rswETH: 18, + Currency.rsETH: 18, + Currency.USDe: 18, + Currency.deUSD: 18, + Currency.PYUSD: 6, + Currency.sUSDe: 18, + Currency.SolvBTC: 18, + Currency.SolvBTCBBN: 18, + Currency.LBTC: 8, + Currency.OP: 18, + Currency.DAI: 18, + Currency.sDAI: 18, + Currency.cbBTC: 8, + Currency.eBTC: 8, + Currency.AAVE: 18, + Currency.OLAS: 18, + Currency.DRV: 18, + Currency.WBTC: 8, + Currency.WETH: 18, + Currency.USDC: 6, + Currency.USDT: 6, + Currency.wstETH: 18, + Currency.USDCe: 6, + Currency.SNX: 18, +} + DEFAULT_RPC_ENDPOINTS = DATA_DIR / "rpc_endpoints.yaml" NEW_VAULT_ABI_PATH = ABI_DATA_DIR / "socket_superbridge_vault.json" @@ -147,11 +165,26 @@ class EnvConfig(BaseModel, frozen=True): LIGHT_ACCOUNT_ABI_PATH = ABI_DATA_DIR / "light_account.json" L1_CHUG_SPLASH_PROXY_ABI_PATH = ABI_DATA_DIR / "l1_chug_splash_proxy.json" L1_STANDARD_BRIDGE_ABI_PATH = ABI_DATA_DIR / "l1_standard_bridge.json" +L1_CROSS_DOMAIN_MESSENGER_ABI_PATH = ABI_DATA_DIR / "l1_cross_domain_messenger.json" +L2_STANDARD_BRIDGE_ABI_PATH = ABI_DATA_DIR / "l2_standard_bridge.json" +L2_CROSS_DOMAIN_MESSENGER_ABI_PATH = ABI_DATA_DIR / "l2_cross_domain_messenger.json" WITHDRAW_WRAPPER_V2_ABI_PATH = ABI_DATA_DIR / "withdraw_wrapper_v2.json" DERIVE_ABI_PATH = ABI_DATA_DIR / "Derive.json" DERIVE_L2_ABI_PATH = ABI_DATA_DIR / "DeriveL2.json" LYRA_OFT_WITHDRAW_WRAPPER_ABI_PATH = ABI_DATA_DIR / "LyraOFTWithdrawWrapper.json" ERC20_ABI_PATH = ABI_DATA_DIR / "erc20.json" SOCKET_ABI_PATH = ABI_DATA_DIR / "Socket.json" +CONNECTOR_PLUG = ABI_DATA_DIR / "ConnectorPlug.json" + +# Contracts used in bridging module 
LYRA_OFT_WITHDRAW_WRAPPER_ADDRESS = "0x9400cc156dad38a716047a67c897973A29A06710" +L1_CHUG_SPLASH_PROXY = "0x61e44dc0dae6888b5a301887732217d5725b0bff" +RESOLVED_DELEGATE_PROXY = "0x5456f02c08e9A018E42C39b351328E5AA864174A" +L2_STANDARD_BRIDGE_PROXY = "0x4200000000000000000000000000000000000010" +L2_CROSS_DOMAIN_MESSENGER_PROXY = "0x4200000000000000000000000000000000000007" +WITHDRAW_WRAPPER_V2 = "0xea8E683D8C46ff05B871822a00461995F93df800" +ETH_DEPOSIT_WRAPPER = "0x46e75B6983126896227a5717f2484efb04A0c151" +BASE_DEPOSIT_WRAPPER = "0x9628bba16db41ea7fe1fd84f9ce53bc27c63f59b" +ARBITRUM_DEPOSIT_WRAPPER = "0x076BB6117750e80AD570D98891B68da86C203A88" +OPTIMISM_DEPOSIT_WRAPPER = "0xC65005131Cfdf06622b99E8E17f72Cf694b586cC" diff --git a/derive_client/data/abi/l1_cross_domain_messenger.json b/derive_client/data/abi/l1_cross_domain_messenger.json new file mode 100644 index 00000000..2ad94c8d --- /dev/null +++ b/derive_client/data/abi/l1_cross_domain_messenger.json @@ -0,0 +1,407 @@ +[ + { + "inputs": [ + { + "internalType": "contract OptimismPortal", + "name": "_portal", + "type": "address" + } + ], + "stateMutability": "nonpayable", + "type": "constructor" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "bytes32", + "name": "msgHash", + "type": "bytes32" + } + ], + "name": "FailedRelayedMessage", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": false, + "internalType": "uint8", + "name": "version", + "type": "uint8" + } + ], + "name": "Initialized", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "bytes32", + "name": "msgHash", + "type": "bytes32" + } + ], + "name": "RelayedMessage", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "target", + "type": "address" + }, + { + "indexed": false, + "internalType": "address", + "name": "sender", + "type": "address" + }, + { + 
"indexed": false, + "internalType": "bytes", + "name": "message", + "type": "bytes" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "messageNonce", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "gasLimit", + "type": "uint256" + } + ], + "name": "SentMessage", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "sender", + "type": "address" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "value", + "type": "uint256" + } + ], + "name": "SentMessageExtension1", + "type": "event" + }, + { + "inputs": [], + "name": "MESSAGE_VERSION", + "outputs": [ + { + "internalType": "uint16", + "name": "", + "type": "uint16" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "MIN_GAS_CALLDATA_OVERHEAD", + "outputs": [ + { + "internalType": "uint64", + "name": "", + "type": "uint64" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "MIN_GAS_DYNAMIC_OVERHEAD_DENOMINATOR", + "outputs": [ + { + "internalType": "uint64", + "name": "", + "type": "uint64" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "MIN_GAS_DYNAMIC_OVERHEAD_NUMERATOR", + "outputs": [ + { + "internalType": "uint64", + "name": "", + "type": "uint64" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "OTHER_MESSENGER", + "outputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "PORTAL", + "outputs": [ + { + "internalType": "contract OptimismPortal", + "name": "", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "RELAY_CALL_OVERHEAD", + "outputs": [ + { + "internalType": "uint64", + "name": "", + "type": "uint64" + } + 
], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "RELAY_CONSTANT_OVERHEAD", + "outputs": [ + { + "internalType": "uint64", + "name": "", + "type": "uint64" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "RELAY_GAS_CHECK_BUFFER", + "outputs": [ + { + "internalType": "uint64", + "name": "", + "type": "uint64" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "RELAY_RESERVED_GAS", + "outputs": [ + { + "internalType": "uint64", + "name": "", + "type": "uint64" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "bytes", + "name": "_message", + "type": "bytes" + }, + { + "internalType": "uint32", + "name": "_minGasLimit", + "type": "uint32" + } + ], + "name": "baseGas", + "outputs": [ + { + "internalType": "uint64", + "name": "", + "type": "uint64" + } + ], + "stateMutability": "pure", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "bytes32", + "name": "", + "type": "bytes32" + } + ], + "name": "failedMessages", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "initialize", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [], + "name": "messageNonce", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "_nonce", + "type": "uint256" + }, + { + "internalType": "address", + "name": "_sender", + "type": "address" + }, + { + "internalType": "address", + "name": "_target", + "type": "address" + }, + { + "internalType": "uint256", + "name": "_value", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "_minGasLimit", + "type": "uint256" + }, + { + 
"internalType": "bytes", + "name": "_message", + "type": "bytes" + } + ], + "name": "relayMessage", + "outputs": [], + "stateMutability": "payable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_target", + "type": "address" + }, + { + "internalType": "bytes", + "name": "_message", + "type": "bytes" + }, + { + "internalType": "uint32", + "name": "_minGasLimit", + "type": "uint32" + } + ], + "name": "sendMessage", + "outputs": [], + "stateMutability": "payable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "bytes32", + "name": "", + "type": "bytes32" + } + ], + "name": "successfulMessages", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "version", + "outputs": [ + { + "internalType": "string", + "name": "", + "type": "string" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "xDomainMessageSender", + "outputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + } +] \ No newline at end of file diff --git a/derive_client/data/abi/l2_cross_domain_messenger.json b/derive_client/data/abi/l2_cross_domain_messenger.json new file mode 100644 index 00000000..086cb416 --- /dev/null +++ b/derive_client/data/abi/l2_cross_domain_messenger.json @@ -0,0 +1,407 @@ +[ + { + "inputs": [ + { + "internalType": "address", + "name": "_l1CrossDomainMessenger", + "type": "address" + } + ], + "stateMutability": "nonpayable", + "type": "constructor" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "bytes32", + "name": "msgHash", + "type": "bytes32" + } + ], + "name": "FailedRelayedMessage", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": false, + "internalType": "uint8", + "name": "version", + "type": "uint8" + } + ], + 
"name": "Initialized", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "bytes32", + "name": "msgHash", + "type": "bytes32" + } + ], + "name": "RelayedMessage", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "target", + "type": "address" + }, + { + "indexed": false, + "internalType": "address", + "name": "sender", + "type": "address" + }, + { + "indexed": false, + "internalType": "bytes", + "name": "message", + "type": "bytes" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "messageNonce", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "gasLimit", + "type": "uint256" + } + ], + "name": "SentMessage", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "sender", + "type": "address" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "value", + "type": "uint256" + } + ], + "name": "SentMessageExtension1", + "type": "event" + }, + { + "inputs": [], + "name": "MESSAGE_VERSION", + "outputs": [ + { + "internalType": "uint16", + "name": "", + "type": "uint16" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "MIN_GAS_CALLDATA_OVERHEAD", + "outputs": [ + { + "internalType": "uint64", + "name": "", + "type": "uint64" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "MIN_GAS_DYNAMIC_OVERHEAD_DENOMINATOR", + "outputs": [ + { + "internalType": "uint64", + "name": "", + "type": "uint64" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "MIN_GAS_DYNAMIC_OVERHEAD_NUMERATOR", + "outputs": [ + { + "internalType": "uint64", + "name": "", + "type": "uint64" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "OTHER_MESSENGER", + 
"outputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "RELAY_CALL_OVERHEAD", + "outputs": [ + { + "internalType": "uint64", + "name": "", + "type": "uint64" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "RELAY_CONSTANT_OVERHEAD", + "outputs": [ + { + "internalType": "uint64", + "name": "", + "type": "uint64" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "RELAY_GAS_CHECK_BUFFER", + "outputs": [ + { + "internalType": "uint64", + "name": "", + "type": "uint64" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "RELAY_RESERVED_GAS", + "outputs": [ + { + "internalType": "uint64", + "name": "", + "type": "uint64" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "bytes", + "name": "_message", + "type": "bytes" + }, + { + "internalType": "uint32", + "name": "_minGasLimit", + "type": "uint32" + } + ], + "name": "baseGas", + "outputs": [ + { + "internalType": "uint64", + "name": "", + "type": "uint64" + } + ], + "stateMutability": "pure", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "bytes32", + "name": "", + "type": "bytes32" + } + ], + "name": "failedMessages", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "initialize", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [], + "name": "l1CrossDomainMessenger", + "outputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "messageNonce", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + 
} + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "_nonce", + "type": "uint256" + }, + { + "internalType": "address", + "name": "_sender", + "type": "address" + }, + { + "internalType": "address", + "name": "_target", + "type": "address" + }, + { + "internalType": "uint256", + "name": "_value", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "_minGasLimit", + "type": "uint256" + }, + { + "internalType": "bytes", + "name": "_message", + "type": "bytes" + } + ], + "name": "relayMessage", + "outputs": [], + "stateMutability": "payable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_target", + "type": "address" + }, + { + "internalType": "bytes", + "name": "_message", + "type": "bytes" + }, + { + "internalType": "uint32", + "name": "_minGasLimit", + "type": "uint32" + } + ], + "name": "sendMessage", + "outputs": [], + "stateMutability": "payable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "bytes32", + "name": "", + "type": "bytes32" + } + ], + "name": "successfulMessages", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "version", + "outputs": [ + { + "internalType": "string", + "name": "", + "type": "string" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "xDomainMessageSender", + "outputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + } +] \ No newline at end of file diff --git a/derive_client/data/abi/l2_standard_bridge.json b/derive_client/data/abi/l2_standard_bridge.json new file mode 100644 index 00000000..6dcb8c12 --- /dev/null +++ b/derive_client/data/abi/l2_standard_bridge.json @@ -0,0 +1,617 @@ +[ + { + "inputs": [ + { + "internalType": "addresspayable", 
+ "name": "_otherBridge", + "type": "address" + } + ], + "stateMutability": "nonpayable", + "type": "constructor" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "l1Token", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "l2Token", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "from", + "type": "address" + }, + { + "indexed": false, + "internalType": "address", + "name": "to", + "type": "address" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "amount", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "bytes", + "name": "extraData", + "type": "bytes" + } + ], + "name": "DepositFinalized", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "localToken", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "remoteToken", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "from", + "type": "address" + }, + { + "indexed": false, + "internalType": "address", + "name": "to", + "type": "address" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "amount", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "bytes", + "name": "extraData", + "type": "bytes" + } + ], + "name": "ERC20BridgeFinalized", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "localToken", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "remoteToken", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "from", + "type": "address" + }, + { + "indexed": false, + "internalType": "address", + "name": "to", + "type": "address" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "amount", + "type": 
"uint256" + }, + { + "indexed": false, + "internalType": "bytes", + "name": "extraData", + "type": "bytes" + } + ], + "name": "ERC20BridgeInitiated", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "from", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "to", + "type": "address" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "amount", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "bytes", + "name": "extraData", + "type": "bytes" + } + ], + "name": "ETHBridgeFinalized", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "from", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "to", + "type": "address" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "amount", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "bytes", + "name": "extraData", + "type": "bytes" + } + ], + "name": "ETHBridgeInitiated", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "l1Token", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "l2Token", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "from", + "type": "address" + }, + { + "indexed": false, + "internalType": "address", + "name": "to", + "type": "address" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "amount", + "type": "uint256" + }, + { + "indexed": false, + "internalType": "bytes", + "name": "extraData", + "type": "bytes" + } + ], + "name": "WithdrawalInitiated", + "type": "event" + }, + { + "inputs": [], + "name": "MESSENGER", + "outputs": [ + { + "internalType": "contractCrossDomainMessenger", + "name": "", + "type": "address" + } + ], + "stateMutability": 
"view", + "type": "function" + }, + { + "inputs": [], + "name": "OTHER_BRIDGE", + "outputs": [ + { + "internalType": "contractStandardBridge", + "name": "", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_localToken", + "type": "address" + }, + { + "internalType": "address", + "name": "_remoteToken", + "type": "address" + }, + { + "internalType": "uint256", + "name": "_amount", + "type": "uint256" + }, + { + "internalType": "uint32", + "name": "_minGasLimit", + "type": "uint32" + }, + { + "internalType": "bytes", + "name": "_extraData", + "type": "bytes" + } + ], + "name": "bridgeERC20", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_localToken", + "type": "address" + }, + { + "internalType": "address", + "name": "_remoteToken", + "type": "address" + }, + { + "internalType": "address", + "name": "_to", + "type": "address" + }, + { + "internalType": "uint256", + "name": "_amount", + "type": "uint256" + }, + { + "internalType": "uint32", + "name": "_minGasLimit", + "type": "uint32" + }, + { + "internalType": "bytes", + "name": "_extraData", + "type": "bytes" + } + ], + "name": "bridgeERC20To", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint32", + "name": "_minGasLimit", + "type": "uint32" + }, + { + "internalType": "bytes", + "name": "_extraData", + "type": "bytes" + } + ], + "name": "bridgeETH", + "outputs": [], + "stateMutability": "payable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_to", + "type": "address" + }, + { + "internalType": "uint32", + "name": "_minGasLimit", + "type": "uint32" + }, + { + "internalType": "bytes", + "name": "_extraData", + "type": "bytes" + } + ], + "name": "bridgeETHTo", + "outputs": [], + "stateMutability": "payable", + 
"type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + }, + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "name": "deposits", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_localToken", + "type": "address" + }, + { + "internalType": "address", + "name": "_remoteToken", + "type": "address" + }, + { + "internalType": "address", + "name": "_from", + "type": "address" + }, + { + "internalType": "address", + "name": "_to", + "type": "address" + }, + { + "internalType": "uint256", + "name": "_amount", + "type": "uint256" + }, + { + "internalType": "bytes", + "name": "_extraData", + "type": "bytes" + } + ], + "name": "finalizeBridgeERC20", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_from", + "type": "address" + }, + { + "internalType": "address", + "name": "_to", + "type": "address" + }, + { + "internalType": "uint256", + "name": "_amount", + "type": "uint256" + }, + { + "internalType": "bytes", + "name": "_extraData", + "type": "bytes" + } + ], + "name": "finalizeBridgeETH", + "outputs": [], + "stateMutability": "payable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_l1Token", + "type": "address" + }, + { + "internalType": "address", + "name": "_l2Token", + "type": "address" + }, + { + "internalType": "address", + "name": "_from", + "type": "address" + }, + { + "internalType": "address", + "name": "_to", + "type": "address" + }, + { + "internalType": "uint256", + "name": "_amount", + "type": "uint256" + }, + { + "internalType": "bytes", + "name": "_extraData", + "type": "bytes" + } + ], + "name": "finalizeDeposit", + "outputs": [], + "stateMutability": "payable", + "type": "function" + }, 
+ { + "inputs": [], + "name": "l1TokenBridge", + "outputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "messenger", + "outputs": [ + { + "internalType": "contractCrossDomainMessenger", + "name": "", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "version", + "outputs": [ + { + "internalType": "string", + "name": "", + "type": "string" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_l2Token", + "type": "address" + }, + { + "internalType": "uint256", + "name": "_amount", + "type": "uint256" + }, + { + "internalType": "uint32", + "name": "_minGasLimit", + "type": "uint32" + }, + { + "internalType": "bytes", + "name": "_extraData", + "type": "bytes" + } + ], + "name": "withdraw", + "outputs": [], + "stateMutability": "payable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_l2Token", + "type": "address" + }, + { + "internalType": "address", + "name": "_to", + "type": "address" + }, + { + "internalType": "uint256", + "name": "_amount", + "type": "uint256" + }, + { + "internalType": "uint32", + "name": "_minGasLimit", + "type": "uint32" + }, + { + "internalType": "bytes", + "name": "_extraData", + "type": "bytes" + } + ], + "name": "withdrawTo", + "outputs": [], + "stateMutability": "payable", + "type": "function" + }, + { + "stateMutability": "payable", + "type": "receive" + } +] \ No newline at end of file diff --git a/derive_client/data_types/__init__.py b/derive_client/data_types/__init__.py index ea4097a3..c4ba0f22 100644 --- a/derive_client/data_types/__init__.py +++ b/derive_client/data_types/__init__.py @@ -9,8 +9,10 @@ DeriveJSONRPCErrorCode, DeriveTokenAddresses, DeriveTxStatus, + Direction, Environment, EthereumJSONRPCErrorCode, + GasPriority, 
InstrumentType, LayerZeroChainIDv2, MainnetCurrency, @@ -28,24 +30,31 @@ from .models import ( Address, BridgeContext, + BridgeTxDetails, BridgeTxResult, CreateSubAccountData, CreateSubAccountDetails, DepositResult, DeriveAddresses, DeriveTxResult, + FeeEstimate, + FeeEstimates, + FeeHistory, ManagerAddress, MintableTokenData, NonMintableTokenData, + PreparedBridgeTx, RPCEndpoints, SessionKey, TxResult, + Wei, WithdrawResult, ) __all__ = [ "TxStatus", "DeriveTxStatus", + "Direction", "BridgeType", "BridgeContext", "BridgeTxResult", @@ -66,6 +75,10 @@ "SubaccountType", "CollateralAsset", "ActionType", + "GasPriority", + "FeeHistory", + "FeeEstimate", + "FeeEstimates", "RfqStatus", "Address", "SessionKey", @@ -82,4 +95,7 @@ "DeriveTxResult", "SocketAddress", "RPCEndpoints", + "BridgeTxDetails", + "PreparedBridgeTx", + "Wei", ] diff --git a/derive_client/data_types/enums.py b/derive_client/data_types/enums.py index 643f265b..508e2330 100644 --- a/derive_client/data_types/enums.py +++ b/derive_client/data_types/enums.py @@ -7,7 +7,6 @@ class TxStatus(IntEnum): FAILED = 0 # confirmed and status == 0 (on-chain revert) SUCCESS = 1 # confirmed and status == 1 PENDING = 2 # not yet confirmed, no receipt - ERROR = 3 # local error, e.g. 
connection, invalid tx class DeriveTxStatus(Enum): @@ -24,6 +23,12 @@ class DeriveTxStatus(Enum): class BridgeType(Enum): SOCKET = "socket" LAYERZERO = "layerzero" + STANDARD = "standard" + + +class Direction(Enum): + DEPOSIT = "deposit" + WITHDRAW = "withdraw" class ChainID(IntEnum): @@ -53,6 +58,12 @@ class LayerZeroChainIDv2(IntEnum): DERIVE = 30311 +class GasPriority(IntEnum): + SLOW = 25 + MEDIUM = 50 + FAST = 75 + + class SocketAddress(Enum): ETH = "0x943ac2775928318653e91d350574436a1b9b16f9" ARBITRUM = "0x37cc674582049b579571e2ffd890a4d99355f6ba" @@ -119,6 +130,8 @@ class UnderlyingCurrency(Enum): class Currency(Enum): """Depositable currencies""" + ETH = "ETH" + weETH = "weETH" rswETH = "rswETH" rsETH = "rsETH" diff --git a/derive_client/data_types/models.py b/derive_client/data_types/models.py index 288d33e0..c01102e5 100644 --- a/derive_client/data_types/models.py +++ b/derive_client/data_types/models.py @@ -1,35 +1,34 @@ """Models used in the bridge module.""" +from typing import Any + from derive_action_signing.module_data import ModuleData from derive_action_signing.utils import decimal_to_big_int from eth_abi.abi import encode +from eth_account.datastructures import SignedTransaction from eth_utils import is_0x_prefixed, is_address, is_hex, to_checksum_address -from pydantic import BaseModel, ConfigDict, Field, GetCoreSchemaHandler, GetJsonSchemaHandler, HttpUrl +from hexbytes import HexBytes +from pydantic import BaseModel, ConfigDict, Field, GetCoreSchemaHandler, GetJsonSchemaHandler, HttpUrl, RootModel from pydantic.dataclasses import dataclass from pydantic_core import core_schema -from web3 import Web3 -from web3.contract import Contract -from web3.contract.contract import ContractEvent +from web3 import AsyncWeb3, Web3 +from web3.contract import AsyncContract +from web3.contract.async_contract import AsyncContractEvent from web3.datastructures import AttributeDict -from .enums import BridgeType, ChainID, Currency, DeriveTxStatus, 
MainnetCurrency, MarginType, SessionKeyScope, TxStatus +from derive_client.exceptions import TxReceiptMissing - -class PException(Exception): - - @classmethod - def __get_pydantic_core_schema__(cls, _source, _handler: GetCoreSchemaHandler): - return core_schema.no_info_plain_validator_function(cls._validate) - - @classmethod - def __get_pydantic_json_schema__(cls, _schema, _handler: GetJsonSchemaHandler) -> dict: - return {"type": "string", "description": "An arbitrary Python Exception; serialized via str()"} - - @classmethod - def _validate(cls, v) -> Exception: - if not isinstance(v, Exception): - raise TypeError(f"Expected Exception, got {v!r}") - return v +from .enums import ( + BridgeType, + ChainID, + Currency, + DeriveTxStatus, + GasPriority, + MainnetCurrency, + MarginType, + SessionKeyScope, + TxStatus, +) class PAttributeDict(AttributeDict): @@ -49,10 +48,74 @@ def _validate(cls, v) -> AttributeDict: return AttributeDict(v) +class PHexBytes(HexBytes): + @classmethod + def __get_pydantic_core_schema__(cls, _source: Any, _handler: Any) -> core_schema.CoreSchema: + # Allow either HexBytes or bytes/hex strings to be parsed into HexBytes + return core_schema.no_info_before_validator_function( + cls._validate, + core_schema.union_schema( + [ + core_schema.is_instance_schema(HexBytes), + core_schema.bytes_schema(), + core_schema.str_schema(), + ] + ), + ) + + @classmethod + def __get_pydantic_json_schema__(cls, _schema: core_schema.CoreSchema, _handler: Any) -> dict: + return {"type": "string", "format": "hex"} + + @classmethod + def _validate(cls, v: Any) -> HexBytes: + if isinstance(v, HexBytes): + return v + if isinstance(v, (bytes, bytearray)): + return HexBytes(v) + if isinstance(v, str): + return HexBytes(v) + raise TypeError(f"Expected HexBytes-compatible type, got {type(v).__name__}") + + +class PSignedTransaction(SignedTransaction): + @classmethod + def __get_pydantic_core_schema__(cls, _source: Any, _handler: Any) -> core_schema.CoreSchema: + # Accept 
existing SignedTransaction or a tuple/dict of its fields + return core_schema.no_info_plain_validator_function(cls._validate) + + @classmethod + def __get_pydantic_json_schema__(cls, _schema: core_schema.CoreSchema, _handler: Any) -> dict: + return { + "type": "object", + "properties": { + "raw_transaction": {"type": "string", "format": "hex"}, + "hash": {"type": "string", "format": "hex"}, + "r": {"type": "integer"}, + "s": {"type": "integer"}, + "v": {"type": "integer"}, + }, + } + + @classmethod + def _validate(cls, v: Any) -> SignedTransaction: + if isinstance(v, SignedTransaction): + return v + if isinstance(v, dict): + return SignedTransaction( + raw_transaction=PHexBytes(v["raw_transaction"]), + hash=PHexBytes(v["hash"]), + r=int(v["r"]), + s=int(v["s"]), + v=int(v["v"]), + ) + raise TypeError(f"Expected SignedTransaction or dict, got {type(v).__name__}") + + class Address(str): @classmethod def __get_pydantic_core_schema__(cls, _source, _handler: GetCoreSchemaHandler) -> core_schema.CoreSchema: - return core_schema.no_info_before_validator_function(cls._validate, core_schema.str_schema()) + return core_schema.no_info_before_validator_function(cls._validate, core_schema.any_schema()) @classmethod def __get_pydantic_json_schema__(cls, _schema, _handler: GetJsonSchemaHandler) -> dict: @@ -75,14 +138,34 @@ def __get_pydantic_json_schema__(cls, _schema, _handler: GetJsonSchemaHandler): return {"type": "string", "format": "ethereum-tx-hash"} @classmethod - def _validate(cls, v: str) -> str: + def _validate(cls, v: str | HexBytes) -> str: + if isinstance(v, HexBytes): + v = v.to_0x_hex() if not isinstance(v, str): - raise TypeError("Expected a string for TxHash") + raise TypeError("Expected a string or HexBytes for TxHash") if not is_0x_prefixed(v) or not is_hex(v) or len(v) != 66: raise ValueError(f"Invalid Ethereum transaction hash: {v}") return v +class Wei(int): + @classmethod + def __get_pydantic_core_schema__(cls, _source, _handler: GetCoreSchemaHandler) -> 
core_schema.CoreSchema: + return core_schema.no_info_before_validator_function(cls._validate, core_schema.int_schema()) + + @classmethod + def __get_pydantic_json_schema__(cls, _schema, _handler: GetJsonSchemaHandler) -> dict: + return {"type": ["string", "integer"], "title": "Wei"} + + @classmethod + def _validate(cls, v: str | int) -> int: + if isinstance(v, int): + return v + if isinstance(v, str) and is_hex(v): + return int(v, 16) + raise TypeError(f"Invalid type for Wei: {type(v)}") + + @dataclass class CreateSubAccountDetails: amount: int @@ -153,42 +236,115 @@ class ManagerAddress(BaseModel): @dataclass(config=ConfigDict(arbitrary_types_allowed=True)) class BridgeContext: - source_w3: Web3 - target_w3: Web3 - source_token: Contract - source_event: ContractEvent - target_event: ContractEvent + currency: Currency + source_w3: AsyncWeb3 + target_w3: AsyncWeb3 + source_token: AsyncContract + source_event: AsyncContractEvent + target_event: AsyncContractEvent + source_chain: ChainID + target_chain: ChainID @property - def source_chain(self) -> ChainID: - return ChainID(self.source_w3.eth.chain_id) + def bridge_type(self) -> BridgeType: + return BridgeType.LAYERZERO if self.currency == Currency.DRV else BridgeType.SOCKET + + +@dataclass +class BridgeTxDetails: + contract: Address + method: str + kwargs: dict[str, Any] + tx: dict[str, Any] + signed_tx: PSignedTransaction @property - def target_chain(self) -> ChainID: - return ChainID(self.target_w3.eth.chain_id) + def tx_hash(self) -> str: + """Pre-computed transaction hash.""" + return self.signed_tx.hash.to_0x_hex() + + @property + def nonce(self) -> int: + """Transaction nonce.""" + return self.tx["nonce"] + + @property + def gas(self) -> int: + """Gas limit""" + return self.tx["gas"] + + @property + def max_fee_per_gas(self) -> Wei: + return self.tx["maxFeePerGas"] + + +@dataclass +class PreparedBridgeTx: + amount: int + value: int + currency: Currency + source_chain: ChainID + target_chain: ChainID + 
bridge_type: BridgeType + tx_details: BridgeTxDetails + + fee_value: int + fee_in_token: int + + def __post_init_post_parse__(self) -> None: + + # rule 1: don't allow both amount (erc20) and value (native) to be non-zero + if self.amount and self.value: + raise ValueError( + f"Both amount ({self.amount}) and value ({self.value}) are non-zero; " + "use `prepare_erc20_tx` or `prepare_eth_tx` instead." + ) + + # rule 2: don't allow both fee types to be non-zero simultaneously + if self.fee_value and self.fee_in_token: + raise ValueError( + f"Both fee_value ({self.fee_value}) and fee_in_token ({self.fee_in_token}) are non-zero; " + "fees must be expressed in only one currency." + ) + + @property + def tx_hash(self) -> str: + """Pre-computed transaction hash.""" + return self.tx_details.tx_hash + + @property + def nonce(self) -> int: + """Transaction nonce.""" + return self.tx_details.nonce + + @property + def gas(self) -> int: + return self.tx_details.gas + + @property + def max_fee_per_gas(self) -> Wei: + return self.tx_details.max_fee_per_gas + + @property + def max_total_fee(self) -> Wei: + return self.gas * self.max_fee_per_gas @dataclass(config=ConfigDict(validate_assignment=True)) class TxResult: - tx_hash: TxHash | None = None + tx_hash: TxHash tx_receipt: PAttributeDict | None = None - exception: PException | None = None @property def status(self) -> TxStatus: if self.tx_receipt is not None: return TxStatus(int(self.tx_receipt.status)) # ∈ {0, 1} (EIP-658) - if self.exception is not None and not isinstance(self.exception, TimeoutError): - return TxStatus.ERROR return TxStatus.PENDING @dataclass(config=ConfigDict(validate_assignment=True)) class BridgeTxResult: - currency: Currency - bridge: BridgeType - source_chain: ChainID - target_chain: ChainID + prepared_tx: PreparedBridgeTx source_tx: TxResult target_from_block: int event_id: str | None = None @@ -200,6 +356,38 @@ def status(self) -> TxStatus: return self.source_tx.status return self.target_tx.status if 
self.target_tx is not None else TxStatus.PENDING + @property + def currency(self) -> Currency: + return self.prepared_tx.currency + + @property + def source_chain(self) -> ChainID: + return self.prepared_tx.source_chain + + @property + def target_chain(self) -> ChainID: + return self.prepared_tx.target_chain + + @property + def bridge_type(self) -> BridgeType: + return self.prepared_tx.bridge_type + + @property + def gas_used(self) -> int: + if not self.source_tx.tx_receipt: + raise TxReceiptMissing("Source tx receipt not available") + return self.source_tx.tx_receipt["gasUsed"] + + @property + def effective_gas_price(self) -> Wei: + if not self.source_tx.tx_receipt: + raise TxReceiptMissing("Source tx receipt not available") + return self.source_tx.tx_receipt["effectiveGasPrice"] + + @property + def total_fee(self) -> Wei: + return self.gas_used * self.effective_gas_price + class DepositResult(BaseModel): status: DeriveTxStatus # should be "REQUESTED" @@ -233,3 +421,28 @@ def __getitem__(self, key: ChainID | int | str) -> list[HttpUrl]: if not (urls := getattr(self, chain.name, [])): raise ValueError(f"No RPC URLs configured for {chain.name}") return urls + + +class FeeHistory(BaseModel): + base_fee_per_gas: list[Wei] = Field(alias="baseFeePerGas") + gas_used_ratio: list[float] = Field(alias="gasUsedRatio") + base_fee_per_blob_gas: list[Wei] | None = Field(default=None, alias="baseFeePerBlobGas") + blob_gas_used_ratio: list[float] | None = Field(default=None, alias="blobGasUsedRatio") + oldest_block: int = Field(alias="oldestBlock") + reward: list[list[Wei]] + + +@dataclass +class FeeEstimate: + max_fee_per_gas: int + max_priority_fee_per_gas: int + + +class FeeEstimates(RootModel): + root: dict[GasPriority, FeeEstimate] + + def __getitem__(self, key: GasPriority): + return self.root[key] + + def items(self): + return self.root.items() diff --git a/derive_client/derive.py b/derive_client/derive.py index 6260c06d..fdf693c0 100644 --- a/derive_client/derive.py +++ 
b/derive_client/derive.py @@ -5,7 +5,7 @@ import pandas as pd from web3 import Web3 -from derive_client.clients import BaseClient, HttpClient +from derive_client.clients import HttpClient # we set to show 4 decimal places pd.options.display.float_format = '{:,.4f}'.format @@ -15,7 +15,7 @@ def to_32byte_hex(val): return Web3.to_hex(Web3.to_bytes(val).rjust(32, b"\0")) -class DeriveClient(BaseClient): +class DeriveClient(HttpClient): """Client for the derive dex.""" def _create_signature_headers(self): diff --git a/derive_client/exceptions.py b/derive_client/exceptions.py index 2c3cbc6a..01e919b9 100644 --- a/derive_client/exceptions.py +++ b/derive_client/exceptions.py @@ -1,6 +1,13 @@ """Custom Exception classes.""" -from typing import Any +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +if TYPE_CHECKING: + from web3.types import LogReceipt + + from derive_client.data_types import BridgeTxResult, ChainID, FeeEstimate, Wei class ApiException(Exception): @@ -33,18 +40,10 @@ def __str__(self): return f"{base} [data={self.data!r}]" if self.data is not None else base -class TxSubmissionError(Exception): - """Raised when a transaction could not be signed or submitted.""" - - class BridgeEventParseError(Exception): """Raised when an expected cross-chain bridge event could not be parsed.""" -class AlreadyFinalizedError(Exception): - """Raised when attempting to poll a BridgeTxResult who'se status is not TxStatus.PENDING.""" - - class BridgeRouteError(Exception): """Raised when no bridge route exists for the given currency and chains.""" @@ -53,5 +52,69 @@ class NoAvailableRPC(Exception): """Raised when all configured RPC endpoints are temporarily unavailable due to backoff or failures.""" -class InsufficientGas(Exception): - """Raised when a minimum gas requirement is not met.""" +class InsufficientNativeBalance(Exception): + """Raised when the native currency balance is insufficient for gas and/or value transfer.""" + + def __init__( + 
self, + message: str, + *, + chain_id: ChainID, + balance: Wei, + assumed_gas_limit: Wei, + fee_estimate: FeeEstimate, + ): + super().__init__(message) + self.chain_id = chain_id + self.balance = balance + self.assumed_gas_limit = assumed_gas_limit + self.fee_estimate = fee_estimate + + +class InsufficientTokenBalance(Exception): + """Raised when the token balance is insufficient for the requested operation.""" + + +class BridgePrimarySignerRequiredError(Exception): + """Raised when bridging is attempted with a secondary session-key signer.""" + + +class TxReceiptMissing(Exception): + """Raised when a transaction receipt is required but not available.""" + + +class FinalityTimeout(Exception): + """Raised when the transaction was mined but did not reach the required finality within the timeout.""" + + +class TxPendingTimeout(Exception): + """Raised when the transaction receipt does not materialize and the transaction remains in the mempool.""" + + +class TransactionDropped(Exception): + """Raised when the transaction is no longer in the mempool, likely dropped.""" + + +class BridgeEventTimeout(Exception): + """Raised when no matching bridge event was seen before deadline.""" + + +class PartialBridgeResult(Exception): + """Raised after submission when the bridge pipeline fails.""" + + def __init__(self, message: str, *, tx_result: BridgeTxResult): + super().__init__(message) + self.tx_result = tx_result + + @property + def cause(self) -> Exception | None: + """Provides access to the original Exception.""" + return self.__cause__ + + +class StandardBridgeRelayFailed(Exception): + """Raised when the L2 messenger emits FailedRelayedMessage.""" + + def __init__(self, message: str, *, event_log: LogReceipt): + super().__init__(message) + self.event_log = event_log diff --git a/derive_client/utils/__init__.py b/derive_client/utils/__init__.py index d631367b..c3fc9e41 100644 --- a/derive_client/utils/__init__.py +++ b/derive_client/utils/__init__.py @@ -4,40
+4,19 @@ from .logger import get_logger from .prod_addresses import get_prod_derive_addresses from .retry import exp_backoff_retry, get_retry_session, wait_until -from .w3 import ( - build_standard_transaction, - estimate_fees, - get_contract, - get_erc20_contract, - get_w3_connection, - iter_events, - load_rpc_endpoints, - make_filter_params, - make_rotating_provider_middleware, - send_and_confirm_tx, - sign_and_send_tx, - wait_for_event, - wait_for_tx_receipt, -) +from .unwrap import unwrap_or_raise +from .w3 import from_base_units, get_w3_connection, load_rpc_endpoints, to_base_units __all__ = [ - "estimate_fees", "get_logger", "get_prod_derive_addresses", "exp_backoff_retry", "get_retry_session", - "make_filter_params", - "make_rotating_provider_middleware", "wait_until", "get_w3_connection", - "get_contract", - "get_erc20_contract", "load_rpc_endpoints", - "wait_for_tx_receipt", - "sign_and_send_tx", - "send_and_confirm_tx", + "to_base_units", + "from_base_units", "download_prod_address_abis", - "build_standard_transaction", - "iter_events", - "wait_for_event", + "unwrap_or_raise", ] diff --git a/derive_client/utils/asyncio_sync.py b/derive_client/utils/asyncio_sync.py new file mode 100644 index 00000000..37b3a48b --- /dev/null +++ b/derive_client/utils/asyncio_sync.py @@ -0,0 +1,55 @@ +import asyncio +import threading +from concurrent.futures import TimeoutError as _TimeoutError +from typing import Any, Optional + +_bg = {"loop": None, "thread": None, "started": False, "start_ev": threading.Event()} +_start_lock = threading.Lock() + + +def _start_bg_loop() -> None: + if _bg["loop"] is not None and _bg["started"]: + return + with _start_lock: + if _bg["loop"] is not None and _bg["started"]: + return + + def _run() -> None: + loop = asyncio.new_event_loop() + _bg["loop"] = loop + asyncio.set_event_loop(loop) + _bg["start_ev"].set() + _bg["started"] = True + try: + loop.run_forever() + finally: + try: + pending = asyncio.all_tasks(loop=loop) + for t in pending: 
+ t.cancel() + loop.run_until_complete(loop.shutdown_asyncgens()) + finally: + loop.close() + _bg["loop"] = None + _bg["started"] = False + _bg["start_ev"].clear() + + t = threading.Thread(target=_run, name="bg-async-loop", daemon=True) + t.start() + _bg["thread"] = t + _bg["start_ev"].wait(timeout=5) + if not _bg["started"]: + raise RuntimeError("Failed to start background loop") + + +def run_coroutine_sync(coro: object, timeout: Optional[float] = None) -> Any: + """Run coroutine on the single background loop and block until result.""" + + _start_bg_loop() + loop = _bg["loop"] + fut = asyncio.run_coroutine_threadsafe(coro, loop) + try: + return fut.result(timeout) + except _TimeoutError: + fut.cancel() + raise diff --git a/derive_client/utils/retry.py b/derive_client/utils/retry.py index 03a6bc34..06498b0a 100644 --- a/derive_client/utils/retry.py +++ b/derive_client/utils/retry.py @@ -1,3 +1,4 @@ +import asyncio import functools import time from http import HTTPStatus @@ -35,15 +36,15 @@ def exp_backoff_retry( return lambda f: exp_backoff_retry(f, attempts=attempts, initial_delay=initial_delay, exceptions=exceptions) @functools.wraps(func) - def wrapper(*args, **kwargs): + async def wrapper(*args, **kwargs): delay = initial_delay for attempt in range(attempts): try: - return func(*args, **kwargs) + return await func(*args, **kwargs) except exceptions as e: if attempt == attempts - 1: raise e - time.sleep(delay) + await asyncio.sleep(delay) delay *= 2 return wrapper diff --git a/derive_client/utils/unwrap.py b/derive_client/utils/unwrap.py new file mode 100644 index 00000000..bb299e94 --- /dev/null +++ b/derive_client/utils/unwrap.py @@ -0,0 +1,23 @@ +from typing import TypeVar + +from returns.io import IOFailure, IOResult, IOSuccess +from returns.result import Failure, Result, Success +from returns.unsafe import unsafe_perform_io + +T = TypeVar("T") + + +def unwrap_or_raise(result: Result[T, Exception] | IOResult[T, Exception]) -> T: + """Convert a returns.Result 
into a normal Python value or raise the underlying exception.""" + + match result: + case Success(): + return result.unwrap() + case Failure(): + raise result.failure() + case IOSuccess(): + return unsafe_perform_io(result).unwrap() + case IOFailure(): + raise unsafe_perform_io(result).failure() + case _: + raise RuntimeError(f"unwrap_or_raise received a non-Result value: {result}") diff --git a/derive_client/utils/w3.py b/derive_client/utils/w3.py index 4d167652..c5cad7ea 100644 --- a/derive_client/utils/w3.py +++ b/derive_client/utils/w3.py @@ -1,29 +1,20 @@ import functools import heapq -import json import threading import time from logging import Logger from pathlib import Path -from typing import Any, Callable, Generator, Literal +from typing import Any, Callable import yaml -from eth_account import Account -from hexbytes import HexBytes from requests import RequestException from web3 import Web3 -from web3.contract import Contract -from web3.contract.contract import ContractEvent -from web3.datastructures import AttributeDict from web3.providers.rpc import HTTPProvider -from derive_client.constants import ABI_DATA_DIR, DEFAULT_RPC_ENDPOINTS, GAS_FEE_BUFFER -from derive_client.data_types import ChainID, RPCEndpoints, TxResult, TxStatus +from derive_client.constants import CURRENCY_DECIMALS, DEFAULT_RPC_ENDPOINTS +from derive_client.data_types import ChainID, Currency, RPCEndpoints from derive_client.exceptions import NoAvailableRPC from derive_client.utils.logger import get_logger -from derive_client.utils.retry import exp_backoff_retry - -EVENT_LOG_RETRIES = 10 class EndpointState: @@ -158,234 +149,13 @@ def get_w3_connection( return w3 -def get_contract(w3: Web3, address: str, abi: list) -> Contract: - return w3.eth.contract(address=Web3.to_checksum_address(address), abi=abi) - - -def get_erc20_contract(w3: Web3, token_address: str) -> Contract: - erc20_abi_path = ABI_DATA_DIR / "erc20.json" - abi = json.loads(erc20_abi_path.read_text()) - return 
get_contract(w3=w3, address=token_address, abi=abi) - - -def simulate_tx(w3: Web3, tx: dict, account: Account) -> dict: - balance = w3.eth.get_balance(account.address) - max_fee_per_gas = tx["maxFeePerGas"] - gas_limit = tx["gas"] - value = tx.get("value", 0) - - max_gas_cost = gas_limit * max_fee_per_gas - total_cost = max_gas_cost + value - if not balance >= total_cost: - ratio = balance / total_cost * 100 - raise ValueError(f"Insufficient gas balance, have {balance}, need {total_cost}: ({ratio:.2f})") - - w3.eth.call(tx) - return tx - - -@exp_backoff_retry -def build_standard_transaction( - func, - account: Account, - w3: Web3, - value: int = 0, - gas_blocks: int = 100, - gas_percentile: int = 99, -) -> dict: - """Standardized transaction building with EIP-1559 and gas estimation""" - - nonce = w3.eth.get_transaction_count(account.address) - fee_estimations = estimate_fees(w3, blocks=gas_blocks, percentiles=[gas_percentile]) - max_fee = fee_estimations[0]["maxFeePerGas"] - priority_fee = fee_estimations[0]["maxPriorityFeePerGas"] - - tx = func.build_transaction( - { - "from": account.address, - "nonce": nonce, - "maxFeePerGas": max_fee, - "maxPriorityFeePerGas": priority_fee, - "chainId": w3.eth.chain_id, - "value": value, - } - ) - - tx["gas"] = w3.eth.estimate_gas(tx) - return tx - - return simulate_tx(w3, tx, account) - - -def wait_for_tx_receipt(w3: Web3, tx_hash: str, timeout=120, poll_interval=1) -> AttributeDict: - start_time = time.monotonic() - while True: - try: - receipt = w3.eth.get_transaction_receipt(tx_hash) - except Exception: - receipt = None - if receipt is not None: - return receipt - if time.monotonic() - start_time > timeout: - raise TimeoutError("Timed out waiting for transaction receipt.") - time.sleep(poll_interval) - - -def sign_and_send_tx(w3: Web3, tx: dict, private_key: str, logger: Logger) -> HexBytes: - signed_tx = w3.eth.account.sign_transaction(tx, private_key=private_key) - logger.debug(f"signed_tx: {signed_tx}") - tx_hash = 
w3.eth.send_raw_transaction(signed_tx.raw_transaction) - logger.debug(f"tx_hash: {tx_hash.to_0x_hex()}") - return tx_hash - - -def send_and_confirm_tx( - w3: Web3, - tx: dict, - private_key: str, - *, - action: str, # e.g. "approve()", "deposit()", "withdraw()" - logger: Logger, -) -> TxResult: - """Send and confirm transactions.""" - - try: - - tx_hash = sign_and_send_tx(w3=w3, tx=tx, private_key=private_key, logger=logger) - tx_result = TxResult(tx_hash=tx_hash.to_0x_hex(), tx_receipt=None, exception=None) - - except Exception as send_err: - msg = f"❌ Failed to send tx for {action}, error: {send_err!r}" - logger.error(msg) - return TxResult(exception=send_err, tx_hash=None, tx_receipt=None) - - try: - tx_receipt = wait_for_tx_receipt(w3=w3, tx_hash=tx_hash) - tx_result.tx_receipt = tx_receipt - except TimeoutError as timeout_err: - logger.warning(f"⏱️ Timeout waiting for tx receipt of {tx_hash.to_0x_hex()}") - tx_result.exception = timeout_err - return tx_result - - if tx_result.tx_receipt.status == TxStatus.SUCCESS: - logger.info(f"βœ… {action} succeeded for tx {tx_hash.to_0x_hex()}") - else: - logger.error(f"❌ {action} reverted for tx {tx_hash.to_0x_hex()}") +def to_base_units(human_amount: float, currency: Currency) -> int: + """Convert a human-readable token amount to base units using the currency's decimals.""" - return tx_result + return int(human_amount * 10 ** CURRENCY_DECIMALS[currency]) -def estimate_fees(w3, percentiles: list[int], blocks=20, default_tip=10_000): - fee_history = w3.eth.fee_history(blocks, "pending", percentiles) - base_fees = fee_history["baseFeePerGas"] - rewards = fee_history["reward"] +def from_base_units(amount: int, currency: Currency) -> float: + """Convert base units to human-readable amount using the currency's decimals.""" - # Calculate average priority fees for each percentile - avg_priority_fees = [] - for i in range(len(percentiles)): - nonzero_rewards = [r[i] for r in rewards if len(r) > i and r[i] > 0] - if 
nonzero_rewards: - estimated_tip = sum(nonzero_rewards) // len(nonzero_rewards) - else: - estimated_tip = default_tip - avg_priority_fees.append(estimated_tip) - - # Use the latest base fee - latest_base_fee = base_fees[-1] - - # Calculate max fees - fee_estimations = [] - for priority_fee in avg_priority_fees: - max_fee = int((latest_base_fee + priority_fee) * GAS_FEE_BUFFER) - fee_estimations.append({"maxFeePerGas": max_fee, "maxPriorityFeePerGas": priority_fee}) - - return fee_estimations - - -def iter_events( - w3: Web3, - filter_params: dict, - *, - condition: Callable[[AttributeDict], bool] = lambda _: True, - max_block_range: int = 10_000, - poll_interval: float = 5.0, - timeout: float | None = None, - logger: Logger, -) -> Generator[AttributeDict, None, None]: - """Stream matching logs over a fixed or live block window. Optionally raises TimeoutError.""" - - original_filter_params = filter_params.copy() # return original in TimeoutError - if (cursor := filter_params["fromBlock"]) == "latest": - cursor = w3.eth.block_number - - start_block = cursor - filter_params["toBlock"] = filter_params.get("toBlock", "latest") - fixed_ceiling = None if filter_params["toBlock"] == "latest" else filter_params["toBlock"] - - deadline = None if timeout is None else time.monotonic() + timeout - while True: - if deadline and time.monotonic() > deadline: - msg = f"Timed out waiting for events after scanning blocks {start_block}-{cursor}" - logger.warning(msg) - raise TimeoutError(f"{msg}: filter_params: {original_filter_params}") - upper = fixed_ceiling or w3.eth.block_number - if cursor <= upper: - end = min(upper, cursor + max_block_range - 1) - filter_params["fromBlock"] = hex(cursor) - filter_params["toBlock"] = hex(end) - # For example, when rotating providers are out of sync - retry_get_logs = exp_backoff_retry(w3.eth.get_logs, attempts=EVENT_LOG_RETRIES) - logs = retry_get_logs(filter_params=filter_params) - logger.debug(f"Scanned {cursor} - {end}: {len(logs)} logs") - 
yield from filter(condition, logs) - cursor = end + 1 # bounds are inclusive - - if fixed_ceiling and cursor > fixed_ceiling: - raise StopIteration - - time.sleep(poll_interval) - - -def wait_for_event( - w3: Web3, - filter_params: dict, - *, - condition: Callable[[AttributeDict], bool] = lambda _: True, - max_block_range: int = 10_000, - poll_interval: float = 5.0, - timeout: float = 300.0, - logger: Logger, -) -> AttributeDict: - """Return the first log from iter_events, or raise TimeoutError after `timeout` seconds.""" - - return next(iter_events(**locals())) - - -def make_filter_params( - event: ContractEvent, - from_block: int | Literal["latest"], - to_block: int | Literal["latest"] = "latest", - argument_filters: dict | None = None, -) -> dict: - """ - Function to create an eth_getLogs compatible filter_params for this event without using .create_filter. - event.create_filter uses eth_newFilter (a "push"), which not all RPC endpoints support. - """ - - argument_filters = argument_filters or {} - filter_params = event._get_event_filter_params( - fromBlock=from_block, - toBlock=to_block, - argument_filters=argument_filters, - abi=event.abi, - ) - filter_params["topics"] = tuple(filter_params["topics"]) - address = filter_params["address"] - if isinstance(address, str): - filter_params["address"] = Web3.to_checksum_address(address) - elif isinstance(address, (list, tuple)) and len(address) == 1: - filter_params["address"] = Web3.to_checksum_address(address[0]) - else: - raise ValueError(f"Unexpected address filter: {address!r}") - - return filter_params + return amount / 10 ** CURRENCY_DECIMALS[currency] diff --git a/poetry.lock b/poetry.lock index 4033e5f4..e43f54eb 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.0.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand. 
[[package]] name = "aiohappyeyeballs" @@ -7,7 +7,6 @@ description = "Happy Eyeballs for asyncio" optional = false python-versions = ">=3.9" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8"}, {file = "aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558"}, @@ -20,7 +19,6 @@ description = "Async http client/server framework (asyncio)" optional = false python-versions = ">=3.9" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "aiohttp-3.12.13-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5421af8f22a98f640261ee48aae3a37f0c41371e99412d55eaf2f8a46d5dad29"}, {file = "aiohttp-3.12.13-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0fcda86f6cb318ba36ed8f1396a6a4a3fd8f856f84d426584392083d10da4de0"}, @@ -121,7 +119,7 @@ propcache = ">=0.2.0" yarl = ">=1.17.0,<2.0" [package.extras] -speedups = ["Brotli", "aiodns (>=3.3.0)", "brotlicffi"] +speedups = ["Brotli ; platform_python_implementation == \"CPython\"", "aiodns (>=3.3.0)", "brotlicffi ; platform_python_implementation != \"CPython\""] [[package]] name = "aiolimiter" @@ -130,7 +128,6 @@ description = "asyncio rate limiter, a leaky bucket implementation" optional = false python-versions = "<4.0,>=3.8" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "aiolimiter-1.2.1-py3-none-any.whl", hash = "sha256:d3f249e9059a20badcb56b61601a83556133655c11d1eb3dd3e04ff069e5f3c7"}, {file = "aiolimiter-1.2.1.tar.gz", hash = "sha256:e02a37ea1a855d9e832252a105420ad4d15011505512a1a1d814647451b5cca9"}, @@ -143,7 +140,6 @@ description = "aiosignal: a list of registered asynchronous callbacks" optional = false python-versions = ">=3.9" groups = ["main"] -markers = "python_version <= 
\"3.11\" or python_version >= \"3.12\"" files = [ {file = "aiosignal-1.3.2-py2.py3-none-any.whl", hash = "sha256:45cde58e409a301715980c2b01d0c28bdde3770d8290b5eb2173759d9acb31a5"}, {file = "aiosignal-1.3.2.tar.gz", hash = "sha256:a8c255c66fafb1e499c9351d0bf32ff2d8a0321595ebac3b93713656d2436f54"}, @@ -159,7 +155,6 @@ description = "Reusable constraint types to use with typing.Annotated" optional = false python-versions = ">=3.8" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, @@ -185,19 +180,18 @@ description = "Classes Without Boilerplate" optional = false python-versions = ">=3.8" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3"}, {file = "attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b"}, ] [package.extras] -benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +benchmark = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; 
platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +cov = ["cloudpickle ; platform_python_implementation == \"CPython\"", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +dev = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier"] -tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] +tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\""] [[package]] name = "bitarray" @@ -206,7 +200,6 @@ description = "efficient arrays of booleans -- C extension" optional = false python-versions = "*" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "bitarray-3.4.3-cp310-cp310-macosx_10_9_x86_64.whl", 
hash = "sha256:a0c126a6ed1d3cd68cd91c0056cee8edcf6aa57c557b555528fe37375e72ea74"}, {file = "bitarray-3.4.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:690fc6d2b5c5e267f643e3720e8b4203838d3f30439e2070dccfae473b8223c3"}, @@ -351,7 +344,6 @@ description = "The uncompromising code formatter." optional = false python-versions = ">=3.9" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "black-24.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6668650ea4b685440857138e5fe40cde4d652633b1bdffc62933d0db4ed9812"}, {file = "black-24.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1c536fcf674217e87b8cc3657b81809d3c085d7bf3ef262ead700da345bfa6ea"}, @@ -399,7 +391,6 @@ description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.7" groups = ["main", "dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "certifi-2025.6.15-py3-none-any.whl", hash = "sha256:2e0c7ce7cb5d8f8634ca55d2ba7e6ec2689a2fd6537d8dec1296a477a4910057"}, {file = "certifi-2025.6.15.tar.gz", hash = "sha256:d747aa5a8b9bbbb1bb8c22bb13e22bd1f18e9796defa16bab421f7f7a317323b"}, @@ -412,7 +403,6 @@ description = "The Real First Universal Charset Detector. 
Open, modern and activ optional = false python-versions = ">=3.7" groups = ["main", "dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "charset_normalizer-3.4.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7c48ed483eb946e6c04ccbe02c6b4d1d48e51944b6db70f697e089c193404941"}, {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2d318c11350e10662026ad0eb71bb51c7812fc8590825304ae0bdd4ac283acd"}, @@ -515,7 +505,6 @@ description = "Python bindings for C-KZG-4844" optional = false python-versions = "*" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "ckzg-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4b9825a1458219e8b4b023012b8ef027ef1f47e903f9541cbca4615f80132730"}, {file = "ckzg-2.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e2a40a3ba65cca4b52825d26829e6f7eb464aa27a9e9efb6b8b2ce183442c741"}, @@ -626,7 +615,6 @@ description = "Build Nice User Interfaces In The Terminal" optional = false python-versions = "<4.0,>=3.9" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "cli_ui-0.19.0-py3-none-any.whl", hash = "sha256:1cf1b93328f7377730db29507e10bcb29ccc1427ceef45714b522d1f2055e7cd"}, {file = "cli_ui-0.19.0.tar.gz", hash = "sha256:59cdab0c6a2a6703c61b31cb75a1943076888907f015fffe15c5a8eb41a933aa"}, @@ -644,7 +632,6 @@ description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" groups = ["main", "dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, @@ -664,7 +651,7 @@ files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash 
= "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] -markers = {main = "python_version <= \"3.11\" and platform_system == \"Windows\" or python_version >= \"3.12\" and platform_system == \"Windows\"", dev = "python_version <= \"3.11\" or python_version >= \"3.12\""} +markers = {main = "platform_system == \"Windows\""} [[package]] name = "cytoolz" @@ -673,7 +660,7 @@ description = "Cython implementation of Toolz: High performance functional utili optional = false python-versions = ">=3.8" groups = ["main"] -markers = "python_version <= \"3.11\" and implementation_name == \"cpython\" or python_version >= \"3.12\" and implementation_name == \"cpython\"" +markers = "implementation_name == \"cpython\"" files = [ {file = "cytoolz-1.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cec9af61f71fc3853eb5dca3d42eb07d1f48a4599fa502cbe92adde85f74b042"}, {file = "cytoolz-1.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:140bbd649dbda01e91add7642149a5987a7c3ccc251f2263de894b89f50b6608"}, @@ -790,7 +777,6 @@ description = "Python package to sign on-chain self-custodial requests for order optional = false python-versions = "<4.0,>=3.9" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "derive_action_signing-0.0.12-py3-none-any.whl", hash = "sha256:c7fbee46e8fc6abac9204bec6fee9922d22800f647fee4c44f0e15a72eecd187"}, {file = "derive_action_signing-0.0.12.tar.gz", hash = "sha256:2ede7861234fd677abd05f88d2e0f27e27966753e02735e938a97be173bd277f"}, @@ -811,7 +797,6 @@ description = "Pythonic argument parser, that will make you smile" optional = false python-versions = "*" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "docopt-0.6.2.tar.gz", hash = "sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491"}, 
] @@ -823,7 +808,6 @@ description = "eth_abi: Python utilities for working with Ethereum ABI definitio optional = false python-versions = "<4,>=3.8" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "eth_abi-5.2.0-py3-none-any.whl", hash = "sha256:17abe47560ad753f18054f5b3089fcb588f3e3a092136a416b6c1502cb7e8877"}, {file = "eth_abi-5.2.0.tar.gz", hash = "sha256:178703fa98c07d8eecd5ae569e7e8d159e493ebb6eeb534a8fe973fbc4e40ef0"}, @@ -847,7 +831,6 @@ description = "eth-account: Sign Ethereum transactions and messages with local p optional = false python-versions = "<4,>=3.8" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "eth_account-0.13.7-py3-none-any.whl", hash = "sha256:39727de8c94d004ff61d10da7587509c04d2dc7eac71e04830135300bdfc6d24"}, {file = "eth_account-0.13.7.tar.gz", hash = "sha256:5853ecbcbb22e65411176f121f5f24b8afeeaf13492359d254b16d8b18c77a46"}, @@ -877,7 +860,6 @@ description = "eth-hash: The Ethereum hashing function, keccak256, sometimes (er optional = false python-versions = "<4,>=3.8" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "eth_hash-0.7.1-py3-none-any.whl", hash = "sha256:0fb1add2adf99ef28883fd6228eb447ef519ea72933535ad1a0b28c6f65f868a"}, {file = "eth_hash-0.7.1.tar.gz", hash = "sha256:d2411a403a0b0a62e8247b4117932d900ffb4c8c64b15f92620547ca5ce46be5"}, @@ -890,7 +872,7 @@ pycryptodome = {version = ">=3.6.6,<4", optional = true, markers = "extra == \"p dev = ["build (>=0.9.0)", "bump_my_version (>=0.19.0)", "ipython", "mypy (==1.10.0)", "pre-commit (>=3.4.0)", "pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)", "sphinx (>=6.0.0)", "sphinx-autobuild (>=2021.3.14)", "sphinx_rtd_theme (>=1.0.0)", "towncrier (>=24,<25)", "tox (>=4.0.0)", "twine", "wheel"] docs = ["sphinx (>=6.0.0)", "sphinx-autobuild (>=2021.3.14)", "sphinx_rtd_theme (>=1.0.0)", "towncrier (>=24,<25)"] pycryptodome = 
["pycryptodome (>=3.6.6,<4)"] -pysha3 = ["pysha3 (>=1.0.0,<2.0.0)", "safe-pysha3 (>=1.0.0)"] +pysha3 = ["pysha3 (>=1.0.0,<2.0.0) ; python_version < \"3.9\"", "safe-pysha3 (>=1.0.0) ; python_version >= \"3.9\""] test = ["pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)"] [[package]] @@ -900,7 +882,6 @@ description = "eth-keyfile: A library for handling the encrypted keyfiles used t optional = false python-versions = "<4,>=3.8" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "eth_keyfile-0.8.1-py3-none-any.whl", hash = "sha256:65387378b82fe7e86d7cb9f8d98e6d639142661b2f6f490629da09fddbef6d64"}, {file = "eth_keyfile-0.8.1.tar.gz", hash = "sha256:9708bc31f386b52cca0969238ff35b1ac72bd7a7186f2a84b86110d3c973bec1"}, @@ -923,7 +904,6 @@ description = "eth-keys: Common API for Ethereum key operations" optional = false python-versions = "<4,>=3.8" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "eth_keys-0.7.0-py3-none-any.whl", hash = "sha256:b0cdda8ffe8e5ba69c7c5ca33f153828edcace844f67aabd4542d7de38b159cf"}, {file = "eth_keys-0.7.0.tar.gz", hash = "sha256:79d24fd876201df67741de3e3fefb3f4dbcbb6ace66e47e6fe662851a4547814"}, @@ -946,7 +926,6 @@ description = "eth-rlp: RLP definitions for common Ethereum objects in Python" optional = false python-versions = "<4,>=3.8" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "eth_rlp-2.2.0-py3-none-any.whl", hash = "sha256:5692d595a741fbaef1203db6a2fedffbd2506d31455a6ad378c8449ee5985c47"}, {file = "eth_rlp-2.2.0.tar.gz", hash = "sha256:5e4b2eb1b8213e303d6a232dfe35ab8c29e2d3051b86e8d359def80cd21db83d"}, @@ -970,7 +949,6 @@ description = "eth-typing: Common type annotations for ethereum python packages" optional = false python-versions = ">=3.8, <4" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = 
"eth-typing-4.0.0.tar.gz", hash = "sha256:9af0b6beafbc5c2e18daf19da5f5a68315023172c4e79d149e12ad10a3d3f731"}, {file = "eth_typing-4.0.0-py3-none-any.whl", hash = "sha256:7e556bea322b6e8c0a231547b736c258e10ce9eed5ddc254f51031b12af66a16"}, @@ -988,7 +966,6 @@ description = "eth-utils: Common utility functions for python code that interact optional = false python-versions = "<4,>=3.8" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "eth_utils-4.1.1-py3-none-any.whl", hash = "sha256:ccbbac68a6d65cb6e294c5bcb6c6a5cec79a241c56dc5d9c345ed788c30f8534"}, {file = "eth_utils-4.1.1.tar.gz", hash = "sha256:71c8d10dec7494aeed20fa7a4d52ec2ce4a2e52fdce80aab4f5c3c19f3648b25"}, @@ -1031,7 +1008,6 @@ description = "the modular source code checker: pep8 pyflakes and co" optional = false python-versions = ">=3.8.1" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "flake8-6.1.0-py2.py3-none-any.whl", hash = "sha256:ffdfce58ea94c6580c77888a86506937f9a1a227dfcd15f245d694ae20a6b6e5"}, {file = "flake8-6.1.0.tar.gz", hash = "sha256:d5b3857f07c030bdb5bf41c7f53799571d75c4491748a3adcd47de929e34cd23"}, @@ -1049,7 +1025,6 @@ description = "A list-like structure which implements collections.abc.MutableSeq optional = false python-versions = ">=3.9" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "frozenlist-1.7.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cc4df77d638aa2ed703b878dd093725b72a824c3c546c076e8fdf276f78ee84a"}, {file = "frozenlist-1.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:716a9973a2cc963160394f701964fe25012600f3d311f60c790400b00e568b61"}, @@ -1164,7 +1139,6 @@ description = "Copy your docs directly to the gh-pages branch." 
optional = false python-versions = "*" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "ghp-import-2.1.0.tar.gz", hash = "sha256:9c535c4c61193c2df8871222567d7fd7e5014d835f97dc7b7439069e2413d343"}, {file = "ghp_import-2.1.0-py3-none-any.whl", hash = "sha256:8337dd7b50877f163d4c0289bc1f1c7f127550241988d568c1db512c4324a619"}, @@ -1183,7 +1157,6 @@ description = "hexbytes: Python `bytes` subclass that decodes hex, with a readab optional = false python-versions = "<4,>=3.8" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "hexbytes-1.3.1-py3-none-any.whl", hash = "sha256:da01ff24a1a9a2b1881c4b85f0e9f9b0f51b526b379ffa23832ae7899d29c2c7"}, {file = "hexbytes-1.3.1.tar.gz", hash = "sha256:a657eebebdfe27254336f98d8af6e2236f3f83aed164b87466b6cf6c5f5a4765"}, @@ -1201,7 +1174,6 @@ description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" groups = ["main", "dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, @@ -1210,31 +1182,6 @@ files = [ [package.extras] all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] -[[package]] -name = "importlib-metadata" -version = "8.7.0" -description = "Read metadata from Python packages" -optional = false -python-versions = ">=3.9" -groups = ["dev"] -markers = "python_version < \"3.10\"" -files = [ - {file = "importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd"}, - {file = "importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000"}, -] - -[package.dependencies] 
-zipp = ">=3.20" - -[package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] -cover = ["pytest-cov"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -enabler = ["pytest-enabler (>=2.2)"] -perf = ["ipython"] -test = ["flufl.flake8", "importlib_resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] -type = ["pytest-mypy"] - [[package]] name = "iniconfig" version = "2.1.0" @@ -1242,7 +1189,6 @@ description = "brain-dead simple config-ini parsing" optional = false python-versions = ">=3.8" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"}, {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, @@ -1255,7 +1201,6 @@ description = "A Python utility / library to sort Python imports." optional = false python-versions = ">=3.8.0" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, @@ -1271,7 +1216,6 @@ description = "A very fast and expressive template engine." 
optional = false python-versions = ">=3.7" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"}, {file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"}, @@ -1290,7 +1234,6 @@ description = "An implementation of JSON Schema validation for Python" optional = false python-versions = ">=3.9" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "jsonschema-4.24.0-py3-none-any.whl", hash = "sha256:a462455f19f5faf404a7902952b6f0e3ce868f3ee09a359b05eca6673bd8412d"}, {file = "jsonschema-4.24.0.tar.gz", hash = "sha256:0b4e8069eb12aedfa881333004bccaec24ecef5a8a6a4b6df142b2cc9599d196"}, @@ -1313,7 +1256,6 @@ description = "The JSON Schema meta-schemas and vocabularies, exposed as a Regis optional = false python-versions = ">=3.9" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "jsonschema_specifications-2025.4.1-py3-none-any.whl", hash = "sha256:4653bffbd6584f7de83a67e0d620ef16900b390ddc7939d56684d6c81e33f1af"}, {file = "jsonschema_specifications-2025.4.1.tar.gz", hash = "sha256:630159c9f4dbea161a6a2205c3011cc4f18ff381b189fff48bb39b9bf26ae608"}, @@ -1329,7 +1271,6 @@ description = "An Dict like LRU container." optional = false python-versions = ">=3.8" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "lru-dict-1.3.0.tar.gz", hash = "sha256:54fd1966d6bd1fcde781596cb86068214edeebff1db13a2cea11079e3fd07b6b"}, {file = "lru_dict-1.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4073333894db9840f066226d50e6f914a2240711c87d60885d8c940b69a6673f"}, @@ -1424,15 +1365,11 @@ description = "Python implementation of John Gruber's Markdown." 
optional = false python-versions = ">=3.9" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "markdown-3.8.2-py3-none-any.whl", hash = "sha256:5c83764dbd4e00bdd94d85a19b8d55ccca20fe35b2e678a1422b380324dd5f24"}, {file = "markdown-3.8.2.tar.gz", hash = "sha256:247b9a70dd12e27f67431ce62523e675b866d254f900c4fe75ce3dda62237c45"}, ] -[package.dependencies] -importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} - [package.extras] docs = ["mdx_gh_links (>=0.2)", "mkdocs (>=1.6)", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-nature (>=0.6)", "mkdocs-section-index", "mkdocstrings[python]"] testing = ["coverage", "pyyaml"] @@ -1444,7 +1381,6 @@ description = "Python port of markdown-it. Markdown parsing, done right!" optional = false python-versions = ">=3.8" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, @@ -1470,7 +1406,6 @@ description = "Safely add untrusted strings to HTML/XML markup." 
optional = false python-versions = ">=3.9" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, @@ -1542,7 +1477,6 @@ description = "McCabe checker, plugin for flake8" optional = false python-versions = ">=3.6" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, @@ -1555,7 +1489,6 @@ description = "Markdown URL utilities" optional = false python-versions = ">=3.7" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, @@ -1568,7 +1501,6 @@ description = "A deep merge function for 🐍." optional = false python-versions = ">=3.6" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "mergedeep-1.3.4-py3-none-any.whl", hash = "sha256:70775750742b25c0d8f36c55aed03d24c3384d17c951b3175d898bd778ef0307"}, {file = "mergedeep-1.3.4.tar.gz", hash = "sha256:0096d52e9dad9939c3d975a774666af186eda617e6ca84df4c94dec30004f2a8"}, @@ -1581,7 +1513,6 @@ description = "Project documentation with Markdown." 
optional = false python-versions = ">=3.8" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "mkdocs-1.6.1-py3-none-any.whl", hash = "sha256:db91759624d1647f3f34aa0c3f327dd2601beae39a366d6e064c03468d35c20e"}, {file = "mkdocs-1.6.1.tar.gz", hash = "sha256:7b432f01d928c084353ab39c57282f29f92136665bdd6abf7c1ec8d822ef86f2"}, @@ -1591,7 +1522,6 @@ files = [ click = ">=7.0" colorama = {version = ">=0.4", markers = "platform_system == \"Windows\""} ghp-import = ">=1.0" -importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} jinja2 = ">=2.11.1" markdown = ">=3.3.6" markupsafe = ">=2.0.1" @@ -1605,7 +1535,7 @@ watchdog = ">=2.0" [package.extras] i18n = ["babel (>=2.9.0)"] -min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4)", "ghp-import (==1.0)", "importlib-metadata (==4.4)", "jinja2 (==2.11.1)", "markdown (==3.3.6)", "markupsafe (==2.0.1)", "mergedeep (==1.3.4)", "mkdocs-get-deps (==0.2.0)", "packaging (==20.5)", "pathspec (==0.11.1)", "pyyaml (==5.1)", "pyyaml-env-tag (==0.1)", "watchdog (==2.0)"] +min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4) ; platform_system == \"Windows\"", "ghp-import (==1.0)", "importlib-metadata (==4.4) ; python_version < \"3.10\"", "jinja2 (==2.11.1)", "markdown (==3.3.6)", "markupsafe (==2.0.1)", "mergedeep (==1.3.4)", "mkdocs-get-deps (==0.2.0)", "packaging (==20.5)", "pathspec (==0.11.1)", "pyyaml (==5.1)", "pyyaml-env-tag (==0.1)", "watchdog (==2.0)"] [[package]] name = "mkdocs-autorefs" @@ -1614,7 +1544,6 @@ description = "Automatically link across pages in MkDocs." 
optional = false python-versions = ">=3.7" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "mkdocs-autorefs-0.4.1.tar.gz", hash = "sha256:70748a7bd025f9ecd6d6feeba8ba63f8e891a1af55f48e366d6d6e78493aba84"}, {file = "mkdocs_autorefs-0.4.1-py3-none-any.whl", hash = "sha256:a2248a9501b29dc0cc8ba4c09f4f47ff121945f6ce33d760f145d6f89d313f5b"}, @@ -1631,14 +1560,12 @@ description = "MkDocs extension that lists all dependencies according to a mkdoc optional = false python-versions = ">=3.8" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "mkdocs_get_deps-0.2.0-py3-none-any.whl", hash = "sha256:2bf11d0b133e77a0dd036abeeb06dec8775e46efa526dc70667d8863eefc6134"}, {file = "mkdocs_get_deps-0.2.0.tar.gz", hash = "sha256:162b3d129c7fad9b19abfdcb9c1458a651628e4b1dea628ac68790fb3061c60c"}, ] [package.dependencies] -importlib-metadata = {version = ">=4.3", markers = "python_version < \"3.10\""} mergedeep = ">=1.3.4" platformdirs = ">=2.2.0" pyyaml = ">=5.1" @@ -1650,7 +1577,6 @@ description = "Mkdocs Markdown includer plugin." 
optional = false python-versions = ">=3.6" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "mkdocs_include_markdown_plugin-3.9.1-py3-none-any.whl", hash = "sha256:f33687e29ac66d045ba181ea50f054646b0090b42b0a4318f08e7f1d1235e6f6"}, {file = "mkdocs_include_markdown_plugin-3.9.1.tar.gz", hash = "sha256:5e5698e78d7fea111be9873a456089daa333497988405acaac8eba2924a19152"}, @@ -1667,7 +1593,6 @@ description = "Documentation that simply works" optional = false python-versions = ">=3.7" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "mkdocs_material-8.5.11-py3-none-any.whl", hash = "sha256:c907b4b052240a5778074a30a78f31a1f8ff82d7012356dc26898b97559f082e"}, {file = "mkdocs_material-8.5.11.tar.gz", hash = "sha256:b0ea0513fd8cab323e8a825d6692ea07fa83e917bb5db042e523afecc7064ab7"}, @@ -1689,7 +1614,6 @@ description = "Extension pack for Python Markdown and MkDocs Material." optional = false python-versions = ">=3.8" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "mkdocs_material_extensions-1.3.1-py3-none-any.whl", hash = "sha256:adff8b62700b25cb77b53358dad940f3ef973dd6db797907c49e3c2ef3ab4e31"}, {file = "mkdocs_material_extensions-1.3.1.tar.gz", hash = "sha256:10c9511cea88f568257f960358a467d12b970e1f7b2c0e5fb2bb48cab1928443"}, @@ -1702,7 +1626,6 @@ description = "multidict implementation" optional = false python-versions = ">=3.9" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "multidict-6.5.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7b7d75cb5b90fa55700edbbdca12cd31f6b19c919e98712933c7a1c3c6c71b73"}, {file = "multidict-6.5.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ad32e43e028276612bf5bab762677e7d131d2df00106b53de2efb2b8a28d5bce"}, @@ -1814,7 +1737,6 @@ description = "Type system extensions for programs checked with the mypy 
type ch optional = false python-versions = ">=3.8" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"}, {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, @@ -1827,7 +1749,6 @@ description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.9" groups = ["main", "dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, @@ -1874,7 +1795,6 @@ description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, @@ -1887,7 +1807,6 @@ description = "Powerful data structures for data analysis, time series, and stat optional = false python-versions = ">=3.9" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pandas-2.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:625466edd01d43b75b1883a64d859168e4556261a5035b32f9d743b67ef44634"}, {file = "pandas-2.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a6872d695c896f00df46b71648eea332279ef4077a409e2fe94220208b6bb675"}, @@ -1975,7 +1894,6 @@ description = "(Soon to be) the fastest pure-Python PEG parser I could 
muster" optional = false python-versions = "*" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "parsimonious-0.10.0-py3-none-any.whl", hash = "sha256:982ab435fabe86519b57f6b35610aa4e4e977e9f02a14353edf4bbc75369fc0f"}, {file = "parsimonious-0.10.0.tar.gz", hash = "sha256:8281600da180ec8ae35427a4ab4f7b82bfec1e3d1e52f80cb60ea82b9512501c"}, @@ -1991,7 +1909,6 @@ description = "Utility library for gitignore style pattern matching of file path optional = false python-versions = ">=3.8" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, @@ -2004,7 +1921,6 @@ description = "A small Python package for determining appropriate platform-speci optional = false python-versions = ">=3.9" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "platformdirs-4.3.8-py3-none-any.whl", hash = "sha256:ff7059bb7eb1179e2685604f4aaf157cfd9535242bd23742eadc3c13542139b4"}, {file = "platformdirs-4.3.8.tar.gz", hash = "sha256:3d512d96e16bcb959a814c9f348431070822a6496326a4be0911c40b5a74c2bc"}, @@ -2022,7 +1938,6 @@ description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.9" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"}, {file = "pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3"}, @@ -2039,7 +1954,6 @@ description = "Accelerated property cache" optional = false python-versions = ">=3.9" groups = ["main"] -markers = "python_version <= 
\"3.11\" or python_version >= \"3.12\"" files = [ {file = "propcache-0.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:22d9962a358aedbb7a2e36187ff273adeaab9743373a272976d2e348d08c7770"}, {file = "propcache-0.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0d0fda578d1dc3f77b6b5a5dce3b9ad69a8250a891760a548df850a5e8da87f3"}, @@ -2148,7 +2062,6 @@ description = "" optional = false python-versions = ">=3.9" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "protobuf-6.31.1-cp310-abi3-win32.whl", hash = "sha256:7fa17d5a29c2e04b7d90e5e32388b8bfd0e7107cd8e616feef7ed3fa6bdab5c9"}, {file = "protobuf-6.31.1-cp310-abi3-win_amd64.whl", hash = "sha256:426f59d2964864a1a366254fa703b8632dcec0790d8862d30034d8245e1cd447"}, @@ -2168,7 +2081,6 @@ description = "Python style guide checker" optional = false python-versions = ">=3.8" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"}, {file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"}, @@ -2181,7 +2093,6 @@ description = "Cryptographic library for Python" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pycryptodome-3.23.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a176b79c49af27d7f6c12e4b178b0824626f40a7b9fed08f712291b6d54bf566"}, {file = "pycryptodome-3.23.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:573a0b3017e06f2cffd27d92ef22e46aa3be87a2d317a5abf7cc0e84e321bd75"}, @@ -2233,7 +2144,6 @@ description = "Data validation using Python type hints" optional = false python-versions = ">=3.9" groups = ["main"] -markers = "python_version <= \"3.11\" or 
python_version >= \"3.12\"" files = [ {file = "pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b"}, {file = "pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db"}, @@ -2247,7 +2157,7 @@ typing-inspection = ">=0.4.0" [package.extras] email = ["email-validator (>=2.0.0)"] -timezone = ["tzdata"] +timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] [[package]] name = "pydantic-core" @@ -2256,7 +2166,6 @@ description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.9" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8"}, {file = "pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d"}, @@ -2369,7 +2278,6 @@ description = "passive checker of Python programs" optional = false python-versions = ">=3.8" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pyflakes-3.1.0-py2.py3-none-any.whl", hash = "sha256:4132f6d49cb4dae6819e5379898f2b8cce3c5f23994194c24b77d5da2e36f774"}, {file = "pyflakes-3.1.0.tar.gz", hash = "sha256:a0aae034c444db0071aa077972ba4768d40c830d9539fd45bf4cd3f8f6992efc"}, @@ -2382,7 +2290,6 @@ description = "Pygments is a syntax highlighting package written in Python." 
optional = false python-versions = ">=3.8" groups = ["main", "dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b"}, {file = "pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887"}, @@ -2398,7 +2305,6 @@ description = "Extension pack for Python Markdown." optional = false python-versions = ">=3.9" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pymdown_extensions-10.16-py3-none-any.whl", hash = "sha256:f5dd064a4db588cb2d95229fc4ee63a1b16cc8b4d0e6145c0899ed8723da1df2"}, {file = "pymdown_extensions-10.16.tar.gz", hash = "sha256:71dac4fca63fabeffd3eb9038b756161a33ec6e8d230853d3cecf562155ab3de"}, @@ -2418,7 +2324,6 @@ description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, @@ -2442,7 +2347,6 @@ description = "pytest plugin to re-run tests to eliminate flaky failures" optional = false python-versions = ">=3.7" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pytest-rerunfailures-13.0.tar.gz", hash = "sha256:e132dbe420bc476f544b96e7036edd0a69707574209b6677263c950d19b09199"}, {file = "pytest_rerunfailures-13.0-py3-none-any.whl", hash = "sha256:34919cb3fcb1f8e5d4b940aa75ccdea9661bade925091873b7c6fa5548333069"}, @@ -2459,7 +2363,6 @@ description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" groups = 
["main", "dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, @@ -2475,7 +2378,6 @@ description = "Read key-value pairs from a .env file and set them as environment optional = false python-versions = "*" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "python-dotenv-0.17.1.tar.gz", hash = "sha256:b1ae5e9643d5ed987fc57cc2583021e38db531946518130777734f9589b3141f"}, {file = "python_dotenv-0.17.1-py2.py3-none-any.whl", hash = "sha256:00aa34e92d992e9f8383730816359647f358f4a3be1ba45e5a5cefd27ee91544"}, @@ -2491,7 +2393,6 @@ description = "World timezone definitions, modern and historical" optional = false python-versions = "*" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00"}, {file = "pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3"}, @@ -2504,7 +2405,6 @@ description = "Unicode normalization forms (NFC, NFKC, NFD, NFKD). 
A library ind optional = false python-versions = ">=3.6" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pyunormalize-16.0.0-py3-none-any.whl", hash = "sha256:c647d95e5d1e2ea9a2f448d1d95d8518348df24eab5c3fd32d2b5c3300a49152"}, {file = "pyunormalize-16.0.0.tar.gz", hash = "sha256:2e1dfbb4a118154ae26f70710426a52a364b926c9191f764601f5a8cb12761f7"}, @@ -2517,7 +2417,7 @@ description = "Python for Window Extensions" optional = false python-versions = "*" groups = ["main"] -markers = "python_version <= \"3.11\" and platform_system == \"Windows\" or python_version >= \"3.12\" and platform_system == \"Windows\"" +markers = "platform_system == \"Windows\"" files = [ {file = "pywin32-310-cp310-cp310-win32.whl", hash = "sha256:6dd97011efc8bf51d6793a82292419eba2c71cf8e7250cfac03bba284454abc1"}, {file = "pywin32-310-cp310-cp310-win_amd64.whl", hash = "sha256:c3e78706e4229b915a0821941a84e7ef420bf2b77e08c9dae3c76fd03fd2ae3d"}, @@ -2544,7 +2444,6 @@ description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.8" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, @@ -2608,7 +2507,6 @@ description = "A custom YAML tag for referencing environment variables in YAML f optional = false python-versions = ">=3.9" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "pyyaml_env_tag-1.1-py3-none-any.whl", hash = "sha256:17109e1a528561e32f026364712fee1264bc2ea6715120891174ed1b980d2e04"}, {file = "pyyaml_env_tag-1.1.tar.gz", hash = "sha256:2eb38b75a2d21ee0475d6d97ec19c63287a7e140231e4214969d0eac923cd7ff"}, @@ -2624,7 +2522,6 @@ 
description = "JSON Referencing + Python" optional = false python-versions = ">=3.9" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0"}, {file = "referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa"}, @@ -2642,7 +2539,6 @@ description = "Alternative regular expression module, to replace re." optional = false python-versions = ">=3.8" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "regex-2024.11.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff590880083d60acc0433f9c3f713c51f7ac6ebb9adf889c79a261ecf541aa91"}, {file = "regex-2024.11.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:658f90550f38270639e83ce492f27d2c8d2cd63805c65a13a14d36ca126753f0"}, @@ -2747,7 +2643,6 @@ description = "Python HTTP for Humans." optional = false python-versions = ">=3.8" groups = ["main", "dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c"}, {file = "requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422"}, @@ -2763,6 +2658,25 @@ urllib3 = ">=1.21.1,<3" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] +[[package]] +name = "returns" +version = "0.26.0" +description = "Make your functions return something meaningful, typed, and safe!" 
+optional = false +python-versions = "<4.0,>=3.10" +groups = ["main"] +files = [ + {file = "returns-0.26.0-py3-none-any.whl", hash = "sha256:7cae94c730d6c56ffd9d0f583f7a2c0b32cfe17d141837150c8e6cff3eb30d71"}, + {file = "returns-0.26.0.tar.gz", hash = "sha256:180320e0f6e9ea9845330ccfc020f542330f05b7250941d9b9b7c00203fcc3da"}, +] + +[package.dependencies] +typing-extensions = ">=4.0,<5.0" + +[package.extras] +check-laws = ["hypothesis (>=6.136,<7.0)", "pytest (>=8.0,<9.0)"] +compatible-mypy = ["mypy (>=1.12,<1.18)"] + [[package]] name = "rich" version = "14.0.0" @@ -2770,7 +2684,6 @@ description = "Render rich text, tables, progress bars, syntax highlighting, mar optional = false python-versions = ">=3.8.0" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "rich-14.0.0-py3-none-any.whl", hash = "sha256:1c9491e1951aac09caffd42f448ee3d04e58923ffe14993f6e83068dc395d7e0"}, {file = "rich-14.0.0.tar.gz", hash = "sha256:82f1bc23a6a21ebca4ae0c45af9bdbc492ed20231dcb63f297d6d1021a9d5725"}, @@ -2791,7 +2704,6 @@ description = "Format click help output nicely with rich" optional = false python-versions = ">=3.7" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "rich_click-1.8.9-py3-none-any.whl", hash = "sha256:c3fa81ed8a671a10de65a9e20abf642cfdac6fdb882db1ef465ee33919fbcfe2"}, {file = "rich_click-1.8.9.tar.gz", hash = "sha256:fd98c0ab9ddc1cf9c0b7463f68daf28b4d0033a74214ceb02f761b3ff2af3136"}, @@ -2813,7 +2725,6 @@ description = "rlp: A package for Recursive Length Prefix encoding and decoding" optional = false python-versions = "<4,>=3.8" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "rlp-4.1.0-py3-none-any.whl", hash = "sha256:8eca394c579bad34ee0b937aecb96a57052ff3716e19c7a578883e767bc5da6f"}, {file = "rlp-4.1.0.tar.gz", hash = "sha256:be07564270a96f3e225e2c107db263de96b5bc1f27722d2855bd3459a08e95a9"}, @@ 
-2835,7 +2746,6 @@ description = "Python bindings to Rust's persistent data structures (rpds)" optional = false python-versions = ">=3.9" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "rpds_py-0.25.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:f4ad628b5174d5315761b67f212774a32f5bad5e61396d38108bd801c0a8f5d9"}, {file = "rpds_py-0.25.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8c742af695f7525e559c16f1562cf2323db0e3f0fbdcabdf6865b095256b2d40"}, @@ -2963,7 +2873,6 @@ description = "Simple data validation library" optional = false python-versions = "*" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "schema-0.7.7-py2.py3-none-any.whl", hash = "sha256:5d976a5b50f36e74e2157b47097b60002bd4d42e65425fcc9c9befadb4255dde"}, {file = "schema-0.7.7.tar.gz", hash = "sha256:7da553abd2958a19dc2547c388cde53398b39196175a9be59ea1caf5ab0a1807"}, @@ -2976,7 +2885,6 @@ description = "Python helper for Semantic Versioning (http://semver.org/)" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "semver-2.13.0-py2.py3-none-any.whl", hash = "sha256:ced8b23dceb22134307c1b8abfa523da14198793d9787ac838e70e29e77458d4"}, {file = "semver-2.13.0.tar.gz", hash = "sha256:fa0fe2722ee1c3f57eac478820c3a5ae2f624af8264cbdf9000c980ff7f75e3f"}, @@ -2989,20 +2897,19 @@ description = "Easily download, build, install, upgrade, and uninstall Python pa optional = false python-versions = ">=3.9" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "setuptools-75.9.1-py3-none-any.whl", hash = "sha256:0a6f876d62f4d978ca1a11ab4daf728d1357731f978543ff18ecdbf9fd071f73"}, {file = "setuptools-75.9.1.tar.gz", hash = "sha256:b6eca2c3070cdc82f71b4cb4bb2946bc0760a210d11362278cf1ff394e6ea32c"}, ] 
[package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.8.0)"] -core = ["importlib_metadata (>=6)", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "ruff (>=0.8.0) ; sys_platform != \"cygwin\""] +core = ["importlib_metadata (>=6) ; python_version < \"3.10\"", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1) ; python_version < \"3.11\"", "wheel (>=0.43.0)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] enabler = ["pytest-enabler (>=2.2)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] -type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.14.*)", "pytest-mypy"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", 
"pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib_metadata (>=7.0.2) ; python_version < \"3.10\"", "jaraco.develop (>=7.21) ; sys_platform != \"cygwin\"", "mypy (==1.14.*)", "pytest-mypy"] [[package]] name = "six" @@ -3011,7 +2918,6 @@ description = "Python 2 and 3 compatibility utilities" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" groups = ["main", "dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, @@ -3024,7 +2930,6 @@ description = "Pretty-print tabular data" optional = false python-versions = ">=3.7" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"}, @@ -3040,7 +2945,6 @@ description = "Bump software releases" optional = false python-versions = ">=3.7,<4.0" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "tbump-6.11.0-py3-none-any.whl", hash = "sha256:6b181fe6f3ae84ce0b9af8cc2009a8bca41ded34e73f623a7413b9684f1b4526"}, {file = "tbump-6.11.0.tar.gz", hash = "sha256:385e710eedf0a8a6ff959cf1e9f3cfd17c873617132fc0ec5f629af0c355c870"}, @@ -3102,7 +3006,6 @@ description = "Style preserving TOML library" optional = false python-versions = ">=3.7" groups = ["dev"] -markers = "python_version <= \"3.11\" or 
python_version >= \"3.12\"" files = [ {file = "tomlkit-0.11.8-py3-none-any.whl", hash = "sha256:8c726c4c202bdb148667835f68d68780b9a003a9ec34167b6c673b38eff2a171"}, {file = "tomlkit-0.11.8.tar.gz", hash = "sha256:9330fc7faa1db67b541b28e62018c17d20be733177d290a13b24c62d1614e0c3"}, @@ -3115,7 +3018,7 @@ description = "List processing tools and functional utilities" optional = false python-versions = ">=3.8" groups = ["main"] -markers = "python_version <= \"3.11\" and (implementation_name == \"cpython\" or implementation_name == \"pypy\") or python_version >= \"3.12\" and (implementation_name == \"cpython\" or implementation_name == \"pypy\")" +markers = "implementation_name == \"cpython\" or implementation_name == \"pypy\"" files = [ {file = "toolz-1.0.0-py3-none-any.whl", hash = "sha256:292c8f1c4e7516bf9086f8850935c799a874039c8bcf959d47b600e4c44a6236"}, {file = "toolz-1.0.0.tar.gz", hash = "sha256:2c86e3d9a04798ac556793bced838816296a2f085017664e4995cb40a1047a02"}, @@ -3132,7 +3035,7 @@ files = [ {file = "typing_extensions-4.14.0-py3-none-any.whl", hash = "sha256:a1514509136dd0b477638fc68d6a91497af5076466ad0fa6c338e44e359944af"}, {file = "typing_extensions-4.14.0.tar.gz", hash = "sha256:8676b788e32f02ab42d9e7c61324048ae4c6d844a399eebace3d4979d75ceef4"}, ] -markers = {main = "python_version <= \"3.11\" or python_version >= \"3.12\"", dev = "python_version < \"3.11\""} +markers = {dev = "python_version < \"3.11\""} [[package]] name = "typing-inspection" @@ -3141,7 +3044,6 @@ description = "Runtime typing introspection tools" optional = false python-versions = ">=3.9" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51"}, {file = "typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28"}, @@ -3157,7 +3059,6 @@ description = "Provider of 
IANA time zone data" optional = false python-versions = ">=2" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8"}, {file = "tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9"}, @@ -3170,7 +3071,6 @@ description = "ASCII transliterations of Unicode text" optional = false python-versions = ">=3.7" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "Unidecode-1.4.0-py3-none-any.whl", hash = "sha256:c3c7606c27503ad8d501270406e345ddb480a7b5f38827eafe4fa82a137f0021"}, {file = "Unidecode-1.4.0.tar.gz", hash = "sha256:ce35985008338b676573023acc382d62c264f307c8f7963733405add37ea2b23"}, @@ -3183,14 +3083,13 @@ description = "HTTP library with thread-safe connection pooling, file post, and optional = false python-versions = ">=3.9" groups = ["main", "dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc"}, {file = "urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760"}, ] [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] @@ -3202,7 +3101,6 @@ description = "Filesystem events monitoring" optional = false python-versions = ">=3.9" groups = ["dev"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "watchdog-6.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:d1cdb490583ebd691c012b3d6dae011000fe42edb7a82ece80965b42abd61f26"}, {file = "watchdog-6.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bc64ab3bdb6a04d69d4023b29422170b74681784ffb9463ed4870cf2f3e66112"}, @@ -3246,7 +3144,6 @@ description = "web3.py" optional = false python-versions = ">=3.7.2" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "web3-6.11.0-py3-none-any.whl", hash = "sha256:44e79da6a4765eacf137f2f388e37aa0c1e24a93bdfb462cffe9441d1be3d509"}, {file = "web3-6.11.0.tar.gz", hash = "sha256:050dea52ae73d787272e7ecba7249f096595938c90cce1a384c20375c6b0f720"}, @@ -3270,7 +3167,7 @@ typing-extensions = ">=4.0.1" websockets = ">=10.0.0" [package.extras] -dev = ["black (>=22.1.0)", "build (>=0.9.0)", "bumpversion", "eth-tester[py-evm] (==v0.9.1-b.1)", "flake8 (==3.8.3)", "flaky (>=3.7.0)", "hypothesis (>=3.31.2)", "importlib-metadata (<5.0)", "ipfshttpclient (==0.8.0a2)", "isort (>=5.11.0)", "mypy (==1.4.1)", "py-geth (>=3.11.0)", "pytest (>=7.0.0)", "pytest-asyncio (>=0.18.1)", "pytest-mock (>=1.10)", "pytest-watch (>=4.2)", "pytest-xdist (>=1.29)", "setuptools (>=38.6.0)", "sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)", "tox (>=3.18.0)", "tqdm (>4.32)", "twine (>=1.13)", "types-protobuf (==3.19.13)", "types-requests (>=2.26.1)", "types-setuptools (>=57.4.4)", "when-changed (>=0.3.0)"] +dev = ["black (>=22.1.0)", "build (>=0.9.0)", "bumpversion", "eth-tester[py-evm] (==v0.9.1-b.1)", "flake8 (==3.8.3)", "flaky (>=3.7.0)", "hypothesis (>=3.31.2)", "importlib-metadata (<5.0) ; python_version < \"3.8\"", "ipfshttpclient (==0.8.0a2)", "isort (>=5.11.0)", "mypy (==1.4.1)", "py-geth (>=3.11.0)", "pytest (>=7.0.0)", "pytest-asyncio (>=0.18.1)", "pytest-mock (>=1.10)", "pytest-watch (>=4.2)", "pytest-xdist (>=1.29)", "setuptools (>=38.6.0)", "sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)", "tox (>=3.18.0)", "tqdm (>4.32)", "twine (>=1.13)", 
"types-protobuf (==3.19.13)", "types-requests (>=2.26.1)", "types-setuptools (>=57.4.4)", "when-changed (>=0.3.0)"] docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.0.0)", "towncrier (>=21,<22)"] ipfs = ["ipfshttpclient (==0.8.0a2)"] linter = ["black (>=22.1.0)", "flake8 (==3.8.3)", "isort (>=5.11.0)", "mypy (==1.4.1)", "types-protobuf (==3.19.13)", "types-requests (>=2.26.1)", "types-setuptools (>=57.4.4)"] @@ -3283,7 +3180,6 @@ description = "WebSocket client for Python with low level API options" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "websocket-client-0.59.0.tar.gz", hash = "sha256:d376bd60eace9d437ab6d7ee16f4ab4e821c9dae591e1b783c58ebd8aaf80c5c"}, {file = "websocket_client-0.59.0-py2.py3-none-any.whl", hash = "sha256:2e50d26ca593f70aba7b13a489435ef88b8fc3b5c5643c1ce8808ff9b40f0b32"}, @@ -3299,7 +3195,6 @@ description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" optional = false python-versions = ">=3.9" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "websockets-15.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d63efaa0cd96cf0c5fe4d581521d9fa87744540d4bc999ae6e08595a1014b45b"}, {file = "websockets-15.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ac60e3b188ec7574cb761b08d50fcedf9d77f1530352db4eef1707fe9dee7205"}, @@ -3379,7 +3274,6 @@ description = "Yet another URL library" optional = false python-versions = ">=3.9" groups = ["main"] -markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" files = [ {file = "yarl-1.20.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6032e6da6abd41e4acda34d75a816012717000fa6839f37124a47fcefc49bec4"}, {file = "yarl-1.20.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2c7b34d804b8cf9b214f05015c4fee2ebe7ed05cf581e7192c06555c71f4446a"}, @@ -3492,28 +3386,7 @@ 
idna = ">=2.0" multidict = ">=4.0" propcache = ">=0.2.1" -[[package]] -name = "zipp" -version = "3.23.0" -description = "Backport of pathlib-compatible object wrapper for zip files" -optional = false -python-versions = ">=3.9" -groups = ["dev"] -markers = "python_version < \"3.10\"" -files = [ - {file = "zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e"}, - {file = "zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166"}, -] - -[package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] -cover = ["pytest-cov"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -enabler = ["pytest-enabler (>=2.2)"] -test = ["big-O", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more_itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] -type = ["pytest-mypy"] - [metadata] lock-version = "2.1" -python-versions = ">=3.9,<=3.12" -content-hash = "51c56da059c0ddc0152a67ba6dbfe9ccd611075194caf9c8520e7341d7ae81d7" +python-versions = ">=3.10,<=3.12" +content-hash = "87604146d8dc7947d3e2e6ae74575cee04664dd1d816ffd7815a6ca5da30c9c7" diff --git a/pyproject.toml b/pyproject.toml index 50f15c1c..22bba1f1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -7,7 +7,7 @@ readme = "README.md" packages = [{ include = "derive_client" }] [tool.poetry.dependencies] -python = ">=3.9,<=3.12" +python = ">=3.10,<=3.12" requests = "^2" web3 = { version = ">=6,<8" } websocket-client = ">=0.32.0,<1" @@ -19,6 +19,7 @@ eth-account = ">=0.13" derive-action-signing = "^0.0.12" pydantic = "^2.11.3" aiolimiter = "^1.2.1" +returns = "^0.26.0" [tool.poetry.scripts] drv = "derive_client.cli:cli" diff --git a/tests/test_w3.py b/tests/test_w3.py index a807fb5e..25dffa2c 100644 --- a/tests/test_w3.py +++ b/tests/test_w3.py @@ -11,7 +11,8 @@ from derive_client.constants import DEFAULT_RPC_ENDPOINTS from 
derive_client.data_types import ChainID, EthereumJSONRPCErrorCode -from derive_client.utils import get_logger, load_rpc_endpoints, make_rotating_provider_middleware +from derive_client.utils import get_logger, load_rpc_endpoints +from derive_client.utils.w3 import make_rotating_provider_middleware RPC_ENDPOINTS = list(load_rpc_endpoints(DEFAULT_RPC_ENDPOINTS).model_dump().items())