diff --git a/README.md b/README.md index de7ddd79..0d7a8499 100644 --- a/README.md +++ b/README.md @@ -98,14 +98,14 @@ operator COMMAND --flagA=123 --flagB=xyz Pull the latest docker Operator docker image: ```bash -docker pull europe-west4-docker.pkg.dev/stakewiselabs/public/v3-operator:v4.1.8 +docker pull europe-west4-docker.pkg.dev/stakewiselabs/public/v3-operator:v4.1.9 ``` You can also build the docker image from source by cloning this repo and executing the following command from within the `v3-operator` folder: ```bash -docker build --pull -t europe-west4-docker.pkg.dev/stakewiselabs/public/v3-operator:v4.1.8 . +docker build --pull -t europe-west4-docker.pkg.dev/stakewiselabs/public/v3-operator:v4.1.9 . ``` You will execute Operator Service commands using the format below (note the use of flags are optional): @@ -114,7 +114,7 @@ You will execute Operator Service commands using the format below (note the use docker run --rm -ti \ -u $(id -u):$(id -g) \ -v ~/.stakewise/:/data \ -europe-west4-docker.pkg.dev/stakewiselabs/public/v3-operator:v4.1.8 \ +europe-west4-docker.pkg.dev/stakewiselabs/public/v3-operator:v4.1.9 \ src/main.py COMMAND \ --flagA=123 \ --flagB=xyz diff --git a/pyproject.toml b/pyproject.toml index 2c0e7a6c..8a0686cc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "v3-operator" -version = "v4.1.8" +version = "v4.1.9" description = "StakeWise operator service for registering vault validators" authors = ["StakeWise Labs "] package-mode = false diff --git a/scripts/install.sh b/scripts/install.sh index 15bbfb9d..181b87ad 100755 --- a/scripts/install.sh +++ b/scripts/install.sh @@ -288,7 +288,7 @@ http_copy() { github_release() { owner_repo=$1 version=$2 - test -z "$version" && version="v4.1.8" + test -z "$version" && version="v4.1.9" giturl="https://github.com/${owner_repo}/releases/${version}" json=$(http_copy "$giturl" "Accept:application/json") test -z "$json" && return 1 diff --git 
a/src/commands/consolidate.py b/src/commands/consolidate.py index 4047b8a6..d7827e24 100644 --- a/src/commands/consolidate.py +++ b/src/commands/consolidate.py @@ -15,7 +15,6 @@ from src.common.consolidations import ( get_consolidation_request_fee, get_consolidations_count, - get_pending_consolidations, ) from src.common.contracts import VaultContract from src.common.execution import check_gas_price @@ -30,15 +29,15 @@ validate_public_keys_file, ) from src.common.wallet import wallet -from src.common.withdrawals import get_pending_partial_withdrawals from src.config.config import OperatorConfig from src.config.networks import AVAILABLE_NETWORKS, GNOSIS, MAINNET, NETWORKS from src.config.settings import DEFAULT_MAX_CONSOLIDATION_REQUEST_FEE_GWEI, settings -from src.validators.consensus import EXITING_STATUSES, fetch_consensus_validators +from src.validators.consolidation_manager import ConsolidationManager +from src.validators.exceptions import ConsolidationError from src.validators.oracles import poll_consolidation_signature from src.validators.register_validators import submit_consolidate_validators from src.validators.relayer import RelayerClient -from src.validators.typings import ConsensusValidator +from src.validators.typings import ConsolidationKeys logger = logging.getLogger(__name__) @@ -204,11 +203,16 @@ def consolidate( raise click.ClickException( 'Provide only one parameter: either --source-public-keys-file or --source-public-keys.' ) - if not any([source_public_keys, source_public_keys_file]) and target_public_key: + if not (source_public_keys or source_public_keys_file) and target_public_key: raise click.ClickException( 'One of these parameters must be provided with target-public-key:' ' --source-public-keys-file or --source-public-keys.' 
) + if (source_public_keys or source_public_keys_file) and not target_public_key: + raise click.ClickException( + '--target-public-key must be provided when using' + ' --source-public-keys-file or --source-public-keys.' + ) if source_public_keys_file: source_public_keys = _load_public_keys(source_public_keys_file) @@ -253,7 +257,6 @@ def consolidate( try: asyncio.run( main( - vault_address=vault, source_public_keys=source_public_keys, target_public_key=target_public_key, exclude_public_keys=exclude_public_keys, @@ -269,7 +272,6 @@ def consolidate( # pylint: disable-next=too-many-arguments async def main( - vault_address: ChecksumAddress, source_public_keys: list[HexStr] | None, target_public_key: HexStr | None, exclude_public_keys: set[HexStr], @@ -281,7 +283,6 @@ async def main( await setup_clients() try: await process( - vault_address=vault_address, source_public_keys=source_public_keys, target_public_key=target_public_key, exclude_public_keys=exclude_public_keys, @@ -295,7 +296,6 @@ async def main( # pylint: disable-next=too-many-locals,too-many-arguments async def process( - vault_address: ChecksumAddress, source_public_keys: list[HexStr] | None, target_public_key: HexStr | None, exclude_public_keys: set[HexStr], @@ -312,27 +312,26 @@ async def process( """ chain_head = await get_chain_latest_head() - await _check_validators_manager(vault_address) + await _check_validators_manager() await _check_consolidations_queue(chain_head) - if source_public_keys is not None and target_public_key is not None: - # keys provided by the user - target_source = await _check_public_keys( - vault_address=vault_address, + + consolidation_keys = None + if source_public_keys and target_public_key: + consolidation_keys = ConsolidationKeys( source_public_keys=source_public_keys, target_public_key=target_public_key, - chain_head=chain_head, - ) - - else: - target_source = await _find_target_source_public_keys( - vault_address=vault_address, - chain_head=chain_head, - 
exclude_public_keys=exclude_public_keys, ) - if not target_source: - raise click.ClickException( - f'Validators in vault {vault_address} can\'t be consolidated' - ) + consolidation_manager = await ConsolidationManager.create( + consolidation_keys=consolidation_keys, + chain_head=chain_head, + exclude_public_keys=exclude_public_keys, + ) + try: + target_source = consolidation_manager.get_target_source() + except ConsolidationError as e: + raise click.ClickException(str(e)) + if not target_source: + raise click.ClickException(f'Validators in vault {settings.vault} can\'t be consolidated') for target_validator, source_validator in target_source: if source_validator.index == target_validator.index: @@ -384,13 +383,13 @@ async def process( # The oracles signatures are only required when switching from 0x01 to 0x02 oracle_signatures = await poll_consolidation_signature( target_public_keys=[target_source_public_keys[0][0]], - vault=vault_address, + vault=settings.vault, protocol_config=protocol_config, ) encoded_validators = _encode_validators(target_source_public_keys) validators_manager_signature = await _get_validators_manager_signature( - vault_address, target_source_public_keys + target_source_public_keys ) tx_hash = await submit_consolidate_validators( @@ -408,128 +407,10 @@ async def process( ) -# pylint: disable-next=too-many-branches,too-many-locals -async def _check_public_keys( - vault_address: ChecksumAddress, - source_public_keys: list[HexStr], - target_public_key: HexStr, - chain_head: ChainHead, -) -> list[tuple[ConsensusValidator, ConsensusValidator]]: - """ - Validate that provided public keys can be consolidated - and returns the target and source validators info. 
- """ - logger.info('Checking selected validators for consolidation...') - - # Validate that source public keys are unique - if len(source_public_keys) != len(set(source_public_keys)): - raise click.ClickException('Source public keys must be unique.') - - # Validate the switch from 0x01 to 0x02 and consolidation to another validator - if len(source_public_keys) > 1 and target_public_key in source_public_keys: - raise click.ClickException( - 'Cannot switch from 0x01 to 0x02 and consolidate ' - 'to another validator in the same request.' - ) - - # Fetch source and target validators - validators = await fetch_consensus_validators( - list(set(source_public_keys + [target_public_key])) - ) - pubkey_to_validator = {val.public_key: val for val in validators} - - source_validators: list[ConsensusValidator] = [] - max_activation_epoch = chain_head.epoch - settings.network_config.SHARD_COMMITTEE_PERIOD - - current_consolidations = await get_pending_consolidations(chain_head, validators) - consolidating_indexes: set[int] = set() - for cons in current_consolidations: - consolidating_indexes.add(cons.source_index) - - # Validate source public keys - for source_public_key in source_public_keys: - source_validator = pubkey_to_validator.get(source_public_key) - - if not source_validator: - raise click.ClickException( - f'Validator {source_public_key} not found in the consensus layer.' - ) - - # Validate the source validator status - if source_validator.status in EXITING_STATUSES: - raise click.ClickException( - f'Validator {source_public_key} is in exiting ' - f'status {source_validator.status.value}.' - ) - - # Validate the source has been active long enough - if source_validator.activation_epoch > max_activation_epoch: - raise click.ClickException( - f'Validator {source_validator.public_key} is not active enough for consolidation. ' - f'It must be active for at least ' - f'{settings.network_config.SHARD_COMMITTEE_PERIOD} epochs before consolidation.' 
- ) - - # Validate the source validator is not consolidating - if source_validator.index in consolidating_indexes: - raise click.ClickException( - f'Validator {source_validator.public_key} is consolidating to another validator.' - ) - source_validators.append(source_validator) - - # Validate target public key - target_validator = pubkey_to_validator.get(target_public_key) - if not target_validator: - raise click.ClickException( - f'Target validator {target_public_key} not found in the consensus layer.' - ) - if target_validator.status in EXITING_STATUSES: - raise click.ClickException( - f'Target validator {target_public_key} is in exiting ' - f'status {target_validator.status.value}.' - ) - if target_validator.index in consolidating_indexes: - raise click.ClickException( - f'Target validator {target_public_key} is consolidating to another validator.' - ) - - # Validate that target validator is a compounding validator. - # Not required for a switch from 0x01 to 0x02. - if not _is_switch_to_compounding(source_public_keys, target_public_key): - if not target_validator.is_compounding: - raise click.ClickException( - f'The target validator {target_public_key} is not a compounding validator.' - ) - - # Validate the source validators has no pending withdrawals in the queue - await _check_pending_balance_to_withdraw(chain_head, source_validators) - - # Validate the source and target validators are in the vault - logger.info('Fetching vault validators...') - vault_validators = await VaultContract(vault_address).get_registered_validators_public_keys( - from_block=settings.vault_first_block, - to_block=chain_head.block_number, - ) - for public_keys in source_public_keys + [target_public_key]: - if public_keys not in vault_validators: - raise click.ClickException( - f'Validator {public_keys} is not registered in the vault {vault_address}.' 
- ) - - # Validate the total balance won't exceed the max effective balance - if sum(val.balance for val in validators) > settings.max_validator_balance_gwei: - raise click.ClickException( - 'Cannot consolidate validators,' - f' total balance exceed {settings.max_validator_balance_gwei} Gwei' - ) - - return [(target_validator, source_validator) for source_validator in source_validators] - - -async def _check_validators_manager(vault_address: ChecksumAddress) -> None: +async def _check_validators_manager() -> None: if settings.relayer_endpoint: return - vault_contract = VaultContract(vault_address) + vault_contract = VaultContract(settings.vault) validators_manager = await vault_contract.validators_manager() if validators_manager != wallet.account.address: raise click.ClickException( @@ -546,18 +427,6 @@ async def _check_consolidations_queue(chain_head: ChainHead) -> None: ) -async def _check_pending_balance_to_withdraw( - chain_head: ChainHead, validators: list[ConsensusValidator] -) -> None: - """Verify the source validators has no pending withdrawals in the queue""" - pending_partial_withdrawals = await get_pending_partial_withdrawals(chain_head, validators) - if pending_partial_withdrawals: - indexes = ', '.join(str(w.validator_index) for w in pending_partial_withdrawals) - raise click.ClickException( - f'Validators with indexes {indexes} have pending partial withdrawals in the queue. 
' - ) - - def _encode_validators(target_source_public_keys: list[tuple[HexStr, HexStr]]) -> bytes: validators_data = b'' for target_key, source_key in target_source_public_keys: @@ -566,10 +435,6 @@ def _encode_validators(target_source_public_keys: list[tuple[HexStr, HexStr]]) - return validators_data -def _is_switch_to_compounding(source_public_keys: list[HexStr], target_public_key: HexStr) -> bool: - return len(source_public_keys) == 1 and source_public_keys[0] == target_public_key - - def _load_public_keys(public_keys_file: Path) -> list[HexStr]: """Loads public keys from file.""" with open(public_keys_file, 'r', encoding='utf-8') as f: @@ -578,117 +443,15 @@ def _load_public_keys(public_keys_file: Path) -> list[HexStr]: return public_keys -# pylint: disable-next=too-many-locals -async def _find_target_source_public_keys( - vault_address: ChecksumAddress, - chain_head: ChainHead, - exclude_public_keys: set[HexStr], -) -> list[tuple[ConsensusValidator, ConsensusValidator]]: - """ - If there are no 0x02 validators, - take the oldest 0x01 validator and convert it to 0x02 with confirmation prompt. - If there is 0x02 validator, - take the oldest 0x01 validators to top up its balance to 2048 ETH / 64 GNO. 
- """ - logger.info('Fetching vault validators...') - vault_contract = VaultContract(vault_address) - public_keys = await vault_contract.get_registered_validators_public_keys( - from_block=settings.vault_first_block, - to_block=chain_head.block_number, - ) - all_validators = await fetch_consensus_validators(public_keys) - - # use all validators to fetch all the consolidations - # including the ones were source validator is exiting - current_consolidations = await get_pending_consolidations(chain_head, all_validators) - consolidating_indexes: set[int] = set() - for cons in current_consolidations: - consolidating_indexes.add(cons.source_index) - consolidating_indexes.add(cons.target_index) - - # Candidates on the role of either source or target validator - validator_candidates: list[ConsensusValidator] = [] - - for val in all_validators: - if val.status in EXITING_STATUSES: - continue - if val.index in consolidating_indexes: - continue - if val.public_key in exclude_public_keys: - continue - validator_candidates.append(val) - - if not validator_candidates: - return [] - - source_validators = await _get_source_validators( - chain_head=chain_head, - validator_candidates=validator_candidates, - ) - if not source_validators: - return [] - - source_validators.sort(key=lambda val: val.activation_epoch) - target_validator_candidates = [val for val in validator_candidates if val.is_compounding] - - if not target_validator_candidates: - # there are no 0x02 validators, switch the oldest 0x01 to 0x02 - return [(source_validators[0], source_validators[0])] - - # there is at least one 0x02 validator, top up the one with smallest balance - target_validator = min(target_validator_candidates, key=lambda val: val.balance) - - selected_source_validators: list[ConsensusValidator] = [] - target_balance = target_validator.balance - - for val in source_validators: - if target_balance + val.balance > settings.max_validator_balance_gwei: - break - selected_source_validators.append(val) - 
target_balance += val.balance # type: ignore - - if selected_source_validators: - return [(target_validator, val) for val in selected_source_validators] - - # Target validator is almost full, switch the oldest 0x01 to 0x02 - return [(source_validators[0], source_validators[0])] - - -async def _get_source_validators( - chain_head: ChainHead, - validator_candidates: list[ConsensusValidator], -) -> list[ConsensusValidator]: - max_activation_epoch = chain_head.epoch - settings.network_config.SHARD_COMMITTEE_PERIOD - - pending_partial_withdrawals = await get_pending_partial_withdrawals( - chain_head=chain_head, consensus_validators=validator_candidates - ) - pending_partial_withdrawals_indexes = { - withdrawal.validator_index for withdrawal in pending_partial_withdrawals - } - - source_validators = [] - for val in validator_candidates: - if val.is_compounding: - continue - if val.activation_epoch >= max_activation_epoch: - continue - if val.index in pending_partial_withdrawals_indexes: - continue - source_validators.append(val) - - return source_validators - - async def _get_validators_manager_signature( - vault_address: ChecksumAddress, target_source_public_keys: list[tuple[HexStr, HexStr]] + target_source_public_keys: list[tuple[HexStr, HexStr]] ) -> HexStr: if not settings.relayer_endpoint: return HexStr('0x') relayer = RelayerClient() # fetch validator manager signature from relayer relayer_response = await relayer.consolidate_validators( - vault_address=vault_address, + vault_address=settings.vault, target_source_public_keys=target_source_public_keys, ) if not relayer_response.validators_manager_signature: diff --git a/src/validators/consolidation_manager.py b/src/validators/consolidation_manager.py new file mode 100644 index 00000000..3d062392 --- /dev/null +++ b/src/validators/consolidation_manager.py @@ -0,0 +1,362 @@ +import logging +from abc import ABC, abstractmethod + +from eth_typing import HexStr +from sw_utils import ChainHead +from web3.types import Gwei 
+ +from src.common.consolidations import get_pending_consolidations +from src.common.contracts import VaultContract +from src.common.withdrawals import get_pending_partial_withdrawals +from src.config.settings import settings +from src.validators.consensus import EXITING_STATUSES, fetch_consensus_validators +from src.validators.exceptions import ConsolidationError +from src.validators.typings import ConsensusValidator, ConsolidationKeys + +logger = logging.getLogger(__name__) + + +class ConsolidationManager(ABC): + chain_head: ChainHead + vault_validators: list[HexStr] + consensus_validators: list[ConsensusValidator] + consolidating_source_indexes: set[int] + consolidating_target_indexes: set[int] + pending_partial_withdrawals_indexes: set[int] + exclude_public_keys: set[HexStr] + + @classmethod + async def create( + cls, + consolidation_keys: ConsolidationKeys | None, + chain_head: ChainHead, + exclude_public_keys: set[HexStr], + ) -> 'ConsolidationManager': + # Instance to create + self: ConsolidationManager + + # Switch to "check" logic or "select" logic + if consolidation_keys is not None: + self = ConsolidationChecker( + consolidation_keys=consolidation_keys, + chain_head=chain_head, + ) + else: + self = ConsolidationSelector( + chain_head=chain_head, + exclude_public_keys=exclude_public_keys, + ) + + # Fetch vault validators + logger.info('Fetching vault validators...') + self.vault_validators = await VaultContract( + settings.vault + ).get_registered_validators_public_keys( + from_block=settings.vault_first_block, + to_block=self.chain_head.block_number, + ) + + # Fetch consensus validators + if consolidation_keys is not None: + self.consensus_validators = await fetch_consensus_validators( + consolidation_keys.all_public_keys + ) + else: + self.consensus_validators = await fetch_consensus_validators(self.vault_validators) + + # Pending consolidations + pending_consolidations = await get_pending_consolidations( + chain_head, self.consensus_validators + ) + 
self.consolidating_source_indexes = set() + self.consolidating_target_indexes = set() + for cons in pending_consolidations: + self.consolidating_source_indexes.add(cons.source_index) + self.consolidating_target_indexes.add(cons.target_index) + + # Pending withdrawals + pending_partial_withdrawals = await get_pending_partial_withdrawals( + chain_head, self.consensus_validators + ) + self.pending_partial_withdrawals_indexes = set() + for withdrawal in pending_partial_withdrawals: + self.pending_partial_withdrawals_indexes.add(withdrawal.validator_index) + + return self + + @abstractmethod + def get_target_source(self) -> list[tuple[ConsensusValidator, ConsensusValidator]]: + """ + # Source validators must be: + - unique + - in the vault + - not exiting + - active for at least SHARD_COMMITTEE_PERIOD epochs + - not consolidating to another validator + - not consolidating from another validator + - no pending partial withdrawals in the queue + - total balance not exceeding the max effective balance when consolidated + # Target validator must be: + - in the vault + - not exiting + - not consolidating to another validator + - a compounding validator + + # For switch from 0x01 to 0x02: + - source and target public keys are the same + - in the vault + - not exiting + - active for at least SHARD_COMMITTEE_PERIOD epochs + """ + raise NotImplementedError() + + @property + def max_activation_epoch(self) -> int: + return self.chain_head.epoch - settings.network_config.SHARD_COMMITTEE_PERIOD + + +class ConsolidationSelector(ConsolidationManager): + """ + Suited for the case when the user doesn't specify which validators to consolidate. + ConsolidationSelector picks the most appropriate validators for consolidation. 
+ """ + + def __init__( + self, + chain_head: ChainHead, + exclude_public_keys: set[HexStr], + ): + self.chain_head = chain_head + self.exclude_public_keys = exclude_public_keys + + def get_target_source(self) -> list[tuple[ConsensusValidator, ConsensusValidator]]: + """ + If there are no 0x02 validators, + take the oldest 0x01 validator and convert it to 0x02. + If there is a 0x02 validator, + take the oldest 0x01 validators to top up the target's balance to MAX BALANCE. + """ + # Candidates on the role of either source or target validator + ( + source_validators_candidates, + target_validator_candidates, + ) = self._find_validators_candidates() + if not source_validators_candidates or not target_validator_candidates: + return [] + + source_validators_candidates.sort(key=lambda val: val.activation_epoch) + target_validator_candidates = [ + val for val in target_validator_candidates if val.is_compounding + ] + if not target_validator_candidates: + # there are no 0x02 validators, switch the oldest 0x01 to 0x02 + return [(source_validators_candidates[0], source_validators_candidates[0])] + + # there is at least one 0x02 validator, top up the one with smallest balance + target_validator = min(target_validator_candidates, key=lambda val: val.balance) + + selected_source_validators: list[ConsensusValidator] = [] + target_balance = target_validator.balance + + for val in source_validators_candidates: + if target_balance + val.balance > settings.max_validator_balance_gwei: + break + selected_source_validators.append(val) + target_balance = Gwei(target_balance + val.balance) + + if selected_source_validators: + return [(target_validator, val) for val in selected_source_validators] + + # Target validator is almost full, switch the oldest 0x01 to 0x02 + # Only do this if there are source validators available + return [(source_validators_candidates[0], source_validators_candidates[0])] + + def _find_validators_candidates( + self, + ) -> tuple[list[ConsensusValidator], 
list[ConsensusValidator]]: + source_validators: list[ConsensusValidator] = [] + target_validators: list[ConsensusValidator] = [] + for val in self.consensus_validators: + if val.status in EXITING_STATUSES: + continue + # Exclude validators that are sources in ongoing consolidations + if val.index in self.consolidating_source_indexes: + continue + if val.public_key in self.exclude_public_keys: + continue + target_validators.append(val) + + # additional filters for source validators + # Source validator must be non-compounding + if val.is_compounding: + continue + if val.activation_epoch > self.max_activation_epoch: + continue + # Source validator cannot be in any ongoing consolidations (either as source or target) + if val.index in self.consolidating_target_indexes: + continue + if val.index in self.pending_partial_withdrawals_indexes: + continue + source_validators.append(val) + return source_validators, target_validators + + +class ConsolidationChecker(ConsolidationManager): + """ + Suited for the case when the user specifies which validators to consolidate. + We have to check if they are valid for consolidation. + """ + + def __init__( + self, + consolidation_keys: ConsolidationKeys, + chain_head: ChainHead, + ): + self.consolidation_keys = consolidation_keys + self.chain_head = chain_head + + def get_target_source(self) -> list[tuple[ConsensusValidator, ConsensusValidator]]: + """ + Validate that provided public keys can be consolidated + and return the target and source validators info. + """ + logger.info('Checking selected validators for consolidation...') + self._validate_public_keys() + + # Validate the source and target validators are in the vault + for public_key in self.source_public_keys + [self.target_public_key]: + if public_key not in self.vault_validators: + raise ConsolidationError( + f'Validator {public_key} is not registered in the vault {settings.vault}.' 
+ ) + + # Validate target public key + target_validator = self._validate_target_validator() + if self.is_switch_to_compounding(): + return [(target_validator, target_validator)] + + # Validate source public keys + pubkey_to_validator = {val.public_key: val for val in self.consensus_validators} + source_validators: list[ConsensusValidator] = [] + for source_public_key in self.source_public_keys: + source_validator = pubkey_to_validator.get(source_public_key) + if not source_validator: + raise ConsolidationError( + f'Validator {source_public_key} not found in the consensus layer.' + ) + + # Validate the source validator status + if source_validator.status in EXITING_STATUSES: + raise ConsolidationError( + f'Validator {source_public_key} is in exiting ' + f'status {source_validator.status.value}.' + ) + + # Validate the source validator has been active long enough + if source_validator.activation_epoch > self.max_activation_epoch: + raise ConsolidationError( + f'Validator {source_validator.public_key} ' + f'is not active enough for consolidation. ' + f'It must be active for at least ' + f'{settings.network_config.SHARD_COMMITTEE_PERIOD} ' + f'epochs before consolidation.' + ) + + # Validate the source validator is not consolidating + if ( + source_validator.index in self.consolidating_source_indexes + or source_validator.index in self.consolidating_target_indexes + ): + raise ConsolidationError( + f'Validator {source_validator.public_key} ' + f'is consolidating to another validator.' + ) + + # Validate the source validator has no pending withdrawals in the queue + if source_validator.index in self.pending_partial_withdrawals_indexes: + raise ConsolidationError( + f'Validator {source_validator.public_key} ' + f'has pending partial withdrawals in the queue.' 
+ ) + + source_validators.append(source_validator) + + # Validate the total balance won't exceed the max effective balance + if ( + sum(val.balance for val in self.consensus_validators) + > settings.max_validator_balance_gwei + ): + raise ConsolidationError( + 'Cannot consolidate validators,' + f' total balance exceeds {settings.max_validator_balance_gwei} Gwei' + ) + + return [(target_validator, source_validator) for source_validator in source_validators] + + def _validate_public_keys(self) -> None: + # Validate that source public keys are unique + if len(self.source_public_keys) != len(set(self.source_public_keys)): + raise ConsolidationError('Source public keys must be unique.') + + # Reject combining switch from 0x01 to 0x02 with consolidation to another validator + if len(self.source_public_keys) > 1 and self.target_public_key in self.source_public_keys: + raise ConsolidationError( + 'Cannot switch from 0x01 to 0x02 and consolidate ' + 'to another validator in the same request.' + ) + + def _validate_target_validator( + self, + ) -> ConsensusValidator: + target_validators = [ + val for val in self.consensus_validators if val.public_key == self.target_public_key + ] + if not target_validators: + raise ConsolidationError( + f'Validator {self.target_public_key} not found in the consensus layer.' + ) + target_validator = target_validators[0] + if target_validator.status in EXITING_STATUSES: + raise ConsolidationError( + f'Target validator {self.target_public_key} is in exiting ' + f'status {target_validator.status.value}.' + ) + # Target validator cannot be used as source in ongoing consolidations + if target_validator.index in self.consolidating_source_indexes: + raise ConsolidationError( + f'Target validator {self.target_public_key} is involved in another consolidation.' + ) + + if self.is_switch_to_compounding(): + if target_validator.is_compounding: + raise ConsolidationError( + f'Target validator {self.target_public_key} is already a compounding validator.' 
+ ) + # switch the 0x01 to 0x02 + if target_validator.activation_epoch > self.max_activation_epoch: + raise ConsolidationError( + f'Validator {self.target_public_key} is not active enough for consolidation. ' + f'It must be active for at least ' + f'{settings.network_config.SHARD_COMMITTEE_PERIOD} epochs before consolidation.' + ) + else: + if not target_validator.is_compounding: + raise ConsolidationError( + f'The target validator {self.target_public_key} ' + f'is not a compounding validator.' + ) + return target_validator + + def is_switch_to_compounding(self) -> bool: + return ( + len(self.source_public_keys) == 1 + and self.source_public_keys[0] == self.target_public_key + ) + + @property + def source_public_keys(self) -> list[HexStr]: + return self.consolidation_keys.source_public_keys + + @property + def target_public_key(self) -> HexStr: + return self.consolidation_keys.target_public_key diff --git a/src/validators/exceptions.py b/src/validators/exceptions.py index bffcc848..04b6260b 100644 --- a/src/validators/exceptions.py +++ b/src/validators/exceptions.py @@ -10,3 +10,6 @@ class EmptyRelayerResponseException(Exception): ... class FundingException(Exception): ... + + +class ConsolidationError(Exception): ... 
diff --git a/src/validators/tests/test_consolidation_manager.py b/src/validators/tests/test_consolidation_manager.py new file mode 100644 index 00000000..e70e2744 --- /dev/null +++ b/src/validators/tests/test_consolidation_manager.py @@ -0,0 +1,773 @@ +from unittest.mock import patch + +import pytest +from eth_typing import HexStr +from sw_utils import ChainHead +from sw_utils.tests import faker + +from src.common.tests.factories import create_chain_head +from src.common.tests.utils import ether_to_gwei +from src.config.settings import settings +from src.validators.consensus import EXITING_STATUSES +from src.validators.consolidation_manager import ( + ConsolidationChecker, + ConsolidationSelector, +) +from src.validators.exceptions import ConsolidationError +from src.validators.tests.factories import create_consensus_validator +from src.validators.typings import ConsensusValidator, ConsolidationKeys + + +@pytest.mark.usefixtures('fake_settings') +class TestConsolidationSelector: + def test_empty_list_when_no_target_validators(self): + selector = create_manager( + vault_validators=[], + consensus_validators=[], + ) + result = selector.get_target_source() + assert result == [] + + def test_switches_oldest_0x01_to_0x02(self): + consensus_validators = [ + create_consensus_validator( + activation_epoch=1, + is_compounding=False, + ), + ] + selector = create_manager( + vault_validators=[v.public_key for v in consensus_validators], + consensus_validators=consensus_validators, + ) + result = selector.get_target_source() + assert result == [(consensus_validators[0], consensus_validators[0])] + + def test_consolidation_with_single_compounding(self): + consensus_validators = [ + create_consensus_validator( + activation_epoch=1, + is_compounding=False, + ), + create_consensus_validator( + activation_epoch=1, + is_compounding=True, + ), + ] + selector = create_manager( + vault_validators=[v.public_key for v in consensus_validators], + consensus_validators=consensus_validators, 
+ ) + result = selector.get_target_source() + assert result == [(consensus_validators[1], consensus_validators[0])] + + def test_consolidation_to_smallest_balance(self): + consensus_validators = [ + create_consensus_validator( + activation_epoch=1, + is_compounding=False, + ), + create_consensus_validator( + activation_epoch=1, is_compounding=True, balance=ether_to_gwei(32.1) + ), + create_consensus_validator( + activation_epoch=1, is_compounding=True, balance=ether_to_gwei(32.3) + ), + ] + selector = create_manager( + vault_validators=[v.public_key for v in consensus_validators], + consensus_validators=consensus_validators, + ) + result = selector.get_target_source() + assert result == [(consensus_validators[1], consensus_validators[0])] + + def test_consolidation_max_balance(self): + consensus_validators = [ + create_consensus_validator( + activation_epoch=1, is_compounding=True, balance=ether_to_gwei(32.0) + ), + create_consensus_validator( + activation_epoch=1, is_compounding=False, balance=ether_to_gwei(32.1) + ), + create_consensus_validator( + activation_epoch=2, is_compounding=False, balance=ether_to_gwei(32.2) + ), + create_consensus_validator( + activation_epoch=3, is_compounding=False, balance=ether_to_gwei(32.3) + ), + ] + + with patch.object(settings, 'max_validator_balance_gwei', ether_to_gwei(100)): + selector = create_manager( + vault_validators=[v.public_key for v in consensus_validators], + consensus_validators=consensus_validators, + ) + result = selector.get_target_source() + assert result == [ + (consensus_validators[0], consensus_validators[1]), + (consensus_validators[0], consensus_validators[2]), + ] + + def test_excludes_consolidating_validators(self): + consensus_validators = [ + create_consensus_validator( + index=10, + activation_epoch=1, + is_compounding=False, + ), + create_consensus_validator( + index=11, + activation_epoch=1, + is_compounding=True, + ), + ] + selector = create_manager( + vault_validators=[v.public_key for v in 
consensus_validators], + consensus_validators=consensus_validators, + consolidating_source_indexes={10, 11}, + consolidating_target_indexes={10, 11}, + ) + result = selector.get_target_source() + assert result == [] + + def test_excludes_pending_partial_withdrawals(self): + consensus_validators = [ + create_consensus_validator( + index=10, + activation_epoch=1, + is_compounding=False, + ), + create_consensus_validator( + index=11, + activation_epoch=1, + is_compounding=True, + ), + ] + selector = create_manager( + vault_validators=[v.public_key for v in consensus_validators], + consensus_validators=consensus_validators, + pending_partial_withdrawals_indexes={10, 11}, + ) + result = selector.get_target_source() + assert result == [] + + def test_excludes_specified_public_keys(self): + consensus_validators = [ + create_consensus_validator( + index=10, + activation_epoch=1, + is_compounding=False, + ), + ] + selector = create_manager( + vault_validators=[v.public_key for v in consensus_validators], + consensus_validators=consensus_validators, + exclude_public_keys={consensus_validators[0].public_key}, + ) + result = selector.get_target_source() + assert result == [] + + def test_excludes_exiting_validators(self): + consensus_validators = [ + create_consensus_validator( + activation_epoch=1, + is_compounding=False, + status=status, + ) + for status in EXITING_STATUSES + ] + selector = create_manager( + vault_validators=[v.public_key for v in consensus_validators], + consensus_validators=consensus_validators, + ) + result = selector.get_target_source() + assert result == [] + + def test_min_activation_epoch(self): + epoch = 1000 + consensus_validators = [ + create_consensus_validator( + index=10, + activation_epoch=epoch - settings.network_config.SHARD_COMMITTEE_PERIOD + 1, + is_compounding=False, + ), + ] + selector = create_manager( + chain_head=create_chain_head(epoch), + vault_validators=[v.public_key for v in consensus_validators], + 
consensus_validators=consensus_validators, + ) + result = selector.get_target_source() + assert result == [] + + def test_excludes_source_as_target_validator(self): + """Test that target validator is excluded if it's in consolidating_source_indexes""" + consensus_validators = [ + create_consensus_validator( + index=10, + activation_epoch=1, + is_compounding=True, + ), + create_consensus_validator( + index=11, + activation_epoch=1, + is_compounding=False, + ), + ] + selector = create_manager( + vault_validators=[v.public_key for v in consensus_validators], + consensus_validators=consensus_validators, + consolidating_source_indexes={ + 10 + }, # index 10 is in source consolidation, so can't be target + consolidating_target_indexes=set(), + ) + result = selector.get_target_source() + # Should switch validator 11 from 0x01 to 0x02 since there are no valid targets + # but validator 11 is available as source + assert result == [(consensus_validators[1], consensus_validators[1])] + + def test_excludes_source_validator_in_both_indexes(self): + """Test that source validator is excluded if it's in either source or target consolidating indexes""" + consensus_validators = [ + create_consensus_validator( + index=10, + activation_epoch=1, + is_compounding=False, + ), + create_consensus_validator( + index=11, + activation_epoch=1, + is_compounding=True, + ), + ] + selector = create_manager( + vault_validators=[v.public_key for v in consensus_validators], + consensus_validators=consensus_validators, + consolidating_source_indexes={10}, # index 10 is in source consolidation + consolidating_target_indexes=set(), + ) + result = selector.get_target_source() + # Should be empty because the only potential source (index 10) is excluded + # since it's in consolidating_source_indexes + assert result == [] + + # Test with target indexes too + selector = create_manager( + vault_validators=[v.public_key for v in consensus_validators], + consensus_validators=consensus_validators, + 
consolidating_source_indexes=set(), + consolidating_target_indexes={10}, # index 10 is in target consolidation + ) + result = selector.get_target_source() + # Should still be empty because the only potential source (index 10) is excluded + # since it's in consolidating_target_indexes + assert result == [] + + def test_allows_validator_in_target_indexes_as_target(self): + """Test that target validator is not excluded if it's in consolidating_target_indexes""" + consensus_validators = [ + create_consensus_validator( + index=10, + activation_epoch=1, + is_compounding=True, + ), + create_consensus_validator( + index=11, + activation_epoch=1, + is_compounding=False, + ), + ] + selector = create_manager( + vault_validators=[v.public_key for v in consensus_validators], + consensus_validators=consensus_validators, + consolidating_source_indexes=set(), + consolidating_target_indexes={10}, + ) + result = selector.get_target_source() + # Validator 10 can still be target even if already in consolidating_target_indexes, + # so validator 11 consolidates into validator 10 + assert result == [(consensus_validators[0], consensus_validators[1])] + + def test_excludes_validator_in_target_indexes_as_source(self): + """Test that source validator is excluded if it's in consolidating_target_indexes""" + consensus_validators = [ + create_consensus_validator( + index=10, + activation_epoch=1, + is_compounding=False, + ), + create_consensus_validator( + index=11, + activation_epoch=1, + is_compounding=True, + ), + ] + selector = create_manager( + vault_validators=[v.public_key for v in consensus_validators], + consensus_validators=consensus_validators, + consolidating_source_indexes=set(), + consolidating_target_indexes={ + 10 + }, # index 10 is in target consolidation, so can't be source + ) + result = selector.get_target_source() + # Should be empty because the only potential source (index 10) is excluded + # since it's in consolidating_target_indexes + assert result == [] + + 
+@pytest.mark.usefixtures('fake_settings') +class TestConsolidationChecker: + def test_empty_list_when_empty_vault(self): + pk = faker.validator_public_key() + consolidation_keys = ConsolidationKeys( + source_public_keys=[pk], + target_public_key=pk, + ) + selector = create_manager( + consolidation_keys=consolidation_keys, + vault_validators=[], + consensus_validators=[], + ) + with pytest.raises( + ConsolidationError, + match=f'Validator {pk} is not registered in the vault {settings.vault}.', + ): + selector.get_target_source() + + def test_switch_from_0x01_to_0x02(self): + pk = faker.validator_public_key() + consolidation_keys = ConsolidationKeys( + source_public_keys=[pk], + target_public_key=pk, + ) + + consensus_validators = [ + create_consensus_validator( + public_key=pk, + activation_epoch=1, + is_compounding=False, + ), + ] + selector = create_manager( + consolidation_keys=consolidation_keys, + vault_validators=[v.public_key for v in consensus_validators], + consensus_validators=consensus_validators, + ) + result = selector.get_target_source() + assert result == [(consensus_validators[0], consensus_validators[0])] + + def test_switch_from_0x02_to_0x02(self): + pk = faker.validator_public_key() + consolidation_keys = ConsolidationKeys( + source_public_keys=[pk], + target_public_key=pk, + ) + + consensus_validators = [ + create_consensus_validator( + public_key=pk, + activation_epoch=1, + is_compounding=True, + ), + ] + + selector = create_manager( + consolidation_keys=consolidation_keys, + vault_validators=[v.public_key for v in consensus_validators], + consensus_validators=consensus_validators, + ) + with pytest.raises( + ConsolidationError, match=f'Target validator {pk} is already a compounding validator.' 
+    ):
+        selector.get_target_source()
+
+    def test_consolidation_with_single_compounding(self):
+        source_pk = faker.validator_public_key()
+        target_pk = faker.validator_public_key()
+        consolidation_keys = ConsolidationKeys(
+            source_public_keys=[source_pk],
+            target_public_key=target_pk,
+        )
+
+        consensus_validators = [
+            create_consensus_validator(
+                public_key=source_pk,
+                activation_epoch=1,
+                is_compounding=False,
+            ),
+            create_consensus_validator(
+                public_key=target_pk,
+                activation_epoch=1,
+                is_compounding=True,
+            ),
+        ]
+        selector = create_manager(
+            consolidation_keys=consolidation_keys,
+            vault_validators=[v.public_key for v in consensus_validators],
+            consensus_validators=consensus_validators,
+        )
+        result = selector.get_target_source()
+        assert result == [(consensus_validators[1], consensus_validators[0])]
+
+    def test_consolidation_to_smallest_balance(self):
+        source_pk_1 = faker.validator_public_key()
+        source_pk_2 = faker.validator_public_key()
+        target_pk_1 = faker.validator_public_key()
+        target_pk_2 = faker.validator_public_key()
+        consolidation_keys = ConsolidationKeys(
+            source_public_keys=[source_pk_1, source_pk_2],
+            target_public_key=target_pk_1,  # This is the intended target, but checker will validate it
+        )
+
+        consensus_validators = [
+            create_consensus_validator(
+                public_key=source_pk_1,
+                activation_epoch=1,
+                is_compounding=False,
+                balance=ether_to_gwei(32.0),
+            ),
+            create_consensus_validator(
+                public_key=source_pk_2,
+                activation_epoch=1,
+                is_compounding=False,
+                balance=ether_to_gwei(32.0),
+            ),
+            create_consensus_validator(
+                public_key=target_pk_1,
+                index=100,
+                activation_epoch=1,
+                is_compounding=True,
+                balance=ether_to_gwei(32.1),
+            ),
+            create_consensus_validator(
+                public_key=target_pk_2,
+                index=101,
+                activation_epoch=1,
+                is_compounding=True,
+                balance=ether_to_gwei(32.3),
+            ),
+        ]
+        # Unlike ConsolidationSelector, the checker honours the target given in
+        # consolidation_keys: both sources go to target_pk_1, not the smallest-balance target.
+        selector = create_manager(
+            consolidation_keys=consolidation_keys,
+            vault_validators=[v.public_key for v in consensus_validators],
+            consensus_validators=consensus_validators,
+        )
+        result = selector.get_target_source()
+        # ConsolidationChecker validates the specific target provided in consolidation_keys
+        assert result == [
+            (consensus_validators[2], consensus_validators[0]),
+            (consensus_validators[2], consensus_validators[1]),
+        ]
+
+    def test_consolidation_max_balance(self):
+        source_pk_1 = faker.validator_public_key()
+        source_pk_2 = faker.validator_public_key()
+        source_pk_3 = faker.validator_public_key()
+        target_pk = faker.validator_public_key()
+        consolidation_keys = ConsolidationKeys(
+            source_public_keys=[source_pk_1, source_pk_2, source_pk_3],
+            target_public_key=target_pk,
+        )
+
+        consensus_validators = [
+            create_consensus_validator(
+                public_key=source_pk_1,
+                activation_epoch=1,
+                is_compounding=False,
+                balance=ether_to_gwei(32.0),
+            ),
+            create_consensus_validator(
+                public_key=source_pk_2,
+                activation_epoch=1,
+                is_compounding=False,
+                balance=ether_to_gwei(32.1),
+            ),
+            create_consensus_validator(
+                public_key=source_pk_3,
+                activation_epoch=1,
+                is_compounding=False,
+                balance=ether_to_gwei(32.2),
+            ),
+            create_consensus_validator(
+                public_key=target_pk,
+                activation_epoch=1,
+                is_compounding=True,
+                balance=ether_to_gwei(32.0),
+            ),
+        ]
+
+        with patch.object(settings, 'max_validator_balance_gwei', ether_to_gwei(96.0)):
+            selector = create_manager(
+                consolidation_keys=consolidation_keys,
+                vault_validators=[v.public_key for v in consensus_validators],
+                consensus_validators=consensus_validators,
+            )
+
+            with pytest.raises(
+                ConsolidationError, match='Cannot consolidate validators, total balance exceeds'
+            ):
+                selector.get_target_source()
+
+    def test_excludes_consolidating_validators(self):
+        source_pk = faker.validator_public_key()
+        target_pk = faker.validator_public_key()
+        consolidation_keys = 
ConsolidationKeys( + source_public_keys=[source_pk], + target_public_key=target_pk, + ) + + consensus_validators = [ + create_consensus_validator( + public_key=source_pk, + index=10, + activation_epoch=1, + is_compounding=False, + ), + create_consensus_validator( + public_key=target_pk, + index=11, + activation_epoch=1, + is_compounding=True, + ), + ] + selector = create_manager( + consolidation_keys=consolidation_keys, + vault_validators=[v.public_key for v in consensus_validators], + consensus_validators=consensus_validators, + consolidating_source_indexes={10}, # Source validator is consolidating + consolidating_target_indexes=set(), + ) + + with pytest.raises( + ConsolidationError, + match=f'Validator {source_pk} is consolidating to another validator.', + ): + selector.get_target_source() + + # Also test when target validator is consolidating + selector = create_manager( + consolidation_keys=consolidation_keys, + vault_validators=[v.public_key for v in consensus_validators], + consensus_validators=consensus_validators, + consolidating_source_indexes={11}, # Target validator is consolidating (as source) + consolidating_target_indexes=set(), + ) + + with pytest.raises( + ConsolidationError, + match=f'Target validator {target_pk} is involved in another consolidation.', + ): + selector.get_target_source() + + def test_excludes_pending_partial_withdrawals(self): + source_pk = faker.validator_public_key() + target_pk = faker.validator_public_key() + consolidation_keys = ConsolidationKeys( + source_public_keys=[source_pk], + target_public_key=target_pk, + ) + + consensus_validators = [ + create_consensus_validator( + public_key=source_pk, + index=10, + activation_epoch=1, + is_compounding=False, + ), + create_consensus_validator( + public_key=target_pk, + index=11, + activation_epoch=1, + is_compounding=True, + ), + ] + selector = create_manager( + consolidation_keys=consolidation_keys, + vault_validators=[v.public_key for v in consensus_validators], + 
consensus_validators=consensus_validators, + pending_partial_withdrawals_indexes={10}, # Source validator has pending withdrawal + ) + + with pytest.raises( + ConsolidationError, + match=f'Validator {source_pk} has pending partial withdrawals in the queue.', + ): + selector.get_target_source() + + def test_excludes_exiting_validators(self): + source_pk = faker.validator_public_key() + target_pk = faker.validator_public_key() + consolidation_keys = ConsolidationKeys( + source_public_keys=[source_pk], + target_public_key=target_pk, + ) + + # Test with exiting source validator + consensus_validators = [ + create_consensus_validator( + public_key=source_pk, + activation_epoch=1, + is_compounding=False, + status=EXITING_STATUSES[0], + ), + create_consensus_validator( + public_key=target_pk, + activation_epoch=1, + is_compounding=True, + ), + ] + selector = create_manager( + consolidation_keys=consolidation_keys, + vault_validators=[v.public_key for v in consensus_validators], + consensus_validators=consensus_validators, + ) + + with pytest.raises( + ConsolidationError, + match=f'Validator {source_pk} is in exiting status {EXITING_STATUSES[0].value}.', + ): + selector.get_target_source() + + # Test with exiting target validator + consensus_validators = [ + create_consensus_validator( + public_key=source_pk, + activation_epoch=1, + is_compounding=False, + ), + create_consensus_validator( + public_key=target_pk, + activation_epoch=1, + is_compounding=True, + status=EXITING_STATUSES[0], + ), + ] + selector = create_manager( + consolidation_keys=consolidation_keys, + vault_validators=[v.public_key for v in consensus_validators], + consensus_validators=consensus_validators, + ) + + with pytest.raises( + ConsolidationError, + match=f'Target validator {target_pk} is in exiting status {EXITING_STATUSES[0].value}.', + ): + selector.get_target_source() + + def test_rejects_non_compounding_target(self): + source_pk = faker.validator_public_key() + target_pk = 
faker.validator_public_key() + consolidation_keys = ConsolidationKeys( + source_public_keys=[source_pk], + target_public_key=target_pk, + ) + + consensus_validators = [ + create_consensus_validator( + public_key=source_pk, + index=10, + activation_epoch=1, + is_compounding=False, + ), + create_consensus_validator( + public_key=target_pk, + index=11, + activation_epoch=1, + is_compounding=False, + ), + ] + selector = create_manager( + consolidation_keys=consolidation_keys, + vault_validators=[v.public_key for v in consensus_validators], + consensus_validators=consensus_validators, + ) + + with pytest.raises( + ConsolidationError, + match=f'The target validator {target_pk} is not a compounding validator.', + ): + selector.get_target_source() + + def test_min_activation_epoch(self): + source_pk = faker.validator_public_key() + target_pk = faker.validator_public_key() + consolidation_keys = ConsolidationKeys( + source_public_keys=[source_pk], + target_public_key=target_pk, + ) + + epoch = 10 + consensus_validators = [ + create_consensus_validator( + public_key=source_pk, + index=10, + activation_epoch=epoch + settings.network_config.SHARD_COMMITTEE_PERIOD - 1, + is_compounding=False, + ), + create_consensus_validator( + public_key=target_pk, + index=11, + activation_epoch=1, + is_compounding=True, + ), + ] + selector = create_manager( + consolidation_keys=consolidation_keys, + chain_head=create_chain_head(epoch), + vault_validators=[v.public_key for v in consensus_validators], + consensus_validators=consensus_validators, + ) + + with pytest.raises( + ConsolidationError, + match=f'Validator {consensus_validators[0].public_key} is not active enough for consolidation.', + ): + selector.get_target_source() + + +def create_manager( + consolidation_keys: ConsolidationKeys | None = None, + chain_head: ChainHead | None = None, + exclude_public_keys: set[HexStr] | None = None, + vault_validators: list[HexStr] | None = None, + consensus_validators: list[ConsensusValidator] | 
None = None, + consolidating_source_indexes: set[int] | None = None, + consolidating_target_indexes: set[int] | None = None, + pending_partial_withdrawals_indexes: set[int] | None = None, +) -> ConsolidationSelector | ConsolidationChecker: + self: ConsolidationChecker | ConsolidationSelector + if chain_head is None: + chain_head = create_chain_head(epoch=1024) + if consolidation_keys is not None: + self = ConsolidationChecker( + consolidation_keys=consolidation_keys, + chain_head=chain_head, + ) + else: + if exclude_public_keys is None: + exclude_public_keys = set() + self = ConsolidationSelector( + chain_head=chain_head, + exclude_public_keys=exclude_public_keys, + ) + self.vault_validators = vault_validators + self.consensus_validators = consensus_validators + + if consolidating_source_indexes: + self.consolidating_source_indexes = consolidating_source_indexes + else: + self.consolidating_source_indexes = set() + + if consolidating_target_indexes: + self.consolidating_target_indexes = consolidating_target_indexes + else: + self.consolidating_target_indexes = set() + + if pending_partial_withdrawals_indexes: + self.pending_partial_withdrawals_indexes = pending_partial_withdrawals_indexes + else: + self.pending_partial_withdrawals_indexes = set() + return self diff --git a/src/validators/typings.py b/src/validators/typings.py index dc13cd83..bba85fcd 100644 --- a/src/validators/typings.py +++ b/src/validators/typings.py @@ -119,3 +119,13 @@ class ApprovalRequest: class ConsolidationRequest: public_keys: list[HexStr] vault_address: ChecksumAddress + + +@dataclass +class ConsolidationKeys: + source_public_keys: list[HexStr] + target_public_key: HexStr + + @property + def all_public_keys(self) -> list[HexStr]: + return list(dict.fromkeys(self.source_public_keys + [self.target_public_key]))