diff --git a/bridge_indexer/dipdup.yaml b/bridge_indexer/dipdup.yaml index 1edf62b..e9e3a07 100644 --- a/bridge_indexer/dipdup.yaml +++ b/bridge_indexer/dipdup.yaml @@ -79,19 +79,19 @@ indexes: callback: tezos.on_head tezos_rollup_cement: -# kind: tezos.operations -# datasources: -# - tzkt -# types: -# - sr_cement -# contracts: -# - tezos_smart_rollup -# handlers: -# - callback: tezos.on_cement_commitment -# pattern: -# - type: sr_cement -# destination: tezos_smart_rollup -# + kind: tezos.operations + datasources: + - tzkt + types: + - sr_cement + contracts: + - tezos_smart_rollup + handlers: + - callback: tezos.on_cement_commitment + pattern: + - type: sr_cement + destination: tezos_smart_rollup + tezos_deposit_operations: kind: tezos.operations @@ -128,6 +128,57 @@ indexes: from_: etherlink_rollup_kernel + etherlink_withdrawal_events: + kind: evm.events + datasources: + - etherlink_subsquid + - etherlink_node + handlers: + - callback: etherlink.on_withdraw + contract: etherlink_rollup_kernel + name: Withdrawal + + tezos_withdrawal_operations: + kind: tezos.operations + datasources: + - tzkt + types: + - sr_execute + contracts: + - tezos_smart_rollup + handlers: + - callback: tezos.on_rollup_execute + pattern: + - type: sr_execute + destination: tezos_smart_rollup + + + etherlink_token_balance_update_events: + kind: evm.events + datasources: + - etherlink_subsquid + - etherlink_node + handlers: + - callback: etherlink.on_transfer + contract: l2_tzbtc_token + name: Transfer + - callback: etherlink.on_transfer + contract: l2_sirs_token + name: Transfer + - callback: etherlink.on_transfer + contract: l2_usdt_token + name: Transfer + + +hooks: + bridge_matcher: + callback: bridge_matcher +jobs: + bridge_matcher: + hook: bridge_matcher + daemon: True + + advanced: reindex: config_modified: ignore diff --git a/bridge_indexer/handlers/batch.py b/bridge_indexer/handlers/batch.py index 23d063f..a6351cd 100644 --- a/bridge_indexer/handlers/batch.py +++ b/bridge_indexer/handlers/batch.py @@ -1,20 +1,16 @@ import logging -from typing import Any -from dipdup.config import HandlerConfig -from dipdup.context import HandlerContext -from dipdup.index import Index +logger = logging.getLogger('bridge_indexer.handlers.batch') -from bridge_indexer.handlers.bridge_matcher import BridgeMatcher -logger = logging.getLogger('bridge_indexer.handlers.batch') +from collections.abc import Iterable +from dipdup.context import HandlerContext +from dipdup.index import MatchedHandler async def batch( ctx: HandlerContext, - handlers: tuple[tuple[Index[Any, Any, Any], HandlerConfig, Any]], + handlers: Iterable[MatchedHandler], ) -> None: - for index, handler, data in handlers: - await index._call_matched_handler(handler, data) - - await BridgeMatcher.check_pending_transactions() + for handler in handlers: + await ctx.fire_matched_handler(handler) diff --git a/bridge_indexer/handlers/bridge_matcher.py b/bridge_indexer/handlers/bridge_matcher.py index 0f83002..1261a60 100644 --- a/bridge_indexer/handlers/bridge_matcher.py +++ b/bridge_indexer/handlers/bridge_matcher.py @@ -1,5 +1,7 @@ +import threading from datetime import timedelta +from bridge_indexer.handlers.bridge_matcher_locks import BridgeMatcherLocks from bridge_indexer.models import BridgeDepositOperation from bridge_indexer.models import BridgeOperation from bridge_indexer.models import BridgeOperationStatus @@ -8,6 +10,7 @@ from bridge_indexer.models import EtherlinkDepositOperation from bridge_indexer.models import EtherlinkWithdrawOperation from 
bridge_indexer.models import RollupInboxMessage +from bridge_indexer.models import RollupOutboxMessage from bridge_indexer.models import TezosDepositOperation from bridge_indexer.models import TezosWithdrawOperation @@ -15,38 +18,14 @@ class BridgeMatcher: - _pending_tezos_deposits: bool = False - _pending_etherlink_withdrawals: bool = False - _pending_etherlink_deposits: bool = False - _pending_etherlink_xtz_deposits: bool = False - _pending_tezos_withdrawals: bool = False - - @classmethod - def set_pending_tezos_deposits(cls): - cls._pending_tezos_deposits = True - - @classmethod - def set_pending_etherlink_withdrawals(cls): - cls._pending_etherlink_withdrawals = True - - @classmethod - def set_pending_etherlink_deposits(cls): - cls._pending_etherlink_deposits = True - - @classmethod - def set_pending_etherlink_xtz_deposits(cls): - cls._pending_etherlink_xtz_deposits = True - - @classmethod - def set_pending_tezos_withdrawals(cls): - cls._pending_tezos_withdrawals = True + matcher_lock = threading.Lock() @classmethod async def check_pending_tezos_deposits(cls): - if not cls._pending_tezos_deposits: + if not BridgeMatcherLocks.pending_tezos_deposits: return else: - cls._pending_tezos_deposits = False + BridgeMatcherLocks.pending_tezos_deposits = False qs = TezosDepositOperation.filter(bridge_deposits=None) async for l1_deposit in qs: @@ -64,6 +43,11 @@ async def check_pending_tezos_deposits(cls): @classmethod async def check_pending_inbox(cls): + if not BridgeMatcherLocks.pending_inbox: + return + else: + BridgeMatcherLocks.pending_inbox = False + qs = BridgeDepositOperation.filter( inbox_message=None, ).order_by( @@ -86,10 +70,10 @@ async def check_pending_inbox(cls): @classmethod async def check_pending_etherlink_deposits(cls): - if not cls._pending_etherlink_deposits: + if not BridgeMatcherLocks.pending_etherlink_deposits: return else: - cls._pending_etherlink_deposits = False + BridgeMatcherLocks.pending_etherlink_deposits = False qs = EtherlinkDepositOperation.filter( bridge_deposits=None, @@ -128,10 +112,10 @@ async def check_pending_etherlink_deposits(cls): @classmethod async def check_pending_etherlink_xtz_deposits(cls): - if not cls._pending_etherlink_xtz_deposits: + if not BridgeMatcherLocks.pending_etherlink_xtz_deposits: return else: - cls._pending_etherlink_xtz_deposits = False + BridgeMatcherLocks.pending_etherlink_xtz_deposits = False qs = EtherlinkDepositOperation.filter( bridge_deposits=None, @@ -171,18 +155,67 @@ async def check_pending_etherlink_xtz_deposits(cls): bridge_operation.status = BridgeOperationStatus.finished await bridge_operation.save() + @classmethod + async def check_pending_etherlink_withdrawals(cls): + if not BridgeMatcherLocks.pending_etherlink_withdrawals: + return + else: + BridgeMatcherLocks.pending_etherlink_withdrawals = False + + qs = EtherlinkWithdrawOperation.filter(bridge_withdrawals=None) + async for l2_withdrawal in qs: + bridge_withdrawal = await BridgeWithdrawOperation.create(l2_transaction=l2_withdrawal) + await BridgeOperation.create( + id=bridge_withdrawal.id, + type=BridgeOperationType.withdrawal, + l1_account=l2_withdrawal.l1_account, + l2_account=l2_withdrawal.l2_account, + created_at=l2_withdrawal.timestamp, + updated_at=l2_withdrawal.timestamp, + status=BridgeOperationStatus.created, + ) + + @classmethod + async def check_pending_outbox(cls): + if not BridgeMatcherLocks.pending_outbox: + return + else: + BridgeMatcherLocks.pending_outbox = False + + qs = BridgeWithdrawOperation.filter( + outbox_message=None, + ).order_by( + 
'l2_transaction__level', 'l2_transaction__transaction_index', 'l2_transaction__log_index', + ).prefetch_related('l2_transaction') + async for bridge_withdrawal in qs: + bridge_withdrawal: BridgeWithdrawOperation + outbox_message = await RollupOutboxMessage.filter( + parameters_hash=bridge_withdrawal.l2_transaction.parameters_hash, + # created_at__gte=bridge_withdrawal.l2_transaction.timestamp, + # created_at__gte=datetime.fromtimestamp(bridge_withdrawal.l2_transaction.timestamp, tz=timezone.utc), + ).order_by('level', 'index').first() + + if outbox_message: + bridge_withdrawal.outbox_message = outbox_message + await bridge_withdrawal.save() + bridge_withdrawal.l2_transaction.parameters_hash = None + await bridge_withdrawal.l2_transaction.save() + outbox_message.parameters_hash = None + await outbox_message.save() + @classmethod async def check_pending_tezos_withdrawals(cls): - if not cls._pending_tezos_withdrawals: + if not BridgeMatcherLocks.pending_tezos_withdrawals: return else: - cls._pending_tezos_withdrawals = False + BridgeMatcherLocks.pending_tezos_withdrawals = False - qs = TezosWithdrawOperation.filter(bridge_withdrawals__isnull=True).order_by('level') + qs = TezosWithdrawOperation.filter(bridge_withdrawals=None).order_by('level') async for l1_withdrawal in qs: + l1_withdrawal: TezosWithdrawOperation bridge_withdrawal = await BridgeWithdrawOperation.filter( l1_transaction=None, - l2_transaction__outbox_message_id=l1_withdrawal.outbox_message_id, + outbox_message_id=l1_withdrawal.outbox_message_id, ).first() if not bridge_withdrawal: @@ -197,12 +230,3 @@ async def check_pending_tezos_withdrawals(cls): bridge_operation.updated_at = l1_withdrawal.timestamp bridge_operation.status = BridgeOperationStatus.finished await bridge_operation.save() - - @staticmethod - async def check_pending_transactions(): - await BridgeMatcher.check_pending_tezos_deposits() - await BridgeMatcher.check_pending_etherlink_withdrawals() - - await BridgeMatcher.check_pending_etherlink_deposits() - await BridgeMatcher.check_pending_etherlink_xtz_deposits() - await BridgeMatcher.check_pending_tezos_withdrawals() diff --git a/bridge_indexer/handlers/bridge_matcher_locks.py b/bridge_indexer/handlers/bridge_matcher_locks.py new file mode 100644 index 0000000..b871b52 --- /dev/null +++ b/bridge_indexer/handlers/bridge_matcher_locks.py @@ -0,0 +1,36 @@ +class BridgeMatcherLocks: + pending_tezos_deposits: bool = False + pending_etherlink_withdrawals: bool = False + pending_etherlink_deposits: bool = False + pending_etherlink_xtz_deposits: bool = False + pending_tezos_withdrawals: bool = False + pending_inbox: bool = False + pending_outbox: bool = False + + @classmethod + def set_pending_tezos_deposits(cls): + BridgeMatcherLocks.pending_tezos_deposits = True + + @classmethod + def set_pending_etherlink_withdrawals(cls): + BridgeMatcherLocks.pending_etherlink_withdrawals = True + + @classmethod + def set_pending_etherlink_deposits(cls): + BridgeMatcherLocks.pending_etherlink_deposits = True + + @classmethod + def set_pending_etherlink_xtz_deposits(cls): + BridgeMatcherLocks.pending_etherlink_xtz_deposits = True + + @classmethod + def set_pending_tezos_withdrawals(cls): + BridgeMatcherLocks.pending_tezos_withdrawals = True + + @classmethod + def set_pending_inbox(cls): + BridgeMatcherLocks.pending_inbox = True + + @classmethod + def set_pending_outbox(cls): + BridgeMatcherLocks.pending_outbox = True diff --git a/bridge_indexer/handlers/etherlink/on_deposit.py b/bridge_indexer/handlers/etherlink/on_deposit.py index 
f105d84..cde9952 100644 --- a/bridge_indexer/handlers/etherlink/on_deposit.py +++ b/bridge_indexer/handlers/etherlink/on_deposit.py @@ -4,7 +4,7 @@ from dipdup.context import HandlerContext from dipdup.models.evm import EvmEvent -from bridge_indexer.handlers.bridge_matcher import BridgeMatcher +from bridge_indexer.handlers.bridge_matcher_locks import BridgeMatcherLocks from bridge_indexer.models import EtherlinkDepositOperation from bridge_indexer.models import EtherlinkToken from bridge_indexer.models import TezosTicket @@ -82,5 +82,4 @@ async def on_deposit( ctx.logger.info(f'Etherlink Deposit Event registered: {deposit.id}') - BridgeMatcher.set_pending_etherlink_deposits() - await BridgeMatcher.check_pending_transactions() + BridgeMatcherLocks.set_pending_etherlink_deposits() diff --git a/bridge_indexer/handlers/etherlink/on_withdraw.py b/bridge_indexer/handlers/etherlink/on_withdraw.py index 2717d55..be7282d 100644 --- a/bridge_indexer/handlers/etherlink/on_withdraw.py +++ b/bridge_indexer/handlers/etherlink/on_withdraw.py @@ -1,12 +1,15 @@ +from datetime import datetime +from datetime import timezone + from dipdup.context import HandlerContext from dipdup.models.evm import EvmEvent -from tortoise.exceptions import DoesNotExist from bridge_indexer.handlers import setup_handler_logger -from bridge_indexer.handlers.bridge_matcher import BridgeMatcher +from bridge_indexer.handlers.bridge_matcher_locks import BridgeMatcherLocks +from bridge_indexer.handlers.rollup_message import OutboxParametersHash from bridge_indexer.models import EtherlinkToken from bridge_indexer.models import EtherlinkWithdrawOperation -from bridge_indexer.types.fa_precompile.evm_events.withdrawal import WithdrawalPayload +from bridge_indexer.types.kernel.evm_events.withdrawal import WithdrawalPayload async def on_withdraw( @@ -14,7 +17,7 @@ async def on_withdraw( event: EvmEvent[WithdrawalPayload], ) -> None: setup_handler_logger(ctx) - ctx.logger.info(f'Etherlink Withdraw Event found: 0x{event.data.transaction_hash}') + ctx.logger.info(f'Etherlink Withdraw Event found: {event.data.transaction_hash}') token_contract = event.payload.ticket_owner.removeprefix('0x') etherlink_token = await EtherlinkToken.get_or_none(id=token_contract) if not etherlink_token: @@ -26,18 +29,8 @@ async def on_withdraw( ) return - try: - outbox_message = await ctx.container.outbox_message_service.find_by_index(event.payload.outbox_level, event.payload.outbox_msg_id) - except DoesNotExist: - ctx.logger.error( - 'Failed to fetch Outbox Message with level %d and index %d. 
Operation ignored.', - event.payload.outbox_level, - event.payload.outbox_msg_id, - ) - return - withdrawal = await EtherlinkWithdrawOperation.create( - timestamp=event.data.timestamp, + timestamp=datetime.fromtimestamp(event.data.timestamp, tz=timezone.utc), level=event.data.level, address=event.data.address[-40:], log_index=event.data.log_index, @@ -47,11 +40,13 @@ async def on_withdraw( l1_account=event.payload.receiver, l2_token=etherlink_token, ticket_id=event.payload.ticket_hash, + l2_ticket_owner=event.payload.ticket_owner[-40:], + l1_ticket_owner=event.payload.proxy, amount=event.payload.amount, - outbox_message=outbox_message, + parameters_hash=await OutboxParametersHash(event).from_event(), + kernel_withdrawal_id=event.payload.withdrawal_id, ) ctx.logger.info(f'Etherlink Withdraw Event registered: {withdrawal.id}') - BridgeMatcher.set_pending_etherlink_withdrawals() - await BridgeMatcher.check_pending_transactions() + BridgeMatcherLocks.set_pending_etherlink_withdrawals() diff --git a/bridge_indexer/handlers/etherlink/on_xtz_deposit.py b/bridge_indexer/handlers/etherlink/on_xtz_deposit.py index da4d556..ffc1e81 100644 --- a/bridge_indexer/handlers/etherlink/on_xtz_deposit.py +++ b/bridge_indexer/handlers/etherlink/on_xtz_deposit.py @@ -4,7 +4,7 @@ from dipdup.context import HandlerContext from dipdup.models.evm import EvmTransactionData -from bridge_indexer.handlers.bridge_matcher import BridgeMatcher +from bridge_indexer.handlers.bridge_matcher_locks import BridgeMatcherLocks from bridge_indexer.models import EtherlinkDepositOperation from bridge_indexer.models import EtherlinkToken from bridge_indexer.models import TezosTicket @@ -55,5 +55,4 @@ async def on_xtz_deposit( ctx.logger.info(f'XTZ Deposit Transaction registered: {deposit.id}') - BridgeMatcher.set_pending_etherlink_xtz_deposits() - await BridgeMatcher.check_pending_transactions() + BridgeMatcherLocks.set_pending_etherlink_xtz_deposits() diff --git a/bridge_indexer/handlers/rollup_message.py b/bridge_indexer/handlers/rollup_message.py index 8e77ff6..4b525c2 100644 --- a/bridge_indexer/handlers/rollup_message.py +++ b/bridge_indexer/handlers/rollup_message.py @@ -1,80 +1,52 @@ -import asyncio -from typing import TYPE_CHECKING +import base64 +import threading +from datetime import datetime +from typing import Any from typing import AsyncGenerator +from typing import TYPE_CHECKING +import orjson from dipdup.datasources.http import HttpDatasource -from dipdup.datasources.tezos_tzkt import Datasource from dipdup.datasources.tezos_tzkt import TezosTzktDatasource +from dipdup.models import IndexStatus +from dipdup.models.evm import EvmEvent from dipdup.models.tezos import TezosOperationData +from dipdup.models.tezos import TezosTransaction +from pytezos import MichelsonType +from pytezos import michelson_to_micheline +from tortoise.exceptions import DoesNotExist +from bridge_indexer.handlers.bridge_matcher_locks import BridgeMatcherLocks from bridge_indexer.models import BridgeOperation from bridge_indexer.models import BridgeOperationStatus from bridge_indexer.models import BridgeWithdrawOperation from bridge_indexer.models import RollupCementedCommitment from bridge_indexer.models import RollupInboxMessage +from bridge_indexer.models import RollupInboxMessageType from bridge_indexer.models import RollupOutboxMessage +from bridge_indexer.models import TezosTicket +from bridge_indexer.types.kernel.evm_events.withdrawal import WithdrawalPayload +from bridge_indexer.types.rollup.tezos_parameters.default import DefaultParameter 
+from bridge_indexer.types.rollup.tezos_storage import RollupStorage +from bridge_indexer.types.ticketer.tezos_parameters.withdraw import WithdrawParameter if TYPE_CHECKING: from bridge_indexer.handlers.service_container import BridgeConstantStorage from bridge_indexer.handlers.service_container import ProtocolConstantStorage + from logging import Logger class InboxMessageService: - def __init__(self, tzkt: Datasource, bridge: 'BridgeConstantStorage'): - self._tzkt = tzkt - self._bridge = bridge - - def _validate_message(self, message_data: dict) -> None: - match message_data['type']: - case 'transfer': - if message_data['target']['address'] != self._bridge.smart_rollup_address: - raise TypeError('Message target must be Bridge Rollup address, not {}.', message_data['target']['address']) - - case 'external': - raise NotImplementedError - case _: - raise TypeError('Unsupported Inbox Message Type: {}.', message_data['type']) - - async def _fetch_inbox_level(self, inbox_level: int): - index = -1 - for message_data in await self._tzkt.request('GET', f'v1/smart_rollups/inbox?level={inbox_level}'): - index += 1 - try: - self._validate_message(message_data) - yield RollupInboxMessage( - id=message_data['id'], - level=message_data['level'], - index=index, - type=message_data['type'], - parameter=message_data.get('parameter'), - payload=message_data.get('payload'), - ) - except (TypeError, NotImplementedError): - continue - - @staticmethod - async def _check_inbox_level(inbox_level): - return await RollupInboxMessage.filter(level=inbox_level).count() > 0 - - async def _prepare_inbox_level(self, inbox_level): - if await self._check_inbox_level(inbox_level): - return - inbox: list[RollupInboxMessage] = [] - async for inbox_message in self._fetch_inbox_level(inbox_level): - inbox.append(inbox_message) - if len(inbox) > 0: - await RollupInboxMessage.bulk_create(inbox) - else: - raise RuntimeError('Processed level with no supported inbox_messages!') - - async def _read_inbox_level(self, inbox_level: int) -> AsyncGenerator[RollupInboxMessage, None]: - await self._prepare_inbox_level(inbox_level) + @classmethod + async def _read_inbox_level(cls, inbox_level: int) -> AsyncGenerator[RollupInboxMessage, None]: async for inbox_message in RollupInboxMessage.filter(level=inbox_level, l1_deposits__isnull=True).order_by('id'): yield inbox_message - async def match_transaction_with_inbox(self, data: TezosOperationData) -> RollupInboxMessage: - async for inbox_message in self._read_inbox_level(data.level): - if data.parameter_json == inbox_message.parameter: + @classmethod + async def match_transaction_with_inbox(cls, data: TezosOperationData) -> RollupInboxMessage: + async for inbox_message in cls._read_inbox_level(data.level): + # only `transfer` type inbox messages here + if data.parameter_json == inbox_message.message: return inbox_message raise TypeError('Transaction not matched') @@ -90,60 +62,33 @@ def __init__(self, tzkt: TezosTzktDatasource, rollup_node: HttpDatasource, proto self._rollup_node = rollup_node self._protocol = protocol - @staticmethod - def _estimate_outbox_message_cemented_level(outbox_level: int, lcc_level: int, commitment_period: int, challenge_window: int): + @classmethod + def estimate_outbox_message_cemented_level(cls, outbox_level: int, lcc_inbox_level: int, protocol: 'ProtocolConstantStorage') -> int: + commitment_period = protocol.smart_rollup_commitment_period + challenge_window = protocol.smart_rollup_challenge_window + return ( outbox_level - + (lcc_level - outbox_level) % 
commitment_period + + (lcc_inbox_level - outbox_level) % commitment_period + challenge_window - + (commitment_period - challenge_window % commitment_period) % commitment_period + + (commitment_period - challenge_window % commitment_period) + % commitment_period # well, at this line, I'm just fucking around already. + 5 ) - async def _fetch_outbox(self, outbox_level: int): - for message_data in await self._rollup_node.request('GET', f'global/block/{outbox_level}/outbox/{outbox_level}/messages'): - outbox_level = message_data['outbox_level'] - created_at = await self._tzkt.request('GET', f'v1/blocks/{outbox_level}/timestamp') - - lcc = None - while lcc is None: - lcc = await RollupCementedCommitment.filter(inbox_level__lt=outbox_level).order_by('-inbox_level').first() - await asyncio.sleep(1) # fixme - - cemented_level = self._estimate_outbox_message_cemented_level( - outbox_level, - lcc.inbox_level, - self._protocol.smart_rollup_commitment_period, - self._protocol.smart_rollup_challenge_window, - ) - cemented_at = await self._tzkt.request('GET', f'v1/blocks/{cemented_level}/timestamp') - - yield RollupOutboxMessage( - level=message_data['outbox_level'], - index=message_data['message_index'], - message=message_data['message'], - created_at=created_at, - cemented_at=cemented_at, - ) - - async def _prepare_outbox(self, outbox_level): - if await RollupOutboxMessage.filter(level=outbox_level).count() == 0: - outbox: list[RollupOutboxMessage] = [] - async for outbox_message in self._fetch_outbox(outbox_level): - outbox.append(outbox_message) - await RollupOutboxMessage.bulk_create(outbox) - - async def find_by_index(self, outbox_level: int, index: int): - await self._prepare_outbox(outbox_level) - + @classmethod + async def find_by_index(cls, outbox_level: int, index: int): return await RollupOutboxMessage.get(level=outbox_level, index=index) async def update_proof(self): head_data = await self._tzkt.get_head_block() - async for outbox_message in RollupOutboxMessage.filter( - l1_withdrawals__isnull=True, - l2_withdrawals__isnull=False, - ): + async for bridge_withdraw_operation in BridgeWithdrawOperation.filter( + l1_transaction=None, + outbox_message_id__isnull=False, + ).prefetch_related('outbox_message'): + bridge_withdraw_operation: BridgeWithdrawOperation + outbox_message = bridge_withdraw_operation.outbox_message + if head_data.level - outbox_message.level > self._protocol.smart_rollup_max_active_outbox_levels: continue @@ -161,7 +106,6 @@ async def update_proof(self): outbox_message.updated_at = commitment.created_at await outbox_message.save() - bridge_withdraw_operation = await BridgeWithdrawOperation.get(l2_transaction__outbox_message=outbox_message) bridge_withdraw_operation.updated_at = commitment.created_at await bridge_withdraw_operation.save() @@ -169,3 +113,286 @@ async def update_proof(self): bridge_operation.updated_at = commitment.created_at bridge_operation.status = BridgeOperationStatus.sealed await bridge_operation.save() + + @classmethod + async def _read_inbox_level(cls, inbox_level: int) -> AsyncGenerator[RollupInboxMessage, None]: + async for inbox_message in RollupInboxMessage.filter(level=inbox_level, l1_deposits__isnull=True).order_by('id'): + yield inbox_message + + @classmethod + async def _fetch_matching_outbox_message(cls, timestamp, parameters_hash) -> RollupOutboxMessage: + outbox_message = ( + ) + + if outbox_message is None: + raise TypeError('Outbox not matched') + + outbox_message.parameters_hash = None + await outbox_message.save() + + return outbox_message 
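For reference, a standalone sketch of the arithmetic in `estimate_outbox_message_cemented_level` above. The helper below only mirrors the patch's formula for illustration; the constants in the worked example are placeholders, not real protocol parameters.

```python
def estimate_cemented_level(outbox_level: int, lcc_inbox_level: int,
                            commitment_period: int, challenge_window: int) -> int:
    # Illustrative sketch, not part of the change; mirrors the formula in the patch.
    return (
        outbox_level
        # advance to the next commitment boundary, on the grid anchored at the last
        # cemented commitment's inbox level (assumes lcc_inbox_level <= outbox_level)
        + (lcc_inbox_level - outbox_level) % commitment_period
        # challenge window, rounded up to a whole number of commitment periods
        + challenge_window
        + (commitment_period - challenge_window % commitment_period) % commitment_period
        # small safety margin used by the patch
        + 5
    )

# Worked example with placeholder constants:
# (950 - 1000) % 20 = 10; a challenge window of 50 rounded up to periods of 20 adds 10 more blocks
assert estimate_cemented_level(1000, 950, commitment_period=20, challenge_window=50) == 1075
```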
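Likewise, a minimal sketch of the parameters-hash matching idea used by `InboxParametersHash`/`OutboxParametersHash` and `check_pending_outbox`: the L2 withdrawal event and the rollup outbox message are each reduced to the same canonical dict, so the two sides can be joined on a single `parameters_hash` column. Field names follow the patch; the sample values are placeholders.

```python
import orjson


def parameters_hash(comparable_data: dict) -> str:
    # Same reduction as _hash_from_dto in the patch: canonical JSON (sorted keys),
    # then Python's hash() stored as an 8-byte signed hex string. Note that hash()
    # of bytes is salted per interpreter run (PYTHONHASHSEED), so digests are only
    # comparable within a single indexer process.
    return (
        hash(orjson.dumps(comparable_data, option=orjson.OPT_SORT_KEYS))
        .to_bytes(8, byteorder='big', signed=True)
        .hex()
    )


# Placeholder withdrawal as seen from both the EVM event side and the outbox side:
withdrawal = {
    'receiver': 'tz1ReceiverPlaceholder',
    'ticket_hash': 1234567890,
    'amount': 42,
    'ticketer_address': 'KT1TicketerPlaceholder',
    'proxy': 'KT1ProxyPlaceholder',
}
# Key order does not matter thanks to OPT_SORT_KEYS:
assert parameters_hash(withdrawal) == parameters_hash(dict(reversed(list(withdrawal.items()))))
```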
+ + +class RollupMessageIndex: + request_limit = 10000 + _lock = threading.Lock() + + def __init__( + self, + tzkt: TezosTzktDatasource, + rollup_node: HttpDatasource, + bridge: 'BridgeConstantStorage', + protocol: 'ProtocolConstantStorage', + logger: 'Logger', + ): + self._tzkt = tzkt + self._rollup_node = rollup_node + self._bridge = bridge + self._protocol = protocol + self._logger = logger + + self._status: IndexStatus = IndexStatus.new + + self._inbox_id_cursor: int = 0 + self._inbox_level_cursor: int = 0 + self._outbox_level_cursor: int = 0 + self._outbox_index_cursor: int = 0 + self._realtime_head_level: int = 0 + + self._outbox_level_queue: set = set() + self._create_inbox_batch: list[RollupInboxMessage] = [] + self._create_outbox_batch: list[RollupOutboxMessage] = [] + + async def synchronize(self): + with self._lock: + while True: + if self._status == IndexStatus.realtime: + break + + if self._status == IndexStatus.new: + await self._prepare_new_index() + + if self._status == IndexStatus.syncing: + await self._process() + + async def handle_realtime(self, head_level: int): + with self._lock: + if self._status == IndexStatus.realtime: + self._realtime_head_level = max(self._realtime_head_level, head_level) + await self._process() + + async def _process(self): + inbox = await self._tzkt.request( + method='GET', + url=f'v1/smart_rollups/inbox?id.gt={self._inbox_id_cursor}&type.in=transfer,external&target={self._bridge.smart_rollup_address}&micheline=0&sort=id&limit={self.request_limit}', + ) + + if len(inbox) == 0: + if self._status == IndexStatus.syncing: + self._status = IndexStatus.realtime + return + else: + self._logger.info(f'Found {len(inbox)} not indexed Inbox Messages.') + + for inbox_message in inbox: + match inbox_message['type']: + case RollupInboxMessageType.transfer.value: + await self._handle_transfer_inbox_message(inbox_message) + case RollupInboxMessageType.external.value: + await self._handle_external_inbox_message(inbox_message) + case _: + continue + self._inbox_id_cursor = inbox_message['id'] + + if len(self._create_inbox_batch): + await RollupInboxMessage.bulk_create(self._create_inbox_batch) + self._logger.info(f'Successfully saved {len(self._create_inbox_batch)} new Inbox Messages.') + self._inbox_level_cursor = self._create_inbox_batch[-1].level + BridgeMatcherLocks.set_pending_inbox() + + del self._create_inbox_batch[:] + + while len(self._outbox_level_queue) > 0 and ( + self._status == IndexStatus.syncing or min(self._outbox_level_queue) <= self._realtime_head_level + ): + outbox_level = self._outbox_level_queue.pop() + await self._handle_outbox_level(outbox_level) + + if len(self._create_outbox_batch): + await RollupOutboxMessage.bulk_create(self._create_outbox_batch, ignore_conflicts=True) + self._logger.info(f'Successfully saved {len(self._create_outbox_batch)} new Outbox Messages.') + self._outbox_index_cursor = self._create_outbox_batch[-1].index + self._outbox_level_cursor = self._create_outbox_batch[-1].level + BridgeMatcherLocks.set_pending_outbox() + + del self._create_outbox_batch[:] + + self._logger.info(f'Update Inbox Message cursor index to {self._inbox_id_cursor}') + + async def _handle_transfer_inbox_message(self, message): + self._create_inbox_batch.append( + RollupInboxMessage( + id=message['id'], + level=message['level'], + index=message['index'], + type=RollupInboxMessageType.transfer, + message=message['parameter'], + parameters_hash=await InboxParametersHash(message['parameter']).from_inbox_message_parameters(), + ) + ) + + async def 
_handle_external_inbox_message(self, message): + payload = base64.b64decode(message['payload']).hex() + if len(payload) <= 350: + return + + self._outbox_level_queue.add(message['level']) + + async def _handle_outbox_level(self, outbox_level): + outbox = await self._rollup_node.request(method='GET', url=f'global/block/head/outbox/{outbox_level}/messages') + self._logger.warning(f'_handle_outbox_level with {len(outbox)} messages.') + if len(outbox) == 0: + return + + if len(outbox) == self._protocol.smart_rollup_max_outbox_messages_per_level: + if outbox_level < self._outbox_level_cursor: + return + if outbox_level == self._outbox_level_cursor: + if self._outbox_index_cursor < len(outbox) - 1: + outbox = outbox[self._outbox_index_cursor :] + else: + return + + recent_cement_operations = await self._tzkt.request( + method='GET', + url=f'v1/operations/sr_cement?rollup={self._bridge.smart_rollup_address}&level.lt={outbox_level}&sort.desc=level&limit=1', + ) + lcc_inbox_level = recent_cement_operations[0]['commitment']['inboxLevel'] + + created_at = datetime.fromisoformat(await self._tzkt.request('GET', f'v1/blocks/{outbox_level}/timestamp')) + cemented_level = OutboxMessageService.estimate_outbox_message_cemented_level( + outbox_level, + lcc_inbox_level, + self._protocol, + ) + cemented_at = datetime.fromisoformat(await self._tzkt.request('GET', f'v1/blocks/{cemented_level}/timestamp')) + + for outbox_message in outbox: + try: + parameters_hash = await OutboxParametersHash(outbox_message).from_outbox_message() + except ValueError: + continue + + self._create_outbox_batch.append( + RollupOutboxMessage( + level=outbox_message['outbox_level'], + index=outbox_message['message_index'], + message=outbox_message['message'], + parameters_hash=parameters_hash, + created_at=created_at, + cemented_at=cemented_at, + cemented_level=cemented_level, + ) + ) + + if len(outbox) == self._protocol.smart_rollup_max_outbox_messages_per_level: + self._logger.info(f'Full outbox found at level {outbox_level}, going to check next level for the rest Outbox Messages...') + self._outbox_level_queue.add(outbox_level + 1) + + async def _prepare_new_index(self): + try: + last_saved_inbox_message = await RollupInboxMessage.all().order_by('-id').first() + self._inbox_id_cursor = 1 + last_saved_inbox_message.id + self._logger.info('Last previous saved Inbox Message found. Going to continue with next Inbox Message.') + except AttributeError: + self._logger.info('No previous saved Inbox Message found. 
Going to start indexing since Smart Rollup origination moment.') + rollup_data = await self._tzkt.request(method='GET', url=f'v1/smart_rollups/{self._bridge.smart_rollup_address}') + first_level = rollup_data['firstActivity'] + inbox = await self._tzkt.request( + method='GET', + url=f'v1/smart_rollups/inbox?type.in=transfer,external&target={self._bridge.smart_rollup_address}&level.ge={first_level}&sort.asc=id&limit=1', + ) + self._inbox_id_cursor = inbox[0]['id'] + + self._logger.info(f'Inbox Message cursor index is {self._inbox_id_cursor}.') + self._status = IndexStatus.syncing + + +class InboxParametersHash: + def __init__(self, value: TezosTransaction[DefaultParameter, RollupStorage] | RollupInboxMessage): + self._value = value + + async def from_inbox_message_parameters(self): + inbox_message_parameters = self._value + return self._hash_from_dto(inbox_message_parameters) + + async def from_transaction(self): + default = self._value + return self._hash_from_dto(default.data.parameter_json) + + @staticmethod + def _hash_from_dto(dto): + parameters_hash = hash(orjson.dumps(dto, option=orjson.OPT_SORT_KEYS)).to_bytes(8, byteorder='big', signed=True).hex() + + return parameters_hash + + +class OutboxParametersHash: + def __init__(self, value: dict[str, Any] | EvmEvent[WithdrawalPayload]): + self._value = value + + async def from_outbox_message(self): + outbox_message = self._value + + try: + transaction = outbox_message['message']['transactions'][0] + parameters_micheline = transaction['parameters'] + ticket = await TezosTicket.get(ticketer_address=transaction['destination']) + michelson_outbox_interface = ticket.outbox_interface + micheline_expression = michelson_to_micheline(michelson_outbox_interface) + michelson_type = MichelsonType.match(micheline_expression) + + parameters_data = michelson_type.from_micheline_value(parameters_micheline).to_python_object() + parameters: WithdrawParameter = WithdrawParameter.model_validate(parameters_data) + + comparable_data = { + 'receiver': parameters.receiver, + 'ticket_hash': ticket.hash, + 'amount': parameters.ticket.amount, + 'ticketer_address': parameters.ticket.ticketer, + 'proxy': transaction['destination'], + } + assert comparable_data + except (AttributeError, KeyError, DoesNotExist): + raise ValueError + + return self._hash_from_dto(comparable_data) + + async def from_event(self): + event = self._value + + try: + ticket = await TezosTicket.get( + hash=event.payload.ticket_hash, + ).prefetch_related('token', 'etherlink_tokens') + assert ticket.ticketer_address == event.payload.proxy + assert ticket.etherlink_tokens.id == event.payload.ticket_owner[-40:] + + comparable_data = { + 'receiver': event.payload.receiver, + 'ticket_hash': ticket.hash, + 'amount': event.payload.amount, + 'ticketer_address': ticket.ticketer_address, + 'proxy': event.payload.proxy, + } + assert comparable_data + except (DoesNotExist, AssertionError, AttributeError): + raise ValueError + + return self._hash_from_dto(comparable_data) + + @staticmethod + def _hash_from_dto(dto): + parameters_hash = hash(orjson.dumps(dto, option=orjson.OPT_SORT_KEYS)).to_bytes(8, byteorder='big', signed=True).hex() + + return parameters_hash diff --git a/bridge_indexer/handlers/service_container.py b/bridge_indexer/handlers/service_container.py index b837e95..f03420c 100644 --- a/bridge_indexer/handlers/service_container.py +++ b/bridge_indexer/handlers/service_container.py @@ -1,18 +1,15 @@ -import os - from dipdup.context import DipDupContext from dipdup.datasources.http import 
HttpDatasource from dipdup.datasources.tezos_tzkt import TezosTzktDatasource from dipdup.datasources.tzip_metadata import TzipMetadataDatasource from pydantic import BaseModel from pydantic import Field +from pydantic_settings import BaseSettings -from bridge_indexer.handlers.rollup_message import InboxMessageService from bridge_indexer.handlers.rollup_message import OutboxMessageService +from bridge_indexer.handlers.rollup_message import RollupMessageIndex from bridge_indexer.handlers.ticket import TicketService -from pydantic_settings import BaseSettings - class BridgeConstantStorage(BaseSettings): smart_rollup_address: str = Field(alias='SMART_ROLLUP_ADDRESS') @@ -26,13 +23,14 @@ class ProtocolConstantStorage(BaseModel): smart_rollup_challenge_window: int = Field(validation_alias='smart_rollup_challenge_window_in_blocks') smart_rollup_timeout_period: int = Field(validation_alias='smart_rollup_timeout_period_in_blocks') smart_rollup_max_active_outbox_levels: int = Field(validation_alias='smart_rollup_max_active_outbox_levels') + smart_rollup_max_outbox_messages_per_level: int = Field(validation_alias='smart_rollup_max_outbox_messages_per_level') class ServiceContainer: protocol: ProtocolConstantStorage bridge: BridgeConstantStorage ticket_service: TicketService - inbox_message_service: InboxMessageService + rollup_message_index: RollupMessageIndex outbox_message_service: OutboxMessageService tzkt: TezosTzktDatasource metadata: TzipMetadataDatasource @@ -55,9 +53,13 @@ async def register(self): protocol = ProtocolConstantStorage.model_validate(response) ticket_service = TicketService(tzkt, metadata, bridge) - inbox_message_service = InboxMessageService( + + rollup_message_index = RollupMessageIndex( tzkt=tzkt, + rollup_node=rollup_node, bridge=bridge, + protocol=protocol, + logger=ctx.logger, ) outbox_message_service = OutboxMessageService( tzkt=tzkt, @@ -67,7 +69,7 @@ async def register(self): self.bridge = bridge self.ticket_service = ticket_service - self.inbox_message_service = inbox_message_service + self.rollup_message_index = rollup_message_index self.outbox_message_service = outbox_message_service self.tzkt = tzkt self.metadata = metadata diff --git a/bridge_indexer/handlers/tezos/on_cement_commitment.py b/bridge_indexer/handlers/tezos/on_cement_commitment.py index aa1d587..39c4cab 100644 --- a/bridge_indexer/handlers/tezos/on_cement_commitment.py +++ b/bridge_indexer/handlers/tezos/on_cement_commitment.py @@ -27,10 +27,6 @@ async def on_cement_commitment( }, ) - if not ctx.datasources['tzkt']._signalr_client: - ctx.logger.debug('Skip syncing message with level %d', cement.data.level) - return - ctx.logger.info(f'Cemented Commitment registered: {cement.commitment.hash}') protocol = ctx.container.protocol @@ -45,15 +41,17 @@ async def on_cement_commitment( .only('id') .values_list('id', flat=True) ) - expired = ( - await BridgeWithdrawOperation.filter( - id__in=sealed, - l2_transaction__outbox_message__level__lte=cement.commitment.inbox_level - protocol.smart_rollup_max_active_outbox_levels, + if len(sealed): + expired = ( + await BridgeWithdrawOperation.filter( + id__in=sealed, + outbox_message__level__lte=cement.commitment.inbox_level - protocol.smart_rollup_max_active_outbox_levels, + ) + .only('id') + .values_list('id', flat=True) ) - .only('id') - .values_list('id', flat=True) - ) - await BridgeOperation.filter(id__in=expired).update(status=BridgeOperationStatus.outbox_expired) + if len(expired): + await 
BridgeOperation.filter(id__in=expired).update(status=BridgeOperationStatus.outbox_expired) created = ( await BridgeOperation.filter( @@ -65,19 +63,20 @@ async def on_cement_commitment( .only('id') .values_list('id', flat=True) ) - failed = ( - await BridgeDepositOperation.filter( - id__in=created, - l1_transaction__level__lte=cement.commitment.inbox_level - protocol.smart_rollup_commitment_period + 1, + if len(created): + failed = ( + await BridgeDepositOperation.filter( + id__in=created, + l1_transaction__level__lte=cement.commitment.inbox_level - protocol.smart_rollup_commitment_period + 1, + ) + .only('id') + .values_list('id', flat=True) ) - .only('id') - .values_list('id', flat=True) - ) - await BridgeOperation.filter(id__in=failed).update(status=BridgeOperationStatus.inbox_matching_timeout) + if len(failed): + await BridgeOperation.filter(id__in=failed).update(status=BridgeOperationStatus.inbox_matching_timeout) pending_count = await RollupOutboxMessage.filter( - l1_withdrawals__isnull=True, - l2_withdrawals__isnull=False, + bridge_withdrawals__l1_transaction=None, level__gt=cement.commitment.inbox_level - protocol.smart_rollup_max_active_outbox_levels, ).count() if not pending_count: diff --git a/bridge_indexer/handlers/tezos/on_head.py b/bridge_indexer/handlers/tezos/on_head.py index 5b1436b..4bd1815 100644 --- a/bridge_indexer/handlers/tezos/on_head.py +++ b/bridge_indexer/handlers/tezos/on_head.py @@ -1,11 +1,12 @@ from dipdup.context import HandlerContext from dipdup.models.tezos import TezosHeadBlockData -from bridge_indexer.handlers.bridge_matcher import BridgeMatcher +from bridge_indexer.handlers.rollup_message import RollupMessageIndex async def on_head( ctx: HandlerContext, head: TezosHeadBlockData, ) -> None: - await BridgeMatcher.check_pending_transactions() + rollup_message_index: RollupMessageIndex = ctx.container.rollup_message_index + await rollup_message_index.handle_realtime(head.level) diff --git a/bridge_indexer/handlers/tezos/on_rollup_call.py b/bridge_indexer/handlers/tezos/on_rollup_call.py index e394060..93bebdf 100644 --- a/bridge_indexer/handlers/tezos/on_rollup_call.py +++ b/bridge_indexer/handlers/tezos/on_rollup_call.py @@ -1,12 +1,8 @@ -from datetime import datetime -from datetime import timezone - from dipdup.context import HandlerContext from dipdup.models.tezos import TezosTransaction -from bridge_indexer.handlers.bridge_matcher import BridgeMatcher +from bridge_indexer.handlers.bridge_matcher_locks import BridgeMatcherLocks from bridge_indexer.handlers.rollup_message import InboxParametersHash -from bridge_indexer.handlers.rollup_message import RollupMessageIndex from bridge_indexer.models import TezosDepositOperation from bridge_indexer.types.rollup.tezos_parameters.default import DefaultParameter from bridge_indexer.types.rollup.tezos_storage import RollupStorage @@ -16,9 +12,6 @@ async def on_rollup_call( ctx: HandlerContext, default: TezosTransaction[DefaultParameter, RollupStorage], ) -> None: - rollup_message_index: RollupMessageIndex = ctx.container.rollup_message_index - await rollup_message_index.handle_realtime(default.data.level) - ctx.logger.info(f'Tezos Deposit Transaction found: {default.data.hash}') parameter = default.parameter.root.LL @@ -45,5 +38,4 @@ async def on_rollup_call( ctx.logger.info(f'Tezos Deposit Transaction registered: {deposit.id}') - BridgeMatcher.set_pending_tezos_deposits() - await BridgeMatcher.check_pending_transactions() + BridgeMatcherLocks.set_pending_tezos_deposits() diff --git 
a/bridge_indexer/handlers/tezos/on_rollup_execute.py b/bridge_indexer/handlers/tezos/on_rollup_execute.py index cb6e5a2..680c21e 100644 --- a/bridge_indexer/handlers/tezos/on_rollup_execute.py +++ b/bridge_indexer/handlers/tezos/on_rollup_execute.py @@ -1,9 +1,11 @@ +import asyncio + from dipdup.context import HandlerContext from dipdup.models.tezos import TezosSmartRollupExecute from tortoise.exceptions import DoesNotExist from bridge_indexer.handlers import setup_handler_logger -from bridge_indexer.handlers.bridge_matcher import BridgeMatcher +from bridge_indexer.handlers.bridge_matcher_locks import BridgeMatcherLocks from bridge_indexer.models import TezosWithdrawOperation from bridge_indexer.types.output_proof.output_proof import OutputProofData @@ -27,6 +29,13 @@ async def on_rollup_execute( continue message_hex = operation['output_proof'] break + + try: + assert message_hex + except AssertionError: + ctx.logger.error('Outbox Message execution not found in block operations.') + return + decoder = OutputProofData(bytes.fromhex(message_hex)) output_proof, _ = decoder.unpack() @@ -57,4 +66,4 @@ async def on_rollup_execute( ctx.logger.info(f'Tezos Withdraw Transaction registered: {withdrawal.id}') - BridgeMatcher.set_pending_tezos_withdrawals() + BridgeMatcherLocks.set_pending_tezos_withdrawals() diff --git a/bridge_indexer/handlers/ticket.py b/bridge_indexer/handlers/ticket.py index 79f062f..1add5f1 100644 --- a/bridge_indexer/handlers/ticket.py +++ b/bridge_indexer/handlers/ticket.py @@ -1,10 +1,12 @@ from typing import TYPE_CHECKING from eth_abi import decode +from eth_utils import remove_0x_prefix from pytezos import forge_micheline from pytezos import unforge_micheline from pytezos.michelson.forge import forge_address from web3 import Web3 +from web3._utils.encoding import hex_encode_abi_type if TYPE_CHECKING: from dipdup.datasources.tezos_tzkt import TezosTzktDatasource @@ -13,7 +15,7 @@ from bridge_indexer.models import EtherlinkToken from bridge_indexer.models import TezosTicket from bridge_indexer.models import TezosToken -from bridge_indexer.types.rollup.tezos_parameters.default import Content as TicketContent +from bridge_indexer.types.rollup.tezos_parameters.default import TicketContent as TicketContent class TicketService: @@ -24,9 +26,7 @@ def __init__(self, tzkt: 'TezosTzktDatasource', metadata: 'TzipMetadataDatasourc async def register_fa_tickets(self): for ticketer_address in self._bridge.fa_ticketer_list: - for ticket_data in await self._tzkt.request( - 'GET', f'v1/tickets?ticketer.eq={ticketer_address}' - ): + for ticket_data in await self._tzkt.request('GET', f'v1/tickets?ticketer.eq={ticketer_address}'): await self.fetch_ticket( ticket_data['ticketer']['address'], TicketContent.parse_obj(ticket_data['content']), @@ -39,7 +39,7 @@ async def fetch_ticket(self, ticketer_address, ticket_content: TicketContent): if ticket: return ticket - ticket_metadata = self.get_ticket_metadata(ticket_content) + ticket_metadata: dict[str, str] = self.get_ticket_metadata(ticket_content) asset_id = '_'.join([ticket_metadata['contract_address'], str(ticket_metadata['token_id'])]) token = await TezosToken.get_or_none(pk=asset_id) @@ -63,21 +63,28 @@ async def fetch_ticket(self, ticketer_address, ticket_content: TicketContent): ticket = await TezosTicket.create( hash=ticket_hash, ticketer_address=ticketer_address, - ticket_id=ticket_content.nat, + ticket_id=ticket_content.ticket_id, token=token, + metadata=ticket_content.metadata_hex, + outbox_interface='pair (address %receiver) (pair 
%ticket (address %ticketer) (pair (pair %content (nat %ticket_id) (option %metadata bytes)) (nat %amount)))', + whitelisted=True, ) return ticket async def register_native_ticket(self): for ticket_data in await self._tzkt.request('GET', f'v1/tickets?ticketer={self._bridge.native_ticketer}'): - ticket_hash = self.get_ticket_hash(self._bridge.native_ticketer, TicketContent.parse_obj(ticket_data['content'])) + ticket_content = TicketContent.parse_obj(ticket_data['content']) + ticket_hash = self.get_ticket_hash(self._bridge.native_ticketer, ticket_content) xtz = await TezosToken.get(pk='xtz') ticket = await TezosTicket.create( hash=ticket_hash, ticketer_address=self._bridge.native_ticketer, - ticket_id=ticket_data['content']['nat'], + ticket_id=ticket_content.ticket_id, token=xtz, + metadata=ticket_content.metadata_hex, + outbox_interface='pair (address %receiver) (pair %ticket (address %ticketer) (pair (pair %content (nat %ticket_id) (option %metadata bytes)) (nat %amount)))', + whitelisted=True, ) await EtherlinkToken.create( id=xtz.id, @@ -89,8 +96,9 @@ async def register_native_ticket(self): return ticket raise ValueError('No Native Ticketer found') - def get_ticket_metadata(self, ticket_content: TicketContent) -> dict: - ticket_metadata_forged = bytes.fromhex(ticket_content.bytes) + @staticmethod + def get_ticket_metadata(ticket_content: TicketContent) -> dict: + ticket_metadata_forged = bytes.fromhex(ticket_content.metadata_hex) ticket_metadata_map = unforge_micheline(ticket_metadata_forged[1:]) ticket_metadata = {} for pair in ticket_metadata_map: @@ -102,32 +110,55 @@ def get_ticket_metadata(self, ticket_content: TicketContent) -> dict: ticket_metadata['token_id'] = 0 return ticket_metadata - def get_ticket_hash(self, ticketer_address, ticket_content: TicketContent) -> int: - if ticket_content.bytes: - bytes_micheline = { + @staticmethod + def get_ticket_content_bytes( + ticketer_address: str, + ticket_content: TicketContent, + ) -> bytes: + if ticket_content.metadata_hex: + ticket_metadata_micheline = { 'prim': 'Some', 'args': [ { - 'bytes': ticket_content.bytes, + 'bytes': ticket_content.metadata_hex, } ], } else: - bytes_micheline = {'prim': 'None'} - ticket_content_micheline = { + ticket_metadata_micheline = {'prim': 'None'} + ticket_content_micheline: dict = { 'prim': 'Pair', 'args': [ - {'int': ticket_content.nat}, - bytes_micheline, + {'int': ticket_content.ticket_id}, + ticket_metadata_micheline, ], } - data = Web3.solidity_keccak( - ['bytes22', 'bytes'], - [ + abi_types = ['bytes22', 'bytes'] + normalized_values = Web3.normalize_values( + w3=Web3(), + abi_types=abi_types, + values=[ forge_address(ticketer_address), forge_micheline(ticket_content_micheline), ], ) - ticket_hash = decode(['uint256'], data)[0] + + ticket_content_hex = ''.join( + remove_0x_prefix(hex_encode_abi_type(abi_type, value)) for abi_type, value in zip(abi_types, normalized_values) + ) + + return bytes.fromhex(ticket_content_hex) + + def get_ticket_hash( + self, + ticketer_address: str, + ticket_content: TicketContent, + ) -> int: + + ticket_content_bytes = self.get_ticket_content_bytes(ticketer_address, ticket_content) + + digest = Web3.keccak(ticket_content_bytes) + ticket_hash = decode(['uint256'], digest)[0] + return ticket_hash diff --git a/bridge_indexer/hooks/bridge_matcher.py b/bridge_indexer/hooks/bridge_matcher.py new file mode 100644 index 0000000..fc43bcb --- /dev/null +++ b/bridge_indexer/hooks/bridge_matcher.py @@ -0,0 +1,30 @@ +import asyncio +import logging + +from dipdup.context import 
HookContext + +from bridge_indexer.handlers.bridge_matcher import BridgeMatcher + +logger = logging.getLogger(__name__) + +async def bridge_matcher( + ctx: HookContext, +) -> None: + while True: + await asyncio.sleep(.2) + + if BridgeMatcher.matcher_lock.locked(): + continue + with BridgeMatcher.matcher_lock: + await BridgeMatcher.check_pending_tezos_deposits() + + await BridgeMatcher.check_pending_inbox() + + await BridgeMatcher.check_pending_etherlink_deposits() + await BridgeMatcher.check_pending_etherlink_xtz_deposits() + + await BridgeMatcher.check_pending_etherlink_withdrawals() + + await BridgeMatcher.check_pending_outbox() + + await BridgeMatcher.check_pending_tezos_withdrawals() diff --git a/bridge_indexer/hooks/on_restart.py b/bridge_indexer/hooks/on_restart.py index b91e4c0..fb09453 100644 --- a/bridge_indexer/hooks/on_restart.py +++ b/bridge_indexer/hooks/on_restart.py @@ -1,6 +1,7 @@ from dipdup.context import HookContext from bridge_indexer.handlers.bridge_matcher import BridgeMatcher +from bridge_indexer.handlers.bridge_matcher_locks import BridgeMatcherLocks from bridge_indexer.handlers.service_container import ServiceContainer @@ -14,9 +15,11 @@ async def on_restart( ctx.logger.info('Start of Rollup Message Index syncing.') await ctx.container.rollup_message_index.synchronize() ctx.logger.info('Rollup Message Index syncing complete. Switch to realtime indexing mode.') - BridgeMatcher.set_pending_tezos_deposits() - BridgeMatcher.set_pending_etherlink_withdrawals() - BridgeMatcher.set_pending_etherlink_deposits() - BridgeMatcher.set_pending_etherlink_xtz_deposits() - BridgeMatcher.set_pending_tezos_withdrawals() - await BridgeMatcher.check_pending_transactions() + + BridgeMatcherLocks.set_pending_tezos_deposits() + BridgeMatcherLocks.set_pending_inbox() + BridgeMatcherLocks.set_pending_etherlink_deposits() + BridgeMatcherLocks.set_pending_etherlink_xtz_deposits() + BridgeMatcherLocks.set_pending_etherlink_withdrawals() + BridgeMatcherLocks.set_pending_outbox() + BridgeMatcherLocks.set_pending_tezos_withdrawals() diff --git a/bridge_indexer/hooks/on_synchronized.py b/bridge_indexer/hooks/on_synchronized.py index 3649808..05d21b1 100644 --- a/bridge_indexer/hooks/on_synchronized.py +++ b/bridge_indexer/hooks/on_synchronized.py @@ -1,12 +1,7 @@ from dipdup.context import HookContext -from bridge_indexer.handlers.bridge_matcher import BridgeMatcher - async def on_synchronized( ctx: HookContext, ) -> None: await ctx.execute_sql('on_synchronized') - - await BridgeMatcher.check_pending_transactions() - await ctx.container.outbox_message_service.update_proof() diff --git a/bridge_indexer/models/__init__.py b/bridge_indexer/models/__init__.py index 8cac8c0..3ee568d 100644 --- a/bridge_indexer/models/__init__.py +++ b/bridge_indexer/models/__init__.py @@ -174,8 +174,14 @@ class Meta: table = 'l1_withdrawal' model = 'models.TezosWithdrawOperation' - bridge_withdrawals: fields.ReverseRelation['BridgeWithdrawOperation'] + outbox_message: ForeignKeyFieldInstance[RollupOutboxMessage] = fields.ForeignKeyField( + model_name=RollupOutboxMessage.Meta.model, + source_field='outbox_message_id', + to_field='id', + index=True, + ) + bridge_withdrawals: fields.ReverseRelation['BridgeWithdrawOperation'] class AbstractEtherlinkOperation(AbstractBlockchainOperation): class Meta: @@ -338,7 +344,6 @@ class Meta: source_field='outbox_message_id', to_field='id', null=True, - index=True, ) diff --git a/bridge_indexer/sql/bridge_matcher/.keep b/bridge_indexer/sql/bridge_matcher/.keep new file mode 100644 
index 0000000..e69de29 diff --git a/bridge_indexer/types/output_proof/__init__.py b/bridge_indexer/types/output_proof/__init__.py index ed6a8c1..e69de29 100644 --- a/bridge_indexer/types/output_proof/__init__.py +++ b/bridge_indexer/types/output_proof/__init__.py @@ -1,16 +0,0 @@ -# from dipdup.pysignalr import Message -# from dipdup.pysignalr import WebsocketMessage -# from dipdup.pysignalr import WebsocketProtocol -# from dipdup.pysignalr import WebsocketTransport -# -# def on_message(): -# breakpoint() - -# ws_client = WebsocketTransport( -# url=url, -# protocol=WebsocketProtocol(), -# callback=on_message, -# skip_negotiation=True, -# connection_timeout=self._http_config.connection_timeout, -# ) -# ws_client. diff --git a/bridge_indexer/types/output_proof/decoder.py b/bridge_indexer/types/output_proof/decoder.py index f314305..6060214 100644 --- a/bridge_indexer/types/output_proof/decoder.py +++ b/bridge_indexer/types/output_proof/decoder.py @@ -73,13 +73,9 @@ def from_type_str(cls, abi_type, registry): decoder=FixedSignedIntegerDecoder, ) registry.register_decoder( - lookup=BaseEquals(base='hex', with_sub=True), + lookup=BaseEquals(base='hex'), decoder=BytesToTextDecoder, ) -# registry.register_decoder( -# lookup=BaseEquals(base='hex'), -# decoder=BytesToHexDecoder, -# ) default_codec = ABICodec(registry) decode = default_codec.decode diff --git a/docker-compose.yml b/docker-compose.yml index b86d156..8efcff0 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,8 +1,10 @@ services: db: image: postgres:15 - volumes: - - db:/var/lib/postgresql/data + + tmpfs: + - /var/lib/postgresql + restart: always ports: - "127.0.0.1:${POSTGRES_PORT:-5432}:5432" @@ -32,9 +34,3 @@ services: - HASURA_GRAPHQL_ENABLE_TELEMETRY=false - HASURA_GRAPHQL_ADMIN_SECRET=${ADMIN_SECRET} - HASURA_GRAPHQL_UNAUTHORIZED_ROLE=user - -volumes: - db: - driver_opts: - type: tmpfs - device: tmpfs diff --git a/poetry.lock b/poetry.lock index 2a83248..9e4539e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -13,87 +13,87 @@ files = [ [[package]] name = "aiohttp" -version = "3.10.0" +version = "3.10.1" description = "Async http client/server framework (asyncio)" optional = false python-versions = ">=3.8" files = [ - {file = "aiohttp-3.10.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:68ab608118e212f56feef44d4785aa90b713042da301f26338f36497b481cd79"}, - {file = "aiohttp-3.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:64a117c16273ca9f18670f33fc7fd9604b9f46ddb453ce948262889a6be72868"}, - {file = "aiohttp-3.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:54076a25f32305e585a3abae1f0ad10646bec539e0e5ebcc62b54ee4982ec29f"}, - {file = "aiohttp-3.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71c76685773444d90ae83874433505ed800e1706c391fdf9e57cc7857611e2f4"}, - {file = "aiohttp-3.10.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bdda86ab376f9b3095a1079a16fbe44acb9ddde349634f1c9909d13631ff3bcf"}, - {file = "aiohttp-3.10.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d6dcd1d21da5ae1416f69aa03e883a51e84b6c803b8618cbab341ac89a85b9e"}, - {file = "aiohttp-3.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06ef0135d7ab7fb0284342fbbf8e8ddf73b7fee8ecc55f5c3a3d0a6b765e6d8b"}, - {file = "aiohttp-3.10.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ccab9381f38c669bb9254d848f3b41a3284193b3e274a34687822f98412097e9"}, - 
{file = "aiohttp-3.10.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:947da3aee057010bc750b7b4bb65cbd01b0bdb7c4e1cf278489a1d4a1e9596b3"}, - {file = "aiohttp-3.10.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:5268b35fee7eb754fb5b3d0f16a84a2e9ed21306f5377f3818596214ad2d7714"}, - {file = "aiohttp-3.10.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ff25d988fd6ce433b5c393094a5ca50df568bdccf90a8b340900e24e0d5fb45c"}, - {file = "aiohttp-3.10.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:594b4b4f1dfe8378b4a0342576dc87a930c960641159f5ae83843834016dbd59"}, - {file = "aiohttp-3.10.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c8820dad615cd2f296ed3fdea8402b12663ac9e5ea2aafc90ef5141eb10b50b8"}, - {file = "aiohttp-3.10.0-cp310-cp310-win32.whl", hash = "sha256:ab1d870403817c9a0486ca56ccbc0ebaf85d992277d48777faa5a95e40e5bcca"}, - {file = "aiohttp-3.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:563705a94ea3af43467167f3a21c665f3b847b2a0ae5544fa9e18df686a660da"}, - {file = "aiohttp-3.10.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:13679e11937d3f37600860de1f848e2e062e2b396d3aa79b38c89f9c8ab7e791"}, - {file = "aiohttp-3.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8c66a1aadafbc0bd7d648cb7fcb3860ec9beb1b436ce3357036a4d9284fcef9a"}, - {file = "aiohttp-3.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b7e3545b06aae925f90f06402e05cfb9c62c6409ce57041932163b09c48daad6"}, - {file = "aiohttp-3.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:effafe5144aa32f0388e8f99b1b2692cf094ea2f6b7ceca384b54338b77b1f50"}, - {file = "aiohttp-3.10.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a04f2c8d41821a2507b49b2694c40495a295b013afb0cc7355b337980b47c546"}, - {file = "aiohttp-3.10.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6dbfac556219d884d50edc6e1952a93545c2786193f00f5521ec0d9d464040ab"}, - {file = "aiohttp-3.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a65472256c5232681968deeea3cd5453aa091c44e8db09f22f1a1491d422c2d9"}, - {file = "aiohttp-3.10.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:941366a554e566efdd3f042e17a9e461a36202469e5fd2aee66fe3efe6412aef"}, - {file = "aiohttp-3.10.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:927b4aca6340301e7d8bb05278d0b6585b8633ea852b7022d604a5df920486bf"}, - {file = "aiohttp-3.10.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:34adb8412e736a5d0df6d1fccdf71599dfb07a63add241a94a189b6364e997f1"}, - {file = "aiohttp-3.10.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:43c60d9b332a01ee985f080f639f3e56abcfb95ec1320013c94083c3b6a2e143"}, - {file = "aiohttp-3.10.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:3f49edf7c5cd2987634116e1b6a0ee2438fca17f7c4ee480ff41decb76cf6158"}, - {file = "aiohttp-3.10.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9784246431eaf9d651b3cc06f9c64f9a9f57299f4971c5ea778fa0b81074ef13"}, - {file = "aiohttp-3.10.0-cp311-cp311-win32.whl", hash = "sha256:bec91402df78b897a47b66b9c071f48051cea68d853d8bc1d4404896c6de41ae"}, - {file = "aiohttp-3.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:25a9924343bf91b0c5082cae32cfc5a1f8787ac0433966319ec07b0ed4570722"}, - {file = "aiohttp-3.10.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:21dab4a704c68dc7bc2a1219a4027158e8968e2079f1444eda2ba88bc9f2895f"}, - {file = "aiohttp-3.10.0-cp312-cp312-macosx_10_9_x86_64.whl", 
hash = "sha256:872c0dcaccebd5733d535868fe2356aa6939f5827dcea7a8b9355bb2eff6f56e"}, - {file = "aiohttp-3.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f381424dbce313bb5a666a215e7a9dcebbc533e9a2c467a1f0c95279d24d1fa7"}, - {file = "aiohttp-3.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ca48e9f092a417c6669ee8d3a19d40b3c66dde1a2ae0d57e66c34812819b671"}, - {file = "aiohttp-3.10.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bbe2f6d0466f5c59c7258e0745c20d74806a1385fbb7963e5bbe2309a11cc69b"}, - {file = "aiohttp-3.10.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:03799a95402a7ed62671c4465e1eae51d749d5439dbc49edb6eee52ea165c50b"}, - {file = "aiohttp-3.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5549c71c35b5f057a4eebcc538c41299826f7813f28880722b60e41c861a57ec"}, - {file = "aiohttp-3.10.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f6fa7a42b78d8698491dc4ad388169de54cca551aa9900f750547372de396277"}, - {file = "aiohttp-3.10.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:77bbf0a2f6fefac6c0db1792c234f577d80299a33ce7125467439097cf869198"}, - {file = "aiohttp-3.10.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:34eaf5cfcc979846d73571b1a4be22cad5e029d55cdbe77cdc7545caa4dcb925"}, - {file = "aiohttp-3.10.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4f1de31a585344a106db43a9c3af2e15bb82e053618ff759f1fdd31d82da38eb"}, - {file = "aiohttp-3.10.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:f3a1ea61d96146e9b9e5597069466e2e4d9e01e09381c5dd51659f890d5e29e7"}, - {file = "aiohttp-3.10.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:73c01201219eb039a828bb58dcc13112eec2fed6eea718356316cd552df26e04"}, - {file = "aiohttp-3.10.0-cp312-cp312-win32.whl", hash = "sha256:33e915971eee6d2056d15470a1214e4e0f72b6aad10225548a7ab4c4f54e2db7"}, - {file = "aiohttp-3.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:2dc75da06c35a7b47a88ceadbf993a53d77d66423c2a78de8c6f9fb41ec35687"}, - {file = "aiohttp-3.10.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f1bc4d68b83966012813598fe39b35b4e6019b69d29385cf7ec1cb08e1ff829b"}, - {file = "aiohttp-3.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d9b8b31c057a0b7bb822a159c490af05cb11b8069097f3236746a78315998afa"}, - {file = "aiohttp-3.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:10f0d7894ddc6ff8f369e3fdc082ef1f940dc1f5b9003cd40945d24845477220"}, - {file = "aiohttp-3.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:72de8ffba4a27e3c6e83e58a379fc4fe5548f69f9b541fde895afb9be8c31658"}, - {file = "aiohttp-3.10.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd36d0f0afc2bd84f007cedd2d9a449c3cf04af471853a25eb71f28bc2e1a119"}, - {file = "aiohttp-3.10.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f64d503c661864866c09806ac360b95457f872d639ca61719115a9f389b2ec90"}, - {file = "aiohttp-3.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31616121369bc823791056c632f544c6c8f8d1ceecffd8bf3f72ef621eaabf49"}, - {file = "aiohttp-3.10.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f76c12abb88b7ee64b3f9ae72f0644af49ff139067b5add142836dab405d60d4"}, - {file = "aiohttp-3.10.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = 
"sha256:6c99eef30a7e98144bcf44d615bc0f445b3a3730495fcc16124cb61117e1f81e"}, - {file = "aiohttp-3.10.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:39e7ec718e7a1971a5d98357e3e8c0529477d45c711d32cd91999dc8d8404e1e"}, - {file = "aiohttp-3.10.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:f1cef548ee4e84264b78879de0c754bbe223193c6313beb242ce862f82eab184"}, - {file = "aiohttp-3.10.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:f98f036eab11d2f90cdd01b9d1410de9d7eb520d070debeb2edadf158b758431"}, - {file = "aiohttp-3.10.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:cc4376ff537f7d2c1e98f97f6d548e99e5d96078b0333c1d3177c11467b972de"}, - {file = "aiohttp-3.10.0-cp38-cp38-win32.whl", hash = "sha256:ebedc51ee6d39f9ea5e26e255fd56a7f4e79a56e77d960f9bae75ef4f95ed57f"}, - {file = "aiohttp-3.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:aad87626f31a85fd4af02ba7fd6cc424b39d4bff5c8677e612882649da572e47"}, - {file = "aiohttp-3.10.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1dc95c5e2a5e60095f1bb51822e3b504e6a7430c9b44bff2120c29bb876c5202"}, - {file = "aiohttp-3.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1c83977f7b6f4f4a96fab500f5a76d355f19f42675224a3002d375b3fb309174"}, - {file = "aiohttp-3.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8cedc48d36652dd3ac40e5c7c139d528202393e341a5e3475acedb5e8d5c4c75"}, - {file = "aiohttp-3.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b099fbb823efed3c1d736f343ac60d66531b13680ee9b2669e368280f41c2b8"}, - {file = "aiohttp-3.10.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d583755ddb9c97a2da1322f17fc7d26792f4e035f472d675e2761c766f94c2ff"}, - {file = "aiohttp-3.10.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a03a4407bdb9ae815f0d5a19df482b17df530cf7bf9c78771aa1c713c37ff1f"}, - {file = "aiohttp-3.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcb6e65f6ea7caa0188e36bebe9e72b259d3d525634758c91209afb5a6cbcba7"}, - {file = "aiohttp-3.10.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b6612c6ed3147a4a2d6463454b94b877566b38215665be4c729cd8b7bdce15b4"}, - {file = "aiohttp-3.10.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0b0c0148d2a69b82ffe650c2ce235b431d49a90bde7dd2629bcb40314957acf6"}, - {file = "aiohttp-3.10.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:0d85a173b4dbbaaad1900e197181ea0fafa617ca6656663f629a8a372fdc7d06"}, - {file = "aiohttp-3.10.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:12c43dace645023583f3dd2337dfc3aa92c99fb943b64dcf2bc15c7aa0fb4a95"}, - {file = "aiohttp-3.10.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:33acb0d9bf12cdc80ceec6f5fda83ea7990ce0321c54234d629529ca2c54e33d"}, - {file = "aiohttp-3.10.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:91e0b76502205484a4d1d6f25f461fa60fe81a7987b90e57f7b941b0753c3ec8"}, - {file = "aiohttp-3.10.0-cp39-cp39-win32.whl", hash = "sha256:1ebd8ed91428ffbe8b33a5bd6f50174e11882d5b8e2fe28670406ab5ee045ede"}, - {file = "aiohttp-3.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:0433795c4a8bafc03deb3e662192250ba5db347c41231b0273380d2f53c9ea0b"}, - {file = "aiohttp-3.10.0.tar.gz", hash = "sha256:e8dd7da2609303e3574c95b0ec9f1fd49647ef29b94701a2862cceae76382e1d"}, + {file = "aiohttp-3.10.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:47b4c2412960e64d97258f40616efddaebcb34ff664c8a972119ed38fac2a62c"}, + {file = 
"aiohttp-3.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e7dbf637f87dd315fa1f36aaed8afa929ee2c607454fb7791e74c88a0d94da59"}, + {file = "aiohttp-3.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c8fb76214b5b739ce59e2236a6489d9dc3483649cfd6f563dbf5d8e40dbdd57d"}, + {file = "aiohttp-3.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c577cdcf8f92862363b3d598d971c6a84ed8f0bf824d4cc1ce70c2fb02acb4a"}, + {file = "aiohttp-3.10.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:777e23609899cb230ad2642b4bdf1008890f84968be78de29099a8a86f10b261"}, + {file = "aiohttp-3.10.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b07286a1090483799599a2f72f76ac396993da31f6e08efedb59f40876c144fa"}, + {file = "aiohttp-3.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9db600a86414a9a653e3c1c7f6a2f6a1894ab8f83d11505247bd1b90ad57157"}, + {file = "aiohttp-3.10.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01c3f1eb280008e51965a8d160a108c333136f4a39d46f516c64d2aa2e6a53f2"}, + {file = "aiohttp-3.10.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f5dd109a925fee4c9ac3f6a094900461a2712df41745f5d04782ebcbe6479ccb"}, + {file = "aiohttp-3.10.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:8c81ff4afffef9b1186639506d70ea90888218f5ddfff03870e74ec80bb59970"}, + {file = "aiohttp-3.10.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:2a384dfbe8bfebd203b778a30a712886d147c61943675f4719b56725a8bbe803"}, + {file = "aiohttp-3.10.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:b9fb6508893dc31cfcbb8191ef35abd79751db1d6871b3e2caee83959b4d91eb"}, + {file = "aiohttp-3.10.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:88596384c3bec644a96ae46287bb646d6a23fa6014afe3799156aef42669c6bd"}, + {file = "aiohttp-3.10.1-cp310-cp310-win32.whl", hash = "sha256:68164d43c580c2e8bf8e0eb4960142919d304052ccab92be10250a3a33b53268"}, + {file = "aiohttp-3.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:d6bbe2c90c10382ca96df33b56e2060404a4f0f88673e1e84b44c8952517e5f3"}, + {file = "aiohttp-3.10.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f6979b4f20d3e557a867da9d9227de4c156fcdcb348a5848e3e6190fd7feb972"}, + {file = "aiohttp-3.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:03c0c380c83f8a8d4416224aafb88d378376d6f4cadebb56b060688251055cd4"}, + {file = "aiohttp-3.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1c2b104e81b3c3deba7e6f5bc1a9a0e9161c380530479970766a6655b8b77c7c"}, + {file = "aiohttp-3.10.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b023b68c61ab0cd48bd38416b421464a62c381e32b9dc7b4bdfa2905807452a4"}, + {file = "aiohttp-3.10.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a07c76a82390506ca0eabf57c0540cf5a60c993c442928fe4928472c4c6e5e6"}, + {file = "aiohttp-3.10.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:41d8dab8c64ded1edf117d2a64f353efa096c52b853ef461aebd49abae979f16"}, + {file = "aiohttp-3.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:615348fab1a9ef7d0960a905e83ad39051ae9cb0d2837da739b5d3a7671e497a"}, + {file = "aiohttp-3.10.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:256ee6044214ee9d66d531bb374f065ee94e60667d6bbeaa25ca111fc3997158"}, + {file = 
"aiohttp-3.10.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b7d5bb926805022508b7ddeaad957f1fce7a8d77532068d7bdb431056dc630cd"}, + {file = "aiohttp-3.10.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:028faf71b338f069077af6315ad54281612705d68889f5d914318cbc2aab0d50"}, + {file = "aiohttp-3.10.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:5c12310d153b27aa630750be44e79313acc4e864c421eb7d2bc6fa3429c41bf8"}, + {file = "aiohttp-3.10.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:de1a91d5faded9054957ed0a9e01b9d632109341942fc123947ced358c5d9009"}, + {file = "aiohttp-3.10.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9c186b270979fb1dee3ababe2d12fb243ed7da08b30abc83ebac3a928a4ddb15"}, + {file = "aiohttp-3.10.1-cp311-cp311-win32.whl", hash = "sha256:4a9ce70f5e00380377aac0e568abd075266ff992be2e271765f7b35d228a990c"}, + {file = "aiohttp-3.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:a77c79bac8d908d839d32c212aef2354d2246eb9deb3e2cb01ffa83fb7a6ea5d"}, + {file = "aiohttp-3.10.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:2212296cdb63b092e295c3e4b4b442e7b7eb41e8a30d0f53c16d5962efed395d"}, + {file = "aiohttp-3.10.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4dcb127ca3eb0a61205818a606393cbb60d93b7afb9accd2fd1e9081cc533144"}, + {file = "aiohttp-3.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cb8b79a65332e1a426ccb6290ce0409e1dc16b4daac1cc5761e059127fa3d134"}, + {file = "aiohttp-3.10.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68cc24f707ed9cb961f6ee04020ca01de2c89b2811f3cf3361dc7c96a14bfbcc"}, + {file = "aiohttp-3.10.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9cb54f5725b4b37af12edf6c9e834df59258c82c15a244daa521a065fbb11717"}, + {file = "aiohttp-3.10.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:51d03e948e53b3639ce4d438f3d1d8202898ec6655cadcc09ec99229d4adc2a9"}, + {file = "aiohttp-3.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:786299d719eb5d868f161aeec56d589396b053925b7e0ce36e983d30d0a3e55c"}, + {file = "aiohttp-3.10.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abda4009a30d51d3f06f36bc7411a62b3e647fa6cc935ef667e3e3d3a7dd09b1"}, + {file = "aiohttp-3.10.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:67f7639424c313125213954e93a6229d3a1d386855d70c292a12628f600c7150"}, + {file = "aiohttp-3.10.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:8e5a26d7aac4c0d8414a347da162696eea0629fdce939ada6aedf951abb1d745"}, + {file = "aiohttp-3.10.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:120548d89f14b76a041088b582454d89389370632ee12bf39d919cc5c561d1ca"}, + {file = "aiohttp-3.10.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:f5293726943bdcea24715b121d8c4ae12581441d22623b0e6ab12d07ce85f9c4"}, + {file = "aiohttp-3.10.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1f8605e573ed6c44ec689d94544b2c4bb1390aaa723a8b5a2cc0a5a485987a68"}, + {file = "aiohttp-3.10.1-cp312-cp312-win32.whl", hash = "sha256:e7168782621be4448d90169a60c8b37e9b0926b3b79b6097bc180c0a8a119e73"}, + {file = "aiohttp-3.10.1-cp312-cp312-win_amd64.whl", hash = "sha256:8fbf8c0ded367c5c8eaf585f85ca8dd85ff4d5b73fb8fe1e6ac9e1b5e62e11f7"}, + {file = "aiohttp-3.10.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:54b7f4a20d7cc6bfa4438abbde069d417bb7a119f870975f78a2b99890226d55"}, + {file = "aiohttp-3.10.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:2fa643ca990323db68911b92f3f7a0ca9ae300ae340d0235de87c523601e58d9"}, + {file = "aiohttp-3.10.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d8311d0d690487359fe2247ec5d2cac9946e70d50dced8c01ce9e72341c21151"}, + {file = "aiohttp-3.10.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:222821c60b8f6a64c5908cb43d69c0ee978a1188f6a8433d4757d39231b42cdb"}, + {file = "aiohttp-3.10.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e7b55d9ede66af7feb6de87ff277e0ccf6d51c7db74cc39337fe3a0e31b5872d"}, + {file = "aiohttp-3.10.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a95151a5567b3b00368e99e9c5334a919514f60888a6b6d2054fea5e66e527e"}, + {file = "aiohttp-3.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e9e9171d2fe6bfd9d3838a6fe63b1e91b55e0bf726c16edf265536e4eafed19"}, + {file = "aiohttp-3.10.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a57e73f9523e980f6101dc9a83adcd7ac0006ea8bf7937ca3870391c7bb4f8ff"}, + {file = "aiohttp-3.10.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:0df51a3d70a2bfbb9c921619f68d6d02591f24f10e9c76de6f3388c89ed01de6"}, + {file = "aiohttp-3.10.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:b0de63ff0307eac3961b4af74382d30220d4813f36b7aaaf57f063a1243b4214"}, + {file = "aiohttp-3.10.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:8db9b749f589b5af8e4993623dbda6716b2b7a5fcb0fa2277bf3ce4b278c7059"}, + {file = "aiohttp-3.10.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:6b14c19172eb53b63931d3e62a9749d6519f7c121149493e6eefca055fcdb352"}, + {file = "aiohttp-3.10.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:5cd57ad998e3038aa87c38fe85c99ed728001bf5dde8eca121cadee06ee3f637"}, + {file = "aiohttp-3.10.1-cp38-cp38-win32.whl", hash = "sha256:df31641e3f02b77eb3c5fb63c0508bee0fc067cf153da0e002ebbb0db0b6d91a"}, + {file = "aiohttp-3.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:93094eba50bc2ad4c40ff4997ead1fdcd41536116f2e7d6cfec9596a8ecb3615"}, + {file = "aiohttp-3.10.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:440954ddc6b77257e67170d57b1026aa9545275c33312357472504eef7b4cc0b"}, + {file = "aiohttp-3.10.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f9f8beed277488a52ee2b459b23c4135e54d6a819eaba2e120e57311015b58e9"}, + {file = "aiohttp-3.10.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d8a8221a63602008550022aa3a4152ca357e1dde7ab3dd1da7e1925050b56863"}, + {file = "aiohttp-3.10.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a702bd3663b5cbf3916e84bf332400d24cdb18399f0877ca6b313ce6c08bfb43"}, + {file = "aiohttp-3.10.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1988b370536eb14f0ce7f3a4a5b422ab64c4e255b3f5d7752c5f583dc8c967fc"}, + {file = "aiohttp-3.10.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7ccf1f0a304352c891d124ac1a9dea59b14b2abed1704aaa7689fc90ef9c5be1"}, + {file = "aiohttp-3.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc3ea6ef2a83edad84bbdb5d96e22f587b67c68922cd7b6f9d8f24865e655bcf"}, + {file = "aiohttp-3.10.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:89b47c125ab07f0831803b88aeb12b04c564d5f07a1c1a225d4eb4d2f26e8b5e"}, + {file = "aiohttp-3.10.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:21778552ef3d44aac3278cc6f6d13a6423504fa5f09f2df34bfe489ed9ded7f5"}, + {file = 
"aiohttp-3.10.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:bde0693073fd5e542e46ea100aa6c1a5d36282dbdbad85b1c3365d5421490a92"}, + {file = "aiohttp-3.10.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:bf66149bb348d8e713f3a8e0b4f5b952094c2948c408e1cfef03b49e86745d60"}, + {file = "aiohttp-3.10.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:587237571a85716d6f71f60d103416c9df7d5acb55d96d3d3ced65f39bff9c0c"}, + {file = "aiohttp-3.10.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:bfe33cba6e127d0b5b417623c9aa621f0a69f304742acdca929a9fdab4593693"}, + {file = "aiohttp-3.10.1-cp39-cp39-win32.whl", hash = "sha256:9fbff00646cf8211b330690eb2fd64b23e1ce5b63a342436c1d1d6951d53d8dd"}, + {file = "aiohttp-3.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:5951c328f9ac42d7bce7a6ded535879bc9ae13032818d036749631fa27777905"}, + {file = "aiohttp-3.10.1.tar.gz", hash = "sha256:8b0d058e4e425d3b45e8ec70d49b402f4d6b21041e674798b1f91ba027c73f28"}, ] [package.dependencies]