├── .github ├── CODEOWNERS ├── SECURITY.md ├── workflows │ ├── auto-assign-pr.yml │ ├── lint-actions.yml │ ├── claude.yml │ ├── dependencies.yml │ └── lint.yml ├── dependabot.yml └── ISSUE_TEMPLATE │ ├── new-issue-for-tracking.md │ └── bug_report.md ├── tools ├── relay-tools │ ├── src │ │ ├── lib.rs │ │ └── bin │ │ │ ├── recast │ │ │ ├── mod.rs │ │ │ └── README.md │ │ │ └── chainwalker │ │ │ └── README.md │ └── Cargo.toml └── xtask │ ├── Cargo.toml │ └── src │ └── flags.rs ├── clippy.toml ├── tests ├── fixtures │ └── mod.rs ├── storage │ └── mod.rs ├── tests.rs ├── e2e │ ├── cases │ │ ├── snapshots │ │ │ ├── tests__e2e__cases__rpc_snap__upgrade_account.snap │ │ │ ├── tests__e2e__cases__rpc_snap__status_unverified.snap │ │ │ ├── tests__e2e__cases__rpc_snap__contact_info_unverified.snap │ │ │ ├── tests__e2e__cases__rpc_snap__send_prepared_calls.snap │ │ │ ├── tests__e2e__cases__rpc_snap__status_email_only.snap │ │ │ ├── tests__e2e__cases__rpc_snap__contact_info_email_only.snap │ │ │ ├── tests__e2e__cases__rpc_snap__status_unverified_email.snap │ │ │ ├── tests__e2e__cases__rpc_snap__contact_info_unverified_email.snap │ │ │ ├── tests__e2e__cases__rpc_snap__onramp_status.snap │ │ │ ├── tests__e2e__cases__rpc_snap__status_verified.snap │ │ │ ├── tests__e2e__cases__rpc_snap__add_faucet_funds.snap │ │ │ ├── tests__e2e__cases__rpc_snap__health.snap │ │ │ ├── tests__e2e__cases__rpc_snap__contact_info_verified.snap │ │ │ ├── tests__e2e__cases__rpc_snap__get_authorization.snap │ │ │ ├── tests__e2e__cases__rpc_snap__get_keys.snap │ │ │ └── tests__e2e__cases__rpc_snap__verify_signature.snap │ │ ├── mod.rs │ │ ├── errors.rs │ │ ├── metrics.rs │ │ ├── signature.rs │ │ └── cli.rs │ ├── layerzero │ │ └── contracts │ │ │ ├── foundry.toml │ │ │ ├── .gitignore │ │ │ ├── remappings.txt │ │ │ ├── build.sh │ │ │ └── README.md │ ├── constants.rs │ ├── common_calls.rs │ └── config.rs └── assets │ ├── rpc-cache-keyfile │ └── config │ └── base_sepolia.env ├── migrations ├── 0010_sent_at.sql 
├── 0018_new_bundle_status.sql ├── 0021_fix_constraint.sql ├── 0012_receipt.sql ├── 0002_tx_chain_id_and_error.sql ├── 0028_fee_payer_statuses.sql ├── 0004_multiple_transactions.sql ├── 0020_layerzero_bundle_statuses.sql ├── 0005_queue.sql ├── 0008_intent.sql ├── 0006_entrypoint.sql ├── 0007_orchestrator.sql ├── 0014_bundle_transactions_mapping.sql ├── 0031_asset_diffs.sql ├── 0003_indexes.sql ├── 0024_precalls.sql ├── 0030_historical_usd_prices.sql ├── 0011_emails.sql ├── 0015_update_intent_structure.sql ├── 0017_bridge.sql ├── 0016_liquidity.sql ├── 0023_layerzero_batch_tracking.sql ├── 0026_phones.sql ├── 0009_preCalls.sql ├── 0022_pull_gas_tracking.sql ├── 0001_initial.sql ├── 0019_refunds.sql ├── 0025_authorization_list.sql ├── 0013_pending_bundles.sql └── 0029_bundle_history_indexes.sql ├── .gitmodules ├── .cargo └── config.toml ├── .gitattributes ├── src ├── rpc │ ├── mod.rs │ └── extra_fee.rs ├── serde │ ├── mod.rs │ ├── fn_selector.rs │ ├── duration.rs │ ├── timestamp.rs │ ├── trace_context.rs │ ├── key_role.rs │ └── hash_map.rs ├── liquidity │ └── mod.rs ├── price │ ├── fetchers │ │ └── mod.rs │ ├── metrics.rs │ └── mod.rs ├── twilio │ ├── mod.rs │ ├── lookup.rs │ ├── error.rs │ └── verify.rs ├── interop │ ├── refund │ │ └── mod.rs │ ├── mod.rs │ ├── settler │ │ └── layerzero │ │ │ ├── metrics.rs │ │ │ └── batcher │ │ │ └── mod.rs │ └── escrow.rs ├── estimation │ ├── mod.rs │ └── op.rs ├── version.rs ├── error │ ├── onramp.rs │ ├── email.rs │ ├── merkle.rs │ ├── asset.rs │ ├── keys.rs │ ├── phone.rs │ └── storage.rs ├── lib.rs ├── transactions │ ├── mod.rs │ └── pull_gas.rs ├── signers │ ├── mod.rs │ └── p256.rs ├── types │ ├── transaction.rs │ ├── webauthn.rs │ ├── multicall.rs │ ├── rpc │ │ ├── mod.rs │ │ ├── faucet.rs │ │ └── permission.rs │ ├── tokens.rs │ ├── mod.rs │ ├── settler.rs │ ├── interop.rs │ └── storage.rs ├── metrics │ └── periodic │ │ ├── types │ │ └── mod.rs │ │ ├── job.rs │ │ └── mod.rs ├── bin │ └── relay.rs ├── constants.rs ├── 
nonce.rs └── transport │ ├── error.rs │ └── timeout.rs ├── .zed └── settings.json ├── rustfmt.toml ├── scripts ├── release.sh └── bundle.sh ├── .sqlx ├── query-35bd868ca1c07b9f4873ecc64513619734c086df3c6cbd4fbd0903272a0ccb02.json ├── query-475402016379ac086af8e1f9c898546fdff72c46a947a8e5070b666d72cb5346.json ├── query-4fa59903d26caa494ccd51b9aead351c8cab1692c2be15e1f2d6a5d7d5fe08a6.json ├── query-d8cd9e8289f3eeb9038f5cb4caac6ab7b10f46c1037e58850b72321261e8e2e2.json ├── query-521a92e948a7f1850ee92b96ad8e64b41860e04d73254367bbf4cb46906466db.json ├── query-901ae63757fae5ad8ec191880a1de5e94801667e6d3fd8a00994366feda2eeea.json ├── query-fc3c067a17f06d37801c3497a57806f5070b746758cca9ae08978dc8527853d1.json ├── query-fe168d68d876f84599f5acf0427654cd902b71abde17dadcf674792501718cea.json ├── query-58e36950f47c0cf88ee592d39bbc31de58f1822cd57ad253c8a7177a1928a7ce.json ├── query-90909084259d8d512e696d0a7fba41c2bb5ae77bd3eb4ed1d896e15a7dfc3e6f.json ├── query-feb85b17b6047d883bb5b8192eb548f187a1dbc6cc6b77bae54dd70aa5eb9fe4.json ├── query-28d16aa625b17b320d6e9736c545344645dbdabce9bbc4cf03c05711eb39c8f4.json ├── query-53e65e533d25a0a5a06ee5a5304680357941fe731ab2913c9ef1b0e074e8b79b.json ├── query-de4c25c6e9bc42aa8101e12eab0402d0c92e4e489f84496625b2f9f824d1e7c5.json ├── query-cf0827b8888f86f22fb19a9d0f124380dd9310a6161b4f41192921e65a5dff39.json ├── query-e7b4c5c58b009c5aea401135c1c74a8fbabf4bfcdd5e9ebc9d32dfcdc50396c5.json ├── query-fb433f362c6529b4de21b30bd11e4f2598c98a52cfa37ba4bb5a10a993ba5ed4.json ├── query-9f99b2b2dc8d3524098bc72ed56c29b4ce2cd8c07b869dbd423ac5283e05b3a2.json ├── query-37f5254831ac828540af50d176d42adebca2d74b124b750726fcfe301512cbc0.json ├── query-7150f186ceabdaa8f4a1245a8986db4d2feed0d9b0a5b689105acc35a764451e.json ├── query-ffefb5fc0bc0c8b8493556f315d918da1fa4e5229bf10e6b6d63f8ab02afd875.json ├── query-0a2c41a3f52fc9b74c5559796bbd578f4539853b345f2b5a386446144acbf6a0.json ├── query-c6a789bee1e7953d0c8f0135ce4499d164069c60092ac1c32f93b3f6dfd7daf3.json ├── 
query-a430d775b648600b9648e00c315b75da85e010aa060a31ff44588d4735e23783.json ├── query-8690d1b8ac24a4b90a01139e1f6e83a903cd8f2b47083072a9d75225fa404586.json ├── query-0140239eaa8c0793250727f23a9d488ba6beb5c2c24cbeba4a861fcf59d53d3b.json ├── query-3ad838e08ad45f17b15a76de133f2ad127c5b642478fca30f6ece0b847587ea3.json ├── query-12b6acfe38bc1f14b02f12a0a9e18af367ce91a9b1e57dc6f1b2e11b6122a52f.json ├── query-d918ed5091140ccf09ee3efbb18c9a23846361a55fc785d012a2310bfedacaeb.json ├── query-e7be266cb85b0764b96972837977ba2c14a82c546cedcd7b7e1ffd006911b31c.json ├── query-c06938b08164158736e7afc354d5da2aadad2e003756b5bcb92af374a884c945.json ├── query-7a5f0d16cb7ab928b3c786bd30c8ed7719a55ed99ced4fd42eb23cf4742e0a7a.json ├── query-600124940cbb2b7e408c94c01c30db2e81b68af9e72b9aa2f038ca902b988256.json ├── query-6646d9fdc0ac1291db1d4d31cc834ae037225b5fa26d874707526c6aca5c73e3.json ├── query-36c7612c5aa2121d5332b536d0bf36819458b13d6e351134ac5587fba53a0fbb.json ├── query-349050d675179f714ad8cf61ad13a457aab1a08ffd7d73b63944aeba2fc6a36a.json ├── query-0009f8bf8c91e1694f89a301751726d5cc2ccf66d77f496560c1d5654992b450.json ├── query-37f2533e86d3ddd8b3b216ddbf2c0c3fc14c857fdd8c04fa10e1abafb3c41b0b.json ├── query-af997c238b65ac231426b9e8805c0ef1359e293347c83d517a5b027524c0c818.json ├── query-8c6a1801f4c22bc63820b2ca1d6c158ecb9726e1fef1d5506492ae0b10f14259.json ├── query-8507914e3fae7f28166ad44c36ad2663f159d8ba92e0f398c6e0368c1af16007.json ├── query-babe59a25bb68aad24f5937029dc93dbda96968a5d0f4ba8cf02c8c7454c7087.json ├── query-d3dfb6b5e67050d361d40444031b247d9ad67ce219fa241e614f3ac15e39afc9.json ├── query-4f4f8bd7f995f16a895eff8f5f0227d0f5f09b1699aa444c59b273b5a7cf4413.json ├── query-ca924b77cc9a0ffa23670bf0e1b80fb71ddcb0a27dcc95ae2daf07fa1326f390.json ├── query-270d917a154bd20ec47184954d32b8dc9db680a20013978620e4dfceb4ddaf95.json ├── query-60a6599da9c3c316c5965152cd0130d6c2e3aac26c9fb7e00aac77041eb299f4.json ├── query-42298767b4d0c49cbf76aa6224137b475dadd3d07e3c0c5f2e843a2bc48eaa09.json ├── 
query-80d12caf4bb184a309dc40c49d8899666d7ad87366a8ad025c96664461dc94a8.json ├── query-21fdfd5f099a9fbfd3920e95e7dad2d60beaa400b2cbf84cd0af12a242dbaff4.json ├── query-4325db0dbbaa7043d2e4dc4e01e08413a09813c047d3ff23c672d57d958efc11.json ├── query-6d0902521dbbf2b5d2369c6e048ad08fde7e008a93be8d6182d224b55ff10dda.json ├── query-897bfeb578a927a561e619883e66d64756377054d9165317851766952e4a6243.json ├── query-9cb21fc4b4845f71b4cd5ae3a960ba62477716111279b0308880ec711e0ed8ab.json ├── query-eb066e77020459c15f2301f23614630195ffc710c529a27e81f4e711514c5a47.json ├── query-892ab2ea122fabd13647fcecf88ce0798ac90407d4fd904d5b975e00fe76991e.json ├── query-4b9602a95b13d65d494bb5a37e355c76b01f67be7472f2cfcc09784d16a5d76a.json ├── query-110fdd402d29c3f5aa016c4f4bda551418ea75c3c14b54c6c25d6a844bbf1607.json ├── query-e4ced6542c605a22a57fbe6a34e50fe20e9ffae4532bde1f123626595ebe3083.json ├── query-768abd437f7b9f89622ba85c870a3cc92b8b3716a8b87b11e4bed13498ad4b30.json ├── query-dc6b871547e833a766105ee576575c0e88ccafa1d87f5d81e85e91f6caffa185.json ├── query-0e7a3fa8802afa71128f8195407a728fe5f09491fbcc77929268ce4bbc428055.json ├── query-6fbd7366feea64ac3286dbdf37acffc9eb1fb1aa2345851201a3410c7547c34b.json ├── query-3b97d246bc2bbb97755f84a94484e1772b22e2f663814de36609e8a7feaa19a3.json ├── query-3467bdaf82adedf2757a9754e5d44c6f602399d0e473d43ad0f52570b437b55f.json ├── query-5d19bd26bec547a5e4026104e3f53fcc9bf20d442799f43ca5585822701dd197.json ├── query-530e93d5b22f16dff6556e241d328ee6d7b75d7c82a4f2f310fc961c53da4c0a.json ├── query-82e7c9cce1aa485db564962b021eaea9756ca73e5298e743ef0c37583fceb803.json ├── query-d1a42181197af48903ca0fc5e663982233307b11b6194c00b2272e85bb8a3d5c.json ├── query-7c3d06612031a0df180c5ab75b8a882d1bfd8d81f5965a352a71a30f1e618810.json ├── query-c37655b3fb49e027a64b8fad5654512bb3dfdeb79b260360955533281565223d.json ├── query-f42adf1637e3fa9e83464da75a8c6915d79e127ec6a036811afebdcd4b1e8d45.json └── query-a2d9ea4ad367d0feb713813a783fa071651305285432ddb71cee64ff240ee464.json ├── 
.dockerignore ├── .editorconfig ├── .config └── nextest.toml ├── .gitignore ├── LICENSE-MIT ├── relay.example.yaml ├── Dockerfile ├── Dockerfile.stress ├── Dockerfile.chainwalker └── docs ├── diagrams └── bundle_state_machine.mmd └── README.md /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | * @joshieDo 2 | -------------------------------------------------------------------------------- /tools/relay-tools/src/lib.rs: -------------------------------------------------------------------------------- 1 | pub mod common; 2 | -------------------------------------------------------------------------------- /clippy.toml: -------------------------------------------------------------------------------- 1 | msrv = "1.88" 2 | allow-dbg-in-tests = true 3 | -------------------------------------------------------------------------------- /tests/fixtures/mod.rs: -------------------------------------------------------------------------------- 1 | //! Test fixtures 2 | 3 | mod merkle; 4 | -------------------------------------------------------------------------------- /tests/storage/mod.rs: -------------------------------------------------------------------------------- 1 | //! Relay storage tests 2 | 3 | mod roundtrip; 4 | -------------------------------------------------------------------------------- /tests/tests.rs: -------------------------------------------------------------------------------- 1 | //! 
Relay tests 2 | 3 | mod e2e; 4 | mod fixtures; 5 | mod storage; 6 | -------------------------------------------------------------------------------- /migrations/0010_sent_at.sql: -------------------------------------------------------------------------------- 1 | alter table pending_txs rename column received_at to sent_at; 2 | -------------------------------------------------------------------------------- /migrations/0018_new_bundle_status.sql: -------------------------------------------------------------------------------- 1 | alter type bundle_status add value 'liquidity_locked'; -------------------------------------------------------------------------------- /migrations/0021_fix_constraint.sql: -------------------------------------------------------------------------------- 1 | alter table pending_unlocks drop constraint pending_unlocks_pkey; -------------------------------------------------------------------------------- /migrations/0012_receipt.sql: -------------------------------------------------------------------------------- 1 | delete from txs where status = 'confirmed'; 2 | alter table txs add column receipt jsonb; -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "tests/account"] 2 | path = tests/account 3 | url = https://github.com/ithacaxyz/account.git 4 | -------------------------------------------------------------------------------- /migrations/0002_tx_chain_id_and_error.sql: -------------------------------------------------------------------------------- 1 | alter table txs 2 | add column chain_id bigserial not null, 3 | add column error text; 4 | -------------------------------------------------------------------------------- /.cargo/config.toml: -------------------------------------------------------------------------------- 1 | [alias] 2 | xtask = "run --package xtask --bin xtask --" 3 | e2e = "xtask 
e2e" 4 | contracts = "xtask contracts" 5 | -------------------------------------------------------------------------------- /.github/SECURITY.md: -------------------------------------------------------------------------------- 1 | # Security Policy 2 | 3 | ## Reporting a Vulnerability 4 | 5 | Contact [security@ithaca.xyz](mailto:security@ithaca.xyz). 6 | -------------------------------------------------------------------------------- /migrations/0028_fee_payer_statuses.sql: -------------------------------------------------------------------------------- 1 | ALTER TYPE bundle_status ADD VALUE 'fee_payer_queued'; 2 | ALTER TYPE bundle_status ADD VALUE 'fee_payer_completed'; -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | * text=auto 2 | .gitattributes !filter !diff 3 | 4 | Dockerfile.* linguist-language=Dockerfile 5 | .zed/*.json linguist-language=JSON-with-Comments 6 | -------------------------------------------------------------------------------- /tests/e2e/cases/snapshots/tests__e2e__cases__rpc_snap__upgrade_account.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: tests/e2e/cases/rpc_snap.rs 3 | expression: response 4 | --- 5 | null 6 | -------------------------------------------------------------------------------- /migrations/0004_multiple_transactions.sql: -------------------------------------------------------------------------------- 1 | alter table pending_txs rename column envelope to envelopes; 2 | update pending_txs set envelopes = jsonb_build_array(envelopes); 3 | -------------------------------------------------------------------------------- /migrations/0020_layerzero_bundle_statuses.sql: -------------------------------------------------------------------------------- 1 | ALTER TYPE bundle_status ADD VALUE 'settlements_processing'; 2 | ALTER TYPE bundle_status 
ADD VALUE 'settlement_completion_queued'; -------------------------------------------------------------------------------- /src/rpc/mod.rs: -------------------------------------------------------------------------------- 1 | //! RPC modules. 2 | 3 | mod account; 4 | mod extra_fee; 5 | mod relay; 6 | 7 | pub use account::*; 8 | pub use extra_fee::*; 9 | pub use relay::*; 10 | -------------------------------------------------------------------------------- /tests/e2e/cases/snapshots/tests__e2e__cases__rpc_snap__status_unverified.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: tests/e2e/cases/rpc_snap.rs 3 | expression: status_unverified 4 | --- 5 | {} 6 | -------------------------------------------------------------------------------- /tests/e2e/cases/snapshots/tests__e2e__cases__rpc_snap__contact_info_unverified.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: tests/e2e/cases/rpc_snap.rs 3 | expression: contact_unverified 4 | --- 5 | {} 6 | -------------------------------------------------------------------------------- /tests/e2e/layerzero/contracts/foundry.toml: -------------------------------------------------------------------------------- 1 | [profile.default] 2 | src = "src" 3 | out = "out" 4 | libs = ["lib"] 5 | evm_version = "prague" 6 | optimizer = true 7 | optimizer_runs = 200 -------------------------------------------------------------------------------- /src/serde/mod.rs: -------------------------------------------------------------------------------- 1 | //! Serde helpers. 
2 | 3 | pub mod duration; 4 | pub mod fn_selector; 5 | pub mod hash_map; 6 | pub mod key_role; 7 | pub mod timestamp; 8 | pub mod trace_context; 9 | -------------------------------------------------------------------------------- /tests/e2e/cases/snapshots/tests__e2e__cases__rpc_snap__send_prepared_calls.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: tests/e2e/cases/rpc_snap.rs 3 | expression: response 4 | --- 5 | { 6 | "id": "[id]" 7 | } 8 | -------------------------------------------------------------------------------- /migrations/0005_queue.sql: -------------------------------------------------------------------------------- 1 | create table if not exists queued_txs ( 2 | id serial primary key, 3 | chain_id bigserial not null, 4 | tx_id bytea not null unique, 5 | tx jsonb not null 6 | ); 7 | -------------------------------------------------------------------------------- /tests/assets/rpc-cache-keyfile: -------------------------------------------------------------------------------- 1 | This file serves as the key for the github actions/cache 2 | 3 | Any change in this file will invalidate the cache in CI that stores RPC data. 
4 | 5 | Last updated: 04-11-2025 -------------------------------------------------------------------------------- /tests/e2e/cases/snapshots/tests__e2e__cases__rpc_snap__status_email_only.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: tests/e2e/cases/rpc_snap.rs 3 | expression: status_email_only 4 | --- 5 | { 6 | "email": "[email_timestamp]" 7 | } 8 | -------------------------------------------------------------------------------- /tests/e2e/layerzero/contracts/.gitignore: -------------------------------------------------------------------------------- 1 | # Compiler files 2 | cache/ 3 | out/ 4 | 5 | # Ignores development broadcast logs 6 | broadcast/ 7 | 8 | # Forge lib dependencies 9 | lib/ 10 | src/EndpointV2Mock.sol -------------------------------------------------------------------------------- /migrations/0008_intent.sql: -------------------------------------------------------------------------------- 1 | update pending_txs set tx = jsonb_set(tx, '{quote,intent}', tx#>'{quote,op}') - '{quote,op}'; 2 | update queued_txs set tx = jsonb_set(tx, '{quote,intent}', tx#>'{quote,op}') - '{quote,op}'; 3 | -------------------------------------------------------------------------------- /tests/e2e/cases/snapshots/tests__e2e__cases__rpc_snap__contact_info_email_only.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: tests/e2e/cases/rpc_snap.rs 3 | expression: contact_email_only 4 | --- 5 | { 6 | "email": "emailonly@example.com" 7 | } 8 | -------------------------------------------------------------------------------- /tests/e2e/cases/snapshots/tests__e2e__cases__rpc_snap__status_unverified_email.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: tests/e2e/cases/rpc_snap.rs 3 | expression: status_unverified_email 4 | --- 5 | { 6 | "email": "[email_timestamp]" 7 | } 8 | 
-------------------------------------------------------------------------------- /migrations/0006_entrypoint.sql: -------------------------------------------------------------------------------- 1 | update pending_txs set tx = jsonb_set(tx, '{quote,entrypoint}', tx->'entrypoint') - 'entrypoint'; 2 | update queued_txs set tx = jsonb_set(tx, '{quote,entrypoint}', tx->'entrypoint') - 'entrypoint'; 3 | -------------------------------------------------------------------------------- /src/liquidity/mod.rs: -------------------------------------------------------------------------------- 1 | //! Liquidity management logic. 2 | 3 | pub mod bridge; 4 | mod rebalance; 5 | pub use rebalance::RebalanceService; 6 | mod tracker; 7 | pub use tracker::{ChainAddress, LiquidityTracker, LiquidityTrackerError}; 8 | -------------------------------------------------------------------------------- /tests/e2e/cases/snapshots/tests__e2e__cases__rpc_snap__contact_info_unverified_email.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: tests/e2e/cases/rpc_snap.rs 3 | expression: contact_unverified_email 4 | --- 5 | { 6 | "email": "emailonly@example.com" 7 | } 8 | -------------------------------------------------------------------------------- /tests/e2e/cases/snapshots/tests__e2e__cases__rpc_snap__onramp_status.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: tests/e2e/cases/rpc_snap.rs 3 | expression: response 4 | --- 5 | { 6 | "email": "[email_timestamp]", 7 | "phone": "[phone_timestamp]" 8 | } 9 | -------------------------------------------------------------------------------- /src/price/fetchers/mod.rs: -------------------------------------------------------------------------------- 1 | mod coingecko; 2 | pub use coingecko::*; 3 | 4 | /// List of supported coin fetchers. 5 | #[derive(Debug, Eq, PartialEq, Hash)] 6 | pub enum PriceFetcher { 7 | /// CoinGecko. 
8 | CoinGecko, 9 | } 10 | -------------------------------------------------------------------------------- /tests/e2e/cases/snapshots/tests__e2e__cases__rpc_snap__status_verified.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: tests/e2e/cases/rpc_snap.rs 3 | expression: status_response 4 | --- 5 | { 6 | "email": "[email_timestamp]", 7 | "phone": "[phone_timestamp]" 8 | } 9 | -------------------------------------------------------------------------------- /tools/xtask/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "xtask" 3 | description = "Auxiliary build tool" 4 | publish = false 5 | version = "14.0.0" 6 | edition = "2024" 7 | 8 | [dependencies] 9 | anyhow = "1.0" 10 | xflags = "0.3" 11 | xshell = "0.2" -------------------------------------------------------------------------------- /tests/e2e/cases/snapshots/tests__e2e__cases__rpc_snap__add_faucet_funds.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: tests/e2e/cases/rpc_snap.rs 3 | expression: response 4 | --- 5 | { 6 | "transactionHash": "[transactionHash]", 7 | "message": "Faucet funding successful" 8 | } 9 | -------------------------------------------------------------------------------- /tests/e2e/cases/snapshots/tests__e2e__cases__rpc_snap__health.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: tests/e2e/cases/rpc_snap.rs 3 | expression: response 4 | --- 5 | { 6 | "status": "[status]", 7 | "version": "[version]", 8 | "quoteSigner": "[quoteSigner]" 9 | } 10 | -------------------------------------------------------------------------------- /migrations/0007_orchestrator.sql: -------------------------------------------------------------------------------- 1 | update pending_txs set tx = jsonb_set(tx, '{quote,orchestrator}', tx#>'{quote,entrypoint}') - 
'{quote,entrypoint}'; 2 | update queued_txs set tx = jsonb_set(tx, '{quote,orchestrator}', tx#>'{quote,entrypoint}') - '{quote,entrypoint}'; 3 | -------------------------------------------------------------------------------- /.zed/settings.json: -------------------------------------------------------------------------------- 1 | // Folder-specific settings 2 | // 3 | // For a full list of overridable settings, and general information on folder-specific settings, 4 | // see the documentation: https://zed.dev/docs/configuring-zed#settings-files 5 | { "file_scan_exclusions": ["tests/account/**"] } 6 | -------------------------------------------------------------------------------- /migrations/0014_bundle_transactions_mapping.sql: -------------------------------------------------------------------------------- 1 | create table bundle_transactions ( 2 | bundle_id bytea not null, 3 | tx_id bytea not null 4 | ); 5 | 6 | insert into bundle_transactions (bundle_id, tx_id) select bundle_id, tx_id from txs; 7 | 8 | alter table txs drop column bundle_id; -------------------------------------------------------------------------------- /tests/e2e/cases/snapshots/tests__e2e__cases__rpc_snap__contact_info_verified.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: tests/e2e/cases/rpc_snap.rs 3 | expression: contact_response 4 | --- 5 | { 6 | "email": "test@example.com", 7 | "phone": "+1234567890", 8 | "phoneVerifiedAt": "[phone_timestamp]" 9 | } 10 | -------------------------------------------------------------------------------- /migrations/0031_asset_diffs.sql: -------------------------------------------------------------------------------- 1 | -- Create table for storing asset diffs from confirmed transactions 2 | create table asset_diffs ( 3 | tx_id bytea not null primary key references txs(tx_id) on delete cascade, 4 | asset_diffs jsonb not null, 5 | created_at timestamptz not null default now() 6 | ); 7 | 
-------------------------------------------------------------------------------- /migrations/0003_indexes.sql: -------------------------------------------------------------------------------- 1 | create index if not exists accounts_address on accounts (address); 2 | 3 | create index if not exists keys_account_address on keys (account_address); 4 | 5 | create index if not exists txs_bundle_id on txs (bundle_id); 6 | 7 | create index if not exists txs_tx_id on txs (tx_id); 8 | -------------------------------------------------------------------------------- /.github/workflows/auto-assign-pr.yml: -------------------------------------------------------------------------------- 1 | name: Auto Assign PR to Author 2 | 3 | on: 4 | pull_request: 5 | types: [opened, reopened] 6 | 7 | jobs: 8 | auto-assign: 9 | permissions: 10 | issues: write 11 | pull-requests: write 12 | uses: ithacaxyz/ci/.github/workflows/auto-assign-pr.yml@main 13 | -------------------------------------------------------------------------------- /tests/assets/config/base_sepolia.env: -------------------------------------------------------------------------------- 1 | # Base Sepolia E2E Test Configuration 2 | # Shared environment variables for Makefile and CI 3 | TEST_FORK_URL=https://sepolia.base.org 4 | TEST_ORCHESTRATOR=0x638Ba5eD6b8cc9B036CdDABd9c326839Bb199069 5 | TEST_PROXY=0x55BFeDbF3718D44f0cc605D8b41552e33a096D1F 6 | TEST_SIMULATOR=0xb453370bca7BA63Bf832e77A7fA34568519e9b2E -------------------------------------------------------------------------------- /migrations/0024_precalls.sql: -------------------------------------------------------------------------------- 1 | create table if not exists precalls ( 2 | address bytea not null, 3 | chain_id bigint not null, 4 | nonce bytea not null, 5 | data jsonb not null, 6 | primary key (address, chain_id, nonce) 7 | ); 8 | 9 | -- Index for efficient queries 10 | CREATE INDEX idx_precalls_address ON precalls(address); 11 | 
-------------------------------------------------------------------------------- /migrations/0030_historical_usd_prices.sql: -------------------------------------------------------------------------------- 1 | -- Create table for storing historical USD prices for assets 2 | -- Timestamps are normalized to minute boundaries (seconds set to 0) 3 | create table historical_usd_prices ( 4 | asset_uid text not null, 5 | timestamp bigint not null, 6 | usd_price double precision not null, 7 | primary key (asset_uid, timestamp) 8 | ); 9 | -------------------------------------------------------------------------------- /rustfmt.toml: -------------------------------------------------------------------------------- 1 | merge_derives = false 2 | reorder_imports = true 3 | use_field_init_shorthand = true 4 | use_small_heuristics = "Max" 5 | 6 | # Nightly 7 | max_width = 100 8 | comment_width = 100 9 | imports_granularity = "Crate" 10 | wrap_comments = true 11 | format_code_in_doc_comments = true 12 | doc_comment_code_block_width = 100 13 | format_macro_matchers = true 14 | -------------------------------------------------------------------------------- /scripts/release.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -eo pipefail 3 | 4 | if ! 
command -v cargo set-version 2>&1 >/dev/null 5 | then 6 | echo "Please install cargo-edit" 7 | exit 1 8 | fi 9 | 10 | echo "Setting version in Cargo.toml to $1" 11 | cargo set-version $1 12 | 13 | echo "Committing and tagging" 14 | git commit -m "chore: release $1" 15 | git tag $1 16 | -------------------------------------------------------------------------------- /migrations/0011_emails.sql: -------------------------------------------------------------------------------- 1 | create table emails ( 2 | address bytea not null unique, 3 | email text not null, 4 | token varchar(255) not null, 5 | verified_at timestamp, 6 | created_at timestamp not null default now () 7 | ); 8 | 9 | -- only allow an email address once per account to prevent spam 10 | create unique index idx_address_email on emails (address, email); 11 | -------------------------------------------------------------------------------- /src/twilio/mod.rs: -------------------------------------------------------------------------------- 1 | //! Twilio integration for phone verification. 
2 | 3 | mod client; 4 | mod error; 5 | mod lookup; 6 | mod verify; 7 | 8 | pub use client::TwilioClient; 9 | pub use error::{TwilioError, TwilioErrorCode}; 10 | pub use lookup::{LineType, LineTypeIntelligence, LookupResponse}; 11 | pub use verify::{VerificationCheckResponse, VerificationResponse, VerificationStatus}; 12 | -------------------------------------------------------------------------------- /migrations/0015_update_intent_structure.sql: -------------------------------------------------------------------------------- 1 | UPDATE queued_txs SET tx = jsonb_set(jsonb_set(tx, '{quote,intent,settler}', '"0x0000000000000000000000000000000000000000"'::jsonb), '{quote,intent,settlerContext}', '"0x"'::jsonb); 2 | UPDATE pending_txs SET tx = jsonb_set(jsonb_set(tx, '{quote,intent,settler}', '"0x0000000000000000000000000000000000000000"'::jsonb), '{quote,intent,settlerContext}', '"0x"'::jsonb); -------------------------------------------------------------------------------- /src/interop/refund/mod.rs: -------------------------------------------------------------------------------- 1 | //! Refund handling module for cross-chain interoperability. 2 | //! 3 | //! This module provides functionality for processing and monitoring refunds 4 | //! when cross-chain transactions fail or need to be reverted. 
5 | 6 | pub mod monitor; 7 | pub mod processor; 8 | 9 | pub use monitor::RefundMonitorService; 10 | pub use processor::{RefundProcessor, RefundProcessorError}; 11 | -------------------------------------------------------------------------------- /tests/e2e/layerzero/contracts/remappings.txt: -------------------------------------------------------------------------------- 1 | forge-std/=lib/forge-std/src/ 2 | solady/=lib/solady/src/ 3 | @layerzerolabs/lz-evm-protocol-v2/=lib/LayerZero-v2/packages/layerzero-v2/evm/protocol/ 4 | @layerzerolabs/lz-evm-messagelib-v2/=lib/LayerZero-v2/packages/layerzero-v2/evm/messagelib/ 5 | @layerzerolabs/oapp-evm/=lib/devtools/packages/oapp-evm/ 6 | @openzeppelin/contracts/=lib/openzeppelin-contracts/contracts/ -------------------------------------------------------------------------------- /src/price/metrics.rs: -------------------------------------------------------------------------------- 1 | //! Price metrics 2 | 3 | use metrics::{Counter, Gauge}; 4 | use metrics_derive::Metrics; 5 | 6 | /// Metrics for a [`CoinPair`](crate::types::CoinPair). 7 | #[derive(Metrics)] 8 | #[metrics(scope = "oracle")] 9 | pub struct CoinPairMetrics { 10 | /// Rate for this pair. 11 | pub rate: Gauge, 12 | /// How often an expired rate was requested. 
13 | pub expired_hits: Counter, 14 | } 15 | -------------------------------------------------------------------------------- /.sqlx/query-35bd868ca1c07b9f4873ecc64513619734c086df3c6cbd4fbd0903272a0ccb02.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "delete from pending_txs where tx_id = $1", 4 | "describe": { 5 | "columns": [], 6 | "parameters": { 7 | "Left": [ 8 | "Bytea" 9 | ] 10 | }, 11 | "nullable": [] 12 | }, 13 | "hash": "35bd868ca1c07b9f4873ecc64513619734c086df3c6cbd4fbd0903272a0ccb02" 14 | } 15 | -------------------------------------------------------------------------------- /.sqlx/query-475402016379ac086af8e1f9c898546fdff72c46a947a8e5070b666d72cb5346.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "delete from queued_txs where tx_id = $1", 4 | "describe": { 5 | "columns": [], 6 | "parameters": { 7 | "Left": [ 8 | "Bytea" 9 | ] 10 | }, 11 | "nullable": [] 12 | }, 13 | "hash": "475402016379ac086af8e1f9c898546fdff72c46a947a8e5070b666d72cb5346" 14 | } 15 | -------------------------------------------------------------------------------- /.sqlx/query-4fa59903d26caa494ccd51b9aead351c8cab1692c2be15e1f2d6a5d7d5fe08a6.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "update txs set tx_hash = $1 where tx_id = $2", 4 | "describe": { 5 | "columns": [], 6 | "parameters": { 7 | "Left": [ 8 | "Bytea", 9 | "Bytea" 10 | ] 11 | }, 12 | "nullable": [] 13 | }, 14 | "hash": "4fa59903d26caa494ccd51b9aead351c8cab1692c2be15e1f2d6a5d7d5fe08a6" 15 | } 16 | -------------------------------------------------------------------------------- /.sqlx/query-d8cd9e8289f3eeb9038f5cb4caac6ab7b10f46c1037e58850b72321261e8e2e2.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": 
"PostgreSQL", 3 | "query": "update txs set error = $1 where tx_id = $2", 4 | "describe": { 5 | "columns": [], 6 | "parameters": { 7 | "Left": [ 8 | "Text", 9 | "Bytea" 10 | ] 11 | }, 12 | "nullable": [] 13 | }, 14 | "hash": "d8cd9e8289f3eeb9038f5cb4caac6ab7b10f46c1037e58850b72321261e8e2e2" 15 | } 16 | -------------------------------------------------------------------------------- /migrations/0017_bridge.sql: -------------------------------------------------------------------------------- 1 | create type bridge_transfer_status as enum ('pending', 'sent', 'outbound_failed', 'completed', 'inbound_failed'); 2 | 3 | create table if not exists bridge_transfers ( 4 | transfer_id bytea not null unique, 5 | transfer_data jsonb not null, 6 | bridge_data jsonb, 7 | outbound_block_number bigint, 8 | inbound_block_number bigint, 9 | status bridge_transfer_status not null default 'pending' 10 | ); 11 | -------------------------------------------------------------------------------- /.sqlx/query-521a92e948a7f1850ee92b96ad8e64b41860e04d73254367bbf4cb46906466db.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "insert into bundle_transactions (bundle_id, tx_id) values ($1, $2)", 4 | "describe": { 5 | "columns": [], 6 | "parameters": { 7 | "Left": [ 8 | "Bytea", 9 | "Bytea" 10 | ] 11 | }, 12 | "nullable": [] 13 | }, 14 | "hash": "521a92e948a7f1850ee92b96ad8e64b41860e04d73254367bbf4cb46906466db" 15 | } 16 | -------------------------------------------------------------------------------- /.sqlx/query-901ae63757fae5ad8ec191880a1de5e94801667e6d3fd8a00994366feda2eeea.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "\n DELETE FROM pending_refunds\n WHERE bundle_id = $1\n ", 4 | "describe": { 5 | "columns": [], 6 | "parameters": { 7 | "Left": [ 8 | "Bytea" 9 | ] 10 | }, 11 | "nullable": [] 12 | }, 13 | "hash": 
"901ae63757fae5ad8ec191880a1de5e94801667e6d3fd8a00994366feda2eeea" 14 | } 15 | -------------------------------------------------------------------------------- /.sqlx/query-fc3c067a17f06d37801c3497a57806f5070b746758cca9ae08978dc8527853d1.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "update bridge_transfers set bridge_data = $1 where transfer_id = $2", 4 | "describe": { 5 | "columns": [], 6 | "parameters": { 7 | "Left": [ 8 | "Jsonb", 9 | "Bytea" 10 | ] 11 | }, 12 | "nullable": [] 13 | }, 14 | "hash": "fc3c067a17f06d37801c3497a57806f5070b746758cca9ae08978dc8527853d1" 15 | } 16 | -------------------------------------------------------------------------------- /.sqlx/query-fe168d68d876f84599f5acf0427654cd902b71abde17dadcf674792501718cea.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "update pending_txs set envelopes = envelopes || $1 where tx_id = $2", 4 | "describe": { 5 | "columns": [], 6 | "parameters": { 7 | "Left": [ 8 | "Jsonb", 9 | "Bytea" 10 | ] 11 | }, 12 | "nullable": [] 13 | }, 14 | "hash": "fe168d68d876f84599f5acf0427654cd902b71abde17dadcf674792501718cea" 15 | } 16 | -------------------------------------------------------------------------------- /src/estimation/mod.rs: -------------------------------------------------------------------------------- 1 | //! Estimation module for intent simulation and fee calculation. 2 | //! 3 | //! This module provides a clean separation of concerns for: 4 | //! - **Simulation**: Executing intents to determine gas usage and effects 5 | //! - **Fee Calculation**: Computing costs based on gas, prices, and network conditions 6 | //! 
- **Quote Generation**: Building complete quotes with all pricing information 7 | 8 | pub mod fees; 9 | 10 | pub mod arb; 11 | pub mod op; 12 | -------------------------------------------------------------------------------- /.sqlx/query-58e36950f47c0cf88ee592d39bbc31de58f1822cd57ad253c8a7177a1928a7ce.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "update phones set attempts = attempts + 1 where address = $1 and phone = $2", 4 | "describe": { 5 | "columns": [], 6 | "parameters": { 7 | "Left": [ 8 | "Bytea", 9 | "Text" 10 | ] 11 | }, 12 | "nullable": [] 13 | }, 14 | "hash": "58e36950f47c0cf88ee592d39bbc31de58f1822cd57ad253c8a7177a1928a7ce" 15 | } 16 | -------------------------------------------------------------------------------- /.sqlx/query-90909084259d8d512e696d0a7fba41c2bb5ae77bd3eb4ed1d896e15a7dfc3e6f.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "insert into bridge_transfers (transfer_id, transfer_data) values ($1, $2)", 4 | "describe": { 5 | "columns": [], 6 | "parameters": { 7 | "Left": [ 8 | "Bytea", 9 | "Jsonb" 10 | ] 11 | }, 12 | "nullable": [] 13 | }, 14 | "hash": "90909084259d8d512e696d0a7fba41c2bb5ae77bd3eb4ed1d896e15a7dfc3e6f" 15 | } 16 | -------------------------------------------------------------------------------- /.sqlx/query-feb85b17b6047d883bb5b8192eb548f187a1dbc6cc6b77bae54dd70aa5eb9fe4.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "update bridge_transfers set inbound_block_number = $1 where transfer_id = $2", 4 | "describe": { 5 | "columns": [], 6 | "parameters": { 7 | "Left": [ 8 | "Int8", 9 | "Bytea" 10 | ] 11 | }, 12 | "nullable": [] 13 | }, 14 | "hash": "feb85b17b6047d883bb5b8192eb548f187a1dbc6cc6b77bae54dd70aa5eb9fe4" 15 | } 16 | 
-------------------------------------------------------------------------------- /.sqlx/query-28d16aa625b17b320d6e9736c545344645dbdabce9bbc4cf03c05711eb39c8f4.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "update bridge_transfers set outbound_block_number = $1 where transfer_id = $2", 4 | "describe": { 5 | "columns": [], 6 | "parameters": { 7 | "Left": [ 8 | "Int8", 9 | "Bytea" 10 | ] 11 | }, 12 | "nullable": [] 13 | }, 14 | "hash": "28d16aa625b17b320d6e9736c545344645dbdabce9bbc4cf03c05711eb39c8f4" 15 | } 16 | -------------------------------------------------------------------------------- /.sqlx/query-53e65e533d25a0a5a06ee5a5304680357941fe731ab2913c9ef1b0e074e8b79b.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "insert into txs (tx_id, chain_id, tx) values ($1, $2, $3)", 4 | "describe": { 5 | "columns": [], 6 | "parameters": { 7 | "Left": [ 8 | "Bytea", 9 | "Int8", 10 | "Jsonb" 11 | ] 12 | }, 13 | "nullable": [] 14 | }, 15 | "hash": "53e65e533d25a0a5a06ee5a5304680357941fe731ab2913c9ef1b0e074e8b79b" 16 | } 17 | -------------------------------------------------------------------------------- /.sqlx/query-de4c25c6e9bc42aa8101e12eab0402d0c92e4e489f84496625b2f9f824d1e7c5.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "update txs set tx_hash = $1, receipt = $2 where tx_id = $3", 4 | "describe": { 5 | "columns": [], 6 | "parameters": { 7 | "Left": [ 8 | "Bytea", 9 | "Jsonb", 10 | "Bytea" 11 | ] 12 | }, 13 | "nullable": [] 14 | }, 15 | "hash": "de4c25c6e9bc42aa8101e12eab0402d0c92e4e489f84496625b2f9f824d1e7c5" 16 | } 17 | -------------------------------------------------------------------------------- /.sqlx/query-cf0827b8888f86f22fb19a9d0f124380dd9310a6161b4f41192921e65a5dff39.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "update phones set verified_at = now() where address = $1 and phone = $2 and verified_at is null", 4 | "describe": { 5 | "columns": [], 6 | "parameters": { 7 | "Left": [ 8 | "Bytea", 9 | "Text" 10 | ] 11 | }, 12 | "nullable": [] 13 | }, 14 | "hash": "cf0827b8888f86f22fb19a9d0f124380dd9310a6161b4f41192921e65a5dff39" 15 | } 16 | -------------------------------------------------------------------------------- /.sqlx/query-e7b4c5c58b009c5aea401135c1c74a8fbabf4bfcdd5e9ebc9d32dfcdc50396c5.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "update phones set verification_sid = $3 where address = $1 and phone = $2", 4 | "describe": { 5 | "columns": [], 6 | "parameters": { 7 | "Left": [ 8 | "Bytea", 9 | "Text", 10 | "Text" 11 | ] 12 | }, 13 | "nullable": [] 14 | }, 15 | "hash": "e7b4c5c58b009c5aea401135c1c74a8fbabf4bfcdd5e9ebc9d32dfcdc50396c5" 16 | } 17 | -------------------------------------------------------------------------------- /migrations/0016_liquidity.sql: -------------------------------------------------------------------------------- 1 | create table if not exists locked_liquidity ( 2 | chain_id bigint not null, 3 | asset_address bytea not null, 4 | amount numeric(80) not null default 0, 5 | primary key (chain_id, asset_address) 6 | ); 7 | 8 | create table if not exists pending_unlocks ( 9 | chain_id bigint not null, 10 | asset_address bytea not null, 11 | block_number bigint not null, 12 | amount numeric(80) not null, 13 | primary key (chain_id, asset_address) 14 | ); -------------------------------------------------------------------------------- /.sqlx/query-fb433f362c6529b4de21b30bd11e4f2598c98a52cfa37ba4bb5a10a993ba5ed4.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | 
"query": "update locked_liquidity set amount = $1 where chain_id = $2 and asset_address = $3", 4 | "describe": { 5 | "columns": [], 6 | "parameters": { 7 | "Left": [ 8 | "Numeric", 9 | "Int8", 10 | "Bytea" 11 | ] 12 | }, 13 | "nullable": [] 14 | }, 15 | "hash": "fb433f362c6529b4de21b30bd11e4f2598c98a52cfa37ba4bb5a10a993ba5ed4" 16 | } 17 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | # To get started with Dependabot version updates, you'll need to specify which 2 | # package ecosystems to update and where the package manifests are located. 3 | # Please see the documentation for all configuration options: 4 | # https://docs.github.com/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file 5 | 6 | version: 2 7 | updates: 8 | - package-ecosystem: "cargo" 9 | directory: "/" 10 | schedule: 11 | interval: "weekly" 12 | -------------------------------------------------------------------------------- /.sqlx/query-9f99b2b2dc8d3524098bc72ed56c29b4ce2cd8c07b869dbd423ac5283e05b3a2.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "update emails set verified_at = now() where address = $1 and email = $2 and token = $3", 4 | "describe": { 5 | "columns": [], 6 | "parameters": { 7 | "Left": [ 8 | "Bytea", 9 | "Text", 10 | "Text" 11 | ] 12 | }, 13 | "nullable": [] 14 | }, 15 | "hash": "9f99b2b2dc8d3524098bc72ed56c29b4ce2cd8c07b869dbd423ac5283e05b3a2" 16 | } 17 | -------------------------------------------------------------------------------- /.sqlx/query-37f5254831ac828540af50d176d42adebca2d74b124b750726fcfe301512cbc0.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "update locked_liquidity set amount = amount - $1 where 
chain_id = $2 and asset_address = $3", 4 | "describe": { 5 | "columns": [], 6 | "parameters": { 7 | "Left": [ 8 | "Numeric", 9 | "Int8", 10 | "Bytea" 11 | ] 12 | }, 13 | "nullable": [] 14 | }, 15 | "hash": "37f5254831ac828540af50d176d42adebca2d74b124b750726fcfe301512cbc0" 16 | } 17 | -------------------------------------------------------------------------------- /src/version.rs: -------------------------------------------------------------------------------- 1 | //! Relay version. 2 | 3 | /// The short version information for relay. 4 | pub const RELAY_SHORT_VERSION: &str = env!("RELAY_SHORT_VERSION"); 5 | 6 | /// The long version information for relay. 7 | pub const RELAY_LONG_VERSION: &str = concat!( 8 | env!("RELAY_LONG_VERSION_0"), 9 | "\n", 10 | env!("RELAY_LONG_VERSION_1"), 11 | "\n", 12 | env!("RELAY_LONG_VERSION_2"), 13 | "\n", 14 | env!("RELAY_LONG_VERSION_3"), 15 | "\n", 16 | env!("RELAY_LONG_VERSION_4") 17 | ); 18 | -------------------------------------------------------------------------------- /migrations/0023_layerzero_batch_tracking.sql: -------------------------------------------------------------------------------- 1 | -- LayerZero nonce tracking table 2 | CREATE TABLE layerzero_nonces ( 3 | chain_id BIGINT NOT NULL, 4 | src_eid INTEGER NOT NULL, 5 | nonce_lz BIGINT NOT NULL, 6 | tx_id BYTEA NOT NULL, -- 32 bytes 7 | created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), 8 | updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), 9 | PRIMARY KEY (chain_id, src_eid) 10 | ); 11 | 12 | -- Index for efficient queries 13 | CREATE INDEX idx_layerzero_nonces_tx_id ON layerzero_nonces(tx_id); -------------------------------------------------------------------------------- /.sqlx/query-7150f186ceabdaa8f4a1245a8986db4d2feed0d9b0a5b689105acc35a764451e.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "\n DELETE FROM precalls WHERE chain_id = $1 AND address = $2 AND nonce = $3\n ", 4 | 
"describe": { 5 | "columns": [], 6 | "parameters": { 7 | "Left": [ 8 | "Int8", 9 | "Bytea", 10 | "Bytea" 11 | ] 12 | }, 13 | "nullable": [] 14 | }, 15 | "hash": "7150f186ceabdaa8f4a1245a8986db4d2feed0d9b0a5b689105acc35a764451e" 16 | } 17 | -------------------------------------------------------------------------------- /.sqlx/query-ffefb5fc0bc0c8b8493556f315d918da1fa4e5229bf10e6b6d63f8ab02afd875.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "insert into locked_liquidity (chain_id, asset_address, amount) values ($1, $2, $3) on conflict do nothing", 4 | "describe": { 5 | "columns": [], 6 | "parameters": { 7 | "Left": [ 8 | "Int8", 9 | "Bytea", 10 | "Numeric" 11 | ] 12 | }, 13 | "nullable": [] 14 | }, 15 | "hash": "ffefb5fc0bc0c8b8493556f315d918da1fa4e5229bf10e6b6d63f8ab02afd875" 16 | } 17 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | # exclude everything 2 | * 3 | 4 | # include source files 5 | !/bin 6 | !/crates 7 | !/src 8 | !Cargo.lock 9 | !Cargo.toml 10 | !Cross.toml 11 | !deny.toml 12 | !Makefile 13 | !build.rs 14 | !/tools 15 | 16 | # include for vergen constants 17 | !/.git 18 | 19 | # include licenses 20 | !LICENSE-* 21 | 22 | # include dist directory, where the reth binary is located after compilation 23 | !/dist 24 | 25 | # include example files 26 | !/examples 27 | 28 | # include migrations 29 | !/migrations 30 | # include 31 | !/.sqlx 32 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/new-issue-for-tracking.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: New issue for tracking 3 | about: Feature, refactoring issue template 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | ## Description 11 | 12 | *Add a 
one-liner for the issue* 13 | 14 | ### Problem Statement 15 | *What is the problem we're trying to solve* 16 | 17 | ### Implementation Hints 18 | *Document implementation hints or various options and which one was chosen* 19 | 20 | ### User story 21 | *What changes from a user/consumer perspective* 22 | -------------------------------------------------------------------------------- /migrations/0026_phones.sql: -------------------------------------------------------------------------------- 1 | create table phones ( 2 | address bytea not null, 3 | phone text not null, 4 | verification_sid text not null, 5 | verified_at timestamp, 6 | attempts int not null default 0, 7 | created_at timestamp not null default now() 8 | ); 9 | 10 | -- only allow a phone number once per account to prevent spam 11 | create unique index idx_address_phone on phones (address, phone); 12 | 13 | -- index for looking up verified phone numbers 14 | create index idx_phone_verified on phones (phone, verified_at); -------------------------------------------------------------------------------- /.sqlx/query-0a2c41a3f52fc9b74c5559796bbd578f4539853b345f2b5a386446144acbf6a0.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "insert into emails (address, email, token) values ($1, $2, $3) on conflict(address, email) do update set token = $3", 4 | "describe": { 5 | "columns": [], 6 | "parameters": { 7 | "Left": [ 8 | "Bytea", 9 | "Text", 10 | "Varchar" 11 | ] 12 | }, 13 | "nullable": [] 14 | }, 15 | "hash": "0a2c41a3f52fc9b74c5559796bbd578f4539853b345f2b5a386446144acbf6a0" 16 | } 17 | -------------------------------------------------------------------------------- /.sqlx/query-c6a789bee1e7953d0c8f0135ce4499d164069c60092ac1c32f93b3f6dfd7daf3.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "insert into pending_unlocks (chain_id, 
asset_address, amount, block_number) values ($1, $2, $3, $4)", 4 | "describe": { 5 | "columns": [], 6 | "parameters": { 7 | "Left": [ 8 | "Int8", 9 | "Bytea", 10 | "Numeric", 11 | "Int8" 12 | ] 13 | }, 14 | "nullable": [] 15 | }, 16 | "hash": "c6a789bee1e7953d0c8f0135ce4499d164069c60092ac1c32f93b3f6dfd7daf3" 17 | } 18 | -------------------------------------------------------------------------------- /.sqlx/query-a430d775b648600b9648e00c315b75da85e010aa060a31ff44588d4735e23783.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "\n INSERT INTO precalls (chain_id, address, data, nonce)\n VALUES ($1, $2, $3, $4)\n ", 4 | "describe": { 5 | "columns": [], 6 | "parameters": { 7 | "Left": [ 8 | "Int8", 9 | "Bytea", 10 | "Jsonb", 11 | "Bytea" 12 | ] 13 | }, 14 | "nullable": [] 15 | }, 16 | "hash": "a430d775b648600b9648e00c315b75da85e010aa060a31ff44588d4735e23783" 17 | } 18 | -------------------------------------------------------------------------------- /.sqlx/query-8690d1b8ac24a4b90a01139e1f6e83a903cd8f2b47083072a9d75225fa404586.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "\n INSERT INTO queued_txs (tx_id, chain_id, tx)\n VALUES ($1, $2, $3)\n ON CONFLICT (tx_id) DO NOTHING\n ", 4 | "describe": { 5 | "columns": [], 6 | "parameters": { 7 | "Left": [ 8 | "Bytea", 9 | "Int8", 10 | "Jsonb" 11 | ] 12 | }, 13 | "nullable": [] 14 | }, 15 | "hash": "8690d1b8ac24a4b90a01139e1f6e83a903cd8f2b47083072a9d75225fa404586" 16 | } 17 | -------------------------------------------------------------------------------- /src/error/onramp.rs: -------------------------------------------------------------------------------- 1 | use super::invalid_params; 2 | use thiserror::Error; 3 | 4 | /// Errors related to onramp operations. 
5 | #[derive(Debug, Error)] 6 | pub enum OnrampError { 7 | /// Invalid secret provided for accessing onramp contact information. 8 | #[error("invalid secret")] 9 | InvalidSecret, 10 | } 11 | 12 | impl From for jsonrpsee::types::error::ErrorObject<'static> { 13 | fn from(err: OnrampError) -> Self { 14 | match err { 15 | OnrampError::InvalidSecret => invalid_params(err.to_string()), 16 | } 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /.sqlx/query-0140239eaa8c0793250727f23a9d488ba6beb5c2c24cbeba4a861fcf59d53d3b.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "\n INSERT INTO asset_diffs (tx_id, asset_diffs)\n VALUES ($1, $2)\n ON CONFLICT (tx_id) DO UPDATE SET asset_diffs = EXCLUDED.asset_diffs\n ", 4 | "describe": { 5 | "columns": [], 6 | "parameters": { 7 | "Left": [ 8 | "Bytea", 9 | "Jsonb" 10 | ] 11 | }, 12 | "nullable": [] 13 | }, 14 | "hash": "0140239eaa8c0793250727f23a9d488ba6beb5c2c24cbeba4a861fcf59d53d3b" 15 | } 16 | -------------------------------------------------------------------------------- /.sqlx/query-3ad838e08ad45f17b15a76de133f2ad127c5b642478fca30f6ece0b847587ea3.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "select account from accounts where address = $1", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "account", 9 | "type_info": "Jsonb" 10 | } 11 | ], 12 | "parameters": { 13 | "Left": [ 14 | "Bytea" 15 | ] 16 | }, 17 | "nullable": [ 18 | false 19 | ] 20 | }, 21 | "hash": "3ad838e08ad45f17b15a76de133f2ad127c5b642478fca30f6ece0b847587ea3" 22 | } 23 | -------------------------------------------------------------------------------- /tests/e2e/cases/snapshots/tests__e2e__cases__rpc_snap__get_authorization.snap: -------------------------------------------------------------------------------- 1 | --- 2 | 
source: tests/e2e/cases/rpc_snap.rs 3 | expression: response 4 | --- 5 | { 6 | "authorization": { 7 | "chainId": "0x0", 8 | "address": "0x29a79095352a718b3d7fe84e1f14e9f34a35598e", 9 | "nonce": "0x0", 10 | "yParity": "0x0", 11 | "r": "0xe46ab6ac9ca13464273108c00775c46f9fee1f2953b45bfd002c1b02ce3e1254", 12 | "s": "0x21b84c92a4f780b39c5eb53ab28cb81812007ce3f8bc7ca930b1b54f90239089" 13 | }, 14 | "data": "[data]", 15 | "to": "0x700b6a60ce7eaaea56f065753d8dcb9653dbad35" 16 | } 17 | -------------------------------------------------------------------------------- /.sqlx/query-12b6acfe38bc1f14b02f12a0a9e18af367ce91a9b1e57dc6f1b2e11b6122a52f.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "select tx_id from bundle_transactions where bundle_id = $1", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "tx_id", 9 | "type_info": "Bytea" 10 | } 11 | ], 12 | "parameters": { 13 | "Left": [ 14 | "Bytea" 15 | ] 16 | }, 17 | "nullable": [ 18 | false 19 | ] 20 | }, 21 | "hash": "12b6acfe38bc1f14b02f12a0a9e18af367ce91a9b1e57dc6f1b2e11b6122a52f" 22 | } 23 | -------------------------------------------------------------------------------- /src/interop/mod.rs: -------------------------------------------------------------------------------- 1 | //! Cross-chain interoperability module. 2 | //! 3 | //! This module provides functionality for handling cross-chain transactions 4 | //! and escrow management for the relay service. 5 | 6 | pub mod escrow; 7 | pub mod refund; 8 | /// Settlement functionality for cross-chain bundles. 
9 | pub mod settler; 10 | 11 | pub use escrow::{EscrowDetails, EscrowInfo}; 12 | pub use refund::{RefundMonitorService, RefundProcessor, RefundProcessorError}; 13 | pub use settler::{ 14 | LayerZeroSettler, SettlementError, SettlementProcessor, Settler, SettlerId, SimpleSettler, 15 | }; 16 | -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | # EditorConfig helps developers define and maintain consistent 2 | # coding styles between different editors and IDEs 3 | # editorconfig.org 4 | 5 | root = true 6 | 7 | [*] 8 | end_of_line = lf 9 | charset = utf-8 10 | trim_trailing_whitespace = true 11 | insert_final_newline = true 12 | indent_style = space 13 | indent_size = 4 14 | 15 | [*.rs] 16 | max_line_length = 100 17 | 18 | [*.{yml,yaml}] 19 | indent_size = 2 20 | 21 | [*.md] 22 | # double whitespace at end of line 23 | # denotes a line break in Markdown 24 | trim_trailing_whitespace = false 25 | max_line_length = 100 26 | -------------------------------------------------------------------------------- /.sqlx/query-d918ed5091140ccf09ee3efbb18c9a23846361a55fc785d012a2310bfedacaeb.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "insert into pending_txs (chain_id, sender, tx_id, tx, envelopes, sent_at) values ($1, $2, $3, $4, $5, $6)", 4 | "describe": { 5 | "columns": [], 6 | "parameters": { 7 | "Left": [ 8 | "Int8", 9 | "Bytea", 10 | "Bytea", 11 | "Jsonb", 12 | "Jsonb", 13 | "Timestamp" 14 | ] 15 | }, 16 | "nullable": [] 17 | }, 18 | "hash": "d918ed5091140ccf09ee3efbb18c9a23846361a55fc785d012a2310bfedacaeb" 19 | } 20 | -------------------------------------------------------------------------------- /.sqlx/query-e7be266cb85b0764b96972837977ba2c14a82c546cedcd7b7e1ffd006911b31c.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "select bridge_data from bridge_transfers where transfer_id = $1", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "bridge_data", 9 | "type_info": "Jsonb" 10 | } 11 | ], 12 | "parameters": { 13 | "Left": [ 14 | "Bytea" 15 | ] 16 | }, 17 | "nullable": [ 18 | true 19 | ] 20 | }, 21 | "hash": "e7be266cb85b0764b96972837977ba2c14a82c546cedcd7b7e1ffd006911b31c" 22 | } 23 | -------------------------------------------------------------------------------- /.sqlx/query-c06938b08164158736e7afc354d5da2aadad2e003756b5bcb92af374a884c945.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "select transfer_data from bridge_transfers where transfer_id = $1", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "transfer_data", 9 | "type_info": "Jsonb" 10 | } 11 | ], 12 | "parameters": { 13 | "Left": [ 14 | "Bytea" 15 | ] 16 | }, 17 | "nullable": [ 18 | false 19 | ] 20 | }, 21 | "hash": "c06938b08164158736e7afc354d5da2aadad2e003756b5bcb92af374a884c945" 22 | } 23 | -------------------------------------------------------------------------------- /.sqlx/query-7a5f0d16cb7ab928b3c786bd30c8ed7719a55ed99ced4fd42eb23cf4742e0a7a.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "select attempts from phones where address = $1 and phone = $2", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "attempts", 9 | "type_info": "Int4" 10 | } 11 | ], 12 | "parameters": { 13 | "Left": [ 14 | "Bytea", 15 | "Text" 16 | ] 17 | }, 18 | "nullable": [ 19 | false 20 | ] 21 | }, 22 | "hash": "7a5f0d16cb7ab928b3c786bd30c8ed7719a55ed99ced4fd42eb23cf4742e0a7a" 23 | } 24 | -------------------------------------------------------------------------------- 
/src/lib.rs: -------------------------------------------------------------------------------- 1 | //! # Ithaca Relay 2 | //! 3 | //! Library for the implementation of the Ithaca Relay. 4 | 5 | pub mod asset; 6 | pub mod chains; 7 | pub mod cli; 8 | pub mod config; 9 | pub mod constants; 10 | pub mod diagnostics; 11 | pub mod error; 12 | pub mod estimation; 13 | pub mod interop; 14 | pub mod liquidity; 15 | pub mod metrics; 16 | pub mod nonce; 17 | pub mod otlp; 18 | pub mod price; 19 | pub mod provider; 20 | pub mod rpc; 21 | pub mod serde; 22 | pub mod signers; 23 | pub mod spawn; 24 | pub mod storage; 25 | pub mod transactions; 26 | pub mod transport; 27 | pub mod twilio; 28 | pub mod types; 29 | pub mod version; 30 | -------------------------------------------------------------------------------- /.github/workflows/lint-actions.yml: -------------------------------------------------------------------------------- 1 | name: lint github workflows 2 | on: 3 | pull_request: 4 | merge_group: 5 | push: 6 | branches: [main] 7 | 8 | jobs: 9 | actionlint: 10 | runs-on: ubuntu-latest 11 | steps: 12 | - uses: actions/checkout@v4 13 | - name: Download actionlint 14 | id: get_actionlint 15 | run: bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash) 16 | shell: bash 17 | - name: Check workflow files 18 | run: SHELLCHECK_OPTS="-S error" ${{ steps.get_actionlint.outputs.executable }} -color 19 | shell: bash 20 | -------------------------------------------------------------------------------- /.sqlx/query-600124940cbb2b7e408c94c01c30db2e81b68af9e72b9aa2f038ca902b988256.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "insert into phones (address, phone, verification_sid, attempts) values ($1, $2, $3, 0)\n on conflict(address, phone) do update set verification_sid = $3, attempts = 0, verified_at = null, created_at = now()", 4 | "describe": { 5 | 
"columns": [], 6 | "parameters": { 7 | "Left": [ 8 | "Bytea", 9 | "Text", 10 | "Text" 11 | ] 12 | }, 13 | "nullable": [] 14 | }, 15 | "hash": "600124940cbb2b7e408c94c01c30db2e81b68af9e72b9aa2f038ca902b988256" 16 | } 17 | -------------------------------------------------------------------------------- /src/transactions/mod.rs: -------------------------------------------------------------------------------- 1 | //! Service responsible for broadcasting transactions. 2 | 3 | mod service; 4 | pub use service::*; 5 | mod signer; 6 | pub use signer::*; 7 | mod transaction; 8 | pub use transaction::{ 9 | PendingTransaction, RelayTransaction, RelayTransactionKind, TransactionFailureReason, 10 | TransactionStatus, TxId, 11 | }; 12 | mod fees; 13 | mod flashblocks; 14 | mod metrics; 15 | mod monitor; 16 | pub use monitor::TransactionMonitoringHandle; 17 | /// Cross-chain interop bundle processing. 18 | pub mod interop; 19 | pub use interop::{InteropService, InteropServiceHandle}; 20 | mod pull_gas; 21 | pub use pull_gas::PullGasState; 22 | -------------------------------------------------------------------------------- /.sqlx/query-6646d9fdc0ac1291db1d4d31cc834ae037225b5fa26d874707526c6aca5c73e3.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "select coalesce(sum(amount), 0) from locked_liquidity where chain_id = $1 and asset_address = $2", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "coalesce", 9 | "type_info": "Numeric" 10 | } 11 | ], 12 | "parameters": { 13 | "Left": [ 14 | "Int8", 15 | "Bytea" 16 | ] 17 | }, 18 | "nullable": [ 19 | null 20 | ] 21 | }, 22 | "hash": "6646d9fdc0ac1291db1d4d31cc834ae037225b5fa26d874707526c6aca5c73e3" 23 | } 24 | -------------------------------------------------------------------------------- /.sqlx/query-36c7612c5aa2121d5332b536d0bf36819458b13d6e351134ac5587fba53a0fbb.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "select extract(epoch from verified_at)::bigint as verified_at from phones where address = $1 and verified_at is not null", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "verified_at", 9 | "type_info": "Int8" 10 | } 11 | ], 12 | "parameters": { 13 | "Left": [ 14 | "Bytea" 15 | ] 16 | }, 17 | "nullable": [ 18 | null 19 | ] 20 | }, 21 | "hash": "36c7612c5aa2121d5332b536d0bf36819458b13d6e351134ac5587fba53a0fbb" 22 | } 23 | -------------------------------------------------------------------------------- /.sqlx/query-349050d675179f714ad8cf61ad13a457aab1a08ffd7d73b63944aeba2fc6a36a.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "\n SELECT data\n FROM precalls\n WHERE chain_id = $1 AND address = $2\n ", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "data", 9 | "type_info": "Jsonb" 10 | } 11 | ], 12 | "parameters": { 13 | "Left": [ 14 | "Int8", 15 | "Bytea" 16 | ] 17 | }, 18 | "nullable": [ 19 | false 20 | ] 21 | }, 22 | "hash": "349050d675179f714ad8cf61ad13a457aab1a08ffd7d73b63944aeba2fc6a36a" 23 | } 24 | -------------------------------------------------------------------------------- /.config/nextest.toml: -------------------------------------------------------------------------------- 1 | [profile.default] 2 | retries = { backoff = "exponential", count = 3, delay = "2s", jitter = true } 3 | slow-timeout = { period = "120s", terminate-after = 4 } 4 | 5 | [test-groups] 6 | sequential-tests = { max-threads = 1 } 7 | 8 | [[profile.default.overrides]] 9 | filter = """ 10 | test(test_basic_concurrent) | 11 | test(resume_paused) | 12 | test(diverged_nonce) | 13 | test(auth_then_two_authorizes_then_erc20_transfer) | 14 | test(auth_then_erc20_transfer) | 15 | test(native_transfer) | 16 | test(pause_out_of_funds) 17 | 
""" 18 | test-group = "sequential-tests" 19 | threads-required = 2 20 | slow-timeout = { period = "120s" } -------------------------------------------------------------------------------- /.sqlx/query-0009f8bf8c91e1694f89a301751726d5cc2ccf66d77f496560c1d5654992b450.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "\n select transfer_data\n from bridge_transfers\n where status IN ('pending', 'sent')\n ORDER BY transfer_id\n ", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "transfer_data", 9 | "type_info": "Jsonb" 10 | } 11 | ], 12 | "parameters": { 13 | "Left": [] 14 | }, 15 | "nullable": [ 16 | false 17 | ] 18 | }, 19 | "hash": "0009f8bf8c91e1694f89a301751726d5cc2ccf66d77f496560c1d5654992b450" 20 | } 21 | -------------------------------------------------------------------------------- /.sqlx/query-37f2533e86d3ddd8b3b216ddbf2c0c3fc14c857fdd8c04fa10e1abafb3c41b0b.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "\n INSERT INTO pending_refunds (bundle_id, refund_timestamp)\n VALUES ($1, $2)\n ON CONFLICT (bundle_id) DO UPDATE SET\n refund_timestamp = GREATEST(pending_refunds.refund_timestamp, EXCLUDED.refund_timestamp)\n ", 4 | "describe": { 5 | "columns": [], 6 | "parameters": { 7 | "Left": [ 8 | "Bytea", 9 | "Timestamptz" 10 | ] 11 | }, 12 | "nullable": [] 13 | }, 14 | "hash": "37f2533e86d3ddd8b3b216ddbf2c0c3fc14c857fdd8c04fa10e1abafb3c41b0b" 15 | } 16 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: '' 5 | labels: bug 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Describe the bug** 11 | A clear and concise description of what the bug is. 
12 | 13 | **To Reproduce** 14 | Steps to reproduce the behavior: 15 | 1. Go to '...' 16 | 2. Click on '....' 17 | 3. Scroll down to '....' 18 | 4. See error 19 | 20 | **Expected behavior** 21 | A clear and concise description of what you expected to happen. 22 | 23 | **Screenshots** 24 | If applicable, add screenshots to help explain your problem. 25 | 26 | **Additional context** 27 | Add any other context about the problem here. 28 | -------------------------------------------------------------------------------- /tests/e2e/cases/mod.rs: -------------------------------------------------------------------------------- 1 | //! Relay end-to-end test cases 2 | 3 | mod assets; 4 | mod bnb_chain; 5 | mod calls; 6 | mod cli; 7 | mod delegation; 8 | mod errors; 9 | mod faucet; 10 | mod fee_payer; 11 | mod fees; 12 | mod history; 13 | mod intents_merkle; 14 | mod keys; 15 | mod liquidity; 16 | mod metrics; 17 | mod multi_chain; 18 | mod multichain_refund; 19 | pub mod multichain_usdt_transfer; 20 | mod paymaster; 21 | mod porto; 22 | mod relay; 23 | mod rpc_snap; 24 | mod signature; 25 | mod simple; 26 | mod simple_settlement; 27 | mod simulation; 28 | mod transactions; 29 | mod undeployed_token; 30 | mod upgrade; 31 | pub use upgrade::{upgrade_account_eagerly, upgrade_account_lazily}; 32 | -------------------------------------------------------------------------------- /.sqlx/query-af997c238b65ac231426b9e8805c0ef1359e293347c83d517a5b027524c0c818.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "select verified_at from phones where phone = $1 and address = $2 and verified_at is not null order by verified_at desc limit 1", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "verified_at", 9 | "type_info": "Timestamp" 10 | } 11 | ], 12 | "parameters": { 13 | "Left": [ 14 | "Text", 15 | "Bytea" 16 | ] 17 | }, 18 | "nullable": [ 19 | true 20 | ] 21 | }, 22 | "hash": 
"af997c238b65ac231426b9e8805c0ef1359e293347c83d517a5b027524c0c818" 23 | } 24 | -------------------------------------------------------------------------------- /src/serde/fn_selector.rs: -------------------------------------------------------------------------------- 1 | //! Helpers for deserializing function selectors. 2 | 3 | use std::str::FromStr; 4 | 5 | use alloy::{json_abi::Function, primitives::FixedBytes}; 6 | use serde::{Deserialize, Deserializer}; 7 | 8 | /// Deserialize a function selector from either a string or a valid 4-byte array. 9 | /// 10 | /// See [`Function::parse`]. 11 | pub fn deserialize<'de, D>(deserializer: D) -> Result, D::Error> 12 | where 13 | D: Deserializer<'de>, 14 | { 15 | let s: &str = Deserialize::deserialize(deserializer)?; 16 | 17 | FixedBytes::<4>::from_str(s) 18 | .or_else(|_| Function::parse(s).map_err(serde::de::Error::custom).map(|f| f.selector())) 19 | } 20 | -------------------------------------------------------------------------------- /.sqlx/query-8c6a1801f4c22bc63820b2ca1d6c158ecb9726e1fef1d5506492ae0b10f14259.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "select coalesce(sum(amount), 0) from pending_unlocks where chain_id = $1 and asset_address = $2 and block_number <= $3", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "coalesce", 9 | "type_info": "Numeric" 10 | } 11 | ], 12 | "parameters": { 13 | "Left": [ 14 | "Int8", 15 | "Bytea", 16 | "Int8" 17 | ] 18 | }, 19 | "nullable": [ 20 | null 21 | ] 22 | }, 23 | "hash": "8c6a1801f4c22bc63820b2ca1d6c158ecb9726e1fef1d5506492ae0b10f14259" 24 | } 25 | -------------------------------------------------------------------------------- /.sqlx/query-8507914e3fae7f28166ad44c36ad2663f159d8ba92e0f398c6e0368c1af16007.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "\n select email from 
emails\n where address = $1\n order by verified_at is not null desc, created_at desc\n limit 1\n ", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "email", 9 | "type_info": "Text" 10 | } 11 | ], 12 | "parameters": { 13 | "Left": [ 14 | "Bytea" 15 | ] 16 | }, 17 | "nullable": [ 18 | false 19 | ] 20 | }, 21 | "hash": "8507914e3fae7f28166ad44c36ad2663f159d8ba92e0f398c6e0368c1af16007" 22 | } 23 | -------------------------------------------------------------------------------- /src/signers/mod.rs: -------------------------------------------------------------------------------- 1 | //! Relay signers. 2 | 3 | mod r#dyn; 4 | use alloy::primitives::{B256, Bytes}; 5 | pub use r#dyn::DynSigner; 6 | 7 | mod p256; 8 | pub use p256::{P256Key, P256Signer}; 9 | 10 | mod webauthn; 11 | pub use webauthn::WebAuthnSigner; 12 | 13 | use crate::types::KeyType; 14 | 15 | /// Trait for a [EIP-712] payload signer. 16 | #[async_trait::async_trait] 17 | pub trait Eip712PayLoadSigner: std::fmt::Debug + Send + Sync { 18 | /// Returns the key type. 19 | fn key_type(&self) -> KeyType; 20 | 21 | /// Signs the [EIP-712] payload hash. 22 | /// 23 | /// Returns [`Bytes`]. 
24 | async fn sign_payload_hash(&self, payload_hash: B256) -> eyre::Result; 25 | } 26 | -------------------------------------------------------------------------------- /.sqlx/query-babe59a25bb68aad24f5937029dc93dbda96968a5d0f4ba8cf02c8c7454c7087.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "\n INSERT INTO historical_usd_prices (asset_uid, timestamp, usd_price)\n SELECT * FROM UNNEST($1::text[], $2::bigint[], $3::double precision[])\n ON CONFLICT (asset_uid, timestamp) DO UPDATE SET usd_price = EXCLUDED.usd_price\n ", 4 | "describe": { 5 | "columns": [], 6 | "parameters": { 7 | "Left": [ 8 | "TextArray", 9 | "Int8Array", 10 | "Float8Array" 11 | ] 12 | }, 13 | "nullable": [] 14 | }, 15 | "hash": "babe59a25bb68aad24f5937029dc93dbda96968a5d0f4ba8cf02c8c7454c7087" 16 | } 17 | -------------------------------------------------------------------------------- /migrations/0009_preCalls.sql: -------------------------------------------------------------------------------- 1 | update pending_txs set tx = jsonb_set(tx, '{quote,intent,encodedPreCalls}', tx#>'{quote,intent,encodedPreOps}') - '{quote,intent,encodedPreOps}'; 2 | update queued_txs set tx = jsonb_set(tx, '{quote,intent,encodedPreCalls}', tx#>'{quote,intent,encodedPreOps}') - '{quote,intent,encodedPreOps}'; 3 | update pending_txs set tx = jsonb_set(tx, '{quote,intent,supportedAccountImplementation}', tx#>'{quote,intent,supportedDelegationImplementation}') - '{quote,intent,supportedDelegationImplementation}'; 4 | update queued_txs set tx = jsonb_set(tx, '{quote,intent,supportedAccountImplementation}', tx#>'{quote,intent,supportedDelegationImplementation}') - '{quote,intent,supportedDelegationImplementation}'; 5 | -------------------------------------------------------------------------------- /src/serde/duration.rs: -------------------------------------------------------------------------------- 1 | //! 
Helpers for serializing and deserializing [`Duration`]. 2 | 3 | use serde::{self, Deserialize, Deserializer, Serializer}; 4 | use std::time::Duration; 5 | 6 | /// Serializes [`Duration`] as seconds. 7 | pub fn serialize(duration: &Duration, serializer: S) -> Result 8 | where 9 | S: Serializer, 10 | { 11 | let duration = duration.as_secs(); 12 | serializer.serialize_u64(duration) 13 | } 14 | 15 | /// Deserializes seconds into a [`Duration`]. 16 | pub fn deserialize<'de, D>(deserializer: D) -> Result 17 | where 18 | D: Deserializer<'de>, 19 | { 20 | let duration = u64::deserialize(deserializer)?; 21 | Ok(Duration::from_secs(duration)) 22 | } 23 | -------------------------------------------------------------------------------- /tools/relay-tools/src/bin/recast/mod.rs: -------------------------------------------------------------------------------- 1 | use clap::{Parser, Subcommand}; 2 | use eyre::Result; 3 | 4 | mod send; 5 | 6 | /// Recast - Ithaca relay CLI tool 7 | #[derive(Debug, Parser)] 8 | #[command(name = "recast")] 9 | #[command(about = "CLI tool for interacting with the Ithaca relay")] 10 | #[command(version)] 11 | struct Cli { 12 | #[command(subcommand)] 13 | command: Commands, 14 | } 15 | 16 | #[derive(Debug, Subcommand)] 17 | enum Commands { 18 | /// Send tokens using the Ithaca relay 19 | Send(send::Args), 20 | } 21 | 22 | #[tokio::main] 23 | async fn main() -> Result<()> { 24 | let cli = Cli::parse(); 25 | 26 | match cli.command { 27 | Commands::Send(args) => args.execute().await, 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /src/types/transaction.rs: -------------------------------------------------------------------------------- 1 | use crate::transactions::TransactionServiceHandle; 2 | use alloy::primitives::{ChainId, map::HashMap}; 3 | use std::sync::Arc; 4 | 5 | /// Transaction service handles for multiple chains 6 | #[derive(Debug, Clone)] 7 | pub struct TransactionServiceHandles(Arc>); 8 | 9 
| impl TransactionServiceHandles { 10 | /// Create new TransactionServiceHandles 11 | pub fn new(handles: HashMap) -> Self { 12 | Self(Arc::new(handles)) 13 | } 14 | 15 | /// Get a transaction service handle by chain ID 16 | pub fn get(&self, chain_id: &ChainId) -> Option<&TransactionServiceHandle> { 17 | self.0.get(chain_id) 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /.sqlx/query-d3dfb6b5e67050d361d40444031b247d9ad67ce219fa241e614f3ac15e39afc9.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "update txs set status = $1 where tx_id = $2", 4 | "describe": { 5 | "columns": [], 6 | "parameters": { 7 | "Left": [ 8 | { 9 | "Custom": { 10 | "name": "tx_status", 11 | "kind": { 12 | "Enum": [ 13 | "inflight", 14 | "pending", 15 | "confirmed", 16 | "failed" 17 | ] 18 | } 19 | } 20 | }, 21 | "Bytea" 22 | ] 23 | }, 24 | "nullable": [] 25 | }, 26 | "hash": "d3dfb6b5e67050d361d40444031b247d9ad67ce219fa241e614f3ac15e39afc9" 27 | } 28 | -------------------------------------------------------------------------------- /src/metrics/periodic/types/mod.rs: -------------------------------------------------------------------------------- 1 | mod balance; 2 | use alloy::primitives::{U256, utils::format_units}; 3 | pub use balance::*; 4 | 5 | mod liquidity; 6 | pub use liquidity::*; 7 | 8 | /// Formats a U256 value into a f64 with the specified number of decimals. 9 | pub fn format_units_f64(value: U256, decimals: u8) -> eyre::Result { 10 | Ok(format_units(value, decimals)?.parse::()?) 
11 | } 12 | 13 | #[cfg(test)] 14 | mod tests { 15 | use super::*; 16 | use std::str::FromStr; 17 | 18 | #[test] 19 | fn test_format_units_f64() { 20 | let value = U256::from_str("12345678901234567890").unwrap(); 21 | let decimals = 18; 22 | assert_eq!(format_units_f64(value, decimals).unwrap(), 12.345678901234567); 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /.sqlx/query-4f4f8bd7f995f16a895eff8f5f0227d0f5f09b1699aa444c59b273b5a7cf4413.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "\n SELECT tx_id, asset_diffs FROM asset_diffs WHERE tx_id = ANY($1)\n ", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "tx_id", 9 | "type_info": "Bytea" 10 | }, 11 | { 12 | "ordinal": 1, 13 | "name": "asset_diffs", 14 | "type_info": "Jsonb" 15 | } 16 | ], 17 | "parameters": { 18 | "Left": [ 19 | "ByteaArray" 20 | ] 21 | }, 22 | "nullable": [ 23 | false, 24 | false 25 | ] 26 | }, 27 | "hash": "4f4f8bd7f995f16a895eff8f5f0227d0f5f09b1699aa444c59b273b5a7cf4413" 28 | } 29 | -------------------------------------------------------------------------------- /.sqlx/query-ca924b77cc9a0ffa23670bf0e1b80fb71ddcb0a27dcc95ae2daf07fa1326f390.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "\n WITH moved AS (\n DELETE FROM pending_bundles\n WHERE bundle_id = $1\n RETURNING bundle_id, status, bundle_data, created_at\n )\n INSERT INTO finished_bundles (bundle_id, status, bundle_data, created_at, finished_at)\n SELECT bundle_id, status, bundle_data, created_at, NOW()\n FROM moved\n ", 4 | "describe": { 5 | "columns": [], 6 | "parameters": { 7 | "Left": [ 8 | "Bytea" 9 | ] 10 | }, 11 | "nullable": [] 12 | }, 13 | "hash": "ca924b77cc9a0ffa23670bf0e1b80fb71ddcb0a27dcc95ae2daf07fa1326f390" 14 | } 15 | 
-------------------------------------------------------------------------------- /.sqlx/query-270d917a154bd20ec47184954d32b8dc9db680a20013978620e4dfceb4ddaf95.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "\n SELECT transaction_data\n FROM pull_gas_transactions\n WHERE signer_address = $1\n AND chain_id = $2\n AND state = 'pending'\n ORDER BY created_at\n ", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "transaction_data", 9 | "type_info": "Jsonb" 10 | } 11 | ], 12 | "parameters": { 13 | "Left": [ 14 | "Bytea", 15 | "Int8" 16 | ] 17 | }, 18 | "nullable": [ 19 | false 20 | ] 21 | }, 22 | "hash": "270d917a154bd20ec47184954d32b8dc9db680a20013978620e4dfceb4ddaf95" 23 | } 24 | -------------------------------------------------------------------------------- /.sqlx/query-60a6599da9c3c316c5965152cd0130d6c2e3aac26c9fb7e00aac77041eb299f4.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "select phone, extract(epoch from verified_at)::bigint as verified_at from phones where address = $1 and verified_at is not null", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "phone", 9 | "type_info": "Text" 10 | }, 11 | { 12 | "ordinal": 1, 13 | "name": "verified_at", 14 | "type_info": "Int8" 15 | } 16 | ], 17 | "parameters": { 18 | "Left": [ 19 | "Bytea" 20 | ] 21 | }, 22 | "nullable": [ 23 | false, 24 | null 25 | ] 26 | }, 27 | "hash": "60a6599da9c3c316c5965152cd0130d6c2e3aac26c9fb7e00aac77041eb299f4" 28 | } 29 | -------------------------------------------------------------------------------- /.sqlx/query-42298767b4d0c49cbf76aa6224137b475dadd3d07e3c0c5f2e843a2bc48eaa09.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "\n select\n extract(epoch from coalesce(verified_at, 
created_at))::bigint as timestamp\n from emails\n where address = $1\n order by verified_at is not null desc, created_at desc\n limit 1\n ", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "timestamp", 9 | "type_info": "Int8" 10 | } 11 | ], 12 | "parameters": { 13 | "Left": [ 14 | "Bytea" 15 | ] 16 | }, 17 | "nullable": [ 18 | null 19 | ] 20 | }, 21 | "hash": "42298767b4d0c49cbf76aa6224137b475dadd3d07e3c0c5f2e843a2bc48eaa09" 22 | } 23 | -------------------------------------------------------------------------------- /.sqlx/query-80d12caf4bb184a309dc40c49d8899666d7ad87366a8ad025c96664461dc94a8.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "update bridge_transfers set status = $1 where transfer_id = $2", 4 | "describe": { 5 | "columns": [], 6 | "parameters": { 7 | "Left": [ 8 | { 9 | "Custom": { 10 | "name": "bridge_transfer_status", 11 | "kind": { 12 | "Enum": [ 13 | "pending", 14 | "sent", 15 | "outbound_failed", 16 | "completed", 17 | "inbound_failed" 18 | ] 19 | } 20 | } 21 | }, 22 | "Bytea" 23 | ] 24 | }, 25 | "nullable": [] 26 | }, 27 | "hash": "80d12caf4bb184a309dc40c49d8899666d7ad87366a8ad025c96664461dc94a8" 28 | } 29 | -------------------------------------------------------------------------------- /.sqlx/query-21fdfd5f099a9fbfd3920e95e7dad2d60beaa400b2cbf84cd0af12a242dbaff4.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "\n UPDATE pull_gas_transactions\n SET state = $2,\n updated_at = NOW()\n WHERE id = $1\n ", 4 | "describe": { 5 | "columns": [], 6 | "parameters": { 7 | "Left": [ 8 | "Bytea", 9 | { 10 | "Custom": { 11 | "name": "pull_gas_state", 12 | "kind": { 13 | "Enum": [ 14 | "pending", 15 | "completed", 16 | "failed" 17 | ] 18 | } 19 | } 20 | } 21 | ] 22 | }, 23 | "nullable": [] 24 | }, 25 | "hash": 
/// Chain specific metrics for the layerzero settler.
#[derive(Metrics)]
#[metrics(scope = "layerzero")]
pub struct LayerZeroChainMetrics {
    /// Cumulative layerzero fees paid on this chain
    cumulative_fees_paid: Counter,
    /// Histogram for the native fees paid on this chain
    fees_paid_histogram: Histogram,
}

impl LayerZeroChainMetrics {
    /// Record native fee paid for layerzero
    ///
    /// Records the fee into the histogram and, when it fits the counter's
    /// integer type, adds it to the cumulative counter.
    pub fn record_fee_paid(&self, fee: U256) {
        // NOTE(review): f64::from on a U256 is an approximate conversion for
        // values beyond f64 mantissa precision — confirm that is acceptable
        // for histogram buckets.
        self.fees_paid_histogram.record(f64::from(fee));
        // Fees too large for the counter's integer type are silently skipped:
        // the `if let Ok` drops them from the cumulative total.
        // NOTE(review): confirm dropping oversized fees (rather than clamping)
        // is the intended behavior.
        if let Ok(fee) = fee.try_into() {
            self.cumulative_fees_paid.increment(fee);
        }
    }
}
2 | 3 | use serde::{Deserialize, Serialize}; 4 | use sqlx::Type; 5 | use strum::{Display, EnumString}; 6 | 7 | /// States of a pull gas transaction 8 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Type, Display, EnumString)] 9 | #[sqlx(type_name = "pull_gas_state", rename_all = "lowercase")] 10 | #[strum(serialize_all = "lowercase")] 11 | pub enum PullGasState { 12 | /// Transaction is pending (created or sent to chain) 13 | Pending, 14 | /// Transaction confirmed successfully 15 | Completed, 16 | /// Transaction failed 17 | Failed, 18 | } 19 | 20 | impl PullGasState { 21 | /// Whether the transaction has succeeded. 22 | pub fn is_completed(&self) -> bool { 23 | matches!(self, Self::Completed) 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /.sqlx/query-4325db0dbbaa7043d2e4dc4e01e08413a09813c047d3ff23c672d57d958efc11.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "\n SELECT bundle_id, refund_timestamp\n FROM pending_refunds\n WHERE refund_timestamp <= $1\n ORDER BY refund_timestamp ASC\n ", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "bundle_id", 9 | "type_info": "Bytea" 10 | }, 11 | { 12 | "ordinal": 1, 13 | "name": "refund_timestamp", 14 | "type_info": "Timestamptz" 15 | } 16 | ], 17 | "parameters": { 18 | "Left": [ 19 | "Timestamptz" 20 | ] 21 | }, 22 | "nullable": [ 23 | false, 24 | false 25 | ] 26 | }, 27 | "hash": "4325db0dbbaa7043d2e4dc4e01e08413a09813c047d3ff23c672d57d958efc11" 28 | } 29 | -------------------------------------------------------------------------------- /.sqlx/query-6d0902521dbbf2b5d2369c6e048ad08fde7e008a93be8d6182d224b55ff10dda.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "\n SELECT chain_id, asset_address, amount\n FROM locked_liquidity\n ", 4 | "describe": { 5 | 
"columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "chain_id", 9 | "type_info": "Int8" 10 | }, 11 | { 12 | "ordinal": 1, 13 | "name": "asset_address", 14 | "type_info": "Bytea" 15 | }, 16 | { 17 | "ordinal": 2, 18 | "name": "amount", 19 | "type_info": "Numeric" 20 | } 21 | ], 22 | "parameters": { 23 | "Left": [] 24 | }, 25 | "nullable": [ 26 | false, 27 | false, 28 | false 29 | ] 30 | }, 31 | "hash": "6d0902521dbbf2b5d2369c6e048ad08fde7e008a93be8d6182d224b55ff10dda" 32 | } 33 | -------------------------------------------------------------------------------- /src/serde/timestamp.rs: -------------------------------------------------------------------------------- 1 | //! Helpers for serializing and deserializing [`SystemTime`]. 2 | 3 | use serde::{self, Deserialize, Deserializer, Serializer}; 4 | use std::time::{Duration, SystemTime, UNIX_EPOCH}; 5 | 6 | /// Serializes [`SystemTime`] as the number of seconds since [`UNIX_EPOCH`]. 7 | pub fn serialize(time: &SystemTime, serializer: S) -> Result 8 | where 9 | S: Serializer, 10 | { 11 | let timestamp = time.duration_since(UNIX_EPOCH).unwrap_or(Duration::from_secs(0)).as_secs(); 12 | serializer.serialize_u64(timestamp) 13 | } 14 | 15 | /// Deserializes a number of seconds since [`UNIX_EPOCH`] into a [`SystemTime`]. 16 | pub fn deserialize<'de, D>(deserializer: D) -> Result 17 | where 18 | D: Deserializer<'de>, 19 | { 20 | let timestamp = u64::deserialize(deserializer)?; 21 | Ok(UNIX_EPOCH + Duration::from_secs(timestamp)) 22 | } 23 | -------------------------------------------------------------------------------- /src/types/multicall.rs: -------------------------------------------------------------------------------- 1 | //! Multicall3 contract interface for batching multiple calls. 2 | 3 | use alloy::sol; 4 | 5 | sol! 
{ 6 | /// Represents a single call in a multicall batch 7 | #[derive(Debug)] 8 | struct Call3 { 9 | /// Target contract address 10 | address target; 11 | /// Whether to allow this call to fail 12 | bool allowFailure; 13 | /// Encoded function call data 14 | bytes callData; 15 | } 16 | 17 | /// Result of a single call in aggregate3 18 | #[derive(Debug)] 19 | struct Result { 20 | /// Whether the call was successful 21 | bool success; 22 | /// The return data from the call 23 | bytes returnData; 24 | } 25 | 26 | /// Execute multiple calls in a single transaction 27 | function aggregate3(Call3[] calldata calls) public payable returns (Result[] memory returnData); 28 | } 29 | -------------------------------------------------------------------------------- /.sqlx/query-897bfeb578a927a561e619883e66d64756377054d9165317851766952e4a6243.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "\n INSERT INTO pull_gas_transactions\n (id, signer_address, chain_id, state, transaction_data)\n VALUES ($1, $2, $3, $4, $5)\n ", 4 | "describe": { 5 | "columns": [], 6 | "parameters": { 7 | "Left": [ 8 | "Bytea", 9 | "Bytea", 10 | "Int8", 11 | { 12 | "Custom": { 13 | "name": "pull_gas_state", 14 | "kind": { 15 | "Enum": [ 16 | "pending", 17 | "completed", 18 | "failed" 19 | ] 20 | } 21 | } 22 | }, 23 | "Jsonb" 24 | ] 25 | }, 26 | "nullable": [] 27 | }, 28 | "hash": "897bfeb578a927a561e619883e66d64756377054d9165317851766952e4a6243" 29 | } 30 | -------------------------------------------------------------------------------- /migrations/0022_pull_gas_tracking.sql: -------------------------------------------------------------------------------- 1 | -- create enum for pull_gas transaction states 2 | CREATE TYPE pull_gas_state AS ENUM ('pending', 'completed', 'failed'); 3 | 4 | -- create table to track pull_gas transactions with JSONB storage 5 | CREATE TABLE IF NOT EXISTS pull_gas_transactions ( 6 | -- 
transaction hash 7 | id BYTEA PRIMARY KEY, 8 | -- signer address 9 | signer_address BYTEA NOT NULL, 10 | -- chain ID 11 | chain_id BIGINT NOT NULL, 12 | state pull_gas_state NOT NULL DEFAULT 'pending', 13 | -- tx envelope 14 | transaction_data JSONB NOT NULL, 15 | -- timestamps 16 | created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), 17 | updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW() 18 | ); 19 | 20 | -- add indexes for queries 21 | CREATE INDEX idx_pull_gas_signer_chain ON pull_gas_transactions(signer_address, chain_id); 22 | CREATE INDEX idx_pull_gas_state ON pull_gas_transactions(state); -------------------------------------------------------------------------------- /migrations/0001_initial.sql: -------------------------------------------------------------------------------- 1 | create table if not exists accounts ( 2 | address bytea not null unique, 3 | account jsonb not null, 4 | created_at timestamp not null default now () 5 | ); 6 | 7 | create table if not exists keys ( 8 | key_id bytea not null, 9 | account_address bytea not null, 10 | key_hash bytea not null, 11 | signature bytea not null 12 | ); 13 | 14 | create type tx_status as enum ('inflight', 'pending', 'confirmed', 'failed'); 15 | 16 | create table if not exists txs ( 17 | tx_id bytea not null unique, 18 | bundle_id bytea not null, 19 | status tx_status not null default 'inflight', 20 | tx_hash bytea 21 | ); 22 | 23 | create table if not exists pending_txs ( 24 | chain_id bigserial not null, 25 | sender bytea not null, 26 | tx_id bytea not null unique, 27 | tx jsonb not null, 28 | envelope jsonb not null, 29 | received_at timestamp not null 30 | ); 31 | -------------------------------------------------------------------------------- /.sqlx/query-9cb21fc4b4845f71b4cd5ae3a960ba62477716111279b0308880ec711e0ed8ab.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "select * from locked_liquidity where (chain_id, 
asset_address) = ANY(SELECT unnest($1::bigint[]), unnest($2::bytea[]))", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "chain_id", 9 | "type_info": "Int8" 10 | }, 11 | { 12 | "ordinal": 1, 13 | "name": "asset_address", 14 | "type_info": "Bytea" 15 | }, 16 | { 17 | "ordinal": 2, 18 | "name": "amount", 19 | "type_info": "Numeric" 20 | } 21 | ], 22 | "parameters": { 23 | "Left": [ 24 | "Int8Array", 25 | "ByteaArray" 26 | ] 27 | }, 28 | "nullable": [ 29 | false, 30 | false, 31 | false 32 | ] 33 | }, 34 | "hash": "9cb21fc4b4845f71b4cd5ae3a960ba62477716111279b0308880ec711e0ed8ab" 35 | } 36 | -------------------------------------------------------------------------------- /.sqlx/query-eb066e77020459c15f2301f23614630195ffc710c529a27e81f4e711514c5a47.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "\n SELECT chain_id, asset_address, SUM(amount) AS \"amount!: BigDecimal\"\n FROM pending_unlocks\n GROUP BY chain_id, asset_address\n ", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "chain_id", 9 | "type_info": "Int8" 10 | }, 11 | { 12 | "ordinal": 1, 13 | "name": "asset_address", 14 | "type_info": "Bytea" 15 | }, 16 | { 17 | "ordinal": 2, 18 | "name": "amount!: BigDecimal", 19 | "type_info": "Numeric" 20 | } 21 | ], 22 | "parameters": { 23 | "Left": [] 24 | }, 25 | "nullable": [ 26 | false, 27 | false, 28 | null 29 | ] 30 | }, 31 | "hash": "eb066e77020459c15f2301f23614630195ffc710c529a27e81f4e711514c5a47" 32 | } 33 | -------------------------------------------------------------------------------- /migrations/0019_refunds.sql: -------------------------------------------------------------------------------- 1 | -- Create table for tracking pending refunds 2 | -- Stores bundle_id and the maximum refund timestamp for delayed refund processing 3 | -- This table is used by the RefundMonitorService to find refunds ready for execution 4 | -- NOTE: No 
foreign key constraint to pending_bundles as bundles may move to finished_bundles 5 | CREATE TABLE pending_refunds ( 6 | bundle_id BYTEA PRIMARY KEY, 7 | refund_timestamp TIMESTAMPTZ NOT NULL, 8 | created_at TIMESTAMPTZ NOT NULL DEFAULT NOW() 9 | ); 10 | 11 | -- Index for efficiently finding refunds ready to process 12 | CREATE INDEX idx_pending_refunds_timestamp 13 | ON pending_refunds(refund_timestamp); 14 | 15 | -- Add new bundle status values 16 | ALTER TYPE bundle_status ADD VALUE 'settlements_queued'; 17 | ALTER TYPE bundle_status ADD VALUE 'settlements_confirmed'; 18 | ALTER TYPE bundle_status ADD VALUE 'refunds_scheduled'; 19 | ALTER TYPE bundle_status ADD VALUE 'refunds_ready'; 20 | -------------------------------------------------------------------------------- /.sqlx/query-892ab2ea122fabd13647fcecf88ce0798ac90407d4fd904d5b975e00fe76991e.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "select * from locked_liquidity where (chain_id, asset_address) = ANY(SELECT unnest($1::bigint[]), unnest($2::bytea[])) for update", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "chain_id", 9 | "type_info": "Int8" 10 | }, 11 | { 12 | "ordinal": 1, 13 | "name": "asset_address", 14 | "type_info": "Bytea" 15 | }, 16 | { 17 | "ordinal": 2, 18 | "name": "amount", 19 | "type_info": "Numeric" 20 | } 21 | ], 22 | "parameters": { 23 | "Left": [ 24 | "Int8Array", 25 | "ByteaArray" 26 | ] 27 | }, 28 | "nullable": [ 29 | false, 30 | false, 31 | false 32 | ] 33 | }, 34 | "hash": "892ab2ea122fabd13647fcecf88ce0798ac90407d4fd904d5b975e00fe76991e" 35 | } 36 | -------------------------------------------------------------------------------- /.sqlx/query-4b9602a95b13d65d494bb5a37e355c76b01f67be7472f2cfcc09784d16a5d76a.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "select * from queued_txs where 
chain_id = $1 order by id", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "id", 9 | "type_info": "Int4" 10 | }, 11 | { 12 | "ordinal": 1, 13 | "name": "chain_id", 14 | "type_info": "Int8" 15 | }, 16 | { 17 | "ordinal": 2, 18 | "name": "tx_id", 19 | "type_info": "Bytea" 20 | }, 21 | { 22 | "ordinal": 3, 23 | "name": "tx", 24 | "type_info": "Jsonb" 25 | } 26 | ], 27 | "parameters": { 28 | "Left": [ 29 | "Int8" 30 | ] 31 | }, 32 | "nullable": [ 33 | false, 34 | false, 35 | false, 36 | false 37 | ] 38 | }, 39 | "hash": "4b9602a95b13d65d494bb5a37e355c76b01f67be7472f2cfcc09784d16a5d76a" 40 | } 41 | -------------------------------------------------------------------------------- /src/types/rpc/mod.rs: -------------------------------------------------------------------------------- 1 | //! RPC request and response types. 2 | 3 | mod account; 4 | pub use account::*; 5 | 6 | mod assets; 7 | pub use assets::*; 8 | 9 | mod calls; 10 | pub use calls::*; 11 | 12 | mod keys; 13 | pub use keys::*; 14 | 15 | mod permission; 16 | pub use permission::*; 17 | 18 | mod capabilities; 19 | pub use capabilities::*; 20 | 21 | mod faucet; 22 | pub use faucet::*; 23 | 24 | use alloy::primitives::{Address, U256}; 25 | use serde::{Deserialize, Serialize}; 26 | 27 | /// Represents extra request values. 28 | #[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize, Default)] 29 | #[serde(rename_all = "camelCase")] 30 | pub struct Meta { 31 | /// Payer of the gas 32 | /// 33 | /// Defaults to the EOA. 34 | pub fee_payer: Option
, 35 | /// ERC20 token to pay for the gas of the calls. 36 | /// 37 | /// Defaults to the native token. 38 | pub fee_token: Option
, 39 | /// Nonce. 40 | pub nonce: Option, 41 | } 42 | -------------------------------------------------------------------------------- /src/serde/trace_context.rs: -------------------------------------------------------------------------------- 1 | //! Helpers for serializing and deserializing [`Context`]. 2 | 3 | use opentelemetry::{Context, global}; 4 | use serde::{self, Deserialize, Deserializer, Serialize, Serializer}; 5 | use std::collections::HashMap; 6 | 7 | /// Serializes [`Context`] as a `HashMap`. 8 | pub fn serialize(ctx: &Context, serializer: S) -> Result 9 | where 10 | S: Serializer, 11 | { 12 | let mut carrier = HashMap::new(); 13 | global::get_text_map_propagator(|propagator| { 14 | propagator.inject_context(ctx, &mut carrier); 15 | }); 16 | carrier.serialize(serializer) 17 | } 18 | 19 | /// Deserializes a `HashMap` into a [`Context`]. 20 | pub fn deserialize<'de, D>(deserializer: D) -> Result 21 | where 22 | D: Deserializer<'de>, 23 | { 24 | let carrier = HashMap::::deserialize(deserializer)?; 25 | Ok(global::get_text_map_propagator(|propagator| propagator.extract(&carrier))) 26 | } 27 | -------------------------------------------------------------------------------- /migrations/0025_authorization_list.sql: -------------------------------------------------------------------------------- 1 | -- Migrate from authorization (single optional) to authorization_list (array) in Intent variant 2 | 3 | -- For pending_txs: rename authorization to authorization_list and convert to array 4 | UPDATE pending_txs 5 | SET tx = jsonb_set( 6 | tx - 'authorization', 7 | '{authorization_list}', 8 | CASE 9 | WHEN tx->'authorization' IS NOT NULL AND tx->'authorization' != 'null'::jsonb THEN 10 | jsonb_build_array(tx->'authorization') 11 | ELSE 12 | '[]'::jsonb 13 | END 14 | ) 15 | WHERE tx ? 
'quote'; 16 | 17 | -- For queued_txs: rename authorization to authorization_list and convert to array 18 | UPDATE queued_txs 19 | SET tx = jsonb_set( 20 | tx - 'authorization', 21 | '{authorization_list}', 22 | CASE 23 | WHEN tx->'authorization' IS NOT NULL AND tx->'authorization' != 'null'::jsonb THEN 24 | jsonb_build_array(tx->'authorization') 25 | ELSE 26 | '[]'::jsonb 27 | END 28 | ) 29 | WHERE tx ? 'quote'; -------------------------------------------------------------------------------- /tests/e2e/cases/snapshots/tests__e2e__cases__rpc_snap__get_keys.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: tests/e2e/cases/rpc_snap.rs 3 | expression: response 4 | --- 5 | { 6 | "0x7a69": [ 7 | { 8 | "hash": "0xf377756284e290a571b9f06981121207a3fd7e9970e1473fe9e553c16a804f68", 9 | "expiry": "0x0", 10 | "type": "secp256k1", 11 | "role": "admin", 12 | "publicKey": "0x0000000000000000000000001409e2aad496505095defaaa2baff67c65168e65", 13 | "permissions": [] 14 | }, 15 | { 16 | "hash": "0x12bbb12078df9966cf1035cfd7ec1d8eb6c5971cbef21427e9dd0865e8153efd", 17 | "expiry": "0x0", 18 | "type": "p256", 19 | "role": "normal", 20 | "publicKey": "0x53f04de62ca2d17dc1a7cf36672a3a84049eeaf8fc2d976c48f22b85b3d96b5938e8a43a312e3753964811c1874ce21343cc3cf8910e05585a6d7216986dfb85", 21 | "permissions": [ 22 | { 23 | "type": "call", 24 | "selector": "0xa9059cbb", 25 | "to": "0xed1db453c3156ff3155a97ad217b3087d5dc5f6e" 26 | } 27 | ] 28 | } 29 | ] 30 | } 31 | -------------------------------------------------------------------------------- /.sqlx/query-110fdd402d29c3f5aa016c4f4bda551418ea75c3c14b54c6c25d6a844bbf1607.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "delete from pending_unlocks where chain_id = $1 and block_number <= $2 returning *", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "chain_id", 9 | "type_info": "Int8" 
10 | }, 11 | { 12 | "ordinal": 1, 13 | "name": "asset_address", 14 | "type_info": "Bytea" 15 | }, 16 | { 17 | "ordinal": 2, 18 | "name": "block_number", 19 | "type_info": "Int8" 20 | }, 21 | { 22 | "ordinal": 3, 23 | "name": "amount", 24 | "type_info": "Numeric" 25 | } 26 | ], 27 | "parameters": { 28 | "Left": [ 29 | "Int8", 30 | "Int8" 31 | ] 32 | }, 33 | "nullable": [ 34 | false, 35 | false, 36 | false, 37 | false 38 | ] 39 | }, 40 | "hash": "110fdd402d29c3f5aa016c4f4bda551418ea75c3c14b54c6c25d6a844bbf1607" 41 | } 42 | -------------------------------------------------------------------------------- /src/serde/key_role.rs: -------------------------------------------------------------------------------- 1 | //! Helpers for serialization and deserialization of key roles. 2 | 3 | use serde::{Deserialize, Deserializer, Serializer}; 4 | 5 | /// Deserialize a string into a boolean value. 6 | /// 7 | /// - `"admin"` -> `true` 8 | /// - `"normal"` -> `false` 9 | /// - `"session"` -> `false` 10 | pub fn deserialize<'de, D>(deserializer: D) -> Result 11 | where 12 | D: Deserializer<'de>, 13 | { 14 | let s: &str = Deserialize::deserialize(deserializer)?; 15 | match s { 16 | "admin" => Ok(true), 17 | "normal" | "session" => Ok(false), 18 | _ => Err(serde::de::Error::unknown_variant(s, &["admin", "normal", "session"])), 19 | } 20 | } 21 | 22 | /// Serialize a boolean value as `"admin"` (`true`) or `"normal"` (`false`). 
23 | pub fn serialize(value: &bool, serializer: S) -> Result 24 | where 25 | S: Serializer, 26 | { 27 | match value { 28 | true => serializer.serialize_str("admin"), 29 | false => serializer.serialize_str("normal"), 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /.sqlx/query-e4ced6542c605a22a57fbe6a34e50fe20e9ffae4532bde1f123626595ebe3083.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "\n SELECT asset_uid, timestamp, usd_price\n FROM historical_usd_prices\n WHERE (asset_uid, timestamp) IN (\n SELECT * FROM UNNEST($1::text[], $2::bigint[])\n )\n ", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "asset_uid", 9 | "type_info": "Text" 10 | }, 11 | { 12 | "ordinal": 1, 13 | "name": "timestamp", 14 | "type_info": "Int8" 15 | }, 16 | { 17 | "ordinal": 2, 18 | "name": "usd_price", 19 | "type_info": "Float8" 20 | } 21 | ], 22 | "parameters": { 23 | "Left": [ 24 | "TextArray", 25 | "Int8Array" 26 | ] 27 | }, 28 | "nullable": [ 29 | false, 30 | false, 31 | false 32 | ] 33 | }, 34 | "hash": "e4ced6542c605a22a57fbe6a34e50fe20e9ffae4532bde1f123626595ebe3083" 35 | } 36 | -------------------------------------------------------------------------------- /src/error/email.rs: -------------------------------------------------------------------------------- 1 | use super::{internal_rpc, invalid_params}; 2 | use thiserror::Error; 3 | 4 | /// Errors related to emails. 5 | #[derive(Debug, Error)] 6 | pub enum EmailError { 7 | /// E-mail already verified on at least one account. 8 | #[error("email already verified")] 9 | EmailAlreadyVerified, 10 | /// The verification token was incorrect. 11 | #[error("invalid verification token")] 12 | InvalidToken, 13 | /// The signature was incorrect. 14 | #[error("invalid token signature")] 15 | InvalidSignature, 16 | /// An internal error occurred. 
17 | #[error(transparent)] 18 | InternalError(#[from] eyre::Error), 19 | } 20 | 21 | impl From for jsonrpsee::types::error::ErrorObject<'static> { 22 | fn from(err: EmailError) -> Self { 23 | match err { 24 | EmailError::EmailAlreadyVerified 25 | | EmailError::InvalidToken 26 | | EmailError::InvalidSignature => invalid_params(err.to_string()), 27 | EmailError::InternalError(..) => internal_rpc(err.to_string()), 28 | } 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /.sqlx/query-768abd437f7b9f89622ba85c870a3cc92b8b3716a8b87b11e4bed13498ad4b30.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "select * from pending_unlocks where (chain_id, asset_address) = ANY(SELECT unnest($1::bigint[]), unnest($2::bytea[])) for update", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "chain_id", 9 | "type_info": "Int8" 10 | }, 11 | { 12 | "ordinal": 1, 13 | "name": "asset_address", 14 | "type_info": "Bytea" 15 | }, 16 | { 17 | "ordinal": 2, 18 | "name": "block_number", 19 | "type_info": "Int8" 20 | }, 21 | { 22 | "ordinal": 3, 23 | "name": "amount", 24 | "type_info": "Numeric" 25 | } 26 | ], 27 | "parameters": { 28 | "Left": [ 29 | "Int8Array", 30 | "ByteaArray" 31 | ] 32 | }, 33 | "nullable": [ 34 | false, 35 | false, 36 | false, 37 | false 38 | ] 39 | }, 40 | "hash": "768abd437f7b9f89622ba85c870a3cc92b8b3716a8b87b11e4bed13498ad4b30" 41 | } 42 | -------------------------------------------------------------------------------- /tests/e2e/layerzero/contracts/build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | # Clear dependencies to make the script rerunnable 5 | rm -rf lib 6 | 7 | # Install dependencies 8 | forge install foundry-rs/forge-std@v1.9.5 --no-git 9 | forge install vectorized/solady@v0.0.246 --no-git 10 | forge install 
LayerZero-Labs/LayerZero-v2@88428755be6caa71cb1d2926141d73c8989296b5 --no-git 11 | forge install LayerZero-Labs/devtools@2648a5cb4497019c03d516d809d8be25bfbd1798 --no-git 12 | forge install OpenZeppelin/openzeppelin-contracts@v5.0.2 --no-git 13 | 14 | # Copy EndpointV2Mock from devtools after installation 15 | ENDPOINT_MOCK_PATH="lib/devtools/packages/test-devtools-evm-foundry/contracts/mocks/EndpointV2Mock.sol" 16 | if [ -f "$ENDPOINT_MOCK_PATH" ]; then 17 | cp "$ENDPOINT_MOCK_PATH" src/ 18 | echo "Copied EndpointV2Mock.sol from devtools" 19 | else 20 | echo "Error: EndpointV2Mock.sol not found at $ENDPOINT_MOCK_PATH" 21 | echo "Contents of lib/devtools (if exists):" 22 | ls -la lib/devtools 2>/dev/null || echo "lib/devtools not found" 23 | exit 1 24 | fi 25 | 26 | # Build contracts 27 | forge build 28 | -------------------------------------------------------------------------------- /tests/e2e/constants.rs: -------------------------------------------------------------------------------- 1 | //! 
Relay end-to-end test constants 2 | 3 | use alloy::primitives::{Address, B256, FixedBytes, address, b256, fixed_bytes}; 4 | 5 | pub const EOA_PRIVATE_KEY: B256 = 6 | b256!("0x174aca1574aca1574aca1574aca1574aca1574aca1574aca1574aca1574aca15"); 7 | 8 | pub const SIGNERS_MNEMONIC: &str = 9 | "forget sound story reveal safe minimum wasp mechanic solar predict harsh catch"; 10 | 11 | pub const DEFAULT_EXECUTE_SELECTOR: FixedBytes<4> = fixed_bytes!("0x32323232"); 12 | 13 | pub const DEFAULT_EXECUTE_TO: Address = address!("0x3232323232323232323232323232323232323232"); 14 | 15 | pub const FIRST_RELAY_SIGNER: B256 = 16 | b256!("0xab78933d36d3e049cc43e1f72845a6c03cdadf8557027bc4895053e8351b71cd"); 17 | 18 | pub const DEPLOYER_PRIVATE_KEY: B256 = 19 | b256!("0x2a871d0798f97d79848a013d4936a73bf4cc922c825d33c1cf7073dff6d409c6"); 20 | 21 | pub const LAYERZERO_DEPLOYER_ADDRESS: Address = 22 | address!("0x8fd379246834eac74B8419FfdA202CF8051F7A03"); 23 | 24 | pub const LAYERZERO_DEPLOYER_PRIVATE_KEY: B256 = 25 | b256!("0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"); 26 | -------------------------------------------------------------------------------- /.sqlx/query-dc6b871547e833a766105ee576575c0e88ccafa1d87f5d81e85e91f6caffa185.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "select * from emails where email = $1 and verified_at is not null", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "address", 9 | "type_info": "Bytea" 10 | }, 11 | { 12 | "ordinal": 1, 13 | "name": "email", 14 | "type_info": "Text" 15 | }, 16 | { 17 | "ordinal": 2, 18 | "name": "token", 19 | "type_info": "Varchar" 20 | }, 21 | { 22 | "ordinal": 3, 23 | "name": "verified_at", 24 | "type_info": "Timestamp" 25 | }, 26 | { 27 | "ordinal": 4, 28 | "name": "created_at", 29 | "type_info": "Timestamp" 30 | } 31 | ], 32 | "parameters": { 33 | "Left": [ 34 | "Text" 35 | ] 36 | }, 37 | 
"nullable": [ 38 | false, 39 | false, 40 | false, 41 | true, 42 | false 43 | ] 44 | }, 45 | "hash": "dc6b871547e833a766105ee576575c0e88ccafa1d87f5d81e85e91f6caffa185" 46 | } 47 | -------------------------------------------------------------------------------- /src/error/merkle.rs: -------------------------------------------------------------------------------- 1 | //! Merkle tree error types. 2 | 3 | use thiserror::Error; 4 | 5 | /// Errors that can occur during Merkle tree operations 6 | #[derive(Debug, Clone, PartialEq, Eq, Error)] 7 | pub enum MerkleError { 8 | /// Cannot generate proof for empty tree 9 | #[error("Cannot operate on empty tree")] 10 | EmptyTree, 11 | /// Leaf index out of bounds 12 | #[error("Leaf index {index} out of bounds (tree has {tree_size} leaves)")] 13 | IndexOutOfBounds { 14 | /// The index that was requested 15 | index: usize, 16 | /// The actual size of the tree 17 | tree_size: usize, 18 | }, 19 | /// Cannot generate proof for single leaf tree 20 | #[error("Cannot generate proof for single leaf tree")] 21 | SingleLeaf, 22 | /// Exceeds maximum supported size 23 | #[error("Exceeds maximum supported size")] 24 | TooLarge, 25 | /// Tree size calculation overflow 26 | #[error("Tree size calculation overflow")] 27 | SizeCalculationOverflow, 28 | /// Failed to compute leaf hashes 29 | #[error("Failed to compute leaf hashes: {0}")] 30 | LeafHashError(String), 31 | } 32 | -------------------------------------------------------------------------------- /LICENSE-MIT: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2025-2025 Ithaca 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of 
the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in 13 | all copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 21 | THE SOFTWARE. 22 | -------------------------------------------------------------------------------- /tests/e2e/layerzero/contracts/README.md: -------------------------------------------------------------------------------- 1 | # LayerZero Test Contracts 2 | 3 | This directory contains a standalone Foundry project with custom LayerZero test contracts. 4 | 5 | ## Setup 6 | 7 | Use the build script to install dependencies and build contracts: 8 | ```bash 9 | cd tests/e2e/layerzero/contracts 10 | ./build.sh 11 | ``` 12 | 13 | This script will: 14 | 1. Install forge dependencies 15 | 2. Copy the EndpointV2Mock from the devtools dependency 16 | 3. Build all contracts 17 | 18 | ## Contracts 19 | 20 | - **MockEscrow.sol** - Custom escrow contract that uses LayerZero OApp for cross-chain token transfers 21 | - **MinimalSendReceiveLib.sol** - Minimal implementation of LayerZero send/receive library for testing 22 | 23 | ## Integration with Tests 24 | 25 | The e2e tests expect these contracts to be built and available. The CI workflow builds these contracts separately from the main test contracts. 
26 | 27 | ## Dependencies 28 | 29 | Dependencies are managed via `foundry.toml` and installed with `forge install`: 30 | - LayerZero V2 protocol contracts (commit: 8842875, see `build.sh`) 31 | - LayerZero OApp contracts 32 | - OpenZeppelin contracts v5.0.2 33 | - Solady v0.0.246 -------------------------------------------------------------------------------- /src/types/rpc/faucet.rs: -------------------------------------------------------------------------------- 1 | //! RPC faucet-related request and response types. 2 | 3 | use alloy::primitives::{Address, ChainId, U256}; 4 | use serde::{Deserialize, Serialize}; 5 | 6 | /// Parameters for the `wallet_addFaucetFunds` method. 7 | #[derive(Debug, Clone, Serialize, Deserialize)] 8 | #[serde(rename_all = "camelCase")] 9 | pub struct AddFaucetFundsParameters { 10 | /// The token address to use for the faucet. 11 | pub token_address: Address, 12 | /// The address to send funds to. 13 | pub address: Address, 14 | /// The chain ID where the funds should be added. 15 | #[serde(with = "alloy::serde::quantity")] 16 | pub chain_id: ChainId, 17 | /// The amount of funds to add (in wei for native tokens). 18 | pub value: U256, 19 | } 20 | 21 | /// Response for the `wallet_addFaucetFunds` method. 22 | #[derive(Debug, Clone, Serialize, Deserialize)] 23 | #[serde(rename_all = "camelCase")] 24 | pub struct AddFaucetFundsResponse { 25 | /// The transaction hash of the funding transaction. 26 | pub transaction_hash: Option, 27 | /// Optional message providing details about the operation. 28 | pub message: Option, 29 | } 30 | -------------------------------------------------------------------------------- /src/error/asset.rs: -------------------------------------------------------------------------------- 1 | use crate::types::AssetUid; 2 | 3 | use super::internal_rpc; 4 | use alloy::primitives::Address; 5 | use thiserror::Error; 6 | 7 | /// Errors related to assets. 
8 | #[derive(Debug, Error)] 9 | pub enum AssetError { 10 | /// The response on querying information from assets was invalid. 11 | #[error("call response for the asset info query was invalid.")] 12 | InvalidAssetInfoResponse, 13 | /// The asset info service is unavailable. 14 | #[error("the asset info service is unavailable.")] 15 | ServiceUnavailable, 16 | /// The fee token is not known. 17 | #[error("unknown fee token: {0}")] 18 | UnknownFeeToken(Address), 19 | /// The price for the asset is unavailable. 20 | #[error("price unavailable for asset {0:?}")] 21 | PriceUnavailable(AssetUid), 22 | } 23 | 24 | impl From for jsonrpsee::types::error::ErrorObject<'static> { 25 | fn from(err: AssetError) -> Self { 26 | match err { 27 | AssetError::InvalidAssetInfoResponse 28 | | AssetError::ServiceUnavailable 29 | | AssetError::UnknownFeeToken(_) 30 | | AssetError::PriceUnavailable(_) => internal_rpc(err.to_string()), 31 | } 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /src/metrics/periodic/job.rs: -------------------------------------------------------------------------------- 1 | use tokio::time::Interval; 2 | use tracing::error; 3 | 4 | use super::MetricCollector; 5 | 6 | // A periodic job that holds a collector and its period interval. 7 | pub struct PeriodicJob { 8 | /// Metric collector. 9 | collector: T, 10 | /// Period interval that this collector should be run on. 11 | interval: Interval, 12 | } 13 | 14 | impl PeriodicJob { 15 | /// Creates a [`PeriodicJob`]. 16 | pub fn new(collector: T, interval: Interval) -> Self { 17 | Self { collector, interval } 18 | } 19 | } 20 | 21 | impl PeriodicJob { 22 | /// Launches a tokio task with a created [`PeriodicJob`]. 
23 | pub fn launch_task(collector: T, interval: Interval) 24 | where 25 | T: Send, 26 | { 27 | tokio::spawn(async move { 28 | let mut job = Self::new(collector, interval); 29 | loop { 30 | job.interval.tick().await; 31 | if let Err(err) = job.collector.collect().await { 32 | error!(target = "metrics::periodic", ?err, ?job.collector); 33 | } 34 | job.interval.reset(); 35 | } 36 | }); 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /src/serde/hash_map.rs: -------------------------------------------------------------------------------- 1 | //! [`toml`] does not allow non-[`String`] keys for [`HashMap`], so we're working around this with a 2 | //! helper that (de)serializes keys as strings via [`ToString`] and [`FromStr`] implementations. 3 | 4 | use alloy::primitives::map::HashMap; 5 | use serde::{self, Deserialize, Deserializer, Serialize, Serializer, de::Error}; 6 | use std::{fmt::Display, hash::Hash, str::FromStr}; 7 | 8 | /// Serializes a [`HashMap`] by converting each key to a [`String`] via [`ToString`]. 9 | pub fn serialize(map: &HashMap, serializer: S) -> Result 10 | where 11 | K: ToString + Hash + Eq, 12 | V: Serialize, 13 | S: Serializer, 14 | { 15 | map.iter().map(|(k, v)| (k.to_string(), v)).collect::>().serialize(serializer) 16 | } 17 | 18 | /// Deserializes a string-keyed map into a [`HashMap`], parsing each key via [`FromStr`]. 
19 | pub fn deserialize<'de, K, V, D>(deserializer: D) -> Result, D::Error> 20 | where 21 | K: FromStr + Hash + Eq, 22 | V: Deserialize<'de>, 23 | D: Deserializer<'de>, 24 | { 25 | HashMap::::deserialize(deserializer).and_then(|map| { 26 | map.into_iter() 27 | .map(|(k, v)| K::from_str(&k).map(|k| (k, v)).map_err(D::Error::custom)) 28 | .collect() 29 | }) 30 | } 31 | -------------------------------------------------------------------------------- /.github/workflows/claude.yml: -------------------------------------------------------------------------------- 1 | name: Claude PR Assistant 2 | 3 | on: 4 | issue_comment: 5 | types: [created] 6 | pull_request_review_comment: 7 | types: [created] 8 | issues: 9 | types: [opened, assigned] 10 | pull_request_review: 11 | types: [submitted] 12 | 13 | jobs: 14 | claude-code-action: 15 | if: | 16 | (github.event_name == 'issue_comment' && contains(github.event.comment.body, '@claude')) || 17 | (github.event_name == 'pull_request_review_comment' && contains(github.event.comment.body, '@claude')) || 18 | (github.event_name == 'pull_request_review' && contains(github.event.review.body, '@claude')) || 19 | (github.event_name == 'issues' && contains(github.event.issue.body, '@claude')) 20 | runs-on: ubuntu-latest 21 | permissions: 22 | contents: read 23 | pull-requests: read 24 | issues: read 25 | id-token: write 26 | steps: 27 | - name: Checkout repository 28 | uses: actions/checkout@v4 29 | with: 30 | fetch-depth: 1 31 | 32 | - name: Run Claude PR Action 33 | uses: anthropics/claude-code-action@beta 34 | with: 35 | anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }} 36 | timeout_minutes: "60" 37 | -------------------------------------------------------------------------------- /.sqlx/query-0e7a3fa8802afa71128f8195407a728fe5f09491fbcc77929268ce4bbc428055.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "select * from pending_txs where sender = $1 
and chain_id = $2", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "chain_id", 9 | "type_info": "Int8" 10 | }, 11 | { 12 | "ordinal": 1, 13 | "name": "sender", 14 | "type_info": "Bytea" 15 | }, 16 | { 17 | "ordinal": 2, 18 | "name": "tx_id", 19 | "type_info": "Bytea" 20 | }, 21 | { 22 | "ordinal": 3, 23 | "name": "tx", 24 | "type_info": "Jsonb" 25 | }, 26 | { 27 | "ordinal": 4, 28 | "name": "envelopes", 29 | "type_info": "Jsonb" 30 | }, 31 | { 32 | "ordinal": 5, 33 | "name": "sent_at", 34 | "type_info": "Timestamp" 35 | } 36 | ], 37 | "parameters": { 38 | "Left": [ 39 | "Bytea", 40 | "Int8" 41 | ] 42 | }, 43 | "nullable": [ 44 | false, 45 | false, 46 | false, 47 | false, 48 | false, 49 | false 50 | ] 51 | }, 52 | "hash": "0e7a3fa8802afa71128f8195407a728fe5f09491fbcc77929268ce4bbc428055" 53 | } 54 | -------------------------------------------------------------------------------- /tools/relay-tools/src/bin/recast/README.md: -------------------------------------------------------------------------------- 1 | # recast 2 | 3 | CLI tool for sending tokens via Ithaca relay. 4 | 5 | ## Quick Start 6 | 7 | ```bash 8 | # Send 100 USDC (not wei) on Base 9 | recast send --uid usdc --to 0xd8da6bf26964af9d7eed9e03e53415d37aa96045 --amount 100 --chain 8453 10 | 11 | # Send 0.1 USDC (not wei) on Base with debug output 12 | recast send --uid eth --to 0xd8da6bf26964af9d7eed9e03e53415d37aa96045 --amount 0.1 --chain 8453 --debug 13 | ``` 14 | 15 | ## Help 16 | 17 | ``` 18 | $ recast send --help 19 | Send tokens using the Ithaca relay 20 | 21 | Usage: recast send [OPTIONS] --uid --to
--amount --chain 22 | 23 | Options: 24 | --uid Token UID (e.g., "tether", "teth") 25 | --to
Recipient address 26 | --amount Amount to send (in ether units, e.g., "1.5" for 1.5 ether) 27 | --chain Chain ID 28 | --fee-uid Fee token UID (defaults to the transfer token) 29 | --private-key Private key of the sender [env: PRIVATE_KEY] 30 | --relay-url Relay URL [default: https://stg-rpc.ithaca.xyz] 31 | --debug Enable debug output 32 | -h, --help Print help 33 | ``` -------------------------------------------------------------------------------- /src/types/tokens.rs: -------------------------------------------------------------------------------- 1 | use alloy::sol; 2 | 3 | sol! { 4 | #[sol(rpc)] 5 | #[derive(Debug)] 6 | interface IERC20 { 7 | event Transfer(address indexed from, address indexed to, uint256 amount); 8 | 9 | function name() external view returns (string); 10 | function symbol() external view returns (string); 11 | function decimals() external view returns (uint8); 12 | function approve(address spender, uint256 amount) external returns (bool); 13 | function transfer(address to, uint256 amount) external returns (bool); 14 | function transferFrom(address from, address to, uint256 amount) external returns (bool); 15 | function balanceOf(address eoa) external view returns (uint256); 16 | function allowance(address owner, address spender) external view returns (uint256); 17 | function mint(address recipient, uint256 value); 18 | } 19 | } 20 | 21 | sol! { 22 | #[sol(rpc)] 23 | #[derive(Debug)] 24 | interface IERC721 { 25 | event Transfer(address indexed from, address indexed to, uint256 indexed id); 26 | 27 | function safeTransferFrom(address from, address to, uint256 id); 28 | function tokenURI(uint256 id) public view virtual returns (string); 29 | function burn(uint256 id) public virtual; 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /tools/xtask/src/flags.rs: -------------------------------------------------------------------------------- 1 | xflags::xflags! 
{ 2 | src "./src/flags.rs" 3 | 4 | /// Run custom build command. 5 | cmd xtask { 6 | /// Compile contracts. 7 | cmd contracts {} 8 | /// Run end-to-end tests. 9 | cmd e2e { 10 | /// Additional arguments to pass to cargo test. 11 | repeated rest: String 12 | } 13 | } 14 | } 15 | // generated start 16 | // The following code is generated by `xflags` macro. 17 | // Run `env UPDATE_XFLAGS=1 cargo build` to regenerate. 18 | #[derive(Debug)] 19 | pub struct Xtask { 20 | pub subcommand: XtaskCmd, 21 | } 22 | 23 | #[derive(Debug)] 24 | pub enum XtaskCmd { 25 | Contracts(Contracts), 26 | E2e(E2e), 27 | } 28 | 29 | #[derive(Debug)] 30 | pub struct Contracts; 31 | 32 | #[derive(Debug)] 33 | pub struct E2e { 34 | pub rest: Vec, 35 | } 36 | 37 | impl Xtask { 38 | #[allow(dead_code)] 39 | pub fn from_env_or_exit() -> Self { 40 | Self::from_env_or_exit_() 41 | } 42 | 43 | #[allow(dead_code)] 44 | pub fn from_env() -> xflags::Result { 45 | Self::from_env_() 46 | } 47 | 48 | #[allow(dead_code)] 49 | pub fn from_vec(args: Vec) -> xflags::Result { 50 | Self::from_vec_(args) 51 | } 52 | } 53 | // generated end 54 | -------------------------------------------------------------------------------- /src/error/keys.rs: -------------------------------------------------------------------------------- 1 | use super::invalid_params; 2 | use crate::types::KeyHash; 3 | use thiserror::Error; 4 | 5 | /// Errors related to authorization keys. 6 | #[derive(Debug, Error)] 7 | pub enum KeysError { 8 | /// The key type is not supported. 9 | #[error("only supports `p256`, `webauthnp256` and `secp256k1` key types")] 10 | UnsupportedKeyType, 11 | /// Precalls are only allowed to modify one key to ensure correct ordering. 12 | #[error("precall can't modify multiple keys")] 13 | PrecallConflictingKeys, 14 | /// Should only have admin authorization keys. 15 | #[error("should only have admin authorization keys")] 16 | OnlyAdminKeyAllowed, 17 | /// Invalid signature. 
18 | #[error("invalid signature")] 19 | InvalidSignature, 20 | /// Unknown key hash. 21 | #[error("key hash {0} is unknown")] 22 | UnknownKeyHash(KeyHash), 23 | } 24 | 25 | impl From for jsonrpsee::types::error::ErrorObject<'static> { 26 | fn from(err: KeysError) -> Self { 27 | match err { 28 | KeysError::UnsupportedKeyType 29 | | KeysError::PrecallConflictingKeys 30 | | KeysError::OnlyAdminKeyAllowed 31 | | KeysError::InvalidSignature 32 | | KeysError::UnknownKeyHash { .. } => invalid_params(err.to_string()), 33 | } 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /.sqlx/query-6fbd7366feea64ac3286dbdf37acffc9eb1fb1aa2345851201a3410c7547c34b.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "select status as \"status: BridgeTransferStatus\", outbound_block_number, inbound_block_number from bridge_transfers where transfer_id = $1", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "status: BridgeTransferStatus", 9 | "type_info": { 10 | "Custom": { 11 | "name": "bridge_transfer_status", 12 | "kind": { 13 | "Enum": [ 14 | "pending", 15 | "sent", 16 | "outbound_failed", 17 | "completed", 18 | "inbound_failed" 19 | ] 20 | } 21 | } 22 | } 23 | }, 24 | { 25 | "ordinal": 1, 26 | "name": "outbound_block_number", 27 | "type_info": "Int8" 28 | }, 29 | { 30 | "ordinal": 2, 31 | "name": "inbound_block_number", 32 | "type_info": "Int8" 33 | } 34 | ], 35 | "parameters": { 36 | "Left": [ 37 | "Bytea" 38 | ] 39 | }, 40 | "nullable": [ 41 | false, 42 | true, 43 | true 44 | ] 45 | }, 46 | "hash": "6fbd7366feea64ac3286dbdf37acffc9eb1fb1aa2345851201a3410c7547c34b" 47 | } 48 | -------------------------------------------------------------------------------- /relay.example.yaml: -------------------------------------------------------------------------------- 1 | # This is a minimal example config for running the relay locally for a 
single chain. 2 | # 3 | # Note that this still requires you to run Anvil with the contracts deployed and configured, 4 | # as well as deployment of EXP1 and EXP2. 5 | server: 6 | address: "127.0.0.1" 7 | port: 9119 8 | metrics_port: 9000 9 | 10 | fee_recipient: "0x0000000000000000000000000000000000000000" 11 | 12 | orchestrator: "0x" 13 | delegation_proxy: "0x" 14 | simulator: "0x" 15 | escrow: "0x" 16 | funder: "0x" 17 | 18 | pricefeed: 19 | coingecko: 20 | # Remaps asset UIDs to CoinGecko coin IDs. 21 | # 22 | # If not specified, the UID is used as the coin ID. 23 | # 24 | # See 25 | remapping: 26 | exp1: "usd-coin" 27 | exp2: "tether" 28 | 29 | chains: 30 | # The key is either a chain ID, or a chain name. 31 | anvil: 32 | endpoint: "http://localhost:8545/" 33 | assets: 34 | ethereum: 35 | # Address 0 denotes the native asset and it must be present, even if it is not a fee token. 36 | address: "0x0000000000000000000000000000000000000000" 37 | fee_token: true 38 | exp1: 39 | address: "0x" 40 | fee_token: true 41 | exp2: 42 | address: "0x" 43 | fee_token: true 44 | 45 | transactions: 46 | num_signers: 10 47 | -------------------------------------------------------------------------------- /src/interop/settler/layerzero/batcher/mod.rs: -------------------------------------------------------------------------------- 1 | //! LayerZero settlement batch processing system. 2 | //! 3 | //! Aggregates cross-chain settlements into multicall batches while maintaining strict nonce 4 | //! ordering. Settlements arrive from LayerZeroSettler and are queued until they form gapless 5 | //! sequences. 6 | //! 7 | //! ## Flow 8 | //! 1. Settlements submitted via `LayerZeroPoolHandle::send_settlement_and_wait` 9 | //! 2. Pool stores by SettlementPathKey (chain_id, src_eid, settler_address) in nonce-ordered 10 | //! BTreeMaps 11 | //! 3. Processor polls for gapless batches starting from highest confirmed nonce 12 | //! 4. Batches up to 20 messages into multicall transactions 13 | //! 
5. Updates highest nonce on confirmation, notifying waiting callers 14 | //! 15 | //! ## Modules 16 | //! - `types`: Message and batch structures, chain configurations 17 | //! - `pool`: Settlement queue with gap detection and caller notification 18 | //! - `processor`: Per-chain-pair tasks that build and execute batches 19 | 20 | /// Maximum number of settlements to include in a single batch 21 | pub const MAX_SETTLEMENTS_PER_BATCH: usize = 20; 22 | 23 | mod pool; 24 | mod processor; 25 | mod types; 26 | 27 | pub use pool::{LayerZeroBatchPool, LayerZeroPoolHandle}; 28 | pub use processor::LayerZeroBatchProcessor; 29 | pub use types::{LayerZeroBatchMessage, LayerZeroPoolMessages, PendingBatch}; 30 | -------------------------------------------------------------------------------- /src/bin/relay.rs: -------------------------------------------------------------------------------- 1 | //! # Ithaca Relay 2 | //! 3 | //! A relay service that sponsors transactions for EIP-7702 accounts. 4 | 5 | use clap::Parser; 6 | use relay::{ 7 | cli::*, 8 | otlp::{OtelConfig, OtelGuard}, 9 | }; 10 | use tracing::debug; 11 | use tracing_subscriber::prelude::*; 12 | 13 | fn init_tracing_subscriber() -> Option { 14 | let registry = tracing_subscriber::registry().with( 15 | tracing_subscriber::fmt::layer() 16 | .with_filter(tracing_subscriber::filter::EnvFilter::from_default_env()), 17 | ); 18 | 19 | if let Some(cfg) = OtelConfig::load() { 20 | let guard = cfg.provider(); 21 | registry.with(guard.layer()).init(); 22 | Some(guard) 23 | } else { 24 | registry.init(); 25 | None 26 | } 27 | } 28 | 29 | #[tokio::main] 30 | async fn main() { 31 | // Enable backtraces unless a RUST_BACKTRACE value has already been explicitly provided. 
32 | if std::env::var_os("RUST_BACKTRACE").is_none() { 33 | unsafe { std::env::set_var("RUST_BACKTRACE", "1") }; 34 | } 35 | 36 | let _guard = init_tracing_subscriber(); 37 | if _guard.is_some() { 38 | debug!("opentelemetry initialized"); 39 | } 40 | 41 | let args = Args::parse(); 42 | if let Err(err) = args.run().await { 43 | eprintln!("Error: {err:?}"); 44 | std::process::exit(1); 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /src/interop/escrow.rs: -------------------------------------------------------------------------------- 1 | //! Escrow details. 2 | 3 | use crate::types::Escrow; 4 | use alloy::primitives::{Address, B256, ChainId}; 5 | use serde::{Deserialize, Serialize}; 6 | 7 | /// Information about escrows for a specific chain and settlement. 8 | #[derive(Debug, Clone)] 9 | pub struct EscrowInfo { 10 | /// The escrow IDs associated with the settlement on this chain 11 | pub escrow_ids: Vec, 12 | /// The escrow contract address on this chain 13 | pub escrow_address: Address, 14 | } 15 | 16 | /// Details of an escrow including metadata for tracking 17 | #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] 18 | pub struct EscrowDetails { 19 | /// The underlying escrow data 20 | pub escrow: Escrow, 21 | /// Unique identifier for the escrow (computed from escrow data) 22 | pub escrow_id: B256, 23 | /// Chain ID where the escrow resides 24 | pub chain_id: ChainId, 25 | /// Address of the escrow contract 26 | pub escrow_address: Address, 27 | } 28 | 29 | impl EscrowDetails { 30 | /// Creates a new `EscrowDetails` from an [`Escrow`] and additional metadata. 31 | /// 32 | /// The escrow ID is automatically calculated from the escrow data using keccak256. 
33 | pub fn new(escrow: Escrow, chain_id: ChainId, escrow_address: Address) -> Self { 34 | let escrow_id = escrow.calculate_id(); 35 | Self { escrow, escrow_id, chain_id, escrow_address } 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /src/twilio/lookup.rs: -------------------------------------------------------------------------------- 1 | //! Twilio Lookup API v2 types. 2 | 3 | use serde::Deserialize; 4 | 5 | /// Twilio Lookup v2 response. 6 | #[derive(Debug, Deserialize)] 7 | pub struct LookupResponse { 8 | /// Phone number information. 9 | pub line_type_intelligence: Option, 10 | } 11 | 12 | /// Line type intelligence data. 13 | #[derive(Debug, Deserialize)] 14 | pub struct LineTypeIntelligence { 15 | /// Type of phone line. 16 | #[serde(rename = "type")] 17 | pub line_type: LineType, 18 | } 19 | 20 | /// Phone line type. 21 | /// 22 | /// See for more details. 23 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize)] 24 | #[serde(rename_all = "camelCase")] 25 | pub enum LineType { 26 | /// Mobile phone. 27 | Mobile, 28 | /// Landline phone. 29 | Landline, 30 | /// VoIP phone. 31 | Voip, 32 | /// Fixed VoIP. 33 | FixedVoip, 34 | /// Non-fixed VoIP. 35 | NonFixedVoip, 36 | /// Pager. 37 | Pager, 38 | /// Toll-free number. 39 | TollFree, 40 | /// Premium rate number. 41 | Premium, 42 | /// Shared cost number. 43 | SharedCost, 44 | /// Unknown type. 45 | Unknown, 46 | } 47 | 48 | impl LineType { 49 | /// Check if this line type is allowed for verification. 
50 | pub fn is_allowed_for_verification(&self) -> bool { 51 | matches!(self, LineType::Mobile | LineType::Landline) 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /.sqlx/query-3b97d246bc2bbb97755f84a94484e1772b22e2f663814de36609e8a7feaa19a3.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "select chain_id, tx_hash, status as \"status: TxStatus\", error, receipt from txs where tx_id = $1", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "chain_id", 9 | "type_info": "Int8" 10 | }, 11 | { 12 | "ordinal": 1, 13 | "name": "tx_hash", 14 | "type_info": "Bytea" 15 | }, 16 | { 17 | "ordinal": 2, 18 | "name": "status: TxStatus", 19 | "type_info": { 20 | "Custom": { 21 | "name": "tx_status", 22 | "kind": { 23 | "Enum": [ 24 | "inflight", 25 | "pending", 26 | "confirmed", 27 | "failed" 28 | ] 29 | } 30 | } 31 | } 32 | }, 33 | { 34 | "ordinal": 3, 35 | "name": "error", 36 | "type_info": "Text" 37 | }, 38 | { 39 | "ordinal": 4, 40 | "name": "receipt", 41 | "type_info": "Jsonb" 42 | } 43 | ], 44 | "parameters": { 45 | "Left": [ 46 | "Bytea" 47 | ] 48 | }, 49 | "nullable": [ 50 | false, 51 | true, 52 | false, 53 | true, 54 | true 55 | ] 56 | }, 57 | "hash": "3b97d246bc2bbb97755f84a94484e1772b22e2f663814de36609e8a7feaa19a3" 58 | } 59 | -------------------------------------------------------------------------------- /tests/e2e/cases/snapshots/tests__e2e__cases__rpc_snap__verify_signature.snap: -------------------------------------------------------------------------------- 1 | --- 2 | source: tests/e2e/cases/rpc_snap.rs 3 | expression: response 4 | --- 5 | { 6 | "valid": true, 7 | "proof": { 8 | "account": "0xa023ba34fe23ad51d00515a68657e232ab6a1297", 9 | "keyHash": "0xf377756284e290a571b9f06981121207a3fd7e9970e1473fe9e553c16a804f68", 10 | "initPreCall": { 11 | "eoa": "0xa023ba34fe23ad51d00515a68657e232ab6a1297", 12 | 
"executionData": "0x00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000000e4cebfe3360000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000200000000000000000000000001409e2aad496505095defaaa2baff67c65168e6500000000000000000000000000000000000000000000000000000000", 13 | "nonce": "[nonce]", 14 | "signature": "[signature]" 15 | } 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /src/error/phone.rs: -------------------------------------------------------------------------------- 1 | //! Phone verification-related errors. 2 | 3 | use jsonrpsee::types::error::ErrorObject; 4 | use thiserror::Error; 5 | 6 | use super::{internal_rpc, invalid_params}; 7 | 8 | /// Errors that can occur during phone verification. 9 | #[derive(Debug, Error)] 10 | pub enum PhoneError { 11 | /// Phone already verified on at least one account. 12 | #[error("phone already verified")] 13 | PhoneAlreadyVerified, 14 | /// The verification code was incorrect. 15 | #[error("invalid verification code")] 16 | InvalidCode, 17 | /// Too many verification attempts. 18 | #[error("too many verification attempts")] 19 | TooManyAttempts, 20 | /// Rate limit exceeded. 
21 | #[error("rate limit exceeded")] 22 | RateLimitExceeded, 23 | /// Invalid phone number format. 24 | #[error("invalid phone number")] 25 | InvalidPhoneNumber, 26 | /// An internal error occurred. 27 | #[error(transparent)] 28 | InternalError(#[from] eyre::Error), 29 | } 30 | 31 | impl From for ErrorObject<'static> { 32 | fn from(err: PhoneError) -> Self { 33 | match err { 34 | PhoneError::PhoneAlreadyVerified 35 | | PhoneError::InvalidCode 36 | | PhoneError::TooManyAttempts 37 | | PhoneError::RateLimitExceeded 38 | | PhoneError::InvalidPhoneNumber => invalid_params(err.to_string()), 39 | PhoneError::InternalError(..) => internal_rpc(err.to_string()), 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /.sqlx/query-3467bdaf82adedf2757a9754e5d44c6f602399d0e473d43ad0f52570b437b55f.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "\n UPDATE pending_bundles\n SET status = $2, updated_at = NOW()\n WHERE bundle_id = $1\n ", 4 | "describe": { 5 | "columns": [], 6 | "parameters": { 7 | "Left": [ 8 | "Bytea", 9 | { 10 | "Custom": { 11 | "name": "bundle_status", 12 | "kind": { 13 | "Enum": [ 14 | "init", 15 | "source_queued", 16 | "source_confirmed", 17 | "source_failures", 18 | "destination_queued", 19 | "destination_failures", 20 | "destination_confirmed", 21 | "refunds_queued", 22 | "withdrawals_queued", 23 | "done", 24 | "failed", 25 | "liquidity_locked", 26 | "settlements_queued", 27 | "settlements_confirmed", 28 | "refunds_scheduled", 29 | "refunds_ready", 30 | "settlements_processing", 31 | "settlement_completion_queued", 32 | "fee_payer_queued", 33 | "fee_payer_completed" 34 | ] 35 | } 36 | } 37 | } 38 | ] 39 | }, 40 | "nullable": [] 41 | }, 42 | "hash": "3467bdaf82adedf2757a9754e5d44c6f602399d0e473d43ad0f52570b437b55f" 43 | } 44 | -------------------------------------------------------------------------------- 
/tests/e2e/common_calls.rs: -------------------------------------------------------------------------------- 1 | use super::{DEFAULT_EXECUTE_SELECTOR, MockErc20}; 2 | use alloy::{ 3 | primitives::{Address, B256, U256, bytes}, 4 | sol_types::SolCall, 5 | }; 6 | use relay::types::{Call, IERC721, IthacaAccount::SpendPeriod, Key}; 7 | 8 | /// Native transfer value call. 9 | pub fn transfer_native(recipient: Address, amount: U256) -> Call { 10 | Call { to: recipient, value: amount, data: bytes!("") } 11 | } 12 | 13 | /// ERC721 transfer call. 14 | pub fn transfer_721(erc721: Address, from: Address, to: Address, id: U256) -> Call { 15 | Call { 16 | to: erc721, 17 | value: U256::ZERO, 18 | data: IERC721::safeTransferFromCall { from, to, id }.abi_encode().into(), 19 | } 20 | } 21 | 22 | /// ERC721 burn call. 23 | pub fn burn_721(erc721: Address, id: U256) -> Call { 24 | Call { to: erc721, value: U256::ZERO, data: IERC721::burnCall { id }.abi_encode().into() } 25 | } 26 | /// ERC20 mint call. 27 | pub fn mint(erc20: Address, a: Address, val: U256) -> Call { 28 | Call { to: erc20, value: U256::ZERO, data: MockErc20::mintCall { a, val }.abi_encode().into() } 29 | } 30 | 31 | /// Set a daily spend limit. 
32 | pub fn daily_limit(token: Address, limit: U256, key: &Key) -> Call { 33 | Call::set_spend_limit(key.key_hash(), token, SpendPeriod::Day, limit) 34 | } 35 | 36 | /// Allow executing any function from target 37 | pub fn can_execute_all(target: Address, key_hash: B256) -> Call { 38 | Call::set_can_execute(key_hash, target, DEFAULT_EXECUTE_SELECTOR, true) 39 | } 40 | -------------------------------------------------------------------------------- /migrations/0013_pending_bundles.sql: -------------------------------------------------------------------------------- 1 | -- Create bundle status enum with all final values 2 | CREATE TYPE bundle_status AS ENUM ( 3 | 'init', 4 | 'source_queued', 5 | 'source_confirmed', 6 | 'source_failures', 7 | 'destination_queued', 8 | 'destination_failures', 9 | 'destination_confirmed', 10 | 'refunds_queued', 11 | 'withdrawals_queued', 12 | 'done', 13 | 'failed' 14 | ); 15 | 16 | -- Stores pending interop bundles for crash recovery 17 | CREATE TABLE pending_bundles ( 18 | bundle_id BYTEA PRIMARY KEY, 19 | status bundle_status NOT NULL, 20 | bundle_data JSONB NOT NULL, -- Stores serialized bundle data 21 | created_at TIMESTAMP NOT NULL DEFAULT NOW(), 22 | updated_at TIMESTAMP 23 | ); 24 | 25 | -- Create indexes for efficient queries 26 | CREATE INDEX idx_pending_bundles_created_at ON pending_bundles(created_at); 27 | CREATE INDEX idx_pending_bundles_data ON pending_bundles USING gin(bundle_data); 28 | 29 | -- Stores finished interop bundles for historical tracking 30 | CREATE TABLE finished_bundles ( 31 | bundle_id BYTEA PRIMARY KEY, 32 | status bundle_status NOT NULL, 33 | bundle_data JSONB NOT NULL, -- Stores serialized bundle data 34 | created_at TIMESTAMP NOT NULL, 35 | finished_at TIMESTAMP NOT NULL DEFAULT NOW() 36 | ); 37 | 38 | -- Create indexes for efficient queries on finished bundles 39 | CREATE INDEX idx_finished_bundles_finished_at ON finished_bundles(finished_at); 40 | CREATE INDEX idx_finished_bundles_status ON 
finished_bundles(status); -------------------------------------------------------------------------------- /src/error/storage.rs: -------------------------------------------------------------------------------- 1 | use super::internal_rpc; 2 | use crate::{error::invalid_params, types::rpc::BundleId}; 3 | use alloy::primitives::Address; 4 | 5 | /// Errors returned by [`Storage`]. 6 | #[derive(Debug, thiserror::Error)] 7 | pub enum StorageError { 8 | /// The account does not exist. 9 | #[error("Account with address {0} does not exist in storage.")] 10 | AccountDoesNotExist(Address), 11 | /// The bundle does not exist. 12 | #[error("Bundle {0} does not exist")] 13 | BundleDoesNotExist(BundleId), 14 | /// Can't lock liquidity. 15 | #[error("can't lock liquidity")] 16 | CantLockLiquidity, 17 | /// A deserialization error occurred. 18 | #[error("a deserialization error occurred")] 19 | SerdeError(#[from] serde_json::Error), 20 | /// An internal error occurred. 21 | #[error("an internal error occurred")] 22 | InternalError(#[from] eyre::Error), 23 | } 24 | 25 | impl From for jsonrpsee::types::error::ErrorObject<'static> { 26 | fn from(err: StorageError) -> Self { 27 | match err { 28 | StorageError::AccountDoesNotExist(..) | StorageError::BundleDoesNotExist(..) => { 29 | invalid_params(err.to_string()) 30 | } 31 | StorageError::CantLockLiquidity => internal_rpc("can't lock liquidity"), 32 | StorageError::SerdeError(..) => internal_rpc("an internal error occurred"), 33 | StorageError::InternalError(..) 
=> internal_rpc("an internal error occurred"), 34 | } 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /.sqlx/query-5d19bd26bec547a5e4026104e3f53fcc9bf20d442799f43ca5585822701dd197.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "\n INSERT INTO pending_bundles (bundle_id, status, bundle_data, created_at)\n VALUES ($1, $2, $3, NOW())\n ", 4 | "describe": { 5 | "columns": [], 6 | "parameters": { 7 | "Left": [ 8 | "Bytea", 9 | { 10 | "Custom": { 11 | "name": "bundle_status", 12 | "kind": { 13 | "Enum": [ 14 | "init", 15 | "source_queued", 16 | "source_confirmed", 17 | "source_failures", 18 | "destination_queued", 19 | "destination_failures", 20 | "destination_confirmed", 21 | "refunds_queued", 22 | "withdrawals_queued", 23 | "done", 24 | "failed", 25 | "liquidity_locked", 26 | "settlements_queued", 27 | "settlements_confirmed", 28 | "refunds_scheduled", 29 | "refunds_ready", 30 | "settlements_processing", 31 | "settlement_completion_queued", 32 | "fee_payer_queued", 33 | "fee_payer_completed" 34 | ] 35 | } 36 | } 37 | }, 38 | "Jsonb" 39 | ] 40 | }, 41 | "nullable": [] 42 | }, 43 | "hash": "5d19bd26bec547a5e4026104e3f53fcc9bf20d442799f43ca5585822701dd197" 44 | } 45 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM lukemathwalker/cargo-chef:latest-rust-1 AS chef 2 | WORKDIR /app 3 | 4 | LABEL org.opencontainers.image.source=https://github.com/ithacaxyz/relay 5 | 6 | # Builds a cargo-chef plan 7 | FROM chef AS planner 8 | COPY . . 
9 | RUN cargo chef prepare --recipe-path recipe.json 10 | 11 | FROM chef AS builder 12 | COPY --from=planner /app/recipe.json recipe.json 13 | 14 | # Build profile, maxperf by default 15 | ARG BUILD_PROFILE=maxperf 16 | ENV BUILD_PROFILE $BUILD_PROFILE 17 | 18 | # Extra Cargo features 19 | ARG FEATURES="" 20 | ENV FEATURES $FEATURES 21 | 22 | # Install system dependencies 23 | RUN apt-get update && apt-get -y upgrade && apt-get install -y libclang-dev pkg-config 24 | 25 | # Builds dependencies 26 | RUN cargo chef cook --profile $BUILD_PROFILE --recipe-path recipe.json 27 | 28 | # Copy source 29 | COPY . . 30 | 31 | # Add migrations 32 | ADD migrations migrations 33 | 34 | # Build application 35 | RUN cargo build --profile $BUILD_PROFILE --features "$FEATURES" --locked --bin relay 36 | 37 | # ARG is not resolved in COPY so we have to hack around it by copying the 38 | # binary to a temporary location 39 | RUN cp /app/target/$BUILD_PROFILE/relay /app/relay 40 | 41 | # Use Ubuntu as the release image 42 | FROM ubuntu AS runtime 43 | WORKDIR /app 44 | 45 | # Install runtime dependencies 46 | RUN apt-get update && apt-get -y upgrade && apt-get install -y ca-certificates && update-ca-certificates 47 | 48 | # Copy relay over from the build stage 49 | COPY --from=builder /app/relay /usr/local/bin 50 | 51 | EXPOSE 9119 52 | ENTRYPOINT ["/usr/local/bin/relay"] 53 | -------------------------------------------------------------------------------- /tools/relay-tools/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "relay-tools" 3 | version = "0.1.0" 4 | edition = "2024" 5 | description = "Collection of tools for the Relay" 6 | publish = false 7 | 8 | [[bin]] 9 | name = "chainwalker" 10 | path = "src/bin/chainwalker/main.rs" 11 | 12 | [[bin]] 13 | name = "stress" 14 | path = "src/bin/stress/main.rs" 15 | 16 | [[bin]] 17 | name = "recast" 18 | path = "src/bin/recast/mod.rs" 19 | 20 | [dependencies] 21 | alloy = 
{ version = "1.0.41", features = [ 22 | "std", 23 | "json-rpc", 24 | "providers", 25 | "reqwest-rustls-tls", 26 | "rpc-client", 27 | "rpc-types", 28 | "serde", 29 | "signers", 30 | "transports", 31 | "provider-ws", 32 | "essentials", 33 | "getrandom", 34 | "dyn-abi", 35 | "eip712", 36 | "eips", 37 | "k256", 38 | "provider-debug-api", 39 | "rlp", 40 | "rpc", 41 | "rpc-types-trace", 42 | "signer-local", 43 | "sol-types", 44 | "network", 45 | "signer-mnemonic", 46 | "signer-aws", 47 | "rand", 48 | ], default-features = false } 49 | alloy-chains = "0.2.15" 50 | clap = { version = "4.5", features = ["derive", "env"] } 51 | dirs = "6.0" 52 | eyre = "0.6" 53 | futures = "0.3" 54 | futures-util = "0.3" 55 | jsonrpsee = { version = "0.26", features = ["client", "client-core", "http-client"] } 56 | rand = "0.9" 57 | relay = { path = "../.." } 58 | serde = { version = "1.0", features = ["derive"] } 59 | serde_json = "1.0" 60 | tokio = { version = "1.39", features = ["macros", "rt-multi-thread", "time"] } 61 | tracing = "0.1" 62 | tracing-subscriber = { version = "0.3", features = ["env-filter", "fmt"] } 63 | url = "2.5" -------------------------------------------------------------------------------- /Dockerfile.stress: -------------------------------------------------------------------------------- 1 | FROM lukemathwalker/cargo-chef:latest-rust-1 AS chef 2 | WORKDIR /app 3 | 4 | LABEL org.opencontainers.image.source=https://github.com/ithacaxyz/relay 5 | 6 | # Builds a cargo-chef plan 7 | FROM chef AS planner 8 | COPY . . 
9 | RUN cargo chef prepare --recipe-path recipe.json 10 | 11 | FROM chef AS builder 12 | COPY --from=planner /app/recipe.json recipe.json 13 | 14 | # Build profile, maxperf by default 15 | ARG BUILD_PROFILE=maxperf 16 | ENV BUILD_PROFILE $BUILD_PROFILE 17 | 18 | # Extra Cargo features 19 | ARG FEATURES="" 20 | ENV FEATURES $FEATURES 21 | 22 | # Install system dependencies 23 | RUN apt-get update && apt-get -y upgrade && apt-get install -y libclang-dev pkg-config 24 | 25 | # Builds dependencies 26 | RUN cargo chef cook --profile $BUILD_PROFILE --recipe-path recipe.json 27 | 28 | # Copy source 29 | COPY . . 30 | 31 | # Build application 32 | RUN cargo build --profile $BUILD_PROFILE --features "$FEATURES" --locked -p relay-tools --bin stress 33 | 34 | # ARG is not resolved in COPY so we have to hack around it by copying the 35 | # binary to a temporary location 36 | RUN cp /app/target/$BUILD_PROFILE/stress /app/stress 37 | 38 | # Use Ubuntu as the release image 39 | FROM ubuntu AS runtime 40 | WORKDIR /app 41 | 42 | # Install runtime dependencies 43 | RUN apt-get update && apt-get -y upgrade && apt-get install -y ca-certificates && update-ca-certificates 44 | 45 | # Copy stress over from the build stage 46 | COPY --from=builder /app/stress /usr/local/bin 47 | 48 | # Add nushell 49 | COPY --from=ghcr.io/nushell/nushell:0.105.1-alpine /usr/bin/nu /usr/bin/nu 50 | 51 | EXPOSE 9119 52 | ENTRYPOINT ["/usr/local/bin/stress"] 53 | -------------------------------------------------------------------------------- /src/metrics/periodic/mod.rs: -------------------------------------------------------------------------------- 1 | //! Periodic metric collectors. 2 | 3 | mod types; 4 | pub use types::*; 5 | 6 | mod job; 7 | use job::PeriodicJob; 8 | 9 | use std::{fmt::Debug, future::Future, sync::Arc, time::Duration}; 10 | 11 | use crate::{chains::Chains, config::RelayConfig, error::StorageError, storage::RelayStorage}; 12 | 13 | /// Metric collector error. 
14 | #[derive(Debug, thiserror::Error)] 15 | pub enum MetricCollectorError { 16 | /// Error coming from RPC 17 | #[error(transparent)] 18 | RpcError(#[from] alloy::transports::RpcError), 19 | /// Error coming from storage 20 | #[error(transparent)] 21 | StorageError(#[from] StorageError), 22 | /// Multicall error. 23 | #[error(transparent)] 24 | MulticallError(#[from] alloy::providers::MulticallError), 25 | } 26 | 27 | /// Trait for a collector that records its own metric. 28 | pub trait MetricCollector: Debug { 29 | /// Collects metrics and records them. 30 | fn collect(&self) -> impl Future> + Send; 31 | } 32 | 33 | /// Spawns all available periodic metric collectors. 34 | pub async fn spawn_periodic_collectors( 35 | config: &RelayConfig, 36 | storage: RelayStorage, 37 | chains: Arc, 38 | ) -> Result<(), MetricCollectorError> { 39 | PeriodicJob::launch_task( 40 | BalanceCollector::new(config.funder, chains.clone()), 41 | tokio::time::interval(Duration::from_secs(30)), 42 | ); 43 | 44 | PeriodicJob::launch_task( 45 | LiquidityCollector::new(storage, chains.clone()), 46 | tokio::time::interval(Duration::from_secs(30)), 47 | ); 48 | 49 | Ok(()) 50 | } 51 | -------------------------------------------------------------------------------- /.sqlx/query-530e93d5b22f16dff6556e241d328ee6d7b75d7c82a4f2f310fc961c53da4c0a.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "select tx_id, chain_id, tx_hash, status as \"status: TxStatus\", error, receipt\n from txs\n where tx_id = ANY($1)\n order by array_position($1, tx_id)", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "tx_id", 9 | "type_info": "Bytea" 10 | }, 11 | { 12 | "ordinal": 1, 13 | "name": "chain_id", 14 | "type_info": "Int8" 15 | }, 16 | { 17 | "ordinal": 2, 18 | "name": "tx_hash", 19 | "type_info": "Bytea" 20 | }, 21 | { 22 | "ordinal": 3, 23 | "name": "status: TxStatus", 24 | "type_info": { 25 | "Custom": { 
26 | "name": "tx_status", 27 | "kind": { 28 | "Enum": [ 29 | "inflight", 30 | "pending", 31 | "confirmed", 32 | "failed" 33 | ] 34 | } 35 | } 36 | } 37 | }, 38 | { 39 | "ordinal": 4, 40 | "name": "error", 41 | "type_info": "Text" 42 | }, 43 | { 44 | "ordinal": 5, 45 | "name": "receipt", 46 | "type_info": "Jsonb" 47 | } 48 | ], 49 | "parameters": { 50 | "Left": [ 51 | "ByteaArray" 52 | ] 53 | }, 54 | "nullable": [ 55 | false, 56 | false, 57 | true, 58 | false, 59 | true, 60 | true 61 | ] 62 | }, 63 | "hash": "530e93d5b22f16dff6556e241d328ee6d7b75d7c82a4f2f310fc961c53da4c0a" 64 | } 65 | -------------------------------------------------------------------------------- /.sqlx/query-82e7c9cce1aa485db564962b021eaea9756ca73e5298e743ef0c37583fceb803.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "\n INSERT INTO pending_bundles (bundle_id, status, bundle_data, created_at)\n VALUES ($1, $2, $3, NOW())\n ON CONFLICT (bundle_id) DO UPDATE SET\n status = EXCLUDED.status,\n bundle_data = EXCLUDED.bundle_data,\n updated_at = NOW()\n ", 4 | "describe": { 5 | "columns": [], 6 | "parameters": { 7 | "Left": [ 8 | "Bytea", 9 | { 10 | "Custom": { 11 | "name": "bundle_status", 12 | "kind": { 13 | "Enum": [ 14 | "init", 15 | "source_queued", 16 | "source_confirmed", 17 | "source_failures", 18 | "destination_queued", 19 | "destination_failures", 20 | "destination_confirmed", 21 | "refunds_queued", 22 | "withdrawals_queued", 23 | "done", 24 | "failed", 25 | "liquidity_locked", 26 | "settlements_queued", 27 | "settlements_confirmed", 28 | "refunds_scheduled", 29 | "refunds_ready", 30 | "settlements_processing", 31 | "settlement_completion_queued", 32 | "fee_payer_queued", 33 | "fee_payer_completed" 34 | ] 35 | } 36 | } 37 | }, 38 | "Jsonb" 39 | ] 40 | }, 41 | "nullable": [] 42 | }, 43 | "hash": "82e7c9cce1aa485db564962b021eaea9756ca73e5298e743ef0c37583fceb803" 44 | } 45 | 
-------------------------------------------------------------------------------- /.sqlx/query-d1a42181197af48903ca0fc5e663982233307b11b6194c00b2272e85bb8a3d5c.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "\n SELECT\n q.asset_uid,\n q.requested_timestamp,\n h.timestamp,\n h.usd_price\n FROM UNNEST($1::text[], $2::bigint[]) AS q(asset_uid, requested_timestamp)\n LEFT JOIN LATERAL (\n SELECT timestamp, usd_price\n FROM historical_usd_prices\n WHERE asset_uid = q.asset_uid\n AND timestamp BETWEEN q.requested_timestamp - 300\n AND q.requested_timestamp + 300\n ORDER BY ABS(timestamp - q.requested_timestamp)\n LIMIT 1\n ) h ON true\n WHERE h.timestamp IS NOT NULL\n ", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "asset_uid", 9 | "type_info": "Text" 10 | }, 11 | { 12 | "ordinal": 1, 13 | "name": "requested_timestamp", 14 | "type_info": "Int8" 15 | }, 16 | { 17 | "ordinal": 2, 18 | "name": "timestamp", 19 | "type_info": "Int8" 20 | }, 21 | { 22 | "ordinal": 3, 23 | "name": "usd_price", 24 | "type_info": "Float8" 25 | } 26 | ], 27 | "parameters": { 28 | "Left": [ 29 | "TextArray", 30 | "Int8Array" 31 | ] 32 | }, 33 | "nullable": [ 34 | null, 35 | null, 36 | false, 37 | false 38 | ] 39 | }, 40 | "hash": "d1a42181197af48903ca0fc5e663982233307b11b6194c00b2272e85bb8a3d5c" 41 | } 42 | -------------------------------------------------------------------------------- /.sqlx/query-7c3d06612031a0df180c5ab75b8a882d1bfd8d81f5965a352a71a30f1e618810.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "\n SELECT status AS \"status: BundleStatus\", bundle_data\n FROM pending_bundles\n ORDER BY created_at\n ", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "status: BundleStatus", 9 | "type_info": { 10 | "Custom": { 11 | "name": "bundle_status", 12 | "kind": { 13 | "Enum": 
[ 14 | "init", 15 | "source_queued", 16 | "source_confirmed", 17 | "source_failures", 18 | "destination_queued", 19 | "destination_failures", 20 | "destination_confirmed", 21 | "refunds_queued", 22 | "withdrawals_queued", 23 | "done", 24 | "failed", 25 | "liquidity_locked", 26 | "settlements_queued", 27 | "settlements_confirmed", 28 | "refunds_scheduled", 29 | "refunds_ready", 30 | "settlements_processing", 31 | "settlement_completion_queued", 32 | "fee_payer_queued", 33 | "fee_payer_completed" 34 | ] 35 | } 36 | } 37 | } 38 | }, 39 | { 40 | "ordinal": 1, 41 | "name": "bundle_data", 42 | "type_info": "Jsonb" 43 | } 44 | ], 45 | "parameters": { 46 | "Left": [] 47 | }, 48 | "nullable": [ 49 | false, 50 | false 51 | ] 52 | }, 53 | "hash": "7c3d06612031a0df180c5ab75b8a882d1bfd8d81f5965a352a71a30f1e618810" 54 | } 55 | -------------------------------------------------------------------------------- /migrations/0029_bundle_history_indexes.sql: -------------------------------------------------------------------------------- 1 | -- Add support for wallet_getCallsHistory endpoint 2 | 3 | -- Add tx column to txs table for history queries 4 | -- NULL for interop bundles (data in bundle_data), populated for single-chain 5 | alter table txs add column tx jsonb; 6 | 7 | -- Create immutable function to extract received_at timestamp from tx JSONB 8 | -- This is needed for indexing since ::timestamptz cast is not immutable 9 | create or replace function tx_received_at_immutable(tx jsonb) 10 | returns timestamptz 11 | language sql 12 | immutable 13 | parallel safe 14 | return (tx->>'received_at')::timestamptz; 15 | 16 | -- Multi-chain bundle composite indexes (EOA + timestamp) 17 | -- These enable index-only scans and early stop on ORDER BY ... 
LIMIT 18 | -- Note: dst_txs is typically a single-element array, so we index the first element's EOA 19 | create index if not exists idx_pending_bundles_eoa_created_at 20 | on pending_bundles ((bundle_data->'dst_txs'->0->'quote'->'intent'->>'eoa'), created_at desc); 21 | create index if not exists idx_finished_bundles_eoa_finished_at 22 | on finished_bundles ((bundle_data->'dst_txs'->0->'quote'->'intent'->>'eoa'), finished_at desc); 23 | 24 | -- Single-chain bundle composite index (EOA + timestamp) 25 | -- Partial index only on non-null tx for single-chain bundle queries 26 | create index if not exists idx_txs_eoa_received_at 27 | on txs ( 28 | (tx->'quote'->'intent'->>'eoa'), 29 | tx_received_at_immutable(tx) desc 30 | ) 31 | where tx is not null; 32 | 33 | -- Bundle-transaction mapping indexes 34 | -- Composite index for joins and lookups 35 | create index if not exists idx_bundle_transactions_composite 36 | on bundle_transactions(bundle_id, tx_id); 37 | create index if not exists idx_bundle_transactions_tx_id 38 | on bundle_transactions(tx_id); 39 | -------------------------------------------------------------------------------- /tests/e2e/cases/errors.rs: -------------------------------------------------------------------------------- 1 | use crate::e2e::{AuthKind, cases::upgrade_account_eagerly, environment::Environment}; 2 | use alloy::primitives::{Address, U256}; 3 | use relay::{ 4 | rpc::RelayApiClient, 5 | types::{ 6 | Call, KeyType, KeyWith712Signer, 7 | rpc::{Meta, PrepareCallsCapabilities, PrepareCallsParameters}, 8 | }, 9 | }; 10 | 11 | #[tokio::test(flavor = "multi_thread")] 12 | async fn decode_insufficient_allowance() -> eyre::Result<()> { 13 | let env = Environment::setup().await?; 14 | let key = KeyWith712Signer::random_admin(KeyType::Secp256k1)?.unwrap(); 15 | 16 | upgrade_account_eagerly(&env, &[key.to_authorized()], &key, AuthKind::Auth).await?; 17 | 18 | let response = env 19 | .relay_endpoint 20 | .prepare_calls(PrepareCallsParameters { 21 | 
from: Some(env.eoa.address()), 22 | calls: vec![Call::transfer_from( 23 | env.erc20s[4], 24 | Address::ZERO, 25 | env.eoa.address(), 26 | U256::from(10000000u64), 27 | )], 28 | chain_id: env.chain_id(), 29 | capabilities: PrepareCallsCapabilities { 30 | authorize_keys: vec![], 31 | revoke_keys: vec![], 32 | meta: Meta { fee_payer: None, fee_token: Some(env.fee_token), nonce: None }, 33 | pre_calls: vec![], 34 | pre_call: false, 35 | required_funds: vec![], 36 | }, 37 | state_overrides: Default::default(), 38 | balance_overrides: Default::default(), 39 | key: Some(key.to_call_key()), 40 | }) 41 | .await; 42 | 43 | assert!(response.is_err_and(|err| err.to_string().contains("InsufficientAllowance"))); 44 | 45 | Ok(()) 46 | } 47 | -------------------------------------------------------------------------------- /.sqlx/query-c37655b3fb49e027a64b8fad5654512bb3dfdeb79b260360955533281565223d.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "\n SELECT status AS \"status: BundleStatus\", bundle_data\n FROM pending_bundles\n WHERE bundle_id = $1\n ", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "status: BundleStatus", 9 | "type_info": { 10 | "Custom": { 11 | "name": "bundle_status", 12 | "kind": { 13 | "Enum": [ 14 | "init", 15 | "source_queued", 16 | "source_confirmed", 17 | "source_failures", 18 | "destination_queued", 19 | "destination_failures", 20 | "destination_confirmed", 21 | "refunds_queued", 22 | "withdrawals_queued", 23 | "done", 24 | "failed", 25 | "liquidity_locked", 26 | "settlements_queued", 27 | "settlements_confirmed", 28 | "refunds_scheduled", 29 | "refunds_ready", 30 | "settlements_processing", 31 | "settlement_completion_queued", 32 | "fee_payer_queued", 33 | "fee_payer_completed" 34 | ] 35 | } 36 | } 37 | } 38 | }, 39 | { 40 | "ordinal": 1, 41 | "name": "bundle_data", 42 | "type_info": "Jsonb" 43 | } 44 | ], 45 | "parameters": { 46 | "Left": [ 47 
| "Bytea" 48 | ] 49 | }, 50 | "nullable": [ 51 | false, 52 | false 53 | ] 54 | }, 55 | "hash": "c37655b3fb49e027a64b8fad5654512bb3dfdeb79b260360955533281565223d" 56 | } 57 | -------------------------------------------------------------------------------- /src/types/mod.rs: -------------------------------------------------------------------------------- 1 | //! Shared primitive types. 2 | use serde::{Deserialize, Serialize}; 3 | 4 | mod account; 5 | pub use account::*; 6 | 7 | mod asset; 8 | pub use asset::*; 9 | 10 | pub mod simulation_assets; 11 | pub use simulation_assets::*; 12 | 13 | mod contracts; 14 | pub use contracts::*; 15 | 16 | mod orchestrator; 17 | pub use orchestrator::*; 18 | 19 | mod tokens; 20 | pub use tokens::*; 21 | 22 | mod key; 23 | use alloy::primitives::{Address, Uint}; 24 | pub use key::*; 25 | 26 | mod intent; 27 | pub use intent::*; 28 | 29 | mod intents; 30 | pub use intents::*; 31 | 32 | mod slots; 33 | pub use slots::*; 34 | 35 | mod interop; 36 | pub use interop::*; 37 | 38 | mod layerzero; 39 | pub use layerzero::*; 40 | 41 | mod signed; 42 | pub use signed::*; 43 | 44 | mod quote; 45 | pub use quote::*; 46 | 47 | mod transaction; 48 | pub use transaction::*; 49 | 50 | pub mod rpc; 51 | 52 | mod call; 53 | pub use call::*; 54 | 55 | mod webauthn; 56 | pub use webauthn::*; 57 | 58 | pub mod simulator; 59 | pub use simulator::*; 60 | 61 | mod storage; 62 | pub use storage::*; 63 | 64 | mod merkle; 65 | pub use merkle::*; 66 | 67 | mod settler; 68 | pub use settler::*; 69 | 70 | mod escrow; 71 | pub use escrow::*; 72 | 73 | mod funder; 74 | pub use funder::*; 75 | 76 | mod multicall; 77 | pub use multicall::*; 78 | 79 | mod cast_debug; 80 | pub use cast_debug::*; 81 | 82 | /// A 40 bit integer. 83 | pub type U40 = Uint<40, 1>; 84 | 85 | /// The health response. 86 | #[derive(Debug, Clone, Serialize, Deserialize)] 87 | #[serde(rename_all = "camelCase")] 88 | pub struct Health { 89 | /// The status (usually OK) of the RPC. 
90 | pub status: String, 91 | /// The version of the RPC. 92 | pub version: String, 93 | /// The address of the quote signer. 94 | pub quote_signer: Address, 95 | } 96 | -------------------------------------------------------------------------------- /.sqlx/query-f42adf1637e3fa9e83464da75a8c6915d79e127ec6a036811afebdcd4b1e8d45.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "\n SELECT status as \"status: BundleStatus\"\n FROM (\n SELECT status FROM pending_bundles WHERE bundle_id = $1\n UNION ALL\n SELECT status FROM finished_bundles WHERE bundle_id = $1\n ) AS combined\n LIMIT 1\n ", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "status: BundleStatus", 9 | "type_info": { 10 | "Custom": { 11 | "name": "bundle_status", 12 | "kind": { 13 | "Enum": [ 14 | "init", 15 | "source_queued", 16 | "source_confirmed", 17 | "source_failures", 18 | "destination_queued", 19 | "destination_failures", 20 | "destination_confirmed", 21 | "refunds_queued", 22 | "withdrawals_queued", 23 | "done", 24 | "failed", 25 | "liquidity_locked", 26 | "settlements_queued", 27 | "settlements_confirmed", 28 | "refunds_scheduled", 29 | "refunds_ready", 30 | "settlements_processing", 31 | "settlement_completion_queued", 32 | "fee_payer_queued", 33 | "fee_payer_completed" 34 | ] 35 | } 36 | } 37 | } 38 | } 39 | ], 40 | "parameters": { 41 | "Left": [ 42 | "Bytea" 43 | ] 44 | }, 45 | "nullable": [ 46 | null 47 | ] 48 | }, 49 | "hash": "f42adf1637e3fa9e83464da75a8c6915d79e127ec6a036811afebdcd4b1e8d45" 50 | } 51 | -------------------------------------------------------------------------------- /src/price/mod.rs: -------------------------------------------------------------------------------- 1 | //! Price oracle and fetchers. 
2 | 3 | mod fetchers; 4 | pub use fetchers::*; 5 | 6 | mod metrics; 7 | mod oracle; 8 | pub use oracle::{PriceOracle, PriceOracleConfig}; 9 | 10 | use alloy::primitives::{U256, U512}; 11 | 12 | /// Calculate the USD value of a token amount given its USD price and decimals. 13 | /// 14 | /// This converts an amount in the smallest unit (e.g., wei) to its USD value 15 | /// based on the USD price per whole token (e.g., USD price per ETH). 16 | /// 17 | /// # Returns 18 | /// The total USD value as f64 19 | pub fn calculate_usd_value(token_amount: U256, usd_price: f64, decimals: u8) -> f64 { 20 | let result = U512::from(token_amount).saturating_mul(U512::from(usd_price * 1e18)) 21 | / U512::from(10u128.pow(decimals as u32)); 22 | result.to::<u128>() as f64 / 1e18 23 | } 24 | 25 | #[cfg(test)] 26 | mod tests { 27 | use crate::types::AssetUid; 28 | 29 | use super::*; 30 | use std::time::Duration; 31 | use tokio::time::sleep; 32 | 33 | #[ignore] // requires GECKO_API 34 | #[tokio::test] 35 | async fn coingecko() { 36 | let oracle = PriceOracle::new(Default::default()); 37 | oracle.spawn_fetcher(PriceFetcher::CoinGecko, &Default::default()); 38 | 39 | // Allow coingecko to fetch prices 40 | sleep(Duration::from_millis(500)).await; 41 | 42 | oracle 43 | .native_conversion_rate( 44 | AssetUid::new("usd-coin".into()), 45 | AssetUid::new("ethereum".into()), 46 | ) 47 | .await 48 | .unwrap(); 49 | oracle 50 | .native_conversion_rate( 51 | AssetUid::new("tether".into()), 52 | AssetUid::new("ethereum".into()), 53 | ) 54 | .await 55 | .unwrap(); 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /src/rpc/extra_fee.rs: -------------------------------------------------------------------------------- 1 | //! Extra fee info for L2s 2 | use alloy::primitives::U256; 3 | 4 | /// Contains information about the extra fee on L2 transactions, these come from costs that the L2 5 | /// incurs when posting the transaction on the parent chain.
6 | #[derive(Debug)] 7 | pub enum ExtraFeeInfo { 8 | /// Arbitrum L2 with L1 DA fee components. 9 | /// 10 | /// On arbitrum chains, the extra fee is also required to be added to the gas limit for the 11 | /// transaction: 12 | /// https://docs.arbitrum.io/build-decentralized-apps/how-to-estimate-gas#breaking-down-the-formula 13 | Arbitrum { 14 | /// L1 gas estimate for the transaction 15 | gas_estimate: u64, 16 | }, 17 | /// Optimism L2 with calculated L1 fee. 18 | /// 19 | /// On optimism chains, the extra fee is deducted from the account directly and does not need 20 | /// to be included in the gas limit for the transaction: 21 | /// https://docs.optimism.io/stack/transactions/fees#mechanism 22 | Optimism { 23 | /// The L1 fee estimate 24 | l1_fee: U256, 25 | }, 26 | /// Not an L2 or no extra fee 27 | None, 28 | } 29 | 30 | impl ExtraFeeInfo { 31 | /// Returns the calculated extra fee based on the L2 type 32 | pub fn extra_fee(&self) -> U256 { 33 | match self { 34 | Self::Arbitrum { .. } => U256::ZERO, 35 | Self::Optimism { l1_fee } => *l1_fee, 36 | Self::None => U256::ZERO, 37 | } 38 | } 39 | 40 | /// Returns the amount of gas to add to the gas limit to account for the l1 fee. This will 41 | /// return zero on chains that are not arbitrum. 42 | pub fn extra_gas(&self) -> u64 { 43 | match self { 44 | Self::Arbitrum { gas_estimate, .. } => *gas_estimate, 45 | _ => 0, 46 | } 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /scripts/bundle.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -eo pipefail 3 | 4 | usage() 5 | { 6 | echo "Usage: $0 -r -b " 7 | exit 1 8 | } 9 | 10 | RED='\033[0;31m' 11 | GREEN='\033[0;32m' 12 | NC='\033[0m' 13 | 14 | while getopts ":b:r:" opt; do 15 | case ${opt} in 16 | b ) bundle_id=$OPTARG;; 17 | r ) rpc_url=$OPTARG;; 18 | \? 
) usage;; 19 | esac 20 | done 21 | 22 | rpc_url=${rpc_url:-${ETH_RPC_URL:-http://localhost:8545}} 23 | 24 | if [ -z "$bundle_id" ] || [ -z "$rpc_url" ]; then 25 | usage 26 | fi 27 | 28 | if ! command -v cast help 2>&1 >/dev/null 29 | then 30 | echo "Please install cast" 31 | exit 1 32 | fi 33 | 34 | if ! command -v jq 2>&1 >/dev/null 35 | then 36 | echo "Please install jq" 37 | exit 1 38 | fi 39 | 40 | bundle="$(cast rpc wallet_getCallsStatus -r $rpc_url $bundle_id)" 41 | status=$(echo "${bundle}" | jq '.status') 42 | 43 | case $status in 44 | 100 ) human_status="Pending";; 45 | 200 ) human_status="Confirmed";; 46 | 300 ) human_status="Failed offchain";; 47 | 400 ) human_status="Reverted";; 48 | 500 ) human_status="Partially reverted";; 49 | *) human_status="Unknown";; 50 | esac 51 | 52 | settlement_status=$(echo "${bundle}" | jq -r '.capabilities.interopStatus') 53 | 54 | echo "Bundle ${bundle_id} on ${rpc_url}" 55 | echo 56 | echo "Bundle status: ${human_status} (${status})" 57 | echo "Settlement status: ${settlement_status}" 58 | echo "Receipts:" 59 | 60 | for receipt in $(echo "${bundle}" | jq -c '.receipts[]'); do 61 | tx_status=$(echo "${receipt}" | jq -r '.status') 62 | 63 | echo -n "- " 64 | if [ $tx_status = "0x1" ]; then 65 | echo -en $GREEN 66 | else 67 | echo -en $RED 68 | fi 69 | echo "${receipt}" | jq -r '.transactionHash + " (" + (.chainId | tostring) + ")"' 70 | echo -en $NC 71 | done 72 | -------------------------------------------------------------------------------- /src/signers/p256.rs: -------------------------------------------------------------------------------- 1 | //! P256 signer type with webauthn capabilities used for gas estimation and testing. 2 | 3 | use super::Eip712PayLoadSigner; 4 | use crate::types::KeyType; 5 | use alloy::primitives::{B256, Bytes}; 6 | use p256::ecdsa::{SigningKey, signature::hazmat::PrehashSigner}; 7 | use std::sync::Arc; 8 | 9 | /// Trait for all signers that use a [`SigningKey`] underneath.. 
10 | pub trait P256Key { 11 | /// Return reference to the P256 signing key. 12 | fn signing_key(&self) -> &SigningKey; 13 | 14 | /// Signs a prehashed digest with the p256 key. 15 | fn sign_prehash(&self, digest: &[u8]) -> eyre::Result<p256::ecdsa::Signature> { 16 | Ok(self 17 | .signing_key() 18 | .sign_prehash(digest) 19 | .map(|s: p256::ecdsa::Signature| s.normalize_s().unwrap_or(s))?) 20 | } 21 | 22 | /// Returns the signer's p256 public key in [`Bytes`]. 23 | fn public_key(&self) -> Bytes { 24 | self.signing_key().verifying_key().to_encoded_point(false).to_bytes()[1..].to_vec().into() 25 | } 26 | } 27 | 28 | impl P256Key for P256Signer { 29 | fn signing_key(&self) -> &SigningKey { 30 | &self.0 31 | } 32 | } 33 | 34 | /// Abstraction over a P256 signer with webauthn capabilities. 35 | #[derive(Debug)] 36 | pub struct P256Signer(pub Arc<SigningKey>); 37 | 38 | impl P256Signer { 39 | /// Load a P256 key 40 | pub fn load(key: &B256) -> eyre::Result<Self> { 41 | Ok(Self(Arc::new(SigningKey::from_slice(key.as_slice())?))) 42 | } 43 | } 44 | 45 | #[async_trait::async_trait] 46 | impl Eip712PayLoadSigner for P256Signer { 47 | fn key_type(&self) -> KeyType { 48 | KeyType::P256 49 | } 50 | 51 | async fn sign_payload_hash(&self, payload_hash: B256) -> eyre::Result<Bytes> { 52 | Ok(self.sign_prehash(payload_hash.as_slice())?.to_bytes().to_vec().into()) 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /tools/relay-tools/src/bin/chainwalker/README.md: -------------------------------------------------------------------------------- 1 | # Chainwalker 2 | 3 | Tests interop by transferring tokens across all available chain connections. 4 | 5 | ## Quickstart 6 | 7 | Fund one account with the token(s) to be tested on one chain. The script will send on each hop 90% of the balance, hoping that the remaining 10% are enough to cover the fees.
8 | 9 | ### Test specific tokens only 10 | ```bash 11 | chainwalker --private-key $PRIVATE_KEY --only-uids "tether,ethereum" 12 | ``` 13 | 14 | ### Test all tokens except certain chains 15 | ```bash 16 | chainwalker --private-key $PRIVATE_KEY --exclude-chains "1,137" 17 | ``` 18 | 19 | ## Full Options 20 | 21 | ```bash 22 | chainwalker --help 23 | ``` 24 | 25 | ### Available Options 26 | 27 | - `--private-key KEY` - Private key of test account (required, or set PRIVATE_KEY env var) 28 | - `--no-run` - Plan and display test sequence without executing transfers 29 | - `--only-uids UIDs` - Only test specific interop token UIDs (comma-separated) 30 | - `--only-chains CHAIN_IDS` - Only test specific chains (comma-separated chain IDs) 31 | - `--exclude-chains CHAIN_IDS` - Exclude specific chains from testing (comma-separated chain IDs) 32 | - `--force` - Continue even if account has been used before 33 | - `--transfer-percentage N` - Percentage of balance to transfer on each hop (default: 90) 34 | - `--skip-settlement-wait` - Skip waiting for settlement completion 35 | - `--relay-url URL` - Relay URL (default: https://stg-rpc.ithaca.xyz) 36 | 37 | ## How it Works 38 | 39 | Chainwalker tests cross-chain interoperability by: 40 | 41 | 1. **Discovery** - Finds all available interop tokens and their chain connections 42 | 2. **Planning** - Creates a path that visits every connection (bidirectional) 43 | 3. **Execution** - Transfers tokens along the planned path, validating each hop 44 | 4. **Reporting** - Generates a detailed JSON report with transfer results and quotes 45 | -------------------------------------------------------------------------------- /.github/workflows/dependencies.yml: -------------------------------------------------------------------------------- 1 | # Runs `cargo update` periodically.
2 | 3 | name: Update Dependencies 4 | 5 | on: 6 | schedule: 7 | # Run weekly 8 | - cron: "0 0 * * SUN" 9 | workflow_dispatch: 10 | # Needed so we can run it manually 11 | 12 | env: 13 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 14 | BRANCH: cargo-update 15 | TITLE: "chore(deps): weekly `cargo update`" 16 | BODY: | 17 | Automation to keep dependencies in `Cargo.lock` current. 18 | 19 |
<details><summary>cargo update log</summary> 20 | <p> 21 | 22 | ```log 23 | $cargo_update_log 24 | ``` 25 | 26 | </p> 27 | </details>
 28 | 29 | jobs: 30 | update: 31 | name: Update 32 | runs-on: ubuntu-latest 33 | steps: 34 | - uses: actions/checkout@v4 35 | - uses: dtolnay/rust-toolchain@nightly 36 | 37 | - name: cargo update 38 | # Remove first line that always just says "Updating crates.io index" 39 | run: cargo update --color never 2>&1 | sed '/crates.io index/d' | tee -a cargo_update.log 40 | 41 | - name: craft commit message and PR body 42 | id: msg 43 | run: | 44 | export cargo_update_log="$(cat cargo_update.log)" 45 | 46 | echo "commit_message<<EOF" >> $GITHUB_OUTPUT 47 | printf "%s\n\n%s\n" $TITLE $cargo_update_log >> $GITHUB_OUTPUT 48 | echo "EOF" >> $GITHUB_OUTPUT 49 | 50 | echo "body<<EOF" >> $GITHUB_OUTPUT 51 | echo "$BODY" | envsubst >> $GITHUB_OUTPUT 52 | echo "EOF" >> $GITHUB_OUTPUT 53 | 54 | - name: Create Pull Request 55 | uses: peter-evans/create-pull-request@v7 56 | with: 57 | add-paths: ./Cargo.lock 58 | commit-message: ${{ steps.msg.outputs.commit_message }} 59 | title: ${{ env.TITLE }} 60 | body: ${{ steps.msg.outputs.body }} 61 | branch: ${{ env.BRANCH }} 62 | -------------------------------------------------------------------------------- /Dockerfile.chainwalker: -------------------------------------------------------------------------------- 1 | FROM lukemathwalker/cargo-chef:latest-rust-1 AS chef 2 | WORKDIR /app 3 | 4 | LABEL org.opencontainers.image.source=https://github.com/ithacaxyz/relay 5 | 6 | # Builds a cargo-chef plan 7 | FROM chef AS planner 8 | COPY . .
9 | RUN cargo chef prepare --recipe-path recipe.json 10 | 11 | FROM chef AS builder 12 | COPY --from=planner /app/recipe.json recipe.json 13 | 14 | # Build profile, maxperf by default 15 | ARG BUILD_PROFILE=maxperf 16 | ENV BUILD_PROFILE $BUILD_PROFILE 17 | 18 | # Extra Cargo features 19 | ARG FEATURES="" 20 | ENV FEATURES $FEATURES 21 | 22 | # Install system dependencies 23 | RUN apt-get update && apt-get -y upgrade && apt-get install -y libclang-dev pkg-config 24 | 25 | # Builds dependencies 26 | RUN cargo chef cook --profile $BUILD_PROFILE --recipe-path recipe.json 27 | 28 | # Copy source 29 | COPY . . 30 | 31 | # Build application 32 | RUN cargo build --profile $BUILD_PROFILE --features "$FEATURES" --locked -p relay-tools --bin chainwalker 33 | 34 | # ARG is not resolved in COPY so we have to hack around it by copying the 35 | # binary to a temporary location 36 | RUN cp /app/target/$BUILD_PROFILE/chainwalker /app/chainwalker 37 | 38 | # Use Ubuntu as the release image 39 | FROM ubuntu AS runtime 40 | WORKDIR /app 41 | 42 | # Install runtime dependencies 43 | RUN apt-get update && apt-get -y upgrade && apt-get install -y ca-certificates curl git && update-ca-certificates 44 | 45 | # Install foundry 46 | RUN curl -L https://foundry.paradigm.xyz | bash && \ 47 | . /root/.bashrc && \ 48 | /root/.foundry/bin/foundryup 49 | 50 | # Copy chainwalker over from the build stage 51 | COPY --from=builder /app/chainwalker /usr/local/bin 52 | 53 | # Add nushell 54 | COPY --from=ghcr.io/nushell/nushell:0.105.1-alpine /usr/bin/nu /usr/bin/nu 55 | 56 | # Add foundry binaries to PATH 57 | ENV PATH="/root/.foundry/bin:${PATH}" 58 | 59 | EXPOSE 9119 60 | ENTRYPOINT ["/usr/local/bin/chainwalker"] 61 | -------------------------------------------------------------------------------- /src/twilio/error.rs: -------------------------------------------------------------------------------- 1 | //! Twilio error types. 
2 | 3 | use serde::{Deserialize, Deserializer}; 4 | 5 | use std::fmt::{Display, Formatter}; 6 | 7 | /// Twilio error response. 8 | #[derive(Debug, Deserialize)] 9 | pub struct TwilioError { 10 | /// Error code. 11 | pub code: TwilioErrorCode, 12 | /// Error message. 13 | pub message: String, 14 | /// More info URL. 15 | pub more_info: String, 16 | /// HTTP status code. 17 | pub status: u16, 18 | } 19 | 20 | impl Display for TwilioError { 21 | fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { 22 | self.code.fmt(f) 23 | } 24 | } 25 | 26 | impl std::error::Error for TwilioError { 27 | fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { 28 | None 29 | } 30 | } 31 | 32 | /// Twilio error codes. 33 | /// 34 | /// See for more details. 35 | #[derive(Debug, Clone, Copy, PartialEq, Eq)] 36 | #[repr(u32)] 37 | pub enum TwilioErrorCode { 38 | /// Resource not found. 39 | NotFound = 20404, 40 | /// Too many requests. 41 | TooManyRequests = 20429, 42 | /// Unknown error code. 43 | Unknown(u32), 44 | } 45 | 46 | impl<'de> Deserialize<'de> for TwilioErrorCode { 47 | fn deserialize(deserializer: D) -> Result 48 | where 49 | D: Deserializer<'de>, 50 | { 51 | let code = u32::deserialize(deserializer)?; 52 | Ok(match code { 53 | 20404 => Self::NotFound, 54 | 20429 => Self::TooManyRequests, 55 | other => Self::Unknown(other), 56 | }) 57 | } 58 | } 59 | 60 | impl Display for TwilioErrorCode { 61 | fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), std::fmt::Error> { 62 | match self { 63 | Self::NotFound => write!(f, "Resource not found"), 64 | Self::TooManyRequests => write!(f, "Too many requests"), 65 | Self::Unknown(code) => write!(f, "Unknown error code: {}", code), 66 | } 67 | } 68 | } 69 | -------------------------------------------------------------------------------- /src/twilio/verify.rs: -------------------------------------------------------------------------------- 1 | //! Twilio Verify API v2 types. 
2 | 3 | use serde::{Deserialize, Deserializer}; 4 | 5 | /// Twilio Verify API verification response. 6 | #[derive(Debug, Deserialize)] 7 | pub struct VerificationResponse { 8 | /// Verification SID. 9 | pub sid: String, 10 | /// Verification status. 11 | pub status: VerificationStatus, 12 | } 13 | 14 | /// Twilio Verify API verification check response. 15 | #[derive(Debug, Deserialize)] 16 | pub struct VerificationCheckResponse { 17 | /// Verification status. 18 | pub status: VerificationStatus, 19 | } 20 | 21 | /// Verification status. 22 | /// 23 | /// See for more details. 24 | #[derive(Debug, Clone, Copy, PartialEq, Eq)] 25 | pub enum VerificationStatus { 26 | /// Verification pending. 27 | Pending, 28 | /// Verification approved. 29 | Approved, 30 | /// Verification canceled. 31 | Canceled, 32 | /// Max attempts reached. 33 | MaxAttemptsReached, 34 | /// Verification deleted. 35 | Deleted, 36 | /// Verification failed. 37 | Failed, 38 | /// Verification expired. 39 | Expired, 40 | } 41 | 42 | impl VerificationStatus { 43 | /// Check if verification is approved. 
44 | pub fn is_approved(&self) -> bool { 45 | matches!(self, VerificationStatus::Approved) 46 | } 47 | } 48 | 49 | impl<'de> Deserialize<'de> for VerificationStatus { 50 | fn deserialize(deserializer: D) -> Result 51 | where 52 | D: Deserializer<'de>, 53 | { 54 | let s = String::deserialize(deserializer)?; 55 | Ok(match s.as_str() { 56 | "pending" => Self::Pending, 57 | "approved" => Self::Approved, 58 | "canceled" => Self::Canceled, 59 | "max_attempts_reached" => Self::MaxAttemptsReached, 60 | "deleted" => Self::Deleted, 61 | "failed" => Self::Failed, 62 | "expired" => Self::Expired, 63 | _ => Self::Failed, 64 | }) 65 | } 66 | } 67 | -------------------------------------------------------------------------------- /.sqlx/query-a2d9ea4ad367d0feb713813a783fa071651305285432ddb71cee64ff240ee464.json: -------------------------------------------------------------------------------- 1 | { 2 | "db_name": "PostgreSQL", 3 | "query": "\n SELECT bundle_id, status as \"status: BundleStatus\", bundle_data, created_at\n FROM finished_bundles\n WHERE bundle_id = $1\n ", 4 | "describe": { 5 | "columns": [ 6 | { 7 | "ordinal": 0, 8 | "name": "bundle_id", 9 | "type_info": "Bytea" 10 | }, 11 | { 12 | "ordinal": 1, 13 | "name": "status: BundleStatus", 14 | "type_info": { 15 | "Custom": { 16 | "name": "bundle_status", 17 | "kind": { 18 | "Enum": [ 19 | "init", 20 | "source_queued", 21 | "source_confirmed", 22 | "source_failures", 23 | "destination_queued", 24 | "destination_failures", 25 | "destination_confirmed", 26 | "refunds_queued", 27 | "withdrawals_queued", 28 | "done", 29 | "failed", 30 | "liquidity_locked", 31 | "settlements_queued", 32 | "settlements_confirmed", 33 | "refunds_scheduled", 34 | "refunds_ready", 35 | "settlements_processing", 36 | "settlement_completion_queued", 37 | "fee_payer_queued", 38 | "fee_payer_completed" 39 | ] 40 | } 41 | } 42 | } 43 | }, 44 | { 45 | "ordinal": 2, 46 | "name": "bundle_data", 47 | "type_info": "Jsonb" 48 | }, 49 | { 50 | "ordinal": 3, 51 
| "name": "created_at", 52 | "type_info": "Timestamp" 53 | } 54 | ], 55 | "parameters": { 56 | "Left": [ 57 | "Bytea" 58 | ] 59 | }, 60 | "nullable": [ 61 | false, 62 | false, 63 | false, 64 | false 65 | ] 66 | }, 67 | "hash": "a2d9ea4ad367d0feb713813a783fa071651305285432ddb71cee64ff240ee464" 68 | } 69 | -------------------------------------------------------------------------------- /src/types/settler.rs: -------------------------------------------------------------------------------- 1 | //! Settler contract types and interfaces. 2 | //! 3 | //! The settler contract handles cross-chain attestations for multichain intents, 4 | //! enabling trust between different chains during settlement. 5 | 6 | use alloy::sol; 7 | 8 | sol! { 9 | #[sol(rpc)] 10 | #[derive(Debug)] 11 | contract Settler { 12 | /// Emitted when a settlementId is sent to one or more chains. 13 | event Sent(address indexed sender, bytes32 indexed settlementId, uint256 receiverChainId); 14 | 15 | /// Allows anyone to attest to any settlementId, on all the input chains. 16 | /// Input chain readers can choose which attestations they want to trust. 17 | /// 18 | /// # Arguments 19 | /// * `settlementId` - The ID of the settlement to attest to 20 | /// * `settlerContext` - Encoded context data that the settler can decode (e.g., array of input chains) 21 | function send(bytes32 settlementId, bytes calldata settlerContext) external payable; 22 | 23 | /// Write the settlement status for a specific sender and chain. 24 | /// Only the owner can call this function. 25 | /// 26 | /// # Arguments 27 | /// * `sender` - The address of the sender 28 | /// * `settlementId` - The ID of the settlement 29 | /// * `chainId` - The chain ID 30 | function write(address sender, bytes32 settlementId, uint256 chainId) external; 31 | 32 | /// Check if an attester from a particular output chain has attested to the settlementId. 33 | /// For our case, the attester is the orchestrator. 
34 | /// And the settlementId is the root of the merkle tree which is signed by the user. 35 | /// 36 | /// # Arguments 37 | /// * `settlementId` - The ID of the settlement to check 38 | /// * `attester` - The address of the attester 39 | /// * `chainId` - The chain ID to check 40 | function read(bytes32 settlementId, address attester, uint256 chainId) 41 | external 42 | view 43 | returns (bool isSettled); 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /docs/diagrams/bundle_state_machine.mmd: -------------------------------------------------------------------------------- 1 | %%{init: {'theme':'base', 'themeVariables': { 'primaryColor':'#232f3e', 'primaryTextColor':'#fff', 'primaryBorderColor':'#7FFF00', 'lineColor':'#F8B229', 'secondaryColor':'#006100', 'tertiaryColor':'#fff', 'fontFamily': 'monospace'}}}%% 2 | stateDiagram-v2 3 | [*] --> Init 4 | 5 | Init --> LiquidityLocked 6 | 7 | LiquidityLocked --> SourceQueued 8 | 9 | SourceQueued --> SourceConfirmed 10 | SourceQueued --> SourceFailures 11 | 12 | SourceConfirmed --> DestinationQueued 13 | 14 | SourceFailures --> RefundsScheduled 15 | SourceFailures --> Failed 16 | 17 | DestinationQueued --> DestinationConfirmed 18 | DestinationQueued --> DestinationFailures 19 | 20 | DestinationConfirmed --> SettlementsQueued 21 | 22 | DestinationFailures --> RefundsScheduled 23 | DestinationFailures --> Failed 24 | 25 | SettlementsQueued --> SettlementsProcessing 26 | SettlementsQueued --> Failed 27 | 28 | SettlementsProcessing --> SettlementCompletionQueued 29 | 30 | SettlementCompletionQueued --> Done 31 | SettlementCompletionQueued --> Failed 32 | 33 | RefundsScheduled --> RefundsReady 34 | 35 | RefundsReady --> RefundsQueued 36 | 37 | RefundsQueued --> Failed 38 | 39 | Done --> [*] 40 | Failed --> [*] 41 | 42 | classDef success fill:#28a745,stroke:#1e7e34,stroke-width:2px,color:#fff 43 | classDef failure fill:#dc3545,stroke:#bd2130,stroke-width:2px,color:#fff 44 | 
classDef processing fill:#ffc107,stroke:#d39e00,stroke-width:2px,color:#000 45 | classDef queued fill:#17a2b8,stroke:#117a8b,stroke-width:2px,color:#fff 46 | classDef terminal fill:#6c757d,stroke:#545b62,stroke-width:2px,color:#fff 47 | 48 | class Init,LiquidityLocked processing 49 | class SourceQueued,DestinationQueued,SettlementsQueued,SettlementCompletionQueued,RefundsQueued queued 50 | class SourceConfirmed,DestinationConfirmed,Done success 51 | class SourceFailures,DestinationFailures,RefundsScheduled,RefundsReady,Failed failure 52 | class SettlementsProcessing processing -------------------------------------------------------------------------------- /src/constants.rs: -------------------------------------------------------------------------------- 1 | //! Relay constants. 2 | 3 | use alloy::primitives::{Address, U256, address, uint}; 4 | use std::time::Duration; 5 | 6 | /// Extra buffer added to Intent gas estimates signed by P256 keys to cover execution overhead 7 | /// and ensure sufficient gas is provided. 8 | /// 9 | /// P256 signature verification has high gas usage variance and the 10_000 value seems to be a safe 10 | /// bet. 11 | pub const P256_GAS_BUFFER: U256 = uint!(10_000_U256); 12 | 13 | /// Extra buffer accounting for the cost of a cold storage write. 14 | /// 15 | /// 20_000 - 2900 gas 16 | pub const COLD_SSTORE_GAS_BUFFER: U256 = uint!(17_100_U256); 17 | 18 | /// Extra buffer added to Intent gas estimates to cover execution overhead 19 | /// and ensure sufficient gas is provided. 20 | pub const INTENT_GAS_BUFFER: u64 = 0; 21 | 22 | /// The default poll interval used by the relay clients. 23 | pub const DEFAULT_POLL_INTERVAL: Duration = Duration::from_millis(300); 24 | 25 | /// Default number of incoming RPC connections. 26 | pub const DEFAULT_RPC_DEFAULT_MAX_CONNECTIONS: u32 = 5_000; 27 | 28 | /// Extra buffer added to transaction gas estimates to pass the contract 63/64 check. 
29 | pub const TX_GAS_BUFFER: u64 = 0; 30 | 31 | /// Default cap on maximum number of pending transactions per chain. 32 | pub const DEFAULT_MAX_TRANSACTIONS: usize = 100; 33 | 34 | /// Default number of signers to derive from mnemonic and use for sending transactions. 35 | pub const DEFAULT_NUM_SIGNERS: usize = 16; 36 | 37 | /// Duration for escrow refunds in seconds. 38 | /// 39 | /// After this duration, escrowed funds can be refunded if settlement hasn't occurred. 40 | pub const ESCROW_REFUND_DURATION_SECS: u64 = 3600; // 1 hour 41 | 42 | /// Length of the salt used for escrow operations. 43 | /// 44 | /// This is used to generate unique escrow IDs. 45 | pub const ESCROW_SALT_LENGTH: usize = 12; 46 | 47 | /// Address used by eth_simulateV1 to identify ETH transfers as ERC20 transfer events. 48 | pub const SIMULATEV1_NATIVE_ADDRESS: Address = 49 | address!("0xeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee"); 50 | 51 | /// The default porto base url 52 | pub const DEFAULT_PORTO_BASE_URL: &str = "id.porto.sh"; 53 | -------------------------------------------------------------------------------- /src/types/interop.rs: -------------------------------------------------------------------------------- 1 | use crate::{storage::BundleStatus, transactions::RelayTransaction}; 2 | 3 | /// Represents a batch of transactions at different stages of the interop bundle lifecycle. 
4 | #[derive(Debug)] 5 | pub enum InteropTransactionBatch<'a> { 6 | /// Source chain transactions 7 | Source(&'a [RelayTransaction]), 8 | /// Destination chain transactions 9 | Destination(&'a [RelayTransaction]), 10 | /// Settlement transactions (after destination confirmation) 11 | ExecuteSend(&'a [RelayTransaction]), 12 | /// Execute receive transactions (e.g., LayerZero delivery) 13 | ExecuteReceive(&'a [RelayTransaction]), 14 | /// Refund transactions (when source/destination fails) 15 | Refund(&'a [RelayTransaction]), 16 | /// Fee payer transaction 17 | FeePayer(&'a RelayTransaction), 18 | } 19 | 20 | impl<'a> InteropTransactionBatch<'a> { 21 | /// Returns the inner transactions. 22 | pub fn transactions(&self) -> &[RelayTransaction] { 23 | match self { 24 | Self::Source(txs) => txs, 25 | Self::Destination(txs) => txs, 26 | Self::ExecuteSend(txs) => txs, 27 | Self::ExecuteReceive(txs) => txs, 28 | Self::Refund(txs) => txs, 29 | Self::FeePayer(tx) => std::slice::from_ref(tx), 30 | } 31 | } 32 | 33 | /// Returns the appropriate queued status for this transaction batch type. 34 | pub fn next_status(&self) -> BundleStatus { 35 | match self { 36 | Self::Source(_) => BundleStatus::SourceQueued, 37 | Self::Destination(_) => BundleStatus::DestinationQueued, 38 | Self::ExecuteSend(_) => BundleStatus::SettlementsQueued, 39 | Self::ExecuteReceive(_) => BundleStatus::SettlementCompletionQueued, 40 | Self::Refund(_) => BundleStatus::RefundsQueued, 41 | Self::FeePayer(_) => BundleStatus::FeePayerQueued, 42 | } 43 | } 44 | 45 | /// Returns true if this is a settlement-related transaction batch. 
46 | pub fn is_settlement(&self) -> bool { 47 | matches!(self, Self::ExecuteSend(_) | Self::ExecuteReceive(_) | Self::Refund(_)) 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /docs/README.md: -------------------------------------------------------------------------------- 1 | # Ithaca Relay Documentation 2 | 3 | Welcome to the Ithaca Relay developer documentation. This documentation is specifically designed for developers working on the relay codebase. 4 | 5 | ## Quick Links 6 | 7 | - **[Relay README](../README.md)** - Basic setup and running instructions 8 | 9 | ## Documentation Structure 10 | 11 | ### 🚀 Development 12 | - **[Getting Started](development/getting-started.md)** - Set up your development environment 13 | - **[Testing Guide](development/testing.md)** - Unit tests, e2e tests, and testing patterns 14 | - **[Debugging Guide](development/debugging.md)** - Debugging tools, Anvil traces, Cast CLI, and troubleshooting 15 | 16 | ### 🏗️ Architecture 17 | - **[System Overview](architecture/overview.md)** - High-level relay architecture and components 18 | - **[Porto Integration](architecture/porto-integration.md)** - Ecosystem context and intent-based design principles 19 | - **[RPC Endpoints](architecture/rpc-endpoints.md)** - JSON-RPC API implementation details 20 | - **[Transaction Pipeline](architecture/transaction-pipeline.md)** - End-to-end transaction processing flow 21 | - **[Storage Layer](architecture/storage-layer.md)** - Database schema and storage abstractions 22 | - **[Cross-Chain Operations](architecture/cross-chain.md)** - Multichain intent and settlement implementation 23 | 24 | ### 📡 APIs 25 | - **[RPC API Reference](https://porto.sh/rpc-server)** - Complete JSON-RPC endpoint documentation 26 | 27 | ### 🔧 Operations 28 | - **[Configuration Reference](development/getting-started.md#configuration)** - Configuration options and examples 29 | - **[Troubleshooting 
Guide](troubleshooting/common-issues.md)** - Common issues and solutions 30 | 31 | ### 📊 Diagrams 32 | - **[Bundle State Machine](diagrams/bundle_state_machine.svg)** - Cross-chain bundle processing states 33 | 34 | ## External Resources 35 | 36 | - **[Porto Documentation](https://github.com/ithacaxyz/porto)** - Client SDK and user-facing documentation 37 | - **[Account Contracts](https://github.com/ithacaxyz/account)** - EIP-7702 delegation and orchestrator contracts 38 | - **[Ithaca Protocol](https://porto.sh)** - Protocol overview and specifications 39 | -------------------------------------------------------------------------------- /tests/e2e/cases/metrics.rs: -------------------------------------------------------------------------------- 1 | use alloy::providers::Provider; 2 | use eyre::Result; 3 | use relay::{ 4 | metrics::periodic::{BalanceCollector, MetricCollector, format_units_f64}, 5 | types::IERC20::IERC20Instance, 6 | }; 7 | 8 | use crate::e2e::environment::Environment; 9 | 10 | #[tokio::test(flavor = "multi_thread")] 11 | async fn balance_collector() -> Result<()> { 12 | let env = Environment::setup_multi_chain(3).await?; 13 | 14 | BalanceCollector::new(env.funder, env.relay_handle.chains.clone()).collect().await?; 15 | let output = env.relay_handle.metrics.render(); 16 | 17 | for chain in env.relay_handle.chains.chains_iter() { 18 | let (native_symbol, native_decimals) = chain 19 | .native_symbol() 20 | .zip(chain.assets().native().map(|(_, a)| a.decimals)) 21 | .unwrap_or(("ETH", 18)); 22 | 23 | for signer in chain.signer_addresses() { 24 | let expected_metric = format!( 25 | r#"signer_balance{{address="{}",chain_id="{}",symbol="{}"}} {}"#, 26 | signer.to_checksum(None), 27 | chain.id(), 28 | native_symbol, 29 | format_units_f64(chain.provider().get_balance(signer).await?, native_decimals)? 
30 | ); 31 | assert!(output.contains(&expected_metric)); 32 | } 33 | 34 | for (uid, token) in chain.assets().fee_token_iter_sorted() { 35 | let balance = if token.address.is_zero() { 36 | chain.provider().get_balance(env.funder).await? 37 | } else { 38 | IERC20Instance::new(token.address, chain.provider()) 39 | .balanceOf(env.funder) 40 | .call() 41 | .await? 42 | }; 43 | let expected_metric = format!( 44 | r#"funder_balance{{address="{}",chain_id="{}",uid="{}"}} {}"#, 45 | env.funder.to_checksum(None), 46 | chain.id(), 47 | uid, 48 | format_units_f64(balance, token.decimals)? 49 | ); 50 | assert!(output.contains(&expected_metric)); 51 | } 52 | } 53 | 54 | Ok(()) 55 | } 56 | -------------------------------------------------------------------------------- /src/types/storage.rs: -------------------------------------------------------------------------------- 1 | use super::{SignedCall, rpc::AuthorizeKeyResponse}; 2 | use crate::error::RelayError; 3 | use alloy::{ 4 | eips::eip7702::SignedAuthorization, 5 | primitives::{Address, Bytes}, 6 | rpc::types::state::{AccountOverride, StateOverride, StateOverridesBuilder}, 7 | sol_types::SolValue, 8 | }; 9 | use serde::{Deserialize, Serialize}; 10 | 11 | /// CreateAccount request that can be reused across chains. 12 | #[derive(Debug, Clone, Serialize, Deserialize)] 13 | #[serde(rename_all = "camelCase")] 14 | pub struct CreatableAccount { 15 | /// EOA generated address. 16 | pub address: Address, 17 | /// Signed 7702 authorization. 18 | pub signed_authorization: SignedAuthorization, 19 | /// Initialization calls. 20 | pub pre_call: SignedCall, 21 | } 22 | 23 | impl CreatableAccount { 24 | /// Initializes a new account. 25 | pub fn new( 26 | eoa: Address, 27 | pre_call: SignedCall, 28 | signed_authorization: SignedAuthorization, 29 | ) -> Self { 30 | Self { address: eoa, signed_authorization, pre_call } 31 | } 32 | 33 | /// Return abi encoded precall. 
34 | pub fn init_data(&self) -> Bytes { 35 | self.pre_call.abi_encode_params().into() 36 | } 37 | 38 | /// Return the list of authorized keys as [`AuthorizeKeyResponse`]. 39 | pub fn authorized_keys(&self) -> Result, RelayError> { 40 | Ok(self.pre_call.authorized_keys_with_permissions()?) 41 | } 42 | 43 | /// Builds state overrides for the account, including 7702 authorization and authorized keys. 44 | pub fn state_overrides(&self) -> Result { 45 | Ok(StateOverridesBuilder::with_capacity(1) 46 | .append( 47 | self.address, 48 | AccountOverride::default() 49 | .with_7702_delegation_designator(self.signed_authorization.address) 50 | .with_state_diff( 51 | self.authorized_keys()? 52 | .into_iter() 53 | .flat_map(|k| k.authorize_key.key.storage_slots().into_iter()), 54 | ), 55 | ) 56 | .build()) 57 | } 58 | } 59 | -------------------------------------------------------------------------------- /src/nonce.rs: -------------------------------------------------------------------------------- 1 | //! Nonce management module. 2 | //! 3 | //! This module provides functionality for managing nonces in the relay system. 4 | //! 5 | //! Nonces are used to ensure that each transaction is unique and prevent replay attacks. 6 | 7 | use std::sync::Arc; 8 | 9 | use alloy::{ 10 | primitives::{Address, ChainId}, 11 | providers::fillers::NonceManager, 12 | }; 13 | use async_trait::async_trait; 14 | use dashmap::DashMap; 15 | use futures_util::lock::Mutex; 16 | 17 | /// [`MultiChainNonceManager`] is a nonce manager that can handle multiple chains and addresses. 18 | /// 19 | /// It is based on [`CachedNonceManager`] and provides a convenient way to manage nonces for 20 | /// multiple chains and addresses. 
21 | #[derive(Clone, Debug, Default)] 22 | pub struct MultiChainNonceManager { 23 | #[allow(clippy::type_complexity)] 24 | nonces: Arc>>>, 25 | } 26 | 27 | #[async_trait] 28 | impl NonceManager for MultiChainNonceManager { 29 | async fn get_next_nonce( 30 | &self, 31 | provider: &P, 32 | address: Address, 33 | ) -> alloy::transports::TransportResult 34 | where 35 | P: alloy::providers::Provider, 36 | N: alloy::network::Network, 37 | { 38 | // Use `u64::MAX` as a sentinel value to indicate that the nonce has not been fetched yet. 39 | const NONE: u64 = u64::MAX; 40 | 41 | let chain_id = provider.get_chain_id().await?; 42 | 43 | // Locks dashmap internally for a short duration to clone the `Arc`. 44 | // We also don't want to hold the dashmap lock through the await point below. 45 | let nonce = { 46 | let rm = self 47 | .nonces 48 | .entry((chain_id, address)) 49 | .or_insert_with(|| Arc::new(Mutex::new(NONE))); 50 | Arc::clone(rm.value()) 51 | }; 52 | 53 | let mut nonce = nonce.lock().await; 54 | let new_nonce = if *nonce == NONE { 55 | // Initialize the nonce if we haven't seen this account before. 56 | provider.get_transaction_count(address).pending().await? 
57 | } else { 58 | *nonce + 1 59 | }; 60 | *nonce = new_nonce; 61 | Ok(new_nonce) 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /tests/e2e/cases/signature.rs: -------------------------------------------------------------------------------- 1 | use crate::e2e::{ 2 | AuthKind, 3 | cases::{upgrade_account_eagerly, upgrade_account_lazily}, 4 | environment::Environment, 5 | }; 6 | use alloy::{ 7 | eips::eip7702::constants::EIP7702_DELEGATION_DESIGNATOR, 8 | primitives::{Address, B256, Bytes}, 9 | rpc::types::state::{AccountOverride, StateOverridesBuilder}, 10 | }; 11 | use relay::{ 12 | rpc::RelayApiClient, 13 | signers::Eip712PayLoadSigner, 14 | types::{Account, KeyType, KeyWith712Signer, rpc::VerifySignatureParameters}, 15 | }; 16 | 17 | #[tokio::test(flavor = "multi_thread")] 18 | async fn verify_signature() -> eyre::Result<()> { 19 | let env = Environment::setup().await?; 20 | 21 | let key = KeyWith712Signer::random_admin(KeyType::Secp256k1)?.unwrap(); 22 | let eoa = env.eoa.address(); 23 | upgrade_account_lazily(&env, &[key.to_authorized()], AuthKind::Auth).await?; 24 | 25 | // Need the state override since the account is not on onchain, and we need to query for the 26 | // eip712 domain name and version to get a signing digest. 
27 | let account = Account::new(eoa, env.provider()).with_overrides( 28 | StateOverridesBuilder::with_capacity(1) 29 | .append( 30 | eoa, 31 | AccountOverride::default().with_code(Bytes::from( 32 | [&EIP7702_DELEGATION_DESIGNATOR, env.config.delegation_proxy.as_slice()] 33 | .concat(), 34 | )), 35 | ) 36 | .build(), 37 | ); 38 | let digest = B256::random(); 39 | let signature = key.sign_payload_hash(account.digest_erc1271(digest)).await?; 40 | 41 | let verify = |address: Address| { 42 | env.relay_endpoint.verify_signature(VerifySignatureParameters { 43 | address, 44 | chain_id: env.chain_id(), 45 | digest, 46 | signature: signature.clone(), 47 | }) 48 | }; 49 | 50 | // assert that we can verify signature against account in storage (not onchain) 51 | assert!(verify(eoa).await?.valid); 52 | 53 | // assert that we can verify signature against account onchain 54 | upgrade_account_eagerly(&env, &[key.to_authorized()], &key, AuthKind::Auth).await?; 55 | assert!(verify(eoa).await?.valid); 56 | 57 | Ok(()) 58 | } 59 | -------------------------------------------------------------------------------- /tests/e2e/cases/cli.rs: -------------------------------------------------------------------------------- 1 | use crate::e2e::environment::Environment; 2 | use alloy::primitives::B256; 3 | use relay::{cli::Args, spawn::try_spawn_with_args}; 4 | use std::{ 5 | env::temp_dir, 6 | net::{IpAddr, Ipv4Addr}, 7 | }; 8 | 9 | #[tokio::test] 10 | #[ignore] // todo: borked 11 | async fn respawn_cli() -> eyre::Result<()> { 12 | let env = Environment::setup().await?; 13 | 14 | let dir = temp_dir(); 15 | let config = dir.join("relay.yaml"); 16 | let _ = std::fs::remove_file(&config); 17 | let mnemonic = "test test test test test test test test test test test junk"; 18 | 19 | for _ in 0..=1 { 20 | let _ = try_spawn_with_args( 21 | Args { 22 | config: config.clone(), 23 | address: IpAddr::V4(Ipv4Addr::LOCALHOST), 24 | port: 0, 25 | metrics_port: 0, 26 | max_connections: Default::default(), 27 | 
orchestrator: Some(env.orchestrator), 28 | delegation_proxy: Some(env.delegation), 29 | legacy_delegation_proxies: Default::default(), 30 | simulator: Default::default(), 31 | funder: Default::default(), 32 | escrow: None, 33 | fee_recipient: Default::default(), 34 | quote_ttl: Default::default(), 35 | rate_ttl: Default::default(), 36 | constant_rate: Default::default(), 37 | intent_gas_buffer: Default::default(), 38 | tx_gas_buffer: Default::default(), 39 | database_url: Default::default(), 40 | max_pending_transactions: Default::default(), 41 | signers_mnemonic: mnemonic.parse().unwrap(), 42 | funder_key: Some(B256::random().to_string()), 43 | public_node_endpoints: Default::default(), 44 | config_only: Default::default(), 45 | resend_api_key: Default::default(), 46 | porto_base_url: Default::default(), 47 | funder_owner_key: Default::default(), 48 | binance_api_key: Default::default(), 49 | binance_api_secret: Default::default(), 50 | skip_diagnostics: true, 51 | }, 52 | &config, 53 | ) 54 | .await?; 55 | } 56 | 57 | Ok(()) 58 | } 59 | -------------------------------------------------------------------------------- /src/estimation/op.rs: -------------------------------------------------------------------------------- 1 | //! Helpers for OP Stack fee estimation. 2 | 3 | use alloy::{ 4 | primitives::{Address, address}, 5 | sol, 6 | }; 7 | 8 | /// Address of the OP Stack [GasPriceOracle](https://github.com/ethereum-optimism/optimism/blob/develop/packages/contracts-bedrock/src/L2/GasPriceOracle.sol) contract. 9 | pub const OP_GAS_PRICE_ORACLE_ADDRESS: Address = 10 | address!("0x420000000000000000000000000000000000000F"); 11 | 12 | sol! { 13 | #[sol(rpc)] 14 | contract OpGasPriceOracle { 15 | /// Computes the L1 portion of the fee based on the provided unsigned encoded transaction. 16 | /// 17 | /// See also: 18 | function getL1Fee(bytes memory _data) external view returns (uint256); 19 | 20 | /// Returns an upper bound for the L1 fee for a given transaction size. 
It assumes the worst case of fastlz upper-bound which covers %99.99 txs. 21 | /// 22 | /// _unsignedTxSize: Unsigned fully RLP-encoded transaction size to get the L1 fee for. 23 | /// 24 | /// Returns the L1 estimated upper-bound fee that should be paid for the tx. 25 | /// 26 | /// See also 27 | /// 28 | /// This assumes `(_unsignedTxSize + 68) / 255 + 16` is the practical fastlz upper-bound covers %99.99 txs. 29 | function getL1FeeUpperBound(uint256 _unsignedTxSize) external view returns (uint256); 30 | } 31 | } 32 | 33 | #[cfg(test)] 34 | mod tests { 35 | use super::*; 36 | use alloy::{ 37 | hex, 38 | providers::{Provider, ProviderBuilder}, 39 | }; 40 | 41 | #[tokio::test] 42 | async fn test_op_gas_estimate_l1() { 43 | let provider = 44 | ProviderBuilder::new().connect("https://mainnet.optimism.io").await.unwrap().erased(); 45 | 46 | let calldata = hex!("0xdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef"); 47 | let estimate = OpGasPriceOracle::new(OP_GAS_PRICE_ORACLE_ADDRESS, provider) 48 | .getL1Fee(calldata.into()) 49 | .call() 50 | .await 51 | .unwrap(); 52 | 53 | assert!(estimate > 0); 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /tests/e2e/config.rs: -------------------------------------------------------------------------------- 1 | use super::{AuthKind, TxContext, cases::upgrade_account_lazily, environment::Environment}; 2 | use crate::e2e::environment::EnvironmentConfig; 3 | use strum::EnumIter; 4 | 5 | /// Test configuration that will prepare the desired [`Environment`] before a run. 6 | #[derive(Debug, Clone, Copy)] 7 | pub struct TestConfig { 8 | /// Payment method configuration. 9 | pub payment: PaymentConfig, 10 | } 11 | 12 | impl TestConfig { 13 | /// Runs the test for a specific configuration. 
14 | pub async fn run<'a, F>(&self, build_txs: F, env_config: EnvironmentConfig) -> eyre::Result<()> 15 | where 16 | F: Fn(&Environment) -> Vec> + Send + Sync, 17 | { 18 | // Setup the initial environment. 19 | let mut env = Environment::setup_with_config(env_config).await?; 20 | 21 | // Apply native or ERC20 payment method 22 | env = self.payment.apply(env); 23 | 24 | // Generate transactions from test case 25 | let txs = build_txs(&env); 26 | 27 | let txs = txs.into_iter().enumerate().peekable(); 28 | for (tx_num, mut tx) in txs { 29 | // The account needs to be set up in the very first transaction. 30 | if tx_num == 0 { 31 | // If a signer is not defined, takes the first authorized key from the tx 32 | // context. 33 | tx.key = Some(tx.key.as_ref().unwrap_or(&tx.authorization_keys[0])); 34 | 35 | // authorization_keys field on the first tx are handled as initialization keys. 36 | upgrade_account_lazily( 37 | &env, 38 | &tx.authorization_keys.drain(..).map(|k| k.to_authorized()).collect::>(), 39 | tx.auth.clone().unwrap_or(AuthKind::Auth), 40 | ) 41 | .await?; 42 | } 43 | tx.process(tx_num, &env).await?; 44 | } 45 | 46 | Ok(()) 47 | } 48 | } 49 | 50 | impl From for TestConfig { 51 | fn from(value: PaymentConfig) -> Self { 52 | Self { payment: value } 53 | } 54 | } 55 | 56 | /// Payment method be used on a intent. 57 | #[derive(Debug, Clone, Copy, EnumIter)] 58 | pub enum PaymentConfig { 59 | Native, 60 | ERC20, 61 | } 62 | 63 | impl PaymentConfig { 64 | /// Modify the environment based on the payment method. 
65 | fn apply(self, env: Environment) -> Environment { 66 | match self { 67 | PaymentConfig::ERC20 => env, 68 | PaymentConfig::Native => env.with_native_payment(), 69 | } 70 | } 71 | } 72 | -------------------------------------------------------------------------------- /src/types/rpc/permission.rs: -------------------------------------------------------------------------------- 1 | use alloy::primitives::{Address, U256}; 2 | use serde::{Deserialize, Serialize}; 3 | 4 | use crate::types::{ 5 | CallPermission, 6 | IthacaAccount::{SpendInfo, SpendPeriod}, 7 | }; 8 | 9 | /// Represents key permissions. 10 | #[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)] 11 | #[cfg_attr(test, derive(strum::EnumDiscriminants))] 12 | #[cfg_attr(test, strum_discriminants(derive(strum::EnumIter), doc = "Key permissions"))] 13 | #[serde(tag = "type")] 14 | pub enum Permission { 15 | /// Call permission. 16 | #[serde(rename = "call")] 17 | Call(CallPermission), 18 | /// Spend permission. 19 | #[serde(rename = "spend")] 20 | Spend(SpendPermission), 21 | } 22 | 23 | impl From for Permission { 24 | fn from(perm: CallPermission) -> Self { 25 | Permission::Call(perm) 26 | } 27 | } 28 | 29 | impl From for Permission { 30 | fn from(perm: SpendPermission) -> Self { 31 | Permission::Spend(perm) 32 | } 33 | } 34 | 35 | /// Represents spend permissions. 36 | #[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)] 37 | pub struct SpendPermission { 38 | /// The spending limit. 39 | pub limit: U256, 40 | /// The spending period. 41 | pub period: SpendPeriod, 42 | /// The token address. 
//! Additional helpers for RPC error handling

use alloy::transports::TransportError;

/// An extension trait for [`TransportError`] that classifies common transaction-pool
/// rejection messages returned by execution clients.
pub trait TransportErrExt {
    /// Returns true if this is an "already known" error, thrown when a submitted
    /// transaction is already contained within the pool.
    ///
    /// This represents duplicate transaction submission.
    fn is_already_known(&self) -> bool;

    /// Returns true if this is a "replacement transaction underpriced" error thrown when we submit
    /// a transaction with a nonce that already exists in the pool.
    fn is_replacement_underpriced(&self) -> bool;

    /// Returns true if this is a "transaction underpriced" error thrown when we submit a
    /// transaction that has a gas price too low to be included.
    fn is_transaction_underpriced(&self) -> bool;

    /// Returns true if this is a "nonce too low" error thrown when we submit a transaction with a
    /// nonce lower than the account's current nonce on chain.
    fn is_nonce_too_low(&self) -> bool;
}

impl TransportErrExt for TransportError {
    fn is_already_known(&self) -> bool {
        // Exact-match: both geth-family and reth return precisely "already known".
        // see also: op-geth: https://github.com/ethereum-optimism/op-geth/blob/e666543dc5500428ee7c940e54263fe4968c5efd/core/txpool/legacypool/legacypool.go#L991-L993
        // reth: https://github.com/paradigmxyz/reth/blob/a3b749676c6c748bf977983c189f9f4c4f9e9fbe/crates/rpc/rpc-eth-types/src/error/mod.rs#L663-L665
        self.as_error_resp().map(|err| err.message == "already known").unwrap_or_default()
    }

    fn is_replacement_underpriced(&self) -> bool {
        // see also: geth: https://github.com/ethereum/go-ethereum/blob/a56558d0920b74b6553185de4aff79c3de534e01/core/txpool/errors.go#L38-L38
        self.as_error_resp()
            .map(|err| err.message.contains("replacement transaction underpriced"))
            .unwrap_or_default()
    }

    fn is_transaction_underpriced(&self) -> bool {
        // see also: geth: https://github.com/ethereum/go-ethereum/blob/a56558d0920b74b6553185de4aff79c3de534e01/core/txpool/errors.go#L34-L34
        self.as_error_resp()
            .map(|err| err.message.contains("transaction underpriced"))
            .unwrap_or_default()
    }

    fn is_nonce_too_low(&self) -> bool {
        // see also: geth: https://github.com/ethereum/go-ethereum/blob/85077be58edea572f29c3b1a6a055077f1a56a8b/core/error.go#L45-L47
        self.as_error_resp().map(|err| err.message.contains("nonce too low")).unwrap_or_default()
    }
}
/.github/workflows/lint.yml: -------------------------------------------------------------------------------- 1 | name: lint 2 | 3 | on: 4 | pull_request: 5 | merge_group: 6 | push: 7 | branches: [main] 8 | 9 | env: 10 | CARGO_TERM_COLOR: always 11 | 12 | jobs: 13 | clippy-binaries: 14 | name: clippy 15 | runs-on: ubuntu-latest 16 | timeout-minutes: 30 17 | steps: 18 | - uses: actions/checkout@v4 19 | - uses: dtolnay/rust-toolchain@clippy 20 | with: 21 | toolchain: nightly-2025-08-08 22 | - uses: Swatinem/rust-cache@v2 23 | with: 24 | cache-on-failure: true 25 | - run: cargo clippy --bin relay 26 | env: 27 | RUSTFLAGS: -D warnings 28 | 29 | clippy: 30 | name: clippy 31 | runs-on: ubuntu-latest 32 | timeout-minutes: 30 33 | steps: 34 | - uses: actions/checkout@v4 35 | - uses: dtolnay/rust-toolchain@clippy 36 | with: 37 | toolchain: nightly-2025-08-08 38 | - uses: Swatinem/rust-cache@v2 39 | with: 40 | cache-on-failure: true 41 | - run: cargo clippy --examples --tests --benches --all-features 42 | env: 43 | RUSTFLAGS: -D warnings 44 | 45 | crate-checks: 46 | runs-on: ubuntu-latest 47 | timeout-minutes: 30 48 | steps: 49 | - uses: actions/checkout@v4 50 | - uses: dtolnay/rust-toolchain@stable 51 | - uses: taiki-e/install-action@cargo-hack 52 | - uses: Swatinem/rust-cache@v2 53 | with: 54 | cache-on-failure: true 55 | - run: cargo hack check 56 | 57 | msrv: 58 | name: MSRV 59 | runs-on: ubuntu-latest 60 | timeout-minutes: 30 61 | steps: 62 | - uses: actions/checkout@v4 63 | - uses: dtolnay/rust-toolchain@master 64 | with: 65 | toolchain: "1.88" # MSRV 66 | - uses: Swatinem/rust-cache@v2 67 | with: 68 | cache-on-failure: true 69 | - run: cargo build --bin relay 70 | env: 71 | RUSTFLAGS: -D warnings 72 | 73 | fmt: 74 | name: fmt 75 | runs-on: ubuntu-latest 76 | timeout-minutes: 30 77 | steps: 78 | - uses: actions/checkout@v4 79 | - uses: dtolnay/rust-toolchain@nightly 80 | with: 81 | components: rustfmt 82 | - run: cargo fmt --all --check 83 | 84 | lint-success: 85 | 
name: lint success 86 | runs-on: ubuntu-latest 87 | if: always() 88 | needs: [clippy-binaries, clippy, crate-checks, fmt] 89 | timeout-minutes: 30 90 | steps: 91 | - name: Decide whether the needed jobs succeeded or failed 92 | uses: re-actors/alls-green@release/v1 93 | with: 94 | jobs: ${{ toJSON(needs) }} 95 | 96 | deny: 97 | uses: ithacaxyz/ci/.github/workflows/deny.yml@main 98 | -------------------------------------------------------------------------------- /src/transport/timeout.rs: -------------------------------------------------------------------------------- 1 | //! Timeout layer for RPC requests. 2 | 3 | use alloy::{ 4 | primitives::ChainId, 5 | rpc::json_rpc::{RequestPacket, ResponsePacket}, 6 | transports::{Transport, TransportError, TransportErrorKind, TransportFut}, 7 | }; 8 | use futures::FutureExt; 9 | use std::{ 10 | task::{Context, Poll}, 11 | time::Duration, 12 | }; 13 | use tower::{Layer, Service}; 14 | use tracing::warn; 15 | 16 | /// A [`tower::Layer`] that adds a timeout to requests. 17 | #[derive(Debug, Clone)] 18 | pub struct TimeoutLayer { 19 | timeout: Duration, 20 | chain_id: ChainId, 21 | } 22 | 23 | impl TimeoutLayer { 24 | /// Create a new [`TimeoutLayer`] with the given timeout duration and chain ID. 25 | pub const fn new(timeout: Duration, chain_id: ChainId) -> Self { 26 | Self { timeout, chain_id } 27 | } 28 | } 29 | 30 | impl Layer for TimeoutLayer { 31 | type Service = TimeoutService; 32 | 33 | fn layer(&self, inner: T) -> Self::Service { 34 | TimeoutService { inner, timeout: self.timeout, chain_id: self.chain_id } 35 | } 36 | } 37 | 38 | /// A service that wraps another service with a timeout. 
39 | #[derive(Debug, Clone)] 40 | pub struct TimeoutService { 41 | inner: T, 42 | timeout: Duration, 43 | chain_id: ChainId, 44 | } 45 | 46 | impl Service for TimeoutService 47 | where 48 | T: Transport + Clone, 49 | { 50 | type Response = ResponsePacket; 51 | type Error = TransportError; 52 | type Future = TransportFut<'static>; 53 | 54 | fn poll_ready(&mut self, cx: &mut Context<'_>) -> Poll> { 55 | self.inner.poll_ready(cx) 56 | } 57 | 58 | fn call(&mut self, req: RequestPacket) -> Self::Future { 59 | let method = req.as_single().map(|r| r.method()).unwrap_or("unknown").to_string(); 60 | 61 | let fut = self.inner.call(req); 62 | let timeout = self.timeout; 63 | let chain_id = self.chain_id; 64 | 65 | async move { 66 | tokio::time::timeout(timeout, fut) 67 | .await 68 | .inspect_err(|_| { 69 | warn!( 70 | %chain_id, 71 | %method, 72 | timeout_secs = timeout.as_secs(), 73 | "RPC request timeout" 74 | ); 75 | }) 76 | .map_err(|_| { 77 | TransportErrorKind::custom_str(&format!( 78 | "request timeout: chain_id={}, method={}", 79 | chain_id, method 80 | )) 81 | })? 82 | } 83 | .boxed() 84 | } 85 | } 86 | --------------------------------------------------------------------------------