├── .github └── workflows │ ├── deploy-release.yml │ ├── deploy-wallet-connect.yml │ ├── prepare-release.yml │ └── test.yml ├── .gitignore ├── .gitmodules ├── README.md ├── cli ├── index.ts ├── sleuth.ts └── test │ └── sleuth.test.ts ├── foundry.toml ├── jest.config.js ├── logo.png ├── package.json ├── parser ├── .appveyor.yml ├── .gitignore ├── .travis.yml ├── Cargo.toml ├── LICENSE_APACHE ├── LICENSE_MIT ├── README.md ├── pkg │ ├── README.md │ ├── package.json │ ├── parser.d.ts │ ├── parser.js │ ├── parser_bg.js │ ├── parser_bg.wasm │ └── parser_bg.wasm.d.ts ├── src │ ├── abi.rs │ ├── lib.rs │ ├── parse.rs │ ├── query.rs │ ├── resolve.rs │ ├── sleuth.pest │ ├── source.rs │ ├── utils.rs │ └── yul.rs └── tests │ └── web.rs ├── script ├── Sleuth.s.sol ├── deploy-release.sh ├── mainnet │ └── deploy.sh ├── prepare-release.sh └── test.sh ├── src └── Sleuth.sol ├── test ├── Sleuth.t.sol └── examples │ ├── Birthday.sol │ ├── BlockNumber.sol │ ├── Fun.yul │ └── Pair.sol ├── tsconfig.json └── yarn.lock /.github/workflows/deploy-release.yml: -------------------------------------------------------------------------------- 1 | name: Deploy Release 2 | 3 | on: 4 | workflow_dispatch: 5 | inputs: 6 | release: 7 | name: release 8 | description: Release tag (e.g. v0.0.1) 9 | 10 | network: 11 | name: network 12 | type: choice 13 | options: 14 | - mainnet 15 | - sepolia 16 | - base 17 | - base_sepolia 18 | 19 | env: 20 | FOUNDRY_PROFILE: ci 21 | 22 | permissions: 23 | contents: write 24 | 25 | jobs: 26 | check: 27 | strategy: 28 | fail-fast: true 29 | 30 | name: Foundry project 31 | runs-on: ubuntu-latest 32 | steps: 33 | - uses: actions/checkout@v3 34 | with: 35 | submodules: recursive 36 | 37 | - name: Install Foundry 38 | uses: foundry-rs/foundry-toolchain@v1 39 | with: 40 | version: nightly 41 | 42 | - name: Deploy Release 43 | run: | 44 | export RPC_URL=$(echo $deployer_config | jq -r ".$network.rpc_url") 45 | export DEPLOYER_PK=$(echo $deployer_config | jq -r ".$network.deployer_pk") 46 | script/deploy-release.sh $release 47 | env: 48 | deployer_config: ${{ secrets.deployer_config }} 49 | network: ${{ inputs.network }} 50 | release: ${{ inputs.release }} 51 | -------------------------------------------------------------------------------- /.github/workflows/deploy-wallet-connect.yml: -------------------------------------------------------------------------------- 1 | name: Deploy Sleuth [Mainnet - WalletConnect] 2 | 3 | on: 4 | workflow_dispatch: 5 | inputs: 6 | deployer_address: 7 | description: WalletConnect address to deploy from 8 | required: true 9 | 10 | env: 11 | FOUNDRY_PROFILE: ci 12 | 13 | jobs: 14 | check: 15 | strategy: 16 | fail-fast: true 17 | 18 | name: Deploy Sleuth [Mainnet] 19 | runs-on: ubuntu-latest 20 | steps: 21 | - name: Start Seacrest 22 | uses: hayesgm/seacrest@v1 23 | with: 24 | ethereum_url: "${{ secrets.ETH_MAINNET_URL }}" 25 | 26 | - uses: actions/checkout@v3 27 | with: 28 | submodules: recursive 29 | 30 | - name: Install Foundry 31 | uses: foundry-rs/foundry-toolchain@v1 32 | with: 33 | version: nightly 34 | 35 | - name: Run Forge build 36 | run: | 37 | forge --version 38 | forge build --sizes 39 | 40 | - name: Forge Deploy Sleuth [Mainnet] 41 | run: script/mainnet/deploy.sh 42 | env: 43 | ETHERSCAN_API_KEY: "${{ secrets.ETHERSCAN_API_KEY }}" 44 | ETH_FROM: "${{ inputs.deployer_address }}" 45 | RPC_URL: "http://localhost:8585" 46 | -------------------------------------------------------------------------------- /.github/workflows/prepare-release.yml: 
-------------------------------------------------------------------------------- 1 | name: Prepare Release 2 | 3 | on: 4 | push: 5 | tags: 6 | - '*' 7 | 8 | env: 9 | FOUNDRY_PROFILE: ci 10 | 11 | permissions: 12 | contents: write 13 | 14 | jobs: 15 | check: 16 | strategy: 17 | fail-fast: true 18 | 19 | name: Foundry project 20 | runs-on: ubuntu-latest 21 | steps: 22 | - uses: actions/checkout@v3 23 | with: 24 | submodules: recursive 25 | 26 | - name: Install Foundry 27 | uses: foundry-rs/foundry-toolchain@v1 28 | with: 29 | version: nightly 30 | 31 | - name: Run Forge build 32 | run: | 33 | forge build 34 | 35 | - name: Prepare Release 36 | run: | 37 | export RPC_URL=$(echo $deployer_config | jq -r ".$network.rpc_url") 38 | export CODE_JAR=$(echo $deployer_config | jq -r ".$network.code_jar") 39 | script/prepare-release.sh 40 | env: 41 | deployer_config: ${{ secrets.deployer_config }} 42 | network: sepolia 43 | 44 | - uses: ncipollo/release-action@v1 45 | with: 46 | artifacts: "release/Sleuth.json,release/Sleuth.sol,release/contracts.json,release/sleuth@*" 47 | bodyFile: "release/RELEASE.md" 48 | allowUpdates: true 49 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: test 2 | 3 | on: 4 | push: 5 | 6 | env: 7 | FOUNDRY_PROFILE: ci 8 | 9 | jobs: 10 | check: 11 | strategy: 12 | fail-fast: true 13 | 14 | name: Foundry project 15 | runs-on: ubuntu-latest 16 | steps: 17 | - uses: actions/checkout@v3 18 | with: 19 | submodules: recursive 20 | 21 | - name: Install Foundry 22 | uses: foundry-rs/foundry-toolchain@v1 23 | with: 24 | version: nightly 25 | 26 | - name: Run Forge build 27 | run: | 28 | forge --version 29 | forge build --sizes 30 | id: build 31 | 32 | - name: Run Forge tests 33 | run: | 34 | forge test -vvv 35 | id: test 36 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Compiler files 2 | cache/ 3 | out/ 4 | release/ 5 | .release-tmp/ 6 | 7 | # Ignores development broadcast logs 8 | !/broadcast 9 | /broadcast/*/31337/ 10 | /broadcast/**/dry-run/ 11 | 12 | # Dotenv file 13 | .env 14 | dist 15 | node_modules 16 | yarn-error.log -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "lib/forge-std"] 2 | path = lib/forge-std 3 | url = https://github.com/foundry-rs/forge-std 4 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Sleuth 2 | 3 | 4 | 5 | ---- 6 | 7 | Sleuth is an easy way to pull data from an EVM-compatible blockchain, allowing for complex queries, similar to an ethers-multicall. Sleuth works by deploying a smart contract and then invoking it in an `eth_call`. This allows you to use complex logic to pull data from many contracts or other items such as `eth_chainId` or `eth_blockNumber`, which you can use for data analysis or in your Web3 front-end. 
For example: 8 | 9 | **MyQuery.sol** [Note: this is not deployed, and is never deployed] 10 | ```sol 11 | // SPDX-License-Identifier: UNLICENSED 12 | pragma solidity ^0.8.16; 13 | 14 | contract BlockNumber { 15 | function query() external view returns (uint256) { 16 | return block.number; 17 | } 18 | } 19 | ``` 20 | 21 | **MyView.ts** 22 | ```ts 23 | import { Sleuth } from '@compound-finance/sleuth'; 24 | 25 | let blockNumberQuery = await Sleuth.querySol(fs.readFileSync('./MyQuery.sol', 'utf8')); 26 | let sleuth = new Sleuth(provider); 27 | let blockNumber = await sleuth.fetch(blockNumberQuery); 28 | ``` 29 | 30 | You can also use pre-compiled contracts (e.g. if you check in the compilation artifacts from solc). 31 | 32 | **MyView.ts** 33 | ```ts 34 | import { Sleuth } from '@compound-finance/sleuth'; 35 | 36 | let blockNumberQuery = await Sleuth.querySol(fs.readFileSync('./out/MyQuery.json', 'utf8')); 37 | let sleuth = new Sleuth(provider); 38 | let blockNumber = await sleuth.fetch(blockNumberQuery); 39 | ``` 40 | 41 | ## Sleuth Query Language [Experimental] 42 | 43 | Sleuth also comes with a full query language, similar to SQL. You can specify contracts and load data from them. This is a WIP and subject to change. 44 | 45 | ```ts 46 | import { Sleuth } from '@compound-finance/sleuth'; 47 | 48 | let sleuth = new Sleuth(provider); 49 | 50 | // Add a source so the query language knows the shape of the contracts you'll be querying. 51 | sleuth.addSource("comet", "0xc3d688B66703497DAA19211EEdff47f25384cdc3", ["function totalSupply() returns (uint256)"]); 52 | 53 | // Build a query 54 | let q = sleuth.query<[ BigNumber ]>("SELECT comet.totalSupply FROM comet;"); 55 | 56 | // Fetch the data 57 | let [ totalSupply ] = await sleuth.fetch(q); 58 | ``` 59 | 60 | or all in one: 61 | 62 | ```ts 63 | import { Sleuth } from '@compound-finance/sleuth'; 64 | 65 | let sleuth = new Sleuth(provider); 66 | 67 | console.log(await sleuth.fetchSql(` 68 | REGISTER CONTRACT comet AT 0xc3d688B66703497DAA19211EEdff47f25384cdc3 WITH INTERFACE ["function totalSupply() returns (uint256)"]; 69 | SELECT comet.totalSupply FROM comet; 70 | `)); 71 | ``` 72 | 73 | There's a lot more work in Sleuth Query Language to do, mostly around allowing you to pull in multiple "rows" since that's a core aspect of SQL, but for one-off queries, it's quite fun! 74 | 75 | ## Getting Started 76 | 77 | Install Sleuth: 78 | 79 | ``` 80 | yarn add @compound-finance/sleuth 81 | 82 | # npm install --save @compound-finance/sleuth 83 | ``` 84 | 85 | Next, simply build a Solidity file and build Sleuth, as above, to execute the query. E.g. 86 | 87 | ```ts 88 | import { Sleuth } from '@compound-finance/sleuth'; 89 | 90 | let sleuth = new Sleuth(provider); 91 | 92 | let [name, age] = await sleuth.query(` 93 | // SPDX-License-Identifier: UNLICENSED 94 | pragma solidity ^0.8.16; 95 | 96 | contract SimpleQuery { 97 | function query() external pure returns (uint256, string memory) { 98 | return (55, "Bob Jones"); 99 | } 100 | } 101 | `); 102 | ``` 103 | 104 | ## Deploying the Sleuth contract on a network 105 | 106 | To deploy sleuth run the script under `script/mainnet/deploy.sh`, making sure to set the variables `ETHEREUM_PK` and `RPC_URL` (and optionally `ETHERSCAN_API_KEY`) 107 | 108 | ```sh 109 | ETHEREUM_PK=xxx RPC_URL=yyy ./deploy.sh 110 | ``` 111 | 112 | If you run into an `error code -32000: invalid opcode: PUSH0` error, the network may not support the `PUSH0` opcode. 
Try adding the following to `foundry.toml` to deploy on an older Ethereum fork: 113 | 114 | ```toml 115 | evm_version = "london" 116 | solc = "0.8.23" // optional, to pin solc to a specific version 117 | ``` 118 | 119 | ## Future Considerations 120 | 121 | Instead of having users build solidity files, it might be nice to build a proper query language. This could be SQL-like or ORM-style or anything that compiles to say Yul (the intermediate representation used by Solidity). We could then abstract the interface to something interesting, such as: 122 | 123 | ```ts 124 | await sleuth.query("SELECT comet.name FROM comet(0xc3...) WHERE comet.id = 5"); 125 | ``` 126 | 127 | There's so much we could do here and it sounds really fun! 128 | 129 | ### Parser 130 | 131 | There's an early version up and running, which you can use with Sleuth. See [/parser](/parser) for more information. 132 | 133 | ## License 134 | 135 | MIT 136 | 137 | Copyright 2022, Compound Labs, Inc. Geoffrey Hayes. 138 | -------------------------------------------------------------------------------- /cli/index.ts: -------------------------------------------------------------------------------- 1 | export * from './sleuth'; 2 | -------------------------------------------------------------------------------- /cli/sleuth.ts: -------------------------------------------------------------------------------- 1 | import { Provider } from '@ethersproject/providers'; 2 | import { Contract } from '@ethersproject/contracts'; 3 | import { AbiCoder, FormatTypes, FunctionFragment, Fragment, Interface, ParamType } from '@ethersproject/abi'; 4 | import { keccak256 } from '@ethersproject/keccak256'; 5 | import { getContractAddress } from '@ethersproject/address'; 6 | import { parse } from '../parser/pkg/parser'; 7 | 8 | interface Opts { 9 | network?: string, 10 | version?: number, 11 | contractAddress?: string 12 | }; 13 | 14 | const defaultOpts = { 15 | network: 'mainnet', 16 | version: 1 17 | }; 18 | 19 | const sleuthDeployer = process.env['SLEUTH_DEPLOYER'] ?? '0x84C3e20985d9E7aEc46F80d2EB52b731D8CC40F8'; 20 | 21 | interface Query { 22 | bytecode: string, 23 | callargs?: string, 24 | fn: FunctionFragment 25 | } 26 | 27 | interface Source { 28 | name: string, 29 | address: string, 30 | iface: Interface 31 | } 32 | 33 | interface SolidityQueryOpts { 34 | queryFunctionName?: string; 35 | } 36 | 37 | interface SolcInput { 38 | language?: string, 39 | sources: { 40 | [fileName: string]: { 41 | content: string 42 | } 43 | }, 44 | settings: object 45 | } 46 | 47 | interface SolcContract { 48 | evm?: { 49 | bytecode?: { 50 | object: string 51 | } 52 | }, 53 | bytecode?: { 54 | object: string 55 | }, 56 | abi: Fragment[] 57 | } 58 | 59 | interface SolcOutput { 60 | contracts: { 61 | [fileName: string]: { 62 | [contractName: string]: SolcContract 63 | } 64 | }, 65 | errors?: string[], 66 | } 67 | 68 | function solcCompile(input: SolcInput): SolcOutput { 69 | let solc; 70 | try { 71 | solc = require('solc'); 72 | } catch (e) { 73 | throw new Error(`solc.js yarn dependency not found. Please build with optional dependencies included`); 74 | } 75 | return JSON.parse(solc.compile(JSON.stringify(input))); 76 | } 77 | 78 | function hexify(v: string): string { 79 | return v.startsWith('0x') ? 
v : `0x${v}`; 80 | } 81 | 82 | export class Sleuth { 83 | provider: Provider; 84 | network: string; 85 | version: number; 86 | sleuthAddr: string; 87 | sources: Source[]; 88 | coder: AbiCoder; 89 | 90 | constructor(provider: Provider, opts: Opts = {}) { 91 | this.provider = provider; 92 | this.network = opts.network ?? defaultOpts.network; 93 | this.version = opts.version ?? defaultOpts.version; 94 | this.sleuthAddr = opts.contractAddress ?? getContractAddress({ from: sleuthDeployer, nonce: this.version - 1 }); 95 | this.sources = []; 96 | this.coder = new AbiCoder(); 97 | } 98 | 99 | query(q: string): Query { 100 | let registrations = this.sources.map((source) => { 101 | let iface = JSON.stringify(source.iface.format(FormatTypes.full)); 102 | return `REGISTER CONTRACT ${source.name} AT ${source.address} WITH INTERFACE ${iface};` 103 | }).join("\n"); 104 | let fullQuery = `${registrations}${q}`; 105 | console.log("Full Query", fullQuery); 106 | let [tuple, yul] = parse(fullQuery).split(';', 2); 107 | console.log("Tuple", tuple, "Yul", yul); 108 | const input = { 109 | language: 'Yul', 110 | sources: { 111 | 'query.yul': { 112 | content: yul 113 | } 114 | }, 115 | settings: { 116 | outputSelection: { 117 | '*': { 118 | '*': ['*'] 119 | } 120 | } 121 | } 122 | }; 123 | 124 | let result = solcCompile(input); 125 | console.log(result.contracts['query.yul']); 126 | if (result.errors && result.errors.length > 0) { 127 | throw new Error("Compilation Error: " + JSON.stringify(result.errors)); 128 | } 129 | 130 | let bytecode = result?.contracts['query.yul']?.Query?.evm?.bytecode?.object; 131 | 132 | if (!bytecode) { 133 | throw new Error(`Missing bytecode from compilation result: ${JSON.stringify(result)}`); 134 | } 135 | 136 | return { 137 | bytecode: bytecode, 138 | fn: FunctionFragment.from({ 139 | name: 'query', 140 | inputs: [], 141 | outputs: ParamType.from(tuple).components, 142 | stateMutability: 'pure', 143 | type: 'function' 144 | }) 145 | }; 146 | } 147 | 148 | static querySol(q: string | object, opts: SolidityQueryOpts = {}): Query { 149 | if (typeof(q) === 'string') { 150 | let r; 151 | try { 152 | // Try to parse as JSON, if that fails, then consider a query 153 | r = JSON.parse(q); 154 | } catch (e) { 155 | // Ignore 156 | } 157 | 158 | if (r) { 159 | return this.querySolOutput(r, opts); 160 | } else { 161 | // This must be a source file, try to compile 162 | return this.querySolSource(q, opts); 163 | } 164 | 165 | } else { 166 | // This was passed in as a pre-parsed contract. Or at least, it should have been. 167 | return this.querySolOutput(q as SolcContract, opts); 168 | } 169 | } 170 | 171 | static querySolOutput(c: SolcContract, opts: SolidityQueryOpts = {}): Query { 172 | let queryFunctionName = opts.queryFunctionName ?? 'query'; 173 | let b = c.evm?.bytecode?.object ?? c.bytecode?.object; 174 | if (!b) { 175 | throw new Error(`Missing (evm.)bytecode.object in contract ${JSON.stringify(c, null, 4)}`); 176 | } 177 | let abi = c.abi; 178 | let queryAbi = abi.find(({type, name}: any) => type === 'function' && name === queryFunctionName); 179 | if (!queryAbi) { 180 | throw new Error(`Query must include function \`${queryFunctionName}()\``); 181 | } 182 | 183 | return { 184 | bytecode: b, 185 | fn: queryAbi as FunctionFragment 186 | }; 187 | } 188 | 189 | static querySolSource(q: string, opts: SolidityQueryOpts = {}): Query { 190 | let fnName = opts.queryFunctionName ?? 
'query'; 191 | let input = { 192 | language: 'Solidity', 193 | sources: { 194 | 'query.sol': { 195 | content: q 196 | } 197 | }, 198 | settings: { 199 | outputSelection: { 200 | '*': { 201 | '*': ['*'] 202 | } 203 | } 204 | } 205 | }; 206 | 207 | let result = solcCompile(input); 208 | if (result.errors && result.errors.length > 0) { 209 | throw new Error("Compilation Error: " + JSON.stringify(result.errors)); 210 | } 211 | let contract = result.contracts['query.sol']; 212 | if (!contract) { 213 | throw new Error(`Missing query.sol compiled contract in ${JSON.stringify(Object.keys(result.contracts))}`); 214 | } 215 | let c = Object.values(contract)[0] as any; 216 | if (!c) { 217 | throw new Error(`Query does not contain any contract definitions`); 218 | } else if (Object.keys(contract).length > 1) { 219 | console.warn(`Query contains multiple contracts, using ${Object.keys(contract)[0]}`); 220 | } 221 | return this.querySolOutput(c, opts); 222 | } 223 | 224 | async addSource(name: string, address: string, iface: string[] | Interface) { 225 | if (Array.isArray(iface)) { 226 | iface = new Interface(iface); 227 | } 228 | this.sources.push({name, address, iface}); 229 | } 230 | 231 | async fetch(q: Query, args?: A): Promise { 232 | let sleuthCtx = new Contract(this.sleuthAddr, [ 233 | 'function query(bytes,bytes) public view returns (bytes)' 234 | ], this.provider); 235 | let iface = new Interface([q.fn]); 236 | let argsCoded = iface.encodeFunctionData(q.fn.name, args ?? []); 237 | let queryResult = await sleuthCtx.query(hexify(q.bytecode), argsCoded); 238 | console.log(q.fn); 239 | console.log(queryResult); 240 | let r = this.coder.decode(q.fn.outputs ?? [], queryResult) as unknown; 241 | if (Array.isArray(r) && r.length === 1) { 242 | return r[0] as T; 243 | } else { 244 | return r as T; 245 | } 246 | } 247 | 248 | async fetchSql(q: string): Promise { 249 | let query = this.query(q); 250 | return this.fetch(query, []); 251 | } 252 | } 253 | -------------------------------------------------------------------------------- /cli/test/sleuth.test.ts: -------------------------------------------------------------------------------- 1 | import { Sleuth } from '../sleuth'; 2 | import { BigNumber } from '@ethersproject/bignumber'; 3 | import { Provider, JsonRpcProvider } from '@ethersproject/providers'; 4 | import * as fs from 'fs/promises'; 5 | import * as path from 'path'; 6 | 7 | describe('testing sleuthing', () => { 8 | let provider: Provider; 9 | 10 | beforeAll(() => { 11 | provider = new JsonRpcProvider('http://127.0.0.1:8599'); 12 | }); 13 | 14 | test('should return the block number via compilation', async () => { 15 | let sleuth = new Sleuth(provider); 16 | let solidity = await fs.readFile(path.join(__dirname, '../../src/examples/BlockNumber.sol'), 'utf8'); 17 | let res = await sleuth.fetch(Sleuth.querySol(solidity)); 18 | expect(res.toNumber()).toBe(1); 19 | }); 20 | 21 | test('should return the block number via precompile', async () => { 22 | let sleuth = new Sleuth(provider); 23 | let solidity = await fs.readFile(path.join(__dirname, '../../out/BlockNumber.sol/BlockNumber.json'), 'utf8'); 24 | console.log({solidity}) 25 | let res = await sleuth.fetch(Sleuth.querySol(solidity)); 26 | console.log("res", res); 27 | expect(res.toNumber()).toBe(1); 28 | }); 29 | 30 | test('should handle args', async () => { 31 | let sleuth = new Sleuth(provider); 32 | let solidity = await fs.readFile(path.join(__dirname, '../../out/Birthday.sol/Birthday.json'), 'utf8'); 33 | console.log({solidity}) 34 | let res = 
await sleuth.fetch(Sleuth.querySol(solidity), [5]); 35 | console.log("res", res); 36 | expect(res.toNumber()).toBe(6); 37 | }); 38 | 39 | test('should return the pair', async () => { 40 | let sleuth = new Sleuth(provider); 41 | let solidity = await fs.readFile(path.join(__dirname, '../../src/examples/Pair.sol'), 'utf8'); 42 | let res = await sleuth.fetch(Sleuth.querySol<[BigNumber, string]>(solidity)); 43 | console.log(res); 44 | expect(res[0].toNumber()).toBe(55); 45 | expect(res[1]).toEqual("hello"); 46 | }); 47 | 48 | test('should fail invalid', async () => { 49 | let sleuth = new Sleuth(provider); 50 | expect(() => sleuth.query("INSERT INTO users;")).toThrow(); 51 | }); 52 | 53 | test('should parse sleuth', async () => { 54 | let sleuth = new Sleuth(provider); 55 | let q = sleuth.query("SELECT block.number FROM block;"); 56 | let number = await sleuth.fetch(q); 57 | // TODO: Check why named return types aren't working 58 | expect(number.toNumber()).toEqual(1); 59 | }); 60 | 61 | test('should parse sleuth too', async () => { 62 | let sleuth = new Sleuth(provider); 63 | let q = sleuth.query<[BigNumber, string, BigNumber]>("SELECT block.number, \"dog\", 22 FROM block;"); 64 | let [number, animal, age] = await sleuth.fetch(q); 65 | expect(number.toNumber()).toEqual(1); 66 | expect(animal).toEqual("dog"); 67 | expect(age.toNumber()).toEqual(22); 68 | }); 69 | 70 | test('including a call', async () => { 71 | let sleuth = new Sleuth(provider); 72 | sleuth.addSource("comet", "0xc3d688B66703497DAA19211EEdff47f25384cdc3", ["function totalSupply() returns (uint256)"]); 73 | let q = sleuth.query<[ BigNumber ]>("SELECT comet.totalSupply FROM comet;"); 74 | let [ totalSupply ] = await sleuth.fetch(q); 75 | // TODO: Check why named return types aren't working 76 | expect(totalSupply.toNumber()).toEqual(160); 77 | }); 78 | 79 | test('fetchSql query', async () => { 80 | let sleuth = new Sleuth(provider); 81 | let [ totalSupply ] = await sleuth.fetchSql<[ BigNumber ]>(` 82 | REGISTER CONTRACT comet AT 0xc3d688B66703497DAA19211EEdff47f25384cdc3 WITH INTERFACE ["function totalSupply() returns (uint256)"]; 83 | SELECT comet.totalSupply FROM comet; 84 | `); 85 | expect(totalSupply.toNumber()).toEqual(160); 86 | }); 87 | }); 88 | -------------------------------------------------------------------------------- /foundry.toml: -------------------------------------------------------------------------------- 1 | [profile.default] 2 | src = 'src' 3 | out = 'out' 4 | libs = ['lib'] 5 | fs_permissions = [ 6 | { access = "read", path = "./out"}, 7 | { access = "read", path = "./.release-tmp"} 8 | ] 9 | 10 | # See more config options https://github.com/foundry-rs/foundry/tree/master/config -------------------------------------------------------------------------------- /jest.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | transform: {"^.+\\.(t|j)sx?$": ["@swc/jest"]}, 3 | testEnvironment: 'node', 4 | testRegex: 'cli/test/.*\\.(test|spec)?\\.(ts|tsx)$', 5 | moduleFileExtensions: ['ts', 'tsx', 'js', 'jsx', 'json', 'node'], 6 | transformIgnorePatterns: ["node_modules/.*"], 7 | }; 8 | -------------------------------------------------------------------------------- /logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/compound-finance/sleuth/b46d95cb5e75ca97caae2dea06837ef95c426589/logo.png -------------------------------------------------------------------------------- 
/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@compound-finance/sleuth", 3 | "version": "1.0.1-alpha4", 4 | "main": "dist/index.js", 5 | "types": "dist/index.d.ts", 6 | "files": ["dist/**/*", "parser/pkg/**/*"], 7 | "repository": "https://github.com/compound-finance/sleuth", 8 | "author": "Geoffrey Hayes ", 9 | "license": "MIT", 10 | "scripts": { 11 | "test": "script/test.sh" 12 | }, 13 | "devDependencies": { 14 | "@swc/core": "^1.3.22", 15 | "@swc/jest": "^0.2.24", 16 | "@types/jest": "^29.2.4", 17 | "jest": "^29.3.1", 18 | "ts-node": "^10.9.1", 19 | "typescript": "^4.9.4" 20 | }, 21 | "dependencies": { 22 | "@ethersproject/contracts": "^5.7.0", 23 | "@ethersproject/providers": "^5.7.2" 24 | }, 25 | "optionalDependencies": { 26 | "solc": "^0.8.17" 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /parser/.appveyor.yml: -------------------------------------------------------------------------------- 1 | install: 2 | - appveyor-retry appveyor DownloadFile https://win.rustup.rs/ -FileName rustup-init.exe 3 | - if not defined RUSTFLAGS rustup-init.exe -y --default-host x86_64-pc-windows-msvc --default-toolchain nightly 4 | - set PATH=%PATH%;C:\Users\appveyor\.cargo\bin 5 | - rustc -V 6 | - cargo -V 7 | 8 | build: false 9 | 10 | test_script: 11 | - cargo test --locked 12 | -------------------------------------------------------------------------------- /parser/.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | **/*.rs.bk 3 | Cargo.lock 4 | bin/ 5 | wasm-pack.log 6 | -------------------------------------------------------------------------------- /parser/.travis.yml: -------------------------------------------------------------------------------- 1 | language: rust 2 | sudo: false 3 | 4 | cache: cargo 5 | 6 | matrix: 7 | include: 8 | 9 | # Builds with wasm-pack. 10 | - rust: beta 11 | env: RUST_BACKTRACE=1 12 | addons: 13 | firefox: latest 14 | chrome: stable 15 | before_script: 16 | - (test -x $HOME/.cargo/bin/cargo-install-update || cargo install cargo-update) 17 | - (test -x $HOME/.cargo/bin/cargo-generate || cargo install --vers "^0.2" cargo-generate) 18 | - cargo install-update -a 19 | - curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh -s -- -f 20 | script: 21 | - cargo generate --git . --name testing 22 | # Having a broken Cargo.toml (in that it has curlies in fields) anywhere 23 | # in any of our parent dirs is problematic. 24 | - mv Cargo.toml Cargo.toml.tmpl 25 | - cd testing 26 | - wasm-pack build 27 | - wasm-pack test --chrome --firefox --headless 28 | 29 | # Builds on nightly. 30 | - rust: nightly 31 | env: RUST_BACKTRACE=1 32 | before_script: 33 | - (test -x $HOME/.cargo/bin/cargo-install-update || cargo install cargo-update) 34 | - (test -x $HOME/.cargo/bin/cargo-generate || cargo install --vers "^0.2" cargo-generate) 35 | - cargo install-update -a 36 | - rustup target add wasm32-unknown-unknown 37 | script: 38 | - cargo generate --git . 
--name testing 39 | - mv Cargo.toml Cargo.toml.tmpl 40 | - cd testing 41 | - cargo check 42 | - cargo check --target wasm32-unknown-unknown 43 | - cargo check --no-default-features 44 | - cargo check --target wasm32-unknown-unknown --no-default-features 45 | - cargo check --no-default-features --features console_error_panic_hook 46 | - cargo check --target wasm32-unknown-unknown --no-default-features --features console_error_panic_hook 47 | - cargo check --no-default-features --features "console_error_panic_hook wee_alloc" 48 | - cargo check --target wasm32-unknown-unknown --no-default-features --features "console_error_panic_hook wee_alloc" 49 | 50 | # Builds on beta. 51 | - rust: beta 52 | env: RUST_BACKTRACE=1 53 | before_script: 54 | - (test -x $HOME/.cargo/bin/cargo-install-update || cargo install cargo-update) 55 | - (test -x $HOME/.cargo/bin/cargo-generate || cargo install --vers "^0.2" cargo-generate) 56 | - cargo install-update -a 57 | - rustup target add wasm32-unknown-unknown 58 | script: 59 | - cargo generate --git . --name testing 60 | - mv Cargo.toml Cargo.toml.tmpl 61 | - cd testing 62 | - cargo check 63 | - cargo check --target wasm32-unknown-unknown 64 | - cargo check --no-default-features 65 | - cargo check --target wasm32-unknown-unknown --no-default-features 66 | - cargo check --no-default-features --features console_error_panic_hook 67 | - cargo check --target wasm32-unknown-unknown --no-default-features --features console_error_panic_hook 68 | # Note: no enabling the `wee_alloc` feature here because it requires 69 | # nightly for now. 70 | -------------------------------------------------------------------------------- /parser/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "parser" 3 | version = "0.1.0" 4 | authors = ["Geoffrey Hayes "] 5 | edition = "2018" 6 | 7 | [lib] 8 | crate-type = ["cdylib", "rlib"] 9 | 10 | [features] 11 | default = ["console_error_panic_hook"] 12 | 13 | [dependencies] 14 | wasm-bindgen = "0.2.63" 15 | 16 | # The `console_error_panic_hook` crate provides better debugging of panics by 17 | # logging them with `console.error`. This is great for development, but requires 18 | # all the `std::fmt` and `std::panicking` infrastructure, so isn't great for 19 | # code size when deploying. 20 | console_error_panic_hook = { version = "0.1.6", optional = true } 21 | 22 | # `wee_alloc` is a tiny allocator for wasm that is only ~1K in code size 23 | # compared to the default allocator's ~10K. It is slower than the default 24 | # allocator, however. 25 | wee_alloc = { version = "0.4.5", optional = true } 26 | 27 | pest = "2.5.1" 28 | pest_derive = "2.5.1" 29 | ethers = "1.0.2" 30 | 31 | [dev-dependencies] 32 | wasm-bindgen-test = "0.3.13" 33 | 34 | [profile.release] 35 | # Tell `rustc` to optimize for small code size. 36 | opt-level = "s" 37 | -------------------------------------------------------------------------------- /parser/LICENSE_APACHE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 
14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 
134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 
193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /parser/LICENSE_MIT: -------------------------------------------------------------------------------- 1 | Copyright (c) 2018 Geoffrey Hayes 2 | 3 | Permission is hereby granted, free of charge, to any 4 | person obtaining a copy of this software and associated 5 | documentation files (the "Software"), to deal in the 6 | Software without restriction, including without 7 | limitation the rights to use, copy, modify, merge, 8 | publish, distribute, sublicense, and/or sell copies of 9 | the Software, and to permit persons to whom the Software 10 | is furnished to do so, subject to the following 11 | conditions: 12 | 13 | The above copyright notice and this permission notice 14 | shall be included in all copies or substantial portions 15 | of the Software. 16 | 17 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF 18 | ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED 19 | TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 20 | PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT 21 | SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY 22 | CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 23 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR 24 | IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER 25 | DEALINGS IN THE SOFTWARE. 26 | -------------------------------------------------------------------------------- /parser/README.md: -------------------------------------------------------------------------------- 1 |
2 | 
3 |   wasm-pack-template
4 | 
5 |   A template for kick starting a Rust and WebAssembly project using wasm-pack.
6 | 
7 | 
8 |   Build Status
9 | 
10 | 
11 | 
12 |   Tutorial
13 |   |
14 |   Chat
15 | 
16 | 
17 |   Built with 🦀🕸 by The Rust and WebAssembly Working Group
18 | 
19 | 20 | ## About 21 | 22 | [**📚 Read this template tutorial! 📚**][template-docs] 23 | 24 | This template is designed for compiling Rust libraries into WebAssembly and 25 | publishing the resulting package to NPM. 26 | 27 | Be sure to check out [other `wasm-pack` tutorials online][tutorials] for other 28 | templates and usages of `wasm-pack`. 29 | 30 | [tutorials]: https://rustwasm.github.io/docs/wasm-pack/tutorials/index.html 31 | [template-docs]: https://rustwasm.github.io/docs/wasm-pack/tutorials/npm-browser-packages/index.html 32 | 33 | ## 🚴 Usage 34 | 35 | ### 🐑 Use `cargo generate` to Clone this Template 36 | 37 | [Learn more about `cargo generate` here.](https://github.com/ashleygwilliams/cargo-generate) 38 | 39 | ``` 40 | cargo generate --git https://github.com/rustwasm/wasm-pack-template.git --name my-project 41 | cd my-project 42 | ``` 43 | 44 | ### 🛠️ Build with `wasm-pack build` 45 | 46 | ``` 47 | wasm-pack build -t nodejs 48 | ``` 49 | 50 | ### 🔬 Test in Headless Browsers with `wasm-pack test` 51 | 52 | ``` 53 | wasm-pack test --headless --firefox 54 | ``` 55 | 56 | ### 🎁 Publish to NPM with `wasm-pack publish` 57 | 58 | ``` 59 | wasm-pack publish 60 | ``` 61 | 62 | ## 🔋 Batteries Included 63 | 64 | * [`wasm-bindgen`](https://github.com/rustwasm/wasm-bindgen) for communicating 65 | between WebAssembly and JavaScript. 66 | * [`console_error_panic_hook`](https://github.com/rustwasm/console_error_panic_hook) 67 | for logging panic messages to the developer console. 68 | * [`wee_alloc`](https://github.com/rustwasm/wee_alloc), an allocator optimized 69 | for small code size. 70 | * `LICENSE-APACHE` and `LICENSE-MIT`: most Rust projects are licensed this way, so these are included for you 71 | 72 | ## License 73 | 74 | Licensed under either of 75 | 76 | * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0) 77 | * MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT) 78 | 79 | at your option. 80 | 81 | ### Contribution 82 | 83 | Unless you explicitly state otherwise, any contribution intentionally 84 | submitted for inclusion in the work by you, as defined in the Apache-2.0 85 | license, shall be dual licensed as above, without any additional terms or 86 | conditions. -------------------------------------------------------------------------------- /parser/pkg/README.md: -------------------------------------------------------------------------------- 1 |
2 | 
3 |   wasm-pack-template
4 | 
5 |   A template for kick starting a Rust and WebAssembly project using wasm-pack.
6 | 
7 | 
8 |   Build Status
9 | 
10 | 
11 | 
12 |   Tutorial
13 |   |
14 |   Chat
15 | 
16 | 
17 |   Built with 🦀🕸 by The Rust and WebAssembly Working Group
18 | 
19 | 20 | ## About 21 | 22 | [**📚 Read this template tutorial! 📚**][template-docs] 23 | 24 | This template is designed for compiling Rust libraries into WebAssembly and 25 | publishing the resulting package to NPM. 26 | 27 | Be sure to check out [other `wasm-pack` tutorials online][tutorials] for other 28 | templates and usages of `wasm-pack`. 29 | 30 | [tutorials]: https://rustwasm.github.io/docs/wasm-pack/tutorials/index.html 31 | [template-docs]: https://rustwasm.github.io/docs/wasm-pack/tutorials/npm-browser-packages/index.html 32 | 33 | ## 🚴 Usage 34 | 35 | ### 🐑 Use `cargo generate` to Clone this Template 36 | 37 | [Learn more about `cargo generate` here.](https://github.com/ashleygwilliams/cargo-generate) 38 | 39 | ``` 40 | cargo generate --git https://github.com/rustwasm/wasm-pack-template.git --name my-project 41 | cd my-project 42 | ``` 43 | 44 | ### 🛠️ Build with `wasm-pack build` 45 | 46 | ``` 47 | wasm-pack build -t nodejs 48 | ``` 49 | 50 | ### 🔬 Test in Headless Browsers with `wasm-pack test` 51 | 52 | ``` 53 | wasm-pack test --headless --firefox 54 | ``` 55 | 56 | ### 🎁 Publish to NPM with `wasm-pack publish` 57 | 58 | ``` 59 | wasm-pack publish 60 | ``` 61 | 62 | ## 🔋 Batteries Included 63 | 64 | * [`wasm-bindgen`](https://github.com/rustwasm/wasm-bindgen) for communicating 65 | between WebAssembly and JavaScript. 66 | * [`console_error_panic_hook`](https://github.com/rustwasm/console_error_panic_hook) 67 | for logging panic messages to the developer console. 68 | * [`wee_alloc`](https://github.com/rustwasm/wee_alloc), an allocator optimized 69 | for small code size. 70 | * `LICENSE-APACHE` and `LICENSE-MIT`: most Rust projects are licensed this way, so these are included for you 71 | 72 | ## License 73 | 74 | Licensed under either of 75 | 76 | * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0) 77 | * MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT) 78 | 79 | at your option. 80 | 81 | ### Contribution 82 | 83 | Unless you explicitly state otherwise, any contribution intentionally 84 | submitted for inclusion in the work by you, as defined in the Apache-2.0 85 | license, shall be dual licensed as above, without any additional terms or 86 | conditions. 
-------------------------------------------------------------------------------- /parser/pkg/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "parser", 3 | "collaborators": [ 4 | "Geoffrey Hayes " 5 | ], 6 | "version": "0.1.0", 7 | "files": [ 8 | "parser_bg.wasm", 9 | "parser.js", 10 | "parser.d.ts" 11 | ], 12 | "main": "parser.js", 13 | "types": "parser.d.ts" 14 | } -------------------------------------------------------------------------------- /parser/pkg/parser.d.ts: -------------------------------------------------------------------------------- 1 | /* tslint:disable */ 2 | /* eslint-disable */ 3 | /** 4 | * @param {string} query 5 | * @returns {string} 6 | */ 7 | export function parse(query: string): string; 8 | -------------------------------------------------------------------------------- /parser/pkg/parser.js: -------------------------------------------------------------------------------- 1 | let imports = {}; 2 | imports['__wbindgen_placeholder__'] = module.exports; 3 | let wasm; 4 | const { TextDecoder, TextEncoder } = require(`util`); 5 | 6 | let cachedTextDecoder = new TextDecoder('utf-8', { ignoreBOM: true, fatal: true }); 7 | 8 | cachedTextDecoder.decode(); 9 | 10 | let cachedUint8Memory0 = new Uint8Array(); 11 | 12 | function getUint8Memory0() { 13 | if (cachedUint8Memory0.byteLength === 0) { 14 | cachedUint8Memory0 = new Uint8Array(wasm.memory.buffer); 15 | } 16 | return cachedUint8Memory0; 17 | } 18 | 19 | function getStringFromWasm0(ptr, len) { 20 | return cachedTextDecoder.decode(getUint8Memory0().subarray(ptr, ptr + len)); 21 | } 22 | 23 | const heap = new Array(32).fill(undefined); 24 | 25 | heap.push(undefined, null, true, false); 26 | 27 | let heap_next = heap.length; 28 | 29 | function addHeapObject(obj) { 30 | if (heap_next === heap.length) heap.push(heap.length + 1); 31 | const idx = heap_next; 32 | heap_next = heap[idx]; 33 | 34 | heap[idx] = obj; 35 | return idx; 36 | } 37 | 38 | let WASM_VECTOR_LEN = 0; 39 | 40 | let cachedTextEncoder = new TextEncoder('utf-8'); 41 | 42 | const encodeString = (typeof cachedTextEncoder.encodeInto === 'function' 43 | ? 
function (arg, view) { 44 | return cachedTextEncoder.encodeInto(arg, view); 45 | } 46 | : function (arg, view) { 47 | const buf = cachedTextEncoder.encode(arg); 48 | view.set(buf); 49 | return { 50 | read: arg.length, 51 | written: buf.length 52 | }; 53 | }); 54 | 55 | function passStringToWasm0(arg, malloc, realloc) { 56 | 57 | if (realloc === undefined) { 58 | const buf = cachedTextEncoder.encode(arg); 59 | const ptr = malloc(buf.length); 60 | getUint8Memory0().subarray(ptr, ptr + buf.length).set(buf); 61 | WASM_VECTOR_LEN = buf.length; 62 | return ptr; 63 | } 64 | 65 | let len = arg.length; 66 | let ptr = malloc(len); 67 | 68 | const mem = getUint8Memory0(); 69 | 70 | let offset = 0; 71 | 72 | for (; offset < len; offset++) { 73 | const code = arg.charCodeAt(offset); 74 | if (code > 0x7F) break; 75 | mem[ptr + offset] = code; 76 | } 77 | 78 | if (offset !== len) { 79 | if (offset !== 0) { 80 | arg = arg.slice(offset); 81 | } 82 | ptr = realloc(ptr, len, len = offset + arg.length * 3); 83 | const view = getUint8Memory0().subarray(ptr + offset, ptr + len); 84 | const ret = encodeString(arg, view); 85 | 86 | offset += ret.written; 87 | } 88 | 89 | WASM_VECTOR_LEN = offset; 90 | return ptr; 91 | } 92 | 93 | let cachedInt32Memory0 = new Int32Array(); 94 | 95 | function getInt32Memory0() { 96 | if (cachedInt32Memory0.byteLength === 0) { 97 | cachedInt32Memory0 = new Int32Array(wasm.memory.buffer); 98 | } 99 | return cachedInt32Memory0; 100 | } 101 | 102 | function getObject(idx) { return heap[idx]; } 103 | 104 | function dropObject(idx) { 105 | if (idx < 36) return; 106 | heap[idx] = heap_next; 107 | heap_next = idx; 108 | } 109 | 110 | function takeObject(idx) { 111 | const ret = getObject(idx); 112 | dropObject(idx); 113 | return ret; 114 | } 115 | /** 116 | * @param {string} query 117 | * @returns {string} 118 | */ 119 | module.exports.parse = function(query) { 120 | try { 121 | const retptr = wasm.__wbindgen_add_to_stack_pointer(-16); 122 | const ptr0 = passStringToWasm0(query, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc); 123 | const len0 = WASM_VECTOR_LEN; 124 | wasm.parse(retptr, ptr0, len0); 125 | var r0 = getInt32Memory0()[retptr / 4 + 0]; 126 | var r1 = getInt32Memory0()[retptr / 4 + 1]; 127 | var r2 = getInt32Memory0()[retptr / 4 + 2]; 128 | var r3 = getInt32Memory0()[retptr / 4 + 3]; 129 | var ptr1 = r0; 130 | var len1 = r1; 131 | if (r3) { 132 | ptr1 = 0; len1 = 0; 133 | throw takeObject(r2); 134 | } 135 | return getStringFromWasm0(ptr1, len1); 136 | } finally { 137 | wasm.__wbindgen_add_to_stack_pointer(16); 138 | wasm.__wbindgen_free(ptr1, len1); 139 | } 140 | }; 141 | 142 | module.exports.__wbindgen_string_new = function(arg0, arg1) { 143 | const ret = getStringFromWasm0(arg0, arg1); 144 | return addHeapObject(ret); 145 | }; 146 | 147 | module.exports.__wbindgen_throw = function(arg0, arg1) { 148 | throw new Error(getStringFromWasm0(arg0, arg1)); 149 | }; 150 | 151 | const path = require('path').join(__dirname, 'parser_bg.wasm'); 152 | const bytes = require('fs').readFileSync(path); 153 | 154 | const wasmModule = new WebAssembly.Module(bytes); 155 | const wasmInstance = new WebAssembly.Instance(wasmModule, imports); 156 | wasm = wasmInstance.exports; 157 | module.exports.__wasm = wasm; 158 | 159 | -------------------------------------------------------------------------------- /parser/pkg/parser_bg.js: -------------------------------------------------------------------------------- 1 | import * as wasm from './parser_bg.wasm'; 2 | 3 | const lTextDecoder = typeof 
TextDecoder === 'undefined' ? (0, module.require)('util').TextDecoder : TextDecoder; 4 | 5 | let cachedTextDecoder = new lTextDecoder('utf-8', { ignoreBOM: true, fatal: true }); 6 | 7 | cachedTextDecoder.decode(); 8 | 9 | let cachedUint8Memory0 = new Uint8Array(); 10 | 11 | function getUint8Memory0() { 12 | if (cachedUint8Memory0.byteLength === 0) { 13 | cachedUint8Memory0 = new Uint8Array(wasm.memory.buffer); 14 | } 15 | return cachedUint8Memory0; 16 | } 17 | 18 | function getStringFromWasm0(ptr, len) { 19 | return cachedTextDecoder.decode(getUint8Memory0().subarray(ptr, ptr + len)); 20 | } 21 | /** 22 | */ 23 | export function greet() { 24 | wasm.greet(); 25 | } 26 | 27 | let WASM_VECTOR_LEN = 0; 28 | 29 | const lTextEncoder = typeof TextEncoder === 'undefined' ? (0, module.require)('util').TextEncoder : TextEncoder; 30 | 31 | let cachedTextEncoder = new lTextEncoder('utf-8'); 32 | 33 | const encodeString = (typeof cachedTextEncoder.encodeInto === 'function' 34 | ? function (arg, view) { 35 | return cachedTextEncoder.encodeInto(arg, view); 36 | } 37 | : function (arg, view) { 38 | const buf = cachedTextEncoder.encode(arg); 39 | view.set(buf); 40 | return { 41 | read: arg.length, 42 | written: buf.length 43 | }; 44 | }); 45 | 46 | function passStringToWasm0(arg, malloc, realloc) { 47 | 48 | if (realloc === undefined) { 49 | const buf = cachedTextEncoder.encode(arg); 50 | const ptr = malloc(buf.length); 51 | getUint8Memory0().subarray(ptr, ptr + buf.length).set(buf); 52 | WASM_VECTOR_LEN = buf.length; 53 | return ptr; 54 | } 55 | 56 | let len = arg.length; 57 | let ptr = malloc(len); 58 | 59 | const mem = getUint8Memory0(); 60 | 61 | let offset = 0; 62 | 63 | for (; offset < len; offset++) { 64 | const code = arg.charCodeAt(offset); 65 | if (code > 0x7F) break; 66 | mem[ptr + offset] = code; 67 | } 68 | 69 | if (offset !== len) { 70 | if (offset !== 0) { 71 | arg = arg.slice(offset); 72 | } 73 | ptr = realloc(ptr, len, len = offset + arg.length * 3); 74 | const view = getUint8Memory0().subarray(ptr + offset, ptr + len); 75 | const ret = encodeString(arg, view); 76 | 77 | offset += ret.written; 78 | } 79 | 80 | WASM_VECTOR_LEN = offset; 81 | return ptr; 82 | } 83 | 84 | let cachedInt32Memory0 = new Int32Array(); 85 | 86 | function getInt32Memory0() { 87 | if (cachedInt32Memory0.byteLength === 0) { 88 | cachedInt32Memory0 = new Int32Array(wasm.memory.buffer); 89 | } 90 | return cachedInt32Memory0; 91 | } 92 | /** 93 | * @param {string} query 94 | * @returns {string} 95 | */ 96 | export function parse(query) { 97 | try { 98 | const retptr = wasm.__wbindgen_add_to_stack_pointer(-16); 99 | const ptr0 = passStringToWasm0(query, wasm.__wbindgen_malloc, wasm.__wbindgen_realloc); 100 | const len0 = WASM_VECTOR_LEN; 101 | wasm.parse(retptr, ptr0, len0); 102 | var r0 = getInt32Memory0()[retptr / 4 + 0]; 103 | var r1 = getInt32Memory0()[retptr / 4 + 1]; 104 | return getStringFromWasm0(r0, r1); 105 | } finally { 106 | wasm.__wbindgen_add_to_stack_pointer(16); 107 | wasm.__wbindgen_free(r0, r1); 108 | } 109 | } 110 | 111 | export function __wbg_alert_29566d116cc8ead8(arg0, arg1) { 112 | alert(getStringFromWasm0(arg0, arg1)); 113 | }; 114 | 115 | -------------------------------------------------------------------------------- /parser/pkg/parser_bg.wasm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/compound-finance/sleuth/b46d95cb5e75ca97caae2dea06837ef95c426589/parser/pkg/parser_bg.wasm 
--------------------------------------------------------------------------------
/parser/pkg/parser_bg.wasm.d.ts:
--------------------------------------------------------------------------------
1 | /* tslint:disable */
2 | /* eslint-disable */
3 | export const memory: WebAssembly.Memory;
4 | export function parse(a: number, b: number, c: number): void;
5 | export function __wbindgen_add_to_stack_pointer(a: number): number;
6 | export function __wbindgen_malloc(a: number): number;
7 | export function __wbindgen_realloc(a: number, b: number, c: number): number;
8 | export function __wbindgen_free(a: number, b: number): void;
--------------------------------------------------------------------------------
/parser/src/abi.rs:
--------------------------------------------------------------------------------
1 | use crate::resolve::Resolution;
2 | use ethers::abi::param_type::ParamType;
3 | use ethers::abi::struct_def::FieldType;
4 | 
5 | fn param_type(p: &ParamType) -> String {
6 |     match p {
7 |         ParamType::Address => String::from("address"),
8 |         ParamType::Bytes => String::from("bytes"),
9 |         ParamType::Int(sz) => format!("int{}", sz),
10 |         ParamType::Uint(sz) => format!("uint{}", sz),
11 |         ParamType::Bool => String::from("bool"),
12 |         ParamType::String => String::from("string"),
13 |         ParamType::Array(pp) => format!("{}[]", param_type(&*pp)),
14 |         ParamType::FixedBytes(sz) => format!("bytes{}", sz),
15 |         ParamType::FixedArray(pp, sz) => format!("{}[{}]", param_type(&*pp), sz),
16 |         ParamType::Tuple(els) => {
17 |             let inner = els
18 |                 .iter()
19 |                 .map(param_type)
20 |                 .collect::<Vec<String>>()
21 |                 .join(",");
22 |             format!("tuple({})", inner)
23 |         }
24 |     }
25 | }
26 | 
27 | fn field_type(ty: &FieldType) -> String {
28 |     match ty {
29 |         FieldType::Elementary(p) => param_type(p),
30 |         _ => unreachable!(),
31 |     }
32 | }
33 | 
34 | pub fn get_tuple_abi(resolutions: &Vec<Resolution>) -> String {
35 |     let fields = resolutions
36 |         .iter()
37 |         .map(|r: &Resolution| {
38 |             let field_ty = field_type(&r.abi);
39 | 
40 |             match &r.name {
41 |                 Some(name) => {
42 |                     format!("{} {}", field_ty, name)
43 |                 }
44 |                 _ => field_ty,
45 |             }
46 |         })
47 |         .collect::<Vec<String>>()
48 |         .join(",");
49 |     format!("tuple({})", fields)
50 | }
51 | 
52 | #[cfg(test)]
53 | mod tests {
54 |     use crate::abi::{get_tuple_abi, Resolution};
55 |     use crate::source::DataSource;
56 |     use ethers::abi::param_type::ParamType;
57 |     use ethers::abi::struct_def::FieldType;
58 | 
59 |     #[test]
60 |     fn simple_struct() {
61 |         let resolutions = vec![
62 |             Resolution {
63 |                 name: Some(String::from("name")),
64 |                 abi: FieldType::Elementary(ParamType::String),
65 |                 data_source: DataSource::String(String::from("Hello")),
66 |             },
67 |             Resolution {
68 |                 name: Some(String::from("age")),
69 |                 abi: FieldType::Elementary(ParamType::Uint(256)),
70 |                 data_source: DataSource::Number(22),
71 |             },
72 |             Resolution {
73 |                 name: None,
74 |                 abi: FieldType::Elementary(ParamType::Uint(256)),
75 |                 data_source: DataSource::BlockNumber,
76 |             },
77 |         ];
78 | 
79 |         assert_eq!(
80 |             get_tuple_abi(&resolutions),
81 |             String::from("tuple(string name,uint256 age,uint256)")
82 |         );
83 |     }
84 | }
85 | 
--------------------------------------------------------------------------------
/parser/src/lib.rs:
--------------------------------------------------------------------------------
1 | #[macro_use]
2 | extern crate pest_derive;
3 | 
4 | mod utils;
5 | mod query;
6 | mod parse;
7 | mod resolve;
8 | mod abi;
9 | mod yul;
10 | mod source;
11 | 
12 | use wasm_bindgen::prelude::*;
13 | 
14 | // When the `wee_alloc` feature is enabled, use `wee_alloc` as the global
15 | // allocator.
16 | #[cfg(feature = "wee_alloc")]
17 | #[global_allocator]
18 | static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT;
19 | 
20 | #[wasm_bindgen]
21 | pub fn parse(query: String) -> Result<String, String> {
22 |     let query_set = parse::parse_query_cls(&query)?;
23 |     let resolutions = resolve::resolve(&query_set)?;
24 |     let tuple = abi::get_tuple_abi(&resolutions);
25 |     let yul = yul::derive_yul(resolutions)?;
26 | 
27 |     Ok(format!("{};{}", tuple, yul))
28 | }
29 | 
--------------------------------------------------------------------------------
/parser/src/parse.rs:
--------------------------------------------------------------------------------
1 | extern crate pest;
2 | use crate::query;
3 | use pest::iterators::Pair;
4 | use pest::Parser;
5 | 
6 | #[derive(Parser)]
7 | #[grammar = "sleuth.pest"]
8 | struct SleuthParser;
9 | 
10 | fn parse_full_select_var<'a>(
11 |     full_select_var: Pair<'a, Rule>,
12 | ) -> Result<query::FullSelectVar<'a>, String> {
13 |     let mut source: Option<&'a str> = None;
14 | 
15 |     for pair in full_select_var.into_inner() {
16 |         match pair.as_rule() {
17 |             Rule::source => {
18 |                 source = Some(pair.as_str());
19 |             }
20 |             Rule::variable => {
21 |                 return Ok(query::FullSelectVar {
22 |                     source,
23 |                     variable: query::SelectVar::Var(pair.as_str()),
24 |                 });
25 |             }
26 |             Rule::wildcard => {
27 |                 return Ok(query::FullSelectVar {
28 |                     source,
29 |                     variable: query::SelectVar::Wildcard,
30 |                 });
31 |             }
32 |             r => return Err(format!("parse_full_select_var::unmatched: {:?}", r)),
33 |         }
34 |     }
35 |     Err(String::from("parse_full_select_var::exit"))
36 | }
37 | 
38 | enum Literal<'a> {
39 |     Number(u64),
40 |     String(&'a str),
41 | }
42 | 
43 | fn parse_literal<'a>(literal_var: Pair<'a, Rule>) -> Result<Literal<'a>, String> {
44 |     for pair in literal_var.into_inner() {
45 |         match pair.as_rule() {
46 |             Rule::number => {
47 |                 return Ok(Literal::Number(pair.as_str().parse::<u64>().unwrap()));
48 |             }
49 |             Rule::string => {
50 |                 return Ok(Literal::String(pair.into_inner().next().unwrap().as_str()));
51 |             }
52 |             r => return Err(format!("parse_literal::unmatched: {:?}", r)),
53 |         }
54 |     }
55 |     Err(String::from("parse_literal::exit"))
56 | }
57 | 
58 | fn parse_selection_item<'a>(
59 |     selection_item: Pair<'a, Rule>,
60 | ) -> Result<query::Selection<'a>, String> {
61 |     for pair in selection_item.into_inner() {
62 |         match pair.as_rule() {
63 |             Rule::full_select_var => {
64 |                 return Ok(query::Selection::Var(parse_full_select_var(pair)?));
65 |             }
66 |             Rule::literal => {
67 |                 return match parse_literal(pair)? {
68 |                     Literal::Number(n) => Ok(query::Selection::Number(n)),
69 |                     Literal::String(s) => Ok(query::Selection::String(s)),
70 |                 };
71 |             }
72 |             r => return Err(format!("parse_selection_item::unmatched: {:?}", r)),
73 |         }
74 |     }
75 |     Err(String::from("parse_selection_item::exit"))
76 | }
77 | 
78 | fn parse_selection<'a>(selection: Pair<'a, Rule>) -> Result<Vec<query::Selection<'a>>, String> {
79 |     let mut res: Vec<query::Selection<'a>> = vec![];
80 |     for pair in selection.into_inner() {
81 |         match pair.as_rule() {
82 |             Rule::selection_item => {
83 |                 res.push(parse_selection_item(pair)?);
84 |             }
85 |             Rule::selection_item_n => {
86 |                 res.push(parse_selection_item(pair.into_inner().next().unwrap())?);
87 |             }
88 |             r => return Err(format!("parse_selection::unmatched: {:?}", r)),
89 |         }
90 |     }
91 |     Ok(res)
92 | }
93 | 
94 | fn parse_from<'a>(from: Pair<'a, Rule>) -> Result<&'a str, String> {
95 |     for pair in from.into_inner() {
96 |         match pair.as_rule() {
97 |             Rule::source => {
98 |                 return Ok(pair.as_str());
99 |             }
100 |             r => return Err(format!("parse_from::unmatched: {:?}", r)),
101 |         }
102 |     }
103 |     Err(String::from("parse_from::exit"))
104 | }
105 | 
106 | fn parse_select_query<'a>(select_query: Pair<'a, Rule>) -> Result<query::SelectQuery<'a>, String> {
107 |     let mut selection: Option<Vec<query::Selection<'a>>> = None;
108 |     let mut source: Option<&'a str> = None;
109 | 
110 |     for pair in select_query.into_inner() {
111 |         match pair.as_rule() {
112 |             Rule::selection_cls => {
113 |                 selection = Some(parse_selection(pair)?);
114 |             }
115 |             Rule::from_cls => {
116 |                 source = Some(parse_from(pair)?);
117 |             }
118 |             r => return Err(format!("parse_select_query::unmatched: {:?}", r)),
119 |         }
120 |     }
121 | 
122 |     Ok(query::SelectQuery {
123 |         select: selection.unwrap(),
124 |         source,
125 |     })
126 | }
127 | 
128 | fn parse_interface<'a>(with_interface: Pair<'a, Rule>) -> Result<Vec<&'a str>, String> {
129 |     let mut res = vec![];
130 |     for pair in with_interface.into_inner() {
131 |         match pair.as_rule() {
132 |             Rule::interface_item => {
133 |                 res.push(
134 |                     pair.into_inner()
135 |                         .next()
136 |                         .unwrap()
137 |                         .into_inner()
138 |                         .next()
139 |                         .unwrap()
140 |                         .as_str(),
141 |                 );
142 |             }
143 |             Rule::interface_item_n => {
144 |                 res.push(
145 |                     pair.into_inner()
146 |                         .next()
147 |                         .unwrap()
148 |                         .into_inner()
149 |                         .next()
150 |                         .unwrap()
151 |                         .into_inner()
152 |                         .next()
153 |                         .unwrap()
154 |                         .as_str(),
155 |                 );
156 |             }
157 |             r => return Err(format!("parse_interface::inner::unmatched: {:?}", r)),
158 |         }
159 |     }
160 |     Ok(res)
161 | }
162 | 
163 | fn parse_register_query<'a>(
164 |     register_query: Pair<'a, Rule>,
165 | ) -> Result<query::RegisterQuery<'a>, String> {
166 |     let mut inner = register_query.into_inner();
167 |     let keyword_pair = inner.next().unwrap();
168 |     let source: &str = match keyword_pair.as_rule() {
169 |         Rule::keyword => keyword_pair.as_str(),
170 |         r => Err(format!(
171 |             "parse_register_query::keyword_pair::unmatched: {:?}",
172 |             r
173 |         ))?,
174 |     };
175 | 
176 |     let address_pair = inner.next().unwrap();
177 |     let address: &str = match address_pair.as_rule() {
178 |         Rule::hex => address_pair.as_str(),
179 |         r => Err(format!(
180 |             "parse_register_query::address_pair::unmatched: {:?}",
181 |             r
182 |         ))?,
183 |     };
184 | 
185 |     let interface = match inner.next() {
186 |         Some(pair) => parse_interface(pair)?,
187 |         None => vec![],
188 |     };
189 | 
190 |     Ok(query::RegisterQuery {
191 |         source,
192 |         address,
193 |         interface,
194 |     })
195 | }
196 | 
197 | fn parse_query<'a>(query: Pair<'a, Rule>) -> Result<query::Query<'a>, String> {
198 |     for pair in query.into_inner().next().unwrap().into_inner() {
199 |         match pair.as_rule() {
200 |             Rule::select_query => {
201 |                 return Ok(query::Query::Select(parse_select_query(pair)?));
202 |             }
203 |             Rule::register_query => {
204 |                 return Ok(query::Query::Register(parse_register_query(pair)?));
205 |             }
206 |             r => return Err(format!("parse_query::unmatched: {:?}", r)),
207 |         }
208 |     }
209 |     Err(String::from("parse_query::exit"))
210 | }
211 | 
212 | pub fn parse_query_cls<'a>(query: &'a str) -> Result<Vec<query::Query<'a>>, String> {
213 |     let mut pairs = SleuthParser::parse(Rule::main, &query).map_err(|e| e.to_string())?;
214 |     let query_cls = pairs.next().unwrap().into_inner().next().unwrap();
215 | 
216 |     query_cls.into_inner().map(parse_query).collect()
217 | }
218 | 
219 | #[cfg(test)]
220 | mod tests {
221 |     use crate::parse::parse_query_cls;
222 |     use crate::query::*;
223 | 
224 |     #[test]
225 |     fn simple_query_literal() {
226 |         assert_eq!(
227 |             parse_query_cls("SELECT 5"),
228 |             Ok(vec![Query::Select(SelectQuery {
229 |                 select: vec![Selection::Number(5)],
230 |                 source: None
231 |             })])
232 |         );
233 |     }
234 | 
235 |     #[test]
236 |     fn simple_query_with_from() {
237 |         assert_eq!(
238 |             parse_query_cls("SELECT blocks.number FROM blocks"),
239 |             Ok(vec![Query::Select(SelectQuery {
240 |                 select: vec![Selection::Var(FullSelectVar {
241 |                     source: Some("blocks"),
242 |                     variable: SelectVar::Var("number")
243 |                 })],
244 |                 source: Some("blocks")
245 |             })])
246 |         );
247 |     }
248 | 
249 |     #[test]
250 |     fn simple_query_with_multi_select() {
251 |         assert_eq!(
252 |             parse_query_cls("SELECT blocks.number, 5, \"cat\" FROM blocks"),
253 |             Ok(vec![Query::Select(SelectQuery {
254 |                 select: vec![
255 |                     Selection::Var(FullSelectVar {
256 |                         source: Some("blocks"),
257 |                         variable: SelectVar::Var("number")
258 |                     }),
259 |                     Selection::Number(5),
260 |                     Selection::String("cat"),
261 |                 ],
262 |                 source: Some("blocks")
263 |             })])
264 |         );
265 |     }
266 | 
267 |     #[test]
268 |     fn simple_query_with_contract() {
269 |         assert_eq!(
270 |             parse_query_cls(
271 |                 r###"
272 |                 REGISTER CONTRACT comet AT 0xc3d688B66703497DAA19211EEdff47f25384cdc3 WITH INTERFACE ["function totalSupply() returns (uint256)"];
273 |                 SELECT comet.totalSupply FROM comet;
274 |                 "###
275 |             ),
276 |             Ok(vec![
277 |                 Query::Register(RegisterQuery {
278 |                     source: "comet",
279 |                     address: "0xc3d688B66703497DAA19211EEdff47f25384cdc3",
280 |                     interface: vec![
281 |                         "function totalSupply() returns (uint256)"
282 |                     ]
283 |                 }),
284 |                 Query::Select(SelectQuery {
285 |                     select: vec![
286 |                         Selection::Var(FullSelectVar {
287 |                             source: Some("comet"),
288 |                             variable: SelectVar::Var("totalSupply")
289 |                         })
290 |                     ],
291 |                     source: Some("comet")
292 |                 })
293 |             ])
294 |         );
295 |     }
296 | }
297 | 
--------------------------------------------------------------------------------
/parser/src/query.rs:
--------------------------------------------------------------------------------
1 | 
2 | #[derive(Debug, PartialEq)]
3 | pub enum SelectVar<'a> {
4 |     Wildcard,
5 |     Var(&'a str)
6 | }
7 | 
8 | #[derive(Debug, PartialEq)]
9 | pub struct FullSelectVar<'a> {
10 |     pub source: Option<&'a str>,
11 |     pub variable: SelectVar<'a>
12 | }
13 | 
14 | #[derive(Debug, PartialEq)]
15 | pub enum Selection<'a> {
16 |     Var(FullSelectVar<'a>),
17 |     Number(u64),
18 |     String(&'a str)
19 | }
20 | 
21 | #[derive(Debug, PartialEq)]
22 | pub struct SelectQuery<'a> {
23 |     pub select: Vec<Selection<'a>>,
24 |     pub source: Option<&'a str>
25 | }
26 | 
27 | #[derive(Debug, PartialEq)]
28 | pub struct RegisterQuery<'a> {
29 |     pub source: &'a str,
30 |     pub address: &'a str,
31 |     pub interface: Vec<&'a str>
32 | }
33 | 
34 | #[derive(Debug, PartialEq)]
35 | pub enum Query<'a> {
36 |     Select(SelectQuery<'a>),
37 |     Register(RegisterQuery<'a>),
38 | }
39 | 
40 | #[cfg(test)]
41 | mod tests {
42 |     use crate::query::*;
43 | 
44 |     #[test]
45 |     fn select_query() {
46 |         let _: Query = Query::Select(SelectQuery {
47 |             select: vec![
48 |                 Selection::Var(FullSelectVar {
49 |                     source: Some("block"),
50 |                     variable: SelectVar::Wildcard
51 |                 }),
52 |                 Selection::Var(FullSelectVar {
53 |                     source: None,
54 |                     variable: SelectVar::Var("number")
55 |                 }),
56 |                 Selection::Number(55),
57 |                 Selection::String("Hello")
58 |             ],
59 |             source: Some("block")
60 |         });
61 |     }
62 | 
63 |     #[test]
64 |     fn register_query() {
65 |         let _: Query = Query::Register(RegisterQuery {
66 |             source: "comet",
67 |             address: "0xc3d688B66703497DAA19211EEdff47f25384cdc3",
68 |             interface: vec![
69 |                 "function totalSupply() returns (uint256)"
70 |             ]
71 |         });
72 |     }
73 | }
74 | 
--------------------------------------------------------------------------------
/parser/src/resolve.rs:
--------------------------------------------------------------------------------
1 | use crate::query::{self, Selection};
2 | use crate::source::{
3 |     find_data_source, find_source, get_all_sources, sources_for_query, DataSource, Source,
4 | };
5 | use ethers::abi;
6 | 
7 | #[derive(PartialEq, Debug)]
8 | pub struct Resolution {
9 |     pub name: Option<String>,
10 |     pub abi: abi::struct_def::FieldType,
11 |     pub data_source: DataSource,
12 | }
13 | 
14 | fn show_missing_source_error(source: &str, sources: &Vec<Source>) -> String {
15 |     format!(
16 |         "Cannot find source \"{}\" in sources from query. FROM sources: {}",
17 |         source,
18 |         sources
19 |             .iter()
20 |             .map(|s| s.name.clone())
21 |             .collect::<Vec<String>>()
22 |             .join(",")
23 |     )
24 | }
25 | 
26 | fn show_missing_variable_error(variable: &str, source: &Source) -> String {
27 |     format!(
28 |         "Cannot find variable with name \"{}\" in source \"{}\". Known variables: {}",
Known variables: {}", 29 | variable, 30 | source.name, 31 | source 32 | .mappings 33 | .keys() 34 | .map(|s| String::from(s)) 35 | .collect::>() 36 | .join(",") 37 | ) 38 | } 39 | 40 | pub fn resolve(query_set: &Vec) -> Result, String> { 41 | let mut resolutions: Vec = vec![]; 42 | let all_sources = get_all_sources(query_set)?; 43 | for query in query_set.iter() { 44 | match query { 45 | query::Query::Select(select_query) => { 46 | let sources = sources_for_query(query, &all_sources)?; 47 | for selection in &select_query.select { 48 | match selection { 49 | Selection::Var(fsv) => { 50 | // TODO: Handle vars without listed source 51 | if let Some(source) = fsv.source { 52 | let source = find_source(source, &sources) 53 | .ok_or_else(|| show_missing_source_error(&source, &sources))?; 54 | match fsv.variable { 55 | query::SelectVar::Var(v) => { 56 | let data_source = 57 | find_data_source(v, source).ok_or_else(|| { 58 | show_missing_variable_error(&v, &source) 59 | })?; 60 | resolutions.push(Resolution { 61 | name: Some(String::from(v)), 62 | abi: data_source.abi(), 63 | data_source: data_source.clone(), 64 | }); 65 | } 66 | query::SelectVar::Wildcard => todo!(), 67 | } 68 | } 69 | } 70 | Selection::Number(n) => resolutions.push(Resolution { 71 | name: None, 72 | abi: abi::struct_def::FieldType::Elementary(abi::ParamType::Uint(256)), 73 | data_source: DataSource::Number(*n), 74 | }), 75 | Selection::String(s) => resolutions.push(Resolution { 76 | name: None, 77 | abi: abi::struct_def::FieldType::Elementary(abi::ParamType::String), 78 | data_source: DataSource::String(String::from(*s)), 79 | }), 80 | } 81 | } 82 | } 83 | &query::Query::Register(_) => (), 84 | } 85 | } 86 | Ok(resolutions) 87 | } 88 | 89 | #[cfg(test)] 90 | mod tests { 91 | use crate::query::{FullSelectVar, Query, RegisterQuery, SelectQuery, SelectVar, Selection}; 92 | use crate::resolve::{resolve, Resolution}; 93 | use crate::source::DataSource; 94 | use ethers::abi::param_type::ParamType; 95 | use ethers::abi::struct_def::FieldType; 96 | 97 | fn query_set<'a>(source: Option>, variable: Option<&'a str>) -> Vec> { 98 | vec![ 99 | Query::Register(RegisterQuery { 100 | source: "comet", 101 | address: "0xc3d688B66703497DAA19211EEdff47f25384cdc3", 102 | interface: vec!["function totalSupply() returns (uint256)"], 103 | }), 104 | Query::Select(SelectQuery { 105 | select: vec![Selection::Var(FullSelectVar { 106 | source: source.unwrap_or(Some("block")), 107 | variable: SelectVar::Var(variable.unwrap_or("number")), 108 | })], 109 | source: Some("block"), 110 | }), 111 | ] 112 | } 113 | 114 | #[test] 115 | fn test_valid_resolution() { 116 | let qs = query_set(None, None); 117 | let resolutions = resolve(&qs); 118 | assert_eq!( 119 | resolutions, 120 | Ok(vec![Resolution { 121 | name: Some(String::from("number")), 122 | abi: FieldType::Elementary(ParamType::Uint(256)), 123 | data_source: DataSource::BlockNumber 124 | }]) 125 | ); 126 | } 127 | 128 | // TODO: Test skipping out on source [detection] 129 | // TODO: Test skipping out on source [err: ambiguous] 130 | // TODO: Test wildcard 131 | // TODO: Test aliases 132 | 133 | #[test] 134 | fn test_invalid_resolution_missing_source() { 135 | let qs = query_set(Some(Some("time")), None); 136 | let resolutions = resolve(&qs); 137 | assert_eq!( 138 | resolutions, 139 | Err(String::from( 140 | "Cannot find source \"time\" in sources from query. 
FROM sources: block" 141 | )) 142 | ); 143 | } 144 | 145 | #[test] 146 | fn test_invalid_resolution_missing_variable() { 147 | let qs = query_set(None, Some("age")); 148 | let resolutions = resolve(&qs); 149 | assert_eq!( 150 | resolutions, 151 | Err(String::from( 152 | "Cannot find variable with name \"age\" in source \"block\". Known variables: number" 153 | )) 154 | ); 155 | } 156 | } 157 | -------------------------------------------------------------------------------- /parser/src/sleuth.pest: -------------------------------------------------------------------------------- 1 | 2 | main = { 3 | SOI 4 | ~ query_cls 5 | ~ ch_semi? 6 | ~ EOI 7 | } 8 | 9 | WHITESPACE = _{ " " | "\t" | NEWLINE } 10 | COMMENT = _{ "/*" ~ (!"*/" ~ ANY)* ~ "*/" } 11 | 12 | select_kw = _{ ^"SELECT" } 13 | register_kw = _{ ^"REGISTER" } 14 | contract_kw = _{ ^"CONTRACT" } 15 | from_kw = _{ ^"FROM" } 16 | with_kw = _{ ^"WITH" } 17 | interface_kw = _{ ^"INTERFACE" } 18 | at_kw = _{ ^"AT" } 19 | ch_semi = _{ ";" } 20 | ch_dot = _{ "." } 21 | ch_comma = _{ "," } 22 | ch_open_bracket = _{ "[" } 23 | ch_close_bracket = _{ "]" } 24 | wildcard = { "*" } 25 | 26 | string = ${ "\"" ~ string_inner ~ "\"" } 27 | string_inner = @{ char* } 28 | char = { 29 | !("\"" | "\\") ~ ANY 30 | | "\\" ~ ("\"" | "\\" | "/" | "b" | "f" | "n" | "r" | "t") 31 | | "\\" ~ ("u" ~ ASCII_HEX_DIGIT{4}) 32 | } 33 | 34 | number = @{ ASCII_DIGIT+ } 35 | keyword = @{ XID_START ~ ASCII_ALPHANUMERIC* } 36 | literal = { string | number } 37 | hex = { "0x" ~ ASCII_HEX_DIGIT+ } 38 | 39 | query_cls = { query_0 ~ query_n* } 40 | query_0 = { query } 41 | query_n = { ch_semi ~ query } 42 | query = { select_query | register_query } 43 | 44 | // REGISTER 45 | register_query = { 46 | register_kw ~ contract_kw ~ keyword ~ at_kw ~ hex ~ ( with_kw ~ interface_kw ~ ch_open_bracket ~ interface_cls ~ ch_close_bracket )? 47 | } 48 | 49 | interface_cls = { interface_item ~ interface_item_n* } 50 | interface_item_n = { "," ~ interface_item } 51 | interface_item = { string } 52 | 53 | // SELECT 54 | select_query = { 55 | select_kw ~ selection_cls ~ ( from_kw ~ from_cls )? 56 | } 57 | 58 | selection_cls = { selection_item ~ selection_item_n* } 59 | selection_item_n = { ch_comma ~ selection_item } 60 | selection_item = { full_select_var | literal } 61 | 62 | full_select_var = { 63 | ( ( source ~ ch_dot )? 
~ ( variable | wildcard ) )
64 | }
65 | 
66 | source = @{ keyword }
67 | variable = @{ keyword }
68 | 
69 | // FROM
70 | from_cls = {
71 |     source
72 | }
73 | 
--------------------------------------------------------------------------------
/parser/src/source.rs:
--------------------------------------------------------------------------------
1 | use crate::query;
2 | use ethers::abi::{self, struct_def::FieldType, Address, ParamType};
3 | use ethers::types::{Bytes, H160};
4 | use ethers::utils::hex::FromHex;
5 | use std::collections::HashMap;
6 | 
7 | #[derive(Clone, Debug, PartialEq)]
8 | pub enum DataSource {
9 |     BlockNumber,
10 |     Number(u64),
11 |     String(String),
12 |     Call(Address, Bytes, abi::struct_def::FieldType),
13 | }
14 | 
15 | impl DataSource {
16 |     pub fn abi(&self) -> abi::struct_def::FieldType {
17 |         match self {
18 |             DataSource::BlockNumber | DataSource::Number(_) => {
19 |                 abi::struct_def::FieldType::Elementary(abi::ParamType::Uint(256))
20 |             }
21 |             DataSource::String(_) => abi::struct_def::FieldType::Elementary(abi::ParamType::String),
22 |             DataSource::Call(_, _, abi) => abi.clone(),
23 |         }
24 |     }
25 | }
26 | 
27 | #[derive(Clone, Debug, PartialEq)]
28 | pub struct Source {
29 |     pub name: String,
30 |     pub mappings: HashMap<String, DataSource>,
31 | }
32 | 
33 | fn block_source() -> Source {
34 |     Source {
35 |         name: String::from("block"),
36 |         mappings: HashMap::from([(String::from("number"), DataSource::BlockNumber)]),
37 |     }
38 | }
39 | 
40 | fn builtin_sources() -> Vec<Source> {
41 |     vec![block_source()]
42 | }
43 | 
44 | fn get_address(s: &str) -> Result<Address, String> {
45 |     let inner = s
46 |         .strip_prefix("0x")
47 |         .ok_or(format!("Error: address should begin with 0x.."))?
48 |         .to_string();
49 | 
50 |     let address_bytes =
51 |         <[u8; 20]>::from_hex(inner).map_err(|_e| format!("Invalid address: {}", s))?;
52 |     Ok(H160::from(address_bytes))
53 | }
54 | 
55 | fn function_outputs_to_abi(outputs: Vec<abi::Param>) -> abi::struct_def::FieldType {
56 |     let params: Vec<ParamType> = outputs.into_iter().map(|param| param.kind).collect();
57 |     FieldType::Elementary(ParamType::Tuple(params))
58 | }
59 | 
60 | fn get_source_from_register(query: &query::RegisterQuery) -> Result<Source, String> {
61 |     let address = get_address(query.address)?;
62 |     let contract = ethers::abi::parse_abi(&query.interface)
63 |         .map_err(|e| format!("Error parsing interface for {}: {:?}", &query.source, e))?;
64 |     let mappings: HashMap<String, DataSource> = contract
65 |         .functions
66 |         .into_iter()
67 |         .filter_map(
68 |             |(name, fs)| match fs.into_iter().find(|f| f.inputs.len() == 0) {
69 |                 Some(f) => {
70 |                     let bytes = Bytes::from(f.encode_input(&vec![]).ok()?);
71 |                     Some((
72 |                         name,
73 |                         DataSource::Call(address, bytes, function_outputs_to_abi(f.outputs)),
74 |                     ))
75 |                 }
76 |                 None => None,
77 |             },
78 |         )
79 |         .collect();
80 |     // TODO: Set call data
81 |     Ok(Source {
82 |         name: query.source.to_string(),
83 |         mappings,
84 |     })
85 | }
86 | 
87 | pub fn find_source<'a, 'b>(name: &'a str, sources: &'b Vec<Source>) -> Option<&'b Source> {
88 |     sources.iter().find(|&source| source.name == name)
89 | }
90 | 
91 | pub fn find_data_source<'a, 'b>(name: &'a str, source: &'b Source) -> Option<&'b DataSource> {
92 |     source.mappings.get(name)
93 | }
94 | 
95 | pub fn get_all_sources(query_set: &Vec<query::Query>) -> Result<Vec<Source>, String> {
96 |     let mut all_sources = builtin_sources();
97 |     for query in query_set {
98 |         match query {
99 |             query::Query::Register(register) => {
100 |                 all_sources.push(get_source_from_register(register)?);
101 |             }
102 |             _ => (),
103 |         }
104 |     }
105 |     Ok(all_sources)
106 | }
107 | 
108 | pub fn sources_for_query(
109 |     query: &query::Query,
110 |     all_sources: &Vec<Source>,
111 | ) -> Result<Vec<Source>, String> {
112 |     let mut res: Vec<Source> = vec![];
113 |     match query {
114 |         query::Query::Select(select) => {
115 |             if let Some(name) = select.source {
116 |                 match find_source(name, &all_sources) {
117 |                     Some(source) => {
118 |                         res.push(source.clone());
119 |                     }
120 |                     None => Err(format!(
121 |                         "No such relation \"{}\" referenced in FROM clause",
122 |                         name
123 |                     ))?,
124 |                 }
125 |             }
126 |         }
127 |         query::Query::Register(_) => (),
128 |     }
129 |     Ok(res)
130 | }
131 | 
132 | #[cfg(test)]
133 | mod tests {
134 |     use crate::query::{FullSelectVar, Query, RegisterQuery, SelectQuery, SelectVar, Selection};
135 |     use crate::source::{
136 |         block_source, find_data_source, find_source, get_address, get_all_sources,
137 |         sources_for_query, DataSource, Source,
138 |     };
139 |     use ethers::types::H160;
140 |     use std::collections::HashMap;
141 |     use ethers::abi;
142 |     use ethers::types::Bytes;
143 | 
144 |     fn select_query<'a>(source: Option<&'a str>) -> Query<'a> {
145 |         Query::Select(SelectQuery {
146 |             select: vec![Selection::Var(FullSelectVar {
147 |                 source: Some("block"),
148 |                 variable: SelectVar::Var("number"),
149 |             })],
150 |             source: Some(source.unwrap_or("block")),
151 |         })
152 |     }
153 | 
154 |     fn register_query<'a>() -> Query<'a> {
155 |         Query::Register(RegisterQuery {
156 |             source: "comet",
157 |             address: "0xc3d688B66703497DAA19211EEdff47f25384cdc3",
158 |             interface: vec!["function totalSupply() returns (uint256)"],
159 |         })
160 |     }
161 | 
162 |     fn comet_source() -> Source {
163 |         Source {
164 |             name: String::from("comet"),
165 |             mappings: HashMap::from([(String::from("totalSupply"), DataSource::Call(
166 |                 ethers::types::H160([
167 |                     0xc3, 0xd6, 0x88, 0xB6, 0x67, 0x03, 0x49, 0x7D, 0xAA, 0x19, 0x21, 0x1E, 0xEd,
168 |                     0xff, 0x47, 0xf2, 0x53, 0x84, 0xcd, 0xc3,
169 |                 ]),
170 |                 Bytes::from([0x18, 0x16, 0x0d, 0xdd]),
171 |                 abi::struct_def::FieldType::Elementary(ethers::abi::param_type::ParamType::Tuple(
172 |                     vec![abi::ParamType::Uint(256)],
173 |                 )),
174 |             ))]),
175 |         }
176 |     }
177 | 
178 |     #[test]
179 |     fn get_address_success() {
180 |         assert_eq!(
181 |             get_address("0xc3d688B66703497DAA19211EEdff47f25384cdc3"),
182 |             Ok(H160::from([
183 |                 0xc3, 0xd6, 0x88, 0xB6, 0x67, 0x03, 0x49, 0x7D, 0xAA, 0x19, 0x21, 0x1E, 0xEd, 0xff,
184 |                 0x47, 0xf2, 0x53, 0x84, 0xcd, 0xc3
185 |             ]))
186 |         );
187 |     }
188 | 
189 |     #[test]
190 |     fn get_all_sources_empty() {
191 |         assert_eq!(get_all_sources(&vec![]), Ok(vec![block_source()]));
192 |     }
193 | 
194 |     #[test]
195 |     fn get_all_sources_register() {
196 |         let r = register_query();
197 |         assert_eq!(
198 |             get_all_sources(&vec![r]),
199 |             Ok(vec![block_source(), comet_source()])
200 |         );
201 |     }
202 | 
203 |     #[test]
204 |     fn sources_for_query_builtin_success() {
205 |         let q = select_query(None);
206 |         let all_sources = get_all_sources(&vec![register_query()]).unwrap();
207 |         assert_eq!(
208 |             sources_for_query(&q, &all_sources),
209 |             Ok(vec![block_source()])
210 |         );
211 |     }
212 | 
213 |     #[test]
214 |     fn sources_for_query_registered_success() {
215 |         let q = select_query(Some("comet"));
216 |         assert_eq!(
217 |             sources_for_query(&q, &vec![comet_source()]),
218 |             Ok(vec![comet_source()])
219 |         );
220 |     }
221 | 
222 |     #[test]
223 |     fn sources_for_query_missing() {
224 |         let q = select_query(Some("time"));
225 |         let all_sources = get_all_sources(&vec![register_query()]).unwrap();
226 |         assert_eq!(
227 |             sources_for_query(&q, &all_sources),
228 |             Err(format!(
229 |                 "No such relation \"time\" referenced in FROM clause"
230 |             ))
231 |         );
232 | } 233 | 234 | #[test] 235 | fn find_source_success() { 236 | let q = select_query(None); 237 | let all_sources = get_all_sources(&vec![register_query()]).unwrap(); 238 | let sources = sources_for_query(&q, &all_sources).unwrap(); 239 | let source = find_source("block", &sources); 240 | assert_eq!(source, Some(&block_source())); 241 | } 242 | 243 | #[test] 244 | fn find_source_missing() { 245 | let q = select_query(None); 246 | let all_sources = get_all_sources(&vec![register_query()]).unwrap(); 247 | let sources = sources_for_query(&q, &all_sources).unwrap(); 248 | let source = find_source("time", &sources); 249 | assert_eq!(source, None); 250 | } 251 | 252 | #[test] 253 | fn find_data_source_success() { 254 | let q = select_query(None); 255 | let all_sources = get_all_sources(&vec![register_query()]).unwrap(); 256 | let sources = sources_for_query(&q, &all_sources).unwrap(); 257 | let source = find_source("block", &sources).unwrap(); 258 | let data_source = find_data_source("number", source); 259 | assert_eq!(data_source, Some(&DataSource::BlockNumber)); 260 | } 261 | 262 | #[test] 263 | fn find_data_source_failure() { 264 | let q = select_query(None); 265 | let all_sources = get_all_sources(&vec![register_query()]).unwrap(); 266 | let sources = sources_for_query(&q, &all_sources).unwrap(); 267 | let source = find_source("block", &sources).unwrap(); 268 | let data_source = find_data_source("age", source); 269 | assert_eq!(data_source, None); 270 | } 271 | } 272 | -------------------------------------------------------------------------------- /parser/src/utils.rs: -------------------------------------------------------------------------------- 1 | pub fn set_panic_hook() { 2 | // When the `console_error_panic_hook` feature is enabled, we can call the 3 | // `set_panic_hook` function at least once during initialization, and then 4 | // we will get better error messages if our code ever panics. 5 | // 6 | // For more details see 7 | // https://github.com/rustwasm/console_error_panic_hook#readme 8 | #[cfg(feature = "console_error_panic_hook")] 9 | console_error_panic_hook::set_once(); 10 | } 11 | -------------------------------------------------------------------------------- /parser/src/yul.rs: -------------------------------------------------------------------------------- 1 | use ethers::utils::hex::ToHex; 2 | use crate::resolve::Resolution; 3 | use crate::source::DataSource; 4 | use ethers::utils::hex; 5 | 6 | const PREFIX: &str = r###" 7 | object "Query" { 8 | code { 9 | // Store the creator in slot zero. 
10 |     sstore(0, caller())
11 | 
12 |     // Deploy the contract
13 |     datacopy(0, dataoffset("runtime"), datasize("runtime"))
14 |     return(0, datasize("runtime"))
15 |   }
16 |   object "runtime" {
17 |     code {
18 |       // Dispatcher
19 |       switch selector()
20 |       case 0x2c46b205 /* "query()" */ {
21 | "###;
22 | 
23 | const SUFFIX: &str = r###"
24 |       }
25 |       default {
26 |         revert(0, 0)
27 |       }
28 | 
29 |       /* ---------- calldata encoding functions ---------- */
30 |       function returnUint(v) {
31 |         mstore(0, v)
32 |         return(0, 0x20)
33 |       }
34 |       function returnTrue() {
35 |         returnUint(1)
36 |       }
37 | 
38 |       /* ---------- calldata decoding functions ----------- */
39 |       function selector() -> s {
40 |         s := div(calldataload(0), 0x100000000000000000000000000000000000000000000000000000000)
41 |       }
42 |     }
43 |   }
44 | }"###;
45 | 
46 | fn pad_zeroes(arr: &[u8]) -> [u8; 32] {
47 |     let mut b = [0; 32];
48 |     b[..arr.len()].copy_from_slice(&arr);
49 |     b
50 | }
51 | 
52 | fn copy_bytes(tokens: &mut Vec<String>, bytes: Vec<u8>, store_len: bool) -> (usize, usize) {
53 |     let bytes_len = bytes.len();
54 |     if store_len {
55 |         tokens.push(format!("mstore(free, {})", bytes_len));
56 |         tokens.push(format!("free := add(free, 0x20)"));
57 |     }
58 |     let mut chunks = 0;
59 |     for (index, chunk) in (0..).zip(bytes.chunks(32)) {
60 |         tokens.push(format!(
61 |             "mstore(add(free,{}),0x{})",
62 |             index * 32,
63 |             hex::encode(pad_zeroes(chunk))
64 |         ));
65 |         chunks += 1;
66 |     }
67 |     (bytes_len, chunks)
68 | }
69 | 
70 | pub fn derive_yul_function(resolutions: Vec<Resolution>) -> Result<Vec<String>, String> {
71 |     let mut tokens: Vec<String> = vec![
72 |         String::from("let res := 0x80"),
73 |         format!("let free := add(0x80,mul({},0x20))", resolutions.len()),
74 |     ];
75 |     for resolution in resolutions {
76 |         match resolution.data_source {
77 |             DataSource::BlockNumber => {
78 |                 tokens.push(String::from("mstore(res, number())"));
79 |                 tokens.push(String::from("res := add(res, 0x20)"));
80 |             }
81 |             DataSource::Number(n) => {
82 |                 tokens.push(format!("mstore(res, {})", n));
83 |                 tokens.push(String::from("res := add(res, 0x20)"));
84 |             }
85 |             DataSource::String(s) => {
86 |                 let (_bytes_len, chunks) = copy_bytes(&mut tokens, s.into_bytes(), true);
87 |                 tokens.push(String::from("mstore(res, sub(free,add(0x80,0x20)))"));
88 |                 tokens.push(format!("free := add(free, {})", chunks * 32));
89 |                 tokens.push(String::from("res := add(res, 0x20)"));
90 |             }
91 |             DataSource::Call(addr, bytes, _abi) => {
92 |                 let (bytes_len, _chunks) = copy_bytes(&mut tokens, bytes.to_vec(), false);
93 |                 tokens.push(format!(
94 |                     "pop(call(gas(), 0x{}, 0, free, {}, free, 0))",
95 |                     addr.encode_hex::<String>(), bytes_len
96 |                 ));
97 |                 tokens.push(format!("returndatacopy(free, 0, returndatasize())"));
98 |                 tokens.push(String::from("mstore(res, free)"));
99 |                 tokens.push(format!("free := add(free, returndatasize())"));
100 |                 tokens.push(String::from("res := add(res, 0x20)"));
101 |             }
102 |         }
103 |     }
104 |     tokens.push(String::from("return(0x80,sub(free,0x80))"));
105 |     Ok(tokens)
106 | }
107 | 
108 | pub fn derive_yul(resolutions: Vec<Resolution>) -> Result<String, String> {
109 |     let tokens = derive_yul_function(resolutions)?;
110 |     let inner = tokens.join("\n ");
111 |     Ok(format!("{}{}{}", PREFIX, inner, SUFFIX))
112 | }
113 | 
114 | #[cfg(test)]
115 | mod tests {
116 |     use crate::resolve::Resolution;
117 |     use crate::source::{DataSource, DataSource::Call};
118 |     use crate::yul;
119 |     use ethers::abi;
120 |     use ethers::types::Bytes;
121 | 
122 |     #[test]
123 |     fn pad_zeroes() {
124 |         assert_eq!(
125 |             yul::pad_zeroes(&String::from("cat").into_bytes()),
126 |             [
127 |                 99, 97, 116, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128 | 0, 0, 0, 0, 0, 0 129 | ] 130 | ); 131 | } 132 | 133 | #[test] 134 | fn simple_derive_yul() { 135 | let resolutions = vec![Resolution { 136 | name: Some(String::from("block")), 137 | abi: abi::struct_def::FieldType::Elementary(abi::ParamType::Uint(256)), 138 | data_source: DataSource::BlockNumber, 139 | }]; 140 | 141 | assert_eq!( 142 | yul::derive_yul_function(resolutions), 143 | Ok(vec![ 144 | String::from("let res := 0x80"), 145 | String::from("let free := add(0x80,mul(1,0x20))"), 146 | String::from("mstore(res, number())"), 147 | String::from("res := add(res, 0x20)"), 148 | String::from("return(0x80,sub(free,0x80))"), 149 | ]) 150 | ) 151 | } 152 | 153 | #[test] 154 | fn derive_yul_call() { 155 | let resolutions = vec![Resolution { 156 | name: Some(String::from("comet")), 157 | abi: abi::struct_def::FieldType::Elementary(abi::ParamType::Uint(256)), 158 | data_source: Call( 159 | ethers::types::H160([ 160 | 0xc3, 0xd6, 0x88, 0xB6, 0x67, 0x03, 0x49, 0x7D, 0xAA, 0x19, 0x21, 0x1E, 0xEd, 161 | 0xff, 0x47, 0xf2, 0x53, 0x84, 0xcd, 0xc3, 162 | ]), 163 | Bytes::from([0x18, 0x16, 0x0d, 0xdd]), 164 | abi::struct_def::FieldType::Elementary(ethers::abi::param_type::ParamType::Tuple( 165 | vec![abi::ParamType::Uint(256)], 166 | )), 167 | ), 168 | }]; 169 | 170 | assert_eq!( 171 | yul::derive_yul_function(resolutions), 172 | Ok(vec![ 173 | String::from("let res := 0x80"), 174 | String::from("let free := add(0x80,mul(1,0x20))"), 175 | String::from("mstore(add(free,0),0x18160ddd00000000000000000000000000000000000000000000000000000000)"), 176 | String::from("pop(call(gas(), 0xc3d688b66703497daa19211eedff47f25384cdc3, 0, free, 4, free, 0))"), 177 | String::from("returndatacopy(free, 0, returndatasize())"), 178 | String::from("mstore(res, free)"), 179 | String::from("free := add(free, returndatasize())"), 180 | String::from("res := add(res, 0x20)"), 181 | String::from("return(0x80,sub(free,0x80))") 182 | ]) 183 | ) 184 | } 185 | } 186 | -------------------------------------------------------------------------------- /parser/tests/web.rs: -------------------------------------------------------------------------------- 1 | //! Test suite for the Web and headless browsers. 
2 | 3 | #![cfg(target_arch = "wasm32")] 4 | 5 | extern crate wasm_bindgen_test; 6 | use wasm_bindgen_test::*; 7 | 8 | wasm_bindgen_test_configure!(run_in_browser); 9 | 10 | #[wasm_bindgen_test] 11 | fn pass() { 12 | assert_eq!(1 + 1, 2); 13 | } 14 | -------------------------------------------------------------------------------- /script/Sleuth.s.sol: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: UNLICENSED 2 | pragma solidity ^0.8.23; 3 | 4 | import "forge-std/Script.sol"; 5 | import "../src/Sleuth.sol"; 6 | 7 | interface CodeJar { 8 | function saveCode(bytes memory code) external returns (address); 9 | } 10 | 11 | contract Prepare is Script { 12 | function setUp() public {} 13 | 14 | function run() public returns (address) { 15 | CodeJar codeJar = CodeJar(vm.envAddress("CODE_JAR")); 16 | console.log("Code Jar Address:", address(codeJar)); 17 | console.log("Chain ID:", block.chainid); 18 | console.logBytes(address(codeJar).code); 19 | 20 | address sleuthAddress = codeJar.saveCode(type(Sleuth).creationCode); 21 | 22 | console.log("Sleuth Address:", sleuthAddress); 23 | 24 | return sleuthAddress; 25 | } 26 | } 27 | 28 | contract Deploy is Script { 29 | error MismatchedSleuthAddress(address expected, address actual); 30 | function setUp() public {} 31 | 32 | function run() public returns (address) { 33 | bytes memory sleuthCreationCode = vm.getCode("./.release-tmp/Sleuth.json"); 34 | CodeJar codeJar = CodeJar(vm.envAddress("CODE_JAR")); 35 | address expectedSleuthAddress = vm.envAddress("SLEUTH_ADDRESS"); 36 | address sleuthAddress = codeJar.saveCode(sleuthCreationCode); 37 | if (sleuthAddress != expectedSleuthAddress) { 38 | revert MismatchedSleuthAddress(expectedSleuthAddress, sleuthAddress); 39 | } 40 | vm.startBroadcast(); 41 | sleuthAddress = codeJar.saveCode(sleuthCreationCode); 42 | vm.stopBroadcast(); 43 | 44 | console.log("Sleuth Address:", sleuthAddress); 45 | 46 | return sleuthAddress; 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /script/deploy-release.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -eo pipefail 4 | 5 | tag="$1" 6 | 7 | if [ -z "$tag" ]; then 8 | echo "usage script/deploy-release.sh " 9 | exit 1 10 | fi 11 | 12 | opts="" 13 | 14 | if [ -n "$DEPLOYER_PK" ]; then 15 | opts="$opts --private-key $DEPLOYER_PK --broadcast" 16 | fi 17 | 18 | if [ -n "$ETHERSCAN_API_KEY" ]; then 19 | opts="$opts --verify --etherscan-api-key \"$ETHERSCAN_API_KEY\"" 20 | fi 21 | 22 | cleanup() { 23 | rv=$? 24 | rm -rf .release-tmp 25 | exit $rv 26 | } 27 | 28 | trap "cleanup" EXIT 29 | 30 | rm -rf .release-tmp 31 | mkdir .release-tmp 32 | 33 | curl -L "https://github.com/compound-finance/sleuth/releases/download/$tag/Sleuth.json" -o ./.release-tmp/Sleuth.json 34 | curl -L "https://github.com/compound-finance/sleuth/releases/download/$tag/contracts.json" -o ./.release-tmp/contracts.json 35 | 36 | if [ -z "$RPC_URL" ]; then 37 | echo "Missing RPC_URL env var" 38 | exit 1 39 | fi 40 | 41 | if ! 
command -v jq &> /dev/null; then 42 | echo "jq could not be found" 43 | exit 1 44 | fi 45 | 46 | export SLEUTH_ADDRESS="$(cat ./.release-tmp/contracts.json | jq -r '.sleuth')" 47 | export CODE_JAR="$(cat ./.release-tmp/contracts.json | jq -r '.codeJar')" 48 | 49 | forge script \ 50 | --rpc-url="$RPC_URL" \ 51 | $opts \ 52 | script/Sleuth.s.sol:Deploy \ 53 | $@ 54 | -------------------------------------------------------------------------------- /script/mainnet/deploy.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -exo pipefail 4 | 5 | if [ -n "$ETHEREUM_PK" ]; then 6 | wallet_args="--private-key $ETHEREUM_PK --broadcast" 7 | else 8 | wallet_args="--unlocked" 9 | fi 10 | 11 | if [ -n "$RPC_URL" ]; then 12 | rpc_args="--rpc-url $RPC_URL" 13 | else 14 | rpc_args="" 15 | fi 16 | 17 | if [ -n "$ETHERSCAN_API_KEY" ]; then 18 | etherscan_args="--verify --etherscan-api-key $ETHERSCAN_API_KEY" 19 | else 20 | etherscan_args="" 21 | fi 22 | 23 | forge create \ 24 | $rpc_args \ 25 | $etherscan_args \ 26 | $wallet_args \ 27 | $@ \ 28 | src/Sleuth.sol:Sleuth 29 | -------------------------------------------------------------------------------- /script/prepare-release.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -eo pipefail 4 | 5 | if [ -z "$CODE_JAR" ]; then 6 | echo "Missing CODE_JAR env var" 7 | exit 1 8 | fi 9 | 10 | if [ -z "$RPC_URL" ]; then 11 | echo "Missing RPC_URL env var" 12 | exit 1 13 | fi 14 | 15 | if ! command -v jq &> /dev/null; then 16 | echo "jq could not be found" 17 | exit 1 18 | fi 19 | 20 | forge build 21 | mkdir -p release/ 22 | cp out/Sleuth.sol/Sleuth.json release/ 23 | cp src/Sleuth.sol release/ 24 | title="$(git log -1 --pretty="%s")" 25 | body="$(git log -1 --pretty="%b")" 26 | 27 | if [ -z "$title" ]; then 28 | echo "must include git commit title" 29 | exit 1 30 | fi 31 | 32 | if [ -z "$body" ]; then 33 | echo "must include git commit body" 34 | exit 1 35 | fi 36 | 37 | sleuth_address="$(forge script --rpc-url=$RPC_URL --json --silent script/Sleuth.s.sol:Prepare | tee | jq -r '.returns."0".value')" 38 | 39 | echo "title=$title" 40 | echo "body=$body" 41 | echo "sleuth_address=$sleuth_address" 42 | 43 | echo "$sleuth_address" > "release/sleuth@$sleuth_address" 44 | 45 | cat > release/RELEASE.md < release/contracts.json < s { 32 | s := div(calldataload(0), 0x100000000000000000000000000000000000000000000000000000000) 33 | } 34 | } 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /test/examples/Pair.sol: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: UNLICENSED 2 | pragma solidity ^0.8.23; 3 | 4 | contract Pair { 5 | function query() external pure returns (uint256, string memory) { 6 | return (55, "hello"); 7 | } 8 | 9 | function queryFail() external pure returns (uint256, string memory) { 10 | revert("bad news"); 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | /* Visit https://aka.ms/tsconfig to read more about this file */ 4 | 5 | /* Projects */ 6 | // "incremental": true, /* Save .tsbuildinfo files to allow for incremental compilation of projects. 
*/ 7 | // "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */ 8 | // "tsBuildInfoFile": "./.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. */ 9 | // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. */ 10 | // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */ 11 | // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */ 12 | 13 | /* Language and Environment */ 14 | "target": "es6", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */ 15 | // "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */ 16 | // "jsx": "preserve", /* Specify what JSX code is generated. */ 17 | // "experimentalDecorators": true, /* Enable experimental support for TC39 stage 2 draft decorators. */ 18 | // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */ 19 | // "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h'. */ 20 | // "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */ 21 | // "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using 'jsx: react-jsx*'. */ 22 | // "reactNamespace": "", /* Specify the object invoked for 'createElement'. This only applies when targeting 'react' JSX emit. */ 23 | // "noLib": true, /* Disable including any library files, including the default lib.d.ts. */ 24 | // "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */ 25 | // "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */ 26 | 27 | /* Modules */ 28 | "module": "commonjs", /* Specify what module code is generated. */ 29 | "rootDir": "./cli", /* Specify the root folder within your source files. */ 30 | // "moduleResolution": "node", /* Specify how TypeScript looks up a file from a given module specifier. */ 31 | // "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */ 32 | // "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */ 33 | // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */ 34 | // "typeRoots": [], /* Specify multiple folders that act like './node_modules/@types'. */ 35 | // "types": [], /* Specify type package names to be included without being referenced in a source file. */ 36 | // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */ 37 | // "moduleSuffixes": [], /* List of file name suffixes to search when resolving a module. */ 38 | // "resolveJsonModule": true, /* Enable importing .json files. */ 39 | // "noResolve": true, /* Disallow 'import's, 'require's or ''s from expanding the number of files TypeScript should add to a project. */ 40 | 41 | /* JavaScript Support */ 42 | "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the 'checkJS' option to get errors from these files. */ 43 | // "checkJs": true, /* Enable error reporting in type-checked JavaScript files. 
*/ 44 | // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. Only applicable with 'allowJs'. */ 45 | 46 | /* Emit */ 47 | "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */ 48 | // "declarationMap": true, /* Create sourcemaps for d.ts files. */ 49 | // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */ 50 | // "sourceMap": true, /* Create source map files for emitted JavaScript files. */ 51 | // "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If 'declaration' is true, also designates a file that bundles all .d.ts output. */ 52 | "outDir": "./dist", /* Specify an output folder for all emitted files. */ 53 | // "removeComments": true, /* Disable emitting comments. */ 54 | // "noEmit": true, /* Disable emitting files from a compilation. */ 55 | // "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */ 56 | // "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types. */ 57 | // "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */ 58 | // "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */ 59 | // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */ 60 | // "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */ 61 | // "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */ 62 | // "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */ 63 | // "newLine": "crlf", /* Set the newline character for emitting files. */ 64 | // "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */ 65 | // "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. */ 66 | // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */ 67 | // "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */ 68 | // "declarationDir": "./", /* Specify the output directory for generated declaration files. */ 69 | // "preserveValueImports": true, /* Preserve unused imported values in the JavaScript output that would otherwise be removed. */ 70 | 71 | /* Interop Constraints */ 72 | // "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */ 73 | // "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */ 74 | "esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */ 75 | // "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */ 76 | "forceConsistentCasingInFileNames": true, /* Ensure that casing is correct in imports. */ 77 | 78 | /* Type Checking */ 79 | "strict": true, /* Enable all strict type-checking options. */ 80 | // "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. 
*/ 81 | // "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */ 82 | // "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */ 83 | // "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */ 84 | // "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */ 85 | // "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */ 86 | // "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */ 87 | // "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */ 88 | // "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. */ 89 | // "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */ 90 | // "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */ 91 | // "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */ 92 | // "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */ 93 | // "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. */ 94 | // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */ 95 | // "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */ 96 | // "allowUnusedLabels": true, /* Disable error reporting for unused labels. */ 97 | // "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */ 98 | 99 | /* Completeness */ 100 | "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */ 101 | "skipLibCheck": true /* Skip type checking all .d.ts files. */ 102 | }, 103 | "include": [ 104 | "cli/**/*" 105 | ], 106 | "exclude": ["node_modules"] 107 | } 108 | --------------------------------------------------------------------------------
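
End-to-end, the parser pipeline above reads: parse.rs turns the query text into Query values, resolve.rs maps each selected item onto a DataSource (a literal, the block number, or a registered contract call), abi.rs renders the tuple type of the result, and yul.rs emits a Yul "Query" object whose query() entry point (selector 0x2c46b205) returns the ABI-encoded values. A hedged sketch of driving that pipeline from TypeScript, reusing the REGISTER CONTRACT query shape from the parse.rs tests; the import path is an assumption, and compiling the emitted Yul and executing it via eth_call are outside this snippet:

    import { parse } from './parser/pkg/parser';

    // Same query shape exercised by the tests in parser/src/parse.rs.
    const query = `
      REGISTER CONTRACT comet AT 0xc3d688B66703497DAA19211EEdff47f25384cdc3
        WITH INTERFACE ["function totalSupply() returns (uint256)"];
      SELECT comet.totalSupply FROM comet;
    `;

    const out = parse(query);
    const sep = out.indexOf(';');
    const tupleAbi = out.slice(0, sep);    // ABI tuple describing the returned values
    const yulSource = out.slice(sep + 1);  // Yul to compile and run against its query() entry point
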