├── .eslintrc.json ├── .github └── workflows │ ├── grunt.yml │ └── publish.yml ├── .gitignore ├── .npmignore ├── Gruntfile.js ├── LICENSE ├── README.md ├── bin └── smart-sync ├── chains ├── .gitignore ├── README.md ├── config.toml ├── devs.pwd ├── disco-spec.json ├── docker-compose.yml ├── evaluation-config.toml ├── evaluation-disco-spec.json ├── evaluation-docker-compose.yml └── keys │ └── disco │ ├── UTC--2020-11-23T17-17-07Z--3c8a970e-60fe-c392-edbf-f4cabfb69969 │ ├── UTC--2020-11-23T17-17-21Z--b76479a6-13a1-cfd0-a501-d72317cf1b19 │ ├── UTC--2020-11-23T17-18-07Z--fce13e1c-4e16-2436-a721-c9f0283c5274 │ └── address_book.json ├── config └── cli-config.json ├── contracts ├── CallRelayContract.sol ├── CallingContract.sol ├── GetProofLib.sol ├── MappingContract.sol ├── MerklePatriciaProof.sol ├── ProxyContract.sol ├── RLPWriter.sol ├── RelayContract.sol ├── SimpleStorage.sol ├── StorageImitator.sol ├── SyncCandidate.sol ├── TestLogicContract.sol └── TestProxyContract.sol ├── evaluation ├── config │ └── test-cli-config.json ├── csv-files │ └── .gitignore ├── eval-utils.ts ├── get-mt-for-map-sizes-1-1000.ts ├── r_scripts │ ├── mt_plotter.R │ └── plotter.R ├── update-multiple-deep-values-with-map-sizes-1-1000.ts ├── update-multiple-values-random-with-map-sizes-1-1000.ts ├── update-one-value-per-mpt-height-with-map-sizes-1-to-1000.ts └── update-one-value-with-map-sizes-1-1000.ts ├── hardhat.config.ts ├── hardhat_scripts ├── deployMappingContract.ts └── list-storage.ts ├── imgs └── trie-layout.png ├── optimized-storage-proof.md ├── package-lock.json ├── package.json ├── src ├── chain-proxy.ts ├── cli │ └── smart-sync.ts ├── config.ts ├── diffHandler │ ├── Add.ts │ ├── Change.ts │ ├── DiffHandler.ts │ ├── Remove.ts │ ├── StorageDiff.ts │ └── Types.ts ├── proofHandler │ ├── BranchNode.ts │ ├── ExtensionNode.ts │ ├── GetProof.ts │ ├── LeafNode.ts │ ├── ProofPathBuilder.ts │ └── Types.ts └── utils │ ├── fileHandler.ts │ ├── logger.ts │ ├── providerHandler.ts │ ├── 
proxy-contract-builder.ts │ ├── transactionHandler.ts │ └── utils.ts ├── test ├── cli-test.ts ├── config │ ├── encryptedAccount.json │ └── test-cli-config.json ├── extension-validation-test.ts ├── get-diff-test.ts ├── list-storage-test.ts ├── log-delegate-test.ts ├── new-initialization-test.ts ├── optimized-storage-proof-test.ts ├── proof-path-builder-test.ts ├── proxy-deploy-test.ts ├── scale-test.ts ├── state-proof-test.ts ├── storageKeyValuePairs │ ├── delete_earlyPairs_13534149.csv │ ├── delete_latestPairs_13535417.csv │ ├── early_pairs_for_change_mt_through_add_13535603.csv │ └── latest_pairs_for_change_mt_through_add_13536164.csv ├── test-utils.ts └── verify-proxy-test.ts └── tsconfig.json /.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": [ 3 | "airbnb-base", 4 | "airbnb-typescript/base" 5 | ], 6 | "plugins": ["@typescript-eslint"], 7 | "parser": "@typescript-eslint/parser", 8 | "rules": { 9 | "@typescript-eslint/indent": ["error", 4], 10 | "semi": ["error", "always"], 11 | "import/extensions": 0, 12 | "max-len": "off", 13 | "no-param-reassign": ["error", { "props": false }] 14 | }, 15 | "overrides": [ 16 | { 17 | "files": [ 18 | "test/**/*.ts", 19 | "evaluation/**/*" 20 | ], 21 | "env": { "mocha": true }, 22 | "rules": { 23 | "@typescript-eslint/no-unused-expressions": "off", 24 | "no-await-in-loop": "off" 25 | } 26 | }, 27 | { 28 | "files": ["*.ts", "*.tsx"], // Your TypeScript files extension 29 | "parserOptions": { 30 | "project": ["./tsconfig.json"] // Specify it only for TypeScript files 31 | } 32 | } 33 | ], 34 | "settings": { 35 | "import/resolver": { 36 | "node": { 37 | "extensions": [".ts", ".tsx"] 38 | } 39 | } 40 | }, 41 | "parserOptions": { 42 | "project": "./tsconfig.json" 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /.github/workflows/grunt.yml: 
-------------------------------------------------------------------------------- 1 | name: NodeJS with Grunt 2 | 3 | on: 4 | push: 5 | branches: [ main ] 6 | pull_request: 7 | branches: [ main ] 8 | 9 | jobs: 10 | build: 11 | runs-on: ubuntu-latest 12 | 13 | strategy: 14 | matrix: 15 | node-version: [16.x] 16 | 17 | steps: 18 | - uses: actions/checkout@v2 19 | 20 | - name: Use Node.js ${{ matrix.node-version }} 21 | uses: actions/setup-node@v1 22 | with: 23 | node-version: ${{ matrix.node-version }} 24 | 25 | - name: Build 26 | run: npm install -f 27 | 28 | - name: Generate types 29 | run: npx hardhat compile 30 | 31 | - name: Eslint 32 | run: grunt eslint 33 | -------------------------------------------------------------------------------- /.github/workflows/publish.yml: -------------------------------------------------------------------------------- 1 | name: Publish 2 | 3 | on: 4 | release: 5 | types: [published] 6 | 7 | jobs: 8 | build: 9 | runs-on: ubuntu-latest 10 | steps: 11 | - uses: actions/checkout@v2 12 | - uses: actions/setup-node@v2 13 | with: 14 | node-version: '16.x' 15 | registry-url: 'https://registry.npmjs.org' 16 | - run: npm ci 17 | - run: npx grunt compile-project 18 | - run: npm publish --access public 19 | env: 20 | NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN}} 21 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Created by .ignore support plugin (hsz.mobi) 2 | logs 3 | *.log 4 | build 5 | dist 6 | src-gen 7 | node_modules/ 8 | *.tsbuildinfo 9 | .env 10 | node_modules 11 | src/types 12 | 13 | #Hardhat files 14 | cache 15 | artifacts 16 | 17 | data/ 18 | .idea/ 19 | target/ 20 | node_modules 21 | .vscode/ 22 | 23 | görliTestAccount.json -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | # 
Created by .ignore support plugin (hsz.mobi) 2 | logs 3 | *.log 4 | build 5 | src-gen 6 | node_modules/ 7 | *.tsbuildinfo 8 | .env 9 | node_modules 10 | src 11 | test 12 | imgs 13 | hardhat_scripts 14 | evaluation 15 | contracts 16 | chains 17 | dist/test 18 | dist/evaluation 19 | hardhat.config.ts 20 | optimized-storage-proof.md 21 | 22 | #Hardhat files 23 | cache 24 | artifacts 25 | 26 | data/ 27 | .idea/ 28 | target/ 29 | node_modules 30 | .vscode/ -------------------------------------------------------------------------------- /Gruntfile.js: -------------------------------------------------------------------------------- 1 | const child_process = require('child_process'); 2 | const fs = require('fs'); 3 | const YAML = require('yaml'); 4 | const CHAIN_DOCKER_NAME = 'crossChainContracts_test_chain'; 5 | const CONFIG_CHAIN_DIR = 'chain1-data'; 6 | const CHAIN_DIR = './chains'; 7 | const CHAIN_DOCKER_NAME_2 = 'crossChainContracts_test_chain_2'; 8 | const CONFIG_CHAIN_DIR_2 = 'chain2-data'; 9 | 10 | module.exports = (grunt) => { 11 | grunt.initConfig({ 12 | pkg: grunt.file.readJSON('package.json'), 13 | eslint: { 14 | target: ['src/**/*.ts', 'test/**/*.ts', 'evaluation/**/*.ts'], 15 | options: { 16 | maxWarnings: 5, 17 | fix: true, 18 | }, 19 | }, 20 | clean: ['dist/'], 21 | // Configure a mochaTest task 22 | mochaTest: { 23 | test: { 24 | options: { 25 | reporter: 'spec', 26 | quiet: false, // Optionally suppress output to standard out (defaults to false) 27 | clearRequireCache: false, // Optionally clear the require cache before running tests (defaults to false) 28 | clearCacheFilter: (key) => true, // Optionally defines which files should keep in cache 29 | noFail: false, // Optionally set to not fail on failed tests (will still fail on other errors) 30 | timeout: 36000000 31 | }, 32 | src: ['dist/test/**/*.js'] 33 | } 34 | }, 35 | watch: { 36 | files: ['<%= eslint.files %>'], 37 | tasks: ['eslint'], 38 | }, 39 | }); 40 | grunt.loadNpmTasks('grunt-eslint'); 41 | 
grunt.loadNpmTasks('grunt-mocha-test'); 42 | grunt.loadNpmTasks('grunt-contrib-clean'); 43 | 44 | grunt.registerTask('default', ['eslint']); 45 | 46 | grunt.registerTask('start-chains', 'Startup chain', () => { 47 | grunt.verbose.write('Starting test chains...'); 48 | child_process.execSync('docker-compose up -d', { cwd: CHAIN_DIR }); 49 | }); 50 | 51 | grunt.registerTask('start-chains-evaluation', 'Startup chain', () => { 52 | grunt.verbose.write('Starting test chains...'); 53 | child_process.execSync('docker-compose -f evaluation-docker-compose.yml up -d', { cwd: CHAIN_DIR }); 54 | }); 55 | 56 | grunt.registerTask('stop-chains', 'Stopping chain', () => { 57 | let container; 58 | let container2; 59 | try { 60 | const re = new RegExp(`${CHAIN_DOCKER_NAME}`, 'g'); 61 | const re2 = new RegExp(`${CHAIN_DOCKER_NAME_2}`, 'g'); 62 | container = re.exec(child_process.execSync('docker ps').toString()); 63 | container2 = re2.exec(child_process.execSync('docker ps').toString()); 64 | } catch(e) { 65 | grunt.fail.fatal(e); 66 | } 67 | if (container || container2) { 68 | grunt.verbose.write(`Stopping containers...`); 69 | child_process.execSync(`docker-compose stop`, { cwd: CHAIN_DIR }); 70 | child_process.execSync(`docker-compose rm -f`, { cwd: CHAIN_DIR }); 71 | grunt.verbose.ok(); 72 | } 73 | grunt.verbose.write('Removing chain data from test chains...'); 74 | child_process.execSync(`rm -rf ${CHAIN_DIR}/${CONFIG_CHAIN_DIR}`); 75 | child_process.execSync(`rm -rf ${CHAIN_DIR}/${CONFIG_CHAIN_DIR_2}`); 76 | grunt.verbose.ok(); 77 | }); 78 | 79 | grunt.registerTask('update-ports', 'Updates test chain ports', () => { 80 | let port = 9545; 81 | let targetPort = 9547; 82 | let filePath = grunt.option('test-config-path') || './test/config/test-cli-config.json'; 83 | const fileContent = fs.readFileSync(filePath); 84 | const config = JSON.parse(fileContent); 85 | if (grunt.option('test-chain-port')) { 86 | // change ports in all affected files according to given parameter 87 | port = 
parseInt(grunt.option('test-chain-port')); 88 | targetPort = port + 2; 89 | // change test config 90 | let url = config['srcChainRpcUrl'].match(/(https?:\/\/(www\.)?[-a-zA-Z0-9@:%._+~#=]{1,256})((\.[a-zA-Z0-9()]{1,6}\b([-a-zA-Z0-9()@:%_+.~#?&//=]*))|(:(\d+)))/)[1]; 91 | config['srcChainRpcUrl'] = `${url}:${port}`; 92 | url = config['targetChainRpcUrl'].match(/(https?:\/\/(www\.)?[-a-zA-Z0-9@:%._+~#=]{1,256})((\.[a-zA-Z0-9()]{1,6}\b([-a-zA-Z0-9()@:%_+.~#?&//=]*))|(:(\d+)))/)[1]; 93 | config['targetChainRpcUrl'] = `${url}:${targetPort}`; 94 | // write to file 95 | fs.writeFileSync(filePath, JSON.stringify(config, null, 4)); 96 | } else { 97 | // extract the current port from test config file 98 | port = parseInt(config['srcChainRpcUrl'].match(/https?:\/\/(www\.)?[-a-zA-Z0-9@:%._+~#=]{1,256}((\.[a-zA-Z0-9()]{1,6}\b([-a-zA-Z0-9()@:%_+.~#?&//=]*))|(:(\d+)))/)[6]) || port; 99 | targetPort = parseInt(config['targetChainRpcUrl'].match(/https?:\/\/(www\.)?[-a-zA-Z0-9@:%._+~#=]{1,256}((\.[a-zA-Z0-9()]{1,6}\b([-a-zA-Z0-9()@:%_+.~#?&//=]*))|(:(\d+)))/)[6]) || port; 100 | } 101 | // change chains/docker-compose.yml 102 | const dockerComposeFileContent = fs.readFileSync(grunt.option('docker-compose-path') || './chains/docker-compose.yml'); 103 | const dockerComposeConfig = YAML.parse(dockerComposeFileContent.toString()); 104 | dockerComposeConfig['services']['chain']['ports'][0] = `${port}:8545`; 105 | dockerComposeConfig['services']['chain']['ports'][1] = `${port + 1}:8546`; 106 | dockerComposeConfig['services']['chain2']['ports'][0] = `${targetPort}:8545`; 107 | dockerComposeConfig['services']['chain2']['ports'][1] = `${targetPort + 1}:8546`; 108 | // write back to file 109 | fs.writeFileSync(grunt.option('docker-compose-path') || './chains/docker-compose.yml', YAML.stringify(dockerComposeConfig, { indent: 4 })); 110 | }); 111 | 112 | grunt.registerTask('compile-project', 'Generate js-files', () => { 113 | grunt.task.run('clean'); 114 | grunt.task.run('compile-contracts'); 115 
| grunt.task.run('eslint'); 116 | grunt.task.run('tsc'); 117 | }); 118 | 119 | grunt.registerTask('compile-contracts', 'Generate contract type info', () => { 120 | child_process.execSync('npx hardhat compile', { stdio: 'inherit' }); 121 | }); 122 | 123 | grunt.registerTask('tsc', 'Compile ts files', () => { 124 | if (!grunt.file.exists('./artifacts/contracts/ProxyContract.sol/ProxyContract.json')) { 125 | grunt.log.writeln('Contracts were not compiled yet.'); 126 | grunt.task.run('compile-contracts'); 127 | } 128 | child_process.execSync('tsc', { stdio: 'inherit' }); 129 | grunt.file.copy('./artifacts/contracts/ProxyContract.sol/ProxyContract.json', './dist/artifacts/contracts/ProxyContract.sol/ProxyContract.json'); 130 | }); 131 | 132 | grunt.registerTask('install', 'Install smart-sync', () => { 133 | child_process.execSync(`npm i --development`, { stdio: 'inherit' }); 134 | grunt.task.run('compile-project'); 135 | grunt.task.run('install-global'); 136 | grunt.verbose.ok(); 137 | }); 138 | 139 | grunt.registerTask('install-global', 'Install smart-sync globally', () => { 140 | child_process.execSync(`npm i -g`, { stdio: 'inherit' }); 141 | }); 142 | 143 | grunt.registerTask('pack', 'npm pack smart-sync', () => { 144 | grunt.task.run('install'); 145 | grunt.task.run('npm-pack'); 146 | }); 147 | 148 | grunt.registerTask('npm-pack', 'npm packaging command', () => { 149 | child_process.execSync(`npm pack`, { stdio: 'inherit' }); 150 | }) 151 | 152 | grunt.registerTask('full-pipeline-test', 'Testing precompiled *.ts project', () => { 153 | grunt.task.run('compile-contracts'); 154 | grunt.task.run('eslint'); 155 | grunt.task.run('stop-chains'); 156 | grunt.task.run('update-ports'); 157 | grunt.task.run('start-chains'); 158 | grunt.task.run('test'); 159 | grunt.task.run('stop-chains'); 160 | }); 161 | 162 | grunt.registerTask('full-pipeline-dist-test', 'Testing compiled *.js project inside dist folder', () => { 163 | if (!grunt.file.exists('dist')) { 164 | 
grunt.log.writeln('Dir dist does not exist. Will compile the project now.'); 165 | grunt.task.run('compile-project'); 166 | } 167 | grunt.task.run('eslint'); 168 | grunt.task.run('stop-chains'); 169 | grunt.task.run('update-ports'); 170 | grunt.task.run('start-chains'); 171 | grunt.task.run('mochaTest'); 172 | grunt.task.run('stop-chains'); 173 | }); 174 | 175 | grunt.registerTask('full-pipeline-evaluation', 'Evaluating project', () => { 176 | grunt.task.run('compile-contracts'); 177 | grunt.task.run('eslint'); 178 | grunt.task.run('stop-chains'); 179 | grunt.task.run('start-chains-evaluation'); 180 | grunt.task.run('evaluate'); 181 | grunt.task.run('stop-chains'); 182 | }); 183 | 184 | grunt.registerTask('evaluate', 'Run evaluation', () => { 185 | child_process.execSync(`NODE_OPTIONS=--max_old_space_size=4096 npx hardhat test ./evaluation/*.ts`, { stdio: 'inherit' }); 186 | }); 187 | 188 | grunt.registerTask('test', 'Run tests', () => { 189 | child_process.execSync(`NODE_OPTIONS=--max_old_space_size=4096 npx hardhat test ./test/*.ts`, { stdio: 'inherit' }); 190 | }); 191 | }; 192 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Getting started 2 | 3 | ## Installation CLI 4 | 5 | To globally install the latest package of smart-sync cli, run: 6 | 7 | ``` 8 | $ npm i smart-sync -g 9 | ``` 10 | 11 | To compile the project yourself and install it, see section [Getting started (Dev)](#getting-started-dev) 12 | 13 | Under `{INSTALL_DIR}/config` you can find the default configuration for the cli. Adjust the fields according to your needs or pass them as options in the command line. 14 | 15 | To execute tests, please refer to the [test](#tests) section. 
16 | 17 | ## Usage 18 | 19 | ### Smart Contract Fork 20 | 21 | ```bash 22 | $ smart-sync help fork 23 | Usage: smart-sync fork|f [options] [relay_contract_address] 24 | 25 | Migrates a given contract address to a target chain and deploys a proxy contract. If no relay contract is provided, a relay contract will be deployed too. 26 | 27 | Options: 28 | -l, --log-level verbose level of logging (choices: "fatal", "error", "warn", "info", "debug", 29 | "trace", "silly", default: "debug") 30 | -s, --src-chain-rpc-host url of src chain rpc 31 | -t, --target-chain-rpc-url url of target chain rpc 32 | -c, --config-file path to the config file (default: "./config/cli-config.json") 33 | --connection-timeout connection timeout in ms 34 | --src-blocknr block number of src chain to use 35 | --diff-mode Diff function to use (choices: "storage", "srcTx") 36 | --gas-limit gas limit for tx on target chain 37 | -h, --help display help for command 38 | ``` 39 | Example usage: 40 | ```bash 41 | $ smart-sync fork 0x010A3d554c8d772aAC357e079B4D57B6dA28a43a 42 | ``` 43 | 44 | ### Synchronizing a Smart Contract 45 | 46 | ```bash 47 | $ smart-sync help synchronize 48 | Usage: smart-sync synchronize|s [options] 49 | 50 | Synchronizes the storage of a proxy contract with its source contracts storage up to an optionally provided block nr on the source chain. 51 | 52 | Options: 53 | -l, --log-level verbose level of logging (choices: "fatal", "error", "warn", "info", "debug", "trace", "silly", default: "debug") 54 | -s, --src-chain-rpc-host url of src chain rpc 55 | -t, --target-chain-rpc-url url of target chain rpc 56 | -c, --config-file path to the config file (default: "./config/cli-config.json") 57 | --connection-timeout connection timeout in ms 58 | --src-blocknr block number of src chain to use 59 | --diff-mode Diff function to use. When using storage, option --src-BlockNr equals block on srcChain and --target-BlockNr block on targetChain. 
When using srcTx 60 | --src-BlockNr describes block from where to replay tx until --target-blockNr. (choices: "storage", "srcTx") 61 | --target-blocknr see --diff-mode for further explanation 62 | --gas-limit gas limit for tx on target chain 63 | -h, --help display help for command 64 | ``` 65 | 66 | Example usage: 67 | ```bash 68 | $ smart-sync s 0x010A3d554c8d772aAC357e079B4D57B6dA28a43a --target-blockNr 450 69 | ``` 70 | 71 | ### Continuously synchronizing Smart Contracts 72 | ```bash 73 | $ smart-sync continuous-synch --help 74 | Usage: smart-sync continuous-synch|c [options] 75 | 76 | Periodically synch state updates. 77 | 78 | Arguments: 79 | proxy_contract_address 80 | period Define the updating period. Be sure to pass the period within " (Example: "*/2 * * * *"). The crontab syntax is based on the GNU crontab syntax. For information visit https://www.npmjs.com/package/node-cron. 81 | 82 | Options: 83 | -l, --log-level verbose level of logging (choices: "fatal", "error", "warn", "info", "debug", "trace", "silly", default: "info") 84 | -s, --src-chain-rpc-host url of src chain rpc 85 | -t, --target-chain-rpc-url url of target chain rpc 86 | -c, --config-file path to the config file (default: "./config/cli-config.json") 87 | --connection-timeout connection timeout in ms 88 | --src-blocknr block number of src chain to use 89 | --gas-limit 90 | --diff-mode Diff function to use. When using storage, option --src-BlockNr equals block on srcChain and --target-BlockNr block on targetChain. When using srcTx --src-BlockNr describes block from where to replay tx until --target-blockNr. 
(choices: "storage", 91 | "srcTx") 92 | --target-blocknr see --diff-mode for further explanation 93 | -h, --help display help for command 94 | ``` 95 | 96 | Example usage: 97 | ```bash 98 | $ smart-sync c 0x010A3d554c8d772aAC357e079B4D57B6dA28a43a "*/2 * * * *" 99 | ``` 100 | 101 | ### Retrieve migration status 102 | ```bash 103 | $ smart-sync help migration-status 104 | Usage: smart-sync migration-status|status [options] 105 | 106 | Checks if the storage root of the proxy contract equals the current storage root of the source contract in the relay contract on the target chain. 107 | 108 | Options: 109 | -l, --log-level verbose level of logging (choices: "fatal", "error", "warn", "info", "debug", "trace", "silly", default: "debug") 110 | -s, --src-chain-rpc-host url of src chain rpc 111 | -t, --target-chain-rpc-url url of target chain rpc 112 | -c, --config-file path to the config file (default: "./config/cli-config.json") 113 | --connection-timeout connection timeout in ms 114 | --src-blocknr block number of src chain to use 115 | -h, --help display help for command 116 | ``` 117 | Example usage: 118 | ``` bash 119 | $ smart-sync status 0x010A3d554c8d772aAC357e079B4D57B6dA28a43a 120 | ``` 121 | ### Get currrent block number 122 | ```bash 123 | $ smart-sync help get-curr-blocknr 124 | Usage: smart-sync get-curr-blocknr|blocknr [options] 125 | 126 | Get the synched block number of src chain for the provided proxy contract. 
127 | 128 | Options: 129 | -l, --log-level verbose level of logging (choices: "fatal", "error", "warn", "info", "debug", "trace", "silly", default: "debug") 130 | -s, --src-chain-rpc-host url of src chain rpc 131 | -t, --target-chain-rpc-url url of target chain rpc 132 | -c, --config-file path to the config file (default: "./config/cli-config.json") 133 | --connection-timeout connection timeout in ms 134 | --src-blocknr block number of src chain to use 135 | -h, --help display help for command 136 | ``` 137 | Example usage: 138 | ```bash 139 | $ smart-sync blocknr 0x20a508640B446990c781Cd541B9a2828ACA3a350 140 | ``` 141 | 142 | ### Retrieve state diff 143 | ```bash 144 | $ smart-sync help state-diff 145 | Usage: smart-sync state-diff|diff [options] [proxy_contract_address] 146 | 147 | Shows the state diff between source contract and proxy contract on target chain. If diff-mode == storage, proxy_contract_address has to be provided. 148 | 149 | Options: 150 | -l, --log-level verbose level of logging (choices: "fatal", "error", "warn", "info", "debug", "trace", "silly", default: "debug") 151 | -s, --src-chain-rpc-host url of src chain rpc 152 | -t, --target-chain-rpc-url url of target chain rpc 153 | -c, --config-file path to the config file (default: "./config/cli-config.json") 154 | --connection-timeout connection timeout in ms 155 | --src-blocknr block number of src chain to use 156 | --diff-mode Diff function to use. When using storage, option --src-BlockNr equals block on srcChain and --target-BlockNr block on targetChain. When using srcTx 157 | --src-BlockNr describes block from where to replay tx until --target-blockNr. If no blocks are given when using srcTx, then only the latest block 158 | is examined. 
(choices: "storage", "srcTx") 159 | --target-blocknr see --diff-mode for further explanation 160 | -h, --help display help for command 161 | ``` 162 | Example usage: 163 | ```bash 164 | $ smart-sync diff 0x20a508640B446990c781Cd541B9a2828ACA3a350 0xf8f22ab160e8a09fbf404a44139d9b5da936e3cb --diff-mode storage --src-blocknr 450 165 | ``` 166 | 167 | # Getting started (Dev) 168 | 169 | This project uses [hardhat](https://hardhat.org/getting-started/) and [ethers](https://docs.ethers.io/v5/) among other things. 170 | 171 | ### Install dev packages 172 | 173 | ```bash 174 | $ npm i --development 175 | ``` 176 | 177 | ### Compile project 178 | To execute the compile-pipeline of this project run: 179 | 180 | ```bash 181 | $ npx grunt compile-project 182 | ``` 183 | 184 | This cleans the dist folder, compiles the contracts, lints the src files and executes tsc. If you want to execute those steps individually see `npx grunt --help` for all available individual commands: 185 | 186 | Available tasks: 187 | Command | Description 188 | ------------ | ------------- 189 | eslint | Validate files with ESLint * 190 | mochaTest | Run node unit tests with Mocha * 191 | clean | Clean files and folders. * 192 | default | Alias for "eslint" task. 
193 | start-chains | Startup chain 194 | stop-chains | Stopping chain 195 | compile-project | Generate js-files 196 | compile-contracts | Generate contract type info 197 | tsc | Compile ts files 198 | install | Install smart-sync locally 199 | install-global | Install smart-sync globally 200 | pack | npm pack smart-sync 201 | npm-pack | npm packaging command 202 | full-pipeline-test | Testing precompiled *.ts project 203 | full-pipeline-dist-test | Testing compiled *.js project inside dist folder 204 | full-pipeline-evaluation | Evaluating project 205 | evaluate | Run evaluation 206 | test | Run tests 207 | 208 | ### Install CLI 209 | To compile and install the CLI run: 210 | ```bash 211 | $ npx grunt install 212 | ``` 213 | ## Chain 214 | Generally, you don't need to start the chain individually since its already started if you execute the command: 215 | ``` bash 216 | $ npm run test 217 | ``` 218 | See [readme](chain/README.md) on how to start the test chains. 219 | 220 | ## Linter 221 | We use the code style from [airbnb](https://www.npmjs.com/package/eslint-config-airbnb-base). 222 | To execute the linter just type: 223 | 224 | ```bash 225 | $ npx grunt eslint 226 | ``` 227 | 228 | ## Tests 229 | To run all the tests run (requires a running ethereum node, see [hardhat.config.ts](./hardhat.config.ts) and [hardhat.org/config](https://hardhat.org/config/)): 230 | 231 | ```bash 232 | $ npm run test 233 | ``` 234 | 235 | ### Adjusting ports of the test chains 236 | If you want to adjust the ports of the test chains, you just have to change them in the test-config file located at `test/config/test-cli-config.json`. Our scripts will then adjust the ports for the docker containers automatically when you run the command from above. 237 | 238 | Alternatively, you can adjust them over the optional parameter `test-chain-port` inside our Grunt task. 
With `test-config-path` you can even change the test config file path if you changed the location: 239 | 240 | ```bash 241 | $ npx grunt full-pipeline-test --test-chain-port=9545 --test-config-path=./test/config/test-cli-config.json 242 | ``` 243 | 244 | If you just want to adjust the ports without triggering the tests, execute the following: 245 | 246 | ```bash 247 | $ npx grunt update-ports --test-chain-port=9545 248 | ``` 249 | 250 | ### Running single tests 251 | You can also run single tests (chains need to be started manually): 252 | 253 | ```bash 254 | $ npx hardhat test test/list-storage-test.ts 255 | ``` 256 | 257 | ## Evaluation 258 | To run the evaluation run: 259 | 260 | ```bash 261 | $ npm run evaluate 262 | ``` 263 | 264 | Or a specific evaluation (chain needs to be started manually): 265 | 266 | ```bash 267 | $ npx hardhat test evaluation/update-multiple-values-with-map-sizes-1-1000.ts 268 | ``` 269 | -------------------------------------------------------------------------------- /bin/smart-sync: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | require('../dist/src/cli/smart-sync.js'); -------------------------------------------------------------------------------- /chains/.gitignore: -------------------------------------------------------------------------------- 1 | *-data/ -------------------------------------------------------------------------------- /chains/README.md: -------------------------------------------------------------------------------- 1 | 2 | Contains a [openethereum](https://github.com/openethereum/openethereum) config for a private development chain. 3 | 4 | ## Chain Setup 5 | 6 | The node's [config](config/config.toml) file specifies all allowed interfaces and api's the running node should provide. 7 | [keys](config/keys) contains a bunch of development accounts that are imported and unlocked during start up. 
8 | Run with [docker-compose.yml](docker-compose.yml): 9 | 10 | ```bash 11 | docker-compose up -d 12 | ``` 13 | docker mounts the config folder and uses the json spec file and keys directory to initialize the chain and its accounts. Changes will persist in the `base_path` directory. Ports `8545` (RPC) and `8546` (WS) are exposed to the host at `127.0.0.1`. 14 | 15 | To run with a local `openethereum` installation instead, (un)comment the relevant directory paths inside the [config.toml](config/config.toml) so that `openethereum` uses the correct spec and keys location. In order to access the persistent chain's data both from a chain started with `docker` and using `openethereum` directly, their versions must match. -------------------------------------------------------------------------------- /chains/config.toml: -------------------------------------------------------------------------------- 1 | [parity] 2 | # Custom chain spec with docker mnt path 3 | chain = "/home/openethereum/.local/share/openethereum/disco-spec.json" 4 | # Use the following lines instead for running `openethereum --config config.toml` and change the password path in `account` 5 | #chain = "disco-spec.json" 6 | #base_path = "./chain-data" 7 | #keys_path = "./keys" 8 | 9 | [account] 10 | unlock = ["0x00ce0c25d2a45e2f22d4416606d928b8c088f8db", "0x009a77b77c1bd10f6bd6ce0c76b6a06c1df5e8ae", "0x00ea67ecec38e3688662110471e6804380418fc7"] 11 | password = ["/home/openethereum/.local/share/openethereum/devs.pwd"] 12 | #password = ["./devs.pwd"] 13 | 14 | [rpc] 15 | interface = "all" 16 | apis = ["all"] 17 | hosts = ["all"] 18 | cors = ["all"] 19 | # necessary for EIP1186 eth_getProof 20 | experimental_rpcs = true 21 | port = 8545 22 | 23 | [websockets] 24 | interface = "all" 25 | apis = ["all"] 26 | hosts = ["all"] 27 | origins = ["all"] 28 | port = 8546 29 | 30 | [mining] 31 | reseal_min_period = 0 32 | min_gas_price = 0 33 | 34 | [footprint] 35 | # Enables Fat DB 36 | fat_db = "on" 
-------------------------------------------------------------------------------- /chains/devs.pwd: -------------------------------------------------------------------------------- 1 | dev -------------------------------------------------------------------------------- /chains/docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: "3.7" 2 | services: 3 | chain: 4 | container_name: crossChainContracts_test_chain 5 | image: openethereum/openethereum 6 | volumes: 7 | - ./:/home/openethereum/.local/share/openethereum 8 | command: > 9 | --config 10 | /home/openethereum/.local/share/openethereum/config.toml --base-path 11 | /home/openethereum/.local/share/openethereum/chain1-data --keys-path 12 | /home/openethereum/.local/share/openethereum/keys --tracing on 13 | ports: 14 | - 9550:8545 15 | - 9551:8546 16 | - 30303:30303 17 | - 30303:30303/udp 18 | chain2: 19 | container_name: crossChainContracts_test_chain_2 20 | image: openethereum/openethereum 21 | volumes: 22 | - ./:/home/openethereum/.local/share/openethereum 23 | command: > 24 | --config 25 | /home/openethereum/.local/share/openethereum/config.toml --base-path 26 | /home/openethereum/.local/share/openethereum/chain2-data --keys-path 27 | /home/openethereum/.local/share/openethereum/keys --tracing on 28 | ports: 29 | - 9552:8545 30 | - 9553:8546 31 | - 30304:30303 32 | - 30304:30303/udp 33 | -------------------------------------------------------------------------------- /chains/evaluation-config.toml: -------------------------------------------------------------------------------- 1 | [parity] 2 | # Custom chain spec with docker mnt path 3 | chain = "/home/openethereum/.local/share/openethereum/evaluation-disco-spec.json" 4 | # Use the following lines instead for running `openethereum --config config.toml` and change the password path in `account` 5 | #chain = "disco-spec.json" 6 | #base_path = "./chain-data" 7 | #keys_path = "./keys" 8 | 9 | [account] 10 | 
unlock = ["0x00ce0c25d2a45e2f22d4416606d928b8c088f8db", "0x009a77b77c1bd10f6bd6ce0c76b6a06c1df5e8ae", "0x00ea67ecec38e3688662110471e6804380418fc7"] 11 | password = ["/home/openethereum/.local/share/openethereum/devs.pwd"] 12 | #password = ["./devs.pwd"] 13 | 14 | [rpc] 15 | interface = "all" 16 | apis = ["all"] 17 | hosts = ["all"] 18 | cors = ["all"] 19 | # necessary for EIP1186 eth_getProof 20 | experimental_rpcs = true 21 | port = 8545 22 | 23 | [websockets] 24 | interface = "all" 25 | apis = ["all"] 26 | hosts = ["all"] 27 | origins = ["all"] 28 | port = 8546 29 | 30 | [mining] 31 | reseal_min_period = 0 32 | min_gas_price = 0 33 | 34 | [footprint] 35 | # Enables Fat DB 36 | fat_db = "on" -------------------------------------------------------------------------------- /chains/evaluation-docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3.7' 2 | services: 3 | chain: 4 | container_name: crossChainContracts_test_chain 5 | image: openethereum/openethereum 6 | volumes: 7 | - ./:/home/openethereum/.local/share/openethereum 8 | command: > 9 | --config /home/openethereum/.local/share/openethereum/evaluation-config.toml 10 | --base-path /home/openethereum/.local/share/openethereum/chain1-data 11 | --keys-path /home/openethereum/.local/share/openethereum/keys 12 | --tracing on 13 | ports: 14 | - 8545:8545 15 | - 8546:8546 16 | - 30303:30303 17 | - 30303:30303/udp 18 | chain2: 19 | container_name: crossChainContracts_test_chain_2 20 | image: openethereum/openethereum 21 | volumes: 22 | - ./:/home/openethereum/.local/share/openethereum 23 | command: > 24 | --config /home/openethereum/.local/share/openethereum/evaluation-config.toml 25 | --base-path /home/openethereum/.local/share/openethereum/chain2-data 26 | --keys-path /home/openethereum/.local/share/openethereum/keys 27 | --ports-shift 2 28 | --tracing on 29 | ports: 30 | - 8547:8547 31 | - 8548:8548 32 | - 30304:30303 33 | - 30304:30303/udp 
-------------------------------------------------------------------------------- /chains/keys/disco/UTC--2020-11-23T17-17-07Z--3c8a970e-60fe-c392-edbf-f4cabfb69969: -------------------------------------------------------------------------------- 1 | {"id":"3c8a970e-60fe-c392-edbf-f4cabfb69969","version":3,"crypto":{"cipher":"aes-128-ctr","cipherparams":{"iv":"80c42ce7c3d3df449aadf7777c9e5cb6"},"ciphertext":"cd4601072f814c3ffb5a33cf08fc8ef532294ff8b786e2f27fcaa834403a176e","kdf":"pbkdf2","kdfparams":{"c":10240,"dklen":32,"prf":"hmac-sha256","salt":"1727a6867290a70f7c5abeefc709889f1c2e4eb4f4b5324b7c7f0a139e8870f8"},"mac":"9dc83dbc70700cac24428bcaa98ed373f698b18073d600d5c49943317e206338"},"address":"00ce0c25d2a45e2f22d4416606d928b8c088f8db","name":"","meta":"{}"} -------------------------------------------------------------------------------- /chains/keys/disco/UTC--2020-11-23T17-17-21Z--b76479a6-13a1-cfd0-a501-d72317cf1b19: -------------------------------------------------------------------------------- 1 | {"id":"b76479a6-13a1-cfd0-a501-d72317cf1b19","version":3,"crypto":{"cipher":"aes-128-ctr","cipherparams":{"iv":"0d7a25144e6dbec6f3da90e8a5bcb876"},"ciphertext":"4963017e226ec1c245e31e2fda3077a5387b828fc94355b88ff24cd35add219b","kdf":"pbkdf2","kdfparams":{"c":10240,"dklen":32,"prf":"hmac-sha256","salt":"d10b1d628207773742a61f7c96b54ed154a9b49359ebac1febc96a40e5dee0b9"},"mac":"5c9ae872a5e4f9c5afea3bf09c8d95074ae58831b28b9cd79383e2b7b5cd1ea4"},"address":"009a77b77c1bd10f6bd6ce0c76b6a06c1df5e8ae","name":"","meta":"{}"} -------------------------------------------------------------------------------- /chains/keys/disco/UTC--2020-11-23T17-18-07Z--fce13e1c-4e16-2436-a721-c9f0283c5274: -------------------------------------------------------------------------------- 1 | 
{"id":"fce13e1c-4e16-2436-a721-c9f0283c5274","version":3,"crypto":{"cipher":"aes-128-ctr","cipherparams":{"iv":"866218dbf2c7072c01dbb6d6a769b0f0"},"ciphertext":"3bd652d0ac2c68af75c6642004341b65914001745e534006b5f1ca77f6e1d8af","kdf":"pbkdf2","kdfparams":{"c":10240,"dklen":32,"prf":"hmac-sha256","salt":"1032c1bd2bce7b6c0f63acf8753b29613541407e01d7811a3ac5341980315be3"},"mac":"7f68e75d384c68807d56c6580b940085f7eeb27ee6ff06b3f0f8f82eaae6484d"},"address":"00ea67ecec38e3688662110471e6804380418fc7","name":"","meta":"{}"} -------------------------------------------------------------------------------- /chains/keys/disco/address_book.json: -------------------------------------------------------------------------------- 1 | {} -------------------------------------------------------------------------------- /config/cli-config.json: -------------------------------------------------------------------------------- 1 | { 2 | "srcChainRpcUrl": "http://localhost:8545", 3 | "targetChainRpcUrl": "http://localhost:8547", 4 | "connectionTimeout": "36000", 5 | "logLevel": "info", 6 | "targetBlocknr": "latest", 7 | "gasLimit": 1000000000, 8 | "relayContractAddress": null, 9 | "keyValuePairPerBatch": 100 10 | } 11 | -------------------------------------------------------------------------------- /contracts/CallRelayContract.sol: -------------------------------------------------------------------------------- 1 | //SPDX-License-Identifier: Unlicense 2 | pragma solidity >=0.6.0 <0.8.0; 3 | 4 | import './SyncCandidate.sol'; 5 | 6 | contract CallRelayContract { 7 | SyncCandidate proxyContract; 8 | 9 | constructor(address _proxyContract) { 10 | proxyContract = SyncCandidate(_proxyContract); 11 | } 12 | 13 | function insert(uint _key, uint _value) public { 14 | proxyContract.insert(_key, _value); 15 | } 16 | 17 | function getValue(uint _key) public view returns (uint256) { 18 | return proxyContract.getValue(_key); 19 | } 20 | } 
-------------------------------------------------------------------------------- /contracts/CallingContract.sol: -------------------------------------------------------------------------------- 1 | //SPDX-License-Identifier: Unlicense 2 | pragma solidity >=0.6.0 <0.8.0; 3 | 4 | import './TestLogicContract.sol'; 5 | 6 | contract CallingContract { 7 | TestLogicContract proxyContract; 8 | 9 | constructor(address _proxyContract) { 10 | proxyContract = TestLogicContract(_proxyContract); 11 | } 12 | 13 | function setValue(uint256 value) public { 14 | proxyContract.setValue(value); 15 | } 16 | 17 | function getValue() public view returns (uint256) { 18 | return proxyContract.getValue(); 19 | } 20 | } -------------------------------------------------------------------------------- /contracts/GetProofLib.sol: -------------------------------------------------------------------------------- 1 | pragma solidity >=0.6.0 <0.8.0; 2 | 3 | import "./MerklePatriciaProof.sol"; 4 | import "solidity-rlp/contracts/RLPReader.sol"; 5 | 6 | library GetProofLib { 7 | using RLPReader for RLPReader.RLPItem; 8 | using RLPReader for RLPReader.Iterator; 9 | using RLPReader for bytes; 10 | 11 | struct Account { 12 | uint nonce; // 0 13 | uint balance; // 1 14 | bytes32 storageHash; // 2 15 | bytes32 codeHash; // 3 16 | } 17 | 18 | struct GetProof { 19 | bytes account; 20 | bytes accountProof; 21 | bytes storageProofs; 22 | } 23 | 24 | struct BlockHeader { 25 | bytes32 storageRoot; 26 | } 27 | 28 | struct StorageProof { 29 | // key of the storage 30 | bytes32 key; 31 | // value of the storage at `key` 32 | bytes value; 33 | // rlp-serialized array of rlp-serialized MerkleTree-Nodes, starting with the storageHash-Node 34 | bytes proof; 35 | } 36 | 37 | // TODO this can be removed 38 | function verifyProof(bytes memory rlpAccount, bytes memory rlpAccountNodes, bytes memory encodedPath, bytes32 root) internal pure returns (bool) { 39 | return MerklePatriciaProof.verify(rlpAccount, encodedPath, 
rlpAccountNodes, root); 40 | } 41 | 42 | 43 | function verifyStorageProof(bytes memory rlpProof, bytes32 storageHash) internal pure returns (bool) { 44 | StorageProof memory proof = parseStorageProof(rlpProof); 45 | bytes memory path = triePath(abi.encodePacked(proof.key)); 46 | 47 | return MerklePatriciaProof.verify( 48 | proof.value, path, proof.proof, storageHash 49 | ); 50 | } 51 | 52 | function parseStorageProof(bytes memory rlpProof) internal pure returns (StorageProof memory proof) { 53 | RLPReader.Iterator memory it = 54 | rlpProof.toRlpItem().iterator(); 55 | 56 | uint idx; 57 | while (it.hasNext()) { 58 | if (idx == 0) { 59 | proof.key = bytes32(it.next().toUint()); 60 | } else if (idx == 1) { 61 | proof.value = it.next().toBytes(); 62 | } else if (idx == 2) { 63 | proof.proof = it.next().toBytes(); 64 | } else { 65 | it.next(); 66 | } 67 | idx++; 68 | } 69 | return proof; 70 | } 71 | 72 | // todo only parses storageRoot for now. 73 | function parseBlockHeader(bytes memory _blockHeader) internal pure returns (BlockHeader memory blockHeader) { 74 | RLPReader.Iterator memory it = _blockHeader.toRlpItem().iterator(); 75 | 76 | uint idx; 77 | while (it.hasNext()) { 78 | if (idx == 3) { 79 | // storageRoot is at index 3 80 | bytes32 storageRoot; 81 | bytes memory storageRootBytes = it.next().toBytes(); 82 | assembly { 83 | storageRoot := mload(add(storageRootBytes, 32)) 84 | } 85 | blockHeader.storageRoot = storageRoot; 86 | return blockHeader; 87 | } else { 88 | it.next(); 89 | } 90 | 91 | idx++; 92 | } 93 | } 94 | 95 | function parseAccount(bytes memory rlpAccount) internal pure returns (Account memory account) { 96 | RLPReader.Iterator memory it = 97 | rlpAccount.toRlpItem().iterator(); 98 | 99 | uint idx; 100 | while (it.hasNext()) { 101 | if (idx == 0) { 102 | account.nonce = it.next().toUint(); 103 | } else if (idx == 1) { 104 | account.balance = it.next().toUint(); 105 | } else if (idx == 2) { 106 | account.storageHash = bytes32(it.next().toUint()); 107 
| } else if (idx == 3) { 108 | account.codeHash = bytes32(it.next().toUint()); 109 | } else { 110 | it.next(); 111 | } 112 | idx++; 113 | } 114 | 115 | return account; 116 | } 117 | 118 | function parseProofTest(bytes memory rlpProof) internal pure returns (bytes memory account, bytes memory accountProof, bytes memory storageProof) { 119 | GetProof memory proof = parseProof(rlpProof); 120 | account = proof.account; 121 | accountProof = proof.accountProof; 122 | storageProof = proof.storageProofs; 123 | return (account, accountProof, storageProof); 124 | } 125 | /** 126 | * @dev parses an rlp encoded EIP1186 proof 127 | * @return proof The parsed Proof 128 | */ 129 | function parseProof(bytes memory rlpProof) internal pure returns (GetProof memory proof) { 130 | RLPReader.Iterator memory it = 131 | rlpProof.toRlpItem().iterator(); 132 | 133 | uint idx; 134 | while (it.hasNext()) { 135 | if (idx == 0) { 136 | proof.account = it.next().toBytes(); 137 | } else if (idx == 1) { 138 | proof.accountProof = it.next().toBytes(); 139 | } else if (idx == 2) { 140 | proof.storageProofs = it.next().toBytes(); 141 | } else { 142 | it.next(); 143 | } 144 | idx++; 145 | } 146 | return proof; 147 | } 148 | 149 | /** 150 | * @dev Encodes the address `_a` as path leading to its account in the state trie 151 | * @return path The path in the state trie leading to the account 152 | */ 153 | function encodedAddress(address _a) internal pure returns (bytes memory) { 154 | return triePath(abi.encodePacked(_a)); 155 | } 156 | 157 | function triePath(bytes memory _key) internal pure returns (bytes memory path) { 158 | bytes memory hp = hex"00"; 159 | bytes memory key = abi.encodePacked(keccak256(_key)); 160 | path = abi.encodePacked(hp, key); 161 | } 162 | } 163 | -------------------------------------------------------------------------------- /contracts/MappingContract.sol: -------------------------------------------------------------------------------- 1 | //SPDX-License-Identifier: 
Unlicense 2 | pragma solidity >=0.6.0 <0.8.0; 3 | 4 | contract MappingContract { 5 | 6 | mapping(uint => uint) map; 7 | 8 | constructor() { 9 | } 10 | 11 | /** 12 | * @dev Set a list of storage keys 13 | */ 14 | function setStorageKey(bytes32[] memory keys, bytes32[] memory values) public { 15 | for (uint i = 0; i < keys.length; i++) { 16 | // store the value in the right slot 17 | bytes32 slot = keys[i]; 18 | bytes32 value = values[i]; 19 | assembly { 20 | sstore(slot, value) 21 | } 22 | } 23 | } 24 | 25 | function insert(uint _key, uint _value) public { 26 | map[_key] = _value; 27 | } 28 | 29 | function insertMultiple(uint[] memory _keys, uint[] memory _values) public { 30 | for (uint i = 0; i < _keys.length; i++) { 31 | map[_keys[i]] = _values[i]; 32 | } 33 | } 34 | 35 | function getValue(uint _key) public view returns (uint256) { 36 | return map[_key]; 37 | } 38 | 39 | function deleteValue(uint _key) public returns (bool) { 40 | map[_key] = 0x0; 41 | return true; 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /contracts/MerklePatriciaProof.sol: -------------------------------------------------------------------------------- 1 | // taken from https://github.com/KyberNetwork/peace-relay/blob/master/contracts/MerklePatriciaProof.sol 2 | pragma solidity >=0.6.0 <0.8.0; 3 | 4 | import "solidity-rlp/contracts/RLPReader.sol"; 5 | 6 | library MerklePatriciaProof { 7 | 8 | event ReturnValue(string msg, uint num, bytes currentNode, bytes32 nodekey); 9 | 10 | /* 11 | * @dev Verifies a merkle patricia proof. 12 | * @param value The terminating value in the trie. 13 | * @param encodedPath The path in the trie leading to value. 14 | * @param rlpParentNodes The rlp encoded stack of nodes. 15 | * @param root The root hash of the trie. 16 | * @return The boolean validity of the proof. 
17 | */ 18 | function verify(bytes memory value, bytes memory encodedPath, bytes memory rlpParentNodes, bytes32 root) internal pure returns (bool) { 19 | RLPReader.RLPItem memory item = RLPReader.toRlpItem(rlpParentNodes); 20 | RLPReader.RLPItem[] memory parentNodes = RLPReader.toList(item); 21 | bytes memory currentNode; 22 | RLPReader.RLPItem[] memory currentNodeList; 23 | 24 | // stateRoot 25 | bytes32 nodeKey = root; 26 | uint pathPtr = 0; 27 | 28 | bytes memory path = _getNibbleArray(encodedPath); 29 | if (path.length == 0) { 30 | return false; 31 | } 32 | 33 | for (uint i = 0; i < parentNodes.length; i++) { 34 | if (pathPtr > path.length) {return false;} 35 | currentNode = RLPReader.toRlpBytes(parentNodes[i]); 36 | if (nodeKey != keccak256(currentNode)) { 37 | return false;} 38 | currentNodeList = RLPReader.toList(parentNodes[i]); 39 | 40 | if (currentNodeList.length == 17) { 41 | if (pathPtr == path.length) { 42 | if (keccak256(RLPReader.toBytes(currentNodeList[16])) == keccak256(value)) { 43 | return true; 44 | } else { 45 | return false; 46 | } 47 | } 48 | 49 | uint8 nextPathNibble = uint8(path[pathPtr]); 50 | if (nextPathNibble > 16) { return false; } 51 | nodeKey = bytes32(RLPReader.toUint(currentNodeList[nextPathNibble])); 52 | pathPtr += 1; 53 | } else if (currentNodeList.length == 2) { 54 | pathPtr += _nibblesToTraverse(RLPReader.toBytes(currentNodeList[0]), path, pathPtr); 55 | 56 | if (pathPtr == path.length) {//leaf node 57 | if (keccak256(RLPReader.toBytes(currentNodeList[1])) == keccak256(value)) { 58 | return true; 59 | } else { 60 | return false; 61 | } 62 | } 63 | 64 | nodeKey = bytes32(RLPReader.toUint(currentNodeList[1])); 65 | } else { 66 | return false; 67 | } 68 | } 69 | return false; 70 | } 71 | 72 | function _nibblesToTraverse(bytes memory encodedPartialPath, bytes memory path, uint pathPtr) private pure returns (uint) { 73 | uint len; 74 | // encodedPartialPath has elements that are each two hex characters (1 byte), but partialPath 75 
| // and slicedPath have elements that are each one hex character (1 nibble) 76 | bytes memory partialPath = _getNibbleArray(encodedPartialPath); 77 | bytes memory slicedPath = new bytes(partialPath.length); 78 | 79 | // pathPtr counts nibbles in path 80 | // partialPath.length is a number of nibbles 81 | for (uint i = pathPtr; i < pathPtr + partialPath.length; i++) { 82 | byte pathNibble = path[i]; 83 | slicedPath[i - pathPtr] = pathNibble; 84 | } 85 | 86 | if (keccak256(partialPath) == keccak256(slicedPath)) { 87 | len = partialPath.length; 88 | } else { 89 | len = 0; 90 | } 91 | return len; 92 | } 93 | 94 | // bytes b must be hp encoded 95 | function _getNibbleArray(bytes memory b) private pure returns (bytes memory) { 96 | bytes memory nibbles; 97 | if (b.length > 0) { 98 | uint8 offset; 99 | uint8 hpNibble = uint8(_getNthNibbleOfBytes(0, b)); 100 | if (hpNibble == 1 || hpNibble == 3) { 101 | nibbles = new bytes(b.length * 2 - 1); 102 | byte oddNibble = _getNthNibbleOfBytes(1, b); 103 | nibbles[0] = oddNibble; 104 | offset = 1; 105 | } else { 106 | nibbles = new bytes(b.length * 2 - 2); 107 | offset = 0; 108 | } 109 | 110 | for (uint i = offset; i < nibbles.length; i++) { 111 | nibbles[i] = _getNthNibbleOfBytes(i - offset + 2, b); 112 | } 113 | } 114 | return nibbles; 115 | } 116 | 117 | /* 118 | *This function takes in the bytes string (hp encoded) and the value of N, to return Nth Nibble. 119 | *@param Value of N 120 | *@param Bytes String 121 | *@return ByteString[N] 122 | */ 123 | function _getNthNibbleOfBytes(uint n, bytes memory str) private pure returns (byte) { 124 | return byte(n % 2 == 0 ? 
uint8(str[n / 2]) / 0x10 : uint8(str[n / 2]) % 0x10); 125 | } 126 | } 127 | -------------------------------------------------------------------------------- /contracts/RLPWriter.sol: -------------------------------------------------------------------------------- 1 | //SPDX-License-Identifier: Unlicense 2 | pragma solidity >=0.6.0 <0.8.0; 3 | 4 | /** 5 | * @title RLPWriter 6 | * @dev helper functions to rlp-encode items 7 | * @notice adapted from https://github.com/bakaoh/solidity-rlp-encode/blob/master/contracts/RLPEncode.sol 8 | */ 9 | library RLPWriter { 10 | 11 | 12 | /** 13 | * @dev RLP encodes a series of bytes. 14 | * @param _item The bytes to encode. 15 | * @return The RLP encoded bytes. 16 | */ 17 | function encodeBytes(bytes memory _item) internal pure returns (bytes memory) { 18 | bytes memory encoded; 19 | if (_item.length == 1 && uint8(_item[0]) <= 128) { 20 | encoded = _item; 21 | } else { 22 | encoded = concat(encodeLength(_item.length, 128), _item); 23 | } 24 | return encoded; 25 | } 26 | 27 | /** 28 | * @dev RLP encodes a list of RLP encoded byte byte strings. 29 | * @param _list The list of RLP encoded byte strings. 30 | * @return The RLP encoded list of items in bytes. 31 | */ 32 | function encodeList(bytes[] memory _list) internal pure returns (bytes memory) { 33 | bytes memory list = flatten(_list); 34 | return concat(encodeLength(list.length, 192), list); 35 | } 36 | 37 | /** 38 | * @dev Encode the first byte, followed by the `len` in binary form if `length` is more than 55. 39 | * @param len The length of the string or the payload. 40 | * @param offset 128 if item is string, 192 if item is list. 41 | * @return RLP encoded bytes. 
42 | */ 43 | function encodeLength(uint len, uint offset) private pure returns (bytes memory) { 44 | bytes memory encoded; 45 | if (len < 56) { 46 | encoded = new bytes(1); 47 | encoded[0] = bytes32(len + offset)[31]; 48 | } else { 49 | uint lenLen; 50 | uint i = 1; 51 | while (len / i != 0) { 52 | lenLen++; 53 | i *= 256; 54 | } 55 | 56 | encoded = new bytes(lenLen + 1); 57 | encoded[0] = bytes32(lenLen + offset + 55)[31]; 58 | for (i = 1; i <= lenLen; i++) { 59 | encoded[i] = bytes32((len / (256 ** (lenLen - i))) % 256)[31]; 60 | } 61 | } 62 | return encoded; 63 | } 64 | 65 | /** 66 | * @dev RLP encodes a uint. 67 | * @param self The uint to encode. 68 | * @return The RLP encoded uint in bytes. 69 | */ 70 | function encodeUint(uint self) internal pure returns (bytes memory) { 71 | return encodeBytes(toBinary(self)); 72 | } 73 | 74 | /** 75 | * @dev Encodes a keccak256 hash value 76 | * @param _hash The hash to encode. 77 | * @return The RLP encoded hash in bytes 78 | */ 79 | function encodeKeccak256Hash(bytes32 _hash) internal pure returns (bytes memory) { 80 | bytes memory hashBytes = new bytes(32); 81 | assembly { 82 | mstore(add(hashBytes, 32), _hash) 83 | } 84 | return encodeBytes(hashBytes); 85 | } 86 | 87 | /** 88 | * @dev Encode integer in big endian binary form with no leading zeroes. 89 | * @notice TODO: This should be optimized with assembly to save gas costs. 90 | * @param _x The integer to encode. 91 | * @return RLP encoded bytes. 92 | */ 93 | function toBinary(uint _x) internal pure returns (bytes memory) { 94 | bytes memory b = new bytes(32); 95 | assembly { 96 | mstore(add(b, 32), _x) 97 | } 98 | uint i; 99 | for (i = 0; i < 32; i++) { 100 | if (b[i] != 0) { 101 | break; 102 | } 103 | } 104 | bytes memory res = new bytes(32 - i); 105 | for (uint j = 0; j < res.length; j++) { 106 | res[j] = b[i++]; 107 | } 108 | return res; 109 | } 110 | 111 | /** 112 | * @dev Copies a piece of memory to another location. 
113 | * @notice From: https://github.com/Arachnid/solidity-stringutils/blob/master/src/strings.sol. 114 | * @param _dest Destination location. 115 | * @param _src Source location. 116 | * @param _len Length of memory to copy. 117 | */ 118 | function _copy(uint _dest, uint _src, uint _len) private pure { 119 | uint dest = _dest; 120 | uint src = _src; 121 | uint len = _len; 122 | 123 | for (; len >= 32; len -= 32) { 124 | assembly { 125 | mstore(dest, mload(src)) 126 | } 127 | dest += 32; 128 | src += 32; 129 | } 130 | 131 | uint mask = 256 ** (32 - len) - 1; 132 | assembly { 133 | let srcpart := and(mload(src), not(mask)) 134 | let destpart := and(mload(dest), mask) 135 | mstore(dest, or(destpart, srcpart)) 136 | } 137 | } 138 | 139 | /** 140 | * @dev Flattens a list of byte strings into one byte string. 141 | * @notice From: https://github.com/sammayo/solidity-rlp-encoder/blob/master/RLPEncode.sol. 142 | * @param _list List of byte strings to flatten. 143 | * @return The flattened byte string. 144 | */ 145 | function flatten(bytes[] memory _list) private pure returns (bytes memory) { 146 | if (_list.length == 0) { 147 | return new bytes(0); 148 | } 149 | 150 | uint len; 151 | uint i; 152 | for (i = 0; i < _list.length; i++) { 153 | len += _list[i].length; 154 | } 155 | 156 | bytes memory flattened = new bytes(len); 157 | uint flattenedPtr; 158 | assembly {flattenedPtr := add(flattened, 0x20)} 159 | 160 | for (i = 0; i < _list.length; i++) { 161 | bytes memory item = _list[i]; 162 | 163 | uint listPtr; 164 | assembly {listPtr := add(item, 0x20)} 165 | 166 | _copy(flattenedPtr, listPtr, item.length); 167 | flattenedPtr += _list[i].length; 168 | } 169 | 170 | return flattened; 171 | } 172 | 173 | /** 174 | * @dev Concatenates two bytes. 175 | * @notice From: https://github.com/GNSPS/solidity-bytes-utils/blob/master/contracts/BytesLib.sol. 176 | * @param _preBytes First byte string. 177 | * @param _postBytes Second byte string. 
178 | * @return Both byte string combined. 179 | */ 180 | function concat(bytes memory _preBytes, bytes memory _postBytes) private pure returns (bytes memory) { 181 | bytes memory tempBytes; 182 | 183 | assembly { 184 | tempBytes := mload(0x40) 185 | 186 | let length := mload(_preBytes) 187 | mstore(tempBytes, length) 188 | 189 | let mc := add(tempBytes, 0x20) 190 | let end := add(mc, length) 191 | 192 | for { 193 | let cc := add(_preBytes, 0x20) 194 | } lt(mc, end) { 195 | mc := add(mc, 0x20) 196 | cc := add(cc, 0x20) 197 | } { 198 | mstore(mc, mload(cc)) 199 | } 200 | 201 | length := mload(_postBytes) 202 | mstore(tempBytes, add(length, mload(tempBytes))) 203 | 204 | mc := end 205 | end := add(mc, length) 206 | 207 | for { 208 | let cc := add(_postBytes, 0x20) 209 | } lt(mc, end) { 210 | mc := add(mc, 0x20) 211 | cc := add(cc, 0x20) 212 | } { 213 | mstore(mc, mload(cc)) 214 | } 215 | 216 | mstore(0x40, and( 217 | add(add(end, iszero(add(length, mload(_preBytes)))), 31), 218 | not(31) 219 | )) 220 | } 221 | return tempBytes; 222 | } 223 | } 224 | -------------------------------------------------------------------------------- /contracts/RelayContract.sol: -------------------------------------------------------------------------------- 1 | //SPDX-License-Identifier: Unlicense 2 | pragma solidity >=0.6.0 <0.8.0; 3 | 4 | import './ProxyContract.sol'; 5 | import './GetProofLib.sol'; 6 | 7 | contract RelayContract { 8 | 9 | struct ProxyContractInfo { 10 | // The root of storage trie of the contract. 
11 | bytes32 storageRoot; 12 | // State of migration if successfull or not 13 | bool migrationState; 14 | // block number of the src contract it is currently synched with 15 | uint blockNumber; 16 | } 17 | 18 | mapping(address => ProxyContractInfo) proxyStorageInfos; 19 | mapping(uint => bytes32) srcContractStateRoots; 20 | uint latestBlockNr; 21 | 22 | constructor() public { 23 | } 24 | 25 | /** 26 | * @dev Called by the proxy to update its state, only after migrationState validation 27 | */ 28 | function updateProxyInfo(bytes32 _newStorage, uint _blockNumber) public { 29 | require(proxyStorageInfos[msg.sender].blockNumber < _blockNumber); 30 | proxyStorageInfos[msg.sender].storageRoot = _newStorage; 31 | proxyStorageInfos[msg.sender].migrationState = true; 32 | proxyStorageInfos[msg.sender].blockNumber = _blockNumber; 33 | } 34 | 35 | function addBlock(bytes32 _stateRoot, uint256 _blockNumber) public { 36 | srcContractStateRoots[_blockNumber] = _stateRoot; 37 | if (_blockNumber > latestBlockNr) latestBlockNr = _blockNumber; 38 | } 39 | 40 | /** 41 | * @dev return state root at the respective blockNumber 42 | */ 43 | function getStateRoot(uint _blockNumber) public view returns (bytes32) { 44 | return srcContractStateRoots[_blockNumber]; 45 | } 46 | 47 | /** 48 | * @dev return the calling contract's storage root (only correct if stored by the contract before only!) 
49 | */ 50 | function getStorageRoot() public view returns (bytes32) { 51 | return proxyStorageInfos[msg.sender].storageRoot; 52 | } 53 | 54 | /** 55 | * @dev return migration state of passed proxy contract 56 | * @param _contractAddress address of proxy contract 57 | */ 58 | function getMigrationState(address _contractAddress) public view returns (bool) { 59 | return proxyStorageInfos[_contractAddress].migrationState; 60 | } 61 | 62 | /** 63 | * @dev return current synched block number of src chain from proxy contract 64 | * @param _proxyContractAddress address of proxy contract 65 | */ 66 | function getCurrentBlockNumber(address _proxyContractAddress) public view returns (uint) { 67 | return proxyStorageInfos[_proxyContractAddress].blockNumber; 68 | } 69 | 70 | function getLatestBlockNumber() public view returns (uint) { 71 | return latestBlockNr; 72 | } 73 | 74 | /** 75 | * @dev Used to access the Proxy's abi 76 | */ 77 | function getProxy(address payable proxyAddress) internal pure returns (ProxyContract) { 78 | return ProxyContract(proxyAddress); 79 | } 80 | 81 | /** 82 | * @dev checks if the migration of the source contract to the proxy contract was successful 83 | * @param sourceAccountProof contains source contract account information and the merkle patricia proof of the account 84 | * @param proxyAccountProof contains proxy contract account information and the merkle patricia proof of the account 85 | * @param proxyChainBlockHeader latest block header of the proxy contract's chain 86 | * @param proxyAddress address from proxy contract 87 | * @param proxyChainBlockNumber block number from the proxy chain block header, this is needed because the blockNumber in the header is a hex string 88 | * @param srcChainBlockNumber block number from the src chain from which we take the stateRoot from the srcContract 89 | */ 90 | function verifyMigrateContract(bytes memory sourceAccountProof, bytes memory proxyAccountProof, bytes memory proxyChainBlockHeader, address 
payable proxyAddress, uint proxyChainBlockNumber, uint srcChainBlockNumber) public { 91 | GetProofLib.BlockHeader memory blockHeader = GetProofLib.parseBlockHeader(proxyChainBlockHeader); 92 | 93 | // compare block header hashes 94 | bytes32 givenBlockHeaderHash = keccak256(proxyChainBlockHeader); 95 | bytes32 actualBlockHeaderHash = blockhash(proxyChainBlockNumber); 96 | require(givenBlockHeaderHash == actualBlockHeaderHash, 'Given proxy chain block header is faulty'); 97 | 98 | // verify sourceAccountProof 99 | // validate that the proof was obtained for the source contract and the account's storage is part of the current state 100 | ProxyContract proxyContract = getProxy(proxyAddress); 101 | address sourceAddress = proxyContract.getSourceAddress(); 102 | bytes memory path = GetProofLib.encodedAddress(sourceAddress); 103 | GetProofLib.GetProof memory getProof = GetProofLib.parseProof(sourceAccountProof); 104 | require(GetProofLib.verifyProof(getProof.account, getProof.accountProof, path, srcContractStateRoots[srcChainBlockNumber]), "Failed to verify the account proof"); 105 | GetProofLib.Account memory sourceAccount = GetProofLib.parseAccount(getProof.account); 106 | 107 | // verify proxyAccountProof 108 | // validate that the proof was obtained for the source contract and the account's storage is part of the current state 109 | path = GetProofLib.encodedAddress(proxyAddress); 110 | getProof = GetProofLib.parseProof(proxyAccountProof); 111 | require(GetProofLib.verifyProof(getProof.account, getProof.accountProof, path, blockHeader.storageRoot), "Failed to verify the account proof"); 112 | GetProofLib.Account memory proxyAccount = GetProofLib.parseAccount(getProof.account); 113 | 114 | // compare storageRootHashes 115 | require(sourceAccount.storageHash == proxyAccount.storageHash, 'storageHashes of the contracts dont match'); 116 | 117 | // update proxy info -> complete migration 118 | proxyStorageInfos[proxyAddress].storageRoot = proxyAccount.storageHash; 119 | 
proxyStorageInfos[proxyAddress].migrationState = true; 120 | proxyStorageInfos[proxyAddress].blockNumber = srcChainBlockNumber; 121 | } 122 | } 123 | -------------------------------------------------------------------------------- /contracts/SimpleStorage.sol: -------------------------------------------------------------------------------- 1 | //SPDX-License-Identifier: Unlicense 2 | pragma solidity >=0.6.0 <0.8.0; 3 | 4 | contract SimpleStorage { 5 | uint a; 6 | uint b = 42; 7 | address owner; 8 | mapping(address => uint) values; 9 | 10 | mapping(uint => uint) map; 11 | 12 | constructor() { 13 | } 14 | 15 | function setA(uint _a) public { 16 | a = _a; 17 | } 18 | 19 | function getA() public view returns (uint) { 20 | return a; 21 | } 22 | 23 | function setB(uint _b) public { 24 | b = _b; 25 | } 26 | 27 | function getB() public view returns (uint) { 28 | return b; 29 | } 30 | 31 | function getValue(address _address) public view returns (uint) { 32 | return values[_address]; 33 | } 34 | 35 | function setValue(uint _value) public { 36 | values[msg.sender] = _value; 37 | } 38 | 39 | function insert(uint _key, uint _value) public { 40 | map[_key] = _value; 41 | } 42 | 43 | function get(uint _key) public view returns (uint){ 44 | return map[_key]; 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /contracts/StorageImitator.sol: -------------------------------------------------------------------------------- 1 | //SPDX-License-Identifier: Unlicense 2 | pragma solidity >=0.6.0 <0.8.0; 3 | 4 | contract StorageImitator { 5 | /** 6 | * @dev Set a list of storage keys 7 | */ 8 | function setStorageKey(bytes32[] memory keys, bytes32[] memory values) public { 9 | for (uint i = 0; i < keys.length; i++) { 10 | // store the value in the right slot 11 | bytes32 slot = keys[i]; 12 | bytes32 value = values[i]; 13 | assembly { 14 | sstore(slot, value) 15 | } 16 | } 17 | } 18 | } 19 | 
-------------------------------------------------------------------------------- /contracts/SyncCandidate.sol: -------------------------------------------------------------------------------- 1 | //SPDX-License-Identifier: Unlicense 2 | pragma solidity >=0.6.0 <0.8.0; 3 | 4 | contract SyncCandidate { 5 | 6 | uint256 valueA; 7 | uint256 valueB; 8 | mapping(uint => uint) map; 9 | 10 | /** 11 | * @dev set a new value to `valueA` 12 | * @return The old value that was replaced 13 | */ 14 | function setValueA(uint256 _valueA) public returns (uint256) { 15 | (_valueA, valueA) = (valueA, _valueA); 16 | return _valueA; 17 | } 18 | 19 | function getValueA() public view returns (uint256) { 20 | return valueA; 21 | } 22 | 23 | function setValueB(uint256 _valueB) public returns (uint256) { 24 | (_valueB, valueB) = (valueB, _valueB); 25 | return _valueB; 26 | } 27 | 28 | function getValueB() public view returns (uint256) { 29 | return valueB; 30 | } 31 | 32 | function insert(uint _key, uint _value) public { 33 | map[_key] = _value; 34 | } 35 | 36 | function getValue(uint _key) public view returns (uint256) { 37 | return map[_key]; 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /contracts/TestLogicContract.sol: -------------------------------------------------------------------------------- 1 | //SPDX-License-Identifier: Unlicense 2 | pragma solidity >=0.6.0 <0.8.0; 3 | 4 | /** 5 | * @dev A contract to test proxy calls 6 | */ 7 | contract TestLogicContract { 8 | 9 | address _offset; 10 | 11 | uint256 value; 12 | 13 | function getValue() public view returns (uint256) { 14 | return value; 15 | } 16 | 17 | function setValue(uint256 _value) public { 18 | value = _value; 19 | } 20 | 21 | function valuePure() public pure returns (uint256) { 22 | return 42; 23 | } 24 | 25 | } 26 | -------------------------------------------------------------------------------- /contracts/TestProxyContract.sol: 
-------------------------------------------------------------------------------- 1 | //SPDX-License-Identifier: Unlicense 2 | pragma solidity >=0.6.0 <0.8.0; 3 | 4 | /** 5 | * @dev A contract to test delegating static calls 6 | */ 7 | contract TestProxyContract { 8 | 9 | address logic; 10 | 11 | uint256 value; 12 | 13 | constructor(address _logic) { 14 | logic = _logic; 15 | value = 37; 16 | } 17 | 18 | function _implementation() internal view returns (address) { 19 | return logic; 20 | } 21 | 22 | 23 | fallback() external { 24 | _fallback(); 25 | } 26 | 27 | function _fallback() internal { 28 | _delegateLogic(); 29 | } 30 | 31 | /** 32 | * @dev Delegates the call to the logic contract after putting the proxy in a static context, 33 | * preventing any state modifications that might occur in the logic's function 34 | */ 35 | function _delegateLogic() internal { 36 | address addr = address(this); 37 | if (msg.sender == addr) { 38 | // solhint-disable-next-line no-inline-assembly 39 | address logicAddr = _implementation(); 40 | assembly { 41 | calldatacopy(0, 0, calldatasize()) 42 | let result := delegatecall(gas(), logicAddr, 0, calldatasize(), 0, 0) 43 | returndatacopy(0, 0, returndatasize()) 44 | switch result 45 | case 0 {revert(0, returndatasize())} 46 | default {return (0, returndatasize())} 47 | } 48 | } else { 49 | (bool result,) = addr.staticcall(msg.data); 50 | assembly { 51 | let ptr := mload(0x40) 52 | returndatacopy(ptr, 0, returndatasize()) 53 | switch result 54 | case 0 { revert(ptr, returndatasize()) } 55 | default { return(ptr, returndatasize()) } 56 | } 57 | } 58 | } 59 | } 60 | -------------------------------------------------------------------------------- /evaluation/config/test-cli-config.json: -------------------------------------------------------------------------------- 1 | { 2 | "srcChainRpcUrl": "http://localhost:8545", 3 | "targetChainRpcUrl": "http://localhost:8547", 4 | "connectionTimeout": "36000", 5 | "logLevel": "info", 6 | 
"targetBlocknr": "latest", 7 | "gasLimit": 80000000, 8 | "keyValuePairPerBatch": 100 9 | } -------------------------------------------------------------------------------- /evaluation/csv-files/.gitignore: -------------------------------------------------------------------------------- 1 | * -------------------------------------------------------------------------------- /evaluation/eval-utils.ts: -------------------------------------------------------------------------------- 1 | import { stringify } from 'csv-stringify'; 2 | import { parse } from 'csv-parse/sync'; 3 | import { BigNumberish, ethers } from 'ethers'; 4 | import fs from 'fs'; 5 | import * as rlp from 'rlp'; 6 | import { StorageProof } from '../src/proofHandler/Types'; 7 | 8 | export interface CSVDataTemplateBasicMTEdge { 9 | from: string; 10 | to: string; 11 | } 12 | export interface CSVDataTemplateSingleValueMultiple extends CSVDataTemplateSingleValue { 13 | iteration: number | undefined; 14 | } 15 | export interface CSVDataTemplateSingleValue extends CSVDataTemplatePerMTHeight { 16 | changed_value_index: number | undefined; 17 | extensionsCounter: number; 18 | } 19 | 20 | export interface CSVDataTemplateMultipleValues { 21 | mapSize: number; 22 | changed_value_count: number; 23 | max_mpt_depth: number; 24 | used_gas: number; 25 | sequential: Boolean; 26 | changeMigrationTime: number; 27 | } 28 | 29 | export interface CSVDataTemplatePerMTHeight { 30 | mapSize: number; 31 | value_mpt_depth: number | undefined; 32 | max_mpt_depth: number; 33 | used_gas: number; 34 | changeMigrationTime: number; 35 | } 36 | 37 | export class CSVManager { 38 | private dir: string = './evaluation/csv-files'; 39 | 40 | private fileName: string; 41 | 42 | private data: Array; 43 | 44 | constructor(fileName: string, dir?: string) { 45 | this.fileName = fileName; 46 | this.dir = dir ?? 
this.dir; 47 | this.data = []; 48 | } 49 | 50 | pushData(data: T) { 51 | this.data.push(data); 52 | } 53 | 54 | async writeTofile() { 55 | // turn "Sun May 30 2021 18:19:20 +0200 (Central European Summer Time)" 56 | // into "Sun_May_30_2021_18:19:20" 57 | const timeString = new Date().toString().replace(/ GMT[\w\W]+/g, '').replace(/\s/g, '_'); 58 | 59 | return new Promise((resolve) => { 60 | const writeStream = fs.createWriteStream(`${this.dir}/${timeString}_${this.fileName}`); 61 | const csvStringifier = stringify(this.data, { header: true }); 62 | 63 | writeStream.on('finish', () => { 64 | resolve(resolve); 65 | }); 66 | csvStringifier.pipe(writeStream); 67 | }); 68 | } 69 | 70 | readFromFile() { 71 | const fileContent = fs.readFileSync(`${this.dir}/${this.fileName}`); 72 | const parsedContent = parse(fileContent, { columns: false, skipEmptyLines: true, fromLine: 2 }); 73 | return parsedContent; 74 | } 75 | } 76 | 77 | export function getExtensionsAmountLeadingToValue(value: BigNumberish | undefined, storageProofs: StorageProof[] | undefined): number { 78 | if (value === undefined || storageProofs === undefined) { 79 | return 0; 80 | } 81 | 82 | // find proof with value 83 | const storageProof = storageProofs.find((proof: StorageProof) => ethers.BigNumber.from(proof.value).eq(ethers.BigNumber.from(value))); 84 | 85 | if (storageProof === undefined) { 86 | return 0; 87 | } 88 | 89 | // count extensions 90 | let extensionsCounter = 0; 91 | storageProof.proof.forEach((encodedString: string, index: number) => { 92 | const node = rlp.decode(encodedString); 93 | if ((node as Buffer[]).length === 2 && index !== storageProof.proof.length - 1) { 94 | extensionsCounter += 1; 95 | } 96 | }); 97 | 98 | return extensionsCounter; 99 | } 100 | -------------------------------------------------------------------------------- /evaluation/get-mt-for-map-sizes-1-1000.ts: -------------------------------------------------------------------------------- 1 | import { BigNumber, ethers } 
from 'ethers'; 2 | import { expect } from 'chai'; 3 | import { JsonRpcProvider } from '@ethersproject/providers'; 4 | import { SignerWithAddress } from '@nomiclabs/hardhat-ethers/signers'; 5 | import { CSVDataTemplateBasicMTEdge, CSVManager } from './eval-utils'; 6 | import { TestChainProxy, TestCLI } from '../test/test-utils'; 7 | import { 8 | RelayContract__factory, MappingContract, MappingContract__factory, RelayContract, 9 | } from '../src-gen/types'; 10 | import { logger } from '../src/utils/logger'; 11 | import { getAllKeys } from '../src/utils/utils'; 12 | import GetProof from '../src/proofHandler/GetProof'; 13 | import { TxContractInteractionOptions } from '../src/cli/smart-sync'; 14 | import FileHandler from '../src/utils/fileHandler'; 15 | 16 | const MAX_VALUE = 1000000; 17 | 18 | describe('get-mt-for-map-sizes-1-1000', async () => { 19 | let srcDeployer: SignerWithAddress; 20 | let targetDeployer: SignerWithAddress; 21 | let srcContract: MappingContract; 22 | let logicContract: MappingContract; 23 | let factory: MappingContract__factory; 24 | let srcProvider: JsonRpcProvider; 25 | let targetProvider: JsonRpcProvider; 26 | let relayContract: RelayContract; 27 | let chainConfigs: TxContractInteractionOptions | undefined; 28 | let csvManager: CSVManager; 29 | let chainProxy: TestChainProxy; 30 | 31 | before(async () => { 32 | const fh = new FileHandler(TestCLI.defaultEvaluationConfigFile); 33 | chainConfigs = fh.getJSON(); 34 | if (!chainConfigs) { 35 | logger.error(`No config available under ${TestCLI.defaultEvaluationConfigFile}`); 36 | process.exit(-1); 37 | } 38 | srcProvider = new ethers.providers.JsonRpcProvider({ url: chainConfigs.srcChainRpcUrl, timeout: BigNumber.from(chainConfigs.connectionTimeout).toNumber() }); 39 | targetProvider = new ethers.providers.JsonRpcProvider({ url: chainConfigs.targetChainRpcUrl, timeout: BigNumber.from(chainConfigs.connectionTimeout).toNumber() }); 40 | srcDeployer = await 
SignerWithAddress.create(srcProvider.getSigner()); 41 | targetDeployer = await SignerWithAddress.create(targetProvider.getSigner()); 42 | logger.setSettings({ minLevel: 'info', name: 'get-mt-for-map-sizes-1-1000.ts' }); 43 | }); 44 | 45 | after(async () => { 46 | }); 47 | 48 | beforeEach(async () => { 49 | factory = new MappingContract__factory(srcDeployer); 50 | srcContract = await factory.deploy(); 51 | logicContract = await factory.deploy(); 52 | // deploy the relay contract 53 | const Relayer = new RelayContract__factory(targetDeployer); 54 | relayContract = await Relayer.deploy(); 55 | if (!chainConfigs) { 56 | logger.error(`No config available under ${TestCLI.defaultTestConfigFile}`); 57 | process.exit(-1); 58 | } 59 | chainProxy = new TestChainProxy(srcContract, logicContract, chainConfigs, srcDeployer, targetDeployer, relayContract, srcProvider, targetProvider); 60 | }); 61 | 62 | afterEach(async () => { 63 | await csvManager.writeTofile(); 64 | }); 65 | 66 | it('Contract with map containing 10 values, update 1 value per mpt height', async () => { 67 | const mapSize = 10; 68 | const initialization = await chainProxy.initializeProxyContract(mapSize, MAX_VALUE); 69 | expect(initialization.migrationState).to.be.true; 70 | csvManager = new CSVManager<{ from: string, to: string }>('10_edges.csv'); 71 | const theKeys = await getAllKeys(srcContract.address, srcProvider); 72 | const proofer = new GetProof(await srcProvider.send('eth_getProof', [srcContract.address, theKeys])); 73 | const existingPairs: { from: string, to: string }[] = []; 74 | proofer.storageProof.forEach((proof) => { 75 | for (let i = 1; i < proof.proof.length; i += 1) { 76 | const fromKec = ethers.utils.keccak256(proof.proof[i - 1]); 77 | const toKec = ethers.utils.keccak256(proof.proof[i]); 78 | const index = existingPairs.findIndex((pair) => pair.from === fromKec && pair.to === toKec); 79 | if (index < 0) { 80 | existingPairs.push({ from: fromKec, to: toKec }); 81 | csvManager.pushData({ from: 
fromKec, to: toKec }); 82 | } 83 | } 84 | }); 85 | }); 86 | 87 | it('Contract with map containing 100 values, update 1 value per mpt height', async () => { 88 | const mapSize = 100; 89 | const initialization = await chainProxy.initializeProxyContract(mapSize, MAX_VALUE); 90 | expect(initialization.migrationState).to.be.true; 91 | csvManager = new CSVManager<{ from: string, to: string }>('100_edges.csv'); 92 | const theKeys = await getAllKeys(srcContract.address, srcProvider); 93 | const proofer = new GetProof(await srcProvider.send('eth_getProof', [srcContract.address, theKeys])); 94 | const existingPairs: { from: string, to: string }[] = []; 95 | proofer.storageProof.forEach((proof) => { 96 | for (let i = 1; i < proof.proof.length; i += 1) { 97 | const fromKec = ethers.utils.keccak256(proof.proof[i - 1]); 98 | const toKec = ethers.utils.keccak256(proof.proof[i]); 99 | const index = existingPairs.findIndex((pair) => pair.from === fromKec && pair.to === toKec); 100 | if (index < 0) { 101 | existingPairs.push({ from: fromKec, to: toKec }); 102 | csvManager.pushData({ from: fromKec, to: toKec }); 103 | } 104 | } 105 | }); 106 | }); 107 | 108 | it('Contract with map containing 1000 values, update 1 value per mpt height', async () => { 109 | const mapSize = 1000; 110 | const initialization = await chainProxy.initializeProxyContract(mapSize, MAX_VALUE); 111 | expect(initialization.migrationState).to.be.true; 112 | csvManager = new CSVManager<{ from: string, to: string }>('1000_edges.csv'); 113 | const theKeys = await getAllKeys(srcContract.address, srcProvider); 114 | const proofer = new GetProof(await srcProvider.send('eth_getProof', [srcContract.address, theKeys])); 115 | const existingPairs: { from: string, to: string }[] = []; 116 | proofer.storageProof.forEach((proof) => { 117 | for (let i = 1; i < proof.proof.length; i += 1) { 118 | const fromKec = ethers.utils.keccak256(proof.proof[i - 1]); 119 | const toKec = ethers.utils.keccak256(proof.proof[i]); 120 | const 
index = existingPairs.findIndex((pair) => pair.from === fromKec && pair.to === toKec); 121 | if (index < 0) { 122 | existingPairs.push({ from: fromKec, to: toKec }); 123 | csvManager.pushData({ from: fromKec, to: toKec }); 124 | } 125 | } 126 | }); 127 | }); 128 | }); 129 | -------------------------------------------------------------------------------- /evaluation/r_scripts/mt_plotter.R: -------------------------------------------------------------------------------- 1 | install.packages("tidyverse") 2 | install.packages('ggraph') 3 | install.packages('igraph') 4 | library(igraph) 5 | library(tidyverse) 6 | library(ggraph) 7 | 8 | setwd('../csv-files/') 9 | 10 | edgesFilesList <- list.files(pattern = "+_100_edges.csv") 11 | edges <- read.csv(edgesFilesList, header=TRUE) 12 | 13 | mygraph <- graph_from_data_frame(edges) 14 | 15 | ggraph(mygraph, layout = 'dendrogram', circular = FALSE) + 16 | geom_edge_diagonal() + 17 | geom_node_point() + 18 | theme_void() -------------------------------------------------------------------------------- /evaluation/update-multiple-deep-values-with-map-sizes-1-1000.ts: -------------------------------------------------------------------------------- 1 | import { ethers } from 'ethers'; 2 | import { expect } from 'chai'; 3 | import { JsonRpcProvider } from '@ethersproject/providers'; 4 | import { SignerWithAddress } from '@nomiclabs/hardhat-ethers/signers'; 5 | import { BigNumber, BigNumberish } from '@ethersproject/bignumber'; 6 | import { TestChainProxy, TestCLI } from '../test/test-utils'; 7 | import { CSVDataTemplateMultipleValues, CSVManager } from './eval-utils'; 8 | import { 9 | RelayContract__factory, MappingContract, MappingContract__factory, RelayContract, 10 | } from '../src-gen/types'; 11 | import DiffHandler from '../src/diffHandler/DiffHandler'; 12 | import { logger } from '../src/utils/logger'; 13 | import StorageDiff from '../src/diffHandler/StorageDiff'; 14 | import { TxContractInteractionOptions } from 
'../src/cli/smart-sync'; 15 | import FileHandler from '../src/utils/fileHandler'; 16 | 17 | const MAX_VALUE = 1000000; 18 | const MAX_CHANGED_VALUES = 100; 19 | 20 | describe('update-multiple-values-with-map-sizes-1-1000', async () => { 21 | let srcDeployer: SignerWithAddress; 22 | let targetDeployer: SignerWithAddress; 23 | let srcContract: MappingContract; 24 | let logicContract: MappingContract; 25 | let factory: MappingContract__factory; 26 | let srcProvider: JsonRpcProvider; 27 | let targetProvider: JsonRpcProvider; 28 | let relayContract: RelayContract; 29 | let chainConfigs: TxContractInteractionOptions | undefined; 30 | let chainProxy: TestChainProxy; 31 | let csvManager: CSVManager; 32 | let differ: DiffHandler; 33 | let currBlockNr: number; 34 | 35 | before(async () => { 36 | const fh = new FileHandler(TestCLI.defaultEvaluationConfigFile); 37 | chainConfigs = fh.getJSON(); 38 | if (!chainConfigs) { 39 | logger.error(`No config available under ${TestCLI.defaultEvaluationConfigFile}`); 40 | process.exit(-1); 41 | } 42 | srcProvider = new ethers.providers.JsonRpcProvider({ url: chainConfigs.srcChainRpcUrl, timeout: BigNumber.from(chainConfigs.connectionTimeout).toNumber() }); 43 | targetProvider = new ethers.providers.JsonRpcProvider({ url: chainConfigs.targetChainRpcUrl, timeout: BigNumber.from(chainConfigs.connectionTimeout).toNumber() }); 44 | srcDeployer = await SignerWithAddress.create(srcProvider.getSigner()); 45 | targetDeployer = await SignerWithAddress.create(targetProvider.getSigner()); 46 | logger.setSettings({ minLevel: 'info', name: 'update-multiple-deep-values-with-map-sizes-1-1000.ts' }); 47 | csvManager = new CSVManager('measurements-multiple-deep-values-with-map-sizes-1-to-1000.csv'); 48 | differ = new DiffHandler(srcProvider, targetProvider); 49 | }); 50 | 51 | after(async () => { 52 | await csvManager.writeTofile(); 53 | }); 54 | 55 | beforeEach(async () => { 56 | factory = new MappingContract__factory(srcDeployer); 57 | srcContract = 
await factory.deploy(); 58 | logicContract = await factory.deploy(); 59 | // deploy the relay contract 60 | const Relayer = new RelayContract__factory(targetDeployer); 61 | relayContract = await Relayer.deploy(); 62 | if (!chainConfigs) { 63 | logger.error(`No config available under ${TestCLI.defaultTestConfigFile}`); 64 | process.exit(-1); 65 | } 66 | chainProxy = new TestChainProxy(srcContract, logicContract, chainConfigs, srcDeployer, targetDeployer, relayContract, srcProvider, targetProvider); 67 | }); 68 | 69 | afterEach(async () => { 70 | }); 71 | 72 | it('Contract with map containing 10 values, update multiple deep values per iteration', async () => { 73 | const mapSize = 10; 74 | const initialization = await chainProxy.initializeProxyContract(mapSize, MAX_VALUE); 75 | expect(initialization.migrationState).to.be.true; 76 | currBlockNr = await srcProvider.getBlockNumber() + 1; 77 | 78 | for (let i = 0; i < mapSize; i += 1) { 79 | const valueCount = i + 1; 80 | 81 | // changing values at srcContract 82 | const result = await chainProxy.changeDeepestValues(valueCount, MAX_VALUE); 83 | expect(result).to.be.true; 84 | 85 | // migrate changes to proxy contract 86 | // get the diff set, the storage keys for the changed values 87 | const start: number = (new Date()).getTime(); 88 | const diff: StorageDiff = await differ.getDiffFromSrcContractTxs(srcContract.address, 'latest', currBlockNr); 89 | const changedKeys: Array = diff.getKeys(); 90 | logger.debug(`valueCount: ${valueCount}, changedKeys: ${changedKeys.length}`); 91 | currBlockNr = await srcProvider.getBlockNumber() + 1; 92 | const migrationResult = await chainProxy.migrateChangesToProxy(changedKeys); 93 | const timer = (new Date()).getTime() - start; 94 | expect(migrationResult.migrationResult).to.be.true; 95 | if (!migrationResult.receipt) { 96 | logger.fatal('No receipt provided'); 97 | process.exit(-1); 98 | } 99 | 100 | logger.info(`Gas used for updating ${valueCount} values in contract with max depth 
${initialization.max_mpt_depth}: `, migrationResult.receipt.gasUsed.toNumber()); 101 | 102 | // add data to csv 103 | csvManager.pushData({ 104 | mapSize, 105 | used_gas: migrationResult.receipt.gasUsed.toNumber(), 106 | changed_value_count: valueCount, 107 | max_mpt_depth: initialization.max_mpt_depth, 108 | sequential: true, 109 | changeMigrationTime: timer, 110 | }); 111 | } 112 | }); 113 | 114 | it('Contract with map containing 100 values, update multiple deep values per iteration', async () => { 115 | const mapSize = 100; 116 | const initialization = await chainProxy.initializeProxyContract(mapSize, MAX_VALUE); 117 | expect(initialization.migrationState).to.be.true; 118 | currBlockNr = await srcProvider.getBlockNumber() + 1; 119 | 120 | for (let i = 0; i < mapSize; i += 1) { 121 | const valueCount = i + 1; 122 | 123 | // changing values at srcContract 124 | const result = await chainProxy.changeDeepestValues(valueCount, MAX_VALUE); 125 | expect(result).to.be.true; 126 | 127 | // migrate changes to proxy contract 128 | // get the diff set, the storage keys for the changed values 129 | const start: number = (new Date()).getTime(); 130 | const diff: StorageDiff = await differ.getDiffFromSrcContractTxs(srcContract.address, 'latest', currBlockNr); 131 | const changedKeys: Array = diff.getKeys(); 132 | logger.debug(`valueCount: ${valueCount}, changedKeys: ${changedKeys.length}`); 133 | currBlockNr = await srcProvider.getBlockNumber() + 1; 134 | const migrationResult = await chainProxy.migrateChangesToProxy(changedKeys); 135 | const timer = (new Date()).getTime() - start; 136 | expect(migrationResult.migrationResult).to.be.true; 137 | if (!migrationResult.receipt) { 138 | logger.fatal('No receipt provided'); 139 | process.exit(-1); 140 | } 141 | 142 | logger.info(`Gas used for updating ${valueCount} values in contract with max depth ${initialization.max_mpt_depth}: `, migrationResult.receipt.gasUsed.toNumber()); 143 | 144 | // add data to csv 145 | 
csvManager.pushData({ 146 | mapSize, 147 | used_gas: migrationResult.receipt.gasUsed.toNumber(), 148 | changed_value_count: valueCount, 149 | max_mpt_depth: initialization.max_mpt_depth, 150 | sequential: true, 151 | changeMigrationTime: timer, 152 | }); 153 | } 154 | }); 155 | 156 | it('Contract with map containing 1000 values, update multiple deep values per iteration', async () => { 157 | const mapSize = 1000; 158 | const initialization = await chainProxy.initializeProxyContract(mapSize, MAX_VALUE); 159 | expect(initialization.migrationState).to.be.true; 160 | currBlockNr = await srcProvider.getBlockNumber() + 1; 161 | 162 | for (let i = 0; i < MAX_CHANGED_VALUES; i += 1) { 163 | const valueCount = i + 1; 164 | 165 | // changing values at srcContract 166 | const result = await chainProxy.changeDeepestValues(valueCount, MAX_VALUE); 167 | expect(result).to.be.true; 168 | 169 | // migrate changes to proxy contract 170 | // get the diff set, the storage keys for the changed values 171 | const start: number = (new Date()).getTime(); 172 | const diff: StorageDiff = await differ.getDiffFromSrcContractTxs(srcContract.address, 'latest', currBlockNr); 173 | const changedKeys: Array = diff.getKeys(); 174 | logger.debug(`valueCount: ${valueCount}, changedKeys: ${changedKeys.length}`); 175 | currBlockNr = await srcProvider.getBlockNumber() + 1; 176 | const migrationResult = await chainProxy.migrateChangesToProxy(changedKeys); 177 | const timer = (new Date()).getTime() - start; 178 | expect(migrationResult.migrationResult).to.be.true; 179 | if (!migrationResult.receipt) { 180 | logger.fatal('No receipt provided'); 181 | process.exit(-1); 182 | } 183 | 184 | logger.info(`Gas used for updating ${valueCount} values in contract with max depth ${initialization.max_mpt_depth}: `, migrationResult.receipt.gasUsed.toNumber()); 185 | 186 | // add data to csv 187 | csvManager.pushData({ 188 | mapSize, 189 | used_gas: migrationResult.receipt.gasUsed.toNumber(), 190 | changed_value_count: 
valueCount, 191 | max_mpt_depth: initialization.max_mpt_depth, 192 | sequential: true, 193 | changeMigrationTime: timer, 194 | }); 195 | } 196 | }); 197 | }); 198 | -------------------------------------------------------------------------------- /evaluation/update-multiple-values-random-with-map-sizes-1-1000.ts: -------------------------------------------------------------------------------- 1 | import { ethers } from 'ethers'; 2 | import { expect } from 'chai'; 3 | import { JsonRpcProvider } from '@ethersproject/providers'; 4 | import { SignerWithAddress } from '@nomiclabs/hardhat-ethers/signers'; 5 | import { BigNumber, BigNumberish } from '@ethersproject/bignumber'; 6 | import { TestChainProxy, TestCLI } from '../test/test-utils'; 7 | import { CSVDataTemplateMultipleValues, CSVManager } from './eval-utils'; 8 | import { 9 | RelayContract__factory, MappingContract, MappingContract__factory, RelayContract, 10 | } from '../src-gen/types'; 11 | import DiffHandler from '../src/diffHandler/DiffHandler'; 12 | import { logger } from '../src/utils/logger'; 13 | import StorageDiff from '../src/diffHandler/StorageDiff'; 14 | import { TxContractInteractionOptions } from '../src/cli/smart-sync'; 15 | import FileHandler from '../src/utils/fileHandler'; 16 | 17 | const MAX_VALUE = 1000000; 18 | const MAX_CHANGED_VALUES = 100; 19 | 20 | describe('update-multiple-values-random-with-map-sizes-1-1000', async () => { 21 | let srcDeployer: SignerWithAddress; 22 | let targetDeployer: SignerWithAddress; 23 | let srcProvider: JsonRpcProvider; 24 | let targetProvider: JsonRpcProvider; 25 | let srcContract: MappingContract; 26 | let logicContract: MappingContract; 27 | let factory: MappingContract__factory; 28 | let relayContract: RelayContract; 29 | let chainConfigs: TxContractInteractionOptions | undefined; 30 | let chainProxy: TestChainProxy; 31 | let csvManager: CSVManager; 32 | let differ: DiffHandler; 33 | let currBlockNr: number; 34 | 35 | before(async () => { 36 | const fh = new 
FileHandler(TestCLI.defaultEvaluationConfigFile); 37 | chainConfigs = fh.getJSON(); 38 | if (!chainConfigs) { 39 | logger.error(`No config available under ${TestCLI.defaultEvaluationConfigFile}`); 40 | process.exit(-1); 41 | } 42 | srcProvider = new ethers.providers.JsonRpcProvider({ url: chainConfigs.srcChainRpcUrl, timeout: BigNumber.from(chainConfigs.connectionTimeout).toNumber() }); 43 | targetProvider = new ethers.providers.JsonRpcProvider({ url: chainConfigs.targetChainRpcUrl, timeout: BigNumber.from(chainConfigs.connectionTimeout).toNumber() }); 44 | srcDeployer = await SignerWithAddress.create(srcProvider.getSigner()); 45 | targetDeployer = await SignerWithAddress.create(targetProvider.getSigner()); 46 | logger.setSettings({ minLevel: 'info', name: 'update-multiple-values-random-with-map-sizes-1-1000.ts' }); 47 | csvManager = new CSVManager('measurements-multiple-values-random-with-map-sizes-1-1000.csv'); 48 | differ = new DiffHandler(srcProvider, targetProvider); 49 | }); 50 | 51 | after(async () => { 52 | await csvManager.writeTofile(); 53 | }); 54 | 55 | beforeEach(async () => { 56 | factory = new MappingContract__factory(srcDeployer); 57 | srcContract = await factory.deploy(); 58 | logicContract = await factory.deploy(); 59 | // deploy the relay contract 60 | const Relayer = new RelayContract__factory(targetDeployer); 61 | relayContract = await Relayer.deploy(); 62 | if (!chainConfigs) { 63 | logger.error(`No config available under ${TestCLI.defaultEvaluationConfigFile}`); 64 | process.exit(-1); 65 | } 66 | chainProxy = new TestChainProxy(srcContract, logicContract, chainConfigs, srcDeployer, targetDeployer, relayContract, srcProvider, targetProvider); 67 | }); 68 | 69 | afterEach(async () => { 70 | }); 71 | 72 | it('Contract with map containing 10 values, update multiple values random per iteration', async () => { 73 | const mapSize = 10; 74 | const initialization = await chainProxy.initializeProxyContract(mapSize, MAX_VALUE); 75 | 
expect(initialization.migrationState).to.be.true; 76 | currBlockNr = await srcProvider.getBlockNumber() + 1; 77 | 78 | for (let i = 0; i < mapSize; i += 1) { 79 | const valueCount = i + 1; 80 | 81 | // changing values at srcContract 82 | const result = await chainProxy.changeRandomValues(valueCount, MAX_VALUE); 83 | expect(result).to.be.true; 84 | 85 | // migrate changes to proxy contract 86 | // get the diff set, the storage keys for the changed values 87 | const start: number = (new Date()).getTime(); 88 | const diff: StorageDiff = await differ.getDiffFromSrcContractTxs(srcContract.address, 'latest', currBlockNr); 89 | const changedKeys: Array = diff.getKeys(); 90 | logger.debug(`valueCount: ${valueCount}, changedKeys: ${changedKeys.length}`); 91 | currBlockNr = await srcProvider.getBlockNumber() + 1; 92 | const migrationResult = await chainProxy.migrateChangesToProxy(changedKeys); 93 | const timer = (new Date()).getTime() - start; 94 | expect(migrationResult.migrationResult).to.be.true; 95 | if (!migrationResult.receipt) { 96 | logger.fatal('No receipt provided'); 97 | process.exit(-1); 98 | } 99 | 100 | logger.info(`value count: ${valueCount}, gas used: `, migrationResult.receipt.gasUsed.toNumber()); 101 | 102 | // add data to csv 103 | csvManager.pushData({ 104 | mapSize, 105 | used_gas: migrationResult.receipt.gasUsed.toNumber(), 106 | changed_value_count: valueCount, 107 | max_mpt_depth: initialization.max_mpt_depth, 108 | sequential: false, 109 | changeMigrationTime: timer, 110 | }); 111 | } 112 | }); 113 | 114 | it('Contract with map containing 100 values, update multiple values random per iteration', async () => { 115 | const mapSize = 100; 116 | const initialization = await chainProxy.initializeProxyContract(mapSize, MAX_VALUE); 117 | expect(initialization.migrationState).to.be.true; 118 | currBlockNr = await srcProvider.getBlockNumber() + 1; 119 | 120 | for (let i = 0; i < mapSize; i += 1) { 121 | const valueCount = i + 1; 122 | 123 | // changing values 
at srcContract 124 | const result = await chainProxy.changeRandomValues(valueCount, MAX_VALUE); 125 | expect(result).to.be.true; 126 | 127 | // migrate changes to proxy contract 128 | // get the diff set, the storage keys for the changed values 129 | const start: number = (new Date()).getTime(); 130 | const diff: StorageDiff = await differ.getDiffFromSrcContractTxs(srcContract.address, 'latest', currBlockNr); 131 | const changedKeys: Array = diff.getKeys(); 132 | currBlockNr = await srcProvider.getBlockNumber() + 1; 133 | const migrationResult = await chainProxy.migrateChangesToProxy(changedKeys); 134 | const timer = (new Date()).getTime() - start; 135 | expect(migrationResult.migrationResult).to.be.true; 136 | if (!migrationResult.receipt) { 137 | logger.fatal('No receipt provided'); 138 | process.exit(-1); 139 | } 140 | 141 | logger.info(`value count: ${valueCount}, gas used: `, migrationResult.receipt.gasUsed.toNumber()); 142 | 143 | // add data to csv 144 | csvManager.pushData({ 145 | mapSize, 146 | used_gas: migrationResult.receipt.gasUsed.toNumber(), 147 | changed_value_count: valueCount, 148 | max_mpt_depth: initialization.max_mpt_depth, 149 | sequential: false, 150 | changeMigrationTime: timer, 151 | }); 152 | } 153 | }); 154 | 155 | it('Contract with map containing 1000 values, update multiple values random per iteration', async () => { 156 | const mapSize = 1000; 157 | const initialization = await chainProxy.initializeProxyContract(mapSize, MAX_VALUE); 158 | expect(initialization.migrationState).to.be.true; 159 | currBlockNr = await srcProvider.getBlockNumber() + 1; 160 | 161 | for (let i = 0; i < MAX_CHANGED_VALUES; i += 1) { 162 | const valueCount = i + 1; 163 | 164 | // changing values at srcContract 165 | const result = await chainProxy.changeRandomValues(valueCount, MAX_VALUE); 166 | expect(result).to.be.true; 167 | 168 | // migrate changes to proxy contract 169 | // get the diff set, the storage keys for the changed values 170 | const start = (new 
Date()).getTime(); 171 | const diff: StorageDiff = await differ.getDiffFromSrcContractTxs(srcContract.address, 'latest', currBlockNr); 172 | const changedKeys: Array = diff.getKeys(); 173 | logger.debug(`valueCount: ${valueCount}, changedKeys: ${changedKeys.length}`); 174 | currBlockNr = await srcProvider.getBlockNumber() + 1; 175 | const migrationResult = await chainProxy.migrateChangesToProxy(changedKeys); 176 | const timer = (new Date()).getTime() - start; 177 | expect(migrationResult.migrationResult).to.be.true; 178 | if (!migrationResult.receipt) { 179 | logger.fatal('No receipt provided'); 180 | process.exit(-1); 181 | } 182 | 183 | logger.info(`value count: ${valueCount}, gas used: `, migrationResult.receipt.gasUsed.toNumber()); 184 | 185 | // add data to csv 186 | csvManager.pushData({ 187 | mapSize, 188 | used_gas: migrationResult.receipt.gasUsed.toNumber(), 189 | changed_value_count: valueCount, 190 | max_mpt_depth: initialization.max_mpt_depth, 191 | sequential: false, 192 | changeMigrationTime: timer, 193 | }); 194 | } 195 | }); 196 | }); 197 | -------------------------------------------------------------------------------- /evaluation/update-one-value-per-mpt-height-with-map-sizes-1-to-1000.ts: -------------------------------------------------------------------------------- 1 | import { ethers } from 'ethers'; 2 | import { expect } from 'chai'; 3 | import { JsonRpcProvider } from '@ethersproject/providers'; 4 | import { SignerWithAddress } from '@nomiclabs/hardhat-ethers/signers'; 5 | import { BigNumber, BigNumberish } from '@ethersproject/bignumber'; 6 | import { CSVDataTemplatePerMTHeight, CSVManager } from './eval-utils'; 7 | import { TestChainProxy, TestCLI } from '../test/test-utils'; 8 | import { 9 | RelayContract__factory, MappingContract, MappingContract__factory, RelayContract, 10 | } from '../src-gen/types'; 11 | import DiffHandler from '../src/diffHandler/DiffHandler'; 12 | import { logger } from '../src/utils/logger'; 13 | import 
StorageDiff from '../src/diffHandler/StorageDiff'; 14 | import { getAllKeys } from '../src/utils/utils'; 15 | import GetProof from '../src/proofHandler/GetProof'; 16 | import { TxContractInteractionOptions } from '../src/cli/smart-sync'; 17 | import FileHandler from '../src/utils/fileHandler'; 18 | 19 | const MAX_VALUE = 1000000; 20 | 21 | describe('update-one-value-per-mpt-height-with-map-sizes-1-to-1000', async () => { 22 | let srcDeployer: SignerWithAddress; 23 | let targetDeployer: SignerWithAddress; 24 | let srcProvider: JsonRpcProvider; 25 | let targetProvider: JsonRpcProvider; 26 | let srcContract: MappingContract; 27 | let logicContract: MappingContract; 28 | let factory: MappingContract__factory; 29 | let relayContract: RelayContract; 30 | let chainConfigs: TxContractInteractionOptions | undefined; 31 | let csvManager: CSVManager; 32 | let chainProxy: TestChainProxy; 33 | let differ: DiffHandler; 34 | let currBlockNr: number; 35 | 36 | before(async () => { 37 | const fh = new FileHandler(TestCLI.defaultEvaluationConfigFile); 38 | chainConfigs = fh.getJSON(); 39 | if (!chainConfigs) { 40 | logger.error(`No config available under ${TestCLI.defaultEvaluationConfigFile}`); 41 | process.exit(-1); 42 | } 43 | srcProvider = new ethers.providers.JsonRpcProvider({ url: chainConfigs.srcChainRpcUrl, timeout: BigNumber.from(chainConfigs.connectionTimeout).toNumber() }); 44 | targetProvider = new ethers.providers.JsonRpcProvider({ url: chainConfigs.targetChainRpcUrl, timeout: BigNumber.from(chainConfigs.connectionTimeout).toNumber() }); 45 | srcDeployer = await SignerWithAddress.create(srcProvider.getSigner()); 46 | targetDeployer = await SignerWithAddress.create(targetProvider.getSigner()); 47 | logger.setSettings({ minLevel: 'info', name: 'update-one-value-per-mpt-height-with-map-sizes-1-to-1000.ts' }); 48 | csvManager = new CSVManager('measurements-update-one-value-per-mpt-height-with-map-sizes-1-to-1000.csv'); 49 | differ = new DiffHandler(srcProvider, 
targetProvider); 50 | }); 51 | 52 | after(async () => { 53 | await csvManager.writeTofile(); 54 | }); 55 | 56 | beforeEach(async () => { 57 | factory = new MappingContract__factory(srcDeployer); 58 | srcContract = await factory.deploy(); 59 | logicContract = await factory.deploy(); 60 | // deploy the relay contract 61 | const Relayer = new RelayContract__factory(targetDeployer); 62 | relayContract = await Relayer.deploy(); 63 | if (!chainConfigs) { 64 | logger.error(`No config available under ${TestCLI.defaultTestConfigFile}`); 65 | process.exit(-1); 66 | } 67 | chainProxy = new TestChainProxy(srcContract, logicContract, chainConfigs, srcDeployer, targetDeployer, relayContract, srcProvider, targetProvider); 68 | }); 69 | 70 | afterEach(async () => { 71 | }); 72 | 73 | it('Contract with map containing 1 value, update 1 value', async () => { 74 | const initialization = await chainProxy.initializeProxyContract(1, MAX_VALUE); 75 | expect(initialization.migrationState).to.be.true; 76 | currBlockNr = await srcProvider.getBlockNumber() + 1; 77 | 78 | // change all the previous synced values 79 | await chainProxy.changeValueAtIndex(0, MAX_VALUE); 80 | 81 | // migrate changes to proxy contract 82 | // get the diff set, the storage keys for the changed values 83 | const start = (new Date()).getTime(); 84 | const diff: StorageDiff = await differ.getDiffFromSrcContractTxs(srcContract.address, 'latest', currBlockNr); 85 | const changedKeys: Array = diff.getKeys(); 86 | logger.debug(changedKeys); 87 | const migrationResult = await chainProxy.migrateChangesToProxy(changedKeys); 88 | const timer = (new Date()).getTime() - start; 89 | expect(migrationResult.migrationResult).to.be.true; 90 | if (!migrationResult.receipt) { 91 | logger.fatal('No receipt provided'); 92 | process.exit(-1); 93 | } 94 | 95 | logger.info('Gas used for updating 1 value in map with 1 value: ', migrationResult.receipt.gasUsed.toNumber()); 96 | 97 | csvManager.pushData({ 98 | mapSize: 1, 99 | used_gas: 
migrationResult.receipt.gasUsed.toNumber(), 100 | max_mpt_depth: initialization.max_mpt_depth, 101 | value_mpt_depth: 1, 102 | changeMigrationTime: timer, 103 | }); 104 | }); 105 | 106 | it('Contract with map containing 10 values, update 1 value per mpt height', async () => { 107 | const mapSize = 10; 108 | const initialization = await chainProxy.initializeProxyContract(mapSize, MAX_VALUE); 109 | expect(initialization.migrationState).to.be.true; 110 | currBlockNr = await srcProvider.getBlockNumber() + 1; 111 | 112 | for (let i = initialization.min_mpt_depth; i <= initialization.max_mpt_depth; i += 1) { 113 | // change value 114 | expect(await chainProxy.changeValueAtMTHeight(i, MAX_VALUE)).to.be.true; 115 | 116 | // migrate changes to proxy contract 117 | // get the diff set, the storage keys for the changed values 118 | const start = (new Date()).getTime(); 119 | const diff: StorageDiff = await differ.getDiffFromSrcContractTxs(srcContract.address, 'latest', currBlockNr); 120 | const changedKeys: Array = diff.getKeys(); 121 | logger.debug(changedKeys); 122 | const migrationResult = await chainProxy.migrateChangesToProxy(changedKeys); 123 | const timer = (new Date()).getTime() - start; 124 | expect(migrationResult.migrationResult).to.be.true; 125 | if (!migrationResult.receipt) { 126 | logger.fatal('No receipt provided'); 127 | process.exit(-1); 128 | } 129 | 130 | logger.info(`Gas used for updating value in height ${i} in contract with max depth ${initialization.max_mpt_depth}: `, migrationResult.receipt.gasUsed.toNumber()); 131 | 132 | // add data to csv 133 | csvManager.pushData({ 134 | mapSize, 135 | used_gas: migrationResult.receipt.gasUsed.toNumber(), 136 | max_mpt_depth: initialization.max_mpt_depth, 137 | value_mpt_depth: i, 138 | changeMigrationTime: timer, 139 | }); 140 | } 141 | }); 142 | 143 | it('Contract with map containing 100 values, update 1 value per mpt height', async () => { 144 | const mapSize = 100; 145 | const initialization = await 
chainProxy.initializeProxyContract(mapSize, MAX_VALUE); 146 | expect(initialization.migrationState).to.be.true; 147 | currBlockNr = await srcProvider.getBlockNumber() + 1; 148 | const csvDings = new CSVManager<{ from: string, to: string }>('edges.csv'); 149 | const theKeys = await getAllKeys(srcContract.address, srcProvider); 150 | const proofer = new GetProof(await srcProvider.send('eth_getProof', [srcContract.address, theKeys])); 151 | const existingPairs: { from: string, to: string }[] = []; 152 | proofer.storageProof.forEach((proof) => { 153 | for (let i = 1; i < proof.proof.length; i += 1) { 154 | const fromKec = ethers.utils.keccak256(proof.proof[i - 1]); 155 | const toKec = ethers.utils.keccak256(proof.proof[i]); 156 | const index = existingPairs.findIndex((pair) => pair.from === fromKec && pair.to === toKec); 157 | if (index < 0) { 158 | existingPairs.push({ from: fromKec, to: toKec }); 159 | csvDings.pushData({ from: fromKec, to: toKec }); 160 | } 161 | } 162 | }); 163 | await csvDings.writeTofile(); 164 | 165 | for (let i = initialization.min_mpt_depth; i <= initialization.max_mpt_depth; i += 1) { 166 | // change value 167 | expect(await chainProxy.changeValueAtMTHeight(i, MAX_VALUE)).to.be.true; 168 | 169 | // migrate changes to proxy contract 170 | // get the diff set, the storage keys for the changed values 171 | const start = (new Date()).getTime(); 172 | const diff: StorageDiff = await differ.getDiffFromSrcContractTxs(srcContract.address, 'latest', currBlockNr); 173 | const changedKeys: Array = diff.getKeys(); 174 | logger.debug(changedKeys); 175 | const migrationResult = await chainProxy.migrateChangesToProxy(changedKeys); 176 | const timer = (new Date()).getTime() - start; 177 | expect(migrationResult.migrationResult).to.be.true; 178 | if (!migrationResult.receipt) { 179 | logger.fatal('No receipt provided'); 180 | process.exit(-1); 181 | } 182 | 183 | logger.info(`Gas used for updating value in height ${i} in contract with max depth 
${initialization.max_mpt_depth}: `, migrationResult.receipt.gasUsed.toNumber()); 184 | 185 | // add data to csv 186 | csvManager.pushData({ 187 | mapSize, 188 | used_gas: migrationResult.receipt.gasUsed.toNumber(), 189 | max_mpt_depth: initialization.max_mpt_depth, 190 | value_mpt_depth: i, 191 | changeMigrationTime: timer, 192 | }); 193 | } 194 | }); 195 | 196 | it('Contract with map containing 1000 values, update 1 value per mpt height', async () => { 197 | const mapSize = 1000; 198 | const initialization = await chainProxy.initializeProxyContract(mapSize, MAX_VALUE); 199 | expect(initialization.migrationState).to.be.true; 200 | currBlockNr = await srcProvider.getBlockNumber() + 1; 201 | 202 | for (let i = initialization.min_mpt_depth; i <= initialization.max_mpt_depth; i += 1) { 203 | // change value 204 | expect(await chainProxy.changeValueAtMTHeight(i, MAX_VALUE)).to.be.true; 205 | 206 | // migrate changes to proxy contract 207 | // get the diff set, the storage keys for the changed values 208 | const start = (new Date()).getTime(); 209 | const diff: StorageDiff = await differ.getDiffFromSrcContractTxs(srcContract.address, 'latest', currBlockNr); 210 | const changedKeys: Array = diff.getKeys(); 211 | logger.debug(changedKeys); 212 | const migrationResult = await chainProxy.migrateChangesToProxy(changedKeys); 213 | const timer = (new Date()).getTime() - start; 214 | expect(migrationResult.migrationResult).to.be.true; 215 | if (!migrationResult.receipt) { 216 | logger.fatal('No receipt provided'); 217 | process.exit(-1); 218 | } 219 | 220 | logger.info(`Gas used for updating value in height ${i} in contract with max depth ${initialization.max_mpt_depth}: `, migrationResult.receipt.gasUsed.toNumber()); 221 | 222 | // add data to csv 223 | csvManager.pushData({ 224 | mapSize, 225 | used_gas: migrationResult.receipt.gasUsed.toNumber(), 226 | max_mpt_depth: initialization.max_mpt_depth, 227 | value_mpt_depth: i, 228 | changeMigrationTime: timer, 229 | }); 230 | } 
231 | }); 232 | }); 233 | -------------------------------------------------------------------------------- /hardhat.config.ts: -------------------------------------------------------------------------------- 1 | import { task } from "hardhat/config"; 2 | import "@nomiclabs/hardhat-waffle"; 3 | import "@openzeppelin/hardhat-upgrades"; 4 | import "@typechain/hardhat"; 5 | 6 | // This is a sample Hardhat task. To learn how to create your own go to 7 | // https://hardhat.org/guides/create-task.html 8 | task("accounts", "Prints the list of accounts", async (args, hre) => { 9 | const accounts = await hre.ethers.getSigners(); 10 | 11 | for (const account of accounts) { 12 | console.log(await account.address); 13 | } 14 | }); 15 | 16 | // You need to export an object to set up your config 17 | // Go to https://hardhat.org/config/ to learn more 18 | 19 | export default { 20 | defaultNetwork: "disco", 21 | networks: { 22 | hardhat: { 23 | accounts: [{ 24 | privateKey: '0x2f6b8e2dc397013c43281c30e01bd6b67625031b2607b48fd72cc8c9aba08a3a', 25 | balance: '10000000000000000000000' 26 | }] 27 | }, 28 | disco: { 29 | url: "http://127.0.0.1:8545", 30 | gas: 1000000, 31 | timeout: 3600000 32 | }, 33 | disco2: { 34 | url: "http://127.0.0.1:8547", 35 | gas: 1000000000, 36 | timeout: 3600000 37 | } 38 | }, 39 | typechain: { 40 | outDir: "src-gen/types", 41 | target: "ethers-v5", 42 | }, 43 | solidity: { 44 | compilers: [ 45 | { 46 | version: "0.6.2" 47 | }, 48 | { 49 | version: "0.7.0", 50 | settings: { } 51 | } 52 | ] 53 | }, 54 | mocha: { 55 | timeout: 36000000 56 | } 57 | }; 58 | -------------------------------------------------------------------------------- /hardhat_scripts/deployMappingContract.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable no-await-in-loop */ 2 | import { ethers } from 'hardhat'; 3 | import { logger } from '../src/utils/logger'; 4 | import { getAllKeys } from '../src/utils/utils'; 5 | 6 | async function 
main() { 7 | const Mapper = await ethers.getContractFactory('MappingContract'); 8 | const mapper = await Mapper.deploy(); 9 | const provider = await new ethers.providers.JsonRpcProvider('http://localhost:8545'); 10 | 11 | await mapper.deployed(); 12 | logger.info(mapper.address); 13 | 14 | for (let i = 0; i < 20; i += 1) { 15 | await mapper.insert(i, i + 1); 16 | const keys = await getAllKeys(mapper.address, provider); 17 | const proof = await provider.send('eth_getProof', [mapper.address, keys]); 18 | logger.info(proof.storageProof); 19 | } 20 | } 21 | 22 | main() 23 | .then(() => process.exit(0)) 24 | .catch((error) => { 25 | logger.error(error); 26 | process.exit(1); 27 | }); 28 | -------------------------------------------------------------------------------- /hardhat_scripts/list-storage.ts: -------------------------------------------------------------------------------- 1 | import * as hre from "hardhat"; 2 | 3 | async function main() { 4 | const provider = new hre.ethers.providers.JsonRpcProvider(); 5 | const accounts = await provider.listAccounts(); 6 | 7 | const resp = await provider.send("parity_listStorageKeys", [ 8 | "0x7CD7fA14c96d34286B0E47fdb1F15Fa4C4BD9bDA",5, null 9 | ]); 10 | console.log(resp); 11 | } 12 | 13 | main() 14 | .then(() => process.exit(0)) 15 | .catch(error => { 16 | console.error(error); 17 | process.exit(1); 18 | }); -------------------------------------------------------------------------------- /imgs/trie-layout.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/disco-project/smart-sync/499ac7814206a7b5841366b617aac25afe6277cf/imgs/trie-layout.png -------------------------------------------------------------------------------- /optimized-storage-proof.md: -------------------------------------------------------------------------------- 1 | # Optimized storage proof 2 | 3 | ## Layout 4 | 5 | The proof obtained via the `eth_getProof` RPC endpoint: [EIPs/eip-1186.md 
at master · ethereum/EIPs · GitHub](https://github.com/ethereum/EIPs/blob/master/EIPS/eip-1186.md) contains the `storageProof` Array of rlp encoded storage-entries, one entry for each provided `key`. Every storage entry has the following properties: 6 | 7 | * `key`: the storage key as requested in the `eth_getProof` 8 | * `value`: the storage value of the address stored for the `key` 9 | * `proof`: Array of all the rlp-serialized MerkleTree-Nodes, starting with the root node of the storage Patricia-trie, following the path of the `key`. 10 | 11 | The Patricia-Merkle-Tree is described in greater detail in the wiki: [patricia-tree | Ethereum Wiki](https://eth.wiki/fundamentals/patricia-tree) 12 | 13 | If a contract contains more than one key (e.g. two variables that have a value), the root node of the storage trie is guaranteed to be a branch node. Therefore all `proof` arrays in each storage-entry of the `storageProof` array start with the identical root node (branch) and can therefore be optimized. Instead of sending a `proof` array each for every key, the layout can be compressed. 14 | 15 | Consider the following storage-trie layout: 16 | 17 | ![storage proof trie](imgs/trie-layout.png) 18 | 19 | 20 | All individual storage proofs share at least the root node. Leaf nodes L3 and L4 share 3 branch nodes (root-B2-B3) as their parents and can therefore be optimized into a single proof with an identical path until B3. 21 | The optimized proof that bundles every individual `proof` is a tree structure. Every node (`proof-node`) in this new proof can be expressed with the following properties: 22 | * `branches`: an Array of RLP-encoded branch nodes that can be consolidated. The last branch node in `branches` is the deepest node that the underlying leaf nodes share as one of their parent nodes 23 | * `values`: an Array that contains either the terminating `leaf-node` directly or in the case of a continued path another `proof-node`. 
24 | 25 | A terminating `leaf-node` contains: 26 | * `key`: the storage key (as described above) 27 | * `value`: the storage value (as described above) 28 | 29 | For the example this means, the initial `proof-node` contains a single branch (root node) in `branches` and 3 entries in values. For L1 the initial `proof-node` contains another `proof-node` with exactly 1 branch (B1). For L2 it contains the `leaf-node` directly. For L3 and L4 it contains a single `proof-node` with branch nodes [B2, B3], and this `proof-node` then contains the `leaf-nodes` L3, L4 directly. 30 | 31 | 32 | ## Encoding and Modifying 33 | 34 | To verify that the new proof passed refers to the current state of the proxy, all values of the storage entries in the proof must be replaced with the current values of the proxy and so the storage hash of the proxy has to be determined. To adjust the passed proof, the values for the respective storage key have to be read from the contract storage and then the nodes have to be updated upwards along the proof path. All proof nodes are rlp-encoded analogously to the ethereum patricia tree. RLP-encoding is described in detail in the wiki: [patricia-tree | Ethereum Wiki](https://eth.wiki/fundamentals/patricia-tree). A leaf node that holds the final value consists of an array with two entries: the encoded path and the value. A parent branch node that references the leaf node stores the hash of the leaf node at its corresponding index. The hash is computed by `sha3(rlp([rlp(encodedPath), rlp(value)]))`. 35 | After modifying the leaf node's value this hash has to be computed and replaced in the node's parent branch. Then the parent branch has to be hashed again to be updated in its parent branch. This is repeated until the final root branch node is reached. Hashing the root branch node then leads to the actual storage hash that can be compared against the relayed storage hash in the relay contract. 
-------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "smart-sync", 3 | "version": "0.3.4", 4 | "description": "", 5 | "repository": { 6 | "url": "git@github.com:disco-project/smart-sync.git" 7 | }, 8 | "main": "./dist/src/cli/smart-sync.js", 9 | "types": "./dist/src/**/*.d.ts", 10 | "bin": { 11 | "smart-sync": "./bin/smart-sync" 12 | }, 13 | "files": [ 14 | "dist/**/*" 15 | ], 16 | "scripts": { 17 | "compile-contracts": "npx grunt compile-contracts", 18 | "refresh": "rm -rf ./node_modules ./package-lock.json && npm install", 19 | "test": "npx grunt full-pipeline-test --force", 20 | "lint": "npx grunt eslint", 21 | "evaluate": "npx grunt full-pipeline-evaluation --force", 22 | "build": "tsc -p tsconfig.json" 23 | }, 24 | "author": "", 25 | "license": "GPL-3.0", 26 | "dependencies": { 27 | "@ethersproject/abstract-provider": "^5.5.1", 28 | "@ethersproject/bignumber": "^5.5.0", 29 | "@ethersproject/contracts": "^5.5.0", 30 | "@ethersproject/providers": "^5.5.1", 31 | "@ethersproject/web": "^5.5.1", 32 | "@nomiclabs/hardhat-ethers": "^2.0.3", 33 | "@openzeppelin/contracts": "^4.4.1", 34 | "@tsconfig/node14": "^1.0.1", 35 | "@types/cli-progress": "^3.9.2", 36 | "chalk": "^5.0.0", 37 | "cli-progress": "^3.9.1", 38 | "commander": "^8.3.0", 39 | "csv-parse": "^5.0.4", 40 | "csv-stringify": "^6.0.5", 41 | "ethers": "^5.5.2", 42 | "hardhat": "^2.8.0", 43 | "merkle-patricia-tree": "^4.2.2", 44 | "node-cron": "^3.0.0", 45 | "rlp": "^2.2.7", 46 | "solc": "^0.8.11", 47 | "solidity-patricia-tree": "^2.0.2", 48 | "solidity-rlp": "^2.0.5", 49 | "ts-generator": "^0.1.1", 50 | "tslog": "^3.3.1", 51 | "web3": "^1.6.1" 52 | }, 53 | "devDependencies": { 54 | "@ethersproject/abi": "^5.5.0", 55 | "@nomiclabs/hardhat-waffle": "^2.0.1", 56 | "@openzeppelin/hardhat-upgrades": "^1.12.0", 57 | "@tsconfig/node14": "^1.0.1", 58 | "@typechain/ethers-v5": 
"^8.0.5", 59 | "@typechain/hardhat": "^3.1.0", 60 | "@types/chai": "^4.3.0", 61 | "@types/mocha": "^9.0.0", 62 | "@types/node": "^17.0.7", 63 | "@typescript-eslint/eslint-plugin": "^5.9.0", 64 | "@typescript-eslint/parser": "^5.9.0", 65 | "chai": "^4.3.4", 66 | "eslint": "^8.6.0", 67 | "eslint-config-airbnb-base": "^15.0.0", 68 | "eslint-config-airbnb-typescript": "^16.1.0", 69 | "eslint-plugin-import": "^2.25.4", 70 | "ethereum-waffle": "^3.4.0", 71 | "grunt": "^1.4.1", 72 | "grunt-cli": "^1.4.3", 73 | "grunt-contrib-clean": "^2.0.0", 74 | "grunt-contrib-watch": "^1.1.0", 75 | "grunt-eslint": "^24.0.0", 76 | "grunt-mocha-test": "^0.13.3", 77 | "mocha": "^9.1.3", 78 | "ts-node": "^10.4.0", 79 | "ts-node-dev": "^1.1.8", 80 | "typechain": "^6.1.0", 81 | "typescript": "^4.5.4", 82 | "yaml": "^1.10.2" 83 | } 84 | } 85 | -------------------------------------------------------------------------------- /src/config.ts: -------------------------------------------------------------------------------- 1 | export const PROXY_CONTRACT_FILE_PATH = './artifacts/contracts/ProxyContract.sol'; 2 | 3 | export const PROXY_CONTRACT_NAME = 'ProxyContract'; 4 | 5 | export const PROXY_CONTRACT_FILE_NAME = `${PROXY_CONTRACT_NAME}.json`; 6 | 7 | /** 8 | * The placeholder address used in the `ProxyContract.sol` for the relay contract 9 | */ 10 | export const RELAY_CONTRACT_PLACEHOLDER_ADDRESS = '0xeBf794b5Cf0217CB806f48d2217D3ceE1e25A7C3'; 11 | 12 | /** 13 | * The placeholder address used in the `ProxyContract.sol` for the logic contract 14 | */ 15 | export const LOGIC_CONTRACT_PLACEHOLDER_ADDRESS = '0x55f2155f2fEdbf701262573Be477A6562E09AeE0'; 16 | 17 | /** 18 | * The placeholder address used in the `ProxyContract.sol` for the logic contract 19 | */ 20 | export const SOURCE_CONTRACT_PLACEHOLDER_ADDRESS = '0x0a911618A3dD806a5D14bf856cf355C4b9C84526'; 21 | 22 | export const PROXY_INTERFACE = [ 23 | 'constructor()', 24 | 'function updateStorage(bytes memory proof, uint blockNumber) public', 25 
| 'function computeRoots(bytes memory rlpProofNode) view returns (bytes32, bytes32)', 26 | 'function insert(uint _key, uint _value) public', 27 | 'function getValue(uint _key) public view returns (uint256)', 28 | 'function addStorage(bytes32[] memory keys, bytes32[] memory values) public', 29 | 'function getSourceAddress() public view returns (address)', 30 | 'function getRelayAddress() pure returns (address)', 31 | 'function getLogicAddress() public view returns (address)', 32 | ]; 33 | -------------------------------------------------------------------------------- /src/diffHandler/Add.ts: -------------------------------------------------------------------------------- 1 | import { BigNumberish } from 'ethers'; 2 | import { DiffKind, StorageKeyDiff } from './Types'; 3 | 4 | class Add implements StorageKeyDiff { 5 | public key: BigNumberish; 6 | 7 | public value: BigNumberish; 8 | 9 | public diffKind: DiffKind = DiffKind.Add; 10 | 11 | constructor(key: BigNumberish, value: BigNumberish) { 12 | this.key = key; 13 | this.value = value; 14 | } 15 | 16 | kind(): DiffKind { 17 | return this.diffKind; 18 | } 19 | } 20 | 21 | export default Add; 22 | -------------------------------------------------------------------------------- /src/diffHandler/Change.ts: -------------------------------------------------------------------------------- 1 | import { BigNumberish } from 'ethers'; 2 | import { DiffKind, StorageKeyDiff } from './Types'; 3 | 4 | class Change implements StorageKeyDiff { 5 | public key: BigNumberish; 6 | 7 | public srcValue: BigNumberish; 8 | 9 | public targetValue: BigNumberish; 10 | 11 | public diffKind: DiffKind = DiffKind.Change; 12 | 13 | constructor(key: BigNumberish, srcValue: BigNumberish, targetValue: BigNumberish) { 14 | this.key = key; 15 | this.srcValue = srcValue; 16 | this.targetValue = targetValue; 17 | } 18 | 19 | kind(): DiffKind { 20 | return this.diffKind; 21 | } 22 | } 23 | 24 | export default Change; 25 | 
-------------------------------------------------------------------------------- /src/diffHandler/Remove.ts: -------------------------------------------------------------------------------- 1 | import { BigNumberish } from 'ethers'; 2 | import { DiffKind, StorageKeyDiff } from './Types'; 3 | 4 | class Remove implements StorageKeyDiff { 5 | public key: BigNumberish; 6 | 7 | public value: BigNumberish; 8 | 9 | public diffKind: DiffKind = DiffKind.Remove; 10 | 11 | constructor(key: BigNumberish, value: BigNumberish) { 12 | this.key = key; 13 | this.value = value; 14 | } 15 | 16 | kind(): DiffKind { 17 | return this.diffKind; 18 | } 19 | } 20 | 21 | export default Remove; 22 | -------------------------------------------------------------------------------- /src/diffHandler/StorageDiff.ts: -------------------------------------------------------------------------------- 1 | import { BigNumberish } from 'ethers'; 2 | import Add from './Add'; 3 | import Change from './Change'; 4 | import Remove from './Remove'; 5 | import { DiffKind, StorageKeyDiff } from './Types'; 6 | 7 | class StorageDiff { 8 | public diffs: StorageKeyDiff[]; 9 | 10 | public fromKeys: Array; 11 | 12 | public toKeys: Array; 13 | 14 | /** 15 | * @Param diffs: key/value differences between two contract states 16 | * @param fromKeys: all keys from one contract state 17 | * @param toKeys: all keys from contract state that is compared to fromKeys contract state 18 | */ 19 | constructor(diffs: StorageKeyDiff[], fromKeys: Array, toKeys: Array) { 20 | this.diffs = diffs; 21 | this.fromKeys = fromKeys; 22 | this.toKeys = toKeys; 23 | } 24 | 25 | /** 26 | * @returns true if there are no differences, false otherwise 27 | */ 28 | isEmpty(): boolean { 29 | return this.diffs.length === 0; 30 | } 31 | 32 | /** 33 | * @returns all additional keys 34 | */ 35 | adds(): Add[] { 36 | return this.diffs.filter((diff) => diff.kind() === DiffKind.Add) as Add[]; 37 | } 38 | 39 | /** 40 | * @returns all keys that were deleted 41 
| */ 42 | removes(): Remove[] { 43 | return this.diffs.filter((diff) => diff.kind() === DiffKind.Remove) as Remove[]; 44 | } 45 | 46 | /** 47 | * @returns all keys that changed their values 48 | */ 49 | changes(): Change[] { 50 | return this.diffs.filter((diff) => diff.kind() === DiffKind.Change) as Change[]; 51 | } 52 | 53 | /** 54 | * 55 | * @returns all keys 56 | */ 57 | getKeys(): Array { 58 | return this.diffs.map((diff) => diff.key); 59 | } 60 | } 61 | 62 | export default StorageDiff; 63 | -------------------------------------------------------------------------------- /src/diffHandler/Types.ts: -------------------------------------------------------------------------------- 1 | import { BigNumberish } from 'ethers'; 2 | 3 | export type ProcessedParameters = { 4 | srcAddress: string; 5 | srcBlock: number; 6 | targetAddress: string; 7 | targetBlock: number | string; 8 | }; 9 | 10 | /* eslint-disable no-shadow */ 11 | /* eslint-disable no-unused-vars */ 12 | export enum DiffKind { 13 | Add, 14 | Remove, 15 | Change, 16 | } 17 | /* eslint-enable no-shadow */ 18 | /* eslint-enable no-unused-vars */ 19 | 20 | export interface StorageKeyDiff { 21 | /** 22 | * The storage key 23 | */ 24 | key: BigNumberish; 25 | 26 | /** 27 | * What kind of change this is 28 | */ 29 | kind(): DiffKind; 30 | } 31 | -------------------------------------------------------------------------------- /src/proofHandler/BranchNode.ts: -------------------------------------------------------------------------------- 1 | import * as rlp from 'rlp'; 2 | import { logger } from '../utils/logger'; 3 | import LeafNode from './LeafNode'; 4 | // eslint-disable-next-line import/no-cycle 5 | import { EmbeddedNode } from './Types'; 6 | 7 | class BranchNode { 8 | // [[path, hash]] 9 | node: Array | Buffer; 10 | 11 | children: (EmbeddedNode | null)[]; 12 | 13 | constructor(node: Array | Buffer, storageKey: string | undefined) { 14 | this.node = node; 15 | this.children = new Array(17).fill(null); 16 | if 
(storageKey) { 17 | this.children[16] = new LeafNode(rlp.decode(this.node[16]) as any, [storageKey]); 18 | } 19 | } 20 | 21 | /** 22 | * returns true if this node equals the rlp-encoded hex string, false otherwise 23 | * @param node a hex string with '0x' prefix 24 | * @returns boolean 25 | */ 26 | nodeEquals(node: string): Boolean { 27 | return `0x${rlp.encode(this.node).toString('hex')}` === node; 28 | } 29 | 30 | /** returns true if the raw child reference stored at position pos equals the given 32-byte node hash; false if this.node is a Buffer or the slot is not a Buffer reference */ childEquals(node: Buffer, pos: number): Boolean { 31 | if (this.node instanceof Buffer) { 32 | return false; 33 | } 34 | if (!(this.node[pos] instanceof Buffer)) { 35 | logger.error(`You want to compare ${this.node[pos].constructor} with ${Buffer}`); 36 | return false; 37 | } 38 | return (this.node[pos] as Buffer).equals(node); 39 | } 40 | 41 | /** returns true if any of the 17 child slots currently holds a LeafNode, false otherwise */ hasLeaf() { 42 | for (let i = 0; i < this.children.length; i += 1) { 43 | if (this.children[i] instanceof LeafNode) { 44 | return true; 45 | } 46 | } 47 | return false; 48 | } 49 | 50 | /** 51 | * Encodes the branch node as [[common branches...
node], children] 52 | */ 53 | encode() { 54 | const nodes = this.children.map((n) => { 55 | if (n) { 56 | return n.encode(); 57 | } 58 | return []; 59 | }); 60 | return [this.node, nodes]; 61 | } 62 | } 63 | 64 | export default BranchNode; 65 | -------------------------------------------------------------------------------- /src/proofHandler/ExtensionNode.ts: -------------------------------------------------------------------------------- 1 | import * as rlp from 'rlp'; 2 | import { logger } from '../utils/logger'; 3 | // eslint-disable-next-line import/no-cycle 4 | import BranchNode from './BranchNode'; 5 | 6 | class ExtensionNode { 7 | node: Array | Buffer; 8 | 9 | child?: BranchNode; 10 | 11 | constructor(node: Array | Buffer, child?: BranchNode) { 12 | this.node = node; 13 | this.child = child; 14 | } 15 | 16 | /** 17 | * returns true if this node equals the rlp-encoded hex string, false otherwise 18 | * @param node a hex string with '0x' prefix 19 | * @returns boolean 20 | */ 21 | nodeEquals(node: string): Boolean { 22 | return `0x${rlp.encode(this.node).toString('hex')}` === node; 23 | } 24 | 25 | getSharedNibbles(): string { 26 | return (this.node as Array)[0].toString('hex'); 27 | } 28 | 29 | childEquals(node: Buffer): Boolean { 30 | if (this.node instanceof Buffer) { 31 | return false; 32 | } 33 | if (!(this.node[1] instanceof Buffer)) { 34 | logger.error(`You want to compare ${this.node[1]} with ${Buffer}`); 35 | return false; 36 | } 37 | return this.node[1].equals(node); 38 | } 39 | 40 | encode() { 41 | if (!this.child) return undefined; 42 | return [this.node, [this.child.encode()]]; 43 | } 44 | } 45 | 46 | export default ExtensionNode; 47 | -------------------------------------------------------------------------------- /src/proofHandler/LeafNode.ts: -------------------------------------------------------------------------------- 1 | import * as rlp from 'rlp'; 2 | import { ethers } from 'ethers'; 3 | 4 | class LeafNode { 5 | storageKeys: Array; 6 | 7 
| node: Array | Buffer; 8 | 9 | constructor(node: Array | Buffer, storageKeys: Array) { 10 | this.node = node; 11 | this.storageKeys = storageKeys; 12 | } 13 | 14 | /** 15 | * 16 | * @param node a hex string with '0x' prefix 17 | * @returns true if this node equals the rlp-encoded hex string, false otherwise 18 | */ 19 | nodeEquals(node: string): Boolean { 20 | return `0x${rlp.encode(this.node).toString('hex')}` === node; 21 | } 22 | 23 | encode() { 24 | return [this.storageKeys.map((storageKey) => ethers.utils.hexZeroPad(storageKey, 32)), this.node[0], this.node[1]]; 25 | } 26 | } 27 | 28 | export default LeafNode; 29 | -------------------------------------------------------------------------------- /src/proofHandler/ProofPathBuilder.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable import/no-cycle */ 2 | import { ethers } from 'ethers'; 3 | import { Logger } from 'tslog'; 4 | import * as rlp from 'rlp'; 5 | import { logger } from '../utils/logger'; 6 | import { hexStringToBuffer } from '../utils/utils'; 7 | import BranchNode from './BranchNode'; 8 | import ExtensionNode from './ExtensionNode'; 9 | import LeafNode from './LeafNode'; 10 | import { EmbeddedNode, ParentNode, StorageProof } from './Types'; 11 | 12 | class ProofPathBuilder { 13 | root: EmbeddedNode; 14 | 15 | logger: Logger; 16 | 17 | constructor(root: Array | Buffer, storageKey?: string) { 18 | this.logger = logger.getChildLogger({ name: 'ProofPathBuilder' }); 19 | if (root.length === 2 && storageKey) { 20 | // root is leaf 21 | this.root = new LeafNode(root, [storageKey]); 22 | } else if (root.length === 2) { 23 | // root is extension 24 | this.logger.trace('extension as root'); 25 | this.root = new ExtensionNode(root); 26 | } else { 27 | // root is branch 28 | this.logger.trace('branch as root'); 29 | this.root = new BranchNode(root, storageKey); 30 | } 31 | } 32 | 33 | /** 34 | * returns true if this node equals the rlp-encoded hex string, 
false otherwise 35 | * @param node a hex string with '0x' prefix 36 | * @returns boolean 37 | */ 38 | nodeEquals(node: string): Boolean { 39 | return this.root.nodeEquals(node); 40 | } 41 | 42 | /** inserts a terminating leaf node for storageKey below parentNode (delegates to insert with isLeaf = true) */ addValue(storageKey: string, leafNode, parentNode: ParentNode): LeafNode | undefined { 43 | return this.insert(leafNode, parentNode, storageKey, true) as LeafNode; 44 | } 45 | 46 | /** inserts a branch node below parentNode; storageKey, if defined, marks a value terminating directly at the branch (slot 16) */ addBranch(branchNode, parentNode: ParentNode, storageKey: string | undefined): BranchNode | undefined { 47 | return this.insert(branchNode, parentNode, storageKey, false) as BranchNode; 48 | } 49 | 50 | /** inserts an extension node below parentNode (storage key is always undefined here) */ addExtension(extensionNode, parentNode: ParentNode): ExtensionNode | undefined { 51 | return this.insert(extensionNode, parentNode, undefined, false) as ExtensionNode; 52 | } 53 | 54 | insertChild(childBranch: ParentNode, node, parentNode: ParentNode, storageKey: string | undefined, isLeaf: Boolean): EmbeddedNode | undefined | null { 55 | const nodeRef = hexStringToBuffer(ethers.utils.keccak256(rlp.encode(node))); 56 | if (childBranch instanceof ProofPathBuilder) { 57 | this.logger.debug('not possible to be proofpathbuilder in insertChild'); 58 | process.exit(-1); 59 | } else if (childBranch instanceof ExtensionNode) { 60 | // todo extension root and first child. 
61 | if (!childBranch.child) { 62 | childBranch.child = new BranchNode(node, storageKey); 63 | return childBranch.child; 64 | } 65 | if (childBranch.childEquals(nodeRef)) { 66 | // child already exists 67 | return childBranch.child; 68 | } 69 | return this.insertChild(childBranch.child as BranchNode, node, parentNode, storageKey, isLeaf); 70 | } 71 | 72 | for (let i = 0; i < childBranch.children.length; i += 1) { 73 | if (childBranch.childEquals(nodeRef, i)) { 74 | if (isLeaf && storageKey) { 75 | // insert leaf 76 | childBranch.children[i] = new LeafNode(node, [storageKey]); 77 | } else if (isLeaf) { 78 | logger.error(`Storagekey for ${nodeRef} not defined.`); 79 | throw new Error(); 80 | } else if (node.length === 2 && !childBranch.children[i]) { 81 | // insert extension 82 | childBranch.children[i] = new ExtensionNode(node, undefined); 83 | } else if (!childBranch.children[i]) { 84 | // insert branch 85 | childBranch.children[i] = new BranchNode(node, storageKey); 86 | } else if (storageKey && !((childBranch.children[i] as BranchNode).children[16])) { 87 | logger.debug('value at branch'); 88 | (childBranch.children[i] as BranchNode).children[16] = new LeafNode(rlp.decode(node[16]) as any, [storageKey]); 89 | } 90 | return childBranch.children[i]; 91 | } 92 | // check nested 93 | if (childBranch.children[i]) { 94 | if (childBranch.children[i] instanceof BranchNode || childBranch.children[i] instanceof ExtensionNode) { 95 | const newNode = this.insertChild(childBranch.children[i] as ParentNode, node, parentNode, storageKey, isLeaf); 96 | if (newNode !== undefined) { 97 | return newNode; 98 | } 99 | } 100 | } 101 | } 102 | return undefined; 103 | } 104 | 105 | // todo put this functionality into the node classes themselves (Branch, Extension, and Leaf) 106 | insert(node, parentNode: ParentNode, storageKey: string | undefined, isLeaf: Boolean): EmbeddedNode | undefined | null { 107 | const nodeRef = hexStringToBuffer(ethers.utils.keccak256(rlp.encode(node))); 108 | 
// const parentRef = ethers.utils.keccak256(rlp.encode(parentNode)); 109 | 110 | // Inserting below a leaf root would require restructuring the merkle tree, which is not supported yet. 111 | // (The original code nested the identical `instanceof LeafNode` check twice; the inner check was always true.) 112 | if (this.root instanceof LeafNode) { 113 | logger.error('Change of mt not yet implemented.'); 114 | throw new Error(); 115 | } 116 | 117 | // find the parent node 118 | if (this.root instanceof ExtensionNode) { 119 | // root is extension node 120 | if (!this.root.child) { 121 | this.root.child = new BranchNode(node, storageKey); 122 | return this.root.child; 123 | } 124 | if (this.root.childEquals(nodeRef)) { 125 | // if already exists 126 | return this.root.child; 127 | } 128 | // -> check nested 129 | return this.insertChild(this.root.child, node, parentNode, storageKey, isLeaf); 130 | } 131 | 132 | // root is branch node 133 | for (let i = 0; i < this.root.children.length; i += 1) { 134 | if (this.root.childEquals(nodeRef, i)) { 135 | if (isLeaf && storageKey) { 136 | // insert leaf 137 | this.root.children[i] = new LeafNode(node, [storageKey]); 138 | } else if (isLeaf) { 139 | logger.error(`Storagekey for ${nodeRef} not defined.`); 140 | throw new Error(); 141 | } else if (node.length === 2 && !this.root.children[i]) { 142 | // insert extension 143 | this.root.children[i] = new ExtensionNode(node, undefined); 144 | } else if (!this.root.children[i]) { 145 | // insert branch 146 | this.root.children[i] = new BranchNode(node, storageKey); 147 | } else if (storageKey && !((this.root.children[i] as BranchNode).children[16])) { 148 | logger.debug('value at branch'); 149 | (this.root.children[i] as BranchNode).children[16] = new LeafNode(rlp.decode(node[16]) as any, [storageKey]); 150 | } 151 | return this.root.children[i]; 152 | } 153 | // check nested 154 | if (this.root.children[i]) { 155 | if (this.root.children[i] instanceof BranchNode || this.root.children[i] instanceof ExtensionNode) { 156 | const newNode = this.insertChild(this.root.children[i] as ParentNode, node, parentNode, storageKey, isLeaf); 157 | if (newNode !== undefined)
{ 158 | return newNode; 159 | } 160 | } 161 | } 162 | } 163 | 164 | return undefined; 165 | } 166 | 167 | encode(): Buffer | null { 168 | return rlp.encode(this.root.encode()); 169 | } 170 | } 171 | 172 | // todo this needs testing with other smart contracts than the simple MappingContract 173 | export function addDeletedValue(parentNode: ParentNode, storageProof: StorageProof): LeafNode | undefined { 174 | if (parentNode instanceof ExtensionNode) { 175 | logger.error('Can not add deleted value to ExtensionNode'); 176 | return undefined; 177 | } 178 | if (parentNode instanceof LeafNode) { 179 | logger.error('ParentNode is a leaf node'); 180 | return undefined; 181 | } 182 | const path = ethers.utils.keccak256(ethers.utils.hexZeroPad(storageProof.key, 32)); 183 | let pathPtr = 2; // starts at 2 because of '0x' 184 | for (let i = 0; i < storageProof.proof.length; i += 1) { 185 | const node = rlp.decode(storageProof.proof[i]) as Buffer[]; 186 | if (node.length === 17) pathPtr += 1; 187 | else if (node.length === 2) { 188 | const stringRep = node[0].toString('hex'); 189 | if ((stringRep[0] + stringRep[1]) === '00') { 190 | pathPtr += stringRep.length - 2; 191 | } else { 192 | pathPtr += stringRep.length - 1; 193 | } 194 | } 195 | } 196 | // calc rest of key for leaf creation 197 | const even = (pathPtr % 2) === 0; 198 | let adjustedPath: Buffer; 199 | if (even) { 200 | // eslint-disable-next-line no-bitwise 201 | adjustedPath = Buffer.from((2 << 4).toString(16) + path.substring(pathPtr), 'hex'); 202 | } else { 203 | adjustedPath = Buffer.from(path.substring(pathPtr - 1), 'hex'); 204 | // eslint-disable-next-line no-bitwise 205 | adjustedPath[0] = (3 << 4) + (adjustedPath[0] % 16); 206 | } 207 | logger.debug(adjustedPath); 208 | const artificialNode = [adjustedPath, Buffer.from([0x0])]; 209 | const pathNibble = parseInt(path[pathPtr - 1], 16); 210 | if (parentNode instanceof BranchNode) { 211 | parentNode.children[pathNibble] = new LeafNode(artificialNode, 
[storageProof.key]); 212 | return parentNode.children[pathNibble] as LeafNode ?? undefined; 213 | } 214 | if (!(parentNode.root instanceof BranchNode)) { 215 | logger.error('Cannot add deleted value to anything else than BranchNode at the moment.'); 216 | return undefined; 217 | } 218 | parentNode.root.children[pathNibble] = new LeafNode(artificialNode, [storageProof.key]); 219 | return parentNode.root.children[pathNibble] as LeafNode ?? undefined; 220 | } 221 | 222 | export default ProofPathBuilder; 223 | -------------------------------------------------------------------------------- /src/proofHandler/Types.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable import/no-cycle */ 2 | import BranchNode from './BranchNode'; 3 | import ExtensionNode from './ExtensionNode'; 4 | import LeafNode from './LeafNode'; 5 | import ProofPathBuilder from './ProofPathBuilder'; 6 | 7 | export type EmbeddedNode = LeafNode | BranchNode | ExtensionNode; 8 | export type ParentNode = BranchNode | ExtensionNode | ProofPathBuilder; 9 | export type StorageProof = { 10 | key: string; 11 | value: string; 12 | proof: string[]; 13 | }; 14 | /** 15 | * Represents a account object 16 | */ 17 | export type Account = { 18 | nonce: string; 19 | balance: string; 20 | storageHash: string; 21 | codeHash: string; 22 | }; 23 | export type BlockHeader = { 24 | baseFeePerGas?: string; 25 | difficulty: string; 26 | extraData: string; 27 | miner: string; 28 | gasLimit: string; 29 | gasUsed: string; 30 | mixHash?: string; 31 | transactionsRoot: string; 32 | receiptsRoot: string; 33 | logsBloom: string; 34 | number: string; 35 | nonce?: string; 36 | parentHash: string; 37 | sha3Uncles: string; 38 | stateRoot: string; 39 | timestamp: string; 40 | }; 41 | /** 42 | * Represents the result of a [`eth_getProof`](https://github.com/ethereum/EIPs/blob/master/EIPS/eip-1186.md) RPC request 43 | */ 44 | export interface IGetProof { 45 | accountProof: string[]; 46 | 
address: string; 47 | balance: string; 48 | codeHash: string; 49 | nonce: string; 50 | storageHash: string; 51 | storageProof: StorageProof[]; 52 | } 53 | -------------------------------------------------------------------------------- /src/utils/fileHandler.ts: -------------------------------------------------------------------------------- 1 | import * as fs from 'fs'; 2 | import { logger } from './logger'; 3 | 4 | class FileHandler { 5 | private filePath: string; 6 | 7 | constructor(filePath: string) { 8 | this.filePath = filePath; 9 | try { 10 | if (!fs.statSync(this.filePath).isFile) { 11 | logger.error(`Given filePath ${this.filePath} does not lead to a file`); 12 | } 13 | } catch (e) { 14 | logger.error(e); 15 | } 16 | } 17 | 18 | getJSON(): T | undefined { 19 | try { 20 | const fileContent = this.read(); 21 | if (!fileContent) return undefined; 22 | return JSON.parse(fileContent); 23 | } catch (e) { 24 | logger.error(e); 25 | return undefined; 26 | } 27 | } 28 | 29 | read(): string | undefined { 30 | try { 31 | return fs.readFileSync(this.filePath).toString('utf-8'); 32 | } catch (e) { 33 | logger.error(e); 34 | return undefined; 35 | } 36 | } 37 | } 38 | 39 | export default FileHandler; 40 | -------------------------------------------------------------------------------- /src/utils/logger.ts: -------------------------------------------------------------------------------- 1 | import { Logger, TLogLevelName } from 'tslog'; 2 | 3 | // Logger configuration 4 | const LOG_LEVEL: TLogLevelName = process.env.CROSS_CHAIN_LOG_LEVEL ? process.env.CROSS_CHAIN_LOG_LEVEL as TLogLevelName : 'info'; 5 | const LOGGER_NAME = process.env.CROSS_CHAIN_LOGGER_NAME || 'cross chain main logger'; 6 | 7 | export const logger = new Logger({ name: LOGGER_NAME, minLevel: LOG_LEVEL }); 8 | 9 | export const setLogger = (name: string, minLevelString: string) => { 10 | const minLevel: TLogLevelName = minLevelString ? 
minLevelString as TLogLevelName : 'info'; 11 | process.env.CROSS_CHAIN_LOG_LEVEL = minLevelString; 12 | process.env.CROSS_CHAIN_LOGGER_NAME = name; 13 | 14 | logger.setSettings({ minLevel, name }); 15 | }; 16 | -------------------------------------------------------------------------------- /src/utils/providerHandler.ts: -------------------------------------------------------------------------------- 1 | import { ConnectionInfo } from '@ethersproject/web'; 2 | import { ethers } from 'ethers'; 3 | import { logger } from './logger'; 4 | 5 | export type SupportedProviders = string; 6 | 7 | class ProviderHandler { 8 | private connectionInfo: ConnectionInfo; 9 | 10 | constructor(connectionInfo: ConnectionInfo) { 11 | this.connectionInfo = connectionInfo; 12 | } 13 | 14 | getProviderInstance(): ethers.providers.JsonRpcProvider { 15 | // check if http/https was given 16 | const regexHTTP = /https?:\/\/(www\.)?[-a-zA-Z0-9@:%._+~#=]{1,256}((\.[a-zA-Z0-9()]{1,6}\b([-a-zA-Z0-9()@:%_+.~#?&//=]*))|(:\d+))/; 17 | const match = regexHTTP.exec(this.connectionInfo.url); 18 | if (match === null) { 19 | switch (this.connectionInfo.url as SupportedProviders) { 20 | default: 21 | logger.error(`Provider ${this.connectionInfo.url} not supported.`); 22 | throw new Error(); 23 | } 24 | } 25 | return new ethers.providers.JsonRpcProvider(this.connectionInfo); 26 | } 27 | } 28 | 29 | export default ProviderHandler; 30 | -------------------------------------------------------------------------------- /src/utils/proxy-contract-builder.ts: -------------------------------------------------------------------------------- 1 | import { 2 | LOGIC_CONTRACT_PLACEHOLDER_ADDRESS, 3 | SOURCE_CONTRACT_PLACEHOLDER_ADDRESS, 4 | PROXY_CONTRACT_FILE_NAME, 5 | RELAY_CONTRACT_PLACEHOLDER_ADDRESS, 6 | PROXY_CONTRACT_FILE_PATH, 7 | } from '../config'; 8 | import FileHandler from './fileHandler'; 9 | 10 | class ProxyContractBuilder { 11 | /** 12 | * @dev compiles the proxy and returns its abi and bytecode 13 | * 
@param relayAddress the address of the relay contract that the proxy should use as constant 14 | * @param logicAddress the address of the deployed logic of the source contract on the target chain 15 | * @param sourceAddress the address of the source contract on the source chain 16 | * @return The abi and bytecode of the proxy 17 | */ 18 | static async compiledAbiAndBytecode(relayAddress: string, logicAddress: string, sourceAddress: string): Promise<{ abi: {}; bytecode: string; error?: Boolean | undefined; }> { 19 | const fh = new FileHandler(`${__dirname}/../../${PROXY_CONTRACT_FILE_PATH}/${PROXY_CONTRACT_FILE_NAME}`); 20 | const proxyContractJson: { abi: {}, bytecode: string, error: Boolean } | undefined = fh.getJSON(); 21 | if (!proxyContractJson) return { abi: {}, bytecode: '', error: true }; 22 | 23 | proxyContractJson.bytecode = proxyContractJson.bytecode.split(RELAY_CONTRACT_PLACEHOLDER_ADDRESS.substr(2).toLowerCase()).join(relayAddress.substr(2).toLowerCase()); 24 | proxyContractJson.bytecode = proxyContractJson.bytecode.split(LOGIC_CONTRACT_PLACEHOLDER_ADDRESS.substr(2).toLowerCase()).join(logicAddress.substr(2).toLowerCase()); 25 | proxyContractJson.bytecode = proxyContractJson.bytecode.split(SOURCE_CONTRACT_PLACEHOLDER_ADDRESS.substr(2).toLowerCase()).join(sourceAddress.substr(2).toLowerCase()); 26 | proxyContractJson.error = false; 27 | return proxyContractJson; 28 | } 29 | } 30 | 31 | export default ProxyContractBuilder; 32 | -------------------------------------------------------------------------------- /src/utils/transactionHandler.ts: -------------------------------------------------------------------------------- 1 | import { JsonRpcProvider } from '@ethersproject/providers'; 2 | import * as CliProgress from 'cli-progress'; 3 | import { logger } from './logger'; 4 | import { 5 | findDeploymentBlock, isDebug, toBlockNumber, toParityQuantity, 6 | } from './utils'; 7 | 8 | type KeyObject = { 9 | '*'?: { 10 | 'to': string 11 | }, 12 | '+'?: string 13 | 
}; 14 | 15 | export type ParityResponseData = { 16 | stateDiff: { 17 | [ contractAddress: string ]: { 18 | storage: { 19 | [ key: string ] : KeyObject 20 | } 21 | }; 22 | } 23 | }; 24 | 25 | class TransactionHandler { 26 | private contractAddress: string; 27 | 28 | private provider: JsonRpcProvider; 29 | 30 | private batch: number; 31 | 32 | constructor(contractAddress: string, provider: JsonRpcProvider, batch: number = 50) { 33 | this.contractAddress = contractAddress; 34 | this.provider = provider; 35 | this.batch = batch; 36 | } 37 | 38 | /** Reconstructs the contract's storage by replaying all txs that touched it, in batches of `this.batch`. */ 39 | async getContractStorageFromTxs(latestBlockNumber: string | number = 'latest', earliest_block_number?: string | number): Promise<{ [ key: string ]: string }> { 40 | const txs = await this.getTransactions(latestBlockNumber, earliest_block_number); 41 | const contractStorage: { [key: string]: string } = {}; 42 | 43 | // getting all tx from srcAddress 44 | const txStoragePromises: Array> = []; 45 | let txStorages: Array<{ [ key: string ]: string } | undefined> = []; 46 | 47 | logger.debug(`Replaying ${txs.length} transactions...`); 48 | let replayBar: CliProgress.SingleBar | undefined; 49 | if (!isDebug(logger.settings.minLevel)) { 50 | replayBar = new CliProgress.SingleBar({}, CliProgress.Presets.shades_classic); 51 | replayBar.start(txs.length, 0); 52 | } 53 | while (txs.length > 0) { 54 | const currTx = txs.pop(); 55 | if (currTx) { 56 | txStoragePromises.push(this.replayTransaction(currTx)); 57 | if (txStoragePromises.length >= this.batch) { 58 | // BUGFIX: drain the awaited batch via splice. Previously the array was never 59 | // cleared, so every batch re-awaited and re-concatenated all earlier results. 60 | // eslint-disable-next-line no-await-in-loop 61 | txStorages = txStorages.concat(await Promise.all(txStoragePromises.splice(0, this.batch))); 62 | replayBar?.increment(this.batch); 63 | } 64 | } 65 | } 66 | // BUGFIX: flush the final partial batch (< this.batch promises); previously these 67 | // replays were started but never awaited, silently dropping their storage diffs. 68 | if (txStoragePromises.length > 0) { 69 | txStorages = txStorages.concat(await Promise.all(txStoragePromises.splice(0, txStoragePromises.length))); 70 | replayBar?.increment(txStoragePromises.length); 71 | } 72 | replayBar?.stop(); 73 | logger.debug('Done.'); 74 | txStorages.forEach((storage) => { 75 | if (storage) { 76 | logger.debug('srcTx txStorage: ', storage); 77 | 78 | Object.entries(storage).forEach(([key, value]) => { 79 | // skip the zero storage slot key 80 | if (!key.match(/0x0{64}/)) contractStorage[key] = value; 81 | }); 82 | } 83 | });
74 | 75 | return contractStorage; 76 | } 77 | 78 | async replayTransaction(transaction: string): Promise { 79 | try { 80 | const response: ParityResponseData = await this.provider.send('trace_replayTransaction', [transaction, ['stateDiff']]); 81 | // Ensure the state has been changed 82 | 83 | if (Object.prototype.hasOwnProperty.call(response.stateDiff, this.contractAddress.toLowerCase())) { 84 | const tx = response.stateDiff[this.contractAddress.toLowerCase()]; 85 | logger.debug('tx: ', transaction); 86 | if (tx) { 87 | logger.debug(tx.storage); 88 | const txStorage = tx.storage; 89 | const keys = Object.keys(txStorage); 90 | const obj: { [ key: string ]: string } = {}; 91 | keys.forEach((key) => { 92 | // First case: normal tx 93 | // Second case: deploying tx 94 | const keyObject: KeyObject = txStorage[key]; 95 | if (keyObject['*'] !== undefined) obj[key] = keyObject['*'].to; 96 | else if (keyObject['+'] !== undefined) obj[key] = keyObject['+']; 97 | }); 98 | return obj; 99 | } 100 | } 101 | } catch (err) { 102 | logger.error(err); 103 | } 104 | return undefined; 105 | } 106 | 107 | async getTransactions(latest_block_number: number | string, earliest_block_number?: number | string): Promise> { 108 | logger.debug('Called getTransactions'); 109 | let latest = latest_block_number; 110 | if (typeof (latest) === 'string') latest = await toBlockNumber(latest, this.provider); 111 | 112 | // first find deployment block for more efficiency 113 | let earliest = (earliest_block_number && earliest_block_number !== 'earliest') ? 
earliest_block_number : await findDeploymentBlock(this.contractAddress, this.provider); 114 | if (typeof (earliest) === 'string') earliest = await toBlockNumber(earliest, this.provider); 115 | 116 | if (latest < earliest) { 117 | logger.debug(`Given latest block number ${latest} older than earliest block number ${earliest}.`); 118 | return []; 119 | } 120 | 121 | // gather all transactions 122 | logger.info(`Getting all txs related to ${this.contractAddress} from ${latest - earliest + 1} blocks...`); 123 | const relatedTxs = await this.provider.send('trace_filter', [{ fromBlock: toParityQuantity(earliest), toBlock: toParityQuantity(latest), toAddress: [this.contractAddress] }]); 124 | if (relatedTxs === null) { 125 | logger.error('Could not get relatedTxs through trace_filter. Most likely the used node was not started with --tracing on'); 126 | throw new Error(); 127 | } 128 | logger.debug(`Got ${relatedTxs.length} related txs.`); 129 | logger.info('Done.'); 130 | 131 | return [...new Set(relatedTxs.map(({ transactionHash }) => transactionHash))]; 132 | } 133 | } 134 | 135 | export default TransactionHandler; 136 | -------------------------------------------------------------------------------- /src/utils/utils.ts: -------------------------------------------------------------------------------- 1 | import { BigNumberish, ethers } from 'ethers'; 2 | import * as rlp from 'rlp'; 3 | import { JsonRpcProvider } from '@ethersproject/providers'; 4 | import { Input } from 'rlp'; 5 | import { logger } from './logger'; 6 | 7 | export const BLOCKNUMBER_TAGS = ['latest', 'earliest', 'pending']; 8 | export const DEBUG_TAGS = ['silly', 'trace', 'debug']; 9 | 10 | export namespace EVMOpcodes { 11 | export const contractByteCodeDeploymentPreamble = '608060405234801561001057600080fd5b50'; 12 | export const PUSH1 = '60'; 13 | export const DUP1 = '80'; 14 | export const CODECOPY = '39'; 15 | export const RETURN = 'F3'; 16 | export const STOP = '00'; 17 | export const SSTORE = '55'; 
18 | } 19 | 20 | /** 21 | * Converts a string to a Buffer 22 | * Leading `0x` is stripped 23 | * @param hexString 24 | */ 25 | export function hexStringToBuffer(hexString: string): Buffer { 26 | return ethers.utils.isHexString(hexString) ? Buffer.from(hexString.substring(2), 'hex') : Buffer.from(hexString, 'hex'); 27 | } 28 | 29 | export function toParityQuantity(val: BigNumberish): string { 30 | if (typeof (val) === 'string' && BLOCKNUMBER_TAGS.indexOf(val) > -1) { 31 | return val; 32 | } 33 | return ethers.BigNumber.from(val).toHexString(); 34 | } 35 | 36 | export function isDebug(val: string): Boolean { 37 | return DEBUG_TAGS.indexOf(val) > -1; 38 | } 39 | 40 | export async function toBlockNumber(val: BigNumberish, provider: JsonRpcProvider): Promise { 41 | if (typeof (val) === 'string' && BLOCKNUMBER_TAGS.indexOf(val) > -1) { 42 | return (await provider.getBlock(val)).number; 43 | } 44 | try { 45 | return ethers.BigNumber.from(val).toNumber(); 46 | } catch (e) { 47 | logger.error(`Given val (${val}) is not a valid block identifier.`); 48 | logger.error(e); 49 | throw new Error(); 50 | } 51 | } 52 | 53 | export function encode(input: Input): Buffer { 54 | return (input === '0x0') 55 | ? rlp.encode(Buffer.alloc(0)) 56 | : rlp.encode(input); 57 | } 58 | 59 | export function hexlify(input: string): string { 60 | const val = ethers.utils.hexlify(input); 61 | return (val === '0x') ? 
'0x0' : val; 62 | } 63 | 64 | // binary search for block where contract was deployed 65 | export async function findDeploymentBlock(contractAddress: string, provider: JsonRpcProvider): Promise { 66 | let low: number = 0; 67 | let high: number = await provider.getBlockNumber(); 68 | 69 | let mid: number; 70 | /* eslint-disable no-await-in-loop */ 71 | while (low <= high) { 72 | mid = Math.trunc((low + high) / 2); 73 | 74 | // getCode returns '0x' (length 2) for an undeployed address; length > 3 means real bytecode is present. 75 | const currCode = await provider.getCode(contractAddress, mid); 76 | // return mid if the smart contract was deployed on that block (previousBlock.getCode(smartContract) === none) 77 | if (currCode.length > 3 && (mid === 0 || (await provider.getCode(contractAddress, mid - 1)).length < 4)) return mid; 78 | 79 | if (currCode.length > 3) high = mid - 1; 80 | 81 | else low = mid + 1; 82 | } 83 | /* eslint-enable no-await-in-loop */ 84 | 85 | // -1 signals that no deployment block was found (contract never deployed at this address). 86 | return -1; 87 | } 88 | 89 | /** 90 | * Retrieves every storage key of a contract by paging through parity_listStorageKeys. 91 | * @param contractAddress The address of the contract 92 | * @param provider The provider to use when sending an RPC request 93 | * @param blockNum the block number to retrieve the storage keys from 94 | * @param batchSize how many keys to retrieve per request [parity_liststoragekeys](https://openethereum.github.io/JSONRPC-parity-module#parity_liststoragekeys) 95 | * @returns all the storage keys of the contract with `address` at block `blockNum` 96 | */ 97 | export async function getAllKeys(contractAddress: string, provider: JsonRpcProvider, blockNum: number | string = 'latest', batchSize: number = 50): Promise { 98 | const keys: Array = []; 99 | let batch: Array = []; 100 | let batchCounter = 1; 101 | const blockNumParity = toParityQuantity(blockNum); 102 | /* eslint-disable no-await-in-loop */ 103 | do { 104 | // offset for pagination: resume after the last key fetched so far (null on the first page) 105 | const offset = (batchCounter > 1) ?
keys[keys.length - 1] : null; 103 | 104 | batch = await provider.send('parity_listStorageKeys', [ 105 | contractAddress, batchSize * batchCounter, offset, blockNumParity, 106 | ]); 107 | if (batch === null) { 108 | logger.error(`Could not get keys for ${contractAddress}. Is it deployed at the node at ${provider.connection.url}?`); 109 | throw new Error(); 110 | } 111 | keys.push(...batch); 112 | batchCounter += 1; 113 | } while (batch.length >= batchSize); 114 | /* eslint-enable no-await-in-loop */ 115 | return keys.map((key) => ethers.utils.hexZeroPad(key, 32)); 116 | } 117 | 118 | export function hexToAscii(str1) { 119 | const hex = str1.toString(); 120 | let str = ''; 121 | for (let n = 0; n < hex.length; n += 2) { 122 | str += String.fromCharCode(parseInt(hex.substr(n, 2), 16)); 123 | } 124 | return str; 125 | } 126 | 127 | export async function createDeployingByteCode(srcAddress: string, provider: JsonRpcProvider): Promise { 128 | let code: string = await provider.getCode(srcAddress); 129 | code = code.substring(2); // remove 0x 130 | 131 | let deployCode = EVMOpcodes.contractByteCodeDeploymentPreamble; 132 | const pushOpCodeInt = parseInt(EVMOpcodes.PUSH1, 16); 133 | 134 | // Create Contract code deployment code 135 | let codeLength: string = (code.length / 2).toString(16); // in hex 136 | 137 | codeLength = (codeLength.length % 2) ? `0${codeLength}` : codeLength; 138 | const codeLengthLength: number = codeLength.length / 2; 139 | 140 | deployCode += (pushOpCodeInt + codeLengthLength - 1).toString(16); 141 | deployCode += codeLength; 142 | deployCode += EVMOpcodes.DUP1; 143 | 144 | let deployCodeLength: string = ((deployCode.length / 2) + 9).toString(16); 145 | deployCodeLength = (deployCodeLength.length % 2) ? 
`0${deployCodeLength}` : deployCodeLength; 146 | // Check length of code length and add length accordingly 147 | deployCodeLength = ((deployCodeLength.length / 2) - 1 + (parseInt(deployCodeLength, 16))).toString(16); 148 | deployCodeLength = (deployCodeLength.length % 2) ? `0${deployCodeLength}` : deployCodeLength; 149 | deployCode += (pushOpCodeInt + deployCodeLength.length / 2 - 1).toString(16); 150 | deployCode += deployCodeLength; 151 | deployCode += EVMOpcodes.PUSH1; 152 | deployCode += '00'; 153 | deployCode += EVMOpcodes.CODECOPY; 154 | deployCode += EVMOpcodes.PUSH1; 155 | deployCode += '00'; 156 | deployCode += EVMOpcodes.RETURN; 157 | deployCode += EVMOpcodes.STOP; 158 | 159 | deployCode += code; 160 | 161 | return deployCode; 162 | } 163 | 164 | export async function processPromiseBatches(promises: Array>, batch: number = 20): Promise> { 165 | let array: Array = []; 166 | while (promises.length > 0) { 167 | try { 168 | // eslint-disable-next-line no-await-in-loop 169 | const currBatch = await Promise.all(promises.splice(0, batch)); 170 | array = array.concat(currBatch); 171 | } catch (e) { 172 | logger.error(e); 173 | process.exit(-1); 174 | } 175 | } 176 | return array; 177 | } 178 | -------------------------------------------------------------------------------- /test/config/encryptedAccount.json: -------------------------------------------------------------------------------- 1 | {"address":"ca356bb324d1a5da8d8f3f58aa7bc53d3cc28309","id":"44240780-d2db-4a1c-9906-f07f04fa6097","version":3,"Crypto":{"cipher":"aes-128-ctr","cipherparams":{"iv":"0c43d3ea26e54d2cf026eac339a48a34"},"ciphertext":"a60a38b804efb1c82e07390f2294159f415805e181bceb97f54fc1d35c63a6b2","kdf":"scrypt","kdfparams":{"salt":"6d946c2a1dc202826c82a0af467c7393d8a5f1bb6fea1692a249c1205edb7c98","n":131072,"dklen":32,"p":1,"r":8},"mac":"affa7cb8805a35937a8b40ea6444cbb55260d6c7a44db03e0fd0ae710a0e5b87"}} -------------------------------------------------------------------------------- 
/test/config/test-cli-config.json: -------------------------------------------------------------------------------- 1 | { 2 | "srcChainRpcUrl": "http://localhost:9550", 3 | "targetChainRpcUrl": "http://localhost:9552", 4 | "connectionTimeout": "36000", 5 | "logLevel": "info", 6 | "targetBlocknr": "latest", 7 | "gasLimit": 8000000, 8 | "keyValuePairPerBatch": 100 9 | } -------------------------------------------------------------------------------- /test/extension-validation-test.ts: -------------------------------------------------------------------------------- 1 | import { expect } from 'chai'; 2 | import { BigNumber, Contract, ethers } from 'ethers'; 3 | import { JsonRpcProvider } from '@ethersproject/providers'; 4 | import { SignerWithAddress } from '@nomiclabs/hardhat-ethers/signers'; 5 | import { 6 | RelayContract__factory, SyncCandidate, SyncCandidate__factory, RelayContract, 7 | } from '../src-gen/types'; 8 | import { getAllKeys } from '../src/utils/utils'; 9 | import DiffHandler from '../src/diffHandler/DiffHandler'; 10 | import { PROXY_INTERFACE } from '../src/config'; 11 | import { logger } from '../src/utils/logger'; 12 | import GetProof from '../src/proofHandler/GetProof'; 13 | import ProxyContractBuilder from '../src/utils/proxy-contract-builder'; 14 | import FileHandler from '../src/utils/fileHandler'; 15 | import { TestCLI } from './test-utils'; 16 | import { TxContractInteractionOptions } from '../src/cli/smart-sync'; 17 | 18 | describe('Extension Validation', async () => { 19 | let deployer: SignerWithAddress; 20 | let srcContract: SyncCandidate; 21 | let logicContract: SyncCandidate; 22 | let factory: SyncCandidate__factory; 23 | let provider: JsonRpcProvider; 24 | let relayContract: RelayContract; 25 | let latestBlock; 26 | let proxyContract: Contract; 27 | let chainConfigs: TxContractInteractionOptions | undefined; 28 | 29 | before(async () => { 30 | const fh = new FileHandler(TestCLI.defaultTestConfigFile); 31 | chainConfigs = fh.getJSON(); 32 
| if (!chainConfigs) { 33 | logger.error(`No config available under ${TestCLI.defaultTestConfigFile}`); 34 | process.exit(-1); 35 | } 36 | logger.setSettings({ minLevel: 'info', name: 'extension-validation-test.ts' }); 37 | provider = new ethers.providers.JsonRpcProvider({ url: chainConfigs.srcChainRpcUrl, timeout: BigNumber.from(chainConfigs.connectionTimeout).toNumber() }); 38 | deployer = await SignerWithAddress.create(provider.getSigner()); 39 | }); 40 | 41 | beforeEach(async () => { 42 | factory = new SyncCandidate__factory(deployer); 43 | srcContract = await factory.deploy(); 44 | logicContract = await factory.deploy(); 45 | // deploy the relay contract 46 | const Relayer = new RelayContract__factory(deployer); 47 | relayContract = await Relayer.deploy(); 48 | await srcContract.setValueA(42); 49 | await srcContract.setValueB(100); 50 | }); 51 | 52 | it('It should create an optimized proof with extension nodes in it', async () => { 53 | srcContract = await factory.deploy(); 54 | 55 | // insert some random values 56 | await srcContract.insert(420, 30); 57 | await srcContract.insert(470, 1); 58 | await srcContract.insert(710, 2); 59 | await srcContract.insert(337, 3); 60 | await srcContract.insert(331, 4); 61 | await srcContract.insert(20, 5); 62 | await srcContract.insert(400, 6); 63 | await srcContract.insert(50, 8); 64 | await srcContract.insert(752, 6); 65 | await srcContract.insert(602, 7); 66 | await srcContract.insert(691, 9); 67 | await srcContract.insert(333, 33); 68 | 69 | const keys = await getAllKeys(srcContract.address, provider); 70 | latestBlock = await provider.send('eth_getBlockByNumber', ['latest', true]); 71 | // create a proof of the source contract's storage 72 | 73 | const proof = new GetProof(await provider.send('eth_getProof', [srcContract.address, keys])); 74 | 75 | const rlpOptimized = await proof.optimizedStorageProof([]); 76 | expect(rlpOptimized).to.not.be.undefined; 77 | expect(rlpOptimized).to.not.be.null; 78 | if (!rlpOptimized) 
process.exit(-1); 79 | // todo check for extension node at root instead of hash 80 | expect(ethers.utils.keccak256(rlpOptimized)).to.equal('0x2829d0a86c592741a6ba0f14decb48a8344c9e791b1cb8dc0a5cf68e125ef59e'); 81 | }); 82 | 83 | it('proxyContract should accept proof with extensions in it and create an optimized proof with an extension node as root', async () => { 84 | srcContract = await factory.deploy(); 85 | 86 | // insert some random values 87 | await srcContract.insert(20, 5); 88 | await srcContract.insert(333, 33); 89 | 90 | const keys = await getAllKeys(srcContract.address, provider); 91 | latestBlock = await provider.send('eth_getBlockByNumber', ['latest', true]); 92 | // create a proof of the source contract's storage 93 | const proof = new GetProof(await provider.send('eth_getProof', [srcContract.address, keys])); 94 | 95 | await relayContract.addBlock(latestBlock.stateRoot, latestBlock.number); 96 | 97 | const compiledProxy = await ProxyContractBuilder.compiledAbiAndBytecode(relayContract.address, logicContract.address, srcContract.address); 98 | expect(compiledProxy.error).to.be.false; 99 | 100 | // deploy the proxy with the state of the `srcContract` 101 | const proxyFactory = new ethers.ContractFactory(PROXY_INTERFACE, compiledProxy.bytecode, deployer); 102 | 103 | proxyContract = await proxyFactory.deploy(); 104 | 105 | const proxyKeys: Array = []; 106 | const proxyValues: Array = []; 107 | proof.storageProof.forEach((p) => { 108 | proxyKeys.push(ethers.utils.hexZeroPad(p.key, 32)); 109 | proxyValues.push(ethers.utils.hexZeroPad(p.value, 32)); 110 | }); 111 | await proxyContract.addStorage(proxyKeys, proxyValues, { gasLimit: chainConfigs?.gasLimit }); 112 | 113 | // The storage diff between `srcContract` and `proxyContract` comes up empty: both storage layouts are the same 114 | const differ = new DiffHandler(provider); 115 | const diff = await differ.getDiffFromStorage(srcContract.address, proxyContract.address); 116 | 
expect(diff.isEmpty()).to.be.true;

        const rlpOptimized = await proof.optimizedStorageProof([]);
        expect(rlpOptimized).to.not.be.undefined;
        expect(rlpOptimized).to.not.be.null;
        if (!rlpOptimized) process.exit(-1);
        // todo check for extension node at root instead of hash
        expect(ethers.utils.keccak256(rlpOptimized)).to.equal('0x85d573333fa3b30be3498ea3b98794c5317119c4381b4f7afa45e61190b36314');
    });
});

// ---------------------------------------------------------------------------
// test/get-diff-test.ts
// Tests for DiffHandler: computing storage diffs between two contracts (or two
// blocks of the same contract) either from raw storage or from contract txs.
// ---------------------------------------------------------------------------
import { expect } from 'chai';
import { SignerWithAddress } from '@nomiclabs/hardhat-ethers/signers';
import { JsonRpcProvider } from '@ethersproject/providers';
import { BigNumber, ethers } from 'ethers';
import DiffHandler from '../src/diffHandler/DiffHandler';
import { SimpleStorage, SimpleStorage__factory } from '../src-gen/types';
import { logger } from '../src/utils/logger';
import { TxContractInteractionOptions } from '../src/cli/smart-sync';
import FileHandler from '../src/utils/fileHandler';
import { TestCLI } from './test-utils';

describe('Get contract storage diff', () => {
    let deployer: SignerWithAddress;
    let storageSrc: SimpleStorage;
    let storageTarget: SimpleStorage;
    let differ: DiffHandler;
    let provider: JsonRpcProvider;
    let chainConfigs: TxContractInteractionOptions | undefined;

    before(async () => {
        // Load chain configuration; abort the whole run if it is missing.
        const fh = new FileHandler(TestCLI.defaultTestConfigFile);
        chainConfigs = fh.getJSON();
        if (!chainConfigs) {
            logger.error(`No config available under ${TestCLI.defaultTestConfigFile}`);
            process.exit(-1);
        }
        provider = new ethers.providers.JsonRpcProvider({ url: chainConfigs.srcChainRpcUrl, timeout: BigNumber.from(chainConfigs.connectionTimeout).toNumber() });
        deployer = await SignerWithAddress.create(provider.getSigner());
        logger.setSettings({ minLevel: 'info', name: 'get-diff-test.ts' });
    });

    beforeEach(async () => {
        // Fresh differ and two fresh, distinct SimpleStorage deployments per test.
        differ = new DiffHandler(provider);
        const Storage = new SimpleStorage__factory(deployer);
        storageSrc = await Storage.deploy();
        storageTarget = await Storage.deploy();

        expect(storageSrc.address).to.not.equal(storageTarget.address);
    });

    it('Should get an empty diff for same contract with getDiffFromStorage', async () => {
        const diff = await differ.getDiffFromStorage(storageSrc.address);
        expect(diff.isEmpty()).to.be.true;
    });

    it('Should get a single additional key in diff.getDiffFromStorage after setting a value', async () => {
        // set value at storage slot 0
        const tx = await storageSrc.setA(1337);
        const blockNum = tx.blockNumber ?? await provider.getBlockNumber();
        // compare the second latest block against the block
        // that includes the tx that set the value of storage key 0
        let diff = await differ.getDiffFromStorage(storageSrc.address, storageSrc.address, blockNum - 1, 'latest');
        // the diff includes an additional key
        expect(diff.diffs.length).to.equal(1);
        const adds = diff.adds();
        expect(adds.length).to.equal(1);
        expect(ethers.BigNumber.from(adds[0].key).eq(ethers.BigNumber.from(0))).to.be.true;

        // comparing the latest block against the second latest ('latest' - 1)
        // results in a diff with a removed key
        diff = await differ.getDiffFromStorage(storageSrc.address, storageSrc.address, 'latest', blockNum - 1);
        expect(diff.diffs.length).to.equal(1);
        const removes = diff.removes();
        expect(removes.length).to.equal(1);
        expect(ethers.BigNumber.from(removes[0].key).eq(ethers.BigNumber.from(0))).to.be.true;
    });

    it('Should get a single additional key in diff.getDiffFromSrcContractTxs after setting a value', async () => {
        // set value at storage slot 0
        let tx = await storageSrc.setA(1337);
        let blockNum = tx.blockNumber ?? await provider.getBlockNumber();
        // check for changes in srcContract tx
        let diff = await differ.getDiffFromSrcContractTxs(storageSrc.address, blockNum, blockNum);
        // the diff includes an additional key
        expect(diff.diffs.length).to.equal(1);
        const adds = diff.adds();
        expect(adds.length).to.equal(1);
        expect(ethers.BigNumber.from(adds[0].key).eq(ethers.BigNumber.from(0))).to.be.true;

        // cannot differentiate between add, change and remove
        tx = await storageSrc.setA(0x0);
        blockNum = tx.blockNumber ?? await provider.getBlockNumber();
        // results in a diff with a removed key
        diff = await differ.getDiffFromSrcContractTxs(storageSrc.address, blockNum, blockNum);
        expect(diff.diffs.length).to.equal(1);
        const removes = diff.removes();
        expect(removes.length).to.equal(1);
        expect(ethers.BigNumber.from(removes[0].key).eq(ethers.BigNumber.from(0))).to.be.true;
    });

    it('Should get a single changed key in diff.getDiffFromStorage after changing a value in the same contract', async () => {
        // set value at storage slot 0
        await storageSrc.setA(1337);
        const tx = await storageSrc.setA(42);
        const blockNum = tx.blockNumber ?? await provider.getBlockNumber();

        const diff = await differ.getDiffFromStorage(storageSrc.address, storageSrc.address, blockNum - 1);
        expect(diff.diffs.length).to.equal(1);
        const changed = diff.changes();
        expect(changed.length).to.equal(1);
        expect(ethers.BigNumber.from(changed[0].key).eq(ethers.BigNumber.from(0))).to.be.true;

        // value was changed from 1337 to 42, so the srcValue represents the old value...
        expect(ethers.BigNumber.from(changed[0].srcValue).eq(ethers.BigNumber.from(1337))).to.be.true;

        // ...and the target value the new value
        expect(ethers.BigNumber.from(changed[0].targetValue).eq(ethers.BigNumber.from(42))).to.be.true;
    });

    it('Should get a single changed key in diff.getDiffFromSrcContractTxs after changing a value in the same contract', async () => {
        // set value at storage slot 0
        await storageSrc.setA(1337);
        const tx = await storageSrc.setA(42);
        const blockNum = tx.blockNumber ?? await provider.getBlockNumber();

        const diff = await differ.getDiffFromSrcContractTxs(storageSrc.address, 'latest', blockNum);
        expect(diff.diffs.length).to.equal(1);
        const changed = diff.changes();
        expect(changed.length).to.equal(1);
        expect(ethers.BigNumber.from(changed[0].key).eq(ethers.BigNumber.from(0))).to.be.true;
    });

    it('Should get correct diff.getDiffFromStorage between different contracts', async () => {
        await storageSrc.setA(42);
        await storageTarget.setA(42);
        // state of both contracts is now identical for the latest block
        let diff = await differ.getDiffFromStorage(storageSrc.address, storageTarget.address, 'latest');
        expect(diff.isEmpty()).to.be.true;

        // changing each value in both contracts results in a single diff
        await storageSrc.setA(1337);
        await storageTarget.setA(9000);

        diff = await differ.getDiffFromStorage(storageSrc.address, storageTarget.address, 'latest');
        expect(diff.diffs.length).to.equal(1);
        const changed = diff.changes();
        expect(changed.length).to.equal(1);
        expect(ethers.BigNumber.from(changed[0].key).eq(ethers.BigNumber.from(0))).to.be.true;

        expect(ethers.BigNumber.from(changed[0].srcValue).eq(ethers.BigNumber.from(1337))).to.be.true;

        expect(ethers.BigNumber.from(changed[0].targetValue).eq(ethers.BigNumber.from(9000))).to.be.true;
    });

    it('Should get all new changes through getDiffFromSrcBlockTxs', async () => {
        // insert some values to fill the contract
        // (typed Promise arrays; the dump had these as untyped `any`)
        const inserts: Array<Promise<unknown>> = [];
        for (let i = 0; i < 10; i += 1) {
            inserts.push(storageSrc.insert(i, 20));
        }
        await Promise.all(inserts);
        const currBlockNr = await provider.getBlockNumber() + 1;
        // change some of those values
        const changes: Array<Promise<unknown>> = [];
        for (let i = 0; i < 5; i += 1) {
            changes.push(storageSrc.insert(i, 30));
        }
        await Promise.all(changes);

        const diff = await differ.getDiffFromSrcContractTxs(storageSrc.address, 'latest', currBlockNr);
        expect(diff.diffs.length).to.equal(5);
        const changed = diff.changes();
        expect(changed.length).to.equal(5);
    });
});

// ---------------------------------------------------------------------------
// test/list-storage-test.ts
// Tests for reading contract storage via parity_listStorageKeys / eth_getProof.
// ---------------------------------------------------------------------------
import { expect } from 'chai';
import { SignerWithAddress } from '@nomiclabs/hardhat-ethers/signers';
import { JsonRpcProvider } from '@ethersproject/providers';
import { BigNumber, ethers } from 'ethers';
import { SimpleStorage, SimpleStorage__factory } from '../src-gen/types';
import { verifyEthGetProof, TestCLI } from './test-utils';
import GetProof from '../src/proofHandler/GetProof';
import FileHandler from '../src/utils/fileHandler';
import { TxContractInteractionOptions } from '../src/cli/smart-sync';
import { logger } from '../src/utils/logger';

describe('Storage', async () => {
    let deployer: SignerWithAddress;
    let storage: SimpleStorage;
    let provider: JsonRpcProvider;
    let chainConfigs: TxContractInteractionOptions | undefined;

    before(async () => {
        const fh = new
FileHandler(TestCLI.defaultTestConfigFile);
        chainConfigs = fh.getJSON();
        if (!chainConfigs) {
            logger.error(`No config available under ${TestCLI.defaultTestConfigFile}`);
            process.exit(-1);
        }
        provider = new ethers.providers.JsonRpcProvider({ url: chainConfigs.srcChainRpcUrl, timeout: BigNumber.from(chainConfigs.connectionTimeout).toNumber() });
        deployer = await SignerWithAddress.create(provider.getSigner());
        logger.setSettings({ minLevel: 'info', name: 'list-storage-test.ts' });
    });

    it('Should deploy and return default values', async () => {
        const Storage = new SimpleStorage__factory(deployer);
        storage = await Storage.deploy();

        expect((await storage.getA()).eq(0)).to.be.true;
        expect((await storage.getB()).eq(42)).to.be.true;
        expect((await storage.getValue(deployer.address)).eq(0)).to.be.true;
    });

    it('Should read correct storage after transactions', async () => {
        // assign a value to `a`
        const newValue = 1337;
        expect(await storage.setA(newValue)).to.exist;
        const keys = await provider.send('parity_listStorageKeys', [
            storage.address, 5, null,
        ]);
        // now there should be 2 storage keys (the comment previously said 3,
        // contradicting the assertion below)
        expect(keys.length).to.equal(2);

        // `a` is the first field of the contract and its value is stored at slot 0
        const aValue = await provider.getStorageAt(storage.address, 0);
        expect(ethers.BigNumber.from(newValue).eq(aValue)).to.be.true;
    });

    it('Should read correct mapping storage', async () => {
        const value = 1000;
        expect(await storage.setValue(value)).to.exist;
        const keys = await provider.send('parity_listStorageKeys', [
            storage.address, 5, null,
        ]);
        // after setting `a` and inserting a value in the mapping there should be
        // 3 storage keys (the comment previously said 4, contradicting the assertion)
        expect(keys.length).to.equal(3);
        const storageKey = ethers.BigNumber.from(keys[1]);

        // the `storageKey` of the `value` is the hash of the `key` of `value` in the mapping
        // concatenated with the slot of the mapping in the contract: `keccak256(key . slot)`
        const location = ethers.utils.hexConcat([
            ethers.utils.hexZeroPad(deployer.address, 32), ethers.utils.hexZeroPad('0x03', 32),
        ]);
        expect(ethers.utils.keccak256(location)).to.equal(keys[1]);

        const storedValue = await provider.getStorageAt(storage.address, storageKey);
        expect(ethers.BigNumber.from(storedValue).toNumber()).to.equal(value);
    });

    it('Should return a valid proof', async () => {
        const keys = await provider.send('parity_listStorageKeys', [
            storage.address, 5, null,
        ]);
        // [`eth_getProof`](https://github.com/ethereum/EIPs/blob/master/EIPS/eip-1186.md) implemented at
        // https://github.com/openethereum/openethereum/blob/27a0142af14730bcb50eeacc84043dc6f49395e8/rpc/src/v1/impls/eth.rs#L677
        const proof = await provider.send('eth_getProof', [storage.address, keys]);

        // get the latest block
        const block = await provider.send('eth_getBlockByNumber', ['latest', true]);

        // verify the proof against the block's state root
        expect(await verifyEthGetProof(proof, block.stateRoot)).to.be.true;
    });
});

// ---------------------------------------------------------------------------
// test/log-delegate-test.ts
// Tests that the proxy only delegates read-only/pure calls, never state writes.
// ---------------------------------------------------------------------------
import { expect } from 'chai';
import { SignerWithAddress } from '@nomiclabs/hardhat-ethers/signers';
import { BigNumber, ethers } from 'ethers';
import { JsonRpcProvider } from '@ethersproject/providers';
import {
    CallingContract,
    CallingContract__factory,
    TestLogicContract,
    TestLogicContract__factory,
    TestProxyContract,
    TestProxyContract__factory,
} from '../src-gen/types';
import { TxContractInteractionOptions } from '../src/cli/smart-sync';
import FileHandler from
'../src/utils/fileHandler';
import { TestCLI } from './test-utils';
import { logger } from '../src/utils/logger';

describe('Test static proxy calls', () => {
    let deployer: SignerWithAddress;
    let logic: TestLogicContract;
    let proxy: TestProxyContract;
    let caller: CallingContract;
    let chainConfigs: TxContractInteractionOptions | undefined;
    let provider: JsonRpcProvider;

    // Minimal ABI used to talk to the proxy/caller without the generated types.
    const abi = [
        'function getValue() view returns (uint256)',
        'function setValue(uint256 value)',
        'function valuePure() public pure returns (uint256)',
    ];

    before(async () => {
        const fh = new FileHandler(TestCLI.defaultTestConfigFile);
        chainConfigs = fh.getJSON();
        if (!chainConfigs) {
            logger.error(`No config available under ${TestCLI.defaultTestConfigFile}`);
            process.exit(-1);
        }
        provider = new ethers.providers.JsonRpcProvider({ url: chainConfigs.srcChainRpcUrl, timeout: BigNumber.from(chainConfigs.connectionTimeout).toNumber() });
        deployer = await SignerWithAddress.create(provider.getSigner());
    });

    it('Should deploy the contracts', async () => {
        // Chain: caller -> proxy -> logic.
        const Logic = new TestLogicContract__factory(deployer);
        logic = await Logic.deploy();
        const Proxy = new TestProxyContract__factory(deployer);
        proxy = await Proxy.deploy(logic.address);
        const Caller = new CallingContract__factory(deployer);
        caller = await Caller.deploy(proxy.address);
    });

    it('Should not delegate set call through proxy contract', async () => {
        const contract = new ethers.Contract(proxy.address, abi, deployer);
        try {
            // try to set the value which should fail
            await contract.setValue(2);
        } catch (error) {
            // ignore error
        }
        // validate that the setValue did not succeed and the contract variable still has its old value
        expect((await contract.getValue()).eq(37)).to.be.true;
    });

    it('Should not delegate set call through calling contract', async () => {
        // Same check as above, but routed through the intermediate CallingContract.
        const contract = new ethers.Contract(caller.address, abi, deployer);
        try {
            await contract.setValue(2);
        } catch (error) {
            // ignore error
        }
        expect((await contract.getValue()).eq(37)).to.be.true;
    });

    it('Should allow delegation of pure functions through the proxy', async () => {
        const contract = new ethers.Contract(proxy.address, abi, deployer);
        expect((await contract.valuePure()).eq(42)).to.be.true;
    });
});

// ---------------------------------------------------------------------------
// test/new-initialization-test.ts
// End-to-end test: initialize a proxy from a source contract and migrate diffs.
// ---------------------------------------------------------------------------
import { expect } from 'chai';
import { JsonRpcProvider } from '@ethersproject/providers';
import { SignerWithAddress } from '@nomiclabs/hardhat-ethers/signers';
import { BigNumber, ethers } from 'ethers';
import DiffHandler from '../src/diffHandler/DiffHandler';
import { logger } from '../src/utils/logger';
import { TestChainProxy, InitializationResult, TestCLI } from './test-utils';
import {
    RelayContract__factory, MappingContract, MappingContract__factory, RelayContract,
} from '../src-gen/types';
import FileHandler from '../src/utils/fileHandler';
import { TxContractInteractionOptions } from '../src/cli/smart-sync';

const MAX_VALUE = 1000000;

describe('New Initialization', async () => {
    let srcDeployer: SignerWithAddress;
    let targetDeployer: SignerWithAddress;
    let srcContract: MappingContract;
    let logicContract: MappingContract;
    let factory: MappingContract__factory;
    let srcProvider: JsonRpcProvider;
    let targetProvider: JsonRpcProvider;
    let relayContract: RelayContract;
    let chainConfigs: TxContractInteractionOptions | undefined;
    let chainProxy: TestChainProxy;

    before(async () => {
        const fh = new
FileHandler(TestCLI.defaultTestConfigFile); 30 | chainConfigs = fh.getJSON(); 31 | if (!chainConfigs) { 32 | logger.error(`No config available under ${TestCLI.defaultTestConfigFile}`); 33 | process.exit(-1); 34 | } 35 | srcProvider = new ethers.providers.JsonRpcProvider({ url: chainConfigs.srcChainRpcUrl, timeout: BigNumber.from(chainConfigs.connectionTimeout).toNumber() }); 36 | targetProvider = new ethers.providers.JsonRpcProvider({ url: chainConfigs.targetChainRpcUrl, timeout: BigNumber.from(chainConfigs.connectionTimeout).toNumber() }); 37 | srcDeployer = await SignerWithAddress.create(srcProvider.getSigner()); 38 | targetDeployer = await SignerWithAddress.create(targetProvider.getSigner()); 39 | logger.setSettings({ minLevel: 'info', name: 'new-initialization.ts' }); 40 | }); 41 | 42 | beforeEach(async () => { 43 | factory = new MappingContract__factory(srcDeployer); 44 | srcContract = await factory.deploy(); 45 | logicContract = await factory.deploy(); 46 | // deploy the relay contract 47 | const Relayer = new RelayContract__factory(targetDeployer); 48 | relayContract = await Relayer.deploy(); 49 | if (!chainConfigs) { 50 | logger.error(`No config available under ${TestCLI.defaultTestConfigFile}`); 51 | process.exit(-1); 52 | } 53 | chainProxy = new TestChainProxy(srcContract, logicContract, chainConfigs, srcDeployer, targetDeployer, relayContract, srcProvider, targetProvider); 54 | }); 55 | 56 | it('Contract with map containing 50 values, update 10 values', async () => { 57 | const mapSize = 50; 58 | let initialization: InitializationResult; 59 | try { 60 | initialization = await chainProxy.initializeProxyContract(mapSize, MAX_VALUE); 61 | expect(initialization.migrationState).to.be.true; 62 | } catch (e) { 63 | logger.error(e); 64 | return false; 65 | } 66 | 67 | // The storage diff between `srcContract` and `proxyContract` comes up empty: both storage layouts are the same 68 | const differ = new DiffHandler(srcProvider, targetProvider); 69 | let diff = 
await differ.getDiffFromStorage(srcContract.address, initialization.proxyContract.address); 70 | expect(diff.isEmpty()).to.be.true; 71 | 72 | // change all the previous synced values 73 | const result = await chainProxy.changeDeepestValues(10, MAX_VALUE); 74 | expect(result).to.be.true; 75 | 76 | diff = await differ.getDiffFromStorage(srcContract.address, initialization.proxyContract.address); 77 | const changedKeys = diff.getKeys(); 78 | 79 | // migrate changes to proxy contract 80 | const migrationResult = await chainProxy.migrateChangesToProxy(changedKeys); 81 | expect(migrationResult.migrationResult).to.be.true; 82 | if (!migrationResult.receipt) { 83 | logger.fatal('No receipt provided'); 84 | process.exit(-1); 85 | } 86 | 87 | logger.info('Gas used for updating 10 values in map with 50 values: ', migrationResult.receipt.gasUsed.toNumber()); 88 | 89 | // after update storage layouts are equal, no diffs 90 | diff = await differ.getDiffFromStorage(srcContract.address, initialization.proxyContract.address); 91 | return expect(diff.isEmpty()).to.be.true; 92 | }); 93 | }); 94 | -------------------------------------------------------------------------------- /test/optimized-storage-proof-test.ts: -------------------------------------------------------------------------------- 1 | import { SignerWithAddress } from '@nomiclabs/hardhat-ethers/signers'; 2 | import { JsonRpcProvider } from '@ethersproject/providers'; 3 | import { BigNumber, ethers } from 'ethers'; 4 | import { logger } from '../src/utils/logger'; 5 | import { SimpleStorage, SimpleStorage__factory } from '../src-gen/types'; 6 | import GetProof from '../src/proofHandler/GetProof'; 7 | import { TxContractInteractionOptions } from '../src/cli/smart-sync'; 8 | import FileHandler from '../src/utils/fileHandler'; 9 | import { TestCLI } from './test-utils'; 10 | 11 | describe('Test storage proof optimization', async () => { 12 | let deployer: SignerWithAddress; 13 | let storage: SimpleStorage; 14 | let provider: 
JsonRpcProvider; 15 | let chainConfigs: TxContractInteractionOptions | undefined; 16 | 17 | before(async () => { 18 | const fh = new FileHandler(TestCLI.defaultTestConfigFile); 19 | chainConfigs = fh.getJSON(); 20 | if (!chainConfigs) { 21 | logger.error(`No config available under ${TestCLI.defaultTestConfigFile}`); 22 | process.exit(-1); 23 | } 24 | provider = new ethers.providers.JsonRpcProvider({ url: chainConfigs.srcChainRpcUrl, timeout: BigNumber.from(chainConfigs.connectionTimeout).toNumber() }); 25 | deployer = await SignerWithAddress.create(provider.getSigner()); 26 | const Storage = new SimpleStorage__factory(deployer); 27 | storage = await Storage.deploy(); 28 | logger.setSettings({ minLevel: 'info', name: 'optimized-storage-proof-test.ts' }); 29 | }); 30 | 31 | it('Should insert some mappings and create a nested optimized proof', async () => { 32 | const inserts: any = []; 33 | for (let i = 0; i < 10; i += 1) { 34 | // get some random keys 35 | const entry = { key: Math.floor(Math.random() * Math.floor(1000)), value: i }; 36 | inserts.push(storage.insert(entry.key, entry.value)); 37 | } 38 | await Promise.all(inserts); 39 | 40 | const keys = await provider.send('parity_listStorageKeys', [ 41 | storage.address, 100, null, 42 | ]); 43 | 44 | const proof = new GetProof(await provider.send('eth_getProof', [storage.address, keys])); 45 | await proof.optimizedStorageProof([]); 46 | }); 47 | }); 48 | -------------------------------------------------------------------------------- /test/proof-path-builder-test.ts: -------------------------------------------------------------------------------- 1 | import { expect } from 'chai'; 2 | import { SignerWithAddress } from '@nomiclabs/hardhat-ethers/signers'; 3 | import { JsonRpcProvider } from '@ethersproject/providers'; 4 | import { BigNumber, ethers } from 'ethers'; 5 | import { StorageImitator, StorageImitator__factory } from '../src-gen/types'; 6 | import { logger } from '../src/utils/logger'; 7 | import { 
TxContractInteractionOptions } from '../src/cli/smart-sync'; 8 | import FileHandler from '../src/utils/fileHandler'; 9 | import { TestCLI } from './test-utils'; 10 | import { CSVManager } from '../evaluation/eval-utils'; 11 | import { ChainProxy, ContractAddressMap, RPCConfig } from '../src/chain-proxy'; 12 | 13 | describe('Proof Path Builder Tests', () => { 14 | let deployer: SignerWithAddress; 15 | let storageSrc: StorageImitator; 16 | let provider: JsonRpcProvider; 17 | let chainConfigs: TxContractInteractionOptions | undefined; 18 | let chainProxy: ChainProxy; 19 | 20 | before(async () => { 21 | const fh = new FileHandler(TestCLI.defaultTestConfigFile); 22 | chainConfigs = fh.getJSON(); 23 | if (!chainConfigs) { 24 | logger.error(`No config available under ${TestCLI.defaultTestConfigFile}`); 25 | process.exit(-1); 26 | } 27 | provider = new ethers.providers.JsonRpcProvider({ url: chainConfigs.srcChainRpcUrl, timeout: BigNumber.from(chainConfigs.connectionTimeout).toNumber() }); 28 | deployer = await SignerWithAddress.create(provider.getSigner()); 29 | logger.setSettings({ minLevel: 'info', name: 'proof-path-builder-test.ts' }); 30 | }); 31 | 32 | beforeEach(async () => { 33 | const Storage = new StorageImitator__factory(deployer); 34 | storageSrc = await Storage.deploy(); 35 | if (!chainConfigs) { 36 | logger.error(`No config available under ${TestCLI.defaultTestConfigFile}`); 37 | process.exit(-1); 38 | } 39 | const srcProviderConnectionInfo: ethers.utils.ConnectionInfo = { 40 | url: chainConfigs?.srcChainRpcUrl, 41 | timeout: BigNumber.from(chainConfigs.connectionTimeout).toNumber(), 42 | }; 43 | const targetProviderConnectionInfo: ethers.utils.ConnectionInfo = { 44 | url: chainConfigs.targetChainRpcUrl, 45 | timeout: BigNumber.from(chainConfigs.connectionTimeout).toNumber(), 46 | }; 47 | const contractAddressMap: ContractAddressMap = { 48 | srcContract: storageSrc.address, 49 | }; 50 | const srcRPCConfig: RPCConfig = { 51 | gasLimit: 
BigNumber.from(chainConfigs.gasLimit).toNumber(), 52 | }; 53 | const targetRPCConfig: RPCConfig = { 54 | targetAccountEncryptedJsonPath: TestCLI.targetAccountEncryptedJsonPath, 55 | targetAccountPassword: TestCLI.targetAccountPassword, 56 | gasLimit: BigNumber.from(chainConfigs.gasLimit).toNumber(), 57 | }; 58 | chainProxy = new ChainProxy(contractAddressMap, srcProviderConnectionInfo, srcRPCConfig, targetProviderConnectionInfo, targetRPCConfig); 59 | await chainProxy.init(); 60 | }); 61 | 62 | it('Should build right proof after value was deleted', async () => { 63 | const csvManagerOld = new CSVManager<{ key: string, value: string }>('delete_earlyPairs_13534149.csv', 'test/storageKeyValuePairs'); 64 | const csvManagerNew = new CSVManager<{ key: string, value: string }>('delete_latestPairs_13535417.csv', 'test/storageKeyValuePairs'); 65 | const oldState = csvManagerOld.readFromFile(); 66 | const newState: Array = csvManagerNew.readFromFile(); 67 | const oldKeys: Array = []; 68 | const oldValues: Array = []; 69 | const newKeys: Array = []; 70 | const newValues: Array = []; 71 | oldState.forEach((pair: [key: string, value: string]) => { 72 | oldKeys.push(ethers.utils.hexZeroPad(pair[0], 32)); 73 | oldValues.push(ethers.utils.hexZeroPad(pair[1], 32)); 74 | }); 75 | newState.forEach((pair) => { 76 | newKeys.push(ethers.utils.hexZeroPad(pair[0], 32)); 77 | newValues.push(ethers.utils.hexZeroPad(pair[1], 32)); 78 | }); 79 | while (oldKeys.length > 0) { 80 | await storageSrc.setStorageKey(oldKeys.splice(0, 50), oldValues.splice(0, 50), { gasLimit: BigNumber.from(chainConfigs?.gasLimit).toNumber() }); 81 | } 82 | await chainProxy.migrateSrcContract('latest'); 83 | // todo check if the storage is the same 84 | let changedKeys = await chainProxy.getDiff('srcTx', { targetBlock: 'latest' }); 85 | if (!changedKeys) { 86 | logger.error('Could not get changed keys'); 87 | expect(false); 88 | return; 89 | } 90 | if (changedKeys.getKeys().length > 0) { 91 | logger.error('There is a 
diff.'); 92 | expect(false); 93 | return; 94 | } 95 | oldState.forEach((pair) => { 96 | const index = newState.findIndex((newPair) => ethers.utils.hexZeroPad(pair[0], 32) === ethers.utils.hexZeroPad(newPair[0], 32)); 97 | if (index < 0) { 98 | newKeys.push(pair[0]); 99 | newValues.push(ethers.utils.hexZeroPad('0x0', 32)); 100 | } 101 | }); 102 | await storageSrc.setStorageKey(newKeys, newValues); 103 | changedKeys = await chainProxy.getDiff('srcTx', { targetBlock: 'latest' }); 104 | if (!changedKeys) { 105 | logger.error('could not get changed keys'); 106 | expect(false); 107 | return; 108 | } 109 | await chainProxy.migrateChangesToProxy(changedKeys?.getKeys(), []); 110 | 111 | const proxyProof = await chainProxy.targetProvider.send('eth_getProof', [chainProxy.proxyContractAddress, []]); 112 | const proxyStorageRoot = proxyProof.storageHash.toLowerCase(); 113 | const srcProof = await chainProxy.srcProvider.send('eth_getProof', [chainProxy.srcContractAddress, []]); 114 | const srcStorageRoot = srcProof.storageHash.toLowerCase(); 115 | expect(proxyStorageRoot).to.equal(srcStorageRoot); 116 | }); 117 | 118 | it('Should build right proof after value that changes the merkle treee', async () => { 119 | const csvManagerOld = new CSVManager<{ key: string, value: string }>('early_pairs_for_change_mt_through_add_13535603.csv', 'test/storageKeyValuePairs'); 120 | const csvManagerNew = new CSVManager<{ key: string, value: string }>('latest_pairs_for_change_mt_through_add_13536164.csv', 'test/storageKeyValuePairs'); 121 | const oldState = csvManagerOld.readFromFile(); 122 | const newState: Array = csvManagerNew.readFromFile(); 123 | const oldKeys: Array = []; 124 | const oldValues: Array = []; 125 | const newKeys: Array = []; 126 | const newValues: Array = []; 127 | oldState.forEach((pair: [key: string, value: string]) => { 128 | oldKeys.push(ethers.utils.hexZeroPad(pair[0], 32)); 129 | oldValues.push(ethers.utils.hexZeroPad(pair[1], 32)); 130 | }); 131 | 
newState.forEach((pair) => { 132 | newKeys.push(ethers.utils.hexZeroPad(pair[0], 32)); 133 | newValues.push(ethers.utils.hexZeroPad(pair[1], 32)); 134 | }); 135 | while (oldKeys.length > 0) { 136 | await storageSrc.setStorageKey(oldKeys.splice(0, 50), oldValues.splice(0, 50), { gasLimit: BigNumber.from(chainConfigs?.gasLimit).toNumber() }); 137 | } 138 | logger.debug(`srcContractAddress: ${storageSrc.address}`); 139 | await chainProxy.migrateSrcContract('latest'); 140 | 141 | let changedKeys = await chainProxy.getDiff('srcTx', { targetBlock: 'latest' }); 142 | if (!changedKeys) { 143 | logger.error('Could not get changed keys'); 144 | expect(false); 145 | return; 146 | } 147 | if (changedKeys.getKeys().length > 0) { 148 | logger.error('There is a diff.'); 149 | expect(false); 150 | return; 151 | } 152 | oldState.forEach((pair) => { 153 | const index = newState.findIndex((newPair) => ethers.utils.hexZeroPad(pair[0], 32) === ethers.utils.hexZeroPad(newPair[0], 32)); 154 | if (index < 0) { 155 | newKeys.push(pair[0]); 156 | newValues.push(ethers.utils.hexZeroPad('0x0', 32)); 157 | } 158 | }); 159 | await storageSrc.setStorageKey(newKeys, newValues); 160 | changedKeys = await chainProxy.getDiff('srcTx', { targetBlock: 'latest' }); 161 | if (!changedKeys) { 162 | logger.error('could not get changed keys'); 163 | expect(false); 164 | return; 165 | } 166 | await chainProxy.migrateChangesToProxy(changedKeys?.getKeys(), changedKeys.fromKeys); 167 | 168 | const proxyProof = await chainProxy.targetProvider.send('eth_getProof', [chainProxy.proxyContractAddress, []]); 169 | const proxyStorageRoot = proxyProof.storageHash.toLowerCase(); 170 | const srcProof = await chainProxy.srcProvider.send('eth_getProof', [chainProxy.srcContractAddress, []]); 171 | const srcStorageRoot = srcProof.storageHash.toLowerCase(); 172 | expect(proxyStorageRoot).to.equal(srcStorageRoot); 173 | }); 174 | }); 175 | -------------------------------------------------------------------------------- 
/test/scale-test.ts:
--------------------------------------------------------------------------------
import { expect } from 'chai';
import { JsonRpcProvider } from '@ethersproject/providers';
import { SignerWithAddress } from '@nomiclabs/hardhat-ethers/signers';
import { BigNumber, ethers } from 'ethers';
import DiffHandler from '../src/diffHandler/DiffHandler';
import { logger } from '../src/utils/logger';
import { TestChainProxy, TestCLI } from './test-utils';
import {
    RelayContract__factory,
    MappingContract,
    MappingContract__factory,
    RelayContract,
} from '../src-gen/types';
import { TxContractInteractionOptions } from '../src/cli/smart-sync';
import FileHandler from '../src/utils/fileHandler';

// Upper bound passed to TestChainProxy when generating map values.
const MAX_VALUE = 1000000;

// NOTE: the describe callback must be synchronous — mocha ignores a returned
// promise from a suite callback (the original passed `async () => {...}`).
describe('Test scaling of contract', () => {
    let srcDeployer: SignerWithAddress;
    let targetDeployer: SignerWithAddress;
    let srcContract: MappingContract;
    let logicContract: MappingContract;
    let factory: MappingContract__factory;
    let srcProvider: JsonRpcProvider;
    let targetProvider: JsonRpcProvider;
    let relayContract: RelayContract;
    let chainConfigs: TxContractInteractionOptions | undefined;
    let chainProxy: TestChainProxy;

    before(async () => {
        // Load chain endpoints/timeouts from the shared test config file.
        const fh = new FileHandler(TestCLI.defaultTestConfigFile);
        chainConfigs = fh.getJSON();
        if (!chainConfigs) {
            logger.error(`No config available under ${TestCLI.defaultTestConfigFile}`);
            process.exit(-1);
        }
        srcProvider = new ethers.providers.JsonRpcProvider({ url: chainConfigs.srcChainRpcUrl, timeout: BigNumber.from(chainConfigs.connectionTimeout).toNumber() });
        targetProvider = new ethers.providers.JsonRpcProvider({ url: chainConfigs.targetChainRpcUrl, timeout: BigNumber.from(chainConfigs.connectionTimeout).toNumber() });
        srcDeployer = await SignerWithAddress.create(srcProvider.getSigner());
        targetDeployer = await SignerWithAddress.create(targetProvider.getSigner());
        logger.setSettings({ minLevel: 'info', name: 'scale_test.ts' });
    });

    beforeEach(async () => {
        // Fresh source/logic/relay contracts per test so state never leaks
        // between scenarios.
        factory = new MappingContract__factory(srcDeployer);
        srcContract = await factory.deploy();
        logicContract = await factory.deploy();
        // deploy the relay contract
        const Relayer = new RelayContract__factory(targetDeployer);
        relayContract = await Relayer.deploy();
        if (!chainConfigs) {
            logger.error(`No config available under ${TestCLI.defaultTestConfigFile}`);
            process.exit(-1);
        }
        chainProxy = new TestChainProxy(
            srcContract,
            logicContract,
            chainConfigs,
            srcDeployer,
            targetDeployer,
            relayContract,
            srcProvider,
            targetProvider,
        );
        logger.debug(`srcContractAddress: ${srcContract.address}, relayContract: ${relayContract.address}`);
    });

    /**
     * Shared flow used by every scenario below:
     *  1. sync a map of `mapSize` random values to a fresh proxy and verify the
     *     storage diff is empty,
     *  2. run `applyChanges` against the source contract,
     *  3. migrate the resulting diff to the proxy and log gas usage under
     *     `gasMessage`,
     *  4. verify source and proxy storage are identical again.
     *
     * @param mapSize number of entries to initialize in the source map
     * @param gasMessage exact log prefix for the gas measurement (kept
     *        byte-identical to the original per-test messages)
     * @param applyChanges mutation to apply on the source contract
     */
    async function syncChangeMigrateAndVerify(mapSize: number, gasMessage: string, applyChanges: () => Promise<unknown>): Promise<void> {
        // insert some random values
        const initialization = await chainProxy.initializeProxyContract(mapSize, MAX_VALUE);
        expect(initialization.migrationState).to.be.true;

        // The storage diff between `srcContract` and `proxyContract` comes up empty: both storage layouts are the same
        const differ = new DiffHandler(srcProvider, targetProvider);
        let diff = await differ.getDiffFromStorage(srcContract.address, initialization.proxyContract.address);
        expect(diff.isEmpty()).to.be.true;

        // change values on the source contract
        await applyChanges();

        // get changed keys
        diff = await differ.getDiffFromStorage(srcContract.address, initialization.proxyContract.address);
        const changedKeys = diff.getKeys();

        // migrate changes to proxy contract
        const migrationResult = await chainProxy.migrateChangesToProxy(changedKeys);
        expect(migrationResult.migrationResult).to.be.true;
        if (!migrationResult.receipt) {
            logger.fatal('No receipt provided');
            process.exit(-1);
        }
        logger.info(gasMessage, migrationResult.receipt.gasUsed.toNumber());

        // after update storage layouts are equal, no diffs
        diff = await differ.getDiffFromStorage(srcContract.address, initialization.proxyContract.address);
        expect(diff.isEmpty()).to.be.true;
    }

    it('Contract with map containing 1 value, update 1 value', async () => {
        await syncChangeMigrateAndVerify(1, 'Gas used for updating 1 value in map with 1 value: ', () => chainProxy.changeValues(1, MAX_VALUE));
    });

    it('Contract with map containing 10 values, update 1 value', async () => {
        await syncChangeMigrateAndVerify(10, 'Gas used for updating first value in map with 10 values: ', () => chainProxy.changeValueAtIndex(0, MAX_VALUE));
    });

    it('Contract with map containing 10 values, update first 5 values', async () => {
        await syncChangeMigrateAndVerify(10, 'Gas used for updating first 5 values in map with 10 values: ', () => chainProxy.changeValues(5, MAX_VALUE));
    });

    it('Contract with map containing 10 values, update last 5 values', async () => {
        // FIXME(review): the original called changeValues(5, MAX_VALUE) here —
        // byte-identical to the "first 5 values" test above, so the last 5
        // entries were never exercised. Change indices 5..9 explicitly instead
        // (changeValueAtIndex is the same helper used in the single-index test;
        // confirm its index semantics match).
        await syncChangeMigrateAndVerify(10, 'Gas used for updating last 5 values in map with 10 values: ', async () => {
            for (let i = 5; i < 10; i += 1) {
                await chainProxy.changeValueAtIndex(i, MAX_VALUE);
            }
        });
    });

    it('Contract with map containing 10 values, update 10 values', async () => {
        await syncChangeMigrateAndVerify(10, 'Gas used for updating 10 values in map with 10 values: ', () => chainProxy.changeValues(10, MAX_VALUE));
    });
});
--------------------------------------------------------------------------------
/test/state-proof-test.ts:
--------------------------------------------------------------------------------
import { expect } from 'chai';
import { BaseTrie as Trie } from 'merkle-patricia-tree';
import { SignerWithAddress } from '@nomiclabs/hardhat-ethers/signers';
import { JsonRpcProvider } from '@ethersproject/providers';
import { BigNumber, ethers } from 'ethers';
import { SimpleStorage, SimpleStorage__factory } from '../src-gen/types';
import * as utils from '../src/utils/utils';
import GetProof from '../src/proofHandler/GetProof';
import { TxContractInteractionOptions } from '../src/cli/smart-sync';
import FileHandler from '../src/utils/fileHandler';
import { TestCLI } from './test-utils';
import { logger } from '../src/utils/logger';

describe('Validate old contract state', () => {
    let deployer: SignerWithAddress;
    let storage: SimpleStorage;
    let chainConfigs: TxContractInteractionOptions | undefined;
    let provider: JsonRpcProvider;

    before(async () => {
        // Load the source-chain endpoint from the shared test config and
        // deploy a fresh SimpleStorage instance for the suite.
        const fh = new FileHandler(TestCLI.defaultTestConfigFile);
        chainConfigs = fh.getJSON();
        if (!chainConfigs) {
            logger.error(`No config available under ${TestCLI.defaultTestConfigFile}`);
            process.exit(-1);
        }
        provider = new ethers.providers.JsonRpcProvider({ url: chainConfigs.srcChainRpcUrl, timeout: BigNumber.from(chainConfigs.connectionTimeout).toNumber() });
        deployer = await SignerWithAddress.create(provider.getSigner());
        const Storage = new SimpleStorage__factory(deployer);
        storage = await Storage.deploy();
    });

    it('Should validate contract state proof', async () => {
        const oldValue = 1;

        await storage.setA(oldValue);

        let keys = await provider.send('parity_listStorageKeys', [
            storage.address, 10, null,
        ]);

        // Proof of the state *before* the second write; used at the end to
        // check that reverting the trie reproduces the old storage root.
        const oldProof = await provider.send('eth_getProof', [storage.address, keys]);

        await storage.setA(1337);

        keys = await provider.send('parity_listStorageKeys', [
            storage.address, 10, null,
        ]);

        const proof = await provider.send('eth_getProof', [storage.address, keys]);

        // Rebuild the storage trie locally from the proof's key/value pairs.
        const trie = new Trie();

        // FIX: the original ran these puts concurrently via
        // Promise.all(storageProof.map(async ...)) — interleaved mutation of a
        // single shared Trie is racy. Insert sequentially instead
        // (no-await-in-loop is disabled for test files in .eslintrc.json).
        for (let i = 0; i < proof.storageProof.length; i += 1) {
            const p = proof.storageProof[i];
            // Storage trie keys are keccak256 of the 32-byte-padded slot key.
            const storageKey = utils.hexStringToBuffer(ethers.utils.keccak256(ethers.utils.hexZeroPad(p.key, 32)));
            // A zero value is stored as an empty byte string in the trie.
            const val = p.value === '0x0' ? Buffer.from([]) : utils.hexStringToBuffer(ethers.BigNumber.from(p.value).toHexString());
            await trie.put(
                storageKey,
                utils.encode(val),
            );
        }

        expect(proof.storageHash).to.be.equal(`0x${trie.root.toString('hex')}`);

        // reset to old value: slot 0 holds `a`, so overwriting it with the
        // original value must reproduce the old storage root.
        await trie.put(
            utils.hexStringToBuffer(ethers.utils.keccak256(ethers.utils.hexZeroPad('0x0', 32))),
            utils.encode(utils.hexStringToBuffer(ethers.BigNumber.from(oldValue).toHexString())),
        );

        expect(oldProof.storageHash).to.be.equal(`0x${trie.root.toString('hex')}`);
    });
});
Buffer.from([]) : utils.hexStringToBuffer(ethers.BigNumber.from(p.value).toHexString()); 57 | await trie.put( 58 | storageKey, 59 | utils.encode(val), 60 | ); 61 | })); 62 | 63 | expect(proof.storageHash).to.be.equal(`0x${trie.root.toString('hex')}`); 64 | 65 | // reset to old value 66 | await trie.put( 67 | utils.hexStringToBuffer(ethers.utils.keccak256(ethers.utils.hexZeroPad('0x0', 32))), 68 | utils.encode(utils.hexStringToBuffer(ethers.BigNumber.from(oldValue).toHexString())), 69 | ); 70 | 71 | expect(oldProof.storageHash).to.be.equal(`0x${trie.root.toString('hex')}`); 72 | }); 73 | }); 74 | -------------------------------------------------------------------------------- /test/verify-proxy-test.ts: -------------------------------------------------------------------------------- 1 | import { expect } from 'chai'; 2 | import { SignerWithAddress } from '@nomiclabs/hardhat-ethers/signers'; 3 | import { JsonRpcProvider } from '@ethersproject/providers'; 4 | import { BigNumber, ethers } from 'ethers'; 5 | import { SimpleStorage, SimpleStorage__factory } from '../src-gen/types'; 6 | import { TestCLI, verifyEthGetProof } from './test-utils'; 7 | import GetProof from '../src/proofHandler/GetProof'; 8 | import { TxContractInteractionOptions } from '../src/cli/smart-sync'; 9 | import FileHandler from '../src/utils/fileHandler'; 10 | import { logger } from '../src/utils/logger'; 11 | 12 | describe('Verify State proof', () => { 13 | let deployer: SignerWithAddress; 14 | let storage: SimpleStorage; 15 | let provider: JsonRpcProvider; 16 | let chainConfigs: TxContractInteractionOptions | undefined; 17 | 18 | before(async () => { 19 | const fh = new FileHandler(TestCLI.defaultTestConfigFile); 20 | chainConfigs = fh.getJSON(); 21 | if (!chainConfigs) { 22 | logger.error(`No config available under ${TestCLI.defaultTestConfigFile}`); 23 | process.exit(-1); 24 | } 25 | provider = new ethers.providers.JsonRpcProvider({ url: chainConfigs.srcChainRpcUrl, timeout: 
BigNumber.from(chainConfigs.connectionTimeout).toNumber() }); 26 | deployer = await SignerWithAddress.create(provider.getSigner()); 27 | }); 28 | 29 | it('Should deploy and return default values', async () => { 30 | const Storage = new SimpleStorage__factory(deployer); 31 | storage = await Storage.deploy(); 32 | 33 | expect((await storage.getA()).eq(0)).to.be.true; 34 | expect((await storage.getB()).eq(42)).to.be.true; 35 | expect((await storage.getValue(deployer.address)).eq(0)).to.be.true; 36 | }); 37 | 38 | it('Should read correct storage after transactions', async () => { 39 | // assign a value to `a` 40 | const newValue = 1337; 41 | expect(await storage.setA(newValue)).to.exist; 42 | const keys = await provider.send('parity_listStorageKeys', [ 43 | storage.address, 5, null, 44 | ]); 45 | // now there should be 2 storage keys 46 | expect(keys.length).to.equal(2); 47 | 48 | // `a` is the first field of the contract and its value is stored at slot 0 49 | const aValue = await provider.getStorageAt(storage.address, 0); 50 | expect(ethers.BigNumber.from(newValue).eq(aValue)).to.be.true; 51 | }); 52 | 53 | it('Should read correct mapping storage', async () => { 54 | const value = 1000; 55 | expect(await storage.setValue(value)).to.exist; 56 | const keys = await provider.send('parity_listStorageKeys', [ 57 | storage.address, 5, null, 58 | ]); 59 | // after setting `a` and inserting a value in the mapping there should be 3 storage keys 60 | expect(keys.length).to.equal(3); 61 | const storageKey = ethers.BigNumber.from(keys[1]); 62 | 63 | // the `storageKey` of the `value` is the hash of the `key` of `value` in the mapping 64 | // concatenated with the slot of the mapping in the contract: `keccak256(key . 
slot)` 65 | const location = ethers.utils.hexConcat([ 66 | ethers.utils.hexZeroPad(deployer.address, 32), ethers.utils.hexZeroPad('0x03', 32), 67 | ]); 68 | expect(ethers.utils.keccak256(location)).to.equal(keys[1]); 69 | 70 | const storedValue = await provider.getStorageAt(storage.address, storageKey); 71 | expect(ethers.BigNumber.from(storedValue).toNumber()).to.equal(value); 72 | }); 73 | 74 | it('Should return a valid proof', async () => { 75 | const keys = await provider.send('parity_listStorageKeys', [ 76 | storage.address, 5, null, 77 | ]); 78 | // [`eth_getProof`](https://github.com/ethereum/EIPs/blob/master/EIPS/eip-1186.md) implemented at 79 | // https://github.com/openethereum/openethereum/blob/27a0142af14730bcb50eeacc84043dc6f49395e8/rpc/src/v1/impls/eth.rs#L677 80 | const proof = await provider.send('eth_getProof', [storage.address, keys]); 81 | 82 | // get the latest block 83 | const block = await provider.send('eth_getBlockByNumber', ['latest', true]); 84 | 85 | // verify the proof against the block's state root 86 | expect(await verifyEthGetProof(proof, block.stateRoot)).to.be.true; 87 | }); 88 | }); 89 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "@tsconfig/node14/tsconfig.json", 3 | "compilerOptions": { 4 | "declaration": true, 5 | "noImplicitAny": false, 6 | "strictPropertyInitialization": false, 7 | "outDir": "dist", 8 | "removeComments": true, 9 | "sourceMap": true, 10 | "resolveJsonModule": true 11 | }, 12 | "ts-node": { 13 | // It is faster to skip typechecking. 
14 | "transpileOnly": true, 15 | 16 | "files": true 17 | }, 18 | "include": ["src/**/*", "./test/**/*", "./test/**/*.json", "./evaluation/**/*", "./config/*.json"], 19 | "exclude": ["node_modules", "chains", "hardhat_scripts", "src-gen", "**/*.js"] 20 | } --------------------------------------------------------------------------------