├── .gitignore ├── Cargo.lock ├── Cargo.toml ├── LICENSE ├── Makefile ├── README.md ├── bin ├── .confirm_action ├── .gen_js_config ├── .load_keys ├── .setup_env ├── db-insert-contract ├── db-setup ├── db-test ├── db-wait ├── dc ├── deploy-client ├── deploy-contracts ├── do-clusters ├── do-configure ├── do-create ├── do-create-prover ├── do-curl ├── do-destory ├── do-loadbalancers ├── do-main-id ├── do-scale-single ├── do-sizes ├── env ├── f ├── franklin ├── gas-price ├── init ├── k8s-apply ├── k8s-secret ├── kube ├── restart-kube └── revert-reason ├── contracts ├── .gitignore ├── bin │ ├── contracts_DepositVerificationKey_sol_DepositVerificationKey.abi │ ├── contracts_DepositVerificationKey_sol_DepositVerificationKey.bin │ ├── contracts_ExitVerificationKey_sol_ExitVerificationKey.abi │ ├── contracts_ExitVerificationKey_sol_ExitVerificationKey.bin │ ├── contracts_FranklinProxy_sol_Franklin.abi │ ├── contracts_FranklinProxy_sol_Franklin.bin │ ├── contracts_Migrations_sol_Migrations.abi │ ├── contracts_Migrations_sol_Migrations.bin │ ├── contracts_PlasmaContract_sol_PlasmaContract.abi │ ├── contracts_PlasmaContract_sol_PlasmaContract.bin │ ├── contracts_PlasmaDepositor_sol_PlasmaDepositor.abi │ ├── contracts_PlasmaDepositor_sol_PlasmaDepositor.bin │ ├── contracts_PlasmaExitor_sol_PlasmaExitor.abi │ ├── contracts_PlasmaExitor_sol_PlasmaExitor.bin │ ├── contracts_PlasmaStorage_sol_PlasmaStorage.abi │ ├── contracts_PlasmaStorage_sol_PlasmaStorage.bin │ ├── contracts_PlasmaTester_sol_PlasmaTester.abi │ ├── contracts_PlasmaTester_sol_PlasmaTester.bin │ ├── contracts_PlasmaTransactor_sol_PlasmaTransactor.abi │ ├── contracts_PlasmaTransactor_sol_PlasmaTransactor.bin │ ├── contracts_Plasma_sol_Plasma.abi │ ├── contracts_Plasma_sol_Plasma.bin │ ├── contracts_Plasma_sol_PlasmaStub.abi │ ├── contracts_Plasma_sol_PlasmaStub.bin │ ├── contracts_ProofChecker_sol_ProofChecker.abi │ ├── contracts_ProofChecker_sol_ProofChecker.bin │ ├── contracts_TransferVerificationKey_sol_TransferVerificationKey.abi │ ├── contracts_TransferVerificationKey_sol_TransferVerificationKey.bin │ ├── contracts_TwistedEdwards_sol_TwistedEdwards.abi │ ├── contracts_TwistedEdwards_sol_TwistedEdwards.bin │ ├── contracts_VerificationKeys_sol_VerificationKeys.abi │ ├── contracts_VerificationKeys_sol_VerificationKeys.bin │ ├── contracts_Verifier_sol_Verifier.abi │ └── contracts_Verifier_sol_Verifier.bin ├── contracts │ ├── Depositor.sol │ ├── Exitor.sol │ ├── FranklinProxy.sol │ ├── Migrations.sol │ ├── Transactor.sol │ ├── common │ │ ├── FranklinCommon.sol │ │ ├── FranklinStorage.sol │ │ ├── TwistedEdwards.sol │ │ ├── VerificationKeys.sol │ │ └── Verifier.sol │ └── keys.example │ │ ├── DepositVerificationKey.sol │ │ ├── ExitVerificationKey.sol │ │ └── TransferVerificationKey.sol ├── migrations │ └── 1_initial_migration.js ├── package-lock.json ├── package.json ├── scripts │ ├── check-price.js │ ├── deposit.js │ ├── publish-source.js │ └── revert-reason.js ├── test │ ├── PlasmaTester.sol │ ├── TestPlasma.sol │ ├── deposit.js │ ├── exit.js │ ├── transfer.js │ ├── twistedEdwards.js │ └── verifier.js ├── tools │ ├── do_deposit.js │ ├── do_exit.js │ ├── get_logs.js │ ├── get_state.js │ ├── migrate_geth.sh │ ├── parse.js │ ├── send_tx.js │ ├── setup-geth.js │ ├── start_ganache.sh │ ├── take_full_exit.js │ └── take_partial_withdraw.js ├── truffle-config.js └── yarn.lock ├── core ├── circuit │ ├── Cargo.toml │ └── src │ │ ├── cheque │ │ ├── bitwindow.rs │ │ └── mod.rs │ │ ├── deposit │ │ ├── circuit.rs │ │ ├── deposit_request.rs │ │ └── mod.rs │ 
│ ├── encoder.rs │ │ ├── exit │ │ ├── circuit.rs │ │ ├── exit_request.rs │ │ └── mod.rs │ │ ├── leaf.rs │ │ ├── lib.rs │ │ ├── plasma_constants.rs │ │ └── transfer │ │ ├── circuit.rs │ │ ├── mod.rs │ │ └── transaction.rs ├── eth_client │ ├── Cargo.toml │ └── src │ │ ├── lib.rs │ │ └── signer.rs ├── key_generator │ ├── Cargo.toml │ └── src │ │ ├── depositor_key.rs │ │ ├── exitor_key.rs │ │ ├── main.rs │ │ ├── read_write_keys.rs │ │ ├── transactor_key.rs │ │ └── vk_contract_generator.rs ├── merkle_tree │ ├── Cargo.toml │ └── src │ │ ├── account_tree.rs │ │ ├── hasher.rs │ │ ├── lib.rs │ │ ├── parallel_smt.rs │ │ ├── pedersen_hasher.rs │ │ └── sequential_smt.rs ├── models │ ├── Cargo.toml │ └── src │ │ ├── abi.rs │ │ ├── config.rs │ │ ├── lib.rs │ │ ├── plasma │ │ ├── account.rs │ │ ├── block.rs │ │ ├── circuit │ │ │ ├── account.rs │ │ │ ├── deposit.rs │ │ │ ├── exit.rs │ │ │ ├── mod.rs │ │ │ ├── sig.rs │ │ │ ├── transfer.rs │ │ │ └── utils.rs │ │ ├── mod.rs │ │ ├── params.rs │ │ └── tx.rs │ │ └── primitives.rs ├── plasma │ ├── Cargo.toml │ └── src │ │ ├── lib.rs │ │ └── state.rs ├── prover │ ├── Cargo.toml │ └── src │ │ ├── lib.rs │ │ └── main.rs ├── sandbox │ ├── Cargo.toml │ └── src │ │ ├── main.rs │ │ └── nonce_futures.rs ├── server │ ├── Cargo.toml │ └── src │ │ ├── analysis.pgsql │ │ ├── api_server.rs │ │ ├── committer.rs │ │ ├── eth_sender.rs │ │ ├── eth_watch.rs │ │ ├── lib.rs │ │ ├── main.rs │ │ ├── nonce_futures.rs │ │ └── state_keeper.rs └── storage │ ├── .gitignore │ ├── Cargo.toml │ ├── diesel.toml │ ├── migrations │ ├── 00000000000000_diesel_initial_setup │ │ ├── down.sql │ │ └── up.sql │ ├── 2018-12-11-084553_operations │ │ ├── down.sql │ │ └── up.sql │ ├── 2019-04-02-100645_proofs │ │ ├── down.sql │ │ └── up.sql │ ├── 2019-05-02-110639_transactions │ │ ├── down.sql │ │ └── up.sql │ ├── 2019-05-08-114230_config │ │ ├── down.sql │ │ └── up.sql │ └── 2019-05-16-130227_provers │ │ ├── down.sql │ │ └── up.sql │ ├── sql │ ├── active_provers.pgsql │ ├── prover-runs.pgsql │ └── tps.pgsql │ └── src │ ├── lib.rs │ └── schema.rs ├── docker-compose.yml ├── docker ├── flattener │ └── Dockerfile ├── geth │ ├── Dockerfile │ ├── dev.json │ ├── geth-entry.sh │ ├── keystore │ │ └── UTC--2019-04-06T21-13-27.692266000Z--8a91dc2d28b689474298d91899f0c1baf62cb85b │ └── password.sec ├── nginx │ ├── Dockerfile │ └── nginx.conf ├── prover │ ├── Dockerfile │ └── prover-entry.sh └── server │ └── Dockerfile ├── docs ├── circuit.md ├── circuit.py ├── kube-config.png ├── kubernetes.md └── setup-dev.md ├── etc ├── env │ └── dev.env.example ├── kube │ ├── .gitignore │ ├── prover.yaml │ ├── regions-all.json │ ├── regions.json │ ├── server.yaml │ └── test.sh └── tesseracts │ └── tesseracts.toml ├── imgs └── zksync.svg ├── js ├── client │ ├── .babelrc │ ├── README.md │ ├── index.html │ ├── package-lock.json │ ├── package.json │ ├── src │ │ ├── App.vue │ │ ├── Login.vue │ │ ├── Wallet.vue │ │ ├── assets │ │ │ ├── loading.gif │ │ │ └── logo.png │ │ ├── contract.js │ │ ├── main.js │ │ ├── store.js │ │ └── transaction.js │ ├── webpack.config.js │ ├── webpack.config.prod.js │ └── yarn.lock ├── explorer │ ├── .babelrc │ ├── README.md │ ├── index.html │ ├── package.json │ ├── src │ │ ├── App.vue │ │ ├── Block.vue │ │ ├── Home.vue │ │ ├── Transaction.vue │ │ ├── TransactionList.vue │ │ ├── assets │ │ │ ├── loading.gif │ │ │ ├── logo.jpg │ │ │ └── logo0.png │ │ ├── client.js │ │ ├── main.js │ │ └── store.js │ ├── webpack.config.js │ ├── webpack.config.prod.js │ └── yarn.lock ├── franklin │ ├── abi │ │ └── 
PlasmaContract.json │ ├── package.json │ ├── src │ │ ├── franklin.js │ │ ├── test.js │ │ └── transaction.js │ └── yarn.lock └── loadtest │ ├── escape.js │ ├── loadtest.js │ ├── package.json │ ├── rescue.js │ └── yarn.lock └── yarn.lock /.gitignore: -------------------------------------------------------------------------------- 1 | # Editor directories and files 2 | .idea 3 | *.suo 4 | *.ntvs* 5 | *.njsproj 6 | *.sln 7 | .vscode 8 | .DS_Store 9 | *.bak 10 | node_modules 11 | *.log 12 | target 13 | a.out 14 | .gitconfig 15 | tags 16 | *.orig 17 | 18 | zksync_pk.key 19 | dist 20 | todo 21 | 22 | Cargo.lock 23 | !/Cargo.lock 24 | !/infrastructure/zksync-crypto/Cargo.lock 25 | 26 | /etc/env/* 27 | !/etc/env/dev.env.example 28 | !/etc/env/docker.env 29 | !/etc/env/ci.env 30 | !/etc/env/base 31 | /etc/tokens/localhost.json 32 | !/keys 33 | /keys/* 34 | !/keys/packed 35 | /tmp 36 | /volumes 37 | /logs 38 | /loadtest-config 39 | 40 | .ipynb_checkpoints 41 | 42 | loadtest_accounts_* 43 | 44 | go_to_env.sh 45 | 46 | core/lib/storage/.env 47 | 48 | .zcli-config.json 49 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | members = [ 3 | "core/key_generator", 4 | "core/merkle_tree", 5 | "core/eth_client", 6 | "core/models", 7 | "core/plasma", 8 | "core/prover", 9 | "core/sandbox", 10 | "core/server", 11 | "core/storage", 12 | "core/circuit", 13 | ] 14 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | tbd -------------------------------------------------------------------------------- /bin/.confirm_action: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | if [ ! "$FRANKLIN_ENV" == "dev" ] 4 | then 5 | echo -n "Dangerous action. Type environment name ('$FRANKLIN_ENV') to confirm: " 6 | read CONFIRMED 7 | if [ ! "$FRANKLIN_ENV" == "$CONFIRMED" ]; then 8 | exit 1 9 | fi 10 | fi 11 | -------------------------------------------------------------------------------- /bin/.gen_js_config: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Generates a json config file with current env var configuration for production 4 | 5 | JS_CONFIG="API_SERVER TRANSFER_BATCH_SIZE SENDER_ACCOUNT CONTRACT_ADDR" 6 | 7 | echo "export default {" 8 | 9 | for VAR in $JS_CONFIG; do 10 | echo " \"$VAR\":\"${!VAR}\"," 11 | done 12 | 13 | echo " \"trailing\": null" 14 | echo "}" -------------------------------------------------------------------------------- /bin/.load_keys: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | echo "Checking keys in $KEY_DIR/" 4 | mkdir -p $KEY_DIR 5 | #cd $KEY_DIR 6 | 7 | for i in $KEY_FILES; do 8 | if ! [ -f $KEY_DIR/$i ]; then 9 | echo "Downloading file $SPACE_URL/$i" 10 | axel -o $KEY_DIR/$i -a $SPACE_URL/$i 2>&1 11 | #curl -o $KEY_DIR/$i $SPACE_URL/$i 2>&1 12 | fi 13 | done -------------------------------------------------------------------------------- /bin/.setup_env: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | if [ -z "$FRANKLIN_ENV" ] 4 | then 5 | 6 | cd `dirname $0`/.. 
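# The rest of this guard resolves which environment is active and exports its
# variables: FRANKLIN_ENV is read from etc/env/current (falling back to "dev"),
# the matching etc/env/$FRANKLIN_ENV.env file is loaded with every variable
# exported, and KUBECONFIG is pointed at the main cluster config.
# A minimal sketch of switching environments by hand, assuming an
# etc/env/stage.env file already exists (bin/env is the real helper for this):
#
#   echo stage > etc/env/current
#   . bin/.setup_env
#   echo $FRANKLIN_ENV   # -> stage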
7 | 8 | if [ -f etc/env/current ]; then 9 | export FRANKLIN_ENV=`cat etc/env/current` 10 | else 11 | export FRANKLIN_ENV=dev 12 | fi 13 | 14 | export ENV_FILE=./etc/env/$FRANKLIN_ENV.env 15 | 16 | if [ "dev" == "$FRANKLIN_ENV" ] && [ ! -f etc/env/dev.env ] 17 | then 18 | cp etc/env/dev.env.example etc/env/dev.env 19 | fi 20 | 21 | if [ ! -f $ENV_FILE ]; then 22 | echo "Franklin config file not found: $ENV_FILE" 23 | #exit 24 | fi 25 | 26 | # Load env vars 27 | set -o allexport 28 | eval $(grep -v '^#' $ENV_FILE | sed 's/^/export /') 29 | set +o allexport 30 | 31 | # Postprocessing for all configs 32 | 33 | export KUBECONFIG=etc/kube/clusters/kubeconfig-main.yaml 34 | 35 | # if [[ $FIXMODE ]]; then 36 | # export WEB3_URL=http://localhost:8545 37 | # fi 38 | fi 39 | -------------------------------------------------------------------------------- /bin/db-insert-contract: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Force read env -- this is important, sp that we re-ready the new contract value after redeploy!!! 4 | FRANKLIN_ENV= 5 | . .setup_env 6 | 7 | psql "$DATABASE_URL" -c "INSERT INTO server_config (contract_addr) VALUES ('$CONTRACT_ADDR')" || exit 1 8 | echo "successfully inserted contract address into the database" -------------------------------------------------------------------------------- /bin/db-setup: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Force read env 4 | FRANKLIN_ENV= 5 | . .setup_env 6 | cd core/storage 7 | 8 | echo DATABASE_URL=$DATABASE_URL 9 | diesel database setup || exit 1 10 | diesel migration run || exit 1 11 | -------------------------------------------------------------------------------- /bin/db-test: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | . .setup_env 4 | cd core/storage 5 | 6 | export DATABASE_URL=postgres://postgres@localhost/plasma_test 7 | 8 | if [ "$1" == "reset" ]; then 9 | diesel database reset 10 | diesel migration run 11 | fi 12 | 13 | cd ../.. 14 | cargo test -p storage 15 | -------------------------------------------------------------------------------- /bin/db-wait: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Checks if db is up and accepting connections. 4 | 5 | . .setup_env 6 | 7 | for i in $(seq 1 5); 8 | do pg_isready -d "$DATABASE_URL" && s=0 && break || s=$? && sleep 5; 9 | done; 10 | exit $s 11 | -------------------------------------------------------------------------------- /bin/dc: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | . .setup_env 4 | 5 | echo "${@:1}" 6 | docker-compose "${@:1}" 7 | -------------------------------------------------------------------------------- /bin/deploy-client: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | SRC=`dirname $0`/../js/explorer 4 | DST=$CLIENT_GITHUB_DIR 5 | 6 | . 
bin/.gen_js_config 7 | 8 | echo "$DST <= $SRC" 9 | 10 | cd $SRC 11 | 12 | mkdir -p $DST/dist 13 | 14 | #yarn run build 15 | cp index.html $DST/ 16 | cp CNAME $DST/ 17 | cp dist/* $DST/dist/ 18 | cd $DST 19 | git add -A 20 | git commit -m "deploy" 21 | git push --force 22 | -------------------------------------------------------------------------------- /bin/deploy-contracts: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | . .setup_env 4 | 5 | KEY_FILES=$CONTRACT_KEY_FILES 6 | .load_keys 7 | 8 | mkdir -p contracts/contracts/keys/ 9 | cp -f $KEY_DIR/*.sol contracts/contracts/keys/ 10 | 11 | echo redeploying for the db $DATABASE_URL 12 | 13 | cd contracts 14 | yarn deploy | tee ../deploy.log 15 | cd .. 16 | 17 | export LABEL=$FRANKLIN_ENV-`date +%Y-%m-%d-%H%M%S` 18 | 19 | export NEW_CONTRACT=`grep -A999 "Starting migrations" deploy.log | grep -A4 "'FranklinProxy'" | grep "contract address" | grep -oE '0x(.+)' \ 20 | | sed -n "s/0x//p"` 21 | 22 | if [ ! -z "$NEW_CONTRACT" ] 23 | then 24 | echo New contract at $NEW_CONTRACT 25 | 26 | OLD_CONTRACT=`grep "^CONTRACT_ADDR" ./$ENV_FILE | grep -oE '=(.+)' | sed -n "s/=//p"` 27 | echo Old contract at $OLD_CONTRACT 28 | 29 | mkdir -p logs/$LABEL/ 30 | cp deploy.log logs/$LABEL/deploy.log 31 | cp ./$ENV_FILE logs/$LABEL/$FRANKLIN_ENV.bak 32 | 33 | sed -i".bak" "s/^CONTRACT_ADDR=$OLD_CONTRACT/CONTRACT_ADDR=$NEW_CONTRACT/g" ./$ENV_FILE 34 | 35 | echo successfully deployed contracts 36 | 37 | else 38 | echo "Contract deployment failed" 39 | exit 1 40 | fi 41 | -------------------------------------------------------------------------------- /bin/do-clusters: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | . .setup_env 3 | 4 | CLUSTERS=`do-curl -X GET "https://api.digitalocean.com/v2/kubernetes/clusters"` 5 | echo $CLUSTERS | jq '.kubernetes_clusters | map({id: .id, name: .name, region: .region}) | .[]' 6 | echo 7 | 8 | for KUBECONFIG in `ls etc/kube/clusters/kubeconfig*`; do 9 | echo $KUBECONFIG 10 | export KUBECONFIG=$KUBECONFIG 11 | kubectl get deployments 12 | if [[ "$1" == "nodes" ]]; then 13 | kubectl get nodes 14 | fi 15 | if [[ "$1" == "pods" ]]; then 16 | kubectl get pods 17 | fi 18 | echo 19 | done 20 | -------------------------------------------------------------------------------- /bin/do-configure: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | . .setup_env 3 | 4 | # # Main cluster 5 | # NAME=$CLUSTER_NAME 6 | # CLUSTER=`echo $CLUSTERS | jq '.kubernetes_clusters | map(select(.name == "'$NAME'")) | .[0]'` 7 | # CLUSTER_ID=`echo $CLUSTER | jq '.id' | tr -d '"' | grep -v null` 8 | # CURRENT_COUNT=`echo $CLUSTER | jq '.node_pools[0] | .count' | grep -v null` 9 | # NODE_POOL_ID=`echo $CLUSTER | jq '.node_pools[0] | .id' | grep -v null | tr -d '"'` 10 | # DATA='{ "name": "prover", "count": 30}' 11 | # echo $CLUSTER_ID $NODE_POOL_ID $DATA 12 | # RET=`do-curl -X PUT "https://api.digitalocean.com/v2/kubernetes/clusters/$CLUSTER_ID/node_pools/$NODE_POOL_ID" --data "$DATA"` 13 | # echo $RET 14 | 15 | # . 
bin/.kube_gen_secret | kubectl apply -f - || exit 1 16 | # kubectl apply -f etc/kube/prover.yaml || exit 1 17 | # kubectl scale deployments/prover --replicas=$COUNT_PER_REGION || exit 1 18 | 19 | PROVISIONED=0 20 | 21 | CLUSTERS=`do-curl -X GET "https://api.digitalocean.com/v2/kubernetes/clusters"` 22 | CLUSTERS_IDS=`echo $CLUSTERS | jq '.kubernetes_clusters | map(.id) | .[]' | tr -d '"' | grep -v null` 23 | 24 | mkdir -p etc/kube/clusters 25 | 26 | echo "Target: $COUNT_PER_REGION per region" 27 | 28 | # CLUSTERS_IDS="44d52456-f37c-494e-878c-142881db90f3" 29 | # COUNT_PER_REGION= 30 | 31 | for CLUSTER_ID in $CLUSTERS_IDS; do 32 | CLUSTER=`echo $CLUSTERS | jq '.kubernetes_clusters | map(select(.id == "'$CLUSTER_ID'")) | .[0]'` 33 | NAME=`echo $CLUSTER | jq '.name' | tr -d '"'` 34 | 35 | # if [ "$NAME" == "main" ]; then 36 | # continue 37 | # fi 38 | 39 | PROVER_POOL=`echo $CLUSTER | jq '.node_pools | map(select(.name == "prover")) | .[0]'` 40 | NODE_POOL_ID=`echo $PROVER_POOL | jq '.id' | grep -v null | tr -d '"'` 41 | CURRENT_COUNT=`echo $PROVER_POOL | jq '.count'` 42 | 43 | echo "Cluster $NAME, id = $CLUSTER_ID, NODE_POOL_ID = $NODE_POOL_ID, current_count = $CURRENT_COUNT" 44 | 45 | KUBECONFIG=etc/kube/clusters/kubeconfig-$NAME.yaml 46 | 47 | if [[ ! "" == "$COUNT_PER_REGION" ]]; then 48 | DATA='{ "size": "c-32", "name": "prover", "count": '$COUNT_PER_REGION'}' 49 | echo $DATA 50 | RET=`do-curl -X PUT "https://api.digitalocean.com/v2/kubernetes/clusters/$CLUSTER_ID/node_pools/$NODE_POOL_ID" --data "$DATA"` 51 | echo $RET 52 | 53 | do-curl -X GET "https://api.digitalocean.com/v2/kubernetes/clusters/$CLUSTER_ID/kubeconfig" > $KUBECONFIG 54 | fi 55 | 56 | # Generate and apply secret for env config 57 | . bin/k8s-secret | kubectl apply -f - 58 | 59 | kubectl apply -f etc/kube/gen/$FRANKLIN_ENV/prover.yaml 60 | kubectl scale deployments/$FRANKLIN_ENV-prover --replicas=$ACTIVE_PER_REGION 61 | 62 | # if [ ! "$NAME" == "main" ]; then 63 | # kubectl delete deployments/$FRANKLIN_ENV-nginx 64 | # kubectl delete deployments/$FRANKLIN_ENV-server 65 | # fi 66 | 67 | echo 68 | 69 | # PROVISIONED=$((PROVISIONED + COUNT_PER_REGION)) 70 | # if ((PROVISIONED >= COUNT_TOTAL)) 71 | # then 72 | # echo "provisioned $PROVISIONED, enough!" 73 | # exit 0 74 | # fi 75 | done 76 | 77 | -------------------------------------------------------------------------------- /bin/do-create: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | . .setup_env 3 | 4 | REGIONS=`cat etc/kube/regions.json | jq '.[]' | tr -d '"'` 5 | CLUSTERS=`do-curl -X GET "https://api.digitalocean.com/v2/kubernetes/clusters"` 6 | 7 | echo $REGIONS 8 | for R in $REGIONS; do 9 | NAME=$R 10 | CLUSTER=`echo $CLUSTERS | jq '.kubernetes_clusters | map(select(.name == "'$NAME'" and .region == "'$R'")) | .[0]'` 11 | CLUSTER_ID=`echo $CLUSTER | jq '.id' | tr -d '"' | grep -v null` 12 | if [ ! 
-z $CLUSTER_ID ] 13 | then 14 | CURRENT_COUNT=`echo $CLUSTER | jq '.node_pools[0] | .count' | grep -v null` 15 | echo "Cluster $NAME exists, id = $CLUSTER_ID, current_count = $CURRENT_COUNT" 16 | else 17 | echo "Creating cluster $NAME" 18 | DATA='{"name": "'$NAME'", "region": "'$R'", "version": "1.14.1-do.2", "tags": [ "massive" ], "node_pools": [ { "size": "c-32", "name": "prover", "count": '$COUNT_PER_REGION'} ] }' 19 | #echo $DATA 20 | CLUSTER=`do-curl -X POST "https://api.digitalocean.com/v2/kubernetes/clusters" --data "$DATA"` 21 | echo $CLUSTER 22 | 23 | CLUSTER_ID=`echo $CLUSTER | jq '.kubernetes_cluster | .id' | tr -d '"'` 24 | echo "Created cluster $NAME, id = $CLUSTER_ID" 25 | fi 26 | 27 | done 28 | -------------------------------------------------------------------------------- /bin/do-create-prover: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | . .setup_env 4 | 5 | do-curl -X POST https://api.digitalocean.com/v2/kubernetes/clusters/`do-main-id`/node_pools --data '{ "size": "c-32", "name": "prover", "count": 1 }' -------------------------------------------------------------------------------- /bin/do-curl: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | . .setup_env 4 | 5 | #echo "${@:1}" 6 | curl -H "Authorization: Bearer $DO_TOKEN" "${@:1}" -H "Content-Type: application/json" 2>/dev/null 7 | echo 8 | -------------------------------------------------------------------------------- /bin/do-destory: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | . .setup_env 3 | 4 | REGIONS=`cat etc/kube/regions.json | jq 'map( . ) | .[]' | tr -d '"'` 5 | CLUSTERS=`do-curl -X GET "https://api.digitalocean.com/v2/kubernetes/clusters"` 6 | 7 | # Delete clusters in regions 8 | 9 | for R in $REGIONS; do 10 | NAME=$R 11 | CLUSTER_ID=`echo $CLUSTERS | jq '.kubernetes_clusters | map(select(.name == "'$NAME'" and .region == "'$R'")) | .[0] | .id' | tr -d '"' | grep -v null` 12 | if [ ! -z $CLUSTER_ID ] 13 | then 14 | echo "Shutting down cluster $NAME, id = $CLUSTER_ID" 15 | RET=`do-curl -X DELETE "https://api.digitalocean.com/v2/kubernetes/clusters/$CLUSTER_ID"` 16 | echo $RET 17 | fi 18 | done 19 | 20 | # Scale down main/prover 21 | 22 | do-scale-single `do-main-id` 1 23 | 24 | #kubectl scale deployments/$FRANKLIN_ENV-prover --replicas=1 25 | -------------------------------------------------------------------------------- /bin/do-loadbalancers: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | . .setup_env 3 | 4 | ALL=`do-curl -X GET "https://api.digitalocean.com/v2/load_balancers" | jq '.load_balancers'` 5 | NOT_AMS3=`echo $ALL | map(select( .region.slug != "ams3"))` 6 | 7 | echo $ALL | jq 8 | 9 | if [ "$1" == "rm" ]; then 10 | 11 | LIST=`echo $ALL | jq 'map( .id ) | .[]' | tr -d '"'` 12 | 13 | echo Deleting $LIST 14 | for LB in $LIST; do 15 | echo $LB 16 | do-curl -X DELETE "https://api.digitalocean.com/v2/load_balancers/$LB" 17 | done 18 | 19 | fi -------------------------------------------------------------------------------- /bin/do-main-id: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | . 
.setup_env 4 | 5 | CLUSTER=`do-curl -X GET "https://api.digitalocean.com/v2/kubernetes/clusters" \ 6 | | jq '.kubernetes_clusters[] | select(.name == "main")'` 7 | 8 | #echo $CLUSTER | jq 9 | 10 | CLUSTER_ID=`echo $CLUSTER | jq '.id' | tr -d '"'` 11 | echo $CLUSTER_ID 12 | -------------------------------------------------------------------------------- /bin/do-scale-single: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | . .setup_env 4 | 5 | if [ -z $1 ]; then echo "usage: do-scale []"; exit 1; fi 6 | 7 | CLUSTER_ID=$1 8 | echo -n cluster $CLUSTER_ID 9 | if [ ! -z $2 ]; then echo ": set count = $2"; fi 10 | echo 11 | 12 | NODE_POOL=`do-curl -X GET "https://api.digitalocean.com/v2/kubernetes/clusters/$CLUSTER_ID/node_pools" \ 13 | | jq '.node_pools[] | select(.name == "prover") | {id: .id, name: .name, size: .size, count: .count}'` 14 | 15 | echo NODE_POOL=$NODE_POOL 16 | 17 | NODE_POOL_ID=`echo $NODE_POOL | jq '.id' | tr -d '"'` 18 | 19 | echo NODE_POOL_ID=$NODE_POOL_ID 20 | 21 | if [ -z $2 ]; then 22 | 23 | R=`do-curl -X GET "https://api.digitalocean.com/v2/kubernetes/clusters/$CLUSTER_ID/node_pools/$NODE_POOL_ID"` 24 | 25 | else 26 | 27 | DATA='{"name": "prover", "count": '$2'}' 28 | R=`do-curl -X PUT "https://api.digitalocean.com/v2/kubernetes/clusters/$CLUSTER_ID/node_pools/$NODE_POOL_ID" --data "$DATA"` 29 | 30 | fi 31 | 32 | echo $R 33 | echo $R | jq '.node_pool | {id: .id, name: .name, size: .size, count: .count, }' 34 | echo nodes_running: `echo $R | jq '.node_pool | .nodes | length '` 35 | -------------------------------------------------------------------------------- /bin/do-sizes: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | . .setup_env 3 | 4 | do-curl -X GET "https://api.digitalocean.com/v2/sizes?page=1" | jq '.sizes | map(select( .vcpus >= 32 ))' 5 | do-curl -X GET "https://api.digitalocean.com/v2/sizes?page=2" | jq '.sizes | map(select( .vcpus >= 32 ))' 6 | do-curl -X GET "https://api.digitalocean.com/v2/sizes?page=3" | jq '.sizes | map(select( .vcpus >= 32 ))' 7 | do-curl -X GET "https://api.digitalocean.com/v2/sizes?page=4" | jq '.sizes | map(select( .vcpus >= 32 ))' -------------------------------------------------------------------------------- /bin/env: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | if [ -z $1 ] 4 | then 5 | for i in `ls etc/env | grep -v .bak | grep -v example | grep -v current`; do 6 | ENV=${i%.*} 7 | if [ "$ENV" == "$FRANKLIN_ENV" ] 8 | then 9 | echo -n " * " 10 | else 11 | echo -n " " 12 | fi 13 | echo $ENV 14 | done 15 | else 16 | ENV_FILE=etc/env/$1.env 17 | if [ ! -f "$ENV_FILE" ] 18 | then 19 | echo "$ENV_FILE not found" 20 | exit 1 21 | fi 22 | 23 | echo $1 > etc/env/current 24 | echo "Franklin environment switched to: $1" 25 | fi 26 | -------------------------------------------------------------------------------- /bin/f: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | . .setup_env 4 | 5 | $1 "${@:2}" 6 | -------------------------------------------------------------------------------- /bin/franklin: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | . 
.setup_env 4 | 5 | if [ -f bin/$1 ]; then 6 | bin/$1 "${@:2}" 7 | else 8 | make "$@" 9 | fi 10 | 11 | #make "$@" -------------------------------------------------------------------------------- /bin/gas-price: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | . .setup_env 4 | 5 | R=`curl -X POST -H "Content-Type: application/json" \ 6 | --data '{"id": 1, "jsonrpc": "2.0", "method": "eth_gasPrice", "params": []}' $WEB3_URL 2>/dev/null` 7 | 8 | PRICE=`echo $R | jq '.result' | tr -d '"'` 9 | 10 | echo $(($PRICE/1000000000)) GWei 11 | -------------------------------------------------------------------------------- /bin/init: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e 4 | 5 | franklin dev-up 6 | franklin env 7 | franklin yarn || true # It can fail. 8 | franklin db-wait 9 | franklin db-setup 10 | franklin redeploy 11 | -------------------------------------------------------------------------------- /bin/k8s-apply: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | mkdir -p etc/kube/gen/$FRANKLIN_ENV 4 | envsubst < etc/kube/prover.yaml > etc/kube/gen/$FRANKLIN_ENV/prover.yaml 5 | envsubst < etc/kube/server.yaml > etc/kube/gen/$FRANKLIN_ENV/server.yaml 6 | 7 | # Generate and apply secret for env config 8 | . bin/k8s-secret | kubectl apply -f - 9 | 10 | # Apply cluster configuration 11 | kubectl apply -f etc/kube/gen/$FRANKLIN_ENV/server.yaml 12 | kubectl apply -f etc/kube/gen/$FRANKLIN_ENV/prover.yaml 13 | -------------------------------------------------------------------------------- /bin/k8s-secret: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Generates a yaml kubernetes secret file with current env var configuration for production 4 | 5 | #. .setup_env prod 6 | 7 | kubectl create secret generic $FRANKLIN_ENV-secret --dry-run -o yaml 8 | echo data: 9 | grep -v '^#' $ENV_FILE | grep -v '^$' | while read -r line; do 10 | VAR=`sed 's/=.*//' <<< $line` 11 | echo -n " $VAR: " 12 | echo -n ${!VAR} | base64 13 | done 14 | -------------------------------------------------------------------------------- /bin/kube: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | . .setup_env 4 | 5 | /usr/local/bin/kubectl "$@" -------------------------------------------------------------------------------- /bin/restart-kube: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | . 
.setup_env 4 | 5 | # Restart deployments (this will re-pull images) 6 | kubectl patch deployment server -p "{\"spec\":{\"template\":{\"metadata\":{\"labels\":{\"date\":\"$(date +%s)\"}}}}}" || exit 1 7 | kubectl patch deployment prover -p "{\"spec\":{\"template\":{\"metadata\":{\"labels\":{\"date\":\"$(date +%s)\"}}}}}" || exit 1 8 | kubectl patch deployment nginx -p "{\"spec\":{\"template\":{\"metadata\":{\"labels\":{\"date\":\"$(date +%s)\"}}}}}" || exit 1 9 | -------------------------------------------------------------------------------- /bin/revert-reason: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Fetching revert reason -- https://ethereum.stackexchange.com/questions/48383/how-to-receive-revert-reason-for-past-transactions 4 | 5 | if [ -z "$1" ] 6 | then 7 | echo "Usage: revert-reason " 8 | exit 9 | fi 10 | 11 | # TX=$1 12 | # SCRIPT=" tx = eth.getTransaction( \"$TX\" ); tx.data = tx.input; eth.call(tx, tx.blockNumber)" 13 | # geth --exec "$SCRIPT" attach http://localhost:8545 | cut -d '"' -f 2 | cut -c139- | xxd -r -p 14 | # echo 15 | 16 | . `dirname $0`/.setup_env 17 | 18 | node `dirname $0`/../contracts/scripts/revert-reason.js $1 -------------------------------------------------------------------------------- /contracts/.gitignore: -------------------------------------------------------------------------------- 1 | /build 2 | /contracts/keys 3 | /flat -------------------------------------------------------------------------------- /contracts/bin/contracts_DepositVerificationKey_sol_DepositVerificationKey.abi: -------------------------------------------------------------------------------- 1 | [] -------------------------------------------------------------------------------- /contracts/bin/contracts_DepositVerificationKey_sol_DepositVerificationKey.bin: -------------------------------------------------------------------------------- 1 | 6080604052348015600f57600080fd5b50603580601d6000396000f3006080604052600080fd00a165627a7a723058208f789cc6732c4156a2162db9cb3ac3e4fbe3ba3a02a2dacb617964be2c0bc2d80029 -------------------------------------------------------------------------------- /contracts/bin/contracts_ExitVerificationKey_sol_ExitVerificationKey.abi: -------------------------------------------------------------------------------- 1 | [] -------------------------------------------------------------------------------- /contracts/bin/contracts_ExitVerificationKey_sol_ExitVerificationKey.bin: -------------------------------------------------------------------------------- 1 | 6080604052348015600f57600080fd5b50603580601d6000396000f3006080604052600080fd00a165627a7a72305820a03ae24d8e95629c70c935fbf412c1d7065c3b37d1076c1f7fcb0e754631ff690029 -------------------------------------------------------------------------------- /contracts/bin/contracts_Migrations_sol_Migrations.abi: -------------------------------------------------------------------------------- 1 | 
[{"constant":false,"inputs":[{"name":"new_address","type":"address"}],"name":"upgrade","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"constant":true,"inputs":[],"name":"last_completed_migration","outputs":[{"name":"","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":true,"inputs":[],"name":"owner","outputs":[{"name":"","type":"address"}],"payable":false,"stateMutability":"view","type":"function"},{"constant":false,"inputs":[{"name":"completed","type":"uint256"}],"name":"setCompleted","outputs":[],"payable":false,"stateMutability":"nonpayable","type":"function"},{"inputs":[],"payable":false,"stateMutability":"nonpayable","type":"constructor"}] -------------------------------------------------------------------------------- /contracts/bin/contracts_Migrations_sol_Migrations.bin: -------------------------------------------------------------------------------- 1 | 608060405234801561001057600080fd5b50336000806101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff1602179055506102f8806100606000396000f300608060405260043610610062576000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff1680630900f01014610067578063445df0ac146100aa5780638da5cb5b146100d5578063fdacd5761461012c575b600080fd5b34801561007357600080fd5b506100a8600480360381019080803573ffffffffffffffffffffffffffffffffffffffff169060200190929190505050610159565b005b3480156100b657600080fd5b506100bf610241565b6040518082815260200191505060405180910390f35b3480156100e157600080fd5b506100ea610247565b604051808273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390f35b34801561013857600080fd5b506101576004803603810190808035906020019092919050505061026c565b005b60008060009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff163373ffffffffffffffffffffffffffffffffffffffff16141561023d578190508073ffffffffffffffffffffffffffffffffffffffff1663fdacd5766001546040518263ffffffff167c010000000000000000000000000000000000000000000000000000000002815260040180828152602001915050600060405180830381600087803b15801561022457600080fd5b505af1158015610238573d6000803e3d6000fd5b505050505b5050565b60015481565b6000809054906101000a900473ffffffffffffffffffffffffffffffffffffffff1681565b6000809054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff163373ffffffffffffffffffffffffffffffffffffffff1614156102c957806001819055505b505600a165627a7a72305820c9ae4aff4ff0cf8f428e010f03313f97e565320e2146571af3f8f4bcaa69e83c0029 -------------------------------------------------------------------------------- /contracts/bin/contracts_Plasma_sol_PlasmaStub.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LuozhuZhang/sourceCode-zkSync-era-rollupContract-code-learning/14168cd267b20a3704d510c801770c3a7cdf3440/contracts/bin/contracts_Plasma_sol_PlasmaStub.bin -------------------------------------------------------------------------------- /contracts/bin/contracts_ProofChecker_sol_ProofChecker.abi: -------------------------------------------------------------------------------- 1 | 
[{"constant":true,"inputs":[{"name":"circuitType","type":"uint8"},{"name":"proof","type":"uint256[8]"},{"name":"oldRoot","type":"bytes32"},{"name":"newRoot","type":"bytes32"},{"name":"finalHash","type":"bytes32"}],"name":"verifyProof","outputs":[{"name":"valid","type":"bool"}],"payable":false,"stateMutability":"view","type":"function"}] -------------------------------------------------------------------------------- /contracts/bin/contracts_TransferVerificationKey_sol_TransferVerificationKey.abi: -------------------------------------------------------------------------------- 1 | [] -------------------------------------------------------------------------------- /contracts/bin/contracts_TransferVerificationKey_sol_TransferVerificationKey.bin: -------------------------------------------------------------------------------- 1 | 6080604052348015600f57600080fd5b50603580601d6000396000f3006080604052600080fd00a165627a7a7230582014e0b7a3ab0656e5021a569954477cdc58d52a997deb5807a985354eabe9366c0029 -------------------------------------------------------------------------------- /contracts/bin/contracts_TwistedEdwards_sol_TwistedEdwards.abi: -------------------------------------------------------------------------------- 1 | [] -------------------------------------------------------------------------------- /contracts/bin/contracts_TwistedEdwards_sol_TwistedEdwards.bin: -------------------------------------------------------------------------------- 1 | 604c602c600b82828239805160001a60731460008114601c57601e565bfe5b5030600052607381538281f30073000000000000000000000000000000000000000030146080604052600080fd00a165627a7a723058200c2bca8787c535b3e05c388074e61330233e50359fc05ff04fbc2390989c71e70029 -------------------------------------------------------------------------------- /contracts/bin/contracts_VerificationKeys_sol_VerificationKeys.abi: -------------------------------------------------------------------------------- 1 | [] -------------------------------------------------------------------------------- /contracts/bin/contracts_VerificationKeys_sol_VerificationKeys.bin: -------------------------------------------------------------------------------- 1 | 6080604052348015600f57600080fd5b50603580601d6000396000f3006080604052600080fd00a165627a7a7230582090d7593bb4b98eebebada80c66e1e9ac8095be334d1c8ef82f001cdd769594ec0029 -------------------------------------------------------------------------------- /contracts/bin/contracts_Verifier_sol_Verifier.abi: -------------------------------------------------------------------------------- 1 | [] -------------------------------------------------------------------------------- /contracts/bin/contracts_Verifier_sol_Verifier.bin: -------------------------------------------------------------------------------- 1 | 6080604052348015600f57600080fd5b50603580601d6000396000f3006080604052600080fd00a165627a7a72305820166257231a1145ad6c7f3ff4ca8312bc5200908a4ff3e20692765e1374e0ede90029 -------------------------------------------------------------------------------- /contracts/contracts/FranklinProxy.sol: -------------------------------------------------------------------------------- 1 | pragma solidity ^0.4.24; 2 | 3 | import {FranklinCommon} from "./common/FranklinCommon.sol"; 4 | 5 | contract FranklinProxy is FranklinCommon { 6 | 7 | constructor(address _depositor, address _transactor, address _exitor) public { 8 | nextAccountToRegister = 2; 9 | lastVerifiedRoot = EMPTY_TREE_ROOT; 10 | operators[msg.sender] = true; 11 | depositor = _depositor; 12 | transactor = _transactor; 13 | 
exitor = _exitor; 14 | 15 | } 16 | 17 | function deposit(uint256[2] memory, uint128) public payable { 18 | callExternal(depositor); 19 | } 20 | 21 | function depositInto(uint24, uint128) public payable { 22 | callExternal(depositor); 23 | } 24 | 25 | function cancelDeposit() public { 26 | callExternal(depositor); 27 | } 28 | 29 | function startNextDepositBatch() public { 30 | callExternal(depositor); 31 | } 32 | 33 | function changeDepositBatchFee(uint128) public { 34 | callExternal(depositor); 35 | } 36 | 37 | function commitDepositBlock(uint256, uint24[DEPOSIT_BATCH_SIZE] memory, uint32, bytes32) public { 38 | callExternal(depositor); 39 | } 40 | 41 | function verifyDepositBlock(uint256, uint24[DEPOSIT_BATCH_SIZE] memory, uint32, uint256[8] memory) public { 42 | callExternal(depositor); 43 | } 44 | 45 | function commitTransferBlock(uint32, uint128, bytes memory, bytes32) public { 46 | callExternal(transactor); 47 | } 48 | 49 | function verifyTransferBlock(uint32, uint256[8] memory) public { 50 | callExternal(transactor); 51 | } 52 | 53 | function exit() public payable { 54 | callExternal(exitor); 55 | } 56 | 57 | function cancelExit() public { 58 | callExternal(exitor); 59 | } 60 | 61 | function startNextExitBatch() public { 62 | callExternal(exitor); 63 | } 64 | 65 | function changeExitBatchFee(uint128) public { 66 | callExternal(exitor); 67 | } 68 | 69 | function commitExitBlock(uint256, uint24[EXIT_BATCH_SIZE] memory, uint32, bytes memory, bytes32) public { 70 | callExternal(exitor); 71 | } 72 | 73 | function verifyExitBlock(uint256, uint32, uint256[8] memory) public { 74 | callExternal(exitor); 75 | } 76 | 77 | function withdrawUserBalance(uint256) public { 78 | callExternal(exitor); 79 | } 80 | 81 | // this is inline delegate-call to dispatch functions to subcontracts that are responsible for execution 82 | function callExternal(address callee) internal { 83 | assembly { 84 | let memoryPointer := mload(0x40) 85 | calldatacopy(memoryPointer, 0, calldatasize) 86 | let newFreeMemoryPointer := add(memoryPointer, calldatasize) 87 | mstore(0x40, newFreeMemoryPointer) 88 | let retVal := delegatecall(sub(gas, 2000), callee, memoryPointer, calldatasize, newFreeMemoryPointer, 0x40) 89 | let retDataSize := returndatasize 90 | returndatacopy(newFreeMemoryPointer, 0, retDataSize) 91 | switch retVal case 0 { revert(newFreeMemoryPointer, returndatasize) } default { return(newFreeMemoryPointer, retDataSize) } 92 | //return(newFreeMemoryPointer, retDataSize) 93 | } 94 | } 95 | } -------------------------------------------------------------------------------- /contracts/contracts/Migrations.sol: -------------------------------------------------------------------------------- 1 | pragma solidity ^0.4.24; 2 | 3 | contract Migrations { 4 | address public owner; 5 | uint public last_completed_migration; 6 | 7 | constructor() public { 8 | owner = msg.sender; 9 | } 10 | 11 | modifier restricted() { 12 | if (msg.sender == owner) _; 13 | } 14 | 15 | function setCompleted(uint completed) public restricted { 16 | last_completed_migration = completed; 17 | } 18 | 19 | function upgrade(address new_address) public restricted { 20 | Migrations upgraded = Migrations(new_address); 21 | upgraded.setCompleted(last_completed_migration); 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /contracts/contracts/common/FranklinCommon.sol: -------------------------------------------------------------------------------- 1 | pragma solidity ^0.4.24; 2 | 3 | import 
"./Verifier.sol"; 4 | import "./VerificationKeys.sol"; 5 | import "./FranklinStorage.sol"; 6 | 7 | contract FranklinCommon is VerificationKeys, FranklinStorage, Verifier { 8 | 9 | modifier active_only() { 10 | require(!stopped, "contract should not be globally stopped"); 11 | _; 12 | } 13 | 14 | modifier operator_only() { 15 | require(operators[msg.sender] == true, "sender should be one of the operators"); 16 | _; 17 | } 18 | 19 | // unit normalization functions 20 | function scaleIntoPlasmaUnitsFromWei(uint256 value) 21 | public 22 | pure 23 | returns (uint128) { 24 | uint256 den = DENOMINATOR; 25 | require(value % den == 0, "amount has higher precision than possible"); 26 | uint256 scaled = value / den; 27 | require(scaled < uint256(1) << 128, "deposit amount is too high"); 28 | return uint128(scaled); 29 | } 30 | 31 | function scaleFromPlasmaUnitsIntoWei(uint128 value) 32 | public 33 | pure 34 | returns (uint256) { 35 | return uint256(value) * DENOMINATOR; 36 | } 37 | 38 | function verifyProof(Circuit circuitType, uint256[8] memory proof, bytes32 oldRoot, bytes32 newRoot, bytes32 finalHash) 39 | internal view returns (bool valid) 40 | { 41 | uint256 mask = (~uint256(0)) >> 3; 42 | uint256[14] memory vk; 43 | uint256[] memory gammaABC; 44 | if (circuitType == Circuit.DEPOSIT) { 45 | (vk, gammaABC) = getVkDepositCircuit(); 46 | } else if (circuitType == Circuit.TRANSFER) { 47 | (vk, gammaABC) = getVkTransferCircuit(); 48 | } else if (circuitType == Circuit.EXIT) { 49 | (vk, gammaABC) = getVkExitCircuit(); 50 | } else { 51 | return false; 52 | } 53 | uint256[] memory inputs = new uint256[](3); 54 | inputs[0] = uint256(oldRoot); 55 | inputs[1] = uint256(newRoot); 56 | inputs[2] = uint256(finalHash) & mask; 57 | return Verify(vk, gammaABC, proof, inputs); 58 | } 59 | 60 | } -------------------------------------------------------------------------------- /contracts/contracts/common/VerificationKeys.sol: -------------------------------------------------------------------------------- 1 | 2 | // This contract is generated programmatically 3 | 4 | pragma solidity ^0.4.24; 5 | import "../keys/DepositVerificationKey.sol"; 6 | import "../keys/TransferVerificationKey.sol"; 7 | import "../keys/ExitVerificationKey.sol"; 8 | 9 | 10 | // Hardcoded constants to avoid accessing store 11 | contract VerificationKeys is TransferVerificationKey, DepositVerificationKey, ExitVerificationKey { 12 | } 13 | -------------------------------------------------------------------------------- /contracts/contracts/common/Verifier.sol: -------------------------------------------------------------------------------- 1 | // from https://github.com/HarryR/ethsnarks/blob/master/contracts/Verifier.sol 2 | pragma solidity ^0.4.24; 3 | 4 | 5 | contract Verifier { 6 | 7 | function NegateY( uint256 Y ) 8 | internal pure returns (uint256) 9 | { 10 | uint q = 21888242871839275222246405745257275088696311157297823662689037894645226208583; 11 | return q - (Y % q); 12 | } 13 | 14 | function Verify ( uint256[14] in_vk, uint256[] vk_gammaABC, uint256[8] in_proof, uint256[] proof_inputs ) 15 | internal 16 | view 17 | returns (bool) 18 | { 19 | require( ((vk_gammaABC.length / 2) - 1) == proof_inputs.length, "Invalid number of public inputs" ); 20 | 21 | // Compute the linear combination vk_x 22 | uint256[3] memory mul_input; 23 | uint256[4] memory add_input; 24 | bool success; 25 | uint m = 2; 26 | 27 | // First two fields are used as the sum 28 | add_input[0] = vk_gammaABC[0]; 29 | add_input[1] = vk_gammaABC[1]; 30 | 31 | // Performs a sum 
of gammaABC[0] + sum[ gammaABC[i+1]^proof_inputs[i] ] 32 | for (uint i = 0; i < proof_inputs.length; i++) { 33 | mul_input[0] = vk_gammaABC[m++]; 34 | mul_input[1] = vk_gammaABC[m++]; 35 | mul_input[2] = proof_inputs[i]; 36 | 37 | assembly { 38 | // ECMUL, output to last 2 elements of `add_input` 39 | success := staticcall(sub(gas, 2000), 7, mul_input, 0x60, add(add_input, 0x40), 0x40) 40 | } 41 | require( success, "Failed to call ECMUL precompile" ); 42 | 43 | assembly { 44 | // ECADD 45 | success := staticcall(sub(gas, 2000), 6, add_input, 0x80, add_input, 0x40) 46 | } 47 | require( success, "Failed to call ECADD precompile" ); 48 | } 49 | 50 | uint[24] memory input = [ 51 | // (proof.A, proof.B) 52 | in_proof[0], in_proof[1], // proof.A (G1) 53 | in_proof[2], in_proof[3], in_proof[4], in_proof[5], // proof.B (G2) 54 | 55 | // (-vk.alpha, vk.beta) 56 | in_vk[0], NegateY(in_vk[1]), // -vk.alpha (G1) 57 | in_vk[2], in_vk[3], in_vk[4], in_vk[5], // vk.beta (G2) 58 | 59 | // (-vk_x, vk.gamma) 60 | add_input[0], NegateY(add_input[1]), // -vk_x (G1) 61 | in_vk[6], in_vk[7], in_vk[8], in_vk[9], // vk.gamma (G2) 62 | 63 | // (-proof.C, vk.delta) 64 | in_proof[6], NegateY(in_proof[7]), // -proof.C (G1) 65 | in_vk[10], in_vk[11], in_vk[12], in_vk[13] // vk.delta (G2) 66 | ]; 67 | 68 | uint[1] memory out; 69 | assembly { 70 | success := staticcall(sub(gas, 2000), 8, input, 768, out, 0x20) 71 | } 72 | require(success, "Failed to call pairing precompile"); 73 | return out[0] == 1; 74 | } 75 | } -------------------------------------------------------------------------------- /contracts/contracts/keys.example/DepositVerificationKey.sol: -------------------------------------------------------------------------------- 1 | 2 | // This contract is generated programmatically 3 | 4 | pragma solidity ^0.4.24; 5 | 6 | 7 | // Hardcoded constants to avoid accessing store 8 | contract DepositVerificationKey { 9 | 10 | function getVkDepositCircuit() internal pure returns (uint256[14] memory vk, uint256[] memory gammaABC) { 11 | 12 | 13 | vk[0] = 0x2806ceee6e1d1eb6f31ff675b92143dba13b3fbad614f843f5c3afd6568b205a; 14 | vk[1] = 0x122808a613a8f598c4b867f4b2ff0d43f0bb6d88eeb8ee75eb6f24d359bbf8e0; 15 | vk[2] = 0x22448ceb94acf3cd1d15bcaa5b0fea9b93517c5845177e084089d9ee6d79ff50; 16 | vk[3] = 0x031a214677d973a795d26219598bb0512ea0dc76e75dbce39cbc9d9b17a2cc8c; 17 | vk[4] = 0x0a9a3bef0fd1d79ae409f82733e8e462b75ff5f05bea6422066b95ca9e02c9dd; 18 | vk[5] = 0x0c6fa2f476a8f04a57dbcb68e2425440062630c908d6e198c875d875130025e9; 19 | vk[6] = 0x0ae5cc9692f99a91d35fe9abfd2e3786b3ad14b327d0fb9e51c514f0bd95959c; 20 | vk[7] = 0x1d3950dce5e030d612b1167e70c34ab4cf1d9c4320206047652747915ed67097; 21 | vk[8] = 0x0a9138320c05f4c5d26da0bc8d508398b027cabffd4dd36fb65f29d9fa3cc3dd; 22 | vk[9] = 0x2a5daa065c803df8f429e946f2c5700026c2d12dc172e192d270dc73fdc8cc39; 23 | vk[10] = 0x27863bde61126110d0d6f2f31947e2f63c3cdd6594caf3ec225babfa7afa4f23; 24 | vk[11] = 0x20fd57765a2d6a84f2bee03afefaf5bd42fb5804b8a5c0874762d19afafc1f41; 25 | vk[12] = 0x0b451a37fed071dd2ae876826a32f4fe0a58d990c624cfde92096cc2b3fd4a08; 26 | vk[13] = 0x2920445cbffbb49c761e20d864dccf560258c3cd7302773accaa7d78aa206ba9; 27 | 28 | gammaABC = new uint256[](8); 29 | gammaABC[0] = 0x2d8c931ff562a4d5a349685f6fdf2a30a0883e2cfcb8a27a642687a411895577; 30 | gammaABC[1] = 0x2530d4a3fcaea9d34725f2f02100be77a511c2a507e084e6e379e068d4cde559; 31 | gammaABC[2] = 0x01e3781d1f0a0f6beac8e3aa47a8c509d80406326c5c4594aa04f4c0f9b0138a; 32 | gammaABC[3] = 
0x235683ea39dc02e4919b9f99e14c927399f26c3c24935caf3d7f745377fd7629; 33 | gammaABC[4] = 0x2fbf7af8adca023e9744087f7845cc19f2bdb9cbba20c57dc77edaa6115625fb; 34 | gammaABC[5] = 0x1c3ad616391a8022720659ceb30a65391117df3b0e501f0e19d99800fea0d662; 35 | gammaABC[6] = 0x1bc9897f50972e9924dfcc9dc9c9104c50588c168b7e137526f9ab8cfdc62482; 36 | gammaABC[7] = 0x04f9efe4f42e792c6abffbb1b5431a71f8b7712a3e65011f07c7bb348914d0d0; 37 | 38 | 39 | } 40 | 41 | } 42 | -------------------------------------------------------------------------------- /contracts/contracts/keys.example/ExitVerificationKey.sol: -------------------------------------------------------------------------------- 1 | 2 | // This contract is generated programmatically 3 | 4 | pragma solidity ^0.4.24; 5 | 6 | 7 | // Hardcoded constants to avoid accessing store 8 | contract ExitVerificationKey { 9 | 10 | function getVkExitCircuit() internal pure returns (uint256[14] memory vk, uint256[] memory gammaABC) { 11 | 12 | 13 | vk[0] = 0x08730c8e1cbb3eb57ea2c4d8d5c48fc56aa910d331ef8723e74c9e5750fb943a; 14 | vk[1] = 0x29d8dc5dd6f9b1a692a37c4982faa894119b98dbc35fe5b0e20363ffce6c5fea; 15 | vk[2] = 0x016411e5a45fa79352c47a455eeaddb12059d5bb5579487e8abb4ffd2f472c5a; 16 | vk[3] = 0x27084dc459b5dd6d1c60cccda8d78a80755ae23436dd44aca13d44962dea244a; 17 | vk[4] = 0x16c4a816a173596179da516fcc694c68878da66042afe8c35f098742482ab0a0; 18 | vk[5] = 0x02318a621109b9e543cc09e7bcde9cfe15386429572fb31d08c88a42b418fe8e; 19 | vk[6] = 0x0f10cfbdfaf1753d7214d6d0f2644458bddd10ab5619d19e7e41ac709b30b3ef; 20 | vk[7] = 0x21f68a12e4e0ce060910e1335703431d20ce5ac7109baa52e34423fdcc659c54; 21 | vk[8] = 0x2c505db2e660d4ea4e6f41e0f3b921c8358d6b3bcedaaf8349f01fbf56b03d4f; 22 | vk[9] = 0x0733f6edbaef053203c0654afd9eea4b8909c88473b1f1aa72afdd5a4cded36e; 23 | vk[10] = 0x1a4c2e2c731b7de65f7b0cad686b5c7964fe162741385a33c72b08c903791e29; 24 | vk[11] = 0x1efc25b88bfac326220c61f7ab065be1c77af7b74f67f63262c2e66613eebdac; 25 | vk[12] = 0x264f4171cfa70441d121f28ce3c8186fa9011a9a2611ce7c920cba0d8cb3f826; 26 | vk[13] = 0x126ba4be6253be58ceb3bb5bf94045b310baea07bc35d669283a10ffdf908530; 27 | 28 | gammaABC = new uint256[](8); 29 | gammaABC[0] = 0x01b0002b9d1ceb70fe45b66103672ab9500327509572c1701d3250b00c299e88; 30 | gammaABC[1] = 0x080c6552831b1c735ac286a537343522bd10083d3b3fed202ffae1e83ff87cc0; 31 | gammaABC[2] = 0x2e13eae69b931076d3e4ce5a0764aabf54030b14f53653627d13def87840df07; 32 | gammaABC[3] = 0x0b2032652364ded806919e92594a508eafae7eabb42bb6e731c1bfc0b3918b6c; 33 | gammaABC[4] = 0x024ba284020edb04f39d1eeeccc5f826826086f2bac64e0b233114cd523043e4; 34 | gammaABC[5] = 0x082cb9dada50204406112d784129e28ce9fea7d999f0540fe36131aee46ad632; 35 | gammaABC[6] = 0x2b661ab87456646811a53fbb055addc96eef2396827889305bf2ce7ab8c1ea44; 36 | gammaABC[7] = 0x118ee65eb15ec1db1b648d4c64e490977198de8a01580399d5a3835576d602b9; 37 | 38 | 39 | } 40 | 41 | } 42 | -------------------------------------------------------------------------------- /contracts/contracts/keys.example/TransferVerificationKey.sol: -------------------------------------------------------------------------------- 1 | 2 | // This contract is generated programmatically 3 | 4 | pragma solidity ^0.4.24; 5 | 6 | 7 | // Hardcoded constants to avoid accessing store 8 | contract TransferVerificationKey { 9 | 10 | function getVkTransferCircuit() internal pure returns (uint256[14] memory vk, uint256[] memory gammaABC) { 11 | 12 | 13 | vk[0] = 0x20fabca7a936d76e2de75f334d12408a85a3038b2c748e884a7d10b6c8e785ae; 14 | vk[1] = 
0x05b7926cd85113320bdad329a22fa8a7bb13fee4e5ae784441c975f7be24b144; 15 | vk[2] = 0x17f95798ad37dc5d5842e0a25085bb7091525e5a19a4e664c4325d43996733bb; 16 | vk[3] = 0x02943aa487fcc67df949c6e2481e6a61b992ce4ad6f5d92372f59b93d15c0d30; 17 | vk[4] = 0x0131b8773666df90359fdbe663c451881a09b24df4564be75dd86ceb485bc16c; 18 | vk[5] = 0x091b92109b2c58ee59d43f1646b910e20203dcb95676b139509886a314e00806; 19 | vk[6] = 0x18d2e27fdde751997e5ac3146a828702404f47c56072d5fd9e628bd9d00b5aac; 20 | vk[7] = 0x0cf19a2bf4b4e09843df3cd201709e55ae3a5ff31d8b59cd89d7b6eb06b98703; 21 | vk[8] = 0x14eaf5b1216b2adc72b75a80df5c202eb16953605656165177035432a6f88b8f; 22 | vk[9] = 0x0f19e2b9bb71cab73500652d5728fe94843dcef2e73a2fd97739f6e8fd83369a; 23 | vk[10] = 0x0135e689579dbf14165032a60d35c70c95fc8fcf56f16a5335c62ef49e4fdfc4; 24 | vk[11] = 0x196c80c924953fae60eb1c57c03beed305ed22454b359ade4265a434f6aef114; 25 | vk[12] = 0x0a282bf79f6d625029d9746a596c33e286ec52ba4fd14c87577b39cdbc656dbc; 26 | vk[13] = 0x216dd1c95ba7a7408e093cb8010c09addcb31e54d9ac23493341660ce3b9d742; 27 | 28 | gammaABC = new uint256[](8); 29 | gammaABC[0] = 0x29c50e44a59741be3b553316fa0181329ec5275b046bb428eb3e041a1d6a4017; 30 | gammaABC[1] = 0x1f799ade5130abbb234abfaa536226b602cd2e2e77c3afb5c6c29bb7835117b1; 31 | gammaABC[2] = 0x20ca58d0e2b5c393272802f45e7fef3d27efa2696c0be7bd7f31435b3403acbd; 32 | gammaABC[3] = 0x2b7f914981d405bc613a0c2c3accb5ac825b900ddbef5b7689567e0ffdf84060; 33 | gammaABC[4] = 0x0be03143ab86092d0872260b1ed87202fd2816e0ddc15e4dda842688ff3f8bba; 34 | gammaABC[5] = 0x1c4900581b3dc146c77b6bf15ceffd90ca3d34fd69b393ee562e8cc614decf91; 35 | gammaABC[6] = 0x1261a3e53a4815cb7237aa39113f0dd33d7464e80a317a1a35ed33a217053584; 36 | gammaABC[7] = 0x17599c2506b56e759ba24bf30920525cfd12337392ed0cebc0b851a142802ca0; 37 | 38 | 39 | } 40 | 41 | } 42 | -------------------------------------------------------------------------------- /contracts/migrations/1_initial_migration.js: -------------------------------------------------------------------------------- 1 | var Migrations = artifacts.require("./Migrations.sol") 2 | var FranklinProxy = artifacts.require("./FranklinProxy.sol") 3 | var Depositor = artifacts.require("./Depositor.sol") 4 | var Exitor = artifacts.require("./Exitor.sol") 5 | var Transactor = artifacts.require("./Transactor.sol") 6 | 7 | var ethers = require('ethers') 8 | 9 | module.exports = async function(deployer) { 10 | let m = await deployer.deploy(Migrations) 11 | 12 | await deployer.deploy(Exitor) 13 | let ex = await Exitor.deployed() 14 | 15 | await deployer.deploy(Transactor) 16 | let tr = await Transactor.deployed() 17 | 18 | await deployer.deploy(Depositor) 19 | let dep = await Depositor.deployed() 20 | 21 | let paddingPubKey = JSON.parse(process.env.PADDING_PUB_KEY) 22 | await deployer.deploy(FranklinProxy, dep.address, tr.address, ex.address) 23 | let franklin = await FranklinProxy.deployed() 24 | let value = ethers.utils.parseEther("0.001") 25 | await franklin.deposit(paddingPubKey, 0, {value}) 26 | } -------------------------------------------------------------------------------- /contracts/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "plasma-contracts", 3 | "version": "0.1.0", 4 | "license": "GPL-3.0-only", 5 | "devDependencies": {}, 6 | "dependencies": { 7 | "axios": "^0.18.0", 8 | "bn.js": "^4.11.8", 9 | "brorand": "^1.1.0", 10 | "elliptic": "^6.4.1", 11 | "ethereumjs-util": "^6.0.0", 12 | "ethers": "^4.0.20", 13 | "ethjs": "^0.4.0", 14 | "ganache-cli": "^6.2.3", 
15 | "hash.js": "^1.1.7", 16 | "hmac-drbg": "^1.0.1", 17 | "minimalistic-assert": "^1.0.1", 18 | "minimalistic-crypto-utils": "^1.0.1", 19 | "querystring": "^0.2.0", 20 | "solc": "^0.4.24", 21 | "truffle": "^5.0.0", 22 | "truffle-hdwallet-provider": "^1.0.0-web3one.1", 23 | "url": "^0.11.0", 24 | "web3": "^1.0.0-beta.37" 25 | }, 26 | "scripts": { 27 | "build": "solcjs -o bin --bin --abi contracts/*.sol", 28 | "build-common-interface": "solcjs -o bin --bin --abi commonInterfaceVariant/*.sol", 29 | "ganache": "ganache-cli -l 7000000 -i 4 -m \"cliff flag flag fence mesh quarter coyote mechanic cash draw remain priority\" ", 30 | "start-geth": "geth --rpc --rpcapi \"eth,net,web3,personal\" --dev --miner.gastarget 8000000 --dev.period=1", 31 | "deploy": "yarn truffle migrate --reset --network $TRUFFLE_NETWORK", 32 | "geth": "node scripts/setup-geth.js 2& yarn start-geth", 33 | "geth-deployed": "(sleep 10 && node scripts/setup-geth.js && yarn deploy-geth)& yarn start-geth > /dev/null", 34 | "start-parity": "parity --config dev --jsonrpc-apis all", 35 | "parity-deployed": "(sleep 2 && node scripts/setup-geth.js && yarn deploy-local)& yarn start-parity > /dev/null", 36 | "flat": "flat -input contracts/PlasmaContract.sol -output build/PlasmaContractFlattened.sol", 37 | "flatten": "solidity_flattener contracts/FranklinProxy.sol", 38 | "postinstall": "python -m pip install solidity-flattener" 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /contracts/scripts/check-price.js: -------------------------------------------------------------------------------- 1 | const ethers = require('ethers') 2 | const provider = new ethers.providers.JsonRpcProvider(process.env.WEB3_URL) 3 | 4 | const rinkeby = ethers.getDefaultProvider("rinkeby") 5 | const mainnet = new ethers.providers.InfuraProvider() 6 | const mainnet2 = new ethers.providers.EtherscanProvider() 7 | 8 | async function calc(addr) { 9 | let balanceBefore = await provider.getBalance(addr, 4385572) 10 | let balanceAfter = await provider.getBalance(addr, 4386335) 11 | console.log('balanceBefore:', ethers.utils.formatEther(balanceBefore)) 12 | console.log('balanceAfter:', ethers.utils.formatEther(balanceAfter)) 13 | console.log('diff:', ethers.utils.formatEther(balanceBefore.sub(balanceAfter))) 14 | } 15 | 16 | async function main() { 17 | 18 | console.log('gas price rinkeby', (await provider.getGasPrice()).toNumber() ) 19 | console.log('gas price rinkeby', (await rinkeby.getGasPrice()).toNumber() ) 20 | 21 | console.log('gas price mainnet', (await mainnet.getGasPrice()).toNumber() ) 22 | console.log('gas price mainnet2', (await mainnet2.getGasPrice()).toNumber() ) 23 | 24 | calc('0x'+process.env.SENDER_ACCOUNT) 25 | calc('0xB0587796F36E39c4b0d79790D2Efa874386dcD6d') 26 | } 27 | 28 | main() 29 | -------------------------------------------------------------------------------- /contracts/scripts/deposit.js: -------------------------------------------------------------------------------- 1 | const ethers = require('ethers') 2 | const provider = new ethers.providers.JsonRpcProvider(process.env.WEB3_URL) 3 | 4 | const PlasmaContractABI = require('../build/contracts/FranklinProxy.json').abi 5 | //const PlasmaContractABI = JSON.parse(fs.readFileSync('./contracts/build/contracts/FranklinProxy.json', 'utf8')).abi 6 | 7 | const source = ethers.Wallet.fromMnemonic(process.env.MNEMONIC, "m/44'/60'/0'/0/0").connect(provider) 8 | const franklin = new ethers.Contract(process.env.CONTRACT_ADDR, PlasmaContractABI, source) 
9 | 10 | async function main() { 11 | let paddingPubKey = JSON.parse(process.env.PADDING_PUB_KEY) 12 | let value = ethers.utils.parseEther("0.001") 13 | let r = await franklin.deposit(paddingPubKey, 0, {value}) 14 | console.log(r) 15 | } 16 | 17 | main() 18 | -------------------------------------------------------------------------------- /contracts/scripts/revert-reason.js: -------------------------------------------------------------------------------- 1 | const ethers = require('ethers') 2 | //const provider = ethers.getDefaultProvider('rinkeby') 3 | //const provider = new ethers.providers.JsonRpcProvider('https://rinkeby.infura.io/v3/48beda66075e41bda8b124c6a48fdfa0') 4 | const provider = new ethers.providers.JsonRpcProvider(process.env.WEB3_URL) 5 | 6 | function hex_to_ascii(str1) { 7 | var hex = str1.toString(); 8 | var str = ''; 9 | for (var n = 0; n < hex.length; n += 2) { 10 | str += String.fromCharCode(parseInt(hex.substr(n, 2), 16)); 11 | } 12 | return str; 13 | } 14 | 15 | async function reason() { 16 | var args = process.argv.slice(2) 17 | let hash = args[0] 18 | console.log('tx hash:', hash) 19 | console.log('provider:', process.env.WEB3_URL) 20 | 21 | let tx = await provider.getTransaction(hash) 22 | if (!tx) { 23 | console.log('tx not found') 24 | } else { 25 | //console.log('tx:', tx) 26 | 27 | let receipt = await provider.getTransactionReceipt(hash) 28 | //console.log('receipt:', receipt) 29 | 30 | if (receipt.status) { 31 | console.log('tx success') 32 | } else { 33 | let code = await provider.call(tx, tx.blockNumber) 34 | let reason = hex_to_ascii(code.substr(138)) 35 | console.log('revert reason:', reason) 36 | } 37 | } 38 | } 39 | 40 | reason() -------------------------------------------------------------------------------- /contracts/test/PlasmaTester.sol: -------------------------------------------------------------------------------- 1 | pragma solidity ^0.4.24; 2 | 3 | import {PlasmaTransactor} from "./PlasmaTransactor.sol"; 4 | import {PlasmaDepositor} from "./PlasmaDepositor.sol"; 5 | import {PlasmaExitor} from "./PlasmaExitor.sol"; 6 | 7 | contract PlasmaTester is PlasmaDepositor, PlasmaExitor, PlasmaTransactor { 8 | uint256 constant DEPOSIT_BATCH_SIZE = 1; 9 | 10 | uint24 constant operatorsAccounts = 4; 11 | uint24 public nextAccountToRegister = operatorsAccounts; 12 | 13 | // create technological accounts for an operator. 
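    // The constructor below pre-registers operator accounts 1..operatorsAccounts-1
    // for the deployer (msg.sender) using the supplied default public keys;
    // account 0 is deliberately never registered, and the last verified root
    // starts from EMPTY_TREE_ROOT. verifyProof() at the bottom of this test
    // contract is stubbed to always return true, so blocks can be committed and
    // "verified" in tests without real SNARK proofs.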
14 | constructor(uint256[operatorsAccounts - 1] memory defaultPublicKeys) public { 15 | lastVerifiedRoot = EMPTY_TREE_ROOT; 16 | operators[msg.sender] = true; 17 | // account number 0 is NEVER registered 18 | Account memory freshAccount; 19 | for (uint24 i = 1; i < operatorsAccounts; i++) { 20 | freshAccount = Account( 21 | uint8(AccountState.REGISTERED), 22 | uint32(0), 23 | msg.sender, 24 | defaultPublicKeys[i-1], 25 | uint32(0), 26 | uint32(0) 27 | ); 28 | accounts[i] = freshAccount; 29 | } 30 | } 31 | 32 | function verifyProof(Circuit, uint256[8] memory, bytes32, bytes32, bytes32) internal view returns (bool valid) 33 | { 34 | return true; 35 | } 36 | } -------------------------------------------------------------------------------- /contracts/test/TestPlasma.sol: -------------------------------------------------------------------------------- 1 | pragma solidity ^0.4.24; 2 | 3 | import "truffle/Assert.sol"; 4 | import "../contracts/Plasma.sol"; 5 | 6 | 7 | contract GulliblePlasma is PlasmaStub { 8 | 9 | function verifyUpdateProof(uint256[8] memory, bytes32, bytes32, bytes32) 10 | internal view returns (bool valid) 11 | { 12 | return true; 13 | } 14 | 15 | } 16 | 17 | 18 | contract TestPlasma { 19 | 20 | GulliblePlasma plasma; 21 | 22 | constructor() public { 23 | 24 | } 25 | 26 | function beforeAll() public { 27 | plasma = new GulliblePlasma(); 28 | } 29 | 30 | function testCommitment() { 31 | bytes memory empty; 32 | plasma.commitBlock(0, 0, empty, 0); 33 | Assert.equal(true, true, "commitment failed"); 34 | } 35 | 36 | function testVerification() { 37 | uint256[8] memory proof_empty; 38 | plasma.verifyBlock(0, proof_empty); 39 | Assert.equal(true, true, "verification failed"); 40 | } 41 | 42 | } 43 | -------------------------------------------------------------------------------- /contracts/test/deposit.js: -------------------------------------------------------------------------------- 1 | const Plasma = artifacts.require("PlasmaDepositor"); 2 | const assert = require("assert"); 3 | const transactionLib = require("../lib/transaction"); 4 | const ethUtils = require("ethereumjs-util"); 5 | const BN = require("bn.js"); 6 | 7 | console.log("Contract size = " + (Plasma.bytecode.length - 2)/2 ); 8 | 9 | const operatorsAccounts = 4; 10 | 11 | const proof = [ 12 | new BN("16755890309709504255050985180817557075102043093245672893842987730500160692655"), 13 | new BN("17971101070761193284039286941506202506127198560851924391966482795354105619809"), 14 | new BN("4572095663635183615127149738886689560505627507490525050282444962500344069475"), 15 | new BN("15157278983069442488620677124413686978990457609776312356413739423327009119236"), 16 | new BN("17880186821198566711513284459389214912525477464363278607518585813877553130748"), 17 | new BN("10255002830203696592186441422789589545615773753711791040005597942198369865646"), 18 | new BN("14023986121275820632410270476556337277250001417755645438870438964029440399619"), 19 | new BN("11871408088467689433052310116470249687042273778375592692006275805793257751339"), 20 | ] 21 | 22 | contract('Plasma', async (accounts) => { 23 | 24 | 25 | const account = accounts[0]; 26 | let contract; 27 | 28 | beforeEach(async () => { 29 | const accs = []; 30 | for (let i = 1; i < operatorsAccounts; i++) { 31 | const {packedPublicKey} = transactionLib.newKey(); 32 | accs.push(packedPublicKey); 33 | } 34 | 35 | contract = await Plasma.new({from: account}) 36 | console.log("Contract address = " + contract.address); 37 | }) 38 | 39 | function randomPublicDataPiece() { 40 | let 
from = new BN(Math.floor(Math.random() * 1000000)); 41 | let to = new BN(Math.floor(Math.random() * 1000000)); 42 | let amount = new BN(Math.floor(Math.random() * 1000)); 43 | let fee = new BN(Math.floor(Math.random() * 100)); 44 | return transactionLib.getPublicData({from, to, amount, fee}); 45 | } 46 | 47 | function randomExitDataPiece(account, exitAmount) { 48 | let from = new BN(account); 49 | let to = new BN(0); 50 | let amount = new BN(exitAmount); 51 | let fee = new BN(Math.floor(Math.random() * 100)); 52 | return transactionLib.getPublicData({from, to, amount, fee}).bytes; 53 | } 54 | 55 | function randomPublicData(numTXes) { 56 | const arr = []; 57 | for (let i = 0; i < numTXes; i++) { 58 | arr.push(randomPublicDataPiece().bytes); 59 | } 60 | return Buffer.concat(arr); 61 | } 62 | 63 | it('do a deposit', async () => { 64 | try { 65 | let key = transactionLib.newKey(); 66 | let {x, y} = key.publicKey; 67 | 68 | console.log("Public key = " + x.toString(16) + ", " + y.toString(16)); 69 | console.log("Compressed key = " + key.packedPublicKey.toString(16)); 70 | 71 | let result = await contract.deposit([x, y], 0, {from: account, value: "1000000000000000000"}); 72 | console.log(result); 73 | let total = await contract.totalDepositRequests(); 74 | console.log("total requests = " + total.toString(10)); 75 | } catch(error) { 76 | console.log(error); 77 | throw error; 78 | } 79 | }) 80 | }); 81 | -------------------------------------------------------------------------------- /contracts/test/exit.js: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LuozhuZhang/sourceCode-zkSync-era-rollupContract-code-learning/14168cd267b20a3704d510c801770c3a7cdf3440/contracts/test/exit.js -------------------------------------------------------------------------------- /contracts/test/twistedEdwards.js: -------------------------------------------------------------------------------- 1 | 2 | 3 | const TwistedEdwards = artifacts.require("TwistedEdwards"); 4 | const assert = require("assert"); 5 | 6 | contract('Plasma', async (accounts) => { 7 | const BN = require("bn.js"); 8 | 9 | const account = accounts[0]; 10 | let contract; 11 | 12 | beforeEach(async () => { 13 | let x = new BN("2ef3f9b423a2c8c74e9803958f6c320e854a1c1c06cd5cc8fd221dc052d76df7", 16); 14 | let y = new BN("05a01167ea785d3f784224644a68e4067532c815f5f6d57d984b5c0e9c6c94b7", 16); 15 | contract = await TwistedEdwards.new([x, y], {from: account}) 16 | }) 17 | 18 | it('check generator on curve', async () => { 19 | try { 20 | let x = new BN("2ef3f9b423a2c8c74e9803958f6c320e854a1c1c06cd5cc8fd221dc052d76df7", 16); 21 | let y = new BN("05a01167ea785d3f784224644a68e4067532c815f5f6d57d984b5c0e9c6c94b7", 16); 22 | 23 | let generatorIsCorrect = await contract.checkOnCurve([x, y]); 24 | assert(generatorIsCorrect, "generator is not on curve"); 25 | 26 | let gasEstimate = await contract.checkOnCurve.estimateGas([x, y]); 27 | console.log("Checking a point is on curve takes gas = " + gasEstimate); 28 | 29 | } catch(error) { 30 | console.log(error); 31 | throw error; 32 | } 33 | }) 34 | 35 | it('check generator order', async () => { 36 | try { 37 | let x = new BN("2ef3f9b423a2c8c74e9803958f6c320e854a1c1c06cd5cc8fd221dc052d76df7", 16); 38 | let y = new BN("05a01167ea785d3f784224644a68e4067532c815f5f6d57d984b5c0e9c6c94b7", 16); 39 | 40 | let generatorIsCorrect = await contract.isCorrectGroup([x, y]); 41 | assert(generatorIsCorrect, "generator is not in correct group"); 42 | 43 | let gasEstimate = 
await contract.isCorrectGroup.estimateGas([x, y]); 44 | console.log("Checking a point order takes gas = " + gasEstimate); 45 | 46 | } catch(error) { 47 | console.log(error); 48 | throw error; 49 | } 50 | }) 51 | 52 | 53 | }); 54 | -------------------------------------------------------------------------------- /contracts/test/verifier.js: -------------------------------------------------------------------------------- 1 | const Plasma = artifacts.require("Plasma"); 2 | const assert = require("assert"); 3 | 4 | contract('Plasma', async (accounts) => { 5 | const BN = require("bn.js"); 6 | 7 | const account = accounts[0]; 8 | let contract; 9 | 10 | beforeEach(async () => { 11 | contract = await Plasma.new({from: account}) 12 | }) 13 | 14 | it('commit to data', async () => { 15 | try { 16 | let publicData = "0x00000080000000be0000000080000000be0000000080000000be0000000080000000be0000000080000000be0000000080000000be0000000080000000be0000000080000000be00"; 17 | let nextRoot = "0x1facb2cc667c5d3e7162274c00881fb98b2f5bf1c80fd7a612c7d7f2ca811089" 18 | let result = await contract.commitBlock(0, 0, publicData, nextRoot); 19 | let block = await contract.blocks(0); 20 | let totalCommitted = await contract.totalCommitted(); 21 | console.log("Total commited = " + totalCommitted); 22 | 23 | let proof = [ 24 | new BN("16755890309709504255050985180817557075102043093245672893842987730500160692655"), 25 | new BN("17971101070761193284039286941506202506127198560851924391966482795354105619809"), 26 | new BN("4572095663635183615127149738886689560505627507490525050282444962500344069475"), 27 | new BN("15157278983069442488620677124413686978990457609776312356413739423327009119236"), 28 | new BN("17880186821198566711513284459389214912525477464363278607518585813877553130748"), 29 | new BN("10255002830203696592186441422789589545615773753711791040005597942198369865646"), 30 | new BN("14023986121275820632410270476556337277250001417755645438870438964029440399619"), 31 | new BN("11871408088467689433052310116470249687042273778375592692006275805793257751339"), 32 | ] 33 | 34 | let proofResult = await contract.verifyBlock(0, proof); 35 | console.log("In verification previous root = " + proofResult.logs[0].args.a.toString(16)); 36 | console.log("In verification new root = " + proofResult.logs[1].args.a.toString(16)); 37 | console.log("In verification data commitment root = " + proofResult.logs[2].args.a.toString(16)); 38 | console.log("Proof verificaiton success = " + proofResult.logs[3].args.b); 39 | let totalVerified = await contract.totalVerified(); 40 | let lastVerifiedRoot = await contract.lastVerifiedRoot(); 41 | assert(lastVerifiedRoot == nextRoot); 42 | 43 | } catch(error) { 44 | console.log(error); 45 | throw error; 46 | } 47 | }) 48 | 49 | 50 | }); 51 | -------------------------------------------------------------------------------- /contracts/tools/do_deposit.js: -------------------------------------------------------------------------------- 1 | const ethers = require("ethers"); 2 | const path = require("path"); 3 | const fs = require("fs"); 4 | const abi_string = fs.readFileSync(path.resolve(__dirname, "../bin/contracts_PlasmaTester_sol_PlasmaTester.abi"), 'UTF-8'); 5 | const assert = require("assert"); 6 | const transactionLib = require("../lib/transaction"); 7 | const ethUtils = require("ethereumjs-util"); 8 | const BN = require("bn.js"); 9 | 10 | // const rpcEndpoint = process.env.WEB3_URL; 11 | // const contractAddress = process.env.CONTRACT_ADDRESS; 12 | // const privateKey = process.env.PRIVATE_KEY; 13 | 14 
| const rpcEndpoint = "https://rinkeby.infura.io/48beda66075e41bda8b124c6a48fdfa0"; 15 | const contractAddress = "0x3a0768b1302357033c83E4808D1C3F69f270c463"; 16 | const privateKey = "0x12B7678FF12FE8574AB74FFD23B5B0980B64D84345F9D637C2096CA0EF587806"; 17 | 18 | async function depositInto(acccountString, amountString) { 19 | let provider = new ethers.providers.JsonRpcProvider(rpcEndpoint); 20 | let walletWithProvider = new ethers.Wallet(privateKey, provider); 21 | if (process.env.MNEMONIC !== undefined) { 22 | console.log("Using mnemonics"); 23 | walletWithProvider = ethers.Wallet.fromMnemonic(process.env.MNEMONIC); 24 | walletWithProvider = walletWithProvider.connect(provider); 25 | } 26 | const senderAddress = await walletWithProvider.getAddress(); 27 | console.log("Sending from address " + senderAddress) 28 | let contract = new ethers.Contract(contractAddress, abi_string, walletWithProvider); 29 | const existingID = await contract.ethereumAddressToAccountID(senderAddress); 30 | console.log("This ethereum account has an id = " + existingID.toString(10)); 31 | const transactor = await contract.transactor(); 32 | console.log("Transactor address = " + transactor); 33 | const exitor = await contract.exitor(); 34 | console.log("Exitor address = " + exitor); 35 | const nextAccountToRegister = await contract.nextAccountToRegister(); 36 | console.log("Registering account = " + nextAccountToRegister.toString(10)); 37 | const newKey = transactionLib.newKey(); 38 | console.log("Plasma private key = " + newKey.privateKey.toString(16)); 39 | let {x, y} = newKey.publicKey; 40 | x = "0x" + x.toString(16); 41 | y = "0x" + y.toString(16); 42 | const txAmount = ethers.utils.parseEther("0.001"); 43 | console.log("Tx amount in wei = " + txAmount.toString(10)); 44 | const tx = await contract.deposit([x, y], 0, {value: txAmount}); 45 | console.log("Result = ", tx.hash); 46 | const result = await tx.wait(); 47 | const totalDepositRequests = await contract.totalDepositRequests(); 48 | console.log("Total deposits = " + totalDepositRequests.toString(10)); 49 | const totalExitRequests = await contract.totalExitRequests(); 50 | console.log("Total exits = " + totalExitRequests.toString(10)); 51 | } 52 | 53 | async function run() { 54 | const args = process.argv.slice(2); 55 | const account = args[0]; 56 | const amount = args[1]; 57 | await depositInto(account, amount); 58 | } 59 | 60 | run().then() -------------------------------------------------------------------------------- /contracts/tools/do_exit.js: -------------------------------------------------------------------------------- 1 | const ethers = require("ethers"); 2 | const path = require("path"); 3 | const fs = require("fs"); 4 | const abi_string = fs.readFileSync(path.resolve(__dirname, "../bin/contracts_PlasmaTester_sol_PlasmaTester.abi"), 'UTF-8'); 5 | const assert = require("assert"); 6 | const transactionLib = require("../lib/transaction"); 7 | const ethUtils = require("ethereumjs-util"); 8 | const BN = require("bn.js"); 9 | 10 | // const rpcEndpoint = "http://127.0.0.1:8545"; 11 | // const contractAddress = "0x4169D71D56563eA9FDE76D92185bEB7aa1Da6fB8"; 12 | const rpcEndpoint = "https://rinkeby.infura.io/48beda66075e41bda8b124c6a48fdfa0"; 13 | const contractAddress = "0x3a0768b1302357033c83E4808D1C3F69f270c463"; 14 | 15 | const privateKey = "0x12B7678FF12FE8574AB74FFD23B5B0980B64D84345F9D637C2096CA0EF587806"; 16 | 17 | async function exit() { 18 | let provider = new ethers.providers.JsonRpcProvider(rpcEndpoint); 19 | let walletWithProvider = new 
ethers.Wallet(privateKey, provider); 20 | if (process.env.MNEMONIC !== undefined) { 21 | console.log("Using mnemonics"); 22 | walletWithProvider = ethers.Wallet.fromMnemonic(process.env.MNEMONIC); 23 | walletWithProvider = walletWithProvider.connect(provider); 24 | } 25 | const senderAddress = await walletWithProvider.getAddress(); 26 | console.log("Sending from address " + senderAddress) 27 | let contract = new ethers.Contract(contractAddress, abi_string, walletWithProvider); 28 | const existingID = await contract.ethereumAddressToAccountID(senderAddress); 29 | console.log("This ethereum account has an id = " + existingID.toString(10)); 30 | const transactor = await contract.transactor(); 31 | console.log("Transactor address = " + transactor); 32 | const exitor = await contract.exitor(); 33 | console.log("Exitor address = " + exitor); 34 | const tx = await contract.exit(); 35 | console.log("Result = ", tx.hash); 36 | const result = await tx.wait(); 37 | const totalDepositRequests = await contract.totalDepositRequests(); 38 | console.log("Total deposits = " + totalDepositRequests.toString(10)); 39 | const totalExitRequests = await contract.totalExitRequests(); 40 | console.log("Total exits = " + totalExitRequests.toString(10)); 41 | } 42 | 43 | async function run() { 44 | await exit(); 45 | } 46 | 47 | run().then() -------------------------------------------------------------------------------- /contracts/tools/get_logs.js: -------------------------------------------------------------------------------- 1 | const ethers = require("ethers"); 2 | const abi_string = require("../build/contracts/PlasmaStorage.json").abi; 3 | const assert = require("assert"); 4 | const transactionLib = require("../lib/transaction"); 5 | const ethUtils = require("ethereumjs-util"); 6 | const BN = require("bn.js"); 7 | 8 | // const rpcEndpoint = "http://127.0.0.1:8545"; 9 | // const contractAddress = "0x4169D71D56563eA9FDE76D92185bEB7aa1Da6fB8"; 10 | 11 | const rpcEndpoint = "https://rinkeby.infura.io/48beda66075e41bda8b124c6a48fdfa0"; 12 | const contractAddress = "0xF8814577CdC0B9Ce7987C02a787efD1ac3bfF40d"; 13 | 14 | async function getLogs(batchNumberString) { 15 | let provider = new ethers.providers.JsonRpcProvider(rpcEndpoint); 16 | 17 | let contract = new ethers.Contract(contractAddress, abi_string, provider); 18 | const depositBatchSize = await contract.DEPOSIT_BATCH_SIZE(); 19 | const totalDepositRequests = await contract.totalDepositRequests(); 20 | console.log("Total deposits happened = " + totalDepositRequests.toString(10)); 21 | const totalBatches = totalDepositRequests.div(depositBatchSize); 22 | console.log("Current batch = " + totalBatches.toString(10)); 23 | for (let i = 0; i < totalBatches.toNumber(); i++) { 24 | console.log("Trying to get all logs for deposit batch " + i); 25 | let filter = contract.filters.LogDepositRequest("0x" + (new BN(i)).toString(16), null, null); 26 | // need to explicitly set block range 27 | let fullFilter = { 28 | fromBlock: 1, 29 | toBlock: 'latest', 30 | address: filter.address, 31 | topics: filter.topics 32 | }; 33 | let events = await provider.getLogs(fullFilter); 34 | console.log(JSON.stringify(events)); 35 | } 36 | } 37 | 38 | async function run() { 39 | const args = process.argv.slice(2); 40 | const batchNumber = args[0]; 41 | await getLogs(batchNumber); 42 | } 43 | 44 | run().then() -------------------------------------------------------------------------------- /contracts/tools/get_state.js: 
-------------------------------------------------------------------------------- 1 | const ethers = require("ethers"); 2 | const abi_string = require("../build/contracts/PlasmaStorage.json").abi; 3 | const assert = require("assert"); 4 | const transactionLib = require("../lib/transaction"); 5 | const ethUtils = require("ethereumjs-util"); 6 | const BN = require("bn.js"); 7 | 8 | // const rpcEndpoint = "http://127.0.0.1:8545"; 9 | // const contractAddress = "0x4169D71D56563eA9FDE76D92185bEB7aa1Da6fB8"; 10 | 11 | const rpcEndpoint = "https://rinkeby.infura.io/48beda66075e41bda8b124c6a48fdfa0"; 12 | const contractAddress = "0x3a0768b1302357033c83E4808D1C3F69f270c463"; 13 | 14 | const blockNumber = 2; 15 | 16 | async function getState() { 17 | let provider = new ethers.providers.JsonRpcProvider(rpcEndpoint); 18 | 19 | let contract = new ethers.Contract(contractAddress, abi_string, provider); 20 | const depositBatchSize = await contract.DEPOSIT_BATCH_SIZE(); 21 | const totalDepositRequests = await contract.totalDepositRequests(); 22 | console.log("Total deposits happened = " + totalDepositRequests.toString(10)); 23 | const totalBatches = totalDepositRequests.div(depositBatchSize); 24 | console.log("Current batch = " + totalBatches.toString(10)); 25 | 26 | const lastCommited = await contract.lastCommittedBlockNumber(); 27 | console.log("Last committed block = " + lastCommited.toString(10)); 28 | 29 | const lastVerified = await contract.lastVerifiedBlockNumber(); 30 | console.log("Last verified block = " + lastVerified.toString(10)); 31 | 32 | const lastVerifiedRoot = await contract.lastVerifiedRoot(); 33 | console.log("Last verified root = " + lastVerifiedRoot.toString(16)); 34 | 35 | const lastCommittedDepositBatch = await contract.lastCommittedDepositBatch(); 36 | console.log("Last committed deposit batch = " + lastCommittedDepositBatch.toString(10)); 37 | 38 | const lastVerifiedDepositBatch = await contract.lastVerifiedDepositBatch(); 39 | console.log("Last verified deposit batch = " + lastVerifiedDepositBatch.toString(10)); 40 | 41 | const block = await contract.blocks(blockNumber); 42 | console.log(JSON.stringify(block)); 43 | console.log("Block data commitment = " + block.publicDataCommitment); 44 | console.log("Block type = " + block.circuit); 45 | 46 | const batch = await contract.exitBatches(0); 47 | console.log(JSON.stringify(batch)); 48 | 49 | } 50 | 51 | async function run() { 52 | await getState(); 53 | } 54 | 55 | run().then() -------------------------------------------------------------------------------- /contracts/tools/migrate_geth.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | node node_modules/truffle/build/cli.bundled.js migrate --network ganache --reset -------------------------------------------------------------------------------- /contracts/tools/parse.js: -------------------------------------------------------------------------------- 1 | const ethers = require("ethers"); 2 | const path = require("path"); 3 | const fs = require("fs"); 4 | const abi_string = fs.readFileSync(path.resolve(__dirname, "../bin/contracts_PlasmaTester_sol_PlasmaTester.abi"), 'UTF-8'); 5 | const contractAddress = "0x2A8BadcC3d128d814AaEA66a89a6ba3e101D1761"; 6 | const contract = new ethers.Contract(contractAddress, abi_string); 7 | 8 | const parsed = contract.functions.commitExitBlock 9 | 10 | const data = 
"0x79b2ad7000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000300000000000000000000000000000000000000000000000000000000000000a015e5ec0c956605fc8f2b02f963641118f5c1a1f88fade7f33b5a905a1c28bf2700000000000000000000000000000000000000000000000000000000000000130000020000000000000000000000000000000000000000000000000000000000" 11 | 12 | -------------------------------------------------------------------------------- /contracts/tools/send_tx.js: -------------------------------------------------------------------------------- 1 | const ethers = require("ethers"); 2 | const path = require("path"); 3 | const fs = require("fs"); 4 | const abi_string = fs.readFileSync(path.resolve(__dirname, "../bin/contracts_PlasmaTester_sol_PlasmaTester.abi"), 'UTF-8'); 5 | const assert = require("assert"); 6 | const transactionLib = require("../lib/transaction"); 7 | const ethUtils = require("ethereumjs-util"); 8 | const BN = require("bn.js"); 9 | const axios = require("axios"); 10 | 11 | const BATCH_SIZE = 8; 12 | 13 | const from = 2; 14 | const to = 3; 15 | const privateKey = new BN("3a096bf1e1c006c7f7622015d78d9212e0aff5ca36a9c951afed2d449729d1c", 16); 16 | const startingNonce = 0; 17 | const good_until_block = 100; 18 | const amount = 11; 19 | const fee = 0; 20 | 21 | const endpoint = "http://127.0.0.1:8080/send" 22 | 23 | async function sendTX() { 24 | for (let i = 0; i < BATCH_SIZE; i ++) { 25 | const apiForm = transactionLib.createTransaction(from, to, amount, fee, startingNonce + i, good_until_block, privateKey); 26 | console.log(JSON.stringify(apiForm)); 27 | const result = await axios({ 28 | method: 'post', 29 | url: endpoint, 30 | data: apiForm 31 | }); 32 | console.log(JSON.stringify(result.data)); 33 | } 34 | 35 | } 36 | 37 | async function run() { 38 | await sendTX(); 39 | } 40 | 41 | run().then() -------------------------------------------------------------------------------- /contracts/tools/setup-geth.js: -------------------------------------------------------------------------------- 1 | async function setup() { 2 | 3 | let args = process.argv.slice(2) 4 | await new Promise(resolve => setTimeout(resolve, args[0]*1000 || 0)) 5 | 6 | let Web3 = require('web3') 7 | let web3 = new Web3(new Web3.providers.HttpProvider("http://localhost:8545")) 8 | let eth = web3.eth 9 | let personal = web3.eth.personal 10 | 11 | // might be useful: process.env.PRIVATE_KEY 12 | let pkeys = [ 13 | // from run.sh: 0xb4aaffeAAcb27098d9545A3C0e36924Af9EeDfe0 14 | '12B7678FF12FE8574AB74FFD23B5B0980B64D84345F9D637C2096CA0EF587806', 15 | 16 | '93e1b31cd700c582995dba7bfcca8e9b03effa1e54168f73f618d44e2e730e9c', 17 | 'aa8564af9bef22f581e99125d1829b76c45d08e4f6f0b74d586911f4318b6776', 18 | 'd9ade5186d09f523773611fe31f16f8e7b75ff57d4879dfe38cef5125eeb3885', 19 | '54a18890db30be68ddc20424c8b20c322f325741d0af1b70b780c424fe973bdf', 20 | 'bc35f5e10eda4e0acdf5dbb2a3f6fe7bedded5526191b28b7faac35074922a1f', 21 | 'f6a401a329ff7b0ac1d09428930677fdabfc5aae5f9bc5e0f8dd863c85ef32f3', 22 | '22c4b373706e6d748c2abfc1c44dad6ad1cec0b06354259c44668a4cadd63565', 23 | 'f5f17d35eb238908b3ec3462dcf4ad8e8d84ec09c0d1587e3f5feb8a95686baa', 24 | '5abaea8f281af348587a83c05af384865507d22e111cbc865b1d2be94db84b46', 25 | 'e7b69a24a4154712874791698682acb865884f45cacc1e2100310c23b95fa781', 26 | ] 27 | 28 | let accounts = ["0x6394b37Cf80A7358b38068f0CA4760ad49983a1B"]; 29 | let block = await web3.eth.getBlock("latest"); 30 | // 
console.log(block.gasLimit); 31 | while (block.gasLimit < 6800000) { 32 | block = await web3.eth.getBlock("latest"); 33 | // console.log(block.gasLimit); 34 | } 35 | 36 | let prefunded = (await personal.getAccounts())[0] 37 | 38 | for(let i in pkeys) { 39 | let account = await personal.importRawKey(pkeys[i], '') 40 | await personal.unlockAccount(account, "", 100000000); 41 | let tx = {from: prefunded, to: account, value: web3.utils.toWei("100", "ether")} 42 | personal.sendTransaction(tx, '') 43 | 44 | console.log('created and funded account ', account) 45 | } 46 | 47 | for (let i in accounts) { 48 | let account = accounts[i]; 49 | let tx = {from: prefunded, to: account, value: web3.utils.toWei("100", "ether")} 50 | personal.sendTransaction(tx, '') 51 | 52 | console.log('created and funded account ', account) 53 | } 54 | 55 | } 56 | 57 | setup() -------------------------------------------------------------------------------- /contracts/tools/start_ganache.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | ganache-cli -l 20000000 --allowUnlimitedContractSize -s 123 -------------------------------------------------------------------------------- /contracts/tools/take_full_exit.js: -------------------------------------------------------------------------------- 1 | const ethers = require("ethers"); 2 | const path = require("path"); 3 | const fs = require("fs"); 4 | const abi_string = fs.readFileSync(path.resolve(__dirname, "../bin/contracts_PlasmaTester_sol_PlasmaTester.abi"), 'UTF-8'); 5 | const assert = require("assert"); 6 | const transactionLib = require("../lib/transaction"); 7 | const ethUtils = require("ethereumjs-util"); 8 | const BN = require("bn.js"); 9 | 10 | // const rpcEndpoint = "http://127.0.0.1:8545"; 11 | // const contractAddress = "0x4169D71D56563eA9FDE76D92185bEB7aa1Da6fB8"; 12 | const rpcEndpoint = "https://rinkeby.infura.io/48beda66075e41bda8b124c6a48fdfa0"; 13 | const contractAddress = "0x3a0768b1302357033c83E4808D1C3F69f270c463"; 14 | 15 | const privateKey = "0x12B7678FF12FE8574AB74FFD23B5B0980B64D84345F9D637C2096CA0EF587806"; 16 | const blockNumber = 2; 17 | 18 | async function fullWithdraw() { 19 | let provider = new ethers.providers.JsonRpcProvider(rpcEndpoint); 20 | let walletWithProvider = new ethers.Wallet(privateKey, provider); 21 | if (process.env.MNEMONIC !== undefined) { 22 | console.log("Using mnemonics"); 23 | walletWithProvider = ethers.Wallet.fromMnemonic(process.env.MNEMONIC); 24 | walletWithProvider = walletWithProvider.connect(provider); 25 | } 26 | const senderAddress = await walletWithProvider.getAddress(); 27 | console.log("Sending from address " + senderAddress) 28 | let contract = new ethers.Contract(contractAddress, abi_string, walletWithProvider); 29 | const existingID = await contract.ethereumAddressToAccountID(senderAddress); 30 | console.log("This ethereum account has an id = " + existingID.toString(10)); 31 | const balanceForWithdraw = await contract.fullExits(blockNumber, existingID); 32 | console.log("Balance for full exit = " + balanceForWithdraw.toString(10)); 33 | const lastVerifiedBlockNumber = await contract.lastVerifiedBlockNumber(); 34 | console.log("Last verified block = " + lastVerifiedBlockNumber); 35 | const tx = await contract.withdrawFullExitBalance(blockNumber); 36 | console.log("Result = ", tx.hash); 37 | const result = await tx.wait(); 38 | } 39 | 40 | async function run() { 41 | await fullWithdraw(); 42 | } 43 | 44 | run().then() 
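The tools in this directory (do_deposit.js, do_exit.js, take_full_exit.js and take_partial_withdraw.js below) all repeat the same ethers v4 provider/wallet/contract boilerplate around a hardcoded Rinkeby endpoint and private key. A minimal sketch of that shared setup driven by environment variables instead — the variable names mirror the commented-out lines in do_deposit.js, and connectPlasmaTester is a hypothetical helper, not a module that exists in this repo:

const ethers = require("ethers");
const path = require("path");
const fs = require("fs");

// Hypothetical helper: same ethers v4 calls as the tools above, but all
// connection details are taken from the environment instead of constants.
function connectPlasmaTester() {
    const abi = fs.readFileSync(
        path.resolve(__dirname, "../bin/contracts_PlasmaTester_sol_PlasmaTester.abi"), "UTF-8");
    const provider = new ethers.providers.JsonRpcProvider(process.env.WEB3_URL);
    let wallet = process.env.MNEMONIC !== undefined
        ? ethers.Wallet.fromMnemonic(process.env.MNEMONIC)
        : new ethers.Wallet(process.env.PRIVATE_KEY);
    wallet = wallet.connect(provider);
    return new ethers.Contract(process.env.CONTRACT_ADDRESS, abi, wallet);
}

// Usage, e.g. for the partial-exit flow shown in the next file:
//   const contract = connectPlasmaTester();
//   const tx = await contract.withdrawPartialExitBalance(blockNumber);
//   await tx.wait();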
-------------------------------------------------------------------------------- /contracts/tools/take_partial_withdraw.js: -------------------------------------------------------------------------------- 1 | const ethers = require("ethers"); 2 | const path = require("path"); 3 | const fs = require("fs"); 4 | const abi_string = fs.readFileSync(path.resolve(__dirname, "../bin/contracts_PlasmaTester_sol_PlasmaTester.abi"), 'UTF-8'); 5 | const assert = require("assert"); 6 | const transactionLib = require("../lib/transaction"); 7 | const ethUtils = require("ethereumjs-util"); 8 | const BN = require("bn.js"); 9 | 10 | // const rpcEndpoint = "http://127.0.0.1:8545"; 11 | // const contractAddress = "0x4169D71D56563eA9FDE76D92185bEB7aa1Da6fB8"; 12 | const rpcEndpoint = "https://rinkeby.infura.io/48beda66075e41bda8b124c6a48fdfa0"; 13 | const contractAddress = "0x2A8BadcC3d128d814AaEA66a89a6ba3e101D1761"; 14 | 15 | const privateKey = "0x12B7678FF12FE8574AB74FFD23B5B0980B64D84345F9D637C2096CA0EF587806"; 16 | const blockNumber = 2; 17 | 18 | async function partialWithdraw() { 19 | let provider = new ethers.providers.JsonRpcProvider(rpcEndpoint); 20 | let walletWithProvider = new ethers.Wallet(privateKey, provider); 21 | if (process.env.MNEMONIC !== undefined) { 22 | console.log("Using mnemonics"); 23 | walletWithProvider = ethers.Wallet.fromMnemonic(process.env.MNEMONIC); 24 | walletWithProvider = walletWithProvider.connect(provider); 25 | } 26 | const senderAddress = await walletWithProvider.getAddress(); 27 | console.log("Sending from address " + senderAddress) 28 | let contract = new ethers.Contract(contractAddress, abi_string, walletWithProvider); 29 | const existingID = await contract.ethereumAddressToAccountID(senderAddress); 30 | console.log("This ethereum account has an id = " + existingID.toString(10)); 31 | const balanceForWithdraw = await contract.partialExits(blockNumber, existingID); 32 | console.log("Balance for partial exit = " + balanceForWithdraw.toString(10)); 33 | const lastVerifiedBlockNumber = await contract.lastVerifiedBlockNumber(); 34 | console.log("Last verified block = " + lastVerifiedBlockNumber); 35 | const tx = await contract.withdrawPartialExitBalance(blockNumber); 36 | console.log("Result = ", tx.hash); 37 | const result = await tx.wait(); 38 | } 39 | 40 | async function run() { 41 | await partialWithdraw(); 42 | } 43 | 44 | run().then() -------------------------------------------------------------------------------- /contracts/truffle-config.js: -------------------------------------------------------------------------------- 1 | /* 2 | * NB: since truffle-hdwallet-provider 0.0.5 you must wrap HDWallet providers in a 3 | * function when declaring them. Failure to do so will cause commands to hang. ex: 4 | * ``` 5 | * mainnet: { 6 | * provider: function() { 7 | * return new HDWalletProvider(mnemonic, 'https://mainnet.infura.io/') 8 | * }, 9 | * network_id: '1', 10 | * gas: 4500000, 11 | * gasPrice: 10000000000, 12 | * }, 13 | */ 14 | 15 | module.exports = { 16 | // See 17 | // to customize your Truffle configuration! 18 | compilers: { 19 | solc: { 20 | version: "0.4.24" // ex: "0.4.20". 
(Default: Truffle's installed solc) 21 | } 22 | }, 23 | 24 | networks: { 25 | 26 | universal: { 27 | network_id: '9', 28 | gas: 6900000, 29 | provider: function() { 30 | const HDWalletProvider = require("truffle-hdwallet-provider"); 31 | let url = `${process.env.WEB3_URL}` 32 | let mnemonic = process.env.MNEMONIC 33 | return new HDWalletProvider(mnemonic, url) 34 | }, 35 | }, 36 | 37 | mainnet: { 38 | network_id: '1', 39 | gas: 6900000, 40 | provider: function() { 41 | const HDWalletProvider = require("truffle-hdwallet-provider"); 42 | let url = `${process.env.WEB3_URL}` 43 | let mnemonic = process.env.MNEMONIC 44 | return new HDWalletProvider(mnemonic, url) 45 | }, 46 | }, 47 | 48 | rinkeby: { 49 | network_id: '4', 50 | gas: 6900000, 51 | provider: function() { 52 | const HDWalletProvider = require("truffle-hdwallet-provider"); 53 | let url = `${process.env.WEB3_URL}` 54 | let mnemonic = process.env.MNEMONIC 55 | return new HDWalletProvider(mnemonic, url) 56 | }, 57 | }, 58 | 59 | 60 | // rinkeby: { 61 | // network_id: 4, 62 | // gas: 6900000, 63 | // provider: function() { 64 | // const HDWalletProvider = require("truffle-hdwallet-provider"); 65 | // //let url = `https://rinkeby.infura.io/v3/${process.env.INFURA_PROJECT_ID}` 66 | // let url = `${process.env.WEB3_URL}` 67 | // let mnemonic = process.env.MNEMONIC 68 | // return new HDWalletProvider(mnemonic, url) 69 | // }, 70 | // }, 71 | 72 | } 73 | }; 74 | -------------------------------------------------------------------------------- /core/circuit/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "circuit" 3 | version = "0.1.1" 4 | edition = "2018" 5 | 6 | authors = [ 7 | "Alex Gluchowski ", 8 | "Alex Vlasov " 9 | ] 10 | 11 | [dependencies] 12 | merkle_tree = { path = "../merkle_tree" } 13 | models = { path = "../models" } 14 | pairing = { package = "pairing_ce", version = "0.17.0" } 15 | bellman = { package = "bellman_ce", version = "0.3.0" } 16 | sapling_crypto = { package = "sapling-crypto_ce", version = "0.0.5" } 17 | ff = { package = "ff_ce", version = "0.6.0", features = ["derive"] } 18 | rand = "0.4" 19 | rust-crypto = "0.2" 20 | hex = "0.3.2" 21 | 22 | [dev-dependencies] 23 | log = "0.4" 24 | -------------------------------------------------------------------------------- /core/circuit/src/cheque/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod bitwindow; 2 | -------------------------------------------------------------------------------- /core/circuit/src/deposit/deposit_request.rs: -------------------------------------------------------------------------------- 1 | use ff::{BitIterator, PrimeField}; 2 | use models::plasma::params as plasma_constants; 3 | use sapling_crypto::jubjub::{edwards, edwards::Point, JubjubEngine, Unknown}; 4 | 5 | // This is deposit request 6 | 7 | #[derive(Clone)] 8 | pub struct DepositRequest { 9 | pub into: Option, 10 | pub amount: Option, 11 | // here it's only for ease of data encoding 12 | pub public_key: Option>, 13 | } 14 | 15 | impl DepositRequest { 16 | pub fn verify_public_key(&self, params: &E::Params) -> bool { 17 | { 18 | if self.public_key.is_none() { 19 | return false; 20 | } 21 | } 22 | let pk = self.public_key.clone().unwrap(); 23 | let order_check = pk.mul(E::Fs::char(), params); 24 | order_check.eq(&Point::zero()) 25 | } 26 | 27 | // this function returns public data in Ethereum compatible format 28 | pub fn public_data_into_bits(&self) -> Vec { 29 | // fields 
are 30 | // - into 31 | // - amount 32 | // - compressed public key 33 | let mut into: Vec = BitIterator::new(self.into.unwrap().into_repr()).collect(); 34 | into.reverse(); 35 | into.truncate(plasma_constants::BALANCE_TREE_DEPTH); 36 | // reverse again to have BE as in Ethereum native types 37 | into.reverse(); 38 | 39 | let mut amount: Vec = BitIterator::new(self.amount.unwrap().into_repr()).collect(); 40 | amount.reverse(); 41 | amount.truncate(plasma_constants::BALANCE_BIT_WIDTH); 42 | // reverse again to have BE as in Ethereum native types 43 | amount.reverse(); 44 | 45 | // pack public key to reduce the amount of data 46 | let (y, sign_bit) = self.public_key.clone().unwrap().compress_into_y(); 47 | let mut y_bits: Vec = BitIterator::new(y.into_repr()).collect(); 48 | y_bits.reverse(); 49 | y_bits.truncate(E::Fr::NUM_BITS as usize); 50 | y_bits.resize(plasma_constants::FR_BIT_WIDTH - 1, false); 51 | // push sign bit 52 | y_bits.push(sign_bit); 53 | // reverse again to have BE as in Ethereum native types 54 | y_bits.reverse(); 55 | 56 | let mut packed: Vec = vec![]; 57 | packed.extend(into.into_iter()); 58 | packed.extend(amount.into_iter()); 59 | packed.extend(y_bits.into_iter()); 60 | 61 | packed 62 | } 63 | 64 | pub fn data_as_bytes(&self) -> Vec { 65 | let raw_data: Vec = self.public_data_into_bits(); 66 | 67 | let mut message_bytes: Vec = vec![]; 68 | 69 | let byte_chunks = raw_data.chunks(8); 70 | for byte_chunk in byte_chunks { 71 | let mut byte = 0u8; 72 | for (i, bit) in byte_chunk.iter().enumerate() { 73 | if *bit { 74 | byte |= 1 << i; 75 | } 76 | } 77 | message_bytes.push(byte); 78 | } 79 | 80 | message_bytes 81 | } 82 | } 83 | -------------------------------------------------------------------------------- /core/circuit/src/deposit/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod circuit; 2 | pub mod deposit_request; 3 | -------------------------------------------------------------------------------- /core/circuit/src/encoder.rs: -------------------------------------------------------------------------------- 1 | //use super::{DepositBlock, TransferBlock, ExitBlock}; 2 | use crate::{CircuitDepositRequest, CircuitExitRequest, CircuitTransferTx}; 3 | use models::plasma::block::{Block, BlockData}; 4 | use models::plasma::circuit::utils::be_bit_vector_into_bytes; 5 | 6 | fn convert_transfer( 7 | transactions: &[models::plasma::tx::TransferTx], 8 | ) -> Result>, String> { 9 | transactions 10 | .iter() 11 | .map(|tx| CircuitTransferTx::try_from(tx).map(|tx| tx.public_data_into_bits())) 12 | .collect() 13 | } 14 | 15 | fn convert_deposit( 16 | transactions: &[models::plasma::tx::DepositTx], 17 | ) -> Result>, String> { 18 | transactions 19 | .iter() 20 | .map(|tx| CircuitDepositRequest::try_from(tx).map(|tx| tx.public_data_into_bits())) 21 | .collect() 22 | } 23 | 24 | fn convert_exit(transactions: &[models::plasma::tx::ExitTx]) -> Result>, String> { 25 | transactions 26 | .iter() 27 | .map(|tx| CircuitExitRequest::try_from(tx).map(|tx| tx.public_data_into_bits())) 28 | .collect() 29 | } 30 | 31 | pub fn encode_transactions(block: &Block) -> Result, String> { 32 | let mut encoding: Vec = vec![]; 33 | 34 | let transactions_bits: Vec> = match &block.block_data { 35 | BlockData::Transfer { transactions, .. } => convert_transfer(transactions)?, 36 | BlockData::Deposit { transactions, .. } => convert_deposit(transactions)?, 37 | BlockData::Exit { transactions, .. 
} => convert_exit(transactions)?, 38 | }; 39 | 40 | for tx_bits in transactions_bits { 41 | let tx_encoding = be_bit_vector_into_bytes(&tx_bits); 42 | encoding.extend(tx_encoding.into_iter()); 43 | } 44 | 45 | Ok(encoding) 46 | } 47 | -------------------------------------------------------------------------------- /core/circuit/src/exit/exit_request.rs: -------------------------------------------------------------------------------- 1 | use ff::{BitIterator, PrimeField}; 2 | use models::plasma::params as plasma_constants; 3 | use sapling_crypto::jubjub::JubjubEngine; 4 | 5 | // This is an exit request 6 | 7 | #[derive(Clone)] 8 | pub struct ExitRequest { 9 | pub from: Option, 10 | // keep an amount in request for ease of public data serialization 11 | // it's NOT USED in a zkSNARK 12 | pub amount: Option, 13 | } 14 | 15 | impl ExitRequest { 16 | pub fn public_data_into_bits(&self) -> Vec { 17 | // fields are 18 | // - from 19 | // - amount 20 | // - compressed public key 21 | let mut from: Vec = BitIterator::new(self.from.unwrap().into_repr()).collect(); 22 | from.reverse(); 23 | from.truncate(plasma_constants::BALANCE_TREE_DEPTH); 24 | // reverse again to have BE as in Ethereum native types 25 | from.reverse(); 26 | 27 | let mut amount: Vec = BitIterator::new(self.amount.unwrap().into_repr()).collect(); 28 | amount.reverse(); 29 | amount.truncate(plasma_constants::BALANCE_BIT_WIDTH); 30 | // reverse again to have BE as in Ethereum native types 31 | amount.reverse(); 32 | 33 | let mut packed: Vec = vec![]; 34 | packed.extend(from.into_iter()); 35 | packed.extend(amount.into_iter()); 36 | 37 | packed 38 | } 39 | 40 | pub fn data_as_bytes(&self) -> Vec { 41 | let raw_data: Vec = self.public_data_into_bits(); 42 | 43 | let mut message_bytes: Vec = vec![]; 44 | 45 | let byte_chunks = raw_data.chunks(8); 46 | for byte_chunk in byte_chunks { 47 | let mut byte = 0u8; 48 | for (i, bit) in byte_chunk.iter().enumerate() { 49 | if *bit { 50 | byte |= 1 << i; 51 | } 52 | } 53 | message_bytes.push(byte); 54 | } 55 | 56 | message_bytes 57 | } 58 | } 59 | -------------------------------------------------------------------------------- /core/circuit/src/exit/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod circuit; 2 | pub mod exit_request; 3 | -------------------------------------------------------------------------------- /core/circuit/src/leaf.rs: -------------------------------------------------------------------------------- 1 | use bellman::{ConstraintSystem, SynthesisError}; 2 | use models::plasma::circuit::utils::append_packed_public_key; 3 | use models::plasma::params as plasma_constants; 4 | use sapling_crypto::circuit::num::AllocatedNum; 5 | use sapling_crypto::circuit::{boolean, Assignment}; 6 | use sapling_crypto::jubjub::JubjubEngine; 7 | 8 | #[derive(Clone)] 9 | pub struct LeafWitness { 10 | pub balance: Option, 11 | pub nonce: Option, 12 | // x coordinate is supplied and parity is constrained 13 | pub pub_x: Option, 14 | pub pub_y: Option, 15 | } 16 | 17 | pub struct LeafContent { 18 | pub leaf_bits: Vec, 19 | pub value: AllocatedNum, 20 | pub value_bits: Vec, 21 | pub nonce: AllocatedNum, 22 | pub nonce_bits: Vec, 23 | pub pub_x: AllocatedNum, 24 | pub pub_y: AllocatedNum, 25 | pub pub_x_bit: Vec, 26 | pub pub_y_bits: Vec, 27 | } 28 | 29 | pub fn make_leaf_content( 30 | mut cs: CS, 31 | witness: LeafWitness, 32 | ) -> Result, SynthesisError> 33 | where 34 | E: JubjubEngine, 35 | CS: ConstraintSystem, 36 | { 37 | let mut leaf_bits = 
vec![]; 38 | 39 | let value = AllocatedNum::alloc(cs.namespace(|| "allocate leaf value witness"), || { 40 | Ok(*witness.balance.get()?) 41 | })?; 42 | 43 | let mut value_bits = value.into_bits_le(cs.namespace(|| "value bits"))?; 44 | 45 | value_bits.truncate(plasma_constants::BALANCE_BIT_WIDTH); 46 | leaf_bits.extend(value_bits.clone()); 47 | 48 | let nonce = AllocatedNum::alloc(cs.namespace(|| "allocate leaf nonce witness"), || { 49 | Ok(*witness.nonce.get()?) 50 | })?; 51 | 52 | let mut nonce_bits = nonce.into_bits_le(cs.namespace(|| "nonce bits"))?; 53 | 54 | nonce_bits.truncate(plasma_constants::NONCE_BIT_WIDTH); 55 | leaf_bits.extend(nonce_bits.clone()); 56 | 57 | // we allocate (witness) public X and Y to use them also later for signature check 58 | 59 | let pub_x = AllocatedNum::alloc(cs.namespace(|| "allocate public key x witness"), || { 60 | Ok(*witness.pub_x.get()?) 61 | })?; 62 | 63 | let pub_y = AllocatedNum::alloc(cs.namespace(|| "allcoate public key y witness"), || { 64 | Ok(*witness.pub_y.get()?) 65 | })?; 66 | 67 | let mut pub_x_bit = pub_x.into_bits_le(cs.namespace(|| "pub_x bits"))?; 68 | // leave only the parity bit 69 | pub_x_bit.truncate(1); 70 | 71 | let mut pub_y_bits = pub_y.into_bits_le(cs.namespace(|| "pub_y bits"))?; 72 | pub_y_bits.resize( 73 | plasma_constants::FR_BIT_WIDTH - 1, 74 | boolean::Boolean::Constant(false), 75 | ); 76 | 77 | append_packed_public_key(&mut leaf_bits, pub_x_bit.clone(), pub_y_bits.clone()); 78 | 79 | // leaf_bits.extend(pub_y_bits); 80 | // leaf_bits.extend(pub_x_bit); 81 | 82 | assert_eq!( 83 | leaf_bits.len(), 84 | plasma_constants::BALANCE_BIT_WIDTH 85 | + plasma_constants::NONCE_BIT_WIDTH 86 | + plasma_constants::FR_BIT_WIDTH 87 | ); 88 | 89 | Ok(LeafContent { 90 | leaf_bits, 91 | value, 92 | value_bits, 93 | nonce, 94 | nonce_bits, 95 | pub_x, 96 | pub_y, 97 | pub_x_bit, 98 | pub_y_bits, 99 | }) 100 | } 101 | -------------------------------------------------------------------------------- /core/circuit/src/lib.rs: -------------------------------------------------------------------------------- 1 | pub mod cheque; 2 | pub mod deposit; 3 | pub mod encoder; 4 | pub mod exit; 5 | pub mod leaf; 6 | pub mod plasma_constants; 7 | pub mod transfer; 8 | 9 | use merkle_tree::{PedersenHasher, SparseMerkleTree}; 10 | use models::plasma::circuit::account::CircuitAccount; 11 | use pairing::bn256::{Bn256, Fr}; 12 | 13 | pub type CircuitAccountTree = SparseMerkleTree, Fr, PedersenHasher>; 14 | pub type CircuitTransferTx = models::plasma::circuit::transfer::Tx; 15 | pub type CircuitDepositRequest = models::plasma::circuit::deposit::DepositRequest; 16 | pub type CircuitExitRequest = models::plasma::circuit::exit::ExitRequest; 17 | -------------------------------------------------------------------------------- /core/circuit/src/plasma_constants.rs: -------------------------------------------------------------------------------- 1 | /// Balance tree depth 2 | pub const BALANCE_TREE_DEPTH: &usize = &24; 3 | 4 | /// Balance bit width 5 | pub const BALANCE_BIT_WIDTH: &usize = &128; 6 | 7 | /// Nonce bit width 8 | pub const NONCE_BIT_WIDTH: &usize = &32; 9 | 10 | /// Block number bit width 11 | pub const BLOCK_NUMBER_BIT_WIDTH: &usize = &32; 12 | 13 | /// Amount bit widths 14 | pub const AMOUNT_EXPONENT_BIT_WIDTH: &usize = &5; 15 | pub const AMOUNT_MANTISSA_BIT_WIDTH: &usize = &11; 16 | 17 | /// Fee bit widths 18 | pub const FEE_EXPONENT_BIT_WIDTH: &usize = &5; 19 | pub const FEE_MANTISSA_BIT_WIDTH: &usize = &3; 20 | 21 | // Signature data 22 | pub const 
SIGNATURE_S_BIT_WIDTH: &usize = &256; 23 | pub const SIGNATURE_R_X_BIT_WIDTH: &usize = &256; 24 | pub const SIGNATURE_R_Y_BIT_WIDTH: &usize = &256; 25 | 26 | // Fr element encoding 27 | pub const FR_BIT_WIDTH: &usize = &256; 28 | -------------------------------------------------------------------------------- /core/circuit/src/transfer/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod circuit; 2 | pub mod transaction; 3 | -------------------------------------------------------------------------------- /core/eth_client/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "eth_client" 3 | version = "0.1.0" 4 | edition = "2018" 5 | 6 | [dependencies] 7 | models = { path = "../models" } 8 | serde = "1.0.90" 9 | serde_derive = "1.0.90" 10 | ethereum-types = "0.4.0" 11 | ethabi = "6.1.0" 12 | web3 = "0.6.0" 13 | ethereum-tx-sign = "0.0.2" 14 | hex = "0.3.2" 15 | reqwest = "0.9.5" 16 | rlp = "0.3.0" 17 | tiny-keccak = "1.4.2" 18 | secp256k1 = "0.11.1" 19 | log = "0.4" 20 | -------------------------------------------------------------------------------- /core/key_generator/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "key_generator" 3 | version = "0.1.1" 4 | edition = "2018" 5 | 6 | authors = [ 7 | "Alex Gluchowski ", 8 | "Alex Vlasov " 9 | ] 10 | 11 | [dependencies] 12 | rand = "0.4" 13 | time = "0.1" 14 | pairing = { package = "pairing_ce", version = "0.17.0" } 15 | sapling_crypto = { package = "sapling-crypto_ce", version = "0.0.5" } 16 | bellman = { package = "bellman_ce", version = "0.3.0" } 17 | circuit = { path = "../circuit" } 18 | models = { path = "../models" } 19 | hex = "0.3.2" 20 | rust-crypto = "0.2" 21 | ff = { package = "ff_ce", version = "0.6.0", features = ["derive"] } 22 | log = "0.4" 23 | env_logger = "0.6" 24 | -------------------------------------------------------------------------------- /core/key_generator/src/depositor_key.rs: -------------------------------------------------------------------------------- 1 | use bellman; 2 | 3 | use time::PreciseTime; 4 | 5 | use pairing::bn256::*; 6 | use rand::OsRng; 7 | use sapling_crypto::alt_babyjubjub::AltJubjubBn256; 8 | 9 | use bellman::groth16::generate_random_parameters; 10 | 11 | use crate::vk_contract_generator::generate_vk_contract; 12 | 13 | use circuit::deposit::circuit::{Deposit, DepositWitness}; 14 | use circuit::deposit::deposit_request::DepositRequest; 15 | use circuit::leaf::LeafWitness; 16 | use models::plasma::params as plasma_constants; 17 | 18 | const DEPOSIT_BATCH_SIZE: usize = 1; 19 | const FILENAME: &str = "deposit_pk.key"; 20 | const CONTRACT_FILENAME: &str = "DepositVerificationKey.sol"; 21 | const CONTRACT_NAME: &str = "DepositVerificationKey"; 22 | const CONTRACT_FUNCTION_NAME: &str = "getVkDepositCircuit"; 23 | 24 | pub fn make_depositor_key() { 25 | // let p_g = FixedGenerators::SpendingKeyGenerator; 26 | let params = &AltJubjubBn256::new(); 27 | // let rng = &mut XorShiftRng::from_seed([0x3dbe6258, 0x8d313d76, 0x3237db17, 0xe5bc0654]); 28 | let rng = &mut OsRng::new().unwrap(); 29 | 30 | let empty_request = DepositRequest { 31 | into: None, 32 | amount: None, 33 | public_key: None, 34 | }; 35 | 36 | let empty_leaf_witness = LeafWitness { 37 | balance: None, 38 | nonce: None, 39 | pub_x: None, 40 | pub_y: None, 41 | }; 42 | 43 | let empty_witness = DepositWitness { 44 | leaf: empty_leaf_witness.clone(), 45 | auth_path: 
vec![None; plasma_constants::BALANCE_TREE_DEPTH], 46 | leaf_is_empty: None, 47 | new_pub_x: None, 48 | new_pub_y: None, 49 | }; 50 | 51 | let instance_for_generation: Deposit<'_, Bn256> = Deposit { 52 | params, 53 | number_of_deposits: DEPOSIT_BATCH_SIZE, 54 | old_root: None, 55 | new_root: None, 56 | public_data_commitment: None, 57 | block_number: None, 58 | requests: vec![(empty_request, empty_witness); DEPOSIT_BATCH_SIZE], 59 | }; 60 | 61 | info!("generating setup..."); 62 | let start = PreciseTime::now(); 63 | let tmp_cirtuit_params = generate_random_parameters(instance_for_generation, rng).unwrap(); 64 | info!( 65 | "setup generated in {} s", 66 | start.to(PreciseTime::now()).num_milliseconds() as f64 / 1000.0 67 | ); 68 | 69 | use std::fs::File; 70 | use std::io::{BufWriter, Write}; 71 | { 72 | let f = File::create(FILENAME).expect("Unable to create file"); 73 | let mut f = BufWriter::new(f); 74 | tmp_cirtuit_params 75 | .write(&mut f) 76 | .expect("Unable to write proving key"); 77 | } 78 | 79 | use std::io::BufReader; 80 | 81 | let f_r = File::open(FILENAME).expect("Unable to open file"); 82 | let mut r = BufReader::new(f_r); 83 | let circuit_params = bellman::groth16::Parameters::::read(&mut r, true) 84 | .expect("Unable to read proving key"); 85 | 86 | let contract_content = generate_vk_contract( 87 | &circuit_params.vk, 88 | CONTRACT_NAME.to_string(), 89 | CONTRACT_FUNCTION_NAME.to_string(), 90 | ); 91 | 92 | let f_cont = File::create(CONTRACT_FILENAME).expect("Unable to create file"); 93 | let mut f_cont = BufWriter::new(f_cont); 94 | f_cont 95 | .write_all(contract_content.as_bytes()) 96 | .expect("Unable to write contract"); 97 | 98 | info!("Done"); 99 | } 100 | -------------------------------------------------------------------------------- /core/key_generator/src/exitor_key.rs: -------------------------------------------------------------------------------- 1 | use bellman; 2 | 3 | use time::PreciseTime; 4 | 5 | use pairing::bn256::*; 6 | use rand::OsRng; 7 | use sapling_crypto::alt_babyjubjub::AltJubjubBn256; 8 | 9 | use bellman::groth16::generate_random_parameters; 10 | 11 | use crate::vk_contract_generator::generate_vk_contract; 12 | 13 | use circuit::exit::circuit::{Exit, ExitWitness}; 14 | use circuit::exit::exit_request::ExitRequest; 15 | use circuit::leaf::LeafWitness; 16 | use models::plasma::params as plasma_constants; 17 | 18 | const EXIT_BATCH_SIZE: usize = 1; 19 | const FILENAME: &str = "exit_pk.key"; 20 | const CONTRACT_FILENAME: &str = "ExitVerificationKey.sol"; 21 | const CONTRACT_NAME: &str = "ExitVerificationKey"; 22 | const CONTRACT_FUNCTION_NAME: &str = "getVkExitCircuit"; 23 | 24 | pub fn make_exitor_key() { 25 | // let p_g = FixedGenerators::SpendingKeyGenerator; 26 | let params = &AltJubjubBn256::new(); 27 | // let rng = &mut XorShiftRng::from_seed([0x3dbe6258, 0x8d313d76, 0x3237db17, 0xe5bc0654]); 28 | let rng = &mut OsRng::new().unwrap(); 29 | 30 | let empty_request = ExitRequest { 31 | from: None, 32 | amount: None, 33 | }; 34 | 35 | let empty_leaf_witness = LeafWitness { 36 | balance: None, 37 | nonce: None, 38 | pub_x: None, 39 | pub_y: None, 40 | }; 41 | 42 | let empty_witness = ExitWitness { 43 | leaf: empty_leaf_witness.clone(), 44 | auth_path: vec![None; plasma_constants::BALANCE_TREE_DEPTH], 45 | }; 46 | 47 | let instance_for_generation: Exit<'_, Bn256> = Exit { 48 | params, 49 | number_of_exits: EXIT_BATCH_SIZE, 50 | old_root: None, 51 | new_root: None, 52 | public_data_commitment: None, 53 | empty_leaf_witness: empty_leaf_witness.clone(), 
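            // All witness fields of this instance are deliberately `None`: it is only
            // handed to `generate_random_parameters` below so bellman can lay out the
            // Exit circuit's constraints during key generation; concrete witnesses are
            // supplied later, at proving time.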
54 | block_number: None, 55 | requests: vec![(empty_request, empty_witness); EXIT_BATCH_SIZE], 56 | }; 57 | 58 | info!("generating setup..."); 59 | let start = PreciseTime::now(); 60 | let tmp_cirtuit_params = generate_random_parameters(instance_for_generation, rng).unwrap(); 61 | info!( 62 | "setup generated in {} s", 63 | start.to(PreciseTime::now()).num_milliseconds() as f64 / 1000.0 64 | ); 65 | 66 | use std::fs::File; 67 | use std::io::{BufWriter, Write}; 68 | { 69 | let f = File::create(FILENAME).expect("Unable to create file"); 70 | let mut f = BufWriter::new(f); 71 | tmp_cirtuit_params 72 | .write(&mut f) 73 | .expect("Unable to write proving key"); 74 | } 75 | 76 | use std::io::BufReader; 77 | 78 | let f_r = File::open(FILENAME).expect("Unable to open file"); 79 | let mut r = BufReader::new(f_r); 80 | let circuit_params = bellman::groth16::Parameters::::read(&mut r, true) 81 | .expect("Unable to read proving key"); 82 | 83 | let contract_content = generate_vk_contract( 84 | &circuit_params.vk, 85 | CONTRACT_NAME.to_string(), 86 | CONTRACT_FUNCTION_NAME.to_string(), 87 | ); 88 | 89 | let f_cont = File::create(CONTRACT_FILENAME).expect("Unable to create file"); 90 | let mut f_cont = BufWriter::new(f_cont); 91 | f_cont 92 | .write_all(contract_content.as_bytes()) 93 | .expect("Unable to write contract"); 94 | 95 | info!("Done"); 96 | } 97 | -------------------------------------------------------------------------------- /core/key_generator/src/main.rs: -------------------------------------------------------------------------------- 1 | #[macro_use] 2 | extern crate log; 3 | 4 | pub mod depositor_key; 5 | pub mod exitor_key; 6 | pub mod read_write_keys; 7 | pub mod transactor_key; 8 | pub mod vk_contract_generator; 9 | 10 | use depositor_key::make_depositor_key; 11 | use exitor_key::make_exitor_key; 12 | use transactor_key::make_transactor_key; 13 | 14 | fn main() { 15 | env_logger::init(); 16 | 17 | make_depositor_key(); 18 | make_exitor_key(); 19 | make_transactor_key(); 20 | } 21 | -------------------------------------------------------------------------------- /core/key_generator/src/vk_contract_generator.rs: -------------------------------------------------------------------------------- 1 | // Library to generate a EVM verifier contract 2 | 3 | use bellman::groth16; 4 | use pairing::{CurveAffine, Engine}; 5 | 6 | // fn unpack(t: &T) -> Vec 7 | // { 8 | // t.into_uncompressed().as_ref().chunks(32).map(|c| "0x".to_owned() + &hex::encode(c)).collect() 9 | // } 10 | 11 | fn unpack_g1(point: &E::G1Affine) -> Vec { 12 | let uncompressed = point.into_uncompressed(); 13 | let uncompressed_slice = uncompressed.as_ref(); 14 | 15 | uncompressed_slice 16 | .chunks(32) 17 | .map(|c| "0x".to_owned() + &hex::encode(c)) 18 | .collect() 19 | } 20 | 21 | fn unpack_g2(point: &E::G2Affine) -> Vec { 22 | let uncompressed = point.into_uncompressed(); 23 | let uncompressed_slice = uncompressed.as_ref(); 24 | uncompressed_slice 25 | .chunks(32) 26 | .map(|c| "0x".to_owned() + &hex::encode(c)) 27 | .collect() 28 | 29 | // let to_reorder: Vec = uncompressed_slice.chunks(32).map(|c| "0x".to_owned() + &hex::encode(c)).collect(); 30 | 31 | // vec![to_reorder[1].clone(), to_reorder[0].clone(), to_reorder[3].clone(), to_reorder[2].clone()] 32 | } 33 | 34 | const SHIFT: &str = " "; 35 | 36 | fn render_array(name: &str, allocate: bool, values: &[Vec]) -> String { 37 | let mut out = String::new(); 38 | out.push('\n'); 39 | let flattened: Vec<&String> = values.iter().flatten().collect(); 40 | if allocate { 41 | 
out.push_str(&format!( 42 | "{}{} = new uint256[]({});\n", 43 | SHIFT, 44 | name, 45 | flattened.len() 46 | )); 47 | } 48 | for (i, s) in flattened.iter().enumerate() { 49 | out.push_str(&format!("{}{}[{}] = {};\n", SHIFT, name, i, s)); 50 | } 51 | out 52 | } 53 | 54 | pub fn hardcode_vk(vk: &groth16::VerifyingKey) -> String { 55 | let mut out = String::new(); 56 | 57 | let values = &[ 58 | unpack_g1::(&vk.alpha_g1), 59 | unpack_g2::(&vk.beta_g2), 60 | unpack_g2::(&vk.gamma_g2), 61 | unpack_g2::(&vk.delta_g2), 62 | ]; 63 | out.push_str(&render_array("vk", false, values)); 64 | 65 | let ic: Vec> = vk.ic.iter().map(unpack_g1::).collect(); 66 | out.push_str(&render_array("gammaABC", true, ic.as_slice())); 67 | 68 | out 69 | } 70 | 71 | pub fn generate_vk_contract( 72 | vk: &groth16::VerifyingKey, 73 | contract_name: String, 74 | function_name: String, 75 | ) -> String { 76 | format!( 77 | r#" 78 | // This contract is generated programmatically 79 | 80 | pragma solidity ^0.4.24; 81 | 82 | 83 | // Hardcoded constants to avoid accessing store 84 | contract {contract_name} {{ 85 | 86 | function {function_name}() internal pure returns (uint256[14] memory vk, uint256[] memory gammaABC) {{ 87 | 88 | {vk} 89 | 90 | }} 91 | 92 | }} 93 | "#, 94 | vk = hardcode_vk(&vk), 95 | contract_name = contract_name, 96 | function_name = function_name, 97 | ) 98 | } 99 | -------------------------------------------------------------------------------- /core/merkle_tree/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "merkle_tree" 3 | version = "0.1.1" 4 | edition = "2018" 5 | 6 | authors = [ 7 | "Alex Gluchowski ", 8 | "Alex Vlasov " 9 | ] 10 | 11 | [dependencies] 12 | models = { path = "../models" } 13 | ff = { package = "ff_ce", version = "0.6.0", features = ["derive"] } 14 | pairing = { package = "pairing_ce", version = "0.17.0" } 15 | sapling_crypto = { package = "sapling-crypto_ce", version = "0.0.5" } 16 | fnv = "1.0.3" 17 | rayon = "1.0.3" 18 | rand = "0.4" 19 | 20 | [dev-dependencies] 21 | log = "0.4" 22 | 23 | -------------------------------------------------------------------------------- /core/merkle_tree/src/account_tree.rs: -------------------------------------------------------------------------------- 1 | // Plasma account (Merkle tree leaf) 2 | // TODO: - Not used in project 3 | 4 | use ff::Field; 5 | use pairing::bn256::{Bn256, Fr}; 6 | use sapling_crypto::alt_babyjubjub::JubjubEngine; 7 | 8 | use crate::merkle_tree::PedersenHasher; 9 | use crate::merkle_tree::SparseMerkleTree; 10 | use crate::models::params as plasma_constants; 11 | use crate::primitives::{GetBits, GetBitsFixed}; 12 | 13 | #[derive(Debug, Clone)] 14 | pub struct Leaf { 15 | pub balance: E::Fr, 16 | pub nonce: E::Fr, 17 | pub pub_x: E::Fr, 18 | pub pub_y: E::Fr, 19 | } 20 | 21 | impl GetBits for Leaf { 22 | fn get_bits_le(&self) -> Vec { 23 | let mut leaf_content = Vec::new(); 24 | leaf_content.extend( 25 | self.balance 26 | .get_bits_le_fixed(plasma_constants::BALANCE_BIT_WIDTH), 27 | ); 28 | leaf_content.extend( 29 | self.nonce 30 | .get_bits_le_fixed(plasma_constants::NONCE_BIT_WIDTH), 31 | ); 32 | leaf_content.extend( 33 | self.pub_y 34 | .get_bits_le_fixed(plasma_constants::FR_BIT_WIDTH - 1), 35 | ); 36 | leaf_content.extend(self.pub_x.get_bits_le_fixed(1)); 37 | 38 | leaf_content 39 | } 40 | } 41 | 42 | impl Default for Leaf { 43 | fn default() -> Self { 44 | Self { 45 | balance: E::Fr::zero(), 46 | nonce: E::Fr::zero(), 47 | pub_x: E::Fr::zero(), 48 | 
pub_y: E::Fr::zero(), 49 | } 50 | } 51 | } 52 | 53 | // code below is for testing 54 | 55 | pub type LeafAccount = Leaf; 56 | pub type LeafAccountTree = SparseMerkleTree>; 57 | 58 | impl LeafAccountTree { 59 | pub fn verify_proof(&self, index: u32, item: LeafAccount, proof: Vec<(Fr, bool)>) -> bool { 60 | use crate::merkle_tree::hasher::Hasher; 61 | 62 | assert!(index < self.capacity()); 63 | let item_bits = item.get_bits_le(); 64 | let mut hash = self.hasher.hash_bits(item_bits); 65 | let mut proof_index: u32 = 0; 66 | 67 | for (i, e) in proof.clone().into_iter().enumerate() { 68 | if e.1 { 69 | // current is right 70 | proof_index |= 1 << i; 71 | hash = self.hasher.compress(&e.0, &hash, i); 72 | } else { 73 | // current is left 74 | hash = self.hasher.compress(&hash, &e.0, i); 75 | } 76 | } 77 | 78 | if proof_index != index { 79 | return false; 80 | } 81 | 82 | hash == self.root_hash() 83 | } 84 | } 85 | 86 | #[cfg(test)] 87 | mod tests { 88 | 89 | use super::*; 90 | 91 | #[test] 92 | fn test_balance_tree() { 93 | let mut tree = LeafAccountTree::new(3); 94 | let leaf = LeafAccount { 95 | balance: Fr::zero(), 96 | nonce: Fr::one(), 97 | pub_x: Fr::one(), 98 | pub_y: Fr::one(), 99 | }; 100 | tree.insert(3, leaf); 101 | let _root = tree.root_hash(); 102 | let _path = tree.merkle_path(0); 103 | } 104 | 105 | } 106 | -------------------------------------------------------------------------------- /core/merkle_tree/src/hasher.rs: -------------------------------------------------------------------------------- 1 | // Hasher trait 2 | 3 | pub trait Hasher { 4 | fn hash_bits>(&self, value: I) -> Hash; 5 | fn compress(&self, lhs: &Hash, rhs: &Hash, i: usize) -> Hash; 6 | } 7 | -------------------------------------------------------------------------------- /core/merkle_tree/src/lib.rs: -------------------------------------------------------------------------------- 1 | pub mod hasher; 2 | pub mod parallel_smt; 3 | pub mod pedersen_hasher; 4 | pub mod sequential_smt; 5 | 6 | use models::plasma::account::Account; 7 | use models::plasma::{Engine, Fr}; 8 | 9 | pub type SparseMerkleTree = sequential_smt::SparseMerkleTree; 10 | pub type PedersenHasher = pedersen_hasher::PedersenHasher; 11 | 12 | pub type AccountTree = SparseMerkleTree>; 13 | 14 | // TODO: return the code below and uncomment asserts 15 | 16 | // pub fn verify_proof(&self, index: u32, item: Account, proof: Vec<(E::Fr, bool)>) -> bool { 17 | // use crate::sparse_merkle_tree::hasher::Hasher; 18 | 19 | // assert!(index < self.capacity()); 20 | // let item_bits = item.get_bits_le(); 21 | // let mut hash = self.hasher.hash_bits(item_bits); 22 | // let mut proof_index: u32 = 0; 23 | 24 | // for (i, e) in proof.clone().into_iter().enumerate() { 25 | // if e.1 { 26 | // // current is right 27 | // proof_index |= 1 << i; 28 | // hash = self.hasher.compress(&e.0, &hash, i); 29 | // } else { 30 | // // current is left 31 | // hash = self.hasher.compress(&hash, &e.0, i); 32 | // } 33 | // // print!("This level hash is {}\n", hash); 34 | // } 35 | 36 | // if proof_index != index { 37 | // return false; 38 | // } 39 | 40 | // hash == self.root_hash() 41 | // } 42 | -------------------------------------------------------------------------------- /core/merkle_tree/src/pedersen_hasher.rs: -------------------------------------------------------------------------------- 1 | // Pedersen hash implementation of the Hasher trait 2 | 3 | use ff::PrimeField; 4 | use sapling_crypto::pedersen_hash::{baby_pedersen_hash, Personalization}; 5 | 6 | use 
pairing::bn256::Bn256; 7 | use sapling_crypto::alt_babyjubjub::{AltJubjubBn256, JubjubEngine}; 8 | 9 | use crate::hasher::Hasher; 10 | use models::primitives::BitIteratorLe; 11 | 12 | pub struct PedersenHasher { 13 | params: E::Params, 14 | } 15 | 16 | impl Hasher for PedersenHasher { 17 | fn hash_bits>(&self, input: I) -> E::Fr { 18 | baby_pedersen_hash::(Personalization::NoteCommitment, input, &self.params) 19 | .into_xy() 20 | .0 21 | // print!("Leaf hash = {}\n", hash.clone()); 22 | } 23 | 24 | fn compress(&self, lhs: &E::Fr, rhs: &E::Fr, i: usize) -> E::Fr { 25 | let lhs = BitIteratorLe::new(lhs.into_repr()).take(E::Fr::NUM_BITS as usize); 26 | let rhs = BitIteratorLe::new(rhs.into_repr()).take(E::Fr::NUM_BITS as usize); 27 | let input = lhs.chain(rhs); 28 | baby_pedersen_hash::(Personalization::MerkleTree(i), input, &self.params) 29 | .into_xy() 30 | .0 31 | } 32 | } 33 | 34 | pub type BabyPedersenHasher = PedersenHasher; 35 | 36 | impl Default for PedersenHasher { 37 | fn default() -> Self { 38 | Self { 39 | params: AltJubjubBn256::new(), 40 | } 41 | } 42 | } 43 | 44 | #[test] 45 | fn test_pedersen_hash() { 46 | let hasher = BabyPedersenHasher::default(); 47 | 48 | let hash = hasher.hash_bits(vec![false, false, false, true, true, true, true, true]); 49 | //debug!("hash: {:?}", &hash); 50 | 51 | hasher.compress(&hash, &hash, 0); 52 | //debug!("compr: {:?}", &hash2); 53 | 54 | hasher.compress(&hash, &hash, 1); 55 | //debug!("compr: {:?}", &hash3); 56 | 57 | //assert_eq!(hasher.empty_hash(), 58 | } 59 | -------------------------------------------------------------------------------- /core/models/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "models" 3 | version = "0.0.1" 4 | edition = "2018" 5 | 6 | [dependencies] 7 | ff = { package = "ff_ce", version = "0.6.0", features = ["derive"] } 8 | bigdecimal = { version = "0.1.0", features = ["serde"]} 9 | pairing = { package = "pairing_ce", version = "0.17.0" } 10 | sapling_crypto = { package = "sapling-crypto_ce", version = "0.0.5" } 11 | bellman = { package = "bellman_ce", version = "0.3.0" } 12 | web3 = "0.6.0" 13 | serde = "1.0.90" 14 | serde_derive = "1.0.90" 15 | serde_bytes = "0.11.1" 16 | lazy_static = "1.2.0" 17 | rand = "0.4" 18 | fnv = "1.0.3" 19 | log = "0.4" 20 | -------------------------------------------------------------------------------- /core/models/src/abi.rs: -------------------------------------------------------------------------------- 1 | pub type ABI = (&'static [u8], &'static str); 2 | 3 | pub const TEST_PLASMA_ALWAYS_VERIFY: ABI = ( 4 | include_bytes!("../../../contracts/bin/contracts_PlasmaTester_sol_PlasmaTester.abi"), 5 | include_str!("../../../contracts/bin/contracts_PlasmaTester_sol_PlasmaTester.bin"), 6 | ); 7 | 8 | pub const PROD_PLASMA: ABI = ( 9 | include_bytes!("../../../contracts/bin/contracts_PlasmaContract_sol_PlasmaContract.abi"), 10 | include_str!("../../../contracts/bin/contracts_PlasmaContract_sol_PlasmaContract.bin"), 11 | ); 12 | -------------------------------------------------------------------------------- /core/models/src/config.rs: -------------------------------------------------------------------------------- 1 | pub const TRANSFER_BATCH_SIZE: usize = 8; 2 | pub const DEPOSIT_BATCH_SIZE: usize = 1; 3 | pub const EXIT_BATCH_SIZE: usize = 1; 4 | pub const PADDING_INTERVAL: u64 = 60; // sec 5 | pub const PROVER_TIMEOUT: usize = 60; // sec 6 | pub const PROVER_TIMER_TICK: u64 = 5; // sec 7 | pub const PROVER_CYCLE_WAIT: 
u64 = 5; // sec 8 | 9 | pub const DEFAULT_KEYS_PATH: &str = "keys"; 10 | 11 | lazy_static! { 12 | pub static ref RUNTIME_CONFIG: RuntimeConfig = RuntimeConfig::new(); 13 | } 14 | 15 | use std::env; 16 | 17 | #[derive(Debug, Clone)] 18 | pub struct RuntimeConfig { 19 | pub transfer_batch_size: usize, 20 | pub keys_path: String, 21 | pub max_outstanding_txs: u32, 22 | pub contract_addr: String, 23 | pub mainnet_http_endpoint_string: String, 24 | pub rinkeby_http_endpoint_string: String, 25 | pub mainnet_franklin_contract_address: String, 26 | pub rinkeby_franklin_contract_address: String, 27 | } 28 | 29 | impl RuntimeConfig { 30 | pub fn new() -> Self { 31 | let transfer_batch_size_env = 32 | env::var("TRANSFER_BATCH_SIZE").expect("TRANSFER_BATCH_SIZE env missing"); 33 | let transfer_size = usize::from_str_radix(&(transfer_batch_size_env), 10) 34 | .expect("TRANSFER_BATCH_SIZE invalid"); 35 | let keys_path = env::var("KEY_DIR") 36 | .ok() 37 | .unwrap_or_else(|| DEFAULT_KEYS_PATH.to_string()); 38 | 39 | Self { 40 | transfer_batch_size: transfer_size, 41 | keys_path, 42 | contract_addr: env::var("CONTRACT_ADDR").expect("CONTRACT_ADDR env missing"), 43 | max_outstanding_txs: env::var("MAX_OUTSTANDING_TXS") 44 | .ok() 45 | .and_then(|v| v.parse::().ok()) 46 | .expect("MAX_OUTSTANDING_TXS env var not set"), 47 | mainnet_http_endpoint_string: env::var("TREE_RESTORE_MAINNET_ENDPOINT") 48 | .expect("TREE_RESTORE_MAINNET_ENDPOINT env missing"), 49 | rinkeby_http_endpoint_string: env::var("TREE_RESTORE_RINKEBY_ENDPOINT") 50 | .expect("TREE_RESTORE_RINKEBY_ENDPOINT env missing"), 51 | mainnet_franklin_contract_address: env::var("TREE_RESTORE_MAINNET_CONTRACT_ADDR") 52 | .expect("TREE_RESTORE_MAINNET_CONTRACT_ADDR env missing"), 53 | rinkeby_franklin_contract_address: env::var("TREE_RESTORE_RINKEBY_CONTRACT_ADDR") 54 | .expect("TREE_RESTORE_RINKEBY_CONTRACT_ADDR env missing"), 55 | } 56 | } 57 | } 58 | 59 | impl Default for RuntimeConfig { 60 | fn default() -> Self { 61 | Self::new() 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /core/models/src/plasma/account.rs: -------------------------------------------------------------------------------- 1 | use crate::circuit; 2 | use crate::plasma::params; 3 | use crate::primitives::GetBits; 4 | use crate::{Engine, Fr, PublicKey}; 5 | use bigdecimal::BigDecimal; 6 | use sapling_crypto::jubjub::{edwards, Unknown}; 7 | 8 | #[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq)] 9 | pub struct Account { 10 | pub balance: BigDecimal, 11 | pub nonce: u32, 12 | pub public_key_x: Fr, 13 | pub public_key_y: Fr, 14 | } 15 | 16 | impl GetBits for Account { 17 | fn get_bits_le(&self) -> Vec { 18 | circuit::account::CircuitAccount::::from(self.clone()).get_bits_le() 19 | 20 | // TODO: make more efficient: 21 | 22 | // let mut leaf_content = Vec::new(); 23 | // leaf_content.extend(self.balance.get_bits_le_fixed(params::BALANCE_BIT_WIDTH)); 24 | // leaf_content.extend(self.nonce.get_bits_le_fixed(params::NONCE_BIT_WIDTH)); 25 | // leaf_content.extend(self.pub_x.get_bits_le_fixed(params::FR_BIT_WIDTH)); 26 | // leaf_content.extend(self.pub_y.get_bits_le_fixed(params::FR_BIT_WIDTH)); 27 | // leaf_content 28 | } 29 | } 30 | 31 | impl Account { 32 | pub fn get_pub_key(&self) -> Option { 33 | let point = edwards::Point::::from_xy( 34 | self.public_key_x, 35 | self.public_key_y, 36 | ¶ms::JUBJUB_PARAMS, 37 | ); 38 | point.map(sapling_crypto::eddsa::PublicKey::) 39 | } 40 | } 41 | 42 | #[test] 43 | fn 
test_default_account() { 44 | let a = Account::default(); 45 | a.get_bits_le(); 46 | } 47 | -------------------------------------------------------------------------------- /core/models/src/plasma/block.rs: -------------------------------------------------------------------------------- 1 | pub use crate::plasma::tx::{DepositTx, ExitTx, TransferTx, TxSignature}; 2 | use crate::plasma::{BatchNumber, BlockNumber, Fr}; 3 | use bigdecimal::BigDecimal; 4 | 5 | #[derive(Clone, Debug, Serialize, Deserialize)] 6 | #[serde(tag = "type")] 7 | pub enum BlockData { 8 | Transfer { 9 | //#[serde(skip)] 10 | transactions: Vec, 11 | total_fees: BigDecimal, 12 | }, 13 | Deposit { 14 | //#[serde(skip)] 15 | transactions: Vec, 16 | batch_number: BatchNumber, 17 | }, 18 | Exit { 19 | //#[serde(skip)] 20 | transactions: Vec, 21 | batch_number: BatchNumber, 22 | }, 23 | } 24 | 25 | // #[derive(Clone, Serialize, Deserialize)] 26 | // pub enum BlockType { Transfer, Deposit, Exit } 27 | 28 | // impl BlockData { 29 | // fn block_type(&self) -> BlockType { 30 | // match self { 31 | // BlockData::Transfer{transactions: _, total_fees: _} => BlockType::Transfer, 32 | // BlockData::Deposit{transactions: _, batch_number: _} => BlockType::Deposit, 33 | // BlockData::Exit{transactions: _, batch_number: _} => BlockType::Exit, 34 | // } 35 | // } 36 | // } 37 | 38 | #[derive(Clone, Debug, Serialize, Deserialize)] 39 | pub struct Block { 40 | pub block_number: BlockNumber, 41 | pub new_root_hash: Fr, 42 | pub block_data: BlockData, 43 | } 44 | -------------------------------------------------------------------------------- /core/models/src/plasma/circuit/account.rs: -------------------------------------------------------------------------------- 1 | use crate::plasma::params; 2 | use crate::primitives::{GetBits, GetBitsFixed}; 3 | use ff::{Field, PrimeField}; 4 | use sapling_crypto::alt_babyjubjub::JubjubEngine; 5 | 6 | #[derive(Debug, Clone, Serialize, Deserialize)] 7 | pub struct CircuitAccount { 8 | pub balance: E::Fr, 9 | pub nonce: E::Fr, 10 | pub pub_x: E::Fr, 11 | pub pub_y: E::Fr, 12 | } 13 | 14 | impl std::default::Default for CircuitAccount { 15 | fn default() -> Self { 16 | Self { 17 | balance: E::Fr::zero(), 18 | nonce: E::Fr::zero(), 19 | pub_x: E::Fr::zero(), 20 | pub_y: E::Fr::zero(), 21 | } 22 | } 23 | } 24 | 25 | impl GetBits for CircuitAccount { 26 | fn get_bits_le(&self) -> Vec { 27 | let mut leaf_content = Vec::new(); 28 | leaf_content.extend(self.balance.get_bits_le_fixed(params::BALANCE_BIT_WIDTH)); 29 | leaf_content.extend(self.nonce.get_bits_le_fixed(params::NONCE_BIT_WIDTH)); 30 | leaf_content.extend(self.pub_y.get_bits_le_fixed(params::FR_BIT_WIDTH - 1)); 31 | leaf_content.extend(self.pub_x.get_bits_le_fixed(1)); 32 | 33 | leaf_content 34 | } 35 | } 36 | 37 | // TODO: this is ugly; the correct way is to introduce Serialize/Deserialize interface into JubjubEngine::Fr 38 | // this requires deduplication of JubjubEngines 39 | impl std::convert::From for CircuitAccount { 40 | fn from(a: crate::plasma::Account) -> Self { 41 | use pairing::bn256::Fr; 42 | 43 | Self { 44 | balance: Fr::from_str(&a.balance.to_string()).unwrap(), 45 | nonce: Fr::from_str(&a.nonce.to_string()).unwrap(), 46 | pub_x: a.public_key_x, 47 | pub_y: a.public_key_y, 48 | } 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /core/models/src/plasma/circuit/deposit.rs: -------------------------------------------------------------------------------- 1 | use crate::plasma::params; 2 | use 
ff::{BitIterator, PrimeField}; 3 | use sapling_crypto::jubjub::JubjubEngine; 4 | 5 | #[derive(Clone)] 6 | pub struct DepositRequest { 7 | pub into: E::Fr, 8 | pub amount: E::Fr, 9 | pub pub_x: E::Fr, 10 | pub pub_y: E::Fr, 11 | } 12 | 13 | impl DepositRequest { 14 | // this function returns public data in Ethereum compatible format 15 | pub fn public_data_into_bits(&self) -> Vec { 16 | // fields are 17 | // - into 18 | // - amount 19 | // - compressed public key 20 | let mut into: Vec = BitIterator::new(self.into.into_repr()).collect(); 21 | into.reverse(); 22 | into.truncate(params::BALANCE_TREE_DEPTH); 23 | // reverse again to have BE as in Ethereum native types 24 | into.reverse(); 25 | 26 | let mut amount: Vec = BitIterator::new(self.amount.into_repr()).collect(); 27 | amount.reverse(); 28 | amount.truncate(params::BALANCE_BIT_WIDTH); 29 | // reverse again to have BE as in Ethereum native types 30 | amount.reverse(); 31 | 32 | let mut y_bits: Vec = BitIterator::new(self.pub_y.into_repr()).collect(); 33 | y_bits.reverse(); 34 | y_bits.truncate(E::Fr::NUM_BITS as usize); 35 | y_bits.resize(params::FR_BIT_WIDTH - 1, false); 36 | 37 | let mut x_bits: Vec = BitIterator::new(self.pub_x.into_repr()).collect(); 38 | x_bits.reverse(); 39 | // push sign bit 40 | y_bits.push(x_bits[0]); 41 | // reverse again to have BE as in Ethereum native types 42 | y_bits.reverse(); 43 | 44 | let mut packed: Vec = vec![]; 45 | packed.extend(into.into_iter()); 46 | packed.extend(amount.into_iter()); 47 | packed.extend(y_bits.into_iter()); 48 | 49 | packed 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /core/models/src/plasma/circuit/exit.rs: -------------------------------------------------------------------------------- 1 | use ff::{BitIterator, PrimeField}; 2 | 3 | use sapling_crypto::jubjub::JubjubEngine; 4 | 5 | use crate::plasma::params; 6 | 7 | #[derive(Clone)] 8 | pub struct ExitRequest { 9 | pub from: E::Fr, 10 | pub amount: E::Fr, 11 | } 12 | 13 | impl ExitRequest { 14 | pub fn public_data_into_bits(&self) -> Vec { 15 | // fields are 16 | // - from 17 | // - amount 18 | let mut from: Vec = BitIterator::new(self.from.into_repr()).collect(); 19 | from.reverse(); 20 | from.truncate(params::BALANCE_TREE_DEPTH); 21 | // reverse again to have BE as in Ethereum native types 22 | from.reverse(); 23 | 24 | let mut amount: Vec = BitIterator::new(self.amount.into_repr()).collect(); 25 | amount.reverse(); 26 | amount.truncate(params::BALANCE_BIT_WIDTH); 27 | // reverse again to have BE as in Ethereum native types 28 | amount.reverse(); 29 | 30 | let mut packed: Vec = vec![]; 31 | packed.extend(from.into_iter()); 32 | packed.extend(amount.into_iter()); 33 | 34 | packed 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /core/models/src/plasma/circuit/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod account; 2 | pub mod deposit; 3 | pub mod exit; 4 | pub mod sig; 5 | pub mod transfer; 6 | pub mod utils; 7 | -------------------------------------------------------------------------------- /core/models/src/plasma/circuit/sig.rs: -------------------------------------------------------------------------------- 1 | use ff::Field; 2 | use sapling_crypto::alt_babyjubjub::JubjubEngine; 3 | use sapling_crypto::jubjub::{edwards, Unknown}; 4 | 5 | // use crate::models::params; 6 | 7 | #[derive(Clone, Serialize, Deserialize)] 8 | pub struct TransactionSignature { 9 | 
#[serde(bound = "")] 10 | pub r: edwards::Point, 11 | pub s: E::Fr, 12 | } 13 | 14 | impl TransactionSignature { 15 | pub fn empty() -> Self { 16 | let empty_point: edwards::Point = edwards::Point::zero(); 17 | 18 | Self { 19 | r: empty_point, 20 | s: E::Fr::zero(), 21 | } 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /core/models/src/plasma/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod account; 2 | pub mod block; 3 | pub mod circuit; 4 | pub mod params; 5 | pub mod tx; 6 | 7 | pub use web3::types::{H256, U128, U256}; 8 | 9 | // use merkle_tree::{PedersenHasher, SparseMerkleTree}; 10 | use pairing::bn256; 11 | use sapling_crypto::eddsa; 12 | 13 | pub use crate::plasma::account::Account; 14 | pub use crate::plasma::tx::{DepositTx, ExitTx, TransferTx, TxSignature}; 15 | 16 | pub type Engine = bn256::Bn256; 17 | pub type Fr = bn256::Fr; 18 | 19 | pub type AccountMap = fnv::FnvHashMap; 20 | 21 | pub type PublicKey = eddsa::PublicKey; 22 | pub type PrivateKey = eddsa::PrivateKey; 23 | 24 | pub type BatchNumber = u32; 25 | pub type BlockNumber = u32; 26 | pub type AccountId = u32; 27 | pub type Nonce = u32; 28 | 29 | #[derive(Debug)] 30 | pub enum TransferApplicationError { 31 | Unknown, 32 | InsufficientBalance, 33 | NonceIsTooLow, 34 | NonceIsTooHigh, 35 | UnknownSigner, 36 | InvalidSigner, 37 | ExpiredTransaction, 38 | InvalidTransaction(String), 39 | } 40 | -------------------------------------------------------------------------------- /core/models/src/plasma/params.rs: -------------------------------------------------------------------------------- 1 | pub const BALANCE_TREE_DEPTH: usize = 24; 2 | 3 | /// Amount bit widths 4 | pub const AMOUNT_EXPONENT_BIT_WIDTH: usize = 5; 5 | pub const AMOUNT_MANTISSA_BIT_WIDTH: usize = 11; 6 | 7 | /// Fee bit widths 8 | pub const FEE_EXPONENT_BIT_WIDTH: usize = 5; 9 | pub const FEE_MANTISSA_BIT_WIDTH: usize = 3; 10 | 11 | pub const BALANCE_BIT_WIDTH: usize = 128; 12 | 13 | /// Nonce bit width 14 | pub const NONCE_BIT_WIDTH: usize = 32; 15 | 16 | /// Block number bit width 17 | pub const BLOCK_NUMBER_BIT_WIDTH: usize = 32; 18 | 19 | // Signature data 20 | pub const SIGNATURE_S_BIT_WIDTH: usize = 256; 21 | pub const SIGNATURE_R_X_BIT_WIDTH: usize = 256; 22 | pub const SIGNATURE_R_Y_BIT_WIDTH: usize = 256; 23 | 24 | // Fr element encoding 25 | pub const FR_BIT_WIDTH: usize = 256; 26 | 27 | // this account does NOT have a public key, so can not spend 28 | // but it does not prevent an exit snark to work properly 29 | pub const SPECIAL_ACCOUNT_EXIT: u32 = 0; 30 | 31 | // This account does have a proper public key, and a set of deposit requests 32 | // to this account virtually padded by the smart-contract 33 | pub const SPECIAL_ACCOUNT_DEPOSIT: u32 = 1; 34 | 35 | use sapling_crypto::alt_babyjubjub::AltJubjubBn256; 36 | 37 | lazy_static! 
{ 38 | pub static ref JUBJUB_PARAMS: AltJubjubBn256 = AltJubjubBn256::new(); 39 | } 40 | -------------------------------------------------------------------------------- /core/plasma/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "plasma" 3 | version = "0.1.1" 4 | edition = "2018" 5 | 6 | authors = [ 7 | "Alex Gluchowski ", 8 | "Alex Vlasov " 9 | ] 10 | 11 | [dependencies] 12 | models = { path = "../models" } 13 | merkle_tree = { path = "../merkle_tree" } 14 | bigdecimal = { version = "0.1.0", features = ["serde"]} 15 | log = "0.4" 16 | 17 | -------------------------------------------------------------------------------- /core/plasma/src/lib.rs: -------------------------------------------------------------------------------- 1 | #[macro_use] 2 | extern crate log; 3 | 4 | pub mod state; 5 | -------------------------------------------------------------------------------- /core/prover/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "prover" 3 | version = "0.0.1" 4 | edition = "2018" 5 | 6 | [dependencies] 7 | circuit = { path = "../circuit" } 8 | plasma = { path = "../plasma" } 9 | models = { path = "../models" } 10 | storage = { path = "../storage" } 11 | 12 | ff = { package = "ff_ce", version = "0.6.0", features = ["derive"] } 13 | pairing = { package = "pairing_ce", version = "0.17.0" } 14 | bellman = { package = "bellman_ce", version = "0.3.0" } 15 | sapling-crypto = { package = "sapling-crypto_ce", version = "0.0.5" } 16 | 17 | # ff = { path = "../ff" } 18 | # pairing = { path = "../pairing" } 19 | # bellman = { path = "../bellman" } 20 | # sapling-crypto = { path = "../sapling-crypto" } 21 | 22 | rand = "0.4" 23 | rust-crypto = "0.2" 24 | rustc-hex = "2.0.1" 25 | 26 | signal-hook = "0.1.8" 27 | tokio = "0.1.18" 28 | futures = "0.1.25" 29 | 30 | fnv = "1.0.6" 31 | serde = "1.0.90" 32 | serde_derive = "1.0.90" 33 | serde_json = "1.0.39" 34 | diesel = { version = "1.4.2", features = ["postgres", "serde_json", "r2d2", "chrono"] } 35 | bigdecimal = { version = "0.1.0", features = ["serde"]} 36 | chrono = { version = "0.4", features = ["serde", "rustc-serialize"] } 37 | log = "0.4" 38 | env_logger = "0.6" 39 | -------------------------------------------------------------------------------- /core/prover/src/main.rs: -------------------------------------------------------------------------------- 1 | #[macro_use] 2 | extern crate log; 3 | 4 | use prover::BabyProver; 5 | use signal_hook::iterator::Signals; 6 | use std::env; 7 | use std::sync::{atomic::AtomicBool, Arc}; 8 | use std::thread; 9 | use storage::StorageProcessor; 10 | use tokio::runtime::current_thread::Runtime; 11 | use tokio::sync::oneshot; 12 | 13 | fn main() { 14 | env_logger::init(); 15 | 16 | // handle ctrl+c 17 | let stop_signal = Arc::new(AtomicBool::new(false)); 18 | signal_hook::flag::register(signal_hook::SIGTERM, Arc::clone(&stop_signal)) 19 | .expect("Error setting SIGTERM handler"); 20 | signal_hook::flag::register(signal_hook::SIGINT, Arc::clone(&stop_signal)) 21 | .expect("Error setting SIGINT handler"); 22 | signal_hook::flag::register(signal_hook::SIGQUIT, Arc::clone(&stop_signal)) 23 | .expect("Error setting SIGQUIT handler"); 24 | 25 | let worker = env::var("POD_NAME").unwrap_or_else(|_| "default".to_string()); 26 | info!("creating prover, worker: {}", worker); 27 | let mut prover = BabyProver::create(worker).unwrap(); 28 | let prover_id = prover.prover_id; 29 | 30 | let mut 
rt = Runtime::new().unwrap(); 31 | let (shutdown_tx, shutdown_rx) = oneshot::channel(); 32 | prover.start_timer_interval(&rt.handle()); 33 | 34 | // Run tokio timeline in a new thread 35 | thread::spawn(move || { 36 | prover.run(shutdown_tx, stop_signal); 37 | }); 38 | 39 | let signals = Signals::new(&[ 40 | signal_hook::SIGTERM, 41 | signal_hook::SIGINT, 42 | signal_hook::SIGQUIT, 43 | ]) 44 | .expect("Signals::new() failed"); 45 | thread::spawn(move || { 46 | for _ in signals.forever() { 47 | info!( 48 | "Termination signal received. Prover will finish the job and shut down gracefully" 49 | ); 50 | let storage = 51 | StorageProcessor::establish_connection().expect("db connection failed for prover"); 52 | storage.record_prover_stop(prover_id).expect("db failed"); 53 | } 54 | }); 55 | 56 | rt.block_on(shutdown_rx).unwrap(); 57 | } 58 | -------------------------------------------------------------------------------- /core/sandbox/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "sandbox" 3 | version = "0.0.1" 4 | edition = "2018" 5 | 6 | [dependencies] 7 | # serde_json = "1.0.39" 8 | # bigdecimal = { version = "0.1.0", features = ["serde"]} 9 | 10 | serde = "1.0.90" 11 | serde_derive = "1.0.90" 12 | log = "0.4" 13 | env_logger = "0.6" 14 | 15 | # ff = { package = "ff_ce", version = "0.6.0", features = ["derive"] } 16 | # models = { path = "../models" } 17 | # plasma = { path = "../plasma" } 18 | 19 | "iron" = "0.6.0" 20 | "persistent" = "0.4.0" 21 | "bodyparser" = "0.8.0" 22 | "tokio" = "0.1.19" 23 | "futures" = "0.1.26" 24 | -------------------------------------------------------------------------------- /core/sandbox/src/main.rs: -------------------------------------------------------------------------------- 1 | #[macro_use] 2 | extern crate log; 3 | 4 | use futures::Future; 5 | use std::time::{Duration, Instant}; 6 | use tokio::prelude::*; 7 | use tokio::timer::Interval; 8 | 9 | mod nonce_futures; 10 | 11 | use nonce_futures::*; 12 | 13 | fn main() { 14 | env_logger::init(); 15 | 16 | let nm = NonceFutures::default(); 17 | let task = Interval::new(Instant::now(), Duration::from_millis(1000)) 18 | .fold((0, nm.clone()), |acc, _| { 19 | let (i, mut nm) = acc; 20 | info!("i = {}", i); 21 | 22 | if i == 2 { 23 | nm.set(1, 2); 24 | } 25 | 26 | let next = (i + 1, nm); 27 | future::ok(next) 28 | }) 29 | .map_err(|e| panic!("err={:?}", e)); 30 | 31 | tokio::run(future::lazy(move || { 32 | tokio::spawn(task.map(|_| ())); 33 | 34 | for i in 0..=4 { 35 | let task = nm 36 | .nonce_await(1, i) 37 | .timeout(Duration::from_millis(5000)) 38 | .map(|_| info!("success!")) 39 | .or_else(|e| { 40 | error!("error: {:?}", e); 41 | future::ok(()) 42 | }); 43 | tokio::spawn(task); 44 | } 45 | 46 | future::ok(()) 47 | })); 48 | } 49 | -------------------------------------------------------------------------------- /core/server/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "server" 3 | version = "0.0.1" 4 | edition = "2018" 5 | 6 | [dependencies] 7 | circuit = { path = "../circuit" } 8 | eth_client = { path = "../eth_client" } 9 | plasma = { path = "../plasma" } 10 | models = { path = "../models" } 11 | storage = { path = "../storage" } 12 | 13 | ff = { package = "ff_ce", version = "0.6.0", features = ["derive"] } 14 | pairing = { package = "pairing_ce", version = "0.17.0" } 15 | sapling-crypto = { package = "sapling-crypto_ce", version = "0.0.5" } 16 | 17 | rand = "0.4" 
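# NB: rand is kept on the old 0.4 line to match the rand types expected by the
# bellman_ce / sapling-crypto_ce crates used across this workspace.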
18 | hex = "0.3.2"
19 | time = "0.1"
20 | num-bigint = "0.2"
21 | rust-crypto = "0.2"
22 | fnv = "1.0.3"
23 | ethabi = "6.1.0"
24 | web3 = "0.6.0"
25 | rustc-hex = "2.0.1"
26 | futures = "0.1.25"
27 | hyper = "0.12.16"
28 | serde = "1.0.90"
29 | serde_derive = "1.0.90"
30 | serde_bytes = "0.11.1"
31 | serde_json = "1.0.39"
32 | log = "0.4"
33 | env_logger = "0.6"
34 | 
35 | im = "12.2.0"
36 | priority-queue = "0.5.2"
37 | num-traits = "0.2"
38 | 
39 | rayon = "1.0.3"
40 | 
41 | tokio = "0.1.18"
42 | actix = "0.7.9"
43 | actix-web = "0.7.19"
44 | 
45 | bigdecimal = { version = "0.1.0", features = ["serde"]}
46 | chrono = { version = "0.4", features = ["serde", "rustc-serialize"] }
47 | diesel = { version = "1.4.2", features = ["postgres", "serde_json", "r2d2", "chrono"] }
48 | signal-hook = { version = "0.1.8", features = ["tokio-support"] }
49 | 
--------------------------------------------------------------------------------
/core/server/src/analysis.pgsql:
--------------------------------------------------------------------------------
1 | -- update operations
2 | -- set nonce = (id - 17) + (301 + 1), tx_hash = null
3 | -- where id between 17 and 20;
4 | 
5 | -- update operations
6 | -- set tx_hash = '0x2e8a98ec143d2db6058e09fd144c4c9c69c0d13e37161a23dc950ad14ddf8d37'
7 | -- -- set nonce = 1, tx_hash = 'busy'
8 | -- where id = 268 and tx_hash = '0x024fc83bd5ff98a4b10a2cce3d757d212ed234c5ecf800bbf7899d4c7df779e6';
9 | 
10 | SELECT
11 | id, block_number, nonce, tx_hash
12 | FROM "operations"
13 | order by nonce desc
14 | LIMIT 1000;
15 | 
--------------------------------------------------------------------------------
/core/server/src/committer.rs:
--------------------------------------------------------------------------------
1 | use eth_client::ETHClient;
2 | use models::abi::TEST_PLASMA_ALWAYS_VERIFY;
3 | use models::{Action, CommitRequest, Operation};
4 | use std::sync::mpsc::{Receiver, Sender};
5 | use std::thread;
6 | use std::time::Duration;
7 | use storage::ConnectionPool;
8 | 
9 | pub fn start_committer(
10 | rx_for_ops: Receiver<CommitRequest>,
11 | tx_for_eth: Sender<Operation>,
12 | pool: ConnectionPool,
13 | ) {
14 | thread::Builder::new()
15 | .name("committer".to_string())
16 | .spawn(move || {
17 | run_committer(rx_for_ops, tx_for_eth, pool);
18 | })
19 | .expect("thread creation failed");
20 | }
21 | 
22 | fn run_committer(
23 | rx_for_ops: Receiver<CommitRequest>,
24 | tx_for_eth: Sender<Operation>,
25 | pool: ConnectionPool,
26 | ) {
27 | info!("committer started");
28 | let storage = pool
29 | .access_storage()
30 | .expect("db connection failed for committer");
31 | 
32 | let eth_client = ETHClient::new(TEST_PLASMA_ALWAYS_VERIFY);
33 | let current_nonce = eth_client.current_nonce().expect("can not get nonce");
34 | let _ = storage.prepare_nonce_scheduling(&eth_client.current_sender(), current_nonce);
35 | 
36 | let mut last_verified_block = storage.get_last_verified_block().expect("db failed");
37 | loop {
38 | let req = rx_for_ops.recv_timeout(Duration::from_millis(100));
39 | if let Ok(CommitRequest {
40 | block,
41 | accounts_updated,
42 | }) = req
43 | {
44 | let op = Operation {
45 | action: Action::Commit,
46 | block,
47 | accounts_updated: Some(accounts_updated),
48 | tx_meta: None,
49 | id: None,
50 | };
51 | info!("commit block #{}", op.block.block_number);
52 | let op = storage
53 | .execute_operation(&op)
54 | .expect("committer must commit the op into db");
55 | //tx_for_proof_requests.send(ProverRequest(op.block.block_number)).expect("must send a proof request");
56 | tx_for_eth
57 | .send(op)
58 | .expect("must send an
operation for commitment to ethereum"); 59 | continue; 60 | } else { 61 | // there was a timeout, so check for the new ready proofs 62 | loop { 63 | let block_number = last_verified_block + 1; 64 | let proof = storage.load_proof(block_number); 65 | if let Ok(proof) = proof { 66 | let block = storage 67 | .load_committed_block(block_number) 68 | .unwrap_or_else(|| panic!("failed to load block #{}", block_number)); 69 | let op = Operation { 70 | action: Action::Verify { 71 | proof: Box::new(proof), 72 | }, 73 | block, 74 | accounts_updated: None, 75 | tx_meta: None, 76 | id: None, 77 | }; 78 | let op = storage 79 | .execute_operation(&op) 80 | .expect("committer must commit the op into db"); 81 | tx_for_eth 82 | .send(op) 83 | .expect("must send an operation for commitment to ethereum"); 84 | last_verified_block += 1; 85 | } else { 86 | break; 87 | } 88 | } 89 | }; 90 | } 91 | } 92 | -------------------------------------------------------------------------------- /core/server/src/lib.rs: -------------------------------------------------------------------------------- 1 | #[macro_use] 2 | extern crate serde_derive; 3 | #[macro_use] 4 | extern crate log; 5 | 6 | pub mod api_server; 7 | pub mod committer; 8 | pub mod eth_sender; 9 | pub mod eth_watch; 10 | pub mod nonce_futures; 11 | pub mod state_keeper; 12 | -------------------------------------------------------------------------------- /core/server/src/main.rs: -------------------------------------------------------------------------------- 1 | //use tokio::runtime::Runtime; 2 | #[macro_use] 3 | extern crate log; 4 | 5 | use std::sync::atomic::{AtomicBool, Ordering}; 6 | use std::sync::mpsc::channel; 7 | use std::sync::Arc; 8 | use std::thread; 9 | use std::time::Duration; 10 | 11 | use server::api_server::start_api_server; 12 | use server::committer::start_committer; 13 | use server::eth_sender; 14 | use server::eth_watch::{start_eth_watch, EthWatch}; 15 | use server::state_keeper::{start_state_keeper, PlasmaStateKeeper}; 16 | 17 | use models::{config, StateKeeperRequest}; 18 | use storage::ConnectionPool; 19 | 20 | fn main() { 21 | env_logger::init(); 22 | 23 | debug!("starting server"); 24 | 25 | // handle ctrl+c 26 | let stop_signal = Arc::new(AtomicBool::new(false)); 27 | signal_hook::flag::register(signal_hook::SIGTERM, Arc::clone(&stop_signal)) 28 | .expect("Error setting SIGTERM handler"); 29 | signal_hook::flag::register(signal_hook::SIGINT, Arc::clone(&stop_signal)) 30 | .expect("Error setting SIGINT handler"); 31 | signal_hook::flag::register(signal_hook::SIGQUIT, Arc::clone(&stop_signal)) 32 | .expect("Error setting SIGQUIT handler"); 33 | 34 | // create main tokio runtime 35 | //let rt = Runtime::new().unwrap(); 36 | 37 | let connection_pool = ConnectionPool::new(); 38 | let state_keeper = PlasmaStateKeeper::new(connection_pool.clone()); 39 | let eth_watch = EthWatch::new(0, 0, connection_pool.clone()); 40 | 41 | let storage = connection_pool 42 | .access_storage() 43 | .expect("db connection failed for committer"); 44 | let contract_addr = storage 45 | .load_config() 46 | .expect("can not load server_config") 47 | .contract_addr 48 | .expect("contract_addr empty in server_config"); 49 | if contract_addr != config::RUNTIME_CONFIG.contract_addr { 50 | panic!( 51 | "Contract addresses mismatch! 
From DB = {}, from env = {}", 52 | contract_addr, 53 | config::RUNTIME_CONFIG.contract_addr 54 | ); 55 | } 56 | drop(storage); 57 | 58 | // spawn threads for different processes 59 | // see https://docs.google.com/drawings/d/16UeYq7cuZnpkyMWGrgDAbmlaGviN2baY1w1y745Me70/edit?usp=sharing 60 | 61 | info!("starting actors"); 62 | 63 | let (tx_for_state, rx_for_state) = channel(); 64 | start_api_server(tx_for_state.clone(), connection_pool.clone()); 65 | start_eth_watch(eth_watch, tx_for_state.clone()); 66 | let (tx_for_ops, rx_for_ops) = channel(); 67 | start_state_keeper(state_keeper, rx_for_state, tx_for_ops.clone()); 68 | let tx_for_eth = eth_sender::start_eth_sender(connection_pool.clone()); 69 | start_committer(rx_for_ops, tx_for_eth, connection_pool.clone()); 70 | 71 | // start_prover(connection_pool.clone(), "worker 1"); 72 | // start_prover(connection_pool.clone(), "worker 2"); 73 | // start_prover(connection_pool.clone(), "worker 3"); 74 | 75 | // Simple timer, pings every 100 ms 76 | thread::Builder::new() 77 | .name("timer".to_string()) 78 | .spawn(move || loop { 79 | tx_for_state 80 | .send(StateKeeperRequest::TimerTick) 81 | .expect("tx_for_state channel failed"); 82 | thread::sleep(Duration::from_millis(100)); 83 | }) 84 | .expect("thread creation failed"); 85 | 86 | while !stop_signal.load(Ordering::SeqCst) { 87 | thread::sleep(Duration::from_secs(1)); 88 | } 89 | 90 | info!("terminate signal received"); 91 | } 92 | -------------------------------------------------------------------------------- /core/storage/.gitignore: -------------------------------------------------------------------------------- 1 | /src/schema.rs.generated 2 | -------------------------------------------------------------------------------- /core/storage/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "storage" 3 | version = "0.0.1" 4 | edition = "2018" 5 | 6 | [dependencies] 7 | models = { path = "../models" } 8 | fnv = "1.0.6" 9 | serde = "1.0.90" 10 | serde_derive = "1.0.90" 11 | serde_json = "1.0.39" 12 | web3 = "0.6.0" 13 | diesel = { version = "1.4.2", features = ["postgres", "serde_json", "r2d2", "chrono"] } 14 | bigdecimal = { version = "0.1.0", features = ["serde"]} 15 | ff = { package = "ff_ce", version = "0.6.0", features = ["derive"] } 16 | chrono = { version = "0.4", features = ["serde", "rustc-serialize"] } 17 | log = "0.4" 18 | -------------------------------------------------------------------------------- /core/storage/diesel.toml: -------------------------------------------------------------------------------- 1 | # For documentation on how to configure this file, 2 | # see diesel.rs/guides/configuring-diesel-cli 3 | 4 | [print_schema] 5 | file = "src/schema.rs.generated" 6 | -------------------------------------------------------------------------------- /core/storage/migrations/00000000000000_diesel_initial_setup/down.sql: -------------------------------------------------------------------------------- 1 | -- This file was automatically created by Diesel to setup helper functions 2 | -- and other internal bookkeeping. This file is safe to edit, any future 3 | -- changes will be added to existing projects as new migrations. 
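-- Reverting the initial setup only removes the diesel_manage_updated_at /
-- diesel_set_updated_at helpers defined in up.sql; no application tables are touched.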
4 | 5 | DROP FUNCTION IF EXISTS diesel_manage_updated_at(_tbl regclass); 6 | DROP FUNCTION IF EXISTS diesel_set_updated_at(); 7 | -------------------------------------------------------------------------------- /core/storage/migrations/00000000000000_diesel_initial_setup/up.sql: -------------------------------------------------------------------------------- 1 | -- This file was automatically created by Diesel to setup helper functions 2 | -- and other internal bookkeeping. This file is safe to edit, any future 3 | -- changes will be added to existing projects as new migrations. 4 | 5 | 6 | 7 | 8 | -- Sets up a trigger for the given table to automatically set a column called 9 | -- `updated_at` whenever the row is modified (unless `updated_at` was included 10 | -- in the modified columns) 11 | -- 12 | -- # Example 13 | -- 14 | -- ```sql 15 | -- CREATE TABLE users (id SERIAL PRIMARY KEY, updated_at TIMESTAMP NOT NULL DEFAULT NOW()); 16 | -- 17 | -- SELECT diesel_manage_updated_at('users'); 18 | -- ``` 19 | CREATE OR REPLACE FUNCTION diesel_manage_updated_at(_tbl regclass) RETURNS VOID AS $$ 20 | BEGIN 21 | EXECUTE format('CREATE TRIGGER set_updated_at BEFORE UPDATE ON %s 22 | FOR EACH ROW EXECUTE PROCEDURE diesel_set_updated_at()', _tbl); 23 | END; 24 | $$ LANGUAGE plpgsql; 25 | 26 | CREATE OR REPLACE FUNCTION diesel_set_updated_at() RETURNS trigger AS $$ 27 | BEGIN 28 | IF ( 29 | NEW IS DISTINCT FROM OLD AND 30 | NEW.updated_at IS NOT DISTINCT FROM OLD.updated_at 31 | ) THEN 32 | NEW.updated_at := current_timestamp; 33 | END IF; 34 | RETURN NEW; 35 | END; 36 | $$ LANGUAGE plpgsql; 37 | -------------------------------------------------------------------------------- /core/storage/migrations/2018-12-11-084553_operations/down.sql: -------------------------------------------------------------------------------- 1 | DROP TABLE IF EXISTS account_updates CASCADE; 2 | DROP TABLE IF EXISTS accounts CASCADE; 3 | DROP TABLE IF EXISTS operations CASCADE; 4 | DROP TABLE IF EXISTS op_config CASCADE; -------------------------------------------------------------------------------- /core/storage/migrations/2018-12-11-084553_operations/up.sql: -------------------------------------------------------------------------------- 1 | -- op_config table is used to keep track of nonce sequences for different sender addresses 2 | CREATE TABLE op_config( 3 | addr text primary key, -- sender address for ETH 4 | next_nonce integer -- nonce sequence holder 5 | ); 6 | INSERT INTO op_config VALUES ('0x0', 0); 7 | CREATE RULE noins_op_config AS ON INSERT TO op_config DO NOTHING; 8 | CREATE RULE nodel_op_config AS ON DELETE TO op_config DO NOTHING; 9 | 10 | CREATE OR REPLACE FUNCTION op_config_next_nonce() RETURNS integer AS 11 | $$ 12 | BEGIN 13 | UPDATE op_config SET next_nonce = next_nonce + 1; 14 | RETURN (SELECT next_nonce - 1 from op_config); 15 | END; 16 | $$ LANGUAGE 'plpgsql'; 17 | 18 | CREATE OR REPLACE FUNCTION op_config_addr() RETURNS text AS 19 | $$ 20 | BEGIN 21 | RETURN (SELECT addr from op_config); 22 | END; 23 | $$ LANGUAGE 'plpgsql'; 24 | 25 | CREATE TABLE operations ( 26 | id serial primary key, 27 | data jsonb not null, 28 | addr text not null default op_config_addr(), 29 | nonce integer not null default op_config_next_nonce(), 30 | block_number integer not null, 31 | action_type text not null, 32 | tx_hash text, 33 | created_at timestamp not null default now() 34 | ); 35 | 36 | CREATE INDEX operations_block_index ON operations (block_number); 37 | 38 | CREATE TABLE accounts ( 39 | id integer not null 
primary key, 40 | last_block integer not null, 41 | data json not null 42 | ); 43 | 44 | CREATE INDEX accounts_block_index ON accounts (last_block); 45 | 46 | CREATE TABLE account_updates ( 47 | account_id integer not null, 48 | block_number integer not null, 49 | data json not null, 50 | PRIMARY KEY (account_id, block_number) 51 | ); 52 | 53 | CREATE INDEX account_updates_block_index ON account_updates (block_number); 54 | 55 | -------------------------------------------------------------------------------- /core/storage/migrations/2019-04-02-100645_proofs/down.sql: -------------------------------------------------------------------------------- 1 | DROP TABLE IF EXISTS proofs CASCADE; -------------------------------------------------------------------------------- /core/storage/migrations/2019-04-02-100645_proofs/up.sql: -------------------------------------------------------------------------------- 1 | -- Your SQL goes here 2 | 3 | CREATE TABLE proofs ( 4 | block_number serial primary key, 5 | proof jsonb not null, 6 | created_at timestamp not null default now() 7 | ); 8 | 9 | CREATE TABLE prover_runs ( 10 | id serial primary key, 11 | block_number int not null, 12 | worker text, 13 | created_at timestamp not null default now(), 14 | updated_at timestamp not null default now() 15 | ); -------------------------------------------------------------------------------- /core/storage/migrations/2019-05-02-110639_transactions/down.sql: -------------------------------------------------------------------------------- 1 | DROP TABLE IF EXISTS transactions CASCADE; -------------------------------------------------------------------------------- /core/storage/migrations/2019-05-02-110639_transactions/up.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE transactions ( 2 | id serial primary key, 3 | 4 | tx_type text not null, 5 | from_account integer not null, 6 | to_account integer, 7 | nonce integer, 8 | amount integer not null, 9 | fee integer not null, 10 | 11 | block_number integer, 12 | state_root text, 13 | 14 | created_at timestamp not null default now() 15 | ); 16 | 17 | CREATE INDEX transactions_block_index ON transactions (block_number); 18 | -------------------------------------------------------------------------------- /core/storage/migrations/2019-05-08-114230_config/down.sql: -------------------------------------------------------------------------------- 1 | DROP TABLE IF EXISTS server_config CASCADE; 2 | -------------------------------------------------------------------------------- /core/storage/migrations/2019-05-08-114230_config/up.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE server_config( 2 | -- enforce single record 3 | id bool PRIMARY KEY NOT NULL DEFAULT true, 4 | CONSTRAINT single_server_config CHECK (id), 5 | 6 | contract_addr text 7 | 8 | ); 9 | -------------------------------------------------------------------------------- /core/storage/migrations/2019-05-16-130227_provers/down.sql: -------------------------------------------------------------------------------- 1 | DROP TABLE IF EXISTS provers CASCADE; 2 | -------------------------------------------------------------------------------- /core/storage/migrations/2019-05-16-130227_provers/up.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE active_provers ( 2 | id serial primary key, 3 | worker text not null, 4 | created_at timestamp not null default now(), 5 
| stopped_at timestamp 6 | ); 7 | -------------------------------------------------------------------------------- /core/storage/sql/active_provers.pgsql: -------------------------------------------------------------------------------- 1 | -- update active_provers 2 | -- set stopped_at = now() 3 | -- where id <= 118; 4 | 5 | select count(*) from active_provers 6 | where stopped_at is null 7 | ; 8 | 9 | with pr as ( 10 | SELECT 11 | *, 12 | EXTRACT(epoch FROM (updated_at - created_at)) as since 13 | FROM prover_runs pr 14 | WHERE NOT EXISTS (SELECT * FROM proofs p WHERE p.block_number = pr.block_number) 15 | ORDER BY id desc 16 | ) 17 | select * 18 | from pr 19 | order by block_number asc; 20 | -------------------------------------------------------------------------------- /core/storage/sql/prover-runs.pgsql: -------------------------------------------------------------------------------- 1 | -- with t as ( 2 | -- select 3 | -- block_number, count(*) 4 | -- from prover_runs 5 | -- group by block_number 6 | -- ) 7 | 8 | -- select * from t order by count desc 9 | -- limit 100; 10 | 11 | select * from prover_runs 12 | --where block_number = 6; 13 | order by id 14 | limit 50; 15 | 16 | select block_number, created_at from proofs 17 | where block_number = 6; 18 | -------------------------------------------------------------------------------- /core/storage/sql/tps.pgsql: -------------------------------------------------------------------------------- 1 | with d as ( 2 | with dat as ( 3 | with blocks as ( 4 | select 5 | block_number, action_type, created_at, 6 | 26 as from_block, -- 12 7 | 70 as to_block -- 265 8 | from operations 9 | where 10 | action_type = 'Verify' 11 | and data->'block'->'block_data'->>'type' = 'Transfer' 12 | order by block_number desc 13 | ) 14 | select 15 | count(*) as n, 16 | (select created_at from blocks where block_number = from_block) as from_time, 17 | (select created_at from blocks where block_number = to_block) as to_time 18 | from blocks 19 | where block_number >= from_block and block_number <= to_block 20 | ) select *, n * 256 as txs, EXTRACT(epoch FROM (to_time - from_time)) as seconds from dat 21 | ) select *, txs / seconds as tps from d; 22 | -------------------------------------------------------------------------------- /core/storage/src/schema.rs: -------------------------------------------------------------------------------- 1 | table! { 2 | accounts (id) { 3 | id -> Int4, 4 | last_block -> Int4, 5 | data -> Json, 6 | } 7 | } 8 | 9 | table! { 10 | account_updates (account_id, block_number) { 11 | account_id -> Int4, 12 | block_number -> Int4, 13 | data -> Json, 14 | } 15 | } 16 | 17 | table! { 18 | active_provers (id) { 19 | id -> Int4, 20 | worker -> Text, 21 | created_at -> Timestamp, 22 | stopped_at -> Nullable, 23 | } 24 | } 25 | 26 | table! { 27 | op_config (addr) { 28 | addr -> Text, 29 | next_nonce -> Nullable, 30 | } 31 | } 32 | 33 | table! { 34 | operations (id) { 35 | id -> Int4, 36 | data -> Jsonb, 37 | addr -> Text, 38 | nonce -> Int4, 39 | block_number -> Int4, 40 | action_type -> Text, 41 | tx_hash -> Nullable, 42 | created_at -> Timestamp, 43 | } 44 | } 45 | 46 | table! { 47 | proofs (block_number) { 48 | block_number -> Int4, 49 | proof -> Jsonb, 50 | created_at -> Timestamp, 51 | } 52 | } 53 | 54 | table! { 55 | prover_runs (id) { 56 | id -> Int4, 57 | block_number -> Int4, 58 | worker -> Nullable, 59 | created_at -> Timestamp, 60 | updated_at -> Timestamp, 61 | } 62 | } 63 | 64 | table! 
{ 65 | server_config (id) { 66 | id -> Bool, 67 | contract_addr -> Nullable, 68 | } 69 | } 70 | 71 | table! { 72 | transactions (id) { 73 | id -> Int4, 74 | tx_type -> Text, 75 | from_account -> Int4, 76 | to_account -> Nullable, 77 | nonce -> Nullable, 78 | amount -> Int4, 79 | fee -> Int4, 80 | block_number -> Nullable, 81 | state_root -> Nullable, 82 | created_at -> Timestamp, 83 | } 84 | } 85 | 86 | allow_tables_to_appear_in_same_query!( 87 | accounts, 88 | account_updates, 89 | active_provers, 90 | op_config, 91 | operations, 92 | proofs, 93 | prover_runs, 94 | server_config, 95 | transactions, 96 | ); 97 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3.2' 2 | services: 3 | nginx: 4 | image: "gluk64/franklin-nginx:latest" 5 | ports: 6 | - "80:80" 7 | #volumes: 8 | # - type: bind 9 | # source: ./volumes/html 10 | # target: /usr/share/nginx/html 11 | server: 12 | image: "gluk64/franklin:server" 13 | env_file: $ENV_FILE 14 | environment: 15 | - DATABASE_URL=postgres://postgres@postgres/plasma 16 | - WEB3_URL=http://geth:8545 17 | - NODE_NAME=docker 18 | - POD_NAME=docker/server 19 | ports: 20 | - "3000:3000" 21 | prover: 22 | image: "gluk64/franklin:prover" 23 | env_file: $ENV_FILE 24 | environment: 25 | - PROVER_DATABASE_URL=postgres://postgres@postgres/plasma 26 | - NODE_NAME=docker 27 | - POD_NAME=docker/prover 28 | volumes: 29 | - type: bind 30 | source: ./keys 31 | target: /keys 32 | postgres: 33 | image: "postgres:10.4" 34 | ports: 35 | - "5432:5432" 36 | volumes: 37 | - type: bind 38 | source: ./volumes/postgres 39 | target: /var/lib/postgresql/data 40 | geth: 41 | image: "gluk64/franklin:geth" 42 | # image: "ethereum/client-go" 43 | # command: '--rpcapi "db,personal,eth,net,web3" --rpccorsdomain="*" --networkid=456719 --rpc --rpcaddr="0.0.0.0"' 44 | ports: 45 | - "8545:8545" 46 | - "8546:8546" 47 | volumes: 48 | - type: bind 49 | source: ./volumes/geth 50 | target: /var/lib/geth/data 51 | blockscout_postgres: 52 | image: "postgres:10.4" 53 | volumes: 54 | - type: bind 55 | source: ./volumes/blockscout_postgres 56 | target: /var/lib/postgresql/data 57 | blockscout: 58 | image: "gluk64/blockscout" 59 | command: /bin/sh -c "mix phx.server" 60 | ports: 61 | - "4000:4000" 62 | environment: 63 | - MIX_ENV=prod 64 | - DATABASE_URL=postgresql://postgres:@blockscout_postgres:5432/explorer?ssl=false 65 | - ETHEREUM_JSONRPC_VARIANT=geth 66 | - ETHEREUM_JSONRPC_HTTP_URL=http://geth:8545 67 | - COIN=DAI 68 | tesseracts: 69 | image: "adria0/tesseracts" 70 | command: --cfg /tesseracts.toml -vvv 71 | ports: 72 | - "8000:8000" 73 | volumes: 74 | - ./etc/tesseracts/tesseracts.toml:/tesseracts.toml 75 | - type: bind 76 | source: ./volumes/tesseracts 77 | target: /var/lib/tesseracts/data 78 | 79 | -------------------------------------------------------------------------------- /docker/flattener/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.8.0a4-alpine 2 | 3 | RUN apk add --no-cache curl 4 | 5 | # from https://github.com/ethereum/solidity/releases/tag/v0.4.24 6 | RUN curl -sL https://github.com/ethereum/solidity/releases/download/v0.4.24/solc-static-linux > /bin/solc 7 | RUN chmod a+x /bin/solc 8 | 9 | RUN pip install solidity-flattener 10 | 11 | ENTRYPOINT [ "sh" ] -------------------------------------------------------------------------------- /docker/geth/Dockerfile: 
-------------------------------------------------------------------------------- 1 | FROM ethereum/client-go 2 | 3 | RUN mkdir -p /seed/keystore 4 | COPY password.sec /seed/ 5 | COPY dev.json /seed/ 6 | COPY keystore/UTC--2019-04-06T21-13-27.692266000Z--8a91dc2d28b689474298d91899f0c1baf62cb85b /seed/keystore/ 7 | 8 | COPY geth-entry.sh /bin/ 9 | 10 | EXPOSE 8545 8546 30303 30303/udp 11 | ENTRYPOINT [ "sh", "/bin/geth-entry.sh" ] -------------------------------------------------------------------------------- /docker/geth/dev.json: -------------------------------------------------------------------------------- 1 | { 2 | "config": { 3 | "chainId": 9, 4 | "homesteadBlock": 1, 5 | "eip150Block": 2, 6 | "eip150Hash": "0x0000000000000000000000000000000000000000000000000000000000000000", 7 | "eip155Block": 3, 8 | "eip158Block": 3, 9 | "byzantiumBlock": 4, 10 | "constantinopleBlock": 5, 11 | "clique": { 12 | "period": 1, 13 | "epoch": 30000 14 | } 15 | }, 16 | "nonce": "0x0", 17 | "timestamp": "0x5ca9158b", 18 | "extraData": "0x00000000000000000000000000000000000000000000000000000000000000008a91dc2d28b689474298d91899f0c1baf62cb85b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", 19 | "gasLimit": "0x147b760", 20 | "difficulty": "0x80000", 21 | "mixHash": "0x0000000000000000000000000000000000000000000000000000000000000000", 22 | "coinbase": "0x0000000000000000000000000000000000000000", 23 | "alloc": { 24 | "0000000000000000000000000000000000000000": { 25 | "balance": "0x1" 26 | }, 27 | "8a91dc2d28b689474298d91899f0c1baf62cb85b": { 28 | "balance": "0x200000000000000000000000000000000000000000000000000000000000000" 29 | }, 30 | "de03a0b5963f75f1c8485b355ff6d30f3093bde7": { 31 | "balance": "0x200000000000000000000000000000000000000000000000000000000000000" 32 | }, 33 | "52312AD6f01657413b2eaE9287f6B9ADaD93D5FE": { 34 | "balance": "0x200000000000000000000000000000000000000000000000000000000000000" 35 | } 36 | }, 37 | "number": "0x0", 38 | "gasUsed": "0x0", 39 | "parentHash": "0x0000000000000000000000000000000000000000000000000000000000000000" 40 | } -------------------------------------------------------------------------------- /docker/geth/geth-entry.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | cd /var/lib/geth/data 4 | 5 | if [ ! -f ./keystore ]; then 6 | echo initializing dev network 7 | cp /seed/dev.json ./ 8 | cp /seed/password.sec ./ 9 | geth --datadir . init dev.json 10 | cp /seed/keystore/UTC--2019-04-06T21-13-27.692266000Z--8a91dc2d28b689474298d91899f0c1baf62cb85b ./keystore/ 11 | fi 12 | 13 | exec geth --networkid 9 --mine --minerthreads 1 \ 14 | --datadir "." 
\ 15 | --nodiscover \ 16 | --rpc --rpcaddr "0.0.0.0" \ 17 | --rpccorsdomain "*" --nat "any" --rpcapi eth,web3,personal,net \ 18 | --unlock 0 --password "./password.sec" --allow-insecure-unlock \ 19 | --ws --wsport 8546 \ 20 | --gcmode archive \ 21 | --wsorigins "*" --rpcvhosts=* 22 | -------------------------------------------------------------------------------- /docker/geth/keystore/UTC--2019-04-06T21-13-27.692266000Z--8a91dc2d28b689474298d91899f0c1baf62cb85b: -------------------------------------------------------------------------------- 1 | {"address":"8a91dc2d28b689474298d91899f0c1baf62cb85b","crypto":{"cipher":"aes-128-ctr","ciphertext":"c0b1725ea8dcff76578e304023eeed04a9a5ecde683f6e48fe30cd59186f3c6f","cipherparams":{"iv":"eb4d35a8a5f4502cf7d8fa2ae6cef7bd"},"kdf":"scrypt","kdfparams":{"dklen":32,"n":262144,"p":1,"r":8,"salt":"a16f6be667fc2b2a2335e53ca4902f0af1e3abd740373073ed59fcdfdbdd3e91"},"mac":"689c82e199155f38cfac88fa27ba632d6c44e133ed50f43483e407622c1c508e"},"id":"3d639525-1478-47de-8e5a-09a6199214dd","version":3} -------------------------------------------------------------------------------- /docker/geth/password.sec: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LuozhuZhang/sourceCode-zkSync-era-rollupContract-code-learning/14168cd267b20a3704d510c801770c3a7cdf3440/docker/geth/password.sec -------------------------------------------------------------------------------- /docker/nginx/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM nginx 2 | 3 | RUN mkdir -p /usr/share/nginx/html/client/dist 4 | #COPY js/client/index.html /usr/share/nginx/html/client/ 5 | COPY js/client/dist/* /usr/share/nginx/html/client/dist/ 6 | 7 | RUN mkdir -p /usr/share/nginx/html/explorer/dist 8 | COPY js/explorer/dist/* /usr/share/nginx/html/explorer/dist/ 9 | 10 | COPY docker/nginx/nginx.conf /etc/nginx/nginx.conf 11 | -------------------------------------------------------------------------------- /docker/nginx/nginx.conf: -------------------------------------------------------------------------------- 1 | events { } 2 | 3 | http { 4 | server { 5 | listen 80; 6 | server_name franklin; 7 | 8 | index index.html; 9 | 10 | location / { 11 | if ($host = localhost) { 12 | return 302 http://$host/explorer/; 13 | } 14 | 15 | # Redirect to https in production 16 | return 302 https://$host/explorer/; 17 | } 18 | 19 | location ~ ^/explorer { 20 | include /etc/nginx/mime.types; 21 | root /usr/share/nginx/html/; # the directory (/admin) will be appended to this, so don't include it in the root otherwise it'll look for /var/www/html/www_new/admin/admin 22 | try_files $uri /explorer/dist/index.html; # try_files will need to be relative to root 23 | } 24 | 25 | location ~ ^/client { 26 | include /etc/nginx/mime.types; 27 | root /usr/share/nginx/html/; # the directory (/admin) will be appended to this, so don't include it in the root otherwise it'll look for /var/www/html/www_new/admin/admin 28 | try_files $uri /client/dist/index.html; # try_files will need to be relative to root 29 | } 30 | 31 | } 32 | } -------------------------------------------------------------------------------- /docker/prover/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM alpine 2 | RUN apk add --no-cache curl 3 | COPY target/x86_64-unknown-linux-musl/release/prover /bin/ 4 | COPY docker/prover/prover-entry.sh /bin/ 5 | COPY bin/.load_keys /bin/ 6 | RUN apk add 
--no-cache axel 7 | 8 | CMD ["prover-entry.sh"] 9 | -------------------------------------------------------------------------------- /docker/prover/prover-entry.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # remove quotes for docker-compose 4 | export KEY_FILES=`echo $KEY_FILES | sed -e 's/"\(.*\)/\1/g' -e 's/"$//g'` 5 | echo KEY_FILES=$KEY_FILES 6 | 7 | echo NODE_NAME=$NODE_NAME 8 | echo POD_NAME=$POD_NAME 9 | 10 | . /bin/.load_keys 11 | 12 | echo key download complete, starting prover 13 | 14 | export DATABASE_URL=$PROVER_DATABASE_URL 15 | exec prover 2>&1 16 | -------------------------------------------------------------------------------- /docker/server/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM alpine 2 | EXPOSE 3000 3 | COPY target/x86_64-unknown-linux-musl/release/server / 4 | ENTRYPOINT ["/server"] 5 | -------------------------------------------------------------------------------- /docs/kube-config.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LuozhuZhang/sourceCode-zkSync-era-rollupContract-code-learning/14168cd267b20a3704d510c801770c3a7cdf3440/docs/kube-config.png -------------------------------------------------------------------------------- /docs/kubernetes.md: -------------------------------------------------------------------------------- 1 | ## Connect to the cluster 2 | 3 | Go to the Digital Ocean Dashboard > Kubernetes Clusters > {Your Cluster} > More > Download Config: 4 | https://cloud.digitalocean.com/kubernetes/clusters?i=ba0188 5 | 6 | ![screenshot](kube-config.png) 7 | 8 | Save it to `etc/kube/kubeconfig.yaml`. 9 | 10 | For convenience during testing, add `export KUBECONFIG=/path/to/etc/kube/kubeconfig.yaml` to `~/.bash_profile` 11 | 12 | Now you can check your setup: 13 | 14 | ``` 15 | kubectl config view 16 | ``` 17 | 18 | ## Deploy 19 | 20 | 1. Deploy contracts: 21 | 22 | ``` 23 | deploy-contracts prod 24 | ``` 25 | 26 | 2. Upload the .pk key files to DO Spaces: 27 | 28 | https://cloud.digitalocean.com/spaces/keys?i=ba0188 29 | 30 | 3. Build and push your images to DockerHub: 31 | 32 | ``` 33 | make push 34 | ``` 35 | 36 | 4. Deploy Kubernetes resources and/or update env vars: 37 | 38 | ``` 39 | deploy-kube prod 40 | ``` 41 | 42 | 5. Scale nodes: 43 | 44 | ``` 45 | kubectl scale deployments/server --replicas=1 46 | kubectl scale deployments/prover --replicas=3 47 | ``` 48 | 49 | ## Check status 50 | 51 | 1. Nodes: 52 | ``` 53 | kubectl get pods 54 | ``` 55 | 56 | 2. Web server: 57 | https://api1.mattr.network/api/v0.1/status 58 | 59 | ## Misc 60 | 61 | ### Commands 62 | 63 | https://kubernetes.io/docs/reference/kubectl/cheatsheet/ 64 | 65 | ``` 66 | kubectl get pods -o wide 67 | kubectl logs -f 68 | ``` 69 | 70 | ### Secrets 71 | 72 | View a secret: 73 | 74 | ```kubectl get secret franklin-secret -o yaml``` 75 | 76 | Misc: 77 | 78 | ```kubectl set env --from=configmap/myconfigmap deployment/myapp``` 79 | -------------------------------------------------------------------------------- /docs/setup-dev.md: -------------------------------------------------------------------------------- 1 | # Prerequisites 2 | 3 | ## Docker 4 | 5 | Install Docker. 6 | 7 | ## Node & Yarn 8 | 9 | Install Node. 10 | 11 | Install Yarn.
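A quick, optional sanity check that the tools installed so far are on `PATH` (the versions printed will vary by machine):

```
docker --version
node --version
yarn --version
```

The remaining prerequisites below (axel, jq, psql, diesel) can be verified the same way.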
12 | 13 | ## Axel 14 | 15 | Install axel for downloading keys: 16 | 17 | ```brew install axel``` 18 | 19 | ## Envsubst for mac (to transpile k8s yaml files) 20 | 21 | ``` 22 | brew install gettext 23 | brew link --force gettext 24 | ``` 25 | 26 | ## Rust 27 | 28 | Install the latest rust version (>= 1.32): 29 | 30 | ``` 31 | rustc --version 32 | rustc 1.32.0-nightly (21f268495 2018-12-02) 33 | ``` 34 | 35 | # JQ 36 | 37 | jq is used to work with json when managing DigitalOcean. 38 | 39 | ```brew install jq``` 40 | 41 | # envsubst 42 | 43 | ```bash 44 | brew install gettext 45 | brew link --force gettext 46 | ``` 47 | 48 | # PSQL 49 | 50 | Install `psql` CLI tool to interact with postgres. 51 | 52 | ## Diesel 53 | 54 | ```cargo install diesel_cli --no-default-features --features postgres``` 55 | 56 | ## Environment 57 | 58 | Edit the lines below and add them to your shell profile file (e.g. `~/.bash_profile`): 59 | 60 | ``` 61 | # Add path here: 62 | export FRANKLIN_HOME=/path/to/franklin 63 | 64 | export PATH=$FRANKLIN_HOME/bin:$PATH 65 | complete -W "\`grep -oE '^[a-zA-Z0-9_.-]+:([^=]|$)' $FRANKLIN_HOME/Makefile | sed 's/[^a-zA-Z0-9_.-]*$//'\`" franklin 66 | 67 | # If you're like me, uncomment: 68 | # cd $FRANKLIN_HOME 69 | ``` 70 | -------------------------------------------------------------------------------- /etc/env/dev.env.example: -------------------------------------------------------------------------------- 1 | # Loadtest 2 | 3 | LOADTEST_N_CLIENTS=10 4 | LOADTEST_TPS=100 5 | 6 | # Mnemonic can be generated here: https://bitcoinqrcodegenerator.win/bip39/ 7 | 8 | MNEMONIC="fine music test violin matrix prize squirrel panther purchase material script deal" 9 | #FUNDING_ACCOUNT=52312AD6f01657413b2eaE9287f6B9ADaD93D5FE 10 | SENDER_ACCOUNT=de03a0B5963f75f1C8485B355fF6D30f3093BDE7 11 | PRIVATE_KEY=27593fea79697e947890ecbecce7901b0008345e5d7259710d0dd5e500d040be 12 | 13 | WEB3_URL=http://localhost:8545 14 | TRUFFLE_NETWORK=universal 15 | 16 | # Client 17 | 18 | CLIENT_GITHUB_DIR=~/src/gluk64.github.io/ 19 | 20 | CONTRACT_ADDR=5F939954eA54FA9b61Fd59518945D09E8939f2B2 21 | CHAIN_ID=9 22 | LOGDIR=~/var/log/ 23 | LOGFILE=$~/var/log/plasma.log 24 | 25 | DATABASE_URL=postgres://postgres@localhost/plasma 26 | PROVER_DATABASE_URL=postgres://postgres@localhost/plasma 27 | 28 | # in docker 29 | #DATABASE_URL=postgres://alex@host.docker.internal/plasma 30 | #PROVER_DATABASE_URL=postgres://alex@host.docker.internal/plasma 31 | 32 | DB_POOL_SIZE=10 33 | 34 | PADDING_PUB_KEY="[\"0x18936d8e5f18dc41425e85a25d7a76f63715be4b3c9fac18475d028fca64c740\", \"0x0f933c18160257e0aa54056652e6bc2b8673b31c80cda933421f99dada946bf4\"]" 35 | FROM_BLOCK=0 36 | BLOCK_DELAY=0 37 | PORT=3000 38 | BIND_TO=0.0.0.0 39 | RUST_BACKTRACE=1 40 | 41 | # DigitalOcean 42 | 43 | DO_TOKEN=b2fa120ea629fc62050cba217da8a34c61a83ff84e2a9aebd3f4c4064ae381d4 44 | 45 | # Prover 46 | 47 | BELLMAN_VERBOSE=1 48 | 49 | SPACE_URL=https://keys.ams3.cdn.digitaloceanspaces.com/8 50 | KEY_DIR=keys/8 51 | TRANSFER_BATCH_SIZE=8 52 | 53 | CONTRACT_KEY_FILES="DepositVerificationKey.sol ExitVerificationKey.sol TransferVerificationKey.sol" 54 | KEY_FILES="deposit_pk.key exit_pk.key transfer_pk.key DepositVerificationKey.sol ExitVerificationKey.sol TransferVerificationKey.sol" 55 | 56 | #API_SERVER=https://api1.franklin.network 57 | API_SERVER=http://localhost:3000 58 | #API_SERVER=https://api.plasma-winter.io 59 | MAX_OUTSTANDING_TXS=120000 60 | LOADTEST_MIN_AMOUNT=0.01 61 | 62 | # Tree restore 63 | 
TREE_RESTORE_RINKEBY_ENDPOINT=https://rinkeby.infura.io/ 64 | TREE_RESTORE_RINKEBY_CONTRACT_ADDR=4fbf331db438c88a83b1316d072b7d73d8366367 65 | TREE_RESTORE_MAINNET_ENDPOINT=https://mainnet.infura.io/ 66 | TREE_RESTORE_MAINNET_CONTRACT_ADDR=4a89f998dce2453e96b795d47603c4b5a16144b0 67 | 68 | RUST_LOG=server=info,storage=info,prover=info,plasma=info,eth_client=info,data_restore=info 69 | -------------------------------------------------------------------------------- /etc/kube/.gitignore: -------------------------------------------------------------------------------- 1 | /kubeconfig*.yaml 2 | /gen 3 | /clusters -------------------------------------------------------------------------------- /etc/kube/prover.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: apps/v1 2 | kind: Deployment 3 | metadata: 4 | name: ${FRANKLIN_ENV}-prover 5 | spec: 6 | selector: 7 | matchLabels: 8 | app: ${FRANKLIN_ENV}-prover 9 | template: 10 | metadata: 11 | labels: 12 | app: ${FRANKLIN_ENV}-prover 13 | spec: 14 | containers: 15 | - name: ${FRANKLIN_ENV}-prover 16 | image: gluk64/franklin:prover 17 | imagePullPolicy: Always 18 | resources: 19 | requests: 20 | memory: "2Gi" 21 | envFrom: 22 | - secretRef: 23 | name: ${FRANKLIN_ENV}-secret 24 | env: 25 | - name: FRANKLIN_ENV 26 | value: "${FRANKLIN_ENV}" 27 | - name: NODE_NAME 28 | valueFrom: 29 | fieldRef: 30 | fieldPath: spec.nodeName 31 | - name: POD_NAME 32 | valueFrom: 33 | fieldRef: 34 | fieldPath: metadata.name 35 | terminationGracePeriodSeconds: 20 36 | nodeSelector: 37 | doks.digitalocean.com/node-pool: prover 38 | -------------------------------------------------------------------------------- /etc/kube/regions-all.json: -------------------------------------------------------------------------------- 1 | [ 2 | "sfo2", 3 | "blr1", 4 | "fra1", 5 | "lon1", 6 | "nyc1", 7 | "nyc3", 8 | "sfo2", 9 | "sgp1", 10 | "tor1" 11 | ] -------------------------------------------------------------------------------- /etc/kube/regions.json: -------------------------------------------------------------------------------- 1 | [ 2 | "nyc1", 3 | "blr1", 4 | "fra1", 5 | "sfo2" 6 | ] -------------------------------------------------------------------------------- /etc/kube/server.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Service 3 | metadata: 4 | name: ${FRANKLIN_ENV}-server 5 | labels: 6 | app: ${FRANKLIN_ENV}-server 7 | spec: 8 | type: LoadBalancer 9 | ports: 10 | - port: 80 11 | targetPort: 3000 12 | protocol: TCP 13 | name: http 14 | selector: 15 | app: ${FRANKLIN_ENV}-server 16 | --- 17 | apiVersion: v1 18 | kind: Service 19 | metadata: 20 | name: ${FRANKLIN_ENV}-nginx 21 | labels: 22 | app: ${FRANKLIN_ENV}-nginx 23 | spec: 24 | type: LoadBalancer 25 | ports: 26 | - port: 80 27 | targetPort: 80 28 | protocol: TCP 29 | name: http 30 | selector: 31 | app: ${FRANKLIN_ENV}-nginx 32 | --- 33 | apiVersion: apps/v1 34 | kind: Deployment 35 | metadata: 36 | name: ${FRANKLIN_ENV}-nginx 37 | spec: 38 | replicas: 1 39 | selector: 40 | matchLabels: 41 | app: ${FRANKLIN_ENV}-nginx 42 | template: 43 | metadata: 44 | labels: 45 | app: ${FRANKLIN_ENV}-nginx 46 | spec: 47 | containers: 48 | - name: ${FRANKLIN_ENV}-nginx 49 | image: gluk64/franklin-nginx:${FRANKLIN_ENV} 50 | imagePullPolicy: Always 51 | ports: 52 | - containerPort: 80 53 | nodeSelector: 54 | doks.digitalocean.com/node-pool: prover 55 | --- 56 | apiVersion: apps/v1 57 | kind: Deployment 58 | metadata: 
59 | name: ${FRANKLIN_ENV}-server 60 | spec: 61 | strategy: 62 | type: Recreate 63 | replicas: 1 64 | selector: 65 | matchLabels: 66 | app: ${FRANKLIN_ENV}-server 67 | template: 68 | metadata: 69 | labels: 70 | app: ${FRANKLIN_ENV}-server 71 | spec: 72 | containers: 73 | - name: server 74 | image: gluk64/franklin:server 75 | imagePullPolicy: Always 76 | ports: 77 | - containerPort: 3000 78 | envFrom: 79 | - secretRef: 80 | name: ${FRANKLIN_ENV}-secret 81 | env: 82 | - name: FRANKLIN_ENV 83 | value: "${FRANKLIN_ENV}" 84 | - name: NODE_NAME 85 | valueFrom: 86 | fieldRef: 87 | fieldPath: spec.nodeName 88 | - name: POD_NAME 89 | valueFrom: 90 | fieldRef: 91 | fieldPath: metadata.name 92 | nodeSelector: 93 | doks.digitalocean.com/node-pool: prover 94 | -------------------------------------------------------------------------------- /etc/kube/test.sh: -------------------------------------------------------------------------------- 1 | kubectl --kubeconfig="k8s-ams3-kubeconfig.yaml" get nodes 2 | kubectl create --kubeconfig="k8s-ams3-kubeconfig.yaml" -f ./franklin.yaml 3 | kubectl --kubeconfig="k8s-ams3-kubeconfig.yaml" get pods 4 | kubectl describe pod 5 | -------------------------------------------------------------------------------- /etc/tesseracts/tesseracts.toml: -------------------------------------------------------------------------------- 1 | # user interface ----------------------------------- 2 | 3 | # title of the page, e.g. "tesseracts" 4 | ui_title = "Franklin" 5 | 6 | # database ---------------------------------------- 7 | 8 | # where the database is located 9 | db_path = "/var/lib/tesseracts/data" 10 | 11 | # true|false: whether to scan blocks and save them into the db 12 | scan = false 13 | 14 | # the starting block from which to retrieve blocks (only if scan==true) 15 | scan_start_block = 1 16 | 17 | # store which txs are contained in each addr? (bool) 18 | db_store_addr = false 19 | 20 | # store the transactions and receipts? (bool) 21 | db_store_tx = false 22 | 23 | # store internal transactions? (bool) 24 | db_store_itx = false 25 | 26 | # store the list of last non-empty blocks? (bool) 27 | db_store_neb = false 28 | 29 | # web3 ---------------------------------------------- 30 | 31 | # web3 json-rpc endpoint, e.g. http://localhost:8545 32 | web3_url = "http://geth:8545" 33 | 34 | # client type 35 | # "geth_clique" for geth PoA (clique) 36 | # "geth_pow" for geth PoW 37 | # "geth" to autodetect geth_clique and geth_pow 38 | web3_client = "geth" 39 | 40 | # process internal txs, true or false 41 | # in geth this requires: 42 | # --syncmode=full 43 | # --gcmode=archive 44 | # --rpcapi debug 45 | web3_itx = false 46 | 47 | # compiler ------------------------------------------ 48 | 49 | # the path where solc binaries are stored (optional) 50 | #solc_path = 51 | 52 | # the solidity compiler can be bypassed by specifying the abi 53 | solc_bypass = true 54 | 55 | # server -------------------------------------------- 56 | 57 | # http server binding (e.g.
"0.0.0.0:8000") 58 | bind = "0.0.0.0:8000" 59 | -------------------------------------------------------------------------------- /js/client/.babelrc: -------------------------------------------------------------------------------- 1 | { 2 | "presets": [ 3 | ["env", { "modules": false }] 4 | ], 5 | "plugins": [ 6 | ["transform-runtime", { 7 | "polyfill": false, 8 | "regenerator": true 9 | }] 10 | ] 11 | } 12 | -------------------------------------------------------------------------------- /js/client/README.md: -------------------------------------------------------------------------------- 1 | # plasma-client 2 | 3 | > Plasma client app 4 | 5 | ## Build Setup 6 | 7 | ``` bash 8 | # install dependencies 9 | yarn 10 | 11 | # serve with hot reload at localhost:8080; API server will be queried at localhost:3000 12 | yarn run dev 13 | 14 | # build for production with minification 15 | yarn run build 16 | ``` 17 | 18 | For detailed explanation on how things work, consult the [docs for vue-loader](http://vuejs.github.io/vue-loader). 19 | -------------------------------------------------------------------------------- /js/client/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | Plasma Wallet 7 | 8 | 9 | 10 |
11 | 12 | 13 | 14 | -------------------------------------------------------------------------------- /js/client/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "plasma-client", 3 | "description": "Plasma client app", 4 | "version": "1.0.0", 5 | "author": "Alex ", 6 | "private": true, 7 | "scripts": { 8 | "dev": "webpack-dev-server --port 9000 --open --hot --define process.env.NODE_ENV='\"development\"'", 9 | "build": "NODE_ENV=production webpack --config webpack.config.prod.js --progress --hide-modules" 10 | }, 11 | "dependencies": { 12 | "axios": "^0.18.0", 13 | "babel-plugin-transform-runtime": "^6.23.0", 14 | "bn.js": "^4.11.8", 15 | "bootstrap": "^4.0.0-beta.2", 16 | "bootstrap-vue": "^1.2.0", 17 | "buffer": "^5.2.1", 18 | "clean-webpack-plugin": "^2.0.2", 19 | "ethers": "^4.0.20", 20 | "ethjs": "^0.4.0", 21 | "ethjs-util": "^0.1.6", 22 | "html-webpack-plugin": "^3.2.0", 23 | "js-sha3": "^0.8.0", 24 | "popper.js": "^1.12.9", 25 | "vue": "^2.5.3", 26 | "vue-router": "^3.0.2", 27 | "vue-timers": "^2.0.2" 28 | }, 29 | "devDependencies": { 30 | "babel-core": "^6.26.0", 31 | "babel-loader": "^6.0.0", 32 | "babel-preset-env": "^1.6.1", 33 | "cross-env": "^5.2.0", 34 | "css-loader": "^0.28.7", 35 | "extract-text-webpack-plugin": "^3.0.2", 36 | "file-loader": "^1.1.5", 37 | "style-loader": "^0.19.0", 38 | "vue-loader": "^13.5.0", 39 | "vue-template-compiler": "^2.5.3", 40 | "webpack": "^3.8.1", 41 | "webpack-dev-server": "^2.9.4" 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /js/client/src/App.vue: -------------------------------------------------------------------------------- 1 | 4 | 5 | 11 | 12 | -------------------------------------------------------------------------------- /js/client/src/Login.vue: -------------------------------------------------------------------------------- 1 | 17 | 18 | 62 | 63 | -------------------------------------------------------------------------------- /js/client/src/assets/loading.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LuozhuZhang/sourceCode-zkSync-era-rollupContract-code-learning/14168cd267b20a3704d510c801770c3a7cdf3440/js/client/src/assets/loading.gif -------------------------------------------------------------------------------- /js/client/src/assets/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LuozhuZhang/sourceCode-zkSync-era-rollupContract-code-learning/14168cd267b20a3704d510c801770c3a7cdf3440/js/client/src/assets/logo.png -------------------------------------------------------------------------------- /js/client/src/main.js: -------------------------------------------------------------------------------- 1 | import Vue from 'vue' 2 | import BootstrapVue from "bootstrap-vue" 3 | import "bootstrap/dist/css/bootstrap.min.css" 4 | import "bootstrap-vue/dist/bootstrap-vue.css" 5 | 6 | import store from './store' 7 | import Eth from 'ethjs' 8 | import {ethers} from 'ethers' 9 | import axios from 'axios' 10 | import url from 'url' 11 | import config from './env-config' 12 | 13 | import Router from 'vue-router' 14 | import App from './App.vue' 15 | import Login from './Login.vue' 16 | import Wallet from './Wallet.vue' 17 | 18 | Vue.use(Router) 19 | Vue.use(BootstrapVue) 20 | 21 | const routes = [ 22 | { path: '/login', component: Login }, 23 | { path: '/wallet', component: Wallet }, 
24 | { path: '*', redirect: '/login' }, 25 | ] 26 | 27 | const router = new Router({ 28 | routes, 29 | mode: 'history', 30 | base: '/client', 31 | }) 32 | 33 | Vue.mixin({ 34 | computed: { 35 | store: () => store, 36 | isDev: () => process.env.NODE_ENV === 'development', 37 | apiServer() { return this.store.config.API_SERVER }, 38 | }, 39 | }) 40 | 41 | import ABI from './contract' 42 | 43 | window.app = new Vue({ 44 | el: '#app', 45 | router, 46 | data: () => ({ 47 | storeMain: store 48 | }), 49 | async created() { 50 | this.store.config = config 51 | 52 | let regex = /(?:api-)*(\w*)(?:\..*)*/ 53 | this.store.network = 54 | regex.exec(url.parse(this.store.config.API_SERVER).host)[1] 55 | 56 | // read store.account from local storage? 57 | if (typeof window.web3 !== 'undefined') { 58 | window.eth = new Eth(web3.currentProvider) 59 | window.ethersProvider = new ethers.providers.Web3Provider(web3.currentProvider) 60 | } 61 | if (!store.account.address) { 62 | this.$router.push('/login') 63 | } 64 | }, 65 | render: h => h(App) 66 | }) 67 | 68 | // debug utils 69 | 70 | window.BN = require('bn.js') 71 | window.Buffer = require('buffer/').Buffer 72 | window.store = store 73 | window.p = { 74 | // promise printer for debugging in console 75 | set p(promise) { 76 | promise.then(r => console.log(r) ) 77 | }, 78 | } -------------------------------------------------------------------------------- /js/client/src/store.js: -------------------------------------------------------------------------------- 1 | const store = { 2 | config: null, 3 | contractAddress: null, 4 | account: { 5 | // ethereum part 6 | address: null, 7 | balance: null, 8 | onchain: { 9 | //isClosing: false, 10 | balance: null, 11 | completeWithdrawArgs: null, 12 | }, 13 | plasma: { 14 | id: null, 15 | closing: false, 16 | key: null, 17 | pending_nonce: 0, 18 | pending: { 19 | nonce: 0 20 | }, 21 | committed: { 22 | balance: null, 23 | nonce: 0 24 | }, 25 | verified: { 26 | balance: null, 27 | nonce: 0 28 | } 29 | } 30 | } 31 | } 32 | 33 | export default store -------------------------------------------------------------------------------- /js/client/webpack.config.js: -------------------------------------------------------------------------------- 1 | var path = require('path') 2 | var webpack = require('webpack') 3 | const ExtractTextPlugin = require("extract-text-webpack-plugin") 4 | const HtmlWebpackPlugin = require('html-webpack-plugin') 5 | 6 | module.exports = { 7 | entry: './src/main.js', 8 | output: { 9 | path: path.resolve(__dirname, './dist'), 10 | publicPath: '/client/dist/', 11 | filename: 'build.js' 12 | }, 13 | module: { 14 | rules: [ 15 | { 16 | test: /\.vue$/, 17 | loader: 'vue-loader', 18 | options: { 19 | loaders: { 20 | } 21 | // other vue-loader options go here 22 | } 23 | }, 24 | { 25 | test: /\.js$/, 26 | loader: 'babel-loader', 27 | exclude: /node_modules/ 28 | }, 29 | { 30 | test: /\.css$/, 31 | use: ExtractTextPlugin.extract({ 32 | fallback: "style-loader", 33 | use: "css-loader" 34 | }) 35 | }, 36 | { 37 | test: /\.(png|jpg|gif|svg)$/, 38 | loader: 'file-loader', 39 | options: { 40 | name: '[name].[ext]?[hash]' 41 | } 42 | } 43 | ] 44 | }, 45 | resolve: { 46 | alias: { 47 | 'vue$': 'vue/dist/vue.esm.js' 48 | } 49 | }, 50 | devServer: { 51 | historyApiFallback: true, 52 | noInfo: true, 53 | compress: true, 54 | disableHostCheck: true // That solved it 55 | }, 56 | performance: { 57 | hints: false 58 | }, 59 | plugins: [ 60 | new ExtractTextPlugin("main.css"), 61 | new HtmlWebpackPlugin({ 62 | template: 
'index.html' 63 | }), 64 | ], 65 | devtool: '#eval-source-map' 66 | } 67 | 68 | if (process.env.NODE_ENV === 'production') { 69 | module.exports.devtool = '#source-map' 70 | // http://vue-loader.vuejs.org/en/workflow/production.html 71 | module.exports.plugins = (module.exports.plugins || []).concat([ 72 | new webpack.DefinePlugin({ 73 | 'process.env': { 74 | NODE_ENV: '"production"' 75 | } 76 | }), 77 | new webpack.optimize.UglifyJsPlugin({ 78 | sourceMap: true, 79 | compress: { 80 | warnings: false 81 | } 82 | }), 83 | new webpack.LoaderOptionsPlugin({ 84 | minimize: true 85 | }) 86 | ]) 87 | } 88 | -------------------------------------------------------------------------------- /js/client/webpack.config.prod.js: -------------------------------------------------------------------------------- 1 | var path = require('path') 2 | var webpack = require('webpack') 3 | const ExtractTextPlugin = require('extract-text-webpack-plugin') 4 | const HtmlWebpackPlugin = require('html-webpack-plugin') 5 | const CleanWebpackPlugin = require('clean-webpack-plugin'); 6 | 7 | module.exports = { 8 | entry: './src/main.js', 9 | output: { 10 | path: path.resolve(__dirname, './dist'), 11 | publicPath: '/client/dist/', 12 | filename: '[name].[chunkhash].js' 13 | }, 14 | module: { 15 | rules: [ 16 | { 17 | test: /\.vue$/, 18 | loader: 'vue-loader', 19 | options: { 20 | loaders: { 21 | } 22 | // other vue-loader options go here 23 | } 24 | }, 25 | { 26 | test: /\.js$/, 27 | loader: 'babel-loader', 28 | exclude: /node_modules/ 29 | }, 30 | { 31 | test: /\.css$/, 32 | use: ExtractTextPlugin.extract({ 33 | fallback: "style-loader", 34 | use: "css-loader" 35 | }) 36 | }, 37 | { 38 | test: /\.(png|jpg|gif|svg)$/, 39 | loader: 'file-loader', 40 | options: { 41 | name: '[name].[ext]?[hash]' 42 | } 43 | } 44 | ] 45 | }, 46 | resolve: { 47 | alias: { 48 | 'vue$': 'vue/dist/vue.esm.js' 49 | } 50 | }, 51 | devServer: { 52 | historyApiFallback: true, 53 | noInfo: true, 54 | compress: true, 55 | disableHostCheck: true // That solved it 56 | }, 57 | performance: { 58 | hints: false 59 | }, 60 | plugins: [ 61 | new ExtractTextPlugin("main.[contenthash].css"), 62 | new HtmlWebpackPlugin({ 63 | template: 'index.html' 64 | }), 65 | new CleanWebpackPlugin(), 66 | ], 67 | devtool: '#eval-source-map' 68 | } 69 | 70 | if (process.env.NODE_ENV === 'production') { 71 | module.exports.devtool = '#source-map' 72 | // http://vue-loader.vuejs.org/en/workflow/production.html 73 | module.exports.plugins = (module.exports.plugins || []).concat([ 74 | new webpack.DefinePlugin({ 75 | 'process.env': { 76 | NODE_ENV: '"production"' 77 | } 78 | }), 79 | new webpack.optimize.UglifyJsPlugin({ 80 | sourceMap: true, 81 | compress: { 82 | warnings: false 83 | } 84 | }), 85 | new webpack.LoaderOptionsPlugin({ 86 | minimize: true 87 | }) 88 | ]) 89 | } 90 | -------------------------------------------------------------------------------- /js/explorer/.babelrc: -------------------------------------------------------------------------------- 1 | { 2 | "presets": [ 3 | ["env", { "modules": false }] 4 | ], 5 | "plugins": [ 6 | ["transform-runtime", { 7 | "polyfill": false, 8 | "regenerator": true 9 | }] 10 | ] 11 | } 12 | -------------------------------------------------------------------------------- /js/explorer/README.md: -------------------------------------------------------------------------------- 1 | # Franklin Block Explorer 2 | -------------------------------------------------------------------------------- /js/explorer/index.html: 
-------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | Matter Explorer 7 | 8 | 9 | 10 | 11 |
12 | 13 | 14 | 15 | -------------------------------------------------------------------------------- /js/explorer/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "franklin-explorer", 3 | "description": "Franklin network explorer", 4 | "version": "1.0.0", 5 | "author": "Alex ", 6 | "private": true, 7 | "scripts": { 8 | "dev": "webpack-dev-server --port 9000 --open --hot --define process.env.NODE_ENV='\"development\"'", 9 | "build": "NODE_ENV=production webpack --config webpack.config.prod.js --progress --hide-modules" 10 | }, 11 | "dependencies": { 12 | "axios": "^0.18.0", 13 | "babel-plugin-transform-runtime": "^6.23.0", 14 | "bn.js": "^4.11.8", 15 | "bootstrap": "^4.3.1", 16 | "bootstrap-vue": "2.0.0-rc.20", 17 | "buffer": "^5.2.1", 18 | "clean-webpack-plugin": "^2.0.2", 19 | "ethers": "^4.0.27", 20 | "ethjs": "^0.4.0", 21 | "ethjs-util": "^0.1.6", 22 | "html-webpack-plugin": "^3.2.0", 23 | "js-sha3": "^0.8.0", 24 | "mini-css-extract-plugin": "^0.6.0", 25 | "popper.js": "^1.12.9", 26 | "vue": "^2.6.10", 27 | "vue-router": "^3.0.2", 28 | "vue-timers": "^2.0.2", 29 | "webpack-md5-hash": "^0.0.6", 30 | "webpack-node-externals": "^1.7.2", 31 | "webpack-plugin-hash-output": "^3.2.1" 32 | }, 33 | "devDependencies": { 34 | "babel-core": "^6.26.0", 35 | "babel-loader": "^6.0.0", 36 | "babel-preset-env": "^1.6.1", 37 | "cross-env": "^5.2.0", 38 | "css-loader": "^0.28.7", 39 | "extract-text-webpack-plugin": "^3.0.2", 40 | "file-loader": "^1.1.5", 41 | "style-loader": "^0.19.0", 42 | "vue-loader": "^13.5.0", 43 | "vue-template-compiler": "^2.5.3", 44 | "webpack": "^3.8.1", 45 | "webpack-dev-server": "^2.9.4" 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /js/explorer/src/App.vue: -------------------------------------------------------------------------------- 1 | 4 | 5 | 11 | 12 | -------------------------------------------------------------------------------- /js/explorer/src/Transaction.vue: -------------------------------------------------------------------------------- 1 | 21 | 22 | 80 | 81 | -------------------------------------------------------------------------------- /js/explorer/src/TransactionList.vue: -------------------------------------------------------------------------------- 1 | 4 | 5 | 19 | 20 | -------------------------------------------------------------------------------- /js/explorer/src/assets/loading.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LuozhuZhang/sourceCode-zkSync-era-rollupContract-code-learning/14168cd267b20a3704d510c801770c3a7cdf3440/js/explorer/src/assets/loading.gif -------------------------------------------------------------------------------- /js/explorer/src/assets/logo.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LuozhuZhang/sourceCode-zkSync-era-rollupContract-code-learning/14168cd267b20a3704d510c801770c3a7cdf3440/js/explorer/src/assets/logo.jpg -------------------------------------------------------------------------------- /js/explorer/src/assets/logo0.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LuozhuZhang/sourceCode-zkSync-era-rollupContract-code-learning/14168cd267b20a3704d510c801770c3a7cdf3440/js/explorer/src/assets/logo0.png -------------------------------------------------------------------------------- 
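The explorer's package.json above defines the same `dev`/`build` workflow as the client's; a minimal local run might look like this (a sketch, assuming dependencies install cleanly and an API server is reachable at the `API_SERVER` value provided by `env-config`):

```
cd js/explorer
yarn            # install dependencies
yarn run dev    # webpack-dev-server on port 9000, per the "dev" script above
yarn run build  # production bundle in dist/, served by nginx under /explorer/dist/
```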
/js/explorer/src/client.js: -------------------------------------------------------------------------------- 1 | import axios from 'axios' 2 | import store from './store' 3 | 4 | async function fetch(req) { 5 | let r = await axios(req) 6 | if (r.status == 200) { 7 | return r.data 8 | } else { 9 | return null 10 | } 11 | } 12 | 13 | function baseUrl() { 14 | return store.config.API_SERVER + '/api/v0.1' //'http://localhost:3000/api/v0.1' 15 | } 16 | 17 | let self = { 18 | 19 | PAGE_SIZE: 20, // blocks per page 20 | 21 | TX_PER_BLOCK() { 22 | return store.config.TRANSFER_BATCH_SIZE 23 | }, 24 | 25 | async status() { 26 | return fetch({ 27 | method: 'get', 28 | url: `${baseUrl()}/status`, 29 | }) 30 | }, 31 | 32 | async loadBlocks(max) { 33 | return fetch({ 34 | method: 'get', 35 | url: `${baseUrl()}/blocks?max_block=${max}&limit=${self.PAGE_SIZE}`, 36 | }) 37 | }, 38 | 39 | async getBlock(blockNumber) { 40 | return fetch({ 41 | method: 'get', 42 | url: `${baseUrl()}/blocks/${blockNumber}`, 43 | }) 44 | }, 45 | 46 | async getBlockTransactions(blockNumber) { 47 | return fetch({ 48 | method: 'get', 49 | url: `${baseUrl()}/blocks/${blockNumber}/transactions`, 50 | }) 51 | }, 52 | 53 | async searchBlock(query) { 54 | return fetch({ 55 | method: 'get', 56 | url: `${baseUrl()}/search?query=${query}`, 57 | }) 58 | }, 59 | } 60 | 61 | window.client = self 62 | 63 | export default self -------------------------------------------------------------------------------- /js/explorer/src/main.js: -------------------------------------------------------------------------------- 1 | import Vue from 'vue' 2 | import BootstrapVue from "bootstrap-vue" 3 | import "bootstrap/dist/css/bootstrap.min.css" 4 | import "bootstrap-vue/dist/bootstrap-vue.css" 5 | 6 | import store from './store' 7 | 8 | import Router from 'vue-router' 9 | import App from './App.vue' 10 | import Home from './Home.vue' 11 | import Block from './Block.vue' 12 | import Transaction from './Transaction.vue' 13 | 14 | import axios from 'axios' 15 | import url from 'url' 16 | import config from './env-config' 17 | import VueTimers from 'vue-timers' 18 | 19 | const ethers = require('ethers') 20 | 21 | Vue.use(VueTimers) 22 | Vue.use(Router) 23 | Vue.use(BootstrapVue) 24 | 25 | const routes = [ 26 | { path: '/', component: Home }, 27 | { path: '/blocks/:blockNumber', component: Block }, 28 | { path: '/transactions/:id', component: Transaction }, 29 | ] 30 | 31 | const router = new Router({ 32 | routes, // short for `routes: routes` 33 | mode: 'history', 34 | base: '/explorer' 35 | }) 36 | 37 | Vue.mixin({ 38 | data() { 39 | return { 40 | store 41 | } 42 | }, 43 | methods: { 44 | formatFranklin(value) { 45 | return ethers.utils.formatEther(ethers.utils.bigNumberify(value).mul(1000000000000)) 46 | }, 47 | // parseFranklin(value) { 48 | // return ethers.utils.parseEther(value).div(1) 49 | // }, 50 | }, 51 | computed: { 52 | etherscan() { 53 | if (this.store.network === 'localhost') return 'http://localhost:4000' 54 | return 'https://' + (this.store.network === 'mainnet' ? 
'' : `${this.store.network}.`) + 'etherscan.io' 55 | }, 56 | }, 57 | }) 58 | 59 | window.app = new Vue({ 60 | el: '#app', 61 | router, 62 | async created() { 63 | this.store.config = config 64 | let regex = /(?:api-)*(\w*)(?:\..*)*/ 65 | this.store.network = 66 | regex.exec(url.parse(this.store.config.API_SERVER).host)[1] 67 | }, 68 | render: h => h(App) 69 | }) 70 | 71 | // debug utils 72 | 73 | window.store = store 74 | window.ethers = ethers 75 | window.p = { 76 | // promise printer for debugging in console 77 | set p(promise) { 78 | promise.then(r => console.log(r) ) 79 | }, 80 | } -------------------------------------------------------------------------------- /js/explorer/src/store.js: -------------------------------------------------------------------------------- 1 | const store = { 2 | contractAddress: null, 3 | config: null, 4 | network: null, 5 | } 6 | 7 | export default store 8 | -------------------------------------------------------------------------------- /js/explorer/webpack.config.js: -------------------------------------------------------------------------------- 1 | var path = require('path') 2 | var webpack = require('webpack') 3 | const ExtractTextPlugin = require("extract-text-webpack-plugin") 4 | const HtmlWebpackPlugin = require('html-webpack-plugin') 5 | 6 | module.exports = { 7 | entry: './src/main.js', 8 | output: { 9 | path: path.resolve(__dirname, './dist'), 10 | publicPath: '/explorer/dist/', 11 | filename: 'build.js' 12 | }, 13 | module: { 14 | rules: [ 15 | { 16 | test: /\.vue$/, 17 | loader: 'vue-loader', 18 | options: { 19 | loaders: { 20 | } 21 | // other vue-loader options go here 22 | } 23 | }, 24 | { 25 | test: /\.js$/, 26 | loader: 'babel-loader', 27 | exclude: /node_modules/ 28 | }, 29 | { 30 | test: /\.css$/, 31 | use: ExtractTextPlugin.extract({ 32 | fallback: "style-loader", 33 | use: "css-loader" 34 | }) 35 | }, 36 | { 37 | test: /\.(png|jpg|gif|svg)$/, 38 | loader: 'file-loader', 39 | options: { 40 | name: '[name].[ext]?[hash]' 41 | } 42 | } 43 | ] 44 | }, 45 | resolve: { 46 | alias: { 47 | 'vue$': 'vue/dist/vue.esm.js' 48 | } 49 | }, 50 | devServer: { 51 | historyApiFallback: true, 52 | noInfo: true, 53 | compress: true, 54 | disableHostCheck: true // That solved it 55 | }, 56 | performance: { 57 | hints: false 58 | }, 59 | plugins: [ 60 | new ExtractTextPlugin("main.css"), 61 | new HtmlWebpackPlugin({ 62 | template: 'index.html' 63 | }), 64 | ], 65 | devtool: '#eval-source-map' 66 | } 67 | 68 | if (process.env.NODE_ENV === 'production') { 69 | module.exports.devtool = '#source-map' 70 | // http://vue-loader.vuejs.org/en/workflow/production.html 71 | module.exports.plugins = (module.exports.plugins || []).concat([ 72 | new webpack.DefinePlugin({ 73 | 'process.env': { 74 | NODE_ENV: '"production"' 75 | } 76 | }), 77 | new webpack.optimize.UglifyJsPlugin({ 78 | sourceMap: true, 79 | compress: { 80 | warnings: false 81 | } 82 | }), 83 | new webpack.LoaderOptionsPlugin({ 84 | minimize: true 85 | }) 86 | ]) 87 | } 88 | -------------------------------------------------------------------------------- /js/explorer/webpack.config.prod.js: -------------------------------------------------------------------------------- 1 | var path = require('path') 2 | var webpack = require('webpack') 3 | const ExtractTextPlugin = require('extract-text-webpack-plugin') 4 | const HtmlWebpackPlugin = require('html-webpack-plugin') 5 | const CleanWebpackPlugin = require('clean-webpack-plugin'); 6 | 7 | module.exports = { 8 | entry: './src/main.js', 9 | output: { 10 | 
path: path.resolve(__dirname, './dist'), 11 | publicPath: '/explorer/dist/', 12 | filename: '[name].[chunkhash].js' 13 | }, 14 | module: { 15 | rules: [ 16 | { 17 | test: /\.vue$/, 18 | loader: 'vue-loader', 19 | options: { 20 | loaders: { 21 | } 22 | // other vue-loader options go here 23 | } 24 | }, 25 | { 26 | test: /\.js$/, 27 | loader: 'babel-loader', 28 | exclude: /node_modules/ 29 | }, 30 | { 31 | test: /\.css$/, 32 | use: ExtractTextPlugin.extract({ 33 | fallback: "style-loader", 34 | use: "css-loader" 35 | }) 36 | }, 37 | { 38 | test: /\.(png|jpg|gif|svg)$/, 39 | loader: 'file-loader', 40 | options: { 41 | name: '[name].[ext]?[hash]' 42 | } 43 | } 44 | ] 45 | }, 46 | resolve: { 47 | alias: { 48 | 'vue$': 'vue/dist/vue.esm.js' 49 | } 50 | }, 51 | devServer: { 52 | historyApiFallback: true, 53 | noInfo: true, 54 | compress: true, 55 | disableHostCheck: true // That solved it 56 | }, 57 | performance: { 58 | hints: false 59 | }, 60 | plugins: [ 61 | new ExtractTextPlugin("main.[contenthash].css"), 62 | new HtmlWebpackPlugin({ 63 | template: 'index.html' 64 | }), 65 | new CleanWebpackPlugin(), 66 | ], 67 | devtool: '#eval-source-map' 68 | } 69 | 70 | if (process.env.NODE_ENV === 'production') { 71 | module.exports.devtool = '#source-map' 72 | // http://vue-loader.vuejs.org/en/workflow/production.html 73 | module.exports.plugins = (module.exports.plugins || []).concat([ 74 | new webpack.DefinePlugin({ 75 | 'process.env': { 76 | NODE_ENV: '"production"' 77 | } 78 | }), 79 | new webpack.optimize.UglifyJsPlugin({ 80 | sourceMap: true, 81 | compress: { 82 | warnings: false 83 | } 84 | }), 85 | new webpack.LoaderOptionsPlugin({ 86 | minimize: true 87 | }) 88 | ]) 89 | } 90 | -------------------------------------------------------------------------------- /js/franklin/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "franklin", 3 | "version": "1.0.0", 4 | "description": "Franklin JS client library", 5 | "main": "src/franklin.js", 6 | "author": "Alex Gluchowski", 7 | "license": "ISC", 8 | "dependencies": { 9 | "axios": "^0.18.0", 10 | "buffer": "^5.2.1", 11 | "ethers": "^4.0.20", 12 | "ethjs": "^0.4.0", 13 | "ethjs-util": "^0.1.6", 14 | "js-sha3": "^0.8.0" 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /js/franklin/src/test.js: -------------------------------------------------------------------------------- 1 | const ethers = require('ethers') 2 | 3 | const provider = new ethers.providers.JsonRpcProvider() 4 | const sleep = async ms => await new Promise(resolve => setTimeout(resolve, ms)) 5 | 6 | let source = ethers.Wallet.fromMnemonic(process.env.MNEMONIC, "m/44'/60'/0'/0/0").connect(provider) 7 | let sourceNonce = null 8 | 9 | const PlasmaContractABI = require('../abi/PlasmaContract.json').abi 10 | const contract = new ethers.Contract(process.env.CONTRACT_ADDR, PlasmaContractABI, provider) 11 | const paddingPubKey = JSON.parse(process.env.PADDING_PUB_KEY); 12 | 13 | (async function() { 14 | 15 | sourceNonce = await source.getTransactionCount("pending") 16 | 17 | console.log('starting...') 18 | 19 | // First 4 bytes of the hash of "fee()" for the sighash selector 20 | //let data = ethers.utils.hexDataSlice(ethers.utils.id('exitor()'), 0, 4); 21 | let data = ethers.utils.hexDataSlice(ethers.utils.id('x()'), 0, 4); 22 | let to = process.env.CONTRACT_ADDR 23 | let tx = {to, data} 24 | 25 | try { 26 | let r = await provider.call(tx); 27 | console.log('r', r) 28 | } catch (error) { 
29 | // error.reason is now populated with the REVERT reason 30 | console.log("Failure reason:", error); 31 | } 32 | 33 | })() 34 | -------------------------------------------------------------------------------- /js/loadtest/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "loadtest", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "loadtest.js", 6 | "scripts": { 7 | "test": ".setup_env && node loadtest.js" 8 | }, 9 | "author": "", 10 | "license": "ISC", 11 | "dependencies": { 12 | "axios": "^0.18.0", 13 | "buffer": "^5.2.1", 14 | "ethers": "^4.0.20", 15 | "ethjs": "^0.4.0", 16 | "ethjs-util": "^0.1.6", 17 | "js-sha3": "^0.8.0", 18 | "prando": "^5.1.0" 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /js/loadtest/rescue.js: -------------------------------------------------------------------------------- 1 | const ethers = require('ethers') 2 | 3 | const bn = ethers.utils.bigNumberify; 4 | 5 | const gasPriceScaling = bn(12).add(bn(1)); 6 | 7 | async function rescue() { 8 | console.log("This is intended to run on mainnet only!"); 9 | const web3Url = process.env.WEB3_URL; 10 | let privateKey = process.env.PRIVATE_KEY; 11 | const saveAddress = process.env.FUNDING_ADDR; 12 | 13 | // const web3Url = "http://localhost:8545"; 14 | // let privateKey = "27593fea79697e947890ecbecce7901b0008345e5d7259710d0dd5e500d040be"; 15 | if (privateKey === undefined || web3Url === undefined) { 16 | console.log("Missing private key or web3 URL in environment"); 17 | return; 18 | } 19 | if (! privateKey.startsWith("0x")) { 20 | privateKey = "0x" + privateKey; 21 | } 22 | const provider = new ethers.providers.JsonRpcProvider(web3Url); 23 | const source = new ethers.Wallet(privateKey, provider); 24 | const address = source.address; 25 | 26 | console.log(address) 27 | //process.exit(0) 28 | 29 | source.connect(provider); 30 | 31 | let gasPrice = await provider.getGasPrice(); 32 | console.log("Current gas price is " + gasPrice.div(bn(1000000000)).toString() + " GWei"); 33 | 34 | gasPrice = gasPrice.mul(gasPriceScaling); 35 | 36 | let latestNonce = await provider.getTransactionCount(address, "latest"); 37 | let pendingNonce = await provider.getTransactionCount(address, "pending"); 38 | let balance = await provider.getBalance(address, "pending"); 39 | 40 | console.log('Nonce: latest = ', latestNonce, ', pending = ', pendingNonce, ', pending balance = ', ethers.utils.formatEther(balance)); 41 | console.log('Saving funds to', saveAddress); 42 | 43 | // if (latestNonce === pendingNonce) { 44 | // console.log("No transactions to replace"); 45 | // return; 46 | // } 47 | 48 | for (let i = 218; i <= 218 + 10; i++) { 49 | console.log("Replacing nonce = " + i); 50 | try { 51 | let gasLimit = 21000 52 | let value = ethers.utils.parseEther('1.8') //balance.sub(gasPrice.mul(gasLimit)).sub(ethers.utils.parseEther('0.1')) 53 | let result = await source.sendTransaction( 54 | { 55 | to: address, 56 | nonce: i, 57 | gasPrice, 58 | gasLimit, 59 | //value, 60 | } 61 | ); 62 | console.log("Successfully sent with hash " + result.hash); 63 | console.log("Used gas price " + gasPrice.div(bn(1000000000)).toString() + " GWei and limit 21000"); 64 | } catch(error) { 65 | if (error.transactionHash !== undefined) { 66 | console.log("There may have been a network error sending the transaction; replacement hash = " + error.transactionHash); 67 | console.log('Reason:', error.reason); 68 | } else { 69 | console.log(error);
70 | } 71 | } 72 | } 73 | 74 | } 75 | 76 | rescue().then(() => { 77 | console.log("Done"); 78 | }); -------------------------------------------------------------------------------- /yarn.lock: -------------------------------------------------------------------------------- 1 | # THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 2 | # yarn lockfile v1 3 | 4 | 5 | --------------------------------------------------------------------------------
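To exercise the services defined in docker-compose.yml together on one machine, a rough local smoke test might look like this (a sketch, assuming the images have been built or pulled and that `ENV_FILE` points at a filled-in copy of `etc/env/dev.env.example`):

```
export ENV_FILE=./etc/env/dev.env          # assumed local copy of etc/env/dev.env.example
docker-compose up -d geth postgres         # chain and database first
docker-compose up -d server prover nginx   # then the API server, prover and static frontends
curl http://localhost:3000/api/v0.1/status # status endpoint used by the explorer (js/explorer/src/client.js)
```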