├── .env ├── .github └── workflows │ └── pull_request.yml ├── .gitignore ├── .husky └── pre-commit ├── .prettierrc.json ├── LICENSE ├── README.md ├── jest.config.js ├── package-lock.json ├── package.json ├── script ├── config.js ├── deploy.sh ├── depracated │ └── compile_ton.js ├── generate-private-key.js └── prepare-ton-release.js ├── src ├── binaries.ts ├── cell-builders.ts ├── check-prerequisites.ts ├── controller.ts ├── deploy-controller.ts ├── dynamic-importer.ts ├── firebase-provider.ts ├── ipfs-code-storage-provider.ts ├── latest-known-contracts.ts ├── logger.ts ├── req-id-middleware.ts ├── server.ts ├── source-verifier │ ├── fift-source-verifier.ts │ ├── func-source-verifier.ts │ ├── funcjs-source-verifier.ts │ ├── res │ │ ├── tact141pkg.ts │ │ ├── tact162pkg.ts │ │ └── tact163pkg.ts │ ├── tact-source-verifier.spec.ts │ ├── tact-source-verifier.ts │ ├── tolk-source-verifier.spec.ts │ └── tolk-source-verifier.ts ├── supported-versions-reader.ts ├── ton-reader-client.ts ├── types.ts ├── utils.ts ├── validate-message-cell.ts └── wrappers │ ├── source-item.ts │ ├── sources-registry.ts │ └── verifier-registry.ts ├── test └── controller.spec.ts └── tsconfig.json /.env: -------------------------------------------------------------------------------- 1 | SOURCES_REGISTRY=EQD-BJSVUJviud_Qv7Ymfd3qzXdrmV525e3YDzWQoHIAiInL 2 | VERIFIER_ID=orbs.com 3 | IPFS_PROVIDER=tonsource.infura-ipfs.io 4 | COMPILE_TIMEOUT=5000 5 | LEGACY_FUNC_COMPILER=false -------------------------------------------------------------------------------- /.github/workflows/pull_request.yml: -------------------------------------------------------------------------------- 1 | name: Build PR (Standard) 2 | 3 | on: 4 | pull_request: 5 | branches: ["main"] 6 | 7 | jobs: 8 | build: 9 | runs-on: ubuntu-latest 10 | steps: 11 | - uses: actions/checkout@v3 12 | - name: Use Node.js ${{ matrix.node_version }} 13 | uses: actions/setup-node@v3 14 | with: 15 | node-version: ${{ matrix.node_version }} 16 | cache: "npm" 17 | - run: npm ci 18 | - run: npm run test 19 | - run: CI=false npm run build 20 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | secrets/ 2 | node_modules 3 | .DS_Store 4 | *.cell 5 | *.fif 6 | .env.local 7 | tmp/ 8 | resources/ 9 | coverage/ 10 | .secret 11 | secrets-config.json 12 | *.fc 13 | release/ 14 | compilers/ -------------------------------------------------------------------------------- /.husky/pre-commit: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env sh 2 | . 
"$(dirname -- "$0")/_/husky.sh" 3 | 4 | npx pretty-quick --staged 5 | -------------------------------------------------------------------------------- /.prettierrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "trailingComma": "all", 3 | "jsxBracketSameLine": true, 4 | "printWidth": 100, 5 | "semi": true 6 | } 7 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2022 orbs.com 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # contract-verifier-backend 2 | 3 | A backend used for compiling FunC code and returning a signed message containing a proof of verification to be forwarded to the [Sources Registry](https://github.com/ton-blockchain/TEPs/pull/91) contract. 4 | The signed message is later stored on a source item contract as a proof that the source code compiles to a specific code cell hash. 5 | 6 | ## Related repositories 7 | 8 | This repo is a part of the following: 9 | 10 | 1. [contract-verifier-contracts](https://github.com/ton-community/contract-verifier-contracts) - Sources registry contracts which stores an on-chain proof per code cell hash. 11 | 2. contract-verifier-backend (this repo) - Backend for compiling FunC and returning a signature over a message containing the resulting code cell hash. 12 | 3. [contract-verifier-sdk](https://github.com/ton-community/contract-verifier-sdk) - A UI component to fetch and display sources from Ton blockchain and IPFS, including code highlighting. 13 | 4. [contract-verifier](https://github.com/ton-community/contract-verifier) - A UI app to interact with the backend, contracts and publish an on-chain proof. 14 | 15 | ## Configurations 16 | 17 | The backend supports compiling in func in all versions stated in the [config file](https://github.com/ton-community/contract-verifier-config/blob/main/config.json) 18 | 19 | ## Preqrequisites 20 | 21 | ### Binaries + fiftlib 22 | 23 | Binaries can be acquired (precompiled) from [ton-binaries](https://github.com/ton-defi-org/ton-binaries) repo or from the official [ton repo](https://github.com/ton-blockchain/ton). 24 | 25 | Fiftlib can also be acquired from the same repo. 
26 | 27 | #### Heroku 28 | 29 | To deploy on heroku, you can use the [func compilation buildpack](https://github.com/ton-defi-org/heroku-buildpack-func-compiler/). 30 | 31 | #### Locally 32 | 33 | - Ensure you have working binaries for func 0.2.0/0.3.0/0.4.0 + fiftlib in this format: 34 | 35 | ``` 36 | resources/ 37 | binaries/ 38 | 0.2.0/fift 39 | 0.2.0/func 40 | 0.2.0/fiftlib (directory) 41 | 0.3.0/fift 42 | 0.3.0/func 43 | 0.3.0/fiftlib (directory) 44 | ``` 45 | 46 | ### Environment variables 47 | 48 | - `INFURA_ID` and `INFURA_SECRET` - The backend persists sources and compilation metadata to an infura IPFS node. 49 | - `PRIVATE_KEY` - To sign its message cell with a private key, which is verified by the [verifier registry](https://github.com/ton-blockchain/TEPs/pull/91). Provide an ED25519 compatible private key. 50 | - `SOURCES_REGISTRY` - The address of the sources registry contract (default from .env) 51 | - `VERIFIER_ID` - Sources verifier id (default from .env) 52 | 53 | ## Running 54 | 55 | - `npm install` 56 | - `npm run start` 57 | 58 | ## License 59 | 60 | MIT 61 | -------------------------------------------------------------------------------- /jest.config.js: -------------------------------------------------------------------------------- 1 | /** @type {import('ts-jest/dist/types').InitialOptionsTsJest} */ 2 | module.exports = { 3 | preset: 'ts-jest', 4 | testEnvironment: 'node', 5 | }; -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "ton-src-backend", 3 | "dependencies": { 4 | "@babel/preset-env": "^7.18.6", 5 | "@orbs-network/ton-access": "^2.3.3", 6 | "@ton-community/contract-verifier-sdk": "^1.1.2", 7 | "@ton-community/func-js": "^0.9.1", 8 | "async": "^3.2.4", 9 | "base64url": "^3.0.1", 10 | "bigint-buffer": "^1.1.5", 11 | "cors": "^2.8.5", 12 | "dotenv": "^16.0.1", 13 | "express": "^4.18.1", 14 | "express-async-errors": "^3.1.1", 15 | "express-rate-limit": "^6.5.1", 16 | "express-request-id": "^2.0.1", 17 | "firebase": "^11.7.1", 18 | "firebase-admin": "^13.4.0", 19 | "ipfs-http-client": "^56.0.0", 20 | "ipfs-only-hash": "^4.0.0", 21 | "ipfs-utils": "^9.0.14", 22 | "mkdirp": "^1.0.4", 23 | "multer": "^1.4.5-lts.1", 24 | "promise-retry": "^2.0.1", 25 | "randomstring": "^1.3.0", 26 | "tact-1.4.0": "npm:@tact-lang/compiler@1.4.0", 27 | "tact-1.4.1": "npm:@tact-lang/compiler@1.4.1", 28 | "tact-1.6.7": "npm:@tact-lang/compiler@1.6.7", 29 | "tolk-0.12.0": "npm:@ton/tolk-js@0.12", 30 | "ton": "^13.4.1", 31 | "ton-core": "^0.49.0", 32 | "ts-node": "^10.9.1", 33 | "ts-sinon": "^2.0.2", 34 | "tweetnacl": "^1.0.3", 35 | "unzipper": "^0.12.3", 36 | "winston": "^3.17.0" 37 | }, 38 | "scripts": { 39 | "__prebuild": "tslint -p tsconfig.json --fix", 40 | "build": "tsc --noEmit", 41 | "test": "jest", 42 | "__prestart": "npm run build", 43 | "start": "ts-node --transpile-only src/server", 44 | "prepare": "husky install" 45 | }, 46 | "devDependencies": { 47 | "@types/async": "^3.2.16", 48 | "@types/cors": "^2.8.12", 49 | "@types/express": "^4.17.13", 50 | "@types/express-request-id": "^1.4.3", 51 | "@types/jest": "^28.1.6", 52 | "@types/mkdirp": "^1.0.2", 53 | "@types/multer": "^1.4.7", 54 | "@types/node": "^16.11.45", 55 | "@types/promise-retry": "^1.1.6", 56 | "@types/randomstring": "^1.3.0", 57 | "@types/semver": "^7.5.8", 58 | "husky": "^8.0.0", 59 | "jest-mock-extended": "^3.0.7", 60 | "ts-jest": "^28.0.7", 61 | "tslint": "^6.1.3", 62 | 
"typescript": "^5.8.3" 63 | }, 64 | "engines": { 65 | "node": "22.x" 66 | } 67 | } 68 | -------------------------------------------------------------------------------- /script/config.js: -------------------------------------------------------------------------------- 1 | const execSync = require("child_process").execSync; 2 | 3 | const config = require("../secrets-config.json"); 4 | 5 | for (const network of ["testnet", "mainnet"]) { 6 | for (const node of Object.keys(config[network].nodes)) { 7 | const env = Object.entries(config.shared.env); 8 | env.push(...Object.entries(config[network].env)); 9 | env.push(...Object.entries(config[network].nodes[node].env)); 10 | 11 | const cmd = `heroku config:set --remote ${node} ${env 12 | .map(([key, val]) => `${key}=${val}`) 13 | .join(" ")}`; 14 | 15 | let res = execSync(cmd); 16 | console.log(res.toString()); 17 | 18 | res = execSync(` 19 | heroku buildpacks:clear --remote ${node}; 20 | heroku buildpacks:add --remote ${node} https://github.com/ton-defi-org/heroku-buildpack-func-compiler.git; 21 | heroku buildpacks:add --remote ${node} heroku/nodejs; 22 | heroku stack:set heroku-22 --remote ${node};`); 23 | 24 | console.log(res.toString()); 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /script/deploy.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | if heroku whoami >/dev/null 2>&1; then 5 | echo "Heroku logged in as $(heroku whoami)" 6 | else 7 | echo "You must run 'heroku login' first." 8 | exit 1 9 | fi 10 | 11 | if [[ $1 == "testnet" ]]; then 12 | values=("prod-testnet-1") 13 | echo "Deploying to tesnet only!" 14 | else 15 | values=("prod-testnet-1" "prod-1" "prod-2" "prod-3") 16 | fi 17 | 18 | for heroku_app in "${values[@]}"; do 19 | echo "Processing heroku_app: $heroku_app" 20 | if ! git remote | grep "$heroku_app" >/dev/null; then 21 | echo "Adding remote" 22 | git remote add $heroku_app "https://git.heroku.com/ton-source-$heroku_app.git" 23 | fi 24 | CURR_BRANCH=$(git rev-parse --abbrev-ref HEAD) 25 | git push $heroku_app $CURR_BRANCH:main -f 26 | done 27 | -------------------------------------------------------------------------------- /script/depracated/compile_ton.js: -------------------------------------------------------------------------------- 1 | const { exec } = require("child_process"); 2 | 3 | function execP(command, cwd, streamFactor) { 4 | return new Promise((resolve, reject) => { 5 | const child = exec(command, { cwd: cwd || undefined }, (error, stdout, stderr) => { 6 | // if (error) { 7 | // reject(error); 8 | // return; 9 | resolve(`${stdout}${stderr}`.trim()); 10 | }); 11 | 12 | child.stdout.on("data", (data) => { 13 | if (Math.random() > streamFactor) { 14 | console.log(data.trim()); 15 | } 16 | }); 17 | }); 18 | } 19 | 20 | const tags = ["func-0.4.0", "func-0.3.0", "func-0.2.0"]; 21 | 22 | (async function () { 23 | for (const tag of tags) { 24 | await execP("rm -rf build; mkdir build"); 25 | 26 | await execP(`git checkout ${tag}`, "ton"); 27 | 28 | const output = await execP(`cmake -DCMAKE_BUILD_TYPE=Release ../ton`, "build"); 29 | if (!/Build files have been written to/.test(output)) { 30 | throw new Error("CMake failed"); 31 | } 32 | 33 | console.log("CMake done. building func"); 34 | const output2 = await execP( 35 | `cmake --build . 
-j16 --target fift func lite-client`, 36 | "build", 37 | 0.9, 38 | ); 39 | 40 | console.log(output2); 41 | 42 | if ( 43 | !output2.includes("Built target fift") || 44 | !output2.includes("Built target func") || 45 | !output2.includes("Built target lite-client") 46 | ) { 47 | throw new Error("Build fift/func/lite-client failed"); 48 | } 49 | 50 | await execP(`mkdir -p binaries/${tag}; cp build/crypto/func binaries/${tag}/func; 51 | cp build/crypto/fift binaries/${tag}/fift; cp build/lite-client/lite-client binaries/${tag}/lite-client; zip -r -j binaries/${tag}/fiftlib.zip ton/crypto/fift/lib/`); 52 | 53 | console.log("Done with " + tag); 54 | } 55 | })(); 56 | -------------------------------------------------------------------------------- /script/generate-private-key.js: -------------------------------------------------------------------------------- 1 | const nacl = require("tweetnacl"); 2 | 3 | const keypair = nacl.sign.keyPair(); 4 | const privKey = Buffer.from(keypair.secretKey).toString("base64"); 5 | const pubKey = Buffer.from(keypair.publicKey).toString("base64"); 6 | 7 | console.log(`Private key: ${privKey}`); 8 | console.log(`Public key: ${pubKey}`); 9 | -------------------------------------------------------------------------------- /script/prepare-ton-release.js: -------------------------------------------------------------------------------- 1 | const { exec } = require("child_process"); 2 | const path = require("path"); 3 | const fs = require("fs"); 4 | const unzipper = require("unzipper"); 5 | 6 | function execP(command, cwd, streamFactor) { 7 | return new Promise((resolve, reject) => { 8 | const child = exec(command, { cwd: cwd || undefined }, (error, stdout, stderr) => { 9 | resolve(`${stdout}${stderr}`.trim()); 10 | }); 11 | 12 | child.stdout.on("data", (data) => { 13 | if (Math.random() > streamFactor) { 14 | console.log(data.trim()); 15 | } 16 | }); 17 | }); 18 | } 19 | 20 | const tag = process.argv[2]; 21 | 22 | (async function () { 23 | if (!tag) { 24 | let validTags = "; source for valid tags: https://github.com/ton-blockchain/ton) "; 25 | try { 26 | validTags += 27 | "Example: " + 28 | ( 29 | await execP(`git ls-remote --tags https://github.com/ton-blockchain/ton.git | \ 30 | grep -o 'refs/tags/.*' | sed 's|refs/tags/||' | sed 's/\^{}//' | \ 31 | sort -V | tail -n 5`) 32 | ).replace(/\n/g, ", "); 33 | } catch (e) { 34 | console.log("nope"); 35 | } 36 | 37 | throw new Error(`Usage: node prepare_ton_release.js ${validTags}`); 38 | } 39 | 40 | const binariesUrlBase = `https://github.com/ton-blockchain/ton/releases/download/${tag}`; 41 | 42 | console.log(`Downloading binaries from: ${binariesUrlBase} ...`); 43 | 44 | const tonSrcFolderAbs = tag; 45 | 46 | const workFolder = path.join(process.cwd(), "release", tag); 47 | await execP(`mkdir -p ${workFolder}`); 48 | const urls = ["fift", "func", "lite-client"].map((cmd) => [ 49 | `${binariesUrlBase}/${cmd}-linux-x86_64`, 50 | cmd, 51 | ]); 52 | 53 | await Promise.all(urls.map(([url, cmd]) => execP(`curl -L ${url} > ${cmd}`, workFolder))); 54 | 55 | // await execP(`git checkout ${tag}`, tonSrcFolderAbs); 56 | // console.log(await execP(`zip -r -j ${workFolder}/fiftlib.zip crypto/fift/lib`, tonSrcFolderAbs)); 57 | // console.log(`zip -r -j ${workFolder}/fiftlib.zip ton/crypto/fift/lib/`); 58 | 59 | await prepareFiftlibZip(tag, workFolder); 60 | 61 | console.log(` 62 | Done! 63 | 64 | Prepared tag: ${tag} in folder: ${workFolder} 65 | 66 | Next steps: 67 | 1. Go to https://github.com/ton-defi-org/ton-binaries/releases 68 | 2. 
Draft a new release 69 | 3. Use a version name AND a tag name in the format of "ubuntu-22-[version_name]" where version_name is 0.4.6 etc. 70 | 4. Attach the binaries from the ${workFolder} folder 71 | 5. Publish the release 72 | 6. Make sure that "version_name" appears in https://github.com/ton-community/contract-verifier-config/blob/main/config.json under funcVersions 73 | 7. Redeploy to heroku 74 | `); 75 | })(); 76 | 77 | async function prepareFiftlibZip(tag, workFolder) { 78 | console.log(`Preparing fiftlib.zip ...`); 79 | 80 | const repo = "ton-blockchain/ton"; 81 | const apiBase = `https://api.github.com/repos/${repo}/contents/crypto/fift/lib?ref=${tag}`; 82 | const rawBase = `https://raw.githubusercontent.com/${repo}/${tag}/crypto/fift/lib`; 83 | 84 | const res = await fetch(apiBase); 85 | if (!res.ok) throw new Error(`Failed to fetch file list: ${res.statusText}`); 86 | const files = await res.json(); 87 | 88 | const tmpDir = "fiftlib"; 89 | await execP(`mkdir -p ${tmpDir}`); 90 | 91 | await Promise.all( 92 | files.map(async (file) => { 93 | const filePath = `${tmpDir}/${file.name}`; 94 | await execP(`curl -sL ${rawBase}/${file.name} -o ${filePath}`); 95 | }), 96 | ); 97 | 98 | await execP(`zip -r -j ${workFolder}/fiftlib.zip ${tmpDir}`); 99 | } 100 | -------------------------------------------------------------------------------- /src/binaries.ts: -------------------------------------------------------------------------------- 1 | export const binaryPath = "resources/binaries"; 2 | -------------------------------------------------------------------------------- /src/cell-builders.ts: -------------------------------------------------------------------------------- 1 | import { Address, beginCell, Cell } from "ton"; 2 | import tweetnacl from "tweetnacl"; 3 | import { toBigIntBE } from "bigint-buffer"; 4 | export const DEPLOY_SOURCE_OP = 1002; 5 | 6 | export function deploySource( 7 | queryId: bigint, 8 | codeCellHash: string, 9 | ipfsLink: string, 10 | verifierId: Buffer, 11 | ): Cell { 12 | return beginCell() 13 | .storeUint(DEPLOY_SOURCE_OP, 32) 14 | .storeUint(queryId, 64) 15 | .storeBuffer(verifierId) 16 | .storeUint(toBigIntBE(Buffer.from(codeCellHash, "base64")), 256) 17 | .storeRef( 18 | // Source item content cell 19 | beginCell().storeUint(1, 8).storeBuffer(Buffer.from(ipfsLink)).endCell(), 20 | ) 21 | .endCell(); 22 | } 23 | 24 | export const FORWARD_MESSAGE_OP = 0x75217758; 25 | 26 | export function verifierRegistryForwardMessage( 27 | queryId: bigint, 28 | msgToSign: Cell, 29 | sigCell: Cell, 30 | ): Buffer | undefined { 31 | return beginCell() 32 | .storeUint(FORWARD_MESSAGE_OP, 32) // Forward message 33 | .storeUint(queryId, 64) 34 | .storeRef(msgToSign) 35 | .storeRef(sigCell) 36 | .endCell() 37 | .toBoc(); 38 | } 39 | 40 | export function cellToSign( 41 | senderAddress: string, 42 | queryId: bigint, 43 | codeCellHash: string, 44 | ipfsLink: string, 45 | sourcesRegistry: string, 46 | verifierIdSha256: Buffer, 47 | ) { 48 | return beginCell() 49 | .storeBuffer(verifierIdSha256) 50 | .storeUint(Math.floor(Date.now() / 1000) + 60 * 10, 32) // Valid until 10 minutes from now 51 | .storeAddress(Address.parse(senderAddress)) 52 | .storeAddress(Address.parse(sourcesRegistry)) 53 | .storeRef(deploySource(queryId, codeCellHash, ipfsLink, verifierIdSha256)) 54 | .endCell(); 55 | } 56 | 57 | export function signatureCell(msgToSign: Cell, keypair: tweetnacl.SignKeyPair) { 58 | const sig = Buffer.from(tweetnacl.sign.detached(msgToSign.hash(), keypair.secretKey)); 59 | 60 | const 
sigCell = beginCell() 61 | .storeBuffer(sig) 62 | .storeBuffer(Buffer.from(keypair.publicKey)) 63 | .endCell(); 64 | 65 | return { sig, sigCell }; 66 | } 67 | -------------------------------------------------------------------------------- /src/check-prerequisites.ts: -------------------------------------------------------------------------------- 1 | import fs from "fs"; 2 | import path from "path"; 3 | import { binaryPath } from "./binaries"; 4 | import { supportedVersionsReader } from "./supported-versions-reader"; 5 | import { getLogger } from "./logger"; 6 | 7 | const logger = getLogger("checkPrereqs"); 8 | 9 | export async function checkPrerequisites() { 10 | const missingEnvVars = [ 11 | "VERIFIER_ID", 12 | "SOURCES_REGISTRY", 13 | "INFURA_ID", 14 | "INFURA_SECRET", 15 | "PRIVATE_KEY", 16 | "TACT_DEPLOYER_INFURA_ID", 17 | "TACT_DEPLOYER_INFURA_SECRET", 18 | "NETWORK", 19 | "COMPILE_TIMEOUT", 20 | ] 21 | .filter((e) => !process.env[e]) 22 | .join(" "); 23 | 24 | if (missingEnvVars) throw new Error("Missing env vars: " + missingEnvVars); 25 | 26 | const { funcVersions } = await supportedVersionsReader.versions(); 27 | 28 | const missingFiles = funcVersions! 29 | .map((versionDir: string) => [ 30 | path.join(binaryPath, versionDir, "func"), 31 | path.join(binaryPath, versionDir, "fift"), 32 | path.join(binaryPath, versionDir, "fiftlib", "Asm.fif"), 33 | path.join(binaryPath, versionDir, "fiftlib", "Fift.fif"), 34 | ]) 35 | .flat() 36 | .filter((f) => !fs.existsSync(path.join(process.cwd(), f))) 37 | .join(" "); 38 | 39 | logger.error("Missing files: " + missingFiles); 40 | } 41 | -------------------------------------------------------------------------------- /src/controller.ts: -------------------------------------------------------------------------------- 1 | import { 2 | SourceVerifier, 3 | SourceVerifyPayload, 4 | FiftSourceCompileResult, 5 | FuncSourceCompileResult, 6 | TactSourceCompileResult, 7 | TolkSourceCompileResult, 8 | } from "./types"; 9 | import path from "path"; 10 | import tweetnacl from "tweetnacl"; 11 | import { VerifyResult, Compiler, SourceItem } from "./types"; 12 | import { Cell } from "ton"; 13 | import { CodeStorageProvider } from "./ipfs-code-storage-provider"; 14 | import { sha256, random64BitNumber, getNowHourRoundedDown } from "./utils"; 15 | import { TonReaderClient } from "./ton-reader-client"; 16 | import { validateMessageCell } from "./validate-message-cell"; 17 | import { writeFile } from "fs/promises"; 18 | import { 19 | cellToSign, 20 | deploySource, 21 | signatureCell, 22 | verifierRegistryForwardMessage, 23 | } from "./cell-builders"; 24 | import mkdirp from "mkdirp"; 25 | import { getLogger } from "./logger"; 26 | 27 | export type Base64URL = string; 28 | 29 | const logger = getLogger("controller"); 30 | 31 | interface ControllerConfig { 32 | verifierId: string; 33 | privateKey: string; 34 | sourcesRegistryAddress: string; 35 | allowReverification: boolean; 36 | } 37 | 38 | export class Controller { 39 | private ipfsProvider: CodeStorageProvider; 40 | private keypair: tweetnacl.SignKeyPair; 41 | private VERIFIER_SHA256: Buffer; 42 | private config: ControllerConfig; 43 | private compilers: { [key in Compiler]: SourceVerifier }; 44 | private tonReaderClient: TonReaderClient; 45 | 46 | constructor( 47 | ipfsProvider: CodeStorageProvider, 48 | compilers: { [key in Compiler]: SourceVerifier }, 49 | config: ControllerConfig, 50 | tonReaderClient: TonReaderClient, 51 | ) { 52 | this.VERIFIER_SHA256 = sha256(config.verifierId); 53 | this.config = 
config; 54 | this.compilers = compilers; 55 | this.ipfsProvider = ipfsProvider; 56 | this.keypair = tweetnacl.sign.keyPair.fromSecretKey( 57 | Buffer.from(this.config.privateKey, "base64"), 58 | ); 59 | 60 | this.tonReaderClient = tonReaderClient; 61 | } 62 | 63 | async addSource(verificationPayload: SourceVerifyPayload): Promise { 64 | // Compile 65 | const compiler = this.compilers[verificationPayload.compiler]; 66 | const compileResult = await compiler.verify(verificationPayload); 67 | if (compileResult.error || compileResult.result !== "similar" || !compileResult.hash) { 68 | return { 69 | compileResult, 70 | }; 71 | } 72 | 73 | if (!this.config.allowReverification) { 74 | const isDeployed = await this.tonReaderClient.isProofDeployed( 75 | verificationPayload.knownContractHash, 76 | this.config.verifierId, 77 | ); 78 | if (isDeployed) { 79 | return { 80 | compileResult: { 81 | result: "unknown_error", 82 | error: "Contract is already deployed", 83 | hash: null, 84 | compilerSettings: compileResult.compilerSettings, 85 | sources: compileResult.sources, 86 | }, 87 | }; 88 | } 89 | } 90 | 91 | // Upload sources to IPFS 92 | const sourcesToUpload = compileResult.sources.map( 93 | ( 94 | s: 95 | | FuncSourceCompileResult 96 | | FiftSourceCompileResult 97 | | TolkSourceCompileResult 98 | | TactSourceCompileResult, 99 | ) => ({ 100 | path: path.join(verificationPayload.tmpDir, s.filename), 101 | name: s.filename, 102 | }), 103 | ); 104 | const fileLocators = await this.ipfsProvider.write(sourcesToUpload, true); 105 | 106 | const sourceSpec: SourceItem = { 107 | compilerSettings: compileResult.compilerSettings, 108 | compiler: verificationPayload.compiler, 109 | hash: compileResult.hash, 110 | verificationDate: getNowHourRoundedDown().getTime(), 111 | sources: fileLocators.map((f, i) => { 112 | return { 113 | url: f, 114 | ...compileResult.sources[i], 115 | }; 116 | }), 117 | knownContractAddress: verificationPayload.knownContractAddress, 118 | }; 119 | 120 | // Upload source spec JSON to IPFS 121 | const [ipfsLink] = await this.ipfsProvider.writeFromContent( 122 | [Buffer.from(JSON.stringify(sourceSpec))], 123 | true, 124 | ); 125 | 126 | logger.info(ipfsLink); 127 | 128 | const queryId = random64BitNumber(); 129 | 130 | // This is the message that will be forwarded to verifier registry 131 | const msgToSign = cellToSign( 132 | verificationPayload.senderAddress, 133 | queryId, 134 | compileResult.hash!, 135 | ipfsLink, 136 | this.config.sourcesRegistryAddress, 137 | this.VERIFIER_SHA256, 138 | ); 139 | 140 | const { sig, sigCell } = signatureCell(msgToSign, this.keypair); 141 | 142 | return { 143 | compileResult, 144 | sig: sig.toString("base64"), 145 | ipfsLink: ipfsLink, 146 | msgCell: verifierRegistryForwardMessage(queryId, msgToSign, sigCell), 147 | }; 148 | } 149 | 150 | public async sign({ messageCell, tmpDir }: { messageCell: Buffer; tmpDir: string }) { 151 | const cell = Cell.fromBoc(Buffer.from(messageCell))[0]; 152 | 153 | const verifierConfig = await this.tonReaderClient.getVerifierConfig( 154 | this.config.verifierId, 155 | this.config.sourcesRegistryAddress, 156 | ); 157 | 158 | const { ipfsPointer, codeCellHash, senderAddress, queryId } = validateMessageCell( 159 | cell, 160 | this.VERIFIER_SHA256, 161 | this.config.sourcesRegistryAddress, 162 | this.keypair, 163 | verifierConfig, 164 | ); 165 | 166 | const sourceTemp = await this.ipfsProvider.read(ipfsPointer); 167 | 168 | const json: SourceItem = JSON.parse(sourceTemp); 169 | 170 | if (json.hash !== codeCellHash) { 171 | throw 
new Error("Code hash mismatch"); 172 | } 173 | 174 | const compiler = this.compilers[json.compiler]; 175 | 176 | const sources = await Promise.all( 177 | json.sources.map(async (s) => { 178 | const content = await this.ipfsProvider.read(s.url); 179 | const filePath = path.join(tmpDir, s.filename); 180 | 181 | await mkdirp(filePath.substring(0, filePath.lastIndexOf("/"))); 182 | await writeFile(filePath, content); 183 | 184 | return { 185 | ...s, 186 | path: s.filename, 187 | }; 188 | }), 189 | ); 190 | 191 | const sourceToVerify: SourceVerifyPayload = { 192 | sources: sources, 193 | compiler: json.compiler, 194 | compilerSettings: { 195 | ...json.compilerSettings, 196 | // TODO this is a hack because only func has a command line arg for now. 197 | // @ts-ignore 198 | commandLine: json.compilerSettings?.commandLine?.replace(/^func/, ""), 199 | }, 200 | knownContractAddress: json.knownContractAddress, 201 | knownContractHash: json.hash, 202 | tmpDir: tmpDir, 203 | senderAddress: senderAddress.toString(), 204 | }; 205 | 206 | const compileResult = await compiler.verify(sourceToVerify); 207 | 208 | if (compileResult.result !== "similar") { 209 | throw new Error("Invalid compilation result: " + compileResult.result); 210 | } 211 | 212 | const slice = cell.beginParse(); 213 | const msgToSign = slice.loadRef(); 214 | const { sigCell } = signatureCell(msgToSign, this.keypair); 215 | let updateSigCell = addSignatureCell(slice.loadRef(), sigCell); 216 | 217 | return { 218 | msgCell: slice.asBuilder().storeRef(msgToSign).storeRef(updateSigCell).asCell().toBoc(), 219 | }; 220 | } 221 | } 222 | 223 | function addSignatureCell(node: Cell, sigCell: Cell): Cell { 224 | const slice = node.beginParse(); 225 | if (slice.remainingRefs > 0) { 226 | const child = slice.loadRef(); 227 | if (slice.remainingRefs > 0) { 228 | throw new Error("Each signature cell should have at most one ref to another sig cell"); 229 | } 230 | 231 | return slice.asBuilder().storeRef(addSignatureCell(child, sigCell)).asCell(); 232 | } 233 | 234 | return slice.asBuilder().storeRef(sigCell).asCell(); 235 | } 236 | -------------------------------------------------------------------------------- /src/deploy-controller.ts: -------------------------------------------------------------------------------- 1 | import path from "path"; 2 | import { IpfsCodeStorageProvider } from "./ipfs-code-storage-provider"; 3 | import { FileSystem } from "./source-verifier/tact-source-verifier"; 4 | import { PackageFileFormat } from "tact-1.4.1"; 5 | 6 | export class DeployController { 7 | storageProvider: IpfsCodeStorageProvider; 8 | fileSystem: FileSystem; 9 | 10 | constructor(codeStorageProvider: IpfsCodeStorageProvider, fileSystem: FileSystem) { 11 | this.storageProvider = codeStorageProvider; 12 | this.fileSystem = fileSystem; 13 | } 14 | 15 | async process({ tmpDir }: { tmpDir: string }) { 16 | const files = await this.fileSystem.readdir(tmpDir); 17 | 18 | if (files.length !== 2) throw new Error("Expecting exactly 1 boc file and 1 pkg file"); 19 | 20 | const fileContents = await Promise.all( 21 | files.map(async (name) => { 22 | const content = await this.fileSystem.readFile(path.join(tmpDir, name)); 23 | const [hash] = await this.storageProvider.hashForContent([content]); 24 | return { name, hash, content }; 25 | }), 26 | ); 27 | 28 | const pkgFile = fileContents.find((f) => f.name.endsWith(".pkg"))!.content.toString("utf-8"); 29 | 30 | let pkgContents: PackageFileFormat; 31 | 32 | try { 33 | pkgContents = JSON.parse(pkgFile); 34 | } catch (e) { 35 | 
throw new Error("Unable to parse pkg file"); 36 | } 37 | 38 | const [rootHash] = await this.storageProvider.writeFromContent( 39 | [ 40 | JSON.stringify({ 41 | pkg: fileContents.find((f) => f.name.endsWith(".pkg"))!.hash, 42 | dataCell: fileContents.find((f) => f.name.endsWith(".boc"))!.hash, 43 | }), 44 | ...fileContents.map(({ content }) => content), 45 | ], 46 | false, 47 | ); 48 | 49 | await this.storageProvider.writeFromContent([pkgContents.abi], true); 50 | 51 | return `https://verifier.ton.org/tactDeployer/${rootHash.replace("ipfs://", "")}${ 52 | process.env.NETWORK === "testnet" ? "?testnet" : "" 53 | }`; 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /src/dynamic-importer.ts: -------------------------------------------------------------------------------- 1 | import { exec } from "child_process"; 2 | import { access } from "fs/promises"; 3 | import path from "path"; 4 | import { promisify } from "util"; 5 | import { supportedVersionsReader } from "./supported-versions-reader"; 6 | import { getLogger } from "./logger"; 7 | 8 | const execAsync = promisify(exec); 9 | 10 | const pendingInstallations: { [v: string]: Promise } = {}; 11 | 12 | const logger = getLogger("dynamic-importer"); 13 | 14 | export class DynamicImporter { 15 | static async tryImport(compiler: "tact" | "func" | "tolk", version: string) { 16 | const versions = await supportedVersionsReader.versions(); 17 | 18 | let installPath, modulePath, npmPackage: string; 19 | 20 | if (compiler === "tact") { 21 | if (!versions.tactVersions.includes(version)) { 22 | throw new Error(`Unsupported tact version:${version}`); 23 | } 24 | installPath = path.resolve(process.cwd(), `compilers/tact-compiler-${version}`); 25 | 26 | modulePath = path.join(installPath, "node_modules", "@tact-lang", "compiler"); 27 | npmPackage = "@tact-lang/compiler"; 28 | } else if (compiler === "func") { 29 | if (!versions.funcVersions.includes(version)) { 30 | throw new Error(`Unsupported func version:${version}`); 31 | } 32 | 33 | installPath = path.resolve(process.cwd(), `compilers/func-compiler-${version}`); 34 | 35 | modulePath = path.join(installPath, "node_modules", "@ton-community", "func-js-bin"); 36 | npmPackage = "@ton-community/func-js-bin"; 37 | } else if (compiler === "tolk") { 38 | if (!versions.tolkVersions.includes(version)) { 39 | throw new Error(`Unsupported tolk version:${version}`); 40 | } 41 | 42 | installPath = path.resolve(process.cwd(), "compilers", `tolk-compiler-${version}`); 43 | modulePath = path.join(installPath, "node_modules", "@ton", "tolk-js"); 44 | npmPackage = "@ton/tolk-js"; 45 | } else { 46 | throw new Error(`Compiler ${compiler} is not yet supported`); 47 | } 48 | 49 | const key = `${compiler}${version}`; 50 | 51 | // if undefined, will just continue 52 | await pendingInstallations[key]; 53 | 54 | try { 55 | await access(modulePath); 56 | return await import(modulePath); 57 | } catch { 58 | if (!pendingInstallations[key]) { 59 | logger.debug(`Version ${version} not found, installing...`); 60 | 61 | pendingInstallations[key] = execAsync( 62 | `npm install ${npmPackage}@${version} --prefix ${installPath}`, 63 | ).finally(() => { 64 | delete pendingInstallations[key]; 65 | }); 66 | } else { 67 | logger.debug(`Installation for ${key} already in progress`); 68 | } 69 | 70 | await pendingInstallations[key]; 71 | 72 | return await import(modulePath); 73 | } 74 | } 75 | } 76 | -------------------------------------------------------------------------------- 
/src/firebase-provider.ts: -------------------------------------------------------------------------------- 1 | import { getLogger } from "./logger"; 2 | import admin from "firebase-admin"; 3 | 4 | // We use this for descending order 5 | const MAX_TS = 9999999999999; 6 | 7 | const logger = getLogger("firebase-provider"); 8 | 9 | class FirebaseProvider { 10 | db: admin.database.Database; 11 | 12 | constructor() { 13 | const serviceAccount = JSON.parse(process.env.FIREBASE_SERVICE_ACCOUNT!); 14 | serviceAccount.private_key = serviceAccount.private_key.replaceAll("\\n", "\n"); 15 | const app = admin.initializeApp({ 16 | credential: admin.credential.cert(serviceAccount), 17 | databaseURL: process.env.FIREBASE_DB_URL, 18 | }); 19 | this.db = app.database(); 20 | } 21 | 22 | async addForDescendingOrder(key: string, data: T) { 23 | const r = this.db.ref(key); 24 | 25 | // For descending order 26 | const childKey = String(MAX_TS - Date.now()).padStart(13, "0"); // consistent length 27 | 28 | await r.child(childKey).set(data); 29 | } 30 | 31 | async set(key: string, val: T) { 32 | return this.db.ref(key).set(val); 33 | } 34 | 35 | async setWithTxn(key: string, txn: (val: T) => void) { 36 | return this.db.ref(key).transaction(txn); 37 | } 38 | 39 | async remove(key: string) { 40 | return this.db.ref(key).remove(); 41 | } 42 | 43 | async get(key: string): Promise { 44 | const val = await this.db.ref(key).get(); 45 | 46 | if (!val.exists()) return null; 47 | 48 | return val.val(); 49 | } 50 | 51 | async readItems(key: string, limit = 500) { 52 | try { 53 | const r = this.db.ref(key); 54 | const res = await r.orderByKey().limitToFirst(limit).get(); 55 | 56 | if (res.exists()) { 57 | const items: T[] = []; 58 | 59 | res.forEach((v) => { 60 | items.push(v.val()); 61 | }); 62 | 63 | return items; 64 | } else { 65 | return null; 66 | } 67 | } catch (e) { 68 | logger.warn(e); 69 | return null; 70 | } 71 | } 72 | } 73 | 74 | export const firebaseProvider = new FirebaseProvider(); 75 | -------------------------------------------------------------------------------- /src/ipfs-code-storage-provider.ts: -------------------------------------------------------------------------------- 1 | import { create, IPFSHTTPClient } from "ipfs-http-client"; 2 | import fs from "fs"; 3 | import { ToContent } from "ipfs-core-types/src/utils"; 4 | // @ts-ignore 5 | import { of } from "ipfs-only-hash"; 6 | 7 | // This can be a trivial URL, a firebase key, IPFS hash etc. 
8 | export type CodeLocationPointer = string; 9 | 10 | export type FileUploadSpec = { 11 | path: string; 12 | name: string; 13 | }; 14 | 15 | export interface CodeStorageProvider { 16 | write(files: FileUploadSpec[], pin: boolean): Promise; 17 | writeFromContent(files: Buffer[], pin: boolean): Promise; 18 | // Returns URL 19 | read(pointer: CodeLocationPointer): Promise; 20 | } 21 | 22 | export class IpfsCodeStorageProvider implements CodeStorageProvider { 23 | #client: IPFSHTTPClient; 24 | 25 | constructor(infuraId: string, infuraSecret: string) { 26 | const auth = "Basic " + Buffer.from(infuraId + ":" + infuraSecret).toString("base64"); 27 | 28 | this.#client = create({ 29 | url: "https://ipfs.infura.io:5001/api/v0", 30 | headers: { 31 | authorization: auth, 32 | }, 33 | }); 34 | } 35 | 36 | async hashForContent(content: ToContent[]): Promise { 37 | return Promise.all(content.map((c) => of(c))); 38 | } 39 | 40 | async writeFromContent(files: ToContent[], pin: boolean): Promise { 41 | return Promise.all( 42 | files.map((f) => 43 | this.#client.add({ content: f }, { pin }).then((r) => { 44 | return `ipfs://${r.cid.toString()}`; 45 | }), 46 | ), 47 | ); 48 | } 49 | 50 | async write(files: FileUploadSpec[], pin: boolean): Promise { 51 | return this.writeFromContent( 52 | files.map((f) => fs.createReadStream(f.path)), 53 | pin, 54 | ); 55 | } 56 | 57 | async read(pointer: string): Promise { 58 | return ( 59 | await fetch(`https://${process.env.IPFS_PROVIDER}/ipfs/${pointer.replace("ipfs://", "")}`) 60 | ).text(); 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /src/latest-known-contracts.ts: -------------------------------------------------------------------------------- 1 | import dotenv from "dotenv"; 2 | import { Address } from "ton"; 3 | import axios from "axios"; 4 | import async from "async"; 5 | import { sha256 } from "./utils"; 6 | import { getTonClient } from "./ton-reader-client"; 7 | import { toBigIntBE } from "bigint-buffer"; 8 | import { SourceItem } from "./wrappers/source-item"; 9 | import { getLogger } from "./logger"; 10 | import { firebaseProvider } from "./firebase-provider"; 11 | 12 | dotenv.config({ path: ".env.local" }); 13 | dotenv.config({ path: ".env" }); 14 | 15 | const logger = getLogger("latest-known-contracts"); 16 | 17 | const isTestnet = process.env.NETWORK === "testnet"; 18 | const cacheKey = isTestnet ? "cacheTestnet" : "cache"; 19 | const lockKey = cacheKey + `_LOCK`; 20 | 21 | type TonTransactionsArchiveProviderParams = { 22 | address: string; 23 | limit: number; 24 | offset: number; 25 | sort: "asc" | "desc"; 26 | startUtime: number | null; 27 | }; 28 | 29 | async function getTransactions(params: TonTransactionsArchiveProviderParams) { 30 | const urlParams: any = { 31 | account: params.address, 32 | limit: params.limit.toString(), 33 | sort: params.sort, 34 | action_type: "contract_deploy", 35 | }; 36 | 37 | if (params.startUtime) { 38 | urlParams.start_utime = params.startUtime.toString(); 39 | } 40 | 41 | const url = 42 | `https://${isTestnet ? "testnet." 
: ""}toncenter.com/api/v3/actions?` + 43 | new URLSearchParams(urlParams); 44 | 45 | const response = await fetch(url); 46 | 47 | if (response.status !== 200) { 48 | throw new Error(response.statusText); 49 | } 50 | 51 | const txns = (await response.json()) as { actions: any[] }; 52 | 53 | if ("error" in txns) { 54 | throw new Error(String(txns.error)); 55 | } 56 | 57 | return txns.actions.map((tx: any) => ({ 58 | address: tx.details.destination, 59 | timestamp: Number(tx.trace_end_utime), 60 | })); 61 | } 62 | 63 | async function update(verifierIdSha256: Buffer, ipfsProvider: string) { 64 | logger.debug(`Updating latest verified`); 65 | let lockAcquired = false; 66 | try { 67 | const txnResult = await firebaseProvider.setWithTxn<{ timestamp: number }>(lockKey, (lock) => { 68 | if (lock && Date.now() - lock.timestamp < 40_000) { 69 | logger.debug(`Lock acquired by another instance`); 70 | return; 71 | } 72 | 73 | return { timestamp: Date.now() }; 74 | }); 75 | 76 | lockAcquired = txnResult.committed; 77 | 78 | if (!lockAcquired) return; 79 | 80 | let lastTimestamp = 81 | (await firebaseProvider.readItems<{ timestamp: number }>(cacheKey, 1))?.[0]?.timestamp ?? 82 | null; 83 | 84 | if (lastTimestamp) lastTimestamp += 1; 85 | 86 | logger.debug(`Got latest timestamp: ${lastTimestamp}`); 87 | 88 | const txns = await getTransactions({ 89 | address: process.env.SOURCES_REGISTRY!, 90 | limit: 100, 91 | offset: 0, 92 | sort: "asc", 93 | startUtime: lastTimestamp, 94 | }); 95 | 96 | const tc = await getTonClient(); 97 | 98 | const res = await async.mapLimit(txns, 10, async (obj: any) => { 99 | try { 100 | const sourceItemContract = tc.open( 101 | SourceItem.createFromAddress(Address.parse(obj.address)), 102 | ); 103 | const { verifierId, data } = await sourceItemContract.getData(); 104 | 105 | // Not our verifier id, ignore 106 | if (verifierId !== toBigIntBE(verifierIdSha256)) { 107 | return; 108 | } 109 | 110 | const contentCell = data!.beginParse(); 111 | 112 | const version = contentCell.loadUint(8); 113 | if (version !== 1) throw new Error("Unsupported version"); 114 | const ipfsLink = contentCell.loadStringTail(); 115 | 116 | let ipfsData; 117 | try { 118 | ipfsData = await axios.get( 119 | `https://${ipfsProvider}/ipfs/${ipfsLink.replace("ipfs://", "")}`, 120 | { timeout: 3000 }, 121 | ); 122 | } catch (e) { 123 | throw new Error("Unable to fetch IPFS cid: " + ipfsLink); 124 | } 125 | 126 | const mainFilename = ipfsData.data.sources?.sort((a: any, b: any) => { 127 | if (a.type && b.type) { 128 | return Number(b.type === "code") - Number(a.type === "code"); 129 | } 130 | return Number(b.isEntrypoint) - Number(a.isEntrypoint); 131 | })?.[0]?.filename; 132 | 133 | const nameParts = Array.from(mainFilename.matchAll(/(?:\/|^)([^\/\n]+)/g)).map( 134 | // @ts-ignore 135 | (m) => m[1], 136 | ); 137 | 138 | return { 139 | address: ipfsData.data.knownContractAddress, 140 | mainFile: nameParts[nameParts.length - 1], 141 | compiler: ipfsData.data.compiler, 142 | timestamp: obj.timestamp, 143 | }; 144 | } catch (e) { 145 | logger.warn(e); 146 | return; 147 | } 148 | }); 149 | 150 | logger.debug(res.length); 151 | logger.debug(res.filter((o) => !!o).length); 152 | 153 | for (const r of res.filter((o) => !!o)) { 154 | await firebaseProvider.addForDescendingOrder(cacheKey, r); 155 | } 156 | } catch (e) { 157 | logger.error(e); 158 | } finally { 159 | try { 160 | if (lockAcquired) { 161 | await firebaseProvider.remove(lockKey); 162 | } 163 | } catch (e) { 164 | logger.warn(e); 165 | } 166 | } 167 | } 168 | 
169 | export function pollLatestVerified(verifierId: string, ipfsProvider: string) { 170 | void update(sha256(verifierId), ipfsProvider); 171 | 172 | setInterval(async () => { 173 | try { 174 | await update(sha256(verifierId), ipfsProvider); 175 | } catch (e) { 176 | logger.warn(`Unable to fetch latest verified ${e}`); 177 | } 178 | }, 60_000); 179 | } 180 | 181 | export async function getLatestVerified() { 182 | return firebaseProvider.readItems(cacheKey, 500); 183 | } 184 | -------------------------------------------------------------------------------- /src/logger.ts: -------------------------------------------------------------------------------- 1 | import winston, { format, LoggerModified, LoggerOptions } from "winston"; 2 | 3 | import { AsyncLocalStorage } from "async_hooks"; 4 | import { randomUUID } from "crypto"; 5 | import randomstring from "randomstring"; 6 | 7 | const asyncLocalStorage = new AsyncLocalStorage>(); 8 | const globalContext = new Map(); 9 | 10 | declare module "winston" { 11 | interface LoggerModified { 12 | debug: Logger["debug"]; 13 | info: Logger["info"]; 14 | warn: Logger["warn"]; 15 | error: Logger["error"]; 16 | 17 | addToContext: (entries: { [key: string]: unknown }) => LoggerModified; 18 | addToGlobalContext: (entries: { [key: string]: unknown }) => LoggerModified; 19 | 20 | debugSampled: (rate: number, message: unknown, ...args: unknown[]) => LoggerModified; 21 | } 22 | } 23 | 24 | const instanceId = randomstring.generate(6); 25 | 26 | const customLevels = { 27 | levels: { 28 | critical: 0, 29 | error: 1, 30 | warn: 2, 31 | info: 3, 32 | debug: 4, 33 | }, 34 | }; 35 | 36 | export function getLogger( 37 | module: string, 38 | meta: Record = {}, 39 | level: string = "debug", 40 | ) { 41 | const defaultMeta: LoggerOptions["defaultMeta"] = { 42 | module, 43 | instanceId, 44 | }; 45 | 46 | const addMetaAndStack = winston.format.printf((info) => { 47 | info.meta = (info[Symbol.for("splat")] as unknown[])?.[0] ?? {}; 48 | info.meta = { ...(info.meta as any), ...meta }; 49 | 50 | if (info.stack) (info.message as any) += info.stack; 51 | 52 | let stringified = JSON.stringify(info.meta); 53 | delete info.meta; 54 | 55 | if (stringified === "{}") stringified = ""; 56 | return `${info.timestamp} ${info.service} ${module} ${info.level.toUpperCase()} ${ 57 | info.message 58 | } ${stringified}`; 59 | }); 60 | 61 | const _logger = winston.createLogger({ 62 | levels: customLevels.levels, 63 | level, 64 | format: winston.format.combine( 65 | winston.format.timestamp(), 66 | winston.format.errors({ stack: true }), 67 | winston.format.prettyPrint(), 68 | addMetaAndStack, 69 | winston.format((info) => { 70 | info.context = { ...(info.context ?? {}), ...Object.fromEntries(globalContext) }; 71 | const store = asyncLocalStorage.getStore(); 72 | if (!store) return info; 73 | info.context = { ...(info.context as any), ...Object.fromEntries(store.entries()) }; 74 | return info; 75 | })(), 76 | ), 77 | defaultMeta: defaultMeta, 78 | transports: [ 79 | new winston.transports.Console({ 80 | format: process.env.NO_JSON_LOGGING 81 | ? format.printf((info) => { 82 | return `${info[Symbol.for("message")]}`; 83 | }) 84 | : winston.format.json(), 85 | }), 86 | ], 87 | }); 88 | 89 | const logWithStackTrace = (errorCode: string, message: unknown, ...args: unknown[]) => { 90 | let msg = message; 91 | 92 | args[0] = { errorCode, ...(args[0] ?? 
{}) }; 93 | 94 | if (typeof message === "string") { 95 | const err = new Error(message as never); 96 | // Remove the line coming from the synthetic error created the line above 97 | err.stack = err.stack?.replace(new RegExp(`^\\s+at.*_logger\\.${"error"}.*$\\n`, "m"), ""); 98 | msg = err; 99 | } else if (!(message instanceof Error) && typeof message === "object") { 100 | msg = JSON.stringify(message); 101 | } 102 | 103 | _logger.log("error", msg as never, ...args); 104 | 105 | return _logger; 106 | }; 107 | 108 | // Override logger error to always print stack traces 109 | _logger.error = (message: unknown, ...args: unknown[]) => 110 | logWithStackTrace("error", message, ...args); 111 | 112 | const logger = _logger as unknown as LoggerModified; 113 | 114 | logger.addToContext = (entries: { [key: string]: unknown }) => { 115 | const store = asyncLocalStorage.getStore(); 116 | if (!store) { 117 | logger.error( 118 | "addToContext must be called inside of an async function wrapped in withContext", 119 | { 120 | ...entries, 121 | }, 122 | ); 123 | return logger; 124 | } 125 | 126 | Object.entries(entries).forEach(([key, value]) => store.set(key, value)); 127 | 128 | return logger; 129 | }; 130 | 131 | logger.debugSampled = (rate: number, message: unknown, ...args: unknown[]) => { 132 | if (rate <= 0 || rate > 1) { 133 | logger.warn("sampleOnce rate must be between 0 and 1, ignoring", { rate }); 134 | return logger; 135 | } 136 | 137 | if (Math.random() < rate) { 138 | logger.debug(message as unknown as string, ...args); 139 | } 140 | 141 | return logger; 142 | }; 143 | 144 | logger.addToGlobalContext = (entries: { [key: string]: unknown }) => { 145 | Object.entries(entries).forEach(([key, value]) => globalContext.set(key, value)); 146 | return logger; 147 | }; 148 | 149 | return logger; 150 | } 151 | 152 | export function withContext(fn: () => T) { 153 | let store = asyncLocalStorage.getStore(); 154 | 155 | if (store) { 156 | throw new Error( 157 | "cannot use withContext inside an async function that is already wrapped in withContext", 158 | ); 159 | } 160 | 161 | store = new Map(); 162 | store.set("traceId", randomUUID()); 163 | 164 | return asyncLocalStorage.run(store, fn); 165 | } 166 | 167 | export function hasContext() { 168 | return asyncLocalStorage.getStore() !== undefined; 169 | } 170 | 171 | export function getContext() { 172 | const store = asyncLocalStorage.getStore(); 173 | 174 | if (!store) { 175 | throw new Error("getContext must be called inside of an async function wrapped in withContext"); 176 | } 177 | 178 | return Object.fromEntries(store.entries()); 179 | } 180 | -------------------------------------------------------------------------------- /src/req-id-middleware.ts: -------------------------------------------------------------------------------- 1 | import { randomUUID } from "crypto"; 2 | import { RequestHandler } from "express"; 3 | 4 | declare global { 5 | namespace Express { 6 | interface Request { 7 | id: string; 8 | } 9 | } 10 | } 11 | 12 | const addId: () => RequestHandler = () => (req, res, next) => { 13 | req.id = randomUUID(); 14 | next(); 15 | }; 16 | 17 | export default addId; 18 | -------------------------------------------------------------------------------- /src/server.ts: -------------------------------------------------------------------------------- 1 | import express from "express"; 2 | require("express-async-errors"); 3 | import dotenv from "dotenv"; 4 | dotenv.config({ path: ".env.local" }); 5 | dotenv.config({ path: ".env" }); 6 | 7 | import cors 
from "cors"; 8 | import { Controller } from "./controller"; 9 | import multer from "multer"; 10 | import { readFile, rm, writeFile, readdir } from "fs/promises"; 11 | import mkdirp from "mkdirp"; 12 | import { rmSync } from "fs"; 13 | import path from "path"; 14 | import idMiddleware from "./req-id-middleware"; 15 | import { IpfsCodeStorageProvider } from "./ipfs-code-storage-provider"; 16 | import rateLimit from "express-rate-limit"; 17 | import { checkPrerequisites } from "./check-prerequisites"; 18 | import { FiftSourceVerifier } from "./source-verifier/fift-source-verifier"; 19 | import { 20 | LegacyFuncSourceVerifier, 21 | specialCharsRegex, 22 | } from "./source-verifier/func-source-verifier"; 23 | import { TactSourceVerifier, FileSystem } from "./source-verifier/tact-source-verifier"; 24 | import { TolkSourceVerifier } from "./source-verifier/tolk-source-verifier"; 25 | import { TonReaderClientImpl } from "./ton-reader-client"; 26 | import { getLatestVerified, pollLatestVerified } from "./latest-known-contracts"; 27 | import { DeployController } from "./deploy-controller"; 28 | import { getLogger } from "./logger"; 29 | import { FuncJSSourceVerifier } from "./source-verifier/funcjs-source-verifier"; 30 | 31 | const logger = getLogger("server"); 32 | 33 | const app = express(); 34 | app.use(idMiddleware()); 35 | app.use(cors()); 36 | app.use(express.json()); 37 | 38 | const limiter = rateLimit({ 39 | windowMs: 5 * 60 * 1000, // 5 minutes 40 | max: 25, // Limit each IP to 25 requests per `window` 41 | standardHeaders: true, // Return rate limit info in the `RateLimit-*` headers 42 | legacyHeaders: false, // Disable the `X-RateLimit-*` headers 43 | }); 44 | 45 | // Set up file handling 46 | const TMP_DIR = "./tmp"; 47 | rmSync(TMP_DIR, { recursive: true, force: true }); 48 | 49 | app.use(async (req, res, next) => { 50 | const _path = path.join(TMP_DIR, req.id); 51 | await mkdirp(_path); 52 | next(); 53 | }); 54 | 55 | const sourcesUpload = multer({ 56 | storage: multer.diskStorage({ 57 | destination: async (req, file, callback) => { 58 | const _path = path.join(TMP_DIR, req.id, ...file.fieldname.split("/").slice(0, -1)); 59 | 60 | await mkdirp(_path); 61 | callback(null, _path); 62 | }, 63 | filename: (req, file, callback) => { 64 | callback(null, file.originalname); 65 | }, 66 | }), 67 | limits: { 68 | files: 150, 69 | fileSize: 1024 * 1024, 70 | }, 71 | fileFilter(req, file, callback) { 72 | callback( 73 | null, 74 | !specialCharsRegex().test(file.originalname) && !specialCharsRegex().test(file.fieldname), 75 | ); 76 | }, 77 | }); 78 | 79 | const tactStagingUpload = multer({ 80 | storage: multer.diskStorage({ 81 | destination: async (req, file, callback) => { 82 | const _path = path.join( 83 | TMP_DIR, 84 | req.id, 85 | file.fieldname.match(/\//) ? 
file.fieldname.split("/")[0] : "", 86 | ); 87 | 88 | await mkdirp(_path); 89 | callback(null, _path); 90 | }, 91 | filename: (req, file, callback) => { 92 | callback(null, file.originalname); 93 | }, 94 | }), 95 | limits: { 96 | files: 2, 97 | fileSize: 200 * 1024, 98 | }, 99 | fileFilter(req, file, callback) { 100 | callback( 101 | null, 102 | !!file.originalname.match(/\.(boc|pkg)/) && 103 | !specialCharsRegex().test(file.originalname) && 104 | !specialCharsRegex().test(file.fieldname), 105 | ); 106 | }, 107 | }); 108 | 109 | app.use((req, res, next) => { 110 | if (process.env.DISABLE_RM) { 111 | next(); 112 | return; 113 | } 114 | res.on("close", async () => { 115 | if (req.files) { 116 | rm(path.join(TMP_DIR, req.id), { recursive: true, force: true }); 117 | } 118 | }); 119 | next(); 120 | }); 121 | const port = process.env.PORT || 3003; 122 | 123 | // Routes 124 | app.get("/hc", (req, res) => { 125 | res.send("ok"); 126 | }); 127 | 128 | (async () => { 129 | const fileSystem: FileSystem = { 130 | readFile: readFile, 131 | writeFile: async (filePath, content) => { 132 | await mkdirp(path.dirname(filePath)); 133 | await writeFile(filePath, content); 134 | }, 135 | readdir: async (path) => readdir(path), 136 | }; 137 | 138 | const deployController = new DeployController( 139 | new IpfsCodeStorageProvider( 140 | process.env.TACT_DEPLOYER_INFURA_ID!, 141 | process.env.TACT_DEPLOYER_INFURA_SECRET!, 142 | ), 143 | fileSystem, 144 | ); 145 | 146 | const controller = new Controller( 147 | new IpfsCodeStorageProvider(process.env.INFURA_ID!, process.env.INFURA_SECRET!), 148 | { 149 | func: 150 | process.env.LEGACY_FUNC_COMPILER === "true" 151 | ? new LegacyFuncSourceVerifier() 152 | : new FuncJSSourceVerifier(), 153 | fift: new FiftSourceVerifier(), 154 | tolk: new TolkSourceVerifier(), 155 | tact: new TactSourceVerifier(fileSystem), 156 | }, 157 | { 158 | verifierId: process.env.VERIFIER_ID!, 159 | allowReverification: !!process.env.ALLOW_REVERIFICATION, 160 | privateKey: process.env.PRIVATE_KEY!, 161 | sourcesRegistryAddress: process.env.SOURCES_REGISTRY!, 162 | }, 163 | new TonReaderClientImpl(), 164 | ); 165 | 166 | if (process.env.NODE_ENV === "production") 167 | pollLatestVerified(process.env.VERIFIER_ID!, process.env.IPFS_PROVIDER!); 168 | 169 | app.post( 170 | "/source", 171 | limiter, 172 | async (req, _, next) => { 173 | await mkdirp(path.join(TMP_DIR, req.id)); 174 | next(); 175 | }, 176 | sourcesUpload.any(), 177 | async (req, res) => { 178 | const jsonFile = (req.files! as any[]).find((f) => f.fieldname === "json").path; 179 | 180 | const jsonData = await readFile(jsonFile); 181 | const body = JSON.parse(jsonData.toString()); 182 | 183 | const result = await controller.addSource({ 184 | compiler: body.compiler, 185 | compilerSettings: body.compilerSettings, 186 | sources: (req.files! 
as any[]) 187 | .filter((f: any) => f.fieldname !== "json") 188 | .map((f, i) => ({ 189 | path: f.fieldname, 190 | ...body.sources[i], 191 | })), 192 | knownContractAddress: body.knownContractAddress, 193 | knownContractHash: body.knownContractHash, 194 | tmpDir: path.join(TMP_DIR, req.id), 195 | senderAddress: body.senderAddress, 196 | }); 197 | 198 | res.json(result); 199 | }, 200 | ); 201 | 202 | app.post("/sign", limiter, async (req, res) => { 203 | const result = await controller.sign({ 204 | messageCell: req.body.messageCell.data, 205 | tmpDir: path.join(TMP_DIR, req.id), 206 | }); 207 | res.json(result); 208 | }); 209 | 210 | app.post( 211 | "/prepareTactDeployment", 212 | limiter, 213 | async (req, _, next) => { 214 | await mkdirp(path.join(TMP_DIR, req.id)); 215 | next(); 216 | }, 217 | tactStagingUpload.any(), 218 | async (req, res) => { 219 | try { 220 | const result = await deployController.process({ 221 | tmpDir: path.join(TMP_DIR, req.id), 222 | }); 223 | res.json(result); 224 | } catch (e) { 225 | logger.error(e); 226 | res.status(500).send(e.toString()); 227 | } 228 | }, 229 | ); 230 | 231 | if (process.env.NODE_ENV === "production") checkPrerequisites(); 232 | 233 | app.get("/latestVerified", async (req, res) => { 234 | res.json(await getLatestVerified()); 235 | }); 236 | 237 | app.use(function (err: any, req: any, res: any, next: any) { 238 | logger.error(err); // Log error message in our server's console 239 | if (!err.statusCode) err.statusCode = 500; // If err has no specified error code, set error code to 'Internal Server Error (500)' 240 | res.status(err.statusCode).send(err); // All HTTP requests must have a response, so let's send back an error with its status 241 | }); 242 | 243 | app.listen(port, () => { 244 | logger.info( 245 | `Ton Contract Verifier Server running on ${port}. 
Verifier Id: ${process.env.VERIFIER_ID}`, 246 | ); 247 | }); 248 | })(); 249 | -------------------------------------------------------------------------------- /src/source-verifier/fift-source-verifier.ts: -------------------------------------------------------------------------------- 1 | import { promisify } from "util"; 2 | import { exec } from "child_process"; 3 | const execAsync = promisify(exec); 4 | import { readFile, writeFile } from "fs/promises"; 5 | import { CompileResult, SourceVerifier, SourceVerifyPayload } from "../types"; 6 | import path from "path"; 7 | import { Cell } from "ton"; 8 | import { FuncCompilerVersion } from "@ton-community/contract-verifier-sdk"; 9 | import { binaryPath } from "../binaries"; 10 | import { specialCharsRegex } from "./func-source-verifier"; 11 | import { getLogger } from "../logger"; 12 | 13 | const logger = getLogger("fift-source-verifier"); 14 | 15 | export async function fiftToCodeCell( 16 | funcVersion: FuncCompilerVersion, 17 | fiftFile: string, 18 | tmpDir: string, 19 | ) { 20 | const b64OutFile = `${fiftFile}-b64.cell`; 21 | 22 | const fiftCellSource = `"${fiftFile}" include \n 23 | boc>B "${b64OutFile}" B>file`; 24 | 25 | const tmpB64Fift = path.join(tmpDir, `${fiftFile}.cell.tmp.fif`); 26 | await writeFile(tmpB64Fift, fiftCellSource); 27 | 28 | const executable = path.join(process.cwd(), binaryPath, funcVersion, "fift"); 29 | 30 | if (specialCharsRegex().test(executable)) { 31 | throw new Error("Unallowed special characters in command line"); 32 | } 33 | 34 | process.env.FIFTPATH = path.join(process.cwd(), binaryPath, funcVersion, "fiftlib"); 35 | 36 | await execAsync(`${executable} -s ${tmpB64Fift}`); 37 | 38 | return Cell.fromBoc(await readFile(b64OutFile))[0]; 39 | } 40 | 41 | export class FiftSourceVerifier implements SourceVerifier { 42 | async verify(payload: SourceVerifyPayload): Promise { 43 | const funcVersion: FuncCompilerVersion = "0.4.1"; // Single version, assuming fift doesn't affect code hash 44 | const sources = payload.sources.map((s) => ({ filename: s.path })); 45 | 46 | try { 47 | if (!process.env.ALLOW_FIFT) { 48 | throw new Error("Fift is disabled"); 49 | } 50 | if (payload.sources.length !== 1) { 51 | throw new Error("Only one source file is allowed for fift verification"); 52 | } 53 | const cell = await fiftToCodeCell(funcVersion, payload.sources[0].path, payload.tmpDir); 54 | const hash = cell.hash().toString("base64"); 55 | 56 | return { 57 | hash, 58 | result: hash === payload.knownContractHash ? 
"similar" : "not_similar", 59 | error: null, 60 | compilerSettings: { 61 | fiftVersion: funcVersion, // Fift is tied to a FunC version 62 | commandLine: `echo '"${payload.sources[0].path}" include\nboc>B "output.cell" B>file' | fift`, 63 | }, 64 | sources, 65 | }; 66 | } catch (e) { 67 | logger.error(e); 68 | return { 69 | hash: null, 70 | result: "unknown_error", 71 | error: e.toString(), 72 | compilerSettings: { 73 | fiftVersion: funcVersion, 74 | commandLine: "", 75 | }, 76 | sources, 77 | }; 78 | } 79 | } 80 | } 81 | -------------------------------------------------------------------------------- /src/source-verifier/func-source-verifier.ts: -------------------------------------------------------------------------------- 1 | import { execAsyncWithTimeout } from "../utils"; 2 | import { promisify } from "util"; 3 | import { exec } from "child_process"; 4 | const execAsync = promisify(exec); 5 | import { 6 | SourceVerifier, 7 | SourceVerifyPayload, 8 | CompileResult, 9 | FuncCliCompileSettings, 10 | FuncSourceToVerify, 11 | } from "../types"; 12 | import path from "path"; 13 | import { fiftToCodeCell } from "./fift-source-verifier"; 14 | import { FuncCompilerVersion } from "@ton-community/contract-verifier-sdk"; 15 | import { binaryPath } from "../binaries"; 16 | 17 | const semverRegex = () => 18 | /^(0|[1-9][0-9]*)\.(0|[1-9][0-9]*)\.(0|[1-9][0-9]*)(-(0|[1-9A-Za-z-][0-9A-Za-z-]*)(\.[0-9A-Za-z-]+)*)?(\+[0-9A-Za-z-]+(\.[0-9A-Za-z-]+)*)?$/; 19 | 20 | export const specialCharsRegex = () => /[;>\&`\|\$\(\)\[\]\{\}'"\\\#]/; 21 | 22 | function prepareFuncCommand( 23 | executable: string, 24 | funcArgs: string, 25 | fiftOutFile: string, 26 | commandLine: string, 27 | ) { 28 | if (specialCharsRegex().test(commandLine)) { 29 | throw new Error("Unallowed special characters in command line"); 30 | } 31 | const getPath = (_path: string) => _path; 32 | 33 | return [getPath(executable), funcArgs, "-o", getPath(fiftOutFile), commandLine] 34 | .filter((c) => c) 35 | .join(" "); 36 | } 37 | 38 | function funcCommandForDisplay(cmd: string): string { 39 | return /\/(func.*)/.exec(cmd)![1]; 40 | } 41 | 42 | async function compileFuncToCodeHash( 43 | funcVersion: FuncCompilerVersion, 44 | funcArgs: string, 45 | commandLine: string, 46 | tmpDir: string, 47 | ) { 48 | if (!semverRegex().test(funcVersion)) { 49 | throw new Error(`Invalid func version: ${funcVersion}`); 50 | } 51 | 52 | const fiftOutFile = "output.fif"; 53 | const executable = path.join(process.cwd(), binaryPath, funcVersion, "func"); 54 | const funcCmd = prepareFuncCommand(executable, funcArgs, fiftOutFile, commandLine); 55 | 56 | const { stderr } = await execAsyncWithTimeout( 57 | funcCmd, 58 | parseInt(process.env.COMPILE_TIMEOUT ?? 
"1000"), 59 | { 60 | cwd: tmpDir, 61 | }, 62 | ); 63 | if (stderr) { 64 | throw new Error(stderr); 65 | } 66 | 67 | const codeCell = await fiftToCodeCell(funcVersion, fiftOutFile, tmpDir); 68 | 69 | return { 70 | hash: codeCell.hash().toString("base64"), 71 | funcCmd: funcCommandForDisplay(funcCmd), 72 | }; 73 | } 74 | 75 | export class LegacyFuncSourceVerifier implements SourceVerifier { 76 | async verify(payload: SourceVerifyPayload): Promise { 77 | let funcCmd: string | null = null; 78 | const compilerSettings = payload.compilerSettings as FuncCliCompileSettings; 79 | 80 | const sources = payload.sources.map((s: FuncSourceToVerify) => ({ 81 | filename: s.path, 82 | hasIncludeDirectives: s.hasIncludeDirectives, 83 | isEntrypoint: s.isEntrypoint, 84 | isStdLib: s.isStdLib, 85 | includeInCommand: s.includeInCommand, 86 | })); 87 | 88 | try { 89 | const { hash: codeCellHash, funcCmd: _funcCmd } = await compileFuncToCodeHash( 90 | compilerSettings.funcVersion, 91 | "", 92 | compilerSettings.commandLine, 93 | payload.tmpDir, 94 | ); 95 | 96 | funcCmd = _funcCmd; 97 | 98 | return { 99 | hash: codeCellHash, 100 | result: codeCellHash === payload.knownContractHash ? "similar" : "not_similar", 101 | error: null, 102 | compilerSettings: { 103 | funcVersion: compilerSettings.funcVersion, 104 | commandLine: funcCmd, 105 | }, 106 | sources, 107 | }; 108 | } catch (e) { 109 | return { 110 | result: "unknown_error", 111 | error: e.toString(), 112 | hash: null, 113 | compilerSettings: { 114 | funcVersion: compilerSettings.funcVersion, 115 | commandLine: funcCmd ?? "", 116 | }, 117 | sources, 118 | }; 119 | } 120 | } 121 | } 122 | -------------------------------------------------------------------------------- /src/source-verifier/funcjs-source-verifier.ts: -------------------------------------------------------------------------------- 1 | import { FuncCompiler } from "@ton-community/func-js"; 2 | import { readFile } from "fs/promises"; 3 | import path from "path"; 4 | import { Cell } from "ton-core"; 5 | import { DynamicImporter } from "../dynamic-importer"; 6 | import { 7 | CompileResult, 8 | FuncCliCompileSettings, 9 | FuncSourceToVerify, 10 | SourceVerifier, 11 | SourceVerifyPayload, 12 | } from "../types"; 13 | 14 | export class FuncJSSourceVerifier implements SourceVerifier { 15 | async verify(payload: SourceVerifyPayload): Promise { 16 | let funcCmd: string | null = null; 17 | const compilerSettings = payload.compilerSettings as FuncCliCompileSettings; 18 | 19 | const sources = payload.sources.map((s: FuncSourceToVerify) => ({ 20 | filename: s.path, 21 | hasIncludeDirectives: s.hasIncludeDirectives, 22 | isEntrypoint: s.isEntrypoint, 23 | isStdLib: s.isStdLib, 24 | includeInCommand: s.includeInCommand, 25 | })); 26 | 27 | try { 28 | const module = await DynamicImporter.tryImport("func", compilerSettings.funcVersion); 29 | 30 | const res = await new FuncCompiler(module.object).compileFunc({ 31 | sources: Object.fromEntries( 32 | await Promise.all( 33 | payload.sources.map(async (p) => [ 34 | p.path, 35 | (await readFile(path.join(payload.tmpDir, p.path))).toString(), 36 | ]), 37 | ), 38 | ), 39 | targets: payload.sources 40 | .filter((s: FuncSourceToVerify) => s.includeInCommand) 41 | .map((s) => s.path), 42 | }); 43 | 44 | if (res.status === "error") { 45 | throw new Error(res.message); 46 | } 47 | 48 | const hash = Cell.fromBoc(Buffer.from(res.codeBoc, "base64"))[0].hash().toString("base64"); 49 | 50 | return { 51 | hash, 52 | result: hash === payload.knownContractHash ? 
"similar" : "not_similar", 53 | error: null, 54 | compilerSettings: { 55 | funcVersion: compilerSettings.funcVersion, 56 | commandLine: compilerSettings.commandLine, 57 | }, 58 | sources, 59 | }; 60 | } catch (e) { 61 | return { 62 | result: "unknown_error", 63 | error: e.toString(), 64 | hash: null, 65 | compilerSettings: { 66 | funcVersion: compilerSettings.funcVersion, 67 | commandLine: funcCmd ?? "", 68 | }, 69 | sources, 70 | }; 71 | } 72 | } 73 | } 74 | -------------------------------------------------------------------------------- /src/source-verifier/res/tact141pkg.ts: -------------------------------------------------------------------------------- 1 | export const pkg141 = { 2 | base64: `{"name":"Test141","code":"te6ccgECEQEAAh4AART/APSkE/S88sgLAQIBYgIDApzQAdDTAwFxsKMB+kABINdJgQELuvLgiCDXCwoggQT/uvLQiYMJuvLgiFRQUwNvBPhhAvhi2zxa2zzy4ILI+EMBzH8BygBZAssfyx/J7VQOBAIBWAkKA+ABkjB/4HAh10nCH5UwINcLH94gghCLNBgiuo8gMNMfAYIQizQYIrry4IHTP9MfWWwSMaCI+EIBcG3bPH/gghCUapi2uo6n0x8BghCUapi2uvLggdM/ATHIAYIQr/kPV1jLH8s/yfhCAXBt2zx/4DBwBQYGABgAAAAAQ2FzaGJhY2sBOm1tIm6zmVsgbvLQgG8iAZEy4hAkcAMEgEJQI9s8BwHKyHEBygFQBwHKAHABygJQBSDXSYEBC7ry4Igg1wsKIIEE/7ry0ImDCbry4IjPFlAD+gJwAcpoI26zkX+TJG6z4pczMwFwAcoA4w0hbrOcfwHKAAEgbvLQgAHMlTFwAcoA4skB+wAIAJh/AcoAyHABygBwAcoAJG6znX8BygAEIG7y0IBQBMyWNANwAcoA4iRus51/AcoABCBu8tCAUATMljQDcAHKAOJwAcoAAn8BygACyVjMAgJzCwwAEbgr7tRNDSAAGAIQqhjbPNs8bCEODQIQqZDbPNs8bCEODwACIAFQ7UTQ1AH4Y9IAAZfTH9MfWWwS4Pgo1wsKgwm68uCJgQEB1wABAdHbPBAAAiEAAnA=","abi":"{\"name\":\"Test141\",\"types\":[{\"name\":\"StateInit\",\"header\":null,\"fields\":[{\"name\":\"code\",\"type\":{\"kind\":\"simple\",\"type\":\"cell\",\"optional\":false}},{\"name\":\"data\",\"type\":{\"kind\":\"simple\",\"type\":\"cell\",\"optional\":false}}]},{\"name\":\"Context\",\"header\":null,\"fields\":[{\"name\":\"bounced\",\"type\":{\"kind\":\"simple\",\"type\":\"bool\",\"optional\":false}},{\"name\":\"sender\",\"type\":{\"kind\":\"simple\",\"type\":\"address\",\"optional\":false}},{\"name\":\"value\",\"type\":{\"kind\":\"simple\",\"type\":\"int\",\"optional\":false,\"format\":257}},{\"name\":\"raw\",\"type\":{\"kind\":\"simple\",\"type\":\"slice\",\"optional\":false}}]},{\"name\":\"SendParameters\",\"header\":null,\"fields\":[{\"name\":\"bounce\",\"type\":{\"kind\":\"simple\",\"type\":\"bool\",\"optional\":false}},{\"name\":\"to\",\"type\":{\"kind\":\"simple\",\"type\":\"address\",\"optional\":false}},{\"name\":\"value\",\"type\":{\"kind\":\"simple\",\"type\":\"int\",\"optional\":false,\"format\":257}},{\"name\":\"mode\",\"type\":{\"kind\":\"simple\",\"type\":\"int\",\"optional\":false,\"format\":257}},{\"name\":\"body\",\"type\":{\"kind\":\"simple\",\"type\":\"cell\",\"optional\":true}},{\"name\":\"code\",\"type\":{\"kind\":\"simple\",\"type\":\"cell\",\"optional\":true}},{\"name\":\"data\",\"type\":{\"kind\":\"simple\",\"type\":\"cell\",\"optional\":true}}]},{\"name\":\"Deploy\",\"header\":2490013878,\"fields\":[{\"name\":\"queryId\",\"type\":{\"kind\":\"simple\",\"type\":\"uint\",\"optional\":false,\"format\":64}}]},{\"name\":\"DeployOk\",\"header\":2952335191,\"fields\":[{\"name\":\"queryId\",\"type\":{\"kind\":\"simple\",\"type\":\"uint\",\"optional\":false,\"format\":64}}]},{\"name\":\"FactoryDeploy\",\"header\":1829761339,\"fields\":[{\"name\":\"queryId\",\"type\":{\"kind\":\"simple\",\"type\":\"uint\",\"optional\":false,\"format\":64}},{\"name\":\"cashback\",\"type\":{\"kind\":\"simple\",\"type\":\"address\",\"optional\":false}}]},{\"name\":\"Add\",\"header\":2335447074,\"fields\":[{\"name\":\"queryId\",\"type\":{\"kin
d\":\"simple\",\"type\":\"uint\",\"optional\":false,\"format\":64}},{\"name\":\"amount\",\"type\":{\"kind\":\"simple\",\"type\":\"uint\",\"optional\":false,\"format\":32}}]}],\"receivers\":[{\"receiver\":\"internal\",\"message\":{\"kind\":\"typed\",\"type\":\"Add\"}},{\"receiver\":\"internal\",\"message\":{\"kind\":\"typed\",\"type\":\"Deploy\"}}],\"getters\":[{\"name\":\"counter\",\"arguments\":[],\"returnType\":{\"kind\":\"simple\",\"type\":\"int\",\"optional\":false,\"format\":257}},{\"name\":\"id\",\"arguments\":[],\"returnType\":{\"kind\":\"simple\",\"type\":\"int\",\"optional\":false,\"format\":257}}],\"errors\":{\"2\":{\"message\":\"Stack underflow\"},\"3\":{\"message\":\"Stack overflow\"},\"4\":{\"message\":\"Integer overflow\"},\"5\":{\"message\":\"Integer out of expected range\"},\"6\":{\"message\":\"Invalid opcode\"},\"7\":{\"message\":\"Type check error\"},\"8\":{\"message\":\"Cell overflow\"},\"9\":{\"message\":\"Cell underflow\"},\"10\":{\"message\":\"Dictionary error\"},\"13\":{\"message\":\"Out of gas error\"},\"32\":{\"message\":\"Method ID not found\"},\"34\":{\"message\":\"Action is invalid or not supported\"},\"37\":{\"message\":\"Not enough TON\"},\"38\":{\"message\":\"Not enough extra-currencies\"},\"128\":{\"message\":\"Null reference exception\"},\"129\":{\"message\":\"Invalid serialization prefix\"},\"130\":{\"message\":\"Invalid incoming message\"},\"131\":{\"message\":\"Constraints error\"},\"132\":{\"message\":\"Access denied\"},\"133\":{\"message\":\"Contract stopped\"},\"134\":{\"message\":\"Invalid argument\"},\"135\":{\"message\":\"Code of a contract was not found\"},\"136\":{\"message\":\"Invalid address\"},\"137\":{\"message\":\"Masterchain support is not enabled for this contract\"}},\"interfaces\":[\"org.ton.introspection.v0\",\"org.ton.abi.ipfs.v0\",\"org.ton.deploy.lazy.v0\",\"org.ton.debug.v0\",\"org.ton.chain.workchain.v0\"]}","init":{"kind":"direct","args":[{"name":"id","type":{"kind":"simple","type":"int","optional":false,"format":257}}],"prefix":{"bits":1,"value":0},"deployment":{"kind":"system-cell","system":"te6cckECEwEAAigAAQHAAQEFoKJdAgEU/wD0pBP0vPLICwMCAWIECgKc0AHQ0wMBcbCjAfpAASDXSYEBC7ry4Igg1wsKIIEE/7ry0ImDCbry4IhUUFMDbwT4YQL4Yts8Wts88uCCyPhDAcx/AcoAWQLLH8sfye1UDwUD4AGSMH/gcCHXScIflTAg1wsf3iCCEIs0GCK6jyAw0x8BghCLNBgiuvLggdM/0x9ZbBIxoIj4QgFwbds8f+CCEJRqmLa6jqfTHwGCEJRqmLa68uCB0z8BMcgBghCv+Q9XWMsfyz/J+EIBcG3bPH/gMHAGBwcAGAAAAABDYXNoYmFjawE6bW0ibrOZWyBu8tCAbyIBkTLiECRwAwSAQlAj2zwIAcrIcQHKAVAHAcoAcAHKAlAFINdJgQELuvLgiCDXCwoggQT/uvLQiYMJuvLgiM8WUAP6AnABymgjbrORf5MkbrPilzMzAXABygDjDSFus5x/AcoAASBu8tCAAcyVMXABygDiyQH7AAkAmH8BygDIcAHKAHABygAkbrOdfwHKAAQgbvLQgFAEzJY0A3ABygDiJG6znX8BygAEIG7y0IBQBMyWNANwAcoA4nABygACfwHKAALJWMwCAVgLEgICcwwOAhCqGNs82zxsIQ8NAAIgAhCpkNs82zxsIQ8RAVDtRNDUAfhj0gABl9Mf0x9ZbBLg+CjXCwqDCbry4ImBAQHXAAEB0ds8EAACcAACIQARuCvu1E0NIAAY5OdWmQ=="}},"sources":{"contracts/test141.tact":"aW1wb3J0ICJAc3RkbGliL2RlcGxveSI7CgptZXNzYWdlIEFkZCB7CiAgICBxdWVyeUlkOiBJbnQgYXMgdWludDY0OwogICAgYW1vdW50OiBJbnQgYXMgdWludDMyOwp9Cgpjb250cmFjdCBUZXN0MTQxIHdpdGggRGVwbG95YWJsZSB7CiAgICBpZDogSW50IGFzIHVpbnQzMjsKICAgIGNvdW50ZXI6IEludCBhcyB1aW50MzI7CgogICAgaW5pdChpZDogSW50KSB7CiAgICAgICAgc2VsZi5pZCA9IGlkOwogICAgICAgIHNlbGYuY291bnRlciA9IDA7CiAgICB9CgogICAgcmVjZWl2ZShtc2c6IEFkZCkgewogICAgICAgIHNlbGYuY291bnRlciArPSBtc2cuYW1vdW50OwoKICAgICAgICAvLyBOb3RpZnkgdGhlIGNhbGxlciB0aGF0IHRoZSByZWNlaXZlciB3YXMgZXhlY3V0ZWQgYW5kIGZvcndhcmQgcmVtYWluaW5nIHZhbHVlIGJhY2sKICAgICAgICBzZWxmLm5vdGlmeSgiQ2FzaGJhY2siLmFzQ29tbWVudCgpKTsKICAgIH0KCiAgICBnZXQgZnVuIGNvdW50ZXIoKTogSW50IHsKICAgI
CAgICByZXR1cm4gc2VsZi5jb3VudGVyOwogICAgfQoKICAgIGdldCBmdW4gaWQoKTogSW50IHsKICAgICAgICByZXR1cm4gc2VsZi5pZDsKICAgIH0KfQo="},"compiler":{"name":"tact","version":"1.4.1","parameters":"{\"entrypoint\":\"contracts/test141.tact\",\"options\":{\"debug\":true}}"}}`, 3 | }; 4 | -------------------------------------------------------------------------------- /src/source-verifier/res/tact162pkg.ts: -------------------------------------------------------------------------------- 1 | export const pkg162 = { 2 | base64: `{"name":"MyTactCon","code":"te6ccgECCgEAAWkABPr/ACCP9zAB0HLXIdIA0gD6QCEQNFBmbwT4YQL4Yu1E0NIAAZfTH9MfWWwSmYEBAdcAAQHRcOIDkl8D4AHXDR/y4IIhghCLNBgiuo8gMdM/0x9ZbCESoIj4QgFwbds8yH8BygBZAssfyx/J7VTgAYIQlGqYtrrjAl8D8sCC4QEEAgMAGAAAAABDYXNoYmFjawFQ0z8BMcgBghCv+Q9XWMsfyz/JEvhCAXBt2zzIfwHKAFkCyx/LH8ntVAQBEPSkE/S88sgLBQCgbW0ibrOZWyBu8tCAbyIBkTLiECRwAwSAQlAjEDZVIhLIz4WAygDPhEDOAfoCgGnPQAJcbgFusJNbz4GdWM+GgM+EgPQA9ADPgeL0AMkB+wACA5BnBgcBPqoY7UTQ0gABl9Mf0x9ZbBKZgQEB1wABAdFw4ts8bCEIAT6pkO1E0NIAAZfTH9MfWWwSmYEBAdcAAQHRcOLbPGwhCQACIAACIQ==","abi":"{\"name\":\"MyTactCon\",\"types\":[{\"name\":\"DataSize\",\"header\":null,\"fields\":[{\"name\":\"cells\",\"type\":{\"kind\":\"simple\",\"type\":\"int\",\"optional\":false,\"format\":257}},{\"name\":\"bits\",\"type\":{\"kind\":\"simple\",\"type\":\"int\",\"optional\":false,\"format\":257}},{\"name\":\"refs\",\"type\":{\"kind\":\"simple\",\"type\":\"int\",\"optional\":false,\"format\":257}}]},{\"name\":\"StateInit\",\"header\":null,\"fields\":[{\"name\":\"code\",\"type\":{\"kind\":\"simple\",\"type\":\"cell\",\"optional\":false}},{\"name\":\"data\",\"type\":{\"kind\":\"simple\",\"type\":\"cell\",\"optional\":false}}]},{\"name\":\"Context\",\"header\":null,\"fields\":[{\"name\":\"bounceable\",\"type\":{\"kind\":\"simple\",\"type\":\"bool\",\"optional\":false}},{\"name\":\"sender\",\"type\":{\"kind\":\"simple\",\"type\":\"address\",\"optional\":false}},{\"name\":\"value\",\"type\":{\"kind\":\"simple\",\"type\":\"int\",\"optional\":false,\"format\":257}},{\"name\":\"raw\",\"type\":{\"kind\":\"simple\",\"type\":\"slice\",\"optional\":false}}]},{\"name\":\"SendParameters\",\"header\":null,\"fields\":[{\"name\":\"mode\",\"type\":{\"kind\":\"simple\",\"type\":\"int\",\"optional\":false,\"format\":257}},{\"name\":\"body\",\"type\":{\"kind\":\"simple\",\"type\":\"cell\",\"optional\":true}},{\"name\":\"code\",\"type\":{\"kind\":\"simple\",\"type\":\"cell\",\"optional\":true}},{\"name\":\"data\",\"type\":{\"kind\":\"simple\",\"type\":\"cell\",\"optional\":true}},{\"name\":\"value\",\"type\":{\"kind\":\"simple\",\"type\":\"int\",\"optional\":false,\"format\":257}},{\"name\":\"to\",\"type\":{\"kind\":\"simple\",\"type\":\"address\",\"optional\":false}},{\"name\":\"bounce\",\"type\":{\"kind\":\"simple\",\"type\":\"bool\",\"optional\":false}}]},{\"name\":\"MessageParameters\",\"header\":null,\"fields\":[{\"name\":\"mode\",\"type\":{\"kind\":\"simple\",\"type\":\"int\",\"optional\":false,\"format\":257}},{\"name\":\"body\",\"type\":{\"kind\":\"simple\",\"type\":\"cell\",\"optional\":true}},{\"name\":\"value\",\"type\":{\"kind\":\"simple\",\"type\":\"int\",\"optional\":false,\"format\":257}},{\"name\":\"to\",\"type\":{\"kind\":\"simple\",\"type\":\"address\",\"optional\":false}},{\"name\":\"bounce\",\"type\":{\"kind\":\"simple\",\"type\":\"bool\",\"optional\":false}}]},{\"name\":\"DeployParameters\",\"header\":null,\"fields\":[{\"name\":\"mode\",\"type\":{\"kind\":\"simple\",\"type\":\"int\",\"optional\":false,\"format\":257}},{\"name\":\"body\",\"type\":{\"kind\":\"simple\",\"ty
pe\":\"cell\",\"optional\":true}},{\"name\":\"value\",\"type\":{\"kind\":\"simple\",\"type\":\"int\",\"optional\":false,\"format\":257}},{\"name\":\"bounce\",\"type\":{\"kind\":\"simple\",\"type\":\"bool\",\"optional\":false}},{\"name\":\"init\",\"type\":{\"kind\":\"simple\",\"type\":\"StateInit\",\"optional\":false}}]},{\"name\":\"StdAddress\",\"header\":null,\"fields\":[{\"name\":\"workchain\",\"type\":{\"kind\":\"simple\",\"type\":\"int\",\"optional\":false,\"format\":8}},{\"name\":\"address\",\"type\":{\"kind\":\"simple\",\"type\":\"uint\",\"optional\":false,\"format\":256}}]},{\"name\":\"VarAddress\",\"header\":null,\"fields\":[{\"name\":\"workchain\",\"type\":{\"kind\":\"simple\",\"type\":\"int\",\"optional\":false,\"format\":32}},{\"name\":\"address\",\"type\":{\"kind\":\"simple\",\"type\":\"slice\",\"optional\":false}}]},{\"name\":\"BasechainAddress\",\"header\":null,\"fields\":[{\"name\":\"hash\",\"type\":{\"kind\":\"simple\",\"type\":\"int\",\"optional\":true,\"format\":257}}]},{\"name\":\"Deploy\",\"header\":2490013878,\"fields\":[{\"name\":\"queryId\",\"type\":{\"kind\":\"simple\",\"type\":\"uint\",\"optional\":false,\"format\":64}}]},{\"name\":\"DeployOk\",\"header\":2952335191,\"fields\":[{\"name\":\"queryId\",\"type\":{\"kind\":\"simple\",\"type\":\"uint\",\"optional\":false,\"format\":64}}]},{\"name\":\"FactoryDeploy\",\"header\":1829761339,\"fields\":[{\"name\":\"queryId\",\"type\":{\"kind\":\"simple\",\"type\":\"uint\",\"optional\":false,\"format\":64}},{\"name\":\"cashback\",\"type\":{\"kind\":\"simple\",\"type\":\"address\",\"optional\":false}}]},{\"name\":\"Add\",\"header\":2335447074,\"fields\":[{\"name\":\"queryId\",\"type\":{\"kind\":\"simple\",\"type\":\"uint\",\"optional\":false,\"format\":64}},{\"name\":\"amount\",\"type\":{\"kind\":\"simple\",\"type\":\"uint\",\"optional\":false,\"format\":32}}]},{\"name\":\"MyTactCon$Data\",\"header\":null,\"fields\":[{\"name\":\"id\",\"type\":{\"kind\":\"simple\",\"type\":\"uint\",\"optional\":false,\"format\":32}},{\"name\":\"counter\",\"type\":{\"kind\":\"simple\",\"type\":\"uint\",\"optional\":false,\"format\":32}}]}],\"receivers\":[{\"receiver\":\"internal\",\"message\":{\"kind\":\"typed\",\"type\":\"Add\"}},{\"receiver\":\"internal\",\"message\":{\"kind\":\"typed\",\"type\":\"Deploy\"}}],\"getters\":[{\"name\":\"counter\",\"methodId\":104984,\"arguments\":[],\"returnType\":{\"kind\":\"simple\",\"type\":\"int\",\"optional\":false,\"format\":257}},{\"name\":\"id\",\"methodId\":105872,\"arguments\":[],\"returnType\":{\"kind\":\"simple\",\"type\":\"int\",\"optional\":false,\"format\":257}}],\"errors\":{\"2\":{\"message\":\"Stack underflow\"},\"3\":{\"message\":\"Stack overflow\"},\"4\":{\"message\":\"Integer overflow\"},\"5\":{\"message\":\"Integer out of expected range\"},\"6\":{\"message\":\"Invalid opcode\"},\"7\":{\"message\":\"Type check error\"},\"8\":{\"message\":\"Cell overflow\"},\"9\":{\"message\":\"Cell underflow\"},\"10\":{\"message\":\"Dictionary error\"},\"11\":{\"message\":\"'Unknown' error\"},\"12\":{\"message\":\"Fatal error\"},\"13\":{\"message\":\"Out of gas error\"},\"14\":{\"message\":\"Virtualization error\"},\"32\":{\"message\":\"Action list is invalid\"},\"33\":{\"message\":\"Action list is too long\"},\"34\":{\"message\":\"Action is invalid or not supported\"},\"35\":{\"message\":\"Invalid source address in outbound message\"},\"36\":{\"message\":\"Invalid destination address in outbound message\"},\"37\":{\"message\":\"Not enough Toncoin\"},\"38\":{\"message\":\"Not enough extra 
currencies\"},\"39\":{\"message\":\"Outbound message does not fit into a cell after rewriting\"},\"40\":{\"message\":\"Cannot process a message\"},\"41\":{\"message\":\"Library reference is null\"},\"42\":{\"message\":\"Library change action error\"},\"43\":{\"message\":\"Exceeded maximum number of cells in the library or the maximum depth of the Merkle tree\"},\"50\":{\"message\":\"Account state size exceeded limits\"},\"128\":{\"message\":\"Null reference exception\"},\"129\":{\"message\":\"Invalid serialization prefix\"},\"130\":{\"message\":\"Invalid incoming message\"},\"131\":{\"message\":\"Constraints error\"},\"132\":{\"message\":\"Access denied\"},\"133\":{\"message\":\"Contract stopped\"},\"134\":{\"message\":\"Invalid argument\"},\"135\":{\"message\":\"Code of a contract was not found\"},\"136\":{\"message\":\"Invalid standard address\"}},\"interfaces\":[\"org.ton.introspection.v0\",\"org.ton.abi.ipfs.v0\",\"org.ton.deploy.lazy.v0\",\"org.ton.debug.v0\"]}","init":{"kind":"direct","args":[{"name":"id","type":{"kind":"simple","type":"int","optional":false,"format":257}}],"prefix":{"bits":1,"value":0},"deployment":{"kind":"system-cell","system":null}},"sources":{"contracts/my_tact_con.tact":"aW1wb3J0ICJAc3RkbGliL2RlcGxveSI7CgptZXNzYWdlIEFkZCB7CiAgICBxdWVyeUlkOiBJbnQgYXMgdWludDY0OwogICAgYW1vdW50OiBJbnQgYXMgdWludDMyOwp9Cgpjb250cmFjdCBNeVRhY3RDb24gd2l0aCBEZXBsb3lhYmxlIHsKICAgIGlkOiBJbnQgYXMgdWludDMyOwogICAgY291bnRlcjogSW50IGFzIHVpbnQzMjsKCiAgICBpbml0KGlkOiBJbnQpIHsKICAgICAgICBzZWxmLmlkID0gaWQ7CiAgICAgICAgc2VsZi5jb3VudGVyID0gMDsKICAgIH0KCiAgICByZWNlaXZlKG1zZzogQWRkKSB7CiAgICAgICAgc2VsZi5jb3VudGVyICs9IG1zZy5hbW91bnQ7CgogICAgICAgIC8vIE5vdGlmeSB0aGUgY2FsbGVyIHRoYXQgdGhlIHJlY2VpdmVyIHdhcyBleGVjdXRlZCBhbmQgZm9yd2FyZCByZW1haW5pbmcgdmFsdWUgYmFjawogICAgICAgIHNlbGYubm90aWZ5KCJDYXNoYmFjayIuYXNDb21tZW50KCkpOwogICAgfQoKICAgIGdldCBmdW4gY291bnRlcigpOiBJbnQgewogICAgICAgIHJldHVybiBzZWxmLmNvdW50ZXI7CiAgICB9CgogICAgZ2V0IGZ1biBpZCgpOiBJbnQgewogICAgICAgIHJldHVybiBzZWxmLmlkOwogICAgfQp9Cg=="},"compiler":{"name":"tact","version":"1.6.2","parameters":"{\"entrypoint\":\"contracts/my_tact_con.tact\",\"options\":{\"debug\":true}}"}}`, 3 | }; 4 | -------------------------------------------------------------------------------- /src/source-verifier/res/tact163pkg.ts: -------------------------------------------------------------------------------- 1 | export const pkg163 = { 2 | base64: 
`{"name":"MyTactCon","code":"te6ccgECCwEAAWkAART/APSkE/S88sgLAQIBYgIDA+7QAdBy1yHSANIA+kAhEDRQZm8E+GEC+GLtRNDSAAGX0x/TH1lsEpmBAQHXAAEB0XDiA5JfA+AB1w0f8uCCIYIQizQYIrqPIDHTP9MfWWwhEqCI+EIBcG3bPMh/AcoAWQLLH8sfye1U4AGCEJRqmLa64wJfA/LAggQGBQIDfTgHCAAYAAAAAENhc2hiYWNrAVDTPwExyAGCEK/5D1dYyx/LP8kS+EIBcG3bPMh/AcoAWQLLH8sfye1UBgCgbW0ibrOZWyBu8tCAbyIBkTLiECRwAwSAQlAjEDZVIhLIz4WAygDPhEDOAfoCgGnPQAJcbgFusJNbz4GdWM+GgM+EgPQA9ADPgeL0AMkB+wABPqoY7UTQ0gABl9Mf0x9ZbBKZgQEB1wABAdFw4ts8bCEJAT6pkO1E0NIAAZfTH9MfWWwSmYEBAdcAAQHRcOLbPGwhCgACIAACIQ==","abi":"{\"name\":\"MyTactCon\",\"types\":[{\"name\":\"DataSize\",\"header\":null,\"fields\":[{\"name\":\"cells\",\"type\":{\"kind\":\"simple\",\"type\":\"int\",\"optional\":false,\"format\":257}},{\"name\":\"bits\",\"type\":{\"kind\":\"simple\",\"type\":\"int\",\"optional\":false,\"format\":257}},{\"name\":\"refs\",\"type\":{\"kind\":\"simple\",\"type\":\"int\",\"optional\":false,\"format\":257}}]},{\"name\":\"StateInit\",\"header\":null,\"fields\":[{\"name\":\"code\",\"type\":{\"kind\":\"simple\",\"type\":\"cell\",\"optional\":false}},{\"name\":\"data\",\"type\":{\"kind\":\"simple\",\"type\":\"cell\",\"optional\":false}}]},{\"name\":\"Context\",\"header\":null,\"fields\":[{\"name\":\"bounceable\",\"type\":{\"kind\":\"simple\",\"type\":\"bool\",\"optional\":false}},{\"name\":\"sender\",\"type\":{\"kind\":\"simple\",\"type\":\"address\",\"optional\":false}},{\"name\":\"value\",\"type\":{\"kind\":\"simple\",\"type\":\"int\",\"optional\":false,\"format\":257}},{\"name\":\"raw\",\"type\":{\"kind\":\"simple\",\"type\":\"slice\",\"optional\":false}}]},{\"name\":\"SendParameters\",\"header\":null,\"fields\":[{\"name\":\"mode\",\"type\":{\"kind\":\"simple\",\"type\":\"int\",\"optional\":false,\"format\":257}},{\"name\":\"body\",\"type\":{\"kind\":\"simple\",\"type\":\"cell\",\"optional\":true}},{\"name\":\"code\",\"type\":{\"kind\":\"simple\",\"type\":\"cell\",\"optional\":true}},{\"name\":\"data\",\"type\":{\"kind\":\"simple\",\"type\":\"cell\",\"optional\":true}},{\"name\":\"value\",\"type\":{\"kind\":\"simple\",\"type\":\"int\",\"optional\":false,\"format\":257}},{\"name\":\"to\",\"type\":{\"kind\":\"simple\",\"type\":\"address\",\"optional\":false}},{\"name\":\"bounce\",\"type\":{\"kind\":\"simple\",\"type\":\"bool\",\"optional\":false}}]},{\"name\":\"MessageParameters\",\"header\":null,\"fields\":[{\"name\":\"mode\",\"type\":{\"kind\":\"simple\",\"type\":\"int\",\"optional\":false,\"format\":257}},{\"name\":\"body\",\"type\":{\"kind\":\"simple\",\"type\":\"cell\",\"optional\":true}},{\"name\":\"value\",\"type\":{\"kind\":\"simple\",\"type\":\"int\",\"optional\":false,\"format\":257}},{\"name\":\"to\",\"type\":{\"kind\":\"simple\",\"type\":\"address\",\"optional\":false}},{\"name\":\"bounce\",\"type\":{\"kind\":\"simple\",\"type\":\"bool\",\"optional\":false}}]},{\"name\":\"DeployParameters\",\"header\":null,\"fields\":[{\"name\":\"mode\",\"type\":{\"kind\":\"simple\",\"type\":\"int\",\"optional\":false,\"format\":257}},{\"name\":\"body\",\"type\":{\"kind\":\"simple\",\"type\":\"cell\",\"optional\":true}},{\"name\":\"value\",\"type\":{\"kind\":\"simple\",\"type\":\"int\",\"optional\":false,\"format\":257}},{\"name\":\"bounce\",\"type\":{\"kind\":\"simple\",\"type\":\"bool\",\"optional\":false}},{\"name\":\"init\",\"type\":{\"kind\":\"simple\",\"type\":\"StateInit\",\"optional\":false}}]},{\"name\":\"StdAddress\",\"header\":null,\"fields\":[{\"name\":\"workchain\",\"type\":{\"kind\":\"simple\",\"type\":\"int\",\"optional\":false,\"format\":8}},{\"name\":\"address\",\"type\":
{\"kind\":\"simple\",\"type\":\"uint\",\"optional\":false,\"format\":256}}]},{\"name\":\"VarAddress\",\"header\":null,\"fields\":[{\"name\":\"workchain\",\"type\":{\"kind\":\"simple\",\"type\":\"int\",\"optional\":false,\"format\":32}},{\"name\":\"address\",\"type\":{\"kind\":\"simple\",\"type\":\"slice\",\"optional\":false}}]},{\"name\":\"BasechainAddress\",\"header\":null,\"fields\":[{\"name\":\"hash\",\"type\":{\"kind\":\"simple\",\"type\":\"int\",\"optional\":true,\"format\":257}}]},{\"name\":\"Deploy\",\"header\":2490013878,\"fields\":[{\"name\":\"queryId\",\"type\":{\"kind\":\"simple\",\"type\":\"uint\",\"optional\":false,\"format\":64}}]},{\"name\":\"DeployOk\",\"header\":2952335191,\"fields\":[{\"name\":\"queryId\",\"type\":{\"kind\":\"simple\",\"type\":\"uint\",\"optional\":false,\"format\":64}}]},{\"name\":\"FactoryDeploy\",\"header\":1829761339,\"fields\":[{\"name\":\"queryId\",\"type\":{\"kind\":\"simple\",\"type\":\"uint\",\"optional\":false,\"format\":64}},{\"name\":\"cashback\",\"type\":{\"kind\":\"simple\",\"type\":\"address\",\"optional\":false}}]},{\"name\":\"Add\",\"header\":2335447074,\"fields\":[{\"name\":\"queryId\",\"type\":{\"kind\":\"simple\",\"type\":\"uint\",\"optional\":false,\"format\":64}},{\"name\":\"amount\",\"type\":{\"kind\":\"simple\",\"type\":\"uint\",\"optional\":false,\"format\":32}}]},{\"name\":\"MyTactCon$Data\",\"header\":null,\"fields\":[{\"name\":\"id\",\"type\":{\"kind\":\"simple\",\"type\":\"uint\",\"optional\":false,\"format\":32}},{\"name\":\"counter\",\"type\":{\"kind\":\"simple\",\"type\":\"uint\",\"optional\":false,\"format\":32}}]}],\"receivers\":[{\"receiver\":\"internal\",\"message\":{\"kind\":\"typed\",\"type\":\"Add\"}},{\"receiver\":\"internal\",\"message\":{\"kind\":\"typed\",\"type\":\"Deploy\"}}],\"getters\":[{\"name\":\"counter\",\"methodId\":104984,\"arguments\":[],\"returnType\":{\"kind\":\"simple\",\"type\":\"int\",\"optional\":false,\"format\":257}},{\"name\":\"id\",\"methodId\":105872,\"arguments\":[],\"returnType\":{\"kind\":\"simple\",\"type\":\"int\",\"optional\":false,\"format\":257}}],\"errors\":{\"2\":{\"message\":\"Stack underflow\"},\"3\":{\"message\":\"Stack overflow\"},\"4\":{\"message\":\"Integer overflow\"},\"5\":{\"message\":\"Integer out of expected range\"},\"6\":{\"message\":\"Invalid opcode\"},\"7\":{\"message\":\"Type check error\"},\"8\":{\"message\":\"Cell overflow\"},\"9\":{\"message\":\"Cell underflow\"},\"10\":{\"message\":\"Dictionary error\"},\"11\":{\"message\":\"'Unknown' error\"},\"12\":{\"message\":\"Fatal error\"},\"13\":{\"message\":\"Out of gas error\"},\"14\":{\"message\":\"Virtualization error\"},\"32\":{\"message\":\"Action list is invalid\"},\"33\":{\"message\":\"Action list is too long\"},\"34\":{\"message\":\"Action is invalid or not supported\"},\"35\":{\"message\":\"Invalid source address in outbound message\"},\"36\":{\"message\":\"Invalid destination address in outbound message\"},\"37\":{\"message\":\"Not enough Toncoin\"},\"38\":{\"message\":\"Not enough extra currencies\"},\"39\":{\"message\":\"Outbound message does not fit into a cell after rewriting\"},\"40\":{\"message\":\"Cannot process a message\"},\"41\":{\"message\":\"Library reference is null\"},\"42\":{\"message\":\"Library change action error\"},\"43\":{\"message\":\"Exceeded maximum number of cells in the library or the maximum depth of the Merkle tree\"},\"50\":{\"message\":\"Account state size exceeded limits\"},\"128\":{\"message\":\"Null reference exception\"},\"129\":{\"message\":\"Invalid serialization 
prefix\"},\"130\":{\"message\":\"Invalid incoming message\"},\"131\":{\"message\":\"Constraints error\"},\"132\":{\"message\":\"Access denied\"},\"133\":{\"message\":\"Contract stopped\"},\"134\":{\"message\":\"Invalid argument\"},\"135\":{\"message\":\"Code of a contract was not found\"},\"136\":{\"message\":\"Invalid standard address\"},\"138\":{\"message\":\"Not a basechain address\"}},\"interfaces\":[\"org.ton.introspection.v0\",\"org.ton.abi.ipfs.v0\",\"org.ton.deploy.lazy.v0\",\"org.ton.debug.v0\"]}","init":{"kind":"direct","args":[{"name":"id","type":{"kind":"simple","type":"int","optional":false,"format":257}}],"prefix":{"bits":1,"value":0},"deployment":{"kind":"system-cell","system":null}},"sources":{"contracts/my_tact_con.tact":"aW1wb3J0ICJAc3RkbGliL2RlcGxveSI7CgptZXNzYWdlIEFkZCB7CiAgICBxdWVyeUlkOiBJbnQgYXMgdWludDY0OwogICAgYW1vdW50OiBJbnQgYXMgdWludDMyOwp9Cgpjb250cmFjdCBNeVRhY3RDb24gd2l0aCBEZXBsb3lhYmxlIHsKICAgIGlkOiBJbnQgYXMgdWludDMyOwogICAgY291bnRlcjogSW50IGFzIHVpbnQzMjsKCiAgICBpbml0KGlkOiBJbnQpIHsKICAgICAgICBzZWxmLmlkID0gaWQ7CiAgICAgICAgc2VsZi5jb3VudGVyID0gMDsKICAgIH0KCiAgICByZWNlaXZlKG1zZzogQWRkKSB7CiAgICAgICAgc2VsZi5jb3VudGVyICs9IG1zZy5hbW91bnQ7CgogICAgICAgIC8vIE5vdGlmeSB0aGUgY2FsbGVyIHRoYXQgdGhlIHJlY2VpdmVyIHdhcyBleGVjdXRlZCBhbmQgZm9yd2FyZCByZW1haW5pbmcgdmFsdWUgYmFjawogICAgICAgIHNlbGYubm90aWZ5KCJDYXNoYmFjayIuYXNDb21tZW50KCkpOwogICAgfQoKICAgIGdldCBmdW4gY291bnRlcigpOiBJbnQgewogICAgICAgIHJldHVybiBzZWxmLmNvdW50ZXI7CiAgICB9CgogICAgZ2V0IGZ1biBpZCgpOiBJbnQgewogICAgICAgIHJldHVybiBzZWxmLmlkOwogICAgfQp9Cg=="},"compiler":{"name":"tact","version":"1.6.3","parameters":"{\"entrypoint\":\"contracts/my_tact_con.tact\",\"options\":{\"debug\":true}}"}}`, 3 | }; 4 | -------------------------------------------------------------------------------- /src/source-verifier/tact-source-verifier.spec.ts: -------------------------------------------------------------------------------- 1 | import { TactSourceVerifier } from "./tact-source-verifier"; 2 | 3 | import { pkg162 } from "./res/tact162pkg"; 4 | import { pkg163 } from "./res/tact163pkg"; 5 | import { pkg141 } from "./res/tact141pkg"; 6 | 7 | import { supportedVersionsReader } from "../supported-versions-reader"; 8 | import { DynamicImporter } from "../dynamic-importer"; 9 | 10 | jest.mock("../supported-versions-reader", () => ({ 11 | supportedVersionsReader: { 12 | versions: jest.fn(), 13 | }, 14 | })); 15 | 16 | const versionsMock = supportedVersionsReader.versions as jest.Mock; 17 | 18 | beforeEach(() => { 19 | versionsMock.mockResolvedValue({ 20 | funcVersions: [], 21 | tactVersions: ["1.0.0", "1.4.1", "1.6.2", "1.6.3"], 22 | tolkVersions: [], 23 | }); 24 | }); 25 | 26 | jest.spyOn(DynamicImporter as any, "tryImport").mockImplementation(async () => { 27 | return import("tact-1.6.7"); 28 | }); 29 | 30 | describe("TactSourceVerifier", () => { 31 | const packages = [ 32 | [pkg141, "1.4.1"], 33 | [pkg162, "1.6.2"], 34 | [pkg163, "1.6.3"], 35 | ] as const; 36 | 37 | packages.forEach(([pkg, ver]) => { 38 | it(`Compiles ${ver}`, async () => { 39 | const tactVerifier = new TactSourceVerifier({ 40 | writeFile: async (_path, content) => {}, 41 | readFile: async (path) => { 42 | if (path === "echo.pkg") return Buffer.from(pkg.base64, "base64"); 43 | throw new Error("Unknown path"); 44 | }, 45 | readdir: async () => [], 46 | }); 47 | 48 | const res = await tactVerifier.verify({ 49 | compiler: "tact", 50 | compilerSettings: { tactVersion: ver }, 51 | knownContractAddress: "", 52 | knownContractHash: "XhyDRMeBeZs7IK/pJ0XFWNjeKTx2g6n0+hGQ/9Ne2SA=", 53 | 
senderAddress: "", 54 | sources: [ 55 | { 56 | path: "echo.pkg", 57 | }, 58 | ], 59 | tmpDir: "", 60 | }); 61 | 62 | console.log(res.error); 63 | 64 | expect(res.result).toEqual("unknown_error"); 65 | }); 66 | }); 67 | 68 | it("invalid file format", async function () { 69 | const tactVerifier = new TactSourceVerifier({ 70 | writeFile: async (_path, content) => {}, 71 | readFile: async (path) => { 72 | if (path === "echo.pkg") return Buffer.from("{{"); 73 | throw new Error("Unknown path"); 74 | }, 75 | readdir: async () => [], 76 | }); 77 | 78 | const res = await tactVerifier.verify({ 79 | compiler: "tact", 80 | compilerSettings: { tactVersion: "1.0.0-rc8" }, 81 | knownContractAddress: "", 82 | knownContractHash: "htGkXV77gc/Tx5Z55tyTyZT8aSpmpnpkFPZpe4lPMIQ=", 83 | senderAddress: "", 84 | sources: [ 85 | { 86 | path: "echo.pkg", 87 | }, 88 | ], 89 | tmpDir: "", 90 | }); 91 | 92 | expect(res.result).toEqual("unknown_error"); 93 | }); 94 | }); 95 | -------------------------------------------------------------------------------- /src/source-verifier/tact-source-verifier.ts: -------------------------------------------------------------------------------- 1 | import path from "path"; 2 | import semver from "semver"; 3 | import type { verify as VerifyFunctionLegacy } from "tact-1.4.0"; 4 | import { Logger, PackageFileFormat } from "tact-1.4.1"; 5 | import type { verify as VerifyFunction } from "tact-1.6.7"; 6 | import { Cell } from "ton"; 7 | import { DynamicImporter } from "../dynamic-importer"; 8 | import { getLogger } from "../logger"; 9 | import { CompileResult, SourceVerifier, SourceVerifyPayload } from "../types"; 10 | import { timeoutPromise } from "../utils"; 11 | 12 | const logger = getLogger("tact-source-verifier"); 13 | 14 | export type FileSystem = { 15 | readFile: (path: string) => Promise; 16 | writeFile: (path: string, content: string | Buffer) => Promise; 17 | readdir: (path: string) => Promise; 18 | }; 19 | 20 | class OutputAppendingLogger extends Logger { 21 | messages: unknown[] = []; 22 | debug(message: string) { 23 | this.messages.push(message); 24 | } 25 | info(message: string | Error): void { 26 | this.messages.push(message); 27 | } 28 | warn(message: string | Error): void { 29 | this.messages.push(message); 30 | } 31 | error(message: string | Error): void { 32 | this.messages.push(message); 33 | } 34 | } 35 | 36 | export class TactSourceVerifier implements SourceVerifier { 37 | fileSystem: FileSystem; 38 | 39 | constructor(fileSystem: FileSystem) { 40 | this.fileSystem = fileSystem; 41 | } 42 | 43 | private isLegacyLogger( 44 | verify: typeof VerifyFunctionLegacy | typeof VerifyFunction, 45 | version: string, 46 | ): verify is typeof VerifyFunctionLegacy { 47 | return semver.lte(version, "1.4.0"); 48 | } 49 | 50 | async verify(payload: SourceVerifyPayload): Promise { 51 | try { 52 | // Sort by depth because we want the original (top-level) pkg file 53 | const pkgFilePath = payload.sources 54 | .sort((a, b) => { 55 | const depthA = a.path.split("/").length; 56 | const depthB = b.path.split("/").length; 57 | return depthA - depthB; 58 | }) 59 | .find((s) => s.path.endsWith(".pkg"))!.path; 60 | 61 | const pkg = (await this.fileSystem.readFile(path.join(payload.tmpDir, pkgFilePath))).toString( 62 | "utf8", 63 | ); 64 | 65 | const pkgParsed: PackageFileFormat = JSON.parse(pkg); 66 | 67 | // Fix windows paths (START) - tact 1.3.0 should handle this automatically 68 | if (pkgParsed.sources) { 69 | pkgParsed.sources = Object.fromEntries( 70 | 
Object.entries(pkgParsed.sources).map(([key, value]) => [key.replace(/\\/g, "/"), value]), 71 | ); 72 | } 73 | 74 | try { 75 | const parameters = JSON.parse(pkgParsed.compiler.parameters ?? "{}"); 76 | if (parameters.entrypoint) { 77 | pkgParsed.compiler.parameters = pkgParsed.compiler.parameters?.replace(/\\/g, "/"); 78 | } 79 | } catch (e) { 80 | logger.error(e); 81 | logger.warn("Unable to replace windows paths in entrypoint. ", { 82 | info: pkgParsed.compiler, 83 | }); 84 | } 85 | // Fix windows paths (END) 86 | 87 | const compilerSettings = { 88 | tactVersion: pkgParsed.compiler.version, 89 | parameters: pkgParsed.compiler.parameters, 90 | }; 91 | 92 | const output: string[] = []; 93 | 94 | const module = await DynamicImporter.tryImport("tact", pkgParsed.compiler.version); 95 | console.log("Dynamically imported compiler version"); 96 | const verify: typeof VerifyFunctionLegacy | typeof VerifyFunction = module.verify; 97 | 98 | let vPromise; 99 | 100 | if (this.isLegacyLogger(verify, pkgParsed.compiler.version)) { 101 | vPromise = verify({ 102 | pkg, 103 | logger: { 104 | log: (message: string) => output.push(message), 105 | error: (message: string) => output.push(message), 106 | }, 107 | }); 108 | } else { 109 | vPromise = verify({ 110 | pkg, 111 | logger: new OutputAppendingLogger(), 112 | }); 113 | } 114 | 115 | const verificationResult = await timeoutPromise( 116 | vPromise, 117 | parseInt(process.env.COMPILE_TIMEOUT ?? "3000"), 118 | ); 119 | 120 | if (!verificationResult.ok) { 121 | logger.error("Failed to compile tact package", { 122 | output: output.join("\n"), 123 | verificationResult: verificationResult, 124 | compilerSettings, 125 | }); 126 | return { 127 | compilerSettings, 128 | error: [verificationResult.error, ...output].join("\n"), 129 | hash: null, 130 | result: 131 | verificationResult.error === "verification-failed" ? "not_similar" : "unknown_error", 132 | sources: [], 133 | }; 134 | } 135 | 136 | const sources = await Promise.all( 137 | Object.entries(verificationResult.files) 138 | .filter(([filename]) => filename.match(/\.(abi|tact|pkg)$/) && !filename.match(/\.\./)) 139 | .map(async ([filename, contentB64]) => { 140 | const writePath = path.join(payload.tmpDir, filename); 141 | let content = Buffer.from(contentB64, "base64").toString("utf-8"); 142 | if (filename.match(/\.(abi)/)) { 143 | content = JSON.stringify(JSON.parse(content), null, 3); 144 | } 145 | await this.fileSystem.writeFile(writePath, content); 146 | return { filename }; 147 | }), 148 | ); 149 | 150 | /* 151 | Add the original pkg file here. 152 | The reason for this is because in a verify flow what could happen is this: 153 | 1. User supplies "X.pkg" as source of truth 154 | 2. Tact source verifier on BE1 compiles and generates X.pkg, but also Y.pkg (this is possible due to the nature of tact compiler, which will generate a pkg file per contract) 155 | 3. Tact source verifier on BE2, trying to verify BE1 result, now has ambiguity on which pkg file to use 156 | 157 | Therefore we only add the original pkg file 158 | */ 159 | sources.push({ filename: pkgFilePath }); 160 | 161 | const compiledHash = Cell.fromBoc(Buffer.from(verificationResult.package.code, "base64"))[0] 162 | .hash() 163 | .toString("base64"); 164 | 165 | return { 166 | compilerSettings, 167 | error: null, 168 | hash: compiledHash, 169 | result: compiledHash === payload.knownContractHash ? 
"similar" : "not_similar", 170 | sources: sources.sort( 171 | ({ filename: filenameA }, { filename: filenameB }) => 172 | (filenameA.endsWith(".tact") ? 1 : 0) - (filenameB.endsWith(".tact") ? 1 : 0), 173 | ), 174 | }; 175 | } catch (e) { 176 | return { 177 | error: JSON.stringify(e, Object.getOwnPropertyNames(e)), 178 | hash: null, 179 | compilerSettings: { tactVersion: "unknown" }, 180 | sources: [], 181 | result: "unknown_error", 182 | }; 183 | } 184 | } 185 | } 186 | -------------------------------------------------------------------------------- /src/source-verifier/tolk-source-verifier.spec.ts: -------------------------------------------------------------------------------- 1 | import path from "path"; 2 | import { TolkSourceVerifier } from "./tolk-source-verifier"; 3 | import { mkdtemp, writeFile } from "fs/promises"; 4 | import os from "os"; 5 | import { randomBytes } from "tweetnacl"; 6 | import { TolkSourceToVerify } from "../types"; 7 | import { mkdir } from "fs/promises"; 8 | import { readFileSync } from "fs"; 9 | import { supportedVersionsReader } from "../supported-versions-reader"; 10 | 11 | const counterData = ` 12 | const OP_INCREASE = 0x7e8764ef; // arbitrary 32-bit number, equal to OP_INCREASE in wrappers/CounterContract.ts 13 | 14 | // storage variables 15 | 16 | // id is required to be able to create different instances of counters 17 | // since addresses in TON depend on the initial state of the contract 18 | global ctxID: int; 19 | global ctxCounter: int; 20 | 21 | // loadData populates storage variables from persistent storage 22 | fun loadData() { 23 | var ds = contract.getData().beginParse(); 24 | 25 | ctxID = ds.loadUint(32); 26 | ctxCounter = ds.loadUint(32); 27 | 28 | ds.assertEnd(); 29 | } 30 | 31 | // saveData stores storage variables as a cell into persistent storage 32 | fun saveData() { 33 | contract.setData( 34 | beginCell() 35 | .storeUint(ctxID, 32) 36 | .storeUint(ctxCounter, 32) 37 | .endCell() 38 | ); 39 | }`; 40 | 41 | const counterMain = ` 42 | // onInternalMessage is the main entrypoint; it's called when a contract receives an internal message from other contracts 43 | fun onInternalMessage(myBalance: int, msgValue: int, msgFull: cell, msgBody: slice) { 44 | if (msgBody.isEnd()) { // ignore all empty messages 45 | return; 46 | } 47 | 48 | var cs: slice = msgFull.beginParse(); 49 | val flags = cs.loadMessageFlags(); 50 | if (isMessageBounced(flags)) { // ignore all bounced messages 51 | return; 52 | } 53 | 54 | loadData(); // here we populate the storage variables 55 | 56 | val op = msgBody.loadMessageOp(); // by convention, the first 32 bits of incoming message is the op 57 | val queryID = msgBody.loadMessageQueryId(); // also by convention, the next 64 bits contain the "query id", although this is not always the case 58 | 59 | if (op == OP_INCREASE) { 60 | val increaseBy = msgBody.loadUint(32); 61 | ctxCounter += increaseBy; 62 | saveData(); 63 | return; 64 | } 65 | 66 | throw 0xffff; // if the message contains an op that is not known to this contract, we throw 67 | }`; 68 | 69 | const counterGetters = ` 70 | // get methods are a means to conveniently read contract data using, for example, HTTP APIs 71 | // note that unlike in many other smart contract VMs, get methods cannot be called by other contracts 72 | 73 | get currentCounter(): int { 74 | loadData(); 75 | return ctxCounter; 76 | } 77 | 78 | get initialId(): int { 79 | loadData(); 80 | return ctxID; 81 | }`; 82 | 83 | jest.mock("../supported-versions-reader", () => ({ 84 | 
supportedVersionsReader: { 85 | versions: jest.fn(), 86 | }, 87 | })); 88 | 89 | const versionsMock = supportedVersionsReader.versions as jest.Mock; 90 | 91 | beforeEach(() => { 92 | versionsMock.mockResolvedValue({ 93 | funcVersions: [], 94 | tactVersions: [], 95 | tolkVersions: ["0.12.0"], 96 | }); 97 | }); 98 | 99 | describe("Tolk source verifier", () => { 100 | let tolkVersions = ["0.12.0"]; 101 | 102 | it("tolk should compile and match expected hash", async () => { 103 | const sourceName = "counter.tolk"; 104 | const testSource = counterData + counterMain + counterGetters; 105 | const tolkVerifier = new TolkSourceVerifier((path) => { 106 | if (path == sourceName) { 107 | return testSource; 108 | } 109 | throw new Error(`Unknown path: ${path}`); 110 | }); 111 | 112 | for (let tolkVersion of tolkVersions) { 113 | const runTolkCompiler = (await import(`tolk-${tolkVersion}`)).runTolkCompiler; 114 | expect(runTolkCompiler).not.toBeUndefined(); 115 | 116 | const compileRes = await runTolkCompiler({ 117 | entrypointFileName: sourceName, 118 | fsReadCallback: (path: string) => testSource, 119 | }); 120 | if (compileRes.status !== "ok") { 121 | throw "Failed to compile"; 122 | } 123 | 124 | /* 125 | const tmpDir = await mkdtemp(path.join(os.tmpdir(), 'tolk_test')); 126 | const outPath = path.join(tmpDir, sourceName); 127 | await writeFile(outPath, testSource, { encoding: 'utf8' }); 128 | */ 129 | 130 | const resHash = Buffer.from(compileRes.codeHashHex, "hex").toString("base64"); 131 | 132 | const verifyRes = await tolkVerifier.verify({ 133 | compiler: "tolk", 134 | compilerSettings: { tolkVersion: tolkVersion }, 135 | knownContractAddress: "", 136 | knownContractHash: resHash, 137 | senderAddress: "", 138 | sources: [ 139 | { 140 | path: sourceName, 141 | isEntrypoint: true, 142 | } as any, 143 | ], 144 | tmpDir: "", 145 | }); 146 | 147 | expect(verifyRes.error).toBeNull(); 148 | expect(verifyRes.result).toEqual("similar"); 149 | expect(verifyRes.hash).toEqual(resHash); 150 | } 151 | }); 152 | 153 | it("tolk should compile and match expected hash with default readFile handler", async () => { 154 | const sourceName = "counter.tolk"; 155 | const testSource = counterData + counterMain + counterGetters; 156 | const tolkVerifier = new TolkSourceVerifier(); 157 | 158 | // Write file to tmp dir 159 | const tmpDir = await mkdtemp(path.join(os.tmpdir(), "tolk_test")); 160 | const outPath = path.join(tmpDir, sourceName); 161 | await writeFile(outPath, testSource, { encoding: "utf8" }); 162 | 163 | for (let tolkVersion of tolkVersions) { 164 | const runTolkCompiler = (await import(`tolk-${tolkVersion}`)).runTolkCompiler; 165 | expect(runTolkCompiler).not.toBeUndefined(); 166 | 167 | const compileRes = await runTolkCompiler({ 168 | entrypointFileName: sourceName, 169 | fsReadCallback: (path: string) => testSource, 170 | }); 171 | if (compileRes.status !== "ok") { 172 | throw "Failed to compile"; 173 | } 174 | 175 | const resHash = Buffer.from(compileRes.codeHashHex, "hex").toString("base64"); 176 | 177 | const verifyRes = await tolkVerifier.verify({ 178 | compiler: "tolk", 179 | compilerSettings: { tolkVersion: tolkVersion }, 180 | knownContractAddress: "", 181 | knownContractHash: resHash, 182 | senderAddress: "", 183 | sources: [ 184 | { 185 | path: sourceName, 186 | isEntrypoint: true, 187 | } as TolkSourceToVerify, 188 | ], 189 | tmpDir, 190 | }); 191 | 192 | expect(verifyRes.error).toBeNull(); 193 | expect(verifyRes.result).toEqual("similar"); 194 | expect(verifyRes.hash).toEqual(resHash); 195 | } 
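// Editor's note (illustrative comment, not part of the original spec file): the
// values compared in these tests are base64-encoded code cell hashes. The tolk
// compiler reports the hash as hex (codeHashHex), hence the
// Buffer.from(compileRes.codeHashHex, "hex").toString("base64") conversions in
// the tests, while the FunC/Fift/Tact verifiers in this repo derive the same
// representation via Cell.fromBoc(...)[0].hash().toString("base64").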
196 | }); 197 | 198 | it("verifier should reject non-matching hash", async () => { 199 | const sourceName = "counter.tolk"; 200 | const testSource = counterData + counterMain + counterGetters; 201 | const tolkVerifier = new TolkSourceVerifier((path) => { 202 | if (path == sourceName) { 203 | return testSource; 204 | } 205 | throw new Error(`Unknown path: ${path}`); 206 | }); 207 | 208 | for (let tolkVersion of tolkVersions) { 209 | const runTolkCompiler = (await import(`tolk-${tolkVersion}`)).runTolkCompiler; 210 | expect(runTolkCompiler).not.toBeUndefined(); 211 | 212 | const compileRes = await runTolkCompiler({ 213 | entrypointFileName: sourceName, 214 | fsReadCallback: (path: string) => testSource, 215 | }); 216 | if (compileRes.status !== "ok") { 217 | throw "Failed to compile"; 218 | } 219 | 220 | let resHash = Buffer.from(randomBytes(32)).toString("base64"); 221 | 222 | const verifyRes = await tolkVerifier.verify({ 223 | compiler: "tolk", 224 | compilerSettings: { tolkVersion: tolkVersion }, 225 | knownContractAddress: "", 226 | knownContractHash: resHash, 227 | senderAddress: "", 228 | sources: [ 229 | { 230 | path: sourceName, 231 | isEntrypoint: true, 232 | } as TolkSourceToVerify, 233 | ], 234 | tmpDir: "", 235 | }); 236 | 237 | expect(verifyRes.error).toBeNull(); 238 | expect(verifyRes.result).toBe("not_similar"); 239 | } 240 | }); 241 | it("verifier should handle multiple files scenario", async () => { 242 | const mainSource = ` 243 | import "import/data.tolk"; 244 | import "import/getters.tolk"; 245 | 246 | ${counterMain}`; 247 | const gettersSource = ` 248 | import "data.tolk"; 249 | 250 | ${counterGetters} 251 | `; 252 | 253 | const sourceName = "counter.tolk"; 254 | 255 | const readCb = (path: string) => { 256 | if (path == "import/data.tolk") { 257 | return counterData; 258 | } else if (path == "import/getters.tolk") { 259 | return gettersSource; 260 | } else if (path == sourceName) { 261 | return mainSource; 262 | } 263 | throw new Error(`Unknown path: ${path}`); 264 | }; 265 | 266 | const tolkVerifier = new TolkSourceVerifier(readCb); 267 | 268 | for (let tolkVersion of tolkVersions) { 269 | const runTolkCompiler = (await import(`tolk-${tolkVersion}`)).runTolkCompiler; 270 | expect(runTolkCompiler).not.toBeUndefined(); 271 | 272 | const compileRes = await runTolkCompiler({ 273 | entrypointFileName: sourceName, 274 | fsReadCallback: readCb, 275 | }); 276 | 277 | if (compileRes.status !== "ok") { 278 | throw "Failed to compile"; 279 | } 280 | 281 | const resHash = Buffer.from(compileRes.codeHashHex, "hex").toString("base64"); 282 | 283 | const verifyRes = await tolkVerifier.verify({ 284 | compiler: "tolk", 285 | compilerSettings: { tolkVersion: tolkVersion }, 286 | knownContractAddress: "", 287 | knownContractHash: resHash, 288 | senderAddress: "", 289 | sources: [ 290 | { 291 | path: sourceName, 292 | isEntrypoint: true, 293 | } as TolkSourceToVerify, 294 | ], 295 | tmpDir: "", 296 | }); 297 | 298 | expect(verifyRes.error).toBeNull(); 299 | expect(verifyRes.result).toEqual("similar"); 300 | expect(verifyRes.hash).toEqual(resHash); 301 | } 302 | }); 303 | 304 | it("verifier should handle multiple files scenario with default readFile callback", async () => { 305 | const mainSource = ` 306 | import "import/data.tolk"; 307 | import "import/getters.tolk"; 308 | 309 | ${counterMain}`; 310 | const gettersSource = ` 311 | import "data.tolk"; 312 | 313 | ${counterGetters} 314 | `; 315 | 316 | const sourceName = "counter.tolk"; 317 | 318 | const readCb = (path: string) => 
readFileSync(path, { encoding: "utf8" }); 319 | 320 | // Write file to tmp dir 321 | const tmpDir = await mkdtemp(path.join(os.tmpdir(), "tolk_test_multiple")); 322 | await mkdir(path.join(tmpDir, "import")); 323 | for (let pathTuple of [ 324 | [sourceName, mainSource], 325 | ["import/data.tolk", counterData], 326 | ["import/getters.tolk", gettersSource], 327 | ]) { 328 | const outPath = path.join(tmpDir, pathTuple[0]); 329 | await writeFile(outPath, pathTuple[1], { encoding: "utf8" }); 330 | } 331 | 332 | const tolkVerifier = new TolkSourceVerifier(); // Default readFile handler 333 | 334 | for (let tolkVersion of tolkVersions) { 335 | const runTolkCompiler = (await import(`tolk-${tolkVersion}`)).runTolkCompiler; 336 | expect(runTolkCompiler).not.toBeUndefined(); 337 | 338 | const compileRes = await runTolkCompiler({ 339 | entrypointFileName: path.join(tmpDir, sourceName), 340 | fsReadCallback: readCb, 341 | }); 342 | 343 | if (compileRes.status !== "ok") { 344 | throw "Failed to compile"; 345 | } 346 | 347 | const resHash = Buffer.from(compileRes.codeHashHex, "hex").toString("base64"); 348 | 349 | const verifyRes = await tolkVerifier.verify({ 350 | compiler: "tolk", 351 | compilerSettings: { tolkVersion: tolkVersion }, 352 | knownContractAddress: "", 353 | knownContractHash: resHash, 354 | senderAddress: "", 355 | sources: [ 356 | { 357 | path: sourceName, 358 | isEntrypoint: true, 359 | } as TolkSourceToVerify, 360 | ], 361 | tmpDir, 362 | }); 363 | 364 | expect(verifyRes.error).toBeNull(); 365 | expect(verifyRes.result).toEqual("similar"); 366 | expect(verifyRes.hash).toEqual(resHash); 367 | } 368 | }); 369 | }); 370 | -------------------------------------------------------------------------------- /src/source-verifier/tolk-source-verifier.ts: -------------------------------------------------------------------------------- 1 | import path from "path"; 2 | import { 3 | CompileResult, 4 | SourceVerifier, 5 | SourceVerifyPayload, 6 | TolkCliCompileSettings, 7 | TolkSourceToVerify, 8 | } from "../types"; 9 | import { supportedVersionsReader } from "../supported-versions-reader"; 10 | import type { runTolkCompiler as CompileFunction } from "tolk-0.12.0"; 11 | import { readFileSync } from "fs"; 12 | import { timeoutPromise } from "../utils"; 13 | import { DynamicImporter } from "../dynamic-importer"; 14 | import { getLogger } from "../logger"; 15 | 16 | // Matches tolk fsReadCallback. Synchronous for whatever reason?? 
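// Editor's illustrative sketch -- not part of tolk-source-verifier.ts. Because the
// tolk fsReadCallback is synchronous, a verifier instance can be backed by an
// in-memory map of sources instead of the filesystem, which is what the spec file
// above does with hand-written readers. The names below (inMemorySources,
// mapReader) and the placeholder source text are hypothetical.
const inMemorySources = new Map<string, string>([
  ["counter.tolk", "// placeholder tolk source"],
]);
const mapReader = (p: string): string => {
  const source = inMemorySources.get(p);
  if (source === undefined) throw new Error(`Unknown path: ${p}`);
  return source;
};
// Hypothetical usage: new TolkSourceVerifier(mapReader), mirroring the pattern in
// tolk-source-verifier.spec.ts.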
17 | export type TolkVerifierReadCallback = (path: string) => string; 18 | 19 | const logger = getLogger("tolk-verifier"); 20 | 21 | export class TolkSourceVerifier implements SourceVerifier { 22 | readFile: TolkVerifierReadCallback; 23 | 24 | constructor(fileHook?: TolkVerifierReadCallback) { 25 | if (fileHook) { 26 | this.readFile = fileHook; 27 | } else { 28 | this.readFile = (path: string) => readFileSync(path, { encoding: "utf8" }); 29 | } 30 | } 31 | async verify(payload: SourceVerifyPayload): Promise<CompileResult> { 32 | const tolkCompilerOpts: TolkCliCompileSettings = 33 | payload.compilerSettings as TolkCliCompileSettings; 34 | 35 | try { 36 | if (payload.compiler !== "tolk") { 37 | throw new Error("Invalid compiler type passed to tolk verifier: " + payload.compiler); 38 | } 39 | 40 | const entry = payload.sources.filter((s: TolkSourceToVerify) => s.isEntrypoint); 41 | 42 | if (entry.length == 0) { 43 | throw new Error("No entrypoint found"); 44 | } 45 | if (entry.length > 1) { 46 | throw new Error("Multiple entrypoints found"); 47 | } 48 | 49 | const entryPath = path.join(payload.tmpDir, entry[0].path); 50 | 51 | const tolkModule = await DynamicImporter.tryImport("tolk", tolkCompilerOpts.tolkVersion); 52 | 53 | const tolkCompile: typeof CompileFunction = tolkModule.runTolkCompiler; 54 | 55 | const compileRes = await timeoutPromise( 56 | tolkCompile({ 57 | entrypointFileName: entryPath, 58 | fsReadCallback: (filePath) => { 59 | if (payload.tmpDir) { 60 | // Make sure the compiler is not allowed to include files outside of the temp dir 61 | const rootPath = filePath.slice(0, payload.tmpDir.length); 62 | const remainingPath = filePath.slice(payload.tmpDir.length); 63 | if ( 64 | rootPath != payload.tmpDir || 65 | remainingPath[0] != path.sep || 66 | path.relative(payload.tmpDir, filePath) != remainingPath.slice(1) 67 | ) { 68 | throw new Error(`Invalid include path: ${filePath}`); 69 | } 70 | } 71 | 72 | return this.readFile(filePath); 73 | }, 74 | }), 75 | parseInt(process.env.COMPILE_TIMEOUT ?? "1000"), 76 | ); 77 | 78 | if (compileRes.status == "error") { 79 | return { 80 | result: "compile_error", 81 | error: compileRes.message, 82 | hash: null, 83 | compilerSettings: tolkCompilerOpts, 84 | sources: payload.sources.map((s) => { 85 | return { filename: s.path }; 86 | }), 87 | }; 88 | } 89 | 90 | const base64Hash = Buffer.from(compileRes.codeHashHex, "hex").toString("base64"); 91 | return { 92 | hash: base64Hash, 93 | result: base64Hash === payload.knownContractHash ?
"similar" : "not_similar", 94 | error: null, 95 | compilerSettings: tolkCompilerOpts, 96 | sources: payload.sources.map((s) => { 97 | return { filename: s.path }; 98 | }), 99 | }; 100 | } catch (e) { 101 | logger.error(e); 102 | return { 103 | result: "unknown_error", 104 | compilerSettings: tolkCompilerOpts, 105 | error: e.toString(), 106 | hash: null, 107 | sources: payload.sources.map((s) => { 108 | return { filename: s.path }; 109 | }), 110 | }; 111 | } 112 | } 113 | } 114 | -------------------------------------------------------------------------------- /src/supported-versions-reader.ts: -------------------------------------------------------------------------------- 1 | import axios from "axios"; 2 | import { getLogger } from "./logger"; 3 | import promiseRetry from "promise-retry"; 4 | 5 | class SupportedVersionsReader { 6 | private logger = getLogger("SupportedVersionsReader"); 7 | private _versions: { 8 | funcVersions: string[]; 9 | tactVersions: string[]; 10 | tolkVersions: string[]; 11 | } | null = null; 12 | 13 | private fetchPromise: Promise<void> | null = null; 14 | 15 | constructor() { 16 | setInterval(() => { 17 | this.readVersions(); 18 | }, 30_000); 19 | void this.readVersions(); 20 | } 21 | 22 | private async readVersions() { 23 | if (this.fetchPromise) return this.fetchPromise; 24 | this.fetchPromise = (async () => { 25 | try { 26 | await promiseRetry( 27 | async () => { 28 | const { data } = await axios.get( 29 | "https://raw.githubusercontent.com/ton-community/contract-verifier-config/main/config.json", 30 | { responseType: "json" }, 31 | ); 32 | if (!this._versions) { 33 | this.logger.info(`Initial fetch of supported versions successful`); 34 | } 35 | this._versions = { 36 | funcVersions: data.funcVersions, 37 | tactVersions: data.tactVersions, 38 | tolkVersions: data.tolkVersions, 39 | }; 40 | }, 41 | { 42 | retries: 3, 43 | }, 44 | ); 45 | } catch (e) { 46 | this.logger.warn(e); 47 | } finally { 48 | this.fetchPromise = null; 49 | } 50 | })(); 51 | 52 | return this.fetchPromise; 53 | } 54 | 55 | async versions() { 56 | if (this._versions === null) { 57 | await this.readVersions(); 58 | } 59 | 60 | if (this._versions === null) { 61 | throw new Error("Versions were not fetched"); 62 | } 63 | 64 | return this._versions; 65 | } 66 | } 67 | 68 | export const supportedVersionsReader = new SupportedVersionsReader(); 69 | -------------------------------------------------------------------------------- /src/ton-reader-client.ts: -------------------------------------------------------------------------------- 1 | import { Address, Cell, Dictionary, TonClient } from "ton"; 2 | import { DictionaryValue } from "ton-core"; 3 | import { toBigIntBE, toBufferBE } from "bigint-buffer"; 4 | import { sha256 } from "./utils"; 5 | import { getHttpEndpoint } from "@orbs-network/ton-access"; 6 | import { ContractVerifier } from "@ton-community/contract-verifier-sdk"; 7 | import { VerifierRegistry } from "./wrappers/verifier-registry"; 8 | import { SourcesRegistry } from "./wrappers/sources-registry"; 9 | 10 | export type VerifierConfig = { 11 | verifiers: Buffer[]; 12 | quorum: number; 13 | }; 14 | 15 | export interface TonReaderClient { 16 | isProofDeployed(codeCellHash: string, verifierId: string): Promise<boolean>; 17 | getVerifierConfig(verifierId: string, verifierRegistryAddress: string): Promise<VerifierConfig>; 18 | } 19 | 20 | export async function getTonClient() { 21 | const endpoint = await getHttpEndpoint({ 22 | network: process.env.NETWORK === "testnet" ?
"testnet" : "mainnet", 23 | }); 24 | console.log("Using endpoint: " + endpoint); 25 | return new TonClient({ endpoint, apiKey: process.env.TON_ACCESS_API_KEY }); 26 | } 27 | 28 | export function createNullValue(): DictionaryValue<null> { 29 | return { 30 | serialize: (src, builder) => { 31 | builder; 32 | }, 33 | parse: (src) => { 34 | return null; 35 | }, 36 | }; 37 | } 38 | 39 | export class TonReaderClientImpl implements TonReaderClient { 40 | async getVerifierConfig( 41 | verifierId: string, 42 | sourcesRegistryAddress: string, 43 | ): Promise<VerifierConfig> { 44 | const tc = await getTonClient(); 45 | 46 | const sourcesRegistryContract = tc.open( 47 | SourcesRegistry.createFromAddress(Address.parse(sourcesRegistryAddress)), 48 | ); 49 | 50 | const verifierRegistryAddress = await sourcesRegistryContract.getVerifierRegistryAddress(); 51 | const verifierRegistryContract = tc.open( 52 | VerifierRegistry.createFromAddress(verifierRegistryAddress), 53 | ); 54 | 55 | const res = await verifierRegistryContract.getVerifier(toBigIntBE(sha256(verifierId))); 56 | const verifierConfig = res.settings!.beginParse(); 57 | 58 | const quorum = verifierConfig.loadUint(8); 59 | const verifiers = Array.from( 60 | verifierConfig.loadDict(Dictionary.Keys.BigUint(256), createNullValue()).keys(), 61 | ).map((k) => toBufferBE(k, 32)); 62 | 63 | return { 64 | verifiers, 65 | quorum, 66 | }; 67 | } 68 | 69 | async isProofDeployed(codeCellHash: string, verifierId: string): Promise<boolean> { 70 | return !!(await ContractVerifier.getSourcesJsonUrl(codeCellHash, { 71 | verifier: verifierId, 72 | testnet: process.env.NETWORK === "testnet", 73 | })); 74 | } 75 | } 76 | -------------------------------------------------------------------------------- /src/types.ts: -------------------------------------------------------------------------------- 1 | export type Compiler = "func" | "fift" | "tolk" | "tact"; 2 | 3 | import { FuncCompilerVersion } from "@ton-community/contract-verifier-sdk"; 4 | 5 | export interface SourceVerifier { 6 | verify(payload: SourceVerifyPayload): Promise<CompileResult>; 7 | } 8 | 9 | export type VerifyResult = { 10 | compileResult: CompileResult; 11 | sig?: string; 12 | ipfsLink?: string; 13 | msgCell?: Buffer; 14 | }; 15 | 16 | export type FuncCliCompileSettings = { 17 | funcVersion: FuncCompilerVersion; 18 | commandLine: string; 19 | }; 20 | 21 | export type FiftCliCompileSettings = { 22 | fiftVersion: string; 23 | commandLine: string; 24 | }; 25 | 26 | export type TolkCliCompileSettings = { 27 | tolkVersion: string; 28 | }; 29 | 30 | export type TactCliCompileSettings = {}; 31 | 32 | export type FuncSourceCompileResult = { 33 | includeInCommand: boolean; 34 | isEntrypoint: boolean; 35 | isStdLib: boolean; 36 | hasIncludeDirectives: boolean; 37 | filename: string; 38 | }; 39 | 40 | export type FiftSourceCompileResult = { 41 | filename: string; 42 | }; 43 | 44 | export type TolkSourceCompileResult = { 45 | filename: string; 46 | }; 47 | 48 | export type TactSourceCompileResult = { 49 | filename: string; 50 | }; 51 | 52 | export type CompileResult = { 53 | result: "similar" | "not_similar" | "compile_error" | "unknown_error"; 54 | error: string | null; 55 | hash: string | null; 56 | compilerSettings: 57 | | FuncCliCompileSettings 58 | | FiftCliCompileSettings 59 | | TolkCliCompileSettings 60 | | TactCliCompileSettings; 61 | sources: ( 62 | | FuncSourceCompileResult 63 | | FiftSourceCompileResult 64 | | TolkSourceCompileResult 65 | | TactSourceCompileResult 66 | )[]; 67 | }; 68 | 69 | type Path = string; 70 | 71 | export type SourceToVerify
= { 72 | path: Path; 73 | }; 74 | 75 | export type FuncSourceToVerify = SourceToVerify & { 76 | // TODO - these will be removed and done exclusively on the backend 77 | includeInCommand: boolean; 78 | isEntrypoint: boolean; 79 | isStdLib: boolean; 80 | hasIncludeDirectives: boolean; 81 | }; 82 | 83 | export type TolkSourceToVerify = SourceToVerify & { 84 | isEntrypoint: boolean; 85 | }; 86 | 87 | export type CompileOptions = { 88 | compiler: Compiler; 89 | compilerSettings: 90 | | FuncCliCompileSettings 91 | | FiftCliCompileSettings 92 | | TolkCliCompileSettings 93 | | TactCliCompileSettings; 94 | }; 95 | 96 | export type SourceVerifyPayload = CompileOptions & { 97 | sources: SourceToVerify[]; 98 | knownContractAddress: string; 99 | knownContractHash: string; 100 | tmpDir: string; 101 | senderAddress: string; 102 | }; 103 | 104 | export type SourceItem = { 105 | compilerSettings: FuncCliCompileSettings | FiftCliCompileSettings | TactCliCompileSettings; 106 | compiler: Compiler; 107 | hash: string; 108 | verificationDate: number; 109 | sources: ({ 110 | url: string; 111 | } & ( 112 | | FuncSourceCompileResult 113 | | TactSourceCompileResult 114 | | TolkSourceCompileResult 115 | | FiftSourceCompileResult 116 | ))[]; 117 | knownContractAddress: string; 118 | }; 119 | -------------------------------------------------------------------------------- /src/utils.ts: -------------------------------------------------------------------------------- 1 | import { exec, ExecException, ExecOptions } from "child_process"; 2 | import crypto from "crypto"; 3 | 4 | export function sha256(s: string): Buffer { 5 | return crypto.createHash("sha256").update(s).digest(); 6 | } 7 | 8 | export function random64BitNumber() { 9 | const randomBool = () => (Math.random() > 0.5 ? 
1 : 0); 10 | const random64BitNumber = Array.from({ length: 64 }, randomBool).join(""); 11 | return BigInt("0b" + random64BitNumber); 12 | } 13 | 14 | export function getNowHourRoundedDown() { 15 | const date = new Date(); 16 | date.setMinutes(0); 17 | date.setSeconds(0); 18 | date.setMilliseconds(0); 19 | return date; 20 | } 21 | 22 | interface ExecResult { 23 | stdout: string; 24 | stderr: string; 25 | } 26 | 27 | export function execAsyncWithTimeout( 28 | command: string, 29 | timeout: number, 30 | options?: ExecOptions, 31 | ): Promise<ExecResult> { 32 | return new Promise((resolve, reject) => { 33 | let timedOut = false; 34 | const timer = setTimeout(() => { 35 | timedOut = true; 36 | if (childProcess) { 37 | childProcess.kill(); 38 | } 39 | reject(new Error("Execution timed out")); 40 | }, timeout); 41 | 42 | const childProcess = exec( 43 | command, 44 | options, 45 | (error: ExecException | null, stdout: string, stderr: string) => { 46 | clearTimeout(timer); 47 | if (timedOut) return; // Ignore if the process was already killed due to timeout 48 | 49 | if (error) { 50 | reject(error); 51 | } else { 52 | resolve({ stdout, stderr }); 53 | } 54 | }, 55 | ); 56 | }); 57 | } 58 | 59 | export function timeoutPromise<T>(promise: Promise<T>, timeout: number): Promise<T> { 60 | return new Promise((resolve, reject) => { 61 | const timer = setTimeout(() => { 62 | reject(new Error("Timed out")); 63 | }, timeout); 64 | 65 | promise 66 | .then((value) => { 67 | clearTimeout(timer); 68 | resolve(value); 69 | }) 70 | .catch((err) => { 71 | clearTimeout(timer); 72 | reject(err); 73 | }); 74 | }); 75 | } 76 | -------------------------------------------------------------------------------- /src/validate-message-cell.ts: -------------------------------------------------------------------------------- 1 | import { Address, Cell, Slice } from "ton"; 2 | import tweetnacl from "tweetnacl"; 3 | import { DEPLOY_SOURCE_OP, FORWARD_MESSAGE_OP } from "./cell-builders"; 4 | import { VerifierConfig } from "./ton-reader-client"; 5 | 6 | function validateSignatureCell( 7 | slice: Slice, 8 | signedCell: Cell, 9 | keypair: tweetnacl.SignKeyPair, 10 | verifierConfig: VerifierConfig, 11 | ) { 12 | let currRef: Slice | null = slice; 13 | 14 | if (verifierConfig.quorum <= 1 || verifierConfig.verifiers.length < verifierConfig.quorum) { 15 | throw new Error("Mulisig quorum must be greater than 1"); 16 | } 17 | 18 | if (!verifierConfig.verifiers.find((v) => v.equals(keypair.publicKey))) { 19 | throw new Error("This verifier is not in the multisig config"); 20 | } 21 | 22 | const sigs: Record<string, boolean> = {}; 23 | let sigCount = 0; 24 | 25 | while (currRef) { 26 | sigCount += 1; 27 | 28 | if (sigCount >= verifierConfig.quorum) { 29 | throw new Error("Too many signatures"); 30 | } 31 | 32 | if (currRef.remainingBits !== 512 + 256) { 33 | throw new Error("Invalid signature cell"); 34 | } 35 | 36 | const sig = currRef.loadBuffer(512 / 8); 37 | 38 | if (sigs[sig.toString("base64")] === true) { 39 | throw new Error("Duplicate signature"); 40 | } 41 | 42 | const pubKey = currRef.loadBuffer(256 / 8); 43 | 44 | if (pubKey.equals(keypair.publicKey)) { 45 | throw new Error("Invalid signature (signed by self)"); 46 | } 47 | 48 | const isValid = tweetnacl.sign.detached.verify(signedCell.hash(), sig, pubKey); 49 | 50 | if (!isValid) { 51 | throw new Error("Invalid signature"); 52 | } 53 | 54 | if (currRef.remainingRefs === 1) { 55 | currRef = currRef.loadRef().asSlice(); 56 | } else if (currRef.remainingRefs === 0) { 57 | currRef = null; 58 | } else { 59 | throw new
Error("Invalid signature cell"); 60 | } 61 | 62 | sigs[sig.toString("base64")] = true; 63 | } 64 | } 65 | 66 | function validateSourcesRegistryMessageCell(slice: Slice, verifierId: Buffer) { 67 | if (slice.remainingBits !== 32 + 64 + 256 + 256 || slice.remainingRefs !== 1) { 68 | throw new Error("Invalid sources registry body cell"); 69 | } 70 | 71 | if (slice.loadUint(32) !== DEPLOY_SOURCE_OP) { 72 | throw new Error("Invalid deploy source op"); 73 | } 74 | 75 | slice.skip(64); 76 | 77 | const verifierInMsg = slice.loadBuffer(32); 78 | 79 | if (!verifierInMsg.equals(verifierId)) { 80 | throw new Error("Invalid verifier id"); 81 | } 82 | 83 | const codeCellHash = slice.loadBuffer(32).toString("base64"); 84 | 85 | const contentCell = slice.loadRef().asSlice(); 86 | if (contentCell.loadUint(8) !== 1) { 87 | throw new Error("Unsupported version of source item content cell"); 88 | } 89 | 90 | const ipfsPointer = contentCell.loadBuffer(contentCell.remainingBits / 8).toString("utf-8"); 91 | return { 92 | codeCellHash, 93 | ipfsPointer, 94 | }; 95 | } 96 | 97 | function validateVerifierRegistryBodyCell( 98 | slice: Slice, 99 | verifierId: Buffer, 100 | sourcesRegistryAddress: string, 101 | ) { 102 | if (slice.remainingBits !== 256 + 32 + 267 + 267 || slice.remainingRefs !== 1) { 103 | throw new Error("Invalid verifier body cell"); 104 | } 105 | 106 | const verifierInMsg = slice.loadBuffer(32); 107 | 108 | if (!verifierInMsg.equals(verifierId)) { 109 | throw new Error("Invalid verifier id"); 110 | } 111 | 112 | const date = slice.loadUint(32); 113 | 114 | const dateInMessage = new Date(date * 1000); 115 | 116 | if (dateInMessage < new Date()) { 117 | throw new Error("Message is expired"); 118 | } 119 | 120 | const senderAddress = slice.loadAddress()!; 121 | const sourcesRegInMsg = slice.loadAddress()!; 122 | 123 | if (sourcesRegInMsg.toString() !== sourcesRegistryAddress) { 124 | throw new Error("Invalid sources registry address"); 125 | } 126 | 127 | return { 128 | senderAddress, 129 | date, 130 | ...validateSourcesRegistryMessageCell(slice.loadRef().asSlice(), verifierId), 131 | }; 132 | } 133 | 134 | export function validateMessageCell( 135 | cell: Cell, 136 | verifierId: Buffer, 137 | sourcesRegistryAddress: string, 138 | keypair: tweetnacl.SignKeyPair, 139 | verifierConfig: VerifierConfig, 140 | ) { 141 | const slice = cell.beginParse(); 142 | if (slice.remainingBits !== 32 + 64 || slice.remainingRefs !== 2) { 143 | throw new Error("Invalid cell"); 144 | } 145 | 146 | // Validate message cell 147 | if (slice.loadUint(32) !== FORWARD_MESSAGE_OP) { 148 | throw new Error("Invalid operation"); 149 | } 150 | 151 | const queryId = slice.loadUint(64); 152 | 153 | const signedCell = slice.loadRef(); 154 | 155 | const { ipfsPointer, codeCellHash, senderAddress, date } = validateVerifierRegistryBodyCell( 156 | signedCell.asSlice(), 157 | verifierId, 158 | sourcesRegistryAddress, 159 | ); 160 | validateSignatureCell(slice.loadRef().asSlice(), signedCell, keypair, verifierConfig); 161 | 162 | return { 163 | ipfsPointer, 164 | codeCellHash, 165 | senderAddress, 166 | date, 167 | queryId, 168 | }; 169 | } 170 | -------------------------------------------------------------------------------- /src/wrappers/source-item.ts: -------------------------------------------------------------------------------- 1 | import { Address, beginCell, Cell, Contract, ContractProvider, Sender, SendMode } from "ton-core"; 2 | 3 | export class SourceItem implements Contract { 4 | constructor(readonly address: Address, readonly 
init?: { code: Cell; data: Cell }) {} 5 | 6 | static createFromAddress(address: Address) { 7 | return new SourceItem(address); 8 | } 9 | 10 | async sendInternalMessage(provider: ContractProvider, via: Sender, body: Cell, value: bigint) { 11 | await provider.internal(via, { 12 | value: value, 13 | sendMode: SendMode.PAY_GAS_SEPARATELY, 14 | body: body, 15 | }); 16 | } 17 | 18 | async sendDeploy(provider: ContractProvider, via: Sender, value: bigint) { 19 | await provider.internal(via, { 20 | value, 21 | sendMode: SendMode.PAY_GAS_SEPARATELY, 22 | body: beginCell().endCell(), 23 | }); 24 | } 25 | 26 | async getData(provider: ContractProvider): Promise<{ verifierId: bigint; data: Cell | null }> { 27 | const result = await provider.get("get_source_item_data", []); 28 | const verifierId = result.stack.readBigNumber(); 29 | result.stack.skip(2); 30 | return { verifierId, data: result.stack.readCellOpt() }; 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /src/wrappers/sources-registry.ts: -------------------------------------------------------------------------------- 1 | import { 2 | Address, 3 | beginCell, 4 | Cell, 5 | Contract, 6 | contractAddress, 7 | ContractProvider, 8 | fromNano, 9 | Sender, 10 | SendMode, 11 | toNano, 12 | } from "ton-core"; 13 | 14 | import { toBigIntBE } from "bigint-buffer"; 15 | import { Sha256 } from "@aws-crypto/sha256-js"; 16 | 17 | export function sourceRegistryConfigToCell(params: { 18 | minTons: bigint; 19 | maxTons: bigint; 20 | verifierRegistryAddress: Address; 21 | admin: Address; 22 | sourceItemCode: Cell; 23 | }): Cell { 24 | return beginCell() 25 | .storeCoins(params.minTons) 26 | .storeCoins(params.maxTons) 27 | .storeAddress(params.admin) 28 | .storeAddress(params.verifierRegistryAddress) 29 | .storeRef(params.sourceItemCode) 30 | .endCell(); 31 | } 32 | 33 | export const toSha256Buffer = (s: string) => { 34 | const sha = new Sha256(); 35 | sha.update(s); 36 | return Buffer.from(sha.digestSync()); 37 | }; 38 | 39 | export class SourcesRegistry implements Contract { 40 | constructor(readonly address: Address, readonly init?: { code: Cell; data: Cell }) {} 41 | 42 | static createFromAddress(address: Address) { 43 | return new SourcesRegistry(address); 44 | } 45 | 46 | static create( 47 | verifierRegistryAddress: Address, 48 | admin: Address, 49 | code: Cell, 50 | sourceItemCode: Cell, 51 | workchain = 0, 52 | ) { 53 | const data = sourceRegistryConfigToCell({ 54 | minTons: toNano("0.065"), 55 | maxTons: toNano(1), 56 | admin: admin, 57 | verifierRegistryAddress: verifierRegistryAddress, 58 | sourceItemCode, 59 | }); 60 | const init = { code, data }; 61 | return new SourcesRegistry(contractAddress(workchain, init), init); 62 | } 63 | 64 | async sendInternalMessage(provider: ContractProvider, via: Sender, body: Cell, value: bigint) { 65 | await provider.internal(via, { 66 | value: value, 67 | sendMode: SendMode.PAY_GAS_SEPARATELY, 68 | body: body, 69 | }); 70 | } 71 | 72 | async sendDeploy(provider: ContractProvider, via: Sender, value: bigint, bounce = true) { 73 | await provider.internal(via, { 74 | value, 75 | sendMode: SendMode.PAY_GAS_SEPARATELY, 76 | body: beginCell().endCell(), 77 | bounce, 78 | }); 79 | } 80 | 81 | async getSourceItemAddress( 82 | provider: ContractProvider, 83 | verifier: string, 84 | codeCellHash: string, 85 | ): Promise
<Address> { 86 | const result = await provider.get("get_source_item_address", [ 87 | { 88 | type: "int", 89 | value: toBigIntBE(toSha256Buffer(verifier)), 90 | }, 91 | { 92 | type: "int", 93 | value: toBigIntBE(Buffer.from(codeCellHash, "base64")), 94 | }, 95 | ]); 96 | const item = result.stack.readCell(); 97 | return item.beginParse().loadAddress()!; 98 | } 99 | 100 | async getVerifierRegistryAddress(provider: ContractProvider): Promise<Address>
{ 101 | const res = await provider.get("get_verifier_registry_address", []); 102 | const item = res.stack.readCell(); 103 | return item.beginParse().loadAddress(); 104 | } 105 | 106 | async getAdminAddress(provider: ContractProvider) { 107 | const res = await provider.get("get_admin_address", []); 108 | const item = res.stack.readCell(); 109 | return item.beginParse().loadMaybeAddress(); 110 | } 111 | 112 | async getCodeOpt(provider: ContractProvider) { 113 | const state = await provider.getState(); 114 | if (state.state.type != "active") return null; 115 | return state.state.code; 116 | } 117 | 118 | async getDeploymentCosts(provider: ContractProvider) { 119 | const res = await provider.get("get_deployment_costs", []); 120 | const min = res.stack.readBigNumber(); 121 | const max = res.stack.readBigNumber(); 122 | return { min: fromNano(min), max: fromNano(max) }; 123 | } 124 | 125 | async sendDeploySource( 126 | provider: ContractProvider, 127 | via: Sender, 128 | params: { 129 | verifierId: string; 130 | codeCellHash: string; 131 | jsonURL: string; 132 | version: number; 133 | value: bigint; 134 | }, 135 | ) { 136 | const body = beginCell() 137 | .storeUint(1002, 32) 138 | .storeUint(0, 64) 139 | .storeBuffer(toSha256Buffer(params.verifierId)) 140 | .storeUint(toBigIntBE(Buffer.from(params.codeCellHash, "base64")), 256) 141 | .storeRef(beginCell().storeUint(params.version, 8).storeStringTail(params.jsonURL).endCell()) // TODO support snakes 142 | .endCell(); 143 | await provider.internal(via, { 144 | value: params.value, 145 | sendMode: SendMode.PAY_GAS_SEPARATELY, 146 | body, 147 | }); 148 | } 149 | 150 | async sendChangeVerifierRegistry( 151 | provider: ContractProvider, 152 | via: Sender, 153 | params: { value: bigint; newVerifierRegistry: Address }, 154 | ) { 155 | const body = beginCell() 156 | .storeUint(2003, 32) 157 | .storeUint(0, 64) 158 | .storeAddress(params.newVerifierRegistry) 159 | .endCell(); 160 | await provider.internal(via, { 161 | value: params.value, 162 | sendMode: SendMode.PAY_GAS_SEPARATELY, 163 | body, 164 | }); 165 | } 166 | 167 | async sendChangeAdmin( 168 | provider: ContractProvider, 169 | via: Sender, 170 | params: { value: bigint; newAdmin: Address }, 171 | ) { 172 | const body = beginCell() 173 | .storeUint(3004, 32) 174 | .storeUint(0, 64) 175 | .storeAddress(params.newAdmin) 176 | .endCell(); 177 | await provider.internal(via, { 178 | value: params.value, 179 | sendMode: SendMode.PAY_GAS_SEPARATELY, 180 | body, 181 | }); 182 | } 183 | 184 | async sendSetSourceItemCode( 185 | provider: ContractProvider, 186 | via: Sender, 187 | params: { value: bigint; newCode: Cell }, 188 | ) { 189 | const body = beginCell() 190 | .storeUint(4005, 32) 191 | .storeUint(0, 64) 192 | .storeRef(params.newCode) 193 | .endCell(); 194 | await provider.internal(via, { 195 | value: params.value, 196 | sendMode: SendMode.PAY_GAS_SEPARATELY, 197 | body, 198 | }); 199 | } 200 | 201 | async sendChangeCode( 202 | provider: ContractProvider, 203 | via: Sender, 204 | params: { value: bigint; newCode: Cell }, 205 | ) { 206 | const body = beginCell() 207 | .storeUint(5006, 32) 208 | .storeUint(0, 64) 209 | .storeRef(params.newCode) 210 | .endCell(); 211 | await provider.internal(via, { 212 | value: params.value, 213 | sendMode: SendMode.PAY_GAS_SEPARATELY, 214 | body, 215 | }); 216 | } 217 | 218 | async sendSetDeploymentCosts( 219 | provider: ContractProvider, 220 | via: Sender, 221 | params: { value: bigint; min: bigint; max: bigint }, 222 | ) { 223 | const body = beginCell() 224 | 
.storeUint(6007, 32) 225 | .storeUint(0, 64) 226 | .storeCoins(params.min) 227 | .storeCoins(params.max) 228 | .endCell(); 229 | await provider.internal(via, { 230 | value: params.value, 231 | sendMode: SendMode.PAY_GAS_SEPARATELY, 232 | body, 233 | }); 234 | } 235 | } 236 | -------------------------------------------------------------------------------- /src/wrappers/verifier-registry.ts: -------------------------------------------------------------------------------- 1 | import { 2 | Address, 3 | beginCell, 4 | Cell, 5 | Contract, 6 | contractAddress, 7 | ContractProvider, 8 | Sender, 9 | SendMode, 10 | Dictionary, 11 | DictionaryValue, 12 | Slice, 13 | } from "ton-core"; 14 | 15 | export type RegistryData = { 16 | verifiers: Map<bigint, VerifierConfig>; 17 | }; 18 | 19 | export type VerifierConfig = { 20 | admin: Address; 21 | quorum: number; 22 | pub_key_endpoints: Map<bigint, number>; 23 | name: string; 24 | marketingUrl: string; 25 | }; 26 | 27 | export const OperationCodes = { 28 | removeVerifier: 0x19fa5637, 29 | updateVerifier: 0x6002d61a, 30 | forwardMessage: 0x75217758, 31 | }; 32 | 33 | export type CollectionMintItemInput = { 34 | passAmount: bigint; 35 | index: number; 36 | ownerAddress: Address; 37 | content: string; 38 | }; 39 | 40 | function createSliceValue(): DictionaryValue<Slice> { 41 | return { 42 | serialize: (src, builder) => { 43 | builder.storeSlice(src); 44 | }, 45 | parse: (src) => { 46 | return src; 47 | }, 48 | }; 49 | } 50 | 51 | export function buildMsgDescription( 52 | id: bigint, 53 | validTill: number, 54 | source: Address, 55 | target: Address, 56 | msg: Cell, 57 | ) { 58 | let desc = beginCell(); 59 | desc.storeUint(id, 256); 60 | desc.storeUint(validTill, 32); 61 | desc.storeAddress(source); 62 | desc.storeAddress(target); 63 | desc.storeRef(msg); 64 | 65 | return desc; 66 | } 67 | 68 | export function buildRegistryDataCell(data: RegistryData, num?: number) { 69 | let dataCell = beginCell(); 70 | let e = Dictionary.empty(Dictionary.Keys.BigUint(256), createSliceValue()); 71 | data.verifiers.forEach(function (val: VerifierConfig, key: bigint) { 72 | let x = beginCell().storeAddress(val.admin).storeUint(val.quorum, 8); 73 | 74 | let points = Dictionary.empty(Dictionary.Keys.BigUint(256), createSliceValue()); 75 | val.pub_key_endpoints.forEach(function (eVal: number, eKey: bigint) { 76 | points.set(eKey, beginCell().storeUint(eVal, 32).asSlice()); 77 | }); 78 | x.storeDict(points); 79 | x.storeRef(beginCell().storeBuffer(Buffer.from(val.name)).endCell()); 80 | x.storeRef(beginCell().storeBuffer(Buffer.from(val.marketingUrl)).endCell()); 81 | e.set(key, x.asSlice()); 82 | }); 83 | 84 | if (num === undefined) { 85 | num = 0; 86 | } 87 | 88 | dataCell.storeDict(e).storeUint(num, 8); 89 | 90 | return dataCell.endCell(); 91 | } 92 | 93 | export class VerifierRegistry implements Contract { 94 | constructor(readonly address: Address, readonly init?: { code: Cell; data: Cell }) {} 95 | 96 | static createFromAddress(address: Address) { 97 | return new VerifierRegistry(address); 98 | } 99 | 100 | static createFromConfig(code: Cell, config: RegistryData, num?: number, workchain = 0) { 101 | let data = buildRegistryDataCell(config, num); 102 | const init = { code, data }; 103 | return new VerifierRegistry(contractAddress(workchain, init), init); 104 | } 105 | 106 | async sendInternalMessage(provider: ContractProvider, via: Sender, body: Cell, value: bigint) { 107 | await provider.internal(via, { 108 | value: value, 109 | sendMode: SendMode.PAY_GAS_SEPARATELY, 110 | body: body, 111 | }); 112 | } 113 | 114 | async
sendDeploy(provider: ContractProvider, via: Sender, value: bigint) { 115 | await provider.internal(via, { 116 | value, 117 | sendMode: SendMode.PAY_GAS_SEPARATELY, 118 | body: beginCell().endCell(), 119 | }); 120 | } 121 | 122 | async getVerifier( 123 | provider: ContractProvider, 124 | id: bigint, 125 | ): Promise<{ admin: Address | null; settings: Cell | null }> { 126 | let res = await provider.get("get_verifier", [ 127 | { 128 | type: "int", 129 | value: id, 130 | }, 131 | ]); 132 | const sl = res.stack.readCell(); 133 | const settings = res.stack.readCellOpt(); 134 | const ok = res.stack.readNumber(); 135 | if (ok == 0) { 136 | return { 137 | admin: null, 138 | settings: null, 139 | }; 140 | } 141 | 142 | return { 143 | admin: sl.beginParse().loadAddress(), 144 | settings, 145 | }; 146 | } 147 | 148 | async getVerifiersNum(provider: ContractProvider): Promise<number> { 149 | let res = await provider.get("get_verifiers_num", []); 150 | let num = res.stack.readNumber(); 151 | 152 | return num; 153 | } 154 | 155 | async getVerifiers(provider: ContractProvider): Promise<VerifierConfig[]> { 156 | let res = await provider.get("get_verifiers", []); 157 | const item = res.stack.readCell(); 158 | const c = item.beginParse(); 159 | const d = c.loadDict(Dictionary.Keys.BigUint(256), createSliceValue()); 160 | 161 | return Array.from(d.values()).map((v) => { 162 | const admin = v.loadAddress()!; 163 | const quorum = v.loadUint(8); 164 | const pubKeyEndpoints = v.loadDict(Dictionary.Keys.BigUint(256), Dictionary.Values.Uint(32)); 165 | 166 | return { 167 | admin: admin, 168 | quorum: quorum, 169 | pub_key_endpoints: new Map( 170 | Array.from(pubKeyEndpoints).map(([k, v]) => [k, v]), 171 | ), 172 | name: v.loadRef().beginParse().loadStringTail(), 173 | marketingUrl: v.loadRef().beginParse().loadStringTail(), 174 | }; 175 | }); 176 | } 177 | 178 | async sendRemoveVerifier( 179 | provider: ContractProvider, 180 | via: Sender, 181 | params: { queryId?: number; id: bigint; value: bigint }, 182 | ) { 183 | let msgBody = beginCell(); 184 | msgBody.storeUint(OperationCodes.removeVerifier, 32); 185 | msgBody.storeUint(params.queryId || 0, 64); 186 | msgBody.storeUint(params.id, 256); 187 | await provider.internal(via, { 188 | value: params.value, 189 | sendMode: SendMode.PAY_GAS_SEPARATELY, 190 | body: msgBody.endCell(), 191 | }); 192 | } 193 | 194 | async sendUpdateVerifier( 195 | provider: ContractProvider, 196 | via: Sender, 197 | params: { 198 | queryId?: number; 199 | id: bigint; 200 | quorum: number; 201 | endpoints: Map<bigint, number>; 202 | name: string; 203 | marketingUrl: string; 204 | value: bigint; 205 | }, 206 | ) { 207 | let msgBody = beginCell(); 208 | msgBody.storeUint(OperationCodes.updateVerifier, 32); 209 | msgBody.storeUint(params.queryId || 0, 64); 210 | msgBody.storeUint(params.id, 256); 211 | msgBody.storeUint(params.quorum, 8); 212 | 213 | let e = Dictionary.empty(Dictionary.Keys.BigUint(256), createSliceValue()); 214 | params.endpoints.forEach(function (val: number, key: bigint) { 215 | e.set(key, beginCell().storeUint(val, 32).endCell().beginParse()); 216 | }); 217 | 218 | msgBody.storeDict(e); 219 | msgBody.storeRef(beginCell().storeBuffer(Buffer.from(params.name)).endCell()); 220 | msgBody.storeRef(beginCell().storeBuffer(Buffer.from(params.marketingUrl)).endCell()); 221 | 222 | await provider.internal(via, { 223 | value: params.value, 224 | sendMode: SendMode.PAY_GAS_SEPARATELY, 225 | body: msgBody.endCell(), 226 | }); 227 | } 228 | 229 | async sendForwardMessage( 230 | provider: ContractProvider, 231 | via: Sender,
232 | params: { queryId?: number; desc: Cell; signatures: Map; value: bigint }, 233 | ) { 234 | let msgBody = beginCell(); 235 | msgBody.storeUint(OperationCodes.forwardMessage, 32); 236 | msgBody.storeUint(params.queryId || 0, 64); 237 | msgBody.storeRef(params.desc); 238 | 239 | let signatures = beginCell().endCell(); 240 | if (params.signatures.size > 0) { 241 | let signaturesBuilder = beginCell(); 242 | params.signatures.forEach(function (val, key) { 243 | signaturesBuilder.storeBuffer(val); 244 | signaturesBuilder.storeUint(key, 256); 245 | 246 | let s = beginCell(); 247 | s.storeRef(signaturesBuilder.endCell()); 248 | signaturesBuilder = s; 249 | }); 250 | signatures = signaturesBuilder.asSlice().loadRef(); 251 | } 252 | 253 | msgBody.storeRef(signatures); 254 | 255 | await provider.internal(via, { 256 | value: params.value, 257 | sendMode: SendMode.PAY_GAS_SEPARATELY, 258 | body: msgBody.endCell(), 259 | }); 260 | } 261 | } 262 | -------------------------------------------------------------------------------- /test/controller.spec.ts: -------------------------------------------------------------------------------- 1 | import { Controller } from "../src/controller"; 2 | import { CodeStorageProvider, FileUploadSpec } from "../src/ipfs-code-storage-provider"; 3 | // @ts-ignore 4 | import { of as ipfsHash } from "ipfs-only-hash"; 5 | import tweetnacl from "tweetnacl"; 6 | import { CompileResult, SourceVerifier, SourceVerifyPayload } from "../src/types"; 7 | import { beginCell, Cell, Address, Slice } from "ton"; 8 | import { TonReaderClient, VerifierConfig } from "../src/ton-reader-client"; 9 | import { sha256 } from "../src/utils"; 10 | import Prando from "prando"; 11 | import { FORWARD_MESSAGE_OP, DEPLOY_SOURCE_OP } from "../src/cell-builders"; 12 | 13 | function randomAddress(seed: string, workchain: number = 0) { 14 | const random = new Prando(seed); 15 | const hash = Buffer.alloc(32); 16 | for (let i = 0; i < hash.length; i++) { 17 | hash[i] = random.nextInt(0, 255); 18 | } 19 | return new Address(workchain, hash); 20 | } 21 | 22 | const verificationDate = Math.floor(new Date().getTime() / 1000) + 60 * 10; 23 | const emptyCellHash = new Cell().hash().toString("base64"); 24 | 25 | class StubCodeStorageProvider implements CodeStorageProvider { 26 | storage: Map = new Map(); 27 | 28 | async write(files: FileUploadSpec[], pin: boolean): Promise { 29 | return Promise.all(files.map((file) => ipfsHash(file.name))); 30 | } 31 | 32 | async writeFromContent(files: Buffer[], pin: boolean): Promise { 33 | const hashes = await Promise.all(files.map((file) => ipfsHash(file))); 34 | files.forEach((file, i) => { 35 | this.storage.set(hashes[i], file.toString("utf8")); 36 | }); 37 | 38 | return hashes; 39 | } 40 | 41 | async read(pointer: string): Promise { 42 | return this.storage.get(pointer)!; 43 | } 44 | 45 | clear() { 46 | this.storage.clear(); 47 | } 48 | } 49 | 50 | class StubSourceVerifier implements SourceVerifier { 51 | async verify(payload: SourceVerifyPayload): Promise { 52 | return { 53 | result: "similar", 54 | error: null, 55 | compilerSettings: { 56 | funcVersion: "0.4.0", 57 | commandLine: "some command line", 58 | }, 59 | hash: emptyCellHash, 60 | sources: [], 61 | }; 62 | } 63 | } 64 | 65 | const serverKeypair = tweetnacl.sign.keyPair(); 66 | const server2Keypair = tweetnacl.sign.keyPair(); 67 | const server3Keypair = tweetnacl.sign.keyPair(); 68 | 69 | class StubTonReaderClient implements TonReaderClient { 70 | async getVerifierConfig( 71 | verifierId: string, 72 | 
verifierRegistryAddress: string, 73 | ): Promise { 74 | return { 75 | quorum: 3, 76 | verifiers: [ 77 | Buffer.from(serverKeypair.publicKey), 78 | Buffer.from(server2Keypair.publicKey), 79 | Buffer.from(server3Keypair.publicKey), 80 | ], 81 | }; 82 | } 83 | async isProofDeployed(codeCellHash: string, verifierId: string): Promise { 84 | return false; 85 | } 86 | } 87 | 88 | const VERIFIER_ID = "some verifier"; 89 | 90 | const stubTonReaderClient = new StubTonReaderClient(); 91 | const stubSourceVerifier = new StubSourceVerifier(); 92 | const stubCodeStorageProvider = new StubCodeStorageProvider(); 93 | 94 | function makeController(keypair: tweetnacl.SignKeyPair): Controller { 95 | return new Controller( 96 | stubCodeStorageProvider, 97 | { 98 | func: stubSourceVerifier, 99 | fift: stubSourceVerifier, 100 | tact: stubSourceVerifier, 101 | tolk: stubSourceVerifier, 102 | }, 103 | { 104 | privateKey: Buffer.from(keypair.secretKey).toString("base64"), 105 | allowReverification: false, 106 | sourcesRegistryAddress: randomAddress("sourcesReg").toString(), 107 | verifierId: VERIFIER_ID, 108 | }, 109 | stubTonReaderClient, 110 | ); 111 | } 112 | 113 | describe("Controller", () => { 114 | let controller: Controller; 115 | let controller2: Controller; 116 | let controller3: Controller; 117 | 118 | beforeEach(() => { 119 | controller = makeController(serverKeypair); 120 | controller2 = makeController(server2Keypair); 121 | controller3 = makeController(server3Keypair); 122 | 123 | stubCodeStorageProvider.clear(); 124 | }); 125 | 126 | it("Adds source", async () => { 127 | const result = await controller.addSource({ 128 | compiler: "func", 129 | compilerSettings: { 130 | funcVersion: "0.4.0", 131 | commandLine: "", // TODO why is this mandatory 132 | }, 133 | knownContractAddress: "N/A", 134 | knownContractHash: "SomeHASH", // TODO this should be validated 135 | senderAddress: randomAddress("sender").toString(), // TODO should be validated to + in the original func 136 | sources: [], 137 | tmpDir: "N/A", // TODO 138 | }); 139 | 140 | expect(result.compileResult.hash).toEqual(emptyCellHash); 141 | }); 142 | 143 | describe("Sign", () => { 144 | it("Signs a source", async () => { 145 | // First server signs 146 | const { msgCell } = await controller.addSource({ 147 | compiler: "func", 148 | compilerSettings: { 149 | funcVersion: "0.2.0", 150 | commandLine: "", // TODO why is this mandatory 151 | }, 152 | knownContractAddress: "N/A", 153 | knownContractHash: "SomeHASH", // TODO this should be validated 154 | senderAddress: randomAddress("sender").toString(), // TODO should be validated to + in the original func 155 | sources: [], 156 | tmpDir: "N/A", // TODO 157 | }); 158 | 159 | // Second server adds signature 160 | const afterController2SigResult = await controller2.sign({ 161 | messageCell: msgCell!, 162 | tmpDir: "", 163 | }); 164 | const afterController2SigSlice = Cell.fromBoc( 165 | afterController2SigResult.msgCell, 166 | )[0].beginParse(); 167 | 168 | // Ensure message itself is intact 169 | const content = afterController2SigSlice.loadRef(); 170 | expect(content.hash()).toEqual(Cell.fromBoc(msgCell!)[0].asSlice().loadRef().hash()); 171 | 172 | // Ensure signature 1 remains included 173 | const server1SigSlice = afterController2SigSlice.loadRef().beginParse(); 174 | expect(server1SigSlice.skip(512).loadBuffer(32)).toEqual( 175 | Buffer.from(serverKeypair.publicKey), 176 | ); 177 | 178 | // Ensure signature 2 was added 179 | const server2SigSlice = server1SigSlice.loadRef().beginParse(); 180 | 
expect(server2SigSlice.skip(512).loadBuffer(32)).toEqual( 181 | Buffer.from(server2Keypair.publicKey), 182 | ); 183 | 184 | // Third server adds signature 185 | const afterController3SigResult = await controller3.sign({ 186 | messageCell: afterController2SigResult.msgCell, 187 | tmpDir: "", 188 | }); 189 | const afterController3SigSlice = Cell.fromBoc( 190 | afterController3SigResult.msgCell, 191 | )[0].beginParse(); 192 | 193 | // Ensure message itself is intact 194 | const content2 = afterController3SigSlice.loadRef(); 195 | expect(content2.hash()).toEqual(Cell.fromBoc(msgCell!)[0].asSlice().loadRef().hash()); 196 | 197 | // Ensure signature 1 remains included 198 | const server1SigSlice2 = afterController3SigSlice.loadRef().beginParse(); 199 | expect(server1SigSlice2.skip(512).loadBuffer(32)).toEqual( 200 | Buffer.from(serverKeypair.publicKey), 201 | ); 202 | 203 | // Ensure signature 2 remains included 204 | const server2SigSlice2 = server1SigSlice2.loadRef().beginParse(); 205 | expect(server2SigSlice2.skip(512).loadBuffer(32)).toEqual( 206 | Buffer.from(server2Keypair.publicKey), 207 | ); 208 | 209 | // Ensure signature 3 was added 210 | const server3SigSlice = server2SigSlice2.loadRef().beginParse(); 211 | expect(server3SigSlice.skip(512).loadBuffer(32)).toEqual( 212 | Buffer.from(server3Keypair.publicKey), 213 | ); 214 | }); 215 | 216 | describe("Invalid wrapper cell", () => { 217 | Object.entries({ 218 | "invalid op": { 219 | cell: beginCell() 220 | .storeUint(1, 32) 221 | .storeUint(0, 64) 222 | .storeRef(new Cell()) 223 | .storeRef(new Cell()) 224 | .endCell() 225 | .toBoc(), 226 | error: "Invalid operation", 227 | }, 228 | "no query id": { 229 | cell: beginCell() 230 | .storeUint(FORWARD_MESSAGE_OP, 32) 231 | .storeRef(new Cell()) 232 | .storeRef(new Cell()) 233 | .endCell() 234 | .toBoc(), 235 | error: "Invalid cell", 236 | }, 237 | "no refs": { 238 | cell: beginCell().storeUint(1, 32).storeUint(0, 64).endCell().toBoc(), 239 | error: "Invalid cell", 240 | }, 241 | "empty cell": { 242 | cell: new Cell().toBoc(), 243 | error: "Invalid cell", 244 | }, 245 | }).map(([name, config]) => { 246 | const { error, cell } = config; 247 | it(`Rejects: ${name}`, async () => { 248 | await expect(controller.sign({ messageCell: cell, tmpDir: "" })).rejects.toThrow(error); 249 | }); 250 | }); 251 | }); 252 | 253 | describe("Invalid verifier message cell", () => { 254 | Object.entries({ 255 | "different verifier id": { 256 | cell: beginCell() 257 | .storeUint(FORWARD_MESSAGE_OP, 32) 258 | .storeUint(0, 64) 259 | .storeRef( 260 | beginCell() 261 | .storeBuffer(sha256("some other verifier")) 262 | .storeUint(1, 32) 263 | .storeAddress(Address.parse(zeroAddress())) 264 | .storeAddress(randomAddress("sourcesReg")) 265 | .storeRef(new Cell()) 266 | .endCell(), 267 | ) 268 | .storeRef(new Cell()) 269 | .endCell() 270 | .toBoc(), 271 | error: "Invalid verifier id", 272 | }, 273 | expired: { 274 | cell: beginCell() 275 | .storeUint(FORWARD_MESSAGE_OP, 32) 276 | .storeUint(0, 64) 277 | .storeRef( 278 | beginCell() 279 | .storeBuffer(sha256(VERIFIER_ID)) 280 | .storeUint(Math.floor(Date.now() / 1000) - 60 * 5, 32) // Message was valid up until 5 minutes ago 281 | .storeAddress(Address.parse(zeroAddress())) 282 | .storeAddress(randomAddress("sourcesReg")) 283 | .storeRef(new Cell()) 284 | .endCell(), 285 | ) 286 | .storeRef(new Cell()) 287 | .endCell() 288 | .toBoc(), 289 | error: "Message is expired", 290 | }, 291 | "invalid sources registry": { 292 | cell: beginCell() 293 | .storeUint(FORWARD_MESSAGE_OP, 
32) 294 | .storeUint(0, 64) 295 | .storeRef( 296 | beginCell() 297 | .storeBuffer(sha256(VERIFIER_ID)) 298 | .storeUint(Math.floor(Date.now() / 1000) + 60 * 5, 32) 299 | .storeAddress(Address.parse(zeroAddress())) 300 | .storeAddress(randomAddress("notSourcesReg")) 301 | .storeRef(new Cell()) 302 | .endCell(), 303 | ) 304 | .storeRef(new Cell()) 305 | .endCell() 306 | .toBoc(), 307 | error: "Invalid sources registry address", 308 | }, 309 | "missing ref": { 310 | cell: beginCell() 311 | .storeUint(FORWARD_MESSAGE_OP, 32) 312 | .storeUint(0, 64) 313 | .storeRef( 314 | beginCell() 315 | .storeBuffer(sha256(VERIFIER_ID)) 316 | .storeUint(Math.floor(Date.now() / 1000) + 60 * 5, 32) 317 | .storeAddress(Address.parse(zeroAddress())) 318 | .storeAddress(randomAddress("sourcesReg")) 319 | .endCell(), 320 | ) 321 | .storeRef(new Cell()) 322 | .endCell() 323 | .toBoc(), 324 | error: "Invalid verifier body cell", 325 | }, 326 | "missing addresses": { 327 | cell: beginCell() 328 | .storeUint(FORWARD_MESSAGE_OP, 32) 329 | .storeUint(0, 64) 330 | .storeRef( 331 | beginCell() 332 | .storeBuffer(sha256(VERIFIER_ID)) 333 | .storeUint(Math.floor(Date.now() / 1000) + 60 * 5, 32) 334 | .storeRef(new Cell()) 335 | .endCell(), 336 | ) 337 | .storeRef(new Cell()) 338 | .endCell() 339 | .toBoc(), 340 | error: "Invalid verifier body cell", 341 | }, 342 | }).map(([name, config]) => { 343 | const { error, cell } = config; 344 | it(`Rejects: ${name}`, async () => { 345 | await expect(controller.sign({ messageCell: cell, tmpDir: "" })).rejects.toThrow(error); 346 | }); 347 | }); 348 | }); 349 | 350 | describe("Invalid sources registry message", () => { 351 | const validWrappingCell = (sourceRegCell: Cell) => 352 | beginCell() 353 | .storeUint(FORWARD_MESSAGE_OP, 32) 354 | .storeUint(0, 64) 355 | .storeRef( 356 | beginCell() 357 | .storeBuffer(sha256(VERIFIER_ID)) 358 | .storeUint(Math.floor(Date.now() / 1000) + 60 * 5, 32) 359 | .storeAddress(Address.parse(zeroAddress())) 360 | .storeAddress(randomAddress("sourcesReg")) 361 | .storeRef(sourceRegCell) 362 | .endCell(), 363 | ) 364 | .storeRef(new Cell()) 365 | .endCell(); 366 | 367 | Object.entries({ 368 | "empty cell": { 369 | cell: validWrappingCell(new Cell()).toBoc(), 370 | error: "Invalid sources registry body cell", 371 | }, 372 | "missing ref": { 373 | cell: validWrappingCell( 374 | beginCell() 375 | .storeUint(1, 32) 376 | .storeUint(0, 64) 377 | .storeUint(0, 256) 378 | .storeUint(0, 256) 379 | .endCell(), 380 | ).toBoc(), 381 | error: "Invalid sources registry body cell", 382 | }, 383 | "invalid op": { 384 | cell: validWrappingCell( 385 | beginCell() 386 | .storeUint(1, 32) 387 | .storeUint(0, 64) 388 | .storeBuffer(sha256(VERIFIER_ID)) 389 | .storeUint(0, 256) 390 | .storeRef(new Cell()) 391 | .endCell(), 392 | ).toBoc(), 393 | error: "Invalid deploy source op", 394 | }, 395 | "invalid verified id": { 396 | cell: validWrappingCell( 397 | beginCell() 398 | .storeUint(DEPLOY_SOURCE_OP, 32) 399 | .storeUint(0, 64) 400 | .storeBuffer(sha256("not verifier id")) 401 | .storeUint(0, 256) 402 | .storeRef(new Cell()) 403 | .endCell(), 404 | ).toBoc(), 405 | error: "Invalid verifier id", 406 | }, 407 | }).map(([name, config]) => { 408 | const { error, cell } = config; 409 | it(`Rejects: ${name}`, async () => { 410 | await expect(controller.sign({ messageCell: cell, tmpDir: "" })).rejects.toThrow(error); 411 | }); 412 | }); 413 | }); 414 | 415 | function makeSigCell(cellToSign: Cell, kp: tweetnacl.SignKeyPair) { 416 | const sig = 
Buffer.from(tweetnacl.sign.detached(cellToSign.hash(), kp.secretKey)); 417 | return beginCell().storeBuffer(sig).storeBuffer(Buffer.from(kp.publicKey)).endCell(); 418 | } 419 | 420 | describe("Invalid signatures", () => { 421 | const cellToSign = beginCell() 422 | .storeBuffer(sha256(VERIFIER_ID)) 423 | .storeUint(Math.floor(Date.now() / 1000) + 60 * 5, 32) 424 | .storeAddress(Address.parse(zeroAddress())) 425 | .storeAddress(randomAddress("sourcesReg")) 426 | .storeRef( 427 | beginCell() 428 | .storeUint(DEPLOY_SOURCE_OP, 32) 429 | .storeUint(0, 64) 430 | .storeBuffer(sha256(VERIFIER_ID)) 431 | .storeUint(0, 256) 432 | .storeRef(beginCell().storeUint(1, 8).storeBuffer(Buffer.from("someLink")).endCell()) 433 | .endCell(), 434 | ) 435 | .endCell(); 436 | 437 | const validWrappingCell = (signCell: Cell) => 438 | beginCell() 439 | .storeUint(FORWARD_MESSAGE_OP, 32) 440 | .storeUint(0, 64) 441 | .storeRef(cellToSign) 442 | .storeRef(signCell) 443 | .endCell(); 444 | 445 | async function expectSignThrow(signCell: Cell, error: string) { 446 | await expect( 447 | controller.sign({ messageCell: validWrappingCell(signCell).toBoc(), tmpDir: "" }), 448 | ).rejects.toThrow(error); 449 | } 450 | 451 | describe("Invalid signature cell", () => { 452 | it("Empty", async () => { 453 | await expectSignThrow(new Cell(), "Invalid signature cell"); 454 | }); 455 | 456 | it("Non-Empty", async () => { 457 | await expectSignThrow(beginCell().storeUint(0, 1).endCell(), "Invalid signature cell"); 458 | }); 459 | 460 | it("Invalid signing public key", async () => { 461 | const kp = tweetnacl.sign.keyPair(); 462 | const kp2 = tweetnacl.sign.keyPair(); 463 | const sig = Buffer.from(tweetnacl.sign.detached(cellToSign.hash(), kp2.secretKey)); 464 | 465 | const sigCell = beginCell() 466 | .storeBuffer(sig) 467 | .storeBuffer(Buffer.from(kp.publicKey)) 468 | .endCell(); 469 | 470 | await expectSignThrow(sigCell, "Invalid signature"); 471 | }); 472 | 473 | it("Invalid signed cell hash", async () => { 474 | const kp = tweetnacl.sign.keyPair(); 475 | const sig = Buffer.from(tweetnacl.sign.detached(new Cell().hash(), kp.secretKey)); 476 | 477 | const sigCell = beginCell() 478 | .storeBuffer(sig) 479 | .storeBuffer(Buffer.from(kp.publicKey)) 480 | .endCell(); 481 | 482 | await expectSignThrow(sigCell, "Invalid signature"); 483 | }); 484 | 485 | it("Multiple signatures, one invalid", async () => { 486 | const kp = tweetnacl.sign.keyPair(); 487 | const kp2 = tweetnacl.sign.keyPair(); 488 | 489 | const mock = jest.spyOn(stubTonReaderClient, "getVerifierConfig").mockResolvedValue({ 490 | quorum: 3, 491 | verifiers: [ 492 | Buffer.from(serverKeypair.publicKey), 493 | Buffer.from(kp.publicKey), 494 | Buffer.from(kp2.publicKey), 495 | ], 496 | }); 497 | 498 | let sigCell = makeSigCell(cellToSign, kp2); 499 | sigCell = sigCell.asBuilder().storeRef(makeSigCell(new Cell(), kp)).asCell(); 500 | 501 | await expectSignThrow(sigCell, "Invalid signature"); 502 | mock.mockRestore(); 503 | }); 504 | 505 | it("Sig cell contains more than one ref", async () => { 506 | const kp = tweetnacl.sign.keyPair(); 507 | const kp2 = tweetnacl.sign.keyPair(); 508 | 509 | const mock = jest.spyOn(stubTonReaderClient, "getVerifierConfig").mockResolvedValue({ 510 | quorum: 3, 511 | verifiers: [ 512 | Buffer.from(serverKeypair.publicKey), 513 | Buffer.from(kp.publicKey), 514 | Buffer.from(kp2.publicKey), 515 | ], 516 | }); 517 | 518 | const sigBuilder = makeSigCell(cellToSign, kp2).asBuilder(); 519 | sigBuilder.storeRef(makeSigCell(cellToSign, kp)); 520 | 
sigBuilder.storeRef(makeSigCell(cellToSign, kp)); 521 | 522 | await expectSignThrow(sigBuilder.asCell(), "Invalid signature cell"); 523 | mock.mockRestore(); 524 | }); 525 | }); 526 | 527 | it("Already signed by own", async () => { 528 | const kp = tweetnacl.sign.keyPair(); 529 | const mock = jest.spyOn(stubTonReaderClient, "getVerifierConfig").mockResolvedValue({ 530 | quorum: 2, 531 | verifiers: [Buffer.from(serverKeypair.publicKey), Buffer.from(kp.publicKey)], 532 | }); 533 | 534 | await expectSignThrow( 535 | makeSigCell(cellToSign, serverKeypair), 536 | "Invalid signature (signed by self)", 537 | ); 538 | mock.mockRestore(); 539 | }); 540 | 541 | it("Sig does not belong to verifier id", async () => { 542 | const [kp, kp2, kp3] = [ 543 | tweetnacl.sign.keyPair(), 544 | tweetnacl.sign.keyPair(), 545 | tweetnacl.sign.keyPair(), 546 | ]; 547 | 548 | const mock = jest.spyOn(stubTonReaderClient, "getVerifierConfig").mockResolvedValue({ 549 | quorum: 2, 550 | verifiers: [ 551 | Buffer.from(kp.publicKey), 552 | Buffer.from(kp2.publicKey), 553 | Buffer.from(kp3.publicKey), 554 | ], 555 | }); 556 | 557 | const sigCell = makeSigCell(cellToSign, kp); 558 | await expectSignThrow(sigCell, "This verifier is not in the multisig config"); 559 | 560 | mock.mockRestore(); 561 | }); 562 | 563 | it("Only one in quorum", async () => { 564 | const kp = tweetnacl.sign.keyPair(); 565 | const mock = jest.spyOn(stubTonReaderClient, "getVerifierConfig").mockResolvedValue({ 566 | quorum: 1, 567 | verifiers: [Buffer.from(serverKeypair.publicKey)], 568 | }); 569 | 570 | const sigCell = makeSigCell(cellToSign, kp); 571 | await expectSignThrow(sigCell, "Mulisig quorum must be greater than 1"); 572 | 573 | mock.mockRestore(); 574 | }); 575 | 576 | it("More signatures than need", async () => { 577 | const kp = tweetnacl.sign.keyPair(); 578 | const kp2 = tweetnacl.sign.keyPair(); 579 | const mock = jest.spyOn(stubTonReaderClient, "getVerifierConfig").mockResolvedValue({ 580 | quorum: 2, 581 | verifiers: [ 582 | Buffer.from(serverKeypair.publicKey), 583 | Buffer.from(kp.publicKey), 584 | Buffer.from(kp2.publicKey), 585 | ], 586 | }); 587 | 588 | let sigCell = makeSigCell(cellToSign, kp); 589 | const sigCell2 = makeSigCell(cellToSign, kp2); 590 | sigCell = sigCell.asBuilder().storeRef(sigCell2).asCell(); 591 | await expectSignThrow(sigCell, "Too many signatures"); 592 | 593 | mock.mockRestore(); 594 | }); 595 | 596 | it("Signature appears more than once", async () => { 597 | const kp = tweetnacl.sign.keyPair(); 598 | const kp2 = tweetnacl.sign.keyPair(); 599 | const mock = jest.spyOn(stubTonReaderClient, "getVerifierConfig").mockResolvedValue({ 600 | quorum: 3, 601 | verifiers: [ 602 | Buffer.from(serverKeypair.publicKey), 603 | Buffer.from(kp.publicKey), 604 | Buffer.from(kp2.publicKey), 605 | ], 606 | }); 607 | 608 | let sigCell = makeSigCell(cellToSign, kp); 609 | const sigCell2 = makeSigCell(cellToSign, kp); 610 | sigCell = sigCell.asBuilder().storeRef(sigCell2).asCell(); 611 | await expectSignThrow(sigCell, "Duplicate signature"); 612 | 613 | mock.mockRestore(); 614 | }); 615 | }); 616 | 617 | describe("Invalid compilation results", () => { 618 | const cellToSign = beginCell() 619 | .storeBuffer(sha256(VERIFIER_ID)) 620 | .storeUint(verificationDate, 32) 621 | .storeAddress(Address.parse(zeroAddress())) 622 | .storeAddress(randomAddress("sourcesReg")) 623 | .storeRef( 624 | beginCell() 625 | .storeUint(DEPLOY_SOURCE_OP, 32) 626 | .storeUint(0, 64) 627 | .storeBuffer(sha256(VERIFIER_ID)) 628 | .storeBuffer(new 
Cell().hash()) // code cell hash 629 | .storeRef(beginCell().storeUint(1, 8).storeBuffer(Buffer.from("someLink")).endCell()) 630 | .endCell(), 631 | ) 632 | .endCell(); 633 | 634 | const validWrappingCell = beginCell() 635 | .storeUint(FORWARD_MESSAGE_OP, 32) 636 | .storeUint(0, 64) 637 | .storeRef(cellToSign) 638 | .storeRef(makeSigCell(cellToSign, server2Keypair)) 639 | .endCell(); 640 | 641 | it("Different code hash", async () => { 642 | const mock = jest.spyOn(stubSourceVerifier, "verify").mockResolvedValue({ 643 | result: "not_similar", 644 | error: null, 645 | compilerSettings: { 646 | funcVersion: "0.3.0", 647 | commandLine: "some command line", 648 | }, 649 | hash: emptyCellHash, 650 | sources: [], 651 | }); 652 | 653 | stubCodeStorageProvider.storage.set( 654 | "someLink", 655 | JSON.stringify({ hash: emptyCellHash, sources: [], compiler: "func" }), 656 | ); 657 | 658 | await expect( 659 | controller.sign({ messageCell: validWrappingCell.toBoc(), tmpDir: "" }), 660 | ).rejects.toThrow("Invalid compilation result"); 661 | 662 | mock.mockRestore(); 663 | }); 664 | 665 | it("Does not compile", async () => { 666 | const mock = jest.spyOn(stubSourceVerifier, "verify").mockResolvedValue({ 667 | result: "compile_error", 668 | error: "some error", 669 | compilerSettings: { 670 | funcVersion: "0.3.0", 671 | commandLine: "some command line", 672 | }, 673 | hash: emptyCellHash, 674 | sources: [], 675 | }); 676 | 677 | stubCodeStorageProvider.storage.set( 678 | "someLink", 679 | JSON.stringify({ hash: emptyCellHash, sources: [], compiler: "func" }), 680 | ); 681 | 682 | await expect( 683 | controller.sign({ messageCell: validWrappingCell.toBoc(), tmpDir: "" }), 684 | ).rejects.toThrow("Invalid compilation result"); 685 | 686 | mock.mockRestore(); 687 | }); 688 | }); 689 | }); 690 | }); 691 | 692 | function zeroAddress(): string { 693 | return "EQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAM9c"; 694 | } 695 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "module": "commonjs", 4 | "esModuleInterop": true, 5 | "target": "ES2022", 6 | "noImplicitAny": true, 7 | "moduleResolution": "node", 8 | "sourceMap": true, 9 | "outDir": "dist", 10 | "baseUrl": ".", 11 | "resolveJsonModule": true, 12 | "strictNullChecks": true, 13 | "paths": { 14 | "*": ["node_modules/*"] 15 | }, 16 | "skipLibCheck": true 17 | }, 18 | "include": ["src/**/*"] 19 | } 20 | --------------------------------------------------------------------------------