├── .envrc
├── .eslintrc.js
├── .github
│   ├── dependabot.yml
│   └── workflows
│       ├── programs-e2e.yml
│       ├── programs-unit.yml
│       └── release.yml
├── .gitignore
├── .husky
│   └── pre-commit
├── .mocharc.js
├── .prettierignore
├── .vscode
│   ├── extensions.json
│   └── settings.json
├── .yarn
│   ├── plugins
│   │   └── @yarnpkg
│   │       ├── plugin-interactive-tools.cjs
│   │       └── plugin-typescript.cjs
│   ├── releases
│   │   └── yarn-3.2.0.cjs
│   └── sdks
│       ├── eslint
│       │   ├── bin
│       │   │   └── eslint.js
│       │   ├── lib
│       │   │   └── api.js
│       │   └── package.json
│       ├── integrations.yml
│       ├── prettier
│       │   ├── index.js
│       │   └── package.json
│       └── typescript
│           ├── bin
│           │   ├── tsc
│           │   └── tsserver
│           ├── lib
│           │   ├── tsc.js
│           │   ├── tsserver.js
│           │   ├── tsserverlibrary.js
│           │   └── typescript.js
│           └── package.json
├── .yarnrc.yml
├── Anchor.toml
├── Cargo.lock
├── Cargo.toml
├── LICENSE
├── README.md
├── flake.lock
├── flake.nix
├── images
│   └── banner.png
├── package.json
├── programs
│   └── snapshots
│       ├── Cargo.toml
│       ├── README.md
│       ├── README.tpl
│       ├── Xargo.toml
│       ├── math
│       │   ├── Cargo.toml
│       │   ├── README.md
│       │   └── src
│       │       └── lib.rs
│       ├── proptest-regressions
│       │   └── instructions
│       │       └── sync.txt
│       └── src
│           ├── instructions
│           │   ├── create_escrow_history.rs
│           │   ├── create_locker_history.rs
│           │   ├── mod.rs
│           │   └── sync.rs
│           ├── lib.rs
│           └── state.rs
├── scripts
│   ├── download-programs.sh
│   ├── generate-idl-types.sh
│   └── parse-idls.sh
├── src
│   ├── constants.ts
│   ├── index.ts
│   ├── math.ts
│   ├── programs
│   │   ├── index.ts
│   │   └── snapshots.ts
│   ├── sdk.ts
│   └── wrappers
│       ├── index.ts
│       └── snapshots
│           ├── index.ts
│           ├── pda.ts
│           └── snapshots.ts
├── tests
│   ├── snapshots.spec.ts
│   ├── test-key.json
│   └── workspace
│       ├── index.ts
│       └── workspace.ts
├── tsconfig.build.json
├── tsconfig.esm.json
├── tsconfig.json
└── yarn.lock

/.envrc:
--------------------------------------------------------------------------------
watch_file flake.nix
watch_file flake.lock
mkdir -p .direnv
eval "$(nix print-dev-env --profile "$(direnv_layout_dir)/flake-profile")"

--------------------------------------------------------------------------------
/.eslintrc.js:
--------------------------------------------------------------------------------
require("@rushstack/eslint-patch/modern-module-resolution");

module.exports = {
  root: true,
  ignorePatterns: ["dist/", "*.js", "target/"],
  parserOptions: {
    tsconfigRootDir: __dirname,
    project: "tsconfig.json",
  },
  extends: ["@saberhq"],
};

--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
version: 2
updates:
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "daily"
  - package-ecosystem: "npm"
    directory: "/"
    schedule:
      interval: "daily"
  - package-ecosystem: "cargo"
    directory: "/"
    schedule:
      interval: "daily"

--------------------------------------------------------------------------------
/.github/workflows/programs-e2e.yml:
--------------------------------------------------------------------------------
name: E2E

on:
  push:
    branches: [master]
  pull_request:
    branches: [master]

env:
  CARGO_TERM_COLOR: always
  SOLANA_VERSION: "1.9.12"

jobs:
  sdk:
    runs-on: ubuntu-latest
    name: Build the SDK
    defaults:
      run:
        shell: nix shell .#env-anchor-idls --command bash {0}

    steps:
      - uses: actions/checkout@v3

      # Install Cachix
      - uses: cachix/install-nix-action@v17
      - name: Setup Cachix
        uses: cachix/cachix-action@v10
        with:
          name: saber
          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}

      - name: Parse IDLs
        run: ./scripts/parse-idls.sh

      - name: Setup Node
        uses: actions/setup-node@v3

      - name: Get yarn cache directory path
        id: yarn-cache-dir-path
        run: echo "::set-output name=dir::$(yarn config get cacheFolder)"
      - name: Yarn Cache
        uses: actions/cache@v3.0.2
        with:
          path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
          key: ${{ runner.os }}-modules-${{ hashFiles('**/yarn.lock') }}
          restore-keys: |
            ${{ runner.os }}-modules-

      - name: Install Yarn dependencies
        run: yarn install
      - run: ./scripts/generate-idl-types.sh
      - run: yarn build
      - run: yarn typecheck
      - run: yarn lint
      - run: yarn doctor

  integration-tests:
    runs-on: ubuntu-latest
    defaults:
      run:
        shell: nix shell .#env-anchor-build --command bash {0}

    steps:
      - uses: actions/checkout@v3

      # Install Cachix
      - uses: cachix/install-nix-action@v17
      - name: Setup Cachix
        uses: cachix/cachix-action@v10
        with:
          name: saber
          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}

      - name: Install Linux dependencies
        run: |
          sudo apt-get update
          sudo apt-get install -y pkg-config build-essential libudev-dev
      - name: Install Rustup
        uses: actions-rs/toolchain@v1
        with:
          toolchain: stable
          profile: minimal
      - uses: Swatinem/rust-cache@v1

      # Install Solana
      - name: Cache Solana binaries
        id: solana-cache
        uses: actions/cache@v3.0.2
        with:
          path: |
            ~/.cache/solana
            ~/.local/share/solana/install
          key: ${{ runner.os }}-${{ env.SOLANA_VERSION }}
      - name: Install Solana
        if: steps.solana-cache.outputs.cache-hit != 'true'
        run: |
          solana-install init ${{ env.SOLANA_VERSION }}
      - name: Setup Solana Path
        run: |
          echo "$HOME/.local/share/solana/install/active_release/bin" >> $GITHUB_PATH
          export PATH="/home/runner/.local/share/solana/install/active_release/bin:$PATH"
          cargo-build-bpf --version

      # Run build
      - name: Build program
        run: anchor build
      - name: Download programs
        run: ./scripts/download-programs.sh

      - name: Get yarn cache directory path
        id: yarn-cache-dir-path
        run: echo "::set-output name=dir::$(yarn config get cacheFolder)"
      - name: Yarn Cache
        uses: actions/cache@v3.0.2
        with:
          path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
          key: ${{ runner.os }}-modules-${{ hashFiles('**/yarn.lock') }}
          restore-keys: |
            ${{ runner.os }}-modules-

      - run: yarn install
      - name: Generate IDL types
        run: yarn idl:generate:nolint
      - run: yarn build
      - name: Run e2e tests
        run: yarn test:e2e
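The `integration-tests` job above ends with `yarn test:e2e`, which runs the Mocha suite configured by `.mocharc.js` further down (Yarn PnP setup, `ts-node/register`, 30-second timeout) against `tests/snapshots.spec.ts`. As a rough, hypothetical sketch only — the imports, test names, and assertions below are illustrative and not taken from the real spec — a spec driven by that configuration has this general shape:

```ts
// Hypothetical sketch of a Mocha spec under tests/ (illustrative only; the real
// tests/snapshots.spec.ts exercises the snapshots program and SDK wrappers).
import { strict as assert } from "assert";

describe("snapshots e2e", () => {
  // .mocharc.js allows 30 seconds per test, leaving room for on-chain
  // round trips against a local validator.
  it("placeholder check standing in for real program assertions", () => {
    assert.equal(1 + 1, 2);
  });
});
```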
--------------------------------------------------------------------------------
/.github/workflows/programs-unit.yml:
--------------------------------------------------------------------------------
name: Unit

on:
  push:
    branches: [master]
    paths:
      - .github/workflows/programs-unit.yml
      - programs/**
      - Cargo.toml
      - Cargo.lock
  pull_request:
    branches: [master]
    paths:
      - .github/workflows/programs-unit.yml
      - programs/**
      - Cargo.toml
      - Cargo.lock

defaults:
  run:
    shell: nix shell .#rust --command bash {0}

env:
  CARGO_TERM_COLOR: always

jobs:
  lint:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: cachix/install-nix-action@v17
      - name: Setup Cachix
        uses: cachix/cachix-action@v10
        with:
          name: saber
          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
      - uses: Swatinem/rust-cache@v1
      - name: Run fmt
        run: cargo fmt -- --check
      - name: Run clippy
        run: cargo clippy --all-targets -- --deny=warnings
      - name: Check if publish works
        run: cargo publish --no-verify --dry-run

  unit-tests:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: cachix/install-nix-action@v17
      - name: Setup Cachix
        uses: cachix/cachix-action@v10
        with:
          name: saber
          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
      - uses: Swatinem/rust-cache@v1
      - name: Run unit tests
        run: cargo test --lib

  doc:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: cachix/install-nix-action@v17
      - name: Setup Cachix
        uses: cachix/cachix-action@v10
        with:
          name: saber
          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
      - uses: Swatinem/rust-cache@v1
      - name: Generate docs
        run: cargo doc

--------------------------------------------------------------------------------
/.github/workflows/release.yml:
--------------------------------------------------------------------------------
name: Release

on:
  workflow_dispatch: {}
  push:
    tags:
      - "v*.*.*"

env:
  CARGO_TERM_COLOR: always
  NPM_AUTH_TOKEN: ${{ secrets.NPM_PUBLISH_TOKEN }}

jobs:
  release-sdk:
    runs-on: ubuntu-latest
    defaults:
      run:
        shell: nix shell .#env-anchor-idls --command bash {0}

    name: Release SDK on NPM
    steps:
      - uses: actions/checkout@v3

      - uses: cachix/install-nix-action@v17
      - name: Setup Cachix
        uses: cachix/cachix-action@v10
        with:
          name: saber
          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}

      - name: Setup Node
        uses: actions/setup-node@v3
      - name: Get yarn cache directory path
        id: yarn-cache-dir-path
        run: echo "::set-output name=dir::$(yarn config get cacheFolder)"
      - name: Yarn Cache
        uses: actions/cache@v3.0.2
        with:
          path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
          key: ${{ runner.os }}-modules-${{ hashFiles('**/yarn.lock') }}
          restore-keys: |
            ${{ runner.os }}-modules-

      - name: Install Yarn dependencies
        run: yarn install
      - name: Parse IDLs
        run: yarn idl:generate
      - run: yarn build
      - run: |
          echo 'npmAuthToken: "${NPM_AUTH_TOKEN}"' >> .yarnrc.yml
      - name: Publish
        run: yarn npm publish

  release-crate:
    runs-on: ubuntu-latest
    name: Release crate on crates.io
    defaults:
      run:
        shell: nix shell .#env-release-crates --command bash {0}

    steps:
      - uses: actions/checkout@v3

      - uses: cachix/install-nix-action@v17
      - name: Setup Cachix
        uses: cachix/cachix-action@v10
        with:
          name: saber
          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}

      - uses: Swatinem/rust-cache@v1
      - name: Publish crates
        run: cargo workspaces publish --from-git --yes --skip-published --token ${{ secrets.CARGO_PUBLISH_TOKEN }}

  release-binaries:
    runs-on: ubuntu-latest
    defaults:
      run:
        shell: nix shell .#env-anchor-build --command bash {0}

    name: Release verifiable binaries
    steps:
      - uses: actions/checkout@v3
      - uses: cachix/install-nix-action@v17
      - name: Setup Cachix
        uses: cachix/cachix-action@v10
        with:
          name: saber
          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}

      - name: Build programs
        run: anchor build --verifiable
      - name: Release
        uses: softprops/action-gh-release@v1
        with:
          files: |
            target/deploy/*
            target/idl/*
            target/verifiable/*

  site:
    runs-on: ubuntu-latest
    defaults:
      run:
        shell: nix shell .#env-anchor-idls --command bash {0}

    steps:
      - name: Checkout
        uses: actions/checkout@v3

      - uses: cachix/install-nix-action@v17
      - name: Setup Cachix
        uses: cachix/cachix-action@v10
        with:
          name: saber
          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}

      - name: Get yarn cache directory path
        id: yarn-cache-dir-path
        run: echo "::set-output name=dir::$(yarn config get cacheFolder)"
      - name: Yarn Cache
        uses: actions/cache@v3.0.2
        with:
          path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
          key: ${{ runner.os }}-modules-${{ hashFiles('**/yarn.lock') }}
          restore-keys: |
            ${{ runner.os }}-modules-

      - name: Install Yarn dependencies
        run: yarn install
      - name: Parse IDLs
        run: yarn idl:generate
      - run: yarn docs:generate
      - run: cp -R images/ site/

      - name: Deploy 🚀
        uses: JamesIves/github-pages-deploy-action@v4.3.3
        with:
          branch: gh-pages
          folder: site

--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
# Ignore Mac OS noise
.DS_Store

# Ignore the build directory for Rust/Anchor
target

# Ignore backup files created by cargo fmt.
**/*.rs.bk

# Ignore logs
.anchor
yarn-error.log
lerna-debug.log

# Ignore node modules
node_modules
.eslintcache

# Ignore submodule dependencies
deps

# VM
.vagrant/
test-ledger/

# Generated IDL types
artifacts/
dist/
src/idls/

site/

.yarn/*
!.yarn/patches
!.yarn/releases
!.yarn/plugins
!.yarn/sdks
!.yarn/versions
.pnp.*

deployments/
Captain.toml
.goki/

--------------------------------------------------------------------------------
/.husky/pre-commit:
--------------------------------------------------------------------------------
#!/bin/sh
. 
"$(dirname "$0")/_/husky.sh" 3 | 4 | yarn lint-staged 5 | -------------------------------------------------------------------------------- /.mocharc.js: -------------------------------------------------------------------------------- 1 | require("./.pnp.cjs").setup(); 2 | 3 | module.exports = { 4 | timeout: 30_000, 5 | require: [require.resolve("ts-node/register")], 6 | }; 7 | -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | .yarn/ 2 | -------------------------------------------------------------------------------- /.vscode/extensions.json: -------------------------------------------------------------------------------- 1 | { 2 | "recommendations": [ 3 | "arcanis.vscode-zipfs", 4 | "dbaeumer.vscode-eslint", 5 | "esbenp.prettier-vscode" 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "search.exclude": { 3 | "**/.yarn": true, 4 | "**/.pnp.*": true 5 | }, 6 | "eslint.nodePath": ".yarn/sdks", 7 | "prettier.prettierPath": ".yarn/sdks/prettier/index.js", 8 | "typescript.tsdk": ".yarn/sdks/typescript/lib", 9 | "typescript.enablePromptUseWorkspaceTsdk": true 10 | } 11 | -------------------------------------------------------------------------------- /.yarn/plugins/@yarnpkg/plugin-typescript.cjs: -------------------------------------------------------------------------------- 1 | /* eslint-disable */ 2 | //prettier-ignore 3 | module.exports = { 4 | name: "@yarnpkg/plugin-typescript", 5 | factory: function (require) { 6 | var plugin=(()=>{var Ft=Object.create,H=Object.defineProperty,Bt=Object.defineProperties,Kt=Object.getOwnPropertyDescriptor,zt=Object.getOwnPropertyDescriptors,Gt=Object.getOwnPropertyNames,Q=Object.getOwnPropertySymbols,$t=Object.getPrototypeOf,ne=Object.prototype.hasOwnProperty,De=Object.prototype.propertyIsEnumerable;var Re=(e,t,r)=>t in e?H(e,t,{enumerable:!0,configurable:!0,writable:!0,value:r}):e[t]=r,u=(e,t)=>{for(var r in t||(t={}))ne.call(t,r)&&Re(e,r,t[r]);if(Q)for(var r of Q(t))De.call(t,r)&&Re(e,r,t[r]);return e},g=(e,t)=>Bt(e,zt(t)),Lt=e=>H(e,"__esModule",{value:!0});var R=(e,t)=>{var r={};for(var s in e)ne.call(e,s)&&t.indexOf(s)<0&&(r[s]=e[s]);if(e!=null&&Q)for(var s of Q(e))t.indexOf(s)<0&&De.call(e,s)&&(r[s]=e[s]);return r};var I=(e,t)=>()=>(t||e((t={exports:{}}).exports,t),t.exports),Vt=(e,t)=>{for(var r in t)H(e,r,{get:t[r],enumerable:!0})},Qt=(e,t,r)=>{if(t&&typeof t=="object"||typeof t=="function")for(let s of Gt(t))!ne.call(e,s)&&s!=="default"&&H(e,s,{get:()=>t[s],enumerable:!(r=Kt(t,s))||r.enumerable});return e},C=e=>Qt(Lt(H(e!=null?Ft($t(e)):{},"default",e&&e.__esModule&&"default"in e?{get:()=>e.default,enumerable:!0}:{value:e,enumerable:!0})),e);var xe=I(J=>{"use strict";Object.defineProperty(J,"__esModule",{value:!0});function _(e){let t=[...e.caches],r=t.shift();return r===void 0?ve():{get(s,n,a={miss:()=>Promise.resolve()}){return r.get(s,n,a).catch(()=>_({caches:t}).get(s,n,a))},set(s,n){return r.set(s,n).catch(()=>_({caches:t}).set(s,n))},delete(s){return r.delete(s).catch(()=>_({caches:t}).delete(s))},clear(){return r.clear().catch(()=>_({caches:t}).clear())}}}function ve(){return{get(e,t,r={miss:()=>Promise.resolve()}){return t().then(n=>Promise.all([n,r.miss(n)])).then(([n])=>n)},set(e,t){return Promise.resolve(t)},delete(e){return 
Promise.resolve()},clear(){return Promise.resolve()}}}J.createFallbackableCache=_;J.createNullCache=ve});var Ee=I(($s,qe)=>{qe.exports=xe()});var Te=I(ae=>{"use strict";Object.defineProperty(ae,"__esModule",{value:!0});function Jt(e={serializable:!0}){let t={};return{get(r,s,n={miss:()=>Promise.resolve()}){let a=JSON.stringify(r);if(a in t)return Promise.resolve(e.serializable?JSON.parse(t[a]):t[a]);let o=s(),d=n&&n.miss||(()=>Promise.resolve());return o.then(y=>d(y)).then(()=>o)},set(r,s){return t[JSON.stringify(r)]=e.serializable?JSON.stringify(s):s,Promise.resolve(s)},delete(r){return delete t[JSON.stringify(r)],Promise.resolve()},clear(){return t={},Promise.resolve()}}}ae.createInMemoryCache=Jt});var we=I((Vs,Me)=>{Me.exports=Te()});var Ce=I(M=>{"use strict";Object.defineProperty(M,"__esModule",{value:!0});function Xt(e,t,r){let s={"x-algolia-api-key":r,"x-algolia-application-id":t};return{headers(){return e===oe.WithinHeaders?s:{}},queryParameters(){return e===oe.WithinQueryParameters?s:{}}}}function Yt(e){let t=0,r=()=>(t++,new Promise(s=>{setTimeout(()=>{s(e(r))},Math.min(100*t,1e3))}));return e(r)}function ke(e,t=(r,s)=>Promise.resolve()){return Object.assign(e,{wait(r){return ke(e.then(s=>Promise.all([t(s,r),s])).then(s=>s[1]))}})}function Zt(e){let t=e.length-1;for(t;t>0;t--){let r=Math.floor(Math.random()*(t+1)),s=e[t];e[t]=e[r],e[r]=s}return e}function er(e,t){return Object.keys(t!==void 0?t:{}).forEach(r=>{e[r]=t[r](e)}),e}function tr(e,...t){let r=0;return e.replace(/%s/g,()=>encodeURIComponent(t[r++]))}var rr="4.2.0",sr=e=>()=>e.transporter.requester.destroy(),oe={WithinQueryParameters:0,WithinHeaders:1};M.AuthMode=oe;M.addMethods=er;M.createAuth=Xt;M.createRetryablePromise=Yt;M.createWaitablePromise=ke;M.destroy=sr;M.encode=tr;M.shuffle=Zt;M.version=rr});var F=I((Js,Ue)=>{Ue.exports=Ce()});var Ne=I(ie=>{"use strict";Object.defineProperty(ie,"__esModule",{value:!0});var nr={Delete:"DELETE",Get:"GET",Post:"POST",Put:"PUT"};ie.MethodEnum=nr});var B=I((Ys,We)=>{We.exports=Ne()});var Ze=I(A=>{"use strict";Object.defineProperty(A,"__esModule",{value:!0});var He=B();function ce(e,t){let r=e||{},s=r.data||{};return Object.keys(r).forEach(n=>{["timeout","headers","queryParameters","data","cacheable"].indexOf(n)===-1&&(s[n]=r[n])}),{data:Object.entries(s).length>0?s:void 0,timeout:r.timeout||t,headers:r.headers||{},queryParameters:r.queryParameters||{},cacheable:r.cacheable}}var X={Read:1,Write:2,Any:3},U={Up:1,Down:2,Timeouted:3},_e=2*60*1e3;function ue(e,t=U.Up){return g(u({},e),{status:t,lastUpdate:Date.now()})}function Fe(e){return e.status===U.Up||Date.now()-e.lastUpdate>_e}function Be(e){return e.status===U.Timeouted&&Date.now()-e.lastUpdate<=_e}function le(e){return{protocol:e.protocol||"https",url:e.url,accept:e.accept||X.Any}}function ar(e,t){return Promise.all(t.map(r=>e.get(r,()=>Promise.resolve(ue(r))))).then(r=>{let s=r.filter(d=>Fe(d)),n=r.filter(d=>Be(d)),a=[...s,...n],o=a.length>0?a.map(d=>le(d)):t;return{getTimeout(d,y){return(n.length===0&&d===0?1:n.length+3+d)*y},statelessHosts:o}})}var or=({isTimedOut:e,status:t})=>!e&&~~t==0,ir=e=>{let t=e.status;return e.isTimedOut||or(e)||~~(t/100)!=2&&~~(t/100)!=4},cr=({status:e})=>~~(e/100)==2,ur=(e,t)=>ir(e)?t.onRetry(e):cr(e)?t.onSucess(e):t.onFail(e);function Qe(e,t,r,s){let n=[],a=$e(r,s),o=Le(e,s),d=r.method,y=r.method!==He.MethodEnum.Get?{}:u(u({},r.data),s.data),b=u(u(u({"x-algolia-agent":e.userAgent.value},e.queryParameters),y),s.queryParameters),f=0,p=(h,S)=>{let O=h.pop();if(O===void 0)throw Ve(de(n));let 
P={data:a,headers:o,method:d,url:Ge(O,r.path,b),connectTimeout:S(f,e.timeouts.connect),responseTimeout:S(f,s.timeout)},x=j=>{let T={request:P,response:j,host:O,triesLeft:h.length};return n.push(T),T},v={onSucess:j=>Ke(j),onRetry(j){let T=x(j);return j.isTimedOut&&f++,Promise.all([e.logger.info("Retryable failure",pe(T)),e.hostsCache.set(O,ue(O,j.isTimedOut?U.Timeouted:U.Down))]).then(()=>p(h,S))},onFail(j){throw x(j),ze(j,de(n))}};return e.requester.send(P).then(j=>ur(j,v))};return ar(e.hostsCache,t).then(h=>p([...h.statelessHosts].reverse(),h.getTimeout))}function lr(e){let{hostsCache:t,logger:r,requester:s,requestsCache:n,responsesCache:a,timeouts:o,userAgent:d,hosts:y,queryParameters:b,headers:f}=e,p={hostsCache:t,logger:r,requester:s,requestsCache:n,responsesCache:a,timeouts:o,userAgent:d,headers:f,queryParameters:b,hosts:y.map(h=>le(h)),read(h,S){let O=ce(S,p.timeouts.read),P=()=>Qe(p,p.hosts.filter(j=>(j.accept&X.Read)!=0),h,O);if((O.cacheable!==void 0?O.cacheable:h.cacheable)!==!0)return P();let v={request:h,mappedRequestOptions:O,transporter:{queryParameters:p.queryParameters,headers:p.headers}};return p.responsesCache.get(v,()=>p.requestsCache.get(v,()=>p.requestsCache.set(v,P()).then(j=>Promise.all([p.requestsCache.delete(v),j]),j=>Promise.all([p.requestsCache.delete(v),Promise.reject(j)])).then(([j,T])=>T)),{miss:j=>p.responsesCache.set(v,j)})},write(h,S){return Qe(p,p.hosts.filter(O=>(O.accept&X.Write)!=0),h,ce(S,p.timeouts.write))}};return p}function dr(e){let t={value:`Algolia for JavaScript (${e})`,add(r){let s=`; ${r.segment}${r.version!==void 0?` (${r.version})`:""}`;return t.value.indexOf(s)===-1&&(t.value=`${t.value}${s}`),t}};return t}function Ke(e){try{return JSON.parse(e.content)}catch(t){throw Je(t.message,e)}}function ze({content:e,status:t},r){let s=e;try{s=JSON.parse(e).message}catch(n){}return Xe(s,t,r)}function pr(e,...t){let r=0;return e.replace(/%s/g,()=>encodeURIComponent(t[r++]))}function Ge(e,t,r){let s=Ye(r),n=`${e.protocol}://${e.url}/${t.charAt(0)==="/"?t.substr(1):t}`;return s.length&&(n+=`?${s}`),n}function Ye(e){let t=r=>Object.prototype.toString.call(r)==="[object Object]"||Object.prototype.toString.call(r)==="[object Array]";return Object.keys(e).map(r=>pr("%s=%s",r,t(e[r])?JSON.stringify(e[r]):e[r])).join("&")}function $e(e,t){if(e.method===He.MethodEnum.Get||e.data===void 0&&t.data===void 0)return;let r=Array.isArray(e.data)?e.data:u(u({},e.data),t.data);return JSON.stringify(r)}function Le(e,t){let r=u(u({},e.headers),t.headers),s={};return Object.keys(r).forEach(n=>{let a=r[n];s[n.toLowerCase()]=a}),s}function de(e){return e.map(t=>pe(t))}function pe(e){let t=e.request.headers["x-algolia-api-key"]?{"x-algolia-api-key":"*****"}:{};return g(u({},e),{request:g(u({},e.request),{headers:u(u({},e.request.headers),t)})})}function Xe(e,t,r){return{name:"ApiError",message:e,status:t,transporterStackTrace:r}}function Je(e,t){return{name:"DeserializationError",message:e,response:t}}function Ve(e){return{name:"RetryError",message:"Unreachable hosts - your application id may be incorrect. 
If the error persists, contact support@algolia.com.",transporterStackTrace:e}}A.CallEnum=X;A.HostStatusEnum=U;A.createApiError=Xe;A.createDeserializationError=Je;A.createMappedRequestOptions=ce;A.createRetryError=Ve;A.createStatefulHost=ue;A.createStatelessHost=le;A.createTransporter=lr;A.createUserAgent=dr;A.deserializeFailure=ze;A.deserializeSuccess=Ke;A.isStatefulHostTimeouted=Be;A.isStatefulHostUp=Fe;A.serializeData=$e;A.serializeHeaders=Le;A.serializeQueryParameters=Ye;A.serializeUrl=Ge;A.stackFrameWithoutCredentials=pe;A.stackTraceWithoutCredentials=de});var K=I((en,et)=>{et.exports=Ze()});var tt=I(w=>{"use strict";Object.defineProperty(w,"__esModule",{value:!0});var N=F(),mr=K(),z=B(),hr=e=>{let t=e.region||"us",r=N.createAuth(N.AuthMode.WithinHeaders,e.appId,e.apiKey),s=mr.createTransporter(g(u({hosts:[{url:`analytics.${t}.algolia.com`}]},e),{headers:u(g(u({},r.headers()),{"content-type":"application/json"}),e.headers),queryParameters:u(u({},r.queryParameters()),e.queryParameters)})),n=e.appId;return N.addMethods({appId:n,transporter:s},e.methods)},yr=e=>(t,r)=>e.transporter.write({method:z.MethodEnum.Post,path:"2/abtests",data:t},r),gr=e=>(t,r)=>e.transporter.write({method:z.MethodEnum.Delete,path:N.encode("2/abtests/%s",t)},r),fr=e=>(t,r)=>e.transporter.read({method:z.MethodEnum.Get,path:N.encode("2/abtests/%s",t)},r),br=e=>t=>e.transporter.read({method:z.MethodEnum.Get,path:"2/abtests"},t),Pr=e=>(t,r)=>e.transporter.write({method:z.MethodEnum.Post,path:N.encode("2/abtests/%s/stop",t)},r);w.addABTest=yr;w.createAnalyticsClient=hr;w.deleteABTest=gr;w.getABTest=fr;w.getABTests=br;w.stopABTest=Pr});var st=I((rn,rt)=>{rt.exports=tt()});var at=I(G=>{"use strict";Object.defineProperty(G,"__esModule",{value:!0});var me=F(),jr=K(),nt=B(),Or=e=>{let t=e.region||"us",r=me.createAuth(me.AuthMode.WithinHeaders,e.appId,e.apiKey),s=jr.createTransporter(g(u({hosts:[{url:`recommendation.${t}.algolia.com`}]},e),{headers:u(g(u({},r.headers()),{"content-type":"application/json"}),e.headers),queryParameters:u(u({},r.queryParameters()),e.queryParameters)}));return me.addMethods({appId:e.appId,transporter:s},e.methods)},Ir=e=>t=>e.transporter.read({method:nt.MethodEnum.Get,path:"1/strategies/personalization"},t),Ar=e=>(t,r)=>e.transporter.write({method:nt.MethodEnum.Post,path:"1/strategies/personalization",data:t},r);G.createRecommendationClient=Or;G.getPersonalizationStrategy=Ir;G.setPersonalizationStrategy=Ar});var it=I((nn,ot)=>{ot.exports=at()});var jt=I(i=>{"use strict";Object.defineProperty(i,"__esModule",{value:!0});var l=F(),q=K(),m=B(),Sr=require("crypto");function Y(e){let t=r=>e.request(r).then(s=>{if(e.batch!==void 0&&e.batch(s.hits),!e.shouldStop(s))return s.cursor?t({cursor:s.cursor}):t({page:(r.page||0)+1})});return t({})}var Dr=e=>{let t=e.appId,r=l.createAuth(e.authMode!==void 0?e.authMode:l.AuthMode.WithinHeaders,t,e.apiKey),s=q.createTransporter(g(u({hosts:[{url:`${t}-dsn.algolia.net`,accept:q.CallEnum.Read},{url:`${t}.algolia.net`,accept:q.CallEnum.Write}].concat(l.shuffle([{url:`${t}-1.algolianet.com`},{url:`${t}-2.algolianet.com`},{url:`${t}-3.algolianet.com`}]))},e),{headers:u(g(u({},r.headers()),{"content-type":"application/x-www-form-urlencoded"}),e.headers),queryParameters:u(u({},r.queryParameters()),e.queryParameters)})),n={transporter:s,appId:t,addAlgoliaAgent(a,o){s.userAgent.add({segment:a,version:o})},clearCache(){return Promise.all([s.requestsCache.clear(),s.responsesCache.clear()]).then(()=>{})}};return l.addMethods(n,e.methods)};function 
ct(){return{name:"MissingObjectIDError",message:"All objects must have an unique objectID (like a primary key) to be valid. Algolia is also able to generate objectIDs automatically but *it's not recommended*. To do it, use the `{'autoGenerateObjectIDIfNotExist': true}` option."}}function ut(){return{name:"ObjectNotFoundError",message:"Object not found."}}function lt(){return{name:"ValidUntilNotFoundError",message:"ValidUntil not found in given secured api key."}}var Rr=e=>(t,r)=>{let d=r||{},{queryParameters:s}=d,n=R(d,["queryParameters"]),a=u({acl:t},s!==void 0?{queryParameters:s}:{}),o=(y,b)=>l.createRetryablePromise(f=>$(e)(y.key,b).catch(p=>{if(p.status!==404)throw p;return f()}));return l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Post,path:"1/keys",data:a},n),o)},vr=e=>(t,r,s)=>{let n=q.createMappedRequestOptions(s);return n.queryParameters["X-Algolia-User-ID"]=t,e.transporter.write({method:m.MethodEnum.Post,path:"1/clusters/mapping",data:{cluster:r}},n)},xr=e=>(t,r,s)=>e.transporter.write({method:m.MethodEnum.Post,path:"1/clusters/mapping/batch",data:{users:t,cluster:r}},s),Z=e=>(t,r,s)=>{let n=(a,o)=>L(e)(t,{methods:{waitTask:D}}).waitTask(a.taskID,o);return l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/operation",t),data:{operation:"copy",destination:r}},s),n)},qr=e=>(t,r,s)=>Z(e)(t,r,g(u({},s),{scope:[ee.Rules]})),Er=e=>(t,r,s)=>Z(e)(t,r,g(u({},s),{scope:[ee.Settings]})),Tr=e=>(t,r,s)=>Z(e)(t,r,g(u({},s),{scope:[ee.Synonyms]})),Mr=e=>(t,r)=>{let s=(n,a)=>l.createRetryablePromise(o=>$(e)(t,a).then(o).catch(d=>{if(d.status!==404)throw d}));return l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Delete,path:l.encode("1/keys/%s",t)},r),s)},wr=()=>(e,t)=>{let r=q.serializeQueryParameters(t),s=Sr.createHmac("sha256",e).update(r).digest("hex");return Buffer.from(s+r).toString("base64")},$=e=>(t,r)=>e.transporter.read({method:m.MethodEnum.Get,path:l.encode("1/keys/%s",t)},r),kr=e=>t=>e.transporter.read({method:m.MethodEnum.Get,path:"1/logs"},t),Cr=()=>e=>{let t=Buffer.from(e,"base64").toString("ascii"),r=/validUntil=(\d+)/,s=t.match(r);if(s===null)throw lt();return parseInt(s[1],10)-Math.round(new Date().getTime()/1e3)},Ur=e=>t=>e.transporter.read({method:m.MethodEnum.Get,path:"1/clusters/mapping/top"},t),Nr=e=>(t,r)=>e.transporter.read({method:m.MethodEnum.Get,path:l.encode("1/clusters/mapping/%s",t)},r),Wr=e=>t=>{let n=t||{},{retrieveMappings:r}=n,s=R(n,["retrieveMappings"]);return r===!0&&(s.getClusters=!0),e.transporter.read({method:m.MethodEnum.Get,path:"1/clusters/mapping/pending"},s)},L=e=>(t,r={})=>{let s={transporter:e.transporter,appId:e.appId,indexName:t};return l.addMethods(s,r.methods)},Hr=e=>t=>e.transporter.read({method:m.MethodEnum.Get,path:"1/keys"},t),_r=e=>t=>e.transporter.read({method:m.MethodEnum.Get,path:"1/clusters"},t),Fr=e=>t=>e.transporter.read({method:m.MethodEnum.Get,path:"1/indexes"},t),Br=e=>t=>e.transporter.read({method:m.MethodEnum.Get,path:"1/clusters/mapping"},t),Kr=e=>(t,r,s)=>{let n=(a,o)=>L(e)(t,{methods:{waitTask:D}}).waitTask(a.taskID,o);return l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/operation",t),data:{operation:"move",destination:r}},s),n)},zr=e=>(t,r)=>{let s=(n,a)=>Promise.all(Object.keys(n.taskID).map(o=>L(e)(o,{methods:{waitTask:D}}).waitTask(n.taskID[o],a)));return 
l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Post,path:"1/indexes/*/batch",data:{requests:t}},r),s)},Gr=e=>(t,r)=>e.transporter.read({method:m.MethodEnum.Post,path:"1/indexes/*/objects",data:{requests:t}},r),$r=e=>(t,r)=>{let s=t.map(n=>g(u({},n),{params:q.serializeQueryParameters(n.params||{})}));return e.transporter.read({method:m.MethodEnum.Post,path:"1/indexes/*/queries",data:{requests:s},cacheable:!0},r)},Lr=e=>(t,r)=>Promise.all(t.map(s=>{let d=s.params,{facetName:n,facetQuery:a}=d,o=R(d,["facetName","facetQuery"]);return L(e)(s.indexName,{methods:{searchForFacetValues:dt}}).searchForFacetValues(n,a,u(u({},r),o))})),Vr=e=>(t,r)=>{let s=q.createMappedRequestOptions(r);return s.queryParameters["X-Algolia-User-ID"]=t,e.transporter.write({method:m.MethodEnum.Delete,path:"1/clusters/mapping"},s)},Qr=e=>(t,r)=>{let s=(n,a)=>l.createRetryablePromise(o=>$(e)(t,a).catch(d=>{if(d.status!==404)throw d;return o()}));return l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Post,path:l.encode("1/keys/%s/restore",t)},r),s)},Jr=e=>(t,r)=>e.transporter.read({method:m.MethodEnum.Post,path:"1/clusters/mapping/search",data:{query:t}},r),Xr=e=>(t,r)=>{let s=Object.assign({},r),f=r||{},{queryParameters:n}=f,a=R(f,["queryParameters"]),o=n?{queryParameters:n}:{},d=["acl","indexes","referers","restrictSources","queryParameters","description","maxQueriesPerIPPerHour","maxHitsPerQuery"],y=p=>Object.keys(s).filter(h=>d.indexOf(h)!==-1).every(h=>p[h]===s[h]),b=(p,h)=>l.createRetryablePromise(S=>$(e)(t,h).then(O=>y(O)?Promise.resolve():S()));return l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Put,path:l.encode("1/keys/%s",t),data:o},a),b)},pt=e=>(t,r)=>{let s=(n,a)=>D(e)(n.taskID,a);return l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/batch",e.indexName),data:{requests:t}},r),s)},Yr=e=>t=>Y(g(u({},t),{shouldStop:r=>r.cursor===void 0,request:r=>e.transporter.read({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/browse",e.indexName),data:r},t)})),Zr=e=>t=>{let r=u({hitsPerPage:1e3},t);return Y(g(u({},r),{shouldStop:s=>s.hits.lengthg(u({},n),{hits:n.hits.map(a=>(delete a._highlightResult,a))}))}}))},es=e=>t=>{let r=u({hitsPerPage:1e3},t);return Y(g(u({},r),{shouldStop:s=>s.hits.lengthg(u({},n),{hits:n.hits.map(a=>(delete a._highlightResult,a))}))}}))},te=e=>(t,r,s)=>{let y=s||{},{batchSize:n}=y,a=R(y,["batchSize"]),o={taskIDs:[],objectIDs:[]},d=(b=0)=>{let f=[],p;for(p=b;p({action:r,body:h})),a).then(h=>(o.objectIDs=o.objectIDs.concat(h.objectIDs),o.taskIDs.push(h.taskID),p++,d(p)))};return l.createWaitablePromise(d(),(b,f)=>Promise.all(b.taskIDs.map(p=>D(e)(p,f))))},ts=e=>t=>l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/clear",e.indexName)},t),(r,s)=>D(e)(r.taskID,s)),rs=e=>t=>{let a=t||{},{forwardToReplicas:r}=a,s=R(a,["forwardToReplicas"]),n=q.createMappedRequestOptions(s);return r&&(n.queryParameters.forwardToReplicas=1),l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/rules/clear",e.indexName)},n),(o,d)=>D(e)(o.taskID,d))},ss=e=>t=>{let a=t||{},{forwardToReplicas:r}=a,s=R(a,["forwardToReplicas"]),n=q.createMappedRequestOptions(s);return 
r&&(n.queryParameters.forwardToReplicas=1),l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/synonyms/clear",e.indexName)},n),(o,d)=>D(e)(o.taskID,d))},ns=e=>(t,r)=>l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/deleteByQuery",e.indexName),data:t},r),(s,n)=>D(e)(s.taskID,n)),as=e=>t=>l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Delete,path:l.encode("1/indexes/%s",e.indexName)},t),(r,s)=>D(e)(r.taskID,s)),os=e=>(t,r)=>l.createWaitablePromise(yt(e)([t],r).then(s=>({taskID:s.taskIDs[0]})),(s,n)=>D(e)(s.taskID,n)),yt=e=>(t,r)=>{let s=t.map(n=>({objectID:n}));return te(e)(s,k.DeleteObject,r)},is=e=>(t,r)=>{let o=r||{},{forwardToReplicas:s}=o,n=R(o,["forwardToReplicas"]),a=q.createMappedRequestOptions(n);return s&&(a.queryParameters.forwardToReplicas=1),l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Delete,path:l.encode("1/indexes/%s/rules/%s",e.indexName,t)},a),(d,y)=>D(e)(d.taskID,y))},cs=e=>(t,r)=>{let o=r||{},{forwardToReplicas:s}=o,n=R(o,["forwardToReplicas"]),a=q.createMappedRequestOptions(n);return s&&(a.queryParameters.forwardToReplicas=1),l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Delete,path:l.encode("1/indexes/%s/synonyms/%s",e.indexName,t)},a),(d,y)=>D(e)(d.taskID,y))},us=e=>t=>gt(e)(t).then(()=>!0).catch(r=>{if(r.status!==404)throw r;return!1}),ls=e=>(t,r)=>{let y=r||{},{query:s,paginate:n}=y,a=R(y,["query","paginate"]),o=0,d=()=>ft(e)(s||"",g(u({},a),{page:o})).then(b=>{for(let[f,p]of Object.entries(b.hits))if(t(p))return{object:p,position:parseInt(f,10),page:o};if(o++,n===!1||o>=b.nbPages)throw ut();return d()});return d()},ds=e=>(t,r)=>e.transporter.read({method:m.MethodEnum.Get,path:l.encode("1/indexes/%s/%s",e.indexName,t)},r),ps=()=>(e,t)=>{for(let[r,s]of Object.entries(e.hits))if(s.objectID===t)return parseInt(r,10);return-1},ms=e=>(t,r)=>{let o=r||{},{attributesToRetrieve:s}=o,n=R(o,["attributesToRetrieve"]),a=t.map(d=>u({indexName:e.indexName,objectID:d},s?{attributesToRetrieve:s}:{}));return e.transporter.read({method:m.MethodEnum.Post,path:"1/indexes/*/objects",data:{requests:a}},n)},hs=e=>(t,r)=>e.transporter.read({method:m.MethodEnum.Get,path:l.encode("1/indexes/%s/rules/%s",e.indexName,t)},r),gt=e=>t=>e.transporter.read({method:m.MethodEnum.Get,path:l.encode("1/indexes/%s/settings",e.indexName),data:{getVersion:2}},t),ys=e=>(t,r)=>e.transporter.read({method:m.MethodEnum.Get,path:l.encode("1/indexes/%s/synonyms/%s",e.indexName,t)},r),bt=e=>(t,r)=>e.transporter.read({method:m.MethodEnum.Get,path:l.encode("1/indexes/%s/task/%s",e.indexName,t.toString())},r),gs=e=>(t,r)=>l.createWaitablePromise(Pt(e)([t],r).then(s=>({objectID:s.objectIDs[0],taskID:s.taskIDs[0]})),(s,n)=>D(e)(s.taskID,n)),Pt=e=>(t,r)=>{let o=r||{},{createIfNotExists:s}=o,n=R(o,["createIfNotExists"]),a=s?k.PartialUpdateObject:k.PartialUpdateObjectNoCreate;return te(e)(t,a,n)},fs=e=>(t,r)=>{let O=r||{},{safe:s,autoGenerateObjectIDIfNotExist:n,batchSize:a}=O,o=R(O,["safe","autoGenerateObjectIDIfNotExist","batchSize"]),d=(P,x,v,j)=>l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/operation",P),data:{operation:v,destination:x}},j),(T,V)=>D(e)(T.taskID,V)),y=Math.random().toString(36).substring(7),b=`${e.indexName}_tmp_${y}`,f=he({appId:e.appId,transporter:e.transporter,indexName:b}),p=[],h=d(e.indexName,b,"copy",g(u({},o),{scope:["settings","synonyms","rules"]}));p.push(h);let S=(s?h.wait(o):h).then(()=>{let 
P=f(t,g(u({},o),{autoGenerateObjectIDIfNotExist:n,batchSize:a}));return p.push(P),s?P.wait(o):P}).then(()=>{let P=d(b,e.indexName,"move",o);return p.push(P),s?P.wait(o):P}).then(()=>Promise.all(p)).then(([P,x,v])=>({objectIDs:x.objectIDs,taskIDs:[P.taskID,...x.taskIDs,v.taskID]}));return l.createWaitablePromise(S,(P,x)=>Promise.all(p.map(v=>v.wait(x))))},bs=e=>(t,r)=>ye(e)(t,g(u({},r),{clearExistingRules:!0})),Ps=e=>(t,r)=>ge(e)(t,g(u({},r),{replaceExistingSynonyms:!0})),js=e=>(t,r)=>l.createWaitablePromise(he(e)([t],r).then(s=>({objectID:s.objectIDs[0],taskID:s.taskIDs[0]})),(s,n)=>D(e)(s.taskID,n)),he=e=>(t,r)=>{let o=r||{},{autoGenerateObjectIDIfNotExist:s}=o,n=R(o,["autoGenerateObjectIDIfNotExist"]),a=s?k.AddObject:k.UpdateObject;if(a===k.UpdateObject){for(let d of t)if(d.objectID===void 0)return l.createWaitablePromise(Promise.reject(ct()))}return te(e)(t,a,n)},Os=e=>(t,r)=>ye(e)([t],r),ye=e=>(t,r)=>{let d=r||{},{forwardToReplicas:s,clearExistingRules:n}=d,a=R(d,["forwardToReplicas","clearExistingRules"]),o=q.createMappedRequestOptions(a);return s&&(o.queryParameters.forwardToReplicas=1),n&&(o.queryParameters.clearExistingRules=1),l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/rules/batch",e.indexName),data:t},o),(y,b)=>D(e)(y.taskID,b))},Is=e=>(t,r)=>ge(e)([t],r),ge=e=>(t,r)=>{let d=r||{},{forwardToReplicas:s,replaceExistingSynonyms:n}=d,a=R(d,["forwardToReplicas","replaceExistingSynonyms"]),o=q.createMappedRequestOptions(a);return s&&(o.queryParameters.forwardToReplicas=1),n&&(o.queryParameters.replaceExistingSynonyms=1),l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/synonyms/batch",e.indexName),data:t},o),(y,b)=>D(e)(y.taskID,b))},ft=e=>(t,r)=>e.transporter.read({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/query",e.indexName),data:{query:t},cacheable:!0},r),dt=e=>(t,r,s)=>e.transporter.read({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/facets/%s/query",e.indexName,t),data:{facetQuery:r},cacheable:!0},s),mt=e=>(t,r)=>e.transporter.read({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/rules/search",e.indexName),data:{query:t}},r),ht=e=>(t,r)=>e.transporter.read({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/synonyms/search",e.indexName),data:{query:t}},r),As=e=>(t,r)=>{let o=r||{},{forwardToReplicas:s}=o,n=R(o,["forwardToReplicas"]),a=q.createMappedRequestOptions(n);return s&&(a.queryParameters.forwardToReplicas=1),l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Put,path:l.encode("1/indexes/%s/settings",e.indexName),data:t},a),(d,y)=>D(e)(d.taskID,y))},D=e=>(t,r)=>l.createRetryablePromise(s=>bt(e)(t,r).then(n=>n.status!=="published"?s():void 
0)),Ss={AddObject:"addObject",Analytics:"analytics",Browser:"browse",DeleteIndex:"deleteIndex",DeleteObject:"deleteObject",EditSettings:"editSettings",ListIndexes:"listIndexes",Logs:"logs",Recommendation:"recommendation",Search:"search",SeeUnretrievableAttributes:"seeUnretrievableAttributes",Settings:"settings",Usage:"usage"},k={AddObject:"addObject",UpdateObject:"updateObject",PartialUpdateObject:"partialUpdateObject",PartialUpdateObjectNoCreate:"partialUpdateObjectNoCreate",DeleteObject:"deleteObject"},ee={Settings:"settings",Synonyms:"synonyms",Rules:"rules"},Ds={None:"none",StopIfEnoughMatches:"stopIfEnoughMatches"},Rs={Synonym:"synonym",OneWaySynonym:"oneWaySynonym",AltCorrection1:"altCorrection1",AltCorrection2:"altCorrection2",Placeholder:"placeholder"};i.ApiKeyACLEnum=Ss;i.BatchActionEnum=k;i.ScopeEnum=ee;i.StrategyEnum=Ds;i.SynonymEnum=Rs;i.addApiKey=Rr;i.assignUserID=vr;i.assignUserIDs=xr;i.batch=pt;i.browseObjects=Yr;i.browseRules=Zr;i.browseSynonyms=es;i.chunkedBatch=te;i.clearObjects=ts;i.clearRules=rs;i.clearSynonyms=ss;i.copyIndex=Z;i.copyRules=qr;i.copySettings=Er;i.copySynonyms=Tr;i.createBrowsablePromise=Y;i.createMissingObjectIDError=ct;i.createObjectNotFoundError=ut;i.createSearchClient=Dr;i.createValidUntilNotFoundError=lt;i.deleteApiKey=Mr;i.deleteBy=ns;i.deleteIndex=as;i.deleteObject=os;i.deleteObjects=yt;i.deleteRule=is;i.deleteSynonym=cs;i.exists=us;i.findObject=ls;i.generateSecuredApiKey=wr;i.getApiKey=$;i.getLogs=kr;i.getObject=ds;i.getObjectPosition=ps;i.getObjects=ms;i.getRule=hs;i.getSecuredApiKeyRemainingValidity=Cr;i.getSettings=gt;i.getSynonym=ys;i.getTask=bt;i.getTopUserIDs=Ur;i.getUserID=Nr;i.hasPendingMappings=Wr;i.initIndex=L;i.listApiKeys=Hr;i.listClusters=_r;i.listIndices=Fr;i.listUserIDs=Br;i.moveIndex=Kr;i.multipleBatch=zr;i.multipleGetObjects=Gr;i.multipleQueries=$r;i.multipleSearchForFacetValues=Lr;i.partialUpdateObject=gs;i.partialUpdateObjects=Pt;i.removeUserID=Vr;i.replaceAllObjects=fs;i.replaceAllRules=bs;i.replaceAllSynonyms=Ps;i.restoreApiKey=Qr;i.saveObject=js;i.saveObjects=he;i.saveRule=Os;i.saveRules=ye;i.saveSynonym=Is;i.saveSynonyms=ge;i.search=ft;i.searchForFacetValues=dt;i.searchRules=mt;i.searchSynonyms=ht;i.searchUserIDs=Jr;i.setSettings=As;i.updateApiKey=Xr;i.waitTask=D});var It=I((on,Ot)=>{Ot.exports=jt()});var At=I(re=>{"use strict";Object.defineProperty(re,"__esModule",{value:!0});function vs(){return{debug(e,t){return Promise.resolve()},info(e,t){return Promise.resolve()},error(e,t){return Promise.resolve()}}}var xs={Debug:1,Info:2,Error:3};re.LogLevelEnum=xs;re.createNullLogger=vs});var Dt=I((un,St)=>{St.exports=At()});var xt=I(fe=>{"use strict";Object.defineProperty(fe,"__esModule",{value:!0});var Rt=require("http"),vt=require("https"),qs=require("url");function Es(){let e={keepAlive:!0},t=new Rt.Agent(e),r=new vt.Agent(e);return{send(s){return new Promise(n=>{let a=qs.parse(s.url),o=a.query===null?a.pathname:`${a.pathname}?${a.query}`,d=u({agent:a.protocol==="https:"?r:t,hostname:a.hostname,path:o,method:s.method,headers:s.headers},a.port!==void 0?{port:a.port||""}:{}),y=(a.protocol==="https:"?vt:Rt).request(d,h=>{let S="";h.on("data",O=>S+=O),h.on("end",()=>{clearTimeout(f),clearTimeout(p),n({status:h.statusCode||0,content:S,isTimedOut:!1})})}),b=(h,S)=>setTimeout(()=>{y.abort(),n({status:0,content:S,isTimedOut:!0})},h*1e3),f=b(s.connectTimeout,"Connection 
timeout"),p;y.on("error",h=>{clearTimeout(f),clearTimeout(p),n({status:0,content:h.message,isTimedOut:!1})}),y.once("response",()=>{clearTimeout(f),p=b(s.responseTimeout,"Socket timeout")}),s.data!==void 0&&y.write(s.data),y.end()})},destroy(){return t.destroy(),r.destroy(),Promise.resolve()}}}fe.createNodeHttpRequester=Es});var Et=I((dn,qt)=>{qt.exports=xt()});var kt=I((pn,Tt)=>{"use strict";var Mt=Ee(),Ts=we(),W=st(),be=F(),Pe=it(),c=It(),Ms=Dt(),ws=Et(),ks=K();function wt(e,t,r){let s={appId:e,apiKey:t,timeouts:{connect:2,read:5,write:30},requester:ws.createNodeHttpRequester(),logger:Ms.createNullLogger(),responsesCache:Mt.createNullCache(),requestsCache:Mt.createNullCache(),hostsCache:Ts.createInMemoryCache(),userAgent:ks.createUserAgent(be.version).add({segment:"Node.js",version:process.versions.node})};return c.createSearchClient(g(u(u({},s),r),{methods:{search:c.multipleQueries,searchForFacetValues:c.multipleSearchForFacetValues,multipleBatch:c.multipleBatch,multipleGetObjects:c.multipleGetObjects,multipleQueries:c.multipleQueries,copyIndex:c.copyIndex,copySettings:c.copySettings,copyRules:c.copyRules,copySynonyms:c.copySynonyms,moveIndex:c.moveIndex,listIndices:c.listIndices,getLogs:c.getLogs,listClusters:c.listClusters,multipleSearchForFacetValues:c.multipleSearchForFacetValues,getApiKey:c.getApiKey,addApiKey:c.addApiKey,listApiKeys:c.listApiKeys,updateApiKey:c.updateApiKey,deleteApiKey:c.deleteApiKey,restoreApiKey:c.restoreApiKey,assignUserID:c.assignUserID,assignUserIDs:c.assignUserIDs,getUserID:c.getUserID,searchUserIDs:c.searchUserIDs,listUserIDs:c.listUserIDs,getTopUserIDs:c.getTopUserIDs,removeUserID:c.removeUserID,hasPendingMappings:c.hasPendingMappings,generateSecuredApiKey:c.generateSecuredApiKey,getSecuredApiKeyRemainingValidity:c.getSecuredApiKeyRemainingValidity,destroy:be.destroy,initIndex:n=>a=>c.initIndex(n)(a,{methods:{batch:c.batch,delete:c.deleteIndex,getObject:c.getObject,getObjects:c.getObjects,saveObject:c.saveObject,saveObjects:c.saveObjects,search:c.search,searchForFacetValues:c.searchForFacetValues,waitTask:c.waitTask,setSettings:c.setSettings,getSettings:c.getSettings,partialUpdateObject:c.partialUpdateObject,partialUpdateObjects:c.partialUpdateObjects,deleteObject:c.deleteObject,deleteObjects:c.deleteObjects,deleteBy:c.deleteBy,clearObjects:c.clearObjects,browseObjects:c.browseObjects,getObjectPosition:c.getObjectPosition,findObject:c.findObject,exists:c.exists,saveSynonym:c.saveSynonym,saveSynonyms:c.saveSynonyms,getSynonym:c.getSynonym,searchSynonyms:c.searchSynonyms,browseSynonyms:c.browseSynonyms,deleteSynonym:c.deleteSynonym,clearSynonyms:c.clearSynonyms,replaceAllObjects:c.replaceAllObjects,replaceAllSynonyms:c.replaceAllSynonyms,searchRules:c.searchRules,getRule:c.getRule,deleteRule:c.deleteRule,saveRule:c.saveRule,saveRules:c.saveRules,replaceAllRules:c.replaceAllRules,browseRules:c.browseRules,clearRules:c.clearRules}}),initAnalytics:()=>n=>W.createAnalyticsClient(g(u(u({},s),n),{methods:{addABTest:W.addABTest,getABTest:W.getABTest,getABTests:W.getABTests,stopABTest:W.stopABTest,deleteABTest:W.deleteABTest}})),initRecommendation:()=>n=>Pe.createRecommendationClient(g(u(u({},s),n),{methods:{getPersonalizationStrategy:Pe.getPersonalizationStrategy,setPersonalizationStrategy:Pe.setPersonalizationStrategy}}))}}))}wt.version=be.version;Tt.exports=wt});var Ut=I((mn,je)=>{var Ct=kt();je.exports=Ct;je.exports.default=Ct});var Ws={};Vt(Ws,{default:()=>Ks});var 
Oe=C(require("@yarnpkg/core")),E=C(require("@yarnpkg/core")),Ie=C(require("@yarnpkg/plugin-essentials")),Ht=C(require("semver"));var se=C(require("@yarnpkg/core")),Nt=C(Ut()),Cs="e8e1bd300d860104bb8c58453ffa1eb4",Us="OFCNCOG2CU",Wt=async(e,t)=>{var a;let r=se.structUtils.stringifyIdent(e),n=Ns(t).initIndex("npm-search");try{return((a=(await n.getObject(r,{attributesToRetrieve:["types"]})).types)==null?void 0:a.ts)==="definitely-typed"}catch(o){return!1}},Ns=e=>(0,Nt.default)(Us,Cs,{requester:{async send(r){try{let s=await se.httpUtils.request(r.url,r.data||null,{configuration:e,headers:r.headers});return{content:s.body,isTimedOut:!1,status:s.statusCode}}catch(s){return{content:s.response.body,isTimedOut:!1,status:s.response.statusCode}}}}});var _t=e=>e.scope?`${e.scope}__${e.name}`:`${e.name}`,Hs=async(e,t,r,s)=>{if(r.scope==="types")return;let{project:n}=e,{configuration:a}=n,o=a.makeResolver(),d={project:n,resolver:o,report:new E.ThrowReport};if(!await Wt(r,a))return;let b=_t(r),f=E.structUtils.parseRange(r.range).selector;if(!E.semverUtils.validRange(f)){let P=await o.getCandidates(r,new Map,d);f=E.structUtils.parseRange(P[0].reference).selector}let p=Ht.default.coerce(f);if(p===null)return;let h=`${Ie.suggestUtils.Modifier.CARET}${p.major}`,S=E.structUtils.makeDescriptor(E.structUtils.makeIdent("types",b),h),O=E.miscUtils.mapAndFind(n.workspaces,P=>{var T,V;let x=(T=P.manifest.dependencies.get(r.identHash))==null?void 0:T.descriptorHash,v=(V=P.manifest.devDependencies.get(r.identHash))==null?void 0:V.descriptorHash;if(x!==r.descriptorHash&&v!==r.descriptorHash)return E.miscUtils.mapAndFind.skip;let j=[];for(let Ae of Oe.Manifest.allDependencies){let Se=P.manifest[Ae].get(S.identHash);typeof Se!="undefined"&&j.push([Ae,Se])}return j.length===0?E.miscUtils.mapAndFind.skip:j});if(typeof O!="undefined")for(let[P,x]of O)e.manifest[P].set(x.identHash,x);else{try{if((await o.getCandidates(S,new Map,d)).length===0)return}catch{return}e.manifest[Ie.suggestUtils.Target.DEVELOPMENT].set(S.identHash,S)}},_s=async(e,t,r)=>{if(r.scope==="types")return;let s=_t(r),n=E.structUtils.makeIdent("types",s);for(let a of Oe.Manifest.allDependencies)typeof e.manifest[a].get(n.identHash)!="undefined"&&e.manifest[a].delete(n.identHash)},Fs=(e,t)=>{t.publishConfig&&t.publishConfig.typings&&(t.typings=t.publishConfig.typings),t.publishConfig&&t.publishConfig.types&&(t.types=t.publishConfig.types)},Bs={hooks:{afterWorkspaceDependencyAddition:Hs,afterWorkspaceDependencyRemoval:_s,beforeWorkspacePacking:Fs}},Ks=Bs;return Ws;})(); 7 | return plugin; 8 | } 9 | }; 10 | -------------------------------------------------------------------------------- /.yarn/sdks/eslint/bin/eslint.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const {existsSync} = require(`fs`); 4 | const {createRequire, createRequireFromPath} = require(`module`); 5 | const {resolve} = require(`path`); 6 | 7 | const relPnpApiPath = "../../../../.pnp.cjs"; 8 | 9 | const absPnpApiPath = resolve(__dirname, relPnpApiPath); 10 | const absRequire = (createRequire || createRequireFromPath)(absPnpApiPath); 11 | 12 | if (existsSync(absPnpApiPath)) { 13 | if (!process.versions.pnp) { 14 | // Setup the environment to be able to require eslint/bin/eslint.js 15 | require(absPnpApiPath).setup(); 16 | } 17 | } 18 | 19 | // Defer to the real eslint/bin/eslint.js your application uses 20 | module.exports = absRequire(`eslint/bin/eslint.js`); 21 | 
-------------------------------------------------------------------------------- /.yarn/sdks/eslint/lib/api.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const {existsSync} = require(`fs`); 4 | const {createRequire, createRequireFromPath} = require(`module`); 5 | const {resolve} = require(`path`); 6 | 7 | const relPnpApiPath = "../../../../.pnp.cjs"; 8 | 9 | const absPnpApiPath = resolve(__dirname, relPnpApiPath); 10 | const absRequire = (createRequire || createRequireFromPath)(absPnpApiPath); 11 | 12 | if (existsSync(absPnpApiPath)) { 13 | if (!process.versions.pnp) { 14 | // Setup the environment to be able to require eslint 15 | require(absPnpApiPath).setup(); 16 | } 17 | } 18 | 19 | // Defer to the real eslint your application uses 20 | module.exports = absRequire(`eslint`); 21 | -------------------------------------------------------------------------------- /.yarn/sdks/eslint/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "eslint", 3 | "version": "8.13.0-sdk", 4 | "main": "./lib/api.js", 5 | "type": "commonjs" 6 | } 7 | -------------------------------------------------------------------------------- /.yarn/sdks/integrations.yml: -------------------------------------------------------------------------------- 1 | # This file is automatically generated by @yarnpkg/sdks. 2 | # Manual changes might be lost! 3 | 4 | integrations: 5 | - vscode 6 | -------------------------------------------------------------------------------- /.yarn/sdks/prettier/index.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const {existsSync} = require(`fs`); 4 | const {createRequire, createRequireFromPath} = require(`module`); 5 | const {resolve} = require(`path`); 6 | 7 | const relPnpApiPath = "../../../.pnp.cjs"; 8 | 9 | const absPnpApiPath = resolve(__dirname, relPnpApiPath); 10 | const absRequire = (createRequire || createRequireFromPath)(absPnpApiPath); 11 | 12 | if (existsSync(absPnpApiPath)) { 13 | if (!process.versions.pnp) { 14 | // Setup the environment to be able to require prettier/index.js 15 | require(absPnpApiPath).setup(); 16 | } 17 | } 18 | 19 | // Defer to the real prettier/index.js your application uses 20 | module.exports = absRequire(`prettier/index.js`); 21 | -------------------------------------------------------------------------------- /.yarn/sdks/prettier/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "prettier", 3 | "version": "2.6.2-sdk", 4 | "main": "./index.js", 5 | "type": "commonjs" 6 | } 7 | -------------------------------------------------------------------------------- /.yarn/sdks/typescript/bin/tsc: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const {existsSync} = require(`fs`); 4 | const {createRequire, createRequireFromPath} = require(`module`); 5 | const {resolve} = require(`path`); 6 | 7 | const relPnpApiPath = "../../../../.pnp.cjs"; 8 | 9 | const absPnpApiPath = resolve(__dirname, relPnpApiPath); 10 | const absRequire = (createRequire || createRequireFromPath)(absPnpApiPath); 11 | 12 | if (existsSync(absPnpApiPath)) { 13 | if (!process.versions.pnp) { 14 | // Setup the environment to be able to require typescript/bin/tsc 15 | require(absPnpApiPath).setup(); 16 | } 17 | } 18 | 19 | // Defer to the real typescript/bin/tsc your application 
uses 20 | module.exports = absRequire(`typescript/bin/tsc`); 21 | -------------------------------------------------------------------------------- /.yarn/sdks/typescript/bin/tsserver: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const {existsSync} = require(`fs`); 4 | const {createRequire, createRequireFromPath} = require(`module`); 5 | const {resolve} = require(`path`); 6 | 7 | const relPnpApiPath = "../../../../.pnp.cjs"; 8 | 9 | const absPnpApiPath = resolve(__dirname, relPnpApiPath); 10 | const absRequire = (createRequire || createRequireFromPath)(absPnpApiPath); 11 | 12 | if (existsSync(absPnpApiPath)) { 13 | if (!process.versions.pnp) { 14 | // Setup the environment to be able to require typescript/bin/tsserver 15 | require(absPnpApiPath).setup(); 16 | } 17 | } 18 | 19 | // Defer to the real typescript/bin/tsserver your application uses 20 | module.exports = absRequire(`typescript/bin/tsserver`); 21 | -------------------------------------------------------------------------------- /.yarn/sdks/typescript/lib/tsc.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const {existsSync} = require(`fs`); 4 | const {createRequire, createRequireFromPath} = require(`module`); 5 | const {resolve} = require(`path`); 6 | 7 | const relPnpApiPath = "../../../../.pnp.cjs"; 8 | 9 | const absPnpApiPath = resolve(__dirname, relPnpApiPath); 10 | const absRequire = (createRequire || createRequireFromPath)(absPnpApiPath); 11 | 12 | if (existsSync(absPnpApiPath)) { 13 | if (!process.versions.pnp) { 14 | // Setup the environment to be able to require typescript/lib/tsc.js 15 | require(absPnpApiPath).setup(); 16 | } 17 | } 18 | 19 | // Defer to the real typescript/lib/tsc.js your application uses 20 | module.exports = absRequire(`typescript/lib/tsc.js`); 21 | -------------------------------------------------------------------------------- /.yarn/sdks/typescript/lib/tsserver.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const {existsSync} = require(`fs`); 4 | const {createRequire, createRequireFromPath} = require(`module`); 5 | const {resolve} = require(`path`); 6 | 7 | const relPnpApiPath = "../../../../.pnp.cjs"; 8 | 9 | const absPnpApiPath = resolve(__dirname, relPnpApiPath); 10 | const absRequire = (createRequire || createRequireFromPath)(absPnpApiPath); 11 | 12 | const moduleWrapper = tsserver => { 13 | if (!process.versions.pnp) { 14 | return tsserver; 15 | } 16 | 17 | const {isAbsolute} = require(`path`); 18 | const pnpApi = require(`pnpapi`); 19 | 20 | const isVirtual = str => str.match(/\/(\$\$virtual|__virtual__)\//); 21 | const isPortal = str => str.startsWith("portal:/"); 22 | const normalize = str => str.replace(/\\/g, `/`).replace(/^\/?/, `/`); 23 | 24 | const dependencyTreeRoots = new Set(pnpApi.getDependencyTreeRoots().map(locator => { 25 | return `${locator.name}@${locator.reference}`; 26 | })); 27 | 28 | // VSCode sends the zip paths to TS using the "zip://" prefix, that TS 29 | // doesn't understand. This layer makes sure to remove the protocol 30 | // before forwarding it to TS, and to add it back on all returned paths. 
31 | 32 | function toEditorPath(str) { 33 | // We add the `zip:` prefix to both `.zip/` paths and virtual paths 34 | if (isAbsolute(str) && !str.match(/^\^?(zip:|\/zip\/)/) && (str.match(/\.zip\//) || isVirtual(str))) { 35 | // We also take the opportunity to turn virtual paths into physical ones; 36 | // this makes it much easier to work with workspaces that list peer 37 | // dependencies, since otherwise Ctrl+Click would bring us to the virtual 38 | // file instances instead of the real ones. 39 | // 40 | // We only do this to modules owned by the the dependency tree roots. 41 | // This avoids breaking the resolution when jumping inside a vendor 42 | // with peer dep (otherwise jumping into react-dom would show resolution 43 | // errors on react). 44 | // 45 | const resolved = isVirtual(str) ? pnpApi.resolveVirtual(str) : str; 46 | if (resolved) { 47 | const locator = pnpApi.findPackageLocator(resolved); 48 | if (locator && (dependencyTreeRoots.has(`${locator.name}@${locator.reference}`) || isPortal(locator.reference))) { 49 | str = resolved; 50 | } 51 | } 52 | 53 | str = normalize(str); 54 | 55 | if (str.match(/\.zip\//)) { 56 | switch (hostInfo) { 57 | // Absolute VSCode `Uri.fsPath`s need to start with a slash. 58 | // VSCode only adds it automatically for supported schemes, 59 | // so we have to do it manually for the `zip` scheme. 60 | // The path needs to start with a caret otherwise VSCode doesn't handle the protocol 61 | // 62 | // Ref: https://github.com/microsoft/vscode/issues/105014#issuecomment-686760910 63 | // 64 | // Update 2021-10-08: VSCode changed their format in 1.61. 65 | // Before | ^zip:/c:/foo/bar.zip/package.json 66 | // After | ^/zip//c:/foo/bar.zip/package.json 67 | // 68 | // Update 2022-04-06: VSCode changed the format in 1.66. 69 | // Before | ^/zip//c:/foo/bar.zip/package.json 70 | // After | ^/zip/c:/foo/bar.zip/package.json 71 | // 72 | case `vscode <1.61`: { 73 | str = `^zip:${str}`; 74 | } break; 75 | 76 | case `vscode <1.66`: { 77 | str = `^/zip/${str}`; 78 | } break; 79 | 80 | case `vscode`: { 81 | str = `^/zip${str}`; 82 | } break; 83 | 84 | // To make "go to definition" work, 85 | // We have to resolve the actual file system path from virtual path 86 | // and convert scheme to supported by [vim-rzip](https://github.com/lbrayner/vim-rzip) 87 | case `coc-nvim`: { 88 | str = normalize(resolved).replace(/\.zip\//, `.zip::`); 89 | str = resolve(`zipfile:${str}`); 90 | } break; 91 | 92 | // Support neovim native LSP and [typescript-language-server](https://github.com/theia-ide/typescript-language-server) 93 | // We have to resolve the actual file system path from virtual path, 94 | // everything else is up to neovim 95 | case `neovim`: { 96 | str = normalize(resolved).replace(/\.zip\//, `.zip::`); 97 | str = `zipfile://${str}`; 98 | } break; 99 | 100 | default: { 101 | str = `zip:${str}`; 102 | } break; 103 | } 104 | } 105 | } 106 | 107 | return str; 108 | } 109 | 110 | function fromEditorPath(str) { 111 | switch (hostInfo) { 112 | case `coc-nvim`: { 113 | str = str.replace(/\.zip::/, `.zip/`); 114 | // The path for coc-nvim is in format of //zipfile://.yarn/... 115 | // So in order to convert it back, we use .* to match all the thing 116 | // before `zipfile:` 117 | return process.platform === `win32` 118 | ? str.replace(/^.*zipfile:\//, ``) 119 | : str.replace(/^.*zipfile:/, ``); 120 | } break; 121 | 122 | case `neovim`: { 123 | str = str.replace(/\.zip::/, `.zip/`); 124 | // The path for neovim is in format of zipfile:////.yarn/... 
125 | return str.replace(/^zipfile:\/\//, ``); 126 | } break; 127 | 128 | case `vscode`: 129 | default: { 130 | return process.platform === `win32` 131 | ? str.replace(/^\^?(zip:|\/zip)\/+/, ``) 132 | : str.replace(/^\^?(zip:|\/zip)\/+/, `/`); 133 | } break; 134 | } 135 | } 136 | 137 | // Force enable 'allowLocalPluginLoads' 138 | // TypeScript tries to resolve plugins using a path relative to itself 139 | // which doesn't work when using the global cache 140 | // https://github.com/microsoft/TypeScript/blob/1b57a0395e0bff191581c9606aab92832001de62/src/server/project.ts#L2238 141 | // VSCode doesn't want to enable 'allowLocalPluginLoads' due to security concerns but 142 | // TypeScript already does local loads and if this code is running the user trusts the workspace 143 | // https://github.com/microsoft/vscode/issues/45856 144 | const ConfiguredProject = tsserver.server.ConfiguredProject; 145 | const {enablePluginsWithOptions: originalEnablePluginsWithOptions} = ConfiguredProject.prototype; 146 | ConfiguredProject.prototype.enablePluginsWithOptions = function() { 147 | this.projectService.allowLocalPluginLoads = true; 148 | return originalEnablePluginsWithOptions.apply(this, arguments); 149 | }; 150 | 151 | // And here is the point where we hijack the VSCode <-> TS communications 152 | // by adding ourselves in the middle. We locate everything that looks 153 | // like an absolute path of ours and normalize it. 154 | 155 | const Session = tsserver.server.Session; 156 | const {onMessage: originalOnMessage, send: originalSend} = Session.prototype; 157 | let hostInfo = `unknown`; 158 | 159 | Object.assign(Session.prototype, { 160 | onMessage(/** @type {string | object} */ message) { 161 | const isStringMessage = typeof message === 'string'; 162 | const parsedMessage = isStringMessage ? JSON.parse(message) : message; 163 | 164 | if ( 165 | parsedMessage != null && 166 | typeof parsedMessage === `object` && 167 | parsedMessage.arguments && 168 | typeof parsedMessage.arguments.hostInfo === `string` 169 | ) { 170 | hostInfo = parsedMessage.arguments.hostInfo; 171 | if (hostInfo === `vscode` && process.env.VSCODE_IPC_HOOK) { 172 | if (/(\/|-)1\.([1-5][0-9]|60)\./.test(process.env.VSCODE_IPC_HOOK)) { 173 | hostInfo += ` <1.61`; 174 | } else if (/(\/|-)1\.(6[1-5])\./.test(process.env.VSCODE_IPC_HOOK)) { 175 | hostInfo += ` <1.66`; 176 | } 177 | } 178 | } 179 | 180 | const processedMessageJSON = JSON.stringify(parsedMessage, (key, value) => { 181 | return typeof value === 'string' ? fromEditorPath(value) : value; 182 | }); 183 | 184 | return originalOnMessage.call( 185 | this, 186 | isStringMessage ? processedMessageJSON : JSON.parse(processedMessageJSON) 187 | ); 188 | }, 189 | 190 | send(/** @type {any} */ msg) { 191 | return originalSend.call(this, JSON.parse(JSON.stringify(msg, (key, value) => { 192 | return typeof value === `string` ? 
toEditorPath(value) : value; 193 | }))); 194 | } 195 | }); 196 | 197 | return tsserver; 198 | }; 199 | 200 | if (existsSync(absPnpApiPath)) { 201 | if (!process.versions.pnp) { 202 | // Setup the environment to be able to require typescript/lib/tsserver.js 203 | require(absPnpApiPath).setup(); 204 | } 205 | } 206 | 207 | // Defer to the real typescript/lib/tsserver.js your application uses 208 | module.exports = moduleWrapper(absRequire(`typescript/lib/tsserver.js`)); 209 | -------------------------------------------------------------------------------- /.yarn/sdks/typescript/lib/tsserverlibrary.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const {existsSync} = require(`fs`); 4 | const {createRequire, createRequireFromPath} = require(`module`); 5 | const {resolve} = require(`path`); 6 | 7 | const relPnpApiPath = "../../../../.pnp.cjs"; 8 | 9 | const absPnpApiPath = resolve(__dirname, relPnpApiPath); 10 | const absRequire = (createRequire || createRequireFromPath)(absPnpApiPath); 11 | 12 | const moduleWrapper = tsserver => { 13 | if (!process.versions.pnp) { 14 | return tsserver; 15 | } 16 | 17 | const {isAbsolute} = require(`path`); 18 | const pnpApi = require(`pnpapi`); 19 | 20 | const isVirtual = str => str.match(/\/(\$\$virtual|__virtual__)\//); 21 | const isPortal = str => str.startsWith("portal:/"); 22 | const normalize = str => str.replace(/\\/g, `/`).replace(/^\/?/, `/`); 23 | 24 | const dependencyTreeRoots = new Set(pnpApi.getDependencyTreeRoots().map(locator => { 25 | return `${locator.name}@${locator.reference}`; 26 | })); 27 | 28 | // VSCode sends the zip paths to TS using the "zip://" prefix, that TS 29 | // doesn't understand. This layer makes sure to remove the protocol 30 | // before forwarding it to TS, and to add it back on all returned paths. 31 | 32 | function toEditorPath(str) { 33 | // We add the `zip:` prefix to both `.zip/` paths and virtual paths 34 | if (isAbsolute(str) && !str.match(/^\^?(zip:|\/zip\/)/) && (str.match(/\.zip\//) || isVirtual(str))) { 35 | // We also take the opportunity to turn virtual paths into physical ones; 36 | // this makes it much easier to work with workspaces that list peer 37 | // dependencies, since otherwise Ctrl+Click would bring us to the virtual 38 | // file instances instead of the real ones. 39 | // 40 | // We only do this to modules owned by the the dependency tree roots. 41 | // This avoids breaking the resolution when jumping inside a vendor 42 | // with peer dep (otherwise jumping into react-dom would show resolution 43 | // errors on react). 44 | // 45 | const resolved = isVirtual(str) ? pnpApi.resolveVirtual(str) : str; 46 | if (resolved) { 47 | const locator = pnpApi.findPackageLocator(resolved); 48 | if (locator && (dependencyTreeRoots.has(`${locator.name}@${locator.reference}`) || isPortal(locator.reference))) { 49 | str = resolved; 50 | } 51 | } 52 | 53 | str = normalize(str); 54 | 55 | if (str.match(/\.zip\//)) { 56 | switch (hostInfo) { 57 | // Absolute VSCode `Uri.fsPath`s need to start with a slash. 58 | // VSCode only adds it automatically for supported schemes, 59 | // so we have to do it manually for the `zip` scheme. 60 | // The path needs to start with a caret otherwise VSCode doesn't handle the protocol 61 | // 62 | // Ref: https://github.com/microsoft/vscode/issues/105014#issuecomment-686760910 63 | // 64 | // Update 2021-10-08: VSCode changed their format in 1.61. 
65 | // Before | ^zip:/c:/foo/bar.zip/package.json 66 | // After | ^/zip//c:/foo/bar.zip/package.json 67 | // 68 | // Update 2022-04-06: VSCode changed the format in 1.66. 69 | // Before | ^/zip//c:/foo/bar.zip/package.json 70 | // After | ^/zip/c:/foo/bar.zip/package.json 71 | // 72 | case `vscode <1.61`: { 73 | str = `^zip:${str}`; 74 | } break; 75 | 76 | case `vscode <1.66`: { 77 | str = `^/zip/${str}`; 78 | } break; 79 | 80 | case `vscode`: { 81 | str = `^/zip${str}`; 82 | } break; 83 | 84 | // To make "go to definition" work, 85 | // We have to resolve the actual file system path from virtual path 86 | // and convert scheme to supported by [vim-rzip](https://github.com/lbrayner/vim-rzip) 87 | case `coc-nvim`: { 88 | str = normalize(resolved).replace(/\.zip\//, `.zip::`); 89 | str = resolve(`zipfile:${str}`); 90 | } break; 91 | 92 | // Support neovim native LSP and [typescript-language-server](https://github.com/theia-ide/typescript-language-server) 93 | // We have to resolve the actual file system path from virtual path, 94 | // everything else is up to neovim 95 | case `neovim`: { 96 | str = normalize(resolved).replace(/\.zip\//, `.zip::`); 97 | str = `zipfile://${str}`; 98 | } break; 99 | 100 | default: { 101 | str = `zip:${str}`; 102 | } break; 103 | } 104 | } 105 | } 106 | 107 | return str; 108 | } 109 | 110 | function fromEditorPath(str) { 111 | switch (hostInfo) { 112 | case `coc-nvim`: { 113 | str = str.replace(/\.zip::/, `.zip/`); 114 | // The path for coc-nvim is in format of //zipfile://.yarn/... 115 | // So in order to convert it back, we use .* to match all the thing 116 | // before `zipfile:` 117 | return process.platform === `win32` 118 | ? str.replace(/^.*zipfile:\//, ``) 119 | : str.replace(/^.*zipfile:/, ``); 120 | } break; 121 | 122 | case `neovim`: { 123 | str = str.replace(/\.zip::/, `.zip/`); 124 | // The path for neovim is in format of zipfile:////.yarn/... 125 | return str.replace(/^zipfile:\/\//, ``); 126 | } break; 127 | 128 | case `vscode`: 129 | default: { 130 | return process.platform === `win32` 131 | ? str.replace(/^\^?(zip:|\/zip)\/+/, ``) 132 | : str.replace(/^\^?(zip:|\/zip)\/+/, `/`); 133 | } break; 134 | } 135 | } 136 | 137 | // Force enable 'allowLocalPluginLoads' 138 | // TypeScript tries to resolve plugins using a path relative to itself 139 | // which doesn't work when using the global cache 140 | // https://github.com/microsoft/TypeScript/blob/1b57a0395e0bff191581c9606aab92832001de62/src/server/project.ts#L2238 141 | // VSCode doesn't want to enable 'allowLocalPluginLoads' due to security concerns but 142 | // TypeScript already does local loads and if this code is running the user trusts the workspace 143 | // https://github.com/microsoft/vscode/issues/45856 144 | const ConfiguredProject = tsserver.server.ConfiguredProject; 145 | const {enablePluginsWithOptions: originalEnablePluginsWithOptions} = ConfiguredProject.prototype; 146 | ConfiguredProject.prototype.enablePluginsWithOptions = function() { 147 | this.projectService.allowLocalPluginLoads = true; 148 | return originalEnablePluginsWithOptions.apply(this, arguments); 149 | }; 150 | 151 | // And here is the point where we hijack the VSCode <-> TS communications 152 | // by adding ourselves in the middle. We locate everything that looks 153 | // like an absolute path of ours and normalize it. 
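// Editor's sketch of the interception pattern used just below, relying only on the
// standard JSON.stringify replacer behavior: serializing a message with a replacer
// visits every string value, so a single function can rewrite all embedded paths
// without knowing the message schema.
//
//   const rewritten = JSON.parse(JSON.stringify(msg, (key, value) =>
//     typeof value === `string` ? toEditorPath(value) : value));
//
// The same trick runs in the opposite direction in onMessage() via fromEditorPath().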
154 | 155 | const Session = tsserver.server.Session; 156 | const {onMessage: originalOnMessage, send: originalSend} = Session.prototype; 157 | let hostInfo = `unknown`; 158 | 159 | Object.assign(Session.prototype, { 160 | onMessage(/** @type {string | object} */ message) { 161 | const isStringMessage = typeof message === 'string'; 162 | const parsedMessage = isStringMessage ? JSON.parse(message) : message; 163 | 164 | if ( 165 | parsedMessage != null && 166 | typeof parsedMessage === `object` && 167 | parsedMessage.arguments && 168 | typeof parsedMessage.arguments.hostInfo === `string` 169 | ) { 170 | hostInfo = parsedMessage.arguments.hostInfo; 171 | if (hostInfo === `vscode` && process.env.VSCODE_IPC_HOOK) { 172 | if (/(\/|-)1\.([1-5][0-9]|60)\./.test(process.env.VSCODE_IPC_HOOK)) { 173 | hostInfo += ` <1.61`; 174 | } else if (/(\/|-)1\.(6[1-5])\./.test(process.env.VSCODE_IPC_HOOK)) { 175 | hostInfo += ` <1.66`; 176 | } 177 | } 178 | } 179 | 180 | const processedMessageJSON = JSON.stringify(parsedMessage, (key, value) => { 181 | return typeof value === 'string' ? fromEditorPath(value) : value; 182 | }); 183 | 184 | return originalOnMessage.call( 185 | this, 186 | isStringMessage ? processedMessageJSON : JSON.parse(processedMessageJSON) 187 | ); 188 | }, 189 | 190 | send(/** @type {any} */ msg) { 191 | return originalSend.call(this, JSON.parse(JSON.stringify(msg, (key, value) => { 192 | return typeof value === `string` ? toEditorPath(value) : value; 193 | }))); 194 | } 195 | }); 196 | 197 | return tsserver; 198 | }; 199 | 200 | if (existsSync(absPnpApiPath)) { 201 | if (!process.versions.pnp) { 202 | // Setup the environment to be able to require typescript/lib/tsserverlibrary.js 203 | require(absPnpApiPath).setup(); 204 | } 205 | } 206 | 207 | // Defer to the real typescript/lib/tsserverlibrary.js your application uses 208 | module.exports = moduleWrapper(absRequire(`typescript/lib/tsserverlibrary.js`)); 209 | -------------------------------------------------------------------------------- /.yarn/sdks/typescript/lib/typescript.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const {existsSync} = require(`fs`); 4 | const {createRequire, createRequireFromPath} = require(`module`); 5 | const {resolve} = require(`path`); 6 | 7 | const relPnpApiPath = "../../../../.pnp.cjs"; 8 | 9 | const absPnpApiPath = resolve(__dirname, relPnpApiPath); 10 | const absRequire = (createRequire || createRequireFromPath)(absPnpApiPath); 11 | 12 | if (existsSync(absPnpApiPath)) { 13 | if (!process.versions.pnp) { 14 | // Setup the environment to be able to require typescript/lib/typescript.js 15 | require(absPnpApiPath).setup(); 16 | } 17 | } 18 | 19 | // Defer to the real typescript/lib/typescript.js your application uses 20 | module.exports = absRequire(`typescript/lib/typescript.js`); 21 | -------------------------------------------------------------------------------- /.yarn/sdks/typescript/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "typescript", 3 | "version": "4.6.3-sdk", 4 | "main": "./lib/typescript.js", 5 | "type": "commonjs" 6 | } 7 | -------------------------------------------------------------------------------- /.yarnrc.yml: -------------------------------------------------------------------------------- 1 | enableGlobalCache: true 2 | 3 | nodeLinker: pnp 4 | 5 | plugins: 6 | - path: .yarn/plugins/@yarnpkg/plugin-interactive-tools.cjs 7 | spec: 
"@yarnpkg/plugin-interactive-tools" 8 | - path: .yarn/plugins/@yarnpkg/plugin-typescript.cjs 9 | spec: "@yarnpkg/plugin-typescript" 10 | 11 | yarnPath: .yarn/releases/yarn-3.2.0.cjs 12 | -------------------------------------------------------------------------------- /Anchor.toml: -------------------------------------------------------------------------------- 1 | anchor_version = "0.24.2" 2 | solana_version = "1.9.12" 3 | 4 | [features] 5 | seeds = true 6 | 7 | [scripts] 8 | test = "yarn mocha" 9 | 10 | [provider] 11 | cluster = "localnet" 12 | wallet = "./tests/test-key.json" 13 | 14 | [[test.genesis]] 15 | address = "Govz1VyoyLD5BL6CSCxUJLVLsQHRwjfFj1prNsdNg5Jw" 16 | program = "./artifacts/deploy/govern.so" 17 | 18 | [[test.genesis]] 19 | address = "LocktDzaV1W2Bm9DeZeiyz4J9zs4fRqNiYqQyracRXw" 20 | program = "./artifacts/deploy/locked_voter.so" 21 | 22 | [[test.genesis]] 23 | address = "GokivDYuQXPZCWRkwMhdH2h91KpDQXBEmpgBgs55bnpH" 24 | program = "./artifacts/deploy/smart_wallet.so" 25 | 26 | [programs.localnet] 27 | snapshots = "StakeSSzfxn391k3LvdKbZP5WVwWd6AsY1DNiXHjQfK" 28 | 29 | [programs.devnet] 30 | snapshots = "StakeSSzfxn391k3LvdKbZP5WVwWd6AsY1DNiXHjQfK" 31 | 32 | [programs.testnet] 33 | snapshots = "StakeSSzfxn391k3LvdKbZP5WVwWd6AsY1DNiXHjQfK" 34 | 35 | [programs.mainnet] 36 | snapshots = "StakeSSzfxn391k3LvdKbZP5WVwWd6AsY1DNiXHjQfK" 37 | -------------------------------------------------------------------------------- /Cargo.lock: -------------------------------------------------------------------------------- 1 | # This file is automatically @generated by Cargo. 2 | # It is not intended for manual editing. 3 | version = 3 4 | 5 | [[package]] 6 | name = "ahash" 7 | version = "0.7.6" 8 | source = "registry+https://github.com/rust-lang/crates.io-index" 9 | checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47" 10 | dependencies = [ 11 | "getrandom 0.2.6", 12 | "once_cell", 13 | "version_check", 14 | ] 15 | 16 | [[package]] 17 | name = "aho-corasick" 18 | version = "0.7.18" 19 | source = "registry+https://github.com/rust-lang/crates.io-index" 20 | checksum = "1e37cfd5e7657ada45f742d6e99ca5788580b5c529dc78faf11ece6dc702656f" 21 | dependencies = [ 22 | "memchr", 23 | ] 24 | 25 | [[package]] 26 | name = "anchor-attribute-access-control" 27 | version = "0.24.2" 28 | source = "registry+https://github.com/rust-lang/crates.io-index" 29 | checksum = "a9b75d05b6b4ac9d95bb6e3b786b27d3a708c4c5a87c92ffaa25bbe9ae4c5d91" 30 | dependencies = [ 31 | "anchor-syn", 32 | "anyhow", 33 | "proc-macro2", 34 | "quote", 35 | "regex", 36 | "syn", 37 | ] 38 | 39 | [[package]] 40 | name = "anchor-attribute-account" 41 | version = "0.24.2" 42 | source = "registry+https://github.com/rust-lang/crates.io-index" 43 | checksum = "485351a6d8157750d10d88c8e256f1bf8339262b2220ae9125aed3471309b5de" 44 | dependencies = [ 45 | "anchor-syn", 46 | "anyhow", 47 | "bs58 0.4.0", 48 | "proc-macro2", 49 | "quote", 50 | "rustversion", 51 | "syn", 52 | ] 53 | 54 | [[package]] 55 | name = "anchor-attribute-constant" 56 | version = "0.24.2" 57 | source = "registry+https://github.com/rust-lang/crates.io-index" 58 | checksum = "dc632c540913dd051a78b00587cc47f57013d303163ddfaf4fa18717f7ccc1e0" 59 | dependencies = [ 60 | "anchor-syn", 61 | "proc-macro2", 62 | "syn", 63 | ] 64 | 65 | [[package]] 66 | name = "anchor-attribute-error" 67 | version = "0.24.2" 68 | source = "registry+https://github.com/rust-lang/crates.io-index" 69 | checksum = "3b5bd1dcfa7f3bc22dacef233d70a9e0bee269c4ac484510662f257cba2353a1" 70 | 
dependencies = [ 71 | "anchor-syn", 72 | "proc-macro2", 73 | "quote", 74 | "syn", 75 | ] 76 | 77 | [[package]] 78 | name = "anchor-attribute-event" 79 | version = "0.24.2" 80 | source = "registry+https://github.com/rust-lang/crates.io-index" 81 | checksum = "6c6f9e6ce551ac9a177a45c99a65699a860c9e95fac68675138af1246e2591b0" 82 | dependencies = [ 83 | "anchor-syn", 84 | "anyhow", 85 | "proc-macro2", 86 | "quote", 87 | "syn", 88 | ] 89 | 90 | [[package]] 91 | name = "anchor-attribute-interface" 92 | version = "0.24.2" 93 | source = "registry+https://github.com/rust-lang/crates.io-index" 94 | checksum = "d104aa17418cb329ed7418b227e083d5f326a27f26ce98f5d92e33da62a5f459" 95 | dependencies = [ 96 | "anchor-syn", 97 | "anyhow", 98 | "heck", 99 | "proc-macro2", 100 | "quote", 101 | "syn", 102 | ] 103 | 104 | [[package]] 105 | name = "anchor-attribute-program" 106 | version = "0.24.2" 107 | source = "registry+https://github.com/rust-lang/crates.io-index" 108 | checksum = "b6831b920b173c004ddf7ae1167d1d25e9f002ffcb1773bbc5c7ce532a4441e1" 109 | dependencies = [ 110 | "anchor-syn", 111 | "anyhow", 112 | "proc-macro2", 113 | "quote", 114 | "syn", 115 | ] 116 | 117 | [[package]] 118 | name = "anchor-attribute-state" 119 | version = "0.24.2" 120 | source = "registry+https://github.com/rust-lang/crates.io-index" 121 | checksum = "cde147b10c71d95dc679785db0b5f3abac0091f789167aa62ac0135e2f54e8b9" 122 | dependencies = [ 123 | "anchor-syn", 124 | "anyhow", 125 | "proc-macro2", 126 | "quote", 127 | "syn", 128 | ] 129 | 130 | [[package]] 131 | name = "anchor-derive-accounts" 132 | version = "0.24.2" 133 | source = "registry+https://github.com/rust-lang/crates.io-index" 134 | checksum = "9cde98a0e1a56046b040ff591dfda391f88917af2b6487d02b45093c05be3514" 135 | dependencies = [ 136 | "anchor-syn", 137 | "anyhow", 138 | "proc-macro2", 139 | "quote", 140 | "syn", 141 | ] 142 | 143 | [[package]] 144 | name = "anchor-lang" 145 | version = "0.24.2" 146 | source = "registry+https://github.com/rust-lang/crates.io-index" 147 | checksum = "a85dd2c5e29e20c7f4701a43724d6cd5406d0ee5694705522e43da0f26542a84" 148 | dependencies = [ 149 | "anchor-attribute-access-control", 150 | "anchor-attribute-account", 151 | "anchor-attribute-constant", 152 | "anchor-attribute-error", 153 | "anchor-attribute-event", 154 | "anchor-attribute-interface", 155 | "anchor-attribute-program", 156 | "anchor-attribute-state", 157 | "anchor-derive-accounts", 158 | "arrayref", 159 | "base64 0.13.0", 160 | "bincode", 161 | "borsh", 162 | "bytemuck", 163 | "solana-program", 164 | "thiserror", 165 | ] 166 | 167 | [[package]] 168 | name = "anchor-spl" 169 | version = "0.24.2" 170 | source = "registry+https://github.com/rust-lang/crates.io-index" 171 | checksum = "0188c33b4a3c124c4e593f2b440415aaea70a7650fac6ba0772395385d71c003" 172 | dependencies = [ 173 | "anchor-lang", 174 | "solana-program", 175 | "spl-associated-token-account", 176 | "spl-token", 177 | ] 178 | 179 | [[package]] 180 | name = "anchor-syn" 181 | version = "0.24.2" 182 | source = "registry+https://github.com/rust-lang/crates.io-index" 183 | checksum = "03549dc2eae0b20beba6333b14520e511822a6321cdb1760f841064a69347316" 184 | dependencies = [ 185 | "anyhow", 186 | "bs58 0.3.1", 187 | "heck", 188 | "proc-macro2", 189 | "proc-macro2-diagnostics", 190 | "quote", 191 | "serde", 192 | "serde_json", 193 | "sha2", 194 | "syn", 195 | "thiserror", 196 | ] 197 | 198 | [[package]] 199 | name = "anyhow" 200 | version = "1.0.56" 201 | source = "registry+https://github.com/rust-lang/crates.io-index" 202 | 
checksum = "4361135be9122e0870de935d7c439aef945b9f9ddd4199a553b5270b49c82a27" 203 | 204 | [[package]] 205 | name = "arrayref" 206 | version = "0.3.6" 207 | source = "registry+https://github.com/rust-lang/crates.io-index" 208 | checksum = "a4c527152e37cf757a3f78aae5a06fbeefdb07ccc535c980a3208ee3060dd544" 209 | 210 | [[package]] 211 | name = "arrayvec" 212 | version = "0.7.2" 213 | source = "registry+https://github.com/rust-lang/crates.io-index" 214 | checksum = "8da52d66c7071e2e3fa2a1e5c6d088fec47b593032b254f5e980de8ea54454d6" 215 | 216 | [[package]] 217 | name = "atty" 218 | version = "0.2.14" 219 | source = "registry+https://github.com/rust-lang/crates.io-index" 220 | checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" 221 | dependencies = [ 222 | "hermit-abi", 223 | "libc", 224 | "winapi", 225 | ] 226 | 227 | [[package]] 228 | name = "autocfg" 229 | version = "1.1.0" 230 | source = "registry+https://github.com/rust-lang/crates.io-index" 231 | checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" 232 | 233 | [[package]] 234 | name = "base64" 235 | version = "0.12.3" 236 | source = "registry+https://github.com/rust-lang/crates.io-index" 237 | checksum = "3441f0f7b02788e948e47f457ca01f1d7e6d92c693bc132c22b087d3141c03ff" 238 | 239 | [[package]] 240 | name = "base64" 241 | version = "0.13.0" 242 | source = "registry+https://github.com/rust-lang/crates.io-index" 243 | checksum = "904dfeac50f3cdaba28fc6f57fdcddb75f49ed61346676a78c4ffe55877802fd" 244 | 245 | [[package]] 246 | name = "bincode" 247 | version = "1.3.3" 248 | source = "registry+https://github.com/rust-lang/crates.io-index" 249 | checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad" 250 | dependencies = [ 251 | "serde", 252 | ] 253 | 254 | [[package]] 255 | name = "bit-set" 256 | version = "0.5.2" 257 | source = "registry+https://github.com/rust-lang/crates.io-index" 258 | checksum = "6e11e16035ea35e4e5997b393eacbf6f63983188f7a2ad25bfb13465f5ad59de" 259 | dependencies = [ 260 | "bit-vec", 261 | ] 262 | 263 | [[package]] 264 | name = "bit-vec" 265 | version = "0.6.3" 266 | source = "registry+https://github.com/rust-lang/crates.io-index" 267 | checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb" 268 | 269 | [[package]] 270 | name = "bitflags" 271 | version = "1.3.2" 272 | source = "registry+https://github.com/rust-lang/crates.io-index" 273 | checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" 274 | 275 | [[package]] 276 | name = "blake3" 277 | version = "1.3.1" 278 | source = "registry+https://github.com/rust-lang/crates.io-index" 279 | checksum = "a08e53fc5a564bb15bfe6fae56bd71522205f1f91893f9c0116edad6496c183f" 280 | dependencies = [ 281 | "arrayref", 282 | "arrayvec", 283 | "cc", 284 | "cfg-if", 285 | "constant_time_eq", 286 | "digest 0.10.3", 287 | ] 288 | 289 | [[package]] 290 | name = "block-buffer" 291 | version = "0.9.0" 292 | source = "registry+https://github.com/rust-lang/crates.io-index" 293 | checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4" 294 | dependencies = [ 295 | "block-padding", 296 | "generic-array", 297 | ] 298 | 299 | [[package]] 300 | name = "block-buffer" 301 | version = "0.10.2" 302 | source = "registry+https://github.com/rust-lang/crates.io-index" 303 | checksum = "0bf7fe51849ea569fd452f37822f606a5cabb684dc918707a0193fd4664ff324" 304 | dependencies = [ 305 | "generic-array", 306 | ] 307 | 308 | [[package]] 309 | name = "block-padding" 310 | version = "0.2.1" 
311 | source = "registry+https://github.com/rust-lang/crates.io-index" 312 | checksum = "8d696c370c750c948ada61c69a0ee2cbbb9c50b1019ddb86d9317157a99c2cae" 313 | 314 | [[package]] 315 | name = "borsh" 316 | version = "0.9.3" 317 | source = "registry+https://github.com/rust-lang/crates.io-index" 318 | checksum = "15bf3650200d8bffa99015595e10f1fbd17de07abbc25bb067da79e769939bfa" 319 | dependencies = [ 320 | "borsh-derive", 321 | "hashbrown", 322 | ] 323 | 324 | [[package]] 325 | name = "borsh-derive" 326 | version = "0.9.3" 327 | source = "registry+https://github.com/rust-lang/crates.io-index" 328 | checksum = "6441c552f230375d18e3cc377677914d2ca2b0d36e52129fe15450a2dce46775" 329 | dependencies = [ 330 | "borsh-derive-internal", 331 | "borsh-schema-derive-internal", 332 | "proc-macro-crate 0.1.5", 333 | "proc-macro2", 334 | "syn", 335 | ] 336 | 337 | [[package]] 338 | name = "borsh-derive-internal" 339 | version = "0.9.3" 340 | source = "registry+https://github.com/rust-lang/crates.io-index" 341 | checksum = "5449c28a7b352f2d1e592a8a28bf139bc71afb0764a14f3c02500935d8c44065" 342 | dependencies = [ 343 | "proc-macro2", 344 | "quote", 345 | "syn", 346 | ] 347 | 348 | [[package]] 349 | name = "borsh-schema-derive-internal" 350 | version = "0.9.3" 351 | source = "registry+https://github.com/rust-lang/crates.io-index" 352 | checksum = "cdbd5696d8bfa21d53d9fe39a714a18538bad11492a42d066dbbc395fb1951c0" 353 | dependencies = [ 354 | "proc-macro2", 355 | "quote", 356 | "syn", 357 | ] 358 | 359 | [[package]] 360 | name = "bs58" 361 | version = "0.3.1" 362 | source = "registry+https://github.com/rust-lang/crates.io-index" 363 | checksum = "476e9cd489f9e121e02ffa6014a8ef220ecb15c05ed23fc34cca13925dc283fb" 364 | 365 | [[package]] 366 | name = "bs58" 367 | version = "0.4.0" 368 | source = "registry+https://github.com/rust-lang/crates.io-index" 369 | checksum = "771fe0050b883fcc3ea2359b1a96bcfbc090b7116eae7c3c512c7a083fdf23d3" 370 | 371 | [[package]] 372 | name = "bumpalo" 373 | version = "3.9.1" 374 | source = "registry+https://github.com/rust-lang/crates.io-index" 375 | checksum = "a4a45a46ab1f2412e53d3a0ade76ffad2025804294569aae387231a0cd6e0899" 376 | 377 | [[package]] 378 | name = "bv" 379 | version = "0.11.1" 380 | source = "registry+https://github.com/rust-lang/crates.io-index" 381 | checksum = "8834bb1d8ee5dc048ee3124f2c7c1afcc6bc9aed03f11e9dfd8c69470a5db340" 382 | dependencies = [ 383 | "feature-probe", 384 | "serde", 385 | ] 386 | 387 | [[package]] 388 | name = "bytemuck" 389 | version = "1.9.1" 390 | source = "registry+https://github.com/rust-lang/crates.io-index" 391 | checksum = "cdead85bdec19c194affaeeb670c0e41fe23de31459efd1c174d049269cf02cc" 392 | dependencies = [ 393 | "bytemuck_derive", 394 | ] 395 | 396 | [[package]] 397 | name = "bytemuck_derive" 398 | version = "1.1.0" 399 | source = "registry+https://github.com/rust-lang/crates.io-index" 400 | checksum = "562e382481975bc61d11275ac5e62a19abd00b0547d99516a415336f183dcd0e" 401 | dependencies = [ 402 | "proc-macro2", 403 | "quote", 404 | "syn", 405 | ] 406 | 407 | [[package]] 408 | name = "byteorder" 409 | version = "1.4.3" 410 | source = "registry+https://github.com/rust-lang/crates.io-index" 411 | checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" 412 | 413 | [[package]] 414 | name = "cc" 415 | version = "1.0.73" 416 | source = "registry+https://github.com/rust-lang/crates.io-index" 417 | checksum = "2fff2a6927b3bb87f9595d67196a70493f627687a71d87a0d692242c33f58c11" 418 | 419 | [[package]] 420 | name = "cfg-if" 421 
| version = "1.0.0" 422 | source = "registry+https://github.com/rust-lang/crates.io-index" 423 | checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" 424 | 425 | [[package]] 426 | name = "console_error_panic_hook" 427 | version = "0.1.7" 428 | source = "registry+https://github.com/rust-lang/crates.io-index" 429 | checksum = "a06aeb73f470f66dcdbf7223caeebb85984942f22f1adb2a088cf9668146bbbc" 430 | dependencies = [ 431 | "cfg-if", 432 | "wasm-bindgen", 433 | ] 434 | 435 | [[package]] 436 | name = "console_log" 437 | version = "0.2.0" 438 | source = "registry+https://github.com/rust-lang/crates.io-index" 439 | checksum = "501a375961cef1a0d44767200e66e4a559283097e91d0730b1d75dfb2f8a1494" 440 | dependencies = [ 441 | "log", 442 | "web-sys", 443 | ] 444 | 445 | [[package]] 446 | name = "constant_time_eq" 447 | version = "0.1.5" 448 | source = "registry+https://github.com/rust-lang/crates.io-index" 449 | checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc" 450 | 451 | [[package]] 452 | name = "cpufeatures" 453 | version = "0.2.2" 454 | source = "registry+https://github.com/rust-lang/crates.io-index" 455 | checksum = "59a6001667ab124aebae2a495118e11d30984c3a653e99d86d58971708cf5e4b" 456 | dependencies = [ 457 | "libc", 458 | ] 459 | 460 | [[package]] 461 | name = "crunchy" 462 | version = "0.2.2" 463 | source = "registry+https://github.com/rust-lang/crates.io-index" 464 | checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" 465 | 466 | [[package]] 467 | name = "crypto-common" 468 | version = "0.1.3" 469 | source = "registry+https://github.com/rust-lang/crates.io-index" 470 | checksum = "57952ca27b5e3606ff4dd79b0020231aaf9d6aa76dc05fd30137538c50bd3ce8" 471 | dependencies = [ 472 | "generic-array", 473 | "typenum", 474 | ] 475 | 476 | [[package]] 477 | name = "crypto-mac" 478 | version = "0.8.0" 479 | source = "registry+https://github.com/rust-lang/crates.io-index" 480 | checksum = "b584a330336237c1eecd3e94266efb216c56ed91225d634cb2991c5f3fd1aeab" 481 | dependencies = [ 482 | "generic-array", 483 | "subtle", 484 | ] 485 | 486 | [[package]] 487 | name = "curve25519-dalek" 488 | version = "3.2.1" 489 | source = "registry+https://github.com/rust-lang/crates.io-index" 490 | checksum = "90f9d052967f590a76e62eb387bd0bbb1b000182c3cefe5364db6b7211651bc0" 491 | dependencies = [ 492 | "byteorder", 493 | "digest 0.9.0", 494 | "rand_core 0.5.1", 495 | "subtle", 496 | "zeroize", 497 | ] 498 | 499 | [[package]] 500 | name = "digest" 501 | version = "0.9.0" 502 | source = "registry+https://github.com/rust-lang/crates.io-index" 503 | checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066" 504 | dependencies = [ 505 | "generic-array", 506 | ] 507 | 508 | [[package]] 509 | name = "digest" 510 | version = "0.10.3" 511 | source = "registry+https://github.com/rust-lang/crates.io-index" 512 | checksum = "f2fb860ca6fafa5552fb6d0e816a69c8e49f0908bf524e30a90d97c85892d506" 513 | dependencies = [ 514 | "block-buffer 0.10.2", 515 | "crypto-common", 516 | "subtle", 517 | ] 518 | 519 | [[package]] 520 | name = "either" 521 | version = "1.6.1" 522 | source = "registry+https://github.com/rust-lang/crates.io-index" 523 | checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457" 524 | 525 | [[package]] 526 | name = "env_logger" 527 | version = "0.9.0" 528 | source = "registry+https://github.com/rust-lang/crates.io-index" 529 | checksum = "0b2cf0344971ee6c64c31be0d530793fba457d322dfec2810c453d0ef228f9c3" 530 | 
dependencies = [ 531 | "atty", 532 | "humantime", 533 | "log", 534 | "regex", 535 | "termcolor", 536 | ] 537 | 538 | [[package]] 539 | name = "fastrand" 540 | version = "1.7.0" 541 | source = "registry+https://github.com/rust-lang/crates.io-index" 542 | checksum = "c3fcf0cee53519c866c09b5de1f6c56ff9d647101f81c1964fa632e148896cdf" 543 | dependencies = [ 544 | "instant", 545 | ] 546 | 547 | [[package]] 548 | name = "feature-probe" 549 | version = "0.1.1" 550 | source = "registry+https://github.com/rust-lang/crates.io-index" 551 | checksum = "835a3dc7d1ec9e75e2b5fb4ba75396837112d2060b03f7d43bc1897c7f7211da" 552 | 553 | [[package]] 554 | name = "fnv" 555 | version = "1.0.7" 556 | source = "registry+https://github.com/rust-lang/crates.io-index" 557 | checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" 558 | 559 | [[package]] 560 | name = "generic-array" 561 | version = "0.14.5" 562 | source = "registry+https://github.com/rust-lang/crates.io-index" 563 | checksum = "fd48d33ec7f05fbfa152300fdad764757cbded343c1aa1cff2fbaf4134851803" 564 | dependencies = [ 565 | "serde", 566 | "typenum", 567 | "version_check", 568 | ] 569 | 570 | [[package]] 571 | name = "getrandom" 572 | version = "0.1.16" 573 | source = "registry+https://github.com/rust-lang/crates.io-index" 574 | checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce" 575 | dependencies = [ 576 | "cfg-if", 577 | "js-sys", 578 | "libc", 579 | "wasi 0.9.0+wasi-snapshot-preview1", 580 | "wasm-bindgen", 581 | ] 582 | 583 | [[package]] 584 | name = "getrandom" 585 | version = "0.2.6" 586 | source = "registry+https://github.com/rust-lang/crates.io-index" 587 | checksum = "9be70c98951c83b8d2f8f60d7065fa6d5146873094452a1008da8c2f1e4205ad" 588 | dependencies = [ 589 | "cfg-if", 590 | "libc", 591 | "wasi 0.10.2+wasi-snapshot-preview1", 592 | ] 593 | 594 | [[package]] 595 | name = "govern" 596 | version = "0.5.6" 597 | source = "registry+https://github.com/rust-lang/crates.io-index" 598 | checksum = "ce51bec0d35865dc60cb1bf93fd8a922dd88fc0ddce8e12dbff8078719fa3bb1" 599 | dependencies = [ 600 | "anchor-lang", 601 | "anchor-spl", 602 | "num-traits", 603 | "smart-wallet", 604 | "vipers", 605 | ] 606 | 607 | [[package]] 608 | name = "hashbrown" 609 | version = "0.11.2" 610 | source = "registry+https://github.com/rust-lang/crates.io-index" 611 | checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e" 612 | dependencies = [ 613 | "ahash", 614 | ] 615 | 616 | [[package]] 617 | name = "heck" 618 | version = "0.3.3" 619 | source = "registry+https://github.com/rust-lang/crates.io-index" 620 | checksum = "6d621efb26863f0e9924c6ac577e8275e5e6b77455db64ffa6c65c904e9e132c" 621 | dependencies = [ 622 | "unicode-segmentation", 623 | ] 624 | 625 | [[package]] 626 | name = "hermit-abi" 627 | version = "0.1.19" 628 | source = "registry+https://github.com/rust-lang/crates.io-index" 629 | checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" 630 | dependencies = [ 631 | "libc", 632 | ] 633 | 634 | [[package]] 635 | name = "hex" 636 | version = "0.4.3" 637 | source = "registry+https://github.com/rust-lang/crates.io-index" 638 | checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" 639 | 640 | [[package]] 641 | name = "hmac" 642 | version = "0.8.1" 643 | source = "registry+https://github.com/rust-lang/crates.io-index" 644 | checksum = "126888268dcc288495a26bf004b38c5fdbb31682f992c84ceb046a1f0fe38840" 645 | dependencies = [ 646 | "crypto-mac", 647 | "digest 
0.9.0", 648 | ] 649 | 650 | [[package]] 651 | name = "hmac-drbg" 652 | version = "0.3.0" 653 | source = "registry+https://github.com/rust-lang/crates.io-index" 654 | checksum = "17ea0a1394df5b6574da6e0c1ade9e78868c9fb0a4e5ef4428e32da4676b85b1" 655 | dependencies = [ 656 | "digest 0.9.0", 657 | "generic-array", 658 | "hmac", 659 | ] 660 | 661 | [[package]] 662 | name = "humantime" 663 | version = "2.1.0" 664 | source = "registry+https://github.com/rust-lang/crates.io-index" 665 | checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" 666 | 667 | [[package]] 668 | name = "instant" 669 | version = "0.1.12" 670 | source = "registry+https://github.com/rust-lang/crates.io-index" 671 | checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" 672 | dependencies = [ 673 | "cfg-if", 674 | ] 675 | 676 | [[package]] 677 | name = "itertools" 678 | version = "0.10.3" 679 | source = "registry+https://github.com/rust-lang/crates.io-index" 680 | checksum = "a9a9d19fa1e79b6215ff29b9d6880b706147f16e9b1dbb1e4e5947b5b02bc5e3" 681 | dependencies = [ 682 | "either", 683 | ] 684 | 685 | [[package]] 686 | name = "itoa" 687 | version = "1.0.1" 688 | source = "registry+https://github.com/rust-lang/crates.io-index" 689 | checksum = "1aab8fc367588b89dcee83ab0fd66b72b50b72fa1904d7095045ace2b0c81c35" 690 | 691 | [[package]] 692 | name = "js-sys" 693 | version = "0.3.57" 694 | source = "registry+https://github.com/rust-lang/crates.io-index" 695 | checksum = "671a26f820db17c2a2750743f1dd03bafd15b98c9f30c7c2628c024c05d73397" 696 | dependencies = [ 697 | "wasm-bindgen", 698 | ] 699 | 700 | [[package]] 701 | name = "keccak" 702 | version = "0.1.0" 703 | source = "registry+https://github.com/rust-lang/crates.io-index" 704 | checksum = "67c21572b4949434e4fc1e1978b99c5f77064153c59d998bf13ecd96fb5ecba7" 705 | 706 | [[package]] 707 | name = "lazy_static" 708 | version = "1.4.0" 709 | source = "registry+https://github.com/rust-lang/crates.io-index" 710 | checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" 711 | 712 | [[package]] 713 | name = "libc" 714 | version = "0.2.123" 715 | source = "registry+https://github.com/rust-lang/crates.io-index" 716 | checksum = "cb691a747a7ab48abc15c5b42066eaafde10dc427e3b6ee2a1cf43db04c763bd" 717 | 718 | [[package]] 719 | name = "libsecp256k1" 720 | version = "0.6.0" 721 | source = "registry+https://github.com/rust-lang/crates.io-index" 722 | checksum = "c9d220bc1feda2ac231cb78c3d26f27676b8cf82c96971f7aeef3d0cf2797c73" 723 | dependencies = [ 724 | "arrayref", 725 | "base64 0.12.3", 726 | "digest 0.9.0", 727 | "hmac-drbg", 728 | "libsecp256k1-core", 729 | "libsecp256k1-gen-ecmult", 730 | "libsecp256k1-gen-genmult", 731 | "rand 0.7.3", 732 | "serde", 733 | "sha2", 734 | "typenum", 735 | ] 736 | 737 | [[package]] 738 | name = "libsecp256k1-core" 739 | version = "0.2.2" 740 | source = "registry+https://github.com/rust-lang/crates.io-index" 741 | checksum = "d0f6ab710cec28cef759c5f18671a27dae2a5f952cdaaee1d8e2908cb2478a80" 742 | dependencies = [ 743 | "crunchy", 744 | "digest 0.9.0", 745 | "subtle", 746 | ] 747 | 748 | [[package]] 749 | name = "libsecp256k1-gen-ecmult" 750 | version = "0.2.1" 751 | source = "registry+https://github.com/rust-lang/crates.io-index" 752 | checksum = "ccab96b584d38fac86a83f07e659f0deafd0253dc096dab5a36d53efe653c5c3" 753 | dependencies = [ 754 | "libsecp256k1-core", 755 | ] 756 | 757 | [[package]] 758 | name = "libsecp256k1-gen-genmult" 759 | version = "0.2.1" 760 | source = 
"registry+https://github.com/rust-lang/crates.io-index" 761 | checksum = "67abfe149395e3aa1c48a2beb32b068e2334402df8181f818d3aee2b304c4f5d" 762 | dependencies = [ 763 | "libsecp256k1-core", 764 | ] 765 | 766 | [[package]] 767 | name = "lock_api" 768 | version = "0.4.7" 769 | source = "registry+https://github.com/rust-lang/crates.io-index" 770 | checksum = "327fa5b6a6940e4699ec49a9beae1ea4845c6bab9314e4f84ac68742139d8c53" 771 | dependencies = [ 772 | "autocfg", 773 | "scopeguard", 774 | ] 775 | 776 | [[package]] 777 | name = "locked-voter" 778 | version = "0.5.6" 779 | source = "registry+https://github.com/rust-lang/crates.io-index" 780 | checksum = "a1e8274a94cd0f83d18884c9f93adf100e0700363b28814cd6dee23693c723fc" 781 | dependencies = [ 782 | "anchor-lang", 783 | "anchor-spl", 784 | "govern", 785 | "num-traits", 786 | "vipers", 787 | ] 788 | 789 | [[package]] 790 | name = "log" 791 | version = "0.4.16" 792 | source = "registry+https://github.com/rust-lang/crates.io-index" 793 | checksum = "6389c490849ff5bc16be905ae24bc913a9c8892e19b2341dbc175e14c341c2b8" 794 | dependencies = [ 795 | "cfg-if", 796 | ] 797 | 798 | [[package]] 799 | name = "memchr" 800 | version = "2.4.1" 801 | source = "registry+https://github.com/rust-lang/crates.io-index" 802 | checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a" 803 | 804 | [[package]] 805 | name = "memmap2" 806 | version = "0.5.3" 807 | source = "registry+https://github.com/rust-lang/crates.io-index" 808 | checksum = "057a3db23999c867821a7a59feb06a578fcb03685e983dff90daf9e7d24ac08f" 809 | dependencies = [ 810 | "libc", 811 | ] 812 | 813 | [[package]] 814 | name = "num-derive" 815 | version = "0.3.3" 816 | source = "registry+https://github.com/rust-lang/crates.io-index" 817 | checksum = "876a53fff98e03a936a674b29568b0e605f06b29372c2489ff4de23f1949743d" 818 | dependencies = [ 819 | "proc-macro2", 820 | "quote", 821 | "syn", 822 | ] 823 | 824 | [[package]] 825 | name = "num-traits" 826 | version = "0.2.14" 827 | source = "registry+https://github.com/rust-lang/crates.io-index" 828 | checksum = "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290" 829 | dependencies = [ 830 | "autocfg", 831 | ] 832 | 833 | [[package]] 834 | name = "num_enum" 835 | version = "0.5.7" 836 | source = "registry+https://github.com/rust-lang/crates.io-index" 837 | checksum = "cf5395665662ef45796a4ff5486c5d41d29e0c09640af4c5f17fd94ee2c119c9" 838 | dependencies = [ 839 | "num_enum_derive", 840 | ] 841 | 842 | [[package]] 843 | name = "num_enum_derive" 844 | version = "0.5.7" 845 | source = "registry+https://github.com/rust-lang/crates.io-index" 846 | checksum = "3b0498641e53dd6ac1a4f22547548caa6864cc4933784319cd1775271c5a46ce" 847 | dependencies = [ 848 | "proc-macro-crate 1.1.3", 849 | "proc-macro2", 850 | "quote", 851 | "syn", 852 | ] 853 | 854 | [[package]] 855 | name = "once_cell" 856 | version = "1.10.0" 857 | source = "registry+https://github.com/rust-lang/crates.io-index" 858 | checksum = "87f3e037eac156d1775da914196f0f37741a274155e34a0b7e427c35d2a2ecb9" 859 | 860 | [[package]] 861 | name = "opaque-debug" 862 | version = "0.3.0" 863 | source = "registry+https://github.com/rust-lang/crates.io-index" 864 | checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" 865 | 866 | [[package]] 867 | name = "parking_lot" 868 | version = "0.11.2" 869 | source = "registry+https://github.com/rust-lang/crates.io-index" 870 | checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" 871 | dependencies = [ 872 | 
"instant", 873 | "lock_api", 874 | "parking_lot_core", 875 | ] 876 | 877 | [[package]] 878 | name = "parking_lot_core" 879 | version = "0.8.5" 880 | source = "registry+https://github.com/rust-lang/crates.io-index" 881 | checksum = "d76e8e1493bcac0d2766c42737f34458f1c8c50c0d23bcb24ea953affb273216" 882 | dependencies = [ 883 | "cfg-if", 884 | "instant", 885 | "libc", 886 | "redox_syscall", 887 | "smallvec", 888 | "winapi", 889 | ] 890 | 891 | [[package]] 892 | name = "ppv-lite86" 893 | version = "0.2.16" 894 | source = "registry+https://github.com/rust-lang/crates.io-index" 895 | checksum = "eb9f9e6e233e5c4a35559a617bf40a4ec447db2e84c20b55a6f83167b7e57872" 896 | 897 | [[package]] 898 | name = "proc-macro-crate" 899 | version = "0.1.5" 900 | source = "registry+https://github.com/rust-lang/crates.io-index" 901 | checksum = "1d6ea3c4595b96363c13943497db34af4460fb474a95c43f4446ad341b8c9785" 902 | dependencies = [ 903 | "toml", 904 | ] 905 | 906 | [[package]] 907 | name = "proc-macro-crate" 908 | version = "1.1.3" 909 | source = "registry+https://github.com/rust-lang/crates.io-index" 910 | checksum = "e17d47ce914bf4de440332250b0edd23ce48c005f59fab39d3335866b114f11a" 911 | dependencies = [ 912 | "thiserror", 913 | "toml", 914 | ] 915 | 916 | [[package]] 917 | name = "proc-macro2" 918 | version = "1.0.37" 919 | source = "registry+https://github.com/rust-lang/crates.io-index" 920 | checksum = "ec757218438d5fda206afc041538b2f6d889286160d649a86a24d37e1235afd1" 921 | dependencies = [ 922 | "unicode-xid", 923 | ] 924 | 925 | [[package]] 926 | name = "proc-macro2-diagnostics" 927 | version = "0.9.1" 928 | source = "registry+https://github.com/rust-lang/crates.io-index" 929 | checksum = "4bf29726d67464d49fa6224a1d07936a8c08bb3fba727c7493f6cf1616fdaada" 930 | dependencies = [ 931 | "proc-macro2", 932 | "quote", 933 | "syn", 934 | "version_check", 935 | "yansi", 936 | ] 937 | 938 | [[package]] 939 | name = "proptest" 940 | version = "1.0.0" 941 | source = "registry+https://github.com/rust-lang/crates.io-index" 942 | checksum = "1e0d9cc07f18492d879586c92b485def06bc850da3118075cd45d50e9c95b0e5" 943 | dependencies = [ 944 | "bit-set", 945 | "bitflags", 946 | "byteorder", 947 | "lazy_static", 948 | "num-traits", 949 | "quick-error 2.0.1", 950 | "rand 0.8.5", 951 | "rand_chacha 0.3.1", 952 | "rand_xorshift", 953 | "regex-syntax", 954 | "rusty-fork", 955 | "tempfile", 956 | ] 957 | 958 | [[package]] 959 | name = "quick-error" 960 | version = "1.2.3" 961 | source = "registry+https://github.com/rust-lang/crates.io-index" 962 | checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" 963 | 964 | [[package]] 965 | name = "quick-error" 966 | version = "2.0.1" 967 | source = "registry+https://github.com/rust-lang/crates.io-index" 968 | checksum = "a993555f31e5a609f617c12db6250dedcac1b0a85076912c436e6fc9b2c8e6a3" 969 | 970 | [[package]] 971 | name = "quote" 972 | version = "1.0.18" 973 | source = "registry+https://github.com/rust-lang/crates.io-index" 974 | checksum = "a1feb54ed693b93a84e14094943b84b7c4eae204c512b7ccb95ab0c66d278ad1" 975 | dependencies = [ 976 | "proc-macro2", 977 | ] 978 | 979 | [[package]] 980 | name = "rand" 981 | version = "0.7.3" 982 | source = "registry+https://github.com/rust-lang/crates.io-index" 983 | checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" 984 | dependencies = [ 985 | "getrandom 0.1.16", 986 | "libc", 987 | "rand_chacha 0.2.2", 988 | "rand_core 0.5.1", 989 | "rand_hc", 990 | ] 991 | 992 | [[package]] 993 | name = "rand" 994 | version = 
"0.8.5" 995 | source = "registry+https://github.com/rust-lang/crates.io-index" 996 | checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" 997 | dependencies = [ 998 | "libc", 999 | "rand_chacha 0.3.1", 1000 | "rand_core 0.6.3", 1001 | ] 1002 | 1003 | [[package]] 1004 | name = "rand_chacha" 1005 | version = "0.2.2" 1006 | source = "registry+https://github.com/rust-lang/crates.io-index" 1007 | checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402" 1008 | dependencies = [ 1009 | "ppv-lite86", 1010 | "rand_core 0.5.1", 1011 | ] 1012 | 1013 | [[package]] 1014 | name = "rand_chacha" 1015 | version = "0.3.1" 1016 | source = "registry+https://github.com/rust-lang/crates.io-index" 1017 | checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" 1018 | dependencies = [ 1019 | "ppv-lite86", 1020 | "rand_core 0.6.3", 1021 | ] 1022 | 1023 | [[package]] 1024 | name = "rand_core" 1025 | version = "0.5.1" 1026 | source = "registry+https://github.com/rust-lang/crates.io-index" 1027 | checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" 1028 | dependencies = [ 1029 | "getrandom 0.1.16", 1030 | ] 1031 | 1032 | [[package]] 1033 | name = "rand_core" 1034 | version = "0.6.3" 1035 | source = "registry+https://github.com/rust-lang/crates.io-index" 1036 | checksum = "d34f1408f55294453790c48b2f1ebbb1c5b4b7563eb1f418bcfcfdbb06ebb4e7" 1037 | dependencies = [ 1038 | "getrandom 0.2.6", 1039 | ] 1040 | 1041 | [[package]] 1042 | name = "rand_hc" 1043 | version = "0.2.0" 1044 | source = "registry+https://github.com/rust-lang/crates.io-index" 1045 | checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" 1046 | dependencies = [ 1047 | "rand_core 0.5.1", 1048 | ] 1049 | 1050 | [[package]] 1051 | name = "rand_xorshift" 1052 | version = "0.3.0" 1053 | source = "registry+https://github.com/rust-lang/crates.io-index" 1054 | checksum = "d25bf25ec5ae4a3f1b92f929810509a2f53d7dca2f50b794ff57e3face536c8f" 1055 | dependencies = [ 1056 | "rand_core 0.6.3", 1057 | ] 1058 | 1059 | [[package]] 1060 | name = "redox_syscall" 1061 | version = "0.2.13" 1062 | source = "registry+https://github.com/rust-lang/crates.io-index" 1063 | checksum = "62f25bc4c7e55e0b0b7a1d43fb893f4fa1361d0abe38b9ce4f323c2adfe6ef42" 1064 | dependencies = [ 1065 | "bitflags", 1066 | ] 1067 | 1068 | [[package]] 1069 | name = "regex" 1070 | version = "1.5.5" 1071 | source = "registry+https://github.com/rust-lang/crates.io-index" 1072 | checksum = "1a11647b6b25ff05a515cb92c365cec08801e83423a235b51e231e1808747286" 1073 | dependencies = [ 1074 | "aho-corasick", 1075 | "memchr", 1076 | "regex-syntax", 1077 | ] 1078 | 1079 | [[package]] 1080 | name = "regex-syntax" 1081 | version = "0.6.25" 1082 | source = "registry+https://github.com/rust-lang/crates.io-index" 1083 | checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b" 1084 | 1085 | [[package]] 1086 | name = "remove_dir_all" 1087 | version = "0.5.3" 1088 | source = "registry+https://github.com/rust-lang/crates.io-index" 1089 | checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7" 1090 | dependencies = [ 1091 | "winapi", 1092 | ] 1093 | 1094 | [[package]] 1095 | name = "rustc_version" 1096 | version = "0.4.0" 1097 | source = "registry+https://github.com/rust-lang/crates.io-index" 1098 | checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" 1099 | dependencies = [ 1100 | "semver", 1101 | ] 1102 | 1103 | [[package]] 1104 | name = "rustversion" 
1105 | version = "1.0.6" 1106 | source = "registry+https://github.com/rust-lang/crates.io-index" 1107 | checksum = "f2cc38e8fa666e2de3c4aba7edeb5ffc5246c1c2ed0e3d17e560aeeba736b23f" 1108 | 1109 | [[package]] 1110 | name = "rusty-fork" 1111 | version = "0.3.0" 1112 | source = "registry+https://github.com/rust-lang/crates.io-index" 1113 | checksum = "cb3dcc6e454c328bb824492db107ab7c0ae8fcffe4ad210136ef014458c1bc4f" 1114 | dependencies = [ 1115 | "fnv", 1116 | "quick-error 1.2.3", 1117 | "tempfile", 1118 | "wait-timeout", 1119 | ] 1120 | 1121 | [[package]] 1122 | name = "ryu" 1123 | version = "1.0.9" 1124 | source = "registry+https://github.com/rust-lang/crates.io-index" 1125 | checksum = "73b4b750c782965c211b42f022f59af1fbceabdd026623714f104152f1ec149f" 1126 | 1127 | [[package]] 1128 | name = "scopeguard" 1129 | version = "1.1.0" 1130 | source = "registry+https://github.com/rust-lang/crates.io-index" 1131 | checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" 1132 | 1133 | [[package]] 1134 | name = "semver" 1135 | version = "1.0.7" 1136 | source = "registry+https://github.com/rust-lang/crates.io-index" 1137 | checksum = "d65bd28f48be7196d222d95b9243287f48d27aca604e08497513019ff0502cc4" 1138 | 1139 | [[package]] 1140 | name = "serde" 1141 | version = "1.0.136" 1142 | source = "registry+https://github.com/rust-lang/crates.io-index" 1143 | checksum = "ce31e24b01e1e524df96f1c2fdd054405f8d7376249a5110886fb4b658484789" 1144 | dependencies = [ 1145 | "serde_derive", 1146 | ] 1147 | 1148 | [[package]] 1149 | name = "serde_bytes" 1150 | version = "0.11.5" 1151 | source = "registry+https://github.com/rust-lang/crates.io-index" 1152 | checksum = "16ae07dd2f88a366f15bd0632ba725227018c69a1c8550a927324f8eb8368bb9" 1153 | dependencies = [ 1154 | "serde", 1155 | ] 1156 | 1157 | [[package]] 1158 | name = "serde_derive" 1159 | version = "1.0.136" 1160 | source = "registry+https://github.com/rust-lang/crates.io-index" 1161 | checksum = "08597e7152fcd306f41838ed3e37be9eaeed2b61c42e2117266a554fab4662f9" 1162 | dependencies = [ 1163 | "proc-macro2", 1164 | "quote", 1165 | "syn", 1166 | ] 1167 | 1168 | [[package]] 1169 | name = "serde_json" 1170 | version = "1.0.79" 1171 | source = "registry+https://github.com/rust-lang/crates.io-index" 1172 | checksum = "8e8d9fa5c3b304765ce1fd9c4c8a3de2c8db365a5b91be52f186efc675681d95" 1173 | dependencies = [ 1174 | "itoa", 1175 | "ryu", 1176 | "serde", 1177 | ] 1178 | 1179 | [[package]] 1180 | name = "sha2" 1181 | version = "0.9.9" 1182 | source = "registry+https://github.com/rust-lang/crates.io-index" 1183 | checksum = "4d58a1e1bf39749807d89cf2d98ac2dfa0ff1cb3faa38fbb64dd88ac8013d800" 1184 | dependencies = [ 1185 | "block-buffer 0.9.0", 1186 | "cfg-if", 1187 | "cpufeatures", 1188 | "digest 0.9.0", 1189 | "opaque-debug", 1190 | ] 1191 | 1192 | [[package]] 1193 | name = "sha3" 1194 | version = "0.9.1" 1195 | source = "registry+https://github.com/rust-lang/crates.io-index" 1196 | checksum = "f81199417d4e5de3f04b1e871023acea7389672c4135918f05aa9cbf2f2fa809" 1197 | dependencies = [ 1198 | "block-buffer 0.9.0", 1199 | "digest 0.9.0", 1200 | "keccak", 1201 | "opaque-debug", 1202 | ] 1203 | 1204 | [[package]] 1205 | name = "smallvec" 1206 | version = "1.8.0" 1207 | source = "registry+https://github.com/rust-lang/crates.io-index" 1208 | checksum = "f2dd574626839106c320a323308629dcb1acfc96e32a8cba364ddc61ac23ee83" 1209 | 1210 | [[package]] 1211 | name = "smart-wallet" 1212 | version = "0.10.3" 1213 | source = 
"registry+https://github.com/rust-lang/crates.io-index" 1214 | checksum = "19f89a56ba63872399d3eac135589d9fe33ba80e4fb6687aba8f5947f85e499f" 1215 | dependencies = [ 1216 | "anchor-lang", 1217 | "vipers", 1218 | ] 1219 | 1220 | [[package]] 1221 | name = "snapshots" 1222 | version = "0.2.8" 1223 | dependencies = [ 1224 | "anchor-lang", 1225 | "anchor-spl", 1226 | "locked-voter", 1227 | "num-traits", 1228 | "proptest", 1229 | "snapshots-math", 1230 | "u128", 1231 | "vipers", 1232 | ] 1233 | 1234 | [[package]] 1235 | name = "snapshots-math" 1236 | version = "0.2.8" 1237 | dependencies = [ 1238 | "num-traits", 1239 | ] 1240 | 1241 | [[package]] 1242 | name = "solana-frozen-abi" 1243 | version = "1.9.16" 1244 | source = "registry+https://github.com/rust-lang/crates.io-index" 1245 | checksum = "c8db0d37f7c345c6417898e675d218d76a1ce6d3bd57584d7f463d48badf1541" 1246 | dependencies = [ 1247 | "bs58 0.4.0", 1248 | "bv", 1249 | "generic-array", 1250 | "log", 1251 | "memmap2", 1252 | "rustc_version", 1253 | "serde", 1254 | "serde_derive", 1255 | "sha2", 1256 | "solana-frozen-abi-macro", 1257 | "solana-logger", 1258 | "thiserror", 1259 | ] 1260 | 1261 | [[package]] 1262 | name = "solana-frozen-abi-macro" 1263 | version = "1.9.16" 1264 | source = "registry+https://github.com/rust-lang/crates.io-index" 1265 | checksum = "023560984c7f16a53e280866c177d1ad45225614356224c1ade671de16424466" 1266 | dependencies = [ 1267 | "proc-macro2", 1268 | "quote", 1269 | "rustc_version", 1270 | "syn", 1271 | ] 1272 | 1273 | [[package]] 1274 | name = "solana-logger" 1275 | version = "1.9.16" 1276 | source = "registry+https://github.com/rust-lang/crates.io-index" 1277 | checksum = "57cb0a4ef4dd740397addf5fa50d9dff572371fd47df2bdecc5fb530546490e2" 1278 | dependencies = [ 1279 | "env_logger", 1280 | "lazy_static", 1281 | "log", 1282 | ] 1283 | 1284 | [[package]] 1285 | name = "solana-program" 1286 | version = "1.9.16" 1287 | source = "registry+https://github.com/rust-lang/crates.io-index" 1288 | checksum = "9654224bf5d4c6d80f68c3c996683b389693af1c69103af667c683180bad6c5e" 1289 | dependencies = [ 1290 | "base64 0.13.0", 1291 | "bincode", 1292 | "bitflags", 1293 | "blake3", 1294 | "borsh", 1295 | "borsh-derive", 1296 | "bs58 0.4.0", 1297 | "bv", 1298 | "bytemuck", 1299 | "console_error_panic_hook", 1300 | "console_log", 1301 | "curve25519-dalek", 1302 | "getrandom 0.1.16", 1303 | "itertools", 1304 | "js-sys", 1305 | "lazy_static", 1306 | "libsecp256k1", 1307 | "log", 1308 | "num-derive", 1309 | "num-traits", 1310 | "parking_lot", 1311 | "rand 0.7.3", 1312 | "rustc_version", 1313 | "rustversion", 1314 | "serde", 1315 | "serde_bytes", 1316 | "serde_derive", 1317 | "sha2", 1318 | "sha3", 1319 | "solana-frozen-abi", 1320 | "solana-frozen-abi-macro", 1321 | "solana-logger", 1322 | "solana-sdk-macro", 1323 | "thiserror", 1324 | "wasm-bindgen", 1325 | ] 1326 | 1327 | [[package]] 1328 | name = "solana-sdk-macro" 1329 | version = "1.9.16" 1330 | source = "registry+https://github.com/rust-lang/crates.io-index" 1331 | checksum = "bac8cb60eb2e4c85d76ea1f0429dfc0e8b4ba7834e9d69695bb3164f3966e16d" 1332 | dependencies = [ 1333 | "bs58 0.4.0", 1334 | "proc-macro2", 1335 | "quote", 1336 | "rustversion", 1337 | "syn", 1338 | ] 1339 | 1340 | [[package]] 1341 | name = "spl-associated-token-account" 1342 | version = "1.0.3" 1343 | source = "registry+https://github.com/rust-lang/crates.io-index" 1344 | checksum = "393e2240d521c3dd770806bff25c2c00d761ac962be106e14e22dd912007f428" 1345 | dependencies = [ 1346 | "solana-program", 1347 | "spl-token", 
1348 | ] 1349 | 1350 | [[package]] 1351 | name = "spl-token" 1352 | version = "3.3.0" 1353 | source = "registry+https://github.com/rust-lang/crates.io-index" 1354 | checksum = "0cc67166ef99d10c18cb5e9c208901e6d8255c6513bb1f877977eba48e6cc4fb" 1355 | dependencies = [ 1356 | "arrayref", 1357 | "num-derive", 1358 | "num-traits", 1359 | "num_enum", 1360 | "solana-program", 1361 | "thiserror", 1362 | ] 1363 | 1364 | [[package]] 1365 | name = "static_assertions" 1366 | version = "1.1.0" 1367 | source = "registry+https://github.com/rust-lang/crates.io-index" 1368 | checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" 1369 | 1370 | [[package]] 1371 | name = "subtle" 1372 | version = "2.4.1" 1373 | source = "registry+https://github.com/rust-lang/crates.io-index" 1374 | checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601" 1375 | 1376 | [[package]] 1377 | name = "syn" 1378 | version = "1.0.91" 1379 | source = "registry+https://github.com/rust-lang/crates.io-index" 1380 | checksum = "b683b2b825c8eef438b77c36a06dc262294da3d5a5813fac20da149241dcd44d" 1381 | dependencies = [ 1382 | "proc-macro2", 1383 | "quote", 1384 | "unicode-xid", 1385 | ] 1386 | 1387 | [[package]] 1388 | name = "tempfile" 1389 | version = "3.3.0" 1390 | source = "registry+https://github.com/rust-lang/crates.io-index" 1391 | checksum = "5cdb1ef4eaeeaddc8fbd371e5017057064af0911902ef36b39801f67cc6d79e4" 1392 | dependencies = [ 1393 | "cfg-if", 1394 | "fastrand", 1395 | "libc", 1396 | "redox_syscall", 1397 | "remove_dir_all", 1398 | "winapi", 1399 | ] 1400 | 1401 | [[package]] 1402 | name = "termcolor" 1403 | version = "1.1.3" 1404 | source = "registry+https://github.com/rust-lang/crates.io-index" 1405 | checksum = "bab24d30b911b2376f3a13cc2cd443142f0c81dda04c118693e35b3835757755" 1406 | dependencies = [ 1407 | "winapi-util", 1408 | ] 1409 | 1410 | [[package]] 1411 | name = "thiserror" 1412 | version = "1.0.30" 1413 | source = "registry+https://github.com/rust-lang/crates.io-index" 1414 | checksum = "854babe52e4df1653706b98fcfc05843010039b406875930a70e4d9644e5c417" 1415 | dependencies = [ 1416 | "thiserror-impl", 1417 | ] 1418 | 1419 | [[package]] 1420 | name = "thiserror-impl" 1421 | version = "1.0.30" 1422 | source = "registry+https://github.com/rust-lang/crates.io-index" 1423 | checksum = "aa32fd3f627f367fe16f893e2597ae3c05020f8bba2666a4e6ea73d377e5714b" 1424 | dependencies = [ 1425 | "proc-macro2", 1426 | "quote", 1427 | "syn", 1428 | ] 1429 | 1430 | [[package]] 1431 | name = "toml" 1432 | version = "0.5.9" 1433 | source = "registry+https://github.com/rust-lang/crates.io-index" 1434 | checksum = "8d82e1a7758622a465f8cee077614c73484dac5b836c02ff6a40d5d1010324d7" 1435 | dependencies = [ 1436 | "serde", 1437 | ] 1438 | 1439 | [[package]] 1440 | name = "typenum" 1441 | version = "1.15.0" 1442 | source = "registry+https://github.com/rust-lang/crates.io-index" 1443 | checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987" 1444 | 1445 | [[package]] 1446 | name = "u128" 1447 | version = "0.1.0" 1448 | source = "registry+https://github.com/rust-lang/crates.io-index" 1449 | checksum = "ba924bf1e6b59ea8f1bbb8c7c5998fbc22c6a544e34446ee336f693ea86ccef3" 1450 | dependencies = [ 1451 | "uint", 1452 | ] 1453 | 1454 | [[package]] 1455 | name = "uint" 1456 | version = "0.9.1" 1457 | source = "registry+https://github.com/rust-lang/crates.io-index" 1458 | checksum = "6470ab50f482bde894a037a57064480a246dbfdd5960bd65a44824693f08da5f" 1459 | dependencies = [ 1460 | "byteorder", 1461 | 
"crunchy", 1462 | "hex", 1463 | "static_assertions", 1464 | ] 1465 | 1466 | [[package]] 1467 | name = "unicode-segmentation" 1468 | version = "1.9.0" 1469 | source = "registry+https://github.com/rust-lang/crates.io-index" 1470 | checksum = "7e8820f5d777f6224dc4be3632222971ac30164d4a258d595640799554ebfd99" 1471 | 1472 | [[package]] 1473 | name = "unicode-xid" 1474 | version = "0.2.2" 1475 | source = "registry+https://github.com/rust-lang/crates.io-index" 1476 | checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3" 1477 | 1478 | [[package]] 1479 | name = "version_check" 1480 | version = "0.9.4" 1481 | source = "registry+https://github.com/rust-lang/crates.io-index" 1482 | checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" 1483 | 1484 | [[package]] 1485 | name = "vipers" 1486 | version = "2.0.3" 1487 | source = "registry+https://github.com/rust-lang/crates.io-index" 1488 | checksum = "889d3e2503cb5381bb87e4ef252427caa61023c5bedf837070fdf2d77dd83d03" 1489 | dependencies = [ 1490 | "anchor-lang", 1491 | "anchor-spl", 1492 | ] 1493 | 1494 | [[package]] 1495 | name = "wait-timeout" 1496 | version = "0.2.0" 1497 | source = "registry+https://github.com/rust-lang/crates.io-index" 1498 | checksum = "9f200f5b12eb75f8c1ed65abd4b2db8a6e1b138a20de009dacee265a2498f3f6" 1499 | dependencies = [ 1500 | "libc", 1501 | ] 1502 | 1503 | [[package]] 1504 | name = "wasi" 1505 | version = "0.9.0+wasi-snapshot-preview1" 1506 | source = "registry+https://github.com/rust-lang/crates.io-index" 1507 | checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" 1508 | 1509 | [[package]] 1510 | name = "wasi" 1511 | version = "0.10.2+wasi-snapshot-preview1" 1512 | source = "registry+https://github.com/rust-lang/crates.io-index" 1513 | checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6" 1514 | 1515 | [[package]] 1516 | name = "wasm-bindgen" 1517 | version = "0.2.80" 1518 | source = "registry+https://github.com/rust-lang/crates.io-index" 1519 | checksum = "27370197c907c55e3f1a9fbe26f44e937fe6451368324e009cba39e139dc08ad" 1520 | dependencies = [ 1521 | "cfg-if", 1522 | "wasm-bindgen-macro", 1523 | ] 1524 | 1525 | [[package]] 1526 | name = "wasm-bindgen-backend" 1527 | version = "0.2.80" 1528 | source = "registry+https://github.com/rust-lang/crates.io-index" 1529 | checksum = "53e04185bfa3a779273da532f5025e33398409573f348985af9a1cbf3774d3f4" 1530 | dependencies = [ 1531 | "bumpalo", 1532 | "lazy_static", 1533 | "log", 1534 | "proc-macro2", 1535 | "quote", 1536 | "syn", 1537 | "wasm-bindgen-shared", 1538 | ] 1539 | 1540 | [[package]] 1541 | name = "wasm-bindgen-macro" 1542 | version = "0.2.80" 1543 | source = "registry+https://github.com/rust-lang/crates.io-index" 1544 | checksum = "17cae7ff784d7e83a2fe7611cfe766ecf034111b49deb850a3dc7699c08251f5" 1545 | dependencies = [ 1546 | "quote", 1547 | "wasm-bindgen-macro-support", 1548 | ] 1549 | 1550 | [[package]] 1551 | name = "wasm-bindgen-macro-support" 1552 | version = "0.2.80" 1553 | source = "registry+https://github.com/rust-lang/crates.io-index" 1554 | checksum = "99ec0dc7a4756fffc231aab1b9f2f578d23cd391390ab27f952ae0c9b3ece20b" 1555 | dependencies = [ 1556 | "proc-macro2", 1557 | "quote", 1558 | "syn", 1559 | "wasm-bindgen-backend", 1560 | "wasm-bindgen-shared", 1561 | ] 1562 | 1563 | [[package]] 1564 | name = "wasm-bindgen-shared" 1565 | version = "0.2.80" 1566 | source = "registry+https://github.com/rust-lang/crates.io-index" 1567 | checksum = 
"d554b7f530dee5964d9a9468d95c1f8b8acae4f282807e7d27d4b03099a46744" 1568 | 1569 | [[package]] 1570 | name = "web-sys" 1571 | version = "0.3.57" 1572 | source = "registry+https://github.com/rust-lang/crates.io-index" 1573 | checksum = "7b17e741662c70c8bd24ac5c5b18de314a2c26c32bf8346ee1e6f53de919c283" 1574 | dependencies = [ 1575 | "js-sys", 1576 | "wasm-bindgen", 1577 | ] 1578 | 1579 | [[package]] 1580 | name = "winapi" 1581 | version = "0.3.9" 1582 | source = "registry+https://github.com/rust-lang/crates.io-index" 1583 | checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" 1584 | dependencies = [ 1585 | "winapi-i686-pc-windows-gnu", 1586 | "winapi-x86_64-pc-windows-gnu", 1587 | ] 1588 | 1589 | [[package]] 1590 | name = "winapi-i686-pc-windows-gnu" 1591 | version = "0.4.0" 1592 | source = "registry+https://github.com/rust-lang/crates.io-index" 1593 | checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" 1594 | 1595 | [[package]] 1596 | name = "winapi-util" 1597 | version = "0.1.5" 1598 | source = "registry+https://github.com/rust-lang/crates.io-index" 1599 | checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" 1600 | dependencies = [ 1601 | "winapi", 1602 | ] 1603 | 1604 | [[package]] 1605 | name = "winapi-x86_64-pc-windows-gnu" 1606 | version = "0.4.0" 1607 | source = "registry+https://github.com/rust-lang/crates.io-index" 1608 | checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" 1609 | 1610 | [[package]] 1611 | name = "yansi" 1612 | version = "0.5.1" 1613 | source = "registry+https://github.com/rust-lang/crates.io-index" 1614 | checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec" 1615 | 1616 | [[package]] 1617 | name = "zeroize" 1618 | version = "1.3.0" 1619 | source = "registry+https://github.com/rust-lang/crates.io-index" 1620 | checksum = "4756f7db3f7b5574938c3eb1c117038b8e07f95ee6718c0efad4ac21508f1efd" 1621 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | members = ["programs/*", "programs/snapshots/math"] 3 | 4 | [profile.release] 5 | lto = "fat" 6 | codegen-units = 1 7 | 8 | [profile.release.build-override] 9 | opt-level = 3 10 | incremental = false 11 | codegen-units = 1 12 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | GNU AFFERO GENERAL PUBLIC LICENSE 2 | Version 3, 19 November 2007 3 | 4 | Copyright (C) 2007 Free Software Foundation, Inc. 5 | Everyone is permitted to copy and distribute verbatim copies 6 | of this license document, but changing it is not allowed. 7 | 8 | Preamble 9 | 10 | The GNU Affero General Public License is a free, copyleft license for 11 | software and other kinds of works, specifically designed to ensure 12 | cooperation with the community in the case of network server software. 13 | 14 | The licenses for most software and other practical works are designed 15 | to take away your freedom to share and change the works. By contrast, 16 | our General Public Licenses are intended to guarantee your freedom to 17 | share and change all versions of a program--to make sure it remains free 18 | software for all its users. 19 | 20 | When we speak of free software, we are referring to freedom, not 21 | price. 
Our General Public Licenses are designed to make sure that you 22 | have the freedom to distribute copies of free software (and charge for 23 | them if you wish), that you receive source code or can get it if you 24 | want it, that you can change the software or use pieces of it in new 25 | free programs, and that you know you can do these things. 26 | 27 | Developers that use our General Public Licenses protect your rights 28 | with two steps: (1) assert copyright on the software, and (2) offer 29 | you this License which gives you legal permission to copy, distribute 30 | and/or modify the software. 31 | 32 | A secondary benefit of defending all users' freedom is that 33 | improvements made in alternate versions of the program, if they 34 | receive widespread use, become available for other developers to 35 | incorporate. Many developers of free software are heartened and 36 | encouraged by the resulting cooperation. However, in the case of 37 | software used on network servers, this result may fail to come about. 38 | The GNU General Public License permits making a modified version and 39 | letting the public access it on a server without ever releasing its 40 | source code to the public. 41 | 42 | The GNU Affero General Public License is designed specifically to 43 | ensure that, in such cases, the modified source code becomes available 44 | to the community. It requires the operator of a network server to 45 | provide the source code of the modified version running there to the 46 | users of that server. Therefore, public use of a modified version, on 47 | a publicly accessible server, gives the public access to the source 48 | code of the modified version. 49 | 50 | An older license, called the Affero General Public License and 51 | published by Affero, was designed to accomplish similar goals. This is 52 | a different license, not a version of the Affero GPL, but Affero has 53 | released a new version of the Affero GPL which permits relicensing under 54 | this license. 55 | 56 | The precise terms and conditions for copying, distribution and 57 | modification follow. 58 | 59 | TERMS AND CONDITIONS 60 | 61 | 0. Definitions. 62 | 63 | "This License" refers to version 3 of the GNU Affero General Public License. 64 | 65 | "Copyright" also means copyright-like laws that apply to other kinds of 66 | works, such as semiconductor masks. 67 | 68 | "The Program" refers to any copyrightable work licensed under this 69 | License. Each licensee is addressed as "you". "Licensees" and 70 | "recipients" may be individuals or organizations. 71 | 72 | To "modify" a work means to copy from or adapt all or part of the work 73 | in a fashion requiring copyright permission, other than the making of an 74 | exact copy. The resulting work is called a "modified version" of the 75 | earlier work or a work "based on" the earlier work. 76 | 77 | A "covered work" means either the unmodified Program or a work based 78 | on the Program. 79 | 80 | To "propagate" a work means to do anything with it that, without 81 | permission, would make you directly or secondarily liable for 82 | infringement under applicable copyright law, except executing it on a 83 | computer or modifying a private copy. Propagation includes copying, 84 | distribution (with or without modification), making available to the 85 | public, and in some countries other activities as well. 86 | 87 | To "convey" a work means any kind of propagation that enables other 88 | parties to make or receive copies. 
Mere interaction with a user through 89 | a computer network, with no transfer of a copy, is not conveying. 90 | 91 | An interactive user interface displays "Appropriate Legal Notices" 92 | to the extent that it includes a convenient and prominently visible 93 | feature that (1) displays an appropriate copyright notice, and (2) 94 | tells the user that there is no warranty for the work (except to the 95 | extent that warranties are provided), that licensees may convey the 96 | work under this License, and how to view a copy of this License. If 97 | the interface presents a list of user commands or options, such as a 98 | menu, a prominent item in the list meets this criterion. 99 | 100 | 1. Source Code. 101 | 102 | The "source code" for a work means the preferred form of the work 103 | for making modifications to it. "Object code" means any non-source 104 | form of a work. 105 | 106 | A "Standard Interface" means an interface that either is an official 107 | standard defined by a recognized standards body, or, in the case of 108 | interfaces specified for a particular programming language, one that 109 | is widely used among developers working in that language. 110 | 111 | The "System Libraries" of an executable work include anything, other 112 | than the work as a whole, that (a) is included in the normal form of 113 | packaging a Major Component, but which is not part of that Major 114 | Component, and (b) serves only to enable use of the work with that 115 | Major Component, or to implement a Standard Interface for which an 116 | implementation is available to the public in source code form. A 117 | "Major Component", in this context, means a major essential component 118 | (kernel, window system, and so on) of the specific operating system 119 | (if any) on which the executable work runs, or a compiler used to 120 | produce the work, or an object code interpreter used to run it. 121 | 122 | The "Corresponding Source" for a work in object code form means all 123 | the source code needed to generate, install, and (for an executable 124 | work) run the object code and to modify the work, including scripts to 125 | control those activities. However, it does not include the work's 126 | System Libraries, or general-purpose tools or generally available free 127 | programs which are used unmodified in performing those activities but 128 | which are not part of the work. For example, Corresponding Source 129 | includes interface definition files associated with source files for 130 | the work, and the source code for shared libraries and dynamically 131 | linked subprograms that the work is specifically designed to require, 132 | such as by intimate data communication or control flow between those 133 | subprograms and other parts of the work. 134 | 135 | The Corresponding Source need not include anything that users 136 | can regenerate automatically from other parts of the Corresponding 137 | Source. 138 | 139 | The Corresponding Source for a work in source code form is that 140 | same work. 141 | 142 | 2. Basic Permissions. 143 | 144 | All rights granted under this License are granted for the term of 145 | copyright on the Program, and are irrevocable provided the stated 146 | conditions are met. This License explicitly affirms your unlimited 147 | permission to run the unmodified Program. The output from running a 148 | covered work is covered by this License only if the output, given its 149 | content, constitutes a covered work. 
This License acknowledges your 150 | rights of fair use or other equivalent, as provided by copyright law. 151 | 152 | You may make, run and propagate covered works that you do not 153 | convey, without conditions so long as your license otherwise remains 154 | in force. You may convey covered works to others for the sole purpose 155 | of having them make modifications exclusively for you, or provide you 156 | with facilities for running those works, provided that you comply with 157 | the terms of this License in conveying all material for which you do 158 | not control copyright. Those thus making or running the covered works 159 | for you must do so exclusively on your behalf, under your direction 160 | and control, on terms that prohibit them from making any copies of 161 | your copyrighted material outside their relationship with you. 162 | 163 | Conveying under any other circumstances is permitted solely under 164 | the conditions stated below. Sublicensing is not allowed; section 10 165 | makes it unnecessary. 166 | 167 | 3. Protecting Users' Legal Rights From Anti-Circumvention Law. 168 | 169 | No covered work shall be deemed part of an effective technological 170 | measure under any applicable law fulfilling obligations under article 171 | 11 of the WIPO copyright treaty adopted on 20 December 1996, or 172 | similar laws prohibiting or restricting circumvention of such 173 | measures. 174 | 175 | When you convey a covered work, you waive any legal power to forbid 176 | circumvention of technological measures to the extent such circumvention 177 | is effected by exercising rights under this License with respect to 178 | the covered work, and you disclaim any intention to limit operation or 179 | modification of the work as a means of enforcing, against the work's 180 | users, your or third parties' legal rights to forbid circumvention of 181 | technological measures. 182 | 183 | 4. Conveying Verbatim Copies. 184 | 185 | You may convey verbatim copies of the Program's source code as you 186 | receive it, in any medium, provided that you conspicuously and 187 | appropriately publish on each copy an appropriate copyright notice; 188 | keep intact all notices stating that this License and any 189 | non-permissive terms added in accord with section 7 apply to the code; 190 | keep intact all notices of the absence of any warranty; and give all 191 | recipients a copy of this License along with the Program. 192 | 193 | You may charge any price or no price for each copy that you convey, 194 | and you may offer support or warranty protection for a fee. 195 | 196 | 5. Conveying Modified Source Versions. 197 | 198 | You may convey a work based on the Program, or the modifications to 199 | produce it from the Program, in the form of source code under the 200 | terms of section 4, provided that you also meet all of these conditions: 201 | 202 | a) The work must carry prominent notices stating that you modified 203 | it, and giving a relevant date. 204 | 205 | b) The work must carry prominent notices stating that it is 206 | released under this License and any conditions added under section 207 | 7. This requirement modifies the requirement in section 4 to 208 | "keep intact all notices". 209 | 210 | c) You must license the entire work, as a whole, under this 211 | License to anyone who comes into possession of a copy. 
This 212 | License will therefore apply, along with any applicable section 7 213 | additional terms, to the whole of the work, and all its parts, 214 | regardless of how they are packaged. This License gives no 215 | permission to license the work in any other way, but it does not 216 | invalidate such permission if you have separately received it. 217 | 218 | d) If the work has interactive user interfaces, each must display 219 | Appropriate Legal Notices; however, if the Program has interactive 220 | interfaces that do not display Appropriate Legal Notices, your 221 | work need not make them do so. 222 | 223 | A compilation of a covered work with other separate and independent 224 | works, which are not by their nature extensions of the covered work, 225 | and which are not combined with it such as to form a larger program, 226 | in or on a volume of a storage or distribution medium, is called an 227 | "aggregate" if the compilation and its resulting copyright are not 228 | used to limit the access or legal rights of the compilation's users 229 | beyond what the individual works permit. Inclusion of a covered work 230 | in an aggregate does not cause this License to apply to the other 231 | parts of the aggregate. 232 | 233 | 6. Conveying Non-Source Forms. 234 | 235 | You may convey a covered work in object code form under the terms 236 | of sections 4 and 5, provided that you also convey the 237 | machine-readable Corresponding Source under the terms of this License, 238 | in one of these ways: 239 | 240 | a) Convey the object code in, or embodied in, a physical product 241 | (including a physical distribution medium), accompanied by the 242 | Corresponding Source fixed on a durable physical medium 243 | customarily used for software interchange. 244 | 245 | b) Convey the object code in, or embodied in, a physical product 246 | (including a physical distribution medium), accompanied by a 247 | written offer, valid for at least three years and valid for as 248 | long as you offer spare parts or customer support for that product 249 | model, to give anyone who possesses the object code either (1) a 250 | copy of the Corresponding Source for all the software in the 251 | product that is covered by this License, on a durable physical 252 | medium customarily used for software interchange, for a price no 253 | more than your reasonable cost of physically performing this 254 | conveying of source, or (2) access to copy the 255 | Corresponding Source from a network server at no charge. 256 | 257 | c) Convey individual copies of the object code with a copy of the 258 | written offer to provide the Corresponding Source. This 259 | alternative is allowed only occasionally and noncommercially, and 260 | only if you received the object code with such an offer, in accord 261 | with subsection 6b. 262 | 263 | d) Convey the object code by offering access from a designated 264 | place (gratis or for a charge), and offer equivalent access to the 265 | Corresponding Source in the same way through the same place at no 266 | further charge. You need not require recipients to copy the 267 | Corresponding Source along with the object code. If the place to 268 | copy the object code is a network server, the Corresponding Source 269 | may be on a different server (operated by you or a third party) 270 | that supports equivalent copying facilities, provided you maintain 271 | clear directions next to the object code saying where to find the 272 | Corresponding Source. 
Regardless of what server hosts the 273 | Corresponding Source, you remain obligated to ensure that it is 274 | available for as long as needed to satisfy these requirements. 275 | 276 | e) Convey the object code using peer-to-peer transmission, provided 277 | you inform other peers where the object code and Corresponding 278 | Source of the work are being offered to the general public at no 279 | charge under subsection 6d. 280 | 281 | A separable portion of the object code, whose source code is excluded 282 | from the Corresponding Source as a System Library, need not be 283 | included in conveying the object code work. 284 | 285 | A "User Product" is either (1) a "consumer product", which means any 286 | tangible personal property which is normally used for personal, family, 287 | or household purposes, or (2) anything designed or sold for incorporation 288 | into a dwelling. In determining whether a product is a consumer product, 289 | doubtful cases shall be resolved in favor of coverage. For a particular 290 | product received by a particular user, "normally used" refers to a 291 | typical or common use of that class of product, regardless of the status 292 | of the particular user or of the way in which the particular user 293 | actually uses, or expects or is expected to use, the product. A product 294 | is a consumer product regardless of whether the product has substantial 295 | commercial, industrial or non-consumer uses, unless such uses represent 296 | the only significant mode of use of the product. 297 | 298 | "Installation Information" for a User Product means any methods, 299 | procedures, authorization keys, or other information required to install 300 | and execute modified versions of a covered work in that User Product from 301 | a modified version of its Corresponding Source. The information must 302 | suffice to ensure that the continued functioning of the modified object 303 | code is in no case prevented or interfered with solely because 304 | modification has been made. 305 | 306 | If you convey an object code work under this section in, or with, or 307 | specifically for use in, a User Product, and the conveying occurs as 308 | part of a transaction in which the right of possession and use of the 309 | User Product is transferred to the recipient in perpetuity or for a 310 | fixed term (regardless of how the transaction is characterized), the 311 | Corresponding Source conveyed under this section must be accompanied 312 | by the Installation Information. But this requirement does not apply 313 | if neither you nor any third party retains the ability to install 314 | modified object code on the User Product (for example, the work has 315 | been installed in ROM). 316 | 317 | The requirement to provide Installation Information does not include a 318 | requirement to continue to provide support service, warranty, or updates 319 | for a work that has been modified or installed by the recipient, or for 320 | the User Product in which it has been modified or installed. Access to a 321 | network may be denied when the modification itself materially and 322 | adversely affects the operation of the network or violates the rules and 323 | protocols for communication across the network. 
324 | 325 | Corresponding Source conveyed, and Installation Information provided, 326 | in accord with this section must be in a format that is publicly 327 | documented (and with an implementation available to the public in 328 | source code form), and must require no special password or key for 329 | unpacking, reading or copying. 330 | 331 | 7. Additional Terms. 332 | 333 | "Additional permissions" are terms that supplement the terms of this 334 | License by making exceptions from one or more of its conditions. 335 | Additional permissions that are applicable to the entire Program shall 336 | be treated as though they were included in this License, to the extent 337 | that they are valid under applicable law. If additional permissions 338 | apply only to part of the Program, that part may be used separately 339 | under those permissions, but the entire Program remains governed by 340 | this License without regard to the additional permissions. 341 | 342 | When you convey a copy of a covered work, you may at your option 343 | remove any additional permissions from that copy, or from any part of 344 | it. (Additional permissions may be written to require their own 345 | removal in certain cases when you modify the work.) You may place 346 | additional permissions on material, added by you to a covered work, 347 | for which you have or can give appropriate copyright permission. 348 | 349 | Notwithstanding any other provision of this License, for material you 350 | add to a covered work, you may (if authorized by the copyright holders of 351 | that material) supplement the terms of this License with terms: 352 | 353 | a) Disclaiming warranty or limiting liability differently from the 354 | terms of sections 15 and 16 of this License; or 355 | 356 | b) Requiring preservation of specified reasonable legal notices or 357 | author attributions in that material or in the Appropriate Legal 358 | Notices displayed by works containing it; or 359 | 360 | c) Prohibiting misrepresentation of the origin of that material, or 361 | requiring that modified versions of such material be marked in 362 | reasonable ways as different from the original version; or 363 | 364 | d) Limiting the use for publicity purposes of names of licensors or 365 | authors of the material; or 366 | 367 | e) Declining to grant rights under trademark law for use of some 368 | trade names, trademarks, or service marks; or 369 | 370 | f) Requiring indemnification of licensors and authors of that 371 | material by anyone who conveys the material (or modified versions of 372 | it) with contractual assumptions of liability to the recipient, for 373 | any liability that these contractual assumptions directly impose on 374 | those licensors and authors. 375 | 376 | All other non-permissive additional terms are considered "further 377 | restrictions" within the meaning of section 10. If the Program as you 378 | received it, or any part of it, contains a notice stating that it is 379 | governed by this License along with a term that is a further 380 | restriction, you may remove that term. If a license document contains 381 | a further restriction but permits relicensing or conveying under this 382 | License, you may add to a covered work material governed by the terms 383 | of that license document, provided that the further restriction does 384 | not survive such relicensing or conveying. 
385 | 386 | If you add terms to a covered work in accord with this section, you 387 | must place, in the relevant source files, a statement of the 388 | additional terms that apply to those files, or a notice indicating 389 | where to find the applicable terms. 390 | 391 | Additional terms, permissive or non-permissive, may be stated in the 392 | form of a separately written license, or stated as exceptions; 393 | the above requirements apply either way. 394 | 395 | 8. Termination. 396 | 397 | You may not propagate or modify a covered work except as expressly 398 | provided under this License. Any attempt otherwise to propagate or 399 | modify it is void, and will automatically terminate your rights under 400 | this License (including any patent licenses granted under the third 401 | paragraph of section 11). 402 | 403 | However, if you cease all violation of this License, then your 404 | license from a particular copyright holder is reinstated (a) 405 | provisionally, unless and until the copyright holder explicitly and 406 | finally terminates your license, and (b) permanently, if the copyright 407 | holder fails to notify you of the violation by some reasonable means 408 | prior to 60 days after the cessation. 409 | 410 | Moreover, your license from a particular copyright holder is 411 | reinstated permanently if the copyright holder notifies you of the 412 | violation by some reasonable means, this is the first time you have 413 | received notice of violation of this License (for any work) from that 414 | copyright holder, and you cure the violation prior to 30 days after 415 | your receipt of the notice. 416 | 417 | Termination of your rights under this section does not terminate the 418 | licenses of parties who have received copies or rights from you under 419 | this License. If your rights have been terminated and not permanently 420 | reinstated, you do not qualify to receive new licenses for the same 421 | material under section 10. 422 | 423 | 9. Acceptance Not Required for Having Copies. 424 | 425 | You are not required to accept this License in order to receive or 426 | run a copy of the Program. Ancillary propagation of a covered work 427 | occurring solely as a consequence of using peer-to-peer transmission 428 | to receive a copy likewise does not require acceptance. However, 429 | nothing other than this License grants you permission to propagate or 430 | modify any covered work. These actions infringe copyright if you do 431 | not accept this License. Therefore, by modifying or propagating a 432 | covered work, you indicate your acceptance of this License to do so. 433 | 434 | 10. Automatic Licensing of Downstream Recipients. 435 | 436 | Each time you convey a covered work, the recipient automatically 437 | receives a license from the original licensors, to run, modify and 438 | propagate that work, subject to this License. You are not responsible 439 | for enforcing compliance by third parties with this License. 440 | 441 | An "entity transaction" is a transaction transferring control of an 442 | organization, or substantially all assets of one, or subdividing an 443 | organization, or merging organizations. 
If propagation of a covered 444 | work results from an entity transaction, each party to that 445 | transaction who receives a copy of the work also receives whatever 446 | licenses to the work the party's predecessor in interest had or could 447 | give under the previous paragraph, plus a right to possession of the 448 | Corresponding Source of the work from the predecessor in interest, if 449 | the predecessor has it or can get it with reasonable efforts. 450 | 451 | You may not impose any further restrictions on the exercise of the 452 | rights granted or affirmed under this License. For example, you may 453 | not impose a license fee, royalty, or other charge for exercise of 454 | rights granted under this License, and you may not initiate litigation 455 | (including a cross-claim or counterclaim in a lawsuit) alleging that 456 | any patent claim is infringed by making, using, selling, offering for 457 | sale, or importing the Program or any portion of it. 458 | 459 | 11. Patents. 460 | 461 | A "contributor" is a copyright holder who authorizes use under this 462 | License of the Program or a work on which the Program is based. The 463 | work thus licensed is called the contributor's "contributor version". 464 | 465 | A contributor's "essential patent claims" are all patent claims 466 | owned or controlled by the contributor, whether already acquired or 467 | hereafter acquired, that would be infringed by some manner, permitted 468 | by this License, of making, using, or selling its contributor version, 469 | but do not include claims that would be infringed only as a 470 | consequence of further modification of the contributor version. For 471 | purposes of this definition, "control" includes the right to grant 472 | patent sublicenses in a manner consistent with the requirements of 473 | this License. 474 | 475 | Each contributor grants you a non-exclusive, worldwide, royalty-free 476 | patent license under the contributor's essential patent claims, to 477 | make, use, sell, offer for sale, import and otherwise run, modify and 478 | propagate the contents of its contributor version. 479 | 480 | In the following three paragraphs, a "patent license" is any express 481 | agreement or commitment, however denominated, not to enforce a patent 482 | (such as an express permission to practice a patent or covenant not to 483 | sue for patent infringement). To "grant" such a patent license to a 484 | party means to make such an agreement or commitment not to enforce a 485 | patent against the party. 486 | 487 | If you convey a covered work, knowingly relying on a patent license, 488 | and the Corresponding Source of the work is not available for anyone 489 | to copy, free of charge and under the terms of this License, through a 490 | publicly available network server or other readily accessible means, 491 | then you must either (1) cause the Corresponding Source to be so 492 | available, or (2) arrange to deprive yourself of the benefit of the 493 | patent license for this particular work, or (3) arrange, in a manner 494 | consistent with the requirements of this License, to extend the patent 495 | license to downstream recipients. "Knowingly relying" means you have 496 | actual knowledge that, but for the patent license, your conveying the 497 | covered work in a country, or your recipient's use of the covered work 498 | in a country, would infringe one or more identifiable patents in that 499 | country that you have reason to believe are valid. 
500 | 501 | If, pursuant to or in connection with a single transaction or 502 | arrangement, you convey, or propagate by procuring conveyance of, a 503 | covered work, and grant a patent license to some of the parties 504 | receiving the covered work authorizing them to use, propagate, modify 505 | or convey a specific copy of the covered work, then the patent license 506 | you grant is automatically extended to all recipients of the covered 507 | work and works based on it. 508 | 509 | A patent license is "discriminatory" if it does not include within 510 | the scope of its coverage, prohibits the exercise of, or is 511 | conditioned on the non-exercise of one or more of the rights that are 512 | specifically granted under this License. You may not convey a covered 513 | work if you are a party to an arrangement with a third party that is 514 | in the business of distributing software, under which you make payment 515 | to the third party based on the extent of your activity of conveying 516 | the work, and under which the third party grants, to any of the 517 | parties who would receive the covered work from you, a discriminatory 518 | patent license (a) in connection with copies of the covered work 519 | conveyed by you (or copies made from those copies), or (b) primarily 520 | for and in connection with specific products or compilations that 521 | contain the covered work, unless you entered into that arrangement, 522 | or that patent license was granted, prior to 28 March 2007. 523 | 524 | Nothing in this License shall be construed as excluding or limiting 525 | any implied license or other defenses to infringement that may 526 | otherwise be available to you under applicable patent law. 527 | 528 | 12. No Surrender of Others' Freedom. 529 | 530 | If conditions are imposed on you (whether by court order, agreement or 531 | otherwise) that contradict the conditions of this License, they do not 532 | excuse you from the conditions of this License. If you cannot convey a 533 | covered work so as to satisfy simultaneously your obligations under this 534 | License and any other pertinent obligations, then as a consequence you may 535 | not convey it at all. For example, if you agree to terms that obligate you 536 | to collect a royalty for further conveying from those to whom you convey 537 | the Program, the only way you could satisfy both those terms and this 538 | License would be to refrain entirely from conveying the Program. 539 | 540 | 13. Remote Network Interaction; Use with the GNU General Public License. 541 | 542 | Notwithstanding any other provision of this License, if you modify the 543 | Program, your modified version must prominently offer all users 544 | interacting with it remotely through a computer network (if your version 545 | supports such interaction) an opportunity to receive the Corresponding 546 | Source of your version by providing access to the Corresponding Source 547 | from a network server at no charge, through some standard or customary 548 | means of facilitating copying of software. This Corresponding Source 549 | shall include the Corresponding Source for any work covered by version 3 550 | of the GNU General Public License that is incorporated pursuant to the 551 | following paragraph. 552 | 553 | Notwithstanding any other provision of this License, you have 554 | permission to link or combine any covered work with a work licensed 555 | under version 3 of the GNU General Public License into a single 556 | combined work, and to convey the resulting work. 
The terms of this 557 | License will continue to apply to the part which is the covered work, 558 | but the work with which it is combined will remain governed by version 559 | 3 of the GNU General Public License. 560 | 561 | 14. Revised Versions of this License. 562 | 563 | The Free Software Foundation may publish revised and/or new versions of 564 | the GNU Affero General Public License from time to time. Such new versions 565 | will be similar in spirit to the present version, but may differ in detail to 566 | address new problems or concerns. 567 | 568 | Each version is given a distinguishing version number. If the 569 | Program specifies that a certain numbered version of the GNU Affero General 570 | Public License "or any later version" applies to it, you have the 571 | option of following the terms and conditions either of that numbered 572 | version or of any later version published by the Free Software 573 | Foundation. If the Program does not specify a version number of the 574 | GNU Affero General Public License, you may choose any version ever published 575 | by the Free Software Foundation. 576 | 577 | If the Program specifies that a proxy can decide which future 578 | versions of the GNU Affero General Public License can be used, that proxy's 579 | public statement of acceptance of a version permanently authorizes you 580 | to choose that version for the Program. 581 | 582 | Later license versions may give you additional or different 583 | permissions. However, no additional obligations are imposed on any 584 | author or copyright holder as a result of your choosing to follow a 585 | later version. 586 | 587 | 15. Disclaimer of Warranty. 588 | 589 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY 590 | APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT 591 | HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY 592 | OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, 593 | THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR 594 | PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM 595 | IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF 596 | ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 597 | 598 | 16. Limitation of Liability. 599 | 600 | IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING 601 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS 602 | THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY 603 | GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE 604 | USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF 605 | DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD 606 | PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), 607 | EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF 608 | SUCH DAMAGES. 609 | 610 | 17. Interpretation of Sections 15 and 16. 611 | 612 | If the disclaimer of warranty and limitation of liability provided 613 | above cannot be given local legal effect according to their terms, 614 | reviewing courts shall apply local law that most closely approximates 615 | an absolute waiver of all civil liability in connection with the 616 | Program, unless a warranty or assumption of liability accompanies a 617 | copy of the Program in return for a fee. 
618 | 619 | END OF TERMS AND CONDITIONS 620 | 621 | How to Apply These Terms to Your New Programs 622 | 623 | If you develop a new program, and you want it to be of the greatest 624 | possible use to the public, the best way to achieve this is to make it 625 | free software which everyone can redistribute and change under these terms. 626 | 627 | To do so, attach the following notices to the program. It is safest 628 | to attach them to the start of each source file to most effectively 629 | state the exclusion of warranty; and each file should have at least 630 | the "copyright" line and a pointer to where the full notice is found. 631 | 632 | 633 | Copyright (C) 634 | 635 | This program is free software: you can redistribute it and/or modify 636 | it under the terms of the GNU Affero General Public License as published 637 | by the Free Software Foundation, either version 3 of the License, or 638 | (at your option) any later version. 639 | 640 | This program is distributed in the hope that it will be useful, 641 | but WITHOUT ANY WARRANTY; without even the implied warranty of 642 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 643 | GNU Affero General Public License for more details. 644 | 645 | You should have received a copy of the GNU Affero General Public License 646 | along with this program. If not, see . 647 | 648 | Also add information on how to contact you by electronic and paper mail. 649 | 650 | If your software can interact with users remotely through a computer 651 | network, you should also make sure that it provides a way for users to 652 | get its source. For example, if your program is a web application, its 653 | interface could display a "Source" link that leads users to an archive 654 | of the code. There are many ways you could offer source, and different 655 | solutions will be better for different programs; see section 13 for the 656 | specific requirements. 657 | 658 | You should also get your employer (if you work as a programmer) or school, 659 | if any, to sign a "copyright disclaimer" for the program, if necessary. 660 | For more information on this, and how to apply and follow the GNU AGPL, see 661 | . -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # snapshots 2 | 3 | [![Crates.io](https://img.shields.io/crates/v/snapshots)](https://crates.io/crates/snapshots) 4 | [![Docs.rs](https://img.shields.io/docsrs/snapshots)](https://docs.rs/snapshots) 5 | [![License](https://img.shields.io/crates/l/snapshots)](https://github.com/saber-hq/snapshots/blob/master/LICENSE) 6 | [![Build Status](https://img.shields.io/github/workflow/status/saber-hq/snapshots/E2E/master)](https://github.com/saber-hq/snapshots/actions/workflows/programs-e2e.yml?query=branch%3Amaster) 7 | [![Contributors](https://img.shields.io/github/contributors/saber-hq/snapshots)](https://github.com/saber-hq/snapshots/graphs/contributors) 8 | [![NPM](https://img.shields.io/npm/v/@saberhq/snapshots)](https://www.npmjs.com/package/@saberhq/snapshots) 9 | 10 |
11 | 12 |
13 | 14 | Voting Escrow Snapshots: Historical snapshots of previous voting escrow balances. 15 | 16 | ## Motivation 17 | 18 | There are several instances in which one may want to use an instantaneous snapshot of all vote escrow balances, for example: 19 | 20 | - **Fee distribution.** One may want to send protocol revenue to veToken holders. 21 | - **Airdrops.** One may want to send tokens to holders of a veToken. 22 | 23 | ## Mechanism 24 | 25 | veToken balances are recorded for every `period`. A period is recorded for every 3 days. 26 | 27 | There are two accounts that are used to compute historical balances: 28 | 29 | - [LockerHistory], which stores the total number of veTokens for each period, and 30 | - [EscrowHistory], which stores the veTokens in each Escrow per period. 31 | 32 | Any time someone refreshes and/or modifies their vote escrow, they should refresh their [EscrowHistory] accounts. 33 | 34 | ## Program Addresses 35 | 36 | - **[snapshots]:** [StakeSSzfxn391k3LvdKbZP5WVwWd6AsY1DNiXHjQfK](https://anchor.so/programs/StakeSSzfxn391k3LvdKbZP5WVwWd6AsY1DNiXHjQfK) 37 | 38 | ## License 39 | 40 | The [snapshots] program is licensed under the Affero General Public License version 3. 41 | -------------------------------------------------------------------------------- /flake.lock: -------------------------------------------------------------------------------- 1 | { 2 | "nodes": { 3 | "flake-utils": { 4 | "locked": { 5 | "lastModified": 1649676176, 6 | "narHash": "sha256-OWKJratjt2RW151VUlJPRALb7OU2S5s+f0vLj4o1bHM=", 7 | "owner": "numtide", 8 | "repo": "flake-utils", 9 | "rev": "a4b154ebbdc88c8498a5c7b01589addc9e9cb678", 10 | "type": "github" 11 | }, 12 | "original": { 13 | "owner": "numtide", 14 | "repo": "flake-utils", 15 | "type": "github" 16 | } 17 | }, 18 | "flake-utils_2": { 19 | "locked": { 20 | "lastModified": 1637014545, 21 | "narHash": "sha256-26IZAc5yzlD9FlDT54io1oqG/bBoyka+FJk5guaX4x4=", 22 | "owner": "numtide", 23 | "repo": "flake-utils", 24 | "rev": "bba5dcc8e0b20ab664967ad83d24d64cb64ec4f4", 25 | "type": "github" 26 | }, 27 | "original": { 28 | "owner": "numtide", 29 | "repo": "flake-utils", 30 | "type": "github" 31 | } 32 | }, 33 | "nixpkgs": { 34 | "locked": { 35 | "lastModified": 1649961138, 36 | "narHash": "sha256-8ZCPrazs+qd2V8Elw84lIWuk0kKfVQ8Ei/19gahURhM=", 37 | "owner": "NixOS", 38 | "repo": "nixpkgs", 39 | "rev": "d08394e7cd5c7431a1e8f53b7f581e74ee909548", 40 | "type": "github" 41 | }, 42 | "original": { 43 | "owner": "NixOS", 44 | "ref": "nixpkgs-unstable", 45 | "repo": "nixpkgs", 46 | "type": "github" 47 | } 48 | }, 49 | "root": { 50 | "inputs": { 51 | "saber-overlay": "saber-overlay" 52 | } 53 | }, 54 | "rust-overlay": { 55 | "inputs": { 56 | "flake-utils": "flake-utils_2", 57 | "nixpkgs": [ 58 | "saber-overlay", 59 | "nixpkgs" 60 | ] 61 | }, 62 | "locked": { 63 | "lastModified": 1649903781, 64 | "narHash": "sha256-m+3EZo0a4iS8IwHQhkM/riPuFpu76505xKqmN9j5O+E=", 65 | "owner": "oxalica", 66 | "repo": "rust-overlay", 67 | "rev": "e45696bedc4a13a5970376b8fc09660fdd0e6f6c", 68 | "type": "github" 69 | }, 70 | "original": { 71 | "owner": "oxalica", 72 | "repo": "rust-overlay", 73 | "type": "github" 74 | } 75 | }, 76 | "saber-overlay": { 77 | "inputs": { 78 | "flake-utils": "flake-utils", 79 | "nixpkgs": "nixpkgs", 80 | "rust-overlay": "rust-overlay" 81 | }, 82 | "locked": { 83 | "lastModified": 1649978970, 84 | "narHash": "sha256-hj+Yp3iacTNU/5+EhzcQ3xASiaifHP5AW3752vLMAn0=", 85 | "owner": "saber-hq", 86 | "repo": "saber-overlay", 87 | "rev": 
"5ec6426c8cc205d0577660fac5469f47f2dccabf", 88 | "type": "github" 89 | }, 90 | "original": { 91 | "owner": "saber-hq", 92 | "repo": "saber-overlay", 93 | "type": "github" 94 | } 95 | } 96 | }, 97 | "root": "root", 98 | "version": 7 99 | } 100 | -------------------------------------------------------------------------------- /flake.nix: -------------------------------------------------------------------------------- 1 | { 2 | description = "Saber development environment."; 3 | 4 | inputs = { saber-overlay.url = "github:saber-hq/saber-overlay"; }; 5 | 6 | outputs = { self, saber-overlay }: saber-overlay.lib.buildFlakeOutputs { }; 7 | } 8 | -------------------------------------------------------------------------------- /images/banner.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saber-hq/snapshots/aca5acdfdad126de7a677ddc04365365dd9e7bc1/images/banner.png -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@saberhq/snapshots", 3 | "version": "0.2.8", 4 | "description": "The TypeScript SDK for the Saber Voting Escrow Snapshot system.", 5 | "keywords": [ 6 | "solana", 7 | "tribeca", 8 | "saber", 9 | "governance" 10 | ], 11 | "main": "dist/cjs/index.js", 12 | "module": "dist/esm/index.js", 13 | "repository": "git@github.com:saber-hq/snapshots.git", 14 | "author": "Saber Team ", 15 | "bugs": { 16 | "url": "https://github.com/saber-hq/snapshots/issues", 17 | "email": "team@saber.so" 18 | }, 19 | "homepage": "https://saber.so", 20 | "license": "AGPL-3.0", 21 | "scripts": { 22 | "build": "rm -fr dist/ && tsc -P tsconfig.build.json && tsc -P tsconfig.esm.json", 23 | "clean": "rm -fr dist/", 24 | "idl:generate": "./scripts/parse-idls.sh && ./scripts/generate-idl-types.sh", 25 | "idl:generate:nolint": "./scripts/parse-idls.sh && RUN_ESLINT=none ./scripts/generate-idl-types.sh", 26 | "typecheck": "tsc", 27 | "lint": "eslint . --cache", 28 | "lint:ci": "eslint . 
--max-warnings=0", 29 | "test:e2e": "anchor test --skip-build 'tests/**/*.ts'", 30 | "docs:generate": "typedoc --excludePrivate --includeVersion --out site/ts/ src/index.ts", 31 | "prepare": "husky install", 32 | "cargo-readme": "cd programs/snapshots/ && cargo readme > README.md && cd ../../ && rm README.md && cp programs/snapshots/README.md README.md" 33 | }, 34 | "devDependencies": { 35 | "@gokiprotocol/client": "^0.10.3", 36 | "@project-serum/anchor": "^0.24.2", 37 | "@rushstack/eslint-patch": "^1.1.3", 38 | "@saberhq/anchor-contrib": "^1.12.61", 39 | "@saberhq/chai-solana": "^1.12.61", 40 | "@saberhq/eslint-config": "^1.12.61", 41 | "@saberhq/solana-contrib": "^1.12.61", 42 | "@saberhq/token-utils": "^1.12.61", 43 | "@saberhq/tsconfig": "^1.12.61", 44 | "@solana/web3.js": "^1.39.1", 45 | "@tribecahq/tribeca-sdk": "^0.5.6", 46 | "@types/bn.js": "^5.1.0", 47 | "@types/chai": "^4.3.1", 48 | "@types/lodash": "^4.14.181", 49 | "@types/mocha": "^9.1.0", 50 | "@types/node": "^17.0.24", 51 | "@types/prettier": "^2.6.0", 52 | "@yarnpkg/doctor": "^4.0.0-rc.2", 53 | "bn.js": "^5.2.0", 54 | "chai": "^4.3.6", 55 | "eslint": "^8.13.0", 56 | "eslint-import-resolver-node": "^0.3.6", 57 | "eslint-plugin-import": "^2.26.0", 58 | "husky": "^7.0.4", 59 | "jsbi": "^4.2.1", 60 | "lint-staged": "^12.3.8", 61 | "lodash": "^4.17.21", 62 | "mocha": "^9.2.2", 63 | "prettier": "^2.6.2", 64 | "ts-node": "^10.7.0", 65 | "typedoc": "^0.22.15", 66 | "typescript": "^4.6.3" 67 | }, 68 | "peerDependencies": { 69 | "@project-serum/anchor": ">=0.19", 70 | "@saberhq/anchor-contrib": "^1.12", 71 | "@saberhq/solana-contrib": "^1.12", 72 | "@saberhq/token-utils": "^1.12", 73 | "@solana/web3.js": "^1.29.2", 74 | "@tribecahq/tribeca-sdk": "^0.3.1", 75 | "bn.js": "^5.2.0" 76 | }, 77 | "resolutions": { 78 | "bn.js": "^5.2.0", 79 | "@types/bn.js": "^5.1.0", 80 | "chai": "=4.3.4" 81 | }, 82 | "publishConfig": { 83 | "access": "public" 84 | }, 85 | "files": [ 86 | "dist/", 87 | "src/" 88 | ], 89 | "lint-staged": { 90 | "*.{ts,tsx}": "eslint --cache --fix", 91 | "*.{js,json,jsx,html,css,md}": "prettier --write" 92 | }, 93 | "dependencies": { 94 | "@solana/buffer-layout": "^4.0.0", 95 | "superstruct": "^0.15.4", 96 | "tiny-invariant": "^1.2.0", 97 | "tslib": "^2.3.1" 98 | }, 99 | "packageManager": "yarn@3.2.0" 100 | } 101 | -------------------------------------------------------------------------------- /programs/snapshots/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "snapshots" 3 | version = "0.2.8" 4 | description = "Voting Escrow Snapshots: Historical snapshots of previous voting escrow balances." 
5 | edition = "2021" 6 | homepage = "https://saber.so" 7 | repository = "https://github.com/saber-hq/snapshots" 8 | authors = ["Ian Macalinao "] 9 | license = "AGPL-3.0" 10 | keywords = ["solana", "anchor", "governance", "saber", "tribeca"] 11 | 12 | [lib] 13 | crate-type = ["cdylib", "lib"] 14 | name = "snapshots" 15 | path = "src/lib.rs" 16 | 17 | [features] 18 | no-entrypoint = [] 19 | no-idl = [] 20 | cpi = ["no-entrypoint"] 21 | default = [] 22 | 23 | [dependencies] 24 | anchor-lang = ">=0.22, <=0.24" 25 | anchor-spl = ">=0.22, <=0.24" 26 | locked-voter = { version = "^0.5", features = ["cpi"] } 27 | num-traits = "0.2" 28 | snapshots-math = { version = "^0.2", path = "./math" } 29 | u128 = "0.1.0" 30 | vipers = "^2.0" 31 | 32 | [dev-dependencies] 33 | proptest = "1.0.0" 34 | -------------------------------------------------------------------------------- /programs/snapshots/README.md: -------------------------------------------------------------------------------- 1 | # snapshots 2 | 3 | [![Crates.io](https://img.shields.io/crates/v/snapshots)](https://crates.io/crates/snapshots) 4 | [![Docs.rs](https://img.shields.io/docsrs/snapshots)](https://docs.rs/snapshots) 5 | [![License](https://img.shields.io/crates/l/snapshots)](https://github.com/saber-hq/snapshots/blob/master/LICENSE) 6 | [![Build Status](https://img.shields.io/github/workflow/status/saber-hq/snapshots/E2E/master)](https://github.com/saber-hq/snapshots/actions/workflows/programs-e2e.yml?query=branch%3Amaster) 7 | [![Contributors](https://img.shields.io/github/contributors/saber-hq/snapshots)](https://github.com/saber-hq/snapshots/graphs/contributors) 8 | [![NPM](https://img.shields.io/npm/v/@saberhq/snapshots)](https://www.npmjs.com/package/@saberhq/snapshots) 9 | 10 |

11 | 12 |

13 | 14 | Voting Escrow Snapshots: Historical snapshots of previous voting escrow balances. 15 | 16 | ## Motivation 17 | 18 | There are several instances in which one may want to use an instantaneous snapshot of all vote escrow balances, for example: 19 | 20 | - **Fee distribution.** One may want to send protocol revenue to veToken holders. 21 | - **Airdrops.** One may want to send tokens to holders of a veToken. 22 | 23 | ## Mechanism 24 | 25 | veToken balances are recorded for every `period`. A period is recorded for every 3 days. 26 | 27 | There are two accounts that are used to compute historical balances: 28 | 29 | - [LockerHistory], which stores the total number of veTokens for each period, and 30 | - [EscrowHistory], which stores the veTokens in each Escrow per period. 31 | 32 | Any time someone refreshes and/or modifies their vote escrow, they should refresh their [EscrowHistory] accounts. 33 | 34 | ## Program Addresses 35 | 36 | - **[snapshots]:** [StakeSSzfxn391k3LvdKbZP5WVwWd6AsY1DNiXHjQfK](https://anchor.so/programs/StakeSSzfxn391k3LvdKbZP5WVwWd6AsY1DNiXHjQfK) 37 | 38 | ## License 39 | 40 | The [snapshots] program is licensed under the Affero General Public License version 3. 41 | -------------------------------------------------------------------------------- /programs/snapshots/README.tpl: -------------------------------------------------------------------------------- 1 | # {{crate}} 2 | 3 | [![Crates.io](https://img.shields.io/crates/v/{{crate}})](https://crates.io/crates/{{crate}}) 4 | [![Docs.rs](https://img.shields.io/docsrs/{{crate}})](https://docs.rs/{{crate}}) 5 | [![License](https://img.shields.io/crates/l/{{crate}})](https://github.com/saber-hq/snapshots/blob/master/LICENSE) 6 | [![Build Status](https://img.shields.io/github/workflow/status/saber-hq/snapshots/E2E/master)](https://github.com/saber-hq/snapshots/actions/workflows/programs-e2e.yml?query=branch%3Amaster) 7 | [![Contributors](https://img.shields.io/github/contributors/saber-hq/snapshots)](https://github.com/saber-hq/snapshots/graphs/contributors) 8 | [![NPM](https://img.shields.io/npm/v/@saberhq/snapshots)](https://www.npmjs.com/package/@saberhq/snapshots) 9 | 10 |
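To make the mechanism described in the snapshots README above concrete, here is a minimal TypeScript sketch of the era/period arithmetic. It mirrors the constants that appear further down in `src/constants.ts` and `programs/snapshots/math/src/lib.rs` (`COMMON_ERA_UNIX_TS`, `PERIOD_SECONDS`, `ERA_NUM_PERIODS`); the constants are inlined so the snippet stands alone, and the helper name `eraAndPeriodOf` is illustrative rather than part of the published SDK.

```ts
// Era/period arithmetic for voting escrow snapshots (illustrative sketch).
// An era is 256 periods of 3 days each, counted from COMMON_ERA_UNIX_TS.
const COMMON_ERA_UNIX_TS = 1640995200; // start of era 0 (2022-01-01T00:00:00Z)
const PERIOD_SECONDS = 86_400 * 3;
const ERA_NUM_PERIODS = 256;
const SECONDS_PER_ERA = PERIOD_SECONDS * ERA_NUM_PERIODS;

/** Returns the era and the period (0-255) within that era containing `date`. */
const eraAndPeriodOf = (date: Date): { era: number; period: number } => {
  const sinceCommonEra = Math.floor(date.getTime() / 1_000) - COMMON_ERA_UNIX_TS;
  const era = Math.floor(sinceCommonEra / SECONDS_PER_ERA);
  const period = Math.floor((sinceCommonEra - era * SECONDS_PER_ERA) / PERIOD_SECONDS);
  return { era, period };
};

// Ten days after the common era start: periods 0-2 cover the first nine days,
// so day ten falls in period 3 of era 0.
console.log(eraAndPeriodOf(new Date((COMMON_ERA_UNIX_TS + 10 * 86_400) * 1_000)));
// => { era: 0, period: 3 }
```

The on-chain `snapshots-math` crate shown later performs the same calculation with checked `u64` arithmetic (`calculate_era_and_period_of_ts`), and `sync` then writes the escrow's veToken balance into each not-yet-started period of the era.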

11 | 12 |

13 | 14 | {{readme}} 15 | -------------------------------------------------------------------------------- /programs/snapshots/Xargo.toml: -------------------------------------------------------------------------------- 1 | [target.bpfel-unknown-unknown.dependencies.std] 2 | features = [] 3 | -------------------------------------------------------------------------------- /programs/snapshots/math/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "snapshots-math" 3 | version = "0.2.8" 4 | description = "Calculations for voting escrow snapshots." 5 | edition = "2021" 6 | homepage = "https://saber.so" 7 | repository = "https://github.com/saber-hq/snapshots" 8 | authors = ["Ian Macalinao "] 9 | license = "AGPL-3.0" 10 | keywords = ["solana", "anchor", "governance", "saber", "tribeca"] 11 | 12 | [lib] 13 | crate-type = ["cdylib", "lib"] 14 | name = "snapshots_math" 15 | path = "src/lib.rs" 16 | 17 | [dependencies] 18 | num-traits = "0.2" 19 | -------------------------------------------------------------------------------- /programs/snapshots/math/README.md: -------------------------------------------------------------------------------- 1 | # snapshots-math 2 | 3 | Calculations for voting escrow snapshots. 4 | 5 | These functions are split into a separate crate to ensure `anchor-lang` version mismatches 6 | do not prevent building against this code. 7 | 8 | License: AGPL-3.0 9 | -------------------------------------------------------------------------------- /programs/snapshots/math/src/lib.rs: -------------------------------------------------------------------------------- 1 | //! Calculations for voting escrow snapshots. 2 | //! 3 | //! These functions are split into a separate crate to ensure `anchor-lang` version mismatches 4 | //! do not prevent building against this code. 5 | #![deny(rustdoc::all)] 6 | #![allow(rustdoc::missing_doc_code_examples)] 7 | #![deny(clippy::unwrap_used, clippy::integer_arithmetic)] 8 | #![deny(missing_docs)] 9 | 10 | use num_traits::cast::ToPrimitive; 11 | 12 | /// Number of periods in an era. 13 | pub const ERA_NUM_PERIODS: usize = (u8::MAX as usize) + 1; 14 | 15 | /// Number of seconds in a period. 16 | pub const PERIOD_SECONDS: u32 = 86_400 * 3; 17 | 18 | /// Number of seconds in an era. 19 | pub const SECONDS_PER_ERA: u64 = (ERA_NUM_PERIODS as u64) * (PERIOD_SECONDS as u64); 20 | 21 | /// The Unix timestamp of the start of the first era. 22 | pub const COMMON_ERA_UNIX_TS: u64 = 1640995200; 23 | 24 | /// Calculates the start timestamp of an era. 25 | pub fn calculate_era_start_ts(era: u16) -> Option<u64> { 26 | COMMON_ERA_UNIX_TS.checked_add(SECONDS_PER_ERA.checked_mul(era.into())?) 27 | } 28 | 29 | /// Calculates the start timestamp of a period of an era. 30 | pub fn calculate_period_start_ts(era: u16, period: u8) -> Option<u64> { 31 | calculate_era_start_ts(era)? 32 | .checked_add(period.to_u64()?.checked_mul(PERIOD_SECONDS.to_u64()?)?) 33 | } 34 | 35 | /// A period is `elapsed` if its start time has passed. 36 | /// 37 | /// Elapsed periods cannot increase their locker veToken balance. 38 | pub fn has_period_elapsed(era: u16, period: u8, now: i64) -> Option<bool> { 39 | let start = calculate_period_start_ts(era, period)?; 40 | let now = now.to_u64()?; 41 | // `>` instead of `>=` to prevent potential off-by-one errors 42 | // by programmers that are not aware of the definition of elapsed. 43 | // one second isn't a big deal.
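    // Concretely: at exactly `start` the period has *not* yet elapsed; one second
    // after `start` it has (see `test_has_period_elapsed_boundary` below).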
44 | Some(now > start) 45 | } 46 | 47 | /// Calculates the era and period of the given Unix timestamp. 48 | pub fn calculate_era_and_period_of_ts(now: u64) -> Option<(u16, u8)> { 49 | let current_era: u16 = now 50 | .checked_sub(COMMON_ERA_UNIX_TS)? 51 | .checked_div(SECONDS_PER_ERA)? 52 | .to_u16()?; 53 | let current_era_start_ts = calculate_era_start_ts(current_era)?; 54 | let current_period: u8 = now 55 | .checked_sub(current_era_start_ts)? 56 | .checked_div(PERIOD_SECONDS.into())? 57 | .to_u8()?; 58 | Some((current_era, current_period)) 59 | } 60 | 61 | /// Calculates the next era and period of the given period. 62 | pub fn calculate_next_era_and_period(era: u16, period: u8) -> Option<(u16, u8)> { 63 | Some(if period == u8::MAX { 64 | (era.checked_add(1)?, 0_u8) 65 | } else { 66 | (era, period.checked_add(1)?) 67 | }) 68 | } 69 | 70 | /// Calculates the next era and period of the given Unix timestamp. 71 | pub fn calculate_next_era_and_period_of_ts(now: u64) -> Option<(u16, u8)> { 72 | let (current_era, current_period) = calculate_era_and_period_of_ts(now)?; 73 | calculate_next_era_and_period(current_era, current_period) 74 | } 75 | 76 | #[cfg(test)] 77 | #[allow(clippy::unwrap_used, clippy::integer_arithmetic)] 78 | mod tests { 79 | use super::*; 80 | 81 | #[test] 82 | fn test_has_period_elapsed() { 83 | // beginning of period 2: so period 2 has not elapsed yet. 84 | let current_time = (COMMON_ERA_UNIX_TS + (PERIOD_SECONDS as u64) * 2) 85 | .to_i64() 86 | .unwrap(); 87 | 88 | assert!(has_period_elapsed(0, 0, current_time).unwrap()); 89 | assert!(has_period_elapsed(0, 1, current_time).unwrap()); 90 | assert!(!has_period_elapsed(0, 2, current_time).unwrap()); 91 | 92 | assert!(!has_period_elapsed(1, 0, current_time).unwrap()); 93 | } 94 | 95 | #[test] 96 | fn test_has_period_elapsed_boundary() { 97 | // one second into period 2: so period 2 has now elapsed.
98 | let current_time = (COMMON_ERA_UNIX_TS + (PERIOD_SECONDS as u64) * 2 + 1) 99 | .to_i64() 100 | .unwrap(); 101 | 102 | assert!(has_period_elapsed(0, 0, current_time).unwrap()); 103 | assert!(has_period_elapsed(0, 1, current_time).unwrap()); 104 | assert!(has_period_elapsed(0, 2, current_time).unwrap()); 105 | assert!(!has_period_elapsed(0, 3, current_time).unwrap()); 106 | 107 | assert!(!has_period_elapsed(1, 0, current_time).unwrap()); 108 | } 109 | 110 | #[test] 111 | fn test_calculate_next_era_and_period_normal() { 112 | let era = 2_u16; 113 | let period = 4_u8; 114 | let start = calculate_period_start_ts(era, period).unwrap() + 40; 115 | 116 | let (result_era, result_period) = calculate_era_and_period_of_ts(start).unwrap(); 117 | assert_eq!(result_era, era); 118 | assert_eq!(result_period, period); 119 | 120 | let (result_next_era, result_next_period) = 121 | calculate_next_era_and_period_of_ts(start).unwrap(); 122 | assert_eq!(result_next_era, era); 123 | assert_eq!(result_next_period, period + 1); 124 | } 125 | 126 | #[test] 127 | fn test_calculate_next_era_and_period_boundary() { 128 | let era = 2_u16; 129 | let period = 255_u8; 130 | let start = calculate_period_start_ts(era, period).unwrap() + 40; 131 | 132 | let (result_era, result_period) = calculate_era_and_period_of_ts(start).unwrap(); 133 | assert_eq!(result_era, era); 134 | assert_eq!(result_period, period); 135 | 136 | let (result_next_era, result_next_period) = 137 | calculate_next_era_and_period_of_ts(start).unwrap(); 138 | assert_eq!(result_next_era, era + 1); 139 | assert_eq!(result_next_period, 0_u8); 140 | } 141 | } 142 | -------------------------------------------------------------------------------- /programs/snapshots/proptest-regressions/instructions/sync.txt: -------------------------------------------------------------------------------- 1 | # Seeds for failure cases proptest has generated in the past. It is 2 | # automatically read and these particular cases re-run before any 3 | # novel cases are generated. 4 | # 5 | # It is recommended to check this file in to source control so that 6 | # everyone who runs the test benefits from these saved cases. 7 | cc 84e80908539ba4551b8966eff390a4af356025617c8edf3bb938f9c6ff404adc # shrinks to power_if_max_lockup = 6, period_start_ts = 3365836746710489934, escrow_started_at = 1, escrow_ends_at = 5124745715660478989, max_stake_duration = 0 8 | cc f1b29dd69c418547f61b0793db71abbc0055fc69b5df070b2542ca30483733c4 # shrinks to escrow_amount = 1, max_stake_vote_multiplier = 9, period_start_ts = 496038611235145017, escrow_started_at = 1, escrow_ends_at = 1266256638931745117, max_stake_duration = 0 9 | -------------------------------------------------------------------------------- /programs/snapshots/src/instructions/create_escrow_history.rs: -------------------------------------------------------------------------------- 1 | //! Processor for [snapshots::create_escrow_history]. 2 | 3 | use crate::*; 4 | use locked_voter::Escrow; 5 | 6 | /// Accounts for [snapshots::create_escrow_history]. 7 | #[derive(Accounts)] 8 | #[instruction(era: u16)] 9 | pub struct CreateEscrowHistory<'info> { 10 | /// The [Escrow]. 11 | pub escrow: Account<'info, Escrow>, 12 | 13 | /// The [EscrowHistory] to be created. 
14 | #[account( 15 | init, 16 | seeds = [ 17 | b"EscrowHistory".as_ref(), 18 | escrow.key().as_ref(), 19 | era.to_le_bytes().as_ref() 20 | ], 21 | bump, 22 | space = 8 + EscrowHistory::LEN, 23 | payer = payer 24 | )] 25 | pub escrow_history: AccountLoader<'info, EscrowHistory>, 26 | 27 | /// Payer. 28 | #[account(mut)] 29 | pub payer: Signer<'info>, 30 | 31 | /// [System] program. 32 | pub system_program: Program<'info, System>, 33 | } 34 | 35 | impl<'info> CreateEscrowHistory<'info> { 36 | fn create_escrow_history(&mut self, bump: u8, era: u16) -> Result<()> { 37 | let history = &mut self.escrow_history.load_init()?; 38 | history.escrow = self.escrow.key(); 39 | history.era = era; 40 | history.bump = bump; 41 | Ok(()) 42 | } 43 | } 44 | 45 | pub fn handler(ctx: Context, era: u16) -> Result<()> { 46 | ctx.accounts 47 | .create_escrow_history(*unwrap_int!(ctx.bumps.get("escrow_history")), era)?; 48 | Ok(()) 49 | } 50 | 51 | impl<'info> Validate<'info> for CreateEscrowHistory<'info> { 52 | fn validate(&self) -> Result<()> { 53 | Ok(()) 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /programs/snapshots/src/instructions/create_locker_history.rs: -------------------------------------------------------------------------------- 1 | //! Processor for [snapshots::create_locker_history]. 2 | 3 | use crate::*; 4 | use locked_voter::Locker; 5 | 6 | /// Accounts for [snapshots::create_locker_history]. 7 | #[derive(Accounts)] 8 | #[instruction(era: u16)] 9 | pub struct CreateLockerHistory<'info> { 10 | /// The [Locker]. 11 | pub locker: Account<'info, Locker>, 12 | 13 | /// The [LockerHistory] to be created. 14 | #[account( 15 | init, 16 | seeds = [ 17 | b"LockerHistory".as_ref(), 18 | locker.key().as_ref(), 19 | era.to_le_bytes().as_ref() 20 | ], 21 | bump, 22 | space = 8 + LockerHistory::LEN, 23 | payer = payer 24 | )] 25 | pub locker_history: AccountLoader<'info, LockerHistory>, 26 | 27 | /// Payer. 28 | #[account(mut)] 29 | pub payer: Signer<'info>, 30 | 31 | /// [System] program. 32 | pub system_program: Program<'info, System>, 33 | } 34 | 35 | impl<'info> CreateLockerHistory<'info> { 36 | fn create_locker_history(&mut self, bump: u8, era: u16) -> Result<()> { 37 | let history = &mut self.locker_history.load_init()?; 38 | history.locker = self.locker.key(); 39 | history.era = era; 40 | history.bump = bump; 41 | Ok(()) 42 | } 43 | } 44 | 45 | pub fn handler(ctx: Context, era: u16) -> Result<()> { 46 | ctx.accounts 47 | .create_locker_history(*unwrap_int!(ctx.bumps.get("locker_history")), era) 48 | } 49 | 50 | impl<'info> Validate<'info> for CreateLockerHistory<'info> { 51 | fn validate(&self) -> Result<()> { 52 | Ok(()) 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /programs/snapshots/src/instructions/mod.rs: -------------------------------------------------------------------------------- 1 | //! Instructions for the [crate::snapshots] program. 2 | 3 | pub mod create_escrow_history; 4 | pub mod create_locker_history; 5 | pub mod sync; 6 | 7 | pub use create_escrow_history::*; 8 | pub use create_locker_history::*; 9 | pub use sync::*; 10 | -------------------------------------------------------------------------------- /programs/snapshots/src/instructions/sync.rs: -------------------------------------------------------------------------------- 1 | //! Processor for [snapshots::sync]. 
2 | 3 | use crate::*; 4 | use ::u128::mul_div_u64; 5 | use locked_voter::{Escrow, Locker}; 6 | use num_traits::ToPrimitive; 7 | 8 | /// Accounts for [snapshots::sync]. 9 | #[derive(Accounts)] 10 | pub struct Sync<'info> { 11 | /// The [Locker]. 12 | pub locker: Account<'info, Locker>, 13 | 14 | /// The [Escrow]. 15 | pub escrow: Account<'info, Escrow>, 16 | 17 | /// The [LockerHistory] to sync. 18 | #[account(mut)] 19 | pub locker_history: AccountLoader<'info, LockerHistory>, 20 | 21 | /// The [EscrowHistory] to sync. 22 | #[account(mut)] 23 | pub escrow_history: AccountLoader<'info, EscrowHistory>, 24 | } 25 | 26 | impl<'info> Sync<'info> { 27 | fn sync(&self) -> Result<()> { 28 | let locker_history = &mut self.locker_history.load_mut()?; 29 | let escrow_history = &mut self.escrow_history.load_mut()?; 30 | 31 | assert_keys_eq!(locker_history.locker, self.locker); 32 | assert_keys_eq!(escrow_history.escrow, self.escrow); 33 | invariant!(locker_history.era == escrow_history.era, EraMismatch); 34 | 35 | let start_ts = unwrap_int!(calculate_era_start_ts(locker_history.era)); 36 | let now = unwrap_int!(Clock::get()?.unix_timestamp.to_u64()); 37 | 38 | // The voting power at max lockup. 39 | // This is used as a multiplicand to determine the total voting power 40 | // at a given time. 41 | let power_if_max_lockup = unwrap_int!(self 42 | .escrow 43 | .amount 44 | .checked_mul(self.locker.params.max_stake_vote_multiplier.into())); 45 | let escrow_started_at = unwrap_int!(self.escrow.escrow_started_at.to_u64()); 46 | let escrow_ends_at = unwrap_int!(self.escrow.escrow_ends_at.to_u64()); 47 | 48 | // If the escrow never started, we should not be updating anything. 49 | if escrow_started_at == 0 { 50 | return Ok(()); 51 | } 52 | 53 | // calculate every period 54 | let mut period_start_ts = start_ts; 55 | for period in 0..ERA_NUM_PERIODS { 56 | if period > 0 { 57 | // add the period each iteration 58 | period_start_ts = unwrap_int!(period_start_ts.checked_add(PERIOD_SECONDS.into())); 59 | } 60 | 61 | // skip over periods that have already passed. 62 | if now >= period_start_ts { 63 | continue; 64 | } 65 | 66 | // The previous value of this period's ve balance. 67 | // !WARNING!: not to be confused with the veBalance of the previous period. 68 | let prev_period_ve_balance = escrow_history.ve_balances[period]; 69 | 70 | // The current value of this period's ve balance. 71 | let ve_balance: u64 = unwrap_int!(calculate_voter_power_for_period( 72 | power_if_max_lockup, 73 | period_start_ts, 74 | escrow_started_at, 75 | escrow_ends_at, 76 | self.locker.params.max_stake_duration, 77 | )); 78 | 79 | // skip zero ve balance 80 | if ve_balance == 0 { 81 | // prev ve balance should have been zero 82 | invariant!(prev_period_ve_balance == 0); 83 | continue; 84 | } 85 | 86 | locker_history.ve_balances[period] = unwrap_checked!({ 87 | locker_history.ve_balances[period] 88 | .checked_sub(prev_period_ve_balance)? 89 | .checked_add(ve_balance) 90 | }); 91 | escrow_history.ve_balances[period] = ve_balance; 92 | 93 | invariant!(ve_balance >= prev_period_ve_balance, EscrowBalanceDecreased); 94 | 95 | // If the previous balance was zero, this is a newly tracked escrow. 96 | // This voter should be recorded in the counts. 97 | if prev_period_ve_balance == 0 && ve_balance != 0 { 98 | locker_history.ve_counts[period] = 99 | unwrap_int!(locker_history.ve_counts[period].checked_add(1)); 100 | } 101 | } 102 | 103 | Ok(()) 104 | } 105 | } 106 | 107 | /// Calculates voter power at the start of a period. 
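/// (Illustrative example: an escrow of 1,000 tokens in a locker with a 10x
/// `max_stake_vote_multiplier`, so `power_if_max_lockup = 10_000`, that still has half of
/// `max_stake_duration` remaining at `period_start_ts` counts as 5,000 veTokens for that period.)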
108 | /// 109 | /// - `power_if_max_lockup`: Voting power if the user's lockup was at the maximum amount 110 | /// - `period_start_ts`: When the period started. 111 | /// - `escrow_started_at`: When the user's escrow started. 112 | /// - `escrow_ends_at`: When the user's escrow ends. 113 | /// - `max_stake_duration`: The maximum duration of a lockup. This is also the duration used to compute the `power_if_max_lockup`. 114 | fn calculate_voter_power_for_period( 115 | power_if_max_lockup: u64, 116 | period_start_ts: u64, 117 | escrow_started_at: u64, 118 | escrow_ends_at: u64, 119 | max_stake_duration: u64, 120 | ) -> Option { 121 | // invalid `now` argument, should never happen. 122 | if period_start_ts == 0 { 123 | return None; 124 | } 125 | if escrow_started_at == 0 { 126 | return Some(0); 127 | } 128 | // Lockup had zero power before the start time. 129 | // at the end time, lockup also has zero power. 130 | if period_start_ts < escrow_started_at || period_start_ts >= escrow_ends_at { 131 | return Some(0); 132 | } 133 | 134 | // multiply the max lockup power by the fraction of the max stake duration 135 | let seconds_until_lockup_expiry = escrow_ends_at.checked_sub(period_start_ts)?.to_u64()?; 136 | // elapsed seconds, clamped to the maximum duration 137 | let relevant_seconds_until_lockup_expiry = seconds_until_lockup_expiry.min(max_stake_duration); 138 | 139 | // multiply the max lockup power by the fraction of the max stake duration 140 | let power = mul_div_u64( 141 | power_if_max_lockup, 142 | relevant_seconds_until_lockup_expiry, 143 | max_stake_duration, 144 | )?; 145 | 146 | Some(power) 147 | } 148 | 149 | pub fn handler(ctx: Context) -> Result<()> { 150 | ctx.accounts.sync() 151 | } 152 | 153 | impl<'info> Validate<'info> for Sync<'info> { 154 | fn validate(&self) -> Result<()> { 155 | assert_keys_eq!(self.locker, self.escrow.locker); 156 | Ok(()) 157 | } 158 | } 159 | 160 | #[cfg(test)] 161 | #[allow(clippy::unwrap_used)] 162 | mod test { 163 | use super::*; 164 | use locked_voter::LockerParams; 165 | use proptest::prelude::*; 166 | 167 | fn run_test_identical_to_locked_voter( 168 | escrow_amount: u64, 169 | max_stake_vote_multiplier: u8, 170 | period_start_ts: i64, 171 | escrow_started_at: i64, 172 | escrow_ends_at: i64, 173 | max_stake_duration: u64, 174 | ) { 175 | let reference_locker = LockerParams { 176 | max_stake_duration, 177 | max_stake_vote_multiplier, 178 | ..Default::default() 179 | }; 180 | let reference_escrow = Escrow { 181 | amount: escrow_amount, 182 | escrow_started_at, 183 | escrow_ends_at, 184 | ..Default::default() 185 | }; 186 | let power_if_max_lockup = escrow_amount * (max_stake_vote_multiplier as u64); 187 | let reference_power = 188 | reference_locker.calculate_voter_power(&reference_escrow, period_start_ts); 189 | let new_power = calculate_voter_power_for_period( 190 | power_if_max_lockup, 191 | period_start_ts as u64, 192 | escrow_started_at as u64, 193 | escrow_ends_at as u64, 194 | max_stake_duration, 195 | ); 196 | assert_eq!(reference_power, new_power); 197 | } 198 | 199 | proptest! 
{ 200 | #[test] 201 | fn test_identical_to_locked_voter( 202 | escrow_amount in 0..=(u64::MAX >> 8), 203 | max_stake_vote_multiplier: u8, 204 | period_start_ts in 0..=i64::MAX, 205 | escrow_started_at in 0..=i64::MAX, 206 | escrow_ends_at in 0..=i64::MAX, 207 | max_stake_duration: u64, 208 | ) { 209 | run_test_identical_to_locked_voter( 210 | escrow_amount, 211 | max_stake_vote_multiplier, 212 | period_start_ts, 213 | escrow_started_at, 214 | escrow_ends_at, 215 | max_stake_duration 216 | ); 217 | } 218 | } 219 | } 220 | -------------------------------------------------------------------------------- /programs/snapshots/src/lib.rs: -------------------------------------------------------------------------------- 1 | //! Voting Escrow Snapshots: Historical snapshots of previous voting escrow balances. 2 | //! 3 | //! # Motivation 4 | //! 5 | //! There are several instances in which one may want to use an instantaneous snapshot of all vote escrow balances, for example: 6 | //! 7 | //! - **Fee distribution.** One may want to send protocol revenue to veToken holders. 8 | //! - **Airdrops.** One may want to send tokens to holders of a veToken. 9 | //! 10 | //! # Mechanism 11 | //! 12 | //! veToken balances are recorded for every `period`. A period is recorded for every 3 days. 13 | //! 14 | //! There are two accounts that are used to compute historical balances: 15 | //! 16 | //! - [LockerHistory], which stores the total number of veTokens for each period, and 17 | //! - [EscrowHistory], which stores the veTokens in each Escrow per period. 18 | //! 19 | //! Any time someone refreshes and/or modifies their vote escrow, they should refresh their [EscrowHistory] accounts. 20 | //! 21 | //! # Program Addresses 22 | //! 23 | //! - **[snapshots]:** [StakeSSzfxn391k3LvdKbZP5WVwWd6AsY1DNiXHjQfK](https://anchor.so/programs/StakeSSzfxn391k3LvdKbZP5WVwWd6AsY1DNiXHjQfK) 24 | //! 25 | //! # License 26 | //! 27 | //! The [snapshots] program is licensed under the Affero General Public License version 3. 28 | 29 | #![deny(rustdoc::all)] 30 | #![allow(rustdoc::missing_doc_code_examples)] 31 | #![deny(clippy::unwrap_used)] 32 | 33 | use anchor_lang::prelude::*; 34 | use vipers::prelude::*; 35 | 36 | mod instructions; 37 | mod state; 38 | 39 | pub use snapshots_math::*; 40 | pub use state::*; 41 | 42 | use instructions::*; 43 | 44 | declare_id!("StakeSSzfxn391k3LvdKbZP5WVwWd6AsY1DNiXHjQfK"); 45 | 46 | /// The [snapshots] program. 47 | #[program] 48 | pub mod snapshots { 49 | use super::*; 50 | 51 | /// Creates a [EscrowHistory]. 52 | #[access_control(ctx.accounts.validate())] 53 | pub fn create_escrow_history(ctx: Context, era: u16) -> Result<()> { 54 | create_escrow_history::handler(ctx, era) 55 | } 56 | 57 | /// Creates a [LockerHistory]. 58 | #[access_control(ctx.accounts.validate())] 59 | pub fn create_locker_history(ctx: Context, era: u16) -> Result<()> { 60 | create_locker_history::handler(ctx, era) 61 | } 62 | 63 | /// Synchronize an [locked_voter::Escrow] with the [LockerHistory]/[EscrowHistory]. 64 | #[access_control(ctx.accounts.validate())] 65 | pub fn sync(ctx: Context) -> Result<()> { 66 | sync::handler(ctx) 67 | } 68 | } 69 | 70 | /// Errors. 
71 | #[error_code] 72 | pub enum ErrorCode { 73 | #[msg("Locker/escrow mismatch.")] 74 | LockerEscrowMismatch, 75 | #[msg("Era mismatch.")] 76 | EraMismatch, 77 | #[msg("Escrow balances cannot decrease.")] 78 | EscrowBalanceDecreased, 79 | } 80 | -------------------------------------------------------------------------------- /programs/snapshots/src/state.rs: -------------------------------------------------------------------------------- 1 | //! Struct definitions for accounts that hold state. 2 | 3 | use anchor_lang::solana_program::pubkey::PUBKEY_BYTES; 4 | 5 | use crate::*; 6 | 7 | /// Stores the total number of veTokens in circulation for each period. 8 | /// 9 | /// The [LockerHistory] account stores 256 periods, each lasting 3 days. 10 | /// For a 5-year [locked_voter::Locker], at least 3 of these accounts will exist 11 | /// at any given time, since each era only covers about 2.1 years (256 periods of 3 days). 12 | #[account(zero_copy)] 13 | #[derive(Debug, PartialEq, Eq)] 14 | pub struct LockerHistory { 15 | /// The [locked_voter::Locker] being tracked. 16 | pub locker: Pubkey, 17 | /// The era. The era's start timestamp is [COMMON_ERA_UNIX_TS] plus this value multiplied by [ERA_NUM_PERIODS] * [PERIOD_SECONDS]. 18 | pub era: u16, 19 | /// Bump seed. 20 | pub bump: u8, 21 | /// Padding for aligning the struct to an 8-byte boundary. 22 | pub _padding: [u8; 5], 23 | /// The sum of all tracked historical vote escrow balances. 24 | pub ve_balances: [u64; 256], 25 | /// Number of voters with non-zero balances at each epoch. 26 | pub ve_counts: [u64; 256], 27 | } 28 | 29 | impl Default for LockerHistory { 30 | fn default() -> Self { 31 | Self { 32 | locker: Default::default(), 33 | era: Default::default(), 34 | bump: Default::default(), 35 | _padding: Default::default(), 36 | ve_balances: [0; ERA_NUM_PERIODS], 37 | ve_counts: [0; ERA_NUM_PERIODS], 38 | } 39 | } 40 | } 41 | 42 | impl LockerHistory { 43 | /// Number of bytes in a serialized [LockerHistory]. 44 | pub const LEN: usize = PUBKEY_BYTES + 2 + 1 + 5 + 8 * 256 + 8 * 256; 45 | } 46 | 47 | /// Stores the total veToken balance of an [locked_voter::Escrow] 48 | /// for the given epochs. 49 | /// 50 | /// Any time someone refreshes and/or modifies their vote [locked_voter::Escrow], they 51 | /// should refresh their [EscrowHistory] accounts. 52 | #[account(zero_copy)] 53 | #[derive(Debug, PartialEq, Eq)] 54 | pub struct EscrowHistory { 55 | /// The [locked_voter::Escrow] being tracked. 56 | pub escrow: Pubkey, 57 | /// The era. 58 | pub era: u16, 59 | /// Bump seed. 60 | pub bump: u8, 61 | /// Padding for aligning the struct to an 8-byte boundary. 62 | pub _padding: [u8; 5], 63 | /// All tracked historical vote escrow balances for this [locked_voter::Escrow].
64 | pub ve_balances: [u64; 256], 65 | } 66 | 67 | impl Default for EscrowHistory { 68 | fn default() -> Self { 69 | Self { 70 | escrow: Default::default(), 71 | era: Default::default(), 72 | bump: Default::default(), 73 | _padding: Default::default(), 74 | ve_balances: [0; ERA_NUM_PERIODS], 75 | } 76 | } 77 | } 78 | 79 | impl EscrowHistory { 80 | /// Number of bytes in a serialized [EscrowHistory]. 81 | pub const LEN: usize = PUBKEY_BYTES + 2 + 1 + 5 + 8 * 256; 82 | } 83 | 84 | #[cfg(test)] 85 | mod tests { 86 | use super::*; 87 | use std::mem::size_of; 88 | 89 | #[test] 90 | fn test_locker_history_len() { 91 | assert_eq!(size_of::<LockerHistory>(), LockerHistory::LEN); 92 | } 93 | 94 | #[test] 95 | fn test_escrow_history_len() { 96 | assert_eq!(size_of::<EscrowHistory>(), EscrowHistory::LEN); 97 | } 98 | } 99 | -------------------------------------------------------------------------------- /scripts/download-programs.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env sh 2 | 3 | cd $(dirname $0)/.. 4 | 5 | mkdir -p artifacts/deploy/ 6 | 7 | curl -L https://github.com/TribecaHQ/tribeca/releases/download/v0.5.6/locked_voter.so > \ 8 | artifacts/deploy/locked_voter.so 9 | 10 | curl -L https://github.com/TribecaHQ/tribeca/releases/download/v0.5.6/govern.so > \ 11 | artifacts/deploy/govern.so 12 | 13 | curl -L https://github.com/GokiProtocol/goki/releases/download/v0.10.3/smart_wallet.so > \ 14 | artifacts/deploy/smart_wallet.so 15 | -------------------------------------------------------------------------------- /scripts/generate-idl-types.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | shopt -s extglob 4 | 5 | cd $(dirname $0)/.. 6 | 7 | generate_declaration_file() { 8 | PROGRAM_SO=$1 9 | OUT_DIR=$2 10 | 11 | prog="$(basename $PROGRAM_SO .json)" 12 | OUT_PATH="$OUT_DIR/$prog.ts" 13 | if [ ! $(which gsed) ]; then 14 | PREFIX=$(echo $prog | sed -E 's/(^|_)([a-z])/\U\2/g') 15 | else 16 | PREFIX=$(echo $prog | gsed -E 's/(^|_)([a-z])/\U\2/g') 17 | fi 18 | typename="${PREFIX}IDL" 19 | rawName="${PREFIX}JSON" 20 | 21 | # types 22 | echo "export type $typename =" >>$OUT_PATH 23 | cat $PROGRAM_SO >>$OUT_PATH 24 | echo ";" >>$OUT_PATH 25 | 26 | # raw json 27 | echo "export const $rawName: $typename =" >>$OUT_PATH 28 | cat $PROGRAM_SO >>$OUT_PATH 29 | echo ";" >>$OUT_PATH 30 | 31 | # error type 32 | echo "import { generateErrorMap } from '@saberhq/anchor-contrib';" >>$OUT_PATH 33 | echo "export const ${PREFIX}Errors = generateErrorMap($rawName);" >>$OUT_PATH 34 | } 35 | 36 | generate_sdk_idls() { 37 | SDK_DIR=${1:-"./packages/sdk/src/idls"} 38 | IDL_JSONS=$2 39 | 40 | echo "Generating IDLs for the following programs:" 41 | echo $IDL_JSONS 42 | echo "" 43 | 44 | rm -rf $SDK_DIR 45 | mkdir -p $SDK_DIR 46 | if [ $(ls -l artifacts/idl/ | wc -l) -ne 0 ]; then 47 | for f in $IDL_JSONS; do 48 | generate_declaration_file $f $SDK_DIR 49 | done 50 | if [[ $RUN_ESLINT != "none" ]]; then 51 | yarn eslint --fix $SDK_DIR 52 | fi 53 | else 54 | echo "Warning: no IDLs found. Make sure you ran ./scripts/idl.sh first." 55 | fi 56 | } 57 | 58 | generate_sdk_idls ./src/idls 'artifacts/idl/*.json' 59 | -------------------------------------------------------------------------------- /scripts/parse-idls.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # This script generates the IDL JSONs without building the full packages.
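# Requires the Anchor CLI on PATH, since it shells out to `anchor idl parse`.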
4 | 5 | rm -fr artifacts/idl/ 6 | mkdir -p artifacts/idl/ 7 | 8 | for PROGRAM in $(find programs/ -maxdepth 3 -name lib.rs); do 9 | PROGRAM_NAME=$(dirname $PROGRAM | xargs dirname | xargs basename | tr '-' '_') 10 | echo "Parsing IDL for $PROGRAM_NAME" 11 | anchor idl parse --file $PROGRAM >artifacts/idl/$PROGRAM_NAME.json || { 12 | echo "Could not parse IDL" 13 | exit 1 14 | } 15 | done 16 | -------------------------------------------------------------------------------- /src/constants.ts: -------------------------------------------------------------------------------- 1 | import { buildCoderMap } from "@saberhq/anchor-contrib"; 2 | import { PublicKey } from "@solana/web3.js"; 3 | 4 | import type { SnapshotsProgram, SnapshotsTypes } from "./programs"; 5 | import { SnapshotsJSON } from "./programs"; 6 | 7 | /** 8 | * Snapshots program types. 9 | */ 10 | export interface SnapshotsPrograms { 11 | Snapshots: SnapshotsProgram; 12 | } 13 | 14 | /** 15 | * Snapshots addresses. 16 | */ 17 | export const SNAPSHOTS_ADDRESSES = { 18 | Snapshots: new PublicKey("StakeSSzfxn391k3LvdKbZP5WVwWd6AsY1DNiXHjQfK"), 19 | }; 20 | 21 | /** 22 | * Program IDLs. 23 | */ 24 | export const SNAPSHOTS_IDLS = { 25 | Snapshots: SnapshotsJSON, 26 | }; 27 | 28 | /** 29 | * Coders. 30 | */ 31 | export const SNAPSHOTS_CODERS = buildCoderMap<{ 32 | Snapshots: SnapshotsTypes; 33 | }>(SNAPSHOTS_IDLS, SNAPSHOTS_ADDRESSES); 34 | 35 | /** 36 | * Number of periods in an era. 37 | */ 38 | export const ERA_NUM_PERIODS = 256; 39 | 40 | /** 41 | * Number of seconds in a period. 42 | */ 43 | export const PERIOD_SECONDS = 86_400 * 3; 44 | 45 | /** 46 | * The Unix timestamp of the start of the first era. 47 | */ 48 | export const COMMON_ERA_UNIX_TS = 1640995200; 49 | -------------------------------------------------------------------------------- /src/index.ts: -------------------------------------------------------------------------------- 1 | export * from "./constants"; 2 | export * from "./math"; 3 | export * from "./programs"; 4 | export * from "./sdk"; 5 | export * from "./wrappers"; 6 | -------------------------------------------------------------------------------- /src/math.ts: -------------------------------------------------------------------------------- 1 | import invariant from "tiny-invariant"; 2 | 3 | import { 4 | COMMON_ERA_UNIX_TS, 5 | ERA_NUM_PERIODS, 6 | PERIOD_SECONDS, 7 | } from "./constants"; 8 | 9 | /** 10 | * Number of seconds in an era. 11 | */ 12 | export const SECONDS_PER_ERA = PERIOD_SECONDS * ERA_NUM_PERIODS; 13 | 14 | /** 15 | * Calculates the era that the given {@link Date} is in. 16 | * @param date 17 | * @returns 18 | */ 19 | export const calculateEra = (date: Date): number => { 20 | return Math.floor( 21 | (Math.floor(date.getTime() / 1_000) - COMMON_ERA_UNIX_TS) / SECONDS_PER_ERA 22 | ); 23 | }; 24 | 25 | /** 26 | * Calculates the start date of a period. 27 | * @param era 28 | * @param period 29 | * @returns 30 | */ 31 | export const calculatePeriodStart = (era: number, period: number): Date => { 32 | return new Date( 33 | (COMMON_ERA_UNIX_TS + era * SECONDS_PER_ERA + period * PERIOD_SECONDS) * 34 | 1_000 35 | ); 36 | }; 37 | 38 | /** 39 | * Calculates the start date of an era. 40 | * @param era 41 | * @returns 42 | */ 43 | export const calculateEraStart = (era: number): Date => 44 | calculatePeriodStart(era, 0); 45 | 46 | /** 47 | * Returns the eras included in a given period. 48 | * 49 | * This is useful for figuring out what histories must be fetched. 
50 | * 51 | * @param start 52 | * @param end 53 | * @returns 54 | */ 55 | export const calculateErasForPeriod = ( 56 | start: Date, 57 | end: Date 58 | ): readonly number[] => { 59 | const currentEra = calculateEra(start); 60 | const lastEra = calculateEra(end); 61 | invariant( 62 | lastEra >= currentEra, 63 | "Last era must be greater than current era." 64 | ); 65 | return Array(lastEra - currentEra + 1) 66 | .fill(null) 67 | .map((_, i) => currentEra + i); 68 | }; 69 | -------------------------------------------------------------------------------- /src/programs/index.ts: -------------------------------------------------------------------------------- 1 | export * from "./snapshots"; 2 | -------------------------------------------------------------------------------- /src/programs/snapshots.ts: -------------------------------------------------------------------------------- 1 | import type { AnchorTypes } from "@saberhq/anchor-contrib"; 2 | 3 | import type { SnapshotsIDL } from "../idls/snapshots"; 4 | 5 | export * from "../idls/snapshots"; 6 | 7 | export type SnapshotsTypes = AnchorTypes< 8 | SnapshotsIDL, 9 | { 10 | lockerHistory: LockerHistoryData; 11 | escrowHistory: EscrowHistoryData; 12 | } 13 | >; 14 | 15 | type Accounts = SnapshotsTypes["Accounts"]; 16 | 17 | export type LockerHistoryData = Accounts["LockerHistory"]; 18 | export type EscrowHistoryData = Accounts["EscrowHistory"]; 19 | 20 | export type SnapshotsProgram = SnapshotsTypes["Program"]; 21 | -------------------------------------------------------------------------------- /src/sdk.ts: -------------------------------------------------------------------------------- 1 | import { newProgramMap } from "@saberhq/anchor-contrib"; 2 | import type { AugmentedProvider, Provider } from "@saberhq/solana-contrib"; 3 | import { SolanaAugmentedProvider } from "@saberhq/solana-contrib"; 4 | import type { Signer } from "@solana/web3.js"; 5 | 6 | import type { SnapshotsPrograms } from "."; 7 | import { SNAPSHOTS_ADDRESSES, SNAPSHOTS_IDLS } from "./constants"; 8 | import { SnapshotsWrapper } from "./wrappers"; 9 | 10 | /** 11 | * Snapshots SDK. 12 | */ 13 | export class SnapshotsSDK { 14 | constructor( 15 | readonly provider: AugmentedProvider, 16 | readonly programs: SnapshotsPrograms 17 | ) {} 18 | 19 | /** 20 | * Creates a new instance of the SDK with the given keypair. 21 | */ 22 | withSigner(signer: Signer): SnapshotsSDK { 23 | return SnapshotsSDK.load({ 24 | provider: this.provider.withSigner(signer), 25 | }); 26 | } 27 | 28 | /** 29 | * Loads the SDK. 30 | * @returns 31 | */ 32 | static load({ provider }: { provider: Provider }): SnapshotsSDK { 33 | const programs: SnapshotsPrograms = newProgramMap( 34 | provider, 35 | SNAPSHOTS_IDLS, 36 | SNAPSHOTS_ADDRESSES 37 | ); 38 | return new SnapshotsSDK(new SolanaAugmentedProvider(provider), programs); 39 | } 40 | 41 | /** 42 | * Snapshots program helpers. 
43 | */ 44 | get snapshots(): SnapshotsWrapper { 45 | return new SnapshotsWrapper(this); 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /src/wrappers/index.ts: -------------------------------------------------------------------------------- 1 | export * from "./snapshots"; 2 | -------------------------------------------------------------------------------- /src/wrappers/snapshots/index.ts: -------------------------------------------------------------------------------- 1 | export * from "./pda"; 2 | export * from "./snapshots"; 3 | -------------------------------------------------------------------------------- /src/wrappers/snapshots/pda.ts: -------------------------------------------------------------------------------- 1 | import { utils } from "@project-serum/anchor"; 2 | import { PublicKey } from "@solana/web3.js"; 3 | 4 | import { SNAPSHOTS_ADDRESSES } from "../../constants"; 5 | 6 | const encodeU16 = (num: number): Buffer => { 7 | const buf = Buffer.alloc(2); 8 | buf.writeUInt16LE(num); 9 | return buf; 10 | }; 11 | 12 | /** 13 | * Finds the address of an EscrowHistory. 14 | */ 15 | export const findEscrowHistoryAddress = async ( 16 | escrow: PublicKey, 17 | era: number 18 | ): Promise<[PublicKey, number]> => { 19 | return await PublicKey.findProgramAddress( 20 | [ 21 | utils.bytes.utf8.encode("EscrowHistory"), 22 | escrow.toBuffer(), 23 | encodeU16(era), 24 | ], 25 | SNAPSHOTS_ADDRESSES.Snapshots 26 | ); 27 | }; 28 | 29 | /** 30 | * Finds the address of a LockerHistory. 31 | */ 32 | export const findLockerHistoryAddress = async ( 33 | locker: PublicKey, 34 | era: number 35 | ): Promise<[PublicKey, number]> => { 36 | return await PublicKey.findProgramAddress( 37 | [ 38 | utils.bytes.utf8.encode("LockerHistory"), 39 | locker.toBuffer(), 40 | encodeU16(era), 41 | ], 42 | SNAPSHOTS_ADDRESSES.Snapshots 43 | ); 44 | }; 45 | -------------------------------------------------------------------------------- /src/wrappers/snapshots/snapshots.ts: -------------------------------------------------------------------------------- 1 | import type { TransactionEnvelope } from "@saberhq/solana-contrib"; 2 | import type { PublicKey } from "@solana/web3.js"; 3 | import { SystemProgram } from "@solana/web3.js"; 4 | import { findEscrowAddress } from "@tribecahq/tribeca-sdk"; 5 | 6 | import type { 7 | EscrowHistoryData, 8 | LockerHistoryData, 9 | SnapshotsProgram, 10 | } from "../../programs/snapshots"; 11 | import type { SnapshotsSDK } from "../../sdk"; 12 | import { findEscrowHistoryAddress, findLockerHistoryAddress } from "."; 13 | 14 | /** 15 | * Handles interacting with the Snapshots program. 16 | */ 17 | export class SnapshotsWrapper { 18 | readonly program: SnapshotsProgram; 19 | 20 | /** 21 | * Constructor for a {@link SnapshotsWrapper}. 22 | * @param sdk 23 | */ 24 | constructor(readonly sdk: SnapshotsSDK) { 25 | this.program = sdk.programs.Snapshots; 26 | } 27 | 28 | get provider() { 29 | return this.sdk.provider; 30 | } 31 | 32 | async fetchLockerHistory(key: PublicKey): Promise { 33 | return await this.program.account.lockerHistory.fetchNullable(key); 34 | } 35 | 36 | async fetchEscrowHistory(key: PublicKey): Promise { 37 | return await this.program.account.escrowHistory.fetchNullable(key); 38 | } 39 | 40 | /** 41 | * Creates a Locker History. 
42 | * @returns 43 | */ 44 | async createLockerHistory({ 45 | locker, 46 | era, 47 | }: { 48 | locker: PublicKey; 49 | era: number; 50 | }): Promise<{ lockerHistory: PublicKey; tx: TransactionEnvelope }> { 51 | const [lockerHistory] = await findLockerHistoryAddress(locker, era); 52 | return { 53 | lockerHistory, 54 | tx: this.provider.newTX([ 55 | this.program.instruction.createLockerHistory(era, { 56 | accounts: { 57 | locker, 58 | lockerHistory, 59 | payer: this.provider.wallet.publicKey, 60 | systemProgram: SystemProgram.programId, 61 | }, 62 | }), 63 | ]), 64 | }; 65 | } 66 | 67 | /** 68 | * Creates an Escrow History. 69 | * @returns 70 | */ 71 | async createEscrowHistory({ 72 | escrow, 73 | era, 74 | }: { 75 | escrow: PublicKey; 76 | era: number; 77 | }): Promise<{ escrowHistory: PublicKey; tx: TransactionEnvelope }> { 78 | const [escrowHistory] = await findEscrowHistoryAddress(escrow, era); 79 | return { 80 | escrowHistory, 81 | tx: this.provider.newTX([ 82 | this.program.instruction.createEscrowHistory(era, { 83 | accounts: { 84 | escrow, 85 | escrowHistory, 86 | payer: this.provider.wallet.publicKey, 87 | systemProgram: SystemProgram.programId, 88 | }, 89 | }), 90 | ]), 91 | }; 92 | } 93 | 94 | /** 95 | * Synchronizes an EscrowHistory. 96 | * @returns 97 | */ 98 | async sync({ 99 | locker, 100 | owner, 101 | era, 102 | }: { 103 | locker: PublicKey; 104 | owner: PublicKey; 105 | era: number; 106 | }): Promise { 107 | const [lockerHistory] = await findLockerHistoryAddress(locker, era); 108 | const [escrow] = await findEscrowAddress(locker, owner); 109 | const [escrowHistory] = await findEscrowHistoryAddress(escrow, era); 110 | return this.provider.newTX([ 111 | this.program.instruction.sync({ 112 | accounts: { 113 | locker, 114 | escrow, 115 | lockerHistory, 116 | escrowHistory, 117 | }, 118 | }), 119 | ]); 120 | } 121 | } 122 | -------------------------------------------------------------------------------- /tests/snapshots.spec.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable @typescript-eslint/no-misused-promises */ 2 | import { GokiSDK } from "@gokiprotocol/client"; 3 | import { expectTX, expectTXTable } from "@saberhq/chai-solana"; 4 | import { createMint, sleep, u64 } from "@saberhq/token-utils"; 5 | import type { PublicKey, Signer } from "@solana/web3.js"; 6 | import type { LockerWrapper } from "@tribecahq/tribeca-sdk"; 7 | import { 8 | createLocker, 9 | DEFAULT_LOCKER_PARAMS, 10 | findEscrowAddress, 11 | TribecaSDK, 12 | } from "@tribecahq/tribeca-sdk"; 13 | import { expect } from "chai"; 14 | import { zip } from "lodash"; 15 | import invariant from "tiny-invariant"; 16 | 17 | import { 18 | findEscrowHistoryAddress, 19 | findLockerHistoryAddress, 20 | } from "../src/wrappers/snapshots/pda"; 21 | import { createUser, makeSDK } from "./workspace"; 22 | 23 | export const INITIAL_MINT_AMOUNT = new u64(1_000_000_000000); 24 | 25 | describe("Locked Voter", () => { 26 | const sdk = makeSDK(); 27 | const gokiSDK = GokiSDK.load({ provider: sdk.provider }); 28 | const tribecaSDK = TribecaSDK.load({ provider: sdk.provider }); 29 | 30 | let govTokenMint: PublicKey; 31 | 32 | let lockerW: LockerWrapper; 33 | let user: Signer; 34 | 35 | beforeEach(async () => { 36 | govTokenMint = await createMint(sdk.provider); 37 | 38 | const owners = [sdk.provider.wallet.publicKey]; 39 | 40 | const { createTXs, lockerWrapper } = await createLocker({ 41 | sdk: tribecaSDK, 42 | gokiSDK, 43 | govTokenMint, 44 | owners, 45 | lockerParams: { 46 | 
proposalActivationMinVotes: INITIAL_MINT_AMOUNT, 47 | }, 48 | }); 49 | 50 | for (const { tx: createTX } of createTXs) { 51 | await expectTX(createTX).to.be.fulfilled; 52 | } 53 | 54 | lockerW = lockerWrapper; 55 | 56 | const { tx: createLockerHistoryTX } = 57 | await sdk.snapshots.createLockerHistory({ 58 | locker: lockerW.locker, 59 | era: 0, 60 | }); 61 | await expectTX(createLockerHistoryTX).to.be.fulfilled; 62 | }); 63 | 64 | beforeEach("Create user and deposit tokens", async () => { 65 | user = await createUser(sdk.provider, govTokenMint); 66 | const lockTx = await lockerW.lockTokens({ 67 | amount: INITIAL_MINT_AMOUNT, 68 | duration: DEFAULT_LOCKER_PARAMS.maxStakeDuration, 69 | authority: user.publicKey, 70 | }); 71 | lockTx.addSigners(user); 72 | await expectTX(lockTx, "lock tokens").to.be.fulfilled; 73 | 74 | const [escrowKey] = await findEscrowAddress(lockerW.locker, user.publicKey); 75 | 76 | await Promise.all( 77 | Array(6) 78 | .fill(null) 79 | .map(async (_, era) => { 80 | const { tx: createEscrowHistoryTX } = 81 | await sdk.snapshots.createEscrowHistory({ 82 | escrow: escrowKey, 83 | era, 84 | }); 85 | await expectTX(createEscrowHistoryTX, "create escrow history").to.be 86 | .fulfilled; 87 | }) 88 | ); 89 | }); 90 | 91 | it("syncs single escrow", async () => { 92 | const [escrowKey] = await findEscrowAddress(lockerW.locker, user.publicKey); 93 | 94 | const [lockerHistory] = await findLockerHistoryAddress(lockerW.locker, 0); 95 | const [escrowHistory] = await findEscrowHistoryAddress(escrowKey, 0); 96 | 97 | const syncTX = await sdk.snapshots.sync({ 98 | locker: lockerW.locker, 99 | owner: user.publicKey, 100 | era: 0, 101 | }); 102 | await expectTXTable(syncTX, "snapshots").to.be.fulfilled; 103 | 104 | const lockerHistoryData = await sdk.snapshots.fetchLockerHistory( 105 | lockerHistory 106 | ); 107 | const escrowHistoryData = await sdk.snapshots.fetchEscrowHistory( 108 | escrowHistory 109 | ); 110 | 111 | invariant(lockerHistoryData && escrowHistoryData); 112 | 113 | expect(lockerHistoryData.veBalances).to.deep.eq( 114 | escrowHistoryData.veBalances 115 | ); 116 | }); 117 | 118 | it("sync multiple times should have no effect", async () => { 119 | const syncTX1 = await sdk.snapshots.sync({ 120 | locker: lockerW.locker, 121 | owner: user.publicKey, 122 | era: 0, 123 | }); 124 | await expectTXTable(syncTX1, "sync").to.be.fulfilled; 125 | 126 | const [escrowKey] = await findEscrowAddress(lockerW.locker, user.publicKey); 127 | 128 | const [lockerHistory] = await findLockerHistoryAddress(lockerW.locker, 0); 129 | const [escrowHistory] = await findEscrowHistoryAddress(escrowKey, 0); 130 | 131 | const lockerHistoryData1 = await sdk.snapshots.fetchLockerHistory( 132 | lockerHistory 133 | ); 134 | const escrowHistoryData1 = await sdk.snapshots.fetchEscrowHistory( 135 | escrowHistory 136 | ); 137 | 138 | const syncTX2 = await sdk.snapshots.sync({ 139 | locker: lockerW.locker, 140 | owner: user.publicKey, 141 | era: 0, 142 | }); 143 | await expectTXTable(syncTX2, "sync again").to.be.fulfilled; 144 | 145 | const lockerHistoryData2 = await sdk.snapshots.fetchLockerHistory( 146 | lockerHistory 147 | ); 148 | const escrowHistoryData2 = await sdk.snapshots.fetchEscrowHistory( 149 | escrowHistory 150 | ); 151 | 152 | invariant( 153 | lockerHistoryData1 && 154 | escrowHistoryData1 && 155 | lockerHistoryData2 && 156 | escrowHistoryData2 157 | ); 158 | 159 | expect(lockerHistoryData1.veBalances).to.deep.eq( 160 | lockerHistoryData2.veBalances 161 | ); 162 | 
expect(escrowHistoryData1.veBalances).to.deep.eq( 163 | escrowHistoryData2.veBalances 164 | ); 165 | }); 166 | 167 | it("syncs multiple escrows", async () => { 168 | const [lockerHistory] = await findLockerHistoryAddress(lockerW.locker, 0); 169 | const initialLockerHistoryData = await sdk.snapshots.fetchLockerHistory( 170 | lockerHistory 171 | ); 172 | 173 | invariant(initialLockerHistoryData); 174 | expect(initialLockerHistoryData.veBalances.length).to.eq(256); 175 | 176 | Array(256) 177 | .fill(null) 178 | .map((_, i) => { 179 | expect(initialLockerHistoryData.veBalances[i]).to.bignumber.zero; 180 | expect(initialLockerHistoryData.veCounts[i]).to.bignumber.zero; 181 | }); 182 | 183 | const user2 = await createUser(sdk.provider, govTokenMint); 184 | const lockTx = await lockerW.lockTokens({ 185 | amount: INITIAL_MINT_AMOUNT, 186 | duration: DEFAULT_LOCKER_PARAMS.maxStakeDuration, 187 | authority: user2.publicKey, 188 | }); 189 | lockTx.addSigners(user2); 190 | await expectTX(lockTx, "lock tokens").to.be.fulfilled; 191 | 192 | const [escrow2Key] = await findEscrowAddress( 193 | lockerW.locker, 194 | user2.publicKey 195 | ); 196 | await Promise.all( 197 | Array(6) 198 | .fill(null) 199 | .map(async (_, era) => { 200 | const { tx: createEscrowHistoryTX } = await sdk 201 | .withSigner(user2) 202 | .snapshots.createEscrowHistory({ 203 | escrow: escrow2Key, 204 | era, 205 | }); 206 | await expectTX(createEscrowHistoryTX, "create escrow history").to.be 207 | .fulfilled; 208 | }) 209 | ); 210 | 211 | const [escrow2History] = await findEscrowHistoryAddress(escrow2Key, 0); 212 | 213 | const syncTX = await sdk.snapshots.sync({ 214 | locker: lockerW.locker, 215 | owner: user.publicKey, 216 | era: 0, 217 | }); 218 | await expectTXTable(syncTX, "snapshots 1").to.be.fulfilled; 219 | 220 | const sync2TX = await sdk.withSigner(user2).snapshots.sync({ 221 | locker: lockerW.locker, 222 | owner: user2.publicKey, 223 | era: 0, 224 | }); 225 | await expectTXTable(sync2TX, "snapshots 2", { 226 | verbosity: "always", 227 | }).to.be.fulfilled; 228 | 229 | const lockerHistoryData = await sdk.snapshots.fetchLockerHistory( 230 | lockerHistory 231 | ); 232 | 233 | const [escrow1Key] = await findEscrowAddress( 234 | lockerW.locker, 235 | user.publicKey 236 | ); 237 | const [escrow1History] = await findEscrowHistoryAddress(escrow1Key, 0); 238 | const escrow1HistoryData = await sdk.snapshots.fetchEscrowHistory( 239 | escrow1History 240 | ); 241 | const escrow2HistoryData = await sdk.snapshots.fetchEscrowHistory( 242 | escrow2History 243 | ); 244 | 245 | invariant(lockerHistoryData && escrow1HistoryData && escrow2HistoryData); 246 | 247 | zip(escrow1HistoryData.veBalances, escrow2HistoryData.veBalances).map( 248 | ([e1, e2], i) => { 249 | invariant(e1 && e2); 250 | const expected = e1.add(e2); 251 | expect(lockerHistoryData.veBalances[i], `period ${i}`).to.bignumber.eq( 252 | expected 253 | ); 254 | if (!e1.isZero() && !e2.isZero()) { 255 | expect(lockerHistoryData.veCounts[i], `count ${i}`).to.bignumber.eq( 256 | "2" 257 | ); 258 | } 259 | } 260 | ); 261 | }); 262 | 263 | it("changes with a refresh", async () => { 264 | const [escrowKey] = await findEscrowAddress(lockerW.locker, user.publicKey); 265 | 266 | const [lockerHistory] = await findLockerHistoryAddress(lockerW.locker, 0); 267 | const [escrowHistory] = await findEscrowHistoryAddress(escrowKey, 0); 268 | 269 | const syncTX = await sdk.snapshots.sync({ 270 | locker: lockerW.locker, 271 | owner: user.publicKey, 272 | era: 0, 273 | }); 274 | await 
expectTXTable(syncTX, "snapshots").to.be.fulfilled; 275 | 276 | const lockerHistoryData = await sdk.snapshots.fetchLockerHistory( 277 | lockerHistory 278 | ); 279 | const escrowHistoryData = await sdk.snapshots.fetchEscrowHistory( 280 | escrowHistory 281 | ); 282 | invariant(lockerHistoryData && escrowHistoryData); 283 | expect(lockerHistoryData.veBalances).to.deep.eq( 284 | escrowHistoryData.veBalances 285 | ); 286 | 287 | // sleep so we can get more lock 288 | await sleep(1_000); 289 | 290 | const lockTx = await lockerW.lockTokens({ 291 | amount: new u64(0), 292 | duration: DEFAULT_LOCKER_PARAMS.maxStakeDuration, 293 | authority: user.publicKey, 294 | }); 295 | lockTx.addSigners(user); 296 | await expectTX(lockTx, "lock tokens").to.be.fulfilled; 297 | 298 | const sync2TX = await sdk.snapshots.sync({ 299 | locker: lockerW.locker, 300 | owner: user.publicKey, 301 | era: 0, 302 | }); 303 | await expectTXTable(sync2TX, "snapshots sync v2").to.be.fulfilled; 304 | 305 | const lockerHistoryData2 = await sdk.snapshots.fetchLockerHistory( 306 | lockerHistory 307 | ); 308 | const escrowHistoryData2 = await sdk.snapshots.fetchEscrowHistory( 309 | escrowHistory 310 | ); 311 | invariant(lockerHistoryData2 && escrowHistoryData2); 312 | expect(lockerHistoryData2.veBalances).to.deep.eq( 313 | escrowHistoryData2.veBalances 314 | ); 315 | 316 | // should have changed 317 | expect(lockerHistoryData2.veBalances).to.not.deep.eq( 318 | lockerHistoryData.veBalances 319 | ); 320 | expect(escrowHistoryData2.veBalances).to.not.deep.eq( 321 | escrowHistoryData.veBalances 322 | ); 323 | }); 324 | }); 325 | -------------------------------------------------------------------------------- /tests/test-key.json: -------------------------------------------------------------------------------- 1 | [124,235,244,191,124,130,119,54,168,240,95,43,143,143,172,111,220,140,28,138,65,147,182,234,201,213,36,167,248,146,241,236,79,20,175,246,26,100,63,46,248,14,252,148,108,135,142,124,217,127,182,45,19,45,150,23,49,10,91,167,54,72,230,57] -------------------------------------------------------------------------------- /tests/workspace/index.ts: -------------------------------------------------------------------------------- 1 | export * from "./workspace"; 2 | -------------------------------------------------------------------------------- /tests/workspace/workspace.ts: -------------------------------------------------------------------------------- 1 | import * as anchor from "@project-serum/anchor"; 2 | import { AnchorProvider } from "@project-serum/anchor"; 3 | import { makeSaberProvider } from "@saberhq/anchor-contrib"; 4 | import { chaiSolana, expectTX } from "@saberhq/chai-solana"; 5 | import type { Provider } from "@saberhq/solana-contrib"; 6 | import { TransactionEnvelope } from "@saberhq/solana-contrib"; 7 | import { 8 | getOrCreateATA, 9 | SPLToken, 10 | TOKEN_PROGRAM_ID, 11 | u64, 12 | } from "@saberhq/token-utils"; 13 | import type { PublicKey, Signer } from "@solana/web3.js"; 14 | import { 15 | Keypair, 16 | LAMPORTS_PER_SOL, 17 | TransactionInstruction, 18 | } from "@solana/web3.js"; 19 | import chai from "chai"; 20 | 21 | import type { SnapshotsPrograms } from "../../src"; 22 | import { SnapshotsSDK } from "../../src"; 23 | import { INITIAL_MINT_AMOUNT } from "../snapshots.spec"; 24 | 25 | chai.use(chaiSolana); 26 | 27 | export type Workspace = SnapshotsPrograms; 28 | 29 | export const makeSDK = (): SnapshotsSDK => { 30 | const anchorProvider = AnchorProvider.env(); 31 | anchor.setProvider(anchorProvider); 32 | const 
provider = makeSaberProvider(anchorProvider); 33 | return SnapshotsSDK.load({ 34 | provider, 35 | }); 36 | }; 37 | 38 | export const DUMMY_INSTRUCTIONS = [ 39 | Keypair.generate().publicKey, 40 | Keypair.generate().publicKey, 41 | Keypair.generate().publicKey, 42 | ].map( 43 | (pid) => 44 | new TransactionInstruction({ 45 | programId: pid, 46 | keys: [], 47 | }) 48 | ); 49 | 50 | export const createUser = async ( 51 | provider: Provider, 52 | govTokenMint: PublicKey 53 | ): Promise => { 54 | const user = Keypair.generate(); 55 | 56 | await provider.connection.requestAirdrop(user.publicKey, LAMPORTS_PER_SOL); 57 | 58 | const { address, instruction } = await getOrCreateATA({ 59 | provider, 60 | mint: govTokenMint, 61 | owner: user.publicKey, 62 | }); 63 | const mintToIx = SPLToken.createMintToInstruction( 64 | TOKEN_PROGRAM_ID, 65 | govTokenMint, 66 | address, 67 | provider.wallet.publicKey, 68 | [], 69 | new u64(INITIAL_MINT_AMOUNT) 70 | ); 71 | 72 | const tx = new TransactionEnvelope( 73 | provider, 74 | instruction ? [instruction, mintToIx] : [mintToIx] 75 | ); 76 | await expectTX(tx, "mint gov tokens to user").to.be.fulfilled; 77 | 78 | return user; 79 | }; 80 | -------------------------------------------------------------------------------- /tsconfig.build.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "./tsconfig.json", 3 | "compilerOptions": { 4 | "noEmit": false, 5 | "module": "CommonJS", 6 | "outDir": "dist/cjs/" 7 | }, 8 | "include": ["src/"] 9 | } 10 | -------------------------------------------------------------------------------- /tsconfig.esm.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "./tsconfig.build.json", 3 | "compilerOptions": { 4 | "module": "ES2015", 5 | "outDir": "dist/esm/" 6 | }, 7 | "include": ["src/"] 8 | } 9 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "@saberhq/tsconfig/tsconfig.lib.json", 3 | "compilerOptions": { 4 | "module": "CommonJS", 5 | "noErrorTruncation": true, 6 | "types": ["mocha"] 7 | }, 8 | "include": ["src/", "tests/"] 9 | } 10 | --------------------------------------------------------------------------------
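Finally, to tie the SDK pieces above together, here is a hedged end-to-end sketch that creates the snapshot history accounts for the current era and then syncs an escrow. It assumes the published package name shown in the README badge (`@saberhq/snapshots`), a funded `Provider`, an existing Tribeca locker whose `owner` already holds an escrow, and that `TransactionEnvelope.confirm()` is available for sending (as in other Saber tooling); the wrapper methods (`createLockerHistory`, `createEscrowHistory`, `sync`) and `calculateEra` are the ones defined in `src/` above, while error handling (for example, histories that already exist for the era) is omitted.

```ts
import type { Provider } from "@saberhq/solana-contrib";
import type { PublicKey } from "@solana/web3.js";
import { findEscrowAddress } from "@tribecahq/tribeca-sdk";

import { calculateEra, SnapshotsSDK } from "@saberhq/snapshots";

/**
 * Creates the LockerHistory/EscrowHistory accounts for the era containing
 * `date` and records the owner's current veToken balance into them.
 * Illustrative only: re-creating histories that already exist will fail.
 */
export const snapshotEscrow = async (
  provider: Provider,
  locker: PublicKey,
  owner: PublicKey,
  date: Date = new Date()
): Promise<void> => {
  const sdk = SnapshotsSDK.load({ provider });
  const era = calculateEra(date);

  // Histories are PDAs derived from (locker or escrow, era); create them once per era.
  const { tx: createLockerHistoryTX } = await sdk.snapshots.createLockerHistory(
    { locker, era }
  );
  await createLockerHistoryTX.confirm();

  const [escrow] = await findEscrowAddress(locker, owner);
  const { tx: createEscrowHistoryTX } = await sdk.snapshots.createEscrowHistory(
    { escrow, era }
  );
  await createEscrowHistoryTX.confirm();

  // Sync writes the escrow's veToken balance into every not-yet-started
  // period of the era and updates the locker-wide totals.
  const syncTX = await sdk.snapshots.sync({ locker, owner, era });
  await syncTX.confirm();
};
```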