├── .envrc ├── .eslintrc.js ├── .github ├── dependabot.yml └── workflows │ ├── nix.yml │ ├── programs.yml │ ├── release.yml │ └── rust.yml ├── .gitignore ├── .husky ├── .gitignore └── pre-commit ├── .prettierignore ├── .vscode ├── extensions.json └── settings.json ├── .yarn ├── plugins │ └── @yarnpkg │ │ ├── plugin-interactive-tools.cjs │ │ ├── plugin-typescript.cjs │ │ └── plugin-version.cjs ├── releases │ └── yarn-3.0.2.cjs └── sdks │ ├── eslint │ ├── bin │ │ └── eslint.js │ ├── lib │ │ └── api.js │ └── package.json │ ├── integrations.yml │ ├── prettier │ ├── index.js │ └── package.json │ └── typescript │ ├── bin │ ├── tsc │ └── tsserver │ ├── lib │ ├── tsc.js │ ├── tsserver.js │ ├── tsserverlibrary.js │ └── typescript.js │ └── package.json ├── .yarnrc.yml ├── Anchor.toml ├── Cargo.lock ├── Cargo.toml ├── LICENSE ├── README.md ├── ci.nix ├── flake.lock ├── flake.nix ├── package.json ├── programs └── migrator │ ├── Cargo.toml │ ├── README.md │ ├── Xargo.toml │ └── src │ ├── account_contexts.rs │ ├── account_validators.rs │ ├── bpf_loader_upgradeable.rs │ ├── instructions │ ├── approver.rs │ ├── mod.rs │ └── public.rs │ ├── lib.rs │ ├── macros.rs │ ├── migrate.rs │ └── state.rs ├── scripts ├── generate-idl-types.sh └── idl.sh ├── shell.nix ├── src ├── constants.ts ├── index.ts ├── pda.ts ├── sdk.ts ├── types.ts └── wrapper.ts ├── tests └── migrator.spec.ts ├── tsconfig.build.json ├── tsconfig.esm.json ├── tsconfig.json └── yarn.lock /.envrc: -------------------------------------------------------------------------------- 1 | watch_file flake.nix 2 | watch_file flake.lock 3 | mkdir -p .direnv 4 | dotenv 5 | eval "$(nix print-dev-env --profile "$(direnv_layout_dir)/flake-profile")" 6 | -------------------------------------------------------------------------------- /.eslintrc.js: -------------------------------------------------------------------------------- 1 | require("@rushstack/eslint-patch/modern-module-resolution"); 2 | 3 | module.exports = { 4 | extends: 
["@saberhq"], 5 | parserOptions: { 6 | tsconfigRootDir: __dirname, 7 | project: "tsconfig.json", 8 | }, 9 | }; 10 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "cargo" 4 | directory: "/" 5 | schedule: 6 | interval: "daily" 7 | 8 | - package-ecosystem: "npm" 9 | directory: "/" 10 | schedule: 11 | interval: "daily" 12 | 13 | - package-ecosystem: "github-actions" 14 | directory: "/" 15 | schedule: 16 | interval: "daily" 17 | -------------------------------------------------------------------------------- /.github/workflows/nix.yml: -------------------------------------------------------------------------------- 1 | name: Nix environment 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | paths: 8 | - "*.nix" 9 | - flake.lock 10 | pull_request: 11 | branches: 12 | - master 13 | paths: 14 | - "*.nix" 15 | - flake.lock 16 | 17 | jobs: 18 | build: 19 | runs-on: ubuntu-latest 20 | steps: 21 | - uses: actions/checkout@v2 22 | - uses: cachix/install-nix-action@v14 23 | with: 24 | install_url: https://nixos-nix-install-tests.cachix.org/serve/i6laym9jw3wg9mw6ncyrk6gjx4l34vvx/install 25 | install_options: "--tarball-url-prefix https://nixos-nix-install-tests.cachix.org/serve" 26 | extra_nix_config: | 27 | experimental-features = nix-command flakes 28 | access-tokens = github.com=${{ secrets.GITHUB_TOKEN }} 29 | - name: Setup Cachix 30 | uses: cachix/cachix-action@v10 31 | with: 32 | name: deploydao 33 | extraPullNames: saber 34 | authToken: "${{ secrets.CACHIX_AUTH_TOKEN }}" 35 | - name: Check flake 36 | run: nix flake check -v --show-trace --no-update-lock-file 37 | - run: nix develop -c echo success 38 | -------------------------------------------------------------------------------- /.github/workflows/programs.yml: 
-------------------------------------------------------------------------------- 1 | name: Programs 2 | 3 | on: 4 | push: 5 | branches: [master] 6 | pull_request: 7 | branches: [master] 8 | 9 | env: 10 | CARGO_TERM_COLOR: always 11 | SOLANA_VERSION: "1.7.11" 12 | RUST_TOOLCHAIN: nightly-2021-09-24 13 | 14 | jobs: 15 | integration-tests: 16 | runs-on: ubuntu-latest 17 | steps: 18 | - uses: actions/checkout@v2 19 | 20 | # Install Rust 21 | - name: Install Rust nightly 22 | uses: actions-rs/toolchain@v1 23 | with: 24 | override: true 25 | profile: minimal 26 | toolchain: ${{ env.RUST_TOOLCHAIN }} 27 | - uses: Swatinem/rust-cache@v1 28 | - name: Install Linux dependencies 29 | run: | 30 | sudo apt-get update 31 | sudo apt-get install -y pkg-config build-essential libudev-dev 32 | 33 | - uses: cachix/install-nix-action@v14 34 | with: 35 | install_url: https://nixos-nix-install-tests.cachix.org/serve/i6laym9jw3wg9mw6ncyrk6gjx4l34vvx/install 36 | install_options: "--tarball-url-prefix https://nixos-nix-install-tests.cachix.org/serve" 37 | extra_nix_config: | 38 | experimental-features = nix-command flakes 39 | - name: Setup Cachix 40 | uses: cachix/cachix-action@v10 41 | with: 42 | name: deploydao 43 | extraPullNames: saber 44 | authToken: "${{ secrets.CACHIX_AUTH_TOKEN }}" 45 | 46 | # Install Solana 47 | - name: Cache Solana binaries 48 | uses: actions/cache@v2 49 | with: 50 | path: ~/.cache/solana 51 | key: ${{ runner.os }}-${{ env.SOLANA_VERSION }} 52 | - name: Install Solana 53 | run: | 54 | sh -c "$(curl -sSfL https://release.solana.com/v${{ env.SOLANA_VERSION }}/install)" 55 | echo "$HOME/.local/share/solana/install/active_release/bin" >> $GITHUB_PATH 56 | export PATH="/home/runner/.local/share/solana/install/active_release/bin:$PATH" 57 | solana --version 58 | echo "Generating keypair..." 
59 | solana-keygen new -o "$HOME/.config/solana/id.json" --no-passphrase --silent 60 | 61 | - name: Yarn Cache 62 | uses: actions/cache@v2 63 | with: 64 | path: ./.yarn/cache 65 | key: ${{ runner.os }}-${{ hashFiles('./.yarn/**/*.js') }} 66 | - name: Setup Node 67 | uses: actions/setup-node@v2 68 | with: 69 | node-version: "16" 70 | - run: yarn install 71 | 72 | - name: Build program 73 | run: nix shell .#ci --command anchor build 74 | - run: nix shell .#ci --command yarn idl:generate 75 | - run: nix shell .#ci --command yarn build 76 | - run: nix shell .#ci --command yarn test 77 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Release 2 | 3 | on: 4 | workflow_dispatch: {} 5 | push: 6 | tags: 7 | - "v*.*.*" 8 | 9 | env: 10 | CARGO_TERM_COLOR: always 11 | SOLANA_VERSION: "1.7.11" 12 | RUST_TOOLCHAIN: nightly-2021-09-01 13 | NPM_AUTH_TOKEN: ${{ secrets.NPM_PUBLISH_TOKEN }} 14 | 15 | jobs: 16 | release-sdk: 17 | runs-on: ubuntu-latest 18 | name: Release SDK on NPM 19 | steps: 20 | - uses: actions/checkout@v2 21 | 22 | - uses: cachix/install-nix-action@v14 23 | with: 24 | install_url: https://nixos-nix-install-tests.cachix.org/serve/i6laym9jw3wg9mw6ncyrk6gjx4l34vvx/install 25 | install_options: "--tarball-url-prefix https://nixos-nix-install-tests.cachix.org/serve" 26 | extra_nix_config: | 27 | experimental-features = nix-command flakes 28 | - name: Setup Cachix 29 | uses: cachix/cachix-action@v10 30 | with: 31 | name: deploydao 32 | extraPullNames: saber 33 | authToken: ${{ secrets.CACHIX_AUTH_TOKEN }} 34 | 35 | - name: Setup Node 36 | uses: actions/setup-node@v2 37 | with: 38 | always-auth: true 39 | node-version: 14.x 40 | - name: Yarn Cache 41 | uses: actions/cache@v2 42 | with: 43 | path: ./.yarn/cache 44 | key: ${{ runner.os }}-${{ hashFiles('./.yarn/**/*.js') }} 45 | - name: Install Yarn dependencies 46 | 
run: yarn install 47 | - name: Parse IDLs 48 | run: nix shell .#ci --command yarn idl:generate 49 | - run: yarn build 50 | - run: | 51 | echo 'npmAuthToken: "${NPM_AUTH_TOKEN}"' >> .yarnrc.yml 52 | cat .yarnrc.yml 53 | - name: Publish 54 | run: yarn npm publish 55 | env: 56 | NPM_AUTH_TOKEN: ${{ secrets.NPM_PUBLISH_TOKEN }} 57 | 58 | release-binaries: 59 | runs-on: ubuntu-latest 60 | name: Release verifiable binaries 61 | steps: 62 | - uses: actions/checkout@v2 63 | - uses: cachix/install-nix-action@v14 64 | with: 65 | install_url: https://nixos-nix-install-tests.cachix.org/serve/i6laym9jw3wg9mw6ncyrk6gjx4l34vvx/install 66 | install_options: "--tarball-url-prefix https://nixos-nix-install-tests.cachix.org/serve" 67 | extra_nix_config: | 68 | experimental-features = nix-command flakes 69 | - name: Setup Cachix 70 | uses: cachix/cachix-action@v10 71 | with: 72 | name: deploydao 73 | extraPullNames: saber 74 | authToken: ${{ secrets.CACHIX_AUTH_TOKEN }} 75 | 76 | - name: Build programs 77 | run: nix shell .#ci --command anchor build -v 78 | - name: Release 79 | uses: softprops/action-gh-release@v1 80 | with: 81 | files: | 82 | target/deploy/* 83 | target/idl/* 84 | target/verifiable/* 85 | 86 | release-crate: 87 | runs-on: ubuntu-latest 88 | name: Release crate on crates.io 89 | steps: 90 | - uses: actions/checkout@v2 91 | - name: Install Rust nightly 92 | uses: actions-rs/toolchain@v1 93 | with: 94 | override: true 95 | profile: minimal 96 | toolchain: ${{ env.RUST_TOOLCHAIN }} 97 | - uses: Swatinem/rust-cache@v1 98 | - name: Install cargo-workspaces 99 | run: cargo install cargo-workspaces || true 100 | - name: Publish crates 101 | run: cargo ws publish --from-git --yes --skip-published --token ${{ secrets.CARGO_PUBLISH_TOKEN }} 102 | -------------------------------------------------------------------------------- /.github/workflows/rust.yml: -------------------------------------------------------------------------------- 1 | name: Rust 2 | 3 | on: 4 | push: 5 | 
branches: [master] 6 | paths: 7 | - Anchor.toml 8 | - .github/workflows/rust.yml 9 | - programs/** 10 | - Cargo.* 11 | pull_request: 12 | branches: [master] 13 | paths: 14 | - Anchor.toml 15 | - .github/workflows/rust.yml 16 | - programs/** 17 | - Cargo.* 18 | 19 | env: 20 | CARGO_TERM_COLOR: always 21 | RUST_TOOLCHAIN: nightly-2021-09-24 22 | 23 | jobs: 24 | lint: 25 | runs-on: ubuntu-latest 26 | steps: 27 | - uses: actions/checkout@v2 28 | - name: Install Rust nightly 29 | uses: actions-rs/toolchain@v1 30 | with: 31 | override: true 32 | profile: minimal 33 | toolchain: ${{ env.RUST_TOOLCHAIN }} 34 | components: rustfmt, clippy 35 | - uses: Swatinem/rust-cache@v1 36 | - name: Run fmt 37 | run: cargo fmt -- --check 38 | - name: Run clippy 39 | run: cargo clippy --all-targets -- --deny=warnings 40 | 41 | unit-tests: 42 | runs-on: ubuntu-latest 43 | steps: 44 | - uses: actions/checkout@v2 45 | - name: Install Rust nightly 46 | uses: actions-rs/toolchain@v1 47 | with: 48 | override: true 49 | profile: minimal 50 | toolchain: ${{ env.RUST_TOOLCHAIN }} 51 | components: rustfmt, clippy 52 | - uses: Swatinem/rust-cache@v1 53 | - name: Run unit tests 54 | run: cargo test --lib 55 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | 2 | .anchor 3 | .DS_Store 4 | target 5 | **/*.rs.bk 6 | 7 | # Created by https://www.toptal.com/developers/gitignore/api/node 8 | # Edit at https://www.toptal.com/developers/gitignore?templates=node 9 | 10 | ### Node ### 11 | # Logs 12 | logs 13 | *.log 14 | npm-debug.log* 15 | yarn-debug.log* 16 | yarn-error.log* 17 | lerna-debug.log* 18 | .pnpm-debug.log* 19 | 20 | # Diagnostic reports (https://nodejs.org/api/report.html) 21 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 22 | 23 | # Runtime data 24 | pids 25 | *.pid 26 | *.seed 27 | *.pid.lock 28 | 29 | # Directory for instrumented libs generated by jscoverage/JSCover 30 
| lib-cov 31 | 32 | # Coverage directory used by tools like istanbul 33 | coverage 34 | *.lcov 35 | 36 | # nyc test coverage 37 | .nyc_output 38 | 39 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 40 | .grunt 41 | 42 | # Bower dependency directory (https://bower.io/) 43 | bower_components 44 | 45 | # node-waf configuration 46 | .lock-wscript 47 | 48 | # Compiled binary addons (https://nodejs.org/api/addons.html) 49 | build/Release 50 | 51 | # Dependency directories 52 | node_modules/ 53 | jspm_packages/ 54 | 55 | # Snowpack dependency directory (https://snowpack.dev/) 56 | web_modules/ 57 | 58 | # TypeScript cache 59 | *.tsbuildinfo 60 | 61 | # Optional npm cache directory 62 | .npm 63 | 64 | # Optional eslint cache 65 | .eslintcache 66 | 67 | # Microbundle cache 68 | .rpt2_cache/ 69 | .rts2_cache_cjs/ 70 | .rts2_cache_es/ 71 | .rts2_cache_umd/ 72 | 73 | # Optional REPL history 74 | .node_repl_history 75 | 76 | # Output of 'npm pack' 77 | *.tgz 78 | 79 | # Yarn Integrity file 80 | .yarn-integrity 81 | 82 | # dotenv environment variables file 83 | .env 84 | .env.test 85 | .env.production 86 | 87 | # parcel-bundler cache (https://parceljs.org/) 88 | .cache 89 | .parcel-cache 90 | 91 | # Next.js build output 92 | .next 93 | out 94 | 95 | # Nuxt.js build / generate output 96 | .nuxt 97 | dist 98 | 99 | # Gatsby files 100 | .cache/ 101 | # Comment in the public line in if your project uses Gatsby and not Next.js 102 | # https://nextjs.org/blog/next-9-1#public-directory-support 103 | # public 104 | 105 | # vuepress build output 106 | .vuepress/dist 107 | 108 | # Serverless directories 109 | .serverless/ 110 | 111 | # FuseBox cache 112 | .fusebox/ 113 | 114 | # DynamoDB Local files 115 | .dynamodb/ 116 | 117 | # TernJS port file 118 | .tern-port 119 | 120 | # Stores VSCode versions used for testing VSCode extensions 121 | .vscode-test 122 | 123 | .pnp.* 124 | .yarn/* 125 | !.yarn/patches 126 | !.yarn/plugins 127 | !.yarn/releases 
128 | !.yarn/sdks 129 | 130 | 131 | # End of https://www.toptal.com/developers/gitignore/api/node 132 | 133 | artifacts/ 134 | src/idls/ 135 | data/distributor-info.json 136 | -------------------------------------------------------------------------------- /.husky/.gitignore: -------------------------------------------------------------------------------- 1 | _ 2 | -------------------------------------------------------------------------------- /.husky/pre-commit: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | . "$(dirname "$0")/_/husky.sh" 3 | 4 | yarn lint-staged 5 | -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | .yarn/ 2 | -------------------------------------------------------------------------------- /.vscode/extensions.json: -------------------------------------------------------------------------------- 1 | { 2 | "recommendations": [ 3 | "arcanis.vscode-zipfs", 4 | "dbaeumer.vscode-eslint", 5 | "esbenp.prettier-vscode" 6 | ] 7 | } 8 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "typescript.tsdk": ".yarn/sdks/typescript/lib", 3 | "search.exclude": { 4 | "**/.yarn": true, 5 | "**/.pnp.*": true 6 | }, 7 | "eslint.nodePath": ".yarn/sdks", 8 | "prettier.prettierPath": ".yarn/sdks/prettier/index.js", 9 | "typescript.enablePromptUseWorkspaceTsdk": true 10 | } 11 | -------------------------------------------------------------------------------- /.yarn/plugins/@yarnpkg/plugin-typescript.cjs: -------------------------------------------------------------------------------- 1 | /* eslint-disable */ 2 | //prettier-ignore 3 | module.exports = { 4 | name: "@yarnpkg/plugin-typescript", 5 | factory: function (require) { 6 | var 
plugin=(()=>{var Ft=Object.create,H=Object.defineProperty,Bt=Object.defineProperties,Kt=Object.getOwnPropertyDescriptor,zt=Object.getOwnPropertyDescriptors,Gt=Object.getOwnPropertyNames,Q=Object.getOwnPropertySymbols,$t=Object.getPrototypeOf,ne=Object.prototype.hasOwnProperty,De=Object.prototype.propertyIsEnumerable;var Re=(e,t,r)=>t in e?H(e,t,{enumerable:!0,configurable:!0,writable:!0,value:r}):e[t]=r,u=(e,t)=>{for(var r in t||(t={}))ne.call(t,r)&&Re(e,r,t[r]);if(Q)for(var r of Q(t))De.call(t,r)&&Re(e,r,t[r]);return e},g=(e,t)=>Bt(e,zt(t)),Lt=e=>H(e,"__esModule",{value:!0});var R=(e,t)=>{var r={};for(var s in e)ne.call(e,s)&&t.indexOf(s)<0&&(r[s]=e[s]);if(e!=null&&Q)for(var s of Q(e))t.indexOf(s)<0&&De.call(e,s)&&(r[s]=e[s]);return r};var I=(e,t)=>()=>(t||e((t={exports:{}}).exports,t),t.exports),Vt=(e,t)=>{for(var r in t)H(e,r,{get:t[r],enumerable:!0})},Qt=(e,t,r)=>{if(t&&typeof t=="object"||typeof t=="function")for(let s of Gt(t))!ne.call(e,s)&&s!=="default"&&H(e,s,{get:()=>t[s],enumerable:!(r=Kt(t,s))||r.enumerable});return e},C=e=>Qt(Lt(H(e!=null?Ft($t(e)):{},"default",e&&e.__esModule&&"default"in e?{get:()=>e.default,enumerable:!0}:{value:e,enumerable:!0})),e);var xe=I(J=>{"use strict";Object.defineProperty(J,"__esModule",{value:!0});function _(e){let t=[...e.caches],r=t.shift();return r===void 0?ve():{get(s,n,a={miss:()=>Promise.resolve()}){return r.get(s,n,a).catch(()=>_({caches:t}).get(s,n,a))},set(s,n){return r.set(s,n).catch(()=>_({caches:t}).set(s,n))},delete(s){return r.delete(s).catch(()=>_({caches:t}).delete(s))},clear(){return r.clear().catch(()=>_({caches:t}).clear())}}}function ve(){return{get(e,t,r={miss:()=>Promise.resolve()}){return t().then(n=>Promise.all([n,r.miss(n)])).then(([n])=>n)},set(e,t){return Promise.resolve(t)},delete(e){return Promise.resolve()},clear(){return Promise.resolve()}}}J.createFallbackableCache=_;J.createNullCache=ve});var Ee=I(($s,qe)=>{qe.exports=xe()});var Te=I(ae=>{"use 
strict";Object.defineProperty(ae,"__esModule",{value:!0});function Jt(e={serializable:!0}){let t={};return{get(r,s,n={miss:()=>Promise.resolve()}){let a=JSON.stringify(r);if(a in t)return Promise.resolve(e.serializable?JSON.parse(t[a]):t[a]);let o=s(),d=n&&n.miss||(()=>Promise.resolve());return o.then(y=>d(y)).then(()=>o)},set(r,s){return t[JSON.stringify(r)]=e.serializable?JSON.stringify(s):s,Promise.resolve(s)},delete(r){return delete t[JSON.stringify(r)],Promise.resolve()},clear(){return t={},Promise.resolve()}}}ae.createInMemoryCache=Jt});var we=I((Vs,Me)=>{Me.exports=Te()});var Ce=I(M=>{"use strict";Object.defineProperty(M,"__esModule",{value:!0});function Xt(e,t,r){let s={"x-algolia-api-key":r,"x-algolia-application-id":t};return{headers(){return e===oe.WithinHeaders?s:{}},queryParameters(){return e===oe.WithinQueryParameters?s:{}}}}function Yt(e){let t=0,r=()=>(t++,new Promise(s=>{setTimeout(()=>{s(e(r))},Math.min(100*t,1e3))}));return e(r)}function ke(e,t=(r,s)=>Promise.resolve()){return Object.assign(e,{wait(r){return ke(e.then(s=>Promise.all([t(s,r),s])).then(s=>s[1]))}})}function Zt(e){let t=e.length-1;for(t;t>0;t--){let r=Math.floor(Math.random()*(t+1)),s=e[t];e[t]=e[r],e[r]=s}return e}function er(e,t){return Object.keys(t!==void 0?t:{}).forEach(r=>{e[r]=t[r](e)}),e}function tr(e,...t){let r=0;return e.replace(/%s/g,()=>encodeURIComponent(t[r++]))}var rr="4.2.0",sr=e=>()=>e.transporter.requester.destroy(),oe={WithinQueryParameters:0,WithinHeaders:1};M.AuthMode=oe;M.addMethods=er;M.createAuth=Xt;M.createRetryablePromise=Yt;M.createWaitablePromise=ke;M.destroy=sr;M.encode=tr;M.shuffle=Zt;M.version=rr});var F=I((Js,Ue)=>{Ue.exports=Ce()});var Ne=I(ie=>{"use strict";Object.defineProperty(ie,"__esModule",{value:!0});var nr={Delete:"DELETE",Get:"GET",Post:"POST",Put:"PUT"};ie.MethodEnum=nr});var B=I((Ys,We)=>{We.exports=Ne()});var Ze=I(A=>{"use strict";Object.defineProperty(A,"__esModule",{value:!0});var He=B();function ce(e,t){let r=e||{},s=r.data||{};return 
Object.keys(r).forEach(n=>{["timeout","headers","queryParameters","data","cacheable"].indexOf(n)===-1&&(s[n]=r[n])}),{data:Object.entries(s).length>0?s:void 0,timeout:r.timeout||t,headers:r.headers||{},queryParameters:r.queryParameters||{},cacheable:r.cacheable}}var X={Read:1,Write:2,Any:3},U={Up:1,Down:2,Timeouted:3},_e=2*60*1e3;function ue(e,t=U.Up){return g(u({},e),{status:t,lastUpdate:Date.now()})}function Fe(e){return e.status===U.Up||Date.now()-e.lastUpdate>_e}function Be(e){return e.status===U.Timeouted&&Date.now()-e.lastUpdate<=_e}function le(e){return{protocol:e.protocol||"https",url:e.url,accept:e.accept||X.Any}}function ar(e,t){return Promise.all(t.map(r=>e.get(r,()=>Promise.resolve(ue(r))))).then(r=>{let s=r.filter(d=>Fe(d)),n=r.filter(d=>Be(d)),a=[...s,...n],o=a.length>0?a.map(d=>le(d)):t;return{getTimeout(d,y){return(n.length===0&&d===0?1:n.length+3+d)*y},statelessHosts:o}})}var or=({isTimedOut:e,status:t})=>!e&&~~t==0,ir=e=>{let t=e.status;return e.isTimedOut||or(e)||~~(t/100)!=2&&~~(t/100)!=4},cr=({status:e})=>~~(e/100)==2,ur=(e,t)=>ir(e)?t.onRetry(e):cr(e)?t.onSucess(e):t.onFail(e);function Qe(e,t,r,s){let n=[],a=$e(r,s),o=Le(e,s),d=r.method,y=r.method!==He.MethodEnum.Get?{}:u(u({},r.data),s.data),b=u(u(u({"x-algolia-agent":e.userAgent.value},e.queryParameters),y),s.queryParameters),f=0,p=(h,S)=>{let O=h.pop();if(O===void 0)throw Ve(de(n));let P={data:a,headers:o,method:d,url:Ge(O,r.path,b),connectTimeout:S(f,e.timeouts.connect),responseTimeout:S(f,s.timeout)},x=j=>{let T={request:P,response:j,host:O,triesLeft:h.length};return n.push(T),T},v={onSucess:j=>Ke(j),onRetry(j){let T=x(j);return j.isTimedOut&&f++,Promise.all([e.logger.info("Retryable failure",pe(T)),e.hostsCache.set(O,ue(O,j.isTimedOut?U.Timeouted:U.Down))]).then(()=>p(h,S))},onFail(j){throw x(j),ze(j,de(n))}};return e.requester.send(P).then(j=>ur(j,v))};return ar(e.hostsCache,t).then(h=>p([...h.statelessHosts].reverse(),h.getTimeout))}function 
lr(e){let{hostsCache:t,logger:r,requester:s,requestsCache:n,responsesCache:a,timeouts:o,userAgent:d,hosts:y,queryParameters:b,headers:f}=e,p={hostsCache:t,logger:r,requester:s,requestsCache:n,responsesCache:a,timeouts:o,userAgent:d,headers:f,queryParameters:b,hosts:y.map(h=>le(h)),read(h,S){let O=ce(S,p.timeouts.read),P=()=>Qe(p,p.hosts.filter(j=>(j.accept&X.Read)!=0),h,O);if((O.cacheable!==void 0?O.cacheable:h.cacheable)!==!0)return P();let v={request:h,mappedRequestOptions:O,transporter:{queryParameters:p.queryParameters,headers:p.headers}};return p.responsesCache.get(v,()=>p.requestsCache.get(v,()=>p.requestsCache.set(v,P()).then(j=>Promise.all([p.requestsCache.delete(v),j]),j=>Promise.all([p.requestsCache.delete(v),Promise.reject(j)])).then(([j,T])=>T)),{miss:j=>p.responsesCache.set(v,j)})},write(h,S){return Qe(p,p.hosts.filter(O=>(O.accept&X.Write)!=0),h,ce(S,p.timeouts.write))}};return p}function dr(e){let t={value:`Algolia for JavaScript (${e})`,add(r){let s=`; ${r.segment}${r.version!==void 0?` (${r.version})`:""}`;return t.value.indexOf(s)===-1&&(t.value=`${t.value}${s}`),t}};return t}function Ke(e){try{return JSON.parse(e.content)}catch(t){throw Je(t.message,e)}}function ze({content:e,status:t},r){let s=e;try{s=JSON.parse(e).message}catch(n){}return Xe(s,t,r)}function pr(e,...t){let r=0;return e.replace(/%s/g,()=>encodeURIComponent(t[r++]))}function Ge(e,t,r){let s=Ye(r),n=`${e.protocol}://${e.url}/${t.charAt(0)==="/"?t.substr(1):t}`;return s.length&&(n+=`?${s}`),n}function Ye(e){let t=r=>Object.prototype.toString.call(r)==="[object Object]"||Object.prototype.toString.call(r)==="[object Array]";return Object.keys(e).map(r=>pr("%s=%s",r,t(e[r])?JSON.stringify(e[r]):e[r])).join("&")}function $e(e,t){if(e.method===He.MethodEnum.Get||e.data===void 0&&t.data===void 0)return;let r=Array.isArray(e.data)?e.data:u(u({},e.data),t.data);return JSON.stringify(r)}function Le(e,t){let r=u(u({},e.headers),t.headers),s={};return Object.keys(r).forEach(n=>{let 
a=r[n];s[n.toLowerCase()]=a}),s}function de(e){return e.map(t=>pe(t))}function pe(e){let t=e.request.headers["x-algolia-api-key"]?{"x-algolia-api-key":"*****"}:{};return g(u({},e),{request:g(u({},e.request),{headers:u(u({},e.request.headers),t)})})}function Xe(e,t,r){return{name:"ApiError",message:e,status:t,transporterStackTrace:r}}function Je(e,t){return{name:"DeserializationError",message:e,response:t}}function Ve(e){return{name:"RetryError",message:"Unreachable hosts - your application id may be incorrect. If the error persists, contact support@algolia.com.",transporterStackTrace:e}}A.CallEnum=X;A.HostStatusEnum=U;A.createApiError=Xe;A.createDeserializationError=Je;A.createMappedRequestOptions=ce;A.createRetryError=Ve;A.createStatefulHost=ue;A.createStatelessHost=le;A.createTransporter=lr;A.createUserAgent=dr;A.deserializeFailure=ze;A.deserializeSuccess=Ke;A.isStatefulHostTimeouted=Be;A.isStatefulHostUp=Fe;A.serializeData=$e;A.serializeHeaders=Le;A.serializeQueryParameters=Ye;A.serializeUrl=Ge;A.stackFrameWithoutCredentials=pe;A.stackTraceWithoutCredentials=de});var K=I((en,et)=>{et.exports=Ze()});var tt=I(w=>{"use strict";Object.defineProperty(w,"__esModule",{value:!0});var N=F(),mr=K(),z=B(),hr=e=>{let t=e.region||"us",r=N.createAuth(N.AuthMode.WithinHeaders,e.appId,e.apiKey),s=mr.createTransporter(g(u({hosts:[{url:`analytics.${t}.algolia.com`}]},e),{headers:u(g(u({},r.headers()),{"content-type":"application/json"}),e.headers),queryParameters:u(u({},r.queryParameters()),e.queryParameters)})),n=e.appId;return 
N.addMethods({appId:n,transporter:s},e.methods)},yr=e=>(t,r)=>e.transporter.write({method:z.MethodEnum.Post,path:"2/abtests",data:t},r),gr=e=>(t,r)=>e.transporter.write({method:z.MethodEnum.Delete,path:N.encode("2/abtests/%s",t)},r),fr=e=>(t,r)=>e.transporter.read({method:z.MethodEnum.Get,path:N.encode("2/abtests/%s",t)},r),br=e=>t=>e.transporter.read({method:z.MethodEnum.Get,path:"2/abtests"},t),Pr=e=>(t,r)=>e.transporter.write({method:z.MethodEnum.Post,path:N.encode("2/abtests/%s/stop",t)},r);w.addABTest=yr;w.createAnalyticsClient=hr;w.deleteABTest=gr;w.getABTest=fr;w.getABTests=br;w.stopABTest=Pr});var st=I((rn,rt)=>{rt.exports=tt()});var at=I(G=>{"use strict";Object.defineProperty(G,"__esModule",{value:!0});var me=F(),jr=K(),nt=B(),Or=e=>{let t=e.region||"us",r=me.createAuth(me.AuthMode.WithinHeaders,e.appId,e.apiKey),s=jr.createTransporter(g(u({hosts:[{url:`recommendation.${t}.algolia.com`}]},e),{headers:u(g(u({},r.headers()),{"content-type":"application/json"}),e.headers),queryParameters:u(u({},r.queryParameters()),e.queryParameters)}));return me.addMethods({appId:e.appId,transporter:s},e.methods)},Ir=e=>t=>e.transporter.read({method:nt.MethodEnum.Get,path:"1/strategies/personalization"},t),Ar=e=>(t,r)=>e.transporter.write({method:nt.MethodEnum.Post,path:"1/strategies/personalization",data:t},r);G.createRecommendationClient=Or;G.getPersonalizationStrategy=Ir;G.setPersonalizationStrategy=Ar});var it=I((nn,ot)=>{ot.exports=at()});var jt=I(i=>{"use strict";Object.defineProperty(i,"__esModule",{value:!0});var l=F(),q=K(),m=B(),Sr=require("crypto");function Y(e){let t=r=>e.request(r).then(s=>{if(e.batch!==void 0&&e.batch(s.hits),!e.shouldStop(s))return s.cursor?t({cursor:s.cursor}):t({page:(r.page||0)+1})});return t({})}var Dr=e=>{let t=e.appId,r=l.createAuth(e.authMode!==void 
0?e.authMode:l.AuthMode.WithinHeaders,t,e.apiKey),s=q.createTransporter(g(u({hosts:[{url:`${t}-dsn.algolia.net`,accept:q.CallEnum.Read},{url:`${t}.algolia.net`,accept:q.CallEnum.Write}].concat(l.shuffle([{url:`${t}-1.algolianet.com`},{url:`${t}-2.algolianet.com`},{url:`${t}-3.algolianet.com`}]))},e),{headers:u(g(u({},r.headers()),{"content-type":"application/x-www-form-urlencoded"}),e.headers),queryParameters:u(u({},r.queryParameters()),e.queryParameters)})),n={transporter:s,appId:t,addAlgoliaAgent(a,o){s.userAgent.add({segment:a,version:o})},clearCache(){return Promise.all([s.requestsCache.clear(),s.responsesCache.clear()]).then(()=>{})}};return l.addMethods(n,e.methods)};function ct(){return{name:"MissingObjectIDError",message:"All objects must have an unique objectID (like a primary key) to be valid. Algolia is also able to generate objectIDs automatically but *it's not recommended*. To do it, use the `{'autoGenerateObjectIDIfNotExist': true}` option."}}function ut(){return{name:"ObjectNotFoundError",message:"Object not found."}}function lt(){return{name:"ValidUntilNotFoundError",message:"ValidUntil not found in given secured api key."}}var Rr=e=>(t,r)=>{let d=r||{},{queryParameters:s}=d,n=R(d,["queryParameters"]),a=u({acl:t},s!==void 0?{queryParameters:s}:{}),o=(y,b)=>l.createRetryablePromise(f=>$(e)(y.key,b).catch(p=>{if(p.status!==404)throw p;return f()}));return l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Post,path:"1/keys",data:a},n),o)},vr=e=>(t,r,s)=>{let n=q.createMappedRequestOptions(s);return n.queryParameters["X-Algolia-User-ID"]=t,e.transporter.write({method:m.MethodEnum.Post,path:"1/clusters/mapping",data:{cluster:r}},n)},xr=e=>(t,r,s)=>e.transporter.write({method:m.MethodEnum.Post,path:"1/clusters/mapping/batch",data:{users:t,cluster:r}},s),Z=e=>(t,r,s)=>{let n=(a,o)=>L(e)(t,{methods:{waitTask:D}}).waitTask(a.taskID,o);return 
l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/operation",t),data:{operation:"copy",destination:r}},s),n)},qr=e=>(t,r,s)=>Z(e)(t,r,g(u({},s),{scope:[ee.Rules]})),Er=e=>(t,r,s)=>Z(e)(t,r,g(u({},s),{scope:[ee.Settings]})),Tr=e=>(t,r,s)=>Z(e)(t,r,g(u({},s),{scope:[ee.Synonyms]})),Mr=e=>(t,r)=>{let s=(n,a)=>l.createRetryablePromise(o=>$(e)(t,a).then(o).catch(d=>{if(d.status!==404)throw d}));return l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Delete,path:l.encode("1/keys/%s",t)},r),s)},wr=()=>(e,t)=>{let r=q.serializeQueryParameters(t),s=Sr.createHmac("sha256",e).update(r).digest("hex");return Buffer.from(s+r).toString("base64")},$=e=>(t,r)=>e.transporter.read({method:m.MethodEnum.Get,path:l.encode("1/keys/%s",t)},r),kr=e=>t=>e.transporter.read({method:m.MethodEnum.Get,path:"1/logs"},t),Cr=()=>e=>{let t=Buffer.from(e,"base64").toString("ascii"),r=/validUntil=(\d+)/,s=t.match(r);if(s===null)throw lt();return parseInt(s[1],10)-Math.round(new Date().getTime()/1e3)},Ur=e=>t=>e.transporter.read({method:m.MethodEnum.Get,path:"1/clusters/mapping/top"},t),Nr=e=>(t,r)=>e.transporter.read({method:m.MethodEnum.Get,path:l.encode("1/clusters/mapping/%s",t)},r),Wr=e=>t=>{let n=t||{},{retrieveMappings:r}=n,s=R(n,["retrieveMappings"]);return r===!0&&(s.getClusters=!0),e.transporter.read({method:m.MethodEnum.Get,path:"1/clusters/mapping/pending"},s)},L=e=>(t,r={})=>{let s={transporter:e.transporter,appId:e.appId,indexName:t};return l.addMethods(s,r.methods)},Hr=e=>t=>e.transporter.read({method:m.MethodEnum.Get,path:"1/keys"},t),_r=e=>t=>e.transporter.read({method:m.MethodEnum.Get,path:"1/clusters"},t),Fr=e=>t=>e.transporter.read({method:m.MethodEnum.Get,path:"1/indexes"},t),Br=e=>t=>e.transporter.read({method:m.MethodEnum.Get,path:"1/clusters/mapping"},t),Kr=e=>(t,r,s)=>{let n=(a,o)=>L(e)(t,{methods:{waitTask:D}}).waitTask(a.taskID,o);return 
l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/operation",t),data:{operation:"move",destination:r}},s),n)},zr=e=>(t,r)=>{let s=(n,a)=>Promise.all(Object.keys(n.taskID).map(o=>L(e)(o,{methods:{waitTask:D}}).waitTask(n.taskID[o],a)));return l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Post,path:"1/indexes/*/batch",data:{requests:t}},r),s)},Gr=e=>(t,r)=>e.transporter.read({method:m.MethodEnum.Post,path:"1/indexes/*/objects",data:{requests:t}},r),$r=e=>(t,r)=>{let s=t.map(n=>g(u({},n),{params:q.serializeQueryParameters(n.params||{})}));return e.transporter.read({method:m.MethodEnum.Post,path:"1/indexes/*/queries",data:{requests:s},cacheable:!0},r)},Lr=e=>(t,r)=>Promise.all(t.map(s=>{let d=s.params,{facetName:n,facetQuery:a}=d,o=R(d,["facetName","facetQuery"]);return L(e)(s.indexName,{methods:{searchForFacetValues:dt}}).searchForFacetValues(n,a,u(u({},r),o))})),Vr=e=>(t,r)=>{let s=q.createMappedRequestOptions(r);return s.queryParameters["X-Algolia-User-ID"]=t,e.transporter.write({method:m.MethodEnum.Delete,path:"1/clusters/mapping"},s)},Qr=e=>(t,r)=>{let s=(n,a)=>l.createRetryablePromise(o=>$(e)(t,a).catch(d=>{if(d.status!==404)throw d;return o()}));return l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Post,path:l.encode("1/keys/%s/restore",t)},r),s)},Jr=e=>(t,r)=>e.transporter.read({method:m.MethodEnum.Post,path:"1/clusters/mapping/search",data:{query:t}},r),Xr=e=>(t,r)=>{let s=Object.assign({},r),f=r||{},{queryParameters:n}=f,a=R(f,["queryParameters"]),o=n?{queryParameters:n}:{},d=["acl","indexes","referers","restrictSources","queryParameters","description","maxQueriesPerIPPerHour","maxHitsPerQuery"],y=p=>Object.keys(s).filter(h=>d.indexOf(h)!==-1).every(h=>p[h]===s[h]),b=(p,h)=>l.createRetryablePromise(S=>$(e)(t,h).then(O=>y(O)?Promise.resolve():S()));return 
l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Put,path:l.encode("1/keys/%s",t),data:o},a),b)},pt=e=>(t,r)=>{let s=(n,a)=>D(e)(n.taskID,a);return l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/batch",e.indexName),data:{requests:t}},r),s)},Yr=e=>t=>Y(g(u({},t),{shouldStop:r=>r.cursor===void 0,request:r=>e.transporter.read({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/browse",e.indexName),data:r},t)})),Zr=e=>t=>{let r=u({hitsPerPage:1e3},t);return Y(g(u({},r),{shouldStop:s=>s.hits.lengthg(u({},n),{hits:n.hits.map(a=>(delete a._highlightResult,a))}))}}))},es=e=>t=>{let r=u({hitsPerPage:1e3},t);return Y(g(u({},r),{shouldStop:s=>s.hits.lengthg(u({},n),{hits:n.hits.map(a=>(delete a._highlightResult,a))}))}}))},te=e=>(t,r,s)=>{let y=s||{},{batchSize:n}=y,a=R(y,["batchSize"]),o={taskIDs:[],objectIDs:[]},d=(b=0)=>{let f=[],p;for(p=b;p({action:r,body:h})),a).then(h=>(o.objectIDs=o.objectIDs.concat(h.objectIDs),o.taskIDs.push(h.taskID),p++,d(p)))};return l.createWaitablePromise(d(),(b,f)=>Promise.all(b.taskIDs.map(p=>D(e)(p,f))))},ts=e=>t=>l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/clear",e.indexName)},t),(r,s)=>D(e)(r.taskID,s)),rs=e=>t=>{let a=t||{},{forwardToReplicas:r}=a,s=R(a,["forwardToReplicas"]),n=q.createMappedRequestOptions(s);return r&&(n.queryParameters.forwardToReplicas=1),l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/rules/clear",e.indexName)},n),(o,d)=>D(e)(o.taskID,d))},ss=e=>t=>{let a=t||{},{forwardToReplicas:r}=a,s=R(a,["forwardToReplicas"]),n=q.createMappedRequestOptions(s);return 
r&&(n.queryParameters.forwardToReplicas=1),l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/synonyms/clear",e.indexName)},n),(o,d)=>D(e)(o.taskID,d))},ns=e=>(t,r)=>l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/deleteByQuery",e.indexName),data:t},r),(s,n)=>D(e)(s.taskID,n)),as=e=>t=>l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Delete,path:l.encode("1/indexes/%s",e.indexName)},t),(r,s)=>D(e)(r.taskID,s)),os=e=>(t,r)=>l.createWaitablePromise(yt(e)([t],r).then(s=>({taskID:s.taskIDs[0]})),(s,n)=>D(e)(s.taskID,n)),yt=e=>(t,r)=>{let s=t.map(n=>({objectID:n}));return te(e)(s,k.DeleteObject,r)},is=e=>(t,r)=>{let o=r||{},{forwardToReplicas:s}=o,n=R(o,["forwardToReplicas"]),a=q.createMappedRequestOptions(n);return s&&(a.queryParameters.forwardToReplicas=1),l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Delete,path:l.encode("1/indexes/%s/rules/%s",e.indexName,t)},a),(d,y)=>D(e)(d.taskID,y))},cs=e=>(t,r)=>{let o=r||{},{forwardToReplicas:s}=o,n=R(o,["forwardToReplicas"]),a=q.createMappedRequestOptions(n);return s&&(a.queryParameters.forwardToReplicas=1),l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Delete,path:l.encode("1/indexes/%s/synonyms/%s",e.indexName,t)},a),(d,y)=>D(e)(d.taskID,y))},us=e=>t=>gt(e)(t).then(()=>!0).catch(r=>{if(r.status!==404)throw r;return!1}),ls=e=>(t,r)=>{let y=r||{},{query:s,paginate:n}=y,a=R(y,["query","paginate"]),o=0,d=()=>ft(e)(s||"",g(u({},a),{page:o})).then(b=>{for(let[f,p]of Object.entries(b.hits))if(t(p))return{object:p,position:parseInt(f,10),page:o};if(o++,n===!1||o>=b.nbPages)throw ut();return d()});return d()},ds=e=>(t,r)=>e.transporter.read({method:m.MethodEnum.Get,path:l.encode("1/indexes/%s/%s",e.indexName,t)},r),ps=()=>(e,t)=>{for(let[r,s]of Object.entries(e.hits))if(s.objectID===t)return parseInt(r,10);return-1},ms=e=>(t,r)=>{let 
o=r||{},{attributesToRetrieve:s}=o,n=R(o,["attributesToRetrieve"]),a=t.map(d=>u({indexName:e.indexName,objectID:d},s?{attributesToRetrieve:s}:{}));return e.transporter.read({method:m.MethodEnum.Post,path:"1/indexes/*/objects",data:{requests:a}},n)},hs=e=>(t,r)=>e.transporter.read({method:m.MethodEnum.Get,path:l.encode("1/indexes/%s/rules/%s",e.indexName,t)},r),gt=e=>t=>e.transporter.read({method:m.MethodEnum.Get,path:l.encode("1/indexes/%s/settings",e.indexName),data:{getVersion:2}},t),ys=e=>(t,r)=>e.transporter.read({method:m.MethodEnum.Get,path:l.encode("1/indexes/%s/synonyms/%s",e.indexName,t)},r),bt=e=>(t,r)=>e.transporter.read({method:m.MethodEnum.Get,path:l.encode("1/indexes/%s/task/%s",e.indexName,t.toString())},r),gs=e=>(t,r)=>l.createWaitablePromise(Pt(e)([t],r).then(s=>({objectID:s.objectIDs[0],taskID:s.taskIDs[0]})),(s,n)=>D(e)(s.taskID,n)),Pt=e=>(t,r)=>{let o=r||{},{createIfNotExists:s}=o,n=R(o,["createIfNotExists"]),a=s?k.PartialUpdateObject:k.PartialUpdateObjectNoCreate;return te(e)(t,a,n)},fs=e=>(t,r)=>{let O=r||{},{safe:s,autoGenerateObjectIDIfNotExist:n,batchSize:a}=O,o=R(O,["safe","autoGenerateObjectIDIfNotExist","batchSize"]),d=(P,x,v,j)=>l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/operation",P),data:{operation:v,destination:x}},j),(T,V)=>D(e)(T.taskID,V)),y=Math.random().toString(36).substring(7),b=`${e.indexName}_tmp_${y}`,f=he({appId:e.appId,transporter:e.transporter,indexName:b}),p=[],h=d(e.indexName,b,"copy",g(u({},o),{scope:["settings","synonyms","rules"]}));p.push(h);let S=(s?h.wait(o):h).then(()=>{let P=f(t,g(u({},o),{autoGenerateObjectIDIfNotExist:n,batchSize:a}));return p.push(P),s?P.wait(o):P}).then(()=>{let P=d(b,e.indexName,"move",o);return p.push(P),s?P.wait(o):P}).then(()=>Promise.all(p)).then(([P,x,v])=>({objectIDs:x.objectIDs,taskIDs:[P.taskID,...x.taskIDs,v.taskID]}));return 
l.createWaitablePromise(S,(P,x)=>Promise.all(p.map(v=>v.wait(x))))},bs=e=>(t,r)=>ye(e)(t,g(u({},r),{clearExistingRules:!0})),Ps=e=>(t,r)=>ge(e)(t,g(u({},r),{replaceExistingSynonyms:!0})),js=e=>(t,r)=>l.createWaitablePromise(he(e)([t],r).then(s=>({objectID:s.objectIDs[0],taskID:s.taskIDs[0]})),(s,n)=>D(e)(s.taskID,n)),he=e=>(t,r)=>{let o=r||{},{autoGenerateObjectIDIfNotExist:s}=o,n=R(o,["autoGenerateObjectIDIfNotExist"]),a=s?k.AddObject:k.UpdateObject;if(a===k.UpdateObject){for(let d of t)if(d.objectID===void 0)return l.createWaitablePromise(Promise.reject(ct()))}return te(e)(t,a,n)},Os=e=>(t,r)=>ye(e)([t],r),ye=e=>(t,r)=>{let d=r||{},{forwardToReplicas:s,clearExistingRules:n}=d,a=R(d,["forwardToReplicas","clearExistingRules"]),o=q.createMappedRequestOptions(a);return s&&(o.queryParameters.forwardToReplicas=1),n&&(o.queryParameters.clearExistingRules=1),l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/rules/batch",e.indexName),data:t},o),(y,b)=>D(e)(y.taskID,b))},Is=e=>(t,r)=>ge(e)([t],r),ge=e=>(t,r)=>{let d=r||{},{forwardToReplicas:s,replaceExistingSynonyms:n}=d,a=R(d,["forwardToReplicas","replaceExistingSynonyms"]),o=q.createMappedRequestOptions(a);return 
s&&(o.queryParameters.forwardToReplicas=1),n&&(o.queryParameters.replaceExistingSynonyms=1),l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/synonyms/batch",e.indexName),data:t},o),(y,b)=>D(e)(y.taskID,b))},ft=e=>(t,r)=>e.transporter.read({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/query",e.indexName),data:{query:t},cacheable:!0},r),dt=e=>(t,r,s)=>e.transporter.read({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/facets/%s/query",e.indexName,t),data:{facetQuery:r},cacheable:!0},s),mt=e=>(t,r)=>e.transporter.read({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/rules/search",e.indexName),data:{query:t}},r),ht=e=>(t,r)=>e.transporter.read({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/synonyms/search",e.indexName),data:{query:t}},r),As=e=>(t,r)=>{let o=r||{},{forwardToReplicas:s}=o,n=R(o,["forwardToReplicas"]),a=q.createMappedRequestOptions(n);return s&&(a.queryParameters.forwardToReplicas=1),l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Put,path:l.encode("1/indexes/%s/settings",e.indexName),data:t},a),(d,y)=>D(e)(d.taskID,y))},D=e=>(t,r)=>l.createRetryablePromise(s=>bt(e)(t,r).then(n=>n.status!=="published"?s():void 
0)),Ss={AddObject:"addObject",Analytics:"analytics",Browser:"browse",DeleteIndex:"deleteIndex",DeleteObject:"deleteObject",EditSettings:"editSettings",ListIndexes:"listIndexes",Logs:"logs",Recommendation:"recommendation",Search:"search",SeeUnretrievableAttributes:"seeUnretrievableAttributes",Settings:"settings",Usage:"usage"},k={AddObject:"addObject",UpdateObject:"updateObject",PartialUpdateObject:"partialUpdateObject",PartialUpdateObjectNoCreate:"partialUpdateObjectNoCreate",DeleteObject:"deleteObject"},ee={Settings:"settings",Synonyms:"synonyms",Rules:"rules"},Ds={None:"none",StopIfEnoughMatches:"stopIfEnoughMatches"},Rs={Synonym:"synonym",OneWaySynonym:"oneWaySynonym",AltCorrection1:"altCorrection1",AltCorrection2:"altCorrection2",Placeholder:"placeholder"};i.ApiKeyACLEnum=Ss;i.BatchActionEnum=k;i.ScopeEnum=ee;i.StrategyEnum=Ds;i.SynonymEnum=Rs;i.addApiKey=Rr;i.assignUserID=vr;i.assignUserIDs=xr;i.batch=pt;i.browseObjects=Yr;i.browseRules=Zr;i.browseSynonyms=es;i.chunkedBatch=te;i.clearObjects=ts;i.clearRules=rs;i.clearSynonyms=ss;i.copyIndex=Z;i.copyRules=qr;i.copySettings=Er;i.copySynonyms=Tr;i.createBrowsablePromise=Y;i.createMissingObjectIDError=ct;i.createObjectNotFoundError=ut;i.createSearchClient=Dr;i.createValidUntilNotFoundError=lt;i.deleteApiKey=Mr;i.deleteBy=ns;i.deleteIndex=as;i.deleteObject=os;i.deleteObjects=yt;i.deleteRule=is;i.deleteSynonym=cs;i.exists=us;i.findObject=ls;i.generateSecuredApiKey=wr;i.getApiKey=$;i.getLogs=kr;i.getObject=ds;i.getObjectPosition=ps;i.getObjects=ms;i.getRule=hs;i.getSecuredApiKeyRemainingValidity=Cr;i.getSettings=gt;i.getSynonym=ys;i.getTask=bt;i.getTopUserIDs=Ur;i.getUserID=Nr;i.hasPendingMappings=Wr;i.initIndex=L;i.listApiKeys=Hr;i.listClusters=_r;i.listIndices=Fr;i.listUserIDs=Br;i.moveIndex=Kr;i.multipleBatch=zr;i.multipleGetObjects=Gr;i.multipleQueries=$r;i.multipleSearchForFacetValues=Lr;i.partialUpdateObject=gs;i.partialUpdateObjects=Pt;i.removeUserID=Vr;i.replaceAllObjects=fs;i.replaceAllRules=bs;i.replaceAllSy
nonyms=Ps;i.restoreApiKey=Qr;i.saveObject=js;i.saveObjects=he;i.saveRule=Os;i.saveRules=ye;i.saveSynonym=Is;i.saveSynonyms=ge;i.search=ft;i.searchForFacetValues=dt;i.searchRules=mt;i.searchSynonyms=ht;i.searchUserIDs=Jr;i.setSettings=As;i.updateApiKey=Xr;i.waitTask=D});var It=I((on,Ot)=>{Ot.exports=jt()});var At=I(re=>{"use strict";Object.defineProperty(re,"__esModule",{value:!0});function vs(){return{debug(e,t){return Promise.resolve()},info(e,t){return Promise.resolve()},error(e,t){return Promise.resolve()}}}var xs={Debug:1,Info:2,Error:3};re.LogLevelEnum=xs;re.createNullLogger=vs});var Dt=I((un,St)=>{St.exports=At()});var xt=I(fe=>{"use strict";Object.defineProperty(fe,"__esModule",{value:!0});var Rt=require("http"),vt=require("https"),qs=require("url");function Es(){let e={keepAlive:!0},t=new Rt.Agent(e),r=new vt.Agent(e);return{send(s){return new Promise(n=>{let a=qs.parse(s.url),o=a.query===null?a.pathname:`${a.pathname}?${a.query}`,d=u({agent:a.protocol==="https:"?r:t,hostname:a.hostname,path:o,method:s.method,headers:s.headers},a.port!==void 0?{port:a.port||""}:{}),y=(a.protocol==="https:"?vt:Rt).request(d,h=>{let S="";h.on("data",O=>S+=O),h.on("end",()=>{clearTimeout(f),clearTimeout(p),n({status:h.statusCode||0,content:S,isTimedOut:!1})})}),b=(h,S)=>setTimeout(()=>{y.abort(),n({status:0,content:S,isTimedOut:!0})},h*1e3),f=b(s.connectTimeout,"Connection timeout"),p;y.on("error",h=>{clearTimeout(f),clearTimeout(p),n({status:0,content:h.message,isTimedOut:!1})}),y.once("response",()=>{clearTimeout(f),p=b(s.responseTimeout,"Socket timeout")}),s.data!==void 0&&y.write(s.data),y.end()})},destroy(){return t.destroy(),r.destroy(),Promise.resolve()}}}fe.createNodeHttpRequester=Es});var Et=I((dn,qt)=>{qt.exports=xt()});var kt=I((pn,Tt)=>{"use strict";var Mt=Ee(),Ts=we(),W=st(),be=F(),Pe=it(),c=It(),Ms=Dt(),ws=Et(),ks=K();function wt(e,t,r){let 
s={appId:e,apiKey:t,timeouts:{connect:2,read:5,write:30},requester:ws.createNodeHttpRequester(),logger:Ms.createNullLogger(),responsesCache:Mt.createNullCache(),requestsCache:Mt.createNullCache(),hostsCache:Ts.createInMemoryCache(),userAgent:ks.createUserAgent(be.version).add({segment:"Node.js",version:process.versions.node})};return c.createSearchClient(g(u(u({},s),r),{methods:{search:c.multipleQueries,searchForFacetValues:c.multipleSearchForFacetValues,multipleBatch:c.multipleBatch,multipleGetObjects:c.multipleGetObjects,multipleQueries:c.multipleQueries,copyIndex:c.copyIndex,copySettings:c.copySettings,copyRules:c.copyRules,copySynonyms:c.copySynonyms,moveIndex:c.moveIndex,listIndices:c.listIndices,getLogs:c.getLogs,listClusters:c.listClusters,multipleSearchForFacetValues:c.multipleSearchForFacetValues,getApiKey:c.getApiKey,addApiKey:c.addApiKey,listApiKeys:c.listApiKeys,updateApiKey:c.updateApiKey,deleteApiKey:c.deleteApiKey,restoreApiKey:c.restoreApiKey,assignUserID:c.assignUserID,assignUserIDs:c.assignUserIDs,getUserID:c.getUserID,searchUserIDs:c.searchUserIDs,listUserIDs:c.listUserIDs,getTopUserIDs:c.getTopUserIDs,removeUserID:c.removeUserID,hasPendingMappings:c.hasPendingMappings,generateSecuredApiKey:c.generateSecuredApiKey,getSecuredApiKeyRemainingValidity:c.getSecuredApiKeyRemainingValidity,destroy:be.destroy,initIndex:n=>a=>c.initIndex(n)(a,{methods:{batch:c.batch,delete:c.deleteIndex,getObject:c.getObject,getObjects:c.getObjects,saveObject:c.saveObject,saveObjects:c.saveObjects,search:c.search,searchForFacetValues:c.searchForFacetValues,waitTask:c.waitTask,setSettings:c.setSettings,getSettings:c.getSettings,partialUpdateObject:c.partialUpdateObject,partialUpdateObjects:c.partialUpdateObjects,deleteObject:c.deleteObject,deleteObjects:c.deleteObjects,deleteBy:c.deleteBy,clearObjects:c.clearObjects,browseObjects:c.browseObjects,getObjectPosition:c.getObjectPosition,findObject:c.findObject,exists:c.exists,saveSynonym:c.saveSynonym,saveSynonyms:c.saveSynonym
s,getSynonym:c.getSynonym,searchSynonyms:c.searchSynonyms,browseSynonyms:c.browseSynonyms,deleteSynonym:c.deleteSynonym,clearSynonyms:c.clearSynonyms,replaceAllObjects:c.replaceAllObjects,replaceAllSynonyms:c.replaceAllSynonyms,searchRules:c.searchRules,getRule:c.getRule,deleteRule:c.deleteRule,saveRule:c.saveRule,saveRules:c.saveRules,replaceAllRules:c.replaceAllRules,browseRules:c.browseRules,clearRules:c.clearRules}}),initAnalytics:()=>n=>W.createAnalyticsClient(g(u(u({},s),n),{methods:{addABTest:W.addABTest,getABTest:W.getABTest,getABTests:W.getABTests,stopABTest:W.stopABTest,deleteABTest:W.deleteABTest}})),initRecommendation:()=>n=>Pe.createRecommendationClient(g(u(u({},s),n),{methods:{getPersonalizationStrategy:Pe.getPersonalizationStrategy,setPersonalizationStrategy:Pe.setPersonalizationStrategy}}))}}))}wt.version=be.version;Tt.exports=wt});var Ut=I((mn,je)=>{var Ct=kt();je.exports=Ct;je.exports.default=Ct});var Ws={};Vt(Ws,{default:()=>Ks});var Oe=C(require("@yarnpkg/core")),E=C(require("@yarnpkg/core")),Ie=C(require("@yarnpkg/plugin-essentials")),Ht=C(require("semver"));var se=C(require("@yarnpkg/core")),Nt=C(Ut()),Cs="e8e1bd300d860104bb8c58453ffa1eb4",Us="OFCNCOG2CU",Wt=async(e,t)=>{var a;let r=se.structUtils.stringifyIdent(e),n=Ns(t).initIndex("npm-search");try{return((a=(await n.getObject(r,{attributesToRetrieve:["types"]})).types)==null?void 0:a.ts)==="definitely-typed"}catch(o){return!1}},Ns=e=>(0,Nt.default)(Us,Cs,{requester:{async send(r){try{let s=await se.httpUtils.request(r.url,r.data||null,{configuration:e,headers:r.headers});return{content:s.body,isTimedOut:!1,status:s.statusCode}}catch(s){return{content:s.response.body,isTimedOut:!1,status:s.response.statusCode}}}}});var _t=e=>e.scope?`${e.scope}__${e.name}`:`${e.name}`,Hs=async(e,t,r,s)=>{if(r.scope==="types")return;let{project:n}=e,{configuration:a}=n,o=a.makeResolver(),d={project:n,resolver:o,report:new E.ThrowReport};if(!await Wt(r,a))return;let 
b=_t(r),f=E.structUtils.parseRange(r.range).selector;if(!E.semverUtils.validRange(f)){let P=await o.getCandidates(r,new Map,d);f=E.structUtils.parseRange(P[0].reference).selector}let p=Ht.default.coerce(f);if(p===null)return;let h=`${Ie.suggestUtils.Modifier.CARET}${p.major}`,S=E.structUtils.makeDescriptor(E.structUtils.makeIdent("types",b),h),O=E.miscUtils.mapAndFind(n.workspaces,P=>{var T,V;let x=(T=P.manifest.dependencies.get(r.identHash))==null?void 0:T.descriptorHash,v=(V=P.manifest.devDependencies.get(r.identHash))==null?void 0:V.descriptorHash;if(x!==r.descriptorHash&&v!==r.descriptorHash)return E.miscUtils.mapAndFind.skip;let j=[];for(let Ae of Oe.Manifest.allDependencies){let Se=P.manifest[Ae].get(S.identHash);typeof Se!="undefined"&&j.push([Ae,Se])}return j.length===0?E.miscUtils.mapAndFind.skip:j});if(typeof O!="undefined")for(let[P,x]of O)e.manifest[P].set(x.identHash,x);else{try{if((await o.getCandidates(S,new Map,d)).length===0)return}catch{return}e.manifest[Ie.suggestUtils.Target.DEVELOPMENT].set(S.identHash,S)}},_s=async(e,t,r)=>{if(r.scope==="types")return;let s=_t(r),n=E.structUtils.makeIdent("types",s);for(let a of Oe.Manifest.allDependencies)typeof e.manifest[a].get(n.identHash)!="undefined"&&e.manifest[a].delete(n.identHash)},Fs=(e,t)=>{t.publishConfig&&t.publishConfig.typings&&(t.typings=t.publishConfig.typings),t.publishConfig&&t.publishConfig.types&&(t.types=t.publishConfig.types)},Bs={hooks:{afterWorkspaceDependencyAddition:Hs,afterWorkspaceDependencyRemoval:_s,beforeWorkspacePacking:Fs}},Ks=Bs;return Ws;})(); 7 | return plugin; 8 | } 9 | }; 10 | -------------------------------------------------------------------------------- /.yarn/sdks/eslint/bin/eslint.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const {existsSync} = require(`fs`); 4 | const {createRequire, createRequireFromPath} = require(`module`); 5 | const {resolve} = require(`path`); 6 | 7 | const relPnpApiPath 
= "../../../../.pnp.cjs"; 8 | 9 | const absPnpApiPath = resolve(__dirname, relPnpApiPath); 10 | const absRequire = (createRequire || createRequireFromPath)(absPnpApiPath); 11 | 12 | if (existsSync(absPnpApiPath)) { 13 | if (!process.versions.pnp) { 14 | // Setup the environment to be able to require eslint/bin/eslint.js 15 | require(absPnpApiPath).setup(); 16 | } 17 | } 18 | 19 | // Defer to the real eslint/bin/eslint.js your application uses 20 | module.exports = absRequire(`eslint/bin/eslint.js`); 21 | -------------------------------------------------------------------------------- /.yarn/sdks/eslint/lib/api.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const {existsSync} = require(`fs`); 4 | const {createRequire, createRequireFromPath} = require(`module`); 5 | const {resolve} = require(`path`); 6 | 7 | const relPnpApiPath = "../../../../.pnp.cjs"; 8 | 9 | const absPnpApiPath = resolve(__dirname, relPnpApiPath); 10 | const absRequire = (createRequire || createRequireFromPath)(absPnpApiPath); 11 | 12 | if (existsSync(absPnpApiPath)) { 13 | if (!process.versions.pnp) { 14 | // Setup the environment to be able to require eslint/lib/api.js 15 | require(absPnpApiPath).setup(); 16 | } 17 | } 18 | 19 | // Defer to the real eslint/lib/api.js your application uses 20 | module.exports = absRequire(`eslint/lib/api.js`); 21 | -------------------------------------------------------------------------------- /.yarn/sdks/eslint/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "eslint", 3 | "version": "7.32.0-sdk", 4 | "main": "./lib/api.js", 5 | "type": "commonjs" 6 | } 7 | -------------------------------------------------------------------------------- /.yarn/sdks/integrations.yml: -------------------------------------------------------------------------------- 1 | # This file is automatically generated by @yarnpkg/sdks. 
2 | # Manual changes might be lost! 3 | 4 | integrations: 5 | - vscode 6 | -------------------------------------------------------------------------------- /.yarn/sdks/prettier/index.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const {existsSync} = require(`fs`); 4 | const {createRequire, createRequireFromPath} = require(`module`); 5 | const {resolve} = require(`path`); 6 | 7 | const relPnpApiPath = "../../../.pnp.cjs"; 8 | 9 | const absPnpApiPath = resolve(__dirname, relPnpApiPath); 10 | const absRequire = (createRequire || createRequireFromPath)(absPnpApiPath); 11 | 12 | if (existsSync(absPnpApiPath)) { 13 | if (!process.versions.pnp) { 14 | // Setup the environment to be able to require prettier/index.js 15 | require(absPnpApiPath).setup(); 16 | } 17 | } 18 | 19 | // Defer to the real prettier/index.js your application uses 20 | module.exports = absRequire(`prettier/index.js`); 21 | -------------------------------------------------------------------------------- /.yarn/sdks/prettier/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "prettier", 3 | "version": "2.4.1-sdk", 4 | "main": "./index.js", 5 | "type": "commonjs" 6 | } 7 | -------------------------------------------------------------------------------- /.yarn/sdks/typescript/bin/tsc: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const {existsSync} = require(`fs`); 4 | const {createRequire, createRequireFromPath} = require(`module`); 5 | const {resolve} = require(`path`); 6 | 7 | const relPnpApiPath = "../../../../.pnp.cjs"; 8 | 9 | const absPnpApiPath = resolve(__dirname, relPnpApiPath); 10 | const absRequire = (createRequire || createRequireFromPath)(absPnpApiPath); 11 | 12 | if (existsSync(absPnpApiPath)) { 13 | if (!process.versions.pnp) { 14 | // Setup the environment to be able to require 
typescript/bin/tsc 15 | require(absPnpApiPath).setup(); 16 | } 17 | } 18 | 19 | // Defer to the real typescript/bin/tsc your application uses 20 | module.exports = absRequire(`typescript/bin/tsc`); 21 | -------------------------------------------------------------------------------- /.yarn/sdks/typescript/bin/tsserver: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const {existsSync} = require(`fs`); 4 | const {createRequire, createRequireFromPath} = require(`module`); 5 | const {resolve} = require(`path`); 6 | 7 | const relPnpApiPath = "../../../../.pnp.cjs"; 8 | 9 | const absPnpApiPath = resolve(__dirname, relPnpApiPath); 10 | const absRequire = (createRequire || createRequireFromPath)(absPnpApiPath); 11 | 12 | if (existsSync(absPnpApiPath)) { 13 | if (!process.versions.pnp) { 14 | // Setup the environment to be able to require typescript/bin/tsserver 15 | require(absPnpApiPath).setup(); 16 | } 17 | } 18 | 19 | // Defer to the real typescript/bin/tsserver your application uses 20 | module.exports = absRequire(`typescript/bin/tsserver`); 21 | -------------------------------------------------------------------------------- /.yarn/sdks/typescript/lib/tsc.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const {existsSync} = require(`fs`); 4 | const {createRequire, createRequireFromPath} = require(`module`); 5 | const {resolve} = require(`path`); 6 | 7 | const relPnpApiPath = "../../../../.pnp.cjs"; 8 | 9 | const absPnpApiPath = resolve(__dirname, relPnpApiPath); 10 | const absRequire = (createRequire || createRequireFromPath)(absPnpApiPath); 11 | 12 | if (existsSync(absPnpApiPath)) { 13 | if (!process.versions.pnp) { 14 | // Setup the environment to be able to require typescript/lib/tsc.js 15 | require(absPnpApiPath).setup(); 16 | } 17 | } 18 | 19 | // Defer to the real typescript/lib/tsc.js your application uses 20 | module.exports = 
absRequire(`typescript/lib/tsc.js`); 21 | -------------------------------------------------------------------------------- /.yarn/sdks/typescript/lib/tsserver.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const {existsSync} = require(`fs`); 4 | const {createRequire, createRequireFromPath} = require(`module`); 5 | const {resolve} = require(`path`); 6 | 7 | const relPnpApiPath = "../../../../.pnp.cjs"; 8 | 9 | const absPnpApiPath = resolve(__dirname, relPnpApiPath); 10 | const absRequire = (createRequire || createRequireFromPath)(absPnpApiPath); 11 | 12 | const moduleWrapper = tsserver => { 13 | if (!process.versions.pnp) { 14 | return tsserver; 15 | } 16 | 17 | const {isAbsolute} = require(`path`); 18 | const pnpApi = require(`pnpapi`); 19 | 20 | const isVirtual = str => str.match(/\/(\$\$virtual|__virtual__)\//); 21 | const normalize = str => str.replace(/\\/g, `/`).replace(/^\/?/, `/`); 22 | 23 | const dependencyTreeRoots = new Set(pnpApi.getDependencyTreeRoots().map(locator => { 24 | return `${locator.name}@${locator.reference}`; 25 | })); 26 | 27 | // VSCode sends the zip paths to TS using the "zip://" prefix, that TS 28 | // doesn't understand. This layer makes sure to remove the protocol 29 | // before forwarding it to TS, and to add it back on all returned paths. 30 | 31 | function toEditorPath(str) { 32 | // We add the `zip:` prefix to both `.zip/` paths and virtual paths 33 | if (isAbsolute(str) && !str.match(/^\^zip:/) && (str.match(/\.zip\//) || isVirtual(str))) { 34 | // We also take the opportunity to turn virtual paths into physical ones; 35 | // this makes it much easier to work with workspaces that list peer 36 | // dependencies, since otherwise Ctrl+Click would bring us to the virtual 37 | // file instances instead of the real ones. 38 | // 39 | // We only do this to modules owned by the the dependency tree roots. 
40 | // This avoids breaking the resolution when jumping inside a vendor 41 | // with peer dep (otherwise jumping into react-dom would show resolution 42 | // errors on react). 43 | // 44 | const resolved = isVirtual(str) ? pnpApi.resolveVirtual(str) : str; 45 | if (resolved) { 46 | const locator = pnpApi.findPackageLocator(resolved); 47 | if (locator && dependencyTreeRoots.has(`${locator.name}@${locator.reference}`)) { 48 | str = resolved; 49 | } 50 | } 51 | 52 | str = normalize(str); 53 | 54 | if (str.match(/\.zip\//)) { 55 | switch (hostInfo) { 56 | // Absolute VSCode `Uri.fsPath`s need to start with a slash. 57 | // VSCode only adds it automatically for supported schemes, 58 | // so we have to do it manually for the `zip` scheme. 59 | // The path needs to start with a caret otherwise VSCode doesn't handle the protocol 60 | // 61 | // Ref: https://github.com/microsoft/vscode/issues/105014#issuecomment-686760910 62 | // 63 | case `vscode`: { 64 | str = `^zip:${str}`; 65 | } break; 66 | 67 | // To make "go to definition" work, 68 | // We have to resolve the actual file system path from virtual path 69 | // and convert scheme to supported by [vim-rzip](https://github.com/lbrayner/vim-rzip) 70 | case `coc-nvim`: { 71 | str = normalize(resolved).replace(/\.zip\//, `.zip::`); 72 | str = resolve(`zipfile:${str}`); 73 | } break; 74 | 75 | // Support neovim native LSP and [typescript-language-server](https://github.com/theia-ide/typescript-language-server) 76 | // We have to resolve the actual file system path from virtual path, 77 | // everything else is up to neovim 78 | case `neovim`: { 79 | str = normalize(resolved).replace(/\.zip\//, `.zip::`); 80 | str = `zipfile:${str}`; 81 | } break; 82 | 83 | default: { 84 | str = `zip:${str}`; 85 | } break; 86 | } 87 | } 88 | } 89 | 90 | return str; 91 | } 92 | 93 | function fromEditorPath(str) { 94 | switch (hostInfo) { 95 | case `coc-nvim`: 96 | case `neovim`: { 97 | str = str.replace(/\.zip::/, `.zip/`); 98 | // The path for 
coc-nvim is in format of //zipfile://.yarn/... 99 | // So in order to convert it back, we use .* to match all the thing 100 | // before `zipfile:` 101 | return process.platform === `win32` 102 | ? str.replace(/^.*zipfile:\//, ``) 103 | : str.replace(/^.*zipfile:/, ``); 104 | } break; 105 | 106 | case `vscode`: 107 | default: { 108 | return process.platform === `win32` 109 | ? str.replace(/^\^?zip:\//, ``) 110 | : str.replace(/^\^?zip:/, ``); 111 | } break; 112 | } 113 | } 114 | 115 | // Force enable 'allowLocalPluginLoads' 116 | // TypeScript tries to resolve plugins using a path relative to itself 117 | // which doesn't work when using the global cache 118 | // https://github.com/microsoft/TypeScript/blob/1b57a0395e0bff191581c9606aab92832001de62/src/server/project.ts#L2238 119 | // VSCode doesn't want to enable 'allowLocalPluginLoads' due to security concerns but 120 | // TypeScript already does local loads and if this code is running the user trusts the workspace 121 | // https://github.com/microsoft/vscode/issues/45856 122 | const ConfiguredProject = tsserver.server.ConfiguredProject; 123 | const {enablePluginsWithOptions: originalEnablePluginsWithOptions} = ConfiguredProject.prototype; 124 | ConfiguredProject.prototype.enablePluginsWithOptions = function() { 125 | this.projectService.allowLocalPluginLoads = true; 126 | return originalEnablePluginsWithOptions.apply(this, arguments); 127 | }; 128 | 129 | // And here is the point where we hijack the VSCode <-> TS communications 130 | // by adding ourselves in the middle. We locate everything that looks 131 | // like an absolute path of ours and normalize it. 
132 | 133 | const Session = tsserver.server.Session; 134 | const {onMessage: originalOnMessage, send: originalSend} = Session.prototype; 135 | let hostInfo = `unknown`; 136 | 137 | Object.assign(Session.prototype, { 138 | onMessage(/** @type {string} */ message) { 139 | const parsedMessage = JSON.parse(message) 140 | 141 | if ( 142 | parsedMessage != null && 143 | typeof parsedMessage === `object` && 144 | parsedMessage.arguments && 145 | typeof parsedMessage.arguments.hostInfo === `string` 146 | ) { 147 | hostInfo = parsedMessage.arguments.hostInfo; 148 | } 149 | 150 | return originalOnMessage.call(this, JSON.stringify(parsedMessage, (key, value) => { 151 | return typeof value === `string` ? fromEditorPath(value) : value; 152 | })); 153 | }, 154 | 155 | send(/** @type {any} */ msg) { 156 | return originalSend.call(this, JSON.parse(JSON.stringify(msg, (key, value) => { 157 | return typeof value === `string` ? toEditorPath(value) : value; 158 | }))); 159 | } 160 | }); 161 | 162 | return tsserver; 163 | }; 164 | 165 | if (existsSync(absPnpApiPath)) { 166 | if (!process.versions.pnp) { 167 | // Setup the environment to be able to require typescript/lib/tsserver.js 168 | require(absPnpApiPath).setup(); 169 | } 170 | } 171 | 172 | // Defer to the real typescript/lib/tsserver.js your application uses 173 | module.exports = moduleWrapper(absRequire(`typescript/lib/tsserver.js`)); 174 | -------------------------------------------------------------------------------- /.yarn/sdks/typescript/lib/tsserverlibrary.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const {existsSync} = require(`fs`); 4 | const {createRequire, createRequireFromPath} = require(`module`); 5 | const {resolve} = require(`path`); 6 | 7 | const relPnpApiPath = "../../../../.pnp.cjs"; 8 | 9 | const absPnpApiPath = resolve(__dirname, relPnpApiPath); 10 | const absRequire = (createRequire || createRequireFromPath)(absPnpApiPath); 11 | 12 | 
const moduleWrapper = tsserver => { 13 | if (!process.versions.pnp) { 14 | return tsserver; 15 | } 16 | 17 | const {isAbsolute} = require(`path`); 18 | const pnpApi = require(`pnpapi`); 19 | 20 | const isVirtual = str => str.match(/\/(\$\$virtual|__virtual__)\//); 21 | const normalize = str => str.replace(/\\/g, `/`).replace(/^\/?/, `/`); 22 | 23 | const dependencyTreeRoots = new Set(pnpApi.getDependencyTreeRoots().map(locator => { 24 | return `${locator.name}@${locator.reference}`; 25 | })); 26 | 27 | // VSCode sends the zip paths to TS using the "zip://" prefix, that TS 28 | // doesn't understand. This layer makes sure to remove the protocol 29 | // before forwarding it to TS, and to add it back on all returned paths. 30 | 31 | function toEditorPath(str) { 32 | // We add the `zip:` prefix to both `.zip/` paths and virtual paths 33 | if (isAbsolute(str) && !str.match(/^\^zip:/) && (str.match(/\.zip\//) || isVirtual(str))) { 34 | // We also take the opportunity to turn virtual paths into physical ones; 35 | // this makes it much easier to work with workspaces that list peer 36 | // dependencies, since otherwise Ctrl+Click would bring us to the virtual 37 | // file instances instead of the real ones. 38 | // 39 | // We only do this to modules owned by the the dependency tree roots. 40 | // This avoids breaking the resolution when jumping inside a vendor 41 | // with peer dep (otherwise jumping into react-dom would show resolution 42 | // errors on react). 43 | // 44 | const resolved = isVirtual(str) ? pnpApi.resolveVirtual(str) : str; 45 | if (resolved) { 46 | const locator = pnpApi.findPackageLocator(resolved); 47 | if (locator && dependencyTreeRoots.has(`${locator.name}@${locator.reference}`)) { 48 | str = resolved; 49 | } 50 | } 51 | 52 | str = normalize(str); 53 | 54 | if (str.match(/\.zip\//)) { 55 | switch (hostInfo) { 56 | // Absolute VSCode `Uri.fsPath`s need to start with a slash. 
57 | // VSCode only adds it automatically for supported schemes, 58 | // so we have to do it manually for the `zip` scheme. 59 | // The path needs to start with a caret otherwise VSCode doesn't handle the protocol 60 | // 61 | // Ref: https://github.com/microsoft/vscode/issues/105014#issuecomment-686760910 62 | // 63 | case `vscode`: { 64 | str = `^zip:${str}`; 65 | } break; 66 | 67 | // To make "go to definition" work, 68 | // We have to resolve the actual file system path from virtual path 69 | // and convert scheme to supported by [vim-rzip](https://github.com/lbrayner/vim-rzip) 70 | case `coc-nvim`: { 71 | str = normalize(resolved).replace(/\.zip\//, `.zip::`); 72 | str = resolve(`zipfile:${str}`); 73 | } break; 74 | 75 | // Support neovim native LSP and [typescript-language-server](https://github.com/theia-ide/typescript-language-server) 76 | // We have to resolve the actual file system path from virtual path, 77 | // everything else is up to neovim 78 | case `neovim`: { 79 | str = normalize(resolved).replace(/\.zip\//, `.zip::`); 80 | str = `zipfile:${str}`; 81 | } break; 82 | 83 | default: { 84 | str = `zip:${str}`; 85 | } break; 86 | } 87 | } 88 | } 89 | 90 | return str; 91 | } 92 | 93 | function fromEditorPath(str) { 94 | switch (hostInfo) { 95 | case `coc-nvim`: 96 | case `neovim`: { 97 | str = str.replace(/\.zip::/, `.zip/`); 98 | // The path for coc-nvim is in format of //zipfile://.yarn/... 99 | // So in order to convert it back, we use .* to match all the thing 100 | // before `zipfile:` 101 | return process.platform === `win32` 102 | ? str.replace(/^.*zipfile:\//, ``) 103 | : str.replace(/^.*zipfile:/, ``); 104 | } break; 105 | 106 | case `vscode`: 107 | default: { 108 | return process.platform === `win32` 109 | ? 
str.replace(/^\^?zip:\//, ``) 110 | : str.replace(/^\^?zip:/, ``); 111 | } break; 112 | } 113 | } 114 | 115 | // Force enable 'allowLocalPluginLoads' 116 | // TypeScript tries to resolve plugins using a path relative to itself 117 | // which doesn't work when using the global cache 118 | // https://github.com/microsoft/TypeScript/blob/1b57a0395e0bff191581c9606aab92832001de62/src/server/project.ts#L2238 119 | // VSCode doesn't want to enable 'allowLocalPluginLoads' due to security concerns but 120 | // TypeScript already does local loads and if this code is running the user trusts the workspace 121 | // https://github.com/microsoft/vscode/issues/45856 122 | const ConfiguredProject = tsserver.server.ConfiguredProject; 123 | const {enablePluginsWithOptions: originalEnablePluginsWithOptions} = ConfiguredProject.prototype; 124 | ConfiguredProject.prototype.enablePluginsWithOptions = function() { 125 | this.projectService.allowLocalPluginLoads = true; 126 | return originalEnablePluginsWithOptions.apply(this, arguments); 127 | }; 128 | 129 | // And here is the point where we hijack the VSCode <-> TS communications 130 | // by adding ourselves in the middle. We locate everything that looks 131 | // like an absolute path of ours and normalize it. 132 | 133 | const Session = tsserver.server.Session; 134 | const {onMessage: originalOnMessage, send: originalSend} = Session.prototype; 135 | let hostInfo = `unknown`; 136 | 137 | Object.assign(Session.prototype, { 138 | onMessage(/** @type {string} */ message) { 139 | const parsedMessage = JSON.parse(message) 140 | 141 | if ( 142 | parsedMessage != null && 143 | typeof parsedMessage === `object` && 144 | parsedMessage.arguments && 145 | typeof parsedMessage.arguments.hostInfo === `string` 146 | ) { 147 | hostInfo = parsedMessage.arguments.hostInfo; 148 | } 149 | 150 | return originalOnMessage.call(this, JSON.stringify(parsedMessage, (key, value) => { 151 | return typeof value === `string` ? 
fromEditorPath(value) : value; 152 | })); 153 | }, 154 | 155 | send(/** @type {any} */ msg) { 156 | return originalSend.call(this, JSON.parse(JSON.stringify(msg, (key, value) => { 157 | return typeof value === `string` ? toEditorPath(value) : value; 158 | }))); 159 | } 160 | }); 161 | 162 | return tsserver; 163 | }; 164 | 165 | if (existsSync(absPnpApiPath)) { 166 | if (!process.versions.pnp) { 167 | // Setup the environment to be able to require typescript/lib/tsserverlibrary.js 168 | require(absPnpApiPath).setup(); 169 | } 170 | } 171 | 172 | // Defer to the real typescript/lib/tsserverlibrary.js your application uses 173 | module.exports = moduleWrapper(absRequire(`typescript/lib/tsserverlibrary.js`)); 174 | -------------------------------------------------------------------------------- /.yarn/sdks/typescript/lib/typescript.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const {existsSync} = require(`fs`); 4 | const {createRequire, createRequireFromPath} = require(`module`); 5 | const {resolve} = require(`path`); 6 | 7 | const relPnpApiPath = "../../../../.pnp.cjs"; 8 | 9 | const absPnpApiPath = resolve(__dirname, relPnpApiPath); 10 | const absRequire = (createRequire || createRequireFromPath)(absPnpApiPath); 11 | 12 | if (existsSync(absPnpApiPath)) { 13 | if (!process.versions.pnp) { 14 | // Setup the environment to be able to require typescript/lib/typescript.js 15 | require(absPnpApiPath).setup(); 16 | } 17 | } 18 | 19 | // Defer to the real typescript/lib/typescript.js your application uses 20 | module.exports = absRequire(`typescript/lib/typescript.js`); 21 | -------------------------------------------------------------------------------- /.yarn/sdks/typescript/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "typescript", 3 | "version": "4.4.3-sdk", 4 | "main": "./lib/typescript.js", 5 | "type": "commonjs" 6 | } 7 | 
-------------------------------------------------------------------------------- /.yarnrc.yml: -------------------------------------------------------------------------------- 1 | plugins: 2 | - path: .yarn/plugins/@yarnpkg/plugin-typescript.cjs 3 | spec: "@yarnpkg/plugin-typescript" 4 | - path: .yarn/plugins/@yarnpkg/plugin-interactive-tools.cjs 5 | spec: "@yarnpkg/plugin-interactive-tools" 6 | - path: .yarn/plugins/@yarnpkg/plugin-version.cjs 7 | spec: "@yarnpkg/plugin-version" 8 | 9 | yarnPath: .yarn/releases/yarn-3.0.2.cjs 10 | -------------------------------------------------------------------------------- /Anchor.toml: -------------------------------------------------------------------------------- 1 | [registry] 2 | url = "https://anchor.projectserum.com" 3 | 4 | [provider] 5 | cluster = "localnet" 6 | wallet = "~/.config/solana/id.json" 7 | 8 | [programs.localnet] 9 | migrator = "M1G1VdgdfvjMCdUhVtzaejnutPmLknEiraq2F59YGxr" 10 | 11 | [scripts] 12 | test = "yarn ts-mocha -t 1000000" 13 | -------------------------------------------------------------------------------- /Cargo.lock: -------------------------------------------------------------------------------- 1 | # This file is automatically @generated by Cargo. 2 | # It is not intended for manual editing. 
3 | version = 3 4 | 5 | [[package]] 6 | name = "ahash" 7 | version = "0.4.7" 8 | source = "registry+https://github.com/rust-lang/crates.io-index" 9 | checksum = "739f4a8db6605981345c5654f3a85b056ce52f37a39d34da03f25bf2151ea16e" 10 | 11 | [[package]] 12 | name = "aho-corasick" 13 | version = "0.7.18" 14 | source = "registry+https://github.com/rust-lang/crates.io-index" 15 | checksum = "1e37cfd5e7657ada45f742d6e99ca5788580b5c529dc78faf11ece6dc702656f" 16 | dependencies = [ 17 | "memchr", 18 | ] 19 | 20 | [[package]] 21 | name = "alloc-traits" 22 | version = "0.1.1" 23 | source = "registry+https://github.com/rust-lang/crates.io-index" 24 | checksum = "6b2d54853319fd101b8dd81de382bcbf3e03410a64d8928bbee85a3e7dcde483" 25 | 26 | [[package]] 27 | name = "anchor-attribute-access-control" 28 | version = "0.17.0" 29 | source = "registry+https://github.com/rust-lang/crates.io-index" 30 | checksum = "7b8ab97bfde16e49bc399586a857e9bd56e7c867a66a89ca809134d53d999138" 31 | dependencies = [ 32 | "anchor-syn", 33 | "anyhow", 34 | "proc-macro2", 35 | "quote", 36 | "regex", 37 | "syn", 38 | ] 39 | 40 | [[package]] 41 | name = "anchor-attribute-account" 42 | version = "0.17.0" 43 | source = "registry+https://github.com/rust-lang/crates.io-index" 44 | checksum = "40d3c2f1ebf823c4a8f0e41c57125991713177d4f02957600f8c1da8bd87adfd" 45 | dependencies = [ 46 | "anchor-syn", 47 | "anyhow", 48 | "bs58 0.4.0", 49 | "proc-macro2", 50 | "quote", 51 | "rustversion", 52 | "syn", 53 | ] 54 | 55 | [[package]] 56 | name = "anchor-attribute-error" 57 | version = "0.17.0" 58 | source = "registry+https://github.com/rust-lang/crates.io-index" 59 | checksum = "f8b5b954878c4cb1ad373143b42765abaf789691e13dbd0a3a8707dbfd0612cd" 60 | dependencies = [ 61 | "anchor-syn", 62 | "proc-macro2", 63 | "quote", 64 | "syn", 65 | ] 66 | 67 | [[package]] 68 | name = "anchor-attribute-event" 69 | version = "0.17.0" 70 | source = "registry+https://github.com/rust-lang/crates.io-index" 71 | checksum = 
"418daba265c778d2386c27191b4ec927c24be270ed6a8667be81de9e541c7a3e" 72 | dependencies = [ 73 | "anchor-syn", 74 | "anyhow", 75 | "proc-macro2", 76 | "quote", 77 | "syn", 78 | ] 79 | 80 | [[package]] 81 | name = "anchor-attribute-interface" 82 | version = "0.17.0" 83 | source = "registry+https://github.com/rust-lang/crates.io-index" 84 | checksum = "fd2159348897db16999d76ff396ba8722fb101e0e0cc6845b3722eb7472bd0d0" 85 | dependencies = [ 86 | "anchor-syn", 87 | "anyhow", 88 | "heck", 89 | "proc-macro2", 90 | "quote", 91 | "syn", 92 | ] 93 | 94 | [[package]] 95 | name = "anchor-attribute-program" 96 | version = "0.17.0" 97 | source = "registry+https://github.com/rust-lang/crates.io-index" 98 | checksum = "e6695b491d73439ad9839565beb0749107f5acca6d96b4cbaaaef428ba7b6c11" 99 | dependencies = [ 100 | "anchor-syn", 101 | "anyhow", 102 | "proc-macro2", 103 | "quote", 104 | "syn", 105 | ] 106 | 107 | [[package]] 108 | name = "anchor-attribute-state" 109 | version = "0.17.0" 110 | source = "registry+https://github.com/rust-lang/crates.io-index" 111 | checksum = "4bcbbeade2b868e597b55d90418dc51334c4e388f988c0eea1af5d511083ed10" 112 | dependencies = [ 113 | "anchor-syn", 114 | "anyhow", 115 | "proc-macro2", 116 | "quote", 117 | "syn", 118 | ] 119 | 120 | [[package]] 121 | name = "anchor-derive-accounts" 122 | version = "0.17.0" 123 | source = "registry+https://github.com/rust-lang/crates.io-index" 124 | checksum = "dc82ef304c38e7529883176c428acfab9a7bb9e851aa694fff53c8789fbc47b3" 125 | dependencies = [ 126 | "anchor-syn", 127 | "anyhow", 128 | "proc-macro2", 129 | "quote", 130 | "syn", 131 | ] 132 | 133 | [[package]] 134 | name = "anchor-lang" 135 | version = "0.17.0" 136 | source = "registry+https://github.com/rust-lang/crates.io-index" 137 | checksum = "ff6b7025eb65638005fd2af58e2bd136b61c2ecbadda379e908a5af541351a3a" 138 | dependencies = [ 139 | "anchor-attribute-access-control", 140 | "anchor-attribute-account", 141 | "anchor-attribute-error", 142 | "anchor-attribute-event", 
143 | "anchor-attribute-interface", 144 | "anchor-attribute-program", 145 | "anchor-attribute-state", 146 | "anchor-derive-accounts", 147 | "base64 0.13.0", 148 | "borsh", 149 | "bytemuck", 150 | "solana-program", 151 | "thiserror", 152 | ] 153 | 154 | [[package]] 155 | name = "anchor-spl" 156 | version = "0.17.0" 157 | source = "registry+https://github.com/rust-lang/crates.io-index" 158 | checksum = "b49dfaf04f0794ecbdafa1f5dda93d47fc042ae70478fc079194c6c7cd265e94" 159 | dependencies = [ 160 | "anchor-lang", 161 | "lazy_static", 162 | "serum_dex", 163 | "solana-program", 164 | "spl-associated-token-account", 165 | "spl-token", 166 | ] 167 | 168 | [[package]] 169 | name = "anchor-syn" 170 | version = "0.17.0" 171 | source = "registry+https://github.com/rust-lang/crates.io-index" 172 | checksum = "321cca8ea1c35b199956e11b2869e8b1b1ae2d547326a12fc45375d0806470c8" 173 | dependencies = [ 174 | "anyhow", 175 | "bs58 0.3.1", 176 | "heck", 177 | "proc-macro2", 178 | "proc-macro2-diagnostics", 179 | "quote", 180 | "serde", 181 | "serde_json", 182 | "sha2", 183 | "syn", 184 | "thiserror", 185 | ] 186 | 187 | [[package]] 188 | name = "anyhow" 189 | version = "1.0.44" 190 | source = "registry+https://github.com/rust-lang/crates.io-index" 191 | checksum = "61604a8f862e1d5c3229fdd78f8b02c68dcf73a4c4b05fd636d12240aaa242c1" 192 | 193 | [[package]] 194 | name = "arrayref" 195 | version = "0.3.6" 196 | source = "registry+https://github.com/rust-lang/crates.io-index" 197 | checksum = "a4c527152e37cf757a3f78aae5a06fbeefdb07ccc535c980a3208ee3060dd544" 198 | 199 | [[package]] 200 | name = "arrayvec" 201 | version = "0.5.2" 202 | source = "registry+https://github.com/rust-lang/crates.io-index" 203 | checksum = "23b62fc65de8e4e7f52534fb52b0f3ed04746ae267519eef2a83941e8085068b" 204 | 205 | [[package]] 206 | name = "atty" 207 | version = "0.2.14" 208 | source = "registry+https://github.com/rust-lang/crates.io-index" 209 | checksum = 
"d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" 210 | dependencies = [ 211 | "hermit-abi", 212 | "libc", 213 | "winapi", 214 | ] 215 | 216 | [[package]] 217 | name = "autocfg" 218 | version = "1.0.1" 219 | source = "registry+https://github.com/rust-lang/crates.io-index" 220 | checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a" 221 | 222 | [[package]] 223 | name = "base64" 224 | version = "0.12.3" 225 | source = "registry+https://github.com/rust-lang/crates.io-index" 226 | checksum = "3441f0f7b02788e948e47f457ca01f1d7e6d92c693bc132c22b087d3141c03ff" 227 | 228 | [[package]] 229 | name = "base64" 230 | version = "0.13.0" 231 | source = "registry+https://github.com/rust-lang/crates.io-index" 232 | checksum = "904dfeac50f3cdaba28fc6f57fdcddb75f49ed61346676a78c4ffe55877802fd" 233 | 234 | [[package]] 235 | name = "bincode" 236 | version = "1.3.3" 237 | source = "registry+https://github.com/rust-lang/crates.io-index" 238 | checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad" 239 | dependencies = [ 240 | "serde", 241 | ] 242 | 243 | [[package]] 244 | name = "blake3" 245 | version = "0.3.8" 246 | source = "registry+https://github.com/rust-lang/crates.io-index" 247 | checksum = "b64485778c4f16a6a5a9d335e80d449ac6c70cdd6a06d2af18a6f6f775a125b3" 248 | dependencies = [ 249 | "arrayref", 250 | "arrayvec", 251 | "cc", 252 | "cfg-if 0.1.10", 253 | "constant_time_eq", 254 | "crypto-mac", 255 | "digest 0.9.0", 256 | ] 257 | 258 | [[package]] 259 | name = "block-buffer" 260 | version = "0.9.0" 261 | source = "registry+https://github.com/rust-lang/crates.io-index" 262 | checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4" 263 | dependencies = [ 264 | "block-padding", 265 | "generic-array 0.14.4", 266 | ] 267 | 268 | [[package]] 269 | name = "block-padding" 270 | version = "0.2.1" 271 | source = "registry+https://github.com/rust-lang/crates.io-index" 272 | checksum = 
"8d696c370c750c948ada61c69a0ee2cbbb9c50b1019ddb86d9317157a99c2cae" 273 | 274 | [[package]] 275 | name = "borsh" 276 | version = "0.9.1" 277 | source = "registry+https://github.com/rust-lang/crates.io-index" 278 | checksum = "18dda7dc709193c0d86a1a51050a926dc3df1cf262ec46a23a25dba421ea1924" 279 | dependencies = [ 280 | "borsh-derive", 281 | "hashbrown", 282 | ] 283 | 284 | [[package]] 285 | name = "borsh-derive" 286 | version = "0.9.1" 287 | source = "registry+https://github.com/rust-lang/crates.io-index" 288 | checksum = "684155372435f578c0fa1acd13ebbb182cc19d6b38b64ae7901da4393217d264" 289 | dependencies = [ 290 | "borsh-derive-internal", 291 | "borsh-schema-derive-internal", 292 | "proc-macro-crate 0.1.5", 293 | "proc-macro2", 294 | "syn", 295 | ] 296 | 297 | [[package]] 298 | name = "borsh-derive-internal" 299 | version = "0.9.1" 300 | source = "registry+https://github.com/rust-lang/crates.io-index" 301 | checksum = "2102f62f8b6d3edeab871830782285b64cc1830168094db05c8e458f209bc5c3" 302 | dependencies = [ 303 | "proc-macro2", 304 | "quote", 305 | "syn", 306 | ] 307 | 308 | [[package]] 309 | name = "borsh-schema-derive-internal" 310 | version = "0.9.1" 311 | source = "registry+https://github.com/rust-lang/crates.io-index" 312 | checksum = "196c978c4c9b0b142d446ef3240690bf5a8a33497074a113ff9a337ccb750483" 313 | dependencies = [ 314 | "proc-macro2", 315 | "quote", 316 | "syn", 317 | ] 318 | 319 | [[package]] 320 | name = "bs58" 321 | version = "0.3.1" 322 | source = "registry+https://github.com/rust-lang/crates.io-index" 323 | checksum = "476e9cd489f9e121e02ffa6014a8ef220ecb15c05ed23fc34cca13925dc283fb" 324 | 325 | [[package]] 326 | name = "bs58" 327 | version = "0.4.0" 328 | source = "registry+https://github.com/rust-lang/crates.io-index" 329 | checksum = "771fe0050b883fcc3ea2359b1a96bcfbc090b7116eae7c3c512c7a083fdf23d3" 330 | 331 | [[package]] 332 | name = "bv" 333 | version = "0.11.1" 334 | source = "registry+https://github.com/rust-lang/crates.io-index" 335 | 
checksum = "8834bb1d8ee5dc048ee3124f2c7c1afcc6bc9aed03f11e9dfd8c69470a5db340" 336 | dependencies = [ 337 | "feature-probe", 338 | "serde", 339 | ] 340 | 341 | [[package]] 342 | name = "bytemuck" 343 | version = "1.7.2" 344 | source = "registry+https://github.com/rust-lang/crates.io-index" 345 | checksum = "72957246c41db82b8ef88a5486143830adeb8227ef9837740bdec67724cf2c5b" 346 | 347 | [[package]] 348 | name = "byteorder" 349 | version = "1.4.3" 350 | source = "registry+https://github.com/rust-lang/crates.io-index" 351 | checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" 352 | 353 | [[package]] 354 | name = "cc" 355 | version = "1.0.70" 356 | source = "registry+https://github.com/rust-lang/crates.io-index" 357 | checksum = "d26a6ce4b6a484fa3edb70f7efa6fc430fd2b87285fe8b84304fd0936faa0dc0" 358 | 359 | [[package]] 360 | name = "cfg-if" 361 | version = "0.1.10" 362 | source = "registry+https://github.com/rust-lang/crates.io-index" 363 | checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" 364 | 365 | [[package]] 366 | name = "cfg-if" 367 | version = "1.0.0" 368 | source = "registry+https://github.com/rust-lang/crates.io-index" 369 | checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" 370 | 371 | [[package]] 372 | name = "constant_time_eq" 373 | version = "0.1.5" 374 | source = "registry+https://github.com/rust-lang/crates.io-index" 375 | checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc" 376 | 377 | [[package]] 378 | name = "cpufeatures" 379 | version = "0.2.1" 380 | source = "registry+https://github.com/rust-lang/crates.io-index" 381 | checksum = "95059428f66df56b63431fdb4e1947ed2190586af5c5a8a8b71122bdf5a7f469" 382 | dependencies = [ 383 | "libc", 384 | ] 385 | 386 | [[package]] 387 | name = "crunchy" 388 | version = "0.2.2" 389 | source = "registry+https://github.com/rust-lang/crates.io-index" 390 | checksum = 
"7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" 391 | 392 | [[package]] 393 | name = "crypto-mac" 394 | version = "0.8.0" 395 | source = "registry+https://github.com/rust-lang/crates.io-index" 396 | checksum = "b584a330336237c1eecd3e94266efb216c56ed91225d634cb2991c5f3fd1aeab" 397 | dependencies = [ 398 | "generic-array 0.14.4", 399 | "subtle", 400 | ] 401 | 402 | [[package]] 403 | name = "curve25519-dalek" 404 | version = "2.1.3" 405 | source = "registry+https://github.com/rust-lang/crates.io-index" 406 | checksum = "4a9b85542f99a2dfa2a1b8e192662741c9859a846b296bef1c92ef9b58b5a216" 407 | dependencies = [ 408 | "byteorder", 409 | "digest 0.8.1", 410 | "rand_core", 411 | "subtle", 412 | "zeroize", 413 | ] 414 | 415 | [[package]] 416 | name = "derivative" 417 | version = "2.2.0" 418 | source = "registry+https://github.com/rust-lang/crates.io-index" 419 | checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" 420 | dependencies = [ 421 | "proc-macro2", 422 | "quote", 423 | "syn", 424 | ] 425 | 426 | [[package]] 427 | name = "digest" 428 | version = "0.8.1" 429 | source = "registry+https://github.com/rust-lang/crates.io-index" 430 | checksum = "f3d0c8c8752312f9713efd397ff63acb9f85585afbf179282e720e7704954dd5" 431 | dependencies = [ 432 | "generic-array 0.12.4", 433 | ] 434 | 435 | [[package]] 436 | name = "digest" 437 | version = "0.9.0" 438 | source = "registry+https://github.com/rust-lang/crates.io-index" 439 | checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066" 440 | dependencies = [ 441 | "generic-array 0.14.4", 442 | ] 443 | 444 | [[package]] 445 | name = "either" 446 | version = "1.6.1" 447 | source = "registry+https://github.com/rust-lang/crates.io-index" 448 | checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457" 449 | 450 | [[package]] 451 | name = "enumflags2" 452 | version = "0.6.4" 453 | source = "registry+https://github.com/rust-lang/crates.io-index" 454 | checksum = 
"83c8d82922337cd23a15f88b70d8e4ef5f11da38dd7cdb55e84dd5de99695da0" 455 | dependencies = [ 456 | "enumflags2_derive", 457 | ] 458 | 459 | [[package]] 460 | name = "enumflags2_derive" 461 | version = "0.6.4" 462 | source = "registry+https://github.com/rust-lang/crates.io-index" 463 | checksum = "946ee94e3dbf58fdd324f9ce245c7b238d46a66f00e86a020b71996349e46cce" 464 | dependencies = [ 465 | "proc-macro2", 466 | "quote", 467 | "syn", 468 | ] 469 | 470 | [[package]] 471 | name = "env_logger" 472 | version = "0.8.4" 473 | source = "registry+https://github.com/rust-lang/crates.io-index" 474 | checksum = "a19187fea3ac7e84da7dacf48de0c45d63c6a76f9490dae389aead16c243fce3" 475 | dependencies = [ 476 | "atty", 477 | "humantime", 478 | "log", 479 | "regex", 480 | "termcolor", 481 | ] 482 | 483 | [[package]] 484 | name = "feature-probe" 485 | version = "0.1.1" 486 | source = "registry+https://github.com/rust-lang/crates.io-index" 487 | checksum = "835a3dc7d1ec9e75e2b5fb4ba75396837112d2060b03f7d43bc1897c7f7211da" 488 | 489 | [[package]] 490 | name = "field-offset" 491 | version = "0.3.4" 492 | source = "registry+https://github.com/rust-lang/crates.io-index" 493 | checksum = "1e1c54951450cbd39f3dbcf1005ac413b49487dabf18a720ad2383eccfeffb92" 494 | dependencies = [ 495 | "memoffset", 496 | "rustc_version 0.3.3", 497 | ] 498 | 499 | [[package]] 500 | name = "generic-array" 501 | version = "0.12.4" 502 | source = "registry+https://github.com/rust-lang/crates.io-index" 503 | checksum = "ffdf9f34f1447443d37393cc6c2b8313aebddcd96906caf34e54c68d8e57d7bd" 504 | dependencies = [ 505 | "typenum", 506 | ] 507 | 508 | [[package]] 509 | name = "generic-array" 510 | version = "0.14.4" 511 | source = "registry+https://github.com/rust-lang/crates.io-index" 512 | checksum = "501466ecc8a30d1d3b7fc9229b122b2ce8ed6e9d9223f1138d4babb253e51817" 513 | dependencies = [ 514 | "serde", 515 | "typenum", 516 | "version_check", 517 | ] 518 | 519 | [[package]] 520 | name = "getrandom" 521 | version = "0.1.16" 
522 | source = "registry+https://github.com/rust-lang/crates.io-index" 523 | checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce" 524 | dependencies = [ 525 | "cfg-if 1.0.0", 526 | "libc", 527 | "wasi", 528 | ] 529 | 530 | [[package]] 531 | name = "hashbrown" 532 | version = "0.9.1" 533 | source = "registry+https://github.com/rust-lang/crates.io-index" 534 | checksum = "d7afe4a420e3fe79967a00898cc1f4db7c8a49a9333a29f8a4bd76a253d5cd04" 535 | dependencies = [ 536 | "ahash", 537 | ] 538 | 539 | [[package]] 540 | name = "heck" 541 | version = "0.3.3" 542 | source = "registry+https://github.com/rust-lang/crates.io-index" 543 | checksum = "6d621efb26863f0e9924c6ac577e8275e5e6b77455db64ffa6c65c904e9e132c" 544 | dependencies = [ 545 | "unicode-segmentation", 546 | ] 547 | 548 | [[package]] 549 | name = "hermit-abi" 550 | version = "0.1.19" 551 | source = "registry+https://github.com/rust-lang/crates.io-index" 552 | checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" 553 | dependencies = [ 554 | "libc", 555 | ] 556 | 557 | [[package]] 558 | name = "hex" 559 | version = "0.4.3" 560 | source = "registry+https://github.com/rust-lang/crates.io-index" 561 | checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" 562 | 563 | [[package]] 564 | name = "hmac" 565 | version = "0.8.1" 566 | source = "registry+https://github.com/rust-lang/crates.io-index" 567 | checksum = "126888268dcc288495a26bf004b38c5fdbb31682f992c84ceb046a1f0fe38840" 568 | dependencies = [ 569 | "crypto-mac", 570 | "digest 0.9.0", 571 | ] 572 | 573 | [[package]] 574 | name = "hmac-drbg" 575 | version = "0.3.0" 576 | source = "registry+https://github.com/rust-lang/crates.io-index" 577 | checksum = "17ea0a1394df5b6574da6e0c1ade9e78868c9fb0a4e5ef4428e32da4676b85b1" 578 | dependencies = [ 579 | "digest 0.9.0", 580 | "generic-array 0.14.4", 581 | "hmac", 582 | ] 583 | 584 | [[package]] 585 | name = "humantime" 586 | version = "2.1.0" 587 | source = 
"registry+https://github.com/rust-lang/crates.io-index" 588 | checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" 589 | 590 | [[package]] 591 | name = "itertools" 592 | version = "0.9.0" 593 | source = "registry+https://github.com/rust-lang/crates.io-index" 594 | checksum = "284f18f85651fe11e8a991b2adb42cb078325c996ed026d994719efcfca1d54b" 595 | dependencies = [ 596 | "either", 597 | ] 598 | 599 | [[package]] 600 | name = "itoa" 601 | version = "0.4.8" 602 | source = "registry+https://github.com/rust-lang/crates.io-index" 603 | checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4" 604 | 605 | [[package]] 606 | name = "keccak" 607 | version = "0.1.0" 608 | source = "registry+https://github.com/rust-lang/crates.io-index" 609 | checksum = "67c21572b4949434e4fc1e1978b99c5f77064153c59d998bf13ecd96fb5ecba7" 610 | 611 | [[package]] 612 | name = "lazy_static" 613 | version = "1.4.0" 614 | source = "registry+https://github.com/rust-lang/crates.io-index" 615 | checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" 616 | 617 | [[package]] 618 | name = "libc" 619 | version = "0.2.103" 620 | source = "registry+https://github.com/rust-lang/crates.io-index" 621 | checksum = "dd8f7255a17a627354f321ef0055d63b898c6fb27eff628af4d1b66b7331edf6" 622 | 623 | [[package]] 624 | name = "libsecp256k1" 625 | version = "0.5.0" 626 | source = "registry+https://github.com/rust-lang/crates.io-index" 627 | checksum = "bd1137239ab33b41aa9637a88a28249e5e70c40a42ccc92db7f12cc356c1fcd7" 628 | dependencies = [ 629 | "arrayref", 630 | "base64 0.12.3", 631 | "digest 0.9.0", 632 | "hmac-drbg", 633 | "libsecp256k1-core", 634 | "libsecp256k1-gen-ecmult", 635 | "libsecp256k1-gen-genmult", 636 | "rand", 637 | "serde", 638 | "sha2", 639 | "typenum", 640 | ] 641 | 642 | [[package]] 643 | name = "libsecp256k1-core" 644 | version = "0.2.2" 645 | source = "registry+https://github.com/rust-lang/crates.io-index" 646 | checksum = 
"d0f6ab710cec28cef759c5f18671a27dae2a5f952cdaaee1d8e2908cb2478a80" 647 | dependencies = [ 648 | "crunchy", 649 | "digest 0.9.0", 650 | "subtle", 651 | ] 652 | 653 | [[package]] 654 | name = "libsecp256k1-gen-ecmult" 655 | version = "0.2.1" 656 | source = "registry+https://github.com/rust-lang/crates.io-index" 657 | checksum = "ccab96b584d38fac86a83f07e659f0deafd0253dc096dab5a36d53efe653c5c3" 658 | dependencies = [ 659 | "libsecp256k1-core", 660 | ] 661 | 662 | [[package]] 663 | name = "libsecp256k1-gen-genmult" 664 | version = "0.2.1" 665 | source = "registry+https://github.com/rust-lang/crates.io-index" 666 | checksum = "67abfe149395e3aa1c48a2beb32b068e2334402df8181f818d3aee2b304c4f5d" 667 | dependencies = [ 668 | "libsecp256k1-core", 669 | ] 670 | 671 | [[package]] 672 | name = "log" 673 | version = "0.4.14" 674 | source = "registry+https://github.com/rust-lang/crates.io-index" 675 | checksum = "51b9bbe6c47d51fc3e1a9b945965946b4c44142ab8792c50835a980d362c2710" 676 | dependencies = [ 677 | "cfg-if 1.0.0", 678 | ] 679 | 680 | [[package]] 681 | name = "memchr" 682 | version = "2.4.1" 683 | source = "registry+https://github.com/rust-lang/crates.io-index" 684 | checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a" 685 | 686 | [[package]] 687 | name = "memmap2" 688 | version = "0.1.0" 689 | source = "registry+https://github.com/rust-lang/crates.io-index" 690 | checksum = "d9b70ca2a6103ac8b665dc150b142ef0e4e89df640c9e6cf295d189c3caebe5a" 691 | dependencies = [ 692 | "libc", 693 | ] 694 | 695 | [[package]] 696 | name = "memoffset" 697 | version = "0.6.4" 698 | source = "registry+https://github.com/rust-lang/crates.io-index" 699 | checksum = "59accc507f1338036a0477ef61afdae33cde60840f4dfe481319ce3ad116ddf9" 700 | dependencies = [ 701 | "autocfg", 702 | ] 703 | 704 | [[package]] 705 | name = "migrator" 706 | version = "0.1.0" 707 | dependencies = [ 708 | "anchor-lang", 709 | "anchor-spl", 710 | "bincode", 711 | "solana-program", 712 | 
"spl-associated-token-account", 713 | "vipers", 714 | ] 715 | 716 | [[package]] 717 | name = "num-derive" 718 | version = "0.3.3" 719 | source = "registry+https://github.com/rust-lang/crates.io-index" 720 | checksum = "876a53fff98e03a936a674b29568b0e605f06b29372c2489ff4de23f1949743d" 721 | dependencies = [ 722 | "proc-macro2", 723 | "quote", 724 | "syn", 725 | ] 726 | 727 | [[package]] 728 | name = "num-traits" 729 | version = "0.2.14" 730 | source = "registry+https://github.com/rust-lang/crates.io-index" 731 | checksum = "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290" 732 | dependencies = [ 733 | "autocfg", 734 | ] 735 | 736 | [[package]] 737 | name = "num_enum" 738 | version = "0.5.4" 739 | source = "registry+https://github.com/rust-lang/crates.io-index" 740 | checksum = "3f9bd055fb730c4f8f4f57d45d35cd6b3f0980535b056dc7ff119cee6a66ed6f" 741 | dependencies = [ 742 | "derivative", 743 | "num_enum_derive", 744 | ] 745 | 746 | [[package]] 747 | name = "num_enum_derive" 748 | version = "0.5.4" 749 | source = "registry+https://github.com/rust-lang/crates.io-index" 750 | checksum = "486ea01961c4a818096de679a8b740b26d9033146ac5291b1c98557658f8cdd9" 751 | dependencies = [ 752 | "proc-macro-crate 1.1.0", 753 | "proc-macro2", 754 | "quote", 755 | "syn", 756 | ] 757 | 758 | [[package]] 759 | name = "opaque-debug" 760 | version = "0.3.0" 761 | source = "registry+https://github.com/rust-lang/crates.io-index" 762 | checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" 763 | 764 | [[package]] 765 | name = "pest" 766 | version = "2.1.3" 767 | source = "registry+https://github.com/rust-lang/crates.io-index" 768 | checksum = "10f4872ae94d7b90ae48754df22fd42ad52ce740b8f370b03da4835417403e53" 769 | dependencies = [ 770 | "ucd-trie", 771 | ] 772 | 773 | [[package]] 774 | name = "ppv-lite86" 775 | version = "0.2.10" 776 | source = "registry+https://github.com/rust-lang/crates.io-index" 777 | checksum = 
"ac74c624d6b2d21f425f752262f42188365d7b8ff1aff74c82e45136510a4857" 778 | 779 | [[package]] 780 | name = "proc-macro-crate" 781 | version = "0.1.5" 782 | source = "registry+https://github.com/rust-lang/crates.io-index" 783 | checksum = "1d6ea3c4595b96363c13943497db34af4460fb474a95c43f4446ad341b8c9785" 784 | dependencies = [ 785 | "toml", 786 | ] 787 | 788 | [[package]] 789 | name = "proc-macro-crate" 790 | version = "1.1.0" 791 | source = "registry+https://github.com/rust-lang/crates.io-index" 792 | checksum = "1ebace6889caf889b4d3f76becee12e90353f2b8c7d875534a71e5742f8f6f83" 793 | dependencies = [ 794 | "thiserror", 795 | "toml", 796 | ] 797 | 798 | [[package]] 799 | name = "proc-macro2" 800 | version = "1.0.29" 801 | source = "registry+https://github.com/rust-lang/crates.io-index" 802 | checksum = "b9f5105d4fdaab20335ca9565e106a5d9b82b6219b5ba735731124ac6711d23d" 803 | dependencies = [ 804 | "unicode-xid", 805 | ] 806 | 807 | [[package]] 808 | name = "proc-macro2-diagnostics" 809 | version = "0.9.1" 810 | source = "registry+https://github.com/rust-lang/crates.io-index" 811 | checksum = "4bf29726d67464d49fa6224a1d07936a8c08bb3fba727c7493f6cf1616fdaada" 812 | dependencies = [ 813 | "proc-macro2", 814 | "quote", 815 | "syn", 816 | "version_check", 817 | "yansi", 818 | ] 819 | 820 | [[package]] 821 | name = "quote" 822 | version = "1.0.9" 823 | source = "registry+https://github.com/rust-lang/crates.io-index" 824 | checksum = "c3d0b9745dc2debf507c8422de05d7226cc1f0644216dfdfead988f9b1ab32a7" 825 | dependencies = [ 826 | "proc-macro2", 827 | ] 828 | 829 | [[package]] 830 | name = "rand" 831 | version = "0.7.3" 832 | source = "registry+https://github.com/rust-lang/crates.io-index" 833 | checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" 834 | dependencies = [ 835 | "getrandom", 836 | "libc", 837 | "rand_chacha", 838 | "rand_core", 839 | "rand_hc", 840 | ] 841 | 842 | [[package]] 843 | name = "rand_chacha" 844 | version = "0.2.2" 845 | source = 
"registry+https://github.com/rust-lang/crates.io-index" 846 | checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402" 847 | dependencies = [ 848 | "ppv-lite86", 849 | "rand_core", 850 | ] 851 | 852 | [[package]] 853 | name = "rand_core" 854 | version = "0.5.1" 855 | source = "registry+https://github.com/rust-lang/crates.io-index" 856 | checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" 857 | dependencies = [ 858 | "getrandom", 859 | ] 860 | 861 | [[package]] 862 | name = "rand_hc" 863 | version = "0.2.0" 864 | source = "registry+https://github.com/rust-lang/crates.io-index" 865 | checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" 866 | dependencies = [ 867 | "rand_core", 868 | ] 869 | 870 | [[package]] 871 | name = "regex" 872 | version = "1.5.4" 873 | source = "registry+https://github.com/rust-lang/crates.io-index" 874 | checksum = "d07a8629359eb56f1e2fb1652bb04212c072a87ba68546a04065d525673ac461" 875 | dependencies = [ 876 | "aho-corasick", 877 | "memchr", 878 | "regex-syntax", 879 | ] 880 | 881 | [[package]] 882 | name = "regex-syntax" 883 | version = "0.6.25" 884 | source = "registry+https://github.com/rust-lang/crates.io-index" 885 | checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b" 886 | 887 | [[package]] 888 | name = "rustc_version" 889 | version = "0.2.3" 890 | source = "registry+https://github.com/rust-lang/crates.io-index" 891 | checksum = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a" 892 | dependencies = [ 893 | "semver 0.9.0", 894 | ] 895 | 896 | [[package]] 897 | name = "rustc_version" 898 | version = "0.3.3" 899 | source = "registry+https://github.com/rust-lang/crates.io-index" 900 | checksum = "f0dfe2087c51c460008730de8b57e6a320782fbfb312e1f4d520e6c6fae155ee" 901 | dependencies = [ 902 | "semver 0.11.0", 903 | ] 904 | 905 | [[package]] 906 | name = "rustversion" 907 | version = "1.0.5" 908 | source = 
"registry+https://github.com/rust-lang/crates.io-index" 909 | checksum = "61b3909d758bb75c79f23d4736fac9433868679d3ad2ea7a61e3c25cfda9a088" 910 | 911 | [[package]] 912 | name = "ryu" 913 | version = "1.0.5" 914 | source = "registry+https://github.com/rust-lang/crates.io-index" 915 | checksum = "71d301d4193d031abdd79ff7e3dd721168a9572ef3fe51a1517aba235bd8f86e" 916 | 917 | [[package]] 918 | name = "safe-transmute" 919 | version = "0.11.2" 920 | source = "registry+https://github.com/rust-lang/crates.io-index" 921 | checksum = "98a01dab6acf992653be49205bdd549f32f17cb2803e8eacf1560bf97259aae8" 922 | 923 | [[package]] 924 | name = "semver" 925 | version = "0.9.0" 926 | source = "registry+https://github.com/rust-lang/crates.io-index" 927 | checksum = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403" 928 | dependencies = [ 929 | "semver-parser 0.7.0", 930 | ] 931 | 932 | [[package]] 933 | name = "semver" 934 | version = "0.11.0" 935 | source = "registry+https://github.com/rust-lang/crates.io-index" 936 | checksum = "f301af10236f6df4160f7c3f04eec6dbc70ace82d23326abad5edee88801c6b6" 937 | dependencies = [ 938 | "semver-parser 0.10.2", 939 | ] 940 | 941 | [[package]] 942 | name = "semver-parser" 943 | version = "0.7.0" 944 | source = "registry+https://github.com/rust-lang/crates.io-index" 945 | checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" 946 | 947 | [[package]] 948 | name = "semver-parser" 949 | version = "0.10.2" 950 | source = "registry+https://github.com/rust-lang/crates.io-index" 951 | checksum = "00b0bef5b7f9e0df16536d3961cfb6e84331c065b4066afb39768d0e319411f7" 952 | dependencies = [ 953 | "pest", 954 | ] 955 | 956 | [[package]] 957 | name = "serde" 958 | version = "1.0.130" 959 | source = "registry+https://github.com/rust-lang/crates.io-index" 960 | checksum = "f12d06de37cf59146fbdecab66aa99f9fe4f78722e3607577a5375d66bd0c913" 961 | dependencies = [ 962 | "serde_derive", 963 | ] 964 | 965 | [[package]] 966 | name = 
"serde_bytes" 967 | version = "0.11.5" 968 | source = "registry+https://github.com/rust-lang/crates.io-index" 969 | checksum = "16ae07dd2f88a366f15bd0632ba725227018c69a1c8550a927324f8eb8368bb9" 970 | dependencies = [ 971 | "serde", 972 | ] 973 | 974 | [[package]] 975 | name = "serde_derive" 976 | version = "1.0.130" 977 | source = "registry+https://github.com/rust-lang/crates.io-index" 978 | checksum = "d7bc1a1ab1961464eae040d96713baa5a724a8152c1222492465b54322ec508b" 979 | dependencies = [ 980 | "proc-macro2", 981 | "quote", 982 | "syn", 983 | ] 984 | 985 | [[package]] 986 | name = "serde_json" 987 | version = "1.0.68" 988 | source = "registry+https://github.com/rust-lang/crates.io-index" 989 | checksum = "0f690853975602e1bfe1ccbf50504d67174e3bcf340f23b5ea9992e0587a52d8" 990 | dependencies = [ 991 | "itoa", 992 | "ryu", 993 | "serde", 994 | ] 995 | 996 | [[package]] 997 | name = "serum_dex" 998 | version = "0.4.0" 999 | source = "registry+https://github.com/rust-lang/crates.io-index" 1000 | checksum = "02705854bae4622e552346c8edd43ab90c7425da35d63d2c689f39238f8d8b25" 1001 | dependencies = [ 1002 | "arrayref", 1003 | "bincode", 1004 | "bytemuck", 1005 | "byteorder", 1006 | "enumflags2", 1007 | "field-offset", 1008 | "itertools", 1009 | "num-traits", 1010 | "num_enum", 1011 | "safe-transmute", 1012 | "serde", 1013 | "solana-program", 1014 | "spl-token", 1015 | "static_assertions", 1016 | "thiserror", 1017 | "without-alloc", 1018 | ] 1019 | 1020 | [[package]] 1021 | name = "sha2" 1022 | version = "0.9.8" 1023 | source = "registry+https://github.com/rust-lang/crates.io-index" 1024 | checksum = "b69f9a4c9740d74c5baa3fd2e547f9525fa8088a8a958e0ca2409a514e33f5fa" 1025 | dependencies = [ 1026 | "block-buffer", 1027 | "cfg-if 1.0.0", 1028 | "cpufeatures", 1029 | "digest 0.9.0", 1030 | "opaque-debug", 1031 | ] 1032 | 1033 | [[package]] 1034 | name = "sha3" 1035 | version = "0.9.1" 1036 | source = "registry+https://github.com/rust-lang/crates.io-index" 1037 | checksum = 
"f81199417d4e5de3f04b1e871023acea7389672c4135918f05aa9cbf2f2fa809" 1038 | dependencies = [ 1039 | "block-buffer", 1040 | "digest 0.9.0", 1041 | "keccak", 1042 | "opaque-debug", 1043 | ] 1044 | 1045 | [[package]] 1046 | name = "solana-frozen-abi" 1047 | version = "1.7.11" 1048 | source = "registry+https://github.com/rust-lang/crates.io-index" 1049 | checksum = "21ddfc2b65a555c0e0156c043bce092d473bc4f00daa7ca3c223d97d92d2e807" 1050 | dependencies = [ 1051 | "bs58 0.3.1", 1052 | "bv", 1053 | "generic-array 0.14.4", 1054 | "log", 1055 | "memmap2", 1056 | "rustc_version 0.2.3", 1057 | "serde", 1058 | "serde_derive", 1059 | "sha2", 1060 | "solana-frozen-abi-macro", 1061 | "solana-logger", 1062 | "thiserror", 1063 | ] 1064 | 1065 | [[package]] 1066 | name = "solana-frozen-abi-macro" 1067 | version = "1.7.11" 1068 | source = "registry+https://github.com/rust-lang/crates.io-index" 1069 | checksum = "a876aa31298fdee6560c8ee0695ebed313bbdbb6fbbee439ac3b9df8aebfb87c" 1070 | dependencies = [ 1071 | "proc-macro2", 1072 | "quote", 1073 | "rustc_version 0.2.3", 1074 | "syn", 1075 | ] 1076 | 1077 | [[package]] 1078 | name = "solana-logger" 1079 | version = "1.7.11" 1080 | source = "registry+https://github.com/rust-lang/crates.io-index" 1081 | checksum = "98a07290cc521e529bff0b0afd3aacd1d3904a41f35321ede6d1f3574efa3e94" 1082 | dependencies = [ 1083 | "env_logger", 1084 | "lazy_static", 1085 | "log", 1086 | ] 1087 | 1088 | [[package]] 1089 | name = "solana-program" 1090 | version = "1.7.11" 1091 | source = "registry+https://github.com/rust-lang/crates.io-index" 1092 | checksum = "49ffc60d33a318300682e42d28ff4f1276327f6374cab9591c8620a54be7aec1" 1093 | dependencies = [ 1094 | "bincode", 1095 | "blake3", 1096 | "borsh", 1097 | "borsh-derive", 1098 | "bs58 0.3.1", 1099 | "bv", 1100 | "curve25519-dalek", 1101 | "hex", 1102 | "itertools", 1103 | "lazy_static", 1104 | "libsecp256k1", 1105 | "log", 1106 | "num-derive", 1107 | "num-traits", 1108 | "rand", 1109 | "rustc_version 0.2.3", 1110 | 
"rustversion", 1111 | "serde", 1112 | "serde_bytes", 1113 | "serde_derive", 1114 | "sha2", 1115 | "sha3", 1116 | "solana-frozen-abi", 1117 | "solana-frozen-abi-macro", 1118 | "solana-logger", 1119 | "solana-sdk-macro", 1120 | "thiserror", 1121 | ] 1122 | 1123 | [[package]] 1124 | name = "solana-sdk-macro" 1125 | version = "1.7.11" 1126 | source = "registry+https://github.com/rust-lang/crates.io-index" 1127 | checksum = "b453dca160617b1676c47e3cfd4361f455dc5bb1c93659ec84b0c5d566b5c039" 1128 | dependencies = [ 1129 | "bs58 0.3.1", 1130 | "proc-macro2", 1131 | "quote", 1132 | "rustversion", 1133 | "syn", 1134 | ] 1135 | 1136 | [[package]] 1137 | name = "spl-associated-token-account" 1138 | version = "1.0.3" 1139 | source = "registry+https://github.com/rust-lang/crates.io-index" 1140 | checksum = "393e2240d521c3dd770806bff25c2c00d761ac962be106e14e22dd912007f428" 1141 | dependencies = [ 1142 | "solana-program", 1143 | "spl-token", 1144 | ] 1145 | 1146 | [[package]] 1147 | name = "spl-token" 1148 | version = "3.2.0" 1149 | source = "registry+https://github.com/rust-lang/crates.io-index" 1150 | checksum = "93bfdd5bd7c869cb565c7d7635c4fafe189b988a0bdef81063cd9585c6b8dc01" 1151 | dependencies = [ 1152 | "arrayref", 1153 | "num-derive", 1154 | "num-traits", 1155 | "num_enum", 1156 | "solana-program", 1157 | "thiserror", 1158 | ] 1159 | 1160 | [[package]] 1161 | name = "static_assertions" 1162 | version = "1.1.0" 1163 | source = "registry+https://github.com/rust-lang/crates.io-index" 1164 | checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" 1165 | 1166 | [[package]] 1167 | name = "subtle" 1168 | version = "2.4.1" 1169 | source = "registry+https://github.com/rust-lang/crates.io-index" 1170 | checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601" 1171 | 1172 | [[package]] 1173 | name = "syn" 1174 | version = "1.0.77" 1175 | source = "registry+https://github.com/rust-lang/crates.io-index" 1176 | checksum = 
"5239bc68e0fef57495900cfea4e8dc75596d9a319d7e16b1e0a440d24e6fe0a0" 1177 | dependencies = [ 1178 | "proc-macro2", 1179 | "quote", 1180 | "unicode-xid", 1181 | ] 1182 | 1183 | [[package]] 1184 | name = "termcolor" 1185 | version = "1.1.2" 1186 | source = "registry+https://github.com/rust-lang/crates.io-index" 1187 | checksum = "2dfed899f0eb03f32ee8c6a0aabdb8a7949659e3466561fc0adf54e26d88c5f4" 1188 | dependencies = [ 1189 | "winapi-util", 1190 | ] 1191 | 1192 | [[package]] 1193 | name = "thiserror" 1194 | version = "1.0.29" 1195 | source = "registry+https://github.com/rust-lang/crates.io-index" 1196 | checksum = "602eca064b2d83369e2b2f34b09c70b605402801927c65c11071ac911d299b88" 1197 | dependencies = [ 1198 | "thiserror-impl", 1199 | ] 1200 | 1201 | [[package]] 1202 | name = "thiserror-impl" 1203 | version = "1.0.29" 1204 | source = "registry+https://github.com/rust-lang/crates.io-index" 1205 | checksum = "bad553cc2c78e8de258400763a647e80e6d1b31ee237275d756f6836d204494c" 1206 | dependencies = [ 1207 | "proc-macro2", 1208 | "quote", 1209 | "syn", 1210 | ] 1211 | 1212 | [[package]] 1213 | name = "toml" 1214 | version = "0.5.8" 1215 | source = "registry+https://github.com/rust-lang/crates.io-index" 1216 | checksum = "a31142970826733df8241ef35dc040ef98c679ab14d7c3e54d827099b3acecaa" 1217 | dependencies = [ 1218 | "serde", 1219 | ] 1220 | 1221 | [[package]] 1222 | name = "typenum" 1223 | version = "1.14.0" 1224 | source = "registry+https://github.com/rust-lang/crates.io-index" 1225 | checksum = "b63708a265f51345575b27fe43f9500ad611579e764c79edbc2037b1121959ec" 1226 | 1227 | [[package]] 1228 | name = "ucd-trie" 1229 | version = "0.1.3" 1230 | source = "registry+https://github.com/rust-lang/crates.io-index" 1231 | checksum = "56dee185309b50d1f11bfedef0fe6d036842e3fb77413abef29f8f8d1c5d4c1c" 1232 | 1233 | [[package]] 1234 | name = "unicode-segmentation" 1235 | version = "1.8.0" 1236 | source = "registry+https://github.com/rust-lang/crates.io-index" 1237 | checksum = 
"8895849a949e7845e06bd6dc1aa51731a103c42707010a5b591c0038fb73385b" 1238 | 1239 | [[package]] 1240 | name = "unicode-xid" 1241 | version = "0.2.2" 1242 | source = "registry+https://github.com/rust-lang/crates.io-index" 1243 | checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3" 1244 | 1245 | [[package]] 1246 | name = "version_check" 1247 | version = "0.9.3" 1248 | source = "registry+https://github.com/rust-lang/crates.io-index" 1249 | checksum = "5fecdca9a5291cc2b8dcf7dc02453fee791a280f3743cb0905f8822ae463b3fe" 1250 | 1251 | [[package]] 1252 | name = "vipers" 1253 | version = "1.2.1" 1254 | source = "registry+https://github.com/rust-lang/crates.io-index" 1255 | checksum = "8960b053a136ee9be9620d0e57c386803db236fa041255b3c6bf135bd7fae4d7" 1256 | dependencies = [ 1257 | "anchor-lang", 1258 | "anchor-spl", 1259 | "solana-program", 1260 | "spl-associated-token-account", 1261 | ] 1262 | 1263 | [[package]] 1264 | name = "wasi" 1265 | version = "0.9.0+wasi-snapshot-preview1" 1266 | source = "registry+https://github.com/rust-lang/crates.io-index" 1267 | checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" 1268 | 1269 | [[package]] 1270 | name = "winapi" 1271 | version = "0.3.9" 1272 | source = "registry+https://github.com/rust-lang/crates.io-index" 1273 | checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" 1274 | dependencies = [ 1275 | "winapi-i686-pc-windows-gnu", 1276 | "winapi-x86_64-pc-windows-gnu", 1277 | ] 1278 | 1279 | [[package]] 1280 | name = "winapi-i686-pc-windows-gnu" 1281 | version = "0.4.0" 1282 | source = "registry+https://github.com/rust-lang/crates.io-index" 1283 | checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" 1284 | 1285 | [[package]] 1286 | name = "winapi-util" 1287 | version = "0.1.5" 1288 | source = "registry+https://github.com/rust-lang/crates.io-index" 1289 | checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" 1290 | 
dependencies = [ 1291 | "winapi", 1292 | ] 1293 | 1294 | [[package]] 1295 | name = "winapi-x86_64-pc-windows-gnu" 1296 | version = "0.4.0" 1297 | source = "registry+https://github.com/rust-lang/crates.io-index" 1298 | checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" 1299 | 1300 | [[package]] 1301 | name = "without-alloc" 1302 | version = "0.2.1" 1303 | source = "registry+https://github.com/rust-lang/crates.io-index" 1304 | checksum = "5e34736feff52a0b3e5680927e947a4d8fac1f0b80dc8120b080dd8de24d75e2" 1305 | dependencies = [ 1306 | "alloc-traits", 1307 | ] 1308 | 1309 | [[package]] 1310 | name = "yansi" 1311 | version = "0.5.0" 1312 | source = "registry+https://github.com/rust-lang/crates.io-index" 1313 | checksum = "9fc79f4a1e39857fc00c3f662cbf2651c771f00e9c15fe2abc341806bd46bd71" 1314 | 1315 | [[package]] 1316 | name = "zeroize" 1317 | version = "1.4.2" 1318 | source = "registry+https://github.com/rust-lang/crates.io-index" 1319 | checksum = "bf68b08513768deaa790264a7fac27a58cbf2705cfcdc9448362229217d7e970" 1320 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | members = [ 3 | "programs/*" 4 | ] 5 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 
14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 
47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # DeployDAO Migrator 2 | 3 | [![Crates.io](https://img.shields.io/crates/v/migrator)](https://crates.io/crates/migrator) 4 | [![License](https://img.shields.io/crates/l/migrator)](https://github.com/DeployDAO/migrator/blob/master/LICENSE) 5 | [![Build Status](https://img.shields.io/github/workflow/status/DeployDAO/migrator/Rust/master)](https://github.com/DeployDAO/migrator/actions/workflows/rust.yml?query=branch%3Amaster) 6 | [![Contributors](https://img.shields.io/github/contributors/DeployDAO/migrator)](https://github.com/DeployDAO/migrator/graphs/contributors) 7 | 8 | **WARNING: This code is a work in progress. Please do not use it as is.** 9 | 10 | A program for deploying and upgrading programs. 
11 | 12 | ## About 13 | 14 | The Migrator: 15 | 16 | - Performs program deploys and upgrades 17 | - Decouples program deploys/upgrades into 3 roles: the proposer, the approver, and the deployer 18 | - Leaves an on-chain audit trail of program upgrades and deploys 19 | 20 | ## Usage 21 | 22 | There are two forms of intended usage: "self-hosted" and the DeployDAO. 23 | 24 | ### Self-hosted 25 | 26 | This may be used for development or if you want to maintain full control over your own smart contract deployment. 27 | 28 | 1. Reserve a program ID. This allows for the program to be deployed at the same address across multiple chains. 29 | 2. Create a new migrator, with the approver set to your own address, a multisig, or a DAO. 30 | 3. Upload the bytecode of the program to a buffer via `solana program write-buffer`. Ideally this bytecode is generated in a [verifiable manner](https://anchor.projectserum.com/). 31 | 4. Create a proposal to deploy the program. 32 | 5. Approve your proposal. 33 | 6. Anyone must supply the migrator account with enough SOL to cover the program deployment. 34 | 7. Anyone may deploy the new migration, until the migration expires. 35 | 36 | ### DeployDAO 37 | 38 | _note: this is subject to change_ 39 | 40 | The DeployDAO is a decentralized autonomous organization that elects multisig holders to approve program upgrades and deploys. 41 | 42 | To deploy a program, one should: 43 | 44 | 1. Reserve a program ID. This allows for the program to be deployed at the same address across multiple chains. 45 | 2. Create a new migrator, with the approver set to the DeployDAO address. 46 | 3. Upload the bytecode of the program to a buffer via `solana program write-buffer`. Ideally this bytecode is generated in a [verifiable manner](https://anchor.projectserum.com/). 47 | 4. Create a proposal to deploy the program. 48 | 5. Contact the DeployDAO requesting for your program to be approved for deployment. 49 | 6. 
If the DeployDAO likes your code, they may approve the deployment. 50 | 7. Anyone in the community must supply the migrator account with enough SOL to cover the program deployment. 51 | 8. Anyone in the community may deploy the new migration, until the migration expires. 52 | 53 | Upgrading is done very similarly. 54 | 55 | ## License 56 | 57 | The DeployDAO program and SDK is distributed under the Apache 2.0 license. 58 | -------------------------------------------------------------------------------- /ci.nix: -------------------------------------------------------------------------------- 1 | { pkgs }: 2 | let 3 | anchor-parse-idls = pkgs.writeShellScriptBin "anchor-parse-idls" 4 | (builtins.readFile ./scripts/idl.sh); 5 | in pkgs.buildEnv { 6 | name = "ci"; 7 | paths = with pkgs; 8 | (pkgs.lib.optionals pkgs.stdenv.isLinux ([ libudev ])) ++ [ 9 | anchor-0_17_0 10 | anchor-parse-idls 11 | 12 | # sdk 13 | nodejs 14 | yarn 15 | python3 16 | 17 | pkgconfig 18 | openssl 19 | jq 20 | gnused 21 | 22 | libiconv 23 | ] ++ (pkgs.lib.optionals pkgs.stdenv.isDarwin [ 24 | pkgs.darwin.apple_sdk.frameworks.AppKit 25 | pkgs.darwin.apple_sdk.frameworks.IOKit 26 | pkgs.darwin.apple_sdk.frameworks.Foundation 27 | ]); 28 | } 29 | -------------------------------------------------------------------------------- /flake.lock: -------------------------------------------------------------------------------- 1 | { 2 | "nodes": { 3 | "flake-utils": { 4 | "locked": { 5 | "lastModified": 1631561581, 6 | "narHash": "sha256-3VQMV5zvxaVLvqqUrNz3iJelLw30mIVSfZmAaauM3dA=", 7 | "owner": "numtide", 8 | "repo": "flake-utils", 9 | "rev": "7e5bf3925f6fbdfaf50a2a7ca0be2879c4261d19", 10 | "type": "github" 11 | }, 12 | "original": { 13 | "owner": "numtide", 14 | "repo": "flake-utils", 15 | "type": "github" 16 | } 17 | }, 18 | "flake-utils_2": { 19 | "locked": { 20 | "lastModified": 1631561581, 21 | "narHash": "sha256-3VQMV5zvxaVLvqqUrNz3iJelLw30mIVSfZmAaauM3dA=", 22 | "owner": "numtide", 23 | "repo": 
"flake-utils", 24 | "rev": "7e5bf3925f6fbdfaf50a2a7ca0be2879c4261d19", 25 | "type": "github" 26 | }, 27 | "original": { 28 | "owner": "numtide", 29 | "repo": "flake-utils", 30 | "type": "github" 31 | } 32 | }, 33 | "flake-utils_3": { 34 | "locked": { 35 | "lastModified": 1623875721, 36 | "narHash": "sha256-A8BU7bjS5GirpAUv4QA+QnJ4CceLHkcXdRp4xITDB0s=", 37 | "owner": "numtide", 38 | "repo": "flake-utils", 39 | "rev": "f7e004a55b120c02ecb6219596820fcd32ca8772", 40 | "type": "github" 41 | }, 42 | "original": { 43 | "owner": "numtide", 44 | "repo": "flake-utils", 45 | "type": "github" 46 | } 47 | }, 48 | "nixpkgs": { 49 | "locked": { 50 | "lastModified": 1633329294, 51 | "narHash": "sha256-0LpQLS4KMgxslMgmDHmxG/5twFlXDBW9z4Or1iOrCvU=", 52 | "owner": "NixOS", 53 | "repo": "nixpkgs", 54 | "rev": "ee084c02040e864eeeb4cf4f8538d92f7c675671", 55 | "type": "github" 56 | }, 57 | "original": { 58 | "owner": "NixOS", 59 | "ref": "nixpkgs-unstable", 60 | "repo": "nixpkgs", 61 | "type": "github" 62 | } 63 | }, 64 | "root": { 65 | "inputs": { 66 | "flake-utils": "flake-utils", 67 | "nixpkgs": "nixpkgs", 68 | "saber-overlay": "saber-overlay" 69 | } 70 | }, 71 | "rust-overlay": { 72 | "inputs": { 73 | "flake-utils": "flake-utils_3", 74 | "nixpkgs": [ 75 | "nixpkgs" 76 | ] 77 | }, 78 | "locked": { 79 | "lastModified": 1633400100, 80 | "narHash": "sha256-kHQV7jZ2vVHVI9sfda1mUROVBbQbdfKcbIpKG9WdqGo=", 81 | "owner": "oxalica", 82 | "repo": "rust-overlay", 83 | "rev": "9c2fc6a62ccbc6f420d71ecac6bf0b84dbbee64f", 84 | "type": "github" 85 | }, 86 | "original": { 87 | "owner": "oxalica", 88 | "repo": "rust-overlay", 89 | "type": "github" 90 | } 91 | }, 92 | "saber-overlay": { 93 | "inputs": { 94 | "flake-utils": "flake-utils_2", 95 | "nixpkgs": [ 96 | "nixpkgs" 97 | ], 98 | "rust-overlay": "rust-overlay" 99 | }, 100 | "locked": { 101 | "lastModified": 1633473440, 102 | "narHash": "sha256-kRX8MS9t63PJ6hDp6rc0RgBuM4lvGM8bPF2DLnw7g/s=", 103 | "owner": "saber-hq", 104 | "repo": "saber-overlay", 
105 | "rev": "b8c0530465e61bb750c0e8d275dc77263742f4e4", 106 | "type": "github" 107 | }, 108 | "original": { 109 | "owner": "saber-hq", 110 | "repo": "saber-overlay", 111 | "type": "github" 112 | } 113 | } 114 | }, 115 | "root": "root", 116 | "version": 7 117 | } 118 | -------------------------------------------------------------------------------- /flake.nix: -------------------------------------------------------------------------------- 1 | { 2 | description = "DeployDAO migrator development environment."; 3 | 4 | inputs = { 5 | nixpkgs.url = "github:NixOS/nixpkgs/nixpkgs-unstable"; 6 | saber-overlay.url = "github:saber-hq/saber-overlay"; 7 | saber-overlay.inputs.nixpkgs.follows = "nixpkgs"; 8 | flake-utils.url = "github:numtide/flake-utils"; 9 | }; 10 | 11 | outputs = { self, nixpkgs, saber-overlay, flake-utils }: 12 | flake-utils.lib.eachSystem [ 13 | "aarch64-darwin" 14 | "x86_64-linux" 15 | "x86_64-darwin" 16 | ] (system: 17 | let 18 | pkgs = import nixpkgs { 19 | inherit system; 20 | overlays = [ saber-overlay.overlay ]; 21 | }; 22 | in { 23 | devShell = import ./shell.nix { inherit pkgs; }; 24 | packages.ci = import ./ci.nix { inherit pkgs; }; 25 | }); 26 | } 27 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@deploydao/migrator", 3 | "version": "0.1.0", 4 | "description": "Migration management for Solana programs.", 5 | "main": "dist/cjs/index.js", 6 | "module": "dist/esm/index.js", 7 | "keywords": [ 8 | "solana", 9 | "blockchain" 10 | ], 11 | "bugs": { 12 | "url": "https://github.com/deploydao/migrator/issues", 13 | "email": "team@deploydao.com" 14 | }, 15 | "repository": { 16 | "type": "git", 17 | "url": "git+https://github.com/deploydao/migrator.git" 18 | }, 19 | "author": "DeployDAO ", 20 | "homepage": "https://deploydao.com", 21 | "license": "Apache-2.0", 22 | "publishConfig": { 23 | "access": "public" 24 | },
25 | "devDependencies": { 26 | "@project-serum/anchor": "^0.17.1-beta.1", 27 | "@rushstack/eslint-patch": "^1.0.7", 28 | "@saberhq/anchor-contrib": "^1.3.14", 29 | "@saberhq/chai-solana": "^1.3.14", 30 | "@saberhq/eslint-config": "^1.3.14", 31 | "@saberhq/solana-contrib": "^1.3.14", 32 | "@saberhq/token-utils": "^1.3.14", 33 | "@solana/spl-token": "^0.1.8", 34 | "@solana/spl-token-registry": "^0.2.272", 35 | "@solana/web3.js": "^1.29.2", 36 | "@types/bn.js": "^5.1.0", 37 | "@types/chai": "^4.2.22", 38 | "@types/eslint": "^7", 39 | "@types/mocha": "^9.0.0", 40 | "@types/node": "^16.10.3", 41 | "bn.js": "^5.2.0", 42 | "chai": "^4.3.4", 43 | "eslint": "^7.32.0", 44 | "eslint-import-resolver-node": "^0.3.6", 45 | "eslint-plugin-import": "^2.24.2", 46 | "husky": "^7.0.2", 47 | "lint-staged": "^11.2.0", 48 | "mocha": "8", 49 | "prettier": "^2.4.1", 50 | "ts-mocha": "^8.0.0", 51 | "ts-node": "^10.2.1", 52 | "typescript": "^4.4.3" 53 | }, 54 | "lint-staged": { 55 | "*.{ts,tsx}": "eslint --cache --fix", 56 | "*.{js,jsx,json,html,css,md}": "prettier --write", 57 | "*.nix": "nixfmt" 58 | }, 59 | "scripts": { 60 | "build": "rm -fr dist/ && tsc -P tsconfig.build.json && tsc -P tsconfig.esm.json", 61 | "lint": "eslint . 
--cache", 62 | "test": "anchor test --skip-build tests/*.ts", 63 | "prepare": "husky install", 64 | "idl:generate": "./scripts/idl.sh && ./scripts/generate-idl-types.sh", 65 | "idl:generate:nolint": "./scripts/idl.sh && RUN_ESLINT=none ./scripts/generate-idl-types.sh" 66 | }, 67 | "dependencies": { 68 | "tiny-invariant": "^1.1.0", 69 | "tslib": "^2.3.1" 70 | }, 71 | "peerDependencies": { 72 | "@project-serum/anchor": "^0.17.0", 73 | "@saberhq/anchor-contrib": "^1.3.14", 74 | "@saberhq/solana-contrib": "^1.3.14", 75 | "@saberhq/token-utils": "^1.3.14", 76 | "@solana/web3.js": "^1.29.2", 77 | "bn.js": "^5.2.0" 78 | }, 79 | "packageManager": "yarn@3.0.2", 80 | "files": [ 81 | "dist/", 82 | "src/" 83 | ] 84 | } 85 | -------------------------------------------------------------------------------- /programs/migrator/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "migrator" 3 | version = "0.1.0" 4 | description = "A Solana program for deploying and upgrading programs." 5 | edition = "2018" 6 | homepage = "https://deploydao.com" 7 | repository = "https://github.com/DeployDAO/migrator" 8 | license = "Apache-2.0" 9 | keywords = ["solana", "deploy", "anchor"] 10 | 11 | [lib] 12 | crate-type = ["cdylib", "lib"] 13 | name = "migrator" 14 | 15 | [features] 16 | no-entrypoint = [] 17 | no-idl = [] 18 | cpi = ["no-entrypoint"] 19 | default = [] 20 | 21 | [dependencies] 22 | anchor-lang = "0.17.0" 23 | anchor-spl = "0.17.0" 24 | bincode = "1.3.3" 25 | solana-program = "1.7.11" 26 | spl-associated-token-account = { version = "1.0.3", features = [ 27 | "no-entrypoint" 28 | ] } 29 | vipers = "1.2.1" 30 | -------------------------------------------------------------------------------- /programs/migrator/README.md: -------------------------------------------------------------------------------- 1 | # `migrator` 2 | 3 | Program deploy and upgrade manager. 
4 | -------------------------------------------------------------------------------- /programs/migrator/Xargo.toml: -------------------------------------------------------------------------------- 1 | [target.bpfel-unknown-unknown.dependencies.std] 2 | features = [] 3 | -------------------------------------------------------------------------------- /programs/migrator/src/account_contexts.rs: -------------------------------------------------------------------------------- 1 | //! Accounts structs. 2 | 3 | use crate::{ 4 | bpf_loader_upgradeable::{BPFLoaderUpgradeable, UpgradeableLoaderAccount}, 5 | state::*, 6 | }; 7 | use anchor_lang::prelude::*; 8 | 9 | #[derive(Accounts)] 10 | #[instruction(bump: u8, name: String, description: String)] 11 | pub struct NewMigrator<'info> { 12 | /// [Migrator]. 13 | #[account( 14 | init, 15 | seeds = [ 16 | b"migrator".as_ref(), 17 | program.key().to_bytes().as_ref() 18 | ], 19 | bump = bump, 20 | payer = payer, 21 | space = std::mem::size_of::() + name.as_bytes().len() + description.as_bytes().len() 22 | )] 23 | pub migrator: Account<'info, Migrator>, 24 | 25 | /// Account which will approve migrations. 26 | pub approver: UncheckedAccount<'info>, 27 | 28 | /// Program ID. 29 | pub program: UncheckedAccount<'info>, 30 | 31 | /// Address where the program data will be stored. 32 | pub program_data: UncheckedAccount<'info>, 33 | 34 | /// Payer of transactions. 35 | pub payer: Signer<'info>, 36 | 37 | /// The [System] program. 38 | pub system_program: Program<'info, System>, 39 | } 40 | 41 | #[derive(Accounts)] 42 | pub struct DeployProgram<'info> { 43 | /// The approved [Migration] and its [Migrator]. 44 | pub approved_migration: ApprovedMigration<'info>, 45 | 46 | /// The program which has not yet been deployed. 47 | pub program: UndeployedProgram<'info>, 48 | 49 | /// The [Rent] sysvar. 50 | pub rent: Sysvar<'info, Rent>, 51 | /// The [Clock] sysvar. 52 | pub clock: Sysvar<'info, Clock>, 53 | /// The [System] program. 
54 | pub system_program: Program<'info, System>, 55 | /// The [bpf_loader_upgradeable] program. 56 | pub bpf_loader_upgradeable_program: Program<'info, BPFLoaderUpgradeable>, 57 | } 58 | 59 | #[derive(Accounts)] 60 | pub struct UpgradeProgram<'info> { 61 | /// The approved [Migration] and its [Migrator]. 62 | pub approved_migration: ApprovedMigration<'info>, 63 | 64 | /// The existing, live program. 65 | pub program: LiveProgram<'info>, 66 | 67 | /// The [Rent] sysvar. 68 | pub rent: Sysvar<'info, Rent>, 69 | /// The [Clock] sysvar. 70 | pub clock: Sysvar<'info, Clock>, 71 | /// The [System] program. 72 | pub system_program: Program<'info, System>, 73 | /// The [bpf_loader_upgradeable] program. 74 | pub bpf_loader_upgradeable_program: Program<'info, BPFLoaderUpgradeable>, 75 | } 76 | 77 | /// Accounts for [migrator::reserve_program_id]. 78 | #[derive(Accounts)] 79 | pub struct ReserveProgramID<'info> { 80 | /// Account containing the program ID. 81 | pub program: Signer<'info>, 82 | /// Payer to create the distributor. 83 | pub payer: Signer<'info>, 84 | /// The [System] program. 85 | pub system_program: Program<'info, System>, 86 | } 87 | 88 | #[derive(Accounts)] 89 | pub struct ApproveMigration<'info> { 90 | /// The migrator. 91 | pub migrator: Account<'info, Migrator>, 92 | /// The migration. 93 | pub migration: Account<'info, Migration>, 94 | /// [Migrator::approver]. 95 | pub approver: Signer<'info>, 96 | } 97 | 98 | #[derive(Accounts)] 99 | pub struct RejectMigration<'info> { 100 | /// The migrator. 101 | pub migrator: Account<'info, Migrator>, 102 | /// The migration. 103 | pub migration: Account<'info, Migration>, 104 | /// [Migrator::approver]. 105 | pub approver: Signer<'info>, 106 | } 107 | 108 | #[derive(Accounts)] 109 | #[instruction(bump: u8, title: String, description: String)] 110 | pub struct ProposeMigration<'info> { 111 | /// The approved [Migration] and its [Migrator]. 
112 | pub migrator: Account<'info, Migrator>, 113 | /// The approved [Migration] and its [Migrator]. 114 | #[account( 115 | init, 116 | seeds = [ 117 | b"migration".as_ref(), 118 | migrator.num_migrations.to_le_bytes().as_ref() 119 | ], 120 | bump = bump, 121 | payer = proposer, 122 | space = std::mem::size_of::() + title.as_bytes().len() + description.as_bytes().len() 123 | )] 124 | pub migration: Account<'info, Migration>, 125 | /// The existing, live program. 126 | pub buffer: Account<'info, UpgradeableLoaderAccount>, 127 | /// The one proposing the migration. Also the payer. 128 | pub proposer: Signer<'info>, 129 | /// The [System] program. 130 | pub system_program: Program<'info, System>, 131 | } 132 | 133 | ////////////////////////////////////////// 134 | // Context structs 135 | ////////////////////////////////////////// 136 | 137 | /// A new, undeployed program. 138 | #[derive(Accounts)] 139 | pub struct UndeployedProgram<'info> { 140 | /// Program with no data in it, owned by this program. 141 | pub program: UncheckedAccount<'info>, 142 | /// Address where the program data will be stored. 143 | pub program_data: UncheckedAccount<'info>, 144 | } 145 | 146 | #[derive(Accounts)] 147 | pub struct LiveProgram<'info> { 148 | /// Account containing the program ID. 149 | pub program: Account<'info, UpgradeableLoaderAccount>, 150 | /// Address where the program data will be stored. 151 | pub program_data: Account<'info, UpgradeableLoaderAccount>, 152 | } 153 | 154 | #[derive(Accounts)] 155 | pub struct ApprovedMigration<'info> { 156 | /// The [Migrator] associated with the program to be deployed. 157 | pub migrator: Account<'info, Migrator>, 158 | /// The [Migration] to deploy. 159 | pub migration: Account<'info, Migration>, 160 | /// Account containing the buffer to deploy. 161 | pub buffer: Account<'info, UpgradeableLoaderAccount>, 162 | /// Account which executed the deployment. 
163 | pub executor: Signer<'info>, 164 | } 165 | -------------------------------------------------------------------------------- /programs/migrator/src/account_validators.rs: -------------------------------------------------------------------------------- 1 | use anchor_lang::prelude::*; 2 | use solana_program::{ 3 | bpf_loader_upgradeable::{self, UpgradeableLoaderState}, 4 | system_program, 5 | }; 6 | use vipers::{assert_keys, invariant, program_err, unwrap_opt, validate::Validate}; 7 | 8 | use crate::{ 9 | account_contexts::{NewMigrator, RejectMigration, ReserveProgramID}, 10 | bpf_loader_upgradeable::UpgradeableLoaderAccount, 11 | ApproveMigration, ApprovedMigration, DeployProgram, LiveProgram, ProposeMigration, 12 | UndeployedProgram, UpgradeProgram, 13 | }; 14 | 15 | impl<'info> Validate<'info> for NewMigrator<'info> { 16 | fn validate(&self) -> ProgramResult { 17 | let migrator_key = self.migrator.key(); 18 | let program = &self.program; 19 | let program_data = &self.program_data; 20 | 21 | if program_data.data_is_empty() { 22 | // migrator for an undeployed program 23 | (UndeployedProgram { 24 | program: self.program.clone(), 25 | program_data: self.program_data.clone(), 26 | }) 27 | .validate_for_migrator(migrator_key)?; 28 | } else { 29 | // migrator for a live program 30 | let program: Account = Account::try_from(program)?; 31 | let program_data: Account = Account::try_from(program_data)?; 32 | (LiveProgram { 33 | program, 34 | program_data, 35 | }) 36 | .validate_for_migrator(migrator_key)?; 37 | } 38 | 39 | Ok(()) 40 | } 41 | } 42 | 43 | impl<'info> Validate<'info> for DeployProgram<'info> { 44 | fn validate(&self) -> ProgramResult { 45 | self.approved_migration.validate()?; 46 | self.program 47 | .validate_for_migrator(self.approved_migration.migrator.key())?; 48 | 49 | assert_keys!( 50 | self.approved_migration.migrator.program_id, 51 | self.program.program, 52 | "approved_migration.migrator.program_id" 53 | ); 54 | 55 | Ok(()) 56 | } 57 | } 58 | 59 
| impl<'info> Validate<'info> for UpgradeProgram<'info> { 60 | fn validate(&self) -> ProgramResult { 61 | self.approved_migration.validate()?; 62 | self.program 63 | .validate_for_migrator(self.approved_migration.migrator.key())?; 64 | assert_keys!( 65 | self.approved_migration.migrator.program_id, 66 | self.program.program, 67 | "approved_migration.migrator.program_id" 68 | ); 69 | 70 | Ok(()) 71 | } 72 | } 73 | 74 | impl<'info> Validate<'info> for ApproveMigration<'info> { 75 | fn validate(&self) -> ProgramResult { 76 | // The migration must belong to this migrator (compare against the
// migrator account, not the migration's own address).
assert_keys!( 77 | self.migration.migrator, 78 | self.migrator, 79 | "migration.migrator" 80 | ); 81 | assert_keys!(self.migrator.approver, self.approver, "migrator.approver"); 82 | require!(self.migration.executed_at == -1, MigrationAlreadyExecuted); 83 | 84 | Ok(()) 85 | } 86 | } 87 | 88 | impl<'info> Validate<'info> for RejectMigration<'info> { 89 | fn validate(&self) -> ProgramResult { 90 | // The migration must belong to this migrator (compare against the
// migrator account, not the migration's own address).
assert_keys!( 91 | self.migration.migrator, 92 | self.migrator, 93 | "migration.migrator" 94 | ); 95 | assert_keys!(self.migrator.approver, self.approver, "migrator.approver"); 96 | require!(self.migration.executed_at == -1, MigrationAlreadyExecuted); 97 | 98 | Ok(()) 99 | } 100 | } 101 | 102 | impl<'info> Validate<'info> for ProposeMigration<'info> { 103 | fn validate(&self) -> ProgramResult { 104 | if let UpgradeableLoaderState::Buffer { authority_address } = **self.buffer { 105 | assert_keys!( 106 | unwrap_opt!(authority_address, "no buffer authority"), 107 | self.migrator, 108 | "buffer authority must be migrator" 109 | ); 110 | } else { 111 | return program_err!(BufferAuthorityMismatch); 112 | } 113 | Ok(()) 114 | } 115 | } 116 | 117 | impl<'info> Validate<'info> for ReserveProgramID<'info> { 118 | fn validate(&self) -> ProgramResult { 119 | assert_keys!( 120 | *self.program.to_account_info().owner, 121 | system_program::ID, 122 | "program must not be a program account" 123 | ); 124 | Ok(()) 125 | } 126 | } 127 | 128 | impl<'info> Validate<'info> for
ApprovedMigration<'info> { 129 | fn validate(&self) -> ProgramResult { 130 | assert_keys!( 131 | self.migrator.pending_migration, 132 | self.migration, 133 | "migrator.pending_migration" 134 | ); 135 | assert_keys!(self.migration.migrator, self.migrator, "migration.migrator"); 136 | assert_keys!(self.migration.buffer, self.buffer, "migration.buffer"); 137 | 138 | if let UpgradeableLoaderState::Buffer { authority_address } = **self.buffer { 139 | assert_keys!( 140 | unwrap_opt!(authority_address, "no buffer authority"), 141 | self.migrator, 142 | "buffer authority must be migrator" 143 | ); 144 | } else { 145 | return program_err!(BufferAuthorityMismatch); 146 | } 147 | 148 | assert_keys!(self.migration.buffer, self.buffer, "migration.buffer"); 149 | 150 | assert_keys!( 151 | self.migrator.pending_migration, 152 | self.migration, 153 | "pending_migration" 154 | ); 155 | let migrator = &self.migrator; 156 | let now = Clock::get()?.unix_timestamp; 157 | require!(migrator.approval_expires_at != -1, NoApprovedMigration); 158 | // The approval window must still be open: `approve_migration` requires
// `deadline > now` at approval time, and execution is allowed only
// until that deadline passes.
require!(now <= migrator.approval_expires_at, MigrationWindowExpired); 159 | 160 | // `rejected_at == -1` means "never rejected" (set in `propose_migration`
// and restored by `approve_migration`); a rejected migration must not run.
require!(self.migration.rejected_at == -1, MigrationRejected); 161 | require!(self.migration.executed_at == -1, MigrationAlreadyExecuted); 162 | 163 | Ok(()) 164 | } 165 | } 166 | 167 | impl<'info> UndeployedProgram<'info> { 168 | pub fn validate_for_migrator(&self, migrator: Pubkey) -> ProgramResult { 169 | let program = &self.program; 170 | let program_data = &self.program_data; 171 | 172 | let (migrator_address, _) = Pubkey::find_program_address( 173 | &[b"migrator".as_ref(), &program.key().to_bytes()], 174 | &crate::ID, 175 | ); 176 | assert_keys!(migrator, migrator_address, "migrator should be canonical"); 177 | 178 | let (programdata_address, _) = 179 | Pubkey::find_program_address(&[program.key().as_ref()], &bpf_loader_upgradeable::ID); 180 | assert_keys!( 181 | programdata_address, 182 | program_data.key(), 183 | "programdata_address" 184 | ); 185 | 186 | 
assert_keys!( 187 | *program.owner, 188 | crate::ID, 189 | "program must be owned by this program" 190 | ); 191 | invariant!(program_data.data_is_empty(), "program data must be empty"); 192 | 193 | Ok(()) 194 | } 195 | } 196 | 197 | impl<'info> LiveProgram<'info> { 198 | pub fn validate_for_migrator(&self, migrator: Pubkey) -> ProgramResult { 199 | let program = &self.program; 200 | let program_data = &self.program_data; 201 | 202 | if let ( 203 | UpgradeableLoaderState::Program { 204 | programdata_address, 205 | }, 206 | UpgradeableLoaderState::ProgramData { 207 | slot: _, 208 | upgrade_authority_address, 209 | }, 210 | ) = (***program, ***program_data) 211 | { 212 | assert_keys!(programdata_address, *program_data, "programdata_address"); 213 | assert_keys!( 214 | unwrap_opt!(upgrade_authority_address, "upgrade_authority must be set"), 215 | migrator, 216 | "upgrade_authority must be migrator" 217 | ); 218 | } else { 219 | return program_err!(ParseError); 220 | } 221 | 222 | Ok(()) 223 | } 224 | } 225 | -------------------------------------------------------------------------------- /programs/migrator/src/bpf_loader_upgradeable.rs: -------------------------------------------------------------------------------- 1 | use anchor_lang::{ 2 | prelude::{ProgramError, Pubkey}, 3 | Id, Owner, 4 | }; 5 | use solana_program::{bpf_loader_upgradeable, declare_id, instruction::InstructionError}; 6 | use std::ops::Deref; 7 | use vipers::try_or_err; 8 | 9 | declare_id!("BPFLoaderUpgradeab1e11111111111111111111111"); 10 | 11 | #[derive(Clone)] 12 | pub struct BPFLoaderUpgradeable; 13 | 14 | impl anchor_lang::AccountDeserialize for BPFLoaderUpgradeable { 15 | fn try_deserialize(buf: &mut &[u8]) -> Result { 16 | BPFLoaderUpgradeable::try_deserialize_unchecked(buf) 17 | } 18 | 19 | fn try_deserialize_unchecked(_buf: &mut &[u8]) -> Result { 20 | Ok(BPFLoaderUpgradeable) 21 | } 22 | } 23 | 24 | impl Id for BPFLoaderUpgradeable { 25 | fn id() -> Pubkey { 26 | bpf_loader_upgradeable::ID 
27 | } 28 | } 29 | 30 | /// State of an UpgradeableLoader program. 31 | #[derive(Clone, Copy, Debug, PartialEq)] 32 | pub struct UpgradeableLoaderAccount(bpf_loader_upgradeable::UpgradeableLoaderState); 33 | 34 | impl UpgradeableLoaderAccount { 35 | pub fn program_len() -> Result { 36 | bpf_loader_upgradeable::UpgradeableLoaderState::program_len() 37 | } 38 | } 39 | 40 | impl Owner for UpgradeableLoaderAccount { 41 | fn owner() -> Pubkey { 42 | ID 43 | } 44 | } 45 | 46 | impl Deref for UpgradeableLoaderAccount { 47 | type Target = bpf_loader_upgradeable::UpgradeableLoaderState; 48 | 49 | fn deref(&self) -> &Self::Target { 50 | &self.0 51 | } 52 | } 53 | 54 | impl anchor_lang::AccountSerialize for UpgradeableLoaderAccount { 55 | fn try_serialize(&self, _writer: &mut W) -> Result<(), ProgramError> { 56 | // no-op 57 | Ok(()) 58 | } 59 | } 60 | 61 | impl anchor_lang::AccountDeserialize for UpgradeableLoaderAccount { 62 | fn try_deserialize(buf: &mut &[u8]) -> Result { 63 | UpgradeableLoaderAccount::try_deserialize_unchecked(buf) 64 | } 65 | 66 | fn try_deserialize_unchecked(buf: &mut &[u8]) -> Result { 67 | let data: bpf_loader_upgradeable::UpgradeableLoaderState = 68 | try_or_err!(bincode::deserialize(buf), ParseError); 69 | Ok(UpgradeableLoaderAccount(data)) 70 | } 71 | } 72 | -------------------------------------------------------------------------------- /programs/migrator/src/instructions/approver.rs: -------------------------------------------------------------------------------- 1 | //! Instructions callable by the approver. 2 | 3 | use crate::account_contexts::*; 4 | use anchor_lang::prelude::*; 5 | use solana_program::bpf_loader_upgradeable; 6 | use solana_program::{ 7 | loader_upgradeable_instruction::UpgradeableLoaderInstruction, system_program, sysvar, 8 | }; 9 | use vipers::unwrap_int; 10 | 11 | /// Creates a new [Migrator]. 
12 | pub fn new_migrator( 13 | ctx: Context, 14 | bump: u8, 15 | name: String, 16 | description: String, 17 | ) -> ProgramResult { 18 | let migrator = &mut ctx.accounts.migrator; 19 | migrator.program_id = ctx.accounts.program.key(); 20 | migrator.bump = bump; 21 | 22 | migrator.approver = ctx.accounts.approver.key(); 23 | migrator.pending_migration = Pubkey::default(); 24 | migrator.approval_expires_at = -1; 25 | 26 | migrator.num_migrations = 0; 27 | migrator.name = name; 28 | migrator.description = description; 29 | 30 | Ok(()) 31 | } 32 | 33 | /// Deploys a program with a migration. 34 | pub fn deploy_program(ctx: Context) -> ProgramResult { 35 | let migrator = &ctx.accounts.approved_migration.migrator; 36 | let seeds = gen_migrator_signer_seeds!(migrator); 37 | 38 | // assign the account to bpf_loader_upgradeable 39 | solana_program::program::invoke_signed( 40 | &solana_program::system_instruction::assign( 41 | &migrator.program_id, 42 | &bpf_loader_upgradeable::ID, 43 | ), 44 | &[ 45 | ctx.accounts.system_program.to_account_info(), 46 | ctx.accounts 47 | .bpf_loader_upgradeable_program 48 | .to_account_info(), 49 | ], 50 | &[&seeds[..]], 51 | )?; 52 | 53 | // deploy the migration 54 | // for the first deploy, we will use a max data len of 2x the buffer. 
55 | let buffer_size: usize = ctx 56 | .accounts 57 | .approved_migration 58 | .buffer 59 | .to_account_info() 60 | .data_len(); 61 | let max_data_len = unwrap_int!(buffer_size.checked_mul(2)); 62 | 63 | let deploy_ix = solana_program::instruction::Instruction::new_with_bincode( 64 | bpf_loader_upgradeable::ID, 65 | &UpgradeableLoaderInstruction::DeployWithMaxDataLen { max_data_len }, 66 | vec![ 67 | AccountMeta::new(migrator.key(), true), 68 | AccountMeta::new(ctx.accounts.program.program_data.key(), false), 69 | AccountMeta::new(ctx.accounts.program.program.key(), false), 70 | AccountMeta::new(ctx.accounts.approved_migration.buffer.key(), false), 71 | AccountMeta::new_readonly(sysvar::rent::id(), false), 72 | AccountMeta::new_readonly(sysvar::clock::id(), false), 73 | AccountMeta::new_readonly(system_program::id(), false), 74 | AccountMeta::new_readonly(migrator.key(), true), 75 | ], 76 | ); 77 | solana_program::program::invoke_signed( 78 | &deploy_ix, 79 | &[ 80 | migrator.to_account_info(), 81 | ctx.accounts.program.program_data.to_account_info(), 82 | ctx.accounts.program.program.to_account_info(), 83 | ctx.accounts.approved_migration.buffer.to_account_info(), 84 | ctx.accounts.rent.to_account_info(), 85 | ctx.accounts.clock.to_account_info(), 86 | ctx.accounts.system_program.to_account_info(), 87 | migrator.to_account_info(), 88 | ], 89 | &[&seeds[..]], 90 | )?; 91 | 92 | ctx.accounts.approved_migration.commit()?; 93 | Ok(()) 94 | } 95 | 96 | /// Upgrades a program. 
97 | pub fn upgrade_program(ctx: Context) -> ProgramResult { 98 | let migrator = &ctx.accounts.approved_migration.migrator; 99 | 100 | // upgrade the program 101 | let seeds = gen_migrator_signer_seeds!(migrator); 102 | let upgrade_ix = solana_program::bpf_loader_upgradeable::upgrade( 103 | ctx.accounts.program.program.to_account_info().key, 104 | ctx.accounts.approved_migration.buffer.to_account_info().key, 105 | migrator.to_account_info().key, 106 | migrator.to_account_info().key, 107 | ); 108 | solana_program::program::invoke_signed( 109 | &upgrade_ix, 110 | &[ 111 | ctx.accounts.program.program_data.to_account_info(), 112 | ctx.accounts.program.program.to_account_info(), 113 | ctx.accounts.approved_migration.buffer.to_account_info(), 114 | migrator.to_account_info(), 115 | ctx.accounts.rent.to_account_info(), 116 | ctx.accounts.clock.to_account_info(), 117 | migrator.to_account_info(), 118 | ], 119 | &[&seeds[..]], 120 | )?; 121 | 122 | ctx.accounts.approved_migration.commit()?; 123 | Ok(()) 124 | } 125 | 126 | /// Approves a [Migration]. 127 | pub fn approve_migration(ctx: Context, deadline: i64) -> ProgramResult { 128 | require!( 129 | deadline > Clock::get()?.unix_timestamp, 130 | ExpiryMustBeInFuture 131 | ); 132 | 133 | // un-reject if the migration was rejected. 134 | let migration = &mut ctx.accounts.migration; 135 | if migration.rejected_at != -1 { 136 | migration.rejected_at = -1; 137 | } 138 | 139 | let migrator = &mut ctx.accounts.migrator; 140 | migrator.pending_migration = ctx.accounts.migration.key(); 141 | migrator.approval_expires_at = deadline; 142 | Ok(()) 143 | } 144 | 145 | /// Rejects the current [Migration]. 
146 | pub fn reject_migration(ctx: Context) -> ProgramResult { 147 | let migration = &mut ctx.accounts.migration; 148 | migration.rejected_at = Clock::get()?.unix_timestamp; 149 | 150 | // cancel migration if it's the pending one 151 | let migrator = &mut ctx.accounts.migrator; 152 | if migrator.pending_migration.key() == migration.key() { 153 | migrator.pending_migration = Pubkey::default(); 154 | migrator.approval_expires_at = -1; 155 | } 156 | 157 | Ok(()) 158 | } 159 | -------------------------------------------------------------------------------- /programs/migrator/src/instructions/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod approver; 2 | pub mod public; 3 | -------------------------------------------------------------------------------- /programs/migrator/src/instructions/public.rs: -------------------------------------------------------------------------------- 1 | //! Instructions callable by anyone. 2 | 3 | use crate::account_contexts::*; 4 | use crate::bpf_loader_upgradeable::UpgradeableLoaderAccount; 5 | use anchor_lang::prelude::*; 6 | use vipers::unwrap_int; 7 | 8 | /// Proposes a [Migration]. 
9 | pub fn propose_migration( 10 | ctx: Context, 11 | bump: u8, 12 | title: String, 13 | description: String, 14 | ) -> ProgramResult { 15 | let migrator = &mut ctx.accounts.migrator; 16 | let index = migrator.num_migrations; 17 | migrator.num_migrations = unwrap_int!(migrator.num_migrations.checked_add(1)); 18 | 19 | let migration = &mut ctx.accounts.migration; 20 | migration.migrator = migrator.key(); 21 | migration.index = index; 22 | migration.bump = bump; 23 | 24 | migration.buffer = ctx.accounts.buffer.key(); 25 | migration.proposer = ctx.accounts.proposer.key(); 26 | 27 | migration.created_at = Clock::get()?.unix_timestamp; 28 | migration.rejected_at = -1; 29 | migration.executed_at = -1; 30 | migration.executor = Pubkey::default(); 31 | 32 | migration.title = title; 33 | migration.description = description; 34 | 35 | Ok(()) 36 | } 37 | 38 | /// Reserves a new program ID to be administered by its migrator. 39 | pub fn reserve_program_id(ctx: Context) -> ProgramResult { 40 | let program_address = ctx.accounts.program.key(); 41 | 42 | let rent = Rent::get()?; 43 | let min_program_balance = 44 | 1.max(rent.minimum_balance(UpgradeableLoaderAccount::program_len().unwrap())); 45 | 46 | // Allocate the program account to later be assigned to the [bpf_loader_upgradeable]. 47 | solana_program::program::invoke( 48 | &solana_program::system_instruction::create_account( 49 | ctx.accounts.payer.key, 50 | &program_address, 51 | min_program_balance, 52 | UpgradeableLoaderAccount::program_len().unwrap() as u64, 53 | &crate::ID, 54 | ), 55 | &[ 56 | ctx.accounts.system_program.to_account_info(), 57 | ctx.accounts.payer.to_account_info(), 58 | ], 59 | )?; 60 | 61 | Ok(()) 62 | } 63 | -------------------------------------------------------------------------------- /programs/migrator/src/lib.rs: -------------------------------------------------------------------------------- 1 | //! Program for managing and outsourcing program deploys and upgrades. 
2 | 3 | #[macro_use] 4 | mod macros; 5 | 6 | pub mod account_contexts; 7 | mod account_validators; 8 | pub mod bpf_loader_upgradeable; 9 | pub mod instructions; 10 | mod migrate; 11 | pub mod state; 12 | 13 | use account_contexts::*; 14 | use anchor_lang::prelude::*; 15 | use vipers::validate::Validate; 16 | 17 | declare_id!("M1G1VdgdfvjMCdUhVtzaejnutPmLknEiraq2F59YGxr"); 18 | 19 | /// The [migrator] program. 20 | #[program] 21 | pub mod migrator { 22 | use super::*; 23 | 24 | ////////////////////////////////////////// 25 | // Approver instructions 26 | ////////////////////////////////////////// 27 | 28 | /// Creates a new [Migrator]. 29 | pub fn new_migrator( 30 | ctx: Context, 31 | bump: u8, 32 | name: String, 33 | description: String, 34 | ) -> ProgramResult { 35 | ctx.accounts.validate()?; 36 | instructions::approver::new_migrator(ctx, bump, name, description) 37 | } 38 | 39 | /// Deploys a program with a migration. 40 | pub fn deploy_program(ctx: Context) -> ProgramResult { 41 | ctx.accounts.validate()?; 42 | instructions::approver::deploy_program(ctx) 43 | } 44 | 45 | /// Upgrades a program. 46 | pub fn upgrade_program(ctx: Context) -> ProgramResult { 47 | ctx.accounts.validate()?; 48 | instructions::approver::upgrade_program(ctx) 49 | } 50 | 51 | /// Approves a [Migration]. 52 | pub fn approve_migration(ctx: Context, deadline: i64) -> ProgramResult { 53 | ctx.accounts.validate()?; 54 | instructions::approver::approve_migration(ctx, deadline) 55 | } 56 | /// Approves a [Migration]. 57 | pub fn reject_migration(ctx: Context) -> ProgramResult { 58 | ctx.accounts.validate()?; 59 | instructions::approver::reject_migration(ctx) 60 | } 61 | 62 | ////////////////////////////////////////// 63 | // Public instructions 64 | ////////////////////////////////////////// 65 | 66 | /// Proposes a [Migration]. 
67 | pub fn propose_migration( 68 | ctx: Context, 69 | bump: u8, 70 | title: String, 71 | description: String, 72 | ) -> ProgramResult { 73 | ctx.accounts.validate()?; 74 | instructions::public::propose_migration(ctx, bump, title, description) 75 | } 76 | 77 | /// Reserves a new program ID to be administered by its migrator. 78 | pub fn reserve_program_id(ctx: Context) -> ProgramResult { 79 | ctx.accounts.validate()?; 80 | instructions::public::reserve_program_id(ctx) 81 | } 82 | } 83 | 84 | #[error] 85 | pub enum ErrorCode { 86 | #[msg("Could not deserialize UpgradeableLoaderState.")] 87 | ParseError, 88 | #[msg("Must be signer of an uninitialized program.")] 89 | ProgramIdNotSigner, 90 | #[msg("Buffer authority mismatch.")] 91 | BufferAuthorityMismatch, 92 | #[msg("No approved migration.")] 93 | NoApprovedMigration, 94 | #[msg("Migration approval window expired.")] 95 | MigrationWindowExpired, 96 | #[msg("Insufficient lamports remaining for rent exemption.")] 97 | InsufficientLamports, 98 | #[msg("Migration rejected.")] 99 | MigrationRejected, 100 | #[msg("Migration already executed.")] 101 | MigrationAlreadyExecuted, 102 | #[msg("Migration expiry time must be in the future.")] 103 | ExpiryMustBeInFuture, 104 | } 105 | -------------------------------------------------------------------------------- /programs/migrator/src/macros.rs: -------------------------------------------------------------------------------- 1 | //! Macros. 2 | 3 | /// Generates the signer seeds for a [crate::Migrator]. 4 | #[macro_export] 5 | macro_rules! 
gen_migrator_signer_seeds { 6 | ($migrator:expr) => { 7 | &[ 8 | b"migrator".as_ref(), 9 | &$migrator.program_id.to_bytes(), 10 | &[$migrator.bump], 11 | ] 12 | }; 13 | } 14 | -------------------------------------------------------------------------------- /programs/migrator/src/migrate.rs: -------------------------------------------------------------------------------- 1 | use crate::ApprovedMigration; 2 | use anchor_lang::prelude::*; 3 | 4 | impl<'info> ApprovedMigration<'info> { 5 | /// Commit the result of a successful migration. 6 | pub fn commit(&mut self) -> ProgramResult { 7 | let migration = &mut self.migration; 8 | migration.executed_at = Clock::get()?.unix_timestamp; 9 | migration.executor = self.executor.key(); 10 | 11 | let migrator = &mut self.migrator; 12 | migrator.pending_migration = Pubkey::default(); 13 | migrator.approval_expires_at = -1; 14 | migrator.latest_migration_index = migration.index; 15 | 16 | // ensure we still have enough lamports for rent exemption 17 | let rent = Rent::get()?; 18 | self.migrator.reload()?; 19 | let migrator_info: AccountInfo = self.migrator.to_account_info(); 20 | require!( 21 | rent.is_exempt(migrator_info.lamports(), migrator_info.data_len()), 22 | InsufficientLamports 23 | ); 24 | 25 | Ok(()) 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /programs/migrator/src/state.rs: -------------------------------------------------------------------------------- 1 | //! State for the migrator program. 2 | 3 | use anchor_lang::prelude::*; 4 | 5 | /// Migrates programs. 6 | #[account] 7 | #[derive(Default)] 8 | pub struct Migrator { 9 | /// Program ID of the program to deploy. 10 | pub program_id: Pubkey, 11 | /// Bump seed. 12 | pub bump: u8, 13 | 14 | /// Authority which can approve migrations. 15 | pub approver: Pubkey, 16 | /// The current [Migration] that is approved for anyone to deploy. 17 | /// Only one [Migration] may be approved at a time. 
18 | pub pending_migration: Pubkey, 19 | /// If >0, this timestamp marks when the approval for the program 20 | /// deployment/upgrade expires. 21 | /// If <= 0, there is considered to be no approved migration. 22 | pub approval_expires_at: i64, 23 | 24 | /// Total number of migrations that have been proposed to this [Migrator]. 25 | pub num_migrations: u64, 26 | /// Index of the latest migration to have taken place. 27 | pub latest_migration_index: u64, 28 | 29 | /// User-friendly name of the program. 30 | pub name: String, 31 | /// Description of the program. 32 | pub description: String, 33 | } 34 | 35 | #[account] 36 | #[derive(Default)] 37 | pub struct Migration { 38 | /// The [Pubkey] of the [Migrator]. 39 | pub migrator: Pubkey, 40 | /// The unique index of the [Migration]. Must be non-zero. 41 | pub index: u64, 42 | /// Bump seed. 43 | pub bump: u8, 44 | 45 | /// The key of the buffer to migrate to. 46 | /// This must be set to the [Migrator]. 47 | pub buffer: Pubkey, 48 | /// The [Pubkey] that proposed this [Migration]. 49 | pub proposer: Pubkey, 50 | 51 | /// When the [Migration] was created. 52 | pub created_at: i64, 53 | /// If the [Migrator] rejected this [Migration], this is the timestamp when the migration was rejected. 54 | /// This also allows us to filter out spam. 55 | pub rejected_at: i64, 56 | /// Timestamp of when this migration was executed. -1 if never executed. 57 | pub executed_at: i64, 58 | /// The [Pubkey] that executed this [Migration]. 59 | pub executor: Pubkey, 60 | 61 | /// Title describing the migration 62 | pub title: String, 63 | /// Description of the migration. It is recommended to use Markdown. 64 | pub description: String, 65 | } 66 | -------------------------------------------------------------------------------- /scripts/generate-idl-types.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | shopt -s extglob 4 | 5 | cd $(dirname $0)/.. 
6 | 7 | generate_declaration_file() { 8 | PROGRAM_SO=$1 9 | OUT_DIR=$2 10 | 11 | prog="$(basename $PROGRAM_SO .json)" 12 | OUT_PATH="$OUT_DIR/$prog.ts" 13 | if [ ! $(which gsed) ]; then 14 | PREFIX=$(echo $prog | sed -E 's/(^|_)([a-z])/\U\2/g') 15 | else 16 | PREFIX=$(echo $prog | gsed -E 's/(^|_)([a-z])/\U\2/g') 17 | fi 18 | typename="${PREFIX}IDL" 19 | rawName="${PREFIX}JSON" 20 | 21 | # types 22 | echo "export type $typename =" >>$OUT_PATH 23 | cat $PROGRAM_SO >>$OUT_PATH 24 | echo ";" >>$OUT_PATH 25 | 26 | # raw json 27 | echo "export const $rawName: $typename =" >>$OUT_PATH 28 | cat $PROGRAM_SO >>$OUT_PATH 29 | echo ";" >>$OUT_PATH 30 | 31 | # error type 32 | echo "import { generateErrorMap } from '@saberhq/anchor-contrib';" >>$OUT_PATH 33 | echo "export const ${PREFIX}Errors = generateErrorMap($rawName);" >>$OUT_PATH 34 | } 35 | 36 | generate_sdk_idls() { 37 | SDK_DIR=${1:-"./packages/sdk/src/idls"} 38 | IDL_JSONS=$2 39 | 40 | echo "Generating IDLs for the following programs:" 41 | echo $IDL_JSONS 42 | echo "" 43 | 44 | rm -rf $SDK_DIR 45 | mkdir -p $SDK_DIR 46 | if [ $(ls -l artifacts/idl/ | wc -l) -ne 0 ]; then 47 | for f in $IDL_JSONS; do 48 | generate_declaration_file $f $SDK_DIR 49 | done 50 | if [[ $RUN_ESLINT != "none" ]]; then 51 | yarn eslint --fix $SDK_DIR 52 | fi 53 | else 54 | echo "Warning: no IDLs found. Make sure you ran ./scripts/idl.sh first." 55 | fi 56 | } 57 | 58 | generate_sdk_idls ./src/idls 'artifacts/idl/*.json' 59 | -------------------------------------------------------------------------------- /scripts/idl.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # This script generates the IDL JSONs without buildling the full packages. 
4 | 5 | rm -fr artifacts/idl/ 6 | mkdir -p artifacts/idl/ 7 | 8 | for PROGRAM in $(find programs/ -maxdepth 3 -name lib.rs); do 9 | PROGRAM_NAME=$(dirname $PROGRAM | xargs dirname | xargs basename | tr '-' '_') 10 | echo "Parsing IDL for $PROGRAM_NAME" 11 | anchor idl parse --file $PROGRAM >artifacts/idl/$PROGRAM_NAME.json || { 12 | echo "Could not parse IDL" 13 | exit 1 14 | } 15 | done 16 | -------------------------------------------------------------------------------- /shell.nix: -------------------------------------------------------------------------------- 1 | { pkgs }: 2 | let 3 | anchor-parse-idls = pkgs.writeShellScriptBin "anchor-parse-idls" 4 | (builtins.readFile ./scripts/idl.sh); 5 | in pkgs.mkShell { 6 | nativeBuiltInputs = (pkgs.lib.optionals pkgs.stdenv.isDarwin [ 7 | pkgs.darwin.apple_sdk.frameworks.AppKit 8 | pkgs.darwin.apple_sdk.frameworks.IOKit 9 | pkgs.darwin.apple_sdk.frameworks.Foundation 10 | ]); 11 | buildInputs = with pkgs; 12 | (pkgs.lib.optionals pkgs.stdenv.isLinux ([ 13 | # solana 14 | libudev 15 | ])) ++ [ 16 | anchor-parse-idls 17 | rustup 18 | cargo-deps 19 | # cargo-watch 20 | gh 21 | 22 | # sdk 23 | nodejs 24 | yarn 25 | python3 26 | 27 | pkgconfig 28 | openssl 29 | jq 30 | gnused 31 | 32 | libiconv 33 | 34 | anchor-0_17_0 35 | spl-token-cli 36 | ] ++ (pkgs.lib.optionals pkgs.stdenv.isDarwin [ 37 | pkgs.darwin.apple_sdk.frameworks.AppKit 38 | pkgs.darwin.apple_sdk.frameworks.IOKit 39 | pkgs.darwin.apple_sdk.frameworks.Foundation 40 | ]); 41 | shellHook = '' 42 | export PATH=$PATH:$HOME/.cargo/bin 43 | ''; 44 | } 45 | -------------------------------------------------------------------------------- /src/constants.ts: -------------------------------------------------------------------------------- 1 | import { PublicKey } from "@solana/web3.js"; 2 | 3 | export const PROGRAM_ID = new PublicKey( 4 | "MRKGLMizK9XSTaD1d1jbVkdHZbQVCSnPpYiTw9aKQv8" 5 | ); 6 | 
-------------------------------------------------------------------------------- /src/index.ts: -------------------------------------------------------------------------------- 1 | export * from "./constants"; 2 | export * from "./pda"; 3 | export * from "./sdk"; 4 | export * from "./types"; 5 | export * from "./wrapper"; 6 | -------------------------------------------------------------------------------- /src/pda.ts: -------------------------------------------------------------------------------- 1 | import { utils } from "@project-serum/anchor"; 2 | import { PublicKey } from "@solana/web3.js"; 3 | 4 | import { PROGRAM_ID } from "./constants"; 5 | 6 | export const findMigratorKey = async ( 7 | programID: PublicKey 8 | ): Promise<[PublicKey, number]> => { 9 | return await PublicKey.findProgramAddress( 10 | [utils.bytes.utf8.encode("migrator"), programID.toBytes()], 11 | PROGRAM_ID 12 | ); 13 | }; 14 | -------------------------------------------------------------------------------- /src/sdk.ts: -------------------------------------------------------------------------------- 1 | import { Program, Provider as AnchorProvider } from "@project-serum/anchor"; 2 | import type { Provider } from "@saberhq/solana-contrib"; 3 | import { SignerWallet, SolanaProvider } from "@saberhq/solana-contrib"; 4 | import type { Signer } from "@solana/web3.js"; 5 | 6 | import { MigratorJSON } from "."; 7 | import { PROGRAM_ID } from "./constants"; 8 | import type { MigratorProgram } from "./types"; 9 | 10 | export class MigratorSDK { 11 | constructor( 12 | public readonly provider: Provider, 13 | public readonly program: MigratorProgram 14 | ) {} 15 | 16 | withSigner(signer: Signer): MigratorSDK { 17 | return MigratorSDK.load({ 18 | provider: new SolanaProvider( 19 | this.provider.connection, 20 | this.provider.broadcaster, 21 | new SignerWallet(signer), 22 | this.provider.opts 23 | ), 24 | }); 25 | } 26 | 27 | /** 28 | * Loads the SDK. 
29 | * @returns {MigratorSDK} 30 | */ 31 | public static load({ 32 | provider, 33 | }: { 34 | // Provider 35 | provider: Provider; 36 | }): MigratorSDK { 37 | const anchorProvider = new AnchorProvider( 38 | provider.connection, 39 | provider.wallet, 40 | provider.opts 41 | ); 42 | return new MigratorSDK( 43 | provider, 44 | new Program( 45 | MigratorJSON, 46 | PROGRAM_ID, 47 | anchorProvider 48 | ) as unknown as MigratorProgram 49 | ); 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /src/types.ts: -------------------------------------------------------------------------------- 1 | import type { AnchorTypes } from "@saberhq/anchor-contrib"; 2 | 3 | import type { MigratorIDL } from "./idls/migrator"; 4 | 5 | export * from "./idls/migrator"; 6 | 7 | export type MigratorTypes = AnchorTypes< 8 | MigratorIDL, 9 | { 10 | migrator: MigratorData; 11 | migration: MigrationData; 12 | } 13 | >; 14 | 15 | type Accounts = MigratorTypes["Accounts"]; 16 | export type MigratorData = Accounts["Migrator"]; 17 | export type MigrationData = Accounts["Migration"]; 18 | 19 | export type MigratorError = MigratorTypes["Error"]; 20 | export type MigratorEvents = MigratorTypes["Events"]; 21 | export type MigratorProgram = MigratorTypes["Program"]; 22 | -------------------------------------------------------------------------------- /src/wrapper.ts: -------------------------------------------------------------------------------- 1 | import type { PublicKey } from "@solana/web3.js"; 2 | 3 | import type { MigratorSDK } from "./sdk"; 4 | 5 | export class MigratorWrapper { 6 | constructor( 7 | public readonly sdk: MigratorSDK, 8 | public readonly migratorKey: PublicKey 9 | ) {} 10 | } 11 | -------------------------------------------------------------------------------- /tests/migrator.spec.ts: -------------------------------------------------------------------------------- 1 | describe("Migrator", () => { 2 | it("works", () => { 3 | // pass 4 | 
}); 5 | }); 6 | -------------------------------------------------------------------------------- /tsconfig.build.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "./tsconfig.json", 3 | "compilerOptions": { 4 | "noEmit": false, 5 | "declaration": true, 6 | "declarationMap": true, 7 | "sourceMap": true, 8 | "outDir": "dist/cjs/", 9 | "importHelpers": true 10 | }, 11 | "include": ["src/"] 12 | } 13 | -------------------------------------------------------------------------------- /tsconfig.esm.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "./tsconfig.build.json", 3 | "compilerOptions": { 4 | "module": "ES2015", 5 | "outDir": "dist/esm/" 6 | }, 7 | "include": ["src/"] 8 | } 9 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "ES2015", 4 | "module": "CommonJS", 5 | "moduleResolution": "node", 6 | 7 | "strict": true, 8 | "noImplicitOverride": true, 9 | "noUncheckedIndexedAccess": true, 10 | "forceConsistentCasingInFileNames": true, 11 | 12 | "esModuleInterop": true, 13 | "skipLibCheck": true, 14 | "resolveJsonModule": true, 15 | "noEmit": true, 16 | "lib": ["ES2015"] 17 | }, 18 | "include": ["./src", "./tests", "scripts/"] 19 | } 20 | --------------------------------------------------------------------------------