├── .envrc
├── .eslintrc.js
├── .github
│   ├── dependabot.yml
│   └── workflows
│       ├── programs-e2e.yml
│       ├── programs-unit.yml
│       ├── release.yml
│       └── site.yml
├── .gitignore
├── .husky
│   └── pre-commit
├── .mocharc.js
├── .prettierignore
├── .vscode
│   ├── extensions.json
│   └── settings.json
├── .yarn
│   ├── plugins
│   │   └── @yarnpkg
│   │       ├── plugin-interactive-tools.cjs
│   │       └── plugin-typescript.cjs
│   ├── releases
│   │   └── yarn-3.0.2.cjs
│   └── sdks
│       ├── eslint
│       │   ├── bin
│       │   │   └── eslint.js
│       │   ├── lib
│       │   │   └── api.js
│       │   └── package.json
│       ├── integrations.yml
│       ├── prettier
│       │   ├── index.js
│       │   └── package.json
│       └── typescript
│           ├── bin
│           │   ├── tsc
│           │   └── tsserver
│           ├── lib
│           │   ├── tsc.js
│           │   ├── tsserver.js
│           │   ├── tsserverlibrary.js
│           │   └── typescript.js
│           └── package.json
├── .yarnrc.yml
├── Anchor.toml
├── Cargo.lock
├── Cargo.toml
├── LICENSE.txt
├── README.md
├── ci.nix
├── flake.lock
├── flake.nix
├── images
│   └── banner.jpeg
├── lib
│   ├── lido
│   │   ├── Cargo.toml
│   │   └── src
│   │       └── lib.rs
│   └── marinade
│       ├── Cargo.toml
│       └── src
│           └── lib.rs
├── package.json
├── programs
│   └── asol
│       ├── Cargo.toml
│       ├── README.md
│       ├── Xargo.toml
│       └── src
│           ├── account_validators.rs
│           ├── accounting.rs
│           ├── events.rs
│           ├── lib.rs
│           ├── pool.rs
│           ├── snapshot.rs
│           ├── state.rs
│           └── types.rs
├── scripts
│   ├── generate-idl-types.sh
│   ├── parse-idls.sh
│   └── pull-crate.sh
├── shell.nix
├── src
│   ├── asol.ts
│   ├── constants.ts
│   ├── index.ts
│   ├── pda.ts
│   └── programs
│       ├── asol.ts
│       └── index.ts
├── tests
│   ├── asol.ts
│   ├── fixture-key.json
│   └── workspace.ts
├── tsconfig.build.json
├── tsconfig.esm.json
├── tsconfig.json
└── yarn.lock
/.envrc:
--------------------------------------------------------------------------------
watch_file flake.nix
watch_file flake.lock
mkdir -p .direnv
dotenv
eval "$(nix print-dev-env --profile "$(direnv_layout_dir)/flake-profile")"
--------------------------------------------------------------------------------
/.eslintrc.js:
--------------------------------------------------------------------------------
require("@rushstack/eslint-patch/modern-module-resolution");

module.exports = {
  extends: ["@saberhq/eslint-config"],
  parserOptions: {
    project: "tsconfig.json",
  },
};
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
version: 2
updates:
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "daily"
  - package-ecosystem: "npm"
    directory: "/"
    schedule:
      interval: "daily"
  - package-ecosystem: "cargo"
    directory: "/"
    schedule:
      interval: "daily"
--------------------------------------------------------------------------------
/.github/workflows/programs-e2e.yml:
--------------------------------------------------------------------------------
name: E2E

on:
  push:
    branches: [master]
  pull_request:
    branches: [master]

env:
  CARGO_TERM_COLOR: always
  SOLANA_VERSION: "1.7.11"
  RUST_TOOLCHAIN: nightly-2021-10-01

jobs:
  sdk:
    runs-on: ubuntu-latest
    name: Build the SDK
    steps:
      - uses: actions/checkout@v2

      - uses: cachix/install-nix-action@v14
        with:
          install_url: https://nixos-nix-install-tests.cachix.org/serve/i6laym9jw3wg9mw6ncyrk6gjx4l34vvx/install
          install_options: "--tarball-url-prefix https://nixos-nix-install-tests.cachix.org/serve"
          extra_nix_config: |
            experimental-features = nix-command flakes
      - name: Setup Cachix
        uses: cachix/cachix-action@v10
        with:
          name: asol
          extraPullNames: quarry
          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
      - name: Parse IDLs
        run: nix shell .#ci --command ./scripts/parse-idls.sh

      - name: Setup Node
        uses: actions/setup-node@v2
        with:
          always-auth: true
          node-version: "16"
      - name: Yarn Cache
        uses: actions/cache@v2
        with:
          path: ./.yarn
          key: ${{ runner.os }}-${{ hashFiles('./.yarn/**/*.js') }}
      - name: Install Yarn dependencies
        run: yarn install
      - run: ./scripts/generate-idl-types.sh
      - run: yarn build
      - run: yarn typecheck
      - run: yarn lint
      - run: yarn dlx @yarnpkg/doctor

  integration-tests:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2

      # Install Rust and Anchor
      - name: Install Rust nightly
        uses: actions-rs/toolchain@v1
        with:
          override: true
          profile: minimal
          toolchain: ${{ env.RUST_TOOLCHAIN }}
      - uses: Swatinem/rust-cache@v1
      - name: Install Linux dependencies
        run: |
          sudo apt-get update
          sudo apt-get install -y pkg-config build-essential libudev-dev

      # Install Solana
      - name: Cache Solana binaries
        uses: actions/cache@v2
        with:
          path: ~/.cache/solana
          key: ${{ runner.os }}-${{ env.SOLANA_VERSION }}
      - name: Install Solana
        run: |
          sh -c "$(curl -sSfL https://release.solana.com/v${{ env.SOLANA_VERSION }}/install)"
          echo "$HOME/.local/share/solana/install/active_release/bin" >> $GITHUB_PATH
          export PATH="/home/runner/.local/share/solana/install/active_release/bin:$PATH"
          solana --version
          echo "Generating keypair..."
          solana-keygen new -o "$HOME/.config/solana/id.json" --no-passphrase --silent

      # Run build
      - uses: cachix/install-nix-action@v14
        with:
          install_url: https://nixos-nix-install-tests.cachix.org/serve/i6laym9jw3wg9mw6ncyrk6gjx4l34vvx/install
          install_options: "--tarball-url-prefix https://nixos-nix-install-tests.cachix.org/serve"
          extra_nix_config: |
            experimental-features = nix-command flakes
      - name: Setup Cachix
        uses: cachix/cachix-action@v10
        with:
          name: asol
          extraPullNames: quarry
          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
      - name: Build program
        run: nix shell .#ci --command anchor build

      - name: Yarn Cache
        uses: actions/cache@v2
        with:
          path: ./.yarn
          key: ${{ runner.os }}-${{ hashFiles('./.yarn/**/*.js') }}
      - name: Setup Node
        uses: actions/setup-node@v2
        with:
          node-version: "16"
      - run: yarn install
      - name: Generate IDL types
        run: nix shell .#ci --command yarn idl:generate:nolint
      - run: yarn build
      - run: ./scripts/pull-crate.sh
      - name: Run e2e tests
        run: nix shell .#ci --command yarn test:e2e
--------------------------------------------------------------------------------
/.github/workflows/programs-unit.yml:
--------------------------------------------------------------------------------
name: Unit

on:
  push:
    branches: [master]
    paths:
      - .github/workflows/programs-unit.yml
      - programs/**
      - Cargo.toml
      - Cargo.lock
  pull_request:
    branches: [master]
    paths:
      - .github/workflows/programs-unit.yml
      - programs/**
      - Cargo.toml
      - Cargo.lock

env:
  CARGO_TERM_COLOR: always
  RUST_TOOLCHAIN: nightly-2021-10-01

jobs:
  lint:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Install Rust nightly
        uses: actions-rs/toolchain@v1
        with:
          override: true
          profile: minimal
          toolchain: ${{ env.RUST_TOOLCHAIN }}
          components: rustfmt, clippy
      - uses: Swatinem/rust-cache@v1
      - name: Run fmt
        run: cargo fmt -- --check
      - name: Run clippy
        run: cargo clippy --all-targets -- --deny=warnings

  unit-tests:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Install Rust nightly
        uses: actions-rs/toolchain@v1
        with:
          override: true
          profile: minimal
          toolchain: ${{ env.RUST_TOOLCHAIN }}
          components: rustfmt, clippy
      - uses: Swatinem/rust-cache@v1
      - name: Run unit tests
        run: cargo test --lib

  doc:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Install Rust nightly
        uses: actions-rs/toolchain@v1
        with:
          override: true
          profile: minimal
          toolchain: ${{ env.RUST_TOOLCHAIN }}
          components: rustfmt, clippy
      - uses: Swatinem/rust-cache@v1
      - run: cargo doc
--------------------------------------------------------------------------------
/.github/workflows/release.yml:
--------------------------------------------------------------------------------
name: Release

on:
  workflow_dispatch: {}
  push:
    tags:
      - "v*.*.*"

env:
  CARGO_TERM_COLOR: always
  SOLANA_VERSION: "1.7.11"
  RUST_TOOLCHAIN: nightly-2021-10-01
  NPM_PUBLISH_TOKEN: ${{ secrets.NPM_PUBLISH_TOKEN }}

jobs:
  release-sdk:
    runs-on: ubuntu-latest
    name: Release SDK on NPM
    steps:
      - uses: actions/checkout@v2

      - uses: cachix/install-nix-action@v14
        with:
          install_url: https://nixos-nix-install-tests.cachix.org/serve/i6laym9jw3wg9mw6ncyrk6gjx4l34vvx/install
          install_options: "--tarball-url-prefix https://nixos-nix-install-tests.cachix.org/serve"
          extra_nix_config: |
            experimental-features = nix-command flakes
      - name: Setup Cachix
        uses: cachix/cachix-action@v10
        with:
          name: asol
          extraPullNames: quarry
          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}

      - name: Setup Node
        uses: actions/setup-node@v2
        with:
          always-auth: true
          node-version: 14.x
      - name: Yarn Cache
        uses: actions/cache@v2
        with:
          path: ./.yarn
          key: ${{ runner.os }}-${{ hashFiles('./.yarn/**/*.js') }}
      - name: Install Yarn dependencies
        run: yarn install
      - name: Parse IDLs
        run: nix shell .#ci --command yarn idl:generate
      - run: yarn build
      - run: |
          echo 'npmAuthToken: "${NPM_PUBLISH_TOKEN}"' >> .yarnrc.yml
      - name: Publish
        run: yarn npm publish

  release-crate:
    runs-on: ubuntu-latest
    name: Release crate on crates.io
    steps:
      - uses: actions/checkout@v2
      - name: Install Rust nightly
        uses: actions-rs/toolchain@v1
        with:
          override: true
          profile: minimal
          toolchain: ${{ env.RUST_TOOLCHAIN }}
      - uses: Swatinem/rust-cache@v1
      - name: Install cargo-workspaces
        run: cargo install cargo-workspaces || true
      - name: Publish crates
        run: cargo ws publish --from-git --yes --skip-published --token ${{ secrets.CARGO_PUBLISH_TOKEN }}

  release-binaries:
    runs-on: ubuntu-latest
    name: Release verifiable binaries
    steps:
      - uses: actions/checkout@v2
      - uses: cachix/install-nix-action@v14
        with:
          install_url: https://nixos-nix-install-tests.cachix.org/serve/i6laym9jw3wg9mw6ncyrk6gjx4l34vvx/install
          install_options: "--tarball-url-prefix https://nixos-nix-install-tests.cachix.org/serve"
          extra_nix_config: |
            experimental-features = nix-command flakes
      - name: Setup Cachix
        uses: cachix/cachix-action@v10
        with:
          name: asol
          extraPullNames: quarry
          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}

      - name: Build programs
        run: nix shell .#ci --command anchor build -v
      - name: Release
        uses: softprops/action-gh-release@v1
        with:
          files: |
            target/deploy/*
            target/idl/*
            target/verifiable/*
--------------------------------------------------------------------------------
/.github/workflows/site.yml:
--------------------------------------------------------------------------------
name: Documentation Site

on:
  push:
    branches: [master]

jobs:
  build-and-deploy:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v2

      - uses: cachix/install-nix-action@v14
        with:
          install_url: https://nixos-nix-install-tests.cachix.org/serve/i6laym9jw3wg9mw6ncyrk6gjx4l34vvx/install
          install_options: "--tarball-url-prefix https://nixos-nix-install-tests.cachix.org/serve"
          extra_nix_config: |
            experimental-features = nix-command flakes
      - name: Setup Cachix
        uses: cachix/cachix-action@v10
        with:
          name: asol
          extraPullNames: quarry
          authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}

      - name: Cache Yarn
        uses: actions/cache@v2
        with:
          path: ./.yarn
          key: ${{ runner.os }}-${{ hashFiles('./.yarn/**/*.js') }}
      - name: Install Yarn dependencies
        run: yarn install
      - name: Parse IDLs
        run: nix shell .#ci --command yarn idl:generate
      - run: yarn docs:generate
      - run: cp -R images/ site/

      - name: Deploy 🚀
        uses: JamesIves/github-pages-deploy-action@4.1.5
        with:
          branch: gh-pages
          folder: site
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
.anchor/
node_modules/
artifacts/
dist/
target/
yarn-error.log

.yarn/*
!.yarn/patches
!.yarn/releases
!.yarn/plugins
!.yarn/sdks
!.yarn/versions
.pnp.*

src/idls/

.eslintcache
site/
Captain.toml
--------------------------------------------------------------------------------
/.husky/pre-commit:
--------------------------------------------------------------------------------
#!/bin/sh
. "$(dirname "$0")/_/husky.sh"

yarn lint-staged
--------------------------------------------------------------------------------
/.mocharc.js:
--------------------------------------------------------------------------------
// Register Yarn Plug'n'Play first so Mocha can resolve ts-node and the
// test files through the PnP loader.
require("./.pnp.cjs").setup();

module.exports = {
  timeout: 30_000,
  require: [require.resolve("ts-node/register")],
};
--------------------------------------------------------------------------------
/.prettierignore:
--------------------------------------------------------------------------------
.yarn/
--------------------------------------------------------------------------------
/.vscode/extensions.json:
--------------------------------------------------------------------------------
{
  "recommendations": [
    "arcanis.vscode-zipfs",
    "dbaeumer.vscode-eslint",
    "esbenp.prettier-vscode"
  ]
}
--------------------------------------------------------------------------------
/.vscode/settings.json:
--------------------------------------------------------------------------------
{
  "search.exclude": {
    "**/.yarn": true,
    "**/.pnp.*": true
  },
  "eslint.nodePath": ".yarn/sdks",
  "typescript.tsdk": ".yarn/sdks/typescript/lib",
  "typescript.enablePromptUseWorkspaceTsdk": true,
  "prettier.prettierPath": ".yarn/sdks/prettier/index.js"
}
--------------------------------------------------------------------------------
/.yarn/plugins/@yarnpkg/plugin-typescript.cjs:
--------------------------------------------------------------------------------
/* eslint-disable */
//prettier-ignore
module.exports = {
name: "@yarnpkg/plugin-typescript",
factory: function (require) {
var plugin=(()=>{var Ft=Object.create,H=Object.defineProperty,Bt=Object.defineProperties,Kt=Object.getOwnPropertyDescriptor,zt=Object.getOwnPropertyDescriptors,Gt=Object.getOwnPropertyNames,Q=Object.getOwnPropertySymbols,$t=Object.getPrototypeOf,ne=Object.prototype.hasOwnProperty,De=Object.prototype.propertyIsEnumerable;var Re=(e,t,r)=>t in e?H(e,t,{enumerable:!0,configurable:!0,writable:!0,value:r}):e[t]=r,u=(e,t)=>{for(var r in t||(t={}))ne.call(t,r)&&Re(e,r,t[r]);if(Q)for(var r of Q(t))De.call(t,r)&&Re(e,r,t[r]);return e},g=(e,t)=>Bt(e,zt(t)),Lt=e=>H(e,"__esModule",{value:!0});var R=(e,t)=>{var r={};for(var s in e)ne.call(e,s)&&t.indexOf(s)<0&&(r[s]=e[s]);if(e!=null&&Q)for(var s of Q(e))t.indexOf(s)<0&&De.call(e,s)&&(r[s]=e[s]);return r};var
I=(e,t)=>()=>(t||e((t={exports:{}}).exports,t),t.exports),Vt=(e,t)=>{for(var r in t)H(e,r,{get:t[r],enumerable:!0})},Qt=(e,t,r)=>{if(t&&typeof t=="object"||typeof t=="function")for(let s of Gt(t))!ne.call(e,s)&&s!=="default"&&H(e,s,{get:()=>t[s],enumerable:!(r=Kt(t,s))||r.enumerable});return e},C=e=>Qt(Lt(H(e!=null?Ft($t(e)):{},"default",e&&e.__esModule&&"default"in e?{get:()=>e.default,enumerable:!0}:{value:e,enumerable:!0})),e);var xe=I(J=>{"use strict";Object.defineProperty(J,"__esModule",{value:!0});function _(e){let t=[...e.caches],r=t.shift();return r===void 0?ve():{get(s,n,a={miss:()=>Promise.resolve()}){return r.get(s,n,a).catch(()=>_({caches:t}).get(s,n,a))},set(s,n){return r.set(s,n).catch(()=>_({caches:t}).set(s,n))},delete(s){return r.delete(s).catch(()=>_({caches:t}).delete(s))},clear(){return r.clear().catch(()=>_({caches:t}).clear())}}}function ve(){return{get(e,t,r={miss:()=>Promise.resolve()}){return t().then(n=>Promise.all([n,r.miss(n)])).then(([n])=>n)},set(e,t){return Promise.resolve(t)},delete(e){return Promise.resolve()},clear(){return Promise.resolve()}}}J.createFallbackableCache=_;J.createNullCache=ve});var Ee=I(($s,qe)=>{qe.exports=xe()});var Te=I(ae=>{"use strict";Object.defineProperty(ae,"__esModule",{value:!0});function Jt(e={serializable:!0}){let t={};return{get(r,s,n={miss:()=>Promise.resolve()}){let a=JSON.stringify(r);if(a in t)return Promise.resolve(e.serializable?JSON.parse(t[a]):t[a]);let o=s(),d=n&&n.miss||(()=>Promise.resolve());return o.then(y=>d(y)).then(()=>o)},set(r,s){return t[JSON.stringify(r)]=e.serializable?JSON.stringify(s):s,Promise.resolve(s)},delete(r){return delete t[JSON.stringify(r)],Promise.resolve()},clear(){return t={},Promise.resolve()}}}ae.createInMemoryCache=Jt});var we=I((Vs,Me)=>{Me.exports=Te()});var Ce=I(M=>{"use strict";Object.defineProperty(M,"__esModule",{value:!0});function Xt(e,t,r){let s={"x-algolia-api-key":r,"x-algolia-application-id":t};return{headers(){return e===oe.WithinHeaders?s:{}},queryParameters(){return e===oe.WithinQueryParameters?s:{}}}}function Yt(e){let t=0,r=()=>(t++,new Promise(s=>{setTimeout(()=>{s(e(r))},Math.min(100*t,1e3))}));return e(r)}function ke(e,t=(r,s)=>Promise.resolve()){return Object.assign(e,{wait(r){return ke(e.then(s=>Promise.all([t(s,r),s])).then(s=>s[1]))}})}function Zt(e){let t=e.length-1;for(t;t>0;t--){let r=Math.floor(Math.random()*(t+1)),s=e[t];e[t]=e[r],e[r]=s}return e}function er(e,t){return Object.keys(t!==void 0?t:{}).forEach(r=>{e[r]=t[r](e)}),e}function tr(e,...t){let r=0;return e.replace(/%s/g,()=>encodeURIComponent(t[r++]))}var rr="4.2.0",sr=e=>()=>e.transporter.requester.destroy(),oe={WithinQueryParameters:0,WithinHeaders:1};M.AuthMode=oe;M.addMethods=er;M.createAuth=Xt;M.createRetryablePromise=Yt;M.createWaitablePromise=ke;M.destroy=sr;M.encode=tr;M.shuffle=Zt;M.version=rr});var F=I((Js,Ue)=>{Ue.exports=Ce()});var Ne=I(ie=>{"use strict";Object.defineProperty(ie,"__esModule",{value:!0});var nr={Delete:"DELETE",Get:"GET",Post:"POST",Put:"PUT"};ie.MethodEnum=nr});var B=I((Ys,We)=>{We.exports=Ne()});var Ze=I(A=>{"use strict";Object.defineProperty(A,"__esModule",{value:!0});var He=B();function ce(e,t){let r=e||{},s=r.data||{};return Object.keys(r).forEach(n=>{["timeout","headers","queryParameters","data","cacheable"].indexOf(n)===-1&&(s[n]=r[n])}),{data:Object.entries(s).length>0?s:void 0,timeout:r.timeout||t,headers:r.headers||{},queryParameters:r.queryParameters||{},cacheable:r.cacheable}}var X={Read:1,Write:2,Any:3},U={Up:1,Down:2,Timeouted:3},_e=2*60*1e3;function 
ue(e,t=U.Up){return g(u({},e),{status:t,lastUpdate:Date.now()})}function Fe(e){return e.status===U.Up||Date.now()-e.lastUpdate>_e}function Be(e){return e.status===U.Timeouted&&Date.now()-e.lastUpdate<=_e}function le(e){return{protocol:e.protocol||"https",url:e.url,accept:e.accept||X.Any}}function ar(e,t){return Promise.all(t.map(r=>e.get(r,()=>Promise.resolve(ue(r))))).then(r=>{let s=r.filter(d=>Fe(d)),n=r.filter(d=>Be(d)),a=[...s,...n],o=a.length>0?a.map(d=>le(d)):t;return{getTimeout(d,y){return(n.length===0&&d===0?1:n.length+3+d)*y},statelessHosts:o}})}var or=({isTimedOut:e,status:t})=>!e&&~~t==0,ir=e=>{let t=e.status;return e.isTimedOut||or(e)||~~(t/100)!=2&&~~(t/100)!=4},cr=({status:e})=>~~(e/100)==2,ur=(e,t)=>ir(e)?t.onRetry(e):cr(e)?t.onSucess(e):t.onFail(e);function Qe(e,t,r,s){let n=[],a=$e(r,s),o=Le(e,s),d=r.method,y=r.method!==He.MethodEnum.Get?{}:u(u({},r.data),s.data),b=u(u(u({"x-algolia-agent":e.userAgent.value},e.queryParameters),y),s.queryParameters),f=0,p=(h,S)=>{let O=h.pop();if(O===void 0)throw Ve(de(n));let P={data:a,headers:o,method:d,url:Ge(O,r.path,b),connectTimeout:S(f,e.timeouts.connect),responseTimeout:S(f,s.timeout)},x=j=>{let T={request:P,response:j,host:O,triesLeft:h.length};return n.push(T),T},v={onSucess:j=>Ke(j),onRetry(j){let T=x(j);return j.isTimedOut&&f++,Promise.all([e.logger.info("Retryable failure",pe(T)),e.hostsCache.set(O,ue(O,j.isTimedOut?U.Timeouted:U.Down))]).then(()=>p(h,S))},onFail(j){throw x(j),ze(j,de(n))}};return e.requester.send(P).then(j=>ur(j,v))};return ar(e.hostsCache,t).then(h=>p([...h.statelessHosts].reverse(),h.getTimeout))}function lr(e){let{hostsCache:t,logger:r,requester:s,requestsCache:n,responsesCache:a,timeouts:o,userAgent:d,hosts:y,queryParameters:b,headers:f}=e,p={hostsCache:t,logger:r,requester:s,requestsCache:n,responsesCache:a,timeouts:o,userAgent:d,headers:f,queryParameters:b,hosts:y.map(h=>le(h)),read(h,S){let O=ce(S,p.timeouts.read),P=()=>Qe(p,p.hosts.filter(j=>(j.accept&X.Read)!=0),h,O);if((O.cacheable!==void 0?O.cacheable:h.cacheable)!==!0)return P();let v={request:h,mappedRequestOptions:O,transporter:{queryParameters:p.queryParameters,headers:p.headers}};return p.responsesCache.get(v,()=>p.requestsCache.get(v,()=>p.requestsCache.set(v,P()).then(j=>Promise.all([p.requestsCache.delete(v),j]),j=>Promise.all([p.requestsCache.delete(v),Promise.reject(j)])).then(([j,T])=>T)),{miss:j=>p.responsesCache.set(v,j)})},write(h,S){return Qe(p,p.hosts.filter(O=>(O.accept&X.Write)!=0),h,ce(S,p.timeouts.write))}};return p}function dr(e){let t={value:`Algolia for JavaScript (${e})`,add(r){let s=`; ${r.segment}${r.version!==void 0?` (${r.version})`:""}`;return t.value.indexOf(s)===-1&&(t.value=`${t.value}${s}`),t}};return t}function Ke(e){try{return JSON.parse(e.content)}catch(t){throw Je(t.message,e)}}function ze({content:e,status:t},r){let s=e;try{s=JSON.parse(e).message}catch(n){}return Xe(s,t,r)}function pr(e,...t){let r=0;return e.replace(/%s/g,()=>encodeURIComponent(t[r++]))}function Ge(e,t,r){let s=Ye(r),n=`${e.protocol}://${e.url}/${t.charAt(0)==="/"?t.substr(1):t}`;return s.length&&(n+=`?${s}`),n}function Ye(e){let t=r=>Object.prototype.toString.call(r)==="[object Object]"||Object.prototype.toString.call(r)==="[object Array]";return Object.keys(e).map(r=>pr("%s=%s",r,t(e[r])?JSON.stringify(e[r]):e[r])).join("&")}function $e(e,t){if(e.method===He.MethodEnum.Get||e.data===void 0&&t.data===void 0)return;let r=Array.isArray(e.data)?e.data:u(u({},e.data),t.data);return JSON.stringify(r)}function Le(e,t){let 
r=u(u({},e.headers),t.headers),s={};return Object.keys(r).forEach(n=>{let a=r[n];s[n.toLowerCase()]=a}),s}function de(e){return e.map(t=>pe(t))}function pe(e){let t=e.request.headers["x-algolia-api-key"]?{"x-algolia-api-key":"*****"}:{};return g(u({},e),{request:g(u({},e.request),{headers:u(u({},e.request.headers),t)})})}function Xe(e,t,r){return{name:"ApiError",message:e,status:t,transporterStackTrace:r}}function Je(e,t){return{name:"DeserializationError",message:e,response:t}}function Ve(e){return{name:"RetryError",message:"Unreachable hosts - your application id may be incorrect. If the error persists, contact support@algolia.com.",transporterStackTrace:e}}A.CallEnum=X;A.HostStatusEnum=U;A.createApiError=Xe;A.createDeserializationError=Je;A.createMappedRequestOptions=ce;A.createRetryError=Ve;A.createStatefulHost=ue;A.createStatelessHost=le;A.createTransporter=lr;A.createUserAgent=dr;A.deserializeFailure=ze;A.deserializeSuccess=Ke;A.isStatefulHostTimeouted=Be;A.isStatefulHostUp=Fe;A.serializeData=$e;A.serializeHeaders=Le;A.serializeQueryParameters=Ye;A.serializeUrl=Ge;A.stackFrameWithoutCredentials=pe;A.stackTraceWithoutCredentials=de});var K=I((en,et)=>{et.exports=Ze()});var tt=I(w=>{"use strict";Object.defineProperty(w,"__esModule",{value:!0});var N=F(),mr=K(),z=B(),hr=e=>{let t=e.region||"us",r=N.createAuth(N.AuthMode.WithinHeaders,e.appId,e.apiKey),s=mr.createTransporter(g(u({hosts:[{url:`analytics.${t}.algolia.com`}]},e),{headers:u(g(u({},r.headers()),{"content-type":"application/json"}),e.headers),queryParameters:u(u({},r.queryParameters()),e.queryParameters)})),n=e.appId;return N.addMethods({appId:n,transporter:s},e.methods)},yr=e=>(t,r)=>e.transporter.write({method:z.MethodEnum.Post,path:"2/abtests",data:t},r),gr=e=>(t,r)=>e.transporter.write({method:z.MethodEnum.Delete,path:N.encode("2/abtests/%s",t)},r),fr=e=>(t,r)=>e.transporter.read({method:z.MethodEnum.Get,path:N.encode("2/abtests/%s",t)},r),br=e=>t=>e.transporter.read({method:z.MethodEnum.Get,path:"2/abtests"},t),Pr=e=>(t,r)=>e.transporter.write({method:z.MethodEnum.Post,path:N.encode("2/abtests/%s/stop",t)},r);w.addABTest=yr;w.createAnalyticsClient=hr;w.deleteABTest=gr;w.getABTest=fr;w.getABTests=br;w.stopABTest=Pr});var st=I((rn,rt)=>{rt.exports=tt()});var at=I(G=>{"use strict";Object.defineProperty(G,"__esModule",{value:!0});var me=F(),jr=K(),nt=B(),Or=e=>{let t=e.region||"us",r=me.createAuth(me.AuthMode.WithinHeaders,e.appId,e.apiKey),s=jr.createTransporter(g(u({hosts:[{url:`recommendation.${t}.algolia.com`}]},e),{headers:u(g(u({},r.headers()),{"content-type":"application/json"}),e.headers),queryParameters:u(u({},r.queryParameters()),e.queryParameters)}));return me.addMethods({appId:e.appId,transporter:s},e.methods)},Ir=e=>t=>e.transporter.read({method:nt.MethodEnum.Get,path:"1/strategies/personalization"},t),Ar=e=>(t,r)=>e.transporter.write({method:nt.MethodEnum.Post,path:"1/strategies/personalization",data:t},r);G.createRecommendationClient=Or;G.getPersonalizationStrategy=Ir;G.setPersonalizationStrategy=Ar});var it=I((nn,ot)=>{ot.exports=at()});var jt=I(i=>{"use strict";Object.defineProperty(i,"__esModule",{value:!0});var l=F(),q=K(),m=B(),Sr=require("crypto");function Y(e){let t=r=>e.request(r).then(s=>{if(e.batch!==void 0&&e.batch(s.hits),!e.shouldStop(s))return s.cursor?t({cursor:s.cursor}):t({page:(r.page||0)+1})});return t({})}var Dr=e=>{let t=e.appId,r=l.createAuth(e.authMode!==void 
0?e.authMode:l.AuthMode.WithinHeaders,t,e.apiKey),s=q.createTransporter(g(u({hosts:[{url:`${t}-dsn.algolia.net`,accept:q.CallEnum.Read},{url:`${t}.algolia.net`,accept:q.CallEnum.Write}].concat(l.shuffle([{url:`${t}-1.algolianet.com`},{url:`${t}-2.algolianet.com`},{url:`${t}-3.algolianet.com`}]))},e),{headers:u(g(u({},r.headers()),{"content-type":"application/x-www-form-urlencoded"}),e.headers),queryParameters:u(u({},r.queryParameters()),e.queryParameters)})),n={transporter:s,appId:t,addAlgoliaAgent(a,o){s.userAgent.add({segment:a,version:o})},clearCache(){return Promise.all([s.requestsCache.clear(),s.responsesCache.clear()]).then(()=>{})}};return l.addMethods(n,e.methods)};function ct(){return{name:"MissingObjectIDError",message:"All objects must have an unique objectID (like a primary key) to be valid. Algolia is also able to generate objectIDs automatically but *it's not recommended*. To do it, use the `{'autoGenerateObjectIDIfNotExist': true}` option."}}function ut(){return{name:"ObjectNotFoundError",message:"Object not found."}}function lt(){return{name:"ValidUntilNotFoundError",message:"ValidUntil not found in given secured api key."}}var Rr=e=>(t,r)=>{let d=r||{},{queryParameters:s}=d,n=R(d,["queryParameters"]),a=u({acl:t},s!==void 0?{queryParameters:s}:{}),o=(y,b)=>l.createRetryablePromise(f=>$(e)(y.key,b).catch(p=>{if(p.status!==404)throw p;return f()}));return l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Post,path:"1/keys",data:a},n),o)},vr=e=>(t,r,s)=>{let n=q.createMappedRequestOptions(s);return n.queryParameters["X-Algolia-User-ID"]=t,e.transporter.write({method:m.MethodEnum.Post,path:"1/clusters/mapping",data:{cluster:r}},n)},xr=e=>(t,r,s)=>e.transporter.write({method:m.MethodEnum.Post,path:"1/clusters/mapping/batch",data:{users:t,cluster:r}},s),Z=e=>(t,r,s)=>{let n=(a,o)=>L(e)(t,{methods:{waitTask:D}}).waitTask(a.taskID,o);return l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/operation",t),data:{operation:"copy",destination:r}},s),n)},qr=e=>(t,r,s)=>Z(e)(t,r,g(u({},s),{scope:[ee.Rules]})),Er=e=>(t,r,s)=>Z(e)(t,r,g(u({},s),{scope:[ee.Settings]})),Tr=e=>(t,r,s)=>Z(e)(t,r,g(u({},s),{scope:[ee.Synonyms]})),Mr=e=>(t,r)=>{let s=(n,a)=>l.createRetryablePromise(o=>$(e)(t,a).then(o).catch(d=>{if(d.status!==404)throw d}));return l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Delete,path:l.encode("1/keys/%s",t)},r),s)},wr=()=>(e,t)=>{let r=q.serializeQueryParameters(t),s=Sr.createHmac("sha256",e).update(r).digest("hex");return Buffer.from(s+r).toString("base64")},$=e=>(t,r)=>e.transporter.read({method:m.MethodEnum.Get,path:l.encode("1/keys/%s",t)},r),kr=e=>t=>e.transporter.read({method:m.MethodEnum.Get,path:"1/logs"},t),Cr=()=>e=>{let t=Buffer.from(e,"base64").toString("ascii"),r=/validUntil=(\d+)/,s=t.match(r);if(s===null)throw lt();return parseInt(s[1],10)-Math.round(new Date().getTime()/1e3)},Ur=e=>t=>e.transporter.read({method:m.MethodEnum.Get,path:"1/clusters/mapping/top"},t),Nr=e=>(t,r)=>e.transporter.read({method:m.MethodEnum.Get,path:l.encode("1/clusters/mapping/%s",t)},r),Wr=e=>t=>{let n=t||{},{retrieveMappings:r}=n,s=R(n,["retrieveMappings"]);return r===!0&&(s.getClusters=!0),e.transporter.read({method:m.MethodEnum.Get,path:"1/clusters/mapping/pending"},s)},L=e=>(t,r={})=>{let s={transporter:e.transporter,appId:e.appId,indexName:t};return 
l.addMethods(s,r.methods)},Hr=e=>t=>e.transporter.read({method:m.MethodEnum.Get,path:"1/keys"},t),_r=e=>t=>e.transporter.read({method:m.MethodEnum.Get,path:"1/clusters"},t),Fr=e=>t=>e.transporter.read({method:m.MethodEnum.Get,path:"1/indexes"},t),Br=e=>t=>e.transporter.read({method:m.MethodEnum.Get,path:"1/clusters/mapping"},t),Kr=e=>(t,r,s)=>{let n=(a,o)=>L(e)(t,{methods:{waitTask:D}}).waitTask(a.taskID,o);return l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/operation",t),data:{operation:"move",destination:r}},s),n)},zr=e=>(t,r)=>{let s=(n,a)=>Promise.all(Object.keys(n.taskID).map(o=>L(e)(o,{methods:{waitTask:D}}).waitTask(n.taskID[o],a)));return l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Post,path:"1/indexes/*/batch",data:{requests:t}},r),s)},Gr=e=>(t,r)=>e.transporter.read({method:m.MethodEnum.Post,path:"1/indexes/*/objects",data:{requests:t}},r),$r=e=>(t,r)=>{let s=t.map(n=>g(u({},n),{params:q.serializeQueryParameters(n.params||{})}));return e.transporter.read({method:m.MethodEnum.Post,path:"1/indexes/*/queries",data:{requests:s},cacheable:!0},r)},Lr=e=>(t,r)=>Promise.all(t.map(s=>{let d=s.params,{facetName:n,facetQuery:a}=d,o=R(d,["facetName","facetQuery"]);return L(e)(s.indexName,{methods:{searchForFacetValues:dt}}).searchForFacetValues(n,a,u(u({},r),o))})),Vr=e=>(t,r)=>{let s=q.createMappedRequestOptions(r);return s.queryParameters["X-Algolia-User-ID"]=t,e.transporter.write({method:m.MethodEnum.Delete,path:"1/clusters/mapping"},s)},Qr=e=>(t,r)=>{let s=(n,a)=>l.createRetryablePromise(o=>$(e)(t,a).catch(d=>{if(d.status!==404)throw d;return o()}));return l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Post,path:l.encode("1/keys/%s/restore",t)},r),s)},Jr=e=>(t,r)=>e.transporter.read({method:m.MethodEnum.Post,path:"1/clusters/mapping/search",data:{query:t}},r),Xr=e=>(t,r)=>{let s=Object.assign({},r),f=r||{},{queryParameters:n}=f,a=R(f,["queryParameters"]),o=n?{queryParameters:n}:{},d=["acl","indexes","referers","restrictSources","queryParameters","description","maxQueriesPerIPPerHour","maxHitsPerQuery"],y=p=>Object.keys(s).filter(h=>d.indexOf(h)!==-1).every(h=>p[h]===s[h]),b=(p,h)=>l.createRetryablePromise(S=>$(e)(t,h).then(O=>y(O)?Promise.resolve():S()));return l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Put,path:l.encode("1/keys/%s",t),data:o},a),b)},pt=e=>(t,r)=>{let s=(n,a)=>D(e)(n.taskID,a);return l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/batch",e.indexName),data:{requests:t}},r),s)},Yr=e=>t=>Y(g(u({},t),{shouldStop:r=>r.cursor===void 0,request:r=>e.transporter.read({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/browse",e.indexName),data:r},t)})),Zr=e=>t=>{let r=u({hitsPerPage:1e3},t);return Y(g(u({},r),{shouldStop:s=>s.hits.lengthg(u({},n),{hits:n.hits.map(a=>(delete a._highlightResult,a))}))}}))},es=e=>t=>{let r=u({hitsPerPage:1e3},t);return Y(g(u({},r),{shouldStop:s=>s.hits.lengthg(u({},n),{hits:n.hits.map(a=>(delete a._highlightResult,a))}))}}))},te=e=>(t,r,s)=>{let y=s||{},{batchSize:n}=y,a=R(y,["batchSize"]),o={taskIDs:[],objectIDs:[]},d=(b=0)=>{let f=[],p;for(p=b;p({action:r,body:h})),a).then(h=>(o.objectIDs=o.objectIDs.concat(h.objectIDs),o.taskIDs.push(h.taskID),p++,d(p)))};return 
l.createWaitablePromise(d(),(b,f)=>Promise.all(b.taskIDs.map(p=>D(e)(p,f))))},ts=e=>t=>l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/clear",e.indexName)},t),(r,s)=>D(e)(r.taskID,s)),rs=e=>t=>{let a=t||{},{forwardToReplicas:r}=a,s=R(a,["forwardToReplicas"]),n=q.createMappedRequestOptions(s);return r&&(n.queryParameters.forwardToReplicas=1),l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/rules/clear",e.indexName)},n),(o,d)=>D(e)(o.taskID,d))},ss=e=>t=>{let a=t||{},{forwardToReplicas:r}=a,s=R(a,["forwardToReplicas"]),n=q.createMappedRequestOptions(s);return r&&(n.queryParameters.forwardToReplicas=1),l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/synonyms/clear",e.indexName)},n),(o,d)=>D(e)(o.taskID,d))},ns=e=>(t,r)=>l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/deleteByQuery",e.indexName),data:t},r),(s,n)=>D(e)(s.taskID,n)),as=e=>t=>l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Delete,path:l.encode("1/indexes/%s",e.indexName)},t),(r,s)=>D(e)(r.taskID,s)),os=e=>(t,r)=>l.createWaitablePromise(yt(e)([t],r).then(s=>({taskID:s.taskIDs[0]})),(s,n)=>D(e)(s.taskID,n)),yt=e=>(t,r)=>{let s=t.map(n=>({objectID:n}));return te(e)(s,k.DeleteObject,r)},is=e=>(t,r)=>{let o=r||{},{forwardToReplicas:s}=o,n=R(o,["forwardToReplicas"]),a=q.createMappedRequestOptions(n);return s&&(a.queryParameters.forwardToReplicas=1),l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Delete,path:l.encode("1/indexes/%s/rules/%s",e.indexName,t)},a),(d,y)=>D(e)(d.taskID,y))},cs=e=>(t,r)=>{let o=r||{},{forwardToReplicas:s}=o,n=R(o,["forwardToReplicas"]),a=q.createMappedRequestOptions(n);return s&&(a.queryParameters.forwardToReplicas=1),l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Delete,path:l.encode("1/indexes/%s/synonyms/%s",e.indexName,t)},a),(d,y)=>D(e)(d.taskID,y))},us=e=>t=>gt(e)(t).then(()=>!0).catch(r=>{if(r.status!==404)throw r;return!1}),ls=e=>(t,r)=>{let y=r||{},{query:s,paginate:n}=y,a=R(y,["query","paginate"]),o=0,d=()=>ft(e)(s||"",g(u({},a),{page:o})).then(b=>{for(let[f,p]of Object.entries(b.hits))if(t(p))return{object:p,position:parseInt(f,10),page:o};if(o++,n===!1||o>=b.nbPages)throw ut();return d()});return d()},ds=e=>(t,r)=>e.transporter.read({method:m.MethodEnum.Get,path:l.encode("1/indexes/%s/%s",e.indexName,t)},r),ps=()=>(e,t)=>{for(let[r,s]of Object.entries(e.hits))if(s.objectID===t)return parseInt(r,10);return-1},ms=e=>(t,r)=>{let o=r||{},{attributesToRetrieve:s}=o,n=R(o,["attributesToRetrieve"]),a=t.map(d=>u({indexName:e.indexName,objectID:d},s?{attributesToRetrieve:s}:{}));return e.transporter.read({method:m.MethodEnum.Post,path:"1/indexes/*/objects",data:{requests:a}},n)},hs=e=>(t,r)=>e.transporter.read({method:m.MethodEnum.Get,path:l.encode("1/indexes/%s/rules/%s",e.indexName,t)},r),gt=e=>t=>e.transporter.read({method:m.MethodEnum.Get,path:l.encode("1/indexes/%s/settings",e.indexName),data:{getVersion:2}},t),ys=e=>(t,r)=>e.transporter.read({method:m.MethodEnum.Get,path:l.encode("1/indexes/%s/synonyms/%s",e.indexName,t)},r),bt=e=>(t,r)=>e.transporter.read({method:m.MethodEnum.Get,path:l.encode("1/indexes/%s/task/%s",e.indexName,t.toString())},r),gs=e=>(t,r)=>l.createWaitablePromise(Pt(e)([t],r).then(s=>({objectID:s.objectIDs[0],taskID:s.taskIDs[0]})),(s,n)=>D(e)(s.taskID,n)),Pt=e=>(t,r)=>{let 
o=r||{},{createIfNotExists:s}=o,n=R(o,["createIfNotExists"]),a=s?k.PartialUpdateObject:k.PartialUpdateObjectNoCreate;return te(e)(t,a,n)},fs=e=>(t,r)=>{let O=r||{},{safe:s,autoGenerateObjectIDIfNotExist:n,batchSize:a}=O,o=R(O,["safe","autoGenerateObjectIDIfNotExist","batchSize"]),d=(P,x,v,j)=>l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/operation",P),data:{operation:v,destination:x}},j),(T,V)=>D(e)(T.taskID,V)),y=Math.random().toString(36).substring(7),b=`${e.indexName}_tmp_${y}`,f=he({appId:e.appId,transporter:e.transporter,indexName:b}),p=[],h=d(e.indexName,b,"copy",g(u({},o),{scope:["settings","synonyms","rules"]}));p.push(h);let S=(s?h.wait(o):h).then(()=>{let P=f(t,g(u({},o),{autoGenerateObjectIDIfNotExist:n,batchSize:a}));return p.push(P),s?P.wait(o):P}).then(()=>{let P=d(b,e.indexName,"move",o);return p.push(P),s?P.wait(o):P}).then(()=>Promise.all(p)).then(([P,x,v])=>({objectIDs:x.objectIDs,taskIDs:[P.taskID,...x.taskIDs,v.taskID]}));return l.createWaitablePromise(S,(P,x)=>Promise.all(p.map(v=>v.wait(x))))},bs=e=>(t,r)=>ye(e)(t,g(u({},r),{clearExistingRules:!0})),Ps=e=>(t,r)=>ge(e)(t,g(u({},r),{replaceExistingSynonyms:!0})),js=e=>(t,r)=>l.createWaitablePromise(he(e)([t],r).then(s=>({objectID:s.objectIDs[0],taskID:s.taskIDs[0]})),(s,n)=>D(e)(s.taskID,n)),he=e=>(t,r)=>{let o=r||{},{autoGenerateObjectIDIfNotExist:s}=o,n=R(o,["autoGenerateObjectIDIfNotExist"]),a=s?k.AddObject:k.UpdateObject;if(a===k.UpdateObject){for(let d of t)if(d.objectID===void 0)return l.createWaitablePromise(Promise.reject(ct()))}return te(e)(t,a,n)},Os=e=>(t,r)=>ye(e)([t],r),ye=e=>(t,r)=>{let d=r||{},{forwardToReplicas:s,clearExistingRules:n}=d,a=R(d,["forwardToReplicas","clearExistingRules"]),o=q.createMappedRequestOptions(a);return s&&(o.queryParameters.forwardToReplicas=1),n&&(o.queryParameters.clearExistingRules=1),l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/rules/batch",e.indexName),data:t},o),(y,b)=>D(e)(y.taskID,b))},Is=e=>(t,r)=>ge(e)([t],r),ge=e=>(t,r)=>{let d=r||{},{forwardToReplicas:s,replaceExistingSynonyms:n}=d,a=R(d,["forwardToReplicas","replaceExistingSynonyms"]),o=q.createMappedRequestOptions(a);return s&&(o.queryParameters.forwardToReplicas=1),n&&(o.queryParameters.replaceExistingSynonyms=1),l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/synonyms/batch",e.indexName),data:t},o),(y,b)=>D(e)(y.taskID,b))},ft=e=>(t,r)=>e.transporter.read({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/query",e.indexName),data:{query:t},cacheable:!0},r),dt=e=>(t,r,s)=>e.transporter.read({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/facets/%s/query",e.indexName,t),data:{facetQuery:r},cacheable:!0},s),mt=e=>(t,r)=>e.transporter.read({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/rules/search",e.indexName),data:{query:t}},r),ht=e=>(t,r)=>e.transporter.read({method:m.MethodEnum.Post,path:l.encode("1/indexes/%s/synonyms/search",e.indexName),data:{query:t}},r),As=e=>(t,r)=>{let o=r||{},{forwardToReplicas:s}=o,n=R(o,["forwardToReplicas"]),a=q.createMappedRequestOptions(n);return s&&(a.queryParameters.forwardToReplicas=1),l.createWaitablePromise(e.transporter.write({method:m.MethodEnum.Put,path:l.encode("1/indexes/%s/settings",e.indexName),data:t},a),(d,y)=>D(e)(d.taskID,y))},D=e=>(t,r)=>l.createRetryablePromise(s=>bt(e)(t,r).then(n=>n.status!=="published"?s():void 
0)),Ss={AddObject:"addObject",Analytics:"analytics",Browser:"browse",DeleteIndex:"deleteIndex",DeleteObject:"deleteObject",EditSettings:"editSettings",ListIndexes:"listIndexes",Logs:"logs",Recommendation:"recommendation",Search:"search",SeeUnretrievableAttributes:"seeUnretrievableAttributes",Settings:"settings",Usage:"usage"},k={AddObject:"addObject",UpdateObject:"updateObject",PartialUpdateObject:"partialUpdateObject",PartialUpdateObjectNoCreate:"partialUpdateObjectNoCreate",DeleteObject:"deleteObject"},ee={Settings:"settings",Synonyms:"synonyms",Rules:"rules"},Ds={None:"none",StopIfEnoughMatches:"stopIfEnoughMatches"},Rs={Synonym:"synonym",OneWaySynonym:"oneWaySynonym",AltCorrection1:"altCorrection1",AltCorrection2:"altCorrection2",Placeholder:"placeholder"};i.ApiKeyACLEnum=Ss;i.BatchActionEnum=k;i.ScopeEnum=ee;i.StrategyEnum=Ds;i.SynonymEnum=Rs;i.addApiKey=Rr;i.assignUserID=vr;i.assignUserIDs=xr;i.batch=pt;i.browseObjects=Yr;i.browseRules=Zr;i.browseSynonyms=es;i.chunkedBatch=te;i.clearObjects=ts;i.clearRules=rs;i.clearSynonyms=ss;i.copyIndex=Z;i.copyRules=qr;i.copySettings=Er;i.copySynonyms=Tr;i.createBrowsablePromise=Y;i.createMissingObjectIDError=ct;i.createObjectNotFoundError=ut;i.createSearchClient=Dr;i.createValidUntilNotFoundError=lt;i.deleteApiKey=Mr;i.deleteBy=ns;i.deleteIndex=as;i.deleteObject=os;i.deleteObjects=yt;i.deleteRule=is;i.deleteSynonym=cs;i.exists=us;i.findObject=ls;i.generateSecuredApiKey=wr;i.getApiKey=$;i.getLogs=kr;i.getObject=ds;i.getObjectPosition=ps;i.getObjects=ms;i.getRule=hs;i.getSecuredApiKeyRemainingValidity=Cr;i.getSettings=gt;i.getSynonym=ys;i.getTask=bt;i.getTopUserIDs=Ur;i.getUserID=Nr;i.hasPendingMappings=Wr;i.initIndex=L;i.listApiKeys=Hr;i.listClusters=_r;i.listIndices=Fr;i.listUserIDs=Br;i.moveIndex=Kr;i.multipleBatch=zr;i.multipleGetObjects=Gr;i.multipleQueries=$r;i.multipleSearchForFacetValues=Lr;i.partialUpdateObject=gs;i.partialUpdateObjects=Pt;i.removeUserID=Vr;i.replaceAllObjects=fs;i.replaceAllRules=bs;i.replaceAllSynonyms=Ps;i.restoreApiKey=Qr;i.saveObject=js;i.saveObjects=he;i.saveRule=Os;i.saveRules=ye;i.saveSynonym=Is;i.saveSynonyms=ge;i.search=ft;i.searchForFacetValues=dt;i.searchRules=mt;i.searchSynonyms=ht;i.searchUserIDs=Jr;i.setSettings=As;i.updateApiKey=Xr;i.waitTask=D});var It=I((on,Ot)=>{Ot.exports=jt()});var At=I(re=>{"use strict";Object.defineProperty(re,"__esModule",{value:!0});function vs(){return{debug(e,t){return Promise.resolve()},info(e,t){return Promise.resolve()},error(e,t){return Promise.resolve()}}}var xs={Debug:1,Info:2,Error:3};re.LogLevelEnum=xs;re.createNullLogger=vs});var Dt=I((un,St)=>{St.exports=At()});var xt=I(fe=>{"use strict";Object.defineProperty(fe,"__esModule",{value:!0});var Rt=require("http"),vt=require("https"),qs=require("url");function Es(){let e={keepAlive:!0},t=new Rt.Agent(e),r=new vt.Agent(e);return{send(s){return new Promise(n=>{let a=qs.parse(s.url),o=a.query===null?a.pathname:`${a.pathname}?${a.query}`,d=u({agent:a.protocol==="https:"?r:t,hostname:a.hostname,path:o,method:s.method,headers:s.headers},a.port!==void 0?{port:a.port||""}:{}),y=(a.protocol==="https:"?vt:Rt).request(d,h=>{let S="";h.on("data",O=>S+=O),h.on("end",()=>{clearTimeout(f),clearTimeout(p),n({status:h.statusCode||0,content:S,isTimedOut:!1})})}),b=(h,S)=>setTimeout(()=>{y.abort(),n({status:0,content:S,isTimedOut:!0})},h*1e3),f=b(s.connectTimeout,"Connection 
timeout"),p;y.on("error",h=>{clearTimeout(f),clearTimeout(p),n({status:0,content:h.message,isTimedOut:!1})}),y.once("response",()=>{clearTimeout(f),p=b(s.responseTimeout,"Socket timeout")}),s.data!==void 0&&y.write(s.data),y.end()})},destroy(){return t.destroy(),r.destroy(),Promise.resolve()}}}fe.createNodeHttpRequester=Es});var Et=I((dn,qt)=>{qt.exports=xt()});var kt=I((pn,Tt)=>{"use strict";var Mt=Ee(),Ts=we(),W=st(),be=F(),Pe=it(),c=It(),Ms=Dt(),ws=Et(),ks=K();function wt(e,t,r){let s={appId:e,apiKey:t,timeouts:{connect:2,read:5,write:30},requester:ws.createNodeHttpRequester(),logger:Ms.createNullLogger(),responsesCache:Mt.createNullCache(),requestsCache:Mt.createNullCache(),hostsCache:Ts.createInMemoryCache(),userAgent:ks.createUserAgent(be.version).add({segment:"Node.js",version:process.versions.node})};return c.createSearchClient(g(u(u({},s),r),{methods:{search:c.multipleQueries,searchForFacetValues:c.multipleSearchForFacetValues,multipleBatch:c.multipleBatch,multipleGetObjects:c.multipleGetObjects,multipleQueries:c.multipleQueries,copyIndex:c.copyIndex,copySettings:c.copySettings,copyRules:c.copyRules,copySynonyms:c.copySynonyms,moveIndex:c.moveIndex,listIndices:c.listIndices,getLogs:c.getLogs,listClusters:c.listClusters,multipleSearchForFacetValues:c.multipleSearchForFacetValues,getApiKey:c.getApiKey,addApiKey:c.addApiKey,listApiKeys:c.listApiKeys,updateApiKey:c.updateApiKey,deleteApiKey:c.deleteApiKey,restoreApiKey:c.restoreApiKey,assignUserID:c.assignUserID,assignUserIDs:c.assignUserIDs,getUserID:c.getUserID,searchUserIDs:c.searchUserIDs,listUserIDs:c.listUserIDs,getTopUserIDs:c.getTopUserIDs,removeUserID:c.removeUserID,hasPendingMappings:c.hasPendingMappings,generateSecuredApiKey:c.generateSecuredApiKey,getSecuredApiKeyRemainingValidity:c.getSecuredApiKeyRemainingValidity,destroy:be.destroy,initIndex:n=>a=>c.initIndex(n)(a,{methods:{batch:c.batch,delete:c.deleteIndex,getObject:c.getObject,getObjects:c.getObjects,saveObject:c.saveObject,saveObjects:c.saveObjects,search:c.search,searchForFacetValues:c.searchForFacetValues,waitTask:c.waitTask,setSettings:c.setSettings,getSettings:c.getSettings,partialUpdateObject:c.partialUpdateObject,partialUpdateObjects:c.partialUpdateObjects,deleteObject:c.deleteObject,deleteObjects:c.deleteObjects,deleteBy:c.deleteBy,clearObjects:c.clearObjects,browseObjects:c.browseObjects,getObjectPosition:c.getObjectPosition,findObject:c.findObject,exists:c.exists,saveSynonym:c.saveSynonym,saveSynonyms:c.saveSynonyms,getSynonym:c.getSynonym,searchSynonyms:c.searchSynonyms,browseSynonyms:c.browseSynonyms,deleteSynonym:c.deleteSynonym,clearSynonyms:c.clearSynonyms,replaceAllObjects:c.replaceAllObjects,replaceAllSynonyms:c.replaceAllSynonyms,searchRules:c.searchRules,getRule:c.getRule,deleteRule:c.deleteRule,saveRule:c.saveRule,saveRules:c.saveRules,replaceAllRules:c.replaceAllRules,browseRules:c.browseRules,clearRules:c.clearRules}}),initAnalytics:()=>n=>W.createAnalyticsClient(g(u(u({},s),n),{methods:{addABTest:W.addABTest,getABTest:W.getABTest,getABTests:W.getABTests,stopABTest:W.stopABTest,deleteABTest:W.deleteABTest}})),initRecommendation:()=>n=>Pe.createRecommendationClient(g(u(u({},s),n),{methods:{getPersonalizationStrategy:Pe.getPersonalizationStrategy,setPersonalizationStrategy:Pe.setPersonalizationStrategy}}))}}))}wt.version=be.version;Tt.exports=wt});var Ut=I((mn,je)=>{var Ct=kt();je.exports=Ct;je.exports.default=Ct});var Ws={};Vt(Ws,{default:()=>Ks});var 
Oe=C(require("@yarnpkg/core")),E=C(require("@yarnpkg/core")),Ie=C(require("@yarnpkg/plugin-essentials")),Ht=C(require("semver"));var se=C(require("@yarnpkg/core")),Nt=C(Ut()),Cs="e8e1bd300d860104bb8c58453ffa1eb4",Us="OFCNCOG2CU",Wt=async(e,t)=>{var a;let r=se.structUtils.stringifyIdent(e),n=Ns(t).initIndex("npm-search");try{return((a=(await n.getObject(r,{attributesToRetrieve:["types"]})).types)==null?void 0:a.ts)==="definitely-typed"}catch(o){return!1}},Ns=e=>(0,Nt.default)(Us,Cs,{requester:{async send(r){try{let s=await se.httpUtils.request(r.url,r.data||null,{configuration:e,headers:r.headers});return{content:s.body,isTimedOut:!1,status:s.statusCode}}catch(s){return{content:s.response.body,isTimedOut:!1,status:s.response.statusCode}}}}});var _t=e=>e.scope?`${e.scope}__${e.name}`:`${e.name}`,Hs=async(e,t,r,s)=>{if(r.scope==="types")return;let{project:n}=e,{configuration:a}=n,o=a.makeResolver(),d={project:n,resolver:o,report:new E.ThrowReport};if(!await Wt(r,a))return;let b=_t(r),f=E.structUtils.parseRange(r.range).selector;if(!E.semverUtils.validRange(f)){let P=await o.getCandidates(r,new Map,d);f=E.structUtils.parseRange(P[0].reference).selector}let p=Ht.default.coerce(f);if(p===null)return;let h=`${Ie.suggestUtils.Modifier.CARET}${p.major}`,S=E.structUtils.makeDescriptor(E.structUtils.makeIdent("types",b),h),O=E.miscUtils.mapAndFind(n.workspaces,P=>{var T,V;let x=(T=P.manifest.dependencies.get(r.identHash))==null?void 0:T.descriptorHash,v=(V=P.manifest.devDependencies.get(r.identHash))==null?void 0:V.descriptorHash;if(x!==r.descriptorHash&&v!==r.descriptorHash)return E.miscUtils.mapAndFind.skip;let j=[];for(let Ae of Oe.Manifest.allDependencies){let Se=P.manifest[Ae].get(S.identHash);typeof Se!="undefined"&&j.push([Ae,Se])}return j.length===0?E.miscUtils.mapAndFind.skip:j});if(typeof O!="undefined")for(let[P,x]of O)e.manifest[P].set(x.identHash,x);else{try{if((await o.getCandidates(S,new Map,d)).length===0)return}catch{return}e.manifest[Ie.suggestUtils.Target.DEVELOPMENT].set(S.identHash,S)}},_s=async(e,t,r)=>{if(r.scope==="types")return;let s=_t(r),n=E.structUtils.makeIdent("types",s);for(let a of Oe.Manifest.allDependencies)typeof e.manifest[a].get(n.identHash)!="undefined"&&e.manifest[a].delete(n.identHash)},Fs=(e,t)=>{t.publishConfig&&t.publishConfig.typings&&(t.typings=t.publishConfig.typings),t.publishConfig&&t.publishConfig.types&&(t.types=t.publishConfig.types)},Bs={hooks:{afterWorkspaceDependencyAddition:Hs,afterWorkspaceDependencyRemoval:_s,beforeWorkspacePacking:Fs}},Ks=Bs;return Ws;})();
return plugin;
}
};
--------------------------------------------------------------------------------
/.yarn/sdks/eslint/bin/eslint.js:
--------------------------------------------------------------------------------
#!/usr/bin/env node

const {existsSync} = require(`fs`);
const {createRequire, createRequireFromPath} = require(`module`);
const {resolve} = require(`path`);

const relPnpApiPath = "../../../../.pnp.cjs";

const absPnpApiPath = resolve(__dirname, relPnpApiPath);
const absRequire = (createRequire || createRequireFromPath)(absPnpApiPath);

if (existsSync(absPnpApiPath)) {
  if (!process.versions.pnp) {
    // Setup the environment to be able to require eslint/bin/eslint.js
    require(absPnpApiPath).setup();
  }
}

// Defer to the real eslint/bin/eslint.js your application uses
module.exports = absRequire(`eslint/bin/eslint.js`);
--------------------------------------------------------------------------------
/.yarn/sdks/eslint/lib/api.js:
--------------------------------------------------------------------------------
#!/usr/bin/env node

const {existsSync} = require(`fs`);
const {createRequire, createRequireFromPath} = require(`module`);
const {resolve} = require(`path`);

const relPnpApiPath = "../../../../.pnp.cjs";

const absPnpApiPath = resolve(__dirname, relPnpApiPath);
const absRequire = (createRequire || createRequireFromPath)(absPnpApiPath);

if (existsSync(absPnpApiPath)) {
  if (!process.versions.pnp) {
    // Setup the environment to be able to require eslint/lib/api.js
    require(absPnpApiPath).setup();
  }
}

// Defer to the real eslint/lib/api.js your application uses
module.exports = absRequire(`eslint/lib/api.js`);
--------------------------------------------------------------------------------
/.yarn/sdks/eslint/package.json:
--------------------------------------------------------------------------------
{
  "name": "eslint",
  "version": "7.32.0-sdk",
  "main": "./lib/api.js",
  "type": "commonjs"
}
--------------------------------------------------------------------------------
/.yarn/sdks/integrations.yml:
--------------------------------------------------------------------------------
# This file is automatically generated by @yarnpkg/sdks.
# Manual changes might be lost!

integrations:
  - vscode
--------------------------------------------------------------------------------
/.yarn/sdks/prettier/index.js:
--------------------------------------------------------------------------------
#!/usr/bin/env node

const {existsSync} = require(`fs`);
const {createRequire, createRequireFromPath} = require(`module`);
const {resolve} = require(`path`);

const relPnpApiPath = "../../../.pnp.cjs";

const absPnpApiPath = resolve(__dirname, relPnpApiPath);
const absRequire = (createRequire || createRequireFromPath)(absPnpApiPath);

if (existsSync(absPnpApiPath)) {
  if (!process.versions.pnp) {
    // Setup the environment to be able to require prettier/index.js
    require(absPnpApiPath).setup();
  }
}

// Defer to the real prettier/index.js your application uses
module.exports = absRequire(`prettier/index.js`);
--------------------------------------------------------------------------------
/.yarn/sdks/prettier/package.json:
--------------------------------------------------------------------------------
{
  "name": "prettier",
  "version": "2.4.1-sdk",
  "main": "./index.js",
  "type": "commonjs"
}
--------------------------------------------------------------------------------
/.yarn/sdks/typescript/bin/tsc:
--------------------------------------------------------------------------------
#!/usr/bin/env node

const {existsSync} = require(`fs`);
const {createRequire, createRequireFromPath} = require(`module`);
const {resolve} = require(`path`);

const relPnpApiPath = "../../../../.pnp.cjs";

const absPnpApiPath = resolve(__dirname, relPnpApiPath);
const absRequire = (createRequire || createRequireFromPath)(absPnpApiPath);

if (existsSync(absPnpApiPath)) {
  if (!process.versions.pnp) {
    // Setup the environment to be able to require typescript/bin/tsc
    require(absPnpApiPath).setup();
  }
}

// Defer to the real typescript/bin/tsc your application uses
module.exports = absRequire(`typescript/bin/tsc`);
--------------------------------------------------------------------------------
/.yarn/sdks/typescript/bin/tsserver:
--------------------------------------------------------------------------------
#!/usr/bin/env node

const {existsSync} = require(`fs`);
const {createRequire, createRequireFromPath} = require(`module`);
const {resolve} = require(`path`);

const relPnpApiPath = "../../../../.pnp.cjs";

const absPnpApiPath = resolve(__dirname, relPnpApiPath);
const absRequire = (createRequire || createRequireFromPath)(absPnpApiPath);

if (existsSync(absPnpApiPath)) {
  if (!process.versions.pnp) {
    // Setup the environment to be able to require typescript/bin/tsserver
    require(absPnpApiPath).setup();
  }
}

// Defer to the real typescript/bin/tsserver your application uses
module.exports = absRequire(`typescript/bin/tsserver`);
--------------------------------------------------------------------------------
/.yarn/sdks/typescript/lib/tsc.js:
--------------------------------------------------------------------------------
#!/usr/bin/env node

const {existsSync} = require(`fs`);
const {createRequire, createRequireFromPath} = require(`module`);
const {resolve} = require(`path`);

const relPnpApiPath = "../../../../.pnp.cjs";

const absPnpApiPath = resolve(__dirname, relPnpApiPath);
const absRequire = (createRequire || createRequireFromPath)(absPnpApiPath);

if (existsSync(absPnpApiPath)) {
  if (!process.versions.pnp) {
    // Setup the environment to be able to require typescript/lib/tsc.js
    require(absPnpApiPath).setup();
  }
}

// Defer to the real typescript/lib/tsc.js your application uses
module.exports = absRequire(`typescript/lib/tsc.js`);
--------------------------------------------------------------------------------
/.yarn/sdks/typescript/lib/tsserver.js:
--------------------------------------------------------------------------------
#!/usr/bin/env node

const {existsSync} = require(`fs`);
const {createRequire, createRequireFromPath} = require(`module`);
const {resolve} = require(`path`);

const relPnpApiPath = "../../../../.pnp.cjs";

const absPnpApiPath = resolve(__dirname, relPnpApiPath);
const absRequire = (createRequire || createRequireFromPath)(absPnpApiPath);

const moduleWrapper = tsserver => {
  if (!process.versions.pnp) {
    return tsserver;
  }

  const {isAbsolute} = require(`path`);
  const pnpApi = require(`pnpapi`);

  const isVirtual = str => str.match(/\/(\$\$virtual|__virtual__)\//);
  const normalize = str => str.replace(/\\/g, `/`).replace(/^\/?/, `/`);

  const dependencyTreeRoots = new Set(pnpApi.getDependencyTreeRoots().map(locator => {
    return `${locator.name}@${locator.reference}`;
  }));

  // VSCode sends the zip paths to TS using the "zip://" prefix, which TS
  // doesn't understand. This layer makes sure to remove the protocol
  // before forwarding it to TS, and to add it back on all returned paths.

30 | 31 | function toEditorPath(str) { 32 | // We add the `zip:` prefix to both `.zip/` paths and virtual paths 33 | if (isAbsolute(str) && !str.match(/^\^?(zip:|\/zip\/)/) && (str.match(/\.zip\//) || isVirtual(str))) { 34 | // We also take the opportunity to turn virtual paths into physical ones; 35 | // this makes it much easier to work with workspaces that list peer 36 | // dependencies, since otherwise Ctrl+Click would bring us to the virtual 37 | // file instances instead of the real ones. 38 | // 39 | // We only do this to modules owned by the dependency tree roots. 40 | // This avoids breaking the resolution when jumping inside a vendor 41 | // with peer dep (otherwise jumping into react-dom would show resolution 42 | // errors on react). 43 | // 44 | const resolved = isVirtual(str) ? pnpApi.resolveVirtual(str) : str; 45 | if (resolved) { 46 | const locator = pnpApi.findPackageLocator(resolved); 47 | if (locator && dependencyTreeRoots.has(`${locator.name}@${locator.reference}`)) { 48 | str = resolved; 49 | } 50 | } 51 | 52 | str = normalize(str); 53 | 54 | if (str.match(/\.zip\//)) { 55 | switch (hostInfo) { 56 | // Absolute VSCode `Uri.fsPath`s need to start with a slash. 57 | // VSCode only adds it automatically for supported schemes, 58 | // so we have to do it manually for the `zip` scheme. 59 | // The path needs to start with a caret, otherwise VSCode doesn't handle the protocol 60 | // 61 | // Ref: https://github.com/microsoft/vscode/issues/105014#issuecomment-686760910 62 | // 63 | // Update Oct 8 2021: VSCode changed their format in 1.61. 64 | // Before | ^zip:/c:/foo/bar.zip/package.json 65 | // After | ^/zip//c:/foo/bar.zip/package.json 66 | // 67 | case `vscode <1.61`: { 68 | str = `^zip:${str}`; 69 | } break; 70 | 71 | case `vscode`: { 72 | str = `^/zip/${str}`; 73 | } break; 74 | 75 | // To make "go to definition" work, 76 | // we have to resolve the actual file system path from the virtual path 77 | // and convert the scheme to one supported by [vim-rzip](https://github.com/lbrayner/vim-rzip) 78 | case `coc-nvim`: { 79 | str = normalize(resolved).replace(/\.zip\//, `.zip::`); 80 | str = resolve(`zipfile:${str}`); 81 | } break; 82 | 83 | // Support neovim native LSP and [typescript-language-server](https://github.com/theia-ide/typescript-language-server) 84 | // We have to resolve the actual file system path from the virtual path, 85 | // everything else is up to neovim 86 | case `neovim`: { 87 | str = normalize(resolved).replace(/\.zip\//, `.zip::`); 88 | str = `zipfile:${str}`; 89 | } break; 90 | 91 | default: { 92 | str = `zip:${str}`; 93 | } break; 94 | } 95 | } 96 | } 97 | 98 | return str; 99 | } 100 | 101 | function fromEditorPath(str) { 102 | switch (hostInfo) { 103 | case `coc-nvim`: 104 | case `neovim`: { 105 | str = str.replace(/\.zip::/, `.zip/`); 106 | // The path for coc-nvim is in the format of //zipfile://.yarn/... 107 | // So in order to convert it back, we use .* to match everything 108 | // before `zipfile:` 109 | return process.platform === `win32` 110 | ? str.replace(/^.*zipfile:\//, ``) 111 | : str.replace(/^.*zipfile:/, ``); 112 | } break; 113 | 114 | case `vscode`: 115 | default: { 116 | return process.platform === `win32` 117 | ? 
str.replace(/^\^?(zip:|\/zip)\/+/, ``) 118 | : str.replace(/^\^?(zip:|\/zip)\/+/, `/`); 119 | } break; 120 | } 121 | } 122 | 123 | // Force enable 'allowLocalPluginLoads' 124 | // TypeScript tries to resolve plugins using a path relative to itself 125 | // which doesn't work when using the global cache 126 | // https://github.com/microsoft/TypeScript/blob/1b57a0395e0bff191581c9606aab92832001de62/src/server/project.ts#L2238 127 | // VSCode doesn't want to enable 'allowLocalPluginLoads' due to security concerns but 128 | // TypeScript already does local loads and if this code is running the user trusts the workspace 129 | // https://github.com/microsoft/vscode/issues/45856 130 | const ConfiguredProject = tsserver.server.ConfiguredProject; 131 | const {enablePluginsWithOptions: originalEnablePluginsWithOptions} = ConfiguredProject.prototype; 132 | ConfiguredProject.prototype.enablePluginsWithOptions = function() { 133 | this.projectService.allowLocalPluginLoads = true; 134 | return originalEnablePluginsWithOptions.apply(this, arguments); 135 | }; 136 | 137 | // And here is the point where we hijack the VSCode <-> TS communications 138 | // by adding ourselves in the middle. We locate everything that looks 139 | // like an absolute path of ours and normalize it. 140 | 141 | const Session = tsserver.server.Session; 142 | const {onMessage: originalOnMessage, send: originalSend} = Session.prototype; 143 | let hostInfo = `unknown`; 144 | 145 | Object.assign(Session.prototype, { 146 | onMessage(/** @type {string} */ message) { 147 | const parsedMessage = JSON.parse(message) 148 | 149 | if ( 150 | parsedMessage != null && 151 | typeof parsedMessage === `object` && 152 | parsedMessage.arguments && 153 | typeof parsedMessage.arguments.hostInfo === `string` 154 | ) { 155 | hostInfo = parsedMessage.arguments.hostInfo; 156 | if (hostInfo === `vscode` && process.env.VSCODE_IPC_HOOK && process.env.VSCODE_IPC_HOOK.match(/Code\/1\.([1-5][0-9]|60)\./)) { 157 | hostInfo += ` <1.61`; 158 | } 159 | } 160 | 161 | return originalOnMessage.call(this, JSON.stringify(parsedMessage, (key, value) => { 162 | return typeof value === `string` ? fromEditorPath(value) : value; 163 | })); 164 | }, 165 | 166 | send(/** @type {any} */ msg) { 167 | return originalSend.call(this, JSON.parse(JSON.stringify(msg, (key, value) => { 168 | return typeof value === `string` ? 
toEditorPath(value) : value; 169 | }))); 170 | } 171 | }); 172 | 173 | return tsserver; 174 | }; 175 | 176 | if (existsSync(absPnpApiPath)) { 177 | if (!process.versions.pnp) { 178 | // Setup the environment to be able to require typescript/lib/tsserver.js 179 | require(absPnpApiPath).setup(); 180 | } 181 | } 182 | 183 | // Defer to the real typescript/lib/tsserver.js your application uses 184 | module.exports = moduleWrapper(absRequire(`typescript/lib/tsserver.js`)); 185 | -------------------------------------------------------------------------------- /.yarn/sdks/typescript/lib/tsserverlibrary.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const {existsSync} = require(`fs`); 4 | const {createRequire, createRequireFromPath} = require(`module`); 5 | const {resolve} = require(`path`); 6 | 7 | const relPnpApiPath = "../../../../.pnp.cjs"; 8 | 9 | const absPnpApiPath = resolve(__dirname, relPnpApiPath); 10 | const absRequire = (createRequire || createRequireFromPath)(absPnpApiPath); 11 | 12 | const moduleWrapper = tsserver => { 13 | if (!process.versions.pnp) { 14 | return tsserver; 15 | } 16 | 17 | const {isAbsolute} = require(`path`); 18 | const pnpApi = require(`pnpapi`); 19 | 20 | const isVirtual = str => str.match(/\/(\$\$virtual|__virtual__)\//); 21 | const normalize = str => str.replace(/\\/g, `/`).replace(/^\/?/, `/`); 22 | 23 | const dependencyTreeRoots = new Set(pnpApi.getDependencyTreeRoots().map(locator => { 24 | return `${locator.name}@${locator.reference}`; 25 | })); 26 | 27 | // VSCode sends the zip paths to TS using the "zip://" prefix, that TS 28 | // doesn't understand. This layer makes sure to remove the protocol 29 | // before forwarding it to TS, and to add it back on all returned paths. 30 | 31 | function toEditorPath(str) { 32 | // We add the `zip:` prefix to both `.zip/` paths and virtual paths 33 | if (isAbsolute(str) && !str.match(/^\^?(zip:|\/zip\/)/) && (str.match(/\.zip\//) || isVirtual(str))) { 34 | // We also take the opportunity to turn virtual paths into physical ones; 35 | // this makes it much easier to work with workspaces that list peer 36 | // dependencies, since otherwise Ctrl+Click would bring us to the virtual 37 | // file instances instead of the real ones. 38 | // 39 | // We only do this to modules owned by the dependency tree roots. 40 | // This avoids breaking the resolution when jumping inside a vendor 41 | // with peer dep (otherwise jumping into react-dom would show resolution 42 | // errors on react). 43 | // 44 | const resolved = isVirtual(str) ? pnpApi.resolveVirtual(str) : str; 45 | if (resolved) { 46 | const locator = pnpApi.findPackageLocator(resolved); 47 | if (locator && dependencyTreeRoots.has(`${locator.name}@${locator.reference}`)) { 48 | str = resolved; 49 | } 50 | } 51 | 52 | str = normalize(str); 53 | 54 | if (str.match(/\.zip\//)) { 55 | switch (hostInfo) { 56 | // Absolute VSCode `Uri.fsPath`s need to start with a slash. 57 | // VSCode only adds it automatically for supported schemes, 58 | // so we have to do it manually for the `zip` scheme. 59 | // The path needs to start with a caret, otherwise VSCode doesn't handle the protocol 60 | // 61 | // Ref: https://github.com/microsoft/vscode/issues/105014#issuecomment-686760910 62 | // 63 | // Update Oct 8 2021: VSCode changed their format in 1.61. 
64 | // Before | ^zip:/c:/foo/bar.zip/package.json 65 | // After | ^/zip//c:/foo/bar.zip/package.json 66 | // 67 | case `vscode <1.61`: { 68 | str = `^zip:${str}`; 69 | } break; 70 | 71 | case `vscode`: { 72 | str = `^/zip/${str}`; 73 | } break; 74 | 75 | // To make "go to definition" work, 76 | // we have to resolve the actual file system path from the virtual path 77 | // and convert the scheme to one supported by [vim-rzip](https://github.com/lbrayner/vim-rzip) 78 | case `coc-nvim`: { 79 | str = normalize(resolved).replace(/\.zip\//, `.zip::`); 80 | str = resolve(`zipfile:${str}`); 81 | } break; 82 | 83 | // Support neovim native LSP and [typescript-language-server](https://github.com/theia-ide/typescript-language-server) 84 | // We have to resolve the actual file system path from the virtual path, 85 | // everything else is up to neovim 86 | case `neovim`: { 87 | str = normalize(resolved).replace(/\.zip\//, `.zip::`); 88 | str = `zipfile:${str}`; 89 | } break; 90 | 91 | default: { 92 | str = `zip:${str}`; 93 | } break; 94 | } 95 | } 96 | } 97 | 98 | return str; 99 | } 100 | 101 | function fromEditorPath(str) { 102 | switch (hostInfo) { 103 | case `coc-nvim`: 104 | case `neovim`: { 105 | str = str.replace(/\.zip::/, `.zip/`); 106 | // The path for coc-nvim is in the format of //zipfile://.yarn/... 107 | // So in order to convert it back, we use .* to match everything 108 | // before `zipfile:` 109 | return process.platform === `win32` 110 | ? str.replace(/^.*zipfile:\//, ``) 111 | : str.replace(/^.*zipfile:/, ``); 112 | } break; 113 | 114 | case `vscode`: 115 | default: { 116 | return process.platform === `win32` 117 | ? str.replace(/^\^?(zip:|\/zip)\/+/, ``) 118 | : str.replace(/^\^?(zip:|\/zip)\/+/, `/`); 119 | } break; 120 | } 121 | } 122 | 123 | // Force enable 'allowLocalPluginLoads' 124 | // TypeScript tries to resolve plugins using a path relative to itself 125 | // which doesn't work when using the global cache 126 | // https://github.com/microsoft/TypeScript/blob/1b57a0395e0bff191581c9606aab92832001de62/src/server/project.ts#L2238 127 | // VSCode doesn't want to enable 'allowLocalPluginLoads' due to security concerns but 128 | // TypeScript already does local loads and if this code is running the user trusts the workspace 129 | // https://github.com/microsoft/vscode/issues/45856 130 | const ConfiguredProject = tsserver.server.ConfiguredProject; 131 | const {enablePluginsWithOptions: originalEnablePluginsWithOptions} = ConfiguredProject.prototype; 132 | ConfiguredProject.prototype.enablePluginsWithOptions = function() { 133 | this.projectService.allowLocalPluginLoads = true; 134 | return originalEnablePluginsWithOptions.apply(this, arguments); 135 | }; 136 | 137 | // And here is the point where we hijack the VSCode <-> TS communications 138 | // by adding ourselves in the middle. We locate everything that looks 139 | // like an absolute path of ours and normalize it. 
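// (Editorial sketch, not generated code.) The interception below is symmetric, assuming the editor speaks tsserver's JSON message protocol: onMessage() re-serializes each incoming request with a replacer that runs every string value through fromEditorPath(), so TS only ever sees real filesystem paths, while send() runs every outgoing string value through toEditorPath(), so the editor only ever sees its own zip-scheme URIs; hostInfo is captured from the editor's first `configure` request to pick the right scheme per editor.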
140 | 141 | const Session = tsserver.server.Session; 142 | const {onMessage: originalOnMessage, send: originalSend} = Session.prototype; 143 | let hostInfo = `unknown`; 144 | 145 | Object.assign(Session.prototype, { 146 | onMessage(/** @type {string} */ message) { 147 | const parsedMessage = JSON.parse(message) 148 | 149 | if ( 150 | parsedMessage != null && 151 | typeof parsedMessage === `object` && 152 | parsedMessage.arguments && 153 | typeof parsedMessage.arguments.hostInfo === `string` 154 | ) { 155 | hostInfo = parsedMessage.arguments.hostInfo; 156 | if (hostInfo === `vscode` && process.env.VSCODE_IPC_HOOK && process.env.VSCODE_IPC_HOOK.match(/Code\/1\.([1-5][0-9]|60)\./)) { 157 | hostInfo += ` <1.61`; 158 | } 159 | } 160 | 161 | return originalOnMessage.call(this, JSON.stringify(parsedMessage, (key, value) => { 162 | return typeof value === `string` ? fromEditorPath(value) : value; 163 | })); 164 | }, 165 | 166 | send(/** @type {any} */ msg) { 167 | return originalSend.call(this, JSON.parse(JSON.stringify(msg, (key, value) => { 168 | return typeof value === `string` ? toEditorPath(value) : value; 169 | }))); 170 | } 171 | }); 172 | 173 | return tsserver; 174 | }; 175 | 176 | if (existsSync(absPnpApiPath)) { 177 | if (!process.versions.pnp) { 178 | // Setup the environment to be able to require typescript/lib/tsserverlibrary.js 179 | require(absPnpApiPath).setup(); 180 | } 181 | } 182 | 183 | // Defer to the real typescript/lib/tsserverlibrary.js your application uses 184 | module.exports = moduleWrapper(absRequire(`typescript/lib/tsserverlibrary.js`)); 185 | -------------------------------------------------------------------------------- /.yarn/sdks/typescript/lib/typescript.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const {existsSync} = require(`fs`); 4 | const {createRequire, createRequireFromPath} = require(`module`); 5 | const {resolve} = require(`path`); 6 | 7 | const relPnpApiPath = "../../../../.pnp.cjs"; 8 | 9 | const absPnpApiPath = resolve(__dirname, relPnpApiPath); 10 | const absRequire = (createRequire || createRequireFromPath)(absPnpApiPath); 11 | 12 | if (existsSync(absPnpApiPath)) { 13 | if (!process.versions.pnp) { 14 | // Setup the environment to be able to require typescript/lib/typescript.js 15 | require(absPnpApiPath).setup(); 16 | } 17 | } 18 | 19 | // Defer to the real typescript/lib/typescript.js your application uses 20 | module.exports = absRequire(`typescript/lib/typescript.js`); 21 | -------------------------------------------------------------------------------- /.yarn/sdks/typescript/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "typescript", 3 | "version": "4.4.4-sdk", 4 | "main": "./lib/typescript.js", 5 | "type": "commonjs" 6 | } 7 | -------------------------------------------------------------------------------- /.yarnrc.yml: -------------------------------------------------------------------------------- 1 | plugins: 2 | - path: .yarn/plugins/@yarnpkg/plugin-interactive-tools.cjs 3 | spec: "@yarnpkg/plugin-interactive-tools" 4 | - path: .yarn/plugins/@yarnpkg/plugin-typescript.cjs 5 | spec: "@yarnpkg/plugin-typescript" 6 | 7 | yarnPath: .yarn/releases/yarn-3.0.2.cjs 8 | -------------------------------------------------------------------------------- /Anchor.toml: -------------------------------------------------------------------------------- 1 | anchor_version = "0.17.0" 2 | 3 | [registry] 4 | url = 
"https://anchor.projectserum.com" 5 | 6 | [provider] 7 | cluster = "localnet" 8 | wallet = "tests/fixture-key.json" 9 | 10 | [scripts] 11 | test = "yarn mocha" 12 | 13 | [[test.genesis]] 14 | address = "CRATwLpu6YZEeiVq9ajjxs61wPQ9f29s1UoQR9siJCRs" 15 | program = "./artifacts/programs/crate_token.so" 16 | 17 | [[test.genesis]] 18 | address = "1NKyU3qShZC3oJgvCCftAHDi5TFxcJwfyUz2FeZsiwE" 19 | program = "./artifacts/programs/crate_redeem_in_kind.so" 20 | 21 | [programs.mainnet] 22 | asol = "AURUqAcTZP8mhR6sWVxWyfBbpJRj4A3qqeFzLNhrwayE" 23 | 24 | [programs.devnet] 25 | asol = "AURUqAcTZP8mhR6sWVxWyfBbpJRj4A3qqeFzLNhrwayE" 26 | 27 | [programs.testnet] 28 | asol = "AURUqAcTZP8mhR6sWVxWyfBbpJRj4A3qqeFzLNhrwayE" 29 | 30 | [programs.localnet] 31 | asol = "AURUqAcTZP8mhR6sWVxWyfBbpJRj4A3qqeFzLNhrwayE" 32 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | members = ["lib/*", "programs/*"] 3 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | GNU AFFERO GENERAL PUBLIC LICENSE 2 | Version 3, 19 November 2007 3 | 4 | Copyright (C) 2007 Free Software Foundation, Inc. 5 | Everyone is permitted to copy and distribute verbatim copies 6 | of this license document, but changing it is not allowed. 7 | 8 | Preamble 9 | 10 | The GNU Affero General Public License is a free, copyleft license for 11 | software and other kinds of works, specifically designed to ensure 12 | cooperation with the community in the case of network server software. 13 | 14 | The licenses for most software and other practical works are designed 15 | to take away your freedom to share and change the works. By contrast, 16 | our General Public Licenses are intended to guarantee your freedom to 17 | share and change all versions of a program--to make sure it remains free 18 | software for all its users. 19 | 20 | When we speak of free software, we are referring to freedom, not 21 | price. Our General Public Licenses are designed to make sure that you 22 | have the freedom to distribute copies of free software (and charge for 23 | them if you wish), that you receive source code or can get it if you 24 | want it, that you can change the software or use pieces of it in new 25 | free programs, and that you know you can do these things. 26 | 27 | Developers that use our General Public Licenses protect your rights 28 | with two steps: (1) assert copyright on the software, and (2) offer 29 | you this License which gives you legal permission to copy, distribute 30 | and/or modify the software. 31 | 32 | A secondary benefit of defending all users' freedom is that 33 | improvements made in alternate versions of the program, if they 34 | receive widespread use, become available for other developers to 35 | incorporate. Many developers of free software are heartened and 36 | encouraged by the resulting cooperation. However, in the case of 37 | software used on network servers, this result may fail to come about. 38 | The GNU General Public License permits making a modified version and 39 | letting the public access it on a server without ever releasing its 40 | source code to the public. 41 | 42 | The GNU Affero General Public License is designed specifically to 43 | ensure that, in such cases, the modified source code becomes available 44 | to the community. 
It requires the operator of a network server to 45 | provide the source code of the modified version running there to the 46 | users of that server. Therefore, public use of a modified version, on 47 | a publicly accessible server, gives the public access to the source 48 | code of the modified version. 49 | 50 | An older license, called the Affero General Public License and 51 | published by Affero, was designed to accomplish similar goals. This is 52 | a different license, not a version of the Affero GPL, but Affero has 53 | released a new version of the Affero GPL which permits relicensing under 54 | this license. 55 | 56 | The precise terms and conditions for copying, distribution and 57 | modification follow. 58 | 59 | TERMS AND CONDITIONS 60 | 61 | 0. Definitions. 62 | 63 | "This License" refers to version 3 of the GNU Affero General Public License. 64 | 65 | "Copyright" also means copyright-like laws that apply to other kinds of 66 | works, such as semiconductor masks. 67 | 68 | "The Program" refers to any copyrightable work licensed under this 69 | License. Each licensee is addressed as "you". "Licensees" and 70 | "recipients" may be individuals or organizations. 71 | 72 | To "modify" a work means to copy from or adapt all or part of the work 73 | in a fashion requiring copyright permission, other than the making of an 74 | exact copy. The resulting work is called a "modified version" of the 75 | earlier work or a work "based on" the earlier work. 76 | 77 | A "covered work" means either the unmodified Program or a work based 78 | on the Program. 79 | 80 | To "propagate" a work means to do anything with it that, without 81 | permission, would make you directly or secondarily liable for 82 | infringement under applicable copyright law, except executing it on a 83 | computer or modifying a private copy. Propagation includes copying, 84 | distribution (with or without modification), making available to the 85 | public, and in some countries other activities as well. 86 | 87 | To "convey" a work means any kind of propagation that enables other 88 | parties to make or receive copies. Mere interaction with a user through 89 | a computer network, with no transfer of a copy, is not conveying. 90 | 91 | An interactive user interface displays "Appropriate Legal Notices" 92 | to the extent that it includes a convenient and prominently visible 93 | feature that (1) displays an appropriate copyright notice, and (2) 94 | tells the user that there is no warranty for the work (except to the 95 | extent that warranties are provided), that licensees may convey the 96 | work under this License, and how to view a copy of this License. If 97 | the interface presents a list of user commands or options, such as a 98 | menu, a prominent item in the list meets this criterion. 99 | 100 | 1. Source Code. 101 | 102 | The "source code" for a work means the preferred form of the work 103 | for making modifications to it. "Object code" means any non-source 104 | form of a work. 105 | 106 | A "Standard Interface" means an interface that either is an official 107 | standard defined by a recognized standards body, or, in the case of 108 | interfaces specified for a particular programming language, one that 109 | is widely used among developers working in that language. 
110 | 111 | The "System Libraries" of an executable work include anything, other 112 | than the work as a whole, that (a) is included in the normal form of 113 | packaging a Major Component, but which is not part of that Major 114 | Component, and (b) serves only to enable use of the work with that 115 | Major Component, or to implement a Standard Interface for which an 116 | implementation is available to the public in source code form. A 117 | "Major Component", in this context, means a major essential component 118 | (kernel, window system, and so on) of the specific operating system 119 | (if any) on which the executable work runs, or a compiler used to 120 | produce the work, or an object code interpreter used to run it. 121 | 122 | The "Corresponding Source" for a work in object code form means all 123 | the source code needed to generate, install, and (for an executable 124 | work) run the object code and to modify the work, including scripts to 125 | control those activities. However, it does not include the work's 126 | System Libraries, or general-purpose tools or generally available free 127 | programs which are used unmodified in performing those activities but 128 | which are not part of the work. For example, Corresponding Source 129 | includes interface definition files associated with source files for 130 | the work, and the source code for shared libraries and dynamically 131 | linked subprograms that the work is specifically designed to require, 132 | such as by intimate data communication or control flow between those 133 | subprograms and other parts of the work. 134 | 135 | The Corresponding Source need not include anything that users 136 | can regenerate automatically from other parts of the Corresponding 137 | Source. 138 | 139 | The Corresponding Source for a work in source code form is that 140 | same work. 141 | 142 | 2. Basic Permissions. 143 | 144 | All rights granted under this License are granted for the term of 145 | copyright on the Program, and are irrevocable provided the stated 146 | conditions are met. This License explicitly affirms your unlimited 147 | permission to run the unmodified Program. The output from running a 148 | covered work is covered by this License only if the output, given its 149 | content, constitutes a covered work. This License acknowledges your 150 | rights of fair use or other equivalent, as provided by copyright law. 151 | 152 | You may make, run and propagate covered works that you do not 153 | convey, without conditions so long as your license otherwise remains 154 | in force. You may convey covered works to others for the sole purpose 155 | of having them make modifications exclusively for you, or provide you 156 | with facilities for running those works, provided that you comply with 157 | the terms of this License in conveying all material for which you do 158 | not control copyright. Those thus making or running the covered works 159 | for you must do so exclusively on your behalf, under your direction 160 | and control, on terms that prohibit them from making any copies of 161 | your copyrighted material outside their relationship with you. 162 | 163 | Conveying under any other circumstances is permitted solely under 164 | the conditions stated below. Sublicensing is not allowed; section 10 165 | makes it unnecessary. 166 | 167 | 3. Protecting Users' Legal Rights From Anti-Circumvention Law. 
168 | 169 | No covered work shall be deemed part of an effective technological 170 | measure under any applicable law fulfilling obligations under article 171 | 11 of the WIPO copyright treaty adopted on 20 December 1996, or 172 | similar laws prohibiting or restricting circumvention of such 173 | measures. 174 | 175 | When you convey a covered work, you waive any legal power to forbid 176 | circumvention of technological measures to the extent such circumvention 177 | is effected by exercising rights under this License with respect to 178 | the covered work, and you disclaim any intention to limit operation or 179 | modification of the work as a means of enforcing, against the work's 180 | users, your or third parties' legal rights to forbid circumvention of 181 | technological measures. 182 | 183 | 4. Conveying Verbatim Copies. 184 | 185 | You may convey verbatim copies of the Program's source code as you 186 | receive it, in any medium, provided that you conspicuously and 187 | appropriately publish on each copy an appropriate copyright notice; 188 | keep intact all notices stating that this License and any 189 | non-permissive terms added in accord with section 7 apply to the code; 190 | keep intact all notices of the absence of any warranty; and give all 191 | recipients a copy of this License along with the Program. 192 | 193 | You may charge any price or no price for each copy that you convey, 194 | and you may offer support or warranty protection for a fee. 195 | 196 | 5. Conveying Modified Source Versions. 197 | 198 | You may convey a work based on the Program, or the modifications to 199 | produce it from the Program, in the form of source code under the 200 | terms of section 4, provided that you also meet all of these conditions: 201 | 202 | a) The work must carry prominent notices stating that you modified 203 | it, and giving a relevant date. 204 | 205 | b) The work must carry prominent notices stating that it is 206 | released under this License and any conditions added under section 207 | 7. This requirement modifies the requirement in section 4 to 208 | "keep intact all notices". 209 | 210 | c) You must license the entire work, as a whole, under this 211 | License to anyone who comes into possession of a copy. This 212 | License will therefore apply, along with any applicable section 7 213 | additional terms, to the whole of the work, and all its parts, 214 | regardless of how they are packaged. This License gives no 215 | permission to license the work in any other way, but it does not 216 | invalidate such permission if you have separately received it. 217 | 218 | d) If the work has interactive user interfaces, each must display 219 | Appropriate Legal Notices; however, if the Program has interactive 220 | interfaces that do not display Appropriate Legal Notices, your 221 | work need not make them do so. 222 | 223 | A compilation of a covered work with other separate and independent 224 | works, which are not by their nature extensions of the covered work, 225 | and which are not combined with it such as to form a larger program, 226 | in or on a volume of a storage or distribution medium, is called an 227 | "aggregate" if the compilation and its resulting copyright are not 228 | used to limit the access or legal rights of the compilation's users 229 | beyond what the individual works permit. Inclusion of a covered work 230 | in an aggregate does not cause this License to apply to the other 231 | parts of the aggregate. 232 | 233 | 6. Conveying Non-Source Forms. 
234 | 235 | You may convey a covered work in object code form under the terms 236 | of sections 4 and 5, provided that you also convey the 237 | machine-readable Corresponding Source under the terms of this License, 238 | in one of these ways: 239 | 240 | a) Convey the object code in, or embodied in, a physical product 241 | (including a physical distribution medium), accompanied by the 242 | Corresponding Source fixed on a durable physical medium 243 | customarily used for software interchange. 244 | 245 | b) Convey the object code in, or embodied in, a physical product 246 | (including a physical distribution medium), accompanied by a 247 | written offer, valid for at least three years and valid for as 248 | long as you offer spare parts or customer support for that product 249 | model, to give anyone who possesses the object code either (1) a 250 | copy of the Corresponding Source for all the software in the 251 | product that is covered by this License, on a durable physical 252 | medium customarily used for software interchange, for a price no 253 | more than your reasonable cost of physically performing this 254 | conveying of source, or (2) access to copy the 255 | Corresponding Source from a network server at no charge. 256 | 257 | c) Convey individual copies of the object code with a copy of the 258 | written offer to provide the Corresponding Source. This 259 | alternative is allowed only occasionally and noncommercially, and 260 | only if you received the object code with such an offer, in accord 261 | with subsection 6b. 262 | 263 | d) Convey the object code by offering access from a designated 264 | place (gratis or for a charge), and offer equivalent access to the 265 | Corresponding Source in the same way through the same place at no 266 | further charge. You need not require recipients to copy the 267 | Corresponding Source along with the object code. If the place to 268 | copy the object code is a network server, the Corresponding Source 269 | may be on a different server (operated by you or a third party) 270 | that supports equivalent copying facilities, provided you maintain 271 | clear directions next to the object code saying where to find the 272 | Corresponding Source. Regardless of what server hosts the 273 | Corresponding Source, you remain obligated to ensure that it is 274 | available for as long as needed to satisfy these requirements. 275 | 276 | e) Convey the object code using peer-to-peer transmission, provided 277 | you inform other peers where the object code and Corresponding 278 | Source of the work are being offered to the general public at no 279 | charge under subsection 6d. 280 | 281 | A separable portion of the object code, whose source code is excluded 282 | from the Corresponding Source as a System Library, need not be 283 | included in conveying the object code work. 284 | 285 | A "User Product" is either (1) a "consumer product", which means any 286 | tangible personal property which is normally used for personal, family, 287 | or household purposes, or (2) anything designed or sold for incorporation 288 | into a dwelling. In determining whether a product is a consumer product, 289 | doubtful cases shall be resolved in favor of coverage. For a particular 290 | product received by a particular user, "normally used" refers to a 291 | typical or common use of that class of product, regardless of the status 292 | of the particular user or of the way in which the particular user 293 | actually uses, or expects or is expected to use, the product. 
A product 294 | is a consumer product regardless of whether the product has substantial 295 | commercial, industrial or non-consumer uses, unless such uses represent 296 | the only significant mode of use of the product. 297 | 298 | "Installation Information" for a User Product means any methods, 299 | procedures, authorization keys, or other information required to install 300 | and execute modified versions of a covered work in that User Product from 301 | a modified version of its Corresponding Source. The information must 302 | suffice to ensure that the continued functioning of the modified object 303 | code is in no case prevented or interfered with solely because 304 | modification has been made. 305 | 306 | If you convey an object code work under this section in, or with, or 307 | specifically for use in, a User Product, and the conveying occurs as 308 | part of a transaction in which the right of possession and use of the 309 | User Product is transferred to the recipient in perpetuity or for a 310 | fixed term (regardless of how the transaction is characterized), the 311 | Corresponding Source conveyed under this section must be accompanied 312 | by the Installation Information. But this requirement does not apply 313 | if neither you nor any third party retains the ability to install 314 | modified object code on the User Product (for example, the work has 315 | been installed in ROM). 316 | 317 | The requirement to provide Installation Information does not include a 318 | requirement to continue to provide support service, warranty, or updates 319 | for a work that has been modified or installed by the recipient, or for 320 | the User Product in which it has been modified or installed. Access to a 321 | network may be denied when the modification itself materially and 322 | adversely affects the operation of the network or violates the rules and 323 | protocols for communication across the network. 324 | 325 | Corresponding Source conveyed, and Installation Information provided, 326 | in accord with this section must be in a format that is publicly 327 | documented (and with an implementation available to the public in 328 | source code form), and must require no special password or key for 329 | unpacking, reading or copying. 330 | 331 | 7. Additional Terms. 332 | 333 | "Additional permissions" are terms that supplement the terms of this 334 | License by making exceptions from one or more of its conditions. 335 | Additional permissions that are applicable to the entire Program shall 336 | be treated as though they were included in this License, to the extent 337 | that they are valid under applicable law. If additional permissions 338 | apply only to part of the Program, that part may be used separately 339 | under those permissions, but the entire Program remains governed by 340 | this License without regard to the additional permissions. 341 | 342 | When you convey a copy of a covered work, you may at your option 343 | remove any additional permissions from that copy, or from any part of 344 | it. (Additional permissions may be written to require their own 345 | removal in certain cases when you modify the work.) You may place 346 | additional permissions on material, added by you to a covered work, 347 | for which you have or can give appropriate copyright permission. 
348 | 349 | Notwithstanding any other provision of this License, for material you 350 | add to a covered work, you may (if authorized by the copyright holders of 351 | that material) supplement the terms of this License with terms: 352 | 353 | a) Disclaiming warranty or limiting liability differently from the 354 | terms of sections 15 and 16 of this License; or 355 | 356 | b) Requiring preservation of specified reasonable legal notices or 357 | author attributions in that material or in the Appropriate Legal 358 | Notices displayed by works containing it; or 359 | 360 | c) Prohibiting misrepresentation of the origin of that material, or 361 | requiring that modified versions of such material be marked in 362 | reasonable ways as different from the original version; or 363 | 364 | d) Limiting the use for publicity purposes of names of licensors or 365 | authors of the material; or 366 | 367 | e) Declining to grant rights under trademark law for use of some 368 | trade names, trademarks, or service marks; or 369 | 370 | f) Requiring indemnification of licensors and authors of that 371 | material by anyone who conveys the material (or modified versions of 372 | it) with contractual assumptions of liability to the recipient, for 373 | any liability that these contractual assumptions directly impose on 374 | those licensors and authors. 375 | 376 | All other non-permissive additional terms are considered "further 377 | restrictions" within the meaning of section 10. If the Program as you 378 | received it, or any part of it, contains a notice stating that it is 379 | governed by this License along with a term that is a further 380 | restriction, you may remove that term. If a license document contains 381 | a further restriction but permits relicensing or conveying under this 382 | License, you may add to a covered work material governed by the terms 383 | of that license document, provided that the further restriction does 384 | not survive such relicensing or conveying. 385 | 386 | If you add terms to a covered work in accord with this section, you 387 | must place, in the relevant source files, a statement of the 388 | additional terms that apply to those files, or a notice indicating 389 | where to find the applicable terms. 390 | 391 | Additional terms, permissive or non-permissive, may be stated in the 392 | form of a separately written license, or stated as exceptions; 393 | the above requirements apply either way. 394 | 395 | 8. Termination. 396 | 397 | You may not propagate or modify a covered work except as expressly 398 | provided under this License. Any attempt otherwise to propagate or 399 | modify it is void, and will automatically terminate your rights under 400 | this License (including any patent licenses granted under the third 401 | paragraph of section 11). 402 | 403 | However, if you cease all violation of this License, then your 404 | license from a particular copyright holder is reinstated (a) 405 | provisionally, unless and until the copyright holder explicitly and 406 | finally terminates your license, and (b) permanently, if the copyright 407 | holder fails to notify you of the violation by some reasonable means 408 | prior to 60 days after the cessation. 
409 | 410 | Moreover, your license from a particular copyright holder is 411 | reinstated permanently if the copyright holder notifies you of the 412 | violation by some reasonable means, this is the first time you have 413 | received notice of violation of this License (for any work) from that 414 | copyright holder, and you cure the violation prior to 30 days after 415 | your receipt of the notice. 416 | 417 | Termination of your rights under this section does not terminate the 418 | licenses of parties who have received copies or rights from you under 419 | this License. If your rights have been terminated and not permanently 420 | reinstated, you do not qualify to receive new licenses for the same 421 | material under section 10. 422 | 423 | 9. Acceptance Not Required for Having Copies. 424 | 425 | You are not required to accept this License in order to receive or 426 | run a copy of the Program. Ancillary propagation of a covered work 427 | occurring solely as a consequence of using peer-to-peer transmission 428 | to receive a copy likewise does not require acceptance. However, 429 | nothing other than this License grants you permission to propagate or 430 | modify any covered work. These actions infringe copyright if you do 431 | not accept this License. Therefore, by modifying or propagating a 432 | covered work, you indicate your acceptance of this License to do so. 433 | 434 | 10. Automatic Licensing of Downstream Recipients. 435 | 436 | Each time you convey a covered work, the recipient automatically 437 | receives a license from the original licensors, to run, modify and 438 | propagate that work, subject to this License. You are not responsible 439 | for enforcing compliance by third parties with this License. 440 | 441 | An "entity transaction" is a transaction transferring control of an 442 | organization, or substantially all assets of one, or subdividing an 443 | organization, or merging organizations. If propagation of a covered 444 | work results from an entity transaction, each party to that 445 | transaction who receives a copy of the work also receives whatever 446 | licenses to the work the party's predecessor in interest had or could 447 | give under the previous paragraph, plus a right to possession of the 448 | Corresponding Source of the work from the predecessor in interest, if 449 | the predecessor has it or can get it with reasonable efforts. 450 | 451 | You may not impose any further restrictions on the exercise of the 452 | rights granted or affirmed under this License. For example, you may 453 | not impose a license fee, royalty, or other charge for exercise of 454 | rights granted under this License, and you may not initiate litigation 455 | (including a cross-claim or counterclaim in a lawsuit) alleging that 456 | any patent claim is infringed by making, using, selling, offering for 457 | sale, or importing the Program or any portion of it. 458 | 459 | 11. Patents. 460 | 461 | A "contributor" is a copyright holder who authorizes use under this 462 | License of the Program or a work on which the Program is based. The 463 | work thus licensed is called the contributor's "contributor version". 
464 | 465 | A contributor's "essential patent claims" are all patent claims 466 | owned or controlled by the contributor, whether already acquired or 467 | hereafter acquired, that would be infringed by some manner, permitted 468 | by this License, of making, using, or selling its contributor version, 469 | but do not include claims that would be infringed only as a 470 | consequence of further modification of the contributor version. For 471 | purposes of this definition, "control" includes the right to grant 472 | patent sublicenses in a manner consistent with the requirements of 473 | this License. 474 | 475 | Each contributor grants you a non-exclusive, worldwide, royalty-free 476 | patent license under the contributor's essential patent claims, to 477 | make, use, sell, offer for sale, import and otherwise run, modify and 478 | propagate the contents of its contributor version. 479 | 480 | In the following three paragraphs, a "patent license" is any express 481 | agreement or commitment, however denominated, not to enforce a patent 482 | (such as an express permission to practice a patent or covenant not to 483 | sue for patent infringement). To "grant" such a patent license to a 484 | party means to make such an agreement or commitment not to enforce a 485 | patent against the party. 486 | 487 | If you convey a covered work, knowingly relying on a patent license, 488 | and the Corresponding Source of the work is not available for anyone 489 | to copy, free of charge and under the terms of this License, through a 490 | publicly available network server or other readily accessible means, 491 | then you must either (1) cause the Corresponding Source to be so 492 | available, or (2) arrange to deprive yourself of the benefit of the 493 | patent license for this particular work, or (3) arrange, in a manner 494 | consistent with the requirements of this License, to extend the patent 495 | license to downstream recipients. "Knowingly relying" means you have 496 | actual knowledge that, but for the patent license, your conveying the 497 | covered work in a country, or your recipient's use of the covered work 498 | in a country, would infringe one or more identifiable patents in that 499 | country that you have reason to believe are valid. 500 | 501 | If, pursuant to or in connection with a single transaction or 502 | arrangement, you convey, or propagate by procuring conveyance of, a 503 | covered work, and grant a patent license to some of the parties 504 | receiving the covered work authorizing them to use, propagate, modify 505 | or convey a specific copy of the covered work, then the patent license 506 | you grant is automatically extended to all recipients of the covered 507 | work and works based on it. 508 | 509 | A patent license is "discriminatory" if it does not include within 510 | the scope of its coverage, prohibits the exercise of, or is 511 | conditioned on the non-exercise of one or more of the rights that are 512 | specifically granted under this License. 
You may not convey a covered 513 | work if you are a party to an arrangement with a third party that is 514 | in the business of distributing software, under which you make payment 515 | to the third party based on the extent of your activity of conveying 516 | the work, and under which the third party grants, to any of the 517 | parties who would receive the covered work from you, a discriminatory 518 | patent license (a) in connection with copies of the covered work 519 | conveyed by you (or copies made from those copies), or (b) primarily 520 | for and in connection with specific products or compilations that 521 | contain the covered work, unless you entered into that arrangement, 522 | or that patent license was granted, prior to 28 March 2007. 523 | 524 | Nothing in this License shall be construed as excluding or limiting 525 | any implied license or other defenses to infringement that may 526 | otherwise be available to you under applicable patent law. 527 | 528 | 12. No Surrender of Others' Freedom. 529 | 530 | If conditions are imposed on you (whether by court order, agreement or 531 | otherwise) that contradict the conditions of this License, they do not 532 | excuse you from the conditions of this License. If you cannot convey a 533 | covered work so as to satisfy simultaneously your obligations under this 534 | License and any other pertinent obligations, then as a consequence you may 535 | not convey it at all. For example, if you agree to terms that obligate you 536 | to collect a royalty for further conveying from those to whom you convey 537 | the Program, the only way you could satisfy both those terms and this 538 | License would be to refrain entirely from conveying the Program. 539 | 540 | 13. Remote Network Interaction; Use with the GNU General Public License. 541 | 542 | Notwithstanding any other provision of this License, if you modify the 543 | Program, your modified version must prominently offer all users 544 | interacting with it remotely through a computer network (if your version 545 | supports such interaction) an opportunity to receive the Corresponding 546 | Source of your version by providing access to the Corresponding Source 547 | from a network server at no charge, through some standard or customary 548 | means of facilitating copying of software. This Corresponding Source 549 | shall include the Corresponding Source for any work covered by version 3 550 | of the GNU General Public License that is incorporated pursuant to the 551 | following paragraph. 552 | 553 | Notwithstanding any other provision of this License, you have 554 | permission to link or combine any covered work with a work licensed 555 | under version 3 of the GNU General Public License into a single 556 | combined work, and to convey the resulting work. The terms of this 557 | License will continue to apply to the part which is the covered work, 558 | but the work with which it is combined will remain governed by version 559 | 3 of the GNU General Public License. 560 | 561 | 14. Revised Versions of this License. 562 | 563 | The Free Software Foundation may publish revised and/or new versions of 564 | the GNU Affero General Public License from time to time. Such new versions 565 | will be similar in spirit to the present version, but may differ in detail to 566 | address new problems or concerns. 567 | 568 | Each version is given a distinguishing version number. 
If the 569 | Program specifies that a certain numbered version of the GNU Affero General 570 | Public License "or any later version" applies to it, you have the 571 | option of following the terms and conditions either of that numbered 572 | version or of any later version published by the Free Software 573 | Foundation. If the Program does not specify a version number of the 574 | GNU Affero General Public License, you may choose any version ever published 575 | by the Free Software Foundation. 576 | 577 | If the Program specifies that a proxy can decide which future 578 | versions of the GNU Affero General Public License can be used, that proxy's 579 | public statement of acceptance of a version permanently authorizes you 580 | to choose that version for the Program. 581 | 582 | Later license versions may give you additional or different 583 | permissions. However, no additional obligations are imposed on any 584 | author or copyright holder as a result of your choosing to follow a 585 | later version. 586 | 587 | 15. Disclaimer of Warranty. 588 | 589 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY 590 | APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT 591 | HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY 592 | OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, 593 | THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR 594 | PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM 595 | IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF 596 | ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 597 | 598 | 16. Limitation of Liability. 599 | 600 | IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING 601 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS 602 | THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY 603 | GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE 604 | USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF 605 | DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD 606 | PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), 607 | EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF 608 | SUCH DAMAGES. 609 | 610 | 17. Interpretation of Sections 15 and 16. 611 | 612 | If the disclaimer of warranty and limitation of liability provided 613 | above cannot be given local legal effect according to their terms, 614 | reviewing courts shall apply local law that most closely approximates 615 | an absolute waiver of all civil liability in connection with the 616 | Program, unless a warranty or assumption of liability accompanies a 617 | copy of the Program in return for a fee. 618 | 619 | END OF TERMS AND CONDITIONS 620 | 621 | How to Apply These Terms to Your New Programs 622 | 623 | If you develop a new program, and you want it to be of the greatest 624 | possible use to the public, the best way to achieve this is to make it 625 | free software which everyone can redistribute and change under these terms. 626 | 627 | To do so, attach the following notices to the program. It is safest 628 | to attach them to the start of each source file to most effectively 629 | state the exclusion of warranty; and each file should have at least 630 | the "copyright" line and a pointer to where the full notice is found. 
631 | 632 | <one line to give the program's name and a brief idea of what it does.> 633 | Copyright (C) <year> <name of author> 634 | 635 | This program is free software: you can redistribute it and/or modify 636 | it under the terms of the GNU Affero General Public License as published by 637 | the Free Software Foundation, either version 3 of the License, or 638 | (at your option) any later version. 639 | 640 | This program is distributed in the hope that it will be useful, 641 | but WITHOUT ANY WARRANTY; without even the implied warranty of 642 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 643 | GNU Affero General Public License for more details. 644 | 645 | You should have received a copy of the GNU Affero General Public License 646 | along with this program. If not, see <https://www.gnu.org/licenses/>. 647 | 648 | Also add information on how to contact you by electronic and paper mail. 649 | 650 | If your software can interact with users remotely through a computer 651 | network, you should also make sure that it provides a way for users to 652 | get its source. For example, if your program is a web application, its 653 | interface could display a "Source" link that leads users to an archive 654 | of the code. There are many ways you could offer source, and different 655 | solutions will be better for different programs; see section 13 for the 656 | specific requirements. 657 | 658 | You should also get your employer (if you work as a programmer) or school, 659 | if any, to sign a "copyright disclaimer" for the program, if necessary. 660 | For more information on this, and how to apply and follow the GNU AGPL, see 661 | <https://www.gnu.org/licenses/>. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # 🐉 aSOL: Aggregate Stake Pool 2 | 3 | [![License](https://img.shields.io/badge/license-AGPL%203.0-blue)](https://github.com/aSolHQ/asol/blob/master/LICENSE) 4 | [![Build Status](https://img.shields.io/github/workflow/status/aSolHQ/asol/E2E/master)](https://github.com/aSolHQ/asol/actions/workflows/programs-e2e.yml?query=branch%3Amaster) 5 | [![Contributors](https://img.shields.io/github/contributors/aSolHQ/asol)](https://github.com/aSolHQ/asol/graphs/contributors) 6 | 7 | ![aSOL](/images/banner.jpeg) 8 | 9 | aSOL is an unbiased stake pool aggregator built to tackle one goal: to ensure all SOL on Solana is staked into stake pools. 10 | 11 | We're in active development. For the latest updates, please join our community: 12 | 13 | - Twitter: https://twitter.com/aSOLprotocol 14 | 15 | ## Packages 16 | 17 | | Package | Description | Version | Docs | 18 | | :------------ | :--------------------------------- | :------------------------------------------------------------------------------------------------ | :---------------------------------------------------------------------------------- | 19 | | `asol` | aSOL aggregate staked SOL token | [![Crates.io](https://img.shields.io/crates/v/asol)](https://crates.io/crates/asol) | [![Docs.rs](https://docs.rs/asol/badge.svg)](https://docs.rs/asol) | 20 | | `lido-anchor` | Lido stake pool Anchor client | [![Crates.io](https://img.shields.io/crates/v/lido-anchor)](https://crates.io/crates/lido-anchor) | [![Docs.rs](https://docs.rs/lido-anchor/badge.svg)](https://docs.rs/lido-anchor) | 21 | | `marinade` | Marinade stake pool Anchor client 
| [![Crates.io](https://img.shields.io/crates/v/marinade)](https://crates.io/crates/marinade) | [![Docs.rs](https://docs.rs/marinade/badge.svg)](https://docs.rs/marinade) | 22 | | `@asolhq/sdk` | TypeScript SDK for aSOL | [![npm](https://img.shields.io/npm/v/@asolhq/sdk.svg)](https://www.npmjs.com/package/@asolhq/sdk) | [![Docs](https://img.shields.io/badge/docs-typedoc-blue)](https://docs.asol.so/ts/) | 23 | 24 | ## Note 25 | 26 | - **aSOL is in active development, so all APIs are subject to change.** 27 | - **This code is unaudited. Use at your own risk.** 28 | 29 | ## Contribution 30 | 31 | Thank you for your interest in contributing to aSOL Protocol! All contributions are welcome, no 32 | matter how big or small. This includes (but is not limited to) filing issues, 33 | adding documentation, fixing bugs, creating examples, and implementing features. 34 | 35 | If you'd like to contribute, please claim an issue by commenting, forking, and 36 | opening a pull request, even if empty. This allows the maintainers to track who 37 | is working on what issue so as not to overlap work. 38 | 39 | For simple documentation changes, feel free to just open a pull request. 40 | 41 | If you're considering larger changes or self-motivated features, please file an issue 42 | and engage with the maintainers by messaging them on [Keybase](https://keybase.io/0xaurelion). 43 | 44 | ## License 45 | 46 | aSOL Protocol is licensed under [the Affero GPL 3.0 license](/LICENSE.txt). 47 | 48 | Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in aSOL Protocol by you, as defined in the AGPL-3.0 license, shall be licensed as above, without any additional terms or conditions. 49 | -------------------------------------------------------------------------------- /ci.nix: -------------------------------------------------------------------------------- 1 | { pkgs }: 2 | pkgs.buildEnv { 3 | name = "ci"; 4 | paths = with pkgs; 5 | (pkgs.lib.optionals pkgs.stdenv.isLinux [ libudev ]) ++ [ 6 | anchor 7 | 8 | nodejs 9 | yarn 10 | python3 11 | 12 | pkgconfig 13 | openssl 14 | jq 15 | gnused 16 | 17 | libiconv 18 | ] ++ (pkgs.lib.optionals pkgs.stdenv.isDarwin [ 19 | pkgs.darwin.apple_sdk.frameworks.AppKit 20 | pkgs.darwin.apple_sdk.frameworks.IOKit 21 | pkgs.darwin.apple_sdk.frameworks.Foundation 22 | ]); 23 | } 24 | -------------------------------------------------------------------------------- /flake.lock: -------------------------------------------------------------------------------- 1 | { 2 | "nodes": { 3 | "flake-utils": { 4 | "locked": { 5 | "lastModified": 1634851050, 6 | "narHash": "sha256-N83GlSGPJJdcqhUxSCS/WwW5pksYf3VP1M13cDRTSVA=", 7 | "owner": "numtide", 8 | "repo": "flake-utils", 9 | "rev": "c91f3de5adaf1de973b797ef7485e441a65b8935", 10 | "type": "github" 11 | }, 12 | "original": { 13 | "owner": "numtide", 14 | "repo": "flake-utils", 15 | "type": "github" 16 | } 17 | }, 18 | "flake-utils_2": { 19 | "locked": { 20 | "lastModified": 1631561581, 21 | "narHash": "sha256-3VQMV5zvxaVLvqqUrNz3iJelLw30mIVSfZmAaauM3dA=", 22 | "owner": "numtide", 23 | "repo": "flake-utils", 24 | "rev": "7e5bf3925f6fbdfaf50a2a7ca0be2879c4261d19", 25 | "type": "github" 26 | }, 27 | "original": { 28 | "owner": "numtide", 29 | "repo": "flake-utils", 30 | "type": "github" 31 | } 32 | }, 33 | "flake-utils_3": { 34 | "locked": { 35 | "lastModified": 1623875721, 36 | "narHash": "sha256-A8BU7bjS5GirpAUv4QA+QnJ4CceLHkcXdRp4xITDB0s=", 37 | "owner": "numtide", 38 | "repo": "flake-utils", 39 | "rev": 
"f7e004a55b120c02ecb6219596820fcd32ca8772", 40 | "type": "github" 41 | }, 42 | "original": { 43 | "owner": "numtide", 44 | "repo": "flake-utils", 45 | "type": "github" 46 | } 47 | }, 48 | "nixpkgs": { 49 | "locked": { 50 | "lastModified": 1633422745, 51 | "narHash": "sha256-gA6Ok64nPbkjHk3Oanq4641EeYkjcKhisDF9wBjLxEk=", 52 | "owner": "NixOS", 53 | "repo": "nixpkgs", 54 | "rev": "8e1eab9eae4278c9bb1dcae426848a581943db5a", 55 | "type": "github" 56 | }, 57 | "original": { 58 | "owner": "NixOS", 59 | "ref": "nixpkgs-unstable", 60 | "repo": "nixpkgs", 61 | "type": "github" 62 | } 63 | }, 64 | "root": { 65 | "inputs": { 66 | "flake-utils": "flake-utils", 67 | "nixpkgs": "nixpkgs", 68 | "saber-overlay": "saber-overlay" 69 | } 70 | }, 71 | "rust-overlay": { 72 | "inputs": { 73 | "flake-utils": "flake-utils_3", 74 | "nixpkgs": [ 75 | "nixpkgs" 76 | ] 77 | }, 78 | "locked": { 79 | "lastModified": 1633400100, 80 | "narHash": "sha256-kHQV7jZ2vVHVI9sfda1mUROVBbQbdfKcbIpKG9WdqGo=", 81 | "owner": "oxalica", 82 | "repo": "rust-overlay", 83 | "rev": "9c2fc6a62ccbc6f420d71ecac6bf0b84dbbee64f", 84 | "type": "github" 85 | }, 86 | "original": { 87 | "owner": "oxalica", 88 | "repo": "rust-overlay", 89 | "type": "github" 90 | } 91 | }, 92 | "saber-overlay": { 93 | "inputs": { 94 | "flake-utils": "flake-utils_2", 95 | "nixpkgs": [ 96 | "nixpkgs" 97 | ], 98 | "rust-overlay": "rust-overlay" 99 | }, 100 | "locked": { 101 | "lastModified": 1633959992, 102 | "narHash": "sha256-VF/KdIv+f0hnYJuk8ulHEpmykELPkhdcSpMBlL034s8=", 103 | "owner": "saber-hq", 104 | "repo": "saber-overlay", 105 | "rev": "b0f81e0dff5c14b0cca61436eff914d32a4e5dcb", 106 | "type": "github" 107 | }, 108 | "original": { 109 | "owner": "saber-hq", 110 | "repo": "saber-overlay", 111 | "type": "github" 112 | } 113 | } 114 | }, 115 | "root": "root", 116 | "version": 7 117 | } 118 | -------------------------------------------------------------------------------- /flake.nix: -------------------------------------------------------------------------------- 1 | { 2 | description = "aSOL development environment."; 3 | 4 | inputs = { 5 | nixpkgs.url = "github:NixOS/nixpkgs/nixpkgs-unstable"; 6 | saber-overlay.url = "github:saber-hq/saber-overlay"; 7 | saber-overlay.inputs.nixpkgs.follows = "nixpkgs"; 8 | flake-utils.url = "github:numtide/flake-utils"; 9 | }; 10 | 11 | outputs = { self, nixpkgs, saber-overlay, flake-utils }: 12 | flake-utils.lib.eachSystem [ 13 | "aarch64-darwin" 14 | "x86_64-linux" 15 | "x86_64-darwin" 16 | ] (system: 17 | let 18 | pkgs = import nixpkgs { 19 | inherit system; 20 | overlays = [ saber-overlay.overlay ]; 21 | }; 22 | in { 23 | devShell = import ./shell.nix { inherit pkgs; }; 24 | packages.ci = import ./ci.nix { inherit pkgs; }; 25 | }); 26 | } 27 | -------------------------------------------------------------------------------- /images/banner.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aSolHQ/asol/efa922709be60176c3caca4fa12844badf71dfd5/images/banner.jpeg -------------------------------------------------------------------------------- /lib/lido/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "lido-anchor" 3 | version = "0.1.1" 4 | description = "Lido stake pool client" 5 | edition = "2018" 6 | homepage = "https://asol.so" 7 | repository = "https://github.com/aSolHQ/asol" 8 | authors = ["0xAurelion "] 9 | license = "AGPL-3.0" 10 | keywords = ["solana", "stake-pool"] 11 | 12 | [lib] 13 | 
crate-type = ["cdylib", "lib"] 14 | name = "lido_anchor" 15 | 16 | [features] 17 | no-entrypoint = [] 18 | no-idl = [] 19 | cpi = ["no-entrypoint"] 20 | default = [] 21 | 22 | [dependencies] 23 | anchor-lang = "0.17.0" 24 | borsh = "0.9.1" 25 | lido = { version = "=1.1.0-patch.1", features = ["no-entrypoint"] } 26 | -------------------------------------------------------------------------------- /lib/lido/src/lib.rs: -------------------------------------------------------------------------------- 1 | //! Wrapper around the [lido] program. 2 | use std::ops::Deref; 3 | 4 | use anchor_lang::prelude::*; 5 | 6 | declare_id!("CrX7kMhLC3cSsXJdT7JDgqrRVWGnUpX3gfEfxxU2NVLi"); 7 | 8 | mod solido_account { 9 | use anchor_lang::declare_id; 10 | 11 | declare_id!("49Yi1TKkNyYjPAFdR9LBvoHcUjuPX4Df5T5yv39w2XTn"); 12 | } 13 | 14 | /// Solido account 15 | pub static SOLIDO_ACCOUNT: Pubkey = solido_account::ID; 16 | 17 | #[derive(Clone, Debug, Default)] 18 | pub struct Lido(lido::state::Lido); 19 | 20 | impl Owner for Lido { 21 | fn owner() -> Pubkey { 22 | crate::ID 23 | } 24 | } 25 | 26 | impl Deref for Lido { 27 | type Target = lido::state::Lido; 28 | 29 | fn deref(&self) -> &Self::Target { 30 | &self.0 31 | } 32 | } 33 | 34 | impl AccountSerialize for Lido { 35 | fn try_serialize<W: std::io::Write>(&self, writer: &mut W) -> Result<(), ProgramError> { 36 | AnchorSerialize::serialize(&self.0, writer).map_err(|_| ProgramError::InvalidAccountData) 37 | } 38 | } 39 | 40 | impl AccountDeserialize for Lido { 41 | fn try_deserialize(buf: &mut &[u8]) -> Result<Self, ProgramError> { 42 | Self::try_deserialize_unchecked(buf) 43 | } 44 | 45 | fn try_deserialize_unchecked(buf: &mut &[u8]) -> Result<Self, ProgramError> { 46 | let result: lido::state::Lido = AnchorDeserialize::deserialize(buf)?; 47 | Ok(Lido(result)) 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /lib/marinade/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "marinade" 3 | version = "0.1.1" 4 | description = "Marinade stake pool Anchor client." 5 | edition = "2018" 6 | homepage = "https://asol.so" 7 | repository = "https://github.com/aSolHQ/asol" 8 | authors = ["0xAurelion "] 9 | license = "AGPL-3.0" 10 | keywords = ["solana", "stake-pool"] 11 | 12 | [lib] 13 | crate-type = ["cdylib", "lib"] 14 | name = "marinade" 15 | 16 | [features] 17 | no-entrypoint = [] 18 | no-idl = [] 19 | cpi = ["no-entrypoint"] 20 | default = [] 21 | 22 | [dependencies] 23 | anchor-lang = "0.17.0" 24 | -------------------------------------------------------------------------------- /lib/marinade/src/lib.rs: -------------------------------------------------------------------------------- 1 | //! Derived from . 2 | use anchor_lang::prelude::*; 3 | 4 | declare_id!("MarBmsSgKXdrN1egZf5sqe1TMai9K1rChYNDJgjq7aD"); 5 | 6 | /// Marinade main state account. 7 | /// See: . 
8 | pub mod main_state { 9 | use anchor_lang::declare_id; 10 | 11 | declare_id!("8szGkuLTAux9XMgZ2vtY39jVSowEcpBfFfD8hXSEqdGC"); 12 | } 13 | 14 | #[program] 15 | #[allow(deprecated)] 16 | pub mod marinade { 17 | use super::*; 18 | 19 | #[state] 20 | #[derive(Default)] 21 | pub struct State { 22 | pub msol_mint: Pubkey, 23 | pub admin_authority: Pubkey, 24 | pub operational_sol_account: Pubkey, 25 | pub treasury_msol_account: Pubkey, 26 | pub reserve_bump_seed: u8, 27 | pub msol_mint_authority_bump_seed: u8, 28 | pub rent_exempt_for_token_acc: u64, 29 | pub reward_fee: Fee, 30 | pub stake_system: StakeSystem, 31 | pub validator_system: ValidatorSystem, 32 | pub liq_pool: LiqPool, 33 | pub available_reserve_balance: u64, 34 | pub msol_supply: u64, 35 | pub msol_price: u64, 36 | pub circulating_ticket_count: u64, 37 | pub circulating_ticket_balance: u64, 38 | pub lent_from_reserve: u64, 39 | pub min_deposit: u64, 40 | pub min_withdraw: u64, 41 | pub staking_sol_cap: u64, 42 | pub emergency_cooling_down: u64, 43 | } 44 | } 45 | 46 | #[account] 47 | #[derive(Default)] 48 | pub struct State { 49 | pub msol_mint: Pubkey, 50 | pub admin_authority: Pubkey, 51 | pub operational_sol_account: Pubkey, 52 | pub treasury_msol_account: Pubkey, 53 | pub reserve_bump_seed: u8, 54 | pub msol_mint_authority_bump_seed: u8, 55 | pub rent_exempt_for_token_acc: u64, 56 | pub reward_fee: Fee, 57 | pub stake_system: StakeSystem, 58 | pub validator_system: ValidatorSystem, 59 | pub liq_pool: LiqPool, 60 | pub available_reserve_balance: u64, 61 | pub msol_supply: u64, 62 | pub msol_price: u64, 63 | pub circulating_ticket_count: u64, 64 | pub circulating_ticket_balance: u64, 65 | pub lent_from_reserve: u64, 66 | pub min_deposit: u64, 67 | pub min_withdraw: u64, 68 | pub staking_sol_cap: u64, 69 | pub emergency_cooling_down: u64, 70 | } 71 | 72 | #[derive(AnchorSerialize, AnchorDeserialize, Default, Debug, Clone, Copy)] 73 | pub struct Fee { 74 | pub basis_points: u32, 75 | } 76 | 77 | #[derive(AnchorSerialize, AnchorDeserialize, Default, Debug, Clone, Copy)] 78 | pub struct LiqPool { 79 | pub lp_mint: Pubkey, 80 | pub lp_mint_authority_bump_seed: u8, 81 | pub sol_leg_bump_seed: u8, 82 | pub msol_leg_authority_bump_seed: u8, 83 | pub msol_leg: Pubkey, 84 | pub lp_liquidity_target: u64, 85 | pub lp_max_fee: Fee, 86 | pub lp_min_fee: Fee, 87 | pub treasury_cut: Fee, 88 | pub lp_supply: u64, 89 | pub lent_from_sol_leg: u64, 90 | pub liquidity_sol_cap: u64, 91 | } 92 | 93 | #[derive(AnchorSerialize, AnchorDeserialize, Default, Debug, Clone, Copy)] 94 | pub struct StakeSystem { 95 | pub stake_list: List, 96 | pub delayed_unstake_cooling_down: u64, 97 | pub stake_deposit_bump_seed: u8, 98 | pub stake_withdraw_bump_seed: u8, 99 | pub slots_for_stake_delta: u64, 100 | pub last_stake_delta_epoch: u64, 101 | pub min_stake: u64, 102 | pub extra_stake_delta_runs: u32, 103 | } 104 | 105 | #[derive(AnchorSerialize, AnchorDeserialize, Default, Debug, Clone, Copy)] 106 | pub struct ValidatorSystem { 107 | pub validator_list: List, 108 | pub manager_authority: Pubkey, 109 | pub total_validator_score: u32, 110 | pub total_active_balance: u64, 111 | pub auto_add_validator_enabled: u8, 112 | } 113 | 114 | #[derive(AnchorSerialize, AnchorDeserialize, Default, Debug, Clone, Copy)] 115 | pub struct List { 116 | pub account: Pubkey, 117 | pub item_size: u32, 118 | pub count: u32, 119 | pub new_account: Pubkey, 120 | pub copied_count: u32, 121 | } 122 | -------------------------------------------------------------------------------- 
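For illustration, here is a minimal, self-contained sketch (not a file in this repository) of how the `State::msol_price` field above can be interpreted: Marinade stores the mSOL/SOL price as a 32.32 fixed-point number, so converting an mSOL amount to SOL multiplies by `msol_price` and divides by 2^32, which is the same math `programs/asol/src/accounting.rs` performs below. The helper name and the price constant here are hypothetical.

/// Converts an mSOL amount to its SOL value, assuming `msol_price` is the
/// 32.32 fixed-point mSOL/SOL price stored in Marinade's `State`.
/// Returns `None` on arithmetic overflow.
fn msol_to_sol(amount: u64, msol_price: u64) -> Option<u64> {
    (amount as u128)
        .checked_mul(msol_price as u128)?
        .checked_div(1u128 << 32)
        .and_then(|v| u64::try_from(v).ok())
}

fn main() {
    // Hypothetical price: 1 mSOL = 1.05 SOL, i.e. msol_price ~= 1.05 * 2^32.
    let msol_price = 105 * (1u64 << 32) / 100;
    // 1 mSOL (1e9 lamports) is worth ~1.05 SOL, modulo fixed-point truncation.
    assert_eq!(msol_to_sol(1_000_000_000, msol_price), Some(1_049_999_999));
}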
/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@asolhq/sdk", 3 | "version": "0.1.2", 4 | "description": "Aggregate of Solana stake pools.", 5 | "main": "dist/cjs/index.js", 6 | "module": "dist/esm/index.js", 7 | "repository": "https://github.com/aSolHQ/asol", 8 | "author": "0xAurelion ", 9 | "bugs": { 10 | "url": "https://github.com/aSolHQ/asol/issues", 11 | "email": "team@asol.so" 12 | }, 13 | "publishConfig": { 14 | "access": "public" 15 | }, 16 | "homepage": "https://asol.so", 17 | "license": "AGPL-3.0", 18 | "devDependencies": { 19 | "@crateprotocol/crate-sdk": "^0.3.0", 20 | "@project-serum/anchor": "^0.18.0", 21 | "@rushstack/eslint-patch": "^1.0.9", 22 | "@saberhq/anchor-contrib": "^1.6.2", 23 | "@saberhq/chai-solana": "^1.6.2", 24 | "@saberhq/eslint-config": "^1.6.2", 25 | "@saberhq/solana-contrib": "^1.6.2", 26 | "@saberhq/token-utils": "^1.6.2", 27 | "@solana/spl-token-registry": "^0.2.287", 28 | "@solana/web3.js": "^1.30.2", 29 | "@types/bn.js": "^5.1.0", 30 | "@types/mocha": "^9.0.0", 31 | "@types/node": "^16.11.6", 32 | "@types/prettier": "^2.4.1", 33 | "bn.js": "^5.2.0", 34 | "chai": "^4.3.4", 35 | "eslint": "^7.32.0", 36 | "eslint-import-resolver-node": "^0.3.6", 37 | "eslint-plugin-import": "^2.25.2", 38 | "husky": "^7.0.4", 39 | "lerna": "^4.0.0", 40 | "lint-staged": "^11.2.6", 41 | "mocha": "^9.1.3", 42 | "prettier": "^2.4.1", 43 | "ts-node": "^10.4.0", 44 | "typedoc": "^0.22.7", 45 | "typescript": "^4.4.4" 46 | }, 47 | "scripts": { 48 | "build": "rm -fr dist/ && tsc -P tsconfig.build.json && tsc -P tsconfig.esm.json", 49 | "docs:generate": "typedoc --excludePrivate --includeVersion --out site/ts/ src/index.ts", 50 | "typecheck": "tsc", 51 | "idl:generate": "./scripts/parse-idls.sh && ./scripts/generate-idl-types.sh", 52 | "idl:generate:nolint": "./scripts/parse-idls.sh && RUN_ESLINT=none ./scripts/generate-idl-types.sh", 53 | "lint": "eslint . --cache", 54 | "test:e2e": "anchor test --skip-build tests/*.ts", 55 | "prepare": "husky install" 56 | }, 57 | "peerDependencies": { 58 | "@crateprotocol/crate-sdk": "^0.3.0", 59 | "@project-serum/anchor": "^0.17.1-beta.1", 60 | "@saberhq/anchor-contrib": "^1.3.18", 61 | "@saberhq/solana-contrib": "^1.3.18", 62 | "@saberhq/token-utils": "^1.3.18", 63 | "@solana/web3.js": "^1.29.2" 64 | }, 65 | "packageManager": "yarn@3.0.2", 66 | "dependencies": { 67 | "tiny-invariant": "^1.2.0", 68 | "tslib": "^2.3.1" 69 | }, 70 | "lint-staged": { 71 | "*.ts": "eslint --cache --fix", 72 | "*.{md,json,js,yml,yaml}": "prettier --write" 73 | }, 74 | "files": [ 75 | "dist/", 76 | "src/" 77 | ] 78 | } 79 | -------------------------------------------------------------------------------- /programs/asol/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "asol" 3 | version = "0.1.2" 4 | description = "aSOL: an aggregated Solana stake pool." 
5 | edition = "2018" 6 | homepage = "https://asol.so" 7 | repository = "https://github.com/aSolHQ/asol" 8 | authors = ["0xAurelion "] 9 | license = "AGPL-3.0" 10 | keywords = ["solana", "stake-pool"] 11 | 12 | [lib] 13 | crate-type = ["cdylib", "lib"] 14 | name = "asol" 15 | 16 | [features] 17 | no-entrypoint = [] 18 | no-idl = [] 19 | cpi = ["no-entrypoint"] 20 | default = [] 21 | 22 | [dependencies] 23 | anchor-lang = "0.17.0" 24 | anchor-spl = "0.17.0" 25 | base64 = "0.13.0" 26 | crate-token = { version = "0.3.0", features = ["cpi"] } 27 | crate-redeem-in-kind = { version = "0.3.0", features = ["cpi"] } 28 | lido-anchor = { path = "../../lib/lido", version = "0.1.0" } 29 | marinade = { path = "../../lib/marinade", version = "0.1.0", features = [ 30 | "cpi" 31 | ] } 32 | num-traits = "0.2" 33 | vipers = "1.4.0" 34 | -------------------------------------------------------------------------------- /programs/asol/README.md: -------------------------------------------------------------------------------- 1 | # `asol` 2 | 3 | [![Crates.io](https://img.shields.io/crates/v/asol)](https://crates.io/crates/asol) 4 | [![Docs.rs](https://docs.rs/asol/badge.svg)](https://docs.rs/asol) 5 | [![License](https://img.shields.io/badge/license-AGPL%203.0-blue)](https://github.com/aSolHQ/asol/blob/master/LICENSE) 6 | 7 | An aggregate of staked SOL pools. 8 | 9 | ## Addresses 10 | 11 | The program address is the same on devnet, testnet, and mainnet-beta. 12 | 13 | Program Address: [`AURUqAcTZP8mhR6sWVxWyfBbpJRj4A3qqeFzLNhrwayE`](https://explorer.solana.com/address/AURUqAcTZP8mhR6sWVxWyfBbpJRj4A3qqeFzLNhrwayE) 14 | -------------------------------------------------------------------------------- /programs/asol/Xargo.toml: -------------------------------------------------------------------------------- 1 | [target.bpfel-unknown-unknown.dependencies.std] 2 | features = [] 3 | -------------------------------------------------------------------------------- /programs/asol/src/account_validators.rs: -------------------------------------------------------------------------------- 1 | //! 
Validate accounts 2 | 3 | use anchor_lang::prelude::*; 4 | use vipers::{assert_keys, invariant}; 5 | 6 | use crate::{ 7 | stake_pool_mints, AddStakePool, MintASol, NewAggregate, SetCurator, SyncAll, SyncAndMint, 8 | SyncLido, SyncMarinade, LAMPORTS_DECIMALS, 9 | }; 10 | use vipers::validate::Validate; 11 | 12 | impl<'info> Validate<'info> for NewAggregate<'info> { 13 | fn validate(&self) -> ProgramResult { 14 | assert_keys!( 15 | self.redeem_in_kind, 16 | crate_redeem_in_kind::WITHDRAW_AUTHORITY_ADDRESS, 17 | "redeem_in_kind" 18 | ); 19 | invariant!(self.crate_mint.supply == 0, "supply must be zero"); 20 | invariant!( 21 | self.crate_mint.decimals == LAMPORTS_DECIMALS, 22 | "decimals should be 9" 23 | ); 24 | Ok(()) 25 | } 26 | } 27 | 28 | impl<'info> Validate<'info> for AddStakePool<'info> { 29 | fn validate(&self) -> ProgramResult { 30 | require!( 31 | self.curator.key() == self.aggregate.curator, 32 | UnauthorizedNotCurator 33 | ); 34 | require!( 35 | !self 36 | .aggregate 37 | .stake_pools 38 | .iter() 39 | .any(|pool| pool.mint == self.mint.key()), 40 | PoolAlreadyAdded 41 | ); 42 | Ok(()) 43 | } 44 | } 45 | 46 | impl<'info> Validate<'info> for SetCurator<'info> { 47 | fn validate(&self) -> ProgramResult { 48 | require!( 49 | self.aggregate.curator_setter == self.curator_setter.key(), 50 | UnauthorizedNotCuratorSetter 51 | ); 52 | Ok(()) 53 | } 54 | } 55 | 56 | impl<'info> Validate<'info> for SyncAll<'info> { 57 | fn validate(&self) -> ProgramResult { 58 | self.lido.validate()?; 59 | self.marinade.validate()?; 60 | Ok(()) 61 | } 62 | } 63 | 64 | impl<'info> Validate<'info> for SyncLido<'info> { 65 | fn validate(&self) -> ProgramResult { 66 | assert_keys!(*self.lido, lido_anchor::SOLIDO_ACCOUNT, "lido"); 67 | // redundant since it's already validated by being in the list 68 | assert_keys!( 69 | self.lido_stake_pool_tokens.mint, 70 | stake_pool_mints::lido_stsol::ID, 71 | "lido_stake_pool_tokens.mint" 72 | ); 73 | Ok(()) 74 | } 75 | } 76 | 77 | impl<'info> Validate<'info> for SyncMarinade<'info> { 78 | fn validate(&self) -> ProgramResult { 79 | assert_keys!(*self.marinade, marinade::main_state::ID, "marinade"); 80 | // redundant since it's already validated by being in the list 81 | assert_keys!( 82 | self.marinade_stake_pool_tokens.mint, 83 | stake_pool_mints::marinade_msol::ID, 84 | "marinade_stake_pool_tokens.mint" 85 | ); 86 | Ok(()) 87 | } 88 | } 89 | 90 | impl<'info> Validate<'info> for MintASol<'info> { 91 | fn validate(&self) -> ProgramResult { 92 | assert_keys!( 93 | self.stake_pool.aggregate, 94 | self.aggregate, 95 | "stake_pool.aggregate" 96 | ); 97 | assert_keys!( 98 | self.stake_pool_tokens.mint, 99 | self.stake_pool.mint, 100 | "stake_pool_tokens.mint", 101 | ); 102 | assert_keys!( 103 | *self.crate_token, 104 | self.stake_pool_tokens.owner, 105 | "crate_token should be stake_pool_tokens.owner" 106 | ); 107 | assert_keys!(self.crate_token.mint, *self.crate_mint, "crate_token.mint"); 108 | assert_keys!( 109 | self.mint_destination.mint, 110 | self.crate_token.mint, 111 | "mint_destination.mint" 112 | ); 113 | 114 | assert_keys!( 115 | self.depositor_source.mint, 116 | self.stake_pool_tokens.mint, 117 | "depositor_source.mint" 118 | ); 119 | assert_keys!( 120 | self.depositor_source.owner, 121 | self.depositor, 122 | "depositor_source.owner" 123 | ); 124 | Ok(()) 125 | } 126 | } 127 | 128 | impl<'info> Validate<'info> for SyncAndMint<'info> { 129 | fn validate(&self) -> ProgramResult { 130 | self.sync.validate()?; 131 | self.mint_asol.validate()?; 132 | Ok(()) 133 | } 134 | 
} 135 | -------------------------------------------------------------------------------- /programs/asol/src/accounting.rs: -------------------------------------------------------------------------------- 1 | use anchor_lang::prelude::*; 2 | use anchor_spl::token::TokenAccount; 3 | use num_traits::cast::ToPrimitive; 4 | use vipers::unwrap_int; 5 | 6 | use crate::{state::AccountingMethod, SyncLido, SyncMarinade, SOL}; 7 | 8 | /// Can account for the amount of SOL in a stake pool. 9 | pub trait Accountant<'info> { 10 | /// The accounting method to use for the stake pool. 11 | const METHOD: AccountingMethod; 12 | 13 | /// Calculates the value of the stake pool token amount in SOL. 14 | fn sol_value(&self, amount: u64) -> Result<SOL, ProgramError>; 15 | 16 | /// Gets the [TokenAccount] of stake pool tokens associated with the Crate. 17 | fn crate_reserves(&self) -> &TokenAccount; 18 | } 19 | 20 | impl<'info> Accountant<'info> for SyncMarinade<'info> { 21 | const METHOD: AccountingMethod = AccountingMethod::Marinade; 22 | 23 | fn sol_value(&self, amount: u64) -> Result<SOL, ProgramError> { 24 | let msol_price: u64 = self.marinade.msol_price; // 32.32 fixed-point mSOL/SOL price: SOL value = amount * msol_price / 2^32 25 | let sol_value = unwrap_int!((amount as u128) 26 | .checked_mul(msol_price.into()) 27 | .and_then(|v| v.checked_div(0x1_0000_0000_u128)) 28 | .and_then(|v| v.to_u64())); 29 | Ok(SOL::from(sol_value)) 30 | } 31 | 32 | fn crate_reserves(&self) -> &TokenAccount { 33 | &self.marinade_stake_pool_tokens 34 | } 35 | } 36 | 37 | impl<'info> Accountant<'info> for SyncLido<'info> { 38 | const METHOD: AccountingMethod = AccountingMethod::Lido; 39 | 40 | fn sol_value(&self, amount: u64) -> Result<SOL, ProgramError> { 41 | let lido = &*self.lido; // stSOL price is the ratio of Lido's SOL balance to its stSOL supply 42 | let sol_value = unwrap_int!((amount as u128) 43 | .checked_mul(lido.exchange_rate.sol_balance.0.into()) 44 | .and_then(|v| v.checked_div(lido.exchange_rate.st_sol_supply.0.into())) 45 | .and_then(|v| v.to_u64())); 46 | Ok(SOL::from(sol_value)) 47 | } 48 | 49 | fn crate_reserves(&self) -> &TokenAccount { 50 | &self.lido_stake_pool_tokens 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /programs/asol/src/events.rs: -------------------------------------------------------------------------------- 1 | //! Crate events 2 | 3 | use anchor_lang::prelude::*; 4 | 5 | use crate::*; 6 | 7 | /// Emitted when an [Aggregate] is created. 8 | #[event] 9 | pub struct NewAggregateEvent { 10 | /// Aggregate 11 | #[index] 12 | pub aggregate: Pubkey, 13 | /// Curator. 14 | pub curator: Pubkey, 15 | /// Timestamp of the event. 16 | pub timestamp: i64, 17 | } 18 | 19 | /// Emitted when a [StakePool] is added. 20 | #[event] 21 | pub struct AddStakePoolEvent { 22 | /// Aggregate 23 | #[index] 24 | pub aggregate: Pubkey, 25 | /// Stake pool 26 | #[index] 27 | pub stake_pool: Pubkey, 28 | 29 | /// The [Aggregate::curator]. 30 | pub curator: Pubkey, 31 | /// The [Mint]. 32 | pub mint: Pubkey, 33 | /// The accounting method used. 34 | pub accounting_method: AccountingMethod, 35 | 36 | /// Timestamp of the event. 37 | pub timestamp: i64, 38 | } 39 | 40 | /// Emitted when an [Aggregate]'s curator is modified. 41 | #[event] 42 | pub struct SetCuratorEvent { 43 | /// Aggregate 44 | #[index] 45 | pub aggregate: Pubkey, 46 | 47 | /// The new [Aggregate::curator]. 48 | pub curator: Pubkey, 49 | /// The previous [Aggregate::curator]. 50 | pub previous_curator: Pubkey, 51 | /// The [Aggregate::curator_setter]. 52 | pub curator_setter: Pubkey, 53 | 54 | /// Timestamp of the event. 55 | pub timestamp: i64, 56 | } 57 | 58 | /// Emitted when aSOL is minted. 
59 | #[event] 60 | pub struct MintASolEvent { 61 | /// Depositor 62 | #[index] 63 | pub depositor: Pubkey, 64 | 65 | /// The mint of the stake pool token deposited. 66 | #[index] 67 | pub stake_pool_mint: Pubkey, 68 | 69 | /// Accounting method used. 70 | #[index] 71 | pub accounting_method: AccountingMethod, 72 | 73 | /// Amount of stake pool tokens deposited. 74 | pub deposit_amount: u64, 75 | 76 | /// Amount of aSOL minted. 77 | pub mint_amount: u64, 78 | 79 | /// Timestamp of the event. 80 | pub timestamp: i64, 81 | } 82 | 83 | /// Information about an aggregate. 84 | #[event] 85 | pub struct AggregateInfoEvent { 86 | /// Pool snapshot. 87 | pub snapshot: Snapshot, 88 | /// Time that the info was fetched. 89 | pub timestamp: i64, 90 | } 91 | -------------------------------------------------------------------------------- /programs/asol/src/lib.rs: -------------------------------------------------------------------------------- 1 | //! aSOL: an aggregated Solana stake pool. 2 | #![deny(rustdoc::all)] 3 | #![allow(rustdoc::missing_doc_code_examples)] 4 | 5 | mod account_validators; 6 | mod pool; 7 | 8 | pub mod accounting; 9 | pub mod events; 10 | pub mod snapshot; 11 | pub mod state; 12 | pub mod types; 13 | 14 | use anchor_lang::{prelude::*, solana_program::native_token::LAMPORTS_PER_SOL}; 15 | use anchor_spl::token::{Mint, Token, TokenAccount}; 16 | use vipers::validate::Validate; 17 | 18 | pub use events::*; 19 | pub use snapshot::*; 20 | pub use state::*; 21 | pub use types::*; 22 | 23 | /// Maximum number of stake pools supported. 24 | pub const MAX_STAKE_POOLS: usize = 30; 25 | 26 | /// Number of decimals in lamports. 27 | pub const LAMPORTS_DECIMALS: u8 = 9; 28 | 29 | /// The minimum amount of liquidity in the pool for the "exact calculation" of SOL/ASOL price to be used. 30 | /// If the amount of SOL in the pool is less than this number, the price of ASOL is pegged to 1 SOL. 31 | pub const MIN_LIQUIDITY_FOR_EXACT_CALCULATION: u64 = LAMPORTS_PER_SOL; 32 | 33 | declare_id!("AURUqAcTZP8mhR6sWVxWyfBbpJRj4A3qqeFzLNhrwayE"); 34 | 35 | pub mod stake_pool_mints { 36 | pub mod lido_stsol { 37 | use anchor_lang::declare_id; 38 | declare_id!("7dHbWXmci3dT8UFYWYZweBLXgycu7Y3iL6trKn1Y7ARj"); 39 | } 40 | pub mod marinade_msol { 41 | use anchor_lang::declare_id; 42 | declare_id!("mSoLzYCxHdYgdzU16g5QSh3i5K3z3KZK7ytfqcJm7So"); 43 | } 44 | } 45 | 46 | /// [asol] program. 47 | #[program] 48 | pub mod asol { 49 | use super::*; 50 | 51 | /// Provisions a new aggregate SOL. 
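///
/// Creates the underlying [crate_token] Crate via CPI, sets its withdraw fee
/// to 0.5% (50 bps), and records `admin` as both the curator and the
/// curator setter.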
52 | #[access_control(ctx.accounts.validate())] 53 | pub fn new_aggregate( 54 | ctx: Context<NewAggregate>, 55 | agg_bump: u8, 56 | crate_bump: u8, 57 | ) -> ProgramResult { 58 | crate_token::cpi::new_crate( 59 | CpiContext::new( 60 | ctx.accounts.crate_token_program.to_account_info(), 61 | crate_token::cpi::accounts::NewCrate { 62 | crate_mint: ctx.accounts.crate_mint.to_account_info(), 63 | crate_token: ctx.accounts.crate_token.to_account_info(), 64 | fee_to_setter: ctx.accounts.aggregate.to_account_info(), 65 | fee_setter_authority: ctx.accounts.aggregate.to_account_info(), 66 | author_fee_to: ctx.accounts.aggregate.to_account_info(), 67 | issue_authority: ctx.accounts.aggregate.to_account_info(), 68 | withdraw_authority: ctx.accounts.redeem_in_kind.to_account_info(), 69 | payer: ctx.accounts.payer.to_account_info(), 70 | system_program: ctx.accounts.system_program.to_account_info(), 71 | }, 72 | ), 73 | crate_bump, 74 | )?; 75 | 76 | let signer_seeds: &[&[&[u8]]] = &[&[ 77 | b"Aggregate".as_ref(), 78 | &ctx.accounts.crate_token.key().to_bytes(), 79 | &[agg_bump], 80 | ]]; 81 | 82 | // Withdraw fee is 0.5% or 50 bps 83 | crate_token::cpi::set_withdraw_fee( 84 | CpiContext::new( 85 | ctx.accounts.crate_token_program.to_account_info(), 86 | crate_token::cpi::accounts::SetFees { 87 | crate_token: ctx.accounts.crate_token.to_account_info(), 88 | fee_setter: ctx.accounts.aggregate.to_account_info(), 89 | }, 90 | ) 91 | .with_signer(signer_seeds), 92 | 50, 93 | )?; 94 | 95 | let aggregate = &mut ctx.accounts.aggregate; 96 | aggregate.crate_token = ctx.accounts.crate_token.key(); 97 | aggregate.bump = agg_bump; 98 | 99 | aggregate.curator = ctx.accounts.admin.key(); 100 | aggregate.curator_setter = ctx.accounts.admin.key(); 101 | 102 | emit!(NewAggregateEvent { 103 | aggregate: aggregate.key(), 104 | curator: aggregate.curator, 105 | timestamp: Clock::get()?.unix_timestamp 106 | }); 107 | 108 | Ok(()) 109 | } 110 | 111 | /// Adds a new stake pool to an aggregate. 112 | #[access_control(ctx.accounts.validate())] 113 | pub fn add_stake_pool( 114 | ctx: Context<AddStakePool>, 115 | bump: u8, 116 | accounting_method: AccountingMethod, 117 | ) -> ProgramResult { 118 | let aggregate = &ctx.accounts.aggregate; 119 | 120 | let stake_pool = &mut ctx.accounts.stake_pool; 121 | stake_pool.aggregate = aggregate.key(); 122 | stake_pool.mint = ctx.accounts.mint.key(); 123 | stake_pool.bump = bump; 124 | stake_pool.accounting_method = accounting_method; 125 | 126 | stake_pool.stats.total_amount_deposited = 0; 127 | stake_pool.stats.total_amount_minted = ASOL::from(0); 128 | 129 | let aggregate = &mut ctx.accounts.aggregate; 130 | aggregate.stake_pools.push(StakePoolMeta { 131 | mint: stake_pool.mint, 132 | accounting_method, 133 | }); 134 | 135 | emit!(AddStakePoolEvent { 136 | aggregate: aggregate.key(), 137 | stake_pool: stake_pool.key(), 138 | 139 | curator: aggregate.curator, 140 | mint: stake_pool.mint, 141 | accounting_method, 142 | 143 | timestamp: Clock::get()?.unix_timestamp 144 | }); 145 | 146 | Ok(()) 147 | } 148 | 149 | /// Sets the curator. 
150 | #[access_control(ctx.accounts.validate())] 151 | pub fn set_curator(ctx: Context<SetCurator>) -> ProgramResult { 152 | let aggregate = &mut ctx.accounts.aggregate; 153 | let previous_curator = aggregate.curator; 154 | aggregate.curator = ctx.accounts.next_curator.key(); 155 | 156 | emit!(SetCuratorEvent { 157 | aggregate: aggregate.key(), 158 | previous_curator, 159 | curator: aggregate.curator, 160 | curator_setter: aggregate.curator_setter, 161 | timestamp: Clock::get()?.unix_timestamp 162 | }); 163 | 164 | Ok(()) 165 | } 166 | 167 | /// Mints aSOL from Lido stSOL. 168 | #[access_control(ctx.accounts.validate())] 169 | pub fn mint_lido(ctx: Context<SyncAndMint>, deposit_amount: u64) -> ProgramResult { 170 | ctx.accounts.sync_and_mint_lido(deposit_amount) 171 | } 172 | 173 | /// Mints aSOL from Marinade mSOL. 174 | #[access_control(ctx.accounts.validate())] 175 | pub fn mint_marinade(ctx: Context<SyncAndMint>, deposit_amount: u64) -> ProgramResult { 176 | ctx.accounts.sync_and_mint_marinade(deposit_amount) 177 | } 178 | 179 | /// Getter that logs the prices of all staked SOLs. 180 | pub fn print_aggregate_info(ctx: Context<SyncAndMint>) -> ProgramResult { 181 | // ensure not mut 182 | let accounts: &SyncAndMint = ctx.accounts; 183 | emit!(AggregateInfoEvent { 184 | snapshot: accounts.build_snapshot()?, 185 | timestamp: Clock::get()?.unix_timestamp 186 | }); 187 | Ok(()) 188 | } 189 | } 190 | 191 | // -------------------------------- 192 | // Context Structs 193 | // -------------------------------- 194 | 195 | /// Accounts for [asol::new_aggregate]. 196 | #[derive(Accounts)] 197 | #[instruction(agg_bump: u8)] 198 | pub struct NewAggregate<'info> { 199 | /// Information about the crate. 200 | #[account( 201 | init, 202 | seeds = [ 203 | b"Aggregate".as_ref(), 204 | crate_token.key().to_bytes().as_ref() 205 | ], 206 | bump = agg_bump, 207 | payer = payer, 208 | // support up to 30 stake pools for aSOL 209 | space = 8 + std::mem::size_of::<Aggregate>() + std::mem::size_of::<StakePoolMeta>() * MAX_STAKE_POOLS 210 | )] 211 | pub aggregate: Account<'info, Aggregate>, 212 | 213 | /// [Mint] of the [crate_token::CrateToken]. 214 | pub crate_mint: Account<'info, Mint>, 215 | 216 | #[account(mut)] 217 | pub crate_token: UncheckedAccount<'info>, 218 | 219 | /// Redeem in kind. 220 | pub redeem_in_kind: UncheckedAccount<'info>, 221 | 222 | /// Payer of the crate initialization. 223 | #[account(mut)] 224 | pub payer: Signer<'info>, 225 | 226 | /// The admin, who becomes the curator and the curator setter. 227 | pub admin: UncheckedAccount<'info>, 228 | 229 | /// System program. 230 | pub system_program: Program<'info, System>, 231 | 232 | /// Crate token program. 233 | pub crate_token_program: Program<'info, crate_token::program::CrateToken>, 234 | } 235 | 236 | /// Accounts for [asol::add_stake_pool]. 237 | #[derive(Accounts)] 238 | #[instruction(bump: u8)] 239 | pub struct AddStakePool<'info> { 240 | #[account(mut)] 241 | pub aggregate: Account<'info, Aggregate>, 242 | 243 | /// The [StakePool] to add. 244 | #[account( 245 | init, 246 | seeds = [ 247 | b"StakePool", 248 | aggregate.key().to_bytes().as_ref(), 249 | mint.key().to_bytes().as_ref() 250 | ], 251 | bump = bump, 252 | payer = payer 253 | )] 254 | pub stake_pool: Account<'info, StakePool>, 255 | 256 | /// [Mint] of the stake pool. 257 | pub mint: Account<'info, Mint>, 258 | 259 | /// The [Aggregate::curator]. 260 | pub curator: Signer<'info>, 261 | 262 | /// Payer of the stake pool initialization. 263 | #[account(mut)] 264 | pub payer: Signer<'info>, 265 | 266 | /// System program. 
267 | pub system_program: Program<'info, System>, 268 | } 269 | 270 | /// Accounts for [asol::set_curator]. 271 | #[derive(Accounts)] 272 | pub struct SetCurator<'info> { 273 | /// [Aggregate]. 274 | #[account(mut)] 275 | pub aggregate: Account<'info, Aggregate>, 276 | /// The [Aggregate::curator_setter]. 277 | pub curator_setter: Signer<'info>, 278 | /// The [Aggregate::curator] to set. 279 | pub next_curator: UncheckedAccount<'info>, 280 | } 281 | 282 | /// Accounts for minting aSOL. 283 | #[derive(Accounts)] 284 | pub struct MintASol<'info> { 285 | /// Information about the aggregate. 286 | #[account(mut)] 287 | pub aggregate: Account<'info, Aggregate>, 288 | 289 | /// The [StakePool]. 290 | #[account(mut)] 291 | pub stake_pool: Account<'info, StakePool>, 292 | 293 | /// [TokenAccount] holding the [StakePool] tokens of the [crate_token::CrateToken]. 294 | #[account(mut)] 295 | pub stake_pool_tokens: Box<Account<'info, TokenAccount>>, 296 | 297 | /// Information about the crate. 298 | pub crate_token: Box<Account<'info, crate_token::CrateToken>>, 299 | 300 | /// [Mint] of the [crate_token::CrateToken]. 301 | #[account(mut)] 302 | pub crate_mint: Box<Account<'info, Mint>>, 303 | 304 | /// The depositor into the pool. 305 | #[account(mut)] 306 | pub depositor: Signer<'info>, 307 | 308 | /// The source of the deposited [StakePool] tokens. 309 | #[account(mut)] 310 | pub depositor_source: Box<Account<'info, TokenAccount>>, 311 | 312 | /// Destination of the issued tokens. 313 | #[account(mut)] 314 | pub mint_destination: Box<Account<'info, TokenAccount>>, 315 | 316 | /// [Token] program. 317 | pub token_program: Program<'info, Token>, 318 | 319 | /// [crate_token::program::CrateToken] program. 320 | pub crate_token_program: Program<'info, crate_token::program::CrateToken>, 321 | } 322 | 323 | #[derive(Accounts)] 324 | pub struct SyncAndMint<'info> { 325 | /// Mint aSOL 326 | pub mint_asol: MintASol<'info>, 327 | /// Sync accounts 328 | pub sync: SyncAll<'info>, 329 | } 330 | 331 | /// Accounts for synchronization. 332 | /// TODO: this should be generic 333 | #[derive(Accounts)] 334 | pub struct SyncAll<'info> { 335 | /// Marinade accounts. 336 | pub marinade: SyncMarinade<'info>, 337 | 338 | /// Lido accounts. 339 | pub lido: SyncLido<'info>, 340 | } 341 | 342 | #[derive(Accounts)] 343 | pub struct SyncMarinade<'info> { 344 | /// [marinade] state account. 345 | pub marinade: Box<Account<'info, marinade::State>>, 346 | 347 | /// [TokenAccount] holding the tokens of the [StakePool]. 348 | pub marinade_stake_pool_tokens: Box<Account<'info, TokenAccount>>, 349 | } 350 | 351 | #[derive(Accounts)] 352 | pub struct SyncLido<'info> { 353 | /// [lido_anchor] account. 354 | pub lido: Box<Account<'info, lido_anchor::Lido>>, 355 | 356 | /// [TokenAccount] holding the tokens of the [StakePool]. 357 | pub lido_stake_pool_tokens: Box<Account<'info, TokenAccount>>, 358 | } 359 | 360 | /// Errors. 361 | #[error] 362 | pub enum ErrorCode { 363 | #[msg("Must be curator.")] 364 | UnauthorizedNotCurator, 365 | #[msg("Must be curator setter.")] 366 | UnauthorizedNotCuratorSetter, 367 | 368 | #[msg("Pool not found in snapshot.", offset = 10)] 369 | PoolNotFoundInSnapshot, 370 | #[msg("Cannot add a pool that has already been added.")] 371 | PoolAlreadyAdded, 372 | } 373 | -------------------------------------------------------------------------------- /programs/asol/src/pool.rs: -------------------------------------------------------------------------------- 1 | use crate::{ 2 | accounting::Accountant, types::SOL, AccountingMethod, MintASol, Snapshot, StakePoolSnapshot, 3 | SyncAndMint, ASOL, 4 | }; 5 | use anchor_lang::prelude::*; 6 | use vipers::{unwrap_int, unwrap_or_err}; 7 | 8 | impl<'info> SyncAndMint<'info> { 9 | /// Builds a snapshot of all balances and conversions. 
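///
/// A sketch of the math (see [Accountant] and [StakePoolSnapshot]): each pool
/// reports `sol_for_1e9`, the SOL value of 1e9 of its tokens, so the crate's
/// reserves in that pool are worth `pool_balance * sol_for_1e9 / 1e9` SOL;
/// `balance_sol` is the sum across pools, and `balance_sol / supply` then
/// implies the aSOL price.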
10 | pub fn build_snapshot(&self) -> Result<Snapshot, ProgramError> { 11 | let pool_snapshots: Vec<StakePoolSnapshot> = self 12 | .mint_asol 13 | .aggregate 14 | .stake_pools 15 | .iter() 16 | .map(|pool| match pool.accounting_method { 17 | AccountingMethod::Lido => { 18 | StakePoolSnapshot::try_from_accountant(pool, &self.sync.lido) 19 | } 20 | AccountingMethod::Marinade => { 21 | StakePoolSnapshot::try_from_accountant(pool, &self.sync.marinade) 22 | } 23 | }) 24 | .collect::<Result<Vec<StakePoolSnapshot>, ProgramError>>()?; 25 | 26 | let balance_sol_u64: u64 = pool_snapshots 27 | .iter() 28 | .map(|snap| Ok(unwrap_int!(snap.pool_balance_sol()))) 29 | .sum::<Result<u64, ProgramError>>()?; 30 | let balance_sol = SOL::from(balance_sol_u64); 31 | 32 | Ok(Snapshot { 33 | balance_sol, 34 | supply: ASOL::from(self.mint_asol.crate_mint.supply), 35 | stake_pools: pool_snapshots, 36 | }) 37 | } 38 | 39 | pub fn sync_and_mint_lido(&mut self, deposit_amount: u64) -> ProgramResult { 40 | let snapshot = self.build_snapshot()?; 41 | self.mint_asol 42 | .mint_asol(&snapshot, &self.sync.lido, deposit_amount)?; 43 | Ok(()) 44 | } 45 | 46 | pub fn sync_and_mint_marinade(&mut self, deposit_amount: u64) -> ProgramResult { 47 | let snapshot = self.build_snapshot()?; 48 | self.mint_asol 49 | .mint_asol(&snapshot, &self.sync.marinade, deposit_amount)?; 50 | Ok(()) 51 | } 52 | } 53 | 54 | impl<'info> MintASol<'info> { 55 | /// Mints aSOL. 56 | pub fn mint_asol<T: Accountant<'info>>( 57 | &mut self, 58 | snapshot: &Snapshot, 59 | minter: &T, 60 | deposit_amount: u64, 61 | ) -> ProgramResult { 62 | let pool_snapshot = unwrap_or_err!( 63 | snapshot 64 | .stake_pools 65 | .iter() 66 | .find(|pool| pool.pool_mint == minter.crate_reserves().mint), 67 | PoolNotFoundInSnapshot 68 | ); 69 | 70 | // ignore zero deposit 71 | if deposit_amount == 0 { 72 | return Ok(()); 73 | } 74 | 75 | // compute the amount of tokens to mint 76 | let deposit_sol_value = minter.sol_value(deposit_amount)?; 77 | let mint_amount = snapshot.compute_asol_amount_from_sol(deposit_sol_value)?; 78 | 79 | // ignore zero mint 80 | if mint_amount.amount == 0 { 81 | return Ok(()); 82 | } 83 | 84 | let signer_seeds: &[&[&[u8]]] = &[&[ 85 | b"Aggregate".as_ref(), 86 | &self.aggregate.crate_token.to_bytes(), 87 | &[self.aggregate.bump], 88 | ]]; 89 | 90 | // transfer stake pool tokens to the crate 91 | anchor_spl::token::transfer( 92 | CpiContext::new( 93 | self.token_program.to_account_info(), 94 | anchor_spl::token::Transfer { 95 | from: self.depositor_source.to_account_info(), 96 | to: self.stake_pool_tokens.to_account_info(), 97 | authority: self.depositor.to_account_info(), 98 | }, 99 | ), 100 | deposit_amount, 101 | )?; 102 | 103 | // issue new crate tokens 104 | crate_token::cpi::issue( 105 | CpiContext::new_with_signer( 106 | self.crate_token_program.to_account_info(), 107 | crate_token::cpi::accounts::Issue { 108 | crate_token: self.crate_token.to_account_info(), 109 | crate_mint: self.crate_mint.to_account_info(), 110 | issue_authority: self.aggregate.to_account_info(), 111 | mint_destination: self.mint_destination.to_account_info(), 112 | 113 | // there are no author/protocol fees, so we pass in garbage here 114 | author_fee_destination: self.mint_destination.to_account_info(), 115 | protocol_fee_destination: self.mint_destination.to_account_info(), 116 | 117 | token_program: self.token_program.to_account_info(), 118 | }, 119 | signer_seeds, 120 | ), 121 | mint_amount.amount, 122 | )?; 123 | 124 | // update stats 125 | let stake_pool_state = &mut self.stake_pool; 126 | stake_pool_state.stats.total_amount_deposited = unwrap_int!(stake_pool_state 127 | .stats 128 | 
.total_amount_deposited 129 | .checked_add(deposit_amount)); 130 | stake_pool_state.stats.total_amount_minted = ASOL::from(unwrap_int!(stake_pool_state 131 | .stats 132 | .total_amount_minted 133 | .amount 134 | .checked_add(mint_amount.amount))); 135 | 136 | // record snapshot 137 | stake_pool_state.latest_snapshot.aggregate_balance_sol = snapshot.balance_sol; 138 | stake_pool_state.latest_snapshot.aggregate_supply = snapshot.supply; 139 | stake_pool_state.latest_snapshot.snapshot = *pool_snapshot; 140 | stake_pool_state.latest_snapshot.snapshot_ts = Clock::get()?.unix_timestamp; 141 | 142 | // record aggregate snapshot 143 | let aggregate = &mut self.aggregate; 144 | aggregate.latest_snapshot = snapshot.clone(); 145 | let now = Clock::get()?.unix_timestamp; 146 | aggregate.latest_snapshot_ts = now; 147 | 148 | // emit event 149 | emit!(crate::MintASolEvent { 150 | depositor: self.depositor.key(), 151 | stake_pool_mint: self.depositor_source.mint, 152 | accounting_method: T::METHOD, 153 | deposit_amount, 154 | mint_amount: mint_amount.amount, 155 | timestamp: now 156 | }); 157 | 158 | Ok(()) 159 | } 160 | } 161 | -------------------------------------------------------------------------------- /programs/asol/src/snapshot.rs: -------------------------------------------------------------------------------- 1 | use vipers::{assert_keys, unwrap_int}; 2 | 3 | use crate::{ 4 | accounting::Accountant, StakePoolMeta, ASOL, MIN_LIQUIDITY_FOR_EXACT_CALCULATION, SOL, 5 | }; 6 | use anchor_lang::{prelude::*, solana_program::native_token::LAMPORTS_PER_SOL}; 7 | use num_traits::ToPrimitive; 8 | 9 | /// A balance snapshot. 10 | #[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug, Default, PartialEq, Eq)] 11 | pub struct Snapshot { 12 | /// SOL value of the pool's balance at the time of the snapshot. 13 | pub balance_sol: SOL, 14 | /// Total supply. 15 | pub supply: ASOL, 16 | /// Stake pools. 17 | pub stake_pools: Vec<StakePoolSnapshot>, 18 | } 19 | 20 | /// A balance snapshot of a stake pool. 21 | #[derive(AnchorSerialize, AnchorDeserialize, Copy, Clone, Debug, Default, PartialEq, Eq)] 22 | pub struct StakePoolSnapshot { 23 | /// Mint of the pool. 24 | pub pool_mint: Pubkey, 25 | /// Amount of stake pool tokens in the pool. 26 | pub pool_balance: u64, 27 | /// Amount of SOL received for 1e9 tokens. (Price) 28 | pub sol_for_1e9: SOL, 29 | } 30 | 31 | impl Snapshot { 32 | /// Gets the number of [ASOL] corresponding to an amount of [SOL]. 33 | pub fn compute_asol_amount_from_sol(&self, sol_amount: SOL) -> Result<ASOL, ProgramError> { 34 | // if less than 1 SOL is staked, the price is equal to SOL price. 35 | // this is to avoid precision errors with tiny balances. 36 | if self.balance_sol.amount <= MIN_LIQUIDITY_FOR_EXACT_CALCULATION { 37 | return Ok(ASOL::from(sol_amount.amount)); 38 | } 39 | Ok(unwrap_int!( 40 | sol_amount.checked_mul_asol(self.supply, self.balance_sol) 41 | )) 42 | } 43 | } 44 | 45 | impl StakePoolSnapshot { 46 | /// The [SOL] value of the pool's balance, based on the price. 47 | pub fn pool_balance_sol(&self) -> Option<u64> { 48 | self.sol_for_1e9 49 | .to_u128() 50 | .checked_mul(self.pool_balance.into())? 51 | .checked_div(LAMPORTS_PER_SOL.into())? 52 | .to_u64() 53 | } 54 | 55 | /// Creates a pool snapshot from an [Accountant]. 
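///
/// Checks that the pool's registered mint matches the mint of the
/// accountant's crate reserves before building the snapshot.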
56 | pub fn try_from_accountant<'info, T: Accountant<'info>>( 57 | pool: &StakePoolMeta, 58 | accountant: &T, 59 | ) -> Result<StakePoolSnapshot, ProgramError> { 60 | assert_keys!( 61 | pool.mint, 62 | accountant.crate_reserves().mint, 63 | format!("incorrect pool mint for {:?}", T::METHOD) 64 | ); 65 | Self::try_from_accountant_unchecked(accountant) 66 | } 67 | 68 | /// Creates a pool snapshot from an accountant. 69 | fn try_from_accountant_unchecked<'info, T: Accountant<'info>>( 70 | accountant: &T, 71 | ) -> Result<StakePoolSnapshot, ProgramError> { 72 | let reserves = accountant.crate_reserves(); 73 | Ok(StakePoolSnapshot { 74 | pool_mint: reserves.mint, 75 | pool_balance: reserves.amount, 76 | sol_for_1e9: accountant.sol_value(LAMPORTS_PER_SOL)?, 77 | }) 78 | } 79 | } 80 | 81 | #[cfg(test)] 82 | mod tests { 83 | use crate::{stake_pool_mints::*, *}; 84 | use anchor_lang::solana_program::native_token::LAMPORTS_PER_SOL; 85 | 86 | #[test] 87 | fn test_compute_output_mint_amount() { 88 | let agg = Snapshot { 89 | balance_sol: SOL::from(2_200_000), 90 | supply: ASOL::from(2_000_000), 91 | stake_pools: vec![ 92 | StakePoolSnapshot { 93 | pool_mint: lido_stsol::ID, 94 | pool_balance: 1_000_000, 95 | sol_for_1e9: SOL::from(1_100_000), 96 | }, 97 | StakePoolSnapshot { 98 | pool_mint: marinade_msol::ID, 99 | pool_balance: 1_000_000, 100 | sol_for_1e9: SOL::from(1_100_000), 101 | }, 102 | ], 103 | }; 104 | let output = agg 105 | .compute_asol_amount_from_sol(SOL::from(1_100_000)) 106 | .unwrap(); 107 | 108 | // it's below the minimum 109 | assert_eq!(output, ASOL::from(1_100_000)); 110 | } 111 | 112 | #[test] 113 | fn test_compute_output_mint_amount_imbalanced() { 114 | let agg = Snapshot { 115 | balance_sol: SOL::from(3_300_000), 116 | supply: ASOL::from(3_000_000), 117 | stake_pools: vec![ 118 | StakePoolSnapshot { 119 | pool_mint: lido_stsol::ID, 120 | pool_balance: 1_000_000, 121 | sol_for_1e9: SOL::from(1_100_000), 122 | }, 123 | StakePoolSnapshot { 124 | pool_mint: marinade_msol::ID, 125 | pool_balance: 1_000_000, 126 | sol_for_1e9: SOL::from(2_200_000), 127 | }, 128 | ], 129 | }; 130 | let output = agg 131 | .compute_asol_amount_from_sol(SOL::from(1_100_000)) 132 | .unwrap(); 133 | 134 | // it's below the minimum 135 | assert_eq!(output, ASOL::from(1_100_000)); 136 | } 137 | 138 | #[test] 139 | fn test_compute_output_mint_amount_above_minimum_imbalanced() { 140 | let agg = Snapshot { 141 | balance_sol: SOL::from(3_300_000_000), 142 | supply: ASOL::from(3_000_000_000), 143 | stake_pools: vec![ 144 | StakePoolSnapshot { 145 | pool_mint: lido_stsol::ID, 146 | pool_balance: 1_000_000_000, 147 | sol_for_1e9: SOL::from(1_100_000), 148 | }, 149 | StakePoolSnapshot { 150 | pool_mint: marinade_msol::ID, 151 | pool_balance: 1_000_000_000, 152 | sol_for_1e9: SOL::from(2_200_000), 153 | }, 154 | ], 155 | }; 156 | let output = agg 157 | .compute_asol_amount_from_sol(SOL::from(1_100_000)) 158 | .unwrap(); 159 | 160 | // above the minimum, so the exact supply/balance rate applies: 1_100_000 * 3e9 / 3.3e9 161 | assert_eq!(output, ASOL::from(1_000_000)); 162 | } 163 | 164 | #[test] 165 | fn test_pool_balance_sol_empty() { 166 | let snap = StakePoolSnapshot { 167 | pool_balance: 0, 168 | sol_for_1e9: SOL::from(LAMPORTS_PER_SOL), 169 | ..Default::default() 170 | }; 171 | assert_eq!(snap.pool_balance_sol().unwrap(), 0); 172 | } 173 | 174 | #[test] 175 | fn test_pool_balance_sol_nonempty() { 176 | let snap = StakePoolSnapshot { 177 | pool_balance: 1_000_000, 178 | sol_for_1e9: SOL::from(LAMPORTS_PER_SOL), 179 | ..Default::default() 180 | }; 181 | assert_eq!(snap.pool_balance_sol().unwrap(), 1_000_000); 182 | } 183 | 184 | #[test] 185 | 
fn test_pool_balance_sol_large() { 186 | let snap = StakePoolSnapshot { 187 | pool_balance: 1_000_000, 188 | sol_for_1e9: SOL::from(1_100_000_000), 189 | ..Default::default() 190 | }; 191 | assert_eq!(snap.pool_balance_sol().unwrap(), 1_100_000); 192 | } 193 | 194 | #[test] 195 | fn test_pool_balance_zero_price() { 196 | let snap = StakePoolSnapshot { 197 | pool_balance: 1_000_000, 198 | sol_for_1e9: SOL::from(0), 199 | ..Default::default() 200 | }; 201 | assert_eq!(snap.pool_balance_sol().unwrap(), 0); 202 | } 203 | } 204 | -------------------------------------------------------------------------------- /programs/asol/src/state.rs: -------------------------------------------------------------------------------- 1 | use anchor_lang::prelude::*; 2 | 3 | use crate::{Snapshot, StakePoolSnapshot, ASOL, SOL}; 4 | 5 | /// Contains the info of the aggregate token. 6 | /// Make sure to allocate enough storage to handle a lot of stake pools. 7 | #[account] 8 | #[derive(Debug, Default, PartialEq, Eq)] 9 | pub struct Aggregate { 10 | /// The [crate_token::CrateToken]. 11 | pub crate_token: Pubkey, 12 | /// Bump. 13 | pub bump: u8, 14 | 15 | /// Account that can add or remove stake pools from the aggregate. 16 | pub curator: Pubkey, 17 | /// Account that can change who the curator is. 18 | pub curator_setter: Pubkey, 19 | 20 | /// The stake pools in the aggregate. 21 | pub stake_pools: Vec<StakePoolMeta>, 22 | 23 | /// Latest snapshot of the aggregate. 24 | pub latest_snapshot: Snapshot, 25 | /// When the latest snapshot was taken. 26 | pub latest_snapshot_ts: i64, 27 | } 28 | 29 | #[derive(AnchorSerialize, AnchorDeserialize, Copy, Clone, Debug, Default, PartialEq, Eq)] 30 | pub struct StakePoolMeta { 31 | /// Mint of the stake pool. 32 | pub mint: Pubkey, 33 | /// The accounting method. 34 | pub accounting_method: AccountingMethod, 35 | } 36 | 37 | /// Contains the state of the [StakePoolMeta]. 38 | /// Currently this is just used for TVL tracking. 39 | #[account] 40 | #[derive(Debug, Default, PartialEq, Eq)] 41 | pub struct StakePool { 42 | /// The [Aggregate]. 43 | pub aggregate: Pubkey, 44 | /// Mint of the stake pool. 45 | pub mint: Pubkey, 46 | /// The bump. 47 | pub bump: u8, 48 | 49 | /// Accounting method the stake pool uses. 50 | pub accounting_method: AccountingMethod, 51 | 52 | /// Statistics on the stake pool. 53 | pub stats: StakePoolStats, 54 | 55 | /// The latest snapshot of the [StakePool]. 56 | pub latest_snapshot: StakePoolStateSnapshot, 57 | } 58 | 59 | /// A balance snapshot of a stake pool. 60 | #[derive(AnchorSerialize, AnchorDeserialize, Copy, Clone, Debug, Default, PartialEq, Eq)] 61 | pub struct StakePoolStats { 62 | /// Total amount of [StakePool::mint] tokens ever deposited. 63 | pub total_amount_deposited: u64, 64 | /// Total amount of [Aggregate::crate_token] tokens ever minted from this pool. 65 | pub total_amount_minted: ASOL, 66 | } 67 | 68 | /// A balance snapshot of a stake pool. 69 | #[derive(AnchorSerialize, AnchorDeserialize, Copy, Clone, Debug, Default, PartialEq, Eq)] 70 | pub struct StakePoolStateSnapshot { 71 | /// Aggregate [SOL] balance. 72 | pub aggregate_balance_sol: SOL, 73 | /// Aggregate [ASOL] total supply. 74 | pub aggregate_supply: ASOL, 75 | /// Stake pool snapshot information. 76 | pub snapshot: StakePoolSnapshot, 77 | /// Time the last snapshot was taken. 78 | pub snapshot_ts: i64, 79 | } 80 | 81 | /// The accounting method of the stake pool. 
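///
/// Determines which on-chain state account ([marinade::State] or
/// [lido_anchor::Lido]) is read when pricing the pool's tokens in SOL.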
82 | #[repr(C)] 83 | #[derive( 84 | AnchorSerialize, AnchorDeserialize, Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, 85 | )] 86 | pub enum AccountingMethod { 87 | /// Marinade mSOL. 88 | Marinade, 89 | /// Lido stSOL. 90 | Lido, 91 | } 92 | 93 | impl Default for AccountingMethod { 94 | fn default() -> Self { 95 | AccountingMethod::Marinade 96 | } 97 | } 98 | -------------------------------------------------------------------------------- /programs/asol/src/types.rs: -------------------------------------------------------------------------------- 1 | use anchor_lang::prelude::*; 2 | use num_traits::ToPrimitive; 3 | 4 | /// An amount of SOL. 5 | #[derive( 6 | AnchorSerialize, AnchorDeserialize, Clone, Copy, Debug, Default, PartialEq, Eq, PartialOrd, Ord, 7 | )] 8 | pub struct SOL { 9 | pub amount: u64, 10 | } 11 | 12 | /// An amount of aSOL. 13 | #[derive( 14 | AnchorSerialize, AnchorDeserialize, Clone, Copy, Debug, Default, PartialEq, Eq, PartialOrd, Ord, 15 | )] 16 | pub struct ASOL { 17 | pub amount: u64, 18 | } 19 | 20 | impl SOL { 21 | /// Converts to u128. 22 | pub fn to_u128(&self) -> u128 { 23 | self.amount as u128 24 | } 25 | 26 | /// Converts the [SOL] amount to [ASOL]. 27 | pub fn checked_mul_asol(&self, numerator: ASOL, denominator: SOL) -> Option<ASOL> { 28 | Some(ASOL::from( 29 | (self.to_u128()) 30 | .checked_mul(numerator.to_u128()) 31 | .and_then(|v| v.checked_div(denominator.to_u128())) 32 | .and_then(|v| v.to_u64())?, 33 | )) 34 | } 35 | } 36 | 37 | impl ASOL { 38 | /// Converts to u128. 39 | pub fn to_u128(&self) -> u128 { 40 | self.amount as u128 41 | } 42 | } 43 | 44 | impl From<SOL> for u128 { 45 | fn from(sol: SOL) -> Self { 46 | sol.to_u128() 47 | } 48 | } 49 | 50 | impl From<ASOL> for u128 { 51 | fn from(asol: ASOL) -> Self { 52 | asol.to_u128() 53 | } 54 | } 55 | 56 | impl From<u64> for SOL { 57 | fn from(amount: u64) -> Self { 58 | SOL { amount } 59 | } 60 | } 61 | 62 | impl From<u64> for ASOL { 63 | fn from(amount: u64) -> Self { 64 | ASOL { amount } 65 | } 66 | } 67 | -------------------------------------------------------------------------------- /scripts/generate-idl-types.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | shopt -s extglob 4 | 5 | cd $(dirname $0)/.. 6 | 7 | generate_declaration_file() { 8 | PROGRAM_SO=$1 9 | OUT_DIR=$2 10 | 11 | prog="$(basename $PROGRAM_SO .json)" 12 | OUT_PATH="$OUT_DIR/$prog.ts" 13 | if [ ! 
$(which gsed) ]; then 14 | PREFIX=$(echo $prog | sed -E 's/(^|_)([a-z])/\U\2/g') 15 | else 16 | PREFIX=$(echo $prog | gsed -E 's/(^|_)([a-z])/\U\2/g') 17 | fi 18 | typename="${PREFIX}IDL" 19 | rawName="${PREFIX}JSON" 20 | 21 | # types 22 | echo "export type $typename =" >>$OUT_PATH 23 | cat $PROGRAM_SO >>$OUT_PATH 24 | echo ";" >>$OUT_PATH 25 | 26 | # raw json 27 | echo "export const $rawName: $typename =" >>$OUT_PATH 28 | cat $PROGRAM_SO >>$OUT_PATH 29 | echo ";" >>$OUT_PATH 30 | 31 | # error type 32 | echo "import { generateErrorMap } from '@saberhq/anchor-contrib';" >>$OUT_PATH 33 | echo "export const ${PREFIX}Errors = generateErrorMap($rawName);" >>$OUT_PATH 34 | } 35 | 36 | generate_sdk_idls() { 37 | SDK_DIR=${1:-"./packages/sdk/src/idls"} 38 | IDL_JSONS=$2 39 | 40 | echo "Generating IDLs for the following programs:" 41 | echo $IDL_JSONS 42 | echo "" 43 | 44 | rm -rf $SDK_DIR 45 | mkdir -p $SDK_DIR 46 | if [ $(ls artifacts/idl/ | wc -l) -ne 0 ]; then 47 | for f in $IDL_JSONS; do 48 | generate_declaration_file $f $SDK_DIR 49 | done 50 | if [[ $RUN_ESLINT != "none" ]]; then 51 | yarn eslint --fix $SDK_DIR 52 | fi 53 | else 54 | echo "Warning: no IDLs found. Make sure you ran ./scripts/parse-idls.sh first." 55 | fi 56 | } 57 | 58 | generate_sdk_idls ./src/idls 'artifacts/idl/*.json' 59 | -------------------------------------------------------------------------------- /scripts/parse-idls.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # This script generates the IDL JSONs without building the full packages. 4 | 5 | rm -fr artifacts/idl/ 6 | mkdir -p artifacts/idl/ 7 | 8 | for PROGRAM in $(find programs/ -maxdepth 3 -name lib.rs); do 9 | PROGRAM_NAME=$(dirname $PROGRAM | xargs dirname | xargs basename | tr '-' '_') 10 | echo "Parsing IDL for $PROGRAM_NAME" 11 | anchor idl parse --file $PROGRAM >artifacts/idl/$PROGRAM_NAME.json || { 12 | echo "Could not parse IDL" 13 | exit 1 14 | } 15 | done 16 | -------------------------------------------------------------------------------- /scripts/pull-crate.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env sh 2 | 3 | cd $(dirname $0)/.. 
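# Dump the deployed Crate program binaries from devnet into
# artifacts/programs/; these .so files can then be loaded into a local
# test validator.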
4 | 5 | mkdir -p artifacts/programs/ 6 | 7 | solana program dump CRATwLpu6YZEeiVq9ajjxs61wPQ9f29s1UoQR9siJCRs \ 8 | artifacts/programs/crate_token.so --url devnet 9 | 10 | solana program dump 1NKyU3qShZC3oJgvCCftAHDi5TFxcJwfyUz2FeZsiwE \ 11 | artifacts/programs/crate_redeem_in_kind.so --url devnet 12 | -------------------------------------------------------------------------------- /shell.nix: -------------------------------------------------------------------------------- 1 | { pkgs }: 2 | pkgs.mkShell { 3 | nativeBuildInputs = (pkgs.lib.optionals pkgs.stdenv.isDarwin [ 4 | pkgs.darwin.apple_sdk.frameworks.AppKit 5 | pkgs.darwin.apple_sdk.frameworks.IOKit 6 | pkgs.darwin.apple_sdk.frameworks.Foundation 7 | ]); 8 | buildInputs = with pkgs; 9 | (pkgs.lib.optionals pkgs.stdenv.isLinux ([ 10 | # solana 11 | libudev 12 | ])) ++ [ 13 | rustup 14 | cargo-deps 15 | gh 16 | 17 | # sdk 18 | nodejs 19 | yarn 20 | python3 21 | 22 | pkgconfig 23 | openssl 24 | jq 25 | gnused 26 | 27 | libiconv 28 | 29 | anchor 30 | spl-token-cli 31 | ] ++ (pkgs.lib.optionals pkgs.stdenv.isDarwin [ 32 | pkgs.darwin.apple_sdk.frameworks.AppKit 33 | pkgs.darwin.apple_sdk.frameworks.IOKit 34 | pkgs.darwin.apple_sdk.frameworks.Foundation 35 | ]); 36 | shellHook = '' 37 | export PATH=$PATH:$HOME/.cargo/bin 38 | ''; 39 | } 40 | -------------------------------------------------------------------------------- /src/asol.ts: -------------------------------------------------------------------------------- 1 | import { 2 | CRATE_ADDRESSES, 3 | CRATE_REDEEM_IN_KIND_WITHDRAW_AUTHORITY, 4 | CrateSDK, 5 | generateCrateAddress, 6 | } from "@crateprotocol/crate-sdk"; 7 | import { Program, Provider as AnchorProvider } from "@project-serum/anchor"; 8 | import type { Provider } from "@saberhq/solana-contrib"; 9 | import { 10 | SignerWallet, 11 | SolanaProvider, 12 | TransactionEnvelope, 13 | } from "@saberhq/solana-contrib"; 14 | import type { TokenAmount } from "@saberhq/token-utils"; 15 | import { 16 | createInitMintInstructions, 17 | getATAAddresses, 18 | getOrCreateATAs, 19 | TOKEN_PROGRAM_ID, 20 | } from "@saberhq/token-utils"; 21 | import type { PublicKey, Signer } from "@solana/web3.js"; 22 | import { Keypair, SystemProgram } from "@solana/web3.js"; 23 | 24 | import { generateStakePoolAddress } from "."; 25 | import { 26 | ASOL_PROGRAM_ID, 27 | LAMPORTS_DECIMALS, 28 | LIDO_STAKED_SOL, 29 | MARINADE_STAKED_SOL, 30 | MARINADE_STATE_ACCOUNT, 31 | SOLIDO_ACCOUNT, 32 | STAKE_POOL_TOKENS, 33 | } from "./constants"; 34 | import { generateAggregateAddress } from "./pda"; 35 | import type { 36 | AccountingMethod, 37 | AggregateData, 38 | ASolProgram, 39 | } from "./programs/asol"; 40 | import { AsolJSON } from "./programs/asol"; 41 | 42 | /** 43 | * JavaScript SDK for interacting with the aSOL program. 44 | */ 45 | export class ASolSDK { 46 | /** 47 | * Reference to the Crate SDK. 
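* Used to fetch crate token data and to redeem the underlying tokens.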
48 | */ 49 | public readonly crate: CrateSDK; 50 | 51 | constructor( 52 | public readonly provider: Provider, 53 | public readonly program: ASolProgram 54 | ) { 55 | this.crate = CrateSDK.init(provider); 56 | } 57 | 58 | /** 59 | * Initialize from a Provider 60 | * @param provider 61 | * @param asolProgramID 62 | * @returns 63 | */ 64 | static init( 65 | provider: Provider, 66 | asolProgramID: PublicKey = ASOL_PROGRAM_ID 67 | ): ASolSDK { 68 | return new ASolSDK( 69 | provider, 70 | new Program( 71 | AsolJSON, 72 | asolProgramID, 73 | new AnchorProvider(provider.connection, provider.wallet, provider.opts) 74 | ) as unknown as ASolProgram 75 | ); 76 | } 77 | 78 | /** 79 | * Creates a new instance of the SDK with the given keypair. 80 | */ 81 | public withSigner(signer: Signer): ASolSDK { 82 | return ASolSDK.init( 83 | new SolanaProvider( 84 | this.provider.connection, 85 | this.provider.broadcaster, 86 | new SignerWallet(signer), 87 | this.provider.opts 88 | ) 89 | ); 90 | } 91 | 92 | /** 93 | * Creates a new Aggregate. 94 | * @returns 95 | */ 96 | async newAggregate({ 97 | mintKP = Keypair.generate(), 98 | admin = this.provider.wallet.publicKey, 99 | payer = this.provider.wallet.publicKey, 100 | }: { 101 | mintKP?: Keypair; 102 | admin?: PublicKey; 103 | payer?: PublicKey; 104 | } = {}): Promise<{ 105 | tx: TransactionEnvelope; 106 | aggregateKey: PublicKey; 107 | crateKey: PublicKey; 108 | }> { 109 | const [crateKey, crateBump] = await generateCrateAddress(mintKP.publicKey); 110 | const [aggregateKey, aggBump] = await generateAggregateAddress(crateKey); 111 | const initMintTX = await createInitMintInstructions({ 112 | provider: this.provider, 113 | mintKP, 114 | decimals: LAMPORTS_DECIMALS, // lamports 115 | mintAuthority: crateKey, 116 | freezeAuthority: crateKey, 117 | }); 118 | const newAggregateTX = new TransactionEnvelope(this.provider, [ 119 | this.program.instruction.newAggregate(aggBump, crateBump, { 120 | accounts: { 121 | crateMint: mintKP.publicKey, 122 | payer, 123 | redeemInKind: CRATE_REDEEM_IN_KIND_WITHDRAW_AUTHORITY, 124 | aggregate: aggregateKey, 125 | crateToken: crateKey, 126 | admin, 127 | systemProgram: SystemProgram.programId, 128 | crateTokenProgram: CRATE_ADDRESSES.CrateToken, 129 | }, 130 | }), 131 | ]); 132 | return { tx: initMintTX.combine(newAggregateTX), aggregateKey, crateKey }; 133 | } 134 | 135 | /** 136 | * Adds a new stake pool. 137 | * @returns 138 | */ 139 | async addStakePool({ 140 | aggregate, 141 | mint, 142 | method, 143 | curator = this.provider.wallet.publicKey, 144 | payer = this.provider.wallet.publicKey, 145 | }: { 146 | aggregate: PublicKey; 147 | mint: PublicKey; 148 | method: AccountingMethod; 149 | curator?: PublicKey; 150 | payer?: PublicKey; 151 | }): Promise<{ tx: TransactionEnvelope; stakePoolKey: PublicKey }> { 152 | const [stakePool, bump] = await generateStakePoolAddress( 153 | aggregate, 154 | mint, 155 | this.program.programId 156 | ); 157 | const newStakePoolTX = new TransactionEnvelope(this.provider, [ 158 | this.program.instruction.addStakePool(bump, method, { 159 | accounts: { 160 | aggregate, 161 | stakePool, 162 | mint, 163 | curator, 164 | payer, 165 | systemProgram: SystemProgram.programId, 166 | }, 167 | }), 168 | ]); 169 | return { tx: newStakePoolTX, stakePoolKey: stakePool }; 170 | } 171 | 172 | /** 173 | * Mints tokens. 
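*
* Chooses the `mintMarinade` or `mintLido` instruction based on the mint of
* the deposited token amount; any other mint throws an error.
*
* @example
* // Hypothetical usage: `sdk` is an ASolSDK instance and `amount` is a
* // TokenAmount of mSOL or stSOL.
* const tx = await sdk.mintASol({ aggregateKey, amount });
* await tx.confirm();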
174 | * @returns The transaction that mints aSOL to the depositor. 175 | */ 176 | async mintASol({ 177 | aggregateKey, 178 | amount, 179 | depositor = this.provider.wallet.publicKey, 180 | }: { 181 | aggregateKey: PublicKey; 182 | amount: TokenAmount; 183 | depositor?: PublicKey; 184 | }): Promise<TransactionEnvelope> { 185 | const depositMint = amount.token.mintAccount; 186 | const method = depositMint.equals(MARINADE_STAKED_SOL) 187 | ? "mintMarinade" 188 | : depositMint.equals(LIDO_STAKED_SOL) 189 | ? "mintLido" 190 | : null; 191 | if (!method) { 192 | throw new Error("Invalid mint."); 193 | } 194 | 195 | const aggregate = (await this.program.account.aggregate.fetchNullable( 196 | aggregateKey 197 | )) as AggregateData | null; 198 | if (!aggregate) { 199 | throw new Error("No aggregate found."); 200 | } 201 | 202 | const crate = await this.crate.fetchCrateTokenData(aggregate.crateToken); 203 | if (!crate) { 204 | throw new Error("No crate found."); 205 | } 206 | 207 | const stakePoolATAs = await getATAAddresses({ 208 | mints: { 209 | marinade: MARINADE_STAKED_SOL, 210 | lido: LIDO_STAKED_SOL, 211 | input: amount.token.mintAccount, 212 | }, 213 | owner: aggregate.crateToken, 214 | }); 215 | 216 | const depositorATAs = await getOrCreateATAs({ 217 | provider: this.provider, 218 | mints: { 219 | input: amount.token.mintAccount, 220 | crate: crate.mint, 221 | }, 222 | owner: depositor, 223 | }); 224 | 225 | const [stakePool] = await generateStakePoolAddress( 226 | aggregateKey, 227 | amount.token.mintAccount 228 | ); 229 | 230 | const mintTX = new TransactionEnvelope(this.provider, [ 231 | ...(depositorATAs.createAccountInstructions.crate 232 | ? [depositorATAs.createAccountInstructions.crate] 233 | : []), 234 | this.program.instruction[method](amount.toU64(), { 235 | accounts: { 236 | sync: { 237 | marinade: { 238 | marinade: MARINADE_STATE_ACCOUNT, 239 | marinadeStakePoolTokens: stakePoolATAs.accounts.marinade.address, 240 | }, 241 | lido: { 242 | lido: SOLIDO_ACCOUNT, 243 | lidoStakePoolTokens: stakePoolATAs.accounts.lido.address, 244 | }, 245 | }, 246 | mintAsol: { 247 | aggregate: aggregateKey, 248 | stakePool, 249 | crateToken: aggregate.crateToken, 250 | crateMint: crate.mint, 251 | tokenProgram: TOKEN_PROGRAM_ID, 252 | crateTokenProgram: CRATE_ADDRESSES.CrateToken, 253 | 254 | depositor, 255 | depositorSource: depositorATAs.accounts.input, 256 | stakePoolTokens: stakePoolATAs.accounts.input.address, 257 | mintDestination: depositorATAs.accounts.crate, 258 | }, 259 | }, 260 | }), 261 | ]); 262 | 263 | return mintTX; 264 | } 265 | 266 | /** 267 | * Redeems aSOL (Crate) tokens for the underlying stake pool tokens. 268 | */ 269 | async redeem({ 270 | amount, 271 | owner = this.provider.wallet.publicKey, 272 | }: { 273 | amount: TokenAmount; 274 | owner?: PublicKey; 275 | }): Promise<TransactionEnvelope> { 276 | return await this.crate.redeem({ 277 | amount, 278 | owner, 279 | underlyingTokens: Object.values(STAKE_POOL_TOKENS), 280 | }); 281 | } 282 | } 283 | -------------------------------------------------------------------------------- /src/constants.ts: -------------------------------------------------------------------------------- 1 | import { Token } from "@saberhq/token-utils"; 2 | import { PublicKey } from "@solana/web3.js"; 3 | 4 | /** 5 | * Program ID of the aSOL program. 
6 | */ 7 | export const ASOL_PROGRAM_ID = new PublicKey( 8 | "AURUqAcTZP8mhR6sWVxWyfBbpJRj4A3qqeFzLNhrwayE" 9 | ); 10 | 11 | export const MARINADE_STATE_ACCOUNT = new PublicKey( 12 | "8szGkuLTAux9XMgZ2vtY39jVSowEcpBfFfD8hXSEqdGC" 13 | ); 14 | 15 | export const SOLIDO_ACCOUNT = new PublicKey( 16 | "49Yi1TKkNyYjPAFdR9LBvoHcUjuPX4Df5T5yv39w2XTn" 17 | ); 18 | 19 | export const MARINADE_STAKED_SOL = new PublicKey( 20 | "mSoLzYCxHdYgdzU16g5QSh3i5K3z3KZK7ytfqcJm7So" 21 | ); 22 | 23 | export const LIDO_STAKED_SOL = new PublicKey( 24 | "7dHbWXmci3dT8UFYWYZweBLXgycu7Y3iL6trKn1Y7ARj" 25 | ); 26 | 27 | /** 28 | * Supported stake pools. 29 | */ 30 | export const STAKE_POOL_TOKENS = { 31 | LIDO: new Token({ 32 | chainId: 101, 33 | address: LIDO_STAKED_SOL.toString(), 34 | symbol: "stSOL", 35 | name: "Lido Staked SOL", 36 | decimals: 9, 37 | logoURI: 38 | "https://raw.githubusercontent.com/solana-labs/token-list/main/assets/mainnet/7dHbWXmci3dT8UFYWYZweBLXgycu7Y3iL6trKn1Y7ARj/logo.png", 39 | tags: [], 40 | extensions: { 41 | website: "https://solana.lido.fi/", 42 | twitter: "https://twitter.com/LidoFinance/", 43 | }, 44 | }), 45 | MARINADE: new Token({ 46 | chainId: 101, 47 | address: MARINADE_STAKED_SOL.toString(), 48 | symbol: "mSOL", 49 | name: "Marinade staked SOL (mSOL)", 50 | decimals: 9, 51 | logoURI: 52 | "https://raw.githubusercontent.com/solana-labs/token-list/main/assets/mainnet/mSoLzYCxHdYgdzU16g5QSh3i5K3z3KZK7ytfqcJm7So/logo.png", 53 | tags: [], 54 | extensions: { 55 | coingeckoId: "msol", 56 | website: "https://marinade.finance", 57 | twitter: "https://twitter.com/MarinadeFinance", 58 | discord: "https://discord.gg/mGqZA5pjRN", 59 | medium: "https://medium.com/marinade-finance", 60 | github: "https://github.com/marinade-finance", 61 | }, 62 | }), 63 | }; 64 | 65 | /** 66 | * Number of decimals in one SOL. 67 | */ 68 | export const LAMPORTS_DECIMALS = 9; 69 | 70 | /** 71 | * Mint address of the aSOL token. 72 | */ 73 | export const ASOL_MINT = new PublicKey( 74 | "ASoLXbfe7cd6igh5yiEsU8M7FW64QRxPKkxk7sjAfond" 75 | ); 76 | 77 | /** 78 | * The aSOL token. 
79 | */ 80 | export const ASOL_TOKEN = new Token({ 81 | chainId: 101, 82 | address: ASOL_MINT.toString(), 83 | symbol: "aSOL", 84 | name: "aSOL Aggregate Solana Stake Pool", 85 | decimals: LAMPORTS_DECIMALS, 86 | logoURI: "https://asol.so/images/asol-token-icon.svg", 87 | tags: [], 88 | extensions: { 89 | coingeckoId: "solana", 90 | description: "aSOL is the standard for transacting with staked SOL tokens.", 91 | website: "https://asol.so", 92 | twitter: "https://twitter.com/aSOLprotocol", 93 | github: "https://github.com/aSolHQ", 94 | }, 95 | }); 96 | -------------------------------------------------------------------------------- /src/index.ts: -------------------------------------------------------------------------------- 1 | export * from "./asol"; 2 | export * from "./constants"; 3 | export * from "./pda"; 4 | export * from "./programs"; 5 | -------------------------------------------------------------------------------- /src/pda.ts: -------------------------------------------------------------------------------- 1 | import { utils } from "@project-serum/anchor"; 2 | import { PublicKey } from "@solana/web3.js"; 3 | 4 | import { ASOL_PROGRAM_ID } from "./constants"; 5 | 6 | export const generateAggregateAddress = ( 7 | crateToken: PublicKey, 8 | programID: PublicKey = ASOL_PROGRAM_ID 9 | ): Promise<[PublicKey, number]> => { 10 | return PublicKey.findProgramAddress( 11 | [utils.bytes.utf8.encode("Aggregate"), crateToken.toBuffer()], 12 | programID 13 | ); 14 | }; 15 | 16 | export const generateStakePoolAddress = ( 17 | aggregate: PublicKey, 18 | mint: PublicKey, 19 | programID: PublicKey = ASOL_PROGRAM_ID 20 | ): Promise<[PublicKey, number]> => { 21 | return PublicKey.findProgramAddress( 22 | [ 23 | utils.bytes.utf8.encode("StakePool"), 24 | aggregate.toBuffer(), 25 | mint.toBuffer(), 26 | ], 27 | programID 28 | ); 29 | }; 30 | -------------------------------------------------------------------------------- /src/programs/asol.ts: -------------------------------------------------------------------------------- 1 | import type { AnchorTypes } from "@saberhq/anchor-contrib"; 2 | 3 | import type { AsolIDL } from "../idls/asol"; 4 | 5 | export * from "../idls/asol"; 6 | 7 | type ASolTypes = AnchorTypes< 8 | AsolIDL, 9 | { 10 | aggregate: AggregateData; 11 | stakePool: StakePoolData; 12 | }, 13 | { 14 | AccountingMethod: AccountingMethod; 15 | StakePoolMeta: StakePoolMeta; 16 | StakePoolSnapshot: StakePoolSnapshot; 17 | StakePoolStateSnapshot: StakePoolStateSnapshot; 18 | StakePoolStats: StakePoolStats; 19 | Snapshot: Snapshot; 20 | SOL: SOLValue; 21 | ASOL: ASOLValue; 22 | } 23 | >; 24 | 25 | export type AggregateData = ASolTypes["Accounts"]["Aggregate"]; 26 | export type StakePoolData = ASolTypes["Accounts"]["StakePool"]; 27 | 28 | export type AccountingMethod = 29 | typeof AccountingMethods[keyof typeof AccountingMethods]; 30 | 31 | export const AccountingMethods = { 32 | Marinade: { 33 | marinade: {}, 34 | }, 35 | Lido: { 36 | lido: {}, 37 | }, 38 | } as const; 39 | 40 | export type SOLValue = ASolTypes["Defined"]["SOL"]; 41 | export type ASOLValue = ASolTypes["Defined"]["ASOL"]; 42 | 43 | export type StakePoolMeta = ASolTypes["Defined"]["StakePoolMeta"]; 44 | export type StakePoolSnapshot = ASolTypes["Defined"]["StakePoolSnapshot"]; 45 | export type StakePoolStateSnapshot = 46 | ASolTypes["Defined"]["StakePoolStateSnapshot"]; 47 | export type StakePoolStats = ASolTypes["Defined"]["StakePoolStats"]; 48 | export type Snapshot = ASolTypes["Defined"]["Snapshot"]; 49 | 50 | export type 
ASolProgram = ASolTypes["Program"]; 51 | 52 | export type NewAggregateEvent = ASolTypes["Events"]["NewAggregateEvent"]; 53 | export type AddStakePoolEvent = ASolTypes["Events"]["AddStakePoolEvent"]; 54 | export type SetCuratorEvent = ASolTypes["Events"]["SetCuratorEvent"]; 55 | export type MintASolEvent = ASolTypes["Events"]["MintASolEvent"]; 56 | export type AggregateInfoEvent = ASolTypes["Events"]["AggregateInfoEvent"]; 57 | 58 | export type SyncAndMintAccounts = 59 | ASolTypes["Instructions"]["mintLido"]["accounts"]; 60 | -------------------------------------------------------------------------------- /src/programs/index.ts: -------------------------------------------------------------------------------- 1 | export * from "./asol"; 2 | -------------------------------------------------------------------------------- /tests/asol.ts: -------------------------------------------------------------------------------- 1 | import { EventParser } from "@project-serum/anchor"; 2 | import { expectTX } from "@saberhq/chai-solana"; 3 | import { createInitMintInstructions } from "@saberhq/token-utils"; 4 | import type { PublicKey } from "@solana/web3.js"; 5 | import { Keypair } from "@solana/web3.js"; 6 | import { expect } from "chai"; 7 | 8 | import type { AddStakePoolEvent } from "../src"; 9 | import { AccountingMethods } from "../src"; 10 | import { makeSDK } from "./workspace"; 11 | 12 | describe("aSOL", () => { 13 | const sdk = makeSDK(); 14 | let aggregate: PublicKey; 15 | 16 | beforeEach(async () => { 17 | const mintKP = Keypair.generate(); 18 | const { tx: createTX, aggregateKey } = await sdk.newAggregate({ 19 | mintKP, 20 | }); 21 | aggregate = aggregateKey; 22 | await expectTX(createTX, "Create Crate Token").to.be.fulfilled; 23 | }); 24 | 25 | it("add stake pools", async () => { 26 | const lidoKP = Keypair.generate(); 27 | const fakeLido = await createInitMintInstructions({ 28 | provider: sdk.provider, 29 | mintKP: lidoKP, 30 | decimals: 9, 31 | }); 32 | await expectTX(fakeLido).to.be.fulfilled; 33 | 34 | const { tx } = await sdk.addStakePool({ 35 | aggregate, 36 | mint: lidoKP.publicKey, 37 | method: AccountingMethods.Lido, 38 | }); 39 | const result = await tx.send(); 40 | await expectTX(result).to.be.fulfilled; 41 | 42 | const parser = new EventParser(sdk.program.programId, sdk.program.coder); 43 | const logs = (await result.wait()).response.meta?.logMessages ?? 
[]; 44 | 45 | parser.parseLogs(logs, (ev) => { 46 | const event: AddStakePoolEvent = ev as unknown as AddStakePoolEvent; 47 | 48 | expect(event.name).to.eq("AddStakePoolEvent"); 49 | expect(event.data.aggregate).to.eqAddress(aggregate); 50 | expect(event.data.curator).to.eqAddress(sdk.provider.wallet.publicKey); 51 | expect(event.data.mint).to.eqAddress(lidoKP.publicKey); 52 | expect(event.data.accountingMethod).to.deep.eq(AccountingMethods.Lido); 53 | }); 54 | }); 55 | 56 | // todo: figure out how to mock lido and marinade locally 57 | }); 58 | -------------------------------------------------------------------------------- /tests/fixture-key.json: -------------------------------------------------------------------------------- 1 | [ 2 | 148, 243, 45, 212, 135, 246, 72, 26, 73, 140, 56, 126, 177, 224, 224, 170, 3 | 236, 120, 106, 35, 82, 69, 249, 235, 237, 125, 11, 106, 191, 236, 0, 39, 4, 4 | 105, 253, 195, 52, 130, 7, 89, 145, 103, 113, 215, 175, 175, 188, 81, 200, 89, 5 | 116, 202, 162, 197, 253, 236, 129, 122, 206, 63, 253, 182, 227, 207 6 | ] 7 | -------------------------------------------------------------------------------- /tests/workspace.ts: -------------------------------------------------------------------------------- 1 | import * as anchor from "@project-serum/anchor"; 2 | import { chaiSolana } from "@saberhq/chai-solana"; 3 | import { SolanaProvider } from "@saberhq/solana-contrib"; 4 | import chai from "chai"; 5 | 6 | import { ASolSDK } from "../src"; 7 | 8 | chai.use(chaiSolana); 9 | 10 | const anchorProvider = anchor.Provider.env(); 11 | anchor.setProvider(anchorProvider); 12 | 13 | const provider = SolanaProvider.load({ 14 | connection: anchorProvider.connection, 15 | wallet: anchorProvider.wallet, 16 | opts: anchorProvider.opts, 17 | }); 18 | 19 | export const makeSDK = (): ASolSDK => { 20 | return ASolSDK.init(provider); 21 | }; 22 | -------------------------------------------------------------------------------- /tsconfig.build.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "./tsconfig.json", 3 | "compilerOptions": { 4 | "noEmit": false, 5 | "declaration": true, 6 | "declarationMap": true, 7 | "sourceMap": true, 8 | "inlineSources": true, 9 | "importHelpers": true, 10 | "outDir": "dist/cjs/" 11 | }, 12 | "include": ["src/"], 13 | "exclude": ["**/*.test.ts"] 14 | } 15 | -------------------------------------------------------------------------------- /tsconfig.esm.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "./tsconfig.build.json", 3 | "compilerOptions": { 4 | "module": "ESNext", 5 | "moduleResolution": "node", 6 | "outDir": "dist/esm/" 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "ES2015", 4 | "module": "CommonJS", 5 | 6 | "strict": true, 7 | "strictFunctionTypes": true, 8 | "noImplicitOverride": true, 9 | "esModuleInterop": true, 10 | "noUncheckedIndexedAccess": true, 11 | "skipLibCheck": true, 12 | "forceConsistentCasingInFileNames": true, 13 | "resolveJsonModule": true, 14 | "noEmit": true, 15 | "lib": ["ES2019"] 16 | }, 17 | "include": ["src/", "tests/"] 18 | } 19 | --------------------------------------------------------------------------------
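
Usage sketch (a hypothetical script, not a file in this repository): the following shows how the pieces above fit together. Provider setup mirrors tests/workspace.ts; the aggregate address is derived from the ASOL_MINT constant via the same crate-then-aggregate PDA derivation that newAggregate performs; the 1 mSOL deposit amount and the main wrapper are illustrative assumptions.

import * as anchor from "@project-serum/anchor";
import { generateCrateAddress } from "@crateprotocol/crate-sdk";
import { SolanaProvider } from "@saberhq/solana-contrib";
import { TokenAmount } from "@saberhq/token-utils";

import {
  ASOL_MINT,
  ASOL_TOKEN,
  ASolSDK,
  generateAggregateAddress,
  STAKE_POOL_TOKENS,
} from "./src";

const main = async () => {
  // Provider wiring, as in tests/workspace.ts.
  const anchorProvider = anchor.Provider.env();
  const sdk = ASolSDK.init(
    SolanaProvider.load({
      connection: anchorProvider.connection,
      wallet: anchorProvider.wallet,
      opts: anchorProvider.opts,
    })
  );

  // The aggregate is a PDA of the crate token, which is itself a PDA of
  // the aSOL mint: the same derivation newAggregate performs.
  const [crateKey] = await generateCrateAddress(ASOL_MINT);
  const [aggregateKey] = await generateAggregateAddress(crateKey);

  // Deposit 1 mSOL (9 decimals; illustrative amount) and mint aSOL.
  // mintASol defaults the depositor to the provider's wallet.
  const amount = new TokenAmount(STAKE_POOL_TOKENS.MARINADE, 1_000_000_000);
  const mintTX = await sdk.mintASol({ aggregateKey, amount });
  await mintTX.confirm();

  // Later, aSOL can be redeemed in kind for the underlying mSOL/stSOL.
  const redeemTX = await sdk.redeem({
    amount: new TokenAmount(ASOL_TOKEN, 1_000_000_000),
  });
  await redeemTX.confirm();
};

main().catch(console.error);

Note that mintASol selects the mintMarinade or mintLido instruction from the deposit mint itself, so no accounting method needs to be passed at mint time; AccountingMethods only come into play when the curator calls addStakePool.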