├── .env-example ├── .eslintignore ├── .eslintrc.json ├── .github └── workflows │ └── main.yml ├── .gitignore ├── .prettierrc.js ├── Dockerfile ├── LICENSE ├── README.md ├── docker-compose.yaml ├── package.json ├── sql ├── V1_CREATE_DATABASE.sql ├── V2_CREATE_TABLES_AND_ADD_SUBSTRATE_CHAIN_NAME.sql └── V3_ALTER_TABLE_PIN_OBJECT_ADD_RETRY_TIMES.sql ├── src ├── app.ts ├── common │ ├── commonUtils.ts │ └── promise-utils.ts ├── config │ └── config.ts ├── dao │ ├── commonDao.ts │ └── pinObjectDao.ts ├── db │ └── db.ts ├── logger │ └── index.ts ├── middlewares │ ├── auth │ │ └── authHandler.ts │ └── validate │ │ └── validationHandler.ts ├── migrations │ └── 001.do.sql ├── models │ ├── Chains.ts │ ├── Failure.ts │ ├── PinObjects.ts │ └── Users.ts ├── routes │ └── psa.ts └── service │ ├── crust │ ├── account.ts │ ├── api.ts │ ├── krp.ts │ └── order.ts │ └── pinning │ └── index.ts ├── tsconfig.json └── yarn.lock /.env-example: -------------------------------------------------------------------------------- 1 | NODE_ENV= 2 | MYSQL_HOST= 3 | MYSQL_PORT= 4 | MYSQL_DB= 5 | MYSQL_USER= 6 | MYSQL_PASSWORD= 7 | MYSQL_POOL_MAX= 8 | MYSQL_POOL_MIN= 9 | MYSQL_POOL_IDLE= 10 | MYSQL_POOL_ACQUIRE= 11 | CRUST_SEED= 12 | WS_ENDPOINT= 13 | DEFAULT_FILE_SIZE= 14 | CRUST_TIPS= 15 | VALID_FILE_REPLICAS= 16 | -------------------------------------------------------------------------------- /.eslintignore: -------------------------------------------------------------------------------- 1 | dist/ 2 | -------------------------------------------------------------------------------- /.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "./node_modules/gts/", 3 | "rules": { 4 | "@typescript-eslint/no-explicit-any": ["off"], 5 | "node/no-extraneous-import": ["off"], 6 | "@typescript-eslint/no-unused-vars": ["off"] 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /.github/workflows/main.yml: 
-------------------------------------------------------------------------------- 1 | name: Build ipfs w3auth pinning service 2 | 3 | on: 4 | workflow_dispatch: 5 | push: 6 | branches: [ "main" ] 7 | 8 | jobs: 9 | build: 10 | if: github.repository == 'crustio/ipfs-w3auth-pinning-service' 11 | runs-on: ubuntu-latest 12 | strategy: 13 | matrix: 14 | node-version: [20.x] 15 | steps: 16 | - uses: actions/checkout@v2 17 | - name: Use Node.js ${{ matrix.node-version }} 18 | uses: actions/setup-node@v1 19 | with: 20 | node-version: ${{ matrix.node-version }} 21 | - run: yarn install 22 | - run: yarn build 23 | 24 | - 25 | name: Set up QEMU 26 | uses: docker/setup-qemu-action@v1 27 | - 28 | name: Set up Docker Buildx 29 | uses: docker/setup-buildx-action@v1 30 | - 31 | name: Login to DockerHub 32 | uses: docker/login-action@v1 33 | with: 34 | username: ${{ secrets.DOCKERHUB_USERNAME }} 35 | password: ${{ secrets.DOCKERHUB_TOKEN }} 36 | - 37 | name: Build and push 38 | id: docker_build 39 | uses: docker/build-push-action@v2 40 | with: 41 | context: . 
42 | push: true 43 | tags: crustio/ipfs-w3auth-pinning-service:latest 44 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | lerna-debug.log* 8 | 9 | # Diagnostic reports (https://nodejs.org/api/report.html) 10 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 11 | 12 | # Runtime data 13 | pids 14 | *.pid 15 | *.seed 16 | *.pid.lock 17 | 18 | # Directory for instrumented libs generated by jscoverage/JSCover 19 | lib-cov 20 | 21 | # Coverage directory used by tools like istanbul 22 | coverage 23 | *.lcov 24 | 25 | # nyc test coverage 26 | .nyc_output 27 | 28 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 29 | .grunt 30 | 31 | # Bower dependency directory (https://bower.io/) 32 | bower_components 33 | 34 | # node-waf configuration 35 | .lock-wscript 36 | 37 | # Compiled binary addons (https://nodejs.org/api/addons.html) 38 | build/Release 39 | 40 | # Dependency directories 41 | node_modules/ 42 | jspm_packages/ 43 | 44 | # TypeScript v1 declaration files 45 | typings/ 46 | 47 | # TypeScript cache 48 | *.tsbuildinfo 49 | 50 | # Optional npm cache directory 51 | .npm 52 | 53 | # Optional eslint cache 54 | .eslintcache 55 | 56 | # Microbundle cache 57 | .rpt2_cache/ 58 | .rts2_cache_cjs/ 59 | .rts2_cache_es/ 60 | .rts2_cache_umd/ 61 | 62 | # Optional REPL history 63 | .node_repl_history 64 | 65 | # Output of 'npm pack' 66 | *.tgz 67 | 68 | # Yarn Integrity file 69 | .yarn-integrity 70 | 71 | # dotenv environment variables file 72 | .env 73 | .env.test 74 | 75 | # parcel-bundler cache (https://parceljs.org/) 76 | .cache 77 | 78 | # Next.js build output 79 | .next 80 | 81 | # Nuxt.js build / generate output 82 | .nuxt 83 | dist 84 | 85 | # Gatsby files 86 | .cache/ 87 | # Comment in the public line in if your project uses 
Gatsby and *not* Next.js 88 | # https://nextjs.org/blog/next-9-1#public-directory-support 89 | # public 90 | 91 | # vuepress build output 92 | .vuepress/dist 93 | 94 | # Serverless directories 95 | .serverless/ 96 | 97 | # FuseBox cache 98 | .fusebox/ 99 | 100 | # DynamoDB Local files 101 | .dynamodb/ 102 | 103 | # TernJS port file 104 | .tern-port 105 | -------------------------------------------------------------------------------- /.prettierrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | ...require('gts/.prettierrc.json') 3 | } 4 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:20-alpine 2 | WORKDIR /app 3 | COPY dist ./dist 4 | COPY node_modules ./node_modules 5 | EXPOSE 3000 6 | CMD ["node", "./dist/app.js"] 7 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. 
For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 
47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # IPFS W3Auth Pinning Service 2 | 3 | Decentralized IPFS remote pinning service with [Crust Network](https://crust.network). 4 | 5 | ## [❓ About IPFS W3Auth Pinning Service](https://wiki.crust.network/docs/en/buildIPFSW3AuthPin) 6 | 7 | ## ✨ Usage 8 | 9 | The IPFS W3Auth Pinning Service(aka. W3Auth PS) is compatible with standard IPFS remote pinning service. You can refer [IPFS Docs](https://docs.ipfs.io/how-to/work-with-pinning-services/#use-an-existing-pinning-service) to learn how to use remote pin. 10 | 11 | As for the **Access Token** of W3Auth PS, you can easily get it with several web3 ways. 12 | 13 | ```shell 14 | Authorization: Bear 15 | ``` 16 | 17 | Let's take `ipfs cli` as an example 18 | 19 | ```shell 20 | ipfs pin remote service add crustpinner http://localhost:3000/psa base64(ChainType-PubKey:SignedMsg) 21 | ``` 22 | 23 | ### Get ChainType 24 | 25 | `ChainType` now can be: 26 | 27 | 1. `sub`(or `substrate`) 28 | 2. 
`eth`(or `ethereum`) 29 | 3. `sol`(or `solana`) 30 | 4. `pol` (or `polygon`) 31 | 5. `nea` (or `near`) 32 | 6. `ava`(or `avalanche`) 33 | 34 | And you can get `PubKey` and `SignedMsg` by using the following web3-ways: 35 | 36 | ### Get Pubkey and SignedMsg 37 | 38 | ### 1. With Substrate 39 | 40 | #### Get `PubKey` 41 | 42 | `PubKey` is just the substrate address, like `5Chu5r5GA41xFgMXLQd6CDjz1ABGEGVGS276xjv93ApY6vD7` 43 | 44 | All substrate-based chains are adapted: 45 | 46 | - [Crust](https://apps.crust.network/?rpc=wss%3A%2F%2Frpc.crust.network#/explorer) 47 | - [Polkadot](https://polkadot.js.org/apps/?rpc=wss%3A%2F%2Fpolkadot.elara.patract.io#/explorer) 48 | - [Kusama](https://polkadot.js.org/apps/?rpc=wss%3A%2F%2Fkusama.api.onfinality.io%2Fpublic-ws#/explorer) 49 | - ... 50 | 51 | #### Get `SignedMsg` 52 | 53 | Just sign the `PubKey` with your private key to get the `SignedMsg` 54 | 55 | - With [Crust Apps](https://apps.crust.network/?rpc=wss%3A%2F%2Frpc.crust.network#/signing) 56 | - With [Polkadot Apps](https://polkadot.js.org/apps/?rpc=wss%3A%2F%2Frpc.polkadot.io#/signing) 57 | - With [Subkey](https://substrate.dev/docs/en/knowledgebase/integrate/subkey#signing-and-verifying-messages) 58 | - With [Node SDK](https://apps.crust.network/docs/util-crypto/examples/encrypt-decrypt) 59 | - With [Code Sample](https://github.com/RoyTimes/crust-workshop/blob/master/src/substrate.ts) 60 | 61 | ### 2. With Ethereum 62 | 63 | #### Get `PubKey` 64 | 65 | `PubKey` is just the ethereum address(42-characters) start with `0x` 66 | 67 | #### Get `SignedMsg` 68 | 69 | Just sign the `PubKey` with your eth private key to get the `SignedMsg` 70 | 71 | - With [MyEtherWallet](https://www.myetherwallet.com/wallet/sign) 72 | - With [MyCrypto](https://app.mycrypto.com/sign-message) 73 | - With [Code Sample](https://github.com/RoyTimes/crust-workshop/blob/master/src/eth.ts) 74 | 75 | ### 3. 
With Moonriver 76 | 77 | Moonriver is fully compatiable with the Ethereum, you can just follow the same steps with the Ethereum. 78 | 79 | #### Get `PubKey` 80 | 81 | `PubKey` is just the moonriver(ethereum) address(42-characters) start with `0x` 82 | 83 | #### Get `SignedMsg` 84 | 85 | Just sign the `PubKey` with your moonriver private key to get the `SignedMsg` 86 | 87 | - With [MyEtherWallet](https://www.myetherwallet.com/wallet/sign) 88 | - With [MyCrypto](https://app.mycrypto.com/sign-message) 89 | - With [Code Sample](https://github.com/RoyTimes/crust-workshop/blob/master/src/eth.ts) 90 | 91 | ### 4. With Solana 92 | 93 | #### Get `PubKey` 94 | 95 | `PubKey` is just the solana address 96 | 97 | #### Get `SignedMsg` 98 | 99 | You can sign the `PubKey` with your solana private key to get the `SignedMsg` 100 | 101 | - With [Solana Signer Sandbox](https://bafybeiexn4chci4exl54hlispdhwste6mpdcvgnu5zei53r2yl24hq2kri.ipfs.dweb.link/) (deploy with IPFS(cid: `QmYXnTQwKkup7yNLXZz2VyBvBj9eJB1knG8V8dnmjNuNnu`), source code is [here](https://github.com/zikunfan/solana-signer), you can deploy yourself) 102 | - With [Phantom](https://docs.phantom.app/integrating/signing-a-message) 103 | 104 | ### 5. With Polygon 105 | 106 | #### Get PubKey 107 | 108 | `PubKey` is just the polygon address(42-characters) start with `0x`. It's compatiable with the ethereum. 109 | 110 | #### Get SignedMsg 111 | 112 | Just sign the `PubKey` with your polygon private key to get the `SignedMsg` 113 | 114 | - With [MyEtherWallet](https://www.myetherwallet.com/wallet/sign) 115 | - With [MyCrypto](https://app.mycrypto.com/sign-message) 116 | - With [Code Sample](https://github.com/RoyTimes/crust-workshop/blob/master/src/eth.ts) 117 | 118 | ### 6. 
With Near 119 | 120 | You can sign the `PubKey` with one of your near private key associated with your account to get the `SignedMsg` 121 | 122 | - With [Near Wallet Example](https://bafybeifl55z3nnwr7c73fsimgbcpjjhx7psubzzh6gyfs3ynbwpkbkws4m.ipfs.dweb.link/) (deploy with IPFS(cid: `QmZupCTkUs6fDCAjYPfDqDtx85GekztfDa9u6Y8dsWhsvA`), source code is [here](https://github.com/MyronFanQiu/wallet-example), you can deploy yourself) 123 | - With [Near-API-JS Example](https://github.com/near/near-api-js/blob/master/examples/cookbook/utils/verify-signature.js) 124 | - With [Code Sample](https://github.com/RoyTimes/crust-workshop/blob/master/src/near.ts) 125 | 126 | ### 7. With Avalanche 127 | 128 | You can sign the `Address` **without** chainID prefix, such as `avax1se4e9lvhlfwhcqnzjr0vpswqcnhsy5atn5r0l3`, with your X- or P- chain private key associated with your account to get the `SignedMsg`. 129 | 130 | - With [Avalanche Wallet](https://wallet.avax.network/wallet/advanced) 131 | 132 | ### 8. With Apots 133 | 134 | #### Get `PubKey` 135 | 136 | `PubKey` is the aptos's account pubkey, such as `0xaa79510150c3a6753f224ef47a315ea6ae9acd23f4506a866feb25f8995c60c`. Please pay attention that it's not the same as the address. 137 | 138 | #### Get `SignedMsg` 139 | 140 | You can sign the `PubKey` with your aptos private key to get the `SignedMsg` 141 | 142 | - With [Martian Wallet](https://chrome.google.com/webstore/detail/martian-aptos-wallet/efbglgofoippbgcjepnhiblaibcnclgk) 143 | 144 | ## 🚀 Deploy 145 | 146 | ### 1. Start MySQL 147 | 148 | W3Auth PS uses MySQL as its state database, you can just use [docker](https://hub.docker.com/_/mysql) to start and config the db service. 149 | 150 | ### 2. Init State DB 151 | 152 | Please execute the `sql script` under `./sql` folder to create database and state tables. 153 | 154 | ### 3. 
Config 155 | 156 | #### 3.1 Config pinning service 157 | 158 | Please create an `.env` file baseon `.env-example`, each config item means: 159 | 160 | ```js 161 | NODE_ENV // `production` or `dev` 162 | MYSQL_HOST // optional, default is `localhost` 163 | MYSQL_PORT // optional, default is `3306` 164 | MYSQL_DB // optional, default is `pinning_service` created by ./sql/V1_CREATE_DATABASE.sql 165 | MYSQL_USER // optional, default is `root` 166 | MYSQL_PASSWORD // optional, default is `root` 167 | MYSQL_POOL_MAX // optional, default is `10` 168 | MYSQL_POOL_MIN // optional, default is `0` 169 | MYSQL_POOL_IDLE // optional, default is `30,000` 170 | MYSQL_POOL_ACQUIRE // optional, default is `30,000` 171 | CRUST_SEED // required, the pinning pool private seeds 172 | WS_ENDPOINT // optional, crust chain websocket address, default is `wss://rpc-crust-mainnet.decoo.io` 173 | DEFAULT_FILE_SIZE // optional, ordering file size, default is 2GB 174 | CRUST_TIPS // optional, ordering tips, default is 0.00005 CRUs 175 | VALID_FILE_REPLICAS // optional, the successfully pinning replica count, default is 3 176 | ``` 177 | 178 | #### 3.2 Config supported chains 179 | 180 | W3Auth PS allows nodes config supported chains with sql script, default supported chain is `substrate`, you can config different `chain_name` with corresponding `chain_type`(auth way): 181 | 182 | `chain_type` 183 | 184 | - `0`: Support all substrate-based chains authentication 185 | - `1`: Support all eth-compatible chains authentication, like `ethereum`/`polygon`/`bsc`/... 186 | - `2`: Support solana authentication 187 | 188 | And you can run the following sql to add/delete supported chains: 189 | 190 | - Add 191 | 192 | ```sql 193 | INSERT INTO `pinning_service`.`chain` (`chain_name`, `chain_type`) VALUES ('eth', 1); 194 | ``` 195 | 196 | - Delete 197 | 198 | ```sql 199 | DELETE FROM `pinning_service`.`chain` WHERE `chain_name`='eth'; 200 | ``` 201 | 202 | ### 4. 
Start pinning service 203 | 204 | - Run with docker 205 | 206 | Just run with the compose file in this repo 207 | 208 | ```shell 209 | docker-compose up -d order-service 210 | ``` 211 | 212 | - Run with node native 213 | 214 | ```shell 215 | # 1. Clone repo 216 | git clone https://github.com/crustio/ipfs-w3auth-pinning-service.git 217 | # 2. Install and build 218 | yarn && yarn build 219 | # 3. Run 220 | yarn start 221 | ``` 222 | 223 | Then, you can just config the standard IPFS remote pinning service with `http://localhost:3000/psa`! 224 | 225 | ## 🙋🏻‍♂️ Contribute 226 | 227 | Please feel free to send a PR. 228 | 229 | ## License 230 | 231 | [Apache 2.0](./LICENSE) 232 | -------------------------------------------------------------------------------- /docker-compose.yaml: -------------------------------------------------------------------------------- 1 | version: "3.8" 2 | services: 3 | order-service: 4 | image: crustio/ipfs-w3auth-pinning-service 5 | container_name: ipfs-w3auth-pinning-service 6 | ports: 7 | - 23000:3000 8 | working_dir: /app 9 | env_file: ./.env 10 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "ipfs-w3auth-pinning-service", 3 | "version": "0.4.0", 4 | "main": "src/index.ts", 5 | "repository": "https://github.com/crustio/ipfs-w3auth-pinning-service.git", 6 | "author": "Zikun Fan ", 7 | "contributors": [ 8 | "zibo " 9 | ], 10 | "license": "MIT", 11 | "scripts": { 12 | "dev": "NODE_ENV=development nodemon ./src/app.ts", 13 | "build": "tsc && cp -R src/migrations dist", 14 | "start": "node ./dist/app.js", 15 | "lint": "gts lint", 16 | "clean": "gts clean", 17 | "compile": "tsc", 18 | "fix": "gts fix", 19 | "prepare": "yarn run compile", 20 | "pretest": "yarn run compile", 21 | "posttest": "yarn run lint" 22 | }, 23 | "resolutions": { 24 | "@crustio/ipfs-w3auth-substrate": "0.0.4", 25 | "usb": 
"1.8.8" 26 | }, 27 | "dependencies": { 28 | "@crustio/ipfs-w3auth-handler": "^0.0.19", 29 | "@crustio/type-definitions": "^1.3.0", 30 | "@elrondnetwork/erdjs": "^8.0.1-alpha.5", 31 | "@koa/router": "^10.1.1", 32 | "@types/uuid": "^8.3.1", 33 | "bignumber.js": "^9.0.1", 34 | "bluebird": "^3.7.2", 35 | "body-parser": "^1.19.0", 36 | "bs58": "^4.0.1", 37 | "cors": "^2.8.5", 38 | "create-hash": "^1.2.0", 39 | "dingtalk-robot-sender": "^1.2.0", 40 | "dotenv": "^10.0.0", 41 | "express": "^4.17.1", 42 | "express-validator": "^6.12.1", 43 | "http-proxy": "^1.18.1", 44 | "lodash": "^4.17.21", 45 | "moment": "^2.29.1", 46 | "mysql2": "^2.3.0", 47 | "node-schedule": "^2.0.0", 48 | "postgrator": "^4.3.1", 49 | "sequelize": "^6.6.5", 50 | "uuid": "^8.3.2", 51 | "winston": "^3.3.3" 52 | }, 53 | "devDependencies": { 54 | "@types/cors": "^2.8.12", 55 | "@types/bluebird": "^3.5.36", 56 | "@types/express": "^4.17.13", 57 | "@types/http-proxy": "^1.17.7", 58 | "@types/lodash": "^4.14.172", 59 | "@types/mocha": "^9.0.0", 60 | "@types/node": "^14.11.2", 61 | "gts": "^3.1.0", 62 | "nodemon": "^2.0.12", 63 | "ts-node": "^10.2.1", 64 | "typescript": "^4.0.3" 65 | }, 66 | "engines": { 67 | "node": ">=11.0.0" 68 | } 69 | } 70 | -------------------------------------------------------------------------------- /sql/V1_CREATE_DATABASE.sql: -------------------------------------------------------------------------------- 1 | CREATE DATABASE IF NOT EXISTS pinning_service DEFAULT CHARSET utf8mb4 COLLATE utf8mb4_general_ci; 2 | -------------------------------------------------------------------------------- /sql/V2_CREATE_TABLES_AND_ADD_SUBSTRATE_CHAIN_NAME.sql: -------------------------------------------------------------------------------- 1 | DROP TABLE IF EXISTS `pinning_service`.`chain`; 2 | CREATE TABLE `pinning_service`.`chain` ( 3 | `id` bigint NOT NULL AUTO_INCREMENT, 4 | `chain_name` varchar(32) COLLATE utf8mb4_general_ci NOT NULL COMMENT 'chain name', 5 | `chain_type` int NOT NULL COMMENT 
'0:polkadot,1:eth,2: solana', 6 | `create_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'create time', 7 | `update_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'update time', 8 | PRIMARY KEY (`id`), 9 | UNIQUE KEY `uniq_chain_name_type` (`chain_name`,`chain_type`) USING BTREE 10 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci; 11 | 12 | DROP TABLE IF EXISTS `pinning_service`.`pin_object`; 13 | CREATE TABLE `pinning_service`.`pin_object` ( 14 | `id` bigint NOT NULL AUTO_INCREMENT, 15 | `name` varchar(255) COLLATE utf8mb4_general_ci NOT NULL COMMENT 'name default:cid', 16 | `request_id` varchar(64) COLLATE utf8mb4_general_ci NOT NULL COMMENT 'request_id', 17 | `user_id` bigint NOT NULL COMMENT 'user.id', 18 | `cid` varchar(64) COLLATE utf8mb4_general_ci NOT NULL COMMENT 'ipfs cid', 19 | `status` varchar(16) COLLATE utf8mb4_general_ci NOT NULL COMMENT 'status', 20 | `info` json DEFAULT NULL COMMENT 'info', 21 | `meta` json DEFAULT NULL COMMENT 'meta', 22 | `delegates` text COLLATE utf8mb4_general_ci COMMENT 'delegates (join with ,)', 23 | `origins` text COLLATE utf8mb4_general_ci COMMENT 'origins (join with ,)', 24 | `create_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'create time', 25 | `update_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'update time', 26 | `deleted` int NOT NULL DEFAULT '0' COMMENT '1:deleted, 0:undeleted', 27 | PRIMARY KEY (`id`), 28 | UNIQUE KEY `uniq_pin_object_request_id` (`request_id`) USING BTREE, 29 | UNIQUE KEY `uniq_pin_object_user_id_cid` (`user_id`,`cid`) USING BTREE, 30 | KEY `index_pin_object_status` (`status`) USING BTREE 31 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci; 32 | 33 | DROP TABLE IF EXISTS `pinning_service`.`user`; 34 | CREATE TABLE `pinning_service`.`user` ( 35 | `id` bigint NOT NULL AUTO_INCREMENT, 36 | `chain_type` int NOT NULL COMMENT '0: polkadot, 1: eth, 2: solana', 37 | `address` varchar(64) CHARACTER SET utf8mb4 
COLLATE utf8mb4_general_ci NOT NULL COMMENT 'address', 38 | `create_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'create_time', 39 | PRIMARY KEY (`id`), 40 | UNIQUE KEY `uniq_user_address_chain_type` (`address`,`chain_type`) USING BTREE 41 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci; 42 | 43 | INSERT INTO `pinning_service`.`chain` (`chain_name`, `chain_type`) VALUES ('substrate', 0); 44 | -------------------------------------------------------------------------------- /sql/V3_ALTER_TABLE_PIN_OBJECT_ADD_RETRY_TIMES.sql: -------------------------------------------------------------------------------- 1 | ALTER TABLE `pinning_service`.`pin_object` 2 | ADD COLUMN `retry_times` int NOT NULL DEFAULT 0 COMMENT 'retry order times' AFTER `deleted`; 3 | -------------------------------------------------------------------------------- /src/app.ts: -------------------------------------------------------------------------------- 1 | import * as express from 'express'; 2 | import * as cors from 'cors'; 3 | import {router as psaRouter} from './routes/psa'; 4 | import * as bodyParser from 'body-parser'; 5 | const pinningAuthHandler = require('./middlewares/auth/authHandler'); 6 | const w3authHandler = require('@crustio/ipfs-w3auth-handler'); 7 | const Postgrator = require('postgrator'); 8 | const path = require('path'); 9 | import {updatePinObjectStatus, orderStart, pinExpireFiles} from './service/pinning'; 10 | import {configs} from './config/config'; 11 | 12 | const app = express(); 13 | 14 | app.use(cors()); 15 | app.use(bodyParser.json()); 16 | app.use(bodyParser.urlencoded({extended: true})); 17 | app.use(w3authHandler); 18 | app.use('/psa', pinningAuthHandler, psaRouter); 19 | 20 | const postgrator = new Postgrator({ 21 | migrationDirectory: path.join(__dirname, configs.evolution.location), 22 | schemaTable: configs.evolution.schema_table, 23 | driver: 'mysql2', 24 | host: configs.db.host, 25 | port: configs.db.port, 26 | database: 
configs.db.db, 27 | username: configs.db.user, 28 | password: configs.db.password, 29 | }); 30 | 31 | postgrator.migrate('max').then((migrations: any) => { 32 | app.listen(configs.server.port); 33 | updatePinObjectStatus(); 34 | orderStart(); 35 | pinExpireFiles(); 36 | }); 37 | -------------------------------------------------------------------------------- /src/common/commonUtils.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * @author zhouzibo 3 | * @date 2021/9/6 4 | */ 5 | import * as moment from 'moment'; 6 | import BigNumber from 'bignumber.js'; 7 | import {v4 as uuidv4} from 'uuid'; 8 | 9 | export const fromDecimal = (amount: number | string) => { 10 | const amountBN = new BigNumber(amount); 11 | return amountBN.multipliedBy(new BigNumber(1_000_000_000_000)); 12 | }; 13 | 14 | export function parserStrToObj(str: any) { 15 | if (!str) { 16 | return null; 17 | } else { 18 | return JSON.parse(JSON.stringify(str)); 19 | } 20 | } 21 | 22 | export const TextMatchingStrategy = { 23 | exact: 'exact', 24 | iexact: 'iexact', 25 | partial: 'partial', 26 | ipartial: 'ipartial', 27 | }; 28 | 29 | export const PinObjectStatus = { 30 | queued: 'queued', 31 | pinning: 'pinning', 32 | pinned: 'pinned', 33 | failed: 'failed', 34 | }; 35 | 36 | export const isDate = (value: string): boolean => { 37 | return moment(value).isValid(); 38 | }; 39 | 40 | export const getEnv = (value: string, defaultValue: any): any => { 41 | return process.env[value] || defaultValue; 42 | }; 43 | 44 | export const uuid = (): string => { 45 | return `${uuidv4()}-${new Date().getTime()}`; 46 | }; 47 | 48 | export function sleep(time: number) { 49 | return new Promise((resolve, reject) => { 50 | setTimeout(() => { 51 | resolve(); 52 | }, time); 53 | }); 54 | } 55 | -------------------------------------------------------------------------------- /src/common/promise-utils.ts: 
-------------------------------------------------------------------------------- 1 | import * as Bluebird from 'bluebird'; 2 | 3 | export async function timeout<T>( 4 | p: Promise<T>, 5 | timeout: number, 6 | timeoutValue: T | (() => T) 7 | ): Promise<T> { 8 | const emptyResult = {} as any; // eslint-disable-line 9 | const v = await Bluebird.race([p, Bluebird.delay(timeout, emptyResult)]); 10 | if (v === emptyResult) { 11 | if (typeof timeoutValue === 'function') { 12 | return (timeoutValue as () => T)(); 13 | } 14 | return timeoutValue; 15 | } 16 | return v; 17 | } 18 | 19 | export async function timeoutOrError<T>( 20 | name: string, 21 | p: Promise<T>, 22 | time: number 23 | ): Promise<T> { 24 | return timeout(p, time, () => { 25 | throw new Error(`"${name}" failed to resolve in ${time}ms`); 26 | }); 27 | } 28 | -------------------------------------------------------------------------------- /src/config/config.ts: -------------------------------------------------------------------------------- 1 | const {getEnv} = require('../common/commonUtils'); 2 | const _ = require('lodash'); 3 | 4 | export const configs = { 5 | db: { 6 | host: getEnv('MYSQL_HOST', 'localhost'), 7 | port: _.parseInt(getEnv('MYSQL_PORT', 3306)), 8 | db: getEnv('MYSQL_DB', 'pinning_service'), 9 | user: getEnv('MYSQL_USER', 'root'), 10 | password: getEnv('MYSQL_PASSWORD', 'root'), 11 | db_pool_max: _.parseInt(getEnv('MYSQL_POOL_MAX', 10)), 12 | db_pool_min: _.parseInt(getEnv('MYSQL_POOL_MIN', 0)), 13 | db_pool_idle: _.parseInt(getEnv('MYSQL_POOL_IDLE', 30000)), 14 | db_pool_acquire: _.parseInt(getEnv('MYSQL_POOL_ACQUIRE', 30000)), 15 | }, 16 | ipfs: { 17 | delegates: [] as string[], 18 | }, 19 | evolution: { 20 | schema_table: 'data_migrations', 21 | location: '/migrations', 22 | }, 23 | crust: { 24 | seed: getEnv('CRUST_SEED', ''), 25 | chainWsUrl: getEnv('WS_ENDPOINT', 'wss://rpc.crust.network'), 26 | defaultFileSize: _.parseInt(getEnv('DEFAULT_FILE_SIZE', 2147483648)), 27 | tips: getEnv('CRUST_TIPS', 0), 28 | 
expireBlockNumber: getEnv('EXPIRE_BLOCK_NUMBER', 10 * 60 * 24 * 30), 29 | validFileSize: _.parseInt(getEnv('VALID_FILE_REPLICAS', 30)), 30 | orderTimeAwait: _.parseInt(getEnv('ORDER_TIME_AWAIT', 3000)), 31 | loopTimeAwait: _.parseInt(getEnv('LOOP_TIME_AWAIT', 20000)), 32 | checkAmountTimeAwait: _.parseInt(getEnv('CHECK_AMOUNT_TIME_AWAIT', 120000)), 33 | checkAmountRetryTimes: _.parseInt(getEnv('CHECK_AMOUNT_RETRY_TIMES', 3)), 34 | orderRetryTimes: _.parseInt(getEnv('ORDER_RETRY_TIMES', 3)), 35 | minimumAmount: _.parseInt(getEnv('MINIMUM_AMOUNT', 1)), 36 | warningAccessToken: getEnv( 37 | 'WARNING_ACCESSTOKEN', 38 | '' 39 | ), 40 | transactionTimeout: _.parseInt(getEnv('TRANSACTION_TIMEOUT', 60 * 1000)), 41 | }, 42 | server: { 43 | port: 3000, 44 | name: getEnv('NODE_ENV', 'prod'), 45 | }, 46 | }; 47 | -------------------------------------------------------------------------------- /src/dao/commonDao.ts: -------------------------------------------------------------------------------- 1 | const _ = require('lodash'); 2 | import {sequelize} from '../db/db'; 3 | import {logger} from '../logger'; 4 | 5 | const commonDao = { 6 | queryForCount: queryForCount, 7 | queryForArray: queryForArray, 8 | queryForObj: queryForObj, 9 | queryForUpdate: queryForUpdate, 10 | }; 11 | 12 | function queryForCount(sql: string, replace: any[]): Promise { 13 | return sequelize 14 | .query(sql, { 15 | replacements: replace, 16 | type: sequelize.QueryTypes.SELECT, 17 | raw: true, 18 | }) 19 | .then((r: any[]) => { 20 | if (!_.isEmpty(r)) { 21 | const res = r[0]; 22 | return res[Object.keys(res)[0]]; 23 | } 24 | }); 25 | } 26 | 27 | function queryForArray(sql: string, replace: any[]): Promise { 28 | return sequelize 29 | .query(sql, { 30 | replacements: replace, 31 | type: sequelize.QueryTypes.SELECT, 32 | }) 33 | .then((r: any[]) => { 34 | if (!_.isEmpty(r)) { 35 | return r; 36 | } 37 | return []; 38 | }); 39 | } 40 | 41 | function queryForObj(sql: string, replace: any[]): Promise { 42 | 
return sequelize 43 | .query(sql, { 44 | replacements: replace, 45 | type: sequelize.QueryTypes.SELECT, 46 | }) 47 | .then((r: any[]) => { 48 | if (!_.isEmpty(r)) { 49 | return r[0]; 50 | } 51 | return {}; 52 | }); 53 | } 54 | 55 | function queryForUpdate(sql: string, replace: any[]): Promise { 56 | return sequelize 57 | .query(sql, { 58 | replacements: replace, 59 | type: sequelize.QueryTypes.UPDATE, 60 | }) 61 | .then((r: any) => { 62 | logger.info(r); 63 | return 0; 64 | }); 65 | } 66 | 67 | module.exports = commonDao; 68 | -------------------------------------------------------------------------------- /src/dao/pinObjectDao.ts: -------------------------------------------------------------------------------- 1 | import {PinObjectsQuery, PinResults, PinStatus} from '../models/PinObjects'; 2 | import {PinObjectStatus} from "../common/commonUtils"; 3 | const {TextMatchingStrategy} = require('../common/commonUtils'); 4 | const _ = require('lodash'); 5 | const commonDao = require('./commonDao'); 6 | const pinObjectDao = { 7 | selectPinObjectCountByQuery: selectPinObjectCountByQuery, 8 | selectPinObjectListByQuery: selectPinObjectListByQuery, 9 | selectPinObjectByRequestIdAndUserId: selectPinObjectByRequestIdAndUserId, 10 | deletePinObjectByRequestIdAndUserId: deletePinObjectByRequestIdAndUserId, 11 | queryPinningObjects: queryPinningObjects, 12 | }; 13 | 14 | async function deletePinObjectByRequestIdAndUserId( 15 | requestId: string, 16 | userId: number 17 | ) { 18 | return commonDao.queryForUpdate( 19 | 'update pin_object set deleted = 1 where user_id = ? and request_id = ?', 20 | [userId, requestId] 21 | ); 22 | } 23 | 24 | async function queryPinningObjects(limit: number = 100) { 25 | return commonDao.queryForArray( 26 | 'select * from pin_object where deleted = ? and status = ? 
limit ?', 27 | [0, PinObjectStatus.pinning, limit] 28 | ); 29 | } 30 | 31 | async function selectPinObjectByRequestIdAndUserId( 32 | requestId: string, 33 | userId: number 34 | ): Promise { 35 | const result = await commonDao.queryForObj( 36 | 'select * from pin_object where deleted = 0 and user_id = ? and request_id = ?', 37 | [userId, requestId] 38 | ); 39 | if (!_.isEmpty(result)) { 40 | return PinStatus.parseBaseData(result); 41 | } else { 42 | return null; 43 | } 44 | } 45 | 46 | async function selectPinObjectListByQuery( 47 | query: PinObjectsQuery 48 | ): Promise { 49 | const count = await selectPinObjectCountByQuery(query); 50 | const pinResult = new PinResults(); 51 | pinResult.count = count; 52 | if (count > 0) { 53 | const [sql, args] = await parsePinObjectQuery( 54 | query, 55 | 'select * from pin_object where deleted = 0 and user_id = ?', 56 | [query.userId] 57 | ); 58 | const result = await commonDao.queryForArray(sql, args); 59 | pinResult.results = _.map(result, (i: any) => PinStatus.parseBaseData(i)); 60 | } else { 61 | pinResult.results = []; 62 | } 63 | return pinResult; 64 | } 65 | 66 | function selectPinObjectCountByQuery(query: PinObjectsQuery): Promise { 67 | const [sql, args] = parsePinObjectQuery( 68 | query, 69 | 'select count(*) from pin_object where deleted = 0 and user_id = ?', 70 | [query.userId] 71 | ); 72 | return commonDao.queryForCount(sql, args); 73 | } 74 | 75 | function parsePinObjectQuery( 76 | query: PinObjectsQuery, 77 | baseSql: string, 78 | baseArgs: any[] 79 | ): [string, any[]] { 80 | let sql = baseSql; 81 | let args = baseArgs; 82 | if (query.cid) { 83 | if (_.isArray(query.cid)) { 84 | sql = `${sql} and cid in (${_.map(query.cid, () => '?').join(',')})`; 85 | } else { 86 | sql = `${sql} and cid = ?`; 87 | } 88 | args = _.concat(args, query.cid); 89 | } 90 | if (query.after) { 91 | sql = `${sql} and create_time > ?`; 92 | args.push(query.after); 93 | } 94 | if (query.before) { 95 | sql = `${sql} and create_time < ?`; 96 
| args.push(query.before); 97 | } 98 | if (query.status) { 99 | if (_.isArray(query.status)) { 100 | sql = `${sql} and status in (${_.map(query.status, () => '?').join( 101 | ',' 102 | )})`; 103 | } else { 104 | sql = `${sql} and status = ?`; 105 | } 106 | args = _.concat(args, query.status); 107 | } 108 | if (query.name) { 109 | if (query.match === TextMatchingStrategy.exact) { 110 | sql = `${sql} and name = ?`; 111 | args.push(query.name); 112 | } else if (query.match === TextMatchingStrategy.iexact) { 113 | sql = `${sql} and UPPER(name) = ?`; 114 | args.push(query.name.toUpperCase()); 115 | } else if (query.match === TextMatchingStrategy.partial) { 116 | sql = `${sql} and name like ?`; 117 | args.push(`%${query.name}%`); 118 | } else { 119 | sql = `${sql} and UPPER(name) like ?`; 120 | args.push(`%${query.name}%`); 121 | } 122 | } 123 | if (query.meta && query.meta.size > 0) { 124 | const metaSql: string[] = []; 125 | query.meta.forEach((value: string, key: string) => { 126 | let queryValue = value; 127 | if (query.match === TextMatchingStrategy.iexact) { 128 | queryValue = `"${value}"`; 129 | metaSql.push('UPPER(meta->?)=UPPER(?)'); 130 | } else if (query.match === TextMatchingStrategy.partial) { 131 | queryValue = `%${value}%`; 132 | metaSql.push('meta->? like ?'); 133 | } else if (query.match === TextMatchingStrategy.ipartial) { 134 | queryValue = `%${value}%`; 135 | metaSql.push('UPPER(meta->?) 
like UPPER(?)'); 136 | } else { 137 | metaSql.push('meta->?=?'); 138 | } 139 | args.push(`$.${key}`, queryValue); 140 | }); 141 | sql = `${sql} and (${metaSql.join(' and ')})`; 142 | } 143 | sql = `${sql} order by create_time desc`; 144 | if (query.limit) { 145 | sql = `${sql} limit ?`; 146 | args.push(query.limit); 147 | } 148 | return [sql, args]; 149 | } 150 | 151 | module.exports = pinObjectDao; 152 | -------------------------------------------------------------------------------- /src/db/db.ts: -------------------------------------------------------------------------------- 1 | require('mysql2'); 2 | const Sequelize = require('sequelize'); 3 | import {configs} from '../config/config'; 4 | const {getEnv} = require('../common/commonUtils'); 5 | 6 | export const sequelize = new Sequelize( 7 | configs.db.db, 8 | configs.db.user, 9 | configs.db.password, 10 | { 11 | host: configs.db.host, 12 | port: configs.db.port, 13 | logging: !(getEnv('NODE_ENV', 'test') === 'production'), 14 | dialect: 'mysql', 15 | pool: { 16 | max: configs.db.db_pool_max, 17 | min: configs.db.db_pool_min, 18 | idle: configs.db.db_pool_idle, 19 | acquire: configs.db.db_pool_acquire, 20 | }, 21 | operatorsAliases: false, 22 | timezone: '+08:00', 23 | } 24 | ); 25 | -------------------------------------------------------------------------------- /src/logger/index.ts: -------------------------------------------------------------------------------- 1 | import * as winston from 'winston'; 2 | 3 | export const logger = winston.createLogger({ 4 | format: winston.format.combine( 5 | winston.format.timestamp({ 6 | format: 'YYYY-MM-DD HH:mm:ss', 7 | }), 8 | winston.format.printf( 9 | info => `[${info.level}] [${[info.timestamp]}] ${info.message}` 10 | ) 11 | ), 12 | transports: [ 13 | new winston.transports.Console(), 14 | new winston.transports.File({filename: 'logger.log'}), 15 | ], 16 | }); 17 | -------------------------------------------------------------------------------- 
/src/middlewares/auth/authHandler.ts: -------------------------------------------------------------------------------- 1 | import { Request } from 'express'; 2 | import { logger } from '../../logger'; 3 | const _ = require('lodash'); 4 | const Users = require('./../../models/Users'); 5 | 6 | async function auth(req: Request, res: any, next: any) { 7 | const chainType = res.chainType; 8 | let address = res.chainAddress; 9 | 10 | logger.info(`Validate chainType: ${chainType} address: ${address} success`); 11 | logger.info(`txMsg: ${res.txMsg} tyMsg: ${res.tyMsg}`) 12 | if (chainType == "xx") { 13 | address = res.txMsg 14 | } 15 | // Find or create user 16 | const [user, created] = await Users.findOrCreate({ 17 | where: { chain_type: chainType, address: address }, 18 | defaults: { chain_type: chainType, address: address }, 19 | }); 20 | 21 | req.query.userId = user.id; 22 | next(); 23 | } 24 | 25 | module.exports = auth; 26 | -------------------------------------------------------------------------------- /src/middlewares/validate/validationHandler.ts: -------------------------------------------------------------------------------- 1 | import {validationResult} from 'express-validator'; 2 | import {Request, Response} from 'express'; 3 | import {Failure, FailureError} from '../../models/Failure'; 4 | import * as _ from 'lodash'; 5 | 6 | function validate(validations: any[]) { 7 | return async (req: Request, res: Response, next: any) => { 8 | for (const validation of validations) { 9 | const result = await validation.run(req); 10 | if (result.errors.length) break; 11 | } 12 | const errors = validationResult(req); 13 | if (errors.isEmpty()) { 14 | return next(); 15 | } 16 | const reason = _.head(errors.array()).msg as string; 17 | res.status(400).json(new Failure(new FailureError(reason, reason))); 18 | }; 19 | } 20 | 21 | module.exports = validate; 22 | -------------------------------------------------------------------------------- /src/migrations/001.do.sql: 
-------------------------------------------------------------------------------- 1 | ALTER TABLE `pinning_service`.`user` 2 | MODIFY COLUMN `chain_type` varchar(32) CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci NOT NULL COMMENT 'chain type as: crust, cru' AFTER `id`; 3 | update `user` set chain_type = 'substrate' where chain_type = '0'; 4 | update `user` set chain_type = 'ethereum' where chain_type = '1'; 5 | update `user` set chain_type = 'solana' where chain_type = '2'; 6 | update `user` set chain_type = 'avalanche' where chain_type = '3'; 7 | update `user` set chain_type = 'flow' where chain_type = '4'; 8 | update `user` set chain_type = 'elrond' where chain_type = '5'; 9 | -------------------------------------------------------------------------------- /src/models/Chains.ts: -------------------------------------------------------------------------------- 1 | import {sequelize} from '../db/db'; 2 | import * as Sequelize from 'sequelize'; 3 | 4 | const Chains = sequelize.define( 5 | 'chain', 6 | { 7 | id: { 8 | type: Sequelize.BIGINT, 9 | primaryKey: true, 10 | autoIncrement: true, 11 | }, 12 | chain_name: { 13 | type: Sequelize.STRING(32), 14 | allowNull: false, 15 | }, 16 | chain_type: { 17 | type: Sequelize.INTEGER, 18 | allowNull: false, 19 | }, 20 | create_time: Sequelize.DATE, 21 | update_time: Sequelize.DATE, 22 | }, 23 | { 24 | timestamps: false, 25 | tableName: 'chain', 26 | } 27 | ); 28 | 29 | module.exports = Chains; 30 | -------------------------------------------------------------------------------- /src/models/Failure.ts: -------------------------------------------------------------------------------- 1 | export class Failure { 2 | error: FailureError; 3 | 4 | constructor(error: FailureError) { 5 | this.error = error; 6 | } 7 | 8 | static commonErr(err: string): Failure { 9 | return new Failure(FailureError.commonErr(err)); 10 | } 11 | } 12 | 13 | export class FailureError { 14 | reason: string; 15 | details: string; 16 | 17 | constructor(reason: 
string, details: string) { 18 | this.reason = reason; 19 | this.details = details; 20 | } 21 | 22 | static commonErr(err: string): FailureError { 23 | return new FailureError(err, err); 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /src/models/PinObjects.ts: -------------------------------------------------------------------------------- 1 | import * as Sequelize from 'sequelize'; 2 | import {sequelize} from '../db/db'; 3 | import {Request} from 'express'; 4 | const _ = require('lodash'); 5 | const moment = require('moment'); 6 | 7 | const DEFAULT_DELEGATES = ['/ip4/183.131.193.198/tcp/14001/p2p/12D3KooWMcAHcs97R49PLZjGUKDbP1fr9iijeepod8fkktHTLCgN']; 8 | 9 | export const PinObjects = sequelize.define( 10 | 'pin_object', 11 | { 12 | id: { 13 | type: Sequelize.BIGINT, 14 | primaryKey: true, 15 | autoIncrement: true, 16 | }, 17 | name: { 18 | type: Sequelize.STRING(255), 19 | allowNull: false, 20 | }, 21 | request_id: { 22 | type: Sequelize.STRING(64), 23 | allowNull: false, 24 | }, 25 | user_id: { 26 | type: Sequelize.BIGINT, 27 | allowNull: false, 28 | }, 29 | cid: { 30 | type: Sequelize.STRING(64), 31 | allowNull: false, 32 | }, 33 | status: { 34 | type: Sequelize.STRING(16), 35 | allowNull: false, 36 | }, 37 | info: Sequelize.JSON, 38 | meta: Sequelize.JSON, 39 | delegates: Sequelize.TEXT, 40 | origins: Sequelize.TEXT, 41 | create_time: Sequelize.DATE, 42 | update_time: Sequelize.DATE, 43 | deleted: Sequelize.INTEGER, 44 | retry_times: Sequelize.INTEGER, 45 | }, 46 | { 47 | timestamps: false, 48 | tableName: 'pin_object', 49 | } 50 | ); 51 | 52 | export class PinObjectsQuery { 53 | userId: number; 54 | cid: string[]; 55 | name: string; 56 | match: string; 57 | status: string[]; 58 | before: string; 59 | after: string; 60 | limit: number; 61 | meta: Map; 62 | 63 | static parseQuery(req: Request): PinObjectsQuery { 64 | const query = new PinObjectsQuery(); 65 | query.userId = _.parseInt(req.query.userId as 
string); 66 | query.cid = req.query.cid ? (req.query.cid as string).split(',') : null; 67 | query.name = req.query.name as string; 68 | query.match = req.query.match as string; 69 | query.status = req.query.status 70 | ? (req.query.status as string).split(',') 71 | : null; 72 | query.before = req.query.before 73 | ? moment(req.query.before).format('YYYY-MM-DD HH:mm:ss') 74 | : null; 75 | query.after = req.query.after 76 | ? moment(req.query.after).format('YYYY-MM-DD HH:mm:ss') 77 | : null; 78 | query.limit = _.parseInt(req.query.limit as string); 79 | const meta = new Map(); 80 | req.query = _.omit(req.query, baseQuery); 81 | _.forEach(req.query, (v: string, k: string) => { 82 | meta.set(k, v); 83 | }); 84 | query.meta = meta; 85 | return query; 86 | } 87 | } 88 | 89 | export class PinResults { 90 | count: number; 91 | results: PinStatus[]; 92 | } 93 | 94 | export class PinStatus { 95 | requestid: string; 96 | status: string; 97 | created: string; 98 | pin: Pin; 99 | delegates: string[]; 100 | info: Map; 101 | 102 | static parseBaseData(baseData: any): PinStatus { 103 | const result = new PinStatus(); 104 | result.requestid = baseData.request_id; 105 | result.status = baseData.status; 106 | result.created = dateFormat(baseData.create_time); 107 | result.pin = Pin.parseBaseData(baseData); 108 | result.delegates = baseData.delegates ? baseData.delegates.split(',') : DEFAULT_DELEGATES; 109 | result.info = baseData.info ? baseData.info : {}; 110 | return result; 111 | } 112 | } 113 | 114 | export class Pin { 115 | cid: string; 116 | name: string; 117 | origins: Set; 118 | meta: Map; 119 | 120 | static parseBaseData(baseData: any): Pin { 121 | const pin = new Pin(); 122 | pin.cid = baseData.cid; 123 | pin.name = baseData.name; 124 | pin.meta = baseData.meta; 125 | pin.origins = 126 | baseData.origins && baseData.origins.length > 0 127 | ? 
baseData.origins.split(',') 128 | : []; 129 | return pin; 130 | } 131 | 132 | static parsePinFromRequest(req: any): Pin { 133 | const pin = new Pin(); 134 | pin.cid = req.body.cid; 135 | pin.name = req.body.name ? req.body.name : req.body.cid; 136 | pin.origins = _.isEmpty(req.body.origins) ? [] : req.body.origins; 137 | pin.meta = req.body.meta; 138 | return pin; 139 | } 140 | } 141 | 142 | function dateFormat(time: any): string { 143 | return moment(time).format('YYYY-MM-DDTHH:mm:ssZ'); 144 | } 145 | 146 | const baseQuery = [ 147 | 'userId', 148 | 'cid', 149 | 'name', 150 | 'match', 151 | 'status', 152 | 'before', 153 | 'after', 154 | 'limit', 155 | ]; 156 | -------------------------------------------------------------------------------- /src/models/Users.ts: -------------------------------------------------------------------------------- 1 | import {sequelize} from '../db/db'; 2 | import * as Sequelize from 'sequelize'; 3 | 4 | const Users = sequelize.define( 5 | 'user', 6 | { 7 | id: { 8 | type: Sequelize.BIGINT, 9 | primaryKey: true, 10 | autoIncrement: true, 11 | }, 12 | chain_type: { 13 | type: Sequelize.STRING(32), 14 | allowNull: false, 15 | }, 16 | address: { 17 | type: Sequelize.STRING(64), 18 | allowNull: false, 19 | }, 20 | create_time: Sequelize.DATE, 21 | }, 22 | { 23 | timestamps: false, 24 | tableName: 'user', 25 | } 26 | ); 27 | 28 | module.exports = Users; 29 | -------------------------------------------------------------------------------- /src/routes/psa.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * @author zibo 3 | * @date 2021/9/6 4 | */ 5 | import * as express from 'express'; 6 | import { query, body, param } from 'express-validator'; 7 | import { 8 | PinObjects, 9 | PinObjectsQuery, 10 | PinResults, 11 | PinStatus, 12 | Pin, 13 | } from '../models/PinObjects'; 14 | import { Failure } from '../models/Failure'; 15 | const pinObjectDao = require('../dao/pinObjectDao'); 16 | const validate = 
require('../middlewares/validate/validationHandler'); 17 | const { 18 | TextMatchingStrategy, 19 | PinObjectStatus, 20 | isDate, 21 | } = require('./../common/commonUtils'); 22 | import { pinByCid, replacePin } from '../service/pinning'; 23 | import { logger } from '../logger'; 24 | const _ = require('lodash'); 25 | const Users = require('./../models/Users'); 26 | export const router = express.Router(); 27 | router.get( 28 | '/pins', 29 | validate([ 30 | query('cid') 31 | .optional() 32 | .custom((value: any) => { 33 | if (_.isArray(value)) { 34 | return value.length > 0 && value.length < 10; 35 | } else { 36 | return _.isString(value); 37 | } 38 | }), 39 | query('name').optional().isString().isLength({ max: 255 }), 40 | query('match').optional().isIn(_.keys(TextMatchingStrategy)), 41 | query('status') 42 | .optional() 43 | .custom((value: any) => { 44 | if (_.isString(value)) { 45 | const pinStatus = _.keys(PinObjectStatus); 46 | const values = (value as string).split(','); 47 | for (const item of values) { 48 | if (!_.includes(pinStatus, item)) { 49 | return false; 50 | } 51 | } 52 | return true; 53 | } else { 54 | return false; 55 | } 56 | }), 57 | query('before').custom(isDate), 58 | query('after').custom(isDate), 59 | query('limit').default(10).isInt({ max: 1000, min: 1 }), 60 | ]), 61 | (req, res) => { 62 | pinObjectDao 63 | .selectPinObjectListByQuery(PinObjectsQuery.parseQuery(req)) 64 | .then((r: PinResults) => { 65 | res.json(r); 66 | }); 67 | } 68 | ); 69 | 70 | router.get('/pins/:requestId', (req, res) => { 71 | pinObjectDao 72 | .selectPinObjectByRequestIdAndUserId(req.params.requestId, req.query.userId) 73 | .then((r: PinStatus) => { 74 | if (_.isEmpty(r)) { 75 | res.status(404).json(Failure.commonErr('not found')); 76 | } else { 77 | res.json(r); 78 | } 79 | }); 80 | }); 81 | 82 | router.get('/cids/:address', 83 | validate([ 84 | param('address').isString().notEmpty(), 85 | ]), 86 | async (req, res) => { 87 | const user = await Users.findOne({ 88 | 
where: { address: req.params.address }, 89 | order: [['create_time', 'DESC']] 90 | }); 91 | if (user) { 92 | const pobjs = await PinObjects.findAll({ 93 | where: { user_id: user.id }, 94 | order: [['update_time', 'DESC']] 95 | }); 96 | if (pobjs) { 97 | const cids = [] 98 | for (let p of pobjs) { 99 | cids.push(p.cid) 100 | } 101 | res.json({ 102 | address: req.params.address, 103 | cids: cids 104 | }); 105 | } else { 106 | res.sendStatus(404); 107 | } 108 | } else { 109 | res.sendStatus(404); 110 | } 111 | }); 112 | 113 | router.post( 114 | '/pins/:requestId', 115 | validate([ 116 | body('cid').isString().notEmpty().withMessage('cid not empty'), 117 | body('name').optional().isString(), 118 | body('origins').optional().isArray(), 119 | param('requestId').isString().notEmpty(), 120 | ]), 121 | (req, res) => { 122 | replacePin( 123 | _.parseInt(req.query.userId), 124 | req.params.requestId, 125 | Pin.parsePinFromRequest(req) 126 | ).then((r: PinStatus) => { 127 | res.status(202).json(r); 128 | }); 129 | } 130 | ); 131 | 132 | router.post( 133 | '/pins', 134 | validate([ 135 | body('cid').isString().notEmpty().withMessage('cid not empty'), 136 | body('name').optional().isString(), 137 | body('origins').optional().isArray(), 138 | ]), 139 | (req, res) => { 140 | pinByCid(_.parseInt(req.query.userId), Pin.parsePinFromRequest(req)) 141 | .then((r: PinStatus) => { 142 | res.status(202).json(r); 143 | }) 144 | .catch((e: Error) => { 145 | res.status(500).json(Failure.commonErr(e.message)); 146 | }); 147 | } 148 | ); 149 | 150 | router.delete('/pins/:requestId', (req, res) => { 151 | pinObjectDao 152 | .deletePinObjectByRequestIdAndUserId(req.params.requestId, req.query.userId) 153 | .then(() => { 154 | res.sendStatus(202); 155 | }); 156 | }); 157 | -------------------------------------------------------------------------------- /src/service/crust/account.ts: -------------------------------------------------------------------------------- 1 | import {u8aToHex} from 
'@polkadot/util'; 2 | 3 | import {hdLedger, mnemonicGenerate} from '@polkadot/util-crypto'; 4 | import {Keyring} from '@polkadot/keyring'; 5 | type PairType = 'ecdsa' | 'ed25519' | 'ed25519-ledger' | 'ethereum' | 'sr25519'; 6 | 7 | const keyring = new Keyring(); 8 | function getSuri(seed: string, derivePath: string, pairType: PairType): string { 9 | return pairType === 'ed25519-ledger' 10 | ? u8aToHex(hdLedger(seed, derivePath).secretKey.slice(0, 32)) 11 | : pairType === 'ethereum' 12 | ? `${seed}/${derivePath}` 13 | : `${seed}${derivePath}`; 14 | } 15 | 16 | export function addressFromSeed( 17 | seed: string, 18 | derivePath = '', 19 | pairType: PairType = 'sr25519' 20 | ): string { 21 | keyring.setSS58Format(66); 22 | return keyring.createFromUri( 23 | getSuri(seed, derivePath, pairType), 24 | {}, 25 | pairType === 'ed25519-ledger' ? 'ed25519' : pairType 26 | ).address; 27 | } 28 | 29 | interface AddressState { 30 | address: string; 31 | seed: string; 32 | } 33 | 34 | export async function create(): Promise { 35 | const seed = mnemonicGenerate(); 36 | const address = addressFromSeed(seed); 37 | if (!address || !seed) { 38 | throw new Error('Generate Error'); 39 | } 40 | 41 | return { 42 | address, 43 | seed, 44 | }; 45 | } 46 | -------------------------------------------------------------------------------- /src/service/crust/api.ts: -------------------------------------------------------------------------------- 1 | import {ApiPromise, ApiRx, WsProvider} from '@polkadot/api'; 2 | import {typesBundleForPolkadot} from '@crustio/type-definitions'; 3 | import {configs} from '../../config/config'; 4 | 5 | export const apiConnect = (): ApiPromise => { 6 | return new ApiPromise({ 7 | provider: new WsProvider(configs.crust.chainWsUrl), 8 | typesBundle: typesBundleForPolkadot, 9 | }); 10 | } 11 | 12 | export const disconnectApi = async (api: ApiPromise) => { 13 | if (api) { 14 | await api.disconnect().catch((e) => {}); 15 | } 16 | } 17 | 
--------------------------------------------------------------------------------
/src/service/crust/krp.ts:
--------------------------------------------------------------------------------
import {Keyring} from '@polkadot/keyring';

// Single shared keyring; ordering accounts use the sr25519 scheme.
const kr = new Keyring({
  type: 'sr25519',
});

/**
 * Create the keypair used to sign and send transactions.
 *
 * @param seeds mnemonic or secret URI of the ordering account
 * @returns the derived sr25519 keypair
 */
export default function (seeds: string) {
  return kr.addFromUri(seeds);
}
--------------------------------------------------------------------------------
/src/service/crust/order.ts:
--------------------------------------------------------------------------------
import {KeyringPair} from '@polkadot/keyring/types';
import {ApiPromise} from '@polkadot/api';
import {SubmittableExtrinsic} from '@polkadot/api/promise/types';
import {configs} from '../../config/config';
import {logger} from '../../logger';
import BigNumber from 'bignumber.js';
import {sleep} from '../../common/commonUtils';
import createKeyring from './krp';
const ChatBot = require('dingtalk-robot-sender');

// DingTalk robot used to push operational warnings (low balance, loop errors).
const robot = new ChatBot({
  webhook: `https://oapi.dingtalk.com/robot/send?access_token=${configs.crust.warningAccessToken}`,
});

// 1 CRU = 10^12 of the chain's smallest balance unit.
const CRU_UNIT = new BigNumber(1_000_000_000_000);

/**
 * Check that the configured ordering account holds at least the configured
 * minimum balance (in CRU). Sends a DingTalk warning when it is too low.
 *
 * @param api connected polkadot api instance
 * @returns true when the balance is sufficient; false otherwise or on error
 */
async function checkingAccountBalance(api: ApiPromise): Promise<boolean> {
  try {
    await api.isReady;
    const seeds = configs.crust.seed;
    const krp = createKeyring(seeds);
    // Convert from the smallest unit to CRU once; everything below uses CRU.
    let orderBalance = await getAccountBalance(api, krp.address);
    orderBalance = orderBalance.dividedBy(CRU_UNIT);
    const minimumAmount = configs.crust.minimumAmount;
    if (orderBalance.comparedTo(minimumAmount) >= 0) {
      return true;
    }
    logger.error(
      `orderBalance: ${orderBalance.toFixed(5)} min: ${minimumAmount}`
    );
    // BUG FIX: the balance was previously divided by 10^12 a second time
    // inside this message, so the warning under-reported the real balance.
    sendCrustOrderWarningMsg(
      'crust-pinner balance warning',
      `### crust-pinner(${configs.server.name}) \n address: ${krp.address} \n current balance: ${orderBalance.toString()}cru, min balance: ${minimumAmount}cru`
    );
  } catch (e) {
    logger.warn(`check account balance failed: ${e.message}`);
  }
  return false;
}

/**
 * Retry wrapper around checkingAccountBalance.
 *
 * @returns true as soon as one check succeeds; false after all retries fail
 */
export async function checkAccountBalanceAndWarning(
  api: ApiPromise
): Promise<boolean> {
  let retryTimes = 0;
  while (retryTimes <= configs.crust.checkAmountRetryTimes) {
    if (await checkingAccountBalance(api)) {
      return true;
    }
    await sleep(configs.crust.checkAmountTimeAwait);
    retryTimes++;
  }
  return false;
}

/**
 * Push an actionCard message to the DingTalk warning robot.
 * Fire-and-forget: delivery is not awaited and failures are not handled here.
 */
export function sendCrustOrderWarningMsg(title: string, text: string) {
  const textContent = {
    actionCard: {
      title: title,
      text: text,
    },
    msgtype: 'actionCard',
  };
  robot.send(textContent);
}

/**
 * Read an account's free balance, in the chain's smallest unit.
 *
 * @param api connected polkadot api instance
 * @param account SS58 address to query
 * @returns the free balance as a BigNumber
 */
export async function getAccountBalance(
  api: ApiPromise,
  account: string
): Promise<BigNumber> {
  await api.isReadyOrError;
  const infoStr = await api.query.system.account(account);
  // Codec -> plain JSON object so the nested numeric fields are accessible.
  const info = JSON.parse(JSON.stringify(infoStr));
  return new BigNumber(info.data.free);
}

/**
 * Build and submit a market.placeStorageOrder extrinsic.
 *
 * @param api connected polkadot api instance
 * @param krp keypair that signs the order
 * @param fileCID cid of the file to store
 * @param fileSize declared file size for the order
 * @param tip tip amount (stringified integer)
 * @param memo order memo
 * @returns the sendTx result as a plain JSON value (block hash, or false)
 */
export async function placeOrder(
  api: ApiPromise,
  krp: KeyringPair,
  fileCID: string,
  fileSize: number,
  tip: string,
  memo: string
) {
  // Determine whether to connect to the chain
  await api.isReadyOrError;
  // Generate transaction: fileCid, fileSize, tip, memo
  const pso = api.tx.market.placeStorageOrder(fileCID, fileSize, tip, memo);
  // Previously serialized through JSON twice; one round-trip is equivalent.
  const txRes = await sendTx(krp, pso);
  return JSON.parse(JSON.stringify(txRes));
}

/**
 * Sign and submit an extrinsic, settling when it reaches a block.
 *
 * Resolves with the inclusion block hash, or with false when the runtime
 * reports ExtrinsicFailed (a promise settles only once, so the later
 * block-hash resolve is a no-op in that case). Rejects when the transaction
 * is invalid/dropped/usurped/retracted or signing itself fails.
 */
export async function sendTx(
  krp: KeyringPair,
  tx: SubmittableExtrinsic
): Promise<string | boolean> {
  return new Promise((resolve, reject) => {
    tx.signAndSend(krp, ({events = [], status}) => {
      logger.info(
        ` ↪ 💸 [tx]: Transaction status: ${status.type}, nonce: ${tx.nonce}`
      );

      if (
        status.isInvalid ||
        status.isDropped ||
        status.isUsurped ||
        status.isRetracted
      ) {
        reject(new Error('order invalid'));
      }

      if (status.isInBlock) {
        events.forEach(({event: {method, section}}) => {
          if (section === 'system' && method === 'ExtrinsicFailed') {
            // Error with no detail, just return error
            logger.info(` ↪ 💸 ❌ [tx]: Send transaction(${tx.type}) failed.`);

            resolve(false);
          } else if (method === 'ExtrinsicSuccess') {
            logger.info(
              ` ↪ 💸 ✅ [tx]: Send transaction(${tx.type}) success.`
            );
          }
        });
        logger.info('Included at block hash', status.asInBlock.toHex());
        resolve(status.asInBlock.toHex());
      } else if (status.isFinalized) {
        logger.info('Finalized block hash', status.asFinalized.toHex());
      }
    }).catch((e: any) => {
      reject(e);
    });
  });
}

// Shape of the plain-JSON value stored in market.filesV2.
interface IFileInfo {
  file_size: number;
  expired_at: number;
  calculated_at: number;
  amount: number;
  prepaid: number;
  reported_replica_count: number;
  replicas: any;
}

/**
 * Query market.filesV2 for a cid.
 *
 * @returns a {meaningfulData, replicas} split of the on-chain file info, or
 *          null when the entry does not exist or cannot be parsed
 */
export async function getOrderState(api: ApiPromise, cid: string) {
  await api.isReadyOrError;
  const res = await api.query.market.filesV2(cid);
  // Codec -> plain object; null when the storage entry is empty.
  const data = res ? JSON.parse(JSON.stringify(res)) : null;
  if (data) {
    try {
      const {replicas, ...meaningfulData} = data as IFileInfo;
      return {
        meaningfulData,
        replicas,
      };
    } catch (e) {
      return null;
    }
  }
  return null;
}

/**
 * @returns the current chain head's block number, or null when the header
 *          cannot be fetched.
 *
 * NOTE(review): getHeader returns the LATEST head, not the finalized one,
 * despite this function's name — confirm whether getFinalizedHead was
 * intended (pinExpireFiles in the pinning service queries finality itself).
 */
export async function getFinalizeBlockNumber(api: ApiPromise) {
  await api.isReadyOrError;
  const res = await api.rpc.chain.getHeader();
  if (res) {
    return res.number.toNumber();
  }
  return null;
}
--------------------------------------------------------------------------------
/src/service/pinning/index.ts:
--------------------------------------------------------------------------------
import {Pin, PinStatus, PinObjects} from '../../models/PinObjects';
import {uuid, PinObjectStatus, fromDecimal} from '../../common/commonUtils';
import {configs} from '../../config/config';
import {
  placeOrder,
  getOrderState,
  checkAccountBalanceAndWarning,
  sendCrustOrderWarningMsg,
  sendTx,
} from '../crust/order';
import {apiConnect, disconnectApi} from '../crust/api';
import createKeyring from '../crust/krp';
const commonDao = require('../../dao/commonDao');
const moment = require('moment');
const _ = require('lodash');
import {logger} from '../../logger';
import {timeoutOrError} from '../../common/promise-utils';
import {ApiPromise} from '@polkadot/api';
// NOTE(review): 'disconnect' from node:cluster appears unused here — looks
// like an auto-import accident; verify before removing.
import {disconnect} from 'node:cluster';
const Sequelize = require('sequelize');
const {sleep} = require('../../common/commonUtils');
const pinObjectDao = require('../../dao/pinObjectDao');
const Op = Sequelize.Op;

/**
 * Delete any existing pin object for (requestId, userId) and create or
 * re-queue a pin for the given cid.
 */
export async function replacePin(
  userId: number,
  requestId: string,
  pin: Pin
): Promise<PinStatus> {
  await pinObjectDao.deletePinObjectByRequestIdAndUserId(requestId, userId);
  return pinByCid(userId, pin);
}

/**
 * Create a pin object for (userId, cid), or reset an existing one back to
 * the queued state with a fresh request id.
 *
 * @returns the PSA-shaped pin status for the row
 */
export async function pinByCid(userId: number, pin: Pin): Promise<PinStatus> {
  let pinObjects = await PinObjects.findOne({
    where: {user_id: userId, cid: pin.cid},
  });
  if (_.isEmpty(pinObjects)) {
    const obj = {
      name: pin.name ? pin.name : pin.cid,
      request_id: uuid(),
      user_id: userId,
      cid: pin.cid,
      status: PinObjectStatus.queued,
      meta: pin.meta,
      origins: [...pin.origins].join(','),
      delegates: configs.ipfs.delegates.join(','),
    };
    logger.info(`obj: ${JSON.stringify(obj)}`);
    pinObjects = await PinObjects.create(obj);
  } else {
    // Existing row: re-queue it with a new request id and reset retries.
    pinObjects.request_id = uuid();
    pinObjects.update_time = moment().format('YYYY-MM-DD HH:mm:ss');
    pinObjects.status = PinObjectStatus.queued;
    pinObjects.meta = pin.meta;
    pinObjects.origins = [...pin.origins].join(',');
    pinObjects.delegates = configs.ipfs.delegates.join(',');
    pinObjects.retry_times = 0;
    pinObjects.deleted = 0;
    await pinObjects.save();
  }
  return PinStatus.parseBaseData(pinObjects);
}

/**
 * Daemon loop: verify the ordering account balance, place orders for queued
 * files, then sleep and repeat. Never returns.
 */
export async function orderStart() {
  while (true) {
    let apiPromise;
    try {
      apiPromise = apiConnect();
      const checkAccount = await checkAccountBalanceAndWarning(apiPromise);
      if (!checkAccount) {
        await sleep(configs.crust.loopTimeAwait);
        continue;
      }
      await placeOrderQueuedFiles(apiPromise);
      await sleep(configs.crust.loopTimeAwait);
    } catch (e) {
      logger.error(`place order loop error: ${e.message}`);
      sendCrustOrderWarningMsg(
        `crust-pinner(${configs.server.name}) error`,
        `### crust-pinner(${configs.server.name}) error \n err msg: ${e.message}`
      );
      await sleep(configs.crust.loopTimeAwait);
    } finally {
      // Always release the websocket connection, even on error paths.
      await disconnectApi(apiPromise);
    }
  }
}

/**
 * Place on-chain storage orders for every cid that is queued or failed and
 * still under the retry cap; rows past the cap get their final status.
 */
async function placeOrderQueuedFiles(apiPromise: ApiPromise) {
  logger.info('start placeOrderQueuedFiles');
  const pinObjects = await PinObjects.findAll({
    where: {
      deleted: 0,
      [Op.or]: [
        {status: PinObjectStatus.failed},
        {status: PinObjectStatus.queued},
      ],
      retry_times: {
        [Op.lt]: configs.crust.orderRetryTimes,
      },
    },
    order: [['update_time', 'asc']],
  });
  if (_.isEmpty(pinObjects)) {
    logger.info('not pin objects to order');
    return;
  }
  // Several users may pin the same cid: keep, per cid, the row with the
  // highest retry_times so one on-chain order covers all of them.
  const cidRetryGroup = _(pinObjects)
    .groupBy((i: any) => i.cid)
    .toPairs()
    .map((i: any) => _.maxBy(i[1], 'retry_times'))
    .groupBy((i: any) => i.cid)
    .value();
  for (const cid of _.map(cidRetryGroup, (i: any, j: any) => j)) {
    const needToOrder = await needOrder(cid, cidRetryGroup[cid][0].retry_times);
    if (needToOrder.needOrder) {
      await placeOrderInCrust(apiPromise, cid, needToOrder.retryTimes).catch(
        e => {
          logger.error(`order error catch: ${JSON.stringify(e)} cid: ${cid}`);
        }
      );
      // Throttle consecutive on-chain orders.
      await sleep(configs.crust.orderTimeAwait);
    } else {
      // Retry budget exhausted: persist the final status on all matching rows.
      await PinObjects.update(
        {
          status: needToOrder.status,
          retry_times: needToOrder.retryTimes,
        },
        {
          where: {
            deleted: 0,
            cid: cid,
            [Op.or]: [
              {status: PinObjectStatus.failed},
              {status: PinObjectStatus.queued},
            ],
          },
        }
      );
    }
  }
}

/**
 * Decide whether a cid should be (re)ordered given its retry count.
 *
 * @param cid file cid (not consulted; kept for call-site clarity)
 * @param retryTimes retries consumed so far
 * @returns the needOrder flag plus the status/retry values to persist
 */
async function needOrder(
  cid: string,
  retryTimes: number
): Promise<PinObjectState> {
  const result = new PinObjectState();
  result.needOrder = retryTimes <= configs.crust.orderRetryTimes;
  result.retryTimes = retryTimes;
  result.status = !result.needOrder
    ? PinObjectStatus.failed.toString()
    : PinObjectStatus.pinning.toString();
  return result;
}

// Outcome of the needOrder decision for one cid.
class PinObjectState {
  needOrder = false;
  retryTimes = 0;
  status: string;
}

/**
 * Submit one storage order for a cid and persist the outcome on every
 * matching (queued/failed, not deleted) pin row.
 *
 * @param apiPromise connected api
 * @param cid file cid to order
 * @param retryTimes retries consumed so far; incremented on failure
 */
async function placeOrderInCrust(
  apiPromise: ApiPromise,
  cid: string,
  retryTimes = 0
) {
  let pinStatus = PinObjectStatus.pinning;
  let retryTimeAdd = false;
  try {
    const fileCid = cid;
    // The real size is unknown here; orders use a configured default size.
    const fileSize = configs.crust.defaultFileSize;
    const seeds = configs.crust.seed;
    const tips = configs.crust.tips;
    const krp = createKeyring(seeds);
    logger.info(`order cid: ${cid} in crust`);
    pinStatus = PinObjectStatus.pinning;
    // Bound the transaction so a hung connection cannot stall the loop.
    const res = await timeoutOrError(
      'Crust place order',
      placeOrder(
        apiPromise,
        krp,
        fileCid,
        fileSize,
        fromDecimal(tips).toFixed(0),
        undefined
      ),
      configs.crust.transactionTimeout
    );
    if (!res) {
      retryTimeAdd = true;
      pinStatus = PinObjectStatus.failed;
      logger.error(`order cid: ${cid} failed result is empty`);
    }
  } catch (e) {
    pinStatus = PinObjectStatus.failed;
    retryTimeAdd = true;
    logger.error(`order cid: ${cid} failed error: ${e.toString()}`);
  } finally {
    const times =
      retryTimeAdd && retryTimes <= configs.crust.orderRetryTimes
        ? retryTimes + 1
        : retryTimes;
    await PinObjects.update(
      {
        status: pinStatus,
        retry_times: times,
      },
      {
        where: {
          deleted: 0,
          cid: cid,
          [Op.or]: [
            {status: PinObjectStatus.failed},
            {status: PinObjectStatus.queued},
          ],
        },
      }
    );
  }
}

/**
 * Daemon loop: poll the chain for every pinning object and promote it to
 * pinned (enough reported replicas), keep it pinning, or mark it failed and
 * deleted when the chain has no record of it. Never returns.
 */
export async function updatePinObjectStatus() {
  let apiPromise;
  while (true) {
    try {
      const pinningObjects = await pinObjectDao.queryPinningObjects();
      if (_.isEmpty(pinningObjects)) {
        await sleep(configs.crust.loopTimeAwait);
        continue;
      }
      apiPromise = apiConnect();
      for (const obj of pinningObjects) {
        const res = await getOrderState(apiPromise, obj.cid);
        if (res) {
          if (
            res.meaningfulData.reported_replica_count >=
            configs.crust.validFileSize
          ) {
            obj.status = PinObjectStatus.pinned;
          } else {
            obj.status = PinObjectStatus.pinning;
          }
        } else {
          // invalid file size
          obj.deleted = 1;
          obj.status = PinObjectStatus.failed;
        }
        await PinObjects.update(
          {
            status: obj.status,
            deleted: obj.deleted,
          },
          {
            where: {id: obj.id},
          }
        );
      }
      // NOTE(review): no sleep on this (non-empty) path — the loop re-polls
      // immediately; consider pacing it like orderStart.
    } catch (e) {
      logger.error(`get order state err: ${e}`);
    } finally {
      await disconnectApi(apiPromise);
    }
  }
}

/**
 * Daemon loop: when nothing is queued, re-queue pinned files whose on-chain
 * order is missing or expires within the configured block window, so they
 * get re-ordered. Never returns.
 */
export async function pinExpireFiles() {
  let apiPromise;
  while (true) {
    try {
      apiPromise = apiConnect();
      const pinningObjects = await PinObjects.findAll({
        where: {status: PinObjectStatus.pinned, deleted: 0},
        order: [['id', 'asc']],
        limit: 1000,
      });
      if (_.isEmpty(pinningObjects)) {
        await sleep(1000 * 6);
        continue;
      }
      // Use the finalized head so expiry checks are stable across re-orgs.
      const hash = await apiPromise.rpc.chain.getFinalizedHead();
      const block = await apiPromise.rpc.chain.getBlock(hash);
      const finalizeNumber = block.block.header.number.toNumber();
      for (const p of pinningObjects) {
        // Fresh pins take priority: only renew when nothing is queued.
        const existFileNotPinned = await PinObjects.findOne({
          attributes: ['id'],
          where: {status: PinObjectStatus.queued, deleted: 0},
          order: [['id', 'asc']],
          limit: 1,
        });
        if (_.isEmpty(existFileNotPinned)) {
          const res = await getOrderState(apiPromise, p.cid);
          if (
            _.isEmpty(res) ||
            res.meaningfulData.expired_at <=
              finalizeNumber + configs.crust.expireBlockNumber
          ) {
            await PinObjects.update(
              {status: PinObjectStatus.queued, retry_times: 0},
              {where: {id: p.id}}
            );
          }
        }
        await sleep(100);
      }
    } catch (e) {
      // Previously swallowed silently; log before backing off.
      logger.error(`pin expire files err: ${e}`);
      await sleep(1000 * 60);
    } finally {
      await disconnectApi(apiPromise);
    }
  }
}
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
{
  "compilerOptions": {
    "module": "commonjs",
    "target": "es2017",
    "skipLibCheck": true,
    "noImplicitAny": true,
    "outDir": "./dist",
    "sourceMap": true
  },
  "types": ["mocha"],
  "include": ["./**/*.ts", "./**/*.js"],
  "exclude": ["node_modules", "dist"]
}
--------------------------------------------------------------------------------