├── .editorconfig ├── .eslintrc.js ├── .github ├── FUNDING.yml └── workflows │ ├── deployment-production.yml │ ├── deployment-staging.yml │ └── test-workflow.yml ├── .gitignore ├── .nvmrc ├── LICENSE ├── Makefile ├── README.md ├── development.env.sample ├── package-lock.json ├── package.json ├── production.env.sample ├── src ├── index.js ├── routes │ ├── bots │ │ └── :id.js │ ├── count.js │ ├── health.js │ ├── legacy-ids.js │ ├── lists.js │ └── lists │ │ └── :id.js └── util │ ├── getFeatures.js │ ├── getList.js │ ├── getLists.js │ ├── getRoutes.js │ ├── isType.js │ ├── mapLegacy.js │ ├── ratelimit.js │ └── userAgent.js ├── staging.env.sample ├── webpack.config.js └── wrangler.toml /.editorconfig: -------------------------------------------------------------------------------- 1 | # editorconfig.org 2 | root = true 3 | 4 | [*] 5 | indent_style = space 6 | indent_size = 4 7 | end_of_line = lf 8 | charset = utf-8 9 | trim_trailing_whitespace = true 10 | insert_final_newline = true 11 | 12 | [*.{yml,json}] 13 | indent_size = 2 14 | -------------------------------------------------------------------------------- /.eslintrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | env: { 3 | browser: true, 4 | commonjs: true, 5 | es2021: true, 6 | node: true, 7 | }, 8 | parserOptions: { 9 | ecmaVersion: 12, 10 | }, 11 | extends: 'eslint:recommended', 12 | rules: { 13 | 'space-before-function-paren': [ 14 | 'error', 15 | { 16 | anonymous: 'always', 17 | named: 'never', 18 | asyncArrow: 'always', 19 | }, 20 | ], 21 | 'object-curly-spacing': [ 22 | 'error', 23 | 'always', 24 | ], 25 | 'no-console': 'off', 26 | 'no-var': 'error', 27 | 'prefer-const': 'error', 28 | indent: [ 29 | 'error', 30 | 4, 31 | { 32 | SwitchCase: 1, 33 | }, 34 | ], 35 | semi: [ 36 | 'error', 37 | 'always', 38 | ], 39 | quotes: [ 40 | 'error', 41 | 'single', 42 | ], 43 | 'quote-props': [ 44 | 'error', 45 | 'as-needed', 46 | ], 47 | 'object-curly-newline': [ 48 | 'error', 49 | { 50 | multiline: true, 51 | consistent: true, 52 | }, 53 | ], 54 | 'comma-dangle': [ 55 | 'error', 56 | 'always-multiline', 57 | ], 58 | 'comma-spacing': [ 59 | 'error', 60 | { 61 | before: false, 62 | after: true, 63 | }, 64 | ], 65 | 'comma-style': [ 66 | 'error', 67 | 'last', 68 | ], 69 | 'eol-last': 'error', 70 | 'key-spacing': [ 71 | 'error', 72 | { 73 | beforeColon: false, 74 | afterColon: true, 75 | }, 76 | ], 77 | 'keyword-spacing': [ 78 | 'error', 79 | { 80 | before: true, 81 | after: true, 82 | }, 83 | ], 84 | 'block-spacing': 'error', 85 | 'space-in-parens': [ 86 | 'error', 87 | 'never', 88 | ], 89 | 'space-before-blocks': 'error', 90 | 'no-trailing-spaces': 'error', 91 | 'semi-spacing': [ 92 | 'error', 93 | { 94 | before: false, 95 | after: true, 96 | }, 97 | ], 98 | 'space-infix-ops': 'error', 99 | }, 100 | }; 101 | -------------------------------------------------------------------------------- /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | github: MattIPv4 2 | patreon: IPv4 3 | ko_fi: MattIPv4 4 | custom: https://paypal.me/MattIPv4Cowley 5 | -------------------------------------------------------------------------------- /.github/workflows/deployment-production.yml: -------------------------------------------------------------------------------- 1 | name: Deploy to Production 2 | 3 | on: 4 | push: 5 | branches: 6 | - production 7 | workflow_dispatch: 8 | 9 | jobs: 10 | test: 11 | name: Test 12 | runs-on: ubuntu-latest 13 | 14 | 
env: 15 | NODE_OPTIONS: --unhandled-rejections=strict 16 | 17 | steps: 18 | - uses: actions/checkout@v4 19 | 20 | - name: Use Node.js 21 | uses: actions/setup-node@v4 22 | with: 23 | node-version-file: .nvmrc 24 | cache: npm 25 | 26 | - name: Install Dependencies 27 | run: npm ci 28 | 29 | - name: Run Tests 30 | run: npm test 31 | 32 | deploy: 33 | name: Deploy 34 | needs: test 35 | runs-on: ubuntu-latest 36 | 37 | env: 38 | NODE_OPTIONS: --unhandled-rejections=strict 39 | 40 | steps: 41 | - uses: actions/checkout@v4 42 | 43 | - name: Use Node.js 44 | uses: actions/setup-node@v4 45 | with: 46 | node-version-file: .nvmrc 47 | cache: npm 48 | 49 | - name: Install Dependencies 50 | run: npm ci 51 | 52 | - name: Generate production.env 53 | env: 54 | SENTRY_AUTH_TOKEN: ${{ secrets.PRODUCTION_SENTRY_AUTH_TOKEN }} 55 | SENTRY_ORG: ${{ secrets.PRODUCTION_SENTRY_ORG }} 56 | SENTRY_PROJECT: ${{ secrets.PRODUCTION_SENTRY_PROJECT }} 57 | SENTRY_DSN: ${{ secrets.PRODUCTION_SENTRY_DSN }} 58 | run: envsubst < production.env.sample > production.env 59 | 60 | - name: Fetch data 61 | run: git clone https://github.com/botblock/data 62 | 63 | - name: Publish to Production 64 | run: npm run publish:production 65 | env: 66 | CF_API_TOKEN: ${{ secrets.CF_API_TOKEN }} 67 | -------------------------------------------------------------------------------- /.github/workflows/deployment-staging.yml: -------------------------------------------------------------------------------- 1 | name: Deploy to Staging 2 | 3 | on: 4 | push: 5 | branches: 6 | - staging 7 | workflow_dispatch: 8 | 9 | jobs: 10 | test: 11 | name: Test 12 | runs-on: ubuntu-latest 13 | 14 | env: 15 | NODE_OPTIONS: --unhandled-rejections=strict 16 | 17 | steps: 18 | - uses: actions/checkout@v4 19 | 20 | - name: Use Node.js 21 | uses: actions/setup-node@v4 22 | with: 23 | node-version-file: .nvmrc 24 | cache: npm 25 | 26 | - name: Install Dependencies 27 | run: npm ci 28 | 29 | - name: Run Tests 30 | run: npm test 31 | 32 | deploy: 33 | name: Deploy 34 | needs: test 35 | runs-on: ubuntu-latest 36 | 37 | env: 38 | NODE_OPTIONS: --unhandled-rejections=strict 39 | 40 | steps: 41 | - uses: actions/checkout@v4 42 | 43 | - name: Use Node.js 44 | uses: actions/setup-node@v4 45 | with: 46 | node-version-file: .nvmrc 47 | cache: npm 48 | 49 | - name: Install Dependencies 50 | run: npm ci 51 | 52 | - name: Generate staging.env 53 | env: 54 | SENTRY_AUTH_TOKEN: ${{ secrets.STAGING_SENTRY_AUTH_TOKEN }} 55 | SENTRY_ORG: ${{ secrets.STAGING_SENTRY_ORG }} 56 | SENTRY_PROJECT: ${{ secrets.STAGING_SENTRY_PROJECT }} 57 | SENTRY_DSN: ${{ secrets.STAGING_SENTRY_DSN }} 58 | run: envsubst < staging.env.sample > staging.env 59 | 60 | - name: Fetch data 61 | run: git clone https://github.com/botblock/data 62 | 63 | - name: Publish to Staging 64 | run: npm run publish:staging 65 | env: 66 | CF_API_TOKEN: ${{ secrets.CF_API_TOKEN }} 67 | -------------------------------------------------------------------------------- /.github/workflows/test-workflow.yml: -------------------------------------------------------------------------------- 1 | name: Test Latest Commit 2 | 3 | on: 4 | push: 5 | branches-ignore: 6 | - staging 7 | - production 8 | pull_request: 9 | 10 | jobs: 11 | test: 12 | name: Test 13 | runs-on: ubuntu-latest 14 | 15 | env: 16 | NODE_OPTIONS: --unhandled-rejections=strict 17 | 18 | steps: 19 | - uses: actions/checkout@v4 20 | 21 | - name: Use Node.js 22 | uses: actions/setup-node@v4 23 | with: 24 | node-version-file: .nvmrc 25 | cache: npm 26 | 27 | - name: Install 
Dependencies 28 | run: npm ci 29 | 30 | - name: Run Tests 31 | run: npm test 32 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .idea/ 2 | node_modules/ 3 | .DS_Store 4 | tmp/ 5 | dist/ 6 | data/ 7 | *.env 8 | -------------------------------------------------------------------------------- /.nvmrc: -------------------------------------------------------------------------------- 1 | v16.13.0 2 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. 
For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright 2021 Matt (IPv4) Cowley 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 
202 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | GIT_BRANCH = $(shell git rev-parse --abbrev-ref HEAD 2>/dev/null) 2 | 3 | error: 4 | $(error Please use a target of either deploy-production or deploy-staging) 5 | 6 | .PHONY: deploy-production 7 | deploy-production: 8 | git push origin $(GIT_BRANCH):production -f 9 | 10 | .PHONY: deploy-staging 11 | deploy-staging: 12 | git push origin $(GIT_BRANCH):staging -f 13 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # api-worker 2 | 3 | The Cloudflare Worker that powers the BotBlock.org API routes. 4 | 5 | Read the API docs on the BotBlock.org website. 6 | 7 | ## Development 8 | 9 | 1. Create your `development.env` file. Copy `development.env.sample` and fill out the information. 10 | 2. Authenticate with Wrangler by running `wrangler login`. 11 | 3. Update `wrangler.toml` for your account. 12 | - Use `wrangler whoami` to get your account ID, and update the value in `wrangler.toml` to match. 13 | - Use `wrangler kv:namespace create "RATELIMIT"` to create the KV namespace, and update the `id` and `preview_id` in `wrangler.toml` to match. 14 | 4. Clone a copy of BotBlock's open data with `git clone https://github.com/botblock/data`. 15 | 5. Run the worker locally for development with `npm run dev`. 16 | 17 | ## Deployments 18 | 19 | `wrangler.toml` and this repository are currently designed for a staging deployment and a production deployment. 20 | 21 | Ensure that you've created and configured `staging.env` and `production.env` appropriately. 22 | 23 | Ensure that the staging/production environments in `wrangler.toml` have been updated with your zone IDs and routes for the workers. 24 | 25 | Ensure that the KV namespaces are created for the staging/production environments and are configured in `wrangler.toml`. 26 | Use `wrangler kv:namespace create "RATELIMIT" --env <environment>`. 27 | 28 | To deploy from local, run `npm run publish:staging` to deploy to staging, and `npm run publish:production` to deploy to the production environment. 29 | 30 | To deploy using GitHub, run `make deploy-staging` to force push and deploy to staging, and `make deploy-production` to force push and deploy to the production environment. 31 | 32 | Live logs for both environments can be accessed with `npm run logs:staging` and `npm run logs:production` as needed.
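As a rough illustration of the API this worker serves (a sketch, not an official client), the snippet below posts a guild count to the `/api/count` route using the payload shape validated in `src/routes/count.js` and the production route from `wrangler.toml`. The bot ID, count, and the `example.list` key and token are placeholders for a real snowflake, a real count, and a real BotBlock list ID with its API token.

```js
// Minimal sketch of a POST to the worker's /api/count route.
// The bot ID, counts, and the 'example.list' key/token below are placeholders.
const postCount = async () => {
    const response = await fetch('https://botblock.org/api/count', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({
            bot_id: '123456789012345678',          // snowflake string (16+ digits)
            server_count: 250,                     // integer
            shard_id: 0,                           // optional: integer or null
            shard_count: 1,                        // optional: integer or null
            'example.list': 'YOUR_LIST_API_TOKEN', // list ID -> that list's API token
        }),
    });

    // The worker replies with per-list success/failure data
    console.log(response.status, await response.json());
};

postCount().catch(console.error);
```

Note that `src/routes/count.js` rate limits this route per bot ID, IP, and route to one request every 120 seconds, so a client should not post more frequently than that.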
33 | -------------------------------------------------------------------------------- /development.env.sample: -------------------------------------------------------------------------------- 1 | SENTRY_AUTH_TOKEN=${SENTRY_AUTH_TOKEN} 2 | SENTRY_ORG=${SENTRY_ORG} 3 | SENTRY_PROJECT=${SENTRY_PROJECT} 4 | SENTRY_DSN=${SENTRY_DSN} 5 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "api-worker", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "dist/worker.js", 6 | "scripts": { 7 | "build": "webpack", 8 | "dev": "NODE_ENV=development wrangler dev", 9 | "publish:production": "NODE_ENV=production wrangler publish -e production", 10 | "publish:staging": "NODE_ENV=staging wrangler publish -e staging", 11 | "logs:production": "wrangler tail -e production", 12 | "logs:staging": "wrangler tail -e staging", 13 | "lint": "eslint \"{src/**/*,*}.js\"", 14 | "lint:fix": "npm run lint -- --fix", 15 | "test": "npm run lint" 16 | }, 17 | "repository": { 18 | "type": "git", 19 | "url": "git+https://github.com/botblock/api-worker.git" 20 | }, 21 | "keywords": [], 22 | "author": "Matt (IPv4) Cowley", 23 | "license": "Apache-2.0", 24 | "bugs": { 25 | "url": "https://github.com/botblock/api-worker/issues" 26 | }, 27 | "homepage": "https://github.com/botblock/api-worker#readme", 28 | "dependencies": { 29 | "url-pattern": "^1.0.3", 30 | "workers-sentry": "^0.0.6" 31 | }, 32 | "devDependencies": { 33 | "@cloudflare/wrangler": "^1.19.4", 34 | "dotenv": "^10.0.0", 35 | "eslint": "^8.2.0", 36 | "webpack": "^5.62.1", 37 | "webpack-cli": "^4.9.1" 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /production.env.sample: -------------------------------------------------------------------------------- 1 | SENTRY_AUTH_TOKEN=${SENTRY_AUTH_TOKEN} 2 | SENTRY_ORG=${SENTRY_ORG} 3 | SENTRY_PROJECT=${SENTRY_PROJECT} 4 | SENTRY_DSN=${SENTRY_DSN} 5 | -------------------------------------------------------------------------------- /src/index.js: -------------------------------------------------------------------------------- 1 | const WorkersSentry = require('workers-sentry/worker'); 2 | const UrlPattern = require('url-pattern'); 3 | const routeData = require('./util/getRoutes')(); 4 | 5 | // Process all requests to the worker 6 | const handleRequest = async ({ request, wait, sentry }) => { 7 | const url = new URL(request.url); 8 | url.pathname = url.pathname.replace(/(?<=.)\/$/, ''); 9 | 10 | // Attempt to find a matching route 11 | for (const route of routeData) { 12 | if (route.method !== request.method) continue; 13 | const match = new UrlPattern(route.route, { segmentValueCharset: 'a-zA-Z0-9-_~ %.' 
}).match(url.pathname); 14 | if (!match) continue; 15 | 16 | // Execute the route 17 | request.params = match; 18 | return route.handler({ request, wait, sentry }); 19 | } 20 | 21 | // Handle docs redirects 22 | if (request.method === 'GET' && url.pathname === '/api/docs') 23 | return new Response(null, { 24 | status: 302, 25 | headers: { Location: '/docs' }, 26 | }); 27 | if (request.method === 'GET' && url.pathname === '/api/docs/libs') 28 | return new Response(null, { 29 | status: 302, 30 | headers: { Location: '/docs/libraries' }, 31 | }); 32 | 33 | // 404 response 34 | return new Response(null, { status: 404 }); 35 | }; 36 | 37 | // Register the worker listener 38 | addEventListener('fetch', event => { 39 | // Start Sentry 40 | const sentry = new WorkersSentry(event, process.env.SENTRY_DSN); 41 | 42 | // Process the event 43 | return event.respondWith(handleRequest({ 44 | request: event.request, 45 | wait: event.waitUntil.bind(event), 46 | sentry, 47 | }).catch(err => { 48 | // Log & re-throw any errors 49 | console.error(err); 50 | sentry.captureException(err); 51 | throw err; 52 | })); 53 | }); 54 | -------------------------------------------------------------------------------- /src/routes/bots/:id.js: -------------------------------------------------------------------------------- 1 | const listsData = require('../../util/getLists')(); 2 | const { isSnowflake } = require('../../util/isType'); 3 | const ratelimit = require('../../util/ratelimit'); 4 | const userAgent = require('../../util/userAgent'); 5 | 6 | const getMostCommon = array => Array.isArray(array) && array.length 7 | ? [ ...array.reduce((map, val) => { 8 | map.set(val, (map.get(val) || 0) + 1); 9 | return map; 10 | }, new Map()).entries() ].sort((a, b) => a[1] < b[1] ? 1 : -1)[0][0] 11 | : null; 12 | 13 | const jsonOrText = text => { 14 | try { 15 | return JSON.parse(text); 16 | } catch (_) { 17 | return text; 18 | } 19 | }; 20 | 21 | module.exports = { 22 | method: 'GET', 23 | route: '/api/bots/:id', 24 | handler: async ({ request }) => { 25 | // Validate the id 26 | if (!isSnowflake(request.params.id)) return new Response(JSON.stringify({ 27 | error: true, 28 | status: 400, 29 | message: '\'id\' must be a snowflake', 30 | }, null, 2), { 31 | status: 400, 32 | headers: { 33 | 'Content-Type': 'application/json', 34 | 'X-Served-By': 'botblock-api-worker', 35 | }, 36 | }); 37 | 38 | // Ratelimit request 39 | const ratelimited = await ratelimit(30, request, request.params.id); 40 | if (ratelimited) return ratelimited; 41 | 42 | // Get lists to interact with 43 | const lists = listsData.filter(list => !!list.api_get && !list.defunct); 44 | 45 | // Run all requests concurrently 46 | const requests = lists.map(list => { 47 | // Create the 2s abort controller 48 | const controller = new AbortController(); 49 | const { signal } = controller; 50 | const timeout = setTimeout(() => controller.abort(), 2000); 51 | 52 | // Make the request 53 | return fetch(list.api_get.replace(':id', request.params.id), { 54 | method: 'GET', 55 | headers: { 56 | 'Content-Type': 'application/json', 57 | 'User-Agent': request.headers.get('user-agent') || userAgent.random(), 58 | 'X-Forwarded-For': request.headers.get('cf-connecting-ip'), 59 | }, 60 | signal, 61 | }).then(async resp => ({ 62 | list: list.id, 63 | data: [ jsonOrText(await resp.text().catch(() => '')), resp.status ], 64 | })).catch(err => ({ 65 | list: list.id, 66 | data: [ err.name === 'AbortError' ? 
'Timeout after 2s' : '', -1 ], 67 | })).finally(() => { 68 | clearTimeout(timeout); 69 | }); 70 | }); 71 | 72 | // Wait for all requests to complete 73 | const results = await Promise.all(requests) 74 | .then(data => data.reduce((obj, { list, data }) => ({ ...obj, [list]: data }), {})); 75 | 76 | // Create empty data obj 77 | const data = { 78 | username: [], 79 | discriminator: [], 80 | owners: [], 81 | server_count: [], 82 | invite: [], 83 | prefix: [], 84 | website: [], 85 | github: [], 86 | support: [], 87 | library: [], 88 | }; 89 | 90 | // Normalize response from each list 91 | for (const id of Object.keys(results)) { 92 | // Check the response from list was good 93 | const list = results[id]; 94 | if (list[1] !== 200) continue; 95 | if (!list[0] || typeof list[0] !== 'object') continue; 96 | 97 | // Work through the fields in the response 98 | for (const [ key, value ] of Object.entries(list[0])) { 99 | // If bad value, skip 100 | if (!value) continue; 101 | 102 | // Bot name 103 | if (['name', 'username', 'bot_name'].includes(key)) data.username.push(value); 104 | if (['discrim', 'discriminator', 'disc'].includes(key)) data.discriminator.push(String(value)); 105 | 106 | // Owners 107 | if (['owner', 'owners', 'authors', 'bot_owners', 'owner_id', 'coOwners', 'secondaryOwners'].includes(key)) { 108 | // Ensure we're working with an array 109 | const valueArray = Array.isArray(value) ? value : [ value ]; 110 | for (const owner of valueArray) { 111 | if (typeof owner === 'string' || typeof owner === 'number') data.owners.push(owner); 112 | if (typeof owner === 'object') { 113 | if (owner['id']) data.owners.push(owner['id']); 114 | if (owner['userId']) data.owners.push(owner['userId']); 115 | } 116 | } 117 | } 118 | 119 | // Server count 120 | if (['count', 'servers', 'server_count', 'servercount', 'serverCount', 'bot_server_count', 'guilds', 121 | 'guild_count', 'guildcount', 'guildCount'].includes(key)) { 122 | const temp = Number.parseInt(value); 123 | if (typeof temp === 'number') data.server_count.push(temp); 124 | } 125 | if (key === 'stats' && typeof value === 'object') { 126 | if (value['guilds']) { 127 | const temp = Number.parseInt(value['guilds']); 128 | if (typeof temp === 'number') data.server_count.push(temp); 129 | } 130 | } 131 | 132 | // Links 133 | if (key === 'links' && typeof value === 'object') { 134 | if (value['invite']) data.invite.push(value['invite']); 135 | if (value['support']) data.support.push(value['support']); 136 | } 137 | if (['invite', 'bot_invite', 'botInvite', 'bot_invite_link', 'oauth_url', 'inviteURL'].includes(key)) { 138 | if (typeof key === 'string') data.invite.push(value); 139 | } 140 | if (['website', 'bot_website', 'websiteURL'].includes(key)) { 141 | if (typeof key === 'string') data.website.push(value); 142 | } 143 | if (['github', 'bot_github_repo', 'openSource', 'git', 'source_code'].includes(key)) { 144 | if (typeof key === 'string') data.github.push(value); 145 | } 146 | if (['support', 'supportInvite', 'support_server', 'discord', 'server_invite', 'bot_support_discord', 147 | 'server', 'supportServer'].includes(key)) { 148 | if (typeof key === 'string') data.support.push(value); 149 | } 150 | 151 | // Prefix 152 | if (['prefix', 'bot_prefix'].includes(key)) { 153 | if (typeof key === 'string') data.prefix.push(value); 154 | } 155 | 156 | // Library 157 | if (['library', 'libraryName', 'bot_library', 'lang'].includes(key)) { 158 | if (typeof key === 'string') data.library.push(value); 159 | } 160 | } 161 | } 162 | 163 | // Condense the 
output 164 | const response = { 165 | id: request.params.id, 166 | username: getMostCommon(data.username) || 'Unknown', 167 | discriminator: getMostCommon(data.discriminator) || '0000', 168 | owners: data.owners.filter((v, i, a) => a.indexOf(v) === i && isSnowflake(v)) || [], 169 | server_count: Math.max(...data.server_count) || 0, 170 | invite: getMostCommon(data.invite) || '', 171 | prefix: getMostCommon(data.prefix) || '', 172 | website: getMostCommon(data.website) || '', 173 | github: getMostCommon(data.github) || '', 174 | support: getMostCommon(data.support) || '', 175 | library: getMostCommon(data.library) || '', 176 | list_data: results, 177 | }; 178 | 179 | // TODO: Cache? 180 | 181 | // Done 182 | return new Response(JSON.stringify(response, null, 2), { 183 | status: 200, 184 | headers: { 185 | 'Content-Type': 'application/json', 186 | 'X-Served-By': 'botblock-api-worker', 187 | }, 188 | }); 189 | }, 190 | }; 191 | -------------------------------------------------------------------------------- /src/routes/count.js: -------------------------------------------------------------------------------- 1 | const listsData = require('../util/getLists')(); 2 | const userAgent = require('../util/userAgent'); 3 | const { isInteger, isSnowflake } = require('../util/isType'); 4 | const ratelimit = require('../util/ratelimit'); 5 | const mapLegacy = require('../util/mapLegacy'); 6 | 7 | const validationError = message => new Response(JSON.stringify({ error: true, status: 400, message }, null, 2), { 8 | status: 400, 9 | headers: { 10 | 'Content-Type': 'application/json', 11 | 'X-Served-By': 'botblock-api-worker', 12 | }, 13 | }); 14 | 15 | module.exports = { 16 | method: 'POST', 17 | route: '/api/count', 18 | handler: async ({ request }) => { 19 | // Get data in request 20 | const data = await request.json().catch(() => {}); 21 | 22 | // Validate the provided data 23 | if (!data) return validationError('Body must be JSON object'); 24 | 25 | if (!('bot_id' in data)) return validationError('\'bot_id\' is required'); 26 | if (typeof data.bot_id !== 'string') return validationError('\'bot_id\' must be a string'); 27 | if (!isSnowflake(data.bot_id)) return validationError('\'bot_id\' must be a snowflake'); 28 | 29 | if (!('server_count' in data)) return validationError('\'server_count\' is required'); 30 | if (typeof data.server_count !== 'number') return validationError('\'server_count\' must be a number'); 31 | if (!isInteger(data.server_count)) return validationError('\'server_count\' must be a number'); 32 | 33 | if ('shard_id' in data && (!isInteger(data.shard_id) && data.shard_id !== null)) 34 | return validationError('\'shard_id\' must be a number or null'); 35 | if ('shard_count' in data && (!isInteger(data.shard_count) && data.shard_count !== null)) 36 | return validationError('\'shard_count\' must be a number or null'); 37 | 38 | if ('shards' in data) { 39 | if (!Array.isArray(data.shards) && data.shards !== null) 40 | return validationError('\'shards\' must be an array or null'); 41 | if (data.shards && data.shards.some(n => !isInteger(n))) 42 | return validationError('\'shards\' contains incorrect values'); 43 | } 44 | 45 | // Ratelimit request 46 | const ratelimited = await ratelimit(120, request, data.bot_id); 47 | if (ratelimited) return ratelimited; 48 | 49 | // Get lists to interact with 50 | const keys = Object.keys(data).map(mapLegacy); 51 | const lists = listsData.filter(list => keys.includes(list.id) && !!list.api_post && !list.defunct); 52 | 53 | // Run all requests 
concurrently 54 | const requests = lists.map(list => { 55 | // Generate the payload 56 | const payload = {}; 57 | if ('shards' in data && list.api_shards) payload[list.api_shards] = data.shards; 58 | if ('server_count' in data && list.api_field) payload[list.api_field] = data.server_count; 59 | if ('shard_id' in data && list.api_shard_id) payload[list.api_shard_id] = data.shard_id; 60 | if ('shard_count' in data && list.api_shard_count) payload[list.api_shard_count] = data.shard_count; 61 | 62 | // Create the 10s abort controller 63 | const controller = new AbortController(); 64 | const { signal } = controller; 65 | const timeout = setTimeout(() => controller.abort(), 10000); 66 | 67 | // Make the request 68 | return fetch(list.api_post.replace(':id', data.bot_id), { 69 | method: list.api_post_method ?? 'POST', 70 | body: JSON.stringify(payload), 71 | headers: { 72 | Authorization: data[list.id], 73 | 'Content-Type': 'application/json', 74 | 'User-Agent': request.headers.get('user-agent') || userAgent.random(), 75 | }, 76 | signal, 77 | }).then(async resp => ({ 78 | list: list.id, 79 | success: resp.ok, 80 | data: [ resp.status, await resp.text(), JSON.stringify(payload) ], 81 | })).catch(err => ({ 82 | list: list.id, 83 | success: false, 84 | data: [ -1, err.name === 'AbortError' ? 'Timeout after 10s' : '', JSON.stringify(payload) ], 85 | })).finally(() => { 86 | clearTimeout(timeout); 87 | }); 88 | }); 89 | 90 | // Wait for all requests to complete 91 | const results = await Promise.all(requests) 92 | .then(data => data.reduce((obj, { list, success, data }) => ({ 93 | ...obj, 94 | [success ? 'success' : 'failure']: { 95 | ...obj[success ? 'success' : 'failure'], 96 | [list]: data, 97 | }, 98 | }), { success: {}, failure: {} })); 99 | 100 | // Done 101 | return new Response(JSON.stringify(results, null, 2), { 102 | status: 200, 103 | headers: { 104 | 'Content-Type': 'application/json', 105 | 'X-Served-By': 'botblock-api-worker', 106 | }, 107 | }); 108 | }, 109 | }; 110 | -------------------------------------------------------------------------------- /src/routes/health.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | method: 'GET', 3 | route: '/api/health', 4 | handler: () => new Response('OK', { 5 | headers: { 6 | 'Content-Type': 'text/plain', 7 | 'Cache-Control': 'no-store, no-cache, must-revalidate, proxy-revalidate', 8 | Expires: '0', 9 | 'Surrogate-Control': 'no-store', 10 | }, 11 | }), 12 | }; 13 | -------------------------------------------------------------------------------- /src/routes/legacy-ids.js: -------------------------------------------------------------------------------- 1 | const legacy = require('../../data/data/legacy.json'); 2 | 3 | module.exports = { 4 | method: 'GET', 5 | route: '/api/legacy-ids', 6 | handler: () => { 7 | // Get legacy IDs and sort 8 | const map = Object.entries(legacy) 9 | .filter((k) => k[0] !== '$schema') 10 | .sort((a, b) => a[0].localeCompare(b[0]) ? 
-1 : 1) 11 | .reduce((obj, [key, value]) => ({ ...obj, [key]: value }), {}); 12 | 13 | return new Response(JSON.stringify(map, null, 2), { 14 | status: 200, 15 | headers: { 16 | 'Content-Type': 'application/json', 17 | 'X-Served-By': 'botblock-api-worker', 18 | }, 19 | }); 20 | }, 21 | }; 22 | -------------------------------------------------------------------------------- /src/routes/lists.js: -------------------------------------------------------------------------------- 1 | const listsData = require('../util/getLists')(); 2 | const getList = require('../util/getList'); 3 | 4 | module.exports = { 5 | method: 'GET', 6 | route: '/api/lists', 7 | handler: async ({ request }) => { 8 | // Check if we need to filter 9 | const filter = new URL(request.url).searchParams.get('filter') === 'true'; 10 | 11 | // Get lists with features 12 | const lists = listsData 13 | .map(list => getList(list.id)) 14 | .filter(list => { 15 | if (!filter) return true; 16 | 17 | // Defunct lists and lists with no `api_` data are filtered out 18 | if (list.defunct) return false; 19 | return Object.entries(list).filter(([ key, val ]) => key.startsWith('api_') && val !== null).length > 0; 20 | }) 21 | .sort((a, b) => { 22 | if (a.discord_only !== b.discord_only) return a.discord_only ? -1 : 1; 23 | return a.id.localeCompare(b.id) > 0 ? 1 : -1; 24 | }) 25 | .reduce((obj, list) => ({ 26 | ...obj, 27 | [list.id]: filter 28 | // Keys not starting with `api_` are filtered out 29 | ? Object.entries(list).reduce((obj, [ key, val ]) => key.startsWith('api_') 30 | ? { ...obj, [key]: val } 31 | : obj, 32 | {}) 33 | : list, 34 | }), {}); 35 | 36 | // Done 37 | return new Response(JSON.stringify(lists, null, 2), { 38 | status: 200, 39 | headers: { 40 | 'Content-Type': 'application/json', 41 | 'X-Served-By': 'botblock-api-worker', 42 | }, 43 | }); 44 | }, 45 | }; 46 | -------------------------------------------------------------------------------- /src/routes/lists/:id.js: -------------------------------------------------------------------------------- 1 | const getList = require('../../util/getList'); 2 | 3 | module.exports = { 4 | method: 'GET', 5 | route: '/api/lists/:id', 6 | handler: ({ request }) => { 7 | // Attempt to get the list 8 | const list = getList(request.params.id); 9 | if (list) return new Response(JSON.stringify(list, null, 2), { 10 | status: 200, 11 | headers: { 12 | 'Content-Type': 'application/json', 13 | 'X-Served-By': 'botblock-api-worker', 14 | }, 15 | }); 16 | 17 | // Not found 18 | return new Response(JSON.stringify({ error: true, status: 404, message: 'List not found' }, null, 2), { 19 | status: 404, 20 | headers: { 21 | 'Content-Type': 'application/json', 22 | 'X-Served-By': 'botblock-api-worker', 23 | }, 24 | }); 25 | }, 26 | }; 27 | -------------------------------------------------------------------------------- /src/util/getFeatures.js: -------------------------------------------------------------------------------- 1 | // Load all features with webpack magic 2 | module.exports = () => (ctx => ctx.keys().map(ctx))(require.context('../../data/data/features', true, /\.json$/)); 3 | -------------------------------------------------------------------------------- /src/util/getList.js: -------------------------------------------------------------------------------- 1 | const listsData = require('./getLists')(); 2 | const featuresData = require('./getFeatures')(); 3 | 4 | module.exports = id => { 5 | const match = listsData.find(list => list.id === id); 6 | if (!match) return null; 7 | 8 | // Clone the 
object 9 | const list = { ...match }; 10 | 11 | // Load full features 12 | list.features = featuresData.map(feature => { 13 | const withValue = { 14 | ...feature, 15 | value: list.features.includes(feature.id) ? 1 : 0, 16 | }; 17 | 18 | delete withValue['$schema']; 19 | 20 | return withValue; 21 | }).sort((a, b) => { 22 | if (a.value !== b.value) return a.value ? -1 : 1; 23 | if (a.display !== b.display) return a.display > b.display ? -1 : 1; 24 | return a.name.localeCompare(b.name) ? -1 : 1; 25 | }); 26 | 27 | // Drop $schema 28 | delete list['$schema']; 29 | 30 | return list; 31 | }; 32 | -------------------------------------------------------------------------------- /src/util/getLists.js: -------------------------------------------------------------------------------- 1 | // Load all lists with webpack magic 2 | module.exports = () => (ctx => ctx.keys().map(ctx))(require.context('../../data/data/lists', true, /\.json$/)); 3 | -------------------------------------------------------------------------------- /src/util/getRoutes.js: -------------------------------------------------------------------------------- 1 | // Load all routes with webpack magic 2 | module.exports = () => (ctx => ctx.keys().map(ctx))(require.context('../routes', true, /\.js$/)); 3 | -------------------------------------------------------------------------------- /src/util/isType.js: -------------------------------------------------------------------------------- 1 | module.exports.isInteger = val => typeof val === 'number' && Number.isInteger(val); 2 | module.exports.isSnowflake = val => typeof val === 'string' && /^\d+$/.test(val) && val.length >= 16; 3 | -------------------------------------------------------------------------------- /src/util/mapLegacy.js: -------------------------------------------------------------------------------- 1 | const legacy = require('../../data/data/legacy.json'); 2 | 3 | module.exports = val => legacy[val] || val; 4 | -------------------------------------------------------------------------------- /src/util/ratelimit.js: -------------------------------------------------------------------------------- 1 | /* global RATELIMIT */ 2 | 3 | const { isSnowflake } = require('./isType'); 4 | 5 | module.exports = async (limit, request, botId = '') => { 6 | // Construct the key 7 | const route = new URL(request.url).pathname; 8 | const ip = request.headers.get('cf-connecting-ip'); 9 | const extra = botId && isSnowflake(botId) ? botId : ''; 10 | const key = `${request.method}-${route}-${ip}${extra ? 
`-${extra}` : ''}`; 11 | 12 | // Check if the key exists 13 | const existing = await RATELIMIT.get(key); 14 | if (existing && Number(existing) > Date.now()) { 15 | // Ratelimited 16 | const data = { 17 | error: true, 18 | status: 429, 19 | retry_after: Math.ceil((Number(existing) - Date.now()) / 1000), 20 | ratelimit_reset: Math.ceil(Number(existing) / 1000), 21 | ratelimit_method: request.method, 22 | ratelimit_route: route, 23 | ratelimit_ip: ip, 24 | ratelimit_bot_id: extra, 25 | }; 26 | return new Response(JSON.stringify(data, null, 2), { 27 | status: 429, 28 | headers: { 29 | 'Content-Type': 'application/json', 30 | 'X-Served-By': 'botblock-api-worker', 31 | 'Retry-After': data.retry_after, 32 | 'X-Rate-Limit-Reset': data.ratelimit_reset, 33 | 'X-Rate-Limit-Method': data.ratelimit_method, 34 | 'X-Rate-Limit-Route': data.ratelimit_route, 35 | 'X-Rate-Limit-IP': data.ratelimit_ip, 36 | 'X-Rate-Limit-Bot-ID': data.ratelimit_bot_id, 37 | }, 38 | }); 39 | } 40 | 41 | // Write to KV 42 | await RATELIMIT.put(key, Date.now() + (limit * 1000), { expirationTtl: Math.max(limit, 60) }); 43 | return false; 44 | }; 45 | -------------------------------------------------------------------------------- /src/util/userAgent.js: -------------------------------------------------------------------------------- 1 | const userAgents = []; 2 | 3 | for (const a of ['4', '5', '6', '7']) { 4 | for (const v of ['3.4.4', '3.4.3', '3.4.2', '3.4.1', '3.4.0', '3.3.2', '3.3.1', '3.3.0']) { 5 | userAgents.push('Python/3.' + a + ' aiohttp/' + v); 6 | } 7 | } 8 | 9 | for (const a of ['2.20.0', '2.19.1', '2.19.0', '2.18.4', '2.18.3']) { 10 | userAgents.push('python-requests/' + a); 11 | } 12 | 13 | for (const a of ['2.2.0', '2.1.2', '2.1.1', '2.1.0', '2.0.0']) { 14 | userAgents.push('node-fetch/' + a + ' (+https://github.com/bitinn/node-fetch)'); 15 | } 16 | 17 | for (const a of ['4.0.4', '4.0.3', '4.0.2', '4.0.1', '4.0.0', '3.6.4', '3.6.3', '3.6.2']) { 18 | userAgents.push('snekfetch/' + a); 19 | } 20 | 21 | for (const a of ['3.11.0', '3.10.0', '3.9.1', '3.9.0']) { 22 | userAgents.push('okhttp/' + a); 23 | } 24 | 25 | module.exports.all = Object.freeze(userAgents); 26 | module.exports.random = () => userAgents[Math.floor(Math.random() * userAgents.length)]; 27 | -------------------------------------------------------------------------------- /staging.env.sample: -------------------------------------------------------------------------------- 1 | SENTRY_AUTH_TOKEN=${SENTRY_AUTH_TOKEN} 2 | SENTRY_ORG=${SENTRY_ORG} 3 | SENTRY_PROJECT=${SENTRY_PROJECT} 4 | SENTRY_DSN=${SENTRY_DSN} 5 | -------------------------------------------------------------------------------- /webpack.config.js: -------------------------------------------------------------------------------- 1 | const path = require('path'); 2 | const NODE_ENV = process.env.NODE_ENV || 'development'; 3 | const env = require('dotenv').config({ path: path.join(__dirname, `${NODE_ENV}.env`) }); 4 | const { DefinePlugin } = require('webpack'); 5 | const WorkersSentryWebpackPlugin = require('workers-sentry/webpack'); 6 | 7 | console.log(`Using ${NODE_ENV} environment for build...`); 8 | 9 | module.exports = { 10 | mode: 'none', 11 | target: 'webworker', 12 | entry: './src/index.js', 13 | output: { 14 | path: path.join(__dirname, 'dist'), 15 | filename: 'worker.js', 16 | }, 17 | plugins: [ 18 | // Expose our environment in the worker 19 | new DefinePlugin(Object.entries(env.parsed).reduce((obj, [ key, val ]) => { 20 | obj[`process.env.${key}`] = JSON.stringify(val); 21 | 
return obj; 22 | }, { 'process.env.NODE_ENV': JSON.stringify(NODE_ENV) })), 23 | 24 | // Publish source maps to Sentry on each build 25 | new WorkersSentryWebpackPlugin( 26 | process.env.SENTRY_AUTH_TOKEN, 27 | process.env.SENTRY_ORG, 28 | process.env.SENTRY_PROJECT, 29 | ), 30 | ], 31 | }; 32 | -------------------------------------------------------------------------------- /wrangler.toml: -------------------------------------------------------------------------------- 1 | name = "botblock-api-worker" 2 | type = "javascript" 3 | account_id = "fb1f542488f2441acf88ca15f3a8390d" 4 | workers_dev = true 5 | compatibility_date = "2021-10-17" 6 | kv_namespaces = [ 7 | { binding = "RATELIMIT", id = "075636ba3f0f4d81b1dd2569177a98e9", preview_id = "075636ba3f0f4d81b1dd2569177a98e9" } 8 | ] 9 | 10 | [build] 11 | command = "npm run build" 12 | 13 | [build.upload] 14 | format = "service-worker" 15 | 16 | [env.staging] 17 | zone_id = "ccea9c3e6518bf407939c5493a122423" 18 | workers_dev = false 19 | route = "staging.botblock.org/api/*" 20 | kv_namespaces = [ 21 | { binding = "RATELIMIT", id = "17c7991d3811406ca22d6329b17792b4" } 22 | ] 23 | 24 | [env.production] 25 | zone_id = "ccea9c3e6518bf407939c5493a122423" 26 | workers_dev = false 27 | route = "botblock.org/api/*" 28 | kv_namespaces = [ 29 | { binding = "RATELIMIT", id = "3c44c7d7710c45a2a0ae77379b5801b4" } 30 | ] 31 | --------------------------------------------------------------------------------
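As a closing illustration of how the pieces above fit together: every file under `src/routes` exports a `method`, a `route` pattern, and a `handler`; `src/util/getRoutes.js` loads them all via `require.context`, and `src/index.js` matches the pattern with `url-pattern` and calls the handler with `{ request, wait, sentry }`. The sketch below shows a hypothetical extra route reusing the KV-backed `ratelimit` helper; it is illustrative only and not part of this repository.

```js
// Hypothetical src/routes/ping.js -- illustrative only, not part of this repo.
// getRoutes.js would pick it up automatically via require.context, and index.js
// would dispatch matching GET /api/ping requests to the handler below.
const ratelimit = require('../util/ratelimit');

module.exports = {
    method: 'GET',
    route: '/api/ping',
    handler: async ({ request }) => {
        // Reuse the KV-backed ratelimiter: one request per route/IP every 10 seconds
        const ratelimited = await ratelimit(10, request);
        if (ratelimited) return ratelimited;

        return new Response(JSON.stringify({ pong: true }, null, 2), {
            status: 200,
            headers: {
                'Content-Type': 'application/json',
                'X-Served-By': 'botblock-api-worker',
            },
        });
    },
};
```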