├── .eslintignore
├── .eslintrc
├── .github
│   └── workflows
│       └── deploy.yml
├── .gitignore
├── .vscode
│   └── settings.json
├── LICENSE
├── README.md
├── package.json
├── pnpm-lock.yaml
├── pnpm-workspace.yaml
├── scripts
│   ├── new.ts
│   ├── run.ts
│   └── utils.ts
├── template
│   ├── .env
│   ├── README.md
│   ├── build.config.ts
│   ├── package.json
│   ├── src
│   │   └── index.ts
│   ├── types.d.ts
│   ├── wrangler.config.ts
│   └── wrangler.toml
├── tsconfig.json
├── type.d.ts
├── uno.config.ts
└── workers
    ├── README.md
    ├── github
    │   ├── README.md
    │   ├── build.config.ts
    │   ├── dist
    │   │   ├── index.d.ts
    │   │   └── index.mjs
    │   ├── package.json
    │   ├── screenshot
    │   │   └── 1.jpg
    │   ├── src
    │   │   └── index.ts
    │   ├── types.d.ts
    │   └── wrangler.config.ts
    ├── ip
    │   ├── README.md
    │   ├── build.config.ts
    │   ├── dist
    │   │   ├── index.d.ts
    │   │   └── index.mjs
    │   ├── package.json
    │   ├── src
    │   │   └── index.ts
    │   ├── types.d.ts
    │   └── wrangler.config.ts
    ├── openai
    │   ├── README.md
    │   ├── build.config.ts
    │   ├── dist
    │   │   ├── index.d.ts
    │   │   └── index.mjs
    │   ├── package.json
    │   ├── src
    │   │   ├── chat.html
    │   │   ├── default.config.ts
    │   │   └── index.ts
    │   ├── types.d.ts
    │   └── wrangler.config.ts
    ├── proxy
    │   ├── README.md
    │   ├── build.config.ts
    │   ├── dist
    │   │   ├── index.d.ts
    │   │   └── index.mjs
    │   ├── package.json
    │   ├── src
    │   │   ├── home.html
    │   │   └── index.ts
    │   ├── types.d.ts
    │   └── wrangler.config.ts
    ├── short-domain
    │   ├── README.md
    │   ├── build.config.ts
    │   ├── dist
    │   │   ├── index.d.ts
    │   │   └── index.mjs
    │   ├── package.json
    │   ├── src
    │   │   ├── index.ts
    │   │   ├── manage.html
    │   │   └── new.html
    │   ├── types.d.ts
    │   └── wrangler.config.ts
    └── utils.ts
/.eslintignore:
--------------------------------------------------------------------------------
1 | example
2 | node_modules
3 | .output
4 | dist
5 | tailwindcss.css
--------------------------------------------------------------------------------
/.eslintrc:
--------------------------------------------------------------------------------
1 | {
2 |   "extends": "@aliuq",
3 |   "globals": {
4 |     "Prism": true
5 |   }
6 | }
7 | 
--------------------------------------------------------------------------------
/.github/workflows/deploy.yml:
--------------------------------------------------------------------------------
1 | name: Deploy to Cloudflare Workers
2 | 
3 | on:
4 |   repository_dispatch:
5 | 
6 | jobs:
7 |   build-and-deploy:
8 |     runs-on: ubuntu-latest
9 |     name: Deploy
10 |     steps:
11 |       - uses: actions/checkout@v2
12 |       - name: Publish
13 |         uses: cloudflare/wrangler-action@2.0.0
14 |         with:
15 |           apiToken: ${{ secrets.CF_API_TOKEN }}
16 |         env:
17 |           CF_ACCOUNT_ID: ${{secrets.CF_ACCOUNT_ID}}
18 | 
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Logs
2 | 
3 | logs
4 | *.log
5 | npm-debug.log*
6 | yarn-debug.log*
7 | yarn-error.log*
8 | lerna-debug.log*
9 | .pnpm-debug.log*
10 | 
11 | # Diagnostic reports (https://nodejs.org/api/report.html)
12 | 
13 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
14 | 
15 | # Runtime data
16 | 
17 | pids
18 | *.pid
19 | *.seed
20 | *.pid.lock
21 | 
22 | # Directory for instrumented libs generated by jscoverage/JSCover
23 | 
24 | lib-cov
25 | 
26 | # Coverage directory used by tools like istanbul
27 | 
28 | coverage
29 | *.lcov
30 | 
31 | # nyc test coverage
32 | 
33 | .nyc_output
34 | 
35 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
36 | 
37 | .grunt
38 | 
39 | # Bower dependency directory (https://bower.io/)
40 | 
41 | bower_components
42 | 
43 | # node-waf configuration
44 | 
45 | .lock-wscript
46 | 
47 | # Compiled binary addons (https://nodejs.org/api/addons.html)
48 | 
49 | build/Release
50 | 
51 | # 
Dependency directories 52 | 53 | node_modules/ 54 | jspm_packages/ 55 | 56 | # Snowpack dependency directory (https://snowpack.dev/) 57 | 58 | web_modules/ 59 | 60 | # TypeScript cache 61 | 62 | \*.tsbuildinfo 63 | 64 | # Optional npm cache directory 65 | 66 | .npm 67 | 68 | # Optional eslint cache 69 | 70 | .eslintcache 71 | 72 | # Optional stylelint cache 73 | 74 | .stylelintcache 75 | 76 | # Microbundle cache 77 | 78 | .rpt2_cache/ 79 | .rts2_cache_cjs/ 80 | .rts2_cache_es/ 81 | .rts2_cache_umd/ 82 | 83 | # Optional REPL history 84 | 85 | .node_repl_history 86 | 87 | # Output of 'npm pack' 88 | 89 | \*.tgz 90 | 91 | # Yarn Integrity file 92 | 93 | .yarn-integrity 94 | 95 | # dotenv environment variable files 96 | 97 | .env 98 | .env.development.local 99 | .env.test.local 100 | .env.production.local 101 | .env.local 102 | 103 | # parcel-bundler cache (https://parceljs.org/) 104 | 105 | .cache 106 | .parcel-cache 107 | 108 | # Next.js build output 109 | 110 | .next 111 | out 112 | 113 | # Nuxt.js build / generate output 114 | 115 | .nuxt 116 | 117 | # Gatsby files 118 | 119 | .cache/ 120 | 121 | # Comment in the public line in if your project uses Gatsby and not Next.js 122 | 123 | # https://nextjs.org/blog/next-9-1#public-directory-support 124 | 125 | # public 126 | 127 | # vuepress build output 128 | 129 | .vuepress/dist 130 | 131 | # vuepress v2.x temp and cache directory 132 | 133 | .temp 134 | .cache 135 | 136 | # Docusaurus cache and generated files 137 | 138 | .docusaurus 139 | 140 | # Serverless directories 141 | 142 | .serverless/ 143 | 144 | # FuseBox cache 145 | 146 | .fusebox/ 147 | 148 | # DynamoDB Local files 149 | 150 | .dynamodb/ 151 | 152 | # TernJS port file 153 | 154 | .tern-port 155 | 156 | # Stores VSCode versions used for testing VSCode extensions 157 | 158 | .vscode-test 159 | 160 | # yarn v2 161 | 162 | .yarn/cache 163 | .yarn/unplugged 164 | .yarn/build-state.yml 165 | .yarn/install-state.gz 166 | .pnp.\* 167 | 168 | # wrangler project 169 | 170 | .history/** 171 | !template/.env 172 | **/dist/*.wrangler.toml 173 | workers/**/wrangler.toml 174 | .dev.vars 175 | workers/openai-forward/** 176 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "yaml.schemas": { 3 | "https://json.schemastore.org/github-workflow.json": "file:///home/liuq/apps/proxy-github/.github/workflows/deploy.yml" 4 | } 5 | } 6 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2022 liuq 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 
14 | 
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # cf-proxy
2 | 
3 | A repository for managing and publishing multiple Cloudflare Workers
4 | 
5 | ## Usage
6 | 
7 | Requirements:
8 | 
9 | + [pnpm](https://pnpm.io/zh/installation)
10 | + nodejs >= 16.13.0
11 | 
12 | ```bash
13 | # clone this repo
14 | git clone https://github.com/aliuq/cf-proxy.git
15 | ```
16 | 
17 | ### Command `new`
18 | 
19 | Create a new worker
20 | 
21 | ```bash
22 | pnpm run new [worker name]
23 | ```
24 | 
25 | options:
26 | 
27 | + `--name/-n`: worker name
28 | + `--date/-d`: a date in `yyyy-mm-dd` format, used to determine which version of the Workers runtime to use; defaults to the current date
29 | + `--dest/-o`: directory the worker is generated into, defaults to `./workers`
30 | 
31 | ### Command `exec`
32 | 
33 | Manage a worker
34 | 
35 | ```bash
36 | pnpm run exec [worker name] [command] [params] [args]
37 | ```
38 | 
39 | options:
40 | 
41 | + `--config/-c`: path to the configuration file
42 | + `--env/-e`: path to the environment variables file
43 | + `--help/-h`: show help
44 | + `--version/-v`: show version
45 | + `--params`: positional parameters
46 | + `--workers-root`: directory the workers are generated into, defaults to `./workers`
47 | + `--unbuild`: build the worker with unbuild, defaults to `true`
48 | + `--push`: whether to push to GitHub, defaults to `false`
49 | + `--[Name]`: additional wrangler command arguments
50 | 
51 | ## License
52 | 
53 | [MIT](/LICENSE)
54 | 
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 |   "name": "cf-workers",
3 |   "type": "module",
4 |   "version": "0.0.0",
5 |   "private": true,
6 |   "packageManager": "pnpm@7.0.0",
7 |   "scripts": {
8 |     "new": "tsx ./scripts/new.ts",
9 |     "exec": "tsx ./scripts/run.ts",
10 |     "lint": "eslint . 
--ext .ts" 11 | }, 12 | "devDependencies": { 13 | "@aliuq/eslint-config": "^0.0.5", 14 | "@cloudflare/workers-types": "^4.20221111.1", 15 | "@iarna/toml": "^3.0.0", 16 | "@types/fs-extra": "^11.0.1", 17 | "@types/inquirer": "^9.0.3", 18 | "@types/node": "^18.11.18", 19 | "dotenv": "^16.0.3", 20 | "eslint": "^8.32.0", 21 | "execa": "^6.1.0", 22 | "fs-extra": "^11.1.0", 23 | "get-port": "^6.1.2", 24 | "handlebars": "^4.7.7", 25 | "inquirer": "^9.1.4", 26 | "kolorist": "^1.6.0", 27 | "ora": "^6.1.2", 28 | "semver": "^7.3.8", 29 | "toml": "^3.0.0", 30 | "tsx": "^3.12.2", 31 | "typescript": "^4.9.4", 32 | "unbuild": "^1.1.1", 33 | "wrangler": "2.7.1" 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /pnpm-workspace.yaml: -------------------------------------------------------------------------------- 1 | packages: 2 | - workers/* 3 | -------------------------------------------------------------------------------- /scripts/new.ts: -------------------------------------------------------------------------------- 1 | // Create a new worker by template 2 | import path from 'path' 3 | import type { QuestionCollection } from 'inquirer' 4 | import inquirer from 'inquirer' 5 | import { parseArgs, renderTemplate } from './utils' 6 | 7 | interface Args { 8 | name: boolean 9 | date: string 10 | dest: string 11 | } 12 | 13 | const args = parseArgs({ 14 | maps: { 15 | n: 'name', 16 | d: 'date', 17 | o: 'dest', 18 | }, 19 | }) 20 | 21 | // worker 名称 22 | const workerName = process.argv[2] || args.name 23 | // worker 根目录 24 | const workersRoot = 'workers' 25 | 26 | const questions: QuestionCollection = [ 27 | { 28 | type: 'input', 29 | name: 'name', 30 | message: 'Worker name:', 31 | validate: (input: string) => { 32 | if (!input) 33 | return 'Worker name is required' 34 | if (!/^[a-z0-9-]+$/.test(input)) 35 | return 'Worker name must be lowercase letters, numbers, and dashes' 36 | return true 37 | }, 38 | default: args.name, 39 | when: () => !workerName, 40 | }, 41 | { 42 | type: 'input', 43 | name: 'date', 44 | message: 'Date:', 45 | default: args.date || new Date().toISOString().split('T')[0], 46 | }, 47 | { 48 | type: 'input', 49 | name: 'dest', 50 | message: 'Destination:', 51 | default: args.dest || workersRoot, 52 | }, 53 | ] 54 | const answers = await inquirer.prompt(questions) 55 | answers.name = answers.name || workerName 56 | 57 | const root = process.cwd() 58 | const templateRoot = path.resolve(root, 'template') 59 | const destRoot = path.resolve(root, answers.dest, answers.name) 60 | renderTemplate(templateRoot, destRoot, answers) 61 | 62 | -------------------------------------------------------------------------------- /scripts/run.ts: -------------------------------------------------------------------------------- 1 | import fs from 'fs' 2 | import path from 'path' 3 | import os from 'os' 4 | import inquirer from 'inquirer' 5 | import toml from '@iarna/toml' 6 | import * as execa from 'execa' 7 | import dotenv from 'dotenv' 8 | import { cyan, gray, green, lightCyan, magenta, red, yellow } from 'kolorist' 9 | import fse from 'fs-extra' 10 | import ora from 'ora' 11 | import semver from 'semver' 12 | import { execString, parseArgs } from './utils' 13 | 14 | // 15 | // === Check required wrangler and node version 16 | // 17 | await checkRequired() 18 | 19 | // 20 | // === Parse args and options 21 | // 22 | const { worker, command, params, workersRoot, push, unbuild, ...flags } = parseCliOptions() 23 | /** All options */ 24 | const answers = { 
worker, command, workersRoot, push, unbuild, params, flags } 25 | 26 | // help and version command will not run any worker 27 | answers.flags.help && printtHelp() 28 | answers.flags.version && await printVersion() 29 | 30 | // Get all available workers 31 | const workers = getAllWorkers(answers.workersRoot) 32 | if (!answers.worker || !workers.includes(answers.worker)) { 33 | console.log(yellow(`\n** Worker name (${answers.worker}) is invalid. **\n`)) 34 | const answer = await inquirer.prompt([{ 35 | type: 'list', 36 | name: 'worker', 37 | message: 'Select a worker to run:', 38 | choices: workers, 39 | }]) 40 | answers.worker = answer.worker 41 | } 42 | 43 | /** See more at `wrangler -h` 44 | */ 45 | const commands = [ 46 | // { value: 'docs', name: 'docs [command] 📚 Open wrangler\'s docs in your browser' }, 47 | // { value: 'init', name: 'init [name] 📥 Create a wrangler.toml configuration file' }, 48 | { value: 'dev', name: 'dev [script] 👂 Start a local server for developing your worker' }, 49 | { value: 'publish', name: 'publish [script] 🆙 Publish your Worker to Cloudflare.' }, 50 | { value: 'delete', name: 'delete [script] 🗑 Delete your Worker from Cloudflare.' }, 51 | // { value: 'tail', name: 'tail [worker] 🦚 Starts a log tailing session for a published Worker.' }, 52 | // { value: 'secret', name: 'secret 🤫 Generate a secret that can be referenced in a Worker' }, 53 | { value: 'secret:bulk', name: 'secret:bulk 🗄️ Bulk upload secrets for a Worker' }, 54 | // { value: 'kv:namespace', name: 'kv:namespace 🗂️ Interact with your Workers KV Namespaces' }, 55 | // { value: 'kv:key', name: 'kv:key 🔑 Individually manage Workers KV key-value pairs' }, 56 | // { value: 'kv:bulk', name: 'kv:bulk 💪 Interact with multiple Workers KV key-value pairs at once' }, 57 | // { value: 'pages', name: 'pages ⚡️ Configure Cloudflare Pages' }, 58 | // { value: 'queues', name: 'queues 🇶 Configure Workers Queues' }, 59 | // { value: 'r2', name: 'r2 📦 Interact with an R2 store' }, 60 | // { value: 'dispatch-namespace', name: 'dispatch-namespace 📦 Interact with a dispatch namespace' }, 61 | // { value: 'd1', name: 'd1 🗄 Interact with a D1 database' }, 62 | // { value: 'pubsub', name: 'pubsub 📮 Interact and manage Pub/Sub Brokers' }, 63 | // { value: 'login', name: 'login 🔓 Login to Cloudflare' }, 64 | // { value: 'logout', name: 'logout 🚪 Logout from Cloudflare' }, 65 | // { value: 'whoami', name: 'whoami 🕵️ Retrieve your user info and test your auth config' }, 66 | // { value: 'types', name: 'types 📝 Generate types from bindings & module rules in config' }, 67 | // { value: 'deployments', name: 'deployments 🚢 Displays the 10 most recent deployments for a worker' }, 68 | ] 69 | if (!answers.command || !commands.map(c => c.value).includes(answers.command)) { 70 | console.log(yellow(`\n** Command (${answers.command}) is invalid. **\n`)) 71 | const answer = await inquirer.prompt([{ 72 | type: 'list', 73 | pageSize: 25, 74 | name: 'command', 75 | message: 'Select a command to run:', 76 | choices: commands, 77 | }]) 78 | answers.command = answer.command 79 | } 80 | 81 | // 82 | // === Related paths and read configurations. 
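// Illustrative example: running `pnpm run exec github dev` resolves the paths below against
// `<repo>/workers/github`, so that worker's `wrangler.config.ts` and `.env` are read before
// any wrangler command is spawned.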
83 | // 84 | /** Path to project */ 85 | const root = process.cwd() 86 | /** Path to worker directory */ 87 | const workerRoot = path.resolve(root, answers.workersRoot, answers.worker) 88 | /** Path to `.dev.vars` */ 89 | const devVarsPath = path.resolve(workerRoot, '.dev.vars') 90 | /** Path to `wrangler.toml`, only used in dev mode */ 91 | const wranglerTomlDevPath = path.resolve(workerRoot, 'wrangler.toml') 92 | /** Path to `wrangler.config.ts`, for generate dynamic configuration */ 93 | const wranglerConfigPath = path.resolve(workerRoot, 'wrangler.config.ts') 94 | /** Path to `.env` */ 95 | const envPath = path.resolve(workerRoot, '.env') 96 | /** Path to `package.json` */ 97 | const packagePath = path.resolve(workerRoot, 'package.json') 98 | 99 | console.log(` 100 | ${lightCyan('Related paths:')} 101 | 102 | ${magenta('root:')} ${cyan(root)} 103 | ${magenta('workerRoot:')} ${cyan(workerRoot)} 104 | ${magenta('wrangler.toml:')} ${cyan(wranglerTomlDevPath)} 105 | ${magenta('wrangler.config.ts:')} ${cyan(wranglerConfigPath)} 106 | ${magenta('package.json:')} ${cyan(packagePath)} 107 | ${magenta('.dev.vars:')} ${cyan(devVarsPath)} 108 | ${magenta('.env:')} ${cyan(envPath)} 109 | `) 110 | 111 | // Read configurations 112 | const envConfig = fs.existsSync(envPath) ? dotenv.parse(fs.readFileSync(envPath, 'utf-8')) : {} 113 | const { wranglerConfig } = await import(wranglerConfigPath) 114 | 115 | // 116 | // === Run command 117 | console.info(cyan(`Running ${green(answers.command)} in worker ${green(answers.worker)}...`)) 118 | if (answers.command === 'dev') { 119 | // Get the environment variables 120 | const envVars = Object.keys(envConfig).length > 0 ? await parseEnvConfig(envConfig, workerRoot) : {} 121 | const envVarsInternal: Record = {} 122 | Object.keys(envVars).forEach((key: string) => { 123 | if (key.startsWith('__') && key.endsWith('__')) { 124 | envVarsInternal[key.replace(/__(.*?)__/, '$1')] = envVars[key] 125 | delete envVars[key] 126 | } 127 | }) 128 | 129 | // Parsing the worker configuration 130 | const wranglerConfigParsed = await wranglerConfig({ env: envVarsInternal }) 131 | console.debug('Read the parsed `wrangler.config.ts` configuration:', gray(JSON.stringify({ 'wrangler.toml': wranglerConfigParsed }))) 132 | 133 | // Select environment 134 | await selectEnv(wranglerConfigParsed, answers) 135 | 136 | // Parse the `wrangler.toml` path, based on `workerRoot` 137 | const wranglerTomlPath = wranglerTomlDevPath 138 | console.debug('Parsing the `wrangler.toml` path:', cyan(wranglerTomlPath)) 139 | 140 | // Store the parsed worker configuration in `wrangler.toml` 141 | await fse.writeFile(wranglerTomlPath, toml.stringify(wranglerConfigParsed || {})) 142 | console.debug('Write to `wrangler.toml` file successfully:', cyan(wranglerTomlPath)) 143 | 144 | // Store `key:value` from `.env` into `.dev.vars`, overwriting the original content 145 | if (Object.keys(envVars).length > 0) { 146 | const envVarsStr = Object.keys(envVars).map((key: string) => `${key}=${envVars[key]}`) 147 | await fse.outputFile(devVarsPath, envVarsStr.join(os.EOL)) 148 | console.info('Write to `.dev.vars` file successfully:', cyan(devVarsPath)) 149 | } 150 | 151 | // Run command 152 | answers.flags.local = typeof answers.flags.local === 'undefined' ? 
true : answers.flags.local 153 | await runCommand(answers, workerRoot) 154 | } 155 | else if (answers.command === 'publish') { 156 | // Confirm publish 157 | await checkConfirm() 158 | 159 | if (answers.unbuild) { 160 | // Unbuild package 161 | console.debug(`${gray(`Execution command(${answers.command}):`)} ${green('unbuild')}\n`) 162 | await execa.execaCommand('unbuild', { cwd: workerRoot, stdio: 'inherit' }) 163 | } 164 | 165 | // Bumpp package.json version 166 | if (answers.push) { 167 | const bumppCommandArr = ['npx', 'bumpp', 'package.json', '--no-tag', '--commit', `chore(${answers.worker}):\\ release\\ v`] 168 | console.debug(`${gray('Execution command(bumpp):')} ${green(bumppCommandArr.join(' '))}\n`) 169 | await execa.execaCommand(bumppCommandArr.join(' '), { cwd: workerRoot, stdio: 'inherit' }) 170 | } 171 | 172 | // Get the environment variables 173 | const envVars = Object.keys(envConfig).length > 0 ? await parseEnvConfig(envConfig, workerRoot) : {} 174 | const envVarsInternal: Record = {} 175 | Object.keys(envVars).forEach((key: string) => { 176 | if (key.startsWith('__') && key.endsWith('__')) { 177 | envVarsInternal[key.replace(/__(.*?)__/, '$1')] = envVars[key] 178 | delete envVars[key] 179 | } 180 | }) 181 | 182 | // Parsing the worker configuration 183 | const { outDir: unbuildOutDir, ...wranglerConfigParsed } = await wranglerConfig({ unbuild: answers.unbuild, env: envVarsInternal }) 184 | console.debug('Read the parsed `wrangler.config.ts` configuration:', gray(JSON.stringify({ 'wrangler.toml': wranglerConfigParsed }))) 185 | 186 | // Select environment 187 | await selectEnv(wranglerConfigParsed, answers) 188 | 189 | // Parse the `wrangler.toml` path, based on `workerRoot` 190 | let wranglerTomlPath = wranglerTomlDevPath 191 | let tomlName = 'wrangler.toml' 192 | if (answers.unbuild) { 193 | // Set `outDir` path 194 | const outDir = path.resolve(workerRoot, unbuildOutDir) 195 | tomlName = `${answers.worker}.wrangler.toml` 196 | wranglerTomlPath = path.resolve(outDir, tomlName) 197 | // Set `wrangler.toml` path 198 | answers.flags.config = wranglerTomlPath 199 | } 200 | console.debug(`Parsing the \`${tomlName}\` path:`, cyan(wranglerTomlPath)) 201 | 202 | // Store the parsed worker configuration in `wrangler.toml` 203 | await fse.writeFile(wranglerTomlPath, toml.stringify(wranglerConfigParsed || {})) 204 | console.debug(`Write to \`${tomlName}\` file successfully:`, cyan(wranglerTomlPath)) 205 | 206 | // Run command 207 | await runCommand(answers, workerRoot) 208 | 209 | // Secrets 210 | if (Object.keys(envVars).length > 0) { 211 | const tmpEnvPath = path.resolve(workerRoot, '.tmp.env.json') 212 | await fse.outputJson(tmpEnvPath, envVars) 213 | console.debug('Write to `.tmp.env.json` file successfully:', cyan(tmpEnvPath)) 214 | const secretAnswers = Object.assign({}, answers, { command: 'secret:bulk', params: tmpEnvPath }) 215 | await runCommand(secretAnswers, workerRoot) 216 | await fse.unlink(tmpEnvPath) 217 | console.debug('Delete `.tmp.env.json` file successfully:', cyan(tmpEnvPath)) 218 | } 219 | 220 | // Publish assets to github 221 | if (answers.unbuild && answers.push) { 222 | const outDir = path.resolve(workerRoot, unbuildOutDir) 223 | const gitCommandArr = [ 224 | `git add ${outDir}`, 225 | `git commit -m chore(${answers.worker}):\\ publish\\ assets`, 226 | 'git push', 227 | ] 228 | console.info(cyan('\nPublish assets...')) 229 | 230 | for await (const command of gitCommandArr) { 231 | console.debug(`${gray('Execution command(git):')} ${green(command)}\n`) 232 | 
await execa.execaCommand(command, { cwd: workerRoot, stdio: 'inherit' }) 233 | } 234 | } 235 | } 236 | else if (answers.command === 'delete') { 237 | // Confirm delete 238 | await checkConfirm() 239 | // Parsing the worker configuration 240 | const wranglerConfigParsed = await wranglerConfig() 241 | console.debug('Read the parsed `wrangler.config.ts` configuration:', gray(JSON.stringify({ 'wrangler.toml': wranglerConfigParsed }))) 242 | // Select environment 243 | await selectEnv(wranglerConfigParsed, answers) 244 | // Run command 245 | await runCommand(answers, workerRoot) 246 | } 247 | else if (answers.command === 'secret:bulk') { 248 | // Confirm secrets 249 | await checkConfirm() 250 | 251 | // Get the environment variables 252 | const envVars = Object.keys(envConfig).length > 0 ? await parseEnvConfig(envConfig, workerRoot) : {} 253 | Object.keys(envVars).forEach((key: string) => { 254 | if (key.startsWith('__') && key.endsWith('__')) 255 | delete envVars[key] 256 | }) 257 | 258 | if (!Object.keys(envVars).length) { 259 | console.error(red(`No secrets found in \`${envPath}\` file`)) 260 | process.exit(0) 261 | } 262 | 263 | // Parsing the worker configuration 264 | const wranglerConfigParsed = await wranglerConfig() 265 | console.debug('Read the parsed `wrangler.config.ts` configuration:', gray(JSON.stringify({ 'wrangler.toml': wranglerConfigParsed }))) 266 | // Select environment 267 | await selectEnv(wranglerConfigParsed, answers) 268 | 269 | const tmpEnvPath = path.resolve(workerRoot, '.tmp.env.json') 270 | await fse.outputJson(tmpEnvPath, envVars) 271 | console.debug('Write to `.tmp.env.json` file successfully:', cyan(tmpEnvPath)) 272 | 273 | answers.params = tmpEnvPath 274 | await runCommand(answers, workerRoot) 275 | 276 | await fse.unlink(tmpEnvPath) 277 | console.debug('Delete `.tmp.env.json` file successfully:', cyan(tmpEnvPath)) 278 | } 279 | 280 | // 281 | // 282 | // ====================== Functions ====================== 283 | // 284 | // 285 | 286 | /** Confirmation operation before executing the next step, exit the process if `false` 287 | */ 288 | async function checkConfirm() { 289 | console.log() 290 | const answer = await inquirer.prompt({ 291 | type: 'confirm', 292 | name: 'confirm', 293 | message: 'Are you sure to continue?', 294 | default: false, 295 | }) 296 | if (!answer.confirm) { 297 | console.info(yellow('Exit the process...')) 298 | process.exit(0) 299 | } 300 | } 301 | 302 | /** Check required wrangler and node version, exit the process if not passed 303 | */ 304 | async function checkRequired() { 305 | console.log(cyan('Checking required...\n')) 306 | // Check Node version >= v16.13.0, exit if false 307 | const spinner = ora('Checking Node version...').start() 308 | const MIN_NODE_VERSION = '16.13.0' 309 | if (!semver.satisfies(process.version, `>=${MIN_NODE_VERSION}`)) { 310 | spinner.text = red(`Node version must >= ${MIN_NODE_VERSION}, current is ${process.version}`) 311 | spinner.fail() 312 | process.exit(0) 313 | } 314 | else { 315 | spinner.text = green(`Node version: ${process.version}`) 316 | spinner.succeed() 317 | } 318 | 319 | // Check wrangler version 320 | const spinner2 = ora('Checking wrangler library...').start() 321 | try { 322 | const { stdout } = await execa.execaCommand('wrangler -v') 323 | spinner2.text = green(`Wrangler version: v${stdout}`) 324 | spinner2.succeed() 325 | } 326 | catch (error) { 327 | spinner2.text = red('Wrangler not found, please install it first.') 328 | spinner2.fail() 329 | // TODO: Install wrangler 330 | 
process.exit(0) 331 | } 332 | } 333 | 334 | /** Parsed `process.argv` to object */ 335 | function parseCliOptions() { 336 | /** Slice number process.argv from */ 337 | let start = 4 338 | let worker = process.argv[2] 339 | if (!worker || !worker.match(/^[a-zA-Z]/)) { 340 | start = 3 341 | worker = '' 342 | } 343 | let command = process.argv[3] 344 | if (!command || !command.match(/^[a-zA-Z]/)) { 345 | start = 2 346 | command = '' 347 | } 348 | 349 | const args = parseArgs({ 350 | maps: { 351 | c: 'config', 352 | e: 'env', 353 | h: 'help', 354 | v: 'version', 355 | }, 356 | start, 357 | }) 358 | return { 359 | worker, 360 | command, 361 | params: args.params ?? '', 362 | workersRoot: args.workersRoot ?? 'workers', 363 | unbuild: args.unbuild ?? true, 364 | push: args.push ?? false, 365 | /** Options, from `wrangler -h` */ 366 | ...args, 367 | } as Required 368 | } 369 | 370 | /** Get all available workers in `workersRoot` path. 371 | * 372 | * If a worker directory contains `wrangler.toml` and `package.json` files, it will be returned. 373 | * 374 | * Ensure `wrangler.toml` name same as directory name. 375 | * @param workersRoot Path to workers directory 376 | * 377 | * @tips `wrangler.toml` can be absent, but `package.json` must have 378 | */ 379 | function getAllWorkers(workersRoot: string) { 380 | const workers = fs.readdirSync(workersRoot).filter((name) => { 381 | const stat = fs.statSync 382 | return stat(`${workersRoot}/${name}`).isDirectory() 383 | && fs.existsSync(`${workersRoot}/${name}/package.json`) 384 | }) 385 | return workers 386 | } 387 | 388 | function printtHelp() { 389 | console.log(` 390 | Usage: pnpm run exec [WorkerName] [Command] [Args] 391 | 392 | Options: 393 | --config, -c Path to .toml configuration file 394 | --env, -e Environment to use for operations and .env files 395 | --help, -h Show help 396 | --version, -v Show version number 397 | --worker Worker name 398 | --command Command name 399 | --params Positionals parameters 400 | --workersRoot Workers root directory 401 | --unbuild Use Unbuild to build worker 402 | `) 403 | process.exit(0) 404 | } 405 | 406 | async function printVersion() { 407 | const { stdout } = await execa.execaCommand('wrangler -v') 408 | console.log(`wrangler v${stdout}`) 409 | process.exit(0) 410 | } 411 | 412 | /** Select a enviroment */ 413 | async function selectEnv(wranglerConfigParsed: Record, answers: Record) { 414 | const envs = Object.keys(wranglerConfigParsed.env || {}) 415 | if (envs.length > 0) 416 | console.log(cyan(`\nFound ${envs.length} enviroments in \`wrangler.config.ts\`: ${envs.join(', ')}\n`)) 417 | 418 | const env = answers.flags.env 419 | if (env) 420 | console.log(cyan(`Default enviroment: ${green(env)}`)) 421 | 422 | if (envs.length > 0 && (typeof env === 'undefined' || (env && !envs.includes(env)))) { 423 | const answer = await inquirer.prompt({ 424 | type: 'list', 425 | name: 'env', 426 | message: 'Select a enviroment', 427 | choices: [{ name: 'Empty', value: '' }, ...envs], 428 | }) 429 | console.log(cyan(`Default enviroment: ${green(answer.env)}`)) 430 | answers.flags.env = answer.env 431 | } 432 | } 433 | 434 | /** Parse `.env` file content to object 435 | */ 436 | async function parseEnvConfig(envConfig: Record, cwd: string) { 437 | if (envConfig && Object.keys(envConfig).length > 0) { 438 | const envVars: Record = {} 439 | for (const key of Object.keys(envConfig)) { 440 | const value = envConfig[key] 441 | envVars[key] = typeof value === 'string' ? 
await execString(value, cwd) : value 442 | } 443 | console.info('Read the parsed `.env` configuration:', gray(JSON.stringify({ envKeys: Object.keys(envVars) }))) 444 | return envVars 445 | } 446 | return {} 447 | } 448 | 449 | /** Run `wrangler` command */ 450 | async function runCommand(answers: Record, cwd: string) { 451 | // Splice command 452 | const commandArr = ['wrangler', answers.command] 453 | if (typeof answers.params === 'string' && answers.params) 454 | commandArr.push(answers.params) 455 | 456 | Object.keys(answers.flags).forEach((key) => { 457 | if (answers.flags[key]) { 458 | commandArr.push(`--${key}`) 459 | commandArr.push(answers.flags[key]) 460 | } 461 | }) 462 | 463 | // Run command 464 | console.info(`Execution command(${green(answers.command)}): ${green(commandArr.join(' '))}\n`) 465 | await execa.execaCommand(commandArr.join(' '), { cwd, stdio: 'inherit' }) 466 | } 467 | 468 | // 469 | // ================ Types ================ 470 | // 471 | interface Args { 472 | [key: string]: any 473 | /** Path to .toml configuration file */ 474 | config?: string 475 | /** Environment to use for operations and .env files */ 476 | env?: string 477 | /** Print help text */ 478 | help?: boolean 479 | /** Print version info */ 480 | version?: boolean 481 | /** Worker name, ensure the worker root path */ 482 | worker?: string 483 | /** Command name, from `wrangler -h` */ 484 | command?: string 485 | /** Command params, from `wrangler -h` */ 486 | params?: string 487 | /** Path to workers directory */ 488 | workersRoot?: string 489 | /** Use unbuild bundle package, only support publish */ 490 | unbuild?: boolean 491 | /** Enable bumpp version and push to github 492 | * @default false 493 | */ 494 | push?: boolean 495 | } 496 | -------------------------------------------------------------------------------- /scripts/utils.ts: -------------------------------------------------------------------------------- 1 | import path from 'path' 2 | import fs from 'fs' 3 | import handlebars from 'handlebars' 4 | import * as execa from 'execa' 5 | 6 | // 遍历templateRoot目录下的所有文件,如果是文件夹,递归遍历,如果是文件,渲染模板,渲染模板时,使用answers作为模板数据,渲染后的文件,保存到destRoot目录下 7 | export function renderTemplate(templateRoot: string, destRoot: string, answers: Object) { 8 | if (!fs.existsSync(templateRoot)) 9 | throw new Error(`template root: ${templateRoot} not exist`) 10 | 11 | if (!fs.existsSync(destRoot)) 12 | fs.mkdirSync(destRoot) 13 | 14 | // read all file names in templateRoot 15 | const files = fs.readdirSync(templateRoot) 16 | 17 | files.forEach((fileName) => { 18 | const templatePath = path.resolve(templateRoot, fileName) 19 | const destPath = path.resolve(destRoot, fileName) 20 | // judge if file is folder 21 | const isDir = fs.statSync(templatePath).isDirectory() 22 | if (isDir) { 23 | // clone recursively 24 | renderTemplate(templatePath, destPath, answers) 25 | } 26 | else { 27 | // read template file 28 | const templateContent: string = fs.readFileSync(templatePath, 'utf-8') 29 | const content = handlebars.compile(templateContent)(answers) 30 | // write to dest file 31 | fs.writeFileSync(destPath, content, 'utf-8') 32 | } 33 | }) 34 | } 35 | 36 | interface ParseArgs { 37 | maps?: Record 38 | start?: number 39 | serialize?: (value: any, key: string) => any 40 | } 41 | 42 | export function parseArgs(options: ParseArgs): T { 43 | const { 44 | maps = {}, 45 | start = 2, 46 | serialize = (value: any, _key: string) => value, 47 | } = options 48 | const args = process.argv.slice(start) 49 | const result: Pick = {} 50 | 51 | 
const isMatchKey = (key: string) => key.match(/^--(.+)/) 52 | const isMatchShortKey = (key: string) => key.match(/^-([a-zA-Z])/) 53 | const validKey = (key: string) => isMatchKey(key) || isMatchShortKey(key) 54 | const getRealKey = (key: string, maps: Record = {}) => { 55 | if (isMatchKey(key)) 56 | return key.replace(/^--/, '') 57 | if (isMatchShortKey(key)) 58 | return maps[key.replace(/^-/, '')] 59 | } 60 | 61 | for (let i = 0; i < args.length; i++) { 62 | if (!validKey(args[i])) 63 | continue 64 | 65 | const _key = getRealKey(args[i], maps) 66 | const nextKey = args[i + 1] 67 | if (_key) { 68 | // no-xxx 69 | if (_key.match(/^(no-)[a-zA-Z]/) && (!nextKey || validKey(nextKey))) { 70 | result[_key.substring(3)] = false 71 | continue 72 | } 73 | result[_key] = nextKey && !validKey(nextKey) ? serialize(nextKey, _key) : true 74 | } 75 | } 76 | 77 | return result 78 | } 79 | 80 | export function sleep(ms: number) { 81 | return new Promise(resolve => setTimeout(resolve, ms)) 82 | } 83 | 84 | // 从字符串中提取出命令,执行命令,返回命令的输出 85 | export async function execString(str: string, cwd?: string) { 86 | const matches = str.match(/^\$\((.*?)\)$/) 87 | if (matches) { 88 | const { stdout, stderr } = await execa.execaCommand(matches[1], cwd ? { cwd } : {}) 89 | if (stderr) 90 | throw new Error(stderr) 91 | return stdout 92 | } 93 | return str 94 | } 95 | -------------------------------------------------------------------------------- /template/.env: -------------------------------------------------------------------------------- 1 | # Environment Variables 2 | # FOO=bar 3 | 4 | # __[Name]__ means a internal variable for build `wrangler.toml` 5 | # __DOMAIN__=example.com 6 | -------------------------------------------------------------------------------- /template/README.md: -------------------------------------------------------------------------------- 1 | # Worker: {{ name }} 2 | 3 | ## 使用 4 | 5 | ```bash 6 | # 开发 7 | pnpm run exec {{ name }} dev --env localhost 8 | # 部署 9 | pnpm run exec {{ name }} publish --env production 10 | ``` 11 | 12 | ## 环境变量 13 | 14 | 在 `.env` 文件中进行编辑,用于编译配置的环境变量以 `__` 开头和结尾,例如 `__DOMAIN__`,这样是为了区分和 `wrangler` 的环境变量 15 | 16 | | 环境变量 | 说明 | 17 | | :--- | :--- | 18 | | `__DOMAIN__` | 域名,用于配置路由 | 19 | -------------------------------------------------------------------------------- /template/build.config.ts: -------------------------------------------------------------------------------- 1 | import { defineBuildConfig } from 'unbuild' 2 | 3 | export default defineBuildConfig({ 4 | entries: [ 5 | 'src/index', 6 | ], 7 | clean: true, 8 | // If met some error, you can set failOnWarn to false to ignore it. 
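// (unbuild v1 defaults `failOnWarn` to `true`, so any build warning aborts the build.)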
9 | // failOnWarn: false, 10 | // Incompatible with `rollup.esbuild.minify`, an error will occur 11 | declaration: true, 12 | externals: [], 13 | rollup: { 14 | esbuild: { 15 | // See `declaration` 16 | // minify: true, 17 | }, 18 | }, 19 | }) 20 | -------------------------------------------------------------------------------- /template/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "cf-workers-{{ name }}", 3 | "type": "module", 4 | "version": "0.0.0", 5 | "description": "Simple cloudflare worker {{ name }}", 6 | "author": "AliuQ ", 7 | "license": "MIT", 8 | "homepage": "https://github.com/aliuq/cf-proxy/tree/master/workers/{{ name }}#readme", 9 | "bugs": { 10 | "url": "https://github.com/aliuq/cf-proxy/issues" 11 | }, 12 | "keywords": [ 13 | "cloudflare", 14 | "worker", 15 | "{{ name }}" 16 | ], 17 | "main": "dist/index.mjs", 18 | "scripts": { 19 | "dev": "wrangler dev -l", 20 | "build": "unbuild" 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /template/src/index.ts: -------------------------------------------------------------------------------- 1 | // @ts-expect-error it is a template 2 | import { needCancelRequest, replyText, replyUnsupport } from '../../utils' 3 | 4 | export default { 5 | async fetch(request: Request, env: ENV, _ctx: ExecutionContext): Promise { 6 | const needCancel = await needCancelRequest(request) 7 | if (needCancel) 8 | return needCancel 9 | 10 | const url = new URL(request.url) 11 | if (url.pathname === '/robots.txt') 12 | return replyText('User-agent: *\nDisallow: /', env) 13 | 14 | // TODO: Do something 15 | 16 | return await replyUnsupport({ url: decodeURIComponent(url.toString()) }, env) 17 | }, 18 | } 19 | -------------------------------------------------------------------------------- /template/types.d.ts: -------------------------------------------------------------------------------- 1 | interface ENV { 2 | GIT_HASH: string 3 | VERSION: string 4 | } 5 | 6 | interface INTERNAL_ENV { 7 | DOMAIN?: string 8 | } 9 | 10 | interface Options { 11 | unbuild?: boolean 12 | env: INTERNAL_ENV 13 | } 14 | -------------------------------------------------------------------------------- /template/wrangler.config.ts: -------------------------------------------------------------------------------- 1 | // 这里处理 wrangler 的配置文件 2 | // 3 | import path from 'path' 4 | import * as execa from 'execa' 5 | import getPort, { portNumbers } from 'get-port' 6 | import type { BuildEntry } from 'unbuild' 7 | import pkg from './package.json' 8 | 9 | async function wranglerConfig({ unbuild: useUnbuild, env: _env }: Options = { unbuild: false, env: {} }) { 10 | const port = await getPort({ port: portNumbers(8787, 8887) }) 11 | 12 | const { default: buildConfig } = await import('./build.config') 13 | const outDir = buildConfig.outDir || 'dist' 14 | 15 | /** 16 | * @example 17 | * entries: ['src/worker'] => nameFull: 'src/worker'; outName: 'worker' 18 | * entries: [{ input: 'src/worker' }] => nameFull: 'src/worker'; outName: 'worker' 19 | * 20 | * main: isDev ? `src/worker.ts` : `worker.mjs` 21 | * 22 | */ 23 | const entrie = (<(string | BuildEntry)[]>buildConfig.entries)[0] 24 | const nameFull = typeof entrie === 'string' ? 
entrie : entrie.input 25 | const outName = path.basename(nameFull) 26 | 27 | const vars = { 28 | GIT_HASH: execa.execaCommandSync('git rev-parse --short HEAD').stdout, 29 | VERSION: `v${pkg.version}`, 30 | } 31 | 32 | return { 33 | name: '{{ name }}', 34 | main: useUnbuild ? `${outName}.mjs` : `${nameFull}.ts`, 35 | compatibility_date: '{{ date }}', 36 | /** If set to `true`, the worker will not be bundled. so the output file 37 | * must be a single file and no import module. if exists, will throw error. 38 | * 39 | * such as: `import axios from 'axios'`, 40 | */ 41 | // no_bundle: undefined, 42 | vars: { 43 | mode: 'default', 44 | ...vars, 45 | }, 46 | dev: { 47 | ip: 'localhost', 48 | // local_protocol: 'https', 49 | port, 50 | }, 51 | env: { 52 | // For local development, Do not pulish the enviroment to cloudflare. 53 | localhost: { 54 | vars: { 55 | mode: 'localhost', 56 | ...vars, 57 | }, 58 | // routes: [ 59 | // { pattern: `foo.localhost:${port}`, zone_name: `localhost:${port}`, custom_domain: true }, 60 | // ], 61 | }, 62 | production: { 63 | vars: { 64 | mode: 'production', 65 | ...vars, 66 | }, 67 | // routes: env.DOMAIN 68 | // ? [ 69 | // { pattern: `demo.${env.DOMAIN}`, zone_name: env.DOMAIN, custom_domain: true }, 70 | // ] 71 | // : undefined, 72 | }, 73 | }, 74 | outDir: useUnbuild ? outDir : undefined, 75 | } 76 | } 77 | 78 | export default wranglerConfig 79 | export { wranglerConfig } 80 | -------------------------------------------------------------------------------- /template/wrangler.toml: -------------------------------------------------------------------------------- 1 | name = "{{ name }}" 2 | main = "src/index.ts" 3 | compatibility_date = "{{ date }}" 4 | 5 | [dev] 6 | port = 8787 7 | ip = "localhost" 8 | # local_protocol = "https" 9 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "es2021", 4 | "lib": [ 5 | "es2021", 6 | "dom" 7 | ], 8 | "jsx": "react", 9 | "module": "es2022", 10 | "moduleResolution": "node", 11 | "types": [ 12 | "@cloudflare/workers-types", 13 | "node" 14 | ], 15 | "resolveJsonModule": true, 16 | "allowJs": true, 17 | "checkJs": false, 18 | "noEmit": true, 19 | "isolatedModules": true, 20 | "allowSyntheticDefaultImports": true, 21 | "forceConsistentCasingInFileNames": true, 22 | "strict": true, 23 | "skipLibCheck": true 24 | }, 25 | "exclude": [ 26 | "**/node_modules", 27 | "**/test" 28 | ] 29 | } 30 | -------------------------------------------------------------------------------- /type.d.ts: -------------------------------------------------------------------------------- 1 | declare module '*.html' { 2 | const content: string 3 | export default content 4 | } 5 | 6 | declare module '*.css' { 7 | const content: string 8 | export default content 9 | } 10 | -------------------------------------------------------------------------------- /uno.config.ts: -------------------------------------------------------------------------------- 1 | // 用于在 VSCode 中触发 UnoCSS 智能提示 2 | // https://github.com/unocss/unocss/tree/main/packages/vscode 3 | export default {} 4 | -------------------------------------------------------------------------------- /workers/README.md: -------------------------------------------------------------------------------- 1 | # Workers 列表 2 | 3 | 4 | | Name | Description | 5 | | :---- | :--- | 6 | | [github](./github/README.md) | 代理转发 GitHub | 7 | | 
[ip](./ip/README.md) | 获取 IP 地址 | 8 | | [proxy](./proxy/README.md) | 常规代理转发 | 9 | | [openai](./openai/README.md) | ChatGPT | 10 | | [short-domain](./short-domain/README.md) | 短网址服务 | 11 | -------------------------------------------------------------------------------- /workers/github/README.md: -------------------------------------------------------------------------------- 1 | 2 | # Worker: github 3 | 4 | 通过 Cloudflare Workers 代理 GitHub 的请求来实现 Github 无法访问的问题,支持文件代理加速下载 5 | 6 | > **Note** 7 | > 无法保证 `hub.llll.host` 持续可用,建议自行部署 8 | 9 | ## 使用方法 10 | 11 | ```diff 12 | # 克隆仓库 13 | - git clone https://github.com/aliuq/proxy-github.git 14 | + git clone https://hub.llll.host/aliuq/proxy-github.git 15 | 16 | # 代理 raw.githubusercontent.com 17 | - https://raw.githubusercontent.com/aliuq/proxy-github/master/README.md 18 | + https://raw.llll.host/aliuq/proxy-github/master/README.md 19 | 20 | # 代理 github.githubassets.com 21 | - https://github.githubassets.com/images/modules/site/social-cards/package-registry.png 22 | + https://assets.llll.host/images/modules/site/social-cards/package-registry.png 23 | 24 | # 文件代理加速 25 | # https://dl.llll.host/ 26 | - https://raw.githubusercontent.com/aliuq/proxy-github/master/README.md 27 | + https://dl.llll.host/https://raw.githubusercontent.com/aliuq/proxy-github/master/README.md 28 | ``` 29 | 30 | 详细代理列表如下: 31 | 32 | | Proxy | Hostname | 33 | |:---------|:---------| 34 | | hub.llll.host | github.com | 35 | | assets.llll.host | github.githubassets.com | 36 | | raw.llll.host | raw.githubusercontent.com | 37 | | download.llll.host | codeload.github.com | 38 | | object.llll.host | objects.githubusercontent.com | 39 | | media.llll.host | media.githubusercontent.com | 40 | | avatars.llll.host | avatars.githubusercontent.com | 41 | | gist.llll.host | gist.github.com | 42 | 43 | cloudflare 免费版每天有 $\textcolor{yellow}{\text{10万次}}$ 免费请求,每分钟1000次请求的限制,如果不够用,可升级到\$5的高级版本,每月可用1000万次请求(超出部分$0.5/百万次请求) 44 | 45 | 如果发现 [hub.llll.host](https://hub.llll.host) 不能访问或访问过慢,请进行私有化部署,如果只是几个人使用,完全足够 46 | 47 | 请尽量减少对仓库页面的访问,以减少 `assets.llll.host` 的负担,一个页面随随便便就有50+请求,尽量减少不必要的次数浪费,建议通过以下方式对单个文件进行访问或者clone 48 | 49 | 请尽量避免使用代理进入 GitHub 的时候进行登录 50 | 51 | ## Github 私有仓库 52 | 53 | > **Note** 54 | > cf-proxy 不会保存任何数据,仅作代理转发,但由于使用次数限制,建议进行私有化部署 55 | 56 | 私有仓库操作与公开仓库使用方式一致,仅多了一步登录授权,需输入用户名和 Github Token 57 | 58 | [创建 personal access token](https://github.com/settings/tokens/new) 59 | 60 | ```bash 61 | # clone a private repo 62 | git clone https://github.com//.git 63 | # 替换如下 64 | git clone https://hub.llll.host//.git 65 | 66 | # 按照提示,输入用户名和刚刚创建的 token 67 | ``` 68 | 69 | 使用 git 缓存凭据,避免每次都需要输入用户名和 token 70 | 71 | 缓存在内存中 72 | 73 | ```bash 74 | # 缓存 15 分钟 75 | git config --global credential.helper cache 76 | # 缓存 1 小时 77 | git config --global credential.helper 'cache --timeout=3600' 78 | 79 | # 接着执行 clone 操作,输入用户名和 token,在缓存时限内不需要再次输入 80 | ``` 81 | 82 | 缓存在磁盘中 83 | 84 | ```bash 85 | git config --global credential.helper store 86 | 87 | # 接着执行 clone 操作,输入用户名和 token,此后不需要再次输入 88 | 89 | # 查看凭据位置 90 | cat ~/.git-credentials 91 | ``` 92 | 93 | ## 开发 94 | 95 | wrangler 要求 node 版本 >= v16.7.0 96 | 97 | ```bash 98 | # 安装 cloudflare worker 开发工具 wrangler 99 | npm install wrangler -g 100 | # cloudflare 登录授权 101 | wrangler login 102 | # 查看登录信息 103 | wrangler whoami 104 | 105 | # clone 106 | git clone https://github.com/aliuq/cf-proxy.git 107 | # 进入项目目录 108 | cd cf-proxy 109 | # 安装依赖,依赖仅做类型提示 110 | npm install 111 | # 开发 等同于 `wrangler dev -l --ip localhost` 112 | npm run dev 113 | # 然后按下 `b` 打开浏览器,访问 
https://localhost:8787 114 | 115 | # 发布 等同于 `wrangler publish` 116 | npm run deploy 117 | ``` 118 | 119 | ## 部署 120 | 121 | > **Warning** 122 | > 由于只有一个 Cloudflare 账号,所以没有使用多余账号进行测试 123 | 124 | [![Deploy to Cloudflare Workers](https://deploy.workers.cloudflare.com/button)](https://deploy.workers.cloudflare.com/?url=https://github.com/aliuq/proxy-github) 125 | 126 | 按照提示进行下一步操作 127 | 128 | 手动部署 129 | 130 | ```bash 131 | git clone https://github.com/aliuq/proxy-github.git 132 | cd proxy-github 133 | npm install 134 | npm install wrangler -g 135 | ``` 136 | 137 | 使用 [wrangler](https://developers.cloudflare.com/workers/wrangler/) 进行开发和部署,先使用 `wrangler login` 进行 cf 登录授权,然后修改 `wrangler.toml` 下面的 `DOMAIN` 字段,指定用于代理的域名,然后运行 `npm run deploy` 进行发布 138 | 139 | 控制台操作 140 | 141 | 首先得有一个域名,并且在 cf 上添加为站点,在 `Workers` - `proxy-github` 下,选择`触发器`,将代理域名添加到`路由`和`自定义域`下,如果需要[增加代理](https://github.com/aliuq/proxy-github/blob/master/src/index.ts#L40),同样需要添加到`路由`和`自定义域`下,`DOMAIN` 字段也可通过控制台 `Workers` - `proxy-github` - `设置` - `变量` - `环境变量` 进行设置, 142 | 143 | ![s1](https://img2.bilishare.com/img/2022/08/01/223559c7ae0.png/normal) 144 | 145 | + [手把手实现 Github 代理加速](https://www.bilishare.com/tech/2022/08/23/cf-proxy-github.html) 146 | 147 | ## Q&A 148 | 149 |
150 | 域名被封,被注册局锁定 151 | 152 | 以 `.host` 域名为例 153 | 154 | 可能会因为未知的原因被 Google/Netcraft 认定是网络欺诈和诈骗网站,从而导致域名被封 155 | 156 | ### Google 157 | 158 | Google 的解决办法是点击报告检测问题,进行反馈,反馈内容可以和下面的模板类似,稍微修改一下,然后等待解封 159 | 160 | ![Google](./screenshot/1.jpg) 161 | 162 | ### Netcraft 163 | 164 | Netcraft 的解决办法是提交申诉,然后等待解封 165 | 166 | 找到对应域名的域名注册局,host 域名是在 [Radix](https://radix.website/report-abuse#unsuspensionsteps) 上面进行申请解封,输入域名,可以看到域名被锁定的原因,申请解封需要填入邮件、域名以及内容。 167 | 168 | 我分两天申请了两次,是否解封应该和输入的内容有关,正常一封邮件足够了,申请后大概一小时左右收到了官方的回复,内容是指出了域名被锁定的原因 169 | 170 | ```txt 171 | The domain name llll.host has been suspended due to its blacklisting by Netcraft for phishing activities. xxxxxx 172 | ``` 173 | 174 | 然后回复了下面这封邮件,再次进行解释,三天后收到了解封的邮件 175 | 176 | ### 邮件模板 177 | 178 | > **Note** 179 | > 模板仅供参考,不保证一定能解封,修改 `example.com` 为你的域名,修改 `` 为你的仓库地址 180 | 181 | ```txt 182 | Hi, 183 | 184 | Thank you for your reply, this address is one of the proxy GitHub requests 185 | 186 | gist.example.com -> gist.github.com 187 | https://gist.example.com/starred/ 188 | 189 | According to the proxy forwarding logic, the content of this address should be the same as the one shown below 190 | 191 | https://gist.github.com/starred/ 192 | 193 | Because it doesn't have phishing content, it just does a layer of forwarding and doesn't store any data, it's deployed on top of cloudflare workers and the source code is stored on GitHub 194 | 195 | https://github.com/ 196 | 197 | The purpose of this URL is to allow users who do not have access to GitHub to be able to access the resources on GitHub normally, such as in China, I hope to review it again and look forward to your reply! 198 | 199 | Thanks 200 | ``` 201 | 202 |
203 | 204 | ## 其他代理项目 205 | 206 | + [FastGit](https://doc.fastgit.org/zh-cn/) 207 | + [gh-proxy](https://github.com/hunshcn/gh-proxy) 208 | -------------------------------------------------------------------------------- /workers/github/build.config.ts: -------------------------------------------------------------------------------- 1 | import { defineBuildConfig } from 'unbuild' 2 | 3 | export default defineBuildConfig({ 4 | entries: [ 5 | 'src/index', 6 | ], 7 | clean: true, 8 | declaration: true, 9 | externals: [], 10 | }) 11 | -------------------------------------------------------------------------------- /workers/github/dist/index.d.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Proxy list, e.g. `my.domain` 3 | * 4 | * | Proxy | Hostname | 5 | * |:---------|:---------| 6 | * | hub.my.domain | github.com | 7 | * | raw.my.domain | raw.githubusercontent.com | 8 | * | assets.my.domain | github.githubassets.com | 9 | * | download.my.domain | codeload.github.com | 10 | * | object.my.domain | objects.githubusercontent.com | 11 | * | media.my.domain | media.githubusercontent.com | 12 | * | gist.my.domain | gist.github.com | 13 | */ 14 | declare const _default: { 15 | fetch(request: Request, env: ENV, _ctx: ExecutionContext): Promise; 16 | }; 17 | 18 | export { _default as default }; 19 | -------------------------------------------------------------------------------- /workers/github/dist/index.mjs: -------------------------------------------------------------------------------- 1 | let domainMaps = {}; 2 | let reverseDomainMaps = {}; 3 | const index = { 4 | async fetch(request, env, _ctx) { 5 | const needCancel = await needCancelRequest(request); 6 | if (needCancel) 7 | return new Response("", { status: 204 }); 8 | const url = new URL(request.url); 9 | const { domain, subdomain } = getDomainAndSubdomain(request); 10 | if (url.pathname === "/robots.txt") 11 | return new Response("User-agent: *\nDisallow: /", { status: 200 }); 12 | domainMaps = { 13 | [`hub.${domain}`]: "github.com", 14 | [`assets.${domain}`]: "github.githubassets.com", 15 | [`raw.${domain}`]: "raw.githubusercontent.com", 16 | [`download.${domain}`]: "codeload.github.com", 17 | [`object.${domain}`]: "objects.githubusercontent.com", 18 | [`media.${domain}`]: "media.githubusercontent.com", 19 | [`avatars.${domain}`]: "avatars.githubusercontent.com", 20 | [`gist.${domain}`]: "gist.github.com" 21 | }; 22 | reverseDomainMaps = Object.fromEntries(Object.entries(domainMaps).map((arr) => arr.reverse())); 23 | if (url.host in domainMaps) { 24 | url.host = domainMaps[url.host]; 25 | if (url.port !== "80" && url.port !== "443") 26 | url.port = url.protocol === "https:" ? "443" : "80"; 27 | const newRequest = getNewRequest(url, request); 28 | return proxy(url, newRequest); 29 | } 30 | return new Response(`Unsupported domain ${subdomain ? `${subdomain}.` : ""}${domain}`, { 31 | status: 200, 32 | headers: { "content-type": "text/plain;charset=utf-8", "git-hash": env.GIT_HASH } 33 | }); 34 | } 35 | }; 36 | function getDomainAndSubdomain(request) { 37 | const url = new URL(request.url); 38 | const hostArr = url.host.split("."); 39 | let subdomain = ""; 40 | let domain = ""; 41 | if (hostArr.length > 2) { 42 | subdomain = hostArr[0]; 43 | domain = hostArr.slice(1).join("."); 44 | } else if (hostArr.length === 2) { 45 | subdomain = hostArr[1].match(/^localhost(:\d+)?$/) ? hostArr[0] : ""; 46 | domain = hostArr[1].match(/^localhost(:\d+)?$/) ? 
hostArr[1] : hostArr.join("."); 47 | } else { 48 | domain = hostArr.join("."); 49 | } 50 | return { domain, subdomain }; 51 | } 52 | async function needCancelRequest(request, matches = []) { 53 | const url = new URL(request.url); 54 | matches = matches.length ? matches : [ 55 | "/favicon.", 56 | "/sw.js" 57 | ]; 58 | return matches.some((match) => url.pathname.includes(match)); 59 | } 60 | function getNewRequest(url, request) { 61 | const headers = new Headers(request.headers); 62 | headers.set("reason", "mirror of China"); 63 | const newRequestInit = { redirect: "manual", headers }; 64 | return new Request(url.toString(), new Request(request, newRequestInit)); 65 | } 66 | async function proxy(url, request, env) { 67 | try { 68 | const res = await fetch(url.toString(), request); 69 | const headers = res.headers; 70 | const newHeaders = new Headers(headers); 71 | const status = res.status; 72 | if (newHeaders.has("location")) { 73 | const loc = newHeaders.get("location"); 74 | if (loc) { 75 | try { 76 | const locUrl = new URL(loc); 77 | if (locUrl.host in reverseDomainMaps) { 78 | locUrl.host = reverseDomainMaps[locUrl.host]; 79 | newHeaders.set("location", locUrl.toString()); 80 | } 81 | } catch (e) { 82 | console.error(e); 83 | } 84 | } 85 | } 86 | newHeaders.set("access-control-expose-headers", "*"); 87 | newHeaders.set("access-control-allow-origin", "*"); 88 | newHeaders.delete("content-security-policy"); 89 | newHeaders.delete("content-security-policy-report-only"); 90 | newHeaders.delete("clear-site-data"); 91 | if (res.headers.get("content-type")?.indexOf("text/html") !== -1) { 92 | const body = await res.text(); 93 | const regAll = new RegExp(Object.keys(reverseDomainMaps).map((r) => `(https?://${r})`).join("|"), "g"); 94 | const newBody = body.replace(regAll, (match) => { 95 | return match.replace(/^(https?:\/\/)(.*?)$/g, (m, p1, p2) => { 96 | return reverseDomainMaps[p2] ? `${p1}${reverseDomainMaps[p2]}` : m; 97 | }); 98 | }).replace(/integrity=\".*?\"/g, ""); 99 | return new Response(newBody, { status, headers: newHeaders }); 100 | } 101 | return new Response(res.body, { status, headers: newHeaders }); 102 | } catch (e) { 103 | return new Response(e.message, { status: 500 }); 104 | } 105 | } 106 | 107 | export { index as default }; 108 | -------------------------------------------------------------------------------- /workers/github/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "cf-workers-github", 3 | "type": "module", 4 | "version": "0.0.1", 5 | "description": "github is a cloudflare worker", 6 | "author": "AliuQ ", 7 | "license": "MIT", 8 | "homepage": "https://github.com/aliuq/cf-proxy/tree/master/workers/github#readme", 9 | "bugs": { 10 | "url": "https://github.com/aliuq/cf-proxy/issues" 11 | }, 12 | "keywords": [ 13 | "cloudflare", 14 | "worker", 15 | "github" 16 | ], 17 | "main": "dist/index.mjs", 18 | "scripts": { 19 | "dev": "wrangler dev -l" 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /workers/github/screenshot/1.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aliuq/cf-proxy/26abc6b32e47ad18e0d273ea530c83124e12d533/workers/github/screenshot/1.jpg -------------------------------------------------------------------------------- /workers/github/src/index.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Proxy list, e.g. 
`my.domain` 3 | * 4 | * | Proxy | Hostname | 5 | * |:---------|:---------| 6 | * | hub.my.domain | github.com | 7 | * | raw.my.domain | raw.githubusercontent.com | 8 | * | assets.my.domain | github.githubassets.com | 9 | * | download.my.domain | codeload.github.com | 10 | * | object.my.domain | objects.githubusercontent.com | 11 | * | media.my.domain | media.githubusercontent.com | 12 | * | gist.my.domain | gist.github.com | 13 | */ 14 | 15 | let domainMaps: Record = {} 16 | let reverseDomainMaps: Record = {} 17 | export default { 18 | async fetch(request: Request, env: ENV, _ctx: ExecutionContext): Promise { 19 | const needCancel = await needCancelRequest(request) 20 | if (needCancel) 21 | return new Response('', { status: 204 }) 22 | 23 | const url = new URL(request.url) 24 | const { domain, subdomain } = getDomainAndSubdomain(request) 25 | 26 | if (url.pathname === '/robots.txt') 27 | return new Response('User-agent: *\nDisallow: /', { status: 200 }) 28 | 29 | // ============ 逻辑处理 ============ 30 | // 31 | // 32 | domainMaps = { 33 | [`hub.${domain}`]: 'github.com', 34 | [`assets.${domain}`]: 'github.githubassets.com', 35 | [`raw.${domain}`]: 'raw.githubusercontent.com', 36 | [`download.${domain}`]: 'codeload.github.com', 37 | [`object.${domain}`]: 'objects.githubusercontent.com', 38 | [`media.${domain}`]: 'media.githubusercontent.com', 39 | [`avatars.${domain}`]: 'avatars.githubusercontent.com', 40 | [`gist.${domain}`]: 'gist.github.com', 41 | } 42 | reverseDomainMaps = Object.fromEntries(Object.entries(domainMaps).map(arr => arr.reverse())) 43 | 44 | if (url.host in domainMaps) { 45 | url.host = domainMaps[url.host] 46 | if (url.port !== '80' && url.port !== '443') 47 | url.port = url.protocol === 'https:' ? '443' : '80' 48 | 49 | const newRequest = getNewRequest(url, request) 50 | return proxy(url, newRequest, env) 51 | } 52 | 53 | return new Response(`Unsupported domain ${subdomain ? `${subdomain}.` : ''}${domain}`, { 54 | status: 200, 55 | headers: { 'content-type': 'text/plain;charset=utf-8', 'git-hash': env.GIT_HASH }, 56 | }) 57 | }, 58 | } 59 | 60 | /** Get domain and subdomain from request url 61 | */ 62 | function getDomainAndSubdomain(request: Request): { domain: string; subdomain: string } { 63 | const url = new URL(request.url) 64 | const hostArr = url.host.split('.') 65 | let subdomain = '' 66 | let domain = '' 67 | if (hostArr.length > 2) { 68 | subdomain = hostArr[0] 69 | domain = hostArr.slice(1).join('.') 70 | } 71 | else if (hostArr.length === 2) { 72 | subdomain = hostArr[1].match(/^localhost(:\d+)?$/) ? hostArr[0] : '' 73 | domain = hostArr[1].match(/^localhost(:\d+)?$/) ? hostArr[1] : hostArr.join('.') 74 | } 75 | else { 76 | domain = hostArr.join('.') 77 | } 78 | return { domain, subdomain } 79 | } 80 | 81 | /** 需要终止请求 82 | * @param request 83 | * @returns true: 需要终止请求 84 | */ 85 | async function needCancelRequest(request: Request, matches: string[] = []): Promise { 86 | const url = new URL(request.url) 87 | matches = matches.length 88 | ? 
matches 89 | : [ 90 | '/favicon.', 91 | '/sw.js', 92 | '/login', 93 | '/join', 94 | '/session', 95 | '/auth', 96 | ] 97 | return matches.some(match => url.pathname.includes(match)) 98 | } 99 | 100 | /** 生成新的 request 101 | */ 102 | function getNewRequest(url: URL, request: Request) { 103 | const headers = new Headers(request.headers) 104 | headers.set('reason', 'mirror of China') 105 | const newRequestInit: RequestInit = { redirect: 'manual', headers } 106 | return new Request(url.toString(), new Request(request, newRequestInit)) 107 | } 108 | 109 | /** 代理转发处理 110 | */ 111 | async function proxy(url: URL, request: Request, _env: ENV) { 112 | try { 113 | const res = await fetch(url.toString(), request) 114 | const headers = res.headers 115 | const newHeaders = new Headers(headers) 116 | const status = res.status 117 | 118 | if (newHeaders.has('location')) { 119 | const loc = newHeaders.get('location') 120 | if (loc) { 121 | try { 122 | const locUrl = new URL(loc) 123 | if (locUrl.host in reverseDomainMaps) { 124 | locUrl.host = reverseDomainMaps[locUrl.host] 125 | newHeaders.set('location', locUrl.toString()) 126 | } 127 | } 128 | catch (e) { 129 | console.error(e) 130 | } 131 | } 132 | } 133 | 134 | newHeaders.set('access-control-expose-headers', '*') 135 | newHeaders.set('access-control-allow-origin', '*') 136 | 137 | newHeaders.delete('content-security-policy') 138 | newHeaders.delete('content-security-policy-report-only') 139 | newHeaders.delete('clear-site-data') 140 | 141 | if (res.headers.get('content-type')?.indexOf('text/html') !== -1) { 142 | const body = await res.text() 143 | const regAll = new RegExp(Object.keys(reverseDomainMaps).map((r: string) => `(https?://${r})`).join('|'), 'g') 144 | const newBody = body 145 | // Replace all hostname to proxy domain 146 | .replace(regAll, (match) => { 147 | return match.replace(/^(https?:\/\/)(.*?)$/g, (m, p1, p2) => { 148 | return reverseDomainMaps[p2] ? `${p1}${reverseDomainMaps[p2]}` : m 149 | }) 150 | }) 151 | // Avoid integrity error 152 | .replace(/integrity=\".*?\"/g, '') 153 | 154 | return new Response(newBody, { status, headers: newHeaders }) 155 | } 156 | return new Response(res.body, { status, headers: newHeaders }) 157 | } 158 | catch (e: any) { 159 | return new Response(e.message, { status: 500 }) 160 | } 161 | } 162 | -------------------------------------------------------------------------------- /workers/github/types.d.ts: -------------------------------------------------------------------------------- 1 | interface ENV { 2 | GIT_HASH: string 3 | } 4 | 5 | interface INTERNAL_ENV { 6 | DOMAIN?: string 7 | } 8 | 9 | interface Options { 10 | unbuild?: boolean 11 | env: INTERNAL_ENV 12 | } 13 | -------------------------------------------------------------------------------- /workers/github/wrangler.config.ts: -------------------------------------------------------------------------------- 1 | // 这里处理 wrangler 的配置文件 2 | // 3 | import path from 'path' 4 | import * as execa from 'execa' 5 | import getPort, { portNumbers } from 'get-port' 6 | import type { BuildEntry } from 'unbuild' 7 | 8 | async function wranglerConfig({ unbuild: useUnbuild, env }: Options = { unbuild: false, env: {} }) { 9 | const port = await getPort({ port: portNumbers(8787, 8887) }) 10 | 11 | const { default: buildConfig } = await import('./build.config') 12 | const outDir = buildConfig.outDir || 'dist' 13 | 14 | const entrie = (<(string | BuildEntry)[]>buildConfig.entries)[0] 15 | const nameFull = typeof entrie === 'string' ? 
entrie : entrie.input 16 | const outName = path.basename(nameFull) 17 | 18 | const gitHash = execa.execaCommandSync('git rev-parse --short HEAD').stdout 19 | 20 | return { 21 | name: 'github', 22 | main: useUnbuild ? `${outName}.mjs` : `${nameFull}.ts`, 23 | compatibility_date: new Date().toISOString().split('T')[0], 24 | no_bundle: useUnbuild ? true : undefined, 25 | vars: { 26 | GIT_HASH: gitHash, 27 | }, 28 | dev: { 29 | ip: 'localhost', 30 | local_protocol: 'https', 31 | port, 32 | }, 33 | env: { 34 | production: { 35 | vars: { 36 | mode: 'production', 37 | GIT_HASH: gitHash, 38 | }, 39 | routes: env.DOMAIN 40 | ? [ 41 | { pattern: `hub.${env.DOMAIN}`, zone_name: env.DOMAIN, custom_domain: true }, 42 | { pattern: `assets.${env.DOMAIN}`, zone_name: env.DOMAIN, custom_domain: true }, 43 | { pattern: `raw.${env.DOMAIN}`, zone_name: env.DOMAIN, custom_domain: true }, 44 | { pattern: `download.${env.DOMAIN}`, zone_name: env.DOMAIN, custom_domain: true }, 45 | { pattern: `object.${env.DOMAIN}`, zone_name: env.DOMAIN, custom_domain: true }, 46 | { pattern: `media.${env.DOMAIN}`, zone_name: env.DOMAIN, custom_domain: true }, 47 | { pattern: `avatars.${env.DOMAIN}`, zone_name: env.DOMAIN, custom_domain: true }, 48 | { pattern: `gist.${env.DOMAIN}`, zone_name: env.DOMAIN, custom_domain: true }, 49 | ] 50 | : undefined, 51 | }, 52 | }, 53 | outDir: useUnbuild ? outDir : undefined, 54 | } 55 | } 56 | 57 | export default wranglerConfig 58 | export { wranglerConfig } 59 | -------------------------------------------------------------------------------- /workers/ip/README.md: -------------------------------------------------------------------------------- 1 | # Worker: ip 2 | 3 | 获取公网 IP 地址 4 | 5 | 示例地址:[https://ip.llll.host](https://ip.llll.host) 6 | 7 | ## 环境变量 8 | 9 | + `__DOMAIN__`: 主域名,用于配置路由、触发器等 10 | 11 | ## 开发 12 | 13 | ```bash 14 | # Development 15 | pnpm run exec ip dev 16 | # Publish 17 | pnpm run exec ip publish 18 | ``` 19 | -------------------------------------------------------------------------------- /workers/ip/build.config.ts: -------------------------------------------------------------------------------- 1 | import { defineBuildConfig } from 'unbuild' 2 | 3 | export default defineBuildConfig({ 4 | entries: [ 5 | 'src/index', 6 | ], 7 | clean: true, 8 | declaration: true, 9 | externals: [], 10 | }) 11 | -------------------------------------------------------------------------------- /workers/ip/dist/index.d.ts: -------------------------------------------------------------------------------- 1 | declare const _default: { 2 | fetch(request: Request, env: ENV, _ctx: ExecutionContext): Promise; 3 | }; 4 | 5 | export { _default as default }; 6 | -------------------------------------------------------------------------------- /workers/ip/dist/index.mjs: -------------------------------------------------------------------------------- 1 | async function replyText(text, env, init = {}) { 2 | return new Response(text, deepMerge({ 3 | status: 200, 4 | headers: { "content-type": "text/plain;charset=UTF-8", "version": env.VERSION } 5 | }, init)); 6 | } 7 | async function replyUnsupport(options, env, init = {}) { 8 | return new Response(renderTemplate("Unsupported url {{ url }}", options), deepMerge({ 9 | status: 200, 10 | headers: { "content-type": "text/plain;charset=UTF-8", "version": env.VERSION } 11 | }, init)); 12 | } 13 | function getDomainAndSubdomain(request) { 14 | const url = new URL(request.url); 15 | const hostArr = url.host.split("."); 16 | let subdomain = ""; 17 | let domain = ""; 
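// Illustrative results of the split below (hostnames are examples only):
//   "ip.example.com"    -> subdomain "ip", domain "example.com"
//   "ip.localhost:8787" -> subdomain "ip", domain "localhost:8787"
//   "example.com"       -> subdomain "",   domain "example.com"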
18 | if (url.hostname.endsWith("localhost")) { 19 | subdomain = hostArr.length === 1 ? "" : hostArr[0]; 20 | domain = hostArr.length === 1 ? hostArr[0] : hostArr.slice(1).join("."); 21 | } else { 22 | subdomain = hostArr.length > 2 ? hostArr[0] : ""; 23 | domain = hostArr.length > 2 ? hostArr.slice(1).join(".") : hostArr.join("."); 24 | } 25 | return { domain, subdomain }; 26 | } 27 | async function needCancelRequest(request, matches = []) { 28 | const url = new URL(request.url); 29 | matches = matches.length ? matches : [ 30 | "/favicon.", 31 | "/sw.js" 32 | ]; 33 | const isCancel = matches.some((match) => url.pathname.includes(match)); 34 | if (isCancel) 35 | return replyText("", {}, { status: 204 }); 36 | } 37 | function renderTemplate(content, data) { 38 | return content.replace(/\{{\s*([a-zA-Z0-9_]+)\s*}}/g, (match, key) => { 39 | return data[key] || ""; 40 | }); 41 | } 42 | function deepMerge(target, ...sources) { 43 | for (const source of sources) { 44 | if (isObject(target) && isObject(source)) { 45 | for (const key in source) { 46 | if (isObject(source[key])) { 47 | if (!target[key]) { 48 | target[key] = {}; 49 | } 50 | deepMerge(target[key], source[key]); 51 | } else { 52 | target[key] = source[key]; 53 | } 54 | } 55 | } 56 | } 57 | return target; 58 | } 59 | function isObject(item) { 60 | return item && typeof item === "object" && !Array.isArray(item); 61 | } 62 | 63 | const index = { 64 | async fetch(request, env, _ctx) { 65 | const needCancel = await needCancelRequest(request); 66 | if (needCancel) 67 | return needCancel; 68 | const url = new URL(request.url); 69 | if (url.pathname === "/robots.txt") 70 | return replyText("User-agent: *\nDisallow: /", env); 71 | const { subdomain } = getDomainAndSubdomain(request); 72 | if (subdomain === "ip" && url.pathname === "/") 73 | return replyText(request.headers.get("cf-connecting-ip"), env); 74 | return replyUnsupport(request, env); 75 | } 76 | }; 77 | 78 | export { index as default }; 79 | -------------------------------------------------------------------------------- /workers/ip/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "cf-workers-ip", 3 | "type": "module", 4 | "version": "0.0.2", 5 | "description": "ip is a cloudflare worker", 6 | "author": "AliuQ ", 7 | "license": "MIT", 8 | "homepage": "https://github.com/aliuq/cf-proxy/tree/master/workers/ip#readme", 9 | "bugs": { 10 | "url": "https://github.com/aliuq/cf-proxy/issues" 11 | }, 12 | "keywords": [ 13 | "cloudflare", 14 | "worker", 15 | "ip" 16 | ], 17 | "main": "dist/index.mjs", 18 | "scripts": { 19 | "dev": "wrangler dev -l" 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /workers/ip/src/index.ts: -------------------------------------------------------------------------------- 1 | import { getDomainAndSubdomain, needCancelRequest, replyText, replyUnsupport } from '../../utils' 2 | 3 | export default { 4 | async fetch(request: Request, env: ENV, _ctx: ExecutionContext): Promise { 5 | const needCancel = await needCancelRequest(request) 6 | if (needCancel) 7 | return needCancel 8 | 9 | const url = new URL(request.url) 10 | 11 | if (url.pathname === '/robots.txt') 12 | return replyText('User-agent: *\nDisallow: /', env) 13 | 14 | const { subdomain } = getDomainAndSubdomain(request) 15 | if (subdomain === 'ip' && url.pathname === '/') 16 | return replyText(request.headers.get('cf-connecting-ip') as string, env) 17 | 18 | return replyUnsupport(request, env) 19 | 
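// e.g. `curl https://ip.example.com/` (domain illustrative) hits the `subdomain === 'ip'`
// branch above and replies with the caller's public IP taken from Cloudflare's `cf-connecting-ip` header.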
}, 20 | } 21 | -------------------------------------------------------------------------------- /workers/ip/types.d.ts: -------------------------------------------------------------------------------- 1 | interface ENV { 2 | GIT_HASH: string 3 | } 4 | 5 | interface INTERNAL_ENV { 6 | DOMAIN?: string 7 | } 8 | 9 | interface Options { 10 | unbuild?: boolean 11 | env: INTERNAL_ENV 12 | } 13 | -------------------------------------------------------------------------------- /workers/ip/wrangler.config.ts: -------------------------------------------------------------------------------- 1 | // 这里处理 wrangler 的配置文件 2 | // 3 | import path from 'path' 4 | import * as execa from 'execa' 5 | import getPort, { portNumbers } from 'get-port' 6 | import type { BuildEntry } from 'unbuild' 7 | 8 | async function wranglerConfig({ unbuild: useUnbuild, env }: Options = { unbuild: false, env: {} }) { 9 | const port = await getPort({ port: portNumbers(8787, 8887) }) 10 | 11 | const { default: buildConfig } = await import('./build.config') 12 | const outDir = buildConfig.outDir || 'dist' 13 | 14 | const entrie = (<(string | BuildEntry)[]>buildConfig.entries)[0] 15 | const nameFull = typeof entrie === 'string' ? entrie : entrie.input 16 | const outName = path.basename(nameFull) 17 | 18 | const gitHash = execa.execaCommandSync('git rev-parse --short HEAD').stdout 19 | 20 | return { 21 | name: 'ip', 22 | main: useUnbuild ? `${outName}.mjs` : `${nameFull}.ts`, 23 | compatibility_date: new Date().toISOString().split('T')[0], 24 | no_bundle: useUnbuild ? true : undefined, 25 | vars: { 26 | GIT_HASH: gitHash, 27 | }, 28 | dev: { 29 | ip: 'localhost', 30 | port, 31 | }, 32 | env: { 33 | // For local development, Do not pulish the enviroment to cloudflare. 34 | localhost: { 35 | vars: { 36 | mode: 'localhost', 37 | GIT_HASH: gitHash, 38 | }, 39 | routes: [ 40 | { pattern: `ip.localhost:${port}`, zone_name: `localhost:${port}`, custom_domain: true }, 41 | ], 42 | }, 43 | production: { 44 | vars: { 45 | mode: 'production', 46 | GIT_HASH: gitHash, 47 | }, 48 | routes: env.DOMAIN 49 | ? [ 50 | { pattern: `ip.${env.DOMAIN}`, zone_name: env.DOMAIN, custom_domain: true }, 51 | ] 52 | : undefined, 53 | }, 54 | }, 55 | outDir: useUnbuild ? outDir : undefined, 56 | } 57 | } 58 | 59 | export default wranglerConfig 60 | export { wranglerConfig } 61 | -------------------------------------------------------------------------------- /workers/openai/README.md: -------------------------------------------------------------------------------- 1 | # Worker: openai 2 | 3 | 通过 API Key 实现 OpenAI 的 ChatGPT 机器人聊天 4 | 5 | 示例地址:[https://chat.llll.host](https://chat.llll.host) 6 | 7 | ## 环境变量 8 | 9 | + `__DOMAIN__`: 主域名,用于配置路由、触发器等 10 | + `OPENAI_API_KEY`: OpenAI API Key 11 | 12 | 默认参数配置: 13 | 14 | ```ts 15 | const params: CreateCompletionRequest = { 16 | model: 'text-davinci-003', 17 | prompt: Prompt, 18 | temperature: 0.9, 19 | max_tokens: 150, 20 | top_p: 1, 21 | frequency_penalty: 0.0, 22 | presence_penalty: 0.6, 23 | stop: [' Human:', ' AI:'], 24 | } 25 | ``` 26 | -------------------------------------------------------------------------------- /workers/openai/build.config.ts: -------------------------------------------------------------------------------- 1 | import { defineBuildConfig } from 'unbuild' 2 | 3 | export default defineBuildConfig({ 4 | entries: [ 5 | 'src/index', 6 | ], 7 | clean: true, 8 | // If met some error, you can set failOnWarn to false to ignore it. 
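// (unbuild treats build warnings as fatal by default, so `failOnWarn: false` simply lets the build finish despite them.)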
9 | // failOnWarn: false, 10 | // Incompatible with `rollup.esbuild.minify`, an error will occur 11 | declaration: true, 12 | externals: [], 13 | rollup: { 14 | esbuild: { 15 | // See `declaration` 16 | // minify: true, 17 | }, 18 | }, 19 | }) 20 | -------------------------------------------------------------------------------- /workers/openai/dist/index.d.ts: -------------------------------------------------------------------------------- 1 | declare const _default: { 2 | fetch(request: Request, env: ENV, _ctx: ExecutionContext): Promise; 3 | }; 4 | 5 | export { _default as default }; 6 | -------------------------------------------------------------------------------- /workers/openai/dist/index.mjs: -------------------------------------------------------------------------------- 1 | import { OpenAIApi, Configuration } from 'openai'; 2 | import fetchAdapter from '@vespaiach/axios-fetch-adapter'; 3 | 4 | async function replyText(text, env, init = {}) { 5 | return new Response(text, deepMerge({ 6 | status: 200, 7 | headers: { "content-type": "text/plain;charset=UTF-8", "version": env.VERSION } 8 | }, init)); 9 | } 10 | async function replyHtml(html, env, init = {}) { 11 | return new Response(html, deepMerge({ 12 | status: 200, 13 | headers: { "content-type": "text/html;charset=UTF-8", "version": env.VERSION } 14 | }, init)); 15 | } 16 | async function replyUnsupport(options, env, init = {}) { 17 | return new Response(renderTemplate("Unsupported url {{ url }}", options), deepMerge({ 18 | status: 200, 19 | headers: { "content-type": "text/plain;charset=UTF-8", "version": env.VERSION } 20 | }, init)); 21 | } 22 | async function replyJson(json, env, init = {}) { 23 | return new Response(JSON.stringify(json), deepMerge({ 24 | status: 200, 25 | headers: { "content-type": "application/json;charset=UTF-8", "version": env.VERSION } 26 | }, init)); 27 | } 28 | function getDomainAndSubdomain(request) { 29 | const url = new URL(request.url); 30 | const hostArr = url.host.split("."); 31 | let subdomain = ""; 32 | let domain = ""; 33 | if (url.hostname.endsWith("localhost")) { 34 | subdomain = hostArr.length === 1 ? "" : hostArr[0]; 35 | domain = hostArr.length === 1 ? hostArr[0] : hostArr.slice(1).join("."); 36 | } else { 37 | subdomain = hostArr.length > 2 ? hostArr[0] : ""; 38 | domain = hostArr.length > 2 ? hostArr.slice(1).join(".") : hostArr.join("."); 39 | } 40 | return { domain, subdomain }; 41 | } 42 | async function needCancelRequest(request, matches = []) { 43 | const url = new URL(request.url); 44 | matches = matches.length ? 
matches : [ 45 | "/favicon.", 46 | "/sw.js" 47 | ]; 48 | const isCancel = matches.some((match) => url.pathname.includes(match)); 49 | if (isCancel) 50 | return replyText("", {}, { status: 204 }); 51 | } 52 | function renderTemplate(content, data) { 53 | return content.replace(/\{{\s*([a-zA-Z0-9_]+)\s*}}/g, (match, key) => { 54 | return data[key] || ""; 55 | }); 56 | } 57 | function deepMerge(target, ...sources) { 58 | for (const source of sources) { 59 | if (isObject(target) && isObject(source)) { 60 | for (const key in source) { 61 | if (isObject(source[key])) { 62 | if (!target[key]) { 63 | target[key] = {}; 64 | } 65 | deepMerge(target[key], source[key]); 66 | } else { 67 | target[key] = source[key]; 68 | } 69 | } 70 | } 71 | } 72 | return target; 73 | } 74 | function isObject(item) { 75 | return item && typeof item === "object" && !Array.isArray(item); 76 | } 77 | 78 | const ChatHtml = "\n\n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n\n\n
\n
\n \n
\n
\n \n \n
\n
\n
\n \n
\n
\n
\n \n \n \n \n
\n
\n \n \n
\n

\n Powerd by Cloudflare Workers. ChatGPT 是一个基于OpenAI API的聊天机器人, 如果条件允许, 请前往OpenAI官网体验更好的服务.\n

\n

声明: UI 仿照官方, 数据通过 API key 进行请求, 如果遇到 Bug 等问题, 请访问 cf-proxy 查看源码

\n
\n
\n \n \n \n\n\n\n"; 79 | 80 | const config = { 81 | conversation: { 82 | // https://beta.openai.com/examples/default-qa 83 | qa: { 84 | model: "text-davinci-003", 85 | prompt: "", 86 | temperature: 0, 87 | max_tokens: 100, 88 | top_p: 1, 89 | frequency_penalty: 0, 90 | presence_penalty: 0, 91 | stop: ["\n"] 92 | }, 93 | // https://beta.openai.com/examples/default-factual-answering 94 | factualAnswering: { 95 | model: "text-davinci-003", 96 | prompt: "", 97 | temperature: 0, 98 | max_tokens: 60, 99 | top_p: 1, 100 | frequency_penalty: 0, 101 | presence_penalty: 0 102 | }, 103 | // https://beta.openai.com/examples/default-js-helper 104 | jsHelp: { 105 | model: "code-davinci-002", 106 | prompt: "", 107 | temperature: 0, 108 | max_tokens: 60, 109 | top_p: 1, 110 | frequency_penalty: 0.5, 111 | presence_penalty: 0, 112 | stop: ["You:"] 113 | }, 114 | // https://beta.openai.com/examples/default-ml-ai-tutor 115 | aiLangModelTutor: { 116 | model: "text-davinci-003", 117 | prompt: "", 118 | temperature: 0.3, 119 | max_tokens: 60, 120 | top_p: 1, 121 | frequency_penalty: 0.5, 122 | presence_penalty: 0, 123 | stop: ["You:"] 124 | }, 125 | /** https://beta.openai.com/examples/default-chat */ 126 | chat: { 127 | model: "text-davinci-003", 128 | prompt: "", 129 | temperature: 0.9, 130 | max_tokens: 150, 131 | top_p: 1, 132 | frequency_penalty: 0, 133 | presence_penalty: 0.6, 134 | stop: [" Human:", " AI:"] 135 | }, 136 | // https://beta.openai.com/examples/default-friend-chat 137 | friendChat: { 138 | model: "text-davinci-003", 139 | prompt: "", 140 | temperature: 0.5, 141 | max_tokens: 60, 142 | top_p: 1, 143 | frequency_penalty: 0.5, 144 | presence_penalty: 0, 145 | stop: ["You:"] 146 | }, 147 | // https://beta.openai.com/examples/default-marv-sarcastic-chat 148 | marvSarcasticChat: { 149 | model: "text-davinci-003", 150 | prompt: "", 151 | temperature: 0.5, 152 | max_tokens: 60, 153 | top_p: 0.3, 154 | frequency_penalty: 0.5, 155 | presence_penalty: 0 156 | } 157 | } 158 | }; 159 | 160 | const index = { 161 | async fetch(request, env, _ctx) { 162 | const needCancel = await needCancelRequest(request); 163 | if (needCancel) 164 | return needCancel; 165 | const url = new URL(request.url); 166 | if (url.pathname === "/robots.txt") 167 | return replyText("User-agent: *\nDisallow: /", env); 168 | const { domain } = getDomainAndSubdomain(request); 169 | if (url.host === domain) { 170 | if (url.pathname === "/chat" || url.pathname === "/fchat") { 171 | const newUrl = new URL(url.toString()); 172 | newUrl.host = url.pathname === "/chat" ? 
`chat.${domain}` : `fchat.${domain}`; 173 | newUrl.pathname = "/"; 174 | return Response.redirect(newUrl.toString(), 301); 175 | } 176 | } 177 | if (url.pathname === "/") 178 | return replyHtml(ChatHtml, env); 179 | else if (url.pathname === "/api") 180 | return await handlerApi(url, env); 181 | return await replyUnsupport({ url: decodeURIComponent(url.toString()) }, env); 182 | } 183 | }; 184 | async function handlerApi(url, env) { 185 | const prompt = url.searchParams.get("q"); 186 | if (!prompt) 187 | return replyText("Missing query parameter: q", env); 188 | const openai = await initOpenAI(env); 189 | try { 190 | const moderation = await checkModeration(openai, prompt); 191 | if (moderation) 192 | return replyJson({ code: -1, text: moderation }, env); 193 | const params = Object.assign({}, config.conversation.chat, { prompt, user: "aliuq" }); 194 | const { data } = await openai.createCompletion(params); 195 | return replyJson({ code: 0, text: data.choices[0].text }, env); 196 | } catch (error) { 197 | console.log(error.response?.data?.error?.message); 198 | const status = error.response?.status || 500; 199 | return replyText("", env, { status }); 200 | } 201 | } 202 | async function initOpenAI(env) { 203 | return new OpenAIApi(new Configuration({ 204 | apiKey: env.OPENAI_API_KEY, 205 | baseOptions: { adapter: fetchAdapter } 206 | })); 207 | } 208 | async function checkModeration(openai, input) { 209 | try { 210 | const { data } = await openai.createModeration({ model: "text-moderation-latest", input }); 211 | const { flagged, categories } = data.results[0]; 212 | if (flagged) { 213 | const maps = { 214 | "hate": "\u8868\u8FBE\u3001\u717D\u52A8\u6216\u4FC3\u8FDB\u57FA\u4E8E\u79CD\u65CF\u3001\u6027\u522B\u3001\u6C11\u65CF\u3001\u5B97\u6559\u3001\u56FD\u7C4D\u3001\u6027\u53D6\u5411\u3001\u6B8B\u75BE\u72B6\u51B5\u6216\u79CD\u59D3\u7684\u4EC7\u6068\u7684\u5185\u5BB9", 215 | "hate/threatening": "\u8FD8\u5305\u62EC\u5BF9\u76EE\u6807\u7FA4\u4F53\u7684\u66B4\u529B\u6216\u4E25\u91CD\u4F24\u5BB3\u7684\u4EC7\u6068\u6027\u5185\u5BB9", 216 | "self-harm": "\u5021\u5BFC\u3001\u9F13\u52B1\u6216\u63CF\u8FF0\u81EA\u6211\u4F24\u5BB3\u884C\u4E3A\u7684\u5185\u5BB9\uFF0C\u5982\u81EA\u6740\u3001\u5207\u5272\u548C\u996E\u98DF\u7D0A\u4E71", 217 | "sexual": "\u65E8\u5728\u5F15\u8D77\u6027\u5174\u594B\u7684\u5185\u5BB9\uFF0C\u5982\u5BF9\u6027\u6D3B\u52A8\u7684\u63CF\u8FF0\uFF0C\u6216\u4FC3\u8FDB\u6027\u670D\u52A1\u7684\u5185\u5BB9\uFF08\u4E0D\u5305\u62EC\u6027\u6559\u80B2\u548C\u5065\u5EB7\uFF09", 218 | "sexual/minors": "\u5305\u62EC\u672A\u6EE118\u5C81\u7684\u4EBA\u7684\u6027\u5185\u5BB9", 219 | "violence": "\u5BA3\u626C\u6216\u7F8E\u5316\u66B4\u529B\u6216\u8D5E\u7F8E\u4ED6\u4EBA\u7684\u75DB\u82E6\u6216\u7F9E\u8FB1\u7684\u5185\u5BB9", 220 | "violence/graphic": "\u63CF\u7ED8\u6B7B\u4EA1\u3001\u66B4\u529B\u6216\u4E25\u91CD\u8EAB\u4F53\u4F24\u5BB3\u7684\u66B4\u529B\u5185\u5BB9\uFF0C\u5176\u753B\u9762\u611F\u6781\u5F3A" 221 | }; 222 | const text = []; 223 | Object.keys(categories).forEach((key) => { 224 | if (!categories[key]) 225 | text.push(maps[key]); 226 | }); 227 | const mdtext = [ 228 | "# \u8FDD\u53CD\u5185\u5BB9\u7B56\u7565", 229 | "", 230 | text.map((item, index) => `${index + 1} ${item}`).join("\n") 231 | ].join("\n"); 232 | return mdtext; 233 | } 234 | return ""; 235 | } catch (error) { 236 | return error.response?.data?.error?.message || error.message || ""; 237 | } 238 | } 239 | 240 | export { index as default }; 241 | -------------------------------------------------------------------------------- 
/workers/openai/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "cf-workers-openai", 3 | "type": "module", 4 | "version": "0.0.2", 5 | "description": "openai is a cloudflare worker", 6 | "author": "AliuQ ", 7 | "license": "MIT", 8 | "homepage": "https://github.com/aliuq/cf-proxy/tree/master/workers/openai#readme", 9 | "bugs": { 10 | "url": "https://github.com/aliuq/cf-proxy/issues" 11 | }, 12 | "keywords": [ 13 | "cloudflare", 14 | "worker", 15 | "openai" 16 | ], 17 | "main": "dist/index.mjs", 18 | "scripts": { 19 | "dev": "wrangler dev -l", 20 | "build": "unbuild" 21 | }, 22 | "dependencies": { 23 | "@vespaiach/axios-fetch-adapter": "^0.3.1", 24 | "axios": "0.26.0", 25 | "openai": "^3.1.0" 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /workers/openai/src/chat.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 64 | 65 | 66 | 67 |
68 |
69 | 70 |
71 |
72 | 111 | 112 |
113 |
114 |
115 | 116 |
117 |
118 |
119 | 120 |
122 | 123 | 133 |
134 |
135 |
136 | 137 |
138 |

139 | Powerd by Cloudflare Workers. ChatGPT 是一个基于OpenAI API的聊天机器人, 如果条件允许, 请前往OpenAI官网体验更好的服务. 141 |

142 |

声明: UI 仿照官方, 数据通过 API key 进行请求, 如果遇到 Bug 等问题, 请访问 cf-proxy 查看源码

143 |
144 |
145 |
146 |
147 | 213 | 214 | 215 | 216 | -------------------------------------------------------------------------------- /workers/openai/src/default.config.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * 返回配置对象 3 | * 参考官方示例 https://beta.openai.com/examples/ 4 | */ 5 | 6 | export default { 7 | conversation: { 8 | // https://beta.openai.com/examples/default-qa 9 | qa: { 10 | model: 'text-davinci-003', 11 | prompt: '', 12 | temperature: 0, 13 | max_tokens: 100, 14 | top_p: 1, 15 | frequency_penalty: 0.0, 16 | presence_penalty: 0.0, 17 | stop: ['\n'], 18 | }, 19 | // https://beta.openai.com/examples/default-factual-answering 20 | factualAnswering: { 21 | model: 'text-davinci-003', 22 | prompt: '', 23 | temperature: 0, 24 | max_tokens: 60, 25 | top_p: 1.0, 26 | frequency_penalty: 0.0, 27 | presence_penalty: 0.0, 28 | }, 29 | // https://beta.openai.com/examples/default-js-helper 30 | jsHelp: { 31 | model: 'code-davinci-002', 32 | prompt: '', 33 | temperature: 0, 34 | max_tokens: 60, 35 | top_p: 1.0, 36 | frequency_penalty: 0.5, 37 | presence_penalty: 0.0, 38 | stop: ['You:'], 39 | }, 40 | // https://beta.openai.com/examples/default-ml-ai-tutor 41 | aiLangModelTutor: { 42 | model: 'text-davinci-003', 43 | prompt: '', 44 | temperature: 0.3, 45 | max_tokens: 60, 46 | top_p: 1.0, 47 | frequency_penalty: 0.5, 48 | presence_penalty: 0.0, 49 | stop: ['You:'], 50 | }, 51 | /** https://beta.openai.com/examples/default-chat */ 52 | chat: { 53 | model: 'text-davinci-003', 54 | prompt: '', 55 | temperature: 0.9, 56 | max_tokens: 150, 57 | top_p: 1, 58 | frequency_penalty: 0.0, 59 | presence_penalty: 0.6, 60 | stop: [' Human:', ' AI:'], 61 | }, 62 | // https://beta.openai.com/examples/default-friend-chat 63 | friendChat: { 64 | model: 'text-davinci-003', 65 | prompt: '', 66 | temperature: 0.5, 67 | max_tokens: 60, 68 | top_p: 1.0, 69 | frequency_penalty: 0.5, 70 | presence_penalty: 0.0, 71 | stop: ['You:'], 72 | }, 73 | // https://beta.openai.com/examples/default-marv-sarcastic-chat 74 | marvSarcasticChat: { 75 | model: 'text-davinci-003', 76 | prompt: '', 77 | temperature: 0.5, 78 | max_tokens: 60, 79 | top_p: 0.3, 80 | frequency_penalty: 0.5, 81 | presence_penalty: 0.0, 82 | }, 83 | }, 84 | } 85 | -------------------------------------------------------------------------------- /workers/openai/src/index.ts: -------------------------------------------------------------------------------- 1 | import type { CreateCompletionRequest, CreateModerationResponseResultsInnerCategories } from 'openai' 2 | import { Configuration, OpenAIApi } from 'openai' 3 | import fetchAdapter from '@vespaiach/axios-fetch-adapter' 4 | import { getDomainAndSubdomain, needCancelRequest, replyHtml, replyJson, replyText, replyUnsupport } from '../../utils' 5 | import ChatHtml from './chat.html' 6 | import config from './default.config' 7 | 8 | export default { 9 | async fetch(request: Request, env: ENV, _ctx: ExecutionContext): Promise { 10 | const needCancel = await needCancelRequest(request) 11 | if (needCancel) 12 | return needCancel 13 | 14 | const url = new URL(request.url) 15 | 16 | if (url.pathname === '/robots.txt') 17 | return replyText('User-agent: *\nDisallow: /', env) 18 | 19 | const { domain } = getDomainAndSubdomain(request) 20 | 21 | if (url.host === domain) { 22 | if (url.pathname === '/chat' || url.pathname === '/fchat') { 23 | const newUrl = new URL(url.toString()) 24 | newUrl.host = url.pathname === '/chat' ? 
`chat.${domain}` : `fchat.${domain}` 25 | newUrl.pathname = '/' 26 | return Response.redirect(newUrl.toString(), 301) 27 | } 28 | } 29 | 30 | if (url.pathname === '/') 31 | return replyHtml(ChatHtml, env) 32 | else if (url.pathname === '/api') 33 | return await handlerApi(url, env) 34 | 35 | return await replyUnsupport({ url: decodeURIComponent(url.toString()) }, env) 36 | }, 37 | } 38 | 39 | async function handlerApi(url: URL, env: ENV) { 40 | const prompt = url.searchParams.get('q') 41 | if (!prompt) 42 | return replyText('Missing query parameter: q', env) 43 | 44 | const openai = await initOpenAI(env) 45 | try { 46 | const moderation = await checkModeration(openai, prompt) 47 | if (moderation) 48 | return replyJson({ code: -1, text: moderation }, env) 49 | 50 | const params: CreateCompletionRequest = Object.assign({}, config.conversation.chat, { prompt, user: 'aliuq' }) 51 | const { data } = await openai.createCompletion(params) 52 | return replyJson({ code: 0, text: data.choices[0].text }, env) 53 | } 54 | catch (error: any) { 55 | // eslint-disable-next-line no-console 56 | console.log(error.response?.data?.error?.message) 57 | const status = error.response?.status || 500 58 | return replyText('', env, { status }) 59 | } 60 | } 61 | 62 | /** 初始化 OpenAI 实例 */ 63 | async function initOpenAI(env: ENV) { 64 | return new OpenAIApi(new Configuration({ 65 | apiKey: env.OPENAI_API_KEY, 66 | baseOptions: { adapter: fetchAdapter }, 67 | })) 68 | } 69 | 70 | /** 判断输入的文本是否违反了 OpenAI 的内容策略,防止被封号 */ 71 | async function checkModeration(openai: OpenAIApi, input: string) { 72 | try { 73 | const { data } = await openai.createModeration({ model: 'text-moderation-latest', input }) 74 | const { flagged, categories } = data.results[0] 75 | // 违反内容策略 76 | if (flagged) { 77 | const maps: Record = { 78 | 'hate': '表达、煽动或促进基于种族、性别、民族、宗教、国籍、性取向、残疾状况或种姓的仇恨的内容', 79 | 'hate/threatening': '还包括对目标群体的暴力或严重伤害的仇恨性内容', 80 | 'self-harm': '倡导、鼓励或描述自我伤害行为的内容,如自杀、切割和饮食紊乱', 81 | 'sexual': '旨在引起性兴奋的内容,如对性活动的描述,或促进性服务的内容(不包括性教育和健康)', 82 | 'sexual/minors': '包括未满18岁的人的性内容', 83 | 'violence': '宣扬或美化暴力或赞美他人的痛苦或羞辱的内容', 84 | 'violence/graphic': '描绘死亡、暴力或严重身体伤害的暴力内容,其画面感极强', 85 | } 86 | const text: string[] = [] 87 | Object.keys(categories).forEach((key) => { 88 | if (!categories[key as Categories]) 89 | text.push(maps[key as Categories]) 90 | }) 91 | 92 | const mdtext = [ 93 | '# 违反内容策略', 94 | '', 95 | text.map((item: string, index: number) => `${index + 1} ${item}`).join('\n'), 96 | ].join('\n') 97 | 98 | return mdtext 99 | } 100 | 101 | return '' 102 | } 103 | catch (error: any) { 104 | return error.response?.data?.error?.message || error.message || '' 105 | } 106 | } 107 | 108 | type Categories = keyof CreateModerationResponseResultsInnerCategories 109 | -------------------------------------------------------------------------------- /workers/openai/types.d.ts: -------------------------------------------------------------------------------- 1 | interface ENV { 2 | GIT_HASH: string 3 | VERSION: string 4 | OPENAI_API_KEY: string 5 | } 6 | 7 | interface INTERNAL_ENV { 8 | DOMAIN?: string 9 | } 10 | 11 | interface Options { 12 | unbuild?: boolean 13 | env: INTERNAL_ENV 14 | } 15 | -------------------------------------------------------------------------------- /workers/openai/wrangler.config.ts: -------------------------------------------------------------------------------- 1 | // 这里处理 wrangler 的配置文件 2 | // 3 | import path from 'path' 4 | import * as execa from 'execa' 5 | import getPort, { portNumbers } from 'get-port' 6 | import 
type { BuildEntry } from 'unbuild' 7 | import pkg from './package.json' 8 | 9 | async function wranglerConfig({ unbuild: useUnbuild, env }: Options = { unbuild: false, env: {} }) { 10 | const port = await getPort({ port: portNumbers(8787, 8887) }) 11 | 12 | const { default: buildConfig } = await import('./build.config') 13 | const outDir = buildConfig.outDir || 'dist' 14 | 15 | /** 16 | * @example 17 | * entries: ['src/worker'] => nameFull: 'src/worker'; outName: 'worker' 18 | * entries: [{ input: 'src/worker' }] => nameFull: 'src/worker'; outName: 'worker' 19 | * 20 | * main: isDev ? `src/worker.ts` : `worker.mjs` 21 | * 22 | */ 23 | const entrie = (<(string | BuildEntry)[]>buildConfig.entries)[0] 24 | const nameFull = typeof entrie === 'string' ? entrie : entrie.input 25 | const outName = path.basename(nameFull) 26 | 27 | const vars = { 28 | GIT_HASH: execa.execaCommandSync('git rev-parse --short HEAD').stdout, 29 | VERSION: `v${pkg.version}`, 30 | } 31 | 32 | return { 33 | name: 'openai', 34 | main: useUnbuild ? `${outName}.mjs` : `${nameFull}.ts`, 35 | compatibility_date: new Date().toISOString().split('T')[0], 36 | /** If set to `true`, the worker will not be bundled. so the output file 37 | * must be a single file and no import module. if exists, will throw error. 38 | * 39 | * such as: `import axios from 'axios'`, 40 | */ 41 | // no_bundle: undefined, 42 | vars: { 43 | mode: 'default', 44 | ...vars, 45 | }, 46 | dev: { 47 | ip: 'localhost', 48 | local_protocol: 'https', 49 | port, 50 | }, 51 | env: { 52 | // For local development, Do not pulish the enviroment to cloudflare. 53 | localhost: { 54 | vars: { 55 | mode: 'localhost', 56 | ...vars, 57 | }, 58 | routes: env.DOMAIN 59 | ? [ 60 | { pattern: `chat.localhost:${port}`, zone_name: `localhost:${port}`, custom_domain: true }, 61 | { pattern: `localhost:${port}`, zone_name: `localhost:${port}`, custom_domain: true }, 62 | ] 63 | : undefined, 64 | }, 65 | production: { 66 | vars: { 67 | mode: 'production', 68 | ...vars, 69 | }, 70 | routes: env.DOMAIN 71 | ? [ 72 | { pattern: `chat.${env.DOMAIN}`, zone_name: env.DOMAIN, custom_domain: true }, 73 | { pattern: `${env.DOMAIN}`, zone_name: env.DOMAIN, custom_domain: true }, 74 | ] 75 | : undefined, 76 | }, 77 | }, 78 | outDir: useUnbuild ? 
outDir : undefined, 79 | } 80 | } 81 | 82 | export default wranglerConfig 83 | export { wranglerConfig } 84 | -------------------------------------------------------------------------------- /workers/proxy/README.md: -------------------------------------------------------------------------------- 1 | # Worker: proxy 2 | 3 | 常规代理请求 4 | 5 | 示例地址:[https://dl.llll.host](https://dl.llll.host) 6 | 7 | ## 环境变量 8 | 9 | + `__DOMAIN__`: 主域名,用于配置路由、触发器等 10 | -------------------------------------------------------------------------------- /workers/proxy/build.config.ts: -------------------------------------------------------------------------------- 1 | import { defineBuildConfig } from 'unbuild' 2 | 3 | export default defineBuildConfig({ 4 | entries: [ 5 | 'src/index', 6 | ], 7 | clean: true, 8 | declaration: true, 9 | externals: [], 10 | }) 11 | -------------------------------------------------------------------------------- /workers/proxy/dist/index.d.ts: -------------------------------------------------------------------------------- 1 | declare const _default: { 2 | fetch(request: Request, env: ENV, _ctx: ExecutionContext): Promise; 3 | }; 4 | 5 | export { _default as default }; 6 | -------------------------------------------------------------------------------- /workers/proxy/dist/index.mjs: -------------------------------------------------------------------------------- 1 | const HomeHtml = "\n \n \n \n \n \n cf-proxy\n \n \n
\n
\n

文件代理加速 by Cloudflare Workers

\n

\n 每日有使用次数限制,建议通过 cloudflare workers 部署自己的代理加速服务, 部署教程请参考\n readme\n

\n \n \n Go\n \n

\n Usage:\n {{url}}/<file_path>\n

\n
\n
\n

Copyright @ aliuq. All Rights Reserved. [{{hash}}]

\n
\n
\n \n\n"; 2 | 3 | const index = { 4 | async fetch(request, env, _ctx) { 5 | const needCancel = await needCancelRequest(request); 6 | if (needCancel) 7 | return new Response("", { status: 204 }); 8 | const url = new URL(request.url); 9 | const { domain, subdomain } = getDomainAndSubdomain(request); 10 | if (url.pathname === "/robots.txt") 11 | return new Response("User-agent: *\nDisallow: /", { status: 200 }); 12 | if (url.host === `dl.${domain}`) { 13 | if (url.pathname === "/") { 14 | return new Response(renderTemplate(HomeHtml, { 15 | url: url.origin, 16 | hash: env.GIT_HASH 17 | }), { 18 | status: 200, 19 | headers: { "content-type": "text/html;charset=utf-8" } 20 | }); 21 | } else { 22 | const sourceUrl = url.href.replace(url.origin, "").substring(1).replace(/^(https?:)\/+/g, "$1//"); 23 | try { 24 | const newSourceUrl = new URL(sourceUrl); 25 | const newRequest = getNewRequest(newSourceUrl, request); 26 | return proxy(newSourceUrl, newRequest); 27 | } catch (e) { 28 | return new Response(`${sourceUrl} is invalid url`, { status: 400 }); 29 | } 30 | } 31 | } 32 | return new Response(`Unsupported domain ${subdomain ? `${subdomain}.` : ""}${domain}`, { 33 | status: 200, 34 | headers: { "content-type": "text/plain;charset=utf-8", "git-hash": env.GIT_HASH } 35 | }); 36 | } 37 | }; 38 | function getDomainAndSubdomain(request) { 39 | const url = new URL(request.url); 40 | const hostArr = url.host.split("."); 41 | let subdomain = ""; 42 | let domain = ""; 43 | if (hostArr.length > 2) { 44 | subdomain = hostArr[0]; 45 | domain = hostArr.slice(1).join("."); 46 | } else if (hostArr.length === 2) { 47 | subdomain = hostArr[1].match(/^localhost(:\d+)?$/) ? hostArr[0] : ""; 48 | domain = hostArr[1].match(/^localhost(:\d+)?$/) ? hostArr[1] : hostArr.join("."); 49 | } else { 50 | domain = hostArr.join("."); 51 | } 52 | return { domain, subdomain }; 53 | } 54 | async function needCancelRequest(request, matches = []) { 55 | const url = new URL(request.url); 56 | matches = matches.length ? 
matches : [ 57 | "/favicon.", 58 | "/sw.js" 59 | ]; 60 | return matches.some((match) => url.pathname.includes(match)); 61 | } 62 | function getNewRequest(url, request) { 63 | const headers = new Headers(request.headers); 64 | headers.set("reason", "mirror of China"); 65 | const newRequestInit = { redirect: "manual", headers }; 66 | return new Request(url.toString(), new Request(request, newRequestInit)); 67 | } 68 | async function proxy(url, request) { 69 | try { 70 | const res = await fetch(url.toString(), request); 71 | const headers = res.headers; 72 | const newHeaders = new Headers(headers); 73 | const status = res.status; 74 | newHeaders.set("access-control-expose-headers", "*"); 75 | newHeaders.set("access-control-allow-origin", "*"); 76 | newHeaders.delete("content-security-policy"); 77 | newHeaders.delete("content-security-policy-report-only"); 78 | newHeaders.delete("clear-site-data"); 79 | return new Response(res.body, { status, headers: newHeaders }); 80 | } catch (e) { 81 | return new Response(e.message, { status: 500 }); 82 | } 83 | } 84 | function renderTemplate(content, data) { 85 | return content.replace(/{{\s*([a-zA-Z0-9_]+)\s*}}/g, (match, key) => { 86 | return data[key] || ""; 87 | }); 88 | } 89 | 90 | export { index as default }; 91 | -------------------------------------------------------------------------------- /workers/proxy/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "cf-workers-proxy", 3 | "type": "module", 4 | "version": "0.0.1", 5 | "description": "proxy is a cloudflare worker", 6 | "author": "AliuQ ", 7 | "license": "MIT", 8 | "homepage": "https://github.com/aliuq/cf-proxy/tree/master/workers/proxy#readme", 9 | "bugs": { 10 | "url": "https://github.com/aliuq/cf-proxy/issues" 11 | }, 12 | "keywords": [ 13 | "cloudflare", 14 | "worker", 15 | "proxy" 16 | ], 17 | "main": "dist/index.mjs", 18 | "scripts": { 19 | "dev": "wrangler dev -l" 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /workers/proxy/src/home.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | cf-proxy 8 | 9 | 10 |
11 |
12 |

文件代理加速 by Cloudflare Workers

13 |

14 | 每日有使用次数限制,建议通过 cloudflare workers 部署自己的代理加速服务, 部署教程请参考 15 | readme 16 |

17 |
23 | 29 | 33 |
34 |

35 | Usage: 36 | {{url}}/<file_path> 37 |

38 |
39 |
40 |

Copyright @ aliuq. All Rights Reserved. [{{hash}}]

41 |
42 |
43 | 44 | 45 | -------------------------------------------------------------------------------- /workers/proxy/src/index.ts: -------------------------------------------------------------------------------- 1 | import HomeHtml from './home.html' 2 | 3 | export default { 4 | async fetch(request: Request, env: ENV, _ctx: ExecutionContext): Promise { 5 | const needCancel = await needCancelRequest(request) 6 | if (needCancel) 7 | return new Response('', { status: 204 }) 8 | 9 | const url = new URL(request.url) 10 | const { domain, subdomain } = getDomainAndSubdomain(request) 11 | 12 | if (url.pathname === '/robots.txt') 13 | return new Response('User-agent: *\nDisallow: /', { status: 200 }) 14 | 15 | // ============ 逻辑处理 ============ 16 | // 17 | // 18 | if (url.host === `dl.${domain}`) { 19 | if (url.pathname === '/') { 20 | return new Response(renderTemplate(HomeHtml, { 21 | url: url.origin, 22 | hash: env.GIT_HASH, 23 | }), { 24 | status: 200, 25 | headers: { 'content-type': 'text/html;charset=utf-8' }, 26 | }) 27 | } 28 | else { 29 | const sourceUrl = url.href.replace(url.origin, '').substring(1).replace(/^(https?:)\/+/g, '$1//') 30 | try { 31 | const newSourceUrl = new URL(sourceUrl) 32 | const newRequest = getNewRequest(newSourceUrl, request) 33 | return proxy(newSourceUrl, newRequest) 34 | } 35 | catch (e) { 36 | return new Response(`${sourceUrl} is invalid url`, { status: 400 }) 37 | } 38 | } 39 | } 40 | 41 | return new Response(`Unsupported domain ${subdomain ? `${subdomain}.` : ''}${domain}`, { 42 | status: 200, 43 | headers: { 'content-type': 'text/plain;charset=utf-8', 'git-hash': env.GIT_HASH }, 44 | }) 45 | }, 46 | } 47 | 48 | /** Get domain and subdomain from request url 49 | */ 50 | function getDomainAndSubdomain(request: Request): { domain: string; subdomain: string } { 51 | const url = new URL(request.url) 52 | const hostArr = url.host.split('.') 53 | let subdomain = '' 54 | let domain = '' 55 | if (hostArr.length > 2) { 56 | subdomain = hostArr[0] 57 | domain = hostArr.slice(1).join('.') 58 | } 59 | else if (hostArr.length === 2) { 60 | subdomain = hostArr[1].match(/^localhost(:\d+)?$/) ? hostArr[0] : '' 61 | domain = hostArr[1].match(/^localhost(:\d+)?$/) ? hostArr[1] : hostArr.join('.') 62 | } 63 | else { 64 | domain = hostArr.join('.') 65 | } 66 | return { domain, subdomain } 67 | } 68 | 69 | /** 需要终止请求 70 | * @param request 71 | * @returns true: 需要终止请求 72 | */ 73 | async function needCancelRequest(request: Request, matches: string[] = []): Promise { 74 | const url = new URL(request.url) 75 | matches = matches.length 76 | ? 
matches 77 | : [ 78 | '/favicon.', 79 | '/sw.js', 80 | ] 81 | return matches.some(match => url.pathname.includes(match)) 82 | } 83 | 84 | /** 生成新的请求 85 | */ 86 | function getNewRequest(url: URL, request: Request) { 87 | const headers = new Headers(request.headers) 88 | headers.set('reason', 'mirror of China') 89 | const newRequestInit: RequestInit = { redirect: 'manual', headers } 90 | return new Request(url.toString(), new Request(request, newRequestInit)) 91 | } 92 | 93 | /** 代理转发 94 | */ 95 | async function proxy(url: URL, request: Request) { 96 | try { 97 | const res = await fetch(url.toString(), request) 98 | const headers = res.headers 99 | const newHeaders = new Headers(headers) 100 | const status = res.status 101 | newHeaders.set('access-control-expose-headers', '*') 102 | newHeaders.set('access-control-allow-origin', '*') 103 | // Remove CSP 104 | newHeaders.delete('content-security-policy') 105 | newHeaders.delete('content-security-policy-report-only') 106 | newHeaders.delete('clear-site-data') 107 | return new Response(res.body, { status, headers: newHeaders }) 108 | } 109 | catch (e: any) { 110 | return new Response(e.message, { status: 500 }) 111 | } 112 | } 113 | 114 | /** 渲染模板 115 | */ 116 | function renderTemplate(content: string, data: Record) { 117 | return content.replace(/{{\s*([a-zA-Z0-9_]+)\s*}}/g, (match, key) => { 118 | return data[key] || '' 119 | }) 120 | } 121 | -------------------------------------------------------------------------------- /workers/proxy/types.d.ts: -------------------------------------------------------------------------------- 1 | interface ENV { 2 | GIT_HASH: string 3 | } 4 | 5 | interface INTERNAL_ENV { 6 | DOMAIN?: string 7 | } 8 | 9 | interface Options { 10 | unbuild?: boolean 11 | env: INTERNAL_ENV 12 | } 13 | -------------------------------------------------------------------------------- /workers/proxy/wrangler.config.ts: -------------------------------------------------------------------------------- 1 | // 这里处理 wrangler 的配置文件 2 | // 3 | import path from 'path' 4 | import * as execa from 'execa' 5 | import getPort, { portNumbers } from 'get-port' 6 | import type { BuildEntry } from 'unbuild' 7 | 8 | async function wranglerConfig({ unbuild: useUnbuild, env }: Options = { unbuild: false, env: {} }) { 9 | const port = await getPort({ port: portNumbers(8787, 8887) }) 10 | 11 | const { default: buildConfig } = await import('./build.config') 12 | const outDir = buildConfig.outDir || 'dist' 13 | 14 | const entrie = (<(string | BuildEntry)[]>buildConfig.entries)[0] 15 | const nameFull = typeof entrie === 'string' ? entrie : entrie.input 16 | const outName = path.basename(nameFull) 17 | 18 | const gitHash = execa.execaCommandSync('git rev-parse --short HEAD').stdout 19 | 20 | return { 21 | name: 'proxy', 22 | main: useUnbuild ? `${outName}.mjs` : `${nameFull}.ts`, 23 | compatibility_date: new Date().toISOString().split('T')[0], 24 | no_bundle: useUnbuild ? true : undefined, 25 | vars: { 26 | GIT_HASH: gitHash, 27 | }, 28 | dev: { 29 | ip: 'localhost', 30 | // local_protocol: 'https', 31 | port, 32 | }, 33 | env: { 34 | // For local development, Do not pulish the enviroment to cloudflare. 35 | localhost: { 36 | vars: { 37 | mode: 'localhost', 38 | GIT_HASH: gitHash, 39 | }, 40 | routes: [ 41 | { pattern: `dl.localhost:${port}`, zone_name: `localhost:${port}`, custom_domain: true }, 42 | ], 43 | }, 44 | production: { 45 | vars: { 46 | mode: 'production', 47 | GIT_HASH: gitHash, 48 | }, 49 | routes: env.DOMAIN 50 | ? 
[ 51 | { pattern: `dl.${env.DOMAIN}`, zone_name: env.DOMAIN, custom_domain: true }, 52 | ] 53 | : undefined, 54 | }, 55 | }, 56 | outDir: useUnbuild ? outDir : undefined, 57 | } 58 | } 59 | 60 | export default wranglerConfig 61 | export { wranglerConfig } 62 | -------------------------------------------------------------------------------- /workers/short-domain/README.md: -------------------------------------------------------------------------------- 1 | # Worker: short-domain 2 | 3 | URL shortener service. Demo: [https://s.llll.host](https://s.llll.host) 4 | 5 | ## kv:namespace 6 | 7 | Create the KV namespace used for the short URLs (handy for local development), and replace `SHORTURLS` below with your own namespace name 8 | 9 | ```bash 10 | wrangler kv:namespace create "SHORTURLS" 11 | # the development environment also needs the --preview flag 12 | wrangler kv:namespace create "SHORTURLS" --preview 13 | ``` 14 | 15 | ## Usage 16 | 17 | ```bash 18 | # Development 19 | pnpm run exec short-domain dev --env localhost 20 | # Publish 21 | pnpm run exec short-domain publish --env production 22 | ``` 23 | 24 | ## Environment variables 25 | 26 | Edit these in the `.env` file. Variables consumed by the build configuration start and end with `__`, e.g. `__DOMAIN__`, to distinguish them from `wrangler`'s own environment variables 27 | 28 | | Variable | Description | 29 | | :--- | :--- | 30 | | `ADMIN_USERNAME` | Admin username | 31 | | `ADMIN_PASSWORD` | Admin password | 32 | | `__DOMAIN__` | Main domain, used to configure routes | 33 | | `__KV_BINDING__` | Name of the short-URL KV namespace binding, e.g. `SHORTURLS` | 34 | | `__KV_NAMESPACE_ID__` | ID of the short-URL KV namespace | 35 | | `__KV_PREVIEW_ID__` | Preview ID of the short-URL KV namespace, used for local development | 36 | -------------------------------------------------------------------------------- /workers/short-domain/build.config.ts: -------------------------------------------------------------------------------- 1 | import { defineBuildConfig } from 'unbuild' 2 | 3 | export default defineBuildConfig({ 4 | entries: [ 5 | 'src/index', 6 | ], 7 | clean: true, 8 | // If the build fails with a warning, you can set failOnWarn to false to ignore it. 
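// With `entries: ['src/index']` and the default outDir, the build emits dist/index.mjs
// (plus dist/index.d.ts via `declaration: true` below), which is what package.json's "main" points at.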
9 | // failOnWarn: false, 10 | // Incompatible with `rollup.esbuild.minify`, an error will occur 11 | declaration: true, 12 | externals: [], 13 | rollup: { 14 | esbuild: { 15 | // See `declaration` 16 | // minify: true, 17 | }, 18 | }, 19 | }) 20 | -------------------------------------------------------------------------------- /workers/short-domain/dist/index.d.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Description: Short domain worker 3 | * Routes: 4 | * - `/new?url=URL`: 创建新的短网址 5 | * - `/all`: 获取所有短网址 6 | * - `/{short}`: 重定向到原始域名 7 | */ 8 | declare const _default: { 9 | fetch(request: Request, env: ENV, _ctx: ExecutionContext): Promise; 10 | }; 11 | 12 | export { _default as default }; 13 | -------------------------------------------------------------------------------- /workers/short-domain/dist/index.mjs: -------------------------------------------------------------------------------- 1 | async function replyText(text, env, init = {}) { 2 | return new Response(text, deepMerge({ 3 | status: 200, 4 | headers: { "content-type": "text/plain;charset=UTF-8", "version": env.VERSION } 5 | }, init)); 6 | } 7 | async function replyHtml(html, env, init = {}) { 8 | return new Response(html, deepMerge({ 9 | status: 200, 10 | headers: { "content-type": "text/html;charset=UTF-8", "version": env.VERSION } 11 | }, init)); 12 | } 13 | async function replyUnsupport(options, env, init = {}) { 14 | return new Response(renderTemplate("Unsupported url {{ url }}", options), deepMerge({ 15 | status: 200, 16 | headers: { "content-type": "text/plain;charset=UTF-8", "version": env.VERSION } 17 | }, init)); 18 | } 19 | async function replyJson(json, env, init = {}) { 20 | return new Response(JSON.stringify(json), deepMerge({ 21 | status: 200, 22 | headers: { "content-type": "application/json;charset=UTF-8", "version": env.VERSION } 23 | }, init)); 24 | } 25 | function getDomainAndSubdomain(request) { 26 | const url = new URL(request.url); 27 | const hostArr = url.host.split("."); 28 | let subdomain = ""; 29 | let domain = ""; 30 | if (url.hostname.endsWith("localhost")) { 31 | subdomain = hostArr.length === 1 ? "" : hostArr[0]; 32 | domain = hostArr.length === 1 ? hostArr[0] : hostArr.slice(1).join("."); 33 | } else { 34 | subdomain = hostArr.length > 2 ? hostArr[0] : ""; 35 | domain = hostArr.length > 2 ? hostArr.slice(1).join(".") : hostArr.join("."); 36 | } 37 | return { domain, subdomain }; 38 | } 39 | async function needCancelRequest(request, matches = []) { 40 | const url = new URL(request.url); 41 | matches = matches.length ? 
matches : [ 42 | "/favicon.", 43 | "/sw.js" 44 | ]; 45 | const isCancel = matches.some((match) => url.pathname.includes(match)); 46 | if (isCancel) 47 | return replyText("", {}, { status: 204 }); 48 | } 49 | function renderTemplate(content, data) { 50 | return content.replace(/\{{\s*([a-zA-Z0-9_]+)\s*}}/g, (match, key) => { 51 | return data[key] || ""; 52 | }); 53 | } 54 | function deepMerge(target, ...sources) { 55 | for (const source of sources) { 56 | if (isObject(target) && isObject(source)) { 57 | for (const key in source) { 58 | if (isObject(source[key])) { 59 | if (!target[key]) { 60 | target[key] = {}; 61 | } 62 | deepMerge(target[key], source[key]); 63 | } else { 64 | target[key] = source[key]; 65 | } 66 | } 67 | } 68 | } 69 | return target; 70 | } 71 | function isObject(item) { 72 | return item && typeof item === "object" && !Array.isArray(item); 73 | } 74 | 75 | const NewUI = "\n\n\n 添加短网址\n \n \n \n \n \n \n \n \n \n \n \n \n \n\n\n\n
\n
\n \n \n
\n\n
\n
\n

添加短网址

\n

添加成功或者url已经存在将会自动将短网址复制到剪贴板

\n
\n\n \n \n \n \n \n \n \n \n
\n 添加\n
\n
\n\n \n \n {{ data.short }}\n \n \n {{ data.source }}\n \n \n
\n
\n\n \n \n\n\n\n"; 76 | 77 | const ManageUI = "\n\n\n Short Domain Manage\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n\n\n
\n
\n \n \n
\n\n
\n
\n

所有短网址({{ list.length }})

\n \n \n \n
\n\n
\n
\n 序号\n 短网址\n 源网址\n 操作\n
\n \n \n
\n
\n
\n \n \n\n\n\n"; 78 | 79 | const index = { 80 | async fetch(request, env, _ctx) { 81 | const needCancel = await needCancelRequest(request); 82 | if (needCancel) 83 | return needCancel; 84 | const url = new URL(request.url); 85 | if (url.pathname === "/robots.txt") 86 | return replyText("User-agent: *\nDisallow: /", env); 87 | const { domain } = getDomainAndSubdomain(request); 88 | const pathnameArr = url.pathname.slice(1).split("/"); 89 | if (pathnameArr.length === 1) { 90 | const maps = { 91 | "": () => { 92 | const isLogin = !!checkAuthorization(request, env, false); 93 | return replyHtml(NewUI, env, { headers: { "Set-Cookie": `auth=${isLogin}; path=/;` } }); 94 | }, 95 | "admin": async () => { 96 | const valid = checkAuthorization(request, env); 97 | if (valid && typeof valid !== "boolean") 98 | return valid; 99 | return replyHtml(ManageUI, env); 100 | } 101 | }; 102 | const handler = maps[pathnameArr[0]]; 103 | if (handler) 104 | return await handler(); 105 | else 106 | return await handlerRedirect(pathnameArr[0], env); 107 | } else if (pathnameArr.length === 2 && pathnameArr[0] === "api") { 108 | const apiMaps = { 109 | list: handlerApiList, 110 | new: handlerApiNew, 111 | delete: handlerApiDelete 112 | }; 113 | const handler = apiMaps[pathnameArr[1]]; 114 | if (handler) 115 | return await handler(request, env, domain); 116 | else 117 | return replyJson({ code: -1, message: `api [/api/${pathnameArr[1]}] not found` }, env); 118 | } 119 | return await replyUnsupport({ url: decodeURIComponent(url.toString()) }, env); 120 | } 121 | }; 122 | function randomString(length) { 123 | const chars = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"; 124 | let result = ""; 125 | for (let i = length; i > 0; --i) 126 | result += chars[Math.floor(Math.random() * chars.length)]; 127 | return result; 128 | } 129 | async function handlerApiList(request, env, domain) { 130 | const valid = checkAuthorization(request, env); 131 | if (valid && typeof valid !== "boolean") 132 | return valid; 133 | if (request.method !== "POST") 134 | return replyJson({ error: `method [${request.method}] not allowed` }, env); 135 | const body = await request.json(); 136 | const cursor = body.cursor; 137 | const list = await env.SHORTURLS.list({ prefix: "md5:", limit: 500, cursor }); 138 | const result = await Promise.all(list.keys.map(async (key) => { 139 | const value = await env.SHORTURLS.get(key.name); 140 | const decodeValue = value ? decodeURIComponent(value) : ""; 141 | const [id, url] = decodeValue.split("|||"); 142 | return { 143 | id, 144 | short: getShortUrl(request, id, domain), 145 | source: url 146 | }; 147 | })); 148 | return replyJson({ data: result, cursor: list.list_complete ? "" : list.cursor }, env); 149 | } 150 | async function handlerApiNew(request, env, domain) { 151 | try { 152 | if (request.method !== "POST") 153 | return replyJson({ error: `method [${request.method}] not allowed` }, env); 154 | const body = await request.json(); 155 | const url = body.url ? decodeURIComponent(body.url) : ""; 156 | if (!url) 157 | return replyJson({ error: "\u7F3A\u5C11 url \u53C2\u6570" }, env); 158 | const newUrl = new URL(url); 159 | const decodeUrl = decodeURIComponent(newUrl.toString()); 160 | const md5 = await getMD5(decodeUrl); 161 | const exist = await env.SHORTURLS.get(`md5:${md5}`); 162 | if (!exist) { 163 | const isAuth = !!checkAuthorization(request, env, false); 164 | const short = isAuth && body.id ? 
body.id : randomString(6); 165 | await env.SHORTURLS.put(`md5:${md5}`, `${short}|||${decodeUrl}`); 166 | await handlerIdMd5Maps(env, short, md5); 167 | const shortUrl = getShortUrl(request, short, domain); 168 | return replyJson({ code: 0, data: { short: shortUrl, source: decodeUrl } }, env); 169 | } else { 170 | const [short] = exist.split("|||"); 171 | const shortUrl = getShortUrl(request, short, domain); 172 | return replyJson({ code: -1, message: "url \u5DF2\u7ECF\u5B58\u5728", data: { short: shortUrl, source: decodeUrl } }, env); 173 | } 174 | } catch (e) { 175 | return replyJson({ code: -1, message: e.message }, env); 176 | } 177 | } 178 | async function handlerApiDelete(request, env) { 179 | const valid = checkAuthorization(request, env); 180 | if (valid && typeof valid !== "boolean") 181 | return valid; 182 | if (request.method !== "POST") 183 | return replyJson({ error: `method [${request.method}] not allowed` }, env); 184 | const body = await request.json(); 185 | const url = body.url ? decodeURIComponent(body.url) : ""; 186 | if (!url) 187 | return replyJson({ error: "\u7F3A\u5C11 url \u53C2\u6570" }, env); 188 | const md5 = await getMD5(url); 189 | const exist = await env.SHORTURLS.get(`md5:${md5}`); 190 | if (!exist) 191 | return replyJson({ error: `[${url}] not found` }, env); 192 | await env.SHORTURLS.delete(`md5:${md5}`); 193 | const [id] = exist.split("|||"); 194 | await handlerIdMd5Maps(env, id); 195 | return replyJson({ message: "\u5220\u9664\u6210\u529F" }, env); 196 | } 197 | async function handlerIdMd5Maps(env, id, md5) { 198 | const idMd5MapsStr = await env.SHORTURLS.get("id_md5_maps"); 199 | const idMd5Maps = idMd5MapsStr ? JSON.parse(idMd5MapsStr) : {}; 200 | if (id && md5) { 201 | idMd5Maps[id] = md5; 202 | } else if (id && !md5) { 203 | if (!Object.keys(idMd5Maps).length) 204 | return; 205 | delete idMd5Maps[id]; 206 | } else if (!id && !md5) { 207 | return idMd5Maps; 208 | } 209 | await env.SHORTURLS.put("id_md5_maps", JSON.stringify(idMd5Maps)); 210 | } 211 | async function handlerRedirect(id, env) { 212 | const idMd5Maps = await handlerIdMd5Maps(env) || {}; 213 | const md5 = idMd5Maps[id]; 214 | if (!md5) 215 | return replyJson({ error: `[${id}] not found` }, env); 216 | const exist = await env.SHORTURLS.get(`md5:${md5}`); 217 | if (exist) { 218 | const url = exist.split("|||")[1]; 219 | return Response.redirect(decodeURIComponent(url), 301); 220 | } else { 221 | return replyJson({ error: `[${id}] not found` }, env); 222 | } 223 | } 224 | function checkAuthorization(request, env, need401 = true) { 225 | const authorization = request.headers.get("Authorization"); 226 | if (!authorization && need401) { 227 | return new Response(null, { 228 | status: 401, 229 | headers: { 230 | "WWW-Authenticate": 'Basic realm="Restricted", charset="UTF-8"' 231 | } 232 | }); 233 | } else if (authorization) { 234 | const str = atob(authorization.split(" ")[1]); 235 | const [username, password] = str.split(":"); 236 | const isValid = username === env.ADMIN_USERNAME && password === env.ADMIN_PASSWORD; 237 | if (!isValid && need401) { 238 | return new Response(null, { 239 | status: 401, 240 | headers: { 241 | "WWW-Authenticate": 'Basic realm="Restricted", charset="UTF-8"' 242 | } 243 | }); 244 | } else { 245 | return isValid; 246 | } 247 | } 248 | return false; 249 | } 250 | async function getMD5(url) { 251 | return crypto.subtle.digest("MD5", new TextEncoder().encode(url)).then((hash) => { 252 | return hex(hash); 253 | }); 254 | } 255 | function hex(buffer) { 256 | const hexCodes = 
[]; 257 | const view = new DataView(buffer); 258 | for (let i = 0; i < view.byteLength; i += 4) { 259 | const value = view.getUint32(i); 260 | const stringValue = value.toString(16); 261 | const padding = "00000000"; 262 | const paddedValue = (padding + stringValue).slice(-padding.length); 263 | hexCodes.push(paddedValue); 264 | } 265 | return hexCodes.join(""); 266 | } 267 | function getShortUrl(request, id, domain) { 268 | const url = new URL(request.url); 269 | const protocol = url.protocol === "https:" ? "https" : "http"; 270 | return `${protocol}://s.${domain}/${id}`; 271 | } 272 | 273 | export { index as default }; 274 |
--------------------------------------------------------------------------------
/workers/short-domain/package.json:
--------------------------------------------------------------------------------
1 | {
2 |   "name": "cf-workers-short-domain",
3 |   "type": "module",
4 |   "version": "0.0.1",
5 |   "description": "Simple cloudflare worker short-domain",
6 |   "author": "AliuQ ",
7 |   "license": "MIT",
8 |   "homepage": "https://github.com/aliuq/cf-proxy/tree/master/workers/short-domain#readme",
9 |   "bugs": {
10 |     "url": "https://github.com/aliuq/cf-proxy/issues"
11 |   },
12 |   "keywords": [
13 |     "cloudflare",
14 |     "worker",
15 |     "short-domain"
16 |   ],
17 |   "main": "dist/index.mjs",
18 |   "scripts": {
19 |     "dev": "wrangler dev -l",
20 |     "build": "unbuild"
21 |   }
22 | }
23 | 
--------------------------------------------------------------------------------
/workers/short-domain/src/index.ts:
--------------------------------------------------------------------------------
1 | /**
2 |  * Description: Short domain worker
3 |  * Routes:
4 |  *  - `/new?url=URL`: Create a new short URL
5 |  *  - `/all`: List all short URLs
6 |  *  - `/{short}`: Redirect to the original URL
7 |  */
8 | 
9 | import { getDomainAndSubdomain, needCancelRequest, replyHtml, replyJson, replyText, replyUnsupport } from '../../utils'
10 | import NewUI from './new.html'
11 | import ManageUI from './manage.html'
12 | 
13 | export default {
14 |   async fetch(request: Request, env: ENV, _ctx: ExecutionContext): Promise<Response> {
15 |     const needCancel = await needCancelRequest(request)
16 |     if (needCancel)
17 |       return needCancel
18 | 
19 |     const url = new URL(request.url)
20 |     if (url.pathname === '/robots.txt')
21 |       return replyText('User-agent: *\nDisallow: /', env)
22 | 
23 |     const { domain } = getDomainAndSubdomain(request)
24 | 
25 |     const pathnameArr = url.pathname.slice(1).split('/')
26 |     if (pathnameArr.length === 1) {
27 |       const maps: Record<string, any> = {
28 |         '': () => {
29 |           const isLogin = !!checkAuthorization(request, env, false)
30 |           return replyHtml(NewUI, env, { headers: { 'Set-Cookie': `auth=${isLogin}; path=/;` } })
31 |         },
32 |         'admin': async () => {
33 |           const valid = checkAuthorization(request, env)
34 |           if (valid && typeof valid !== 'boolean')
35 |             return valid
36 |           return replyHtml(ManageUI, env)
37 |         },
38 |       }
39 |       const handler = maps[pathnameArr[0]]
40 |       if (handler)
41 |         return await handler()
42 |       else
43 |         return await handlerRedirect(pathnameArr[0], env)
44 |     }
45 |     else if (pathnameArr.length === 2 && pathnameArr[0] === 'api') {
46 |       const apiMaps: Record<string, any> = {
47 |         list: handlerApiList,
48 |         new: handlerApiNew,
49 |         delete: handlerApiDelete,
50 |       }
51 |       const handler = apiMaps[pathnameArr[1]]
52 |       if (handler)
53 |         return await handler(request, env, domain)
54 |       else
55 |         return replyJson({ code: -1, message: `api [/api/${pathnameArr[1]}] not found` }, env)
56 |     }
57 | 
58 |     return await replyUnsupport({ url: decodeURIComponent(url.toString()) }, env)
59 |   },
60 | }
61 | 
62 | /** Generate a random string */
63 | function randomString(length: number) {
64 |   const chars = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
65 |   let result = ''
66 |   for (let i = length; i > 0; --i) result += chars[Math.floor(Math.random() * chars.length)]
67 |   return result
68 | }
69 | 
70 | /** List stored short URLs */
71 | async function handlerApiList(request: Request, env: ENV, domain: string) {
72 |   const valid = checkAuthorization(request, env)
73 |   if (valid && typeof valid !== 'boolean')
74 |     return valid
75 |   if (request.method !== 'POST')
76 |     return replyJson({ error: `method [${request.method}] not allowed` }, env)
77 | 
78 |   const body: Record<string, string> = await request.json()
79 |   const cursor = body.cursor
80 | 
81 |   const list = await env.SHORTURLS.list({ prefix: 'md5:', limit: 500, cursor })
82 |   const result = await Promise.all(list.keys.map(async (key) => {
83 |     const value = await env.SHORTURLS.get(key.name)
84 |     const decodeValue = value ? decodeURIComponent(value) : ''
85 |     const [id, url] = decodeValue.split('|||')
86 |     return {
87 |       id,
88 |       short: getShortUrl(request, id, domain),
89 |       source: url,
90 |     }
91 |   }))
92 | 
93 |   return replyJson({ data: result, cursor: list.list_complete ? '' : list.cursor }, env)
94 | }
95 | 
96 | /** Create a short URL */
97 | async function handlerApiNew(request: Request, env: ENV, domain: string) {
98 |   try {
99 |     if (request.method !== 'POST')
100 |       return replyJson({ error: `method [${request.method}] not allowed` }, env)
101 | 
102 |     const body: Record<string, string> = await request.json()
103 |     const url = body.url ? decodeURIComponent(body.url) : ''
104 |     if (!url)
105 |       return replyJson({ error: '缺少 url 参数' }, env)
106 | 
107 |     const newUrl = new URL(url)
108 |     const decodeUrl = decodeURIComponent(newUrl.toString())
109 |     const md5 = await getMD5(decodeUrl)
110 | 
111 |     const exist = await env.SHORTURLS.get(`md5:${md5}`)
112 |     if (!exist) {
113 |       const isAuth = !!checkAuthorization(request, env, false)
114 |       const short = isAuth && body.id ? body.id : randomString(6)
115 |       await env.SHORTURLS.put(`md5:${md5}`, `${short}|||${decodeUrl}`)
116 |       await handlerIdMd5Maps(env, short, md5)
117 |       const shortUrl = getShortUrl(request, short, domain)
118 |       return replyJson({ code: 0, data: { short: shortUrl, source: decodeUrl } }, env)
119 |     }
120 |     else {
121 |       const [short] = exist.split('|||')
122 |       const shortUrl = getShortUrl(request, short, domain)
123 |       return replyJson({ code: -1, message: 'url 已经存在', data: { short: shortUrl, source: decodeUrl } }, env)
124 |     }
125 |   }
126 |   catch (e: any) {
127 |     return replyJson({ code: -1, message: e.message }, env)
128 |   }
129 | }
130 | 
131 | /** Delete a short URL */
132 | async function handlerApiDelete(request: Request, env: ENV) {
133 |   const valid = checkAuthorization(request, env)
134 |   if (valid && typeof valid !== 'boolean')
135 |     return valid
136 |   if (request.method !== 'POST')
137 |     return replyJson({ error: `method [${request.method}] not allowed` }, env)
138 | 
139 |   const body: Record<string, string> = await request.json()
140 |   const url = body.url ? decodeURIComponent(body.url) : ''
141 |   if (!url)
142 |     return replyJson({ error: '缺少 url 参数' }, env)
143 | 
144 |   const md5 = await getMD5(url)
145 |   const exist = await env.SHORTURLS.get(`md5:${md5}`)
146 |   if (!exist)
147 |     return replyJson({ error: `[${url}] not found` }, env)
148 | 
149 |   await env.SHORTURLS.delete(`md5:${md5}`)
150 | 
151 |   const [id] = exist.split('|||')
152 |   await handlerIdMd5Maps(env, id)
153 | 
154 |   return replyJson({ message: '删除成功' }, env)
155 | }
156 | 
157 | /** Maintain the mapping between short ids and MD5 hashes
158 |  *
159 |  * + The KV key is `id_md5_maps`; the value is a JSON string of shape `{ id: md5 }`
160 |  * + When both id and md5 are given, a mapping is added
161 |  * + When id is given but md5 is not, the mapping is removed
162 |  * + When neither id nor md5 is given, the whole mapping is returned
163 |  */
164 | async function handlerIdMd5Maps(env: ENV, id?: string, md5?: string) {
165 |   const idMd5MapsStr = await env.SHORTURLS.get('id_md5_maps')
166 |   const idMd5Maps = idMd5MapsStr ? JSON.parse(idMd5MapsStr) : {}
167 |   if (id && md5) {
168 |     idMd5Maps[id] = md5
169 |   }
170 |   else if (id && !md5) {
171 |     if (!Object.keys(idMd5Maps).length)
172 |       return
173 |     delete idMd5Maps[id]
174 |   }
175 |   else if (!id && !md5) {
176 |     return idMd5Maps
177 |   }
178 | 
179 |   await env.SHORTURLS.put('id_md5_maps', JSON.stringify(idMd5Maps))
180 | }
181 | 
182 | /** Handle short URL redirects */
183 | async function handlerRedirect(id: string, env: ENV) {
184 |   const idMd5Maps = await handlerIdMd5Maps(env) || {}
185 |   const md5 = idMd5Maps[id]
186 |   if (!md5)
187 |     return replyJson({ error: `[${id}] not found` }, env)
188 | 
189 |   const exist = await env.SHORTURLS.get(`md5:${md5}`)
190 |   if (exist) {
191 |     const url = exist.split('|||')[1]
192 |     return Response.redirect(decodeURIComponent(url), 301)
193 |   }
194 |   else {
195 |     return replyJson({ error: `[${id}] not found` }, env)
196 |   }
197 | }
198 | 
199 | /** Validate the Authorization header; when it fails, reply with a 401 that triggers the browser login prompt */
200 | function checkAuthorization(request: Request, env: ENV, need401 = true) {
201 |   const authorization = request.headers.get('Authorization')
202 |   if (!authorization && need401) {
203 |     return new Response(null, {
204 |       status: 401,
205 |       headers: {
206 |         'WWW-Authenticate': 'Basic realm="Restricted", charset="UTF-8"',
207 |       },
208 |     })
209 |   }
210 |   else if (authorization) {
211 |     const str = atob(authorization.split(' ')[1])
212 |     const [username, password] = str.split(':')
213 |     const isValid = username === env.ADMIN_USERNAME && password === env.ADMIN_PASSWORD
214 |     if (!isValid && need401) {
215 |       return new Response(null, {
216 |         status: 401,
217 |         headers: {
218 |           'WWW-Authenticate': 'Basic realm="Restricted", charset="UTF-8"',
219 |         },
220 |       })
221 |     }
222 |     else {
223 |       return isValid
224 |     }
225 |   }
226 |   return false
227 | }
228 | 
229 | /** Compute the MD5 hash of a URL */
230 | async function getMD5(url: string) {
231 |   return crypto.subtle.digest('MD5', new TextEncoder().encode(url)).then((hash) => {
232 |     return hex(hash)
233 |   })
234 | }
235 | 
236 | function hex(buffer: ArrayBuffer) {
237 |   const hexCodes = []
238 |   const view = new DataView(buffer)
239 |   for (let i = 0; i < view.byteLength; i += 4) {
240 |     // Using getUint32 reduces the number of iterations needed (we process 4 bytes each time)
241 |     const value = view.getUint32(i)
242 |     // toString(16) will give the hex representation of the number without padding
243 |     const stringValue = value.toString(16)
244 |     // We use concatenation and slice for padding
245 |     const padding = '00000000'
246 |     const paddedValue = (padding + stringValue).slice(-padding.length)
247 |     hexCodes.push(paddedValue)
248 |   }
249 | 
250 |   // Join all the hex strings into one
251 |   return hexCodes.join('')
252 | }
253 | 
254 | function getShortUrl(request: Request, id: string, domain: string) {
255 |   const url = new URL(request.url)
256 |   const protocol = url.protocol === 'https:' ? 'https' : 'http'
257 |   return `${protocol}://s.${domain}/${id}`
258 | }
259 | 
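The worker above keeps two kinds of KV records per short URL: `md5:<hash of the source url>` holding `"<short id>|||<source url>"`, plus one shared `id_md5_maps` JSON object that serves as the reverse index used by redirects. A minimal sketch of that layout, with an in-memory `Map` standing in for the `SHORTURLS` namespace; the helper names here are mine, and MD5 via `crypto.subtle.digest` is a Cloudflare Workers extension rather than standard WebCrypto:

```ts
// In-memory stand-in for the SHORTURLS KV namespace (illustration only).
const fakeKV = new Map<string, string>()

async function md5Hex(input: string): Promise<string> {
  // MD5 support in SubtleCrypto is a Workers-specific extension.
  const hash = await crypto.subtle.digest('MD5', new TextEncoder().encode(input))
  return [...new Uint8Array(hash)].map(b => b.toString(16).padStart(2, '0')).join('')
}

async function createShortUrl(source: string, id: string): Promise<void> {
  const md5 = await md5Hex(source)
  // Record 1: md5 of the source url -> "<short id>|||<source url>"
  fakeKV.set(`md5:${md5}`, `${id}|||${source}`)
  // Record 2: the reverse index, a single JSON object mapping short id -> md5
  const maps = JSON.parse(fakeKV.get('id_md5_maps') ?? '{}')
  maps[id] = md5
  fakeKV.set('id_md5_maps', JSON.stringify(maps))
}

async function resolveShortUrl(id: string): Promise<string | undefined> {
  // A redirect does two lookups: id -> md5 via the index, then md5 -> record.
  const maps = JSON.parse(fakeKV.get('id_md5_maps') ?? '{}')
  const md5 = maps[id]
  if (!md5)
    return undefined
  return fakeKV.get(`md5:${md5}`)?.split('|||')[1]
}

await createShortUrl('https://example.com/some/long/path', 'abc123')
console.log(await resolveShortUrl('abc123')) // https://example.com/some/long/path
```

Because creating an entry writes both records, deletion has to call `handlerIdMd5Maps` as well to keep the index consistent, which is exactly what `handlerApiDelete` does.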
--------------------------------------------------------------------------------
/workers/short-domain/src/manage.html:
--------------------------------------------------------------------------------
[The markup of manage.html did not survive extraction; only text fragments remain. Recoverable content: the page title "Short Domain Manage", a heading "All short URLs ({{ list.length }})" (所有短网址), and a table listing every short URL with the columns index (序号), short URL (短网址), source URL (源网址), and actions (操作).]
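The manage page drives the worker's admin endpoints. A hedged sketch of those calls, assuming a deployment at `s.example.com` with placeholder credentials (both are illustrative, not values from this repo):

```ts
const base = 'https://s.example.com'               // placeholder host
const auth = `Basic ${btoa('admin:secret')}`       // placeholder credentials

// Page through all stored short URLs; the worker returns at most 500 per call
// and hands back a cursor until the KV listing is complete (cursor === '').
async function listAll() {
  const rows: any[] = []
  let cursor = ''
  do {
    const res = await fetch(`${base}/api/list`, {
      method: 'POST',
      headers: { 'Authorization': auth, 'Content-Type': 'application/json' },
      body: JSON.stringify({ cursor }),
    })
    const json: any = await res.json()
    rows.push(...json.data)
    cursor = json.cursor
  } while (cursor)
  return rows
}

// Delete an entry; deletion is keyed by the source url, not by the short id.
async function remove(url: string) {
  const res = await fetch(`${base}/api/delete`, {
    method: 'POST',
    headers: { 'Authorization': auth, 'Content-Type': 'application/json' },
    body: JSON.stringify({ url: encodeURIComponent(url) }),
  })
  return res.json()
}
```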
--------------------------------------------------------------------------------
/workers/short-domain/src/new.html:
--------------------------------------------------------------------------------
[The markup of new.html did not survive extraction; only text fragments remain. Recoverable content: the page title "Add short URL" (添加短网址), a hint that on success, or when the URL already exists, the short URL is copied to the clipboard automatically, an "Add" (添加) button, and a result area rendering {{ data.short }} and {{ data.source }}.]
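The add page boils down to a single POST against `/api/new` (no auth is required, though a custom id is only honoured when the request also carries valid Basic auth), followed by copying the returned short link to the clipboard, as the hint text describes. A browser-side sketch, with the host name as a placeholder:

```ts
// Minimal browser-side sketch of what the "Add short URL" page does.
async function addShortUrl(source: string, customId?: string) {
  const res = await fetch('https://s.example.com/api/new', {   // placeholder host
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ url: encodeURIComponent(source), id: customId }),
  })
  const json: any = await res.json()
  // code 0 = created; code -1 with data = the url already exists. In both
  // cases the page copies the returned short link to the clipboard.
  if (json.data?.short)
    await navigator.clipboard.writeText(json.data.short)
  return json
}
```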
--------------------------------------------------------------------------------
/workers/short-domain/types.d.ts:
--------------------------------------------------------------------------------
1 | interface ENV {
2 |   GIT_HASH: string
3 |   VERSION: string
4 |   SHORTURLS: KVNamespace
5 |   ADMIN_USERNAME: string
6 |   ADMIN_PASSWORD: string
7 | }
8 | 
9 | // Only for build configuration
10 | interface INTERNAL_ENV {
11 |   DOMAIN?: string
12 |   KV_BINDING?: string
13 |   KV_NAMESPACE_ID?: string
14 |   KV_PREVIEW_ID?: string
15 | }
16 | 
17 | interface Options {
18 |   unbuild?: boolean
19 |   env: INTERNAL_ENV
20 | }
21 | 
--------------------------------------------------------------------------------
/workers/short-domain/wrangler.config.ts:
--------------------------------------------------------------------------------
1 | // This file builds the wrangler configuration.
2 | //
3 | import path from 'path'
4 | import * as execa from 'execa'
5 | import getPort, { portNumbers } from 'get-port'
6 | import type { BuildEntry } from 'unbuild'
7 | import pkg from './package.json'
8 | 
9 | async function wranglerConfig({ unbuild: useUnbuild, env }: Options = { unbuild: false, env: {} }) {
10 |   const port = await getPort({ port: portNumbers(8787, 8887) })
11 | 
12 |   const { default: buildConfig } = await import('./build.config')
13 |   const outDir = buildConfig.outDir || 'dist'
14 | 
15 |   /**
16 |    * @example
17 |    * entries: ['src/worker'] => nameFull: 'src/worker'; outName: 'worker'
18 |    * entries: [{ input: 'src/worker' }] => nameFull: 'src/worker'; outName: 'worker'
19 |    *
20 |    * main: isDev ? `src/worker.ts` : `worker.mjs`
21 |    *
22 |    */
23 |   const entry = (<(string | BuildEntry)[]>buildConfig.entries)[0]
24 |   const nameFull = typeof entry === 'string' ? entry : entry.input
25 |   const outName = path.basename(nameFull)
26 | 
27 |   const vars = {
28 |     GIT_HASH: execa.execaCommandSync('git rev-parse --short HEAD').stdout,
29 |     VERSION: `v${pkg.version}`,
30 |   }
31 | 
32 |   return {
33 |     name: 'short-domain',
34 |     main: useUnbuild ? `${outName}.mjs` : `${nameFull}.ts`,
35 |     compatibility_date: new Date().toISOString().split('T')[0],
36 |     /** If set to `true`, the worker will not be bundled, so the output must be
37 |      * a single file with no module imports; if any exist, an error is thrown.
38 |      *
39 |      * e.g. `import axios from 'axios'`
40 |      */
41 |     // no_bundle: undefined,
42 |     vars: {
43 |       mode: 'default',
44 |       ...vars,
45 |     },
46 |     dev: {
47 |       ip: 'localhost',
48 |       // local_protocol: 'https',
49 |       port,
50 |     },
51 |     env: {
52 |       // For local development; do not publish this environment to Cloudflare.
53 |       localhost: {
54 |         vars: {
55 |           mode: 'localhost',
56 |           ...vars,
57 |         },
58 |         routes: [
59 |           { pattern: `localhost:${port}`, zone_name: `localhost:${port}`, custom_domain: true },
60 |         ],
61 |         kv_namespaces: [
62 |           { binding: env.KV_BINDING, id: env.KV_NAMESPACE_ID, preview_id: env.KV_PREVIEW_ID },
63 |         ],
64 |       },
65 |       production: {
66 |         vars: {
67 |           mode: 'production',
68 |           ...vars,
69 |         },
70 |         routes: env.DOMAIN
71 |           ? [
72 |               { pattern: `s.${env.DOMAIN}`, zone_name: env.DOMAIN, custom_domain: true },
73 |             ]
74 |           : undefined,
75 |         kv_namespaces: [
76 |           { binding: env.KV_BINDING, id: env.KV_NAMESPACE_ID },
77 |         ],
78 |       },
79 |     },
80 |     outDir: useUnbuild ? outDir : undefined,
81 |   }
82 | }
83 | 
84 | export default wranglerConfig
85 | export { wranglerConfig }
86 | 
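The `@example` comment in wrangler.config.ts documents how the first unbuild entry is turned into wrangler's `main` field. The same resolution shown in isolation as a small sketch (the entry values are made up for illustration):

```ts
import path from 'node:path'

type Entry = string | { input: string }

function resolveMain(entries: Entry[], useUnbuild: boolean) {
  const first = entries[0]
  const nameFull = typeof first === 'string' ? first : first.input // e.g. 'src/index'
  const outName = path.basename(nameFull)                          // e.g. 'index'
  // Dev runs point wrangler at the TypeScript source; the unbuild output is used otherwise.
  return useUnbuild ? `${outName}.mjs` : `${nameFull}.ts`
}

console.log(resolveMain(['src/index'], false))            // 'src/index.ts'
console.log(resolveMain([{ input: 'src/index' }], true))  // 'index.mjs'
```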
--------------------------------------------------------------------------------
/workers/utils.ts:
--------------------------------------------------------------------------------
1 | export async function replyText(text: string, env: ENV, init: ResponseInit = {}): Promise<Response> {
2 |   return new Response(text, deepMerge({
3 |     status: 200,
4 |     headers: { 'content-type': 'text/plain;charset=UTF-8', 'version': env.VERSION },
5 |   }, init))
6 | }
7 | 
8 | export async function replyHtml(html: string, env: ENV, init: ResponseInit = {}): Promise<Response> {
9 |   return new Response(html, deepMerge({
10 |     status: 200,
11 |     headers: { 'content-type': 'text/html;charset=UTF-8', 'version': env.VERSION },
12 |   }, init))
13 | }
14 | 
15 | export async function replyUnsupport(options: {}, env: ENV, init: ResponseInit = {}): Promise<Response> {
16 |   return new Response(renderTemplate('Unsupported url {{ url }}', options), deepMerge({
17 |     status: 200,
18 |     headers: { 'content-type': 'text/plain;charset=UTF-8', 'version': env.VERSION },
19 |   }, init))
20 | }
21 | 
22 | export async function replyJson(json: any, env: ENV, init: ResponseInit = {}): Promise<Response> {
23 |   return new Response(JSON.stringify(json), deepMerge({
24 |     status: 200,
25 |     headers: { 'content-type': 'application/json;charset=UTF-8', 'version': env.VERSION },
26 |   }, init))
27 | }
28 | 
29 | /** Get the domain and subdomain from the request url, for setups that use more than one custom domain;
30 |  * if only one domain is set, such as `foo.example.com`, you can ignore this function.
31 |  *
32 |  * @example foo.example.com => { domain: 'example.com', subdomain: 'foo' }
33 |  * @example example.com => { domain: 'example.com', subdomain: '' }
34 |  * @example foo.localhost => { domain: 'localhost', subdomain: 'foo' }
35 |  * @example localhost => { domain: 'localhost', subdomain: '' }
36 |  * @example localhost:8787 => { domain: 'localhost:8787', subdomain: '' }
37 |  */
38 | export function getDomainAndSubdomain(request: Request): { domain: string; subdomain: string } {
39 |   const url = new URL(request.url)
40 |   const hostArr = url.host.split('.')
41 |   let subdomain = ''
42 |   let domain = ''
43 |   if (url.hostname.endsWith('localhost')) {
44 |     subdomain = hostArr.length === 1 ? '' : hostArr[0]
45 |     domain = hostArr.length === 1 ? hostArr[0] : hostArr.slice(1).join('.')
46 |   }
47 |   else {
48 |     subdomain = hostArr.length > 2 ? hostArr[0] : ''
49 |     domain = hostArr.length > 2 ? hostArr.slice(1).join('.') : hostArr.join('.')
50 |   }
51 |   return { domain, subdomain }
52 | }
53 | 
54 | /** Decide whether a request should be cancelled because its url contains certain strings, such as `favicon.ico` or `sw.js`.
55 |  * + A truthy return value means the request should be cancelled.
56 |  *
57 |  * @param request
58 |  * @param matches
59 |  * @returns
60 |  */
61 | export async function needCancelRequest(request: Request, matches: string[] = []): Promise<Response | undefined> {
62 |   const url = new URL(request.url)
63 |   matches = matches.length
64 |     ? matches
65 |     : [
66 |       '/favicon.',
67 |       '/sw.js',
68 |     ]
69 |   const isCancel = matches.some(match => url.pathname.includes(match))
70 |   if (isCancel)
71 |     return replyText('', {} as any, { status: 204 })
72 | }
73 | 
74 | /** Render a template: replace `{{ key }}` with `data[key]`.
75 |  *
76 |  * @example renderTemplate('Hello {{ name }}', { name: 'world' }) => 'Hello world'
77 |  */
78 | export function renderTemplate(content: string, data: Record<string, any>) {
79 |   return content.replace(/\{{\s*([a-zA-Z0-9_]+)\s*}}/g, (match, key) => {
80 |     return data[key] || ''
81 |   })
82 | }
83 | 
84 | /** Deep merge objects.
85 |  *
86 |  * This function takes in a target object and an array of source objects,
87 |  * and deeply merges the properties of the source objects into the target object.
88 |  * If both the target and a source object have a property with the same key,
89 |  * the value from the source object will overwrite the value in the target.
90 |  * If the value at the key in the source object is an object,
91 |  * the function will recursively call itself to merge the values of those objects as well.
92 |  *
93 |  * @example
94 |  * ```ts
95 |  * const target = { a: 1, b: 2, c: { d: 3 } };
96 |  * const source1 = { b: 3, c: { e: 4 } };
97 |  * const source2 = { c: { f: 5 } };
98 |  *
99 |  * deepMerge(target, source1, source2);
100 |  *
101 |  * console.log(target); // { a: 1, b: 3, c: { d: 3, e: 4, f: 5 } }
102 |  * ```
103 |  */
104 | function deepMerge(target: any, ...sources: any[]): any {
105 |   // Iterate through each source object
106 |   for (const source of sources) {
107 |     // If the target and source are both objects, we'll merge them recursively
108 |     if (isObject(target) && isObject(source)) {
109 |       for (const key in source) {
110 |         if (isObject(source[key])) {
111 |           // If the value at the current key is an object, we'll merge it recursively
112 |           if (!target[key]) {
113 |             // If the target doesn't have a value at the current key, we'll create an empty object to merge into
114 |             target[key] = {}
115 |           }
116 |           deepMerge(target[key], source[key])
117 |         }
118 |         else {
119 |           // If the value at the current key is not an object, we'll just overwrite the value in the target with the value in the source
120 |           target[key] = source[key]
121 |         }
122 |       }
123 |     }
124 |   }
125 | 
126 |   return target
127 | }
128 | 
129 | function isObject(item: any): boolean {
130 |   return item && typeof item === 'object' && !Array.isArray(item)
131 | }
132 | 
--------------------------------------------------------------------------------
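A quick usage sketch for the exported helpers, matching the `@example` annotations above; the relative import path is an assumption and may need adjusting to where the snippet lives:

```ts
// Assumes the file sits next to workers/utils.ts; adjust the path as needed.
import { getDomainAndSubdomain, renderTemplate } from './utils'

// Splitting a multi-level host into domain + subdomain, as documented above.
const req = new Request('https://s.example.com/abc123')
console.log(getDomainAndSubdomain(req)) // { domain: 'example.com', subdomain: 's' }

// Template rendering: `{{ key }}` placeholders are looked up in the data object.
console.log(renderTemplate('Unsupported url {{ url }}', { url: 'https://example.com/x' }))
// => 'Unsupported url https://example.com/x'
```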