├── .github └── workflows │ ├── package.yml │ ├── publish-cli.yml │ ├── publish-gui.yml │ └── test.yml ├── .gitignore ├── Cargo.lock ├── Cargo.toml ├── LICENSE.txt ├── README.md ├── app-icon.png ├── cli ├── Cargo.toml ├── bacon.toml └── src │ ├── archive.rs │ ├── args.rs │ ├── auth.rs │ └── main.rs ├── core ├── Cargo.toml ├── bacon.toml ├── resources.tar.gz ├── src │ ├── action_code.rs │ ├── archiver.rs │ ├── archiver │ │ ├── anonymous.rs │ │ ├── download_manager.rs │ │ ├── fetchers.rs │ │ ├── template.rs │ │ └── utils.rs │ ├── atomic_file.rs │ ├── client.rs │ ├── error.rs │ ├── lib.rs │ ├── middleware.rs │ ├── models.rs │ ├── preloaded_store.rs │ └── shared_promise.rs └── templates │ ├── index.hbs │ └── resources │ ├── 205da9bd7e50046f118af4e49e6a562905a2ca26.js │ ├── 79ef348eb9f79f287b6c835ff09169b855d710f2.js │ ├── dd73fca2e692e339380e847ce439a9e2a083d9bb.js │ ├── desktop_a6ebc8d83f637bb00f4f3fe2bed708de5b2f46cb.css │ ├── desktop_theme_18_6044d2798548d883edb7b504e9678e59bef1ea37.css │ ├── desktop_theme_34_b6cc8e9ec8740a61600e8db01d7b9428a3940549.css │ ├── desktop_theme_37_127867e7b59b2a0102d5189a36d24792f6a176f0.css │ ├── desktop_theme_3_8828b367cb8a28b2296c7bfe6f433ad504773044.css │ ├── desktop_theme_52_4859297324a16e5bb8c1f9d01f3bf0ffa6a9b29c.css │ ├── desktop_theme_54_e006d487a3f75a32724103e42208dca26c361ac8.css │ ├── desktop_theme_56_ecee7c2e40c9669a25d0f4e842a1b86202e00733.css │ ├── desktop_theme_58_c33a83ba3ef5ab3b25e5a5619c4161eb0f495dce.css │ ├── desktop_theme_59_0cc413d2f69104e201591ee89508c6059f2237e7.css │ └── f8a88a3a02eaa06e8d2c51ea944df2f7abdc374e.png ├── gui ├── src-tauri │ ├── .gitignore │ ├── Cargo.toml │ ├── Info.plist │ ├── bacon.toml │ ├── build.rs │ ├── entitlements.plist │ ├── icons │ │ ├── 128x128.png │ │ ├── 128x128@2x.png │ │ ├── 32x32.png │ │ ├── Square107x107Logo.png │ │ ├── Square142x142Logo.png │ │ ├── Square150x150Logo.png │ │ ├── Square284x284Logo.png │ │ ├── Square30x30Logo.png │ │ ├── Square310x310Logo.png │ │ ├── Square44x44Logo.png │ │ ├── Square71x71Logo.png │ │ ├── Square89x89Logo.png │ │ ├── StoreLogo.png │ │ ├── icon.icns │ │ ├── icon.ico │ │ └── icon.png │ ├── src │ │ ├── main.rs │ │ └── url_scheme.rs │ └── tauri.conf.json └── src │ ├── App.tsx │ ├── Main.tsx │ ├── bindings.ts │ ├── commands.ts │ ├── index.html │ ├── index.tsx │ ├── states.ts │ └── steps │ ├── Archive.tsx │ ├── Config.tsx │ ├── Finish.tsx │ └── Login.tsx ├── package.json ├── pnpm-lock.yaml ├── screenshots └── 1.png └── tsconfig.json /.github/workflows/package.yml: -------------------------------------------------------------------------------- 1 | on: 2 | push: 3 | branches: 4 | - master 5 | pull_request: 6 | branches: 7 | - master 8 | 9 | name: Package 10 | 11 | jobs: 12 | packaging-test: 13 | name: Packaging test 14 | strategy: 15 | fail-fast: false 16 | matrix: 17 | platform: [ macos-latest, ubuntu-20.04, windows-latest ] 18 | runs-on: ${{ matrix.platform }} 19 | steps: 20 | - uses: actions/checkout@v3 21 | - name: Setup Node 22 | uses: actions/setup-node@v3 23 | with: 24 | node-version: 16 25 | - name: Setup Cargo Toolchain (Mac) 🛎️ 26 | uses: dtolnay/rust-toolchain@stable 27 | if: matrix.platform == 'macos-latest' 28 | with: 29 | targets: aarch64-apple-darwin 30 | - name: Setup Cargo Toolchain 🛎️ 31 | uses: dtolnay/rust-toolchain@stable 32 | if: matrix.platform != 'macos-latest' 33 | - uses: Swatinem/rust-cache@v2 34 | with: 35 | key: ${{ matrix.platform }} 36 | - name: Install Dependencies (Ubuntu only) 37 | if: matrix.platform == 'ubuntu-20.04' 38 | run: | 39 | sudo apt-get 
update 40 | sudo apt-get install -y libwebkit2gtk-4.0-dev \ 41 | build-essential \ 42 | curl \ 43 | wget \ 44 | libgtk-3-dev \ 45 | libayatana-appindicator3-dev \ 46 | librsvg2-dev 47 | - name: Cache Pnpm Modules 48 | uses: actions/cache@v3 49 | with: 50 | path: ~/.pnpm-store 51 | key: ${{ runner.os }}-${{ hashFiles('**/pnpm-lock.yaml') }} 52 | restore-keys: | 53 | ${{ runner.os }}- 54 | - uses: pnpm/action-setup@v2 55 | name: Install App Dependencies 56 | with: 57 | version: latest 58 | run_install: true 59 | - uses: tauri-apps/tauri-action@dev 60 | env: 61 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 62 | with: 63 | args: ${{ matrix.platform == 'macos-latest' && '-v --target universal-apple-darwin' || '-v' }} -------------------------------------------------------------------------------- /.github/workflows/publish-cli.yml: -------------------------------------------------------------------------------- 1 | on: 2 | push: 3 | tags: 4 | - 'shuiyuan-archiver-cli/v*' 5 | 6 | permissions: 7 | contents: write 8 | 9 | name: Publish (CLI) 10 | 11 | jobs: 12 | create_release: 13 | name: Create Release 14 | runs-on: ubuntu-latest 15 | steps: 16 | - uses: actions/checkout@v3 17 | - uses: taiki-e/create-gh-release-action@v1 18 | with: 19 | prefix: 'shuiyuan-archiver-cli/' 20 | draft: true 21 | token: ${{ secrets.GITHUB_TOKEN }} 22 | 23 | publish_cli: 24 | name: Publish (CLI) 25 | strategy: 26 | fail-fast: false 27 | matrix: 28 | include: 29 | - target: aarch64-unknown-linux-musl 30 | os: ubuntu-latest 31 | cross: true 32 | - target: x86_64-unknown-linux-musl 33 | os: ubuntu-latest 34 | cross: true 35 | - target: x86_64-unknown-linux-gnu 36 | os: ubuntu-20.04 37 | cross: false 38 | - target: universal-apple-darwin 39 | os: macos-latest 40 | cross: false 41 | - target: x86_64-pc-windows-msvc 42 | os: windows-latest 43 | cross: false 44 | runs-on: ${{ matrix.os }} 45 | steps: 46 | - uses: actions/checkout@v3 47 | - name: Setup Node 48 | uses: actions/setup-node@v3 49 | with: 50 | node-version: 16 51 | - name: Setup Cargo Toolchain 52 | uses: dtolnay/rust-toolchain@stable 53 | - name: Setup Cross Toolchain 54 | if: matrix.cross 55 | uses: taiki-e/install-action@cross 56 | - uses: Swatinem/rust-cache@v2 57 | with: 58 | key: ${{ matrix.target }} 59 | - uses: taiki-e/upload-rust-binary-action@v1 60 | with: 61 | bin: shuiyuan-archiver-cli 62 | target: ${{ matrix.target }} 63 | checksum: sha512 64 | token: ${{ secrets.GITHUB_TOKEN }} -------------------------------------------------------------------------------- /.github/workflows/publish-gui.yml: -------------------------------------------------------------------------------- 1 | on: 2 | push: 3 | tags: 4 | - 'shuiyuan-archiver/v*' 5 | 6 | permissions: 7 | contents: write 8 | 9 | name: Publish (GUI) 10 | 11 | jobs: 12 | publish_gui: 13 | name: Publish (GUI) 14 | strategy: 15 | fail-fast: false 16 | matrix: 17 | platform: [ macos-latest, ubuntu-20.04, windows-latest ] 18 | runs-on: ${{ matrix.platform }} 19 | steps: 20 | - uses: actions/checkout@v3 21 | - name: Setup Node 22 | uses: actions/setup-node@v3 23 | with: 24 | node-version: 16 25 | - name: Setup Cargo Toolchain (Mac) 🛎️ 26 | uses: dtolnay/rust-toolchain@stable 27 | if: matrix.platform == 'macos-latest' 28 | with: 29 | targets: aarch64-apple-darwin 30 | - name: Setup Cargo Toolchain 🛎️ 31 | uses: dtolnay/rust-toolchain@stable 32 | if: matrix.platform != 'macos-latest' 33 | - uses: Swatinem/rust-cache@v2 34 | with: 35 | key: ${{ matrix.platform }} 36 | - name: Install Dependencies (Ubuntu only) 37 | 
if: matrix.platform == 'ubuntu-20.04' 38 | run: | 39 | sudo apt-get update 40 | sudo apt-get install -y libwebkit2gtk-4.0-dev \ 41 | build-essential \ 42 | curl \ 43 | wget \ 44 | libgtk-3-dev \ 45 | libayatana-appindicator3-dev \ 46 | librsvg2-dev 47 | - name: Cache Pnpm Modules 48 | uses: actions/cache@v3 49 | with: 50 | path: ~/.pnpm-store 51 | key: release-${{ runner.os }}-${{ hashFiles('**/pnpm-lock.yaml') }} 52 | restore-keys: | 53 | release-${{ runner.os }}- 54 | - uses: pnpm/action-setup@v2 55 | name: Install App Dependencies 56 | with: 57 | version: latest 58 | run_install: true 59 | - uses: tauri-apps/tauri-action@dev 60 | env: 61 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 62 | ENABLE_CODE_SIGNING: ${{ secrets.APPLE_CERTIFICATE }} 63 | APPLE_CERTIFICATE: ${{ secrets.APPLE_CERTIFICATE }} 64 | APPLE_CERTIFICATE_PASSWORD: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }} 65 | APPLE_SIGNING_IDENTITY: ${{ secrets.APPLE_SIGNING_IDENTITY }} 66 | APPLE_ID: ${{ secrets.APPLE_ID }} 67 | APPLE_PASSWORD: ${{ secrets.APPLE_PASSWORD }} 68 | with: 69 | tagName: 'shuiyuan-archiver/v__VERSION__' 70 | releaseName: 'shuiyuan-archiver/v__VERSION__' 71 | releaseBody: '' 72 | releaseDraft: true 73 | prerelease: false 74 | args: ${{ matrix.platform == 'macos-latest' && '-v --target universal-apple-darwin' || '-v' }} 75 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | on: 2 | push: 3 | branches: 4 | - master 5 | pull_request: 6 | branches: 7 | - master 8 | 9 | name: Test 10 | 11 | jobs: 12 | lint: 13 | name: Lint 14 | runs-on: ubuntu-latest 15 | steps: 16 | - uses: actions/checkout@v2 17 | name: Checkout 🛎️ 18 | - uses: dtolnay/rust-toolchain@stable 19 | name: Setup Cargo Toolchain 🛎️ 20 | with: 21 | components: rustfmt, clippy 22 | - name: Install Dependencies 23 | run: | 24 | sudo apt-get update 25 | sudo apt-get install -y libwebkit2gtk-4.0-dev \ 26 | build-essential \ 27 | curl \ 28 | wget \ 29 | libgtk-3-dev \ 30 | libayatana-appindicator3-dev \ 31 | librsvg2-dev 32 | - uses: Swatinem/rust-cache@v2 33 | - name: Check Code Format 🔧 34 | run: cargo fmt -- --check 35 | - name: Run Clippy Lints 🔨 36 | run: mkdir dist && cargo clippy --all-targets --all-features --tests 37 | 38 | test_core: 39 | name: Test (core) 40 | runs-on: ubuntu-latest 41 | steps: 42 | - uses: actions/checkout@v3 43 | name: Checkout 🛎️ 44 | - name: Setup Cargo Toolchain 🛎️ 45 | uses: dtolnay/rust-toolchain@stable 46 | - uses: Swatinem/rust-cache@v2 47 | - name: Running Tests 🚀 48 | run: cargo test --package sa_core 49 | 50 | test_cli: 51 | name: Test (cli) 52 | runs-on: ubuntu-latest 53 | steps: 54 | - uses: actions/checkout@v3 55 | name: Checkout 🛎️ 56 | - name: Setup Cargo Toolchain 🛎️ 57 | uses: dtolnay/rust-toolchain@stable 58 | - uses: Swatinem/rust-cache@v2 59 | - name: Running Tests 🚀 60 | run: cargo test --package shuiyuan-archiver-cli 61 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | ### JetBrains template 2 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider 3 | # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 4 | 5 | .idea/ 6 | 7 | # CMake 8 | cmake-build-*/ 9 | 10 | # File-based project format 11 | *.iws 12 | 13 | # IntelliJ 14 | out/ 15 | 16 | # 
mpeltonen/sbt-idea plugin 17 | .idea_modules/ 18 | 19 | # JIRA plugin 20 | atlassian-ide-plugin.xml 21 | 22 | # Crashlytics plugin (for Android Studio and IntelliJ) 23 | com_crashlytics_export_strings.xml 24 | crashlytics.properties 25 | crashlytics-build.properties 26 | fabric.properties 27 | 28 | 29 | ### Node template 30 | # Logs 31 | logs 32 | *.log 33 | npm-debug.log* 34 | yarn-debug.log* 35 | yarn-error.log* 36 | lerna-debug.log* 37 | 38 | # Diagnostic reports (https://nodejs.org/api/report.html) 39 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 40 | 41 | # Runtime data 42 | pids 43 | *.pid 44 | *.seed 45 | *.pid.lock 46 | 47 | # Directory for instrumented libs generated by jscoverage/JSCover 48 | lib-cov 49 | 50 | # Coverage directory used by tools like istanbul 51 | coverage 52 | *.lcov 53 | 54 | # nyc test coverage 55 | .nyc_output 56 | 57 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 58 | .grunt 59 | 60 | # Bower dependency directory (https://bower.io/) 61 | bower_components 62 | 63 | # node-waf configuration 64 | .lock-wscript 65 | 66 | # Compiled binary addons (https://nodejs.org/api/addons.html) 67 | build/Release 68 | 69 | # Dependency directories 70 | node_modules/ 71 | jspm_packages/ 72 | 73 | # Snowpack dependency directory (https://snowpack.dev/) 74 | web_modules/ 75 | 76 | # TypeScript cache 77 | *.tsbuildinfo 78 | 79 | # Optional npm cache directory 80 | .npm 81 | 82 | # Optional eslint cache 83 | .eslintcache 84 | 85 | # Microbundle cache 86 | .rpt2_cache/ 87 | .rts2_cache_cjs/ 88 | .rts2_cache_es/ 89 | .rts2_cache_umd/ 90 | 91 | # Optional REPL history 92 | .node_repl_history 93 | 94 | # Output of 'npm pack' 95 | *.tgz 96 | 97 | # Yarn Integrity file 98 | .yarn-integrity 99 | 100 | # dotenv environment variables file 101 | .env 102 | .env.test 103 | 104 | # parcel-bundler cache (https://parceljs.org/) 105 | .cache 106 | .parcel-cache 107 | 108 | # Next.js build output 109 | .next 110 | out 111 | 112 | # Nuxt.js build / generate output 113 | .nuxt 114 | dist 115 | 116 | # Gatsby files 117 | .cache/ 118 | # Comment in the public line in if your project uses Gatsby and not Next.js 119 | # https://nextjs.org/blog/next-9-1#public-directory-support 120 | # public 121 | 122 | # vuepress build output 123 | .vuepress/dist 124 | 125 | # Serverless directories 126 | .serverless/ 127 | 128 | # FuseBox cache 129 | .fusebox/ 130 | 131 | # DynamoDB Local files 132 | .dynamodb/ 133 | 134 | # TernJS port file 135 | .tern-port 136 | 137 | # Stores VSCode versions used for testing VSCode extensions 138 | .vscode-test 139 | 140 | # yarn v2 141 | .yarn/cache 142 | .yarn/unplugged 143 | .yarn/build-state.yml 144 | .yarn/install-state.gz 145 | .pnp.* 146 | 147 | target -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | members = ["cli", "core", "gui/src-tauri"] 3 | 4 | [profile.release] 5 | strip = true 6 | lto = true 7 | 8 | [profile.dev.package.num-bigint-dig] 9 | opt-level = 3 10 | 11 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2022 LightQuantum 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without 
restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # 维护状态 2 | 3 | # 本项目现在已经不再积极维护,并正在征集维护者/替代品,见 [#25](https://github.com/PhotonQuantum/shuiyuan-archiver/issues/25)。 4 | 5 | --- 6 | 7 | # 水源社区存档工具 8 | 9 | 一个将上海交通大学[水源社区](https://shuiyuan.sjtu.edu.cn)的文章存档的工具。 10 | 11 | screenshot_2 12 | 13 | ## 使用方法 14 | 15 | 1. 下载并安装 msi (Windows) 或 dmg (Mac) 文件。 16 | 17 | > 如果系统询问您是否要运行程序,请选择允许。 18 | > 19 | > Debian 用户可以安装 deb 包,其他 Linux 用户可下载 AppImage,并在 chmod +x 后直接运行 20 | 21 | 2. 在弹出的浏览器窗口中登录水源论坛,选择授权,并将授权代码按 Ctrl+V 粘贴到程序中。 22 | 23 | 3. 将要下载的贴子的完整地址粘贴到输入框中,选择保存位置,然后点击下载。 24 | 25 | 贴子将以 页码.html 的文件名存储。注意在移动存档时,请务必保留 resources 文件夹。 26 | 27 | ## 许可 28 | 29 | 本项目遵循 MIT 协议。详情请参见 [LICENSE](LICENSE.txt)。以下文本为节选译注,仅英文原文有法律效力。 30 | 31 | > 本软件是“如此”提供的,没有任何形式的明示或暗示的保证,包括但不限于对适销性、特定用途的适用性和不侵权的保证。 32 | > 在任何情况下,作者或版权持有人都不对任何索赔、损害或其他责任负责, 33 | > 无论这些追责来自合同、侵权或其它行为中,还是产生于、源于或有关于本软件以及本软件的使用或其它处置。 34 | 35 | # Archiver for Shuiyuan BBS 36 | 37 | A tool to archive an article on [Shuiyuan BBS](https://shuiyuan.sjtu.edu.cn) of SJTU. 38 | 39 | ## Usage 40 | 41 | 1. Install the downloaded msi (Windows) or dmg (Mac) file. 42 | 43 | > If the system asks you whether to run the program, please select "Allow". 44 | > 45 | > Debian users can install deb packages, and other Linux users can download AppImage and run it directly. 46 | 47 | 2. Login to the Shuiyuan BBS, click "Authorize" and paste the authorization code into the program. 48 | 49 | 3. Paste the full URL of the article you want to download into the input box, 50 | select the location to save, and click "下载"(download). 51 | 52 | The article will be saved as "{page}.html". Note that when moving the archive, do not delete the "resources" folder. 53 | 54 | ## License 55 | 56 | This project is licensed under the [MIT License](LICENSE.txt). 
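## CLI

A command-line version, `shuiyuan-archiver-cli`, is also built from this repository (see the `cli` crate and the Publish (CLI) workflow). A minimal invocation might look like the sketch below; flag names follow `cli/src/args.rs`, and the topic URL and token are placeholders:

```sh
# Authorize once and obtain an API token (opens a browser window unless --no-open is passed).
shuiyuan-archiver-cli auth

# Archive a topic. The token can also be supplied via the SHUIYUAN_TOKEN
# environment variable instead of --token.
shuiyuan-archiver-cli archive \
  --url "https://shuiyuan.sjtu.edu.cn/t/topic/123456" \
  --save-to ./archive \
  --token "<your-token>"
```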
57 | -------------------------------------------------------------------------------- /app-icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PhotonQuantum/shuiyuan-archiver/07751f0a94c2bc4c7c8bcdf21f173a373c6e150a/app-icon.png -------------------------------------------------------------------------------- /cli/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "shuiyuan-archiver-cli" 3 | version = "0.1.5" 4 | edition = "2021" 5 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 6 | 7 | [dependencies] 8 | anyhow = "1.0" 9 | clap = { version = "4.1", features = ["derive"] } 10 | console = "0.15" 11 | dialoguer = "0.10" 12 | indicatif = "0.17" 13 | once_cell = "1.17" 14 | rand = "0.8" 15 | regex = "1.7" 16 | sa_core = { path = "../core" } 17 | sanitize-filename = "0.4" 18 | tap = "1.0" 19 | tokio = { version = "1.17", features = ["rt", "sync", "time", "macros", "rt-multi-thread", "parking_lot"] } 20 | tracing = "0.1" 21 | tracing-subscriber = { version = "0.3", features = ["env-filter"] } 22 | webbrowser = "0.8" 23 | -------------------------------------------------------------------------------- /cli/bacon.toml: -------------------------------------------------------------------------------- 1 | # This is a configuration file for the bacon tool 2 | # More info at https://github.com/Canop/bacon 3 | 4 | default_job = "clippy" 5 | 6 | [keybindings] 7 | k = "scroll-lines(-1)" 8 | j = "scroll-lines(1)" 9 | c = "job:clippy" 10 | t = "job:test" 11 | f = "job:fix" 12 | shift-F9 = "toggle-backtrace" 13 | ctrl-r = "toggle-raw-output" 14 | ctrl-u = "scroll-page(-1)" 15 | ctrl-d = "scroll-page(1)" 16 | 17 | [jobs] 18 | 19 | [jobs.clippy] 20 | command = ["cargo", "clippy", "--tests", "--color", "always", "--", "-W", "clippy::all", "-W", "clippy::nursery", "-W", "clippy::pedantic"] 21 | need_stdout = false 22 | 23 | [jobs.test] 24 | command = ["cargo", "test", "--color", "always"] 25 | need_stdout = true 26 | watch = ["tests"] 27 | 28 | [jobs.doc] 29 | command = ["cargo", "doc", "--color", "always", "--no-deps"] 30 | need_stdout = false 31 | 32 | [jobs.fix] 33 | command = ["cargo", "clippy", "--fix", "--allow-staged", "--allow-dirty", "--tests", "--color", "always", "--", "-W", "clippy::all", "-W", "clippy::nursery", "-W", "clippy::pedantic"] 34 | need_stdout = false 35 | on_success = "job:clippy" 36 | -------------------------------------------------------------------------------- /cli/src/archive.rs: -------------------------------------------------------------------------------- 1 | use std::future::Future; 2 | use std::path::Path; 3 | use std::sync::{Arc, Mutex}; 4 | use std::time::Duration; 5 | 6 | use anyhow::bail; 7 | use console::style; 8 | use dialoguer::theme::ColorfulTheme; 9 | use dialoguer::Select; 10 | use indicatif::{MultiProgress, ProgressBar, ProgressStyle}; 11 | use sanitize_filename::sanitize; 12 | use tokio::sync::mpsc; 13 | use tokio::task::JoinHandle; 14 | 15 | use sa_core::archiver; 16 | use sa_core::archiver::{fetch_topic_meta, DownloadEvent}; 17 | use sa_core::client::create_client_with_token; 18 | 19 | #[derive(Debug)] 20 | struct TimeoutInEffect { 21 | handler: JoinHandle<()>, 22 | progress_bar: ProgressBar, 23 | } 24 | 25 | fn rate_limit_callback(progress: MultiProgress) -> impl 'static + Fn(u64) + Send + Sync { 26 | let sty = ProgressStyle::with_template( 27 | "[{elapsed_precise:.yellow}] 
{bar:40.yellow/red} {pos:>7}/{len:7} {msg:.yellow}", 28 | ) 29 | .unwrap() 30 | .progress_chars("##-"); 31 | let timeout_progress: Arc>> = Arc::new(Mutex::new(None)); 32 | move |delay| { 33 | let mut timeout_progress = timeout_progress.lock().unwrap(); 34 | if let Some(timeout) = &mut *timeout_progress { 35 | let remaining = timeout.progress_bar.length().expect("has length") 36 | - timeout.progress_bar.position(); 37 | if delay < remaining { 38 | return; // No need to update the timeout. 39 | } 40 | 41 | // Need to remove the old progress bar. 42 | progress.remove(&timeout.progress_bar); 43 | timeout.handler.abort(); 44 | } 45 | let timeout_bar = ProgressBar::new(delay) 46 | .with_style(sty.clone()) 47 | .with_message("Rate limited..."); 48 | progress.add(timeout_bar.clone()); 49 | let handler = tokio::spawn({ 50 | let timeout_bar = timeout_bar.clone(); 51 | async move { 52 | while timeout_bar.position() < timeout_bar.length().expect("has length") { 53 | timeout_bar.inc(1); 54 | tokio::time::sleep(Duration::from_secs(1)).await; 55 | } 56 | timeout_bar.finish_and_clear(); 57 | } 58 | }); 59 | 60 | *timeout_progress = Some(TimeoutInEffect { 61 | handler, 62 | progress_bar: timeout_bar, 63 | }); 64 | } 65 | } 66 | 67 | fn display_task( 68 | progress: MultiProgress, 69 | mut rx: mpsc::Receiver, 70 | ) -> impl Future { 71 | let sty = ProgressStyle::with_template( 72 | "[{elapsed_precise}] {bar:40.cyan/blue} {pos:>7}/{len:7} {msg}", 73 | ) 74 | .unwrap() 75 | .progress_chars("##-"); 76 | let (mut post_progress, mut asset_progress) = (None, None); 77 | 78 | async move { 79 | while let Some(msg) = rx.recv().await { 80 | match msg { 81 | DownloadEvent::PostChunksTotal(total) => { 82 | let post_prog = ProgressBar::new(u64::from(total)) 83 | .with_style(sty.clone()) 84 | .with_message("Downloading posts..."); 85 | post_progress = Some(post_prog.clone()); 86 | progress.add(post_prog.clone()); 87 | post_prog.enable_steady_tick(Duration::from_millis(100)); 88 | } 89 | DownloadEvent::PostChunksDownloadedInc => { 90 | let post_prog = post_progress.as_ref().unwrap(); 91 | post_prog.inc(1); 92 | if post_prog.position() == post_prog.length().unwrap() { 93 | post_prog.finish_with_message("Downloading posts... done"); 94 | let asset_prog = ProgressBar::new(0) 95 | .with_style(sty.clone()) 96 | .with_message("Downloading assets..."); 97 | asset_progress = Some(asset_prog.clone()); 98 | progress.add(asset_prog.clone()); 99 | asset_prog.enable_steady_tick(Duration::from_millis(100)); 100 | } 101 | } 102 | DownloadEvent::ResourceTotalInc => { 103 | let asset_prog = asset_progress.as_ref().unwrap(); 104 | asset_prog.inc_length(1); 105 | } 106 | DownloadEvent::ResourceDownloadedInc => { 107 | let asset_prog = asset_progress.as_ref().unwrap(); 108 | asset_prog.inc(1); 109 | if asset_prog.position() == asset_prog.length().unwrap() { 110 | asset_prog.finish_with_message("Downloading assets... 
done"); 111 | } 112 | } 113 | } 114 | } 115 | } 116 | } 117 | 118 | const PROMPTS: [(&str, [&str; 2], usize); 2] = [ 119 | ( 120 | "The directory you picked is not empty.", 121 | [ 122 | "Just save in this directory", 123 | "Create a subdirectory and save in it", 124 | ], 125 | 1, 126 | ), 127 | ( 128 | "The directory you picked is not empty, and it seems to be an archive of this topic.", 129 | [ 130 | "Update this archive", 131 | "Create a subdirectory and save in it", 132 | ], 133 | 0, 134 | ), 135 | ]; 136 | 137 | pub async fn archive( 138 | token: &str, 139 | topic_id: u32, 140 | save_to: &Path, 141 | anonymous: bool, 142 | create_subdir: Option, 143 | ) -> anyhow::Result<()> { 144 | let progress = MultiProgress::new(); 145 | 146 | let spinner = ProgressBar::new_spinner().with_message("Fetching metadata..."); 147 | spinner.enable_steady_tick(Duration::from_millis(100)); 148 | 149 | let client = create_client_with_token(token, rate_limit_callback(progress.clone())).await?; 150 | let topic_meta = fetch_topic_meta(&client, topic_id).await?; 151 | let filename = sanitize(format!("水源_{}", &topic_meta.title)); 152 | 153 | spinner.finish_with_message("Fetching metadata... done"); 154 | 155 | let create_subdir_ = if let Some(b) = create_subdir { 156 | if b { 157 | eprintln!("{}", style("A subdirectory will be created.").bold()); 158 | } else { 159 | eprintln!( 160 | "{}", 161 | style("The files will be saved in the directory you picked.").bold() 162 | ); 163 | } 164 | b 165 | } else { 166 | save_to.exists() // Only create a subdir if the path exists. 167 | && save_to.read_dir()?.next().is_some() // ... and is not empty. 168 | && { 169 | let is_archive = save_to.ends_with(&filename); 170 | let prompt = PROMPTS[usize::from(is_archive)]; 171 | if Select::with_theme(&ColorfulTheme::default()) 172 | .with_prompt(prompt.0) 173 | .default(prompt.2) 174 | .items(&prompt.1) 175 | .interact() 176 | .unwrap() 177 | == 0 178 | { 179 | false 180 | } else { 181 | if save_to.join(&filename).exists() && Select::with_theme(&ColorfulTheme::default()) 182 | .with_prompt("A previous archive already exists.") 183 | .items(&["Update it", "Abort"]) 184 | .default(1) 185 | .interact() 186 | .unwrap() == 1 { 187 | bail!("Aborted."); 188 | } 189 | true 190 | } 191 | } 192 | }; 193 | let save_path = if create_subdir_ { 194 | save_to.join(&filename) 195 | } else { 196 | save_to.to_path_buf() 197 | }; 198 | 199 | let (tx, rx) = mpsc::channel(8); 200 | tokio::spawn(display_task(progress, rx)); 201 | archiver::archive(&client, topic_meta, &save_path, anonymous, tx).await?; 202 | 203 | eprintln!("{}", style("Done.").green()); 204 | println!("{}", save_path.display()); 205 | Ok(()) 206 | } 207 | -------------------------------------------------------------------------------- /cli/src/args.rs: -------------------------------------------------------------------------------- 1 | use std::path::PathBuf; 2 | 3 | use clap::{ArgGroup, Args, Parser, Subcommand}; 4 | 5 | #[derive(Parser)] 6 | #[command(author, version, about)] 7 | #[command(propagate_version = true)] 8 | pub struct Opts { 9 | #[command(subcommand)] 10 | pub command: Commands, 11 | } 12 | 13 | #[derive(Subcommand)] 14 | pub enum Commands { 15 | /// Authenticate with Shuiyuan BBS and get the API token. 16 | Auth { 17 | /// Do not open the browser automatically. 18 | #[clap(short, long)] 19 | no_open: bool, 20 | }, 21 | /// Archive a topic. 
22 | Archive(Archive), 23 | } 24 | 25 | #[derive(Args)] 26 | #[command(group(ArgGroup::new("topic").args(["topic_id", "url"]).required(true)))] 27 | pub struct Archive { 28 | /// The ID of the topic to archive. 29 | #[clap(short = 'i', long)] 30 | pub topic_id: Option, 31 | /// The URL of the topic to archive. 32 | #[clap(short, long)] 33 | pub url: Option, 34 | /// The path to save the archive. 35 | #[clap(short, long)] 36 | pub save_to: PathBuf, 37 | /// Whether to mask the username. 38 | #[clap(short, long)] 39 | pub anonymous: bool, 40 | /// API token. You can get one by `auth` command. 41 | #[clap(short, long)] 42 | pub token: Option, 43 | /// Assume yes to create subdirectories even if save path exists and is not empty. 44 | #[clap(long, group = "subdir")] 45 | pub create_subdir: bool, 46 | /// Assume no to create subdirectories even if save path exists and is not empty. 47 | #[clap(long, group = "subdir")] 48 | pub no_create_subdir: bool, 49 | } 50 | -------------------------------------------------------------------------------- /cli/src/auth.rs: -------------------------------------------------------------------------------- 1 | use anyhow::{anyhow, Result}; 2 | use console::style; 3 | use dialoguer::theme::ColorfulTheme; 4 | use dialoguer::Input; 5 | use tracing::warn; 6 | 7 | use sa_core::client::{oauth_url, token_from_payload}; 8 | use sa_core::re_exports::rsa; 9 | 10 | use crate::APP_ID; 11 | 12 | pub fn auth(no_open: bool) -> Result<()> { 13 | let key = 14 | rsa::RsaPrivateKey::new(&mut rand::thread_rng(), 2048).expect("generate rsa private key"); 15 | let url = oauth_url(&APP_ID, key.as_ref(), false); 16 | if !no_open && webbrowser::open(&url).is_ok() { 17 | eprintln!("A browser window should have been opened.\n\ 18 | Please log in and authorize the app. Then copy the authenticate key from the website and paste it here."); 19 | } else { 20 | eprintln!("Please open the following URL in a browser and log in to authorize the app. 
Then copy the authenticate key from the website and paste it here."); 21 | eprintln!("{url}"); 22 | } 23 | let payload: String = Input::with_theme(&ColorfulTheme::default()) 24 | .with_prompt(format!( 25 | "{} {}", 26 | style("?").green().bold(), 27 | style("Paste the authenticate key").bold() 28 | )) 29 | .interact_text() 30 | .unwrap(); 31 | match token_from_payload(&payload, &key) { 32 | Ok(token) => { 33 | eprintln!("\nUse the following token to authenticate in the future."); 34 | eprintln!("{} {token}", style("Token:").bold()); 35 | Ok(()) 36 | } 37 | Err(e) => { 38 | warn!(?e, "Failed to get token from payload."); 39 | Err(anyhow!("This is not a valid token.")) 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /cli/src/main.rs: -------------------------------------------------------------------------------- 1 | use std::process::ExitCode; 2 | use std::str::FromStr; 3 | 4 | use anyhow::{anyhow, Result}; 5 | use clap::Parser; 6 | use console::style; 7 | use once_cell::sync::Lazy; 8 | use regex::Regex; 9 | use tracing_subscriber::EnvFilter; 10 | 11 | use sa_core::re_exports::uuid::Uuid; 12 | 13 | use crate::args::{Archive, Commands, Opts}; 14 | use crate::auth::auth; 15 | 16 | mod archive; 17 | mod args; 18 | mod auth; 19 | 20 | static APP_ID: Lazy = 21 | Lazy::new(|| Uuid::from_str("db559e8d-1bb1-4cf1-a5b8-b5cb4e05ea82").unwrap()); 22 | 23 | #[tokio::main] 24 | async fn main() -> ExitCode { 25 | if let Err(e) = entry().await { 26 | eprintln!("{}", style(e).red()); 27 | ExitCode::FAILURE 28 | } else { 29 | ExitCode::SUCCESS 30 | } 31 | } 32 | 33 | async fn entry() -> Result<()> { 34 | tracing_subscriber::fmt() 35 | .with_env_filter(EnvFilter::from_default_env()) 36 | .init(); 37 | 38 | let opts = Opts::parse(); 39 | match opts.command { 40 | Commands::Auth { no_open } => auth(no_open), 41 | Commands::Archive(Archive { 42 | topic_id, 43 | url, 44 | save_to, 45 | anonymous, 46 | token, 47 | create_subdir, 48 | no_create_subdir, 49 | }) => { 50 | static RE_URL: Lazy = 51 | Lazy::new(|| Regex::new(r#"https://shuiyuan.sjtu.edu.cn/t/topic/(\d+)"#).unwrap()); 52 | let topic = if let Some(url) = url { 53 | RE_URL 54 | .captures(&url) 55 | .and_then(|caps| caps.get(1).expect("regex match").as_str().parse().ok()) 56 | .ok_or_else(|| anyhow!("Invalid token."))? 57 | } else { 58 | topic_id.expect("clap arg match") 59 | }; 60 | let token = token 61 | .or_else(|| std::env::var("SHUIYUAN_TOKEN").ok()) 62 | .ok_or_else(|| anyhow!("Missing token. 
Please specify an API token via `token` argument or `SHUIYUAN_TOKEN` environment variable."))?; 63 | 64 | let create_subdir = create_subdir 65 | .then_some(true) 66 | .or_else(|| no_create_subdir.then_some(false)); 67 | 68 | archive::archive(&token, topic, &save_to, anonymous, create_subdir).await 69 | } 70 | } 71 | } 72 | -------------------------------------------------------------------------------- /core/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "sa_core" 3 | version = "0.2.0" 4 | edition = "2021" 5 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 6 | 7 | [dependencies] 8 | async-trait = "0.1" 9 | base64 = "0.21" 10 | bytes = "1.3" 11 | chrono = { version = "0.4", features = ["serde"] } 12 | fake = "2.4" 13 | flate2 = "1.0" 14 | futures = "0.3" 15 | futures-retry-policies = "0.2" 16 | handlebars = "4.2" 17 | html2text = "0.6" 18 | htmlescape = "0.3" 19 | leaky-bucket = "1.0" 20 | lol_html = "1.0" 21 | mac_address = "1.1" 22 | once_cell = "1.10" 23 | regex = "1.5" 24 | reqwest = { version = "0.11", default-features = false, features = ["rustls-tls-webpki-roots", "multipart", "json", "cookies", "stream"] } 25 | reqwest-middleware = "0.2" 26 | reqwest-retry = "0.2" 27 | retry-policies = "0.1" 28 | rsa = "0.9" 29 | sanitize-filename = "0.4" 30 | serde = { version = "1.0", features = ["derive"] } 31 | serde_json = "1.0" 32 | serde_urlencoded = "0.7" 33 | tap = "1.0" 34 | tar = "0.4" 35 | task-local-extensions = "0.1" 36 | tempfile = "3.3" 37 | thiserror = "1.0" 38 | tokio = { version = "1.17", features = ["rt", "sync", "time", "macros"] } 39 | tokio-stream = "0.1" 40 | tracing = "0.1" 41 | typeshare = "1.0" 42 | uuid = { version = "1.2", features = ["v4", "v5"] } 43 | -------------------------------------------------------------------------------- /core/bacon.toml: -------------------------------------------------------------------------------- 1 | # This is a configuration file for the bacon tool 2 | # More info at https://github.com/Canop/bacon 3 | 4 | default_job = "clippy" 5 | 6 | [keybindings] 7 | k = "scroll-lines(-1)" 8 | j = "scroll-lines(1)" 9 | c = "job:clippy" 10 | t = "job:test" 11 | f = "job:fix" 12 | shift-F9 = "toggle-backtrace" 13 | ctrl-r = "toggle-raw-output" 14 | ctrl-u = "scroll-page(-1)" 15 | ctrl-d = "scroll-page(1)" 16 | 17 | [jobs] 18 | 19 | [jobs.clippy] 20 | command = ["cargo", "clippy", "--tests", "--color", "always", "--", "-W", "clippy::all", "-W", "clippy::nursery", "-W", "clippy::pedantic"] 21 | need_stdout = false 22 | 23 | [jobs.test] 24 | command = ["cargo", "test", "--color", "always"] 25 | need_stdout = true 26 | watch = ["tests"] 27 | 28 | [jobs.doc] 29 | command = ["cargo", "doc", "--color", "always", "--no-deps"] 30 | need_stdout = false 31 | 32 | [jobs.fix] 33 | command = ["cargo", "clippy", "--fix", "--allow-staged", "--allow-dirty", "--tests", "--color", "always", "--", "-W", "clippy::all", "-W", "clippy::nursery", "-W", "clippy::pedantic"] 34 | need_stdout = false 35 | on_success = "job:clippy" 36 | -------------------------------------------------------------------------------- /core/resources.tar.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PhotonQuantum/shuiyuan-archiver/07751f0a94c2bc4c7c8bcdf21f173a373c6e150a/core/resources.tar.gz -------------------------------------------------------------------------------- /core/src/action_code.rs: 
-------------------------------------------------------------------------------- 1 | pub const ACTION_CODE_MAP: &[(&str, &str)] = &[ 2 | ("public_topic", "将此话题设为公开"), 3 | ("open_topic", "将此转换为话题"), 4 | ("private_topic", "将此话题转换为个人消息"), 5 | ("split_topic", "拆分了此话题"), 6 | ("invited_user", "邀请了用户"), 7 | ("invited_group", "邀请了组"), 8 | ("user_left", "在将自己从此消息中移除"), 9 | ("removed_user", "移除了用户"), 10 | ("removed_group", "移除了组"), 11 | ("autobumped", "自动提升"), 12 | ("autoclosed.enabled", "关闭话题"), 13 | ("autoclosed.disabled", "打开话题"), 14 | ("closed.enabled", "关闭话题"), 15 | ("closed.disabled", "打开话题"), 16 | ("archived.enabled", "归档话题"), 17 | ("archived.disabled", "取消归档话题"), 18 | ("pinned.enabled", "置顶话题"), 19 | ("pinned.disabled", "取消置顶话题"), 20 | ("pinned_globally.enabled", "全站置顶话题"), 21 | ("pinned_globally.disabled", "取消全站置顶话题"), 22 | ("visible.enabled", "公开话题"), 23 | ("visible.disabled", "取消公开话题"), 24 | ( 25 | "banner.enabled", 26 | "将此设置为横幅。在用户忽略前,它将显示在每个页面的顶部。", 27 | ), 28 | ( 29 | "banner.disabled", 30 | "移除了此横幅。它将不再显示在每个页面的顶部。", 31 | ), 32 | ]; 33 | -------------------------------------------------------------------------------- /core/src/archiver.rs: -------------------------------------------------------------------------------- 1 | //! Well this file is really a mess. Good luck if you try to modify it. 2 | 3 | use std::fs; 4 | use std::fs::File; 5 | use std::path::Path; 6 | use std::sync::Arc; 7 | 8 | use futures::stream::FuturesOrdered; 9 | use futures::TryStreamExt; 10 | use once_cell::sync::Lazy; 11 | use regex::Regex; 12 | use serde::Serialize; 13 | use tokio::sync::mpsc::Sender; 14 | use tokio::sync::Barrier; 15 | use typeshare::typeshare; 16 | 17 | pub use fetchers::fetch_topic_meta; 18 | 19 | use crate::archiver::download_manager::DownloadManager; 20 | use crate::archiver::template::HANDLEBARS; 21 | use crate::client::{Client, RequestBuilderExt, MAX_CONN, MAX_THROTTLE_WEIGHT}; 22 | use crate::error::{Error, Result}; 23 | pub use crate::models::{Category, TopicMeta}; 24 | use crate::models::{Params, Post, RespPost, RespPosts, Topic}; 25 | use crate::preloaded_store::PreloadedStore; 26 | 27 | mod anonymous; 28 | mod download_manager; 29 | mod fetchers; 30 | mod template; 31 | mod utils; 32 | 33 | const FETCH_PAGE_SIZE: usize = 400; 34 | const EXPORT_PAGE_SIZE: usize = 20; 35 | 36 | /// Download events. 37 | #[typeshare] 38 | #[derive(Debug, Copy, Clone, Serialize)] 39 | #[serde(tag = "kind", content = "value", rename_all = "kebab-case")] 40 | pub enum DownloadEvent { 41 | /// Total post chunks to download. It's determined once metadata is fetched. 42 | PostChunksTotal(u32), 43 | /// A post chunk is downloaded. 44 | PostChunksDownloadedInc, 45 | /// A new resource has been discovered. Total count of resources to download is not known 46 | /// because of incremental fetching. 47 | ResourceTotalInc, 48 | /// A resource is downloaded. 49 | ResourceDownloadedInc, 50 | } 51 | 52 | /// Archive given topic into directory. 53 | /// 54 | /// # Arguments 55 | /// 56 | /// * `topic_id` - The topic id to archive. 57 | /// * `save_to_base` - The base directory to save the archive to. 58 | /// * `anonymous` - Whether to anonymize usernames. 59 | /// * `reporter` - The sender to send download events to. 60 | /// 61 | /// # Errors 62 | /// 63 | /// There are many possible errors. See the `Error` enum for details. 64 | pub async fn archive( 65 | client: &Client, 66 | topic_meta: TopicMeta, 67 | save_to: &Path, 68 | anonymous: bool, 69 | reporter: Sender, 70 | ) -> Result<()> { 71 | // Fetch preload emojis. 
72 | let preloaded_store = PreloadedStore::from_client(client).await?; 73 | 74 | // 1. Create directories and extract resources. 75 | fs::create_dir_all(save_to.join("resources"))?; 76 | template::extract_resources(save_to.join("resources"))?; 77 | 78 | // 2. Fetch all posts and download assets. 79 | let download_manager = 80 | DownloadManager::new(client.clone(), save_to.to_path_buf(), reporter.clone()); 81 | let mut posts = archive_resp_posts( 82 | client, 83 | &download_manager, 84 | &preloaded_store, 85 | anonymous, 86 | &topic_meta, 87 | reporter, 88 | ) 89 | .await?; 90 | 91 | // 3. If anonymous mode enabled, mask all usernames. 92 | if anonymous { 93 | let fake_name_map = anonymous::collect_anonymous_names(&posts); 94 | for post in &mut posts { 95 | post.name = String::new(); 96 | post.username = fake_name_map 97 | .get(&post.username) 98 | .expect("collected") 99 | .clone(); 100 | post.avatar = None; 101 | post.content = anonymous::mask_username_in_cooked(&fake_name_map, post.content.clone()); 102 | } 103 | } 104 | 105 | // 4. Write posts to files. 106 | posts 107 | .chunks(EXPORT_PAGE_SIZE) 108 | .enumerate() 109 | .try_for_each(move |(page, group)| { 110 | write_page(topic_meta.clone(), page + 1, group, save_to) 111 | })?; 112 | 113 | Ok(()) 114 | } 115 | 116 | fn write_page(meta: TopicMeta, page: usize, posts: &[Post], save_to: &Path) -> Result<()> { 117 | let post_count = meta.post_ids.len(); 118 | let total_pages = utils::ceil_div(post_count, EXPORT_PAGE_SIZE); 119 | let last_page = page == total_pages; 120 | let topic = Topic { 121 | id: meta.id, 122 | title: meta.title, 123 | description: meta.description, 124 | categories: meta.categories, 125 | tags: meta.tags, 126 | posts, 127 | page, 128 | total_pages, 129 | prev_page: match page { 130 | 1 => None, 131 | 2 => Some(String::from("index")), 132 | _ => Some(format!("{}", page - 1)), 133 | }, 134 | next_page: if last_page { None } else { Some(page + 1) }, 135 | }; 136 | let params = Params::from(topic); 137 | let filename = if page == 1 { 138 | String::from("index.html") 139 | } else { 140 | format!("{page}.html") 141 | }; 142 | let output = File::create(save_to.join(filename))?; 143 | Ok(HANDLEBARS.render_to_write("index", ¶ms, output)?) 
144 | } 145 | 146 | async fn archive_resp_posts( 147 | client: &Client, 148 | download_manager: &DownloadManager, 149 | preloaded_store: &PreloadedStore, 150 | anonymous: bool, 151 | topic_meta: &TopicMeta, 152 | reporter: Sender, 153 | ) -> Result> { 154 | let topic_id = topic_meta.id; 155 | let posts_total = topic_meta.post_ids.len(); 156 | let chunks_total = utils::ceil_div(posts_total, FETCH_PAGE_SIZE); 157 | reporter 158 | .send(DownloadEvent::PostChunksTotal(chunks_total as u32)) 159 | .await?; 160 | 161 | let barrier = Arc::new(Barrier::new(chunks_total)); 162 | let futs: FuturesOrdered<_> = topic_meta 163 | .post_ids 164 | .chunks(FETCH_PAGE_SIZE) 165 | .map(move |post_ids| { 166 | let reporter = reporter.clone(); 167 | let barrier = barrier.clone(); 168 | 169 | let url = format!("https://shuiyuan.sjtu.edu.cn/t/{topic_id}/posts.json"); 170 | let query: Vec<_> = post_ids.iter().map(|i| ("post_ids[]", i)).collect(); 171 | let req = client 172 | .get(url) 173 | .query(&query) 174 | .with_conn_weight(MAX_CONN as u32) 175 | .with_throttle_weight(MAX_THROTTLE_WEIGHT); 176 | async move { 177 | let resp: RespPosts = client.send_json(req).await?; 178 | 179 | reporter 180 | .send(DownloadEvent::PostChunksDownloadedInc) 181 | .await?; 182 | // Continue only after all posts ids are fetched 183 | barrier.wait().await; 184 | 185 | let futs: FuturesOrdered<_> = resp 186 | .post_stream 187 | .posts 188 | .into_iter() 189 | .map(|resp_post| { 190 | process_resp_post( 191 | client, 192 | download_manager, 193 | preloaded_store, 194 | anonymous, 195 | resp_post, 196 | ) 197 | }) 198 | .collect(); 199 | let posts: Vec = futs.try_collect().await?; 200 | Ok::<_, Error>(posts) 201 | } 202 | }) 203 | .collect(); 204 | 205 | let nested: Vec> = futs.try_collect().await?; 206 | Ok(nested.into_iter().flatten().collect()) 207 | } 208 | 209 | async fn process_resp_post( 210 | client: &Client, 211 | download_manager: &DownloadManager, 212 | preloaded_store: &PreloadedStore, 213 | anonymous: bool, 214 | resp_post: RespPost, 215 | ) -> Result { 216 | static RE_AVATAR: Lazy = Lazy::new(|| Regex::new(r#""#).unwrap()); 217 | 218 | let resp_post = fetchers::fetch_special_post(client, resp_post).await?; 219 | let resp_post = fetchers::reify_vote(resp_post)?; 220 | let cooked = 221 | fetchers::fetch_assets_of_content(download_manager, &resp_post.cooked, anonymous).await?; 222 | let (cooked, avatar) = if anonymous { 223 | (RE_AVATAR.replace_all(&cooked, "").to_string(), None) 224 | } else { 225 | ( 226 | cooked, 227 | Some(fetchers::fetch_avatar(download_manager, &resp_post).await?), 228 | ) 229 | }; 230 | let likes = likes_of_resp_post(&resp_post); 231 | let futs: FuturesOrdered<_> = resp_post 232 | .retorts 233 | .into_iter() 234 | .map(|r| fetchers::fetch_emoji_from_retort(download_manager, preloaded_store, r)) 235 | .collect(); 236 | let emojis = futs.try_collect().await?; 237 | 238 | Ok(Post { 239 | name: resp_post.name, 240 | number: resp_post.post_number, 241 | username: resp_post.username, 242 | created_at: resp_post.created_at.to_string(), 243 | created_at_display: utils::datetime_to_display(&resp_post.created_at), 244 | content: cooked, 245 | likes, 246 | reply_to: resp_post.reply_to_post_number, 247 | emojis, 248 | avatar, 249 | }) 250 | } 251 | 252 | fn likes_of_resp_post(resp_post: &RespPost) -> usize { 253 | resp_post 254 | .actions_summary 255 | .iter() 256 | .filter(|a| a.id == 2) 257 | .find_map(|a| a.count) 258 | .unwrap_or_default() 259 | } 260 | 
-------------------------------------------------------------------------------- /core/src/archiver/anonymous.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashMap; 2 | 3 | use fake::faker::name::en::Name; 4 | use fake::Fake; 5 | use once_cell::sync::Lazy; 6 | use regex::{Captures, Regex}; 7 | 8 | use crate::models::Post; 9 | 10 | // Minimum trimmed length for an ascii username to be replaced globally in a post on anonymous mode. 11 | const MIN_ASCII_NAME_LENGTH: usize = 5; 12 | // Minimum trimmed length for a unicode username to be replaced globally in a post on anonymous mode. 13 | const MIN_UNICODE_NAME_LENGTH: usize = 2; 14 | 15 | static RE_MENTION: Lazy = 16 | Lazy::new(|| Regex::new(r#"@(.*)"#).unwrap()); 17 | static RE_QUOTE: Lazy = 18 | Lazy::new(|| Regex::new(r#" (.*):"#).unwrap()); 19 | static RE_FROM: Lazy = Lazy::new(|| Regex::new(r#"来自 (.*)"#).unwrap()); 20 | 21 | pub fn collect_anonymous_names<'a>( 22 | posts: impl IntoIterator + Clone, 23 | ) -> HashMap { 24 | let mut fake_name_map = HashMap::new(); 25 | for post in posts.clone() { 26 | if !fake_name_map.contains_key(&post.username) { 27 | let project: String = Name().fake(); 28 | fake_name_map.insert(post.username.clone(), project.clone()); 29 | fake_name_map.insert(post.name.clone(), project); 30 | } 31 | } 32 | for post in posts { 33 | // Note: we only get username for mention and name for quote here. 34 | // Theoretically we should fetch the other one too but to avoid network traffic we don't. 35 | for re in [&RE_MENTION, &RE_QUOTE, &RE_FROM] { 36 | for cap in re.captures_iter(&post.content) { 37 | fake_name_map 38 | .entry( 39 | cap.get(1) 40 | .expect("has at least one group") 41 | .as_str() 42 | .to_string(), 43 | ) 44 | .or_insert_with(|| Name().fake()); 45 | } 46 | } 47 | } 48 | fake_name_map 49 | } 50 | 51 | pub fn mask_username_in_cooked(fake_name_map: &HashMap, mut s: String) -> String { 52 | #[allow(clippy::type_complexity)] 53 | let re_f: &[(_, fn(&str) -> String)] = &[ 54 | (&RE_MENTION, |fake_name| { 55 | format!(r#"@{fake_name}"#) 56 | }), 57 | (&RE_QUOTE, |fake_name| format!(r#" {fake_name}:"#)), 58 | (&RE_FROM, |fake_name| format!(r#"来自 {fake_name}"#)), 59 | ]; 60 | for (re, f) in re_f { 61 | s = re 62 | .replace_all(&s, |caps: &Captures| { 63 | let name = caps.get(1).expect("has at least one group"); 64 | let fake_name = fake_name_map 65 | .get(name.as_str()) 66 | .expect("should have been collected") 67 | .as_str(); 68 | f(fake_name) 69 | }) 70 | .to_string(); 71 | } 72 | 73 | fake_name_map.iter().fold(s, |s, (name, fake_name)| { 74 | match (name.is_ascii(), name.trim().len()) { 75 | (true, l) if l >= MIN_ASCII_NAME_LENGTH => s.replace(name, fake_name), 76 | (false, l) if l >= MIN_UNICODE_NAME_LENGTH => s.replace(name, fake_name), 77 | _ => s, 78 | } 79 | }) 80 | } 81 | -------------------------------------------------------------------------------- /core/src/archiver/download_manager.rs: -------------------------------------------------------------------------------- 1 | use std::collections::hash_map::Entry; 2 | use std::collections::{HashMap, HashSet}; 3 | use std::path::PathBuf; 4 | use std::sync::{Arc, Mutex}; 5 | 6 | use reqwest::header::CONTENT_TYPE; 7 | use tap::{Pipe, TapFallible, TapOptional}; 8 | use tokio::sync::mpsc::Sender; 9 | use tokio::sync::Semaphore; 10 | use tracing::warn; 11 | 12 | use crate::archiver::DownloadEvent; 13 | use crate::atomic_file::AtomicFile; 14 | use crate::client::{Client, IntoRequestBuilderWrapped, 
RequestBuilderExt, ResponseExt}; 15 | use crate::error; 16 | use crate::shared_promise::{shared_promise_pair, SharedPromise}; 17 | 18 | const OPEN_FILES_LIMIT: usize = 128; 19 | 20 | pub struct DownloadManager { 21 | client: Client, 22 | downloaded_assets: Mutex>, 23 | downloaded_avatars: Mutex>>, 24 | save_to: PathBuf, 25 | reporter: Sender, 26 | open_files_sem: Arc, 27 | } 28 | 29 | impl DownloadManager { 30 | pub fn new(client: Client, save_to: PathBuf, reporter: Sender) -> Self { 31 | Self { 32 | client, 33 | save_to, 34 | downloaded_assets: Mutex::new(HashSet::new()), 35 | downloaded_avatars: Mutex::new(HashMap::new()), 36 | reporter, 37 | open_files_sem: Arc::new(Semaphore::new(OPEN_FILES_LIMIT)), 38 | } 39 | } 40 | } 41 | 42 | impl DownloadManager { 43 | pub async fn download_asset( 44 | &self, 45 | from: String, 46 | filename: &str, 47 | bypass_limit: bool, 48 | ) -> error::Result<()> { 49 | if !self.downloaded_assets.lock().unwrap().insert(from.clone()) { 50 | return Ok(()); 51 | } 52 | 53 | self.reporter.send(DownloadEvent::ResourceTotalInc).await?; 54 | 55 | let save_path = self.save_to.join("resources").join(filename); 56 | 57 | if !save_path.exists() { 58 | let req = self 59 | .client 60 | .get(from) 61 | .into_request_builder_wrapped() 62 | .pipe(|req| { 63 | if bypass_limit { 64 | req.bypass_max_conn().bypass_throttle() 65 | } else { 66 | req 67 | } 68 | }); 69 | self.client 70 | .with(req, move |req| { 71 | let save_path = save_path.clone(); 72 | let open_files_sem = self.open_files_sem.clone(); 73 | async move { 74 | let resp = req.send().await?; 75 | 76 | let _guard = open_files_sem.acquire().await.expect("semaphore closed"); 77 | let file = AtomicFile::new(&save_path).tap_err(|e| { 78 | warn!(?save_path, ?e, "[download_asset] atomic_file_create"); 79 | })?; 80 | 81 | resp.bytes_to_atomic_file(file).await.tap_err(|e| { 82 | warn!(?save_path, ?e, "[download_asset] atomic_file_write"); 83 | })?; 84 | Ok(()) 85 | } 86 | }) 87 | .await?; 88 | } 89 | 90 | self.reporter 91 | .send(DownloadEvent::ResourceDownloadedInc) 92 | .await?; 93 | Ok(()) 94 | } 95 | pub async fn download_avatar(&self, from: String, filename: &str) -> error::Result { 96 | let filename = PathBuf::from(filename); 97 | let relative_path = PathBuf::from("resources").join(&filename); 98 | let save_path = self.save_to.join(&relative_path); 99 | 100 | #[allow(clippy::significant_drop_in_scrutinee)] 101 | let swear_or_promise = match self.downloaded_avatars.lock().unwrap().entry(from.clone()) { 102 | Entry::Occupied(e) => Err(e.get().clone()), 103 | Entry::Vacant(e) => { 104 | let (swear, promise) = shared_promise_pair(); 105 | e.insert(promise); 106 | Ok(swear) 107 | } 108 | }; 109 | 110 | match swear_or_promise { 111 | Ok(swear) => { 112 | self.reporter.send(DownloadEvent::ResourceTotalInc).await?; 113 | 114 | if !save_path.exists() { 115 | let url = format!("https://shuiyuan.sjtu.edu.cn{from}"); 116 | let req = self.client.get(url); 117 | self.client 118 | .with(req, move |req| { 119 | let mut save_path = save_path.clone(); 120 | let mut filename = filename.clone(); 121 | let open_files_sem = self.open_files_sem.clone(); 122 | async move { 123 | let resp = req.send().await?; 124 | let content_type = 125 | resp.headers().get(CONTENT_TYPE).unwrap().clone(); 126 | 127 | if content_type.to_str().unwrap().contains("svg") { 128 | save_path.set_extension("svg"); 129 | filename.set_extension("svg"); 130 | } 131 | 132 | let _guard = 133 | open_files_sem.acquire().await.expect("semaphore closed"); 134 | let file = 
AtomicFile::new(&save_path).tap_err(|e| { 135 | warn!(?save_path, ?e, "[download_avatar] atomic_file_create"); 136 | })?; 137 | 138 | resp.bytes_to_atomic_file(file).await.tap_err(|e| { 139 | warn!(?save_path, ?e, "[download_avatar] atomic_file_write"); 140 | })?; 141 | Ok(()) 142 | } 143 | }) 144 | .await?; 145 | } 146 | 147 | swear.fulfill(relative_path.clone()); 148 | 149 | self.reporter 150 | .send(DownloadEvent::ResourceDownloadedInc) 151 | .await?; 152 | Ok(relative_path) 153 | } 154 | Err(promise) => Ok( 155 | promise 156 | .recv() 157 | .await 158 | .tap_none(|| { 159 | warn!("Promise not fulfilled which indicates an error in another task."); 160 | }) 161 | .unwrap_or_default(), // error in another task will be collected so what is returned here doesn't matter 162 | ), 163 | } 164 | } 165 | } 166 | -------------------------------------------------------------------------------- /core/src/archiver/fetchers.rs: -------------------------------------------------------------------------------- 1 | use std::cell::RefCell; 2 | use std::iter; 3 | use std::path::PathBuf; 4 | 5 | use futures::stream::FuturesUnordered; 6 | use futures::{stream, TryStreamExt}; 7 | use lol_html::html_content::ContentType; 8 | use lol_html::{element, rewrite_str, RewriteStrSettings}; 9 | use tap::TapFallible; 10 | use tracing::{error, warn}; 11 | 12 | use crate::action_code::ACTION_CODE_MAP; 13 | use crate::archiver::download_manager::DownloadManager; 14 | use crate::archiver::utils; 15 | use crate::archiver::utils::summarize; 16 | use crate::client::Client; 17 | use crate::error; 18 | use crate::models::{ 19 | Category, RespCategory, RespCooked, RespPost, RespRetort, RespTopic, TopicMeta, 20 | }; 21 | use crate::preloaded_store::PreloadedStore; 22 | 23 | const IMAGE_SUFFIXES: [&str; 4] = ["jpg", "jpeg", "gif", "png"]; 24 | const VIDEO_SUFFIXES: [&str; 3] = ["mp4", "mov", "avi"]; 25 | 26 | pub async fn fetch_avatar( 27 | download_manager: &DownloadManager, 28 | resp_post: &RespPost, 29 | ) -> error::Result { 30 | let avatar_url = resp_post.avatar_template.replace("{size}", "40"); 31 | let avatar_filename = format!( 32 | "{}_{}", 33 | utils::calculate_hash(&avatar_url), 34 | sanitize_filename::sanitize(avatar_url.split('/').last().unwrap()) 35 | ); 36 | 37 | download_manager 38 | .download_avatar(avatar_url, &avatar_filename) 39 | .await 40 | .tap_err(|e| error!(post = resp_post.id, ?e, "Failed to download avatar")) 41 | } 42 | 43 | pub async fn fetch_emoji_from_retort( 44 | download_manager: &DownloadManager, 45 | preloaded_store: &PreloadedStore, 46 | r: RespRetort, 47 | ) -> error::Result<(String, usize)> { 48 | let filename = if let Some(emoji_path) = preloaded_store.custom_emoji(&r.emoji) { 49 | let filename = sanitize_filename::sanitize(emoji_path.rsplit('/').next().unwrap()); 50 | download_manager 51 | .download_asset(absolute_url(emoji_path), &filename, false) 52 | .await?; 53 | filename 54 | } else { 55 | let filename = sanitize_filename::sanitize(format!("{}.png", r.emoji)); 56 | let url = format!( 57 | "/images/emoji/google/{}.png", 58 | utils::normalize_emoji(&r.emoji) 59 | ); 60 | download_manager 61 | .download_asset(absolute_url(&url), &filename, false) 62 | .await?; 63 | filename 64 | }; 65 | let count = r.usernames.len(); 66 | Ok((filename, count)) 67 | } 68 | 69 | fn url_to_filename(url: &str) -> String { 70 | let (url, query) = url.split_once('?').unwrap_or((url, "")); 71 | let (url, fragment) = url.split_once('#').unwrap_or((url, "")); 72 | let filename = url.rsplit_once('/').map_or(url, |(_, 
filename)| filename); 73 | let (basename, ext) = filename.rsplit_once('.').unwrap_or((filename, "")); 74 | let mut new_name = basename.to_string(); 75 | if !query.is_empty() { 76 | new_name.push('_'); 77 | new_name.push_str(query); 78 | } 79 | if !fragment.is_empty() { 80 | new_name.push('_'); 81 | new_name.push_str(fragment); 82 | } 83 | if !ext.is_empty() { 84 | new_name.push('.'); 85 | new_name.push_str(ext); 86 | } 87 | sanitize_filename::sanitize(new_name) 88 | } 89 | 90 | fn absolute_url(url: &str) -> String { 91 | if url.starts_with("//") { 92 | format!("https:{url}") 93 | } else if url.starts_with('/') { 94 | format!("https://shuiyuan.sjtu.edu.cn{url}") 95 | } else { 96 | url.to_string() 97 | } 98 | } 99 | 100 | pub async fn fetch_assets_of_content( 101 | download_manager: &DownloadManager, 102 | content: &str, 103 | anonymous: bool, 104 | ) -> error::Result { 105 | let ExtractAssetResult { 106 | urls, 107 | rewritten_content, 108 | } = extract_asset_url(content, anonymous); 109 | 110 | let futs: FuturesUnordered<_> = urls 111 | .into_iter() 112 | .map(|url| async move { 113 | download_manager 114 | .download_asset(absolute_url(&url), &url_to_filename(&url), false) 115 | .await 116 | }) 117 | .collect(); 118 | futs.try_collect().await?; 119 | 120 | Ok(rewritten_content) 121 | } 122 | 123 | /// Fetch topic meta data. 124 | /// 125 | /// # Errors 126 | /// 127 | /// Returns error if failed to fetch topic meta or failed to fetch category names. 128 | pub async fn fetch_topic_meta(client: &Client, topic_id: u32) -> error::Result { 129 | let url = format!("https://shuiyuan.sjtu.edu.cn/t/{topic_id}.json"); 130 | let resp: RespTopic = client.send_json(client.get(url)).await?; 131 | 132 | let first_post = resp.post_stream.posts.first().expect("at least one post"); 133 | let description = utils::summarize(&first_post.cooked); 134 | 135 | Ok(TopicMeta { 136 | id: topic_id, 137 | title: resp.title, 138 | description, 139 | categories: categories_from_id(client, resp.category_id).await?, 140 | tags: resp.tags, 141 | post_ids: resp.post_stream.stream.expect("exists"), 142 | }) 143 | } 144 | 145 | /// Get category names from a leaf category id. 146 | async fn categories_from_id(client: &Client, leaf_id: usize) -> error::Result> { 147 | stream::try_unfold(leaf_id, |id| async move { 148 | let url = format!("https://shuiyuan.sjtu.edu.cn/c/{id}/show.json"); 149 | let resp: RespCategory = client.send_json(client.get(url)).await?; 150 | 151 | let yielded = resp.category.inner; 152 | let next = resp.category.parent_category_id; 153 | Ok(next.map(|id| (yielded, id))) 154 | }) 155 | .try_collect() 156 | .await 157 | } 158 | 159 | /// Reveal hidden posts and convert system messages. 160 | pub async fn fetch_special_post(client: &Client, post: RespPost) -> error::Result { 161 | if let Some((_, system_msg)) = post 162 | .action_code 163 | .as_ref() 164 | .and_then(|code| ACTION_CODE_MAP.iter().find(|(c, _)| c == code)) 165 | { 166 | Ok(RespPost { 167 | cooked: format!("

系统消息:{system_msg}

"), 168 | ..post 169 | }) 170 | } else if post.cooked_hidden { 171 | let url = format!("https://shuiyuan.sjtu.edu.cn/posts/{}/cooked.json", post.id); 172 | let resp: RespCooked = client.send_json(client.get(url)).await?; 173 | Ok(RespPost { 174 | cooked: format!(r#"

被折叠的内容

{}"#, resp.cooked), 175 | ..post 176 | }) 177 | } else { 178 | Ok(post) 179 | } 180 | } 181 | 182 | pub fn reify_vote(post: RespPost) -> error::Result { 183 | if post.polls.is_empty() { 184 | return Ok(post); 185 | } 186 | 187 | let rewrites = post.polls.iter().flat_map(|poll| { 188 | iter::once(element!( 189 | format!( 190 | r#"div.poll[data-poll-name="{}"] span.info-number"#, 191 | poll.name 192 | ), 193 | |el| { 194 | el.set_inner_content(&poll.voters.to_string(), ContentType::Text); 195 | Ok(()) 196 | } 197 | )) 198 | .chain(poll.options.iter().map(|option| { 199 | element!( 200 | format!( 201 | r#"div.poll[data-poll-name="{}"] li[data-poll-option-id="{}"]"#, 202 | poll.name, option.id 203 | ), 204 | |el| { 205 | if let Some(votes) = option.votes { 206 | el.append(&format!(" - {votes} 票"), ContentType::Text); 207 | return Ok(()); 208 | } 209 | let title = summarize(&option.html); 210 | warn!( 211 | "No vote count for option {} available. \ 212 | Please check if results are protected (e.g. display on vote).", 213 | title.trim() 214 | ); 215 | Ok(()) 216 | } 217 | ) 218 | })) 219 | }); 220 | 221 | let cooked = lol_html::rewrite_str( 222 | &post.cooked, 223 | RewriteStrSettings { 224 | element_content_handlers: rewrites.collect(), 225 | ..RewriteStrSettings::default() 226 | }, 227 | )?; 228 | Ok(RespPost { cooked, ..post }) 229 | } 230 | 231 | fn rewrite_srcset(attr: &str, mut rewrite: impl FnMut(&str) -> Option) -> Option { 232 | attr.split(',') 233 | .map(|s| { 234 | let a = s.chars().take_while(|c| c.is_whitespace()).count(); 235 | let b = s.chars().rev().take_while(|c| c.is_whitespace()).count(); 236 | (a, &s[a..s.len() - b], b) 237 | }) 238 | .map(|(a, s, b)| { 239 | if let Some((url, scale)) = s.rsplit_once(' ') { 240 | rewrite(url).map(|s| format!("{}{} {}{}", " ".repeat(a), s, scale, " ".repeat(b))) 241 | } else { 242 | rewrite(s).map(|s| format!("{}{}{}", " ".repeat(a), s, " ".repeat(b))) 243 | } 244 | }) 245 | .try_fold(String::new(), |mut acc, s| { 246 | acc.push_str(&s?); 247 | Some(acc) 248 | }) 249 | } 250 | 251 | fn filter_media(url: &str) -> bool { 252 | let no_query = url.rsplit_once('?').map_or(url, |(url, _)| url); 253 | let no_fragment = no_query.rsplit_once('#').map_or(no_query, |(url, _)| url); 254 | let filename = no_fragment 255 | .rsplit_once('/') 256 | .map_or(no_fragment, |(_, filename)| filename); 257 | let ext = filename.rsplit_once('.').map_or(filename, |(_, ext)| ext); 258 | VIDEO_SUFFIXES.iter().any(|&s| ext.eq_ignore_ascii_case(s)) 259 | || IMAGE_SUFFIXES.iter().any(|&s| ext.eq_ignore_ascii_case(s)) 260 | } 261 | 262 | struct ExtractAssetResult { 263 | urls: Vec, 264 | rewritten_content: String, 265 | } 266 | 267 | fn extract_asset_url(content: &str, anonymous: bool) -> ExtractAssetResult { 268 | let urls = RefCell::new(vec![]); 269 | 270 | let a_rule = element!("a", |el| { 271 | if let Some(url) = el.get_attribute("src") { 272 | if filter_media(&url) { 273 | el.set_attribute("src", &format!("resources/{}", url_to_filename(&url)))?; 274 | urls.borrow_mut().push(url); 275 | } 276 | } 277 | if let Some(srcset) = el.get_attribute("srcset") { 278 | let mut srcset_imgs = vec![]; 279 | if let Some(srcset) = rewrite_srcset(&srcset, |url| { 280 | if filter_media(url) { 281 | srcset_imgs.push(url.to_string()); 282 | Some(format!("resources/{}", url_to_filename(url))) 283 | } else { 284 | None 285 | } 286 | }) { 287 | el.set_attribute("srcset", &srcset)?; 288 | urls.borrow_mut().extend(srcset_imgs); 289 | } 290 | } 291 | Ok(()) 292 | }); 293 | 294 | let img_n_source 
= if anonymous { 295 | "img:not(.avatar), source" 296 | } else { 297 | "img, source" 298 | }; 299 | let img_rule = element!(img_n_source, |el| { 300 | if let Some(url) = el.get_attribute("src") { 301 | if filter_media(&url) { 302 | el.set_attribute("src", &format!("resources/{}", url_to_filename(&url)))?; 303 | urls.borrow_mut().push(url); 304 | } 305 | } 306 | if let Some(srcset) = el.get_attribute("srcset") { 307 | let mut srcset_imgs = vec![]; 308 | if let Some(srcset) = rewrite_srcset(&srcset, |url| { 309 | if filter_media(url) { 310 | srcset_imgs.push(url.to_string()); 311 | Some(format!("resources/{}", url_to_filename(url))) 312 | } else { 313 | None 314 | } 315 | }) { 316 | el.set_attribute("srcset", &srcset)?; 317 | urls.borrow_mut().extend(srcset_imgs); 318 | } 319 | } 320 | Ok(()) 321 | }); 322 | 323 | let rewritten_content = rewrite_str( 324 | content, 325 | RewriteStrSettings { 326 | element_content_handlers: vec![a_rule, img_rule], 327 | ..RewriteStrSettings::default() 328 | }, 329 | ) 330 | .unwrap(); 331 | ExtractAssetResult { 332 | urls: urls.into_inner(), 333 | rewritten_content, 334 | } 335 | } 336 | -------------------------------------------------------------------------------- /core/src/archiver/template.rs: -------------------------------------------------------------------------------- 1 | use std::io::Cursor; 2 | use std::path::Path; 3 | 4 | use handlebars::no_escape; 5 | use handlebars::Handlebars; 6 | use handlebars::{handlebars_helper, html_escape}; 7 | use once_cell::sync::Lazy; 8 | 9 | use crate::error; 10 | 11 | const TEMPLATE: &str = include_str!("../../templates/index.hbs"); 12 | const RESOURCES: &[u8] = include_bytes!("../../resources.tar.gz"); 13 | 14 | handlebars_helper!(escape: | x: String | html_escape( & x)); 15 | 16 | pub static HANDLEBARS: Lazy> = Lazy::new(|| { 17 | let mut handlebars = Handlebars::new(); 18 | handlebars.register_escape_fn(no_escape); 19 | handlebars.set_strict_mode(true); 20 | handlebars.register_helper("escape", Box::new(escape)); 21 | handlebars 22 | .register_template_string("index", TEMPLATE) 23 | .unwrap(); 24 | handlebars 25 | }); 26 | 27 | pub fn extract_resources(to: impl AsRef) -> error::Result<()> { 28 | let mut archive = tar::Archive::new(flate2::read::GzDecoder::new(Cursor::new(RESOURCES))); 29 | archive.unpack(to)?; 30 | Ok(()) 31 | } 32 | -------------------------------------------------------------------------------- /core/src/archiver/utils.rs: -------------------------------------------------------------------------------- 1 | use std::collections::hash_map::DefaultHasher; 2 | use std::fmt::Display; 3 | use std::hash::{Hash, Hasher}; 4 | 5 | use chrono::{DateTime, Local, Utc}; 6 | use html2text::render::text_renderer::TrivialDecorator; 7 | use once_cell::sync::Lazy; 8 | use regex::Regex; 9 | 10 | pub fn ceil_div(x: usize, y: usize) -> usize { 11 | x / y + usize::from(x % y != 0) 12 | } 13 | 14 | pub fn calculate_hash(t: &T) -> u64 { 15 | let mut s = DefaultHasher::new(); 16 | t.hash(&mut s); 17 | s.finish() 18 | } 19 | 20 | pub fn normalize_emoji(emoji: &str) -> impl Display + '_ { 21 | // For toned emoji, see 22 | // https://github.com/discourse/discourse/blob/c85e3e80838d75d8eec132267e2903d729f12aa4/app/models/emoji.rb#L104 23 | static EMOJI_RE: Lazy = Lazy::new(|| Regex::new(r"(.+):t([1-6])").unwrap()); 24 | EMOJI_RE.replace_all(emoji.trim_matches(':'), "$1/$2") 25 | } 26 | 27 | pub fn summarize(content: &str) -> String { 28 | html2text::parse(content.as_bytes()) 29 | .render(120, TrivialDecorator::new()) 30 | 
.into_string() 31 | } 32 | 33 | pub fn datetime_to_display(datetime: &DateTime) -> String { 34 | datetime 35 | .with_timezone(&Local) 36 | .format("%Y年%m月%d日 %H:%M") 37 | .to_string() 38 | } 39 | -------------------------------------------------------------------------------- /core/src/atomic_file.rs: -------------------------------------------------------------------------------- 1 | #[cfg(test)] 2 | use std::io; 3 | use std::io::{BufWriter, Write}; 4 | use std::path::{Path, PathBuf}; 5 | #[cfg(test)] 6 | use std::sync::atomic::AtomicBool; 7 | #[cfg(test)] 8 | use std::sync::atomic::Ordering; 9 | 10 | use bytes::Bytes; 11 | use futures::FutureExt; 12 | use tap::TapFallible; 13 | use tempfile::NamedTempFile; 14 | use tokio::runtime::Handle; 15 | use tokio::sync::oneshot::error::TryRecvError; 16 | use tokio::sync::{mpsc, oneshot}; 17 | use tokio::task::JoinHandle; 18 | use tracing::warn; 19 | 20 | use crate::error::{Error, Result}; 21 | 22 | #[cfg(test)] 23 | static WRITE_FAIL: AtomicBool = AtomicBool::new(false); 24 | 25 | #[derive(Debug)] 26 | enum Event { 27 | Data(Bytes), 28 | Finish, 29 | } 30 | 31 | pub struct AtomicFile { 32 | handle: JoinHandle<()>, 33 | data_tx: mpsc::Sender, 34 | cancel_tx: Option>, 35 | result_rx: Option>>, 36 | } 37 | 38 | impl Drop for AtomicFile { 39 | fn drop(&mut self) { 40 | self.handle.abort(); 41 | } 42 | } 43 | 44 | impl AtomicFile { 45 | pub fn new(path: &Path) -> Result { 46 | let file = NamedTempFile::new_in(path.parent().expect("file has parent"))?; 47 | let (data_tx, data_rx) = mpsc::channel(16); 48 | let (cancel_tx, cancel_rx) = oneshot::channel(); 49 | let (result_tx, result_rx) = oneshot::channel(); 50 | let handle = tokio::task::spawn_blocking({ 51 | let path = path.to_path_buf(); 52 | move || Self::blocking_task(file, path, data_rx, cancel_rx, result_tx) 53 | }); 54 | Ok(Self { 55 | handle, 56 | data_tx, 57 | cancel_tx: Some(cancel_tx), 58 | result_rx: Some(result_rx), 59 | }) 60 | } 61 | pub async fn write(&mut self, data: Bytes) -> Result<()> { 62 | let result = self 63 | .result_rx 64 | .as_mut() 65 | .ok_or(Error::AtomicFilePoisoned)? 66 | .try_recv(); 67 | match result { 68 | Ok(Ok(())) => unreachable!("finalized or cancel channel closed"), 69 | Ok(Err(e)) => { 70 | self.result_rx.take().expect("poison"); 71 | return Err(e); 72 | } // write error 73 | Err(TryRecvError::Empty) => (), // no error 74 | Err(TryRecvError::Closed) => { 75 | unreachable!("sync thread dead without sending result, or poll after complete") 76 | } 77 | } 78 | self.data_tx.send(Event::Data(data)).await?; 79 | Ok(()) 80 | } 81 | 82 | /// Commit the file to the final path. 83 | /// 84 | /// # Errors 85 | /// 86 | /// Returns error if data task is dead, or any error occurs during file write. 87 | pub async fn commit(mut self) -> Result<()> { 88 | if let Some(result_rx) = self.result_rx.take() { 89 | self.data_tx 90 | .send(Event::Finish) 91 | .await 92 | .expect("sync thread dead"); 93 | result_rx 94 | .await 95 | .expect("sync thread dead without sending result") 96 | } else { 97 | Err(Error::AtomicFilePoisoned) 98 | } 99 | } 100 | 101 | /// Commit the file to the final path. 102 | /// 103 | /// # Errors 104 | /// 105 | /// Returns error if data task is dead, or any error occurs during file write. 
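///
/// Unlike [`Self::commit`], this discards the temporary file; nothing is
/// written to the final path.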
106 | pub async fn cancel(mut self) -> Result<()> { 107 | if let Some(result_rx) = self.result_rx.take() { 108 | self.cancel_tx 109 | .take() 110 | .expect("can only cancel once") 111 | .send(()) 112 | .expect("sync thread dead"); 113 | result_rx 114 | .await 115 | .expect("sync thread dead without sending result") 116 | } else { 117 | Err(Error::AtomicFilePoisoned) 118 | } 119 | } 120 | fn blocking_task( 121 | file: NamedTempFile, 122 | path: PathBuf, 123 | mut data_rx: mpsc::Receiver, 124 | cancel_rx: oneshot::Receiver<()>, 125 | result_tx: oneshot::Sender>, 126 | ) { 127 | let mut writer = BufWriter::with_capacity(64 * 1024, file); 128 | let mut cancel_rx = cancel_rx.fuse(); 129 | let res = Handle::current().block_on(async move { 130 | loop { 131 | break tokio::select! { 132 | res = data_rx.recv() => match res { 133 | Some(Event::Data(data)) => match writer.write_all(&data) { 134 | Ok(()) => { // file write succeeded 135 | #[cfg(test)] 136 | if WRITE_FAIL.load(Ordering::SeqCst) { 137 | Err(Error::from(io::Error::new(io::ErrorKind::Other, "test"))) 138 | } else { 139 | continue 140 | } 141 | #[cfg(not(test))] 142 | continue 143 | }, 144 | Err(e) => Err(Error::from(e)), // file write failed 145 | }, 146 | Some(Event::Finish) => writer 147 | .into_inner() 148 | .map_err(std::io::IntoInnerError::into_error)? 149 | .persist(path) 150 | .map_err(std::convert::Into::into) 151 | .and_then(|f| Ok(f.sync_all()?)), 152 | None => continue, // data channel closed, should receive cancel_rx ok or err soon 153 | }, 154 | res = &mut cancel_rx => match res { 155 | Ok(()) => Ok(()), // cancel requested 156 | Err(_e) => { 157 | warn!("AtomicFile closed without cancel or commit"); 158 | Ok(()) 159 | } 160 | } 161 | }; 162 | } 163 | }); 164 | drop( 165 | result_tx 166 | .send(res) 167 | .tap_err(|_| warn!("async side is not listening")), 168 | ); 169 | } 170 | } 171 | 172 | #[cfg(test)] 173 | mod tests { 174 | use std::sync::atomic::Ordering; 175 | use std::time::Duration; 176 | use tokio::sync::RwLock; 177 | 178 | use bytes::Bytes; 179 | use once_cell::sync::Lazy; 180 | use tempfile::TempDir; 181 | 182 | use crate::error::Error; 183 | 184 | use super::AtomicFile; 185 | use super::WRITE_FAIL; 186 | 187 | /// Read lock for normal behavior and write lock for fail cases. 
188 | static TEST_LOCK: Lazy> = Lazy::new(|| RwLock::new(())); 189 | 190 | #[tokio::test] 191 | async fn must_commit() { 192 | let _guard = TEST_LOCK.read().await; 193 | 194 | let dir = TempDir::new().unwrap(); 195 | let mut f = AtomicFile::new(&dir.path().join("test")).unwrap(); 196 | 197 | f.write(Bytes::from("hello world")).await.unwrap(); 198 | f.commit().await.unwrap(); 199 | 200 | assert_eq!( 201 | std::fs::read_to_string(dir.path().join("test")).unwrap(), 202 | "hello world" 203 | ); 204 | } 205 | 206 | #[tokio::test] 207 | async fn must_cancel() { 208 | let _guard = TEST_LOCK.read().await; 209 | 210 | let dir = TempDir::new().unwrap(); 211 | let mut f = AtomicFile::new(&dir.path().join("test")).unwrap(); 212 | 213 | f.write(Bytes::from("hello world")).await.unwrap(); 214 | f.cancel().await.unwrap(); 215 | 216 | tokio::time::sleep(Duration::from_millis(500)).await; // hope that's enough 217 | assert!(!dir.path().join("test").exists()); 218 | } 219 | 220 | #[tokio::test] 221 | async fn must_cancel_on_drop() { 222 | let _guard = TEST_LOCK.read().await; 223 | 224 | let dir = TempDir::new().unwrap(); 225 | 226 | { 227 | let mut f = AtomicFile::new(&dir.path().join("test")).unwrap(); 228 | f.write(Bytes::from("hello world")).await.unwrap(); 229 | } 230 | 231 | tokio::time::sleep(Duration::from_millis(500)).await; // hope that's enough 232 | assert!(!dir.path().join("test").exists()); 233 | } 234 | 235 | #[tokio::test] 236 | async fn must_error_on_write() { 237 | let _guard = TEST_LOCK.write().await; 238 | WRITE_FAIL.store(true, Ordering::SeqCst); 239 | 240 | let dir = TempDir::new().unwrap(); 241 | let mut f = AtomicFile::new(&dir.path().join("test")).unwrap(); 242 | 243 | assert!(f.write(Bytes::from("hello world")).await.is_ok()); // write failed, but no error at once 244 | tokio::time::sleep(Duration::from_millis(500)).await; // hope that's enough 245 | assert!(f.write(Bytes::from("hello world")).await.is_err()); // last error returned 246 | assert!(matches!( 247 | f.write(Bytes::from("hello world")).await, 248 | Err(Error::AtomicFilePoisoned) 249 | )); // may not write afterwards 250 | 251 | assert!(matches!(f.cancel().await, Err(Error::AtomicFilePoisoned))); // poisoned after error 252 | 253 | WRITE_FAIL.store(false, Ordering::SeqCst); 254 | } 255 | } 256 | -------------------------------------------------------------------------------- /core/src/client.rs: -------------------------------------------------------------------------------- 1 | use std::fmt::{Debug, Formatter}; 2 | use std::future::Future; 3 | use std::iter; 4 | use std::ops::Deref; 5 | use std::sync::Arc; 6 | use std::time::Duration; 7 | 8 | use base64::prelude::BASE64_STANDARD; 9 | use base64::Engine; 10 | use futures::StreamExt; 11 | use futures_retry_policies::retry_policies::RetryPolicies; 12 | use leaky_bucket::RateLimiter; 13 | use reqwest::header::{HeaderName, HeaderValue}; 14 | use reqwest::{ClientBuilder, Response}; 15 | use reqwest_middleware::{ 16 | ClientBuilder as ClientBuilderWithMiddleware, ClientWithMiddleware, RequestBuilder, 17 | }; 18 | use reqwest_retry::policies::ExponentialBackoff; 19 | use rsa::pkcs1::EncodeRsaPublicKey; 20 | use rsa::{Pkcs1v15Encrypt, RsaPrivateKey, RsaPublicKey}; 21 | use serde::de::DeserializeOwned; 22 | use serde::Deserialize; 23 | use tokio::sync::Semaphore; 24 | use uuid::Uuid; 25 | 26 | use crate::atomic_file::AtomicFile; 27 | use crate::error::{Error, Result}; 28 | use crate::middleware::{BypassThrottle, RetryMiddleware, TimeoutMiddleware}; 29 | 30 | pub const MAX_CONN: 
usize = 4; 31 | pub const LOOSE_MAX_CONN: usize = 64; 32 | pub const MAX_THROTTLE_WEIGHT: usize = 4; 33 | pub const MAX_STREAM_STUCK_TIME: Duration = Duration::from_secs(10); 34 | 35 | const DEFAULT_BACKOFF: ExponentialBackoff = ExponentialBackoff { 36 | max_n_retries: 3, 37 | min_retry_interval: Duration::from_secs(1), 38 | max_retry_interval: Duration::from_secs(30 * 60), 39 | backoff_exponent: 3, 40 | }; 41 | 42 | #[derive(Debug, Deserialize)] 43 | struct Payload { 44 | key: String, 45 | } 46 | 47 | fn generate_client_id(app_id: &Uuid) -> String { 48 | let mac = mac_address::get_mac_address() 49 | .unwrap() 50 | .expect("No mac address found"); 51 | let client_id = Uuid::new_v5(app_id, &mac.bytes()); 52 | client_id.to_string() 53 | } 54 | 55 | /// Generate the OAuth URL from given app ID and public key. 56 | #[must_use] 57 | pub fn oauth_url(app_id: &Uuid, key: &RsaPublicKey, callback: bool) -> String { 58 | let client_id = generate_client_id(app_id); 59 | let pubkey = key 60 | .to_pkcs1_pem(Default::default()) 61 | .expect("failed to encode key"); 62 | let mut query = vec![ 63 | ("application_name", "Shuiyuan Archiver"), 64 | ("client_id", &client_id), 65 | ("scopes", "session_info,read"), 66 | ("nonce", "1"), 67 | ("public_key", &pubkey), 68 | ]; 69 | if callback { 70 | query.push(("auth_redirect", "discourse://auth_redirect")); 71 | } 72 | let parsed_query = serde_urlencoded::to_string(query).expect("failed to encode query"); 73 | format!("https://shuiyuan.sjtu.edu.cn/user-api-key/new?{parsed_query}") 74 | } 75 | 76 | /// Unpack the OAuth token from the given payload. 77 | /// 78 | /// # Errors 79 | /// 80 | /// This function will return an error if the payload is invalid. 81 | pub fn token_from_payload(payload: &str, key: &RsaPrivateKey) -> Result { 82 | let ciphertext = BASE64_STANDARD.decode(payload.replace(' ', "").trim())?; 83 | 84 | let decrypted = key.decrypt(Pkcs1v15Encrypt, &ciphertext)?; 85 | 86 | Ok(serde_json::from_slice::(&decrypted)?.key) 87 | } 88 | 89 | pub struct RequestBuilderWrapped { 90 | req: RequestBuilder, 91 | sem_weight: u32, 92 | throttle_weight: usize, 93 | bypass_max_conn: bool, 94 | bypass_throttle: bool, 95 | } 96 | 97 | pub trait RequestBuilderExt { 98 | fn with_conn_weight(self, weight: u32) -> RequestBuilderWrapped; 99 | fn with_throttle_weight(self, weight: usize) -> RequestBuilderWrapped; 100 | fn bypass_max_conn(self) -> RequestBuilderWrapped; 101 | fn bypass_throttle(self) -> RequestBuilderWrapped; 102 | } 103 | 104 | impl RequestBuilderExt for RequestBuilderWrapped { 105 | fn with_conn_weight(self, weight: u32) -> RequestBuilderWrapped { 106 | Self { 107 | sem_weight: weight, 108 | ..self 109 | } 110 | } 111 | 112 | fn with_throttle_weight(self, weight: usize) -> RequestBuilderWrapped { 113 | Self { 114 | throttle_weight: weight, 115 | ..self 116 | } 117 | } 118 | 119 | fn bypass_max_conn(self) -> RequestBuilderWrapped { 120 | Self { 121 | bypass_max_conn: true, 122 | ..self 123 | } 124 | } 125 | 126 | fn bypass_throttle(self) -> RequestBuilderWrapped { 127 | Self { 128 | bypass_throttle: true, 129 | ..self 130 | } 131 | } 132 | } 133 | 134 | impl RequestBuilderExt for RequestBuilder { 135 | fn with_conn_weight(self, weight: u32) -> RequestBuilderWrapped { 136 | RequestBuilderWrapped { 137 | sem_weight: weight, 138 | ..self.into_request_builder_wrapped() 139 | } 140 | } 141 | 142 | fn with_throttle_weight(self, weight: usize) -> RequestBuilderWrapped { 143 | RequestBuilderWrapped { 144 | throttle_weight: weight, 145 | 
..self.into_request_builder_wrapped() 146 | } 147 | } 148 | 149 | fn bypass_max_conn(self) -> RequestBuilderWrapped { 150 | RequestBuilderWrapped { 151 | bypass_max_conn: true, 152 | ..self.into_request_builder_wrapped() 153 | } 154 | } 155 | 156 | fn bypass_throttle(self) -> RequestBuilderWrapped { 157 | RequestBuilderWrapped { 158 | bypass_throttle: true, 159 | ..self.into_request_builder_wrapped() 160 | } 161 | } 162 | } 163 | 164 | pub trait IntoRequestBuilderWrapped: 'static + Send + Sync { 165 | fn into_request_builder_wrapped(self) -> RequestBuilderWrapped; 166 | } 167 | 168 | impl IntoRequestBuilderWrapped for RequestBuilderWrapped { 169 | fn into_request_builder_wrapped(self) -> RequestBuilderWrapped { 170 | self 171 | } 172 | } 173 | 174 | impl IntoRequestBuilderWrapped for RequestBuilder { 175 | fn into_request_builder_wrapped(self) -> RequestBuilderWrapped { 176 | RequestBuilderWrapped { 177 | req: self, 178 | sem_weight: 1, 179 | throttle_weight: 1, 180 | bypass_max_conn: false, 181 | bypass_throttle: false, 182 | } 183 | } 184 | } 185 | 186 | #[derive(Clone)] 187 | pub struct Client { 188 | client: ClientWithMiddleware, 189 | loose_sem: Arc, 190 | sem: Arc, 191 | bucket: Arc, 192 | } 193 | 194 | impl Debug for Client { 195 | fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { 196 | f.debug_struct("Client") 197 | .field("loose_sem", &self.loose_sem) 198 | .field("sem", &self.sem) 199 | .field("bucket", &"RateLimiter") 200 | .finish() 201 | } 202 | } 203 | 204 | impl Deref for Client { 205 | type Target = ClientWithMiddleware; 206 | 207 | fn deref(&self) -> &Self::Target { 208 | &self.client 209 | } 210 | } 211 | 212 | impl Client { 213 | /// Send a request and return the json response. 214 | /// 215 | /// This method applies rate limiting and connection limiting, and retries on failure. 216 | /// 217 | /// # Errors 218 | /// 219 | /// Returns an error if the request failed after retrying. 220 | pub async fn send_json( 221 | &self, 222 | req: impl IntoRequestBuilderWrapped, 223 | ) -> Result { 224 | self.with(req, |req| async move { 225 | Ok(req 226 | .timeout(MAX_STREAM_STUCK_TIME) 227 | .send() 228 | .await? 229 | .json() 230 | .await?) 231 | }) 232 | .await 233 | } 234 | /// Execute given function with given request. 235 | /// 236 | /// This method applies rate limiting and connection limiting, and retries on failure. 237 | /// 238 | /// # Errors 239 | /// 240 | /// Returns an error if the request failed after retrying. 
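///
/// # Example
///
/// A minimal sketch of a hypothetical call site (not taken from this crate's
/// real call sites): fetch a large body outside the strict connection
/// semaphore while still going through the shared throttle and retry logic.
///
/// ```ignore
/// let resp = client
///     .with(client.get(url).bypass_max_conn(), |req| async move {
///         Ok(req.send().await?.error_for_status()?)
///     })
///     .await?;
/// ```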
241 | pub async fn with(&self, req: impl IntoRequestBuilderWrapped, f: F) -> Result 242 | where 243 | F: Fn(RequestBuilder) -> Fut + Clone + Send + Sync, 244 | Fut: Future> + Send, 245 | { 246 | let RequestBuilderWrapped { 247 | req, 248 | sem_weight, 249 | throttle_weight, 250 | bypass_max_conn, 251 | bypass_throttle, 252 | } = req.into_request_builder_wrapped(); 253 | futures_retry_policies::retry( 254 | RetryPolicies::new(DEFAULT_BACKOFF), 255 | tokio::time::sleep, 256 | move || { 257 | let sem = if bypass_max_conn { 258 | self.loose_sem.clone() 259 | } else { 260 | self.sem.clone() 261 | }; 262 | let bucket = self.bucket.clone(); 263 | let req = req 264 | .try_clone() 265 | .expect("clone request") 266 | .with_extension(BypassThrottle(bypass_throttle)); 267 | let f = f.clone(); 268 | async move { 269 | let _guard = sem 270 | .acquire_many(sem_weight) 271 | .await 272 | .expect("acquire semaphore"); 273 | if !bypass_throttle { 274 | bucket.acquire(throttle_weight).await; 275 | } 276 | f(req).await 277 | } 278 | }, 279 | ) 280 | .await 281 | } 282 | } 283 | 284 | /// Create a client with given token. 285 | /// 286 | /// # Errors 287 | /// 288 | /// Errors if an http client can't be created, or the token is illegal. 289 | pub async fn create_client_with_token( 290 | token: &str, 291 | rate_limit_callback: impl 'static + Fn(u64) + Send + Sync, 292 | ) -> Result { 293 | let client = ClientBuilder::new() 294 | .connect_timeout(Duration::from_secs(10)) 295 | .default_headers( 296 | iter::once(( 297 | HeaderName::from_static("user-api-key"), 298 | HeaderValue::from_str(token).expect("illegal token"), 299 | )) 300 | .collect(), 301 | ) 302 | .build()?; 303 | 304 | let client = ClientBuilderWithMiddleware::new(client) 305 | .with(RetryMiddleware::new(rate_limit_callback)) 306 | .with(TimeoutMiddleware::new(MAX_STREAM_STUCK_TIME)) 307 | .build(); 308 | 309 | client 310 | .get("https://shuiyuan.sjtu.edu.cn/session/current.json") 311 | .send() 312 | .await? 313 | .error_for_status()?; 314 | Ok(Client { 315 | client, 316 | loose_sem: Arc::new(Semaphore::new(LOOSE_MAX_CONN)), 317 | sem: Arc::new(Semaphore::new(MAX_CONN)), 318 | bucket: Arc::new( 319 | RateLimiter::builder() 320 | .interval(Duration::from_millis(200)) 321 | .max(MAX_THROTTLE_WEIGHT) 322 | .build(), 323 | ), 324 | }) 325 | } 326 | 327 | /// Add timeout to a try future. 328 | /// 329 | /// # Errors 330 | /// 331 | /// Returns `Error::StreamStuck` if the future times out. 
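///
/// # Example
///
/// A minimal sketch of a hypothetical call site (the crate's `TimeoutMiddleware`
/// wraps every request this way): bound how long a send may stall before it
/// surfaces as `Error::StreamStuck`.
///
/// ```ignore
/// let resp = with_timeout(client.get(url).send(), MAX_STREAM_STUCK_TIME).await?;
/// ```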
332 | pub async fn with_timeout(fut: Fut, dur: Duration) -> Result 333 | where 334 | Fut: Future> + Send, 335 | E: Into, 336 | { 337 | tokio::time::timeout(dur, fut) 338 | .await 339 | .map(|r| r.map_err(Into::into)) 340 | .unwrap_or(Err(Error::StreamStuck)) 341 | } 342 | 343 | #[async_trait::async_trait] 344 | pub trait ResponseExt { 345 | async fn bytes_to_atomic_file(self, file: AtomicFile) -> Result<()>; 346 | } 347 | 348 | #[async_trait::async_trait] 349 | impl ResponseExt for Response { 350 | async fn bytes_to_atomic_file(self, mut file: AtomicFile) -> Result<()> { 351 | let mut stream = self.bytes_stream(); 352 | loop { 353 | break match tokio::time::timeout(Duration::from_secs(10), stream.next()).await { 354 | Ok(Some(Ok(bytes))) => { 355 | file.write(bytes).await?; 356 | continue; 357 | } 358 | Ok(Some(Err(e))) => Err(e.into()), 359 | Ok(None) => { 360 | file.commit().await?; 361 | Ok(()) 362 | } 363 | Err(_) => Err(Error::StreamStuck), 364 | }; 365 | } 366 | } 367 | } 368 | -------------------------------------------------------------------------------- /core/src/error.rs: -------------------------------------------------------------------------------- 1 | use std::io; 2 | 3 | use futures_retry_policies::ShouldRetry; 4 | use lol_html::errors::RewritingError; 5 | use reqwest::StatusCode; 6 | use tempfile::PersistError; 7 | use thiserror::Error; 8 | use tokio::sync::mpsc; 9 | use tracing::warn; 10 | 11 | pub type Result = std::result::Result; 12 | 13 | #[derive(Debug, Error)] 14 | pub enum Error { 15 | #[error("reqwest_middleware error: {0}")] 16 | ReqwestMiddleware(#[from] reqwest_middleware::Error), 17 | #[error("reqwest error: {0}")] 18 | Reqwest(#[from] reqwest::Error), 19 | #[error("channel closed")] 20 | Sender, 21 | #[error("io error: {0}")] 22 | IO(#[from] io::Error), 23 | #[error("json error: {0}")] 24 | Json(#[from] serde_json::Error), 25 | #[error("join error: {0}")] 26 | Join(#[from] tokio::task::JoinError), 27 | #[error("handlebars render error: {0}")] 28 | Handlebars(#[from] handlebars::RenderError), 29 | #[error("base64 decode error: {0}")] 30 | Base64Decode(#[from] base64::DecodeError), 31 | #[error("rsa error: {0}")] 32 | Rsa(#[from] rsa::errors::Error), 33 | #[error("bytes stream stuck")] 34 | StreamStuck, 35 | #[error("atomic file poisoned")] 36 | AtomicFilePoisoned, 37 | #[error("atomic file write error: {0}")] 38 | AtomicFileWrite(#[from] PersistError), 39 | #[error("rewriting error: {0}")] 40 | Rewriting(#[from] RewritingError), 41 | } 42 | 43 | fn classify_reqwest_error(e: &reqwest::Error) -> bool { 44 | e.is_timeout() 45 | || e.is_connect() 46 | || e.is_request() 47 | || e.status() 48 | .map(|status| { 49 | status.is_server_error() 50 | || !status.is_client_error() 51 | || status == StatusCode::REQUEST_TIMEOUT 52 | || status == StatusCode::TOO_MANY_REQUESTS 53 | }) 54 | .unwrap_or_default() 55 | } 56 | 57 | impl ShouldRetry for Error { 58 | fn should_retry(&self, attempts: u32) -> bool { 59 | let retry = match self { 60 | Self::ReqwestMiddleware(reqwest_middleware::Error::Reqwest(e)) | Self::Reqwest(e) => { 61 | classify_reqwest_error(e) 62 | } 63 | Self::IO(_) | Self::StreamStuck => true, 64 | _ => false, 65 | }; 66 | warn!(attempts, retry, e=?self, "ShouldRetry: Error occurred"); 67 | retry 68 | } 69 | } 70 | 71 | impl From> for Error { 72 | fn from(_value: mpsc::error::SendError) -> Self { 73 | Self::Sender 74 | } 75 | } 76 | -------------------------------------------------------------------------------- /core/src/lib.rs: 
-------------------------------------------------------------------------------- 1 | #![allow( 2 | clippy::module_name_repetitions, 3 | clippy::default_trait_access, 4 | clippy::cast_possible_truncation 5 | )] 6 | mod action_code; 7 | pub mod archiver; 8 | mod atomic_file; 9 | pub mod client; 10 | pub mod error; 11 | mod middleware; 12 | mod models; 13 | mod preloaded_store; 14 | mod shared_promise; 15 | 16 | pub mod re_exports { 17 | pub use reqwest; 18 | pub use rsa; 19 | pub use uuid; 20 | } 21 | -------------------------------------------------------------------------------- /core/src/middleware.rs: -------------------------------------------------------------------------------- 1 | use std::time::{Duration, Instant}; 2 | 3 | use reqwest::{Request, Response, StatusCode}; 4 | use reqwest_middleware::{Error, Result}; 5 | use reqwest_middleware::{Middleware, Next}; 6 | use task_local_extensions::Extensions; 7 | use tracing::warn; 8 | 9 | use crate::client::with_timeout; 10 | 11 | pub struct RetryMiddleware { 12 | rate_limit_callback: C, 13 | rate_limit_lock: tokio::sync::RwLock<()>, 14 | } 15 | 16 | pub struct BypassThrottle(pub bool); 17 | 18 | impl RetryMiddleware { 19 | pub fn new(rate_limit_callback: C) -> Self { 20 | Self { 21 | rate_limit_callback, 22 | rate_limit_lock: Default::default(), 23 | } 24 | } 25 | } 26 | 27 | #[async_trait::async_trait] 28 | impl Middleware for RetryMiddleware 29 | where 30 | C: 'static + Fn(u64) + Send + Sync, 31 | { 32 | async fn handle( 33 | &self, 34 | req: Request, 35 | extensions: &mut Extensions, 36 | next: Next<'_>, 37 | ) -> Result { 38 | loop { 39 | let duplicate_request = req.try_clone().expect("Request object is not clonable"); 40 | 41 | let bypass_throttle = extensions 42 | .get::() 43 | .map(|BypassThrottle(b)| *b) 44 | .unwrap_or_default(); 45 | if !bypass_throttle { 46 | drop(self.rate_limit_lock.read().await); // ensure no rate limit in effect 47 | } 48 | 49 | let result = next.clone().run(duplicate_request, extensions).await; 50 | break match result { 51 | Ok(payload) if payload.status() == StatusCode::TOO_MANY_REQUESTS => { 52 | warn!(url=?payload.url(), "TOO MANY REQUESTS"); 53 | let retry_after = 54 | payload 55 | .headers() 56 | .get("retry-after") 57 | .and_then(|retry_after| { 58 | retry_after.to_str().unwrap_or_default().parse::().ok() 59 | }); 60 | if let Some(retry_after) = retry_after { 61 | // Lock all other requests. 
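// Taking the write half of `rate_limit_lock` here blocks every other request,
// since normal requests first wait on the read half before running. The time
// already spent waiting for this write lock is subtracted from `retry_after`
// below, so a rate-limit window that another task has already slept through
// is not waited out twice.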
62 | let before_lock = Instant::now(); 63 | 64 | let _guard = self.rate_limit_lock.write().await; 65 | 66 | let elapsed = before_lock.elapsed(); 67 | let retry_after = retry_after.saturating_sub(elapsed.as_secs()); 68 | 69 | if retry_after != 0 { 70 | (self.rate_limit_callback)(retry_after + 1); 71 | tokio::time::sleep(Duration::from_secs(retry_after + 1)).await; 72 | } 73 | continue; 74 | } 75 | Ok(payload) 76 | } 77 | _ => result, 78 | }; 79 | } 80 | } 81 | } 82 | 83 | #[derive(Debug, Copy, Clone)] 84 | pub struct TimeoutMiddleware(Duration); 85 | 86 | impl TimeoutMiddleware { 87 | pub const fn new(timeout: Duration) -> Self { 88 | Self(timeout) 89 | } 90 | } 91 | 92 | #[async_trait::async_trait] 93 | impl Middleware for TimeoutMiddleware { 94 | async fn handle( 95 | &self, 96 | req: Request, 97 | extensions: &mut Extensions, 98 | next: Next<'_>, 99 | ) -> Result { 100 | with_timeout(next.run(req, extensions), self.0) 101 | .await 102 | .map_err(Error::middleware) 103 | } 104 | } 105 | -------------------------------------------------------------------------------- /core/src/models.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashMap; 2 | use std::path::PathBuf; 3 | 4 | use chrono::{DateTime, Datelike, Local, Utc}; 5 | use serde::{Deserialize, Serialize}; 6 | use typeshare::typeshare; 7 | 8 | #[derive(Debug, Deserialize)] 9 | pub struct RespTopic { 10 | pub title: String, 11 | pub category_id: usize, 12 | pub tags: Vec, 13 | pub post_stream: PostStream, 14 | pub posts_count: usize, 15 | } 16 | 17 | #[derive(Debug, Deserialize)] 18 | pub struct PostStream { 19 | pub posts: Vec, 20 | pub stream: Option>, 21 | } 22 | 23 | #[derive(Debug, Deserialize)] 24 | pub struct RespPosts { 25 | pub post_stream: PostStream, 26 | } 27 | 28 | #[derive(Debug, Deserialize)] 29 | pub struct RespCategory { 30 | pub category: RespCategoryInner, 31 | } 32 | 33 | #[derive(Debug, Deserialize)] 34 | pub struct RespCategoryInner { 35 | #[serde(flatten)] 36 | pub inner: Category, 37 | pub parent_category_id: Option, 38 | } 39 | 40 | #[typeshare] 41 | #[derive(Debug, Clone, Serialize, Deserialize)] 42 | pub struct Category { 43 | pub name: String, 44 | pub color: String, 45 | } 46 | 47 | #[derive(Debug, Clone, Serialize)] 48 | pub struct Post { 49 | pub name: String, 50 | pub number: usize, 51 | pub username: String, 52 | pub created_at: String, 53 | pub created_at_display: String, 54 | pub content: String, 55 | pub likes: usize, 56 | pub reply_to: Option, 57 | pub emojis: HashMap, 58 | pub avatar: Option, 59 | } 60 | 61 | #[derive(Debug, Deserialize)] 62 | pub struct RespPost { 63 | pub id: usize, 64 | pub post_number: usize, 65 | pub name: String, 66 | pub username: String, 67 | pub created_at: DateTime, 68 | pub cooked: String, 69 | #[serde(default)] 70 | pub cooked_hidden: bool, 71 | pub actions_summary: Vec, 72 | pub reply_to_post_number: Option, 73 | pub retorts: Vec, 74 | pub avatar_template: String, 75 | pub action_code: Option, 76 | #[serde(default)] 77 | pub polls: Vec, 78 | } 79 | 80 | #[derive(Debug, Deserialize)] 81 | pub struct RespPoll { 82 | pub name: String, 83 | pub options: Vec, 84 | pub voters: usize, 85 | } 86 | 87 | #[derive(Debug, Deserialize)] 88 | pub struct RespPollOption { 89 | pub id: String, 90 | pub html: String, 91 | #[serde(default)] 92 | pub votes: Option, 93 | } 94 | 95 | #[derive(Debug, Deserialize)] 96 | pub struct RespRetort { 97 | pub usernames: Vec, 98 | pub emoji: String, 99 | } 100 | 101 | #[derive(Debug, 
Deserialize)] 102 | pub struct RespCooked { 103 | pub cooked: String, 104 | } 105 | 106 | #[derive(Debug, Deserialize)] 107 | pub struct Actions { 108 | pub id: usize, 109 | pub count: Option, 110 | } 111 | 112 | #[derive(Debug, Clone, Serialize)] 113 | pub struct Topic<'a> { 114 | pub id: u32, 115 | pub title: String, 116 | pub description: String, 117 | pub categories: Vec, 118 | pub tags: Vec, 119 | pub posts: &'a [Post], 120 | pub page: usize, 121 | pub total_pages: usize, 122 | pub prev_page: Option, 123 | // can be "index" 124 | pub next_page: Option, 125 | } 126 | 127 | #[derive(Debug, Clone, Serialize)] 128 | pub struct Params<'a> { 129 | #[serde(flatten)] 130 | pub topic: Topic<'a>, 131 | pub app_version: String, 132 | pub year: i32, 133 | } 134 | 135 | impl<'a> From> for Params<'a> { 136 | fn from(t: Topic<'a>) -> Self { 137 | Self { 138 | topic: t, 139 | app_version: env!("CARGO_PKG_VERSION").to_string(), 140 | year: Local::now().year(), 141 | } 142 | } 143 | } 144 | 145 | #[typeshare] 146 | #[derive(Clone, Serialize, Deserialize)] 147 | #[serde(rename_all = "kebab-case")] 148 | pub struct TopicMeta { 149 | pub id: u32, 150 | pub title: String, 151 | pub description: String, 152 | pub categories: Vec, 153 | pub tags: Vec, 154 | pub post_ids: Vec, 155 | } 156 | -------------------------------------------------------------------------------- /core/src/preloaded_store.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashMap; 2 | 3 | use lol_html::{element, HtmlRewriter, RewriteStrSettings}; 4 | use reqwest_middleware::ClientWithMiddleware; 5 | use serde::de::{DeserializeOwned, Error}; 6 | use serde::{Deserialize, Deserializer}; 7 | 8 | use crate::error::Result; 9 | 10 | #[derive(Debug, Deserialize)] 11 | #[serde(rename_all = "camelCase")] 12 | pub struct PreloadedStore { 13 | #[serde(deserialize_with = "de_from_emojis")] 14 | custom_emoji: HashMap, 15 | } 16 | 17 | #[derive(Debug, Deserialize)] 18 | pub struct Emoji { 19 | name: String, 20 | url: String, 21 | } 22 | 23 | impl PreloadedStore { 24 | pub async fn from_client(client: &ClientWithMiddleware) -> Result { 25 | let body = client 26 | .get("https://shuiyuan.sjtu.edu.cn") 27 | .send() 28 | .await? 29 | .text() 30 | .await?; 31 | let mut preloaded = None; 32 | let rule = element!("#data-preloaded", |el| { 33 | if preloaded 34 | .replace(el.get_attribute("data-preloaded").expect("data-preloaded")) 35 | .is_some() 36 | { 37 | panic!("multiple #data-preloaded") 38 | } 39 | Ok(()) 40 | }); 41 | let _ = HtmlRewriter::new( 42 | RewriteStrSettings { 43 | element_content_handlers: vec![rule], 44 | ..RewriteStrSettings::default() 45 | } 46 | .into(), 47 | |_: &[u8]| (), 48 | ) 49 | .write(body.as_bytes()); 50 | 51 | let unescaped = 52 | htmlescape::decode_html(&preloaded.expect("#data-preloaded")).expect("unescaped"); 53 | 54 | Ok(serde_json::from_str(&unescaped)?) 
55 | } 56 | pub fn custom_emoji(&self, name: &str) -> Option<&str> { 57 | self.custom_emoji.get(name).map(String::as_str) 58 | } 59 | } 60 | 61 | fn de_from_str<'de, D, T>(deserializer: D) -> Result 62 | where 63 | D: Deserializer<'de>, 64 | T: DeserializeOwned, 65 | { 66 | let s = String::deserialize(deserializer)?; 67 | let mut json_de = serde_json::Deserializer::from_str(&s); 68 | T::deserialize(&mut json_de).map_err(Error::custom) 69 | } 70 | 71 | fn de_from_emojis<'de, D>(deserializer: D) -> Result, D::Error> 72 | where 73 | D: Deserializer<'de>, 74 | { 75 | let emojis: Vec = de_from_str(deserializer)?; 76 | Ok(emojis.into_iter().map(|e| (e.name, e.url)).collect()) 77 | } 78 | -------------------------------------------------------------------------------- /core/src/shared_promise.rs: -------------------------------------------------------------------------------- 1 | use futures::{future, FutureExt}; 2 | use tap::TapFallible; 3 | use tokio::sync::oneshot; 4 | use tracing::warn; 5 | 6 | pub struct Swear { 7 | tx: Option>, 8 | } 9 | 10 | #[derive(Debug, Clone)] 11 | pub struct SharedPromise(future::Shared>); 12 | 13 | pub fn shared_promise_pair() -> (Swear, SharedPromise) { 14 | let (tx, rx) = oneshot::channel(); 15 | (Swear::new(tx), SharedPromise(rx.shared())) 16 | } 17 | 18 | impl Swear { 19 | fn new(tx: oneshot::Sender) -> Self { 20 | Self { tx: Some(tx) } 21 | } 22 | pub fn fulfill(mut self, value: T) { 23 | drop( 24 | self.tx 25 | .take() 26 | .expect("fulfilled only once") 27 | .send(value) 28 | .tap_err(|_e| warn!("Nobody's listening on promise")), 29 | ); 30 | } 31 | } 32 | 33 | impl Drop for Swear { 34 | fn drop(&mut self) { 35 | if self.tx.is_some() { 36 | warn!("Unfulfilled promise"); 37 | } 38 | } 39 | } 40 | 41 | impl SharedPromise { 42 | pub async fn recv(self) -> Option { 43 | self.0.await.ok() 44 | } 45 | } 46 | 47 | #[cfg(test)] 48 | mod tests { 49 | use std::assert_eq; 50 | 51 | use tokio::sync::{mpsc, oneshot}; 52 | 53 | #[tokio::test] 54 | async fn must_resolve() { 55 | let (swear, promise) = super::shared_promise_pair(); 56 | let (tx, _rx) = oneshot::channel(); 57 | let handler = tokio::spawn(async move { 58 | tx.send(()).unwrap(); 59 | assert_eq!(42, promise.recv().await.unwrap()); 60 | }); 61 | swear.fulfill(42); 62 | handler.await.unwrap(); 63 | } 64 | 65 | #[tokio::test] 66 | async fn must_retain() { 67 | let (swear, promise) = super::shared_promise_pair(); 68 | let (tx, _rx) = oneshot::channel(); 69 | let handler = { 70 | let promise = promise.clone(); 71 | let promise_2 = promise.clone(); 72 | tokio::spawn(async move { 73 | tx.send(()).unwrap(); 74 | 75 | assert_eq!(42, promise.recv().await.unwrap()); 76 | assert_eq!(42, promise_2.recv().await.unwrap()); 77 | }) 78 | }; 79 | swear.fulfill(42); 80 | assert_eq!(42, promise.recv().await.unwrap()); 81 | handler.await.unwrap(); 82 | } 83 | 84 | #[tokio::test] 85 | async fn must_resolve_multi() { 86 | let (swear, promise) = super::shared_promise_pair(); 87 | let (tx, mut rx) = mpsc::channel(5); 88 | let handlers: Vec<_> = (0..5) 89 | .map(|_| { 90 | let tx = tx.clone(); 91 | let promise = promise.clone(); 92 | tokio::spawn(async move { 93 | tx.send(()).await.unwrap(); 94 | assert_eq!(42, promise.recv().await.unwrap()); 95 | }) 96 | }) 97 | .collect(); 98 | for _ in 0..5 { 99 | drop(rx.recv()); 100 | } 101 | swear.fulfill(42); 102 | for handler in handlers { 103 | handler.await.unwrap(); 104 | } 105 | } 106 | } 107 | -------------------------------------------------------------------------------- 
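Note: the `Swear`/`SharedPromise` pair above is how the download manager avoids fetching the same resource twice: the first task to claim a URL keeps the `Swear` and fulfills it with the saved path, while every later task just awaits the `SharedPromise` (see the `swear.fulfill(...)` and `promise.recv()` arms in `download_manager.rs`). Below is a minimal sketch of that pattern, assuming a hypothetical `Mutex<HashMap<..>>` registry and a `do_download` worker in place of the crate's real bookkeeping.

use std::collections::HashMap;
use std::path::PathBuf;
use std::sync::Mutex;

use crate::shared_promise::{shared_promise_pair, SharedPromise};

// Hypothetical worker standing in for the real download routine.
async fn do_download(key: &str) -> PathBuf {
    PathBuf::from(format!("resources/{key}"))
}

async fn download_once(
    registry: &Mutex<HashMap<String, SharedPromise<PathBuf>>>,
    key: String,
) -> PathBuf {
    // Decide under the lock whether we download or wait; the guard is dropped
    // before any await point.
    let role = {
        let mut map = registry.lock().unwrap();
        if let Some(promise) = map.get(&key) {
            Err(promise.clone())
        } else {
            let (swear, promise) = shared_promise_pair();
            map.insert(key.clone(), promise);
            Ok(swear)
        }
    };
    match role {
        // First caller: do the work once, then wake every waiter.
        Ok(swear) => {
            let path = do_download(&key).await;
            swear.fulfill(path.clone());
            path
        }
        // Later callers: await the first caller's result.
        Err(promise) => promise.recv().await.unwrap_or_default(),
    }
}

If the downloading task fails before calling `fulfill`, the `Drop` impl on `Swear` logs "Unfulfilled promise" and every waiter's `recv()` resolves to `None`; that is why the real code falls back to `unwrap_or_default()` and lets the error be reported by the task that actually failed.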
/core/templates/index.hbs: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 15 | 16 | {{#if description}} 17 | 18 | {{/if}} 19 | {{escape title}} | 第 {{page}} 页 20 | 22 | 24 | 27 | 29 | 31 | 34 | 37 | 40 | 42 | 44 | 45 | 46 | 47 | 48 | 49 | 50 |
51 | 52 | 54 | 55 |
56 |
57 |
58 |

59 | {{escape title}} 60 |

61 | 62 |
63 | {{#each categories}} 64 | 65 | 66 | 67 | 68 | {{escape this.name}} 69 | 70 | 71 | 72 | {{/each}} 73 |
74 | 75 |
76 |
77 | {{#each tags}} 78 | 79 | {{/each}} 80 |
81 |
82 |
83 | 84 | {{#each posts}} 85 |
86 | 109 |
110 | {{this.content}} 111 |
112 | 113 | {{#if this.emojis}} 114 |
115 | 116 | 117 | {{#each this.emojis}} 118 | 119 | {{this}} 120 | {{/each}} 121 |
122 | {{/if}} 123 | {{#if this.likes}} 124 |
125 | 126 | 127 | {{this.likes}} 个赞 128 |
129 | {{/if}} 130 | 131 |
132 | {{/each}} 133 | 142 |
143 |
144 | 145 |
146 |
©{{year}} 上海交通大学
147 |
148 | 149 | -------------------------------------------------------------------------------- /core/templates/resources/205da9bd7e50046f118af4e49e6a562905a2ca26.js: -------------------------------------------------------------------------------- 1 | (function() { 2 | if ('require' in window) { 3 | require("discourse/lib/theme-settings-store").registerSettings(18, {"minimum_trust_level_to_create_TOC":0,"composer_toc_text":"此主题将会生成目录","table_of_contents_icon":"align-left","anchor_icon":"hashtag","theme_uploads":{"icons-sprite":"/uploads/default/original/3X/8/0/80ed408554201b1aea5b03b7b3a2ab0b0be0a012.svg"}}); 4 | } 5 | })(); 6 | if ('define' in window) { 7 | define("discourse/theme-18/initializers/theme-field-64-common-html-script-1", ["exports", "discourse/lib/plugin-api"], function (_exports, _pluginApi) { 8 | "use strict"; 9 | 10 | Object.defineProperty(_exports, "__esModule", { 11 | value: true 12 | }); 13 | _exports.default = void 0; 14 | 15 | var settings = require("discourse/lib/theme-settings-store").getObjectForTheme(18); 16 | 17 | var themePrefix = function themePrefix(key) { 18 | return "theme_translations.18.".concat(key); 19 | }; 20 | 21 | var _default = { 22 | name: "theme-field-64-common-html-script-1", 23 | after: "inject-objects", 24 | initialize: function initialize() { 25 | var _this = this; 26 | 27 | (0, _pluginApi.withPluginApi)("0.1", function (api) { 28 | var minimumOffset = require("discourse/lib/offset-calculator").minimumOffset; 29 | 30 | var _require = require("discourse-common/lib/icon-library"), 31 | iconHTML = _require.iconHTML; 32 | 33 | var _Ember = Ember, 34 | run = _Ember.run; 35 | var mobileView = $("html").hasClass("mobile-view"); 36 | var linkIcon = iconHTML(settings.anchor_icon); 37 | var closeIcon = iconHTML("times"); 38 | var dtocIcon = iconHTML("align-left"); 39 | var currUser = api.getCurrentUser(); 40 | var currUserTrustLevel = currUser ? currUser.trust_level : ""; 41 | var minimumTrustLevel = settings.minimum_trust_level_to_create_TOC; 42 | var SCROLL_THROTTLE = 300; 43 | var SMOOTH_SCROLL_SPEED = 300; 44 | var TOC_ANIMATION_SPEED = 300; 45 | 46 | var cleanUp = function cleanUp(item) { 47 | var cleanItem = item.trim().toLowerCase().replace(/[\{\}\[\]\\\/\<\>\(\)\|\+\?\*\^\'\`\'\"\.\_\$\s~!@#%&,;:=]/gi, "-").replace(/\-\-+/g, "-").replace(/^\-/, "").replace(/\-$/, ""); 48 | return cleanItem; 49 | }; 50 | 51 | var setUpTocItem = function setUpTocItem(item) { 52 | var unique = item.attr("id"); 53 | var text = item.text(); 54 | var tocItem = $("
  • ", { 55 | class: "d-toc-item", 56 | "data-d-toc": unique 57 | }); 58 | tocItem.append($("", { 59 | text: text 60 | })); 61 | return tocItem; 62 | }; 63 | 64 | (function (dToc) { 65 | var _arguments = arguments, 66 | _this3 = this; 67 | 68 | dToc($, window); 69 | $.widget("discourse.dToc", { 70 | _create: function _create() { 71 | this.generateDtoc(); 72 | this.setEventHandlers(); 73 | }, 74 | generateDtoc: function generateDtoc() { 75 | var self = this; 76 | var primaryHeadings = $(this.options.cooked).find(this.options.selectors.substr(0, this.options.selectors.indexOf(","))); 77 | self.element.addClass("d-toc"); 78 | primaryHeadings.each(function (index) { 79 | var selectors = self.options.selectors, 80 | ul = $("
      ", { 81 | id: "d-toc-top-heading-".concat(index), 82 | class: "d-toc-heading" 83 | }); 84 | ul.append(setUpTocItem($(this))); 85 | self.element.append(ul); 86 | $(this).nextUntil(this.nodeName.toLowerCase()).each(function () { 87 | var headings = $(this).find(selectors).length ? $(this).find(selectors) : $(this).filter(selectors); 88 | headings.each(function () { 89 | self.nestTocItem.call(this, self, ul); 90 | }); 91 | }); 92 | }); 93 | }, 94 | nestTocItem: function nestTocItem(self, ul) { 95 | var index = $(this).index(self.options.selectors); 96 | var previousHeader = $(self.options.selectors).eq(index - 1); 97 | var previousTagName = previousHeader.prop("tagName").charAt(1); 98 | var currentTagName = $(this).prop("tagName").charAt(1); 99 | 100 | if (currentTagName < previousTagName) { 101 | self.element.find(".d-toc-subheading[data-tag=\"".concat(currentTagName, "\"]")).last().append(setUpTocItem($(this))); 102 | } else if (currentTagName === previousTagName) { 103 | ul.find(".d-toc-item").last().after(setUpTocItem($(this))); 104 | } else { 105 | ul.find(".d-toc-item").last().after($("
        ", { 106 | class: "d-toc-subheading", 107 | "data-tag": currentTagName 108 | })).next(".d-toc-subheading").append(setUpTocItem($(this))); 109 | } 110 | }, 111 | setEventHandlers: function setEventHandlers() { 112 | var _this2 = this; 113 | 114 | var self = this; 115 | 116 | var dtocMobile = function dtocMobile() { 117 | $(".d-toc").toggleClass("d-toc-mobile"); 118 | }; 119 | 120 | this.element.on("click.d-toc", "li", function () { 121 | self.element.find(".d-toc-active").removeClass("d-toc-active"); 122 | $(this).addClass("d-toc-active"); 123 | 124 | if (mobileView) { 125 | dtocMobile(); 126 | } else { 127 | var elem = $("li[data-d-toc=\"".concat($(this).attr("data-d-toc"), "\"]")); 128 | self.triggerShowHide(elem); 129 | } 130 | 131 | self.scrollTo($(this)); 132 | }); 133 | $("#main").on("click.toggleDtoc", ".d-toc-toggle, .d-toc-close, .post-bottom-wrapper a", dtocMobile); 134 | 135 | var onScroll = function onScroll() { 136 | run.throttle(_this2, self.highlightItemsOnScroll, self, SCROLL_THROTTLE); 137 | }; 138 | 139 | $(window).on("scroll.d-toc", onScroll); 140 | }, 141 | highlightItemsOnScroll: function highlightItemsOnScroll(self) { 142 | $("html, body").promise().done(function () { 143 | var winScrollTop = $(window).scrollTop(); 144 | var anchors = $(self.options.cooked).find("[data-d-toc]"); 145 | var closestAnchorDistance = null; 146 | var closestAnchorIdx = null; 147 | anchors.each(function (idx) { 148 | var distance = Math.abs($(this).offset().top - minimumOffset() - winScrollTop); 149 | 150 | if (closestAnchorDistance == null || distance < closestAnchorDistance) { 151 | closestAnchorDistance = distance; 152 | closestAnchorIdx = idx; 153 | } else { 154 | return false; 155 | } 156 | }); 157 | var anchorText = $(anchors[closestAnchorIdx]).attr("data-d-toc"); 158 | var elem = $("li[data-d-toc=\"".concat(anchorText, "\"]")); 159 | 160 | if (elem.length) { 161 | self.element.find(".d-toc-active").removeClass("d-toc-active"); 162 | elem.addClass("d-toc-active"); 163 | } 164 | 165 | if (!mobileView) { 166 | self.triggerShowHide(elem); 167 | } 168 | }); 169 | }, 170 | triggerShowHide: function triggerShowHide(elem) { 171 | if (elem.parent().is(".d-toc-heading") || elem.next().is(".d-toc-subheading")) { 172 | this.showHide(elem.next(".d-toc-subheading")); 173 | } else if (elem.parent().is(".d-toc-subheading")) { 174 | this.showHide(elem.parent()); 175 | } 176 | }, 177 | showHide: function showHide(elem) { 178 | return elem.is(":visible") ? 
this.hide(elem) : this.show(elem); 179 | }, 180 | hide: function hide(elem) { 181 | var target = $(".d-toc-subheading").not(elem).not(elem.parents(".d-toc-subheading:has(.d-toc-active)")); 182 | return target.slideUp(TOC_ANIMATION_SPEED); 183 | }, 184 | show: function show(elem) { 185 | return elem.slideDown(TOC_ANIMATION_SPEED); 186 | }, 187 | scrollTo: function scrollTo(elem) { 188 | var currentDiv = $("[data-d-toc=\"".concat(elem.attr("data-d-toc"), "\"]")); 189 | $("html, body").animate({ 190 | scrollTop: "".concat(currentDiv.offset().top - minimumOffset()) 191 | }, { 192 | duration: SMOOTH_SCROLL_SPEED 193 | }); 194 | }, 195 | setOptions: function setOptions() { 196 | $.Widget.prototype._setOptions.apply(_this3, _arguments); 197 | } 198 | }); 199 | })(function () {}); 200 | 201 | api.decorateCooked(function ($elem) { 202 | run.scheduleOnce("actions", function () { 203 | if ($elem.hasClass("d-editor-preview")) return; 204 | if (!$elem.parents("article#post_1").length) return; 205 | var dToc = $elem.find("[data-theme-toc=\"true\"]"); 206 | if (!dToc.length) return _this; 207 | var body = $elem; 208 | body.find("div, aside, blockquote, article, details").each(function () { 209 | $(this).children("h1,h2,h3,h4,h5,h6").each(function () { 210 | $(this).replaceWith("
        ".concat($(this).html(), "
        ")); 211 | }); 212 | }); 213 | body.append(""); 214 | var dTocHeadingSelectors = "h1,h2,h3,h4,h5,h6"; 215 | 216 | if (!body.has(">h1").length) { 217 | dTocHeadingSelectors = "h2,h3,h4,h5,h6"; 218 | 219 | if (!body.has(">h2").length) { 220 | dTocHeadingSelectors = "h3,h4,h5,h6"; 221 | 222 | if (!body.has(">h3").length) { 223 | dTocHeadingSelectors = "h4,h5,h6"; 224 | 225 | if (!body.has(">h4").length) { 226 | dTocHeadingSelectors = "h5,h6"; 227 | 228 | if (!body.has(">h5").length) { 229 | dTocHeadingSelectors = "h6"; 230 | } 231 | } 232 | } 233 | } 234 | } 235 | 236 | body.find(dTocHeadingSelectors).each(function () { 237 | if ($(this).hasClass("d-toc-ignore")) return; 238 | var heading = $(this); 239 | var id = heading.attr("id") || ""; 240 | 241 | if (!id.length) { 242 | id = cleanUp(heading.text()); 243 | } 244 | 245 | heading.attr({ 246 | id: id, 247 | "data-d-toc": id 248 | }).addClass("d-toc-post-heading"); 249 | }); 250 | body.addClass("d-toc-cooked").prepend("\n ".concat(dtocIcon, " ").concat(I18n.t(themePrefix("table_of_contents")), "\n ")).parents(".regular").addClass("d-toc-regular").parents("article").addClass("d-toc-article").append("
        \n \n \n
        \n ")).parents(".topic-post").addClass("d-toc-post").parents("body").addClass("d-toc-timeline"); 251 | $("#d-toc").dToc({ 252 | cooked: body, 253 | selectors: dTocHeadingSelectors 254 | }); 255 | }); 256 | }, { 257 | id: "disco-toc" 258 | }); 259 | api.cleanupStream(function () { 260 | $(window).off("scroll.d-toc"); 261 | $("#main").off("click.toggleDtoc"); 262 | $(".d-toc-timeline").removeClass("d-toc-timeline d-toc-timeline-visible"); 263 | }); 264 | api.onAppEvent("topic:current-post-changed", function (post) { 265 | if (!$(".d-toc-timeline").length) return; 266 | run.scheduleOnce("afterRender", function () { 267 | if (post.post.post_number <= 2) { 268 | $("body").removeClass("d-toc-timeline-visible"); 269 | $(".d-toc-toggle").fadeIn(100); 270 | } else { 271 | $("body").addClass("d-toc-timeline-visible"); 272 | $(".d-toc-toggle").fadeOut(100); 273 | } 274 | }); 275 | }); 276 | 277 | if (currUserTrustLevel >= minimumTrustLevel) { 278 | if (!I18n.translations[I18n.currentLocale()].js.composer) { 279 | I18n.translations[I18n.currentLocale()].js.composer = {}; 280 | } 281 | 282 | I18n.translations[I18n.currentLocale()].js.composer.contains_dtoc = " "; 283 | api.addToolbarPopupMenuOptionsCallback(function () { 284 | var composerController = api.container.lookup("controller:composer"); 285 | return { 286 | action: "insertDtoc", 287 | icon: "align-left", 288 | label: themePrefix("insert_table_of_contents"), 289 | condition: composerController.get("model.canCategorize") 290 | }; 291 | }); 292 | api.modifyClass("controller:composer", { 293 | pluginId: "DiscoTOC", 294 | actions: { 295 | insertDtoc: function insertDtoc() { 296 | this.get("toolbarEvent").applySurround("
        ", "
        ", "contains_dtoc"); 297 | } 298 | } 299 | }); 300 | } 301 | }); 302 | } 303 | }; 304 | _exports.default = _default; 305 | }); 306 | } 307 | -------------------------------------------------------------------------------- /core/templates/resources/79ef348eb9f79f287b6c835ff09169b855d710f2.js: -------------------------------------------------------------------------------- 1 | if ('define' in window) { 2 | define("discourse/theme-47/initializers/theme-field-220-common-html-script-1", ["exports", "discourse/lib/plugin-api"], function (_exports, _pluginApi) { 3 | "use strict"; 4 | 5 | Object.defineProperty(_exports, "__esModule", { 6 | value: true 7 | }); 8 | _exports.default = void 0; 9 | 10 | function _applyDecoratedDescriptor(target, property, decorators, descriptor, context) { var desc = {}; Object.keys(descriptor).forEach(function (key) { desc[key] = descriptor[key]; }); desc.enumerable = !!desc.enumerable; desc.configurable = !!desc.configurable; if ('value' in desc || desc.initializer) { desc.writable = true; } desc = decorators.slice().reverse().reduce(function (desc, decorator) { return decorator(target, property, desc) || desc; }, desc); if (context && desc.initializer !== void 0) { desc.value = desc.initializer ? desc.initializer.call(context) : void 0; desc.initializer = undefined; } if (desc.initializer === void 0) { Object.defineProperty(target, property, desc); desc = null; } return desc; } 11 | 12 | var settings = require("discourse/lib/theme-settings-store").getObjectForTheme(47); 13 | 14 | var themePrefix = function themePrefix(key) { 15 | return "theme_translations.47.".concat(key); 16 | }; 17 | 18 | var _default = { 19 | name: "theme-field-220-common-html-script-1", 20 | after: "inject-objects", 21 | initialize: function initialize() { 22 | (0, _pluginApi.withPluginApi)("0.8.23", function (api) { 23 | var _dec, _obj; 24 | 25 | var computed = require("discourse-common/utils/decorators").default; 26 | 27 | api.modifyClass('controller:user', (_dec = computed("viewingSelf", "currentUser.admin"), (_obj = { 28 | showPrivateMessages: function showPrivateMessages(viewingSelf, isAdmin) { 29 | return this.siteSettings.enable_personal_messages && viewingSelf; 30 | } 31 | }, (_applyDecoratedDescriptor(_obj, "showPrivateMessages", [_dec], Object.getOwnPropertyDescriptor(_obj, "showPrivateMessages"), _obj)), _obj))); 32 | }); 33 | } 34 | }; 35 | _exports.default = _default; 36 | }); 37 | } 38 | -------------------------------------------------------------------------------- /core/templates/resources/dd73fca2e692e339380e847ce439a9e2a083d9bb.js: -------------------------------------------------------------------------------- 1 | (function() { 2 | if ('require' in window) { 3 | require("discourse/lib/theme-settings-store").registerSettings(27, {"immediate_reload":true,"show_section_header":false}); 4 | } 5 | })(); 6 | if ('define' in window) { 7 | define("discourse/theme-27/initializers/theme-field-141-common-html-script-1", ["exports", "discourse/lib/plugin-api"], function (_exports, _pluginApi) { 8 | "use strict"; 9 | 10 | Object.defineProperty(_exports, "__esModule", { 11 | value: true 12 | }); 13 | _exports.default = void 0; 14 | 15 | var settings = require("discourse/lib/theme-settings-store").getObjectForTheme(27); 16 | 17 | var themePrefix = function themePrefix(key) { 18 | return "theme_translations.27.".concat(key); 19 | }; 20 | 21 | var _default = { 22 | name: "theme-field-141-common-html-script-1", 23 | after: "inject-objects", 24 | initialize: function initialize() { 25 
| (0, _pluginApi.withPluginApi)("0.8", function (api) { 26 | var h = require('virtual-dom').h; 27 | 28 | var ajax = require('discourse/lib/ajax').ajax; 29 | 30 | var themeSelector = require('discourse/lib/theme-selector'); 31 | 32 | api.createWidget("theme-selector", { 33 | buildKey: function buildKey(attrs) { 34 | return "theme-selector"; 35 | }, 36 | defaultState: function defaultState() { 37 | return { 38 | currentThemeId: themeSelector.currentThemeId() 39 | }; 40 | }, 41 | click: function click(event) { 42 | var _this = this; 43 | 44 | var $target = $(event.target); 45 | var id = $target.data('id'); 46 | var user = api.getCurrentUser(); 47 | 48 | if (user) { 49 | user.findDetails().then(function (user) { 50 | var seq = user.get("user_option.theme_key_seq"); 51 | 52 | _this.setTheme(id, seq); 53 | }); 54 | } else { 55 | this.setTheme(id); 56 | } 57 | 58 | ; 59 | return true; 60 | }, 61 | setTheme: function setTheme(themeId) { 62 | var seq = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 0; 63 | 64 | if (themeId == null) { 65 | return; 66 | } 67 | 68 | themeSelector.setLocalTheme([themeId], seq); 69 | this.state.currentThemeId = themeId; 70 | 71 | if (settings.immediate_reload) { 72 | window.location.reload(); 73 | } else { 74 | themeSelector.previewTheme([themeId]); 75 | } 76 | 77 | this.scheduleRerender(); 78 | }, 79 | themeHtml: function themeHtml(currentThemeId) { 80 | var themes = themeSelector.listThemes(this.site); 81 | 82 | if (themes && themes.length > 1) { 83 | return themes.map(function (theme) { 84 | var name = [theme.name]; 85 | 86 | if (theme.id === currentThemeId) { 87 | name.push('\xa0' + "*"); 88 | } 89 | 90 | return h('li', { 91 | attributes: { 92 | "data-name": theme.name 93 | } 94 | }, h('a.widget-link', { 95 | attributes: { 96 | "data-id": theme.id 97 | } 98 | }, name)); 99 | }); 100 | } 101 | }, 102 | html: function html(attrs, state) { 103 | var themeHtml = this.themeHtml(state.currentThemeId); 104 | var sectionHeader = null; 105 | var sectionHeaderText = I18n.t(themePrefix("hamburger_menu.theme_selector")); 106 | 107 | if (!themeHtml) { 108 | return; 109 | } 110 | 111 | if (settings.show_section_header) { 112 | var user = api.getCurrentUser(); 113 | var sectionHeaderLink = null; 114 | 115 | if (user) { 116 | sectionHeaderLink = h('a.widget-link', { 117 | href: "/my/preferences/interface" 118 | }, sectionHeaderText); 119 | } else { 120 | sectionHeaderLink = h('span', {}, sectionHeaderText); 121 | } 122 | 123 | sectionHeader = h('li', { 124 | style: "width: 100%;" + (user == null ? 
"padding: 0.25em 0.5em;" : null) 125 | }, sectionHeaderLink); 126 | } 127 | 128 | return [h('ul.menu-links.columned', [sectionHeader, themeHtml]), h('.clearfix'), h('hr')]; 129 | } 130 | }); 131 | api.decorateWidget('menu-links:before', function (helper) { 132 | if (helper.attrs.name === 'footer-links') { 133 | return [helper.widget.attach('theme-selector')]; 134 | } 135 | }); 136 | }); 137 | } 138 | }; 139 | _exports.default = _default; 140 | }); 141 | } 142 | -------------------------------------------------------------------------------- /core/templates/resources/desktop_theme_18_6044d2798548d883edb7b504e9678e59bef1ea37.css: -------------------------------------------------------------------------------- 1 | .d-toc-regular [data-theme-toc]{display:none}.d-toc-regular .d-toc-ignore{font-size:var(--font-up-1);margin:0 0 10px 0;font-weight:bold}.d-toc-regular .highlighted{animation:fadein 1s}.d-toc-regular .d-toc{transform:translate3d(0, 0, 0);transition:opacity 0.25s}.d-toc-regular .d-toc ul,.d-toc-regular .d-toc li{list-style:none;margin:0;padding:0;border:none}.d-toc-regular .d-toc .d-toc-item{padding:6px 0}.d-toc-regular .d-toc .d-toc-item a{color:var(--primary-high, #646464)}.d-toc-regular .d-toc .d-toc-item.d-toc-active{position:relative}.d-toc-regular .d-toc .d-toc-item.d-toc-active:before{height:100%;content:"";width:1px;position:absolute;top:0}:not(.rtl) .d-toc-regular .d-toc .d-toc-item.d-toc-active:before{border-left:1px solid var(--tertiary, #08c)}.rtl .d-toc-regular .d-toc .d-toc-item.d-toc-active:before{border-right:1px solid var(--tertiary, #08c)}.d-toc-regular .d-toc .d-toc-item.d-toc-active a{color:var(--primary, #222);text-shadow:0.1px 0.1px var(--primary, #222),-0.1px -0.1px var(--primary, #222)}:not(.rtl) .d-toc-regular .d-toc .d-toc-heading{padding-left:10px}.rtl .d-toc-regular .d-toc .d-toc-heading{padding-right:10px}:not(.rtl) .d-toc-regular .d-toc .d-toc-heading .d-toc-active:before{left:-10px}.rtl .d-toc-regular .d-toc .d-toc-heading .d-toc-active:before{right:-10px}:not(.rtl) .d-toc-regular .d-toc .d-toc-subheading{padding-left:20px}.rtl .d-toc-regular .d-toc .d-toc-subheading{padding-right:20px}:not(.rtl) .d-toc-regular .d-toc .d-toc-subheading .d-toc-active:before{left:-30px}.rtl .d-toc-regular .d-toc .d-toc-subheading .d-toc-active:before{right:-30px}:not(.rtl) .d-toc-regular .d-toc .d-toc-subheading .d-toc-subheading{padding-left:30px}.rtl .d-toc-regular .d-toc .d-toc-subheading .d-toc-subheading{padding-right:30px}:not(.rtl) .d-toc-regular .d-toc .d-toc-subheading .d-toc-subheading .d-toc-active:before{left:-60px}.rtl .d-toc-regular .d-toc .d-toc-subheading .d-toc-subheading .d-toc-active:before{right:-60px}:not(.rtl) .d-toc-regular .d-toc .d-toc-subheading .d-toc-subheading .d-toc-subheading{padding-left:40px}.rtl .d-toc-regular .d-toc .d-toc-subheading .d-toc-subheading .d-toc-subheading{padding-right:40px}:not(.rtl) .d-toc-regular .d-toc .d-toc-subheading .d-toc-subheading .d-toc-subheading .d-toc-active:before{left:-70px}.rtl .d-toc-regular .d-toc .d-toc-subheading .d-toc-subheading .d-toc-subheading .d-toc-active:before{right:-70px}:not(.rtl) .d-toc-regular .d-toc .d-toc-subheading .d-toc-subheading .d-toc-subheading .d-toc-subheading{padding-left:50px}.rtl .d-toc-regular .d-toc .d-toc-subheading .d-toc-subheading .d-toc-subheading .d-toc-subheading{padding-right:50px}:not(.rtl) .d-toc-regular .d-toc .d-toc-subheading .d-toc-subheading .d-toc-subheading .d-toc-subheading .d-toc-active:before{left:-80px}.rtl .d-toc-regular .d-toc .d-toc-subheading 
.d-toc-subheading .d-toc-subheading .d-toc-subheading .d-toc-active:before{right:-80px}.d-toc-regular .d-toc .d-toc-subheading li{font-size:0.8em}.d-toc-regular #bottom-anchor{opacity:0;height:0;margin:0}.d-toc-regular .post-bottom-wrapper a{color:var(--primary-med-or-secondary-med)}@media screen and (min-width: 1110px){.d-toc{margin-top:1em}.post-bottom-wrapper{padding:1em 0.5em 0 0.5em}.post-bottom-wrapper.mobile{display:none}.d-toc-toggle{display:none !important}.d-toc-close-wrapper,.d-toc-subheading{display:none}.d-toc-post .topic-body,.d-toc-post .topic-avatar{border-top:none}.d-toc-post .d-toc{max-height:85vh;padding-left:0;position:-webkit-sticky;position:sticky;top:75px;margin-bottom:135px;max-width:235px;overflow-y:auto;overflow-x:hidden;align-self:flex-start;flex:1 1 auto}:not(.rtl) .d-toc-post .d-toc{margin-left:-1px}.rtl .d-toc-post .d-toc{margin-right:-1px}.d-toc-post .d-toc-article{display:flex}.d-toc-post .d-toc-article .post-notice{display:none}.d-toc-post .d-toc-article .topic-map{margin-bottom:0}:not(.rtl) .d-toc-post .d-toc-article>.row{border-right:1px solid var(--primary-low, #e9e9e9)}.rtl .d-toc-post .d-toc-article>.row{border-left:1px solid var(--primary-low, #e9e9e9)}.d-toc-post #topic-title{margin-bottom:0}.d-toc-post #topic-title .title-wrapper{border-bottom:1px solid var(--primary-low, #e9e9e9);padding-bottom:0.5em;width:auto}}@media screen and (max-width: 1110px){.d-toc-regular .post-bottom-wrapper{padding:1em 0.75em}.d-toc-regular .post-bottom-wrapper.desktop{display:none}.d-toc-regular #d-toc{z-index:1001;background:var(--secondary, #fff);position:fixed;right:0;top:0;height:100vh;width:100vw;max-width:500px;overflow:scroll;transition:transform 0.5s, opacity 0.25s;transform:translatex(100%);opacity:0}:not(.rtl) .d-toc-regular #d-toc{margin-left:-1px}.rtl .d-toc-regular #d-toc{margin-right:-1px}.d-toc-regular #d-toc.d-toc-mobile{transform:translatex(0);opacity:1}:not(.rtl) .d-toc-regular #d-toc .d-toc-active:before{margin-left:-1px}.rtl .d-toc-regular #d-toc .d-toc-active:before{margin-right:-1px}.d-toc-regular .d-toc-close-wrapper{height:3em;background:var(--secondary, #fff);color:var(--primary-med-or-secondary-med);margin-bottom:1em;position:-webkit-sticky;position:sticky;top:0;display:flex;align-items:center;justify-content:flex-end}.d-toc-regular .d-toc-close-wrapper .d-toc-close{padding:1em 0.75em}.d-toc-regular .d-toc-toggle{position:fixed;bottom:5px;padding:0.5em 1em;background:var(--tertiary, #08c);color:var(--secondary, #fff);z-index:3;margin-bottom:env(safe-area-inset-bottom)}:not(.rtl) .d-toc-regular .d-toc-toggle{right:16px}.rtl .d-toc-regular .d-toc-toggle{left:16px}body.footer-nav-visible .d-toc-regular .d-toc-toggle{bottom:49px}:not(.rtl) .d-toc-regular #d-toc>ul{margin-left:20px;border-left:1px solid var(--primary-low, #e9e9e9)}.rtl .d-toc-regular #d-toc>ul{margin-right:20px;border-right:1px solid var(--primary-low, #e9e9e9)}.d-toc-regular #d-toc>ul:last-child{margin-bottom:5em}}.d-toc-timeline .timeline-container,.d-toc-timeline #topic-progress-wrapper{opacity:0;pointer-events:none;transition:opacity 0.25s}.d-toc-timeline.d-toc-timeline-visible .timeline-container,.d-toc-timeline.d-toc-timeline-visible #topic-progress-wrapper{opacity:1;pointer-events:initial}.edit-title .d-editor-preview [data-theme-toc]{background:var(--tertiary, #08c);color:var(--secondary, #fff);border-top:2px solid var(--secondary, #fff);position:-webkit-sticky;position:sticky;top:0;height:30px;display:flex;align-items:center;justify-content:center}.edit-title 
.d-editor-preview [data-theme-toc]:before{content:"此主题将会生成目录"} 2 | 3 | /*# sourceMappingURL=desktop_theme_18_6044d2798548d883edb7b504e9678e59bef1ea37.css.map */ 4 | -------------------------------------------------------------------------------- /core/templates/resources/desktop_theme_34_b6cc8e9ec8740a61600e8db01d7b9428a3940549.css: -------------------------------------------------------------------------------- 1 | .pdf-preview{width:100%;background-color:#646464;border:0}.attachment.new-tab-pdf:before{display:none}.attachment.new-tab-pdf .new-tab-pdf-icon{margin-right:4px} 2 | 3 | /*# sourceMappingURL=desktop_theme_34_b6cc8e9ec8740a61600e8db01d7b9428a3940549.css.map */ 4 | -------------------------------------------------------------------------------- /core/templates/resources/desktop_theme_37_127867e7b59b2a0102d5189a36d24792f6a176f0.css: -------------------------------------------------------------------------------- 1 | .svg-icon-title{display:none;visibility:hidden;width:0} 2 | 3 | /*# sourceMappingURL=desktop_theme_37_127867e7b59b2a0102d5189a36d24792f6a176f0.css.map */ 4 | -------------------------------------------------------------------------------- /core/templates/resources/desktop_theme_3_8828b367cb8a28b2296c7bfe6f433ad504773044.css: -------------------------------------------------------------------------------- 1 | html{font-family:Arial, "Microsoft YaHei", "微软雅黑";font-size:14px}.topic-list>tbody>tr{font-size:14px}.timeline-scrollarea-wrapper{font-size:14px}#topic-title h1{font-size:16px}h2{font-size:14px;font-weight:bold}h3{font-size:14px}.menu-links.columned li{font-size:14px}.contents{font-size:14px}.cooked,.d-editor-preview{font-size:15px}.cooked,.d-editor-preview img{max-width:100%;height:auto}.body-page p{font-size:14px}.body-page h2{font-size:14px;font-weight:bold}.body-page h3{font-size:14px}.category-list-item h3{font-size:16px}.category-list tbody .category h3{font-size:16px}.extra-info h1{font-size:16px}nav.post-controls button{font-size:14px}.body-page table{font-size:14px}.body-page ul{font-size:14px}.nav-pills>li>a{font-size:14px}.list-controls .category-breadcrumb .dropdown-header{font-size:14px}.list-controls .btn{font-size:14px;font-size:14px}#reply-control a.cancel{font-size:14px} 2 | 3 | /*# sourceMappingURL=desktop_theme_3_8828b367cb8a28b2296c7bfe6f433ad504773044.css.map */ 4 | -------------------------------------------------------------------------------- /core/templates/resources/desktop_theme_52_4859297324a16e5bb8c1f9d01f3bf0ffa6a9b29c.css: -------------------------------------------------------------------------------- 1 | .topic-list .topic-excerpt{display:none} 2 | 3 | /*# sourceMappingURL=desktop_theme_52_4859297324a16e5bb8c1f9d01f3bf0ffa6a9b29c.css.map */ 4 | -------------------------------------------------------------------------------- /core/templates/resources/desktop_theme_54_e006d487a3f75a32724103e42208dca26c361ac8.css: -------------------------------------------------------------------------------- 1 | .post-retort{float:none;border:none;background:none;margin:0px 2px} 2 | 3 | /*# sourceMappingURL=desktop_theme_54_e006d487a3f75a32724103e42208dca26c361ac8.css.map */ 4 | -------------------------------------------------------------------------------- /core/templates/resources/desktop_theme_56_ecee7c2e40c9669a25d0f4e842a1b86202e00733.css: -------------------------------------------------------------------------------- 1 | .post-controls .unaccepted .d-button-label{display:none} 2 | 3 | /*# 
sourceMappingURL=desktop_theme_56_ecee7c2e40c9669a25d0f4e842a1b86202e00733.css.map */ 4 | -------------------------------------------------------------------------------- /core/templates/resources/desktop_theme_58_c33a83ba3ef5ab3b25e5a5619c4161eb0f495dce.css: -------------------------------------------------------------------------------- 1 | .post-stream .gap{display:none !important} 2 | 3 | /*# sourceMappingURL=desktop_theme_58_c33a83ba3ef5ab3b25e5a5619c4161eb0f495dce.css.map */ 4 | -------------------------------------------------------------------------------- /core/templates/resources/desktop_theme_59_0cc413d2f69104e201591ee89508c6059f2237e7.css: -------------------------------------------------------------------------------- 1 | div#watermark-background{position:absolute;top:var(--header-offset, 0);left:0;bottom:0;right:0;min-height:100vh;z-index:0;mix-blend-mode:multiply;padding:0;margin:0;pointer-events:none}div#watermark-background.fixed{position:fixed} 2 | 3 | /*# sourceMappingURL=desktop_theme_59_0cc413d2f69104e201591ee89508c6059f2237e7.css.map */ 4 | -------------------------------------------------------------------------------- /core/templates/resources/f8a88a3a02eaa06e8d2c51ea944df2f7abdc374e.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PhotonQuantum/shuiyuan-archiver/07751f0a94c2bc4c7c8bcdf21f173a373c6e150a/core/templates/resources/f8a88a3a02eaa06e8d2c51ea944df2f7abdc374e.png -------------------------------------------------------------------------------- /gui/src-tauri/.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | /dist 3 | .DS_STORE 4 | /.idea 5 | WixTools 6 | -------------------------------------------------------------------------------- /gui/src-tauri/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "shuiyuan-archiver" 3 | description = "一个将上海交通大学水源社区的文章存档的工具" 4 | authors = ["LightQuantum "] 5 | version = "0.4.7" 6 | edition = "2021" 7 | build = "build.rs" 8 | repository = "https://github.com/PhotonQuantum/shuiyuan-archiver" 9 | 10 | [dependencies] 11 | directories = "5.0" 12 | once_cell = "1.17" 13 | rand = "0.8" 14 | regex = "1.7" 15 | sa_core = { path = "../../core" } 16 | sanitize-filename = "0.4" 17 | sentry = { version = "0.31", default-features = false, features = ["backtrace", "contexts", "panic", "reqwest", "rustls"] } 18 | tap = "1.0" 19 | tauri = { version = "1.3", features = ["dialog-open", "fs-exists", "fs-read-dir", "path-all", "shell-open"] } 20 | tauri-plugin-deep-link = "0.1" 21 | tauri-plugin-store = { git = "https://github.com/tauri-apps/plugins-workspace", branch = "dev" } 22 | tokio = { version = "1.24", features = ["sync"] } 23 | tracing-subscriber = { version = "0.3", features = ["env-filter"] } 24 | urlencoding = "2.1" 25 | webbrowser = "0.8" 26 | 27 | [build-dependencies] 28 | tauri-build = { version = "1.3", features = [] } 29 | 30 | [features] 31 | # by default Tauri runs in production mode 32 | # when `tauri dev` runs it is executed with `cargo run --no-default-features` if `devPath` is a URL 33 | default = ["custom-protocol"] 34 | # this feature is used for production builds where `devPath` points to the filesystem 35 | # DO NOT remove this 36 | custom-protocol = ["tauri/custom-protocol"] 37 | -------------------------------------------------------------------------------- /gui/src-tauri/Info.plist:
-------------------------------------------------------------------------------- 1 | <?xml version="1.0" encoding="UTF-8"?> 2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd"> 3 | <plist version="1.0"> 4 | <dict> 5 | <key>CFBundleURLTypes</key> 6 | <array> 7 | <dict> 8 | <key>CFBundleURLName</key> 9 | <string>me.lightquantum.shuiyuan-archiver</string> 10 | <key>CFBundleURLSchemes</key> 11 | <array> 12 | <string>discourse</string> 13 | </array> 14 | </dict> 15 | </array> 16 | </dict> 17 | </plist> -------------------------------------------------------------------------------- /gui/src-tauri/bacon.toml: -------------------------------------------------------------------------------- 1 | # This is a configuration file for the bacon tool 2 | # More info at https://github.com/Canop/bacon 3 | 4 | default_job = "clippy" 5 | 6 | [keybindings] 7 | k = "scroll-lines(-1)" 8 | j = "scroll-lines(1)" 9 | c = "job:clippy" 10 | t = "job:test" 11 | f = "job:fix" 12 | shift-F9 = "toggle-backtrace" 13 | ctrl-r = "toggle-raw-output" 14 | ctrl-u = "scroll-page(-1)" 15 | ctrl-d = "scroll-page(1)" 16 | 17 | [jobs] 18 | 19 | [jobs.clippy] 20 | command = ["cargo", "clippy", "--tests", "--color", "always"] 21 | need_stdout = false 22 | 23 | [jobs.test] 24 | command = ["cargo", "test", "--color", "always"] 25 | need_stdout = true 26 | watch = ["tests"] 27 | 28 | [jobs.doc] 29 | command = ["cargo", "doc", "--color", "always", "--no-deps"] 30 | need_stdout = false 31 | 32 | [jobs.fix] 33 | command = ["cargo", "clippy", "--fix", "--allow-staged", "--allow-dirty", "--workspace", "--tests", "--color", "always", "--", "-W", "clippy::all", "-W", "clippy::nursery", "-W", "clippy::pedantic"] 34 | need_stdout = false 35 | on_success = "job:clippy" -------------------------------------------------------------------------------- /gui/src-tauri/build.rs: -------------------------------------------------------------------------------- 1 | fn main() { 2 | tauri_build::build(); 3 | } 4 | -------------------------------------------------------------------------------- /gui/src-tauri/entitlements.plist: -------------------------------------------------------------------------------- 1 | <?xml version="1.0" encoding="UTF-8"?> 2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd"> 3 | <plist version="1.0"> 4 | <dict> 5 | <key>com.apple.security.app-sandbox</key> 6 | <true/> 7 | <key>com.apple.security.network.client</key> 8 | <true/> 9 | <key>com.apple.security.files.user-selected.read-write</key> 10 | <true/> 11 | </dict> 12 | </plist> -------------------------------------------------------------------------------- /gui/src-tauri/icons/128x128.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PhotonQuantum/shuiyuan-archiver/07751f0a94c2bc4c7c8bcdf21f173a373c6e150a/gui/src-tauri/icons/128x128.png -------------------------------------------------------------------------------- /gui/src-tauri/icons/128x128@2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PhotonQuantum/shuiyuan-archiver/07751f0a94c2bc4c7c8bcdf21f173a373c6e150a/gui/src-tauri/icons/128x128@2x.png -------------------------------------------------------------------------------- /gui/src-tauri/icons/32x32.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PhotonQuantum/shuiyuan-archiver/07751f0a94c2bc4c7c8bcdf21f173a373c6e150a/gui/src-tauri/icons/32x32.png -------------------------------------------------------------------------------- /gui/src-tauri/icons/Square107x107Logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PhotonQuantum/shuiyuan-archiver/07751f0a94c2bc4c7c8bcdf21f173a373c6e150a/gui/src-tauri/icons/Square107x107Logo.png --------------------------------------------------------------------------------
/gui/src-tauri/icons/Square142x142Logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PhotonQuantum/shuiyuan-archiver/07751f0a94c2bc4c7c8bcdf21f173a373c6e150a/gui/src-tauri/icons/Square142x142Logo.png -------------------------------------------------------------------------------- /gui/src-tauri/icons/Square150x150Logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PhotonQuantum/shuiyuan-archiver/07751f0a94c2bc4c7c8bcdf21f173a373c6e150a/gui/src-tauri/icons/Square150x150Logo.png -------------------------------------------------------------------------------- /gui/src-tauri/icons/Square284x284Logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PhotonQuantum/shuiyuan-archiver/07751f0a94c2bc4c7c8bcdf21f173a373c6e150a/gui/src-tauri/icons/Square284x284Logo.png -------------------------------------------------------------------------------- /gui/src-tauri/icons/Square30x30Logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PhotonQuantum/shuiyuan-archiver/07751f0a94c2bc4c7c8bcdf21f173a373c6e150a/gui/src-tauri/icons/Square30x30Logo.png -------------------------------------------------------------------------------- /gui/src-tauri/icons/Square310x310Logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PhotonQuantum/shuiyuan-archiver/07751f0a94c2bc4c7c8bcdf21f173a373c6e150a/gui/src-tauri/icons/Square310x310Logo.png -------------------------------------------------------------------------------- /gui/src-tauri/icons/Square44x44Logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PhotonQuantum/shuiyuan-archiver/07751f0a94c2bc4c7c8bcdf21f173a373c6e150a/gui/src-tauri/icons/Square44x44Logo.png -------------------------------------------------------------------------------- /gui/src-tauri/icons/Square71x71Logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PhotonQuantum/shuiyuan-archiver/07751f0a94c2bc4c7c8bcdf21f173a373c6e150a/gui/src-tauri/icons/Square71x71Logo.png -------------------------------------------------------------------------------- /gui/src-tauri/icons/Square89x89Logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PhotonQuantum/shuiyuan-archiver/07751f0a94c2bc4c7c8bcdf21f173a373c6e150a/gui/src-tauri/icons/Square89x89Logo.png -------------------------------------------------------------------------------- /gui/src-tauri/icons/StoreLogo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PhotonQuantum/shuiyuan-archiver/07751f0a94c2bc4c7c8bcdf21f173a373c6e150a/gui/src-tauri/icons/StoreLogo.png -------------------------------------------------------------------------------- /gui/src-tauri/icons/icon.icns: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PhotonQuantum/shuiyuan-archiver/07751f0a94c2bc4c7c8bcdf21f173a373c6e150a/gui/src-tauri/icons/icon.icns -------------------------------------------------------------------------------- /gui/src-tauri/icons/icon.ico: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/PhotonQuantum/shuiyuan-archiver/07751f0a94c2bc4c7c8bcdf21f173a373c6e150a/gui/src-tauri/icons/icon.ico -------------------------------------------------------------------------------- /gui/src-tauri/icons/icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PhotonQuantum/shuiyuan-archiver/07751f0a94c2bc4c7c8bcdf21f173a373c6e150a/gui/src-tauri/icons/icon.png -------------------------------------------------------------------------------- /gui/src-tauri/src/main.rs: -------------------------------------------------------------------------------- 1 | #![cfg_attr( 2 | all(not(debug_assertions), target_os = "windows"), 3 | windows_subsystem = "windows" 4 | )] 5 | #![allow(clippy::module_name_repetitions)] 6 | 7 | use std::error::Error as StdError; 8 | use std::path::PathBuf; 9 | use std::str::FromStr; 10 | use std::sync::Mutex; 11 | 12 | use once_cell::sync::Lazy; 13 | use tap::Tap; 14 | use tauri::async_runtime::channel; 15 | use tauri::Wry; 16 | use tracing_subscriber::EnvFilter; 17 | 18 | use sa_core::archiver; 19 | use sa_core::archiver::{fetch_topic_meta, TopicMeta}; 20 | use sa_core::client::{create_client_with_token, oauth_url, token_from_payload, Client}; 21 | use sa_core::re_exports::rsa; 22 | use sa_core::re_exports::uuid::Uuid; 23 | 24 | use crate::url_scheme::{URLScheme, URLSchemePlugin}; 25 | 26 | mod url_scheme; 27 | 28 | type BoxedError = Box; 29 | type Result = std::result::Result; 30 | 31 | static APP_ID: Lazy = 32 | Lazy::new(|| Uuid::from_str("1bf328bf-239b-46ed-9696-92fdcb51f2b1").unwrap()); 33 | 34 | #[tauri::command] 35 | fn sanitize(s: String) -> String { 36 | sanitize_filename::sanitize(s) 37 | } 38 | 39 | #[tauri::command] 40 | fn open_browser( 41 | key: tauri::State, 42 | url_scheme: tauri::State, 43 | ) -> bool { 44 | let use_callback = dbg!(url_scheme.registered); 45 | webbrowser::open(&oauth_url(&APP_ID, key.as_ref(), use_callback)).expect("no browser"); 46 | use_callback 47 | } 48 | 49 | #[tauri::command] 50 | fn token_from_oauth(payload: String, key: tauri::State) -> String { 51 | token_from_payload(&payload, &key).unwrap_or_default() 52 | } 53 | 54 | #[tauri::command] 55 | async fn login_with_token( 56 | token: String, 57 | client: tauri::State<'_, Mutex>>, 58 | window: tauri::Window, 59 | ) -> Result<(), String> { 60 | let rate_limit_callback = { 61 | let window = window.clone(); 62 | move |t| { 63 | eprintln!("rate limit: {}", t); 64 | window.emit("rate-limit-event", t).unwrap(); 65 | } 66 | }; 67 | 68 | let new_client = create_client_with_token(&token, rate_limit_callback) 69 | .await 70 | .map_err(|e| { 71 | sentry::capture_error(&e); 72 | e.to_string() 73 | })?; 74 | *client.lock().unwrap() = Some(new_client); 75 | Ok(()) 76 | } 77 | 78 | #[tauri::command] 79 | async fn fetch_meta( 80 | topic_id: u32, 81 | client: tauri::State<'_, Mutex>>, 82 | ) -> Result { 83 | eprintln!("fetch meta {}", topic_id); 84 | let client = client.lock().unwrap().clone().expect("client"); 85 | fetch_topic_meta(&client, topic_id) 86 | .await 87 | .tap(|_| { 88 | eprintln!("fetch meta done"); 89 | }) 90 | .map_err(|e| { 91 | eprintln!("fetch meta error: {}", e); 92 | sentry::capture_error(&e); 93 | e.to_string() 94 | }) 95 | } 96 | 97 | #[tauri::command] 98 | async fn archive( 99 | topic_meta: TopicMeta, 100 | save_to: String, 101 | mask_user: bool, 102 | window: tauri::Window, 103 | client: 
tauri::State<'_, Mutex>>, 104 | ) -> Result<(), String> { 105 | let client = client.lock().unwrap().clone().expect("client"); 106 | let path = PathBuf::from(save_to); 107 | let (tx, mut rx) = channel(8); 108 | tauri::async_runtime::spawn(async move { 109 | while let Some(ev) = rx.recv().await { 110 | window.emit("progress-event", ev).unwrap(); 111 | } 112 | }); 113 | if let Err(e) = archiver::archive(&client, topic_meta, &path, mask_user, tx).await { 114 | sentry::capture_error(&e); 115 | return Err(e.to_string()); 116 | } 117 | Ok(()) 118 | } 119 | 120 | fn main() { 121 | let _guard = option_env!("SENTRY_DSN").map(|dsn| { 122 | sentry::init(( 123 | dsn, 124 | sentry::ClientOptions { 125 | release: sentry::release_name!(), 126 | ..Default::default() 127 | }, 128 | )) 129 | }); 130 | tracing_subscriber::fmt() 131 | .with_env_filter(EnvFilter::from_default_env()) 132 | .init(); 133 | 134 | tauri_plugin_deep_link::prepare("me.lightquantum.shuiyuan-archiver"); 135 | 136 | let key = rsa::RsaPrivateKey::new(&mut rand::thread_rng(), 2048).unwrap(); 137 | let client: Mutex> = Mutex::new(None); 138 | 139 | tauri::Builder::default() 140 | .plugin(tauri_plugin_store::Builder::default().build()) 141 | .plugin(URLSchemePlugin) 142 | .manage(key) 143 | .manage(client) 144 | .invoke_handler(tauri::generate_handler![ 145 | sanitize, 146 | login_with_token, 147 | open_browser, 148 | token_from_oauth, 149 | fetch_meta, 150 | archive, 151 | ]) 152 | .run(tauri::generate_context!()) 153 | .expect("error while running tauri application"); 154 | } 155 | -------------------------------------------------------------------------------- /gui/src-tauri/src/url_scheme.rs: -------------------------------------------------------------------------------- 1 | use regex::Regex; 2 | use tauri::plugin::Plugin; 3 | use tauri::{AppHandle, Manager, Wry}; 4 | use tauri_plugin_store::JsonValue; 5 | 6 | pub struct URLSchemePlugin; 7 | 8 | pub struct URLScheme { 9 | pub registered: bool, 10 | } 11 | 12 | impl Plugin for URLSchemePlugin { 13 | fn name(&self) -> &'static str { 14 | "URLSchemePlugin" 15 | } 16 | fn initialize( 17 | &mut self, 18 | app: &AppHandle, 19 | _config: JsonValue, 20 | ) -> tauri::plugin::Result<()> { 21 | app.manage(URLScheme { 22 | registered: register_deep_link(app.clone()), 23 | }); 24 | Ok(()) 25 | } 26 | } 27 | 28 | fn register_deep_link(handle: AppHandle) -> bool { 29 | #[cfg(any(target_os = "macos", target_os = "windows", target_os = "linux"))] 30 | { 31 | tauri_plugin_deep_link::register("discourse", move |request| { 32 | let re = Regex::new(r#"discourse://auth_redirect/?\?payload=(.+)"#).unwrap(); 33 | if let Some(s) = re.captures(&request).map(|m| { 34 | urlencoding::decode(m.get(1).expect("no payload").as_str()) 35 | .expect("utf8") 36 | .to_string() 37 | }) { 38 | handle.emit_all("update-token", s).unwrap() 39 | } 40 | }) 41 | .expect("failed to register deep link handler"); 42 | true 43 | } 44 | #[cfg(not(any(target_os = "macos", target_os = "windows", target_os = "linux")))] 45 | false 46 | } 47 | -------------------------------------------------------------------------------- /gui/src-tauri/tauri.conf.json: -------------------------------------------------------------------------------- 1 | { 2 | "package": { 3 | "productName": "ShuiyuanArchiver", 4 | "version": "0.4.7" 5 | }, 6 | "build": { 7 | "distDir": "../../dist", 8 | "devPath": "http://localhost:1234", 9 | "beforeDevCommand": "pnpm start", 10 | "beforeBuildCommand": "pnpm build" 11 | }, 12 | "tauri": { 13 | "bundle": { 14 | "active": 
true, 15 | "targets": "all", 16 | "identifier": "me.lightquantum.shuiyuan-archiver", 17 | "icon": [ 18 | "icons/32x32.png", 19 | "icons/128x128.png", 20 | "icons/128x128@2x.png", 21 | "icons/icon.icns", 22 | "icons/icon.ico" 23 | ], 24 | "resources": [], 25 | "externalBin": [], 26 | "copyright": "Copyright © 2022 LightQuantum.", 27 | "category": "Productivity", 28 | "shortDescription": "一个将上海交通大学水源社区的文章存档的工具。", 29 | "longDescription": "", 30 | "deb": { 31 | "depends": [] 32 | }, 33 | "macOS": { 34 | "frameworks": [], 35 | "exceptionDomain": "", 36 | "signingIdentity": null, 37 | "providerShortName": null, 38 | "entitlements": "entitlements.plist" 39 | }, 40 | "windows": { 41 | "wix": { 42 | "language": "zh-CN" 43 | }, 44 | "certificateThumbprint": null, 45 | "digestAlgorithm": "sha256", 46 | "timestampUrl": "" 47 | } 48 | }, 49 | "updater": { 50 | "active": false 51 | }, 52 | "allowlist": { 53 | "dialog": { 54 | "open": true 55 | }, 56 | "fs": { 57 | "readDir": true, 58 | "exists": true 59 | }, 60 | "path": { 61 | "all": true 62 | }, 63 | "shell": { 64 | "open": ".*" 65 | } 66 | }, 67 | "windows": [ 68 | { 69 | "title": "水源存档工具", 70 | "width": 500, 71 | "height": 380, 72 | "resizable": false, 73 | "fullscreen": false 74 | } 75 | ], 76 | "security": { 77 | "csp": null 78 | } 79 | } 80 | } 81 | -------------------------------------------------------------------------------- /gui/src/App.tsx: -------------------------------------------------------------------------------- 1 | import {useColorScheme} from "@mantine/hooks"; 2 | import {MantineProvider} from "@mantine/core"; 3 | import {Main} from "./Main"; 4 | import {RecoilRoot} from "recoil"; 5 | import {ModalsProvider} from "@mantine/modals"; 6 | 7 | export const App = () => { 8 | const colorScheme = useColorScheme(); 9 | 10 | return ( 11 | 12 | 13 | 14 |
        15 | 16 | 17 | 18 | ) 19 | } -------------------------------------------------------------------------------- /gui/src/Main.tsx: -------------------------------------------------------------------------------- 1 | import {AppShell, Container, Stepper} from "@mantine/core"; 2 | import {Login} from "./steps/Login"; 3 | import {useRecoilState} from "recoil"; 4 | import {currentStep} from "./states"; 5 | import {Config} from "./steps/Config"; 6 | import {Archive} from "./steps/Archive"; 7 | import {Finish} from "./steps/Finish"; 8 | 9 | export const Main = () => { 10 | const [step, _] = useRecoilState(currentStep); 11 | return ( 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | ) 37 | } -------------------------------------------------------------------------------- /gui/src/bindings.ts: -------------------------------------------------------------------------------- 1 | /* 2 | Generated by typeshare 1.0.0 3 | */ 4 | 5 | export interface Category { 6 | name: string; 7 | color: string; 8 | } 9 | 10 | export interface TopicMeta { 11 | id: number; 12 | title: string; 13 | description: string; 14 | categories: Category[]; 15 | tags: string[]; 16 | "post-ids": number[]; 17 | } 18 | 19 | /** Download events. */ 20 | export type DownloadEvent = 21 | /** Total post chunks to download. It's determined once metadata is fetched. */ 22 | | { kind: "post-chunks-total", value: number } 23 | /** A post chunk is downloaded. */ 24 | | { kind: "post-chunks-downloaded-inc", value?: undefined } 25 | /** 26 | * A new resource has been discovered. Total count of resources to download is not known 27 | * because of incremental fetching. 28 | */ 29 | | { kind: "resource-total-inc", value?: undefined } 30 | /** A resource is downloaded. */ 31 | | { kind: "resource-downloaded-inc", value?: undefined }; 32 | 33 | -------------------------------------------------------------------------------- /gui/src/commands.ts: -------------------------------------------------------------------------------- 1 | import {invoke} from "@tauri-apps/api"; 2 | import {TopicMeta} from "./bindings"; 3 | 4 | export const openBrowser = async () => { 5 | return await invoke("open_browser"); 6 | } 7 | export const tokenFromOauth = async (payload: string) => { 8 | return await invoke("token_from_oauth", {payload}); 9 | } 10 | 11 | export const loginWithToken = async (token: string) => { 12 | await invoke("login_with_token", {token}); 13 | } 14 | 15 | export const fetchMeta = async (topicId: number) => { 16 | return await invoke("fetch_meta", {topicId}); 17 | } 18 | 19 | export const archive = async (topicMeta: TopicMeta, saveTo: string, maskUser: boolean) => { 20 | await invoke("archive", {topicMeta, saveTo, maskUser}); 21 | } 22 | 23 | export const sanitize = async (s: string) => { 24 | return await invoke("sanitize", {s}); 25 | } 26 | -------------------------------------------------------------------------------- /gui/src/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 水源存档工具 6 | 7 | 8 |
        9 | 10 | 11 | -------------------------------------------------------------------------------- /gui/src/index.tsx: -------------------------------------------------------------------------------- 1 | import {App} from "./App"; 2 | import {createRoot} from "react-dom/client"; 3 | 4 | const app = document.getElementById("app"); 5 | const root = createRoot(app!); 6 | root.render(); -------------------------------------------------------------------------------- /gui/src/states.ts: -------------------------------------------------------------------------------- 1 | import {atom} from "recoil"; 2 | import {Store} from "tauri-plugin-store-api"; 3 | import {TopicMeta} from "./bindings"; 4 | 5 | export const maskUserState = atom({ 6 | key: "maskUserState", 7 | default: false, 8 | }); 9 | 10 | export const saveToState = atom({ 11 | key: "saveToState", 12 | default: "", 13 | }); 14 | 15 | export const currentStep = atom({ 16 | key: "currentStep", 17 | default: 0, 18 | }); 19 | 20 | export const archiveResultState = atom({ 21 | key: "archiveResult", 22 | default: { 23 | success: true, 24 | message: "", 25 | }, 26 | }); 27 | 28 | const store = new Store("settings"); 29 | export const tokenState = atom({ 30 | key: "token", 31 | default: "", 32 | effects: [ 33 | ({setSelf, onSet}) => { 34 | store.get("token") 35 | .then(token => { 36 | setSelf(token as string); 37 | }) 38 | 39 | onSet((newValue, _, isReset) => { 40 | if (isReset || newValue === '') { 41 | store.delete("token").then(_ => { 42 | }); 43 | } else { 44 | store.set("token", newValue).then(_ => { 45 | }); 46 | } 47 | }) 48 | } 49 | ] 50 | }); 51 | 52 | let rateLimitInterval: number | undefined = undefined; 53 | export const rateLimitState = atom({ 54 | key: "rateLimit", 55 | default: 0, 56 | effects: [ 57 | ({setSelf, onSet}) => { 58 | onSet((newValue, oldValue, isReset) => { 59 | console.log("rateLimitState", newValue, oldValue, isReset); 60 | if (isReset) { 61 | setSelf(0); 62 | clearInterval(rateLimitInterval) 63 | return; 64 | } 65 | if (newValue > oldValue) { 66 | clearInterval(rateLimitInterval); 67 | console.log("rateLimitState", "start interval", newValue); 68 | setSelf(newValue); 69 | 70 | let i = 1; 71 | rateLimitInterval = setInterval(() => { 72 | console.log("rateLimitState", "interval", newValue - i); 73 | setSelf(newValue - i); 74 | if (newValue - i <= 0) { 75 | clearInterval(rateLimitInterval); 76 | } else { 77 | i++; 78 | } 79 | }, 1000); 80 | } 81 | }); 82 | } 83 | ] 84 | }) 85 | 86 | export const topicMetaState = atom({ 87 | key: "topicMeta", 88 | default: null 89 | }) -------------------------------------------------------------------------------- /gui/src/steps/Archive.tsx: -------------------------------------------------------------------------------- 1 | import {Alert, Center, Group, Loader, Stack, Text, useMantineTheme} from "@mantine/core"; 2 | import {appWindow} from "@tauri-apps/api/window"; 3 | import {useRecoilState, useRecoilValue, useSetRecoilState} from "recoil"; 4 | import {useEffect, useState} from "react"; 5 | import {archiveResultState, currentStep, maskUserState, rateLimitState, saveToState, topicMetaState} from "../states"; 6 | import {listen, UnlistenFn} from "@tauri-apps/api/event"; 7 | import {AlertCircle, Check, Clock, CloudDownload} from "tabler-icons-react"; 8 | import {DownloadEvent} from "../bindings"; 9 | import {archive} from "../commands"; 10 | 11 | type UnlistenStruct = { 12 | unsubscribe: UnlistenFn; 13 | } 14 | 15 | export const Archive = () => { 16 | const theme = useMantineTheme(); 
17 | 18 | const topicMeta = useRecoilValue(topicMetaState); 19 | const saveTo = useRecoilValue(saveToState); 20 | const maskUser = useRecoilValue(maskUserState); 21 | const setArchiveResult = useSetRecoilState(archiveResultState); 22 | const setStep = useSetRecoilState(currentStep); 23 | 24 | const [rateLimit, setRateLimit] = useRecoilState(rateLimitState); 25 | const [fetchMeta, setFetchMeta] = useState(true); 26 | const [pageDownloaded, setPageDownloaded] = useState(0); 27 | const [pageTotal, setPageTotal] = useState(0); 28 | const [resourcesDownloaded, setResourcesDownloaded] = useState(0); 29 | const [resourcesTotal, setResourcesTotal] = useState(0); 30 | 31 | const [channelRateLimit, setChannelRateLimit] = useState(null); 32 | const [channelProgress, setChannelProgress] = useState(null); 33 | 34 | useEffect(() => { 35 | archive(topicMeta!, saveTo, maskUser) 36 | .then(() => { 37 | setArchiveResult({success: true, message: ""}); 38 | setStep(3); 39 | }) 40 | .catch(resp => { 41 | setArchiveResult({success: false, message: resp as string}); 42 | setStep(3); 43 | }) 44 | }, [topicMeta, saveTo, maskUser]); 45 | 46 | useEffect(() => { 47 | listen("rate-limit-event", (rateLimit) => { 48 | console.log("rateLimit", rateLimit); 49 | setRateLimit(rateLimit.payload); 50 | }).then(unsubscribe => { 51 | setChannelRateLimit({unsubscribe}); 52 | }).catch(e => { 53 | console.error(e); 54 | }); 55 | appWindow.listen("progress-event", (progress) => { 56 | const payload = progress.payload; 57 | if (payload.kind === "post-chunks-total") { 58 | setFetchMeta(false); 59 | setPageTotal(payload.value!); 60 | } else if (payload.kind === "post-chunks-downloaded-inc") { 61 | setPageDownloaded((v) => v + 1); 62 | } else if (payload.kind === "resource-downloaded-inc") { 63 | setResourcesDownloaded((v) => v + 1); 64 | } else if (payload.kind === "resource-total-inc") { 65 | setResourcesTotal((v) => v + 1); 66 | } 67 | }).then(unsubscribe => { 68 | setChannelProgress({unsubscribe}); 69 | }).catch(e => { 70 | console.error(e); 71 | }); 72 | 73 | return () => { 74 | console.log("start unsubscribing channels", channelProgress, channelRateLimit); 75 | if (channelRateLimit !== null) { 76 | console.log("unsubscribe rate-limit"); 77 | channelRateLimit.unsubscribe(); 78 | } 79 | if (channelProgress !== null) { 80 | console.log("unsubscribe progress"); 81 | channelProgress.unsubscribe(); 82 | } 83 | }; 84 | }, []); 85 | 86 | const stage = fetchMeta ? 0 : (pageDownloaded !== pageTotal ? 1 : 2); 87 | 88 | return ( 89 | <> 90 | {rateLimit !== 0 && 91 | } title="限流" color="orange"> 92 | 检测到您被限流!将在等待 {rateLimit} 秒后继续下载... 93 | 94 | } 95 |
        96 | 97 | 98 | 99 | 100 | {stage === 0 ? 101 | : 102 | 103 | } 104 | 读取元信息 ...{stage !== 0 && " 完成"} 105 | 106 | 107 | {stage === 0 ? 108 | : 109 | (stage === 1 ? 110 | : 111 | ) 112 | } 113 | 抓取页面 114 | {(stage > 0) && (stage === 1 ? ` ... ${pageDownloaded}/${pageTotal}` : " ... 完成")} 115 | 116 | 117 | 118 | {stage < 2 ? 119 | : 120 | 121 | } 122 | 抓取资源 123 | {stage === 2 && ` ... ${resourcesDownloaded}/${resourcesTotal}`} 124 | 125 | 126 | 127 | 128 | 129 |
        130 | 131 | ); 132 | } -------------------------------------------------------------------------------- /gui/src/steps/Config.tsx: -------------------------------------------------------------------------------- 1 | import {Button, Code, Group, Loader, Space, Stack, Switch, Text, TextInput, Tooltip} from "@mantine/core"; 2 | import {atom, useRecoilState, useSetRecoilState} from "recoil"; 3 | import {currentStep, maskUserState, saveToState, topicMetaState} from "../states"; 4 | import {fetchMeta, sanitize} from "../commands"; 5 | import debounce from "debounce-promise"; 6 | import {useState} from "react"; 7 | import {openConfirmModal} from "@mantine/modals"; 8 | import {OpenConfirmModal} from "@mantine/modals/lib/context"; 9 | import {dialog, fs, path} from "@tauri-apps/api"; 10 | 11 | const debouncedFetchMeta = debounce(fetchMeta, 500); 12 | 13 | const topicUrlState = atom({ 14 | key: "topicUrl", 15 | default: "", 16 | }); 17 | const topicErrorState = atom({ 18 | key: "topicError", 19 | default: false, 20 | }); 21 | const savePathState = atom({ 22 | key: "savePath", 23 | default: "", 24 | }); 25 | 26 | const prompts = [ 27 | {desc: "您选择的文件夹不为空", no_subdir: "直接保存", subdir: "存在新建文件夹中"}, 28 | {desc: "您选择的文件夹不为空,而且这个文件夹看起来是一个先前的存档", no_subdir: "更新存档", subdir: "存在新建文件夹中"} 29 | ]; 30 | 31 | const asyncConfirm = (options: OpenConfirmModal) => new Promise((resolve) => openConfirmModal({ 32 | onConfirm: () => resolve(true), 33 | onCancel: () => resolve(false), 34 | onClose: () => resolve(null), 35 | ...options 36 | })); 37 | 38 | export const Config = () => { 39 | const [topicMeta, setTopicMeta] = useRecoilState(topicMetaState); 40 | const setSaveTo = useSetRecoilState(saveToState); 41 | const [maskUser, setMaskUser] = useRecoilState(maskUserState); 42 | const setStep = useSetRecoilState(currentStep); 43 | 44 | const [topicUrl, setTopicUrl] = useRecoilState(topicUrlState); 45 | const [topicError, setTopicError] = useRecoilState(topicErrorState); 46 | const [fetching, setFetching] = useState(false); 47 | const [savePath, setSavePath] = useRecoilState(savePathState); 48 | 49 | const extractTopic = (topic: string) => { 50 | const [, name] = topic.match(/https:\/\/shuiyuan.sjtu.edu.cn\/t\/topic\/(\d+)/) || []; 51 | return name; 52 | }; 53 | 54 | const ready = !topicError && !fetching && topicMeta !== null && savePath !== ""; 55 | 56 | const onNextStep = async () => { 57 | console.log("readDir", await fs.readDir(savePath)); 58 | if (await fs.exists(savePath) && (await fs.readDir(savePath)).length > 0) { 59 | const filename = await sanitize(`水源_${topicMeta!.title}`); 60 | const isArchive = (await path.basename(savePath) === filename); 61 | 62 | const prompt = prompts[isArchive ? 
1 : 0]; 63 | const create_subdir = await asyncConfirm({ 64 | title: "文件夹不为空", 65 | children: {prompt.desc}, 66 | labels: {confirm: prompt.subdir, cancel: prompt.no_subdir}, 67 | centered: true 68 | }); 69 | if (create_subdir === null) { 70 | return 71 | } 72 | 73 | if (create_subdir) { 74 | const newPath = await path.join(savePath, filename); 75 | if (await fs.exists(await path.join(savePath, filename))) { 76 | const confirm = await asyncConfirm({ 77 | title: "存档已存在", 78 | children: {newPath} 已存在一个存档, 79 | labels: {confirm: "更新存档", cancel: "取消"}, 80 | centered: true 81 | }); 82 | if (!confirm) { 83 | return; 84 | } 85 | } 86 | setSaveTo(newPath); 87 | } else { 88 | setSaveTo(savePath); 89 | } 90 | } else { 91 | setSaveTo(savePath); 92 | } 93 | setStep(2); 94 | } 95 | 96 | return ( 97 | 98 | 103 | 104 | 加载中 105 | : 106 | (topicMeta && 107 | {topicMeta.title}}> 108 | {topicMeta.title} 109 | 110 | )} 111 | onChange={ev => { 112 | const value = ev.target.value; 113 | setTopicUrl(value); 114 | if (value === '') { 115 | setTopicError(false); 116 | setTopicMeta(null); 117 | } else { 118 | const topic = extractTopic(value); 119 | if (topic) { 120 | setTopicError(false); 121 | (async () => { 122 | try { 123 | setFetching(true); 124 | const meta = await debouncedFetchMeta(parseInt(topic)); 125 | setFetching(false); 126 | setTopicMeta(meta); 127 | } catch (e) { 128 | console.log("Topic error"); 129 | setTopicError(true); 130 | } 131 | })() 132 | } else { 133 | setTopicError(true); 134 | } 135 | } 136 | }}/> 137 | { 139 | dialog.open({directory: true}).then(folder => { 140 | if (folder !== "") { 141 | setSavePath(folder as string); 142 | } 143 | }) 144 | }}>浏览 145 | }/> 146 | setMaskUser(ev.currentTarget.checked)} 148 | label="打码用户名及头像" 149 | /> 150 | 151 | 152 | 153 | ) 154 | } 155 | -------------------------------------------------------------------------------- /gui/src/steps/Finish.tsx: -------------------------------------------------------------------------------- 1 | import {Button, Center, Group, Stack, Text, ThemeIcon} from "@mantine/core"; 2 | import {MoodCry, MoodSmile} from "tabler-icons-react"; 3 | import {useRecoilState, useRecoilValue, useSetRecoilState} from "recoil"; 4 | import {archiveResultState, currentStep, saveToState} from "../states"; 5 | import {shell} from "@tauri-apps/api"; 6 | 7 | export const Finish = () => { 8 | const [{message, success}, _] = useRecoilState(archiveResultState); 9 | const saveTo = useRecoilValue(saveToState); 10 | const setStep = useSetRecoilState(currentStep); 11 | return ( 12 | 13 |
        14 | 15 | 16 | {success ? : } 17 | 18 | 19 | {success ? "存档完成" : "存档失败"} 20 | {message && {message}} 21 | 22 | 23 |
        24 |
        25 | 26 | 28 | {success && } 29 | 30 |
        31 |
        32 | ) 33 | } -------------------------------------------------------------------------------- /gui/src/steps/Login.tsx: -------------------------------------------------------------------------------- 1 | import {useRecoilState, useSetRecoilState} from "recoil"; 2 | import {currentStep, tokenState} from "../states"; 3 | import {Button, Center, Group, Loader, Space, Stack, Text, Textarea, useMantineTheme} from "@mantine/core"; 4 | import {useEffect, useState} from "react"; 5 | import {loginWithToken, openBrowser, tokenFromOauth} from "../commands"; 6 | import {listen, UnlistenFn} from "@tauri-apps/api/event"; 7 | import {AlertTriangle} from "tabler-icons-react"; 8 | 9 | type UnlistenStruct = { 10 | unsubscribe: UnlistenFn; 11 | } 12 | 13 | enum OpenState { 14 | NotOpened, 15 | Plain, 16 | Callback 17 | } 18 | 19 | export const Login = () => { 20 | const theme = useMantineTheme(); 21 | 22 | const [loading, setLoading] = useState(false); 23 | const [opened, setOpened] = useState(OpenState.NotOpened); 24 | const [token, setToken] = useRecoilState(tokenState); 25 | const setCurrentStep = useSetRecoilState(currentStep); 26 | const [OAuthKey, setOAuthKey] = useState(""); 27 | const [keyError, setKeyError] = useState(""); 28 | const [channelUpdateToken, setChannelUpdateToken] = useState(null); 29 | const enabled = OAuthKey.trim().length > 0; 30 | 31 | const validateToken = async (token: string) => { 32 | setLoading(true); 33 | try { 34 | await loginWithToken(token); 35 | setCurrentStep(1); 36 | } catch (e) { 37 | setToken(''); 38 | } 39 | setLoading(false); 40 | } 41 | 42 | const loginWithOAuthKey = async (key: string) => { 43 | const token = await tokenFromOauth(key.replaceAll("\n", "")); 44 | if (token !== '') { 45 | setToken(token); 46 | } else { 47 | setKeyError("无效的授权码"); 48 | } 49 | } 50 | 51 | useEffect(() => { 52 | // tauri listen for event `update_token` 53 | listen("update-token", (token) => { 54 | loginWithOAuthKey(token.payload).then(_ => { 55 | }) 56 | }).then(unsubscribe => { 57 | setChannelUpdateToken({unsubscribe}); 58 | }).catch(e => { 59 | console.error(e); 60 | }); 61 | 62 | return () => { 63 | if (channelUpdateToken !== null) { 64 | channelUpdateToken.unsubscribe(); 65 | } 66 | }; 67 | }, []) 68 | 69 | useEffect(() => { 70 | if (token !== '') { 71 | validateToken(token).then(_ => { 72 | }); 73 | } 74 | }, [token]); 75 | 76 | return ( 77 | <>{loading ? 78 |
        79 | 80 | 81 | 正在登录... 82 | 83 |
        : 84 | (opened === OpenState.NotOpened ? 85 | 86 | 为了存档水源贴子,我们需要您水源账号的只读权限 87 | 96 | : (opened === OpenState.Plain ? 97 | 请在弹出的网页授权后,按照指引将授权码粘贴在下方 98 |