├── .gitignore ├── CHANGELOG.txt ├── CONFIG.example.ts ├── LICENSE ├── README.md ├── db ├── .gitignore ├── Cargo.lock ├── Cargo.toml ├── README.md ├── build.rs ├── build.sh ├── cohost_static.txt ├── config.example.toml ├── diesel.toml ├── md-render │ ├── .gitignore │ ├── build.sh │ ├── package-lock.json │ ├── package.json │ ├── rollup.config.mjs │ ├── src │ │ ├── awawawa.patch │ │ ├── client.tsx │ │ ├── lightbox2.tsx │ │ ├── patch_util.js │ │ └── server-render.tsx │ └── tsconfig.json ├── migrations │ ├── 2024-09-27-134854_init │ │ ├── down.sql │ │ └── up.sql │ ├── 2024-10-01-150618_related_tags │ │ ├── down.sql │ │ └── up.sql │ ├── 2024-10-11-193723_data_migration │ │ ├── down.sql │ │ └── up.sql │ └── 2024-10-20-105004_draft_nonces │ │ ├── down.sql │ │ └── up.sql ├── src │ ├── bundled_files.rs │ ├── comment.rs │ ├── context.rs │ ├── data.rs │ ├── dl.rs │ ├── feed.rs │ ├── import_cdl1.rs │ ├── login.rs │ ├── main.rs │ ├── merge.rs │ ├── post.rs │ ├── project.rs │ ├── render │ │ ├── api_data.rs │ │ ├── feed.rs │ │ ├── index.rs │ │ ├── md_render.rs │ │ ├── md_render_rt.js │ │ ├── mod.rs │ │ ├── project_profile.rs │ │ ├── rewrite.rs │ │ └── single_post.rs │ ├── res_ref.rs │ ├── schema.rs │ ├── server.rs │ └── trpc.rs ├── static │ ├── base.css │ └── tailwind-prose.css └── templates │ ├── base.html │ ├── comments.html │ ├── dashboard.html │ ├── error.html │ ├── index.html │ ├── liked_feed.html │ ├── pagination_eggs.html │ ├── post.html │ ├── project_profile.html │ ├── project_sidebar.html │ ├── single_post.html │ └── tag_feed.html ├── deno.json ├── deno.lock ├── main.ts ├── run.sh └── src ├── changelog.ts ├── cohost-source.ts ├── comment.ts ├── config.ts ├── context.ts ├── data-portability-archive.ts ├── likes.ts ├── markdown.ts ├── model.ts ├── post-index.ts ├── post-page.ts ├── post.ts ├── project.ts ├── script-compiler.ts └── scripts ├── index.ts ├── post-index.tsx ├── post-page.tsx └── shared.ts /.gitignore: -------------------------------------------------------------------------------- 1 | /CONFIG.ts 2 | out 3 | -------------------------------------------------------------------------------- /CHANGELOG.txt: -------------------------------------------------------------------------------- 1 | url=https://raw.githubusercontent.com/cpsdqs/cohost-dl/main/CHANGELOG.txt 2 | 3 | * 2024-09-19: handle objects created by the new post editor correctly 4 | * 2024-09-19: The Cohost Archive Global Feed 5 | * 2024-09-19: add better project index pages with pagination 6 | * 2024-09-17: add update checking because it won't be possible to post on Cohost anymore soon 7 | * 2024-09-17: add Microsoft Windows compatibility 8 | -------------------------------------------------------------------------------- /CONFIG.example.ts: -------------------------------------------------------------------------------- 1 | // Copy the cookie header from dev tools on cohost.org. This is used to log in, so don't share it. 2 | // 3 | // The page you’re currently logged into will be the point of view for cohost-dl data. 4 | // You probably shouldn’t switch pages in the browser while the script is running. 5 | // However, you can switch to different pages before running the script multiple times if you’d like 6 | // to e.g. download liked posts for your sideblogs as well! 7 | export const COOKIE = 'connect.sid=adhjsakfahdsfjkash'; 8 | 9 | // Load all of your own posts 10 | // 11 | // You must list your handle here for likes to be loaded. 
12 | // (Also, make sure that the page you’re currently logged into doesn’t have any of these muted or something) 13 | export const PROJECTS = ['your-handle']; 14 | 15 | // Load some specific additional posts 16 | export const POSTS = [ 17 | 'https://cohost.org/example/123456-example-post', 18 | ]; 19 | 20 | // Some CSS posts contain external images that load forever 21 | export const DO_NOT_FETCH_HOSTNAMES = [ 22 | 'eggbugpocket.queertra.sh', // GIF plays Pokémon 23 | 'r0t.is', // Cohost runs Windows XP 24 | ]; 25 | 26 | // Some posts may have disappeared between loading the list of posts and actually loading the posts, 27 | // and give you a '404 not found' error. 28 | // These post IDs can then be listed here and be skipped when loading, so as not to keep retrying 29 | // every time you run the script. 30 | export const SKIP_POSTS = [ 31 | 9639936, 32 | ]; 33 | 34 | // Skips downloading your likes 35 | export const SKIP_LIKES = false; 36 | 37 | // You can keep this set to '' if you don't have a data portability archive from cohost. 38 | // If you do have one, set this to the path to the directory that contains the `user.json` file. 39 | // e.g. if you have it at /Users/example/Desktop/cohost-data/user.json, 40 | // then set this to '/Users/example/Desktop/cohost-data'. 41 | // This information will then be used to also load posts you've commented on or sent an ask for. 42 | export const DATA_PORTABILITY_ARCHIVE_PATH = ''; 43 | 44 | // Set this to false to disable Javascript, which is responsible for interaction on the generated pages 45 | // (read more/read less, opening/closing CWs, image attachments, etc.). 46 | // It's a little janky, so maybe you want an HTML-only export. 47 | export const ENABLE_JAVASCRIPT = true; 48 | 49 | // Alters pages to look like they're being viewed by a more generic observer instead of how your account sees things. 50 | // - Attempts to revert settings for silenced tags, CWs, 18+ 51 | // - These cannot be completely removed right now. Original settings will be briefly visible if they were applicable 52 | // to that particular post. 53 | // - Reverts to the default theme 54 | // - Removes bookmarked tags, private notes, private contact info, the page switcher, whether you liked a post, 55 | // and whether you were following someone. 56 | // - Does not remove your own handle from some internal data 57 | // - Does not hide posts from private accounts 58 | // 59 | // NOTE: currently breaks Javascript on all of your own post pages 60 | export const GENERIC_OBSERVER = false; 61 | 62 | // Number of seconds to wait between requests. 63 | // Increase this to slow down your download. This might be considered polite towards servers. 
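// For example (an illustration, not from the original config): a value of 1 should leave
// roughly one second of idle time between consecutive requests, while the default of 0
// adds no extra delay at all.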
64 | export const REQUEST_DELAY_SECS = 0; 65 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2024 cohost-dl contributors 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # cohost-dl 2 | Downloads posts onto your computer from cohost.org, which is shutting down. 3 | 4 | - Post pages are downloaded exactly as they appear on Cohost, including shared posts, comments, and with your display settings (silenced tags, etc.) 5 | - Downloads all of your own posts and all of your liked posts 6 | - If you have a data portability archive: also downloads all posts you’ve commented on 7 | - Legal: using this software does not somehow grant you a license to re-publish posts and comments from other people 8 | 9 | See also: [cohost-dl 2](db), which is easier to use and handles large amounts of data much better 10 | 11 | ## Downloaded Data 12 | Downloaded data will be placed in an `out` directory. 13 | 14 |
15 | Detailed breakdown 16 | 17 | - HTML files openable in a web browser 18 | - `out/index.html`: a simple overview page 19 | - `out/~all/index.html`: The Cohost Archive Global Feed 20 | - `out/{handle}/index.html`: page that shows all posts from {handle} 21 | - `out/{handle}/post/12345-example.html`: page that shows just that post, as it appeared on cohost.org 22 | - Page resources 23 | - `out/static/`: files from cohost.org/static, such as CSS files 24 | - `out/rc/attachment/`: post images and audio files 25 | - `out/rc/attachment-redirect/`: honestly, no idea. ostensibly also post attachments 26 | - `out/rc/avatar/`, `out/rc/default-avatar/`: user avatars 27 | - `out/rc/header/`: user header images 28 | - `out/rc/external/`: external images not hosted on cohost.org but included in posts 29 | - `out/{handle}/cdl-index.js`: full-text search index 30 | - `out/{handle}/cdl-chunk~{handle}~{n}.js`: post data used in the list of all posts 31 | - `out/~cohost-dl/`: Javascript for all generated pages 32 | - Data files 33 | - `out/{your-handle}/liked.json`: data for all posts you liked 34 | - `out/{your-handle}/posts.json`: data for all posts you made 35 | - `out/{handle}/post/12345-example` (without `.html`): original data for that post from cohost.org 36 | - `out/~src/{site-version}/`: unpacked source code for the Cohost frontend (used to create cohost-dl Javascript) 37 | - `out/~headers.json`: stores content type headers for some URLs that don’t have a good file extension 38 | 39 |
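The generated pages are plain static files, so if you’d rather browse the export over HTTP instead of opening the HTML files directly, any static file server pointed at `out/` should work. A minimal sketch, assuming Python 3 is installed (this server is not part of cohost-dl itself):

```sh
# serve the export locally at http://localhost:8000/
python3 -m http.server 8000 --directory out
```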
40 | 41 | For file size, expect something around 1 GB for 1000 posts. 42 | 43 | Files you can probably safely rehost online: 44 | - `out/{your-handle}/index.html` 45 | - `out/{your-handle}/cdl-index.js` 46 | - `out/{your-handle}/cdl-chunk~{...}.js` 47 | - `out/~cohost-dl/` 48 | - files in `out/rc/` required for the above page(s) to work 49 | 50 | Why other files may not be safe to rehost online: 51 | - `out/{your-handle}/post/12345-example.html`: is a very faithful Cohost page and hence contains all of your settings (sideblogs, muted tags, etc.) 52 | - The `GENERIC_OBSERVER` setting attempts to mitigate this, but it breaks a bunch of other things 53 | - `out/{not-your-handle}/`: not yours 54 | 55 | ## Usage 56 | 1. Copy `CONFIG.example.ts` to `CONFIG.ts` 57 | 2. edit `CONFIG.ts` appropriately 58 | 3. Install Deno 59 | 4. `./run.sh` 60 | - if you’re using a system that doesn’t support Bash, such as Windows, 61 | you can just copy the `deno run ...` command from this file and run it directly. 62 | 63 | It's safe to interrupt and re-start the script at any time. 64 | Things that have already been downloaded will not be downloaded again, 65 | and any changes in configuration will be taken into account upon restart. 66 | 67 | -------------------------------------------------------------------------------- /db/.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | /data.db* 3 | /config.toml 4 | /downloader-state.json 5 | -------------------------------------------------------------------------------- /db/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "cohost-dl" 3 | version = "0.1.0" 4 | edition = "2021" 5 | 6 | [dependencies] 7 | anyhow = "1.0" 8 | async-recursion = "1.1" 9 | async-scoped = { version = "0.9", features = ["use-tokio"] } 10 | axum = { version = "0.7", features = ["macros"] } 11 | base64 = "0.22" 12 | chrono = "0.4" 13 | clap = { version = "4.5", features = ["derive"] } 14 | cssparser = "0.34" 15 | deno_console = "0.170" 16 | deno_core = "0.311" 17 | deno_url = "0.170" 18 | deno_web = "0.201" 19 | deno_webidl = "0.170" 20 | diesel = { version = "2.2", features = ["sqlite", "returning_clauses_for_sqlite_3_35"] } 21 | diesel_migrations = "2.2" 22 | env_logger = "0.11" 23 | hex = "0.4" 24 | html5ever = "0.26" 25 | indicatif = "0.17" 26 | kuchikiki = "0.8" 27 | log = "0.4" 28 | pbkdf2 = "0.12" 29 | pulldown-cmark = "0.12" 30 | reqwest = "0.12" 31 | rmp-serde = "1.3" 32 | rpassword = "7.3" 33 | rustyline = "14.0" 34 | serde = { version = "1.0", features = ["derive"] } 35 | serde_json = "1.0" 36 | sha2 = "0.10" 37 | tempfile = "3.1" 38 | tera = "1.20" 39 | thiserror = "1.0" 40 | tokio = { version = "1.36", features = ["full"] } 41 | tokio-util = { version = "0.7", features = ["io"] } 42 | toml = "0.8" 43 | toml_edit = "0.22" 44 | urlencoding = "2.1" 45 | webbrowser = "1.0" 46 | 47 | [target.'cfg(windows)'.dependencies] 48 | libsqlite3-sys = { version = "0.30.1", features = ["bundled"] } 49 | -------------------------------------------------------------------------------- /db/README.md: -------------------------------------------------------------------------------- 1 | # cohost-dl 2: DL harder 2 | cohost-dl but downloading a lot more data but less faithfully 3 | 4 | [website with precompiled binary downloads](https://cloudwithlightning.net/random/chostin/cohost-dl) 5 | 6 | - Post pages are *not* downloaded exactly as they appear on Cohost. 
7 | Instead, this tool downloads only the data, and then re-creates something close to what they looked like on Cohost. 8 | - Can download your own posts, your liked posts, your dashboard, and tag feeds 9 | - Legal: using this software does not somehow grant you a license to re-publish posts and comments from other people 10 | 11 | Usage Notes: 12 | - You can interrupt this at any time, but if it’s doing something where there’s no progress bar, it’ll start over from page 1. 13 | This is probably annoying if you were on, like, page 200. 14 | - I am not very good at SQL 15 | 16 | Download stages: 17 | 1. downloading posts 18 | 2. downloading post comments 19 | 3. downloading image and audio resources 20 | 21 | Files: 22 | - the database: stores all post data 23 | - the output directory: stores all resources like images 24 | - downloader-state.json: file to remember what’s already been downloaded before and skip downloading those things (can be edited) 25 | 26 | > Note: if you have used cohost-dl 2 before, you should probably run it again with the `try_fix_transparent_shares` option. 27 | 28 | ## Compiling and running from source 29 | 1. compile the post & markdown renderer. this is super jank. it currently requires running cohost-dl 1 as well 30 | - if ASSC ever ships an open source post renderer, this will be replaced with that (if possible) 31 | - if you don’t care about serve mode, just make an empty `md-render/dist/server-render.js` and `md-render/dist/client.js` 32 | file so the Rust code compiles 33 | - in repo root: 34 | - `rm out/staff/post/7611443-cohost-to-shut-down` (if it exists) 35 | - why? because this post is used to determine the current Cohost version 36 | - `./run.sh` 37 | - wait for it to download Cohost version `a2ecdc59` 38 | - if this is no longer the current Cohost version, then the following build script will need an update 39 | - `cd db/md-render` 40 | - `./build.sh` 41 | 2. `cargo run -- download` 42 | 3. 
`cargo run -- serve` (can be run in parallel) 43 | -------------------------------------------------------------------------------- /db/build.rs: -------------------------------------------------------------------------------- 1 | use std::io; 2 | use std::process::Command; 3 | 4 | fn main() { 5 | let hash = Command::new("git") 6 | .args(&["rev-parse", "HEAD"]) 7 | .output() 8 | .and_then(|out| { 9 | if !out.status.success() { 10 | return Err(io::Error::new( 11 | io::ErrorKind::Other, 12 | format!( 13 | "error: exited with {:?}\n{}", 14 | out.status, 15 | String::from_utf8_lossy(&out.stderr) 16 | ), 17 | )); 18 | } 19 | 20 | String::from_utf8(out.stdout).map_err(|e| io::Error::new(io::ErrorKind::Other, e)) 21 | }) 22 | .expect("could not determine commit hash"); 23 | 24 | println!("cargo:rustc-env=BUILD_COMMIT={hash}"); 25 | } 26 | -------------------------------------------------------------------------------- /db/build.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -euxo pipefail 3 | cd "$(dirname "$0")" 4 | 5 | ./md-render/build.sh 6 | 7 | export RUSTFLAGS="--remap-path-prefix=$(pwd)=~" 8 | 9 | for name in $(ls $HOME/.cargo/registry/src); do 10 | registry="$HOME/.cargo/registry/src/$name" 11 | export RUSTFLAGS="$RUSTFLAGS --remap-path-prefix=$registry=cargo" 12 | done 13 | 14 | cargo build --release 15 | -------------------------------------------------------------------------------- /db/cohost_static.txt: -------------------------------------------------------------------------------- 1 | https://cohost.org/static/014b0a8cc35206ef151d.png 2 | https://cohost.org/static/0573a2ae2e466eba0356.woff2 3 | https://cohost.org/static/06101a7ccf85875d015f.woff2 4 | https://cohost.org/static/0c3fc7e5bd85351aa4a2.png 5 | https://cohost.org/static/0ee09eba18f52f40ea6b.woff 6 | https://cohost.org/static/11c5493261064ffa82c0.png 7 | https://cohost.org/static/160a2de1e18bbfbce0d6.woff2 8 | https://cohost.org/static/17aa2d48956926005de9.png 9 | https://cohost.org/static/1d032f03e8c1619c1840.woff 10 | https://cohost.org/static/1d8ea3d31970f45e1efa.woff2 11 | https://cohost.org/static/228d3a13bd5f7796b434.png 12 | https://cohost.org/static/2778ef1c97c10fcf7087.woff2 13 | https://cohost.org/static/2b33853bc0f8ab628206.woff 14 | https://cohost.org/static/32426174753867c5e4d5.png 15 | https://cohost.org/static/3633c116f0941d94d237.png 16 | https://cohost.org/static/3bc3a1c5272e2ceb8712.png 17 | https://cohost.org/static/3c154cde88b7ed1ca92a.png 18 | https://cohost.org/static/3e2b26f2f1e719024296.svg 19 | https://cohost.org/static/41454e429d62b5cb7963.png 20 | https://cohost.org/static/46d6791cd192354ea229.woff2 21 | https://cohost.org/static/4fd0f5fb276c23f89e61.png 22 | https://cohost.org/static/530f8cf75eac87716702.png 23 | https://cohost.org/static/53635f5fe850274b1a7d.png 24 | https://cohost.org/static/5a7a3becf5ca45951382.png 25 | https://cohost.org/static/5cf84d596a2c422967de.png 26 | https://cohost.org/static/5dc77fa7c8c5443ffdad.woff 27 | https://cohost.org/static/63054d023ff63adc16f8.woff 28 | https://cohost.org/static/6c09b07bbd10f336c332.woff2 29 | https://cohost.org/static/7c05f33b4fe5f8132439.woff2 30 | https://cohost.org/static/7e4e543cdb2277f35ef3.woff 31 | https://cohost.org/static/7ec6f0f3aef87d734f9b.png 32 | https://cohost.org/static/8b5b4d218ea99eee8038.woff 33 | https://cohost.org/static/8ff201350af3c70fb5b8.svg 34 | https://cohost.org/static/90058099e741e483208a.png 35 | 
https://cohost.org/static/9559ff8058a895328d76.png 36 | https://cohost.org/static/9977bdf408811d1dd51e.png 37 | https://cohost.org/static/99c7fbf98de865cc9726.png 38 | https://cohost.org/static/9a6014af31fb1ca65a1f.png 39 | https://cohost.org/static/9bb403f3822c6457baf6.png 40 | https://cohost.org/static/9e9015373afa62c1438a.woff 41 | https://cohost.org/static/a09d966cd188c9ebaa4c.png 42 | https://cohost.org/static/a0da22773386bbbf0200.woff2 43 | https://cohost.org/static/a278964027d51119ea29.woff2 44 | https://cohost.org/static/a4f72033a674e35d4cc9.png 45 | https://cohost.org/static/a8e9086e0667f460ca1a.woff 46 | https://cohost.org/static/add6a1fe8239b714d178.woff2 47 | https://cohost.org/static/ae53a8b5de7c919100e6.png 48 | https://cohost.org/static/ae598bfc9a58e910240d.woff 49 | https://cohost.org/static/b07ad0e0d01ba4d56c1a.woff2 50 | https://cohost.org/static/b25a9fdf230219087003.png 51 | https://cohost.org/static/b59709333449a01e3e0a.png 52 | https://cohost.org/static/b792e48e2d0bcaf3099b.woff2 53 | https://cohost.org/static/bfa6d6316fd95ae76803.png 54 | https://cohost.org/static/c45b6d8f9de20f725b98.png 55 | https://cohost.org/static/c4f3f2c6b9ffb85934e7.png 56 | https://cohost.org/static/c9cba9a97beb26e73cb4.png 57 | https://cohost.org/static/ca4719f7af550ea00632.png 58 | https://cohost.org/static/ca6ddc773ed5324a034e.png 59 | https://cohost.org/static/cb9a5640d7ef7b361a1a.png 60 | https://cohost.org/static/cc20b15eacd73fe42dfe.woff2 61 | https://cohost.org/static/ced2a691a6ab0e36bb1b.woff 62 | https://cohost.org/static/d2753b632211c395538e.png 63 | https://cohost.org/static/d3227e1e99e39fc3d8c8.woff 64 | https://cohost.org/static/d7ec7f057e6fb15a94cc.png 65 | https://cohost.org/static/dafc923e634e693d31e1.woff 66 | https://cohost.org/static/de31eb962de32ee6933f.svg 67 | https://cohost.org/static/de7a6730ae8672a12406.svg 68 | https://cohost.org/static/e217d09d5f52145f24cc.png 69 | https://cohost.org/static/e5904d7796f14a6284b7.woff2 70 | https://cohost.org/static/e5d55348f39c65a20148.png 71 | https://cohost.org/static/e67a74185ea96e93820e.jpg 72 | https://cohost.org/static/ebbf360236a95b62bdfc.png 73 | https://cohost.org/static/edcc39b1702e4bd4b95e.svg 74 | https://cohost.org/static/f0c56e99113f1a0731b4.svg 75 | https://cohost.org/static/f59b84127fa7b6c48b6c.png 76 | https://cohost.org/static/fa26fdc235475dc2ab2b.woff2 77 | https://cohost.org/static/fa883e2377fea8945237.png 78 | -------------------------------------------------------------------------------- /db/config.example.toml: -------------------------------------------------------------------------------- 1 | # database file path 2 | database = "data.db" 3 | 4 | # file output directory. this will store lots of images and probably get very large. 5 | # 6 | # works similarly to cohost-dl 1 and is probably 99% compatible, 7 | # so you can point this at the cohost-dl 1 output directory to avoid having duplicate images. 8 | root_dir = "path/to/out" 9 | 10 | # Copy the cookie header from dev tools on cohost.org. This is used to log in, so don't share it. 11 | # 12 | # The page you’re currently logged into will be the point of view for cohost-dl data. 13 | # You probably shouldn’t switch pages in the browser while the script is running. 14 | # However, you can switch to different pages before running the script multiple times if you’d like 15 | # to e.g. download liked posts for your sideblogs as well! 
16 | cookie = "connect.sid=adhjsakfahdsfjkash" 17 | 18 | # don't load external resources from these domains 19 | do_not_fetch_domains = [ 20 | 'eggbugpocket.queertra.sh', # GIF plays Pokémon 21 | 'r0t.is', # Cohost runs Windows XP 22 | ] 23 | 24 | # load all posts from these pages 25 | load_profile_posts = [ 26 | 'example-handle', 27 | ] 28 | 29 | # load all posts from these tags (without leading #) 30 | load_tagged_posts = [ 31 | 'interactable', 32 | ] 33 | 34 | # load some specific posts from URLs 35 | load_specific_posts = [ 36 | 'https://cohost.org/staff/post/7611443-cohost-to-shut-down', 37 | ] 38 | 39 | # load all posts from all followed projects 40 | load_dashboard = false 41 | 42 | # when loading the dashboard: ignore these particular pages 43 | skip_follows = [ 44 | 'example-handle', 45 | ] 46 | 47 | # load all liked posts for the currently active page 48 | load_likes = false 49 | 50 | # load new posts for all projects previously saved in full 51 | # (does not affect liked posts or bookmarked tags) 52 | load_new_posts = false 53 | 54 | # load comments for all posts 55 | load_comments = false 56 | 57 | # for existing cohost-dl downloads: will attempt to fix transparent shares that point at nothing. 58 | # use this if you see a post that seems to be a share of a post with no contents at all. 59 | try_fix_transparent_shares = false 60 | 61 | # load images and audio attachments in posts 62 | load_post_resources = false 63 | 64 | # load avatars, headers, and images in projects descriptions 65 | load_project_resources = false 66 | 67 | # load images in comments 68 | load_comment_resources = false 69 | 70 | # how many seconds to wait before giving up on a request 71 | request_timeout_secs = 60 72 | 73 | # port when running the web server to look at the archive 74 | server_port = 26467 75 | -------------------------------------------------------------------------------- /db/diesel.toml: -------------------------------------------------------------------------------- 1 | # For documentation on how to configure this file, 2 | # see https://diesel.rs/guides/configuring-diesel-cli 3 | 4 | [print_schema] 5 | file = "src/schema.rs" 6 | custom_type_derives = ["diesel::query_builder::QueryId", "Clone"] 7 | 8 | [migrations_directory] 9 | dir = "migrations" 10 | -------------------------------------------------------------------------------- /db/md-render/.gitignore: -------------------------------------------------------------------------------- 1 | /node_modules 2 | /src/cohost 3 | /dist 4 | -------------------------------------------------------------------------------- /db/md-render/build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -euxo pipefail 3 | cd "$(dirname "$0")" 4 | 5 | mkdir -p src/cohost 6 | cp -r ../../out/~src/a2ecdc59/* src/cohost/ 7 | 8 | cd src/cohost/ 9 | patch -p1 -u -i ../awawawa.patch 10 | 11 | cd ../.. 
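# `npm ci` installs the exact dependency versions pinned in package-lock.json, and
# `npm run build` invokes Rollup (see rollup.config.mjs) to bundle the entry points
# against the patched Cohost sources into md-render/dist/, which the Rust build embeds.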
12 | 13 | npm ci 14 | npm run build 15 | -------------------------------------------------------------------------------- /db/md-render/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "cohost-dl-md-render", 3 | "version": "1.0.0", 4 | "main": "compiled.js", 5 | "scripts": { 6 | "build": "rollup -c" 7 | }, 8 | "license": "MIT", 9 | "devDependencies": { 10 | "@rollup/plugin-alias": "^5.1.1", 11 | "@rollup/plugin-commonjs": "^28.0.0", 12 | "@rollup/plugin-json": "^6.1.0", 13 | "@rollup/plugin-node-resolve": "^15.3.0", 14 | "@rollup/plugin-replace": "^6.0.1", 15 | "@rollup/plugin-terser": "^0.4.4", 16 | "@rollup/plugin-typescript": "^12.1.0", 17 | "@types/css-tree": "^2.3.8", 18 | "@types/hast": "^3.0.4", 19 | "@types/luxon": "^3.4.2", 20 | "@types/mdast": "^4.0.4", 21 | "@types/react-dom": "^18.3.0", 22 | "rollup": "^4.24.0", 23 | "tslib": "^2.7.0" 24 | }, 25 | "dependencies": { 26 | "@headlessui/react": "^1.7.14", 27 | "@heroicons/react": "^2.0.13", 28 | "buffer": "^6.0.3", 29 | "classnames": "^2.3.1", 30 | "css-tree": "^3.0.0", 31 | "events": "^3.3.0", 32 | "hast": "^0.0.2", 33 | "html-to-text": "^8.1.0", 34 | "i18next": "^23.5.1", 35 | "lodash": "^4.17.21", 36 | "luxon": "^2.0.2", 37 | "mdast": "^2.3.2", 38 | "path": "^0.12.7", 39 | "path-to-regexp": "^6.2.0", 40 | "react": "^18.3.1", 41 | "react-dom": "^18.3.1", 42 | "react-render-if-visible": "^2.1.0", 43 | "react-swipeable": "^6.2.0", 44 | "readable-stream": "^4.5.2", 45 | "rehype-external-links": "^2.0.0", 46 | "rehype-raw": "^6.1.1", 47 | "rehype-react": "^7.1.1", 48 | "rehype-sanitize": "^5.0.1", 49 | "rehype-stringify": "^9.0.3", 50 | "remark-breaks": "^3.0.3", 51 | "remark-gfm": "^3.0.1", 52 | "remark-parse": "^10.0.1", 53 | "remark-rehype": "^10.1.0", 54 | "remark-stringify": "^10.0.3", 55 | "style-to-object": "^0.3.0", 56 | "unified": "^10.1.2", 57 | "unist-util-is": "^5.2.0", 58 | "unist-util-visit": "^4.1.1", 59 | "unist-util-visit-parents": "^5.1.3", 60 | "url": "^0.11.4", 61 | "url-parse": "^1.5.3", 62 | "util": "^0.12.5", 63 | "zod": "^3.14.4" 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /db/md-render/rollup.config.mjs: -------------------------------------------------------------------------------- 1 | import resolve from '@rollup/plugin-node-resolve'; 2 | import commonjs from '@rollup/plugin-commonjs'; 3 | import typescript from '@rollup/plugin-typescript'; 4 | import replace from '@rollup/plugin-replace'; 5 | import alias from '@rollup/plugin-alias'; 6 | import json from '@rollup/plugin-json'; 7 | import terser from '@rollup/plugin-terser'; 8 | 9 | const EMOJI = { 10 | "chunks.png": "f59b84127fa7b6c48b6c.png", 11 | "eggbug-classic.png": "41454e429d62b5cb7963.png", 12 | "eggbug.png": "17aa2d48956926005de9.png", 13 | "sixty.png": "9a6014af31fb1ca65a1f.png", 14 | "unyeah.png": "5cf84d596a2c422967de.png", 15 | "yeah.png": "014b0a8cc35206ef151d.png", 16 | }; 17 | const PLUS_EMOJI = { 18 | "eggbug-asleep.png": "ebbf360236a95b62bdfc.png", 19 | "eggbug-devious.png": "c4f3f2c6b9ffb85934e7.png", 20 | "eggbug-heart-sob.png": "b59709333449a01e3e0a.png", 21 | "eggbug-nervous.png": "d2753b632211c395538e.png", 22 | "eggbug-pensive.png": "ae53a8b5de7c919100e6.png", 23 | "eggbug-pleading.png": "11c5493261064ffa82c0.png", 24 | "eggbug-relieved.png": "3633c116f0941d94d237.png", 25 | "eggbug-shocked.png": "b25a9fdf230219087003.png", 26 | "eggbug-smile-hearts.png": "d7ec7f057e6fb15a94cc.png", 27 | "eggbug-sob.png": 
"9559ff8058a895328d76.png", 28 | "eggbug-tuesday.png": "90058099e741e483208a.png", 29 | "eggbug-uwu.png": "228d3a13bd5f7796b434.png", 30 | "eggbug-wink.png": "3bc3a1c5272e2ceb8712.png", 31 | "host-aww.png": "9bb403f3822c6457baf6.png", 32 | "host-cry.png": "530f8cf75eac87716702.png", 33 | "host-evil.png": "cb9a5640d7ef7b361a1a.png", 34 | "host-frown.png": "99c7fbf98de865cc9726.png", 35 | "host-joy.png": "53635f5fe850274b1a7d.png", 36 | "host-love.png": "c45b6d8f9de20f725b98.png", 37 | "host-nervous.png": "e5d55348f39c65a20148.png", 38 | "host-plead.png": "fa883e2377fea8945237.png", 39 | "host-shock.png": "bfa6d6316fd95ae76803.png", 40 | "host-stare.png": "a09d966cd188c9ebaa4c.png", 41 | }; 42 | 43 | function convertEmoji(map) { 44 | return Object.fromEntries(Object.entries(map).map(([k, v]) => [k, `/static/${v}`])); 45 | } 46 | 47 | const banner = ` 48 | if (!globalThis.process) globalThis.process = { env: {}, cwd: () => '/' }; 49 | process.env.HOME_URL = 'https://cohost.org/'; 50 | 51 | globalThis.require = {}; 52 | require.context = (dir, useSubdirs) => { 53 | if ((dir === "../../images/emoji" || dir === "../images/emoji") && !useSubdirs) { 54 | const data = ${JSON.stringify(convertEmoji(EMOJI))}; 55 | const f = (n) => data[n]; 56 | f.keys = () => Object.keys(data); 57 | return f; 58 | } else if ((dir === "../../images/plus-emoji" || dir === "../images/plus-emoji") && !useSubdirs) { 59 | const data = ${JSON.stringify(convertEmoji(PLUS_EMOJI))}; 60 | const f = (n) => data[n]; 61 | f.keys = () => Object.keys(data); 62 | return f; 63 | } 64 | throw new Error('not supported: require.context for ' + dir); 65 | }; 66 | `; 67 | 68 | const DEV = false; 69 | const CLIENT_ONLY = false; 70 | 71 | const plugins = [ 72 | alias({ 73 | entries: [ 74 | { find: 'stream', replacement: 'readable-stream' }, 75 | // Rollup will complain about these relative paths, but this is the only way to compile on Windows. 76 | // I don’t know why. The output file hashes are the same, so I guess it’s fine. 77 | { find: 'util', replacement: './src/patch_util.js' }, 78 | { find: 'css-tree', replacement: './node_modules/css-tree/dist/csstree.esm.js' }, 79 | ] 80 | }), 81 | typescript(), 82 | json(), 83 | replace({ 84 | "process.env.NODE_ENV": DEV ? 
"'development'" : "'production'", 85 | preventAssignment: true, 86 | }), 87 | resolve({ preferBuiltins: false }), 88 | commonjs(), 89 | ]; 90 | 91 | export default [ 92 | { 93 | input: 'src/client.tsx', 94 | output: { 95 | banner, 96 | format: 'iife', 97 | dir: 'dist', 98 | }, 99 | plugins: [...plugins, !DEV && terser()].filter(x => x), 100 | }, 101 | !CLIENT_ONLY && { 102 | input: 'src/server-render.tsx', 103 | output: { 104 | banner, 105 | dir: 'dist', 106 | }, 107 | plugins, 108 | }, 109 | ].filter(x => x); 110 | -------------------------------------------------------------------------------- /db/md-render/src/lightbox2.tsx: -------------------------------------------------------------------------------- 1 | import { useEffect, useRef, useState } from "react"; 2 | import { PostId } from "./cohost/shared/types/ids"; 3 | import { AttachmentViewBlock } from "./cohost/shared/types/post-blocks"; 4 | import { useLightbox } from "./cohost/preact/components/lightbox"; 5 | import { flushSync } from "react-dom"; 6 | 7 | const lightboxContent = new Map(); 8 | export const globalLightboxContext: ReturnType = { 9 | openLightbox: () => undefined, 10 | closeLightbox: () => undefined, 11 | setLightboxContentForPost: (post: PostId, blocks: AttachmentViewBlock[]) => { 12 | lightboxContent.set(post, blocks); 13 | }, 14 | }; 15 | 16 | function findPostElementForAttachment(attachment: AttachmentViewBlock) { 17 | const rPreviewURL = new URL(attachment.attachment.previewURL, location.href).href; 18 | const rFileURL = new URL(attachment.attachment.fileURL, location.href).href; 19 | 20 | return [...document.querySelectorAll('.i-post-body .group img')].find(img => img.src === rPreviewURL || img.src === rFileURL); 21 | } 22 | 23 | export function Lightbox2() { 24 | const [open, setOpen] = useState(false); 25 | const [post, setPost] = useState(null); 26 | const [attachmentIndex, setAttachmentIndex] = useState(0); 27 | 28 | const dialogRef = useRef(null); 29 | 30 | useEffect(() => { 31 | globalLightboxContext.openLightbox = (postId, attachmentId) => { 32 | const attachments = lightboxContent.get(postId); 33 | if (!attachments) return; 34 | 35 | const attachmentIndex = attachments.findIndex(a => a.attachment.attachmentId === attachmentId); 36 | const attachment = attachments[attachmentIndex]; 37 | if (!attachment) return; 38 | 39 | const openingFromElement = findPostElementForAttachment(attachment); 40 | 41 | if (openingFromElement && document.startViewTransition && !window.matchMedia('(prefers-reduced-motion: reduce)').matches) { 42 | openingFromElement.classList.add('attachment-lightbox-opening-from-this'); 43 | 44 | document.startViewTransition(() => { 45 | openingFromElement.classList.remove('attachment-lightbox-opening-from-this'); 46 | 47 | flushSync(() => { 48 | setPost(postId); 49 | setAttachmentIndex(attachmentIndex); 50 | setOpen(true); 51 | }); 52 | }); 53 | } else { 54 | setPost(postId); 55 | setAttachmentIndex(attachmentIndex); 56 | setOpen(true); 57 | } 58 | }; 59 | globalLightboxContext.closeLightbox = () => setOpen(false); 60 | }, []); 61 | 62 | const isClosing = useRef(false); 63 | const close = () => { 64 | if (isClosing.current) return; 65 | isClosing.current = true; 66 | 67 | const attachment = lightboxContent.get(post)?.[attachmentIndex]; 68 | if (!attachment) { 69 | setOpen(false); 70 | return; 71 | } 72 | 73 | const closingToElement = findPostElementForAttachment(attachment); 74 | 75 | if (closingToElement && document.startViewTransition && !window.matchMedia('(prefers-reduced-motion: 
reduce)').matches) { 76 | const vt = document.startViewTransition(() => { 77 | closingToElement.classList.add('attachment-lightbox-opening-from-this'); 78 | 79 | flushSync(() => { 80 | setOpen(false); 81 | }); 82 | }); 83 | 84 | vt.finished.then(() => { 85 | closingToElement.classList.remove('attachment-lightbox-opening-from-this'); 86 | }); 87 | } else { 88 | setOpen(false); 89 | } 90 | }; 91 | 92 | const onKeyDown = (e: KeyboardEvent) => { 93 | if (e.key === 'ArrowLeft' || e.key === 'h') { 94 | setAttachmentIndex(index => Math.max(0, index - 1)); 95 | } else if (e.key === 'ArrowRight' || e.key === 'l') { 96 | const attachments = lightboxContent.get(post); 97 | if (!attachments) return; 98 | setAttachmentIndex(index => Math.min(attachments.length - 1, index + 1)); 99 | } 100 | }; 101 | 102 | useEffect(() => { 103 | if (open) { 104 | dialogRef.current.showModal(); 105 | dialogRef.current.scrollTo({ top: 0 }); 106 | 107 | window.addEventListener('keydown', onKeyDown); 108 | return () => window.removeEventListener('keydown', onKeyDown); 109 | } else { 110 | dialogRef.current.close(); 111 | isClosing.current = false; 112 | } 113 | }, [open]); 114 | 115 | return ( 116 | { 120 | e.preventDefault(); 121 | close(); 122 | }} 123 | > 124 |
125 | 126 | {open ? ( 127 | 133 | ) : null} 134 |
135 | ); 136 | } 137 | 138 | function LightboxContents({ attachments, index, onIndexChange, onClose }: { 139 | attachments: AttachmentViewBlock[]; 140 | index: number; 141 | onIndexChange: (index: number) => void; 142 | onClose: () => void; 143 | }) { 144 | const current = attachments[index]; 145 | 146 | return ( 147 |
{ 150 | let cursor = e.target as Node; 151 | for (let i = 0; i < 10 && cursor; i++) { 152 | if (cursor instanceof HTMLButtonElement) return; 153 | cursor = cursor.parentNode; 154 | } 155 | onClose(); 156 | }} 157 | > 158 |
159 | 168 | {attachments.length > 1 ? ( 169 |
170 | {index > 0 ? ( 171 | 178 | ) : null} 179 | 180 | {index < attachments.length - 1 ? ( 181 | 188 | ) : null} 189 |
190 | ) : null} 191 | {current.attachment.altText ? ( 192 |

193 | {current.attachment.altText} 194 |

195 | ) : null} 196 |
197 | {attachments.length > 1 ? ( 198 |
    199 | {attachments.map((attachment, i) => ( 200 |
  • 201 | 213 |
  • 214 | ))} 215 |
216 | ) : null} 217 |
218 | ); 219 | } 220 | 221 | function PaginationEgg() { 222 | return ( 223 |
224 | 225 | 233 | 234 |
235 | ); 236 | } 237 | -------------------------------------------------------------------------------- /db/md-render/src/patch_util.js: -------------------------------------------------------------------------------- 1 | export * from '../node_modules/util'; 2 | 3 | export const TextEncoder = globalThis.TextEncoder; 4 | export const TextDecoder = globalThis.TextDecoder; 5 | -------------------------------------------------------------------------------- /db/md-render/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "lib": ["ES2022", "WebWorker", "DOM", "DOM.Iterable"], 4 | "target": "ES2022", 5 | "strict": false, 6 | "module": "ES2022", 7 | "jsx": "react-jsx", 8 | "moduleResolution": "node", 9 | "esModuleInterop": true 10 | }, 11 | "include": ["src/**/*.tsx", "src/**/*.ts"] 12 | } 13 | -------------------------------------------------------------------------------- /db/migrations/2024-09-27-134854_init/down.sql: -------------------------------------------------------------------------------- 1 | drop table resource_content_types; 2 | drop table likes; 3 | drop table follows; 4 | drop table project_resources; 5 | drop table post_resources; 6 | drop table comment_resources; 7 | drop table post_related_projects; 8 | drop table post_tags; 9 | drop table comments; 10 | drop table posts; 11 | drop table projects; 12 | drop table url_files; 13 | -------------------------------------------------------------------------------- /db/migrations/2024-09-27-134854_init/up.sql: -------------------------------------------------------------------------------- 1 | create table resource_content_types 2 | ( 3 | url varchar not null primary key, 4 | content_type varchar not null 5 | ); 6 | 7 | create table url_files 8 | ( 9 | url varchar not null primary key, 10 | file_path blob not null 11 | ); 12 | 13 | create table projects 14 | ( 15 | id integer not null, 16 | handle varchar not null collate nocase, 17 | is_private boolean not null, 18 | requires_logged_in boolean not null, 19 | data blob not null, 20 | data_version integer not null, 21 | primary key (id) 22 | ); 23 | 24 | create index projects_handle on projects (handle); 25 | 26 | create table project_resources 27 | ( 28 | project_id integer not null, 29 | url varchar not null, 30 | primary key (project_id, url) on conflict ignore, 31 | foreign key (project_id) references projects (id) on delete cascade 32 | ); 33 | 34 | create table posts 35 | ( 36 | id integer not null, 37 | posting_project_id integer not null, 38 | published_at varchar, 39 | response_to_ask_id varchar, 40 | share_of_post_id integer, 41 | is_transparent_share boolean not null, 42 | filename varchar not null, 43 | data blob not null, 44 | data_version integer not null, 45 | state integer not null, 46 | primary key (id) on conflict replace, 47 | foreign key (posting_project_id) references projects (id) on delete restrict, 48 | foreign key (share_of_post_id) references posts (id) on delete cascade 49 | ); 50 | 51 | create index posts_posting_project_id on posts (posting_project_id); 52 | create index posts_published_at on posts (published_at); 53 | 54 | create table post_related_projects 55 | ( 56 | post_id integer not null, 57 | project_id integer not null, 58 | primary key (post_id, project_id) on conflict ignore, 59 | foreign key (post_id) references posts (id) on delete cascade, 60 | foreign key (project_id) references projects (id) on delete cascade 61 | ); 62 | 63 | create table post_resources 64 | ( 
65 | post_id integer not null, 66 | url varchar not null, 67 | primary key (post_id, url) on conflict ignore, 68 | foreign key (post_id) references posts (id) on delete cascade 69 | ); 70 | 71 | create table post_tags 72 | ( 73 | post_id integer not null, 74 | tag varchar not null, 75 | pos integer not null, 76 | primary key (post_id, tag), 77 | foreign key (post_id) references posts (id) on delete cascade 78 | ); 79 | 80 | create index post_tags_pos on post_tags (pos); 81 | 82 | create table comments 83 | ( 84 | id varchar not null primary key on conflict replace, 85 | post_id integer not null, 86 | in_reply_to_id varchar, 87 | posting_project_id integer, 88 | published_at varchar not null, 89 | data blob not null, 90 | data_version integer not null, 91 | foreign key (post_id) references posts (id) on delete cascade, 92 | foreign key (posting_project_id) references projects (id) on delete cascade 93 | ); 94 | 95 | create index comments_posting_project_id on comments (posting_project_id); 96 | create index comments_post_id on comments (post_id); 97 | create index comments_published_at on comments (published_at); 98 | 99 | create table comment_resources 100 | ( 101 | comment_id varchar not null, 102 | url varchar not null, 103 | primary key (comment_id, url) on conflict ignore, 104 | foreign key (comment_id) references comments (id) on delete cascade 105 | ); 106 | 107 | create table likes 108 | ( 109 | from_project_id integer not null, 110 | to_post_id integer not null, 111 | primary key (from_project_id, to_post_id) on conflict ignore, 112 | foreign key (to_post_id) references posts (id) on delete no action, 113 | foreign key (from_project_id) references projects (id) on delete restrict 114 | ); 115 | 116 | create table follows 117 | ( 118 | from_project_id integer not null, 119 | to_project_id integer not null, 120 | primary key (from_project_id, to_project_id) on conflict ignore, 121 | foreign key (from_project_id) references projects (id) on delete cascade, 122 | foreign key (to_project_id) references projects (id) on delete cascade 123 | ); 124 | -------------------------------------------------------------------------------- /db/migrations/2024-10-01-150618_related_tags/down.sql: -------------------------------------------------------------------------------- 1 | drop table related_tags; 2 | -------------------------------------------------------------------------------- /db/migrations/2024-10-01-150618_related_tags/up.sql: -------------------------------------------------------------------------------- 1 | create table related_tags 2 | ( 3 | tag1 varchar not null collate nocase, 4 | tag2 varchar not null collate nocase, 5 | is_synonym integer not null, 6 | primary key (tag1, tag2), 7 | constraint ordering check (tag1 < tag2) 8 | ); 9 | 10 | create index related_tags_is_synonym on related_tags (is_synonym); 11 | -------------------------------------------------------------------------------- /db/migrations/2024-10-11-193723_data_migration/down.sql: -------------------------------------------------------------------------------- 1 | drop table data_migration_state; 2 | 3 | alter table posts drop column is_adult_content; 4 | alter table posts drop column is_pinned; 5 | -------------------------------------------------------------------------------- /db/migrations/2024-10-11-193723_data_migration/up.sql: -------------------------------------------------------------------------------- 1 | create table data_migration_state 2 | ( 3 | name varchar not null primary key, 4 | value varchar not 
null 5 | ); 6 | 7 | alter table posts 8 | add column is_adult_content boolean not null default false; 9 | alter table posts 10 | add column is_pinned boolean not null default false; 11 | -------------------------------------------------------------------------------- /db/migrations/2024-10-20-105004_draft_nonces/down.sql: -------------------------------------------------------------------------------- 1 | drop table draft_nonces; 2 | -------------------------------------------------------------------------------- /db/migrations/2024-10-20-105004_draft_nonces/up.sql: -------------------------------------------------------------------------------- 1 | create table draft_nonces 2 | ( 3 | post_id integer not null primary key, 4 | nonce varchar not null, 5 | foreign key (post_id) references posts (id) on delete cascade 6 | ); 7 | -------------------------------------------------------------------------------- /db/src/bundled_files.rs: -------------------------------------------------------------------------------- 1 | macro_rules! cdl_static { 2 | ($name:ident; $($item_name:literal: $item_src:literal,)+) => { 3 | pub const $name: &[(&str, &[u8])] = &[ 4 | $( 5 | ($item_name, include_bytes!(concat!("../", $item_src))), 6 | )+ 7 | ]; 8 | }; 9 | } 10 | 11 | cdl_static! { 12 | CDL_STATIC; 13 | "base.css": "static/base.css", 14 | "tailwind-prose.css": "static/tailwind-prose.css", 15 | "client.js": "md-render/dist/client.js", 16 | } 17 | 18 | /// these are hard-coded because they are very unlikely to change 19 | pub const COHOST_STATIC: &str = include_str!("../cohost_static.txt"); 20 | 21 | pub const MD_RENDER_COMPILED: &str = include_str!("../md-render/dist/server-render.js"); 22 | 23 | pub const TEMPLATE_CONFIG: &str = include_str!("../config.example.toml"); 24 | 25 | macro_rules! templates { 26 | ($name:ident; $($item:literal,)+) => { 27 | pub const $name: &[(&str, &str)] = &[ 28 | $( 29 | ($item, include_str!(concat!("../templates/", $item))), 30 | )+ 31 | ]; 32 | }; 33 | } 34 | 35 | templates! 
{ 36 | TEMPLATES; 37 | "base.html", 38 | "comments.html", 39 | "dashboard.html", 40 | "error.html", 41 | "index.html", 42 | "liked_feed.html", 43 | "pagination_eggs.html", 44 | "post.html", 45 | "project_profile.html", 46 | "project_sidebar.html", 47 | "single_post.html", 48 | "tag_feed.html", 49 | } 50 | -------------------------------------------------------------------------------- /db/src/comment.rs: -------------------------------------------------------------------------------- 1 | use crate::project::ProjectFromCohost; 2 | use serde::{Deserialize, Serialize}; 3 | 4 | #[derive(Debug, Serialize, Deserialize)] 5 | #[serde(rename_all = "camelCase")] 6 | pub struct CommentFromCohost { 7 | pub poster: Option, 8 | pub comment: InnerComment, 9 | pub can_edit: Permission, 10 | pub can_hide: Permission, 11 | pub can_interact: Permission, 12 | } 13 | 14 | #[derive(Debug, Serialize, Deserialize)] 15 | #[serde(rename_all = "camelCase")] 16 | pub struct InnerComment { 17 | pub body: String, 18 | pub comment_id: String, 19 | pub children: Vec, 20 | pub deleted: bool, 21 | pub has_cohost_plus: bool, 22 | pub hidden: bool, 23 | pub in_reply_to: Option, 24 | pub post_id: u64, 25 | #[serde(rename = "postedAtISO")] 26 | pub posted_at_iso: String, 27 | } 28 | 29 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] 30 | #[serde(rename_all = "kebab-case")] 31 | pub enum Permission { 32 | Allowed, 33 | NotAllowed, 34 | LogInFirst, 35 | Blocked, 36 | } 37 | -------------------------------------------------------------------------------- /db/src/feed.rs: -------------------------------------------------------------------------------- 1 | use crate::comment::Permission; 2 | use crate::context::CohostContext; 3 | use crate::post::PostFromCohost; 4 | use anyhow::{anyhow, Context}; 5 | use html5ever::tendril::TendrilSink; 6 | use reqwest::Url; 7 | use serde::{Deserialize, Serialize}; 8 | 9 | #[derive(Debug, Deserialize)] 10 | #[serde(rename_all = "camelCase")] 11 | #[allow(unused)] 12 | pub struct PaginationMode { 13 | pub current_skip: u64, 14 | pub ideal_page_stride: u64, 15 | pub mode: String, 16 | pub more_pages_backward: bool, 17 | pub more_pages_forward: bool, 18 | pub page_url_factory_name: String, 19 | pub ref_timestamp: u64, 20 | } 21 | 22 | #[derive(Debug, Deserialize)] 23 | #[serde(rename_all = "camelCase")] 24 | pub struct PostsFeed { 25 | #[allow(unused)] 26 | pub highlighted_tags: Vec, 27 | #[allow(unused)] 28 | pub no_posts_string_id: String, 29 | pub pagination_mode: PaginationMode, 30 | pub posts: Vec, 31 | } 32 | 33 | #[derive(Debug, Deserialize)] 34 | struct LikedPostsFeed { 35 | #[serde(rename = "liked-posts-feed")] 36 | liked_posts_feed: PostsFeed, 37 | } 38 | 39 | #[derive(Debug, Deserialize)] 40 | #[serde(rename_all = "camelCase")] 41 | pub struct TaggedPostsFeed { 42 | #[allow(unused)] 43 | pub no_posts_string_id: String, 44 | pub pagination_mode: PaginationMode, 45 | pub posts: Vec, 46 | pub synonyms_and_related_tags: Vec, 47 | #[allow(unused)] 48 | pub tag_name: String, 49 | #[allow(unused)] 50 | pub show_18_plus_posts: bool, 51 | } 52 | 53 | #[derive(Debug, Deserialize)] 54 | pub struct RelatedTag { 55 | #[allow(unused)] 56 | pub tag_id: String, 57 | pub content: String, 58 | pub relationship: TagRelationship, 59 | } 60 | 61 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] 62 | #[serde(rename_all = "kebab-case")] 63 | pub enum TagRelationship { 64 | Related, 65 | Synonym, 66 | } 67 | 68 | #[derive(Debug, Deserialize)] 69 | struct 
TaggedPostFeedContainer { 70 | #[serde(rename = "tagged-post-feed")] 71 | tagged_post_feed: TaggedPostsFeed, 72 | } 73 | 74 | // Not a feed, but this is the file where all the others of this type are 75 | #[derive(Debug, Deserialize)] 76 | #[serde(rename_all = "camelCase")] 77 | pub struct ProjectPageView { 78 | pub can_access_permissions: ProjectCanAccessPermissions, 79 | } 80 | 81 | #[derive(Debug, Deserialize)] 82 | #[serde(rename_all = "camelCase")] 83 | pub struct ProjectCanAccessPermissions { 84 | pub can_read: Permission, 85 | pub can_interact: Permission, 86 | pub can_share: Permission, 87 | pub can_edit: Permission, 88 | } 89 | 90 | #[derive(Debug, Deserialize)] 91 | struct ProjectPageViewContainer { 92 | #[serde(rename = "project-page-view")] 93 | project_page_view: ProjectPageView, 94 | } 95 | 96 | impl CohostContext { 97 | pub async fn load_liked_posts( 98 | &self, 99 | ref_timestamp: Option, 100 | skip_posts: u64, 101 | ) -> anyhow::Result { 102 | let mut url = Url::parse("https://cohost.org/rc/liked-posts")?; 103 | if let Some(ref_timestamp) = ref_timestamp { 104 | url.query_pairs_mut() 105 | .append_pair("refTimestamp", &ref_timestamp.to_string()); 106 | } 107 | if skip_posts > 0 { 108 | url.query_pairs_mut() 109 | .append_pair("skipPosts", &skip_posts.to_string()); 110 | } 111 | 112 | let html = self 113 | .get_text(url) 114 | .await 115 | .context("loading liked posts page")?; 116 | 117 | let doc = kuchikiki::parse_html().one(html); 118 | let script = doc 119 | .select_first("script#__COHOST_LOADER_STATE__") 120 | .map_err(|()| anyhow!("could not find __COHOST_LOADER_STATE__ in liked posts page"))?; 121 | 122 | let data: LikedPostsFeed = serde_json::from_str(&script.text_contents()) 123 | .context("parsing __COHOST_LOADER_STATE__ on liked posts page")?; 124 | 125 | Ok(data.liked_posts_feed) 126 | } 127 | 128 | pub async fn load_tagged_posts( 129 | &self, 130 | tag: &str, 131 | ref_timestamp: Option, 132 | skip_posts: u64, 133 | ) -> anyhow::Result { 134 | let tag_encoded = urlencoding::encode(tag); 135 | let mut url = Url::parse(&format!("https://cohost.org/rc/tagged/{tag_encoded}"))?; 136 | 137 | url.query_pairs_mut().append_pair("show18PlusPosts", "true"); 138 | 139 | if let Some(ref_timestamp) = ref_timestamp { 140 | url.query_pairs_mut() 141 | .append_pair("refTimestamp", &ref_timestamp.to_string()); 142 | } 143 | if skip_posts > 0 { 144 | url.query_pairs_mut() 145 | .append_pair("skipPosts", &skip_posts.to_string()); 146 | } 147 | 148 | let html = self 149 | .get_text(url) 150 | .await 151 | .context("loading tagged posts page")?; 152 | 153 | let doc = kuchikiki::parse_html().one(html); 154 | let script = doc 155 | .select_first("script#__COHOST_LOADER_STATE__") 156 | .map_err(|()| anyhow!("could not find __COHOST_LOADER_STATE__ in tagged posts page"))?; 157 | 158 | let data: TaggedPostFeedContainer = serde_json::from_str(&script.text_contents()) 159 | .context("parsing __COHOST_LOADER_STATE__ on tagged posts page")?; 160 | 161 | Ok(data.tagged_post_feed) 162 | } 163 | 164 | pub async fn project_page_view(&self, handle: &str) -> anyhow::Result { 165 | let url = Url::parse(&format!("https://cohost.org/{handle}"))?; 166 | 167 | let html = self 168 | .get_text(url) 169 | .await 170 | .context("loading project page view")?; 171 | 172 | let doc = kuchikiki::parse_html().one(html); 173 | let script = doc 174 | .select_first("script#__COHOST_LOADER_STATE__") 175 | .map_err(|()| anyhow!("could not find __COHOST_LOADER_STATE__ in project page view"))?; 176 | 177 | let data: 
ProjectPageViewContainer = serde_json::from_str(&script.text_contents()) 178 | .context("parsing __COHOST_LOADER_STATE__ on project page view")?; 179 | 180 | Ok(data.project_page_view) 181 | } 182 | } 183 | -------------------------------------------------------------------------------- /db/src/login.rs: -------------------------------------------------------------------------------- 1 | use crate::context::USER_AGENT; 2 | use anyhow::{bail, Context}; 3 | use base64::prelude::{BASE64_STANDARD, BASE64_STANDARD_NO_PAD}; 4 | use base64::Engine; 5 | use pbkdf2::hmac::Hmac; 6 | use pbkdf2::pbkdf2; 7 | use reqwest::{Client, Method, Url}; 8 | use serde::{Deserialize, Serialize}; 9 | use sha2::Sha384; 10 | 11 | async fn trpc( 12 | client: &Client, 13 | cookie: &str, 14 | method: Method, 15 | id: &str, 16 | input: I, 17 | ) -> anyhow::Result 18 | where 19 | I: Serialize, 20 | R: for<'a> Deserialize<'a>, 21 | { 22 | let mut url = Url::parse(&format!("https://cohost.org/api/v1/trpc/{id}"))?; 23 | 24 | if method == Method::GET { 25 | url.query_pairs_mut() 26 | .append_pair("input", &serde_json::to_string(&input)?); 27 | } 28 | 29 | let mut req = client.request(method.clone(), url).header("cookie", cookie); 30 | 31 | if method == Method::POST { 32 | req = req 33 | .header("content-type", "application/json") 34 | .body(serde_json::to_string(&input)?); 35 | } 36 | 37 | let res = req.send().await?; 38 | 39 | if !res.status().is_success() { 40 | let status = res.status(); 41 | let text = res.text().await?; 42 | bail!("unexpected: {status}\n{text}"); 43 | } 44 | 45 | #[derive(Deserialize)] 46 | enum TrpcResult { 47 | #[serde(rename = "result")] 48 | Result { data: T }, 49 | #[serde(rename = "error")] 50 | Error { code: i64, message: String }, 51 | } 52 | 53 | let result = res.text().await?; 54 | match serde_json::from_str::>(&result)? { 55 | TrpcResult::Result { data } => Ok(data), 56 | TrpcResult::Error { code, message, .. } => { 57 | bail!("{code}: {message}"); 58 | } 59 | } 60 | } 61 | 62 | pub async fn login(email: &str, password: &str) -> anyhow::Result<(String, bool)> { 63 | let client = Client::builder() 64 | .user_agent(USER_AGENT) 65 | .build() 66 | .expect("failed to create client"); 67 | 68 | let cookie = { 69 | let login_page = client.get("https://cohost.org/rc/login").send().await?; 70 | 71 | if !login_page.status().is_success() { 72 | bail!("could not get login page"); 73 | } 74 | let cookie_header = login_page 75 | .headers() 76 | .get("set-cookie") 77 | .and_then(|cookie| cookie.to_str().ok().map(|s| s.to_string())); 78 | 79 | let cookie = if let Some(cookie_header) = cookie_header { 80 | let Some(cookie) = cookie_header.split(';').next() else { 81 | bail!("bad cookie header"); 82 | }; 83 | cookie.to_string() 84 | } else { 85 | bail!("no set-cookie header"); 86 | }; 87 | 88 | cookie 89 | }; 90 | 91 | let salt = { 92 | #[derive(Serialize)] 93 | struct GetSalt { 94 | email: String, 95 | } 96 | #[derive(Deserialize)] 97 | struct Salt { 98 | salt: String, 99 | } 100 | let salt: Salt = trpc( 101 | &client, 102 | &cookie, 103 | Method::GET, 104 | "login.getSalt", 105 | GetSalt { 106 | email: email.to_string(), 107 | }, 108 | ) 109 | .await 110 | .context("getting salt")?; 111 | 112 | BASE64_STANDARD_NO_PAD 113 | .decode(salt.salt) 114 | .context("decoding salt")? 
115 | }; 116 | 117 | let hash = { 118 | let mut result = [0; 128]; 119 | pbkdf2::>(password.as_bytes(), &salt, 200_000, &mut result)?; 120 | BASE64_STANDARD.encode(result) 121 | }; 122 | 123 | let needs_otp = { 124 | #[derive(Serialize)] 125 | #[serde(rename_all = "camelCase")] 126 | struct Login { 127 | client_hash: String, 128 | email: String, 129 | } 130 | 131 | #[derive(PartialEq, Deserialize)] 132 | #[serde(rename_all = "kebab-case")] 133 | enum State { 134 | NeedOtp, 135 | Done, 136 | } 137 | 138 | #[derive(Deserialize)] 139 | struct LoginResponse { 140 | state: State, 141 | } 142 | 143 | let res: LoginResponse = trpc( 144 | &client, 145 | &cookie, 146 | Method::POST, 147 | "login.login", 148 | Login { 149 | client_hash: hash, 150 | email: email.to_string(), 151 | }, 152 | ) 153 | .await 154 | .context("logging in")?; 155 | 156 | res.state == State::NeedOtp 157 | }; 158 | 159 | Ok((cookie, needs_otp)) 160 | } 161 | 162 | pub async fn login_otp(cookie: &str, otp: &str) -> anyhow::Result<()> { 163 | let client = Client::builder() 164 | .user_agent(USER_AGENT) 165 | .build() 166 | .expect("failed to create client"); 167 | 168 | #[derive(Serialize)] 169 | struct Req { 170 | token: String, 171 | } 172 | 173 | #[derive(Deserialize)] 174 | struct Res { 175 | reset: bool, 176 | } 177 | 178 | let res: Res = trpc( 179 | &client, 180 | &cookie, 181 | Method::POST, 182 | "login.send2FAToken", 183 | Req { 184 | token: otp.to_string(), 185 | }, 186 | ) 187 | .await 188 | .context("error in 2FA")?; 189 | 190 | if res.reset { 191 | bail!("unexpected response: reset"); 192 | } 193 | 194 | Ok(()) 195 | } 196 | -------------------------------------------------------------------------------- /db/src/merge.rs: -------------------------------------------------------------------------------- 1 | use crate::data::{Database, DbPost}; 2 | use crate::dl::long_progress_style; 3 | use anyhow::Context; 4 | use deno_core::url::Url; 5 | use diesel::{Connection, SqliteConnection}; 6 | use indicatif::ProgressBar; 7 | use std::path::Path; 8 | 9 | pub async fn merge( 10 | db: &Database, 11 | other_db: &str, 12 | root_dir: &Path, 13 | other_root_dir: &Path, 14 | ) -> anyhow::Result<()> { 15 | info!("merging from database at {}", other_db); 16 | 17 | let other_db = Database::new(SqliteConnection::establish(other_db)?); 18 | let other_total_post_count = other_db.total_post_count().await?; 19 | info!("checking {other_total_post_count} posts"); 20 | 21 | let mut posts_inserted = 0; 22 | 23 | let progress = ProgressBar::new(other_total_post_count); 24 | progress.set_style(long_progress_style()); 25 | progress.set_message("comparing posts"); 26 | 27 | for offset in (0..).map(|i| i * 1000) { 28 | let posts = other_db.get_post_ids(offset, 1000).await?; 29 | if posts.is_empty() { 30 | break; 31 | } 32 | 33 | for post_id in posts { 34 | progress.inc(1); 35 | let post = other_db.post(post_id).await?; 36 | 37 | if db.is_db_post_better_somehow(&post).await? 
{ 38 | debug!("inserting better post for {post_id}"); 39 | progress.set_message(format!("copying post {post_id}")); 40 | insert_post(db, &other_db, post).await?; 41 | posts_inserted += 1; 42 | 43 | progress.set_message("comparing posts"); 44 | } 45 | } 46 | } 47 | 48 | progress.finish_and_clear(); 49 | if posts_inserted == 1 { 50 | info!("1 post copied"); 51 | } else { 52 | info!("{posts_inserted} posts copied"); 53 | } 54 | 55 | let other_total_comment_count = other_db.total_comment_count().await?; 56 | info!("checking {other_total_comment_count} comments"); 57 | 58 | let mut comment_posts_inserted = 0; 59 | 60 | let progress = ProgressBar::new(other_total_comment_count); 61 | progress.set_style(long_progress_style()); 62 | progress.set_message("checking comments"); 63 | 64 | for offset in (0..).map(|i| i * 1000) { 65 | let comments = other_db.get_comment_ids(offset, 1000).await?; 66 | if comments.is_empty() { 67 | break; 68 | } 69 | 70 | for comment_id in comments { 71 | progress.inc(1); 72 | 73 | if !db.has_comment(&comment_id).await? { 74 | let comment = other_db.comment(&comment_id).await?; 75 | debug!("inserting comment {comment_id}"); 76 | 77 | let post_id = comment.post_id as u64; 78 | progress.set_message(format!( 79 | "copying comments for post {post_id}, including {comment_id}" 80 | )); 81 | 82 | insert_comments(db, &other_db, post_id) 83 | .await 84 | .context("inserting comments")?; 85 | comment_posts_inserted += 1; 86 | 87 | progress.set_message("checking comments"); 88 | } 89 | } 90 | } 91 | 92 | progress.finish_and_clear(); 93 | if comment_posts_inserted == 1 { 94 | info!("comments copied for 1 post"); 95 | } else { 96 | info!("comments copied for {comment_posts_inserted} posts"); 97 | } 98 | 99 | let other_total_file_count = other_db.total_url_file_count().await?; 100 | info!("checking {other_total_file_count} files"); 101 | 102 | let mut files_inserted = 0; 103 | 104 | let progress = ProgressBar::new(other_total_file_count); 105 | progress.set_style(long_progress_style()); 106 | progress.set_message("copying files"); 107 | 108 | let mut file_errors = Vec::new(); 109 | 110 | for offset in (0..).map(|i| i * 1000) { 111 | let files = other_db.get_url_files_batch(offset, 1000).await?; 112 | if files.is_empty() { 113 | break; 114 | } 115 | 116 | for (url, path) in files { 117 | progress.inc(1); 118 | 119 | let Ok(url) = Url::parse(&url) else { continue }; 120 | 121 | if !db.get_url_file(&url).await?.is_some() { 122 | let from_path = other_root_dir.join(&path); 123 | let to_path = root_dir.join(&path); 124 | progress.set_message(format!("{}", path.display())); 125 | 126 | if !from_path.exists() { 127 | file_errors.push(format!("missing file {}", from_path.display())); 128 | continue; 129 | } 130 | if to_path.exists() { 131 | // probably pointing at the same files directory 132 | continue; 133 | } 134 | let mut to_path_dir = to_path.clone(); 135 | to_path_dir.pop(); 136 | std::fs::create_dir_all(&to_path_dir) 137 | .with_context(|| format!("creating directory for {}", to_path.display()))?; 138 | std::fs::copy(&from_path, &to_path).with_context(|| { 139 | format!( 140 | "copying file from {} to {}", 141 | from_path.display(), 142 | to_path.display() 143 | ) 144 | })?; 145 | db.insert_url_file(&url, &path).await?; 146 | 147 | files_inserted += 1; 148 | progress.set_message("copying files"); 149 | } 150 | } 151 | } 152 | 153 | progress.finish_and_clear(); 154 | if files_inserted == 1 { 155 | info!("1 file copied"); 156 | } else { 157 | info!("{files_inserted} files copied"); 158 | 
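// Note that missing source files are not fatal here: they were pushed onto `file_errors`
// above and are reported in one batch once the copy loop has finished.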
} 159 | 160 | if !file_errors.is_empty() { 161 | error!("encountered errors while copying files:"); 162 | for err in file_errors { 163 | error!("{err}"); 164 | } 165 | } 166 | 167 | info!("Done"); 168 | 169 | Ok(()) 170 | } 171 | 172 | #[async_recursion::async_recursion] 173 | async fn insert_post(db: &Database, other_db: &Database, post: DbPost) -> anyhow::Result<()> { 174 | let mut share_of_post_id = post.share_of_post_id; 175 | 176 | if let Some(share_of_post) = share_of_post_id { 177 | let mut insert_share = true; 178 | 179 | if db.has_post(share_of_post as u64).await? { 180 | let this_post = db.post(share_of_post as u64).await?; 181 | let other_post = other_db.post(share_of_post as u64).await?; 182 | 183 | if let (Some(this_pub), Some(other_pub)) = 184 | (&this_post.published_at, &other_post.published_at) 185 | { 186 | if this_pub >= other_pub { 187 | // probably a better share post 188 | share_of_post_id = Some(this_post.id); 189 | insert_share = false; 190 | } 191 | } 192 | } 193 | 194 | if insert_share { 195 | let post = other_db.post(share_of_post as u64).await?; 196 | insert_post(db, other_db, post).await?; 197 | } 198 | } 199 | 200 | if !db.has_project_id(post.posting_project_id as u64).await? { 201 | let api_project = crate::render::api_data::cohost_api_project( 202 | other_db, 203 | 0, 204 | post.posting_project_id as u64, 205 | ) 206 | .await?; 207 | db.insert_project(&api_project, true).await?; 208 | } 209 | 210 | // whatever, this works 211 | let mut api_post = 212 | crate::render::api_data::cohost_api_post(other_db, 0, post.id as u64).await?; 213 | api_post.share_of_post_id = share_of_post_id.map(|i| i as u64); 214 | db.insert_post_final(&Default::default(), &api_post, false, None) 215 | .await?; 216 | 217 | Ok(()) 218 | } 219 | 220 | async fn insert_comments(db: &Database, other_db: &Database, post: u64) -> anyhow::Result<()> { 221 | let api_comments = 222 | crate::render::api_data::cohost_api_comments(other_db, 0, post, false).await?; 223 | for comment in api_comments { 224 | db.insert_comment(post, &comment, true).await?; 225 | } 226 | Ok(()) 227 | } 228 | -------------------------------------------------------------------------------- /db/src/post.rs: -------------------------------------------------------------------------------- 1 | use crate::project::{AvatarShape, ProjectFlag, ProjectFromCohost, ProjectPrivacy}; 2 | use serde::{Deserialize, Deserializer, Serialize, Serializer}; 3 | 4 | #[derive(Debug, Serialize, Deserialize)] 5 | #[serde(rename_all = "camelCase")] 6 | pub struct PostFromCohost { 7 | pub ast_map: PostAstMap, 8 | pub blocks: Vec, 9 | pub can_publish: bool, 10 | pub can_share: bool, 11 | pub comments_locked: bool, 12 | pub contributor_block_incoming_or_outgoing: bool, 13 | pub cws: Vec, 14 | pub effective_adult_content: bool, 15 | pub filename: String, 16 | pub has_any_contributor_muted: bool, 17 | pub has_cohost_plus: bool, 18 | /// No null value; will be empty string 19 | pub headline: String, 20 | pub is_editor: bool, 21 | pub is_liked: bool, 22 | pub limited_visibility_reason: LimitedVisibilityReason, 23 | pub num_comments: u64, 24 | pub num_shared_comments: u64, 25 | pub pinned: bool, 26 | pub plain_text_body: String, 27 | pub post_edit_url: String, 28 | pub post_id: u64, 29 | pub posting_project: ProjectFromCohost, 30 | /// ISO 8601 31 | pub published_at: Option, 32 | pub related_projects: Vec, 33 | pub response_to_ask_id: Option, 34 | pub share_of_post_id: Option, 35 | pub share_tree: Vec, 36 | pub shares_locked: bool, 37 | pub 
single_post_page_url: String, 38 | pub state: PostState, 39 | pub tags: Vec, 40 | pub transparent_share_of_post_id: Option, 41 | } 42 | 43 | #[derive(Debug, Serialize, Deserialize)] 44 | #[serde(rename_all = "camelCase")] 45 | pub struct PostAstMap { 46 | pub read_more_index: Option, 47 | pub spans: Vec, 48 | } 49 | 50 | #[derive(Debug, Serialize, Deserialize)] 51 | #[serde(rename_all = "camelCase")] 52 | pub struct PostAstMapSpan { 53 | start_index: u64, 54 | end_index: u64, 55 | // JSON string 56 | ast: String, 57 | } 58 | 59 | #[derive(Debug, Clone, Serialize, Deserialize)] 60 | #[serde(tag = "type")] 61 | #[serde(rename_all = "kebab-case")] 62 | pub enum PostBlock { 63 | Ask { 64 | ask: PostBlockAsk, 65 | }, 66 | Attachment { 67 | attachment: PostBlockAttachment, 68 | }, 69 | AttachmentRow { 70 | attachments: Vec, 71 | }, 72 | Markdown { 73 | markdown: PostBlockMarkdown, 74 | }, 75 | } 76 | 77 | #[derive(Debug, Clone, Serialize, Deserialize)] 78 | #[serde(rename_all = "camelCase")] 79 | pub struct PostBlockAsk { 80 | pub anon: bool, 81 | pub logged_in: bool, 82 | pub asking_project: Option, 83 | pub ask_id: String, 84 | pub content: String, 85 | /// ISO 8601 86 | pub sent_at: String, 87 | } 88 | 89 | #[derive(Debug, Clone, Serialize, Deserialize)] 90 | #[serde(rename_all = "camelCase")] 91 | pub struct PostBlockAskProject { 92 | pub project_id: u64, 93 | pub handle: String, 94 | #[serde(rename = "avatarURL")] 95 | pub avatar_url: String, 96 | #[serde(rename = "avatarPreviewURL")] 97 | pub avatar_preview_url: String, 98 | pub privacy: ProjectPrivacy, 99 | pub flags: Vec, 100 | pub avatar_shape: AvatarShape, 101 | pub display_name: String, 102 | } 103 | 104 | #[derive(Debug, Clone, Serialize, Deserialize)] 105 | #[serde(rename_all = "camelCase")] 106 | pub struct PostBlockMarkdown { 107 | pub content: String, 108 | } 109 | 110 | #[derive(Debug, Clone, Serialize, Deserialize)] 111 | #[serde(rename_all = "camelCase")] 112 | pub struct PostBlockAttachmentWrapper { 113 | // WONTFIX: this struct is technically missing a type: "attachment" field 114 | pub attachment: PostBlockAttachment, 115 | } 116 | 117 | #[derive(Debug, Clone, Serialize, Deserialize)] 118 | #[serde(rename_all = "camelCase")] 119 | #[serde(tag = "kind")] 120 | pub enum PostBlockAttachment { 121 | #[serde(rename_all = "camelCase")] 122 | Image { 123 | alt_text: Option, 124 | attachment_id: Option, 125 | #[serde(rename = "fileURL")] 126 | file_url: String, 127 | #[serde(rename = "previewURL")] 128 | preview_url: String, 129 | width: Option, 130 | height: Option, 131 | }, 132 | #[serde(rename_all = "camelCase")] 133 | Audio { 134 | attachment_id: Option, 135 | artist: Option, 136 | title: Option, 137 | #[serde(rename = "previewURL")] 138 | preview_url: String, 139 | #[serde(rename = "fileURL")] 140 | file_url: String, 141 | }, 142 | } 143 | 144 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] 145 | pub enum PostState { 146 | Draft = 0, 147 | Published = 1, 148 | Deleted = 2, 149 | } 150 | 151 | impl Serialize for PostState { 152 | fn serialize(&self, serializer: S) -> Result 153 | where 154 | S: Serializer, 155 | { 156 | match self { 157 | PostState::Draft => serializer.serialize_u32(0), 158 | PostState::Published => serializer.serialize_u32(1), 159 | PostState::Deleted => serializer.serialize_u32(2), 160 | } 161 | } 162 | } 163 | 164 | impl<'de> Deserialize<'de> for PostState { 165 | fn deserialize(deserializer: D) -> Result 166 | where 167 | D: Deserializer<'de>, 168 | { 169 | match u32::deserialize(deserializer)? 
{ 170 | 0 => Ok(Self::Draft), 171 | 1 => Ok(Self::Published), 172 | 2 => Ok(Self::Deleted), 173 | _ => Err(serde::de::Error::custom("invalid post state")), 174 | } 175 | } 176 | } 177 | 178 | #[derive(Debug, PartialEq, Serialize, Deserialize)] 179 | #[serde(rename_all = "kebab-case")] 180 | pub enum LimitedVisibilityReason { 181 | None, 182 | LogInFirst, 183 | Deleted, 184 | Unpublished, 185 | AdultContent, 186 | Blocked, 187 | } 188 | -------------------------------------------------------------------------------- /db/src/project.rs: -------------------------------------------------------------------------------- 1 | use serde::{Deserialize, Serialize}; 2 | 3 | #[derive(Debug, Clone, Serialize, Deserialize)] 4 | #[serde(rename_all = "camelCase")] 5 | pub struct ProjectFromCohost { 6 | pub ask_settings: ProjectAskSettings, 7 | #[serde(rename = "avatarPreviewURL")] 8 | pub avatar_preview_url: String, 9 | pub avatar_shape: AvatarShape, 10 | #[serde(rename = "avatarURL")] 11 | pub avatar_url: String, 12 | pub contact_card: Vec, 13 | pub dek: String, 14 | pub delete_after: Option, 15 | pub description: String, 16 | pub display_name: String, 17 | pub flags: Vec, 18 | pub frequently_used_tags: Vec, 19 | pub handle: String, 20 | #[serde(rename = "headerPreviewURL")] 21 | pub header_preview_url: Option, 22 | #[serde(rename = "headerURL")] 23 | pub header_url: Option, 24 | pub is_self_project: Option, 25 | pub logged_out_post_visibility: LoggedOutPostVisibility, 26 | pub privacy: ProjectPrivacy, 27 | pub project_id: u64, 28 | pub pronouns: Option, 29 | pub url: Option, 30 | } 31 | 32 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] 33 | #[serde(rename_all = "kebab-case")] 34 | pub enum AvatarShape { 35 | Circle, 36 | Roundrect, 37 | Squircle, 38 | CapsuleBig, 39 | CapsuleSmall, 40 | Egg, 41 | } 42 | 43 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] 44 | #[serde(rename_all = "kebab-case")] 45 | pub enum ProjectPrivacy { 46 | Public, 47 | Private, 48 | } 49 | 50 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] 51 | #[serde(rename_all = "kebab-case")] 52 | pub enum LoggedOutPostVisibility { 53 | Public, 54 | None, 55 | } 56 | 57 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] 58 | #[serde(rename_all = "camelCase")] 59 | pub enum ProjectFlag { 60 | Staff, 61 | StaffMember, 62 | FriendOfTheSite, 63 | NoTransparentAvatar, 64 | Suspended, 65 | Automated, 66 | Parody, 67 | } 68 | 69 | #[derive(Debug, Clone, Serialize, Deserialize)] 70 | #[serde(rename_all = "camelCase")] 71 | pub struct ProjectAskSettings { 72 | enabled: bool, 73 | allow_anon: bool, 74 | require_logged_in_anon: bool, 75 | } 76 | 77 | #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] 78 | #[serde(rename_all = "camelCase")] 79 | pub struct ProjectContactCard { 80 | service: String, 81 | value: String, 82 | visibility: ContactCardVisibility, 83 | } 84 | 85 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] 86 | #[serde(rename_all = "kebab-case")] 87 | pub enum ContactCardVisibility { 88 | Public, 89 | LoggedIn, 90 | Follows, 91 | FollowingYou, 92 | } 93 | -------------------------------------------------------------------------------- /db/src/render/api_data.rs: -------------------------------------------------------------------------------- 1 | use crate::comment::{CommentFromCohost, InnerComment, Permission}; 2 | use crate::data::{Database, DbDataError}; 3 | use crate::post::{LimitedVisibilityReason, PostAstMap, 
PostFromCohost, PostState}; 4 | use crate::project::ProjectFromCohost; 5 | use diesel::result::Error as DieselError; 6 | use std::collections::HashMap; 7 | use thiserror::Error; 8 | 9 | #[derive(Debug, Error)] 10 | pub enum GetDataError { 11 | #[error("not found")] 12 | NotFound, 13 | #[error(transparent)] 14 | OtherQuery(DieselError), 15 | #[error("data error: {0}")] 16 | DbData(#[from] DbDataError), 17 | #[error("render error: {0}")] 18 | Render(anyhow::Error), 19 | } 20 | 21 | impl From<DieselError> for GetDataError { 22 | fn from(value: DieselError) -> Self { 23 | match value { 24 | DieselError::NotFound => Self::NotFound, 25 | value => Self::OtherQuery(value), 26 | } 27 | } 28 | } 29 | 30 | pub async fn cohost_api_project( 31 | db: &Database, 32 | viewer_id: u64, 33 | project_id: u64, 34 | ) -> Result<ProjectFromCohost, GetDataError> { 35 | let project = db.project(project_id).await?; 36 | 37 | let project_data = project.data()?; 38 | 39 | Ok(ProjectFromCohost { 40 | ask_settings: project_data.ask_settings, 41 | avatar_preview_url: project_data.avatar_preview_url, 42 | avatar_shape: project_data.avatar_shape, 43 | avatar_url: project_data.avatar_url, 44 | contact_card: project_data.contact_card, 45 | dek: project_data.dek, 46 | delete_after: project_data.delete_after, 47 | description: project_data.description, 48 | display_name: project_data.display_name, 49 | flags: project_data.flags, 50 | frequently_used_tags: project_data.frequently_used_tags, 51 | handle: project.handle, 52 | header_preview_url: project_data.header_preview_url, 53 | header_url: project_data.header_url, 54 | is_self_project: Some(project_id == viewer_id), 55 | logged_out_post_visibility: project_data.logged_out_post_visibility, 56 | privacy: project_data.privacy, 57 | project_id, 58 | pronouns: project_data.pronouns, 59 | url: project_data.url, 60 | }) 61 | } 62 | 63 | #[async_recursion::async_recursion] 64 | pub async fn cohost_api_post( 65 | db: &Database, 66 | viewer_id: u64, 67 | post_id: u64, 68 | ) -> Result<PostFromCohost, GetDataError> { 69 | // while this could be made more efficient, 70 | let post = db.post(post_id).await?; 71 | let draft_nonce = db.nonce_for_post(post_id).await?; 72 | 73 | let mut share_tree = Vec::new(); 74 | // this adds extra transparent shares, but whatever 75 | if let Some(share_post) = post.share_of_post_id { 76 | let mut post = cohost_api_post(db, viewer_id, share_post as u64).await?; 77 | let post_share_tree = std::mem::replace(&mut post.share_tree, Vec::new()); 78 | share_tree.push(post); 79 | for post in post_share_tree.into_iter().rev() { 80 | share_tree.push(post); 81 | } 82 | } 83 | share_tree.reverse(); 84 | 85 | let transparent_share_of_post_id = if post.is_transparent_share { 86 | share_tree 87 | .iter() 88 | .rfind(|post| post.transparent_share_of_post_id.is_none()) 89 | .map(|post| post.post_id) 90 | } else { 91 | None 92 | }; 93 | 94 | let is_liked = if viewer_id != 0 { 95 | db.is_liked(viewer_id, post_id).await?
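// A viewer_id of 0 is used for anonymous rendering (tag feeds and public pages pass 0),
// so likes are only looked up when there is a real viewing project id.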
96 | } else { 97 | false 98 | }; 99 | 100 | let posting_project = cohost_api_project(db, viewer_id, post.posting_project_id as u64).await?; 101 | 102 | let post_data = post.data()?; 103 | 104 | let tags = db.get_post_tags(post_id).await?; 105 | 106 | Ok(PostFromCohost { 107 | // we do not use the AST map 108 | ast_map: PostAstMap { 109 | read_more_index: None, 110 | spans: Default::default(), 111 | }, 112 | blocks: post_data.blocks, 113 | can_publish: false, 114 | can_share: !post_data.shares_locked, 115 | comments_locked: post_data.comments_locked, 116 | contributor_block_incoming_or_outgoing: false, 117 | cws: post_data.cws, 118 | effective_adult_content: post.is_adult_content, 119 | filename: post.filename, 120 | has_any_contributor_muted: false, 121 | has_cohost_plus: post_data.has_cohost_plus, 122 | headline: post_data.headline, 123 | is_editor: false, 124 | is_liked, 125 | limited_visibility_reason: LimitedVisibilityReason::None, 126 | num_comments: post_data.num_comments, 127 | num_shared_comments: post_data.num_shared_comments, 128 | pinned: post.is_pinned, 129 | plain_text_body: post_data.plain_text_body, 130 | post_edit_url: post_data.post_edit_url, 131 | post_id, 132 | posting_project, 133 | published_at: post.published_at, 134 | related_projects: Default::default(), 135 | response_to_ask_id: post.response_to_ask_id, 136 | share_of_post_id: post.share_of_post_id.map(|i| i as u64), 137 | share_tree, 138 | shares_locked: post_data.shares_locked, 139 | single_post_page_url: post_data.single_post_page_url, 140 | state: match draft_nonce { 141 | Some(_) => PostState::Draft, 142 | _ => PostState::Published, 143 | }, 144 | tags, 145 | transparent_share_of_post_id, 146 | }) 147 | } 148 | 149 | pub async fn cohost_api_comments_for_share_tree( 150 | db: &Database, 151 | viewer_id: u64, 152 | post: &PostFromCohost, 153 | ) -> Result>, GetDataError> { 154 | let mut comments = HashMap::with_capacity(post.share_tree.len() + 1); 155 | 156 | comments.insert( 157 | post.post_id, 158 | cohost_api_comments(db, viewer_id, post.post_id, post.is_editor).await?, 159 | ); 160 | 161 | for post in &post.share_tree { 162 | comments.insert( 163 | post.post_id, 164 | cohost_api_comments(db, viewer_id, post.post_id, post.is_editor).await?, 165 | ); 166 | } 167 | 168 | Ok(comments) 169 | } 170 | 171 | pub async fn cohost_api_comments( 172 | db: &Database, 173 | viewer_id: u64, 174 | post_id: u64, 175 | is_editor: bool, 176 | ) -> Result, GetDataError> { 177 | let comments = db.get_comments(post_id).await?; 178 | 179 | let mut projects = HashMap::new(); 180 | for comment in &comments { 181 | if let Some(project) = comment.posting_project_id { 182 | let project = project as u64; 183 | if !projects.contains_key(&project) { 184 | projects.insert(project, cohost_api_project(db, viewer_id, project).await?); 185 | } 186 | } 187 | } 188 | 189 | type ByParent = HashMap>; 190 | let mut by_parent: ByParent = HashMap::new(); 191 | for comment in comments { 192 | let comment_data = comment.data()?; 193 | 194 | let is_viewer_comment = comment 195 | .posting_project_id 196 | .map_or(false, |p| p as u64 == viewer_id); 197 | 198 | let cohost_comment = CommentFromCohost { 199 | poster: comment 200 | .posting_project_id 201 | .and_then(|proj| projects.get(&(proj as u64)).cloned()), 202 | comment: InnerComment { 203 | body: comment_data.body, 204 | comment_id: comment.id.clone(), 205 | children: Vec::new(), 206 | deleted: comment_data.deleted, 207 | has_cohost_plus: comment_data.has_cohost_plus, 208 | hidden: 
comment_data.hidden, 209 | in_reply_to: comment.in_reply_to_id.clone(), 210 | post_id, 211 | posted_at_iso: comment.published_at, 212 | }, 213 | can_edit: if is_viewer_comment { 214 | Permission::Allowed 215 | } else { 216 | Permission::NotAllowed 217 | }, 218 | can_hide: if is_editor { 219 | Permission::Allowed 220 | } else { 221 | Permission::NotAllowed 222 | }, 223 | can_interact: Permission::Allowed, 224 | }; 225 | by_parent 226 | .entry(comment.in_reply_to_id.unwrap_or_default()) 227 | .or_default() 228 | .push(cohost_comment); 229 | } 230 | 231 | fn collect(by_parent: &mut ByParent, parent: &str) -> Vec { 232 | let mut comments = Vec::new(); 233 | 234 | if let Some(items) = by_parent.remove(parent) { 235 | comments.reserve(items.len()); 236 | 237 | for mut item in items { 238 | item.comment.children = collect(by_parent, &item.comment.comment_id); 239 | comments.push(item); 240 | } 241 | } 242 | 243 | comments 244 | } 245 | 246 | let mut comments = collect(&mut by_parent, ""); 247 | 248 | // comments without parents? I dunno 249 | for items in by_parent.into_values() { 250 | comments.extend(items); 251 | } 252 | 253 | Ok(comments) 254 | } 255 | -------------------------------------------------------------------------------- /db/src/render/feed.rs: -------------------------------------------------------------------------------- 1 | use crate::data::{Database, PostQuery}; 2 | use crate::post::PostFromCohost; 3 | use crate::render::api_data::{cohost_api_post, cohost_api_project, GetDataError}; 4 | use crate::render::md_render::{PostRenderRequest, PostRenderResult}; 5 | use crate::render::rewrite::rewrite_projects_in_post; 6 | use crate::render::PageRenderer; 7 | use chrono::Utc; 8 | use reqwest::StatusCode; 9 | use serde::{Deserialize, Serialize}; 10 | use std::collections::HashMap; 11 | use tera::Context; 12 | use thiserror::Error; 13 | 14 | pub struct RenderedPosts { 15 | pub posts: Vec, 16 | pub rendered_posts: HashMap, 17 | pub max_page: u64, 18 | } 19 | 20 | impl PageRenderer { 21 | pub async fn get_rendered_posts( 22 | &self, 23 | db: &Database, 24 | viewer_id: u64, 25 | post_query: &PostQuery, 26 | ) -> Result { 27 | let post_ids = post_query.get(db).await?; 28 | 29 | let total_count = post_query.count(db).await?; 30 | 31 | let max_page = total_count.saturating_sub(1) / 20; 32 | 33 | let mut posts = Vec::with_capacity(post_ids.len()); 34 | let mut rendered_posts = HashMap::with_capacity(post_ids.len()); 35 | 36 | for post in post_ids { 37 | let mut post = cohost_api_post(db, viewer_id, post).await?; 38 | 39 | for post in std::iter::once(&post).chain(post.share_tree.iter()) { 40 | let resources = db.get_saved_resource_urls_for_post(post.post_id).await?; 41 | 42 | let result = self 43 | .md 44 | .render_post(PostRenderRequest { 45 | post_id: post.post_id, 46 | blocks: post.blocks.clone(), 47 | published_at: post 48 | .published_at 49 | .clone() 50 | .unwrap_or_else(|| Utc::now().to_rfc3339()), 51 | has_cohost_plus: post.has_cohost_plus, 52 | resources, 53 | }) 54 | .await 55 | .map_err(|e| GetDataError::Render(e))?; 56 | 57 | rendered_posts.insert(post.post_id, result); 58 | } 59 | 60 | rewrite_projects_in_post(db, &mut post) 61 | .await 62 | .map_err(|e| GetDataError::Render(e))?; 63 | 64 | posts.push(post); 65 | } 66 | 67 | Ok(RenderedPosts { 68 | posts, 69 | rendered_posts, 70 | max_page, 71 | }) 72 | } 73 | } 74 | 75 | #[derive(Debug, Error)] 76 | pub enum RenderFeedError { 77 | #[error(transparent)] 78 | Data(#[from] GetDataError), 79 | #[error(transparent)] 80 | Render(#[from] 
tera::Error), 81 | } 82 | 83 | impl RenderFeedError { 84 | pub fn status(&self) -> StatusCode { 85 | match self { 86 | Self::Data(GetDataError::NotFound) => StatusCode::NOT_FOUND, 87 | _ => StatusCode::INTERNAL_SERVER_ERROR, 88 | } 89 | } 90 | } 91 | 92 | fn default_true() -> bool { 93 | true 94 | } 95 | 96 | #[derive(Debug, Clone, Serialize, Deserialize)] 97 | #[serde(rename_all = "camelCase")] 98 | pub struct TagFeedQuery { 99 | #[serde(default)] 100 | page: u64, 101 | #[serde(default = "default_true")] 102 | show_18_plus_posts: bool, 103 | } 104 | 105 | #[derive(Debug, Serialize)] 106 | #[serde(rename_all = "camelCase")] 107 | struct TagFeedFilterState { 108 | query: TagFeedQuery, 109 | on_toggle_18_plus_posts: String, 110 | on_prev_page: String, 111 | on_next_page: String, 112 | } 113 | 114 | impl TagFeedQuery { 115 | fn fmt_query(&self) -> String { 116 | let mut out = Vec::new(); 117 | 118 | if self.page > 0 { 119 | out.push(format!("page={}", self.page)); 120 | } 121 | if !self.show_18_plus_posts { 122 | out.push("show18PlusPosts=false".into()); 123 | } 124 | 125 | let mut out = out.join("&"); 126 | if !out.is_empty() { 127 | out.insert(0, '?'); 128 | } 129 | out 130 | } 131 | 132 | fn to_filter_state(self, path: &str, max_page: u64) -> TagFeedFilterState { 133 | let on_toggle_adult = { 134 | let q = Self { 135 | show_18_plus_posts: !self.show_18_plus_posts, 136 | ..self.clone() 137 | } 138 | .fmt_query(); 139 | format!("{path}{q}") 140 | }; 141 | 142 | let on_prev_page = if self.page > 0 { 143 | let q = Self { 144 | page: self.page - 1, 145 | ..self.clone() 146 | } 147 | .fmt_query(); 148 | format!("{path}{q}") 149 | } else { 150 | "".into() 151 | }; 152 | let on_next_page = if self.page < max_page { 153 | let q = Self { 154 | page: self.page + 1, 155 | ..self.clone() 156 | } 157 | .fmt_query(); 158 | format!("{path}{q}") 159 | } else { 160 | "".into() 161 | }; 162 | 163 | TagFeedFilterState { 164 | query: self, 165 | on_toggle_18_plus_posts: on_toggle_adult, 166 | on_prev_page, 167 | on_next_page, 168 | } 169 | } 170 | } 171 | 172 | impl PageRenderer { 173 | pub async fn render_tag_feed( 174 | &self, 175 | db: &Database, 176 | path: &str, 177 | tag: &str, 178 | query: TagFeedQuery, 179 | ) -> Result { 180 | let canon_tag = db 181 | .canonical_tag_capitalization(tag) 182 | .await 183 | .map_err(|e| GetDataError::from(e))? 
184 | .unwrap_or(tag.to_string()); 185 | 186 | let synonyms = db 187 | .synonym_tags(&canon_tag) 188 | .await 189 | .map_err(|e| GetDataError::from(e))?; 190 | 191 | let related_tags = db 192 | .related_tags(&canon_tag, &synonyms) 193 | .await 194 | .map_err(|e| GetDataError::from(e))?; 195 | 196 | let post_query = PostQuery { 197 | offset: query.page * 20, 198 | limit: 20, 199 | include_tags: vec![canon_tag.clone()], 200 | is_adult: match query.show_18_plus_posts { 201 | true => None, 202 | false => Some(false), 203 | }, 204 | ..Default::default() 205 | }; 206 | 207 | let RenderedPosts { 208 | posts, 209 | rendered_posts, 210 | max_page, 211 | } = self.get_rendered_posts(db, 0, &post_query).await?; 212 | 213 | let mut template_ctx = Context::new(); 214 | template_ctx.insert("tag", &canon_tag); 215 | 216 | template_ctx.insert("synonym_tags", &synonyms); 217 | template_ctx.insert("related_tags", &related_tags); 218 | 219 | template_ctx.insert("posts", &posts); 220 | template_ctx.insert("rendered_posts", &rendered_posts); 221 | 222 | template_ctx.insert("filter_state", &query.to_filter_state(path, max_page)); 223 | 224 | let body = self.tera.render("tag_feed.html", &template_ctx)?; 225 | 226 | Ok(body) 227 | } 228 | 229 | pub async fn render_liked_feed( 230 | &self, 231 | db: &Database, 232 | project: &str, 233 | // just re-use this. it's a subset 234 | query: TagFeedQuery, 235 | ) -> Result { 236 | let project_id = db 237 | .project_id_for_handle(project) 238 | .await 239 | .map_err(|e| GetDataError::from(e))?; 240 | 241 | let project = cohost_api_project(db, project_id, project_id).await?; 242 | 243 | let post_query = PostQuery { 244 | offset: query.page * 20, 245 | limit: 20, 246 | is_liked_by: Some(project_id), 247 | ..Default::default() 248 | }; 249 | 250 | let RenderedPosts { 251 | posts, 252 | rendered_posts, 253 | max_page, 254 | } = self.get_rendered_posts(db, project_id, &post_query).await?; 255 | 256 | let mut template_ctx = Context::new(); 257 | template_ctx.insert("project", &project); 258 | 259 | template_ctx.insert("posts", &posts); 260 | template_ctx.insert("rendered_posts", &rendered_posts); 261 | 262 | let path = format!("/{}/liked-posts", project.handle); 263 | template_ctx.insert("filter_state", &query.to_filter_state(&path, max_page)); 264 | 265 | let body = self.tera.render("liked_feed.html", &template_ctx)?; 266 | 267 | Ok(body) 268 | } 269 | 270 | pub async fn render_dashboard( 271 | &self, 272 | db: &Database, 273 | project: &str, 274 | // just re-use this. 
it's a subset 275 | query: TagFeedQuery, 276 | ) -> Result { 277 | let project_id = db 278 | .project_id_for_handle(project) 279 | .await 280 | .map_err(|e| GetDataError::from(e))?; 281 | 282 | let project = cohost_api_project(db, project_id, project_id).await?; 283 | 284 | let post_query = PostQuery { 285 | offset: query.page * 20, 286 | limit: 20, 287 | is_dashboard_for: Some(project_id), 288 | ..Default::default() 289 | }; 290 | 291 | let RenderedPosts { 292 | posts, 293 | rendered_posts, 294 | max_page, 295 | } = self.get_rendered_posts(db, project_id, &post_query).await?; 296 | 297 | let mut template_ctx = Context::new(); 298 | template_ctx.insert("project", &project); 299 | 300 | template_ctx.insert("posts", &posts); 301 | template_ctx.insert("rendered_posts", &rendered_posts); 302 | 303 | let path = format!("/{}/dashboard", project.handle); 304 | template_ctx.insert("filter_state", &query.to_filter_state(&path, max_page)); 305 | 306 | let body = self.tera.render("dashboard.html", &template_ctx)?; 307 | 308 | Ok(body) 309 | } 310 | } 311 | -------------------------------------------------------------------------------- /db/src/render/index.rs: -------------------------------------------------------------------------------- 1 | use crate::data::Database; 2 | use crate::render::PageRenderer; 3 | use tera::Context; 4 | 5 | impl PageRenderer { 6 | pub async fn render_index_page(&self, db: &Database) -> anyhow::Result { 7 | let handles = db.get_all_project_handles_with_posts().await?; 8 | let dashboard_handles = db.project_handles_with_follows().await?; 9 | let liked_handles = db.project_handles_who_liked_posts().await?; 10 | 11 | let mut template_ctx = Context::new(); 12 | template_ctx.insert("projects", &handles); 13 | template_ctx.insert("projects_with_dashboards", &dashboard_handles); 14 | template_ctx.insert("projects_who_liked_posts", &liked_handles); 15 | 16 | let body = self.tera.render("index.html", &template_ctx)?; 17 | 18 | Ok(body) 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /db/src/render/md_render.rs: -------------------------------------------------------------------------------- 1 | use crate::bundled_files::MD_RENDER_COMPILED; 2 | use crate::post::PostBlock; 3 | use deno_core::_ops::RustToV8; 4 | use deno_core::url::Url; 5 | use deno_core::{ascii_str, serde_v8, v8, JsRuntime, RuntimeOptions}; 6 | use deno_web::TimersPermission; 7 | use serde::{Deserialize, Serialize}; 8 | use std::cell::RefCell; 9 | use std::collections::VecDeque; 10 | use std::sync::{Arc, Condvar, Mutex}; 11 | use tokio::sync::oneshot; 12 | 13 | #[derive(Debug, Serialize, Deserialize)] 14 | #[serde(rename_all = "camelCase")] 15 | pub struct PostRenderRequest { 16 | pub post_id: u64, 17 | pub blocks: Vec, 18 | pub published_at: String, 19 | pub has_cohost_plus: bool, 20 | pub resources: Vec, 21 | } 22 | 23 | #[derive(Debug, Serialize, Deserialize)] 24 | #[serde(rename_all = "camelCase")] 25 | pub struct PostRenderResult { 26 | pub preview: String, 27 | pub full: Option, 28 | pub class_name: String, 29 | pub view_model: String, 30 | } 31 | 32 | #[derive(Debug, Serialize, Deserialize)] 33 | #[serde(rename_all = "camelCase")] 34 | pub struct MarkdownRenderRequest { 35 | pub markdown: String, 36 | pub published_at: String, 37 | pub context: MarkdownRenderContext, 38 | pub has_cohost_plus: bool, 39 | pub resources: Vec, 40 | } 41 | 42 | #[derive(Debug, Serialize, Deserialize)] 43 | #[serde(rename_all = "kebab-case")] 44 | pub enum MarkdownRenderContext { 
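// `Profile` is used for project descriptions and `Comment` for comment bodies
// (see project_profile.rs and single_post.rs); presumably the compiled renderer
// applies slightly different markdown rules per context.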
45 | Profile, 46 | Comment, 47 | } 48 | 49 | #[derive(Debug, Serialize, Deserialize)] 50 | pub struct MarkdownRenderResult { 51 | html: String, 52 | } 53 | 54 | enum QueueItem { 55 | Post { 56 | req: PostRenderRequest, 57 | ret: oneshot::Sender>, 58 | }, 59 | Markdown { 60 | req: MarkdownRenderRequest, 61 | ret: oneshot::Sender>, 62 | }, 63 | } 64 | 65 | pub struct MarkdownRenderer { 66 | queue: Arc>>, 67 | signal: Arc, 68 | } 69 | 70 | impl MarkdownRenderer { 71 | pub fn new(renderers: usize) -> Self { 72 | JsRuntime::init_platform(None, true); 73 | 74 | // is there a better solution to this? I am not going to find out right now 75 | let queue = Arc::new(Mutex::new(VecDeque::::new())); 76 | let signal = Arc::new(Condvar::new()); 77 | 78 | for i in 0..renderers { 79 | let queue = Arc::clone(&queue); 80 | let signal = Arc::clone(&signal); 81 | let _ = std::thread::Builder::new() 82 | .name(format!("post render {i}")) 83 | .spawn(|| { 84 | let renderer = ThreadMarkdownRenderer::new(); 85 | 86 | let rt = tokio::runtime::Builder::new_current_thread() 87 | .build() 88 | .unwrap(); 89 | 90 | let local_set = tokio::task::LocalSet::new(); 91 | let fut = local_set.run_until(async move { 92 | loop { 93 | let item = loop { 94 | let mut queue = queue.lock().unwrap(); 95 | if let Some(item) = queue.pop_front() { 96 | break item; 97 | } 98 | while queue.is_empty() { 99 | queue = signal.wait(queue).unwrap(); 100 | } 101 | }; 102 | 103 | match item { 104 | QueueItem::Post { req, ret } => { 105 | let result = renderer.render_post(req).await; 106 | let _ = ret.send(result); 107 | } 108 | QueueItem::Markdown { req, ret } => { 109 | let result = renderer.render_markdown(req).await; 110 | let _ = ret.send(result); 111 | } 112 | } 113 | } 114 | }); 115 | 116 | rt.block_on(fut); 117 | }); 118 | } 119 | 120 | Self { queue, signal } 121 | } 122 | 123 | pub async fn render_post(&self, req: PostRenderRequest) -> anyhow::Result { 124 | let (ret, recv) = oneshot::channel(); 125 | 126 | { 127 | let mut queue = self.queue.lock().unwrap(); 128 | queue.push_back(QueueItem::Post { req, ret }); 129 | self.signal.notify_one(); 130 | } 131 | 132 | recv.await? 133 | } 134 | 135 | pub async fn render_markdown( 136 | &self, 137 | req: MarkdownRenderRequest, 138 | ) -> anyhow::Result { 139 | let (ret, recv) = oneshot::channel(); 140 | 141 | { 142 | let mut queue = self.queue.lock().unwrap(); 143 | queue.push_back(QueueItem::Markdown { req, ret }); 144 | self.signal.notify_one(); 145 | } 146 | 147 | recv.await? 
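// Same hand-off as render_post above: push the request onto the shared queue, wake one
// renderer thread via the condvar, then await the oneshot receiver for its result.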
148 | } 149 | } 150 | 151 | struct ThreadMarkdownRenderer { 152 | rt: RefCell, 153 | render_post_fn: v8::Global, 154 | render_markdown_fn: v8::Global, 155 | } 156 | 157 | deno_core::extension!( 158 | small_runtime, 159 | esm_entry_point = "ext:small_runtime/md_render_rt.js", 160 | esm = [dir "src/render", "md_render_rt.js"], 161 | ); 162 | 163 | struct AllowHrTime; 164 | 165 | impl TimersPermission for AllowHrTime { 166 | fn allow_hrtime(&mut self) -> bool { 167 | true 168 | } 169 | } 170 | 171 | impl ThreadMarkdownRenderer { 172 | fn new() -> Self { 173 | let mut rt = JsRuntime::new(RuntimeOptions { 174 | extensions: vec![ 175 | deno_webidl::deno_webidl::init_ops_and_esm(), 176 | deno_console::deno_console::init_ops_and_esm(), 177 | deno_url::deno_url::init_ops_and_esm(), 178 | deno_web::deno_web::init_ops_and_esm::( 179 | Arc::new(Default::default()), 180 | Some(Url::parse("https://cohost.org/").unwrap()), 181 | ), 182 | small_runtime::init_ops_and_esm(), 183 | ], 184 | ..Default::default() 185 | }); 186 | 187 | let render_module = rt 188 | .lazy_load_es_module_with_code("file:///render.js", MD_RENDER_COMPILED) 189 | .expect("md render script error"); 190 | 191 | let (render_post_fn, render_markdown_fn) = { 192 | let mut scope = rt.handle_scope(); 193 | 194 | let exports = v8::Local::new(&mut scope, render_module); 195 | let exports = v8::Local::::try_from(exports).expect("no exports"); 196 | 197 | let render_post_name = ascii_str!("renderPost").v8_string(&mut scope); 198 | let render_post_fn = exports 199 | .get(&mut scope, render_post_name.into()) 200 | .expect("missing renderPost export"); 201 | let render_post_fn = v8::Local::::try_from(render_post_fn) 202 | .expect("renderPost is not a function"); 203 | 204 | let render_post_fn = v8::Global::new(&mut scope, render_post_fn); 205 | 206 | let render_markdown_name = ascii_str!("renderMarkdown").v8_string(&mut scope); 207 | let render_markdown_fn = exports 208 | .get(&mut scope, render_markdown_name.into()) 209 | .expect("missing renderMarkdown export"); 210 | let render_markdown_fn = v8::Local::::try_from(render_markdown_fn) 211 | .expect("renderMarkdown is not a function"); 212 | 213 | let render_markdown_fn = v8::Global::new(&mut scope, render_markdown_fn); 214 | 215 | (render_post_fn, render_markdown_fn) 216 | }; 217 | 218 | Self { 219 | rt: RefCell::new(rt), 220 | render_post_fn, 221 | render_markdown_fn, 222 | } 223 | } 224 | 225 | async fn render_post(&self, options: PostRenderRequest) -> anyhow::Result { 226 | let mut rt = self.rt.borrow_mut(); 227 | 228 | let options = { 229 | let main_context = rt.main_context(); 230 | let mut scope = v8::HandleScope::with_context(rt.v8_isolate(), main_context); 231 | let options = serde_v8::to_v8(&mut scope, options)?; 232 | v8::Global::new(&mut scope, options) 233 | }; 234 | 235 | let result = rt.call_with_args(&self.render_post_fn, &[options]); 236 | let result = rt 237 | .with_event_loop_promise(result, Default::default()) 238 | .await?; 239 | 240 | let main_context = rt.main_context(); 241 | let mut scope = v8::HandleScope::with_context(rt.v8_isolate(), main_context); 242 | let result = result.to_v8(&mut scope); 243 | let result = serde_v8::from_v8(&mut scope, result)?; 244 | 245 | Ok(result) 246 | } 247 | 248 | async fn render_markdown( 249 | &self, 250 | options: MarkdownRenderRequest, 251 | ) -> anyhow::Result { 252 | let mut rt = self.rt.borrow_mut(); 253 | 254 | let options = { 255 | let main_context = rt.main_context(); 256 | let mut scope = 
v8::HandleScope::with_context(rt.v8_isolate(), main_context); 257 | let options = serde_v8::to_v8(&mut scope, options)?; 258 | v8::Global::new(&mut scope, options) 259 | }; 260 | 261 | let result = rt.call_with_args(&self.render_markdown_fn, &[options]); 262 | let result = rt 263 | .with_event_loop_promise(result, Default::default()) 264 | .await?; 265 | 266 | let main_context = rt.main_context(); 267 | let mut scope = v8::HandleScope::with_context(rt.v8_isolate(), main_context); 268 | let result = result.to_v8(&mut scope); 269 | let result = serde_v8::from_v8(&mut scope, result)?; 270 | 271 | Ok(result) 272 | } 273 | } 274 | -------------------------------------------------------------------------------- /db/src/render/md_render_rt.js: -------------------------------------------------------------------------------- 1 | import * as abortSignal from "ext:deno_web/03_abort_signal.js"; 2 | import * as base64 from "ext:deno_web/05_base64.js"; 3 | import * as compression from "ext:deno_web/14_compression.js"; 4 | import * as domException from "ext:deno_web/01_dom_exception.js"; 5 | import * as encoding from "ext:deno_web/08_text_encoding.js"; 6 | import * as event from "ext:deno_web/02_event.js"; 7 | import * as file from "ext:deno_web/09_file.js"; 8 | import * as fileReader from "ext:deno_web/10_filereader.js"; 9 | import * as globalInterfaces from "ext:deno_web/04_global_interfaces.js"; 10 | import * as imageData from "ext:deno_web/16_image_data.js"; 11 | import * as location from "ext:deno_web/12_location.js"; 12 | import * as messagePort from "ext:deno_web/13_message_port.js"; 13 | import * as performance from "ext:deno_web/15_performance.js"; 14 | import * as streams from "ext:deno_web/06_streams.js"; 15 | import * as timers from "ext:deno_web/02_timers.js"; 16 | import * as url from "ext:deno_url/00_url.js"; 17 | import * as urlPattern from "ext:deno_url/01_urlpattern.js"; 18 | 19 | import { core } from "ext:core/mod.js"; 20 | 21 | globalThis.AbortController = abortSignal.AbortController; 22 | globalThis.AbortSignal = abortSignal.AbortSignal; 23 | globalThis.Blob = file.Blob; 24 | globalThis.CloseEvent = event.CloseEvent; 25 | globalThis.CompressionStream = compression.CompressionStream; 26 | globalThis.CountQueuingStrategy = streams.CountQueuingStrategy; 27 | globalThis.CustomEvent = event.CustomEvent; 28 | globalThis.DOMException = domException.DOMException; 29 | globalThis.ErrorEvent = event.ErrorEvent; 30 | globalThis.Event = event.Event; 31 | globalThis.EventTarget = event.EventTarget; 32 | globalThis.File = file.File; 33 | globalThis.FileReader = fileReader.FileReader; 34 | globalThis.ImageData = imageData.ImageData; 35 | globalThis.MessageChannel = messagePort.MessageChannel; 36 | globalThis.MessageEvent = event.MessageEvent; 37 | globalThis.MessagePort = messagePort.MessagePort; 38 | globalThis.Performance = performance.Performance; 39 | globalThis.PerformanceEntry = performance.PerformanceEntry; 40 | globalThis.PerformanceMark = performance.PerformanceMark; 41 | globalThis.PerformanceMeasure = performance.PerformanceMeasure; 42 | globalThis.PromiseRejectionEvent = event.PromiseRejectionEvent; 43 | globalThis.ReadableByteStreamController = streams.ReadableByteStreamController; 44 | globalThis.ReadableStream = streams.ReadableStream; 45 | globalThis.ReadableStreamBYOBReader = streams.ReadableStreamBYOBReader; 46 | globalThis.ReadableStreamBYOBRequest = streams.ReadableStreamBYOBRequest; 47 | globalThis.ReadableStreamDefaultController = streams.ReadableStreamDefaultController; 48 | 
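// These assignments expose the deno_web/deno_url built-ins on globalThis so that the
// bundled cohost markdown renderer finds the web APIs it expects inside this minimal runtime.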
globalThis.ReadableStreamDefaultReader = streams.ReadableStreamDefaultReader; 49 | globalThis.TextDecoder = encoding.TextDecoder; 50 | globalThis.TextDecoderStream = encoding.TextDecoderStream; 51 | globalThis.TextEncoder = encoding.TextEncoder; 52 | globalThis.TextEncoderStream = encoding.TextEncoderStream; 53 | globalThis.TransformStream = streams.TransformStream; 54 | globalThis.URL = url.URL; 55 | globalThis.URLPattern = urlPattern.URLPattern; 56 | globalThis.URLSearchParams = url.URLSearchParams; 57 | globalThis.WritableStream = streams.WritableStream; 58 | globalThis.WritableStreamDefaultController = streams.WritableStreamDefaultController; 59 | globalThis.WritableStreamDefaultWriter = streams.WritableStreamDefaultWriter; 60 | globalThis.atob = base64.atob; 61 | globalThis.btoa = base64.btoa; 62 | globalThis.clearInterval = timers.clearInterval; 63 | globalThis.clearTimeout = timers.clearTimeout; 64 | globalThis.performance = performance.performance; 65 | globalThis.setInterval = timers.setInterval; 66 | globalThis.setTimeout = timers.setTimeout; 67 | globalThis.structuredClone = messagePort.structuredClone; 68 | 69 | Object.defineProperty(globalThis, 'location', location.workerLocationDescriptor); 70 | Object.defineProperty(globalThis, 'WorkerGlobalScope', globalInterfaces.workerGlobalScopeConstructorDescriptor); 71 | Object.defineProperty(globalThis, 'DedicatedWorkerGlobalScope', globalInterfaces.dedicatedWorkerGlobalScopeConstructorDescriptor); 72 | 73 | globalThis.self = globalThis; 74 | 75 | location.setLocationHref('https://cohost.org/'); 76 | 77 | globalThis.process = { 78 | env: { 79 | NODE_ENV: "production", 80 | }, 81 | cwd() { 82 | return '/'; 83 | } 84 | }; 85 | -------------------------------------------------------------------------------- /db/src/render/mod.rs: -------------------------------------------------------------------------------- 1 | use crate::bundled_files::TEMPLATES; 2 | use crate::render::md_render::MarkdownRenderer; 3 | use tera::{Context, Tera}; 4 | 5 | pub mod api_data; 6 | pub mod feed; 7 | pub mod index; 8 | pub mod md_render; 9 | pub mod project_profile; 10 | pub mod rewrite; 11 | pub mod single_post; 12 | 13 | pub struct PageRenderer { 14 | tera: Tera, 15 | md: MarkdownRenderer, 16 | } 17 | 18 | impl PageRenderer { 19 | pub fn new() -> Self { 20 | let mut tera = Tera::default(); 21 | 22 | #[rustfmt::skip] 23 | let res = tera.add_raw_templates(TEMPLATES.iter().copied()); 24 | 25 | if let Err(e) = res { 26 | eprintln!("{e}"); 27 | std::process::exit(1); 28 | } 29 | 30 | let md = MarkdownRenderer::new(4); 31 | 32 | Self { tera, md } 33 | } 34 | 35 | pub fn render_error_page(&self, message: &str) -> String { 36 | let mut template_ctx = Context::new(); 37 | template_ctx.insert("message", message); 38 | 39 | self.tera 40 | .render("error.html", &template_ctx) 41 | .unwrap_or("failed to render error page".into()) 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /db/src/render/project_profile.rs: -------------------------------------------------------------------------------- 1 | use crate::data::{Database, PostQuery}; 2 | use crate::render::api_data::cohost_api_project; 3 | use crate::render::feed::RenderedPosts; 4 | use crate::render::md_render::{MarkdownRenderContext, MarkdownRenderRequest}; 5 | use crate::render::rewrite::rewrite_project; 6 | use crate::render::PageRenderer; 7 | use axum::http::StatusCode; 8 | use chrono::Utc; 9 | use serde::{Deserialize, Serialize}; 10 | use tera::Context; 11 | use 
thiserror::Error; 12 | 13 | #[derive(Debug, Error)] 14 | pub enum RenderProjectProfileError { 15 | #[error("no such project")] 16 | NoSuchProject, 17 | #[error("error rendering project: {0}")] 18 | RenderProject(anyhow::Error), 19 | #[error("{0:?}")] 20 | Unknown(anyhow::Error), 21 | } 22 | 23 | impl RenderProjectProfileError { 24 | pub fn status(&self) -> StatusCode { 25 | match self { 26 | RenderProjectProfileError::NoSuchProject => StatusCode::NOT_FOUND, 27 | _ => StatusCode::INTERNAL_SERVER_ERROR, 28 | } 29 | } 30 | } 31 | 32 | #[derive(Debug, Clone, Default, Serialize, Deserialize)] 33 | #[serde(rename_all = "camelCase")] 34 | pub struct ProjectProfileQuery { 35 | page: Option, 36 | #[serde(default)] 37 | hide_shares: bool, 38 | #[serde(default)] 39 | hide_replies: bool, 40 | #[serde(default)] 41 | hide_asks: bool, 42 | } 43 | 44 | #[derive(Debug, Serialize)] 45 | #[serde(rename_all = "camelCase")] 46 | struct FilterState { 47 | query: ProjectProfileQuery, 48 | 49 | on_show_shares: String, 50 | on_hide_shares: String, 51 | on_show_replies: String, 52 | on_hide_replies: String, 53 | on_show_asks: String, 54 | on_hide_asks: String, 55 | on_prev_page: String, 56 | on_next_page: String, 57 | } 58 | 59 | impl ProjectProfileQuery { 60 | fn fmt_query(&self) -> String { 61 | let mut out = Vec::new(); 62 | 63 | if let Some(page) = self.page { 64 | out.push(format!("page={page}")); 65 | } 66 | if self.hide_shares { 67 | out.push("hideShares=true".into()); 68 | } 69 | if self.hide_replies { 70 | out.push("hideReplies=true".into()); 71 | } 72 | if self.hide_asks { 73 | out.push("hideAsks=true".into()); 74 | } 75 | 76 | let mut out = out.join("&"); 77 | if !out.is_empty() { 78 | out.insert(0, '?'); 79 | } 80 | out 81 | } 82 | 83 | #[rustfmt::skip] 84 | fn to_filter_state(&self, max_page: u64) -> FilterState { 85 | let on_show_shares = Self { hide_shares: false, ..self.clone() }.fmt_query(); 86 | let on_hide_shares = Self { hide_shares: true, ..self.clone() }.fmt_query(); 87 | let on_show_replies = Self { hide_replies: false, ..self.clone() }.fmt_query(); 88 | let on_hide_replies = Self { hide_replies: true, ..self.clone() }.fmt_query(); 89 | let on_show_asks = Self { hide_asks: false, ..self.clone() }.fmt_query(); 90 | let on_hide_asks = Self { hide_asks: true, ..self.clone() }.fmt_query(); 91 | 92 | let page = self.page.unwrap_or_default(); 93 | let on_prev_page = if page > 0 { 94 | Self { page: Some(page - 1), ..self.clone() }.fmt_query() 95 | } else { 96 | "".into() 97 | }; 98 | let on_next_page = if page < max_page { 99 | Self { page: Some(page + 1), ..self.clone() }.fmt_query() 100 | } else { 101 | "".into() 102 | }; 103 | 104 | FilterState { 105 | query: self.clone(), 106 | on_show_shares, 107 | on_hide_shares, 108 | on_show_replies, 109 | on_hide_replies, 110 | on_show_asks, 111 | on_hide_asks, 112 | on_prev_page, 113 | on_next_page, 114 | } 115 | } 116 | } 117 | 118 | impl PageRenderer { 119 | pub async fn render_project_profile( 120 | &self, 121 | db: &Database, 122 | project_handle: &str, 123 | query: ProjectProfileQuery, 124 | tagged: Option, 125 | ) -> Result { 126 | let project_id = db 127 | .project_id_for_handle(project_handle) 128 | .await 129 | .map_err(|e| match e { 130 | diesel::result::Error::NotFound => RenderProjectProfileError::NoSuchProject, 131 | e => RenderProjectProfileError::Unknown(e.into()), 132 | })?; 133 | 134 | let mut project = cohost_api_project(db, 0, project_id) 135 | .await 136 | .map_err(|e| RenderProjectProfileError::Unknown(e.into()))?; 137 | 138 | 
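// rewrite_project (see db/src/render/rewrite.rs) swaps any avatar/header URLs that have
// been saved locally for their /r/… resource routes before the profile is rendered.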
rewrite_project(db, &mut project) 139 | .await 140 | .map_err(|e| RenderProjectProfileError::Unknown(e.into()))?; 141 | 142 | let resources = db 143 | .get_saved_resource_urls_for_project(project_id) 144 | .await 145 | .map_err(|e| RenderProjectProfileError::Unknown(e.into()))?; 146 | 147 | let rendered_project_description = self 148 | .md 149 | .render_markdown(MarkdownRenderRequest { 150 | markdown: project.description.clone(), 151 | published_at: Utc::now().to_rfc3339(), 152 | context: MarkdownRenderContext::Profile, 153 | has_cohost_plus: false, 154 | resources, 155 | }) 156 | .await 157 | .map_err(|e| RenderProjectProfileError::RenderProject(e))?; 158 | 159 | let post_query = PostQuery { 160 | posting_project_id: Some(project_id), 161 | offset: query.page.unwrap_or_default() * 20, 162 | limit: 20, 163 | include_tags: tagged.iter().cloned().collect(), 164 | is_share: if query.hide_shares { Some(false) } else { None }, 165 | is_reply: if query.hide_replies { 166 | Some(false) 167 | } else { 168 | None 169 | }, 170 | is_ask: if query.hide_asks { Some(false) } else { None }, 171 | ..Default::default() 172 | }; 173 | 174 | let RenderedPosts { 175 | posts, 176 | rendered_posts, 177 | max_page, 178 | } = self 179 | .get_rendered_posts(db, 0, &post_query) 180 | .await 181 | .map_err(|e| RenderProjectProfileError::Unknown(e.into()))?; 182 | 183 | let mut template_ctx = Context::new(); 184 | template_ctx.insert("project", &project); 185 | template_ctx.insert( 186 | "rendered_project_description", 187 | &rendered_project_description, 188 | ); 189 | template_ctx.insert("posts", &posts); 190 | template_ctx.insert("rendered_posts", &rendered_posts); 191 | template_ctx.insert("filter_state", &query.to_filter_state(max_page)); 192 | 193 | if let Some(tag) = tagged { 194 | template_ctx.insert("tagged", &tag); 195 | } 196 | 197 | let body = self 198 | .tera 199 | .render("project_profile.html", &template_ctx) 200 | .map_err(|e| RenderProjectProfileError::Unknown(e.into()))?; 201 | 202 | Ok(body) 203 | } 204 | } 205 | -------------------------------------------------------------------------------- /db/src/render/rewrite.rs: -------------------------------------------------------------------------------- 1 | use crate::comment::CommentFromCohost; 2 | use crate::data::Database; 3 | use crate::post::PostFromCohost; 4 | use crate::project::ProjectFromCohost; 5 | use deno_core::url::Url; 6 | 7 | fn make_resource_url(s: &str) -> String { 8 | if let Ok(url) = Url::parse(s) { 9 | if let Some(host) = url.host_str() { 10 | let mut query_builder = Url::parse("https://example.com").unwrap(); 11 | 12 | if let Some(q) = url.query() { 13 | query_builder.query_pairs_mut().append_pair("q", q); 14 | } 15 | if let Some(h) = url.fragment() { 16 | query_builder.query_pairs_mut().append_pair("h", h); 17 | } 18 | 19 | let search = if let Some(q) = query_builder.query().filter(|s| !s.is_empty()) { 20 | format!("?{q}") 21 | } else { 22 | String::new() 23 | }; 24 | 25 | return format!("/r/{}/{}{}{}", url.scheme(), host, url.path(), search); 26 | } 27 | } 28 | 29 | format!("/r/u?url={}", urlencoding::encode(s)) 30 | } 31 | 32 | pub async fn rewrite_project(db: &Database, project: &mut ProjectFromCohost) -> anyhow::Result<()> { 33 | let resources = db 34 | .get_saved_resource_urls_for_project(project.project_id) 35 | .await?; 36 | 37 | if resources.contains(&project.avatar_url) { 38 | project.avatar_url = make_resource_url(&project.avatar_url); 39 | } 40 | if resources.contains(&project.avatar_preview_url) { 41 | 
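// make_resource_url above maps a saved URL onto a local route, e.g. (roughly)
// "https://example.org/a.png?dpr=2" -> "/r/https/example.org/a.png?q=dpr%3D2",
// falling back to "/r/u?url=…" when the URL cannot be parsed.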
project.avatar_preview_url = make_resource_url(&project.avatar_preview_url); 42 | } 43 | if let Some(header_url) = &mut project.header_url { 44 | if resources.contains(header_url) { 45 | *header_url = make_resource_url(header_url); 46 | } 47 | } 48 | if let Some(header_preview_url) = &mut project.header_preview_url { 49 | if resources.contains(header_preview_url) { 50 | *header_preview_url = make_resource_url(header_preview_url); 51 | } 52 | } 53 | 54 | Ok(()) 55 | } 56 | 57 | #[async_recursion::async_recursion] 58 | pub async fn rewrite_projects_in_post( 59 | db: &Database, 60 | post: &mut PostFromCohost, 61 | ) -> anyhow::Result<()> { 62 | rewrite_project(db, &mut post.posting_project).await?; 63 | 64 | for post in &mut post.share_tree { 65 | rewrite_projects_in_post(db, post).await? 66 | } 67 | 68 | Ok(()) 69 | } 70 | 71 | #[async_recursion::async_recursion] 72 | pub async fn rewrite_projects_in_comment( 73 | db: &Database, 74 | comment: &mut CommentFromCohost, 75 | ) -> anyhow::Result<()> { 76 | if let Some(poster) = &mut comment.poster { 77 | rewrite_project(db, poster).await?; 78 | } 79 | 80 | for comment in &mut comment.comment.children { 81 | rewrite_projects_in_comment(db, comment).await? 82 | } 83 | 84 | Ok(()) 85 | } 86 | -------------------------------------------------------------------------------- /db/src/render/single_post.rs: -------------------------------------------------------------------------------- 1 | use crate::comment::CommentFromCohost; 2 | use crate::data::Database; 3 | use crate::render::api_data::{cohost_api_comments_for_share_tree, cohost_api_post, GetDataError}; 4 | use crate::render::md_render::{ 5 | MarkdownRenderContext, MarkdownRenderRequest, MarkdownRenderResult, MarkdownRenderer, 6 | PostRenderRequest, 7 | }; 8 | use crate::render::{rewrite, PageRenderer}; 9 | use axum::http::StatusCode; 10 | use chrono::Utc; 11 | use std::collections::HashMap; 12 | use std::convert::identity; 13 | use tera::Context; 14 | use thiserror::Error; 15 | 16 | #[derive(Debug, Error)] 17 | pub enum RenderSinglePostError { 18 | #[error("invalid post ID")] 19 | InvalidPostId, 20 | #[error("post not found")] 21 | PostNotFound, 22 | #[error("error loading comments: {0}")] 23 | Comments(GetDataError), 24 | #[error("error rendering post {0}: {1}")] 25 | Render(u64, anyhow::Error), 26 | #[error("error rendering project: {0}")] 27 | RenderProject(anyhow::Error), 28 | #[error("error rendering comment: {0}")] 29 | RenderComment(anyhow::Error), 30 | #[error("{0:?}")] 31 | Unknown(anyhow::Error), 32 | } 33 | 34 | impl RenderSinglePostError { 35 | pub fn status(&self) -> StatusCode { 36 | match self { 37 | RenderSinglePostError::InvalidPostId => StatusCode::BAD_REQUEST, 38 | RenderSinglePostError::PostNotFound => StatusCode::NOT_FOUND, 39 | RenderSinglePostError::Comments(_) => StatusCode::INTERNAL_SERVER_ERROR, 40 | RenderSinglePostError::Render(..) => StatusCode::INTERNAL_SERVER_ERROR, 41 | RenderSinglePostError::RenderProject(..) => StatusCode::INTERNAL_SERVER_ERROR, 42 | RenderSinglePostError::RenderComment(..) 
=> StatusCode::INTERNAL_SERVER_ERROR, 43 | RenderSinglePostError::Unknown(_) => StatusCode::INTERNAL_SERVER_ERROR, 44 | } 45 | } 46 | } 47 | 48 | impl PageRenderer { 49 | pub async fn render_single_post( 50 | &self, 51 | db: &Database, 52 | project: &str, 53 | post: &str, 54 | ) -> Result { 55 | let post_id = post 56 | .split('-') 57 | .next() 58 | .and_then(|id| id.parse().ok()) 59 | .ok_or(RenderSinglePostError::InvalidPostId)?; 60 | 61 | let mut post = match cohost_api_post(db, 0, post_id).await { 62 | Ok(post) => post, 63 | Err(GetDataError::NotFound) => return Err(RenderSinglePostError::PostNotFound), 64 | Err(err) => return Err(RenderSinglePostError::Unknown(err.into())), 65 | }; 66 | 67 | if post.posting_project.handle != project { 68 | return Err(RenderSinglePostError::PostNotFound); 69 | } 70 | 71 | rewrite::rewrite_projects_in_post(db, &mut post) 72 | .await 73 | .map_err(|e| RenderSinglePostError::Unknown(e))?; 74 | 75 | let mut comments = match cohost_api_comments_for_share_tree(db, 0, &post).await { 76 | Ok(comments) => comments, 77 | Err(err) => return Err(RenderSinglePostError::Comments(err.into())), 78 | }; 79 | 80 | for comment in comments.values_mut().flat_map(identity) { 81 | rewrite::rewrite_projects_in_comment(db, comment) 82 | .await 83 | .map_err(|e| RenderSinglePostError::Unknown(e))?; 84 | } 85 | 86 | let mut rendered_comments = HashMap::new(); 87 | 88 | #[async_recursion::async_recursion] 89 | async fn render_comment( 90 | db: &Database, 91 | md: &MarkdownRenderer, 92 | comment: &CommentFromCohost, 93 | comments: &mut HashMap, 94 | ) -> Result<(), RenderSinglePostError> { 95 | let resources = db 96 | .get_saved_resource_urls_for_comment(&comment.comment.comment_id) 97 | .await 98 | .map_err(|e| RenderSinglePostError::Unknown(e.into()))?; 99 | 100 | let result = md 101 | .render_markdown(MarkdownRenderRequest { 102 | markdown: comment.comment.body.clone(), 103 | context: MarkdownRenderContext::Comment, 104 | published_at: comment.comment.posted_at_iso.clone(), 105 | has_cohost_plus: comment.comment.has_cohost_plus, 106 | resources, 107 | }) 108 | .await 109 | .map_err(|e| RenderSinglePostError::RenderComment(e))?; 110 | 111 | comments.insert(comment.comment.comment_id.clone(), result); 112 | 113 | for child in &comment.comment.children { 114 | render_comment(db, md, child, comments).await?; 115 | } 116 | Ok(()) 117 | } 118 | for comment in comments.values().flat_map(identity) { 119 | render_comment(db, &self.md, comment, &mut rendered_comments).await?; 120 | } 121 | 122 | let mut rendered_posts = HashMap::new(); 123 | 124 | for post in std::iter::once(&post).chain(post.share_tree.iter()) { 125 | let resources = db 126 | .get_saved_resource_urls_for_post(post.post_id) 127 | .await 128 | .map_err(|e| RenderSinglePostError::Unknown(e.into()))?; 129 | 130 | let result = self 131 | .md 132 | .render_post(PostRenderRequest { 133 | post_id: post.post_id, 134 | blocks: post.blocks.clone(), 135 | published_at: post 136 | .published_at 137 | .clone() 138 | .unwrap_or_else(|| Utc::now().to_rfc3339()), 139 | has_cohost_plus: post.has_cohost_plus, 140 | resources, 141 | }) 142 | .await 143 | .map_err(|e| RenderSinglePostError::Render(post.post_id, e))?; 144 | 145 | rendered_posts.insert(post.post_id, result); 146 | } 147 | 148 | let resources = db 149 | .get_saved_resource_urls_for_project(post.posting_project.project_id) 150 | .await 151 | .map_err(|e| RenderSinglePostError::Unknown(e.into()))?; 152 | 153 | let rendered_project_description = self 154 | .md 155 | 
.render_markdown(MarkdownRenderRequest { 156 | markdown: post.posting_project.description.clone(), 157 | published_at: Utc::now().to_rfc3339(), 158 | context: MarkdownRenderContext::Profile, 159 | has_cohost_plus: false, 160 | resources, 161 | }) 162 | .await 163 | .map_err(|e| RenderSinglePostError::RenderProject(e))?; 164 | 165 | let mut template_ctx = Context::new(); 166 | template_ctx.insert("post", &post); 167 | template_ctx.insert("comments", &comments); 168 | template_ctx.insert("rendered_comments", &rendered_comments); 169 | template_ctx.insert("rendered_posts", &rendered_posts); 170 | template_ctx.insert( 171 | "rendered_project_description", 172 | &rendered_project_description, 173 | ); 174 | 175 | let body = self 176 | .tera 177 | .render("single_post.html", &template_ctx) 178 | .map_err(|e| RenderSinglePostError::Unknown(e.into()))?; 179 | 180 | Ok(body) 181 | } 182 | } 183 | -------------------------------------------------------------------------------- /db/src/schema.rs: -------------------------------------------------------------------------------- 1 | // @generated automatically by Diesel CLI. 2 | 3 | diesel::table! { 4 | comment_resources (comment_id, url) { 5 | comment_id -> Text, 6 | url -> Text, 7 | } 8 | } 9 | 10 | diesel::table! { 11 | comments (id) { 12 | id -> Text, 13 | post_id -> Integer, 14 | in_reply_to_id -> Nullable, 15 | posting_project_id -> Nullable, 16 | published_at -> Text, 17 | data -> Binary, 18 | data_version -> Integer, 19 | } 20 | } 21 | 22 | diesel::table! { 23 | data_migration_state (name) { 24 | name -> Text, 25 | value -> Text, 26 | } 27 | } 28 | 29 | diesel::table! { 30 | draft_nonces (post_id) { 31 | post_id -> Integer, 32 | nonce -> Text, 33 | } 34 | } 35 | 36 | diesel::table! { 37 | follows (from_project_id, to_project_id) { 38 | from_project_id -> Integer, 39 | to_project_id -> Integer, 40 | } 41 | } 42 | 43 | diesel::table! { 44 | likes (from_project_id, to_post_id) { 45 | from_project_id -> Integer, 46 | to_post_id -> Integer, 47 | } 48 | } 49 | 50 | diesel::table! { 51 | post_related_projects (post_id, project_id) { 52 | post_id -> Integer, 53 | project_id -> Integer, 54 | } 55 | } 56 | 57 | diesel::table! { 58 | post_resources (post_id, url) { 59 | post_id -> Integer, 60 | url -> Text, 61 | } 62 | } 63 | 64 | diesel::table! { 65 | post_tags (post_id, tag) { 66 | post_id -> Integer, 67 | tag -> Text, 68 | pos -> Integer, 69 | } 70 | } 71 | 72 | diesel::table! { 73 | posts (id) { 74 | id -> Integer, 75 | posting_project_id -> Integer, 76 | published_at -> Nullable, 77 | response_to_ask_id -> Nullable, 78 | share_of_post_id -> Nullable, 79 | is_transparent_share -> Bool, 80 | filename -> Text, 81 | data -> Binary, 82 | data_version -> Integer, 83 | state -> Integer, 84 | is_adult_content -> Bool, 85 | is_pinned -> Bool, 86 | } 87 | } 88 | 89 | diesel::table! { 90 | project_resources (project_id, url) { 91 | project_id -> Integer, 92 | url -> Text, 93 | } 94 | } 95 | 96 | diesel::table! { 97 | projects (id) { 98 | id -> Integer, 99 | handle -> Text, 100 | is_private -> Bool, 101 | requires_logged_in -> Bool, 102 | data -> Binary, 103 | data_version -> Integer, 104 | } 105 | } 106 | 107 | diesel::table! { 108 | related_tags (tag1, tag2) { 109 | tag1 -> Text, 110 | tag2 -> Text, 111 | is_synonym -> Integer, 112 | } 113 | } 114 | 115 | diesel::table! { 116 | resource_content_types (url) { 117 | url -> Text, 118 | content_type -> Text, 119 | } 120 | } 121 | 122 | diesel::table! 
{ 123 | url_files (url) { 124 | url -> Text, 125 | file_path -> Binary, 126 | } 127 | } 128 | 129 | diesel::joinable!(comment_resources -> comments (comment_id)); 130 | diesel::joinable!(comments -> posts (post_id)); 131 | diesel::joinable!(comments -> projects (posting_project_id)); 132 | diesel::joinable!(draft_nonces -> posts (post_id)); 133 | diesel::joinable!(likes -> posts (to_post_id)); 134 | diesel::joinable!(likes -> projects (from_project_id)); 135 | diesel::joinable!(post_related_projects -> posts (post_id)); 136 | diesel::joinable!(post_related_projects -> projects (project_id)); 137 | diesel::joinable!(post_resources -> posts (post_id)); 138 | diesel::joinable!(post_tags -> posts (post_id)); 139 | diesel::joinable!(posts -> projects (posting_project_id)); 140 | diesel::joinable!(project_resources -> projects (project_id)); 141 | 142 | diesel::allow_tables_to_appear_in_same_query!( 143 | comment_resources, 144 | comments, 145 | data_migration_state, 146 | draft_nonces, 147 | follows, 148 | likes, 149 | post_related_projects, 150 | post_resources, 151 | post_tags, 152 | posts, 153 | project_resources, 154 | projects, 155 | related_tags, 156 | resource_content_types, 157 | url_files, 158 | ); 159 | -------------------------------------------------------------------------------- /db/src/trpc.rs: -------------------------------------------------------------------------------- 1 | use crate::comment::CommentFromCohost; 2 | use crate::context::{CohostContext, GetError}; 3 | use crate::post::PostFromCohost; 4 | use crate::project::ProjectFromCohost; 5 | use chrono::Utc; 6 | use serde::{Deserialize, Serialize}; 7 | use std::collections::HashMap; 8 | 9 | #[derive(Debug, Default, Deserialize)] 10 | #[serde(rename_all = "camelCase")] 11 | #[allow(unused)] 12 | pub struct LoginLoggedIn { 13 | pub activated: bool, 14 | pub delete_after: Option, 15 | pub email: Option, 16 | pub email_verified: Option, 17 | pub email_verify_canceled: Option, 18 | pub logged_in: bool, 19 | pub mod_mode: bool, 20 | #[serde(deserialize_with = "deser_project_id")] 21 | pub project_id: u64, 22 | pub read_only: bool, 23 | pub two_factor_active: bool, 24 | pub user_id: Option, 25 | } 26 | 27 | fn deser_project_id<'de, D>(de: D) -> Result 28 | where 29 | D: serde::Deserializer<'de>, 30 | { 31 | let project_id = Option::::deserialize(de)?; 32 | Ok(project_id.unwrap_or_default()) 33 | } 34 | 35 | #[derive(Debug, Serialize)] 36 | #[serde(rename_all = "camelCase")] 37 | struct ProfilePostsInput { 38 | project_handle: String, 39 | page: u64, 40 | options: ProfilePostsOptions, 41 | } 42 | 43 | #[derive(Debug, Serialize)] 44 | #[serde(rename_all = "camelCase")] 45 | struct ProfilePostsOptions { 46 | hide_asks: bool, 47 | hide_replies: bool, 48 | hide_shares: bool, 49 | pinned_posts_at_top: bool, 50 | viewing_on_project_page: bool, 51 | } 52 | 53 | #[derive(Debug, Deserialize)] 54 | #[serde(rename_all = "camelCase")] 55 | pub struct ProfilePosts { 56 | #[allow(unused)] 57 | pub pagination: ProfilePostsPagination, 58 | pub posts: Vec, 59 | } 60 | 61 | #[derive(Debug, Deserialize)] 62 | #[serde(rename_all = "camelCase")] 63 | pub struct ListEditedProjects { 64 | pub projects: Vec, 65 | } 66 | 67 | #[derive(Debug, Deserialize)] 68 | #[serde(rename_all = "camelCase")] 69 | #[allow(unused)] 70 | pub struct ProfilePostsPagination { 71 | current_page: u64, 72 | /// Bogus. 
do not trust this guy 73 | more_pages_forward: bool, 74 | next_page: Option, 75 | previous_page: Option, 76 | } 77 | 78 | #[derive(Debug, Serialize)] 79 | #[serde(rename_all = "camelCase")] 80 | struct SinglePostInput { 81 | handle: String, 82 | post_id: u64, 83 | #[serde(skip_serializing_if = "Option::is_none")] 84 | nonce: Option, 85 | } 86 | 87 | #[derive(Debug, Deserialize)] 88 | #[serde(rename_all = "camelCase")] 89 | pub struct SinglePost { 90 | pub post: PostFromCohost, 91 | pub comments: HashMap>, 92 | } 93 | 94 | #[derive(Debug, Serialize)] 95 | #[serde(rename_all = "camelCase")] 96 | struct FollowedFeedInput { 97 | cursor: u64, 98 | limit: u64, 99 | before_timestamp: u64, 100 | sort_order: String, 101 | } 102 | 103 | #[derive(Debug, Deserialize)] 104 | #[serde(rename_all = "camelCase")] 105 | pub struct FollowedFeedQuery { 106 | pub next_cursor: Option, 107 | pub projects: Vec, 108 | } 109 | 110 | #[derive(Debug, Deserialize)] 111 | #[serde(rename_all = "camelCase")] 112 | #[allow(unused)] 113 | pub struct FollowedFeedProject { 114 | pub project: ProjectFromCohost, 115 | pub latest_post: Option, 116 | pub project_pinned: bool, 117 | } 118 | 119 | impl CohostContext { 120 | pub async fn login_logged_in(&self) -> Result { 121 | self.trpc_query::<(), _>("login.loggedIn", None).await 122 | } 123 | 124 | pub async fn posts_profile_posts( 125 | &self, 126 | project_handle: &str, 127 | page: u64, 128 | ) -> Result { 129 | let input = ProfilePostsInput { 130 | project_handle: project_handle.into(), 131 | page, 132 | options: ProfilePostsOptions { 133 | hide_asks: false, 134 | hide_replies: false, 135 | hide_shares: false, 136 | pinned_posts_at_top: true, 137 | viewing_on_project_page: true, 138 | }, 139 | }; 140 | 141 | self.trpc_query("posts.profilePosts", Some(input)).await 142 | } 143 | 144 | pub async fn posts_single_post( 145 | &self, 146 | project_handle: &str, 147 | post_id: u64, 148 | nonce: Option, 149 | ) -> Result { 150 | let input = SinglePostInput { 151 | handle: project_handle.into(), 152 | post_id, 153 | nonce, 154 | }; 155 | 156 | self.trpc_query("posts.singlePost", Some(input)).await 157 | } 158 | 159 | pub async fn projects_list_edited_projects(&self) -> Result { 160 | self.trpc_query::<(), _>("projects.listEditedProjects", None) 161 | .await 162 | } 163 | 164 | pub async fn projects_by_handle(&self, handle: &str) -> Result { 165 | self.trpc_query("projects.byHandle", Some(handle)).await 166 | } 167 | 168 | pub async fn projects_followed_feed_query( 169 | &self, 170 | before_timestamp: u64, 171 | cursor: u64, 172 | limit: u64, 173 | ) -> Result { 174 | let input = FollowedFeedInput { 175 | cursor, 176 | limit, 177 | before_timestamp, 178 | sort_order: "alpha-asc".into(), 179 | }; 180 | 181 | self.trpc_query("projects.followedFeed.query", Some(input)) 182 | .await 183 | } 184 | 185 | pub async fn projects_followed_feed_query_all( 186 | &self, 187 | ) -> Result, GetError> { 188 | let timestamp = Utc::now().timestamp_millis() as u64; 189 | 190 | let mut projects = Vec::new(); 191 | 192 | let mut cursor = Some(0); 193 | while let Some(current_cursor) = cursor { 194 | let result = self 195 | .projects_followed_feed_query(timestamp, current_cursor, 20) 196 | .await?; 197 | cursor = result.next_cursor; 198 | 199 | projects.extend(result.projects); 200 | } 201 | 202 | Ok(projects) 203 | } 204 | } 205 | -------------------------------------------------------------------------------- /db/templates/base.html: 
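// --- annotation, not part of the original sources; refers to db/src/trpc.rs above ---
// A minimal sketch of how the pagination types are intended to be used: the doc
// comment on `more_pages_forward` says it is bogus, so the only reliable stop
// signal is `next_page`. Assumptions (this dump has stripped generic parameters,
// so these are guesses from usage): `next_page` is `Option<u64>`, `posts` is
// `Vec<PostFromCohost>`, the error type is `GetError`, and the helper lives in
// the same module so it can read the private `ProfilePostsPagination` fields.
async fn fetch_all_profile_posts(
    ctx: &CohostContext,
    handle: &str,
) -> Result<Vec<PostFromCohost>, GetError> {
    let mut posts = Vec::new();
    let mut page = 0;
    loop {
        let result = ctx.posts_profile_posts(handle, page).await?;
        posts.extend(result.posts);
        // stop when the server no longer reports a next page; ignore `more_pages_forward`
        match result.pagination.next_page {
            Some(next) => page = next,
            None => break,
        }
    }
    Ok(posts)
}
// --- end annotation ---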
-------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | {% block title %}{% endblock title %} 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 34 | {% block base_header %}{% endblock base_header %} 35 |
36 | {% block base_contents %}{% endblock base_contents %} 37 |
38 | 39 | 40 | 41 | -------------------------------------------------------------------------------- /db/templates/comments.html: -------------------------------------------------------------------------------- 1 | {% macro comment_item(comment) %} 2 | {% set date_fmt_title = "%a, %b %-d, %Y, %-I:%M %p" %} 3 | {% set date_fmt_text = "%-m/%-d/%Y, %-I:%M %p" %} 4 | 64 | {% endmacro %} 65 | 66 | {% macro comment_tree(comment) %} 67 |
68 | {{ comments::comment_item(comment = comment) }} 69 | {% if comment.comment.children | length %} 70 |
71 | {% for comment in comment.comment.children %} 72 | {{ comments::comment_tree(comment = comment) }} 73 | {% endfor %} 74 |
75 | {% endif %} 76 |
77 | {% endmacro comment_tree %} 78 | 79 | {% macro render(post, comments) %} 80 | {% if comments | length %} 81 | 82 |
83 |

84 | in reply to @{{ post.postingProject.handle }}'s post: 85 |

86 | 87 | {% for thread in comments %} 88 |
89 | {{ comments::comment_tree(comment = thread) }} 90 |
91 | {% endfor %} 92 |
93 | 94 | {% endif %} 95 | {% endmacro %} 96 | -------------------------------------------------------------------------------- /db/templates/dashboard.html: -------------------------------------------------------------------------------- 1 | {% import "post.html" as post %} 2 | {% import "pagination_eggs.html" as pagination_eggs %} 3 | {% extends "base.html" %} 4 | 5 | {% block title %} 6 | cohost archive! - dashboard 7 | {% endblock title %} 8 | 9 | {% block base_contents %} 10 |
11 | 12 |
13 |
14 | {% for post in posts %} 15 | {{ post::render_preview(post = post) }} 16 | {% endfor %} 17 | 18 | {{ pagination_eggs::pagination_eggs(base = "", prev_page = filter_state.onPrevPage, next_page = filter_state.onNextPage) }} 19 |
20 |
21 | 22 |
23 | {% endblock base_contents %} 24 | -------------------------------------------------------------------------------- /db/templates/error.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | Error 7 | 8 | 9 |

Error

10 |
11 |     {{message}}
12 | 
13 | 14 | 15 | -------------------------------------------------------------------------------- /db/templates/index.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | 3 | {% block title %} 4 | cohost archive! 5 | {% endblock title %} 6 | 7 | {% block base_contents %} 8 | 22 |
23 |

cohost-dl temporary index page

24 |

dashboards

25 | 32 |

liked post feeds

33 | 40 |

all archived projects

41 |
    42 | {% for project in projects %} 43 |
  • 44 | @{{ project }} 45 |
  • 46 | {% endfor %} 47 |
48 |
49 | {% endblock base_contents %} 50 | -------------------------------------------------------------------------------- /db/templates/liked_feed.html: -------------------------------------------------------------------------------- 1 | {% import "post.html" as post %} 2 | {% import "pagination_eggs.html" as pagination_eggs %} 3 | {% extends "base.html" %} 4 | 5 | {% block title %} 6 | cohost archive! - posts you’ve liked 7 | {% endblock title %} 8 | 9 | {% block base_contents %} 10 |
11 | 12 |
13 |
14 | {% for post in posts %} 15 | {{ post::render_preview(post = post) }} 16 | {% endfor %} 17 | 18 | {{ pagination_eggs::pagination_eggs(base = "", prev_page = filter_state.onPrevPage, next_page = filter_state.onNextPage) }} 19 |
20 |
21 | 22 |
23 | {% endblock base_contents %} 24 | -------------------------------------------------------------------------------- /db/templates/pagination_eggs.html: -------------------------------------------------------------------------------- 1 | {% macro pagination_eggs(base, prev_page, next_page) %} 2 | 23 | {% endmacro pagination_eggs %} 24 | -------------------------------------------------------------------------------- /db/templates/project_profile.html: -------------------------------------------------------------------------------- 1 | {% import "project_sidebar.html" as project_sidebar %} 2 | {% import "pagination_eggs.html" as pagination_eggs %} 3 | {% import "post.html" as post %} 4 | {% extends "base.html" %} 5 | 6 | {% block title %} 7 | cohost archive! - {{ project.handle }} 8 | {% endblock title %} 9 | 10 | {% block base_header %} 11 | {% if project.headerURL %} 12 | 18 | {% endif %} 19 | {% endblock base_header %} 20 | 21 | {% block base_contents %} 22 | {{ project_sidebar::project_sidebar(project = project, description = rendered_project_description) }} 23 | 24 |
25 | {% set b = "/" ~ project.handle -%} 26 | {% if tagged %} 27 | {% set_global b = "/" ~ project.handle ~ "/tagged/" ~ tagged | urlencode -%} 28 |
29 |

posts from @{{ project.handle }} tagged #{{ tagged }}

30 |
also:
31 | 36 |
37 | {% else %} 38 | 64 | {% endif %} 65 |
66 | {% for post in posts %} 67 | {{ post::render(post = post, expand = false) }} 68 | {% endfor %} 69 | 70 | {{ pagination_eggs::pagination_eggs(base = b, prev_page = filter_state.onPrevPage, next_page = filter_state.onNextPage) }} 71 |
72 |
73 | 74 | {{ project_sidebar::project_sidebar_alt(project = project) }} 75 | {% endblock base_contents %} 76 | -------------------------------------------------------------------------------- /db/templates/project_sidebar.html: -------------------------------------------------------------------------------- 1 | {% macro contact_card_item(item) %} 2 |
3 |
{{ item.service }}
4 | {%- if item.value is matching("^https?://[\S]+$") -%} 5 | 8 | {%- else -%} 9 |
{{ item.value }}
10 | {%- endif -%} 11 |
12 | {% endmacro contact_card_item %} 13 | 14 | {% macro project_sidebar(project, description) %} 15 | {%- set class_name = '' -%} 16 | {%- if project.headerURL -%} 17 | {% set class_name = 'has-header' %} 18 | {%- endif -%} 19 |
20 |
21 |
22 | {{ project.handle }} 28 |
29 |
30 |

31 | 32 | {{ project.displayName }} 33 | 34 |

35 | 40 |
41 | {{ project.dek }} 42 |
43 |
44 |
45 | 65 |
66 | {{ description.html | safe }} 67 |
68 | 69 | {%- if project.contactCard | length -%} 70 |
71 | 72 | {%- set has_private = false -%} 73 | {%- for item in project.contactCard -%} 74 | {%- if item.visibility != "follows" -%} 75 | {{ project_sidebar::contact_card_item(item = item) }} 76 | {%- else -%} 77 | {%- set_global has_private = true -%} 78 | {%- endif -%} 79 | {%- endfor -%} 80 | 81 | {%- if has_private -%} 82 |
83 | 84 | 85 | show private contact info 86 | 87 | 88 | hide private contact info 89 | 90 | 91 | 94 | 95 | 96 | 97 | {%- for item in project.contactCard -%} 98 | {%- if item.visibility == "follows" -%} 99 | {{ project_sidebar::contact_card_item(item = item) }} 100 | {%- endif -%} 101 | {%- endfor -%} 102 |
103 |
104 | {%- endif -%} 105 | {%- endif -%} 106 |
107 | {% endmacro project_sidebar %} 108 | 109 | {% macro project_sidebar_alt(project) %} 110 |
111 | {% if project.frequentlyUsedTags | length %} 112 |
113 |
114 | Pinned Tags 115 |
116 |
117 |
    118 | {% for tag in project.frequentlyUsedTags %} 119 |
  • 120 | 121 | {{ tag }} 122 | 123 |
  • 124 | {% endfor %} 125 |
126 |
127 |
128 | {% endif %} 129 |
130 | {% endmacro project_sidebar_alt %} 131 | -------------------------------------------------------------------------------- /db/templates/single_post.html: -------------------------------------------------------------------------------- 1 | {% import "project_sidebar.html" as project_sidebar %} 2 | {% import "post.html" as post %} 3 | {% import "comments.html" as comments %} 4 | {% extends "base.html" %} 5 | 6 | {% block title %} 7 | {% set post_title = post.headline -%} 8 | {%- if not post_title -%} 9 | {%- set post_title = "post from @" ~ post.postingProject.handle -%} 10 | {%- endif -%} 11 | cohost archive! - {{ post_title }} 12 | {% endblock title %} 13 | 14 | {% block base_header %} 15 | {% if post.postingProject.headerURL %} 16 | 22 | {% endif %} 23 | {% endblock base_header %} 24 | 25 | {% block base_contents %} 26 | {{ project_sidebar::project_sidebar(project = post.postingProject, description = rendered_project_description) }} 27 | 28 |
29 |
30 | {{ post::render(post = post, expand = true) }} 31 | 32 |
33 | {% for post in post.shareTree %} 34 | {{ comments::render(post = post, comments = comments[post.postId]) }} 35 | {% endfor %} 36 | 37 | {{ comments::render(post = post, comments = comments[post.postId]) }} 38 |
39 |
40 |
41 | 42 | {{ project_sidebar::project_sidebar_alt(project = post.postingProject) }} 43 | {% endblock base_contents %} 44 | -------------------------------------------------------------------------------- /db/templates/tag_feed.html: -------------------------------------------------------------------------------- 1 | {% import "post.html" as post %} 2 | {% import "pagination_eggs.html" as pagination_eggs %} 3 | {% extends "base.html" %} 4 | 5 | {% block title %} 6 | cohost archive! - #{{ tag }} 7 | {% endblock title %} 8 | 9 | {% block page_container_classes %} is-tag-feed {% endblock page_container_classes %} 10 | {% block base_contents %} 11 |
12 | 13 |
14 |
15 |

#{{ tag }}

16 | {% if synonym_tags | length %} 17 |
18 | also: 19 | {% set is_first = true -%} 20 | {%- for syn in synonym_tags -%} 21 | {%- if is_first -%} 22 | {%- set_global is_first = false -%} 23 | {%- else -%} 24 | , 25 | {%- endif %} 26 | #{{ syn }} 27 | {%- endfor -%} 28 |
29 | {% endif %} 30 |
31 | 32 |
33 | {% for post in posts %} 34 | {{ post::render_preview(post = post) }} 35 | {% endfor %} 36 | 37 | {{ pagination_eggs::pagination_eggs(base = "", prev_page = filter_state.onPrevPage, next_page = filter_state.onNextPage) }} 38 |
39 |
40 | 41 |
42 |
43 | 44 | 45 | View Settings 46 | 47 | 48 | 58 |
59 | {% if related_tags | length %} 60 |
61 | 62 | 63 | Related Tags 64 | 65 | 66 | 77 |
78 | {% endif %} 79 |
80 | {% endblock base_contents %} 81 | -------------------------------------------------------------------------------- /deno.json: -------------------------------------------------------------------------------- 1 | { 2 | "imports": { 3 | "@b-fuze/deno-dom": "jsr:@b-fuze/deno-dom@^0.1.48", 4 | "@babel/plugin-transform-class-properties": "npm:@babel/plugin-transform-class-properties@^7.25.4", 5 | "@emoji-mart/data": "npm:@emoji-mart/data@^1.2.1", 6 | "@rollup/plugin-commonjs": "npm:@rollup/plugin-commonjs@^26.0.1", 7 | "@rollup/plugin-replace": "npm:@rollup/plugin-replace@^5.0.7", 8 | "@rollup/plugin-sucrase": "npm:@rollup/plugin-sucrase@^5.0.2", 9 | "@std/path": "jsr:@std/path@^1.0.4", 10 | "entities": "npm:entities@^2.2.0", 11 | "minisearch": "npm:minisearch@^7.1.0", 12 | "remark-gfm": "npm:remark-gfm@^4.0.0", 13 | "remark-parse": "npm:remark-parse@^11.0.0", 14 | "remark-stringify": "npm:remark-stringify@^11.0.0", 15 | "rollup": "npm:rollup@^4.21.2", 16 | "unified": "npm:unified@^11.0.5" 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /main.ts: -------------------------------------------------------------------------------- 1 | import { 2 | COOKIE, 3 | DATA_PORTABILITY_ARCHIVE_PATH, 4 | ENABLE_JAVASCRIPT, 5 | POSTS, 6 | PROJECTS, 7 | SKIP_POSTS, 8 | SKIP_LIKES, 9 | } from "./src/config.ts"; 10 | import { CohostContext, POST_URL_REGEX } from "./src/context.ts"; 11 | import { loadAllLikedPosts } from "./src/likes.ts"; 12 | import { FROM_POST_PAGE_TO_ROOT, loadPostPage } from "./src/post-page.ts"; 13 | import { loadAllProjectPosts } from "./src/project.ts"; 14 | import { IPost } from "./src/model.ts"; 15 | import { readDataPortabilityArchiveItems } from "./src/data-portability-archive.ts"; 16 | import { loadCohostSource } from "./src/cohost-source.ts"; 17 | import { generateAllScripts } from "./src/scripts/index.ts"; 18 | import { rewritePost } from "./src/post.ts"; 19 | import { generateAllIndices } from "./src/post-index.ts"; 20 | import { checkForUpdates } from "./src/changelog.ts"; 21 | 22 | await checkForUpdates(); 23 | 24 | const ctx = new CohostContext(COOKIE, "out"); 25 | await ctx.init(); 26 | 27 | let isLoggedIn = false; 28 | let currentProjectHandle = null; 29 | { 30 | // check that login actually worked 31 | const loginStateResponse = await ctx.get( 32 | "https://cohost.org/api/v1/trpc/login.loggedIn,projects.listEditedProjects?batch=1&input=%7B%7D", 33 | ); 34 | const loginState = await loginStateResponse.json(); 35 | if (!loginState[0].result.data.loggedIn) { 36 | console.error( 37 | "\x1b[33mwarning:\nNot logged in. Please update your cookie configuration if cohost.org still exists\x1b[m\n\n", 38 | ); 39 | } else { 40 | const currentProjectId = loginState[0].result.data.projectId; 41 | const currentProject = loginState[1].result.data.projects.find((proj: { projectId: number }) => 42 | proj.projectId === currentProjectId 43 | ); 44 | if (!currentProject) { 45 | throw new Error( 46 | `invalid state: logged in as project ${currentProjectId}, but this is not an edited project`, 47 | ); 48 | } 49 | currentProjectHandle = currentProject.handle; 50 | 51 | console.log( 52 | `logged in as ${ 53 | loginState[0].result.data.email 54 | } / @${currentProjectHandle}`, 55 | ); 56 | isLoggedIn = true; 57 | } 58 | } 59 | 60 | // JSON data 61 | if (isLoggedIn) { 62 | // legacy liked posts 63 | if (await ctx.hasFile('liked.json')) { 64 | console.log(''); 65 | console.log(`There’s a list of liked posts here using an older format. 
It’s unclear what page you were logged in as when loading them.`); 66 | let likedPostsHandle: string | null = null; 67 | if (confirm(`Did you load these liked posts as @${currentProjectHandle}?`)) { 68 | likedPostsHandle = currentProjectHandle; 69 | } else { 70 | while (true) { 71 | likedPostsHandle = prompt("What’s the handle of the page these liked posts are for?")?.trim() ?? null; 72 | if (likedPostsHandle) { 73 | if (confirm(`It was @${likedPostsHandle}?`)) { 74 | break; 75 | } 76 | } else { 77 | break; 78 | } 79 | } 80 | } 81 | if (!likedPostsHandle) { 82 | console.log('no input. exiting'); 83 | Deno.exit(1); 84 | } 85 | await Deno.mkdir(ctx.getCleanPath(likedPostsHandle), { recursive: true }); 86 | await Deno.rename(ctx.getCleanPath('liked.json'), ctx.getCleanPath(`${likedPostsHandle}/liked.json`)); 87 | } 88 | 89 | // load all liked posts for the current page 90 | if (!(await ctx.hasFile(`${currentProjectHandle}/liked.json`)) && !SKIP_LIKES) { 91 | console.log(`loading likes for @${currentProjectHandle}`); 92 | const liked = await loadAllLikedPosts(ctx); 93 | await ctx.writeLargeJson(`${currentProjectHandle}/liked.json`, liked); 94 | } 95 | 96 | // load all project posts 97 | for (const handle of PROJECTS) { 98 | if (!(await ctx.hasFile(`${handle}/posts.json`))) { 99 | const posts = await loadAllProjectPosts(ctx, handle); 100 | await ctx.write(`${handle}/posts.json`, JSON.stringify(posts)); 101 | } 102 | } 103 | } else { 104 | console.log( 105 | "\x1b[33mnot logged in: skipping liked posts and project posts \x1b[m", 106 | ); 107 | } 108 | 109 | // javascript 110 | if (ENABLE_JAVASCRIPT) { 111 | const dir = await loadCohostSource(ctx); 112 | await generateAllScripts(ctx, dir); 113 | } 114 | 115 | const errors: { url: string; error: Error }[] = []; 116 | 117 | // Single post pages 118 | { 119 | const allProjectDirsProbably: string[] = []; 120 | for await (const item of Deno.readDir(ctx.getCleanPath(''))) { 121 | if (item.isDirectory) allProjectDirsProbably.push(item.name); 122 | } 123 | 124 | const likedPosts = await Promise.all( 125 | allProjectDirsProbably.map(async (handle) => { 126 | if (SKIP_LIKES) return []; 127 | 128 | const file = `${handle}/liked.json`; 129 | if (await ctx.hasFile(file)) { 130 | return ctx.readLargeJson(`${handle}/liked.json`); 131 | } else { 132 | return []; 133 | } 134 | }), 135 | ) as IPost[][]; 136 | 137 | const projectPosts = await Promise.all( 138 | allProjectDirsProbably.map(async (handle) => { 139 | const file = `${handle}/posts.json`; 140 | if (await ctx.hasFile(file)) { 141 | return ctx.readJson(`${handle}/posts.json`); 142 | } else { 143 | return []; 144 | } 145 | }), 146 | ) as IPost[][]; 147 | 148 | const allPosts = [ 149 | ...likedPosts.flatMap(x => x), 150 | ...projectPosts.flatMap((x) => x), 151 | ]; 152 | 153 | const loadPostPageAndCollectError = async (url: string) => { 154 | try { 155 | await loadPostPage(ctx, url); 156 | } catch (error) { 157 | console.error(`\x1b[31mFailed! ${error}\x1b[m`); 158 | errors.push({ url, error }); 159 | } 160 | }; 161 | 162 | for (const post of allPosts) { 163 | if (SKIP_POSTS.includes(post.postId)) continue; 164 | 165 | console.log(`~~ processing post ${post.singlePostPageUrl}`); 166 | await loadPostPageAndCollectError(post.singlePostPageUrl); 167 | } 168 | 169 | // it can happen that we've cached data for a post that is now a 404. 170 | // I suppose we can try loading resources for those as well? 
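// (annotation added in this dump, not in the original source: the loop below simply
// re-runs rewritePost over every post collected above, including posts whose single-post
// pages just failed to load, and swallows any error. Presumably rewritePost also fetches
// the resources a cached post references, which is why it is worth attempting even for
// pages that now return 404.)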
171 | for (const post of allPosts) { 172 | try { 173 | await rewritePost(ctx, post, FROM_POST_PAGE_TO_ROOT); 174 | } catch { 175 | // oh well!! 176 | } 177 | } 178 | 179 | const dpaPostURLs: string[] = []; 180 | if (DATA_PORTABILITY_ARCHIVE_PATH) { 181 | const items = await readDataPortabilityArchiveItems( 182 | DATA_PORTABILITY_ARCHIVE_PATH, 183 | ); 184 | for (const ask of items.asks) { 185 | if (ask.responsePost) { 186 | dpaPostURLs.push(ask.responsePost); 187 | } 188 | } 189 | for (const comment of items.comments) { 190 | if (comment.post) { 191 | dpaPostURLs.push(comment.post); 192 | } else { 193 | console.log(`comment ${comment.commentId} has no post`); 194 | } 195 | } 196 | } 197 | 198 | for (const post of [...POSTS, ...dpaPostURLs]) { 199 | const probablyThePostId = +(post.match(POST_URL_REGEX)?.[2] || ""); 200 | if (SKIP_POSTS.includes(probablyThePostId)) continue; 201 | 202 | console.log(`~~ processing additional post ${post}`); 203 | await loadPostPageAndCollectError(post); 204 | } 205 | } 206 | 207 | { 208 | await generateAllIndices(ctx, errors); 209 | } 210 | 211 | await ctx.finalize(); 212 | 213 | if (errors.length) { 214 | console.log( 215 | `\x1b[32mDone, \x1b[33mwith ${errors.length} error${ 216 | errors.length === 1 ? "" : "s" 217 | }\x1b[m`, 218 | ); 219 | for (const { url, error } of errors) console.log(`${url}: ${error}`); 220 | } else { 221 | console.log("\x1b[32mDone\x1b[m"); 222 | } 223 | -------------------------------------------------------------------------------- /run.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | cd "$(dirname "$0")" 3 | deno run --allow-env --allow-ffi --allow-net --allow-read --allow-write=out main.ts 4 | -------------------------------------------------------------------------------- /src/changelog.ts: -------------------------------------------------------------------------------- 1 | import * as path from "jsr:@std/path"; 2 | 3 | async function checkForUpdatesImpl() { 4 | const currentChangelog = await Deno.readTextFile(path.join(import.meta.dirname, "../CHANGELOG.txt")); 5 | 6 | const changelogLines = currentChangelog.replace(/\r\n/g, '\n').split("\n"); 7 | const url = changelogLines.shift().split("url=")[1]; 8 | 9 | const newestChangelogRes = await fetch(url); 10 | if (!newestChangelogRes.ok) { 11 | throw new Error(`could not fetch update information: ${await newestChangelogRes.text()}`); 12 | } 13 | const newestChangelog = await newestChangelogRes.text(); 14 | const newLines = newestChangelog.split("\n"); 15 | newLines.shift(); 16 | 17 | const firstNonEmptyLine = changelogLines.find(item => !!item); 18 | const firstNewNonEmptyLine = newLines.find(item => !!item); 19 | 20 | const latestVersion = newLines.find(line => line.startsWith('*')); 21 | const thisVersion = changelogLines.find(line => line.startsWith('*')); 22 | 23 | if (latestVersion === thisVersion) { 24 | return; 25 | } 26 | 27 | console.error('\x1b[32m=== cohost-dl update found ===\x1b[m'); 28 | console.error('maybe you want to update your downloaded version.'); 29 | console.error('changes: '); 30 | 31 | for (const line of newLines) { 32 | if (line === thisVersion) break; 33 | 34 | console.error(line); 35 | } 36 | 37 | console.error('\x1b[32m=== * ===\x1b[m'); 38 | } 39 | 40 | export async function checkForUpdates() { 41 | try { 42 | await checkForUpdatesImpl(); 43 | } catch (error) { 44 | console.error(`error checking for updates: ${error}`); 45 | } 46 | } 47 | 
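// --- annotation, not part of the original sources; refers to src/changelog.ts above ---
// The update check assumes a changelog whose first line is `url=<raw changelog URL>`
// and whose release notes each start with `* `, so the first `* ` line doubles as a
// version marker. A minimal standalone sketch of that comparison (hypothetical helper,
// not used anywhere in the project):
function latestChangelogEntry(changelog: string): string | undefined {
    const lines = changelog.replace(/\r\n/g, "\n").split("\n");
    lines.shift(); // drop the url= line
    return lines.find((line) => line.startsWith("*"));
}
// usage sketch: an update is available when the remote marker differs from the local one
// const updateAvailable = latestChangelogEntry(remoteText) !== latestChangelogEntry(localText);
// --- end annotation ---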
-------------------------------------------------------------------------------- /src/cohost-source.ts: -------------------------------------------------------------------------------- 1 | import { CohostContext } from "./context.ts"; 2 | 3 | // a sample post we'll be using for this. 4 | // edge case that I will not be handling: user has blocked staff 5 | const SAMPLE_POST_URL = "https://cohost.org/staff/post/7611443-cohost-to-shut-down"; 6 | 7 | interface ISourceMap { 8 | version: 3; 9 | file: string; 10 | mapping: string; 11 | sources: string[]; 12 | sourcesContent: string[]; 13 | names: string[]; 14 | } 15 | 16 | /** Loads cohost frontend source and returns root path */ 17 | export async function loadCohostSource(ctx: CohostContext): Promise { 18 | const filePath = await ctx.loadResourceToFile(SAMPLE_POST_URL); 19 | const document = await ctx.getDocument(SAMPLE_POST_URL, filePath ?? undefined); 20 | 21 | const varsScript = document.getElementById("env-vars"); 22 | if (!varsScript) throw new Error('missing