├── .envrc ├── .gitattributes ├── heroku.yml ├── static ├── highlighted.js ├── logo.png ├── favicon.ico ├── favicon.png ├── Inter.var.woff2 ├── apple-touch-icon.png ├── copy.js ├── themes │ ├── rosebox.css │ ├── doomone.css │ ├── dark.css │ ├── gold.css │ ├── black.css │ ├── gruvboxdark.css │ ├── violet.css │ ├── dracula.css │ ├── nord.css │ ├── libredditDark.css │ ├── laserwave.css │ ├── midnightPurple.css │ ├── libredditBlack.css │ ├── icebergDark.css │ ├── tokyoNight.css │ ├── light.css │ ├── gruvboxlight.css │ └── libredditLight.css ├── logo.svg ├── manifest.json ├── opensearch.xml ├── check_update.js └── playHLSVideo.js ├── rustfmt.toml ├── .github ├── FUNDING.yml ├── ISSUE_TEMPLATE │ ├── feature_parity.md │ ├── feature_request.md │ └── bug_report.md └── workflows │ ├── pull-request.yml │ ├── build-artifacts.yaml │ ├── main-rust.yml │ └── main-docker.yml ├── .gitignore ├── templates ├── message.html ├── wall.html ├── info.html ├── wiki.html ├── error.html ├── nsfwlanding.html ├── comment.html ├── post.html ├── base.html ├── search.html ├── duplicates.html ├── subreddit.html ├── user.html └── settings.html ├── src ├── lib.rs ├── oauth_resources.rs ├── search.rs ├── user.rs ├── duplicates.rs ├── instance_info.rs ├── post.rs ├── settings.rs ├── config.rs └── main.rs ├── .replit ├── redlib.container ├── .devcontainer └── devcontainer.json ├── contrib ├── redlib.plist ├── redlib.conf └── redlib.service ├── scripts ├── gen-credits.sh ├── update_hls_js.sh ├── load_test.py └── update_oauth_resources.sh ├── Dockerfile ├── compose.yaml ├── compose.dev.yaml ├── Dockerfile.alpine ├── Dockerfile.ubuntu ├── flake.nix ├── app.json ├── .env.example ├── Cargo.toml ├── flake.lock ├── seccomp-redlib.json └── CREDITS /.envrc: -------------------------------------------------------------------------------- 1 | use flake 2 | -------------------------------------------------------------------------------- /.gitattributes: 
-------------------------------------------------------------------------------- 1 | Dockerfile.* linguist-language=Dockerfile 2 | -------------------------------------------------------------------------------- /heroku.yml: -------------------------------------------------------------------------------- 1 | build: 2 | docker: 3 | web: Dockerfile 4 | -------------------------------------------------------------------------------- /static/highlighted.js: -------------------------------------------------------------------------------- 1 | document.querySelector('#commentQueryForms').scrollIntoView(); -------------------------------------------------------------------------------- /static/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/redlib-org/redlib/HEAD/static/logo.png -------------------------------------------------------------------------------- /rustfmt.toml: -------------------------------------------------------------------------------- 1 | edition = "2018" 2 | tab_spaces = 2 3 | hard_tabs = true 4 | max_width = 175 -------------------------------------------------------------------------------- /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | liberapay: sigaloid 2 | buy_me_a_coffee: sigaloid 3 | github: sigaloid -------------------------------------------------------------------------------- /static/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/redlib-org/redlib/HEAD/static/favicon.ico -------------------------------------------------------------------------------- /static/favicon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/redlib-org/redlib/HEAD/static/favicon.png -------------------------------------------------------------------------------- /static/Inter.var.woff2: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/redlib-org/redlib/HEAD/static/Inter.var.woff2 -------------------------------------------------------------------------------- /static/apple-touch-icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/redlib-org/redlib/HEAD/static/apple-touch-icon.png -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | .env 3 | redlib.toml 4 | 5 | # Idea Files 6 | .idea/ 7 | 8 | # nix files 9 | .direnv/ 10 | result 11 | -------------------------------------------------------------------------------- /templates/message.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | {% block title %}{{ title }}{% endblock %} 3 | {% block sortstyle %}{% endblock %} 4 | {% block content %} 5 |
6 |

{{ title }}

7 |
8 | {{ body|safe }} 9 |
10 | {% endblock %} 11 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | pub mod client; 2 | pub mod config; 3 | pub mod duplicates; 4 | pub mod instance_info; 5 | pub mod oauth; 6 | pub mod oauth_resources; 7 | pub mod post; 8 | pub mod search; 9 | pub mod server; 10 | pub mod settings; 11 | pub mod subreddit; 12 | pub mod user; 13 | pub mod utils; 14 | -------------------------------------------------------------------------------- /static/copy.js: -------------------------------------------------------------------------------- 1 | async function copy() { 2 | await navigator.clipboard.writeText(document.getElementById('bincode_str').value); 3 | } 4 | 5 | async function set_listener() { 6 | document.getElementById('copy').addEventListener('click', copy); 7 | } 8 | 9 | window.addEventListener('load', set_listener); -------------------------------------------------------------------------------- /.replit: -------------------------------------------------------------------------------- 1 | run = "while :; do set -ex; nix-env -iA nixpkgs.unzip; curl -o./redlib.zip -fsSL -- https://nightly.link/redlib-org/redlib/workflows/main-rust/main/redlib.zip; unzip -n redlib.zip; mv target/x86_64-unknown-linux-musl/release/redlib .; chmod +x redlib; set +e; ./redlib -H 63115200; sleep 1; done" 2 | language = "bash" 3 | -------------------------------------------------------------------------------- /static/themes/rosebox.css: -------------------------------------------------------------------------------- 1 | /* Rosebox theme setting */ 2 | .rosebox { 3 | --accent: #a57562; 4 | --green: #a3be8c; 5 | --text: white; 6 | --foreground: #222; 7 | --background: #262626; 8 | --outside: #222; 9 | --post: #222; 10 | --panel-border: 1px solid #222; 11 | --highlighted: #262626; 12 | --shadow: 0 1px 3px rgba(0, 0, 0, 0.5); 13 | } 
-------------------------------------------------------------------------------- /static/logo.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | -------------------------------------------------------------------------------- /static/themes/doomone.css: -------------------------------------------------------------------------------- 1 | .doomone { 2 | --accent: #51afef; 3 | --green: #00a229; 4 | --text: #bbc2cf; 5 | --foreground: #3d4148; 6 | --background: #282c34; 7 | --outside: #52565c; 8 | --post: #24272e; 9 | --panel-border: 2px solid #52565c; 10 | --highlighted: #686b70; 11 | --visited: #969692; 12 | --shadow: 0 1px 3px rgba(0, 0, 0, 0.1); 13 | } 14 | -------------------------------------------------------------------------------- /static/themes/dark.css: -------------------------------------------------------------------------------- 1 | /* Dark theme setting */ 2 | .dark{ 3 | --accent: #d54455; 4 | --green: #5cff85; 5 | --text: white; 6 | --foreground: #222; 7 | --background: #0f0f0f; 8 | --outside: #1f1f1f; 9 | --post: #161616; 10 | --panel-border: 1px solid #333; 11 | --highlighted: #333; 12 | --visited: #aaa; 13 | --shadow: 0 1px 3px rgba(0, 0, 0, 0.5); 14 | } -------------------------------------------------------------------------------- /static/themes/gold.css: -------------------------------------------------------------------------------- 1 | /* Gold theme setting */ 2 | .gold { 3 | --accent: #f2aa4c; 4 | --green: #5cff85; 5 | --text: white; 6 | --foreground: #234; 7 | --background: #101820; 8 | --outside: #1b2936; 9 | --post: #1b2936; 10 | --panel-border: 0px solid black; 11 | --highlighted: #234; 12 | --visited: #aaa; 13 | --shadow: 0 2px 5px rgba(0, 0, 0, 0.5); 14 | } -------------------------------------------------------------------------------- /static/themes/black.css: -------------------------------------------------------------------------------- 1 | /* Black theme setting */ 
2 | .black { 3 | --accent: #bb2b3b; 4 | --green: #00a229; 5 | --text: white; 6 | --foreground: #0f0f0f; 7 | --background: black; 8 | --outside: black; 9 | --post: black; 10 | --panel-border: 2px solid #0f0f0f; 11 | --highlighted: #0f0f0f; 12 | --visited: #aaa; 13 | --shadow: 0 1px 3px rgba(0, 0, 0, 0.1); 14 | } -------------------------------------------------------------------------------- /static/themes/gruvboxdark.css: -------------------------------------------------------------------------------- 1 | /* Gruvbox-Dark theme setting */ 2 | .gruvboxdark { 3 | --accent: #8ec07c; 4 | --green: #b8bb26; 5 | --text: #ebdbb2; 6 | --foreground: #3c3836; 7 | --background: #282828; 8 | --outside: #3c3836; 9 | --post: #3c3836; 10 | --panel-border: 1px solid #504945; 11 | --highlighted: #282828; 12 | --shadow: 0 1px 3px rgba(0, 0, 0, 0.5); 13 | } 14 | -------------------------------------------------------------------------------- /static/themes/violet.css: -------------------------------------------------------------------------------- 1 | /* Violet theme setting */ 2 | .violet { 3 | --accent: #7c71dd; 4 | --green: #5cff85; 5 | --text: white; 6 | --foreground: #1F2347; 7 | --background: #12152b; 8 | --outside: #181c3a; 9 | --post: #181c3a; 10 | --panel-border: 1px solid #1F2347; 11 | --highlighted: #1F2347; 12 | --visited: #aaa; 13 | --shadow: 0 2px 5px rgba(0, 0, 0, 0.5); 14 | } -------------------------------------------------------------------------------- /static/themes/dracula.css: -------------------------------------------------------------------------------- 1 | /* Dracula theme setting */ 2 | .dracula { 3 | --accent: #bd93f9; 4 | --green: #50fa7b; 5 | --text: #f8f8f2; 6 | --foreground: #3d4051; 7 | --background: #282a36; 8 | --outside: #393c4d; 9 | --post: #333544; 10 | --panel-border: 2px solid #44475a; 11 | --highlighted: #4e5267; 12 | --visited: #969692; 13 | --shadow: 0 1px 3px rgba(0, 0, 0, 0.1); 14 | } 
-------------------------------------------------------------------------------- /static/themes/nord.css: -------------------------------------------------------------------------------- 1 | /* Nord theme setting */ 2 | .nord { 3 | --accent: #8fbcbb; 4 | --green: #a3be8c; 5 | --text: #eceff4; 6 | --foreground: #3b4252; 7 | --background: #2e3440; 8 | --outside: #434c5e; 9 | --post: #434c5e; 10 | --panel-border: 2px solid #4c566a; 11 | --highlighted: #3b4252; 12 | --visited: #a3a5aa; 13 | --shadow: 0 1px 3px rgba(0, 0, 0, 0.1); 14 | } 15 | -------------------------------------------------------------------------------- /static/themes/libredditDark.css: -------------------------------------------------------------------------------- 1 | /* Libreddit dark theme setting */ 2 | .libredditDark{ 3 | --accent: aqua; 4 | --green: #5cff85; 5 | --text: white; 6 | --foreground: #222; 7 | --background: #0f0f0f; 8 | --outside: #1f1f1f; 9 | --post: #161616; 10 | --panel-border: 1px solid #333; 11 | --highlighted: #333; 12 | --visited: #aaa; 13 | --shadow: 0 1px 3px rgba(0, 0, 0, 0.5); 14 | } -------------------------------------------------------------------------------- /static/themes/laserwave.css: -------------------------------------------------------------------------------- 1 | /* Laserwave theme setting */ 2 | .laserwave { 3 | --accent: #eb64b9; 4 | --green: #74dfc4; 5 | --text: #e0dfe1; 6 | --foreground: #302a36; 7 | --background: #27212e; 8 | --outside: #3e3647; 9 | --post: #3e3647; 10 | --panel-border: 2px solid #2f2738; 11 | --highlighted: #302a36; 12 | --visited: #91889b; 13 | --shadow: 0 1px 3px rgba(0, 0, 0, 0.1); 14 | } -------------------------------------------------------------------------------- /static/themes/midnightPurple.css: -------------------------------------------------------------------------------- 1 | /* midnightpurple theme setting */ 2 | .midnightPurple{ 3 | --accent: #be6ede; 4 | --green: #268F02; 5 | --text: white; 6 | --foreground: #222; 7 | 
--background: #000000; 8 | --outside: #1f1f1f; 9 | --post: #000000; 10 | --panel-border: 1px solid #4E1764; 11 | --highlighted: #333; 12 | --visited: #aaa; 13 | --shadow: 0 1px 3px rgba(0, 0, 0, 0.5); 14 | } -------------------------------------------------------------------------------- /static/themes/libredditBlack.css: -------------------------------------------------------------------------------- 1 | /* Libreddit black theme setting */ 2 | .libredditBlack { 3 | --accent: #009a9a; 4 | --green: #00a229; 5 | --text: white; 6 | --foreground: #0f0f0f; 7 | --background: black; 8 | --outside: black; 9 | --post: black; 10 | --panel-border: 2px solid #0f0f0f; 11 | --highlighted: #0f0f0f; 12 | --visited: #aaa; 13 | --shadow: 0 1px 3px rgba(0, 0, 0, 0.1); 14 | } -------------------------------------------------------------------------------- /static/themes/icebergDark.css: -------------------------------------------------------------------------------- 1 | /* icebergDark theme setting */ 2 | .icebergDark { 3 | --accent: #85a0c7; 4 | --green: #b5bf82; 5 | --text: #c6c8d1; 6 | --foreground: #454d73; 7 | --background: #161821; 8 | --outside: #1f2233; 9 | --post: #1f2233; 10 | --panel-border: 1px solid #454d73; 11 | --highlighted: #0f1117; 12 | --visited: #0f1117; 13 | --shadow: 0 1px 3px rgba(0, 0, 0, 0.5); 14 | } 15 | -------------------------------------------------------------------------------- /static/themes/tokyoNight.css: -------------------------------------------------------------------------------- 1 | /* Tokyo Night theme setting */ 2 | .tokyoNight { 3 | --accent: #565f89; 4 | --green: #73daca; 5 | --text: #a9b1d6; 6 | --foreground: #24283b; 7 | --background: #1a1b26; 8 | --outside: #24283b; 9 | --post: #1a1b26; 10 | --panel-border: 1px solid #a9b1d6; 11 | --highlighted: #414868; 12 | --visited: #414868; 13 | --shadow: 0 1px 3px rgba(0, 0, 0, 0.5); 14 | } 15 | -------------------------------------------------------------------------------- /templates/wall.html: 
-------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | {% block title %}{{ msg }}{% endblock %} 3 | {% block sortstyle %}{% endblock %} 4 | {% block content %} 5 |
6 |

{{ title }}

7 |
8 |

{{ msg }}

9 |
10 | 11 |
12 |
13 | {% endblock %} 14 | -------------------------------------------------------------------------------- /redlib.container: -------------------------------------------------------------------------------- 1 | [Install] 2 | WantedBy=default.target 3 | 4 | [Container] 5 | AutoUpdate=registry 6 | ContainerName=redlib 7 | DropCapability=ALL 8 | EnvironmentFile=.env 9 | HealthCmd=["wget","--spider","-q","--tries=1","http://localhost:8080/settings"] 10 | HealthInterval=5m 11 | HealthTimeout=3s 12 | Image=quay.io/redlib/redlib:latest 13 | NoNewPrivileges=true 14 | PublishPort=8080:8080 15 | ReadOnly=true 16 | User=nobody 17 | -------------------------------------------------------------------------------- /.devcontainer/devcontainer.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "Rust", 3 | "image": "mcr.microsoft.com/devcontainers/rust:1.0.9-bookworm", 4 | "features": { 5 | "ghcr.io/devcontainers/features/docker-in-docker:2": {} 6 | }, 7 | "portsAttributes": { 8 | "8080": { 9 | "label": "redlib", 10 | "onAutoForward": "notify" 11 | } 12 | }, 13 | "postCreateCommand": "cargo build" 14 | } 15 | -------------------------------------------------------------------------------- /contrib/redlib.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Label 6 | redlib 7 | 8 | Program 9 | redlib 10 | 11 | KeepAlive 12 | 13 | 14 | RunAtLoad 15 | 16 | 17 | 18 | 19 | 20 | -------------------------------------------------------------------------------- /static/themes/light.css: -------------------------------------------------------------------------------- 1 | /* Light theme setting */ 2 | .light { 3 | --accent: #bb2b3b; 4 | --green: #00a229; 5 | --text: black; 6 | --foreground: #f5f5f5; 7 | --background: #ddd; 8 | --outside: #ececec; 9 | --post: #eee; 10 | --panel-border: 1px solid #ccc; 11 | --highlighted: white; 12 | --visited: #555; 13 | --shadow: 0 1px 3px rgba(0, 0, 0, 
0.1); 14 | } 15 | 16 | html:has(> .light) { 17 | /* Hint color theme to browser for scrollbar */ 18 | color-scheme: light; 19 | } -------------------------------------------------------------------------------- /templates/info.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | {% import "utils.html" as utils %} 3 | 4 | {% block title %}Info: {{ msg }}{% endblock %} 5 | {% block sortstyle %}{% endblock %} 6 | 7 | {% block subscriptions %} 8 | {% call utils::sub_list("") %} 9 | {% endblock %} 10 | 11 | {% block search %} 12 | {% call utils::search("".to_owned(), "") %} 13 | {% endblock %} 14 | 15 | {% block content %} 16 |
17 |

{{ msg }}

18 |
19 |
20 | {% endblock %} 21 | -------------------------------------------------------------------------------- /static/themes/gruvboxlight.css: -------------------------------------------------------------------------------- 1 | /* Gruvbox-Light theme setting */ 2 | .gruvboxlight { 3 | --accent: #427b58; 4 | --green: #79740e; 5 | --text: #3c3836; 6 | --foreground: #ebdbb2; 7 | --background: #fbf1c7; 8 | --outside: #ebdbb2; 9 | --post: #ebdbb2; 10 | --panel-border: 1px solid #d5c4a1; 11 | --highlighted: #fbf1c7; 12 | --shadow: 0 1px 3px rgba(0, 0, 0, 0.25); 13 | } 14 | 15 | html:has(> .gruvboxlight) { 16 | /* Hint color theme to browser for scrollbar */ 17 | color-scheme: light; 18 | } 19 | -------------------------------------------------------------------------------- /static/themes/libredditLight.css: -------------------------------------------------------------------------------- 1 | /* Libreddit light theme setting */ 2 | .libredditLight { 3 | --accent: #009a9a; 4 | --green: #00a229; 5 | --text: black; 6 | --foreground: #f5f5f5; 7 | --background: #ddd; 8 | --outside: #ececec; 9 | --post: #eee; 10 | --panel-border: 1px solid #ccc; 11 | --highlighted: white; 12 | --visited: #555; 13 | --shadow: 0 1px 3px rgba(0, 0, 0, 0.1); 14 | } 15 | 16 | html:has(> .libredditLight) { 17 | /* Hint color theme to browser for scrollbar */ 18 | color-scheme: light; 19 | } -------------------------------------------------------------------------------- /static/manifest.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "Redlib", 3 | "short_name": "Redlib", 4 | "display": "standalone", 5 | "background_color": "#1f1f1f", 6 | "description": "An alternative private front-end to Reddit", 7 | "theme_color": "#1f1f1f", 8 | "start_url": "/", 9 | "icons": [ 10 | { 11 | "src": "logo.png", 12 | "sizes": "512x512", 13 | "type": "image/png" 14 | }, 15 | { 16 | "src": "apple-touch-icon.png", 17 | "sizes": "180x180" 18 | }, 19 | { 20 | "src": 
"favicon.ico", 21 | "sizes": "32x32" 22 | } 23 | ] 24 | } 25 | -------------------------------------------------------------------------------- /scripts/gen-credits.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # This scripts generates the CREDITS file in the repository root, which 4 | # contains a list of all contributors ot the Redlib project. 5 | # 6 | # We use git-log to surface the names and emails of all authors and committers, 7 | # and grep will filter any automated commits due to GitHub. 8 | 9 | set -o pipefail 10 | 11 | cd "$(dirname "${BASH_SOURCE[0]}")/../" || exit 1 12 | git --no-pager log --pretty='%an <%ae>%n%cn <%ce>' main \ 13 | | sort -t'<' -u -k1,1 -k2,2 \ 14 | | grep -Fv -- 'GitHub ' \ 15 | > CREDITS 16 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM alpine:3.19 2 | 3 | ARG TARGET 4 | 5 | RUN apk add --no-cache curl 6 | 7 | RUN curl -L "https://github.com/redlib-org/redlib/releases/latest/download/redlib-${TARGET}.tar.gz" | \ 8 | tar xz -C /usr/local/bin/ 9 | 10 | RUN adduser --home /nonexistent --no-create-home --disabled-password redlib 11 | USER redlib 12 | 13 | # Tell Docker to expose port 8080 14 | EXPOSE 8080 15 | 16 | # Run a healthcheck every minute to make sure redlib is functional 17 | HEALTHCHECK --interval=1m --timeout=3s CMD wget --spider -q http://localhost:8080/settings || exit 1 18 | 19 | CMD ["redlib"] 20 | 21 | -------------------------------------------------------------------------------- /contrib/redlib.conf: -------------------------------------------------------------------------------- 1 | ADDRESS=0.0.0.0 2 | PORT=12345 3 | #REDLIB_DEFAULT_THEME=default 4 | #REDLIB_DEFAULT_FRONT_PAGE=default 5 | #REDLIB_DEFAULT_LAYOUT=card 6 | #REDLIB_DEFAULT_WIDE=off 7 | #REDLIB_DEFAULT_POST_SORT=hot 8 | 
#REDLIB_DEFAULT_COMMENT_SORT=confidence 9 | #REDLIB_DEFAULT_BLUR_SPOILER=off 10 | #REDLIB_DEFAULT_SHOW_NSFW=off 11 | #REDLIB_DEFAULT_BLUR_NSFW=off 12 | #REDLIB_DEFAULT_USE_HLS=off 13 | #REDLIB_DEFAULT_HIDE_HLS_NOTIFICATION=off 14 | #REDLIB_DEFAULT_AUTOPLAY_VIDEOS=off 15 | #REDLIB_DEFAULT_SUBSCRIPTIONS=(sub1+sub2+sub3) 16 | #REDLIB_DEFAULT_HIDE_AWARDS=off 17 | #REDLIB_DEFAULT_DISABLE_VISIT_REDDIT_CONFIRMATION=off 18 | -------------------------------------------------------------------------------- /compose.yaml: -------------------------------------------------------------------------------- 1 | services: 2 | redlib: 3 | image: quay.io/redlib/redlib:latest 4 | restart: always 5 | container_name: "redlib" 6 | ports: 7 | - 8080:8080 # Specify `127.0.0.1:8080:8080` instead if using a reverse proxy 8 | user: nobody 9 | read_only: true 10 | security_opt: 11 | - no-new-privileges:true 12 | # - seccomp=seccomp-redlib.json 13 | cap_drop: 14 | - ALL 15 | env_file: .env 16 | networks: 17 | - redlib 18 | healthcheck: 19 | test: ["CMD", "wget", "--spider", "-q", "--tries=1", "http://localhost:8080/settings"] 20 | interval: 5m 21 | timeout: 3s 22 | 23 | networks: 24 | redlib: 25 | -------------------------------------------------------------------------------- /static/opensearch.xml: -------------------------------------------------------------------------------- 1 | 3 | Search Redlib 4 | Search for whatever you want on Redlib, awesome Reddit frontend 5 | UTF-8 6 | https://localhost:8080/favicon.ico 7 | 8 | 9 | 10 | https://localhost:8080/search 11 | 12 | -------------------------------------------------------------------------------- /compose.dev.yaml: -------------------------------------------------------------------------------- 1 | # docker-compose -f docker-compose.dev.yml up -d 2 | version: "3.8" 3 | 4 | services: 5 | redlib: 6 | build: . 
7 | restart: always 8 | container_name: "redlib" 9 | ports: 10 | - 8080:8080 # Specify `127.0.0.1:8080:8080` instead if using a reverse proxy 11 | user: nobody 12 | read_only: true 13 | security_opt: 14 | - no-new-privileges:true 15 | # - seccomp=seccomp-redlib.json 16 | cap_drop: 17 | - ALL 18 | networks: 19 | - redlib 20 | healthcheck: 21 | test: ["CMD", "wget", "--spider", "-q", "--tries=1", "http://localhost:8080/settings"] 22 | interval: 5m 23 | timeout: 3s 24 | 25 | networks: 26 | redlib: 27 | -------------------------------------------------------------------------------- /templates/wiki.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | {% import "utils.html" as utils %} 3 | 4 | {% block title %} 5 | {% if sub != "" %}{{ page }} - {{ sub }} 6 | {% else %}Redlib{% endif %} 7 | {% endblock %} 8 | 9 | {% block search %} 10 | {% call utils::search(["/r/", sub.as_str()].concat(), "") %} 11 | {% endblock %} 12 | 13 | {% block subscriptions %} 14 | {% call utils::sub_list(sub.as_str()) %} 15 | {% endblock %} 16 | 17 | {% block body %} 18 |
19 |
20 |
21 | Posts 22 |
Wiki
23 |
24 |
25 | {{ wiki|safe }} 26 |
27 |
28 |
29 | {% endblock %} 30 | -------------------------------------------------------------------------------- /scripts/update_hls_js.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | cd "$(dirname "$0")" 4 | LATEST_TAG=$(curl -s https://api.github.com/repos/video-dev/hls.js/releases/latest | jq -r '.tag_name') 5 | 6 | if [[ -z "$LATEST_TAG" || "$LATEST_TAG" == "null" ]]; then 7 | echo "Failed to fetch the latest release tag from GitHub." 8 | exit 1 9 | fi 10 | 11 | LICENSE="// @license http://www.apache.org/licenses/LICENSE-2.0 Apache-2.0 12 | // @source https://github.com/video-dev/hls.js/tree/$LATEST_TAG" 13 | 14 | echo "$LICENSE" > ../static/hls.min.js 15 | 16 | curl -s https://cdn.jsdelivr.net/npm/hls.js@${LATEST_TAG}/dist/hls.min.js >> ../static/hls.min.js 17 | 18 | echo "Update complete. The latest hls.js (${LATEST_TAG}) has been saved to static/hls.min.js." 19 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_parity.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: ✨ Feature parity 3 | about: Suggest implementing a feature into Redlib that is found in Reddit.com 4 | title: '✨ Feature parity: ' 5 | labels: feature parity 6 | assignees: '' 7 | 8 | --- 9 | 10 | ## How does this feature work on Reddit? 
11 | 14 | 15 | ## Describe how this could be implemented into Redlib 16 | 19 | 20 | ## Describe alternatives you've considered 21 | 24 | 25 | ## Additional context / screenshot 26 | 29 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: 💡 Feature request 3 | about: Suggest a feature for Redlib that is not found in Reddit 4 | title: '💡 Feature request: ' 5 | labels: enhancement 6 | assignees: '' 7 | 8 | --- 9 | 10 | ## Is your feature request related to a problem? Please describe. 11 | 14 | 15 | ## Describe the feature you would like to be implemented 16 | 19 | 20 | ## Describe alternatives you've considered 21 | 24 | 25 | ## Additional context / screenshot 26 | 29 | -------------------------------------------------------------------------------- /contrib/redlib.service: -------------------------------------------------------------------------------- 1 | [Unit] 2 | Description=redlib daemon 3 | After=network.service 4 | 5 | [Service] 6 | DynamicUser=yes 7 | # Default Values 8 | #Environment=ADDRESS=0.0.0.0 9 | #Environment=PORT=8080 10 | # Optional Override 11 | EnvironmentFile=-/etc/redlib.conf 12 | ExecStart=/usr/bin/redlib -a ${ADDRESS} -p ${PORT} 13 | 14 | # Hardening 15 | DeviceAllow= 16 | LockPersonality=yes 17 | MemoryDenyWriteExecute=yes 18 | PrivateDevices=yes 19 | ProcSubset=pid 20 | ProtectClock=yes 21 | ProtectControlGroups=yes 22 | ProtectHome=yes 23 | ProtectHostname=yes 24 | ProtectKernelLogs=yes 25 | ProtectKernelModules=yes 26 | ProtectKernelTunables=yes 27 | ProtectProc=invisible 28 | RestrictAddressFamilies=AF_INET AF_INET6 29 | RestrictNamespaces=yes 30 | RestrictRealtime=yes 31 | RestrictSUIDSGID=yes 32 | SystemCallArchitectures=native 33 | SystemCallFilter=@system-service 34 | SystemCallFilter=~@privileged @resources 35 | UMask=0077 36 | 37 | [Install] 38 | 
WantedBy=default.target 39 | -------------------------------------------------------------------------------- /scripts/load_test.py: -------------------------------------------------------------------------------- 1 | import requests 2 | from bs4 import BeautifulSoup 3 | from concurrent.futures import ThreadPoolExecutor 4 | 5 | base_url = "http://localhost:8080" 6 | 7 | full_path = f"{base_url}/r/politics" 8 | 9 | ctr = 0 10 | 11 | def fetch_url(url): 12 | global ctr 13 | response = requests.get(url) 14 | ctr += 1 15 | print(f"Request count: {ctr}") 16 | return response 17 | 18 | while full_path: 19 | response = requests.get(full_path) 20 | ctr += 1 21 | print(f"Request count: {ctr}") 22 | soup = BeautifulSoup(response.text, 'html.parser') 23 | comment_links = soup.find_all('a', class_='post_comments') 24 | comment_urls = [base_url + link['href'] for link in comment_links] 25 | with ThreadPoolExecutor(max_workers=10) as executor: 26 | executor.map(fetch_url, comment_urls) 27 | next_link = soup.find('a', accesskey='N') 28 | if next_link: 29 | full_path = base_url + next_link['href'] 30 | else: 31 | break 32 | -------------------------------------------------------------------------------- /templates/error.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | {% block title %}Error: {{ msg }}{% endblock %} 3 | {% block sortstyle %}{% endblock %} 4 | {% block content %} 5 |
6 |

{{ msg }}

7 |

Reddit Status

8 |
9 |

10 |
11 |

12 |
13 |
14 | 15 | 16 |

Expected something to work? Report 18 | an issue

19 |
20 |

If you're getting a "Failed to parse page JSON data" error, please check #446

21 |
22 |

Head back home?

23 |
24 | {% endblock %} 25 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: 🐛 Bug report 3 | about: Create a report to help us improve 4 | title: '🐛 Bug Report: ' 5 | labels: bug 6 | assignees: '' 7 | 8 | --- 9 | 10 | 13 | 14 | ## Describe the bug 15 | 18 | 19 | ## Steps to reproduce the bug 20 | 21 | 28 | 29 | ## What's the expected behavior? 30 | 33 | 34 | ## Additional context / screenshot 35 | 38 | 39 | 40 | 41 | - [ ] I checked that the instance that this was reported on is running the latest git commit, or I can reproduce it locally on the latest git commit -------------------------------------------------------------------------------- /templates/nsfwlanding.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | {% block title %}NSFW content gated{% endblock %} 3 | {% block sortstyle %}{% endblock %} 4 | {% block content %} 5 |
6 |

7 | 😱 8 | {% if res_type == crate::utils::ResourceType::Subreddit %} 9 | r/{{ res }} is a NSFW community! 10 | {% else if res_type == crate::utils::ResourceType::User %} 11 | u/{{ res }}'s content is NSFW! 12 | {% else if res_type == crate::utils::ResourceType::Post %} 13 | This post is NSFW! 14 | {% endif %} 15 |

16 |
17 | 18 |

19 | {% if crate::utils::sfw_only() %} 20 | This instance of Redlib is SFW-only.

21 | {% else %} 22 | Enable "Show NSFW posts" in settings to view this {% if res_type == crate::utils::ResourceType::Subreddit %}subreddit{% else if res_type == crate::utils::ResourceType::User %}user's posts or comments{% else if res_type == crate::utils::ResourceType::Post %}post{% endif %}.
23 | {% if res_type == crate::utils::ResourceType::Post %} You can also quickly bypass this gate and view the post by clicking on this link.{% endif %} 24 | {% endif %} 25 |

26 |
27 | {% endblock %} 28 | {% block footer %} 29 | {% endblock %} 30 | 31 | -------------------------------------------------------------------------------- /Dockerfile.alpine: -------------------------------------------------------------------------------- 1 | # supported versions here: https://hub.docker.com/_/rust 2 | ARG ALPINE_VERSION=3.20 3 | 4 | ######################## 5 | ## builder image 6 | ######################## 7 | FROM rust:alpine${ALPINE_VERSION} AS builder 8 | 9 | RUN apk add --no-cache musl-dev 10 | 11 | WORKDIR /redlib 12 | 13 | # download (most) dependencies in their own layer 14 | COPY Cargo.lock Cargo.toml ./ 15 | RUN mkdir src && echo "fn main() { panic!(\"why am i running?\") }" > src/main.rs 16 | RUN cargo build --release --locked --bin redlib 17 | RUN rm ./src/main.rs && rmdir ./src 18 | 19 | # copy the source and build the redlib binary 20 | COPY . ./ 21 | RUN cargo build --release --locked --bin redlib 22 | RUN echo "finished building redlib!" 23 | 24 | ######################## 25 | ## release image 26 | ######################## 27 | FROM alpine:${ALPINE_VERSION} AS release 28 | 29 | # Import redlib binary from builder 30 | COPY --from=builder /redlib/target/release/redlib /usr/local/bin/redlib 31 | 32 | # Add non-root user for running redlib 33 | RUN adduser --home /nonexistent --no-create-home --disabled-password redlib 34 | USER redlib 35 | 36 | # Document that we intend to expose port 8080 to whoever runs the container 37 | EXPOSE 8080 38 | 39 | # Run a healthcheck every minute to make sure redlib is functional 40 | HEALTHCHECK --interval=1m --timeout=3s CMD wget --spider -q http://localhost:8080/settings || exit 1 41 | 42 | # Add container metadata 43 | LABEL org.opencontainers.image.authors="sigaloid" 44 | 45 | CMD ["redlib"] 46 | -------------------------------------------------------------------------------- /Dockerfile.ubuntu: -------------------------------------------------------------------------------- 1 | # supported 
versions here: https://hub.docker.com/_/rust 2 | ARG RUST_BUILDER_VERSION=slim-bookworm 3 | ARG UBUNTU_RELEASE_VERSION=noble 4 | 5 | ######################## 6 | ## builder image 7 | ######################## 8 | FROM rust:${RUST_BUILDER_VERSION} AS builder 9 | 10 | WORKDIR /redlib 11 | 12 | # download (most) dependencies in their own layer 13 | COPY Cargo.lock Cargo.toml ./ 14 | RUN mkdir src && echo "fn main() { panic!(\"why am i running?\") }" > src/main.rs 15 | RUN cargo build --release --locked --bin redlib 16 | RUN rm ./src/main.rs && rmdir ./src 17 | 18 | # copy the source and build the redlib binary 19 | COPY . ./ 20 | RUN cargo build --release --locked --bin redlib 21 | RUN echo "finished building redlib!" 22 | 23 | ######################## 24 | ## release image 25 | ######################## 26 | FROM ubuntu:${UBUNTU_RELEASE_VERSION} AS release 27 | 28 | # Install ca-certificates 29 | RUN apt-get update && apt-get install -y ca-certificates 30 | 31 | # Import redlib binary from builder 32 | COPY --from=builder /redlib/target/release/redlib /usr/local/bin/redlib 33 | 34 | # Add non-root user for running redlib 35 | RUN useradd \ 36 | --no-create-home \ 37 | --password "!" 
\ 38 | --comment "user for running redlib" \ 39 | redlib 40 | USER redlib 41 | 42 | # Document that we intend to expose port 8080 to whoever runs the container 43 | EXPOSE 8080 44 | 45 | # Run a healthcheck every minute to make sure redlib is functional 46 | HEALTHCHECK --interval=1m --timeout=3s CMD wget --spider -q http://localhost:8080/settings || exit 1 47 | 48 | # Add container metadata 49 | LABEL org.opencontainers.image.authors="sigaloid" 50 | 51 | CMD ["redlib"] 52 | -------------------------------------------------------------------------------- /.github/workflows/pull-request.yml: -------------------------------------------------------------------------------- 1 | name: Pull Request 2 | 3 | env: 4 | CARGO_TERM_COLOR: always 5 | NEXTEST_RETRIES: 10 6 | 7 | on: 8 | push: 9 | branches: 10 | - 'main' 11 | 12 | pull_request: 13 | branches: 14 | - 'main' 15 | 16 | jobs: 17 | test: 18 | name: cargo test 19 | runs-on: ubuntu-latest 20 | steps: 21 | - name: Checkout sources 22 | uses: actions/checkout@v3 23 | 24 | - name: Install stable toolchain 25 | uses: dtolnay/rust-toolchain@stable 26 | with: 27 | toolchain: stable 28 | 29 | - name: Install cargo-nextest 30 | uses: taiki-e/install-action@nextest 31 | 32 | - name: Run cargo nextest 33 | run: cargo nextest run 34 | 35 | format: 36 | name: cargo fmt --all -- --check 37 | runs-on: ubuntu-latest 38 | 39 | steps: 40 | - name: Checkout sources 41 | uses: actions/checkout@v3 42 | 43 | - name: Install stable toolchain with rustfmt component 44 | uses: dtolnay/rust-toolchain@stable 45 | with: 46 | toolchain: stable 47 | components: rustfmt 48 | 49 | - name: Run cargo fmt 50 | run: cargo fmt --all -- --check 51 | 52 | clippy: 53 | name: cargo clippy -- -D warnings 54 | runs-on: ubuntu-latest 55 | 56 | steps: 57 | - name: Checkout sources 58 | uses: actions/checkout@v3 59 | 60 | - name: Install stable toolchain with clippy component 61 | uses: dtolnay/rust-toolchain@stable 62 | with: 63 | toolchain: stable 64 | 
components: clippy 65 | 66 | - name: Run cargo clippy 67 | run: cargo clippy -- -D warnings -------------------------------------------------------------------------------- /flake.nix: -------------------------------------------------------------------------------- 1 | { 2 | description = "Redlib: Private front-end for Reddit"; 3 | 4 | inputs = { 5 | nixpkgs.url = "github:NixOS/nixpkgs/nixpkgs-unstable"; 6 | 7 | crane.url = "github:ipetkov/crane"; 8 | 9 | flake-utils.url = "github:numtide/flake-utils"; 10 | 11 | rust-overlay = { 12 | url = "github:oxalica/rust-overlay"; 13 | inputs.nixpkgs.follows = "nixpkgs"; 14 | }; 15 | }; 16 | 17 | outputs = { nixpkgs, crane, flake-utils, rust-overlay, ... }: 18 | flake-utils.lib.eachSystem [ "x86_64-linux" ] (system: 19 | let 20 | pkgs = import nixpkgs { 21 | inherit system; 22 | overlays = [ (import rust-overlay) ]; 23 | }; 24 | 25 | inherit (pkgs) lib; 26 | 27 | rustToolchain = pkgs.rust-bin.stable.latest.default.override { 28 | targets = [ "x86_64-unknown-linux-musl" ]; 29 | }; 30 | 31 | craneLib = (crane.mkLib pkgs).overrideToolchain rustToolchain; 32 | 33 | 34 | src = lib.cleanSourceWith { 35 | src = craneLib.path ./.; 36 | filter = path: type: 37 | (lib.hasInfix "/templates/" path) || 38 | (lib.hasInfix "/static/" path) || 39 | (craneLib.filterCargoSources path type); 40 | }; 41 | 42 | redlib = craneLib.buildPackage { 43 | inherit src; 44 | strictDeps = true; 45 | doCheck = false; 46 | 47 | CARGO_BUILD_TARGET = "x86_64-unknown-linux-musl"; 48 | CARGO_BUILD_RUSTFLAGS = "-C target-feature=+crt-static"; 49 | }; 50 | in 51 | { 52 | checks = { 53 | my-crate = redlib; 54 | }; 55 | 56 | packages.default = redlib; 57 | packages.docker = pkgs.dockerTools.buildImage { 58 | name = "quay.io/redlib/redlib"; 59 | tag = "latest"; 60 | created = "now"; 61 | copyToRoot = with pkgs.dockerTools; [ caCertificates fakeNss ]; 62 | config.Cmd = "${redlib}/bin/redlib"; 63 | }; 64 | }); 65 | } 66 | 
-------------------------------------------------------------------------------- /app.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "Redlib", 3 | "description": "Private front-end for Reddit", 4 | "buildpacks": [ 5 | { 6 | "url": "https://github.com/emk/heroku-buildpack-rust" 7 | }, 8 | { 9 | "url": "emk/rust" 10 | } 11 | ], 12 | "stack": "container", 13 | "env": { 14 | "REDLIB_DEFAULT_THEME": { 15 | "required": false 16 | }, 17 | "REDLIB_DEFAULT_FRONT_PAGE": { 18 | "required": false 19 | }, 20 | "REDLIB_DEFAULT_LAYOUT": { 21 | "required": false 22 | }, 23 | "REDLIB_DEFAULT_WIDE": { 24 | "required": false 25 | }, 26 | "REDLIB_DEFAULT_COMMENT_SORT": { 27 | "required": false 28 | }, 29 | "REDLIB_DEFAULT_POST_SORT": { 30 | "required": false 31 | }, 32 | "REDLIB_DEFAULT_BLUR_SPOILER": { 33 | "required": false 34 | }, 35 | "REDLIB_DEFAULT_SHOW_NSFW": { 36 | "required": false 37 | }, 38 | "REDLIB_DEFAULT_BLUR_NSFW": { 39 | "required": false 40 | }, 41 | "REDLIB_USE_HLS": { 42 | "required": false 43 | }, 44 | "REDLIB_HIDE_HLS_NOTIFICATION": { 45 | "required": false 46 | }, 47 | "REDLIB_SFW_ONLY": { 48 | "required": false 49 | }, 50 | "REDLIB_DEFAULT_HIDE_AWARDS": { 51 | "required": false 52 | }, 53 | "REDLIB_DEFAULT_HIDE_SCORE": { 54 | "required": false 55 | }, 56 | "REDLIB_BANNER": { 57 | "required": false 58 | }, 59 | "REDLIB_ROBOTS_DISABLE_INDEXING": { 60 | "required": false 61 | }, 62 | "REDLIB_DEFAULT_SUBSCRIPTIONS": { 63 | "required": false 64 | }, 65 | "REDLIB_DEFAULT_FILTERS": { 66 | "required": false 67 | }, 68 | "REDLIB_DEFAULT_DISABLE_VISIT_REDDIT_CONFIRMATION": { 69 | "required": false 70 | }, 71 | "REDLIB_PUSHSHIFT_FRONTEND": { 72 | "required": false 73 | }, 74 | "REDLIB_ENABLE_RSS": { 75 | "required": false 76 | }, 77 | "REDLIB_FULL_URL": { 78 | "required": false 79 | }, 80 | "REDLIB_DEFAULT_REMOVE_DEFAULT_FEEDS": { 81 | "required": false 82 | } 83 | } 84 | } 85 | 
-------------------------------------------------------------------------------- /templates/comment.html: -------------------------------------------------------------------------------- 1 | {% import "utils.html" as utils %} 2 | 3 | {% if kind == "more" && parent_kind == "t1" %} 4 | → More replies ({{ more_count }}) 5 | {% else if kind == "t1" %} 6 |
7 |
8 |

9 | {% if prefs.hide_score != "on" %} 10 | {{ score.0 }} 11 | {% else %} 12 | • 13 | {% endif %} 14 |

15 |
16 |
17 |
18 | 19 | {% if author.name != "[deleted]" %} 20 | 21 | {% else %} 22 | u/[deleted] 23 | {% endif %} 24 | {% if author.flair.flair_parts.len() > 0 %} 25 | {% call utils::render_flair(author.flair.flair_parts) %} 26 | {% endif %} 27 | {{ rel_time }} 28 | {% if edited.0 != "".to_string() %}edited {{ edited.0 }}{% endif %} 29 | {% if !awards.is_empty() && prefs.hide_awards != "on" %} 30 | 31 | {% for award in awards.clone() %} 32 | 33 | {{ award.name }} 34 | 35 | {% endfor %} 36 | {% endif %} 37 | 38 | {% if is_filtered %} 39 |
(Filtered content)
40 | {% else %} 41 |
{{ body|safe }}
42 | {% endif %} 43 |
{% for c in replies -%}{{ c.render().unwrap()|safe }}{%- endfor %} 44 |
45 |
46 |
47 | {% endif %} 48 | -------------------------------------------------------------------------------- /.env.example: -------------------------------------------------------------------------------- 1 | # Redlib configuration 2 | # See the Configuration section of the README for a more detailed explanation of these settings. 3 | 4 | # Instance-specific settings 5 | # Enable SFW-only mode for the instance 6 | REDLIB_SFW_ONLY=off 7 | # Set a banner message for the instance 8 | REDLIB_BANNER= 9 | # Disable search engine indexing 10 | REDLIB_ROBOTS_DISABLE_INDEXING=off 11 | # Set the Pushshift frontend for "removed" links 12 | REDLIB_PUSHSHIFT_FRONTEND=undelete.pullpush.io 13 | 14 | # Default user settings 15 | # Set the default theme (options: system, light, dark, black, dracula, nord, laserwave, violet, gold, rosebox, gruvboxdark, gruvboxlight) 16 | REDLIB_DEFAULT_THEME=system 17 | # Set the default front page (options: default, popular, all) 18 | REDLIB_DEFAULT_FRONT_PAGE=default 19 | # Set the default layout (options: card, clean, compact) 20 | REDLIB_DEFAULT_LAYOUT=card 21 | # Enable wide mode by default 22 | REDLIB_DEFAULT_WIDE=off 23 | # Set the default post sort method (options: hot, new, top, rising, controversial) 24 | REDLIB_DEFAULT_POST_SORT=hot 25 | # Set the default comment sort method (options: confidence, top, new, controversial, old) 26 | REDLIB_DEFAULT_COMMENT_SORT=confidence 27 | # Enable blurring Spoiler content by default 28 | REDLIB_DEFAULT_BLUR_SPOILER=off 29 | # Enable showing NSFW content by default 30 | REDLIB_DEFAULT_SHOW_NSFW=off 31 | # Enable blurring NSFW content by default 32 | REDLIB_DEFAULT_BLUR_NSFW=off 33 | # Enable HLS video format by default 34 | REDLIB_DEFAULT_USE_HLS=off 35 | # Hide HLS notification by default 36 | REDLIB_DEFAULT_HIDE_HLS_NOTIFICATION=off 37 | # Disable autoplay videos by default 38 | REDLIB_DEFAULT_AUTOPLAY_VIDEOS=off 39 | # Define a default list of subreddit subscriptions (format: sub1+sub2+sub3) 40 | 
REDLIB_DEFAULT_SUBSCRIPTIONS= 41 | # Define a default list of subreddit filters (format: sub1+sub2+sub3) 42 | REDLIB_DEFAULT_FILTERS= 43 | # Hide awards by default 44 | REDLIB_DEFAULT_HIDE_AWARDS=off 45 | # Hide sidebar and summary 46 | REDLIB_DEFAULT_HIDE_SIDEBAR_AND_SUMMARY=off 47 | # Disable the confirmation before visiting Reddit 48 | REDLIB_DEFAULT_DISABLE_VISIT_REDDIT_CONFIRMATION=off 49 | # Hide score by default 50 | REDLIB_DEFAULT_HIDE_SCORE=off 51 | # Enable fixed navbar by default 52 | REDLIB_DEFAULT_FIXED_NAVBAR=on 53 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "redlib" 3 | description = " Alternative private front-end to Reddit" 4 | license = "AGPL-3.0-only" 5 | repository = "https://github.com/redlib-org/redlib" 6 | version = "0.36.0" 7 | authors = [ 8 | "Matthew Esposito ", 9 | "spikecodes <19519553+spikecodes@users.noreply.github.com>", 10 | ] 11 | edition = "2021" 12 | rust-version = "1.81" 13 | default-run = "redlib" 14 | 15 | [dependencies] 16 | askama = { version = "0.14.0", default-features = false, features = [ 17 | "std", 18 | "derive", 19 | ] } 20 | cached = { version = "0.54.0", features = ["async"] } 21 | clap = { version = "4.4.11", default-features = false, features = [ 22 | "std", 23 | "env", 24 | "derive", 25 | ] } 26 | regex = "1.10.2" 27 | serde = { version = "1.0.193", features = ["derive"] } 28 | cookie = "0.18.0" 29 | futures-lite = "2.2.0" 30 | hyper = { version = "0.14.31", features = ["full"] } 31 | percent-encoding = "2.3.1" 32 | route-recognizer = "0.3.1" 33 | serde_json = "1.0.133" 34 | tokio = { version = "1.44.2", features = ["full"] } 35 | time = { version = "0.3.31", features = ["local-offset"] } 36 | url = "2.5.0" 37 | rust-embed = { version = "8.1.0", features = ["include-exclude"] } 38 | libflate = "2.0.0" 39 | brotli = { version = "7.0.0", features = ["std"] } 40 
| toml = "0.8.8" 41 | serde_yaml = "0.9.29" 42 | build_html = "2.4.0" 43 | uuid = { version = "1.6.1", features = ["v4"] } 44 | base64 = "0.22.1" 45 | fastrand = "2.0.1" 46 | log = "0.4.20" 47 | pretty_env_logger = "0.5.0" 48 | dotenvy = "0.15.7" 49 | rss = "2.0.7" 50 | arc-swap = "1.7.1" 51 | serde_json_path = "0.7.1" 52 | async-recursion = "1.1.1" 53 | pulldown-cmark = { version = "0.12.0", features = ["simd", "html"], default-features = false } 54 | hyper-rustls = { version = "0.24.2", features = [ "http2" ] } 55 | tegen = "0.1.4" 56 | serde_urlencoded = "0.7.1" 57 | chrono = { version = "0.4.39", default-features = false, features = [ "std" ] } 58 | htmlescape = "0.3.1" 59 | bincode = "1.3.3" 60 | base2048 = "2.0.2" 61 | revision = "0.10.0" 62 | fake_user_agent = "0.2.2" 63 | rustls = "0.21.12" 64 | 65 | [dev-dependencies] 66 | lipsum = "0.9.0" 67 | sealed_test = "1.0.0" 68 | 69 | [profile.release] 70 | codegen-units = 1 71 | lto = true 72 | strip = "symbols" 73 | -------------------------------------------------------------------------------- /.github/workflows/build-artifacts.yaml: -------------------------------------------------------------------------------- 1 | name: Release Build 2 | 3 | on: 4 | push: 5 | paths-ignore: 6 | - "*.md" 7 | - "compose.*" 8 | branches: 9 | - "main" 10 | release: 11 | types: [published] 12 | 13 | env: 14 | CARGO_TERM_COLOR: always 15 | 16 | CARGO_TARGET_AARCH64_UNKNOWN_LINUX_MUSL_LINKER: aarch64-linux-gnu-gcc 17 | CC_aarch64_unknown_linux_musl: aarch64-linux-gnu-gcc 18 | CARGO_TARGET_ARMV7_UNKNOWN_LINUX_MUSLEABIHF_LINKER: arm-linux-gnueabihf-gcc 19 | CC_armv7_unknown_linux_musleabihf: arm-linux-gnueabihf-gcc 20 | 21 | jobs: 22 | build: 23 | name: Rust project - latest 24 | runs-on: ubuntu-latest 25 | strategy: 26 | matrix: 27 | target: 28 | - x86_64-unknown-linux-musl 29 | - aarch64-unknown-linux-musl 30 | - armv7-unknown-linux-musleabihf 31 | steps: 32 | - uses: actions/checkout@v4 33 | 34 | - uses: 
actions-rust-lang/setup-rust-toolchain@v1 35 | with: 36 | target: ${{ matrix.target }} 37 | 38 | - if: matrix.target == 'x86_64-unknown-linux-musl' 39 | run: | 40 | sudo apt-get update 41 | sudo apt-get install -y --no-install-recommends musl-tools 42 | 43 | - if: matrix.target == 'armv7-unknown-linux-musleabihf' 44 | run: | 45 | sudo apt update 46 | sudo apt install -y gcc-arm-linux-gnueabihf musl-tools 47 | 48 | - if: matrix.target == 'aarch64-unknown-linux-musl' 49 | run: | 50 | sudo apt update 51 | sudo apt install -y gcc-aarch64-linux-gnu musl-tools 52 | 53 | - name: Versions 54 | id: version 55 | run: echo "VERSION=$(cargo metadata --format-version 1 --no-deps | jq .packages[0].version -r | sed 's/^/v/')" >> "$GITHUB_OUTPUT" 56 | 57 | - name: Build 58 | run: cargo build --release --target ${{ matrix.target }} 59 | 60 | - name: Package release 61 | run: tar czf redlib-${{ matrix.target }}.tar.gz -C target/${{ matrix.target }}/release/ redlib 62 | 63 | - name: Upload release 64 | uses: softprops/action-gh-release@v1 65 | with: 66 | tag_name: ${{ steps.version.outputs.VERSION }} 67 | name: ${{ steps.version.outputs.VERSION }} - ${{ github.event.head_commit.message }} 68 | draft: true 69 | files: | 70 | redlib-${{ matrix.target }}.tar.gz 71 | body: | 72 | - ${{ github.event.head_commit.message }} ${{ github.sha }} 73 | generate_release_notes: true 74 | 75 | 76 | 77 | -------------------------------------------------------------------------------- /.github/workflows/main-rust.yml: -------------------------------------------------------------------------------- 1 | name: Rust Build & Publish 2 | 3 | on: 4 | push: 5 | paths-ignore: 6 | - "**.md" 7 | 8 | branches: 9 | - 'main' 10 | 11 | release: 12 | types: [published] 13 | 14 | env: 15 | CARGO_TERM_COLOR: always 16 | 17 | jobs: 18 | build: 19 | runs-on: ubuntu-latest 20 | 21 | steps: 22 | - name: Checkout sources 23 | uses: actions/checkout@v3 24 | 25 | - name: Cache Packages 26 | uses: Swatinem/rust-cache@v2 27 | 
28 | - name: Install stable toolchain 29 | uses: dtolnay/rust-toolchain@stable 30 | with: 31 | toolchain: stable 32 | 33 | - name: Install musl-gcc 34 | run: sudo apt-get install musl-tools 35 | 36 | - name: Install cargo musl target 37 | run: rustup target add x86_64-unknown-linux-musl 38 | 39 | # Building actions 40 | - name: Build 41 | run: RUSTFLAGS='-C target-feature=+crt-static' cargo build --release --target x86_64-unknown-linux-musl 42 | 43 | - name: Versions 44 | id: version 45 | run: echo "VERSION=$(cargo metadata --format-version 1 --no-deps | jq .packages[0].version -r | sed 's/^/v/')" >> "$GITHUB_OUTPUT" 46 | 47 | # Publishing actions 48 | 49 | - name: Publish to crates.io 50 | if: github.event_name == 'release' 51 | run: cargo publish --no-verify --token ${{ secrets.CARGO_REGISTRY_TOKEN }} 52 | 53 | - name: Calculate SHA512 checksum 54 | run: sha512sum target/x86_64-unknown-linux-musl/release/redlib > redlib.sha512 55 | 56 | - name: Calculate SHA256 checksum 57 | run: sha256sum target/x86_64-unknown-linux-musl/release/redlib > redlib.sha256 58 | 59 | - uses: actions/upload-artifact@v4 60 | name: Upload a Build Artifact 61 | with: 62 | name: redlib 63 | path: | 64 | target/x86_64-unknown-linux-musl/release/redlib 65 | redlib.sha512 66 | redlib.sha256 67 | 68 | 69 | - name: Release 70 | uses: softprops/action-gh-release@v1 71 | if: github.base_ref != 'main' && github.event_name == 'release' 72 | with: 73 | tag_name: ${{ steps.version.outputs.VERSION }} 74 | name: ${{ steps.version.outputs.VERSION }} - ${{ github.event.head_commit.message }} 75 | draft: true 76 | files: | 77 | target/x86_64-unknown-linux-musl/release/redlib 78 | redlib.sha512 79 | redlib.sha256 80 | body: | 81 | - ${{ github.event.head_commit.message }} ${{ github.sha }} 82 | generate_release_notes: true 83 | env: 84 | GITHUB_TOKEN: ${{ secrets.RELEASE_TOKEN }} 85 | -------------------------------------------------------------------------------- /flake.lock: 
-------------------------------------------------------------------------------- 1 | { 2 | "nodes": { 3 | "crane": { 4 | "locked": { 5 | "lastModified": 1731974733, 6 | "narHash": "sha256-enYSSZVVl15FI5p+0Y5/Ckf5DZAvXe6fBrHxyhA/njc=", 7 | "owner": "ipetkov", 8 | "repo": "crane", 9 | "rev": "3cb338ce81076ce5e461cf77f7824476addb0e1c", 10 | "type": "github" 11 | }, 12 | "original": { 13 | "owner": "ipetkov", 14 | "repo": "crane", 15 | "type": "github" 16 | } 17 | }, 18 | "flake-utils": { 19 | "inputs": { 20 | "systems": "systems" 21 | }, 22 | "locked": { 23 | "lastModified": 1731533236, 24 | "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=", 25 | "owner": "numtide", 26 | "repo": "flake-utils", 27 | "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b", 28 | "type": "github" 29 | }, 30 | "original": { 31 | "owner": "numtide", 32 | "repo": "flake-utils", 33 | "type": "github" 34 | } 35 | }, 36 | "nixpkgs": { 37 | "locked": { 38 | "lastModified": 1731890469, 39 | "narHash": "sha256-D1FNZ70NmQEwNxpSSdTXCSklBH1z2isPR84J6DQrJGs=", 40 | "owner": "NixOS", 41 | "repo": "nixpkgs", 42 | "rev": "5083ec887760adfe12af64830a66807423a859a7", 43 | "type": "github" 44 | }, 45 | "original": { 46 | "owner": "NixOS", 47 | "ref": "nixpkgs-unstable", 48 | "repo": "nixpkgs", 49 | "type": "github" 50 | } 51 | }, 52 | "root": { 53 | "inputs": { 54 | "crane": "crane", 55 | "flake-utils": "flake-utils", 56 | "nixpkgs": "nixpkgs", 57 | "rust-overlay": "rust-overlay" 58 | } 59 | }, 60 | "rust-overlay": { 61 | "inputs": { 62 | "nixpkgs": [ 63 | "nixpkgs" 64 | ] 65 | }, 66 | "locked": { 67 | "lastModified": 1732069891, 68 | "narHash": "sha256-moKx8AVJrViCSdA0e0nSsG8b1dAsObI4sRAtbqbvBY8=", 69 | "owner": "oxalica", 70 | "repo": "rust-overlay", 71 | "rev": "8509a51241c407d583b1963d5079585a992506e8", 72 | "type": "github" 73 | }, 74 | "original": { 75 | "owner": "oxalica", 76 | "repo": "rust-overlay", 77 | "type": "github" 78 | } 79 | }, 80 | "systems": { 81 | "locked": { 82 | "lastModified": 
1681028828, 83 | "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", 84 | "owner": "nix-systems", 85 | "repo": "default", 86 | "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", 87 | "type": "github" 88 | }, 89 | "original": { 90 | "owner": "nix-systems", 91 | "repo": "default", 92 | "type": "github" 93 | } 94 | } 95 | }, 96 | "root": "root", 97 | "version": 7 98 | } 99 | -------------------------------------------------------------------------------- /static/check_update.js: -------------------------------------------------------------------------------- 1 | async function checkInstanceUpdateStatus() { 2 | try { 3 | const response = await fetch('/commits.atom'); 4 | const text = await response.text(); 5 | const parser = new DOMParser(); 6 | const xmlDoc = parser.parseFromString(text, "application/xml"); 7 | const entries = xmlDoc.getElementsByTagName('entry'); 8 | const localCommit = document.getElementById('git_commit').dataset.value; 9 | 10 | let statusMessage = ''; 11 | 12 | if (entries.length > 0) { 13 | const commitHashes = Array.from(entries).map(entry => { 14 | const id = entry.getElementsByTagName('id')[0].textContent; 15 | return id.split('/').pop(); 16 | }); 17 | 18 | const commitIndex = commitHashes.indexOf(localCommit); 19 | 20 | if (commitIndex === 0) { 21 | statusMessage = '✅ Instance is up to date.'; 22 | } else if (commitIndex > 0) { 23 | statusMessage = `⚠️ This instance is not up to date and is ${commitIndex} commits old. Test and confirm on an up-to-date instance before reporting.`; 24 | document.getElementById('error-446').remove(); 25 | } else { 26 | statusMessage = `⚠️ This instance is not up to date and is at least ${commitHashes.length} commits old. 
Test and confirm on an up-to-date instance before reporting.`; 27 | document.getElementById('error-446').remove(); 28 | } 29 | } else { 30 | statusMessage = '⚠️ Unable to fetch commit information.'; 31 | } 32 | 33 | document.getElementById('update-status').innerText = statusMessage; 34 | } catch (error) { 35 | console.error('Error fetching commits:', error); 36 | document.getElementById('update-status').innerText = '⚠️ Error checking update status: ' + error; 37 | } 38 | } 39 | 40 | async function checkOtherInstances() { 41 | try { 42 | const response = await fetch('/instances.json'); 43 | const data = await response.json(); 44 | const instances = window.location.host.endsWith('.onion') ? data.instances.filter(i => i.onion) : data.instances.filter(i => i.url); 45 | if (instances.length == 0) return; 46 | const randomInstance = instances[Math.floor(Math.random() * instances.length)]; 47 | const instanceUrl = randomInstance.url ?? randomInstance.onion; 48 | // Set the href of the tag to the instance URL with path included 49 | document.getElementById('random-instance').href = instanceUrl + window.location.pathname; 50 | document.getElementById('random-instance').innerText = "Visit Random Instance"; 51 | } catch (error) { 52 | console.error('Error fetching instances:', error); 53 | document.getElementById('update-status').innerText = '⚠️ Error checking other instances: ' + error; 54 | } 55 | } 56 | 57 | // Set the target URL when the page loads 58 | window.addEventListener('load', checkOtherInstances); 59 | 60 | checkInstanceUpdateStatus(); 61 | -------------------------------------------------------------------------------- /seccomp-redlib.json: -------------------------------------------------------------------------------- 1 | { 2 | "defaultAction": "SCMP_ACT_ERRNO", 3 | "archMap": [ 4 | { 5 | "architecture": "SCMP_ARCH_X86_64", 6 | "subArchitectures": [ 7 | "SCMP_ARCH_X86", 8 | "SCMP_ARCH_X32" 9 | ] 10 | }, 11 | { 12 | "architecture": "SCMP_ARCH_AARCH64", 13 | 
"subArchitectures": [ 14 | "SCMP_ARCH_ARM" 15 | ] 16 | }, 17 | { 18 | "architecture": "SCMP_ARCH_MIPS64", 19 | "subArchitectures": [ 20 | "SCMP_ARCH_MIPS", 21 | "SCMP_ARCH_MIPS64N32" 22 | ] 23 | }, 24 | { 25 | "architecture": "SCMP_ARCH_MIPS64N32", 26 | "subArchitectures": [ 27 | "SCMP_ARCH_MIPS", 28 | "SCMP_ARCH_MIPS64" 29 | ] 30 | }, 31 | { 32 | "architecture": "SCMP_ARCH_MIPSEL64", 33 | "subArchitectures": [ 34 | "SCMP_ARCH_MIPSEL", 35 | "SCMP_ARCH_MIPSEL64N32" 36 | ] 37 | }, 38 | { 39 | "architecture": "SCMP_ARCH_MIPSEL64N32", 40 | "subArchitectures": [ 41 | "SCMP_ARCH_MIPSEL", 42 | "SCMP_ARCH_MIPSEL64" 43 | ] 44 | }, 45 | { 46 | "architecture": "SCMP_ARCH_S390X", 47 | "subArchitectures": [ 48 | "SCMP_ARCH_S390" 49 | ] 50 | } 51 | ], 52 | "syscalls": [ 53 | { 54 | "names": [ 55 | "accept4", 56 | "arch_prctl", 57 | "bind", 58 | "brk", 59 | "clock_gettime", 60 | "clone", 61 | "close", 62 | "connect", 63 | "epoll_create1", 64 | "epoll_ctl", 65 | "epoll_pwait", 66 | "eventfd2", 67 | "execve", 68 | "exit", 69 | "exit_group", 70 | "fcntl", 71 | "flock", 72 | "fork", 73 | "fstat", 74 | "futex", 75 | "getcwd", 76 | "getpeername", 77 | "getpid", 78 | "getrandom", 79 | "getsockname", 80 | "getsockopt", 81 | "getgid", 82 | "getppid", 83 | "gettid", 84 | "getuid", 85 | "ioctl", 86 | "listen", 87 | "lseek", 88 | "madvise", 89 | "mmap", 90 | "mprotect", 91 | "mremap", 92 | "munmap", 93 | "newfstatat", 94 | "open", 95 | "openat", 96 | "prctl", 97 | "poll", 98 | "read", 99 | "recvfrom", 100 | "rt_sigaction", 101 | "rt_sigprocmask", 102 | "rt_sigreturn", 103 | "sched_getaffinity", 104 | "sched_yield", 105 | "sendto", 106 | "setitimer", 107 | "setsockopt", 108 | "set_tid_address", 109 | "shutdown", 110 | "sigaltstack", 111 | "socket", 112 | "socketpair", 113 | "stat", 114 | "wait4", 115 | "write", 116 | "writev" 117 | ], 118 | "action": "SCMP_ACT_ALLOW", 119 | "args": [], 120 | "comment": "", 121 | "includes": {}, 122 | "excludes": {} 123 | } 124 | ] 125 | } 126 | 
-------------------------------------------------------------------------------- /templates/post.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | {% import "utils.html" as utils %} 3 | 4 | {% block title %} 5 | {% if single_thread %} 6 | {{ comments[0].author.name }} comments on {{ post.title }} - r/{{ post.community }} 7 | {% else %} 8 | {{ post.title }} - r/{{ post.community }} 9 | {% endif %} 10 | {% endblock %} 11 | 12 | {% block search %} 13 | {% call utils::search(["/r/", post.community.as_str()].concat(), "") %} 14 | {% endblock %} 15 | 16 | {% block root %}/r/{{ post.community }}{% endblock %}{% block location %}r/{{ post.community }}{% endblock %} 17 | {% block head %} 18 | {% call super() %} 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | {% if post.post_type == "image" %} 29 | 30 | 31 | 32 | 33 | {% else if post.post_type == "video" || post.post_type == "gif" %} 34 | 35 | 36 | 37 | 38 | {% else %} 39 | 40 | {% if single_thread %} 41 | 42 | {% endif %} 43 | {% endif %} 44 | {% endblock %} 45 | 46 | {% block subscriptions %} 47 | {% call utils::sub_list(post.community.as_str()) %} 48 | {% endblock %} 49 | 50 | {% block content %} 51 | 97 | {% endblock %} 98 | -------------------------------------------------------------------------------- /.github/workflows/main-docker.yml: -------------------------------------------------------------------------------- 1 | name: Container build 2 | 3 | on: 4 | workflow_run: 5 | workflows: ["Release Build"] 6 | types: 7 | - completed 8 | env: 9 | REGISTRY_IMAGE: quay.io/redlib/redlib 10 | 11 | jobs: 12 | build: 13 | runs-on: ubuntu-latest 14 | strategy: 15 | fail-fast: false 16 | matrix: 17 | include: 18 | - { platform: linux/amd64, target: x86_64-unknown-linux-musl } 19 | - { platform: linux/arm64, target: aarch64-unknown-linux-musl } 20 | - { platform: linux/arm/v7, target: armv7-unknown-linux-musleabihf } 21 | steps: 22 | - name: Checkout 23 | 
uses: actions/checkout@v4 24 | - name: Docker meta 25 | id: meta 26 | uses: docker/metadata-action@v5 27 | with: 28 | images: ${{ env.REGISTRY_IMAGE }} 29 | tags: | 30 | type=sha 31 | type=raw,value=latest,enable={{is_default_branch}} 32 | - name: Set up QEMU 33 | uses: docker/setup-qemu-action@v3 34 | - name: Set up Docker Buildx 35 | uses: docker/setup-buildx-action@v3 36 | - name: Login to Quay.io Container Registry 37 | uses: docker/login-action@v3 38 | with: 39 | registry: quay.io 40 | username: ${{ secrets.QUAY_USERNAME }} 41 | password: ${{ secrets.QUAY_ROBOT_TOKEN }} 42 | - name: Build and push 43 | id: build 44 | uses: docker/build-push-action@v5 45 | with: 46 | context: . 47 | platforms: ${{ matrix.platform }} 48 | labels: ${{ steps.meta.outputs.labels }} 49 | outputs: type=image,name=${{ env.REGISTRY_IMAGE }},push-by-digest=true,name-canonical=true,push=true 50 | file: Dockerfile 51 | build-args: TARGET=${{ matrix.target }} 52 | - name: Export digest 53 | run: | 54 | mkdir -p /tmp/digests 55 | digest="${{ steps.build.outputs.digest }}" 56 | touch "/tmp/digests/${digest#sha256:}" 57 | - name: Upload digest 58 | uses: actions/upload-artifact@v4 59 | with: 60 | name: digests-${{ matrix.target }} 61 | path: /tmp/digests/* 62 | if-no-files-found: error 63 | retention-days: 1 64 | merge: 65 | runs-on: ubuntu-latest 66 | needs: 67 | - build 68 | steps: 69 | - name: Download digests 70 | uses: actions/download-artifact@v4.1.7 71 | with: 72 | path: /tmp/digests 73 | pattern: digests-* 74 | merge-multiple: true 75 | 76 | - name: Set up Docker Buildx 77 | uses: docker/setup-buildx-action@v3 78 | - name: Docker meta 79 | id: meta 80 | uses: docker/metadata-action@v5 81 | with: 82 | images: ${{ env.REGISTRY_IMAGE }} 83 | tags: | 84 | type=sha 85 | type=raw,value=latest,enable={{is_default_branch}} 86 | - name: Login to Quay.io Container Registry 87 | uses: docker/login-action@v3 88 | with: 89 | registry: quay.io 90 | username: ${{ secrets.QUAY_USERNAME }} 91 | 
password: ${{ secrets.QUAY_ROBOT_TOKEN }} 92 | - name: Create manifest list and push 93 | working-directory: /tmp/digests 94 | run: | 95 | docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \ 96 | $(printf '${{ env.REGISTRY_IMAGE }}@sha256:%s ' *) 97 | 98 | # - name: Push README to Quay.io 99 | # uses: christian-korneck/update-container-description-action@v1 100 | # env: 101 | # DOCKER_APIKEY: ${{ secrets.APIKEY__QUAY_IO }} 102 | # with: 103 | # destination_container_repo: quay.io/redlib/redlib 104 | # provider: quay 105 | # readme_file: 'README.md' 106 | 107 | - name: Inspect image 108 | run: | 109 | docker buildx imagetools inspect ${{ env.REGISTRY_IMAGE }}:${{ steps.meta.outputs.version }} 110 | -------------------------------------------------------------------------------- /templates/base.html: -------------------------------------------------------------------------------- 1 | {% import "utils.html" as utils %} 2 | 3 | 4 | 5 | 6 | {% block head %} 7 | {% block title %}Redlib{% endblock %} 8 | 9 | 10 | 11 | {% if crate::utils::disable_indexing() %} 12 | 13 | {% endif %} 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 |
32 | {% endblock %} 33 | 34 | 39 | 40 | 67 | 68 | 69 | {% block body %} 70 |
71 | {% block content %} 72 | {% endblock %} 73 |
74 | {% endblock %} 75 | 76 | 77 | {% block footer %} 78 | 83 | {% endblock %} 84 | 85 | 86 | -------------------------------------------------------------------------------- /CREDITS: -------------------------------------------------------------------------------- 1 | 5trongthany <65565784+5trongthany@users.noreply.github.com> 2 | 674Y3r <87250374+674Y3r@users.noreply.github.com> 3 | accountForIssues <52367365+accountForIssues@users.noreply.github.com> 4 | Adrian Lebioda 5 | alefvanoon <53198048+alefvanoon@users.noreply.github.com> 6 | Alexandre Iooss 7 | alyaeanyx 8 | AndreVuillemot160 <84594011+AndreVuillemot160@users.noreply.github.com> 9 | Andrew Kaufman <57281817+andrew-kaufman@users.noreply.github.com> 10 | Artemis <51862164+artemislena@users.noreply.github.com> 11 | arthomnix <35371030+arthomnix@users.noreply.github.com> 12 | Arya K <73596856+gi-yt@users.noreply.github.com> 13 | Austin Huang 14 | Basti 15 | Ben Smith <37027883+smithbm2316@users.noreply.github.com> 16 | BobIsMyManager 17 | curlpipe <11898833+curlpipe@users.noreply.github.com> 18 | dacousb <53299044+dacousb@users.noreply.github.com> 19 | Daniel Valentine 20 | Daniel Valentine 21 | dbrennand <52419383+dbrennand@users.noreply.github.com> 22 | dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> 23 | Diego Magdaleno <38844659+DiegoMagdaleno@users.noreply.github.com> 24 | domve 25 | Dyras 26 | Edward <101938856+EdwardLangdon@users.noreply.github.com> 27 | elliot <75391956+ellieeet123@users.noreply.github.com> 28 | erdnaxe 29 | Esmail EL BoB 30 | FireMasterK <20838718+FireMasterK@users.noreply.github.com> 31 | George Roubos 32 | git-bruh 33 | gmnsii <95436780+gmnsii@users.noreply.github.com> 34 | guaddy <67671414+guaddy@users.noreply.github.com> 35 | Harsh Mishra 36 | igna 37 | imabritishcow 38 | Johannes Schleifenbaum 39 | Josiah <70736638+fres7h@users.noreply.github.com> 40 | JPyke3 41 | Kavin <20838718+FireMasterK@users.noreply.github.com> 42 | Kazi 43 | Kieran 
<42723993+EnderDev@users.noreply.github.com> 44 | Kieran 45 | Kyle Roth 46 | laazyCmd 47 | Laurențiu Nicola 48 | Lena <102762572+MarshDeer@users.noreply.github.com> 49 | Macic <46872282+Macic-Dev@users.noreply.github.com> 50 | Mario A <10923513+Midblyte@users.noreply.github.com> 51 | Matthew Crossman 52 | Matthew E 53 | Matthew Esposito 54 | Mennaruuk <52135169+Mennaruuk@users.noreply.github.com> 55 | mikupls <93015331+mikupls@users.noreply.github.com> 56 | Nainar 57 | Nathan Moos 58 | Nicholas Christopher 59 | Nick Lowery 60 | Nico 61 | NKIPSC <15067635+NKIPSC@users.noreply.github.com> 62 | o69mar <119129086+o69mar@users.noreply.github.com> 63 | obeho <71698631+obeho@users.noreply.github.com> 64 | obscurity 65 | Om G <34579088+OxyMagnesium@users.noreply.github.com> 66 | pin <90570748+0323pin@users.noreply.github.com> 67 | potatoesAreGod <118043038+potatoesAreGod@users.noreply.github.com> 68 | RiversideRocks <59586759+RiversideRocks@users.noreply.github.com> 69 | robin <8597693+robrobinbin@users.noreply.github.com> 70 | Robin <8597693+robrobinbin@users.noreply.github.com> 71 | robrobinbin <> 72 | robrobinbin <8597693+robrobinbin@users.noreply.github.com> 73 | robrobinbin 74 | Ruben Elshof <15641671+rubenelshof@users.noreply.github.com> 75 | Rupert Angermeier 76 | Scoder12 <34356756+Scoder12@users.noreply.github.com> 77 | Slayer <51095261+GhostSlayer@users.noreply.github.com> 78 | Soheb 79 | somini 80 | somoso 81 | Spenser Black 82 | Spike <19519553+spikecodes@users.noreply.github.com> 83 | spikecodes <19519553+spikecodes@users.noreply.github.com> 84 | sybenx 85 | TheCultLeader666 <65368815+TheCultLeader666@users.noreply.github.com> 86 | TheFrenchGhosty <47571719+TheFrenchGhosty@users.noreply.github.com> 87 | The TwilightBlood 88 | tirz <36501933+tirz@users.noreply.github.com> 89 | Tokarak <63452145+Tokarak@users.noreply.github.com> 90 | Tsvetomir Bonev 91 | Vladislav Nepogodin 92 | Walkx 93 | Wichai <1482605+Chengings@users.noreply.github.com> 94 | wsy2220 95 | 
xatier 96 | Zach <72994911+zachjmurphy@users.noreply.github.com> 97 | -------------------------------------------------------------------------------- /scripts/update_oauth_resources.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Requirements 4 | # - curl 5 | # - rg 6 | # - jq 7 | 8 | # Fetch iOS app versions 9 | ios_version_list=$(curl -s "https://ipaarchive.com/app/usa/1064216828" | rg "(20\d{2}\.\d+.\d+) / (\d+)" --only-matching -r "Version \$1/Build \$2" | sort | uniq) 10 | 11 | # Count the number of lines in the version list 12 | ios_app_count=$(echo "$ios_version_list" | wc -l) 13 | 14 | echo -e "Fetching \e[34m$ios_app_count iOS app versions...\e[0m" 15 | 16 | 17 | # Specify the filename as a variable 18 | filename="src/oauth_resources.rs" 19 | 20 | # Add comment that it is user generated 21 | echo "// This file was generated by scripts/update_oauth_resources.sh" > "$filename" 22 | echo "// Rerun scripts/update_oauth_resources.sh to update this file" >> "$filename" 23 | echo "// Please do not edit manually" >> "$filename" 24 | echo "// Filled in with real app versions" >> "$filename" 25 | 26 | # Open the array in the source file 27 | echo "pub const _IOS_APP_VERSION_LIST: &[&str; $ios_app_count] = &[" >> "$filename" 28 | 29 | num=0 30 | 31 | # Append the version list to the source file 32 | echo "$ios_version_list" | while IFS= read -r line; do 33 | num=$((num+1)) 34 | echo " \"$line\"," >> "$filename" 35 | echo -e "[$num/$ios_app_count] Fetched \e[34m$line\e[0m." 
36 | done 37 | 38 | # Close the array in the source file 39 | echo "];" >> "$filename" 40 | 41 | # Fetch Android app versions 42 | page_1=$(curl -s "https://apkcombo.com/reddit/com.reddit.frontpage/old-versions/" | rg "" -r "https://apkcombo.com\$1" | sort | uniq | sed 's/ //g') 43 | # Append with pages 44 | page_2=$(curl -s "https://apkcombo.com/reddit/com.reddit.frontpage/old-versions?page=2" | rg "" -r "https://apkcombo.com\$1" | sort | uniq | sed 's/ //g') 45 | page_3=$(curl -s "https://apkcombo.com/reddit/com.reddit.frontpage/old-versions?page=3" | rg "" -r "https://apkcombo.com\$1" | sort | uniq | sed 's/ //g') 46 | page_4=$(curl -s "https://apkcombo.com/reddit/com.reddit.frontpage/old-versions?page=4" | rg "" -r "https://apkcombo.com\$1" | sort | uniq | sed 's/ //g') 47 | page_5=$(curl -s "https://apkcombo.com/reddit/com.reddit.frontpage/old-versions?page=5" | rg "" -r "https://apkcombo.com\$1" | sort | uniq | sed 's/ //g') 48 | 49 | # Concatenate all pages 50 | versions="${page_1}" 51 | versions+=$'\n' 52 | versions+="${page_2}" 53 | versions+=$'\n' 54 | versions+="${page_3}" 55 | versions+=$'\n' 56 | versions+="${page_4}" 57 | versions+=$'\n' 58 | versions+="${page_5}" 59 | 60 | # Count the number of lines in the version list 61 | android_count=$(echo "$versions" | wc -l) 62 | 63 | echo -e "Fetching \e[32m$android_count Android app versions...\e[0m" 64 | 65 | # Append to the source file 66 | echo "pub const ANDROID_APP_VERSION_LIST: &[&str; $android_count] = &[" >> "$filename" 67 | 68 | num=0 69 | 70 | # For each in versions, curl the page and extract the build number 71 | echo "$versions" | while IFS= read -r line; do 72 | num=$((num+1)) 73 | fetch_page=$(curl -s "$line") 74 | build=$(echo "$fetch_page" | rg "\((\d+)\)" --only-matching -r "\$1" | head -n1) 75 | version=$(echo "$fetch_page" | rg "Reddit (20\d{2}\.\d+\.\d+)" --only-matching -r "\$1" | head -n1) 76 | echo " \"Version $version/Build $build\"," >> "$filename" 77 | echo -e 
"[$num/$android_count] Fetched \e[32mVersion $version/Build $build\e[0m." 78 | done 79 | 80 | # Close the array in the source file 81 | echo "];" >> "$filename" 82 | 83 | # Retrieve iOS versions 84 | table=$(curl -s "https://en.wikipedia.org/w/api.php?action=parse&page=IOS_17&prop=wikitext§ion=31&format=json" | jq ".parse.wikitext.\"*\"" | rg "(17\.[\d\.]*)\\\n\|(\w*)\\\n\|" --only-matching -r "Version \$1 (Build \$2)") 85 | 86 | # Count the number of lines in the version list 87 | ios_count=$(echo "$table" | wc -l) 88 | 89 | echo -e "Fetching \e[34m$ios_count iOS versions...\e[0m" 90 | 91 | # Append to the source file 92 | echo "pub const _IOS_OS_VERSION_LIST: &[&str; $ios_count] = &[" >> "$filename" 93 | 94 | num=0 95 | 96 | # For each in versions, curl the page and extract the build number 97 | echo "$table" | while IFS= read -r line; do 98 | num=$((num+1)) 99 | echo " \"$line\"," >> "$filename" 100 | echo -e "\e[34m[$num/$ios_count] Fetched $line\e[0m." 101 | done 102 | 103 | # Close the array in the source file 104 | echo "];" >> "$filename" 105 | 106 | echo -e "\e[34mRetrieved $ios_app_count iOS app versions.\e[0m" 107 | echo -e "\e[32mRetrieved $android_count Android app versions.\e[0m" 108 | echo -e "\e[34mRetrieved $ios_count iOS versions.\e[0m" 109 | 110 | echo -e "\e[34mTotal: $((ios_app_count + android_count + ios_count))\e[0m" 111 | 112 | echo -e "\e[32mSuccess!\e[0m" 113 | -------------------------------------------------------------------------------- /static/playHLSVideo.js: -------------------------------------------------------------------------------- 1 | // @license http://www.gnu.org/licenses/agpl-3.0.html AGPL-3.0 2 | (function () { 3 | const configElement = document.getElementById('video_quality'); 4 | const qualitySetting = configElement.getAttribute('data-value'); 5 | if (Hls.isSupported()) { 6 | var videoSources = document.querySelectorAll("video source[type='application/vnd.apple.mpegurl']"); 7 | videoSources.forEach(function (source) { 8 
| var playlist = source.src; 9 | 10 | var oldVideo = source.parentNode; 11 | var autoplay = oldVideo.classList.contains("hls_autoplay"); 12 | 13 | // If HLS is supported natively then don't use hls.js 14 | if (oldVideo.canPlayType(source.type) === "probably") { 15 | if (autoplay) { 16 | oldVideo.play(); 17 | } 18 | return; 19 | } 20 | 21 | // Replace video with copy that will have all "source" elements removed 22 | var newVideo = oldVideo.cloneNode(true); 23 | var allSources = newVideo.querySelectorAll("source"); 24 | allSources.forEach(function (source) { 25 | source.remove(); 26 | }); 27 | 28 | // Empty source to enable play event 29 | newVideo.src = "about:blank"; 30 | 31 | oldVideo.parentNode.replaceChild(newVideo, oldVideo); 32 | 33 | function getIndexOfDefault(length) { 34 | switch (qualitySetting) { 35 | case 'best': 36 | return length - 1; 37 | case 'medium': 38 | return Math.floor(length / 2); 39 | case 'worst': 40 | return 0; 41 | default: 42 | return length - 1; 43 | } 44 | } 45 | 46 | function initializeHls() { 47 | newVideo.removeEventListener('play', initializeHls); 48 | var hls = new Hls({ autoStartLoad: false }); 49 | hls.loadSource(playlist); 50 | hls.attachMedia(newVideo); 51 | hls.on(Hls.Events.MANIFEST_PARSED, function () { 52 | hls.loadLevel = getIndexOfDefault(hls.levels.length); 53 | var availableLevels = hls.levels.map(function(level) { 54 | return { 55 | height: level.height, 56 | width: level.width, 57 | bitrate: level.bitrate, 58 | }; 59 | }); 60 | 61 | addQualitySelector(newVideo, hls, availableLevels); 62 | 63 | hls.startLoad(); 64 | newVideo.play(); 65 | }); 66 | 67 | hls.on(Hls.Events.ERROR, function (event, data) { 68 | var errorType = data.type; 69 | var errorFatal = data.fatal; 70 | if (errorFatal) { 71 | switch (errorType) { 72 | case Hls.ErrorType.NETWORK_ERROR: 73 | hls.startLoad(); 74 | break; 75 | case Hls.ErrorType.MEDIA_ERROR: 76 | hls.recoverMediaError(); 77 | break; 78 | default: 79 | hls.destroy(); 80 | break; 81 | } 82 | 
} 83 | 84 | console.error("HLS error", data); 85 | }); 86 | } 87 | 88 | function addQualitySelector(videoElement, hlsInstance, availableLevels) { 89 | var qualitySelector = document.createElement('select'); 90 | qualitySelector.classList.add('quality-selector'); 91 | var defaultIndex = getIndexOfDefault(availableLevels.length); 92 | availableLevels.forEach(function (level, index) { 93 | var option = document.createElement('option'); 94 | option.value = index.toString(); 95 | var bitrate = (level.bitrate / 1_000).toFixed(0); 96 | option.text = level.height + 'p (' + bitrate + ' kbps)'; 97 | if (index === defaultIndex) { 98 | option.selected = "selected"; 99 | } 100 | qualitySelector.appendChild(option); 101 | }); 102 | qualitySelector.selectedIndex = defaultIndex; 103 | qualitySelector.addEventListener('change', function () { 104 | var selectedIndex = qualitySelector.selectedIndex; 105 | hlsInstance.nextLevel = selectedIndex; 106 | hlsInstance.startLoad(); 107 | }); 108 | 109 | videoElement.parentNode.appendChild(qualitySelector); 110 | } 111 | 112 | newVideo.addEventListener('play', initializeHls); 113 | 114 | if (autoplay) { 115 | newVideo.play(); 116 | } 117 | }); 118 | } else { 119 | var videos = document.querySelectorAll("video.hls_autoplay"); 120 | videos.forEach(function (video) { 121 | video.setAttribute("autoplay", ""); 122 | }); 123 | } 124 | })(); 125 | // @license-end 126 | -------------------------------------------------------------------------------- /templates/search.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | {% import "utils.html" as utils %} 3 | 4 | {% block title %}Redlib: search results - {{ params.q }}{% endblock %} 5 | 6 | {% block subscriptions %} 7 | {% call utils::sub_list("") %} 8 | {% endblock %} 9 | 10 | {% block content %} 11 |
12 |
13 |
14 | 15 |
16 | {% if sub != "" %} 17 |
18 | 19 | 20 |
21 | {% endif %} 22 | {% if params.typed == "sr_user" %}{% endif %} 23 | 26 | {% if params.sort != "new" %} 27 | 30 | {% endif %} 31 |
32 |
33 | 34 | 41 |
42 | 43 | {% if !is_filtered %} 44 | {% if subreddits.len() > 0 || params.typed == "sr_user" %} 45 |
66 | {% endif %} 67 | {% endif %} 68 | 69 | {% if all_posts_hidden_nsfw %} 70 | All posts are hidden because they are NSFW. Enable "Show NSFW posts" in settings to view. 71 | {% endif %} 72 | 73 | {% if no_posts %} 74 |
No posts were found.
75 | {% endif %} 76 | 77 | {% if all_posts_filtered %} 78 | (All content on this page has been filtered) 79 | {% else if is_filtered %} 80 | (Content from r/{{ sub }} has been filtered) 81 | {% else if params.typed != "sr_user" %} 82 | {% for post in posts %} 83 | {% if post.flags.nsfw && prefs.show_nsfw != "on" %} 84 | {% else if !post.title.is_empty() %} 85 | {% call utils::post_in_list(post) %} 86 | {% else %} 87 |
88 |
89 |

90 | {% if prefs.hide_score != "on" %} 91 | {{ post.score.0 }} 92 | {% else %} 93 | • 94 | {% endif %} 95 |

96 |
97 |
98 |
99 | 100 | COMMENT 101 | {{ post.rel_time }} 102 | 103 |

{{ post.body }}

104 |
105 |
106 | {% endif %} 107 | {% endfor %} 108 | {% endif %} 109 | {% if prefs.use_hls == "on" %} 110 | 111 | 112 | {% endif %} 113 | 114 | {% if params.typed != "sr_user" %} 115 |
116 | {% if params.before != "" %} 117 | PREV 120 | {% endif %} 121 | 122 | {% if params.after != "" %} 123 | NEXT 126 | {% endif %} 127 |
128 | {% endif %} 129 |
130 | {% endblock %} 131 | -------------------------------------------------------------------------------- /templates/duplicates.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | {% import "utils.html" as utils %} 3 | 4 | {% block title %}{{ post.title }} - r/{{ post.community }}{% endblock %} 5 | 6 | {% block search %} 7 | {% call utils::search(["/r/", post.community.as_str()].concat(), "") %} 8 | {% endblock %} 9 | 10 | {% block root %}/r/{{ post.community }}{% endblock %}{% block location %}r/{{ post.community }}{% endblock %} 11 | {% block head %} 12 | {% call super() %} 13 | {% endblock %} 14 | 15 | {% block subscriptions %} 16 | {% call utils::sub_list(post.community.as_str()) %} 17 | {% endblock %} 18 | 19 | {% block content %} 20 |
21 | {% call utils::post(post) %} 22 | 23 | 24 | {% if post.num_duplicates == 0 %} 25 | (No duplicates found) 26 | {% else if post.flags.nsfw && prefs.show_nsfw != "on" %} 27 | (Enable "Show NSFW posts" in settings to show duplicates) 28 | {% else %} 29 |

Duplicates

30 | {% if num_posts_filtered > 0 %} 31 | 32 | {% if all_posts_filtered %} 33 | (All posts have been filtered) 34 | {% else %} 35 | (Some posts have been filtered) 36 | {% endif %} 37 | 38 | {% endif %} 39 | 40 | 50 | 51 |
52 | {% for post in duplicates -%} 53 | {# TODO: utils::post should be reworked to permit a truncated display of a post as below #} 54 | {% if !(post.flags.nsfw) || prefs.show_nsfw == "on" %} 55 |
56 |

57 | {% let community -%} 58 | {% if post.community.starts_with("u_") -%} 59 | {% let community = format!("u/{}", &post.community[2..]) -%} 60 | {% else -%} 61 | {% let community = format!("r/{}", post.community) -%} 62 | {% endif -%} 63 | {{ post.community }} 64 | 65 | 66 | 67 | {{ post.rel_time }} 68 | {% if !post.awards.is_empty() && prefs.hide_awards != "on" %} 69 | {% for award in post.awards.clone() %} 70 | 71 | {{ award.name }} 72 | 73 | {% endfor %} 74 | {% endif %} 75 |

76 |

77 | {% if post.flair.flair_parts.len() > 0 %} 78 | {% call utils::render_flair(post.flair.flair_parts) %} 82 | {% endif %} 83 | {{ post.title }}{% if post.flags.nsfw %} NSFW{% endif %} 84 |

85 | 86 |
87 | {% if prefs.hide_score != "on" %} 88 | {{ post.score.0 }} 89 | {% else %} 90 | • 91 | {% endif %} 92 | Upvotes
93 | 96 | 97 |
98 | {% endif %} 99 | {%- endfor %} 100 |
101 | 102 |
103 | {% if params.before != "" %} 104 | PREV 105 | {% endif %} 106 | 107 | {% if params.after != "" %} 108 | NEXT 109 | {% endif %} 110 |
111 | {% endif %} 112 |
113 | {% endblock %} 114 | -------------------------------------------------------------------------------- /templates/subreddit.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | {% import "utils.html" as utils %} 3 | 4 | {% block title %} 5 | {% if sub.title != "" %}{{ sub.title }} 6 | {% else if sub.name != "" %}{{ sub.name }} 7 | {% else %}Redlib{% endif %} 8 | {% endblock %} 9 | 10 | {% block search %} 11 | {% call utils::search(["/r/", sub.name.as_str()].concat(), "") %} 12 | {% endblock %} 13 | 14 | {% block subscriptions %} 15 | {% call utils::sub_list(sub.name.as_str()) %} 16 | {% endblock %} 17 | 18 | {% block body %} 19 |
20 | {% if !is_filtered %} 21 |
22 |
23 |
24 | {% if sub.name.is_empty() %} 25 | {% call utils::sort("", ["hot", "new", "top", "rising", "controversial"], sort.0) %} 26 | {% else %} 27 | {% call utils::sort(["/r/", sub.name.as_str()].concat(), ["hot", "new", "top", "rising", "controversial"], sort.0) %} 28 | {% endif %} 29 |
30 | {% if sort.0 == "top" || sort.0 == "controversial" %} 33 | 40 | {% endif %} 41 |
42 | 43 | {% if sub.name.contains("+") %} 44 |
45 | 46 |
47 | {% endif %} 48 | 49 | {% if all_posts_hidden_nsfw %} 50 |
All posts are hidden because they are NSFW. Enable "Show NSFW posts" in settings to view.
51 | {% endif %} 52 | 53 | {% if no_posts %} 54 |
No posts were found.
55 | {% endif %} 56 | 57 | {% if all_posts_filtered %} 58 |
(All content on this page has been filtered)
59 | {% else %} 60 |
61 | {% for post in posts %} 62 | {% if !(post.flags.nsfw && prefs.show_nsfw != "on") %} 63 |
64 | {% call utils::post_in_list(post) %} 65 | {% endif %} 66 | {% endfor %} 67 | {% if prefs.use_hls == "on" %} 68 | 69 | 70 | {% endif %} 71 |
72 | {% endif %} 73 | 74 |
75 | {% if !ends.0.is_empty() %} 76 | PREV 77 | {% endif %} 78 | 79 | {% if !ends.1.is_empty() %} 80 | NEXT 81 | {% endif %} 82 |
83 |
84 | {% endif %} 85 | {% if is_filtered || (!sub.name.is_empty() && sub.name != "all" && sub.name != "popular" && !sub.name.contains("+")) && prefs.hide_sidebar_and_summary != "on" %} 86 | 158 | {% endif %} 159 |
160 | {% endblock %} 161 | -------------------------------------------------------------------------------- /src/oauth_resources.rs: -------------------------------------------------------------------------------- 1 | // This file was generated by scripts/update_oauth_resources.sh 2 | // Rerun scripts/update_oauth_resources.sh to update this file 3 | // Please do not edit manually 4 | // Filled in with real app versions 5 | pub const _IOS_APP_VERSION_LIST: &[&str; 1] = &[""]; 6 | pub const ANDROID_APP_VERSION_LIST: &[&str; 150] = &[ 7 | "Version 2024.22.1/Build 1652272", 8 | "Version 2024.23.1/Build 1665606", 9 | "Version 2024.24.1/Build 1682520", 10 | "Version 2024.25.0/Build 1693595", 11 | "Version 2024.25.2/Build 1700401", 12 | "Version 2024.25.3/Build 1703490", 13 | "Version 2024.26.0/Build 1710470", 14 | "Version 2024.26.1/Build 1717435", 15 | "Version 2024.28.0/Build 1737665", 16 | "Version 2024.28.1/Build 1741165", 17 | "Version 2024.30.0/Build 1770787", 18 | "Version 2024.31.0/Build 1786202", 19 | "Version 2024.32.0/Build 1809095", 20 | "Version 2024.32.1/Build 1813258", 21 | "Version 2024.33.0/Build 1819908", 22 | "Version 2024.34.0/Build 1837909", 23 | "Version 2024.35.0/Build 1861437", 24 | "Version 2024.36.0/Build 1875012", 25 | "Version 2024.37.0/Build 1888053", 26 | "Version 2024.38.0/Build 1902791", 27 | "Version 2024.39.0/Build 1916713", 28 | "Version 2024.40.0/Build 1928580", 29 | "Version 2024.41.0/Build 1941199", 30 | "Version 2024.41.1/Build 1947805", 31 | "Version 2024.42.0/Build 1952440", 32 | "Version 2024.43.0/Build 1972250", 33 | "Version 2024.44.0/Build 1988458", 34 | "Version 2024.45.0/Build 2001943", 35 | "Version 2024.46.0/Build 2012731", 36 | "Version 2024.47.0/Build 2029755", 37 | "Version 2023.48.0/Build 1319123", 38 | "Version 2023.49.0/Build 1321715", 39 | "Version 2023.49.1/Build 1322281", 40 | "Version 2023.50.0/Build 1332338", 41 | "Version 2023.50.1/Build 1345844", 42 | "Version 2024.02.0/Build 1368985", 43 | "Version 
2024.03.0/Build 1379408", 44 | "Version 2024.04.0/Build 1391236", 45 | "Version 2024.05.0/Build 1403584", 46 | "Version 2024.06.0/Build 1418489", 47 | "Version 2024.07.0/Build 1429651", 48 | "Version 2024.08.0/Build 1439531", 49 | "Version 2024.10.0/Build 1470045", 50 | "Version 2024.10.1/Build 1478645", 51 | "Version 2024.11.0/Build 1480707", 52 | "Version 2024.12.0/Build 1494694", 53 | "Version 2024.13.0/Build 1505187", 54 | "Version 2024.14.0/Build 1520556", 55 | "Version 2024.15.0/Build 1536823", 56 | "Version 2024.16.0/Build 1551366", 57 | "Version 2024.17.0/Build 1568106", 58 | "Version 2024.18.0/Build 1577901", 59 | "Version 2024.18.1/Build 1585304", 60 | "Version 2024.19.0/Build 1593346", 61 | "Version 2024.20.0/Build 1612800", 62 | "Version 2024.20.1/Build 1615586", 63 | "Version 2024.20.2/Build 1624969", 64 | "Version 2024.20.3/Build 1624970", 65 | "Version 2024.21.0/Build 1631686", 66 | "Version 2024.22.0/Build 1645257", 67 | "Version 2023.21.0/Build 956283", 68 | "Version 2023.22.0/Build 968223", 69 | "Version 2023.23.0/Build 983896", 70 | "Version 2023.24.0/Build 998541", 71 | "Version 2023.25.0/Build 1014750", 72 | "Version 2023.25.1/Build 1018737", 73 | "Version 2023.26.0/Build 1019073", 74 | "Version 2023.27.0/Build 1031923", 75 | "Version 2023.28.0/Build 1046887", 76 | "Version 2023.29.0/Build 1059855", 77 | "Version 2023.30.0/Build 1078734", 78 | "Version 2023.31.0/Build 1091027", 79 | "Version 2023.32.0/Build 1109919", 80 | "Version 2023.32.1/Build 1114141", 81 | "Version 2023.33.1/Build 1129741", 82 | "Version 2023.34.0/Build 1144243", 83 | "Version 2023.35.0/Build 1157967", 84 | "Version 2023.36.0/Build 1168982", 85 | "Version 2023.37.0/Build 1182743", 86 | "Version 2023.38.0/Build 1198522", 87 | "Version 2023.39.0/Build 1211607", 88 | "Version 2023.39.1/Build 1221505", 89 | "Version 2023.40.0/Build 1221521", 90 | "Version 2023.41.0/Build 1233125", 91 | "Version 2023.41.1/Build 1239615", 92 | "Version 2023.42.0/Build 1245088", 93 | "Version 
2023.43.0/Build 1257426", 94 | "Version 2023.44.0/Build 1268622", 95 | "Version 2023.45.0/Build 1281371", 96 | "Version 2023.47.0/Build 1303604", 97 | "Version 2022.42.0/Build 638508", 98 | "Version 2022.43.0/Build 648277", 99 | "Version 2022.44.0/Build 664348", 100 | "Version 2022.45.0/Build 677985", 101 | "Version 2023.01.0/Build 709875", 102 | "Version 2023.02.0/Build 717912", 103 | "Version 2023.03.0/Build 729220", 104 | "Version 2023.04.0/Build 744681", 105 | "Version 2023.05.0/Build 755453", 106 | "Version 2023.06.0/Build 775017", 107 | "Version 2023.07.0/Build 788827", 108 | "Version 2023.07.1/Build 790267", 109 | "Version 2023.08.0/Build 798718", 110 | "Version 2023.09.0/Build 812015", 111 | "Version 2023.09.1/Build 816833", 112 | "Version 2023.10.0/Build 821148", 113 | "Version 2023.11.0/Build 830610", 114 | "Version 2023.12.0/Build 841150", 115 | "Version 2023.13.0/Build 852246", 116 | "Version 2023.14.0/Build 861593", 117 | "Version 2023.14.1/Build 864826", 118 | "Version 2023.15.0/Build 870628", 119 | "Version 2023.16.0/Build 883294", 120 | "Version 2023.16.1/Build 886269", 121 | "Version 2023.17.0/Build 896030", 122 | "Version 2023.17.1/Build 900542", 123 | "Version 2023.18.0/Build 911877", 124 | "Version 2023.19.0/Build 927681", 125 | "Version 2023.20.0/Build 943980", 126 | "Version 2023.20.1/Build 946732", 127 | "Version 2022.20.0/Build 487703", 128 | "Version 2022.21.0/Build 492436", 129 | "Version 2022.22.0/Build 498700", 130 | "Version 2022.23.0/Build 502374", 131 | "Version 2022.23.1/Build 506606", 132 | "Version 2022.24.0/Build 510950", 133 | "Version 2022.24.1/Build 513462", 134 | "Version 2022.25.0/Build 515072", 135 | "Version 2022.25.1/Build 516394", 136 | "Version 2022.25.2/Build 519915", 137 | "Version 2022.26.0/Build 521193", 138 | "Version 2022.27.0/Build 527406", 139 | "Version 2022.27.1/Build 529687", 140 | "Version 2022.28.0/Build 533235", 141 | "Version 2022.30.0/Build 548620", 142 | "Version 2022.31.0/Build 556666", 143 | "Version 
2022.31.1/Build 562612", 144 | "Version 2022.32.0/Build 567875", 145 | "Version 2022.33.0/Build 572600", 146 | "Version 2022.34.0/Build 579352", 147 | "Version 2022.35.0/Build 588016", 148 | "Version 2022.35.1/Build 589034", 149 | "Version 2022.36.0/Build 593102", 150 | "Version 2022.37.0/Build 601691", 151 | "Version 2022.38.0/Build 607460", 152 | "Version 2022.39.0/Build 615385", 153 | "Version 2022.39.1/Build 619019", 154 | "Version 2022.40.0/Build 624782", 155 | "Version 2022.41.0/Build 630468", 156 | "Version 2022.41.1/Build 634168", 157 | ]; 158 | pub const _IOS_OS_VERSION_LIST: &[&str; 1] = &[""]; 159 | -------------------------------------------------------------------------------- /src/search.rs: -------------------------------------------------------------------------------- 1 | #![allow(clippy::cmp_owned)] 2 | 3 | // CRATES 4 | use crate::utils::{self, catch_random, error, filter_posts, format_num, format_url, get_filters, param, redirect, setting, template, val, Post, Preferences}; 5 | use crate::{ 6 | client::json, 7 | server::RequestExt, 8 | subreddit::{can_access_quarantine, quarantine}, 9 | }; 10 | use askama::Template; 11 | use hyper::{Body, Request, Response}; 12 | use regex::Regex; 13 | use std::sync::LazyLock; 14 | 15 | // STRUCTS 16 | struct SearchParams { 17 | q: String, 18 | sort: String, 19 | t: String, 20 | before: String, 21 | after: String, 22 | restrict_sr: String, 23 | typed: String, 24 | } 25 | 26 | // STRUCTS 27 | struct Subreddit { 28 | name: String, 29 | url: String, 30 | icon: String, 31 | description: String, 32 | subscribers: (String, String), 33 | } 34 | 35 | #[derive(Template)] 36 | #[template(path = "search.html")] 37 | struct SearchTemplate { 38 | posts: Vec, 39 | subreddits: Vec, 40 | sub: String, 41 | params: SearchParams, 42 | prefs: Preferences, 43 | url: String, 44 | /// Whether the subreddit itself is filtered. 
45 | is_filtered: bool, 46 | /// Whether all fetched posts are filtered (to differentiate between no posts fetched in the first place, 47 | /// and all fetched posts being filtered). 48 | all_posts_filtered: bool, 49 | /// Whether all posts were hidden because they are NSFW (and user has disabled show NSFW) 50 | all_posts_hidden_nsfw: bool, 51 | no_posts: bool, 52 | } 53 | 54 | /// Regex matched against search queries to determine if they are reddit urls. 55 | static REDDIT_URL_MATCH: LazyLock = LazyLock::new(|| Regex::new(r"^https?://([^\./]+\.)*reddit.com/").unwrap()); 56 | 57 | // SERVICES 58 | pub async fn find(req: Request) -> Result, String> { 59 | // This ensures that during a search, no NSFW posts are fetched at all 60 | let nsfw_results = if setting(&req, "show_nsfw") == "on" && !utils::sfw_only() { 61 | "&include_over_18=on" 62 | } else { 63 | "" 64 | }; 65 | let uri_path = req.uri().path().replace("+", "%2B"); 66 | let path = format!("{}.json?{}{}&raw_json=1", uri_path, req.uri().query().unwrap_or_default(), nsfw_results); 67 | let mut query = param(&path, "q").unwrap_or_default(); 68 | query = REDDIT_URL_MATCH.replace(&query, "").to_string(); 69 | 70 | if query.is_empty() { 71 | return Ok(redirect("/")); 72 | } 73 | 74 | if query.starts_with("r/") || query.starts_with("user/") { 75 | return Ok(redirect(&format!("/{query}"))); 76 | } 77 | 78 | if query.starts_with("R/") { 79 | return Ok(redirect(&format!("/r{}", &query[1..]))); 80 | } 81 | 82 | if query.starts_with("u/") || query.starts_with("U/") { 83 | return Ok(redirect(&format!("/user{}", &query[1..]))); 84 | } 85 | 86 | let sub = req.param("sub").unwrap_or_default(); 87 | let quarantined = can_access_quarantine(&req, &sub); 88 | // Handle random subreddits 89 | if let Ok(random) = catch_random(&sub, "/find").await { 90 | return Ok(random); 91 | } 92 | 93 | let typed = param(&path, "type").unwrap_or_default(); 94 | 95 | let sort = param(&path, "sort").unwrap_or_else(|| "relevance".to_string()); 96 | 
let filters = get_filters(&req); 97 | 98 | // If search is not restricted to this subreddit, show other subreddits in search results 99 | let subreddits = if param(&path, "restrict_sr").is_none() { 100 | let mut subreddits = search_subreddits(&query, &typed).await; 101 | subreddits.retain(|s| !filters.contains(s.name.as_str())); 102 | subreddits 103 | } else { 104 | Vec::new() 105 | }; 106 | 107 | let url = String::from(req.uri().path_and_query().map_or("", |val| val.as_str())); 108 | 109 | // If all requested subs are filtered, we don't need to fetch posts. 110 | if sub.split('+').all(|s| filters.contains(s)) { 111 | Ok(template(&SearchTemplate { 112 | posts: Vec::new(), 113 | subreddits, 114 | sub, 115 | params: SearchParams { 116 | q: query.replace('"', """), 117 | sort, 118 | t: param(&path, "t").unwrap_or_default(), 119 | before: param(&path, "after").unwrap_or_default(), 120 | after: String::new(), 121 | restrict_sr: param(&path, "restrict_sr").unwrap_or_default(), 122 | typed, 123 | }, 124 | prefs: Preferences::new(&req), 125 | url, 126 | is_filtered: true, 127 | all_posts_filtered: false, 128 | all_posts_hidden_nsfw: false, 129 | no_posts: false, 130 | })) 131 | } else { 132 | match Post::fetch(&path, quarantined).await { 133 | Ok((mut posts, after)) => { 134 | let (_, all_posts_filtered) = filter_posts(&mut posts, &filters); 135 | let no_posts = posts.is_empty(); 136 | let all_posts_hidden_nsfw = !no_posts && (posts.iter().all(|p| p.flags.nsfw) && setting(&req, "show_nsfw") != "on"); 137 | Ok(template(&SearchTemplate { 138 | posts, 139 | subreddits, 140 | sub, 141 | params: SearchParams { 142 | q: query.replace('"', """), 143 | sort, 144 | t: param(&path, "t").unwrap_or_default(), 145 | before: param(&path, "after").unwrap_or_default(), 146 | after, 147 | restrict_sr: param(&path, "restrict_sr").unwrap_or_default(), 148 | typed, 149 | }, 150 | prefs: Preferences::new(&req), 151 | url, 152 | is_filtered: false, 153 | all_posts_filtered, 154 | 
all_posts_hidden_nsfw, 155 | no_posts, 156 | })) 157 | } 158 | Err(msg) => { 159 | if msg == "quarantined" || msg == "gated" { 160 | let sub = req.param("sub").unwrap_or_default(); 161 | Ok(quarantine(&req, sub, &msg)) 162 | } else { 163 | error(req, &msg).await 164 | } 165 | } 166 | } 167 | } 168 | } 169 | 170 | async fn search_subreddits(q: &str, typed: &str) -> Vec { 171 | let limit = if typed == "sr_user" { "50" } else { "3" }; 172 | let subreddit_search_path = format!("/subreddits/search.json?q={}&limit={limit}", q.replace(' ', "+")); 173 | 174 | // Send a request to the url 175 | json(subreddit_search_path, false).await.unwrap_or_default()["data"]["children"] 176 | .as_array() 177 | .map(ToOwned::to_owned) 178 | .unwrap_or_default() 179 | .iter() 180 | .map(|subreddit| { 181 | // For each subreddit from subreddit list 182 | // Fetch subreddit icon either from the community_icon or icon_img value 183 | let icon = subreddit["data"]["community_icon"].as_str().map_or_else(|| val(subreddit, "icon_img"), ToString::to_string); 184 | 185 | Subreddit { 186 | name: val(subreddit, "display_name"), 187 | url: val(subreddit, "url"), 188 | icon: format_url(&icon), 189 | description: val(subreddit, "public_description"), 190 | subscribers: format_num(subreddit["data"]["subscribers"].as_f64().unwrap_or_default() as i64), 191 | } 192 | }) 193 | .collect::>() 194 | } 195 | -------------------------------------------------------------------------------- /src/user.rs: -------------------------------------------------------------------------------- 1 | #![allow(clippy::cmp_owned)] 2 | 3 | // CRATES 4 | use crate::client::json; 5 | use crate::server::RequestExt; 6 | use crate::utils::{error, filter_posts, format_url, get_filters, nsfw_landing, param, setting, template, Post, Preferences, User}; 7 | use crate::{config, utils}; 8 | use askama::Template; 9 | use chrono::DateTime; 10 | use htmlescape::decode_html; 11 | use hyper::{Body, Request, Response}; 12 | use 
time::{macros::format_description, OffsetDateTime}; 13 | 14 | // STRUCTS 15 | #[derive(Template)] 16 | #[template(path = "user.html")] 17 | struct UserTemplate { 18 | user: User, 19 | posts: Vec, 20 | sort: (String, String), 21 | ends: (String, String), 22 | /// "overview", "comments", or "submitted" 23 | listing: String, 24 | prefs: Preferences, 25 | url: String, 26 | redirect_url: String, 27 | /// Whether the user themself is filtered. 28 | is_filtered: bool, 29 | /// Whether all fetched posts are filtered (to differentiate between no posts fetched in the first place, 30 | /// and all fetched posts being filtered). 31 | all_posts_filtered: bool, 32 | /// Whether all posts were hidden because they are NSFW (and user has disabled show NSFW) 33 | all_posts_hidden_nsfw: bool, 34 | no_posts: bool, 35 | } 36 | 37 | // FUNCTIONS 38 | pub async fn profile(req: Request) -> Result, String> { 39 | let listing = req.param("listing").unwrap_or_else(|| "overview".to_string()); 40 | 41 | // Build the Reddit JSON API path 42 | let path = format!( 43 | "/user/{}/{listing}.json?{}&raw_json=1", 44 | req.param("name").unwrap_or_else(|| "reddit".to_string()), 45 | req.uri().query().unwrap_or_default(), 46 | ); 47 | let url = String::from(req.uri().path_and_query().map_or("", |val| val.as_str())); 48 | let redirect_url = url[1..].replace('?', "%3F").replace('&', "%26"); 49 | 50 | // Retrieve other variables from Redlib request 51 | let sort = param(&path, "sort").unwrap_or_default(); 52 | let username = req.param("name").unwrap_or_default(); 53 | 54 | // Retrieve info from user about page. 55 | let user = user(&username).await.unwrap_or_default(); 56 | 57 | let req_url = req.uri().to_string(); 58 | // Return landing page if this post if this Reddit deems this user NSFW, 59 | // but we have also disabled the display of NSFW content or if the instance 60 | // is SFW-only. 
61 | if user.nsfw && crate::utils::should_be_nsfw_gated(&req, &req_url) { 62 | return Ok(nsfw_landing(req, req_url).await.unwrap_or_default()); 63 | } 64 | 65 | let filters = get_filters(&req); 66 | if filters.contains(&["u_", &username].concat()) { 67 | Ok(template(&UserTemplate { 68 | user, 69 | posts: Vec::new(), 70 | sort: (sort, param(&path, "t").unwrap_or_default()), 71 | ends: (param(&path, "after").unwrap_or_default(), String::new()), 72 | listing, 73 | prefs: Preferences::new(&req), 74 | url, 75 | redirect_url, 76 | is_filtered: true, 77 | all_posts_filtered: false, 78 | all_posts_hidden_nsfw: false, 79 | no_posts: false, 80 | })) 81 | } else { 82 | // Request user posts/comments from Reddit 83 | match Post::fetch(&path, false).await { 84 | Ok((mut posts, after)) => { 85 | let (_, all_posts_filtered) = filter_posts(&mut posts, &filters); 86 | let no_posts = posts.is_empty(); 87 | let all_posts_hidden_nsfw = !no_posts && (posts.iter().all(|p| p.flags.nsfw) && setting(&req, "show_nsfw") != "on"); 88 | Ok(template(&UserTemplate { 89 | user, 90 | posts, 91 | sort: (sort, param(&path, "t").unwrap_or_default()), 92 | ends: (param(&path, "after").unwrap_or_default(), after), 93 | listing, 94 | prefs: Preferences::new(&req), 95 | url, 96 | redirect_url, 97 | is_filtered: false, 98 | all_posts_filtered, 99 | all_posts_hidden_nsfw, 100 | no_posts, 101 | })) 102 | } 103 | // If there is an error show error page 104 | Err(msg) => error(req, &msg).await, 105 | } 106 | } 107 | } 108 | 109 | // USER 110 | async fn user(name: &str) -> Result { 111 | // Build the Reddit JSON API path 112 | let path: String = format!("/user/{name}/about.json?raw_json=1"); 113 | 114 | // Send a request to the url 115 | json(path, false).await.map(|res| { 116 | // Grab creation date as unix timestamp 117 | let created_unix = res["data"]["created"].as_f64().unwrap_or(0.0).round() as i64; 118 | let created = 
OffsetDateTime::from_unix_timestamp(created_unix).unwrap_or(OffsetDateTime::UNIX_EPOCH); 119 | 120 | // Closure used to parse JSON from Reddit APIs 121 | let about = |item| res["data"]["subreddit"][item].as_str().unwrap_or_default().to_string(); 122 | 123 | // Parse the JSON output into a User struct 124 | User { 125 | name: res["data"]["name"].as_str().unwrap_or(name).to_owned(), 126 | title: about("title"), 127 | icon: format_url(&about("icon_img")), 128 | karma: res["data"]["total_karma"].as_i64().unwrap_or(0), 129 | created: created.format(format_description!("[month repr:short] [day] '[year repr:last_two]")).unwrap_or_default(), 130 | banner: about("banner_img"), 131 | description: about("public_description"), 132 | nsfw: res["data"]["subreddit"]["over_18"].as_bool().unwrap_or_default(), 133 | } 134 | }) 135 | } 136 | 137 | pub async fn rss(req: Request) -> Result, String> { 138 | if config::get_setting("REDLIB_ENABLE_RSS").is_none() { 139 | return Ok(error(req, "RSS is disabled on this instance.").await.unwrap_or_default()); 140 | } 141 | use crate::utils::rewrite_urls; 142 | use hyper::header::CONTENT_TYPE; 143 | use rss::{ChannelBuilder, Item}; 144 | 145 | // Get user 146 | let user_str = req.param("name").unwrap_or_default(); 147 | 148 | let listing = req.param("listing").unwrap_or_else(|| "overview".to_string()); 149 | 150 | // Get path 151 | let path = format!("/user/{user_str}/{listing}.json?{}&raw_json=1", req.uri().query().unwrap_or_default(),); 152 | 153 | // Get user 154 | let user_obj = user(&user_str).await.unwrap_or_default(); 155 | 156 | // Get posts 157 | let (posts, _) = Post::fetch(&path, false).await?; 158 | 159 | // Build the RSS feed 160 | let channel = ChannelBuilder::default() 161 | .title(user_str) 162 | .description(user_obj.description) 163 | .items( 164 | posts 165 | .into_iter() 166 | .map(|post| Item { 167 | title: Some(post.title.to_string()), 168 | link: Some(format_url(&utils::get_post_url(&post))), 169 | author: 
Some(post.author.name), 170 | pub_date: Some(DateTime::from_timestamp(post.created_ts as i64, 0).unwrap_or_default().to_rfc2822()), 171 | content: Some(rewrite_urls(&decode_html(&post.body).unwrap_or_else(|_| post.body.clone()))), 172 | ..Default::default() 173 | }) 174 | .collect::>(), 175 | ) 176 | .build(); 177 | 178 | // Serialize the feed to RSS 179 | let body = channel.to_string().into_bytes(); 180 | 181 | // Create the HTTP response 182 | let mut res = Response::new(Body::from(body)); 183 | res.headers_mut().insert(CONTENT_TYPE, hyper::header::HeaderValue::from_static("application/rss+xml")); 184 | 185 | Ok(res) 186 | } 187 | 188 | #[tokio::test(flavor = "multi_thread")] 189 | async fn test_fetching_user() { 190 | let user = user("spez").await; 191 | assert!(user.is_ok()); 192 | assert!(user.unwrap().karma > 100); 193 | } 194 | -------------------------------------------------------------------------------- /templates/user.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} {% import "utils.html" as utils %} {% block search %} 2 | {% call utils::search("".to_owned(), "") %} {% endblock %} {% block title %}{{ 3 | user.name.replace("u/", "") }} (u/{{ user.name }}) - Redlib{% endblock %} {% 4 | block subscriptions %} {% call utils::sub_list("") %} {% endblock %} {% block 5 | body %} 6 |
7 | {% if !is_filtered %} 8 |
9 |
10 |
11 | {% call utils::sort(["/user/", user.name.as_str()].concat(), 12 | ["overview", "comments", "submitted"], listing) %} 13 |
14 | {% if sort.0 == "top" || sort.0 == "controversial" %}{% endif %} 36 |
37 | 38 | {% if all_posts_hidden_nsfw %} 39 |
40 | All posts are hidden because they are NSFW. Enable "Show NSFW posts" 41 | in settings to view. 42 |
43 | {% endif %} {% if no_posts %} 44 |
No posts were found.
45 | {% endif %} {% if all_posts_filtered %} 46 |
(All content on this page has been filtered)
47 | {% else %} 48 |
49 | {% for post in posts %} {% if post.flags.nsfw && prefs.show_nsfw != 50 | "on" %} {% else if !post.title.is_empty() %} {% call 51 | utils::post_in_list(post) %} {% else %} 52 |
53 |
54 |

55 | {% if prefs.hide_score != "on" %} {{ post.score.0 }} {% 56 | else %} • {% endif %} 57 |

58 |
59 |
60 |
61 | 62 | {{ post.link_title }} 68 |
69 |  in  70 | r/{{ post.community }} 75 | 76 |  {{ post.rel_time }} 79 |
80 |
81 |

{{ post.body|safe }}

82 |
83 |
84 | {% endif %} {% endfor %} {% if prefs.use_hls == "on" %} 85 | 86 | 87 | {% endif %} 88 |
89 | {% endif %} 90 | 91 |
92 | {% if ends.0 != "" %} 93 | PREV 98 | {% endif %} {% if ends.1 != "" %} 99 | NEXT 104 | {% endif %} 105 |
106 |
107 | {% endif %} 108 | 177 |
178 | {% endblock %} 179 | -------------------------------------------------------------------------------- /src/duplicates.rs: -------------------------------------------------------------------------------- 1 | //! Handler for post duplicates. 2 | 3 | use crate::client::json; 4 | use crate::server::RequestExt; 5 | use crate::subreddit::{can_access_quarantine, quarantine}; 6 | use crate::utils::{error, filter_posts, get_filters, nsfw_landing, parse_post, template, Post, Preferences}; 7 | 8 | use askama::Template; 9 | use hyper::{Body, Request, Response}; 10 | use serde_json::Value; 11 | use std::borrow::ToOwned; 12 | use std::collections::HashSet; 13 | use std::vec::Vec; 14 | 15 | /// `DuplicatesParams` contains the parameters in the URL. 16 | struct DuplicatesParams { 17 | before: String, 18 | after: String, 19 | sort: String, 20 | } 21 | 22 | /// `DuplicatesTemplate` defines an Askama template for rendering duplicate 23 | /// posts. 24 | #[derive(Template)] 25 | #[template(path = "duplicates.html")] 26 | struct DuplicatesTemplate { 27 | /// params contains the relevant request parameters. 28 | params: DuplicatesParams, 29 | 30 | /// post is the post whose ID is specified in the reqeust URL. Note that 31 | /// this is not necessarily the "original" post. 32 | post: Post, 33 | 34 | /// duplicates is the list of posts that, per Reddit, are duplicates of 35 | /// Post above. 36 | duplicates: Vec, 37 | 38 | /// prefs are the user preferences. 39 | prefs: Preferences, 40 | 41 | /// url is the request URL. 42 | url: String, 43 | 44 | /// num_posts_filtered counts how many posts were filtered from the 45 | /// duplicates list. 46 | num_posts_filtered: u64, 47 | 48 | /// all_posts_filtered is true if every duplicate was filtered. This is an 49 | /// edge case but can still happen. 50 | all_posts_filtered: bool, 51 | } 52 | 53 | /// Make the GET request to Reddit. It assumes `req` is the appropriate Reddit 54 | /// REST endpoint for enumerating post duplicates. 
55 | pub async fn item(req: Request) -> Result, String> { 56 | let path: String = format!("{}.json?{}&raw_json=1", req.uri().path(), req.uri().query().unwrap_or_default()); 57 | let sub = req.param("sub").unwrap_or_default(); 58 | let quarantined = can_access_quarantine(&req, &sub); 59 | 60 | // Log the request in debugging mode 61 | #[cfg(debug_assertions)] 62 | req.param("id").unwrap_or_default(); 63 | 64 | // Send the GET, and await JSON. 65 | match json(path, quarantined).await { 66 | // Process response JSON. 67 | Ok(response) => { 68 | let post = parse_post(&response[0]["data"]["children"][0]).await; 69 | 70 | let req_url = req.uri().to_string(); 71 | // Return landing page if this post if this Reddit deems this post 72 | // NSFW, but we have also disabled the display of NSFW content 73 | // or if the instance is SFW-only 74 | if post.nsfw && crate::utils::should_be_nsfw_gated(&req, &req_url) { 75 | return Ok(nsfw_landing(req, req_url).await.unwrap_or_default()); 76 | } 77 | 78 | let filters = get_filters(&req); 79 | let (duplicates, num_posts_filtered, all_posts_filtered) = parse_duplicates(&response[1], &filters).await; 80 | 81 | // These are the values for the "before=", "after=", and "sort=" 82 | // query params, respectively. 83 | let mut before: String = String::new(); 84 | let mut after: String = String::new(); 85 | let mut sort: String = String::new(); 86 | 87 | // FIXME: We have to perform a kludge to work around a Reddit API 88 | // bug. 89 | // 90 | // The JSON object in "data" will never contain a "before" value so 91 | // it is impossible to use it to determine our position in a 92 | // listing. We'll make do by getting the ID of the first post in 93 | // the listing, setting that as our "before" value, and ask Reddit 94 | // to give us a batch of duplicate posts up to that post. 95 | // 96 | // Likewise, if we provide a "before" request in the GET, the 97 | // result won't have an "after" in the JSON, in addition to missing 98 | // the "before." 
So we will have to use the final post in the list 99 | // of duplicates. 100 | // 101 | // That being said, we'll also need to capture the value of the 102 | // "sort=" parameter as well, so we will need to inspect the 103 | // query key-value pairs anyway. 104 | let l = duplicates.len(); 105 | if l > 0 { 106 | // This gets set to true if "before=" is one of the GET params. 107 | let mut have_before: bool = false; 108 | 109 | // This gets set to true if "after=" is one of the GET params. 110 | let mut have_after: bool = false; 111 | 112 | // Inspect the query key-value pairs. We will need to record 113 | // the value of "sort=", along with checking to see if either 114 | // one of "before=" or "after=" are given. 115 | // 116 | // If we're in the middle of the batch (evidenced by the 117 | // presence of a "before=" or "after=" parameter in the GET), 118 | // then use the first post as the "before" reference. 119 | // 120 | // We'll do this iteratively. Better than with .map_or() 121 | // since a closure will continue to operate on remaining 122 | // elements even after we've determined one of "before=" or 123 | // "after=" (or both) are in the GET request. 124 | // 125 | // In practice, here should only ever be one of "before=" or 126 | // "after=" and never both. 127 | let query_str = req.uri().query().unwrap_or_default().to_string(); 128 | 129 | if !query_str.is_empty() { 130 | for param in query_str.split('&') { 131 | let kv: Vec<&str> = param.split('=').collect(); 132 | if kv.len() < 2 { 133 | // Reject invalid query parameter. 
134 | continue; 135 | } 136 | 137 | let key: &str = kv[0]; 138 | match key { 139 | "before" => have_before = true, 140 | "after" => have_after = true, 141 | "sort" => { 142 | let val: &str = kv[1]; 143 | match val { 144 | "new" | "num_comments" => sort = val.to_string(), 145 | _ => {} 146 | } 147 | } 148 | _ => {} 149 | } 150 | } 151 | } 152 | 153 | if have_after { 154 | "t3_".clone_into(&mut before); 155 | before.push_str(&duplicates[0].id); 156 | } 157 | 158 | // Address potentially missing "after". If "before=" is in the 159 | // GET, then "after" will be null in the JSON (see FIXME 160 | // above). 161 | if have_before { 162 | // The next batch will need to start from one after the 163 | // last post in the current batch. 164 | "t3_".clone_into(&mut after); 165 | after.push_str(&duplicates[l - 1].id); 166 | 167 | // Here is where things get terrible. Notice that we 168 | // haven't set `before`. In order to do so, we will 169 | // need to know if there is a batch that exists before 170 | // this one, and doing so requires actually fetching the 171 | // previous batch. In other words, we have to do yet one 172 | // more GET to Reddit. There is no other way to determine 173 | // whether or not to define `before`. 174 | // 175 | // We'll mitigate that by requesting at most one duplicate. 176 | let new_path: String = format!( 177 | "{}.json?before=t3_{}&sort={}&limit=1&raw_json=1", 178 | req.uri().path(), 179 | &duplicates[0].id, 180 | if sort.is_empty() { "num_comments".to_string() } else { sort.clone() } 181 | ); 182 | match json(new_path, true).await { 183 | Ok(response) => { 184 | if !response[1]["data"]["children"].as_array().unwrap_or(&Vec::new()).is_empty() { 185 | "t3_".clone_into(&mut before); 186 | before.push_str(&duplicates[0].id); 187 | } 188 | } 189 | Err(msg) => { 190 | // Abort entirely if we couldn't get the previous 191 | // batch. 
192 | return error(req, &msg).await; 193 | } 194 | } 195 | } else { 196 | after = response[1]["data"]["after"].as_str().unwrap_or_default().to_string(); 197 | } 198 | } 199 | 200 | Ok(template(&DuplicatesTemplate { 201 | params: DuplicatesParams { before, after, sort }, 202 | post, 203 | duplicates, 204 | prefs: Preferences::new(&req), 205 | url: req_url, 206 | num_posts_filtered, 207 | all_posts_filtered, 208 | })) 209 | } 210 | 211 | // Process error. 212 | Err(msg) => { 213 | if msg == "quarantined" || msg == "gated" { 214 | let sub = req.param("sub").unwrap_or_default(); 215 | Ok(quarantine(&req, sub, &msg)) 216 | } else { 217 | error(req, &msg).await 218 | } 219 | } 220 | } 221 | } 222 | 223 | // DUPLICATES 224 | async fn parse_duplicates(json: &Value, filters: &HashSet) -> (Vec, u64, bool) { 225 | let post_duplicates: &Vec = &json["data"]["children"].as_array().map_or(Vec::new(), ToOwned::to_owned); 226 | let mut duplicates: Vec = Vec::new(); 227 | 228 | // Process each post and place them in the Vec. 
229 | for val in post_duplicates { 230 | let post: Post = parse_post(val).await; 231 | duplicates.push(post); 232 | } 233 | 234 | let (num_posts_filtered, all_posts_filtered) = filter_posts(&mut duplicates, filters); 235 | (duplicates, num_posts_filtered, all_posts_filtered) 236 | } 237 | -------------------------------------------------------------------------------- /src/instance_info.rs: -------------------------------------------------------------------------------- 1 | use crate::{ 2 | config::{Config, CONFIG}, 3 | server::RequestExt, 4 | utils::{ErrorTemplate, Preferences}, 5 | }; 6 | use askama::Template; 7 | use build_html::{Container, Html, HtmlContainer, Table}; 8 | use hyper::{http::Error, Body, Request, Response}; 9 | use serde::{Deserialize, Serialize}; 10 | use std::sync::LazyLock; 11 | use time::OffsetDateTime; 12 | 13 | /// This is the local static that is initialized at runtime (technically at 14 | /// the first request to the info endpoint) and contains the data 15 | /// retrieved from the info endpoint. 16 | pub static INSTANCE_INFO: LazyLock = LazyLock::new(InstanceInfo::new); 17 | 18 | /// Handles instance info endpoint 19 | pub async fn instance_info(req: Request) -> Result, String> { 20 | // This will retrieve the extension given, or create a new string - which will 21 | // simply become the last option, an HTML page. 
22 | let extension = req.param("extension").unwrap_or_default(); 23 | let response = match extension.as_str() { 24 | "yaml" | "yml" => info_yaml(), 25 | "txt" => info_txt(), 26 | "json" => info_json(), 27 | "html" | "" => info_html(&req), 28 | _ => { 29 | let error = ErrorTemplate { 30 | msg: "Error: Invalid info extension".into(), 31 | prefs: Preferences::new(&req), 32 | url: req.uri().to_string(), 33 | } 34 | .render() 35 | .unwrap(); 36 | Response::builder().status(404).header("content-type", "text/html; charset=utf-8").body(error.into()) 37 | } 38 | }; 39 | response.map_err(|err| format!("{err}")) 40 | } 41 | 42 | fn info_json() -> Result, Error> { 43 | if let Ok(body) = serde_json::to_string(&*INSTANCE_INFO) { 44 | Response::builder().status(200).header("content-type", "application/json").body(body.into()) 45 | } else { 46 | Response::builder() 47 | .status(500) 48 | .header("content-type", "text/plain") 49 | .body(Body::from("Error serializing JSON")) 50 | } 51 | } 52 | 53 | fn info_yaml() -> Result, Error> { 54 | if let Ok(body) = serde_yaml::to_string(&*INSTANCE_INFO) { 55 | // We can use `application/yaml` as media type, though there is no guarantee 56 | // that browsers will honor it. But we'll do it anyway. 
See: 57 | // https://github.com/ietf-wg-httpapi/mediatypes/blob/main/draft-ietf-httpapi-yaml-mediatypes.md#media-type-applicationyaml-application-yaml 58 | Response::builder().status(200).header("content-type", "application/yaml").body(body.into()) 59 | } else { 60 | Response::builder() 61 | .status(500) 62 | .header("content-type", "text/plain") 63 | .body(Body::from("Error serializing YAML.")) 64 | } 65 | } 66 | 67 | fn info_txt() -> Result, Error> { 68 | Response::builder() 69 | .status(200) 70 | .header("content-type", "text/plain") 71 | .body(Body::from(INSTANCE_INFO.to_string(&StringType::Raw))) 72 | } 73 | fn info_html(req: &Request) -> Result, Error> { 74 | let message = MessageTemplate { 75 | title: String::from("Instance information"), 76 | body: INSTANCE_INFO.to_string(&StringType::Html), 77 | prefs: Preferences::new(req), 78 | url: req.uri().to_string(), 79 | } 80 | .render() 81 | .unwrap(); 82 | Response::builder().status(200).header("content-type", "text/html; charset=utf8").body(Body::from(message)) 83 | } 84 | #[derive(Serialize, Deserialize, Default)] 85 | pub struct InstanceInfo { 86 | package_name: String, 87 | crate_version: String, 88 | pub git_commit: String, 89 | deploy_date: String, 90 | compile_mode: String, 91 | deploy_unix_ts: i64, 92 | config: Config, 93 | } 94 | 95 | impl InstanceInfo { 96 | pub fn new() -> Self { 97 | Self { 98 | package_name: env!("CARGO_PKG_NAME").to_string(), 99 | crate_version: env!("CARGO_PKG_VERSION").to_string(), 100 | git_commit: env!("GIT_HASH").to_string(), 101 | deploy_date: OffsetDateTime::now_local().unwrap_or_else(|_| OffsetDateTime::now_utc()).to_string(), 102 | #[cfg(debug_assertions)] 103 | compile_mode: "Debug".into(), 104 | #[cfg(not(debug_assertions))] 105 | compile_mode: "Release".into(), 106 | deploy_unix_ts: OffsetDateTime::now_local().unwrap_or_else(|_| OffsetDateTime::now_utc()).unix_timestamp(), 107 | config: CONFIG.clone(), 108 | } 109 | } 110 | fn to_table(&self) -> String { 111 | let mut 
container = Container::default(); 112 | let convert = |o: &Option| -> String { o.clone().unwrap_or_else(|| "Unset".to_owned()) }; 113 | if let Some(banner) = &self.config.banner { 114 | container.add_header(3, "Instance banner"); 115 | container.add_raw("
"); 116 | container.add_paragraph(banner); 117 | container.add_raw("
"); 118 | } 119 | container.add_table( 120 | Table::from([ 121 | ["Package name", &self.package_name], 122 | ["Crate version", &self.crate_version], 123 | ["Git commit", &self.git_commit], 124 | ["Deploy date", &self.deploy_date], 125 | ["Deploy timestamp", &self.deploy_unix_ts.to_string()], 126 | ["Compile mode", &self.compile_mode], 127 | ["SFW only", &convert(&self.config.sfw_only)], 128 | ["Pushshift frontend", &convert(&self.config.pushshift)], 129 | ["RSS enabled", &convert(&self.config.enable_rss)], 130 | ["Full URL", &convert(&self.config.full_url)], 131 | ["Remove default feeds", &convert(&self.config.default_remove_default_feeds)], 132 | //TODO: fallback to crate::config::DEFAULT_PUSHSHIFT_FRONTEND 133 | ]) 134 | .with_header_row(["Settings"]), 135 | ); 136 | container.add_raw("
"); 137 | container.add_table( 138 | Table::from([ 139 | ["Hide awards", &convert(&self.config.default_hide_awards)], 140 | ["Hide score", &convert(&self.config.default_hide_score)], 141 | ["Theme", &convert(&self.config.default_theme)], 142 | ["Front page", &convert(&self.config.default_front_page)], 143 | ["Layout", &convert(&self.config.default_layout)], 144 | ["Wide", &convert(&self.config.default_wide)], 145 | ["Comment sort", &convert(&self.config.default_comment_sort)], 146 | ["Post sort", &convert(&self.config.default_post_sort)], 147 | ["Blur Spoiler", &convert(&self.config.default_blur_spoiler)], 148 | ["Show NSFW", &convert(&self.config.default_show_nsfw)], 149 | ["Blur NSFW", &convert(&self.config.default_blur_nsfw)], 150 | ["Use HLS", &convert(&self.config.default_use_hls)], 151 | ["Hide HLS notification", &convert(&self.config.default_hide_hls_notification)], 152 | ["Subscriptions", &convert(&self.config.default_subscriptions)], 153 | ["Filters", &convert(&self.config.default_filters)], 154 | ]) 155 | .with_header_row(["Default preferences"]), 156 | ); 157 | container.to_html_string().replace("", "") 158 | } 159 | fn to_string(&self, string_type: &StringType) -> String { 160 | match string_type { 161 | StringType::Raw => { 162 | format!( 163 | "Package name: {}\n 164 | Crate version: {}\n 165 | Git commit: {}\n 166 | Deploy date: {}\n 167 | Deploy timestamp: {}\n 168 | Compile mode: {}\n 169 | SFW only: {:?}\n 170 | Pushshift frontend: {:?}\n 171 | RSS enabled: {:?}\n 172 | Full URL: {:?}\n 173 | Remove default feeds: {:?}\n 174 | Config:\n 175 | Banner: {:?}\n 176 | Hide awards: {:?}\n 177 | Hide score: {:?}\n 178 | Default theme: {:?}\n 179 | Default front page: {:?}\n 180 | Default layout: {:?}\n 181 | Default wide: {:?}\n 182 | Default comment sort: {:?}\n 183 | Default post sort: {:?}\n 184 | Default blur Spoiler: {:?}\n 185 | Default show NSFW: {:?}\n 186 | Default blur NSFW: {:?}\n 187 | Default use HLS: {:?}\n 188 | Default hide HLS 
notification: {:?}\n 189 | Default subscriptions: {:?}\n 190 | Default filters: {:?}\n", 191 | self.package_name, 192 | self.crate_version, 193 | self.git_commit, 194 | self.deploy_date, 195 | self.deploy_unix_ts, 196 | self.compile_mode, 197 | self.config.sfw_only, 198 | self.config.enable_rss, 199 | self.config.full_url, 200 | self.config.default_remove_default_feeds, 201 | self.config.pushshift, 202 | self.config.banner, 203 | self.config.default_hide_awards, 204 | self.config.default_hide_score, 205 | self.config.default_theme, 206 | self.config.default_front_page, 207 | self.config.default_layout, 208 | self.config.default_wide, 209 | self.config.default_comment_sort, 210 | self.config.default_post_sort, 211 | self.config.default_blur_spoiler, 212 | self.config.default_show_nsfw, 213 | self.config.default_blur_nsfw, 214 | self.config.default_use_hls, 215 | self.config.default_hide_hls_notification, 216 | self.config.default_subscriptions, 217 | self.config.default_filters, 218 | ) 219 | } 220 | StringType::Html => self.to_table(), 221 | } 222 | } 223 | } 224 | enum StringType { 225 | Raw, 226 | Html, 227 | } 228 | #[derive(Template)] 229 | #[template(path = "message.html")] 230 | struct MessageTemplate { 231 | title: String, 232 | body: String, 233 | prefs: Preferences, 234 | url: String, 235 | } 236 | -------------------------------------------------------------------------------- /templates/settings.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | {% import "utils.html" as utils %} 3 | 4 | {% block title %}Redlib Settings{% endblock %} 5 | 6 | {% block subscriptions %} 7 | {% call utils::sub_list("") %} 8 | {% endblock %} 9 | 10 | {% block search %} 11 | {% call utils::search("".to_owned(), "") %} 12 | {% endblock %} 13 | 14 | {% block content %} 15 |
16 |
17 |
18 |
19 | Appearance 20 |
21 | 22 | 25 |
26 |
27 |
28 | Interface 29 |
30 | 31 | 32 | 34 |
35 |
36 | 37 | 40 |
41 |
42 | 43 | 46 |
47 |
48 | 49 | 50 | 51 |
52 |
53 |
54 | Content 55 |
56 | 57 | 60 |
61 |
62 | 63 | 67 |
68 |
69 | 70 | 74 |
75 |
76 | 77 | 78 | 80 |
81 | {% if !crate::utils::sfw_only() %} 82 |
83 | 84 | 85 | 87 |
88 |
89 | 90 | 91 | 93 |
94 | {% endif %} 95 |
96 | 97 | 98 | 100 |
101 |
102 | 103 | 104 | 105 |
106 |
107 | 108 | 109 | 111 |
112 |
113 | 114 |
115 | Why? 116 |
Reddit videos require JavaScript (via HLS.js) to be enabled 117 | to be played with audio. Therefore, this toggle lets you either use Redlib JS-free or 118 | utilize this feature.
119 |
120 | 121 | 122 |
123 |
124 | 125 | 126 | 128 |
129 |
130 | 131 | 132 | 134 |
135 |
136 | 137 | 138 | 140 |
141 |
142 | 144 | 145 | 147 |
148 |
149 | 150 |
151 |
152 | {% if prefs.subscriptions.len() > 0 %} 153 |
154 | Subscribed Feeds 155 | {% for sub in prefs.subscriptions %} 156 |
157 | {% let feed -%} 158 | {% if sub.starts_with("u_") -%}{% let feed = format!("u/{}", &sub[2..]) -%}{% else -%}{% let feed = 159 | format!("r/{}", sub) -%}{% endif -%} 160 | {{ feed }} 161 |
162 | 163 |
164 |
165 | {% endfor %} 166 |
167 | {% endif %} 168 | {% if !prefs.filters.is_empty() %} 169 |
170 | Filtered Feeds 171 | {% for sub in prefs.filters %} 172 |
173 | {% let feed -%} 174 | {% if sub.starts_with("u_") -%}{% let feed = format!("u/{}", &sub[2..]) -%}{% else -%}{% let feed = 175 | format!("r/{}", sub) -%}{% endif -%} 176 | {{ feed }} 177 |
178 | 179 |
180 |
181 | {% endfor %} 182 |
183 | {% endif %} 184 | 185 |
186 |

Note: settings and subscriptions are saved in browser cookies. Clearing your cookies will reset them. 187 |

188 |
189 | {% match prefs.to_urlencoded() %} 190 | {% when Ok with (encoded_prefs) %} 191 |

You can restore your current settings and subscriptions after clearing your cookies using this link.

193 | {% when Err with (err) %} 194 |

There was an error creating your restore link: {{ err }}

195 |

Please report this issue

196 | {% endmatch %} 197 |
198 |
199 | 200 | 201 |
202 | 205 | 206 | 207 |
208 |
209 | 211 | 212 |
213 |
214 |
215 |
216 | 217 | {% endblock %} -------------------------------------------------------------------------------- /src/post.rs: -------------------------------------------------------------------------------- 1 | #![allow(clippy::cmp_owned)] 2 | 3 | // CRATES 4 | use crate::client::json; 5 | use crate::config::get_setting; 6 | use crate::server::RequestExt; 7 | use crate::subreddit::{can_access_quarantine, quarantine}; 8 | use crate::utils::{ 9 | error, format_num, get_filters, nsfw_landing, param, parse_post, rewrite_emotes, setting, template, time, val, Author, Awards, Comment, Flair, FlairPart, Post, Preferences, 10 | }; 11 | use hyper::{Body, Request, Response}; 12 | 13 | use askama::Template; 14 | use regex::Regex; 15 | use std::collections::{HashMap, HashSet}; 16 | use std::sync::LazyLock; 17 | 18 | // STRUCTS 19 | #[derive(Template)] 20 | #[template(path = "post.html")] 21 | struct PostTemplate { 22 | comments: Vec, 23 | post: Post, 24 | sort: String, 25 | prefs: Preferences, 26 | single_thread: bool, 27 | url: String, 28 | url_without_query: String, 29 | comment_query: String, 30 | } 31 | 32 | static COMMENT_SEARCH_CAPTURE: LazyLock = LazyLock::new(|| Regex::new(r"\?q=(.*)&type=comment").unwrap()); 33 | 34 | pub async fn item(req: Request) -> Result, String> { 35 | // Build Reddit API path 36 | let mut path: String = format!("{}.json?{}&raw_json=1", req.uri().path(), req.uri().query().unwrap_or_default()); 37 | let sub = req.param("sub").unwrap_or_default(); 38 | let quarantined = can_access_quarantine(&req, &sub); 39 | let url = req.uri().to_string(); 40 | 41 | // Set sort to sort query parameter 42 | let sort = param(&path, "sort").unwrap_or_else(|| { 43 | // Grab default comment sort method from Cookies 44 | let default_sort = setting(&req, "comment_sort"); 45 | 46 | // If there's no sort query but there's a default sort, set sort to default_sort 47 | if default_sort.is_empty() { 48 | String::new() 49 | } else { 50 | path = 
format!("{}.json?{}&sort={}&raw_json=1", req.uri().path(), req.uri().query().unwrap_or_default(), default_sort); 51 | default_sort 52 | } 53 | }); 54 | 55 | // Log the post ID being fetched in debug mode 56 | #[cfg(debug_assertions)] 57 | req.param("id").unwrap_or_default(); 58 | 59 | let single_thread = req.param("comment_id").is_some(); 60 | let highlighted_comment = &req.param("comment_id").unwrap_or_default(); 61 | 62 | // Send a request to the url, receive JSON in response 63 | match json(path, quarantined).await { 64 | // Otherwise, grab the JSON output from the request 65 | Ok(response) => { 66 | // Parse the JSON into Post and Comment structs 67 | let post = parse_post(&response[0]["data"]["children"][0]).await; 68 | 69 | let req_url = req.uri().to_string(); 70 | // Return landing page if this post if this Reddit deems this post 71 | // NSFW, but we have also disabled the display of NSFW content 72 | // or if the instance is SFW-only. 73 | if post.nsfw && crate::utils::should_be_nsfw_gated(&req, &req_url) { 74 | return Ok(nsfw_landing(req, req_url).await.unwrap_or_default()); 75 | } 76 | 77 | let query_body = match COMMENT_SEARCH_CAPTURE.captures(&url) { 78 | Some(captures) => captures.get(1).unwrap().as_str().replace("%20", " ").replace('+', " "), 79 | None => String::new(), 80 | }; 81 | 82 | let query_string = format!("q={query_body}&type=comment"); 83 | let form = url::form_urlencoded::parse(query_string.as_bytes()).collect::>(); 84 | let query = form.get("q").unwrap().clone().to_string(); 85 | 86 | let comments = match query.as_str() { 87 | "" => parse_comments(&response[1], &post.permalink, &post.author.name, highlighted_comment, &get_filters(&req), &req), 88 | _ => query_comments(&response[1], &post.permalink, &post.author.name, highlighted_comment, &get_filters(&req), &query, &req), 89 | }; 90 | 91 | // Use the Post and Comment structs to generate a website to show users 92 | Ok(template(&PostTemplate { 93 | comments, 94 | post, 95 | 
url_without_query: url.clone().trim_end_matches(&format!("?q={query}&type=comment")).to_string(), 96 | sort, 97 | prefs: Preferences::new(&req), 98 | single_thread, 99 | url: req_url, 100 | comment_query: query, 101 | })) 102 | } 103 | // If the Reddit API returns an error, exit and send error page to user 104 | Err(msg) => { 105 | if msg == "quarantined" || msg == "gated" { 106 | let sub = req.param("sub").unwrap_or_default(); 107 | Ok(quarantine(&req, sub, &msg)) 108 | } else { 109 | error(req, &msg).await 110 | } 111 | } 112 | } 113 | } 114 | 115 | // COMMENTS 116 | 117 | fn parse_comments(json: &serde_json::Value, post_link: &str, post_author: &str, highlighted_comment: &str, filters: &HashSet, req: &Request) -> Vec { 118 | // Parse the comment JSON into a Vector of Comments 119 | let comments = json["data"]["children"].as_array().map_or(Vec::new(), std::borrow::ToOwned::to_owned); 120 | 121 | // For each comment, retrieve the values to build a Comment object 122 | comments 123 | .into_iter() 124 | .map(|comment| { 125 | let data = &comment["data"]; 126 | let replies: Vec = if data["replies"].is_object() { 127 | parse_comments(&data["replies"], post_link, post_author, highlighted_comment, filters, req) 128 | } else { 129 | Vec::new() 130 | }; 131 | build_comment(&comment, data, replies, post_link, post_author, highlighted_comment, filters, req) 132 | }) 133 | .collect() 134 | } 135 | 136 | fn query_comments( 137 | json: &serde_json::Value, 138 | post_link: &str, 139 | post_author: &str, 140 | highlighted_comment: &str, 141 | filters: &HashSet, 142 | query: &str, 143 | req: &Request, 144 | ) -> Vec { 145 | let comments = json["data"]["children"].as_array().map_or(Vec::new(), std::borrow::ToOwned::to_owned); 146 | let mut results = Vec::new(); 147 | 148 | for comment in comments { 149 | let data = &comment["data"]; 150 | 151 | // If this comment contains replies, handle those too 152 | if data["replies"].is_object() { 153 | results.append(&mut 
query_comments(&data["replies"], post_link, post_author, highlighted_comment, filters, query, req)); 154 | } 155 | 156 | let c = build_comment(&comment, data, Vec::new(), post_link, post_author, highlighted_comment, filters, req); 157 | if c.body.to_lowercase().contains(&query.to_lowercase()) { 158 | results.push(c); 159 | } 160 | } 161 | 162 | results 163 | } 164 | #[allow(clippy::too_many_arguments)] 165 | fn build_comment( 166 | comment: &serde_json::Value, 167 | data: &serde_json::Value, 168 | replies: Vec, 169 | post_link: &str, 170 | post_author: &str, 171 | highlighted_comment: &str, 172 | filters: &HashSet, 173 | req: &Request, 174 | ) -> Comment { 175 | let id = val(comment, "id"); 176 | 177 | let body = if (val(comment, "author") == "[deleted]" && val(comment, "body") == "[removed]") || val(comment, "body") == "[ Removed by Reddit ]" { 178 | format!( 179 | "

[removed] — view removed comment

", 180 | get_setting("REDLIB_PUSHSHIFT_FRONTEND").unwrap_or_else(|| String::from(crate::config::DEFAULT_PUSHSHIFT_FRONTEND)), 181 | ) 182 | } else { 183 | rewrite_emotes(&data["media_metadata"], val(comment, "body_html")) 184 | }; 185 | let kind = comment["kind"].as_str().unwrap_or_default().to_string(); 186 | 187 | let unix_time = data["created_utc"].as_f64().unwrap_or_default(); 188 | let (rel_time, created) = time(unix_time); 189 | 190 | let edited = data["edited"].as_f64().map_or((String::new(), String::new()), time); 191 | 192 | let score = data["score"].as_i64().unwrap_or(0); 193 | 194 | // The JSON API only provides comments up to some threshold. 195 | // Further comments have to be loaded by subsequent requests. 196 | // The "kind" value will be "more" and the "count" 197 | // shows how many more (sub-)comments exist in the respective nesting level. 198 | // Note that in certain (seemingly random) cases, the count is simply wrong. 199 | let more_count = data["count"].as_i64().unwrap_or_default(); 200 | 201 | let awards: Awards = Awards::parse(&data["all_awardings"]); 202 | 203 | let parent_kind_and_id = val(comment, "parent_id"); 204 | let parent_info = parent_kind_and_id.split('_').collect::>(); 205 | 206 | let highlighted = id == highlighted_comment; 207 | 208 | let author = Author { 209 | name: val(comment, "author"), 210 | flair: Flair { 211 | flair_parts: FlairPart::parse( 212 | data["author_flair_type"].as_str().unwrap_or_default(), 213 | data["author_flair_richtext"].as_array(), 214 | data["author_flair_text"].as_str(), 215 | ), 216 | text: val(comment, "link_flair_text"), 217 | background_color: val(comment, "author_flair_background_color"), 218 | foreground_color: val(comment, "author_flair_text_color"), 219 | }, 220 | distinguished: val(comment, "distinguished"), 221 | }; 222 | let is_filtered = filters.contains(&["u_", author.name.as_str()].concat()); 223 | 224 | // Many subreddits have a default comment posted about the sub's rules etc. 
225 | // Many Redlib users do not wish to see this kind of comment by default. 226 | // Reddit does not tell us which users are "bots", so a good heuristic is to 227 | // collapse stickied moderator comments. 228 | let is_moderator_comment = data["distinguished"].as_str().unwrap_or_default() == "moderator"; 229 | let is_stickied = data["stickied"].as_bool().unwrap_or_default(); 230 | let collapsed = (is_moderator_comment && is_stickied) || is_filtered; 231 | 232 | Comment { 233 | id, 234 | kind, 235 | parent_id: parent_info[1].to_string(), 236 | parent_kind: parent_info[0].to_string(), 237 | post_link: post_link.to_string(), 238 | post_author: post_author.to_string(), 239 | body, 240 | author, 241 | score: if data["score_hidden"].as_bool().unwrap_or_default() { 242 | ("\u{2022}".to_string(), "Hidden".to_string()) 243 | } else { 244 | format_num(score) 245 | }, 246 | rel_time, 247 | created, 248 | edited, 249 | replies, 250 | highlighted, 251 | awards, 252 | collapsed, 253 | is_filtered, 254 | more_count, 255 | prefs: Preferences::new(req), 256 | } 257 | } 258 | -------------------------------------------------------------------------------- /src/settings.rs: -------------------------------------------------------------------------------- 1 | #![allow(clippy::cmp_owned)] 2 | 3 | use std::collections::HashMap; 4 | 5 | // CRATES 6 | use crate::server::ResponseExt; 7 | use crate::subreddit::join_until_size_limit; 8 | use crate::utils::{deflate_decompress, redirect, template, Preferences}; 9 | use askama::Template; 10 | use cookie::Cookie; 11 | use futures_lite::StreamExt; 12 | use hyper::{Body, Request, Response}; 13 | use time::{Duration, OffsetDateTime}; 14 | use tokio::time::timeout; 15 | use url::form_urlencoded; 16 | 17 | // STRUCTS 18 | #[derive(Template)] 19 | #[template(path = "settings.html")] 20 | struct SettingsTemplate { 21 | prefs: Preferences, 22 | url: String, 23 | } 24 | 25 | // CONSTANTS 26 | 27 | const PREFS: [&str; 19] = [ 28 | "theme", 29 | 
"front_page", 30 | "layout", 31 | "wide", 32 | "comment_sort", 33 | "post_sort", 34 | "blur_spoiler", 35 | "show_nsfw", 36 | "blur_nsfw", 37 | "use_hls", 38 | "hide_hls_notification", 39 | "autoplay_videos", 40 | "hide_sidebar_and_summary", 41 | "fixed_navbar", 42 | "hide_awards", 43 | "hide_score", 44 | "disable_visit_reddit_confirmation", 45 | "video_quality", 46 | "remove_default_feeds", 47 | ]; 48 | 49 | // FUNCTIONS 50 | 51 | /// Retrieve cookies from request "Cookie" header 52 | pub async fn get(req: Request) -> Result, String> { 53 | let url = req.uri().to_string(); 54 | Ok(template(&SettingsTemplate { 55 | prefs: Preferences::new(&req), 56 | url, 57 | })) 58 | } 59 | 60 | /// Set cookies using response "Set-Cookie" header 61 | pub async fn set(req: Request) -> Result, String> { 62 | // Split the body into parts 63 | let (parts, mut body) = req.into_parts(); 64 | 65 | // Grab existing cookies 66 | let _cookies: Vec> = parts 67 | .headers 68 | .get_all("Cookie") 69 | .iter() 70 | .filter_map(|header| Cookie::parse(header.to_str().unwrap_or_default()).ok()) 71 | .collect(); 72 | 73 | // Aggregate the body... 
74 | // let whole_body = hyper::body::aggregate(req).await.map_err(|e| e.to_string())?; 75 | let body_bytes = body 76 | .try_fold(Vec::new(), |mut data, chunk| { 77 | data.extend_from_slice(&chunk); 78 | Ok(data) 79 | }) 80 | .await 81 | .map_err(|e| e.to_string())?; 82 | 83 | let form = url::form_urlencoded::parse(&body_bytes).collect::>(); 84 | 85 | let mut response = redirect("/settings"); 86 | 87 | for &name in &PREFS { 88 | match form.get(name) { 89 | Some(value) => response.insert_cookie( 90 | Cookie::build((name.to_owned(), value.clone())) 91 | .path("/") 92 | .http_only(true) 93 | .expires(OffsetDateTime::now_utc() + Duration::weeks(52)) 94 | .into(), 95 | ), 96 | None => response.remove_cookie(name.to_string()), 97 | }; 98 | } 99 | 100 | Ok(response) 101 | } 102 | 103 | fn set_cookies_method(req: Request, remove_cookies: bool) -> Response { 104 | // Split the body into parts 105 | let (parts, _) = req.into_parts(); 106 | 107 | // Grab existing cookies 108 | let _cookies: Vec> = parts 109 | .headers 110 | .get_all("Cookie") 111 | .iter() 112 | .filter_map(|header| Cookie::parse(header.to_str().unwrap_or_default()).ok()) 113 | .collect(); 114 | 115 | let query = parts.uri.query().unwrap_or_default().as_bytes(); 116 | 117 | let form = url::form_urlencoded::parse(query).collect::>(); 118 | 119 | let path = match form.get("redirect") { 120 | Some(value) => { 121 | let value = value.replace("%26", "&").replace("%23", "#"); 122 | if value.starts_with('/') { 123 | value 124 | } else { 125 | format!("/{value}") 126 | } 127 | } 128 | None => "/".to_string(), 129 | }; 130 | 131 | let mut response = redirect(&path); 132 | 133 | for name in PREFS { 134 | match form.get(name) { 135 | Some(value) => response.insert_cookie( 136 | Cookie::build((name.to_owned(), value.clone())) 137 | .path("/") 138 | .http_only(true) 139 | .expires(OffsetDateTime::now_utc() + Duration::weeks(52)) 140 | .into(), 141 | ), 142 | None => { 143 | if remove_cookies { 144 | 
response.remove_cookie(name.to_string()); 145 | } 146 | } 147 | }; 148 | } 149 | 150 | // Get subscriptions/filters to restore from query string 151 | let subscriptions = form.get("subscriptions"); 152 | let filters = form.get("filters"); 153 | 154 | // We can't search through the cookies directly like in subreddit.rs, so instead we have to make a string out of the request's headers to search through 155 | let cookies_string = parts 156 | .headers 157 | .get("cookie") 158 | .map(|hv| hv.to_str().unwrap_or("").to_string()) // Return String 159 | .unwrap_or_else(String::new); // Return an empty string if None 160 | 161 | // If there are subscriptions to restore set them and delete any old subscriptions cookies, otherwise delete them all 162 | if let Some(subscriptions) = subscriptions { 163 | let sub_list: Vec = subscriptions.split('+').map(str::to_string).collect(); 164 | 165 | // Start at 0 to keep track of what number we need to start deleting old subscription cookies from 166 | let mut subscriptions_number_to_delete_from = 0; 167 | 168 | // Starting at 0 so we handle the subscription cookie without a number first 169 | for (subscriptions_number, list) in join_until_size_limit(&sub_list).into_iter().enumerate() { 170 | let subscriptions_cookie = if subscriptions_number == 0 { 171 | "subscriptions".to_string() 172 | } else { 173 | format!("subscriptions{subscriptions_number}") 174 | }; 175 | 176 | response.insert_cookie( 177 | Cookie::build((subscriptions_cookie, list)) 178 | .path("/") 179 | .http_only(true) 180 | .expires(OffsetDateTime::now_utc() + Duration::weeks(52)) 181 | .into(), 182 | ); 183 | 184 | subscriptions_number_to_delete_from += 1; 185 | } 186 | 187 | // While subscriptionsNUMBER= is in the string of cookies add a response removing that cookie 188 | while cookies_string.contains(&format!("subscriptions{subscriptions_number_to_delete_from}=")) { 189 | // Remove that subscriptions cookie 190 | 
response.remove_cookie(format!("subscriptions{subscriptions_number_to_delete_from}")); 191 | 192 | // Increment subscriptions cookie number 193 | subscriptions_number_to_delete_from += 1; 194 | } 195 | } else { 196 | // Remove unnumbered subscriptions cookie 197 | response.remove_cookie("subscriptions".to_string()); 198 | 199 | // Starts at one to deal with the first numbered subscription cookie and onwards 200 | let mut subscriptions_number_to_delete_from = 1; 201 | 202 | // While subscriptionsNUMBER= is in the string of cookies add a response removing that cookie 203 | while cookies_string.contains(&format!("subscriptions{subscriptions_number_to_delete_from}=")) { 204 | // Remove that subscriptions cookie 205 | response.remove_cookie(format!("subscriptions{subscriptions_number_to_delete_from}")); 206 | 207 | // Increment subscriptions cookie number 208 | subscriptions_number_to_delete_from += 1; 209 | } 210 | } 211 | 212 | // If there are filters to restore set them and delete any old filters cookies, otherwise delete them all 213 | if let Some(filters) = filters { 214 | let filters_list: Vec = filters.split('+').map(str::to_string).collect(); 215 | 216 | // Start at 0 to keep track of what number we need to start deleting old subscription cookies from 217 | let mut filters_number_to_delete_from = 0; 218 | 219 | // Starting at 0 so we handle the subscription cookie without a number first 220 | for (filters_number, list) in join_until_size_limit(&filters_list).into_iter().enumerate() { 221 | let filters_cookie = if filters_number == 0 { 222 | "filters".to_string() 223 | } else { 224 | format!("filters{filters_number}") 225 | }; 226 | 227 | response.insert_cookie( 228 | Cookie::build((filters_cookie, list)) 229 | .path("/") 230 | .http_only(true) 231 | .expires(OffsetDateTime::now_utc() + Duration::weeks(52)) 232 | .into(), 233 | ); 234 | 235 | filters_number_to_delete_from += 1; 236 | } 237 | 238 | // While filtersNUMBER= is in the string of cookies add a response 
removing that cookie 239 | while cookies_string.contains(&format!("filters{filters_number_to_delete_from}=")) { 240 | // Remove that filters cookie 241 | response.remove_cookie(format!("filters{filters_number_to_delete_from}")); 242 | 243 | // Increment filters cookie number 244 | filters_number_to_delete_from += 1; 245 | } 246 | } else { 247 | // Remove unnumbered filters cookie 248 | response.remove_cookie("filters".to_string()); 249 | 250 | // Starts at one to deal with the first numbered subscription cookie and onwards 251 | let mut filters_number_to_delete_from = 1; 252 | 253 | // While filtersNUMBER= is in the string of cookies add a response removing that cookie 254 | while cookies_string.contains(&format!("filters{filters_number_to_delete_from}=")) { 255 | // Remove that sfilters cookie 256 | response.remove_cookie(format!("filters{filters_number_to_delete_from}")); 257 | 258 | // Increment filters cookie number 259 | filters_number_to_delete_from += 1; 260 | } 261 | } 262 | 263 | response 264 | } 265 | 266 | /// Set cookies using response "Set-Cookie" header 267 | pub async fn restore(req: Request) -> Result, String> { 268 | Ok(set_cookies_method(req, true)) 269 | } 270 | 271 | pub async fn update(req: Request) -> Result, String> { 272 | Ok(set_cookies_method(req, false)) 273 | } 274 | 275 | pub async fn encoded_restore(req: Request) -> Result, String> { 276 | let body = hyper::body::to_bytes(req.into_body()) 277 | .await 278 | .map_err(|e| format!("Failed to get bytes from request body: {e}"))?; 279 | 280 | if body.len() > 1024 * 1024 { 281 | return Err("Request body too large".to_string()); 282 | } 283 | 284 | let encoded_prefs = form_urlencoded::parse(&body) 285 | .find(|(key, _)| key == "encoded_prefs") 286 | .map(|(_, value)| value) 287 | .ok_or_else(|| "encoded_prefs parameter not found in request body".to_string())?; 288 | 289 | let bytes = base2048::decode(&encoded_prefs).ok_or_else(|| "Failed to decode base2048 encoded preferences".to_string())?; 
290 | 291 | let out = timeout(std::time::Duration::from_secs(1), async { deflate_decompress(bytes) }) 292 | .await 293 | .map_err(|e| format!("Failed to decompress bytes: {e}"))??; 294 | 295 | let mut prefs: Preferences = timeout(std::time::Duration::from_secs(1), async { bincode::deserialize(&out) }) 296 | .await 297 | .map_err(|e| format!("Failed to deserialize preferences: {e}"))? 298 | .map_err(|e| format!("Failed to deserialize bytes into Preferences struct: {e}"))?; 299 | 300 | prefs.available_themes = vec![]; 301 | 302 | let url = format!("/settings/restore/?{}", prefs.to_urlencoded()?); 303 | 304 | Ok(redirect(&url)) 305 | } 306 | -------------------------------------------------------------------------------- /src/config.rs: -------------------------------------------------------------------------------- 1 | use serde::{Deserialize, Serialize}; 2 | use std::{env::var, fs::read_to_string, sync::LazyLock}; 3 | 4 | /// This is the local static that is initialized at runtime (technically at 5 | /// first request) and contains the instance settings. 6 | pub static CONFIG: LazyLock = LazyLock::new(Config::load); 7 | 8 | /// This serves as the frontend for an archival API - on removed comments, this URL 9 | /// will be the base of a link, to display removed content (on another site). 10 | pub const DEFAULT_PUSHSHIFT_FRONTEND: &str = "undelete.pullpush.io"; 11 | 12 | /// Stores the configuration parsed from the environment variables and the 13 | /// config file. `Config::Default()` contains None for each setting. 14 | /// When adding more config settings, add it to `Config::load`, 15 | /// `get_setting_from_config`, both below, as well as 16 | /// `instance_info::InstanceInfo.to_string`(), README.md and app.json. 
17 | #[derive(Default, Serialize, Deserialize, Clone, Debug)] 18 | pub struct Config { 19 | #[serde(rename = "REDLIB_SFW_ONLY")] 20 | #[serde(alias = "LIBREDDIT_SFW_ONLY")] 21 | pub(crate) sfw_only: Option, 22 | 23 | #[serde(rename = "REDLIB_DEFAULT_THEME")] 24 | #[serde(alias = "LIBREDDIT_DEFAULT_THEME")] 25 | pub(crate) default_theme: Option, 26 | 27 | #[serde(rename = "REDLIB_DEFAULT_FRONT_PAGE")] 28 | #[serde(alias = "LIBREDDIT_DEFAULT_FRONT_PAGE")] 29 | pub(crate) default_front_page: Option, 30 | 31 | #[serde(rename = "REDLIB_DEFAULT_LAYOUT")] 32 | #[serde(alias = "LIBREDDIT_DEFAULT_LAYOUT")] 33 | pub(crate) default_layout: Option, 34 | 35 | #[serde(rename = "REDLIB_DEFAULT_WIDE")] 36 | #[serde(alias = "LIBREDDIT_DEFAULT_WIDE")] 37 | pub(crate) default_wide: Option, 38 | 39 | #[serde(rename = "REDLIB_DEFAULT_COMMENT_SORT")] 40 | #[serde(alias = "LIBREDDIT_DEFAULT_COMMENT_SORT")] 41 | pub(crate) default_comment_sort: Option, 42 | 43 | #[serde(rename = "REDLIB_DEFAULT_POST_SORT")] 44 | #[serde(alias = "LIBREDDIT_DEFAULT_POST_SORT")] 45 | pub(crate) default_post_sort: Option, 46 | 47 | #[serde(rename = "REDLIB_DEFAULT_BLUR_SPOILER")] 48 | #[serde(alias = "LIBREDDIT_DEFAULT_BLUR_SPOILER")] 49 | pub(crate) default_blur_spoiler: Option, 50 | 51 | #[serde(rename = "REDLIB_DEFAULT_SHOW_NSFW")] 52 | #[serde(alias = "LIBREDDIT_DEFAULT_SHOW_NSFW")] 53 | pub(crate) default_show_nsfw: Option, 54 | 55 | #[serde(rename = "REDLIB_DEFAULT_BLUR_NSFW")] 56 | #[serde(alias = "LIBREDDIT_DEFAULT_BLUR_NSFW")] 57 | pub(crate) default_blur_nsfw: Option, 58 | 59 | #[serde(rename = "REDLIB_DEFAULT_USE_HLS")] 60 | #[serde(alias = "LIBREDDIT_DEFAULT_USE_HLS")] 61 | pub(crate) default_use_hls: Option, 62 | 63 | #[serde(rename = "REDLIB_DEFAULT_HIDE_HLS_NOTIFICATION")] 64 | #[serde(alias = "LIBREDDIT_DEFAULT_HIDE_HLS_NOTIFICATION")] 65 | pub(crate) default_hide_hls_notification: Option, 66 | 67 | #[serde(rename = "REDLIB_DEFAULT_HIDE_AWARDS")] 68 | #[serde(alias = 
"LIBREDDIT_DEFAULT_HIDE_AWARDS")] 69 | pub(crate) default_hide_awards: Option, 70 | 71 | #[serde(rename = "REDLIB_DEFAULT_HIDE_SIDEBAR_AND_SUMMARY")] 72 | #[serde(alias = "LIBREDDIT_DEFAULT_HIDE_SIDEBAR_AND_SUMMARY")] 73 | pub(crate) default_hide_sidebar_and_summary: Option, 74 | 75 | #[serde(rename = "REDLIB_DEFAULT_HIDE_SCORE")] 76 | #[serde(alias = "LIBREDDIT_DEFAULT_HIDE_SCORE")] 77 | pub(crate) default_hide_score: Option, 78 | 79 | #[serde(rename = "REDLIB_DEFAULT_SUBSCRIPTIONS")] 80 | #[serde(alias = "LIBREDDIT_DEFAULT_SUBSCRIPTIONS")] 81 | pub(crate) default_subscriptions: Option, 82 | 83 | #[serde(rename = "REDLIB_DEFAULT_FILTERS")] 84 | #[serde(alias = "LIBREDDIT_DEFAULT_FILTERS")] 85 | pub(crate) default_filters: Option, 86 | 87 | #[serde(rename = "REDLIB_DEFAULT_DISABLE_VISIT_REDDIT_CONFIRMATION")] 88 | #[serde(alias = "LIBREDDIT_DEFAULT_DISABLE_VISIT_REDDIT_CONFIRMATION")] 89 | pub(crate) default_disable_visit_reddit_confirmation: Option, 90 | 91 | #[serde(rename = "REDLIB_BANNER")] 92 | #[serde(alias = "LIBREDDIT_BANNER")] 93 | pub(crate) banner: Option, 94 | 95 | #[serde(rename = "REDLIB_ROBOTS_DISABLE_INDEXING")] 96 | #[serde(alias = "LIBREDDIT_ROBOTS_DISABLE_INDEXING")] 97 | pub(crate) robots_disable_indexing: Option, 98 | 99 | #[serde(rename = "REDLIB_PUSHSHIFT_FRONTEND")] 100 | #[serde(alias = "LIBREDDIT_PUSHSHIFT_FRONTEND")] 101 | pub(crate) pushshift: Option, 102 | 103 | #[serde(rename = "REDLIB_ENABLE_RSS")] 104 | pub(crate) enable_rss: Option, 105 | 106 | #[serde(rename = "REDLIB_FULL_URL")] 107 | pub(crate) full_url: Option, 108 | 109 | #[serde(rename = "REDLIB_DEFAULT_REMOVE_DEFAULT_FEEDS")] 110 | pub(crate) default_remove_default_feeds: Option, 111 | } 112 | 113 | impl Config { 114 | /// Load the configuration from the environment variables and the config file. 115 | /// In the case that there are no environment variables set and there is no 116 | /// config file, this function returns a Config that contains all None values. 
117 | pub fn load() -> Self { 118 | let load_config = |name: &str| { 119 | let new_file = read_to_string(name); 120 | new_file.ok().and_then(|new_file| toml::from_str::(&new_file).ok()) 121 | }; 122 | 123 | let config = load_config("redlib.toml").or_else(|| load_config("libreddit.toml")).unwrap_or_default(); 124 | 125 | // This function defines the order of preference - first check for 126 | // environment variables with "REDLIB", then check the legacy LIBREDDIT 127 | // option, then check the config, then if all are `None`, return a `None` 128 | let parse = |key: &str| -> Option { 129 | // Return the first non-`None` value 130 | // If all are `None`, return `None` 131 | let legacy_key = key.replace("REDLIB_", "LIBREDDIT_"); 132 | var(key).ok().or_else(|| var(legacy_key).ok()).or_else(|| get_setting_from_config(key, &config)) 133 | }; 134 | Self { 135 | sfw_only: parse("REDLIB_SFW_ONLY"), 136 | default_theme: parse("REDLIB_DEFAULT_THEME"), 137 | default_front_page: parse("REDLIB_DEFAULT_FRONT_PAGE"), 138 | default_layout: parse("REDLIB_DEFAULT_LAYOUT"), 139 | default_post_sort: parse("REDLIB_DEFAULT_POST_SORT"), 140 | default_wide: parse("REDLIB_DEFAULT_WIDE"), 141 | default_comment_sort: parse("REDLIB_DEFAULT_COMMENT_SORT"), 142 | default_blur_spoiler: parse("REDLIB_DEFAULT_BLUR_SPOILER"), 143 | default_show_nsfw: parse("REDLIB_DEFAULT_SHOW_NSFW"), 144 | default_blur_nsfw: parse("REDLIB_DEFAULT_BLUR_NSFW"), 145 | default_use_hls: parse("REDLIB_DEFAULT_USE_HLS"), 146 | default_hide_hls_notification: parse("REDLIB_DEFAULT_HIDE_HLS_NOTIFICATION"), 147 | default_hide_awards: parse("REDLIB_DEFAULT_HIDE_AWARDS"), 148 | default_hide_sidebar_and_summary: parse("REDLIB_DEFAULT_HIDE_SIDEBAR_AND_SUMMARY"), 149 | default_hide_score: parse("REDLIB_DEFAULT_HIDE_SCORE"), 150 | default_subscriptions: parse("REDLIB_DEFAULT_SUBSCRIPTIONS"), 151 | default_filters: parse("REDLIB_DEFAULT_FILTERS"), 152 | default_disable_visit_reddit_confirmation: 
parse("REDLIB_DEFAULT_DISABLE_VISIT_REDDIT_CONFIRMATION"), 153 | banner: parse("REDLIB_BANNER"), 154 | robots_disable_indexing: parse("REDLIB_ROBOTS_DISABLE_INDEXING"), 155 | pushshift: parse("REDLIB_PUSHSHIFT_FRONTEND"), 156 | enable_rss: parse("REDLIB_ENABLE_RSS"), 157 | full_url: parse("REDLIB_FULL_URL"), 158 | default_remove_default_feeds: parse("REDLIB_DEFAULT_REMOVE_DEFAULT_FEEDS"), 159 | } 160 | } 161 | } 162 | 163 | fn get_setting_from_config(name: &str, config: &Config) -> Option { 164 | match name { 165 | "REDLIB_SFW_ONLY" => config.sfw_only.clone(), 166 | "REDLIB_DEFAULT_THEME" => config.default_theme.clone(), 167 | "REDLIB_DEFAULT_FRONT_PAGE" => config.default_front_page.clone(), 168 | "REDLIB_DEFAULT_LAYOUT" => config.default_layout.clone(), 169 | "REDLIB_DEFAULT_COMMENT_SORT" => config.default_comment_sort.clone(), 170 | "REDLIB_DEFAULT_POST_SORT" => config.default_post_sort.clone(), 171 | "REDLIB_DEFAULT_BLUR_SPOILER" => config.default_blur_spoiler.clone(), 172 | "REDLIB_DEFAULT_SHOW_NSFW" => config.default_show_nsfw.clone(), 173 | "REDLIB_DEFAULT_BLUR_NSFW" => config.default_blur_nsfw.clone(), 174 | "REDLIB_DEFAULT_USE_HLS" => config.default_use_hls.clone(), 175 | "REDLIB_DEFAULT_HIDE_HLS_NOTIFICATION" => config.default_hide_hls_notification.clone(), 176 | "REDLIB_DEFAULT_WIDE" => config.default_wide.clone(), 177 | "REDLIB_DEFAULT_HIDE_AWARDS" => config.default_hide_awards.clone(), 178 | "REDLIB_DEFAULT_HIDE_SIDEBAR_AND_SUMMARY" => config.default_hide_sidebar_and_summary.clone(), 179 | "REDLIB_DEFAULT_HIDE_SCORE" => config.default_hide_score.clone(), 180 | "REDLIB_DEFAULT_SUBSCRIPTIONS" => config.default_subscriptions.clone(), 181 | "REDLIB_DEFAULT_FILTERS" => config.default_filters.clone(), 182 | "REDLIB_DEFAULT_DISABLE_VISIT_REDDIT_CONFIRMATION" => config.default_disable_visit_reddit_confirmation.clone(), 183 | "REDLIB_BANNER" => config.banner.clone(), 184 | "REDLIB_ROBOTS_DISABLE_INDEXING" => config.robots_disable_indexing.clone(), 185 | 
"REDLIB_PUSHSHIFT_FRONTEND" => config.pushshift.clone(), 186 | "REDLIB_ENABLE_RSS" => config.enable_rss.clone(), 187 | "REDLIB_FULL_URL" => config.full_url.clone(), 188 | "REDLIB_DEFAULT_REMOVE_DEFAULT_FEEDS" => config.default_remove_default_feeds.clone(), 189 | _ => None, 190 | } 191 | } 192 | 193 | /// Retrieves setting from environment variable or config file. 194 | pub fn get_setting(name: &str) -> Option { 195 | get_setting_from_config(name, &CONFIG) 196 | } 197 | 198 | #[cfg(test)] 199 | use {sealed_test::prelude::*, std::fs::write}; 200 | 201 | #[test] 202 | fn test_deserialize() { 203 | // Must handle empty input 204 | let result = toml::from_str::(""); 205 | assert!(result.is_ok(), "Error: {}", result.unwrap_err()); 206 | } 207 | 208 | #[test] 209 | #[sealed_test(env = [("REDLIB_SFW_ONLY", "on")])] 210 | fn test_env_var() { 211 | assert!(crate::utils::sfw_only()) 212 | } 213 | 214 | #[test] 215 | #[sealed_test] 216 | fn test_config() { 217 | let config_to_write = r#"REDLIB_DEFAULT_COMMENT_SORT = "best""#; 218 | write("redlib.toml", config_to_write).unwrap(); 219 | assert_eq!(get_setting("REDLIB_DEFAULT_COMMENT_SORT"), Some("best".into())); 220 | } 221 | 222 | #[test] 223 | #[sealed_test] 224 | fn test_config_legacy() { 225 | let config_to_write = r#"LIBREDDIT_DEFAULT_COMMENT_SORT = "best""#; 226 | write("libreddit.toml", config_to_write).unwrap(); 227 | assert_eq!(get_setting("REDLIB_DEFAULT_COMMENT_SORT"), Some("best".into())); 228 | } 229 | 230 | #[test] 231 | #[sealed_test(env = [("LIBREDDIT_SFW_ONLY", "on")])] 232 | fn test_env_var_legacy() { 233 | assert!(crate::utils::sfw_only()) 234 | } 235 | 236 | #[test] 237 | #[sealed_test(env = [("REDLIB_DEFAULT_COMMENT_SORT", "top")])] 238 | fn test_env_config_precedence() { 239 | let config_to_write = r#"REDLIB_DEFAULT_COMMENT_SORT = "best""#; 240 | write("redlib.toml", config_to_write).unwrap(); 241 | assert_eq!(get_setting("REDLIB_DEFAULT_COMMENT_SORT"), Some("top".into())) 242 | } 243 | 244 | #[test] 245 | 
// FIX(review): as written, this test was a byte-for-byte duplicate of
// test_env_config_precedence above (same REDLIB_ env var) and never
// exercised the *legacy* env-prefix precedence its name promises. Use the
// LIBREDDIT_-prefixed variable, matching test_env_var_legacy /
// test_config_legacy: a legacy env var must still beat the config file.
#[sealed_test(env = [("LIBREDDIT_DEFAULT_COMMENT_SORT", "top")])]
246 | fn test_alt_env_config_precedence() {
247 | let config_to_write = r#"REDLIB_DEFAULT_COMMENT_SORT = "best""#;
248 | write("redlib.toml", config_to_write).unwrap();
249 | assert_eq!(get_setting("REDLIB_DEFAULT_COMMENT_SORT"), Some("top".into()))
250 | }
// Default subscriptions/filters are plain `+`-separated subreddit lists.
251 | #[test]
252 | #[sealed_test(env = [("REDLIB_DEFAULT_SUBSCRIPTIONS", "news+bestof")])]
253 | fn test_default_subscriptions() {
254 | assert_eq!(get_setting("REDLIB_DEFAULT_SUBSCRIPTIONS"), Some("news+bestof".into()));
255 | }
256 | 
257 | #[test]
258 | #[sealed_test(env = [("REDLIB_DEFAULT_FILTERS", "news+bestof")])]
259 | fn test_default_filters() {
260 | assert_eq!(get_setting("REDLIB_DEFAULT_FILTERS"), Some("news+bestof".into()));
261 | }
262 | 
// Pushshift frontend URL round-trips through the config file unchanged.
263 | #[test]
264 | #[sealed_test]
265 | fn test_pushshift() {
266 | let config_to_write = r#"REDLIB_PUSHSHIFT_FRONTEND = "https://api.pushshift.io""#;
267 | write("redlib.toml", config_to_write).unwrap();
268 | assert!(get_setting("REDLIB_PUSHSHIFT_FRONTEND").is_some());
269 | assert_eq!(get_setting("REDLIB_PUSHSHIFT_FRONTEND"), Some("https://api.pushshift.io".into()));
270 | }
271 | 
-------------------------------------------------------------------------------- /src/main.rs: -------------------------------------------------------------------------------- 1 | // Global specifiers
2 | #![forbid(unsafe_code)]
3 | #![allow(clippy::cmp_owned)]
4 | 
5 | use cached::proc_macro::cached;
6 | use clap::{Arg, ArgAction, Command};
7 | use std::str::FromStr;
8 | use std::sync::LazyLock;
9 | 
10 | use futures_lite::FutureExt;
11 | use hyper::Uri;
12 | use hyper::{header::HeaderValue, Body, Request, Response};
13 | use log::{info, warn};
14 | use redlib::client::{canonical_path, proxy, rate_limit_check, CLIENT};
15 | use redlib::server::{self, RequestExt};
16 | use redlib::utils::{error, redirect, ThemeAssets};
17 | use redlib::{config, duplicates, headers, instance_info, post, search, settings, subreddit, user};
18
| 19 | use redlib::client::OAUTH_CLIENT;
20 | 
21 | // Create Services
22 | 
// NOTE(review): every return type in this file reads `Result, String>` —
// the `<Response<Body>` type arguments were stripped by the extraction
// tooling (angle brackets lost). The real signatures are
// `Result<Response<Body>, String>`; confirm against the original src/main.rs.
23 | // Required for the manifest to be valid
// Serves the PWA logo, embedded into the binary at compile time via
// include_bytes! so the server needs no filesystem access at runtime.
24 | async fn pwa_logo() -> Result, String> {
25 | Ok(
26 | Response::builder()
27 | .status(200)
28 | .header("content-type", "image/png")
29 | .body(include_bytes!("../static/logo.png").as_ref().into())
30 | .unwrap_or_default(),
31 | )
32 | }
33 | 
34 | // Required for iOS App Icons
// Same embedding pattern as pwa_logo, different asset.
35 | async fn iphone_logo() -> Result, String> {
36 | Ok(
37 | Response::builder()
38 | .status(200)
39 | .header("content-type", "image/png")
40 | .body(include_bytes!("../static/apple-touch-icon.png").as_ref().into())
41 | .unwrap_or_default(),
42 | )
43 | }
44 | 
// Favicon, cached client-side for 14 days (shared caches: 1 day).
45 | async fn favicon() -> Result, String> {
46 | Ok(
47 | Response::builder()
48 | .status(200)
49 | .header("content-type", "image/vnd.microsoft.icon")
50 | .header("Cache-Control", "public, max-age=1209600, s-maxage=86400")
51 | .body(include_bytes!("../static/favicon.ico").as_ref().into())
52 | .unwrap_or_default(),
53 | )
54 | }
55 | 
// Embedded Inter variable font, same cache policy as favicon.
56 | async fn font() -> Result, String> {
57 | Ok(
58 | Response::builder()
59 | .status(200)
60 | .header("content-type", "font/woff2")
61 | .header("Cache-Control", "public, max-age=1209600, s-maxage=86400")
62 | .body(include_bytes!("../static/Inter.var.woff2").as_ref().into())
63 | .unwrap_or_default(),
64 | )
65 | }
66 | 
// OpenSearch descriptor so browsers can register Redlib as a search engine.
67 | async fn opensearch() -> Result, String> {
68 | Ok(
69 | Response::builder()
70 | .status(200)
71 | .header("content-type", "application/opensearchdescription+xml")
72 | .header("Cache-Control", "public, max-age=1209600, s-maxage=86400")
73 | .body(include_bytes!("../static/opensearch.xml").as_ref().into())
74 | .unwrap_or_default(),
75 | )
76 | }
77 | 
// Generic helper for serving an embedded text asset with a given MIME type;
// `cache` opts in to the standard 14-day Cache-Control header (added below,
// continues on the next chunk).
78 | async fn resource(body: &str, content_type: &str, cache: bool) -> Result, String> {
79 | let mut res = Response::builder()
80 | .status(200)
81 | .header("content-type", content_type)
82 | .body(body.to_string().into())
83 | .unwrap_or_default();
84 | 
85 | if cache {
86 | if let Ok(val) = 
HeaderValue::from_str("public, max-age=1209600, s-maxage=86400") {
// Same 14-day/1-day cache policy as the other static handlers; silently
// skipped if the header value fails to parse (it is a constant, so it won't).
87 | res.headers_mut().insert("Cache-Control", val);
88 | }
89 | }
90 | 
91 | Ok(res)
92 | }
93 | 
// Concatenates the base stylesheet with every bundled theme CSS file so one
// request delivers all themes; themes are embedded via the ThemeAssets
// rust-embed struct.
94 | async fn style() -> Result, String> {
95 | let mut res = include_str!("../static/style.css").to_string();
96 | for file in ThemeAssets::iter() {
97 | res.push('\n');
// unwrap is safe in practice: `file` came from ThemeAssets::iter() itself,
// and embedded theme files are checked-in UTF-8 CSS.
98 | let theme = ThemeAssets::get(file.as_ref()).unwrap();
99 | res.push_str(std::str::from_utf8(theme.data.as_ref()).unwrap());
100 | }
101 | Ok(
102 | Response::builder()
103 | .status(200)
104 | .header("content-type", "text/css")
105 | .header("Cache-Control", "public, max-age=1209600, s-maxage=86400")
// NOTE(review): `res` is already a String; `res.to_string()` clones it for
// no benefit (clippy would flag this) — `res.into()` would do.
106 | .body(res.to_string().into())
107 | .unwrap_or_default(),
108 | )
109 | }
110 | 
// Entry point: parses CLI flags, forces global statics, registers every
// route, then serves forever.
111 | #[tokio::main]
112 | async fn main() {
113 | // Load environment variables
// dotenvy failure is deliberately ignored — a .env file is optional.
114 | _ = dotenvy::dotenv();
115 | 
116 | // Initialize logger
117 | pretty_env_logger::init();
118 | 
// CLI definition. NOTE(review): the `<String>` turbofish on the get_one
// calls further down was stripped by the extraction tooling.
119 | let matches = Command::new("Redlib")
120 | .version(env!("CARGO_PKG_VERSION"))
121 | .about("Private front-end for Reddit written in Rust ")
122 | .arg(Arg::new("ipv4-only").short('4').long("ipv4-only").help("Listen on IPv4 only").num_args(0))
123 | .arg(Arg::new("ipv6-only").short('6').long("ipv6-only").help("Listen on IPv6 only").num_args(0))
// Kept only for backward CLI compatibility; the flag is never read.
124 | .arg(
125 | Arg::new("redirect-https")
126 | .short('r')
127 | .long("redirect-https")
128 | .help("Redirect all HTTP requests to HTTPS (no longer functional)")
129 | .num_args(0),
130 | )
131 | .arg(
132 | Arg::new("address")
133 | .short('a')
134 | .long("address")
135 | .value_name("ADDRESS")
136 | .help("Sets address to listen on")
137 | .default_value("[::]")
138 | .num_args(1),
139 | )
// Port is also settable via the PORT env var (clap `.env("PORT")`).
140 | .arg(
141 | Arg::new("port")
142 | .short('p')
143 | .long("port")
144 | .value_name("PORT")
145 | .env("PORT")
146 | .help("Port to listen on")
147 | .default_value("8080")
148 | .action(ArgAction::Set)
149 | .num_args(1),
150 | )
151 | .arg(
152 | Arg::new("hsts")
153 | .short('H')
154 | .long("hsts")
155 |
.value_name("EXPIRE_TIME") 156 | .help("HSTS header to tell browsers that this site should only be accessed over HTTPS") 157 | .default_value("604800") 158 | .num_args(1), 159 | ) 160 | .get_matches(); 161 | 162 | match rate_limit_check().await { 163 | Ok(()) => { 164 | info!("[✅] Rate limit check passed"); 165 | } 166 | Err(e) => { 167 | let mut message = format!("Rate limit check failed: {e}"); 168 | message += "\nThis may cause issues with the rate limit."; 169 | message += "\nPlease report this error with the above information."; 170 | message += "\nhttps://github.com/redlib-org/redlib/issues/new?assignees=sigaloid&labels=bug&title=%F0%9F%90%9B+Bug+Report%3A+Rate+limit+mismatch"; 171 | warn!("{}", message); 172 | eprintln!("{message}"); 173 | } 174 | } 175 | 176 | let address = matches.get_one::("address").unwrap(); 177 | let port = matches.get_one::("port").unwrap(); 178 | let hsts = matches.get_one("hsts").map(|m: &String| m.as_str()); 179 | 180 | let ipv4_only = std::env::var("IPV4_ONLY").is_ok() || matches.get_flag("ipv4-only"); 181 | let ipv6_only = std::env::var("IPV6_ONLY").is_ok() || matches.get_flag("ipv6-only"); 182 | 183 | let listener = if ipv4_only { 184 | format!("0.0.0.0:{port}") 185 | } else if ipv6_only { 186 | format!("[::]:{port}") 187 | } else { 188 | [address, ":", port].concat() 189 | }; 190 | 191 | println!("Starting Redlib..."); 192 | 193 | // Begin constructing a server 194 | let mut app = server::Server::new(); 195 | 196 | // Force evaluation of statics. 
In instance_info case, we need to evaluate
197 | // the timestamp so deploy date is accurate - in config case, we need to
198 | // evaluate the configuration to avoid paying penalty at first request -
199 | // in OAUTH case, we need to retrieve the token to avoid paying penalty
200 | // at first request
201 | 
202 | info!("Evaluating config.");
203 | LazyLock::force(&config::CONFIG);
204 | info!("Evaluating instance info.");
205 | LazyLock::force(&instance_info::INSTANCE_INFO);
206 | info!("Creating OAUTH client.");
207 | LazyLock::force(&OAUTH_CLIENT);
208 | 
209 | // Define default headers (added to all responses)
// Hardened defaults: no referrer leakage, no MIME sniffing, no framing, and
// a strict CSP that only relaxes what the frontend actually needs (inline
// styles, blob: workers for HLS playback).
210 | app.default_headers = headers! {
211 | "Referrer-Policy" => "no-referrer",
212 | "X-Content-Type-Options" => "nosniff",
213 | "X-Frame-Options" => "DENY",
214 | "Content-Security-Policy" => "default-src 'none'; font-src 'self'; script-src 'self' blob:; manifest-src 'self'; media-src 'self' data: blob: about:; style-src 'self' 'unsafe-inline'; base-uri 'none'; img-src 'self' data:; form-action 'self'; frame-ancestors 'none'; connect-src 'self'; worker-src blob:;"
215 | };
216 | 
// HSTS is only attached when the --hsts flag provided a max-age.
217 | if let Some(expire_time) = hsts {
218 | if let Ok(val) = HeaderValue::from_str(&format!("max-age={expire_time}")) {
219 | app.default_headers.insert("Strict-Transport-Security", val);
220 | }
221 | }
222 | 
223 | // Read static files
224 | app.at("/style.css").get(|_| style().boxed());
225 | app
226 | .at("/manifest.json")
227 | .get(|_| resource(include_str!("../static/manifest.json"), "application/json", false).boxed());
// robots.txt: if the operator sets REDLIB_ROBOTS_DISABLE_INDEXING=on the
// whole site is disallowed; otherwise only user pages are hidden from
// crawlers.
228 | app.at("/robots.txt").get(|_| {
229 | resource(
230 | if match config::get_setting("REDLIB_ROBOTS_DISABLE_INDEXING") {
231 | Some(val) => val == "on",
232 | None => false,
233 | } {
234 | "User-agent: *\nDisallow: /"
235 | } else {
236 | "User-agent: *\nDisallow: /u/\nDisallow: /user/"
237 | },
238 | "text/plain",
239 | true,
240 | )
241 | .boxed()
242 | });
243 | app.at("/favicon.ico").get(|_| favicon().boxed());
244 | app.at("/logo.png").get(|_|
pwa_logo().boxed());
245 | app.at("/Inter.var.woff2").get(|_| font().boxed());
246 | app.at("/touch-icon-iphone.png").get(|_| iphone_logo().boxed());
247 | app.at("/apple-touch-icon.png").get(|_| iphone_logo().boxed());
248 | app.at("/opensearch.xml").get(|_| opensearch().boxed());
// Embedded frontend scripts, served uncached so updates take effect on deploy.
249 | app
250 | .at("/playHLSVideo.js")
251 | .get(|_| resource(include_str!("../static/playHLSVideo.js"), "text/javascript", false).boxed());
252 | app
253 | .at("/hls.min.js")
254 | .get(|_| resource(include_str!("../static/hls.min.js"), "text/javascript", false).boxed());
255 | app
256 | .at("/highlighted.js")
257 | .get(|_| resource(include_str!("../static/highlighted.js"), "text/javascript", false).boxed());
258 | app
259 | .at("/check_update.js")
260 | .get(|_| resource(include_str!("../static/check_update.js"), "text/javascript", false).boxed());
261 | app.at("/copy.js").get(|_| resource(include_str!("../static/copy.js"), "text/javascript", false).boxed());
262 | 
// Cached upstream metadata (commit feed, instance list) proxied below.
263 | app.at("/commits.atom").get(|_| async move { proxy_commit_info().await }.boxed());
264 | app.at("/instances.json").get(|_| async move { proxy_instances().await }.boxed());
265 | 
266 | // Proxy media through Redlib
// Route parameters (:name / *wildcards) are substituted into the `{...}`
// placeholders of the upstream URL template by `proxy`.
267 | app.at("/vid/:id/:size").get(|r| proxy(r, "https://v.redd.it/{id}/DASH_{size}").boxed());
268 | app.at("/hls/:id/*path").get(|r| proxy(r, "https://v.redd.it/{id}/{path}").boxed());
269 | app.at("/img/*path").get(|r| proxy(r, "https://i.redd.it/{path}").boxed());
270 | app.at("/thumb/:point/:id").get(|r| proxy(r, "https://{point}.thumbs.redditmedia.com/{id}").boxed());
271 | app.at("/emoji/:id/:name").get(|r| proxy(r, "https://emoji.redditmedia.com/{id}/{name}").boxed());
// FIX(review): the emote template previously ended with the literal
// "(unknown)" instead of a placeholder, so the captured :filename segment
// was never forwarded and every emote request hit a bogus upstream URL.
// Use "{filename}" to match the route pattern, consistent with the
// /emoji/:id/:name route above.
272 | app
273 | .at("/emote/:subreddit_id/:filename")
274 | .get(|r| proxy(r, "https://reddit-econ-prod-assets-permanent.s3.amazonaws.com/asset-manager/{subreddit_id}/{filename}").boxed());
275 | app
276 | .at("/preview/:loc/award_images/:fullname/:id")
277 | .get(|r| proxy(r,
"https://{loc}view.redd.it/award_images/{fullname}/{id}").boxed()); 278 | app.at("/preview/:loc/:id").get(|r| proxy(r, "https://{loc}view.redd.it/{id}").boxed()); 279 | app.at("/style/*path").get(|r| proxy(r, "https://styles.redditmedia.com/{path}").boxed()); 280 | app.at("/static/*path").get(|r| proxy(r, "https://www.redditstatic.com/{path}").boxed()); 281 | 282 | // Browse user profile 283 | app 284 | .at("/u/:name") 285 | .get(|r| async move { Ok(redirect(&format!("/user/{}", r.param("name").unwrap_or_default()))) }.boxed()); 286 | app.at("/u/:name/comments/:id/:title").get(|r| post::item(r).boxed()); 287 | app.at("/u/:name/comments/:id/:title/:comment_id").get(|r| post::item(r).boxed()); 288 | 289 | app.at("/user/[deleted]").get(|req| error(req, "User has deleted their account").boxed()); 290 | app.at("/user/:name.rss").get(|r| user::rss(r).boxed()); 291 | app.at("/user/:name").get(|r| user::profile(r).boxed()); 292 | app.at("/user/:name/:listing").get(|r| user::profile(r).boxed()); 293 | app.at("/user/:name/comments/:id").get(|r| post::item(r).boxed()); 294 | app.at("/user/:name/comments/:id/:title").get(|r| post::item(r).boxed()); 295 | app.at("/user/:name/comments/:id/:title/:comment_id").get(|r| post::item(r).boxed()); 296 | 297 | // Configure settings 298 | app.at("/settings").get(|r| settings::get(r).boxed()).post(|r| settings::set(r).boxed()); 299 | app.at("/settings/restore").get(|r| settings::restore(r).boxed()); 300 | app.at("/settings/encoded-restore").post(|r| settings::encoded_restore(r).boxed()); 301 | app.at("/settings/update").get(|r| settings::update(r).boxed()); 302 | 303 | // RSS Subscriptions 304 | app.at("/r/:sub.rss").get(|r| subreddit::rss(r).boxed()); 305 | 306 | // Subreddit services 307 | app 308 | .at("/r/:sub") 309 | .get(|r| subreddit::community(r).boxed()) 310 | .post(|r| subreddit::add_quarantine_exception(r).boxed()); 311 | 312 | app 313 | .at("/r/u_:name") 314 | .get(|r| async move { Ok(redirect(&format!("/user/{}", 
r.param("name").unwrap_or_default()))) }.boxed()); 315 | 316 | app.at("/r/:sub/subscribe").post(|r| subreddit::subscriptions_filters(r).boxed()); 317 | app.at("/r/:sub/unsubscribe").post(|r| subreddit::subscriptions_filters(r).boxed()); 318 | app.at("/r/:sub/filter").post(|r| subreddit::subscriptions_filters(r).boxed()); 319 | app.at("/r/:sub/unfilter").post(|r| subreddit::subscriptions_filters(r).boxed()); 320 | 321 | app.at("/r/:sub/comments/:id").get(|r| post::item(r).boxed()); 322 | app.at("/r/:sub/comments/:id/:title").get(|r| post::item(r).boxed()); 323 | app.at("/r/:sub/comments/:id/:title/:comment_id").get(|r| post::item(r).boxed()); 324 | app.at("/comments/:id").get(|r| post::item(r).boxed()); 325 | app.at("/comments/:id/comments").get(|r| post::item(r).boxed()); 326 | app.at("/comments/:id/comments/:comment_id").get(|r| post::item(r).boxed()); 327 | app.at("/comments/:id/:title").get(|r| post::item(r).boxed()); 328 | app.at("/comments/:id/:title/:comment_id").get(|r| post::item(r).boxed()); 329 | 330 | app.at("/r/:sub/duplicates/:id").get(|r| duplicates::item(r).boxed()); 331 | app.at("/r/:sub/duplicates/:id/:title").get(|r| duplicates::item(r).boxed()); 332 | app.at("/duplicates/:id").get(|r| duplicates::item(r).boxed()); 333 | app.at("/duplicates/:id/:title").get(|r| duplicates::item(r).boxed()); 334 | 335 | app.at("/r/:sub/search").get(|r| search::find(r).boxed()); 336 | 337 | app 338 | .at("/r/:sub/w") 339 | .get(|r| async move { Ok(redirect(&format!("/r/{}/wiki", r.param("sub").unwrap_or_default()))) }.boxed()); 340 | app 341 | .at("/r/:sub/w/*page") 342 | .get(|r| async move { Ok(redirect(&format!("/r/{}/wiki/{}", r.param("sub").unwrap_or_default(), r.param("wiki").unwrap_or_default()))) }.boxed()); 343 | app.at("/r/:sub/wiki").get(|r| subreddit::wiki(r).boxed()); 344 | app.at("/r/:sub/wiki/*page").get(|r| subreddit::wiki(r).boxed()); 345 | 346 | app.at("/r/:sub/about/sidebar").get(|r| subreddit::sidebar(r).boxed()); 347 | 348 | 
app.at("/r/:sub/:sort").get(|r| subreddit::community(r).boxed()); 349 | 350 | // Front page 351 | app.at("/").get(|r| subreddit::community(r).boxed()); 352 | 353 | // View Reddit wiki 354 | app.at("/w").get(|_| async { Ok(redirect("/wiki")) }.boxed()); 355 | app 356 | .at("/w/*page") 357 | .get(|r| async move { Ok(redirect(&format!("/wiki/{}", r.param("page").unwrap_or_default()))) }.boxed()); 358 | app.at("/wiki").get(|r| subreddit::wiki(r).boxed()); 359 | app.at("/wiki/*page").get(|r| subreddit::wiki(r).boxed()); 360 | 361 | // Search all of Reddit 362 | app.at("/search").get(|r| search::find(r).boxed()); 363 | 364 | // Handle about pages 365 | app.at("/about").get(|req| error(req, "About pages aren't added yet").boxed()); 366 | 367 | // Instance info page 368 | app.at("/info").get(|r| instance_info::instance_info(r).boxed()); 369 | app.at("/info.:extension").get(|r| instance_info::instance_info(r).boxed()); 370 | 371 | // Handle obfuscated share links. 372 | // Note that this still forces the server to follow the share link to get to the post, so maybe this wants to be updated with a warning before it follow it 373 | app.at("/r/:sub/s/:id").get(|req: Request| { 374 | Box::pin(async move { 375 | let sub = req.param("sub").unwrap_or_default(); 376 | match req.param("id").as_deref() { 377 | // Share link 378 | Some(id) if (8..12).contains(&id.len()) => match canonical_path(format!("/r/{sub}/s/{id}"), 3).await { 379 | Ok(Some(path)) => Ok(redirect(&path)), 380 | Ok(None) => error(req, "Post ID is invalid. 
It may point to a post on a community that has been banned.").await, 381 | Err(e) => error(req, &e).await, 382 | }, 383 | 384 | // Error message for unknown pages 385 | _ => error(req, "Nothing here").await, 386 | } 387 | }) 388 | }); 389 | 390 | app.at("/:id").get(|req: Request| { 391 | Box::pin(async move { 392 | match req.param("id").as_deref() { 393 | // Sort front page 394 | Some("best" | "hot" | "new" | "top" | "rising" | "controversial") => subreddit::community(req).await, 395 | 396 | // Short link for post 397 | Some(id) if (5..8).contains(&id.len()) => match canonical_path(format!("/comments/{id}"), 3).await { 398 | Ok(path_opt) => match path_opt { 399 | Some(path) => Ok(redirect(&path)), 400 | None => error(req, "Post ID is invalid. It may point to a post on a community that has been banned.").await, 401 | }, 402 | Err(e) => error(req, &e).await, 403 | }, 404 | 405 | // Error message for unknown pages 406 | _ => error(req, "Nothing here").await, 407 | } 408 | }) 409 | }); 410 | 411 | // Default service in case no routes match 412 | app.at("/*").get(|req| error(req, "Nothing here").boxed()); 413 | 414 | println!("Running Redlib v{} on {listener}!", env!("CARGO_PKG_VERSION")); 415 | 416 | let server = app.listen(&listener); 417 | 418 | // Run this server for... forever! 
419 | if let Err(e) = server.await { 420 | eprintln!("Server error: {e}"); 421 | } 422 | } 423 | 424 | pub async fn proxy_commit_info() -> Result, String> { 425 | Ok( 426 | Response::builder() 427 | .status(200) 428 | .header("content-type", "application/atom+xml") 429 | .body(Body::from(fetch_commit_info().await)) 430 | .unwrap_or_default(), 431 | ) 432 | } 433 | 434 | #[cached(time = 600)] 435 | async fn fetch_commit_info() -> String { 436 | let uri = Uri::from_str("https://github.com/redlib-org/redlib/commits/main.atom").expect("Invalid URI"); 437 | 438 | let resp: Body = CLIENT.get(uri).await.expect("Failed to request GitHub").into_body(); 439 | 440 | hyper::body::to_bytes(resp).await.expect("Failed to read body").iter().copied().map(|x| x as char).collect() 441 | } 442 | 443 | pub async fn proxy_instances() -> Result, String> { 444 | Ok( 445 | Response::builder() 446 | .status(200) 447 | .header("content-type", "application/json") 448 | .body(Body::from(fetch_instances().await)) 449 | .unwrap_or_default(), 450 | ) 451 | } 452 | 453 | #[cached(time = 600)] 454 | async fn fetch_instances() -> String { 455 | let uri = Uri::from_str("https://raw.githubusercontent.com/redlib-org/redlib-instances/refs/heads/main/instances.json").expect("Invalid URI"); 456 | 457 | let resp: Body = CLIENT.get(uri).await.expect("Failed to request GitHub").into_body(); 458 | 459 | hyper::body::to_bytes(resp).await.expect("Failed to read body").iter().copied().map(|x| x as char).collect() 460 | } 461 | --------------------------------------------------------------------------------