├── rust-toolchain
├── .gitignore
├── static
│   ├── favicon.ico
│   ├── index.js
│   ├── thread.js
│   └── styles.css
├── .rustfmt.toml
├── Dockerfile
├── proc-macros
│   ├── Cargo.toml
│   └── src
│       └── lib.rs
├── templates
│   ├── error.html
│   ├── leaderboard.html
│   ├── base.html
│   ├── banned.html
│   ├── update_bio.html
│   ├── index.html
│   ├── macros.html
│   ├── react.html
│   ├── login.html
│   ├── item.html
│   ├── author.html
│   ├── offer.html
│   ├── register.html
│   ├── profile.html
│   ├── offers.html
│   ├── items.html
│   └── thread.html
├── Cargo.toml
├── README.md
├── src
│   ├── main.rs
│   ├── schema.sql
│   ├── images.rs
│   ├── lib.rs
│   ├── pages.rs
│   ├── threads.rs
│   └── users.rs
├── migrations
│   └── 1_initial.up.sql
└── LICENSE
/rust-toolchain: -------------------------------------------------------------------------------- 1 | stable 2 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *~ 2 | Cargo.lock 3 | target/ 4 | proc-macros/target/ -------------------------------------------------------------------------------- /static/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/maplant/marche/HEAD/static/favicon.ico -------------------------------------------------------------------------------- /.rustfmt.toml: -------------------------------------------------------------------------------- 1 | struct_field_align_threshold = 20 2 | max_width = 100 3 | wrap_comments = true 4 | use_try_shorthand = true 5 | imports_granularity = "Crate" 6 | group_imports = "StdExternalCrate" -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM rust:1.70-slim-buster 2 | 3 | WORKDIR /app 4 | 5 | COPY . . 
6 | 7 | RUN apt-get update && apt-get install -y libpq-dev 8 | RUN cargo build --release 9 | 10 | EXPOSE 8080 11 | CMD [ "./target/release/marche-server" ] 12 | -------------------------------------------------------------------------------- /proc-macros/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "marche-proc-macros" 3 | version = "0.1.0" 4 | edition = "2021" 5 | 6 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 7 | 8 | [lib] 9 | proc-macro = true 10 | 11 | [dependencies] 12 | syn = { version = "1.0", features = ["full"] } 13 | quote = "1.0" 14 | proc-macro2 = "1.0" 15 | -------------------------------------------------------------------------------- /templates/error.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | 3 | {% block title %}Error!{% endblock %} 4 | 5 | {% block content %} 6 | 13 | {% endblock %} 14 | -------------------------------------------------------------------------------- /templates/leaderboard.html: -------------------------------------------------------------------------------- 1 | {%- import "macros.html" as macros -%} 2 | {% extends "base.html" %} 3 | 4 | {% block title %}Global Leaderboards{% endblock %} 5 | 6 | {% block content %} 7 | {% for user in users %} 8 | 19 | {% endfor %} 20 | {% endblock %} 21 | 22 | -------------------------------------------------------------------------------- /templates/base.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | {% block title %}{% endblock %} 4 | 5 | 6 | 7 | 8 | 13 | 14 | 15 | 16 | 24 | {% block footer %}{% endblock %} 25 | 26 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "marche-server" 3 | version = "0.1.0" 4 | authors = ["Matthew Plant "] 5 | edition = "2021" 6 | default-run = "marche-server" 7 | 8 | [dependencies] 9 | aes-gcm = "0.10" 10 | anyhow = "1.0" 11 | axum = { version = "0.6", features = ["multipart", "json", "ws"] } 12 | axum-client-ip = "0.3.0" 13 | base64 = "0.13" 14 | thiserror = "1.0" 15 | aws-config = "0.46" 16 | aws-sdk-s3 = "0.16" 17 | html-escape = "0.2.11" 18 | rand = { version = "0.8" , features = ["getrandom"] } 19 | derive_more = "0.99" 20 | serde = { version = "1.0", features = ["derive"] } 21 | serde_json = "1.0" 22 | sha2 = "0.10" 23 | libpasta = "0.1" 24 | chrono = { version = "0.4", features = ["serde"] } 25 | lazy_static = "1.4" 26 | rand_xorshift = "0.3.0" 27 | regex = "1" 28 | askama = { version = "0.12", features = ["with-axum"] } 29 | askama_axum = { version = "0.3" } 30 | tokio = { version = "1", features = ["full"] } 31 | tower = "0.4" 32 | tower-http = { version = "0.3", features = ["fs", "trace"] } 33 | tower-cookies = { version = "0.8", features = ["private", "axum-core"] } 34 | tracing = "0.1" 35 | tracing-subscriber = "0.3" 36 | http = "0.2" 37 | urlencoding = "2" 38 | image = "0.24" 39 | ipnetwork = "0.19" 40 | inventory = "0.2" 41 | sqlx = { version = "0.6", features = [ "runtime-tokio-rustls", "postgres", "chrono", "ipnetwork" ] } 42 | futures = "0.3" 43 | google-authenticator = { version = "0.2.0", git = "https://github.com/maplant/google-authenticator-rust.git" } 44 | maplit = "1.0.2" 45 | cookie = "0.16" 46 | 47 | [dependencies.marche-proc-macros] 48 | path = "proc-macros" 
-------------------------------------------------------------------------------- /templates/banned.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | {% block title %}{% endblock %} 4 | 5 | 6 | 7 | 8 | 13 | 14 | 15 | 16 | 27 | 28 | -------------------------------------------------------------------------------- /templates/update_bio.html: -------------------------------------------------------------------------------- 1 | {%- import "macros.html" as macros -%} 2 | {% extends "base.html" %} 3 | 4 | {% block title %}{{name}}'s Profile{% endblock %} 5 | 6 | {% block content %} 7 | 44 | {% endblock %} 45 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # ⚖️ Marche ⚖️ 2 | 3 | ## What is Marche? 4 | 5 | Marche is lightweight forum software in the vein of Reddit or 4chan, designed to provide an 6 | enjoyable and rewarding experience for users. 7 | 8 | You can check out the canonical deployment of Marche at https://www.cest-le-marche.com. 9 | Everything on the main branch of this repository is automatically deployed to that URL. 10 | 11 | ## What makes Marche different? 12 | 13 | The primary difference between Marche and other forum software is the inclusion of items. 14 | When a user posts a thread or a reply, there is a random chance that the user will be given 15 | an item. These can include cosmetic items such as profile pictures, backgrounds, and 16 | badges that the user can equip. 17 | 18 | The other type of item that can drop is the reaction. A user can use a reaction one time 19 | on any post other than one of their own to show appreciation or the opposite. Reactions add 20 | or subtract experience points from the recipient. 21 | 22 | Experience points grant users special privileges, the most prominent example as of right now 23 | being the ability to attach photos to posts after reaching a certain level. This change was made in 24 | order to reduce the chance that users post blatantly illegal photos. 25 | 26 | ## Ethos 27 | 28 | Marche is designed to be fun. It is not designed to revolutionize communication or society or 29 | how people interact with each other online. 30 | 31 | ## Technical details 32 | 33 | Marche is written in Rust and uses the following tech stack: 34 | 35 | * Tokio (async runtime) 36 | * Axum (web framework) 37 | * Askama (templating) 38 | * jQuery 39 | * Postgres 40 | 41 | 42 | 43 | -------------------------------------------------------------------------------- /templates/index.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | 3 | {% block title %}Home{% endblock %} 4 | 5 | {% block content %} 6 | 7 | 
  • 8 | 12 | {% for tag in tags %} 13 | 17 | {% endfor %} 18 |
  • 19 | {% for post in posts %} 20 | {% if !post.hidden || viewer_role > Role::User %} 21 | 43 | {% endif %} 44 | {% endfor %} 45 | {% endblock %} 46 | -------------------------------------------------------------------------------- /templates/macros.html: -------------------------------------------------------------------------------- 1 | {% macro profile_stub(stub) %} 2 |
    10 |

    11 | 13 | {{stub.name}} 14 | 15 |

    16 | {% match stub.picture %} 17 | {% when Some with (filename) %} 18 | 19 | {% when None %} 20 |
    21 | {% endmatch %} 22 |
    23 | {% for badge in stub.badges %} 24 | {{badge|e("none")}} 25 | {% endfor %} 26 |
    27 |
    28 | {% endmacro %} 29 | 30 | {% macro item_overlay(item) %} 31 |
    32 |
    33 |
    34 | {{item.html|e("none")}} 35 |
    36 |
    37 |
    38 |
    39 | {{item.name}} 40 |
    41 |
    42 |
    43 |
    44 | {{item.description}} 45 |
    46 |
    47 |
    48 |
    49 |
    50 | {{item.rarity}} 51 |
    52 |
    53 |
    54 |
    55 | {% endmacro %} 56 | 57 | {% macro item_thumbnail(item) %} 58 | 59 |

    {{item.html|e("none")}}

    60 |
    61 | {% call item_overlay(item) %} 62 |
    63 |
    64 | {% endmacro %} 65 | -------------------------------------------------------------------------------- /templates/react.html: -------------------------------------------------------------------------------- 1 | {%- import "macros.html" as macros -%} 2 | {% extends "base.html" %} 3 | 4 | {% block title %}React{% endblock %} 5 | 6 | {% block content %} 7 |
    8 | 42 | 57 |
    58 | {% endblock %} 59 | -------------------------------------------------------------------------------- /templates/login.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | 3 | {% block title %}Log In{% endblock %} 4 | 5 | {% block content %} 6 | 59 | {% endblock %} 60 | -------------------------------------------------------------------------------- /static/index.js: -------------------------------------------------------------------------------- 1 | $(document).ready(function() { 2 | var tagHLcolor = ""; 3 | var threadHLcolor = ""; 4 | if ($(".tag").length > 0) 5 | { 6 | tagHLcolor = darkenRGBString(window.getComputedStyle($(".tag")[0])["background-color"], 0.90); 7 | } 8 | if ($("li.thread-menu-item").length > 0) 9 | { 10 | threadHLcolor = darkenRGBString(window.getComputedStyle($("li.thread-menu-item")[0])["background"], 0.96); 11 | } 12 | $(".tag").hover(function() { 13 | $(this).css({ 'background-color' : tagHLcolor}); 14 | $(this).parents("li.thread-menu-item").css({ 'background-color' : ''}); 15 | }, function() { 16 | $(this).css({ 'background-color' : ''}); 17 | $(this).parents("li.thread-menu-item").css({ 'background-color' : threadHLcolor}); 18 | }); 19 | $(".tag").click(function(e) { 20 | e.stopPropagation(); 21 | var result = location.pathname; 22 | if (result.substr(-1) !== "/") { 23 | result += '/'; 24 | } 25 | result += $(this).attr('name'); 26 | location.pathname = result; 27 | }); 28 | $("li.thread-menu-item").hover(function() { 29 | $(this).parents("li.thread-menu-item").css({ 'background-color' : ''}); 30 | $(this).css({ 'background-color' : threadHLcolor}); 31 | }, function() { 32 | $(this).css({ 'background-color' : ''}); 33 | }); 34 | }); 35 | 36 | function darkenRGBString(rgb, factor) 37 | { 38 | rgb = rgb.replace(/\).*/g, '').replace(/[^\d,.]/g, '').split(','); 39 | return `rgb(${rgb[0]*factor}, ${rgb[1]*factor}, ${rgb[2]*factor})` 40 | } 41 | 42 | function add_tag() { 43 | var val = $('#add-tag').val().toLowerCase().trim(); 44 | var result = location.pathname; 45 | if (result.substr(-1) !== "/") { 46 | result += '/'; 47 | } 48 | result += val; 49 | location.pathname = result; 50 | } 51 | 52 | function remove_tag(tag) { 53 | var tag = tag.toLowerCase().trim(); 54 | var tags = location.pathname.split('/').slice(2); 55 | var result = '/t'; 56 | tags.forEach(function (item, _) { 57 | var item = item.toLowerCase().trim(); 58 | if (item !== tag) { 59 | result = result + '/' + item; 60 | } 61 | }); 62 | location.pathname = result; 63 | } 64 | -------------------------------------------------------------------------------- /src/main.rs: -------------------------------------------------------------------------------- 1 | use std::net::SocketAddr; 2 | 3 | use axum::{ 4 | extract::Extension, 5 | http::StatusCode, 6 | response::Redirect, 7 | routing::{get, get_service}, 8 | Router, 9 | }; 10 | use marche_server::{pages::ServerError, Endpoint}; 11 | use sqlx::postgres::PgPoolOptions; 12 | use tower_cookies::CookieManagerLayer; 13 | use tower_http::{services::ServeDir, trace::TraceLayer}; 14 | 15 | #[tokio::main] 16 | async fn main() { 17 | if std::env::var_os("RUST_LOG").is_none() { 18 | std::env::set_var("RUST_LOG", "marche=info") 19 | } 20 | 21 | tracing_subscriber::fmt::init(); 22 | 23 | let db_url = std::env::var("DATABASE_URL"); 24 | let db_url = if let Ok(db_url) = db_url { 25 | db_url 26 | } else { 27 | tracing::error!("DATABASE_URL is not set, aborting."); 28 | return; 29 | }; 30 | 31 | let pool 
= PgPoolOptions::new() 32 | .max_connections(5) 33 | .connect(&db_url) 34 | .await 35 | .expect("Failed to create database pool"); 36 | 37 | sqlx::migrate!().run(&pool).await.expect("Migration failed"); 38 | 39 | let mut app = Router::new(); 40 | 41 | for endpoint in inventory::iter::<Endpoint>() { 42 | app = endpoint.install(app); 43 | } 44 | 45 | let app = app.route( 46 | "/favicon.ico", 47 | get(|| async { Redirect::permanent("/static/favicon.ico") }), 48 | ); 49 | 50 | let app = app 51 | .fallback(fallback) 52 | .nest_service( 53 | "/static", 54 | get_service(ServeDir::new("static")).handle_error(|error: std::io::Error| async move { 55 | ( 56 | StatusCode::INTERNAL_SERVER_ERROR, 57 | format!("Unhandled internal error: {}", error), 58 | ) 59 | }), 60 | ) 61 | .layer(CookieManagerLayer::new()) 62 | .layer(TraceLayer::new_for_http()) 63 | .layer(Extension(pool)); 64 | 65 | let addr = SocketAddr::from(([0, 0, 0, 0], 8080)); 66 | tracing::info!("Marche server launched, listening on {}", addr); 67 | axum::Server::bind(&addr) 68 | .serve(app.into_make_service_with_connect_info::<SocketAddr>()) 69 | .await 70 | .unwrap(); 71 | } 72 | 73 | async fn fallback() -> (StatusCode, ServerError) { 74 | (StatusCode::NOT_FOUND, ServerError::NotFound) 75 | } 76 | -------------------------------------------------------------------------------- /templates/item.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | 3 | {% block title %}Item - {{name}}{% endblock %} 4 | 5 | {% block content %} 6 | 58 | 72 | {% endblock %} 73 | 74 | -------------------------------------------------------------------------------- /src/schema.sql: -------------------------------------------------------------------------------- 1 | CREATE TYPE role_enum as ENUM ( 2 | 'admin', 3 | 'moderator', 4 | 'user' 5 | ); 6 | 7 | CREATE TABLE users ( 8 | id SERIAL PRIMARY KEY, 9 | name TEXT NOT NULL UNIQUE, 10 | password TEXT NOT NULL, 11 | bio TEXT NOT NULL, 12 | role role_enum NOT NULL, 13 | experience BIGINT NOT NULL, 14 | last_reward TIMESTAMP NOT NULL, 15 | equip_slot_prof_pic INTEGER, 16 | equip_slot_background INTEGER, 17 | equip_slot_badges INTEGER[] NOT NULL, 18 | banned_until TIMESTAMP, 19 | notes TEXT NOT NULL 20 | ); 21 | 22 | CREATE TABLE reading_history ( 23 | id SERIAL PRIMARY KEY, 24 | reader_id INT NOT NULL, 25 | thread_id INT NOT NULL, 26 | last_read INT NOT NULL, 27 | UNIQUE (reader_id, thread_id) 28 | ); 29 | 30 | CREATE TABLE login_sessions ( 31 | id SERIAL PRIMARY KEY, 32 | session_id VARCHAR NOT NULL, 33 | user_id INT NOT NULL, 34 | session_start TIMESTAMP NOT NULL, 35 | ip_addr CIDR NOT NULL 36 | ); 37 | 38 | CREATE TABLE threads ( 39 | id SERIAL PRIMARY KEY, 40 | last_post INTEGER NOT NULL, 41 | title TEXT NOT NULL, 42 | tags INTEGER[] NOT NULL, 43 | num_replies INTEGER NOT NULL, 44 | pinned BOOLEAN NOT NULL, 45 | locked BOOLEAN NOT NULL 46 | ); 47 | 48 | CREATE TABLE replies ( 49 | id SERIAL PRIMARY KEY, 50 | author_id INT NOT NULL, 51 | thread_id INT NOT NULL, 52 | post_date TIMESTAMP NOT NULL, 53 | body TEXT NOT NULL, 54 | reward INT, 55 | reactions INTEGER[] NOT NULL, 56 | image TEXT, 57 | thumbnail TEXT, 58 | filename TEXT 59 | ); 60 | 61 | CREATE TABLE tags ( 62 | id SERIAL PRIMARY KEY, 63 | name TEXT NOT NULL UNIQUE, 64 | num_tagged INTEGER NOT NULL DEFAULT 1 65 | ); 66 | 67 | CREATE TYPE rarity_enum AS ENUM ( 68 | 'common', 69 | 'uncommon', 70 | 'rare', 71 | 'ultra_rare', 72 | 'legendary' 73 | ); 74 | 75 | CREATE TABLE items ( 76 | id SERIAL PRIMARY KEY, 77 | name 
TEXT NOT NULL, 78 | description TEXT NOT NULL, 79 | available BOOLEAN NOT NULL, 80 | rarity rarity_enum NOT NULL, 81 | item_type JSONB NOT NULL, 82 | attribute_map JSONB NOT NULL 83 | ); 84 | 85 | CREATE TABLE drops ( 86 | id SERIAL PRIMARY KEY, 87 | owner_id INT NOT NULL, 88 | item_id INT NOT NULL, 89 | pattern SMALLINT NOT NULL, 90 | consumed BOOLEAN NOT NULL DEFAULT FALSE 91 | ); 92 | 93 | CREATE TABLE trade_requests ( 94 | id SERIAL PRIMARY KEY, 95 | sender_id INTEGER NOT NULL, 96 | sender_items INTEGER[] NOT NULL, 97 | receiver_id INTEGER NOT NULL, 98 | receiver_items INTEGER[] NOT NULL, 99 | note TEXT 100 | ); 101 | -------------------------------------------------------------------------------- /migrations/1_initial.up.sql: -------------------------------------------------------------------------------- 1 | CREATE TYPE user_role as ENUM ( 2 | 'admin', 3 | 'moderator', 4 | 'user' 5 | ); 6 | 7 | CREATE TABLE users ( 8 | id SERIAL PRIMARY KEY, 9 | name TEXT NOT NULL UNIQUE, 10 | display_name TEXT NOT NULL, 11 | password TEXT NOT NULL, 12 | secret BYTEA NOT NULL, 13 | reset_code TEXT NOT NULL, 14 | bio TEXT NOT NULL, 15 | email TEXT NOT NULL, 16 | role user_role NOT NULL, 17 | experience BIGINT NOT NULL, 18 | last_reward TIMESTAMP NOT NULL, 19 | equip_slot_prof_pic INTEGER, 20 | equip_slot_background INTEGER, 21 | equip_slot_badges INTEGER[] NOT NULL, 22 | banned_until TIMESTAMP, 23 | notes TEXT NOT NULL 24 | ); 25 | 26 | CREATE TABLE reading_history ( 27 | id SERIAL PRIMARY KEY, 28 | reader_id INT NOT NULL, 29 | thread_id INT NOT NULL, 30 | last_read INT NOT NULL, 31 | UNIQUE (reader_id, thread_id) 32 | ); 33 | 34 | CREATE TABLE login_sessions ( 35 | id SERIAL PRIMARY KEY, 36 | session_id VARCHAR NOT NULL, 37 | user_id INT NOT NULL, 38 | session_start TIMESTAMP NOT NULL, 39 | ip_addr CIDR NOT NULL 40 | ); 41 | 42 | CREATE TABLE threads ( 43 | id SERIAL PRIMARY KEY, 44 | last_post INTEGER NOT NULL, 45 | title TEXT NOT NULL, 46 | tags INTEGER[] NOT NULL, 47 | num_replies INTEGER NOT NULL, 48 | pinned BOOLEAN NOT NULL, 49 | locked BOOLEAN NOT NULL, 50 | hidden BOOLEAN NOT NULL 51 | ); 52 | 53 | CREATE TABLE replies ( 54 | id SERIAL PRIMARY KEY, 55 | author_id INT NOT NULL, 56 | thread_id INT NOT NULL, 57 | post_date TIMESTAMP NOT NULL, 58 | body TEXT NOT NULL, 59 | reward INT, 60 | reactions INTEGER[] NOT NULL, 61 | image TEXT, 62 | thumbnail TEXT, 63 | filename TEXT, 64 | hidden BOOLEAN NOT NULL DEFAULT FALSE 65 | ); 66 | 67 | CREATE TABLE tags ( 68 | id SERIAL PRIMARY KEY, 69 | name TEXT NOT NULL UNIQUE, 70 | num_tagged INTEGER NOT NULL DEFAULT 1 71 | ); 72 | 73 | CREATE TYPE rarity AS ENUM ( 74 | 'common', 75 | 'uncommon', 76 | 'rare', 77 | 'ultra_rare', 78 | 'legendary' 79 | ); 80 | 81 | CREATE TABLE items ( 82 | id SERIAL PRIMARY KEY, 83 | name TEXT NOT NULL, 84 | description TEXT NOT NULL, 85 | available BOOLEAN NOT NULL, 86 | rarity rarity NOT NULL, 87 | item_type JSONB NOT NULL, 88 | attributes JSONB NOT NULL 89 | ); 90 | 91 | CREATE TABLE drops ( 92 | id SERIAL PRIMARY KEY, 93 | owner_id INT NOT NULL, 94 | item_id INT NOT NULL, 95 | pattern INT NOT NULL, 96 | consumed BOOLEAN NOT NULL DEFAULT FALSE 97 | ); 98 | 99 | CREATE TABLE trade_requests ( 100 | id SERIAL PRIMARY KEY, 101 | sender_id INTEGER NOT NULL, 102 | sender_items INTEGER[] NOT NULL, 103 | receiver_id INTEGER NOT NULL, 104 | receiver_items INTEGER[] NOT NULL, 105 | note TEXT 106 | ); 107 | -------------------------------------------------------------------------------- /templates/author.html: 
-------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | 3 | {% block title %}Author Post{% endblock %} 4 | 5 | {% block content %} 6 | 72 | {% endblock %} 73 | -------------------------------------------------------------------------------- /templates/offer.html: -------------------------------------------------------------------------------- 1 | {%- import "macros.html" as macros -%} 2 | {% extends "base.html" %} 3 | 4 | {% block title %}Draft Trade Offer{% endblock %} 5 | 6 | {% block content %} 7 | 95 | {% endblock %} 96 | -------------------------------------------------------------------------------- /templates/register.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | 3 | {% block title %}Create An Account{% endblock %} 4 | 5 | {% block content %} 6 | 59 | 68 | 109 | {% endblock %} 110 | -------------------------------------------------------------------------------- /src/images.rs: -------------------------------------------------------------------------------- 1 | use std::io::Cursor; 2 | 3 | use aws_sdk_s3::{ 4 | error::PutObjectError, 5 | model::ObjectCannedAcl, 6 | output::PutObjectOutput, 7 | types::{ByteStream, SdkError}, 8 | Client, Endpoint, 9 | }; 10 | use axum::body::Bytes; 11 | use image::ImageFormat; 12 | use serde::Serialize; 13 | use sha2::{Digest, Sha256}; 14 | use thiserror::Error; 15 | use tokio::task; 16 | 17 | pub struct Image { 18 | pub filename: String, 19 | pub thumbnail: Option, 20 | } 21 | 22 | #[derive(Debug, Serialize, Error)] 23 | pub enum UploadImageError { 24 | #[error("invalid file type")] 25 | InvalidExtension, 26 | #[error("error decoding image: {0}")] 27 | ImageError( 28 | #[from] 29 | #[serde(skip)] 30 | image::ImageError, 31 | ), 32 | #[error("internal server error: {0}")] 33 | InternalServerError( 34 | #[from] 35 | #[serde(skip)] 36 | tokio::task::JoinError, 37 | ), 38 | #[error("internal block storage error: {0}")] 39 | InternalBlockStorageError( 40 | #[from] 41 | #[serde(skip)] 42 | SdkError, 43 | ), 44 | } 45 | 46 | impl Image { 47 | /// Upload image to object storage 48 | pub async fn upload_image(bytes: Bytes) -> Result { 49 | /// Maximum width/height of an image. 
50 | const MAX_WH: u32 = 400; 51 | 52 | let format = image::guess_format(&bytes)?; 53 | let ext = match format { 54 | ImageFormat::Png => "png", 55 | ImageFormat::Jpeg => "jpeg", 56 | ImageFormat::Gif => "gif", 57 | ImageFormat::WebP => "webp", 58 | _ => return Err(UploadImageError::InvalidExtension), 59 | }; 60 | 61 | let (bytes, hash) = task::spawn_blocking(move || { 62 | let mut hasher = Sha256::new(); 63 | hasher.update(&bytes); 64 | ( 65 | bytes, 66 | base64::encode_config(hasher.finalize().as_slice(), base64::URL_SAFE_NO_PAD), 67 | ) 68 | }) 69 | .await?; 70 | 71 | // Check if file already exists: 72 | let config = aws_config::from_env() 73 | .endpoint_resolver(Endpoint::immutable( 74 | IMAGE_STORE_ENDPOINT.parse().expect("valid URI"), 75 | )) 76 | .load() 77 | .await; 78 | let client = Client::new(&config); 79 | let filename = format!("{hash}.{ext}"); 80 | 81 | if image_exists(&client, &filename).await { 82 | let thumbnail = format!("{hash}_thumbnail.{ext}"); 83 | return Ok(Image { 84 | filename: get_url(&filename), 85 | thumbnail: image_exists(&client, &thumbnail) 86 | .await 87 | .then(move || get_url(&thumbnail)), 88 | }); 89 | } 90 | 91 | // Resize the image if it is necessary 92 | let image = image::load_from_memory(&bytes)?; 93 | let thumbnail = if image.height() > MAX_WH || image.width() > MAX_WH { 94 | let thumb = task::spawn_blocking(move || image.thumbnail(MAX_WH, MAX_WH)).await?; 95 | let mut output = Cursor::new(Vec::with_capacity(thumb.as_bytes().len())); 96 | thumb.write_to(&mut output, format)?; 97 | let thumbnail = format!("{hash}_thumbnail.{ext}"); 98 | put_image( 99 | &client, 100 | &thumbnail, 101 | &ext, 102 | ByteStream::from(output.into_inner()), 103 | ) 104 | .await?; 105 | Some(thumbnail) 106 | } else { 107 | None 108 | }; 109 | 110 | put_image(&client, &filename, &ext, ByteStream::from(bytes)).await?; 111 | 112 | Ok(Image { 113 | filename: get_url(&filename), 114 | thumbnail: thumbnail.as_deref().map(get_url), 115 | }) 116 | } 117 | } 118 | 119 | pub const IMAGE_STORE_ENDPOINT: &'static str = "https://marche-storage.nyc3.digitaloceanspaces.com"; 120 | pub const IMAGE_STORE_BUCKET: &'static str = "images"; 121 | 122 | pub fn get_url(filename: &str) -> String { 123 | format!("{IMAGE_STORE_ENDPOINT}/{IMAGE_STORE_BUCKET}/{filename}") 124 | } 125 | 126 | pub const MAXIMUM_FILE_SIZE: u64 = 12 * 1024 * 1024; /* 12mb */ 127 | 128 | async fn image_exists(client: &Client, filename: &str) -> bool { 129 | client 130 | .head_object() 131 | .bucket(IMAGE_STORE_BUCKET) 132 | .key(filename) 133 | .send() 134 | .await 135 | .is_ok() 136 | } 137 | 138 | async fn put_image( 139 | client: &Client, 140 | filename: &str, 141 | ext: &str, 142 | body: ByteStream, 143 | ) -> Result> { 144 | client 145 | .put_object() 146 | .acl(ObjectCannedAcl::PublicRead) 147 | .content_type(format!("image/{}", ext)) 148 | .bucket(IMAGE_STORE_BUCKET) 149 | .key(filename) 150 | .body(body) 151 | .send() 152 | .await 153 | } 154 | -------------------------------------------------------------------------------- /proc-macros/src/lib.rs: -------------------------------------------------------------------------------- 1 | use proc_macro::{self, TokenStream}; 2 | use proc_macro2::Span; 3 | use quote::{quote, ToTokens}; 4 | use syn::{ 5 | parse_macro_input, parse_quote, punctuated::Punctuated, token::Comma, Block, Expr, Fields, 6 | Ident, ItemEnum, ItemFn, PatIdent, 7 | }; 8 | 9 | fn transform_params( 10 | params: Punctuated, 11 | ) -> Punctuated { 12 | let mut unnamed = 0; 13 | params 14 | .into_iter() 15 
| .map(|param| match param { 16 | syn::FnArg::Typed(mut ty) => { 17 | let pat = if let syn::Pat::Ident(pat_ident) = *ty.pat.clone() { 18 | syn::Pat::Ident(pat_ident) 19 | } else { 20 | unnamed += 1; 21 | syn::Pat::Ident(PatIdent { 22 | attrs: vec![], 23 | by_ref: None, 24 | mutability: None, 25 | ident: Ident::new(&format!("t{unnamed}"), Span::call_site()), 26 | subpat: None, 27 | }) 28 | }; 29 | ty.pat = Box::new(pat); 30 | syn::FnArg::Typed(ty) 31 | } 32 | x => x, 33 | }) 34 | .collect() 35 | } 36 | 37 | fn transform_params_to_call(params: Punctuated) -> Expr { 38 | // 1. Filter the params, so that only typed arguments remain 39 | // 2. Extract the ident (in case the pattern type is ident) 40 | let mut unnamed = 0; 41 | let idents = params.iter().filter_map(|param| { 42 | if let syn::FnArg::Typed(pat_type) = param { 43 | if let syn::Pat::Ident(pat_ident) = *pat_type.pat.clone() { 44 | return Some(pat_ident.ident); 45 | } 46 | } 47 | unnamed += 1; 48 | Some(Ident::new(&format!("t{unnamed}"), Span::call_site())) 49 | }); 50 | 51 | // Add all idents to a Punctuated => param1, param2, ... 52 | let mut punctuated: Punctuated = Punctuated::new(); 53 | idents.for_each(|ident| punctuated.push(ident)); 54 | 55 | // Generate expression from Punctuated (and wrap with parentheses) 56 | let transformed_params = parse_quote!((#punctuated)); 57 | transformed_params 58 | } 59 | 60 | #[proc_macro] 61 | pub fn get_fn_name(item: TokenStream) -> TokenStream { 62 | let ItemFn { sig, .. } = parse_macro_input!(item as ItemFn); 63 | let ident = sig.ident; 64 | quote! { 65 | #ident 66 | } 67 | .into() 68 | } 69 | 70 | #[proc_macro_attribute] 71 | pub fn json(_attr: TokenStream, item: TokenStream) -> TokenStream { 72 | let ItemFn { 73 | attrs, 74 | vis, 75 | mut sig, 76 | block, 77 | } = parse_macro_input!(item as ItemFn); 78 | 79 | let inner = sig.output.clone(); 80 | let args = sig.inputs.clone(); 81 | let call_args = transform_params_to_call(sig.inputs.clone()); 82 | sig.inputs = transform_params(sig.inputs.clone()); 83 | sig.output = parse_quote!(-> (http::StatusCode, axum::Json)); 84 | 85 | let block: Block = parse_quote! { 86 | { 87 | async fn inner(#args) #inner { 88 | #block 89 | } 90 | match inner #call_args .await { 91 | Err(err) => { 92 | use crate::ErrorCode; 93 | 94 | tracing::error!("{}", err); 95 | ( 96 | err.error_code(), 97 | axum::Json(serde_json::json!({ 98 | "error": format!("{}", err), 99 | "error_type": err, 100 | })) 101 | ) 102 | }, 103 | Ok(ok) => { 104 | ( 105 | http::StatusCode::OK, 106 | axum::Json(serde_json::json!({ 107 | "ok": ok, 108 | })) 109 | ) 110 | } 111 | } 112 | } 113 | }; 114 | 115 | ItemFn { 116 | attrs, 117 | vis, 118 | sig, 119 | block: Box::new(block), 120 | } 121 | .into_token_stream() 122 | .into() 123 | } 124 | 125 | #[proc_macro_derive(ErrorCode)] 126 | pub fn error_code(input: TokenStream) -> TokenStream { 127 | let ItemEnum { 128 | ident, variants, .. 129 | } = parse_macro_input!(input as ItemEnum); 130 | 131 | let mut matches = Vec::new(); 132 | for variant in variants { 133 | let status_code = if variant.ident == "Unauthorized" { 134 | quote! { http::StatusCode::UNAUTHORIZED } 135 | } else if variant.ident == "UnknownError" 136 | || variant.ident.to_string().starts_with("Internal") 137 | { 138 | quote! { http::StatusCode::INTERNAL_SERVER_ERROR } 139 | } else { 140 | continue; 141 | }; 142 | let guard = match variant.fields { 143 | Fields::Named(..) => quote! { { .. } }, 144 | Fields::Unnamed(..) => quote! { ( .. ) }, 145 | Fields::Unit => quote! 
{}, 146 | }; 147 | let ident = variant.ident; 148 | matches.push(quote! { 149 | Self::#ident #guard => #status_code, 150 | }); 151 | } 152 | 153 | quote! { 154 | impl crate::ErrorCode for #ident { 155 | fn error_code(&self) -> http::StatusCode { 156 | match self { 157 | #( #matches )* 158 | _ => http::StatusCode::BAD_REQUEST, 159 | } 160 | } 161 | } 162 | } 163 | .into() 164 | } 165 | -------------------------------------------------------------------------------- /templates/profile.html: -------------------------------------------------------------------------------- 1 | {%- import "macros.html" as macros -%} 2 | {% extends "base.html" %} 3 | 4 | {% block title %}{{stub.name}}'s Profile{% endblock %} 5 | 6 | {% block content %} 7 | 170 | {% endblock %} 171 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | pub mod images; 2 | pub mod items; 3 | pub mod pages; 4 | pub mod threads; 5 | pub mod users; 6 | 7 | use std::{any::Any, collections::HashMap}; 8 | 9 | use axum::{ 10 | async_trait, 11 | body::{Body, Bytes}, 12 | extract::{ 13 | multipart::{MultipartError, MultipartRejection}, 14 | FromRequest, Multipart, 15 | }, 16 | handler::Handler, 17 | http::Request, 18 | response::{IntoResponse, Response}, 19 | Router, 20 | }; 21 | use derive_more::Display; 22 | use marche_proc_macros::ErrorCode; 23 | use serde::{de::DeserializeOwned, Serialize}; 24 | use thiserror::Error; 25 | 26 | pub const DATE_FMT: &str = "%B %-d, %Y at %I:%M %P"; 27 | 28 | pub struct Endpoint { 29 | route_type: RouteType, 30 | path: &'static str, 31 | handler: &'static (dyn Any + Send + Sync + 'static), 32 | installer: 33 | fn(RouteType, &'static str, &'static (dyn Any + Send + Sync + 'static), Router) -> Router, 34 | } 35 | 36 | impl Endpoint { 37 | pub const fn new(route_type: RouteType, path: &'static str, handler: &'static I) -> Self 38 | where 39 | I: Handler + Copy + Any + Send + Sync + 'static, 40 | A: 'static, 41 | { 42 | Self { 43 | path, 44 | route_type, 45 | handler: handler as &(dyn Any + Send + Sync + 'static), 46 | installer: install::, 47 | } 48 | } 49 | 50 | pub fn install(&self, router: Router) -> Router { 51 | (self.installer)(self.route_type, self.path, self.handler, router) 52 | } 53 | } 54 | 55 | inventory::collect!(Endpoint); 56 | 57 | #[derive(Copy, Clone, Display)] 58 | pub enum RouteType { 59 | #[display(fmt = "GET")] 60 | Get, 61 | #[display(fmt = "POST")] 62 | Post, 63 | } 64 | 65 | pub fn install( 66 | route_type: RouteType, 67 | path: &'static str, 68 | handler: &'static (dyn Any + Send + Sync + 'static), 69 | router: Router, 70 | ) -> Router 71 | where 72 | I: Handler + Copy, 73 | A: 'static, 74 | { 75 | tracing::info!("{route_type} {path} registered"); 76 | router.route( 77 | &path, 78 | match route_type { 79 | RouteType::Get => axum::routing::get(*handler.downcast_ref::().unwrap()), 80 | RouteType::Post => axum::routing::post(*handler.downcast_ref::().unwrap()), 81 | }, 82 | ) 83 | } 84 | 85 | #[macro_export] 86 | macro_rules! get { 87 | ( $suffix:literal, $func:item ) => { 88 | inventory::submit! { 89 | crate::Endpoint::new::<_, _>( 90 | crate::RouteType::Get, $suffix, &marche_proc_macros::get_fn_name!( $func ) 91 | ) 92 | } 93 | $func 94 | }; 95 | } 96 | 97 | #[macro_export] 98 | macro_rules! post { 99 | ( $suffix:literal, $func:item ) => { 100 | inventory::submit! 
{ 101 | crate::Endpoint::new::<_, _>( 102 | crate::RouteType::Post, $suffix, &marche_proc_macros::get_fn_name!( $func ) 103 | ) 104 | } 105 | $func 106 | }; 107 | } 108 | 109 | /// An error type must give a proper status code for error handling. 110 | pub trait ErrorCode { 111 | fn error_code(&self) -> http::StatusCode; 112 | } 113 | 114 | /// A multipart form that includes a file (which must be named "file"). 115 | /// Ideally we'd like this to be 116 | #[derive(Debug)] 117 | pub struct MultipartForm { 118 | pub form: Form, 119 | pub file: Option, 120 | } 121 | 122 | #[derive(Debug)] 123 | pub struct File { 124 | pub name: String, 125 | pub bytes: Bytes, 126 | } 127 | 128 | #[derive(Debug, Serialize, Error, ErrorCode)] 129 | pub enum MultipartFormError { 130 | #[error("invalid content length")] 131 | InvalidContentLength, 132 | #[error("invalid field")] 133 | InvalidField, 134 | #[error("error parsing form: {0}")] 135 | ParseError( 136 | #[from] 137 | #[serde(skip)] 138 | serde_json::Error, 139 | ), 140 | #[error("multipart rejection: {0}")] 141 | MultipartRejection( 142 | #[from] 143 | #[serde(skip)] 144 | MultipartRejection, 145 | ), 146 | #[error("multipart error: {0}")] 147 | MultipartError( 148 | #[from] 149 | #[serde(skip)] 150 | MultipartError, 151 | ), 152 | } 153 | 154 | impl IntoResponse for MultipartFormError { 155 | fn into_response(self) -> Response { 156 | ( 157 | self.error_code(), 158 | axum::Json(serde_json::json!({ "error": format!("{}", self), "error_type": self })), 159 | ) 160 | .into_response() 161 | } 162 | } 163 | 164 | #[async_trait] 165 | impl FromRequest for MultipartForm 166 | where 167 | S: Send + Sync, 168 | B: Send + 'static, 169 | F: DeserializeOwned + Send, 170 | Multipart: FromRequest, 171 | { 172 | type Rejection = MultipartFormError; 173 | 174 | async fn from_request(req: Request, state: &S) -> Result { 175 | let mut multipart = Multipart::from_request(req, state).await?; 176 | let mut form = HashMap::new(); 177 | let mut file = None; 178 | 179 | while let Some(field) = multipart 180 | .next_field() 181 | .await 182 | .map_err(|_| MultipartFormError::InvalidField)? 183 | { 184 | let name = if let Some(name) = field.name() { 185 | name 186 | } else { 187 | continue; 188 | }; 189 | if name == "file" { 190 | if field.file_name().is_none() { 191 | continue; 192 | } 193 | let name = field.file_name().unwrap_or("").to_string(); 194 | let bytes = field.bytes().await?.clone(); 195 | file = Some(File { name, bytes }); 196 | } else { 197 | form.insert(name.to_string(), field.text().await?); 198 | } 199 | } 200 | 201 | // Yes, this is silly, but it's convenient! 
202 | let form: F = serde_json::from_value(serde_json::to_value(form)?)?; 203 | 204 | Ok(Self { form, file }) 205 | } 206 | } 207 | 208 | use std::{fmt, str::FromStr}; 209 | 210 | use serde::{de, Deserialize, Deserializer}; 211 | 212 | fn empty_string_as_none<'de, D, T>(de: D) -> Result, D::Error> 213 | where 214 | D: Deserializer<'de>, 215 | T: FromStr, 216 | T::Err: fmt::Display, 217 | { 218 | let opt = Option::::deserialize(de)?; 219 | match opt.as_deref() { 220 | None | Some("") => Ok(None), 221 | Some(s) => FromStr::from_str(s).map_err(de::Error::custom).map(Some), 222 | } 223 | } 224 | -------------------------------------------------------------------------------- /templates/offers.html: -------------------------------------------------------------------------------- 1 | {%- import "macros.html" as macros -%} 2 | {% extends "base.html" %} 3 | 4 | {% block title %}Trade Offers{% endblock %} 5 | 6 | {% block content %} 7 | {% if incoming_offers.is_empty() %} 8 | 13 | {% else %} 14 | 19 | {% for offer in incoming_offers %} 20 | 67 | {% endfor %} 68 | {% endif %} 69 | 70 | {% if outgoing_offers.is_empty() %} 71 | 77 | {% else %} 78 | 83 | {% for offer in outgoing_offers %} 84 | 129 | {% endfor %} 130 | {% endif %} 131 | 190 | {% endblock %} 191 | -------------------------------------------------------------------------------- /static/thread.js: -------------------------------------------------------------------------------- 1 | $(document).ready(function() { 2 | $(".edit-post-button").click(function() { 3 | const id = $(this).attr("postid"); 4 | $(`.unparsed-${id}`).slideToggle(); 5 | $(`.parsed-${id}`).slideToggle(); 6 | }); 7 | 8 | // editing replies 9 | $(".edit-post-form").each(function() { 10 | const id = $(this).attr("postid"); 11 | const thread_id = $(this).attr("threadid"); 12 | $(this).ajaxForm({ 13 | url: `/reply/${id}`, 14 | type: 'post', 15 | success: function(response) { 16 | // TODO: Do this properly 17 | location.href = `/thread/${thread_id}?jump_to=${id}`; 18 | }, 19 | error: function(xhr) { 20 | $(`.error-${id}`).show(); 21 | $(`.error-${id}`).html(`${xhr.responseJSON.error}`) 22 | } 23 | }); 24 | }); 25 | 26 | // Auto-embed links 27 | $(".post-text").each(function() { 28 | const LINK_RE = /(https?:\/\/(?:www\.|(?!www))[a-zA-Z0-9][a-zA-Z0-9-]+[a-zA-Z0-9]\.[^\s]{2,}|www\.[a-zA-Z0-9][a-zA-Z0-9-]+[a-zA-Z0-9]\.[^\s]{2,}|https?:\/\/(?:www\.|(?!www))[a-zA-Z0-9]+\.[^\s]{2,}|www\.[a-zA-Z0-9]+\.[^\s]{2,})/gi; 29 | let paragraphs = $(this).find("p").each(function() { 30 | let html = $(this).html(); 31 | $(this).html(html.replaceAll(LINK_RE, function(match) { 32 | return `${match}`; 33 | })); 34 | }); 35 | }); 36 | 37 | // Replace @ with responses 38 | const has_reply_preview = new Map(); 39 | $(".post-text").each(function() { 40 | const REPLY_RE = /@(\d+)/g; 41 | let html = $(this).html(); 42 | let curr_id = $(this).parents(".reply").attr("id"); 43 | $(this).html(html.replaceAll(REPLY_RE, function(match, id) { 44 | let post = $(`#${id}`); 45 | let author = post.attr("author"); 46 | if (id < curr_id && post.length) { 47 | return `@${author}
    `; 48 | } else { 49 | return match; 50 | } 51 | })); 52 | }); 53 | 54 | $("span.respond-to-preview").each(function() { 55 | var response_div = $(this).parents(".reply"); 56 | var response_div_clone = response_div.clone(); 57 | var responder_name = response_div.attr("author"); 58 | var response_preview_div = $( 59 | $.parseHTML( 60 | `
    🗣️ ${responder_name}
    `, 61 | ), 62 | ); 63 | var response_overlay_div = $( 64 | $.parseHTML( 65 | `
    `, 66 | ), 67 | ); 68 | var response_container_div = getResponseContainerDiv($(this)); 69 | 70 | cleanCloneDiv(response_div_clone); 71 | 72 | response_preview_div.hover(function() { 73 | response_overlay_div[0].replaceChildren(response_div_clone[0]); 74 | response_overlay_div.css("visibility", "visible").css("opacity", "1.0"); 75 | }, function() { 76 | response_overlay_div.css("visibility", "hidden").css("opacity", "0.0"); 77 | }); 78 | response_preview_div.click(function() { 79 | response_div[0].scrollIntoView({ behavior: "smooth", block: "center" }); 80 | }); 81 | response_overlay_div[0].appendChild(response_div_clone[0]); 82 | response_container_div.parent().append(response_overlay_div); 83 | response_container_div.append(response_preview_div); 84 | }); 85 | 86 | $(".reply-to-button").click(function() { 87 | $("#reply-textarea")[0].value += `@${$(this).attr("replyid")} `; 88 | if($('#reply-form').is(':hidden')) { 89 | $('#toggle-form-button').html("▼ Reply"); 90 | $('#reply-form').slideToggle(); 91 | } 92 | }); 93 | 94 | $(".respond-to-preview").click(function() { 95 | getReplyDiv($(this))[0].scrollIntoView({ behavior: "smooth", block: "center" }); 96 | }); 97 | 98 | $(".respond-to-preview").hover(function() { 99 | var overlay_div = getOverlayDiv($(this)); 100 | var reply_div_clone = getReplyDiv($(this)).clone(); 101 | cleanCloneDiv(reply_div_clone); 102 | overlay_div[0].replaceChildren(reply_div_clone[0]); 103 | overlay_div.css("visibility", "visible").css("opacity", "1.0"); 104 | }, function() { 105 | getOverlayDiv($(this)).css("visibility", "hidden").css("opacity", "0.0"); 106 | }); 107 | 108 | // Embed media elements 109 | $("a").each(function() { 110 | const YOUTUBE_RE = 111 | /(?:https?:\/\/)?(?:www\.|m\.)?youtu(?:\.be\/|be.com\/\S*(?:watch|embed)(?:(?:(?=\/[^&\s\?]+(?!\S))\/)|(?:\S*v=|v\/)))([^&\s\?]+)/; 112 | 113 | const link = $(this).attr("href"); 114 | const capture = link.match(YOUTUBE_RE); 115 | if (capture != null) { 116 | const id = capture[1]; 117 | $(this).replaceWith( 118 | $(`

    `), 119 | ); 120 | return; 121 | } 122 | }); 123 | 124 | // Check if jump_to exists, and scroll to it if it does 125 | const urlParams = new URLSearchParams(window.location.search); 126 | if (urlParams.has('jump_to')) { 127 | const jump_to = urlParams.get('jump_to'); 128 | $(`#${jump_to}`)[0].scrollIntoView({ block: "center" }); 129 | } 130 | 131 | // Custom file input button 132 | $("#attach-file-to-reply-input").change(function(event) { 133 | var file = event.target.files[0]; 134 | var button = $(this).parents("#attach-file-to-reply-button"); 135 | var buttonTextHolder = $("#attach-file-to-reply-text-container"); 136 | var filenameTextHolder = $("#attached-filename-text-container"); 137 | if (file) { 138 | button.attr("title", file.name); 139 | button.css("background-color", "#b1c66d"); 140 | buttonTextHolder[0].textContent="✔️ file"; 141 | filenameTextHolder[0].textContent=`└ ${file.name}`; 142 | } else { 143 | button.attr("title", ""); 144 | button.css("background-color", ""); 145 | buttonTextHolder[0].textContent="file"; 146 | filenameTextHolder[0].textContent=""; 147 | } 148 | }); 149 | }); 150 | 151 | function cleanCloneDiv(div) { 152 | // Differences between response to preview and actual reply element 153 | div.removeAttr("id"); 154 | div.find(".edit-post-button").remove(); 155 | div.find(".hide-post").remove(); 156 | div.find(".delete-reply").remove(); 157 | div.find(".reply-to-button").remove(); 158 | div.find(".react-button").remove(); 159 | div.find(".edit-post-form").remove(); 160 | div.find(".response-container").remove(); 161 | } 162 | 163 | function getOverlayDiv(origin) { 164 | var overlay_div = origin.next("div.reply-overlay"); 165 | 166 | // Workaround because markdown parser doesn't close its own

    tags. garbage. 167 | if (overlay_div.length == 0) { 168 | overlay_div = origin.parent().next("div.reply-overlay"); 169 | } 170 | return overlay_div; 171 | } 172 | 173 | function getReplyDiv(origin) { 174 | // If we ever add paginated threads, this logic needs to be extended in order to retrieve/render replys which are not in the DOM 175 | return $(`#${origin.attr("reply_id")}`); 176 | } 177 | 178 | function getResponseContainerDiv(origin) { 179 | // If we ever add paginated threads, this logic needs to be extended in order to retrieve/render replys which are not in the DOM 180 | return $(`#response-container-${origin.attr("reply_id")}`); 181 | } 182 | -------------------------------------------------------------------------------- /templates/items.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | 3 | {% block title %}Item manager{% endblock %} 4 | 5 | {% block content %} 6 |

    88 | {% for item in items %} 89 | 145 | {% endfor %} 146 | 227 | {% endblock %} 228 | -------------------------------------------------------------------------------- /static/styles.css: -------------------------------------------------------------------------------- 1 | html { 2 | overflow-x: hidden; 3 | } 4 | 5 | ul.menu-item { 6 | list-style-type: none; 7 | padding: 0; 8 | margin: 0px; 9 | } 10 | 11 | a { 12 | color: #2e2e2e; 13 | } 14 | 15 | li.menu-item { 16 | background: #ededed; 17 | border-radius: 5px; 18 | margin: 5px; 19 | } 20 | 21 | div.reply-box { 22 | background: #ededed; 23 | border-radius: 5px 5px 0px 0px; 24 | border-top: 1px solid #dbd9d9; 25 | position: fixed; 26 | max-width: 1093px; /* Chosen empirically */ 27 | margin-left: auto; 28 | margin-right: auto; 29 | width: 100%; 30 | left: 0; 31 | right: 0; 32 | bottom: 0; 33 | } 34 | 35 | body { 36 | background: white; 37 | color: #2e2e23; 38 | font-family: Arial, sans-serif; 39 | margin-left: auto; 40 | margin-right: auto; 41 | max-width: 1100px; 42 | } 43 | 44 | div.nav { 45 | text-align: center; 46 | padding: 10px; 47 | background: #DBD2E0; 48 | color: grey; 49 | } 50 | 51 | div.title { 52 | width: 100%; 53 | } 54 | 55 | div.right { 56 | width: 25%; 57 | padding-left: 10px; 58 | } 59 | 60 | div.table { 61 | display: table; 62 | } 63 | 64 | div.row { 65 | display: table-row; 66 | } 67 | 68 | div.cell { 69 | display: table-cell; 70 | padding: 10px; 71 | } 72 | 73 | div.heavy-cell { 74 | display: table-cell; 75 | padding: 10px; 76 | border-bottom: 1px solid black; 77 | } 78 | 79 | div.header { 80 | text-align: left; 81 | padding: 10px; 82 | background: #ededed; 83 | border-radius: 5px; 84 | } 85 | 86 | div.reward { 87 | text-align: left; 88 | padding: 10px; 89 | background: #DBD2E0; 90 | border-bottom: 1px solid #273B09; 91 | border-right: 1px solid #273B09; 92 | color: grey; 93 | } 94 | 95 | .tag { 96 | text-align: center; 97 | vertical-align: middle; 98 | margin: 5px; 99 | background: #cfcfcf; 100 | color: black; 101 | border-radius: 5px; 102 | padding: 10px; 103 | display: inline-block; 104 | } 105 | 106 | .trade-note { 107 | background: #cfcfcf; 108 | padding: 15px; 109 | border-radius: 5px; 110 | word-break: break-word; 111 | overflow-wrap: anywhere; 112 | color: #4d4b4b; 113 | } 114 | 115 | button { 116 | padding: 10px; 117 | } 118 | 119 | .selected-tag { 120 | text-align: center; 121 | vertical-align: middle; 122 | margin: 5px; 123 | background: #ededed; 124 | border-radius: 5px; 125 | padding: 10px; 126 | display: inline-block; 127 | } 128 | 129 | .item-common { 130 | text-align: center; 131 | text-decoration: none; 132 | vertical-align: middle; 133 | margin: 5px; 134 | color: #006daa; 135 | background: #b9d6f2; 136 | display: inline-block; 137 | padding: 10px; 138 | font-size: 11pt; 139 | border: 1px solid #006daa; 140 | border-radius: 5px; 141 | min-width: 60px; 142 | min-height: 105px; 143 | } 144 | 145 | .rarity-common { 146 | display: inline-block; 147 | padding: 10px; 148 | color: #006daa; 149 | background: #b9d6f2; 150 | border: 1px solid #006daa; 151 | border-radius: 5px; 152 | } 153 | 154 | .item-uncommon { 155 | display: inline-block; 156 | text-decoration: none; 157 | text-align: center; 158 | vertical-align: middle; 159 | margin: 5px; 160 | color: #5d6838; 161 | background: #b1c66d; 162 | padding: 10px; 163 | font-size: 11pt; 164 | border: 1px solid #5d6838; 165 | border-radius: 5px; 166 | min-width: 60px; 167 | min-height: 105px; 168 | } 169 | 170 | .rarity-uncommon { 171 | display: inline-block; 172 
| padding: 10px; 173 | color: #5d6838; 174 | background: #b1c66d; 175 | border: 1px solid #5d6838; 176 | border-radius: 5px; 177 | } 178 | 179 | .item-rare { 180 | display: inline-block; 181 | text-decoration: none; 182 | text-align: center; 183 | vertical-align: middle; 184 | margin: 5px; 185 | color: #592a7a; 186 | background: #cb80ff; 187 | padding: 10px; 188 | font-size: 11pt; 189 | border: 1px solid #592a7a; 190 | border-radius: 5px; 191 | min-width: 60px; 192 | min-height: 105px; 193 | } 194 | 195 | .rarity-rare { 196 | display: inline-block; 197 | padding: 10px; 198 | color: #592a7a; 199 | background: #cb80ff; 200 | border: 1px solid #592a7a; 201 | border-radius: 5px; 202 | } 203 | 204 | .item-ultra-rare { 205 | display: inline-block; 206 | text-decoration: none; 207 | vertical-align: middle; 208 | margin: 5px; 209 | text-align: center; 210 | background: #ff8570; 211 | color: #8a1824; 212 | padding: 10px; 213 | font-size: 11pt; 214 | border: 1px solid #8a1824; 215 | border-radius: 5px; 216 | min-width: 60px; 217 | min-height: 105px; 218 | } 219 | 220 | .rarity-ultra-rare { 221 | display: inline-block; 222 | padding: 10px; 223 | background: #ff8570; 224 | color: #8a1824; 225 | border: 1px solid #8a1824; 226 | border-radius: 5px; 227 | } 228 | 229 | .item-legendary { 230 | display: inline-block; 231 | text-decoration: none; 232 | vertical-align: middle; 233 | margin: 5px; 234 | text-align: center; 235 | background: #f7ce5b; 236 | color: #776a31; 237 | padding: 10px; 238 | font-size: 11pt; 239 | border: 1px solid #af9b46; 240 | border-radius: 5px; 241 | min-width: 60px; 242 | min-height: 105px; 243 | } 244 | 245 | .rarity-legendary { 246 | display: inline-block; 247 | padding: 10px; 248 | background: #f7ce5b; 249 | color: #776a31; 250 | border: 1px solid #af9b46; 251 | border-radius: 5px; 252 | } 253 | 254 | div.item-overlay-cell { 255 | padding: 5px; 256 | } 257 | 258 | div.item-overlay-row { 259 | display: table-row; 260 | } 261 | 262 | .item-overlay { 263 | position: absolute; 264 | display: inline-block; 265 | top: 50%; 266 | left: 50%; 267 | transform: translateX(-50%) translateY(-50%); 268 | border-radius: 5px; 269 | width: 13em; 270 | pointer-events: none; 271 | } 272 | 273 | .response-overlay, 274 | .reply-overlay { 275 | max-width: 855px; 276 | position: absolute; 277 | border-radius: 5px; 278 | pointer-events: none; 279 | border: 1px solid black; 280 | background-color: #ededed; 281 | } 282 | 283 | .reply-overlay { 284 | transform: translateX(-13px) translateY(13px); 285 | } 286 | 287 | .response-overlay { 288 | transform: translateX(-15px); 289 | } 290 | 291 | .overlay-on-hover { 292 | position: absolute; 293 | visibility: hidden; 294 | opacity: 0.0; 295 | z-index: 10; 296 | transition: all 300ms ease-in-out; 297 | -o-transition: all 300ms ease-in-out; 298 | -ms-transition: all 300ms ease-in-out; 299 | -moz-transition: all 300ms ease-in-out; 300 | -webkit-transition: all 300ms ease-in-out; 301 | } 302 | 303 | .hover-triggers-overlay{ 304 | position: relative; 305 | } 306 | 307 | .hover-triggers-overlay:hover .overlay-on-hover, 308 | .hover-triggers-overlay:hover+.overlay-on-hover { 309 | opacity: 1.0; 310 | visibility: visible; 311 | } 312 | 313 | .error { 314 | display: inline-block; 315 | text-align: left; 316 | background: #ff674d; 317 | color: #8a1824; 318 | padding: 10px; 319 | font-size: 11pt; 320 | border: 1px solid #8a1824; 321 | border-radius: 5px; 322 | } 323 | 324 | .action-box { 325 | display: inline-block; 326 | vertical-align: middle; 327 | margin: 5px; 328 | 
text-align: center; 329 | padding: 10px; 330 | font-size: 11pt; 331 | border: 1px solid black; 332 | border-radius: 5px; 333 | cursor: pointer; 334 | text-decoration: none; 335 | transition: all 300ms ease-in-out; 336 | -o-transition: all 300ms ease-in-out; 337 | -ms-transition: all 300ms ease-in-out; 338 | -moz-transition: all 300ms ease-in-out; 339 | -webkit-transition: all 300ms ease-in-out; 340 | } 341 | 342 | .action-box:hover { 343 | background-color: white; 344 | } 345 | 346 | .action-box-standard-size { 347 | resize: none; 348 | width: 6em; 349 | height: 1.4em; 350 | box-sizing: content-box; 351 | } 352 | 353 | .fixed-item-thumbnail { 354 | width: 50px; 355 | height: 50px; 356 | margin-left: auto; 357 | margin-right: auto; 358 | border-radius: 50%; 359 | } 360 | 361 | div.profile { 362 | display: table-cell; 363 | text-align: center; 364 | text-shadow: 365 | 1px 0 black, 366 | 0 1px black, 367 | -1px 0 black, 368 | 0 -1px black; 369 | min-width: 200px; 370 | padding: 5px; 371 | vertical-align: top; 372 | border-radius: 5px; 373 | } 374 | 375 | div.post { 376 | word-break: break-word; 377 | overflow-wrap: anywhere; 378 | display: table-cell; 379 | width: 100%; 380 | padding: 10px; 381 | padding-left: 25px; 382 | padding-right: 25px; 383 | } 384 | 385 | .badge-grid { 386 | display: inline-grid; 387 | grid-template-columns: 1fr 1fr 1fr 1fr 1fr; 388 | grid-gap: 10px; 389 | padding-top: 10px; 390 | padding-bottom: 10px; 391 | text-shadow: 392 | 1px 0 white, 393 | 0 1px white, 394 | -1px 0 white, 395 | 0 -1px white; 396 | } 397 | 398 | .respond-to-preview { 399 | cursor: pointer; 400 | } 401 | 402 | div.respond-to-preview { 403 | /* Display to re-enable "responding-to" header previews*/ 404 | display: none; 405 | } 406 | 407 | span.respond-to-preview { 408 | font-size: 90%; 409 | } 410 | 411 | .response-from-preview { 412 | /* float:left; */ 413 | } 414 | 415 | .response-container { 416 | /* display: flex; */ 417 | } 418 | 419 | .thread-row, .tag { 420 | cursor: pointer; 421 | transition: all 300ms ease-in-out; 422 | -o-transition: all 300ms ease-in-out; 423 | -ms-transition: all 300ms ease-in-out; 424 | -moz-transition: all 300ms ease-in-out; 425 | -webkit-transition: all 300ms ease-in-out; 426 | } 427 | 428 | @keyframes spin { 429 | from { 430 | transform: rotateZ(0deg); 431 | } 432 | to { 433 | transform: rotateZ(359deg); 434 | } 435 | } 436 | 437 | @keyframes shiny { 438 | from { 439 | filter: hue-rotate(0deg) 440 | } 441 | to { 442 | filter: hue-rotate(359deg) 443 | } 444 | } 445 | 446 | @keyframes roll { 447 | from { 448 | transform: rotateY(0deg); 449 | moz-transform: rotateY(0deg); 450 | ms-transform: rotateY(0deg); 451 | } 452 | to { 453 | transform: rotateY(359deg); 454 | moz-transform: rotateY(359deg); 455 | ms-transform: rotateY(359deg); 456 | } 457 | } 458 | 459 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 
14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 
134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 
193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /templates/thread.html: -------------------------------------------------------------------------------- 1 | {%- import "macros.html" as macros -%} 2 | {% extends "base.html" %} 3 | 4 | {% block title %}{{title}}{% endblock %} 5 | 6 | {% block content %} 7 | 8 | 37 | {% for post in posts %} 38 | {% if !post.hidden || viewer_role > Role::User %} 39 | 124 | {% endif %} 125 | {% endfor %} 126 | {% endblock %} 127 | {% block footer %} 128 |
    129 |
    130 |
    131 |
    ► reply
    132 | 155 |
    156 | 339 |
    340 | {% endblock %} 341 | -------------------------------------------------------------------------------- /src/pages.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashSet; 2 | 3 | use askama::Template; 4 | use axum::{ 5 | extract::{Extension, Path, Query}, 6 | http::StatusCode, 7 | response::{IntoResponse, Redirect, Response}, 8 | }; 9 | use chrono::prelude::*; 10 | use futures::{future, stream, StreamExt, TryStreamExt}; 11 | use serde::{Deserialize, Serialize}; 12 | use sqlx::PgPool; 13 | use thiserror::Error; 14 | 15 | use crate::{ 16 | get, 17 | items::{IncomingOffer, Item, ItemDrop, ItemThumbnail, OutgoingOffer}, 18 | threads::{Post, Reply, Tag, Tags, Thread}, 19 | users::{LevelInfo, ProfileStub, Role, User, UserCache, UserRejection}, 20 | }; 21 | 22 | const THREADS_PER_PAGE: i64 = 25; 23 | const MINUTES_TIMESTAMP_IS_EMPHASIZED: i64 = 60 * 24; 24 | 25 | #[derive(Template)] 26 | #[template(path = "error.html")] 27 | pub struct ErrorPage { 28 | offers: usize, 29 | code: u16, 30 | reason: &'static str, 31 | } 32 | 33 | #[derive(Error, Debug)] 34 | pub enum ServerError { 35 | #[error("Not found")] 36 | NotFound, 37 | #[error("Unauthorized")] 38 | Unauthorized, 39 | #[error("Internal database error: {0}")] 40 | InternalDbError(#[from] sqlx::Error), 41 | } 42 | 43 | impl IntoResponse for ServerError { 44 | fn into_response(self) -> Response { 45 | let status_code = match self { 46 | ServerError::NotFound => StatusCode::NOT_FOUND, 47 | ServerError::Unauthorized => StatusCode::UNAUTHORIZED, 48 | ServerError::InternalDbError(_) => StatusCode::INTERNAL_SERVER_ERROR, 49 | }; 50 | ( 51 | status_code, 52 | ErrorPage { 53 | offers: 0, 54 | code: status_code.as_u16(), 55 | reason: status_code.canonical_reason().unwrap_or("????"), 56 | }, 57 | ) 58 | .into_response() 59 | } 60 | } 61 | 62 | #[derive(Debug, Template)] 63 | #[template(path = "items.html")] 64 | pub struct Items { 65 | offers: usize, 66 | items: Vec, 67 | } 68 | 69 | #[derive(Debug)] 70 | pub struct ItemStub { 71 | id: i32, 72 | name: String, 73 | description: String, 74 | item_type: String, 75 | attrs: String, 76 | thumbnail: String, 77 | rarity: String, 78 | available: bool, 79 | } 80 | 81 | get!( 82 | "/items", 83 | pub async fn items(conn: Extension, user: User) -> Result { 84 | if user.role != Role::Admin { 85 | return Err(ServerError::Unauthorized); 86 | } 87 | 88 | let items = sqlx::query_as("SELECT * FROM items ORDER BY rarity DESC, id DESC, name ASC") 89 | .fetch(&*conn) 90 | .filter_map(|item: Result| future::ready(item.ok())) 91 | .map(|item| ItemStub { 92 | thumbnail: item.get_thumbnail_html(rand::random()), 93 | id: item.id, 94 | name: item.name, 95 | description: item.description, 96 | item_type: serde_json::to_string(&item.item_type).unwrap(), 97 | attrs: serde_json::to_string(&item.attributes).unwrap(), 98 | rarity: item.rarity.to_string(), 99 | available: item.available, 100 | }) 101 | .collect() 102 | .await; 103 | 104 | Ok(Items { offers: 0, items }) 105 | } 106 | ); 107 | 108 | #[derive(Debug, Template)] 109 | #[template(path = "index.html")] 110 | pub struct Index { 111 | tags: Vec, 112 | posts: Vec, 113 | offers: i64, 114 | viewer_role: Role, 115 | } 116 | 117 | #[derive(Debug, Serialize)] 118 | struct ThreadLink { 119 | num: usize, 120 | id: i32, 121 | title: String, 122 | date: String, 123 | emphasize_date: bool, 124 | read: bool, 125 | jump_to: i32, 126 | replies: String, 127 | tags: Vec, 128 | pinned: bool, 129 | locked: bool, 130 | hidden: 
bool, 131 | } 132 | 133 | get! { 134 | "/", 135 | pub async fn redirect_to_index() -> Redirect { 136 | // TODO: Redirect to default language. 137 | Redirect::to("/t/en") 138 | } 139 | } 140 | 141 | get! { 142 | "/t/*tags", 143 | async fn index( 144 | conn: Extension, 145 | user: User, 146 | Path(viewed_tags): Path, 147 | ) -> Result { 148 | let viewed_tags = Tags::fetch_from_str(&conn, &*viewed_tags).await; 149 | 150 | // If no tags are selected and the user is not privileged, force 151 | // the user to redirect to /t/en 152 | if viewed_tags.is_empty() && user.role < Role::Moderator { 153 | return Err(Redirect::to("/t/en")); 154 | } 155 | let conn = &*conn; 156 | let user = &user; 157 | 158 | let posts = sqlx::query_as( 159 | r#" 160 | SELECT * FROM threads 161 | WHERE 162 | tags @> $1 163 | ORDER BY 164 | pinned DESC, 165 | last_post DESC 166 | LIMIT $2 167 | "#, 168 | ) 169 | .bind(viewed_tags.clone().into_ids().collect::>()) 170 | .bind(THREADS_PER_PAGE) 171 | .fetch(conn) 172 | .filter_map(|t: Result| future::ready(t.ok())) 173 | .enumerate() 174 | .then(move |(i, thread)| async move { 175 | // Format the date: 176 | // TODO: Consider moving duration->plaintext into common utility 177 | let duration_since_last_post = Utc::now().naive_utc() 178 | - Reply::fetch(&conn, thread.last_post) 179 | .await? 180 | .post_date; 181 | let duration_min = duration_since_last_post.num_minutes(); 182 | let duration_hours = duration_since_last_post.num_hours(); 183 | let duration_days = duration_since_last_post.num_days(); 184 | let duration_weeks = duration_since_last_post.num_weeks(); 185 | let duration_string: String = if duration_weeks > 0 { 186 | format!( 187 | "{} week{} ago", 188 | duration_weeks, 189 | if duration_weeks > 1 { "s" } else { "" } 190 | ) 191 | } else if duration_days > 0 { 192 | format!( 193 | "{} day{} ago", 194 | duration_days, 195 | if duration_days > 1 { "s" } else { "" } 196 | ) 197 | } else if duration_hours > 0 { 198 | format!( 199 | "{} hour{} ago", 200 | duration_hours, 201 | if duration_hours > 1 { "s" } else { "" } 202 | ) 203 | } else if duration_min >= 5 { 204 | format!( 205 | "{} minute{} ago", 206 | duration_min, 207 | if duration_min > 1 { "s" } else { "" } 208 | ) 209 | } else { 210 | String::from("just now!") 211 | }; 212 | 213 | let replies = match thread.num_replies { 214 | 0 => format!("No replies"), 215 | 1 => format!("1 reply"), 216 | x => format!("{} replies", x), 217 | }; 218 | 219 | let read = user.has_read(conn, &thread).await?; 220 | let jump_to = user.next_unread(conn, &thread).await?; 221 | 222 | sqlx::Result::Ok(ThreadLink { 223 | num: i + 1, 224 | id: thread.id, 225 | title: thread.title, 226 | date: duration_string, 227 | emphasize_date: duration_min < MINUTES_TIMESTAMP_IS_EMPHASIZED, 228 | read, 229 | jump_to, 230 | replies, 231 | tags: stream::iter(thread.tags.into_iter()) 232 | .filter_map( 233 | |tid| async move { Tag::fetch_from_id(conn, tid).await.ok().flatten() }, 234 | ) 235 | .map(|t| t.name) 236 | .collect() 237 | .await, 238 | pinned: thread.pinned, 239 | locked: thread.locked, 240 | hidden: thread.hidden, 241 | }) 242 | }) 243 | .filter_map(|t| future::ready(t.ok())) 244 | .collect() 245 | .await; 246 | 247 | Ok(Index { 248 | tags: viewed_tags.tags, 249 | posts: posts, 250 | viewer_role: user.role, 251 | offers: user.incoming_offers(&*conn).await.unwrap_or(0), 252 | }) 253 | } 254 | } 255 | 256 | #[derive(Template)] 257 | #[template(path = "thread.html")] 258 | pub struct ThreadPage { 259 | id: i32, 260 | title: String, 261 | tags: Vec, 
262 | posts: Vec, 263 | offers: i64, 264 | pinned: bool, 265 | locked: bool, 266 | hidden: bool, 267 | viewer_role: Role, 268 | } 269 | 270 | get!( 271 | "/thread/:thread_id", 272 | async fn view_thread( 273 | conn: Extension, 274 | user: User, 275 | Path(thread_id): Path, 276 | ) -> Result { 277 | let thread = Thread::fetch_optional(&*conn, thread_id) 278 | .await? 279 | .ok_or(ServerError::NotFound)?; 280 | 281 | user.read_thread(&conn, &thread).await?; 282 | 283 | if thread.hidden && user.role == Role::User { 284 | return Err(ServerError::NotFound); 285 | } 286 | 287 | let conn = &*conn; 288 | let user_cache = UserCache::new(conn); 289 | let posts = 290 | sqlx::query_as("SELECT * FROM replies WHERE thread_id = $1 ORDER BY post_date ASC") 291 | .bind(thread_id) 292 | .fetch(conn) 293 | .filter_map(|post| async move { post.ok() }) 294 | .then(move |post: Reply| { 295 | let user_cache = user_cache.clone(); 296 | async move { 297 | let date = post.post_date.format(crate::DATE_FMT).to_string(); 298 | let reactions = stream::iter(post.reactions.into_iter()) 299 | .filter_map(|drop_id| async move { 300 | ItemDrop::fetch(conn, drop_id).await.ok() 301 | }) 302 | .filter_map(|item_drop| async move { 303 | item_drop.get_thumbnail(conn).await.ok() 304 | }) 305 | .collect() 306 | .await; 307 | let can_edit = post.author_id == user.id; // TODO: Add time limit for replies 308 | let can_react = post.author_id != user.id; 309 | let author = user_cache.get(post.author_id).await?; 310 | let reward = if let Some(reward) = post.reward { 311 | Some( 312 | ItemDrop::fetch(conn, reward) 313 | .await? 314 | .get_thumbnail(conn) 315 | .await?, 316 | ) 317 | } else { 318 | None 319 | }; 320 | Result::<_, sqlx::Error>::Ok(Post { 321 | id: post.id, 322 | author, 323 | date, 324 | reactions, 325 | reward, 326 | can_edit, 327 | can_react, 328 | body: post.body, 329 | hidden: post.hidden, 330 | image: post.image, 331 | thumbnail: post.thumbnail, 332 | filename: post.filename, 333 | }) 334 | } 335 | }) 336 | .try_collect() 337 | .await?; 338 | 339 | Ok(ThreadPage { 340 | id: thread_id, 341 | title: thread.title.clone(), 342 | posts, 343 | tags: Tags::fetch_from_ids(conn, thread.tags.iter()) 344 | .await 345 | .into_names() 346 | .collect(), 347 | pinned: thread.pinned, 348 | locked: thread.locked, 349 | hidden: thread.hidden, 350 | offers: user.incoming_offers(conn).await?, 351 | viewer_role: user.role, 352 | }) 353 | } 354 | ); 355 | 356 | #[derive(Template, Debug)] 357 | #[template(path = "author.html")] 358 | pub struct AuthorPage { 359 | offers: i64, 360 | } 361 | 362 | get!( 363 | "/author", 364 | async fn author_page(conn: Extension, user: User) -> Result { 365 | Ok(AuthorPage { 366 | offers: user.incoming_offers(&*conn).await?, 367 | }) 368 | } 369 | ); 370 | 371 | #[derive(Template)] 372 | #[template(path = "item.html")] 373 | pub struct ItemPage { 374 | id: i32, 375 | name: String, 376 | description: String, 377 | pattern: u16, 378 | rarity: String, 379 | thumbnail: String, 380 | equip_action: Option, 381 | owner_id: i32, 382 | owner_name: String, 383 | offers: i64, 384 | } 385 | 386 | pub enum AvailableEquipAction { 387 | Equip, 388 | Unequip, 389 | } 390 | 391 | get!( 392 | "/item/:drop_id", 393 | pub async fn show( 394 | conn: Extension, 395 | user: User, 396 | Path(drop_id): Path, 397 | ) -> Result { 398 | let drop = ItemDrop::fetch_optional(&*conn, drop_id) 399 | .await? 
400 | .ok_or(ServerError::NotFound)?; 401 | let item = drop.fetch_item(&*conn).await?; 402 | let owner = User::fetch(&*conn, drop.owner_id).await?; 403 | let inventory = user.equipped(&*conn).await?; 404 | let thumbnail = item.get_thumbnail_html(drop.pattern); 405 | let equip_action = (user.id == drop.owner_id && item.is_equipable()).then(|| { 406 | if inventory.iter().any(|(_, equipped)| equipped == &drop) { 407 | AvailableEquipAction::Unequip 408 | } else { 409 | AvailableEquipAction::Equip 410 | } 411 | }); 412 | 413 | Ok(ItemPage { 414 | thumbnail, 415 | equip_action, 416 | id: drop_id, 417 | name: item.name, 418 | description: item.description, 419 | pattern: drop.pattern as u16, 420 | rarity: item.rarity.to_string(), 421 | owner_id: owner.id, 422 | owner_name: owner.name.to_string(), 423 | offers: user.incoming_offers(&*conn).await?, 424 | }) 425 | } 426 | ); 427 | 428 | #[derive(Template)] 429 | #[template(path = "react.html")] 430 | pub struct ReactPage { 431 | thread_id: i32, 432 | post_id: i32, 433 | author: ProfileStub, 434 | body: String, 435 | inventory: Vec, 436 | offers: i64, 437 | image: Option, 438 | thumbnail: Option, 439 | filename: String, 440 | } 441 | 442 | get!( 443 | "/react/:post_id", 444 | async fn react_page( 445 | conn: Extension, 446 | user: User, 447 | Path(post_id): Path, 448 | ) -> Result { 449 | let post = Reply::fetch(&*conn, post_id).await?; 450 | let author = User::fetch(&*conn, post.author_id) 451 | .await? 452 | .get_profile_stub(&*conn) 453 | .await?; 454 | 455 | let inventory: Vec<_> = user 456 | .inventory(&conn) 457 | .await? 458 | .into_iter() 459 | .filter(|(item, _)| item.is_reaction()) 460 | .map(|(item, drop)| ItemThumbnail::new(&item, &drop)) 461 | .collect(); 462 | 463 | Ok(ReactPage { 464 | thread_id: post.thread_id, 465 | post_id, 466 | author, 467 | body: post.body, 468 | inventory, 469 | offers: user.incoming_offers(&*conn).await?, 470 | image: post.image, 471 | thumbnail: post.thumbnail, 472 | filename: post.filename, 473 | }) 474 | } 475 | ); 476 | 477 | #[derive(Template)] 478 | #[template(path = "login.html")] 479 | pub struct LoginPage { 480 | offers: usize, 481 | } 482 | 483 | #[derive(Deserialize)] 484 | pub struct LoginPageParams { 485 | redirect: Option, 486 | } 487 | 488 | get!( 489 | "/login", 490 | async fn login_page( 491 | user: Result, 492 | Query(LoginPageParams { redirect }): Query, 493 | ) -> Result { 494 | match (redirect, user) { 495 | (Some(redirect), Ok(_)) => Err(Redirect::to(&redirect)), 496 | _ => Ok(LoginPage { offers: 0 }), 497 | } 498 | } 499 | ); 500 | 501 | #[derive(Template)] 502 | #[template(path = "register.html")] 503 | pub struct RegisterPage { 504 | offers: usize, 505 | } 506 | 507 | get! { 508 | "/register", 509 | async fn register_page() -> RegisterPage { 510 | RegisterPage { offers: 0 } 511 | } 512 | } 513 | 514 | #[derive(Template)] 515 | #[template(path = "update_bio.html")] 516 | pub struct UpdateBioPage { 517 | name: String, 518 | bio: String, 519 | stub: ProfileStub, 520 | offers: usize, 521 | } 522 | 523 | get! { 524 | "/bio", 525 | async fn update_bio_page(conn: Extension, user: User) -> Result { 526 | Ok(UpdateBioPage { 527 | stub: user.get_profile_stub(&*conn).await?, 528 | offers: user.incoming_offers(&*conn).await? 
as usize, 529 | name: user.name, 530 | bio: user.bio, 531 | }) 532 | } 533 | } 534 | 535 | #[derive(Template)] 536 | #[template(path = "profile.html")] 537 | pub struct ProfilePage { 538 | bio: String, 539 | level: LevelInfo, 540 | role: Role, 541 | stub: ProfileStub, 542 | equipped: Vec, 543 | inventory: Vec, 544 | is_banned: bool, 545 | is_curr_user: bool, 546 | ban_timestamp: String, 547 | viewer_role: Role, 548 | viewer_name: String, 549 | offers: i64, 550 | notes: String, 551 | } 552 | 553 | mod filters { 554 | pub fn redact(input: &str) -> ::askama::Result { 555 | Ok(input 556 | .chars() 557 | .map(|c| { 558 | if c.is_whitespace() || c == '.' || c == ',' || c == '!' || c == '?' { 559 | c 560 | } else { 561 | '█' 562 | } 563 | }) 564 | .collect::()) 565 | } 566 | } 567 | 568 | get! { 569 | "/profile", 570 | async fn show_curr_user_profile(user: User) -> Redirect { 571 | Redirect::to(&format!("/profile/{}", user.id)) 572 | } 573 | } 574 | 575 | get!( 576 | "/profile/:user_id", 577 | async fn show_user_profile( 578 | conn: Extension, 579 | curr_user: User, 580 | Path(user_id): Path, 581 | ) -> Result { 582 | let user = User::fetch_optional(&*conn, user_id) 583 | .await? 584 | .ok_or(ServerError::NotFound)?; 585 | 586 | let equipped = user.equipped(&*conn).await?; 587 | 588 | let mut is_equipped = HashSet::new(); 589 | for (_, item_drop) in &equipped { 590 | is_equipped.insert(item_drop.id); 591 | } 592 | let inventory: Vec<_> = user 593 | .inventory(&*conn) 594 | .await? 595 | .into_iter() 596 | .filter(|(_, item_drop)| !is_equipped.contains(&item_drop.id)) 597 | .map(|(item, item_drop)| ItemThumbnail::new(&item, &item_drop)) 598 | .collect(); 599 | 600 | let ban_timestamp = user 601 | .banned_until 602 | .map(|x| x.format(crate::DATE_FMT).to_string()) 603 | .unwrap_or_else(String::new); 604 | 605 | Ok(ProfilePage { 606 | is_banned: user.is_banned(), 607 | ban_timestamp, 608 | offers: curr_user.incoming_offers(&*conn).await?, 609 | stub: user.get_profile_stub(&*conn).await?, 610 | level: user.level_info(), 611 | bio: user.bio, 612 | role: user.role, 613 | equipped: equipped 614 | .into_iter() 615 | .map(|(item, item_drop)| ItemThumbnail::new(&item, &item_drop)) 616 | .collect(), 617 | inventory, 618 | is_curr_user: user.id == curr_user.id, 619 | notes: user.notes, 620 | viewer_role: curr_user.role, 621 | viewer_name: curr_user.name, 622 | }) 623 | } 624 | ); 625 | 626 | #[derive(Template)] 627 | #[template(path = "leaderboard.html")] 628 | pub struct LeaderboardPage { 629 | offers: i64, 630 | users: Vec, 631 | } 632 | 633 | struct UserRank { 634 | rank: usize, 635 | bio: String, 636 | stub: ProfileStub, 637 | } 638 | 639 | get!( 640 | "/leaderboard", 641 | async fn show_leaderboard( 642 | conn: Extension, 643 | user: User, 644 | ) -> Result { 645 | let conn = &*conn; 646 | let user_profiles = 647 | sqlx::query_as("SELECT * FROM users ORDER BY experience DESC LIMIT 100") 648 | .fetch(conn) 649 | .enumerate() 650 | .filter_map(|(i, t): (_, Result)| future::ready(t.ok().map(|t| (i, t)))) 651 | .then(|(i, u)| async move { 652 | sqlx::Result::Ok(UserRank { 653 | rank: i + 1, 654 | bio: u.bio.clone(), 655 | stub: u.get_profile_stub(conn).await?, 656 | }) 657 | }) 658 | .filter_map(|t| future::ready(t.ok())) 659 | .collect() 660 | .await; 661 | 662 | Ok(LeaderboardPage { 663 | users: user_profiles, 664 | offers: user.incoming_offers(conn).await?, 665 | }) 666 | } 667 | ); 668 | 669 | #[derive(Template)] 670 | #[template(path = "offer.html")] 671 | pub struct TradeRequestPage { 672 | sender: 
ProfileStub, 673 | sender_inventory: Vec, 674 | receiver: ProfileStub, 675 | receiver_inventory: Vec, 676 | offers: i64, 677 | } 678 | 679 | get!( 680 | "/offer/:receiver_id", 681 | async fn show_offer( 682 | conn: Extension, 683 | sender: User, 684 | Path(receiver_id): Path, 685 | ) -> Result { 686 | let receiver = User::fetch_optional(&*conn, receiver_id) 687 | .await? 688 | .ok_or(ServerError::NotFound)?; 689 | 690 | Ok(TradeRequestPage { 691 | sender: sender.get_profile_stub(&*conn).await?, 692 | sender_inventory: sender 693 | .inventory(&*conn) 694 | .await? 695 | .map(|(i, d)| ItemThumbnail::new(&i, &d)) 696 | .collect(), 697 | receiver: receiver.get_profile_stub(&*conn).await?, 698 | receiver_inventory: receiver 699 | .inventory(&*conn) 700 | .await? 701 | .map(|(i, d)| ItemThumbnail::new(&i, &d)) 702 | .collect(), 703 | offers: sender.incoming_offers(&*conn).await?, 704 | }) 705 | } 706 | ); 707 | 708 | #[derive(Template)] 709 | #[template(path = "offers.html")] 710 | pub struct TradeRequestsPage { 711 | user: ProfileStub, 712 | incoming_offers: Vec, 713 | outgoing_offers: Vec, 714 | offers: i64, 715 | } 716 | 717 | get!( 718 | "/offers", 719 | async fn show_offers( 720 | conn: Extension, 721 | user: User, 722 | ) -> Result { 723 | let user_cache = UserCache::new(&*conn); 724 | let incoming_offers = IncomingOffer::retrieve(&*conn, &user_cache, &user).await; 725 | let outgoing_offers = OutgoingOffer::retrieve(&*conn, &user_cache, &user).await; 726 | 727 | Ok(TradeRequestsPage { 728 | user: user.get_profile_stub(&*conn).await?, 729 | offers: user.incoming_offers(&*conn).await?, 730 | incoming_offers, 731 | outgoing_offers, 732 | }) 733 | } 734 | ); 735 | -------------------------------------------------------------------------------- /src/threads.rs: -------------------------------------------------------------------------------- 1 | //! Display threads 2 | use std::{ 3 | collections::{HashMap, HashSet}, 4 | sync::Arc, 5 | time::Duration, 6 | }; 7 | 8 | use axum::{ 9 | extract::{ 10 | ws::{Message, WebSocketUpgrade}, 11 | Extension, Form, Path, Query, 12 | }, 13 | response::Response, 14 | }; 15 | use chrono::{prelude::*, NaiveDateTime}; 16 | use futures::stream::StreamExt; 17 | use marche_proc_macros::{json, ErrorCode}; 18 | use serde::{Deserialize, Serialize}; 19 | use sqlx::{FromRow, PgExecutor, PgPool}; 20 | use thiserror::Error; 21 | 22 | use crate::{ 23 | get, 24 | images::{Image, UploadImageError, MAXIMUM_FILE_SIZE}, 25 | items::{ItemDrop, ItemThumbnail}, 26 | post, 27 | users::{ProfileStub, Role, User, MIN_LEVEL_TO_UPLOAD_PHOTOS}, 28 | MultipartForm, MultipartFormError, 29 | }; 30 | 31 | #[derive(FromRow, Default, Debug, Serialize)] 32 | pub struct Thread { 33 | /// Id of the thread 34 | pub id: i32, 35 | /// Id of the last post 36 | pub last_post: i32, 37 | /// Title of the thread 38 | pub title: String, 39 | /// Tags given to this thread 40 | pub tags: Vec, 41 | /// Number of replies to this thread, not including the first. 
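/// (A brand-new thread stores its opening post as its first `Reply`, so `num_replies` starts at 0.)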
42 | pub num_replies: i32, 43 | /// Whether or not the thread is pinned 44 | pub pinned: bool, 45 | /// Whether or not the thread is locked 46 | pub locked: bool, 47 | /// Whether or not the thread is hidden 48 | pub hidden: bool, 49 | } 50 | 51 | impl Thread { 52 | pub async fn fetch(conn: &PgPool, id: i32) -> Result { 53 | sqlx::query_as("SELECT * FROM threads WHERE id = $1") 54 | .bind(id) 55 | .fetch_one(conn) 56 | .await 57 | } 58 | 59 | pub async fn fetch_optional(conn: &PgPool, id: i32) -> Result, sqlx::Error> { 60 | sqlx::query_as("SELECT * FROM threads WHERE id = $1") 61 | .bind(id) 62 | .fetch_optional(conn) 63 | .await 64 | } 65 | } 66 | 67 | #[derive(Error, Serialize, Debug, ErrorCode)] 68 | pub enum DeleteThreadError { 69 | #[error("You are not privileged enough")] 70 | Unauthorized, 71 | #[error("No such thread exists")] 72 | NoSuchThread, 73 | #[error("Internal database error {0}")] 74 | InternalDbError( 75 | #[from] 76 | #[serde(skip)] 77 | sqlx::Error, 78 | ), 79 | } 80 | 81 | post!( 82 | "/delete_thread/:dead_thread_id", 83 | #[json] 84 | pub async fn delete_thread( 85 | conn: Extension, 86 | user: User, 87 | Path(dead_thread_id): Path, 88 | ) -> Result<(), DeleteThreadError> { 89 | if user.role < Role::Moderator { 90 | return Err(DeleteThreadError::Unauthorized); 91 | } 92 | 93 | // Fetch the thread title for logging purposes 94 | let thread_title = Thread::fetch_optional(&*conn, dead_thread_id) 95 | .await? 96 | .ok_or(DeleteThreadError::NoSuchThread)? 97 | .title; 98 | 99 | let mut transaction = conn.begin().await?; 100 | 101 | // Delete the thread: 102 | sqlx::query("DELETE FROM threads WHERE id = $1") 103 | .bind(dead_thread_id) 104 | .execute(&mut transaction) 105 | .await?; 106 | 107 | // Delete all replies to the thread: 108 | sqlx::query("DELETE FROM replies WHERE thread_id = $1") 109 | .bind(dead_thread_id) 110 | .execute(&mut transaction) 111 | .await?; 112 | 113 | transaction.commit().await?; 114 | 115 | tracing::info!( 116 | "User `{}` has deleted thread {dead_thread_id} titled: `{thread_title}`", 117 | user.name 118 | ); 119 | 120 | Ok(()) 121 | } 122 | ); 123 | 124 | #[derive(Debug, Deserialize)] 125 | pub struct ThreadForm { 126 | title: String, 127 | tags: String, 128 | body: String, 129 | } 130 | 131 | #[derive(Debug, Serialize, Error, ErrorCode)] 132 | pub enum SubmitThreadError { 133 | #[error("Title or body is empty")] 134 | TitleOrBodyIsEmpty, 135 | #[error("There is a tag that exceeds the maximum length ({MAX_TAG_LEN} characters")] 136 | TagTooLong, 137 | #[error("There are too many tags (maximum {MAX_NUM_TAGS} allowed)")] 138 | TooManyTags, 139 | #[error("Error uploading image: {0}")] 140 | UploadImageError(#[from] UploadImageError), 141 | #[error("Internal database error: {0}")] 142 | InternalDbError( 143 | #[from] 144 | #[serde(skip)] 145 | sqlx::Error, 146 | ), 147 | #[error("You must be level {MIN_LEVEL_TO_UPLOAD_PHOTOS} in order to upload photos")] 148 | NotAllowedToUploadPictures, 149 | #[error("Multipart form error: {0}")] 150 | MultipartFormError(#[from] MultipartFormError), 151 | } 152 | 153 | pub const MAX_TAG_LEN: usize = 16; 154 | pub const MAX_NUM_TAGS: usize = 6; 155 | 156 | post! 
{ 157 | "/thread", 158 | #[json] 159 | async fn new_thread( 160 | conn: Extension, 161 | user: User, 162 | form: Result, MultipartFormError>, 163 | ) -> Result { 164 | let MultipartForm { file, form: thread } = form?; 165 | 166 | let title = thread.title.trim(); 167 | let body = thread.body.trim(); 168 | 169 | if title.is_empty() || (body.is_empty() && file.is_none()) { 170 | return Err(SubmitThreadError::TitleOrBodyIsEmpty); 171 | } 172 | 173 | let post_date = Utc::now().naive_utc(); 174 | 175 | let (image, thumbnail, filename) = if let Some(file) = file { 176 | if !user.can_post_photos() { 177 | return Err(SubmitThreadError::NotAllowedToUploadPictures); 178 | } 179 | let Image { filename: image, thumbnail } = Image::upload_image(file.bytes).await?; 180 | (Some(image), thumbnail, file.name) 181 | } else { 182 | (None, None, String::new()) 183 | }; 184 | 185 | let mut tags = Vec::new(); 186 | for tag in parse_tag_list(&thread.tags) { 187 | let tag = tag.trim(); 188 | if tag.is_empty() { 189 | continue; 190 | } 191 | if tag.len() > MAX_TAG_LEN { 192 | return Err(SubmitThreadError::TagTooLong); 193 | } 194 | tags.push(tag); 195 | } 196 | 197 | if tags.len() > MAX_NUM_TAGS { 198 | return Err(SubmitThreadError::TooManyTags); 199 | } 200 | 201 | let mut transaction = conn.begin().await?; 202 | 203 | let mut tag_ids = Vec::new(); 204 | for tag in tags.into_iter() { 205 | if let Some(tag) = Tag::fetch_from_str_and_inc(&mut *transaction, tag).await? { 206 | tag_ids.push(tag.id()); 207 | } 208 | } 209 | 210 | 211 | let thread: Thread = sqlx::query_as( 212 | r#" 213 | INSERT INTO threads 214 | (title, tags, last_post, num_replies, pinned, locked, hidden) 215 | VALUES 216 | ($1, $2, 0, 0, FALSE, FALSE, FALSE) 217 | RETURNING * 218 | "#, 219 | ) 220 | .bind(title) 221 | .bind(tag_ids) 222 | .fetch_one(&mut *transaction) 223 | .await?; 224 | 225 | let item_drop = ItemDrop::drop(&mut transaction, &user) 226 | .await? 
227 | .map(ItemDrop::to_id); 228 | 229 | let reply: Reply = sqlx::query_as( 230 | r#" 231 | INSERT INTO replies 232 | (author_id, thread_id, post_date, body, reward, image, thumbnail, filename, reactions) 233 | VALUES 234 | ($1, $2, $3, $4, $5, $6, $7, $8, '{}') 235 | RETURNING * 236 | "# 237 | ) 238 | .bind(user.id) 239 | .bind(thread.id) 240 | .bind(post_date) 241 | .bind(body) 242 | .bind(item_drop) 243 | .bind(image) 244 | .bind(thumbnail) 245 | .bind(filename) 246 | .fetch_one(&mut *transaction) 247 | .await?; 248 | 249 | let thread = sqlx::query_as("UPDATE threads SET last_post = $1 WHERE id = $2 RETURNING *") 250 | .bind(reply.id) 251 | .bind(thread.id) 252 | .fetch_one(&mut *transaction) 253 | .await?; 254 | 255 | transaction.commit().await?; 256 | 257 | Ok(thread) 258 | } 259 | } 260 | 261 | #[derive(Deserialize)] 262 | struct UpdateThread { 263 | locked: Option, 264 | pinned: Option, 265 | hidden: Option, 266 | } 267 | 268 | #[derive(Serialize, Error, Debug, ErrorCode)] 269 | enum UpdateThreadError { 270 | #[error("You are not privileged enough")] 271 | Unauthorized, 272 | #[error("Internal database error: {0}")] 273 | InternalDbError( 274 | #[from] 275 | #[serde(skip)] 276 | sqlx::Error, 277 | ), 278 | } 279 | 280 | post!( 281 | "/thread/:thread_id", 282 | #[json] 283 | async fn update_thread_flags( 284 | conn: Extension, 285 | user: User, 286 | Path(thread_id): Path, 287 | Query(UpdateThread { 288 | locked, 289 | pinned, 290 | hidden, 291 | }): Query, 292 | ) -> Result<(), UpdateThreadError> { 293 | if user.role < Role::Moderator { 294 | return Err(UpdateThreadError::Unauthorized); 295 | } 296 | 297 | if locked.is_none() && pinned.is_none() && hidden.is_none() { 298 | return Ok(()); 299 | } 300 | 301 | // TODO: Come up with some pattern to chain these 302 | 303 | if let Some(locked) = locked { 304 | sqlx::query("UPDATE threads SET locked = $1 WHERE id = $2") 305 | .bind(locked) 306 | .bind(thread_id) 307 | .execute(&*conn) 308 | .await?; 309 | } 310 | 311 | if let Some(pinned) = pinned { 312 | sqlx::query("UPDATE threads SET pinned = $1 WHERE id = $2") 313 | .bind(pinned) 314 | .bind(thread_id) 315 | .execute(&*conn) 316 | .await?; 317 | } 318 | 319 | if let Some(hidden) = hidden { 320 | sqlx::query("UPDATE threads SET hidden = $1 WHERE id = $2") 321 | .bind(hidden) 322 | .bind(thread_id) 323 | .execute(&*conn) 324 | .await?; 325 | } 326 | 327 | Ok(()) 328 | } 329 | ); 330 | 331 | #[derive(Debug, FromRow, Serialize, Clone)] 332 | pub struct Tag { 333 | pub id: i32, 334 | pub name: String, 335 | /// Number of posts that have been tagged with this tag. 336 | pub num_tagged: i32, 337 | } 338 | 339 | impl Tag { 340 | pub fn id(&self) -> i32 { 341 | self.id 342 | } 343 | 344 | /// Returns the most popular tags. 
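/// (At most ten, ordered by `num_tagged` descending.)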
345 | pub async fn popular(conn: &PgPool) -> Result, sqlx::Error> { 346 | sqlx::query_as("SELECT * FROM tags ORDER BY num_tagged DESC LIMIT 10") 347 | .fetch_all(conn) 348 | .await 349 | } 350 | 351 | pub async fn fetch_from_id(conn: &PgPool, id: i32) -> Result, sqlx::Error> { 352 | sqlx::query_as("SELECT * FROM tags WHERE id = $1") 353 | .bind(id) 354 | .fetch_optional(conn) 355 | .await 356 | } 357 | 358 | pub async fn fetch_from_str(conn: &PgPool, tag: &str) -> Result, sqlx::Error> { 359 | let tag_name = clean_tag_name(tag); 360 | 361 | if tag_name.is_empty() { 362 | return Ok(None); 363 | } 364 | 365 | sqlx::query_as("SELECT * FROM tags WHERE name = $1") 366 | .bind(tag_name) 367 | .fetch_optional(conn) 368 | .await 369 | } 370 | 371 | /// Fetches a tag, creating it if it doesn't already exist. num_tagged is 372 | /// incremented or set to one. 373 | /// 374 | /// It's kind of a weird interface, I'm open to suggestions. 375 | /// 376 | /// Assumes that str is not empty. 377 | pub async fn fetch_from_str_and_inc( 378 | conn: impl PgExecutor<'_>, 379 | tag: &str, 380 | ) -> Result, sqlx::Error> { 381 | let tag_name = clean_tag_name(tag); 382 | 383 | if tag_name.is_empty() { 384 | return Ok(None); 385 | } 386 | 387 | sqlx::query_as( 388 | r#" 389 | INSERT INTO tags (name) 390 | VALUES ($1) 391 | ON CONFLICT (name) DO UPDATE SET num_tagged = tags.num_tagged + 1 392 | RETURNING * 393 | "#, 394 | ) 395 | .bind(tag_name) 396 | .fetch_optional(conn) 397 | .await 398 | } 399 | } 400 | 401 | fn clean_tag_name(name: &str) -> String { 402 | name.trim().to_lowercase() 403 | } 404 | 405 | fn parse_tag_list(list: &str) -> impl Iterator { 406 | // TODO: More stuff! 407 | list.split(",").map(|i| i.trim()) 408 | } 409 | 410 | #[derive(Debug, Clone)] 411 | pub struct Tags { 412 | pub tags: Vec, 413 | } 414 | 415 | impl Tags { 416 | pub async fn fetch_from_str(conn: &PgPool, path: &str) -> Self { 417 | let mut seen = HashSet::new(); 418 | let tags = futures::stream::iter(path.split("/")) 419 | .filter_map(move |s| async move { Tag::fetch_from_str(conn, s).await.ok().flatten() }) 420 | .collect::>() 421 | .await 422 | .into_iter() 423 | .filter(move |t| seen.insert(t.id)) 424 | .collect(); 425 | Tags { tags } 426 | } 427 | 428 | pub async fn fetch_from_ids<'a>(conn: &PgPool, ids: impl Iterator) -> Self { 429 | Self { 430 | tags: futures::stream::iter(ids) 431 | .filter_map(|id| async move { Tag::fetch_from_id(conn, *id).await.ok().flatten() }) 432 | .collect() 433 | .await, 434 | } 435 | } 436 | 437 | // Not going to deal with lifetimes here. Just clone it. 
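// Roughly how callers in src/pages.rs consume a `Tags` value (sketch, not new API):
//     viewed_tags.clone().into_ids().collect::<Vec<_>>()                            // index page keeps the original around
//     Tags::fetch_from_ids(conn, thread.tags.iter()).await.into_names().collect()   // thread page renders names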
438 | 439 | pub fn into_names(self) -> impl Iterator { 440 | self.tags.into_iter().map(|x| x.name) 441 | } 442 | 443 | pub fn into_ids(self) -> impl Iterator { 444 | self.tags.into_iter().map(|x| x.id) 445 | } 446 | 447 | pub fn is_empty(&self) -> bool { 448 | self.tags.is_empty() 449 | } 450 | } 451 | 452 | #[derive(FromRow, Debug, Serialize, Deserialize)] 453 | pub struct Reply { 454 | /// Id of the reply 455 | pub id: i32, 456 | /// Id of the author 457 | pub author_id: i32, 458 | /// Id of the thread 459 | pub thread_id: i32, 460 | /// Date of posting 461 | pub post_date: NaiveDateTime, 462 | /// Body of the reply 463 | pub body: String, 464 | /// Any item that was rewarded for this post 465 | pub reward: Option, 466 | /// Reactions attached to this post 467 | pub reactions: Vec, 468 | /// Image associated with this post 469 | pub image: Option, 470 | /// Thumbnail associated with this post's image 471 | pub thumbnail: Option, 472 | /// Filename associated with the image 473 | pub filename: String, 474 | /// Whether or not the thread is hidden 475 | pub hidden: bool, 476 | } 477 | 478 | impl Reply { 479 | pub async fn fetch(conn: &PgPool, id: i32) -> Result { 480 | sqlx::query_as("SELECT * FROM replies WHERE id = $1") 481 | .bind(id) 482 | .fetch_one(conn) 483 | .await 484 | } 485 | 486 | pub async fn fetch_optional(conn: &PgPool, id: i32) -> Result, sqlx::Error> { 487 | sqlx::query_as("SELECT * FROM replies WHERE id = $1") 488 | .bind(id) 489 | .fetch_optional(conn) 490 | .await 491 | } 492 | } 493 | 494 | #[derive(Serialize, Error, Debug, ErrorCode)] 495 | pub enum DeleteReplyError { 496 | #[error("You are not privileged enough")] 497 | Unauthorized, 498 | #[error("No such reply exists")] 499 | NoSuchReply, 500 | #[error("You cannot delete the first reply in a thread")] 501 | CannotDeleteFirstReply, 502 | #[error("Internal database error: {0}")] 503 | InternalDbError( 504 | #[from] 505 | #[serde(skip)] 506 | sqlx::Error, 507 | ), 508 | } 509 | 510 | post!( 511 | "/delete_reply/:dead_reply_id", 512 | #[json] 513 | async fn delete_reply( 514 | conn: Extension, 515 | user: User, 516 | Path(dead_reply_id): Path, 517 | ) -> Result<(), DeleteReplyError> { 518 | if user.role < Role::Moderator { 519 | return Err(DeleteReplyError::Unauthorized); 520 | } 521 | 522 | let dead_reply = Reply::fetch_optional(&*conn, dead_reply_id) 523 | .await? 524 | .ok_or(DeleteReplyError::NoSuchReply)?; 525 | 526 | // Get the post before this one in case last_post is the dead reply 527 | let prev_reply: Reply = sqlx::query_as( 528 | "SELECT * FROM replies WHERE thread_id = $1 AND id < $2 ORDER BY id DESC", 529 | ) 530 | .bind(dead_reply.thread_id) 531 | .bind(dead_reply_id) 532 | .fetch_optional(&*conn) 533 | .await? 
534 | .ok_or(DeleteReplyError::CannotDeleteFirstReply)?; 535 | 536 | sqlx::query("UPDATE threads SET last_post = $1 WHERE id = $2 AND last_post = $3") 537 | .bind(prev_reply.id) 538 | .bind(dead_reply.thread_id) 539 | .bind(dead_reply_id) 540 | .execute(&*conn) 541 | .await?; 542 | 543 | // Reduce the number of replies by one: 544 | sqlx::query("UPDATE threads SET num_replies = num_replies - 1 WHERE id = $1") 545 | .bind(dead_reply.thread_id) 546 | .execute(&*conn) 547 | .await?; 548 | 549 | // Delete the reply: 550 | sqlx::query("DELETE FROM replies WHERE id = $1") 551 | .bind(dead_reply_id) 552 | .execute(&*conn) 553 | .await?; 554 | 555 | tracing::info!( 556 | "User `{}` has deleted reply {dead_reply_id} in thread {}", 557 | user.name, 558 | dead_reply.thread_id, 559 | ); 560 | 561 | Ok(()) 562 | } 563 | ); 564 | 565 | #[derive(Deserialize)] 566 | pub struct ReplyForm { 567 | body: String, 568 | thread_id: String, 569 | } 570 | 571 | #[derive(Debug, Serialize, Error, ErrorCode)] 572 | pub enum ReplyError { 573 | #[error("No such thread")] 574 | NoSuchThread, 575 | #[error("Reply cannot be empty")] 576 | ReplyIsEmpty, 577 | #[error("Thread is locked")] 578 | ThreadIsLocked, 579 | #[error("Error uploading image: {0}")] 580 | UploadImageError( 581 | #[from] 582 | #[serde(skip)] 583 | UploadImageError, 584 | ), 585 | #[error("You must be level {MIN_LEVEL_TO_UPLOAD_PHOTOS} in order to upload photos")] 586 | NotAllowedToUploadPictures, 587 | #[error("Internal database error: {0}")] 588 | InternalDbError( 589 | #[from] 590 | #[serde(skip)] 591 | sqlx::Error, 592 | ), 593 | } 594 | 595 | post!( 596 | "/reply", 597 | #[json] 598 | pub async fn new_reply( 599 | conn: Extension, 600 | user: User, 601 | MultipartForm { 602 | file, 603 | form: ReplyForm { thread_id, body }, 604 | }: MultipartForm, 605 | ) -> Result<(), ReplyError> { 606 | let body = body.trim(); 607 | 608 | if body.is_empty() && file.is_none() { 609 | return Err(ReplyError::ReplyIsEmpty); 610 | } 611 | 612 | let thread_id: i32 = thread_id.parse().map_err(|_| ReplyError::NoSuchThread)?; 613 | if Thread::fetch_optional(&conn, thread_id) 614 | .await? 615 | .ok_or(ReplyError::NoSuchThread)? 616 | .locked 617 | { 618 | return Err(ReplyError::ThreadIsLocked); 619 | } 620 | 621 | let post_date = Utc::now().naive_utc(); 622 | 623 | let (image, thumbnail, filename) = if let Some(file) = file { 624 | if !user.can_post_photos() { 625 | return Err(ReplyError::NotAllowedToUploadPictures); 626 | } 627 | let Image { 628 | filename: image, 629 | thumbnail, 630 | } = Image::upload_image(file.bytes).await?; 631 | (Some(image), thumbnail, file.name) 632 | } else { 633 | (None, None, String::new()) 634 | }; 635 | 636 | let mut transaction = conn.begin().await?; 637 | 638 | let reply: Reply = sqlx::query_as( 639 | r#" 640 | INSERT INTO replies 641 | (author_id, thread_id, post_date, body, reward, image, thumbnail, filename, reactions) 642 | VALUES 643 | ($1, $2, $3, $4, $5, $6, $7, $8, '{}') 644 | RETURNING * 645 | "# 646 | ) 647 | .bind(user.id) 648 | .bind(thread_id) 649 | .bind(post_date) 650 | .bind(body) 651 | .bind( 652 | ItemDrop::drop(&mut transaction, &user) 653 | .await? 
654 | .map(ItemDrop::to_id) 655 | ) 656 | .bind(image) 657 | .bind(thumbnail) 658 | .bind(filename) 659 | .fetch_one(&mut *transaction) 660 | .await?; 661 | 662 | let thread: Thread = sqlx::query_as( 663 | r#" 664 | UPDATE threads SET 665 | last_post = $1, 666 | num_replies = num_replies + 1 667 | WHERE 668 | id = $2 669 | RETURNING * 670 | "#, 671 | ) 672 | .bind(reply.id) 673 | .bind(thread_id) 674 | .fetch_one(&mut *transaction) 675 | .await?; 676 | 677 | transaction.commit().await?; 678 | 679 | user.read_thread(&*conn, &thread).await?; 680 | 681 | Ok(()) 682 | } 683 | ); 684 | 685 | #[derive(Deserialize)] 686 | pub struct UpdateReplyParams { 687 | hidden: Option, 688 | } 689 | 690 | #[derive(Deserialize)] 691 | pub struct UpdateReplyForm { 692 | body: Option, 693 | } 694 | 695 | #[derive(Serialize, Error, Debug, ErrorCode)] 696 | pub enum UpdateReplyError { 697 | #[error("You are not privileged enough")] 698 | Unauthorized, 699 | #[error("Post does not exist")] 700 | NoSuchReply, 701 | #[error("You cannot make a post empty")] 702 | CannotMakeEmpty, 703 | #[error("Internal database error: {0}")] 704 | InternalDbError( 705 | #[from] 706 | #[serde(skip)] 707 | sqlx::Error, 708 | ), 709 | } 710 | 711 | post! { 712 | "/reply/:post_id", 713 | #[json] 714 | pub async fn update_reply( 715 | conn: Extension, 716 | user: User, 717 | Path(post_id): Path, 718 | Query(UpdateReplyParams { 719 | hidden, 720 | }): Query, 721 | Form(UpdateReplyForm { body }): Form, 722 | ) -> Result<(), UpdateReplyError> { 723 | let post = Reply::fetch_optional(&*conn, post_id) 724 | .await? 725 | .ok_or(UpdateReplyError::NoSuchReply)?; 726 | 727 | if let Some(hidden) = hidden { 728 | if user.role < Role::Moderator { 729 | return Err(UpdateReplyError::Unauthorized); 730 | } 731 | sqlx::query("UPDATE replies SET hidden = $1 WHERE id = $2") 732 | .bind(hidden) 733 | .bind(post_id) 734 | .execute(&*conn) 735 | .await?; 736 | } 737 | 738 | let Some(body) = body else { 739 | return Ok(()); 740 | }; 741 | 742 | if post.author_id != user.id && user.role < Role::Moderator { 743 | return Err(UpdateReplyError::Unauthorized); 744 | } 745 | 746 | let body = body.trim(); 747 | 748 | if post.image.is_none() && body.is_empty() { 749 | return Err(UpdateReplyError::CannotMakeEmpty); 750 | } 751 | 752 | sqlx::query("UPDATE replies SET body = $1 WHERE id = $2") 753 | .bind(body) 754 | .bind(post_id) 755 | .execute(&*conn) 756 | .await?; 757 | 758 | Ok(()) 759 | } 760 | } 761 | 762 | #[derive(Serialize, Error, Debug, ErrorCode)] 763 | pub enum ReactError { 764 | #[error("No such reply exists")] 765 | NoSuchReply, 766 | #[error("You don't own these reactions")] 767 | Unauthorized, 768 | #[error("You have already consumed one of these reactions")] 769 | AlreadyConsumed, 770 | #[error("You cannot react to your own post")] 771 | ThisIsYourPost, 772 | #[error("Internal database error: {0}")] 773 | InternalDbError( 774 | #[from] 775 | #[serde(skip)] 776 | sqlx::Error, 777 | ), 778 | } 779 | 780 | post!( 781 | "/react/:post_id", 782 | #[json] 783 | pub async fn react( 784 | conn: Extension, 785 | user: User, 786 | Path(post_id): Path, 787 | Form(used_reactions): Form>, 788 | ) -> Result<(), ReactError> { 789 | let reply = Reply::fetch_optional(&conn, post_id) 790 | .await? 
791 | .ok_or(ReactError::NoSuchReply)?; 792 | 793 | if reply.author_id == user.id { 794 | return Err(ReactError::ThisIsYourPost); 795 | } 796 | 797 | let mut transaction = conn.begin().await?; 798 | let mut new_reactions = Vec::new(); 799 | let author = User::fetch(&mut transaction, reply.author_id).await?; 800 | 801 | // Verify that all of the reactions are owned by the user: 802 | for (reaction, selected) in used_reactions.into_iter() { 803 | let item_drop = ItemDrop::fetch(&mut transaction, reaction).await?; 804 | let item = item_drop.fetch_item(&mut transaction).await?; 805 | if selected != "on" || item_drop.owner_id != user.id || !item.is_reaction() { 806 | return Err(ReactError::Unauthorized); 807 | } 808 | 809 | // Set the drops to consumed: 810 | if sqlx::query("UPDATE drops SET consumed = TRUE WHERE id = $1 AND consumed = FALSE") 811 | .bind(reaction) 812 | .execute(&mut transaction) 813 | .await? 814 | .rows_affected() 815 | != 1 816 | { 817 | return Err(ReactError::AlreadyConsumed); 818 | } 819 | 820 | new_reactions.push(reaction); 821 | author 822 | .add_experience(&mut transaction, item.get_experience().unwrap() as i64) 823 | .await?; 824 | } 825 | 826 | sqlx::query("UPDATE replies SET reactions = reactions || $1 WHERE id = $2") 827 | .bind(new_reactions) 828 | .bind(post_id) 829 | .execute(&mut transaction) 830 | .await?; 831 | 832 | transaction.commit().await?; 833 | 834 | Ok(()) 835 | } 836 | ); 837 | 838 | /// A post is a generalized reply and thread. 839 | #[derive(Serialize)] 840 | pub struct Post { 841 | pub id: i32, 842 | pub author: Arc, 843 | pub body: String, 844 | pub date: String, 845 | pub reactions: Vec, 846 | pub reward: Option, 847 | pub can_react: bool, 848 | pub can_edit: bool, 849 | pub hidden: bool, 850 | pub image: Option, 851 | pub thumbnail: Option, 852 | pub filename: String, 853 | } 854 | 855 | get!( 856 | "/watch/:thread_id", 857 | pub async fn watch( 858 | _user: User, 859 | conn: Extension, 860 | ws: WebSocketUpgrade, 861 | Path(thread_id): Path, 862 | ) -> Response { 863 | let latest_thread: Reply = 864 | sqlx::query_as("SELECT * FROM replies WHERE thread_id = $1 ORDER BY post_date DESC") 865 | .bind(thread_id) 866 | .fetch_one(&*conn) 867 | .await 868 | .unwrap(); 869 | let mut last_post = latest_thread.id; 870 | ws.on_upgrade(move |mut socket| async move { 871 | // Don't use listeners. 
It will quickly exhaust the number of connections 872 | loop { 873 | let mut new_posts = sqlx::query_as( 874 | "SELECT * FROM replies WHERE thread_id = $1 AND id > $2 ORDER BY post_date ASC ", 875 | ) 876 | .bind(thread_id) 877 | .bind(last_post) 878 | .fetch(&*conn); 879 | while let Some(reply) = new_posts.next().await { 880 | let reply: Reply = reply.unwrap(); 881 | last_post = reply.id; 882 | let user = User::fetch(&*conn, reply.author_id).await.unwrap(); 883 | let body = askama::filters::linebreaks( 884 | askama::filters::escape(askama::Html, reply.body).unwrap(), 885 | ) 886 | .unwrap(); 887 | let post = Post { 888 | id: reply.id, 889 | author: Arc::new(user.get_profile_stub(&*conn).await.unwrap()), 890 | body, 891 | date: reply.post_date.format(crate::DATE_FMT).to_string(), 892 | reactions: vec![], 893 | reward: match reply.reward { 894 | Some(drop_id) => ItemDrop::fetch(&*conn, drop_id) 895 | .await 896 | .unwrap() 897 | .get_thumbnail(&*conn) 898 | .await 899 | .ok(), 900 | _ => None, 901 | }, 902 | can_react: false, 903 | can_edit: true, 904 | hidden: false, 905 | image: reply.image, 906 | thumbnail: reply.thumbnail, 907 | filename: reply.filename, 908 | }; 909 | if socket 910 | .send(Message::from(serde_json::to_string(&post).unwrap())) 911 | .await 912 | .is_err() 913 | { 914 | return; 915 | } 916 | } 917 | tokio::time::sleep(Duration::from_millis(50)).await; 918 | } 919 | }) 920 | } 921 | ); 922 | -------------------------------------------------------------------------------- /src/users.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | collections::HashMap, 3 | ops::Range, 4 | string::FromUtf8Error, 5 | sync::{Arc, Mutex}, 6 | }; 7 | use aes_gcm::{aead::Aead, Aes256Gcm, KeyInit, Nonce}; 8 | use askama::Template; 9 | use axum::{ 10 | async_trait, 11 | extract::{Extension, Form, FromRequestParts, Path, Query}, 12 | http::request::Parts, 13 | response::{IntoResponse, Redirect, Response}, 14 | }; 15 | use axum_client_ip::ClientIp; 16 | use chrono::{prelude::*, Duration}; 17 | use cookie::time as cookie_time; 18 | use futures::StreamExt; 19 | use google_authenticator::{create_secret, qr_code_url}; 20 | use ipnetwork::IpNetwork; 21 | use lazy_static::lazy_static; 22 | use libpasta::{hash_password, verify_password}; 23 | use marche_proc_macros::{json, ErrorCode}; 24 | use rand::{rngs::OsRng, RngCore}; 25 | use serde::{Deserialize, Serialize}; 26 | use sqlx::{FromRow, PgExecutor, PgPool, Postgres, Row, Transaction, Type}; 27 | use thiserror::Error; 28 | use tower_cookies::{Cookie, Cookies, Key}; 29 | 30 | use crate::{ 31 | items::{Item, ItemDrop}, 32 | post, 33 | threads::{Reply, Thread}, 34 | }; 35 | 36 | #[derive(FromRow, Debug)] 37 | pub struct User { 38 | /// Id of the user 39 | pub id: i32, 40 | /// User name (lowercased copy of the display name for faster lookups) 41 | pub name: String, 42 | /// Display name 43 | pub display_name: String, 44 | /// Password hash 45 | pub password: String, 46 | /// Encrypted, shared secret for 2FA 47 | pub secret: Vec, 48 | /// Reset code to change password and 2FA 49 | pub reset_code: String, 50 | /// Biography of the user 51 | pub bio: String, 52 | /// Email address of the useer 53 | pub email: String, 54 | /// Role 55 | pub role: Role, 56 | /// Exprerience 57 | pub experience: i64, 58 | /// Last reward 59 | pub last_reward: NaiveDateTime, 60 | /// ProfilePic equipment slot 61 | pub equip_slot_prof_pic: Option, 62 | /// ProfileBackground equipment slot 63 | pub equip_slot_background: Option, 
64 | /// Badge equipment slots 65 | pub equip_slot_badges: Vec, 66 | /// If the user is banned, and for how long 67 | pub banned_until: Option, 68 | /// Notes on the user by moderators or admins 69 | pub notes: String, 70 | } 71 | 72 | /// Displayable user profile 73 | #[derive(Clone, Serialize)] 74 | pub struct ProfileStub { 75 | pub id: i32, 76 | pub name: String, 77 | pub picture: Option, 78 | pub background: Option, 79 | pub badges: Vec, 80 | pub level: LevelInfo, 81 | } 82 | 83 | #[derive( 84 | Copy, Clone, Debug, Hash, Eq, PartialEq, PartialOrd, Ord, Serialize, Deserialize, Type, 85 | )] 86 | #[sqlx(type_name = "user_role")] 87 | #[sqlx(rename_all = "snake_case")] 88 | pub enum Role { 89 | User, 90 | Moderator, 91 | Admin, 92 | } 93 | 94 | #[derive(Copy, Clone, Serialize)] 95 | pub struct LevelInfo { 96 | pub level: u32, 97 | pub curr_xp: u64, 98 | pub next_level_xp: u64, 99 | } 100 | 101 | pub const MAX_NUM_BADGES: usize = 10; 102 | pub const MIN_LEVEL_TO_UPLOAD_PHOTOS: u32 = 3; 103 | 104 | impl User { 105 | pub async fn fetch(conn: impl PgExecutor<'_>, user_id: i32) -> Result { 106 | sqlx::query_as("SELECT * FROM users WHERE id = $1") 107 | .bind(user_id) 108 | .fetch_one(conn) 109 | .await 110 | } 111 | 112 | pub async fn fetch_optional( 113 | conn: impl PgExecutor<'_>, 114 | user_id: i32, 115 | ) -> Result, sqlx::Error> { 116 | sqlx::query_as("SELECT * FROM users WHERE id = $1") 117 | .bind(user_id) 118 | .fetch_optional(conn) 119 | .await 120 | } 121 | 122 | /// Returns the raw, total experience of the user 123 | pub fn experience(&self) -> u64 { 124 | self.experience as u64 125 | } 126 | 127 | /// Returns the level of the user. The level is defined as the log_2 of the 128 | /// user's experience value. 129 | pub fn level(&self) -> u32 { 130 | let xp = self.experience(); 131 | // Base level is 1 132 | if xp < 4 { 133 | 1 134 | } else { 135 | 63 - xp.leading_zeros() 136 | } 137 | } 138 | 139 | pub fn can_post_photos(&self) -> bool { 140 | self.level() >= MIN_LEVEL_TO_UPLOAD_PHOTOS 141 | } 142 | 143 | pub fn is_banned(&self) -> bool { 144 | self.banned_until 145 | .map(|until| { 146 | let now = Utc::now().naive_utc(); 147 | now < until 148 | }) 149 | .unwrap_or(false) 150 | } 151 | 152 | /// Returns a range of the current completion of the user's next level. 153 | pub fn level_completion(&self) -> Range { 154 | let level = self.level(); 155 | let base_xp = if level == 1 { 0 } else { 1 << level }; 156 | let next_level = level + 1; 157 | let next_level_xp = (1 << next_level) as u64 - base_xp; 158 | (self.experience() - base_xp)..next_level_xp 159 | } 160 | 161 | pub fn level_info(&self) -> LevelInfo { 162 | let completion = self.level_completion(); 163 | LevelInfo { 164 | level: self.level(), 165 | curr_xp: completion.start, 166 | next_level_xp: completion.end, 167 | } 168 | } 169 | 170 | pub async fn add_experience( 171 | &self, 172 | conn: &mut Transaction<'_, Postgres>, 173 | xp: i64, 174 | ) -> Result<(), sqlx::Error> { 175 | sqlx::query("UPDATE users SET experience = GREATEST(experience + $1, 0) WHERE id = $2") 176 | .bind(xp) 177 | .bind(self.id) 178 | .execute(conn) 179 | .await?; 180 | Ok(()) 181 | } 182 | 183 | /// Returns a vec of equipped items. 
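/// (Profile picture and background slots first, followed by any equipped badges.)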
184 | pub async fn equipped(&self, conn: &PgPool) -> Result, sqlx::Error> { 185 | let mut items = Vec::new(); 186 | 187 | if let Some(prof_pic) = self.equip_slot_prof_pic { 188 | let item_drop = ItemDrop::fetch(conn, prof_pic).await?; 189 | items.push((item_drop.fetch_item(conn).await?, item_drop)); 190 | } 191 | 192 | if let Some(background) = self.equip_slot_background { 193 | let item_drop = ItemDrop::fetch(conn, background).await?; 194 | items.push((item_drop.fetch_item(conn).await?, item_drop)); 195 | } 196 | 197 | for badge in self.equip_slot_badges.iter() { 198 | let item_drop = ItemDrop::fetch(conn, *badge).await?; 199 | items.push((item_drop.fetch_item(conn).await?, item_drop)); 200 | } 201 | 202 | Ok(items) 203 | } 204 | 205 | pub async fn inventory( 206 | &self, 207 | conn: &PgPool, 208 | ) -> Result, sqlx::Error> { 209 | let mut inventory = 210 | sqlx::query_as("SELECT * FROM drops WHERE owner_id = $1 AND consumed = FALSE") 211 | .bind(self.id) 212 | .fetch(conn) 213 | .filter_map(|item_drop: Result| async move { 214 | let Ok(item_drop) = item_drop else { 215 | return None; 216 | }; 217 | Item::fetch(conn, item_drop.item_id) 218 | .await 219 | .ok() 220 | .map(move |item| (item, item_drop)) 221 | }) 222 | .collect::>() 223 | .await; 224 | 225 | inventory.sort_by(|a, b| a.0.rarity.cmp(&b.0.rarity).reverse()); 226 | 227 | Ok(inventory.into_iter()) 228 | } 229 | 230 | pub async fn get_avatar(&self, conn: &PgPool) -> Result, sqlx::Error> { 231 | let Some(drop_id) = self.equip_slot_prof_pic else { 232 | return Ok(None); 233 | }; 234 | Ok(ItemDrop::fetch(conn, drop_id) 235 | .await? 236 | .fetch_item(conn) 237 | .await? 238 | .as_avatar()) 239 | } 240 | 241 | pub async fn get_profile_background( 242 | &self, 243 | conn: &PgPool, 244 | ) -> Result, sqlx::Error> { 245 | let Some(drop_id) = self.equip_slot_background else { 246 | return Ok(None); 247 | }; 248 | let item_drop = ItemDrop::fetch(conn, drop_id).await?; 249 | Ok(item_drop 250 | .fetch_item(conn) 251 | .await? 252 | .as_profile_background(item_drop.pattern)) 253 | } 254 | 255 | pub async fn get_badges(&self, conn: &PgPool) -> Result, sqlx::Error> { 256 | let mut badges = Vec::new(); 257 | for badge in self.equip_slot_badges.iter() { 258 | let Some(item) = ItemDrop::fetch(conn, *badge).await?.fetch_item(&*conn).await?.as_badge() else { 259 | continue; 260 | }; 261 | badges.push(item); 262 | } 263 | Ok(badges) 264 | } 265 | 266 | /// Attempt to update the last drop time. If we fail, return false. 267 | /// This will fail if the user has received a new reward since the user has 268 | /// been fetched, which is by design. 269 | pub async fn update_last_reward(&self, conn: impl PgExecutor<'_>) -> Result { 270 | let rows_affected = 271 | sqlx::query("UPDATE users SET last_reward = $1 WHERE id = $2 AND last_reward = $3") 272 | .bind(Utc::now().naive_utc()) 273 | .bind(self.id) 274 | .bind(self.last_reward) 275 | .execute(conn) 276 | .await? 
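// Zero rows affected means last_reward no longer matches the value we read, i.e. another request already granted the reward (an optimistic compare-and-swap on last_reward).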
277 | .rows_affected(); 278 | Ok(rows_affected > 0) 279 | } 280 | 281 | pub async fn get_profile_stub(&self, conn: &PgPool) -> Result { 282 | Ok(ProfileStub { 283 | id: self.id, 284 | name: self.display_name.clone(), 285 | picture: self.get_avatar(conn).await?, 286 | background: self.get_profile_background(conn).await?, 287 | badges: self.get_badges(conn).await?, 288 | level: self.level_info(), 289 | }) 290 | } 291 | 292 | pub async fn next_unread(&self, conn: &PgPool, thread: &Thread) -> Result { 293 | let reading_history: Option = 294 | sqlx::query_as("SELECT * FROM reading_history WHERE reader_id = $1 AND thread_id = $2") 295 | .bind(self.id) 296 | .bind(thread.id) 297 | .fetch_optional(conn) 298 | .await?; 299 | 300 | let last_read = match reading_history { 301 | None => { 302 | // Find the first reply 303 | let reply: Reply = sqlx::query_as( 304 | "SELECT * FROM replies WHERE thread_id = $1 ORDER BY post_date ASC", 305 | ) 306 | .bind(thread.id) 307 | .fetch_one(conn) 308 | .await?; 309 | 310 | reply.id 311 | } 312 | Some(ReadingHistory { last_read, .. }) => sqlx::query_as( 313 | "SELECT * FROM replies WHERE thread_id = $1 AND id > $2 ORDER BY post_date ASC", 314 | ) 315 | .bind(thread.id) 316 | .bind(last_read) 317 | .fetch_optional(conn) 318 | .await? 319 | .map_or_else(|| last_read, |reply: Reply| reply.id), 320 | }; 321 | 322 | Ok(last_read) 323 | } 324 | 325 | pub async fn has_read(&self, conn: &PgPool, thread: &Thread) -> Result { 326 | Ok( 327 | sqlx::query_as("SELECT * FROM reading_history WHERE reader_id = $1 AND thread_id = $2") 328 | .bind(self.id) 329 | .bind(thread.id) 330 | .fetch_optional(conn) 331 | .await? 332 | .map_or(false, |history: ReadingHistory| { 333 | history.last_read >= thread.last_post 334 | }), 335 | ) 336 | } 337 | 338 | pub async fn read_thread(&self, conn: &PgPool, thread: &Thread) -> Result<(), sqlx::Error> { 339 | sqlx::query( 340 | r#" 341 | INSERT INTO reading_history 342 | (reader_id, thread_id, last_read) 343 | VALUES 344 | ($1, $2, $3) 345 | ON CONFLICT 346 | (reader_id, thread_id) 347 | DO UPDATE SET 348 | last_read = EXCLUDED.last_read 349 | "#, 350 | ) 351 | .bind(self.id) 352 | .bind(thread.id) 353 | .bind(thread.last_post) 354 | .execute(conn) 355 | .await?; 356 | 357 | Ok(()) 358 | } 359 | 360 | pub async fn incoming_offers(&self, conn: &PgPool) -> Result { 361 | Ok( 362 | sqlx::query("SELECT COUNT(*) FROM trade_requests WHERE receiver_id = $1") 363 | .bind(self.id) 364 | .fetch_one(conn) 365 | .await? 
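// COUNT(*) always yields exactly one row; column 0 holds the count.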
366 | .get(0), 367 | ) 368 | } 369 | } 370 | 371 | #[derive(Deserialize)] 372 | pub struct UserRegistrationForm { 373 | username: String, 374 | password: String, 375 | email: String, 376 | } 377 | 378 | #[derive(Serialize)] 379 | pub struct UserRegistration { 380 | qr_code_url: String, 381 | reset_code: String, 382 | } 383 | 384 | #[derive(Error, Debug, Serialize, ErrorCode)] 385 | pub enum UserRegistrationError { 386 | #[error("User names can only contain alphanumeric characters")] 387 | InvalidUserName, 388 | #[error("Password is too short (minimum {MINIMUM_PASSWORD_LENGTH} characters)")] 389 | PasswordTooShort, 390 | #[error("User name has already been registered")] 391 | UserNameInUse, 392 | #[error("Invalid email")] 393 | InvalidEmail, 394 | #[error("Internal db error: {0}")] 395 | InternalDbError( 396 | #[from] 397 | #[serde(skip)] 398 | sqlx::Error, 399 | ), 400 | #[error("Internal encryption error")] 401 | InternalEncryptionError, 402 | } 403 | 404 | impl From<aes_gcm::Error> for UserRegistrationError { 405 | fn from(_: aes_gcm::Error) -> Self { 406 | UserRegistrationError::InternalEncryptionError 407 | } 408 | } 409 | 410 | const MINIMUM_PASSWORD_LENGTH: usize = 8; 411 | 412 | post!( 413 | "/user", 414 | #[json] 415 | async fn register_user( 416 | conn: Extension<PgPool>, 417 | Form(UserRegistrationForm { 418 | username, 419 | password, 420 | email, 421 | }): Form<UserRegistrationForm>, 422 | ) -> Result<UserRegistration, UserRegistrationError> { 423 | let username = username.trim(); 424 | if !is_valid_username(&username) { 425 | return Err(UserRegistrationError::InvalidUserName); 426 | } 427 | 428 | let name = username.to_lowercase(); 429 | let display_name = username; 430 | let email = email.trim(); 431 | 432 | if password.len() < MINIMUM_PASSWORD_LENGTH { 433 | return Err(UserRegistrationError::PasswordTooShort); 434 | } 435 | 436 | if email.is_empty() { 437 | return Err(UserRegistrationError::InvalidEmail); 438 | } 439 | 440 | let existing_user: Option<User> = sqlx::query_as("SELECT * FROM users WHERE name = $1") 441 | .bind(&name) 442 | .fetch_optional(&*conn) 443 | .await?; 444 | 445 | if existing_user.is_some() { 446 | return Err(UserRegistrationError::UserNameInUse); 447 | } 448 | 449 | let shared_secret = create_secret!(); 450 | let nonce = Nonce::from_slice(SHARED_SECRET_NONCE); 451 | let encrypted_secret = SHARED_SECRET_CIPHER.encrypt(nonce, shared_secret.as_ref())?; 452 | let qr_code_url = qr_code_url!(&shared_secret, "C'est Le Marché", "C'est Le Marché"); 453 | 454 | let reset_code = 455 | base64::encode_config(&rand::random::<[u8; 32]>(), base64::URL_SAFE_NO_PAD); 456 | let hashed_reset_code = hash_password(&reset_code); 457 | let password = hash_password(&password); 458 | 459 | sqlx::query( 460 | r#" 461 | INSERT INTO users ( 462 | name, display_name, password, secret, reset_code, email, 463 | role, last_reward, experience, bio, equip_slot_badges, notes 464 | ) VALUES ( $1, $2, $3, $4, $5, $6, $7, $8, 0, '', '{}', '' ) 465 | "#, 466 | ) 467 | .bind(name) 468 | .bind(display_name) 469 | .bind(password) 470 | .bind(encrypted_secret) 471 | .bind(&hashed_reset_code) 472 | .bind(email.trim()) 473 | .bind(Role::User) 474 | .bind(Utc::now().naive_utc()) 475 | .execute(&*conn) 476 | .await?; 477 | 478 | Ok(UserRegistration { 479 | qr_code_url, 480 | reset_code, 481 | }) 482 | } 483 | ); 484 | 485 | fn is_valid_username(username: &str) -> bool { 486 | username.chars().all(char::is_alphanumeric) 487 | } 488 | 489 | #[derive(Deserialize)] 490 | pub struct UpdateUser { 491 | role: Role, 492 | } 493 | 494 | #[derive(Debug, Error, Serialize, ErrorCode)] 495 | pub enum
UpdateUserError { 496 | #[error("You are not privileged enough")] 497 | Unauthorized, 498 | #[error("There is no such user")] 499 | NoSuchUser, 500 | #[error("Internal database error: {0}")] 501 | InternalDbError( 502 | #[from] 503 | #[serde(skip)] 504 | sqlx::Error, 505 | ), 506 | } 507 | 508 | post!( 509 | "/user/:user_id", 510 | #[json] 511 | async fn update_user( 512 | conn: Extension, 513 | moderator: User, 514 | Path(user_id): Path, 515 | Query(UpdateUser { role }): Query, 516 | ) -> Result<(), UpdateUserError> { 517 | let user = User::fetch_optional(&*conn, user_id) 518 | .await? 519 | .ok_or(UpdateUserError::NoSuchUser)?; 520 | 521 | if user.role >= moderator.role || role >= moderator.role { 522 | return Err(UpdateUserError::Unauthorized); 523 | } 524 | 525 | sqlx::query("UPDATE users SET role = $1 WHERE id = $2") 526 | .bind(role) 527 | .bind(user_id) 528 | .execute(&*conn) 529 | .await?; 530 | 531 | Ok(()) 532 | } 533 | ); 534 | 535 | #[derive(Deserialize)] 536 | pub struct BanUser { 537 | #[serde(default, deserialize_with = "crate::empty_string_as_none")] 538 | ban_len: Option, 539 | } 540 | 541 | post!( 542 | "/ban/:user_id", 543 | #[json] 544 | async fn ban_user( 545 | conn: Extension, 546 | moderator: User, 547 | Path(user_id): Path, 548 | Query(BanUser { ban_len }): Query, 549 | ) -> Result<(), UpdateUserError> { 550 | if moderator.role < Role::Moderator || moderator.id == user_id { 551 | return Err(UpdateUserError::Unauthorized); 552 | } 553 | User::fetch_optional(&*conn, user_id) 554 | .await? 555 | .ok_or(UpdateUserError::NoSuchUser)?; 556 | 557 | sqlx::query("UPDATE users SET banned_until = $1 WHERE id = $2") 558 | .bind(ban_len.map(|days| (Utc::now() + Duration::days(days as i64)).naive_utc())) 559 | .bind(user_id) 560 | .execute(&*conn) 561 | .await?; 562 | 563 | Ok(()) 564 | } 565 | ); 566 | 567 | #[derive(Deserialize)] 568 | pub struct UpdateBioForm { 569 | bio: String, 570 | } 571 | 572 | #[derive(Debug, Serialize, Error, ErrorCode)] 573 | pub enum UpdateBioError { 574 | #[error("Bio is too long (maximum {MAX_BIO_LEN} characters allowed)")] 575 | TooLong, 576 | #[error("Internal database error: {0}")] 577 | InternalDbError( 578 | #[from] 579 | #[serde(skip)] 580 | sqlx::Error, 581 | ), 582 | } 583 | 584 | pub const MAX_BIO_LEN: usize = 300; 585 | 586 | post!( 587 | "/bio", 588 | #[json] 589 | async fn update_bio( 590 | conn: Extension, 591 | user: User, 592 | Form(UpdateBioForm { bio }): Form, 593 | ) -> Result<(), UpdateBioError> { 594 | if bio.len() > MAX_BIO_LEN { 595 | return Err(UpdateBioError::TooLong); 596 | } 597 | 598 | sqlx::query("UPDATE users SET bio = $1 WHERE id = $2") 599 | .bind(bio) 600 | .bind(user.id) 601 | .execute(&*conn) 602 | .await?; 603 | 604 | Ok(()) 605 | } 606 | ); 607 | 608 | #[derive(Deserialize)] 609 | pub struct AddNoteForm { 610 | body: String, 611 | } 612 | 613 | #[derive(Debug, Error, Serialize, ErrorCode)] 614 | pub enum AddNoteError { 615 | #[error("You are not privileged enough")] 616 | Unauthorized, 617 | #[error("Internal database error: {0}")] 618 | InternalDbError( 619 | #[from] 620 | #[serde(skip)] 621 | sqlx::Error, 622 | ), 623 | } 624 | 625 | post!( 626 | "/add_note/:user_id", 627 | #[json] 628 | pub async fn submit( 629 | conn: Extension, 630 | viewer: User, 631 | Path(user_id): Path, 632 | Form(AddNoteForm { body }): Form, 633 | ) -> Result<(), AddNoteError> { 634 | if viewer.role < Role::Moderator { 635 | return Err(AddNoteError::Unauthorized); 636 | } 637 | 638 | let viewer_name = viewer.name; 639 | let body = 
html_escape::encode_text(&body); 640 | let new_note = format!("

    “{body}” — {viewer_name}

    "); 641 | 642 | sqlx::query("UPDATE users SET notes = notes || $1 WHERE id = $2") 643 | .bind(new_note) 644 | .bind(user_id) 645 | .execute(&*conn) 646 | .await?; 647 | 648 | Ok(()) 649 | } 650 | ); 651 | 652 | /// Name of the cookie we use to store the session Id. 653 | const USER_SESSION_ID_COOKIE: &str = "session_id"; 654 | // TODO: Move to environmental variable 655 | const PRIVATE_COOKIE_KEY: &str = "ea63npVp7Vg+ileGuoO0OJbBLOdSkHKkNwu87B8/joU="; 656 | 657 | #[async_trait] 658 | impl FromRequestParts for User 659 | where 660 | S: Send + Sync, 661 | { 662 | type Rejection = UserRejection; 663 | 664 | async fn from_request_parts(parts: &mut Parts, state: &S) -> Result { 665 | let redirect = parts 666 | .uri 667 | .path_and_query() 668 | .map(|x| x.as_str().to_string()) 669 | .unwrap_or_else(String::new); 670 | let cookies = Cookies::from_request_parts(parts, state) 671 | .await 672 | .map_err(|_| UserRejection::Unauthorized { 673 | redirect: redirect.clone(), 674 | })?; 675 | let private_key = Key::derive_from(PRIVATE_COOKIE_KEY.as_bytes()); 676 | let signed = cookies.private(&private_key); 677 | let session_id = signed 678 | .get(USER_SESSION_ID_COOKIE) 679 | .ok_or(UserRejection::Unauthorized { 680 | redirect: redirect.clone(), 681 | })?; 682 | let conn = Extension::::from_request_parts(parts, state) 683 | .await 684 | .map_err(|_| UserRejection::UnknownError)?; 685 | let Some(session) = LoginSession::fetch(&conn, session_id.value()).await? else { 686 | return Err(UserRejection::Unauthorized { 687 | redirect: redirect.clone(), 688 | }); 689 | }; 690 | let user = match User::fetch_optional(&*conn, session.user_id).await { 691 | Ok(Some(user)) => user, 692 | Ok(None) => return Err(UserRejection::UnknownUser), 693 | Err(_) => return Err(UserRejection::Unauthorized { redirect }), 694 | }; 695 | if user.is_banned() { 696 | Err(UserRejection::Banned { 697 | until: user.banned_until.unwrap(), 698 | }) 699 | } else { 700 | Ok(user) 701 | } 702 | } 703 | } 704 | 705 | #[derive(Debug, Error)] 706 | pub enum UserRejection { 707 | #[error("An unknown error occurred")] 708 | UnknownError, 709 | #[error("Internal database error: {0}")] 710 | InternalDbError(#[from] sqlx::Error), 711 | #[error("Unknown user")] 712 | UnknownUser, 713 | #[error("Unauthorized user")] 714 | Unauthorized { redirect: String }, 715 | #[error("Banned until {until}")] 716 | Banned { until: NaiveDateTime }, 717 | } 718 | 719 | #[derive(Template)] 720 | #[template(path = "banned.html")] 721 | pub struct Banned { 722 | judge_type: bool, 723 | until: String, 724 | } 725 | 726 | impl IntoResponse for UserRejection { 727 | fn into_response(self) -> Response { 728 | match self { 729 | Self::Banned { until } => Banned { 730 | judge_type: rand::random(), 731 | until: until.format(crate::DATE_FMT).to_string(), 732 | } 733 | .into_response(), 734 | Self::Unauthorized { redirect } => { 735 | Redirect::to(&format!("/login?redirect={redirect}")).into_response() 736 | } 737 | err => { 738 | tracing::error!("Unknown error occurred: {:?}", err); 739 | Redirect::to("/login").into_response() 740 | } 741 | } 742 | } 743 | } 744 | 745 | #[derive(Clone)] 746 | pub struct UserCache<'a> { 747 | conn: &'a PgPool, 748 | cached: Arc>>>, 749 | } 750 | 751 | impl<'a> UserCache<'a> { 752 | pub fn new(conn: &'a PgPool) -> Self { 753 | UserCache { 754 | conn, 755 | cached: Arc::new(Mutex::new(Default::default())), 756 | } 757 | } 758 | 759 | pub async fn get(&self, id: i32) -> Result, sqlx::Error> { 760 | if let Some(result) = 
self.cached.lock().unwrap().get(&id) { 761 | return Ok(result.clone()); 762 | } 763 | let profile_stub = Arc::new( 764 | User::fetch(self.conn, id) 765 | .await? 766 | .get_profile_stub(self.conn) 767 | .await?, 768 | ); 769 | self.cached.lock().unwrap().insert(id, profile_stub.clone()); 770 | Ok(profile_stub) 771 | } 772 | } 773 | 774 | /// User login sessions 775 | #[derive(FromRow)] 776 | pub struct LoginSession { 777 | /// Id of the login session 778 | pub id: i32, 779 | /// Auth token 780 | pub session_id: String, 781 | /// UserId of the session 782 | pub user_id: i32, 783 | /// When the session began 784 | pub session_start: NaiveDateTime, 785 | /// The IP address of the connecting client 786 | pub ip_addr: IpNetwork, 787 | } 788 | 789 | #[derive(Debug, Serialize, Error, ErrorCode)] 790 | pub enum LoginFailure { 791 | #[error("Username or password is incorrect")] 792 | UserOrPasswordIncorrect, 793 | #[error("Internal database error: {0}")] 794 | InternalDbError( 795 | #[from] 796 | #[serde(skip)] 797 | sqlx::Error, 798 | ), 799 | #[error("Internal encryption error")] 800 | InternalEncryptionError, 801 | } 802 | 803 | impl From<aes_gcm::Error> for LoginFailure { 804 | fn from(_: aes_gcm::Error) -> Self { 805 | LoginFailure::InternalEncryptionError 806 | } 807 | } 808 | 809 | impl From<FromUtf8Error> for LoginFailure { 810 | fn from(_: FromUtf8Error) -> Self { 811 | LoginFailure::InternalEncryptionError 812 | } 813 | } 814 | 815 | lazy_static! { 816 | pub static ref SHARED_SECRET_CIPHER: Aes256Gcm = { 817 | let key = std::env::var("SHARED_SECRET_KEY").unwrap(); 818 | let key_bytes = base64::decode(&key).expect("SHARED_SECRET_KEY is not valid base64"); 819 | Aes256Gcm::new_from_slice(&key_bytes).expect("could not construct shared secret cipher") 820 | }; 821 | } 822 | 823 | pub const SHARED_SECRET_NONCE: &[u8; 12] = b"96bitsIs12u8"; 824 | 825 | impl LoginSession { 826 | /// Fetch the login session. 827 | pub async fn fetch(conn: &PgPool, session_id: &str) -> Result<Option<Self>, sqlx::Error> { 828 | Ok( 829 | sqlx::query_as("SELECT * FROM login_sessions WHERE session_id = $1") 830 | .bind(session_id) 831 | .fetch_optional(conn) 832 | .await? 833 | .filter(|session: &Self| { 834 | // The session is automatically invalid if it is more than a year 835 | // old. 836 | session.session_start >= (Utc::now() - Duration::weeks(52)).naive_utc() 837 | }), 838 | ) 839 | } 840 | 841 | /// Attempt to log in a user 842 | pub async fn login( 843 | conn: &PgPool, 844 | username: &str, 845 | password: &str, 846 | ip_addr: IpNetwork, 847 | ) -> Result<Self, LoginFailure> { 848 | let username = username.trim(); 849 | let user: User = { 850 | sqlx::query_as("SELECT * FROM users WHERE name = $1") 851 | .bind(username.to_lowercase()) 852 | .fetch_optional(conn) 853 | .await? 854 | .ok_or(LoginFailure::UserOrPasswordIncorrect)? 855 | }; 856 | 857 | if !verify_password(&user.password, password) { 858 | return Err(LoginFailure::UserOrPasswordIncorrect); 859 | } 860 | 861 | // TODO: Add extra protections here? 862 | 863 | let mut key = [0u8; 16]; 864 | OsRng.fill_bytes(&mut key); 865 | 866 | // Found a user, create a new login session 867 | let session_start = Utc::now().naive_utc(); 868 | 869 | Ok(sqlx::query_as( 870 | r#" 871 | INSERT INTO login_sessions 872 | (user_id, session_id, session_start, ip_addr) 873 | VALUES 874 | ($1, $2, $3, $4) 875 | RETURNING 876 | * 877 | "#, 878 | ) 879 | .bind(user.id) 880 | .bind(i128::from_be_bytes(key).to_string()) 881 | .bind(session_start) 882 | .bind(ip_addr) 883 | .fetch_one(conn) 884 | .await?)
885 | } 886 | } 887 | 888 | #[derive(Deserialize)] 889 | pub struct LoginForm { 890 | username: String, 891 | password: String, 892 | } 893 | 894 | post!( 895 | "/login", 896 | #[json] 897 | async fn login( 898 | pool: Extension<PgPool>, 899 | jar: Cookies, 900 | ClientIp(ip): ClientIp, 901 | login: Form<LoginForm>, 902 | ) -> Result<(), LoginFailure> { 903 | let key = Key::derive_from(PRIVATE_COOKIE_KEY.as_bytes()); 904 | let private = jar.private(&key); 905 | private.remove(Cookie::named(USER_SESSION_ID_COOKIE)); 906 | let LoginSession { session_id, .. } = LoginSession::login( 907 | &pool, 908 | login.username.trim(), 909 | login.password.trim(), 910 | IpNetwork::from(ip), 911 | ) 912 | .await?; 913 | let mut cookie = Cookie::new(USER_SESSION_ID_COOKIE, session_id.to_string()); 914 | cookie 915 | .set_expires(cookie_time::OffsetDateTime::now_utc() + cookie_time::Duration::weeks(52)); 916 | private.add(cookie); 917 | Ok(()) 918 | } 919 | ); 920 | 921 | #[derive(Debug, Serialize, Error, ErrorCode)] 922 | pub enum LogoutFailure { 923 | #[error("An unknown error occurred")] 924 | UnknownError, 925 | #[error("Internal database error: {0}")] 926 | InternalDbError( 927 | #[from] 928 | #[serde(skip)] 929 | sqlx::Error, 930 | ), 931 | } 932 | 933 | post! { 934 | "/logout", 935 | #[json] 936 | async fn logout( 937 | pool: Extension<PgPool>, 938 | cookies: Cookies, 939 | ) -> Result<(), LogoutFailure> { 940 | let private_key = Key::derive_from(PRIVATE_COOKIE_KEY.as_bytes()); 941 | let signed = cookies.private(&private_key); 942 | let session_id = signed 943 | .get(USER_SESSION_ID_COOKIE) 944 | .ok_or(LogoutFailure::UnknownError)?; 945 | 946 | sqlx::query("DELETE FROM login_sessions WHERE session_id = $1") 947 | .bind(session_id.value()) 948 | .execute(&*pool) 949 | .await?; 950 | 951 | Ok(()) 952 | } 953 | } 954 | 955 | #[derive(FromRow)] 956 | pub struct ReadingHistory { 957 | pub id: i32, 958 | pub reader_id: i32, 959 | pub thread_id: i32, 960 | pub last_read: i32, 961 | } 962 | --------------------------------------------------------------------------------
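The experience-to-level arithmetic in User::level and User::level_completion above is easiest to check with a couple of worked values. What follows is a minimal, standalone sketch of that arithmetic, not part of the repository; the free functions level and level_completion are hypothetical stand-ins for the methods on User.

fn level(xp: u64) -> u32 {
    // Base level is 1; otherwise level = floor(log2(xp)).
    if xp < 4 { 1 } else { 63 - xp.leading_zeros() }
}

fn level_completion(xp: u64) -> std::ops::Range<u64> {
    let level = level(xp);
    // XP already consumed by previous levels (level 1 starts at 0).
    let base_xp: u64 = if level == 1 { 0 } else { 1 << level };
    // XP required to move from this level to the next.
    let next_level_xp = (1u64 << (level + 1)) - base_xp;
    (xp - base_xp)..next_level_xp
}

fn main() {
    assert_eq!(level(0), 1);                 // brand-new users start at level 1
    assert_eq!(level(12), 3);                // floor(log2(12)) = 3
    assert_eq!(level_completion(12), 4..8);  // 4 of the 8 XP needed to reach level 4
    assert_eq!(level(1 << 12), 12);
}

For a user with 12 experience this corresponds to level_info reporting LevelInfo { level: 3, curr_xp: 4, next_level_xp: 8 }, i.e. 4 of the 8 XP needed to reach level 4.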