├── .gitignore ├── mdserve-theme-picker.png ├── mdserve-terminal-output.png ├── mdserve-catppuccin-macchiato.png ├── src ├── lib.rs ├── main.rs └── app.rs ├── LICENSE ├── flake.nix ├── Cargo.toml ├── CHANGELOG.md ├── cliff.toml ├── .github └── workflows │ └── ci.yml ├── docs └── architecture.md ├── README.md ├── flake.lock ├── install.sh ├── templates └── main.html ├── tests └── integration_test.rs └── Cargo.lock /.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | -------------------------------------------------------------------------------- /mdserve-theme-picker.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jfernandez/mdserve/HEAD/mdserve-theme-picker.png -------------------------------------------------------------------------------- /mdserve-terminal-output.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jfernandez/mdserve/HEAD/mdserve-terminal-output.png -------------------------------------------------------------------------------- /mdserve-catppuccin-macchiato.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jfernandez/mdserve/HEAD/mdserve-catppuccin-macchiato.png -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | // Minimal lib.rs to support integration tests 2 | pub mod app; 3 | pub use app::{new_router, scan_markdown_files, serve_markdown, ServerMessage}; 4 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2025 Jose Fernandez 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /flake.nix: -------------------------------------------------------------------------------- 1 | { 2 | inputs = { 3 | nixpkgs.url = "github:NixOS/nixpkgs/nixpkgs-unstable"; 4 | utils.url = "github:numtide/flake-utils"; 5 | fenix.url = "github:nix-community/fenix"; 6 | naersk.url = "github:nix-community/naersk/master"; 7 | }; 8 | 9 | outputs = 10 | { 11 | self, 12 | nixpkgs, 13 | utils, 14 | fenix, 15 | naersk 16 | }: 17 | utils.lib.eachDefaultSystem ( 18 | system: 19 | let 20 | pkgs = import nixpkgs { inherit system; }; 21 | rustToolchain = 22 | with fenix.packages.${system}; 23 | combine [ 24 | (stable.withComponents [ 25 | "rustc" 26 | "cargo" 27 | "rustfmt" 28 | "clippy" 29 | "rust-src" 30 | "rust-analyzer" 31 | ]) 32 | ]; 33 | naersk-lib = pkgs.callPackage naersk { }; 34 | in 35 | { 36 | defaultPackage = naersk-lib.buildPackage ./.; 37 | devShell = 38 | with pkgs; 39 | mkShell { 40 | buildInputs = [ 41 | rustToolchain 42 | ]; 43 | }; 44 | } 45 | ); 46 | } 47 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "mdserve" 3 | version = "0.5.1" 4 | authors = ["Jose Fernandez "] 5 | edition = "2021" 6 | rust-version = "1.82.0" 7 | license = "MIT" 8 | description = "Fast markdown preview server with live reload and theme support" 9 | repository = "https://github.com/jfernandez/mdserve" 10 | homepage = "https://github.com/jfernandez/mdserve" 11 | documentation = "https://github.com/jfernandez/mdserve#readme" 12 | 13 | [dependencies] 14 | axum = { version = "0.7.9", features = ["ws"] } 15 | tokio = { version = "1.0", features = ["rt-multi-thread", "macros", "net", "fs", "time"] } 16 | markdown = "1.0" 17 | clap = { version = "4.5.45", features = ["derive"] } 18 | tower = "0.5.2" 19 | tower-http = { version = "0.6.6", features = ["fs", "cors"] } 20 | notify = "8.2.0" 21 | futures-util = "0.3" 22 | serde = { version = "1.0", features = ["derive"] } 23 | serde_json = "1.0" 24 | anyhow = "1.0" 25 | minijinja = "2.12.0" 26 | minijinja-embed = { version = "2.12.0", default-features = false } 27 | 28 | [build-dependencies] 29 | minijinja-embed = { version = "2.12.0", default-features = false } 30 | 31 | [profile.release] 32 | strip = true 33 | lto = true 34 | codegen-units = 1 35 | panic = "abort" 36 | 37 | [dev-dependencies] 38 | axum-test = { version = "16.0", features = ["ws"] } 39 | tempfile = "3.0" 40 | tokio-test = "0.4" 41 | -------------------------------------------------------------------------------- /src/main.rs: -------------------------------------------------------------------------------- 1 | use anyhow::Result; 2 | use clap::Parser; 3 | use std::path::PathBuf; 4 | 5 | use mdserve::{scan_markdown_files, serve_markdown}; 6 | 7 | #[derive(Parser)] 8 | #[command(name = "mdserve")] 9 | #[command(about = "A simple HTTP server for markdown preview")] 10 | #[command(version)] 11 | struct Args { 12 | /// Path to markdown file or directory to serve 13 | path: PathBuf, 14 | 15 | /// Hostname (domain or IP address) to listen on 16 | #[arg(short = 'H', long, default_value = "127.0.0.1")] 17 | hostname: String, 18 | 19 | /// Port to serve on 20 | #[arg(short, long, default_value = "3000")] 21 | port: u16, 22 | } 23 | 24 | #[tokio::main] 25 | async fn main() -> Result<()> { 26 | let args = Args::parse(); 27 | let absolute_path = 
args.path.canonicalize().unwrap_or(args.path); 28 | 29 | let (base_dir, tracked_files, is_directory_mode) = if absolute_path.is_file() { 30 | // Single-file mode: derive parent directory 31 | let base_dir = absolute_path 32 | .parent() 33 | .unwrap_or_else(|| std::path::Path::new(".")) 34 | .to_path_buf(); 35 | let tracked_files = vec![absolute_path]; 36 | (base_dir, tracked_files, false) 37 | } else if absolute_path.is_dir() { 38 | // Directory mode: scan directory for markdown files 39 | let tracked_files = scan_markdown_files(&absolute_path)?; 40 | if tracked_files.is_empty() { 41 | anyhow::bail!("No markdown files found in directory"); 42 | } 43 | (absolute_path, tracked_files, true) 44 | } else { 45 | anyhow::bail!("Path must be a file or directory"); 46 | }; 47 | 48 | // Single unified serve function 49 | serve_markdown( 50 | base_dir, 51 | tracked_files, 52 | is_directory_mode, 53 | args.hostname, 54 | args.port, 55 | ) 56 | .await?; 57 | 58 | Ok(()) 59 | } 60 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | ## [0.5.1] - 2025-10-28 2 | ### Bug Fixes 3 | - Handle temp-file-rename edits in file watcher 4 | 5 | ## [0.5.0] - 2025-10-23 6 | ### Features 7 | - Add directory mode for serving multiple markdown files 8 | - Add YAML and TOML frontmatter support 9 | ### Bug Fixes 10 | - Center content in folder mode with sidebar collapsed 11 | - Prevent 404 race during neovim saves 12 | ### Refactoring 13 | - Simplify server startup output messages 14 | - Migrate to minijinja template engine 15 | ### Documentation 16 | - Update Cargo install instructions 17 | - Update Arch linux install instructions to use official package 18 | - Update README with new folder serving feature 19 | - Add changelog and improve git-cliff config 20 | - Fix changelog duplicate 0.4.1 entry 21 | ### Build 22 | - Downgrade to edition 2021 and set MSRV to 1.82.0 23 | - Add git-cliff configuration 24 | ### CI 25 | - Run on `aarch64-linux` as well 26 | ### Miscellaneous Tasks 27 | - Add package metadata for cargo publish 28 | - Remove macOS support, direct users to Homebrew 29 | 30 | ## [0.4.1] - 2025-10-04 31 | ### Bug Fixes 32 | - Change default hostname to 127.0.0.1 to prevent port conflicts 33 | ### Documentation 34 | - Update homebrew install instructions 35 | - Release 0.4.1 36 | 37 | ## [0.4.0] - 2025-10-03 38 | ### Features 39 | - Add ETag support for mermaid.min.js 40 | ### Refactoring 41 | - Asref avoid clone 42 | - Impl AsRef 43 | ### Documentation 44 | - Add Arch Linux install instructions 45 | ### Build 46 | - Optimize and reduce size of release binary (#8) 47 | - Add nix flake packaging 48 | - Update min Rust version to 1.85+ (2024) 49 | - Bundle mermaid.min.js (#10) 50 | - Remove cargo install instructions, add warning about naming conflict 51 | - Add `-H|--hostname` to support listening on non-localhost 52 | - Release 0.4.0 53 | 54 | ## [0.3.0] - 2025-09-27 55 | - Prevent theme flash on page load 56 | - Replace WebSocket content updates with reload signals (#4) 57 | - Add mermaid diagram support (#5) 58 | - Release 0.3.0 59 | 60 | ## [0.2.0] - 2025-09-24 61 | - Add install script and update README 62 | - Add macOS install instructions 63 | - Add image support 64 | - Add screenshot of mdserve serving README.md 65 | - Enable HTML tag rendering in markdown files (#2) 66 | - Release 0.2.0 67 | 68 | ## [0.1.0] - 2025-09-22 69 | - Release 0.1.0 70 | 71 | 
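The changelog above is generated from conventional commit messages with [git-cliff](https://git-cliff.org/), driven by the `cliff.toml` configuration that follows. The exact invocation is not documented in this repository, but a typical release regeneration would look something like:

```bash
# Regenerate CHANGELOG.md for a new tag using the repo's cliff.toml
git cliff --tag v0.5.1 -o CHANGELOG.md
```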
-------------------------------------------------------------------------------- /cliff.toml: -------------------------------------------------------------------------------- 1 | # git-cliff configuration file 2 | 3 | [changelog] 4 | # template for the changelog body 5 | # https://keats.github.io/tera/docs/#introduction 6 | body = """ 7 | {% if version %}\ 8 | ## [{{ version | trim_start_matches(pat="v") }}] - {{ timestamp | date(format="%Y-%m-%d") }} 9 | {% else %}\ 10 | ## [unreleased] 11 | {% endif %}\ 12 | {% set_global group_order = ["Features", "Bug Fixes", "Performance", "Refactoring", "Documentation", "Styling", "Testing", "Build", "CI", "Miscellaneous Tasks", "Security"] %}\ 13 | {% for group_name in group_order %}\ 14 | {% set commits_in_group = commits | filter(attribute="group", value=group_name) %}\ 15 | {% if commits_in_group | length > 0 %}\ 16 | ### {{ group_name }} 17 | {% for commit in commits_in_group %}\ 18 | - {{ commit.message | split(pat="\n") | first | trim | upper_first }} 19 | {% endfor %}\ 20 | {% endif %}\ 21 | {% endfor %}\ 22 | {% set other_commits = commits | filter(attribute="group", value="Other") %}\ 23 | {% if other_commits | length > 0 %}\ 24 | {% for commit in other_commits %}\ 25 | - {{ commit.message | split(pat="\n") | first | trim | upper_first }} 26 | {% endfor %}\ 27 | {% endif %}\n 28 | """ 29 | # remove leading and trailing whitespace from the template 30 | trim = true 31 | 32 | [git] 33 | # parse the commits based on https://www.conventionalcommits.org 34 | conventional_commits = true 35 | # filter out the commits that are not conventional 36 | filter_unconventional = false 37 | # process each line of a commit as an individual commit 38 | split_commits = false 39 | # regex for parsing and grouping commits 40 | commit_parsers = [ 41 | { message = "^Merge pull request", skip = true }, 42 | { message = "^Merge branch", skip = true }, 43 | { message = "^feat", group = "Features" }, 44 | { message = "^fix", group = "Bug Fixes" }, 45 | { message = "^doc", group = "Documentation" }, 46 | { message = "^perf", group = "Performance" }, 47 | { message = "^refactor", group = "Refactoring" }, 48 | { message = "^style", group = "Styling" }, 49 | { message = "^test", group = "Testing" }, 50 | { message = "^build", group = "Build" }, 51 | { message = "^ci", group = "CI" }, 52 | { message = "^chore\\(release\\): prepare for", skip = true }, 53 | { message = "^chore", group = "Miscellaneous Tasks" }, 54 | { body = ".*security", group = "Security" }, 55 | { message = ".*", group = "Other" }, 56 | ] 57 | # protect breaking changes from being skipped due to matching a skipping commit_parser 58 | protect_breaking_commits = false 59 | # filter out the commits that are not matched by commit parsers 60 | filter_commits = false 61 | # sort the commits inside sections by oldest/newest order 62 | sort_commits = "oldest" 63 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: Rust 2 | 3 | on: 4 | push: 5 | branches: [ "main" ] 6 | tags: [ "v*" ] 7 | pull_request: 8 | branches: [ "main" ] 9 | workflow_dispatch: 10 | inputs: 11 | publish-crate: 12 | description: 'Publish to crates.io' 13 | required: false 14 | type: boolean 15 | default: false 16 | 17 | permissions: 18 | contents: write 19 | 20 | env: 21 | CARGO_TERM_COLOR: always 22 | 23 | jobs: 24 | test-and-build: 25 | strategy: 26 | fail-fast: false 27 | matrix: 28 | include: 
29 | - target: x86_64-unknown-linux-gnu 30 | os: ubuntu-latest 31 | - target: x86_64-unknown-linux-musl 32 | os: ubuntu-latest 33 | - target: aarch64-unknown-linux-gnu 34 | os: ubuntu-24.04-arm 35 | - target: aarch64-unknown-linux-musl 36 | os: ubuntu-24.04-arm 37 | - target: x86_64-apple-darwin 38 | os: macos-15-intel 39 | - target: aarch64-apple-darwin 40 | os: macos-latest 41 | 42 | runs-on: ${{ matrix.os }} 43 | 44 | steps: 45 | - uses: actions/checkout@v5 46 | 47 | - name: Report Rust version 48 | run: rustup show 49 | 50 | - name: Install target 51 | run: rustup target add ${{ matrix.target }} 52 | 53 | - name: Cache dependencies 54 | uses: actions/cache@v4 55 | with: 56 | path: | 57 | ~/.cargo/registry 58 | ~/.cargo/git 59 | target 60 | key: ${{ runner.os }}-${{ matrix.target }}-cargo-${{ hashFiles('**/Cargo.lock') }} 61 | 62 | - name: Check formatting 63 | run: cargo fmt --check 64 | 65 | - name: Run clippy 66 | run: cargo clippy --all-targets --all-features --target ${{ matrix.target }} -- -D warnings 67 | 68 | - name: Run tests 69 | run: cargo test --target ${{ matrix.target }} 70 | 71 | - name: Build release 72 | run: cargo build --release --target ${{ matrix.target }} 73 | 74 | - name: Prepare artifact 75 | run: | 76 | mkdir -p dist 77 | cp target/${{ matrix.target }}/release/mdserve dist/mdserve-${{ matrix.target }} 78 | 79 | - name: Upload artifacts 80 | uses: actions/upload-artifact@v4 81 | with: 82 | name: mdserve-${{ matrix.target }} 83 | path: dist/mdserve-${{ matrix.target }} 84 | 85 | publish-crate: 86 | needs: test-and-build 87 | runs-on: ubuntu-latest 88 | if: startsWith(github.ref, 'refs/tags/v') || (github.event_name == 'workflow_dispatch' && inputs.publish-crate) 89 | 90 | steps: 91 | - uses: actions/checkout@v5 92 | 93 | - name: Report Rust version 94 | run: rustup show 95 | 96 | - name: Cache dependencies 97 | uses: actions/cache@v4 98 | with: 99 | path: | 100 | ~/.cargo/registry 101 | ~/.cargo/git 102 | target 103 | key: ${{ runner.os }}-x86_64-unknown-linux-gnu-cargo-${{ hashFiles('**/Cargo.lock') }} 104 | 105 | - name: Publish to crates.io 106 | env: 107 | CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_REGISTRY_TOKEN }} 108 | run: cargo publish 109 | 110 | release: 111 | needs: test-and-build 112 | runs-on: ubuntu-latest 113 | if: startsWith(github.ref, 'refs/tags/v') 114 | 115 | steps: 116 | - name: Download all artifacts 117 | uses: actions/download-artifact@v4 118 | with: 119 | pattern: mdserve-* 120 | path: artifacts 121 | merge-multiple: true 122 | 123 | - name: Create Release and Upload Artifacts 124 | uses: ncipollo/release-action@v1 125 | with: 126 | artifacts: artifacts/mdserve-* 127 | draft: true 128 | allowUpdates: true 129 | updateOnlyUnreleased: true 130 | token: ${{ secrets.GITHUB_TOKEN }} 131 | -------------------------------------------------------------------------------- /docs/architecture.md: -------------------------------------------------------------------------------- 1 | # mdserve Architecture 2 | 3 | ## Overview 4 | 5 | mdserve is a simple HTTP server for markdown preview with live reload. It supports both single-file and directory modes with a unified codebase. 6 | 7 | **Core principle**: Always work with a base directory and a list of tracked files (1 or more). 
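This normalization happens once at startup in `src/main.rs` (shown above). A condensed sketch, using a hypothetical `normalize` helper to make the shape explicit:

```rust
use std::path::{Path, PathBuf};

// Condensed from src/main.rs: both modes reduce to the same state shape,
// a base directory plus a non-empty list of tracked markdown files.
fn normalize(path: PathBuf) -> anyhow::Result<(PathBuf, Vec<PathBuf>, bool)> {
    if path.is_file() {
        // Single-file mode: track just this file, watch its parent directory.
        let base = path.parent().unwrap_or(Path::new(".")).to_path_buf();
        Ok((base, vec![path], false))
    } else {
        // Directory mode: track every markdown file found (non-recursive scan).
        let files = mdserve::scan_markdown_files(&path)?;
        Ok((path, files, true))
    }
}
```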
8 | 9 | ```mermaid 10 | graph LR 11 | A[File System] -->|notify events| B[File Watcher] 12 | B -->|update state| C[MarkdownState] 13 | B -->|broadcast| D[WebSocket] 14 | E[HTTP Request] -->|lookup| C 15 | C -->|render| F[Template] 16 | F -->|HTML| G[Browser] 17 | D -->|reload signal| G 18 | ``` 19 | 20 | ## Modes 21 | 22 | ### Single-File Mode 23 | ```bash 24 | mdserve README.md 25 | ``` 26 | - Watches parent directory 27 | - Tracks single file 28 | - No navigation sidebar 29 | 30 | ### Directory Mode 31 | ```bash 32 | mdserve ./docs/ 33 | ``` 34 | - Watches specified directory 35 | - Tracks all `.md` and `.markdown` files 36 | - Shows navigation sidebar 37 | 38 | ## Architecture 39 | 40 | ### State Management 41 | 42 | Central state stores: 43 | - Base directory path 44 | - HashMap of tracked files (filename → metadata + pre-rendered HTML) 45 | - Directory mode flag (determines UI) 46 | - WebSocket broadcast channel 47 | 48 | ```mermaid 49 | classDiagram 50 | class MarkdownState { 51 | +PathBuf base_dir 52 | +HashMap~String,TrackedFile~ tracked_files 53 | +bool is_directory_mode 54 | +Sender~ServerMessage~ change_tx 55 | } 56 | 57 | class TrackedFile { 58 | +PathBuf path 59 | +SystemTime last_modified 60 | +String html 61 | } 62 | 63 | MarkdownState "1" --> "*" TrackedFile : contains 64 | ``` 65 | 66 | Mode is determined by user intent, not file count: 67 | - `mdserve /docs/` with 1 file shows sidebar 68 | - `mdserve single.md` never shows sidebar 69 | 70 | **Example states:** 71 | 72 | Single-file mode: 73 | ``` 74 | base_dir = /path/to/docs/ 75 | tracked_files = { 76 | "README.md": TrackedFile { ... } 77 | } 78 | is_directory_mode = false 79 | ``` 80 | 81 | Directory mode: 82 | ``` 83 | base_dir = /path/to/docs/ 84 | tracked_files = { 85 | "api.md": TrackedFile { ... }, 86 | "guide.md": TrackedFile { ... }, 87 | "README.md": TrackedFile { ... } 88 | } 89 | is_directory_mode = true 90 | ``` 91 | 92 | ### Live Reload 93 | 94 | Uses [notify](https://github.com/notify-rs/notify) crate to watch base directory (non-recursive): 95 | - Create/modify: Refresh file, add if new (directory mode only) 96 | - Delete: Remove from tracking 97 | - Rename: Remove old, add new 98 | - All changes trigger WebSocket reload broadcast 99 | 100 | File changes flow: 101 | 1. File system event detected by `notify` 102 | 2. Markdown re-rendered to HTML 103 | 3. State updated (refresh/add/remove tracked file) 104 | 4. `ServerMessage::Reload` broadcast via WebSocket channel 105 | 5. All connected clients receive reload message 106 | 6. Clients execute `window.location.reload()` 107 | 108 | ### Routing 109 | 110 | Single unified router handles both modes: 111 | - `GET /` → First file alphabetically 112 | - `GET /:filename.md` → Specific markdown file 113 | - `GET /:filename.` → Images from base directory 114 | - `GET /ws` → WebSocket connection 115 | - `GET /mermaid.min.js` → Bundled Mermaid library 116 | 117 | The `:filename` pattern rejects paths with `/`, preventing directory traversal. 118 | 119 | ### Rendering 120 | 121 | Uses [MiniJinja](https://github.com/mitsuhiko/minijinja) (Jinja2 template syntax) with templates embedded at compile time via [minijinja_embed](https://github.com/mitsuhiko/minijinja/tree/main/minijinja-embed). 
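Embedding happens in two steps: a build script registers the `templates/` directory at compile time, and the server loads the embedded set into a MiniJinja `Environment` at startup (the runtime call, `minijinja_embed::load_templates!(&mut env)`, appears verbatim in `template_env()` in `src/app.rs`). The build script itself is not included in this dump, so the following is a sketch of the assumed setup:

```rust
// build.rs (assumed; the build script is not part of this dump).
// Embeds every template under templates/ (here, main.html) into the binary,
// which is why Cargo.toml lists minijinja-embed under [build-dependencies].
fn main() {
    minijinja_embed::embed_templates!("templates");
}
```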
122 | 123 | Conditional template rendering: 124 | - Directory mode: Includes navigation sidebar with active file highlighting 125 | - Single-file mode: Content only 126 | - Both use same pre-rendered HTML from state 127 | 128 | Template variables: 129 | - `content`: Pre-rendered markdown HTML 130 | - `mermaid_enabled`: Boolean flag, conditionally includes Mermaid.js when diagrams detected 131 | - `show_navigation`: Controls sidebar visibility 132 | - `files`: List of tracked files (directory mode) 133 | - `current_file`: Active file name (directory mode) 134 | 135 | ## Design Decisions 136 | 137 | **Unified architecture**: Single code path handles both single-file and directory modes. Mode determined by user intent, not file count. 138 | 139 | **Pre-rendered caching**: All tracked files rendered to HTML in memory on startup and file change. Serving always from memory, never from disk. 140 | 141 | **Non-recursive watching**: Only immediate directory, no subdirectories. Simplifies security and state management. 142 | 143 | **Server-side logic**: Most logic lives server-side (markdown rendering, file tracking, navigation, active file highlighting, live reload triggering). Client-side JavaScript minimal (theme management, reload execution). 144 | 145 | ## Constraints 146 | 147 | - Non-recursive (flat directories only) 148 | - Alphabetical file ordering only 149 | - All files pre-rendered in memory 150 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # mdserve 2 | 3 | Fast markdown preview server with **live reload** and **theme support**. 4 | 5 | Just run `mdserve file.md` and start writing. One statically-compiled executable that runs anywhere - no installation, no dependencies. 6 | 7 | ![Terminal output when starting mdserve](mdserve-terminal-output.png) 8 | 9 | ## Features 10 | 11 | - ⚡ **Instant Live Reload** - Real-time updates via WebSocket when markdown file changes 12 | - 📁 **Directory Mode** - Serve all markdown files in a directory with a navigation sidebar 13 | - 🎨 **Multiple Themes** - Built-in theme selector with 5 themes including Catppuccin variants 14 | - 📝 **GitHub Flavored Markdown** - Full GFM support including tables, strikethrough, code blocks, and task lists 15 | - 📊 **Mermaid Diagrams** - Automatic rendering of flowcharts, sequence diagrams, class diagrams, and more 16 | - 🚀 **Fast** - Built with Rust and Axum for excellent performance and low memory usage 17 | 18 | ## Installation 19 | 20 | ### macOS (Homebrew) 21 | 22 | ```bash 23 | brew install mdserve 24 | ``` 25 | 26 | ### Linux 27 | 28 | ```bash 29 | curl -sSfL https://raw.githubusercontent.com/jfernandez/mdserve/main/install.sh | bash 30 | ``` 31 | 32 | This will automatically detect your platform and install the latest binary to your system. 
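The script also honors an `MDSERVE_INSTALL_DIR` override (documented in `install.sh`, included later in this dump). Note that the variable must be visible to the `bash` process that runs the script, not just to `curl`; a sketch:

```bash
# Install into a specific directory instead of the auto-detected one
curl -sSfL https://raw.githubusercontent.com/jfernandez/mdserve/main/install.sh \
  | MDSERVE_INSTALL_DIR="$HOME/.local/bin" bash
```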
33 | 34 | ### Alternative Methods 35 | 36 | #### Using Cargo 37 | 38 | ```bash 39 | cargo install mdserve 40 | ``` 41 | 42 | #### Arch Linux 43 | 44 | ```bash 45 | sudo pacman -S mdserve 46 | ``` 47 | 48 | #### Nix Package Manager 49 | 50 | ``` bash 51 | nix profile install github:jfernandez/mdserve 52 | ``` 53 | 54 | #### From Source 55 | 56 | ```bash 57 | git clone https://github.com/jfernandez/mdserve.git 58 | cd mdserve 59 | cargo build --release 60 | cp target/release/mdserve 61 | ``` 62 | 63 | #### Manual Download 64 | 65 | Download the appropriate binary for your platform from the [latest release](https://github.com/jfernandez/mdserve/releases/latest). 66 | 67 | ## Usage 68 | 69 | ### Basic Usage 70 | 71 | ```bash 72 | # Serve a single markdown file on default port (3000) 73 | mdserve README.md 74 | 75 | # Serve all markdown files in a directory 76 | mdserve docs/ 77 | 78 | # Serve on custom port 79 | mdserve README.md --port 8080 80 | mdserve docs/ -p 8080 81 | 82 | # Serve on custom hostname and port 83 | mdserve README.md --hostname 0.0.0.0 --port 8080 84 | ``` 85 | 86 | ### Single-File vs Directory Mode 87 | 88 | **Single-File Mode**: When you pass a file path, mdserve serves that specific markdown file with a clean, focused view. 89 | 90 | **Directory Mode**: When you pass a directory path, mdserve automatically: 91 | - Scans and serves all `.md` and `.markdown` files in that directory 92 | - Displays a navigation sidebar for easy switching between files 93 | - Watches for new markdown files added to the directory 94 | - Only monitors the immediate directory (non-recursive) 95 | 96 | 97 | ## Endpoints 98 | 99 | Once running, the server provides (default: [http://localhost:3000](http://localhost:3000)): 100 | 101 | - **[`/`](http://localhost:3000/)** - Rendered HTML with live reload via WebSocket 102 | - **[`/ws`](http://localhost:3000/ws)** - WebSocket endpoint for real-time updates 103 | 104 | ## Theme System 105 | 106 | **Built-in Theme Selector** 107 | - Click the 🎨 button in the top-right corner to open theme selector 108 | - **5 Available Themes**: 109 | - **Light**: Clean, bright theme optimized for readability 110 | - **Dark**: GitHub-inspired dark theme with comfortable contrast 111 | - **Catppuccin Latte**: Warm light theme with soothing pastels 112 | - **Catppuccin Macchiato**: Cozy mid-tone theme with rich colors 113 | - **Catppuccin Mocha**: Deep dark theme with vibrant accents 114 | - **Persistent Preference**: Your theme choice is automatically saved in browser localStorage 115 | 116 | *Click the theme button (🎨) to access the built-in theme selector* 117 | 118 | ![Theme picker interface](mdserve-theme-picker.png) 119 | 120 | *mdserve running with the Catppuccin Macchiato theme - notice the warm, cozy colors and excellent readability* 121 | 122 | ![mdserve with Catppuccin Macchiato theme](mdserve-catppuccin-macchiato.png) 123 | 124 | ## Documentation 125 | 126 | For detailed information about mdserve's internal architecture, design decisions, and how it works under the hood, see [Architecture Documentation](docs/architecture.md). 
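To watch the live-reload channel from the Endpoints section above, you can subscribe to `/ws` directly. This sketch assumes the third-party `websocat` tool (not part of mdserve); on every change to a served file the server pushes a small JSON reload message:

```bash
# Terminal 1: serve a file
mdserve README.md

# Terminal 2: subscribe to reload notifications on the default port
websocat ws://127.0.0.1:3000/ws
# prints {"type":"Reload"} each time README.md is saved
```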
127 | 128 | ## Development 129 | 130 | ### Prerequisites 131 | 132 | - Rust 1.85+ (2024 edition) 133 | 134 | ### Building 135 | 136 | ```bash 137 | cargo build --release 138 | ``` 139 | 140 | ### Running Tests 141 | 142 | ```bash 143 | # Run all tests 144 | cargo test 145 | 146 | # Run integration tests only 147 | cargo test --test integration_test 148 | ``` 149 | 150 | ## License 151 | 152 | This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details. 153 | 154 | ## Acknowledgments 155 | 156 | - Built with [Axum](https://github.com/tokio-rs/axum) web framework 157 | - Markdown parsing by [markdown-rs](https://github.com/wooorm/markdown-rs) 158 | - [Catppuccin](https://catppuccin.com/) color themes 159 | - Inspired by various markdown preview tools 160 | -------------------------------------------------------------------------------- /flake.lock: -------------------------------------------------------------------------------- 1 | { 2 | "nodes": { 3 | "fenix": { 4 | "inputs": { 5 | "nixpkgs": "nixpkgs", 6 | "rust-analyzer-src": "rust-analyzer-src" 7 | }, 8 | "locked": { 9 | "lastModified": 1759128018, 10 | "narHash": "sha256-30KHoIXMgyNQULifR1yQ5Sp0vr4tWpGRJXPOTgEzx1A=", 11 | "owner": "nix-community", 12 | "repo": "fenix", 13 | "rev": "5c342209226275f704ab84d89efc80b2d3963517", 14 | "type": "github" 15 | }, 16 | "original": { 17 | "owner": "nix-community", 18 | "repo": "fenix", 19 | "type": "github" 20 | } 21 | }, 22 | "fenix_2": { 23 | "inputs": { 24 | "nixpkgs": [ 25 | "naersk", 26 | "nixpkgs" 27 | ], 28 | "rust-analyzer-src": "rust-analyzer-src_2" 29 | }, 30 | "locked": { 31 | "lastModified": 1752475459, 32 | "narHash": "sha256-z6QEu4ZFuHiqdOPbYss4/Q8B0BFhacR8ts6jO/F/aOU=", 33 | "owner": "nix-community", 34 | "repo": "fenix", 35 | "rev": "bf0d6f70f4c9a9cf8845f992105652173f4b617f", 36 | "type": "github" 37 | }, 38 | "original": { 39 | "owner": "nix-community", 40 | "repo": "fenix", 41 | "type": "github" 42 | } 43 | }, 44 | "naersk": { 45 | "inputs": { 46 | "fenix": "fenix_2", 47 | "nixpkgs": "nixpkgs_2" 48 | }, 49 | "locked": { 50 | "lastModified": 1752689277, 51 | "narHash": "sha256-uldUBFkZe/E7qbvxa3mH1ItrWZyT6w1dBKJQF/3ZSsc=", 52 | "owner": "nix-community", 53 | "repo": "naersk", 54 | "rev": "0e72363d0938b0208d6c646d10649164c43f4d64", 55 | "type": "github" 56 | }, 57 | "original": { 58 | "owner": "nix-community", 59 | "ref": "master", 60 | "repo": "naersk", 61 | "type": "github" 62 | } 63 | }, 64 | "nixpkgs": { 65 | "locked": { 66 | "lastModified": 1758690382, 67 | "narHash": "sha256-NY3kSorgqE5LMm1LqNwGne3ZLMF2/ILgLpFr1fS4X3o=", 68 | "owner": "nixos", 69 | "repo": "nixpkgs", 70 | "rev": "e643668fd71b949c53f8626614b21ff71a07379d", 71 | "type": "github" 72 | }, 73 | "original": { 74 | "owner": "nixos", 75 | "ref": "nixos-unstable", 76 | "repo": "nixpkgs", 77 | "type": "github" 78 | } 79 | }, 80 | "nixpkgs_2": { 81 | "locked": { 82 | "lastModified": 1752077645, 83 | "narHash": "sha256-HM791ZQtXV93xtCY+ZxG1REzhQenSQO020cu6rHtAPk=", 84 | "owner": "NixOS", 85 | "repo": "nixpkgs", 86 | "rev": "be9e214982e20b8310878ac2baa063a961c1bdf6", 87 | "type": "github" 88 | }, 89 | "original": { 90 | "owner": "NixOS", 91 | "ref": "nixpkgs-unstable", 92 | "repo": "nixpkgs", 93 | "type": "github" 94 | } 95 | }, 96 | "nixpkgs_3": { 97 | "locked": { 98 | "lastModified": 1759070547, 99 | "narHash": "sha256-JVZl8NaVRYb0+381nl7LvPE+A774/dRpif01FKLrYFQ=", 100 | "owner": "NixOS", 101 | "repo": "nixpkgs", 102 | "rev": "647e5c14cbd5067f44ac86b74f014962df460840", 103 | "type": 
"github" 104 | }, 105 | "original": { 106 | "owner": "NixOS", 107 | "ref": "nixpkgs-unstable", 108 | "repo": "nixpkgs", 109 | "type": "github" 110 | } 111 | }, 112 | "root": { 113 | "inputs": { 114 | "fenix": "fenix", 115 | "naersk": "naersk", 116 | "nixpkgs": "nixpkgs_3", 117 | "utils": "utils" 118 | } 119 | }, 120 | "rust-analyzer-src": { 121 | "flake": false, 122 | "locked": { 123 | "lastModified": 1759060464, 124 | "narHash": "sha256-37+iMpZOQ1m9SuOJTBlRK1R0IVPS7e95oQggK82UpLs=", 125 | "owner": "rust-lang", 126 | "repo": "rust-analyzer", 127 | "rev": "5c0b555a65cadc14a6a16865c3e065c9d30b0bef", 128 | "type": "github" 129 | }, 130 | "original": { 131 | "owner": "rust-lang", 132 | "ref": "nightly", 133 | "repo": "rust-analyzer", 134 | "type": "github" 135 | } 136 | }, 137 | "rust-analyzer-src_2": { 138 | "flake": false, 139 | "locked": { 140 | "lastModified": 1752428706, 141 | "narHash": "sha256-EJcdxw3aXfP8Ex1Nm3s0awyH9egQvB2Gu+QEnJn2Sfg=", 142 | "owner": "rust-lang", 143 | "repo": "rust-analyzer", 144 | "rev": "591e3b7624be97e4443ea7b5542c191311aa141d", 145 | "type": "github" 146 | }, 147 | "original": { 148 | "owner": "rust-lang", 149 | "ref": "nightly", 150 | "repo": "rust-analyzer", 151 | "type": "github" 152 | } 153 | }, 154 | "systems": { 155 | "locked": { 156 | "lastModified": 1681028828, 157 | "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", 158 | "owner": "nix-systems", 159 | "repo": "default", 160 | "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", 161 | "type": "github" 162 | }, 163 | "original": { 164 | "owner": "nix-systems", 165 | "repo": "default", 166 | "type": "github" 167 | } 168 | }, 169 | "utils": { 170 | "inputs": { 171 | "systems": "systems" 172 | }, 173 | "locked": { 174 | "lastModified": 1731533236, 175 | "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=", 176 | "owner": "numtide", 177 | "repo": "flake-utils", 178 | "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b", 179 | "type": "github" 180 | }, 181 | "original": { 182 | "owner": "numtide", 183 | "repo": "flake-utils", 184 | "type": "github" 185 | } 186 | } 187 | }, 188 | "root": "root", 189 | "version": 7 190 | } 191 | -------------------------------------------------------------------------------- /install.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -euo pipefail 3 | 4 | # mdserve installer script 5 | # Usage: curl -sSfL https://raw.githubusercontent.com/jfernandez/mdserve/main/install.sh | bash 6 | 7 | # Repository information 8 | REPO_OWNER="jfernandez" 9 | REPO_NAME="mdserve" 10 | BINARY_NAME="mdserve" 11 | 12 | # Color codes for output 13 | RED='\033[0;31m' 14 | GREEN='\033[0;32m' 15 | BLUE='\033[0;34m' 16 | YELLOW='\033[0;33m' 17 | NC='\033[0m' # No Color 18 | 19 | # Cleanup function 20 | cleanup() { 21 | if [ -n "${TEMP_FILE:-}" ] && [ -f "$TEMP_FILE" ]; then 22 | rm -f "$TEMP_FILE" 23 | fi 24 | } 25 | 26 | # Set trap for cleanup 27 | trap cleanup EXIT 28 | 29 | # Logging functions 30 | info() { 31 | echo -e "${BLUE}[INFO]${NC} $1" 32 | } 33 | 34 | success() { 35 | echo -e "${GREEN}[SUCCESS]${NC} $1" 36 | } 37 | 38 | warn() { 39 | echo -e "${YELLOW}[WARN]${NC} $1" 40 | } 41 | 42 | error() { 43 | echo -e "${RED}[ERROR]${NC} $1" >&2 44 | } 45 | 46 | fatal() { 47 | error "$1" 48 | exit 1 49 | } 50 | 51 | # Check if command exists 52 | has_command() { 53 | command -v "$1" >/dev/null 2>&1 54 | } 55 | 56 | # Download function that tries curl first, then wget 57 | download() { 58 | local url="$1" 59 | local 
output="$2" 60 | 61 | if has_command curl; then 62 | curl -sSfL "$url" -o "$output" 63 | elif has_command wget; then 64 | wget -q "$url" -O "$output" 65 | else 66 | fatal "Neither curl nor wget is available. Please install one of them." 67 | fi 68 | } 69 | 70 | # Get latest release tag from GitHub API 71 | get_latest_release() { 72 | local api_url="https://api.github.com/repos/${REPO_OWNER}/${REPO_NAME}/releases/latest" 73 | local response 74 | 75 | if has_command curl; then 76 | response=$(curl -sSf "$api_url") 77 | elif has_command wget; then 78 | response=$(wget -qO- "$api_url") 79 | else 80 | fatal "Neither curl nor wget is available. Please install one of them." 81 | fi 82 | 83 | # Extract tag_name from JSON response (simple grep/sed approach to avoid jq dependency) 84 | echo "$response" | grep '"tag_name":' | sed -E 's/.*"tag_name": *"([^"]+)".*/\1/' 85 | } 86 | 87 | # Detect platform and architecture 88 | detect_platform() { 89 | local os arch 90 | 91 | os=$(uname -s) 92 | arch=$(uname -m) 93 | 94 | # Normalize OS 95 | case "$os" in 96 | Linux*) os="linux" ;; 97 | Darwin*) fatal "macOS is not supported by this installer. Please install using Homebrew: brew install mdserve" ;; 98 | CYGWIN*|MINGW*|MSYS*) fatal "Windows is not currently supported" ;; 99 | *) fatal "Unsupported operating system: $os" ;; 100 | esac 101 | 102 | # Normalize architecture 103 | case "$arch" in 104 | x86_64|amd64) arch="x86_64" ;; 105 | aarch64|arm64) arch="aarch64" ;; 106 | *) fatal "Unsupported architecture: $arch" ;; 107 | esac 108 | 109 | # Map to binary names used in releases 110 | case "$os-$arch" in 111 | linux-x86_64) echo "x86_64-unknown-linux-musl" ;; 112 | *) fatal "No binary available for $os-$arch" ;; 113 | esac 114 | } 115 | 116 | # Find the best installation directory 117 | find_install_dir() { 118 | # Check for user override 119 | if [ -n "${MDSERVE_INSTALL_DIR:-}" ]; then 120 | echo "$MDSERVE_INSTALL_DIR" 121 | return 122 | fi 123 | 124 | # Try system-wide directory first (if we can write to it) 125 | if [ -w "/usr/local/bin" ] || [ "$EUID" = 0 ]; then 126 | echo "/usr/local/bin" 127 | return 128 | fi 129 | 130 | # Try user directories 131 | for dir in "$HOME/.local/bin" "$HOME/bin"; do 132 | if [ -d "$dir" ] && [ -w "$dir" ]; then 133 | echo "$dir" 134 | return 135 | fi 136 | done 137 | 138 | # Create ~/.local/bin if it doesn't exist (XDG standard) 139 | local local_bin="$HOME/.local/bin" 140 | if mkdir -p "$local_bin" 2>/dev/null; then 141 | echo "$local_bin" 142 | return 143 | fi 144 | 145 | # Final fallback 146 | local fallback_dir="$HOME/.mdserve/bin" 147 | mkdir -p "$fallback_dir" 148 | echo "$fallback_dir" 149 | } 150 | 151 | # Check if directory is in PATH 152 | is_in_path() { 153 | local dir="$1" 154 | case ":$PATH:" in 155 | *":$dir:"*) return 0 ;; 156 | *) return 1 ;; 157 | esac 158 | } 159 | 160 | # Main installation function 161 | install_mdserve() { 162 | info "Installing $BINARY_NAME..." 163 | 164 | # Detect platform 165 | info "Detecting platform..." 166 | local target 167 | target=$(detect_platform) 168 | info "Detected platform: $target" 169 | 170 | # Get latest release 171 | info "Fetching latest release information..." 
172 | local version 173 | version=$(get_latest_release) 174 | if [ -z "$version" ]; then 175 | fatal "Failed to get latest release information" 176 | fi 177 | info "Latest release: $version" 178 | 179 | # Construct download URL 180 | local binary_name="${BINARY_NAME}-${target}" 181 | local download_url="https://github.com/${REPO_OWNER}/${REPO_NAME}/releases/download/${version}/${binary_name}" 182 | 183 | # Create temporary file 184 | TEMP_FILE=$(mktemp) 185 | 186 | # Download binary 187 | info "Downloading $binary_name..." 188 | if ! download "$download_url" "$TEMP_FILE"; then 189 | fatal "Failed to download binary from $download_url" 190 | fi 191 | 192 | # Find installation directory 193 | local install_dir 194 | install_dir=$(find_install_dir) 195 | info "Installing to: $install_dir" 196 | 197 | # Check if we need sudo for system directory 198 | local use_sudo="" 199 | if [ "$install_dir" = "/usr/local/bin" ] && [ "$EUID" != 0 ] && [ ! -w "$install_dir" ]; then 200 | if has_command sudo; then 201 | info "Administrator privileges required for system installation" 202 | use_sudo="sudo" 203 | else 204 | fatal "Cannot write to $install_dir and sudo is not available" 205 | fi 206 | fi 207 | 208 | # Install binary 209 | local install_path="$install_dir/$BINARY_NAME" 210 | if [ -n "$use_sudo" ]; then 211 | $use_sudo cp "$TEMP_FILE" "$install_path" 212 | $use_sudo chmod +x "$install_path" 213 | else 214 | cp "$TEMP_FILE" "$install_path" 215 | chmod +x "$install_path" 216 | fi 217 | 218 | # Verify installation 219 | if [ ! -x "$install_path" ]; then 220 | fatal "Installation failed: $install_path is not executable" 221 | fi 222 | 223 | # Test the binary 224 | if ! "$install_path" --version >/dev/null 2>&1; then 225 | warn "Binary installed but --version check failed. This might be normal if the binary doesn't support --version." 226 | fi 227 | 228 | success "$BINARY_NAME $version installed successfully to $install_path" 229 | 230 | # Check PATH 231 | if ! is_in_path "$install_dir"; then 232 | warn "⚠️ $install_dir is not in your PATH" 233 | info "Add it to your PATH by adding this line to your shell profile:" 234 | echo " export PATH=\"$install_dir:\$PATH\"" 235 | echo "" 236 | info "Or run the binary directly: $install_path" 237 | else 238 | info "✅ You can now run: $BINARY_NAME" 239 | fi 240 | } 241 | 242 | # Script entry point 243 | main() { 244 | # Check for help flag 245 | for arg in "$@"; do 246 | case "$arg" in 247 | -h|--help) 248 | echo "mdserve installer" 249 | echo "" 250 | echo "Usage: $0 [options]" 251 | echo "" 252 | echo "Environment variables:" 253 | echo " MDSERVE_INSTALL_DIR Override installation directory" 254 | echo "" 255 | echo "Examples:" 256 | echo " # Install to default location" 257 | echo " curl -sSfL https://raw.githubusercontent.com/$REPO_OWNER/$REPO_NAME/main/install.sh | bash" 258 | echo "" 259 | echo " # Install to custom directory" 260 | echo " MDSERVE_INSTALL_DIR=~/my-tools curl -sSfL ... | bash" 261 | exit 0 262 | ;; 263 | esac 264 | done 265 | 266 | install_mdserve 267 | } 268 | 269 | # Run main function with all arguments 270 | main "$@" -------------------------------------------------------------------------------- /templates/main.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | Markdown Preview 7 | 8 | 9 | 34 | 35 | 453 | 454 | {% if mermaid_enabled %} 455 | 456 | {% endif %} 457 | 661 | 662 | 663 | {% if show_navigation %} 664 | 670 | 684 | {% endif %} 685 | 686 | 687 |
688-754 | [The rest of templates/main.html lost its HTML markup in this dump. The surviving text shows the page layout: a main container that renders {{ content }}, followed by a theme picker panel titled "Choose Theme" with five options and their taglines: Catppuccin Latte ("Warm light theme"), Catppuccin Macchiato 🥛 ("Medium contrast"), Catppuccin Mocha 🐱 ("Dark and cozy"), Light ☀️ ("Classic bright"), and Dark 🌙 ("Classic dark").]
-------------------------------------------------------------------------------- /src/app.rs: -------------------------------------------------------------------------------- 1 | use anyhow::Result; 2 | use axum::{ 3 | extract::{ 4 | ws::{Message, WebSocket}, 5 | Path as AxumPath, State, WebSocketUpgrade, 6 | }, 7 | http::{header, HeaderMap, StatusCode}, 8 | response::{Html, IntoResponse}, 9 | routing::get, 10 | Router, 11 | }; 12 | use futures_util::{SinkExt, StreamExt}; 13 | use minijinja::{context, value::Value, Environment}; 14 | use notify::{Config, Event, RecommendedWatcher, RecursiveMode, Watcher}; 15 | use serde::{Deserialize, Serialize}; 16 | use std::{ 17 | fs, 18 | net::Ipv6Addr, 19 | path::{Path, PathBuf}, 20 | sync::{Arc, OnceLock}, 21 | time::SystemTime, 22 | }; 23 | use tokio::{ 24 | net::TcpListener, 25 | sync::{broadcast, mpsc, Mutex}, 26 | }; 27 | use tower_http::cors::CorsLayer; 28 | 29 | const TEMPLATE_NAME: &str = "main.html"; 30 | static TEMPLATE_ENV: OnceLock<Environment<'static>> = OnceLock::new(); 31 | const MERMAID_JS: &str = include_str!("../static/js/mermaid.min.js"); 32 | const MERMAID_ETAG: &str = concat!("\"", env!("CARGO_PKG_VERSION"), "\""); 33 | 34 | type SharedMarkdownState = Arc<Mutex<MarkdownState>>; 35 | 36 | fn template_env() -> &'static Environment<'static> { 37 | TEMPLATE_ENV.get_or_init(|| { 38 | let mut env = Environment::new(); 39 | minijinja_embed::load_templates!(&mut env); 40 | env 41 | }) 42 | } 43 | 44 | #[derive(Serialize, Deserialize, Debug, Clone)] 45 | #[serde(tag = "type")] 46 | pub enum ClientMessage { 47 | Ping, 48 | RequestRefresh, 49 | } 50 | 51 | #[derive(Serialize, Deserialize, Debug, Clone, PartialEq)] 52 | #[serde(tag = "type")] 53 | pub enum ServerMessage { 54 | Reload, 55 | Pong, 56 | } 57 | 58 | use std::collections::HashMap; 59 | 60 | pub fn scan_markdown_files(dir: &Path) -> Result<Vec<PathBuf>> { 61 | let mut md_files = Vec::new(); 62 | 63 | for entry in fs::read_dir(dir)?
{ 64 | let entry = entry?; 65 | let path = entry.path(); 66 | 67 | if path.is_file() && is_markdown_file(&path) { 68 | md_files.push(path); 69 | } 70 | } 71 | 72 | md_files.sort(); 73 | 74 | Ok(md_files) 75 | } 76 | 77 | fn is_markdown_file(path: &Path) -> bool { 78 | path.extension() 79 | .and_then(|ext| ext.to_str()) 80 | .map(|ext| ext.eq_ignore_ascii_case("md") || ext.eq_ignore_ascii_case("markdown")) 81 | .unwrap_or(false) 82 | } 83 | 84 | struct TrackedFile { 85 | path: PathBuf, 86 | last_modified: SystemTime, 87 | html: String, 88 | } 89 | 90 | struct MarkdownState { 91 | base_dir: PathBuf, 92 | tracked_files: HashMap<String, TrackedFile>, 93 | is_directory_mode: bool, 94 | change_tx: broadcast::Sender<ServerMessage>, 95 | } 96 | 97 | impl MarkdownState { 98 | fn new(base_dir: PathBuf, file_paths: Vec<PathBuf>, is_directory_mode: bool) -> Result<Self> { 99 | let (change_tx, _) = broadcast::channel::<ServerMessage>(16); 100 | 101 | let mut tracked_files = HashMap::new(); 102 | for file_path in file_paths { 103 | let metadata = fs::metadata(&file_path)?; 104 | let last_modified = metadata.modified()?; 105 | let content = fs::read_to_string(&file_path)?; 106 | let html = Self::markdown_to_html(&content)?; 107 | 108 | let filename = file_path.file_name().unwrap().to_string_lossy().to_string(); 109 | 110 | tracked_files.insert( 111 | filename, 112 | TrackedFile { 113 | path: file_path, 114 | last_modified, 115 | html, 116 | }, 117 | ); 118 | } 119 | 120 | Ok(MarkdownState { 121 | base_dir, 122 | tracked_files, 123 | is_directory_mode, 124 | change_tx, 125 | }) 126 | } 127 | 128 | fn show_navigation(&self) -> bool { 129 | self.is_directory_mode 130 | } 131 | 132 | fn get_sorted_filenames(&self) -> Vec<String> { 133 | let mut filenames: Vec<_> = self.tracked_files.keys().cloned().collect(); 134 | filenames.sort(); 135 | filenames 136 | } 137 | 138 | fn refresh_file(&mut self, filename: &str) -> Result<()> { 139 | if let Some(tracked) = self.tracked_files.get_mut(filename) { 140 | let metadata = fs::metadata(&tracked.path)?; 141 | let current_modified = metadata.modified()?; 142 | 143 | if current_modified > tracked.last_modified { 144 | let content = fs::read_to_string(&tracked.path)?; 145 | tracked.html = Self::markdown_to_html(&content)?; 146 | tracked.last_modified = current_modified; 147 | } 148 | } 149 | 150 | Ok(()) 151 | } 152 | 153 | fn add_tracked_file(&mut self, file_path: PathBuf) -> Result<()> { 154 | let filename = file_path.file_name().unwrap().to_string_lossy().to_string(); 155 | 156 | if self.tracked_files.contains_key(&filename) { 157 | return Ok(()); 158 | } 159 | 160 | let metadata = fs::metadata(&file_path)?; 161 | let content = fs::read_to_string(&file_path)?; 162 | 163 | self.tracked_files.insert( 164 | filename, 165 | TrackedFile { 166 | path: file_path, 167 | last_modified: metadata.modified()?, 168 | html: Self::markdown_to_html(&content)?, 169 | }, 170 | ); 171 | 172 | Ok(()) 173 | } 174 | 175 | fn markdown_to_html(content: &str) -> Result<String> { 176 | let mut options = markdown::Options::gfm(); 177 | options.compile.allow_dangerous_html = true; 178 | options.parse.constructs.frontmatter = true; 179 | 180 | let html_body = markdown::to_html_with_options(content, &options) 181 | .unwrap_or_else(|_| "Error parsing markdown".to_string()); 182 | 183 | Ok(html_body) 184 | } 185 | } 186 | 187 | /// Handles a markdown file that may have been created or modified. 188 | /// Refreshes tracked files or adds new files in directory mode, sending reload notifications.
189 | async fn handle_markdown_file_change(path: &Path, state: &SharedMarkdownState) { 190 | if !is_markdown_file(path) { 191 | return; 192 | } 193 | 194 | let filename = path.file_name().and_then(|n| n.to_str()).map(String::from); 195 | let Some(filename) = filename else { 196 | return; 197 | }; 198 | 199 | let mut state_guard = state.lock().await; 200 | 201 | // If file is already tracked, refresh its content 202 | if state_guard.tracked_files.contains_key(&filename) { 203 | if state_guard.refresh_file(&filename).is_ok() { 204 | let _ = state_guard.change_tx.send(ServerMessage::Reload); 205 | } 206 | } else if state_guard.is_directory_mode { 207 | // New file in directory mode - add and reload 208 | if state_guard.add_tracked_file(path.to_path_buf()).is_ok() { 209 | let _ = state_guard.change_tx.send(ServerMessage::Reload); 210 | } 211 | } 212 | } 213 | 214 | async fn handle_file_event(event: Event, state: &SharedMarkdownState) { 215 | match event.kind { 216 | notify::EventKind::Modify(notify::event::ModifyKind::Name(rename_mode)) => { 217 | use notify::event::RenameMode; 218 | match rename_mode { 219 | RenameMode::Both => { 220 | // Linux/Windows: Both old and new paths provided in single event 221 | if event.paths.len() == 2 { 222 | let new_path = &event.paths[1]; 223 | handle_markdown_file_change(new_path, state).await; 224 | } 225 | } 226 | RenameMode::From => { 227 | // File being renamed away - ignore 228 | } 229 | RenameMode::To => { 230 | // File renamed to this location 231 | if let Some(path) = event.paths.first() { 232 | handle_markdown_file_change(path, state).await; 233 | } 234 | } 235 | RenameMode::Any => { 236 | // macOS: Sends separate events for old and new paths 237 | // Use file existence to distinguish old (doesn't exist) from new (exists) 238 | if let Some(path) = event.paths.first() { 239 | if path.exists() { 240 | handle_markdown_file_change(path, state).await; 241 | } 242 | } 243 | } 244 | _ => {} 245 | } 246 | } 247 | _ => { 248 | for path in &event.paths { 249 | if is_markdown_file(path) { 250 | match event.kind { 251 | notify::EventKind::Create(_) 252 | | notify::EventKind::Modify(notify::event::ModifyKind::Data(_)) => { 253 | handle_markdown_file_change(path, state).await; 254 | } 255 | notify::EventKind::Remove(_) => { 256 | // Don't remove files from tracking. Editors like neovim save by 257 | // renaming the file to a backup, then creating a new one. If we 258 | // removed the file here, HTTP requests during that window would 259 | // see empty tracked_files and return 404. 260 | } 261 | _ => {} 262 | } 263 | } else if path.is_file() && is_image_file(path.to_str().unwrap_or("")) { 264 | match event.kind { 265 | notify::EventKind::Modify(_) 266 | | notify::EventKind::Create(_) 267 | | notify::EventKind::Remove(_) => { 268 | let state_guard = state.lock().await; 269 | let _ = state_guard.change_tx.send(ServerMessage::Reload); 270 | } 271 | _ => {} 272 | } 273 | } 274 | } 275 | } 276 | } 277 | } 278 | 279 | /// Creates a new Router for serving markdown files. 
280 | /// 281 | /// # Errors 282 | /// 283 | /// Returns an error if: 284 | /// - Files cannot be read or don't exist 285 | /// - File metadata cannot be accessed 286 | /// - File watcher cannot be created 287 | /// - File watcher cannot watch the base directory 288 | pub fn new_router( 289 | base_dir: PathBuf, 290 | tracked_files: Vec<PathBuf>, 291 | is_directory_mode: bool, 292 | ) -> Result<Router> { 293 | let base_dir = base_dir.canonicalize()?; 294 | 295 | let state = Arc::new(Mutex::new(MarkdownState::new( 296 | base_dir.clone(), 297 | tracked_files, 298 | is_directory_mode, 299 | )?)); 300 | 301 | let watcher_state = state.clone(); 302 | let (tx, mut rx) = mpsc::channel(100); 303 | 304 | let mut watcher = RecommendedWatcher::new( 305 | move |res: std::result::Result<Event, notify::Error>| { 306 | if let Ok(event) = res { 307 | let _ = tx.blocking_send(event); 308 | } 309 | }, 310 | Config::default(), 311 | )?; 312 | 313 | watcher.watch(&base_dir, RecursiveMode::NonRecursive)?; 314 | 315 | tokio::spawn(async move { 316 | let _watcher = watcher; 317 | while let Some(event) = rx.recv().await { 318 | handle_file_event(event, &watcher_state).await; 319 | } 320 | }); 321 | 322 | let router = Router::new() 323 | .route("/", get(serve_html_root)) 324 | .route("/ws", get(websocket_handler)) 325 | .route("/mermaid.min.js", get(serve_mermaid_js)) 326 | .route("/:filename", get(serve_file)) 327 | .layer(CorsLayer::permissive()) 328 | .with_state(state); 329 | 330 | Ok(router) 331 | } 332 | 333 | /// Serves markdown files with live reload support. 334 | /// 335 | /// # Errors 336 | /// 337 | /// Returns an error if: 338 | /// - Files cannot be read or don't exist 339 | /// - Cannot bind to the specified host address 340 | /// - Server fails to start 341 | /// - Axum serve encounters an error 342 | pub async fn serve_markdown( 343 | base_dir: PathBuf, 344 | tracked_files: Vec<PathBuf>, 345 | is_directory_mode: bool, 346 | hostname: impl AsRef<str>, 347 | port: u16, 348 | ) -> Result<()> { 349 | let hostname = hostname.as_ref(); 350 | 351 | let first_file = tracked_files.first().cloned(); 352 | let router = new_router(base_dir.clone(), tracked_files, is_directory_mode)?; 353 | 354 | let listener = TcpListener::bind((hostname, port)).await?; 355 | 356 | let listen_addr = format_host(hostname, port); 357 | 358 | if is_directory_mode { 359 | println!("📁 Serving markdown files from: {}", base_dir.display()); 360 | } else if let Some(file_path) = first_file { 361 | println!("📄 Serving markdown file: {}", file_path.display()); 362 | } 363 | 364 | println!("🌐 Server running at: http://{listen_addr}"); 365 | println!("⚡ Live reload enabled"); 366 | println!("\nPress Ctrl+C to stop the server"); 367 | 368 | axum::serve(listener, router).await?; 369 | 370 | Ok(()) 371 | } 372 | 373 | /// Format the host address (hostname + port) for printing.
374 | fn format_host(hostname: &str, port: u16) -> String { 375 | if hostname.parse::<Ipv6Addr>().is_ok() { 376 | format!("[{hostname}]:{port}") 377 | } else { 378 | format!("{hostname}:{port}") 379 | } 380 | } 381 | 382 | async fn serve_html_root(State(state): State<SharedMarkdownState>) -> impl IntoResponse { 383 | let mut state = state.lock().await; 384 | 385 | let filename = match state.get_sorted_filenames().into_iter().next() { 386 | Some(name) => name, 387 | None => { 388 | return ( 389 | StatusCode::INTERNAL_SERVER_ERROR, 390 | Html("No files available to serve".to_string()), 391 | ); 392 | } 393 | }; 394 | 395 | let _ = state.refresh_file(&filename); 396 | 397 | render_markdown(&state, &filename).await 398 | } 399 | 400 | async fn serve_file( 401 | AxumPath(filename): AxumPath<String>, 402 | State(state): State<SharedMarkdownState>, 403 | ) -> axum::response::Response { 404 | if filename.ends_with(".md") || filename.ends_with(".markdown") { 405 | let mut state = state.lock().await; 406 | 407 | if !state.tracked_files.contains_key(&filename) { 408 | return (StatusCode::NOT_FOUND, Html("File not found".to_string())).into_response(); 409 | } 410 | 411 | let _ = state.refresh_file(&filename); 412 | 413 | let (status, html) = render_markdown(&state, &filename).await; 414 | (status, html).into_response() 415 | } else if is_image_file(&filename) { 416 | serve_static_file_inner(filename, state).await 417 | } else { 418 | (StatusCode::NOT_FOUND, Html("File not found".to_string())).into_response() 419 | } 420 | } 421 | 422 | async fn render_markdown(state: &MarkdownState, current_file: &str) -> (StatusCode, Html<String>) { 423 | let env = template_env(); 424 | let template = match env.get_template(TEMPLATE_NAME) { 425 | Ok(t) => t, 426 | Err(e) => { 427 | return ( 428 | StatusCode::INTERNAL_SERVER_ERROR, 429 | Html(format!("Template error: {e}")), 430 | ); 431 | } 432 | }; 433 | 434 | let (content, has_mermaid) = if let Some(tracked) = state.tracked_files.get(current_file) { 435 | let html = &tracked.html; 436 | let mermaid = html.contains(r#"class="language-mermaid""#); 437 | (Value::from_safe_string(html.clone()), mermaid) 438 | } else { 439 | return (StatusCode::NOT_FOUND, Html("File not found".to_string())); 440 | }; 441 | 442 | let rendered = if state.show_navigation() { 443 | let filenames = state.get_sorted_filenames(); 444 | let files: Vec<Value> = filenames 445 | .iter() 446 | .map(|name| { 447 | Value::from_object({ 448 | let mut map = std::collections::HashMap::new(); 449 | map.insert("name".to_string(), Value::from(name.clone())); 450 | map 451 | }) 452 | }) 453 | .collect(); 454 | 455 | match template.render(context! { 456 | content => content, 457 | mermaid_enabled => has_mermaid, 458 | show_navigation => true, 459 | files => files, 460 | current_file => current_file, 461 | }) { 462 | Ok(r) => r, 463 | Err(e) => { 464 | return ( 465 | StatusCode::INTERNAL_SERVER_ERROR, 466 | Html(format!("Rendering error: {e}")), 467 | ); 468 | } 469 | } 470 | } else { 471 | match template.render(context!
{ 472 | content => content, 473 | mermaid_enabled => has_mermaid, 474 | show_navigation => false, 475 | }) { 476 | Ok(r) => r, 477 | Err(e) => { 478 | return ( 479 | StatusCode::INTERNAL_SERVER_ERROR, 480 | Html(format!("Rendering error: {e}")), 481 | ); 482 | } 483 | } 484 | }; 485 | 486 | (StatusCode::OK, Html(rendered)) 487 | } 488 | 489 | async fn serve_mermaid_js(headers: HeaderMap) -> impl IntoResponse { 490 | if is_etag_match(&headers) { 491 | return mermaid_response(StatusCode::NOT_MODIFIED, None); 492 | } 493 | 494 | mermaid_response(StatusCode::OK, Some(MERMAID_JS)) 495 | } 496 | 497 | fn is_etag_match(headers: &HeaderMap) -> bool { 498 | headers 499 | .get(header::IF_NONE_MATCH) 500 | .and_then(|v| v.to_str().ok()) 501 | .is_some_and(|etags| etags.split(',').any(|tag| tag.trim() == MERMAID_ETAG)) 502 | } 503 | 504 | fn mermaid_response(status: StatusCode, body: Option<&'static str>) -> impl IntoResponse { 505 | // Use no-cache to force revalidation on each request. This ensures clients 506 | // get updated content when mdserve is rebuilt with a new Mermaid version, 507 | // while still benefiting from 304 responses via ETag matching. 508 | let headers = [ 509 | (header::CONTENT_TYPE, "application/javascript"), 510 | (header::ETAG, MERMAID_ETAG), 511 | (header::CACHE_CONTROL, "public, no-cache"), 512 | ]; 513 | 514 | match body { 515 | Some(content) => (status, headers, content).into_response(), 516 | None => (status, headers).into_response(), 517 | } 518 | } 519 | 520 | async fn serve_static_file_inner( 521 | filename: String, 522 | state: SharedMarkdownState, 523 | ) -> axum::response::Response { 524 | let state = state.lock().await; 525 | 526 | let full_path = state.base_dir.join(&filename); 527 | 528 | match full_path.canonicalize() { 529 | Ok(canonical_path) => { 530 | if !canonical_path.starts_with(&state.base_dir) { 531 | return ( 532 | StatusCode::FORBIDDEN, 533 | [(header::CONTENT_TYPE, "text/plain")], 534 | "Access denied".to_string(), 535 | ) 536 | .into_response(); 537 | } 538 | 539 | match fs::read(&canonical_path) { 540 | Ok(contents) => { 541 | let content_type = guess_image_content_type(&filename); 542 | ( 543 | StatusCode::OK, 544 | [(header::CONTENT_TYPE, content_type.as_str())], 545 | contents, 546 | ) 547 | .into_response() 548 | } 549 | Err(_) => ( 550 | StatusCode::NOT_FOUND, 551 | [(header::CONTENT_TYPE, "text/plain")], 552 | "File not found".to_string(), 553 | ) 554 | .into_response(), 555 | } 556 | } 557 | Err(_) => ( 558 | StatusCode::NOT_FOUND, 559 | [(header::CONTENT_TYPE, "text/plain")], 560 | "File not found".to_string(), 561 | ) 562 | .into_response(), 563 | } 564 | } 565 | 566 | fn is_image_file(file_path: &str) -> bool { 567 | let extension = std::path::Path::new(file_path) 568 | .extension() 569 | .and_then(|ext| ext.to_str()) 570 | .unwrap_or(""); 571 | 572 | matches!( 573 | extension.to_lowercase().as_str(), 574 | "png" | "jpg" | "jpeg" | "gif" | "svg" | "webp" | "bmp" | "ico" 575 | ) 576 | } 577 | 578 | fn guess_image_content_type(file_path: &str) -> String { 579 | let extension = std::path::Path::new(file_path) 580 | .extension() 581 | .and_then(|ext| ext.to_str()) 582 | .unwrap_or(""); 583 | 584 | match extension.to_lowercase().as_str() { 585 | "png" => "image/png", 586 | "jpg" | "jpeg" => "image/jpeg", 587 | "gif" => "image/gif", 588 | "svg" => "image/svg+xml", 589 | "webp" => "image/webp", 590 | "bmp" => "image/bmp", 591 | "ico" => "image/x-icon", 592 | _ => "application/octet-stream", 593 | } 594 | .to_string() 595 | } 596 | 597 | async 
fn websocket_handler( 598 | ws: WebSocketUpgrade, 599 | State(state): State<SharedMarkdownState>, 600 | ) -> impl IntoResponse { 601 | ws.on_upgrade(move |socket| handle_websocket(socket, state)) 602 | } 603 | 604 | async fn handle_websocket(socket: WebSocket, state: SharedMarkdownState) { 605 | let (mut sender, mut receiver) = socket.split(); 606 | 607 | let mut change_rx = { 608 | let state = state.lock().await; 609 | state.change_tx.subscribe() 610 | }; 611 | 612 | let recv_task = tokio::spawn(async move { 613 | while let Some(msg) = receiver.next().await { 614 | match msg { 615 | Ok(Message::Text(text)) => { 616 | if let Ok(client_msg) = serde_json::from_str::<ClientMessage>(&text) { 617 | match client_msg { 618 | ClientMessage::Ping | ClientMessage::RequestRefresh => {} 619 | } 620 | } 621 | } 622 | Ok(Message::Close(_)) => break, 623 | _ => {} 624 | } 625 | } 626 | }); 627 | 628 | let send_task = tokio::spawn(async move { 629 | while let Ok(reload_msg) = change_rx.recv().await { 630 | if let Ok(json) = serde_json::to_string(&reload_msg) { 631 | if sender.send(Message::Text(json)).await.is_err() { 632 | break; 633 | } 634 | } 635 | } 636 | }); 637 | 638 | tokio::select! { 639 | _ = recv_task => {}, 640 | _ = send_task => {}, 641 | } 642 | } 643 | 644 | #[cfg(test)] 645 | mod tests { 646 | use super::*; 647 | use std::fs; 648 | use tempfile::tempdir; 649 | 650 | #[test] 651 | fn test_is_markdown_file() { 652 | assert!(is_markdown_file(Path::new("test.md"))); 653 | assert!(is_markdown_file(Path::new("/path/to/file.md"))); 654 | 655 | assert!(is_markdown_file(Path::new("test.markdown"))); 656 | assert!(is_markdown_file(Path::new("/path/to/file.markdown"))); 657 | 658 | assert!(is_markdown_file(Path::new("test.MD"))); 659 | assert!(is_markdown_file(Path::new("test.Md"))); 660 | assert!(is_markdown_file(Path::new("test.MARKDOWN"))); 661 | assert!(is_markdown_file(Path::new("test.MarkDown"))); 662 | 663 | assert!(!is_markdown_file(Path::new("test.txt"))); 664 | assert!(!is_markdown_file(Path::new("test.rs"))); 665 | assert!(!is_markdown_file(Path::new("test.html"))); 666 | assert!(!is_markdown_file(Path::new("test"))); 667 | assert!(!is_markdown_file(Path::new("README"))); 668 | } 669 | 670 | #[test] 671 | fn test_is_image_file() { 672 | assert!(is_image_file("test.png")); 673 | assert!(is_image_file("test.jpg")); 674 | assert!(is_image_file("test.jpeg")); 675 | assert!(is_image_file("test.gif")); 676 | assert!(is_image_file("test.svg")); 677 | assert!(is_image_file("test.webp")); 678 | assert!(is_image_file("test.bmp")); 679 | assert!(is_image_file("test.ico")); 680 | 681 | assert!(is_image_file("test.PNG")); 682 | assert!(is_image_file("test.JPG")); 683 | assert!(is_image_file("test.JPEG")); 684 | 685 | assert!(is_image_file("/path/to/image.png")); 686 | assert!(is_image_file("./images/photo.jpg")); 687 | 688 | assert!(!is_image_file("test.txt")); 689 | assert!(!is_image_file("test.md")); 690 | assert!(!is_image_file("test.rs")); 691 | assert!(!is_image_file("test")); 692 | } 693 | 694 | #[test] 695 | fn test_guess_image_content_type() { 696 | assert_eq!(guess_image_content_type("test.png"), "image/png"); 697 | assert_eq!(guess_image_content_type("test.jpg"), "image/jpeg"); 698 | assert_eq!(guess_image_content_type("test.jpeg"), "image/jpeg"); 699 | assert_eq!(guess_image_content_type("test.gif"), "image/gif"); 700 | assert_eq!(guess_image_content_type("test.svg"), "image/svg+xml"); 701 | assert_eq!(guess_image_content_type("test.webp"), "image/webp"); 702 | assert_eq!(guess_image_content_type("test.bmp"), "image/bmp"); 703
| assert_eq!(guess_image_content_type("test.ico"), "image/x-icon"); 704 | 705 | assert_eq!(guess_image_content_type("test.PNG"), "image/png"); 706 | assert_eq!(guess_image_content_type("test.JPG"), "image/jpeg"); 707 | 708 | assert_eq!( 709 | guess_image_content_type("test.xyz"), 710 | "application/octet-stream" 711 | ); 712 | assert_eq!(guess_image_content_type("test"), "application/octet-stream"); 713 | } 714 | 715 | #[test] 716 | fn test_scan_markdown_files_empty_directory() { 717 | let temp_dir = tempdir().expect("Failed to create temp dir"); 718 | 719 | let result = scan_markdown_files(temp_dir.path()).expect("Failed to scan"); 720 | assert_eq!(result.len(), 0); 721 | } 722 | 723 | #[test] 724 | fn test_scan_markdown_files_with_markdown_files() { 725 | let temp_dir = tempdir().expect("Failed to create temp dir"); 726 | 727 | fs::write(temp_dir.path().join("test1.md"), "# Test 1").expect("Failed to write"); 728 | fs::write(temp_dir.path().join("test2.markdown"), "# Test 2").expect("Failed to write"); 729 | fs::write(temp_dir.path().join("test3.md"), "# Test 3").expect("Failed to write"); 730 | 731 | fs::write(temp_dir.path().join("test.txt"), "text").expect("Failed to write"); 732 | fs::write(temp_dir.path().join("README"), "readme").expect("Failed to write"); 733 | 734 | let result = scan_markdown_files(temp_dir.path()).expect("Failed to scan"); 735 | 736 | assert_eq!(result.len(), 3); 737 | 738 | let filenames: Vec<_> = result 739 | .iter() 740 | .map(|p| p.file_name().unwrap().to_str().unwrap()) 741 | .collect(); 742 | assert_eq!(filenames, vec!["test1.md", "test2.markdown", "test3.md"]); 743 | } 744 | 745 | #[test] 746 | fn test_scan_markdown_files_ignores_subdirectories() { 747 | let temp_dir = tempdir().expect("Failed to create temp dir"); 748 | 749 | fs::write(temp_dir.path().join("root.md"), "# Root").expect("Failed to write"); 750 | 751 | let sub_dir = temp_dir.path().join("subdir"); 752 | fs::create_dir(&sub_dir).expect("Failed to create subdir"); 753 | fs::write(sub_dir.join("nested.md"), "# Nested").expect("Failed to write"); 754 | 755 | let result = scan_markdown_files(temp_dir.path()).expect("Failed to scan"); 756 | 757 | assert_eq!(result.len(), 1); 758 | assert_eq!(result[0].file_name().unwrap().to_str().unwrap(), "root.md"); 759 | } 760 | 761 | #[test] 762 | fn test_scan_markdown_files_case_insensitive() { 763 | let temp_dir = tempdir().expect("Failed to create temp dir"); 764 | 765 | fs::write(temp_dir.path().join("test1.md"), "# Test 1").expect("Failed to write"); 766 | fs::write(temp_dir.path().join("test2.MD"), "# Test 2").expect("Failed to write"); 767 | fs::write(temp_dir.path().join("test3.Md"), "# Test 3").expect("Failed to write"); 768 | fs::write(temp_dir.path().join("test4.MARKDOWN"), "# Test 4").expect("Failed to write"); 769 | 770 | let result = scan_markdown_files(temp_dir.path()).expect("Failed to scan"); 771 | 772 | assert_eq!(result.len(), 4); 773 | } 774 | 775 | #[test] 776 | fn test_format_host() { 777 | assert_eq!(format_host("127.0.0.1", 3000), "127.0.0.1:3000"); 778 | assert_eq!(format_host("192.168.1.1", 8080), "192.168.1.1:8080"); 779 | 780 | assert_eq!(format_host("localhost", 3000), "localhost:3000"); 781 | assert_eq!(format_host("example.com", 80), "example.com:80"); 782 | 783 | assert_eq!(format_host("::1", 3000), "[::1]:3000"); 784 | assert_eq!(format_host("2001:db8::1", 8080), "[2001:db8::1]:8080"); 785 | } 786 | } 787 | -------------------------------------------------------------------------------- /tests/integration_test.rs: 
-------------------------------------------------------------------------------- 1 | use axum_test::TestServer; 2 | use mdserve::{new_router, scan_markdown_files, ServerMessage}; 3 | use std::fs; 4 | use std::time::Duration; 5 | use tempfile::{tempdir, Builder, NamedTempFile, TempDir}; 6 | 7 | const FILE_WATCH_DELAY_MS: u64 = 100; 8 | const WEBSOCKET_TIMEOUT_SECS: u64 = 5; 9 | 10 | const TEST_FILE_1_CONTENT: &str = "# Test 1\n\nContent of test1"; 11 | const TEST_FILE_2_CONTENT: &str = "# Test 2\n\nContent of test2"; 12 | const TEST_FILE_3_CONTENT: &str = "# Test 3\n\nContent of test3"; 13 | const YAML_FRONTMATTER_CONTENT: &str = "---\ntitle: Test Post\nauthor: Name\n---\n\n# Test Post\n"; 14 | const TOML_FRONTMATTER_CONTENT: &str = "+++\ntitle = \"Test Post\"\n+++\n\n# Test Post\n"; 15 | 16 | fn create_test_server_impl(content: &str, use_http: bool) -> (TestServer, NamedTempFile) { 17 | let temp_file = Builder::new() 18 | .suffix(".md") 19 | .tempfile() 20 | .expect("Failed to create temp file"); 21 | fs::write(&temp_file, content).expect("Failed to write temp file"); 22 | 23 | let canonical_path = temp_file 24 | .path() 25 | .canonicalize() 26 | .unwrap_or_else(|_| temp_file.path().to_path_buf()); 27 | 28 | let base_dir = canonical_path 29 | .parent() 30 | .unwrap_or_else(|| std::path::Path::new(".")) 31 | .to_path_buf(); 32 | let tracked_files = vec![canonical_path]; 33 | let is_directory_mode = false; 34 | 35 | let router = 36 | new_router(base_dir, tracked_files, is_directory_mode).expect("Failed to create router"); 37 | 38 | let server = if use_http { 39 | TestServer::builder() 40 | .http_transport() 41 | .build(router) 42 | .expect("Failed to create test server") 43 | } else { 44 | TestServer::new(router).expect("Failed to create test server") 45 | }; 46 | 47 | (server, temp_file) 48 | } 49 | 50 | async fn create_test_server(content: &str) -> (TestServer, NamedTempFile) { 51 | create_test_server_impl(content, false) 52 | } 53 | 54 | async fn create_test_server_with_http(content: &str) -> (TestServer, NamedTempFile) { 55 | create_test_server_impl(content, true) 56 | } 57 | 58 | fn create_directory_server_impl(use_http: bool) -> (TestServer, TempDir) { 59 | let temp_dir = tempdir().expect("Failed to create temp dir"); 60 | 61 | fs::write(temp_dir.path().join("test1.md"), TEST_FILE_1_CONTENT) 62 | .expect("Failed to write test1.md"); 63 | fs::write(temp_dir.path().join("test2.markdown"), TEST_FILE_2_CONTENT) 64 | .expect("Failed to write test2.markdown"); 65 | fs::write(temp_dir.path().join("test3.md"), TEST_FILE_3_CONTENT) 66 | .expect("Failed to write test3.md"); 67 | 68 | let base_dir = temp_dir.path().to_path_buf(); 69 | let tracked_files = scan_markdown_files(&base_dir).expect("Failed to scan markdown files"); 70 | let is_directory_mode = true; 71 | 72 | let router = 73 | new_router(base_dir, tracked_files, is_directory_mode).expect("Failed to create router"); 74 | 75 | let server = if use_http { 76 | TestServer::builder() 77 | .http_transport() 78 | .build(router) 79 | .expect("Failed to create test server") 80 | } else { 81 | TestServer::new(router).expect("Failed to create test server") 82 | }; 83 | 84 | (server, temp_dir) 85 | } 86 | 87 | async fn create_directory_server() -> (TestServer, TempDir) { 88 | create_directory_server_impl(false) 89 | } 90 | 91 | async fn create_directory_server_with_http() -> (TestServer, TempDir) { 92 | create_directory_server_impl(true) 93 | } 94 | 95 | #[tokio::test] 96 | async fn test_server_starts_and_serves_basic_markdown() { 97 | let (server, 
_temp_file) = create_test_server("# Hello World\n\nThis is **bold** text.").await; 98 | 99 | let response = server.get("/").await; 100 | 101 | assert_eq!(response.status_code(), 200); 102 | let body = response.text(); 103 | 104 | // Check that markdown was converted to HTML 105 | assert!(body.contains("<h1>Hello World</h1>")); 106 | assert!(body.contains("<strong>bold</strong>")); 107 | 108 | // Check that theme toggle is present 109 | assert!(body.contains("theme-toggle")); 110 | assert!(body.contains("openThemeModal")); 111 | 112 | // Check CSS variables for theming 113 | assert!(body.contains("--bg-color")); 114 | assert!(body.contains("data-theme=\"dark\"")); 115 | } 116 | 117 | #[tokio::test] 118 | async fn test_websocket_connection() { 119 | let (server, _temp_file) = create_test_server_with_http("# WebSocket Test").await; 120 | 121 | // Test that WebSocket endpoint exists and can be connected to 122 | let response = server.get_websocket("/ws").await; 123 | response.assert_status_switching_protocols(); 124 | } 125 | 126 | #[tokio::test] 127 | async fn test_file_modification_updates_via_websocket() { 128 | let (server, temp_file) = create_test_server_with_http("# Original Content").await; 129 | 130 | let mut websocket = server.get_websocket("/ws").await.into_websocket().await; 131 | 132 | // Modify the file 133 | fs::write(&temp_file, "# Modified Content").expect("Failed to modify file"); 134 | 135 | // Add a small delay to allow file watcher to detect change 136 | tokio::time::sleep(Duration::from_millis(FILE_WATCH_DELAY_MS)).await; 137 | 138 | // Should receive reload signal via WebSocket (with timeout) 139 | let update_result = tokio::time::timeout( 140 | Duration::from_secs(WEBSOCKET_TIMEOUT_SECS), 141 | websocket.receive_json::<ServerMessage>(), 142 | ) 143 | .await; 144 | 145 | match update_result { 146 | Ok(update_message) => { 147 | if let ServerMessage::Reload = update_message { 148 | // Success - we received a reload signal 149 | } else { 150 | panic!("Expected Reload message after file modification"); 151 | } 152 | } 153 | Err(_) => { 154 | panic!("Timeout waiting for WebSocket update after file modification"); 155 | } 156 | } 157 | } 158 | 159 | #[tokio::test] 160 | async fn test_server_handles_gfm_features() { 161 | let markdown_content = r#"# GFM Test 162 | 163 | ## Table 164 | | Name | Age | 165 | |------|-----| 166 | | John | 30 | 167 | | Jane | 25 | 168 | 169 | ## Strikethrough 170 | ~~deleted text~~ 171 | 172 | ## Code block 173 | ```rust 174 | fn main() { 175 | println!("Hello!"); 176 | } 177 | ``` 178 | "#; 179 | 180 | let (server, _temp_file) = create_test_server(markdown_content).await; 181 | 182 | let response = server.get("/").await; 183 | 184 | assert_eq!(response.status_code(), 200); 185 | let body = response.text(); 186 | 187 | // Check table rendering 188 | assert!(body.contains("<table>")); 189 | assert!(body.contains("<th>Name</th>")); 190 | assert!(body.contains("<td>John</td>")); 191 | 192 | // Check strikethrough 193 | assert!(body.contains("<del>deleted text</del>")); 194 | 195 | // Check code block 196 | assert!(body.contains("<pre>"));
 197 |     assert!(body.contains("fn main()"));
 198 | }
 199 | 
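// Sketch only, not part of the original test file: the tests above repeat the
// same `assert!(body.contains(...))` pattern for each expected HTML fragment.
// A small helper like the hypothetical `assert_contains_all` below could express
// those checks more compactly; the name and signature are illustrative
// assumptions, not existing project API.
#[allow(dead_code)]
fn assert_contains_all(body: &str, fragments: &[&str]) {
    // Report the missing fragment instead of failing with a bare `assert!`.
    for fragment in fragments {
        assert!(
            body.contains(fragment),
            "expected response body to contain {fragment:?}"
        );
    }
}
// Example usage, mirroring the GFM assertions above:
// assert_contains_all(&body, &["<table>", "<th>Name</th>", "<del>deleted text</del>"]);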
 200 | #[tokio::test]
 201 | async fn test_404_for_unknown_routes() {
 202 |     let (server, _temp_file) = create_test_server("# 404 Test").await;
 203 | 
 204 |     let response = server.get("/unknown-route").await;
 205 | 
 206 |     assert_eq!(response.status_code(), 404);
 207 | }
 208 | 
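// Sketch only (hypothetical test, not in the original suite): `serve_file` falls
// through to 404 for paths that are neither markdown nor image files, so a `.txt`
// request should behave like the unknown route above. Assumes the same
// single-file TestServer setup used throughout this file.
#[tokio::test]
async fn sketch_non_markdown_extension_returns_404() {
    let (server, _temp_file) = create_test_server("# Extension Test").await;

    let response = server.get("/notes.txt").await;

    assert_eq!(response.status_code(), 404);
}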
 209 | #[tokio::test]
 210 | async fn test_image_serving() {
 211 |     use tempfile::tempdir;
 212 | 
 213 |     // Create a temporary directory
 214 |     let temp_dir = tempdir().expect("Failed to create temp dir");
 215 | 
 216 |     // Create a markdown file with image reference
 217 |     let md_content =
 218 |         "# Test with Image\n\n![Test Image](test.png)\n\nThis markdown references an image.";
 219 |     let md_path = temp_dir.path().join("test.md");
 220 |     fs::write(&md_path, md_content).expect("Failed to write markdown file");
 221 | 
 222 |     // Create a fake PNG image (1x1 pixel PNG)
 223 |     let png_data = vec![
 224 |         0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A, 0x00, 0x00, 0x00, 0x0D, 0x49, 0x48, 0x44,
 225 |         0x52, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0x08, 0x02, 0x00, 0x00, 0x00, 0x90,
 226 |         0x77, 0x53, 0xDE, 0x00, 0x00, 0x00, 0x0C, 0x49, 0x44, 0x41, 0x54, 0x08, 0xD7, 0x63, 0xF8,
 227 |         0x0F, 0x00, 0x00, 0x01, 0x00, 0x01, 0x5C, 0xDD, 0x8D, 0xB4, 0x00, 0x00, 0x00, 0x00, 0x49,
 228 |         0x45, 0x4E, 0x44, 0xAE, 0x42, 0x60, 0x82,
 229 |     ];
 230 |     let img_path = temp_dir.path().join("test.png");
 231 |     fs::write(&img_path, png_data).expect("Failed to write image file");
 232 | 
 233 |     // Create router with the markdown file (single-file mode)
 234 |     let base_dir = temp_dir.path().to_path_buf();
 235 |     let tracked_files = vec![md_path];
 236 |     let is_directory_mode = false;
 237 |     let router =
 238 |         new_router(base_dir, tracked_files, is_directory_mode).expect("Failed to create router");
 239 |     let server = TestServer::new(router).expect("Failed to create test server");
 240 | 
 241 |     // Test that markdown includes img tag
 242 |     let response = server.get("/").await;
 243 |     assert_eq!(response.status_code(), 200);
 244 |     let body = response.text();
 245 |     assert!(body.contains("<img src=\"test.png\" alt=\"Test Image\"")); 290 | <div>This should be rendered as HTML, not escaped</div> 291 | <span style="color: red">Red text</span> 292 | 293 | 294 | Regular **markdown** still works. 295 | "#; 296 | 297 | let (server, _temp_file) = create_test_server(markdown_content).await; 298 | 299 | let response = server.get("/").await; 300 | 301 | assert_eq!(response.status_code(), 200); 302 | let body = response.text(); 303 | 304 | // HTML tags should be rendered, not escaped 305 | assert!(body.contains(r#"<div>"#)); 306 | assert!(body.contains(r#"<span style="color: red">"#)); 307 | assert!(body.contains("This should be rendered as HTML, not escaped")); 308 | 309 | // Should not contain escaped HTML 310 | assert!(!body.contains("&lt;div")); 311 | assert!(!body.contains("&gt;")); 312 | 313 | // Regular markdown should still work 314 | assert!(body.contains("<strong>markdown</strong>")); 315 | } 316 | 317 | #[tokio::test] 318 | async fn test_mermaid_diagram_detection_and_script_injection() { 319 | let markdown_content = r#"# Mermaid Test 320 | 321 | Regular content here. 322 | 323 | ```mermaid 324 | graph TD 325 | A[Start] --> B{Decision} 326 | B -->|Yes| C[End] 327 | B -->|No| D[Continue] 328 | ``` 329 | 330 | More regular content. 331 | 332 | ```javascript 333 | // This is a regular code block, not mermaid 334 | console.log("Hello World"); 335 | ``` 336 | "#; 337 | 338 | let (server, _temp_file) = create_test_server(markdown_content).await; 339 | 340 | let response = server.get("/").await; 341 | 342 | assert_eq!(response.status_code(), 200); 343 | let body = response.text(); 344 | 345 | // Should contain the mermaid code block with language-mermaid class 346 | assert!(body.contains(r#"class="language-mermaid""#)); 347 | assert!(body.contains("graph TD")); 348 | 349 | // Check for HTML-encoded or raw content (content might be HTML-encoded) 350 | let has_raw_content = body.contains("A[Start] --> B{Decision}"); 351 | let has_encoded_content = body.contains("A[Start] --&gt; B{Decision}"); 352 | assert!( 353 | has_raw_content || has_encoded_content, 354 | "Expected mermaid content not found in body" 355 | ); 356 | 357 | // Should inject the local Mermaid script when mermaid blocks are detected 358 | assert!(body.contains(r#"<script src="/mermaid.min.js">"#)); 359 | 360 | // Should contain the Mermaid initialization functions 361 | assert!(body.contains("function initMermaid()")); 362 | assert!(body.contains("function transformMermaidCodeBlocks()")); 363 | assert!(body.contains("function getMermaidTheme()")); 364 | 365 | // Should contain regular JavaScript code block without mermaid treatment 366 | assert!(body.contains(r#"class="language-javascript""#)); 367 | assert!(body.contains("console.log")); 368 | } 369 | 370 | #[tokio::test] 371 | async fn test_no_mermaid_script_injection_without_mermaid_blocks() { 372 | let markdown_content = r#"# No Mermaid Test 373 | 374 | This content has no mermaid diagrams. 375 | 376 | ```javascript 377 | console.log("Hello World"); 378 | ``` 379 | 380 | ```bash 381 | echo "Regular code block" 382 | ``` 383 | 384 | Just regular markdown content.
385 | "#; 386 | 387 | let (server, _temp_file) = create_test_server(markdown_content).await; 388 | 389 | let response = server.get("/").await; 390 | 391 | assert_eq!(response.status_code(), 200); 392 | let body = response.text(); 393 | 394 | // Should NOT inject the Mermaid CDN script when no mermaid blocks are present 395 | assert!(!body.contains(r#""#)); 396 | 397 | // Should still contain the Mermaid initialization functions (they're always present) 398 | assert!(body.contains("function initMermaid()")); 399 | 400 | // Should contain regular code blocks 401 | assert!(body.contains(r#"class="language-javascript""#)); 402 | assert!(body.contains(r#"class="language-bash""#)); 403 | } 404 | 405 | #[tokio::test] 406 | async fn test_multiple_mermaid_diagrams() { 407 | let markdown_content = r#"# Multiple Mermaid Diagrams 408 | 409 | ## Flowchart 410 | ```mermaid 411 | graph LR 412 | A --> B 413 | ``` 414 | 415 | ## Sequence Diagram 416 | ```mermaid 417 | sequenceDiagram 418 | Alice->>Bob: Hello 419 | Bob-->>Alice: Hi 420 | ``` 421 | 422 | ## Class Diagram 423 | ```mermaid 424 | classDiagram 425 | Animal <|-- Duck 426 | ``` 427 | "#; 428 | 429 | let (server, _temp_file) = create_test_server(markdown_content).await; 430 | 431 | let response = server.get("/").await; 432 | 433 | assert_eq!(response.status_code(), 200); 434 | let body = response.text(); 435 | 436 | // Should detect all three mermaid blocks 437 | let mermaid_occurrences = body.matches(r#"class="language-mermaid""#).count(); 438 | assert_eq!(mermaid_occurrences, 3); 439 | 440 | // Should contain content from all diagrams 441 | assert!(body.contains("graph LR")); 442 | assert!(body.contains("sequenceDiagram")); 443 | assert!(body.contains("classDiagram")); 444 | 445 | // Check for HTML-encoded or raw content 446 | assert!(body.contains("A --> B") || body.contains("A --> B")); 447 | assert!(body.contains("Alice->>Bob") || body.contains("Alice->>Bob")); 448 | assert!(body.contains("Animal <|-- Duck") || body.contains("Animal <|-- Duck")); 449 | 450 | // Should inject the Mermaid script only once 451 | let script_occurrences = body 452 | .matches(r#""#) 453 | .count(); 454 | assert_eq!(script_occurrences, 1); 455 | } 456 | 457 | #[tokio::test] 458 | async fn test_mermaid_js_etag_caching() { 459 | let (server, _temp_file) = create_test_server("# Test").await; 460 | 461 | // First request - should return 200 with ETag 462 | let response = server.get("/mermaid.min.js").await; 463 | assert_eq!(response.status_code(), 200); 464 | 465 | let etag = response.header("etag"); 466 | assert!(!etag.is_empty(), "ETag header should be present"); 467 | 468 | let cache_control = response.header("cache-control"); 469 | let cache_control_str = cache_control.to_str().unwrap(); 470 | assert!(cache_control_str.contains("public")); 471 | assert!(cache_control_str.contains("no-cache")); 472 | 473 | let content_type = response.header("content-type"); 474 | assert_eq!(content_type, "application/javascript"); 475 | 476 | // Verify content is not empty 477 | assert!(!response.as_bytes().is_empty()); 478 | 479 | // Second request with matching ETag - should return 304 480 | let response_304 = server 481 | .get("/mermaid.min.js") 482 | .add_header( 483 | axum::http::header::IF_NONE_MATCH, 484 | axum::http::HeaderValue::from_str(etag.to_str().unwrap()).unwrap(), 485 | ) 486 | .await; 487 | 488 | assert_eq!(response_304.status_code(), 304); 489 | assert_eq!(response_304.header("etag"), etag); 490 | 491 | // Body should be empty for 304 492 | 
assert!(response_304.as_bytes().is_empty()); 493 | 494 | // Request with non-matching ETag - should return 200 495 | let response_200 = server 496 | .get("/mermaid.min.js") 497 | .add_header( 498 | axum::http::header::IF_NONE_MATCH, 499 | axum::http::HeaderValue::from_static("\"different-etag\""), 500 | ) 501 | .await; 502 | 503 | assert_eq!(response_200.status_code(), 200); 504 | assert!(!response_200.as_bytes().is_empty()); 505 | } 506 | 507 | // Directory mode tests 508 | 509 | #[tokio::test] 510 | async fn test_directory_mode_serves_multiple_files() { 511 | let (server, _temp_dir) = create_directory_server().await; 512 | 513 | // Test accessing first file 514 | let response1 = server.get("/test1.md").await; 515 | assert_eq!(response1.status_code(), 200); 516 | let body1 = response1.text(); 517 | assert!(body1.contains("<h1>Test 1</h1>")); 518 | assert!(body1.contains("Content of test1")); 519 | 520 | // Test accessing second file with .markdown extension 521 | let response2 = server.get("/test2.markdown").await; 522 | assert_eq!(response2.status_code(), 200); 523 | let body2 = response2.text(); 524 | assert!(body2.contains("<h1>Test 2</h1>")); 525 | assert!(body2.contains("Content of test2")); 526 | 527 | // Test accessing third file 528 | let response3 = server.get("/test3.md").await; 529 | assert_eq!(response3.status_code(), 200); 530 | let body3 = response3.text(); 531 | assert!(body3.contains("<h1>Test 3</h1>")); 532 | assert!(body3.contains("Content of test3")); 533 | } 534 | 535 | #[tokio::test] 536 | async fn test_directory_mode_file_not_found() { 537 | let (server, _temp_dir) = create_directory_server().await; 538 | 539 | // Test non-existent markdown file 540 | let response = server.get("/nonexistent.md").await; 541 | assert_eq!(response.status_code(), 404); 542 | } 543 | 544 | #[tokio::test] 545 | async fn test_directory_mode_has_navigation_sidebar() { 546 | let (server, _temp_dir) = create_directory_server().await; 547 | 548 | let response = server.get("/test1.md").await; 549 | assert_eq!(response.status_code(), 200); 550 | let body = response.text(); 551 | 552 | // Check for navigation elements 553 | assert!(body.contains(r#"