├── .cargo └── config.toml ├── .dockerignore ├── .github ├── dependabot.yml └── workflows │ ├── audit.yml │ ├── audit_cron.yml │ ├── cachix.yml │ ├── ci.yml │ └── docker-publish.yml ├── .gitignore ├── Cargo.lock ├── Cargo.toml ├── Dockerfile ├── LICENSE ├── README.md ├── build.rs ├── cliff.toml ├── deny.toml ├── doc └── man │ └── rgit.1.md ├── docker-compose.override.yml.example ├── docker-compose.yml ├── flake.lock ├── flake.nix ├── grammars.nix ├── rustfmt.toml ├── scripts └── docker │ └── entrypoint.sh ├── src ├── database │ ├── indexer.rs │ ├── mod.rs │ └── schema │ │ ├── commit.rs │ │ ├── mod.rs │ │ ├── prefixes.rs │ │ ├── repository.rs │ │ ├── tag.rs │ │ └── tree.rs ├── git.rs ├── layers │ ├── logger.rs │ └── mod.rs ├── main.rs ├── methods │ ├── filters.rs │ ├── index.rs │ ├── mod.rs │ └── repo │ │ ├── about.rs │ │ ├── commit.rs │ │ ├── diff.rs │ │ ├── log.rs │ │ ├── mod.rs │ │ ├── refs.rs │ │ ├── smart_git.rs │ │ ├── snapshot.rs │ │ ├── summary.rs │ │ ├── tag.rs │ │ └── tree.rs ├── syntax_highlight.rs ├── theme.rs └── unified_diff_builder.rs ├── statics ├── README.md ├── favicon.ico └── sass │ ├── _colours.scss │ ├── code.scss │ ├── diff.scss │ ├── style.scss │ ├── tables.scss │ └── util.scss ├── taplo.toml ├── templates ├── base.html ├── index.html ├── partials │ └── file_tree.html └── repo │ ├── about.html │ ├── base.html │ ├── commit.html │ ├── diff.html │ ├── file.html │ ├── log.html │ ├── macros │ ├── breadcrumbs.html │ ├── link.html │ ├── refs.html │ └── sidebar_toggle.html │ ├── refs.html │ ├── summary.html │ ├── tag.html │ └── tree.html ├── themes ├── README ├── github_light.toml └── onedark.toml ├── tree-sitter-grammar-repository ├── Cargo.toml ├── build.rs └── src │ └── lib.rs └── treefmt.nix /.cargo/config.toml: -------------------------------------------------------------------------------- 1 | [build] 2 | rustflags = ["--cfg", "tokio_unstable"] 3 | -------------------------------------------------------------------------------- /.dockerignore: 
-------------------------------------------------------------------------------- 1 | target 2 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | # To get started with Dependabot version updates, you'll need to specify which 2 | # package ecosystems to update and where the package manifests are located. 3 | # Please see the documentation for all configuration options: 4 | # https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates 5 | 6 | version: 2 7 | 8 | updates: 9 | - package-ecosystem: "cargo" 10 | directory: "/" 11 | open-pull-requests-limit: 20 12 | schedule: 13 | interval: "monthly" 14 | -------------------------------------------------------------------------------- /.github/workflows/audit.yml: -------------------------------------------------------------------------------- 1 | name: Security audit 2 | 3 | on: 4 | push: 5 | paths: 6 | - '**/Cargo.toml' 7 | - '**/Cargo.lock' 8 | 9 | jobs: 10 | security_audit: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - uses: actions/checkout@v1 14 | - uses: actions-rs/audit-check@v1 15 | continue-on-error: true 16 | with: 17 | token: ${{ secrets.GITHUB_TOKEN }} 18 | -------------------------------------------------------------------------------- /.github/workflows/audit_cron.yml: -------------------------------------------------------------------------------- 1 | name: Security audit (cron) 2 | 3 | on: 4 | schedule: 5 | - cron: '0 0 * * *' 6 | 7 | jobs: 8 | audit: 9 | runs-on: ubuntu-latest 10 | steps: 11 | - uses: actions/checkout@v2 12 | - uses: actions-rs/audit-check@v1 13 | with: 14 | token: ${{ secrets.GITHUB_TOKEN }} 15 | -------------------------------------------------------------------------------- /.github/workflows/cachix.yml: -------------------------------------------------------------------------------- 1 | name: Cachix 2 | 
3 | on: 4 | push: 5 | branches: 6 | - main 7 | 8 | jobs: 9 | publish: 10 | name: Publish Flake 11 | runs-on: ubuntu-latest 12 | steps: 13 | - name: Checkout sources 14 | uses: actions/checkout@v4 15 | - name: Install nix 16 | uses: cachix/install-nix-action@v30 17 | - name: Authenticate with Cachix 18 | uses: cachix/cachix-action@v15 19 | with: 20 | name: rgit 21 | authToken: ${{ secrets.CACHIX_AUTH_TOKEN }} 22 | - name: Build nix flake 23 | run: nix build -L 24 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | on: [push, pull_request] 2 | 3 | name: CI 4 | 5 | jobs: 6 | nix-matrix: 7 | runs-on: ubuntu-latest 8 | outputs: 9 | matrix: ${{ steps.set-matrix.outputs.matrix }} 10 | steps: 11 | - uses: actions/checkout@v4 12 | - uses: cachix/install-nix-action@v30 13 | - id: set-matrix 14 | name: Generate Nix Matrix 15 | run: | 16 | set -Eeu 17 | matrix="$(nix eval --json '.#githubActions.matrix')" 18 | echo "matrix=$matrix" >> "$GITHUB_OUTPUT" 19 | 20 | check: 21 | name: ${{ matrix.name }} (${{ matrix.system }}) 22 | needs: nix-matrix 23 | runs-on: ${{ matrix.os }} 24 | strategy: 25 | matrix: ${{fromJSON(needs.nix-matrix.outputs.matrix)}} 26 | steps: 27 | - uses: actions/checkout@v4 28 | - uses: cachix/install-nix-action@v30 29 | - name: Authenticate with Cachix 30 | uses: cachix/cachix-action@v15 31 | with: 32 | name: rgit 33 | authToken: ${{ secrets.CACHIX_AUTH_TOKEN }} 34 | - run: nix build -L '.#${{ matrix.attr }}' 35 | -------------------------------------------------------------------------------- /.github/workflows/docker-publish.yml: -------------------------------------------------------------------------------- 1 | name: Docker 2 | 3 | on: 4 | schedule: 5 | - cron: '45 20 * * *' 6 | push: 7 | branches: [ main ] 8 | 9 | env: 10 | # Use docker.io for Docker Hub if empty 11 | REGISTRY: ghcr.io 12 | # github.repository as 
/ 13 | IMAGE_NAME: ${{ github.repository }} 14 | 15 | jobs: 16 | build: 17 | runs-on: ubuntu-latest 18 | permissions: 19 | contents: read 20 | packages: write 21 | steps: 22 | - name: Checkout repository 23 | uses: actions/checkout@v2 24 | - name: Setup Docker buildx 25 | uses: docker/setup-buildx-action@79abd3f86f79a9d68a23c75a09a9a85889262adf 26 | - name: Log into registry ${{ env.REGISTRY }} 27 | if: github.event_name != 'pull_request' 28 | uses: docker/login-action@28218f9b04b4f3f62068d7b6ce6ca5b26e35336c 29 | with: 30 | registry: ${{ env.REGISTRY }} 31 | username: ${{ github.actor }} 32 | password: ${{ secrets.GITHUB_TOKEN }} 33 | - name: Extract Docker metadata 34 | id: meta 35 | uses: docker/metadata-action@98669ae865ea3cffbcbaa878cf57c20bbf1c6c38 36 | with: 37 | images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} 38 | - name: Build and push Docker image 39 | id: build-and-push 40 | uses: docker/build-push-action@ad44023a93711e3deb337508980b4b5e9bcdc5dc 41 | with: 42 | context: . 43 | push: ${{ github.event_name != 'pull_request' }} 44 | tags: ${{ steps.meta.outputs.tags }} 45 | labels: ${{ steps.meta.outputs.labels }} -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | /.idea 3 | result 4 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "rgit" 3 | description = "A gitweb/cgit-like interface" 4 | version = "0.1.5" 5 | edition = "2024" 6 | authors = ["Jordan Doyle "] 7 | license = "WTFPL" 8 | 9 | [workspace] 10 | members = ["tree-sitter-grammar-repository"] 11 | 12 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 13 | 14 | [dependencies] 15 | anyhow = "1.0" 16 | arc-swap = "1.7" 17 | askama = { version = "0.13.0", 
default-features = false, features = [ 18 | "derive", 19 | "std", 20 | ] } 21 | axum = { version = "0.8", default-features = false, features = [ 22 | "query", 23 | "tokio", 24 | "http1", 25 | ] } 26 | axum-extra = { version = "0.10", default-features = false } 27 | basic-toml = "0.1" 28 | bytes = "1.5" 29 | clap = { version = "4.5.37", default-features = false, features = [ 30 | "std", 31 | "cargo", 32 | "derive", 33 | "help", 34 | "usage", 35 | ] } 36 | comrak = { version = "0.38.0", default-features = false } 37 | const-hex = "1.14" 38 | const_format = "0.2" 39 | flate2 = "1.0" 40 | futures-util = "0.3" 41 | gix = { version = "0.71", default-features = false, features = [ 42 | "tracing", 43 | "parallel", 44 | "blob-diff", 45 | "revision", 46 | ] } 47 | hashbrown = { version = "0.14", default-features = false, features = [ 48 | "serde", 49 | "ahash", 50 | ] } 51 | httparse = "1.10" 52 | humantime = "2.2" 53 | itertools = "0.12.1" 54 | md5 = "0.7" 55 | memchr = "2.7" 56 | moka = { version = "0.12.10", features = ["future"] } 57 | path-clean = "1.0.1" 58 | rand = "0.8.5" 59 | rkyv = { version = "0.8", features = [ 60 | "bytecheck", 61 | "alloc", 62 | ], default-features = false } 63 | rocksdb = { version = "0.23", default-features = false, features = ["snappy"] } 64 | serde = { version = "1.0", features = ["derive", "rc"] } 65 | simdutf8 = "0.1.5" 66 | tar = { version = "0.4", default-features = false } 67 | time = { version = "0.3", features = ["serde", "formatting"] } 68 | timeago = { version = "0.4.2", default-features = false } 69 | tokio = { version = "1.44", features = ["full", "tracing"] } 70 | tokio-stream = "0.1" 71 | tokio-util = { version = "0.7.15", features = ["io"] } 72 | tower-http = { version = "0.6", features = ["cors", "timeout"] } 73 | tower-layer = "0.3" 74 | tower-service = "0.3" 75 | tracing = "0.1" 76 | tracing-subscriber = { version = "0.3", default-features = false, features = [ 77 | "env-filter", 78 | "smallvec", 79 | "parking_lot", 80 | 
"fmt", 81 | ] } 82 | tree-sitter-grammar-repository = { path = "./tree-sitter-grammar-repository" } 83 | tree-sitter-highlight = "0.24" 84 | unix_mode = "0.1" 85 | uuid = { version = "1.11", features = ["v4"] } 86 | v_htmlescape = { version = "0.15", features = ["bytes-buf"] } 87 | xxhash-rust = { version = "0.8.15", features = ["const_xxh3", "xxh3"] } 88 | yoke = { version = "0.7.5", features = ["derive"] } 89 | 90 | [features] 91 | zlib-ng = ["flate2/zlib-ng", "gix/zlib-ng"] 92 | 93 | [build-dependencies] 94 | anyhow = "1.0" 95 | rsass = "0.28.0" 96 | 97 | [package.metadata.deb] 98 | section = "web" 99 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM nixos/nix:latest AS builder 2 | 3 | WORKDIR /app 4 | COPY . . 5 | 6 | RUN nix --extra-experimental-features "nix-command flakes" --accept-flake-config build .# 7 | RUN mkdir /tmp/nix-store-closure 8 | RUN cp -R $(nix-store -qR result/) /tmp/nix-store-closure 9 | 10 | FROM scratch 11 | 12 | WORKDIR /app 13 | COPY --from=builder /tmp/nix-store-closure /nix/store 14 | COPY --from=builder /app/result /app 15 | 16 | ENTRYPOINT ["/app/bin/rgit"] 17 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE 2 | Version 2, December 2004 3 | 4 | Copyright (C) 2004 Sam Hocevar 5 | 6 | Everyone is permitted to copy and distribute verbatim or modified 7 | copies of this license document, and changing it is allowed as long 8 | as the name is changed. 9 | 10 | DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE 11 | TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 12 | 13 | 0. You just DO WHAT THE FUCK YOU WANT TO. 
14 | 15 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # rgit 2 | 3 | ## Introduction 4 | 5 | [See it in action!](https://git.inept.dev/) 6 | 7 | A gitweb/cgit-like interface for the modern age. Written in Rust using Axum, gitoxide, Askama and RocksDB. 8 | 9 | Includes a dark mode for late night committing. 10 | 11 | ## Table of Contents 12 | 13 | - [Features](#features) 14 | - [Getting Started](#getting-started) 15 | - [Installation](#installation) 16 | - [Cargo (automatic)](#cargo-automatic) 17 | - [From Source (manually)](#from-source-manually) 18 | - [Usage](#usage) 19 | - [Configuration](#configuration) 20 | - [Repository Description](#repository-description) 21 | - [Repository Owner](#repository-owner) 22 | - [NixOS](#nixos) 23 | - [Docker](#docker) 24 | - [Docker Compose](#docker-compose) 25 | - [Contributing](#contributing) 26 | - [License](#license) 27 | - [Troubleshooting](#troubleshooting) 28 | - [Cloning Repositories](#cloning-repositories) 29 | - [Repository not exported](#repository-not-exported) 30 | - [Launching the Application](#launching-the-application) 31 | - [...is not owned by the current user](#is-not-owned-by-the-current-user) 32 | - [Application Usage](#application-usage) 33 | - [Newly initialized repositories do not appear](#newly-initialized-repositories-do-not-appear) 34 | 35 | ## Features 36 | 37 | - **Efficient Metadata Storage** 38 | [RocksDB][] is used to store all metadata about a repository, including commits, branches, and tags. Metadata is reindexed, and the reindex interval is configurable (default: every 5 minutes), resulting in up to 97% faster load times for large repositories. 39 | 40 | - **On-Demand Loading** 41 | Files, trees, and diffs are loaded using [gitoxide][] directly upon request. A small in-memory cache is included for rendered READMEs and diffs, enhancing performance. 
42 | 43 | - **Dark Mode Support** 44 | Enjoy a dark mode for late-night committing, providing a visually comfortable experience during extended coding sessions. 45 | 46 | [RocksDB]: https://github.com/facebook/rocksdb 47 | [gitoxide]: https://github.com/Byron/gitoxide 48 | 49 | ## Getting Started 50 | 51 | Before you begin, ensure that you have the Rust toolchain and Cargo installed. If you haven't installed them yet, you can do so by following the instructions provided on the official Rust website: 52 | 53 | - [Install Rust](https://www.rust-lang.org/learn/get-started) 54 | 55 | Once you have Rust and Cargo installed, you can proceed with setting up and running the project. 56 | 57 | **Note:** This software is designed to work exclusively with bare Git repositories. Make sure to set up bare repositories beforehand by following the [Git on the Server documentation][]. 58 | 59 | [Git on the Server documentation]: https://git-scm.com/book/en/v2/Git-on-the-Server-Getting-Git-on-a-Server 60 | 61 | ### Installation 62 | 63 | #### Cargo (automatic) 64 | 65 | ```shell 66 | cargo install --git https://github.com/w4/rgit 67 | ``` 68 | 69 | #### From Source (manually) 70 | 71 | Clone the repository and build: 72 | 73 | ```shell 74 | git clone https://github.com/w4/rgit.git 75 | cd rgit 76 | cargo build --release 77 | ``` 78 | 79 | The rgit binary will be found in the `target/release` directory. 80 | 81 | ### Usage 82 | 83 | To get up and running quickly, run rgit with the following: 84 | 85 | ```shell 86 | rgit [::]:3333 /path/to/my-bare-repos -d /tmp/rgit-cache.db 87 | ``` 88 | 89 | **Notes:** 90 | - Repository indexing is recursive. 91 | - The database is quick to generate, so this can be pointed to temporary storage. 92 | 93 | ### Configuration 94 | 95 | #### Repository Description 96 | 97 | To set a repository description, edit the file named `description` inside the bare git repository. Add your desired description text to this file. 
98 | 99 | #### Repository Owner 100 | 101 | To assign an owner to a repository, edit the file named `config` inside the bare git repository and include the following content: 102 | 103 | ```ini 104 | [gitweb] 105 | owner = "Al Gorithm" 106 | ``` 107 | 108 | Replace `Al Gorithm` with the desired owner's name. 109 | 110 | ### NixOS 111 | 112 | Running rgit on NixOS is straightforward, simply import the module into your `flake.nix` 113 | and use the provided service: 114 | 115 | ```nix 116 | { 117 | inputs = { 118 | nixpkgs.url = "github:NixOS/nixpkgs/nixos-23.05"; 119 | 120 | rgit = { 121 | url = "github:w4/rgit"; 122 | inputs.nixpkgs = "nixpkgs"; 123 | }; 124 | }; 125 | 126 | outputs = { nixpkgs, ... }: { 127 | nixosConfigurations.mySystem = nixpkgs.lib.nixosSystem { 128 | modules = [ 129 | rgit.nixosModules.default 130 | { 131 | services.rgit = { 132 | enable = true; 133 | bindAddress = "[::]:3333"; 134 | dbStorePath = "/tmp/rgit.db"; 135 | repositoryStorePath = "/path/to/my-bare-repos"; 136 | }; 137 | } 138 | ... 139 | ]; 140 | }; 141 | }; 142 | } 143 | ``` 144 | 145 | ### Docker 146 | 147 | Running rgit in Docker is straightforward. Follow these steps, ensuring that your repository directory is correctly mounted: 148 | 149 | ```shell 150 | docker run --mount type=bind,source=/path/to/my-bare-repos,target=/git \ 151 | --user $UID:$GID \ 152 | -it ghcr.io/w4/rgit:main 153 | ``` 154 | 155 | **Note**: Replace `$UID` and `$GID` with the UID and GID of the user that owns the directory containing your repositories. If these values are incorrect, errors will occur. Learn how to find the UID of a user [here](https://linuxhandbook.com/uid-linux/). 156 | 157 | #### Docker Compose 158 | 159 | An example `docker-compose.yml` is provided for those who prefer using Compose. To configure 160 | the UID and GID, the user can be specified in `docker-compose.override.yml`. 161 | 162 | An example override file has been has been provided with the repository. 
To use it, remove the 163 | `.example` extension from `docker-compose.override.yml.example`, and adjust the UID and GID to 164 | match the user that owns the directory containing your repositories. 165 | 166 | To configure automatic refresh in Docker, an environment variable is also provided. 167 | 168 | ```yml 169 | version: '3' 170 | services: 171 | rgit: 172 | image: ghcr.io/w4/rgit:main 173 | command: 174 | - "[::]:8000" 175 | - /git 176 | - -d /tmp/rgit-cache.db 177 | volumes: 178 | - /volume/git:/git 179 | ports: 180 | - 8000:8000 181 | environment: 182 | - REFRESH_INTERVAL=5m 183 | restart: unless-stopped 184 | ``` 185 | 186 | Afterwards, bring up the container with `docker-compose up` to make sure everything works. 187 | 188 | ## Contributing 189 | 190 | Pull requests are welcome via GitHub or [`git-send-email`](https://git-scm.com/docs/git-send-email). 191 | 192 | ## License 193 | 194 | rgit is licensed under the [WTFPL](LICENSE). 195 | 196 | ## Troubleshooting 197 | 198 | ### Cloning Repositories 199 | 200 | #### Repository not exported 201 | 202 | **Symptom:** 203 | When attempting to clone repositories via HTTPS, you encounter the error message: 204 | 205 | ``` 206 | Git returned an error: Repository not exported 207 | ``` 208 | 209 | **Solution:** 210 | Create a file named `git-daemon-export-ok` in the bare git repository. This file signals to the git daemon that the repository is [exportable][]. 211 | 212 | [exportable]: https://git-scm.com/docs/git-daemon 213 | 214 | ### Launching the Application 215 | 216 | #### ...is not owned by the current user 217 | 218 | **Symptom:** 219 | When launching the application, you receive the error message: 220 | 221 | ``` 222 | repository path '/git/path/to/my/repository.git/' is not owned by the current user 223 | ``` 224 | 225 | **Solution:** 226 | Ensure that the user launching `rgit` or the Docker container has the same permissions as the user that owns the repositories directory. 
227 | 228 | ### Application Usage 229 | 230 | #### Newly initialized repositories do not appear 231 | 232 | **Symptom:** 233 | When using the application, a newly initialized bare repository without commits does not appear in the list. 234 | 235 | **Solution:** 236 | Run the following command inside the repository to initialize it: 237 | 238 | ```shell 239 | git pack-refs --all 240 | ``` 241 | 242 | Alternatively, push a commit with at least one file to the repository. This will also make the repository appear in the list. 243 | -------------------------------------------------------------------------------- /build.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | io::Write, 3 | path::{Path, PathBuf}, 4 | }; 5 | 6 | use anyhow::Context; 7 | 8 | #[derive(Copy, Clone)] 9 | pub struct Paths<'a> { 10 | statics_in_dir: &'a Path, 11 | statics_out_dir: &'a Path, 12 | } 13 | 14 | fn main() { 15 | if let Err(e) = run() { 16 | eprintln!("An error occurred within the rgit build script:\n\n{:?}", e); 17 | std::process::exit(1); 18 | } 19 | } 20 | 21 | fn run() -> anyhow::Result<()> { 22 | let manifest_dir = 23 | PathBuf::from(std::env::var("CARGO_MANIFEST_DIR").context("CARGO_MANIFEST_DIR not set")?); 24 | let statics_in_dir = manifest_dir.join("statics"); 25 | 26 | let out_dir = PathBuf::from(std::env::var("OUT_DIR").context("OUT_DIR not set by rustc")?); 27 | let statics_out_dir = out_dir.join("statics"); 28 | 29 | let paths = Paths { 30 | statics_in_dir: &statics_in_dir, 31 | statics_out_dir: &statics_out_dir, 32 | }; 33 | 34 | build_scss(paths).context("Failed to build CSS stylesheets")?; 35 | 36 | Ok(()) 37 | } 38 | 39 | fn build_scss(paths: Paths) -> anyhow::Result<()> { 40 | let in_dir = paths.statics_in_dir.join("sass"); 41 | let out_dir = paths.statics_out_dir.join("css"); 42 | std::fs::create_dir_all(&out_dir).context("Failed to create output directory")?; 43 | 44 | println!("cargo:rerun-if-changed={}", 
in_dir.display()); 45 | 46 | let input_file = in_dir.join("style.scss"); 47 | let output_file = out_dir.join("style.css"); 48 | let format = rsass::output::Format { 49 | style: rsass::output::Style::Compressed, 50 | ..rsass::output::Format::default() 51 | }; 52 | 53 | let output_content = 54 | rsass::compile_scss_path(&input_file, format).context("Failed to compile SASS")?; 55 | 56 | let mut output_file = std::fs::OpenOptions::new() 57 | .write(true) 58 | .create(true) 59 | .truncate(true) 60 | .open(output_file) 61 | .context("Failed to open output file")?; 62 | output_file 63 | .write_all(&output_content) 64 | .context("Failed to write compiled CSS to output")?; 65 | 66 | Ok(()) 67 | } 68 | -------------------------------------------------------------------------------- /cliff.toml: -------------------------------------------------------------------------------- 1 | [remote.github] 2 | owner = "w4" 3 | repo = "rgit" 4 | 5 | [changelog] 6 | body = """ 7 | ## What's Changed 8 | 9 | {%- if version %} in {{ version }}{%- endif -%} 10 | {% for commit in commits %} 11 | {% if commit.remote.pr_title -%} 12 | {%- set commit_message = commit.remote.pr_title -%} 13 | {%- else -%} 14 | {%- set commit_message = commit.message -%} 15 | {%- endif -%} 16 | * [{{ commit_message | split(pat="\n") | first | trim }}]({{ self::remote_url() }}/commit/{{ commit.id }})\ 17 | {% if commit.remote.username %} by @{{ commit.remote.username }}{% else %} by {{ commit.author.name }}{%- endif -%} 18 | {% if commit.remote.pr_number %} in \ 19 | [#{{ commit.remote.pr_number }}]({{ self::remote_url() }}/pull/{{ commit.remote.pr_number }}) \ 20 | {%- endif %} 21 | {%- endfor -%} 22 | 23 | {%- if github -%} 24 | {% if github.contributors | filter(attribute="is_first_time", value=true) | length != 0 %} 25 | {% raw %}\n{% endraw -%} 26 | ## New Contributors 27 | {%- endif %}\ 28 | {% for contributor in github.contributors | filter(attribute="is_first_time", value=true) %} 29 | * @{{ 
contributor.username }} made their first contribution 30 | {%- if contributor.pr_number %} in \ 31 | [#{{ contributor.pr_number }}]({{ self::remote_url() }}/pull/{{ contributor.pr_number }}) \ 32 | {%- endif %} 33 | {%- endfor -%} 34 | {%- endif -%} 35 | 36 | {% if version %} 37 | {% if previous.version %} 38 | **Full Changelog**: {{ self::remote_url() }}/compare/{{ previous.version }}...{{ version }} 39 | {% endif %} 40 | {% else -%} 41 | {% raw %}\n{% endraw %} 42 | {% endif %} 43 | 44 | {%- macro remote_url() -%} 45 | https://github.com/{{ remote.github.owner }}/{{ remote.github.repo }} 46 | {%- endmacro -%} 47 | """ 48 | trim = true 49 | footer = """ 50 | 51 | """ 52 | postprocessors = [] 53 | 54 | [git] 55 | conventional_commits = false 56 | filter_unconventional = false 57 | split_commits = false 58 | commit_preprocessors = [{ pattern = '\((\w+\s)?#([0-9]+)\)', replace = "" }] 59 | filter_commits = false 60 | topo_order = false 61 | sort_commits = "newest" 62 | -------------------------------------------------------------------------------- /deny.toml: -------------------------------------------------------------------------------- 1 | [graph] 2 | targets = [] 3 | all-features = false 4 | no-default-features = false 5 | 6 | [output] 7 | feature-depth = 1 8 | 9 | [advisories] 10 | ignore = [] 11 | 12 | [licenses] 13 | allow = [ 14 | "MIT", 15 | "Apache-2.0", 16 | "Unicode-3.0", 17 | "WTFPL", 18 | "BSL-1.0", 19 | "CC0-1.0", 20 | "BSD-3-Clause", 21 | "ISC", 22 | "Zlib", 23 | "BSD-2-Clause", 24 | ] 25 | confidence-threshold = 0.8 26 | exceptions = [] 27 | 28 | [bans] 29 | multiple-versions = "deny" 30 | wildcards = "allow" 31 | highlight = "all" 32 | workspace-default-features = "allow" 33 | external-default-features = "allow" 34 | allow = [] 35 | deny = [] 36 | skip = [ 37 | { crate = "windows-sys@0.52.0", reason = "gix pulls in two separate versions" }, 38 | { crate = "zerocopy@0.7.35", reason = "gix pulls in old version of hashbrown" }, 39 | ] 40 | skip-tree = 
[ 41 | { name = "matchers", reason = "tracing-subscriber's env-filter pulls in an ancient regex version" }, 42 | { crate = "thiserror@1.0.69", reason = "https://github.com/moka-rs/moka/pull/506" }, 43 | { crate = "bindgen@0.69.5", reason = "rocksdb still using old version" }, 44 | ] 45 | 46 | [sources] 47 | unknown-registry = "warn" 48 | unknown-git = "warn" 49 | allow-registry = ["https://github.com/rust-lang/crates.io-index"] 50 | allow-git = [] 51 | -------------------------------------------------------------------------------- /doc/man/rgit.1.md: -------------------------------------------------------------------------------- 1 | % RGIT(1) version 0.1.2 | User Commands 2 | % 3 | % 11 January 2024 4 | 5 | NAME 6 | ==== 7 | 8 | rgit - a gitweb interface written in rust 9 | 10 | SYNOPSIS 11 | ======== 12 | 13 | | **rgit** \[*OPTIONS*] **\--db-store** *path* *bind_address* *scan_path* 14 | 15 | DESCRIPTION 16 | =========== 17 | 18 | A gitweb/cgit-like interface for the modern age. Written in Rust using Axum, gitoxide, Askama, and RocksDB. 19 | 20 | _bind_address_ 21 | 22 | : Specifies the network address and port to serve the application on. 23 | (Required) 24 | 25 | Example: 26 | 27 | : _0.0.0.0:3333_ (localhost, port 3333 on IPv4) 28 | 29 | _[::]:3333_ (localhost, port 3333 on IPv6) 30 | 31 | _scan_path_ 32 | 33 | : Specifies the root directory where git repositories reside. Scans recursively. 34 | (Required) 35 | 36 | For information about bare git repositories, see the manual for **git-init**(1). 37 | 38 | Example: 39 | 40 | : _/srv/git_ 41 | 42 | _$HOME/git_ 43 | 44 | 45 | OPTIONS 46 | ======= 47 | 48 | **-d** _path_, **\--db-store** _path_ 49 | 50 | : Path to a directory in which the RocksDB database should be stored, will be created if it doesn't already exist. 51 | 52 | The RocksDB database is very quick to generate, so this can be pointed to temporary storage. 
(Required) 53 | 54 | Example: 55 | 56 | : **\--db-store** _/tmp/rgit-cache.db_ 57 | 58 | **\--refresh-interval** _interval_ 59 | 60 | : Configures the metadata refresh interval. This parameter accepts human-readable time formats. 61 | 62 | Default: _5m_ 63 | 64 | Example: 65 | 66 | : **\--refresh-interval** _60s_ (refresh every 60 seconds) 67 | 68 | **\--refresh-interval** _never_ (refresh only on server start) 69 | 70 | Documentation: 71 | 72 | : https://docs.rs/humantime/latest/humantime/ 73 | 74 | EXAMPLES 75 | ======== 76 | 77 | ``` 78 | $ rgit -d /tmp/rgit-cache.db [::]:3333 /srv/git 79 | $ rgit --db-store /tmp/rgit-cache.db 0.0.0.0:3333 /srv/git 80 | $ rgit -d /tmp/rgit-cache.db [::]:3333 /srv/git --refresh-interval 12h 81 | 82 | ``` 83 | 84 | BUGS 85 | ==== 86 | 87 | https://github.com/w4/rgit/issues 88 | 89 | AUTHORS 90 | ======= 91 | 92 | Jordan Doyle \ 93 | 94 | REPOSITORY 95 | ========== 96 | 97 | https://git.inept.dev/~doyle/rgit.git 98 | 99 | https://github.com/w4/rgit 100 | 101 | SEE ALSO 102 | ======== 103 | 104 | **git**(1), 105 | **git-init**(1) 106 | -------------------------------------------------------------------------------- /docker-compose.override.yml.example: -------------------------------------------------------------------------------- 1 | version: '3' 2 | services: 3 | rgit: 4 | image: ghcr.io/w4/rgit:main 5 | user: 1000:1000 6 | command: 7 | - "[::]:8000" 8 | - /git 9 | - -d /tmp/rgit-cache.db 10 | volumes: 11 | - /volume/git:/git 12 | ports: 13 | - 8000:8000 14 | environment: 15 | - REFRESH_INTERVAL=5m 16 | restart: unless-stopped 17 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3' 2 | services: 3 | rgit: 4 | image: ghcr.io/w4/rgit:main 5 | command: 6 | - "[::]:8000" 7 | - /git 8 | - -d /tmp/rgit-cache.db 9 | volumes: 10 | - /volume/git:/git 11 | ports: 12 | - 8000:8000 13 | 
environment: 14 | - REFRESH_INTERVAL=5m 15 | restart: unless-stopped 16 | -------------------------------------------------------------------------------- /flake.lock: -------------------------------------------------------------------------------- 1 | { 2 | "nodes": { 3 | "advisory-db": { 4 | "flake": false, 5 | "locked": { 6 | "lastModified": 1747937073, 7 | "narHash": "sha256-52H8P6jAHEwRvg7rXr4Z7h1KHZivO8T1Z9tN6R0SWJg=", 8 | "owner": "rustsec", 9 | "repo": "advisory-db", 10 | "rev": "bccf313a98c034573ac4170e6271749113343d97", 11 | "type": "github" 12 | }, 13 | "original": { 14 | "owner": "rustsec", 15 | "repo": "advisory-db", 16 | "type": "github" 17 | } 18 | }, 19 | "crane": { 20 | "locked": { 21 | "lastModified": 1748047550, 22 | "narHash": "sha256-t0qLLqb4C1rdtiY8IFRH5KIapTY/n3Lqt57AmxEv9mk=", 23 | "owner": "ipetkov", 24 | "repo": "crane", 25 | "rev": "b718a78696060df6280196a6f992d04c87a16aef", 26 | "type": "github" 27 | }, 28 | "original": { 29 | "owner": "ipetkov", 30 | "repo": "crane", 31 | "type": "github" 32 | } 33 | }, 34 | "helix": { 35 | "flake": false, 36 | "locked": { 37 | "lastModified": 1727654850, 38 | "narHash": "sha256-du6Vy5Yxy6aZFP7ad5guz5GOD/8uMY+Pgse1ZM+K2Jo=", 39 | "owner": "JordanForks", 40 | "repo": "helix", 41 | "rev": "1603715cc91bf6fdffb4aedfb5b76fb69fd10e28", 42 | "type": "github" 43 | }, 44 | "original": { 45 | "owner": "JordanForks", 46 | "repo": "helix", 47 | "type": "github" 48 | } 49 | }, 50 | "nix-github-actions": { 51 | "inputs": { 52 | "nixpkgs": [ 53 | "nixpkgs" 54 | ] 55 | }, 56 | "locked": { 57 | "lastModified": 1737420293, 58 | "narHash": "sha256-F1G5ifvqTpJq7fdkT34e/Jy9VCyzd5XfJ9TO8fHhJWE=", 59 | "owner": "nix-community", 60 | "repo": "nix-github-actions", 61 | "rev": "f4158fa080ef4503c8f4c820967d946c2af31ec9", 62 | "type": "github" 63 | }, 64 | "original": { 65 | "owner": "nix-community", 66 | "repo": "nix-github-actions", 67 | "type": "github" 68 | } 69 | }, 70 | "nixpkgs": { 71 | "locked": { 72 | "lastModified": 
1748159586, 73 | "narHash": "sha256-xeCMAhKjhDjVFsfJcftv+CWcExYo+X8IBUW8L947ww4=", 74 | "owner": "NixOS", 75 | "repo": "nixpkgs", 76 | "rev": "7382d075365a977c4a9c8aa4c5e4abed15f00ee1", 77 | "type": "github" 78 | }, 79 | "original": { 80 | "owner": "NixOS", 81 | "repo": "nixpkgs", 82 | "type": "github" 83 | } 84 | }, 85 | "nixpkgs_2": { 86 | "locked": { 87 | "lastModified": 1745377448, 88 | "narHash": "sha256-jhZDfXVKdD7TSEGgzFJQvEEZ2K65UMiqW5YJ2aIqxMA=", 89 | "owner": "nixos", 90 | "repo": "nixpkgs", 91 | "rev": "507b63021ada5fee621b6ca371c4fca9ca46f52c", 92 | "type": "github" 93 | }, 94 | "original": { 95 | "owner": "nixos", 96 | "ref": "nixpkgs-unstable", 97 | "repo": "nixpkgs", 98 | "type": "github" 99 | } 100 | }, 101 | "root": { 102 | "inputs": { 103 | "advisory-db": "advisory-db", 104 | "crane": "crane", 105 | "helix": "helix", 106 | "nix-github-actions": "nix-github-actions", 107 | "nixpkgs": "nixpkgs", 108 | "treefmt-nix": "treefmt-nix", 109 | "utils": "utils" 110 | } 111 | }, 112 | "systems": { 113 | "locked": { 114 | "lastModified": 1681028828, 115 | "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", 116 | "owner": "nix-systems", 117 | "repo": "default", 118 | "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", 119 | "type": "github" 120 | }, 121 | "original": { 122 | "owner": "nix-systems", 123 | "repo": "default", 124 | "type": "github" 125 | } 126 | }, 127 | "treefmt-nix": { 128 | "inputs": { 129 | "nixpkgs": "nixpkgs_2" 130 | }, 131 | "locked": { 132 | "lastModified": 1747912973, 133 | "narHash": "sha256-XgxghfND8TDypxsMTPU2GQdtBEsHTEc3qWE6RVEk8O0=", 134 | "owner": "numtide", 135 | "repo": "treefmt-nix", 136 | "rev": "020cb423808365fa3f10ff4cb8c0a25df35065a3", 137 | "type": "github" 138 | }, 139 | "original": { 140 | "owner": "numtide", 141 | "repo": "treefmt-nix", 142 | "type": "github" 143 | } 144 | }, 145 | "utils": { 146 | "inputs": { 147 | "systems": "systems" 148 | }, 149 | "locked": { 150 | "lastModified": 1731533236, 151 | 
"narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=", 152 | "owner": "numtide", 153 | "repo": "flake-utils", 154 | "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b", 155 | "type": "github" 156 | }, 157 | "original": { 158 | "owner": "numtide", 159 | "repo": "flake-utils", 160 | "type": "github" 161 | } 162 | } 163 | }, 164 | "root": "root", 165 | "version": 7 166 | } 167 | -------------------------------------------------------------------------------- /flake.nix: -------------------------------------------------------------------------------- 1 | { 2 | inputs = { 3 | nixpkgs.url = "github:NixOS/nixpkgs"; 4 | 5 | crane.url = "github:ipetkov/crane"; 6 | utils.url = "github:numtide/flake-utils"; 7 | treefmt-nix.url = "github:numtide/treefmt-nix"; 8 | 9 | advisory-db = { 10 | url = "github:rustsec/advisory-db"; 11 | flake = false; 12 | }; 13 | 14 | helix = { 15 | url = "github:JordanForks/helix"; 16 | flake = false; 17 | }; 18 | 19 | nix-github-actions = { 20 | url = "github:nix-community/nix-github-actions"; 21 | inputs.nixpkgs.follows = "nixpkgs"; 22 | }; 23 | }; 24 | 25 | outputs = { self, nixpkgs, utils, crane, advisory-db, treefmt-nix, helix, nix-github-actions }: 26 | { 27 | githubActions = nix-github-actions.lib.mkGithubMatrix { 28 | checks = 29 | builtins.mapAttrs 30 | (name: value: if name != "x86_64-linux" then removeAttrs value [ "clippy" "audit" "formatting" "doc" ] else value) 31 | { inherit (self.checks) x86_64-linux aarch64-darwin; }; 32 | }; 33 | } 34 | // 35 | utils.lib.eachDefaultSystem (system: 36 | let 37 | pkgs = import nixpkgs { inherit system; }; 38 | craneLib = crane.mkLib pkgs; 39 | cargoOnlySrc = craneLib.cleanCargoSource ./.; 40 | src = pkgs.lib.fileset.toSource { 41 | root = ./.; 42 | fileset = pkgs.lib.fileset.unions [ 43 | ./.cargo 44 | ./Cargo.toml 45 | ./Cargo.lock 46 | ./tree-sitter-grammar-repository 47 | ./src 48 | ./statics 49 | ./templates 50 | ./themes 51 | ./deny.toml 52 | ./build.rs 53 | ]; 54 | }; 55 | rgit-grammar 
= pkgs.callPackage ./grammars.nix { inherit helix; }; 56 | commonArgs = { 57 | inherit src; 58 | strictDeps = true; 59 | buildInputs = pkgs.lib.optionals pkgs.stdenv.isDarwin [ pkgs.libiconv ]; 60 | nativeBuildInputs = with pkgs; [ cmake clang makeBinaryWrapper ]; 61 | LIBCLANG_PATH = "${pkgs.clang.cc.lib}/lib"; 62 | ROCKSDB_LIB_DIR = "${pkgs.rocksdb}/lib"; 63 | SNAPPY_LIB_DIR = "${pkgs.snappy}/lib"; 64 | }; 65 | cargoArtifacts = craneLib.buildDepsOnly (commonArgs // { src = cargoOnlySrc; }); 66 | buildArgs = commonArgs // { 67 | inherit cargoArtifacts; 68 | buildInputs = [ rgit-grammar ] ++ commonArgs.buildInputs; 69 | TREE_SITTER_GRAMMAR_LIB_DIR = rgit-grammar; 70 | }; 71 | rgit = craneLib.buildPackage (buildArgs // { 72 | doCheck = false; 73 | postInstall = '' 74 | wrapProgram $out/bin/rgit \ 75 | --set PATH ${pkgs.lib.makeBinPath [ pkgs.gitMinimal ]} 76 | ''; 77 | }); 78 | treefmt = treefmt-nix.lib.evalModule pkgs ./treefmt.nix; 79 | in 80 | { 81 | checks = { 82 | build = rgit; 83 | clippy = craneLib.cargoClippy buildArgs; 84 | doc = craneLib.cargoDoc buildArgs; 85 | audit = craneLib.cargoAudit { inherit advisory-db; src = cargoOnlySrc; }; 86 | deny = craneLib.cargoDeny { inherit src; }; 87 | test = craneLib.cargoNextest (buildArgs // { 88 | partitions = 1; 89 | partitionType = "count"; 90 | }); 91 | formatting = treefmt.config.build.check self; 92 | }; 93 | 94 | formatter = treefmt.config.build.wrapper; 95 | 96 | packages.default = rgit; 97 | apps.default = utils.lib.mkApp { drv = rgit; }; 98 | 99 | devShells.default = craneLib.devShell { 100 | checks = self.checks.${system}; 101 | packages = with pkgs; [ rust-analyzer clang ]; 102 | RUST_SRC_PATH = pkgs.rustPlatform.rustLibSrc; 103 | LIBCLANG_PATH = "${pkgs.clang.cc.lib}/lib"; 104 | TREE_SITTER_GRAMMAR_LIB_DIR = rgit-grammar; 105 | ROCKSDB_LIB_DIR = "${pkgs.rocksdb}/lib"; 106 | SNAPPY_LIB_DIR = "${pkgs.snappy}/lib"; 107 | }; 108 | 109 | nixosModules.default = { config, lib, pkgs, ... 
}: 110 | with lib; 111 | let 112 | cfg = config.services.rgit; 113 | in 114 | { 115 | options.services.rgit = { 116 | enable = mkEnableOption "rgit"; 117 | bindAddress = mkOption { 118 | default = "[::]:8333"; 119 | description = "Address and port to listen on"; 120 | type = types.str; 121 | }; 122 | dbStorePath = mkOption { 123 | default = "/tmp/rgit.db"; 124 | description = "Path to store the temporary cache"; 125 | type = types.path; 126 | }; 127 | repositoryStorePath = mkOption { 128 | default = "/git"; 129 | description = "Path to repositories"; 130 | type = types.path; 131 | }; 132 | requestTimeout = mkOption { 133 | default = "10s"; 134 | description = "Timeout for incoming HTTP requests"; 135 | type = types.str; 136 | }; 137 | package = mkOption { 138 | default = rgit; 139 | description = "rgit package to use"; 140 | type = types.package; 141 | }; 142 | }; 143 | 144 | config = mkIf cfg.enable { 145 | users.groups.rgit = { }; 146 | users.users.rgit = { 147 | description = "RGit service user"; 148 | group = "rgit"; 149 | isSystemUser = true; 150 | home = "/git"; 151 | }; 152 | 153 | systemd.services.rgit = { 154 | enable = true; 155 | wantedBy = [ "multi-user.target" ]; 156 | wants = [ "network-online.target" ]; 157 | after = [ "network-online.target" ]; 158 | path = [ pkgs.git ]; 159 | serviceConfig = { 160 | Type = "exec"; 161 | ExecStart = "${cfg.package}/bin/rgit --request-timeout ${cfg.requestTimeout} --db-store ${cfg.dbStorePath} ${cfg.bindAddress} ${cfg.repositoryStorePath}"; 162 | Restart = "on-failure"; 163 | 164 | User = "rgit"; 165 | Group = "rgit"; 166 | 167 | CapabilityBoundingSet = ""; 168 | NoNewPrivileges = true; 169 | PrivateDevices = true; 170 | PrivateTmp = true; 171 | PrivateUsers = true; 172 | PrivateMounts = true; 173 | ProtectHome = true; 174 | ProtectClock = true; 175 | ProtectProc = "noaccess"; 176 | ProcSubset = "pid"; 177 | ProtectKernelLogs = true; 178 | ProtectKernelModules = true; 179 | ProtectKernelTunables = true; 180 | 
ProtectControlGroups = true; 181 | ProtectHostname = true; 182 | RestrictSUIDSGID = true; 183 | RestrictRealtime = true; 184 | RestrictNamespaces = true; 185 | LockPersonality = true; 186 | RemoveIPC = true; 187 | RestrictAddressFamilies = [ "AF_INET" "AF_INET6" ]; 188 | SystemCallFilter = [ "@system-service" "~@privileged" ]; 189 | }; 190 | }; 191 | }; 192 | }; 193 | }); 194 | 195 | nixConfig = { 196 | extra-substituters = [ "https://rgit.cachix.org" ]; 197 | extra-trusted-public-keys = [ "rgit.cachix.org-1:3Wva/GHhrlhbYx+ObbEYQSYq1Yzk8x9OAvEvcYazgL0=" ]; 198 | }; 199 | } 200 | -------------------------------------------------------------------------------- /grammars.nix: -------------------------------------------------------------------------------- 1 | # adapted from https://github.com/helix-editor/helix/blob/217818681ea9bbc7f995c87f8794c46eeb012b1c/grammars.nix 2 | { stdenv 3 | , lib 4 | , runCommand 5 | , includeGrammarIf ? _: true 6 | , grammarOverlays ? [ ] 7 | , helix 8 | , ... 9 | }: 10 | let 11 | languagesConfig = builtins.fromTOML (builtins.readFile "${helix}/languages.toml"); 12 | isGitGrammar = grammar: 13 | builtins.hasAttr "source" grammar 14 | && builtins.hasAttr "git" grammar.source 15 | && builtins.hasAttr "rev" grammar.source; 16 | isGitHubGrammar = grammar: lib.hasPrefix "https://github.com" grammar.source.git; 17 | toGitHubFetcher = url: 18 | let 19 | match = builtins.match "https://github\.com/([^/]*)/([^/]*)/?" url; 20 | in 21 | { 22 | owner = builtins.elemAt match 0; 23 | repo = builtins.elemAt match 1; 24 | }; 25 | # If `use-grammars.only` is set, use only those grammars. 26 | # If `use-grammars.except` is set, use all other grammars. 27 | # Otherwise use all grammars. 
28 | useGrammar = grammar: 29 | if languagesConfig?use-grammars.only then 30 | builtins.elem grammar.name languagesConfig.use-grammars.only 31 | else if languagesConfig?use-grammars.except then 32 | !(builtins.elem grammar.name languagesConfig.use-grammars.except) 33 | else true; 34 | grammarsToUse = builtins.filter useGrammar languagesConfig.grammar; 35 | gitGrammars = builtins.filter isGitGrammar grammarsToUse; 36 | buildGrammar = grammar: 37 | let 38 | gh = toGitHubFetcher grammar.source.git; 39 | sourceGit = builtins.fetchTree { 40 | type = "git"; 41 | url = grammar.source.git; 42 | inherit (grammar.source) rev; 43 | ref = grammar.source.ref or "HEAD"; 44 | shallow = true; 45 | }; 46 | sourceGitHub = builtins.fetchTree { 47 | type = "github"; 48 | inherit (gh) owner; 49 | inherit (gh) repo; 50 | inherit (grammar.source) rev; 51 | }; 52 | source = 53 | if isGitHubGrammar grammar 54 | then sourceGitHub 55 | else sourceGit; 56 | in 57 | stdenv.mkDerivation { 58 | # see https://github.com/NixOS/nixpkgs/blob/fbdd1a7c0bc29af5325e0d7dd70e804a972eb465/pkgs/development/tools/parsing/tree-sitter/grammar.nix 59 | 60 | pname = "tree-sitter-${grammar.name}"; 61 | version = grammar.source.rev; 62 | 63 | src = source; 64 | sourceRoot = 65 | if builtins.hasAttr "subpath" grammar.source then 66 | "source/${grammar.source.subpath}" 67 | else 68 | "source"; 69 | 70 | dontConfigure = true; 71 | 72 | FLAGS = [ 73 | "-Isrc" 74 | "-g" 75 | "-O3" 76 | "-fPIC" 77 | "-fno-exceptions" 78 | "-Wl,-z,relro,-z,now" 79 | ]; 80 | 81 | NAME = "libtree-sitter-${grammar.name}"; 82 | 83 | buildPhase = '' 84 | runHook preBuild 85 | 86 | if [[ -e src/scanner.cc ]]; then 87 | $CXX -c src/scanner.cc -o scanner.o $FLAGS 88 | elif [[ -e src/scanner.c ]]; then 89 | $CC -c src/scanner.c -o scanner.o $FLAGS 90 | fi 91 | 92 | $CC -c src/parser.c -o parser.o $FLAGS 93 | $CXX -shared${lib.optionalString stdenv.isDarwin " -install_name $out/$NAME.so"} -o $NAME.so *.o 94 | 95 | runHook postBuild 96 | ''; 97 | 
98 | installPhase = '' 99 | runHook preInstall 100 | mkdir $out 101 | mv $NAME.so $out/ 102 | runHook postInstall 103 | ''; 104 | 105 | # Strip failed on darwin: strip: error: symbols referenced by indirect symbol table entries that can't be stripped 106 | fixupPhase = lib.optionalString stdenv.isLinux '' 107 | runHook preFixup 108 | $STRIP $out/$NAME.so 109 | runHook postFixup 110 | ''; 111 | }; 112 | grammarsToBuild = builtins.filter includeGrammarIf gitGrammars; 113 | builtGrammars = builtins.map 114 | (grammar: { 115 | inherit (grammar) name; 116 | value = buildGrammar grammar; 117 | }) 118 | grammarsToBuild; 119 | extensibleGrammars = 120 | lib.makeExtensible (self: builtins.listToAttrs builtGrammars); 121 | overlayedGrammars = lib.pipe extensibleGrammars 122 | (builtins.map (overlay: grammar: grammar.extend overlay) grammarOverlays); 123 | grammarLinks = lib.mapAttrsToList 124 | (name: artifact: "ln -s ${artifact}/libtree-sitter-${name}.so $out/libtree-sitter-${name}.so") 125 | (lib.filterAttrs (n: v: lib.isDerivation v) overlayedGrammars); 126 | in 127 | runCommand "consolidated-rit-grammars" { } '' 128 | mkdir -p $out 129 | ${builtins.concatStringsSep "\n" grammarLinks} 130 | ln -s "${helix}/languages.toml" "$out/languages.toml" 131 | ln -s "${helix}/runtime/queries" "$out/queries" 132 | '' 133 | -------------------------------------------------------------------------------- /rustfmt.toml: -------------------------------------------------------------------------------- 1 | edition = "2021" 2 | ## not yet supported on stable 3 | #imports_granularity = "Crate" 4 | newline_style = "Unix" 5 | ## not yet supported on stable 6 | #group_imports = "StdExternalCrate" 7 | use_field_init_shorthand = true 8 | -------------------------------------------------------------------------------- /scripts/docker/entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | if [ -z ${REFRESH_INTERVAL+x} ]; 4 | 
then 5 | ./rgit "[::]:8000" /git -d /tmp/rgit-cache.db; 6 | else 7 | ./rgit "[::]:8000" /git -d /tmp/rgit-cache.db --refresh-interval "$REFRESH_INTERVAL"; 8 | fi 9 | -------------------------------------------------------------------------------- /src/database/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod indexer; 2 | pub mod schema; 3 | -------------------------------------------------------------------------------- /src/database/schema/commit.rs: -------------------------------------------------------------------------------- 1 | use std::sync::Arc; 2 | 3 | use anyhow::Context; 4 | use gix::{ObjectId, actor::SignatureRef, objs::CommitRef}; 5 | use rkyv::{Archive, Serialize}; 6 | use rocksdb::{IteratorMode, ReadOptions, WriteBatch}; 7 | use time::{OffsetDateTime, UtcOffset}; 8 | use tracing::debug; 9 | use yoke::{Yoke, Yokeable}; 10 | 11 | use crate::database::schema::{ 12 | Yoked, 13 | prefixes::{COMMIT_COUNT_FAMILY, COMMIT_FAMILY}, 14 | repository::RepositoryId, 15 | }; 16 | 17 | #[derive(Serialize, Archive, Debug, Yokeable)] 18 | pub struct Commit { 19 | pub summary: String, 20 | pub message: String, 21 | pub author: Author, 22 | pub committer: Author, 23 | pub hash: [u8; 20], 24 | pub tree: u64, 25 | } 26 | 27 | impl Commit { 28 | pub fn new( 29 | oid: ObjectId, 30 | commit: &CommitRef<'_>, 31 | author: SignatureRef<'_>, 32 | committer: SignatureRef<'_>, 33 | tree: u64, 34 | ) -> Result { 35 | let message = commit.message(); 36 | 37 | Ok(Self { 38 | summary: message.summary().to_string(), 39 | message: message.body.map(ToString::to_string).unwrap_or_default(), 40 | committer: committer.try_into()?, 41 | author: author.try_into()?, 42 | hash: match oid { 43 | ObjectId::Sha1(d) => d, 44 | }, 45 | tree, 46 | }) 47 | } 48 | 49 | pub fn insert(&self, tree: &CommitTree, id: u64, tx: &mut WriteBatch) -> anyhow::Result<()> { 50 | tree.insert(id, self, tx) 51 | } 52 | } 53 | 54 | #[derive(Serialize, Archive, 
Debug)] 55 | pub struct Author { 56 | pub name: String, 57 | pub email: String, 58 | pub time: (i64, i32), 59 | } 60 | 61 | impl ArchivedAuthor { 62 | pub fn time(&self) -> OffsetDateTime { 63 | OffsetDateTime::from_unix_timestamp(self.time.0.to_native()) 64 | .unwrap() 65 | .to_offset(UtcOffset::from_whole_seconds(self.time.1.to_native()).unwrap()) 66 | } 67 | } 68 | 69 | impl TryFrom> for Author { 70 | type Error = anyhow::Error; 71 | 72 | fn try_from(author: SignatureRef<'_>) -> Result { 73 | Ok(Self { 74 | name: author.name.to_string(), 75 | email: author.email.to_string(), 76 | time: (author.time.seconds, author.time.offset), 77 | }) 78 | } 79 | } 80 | 81 | pub struct CommitTree { 82 | db: Arc, 83 | pub prefix: Box<[u8]>, 84 | } 85 | 86 | pub type YokedCommit = Yoked<&'static ::Archived>; 87 | 88 | impl CommitTree { 89 | pub(super) fn new(db: Arc, repository: RepositoryId, reference: &str) -> Self { 90 | let mut prefix = Vec::with_capacity(std::mem::size_of::() + reference.len() + 1); 91 | prefix.extend_from_slice(&repository.to_be_bytes()); 92 | prefix.extend_from_slice(reference.as_bytes()); 93 | prefix.push(b'\0'); 94 | 95 | Self { 96 | db, 97 | prefix: prefix.into_boxed_slice(), 98 | } 99 | } 100 | 101 | pub fn drop_commits(&self) -> anyhow::Result<()> { 102 | let mut to = self.prefix.clone(); 103 | *to.last_mut().unwrap() += 1; 104 | 105 | let commit_cf = self 106 | .db 107 | .cf_handle(COMMIT_FAMILY) 108 | .context("commit column family missing")?; 109 | self.db.delete_range_cf(commit_cf, &self.prefix, &to)?; 110 | 111 | let commit_count_cf = self 112 | .db 113 | .cf_handle(COMMIT_COUNT_FAMILY) 114 | .context("missing column family")?; 115 | self.db.delete_cf(commit_count_cf, &self.prefix)?; 116 | 117 | Ok(()) 118 | } 119 | 120 | pub fn update_counter(&self, count: u64, tx: &mut WriteBatch) -> anyhow::Result<()> { 121 | let cf = self 122 | .db 123 | .cf_handle(COMMIT_COUNT_FAMILY) 124 | .context("missing column family")?; 125 | 126 | tx.put_cf(cf, 
&self.prefix, count.to_be_bytes()); 127 | 128 | Ok(()) 129 | } 130 | 131 | pub fn len(&self) -> anyhow::Result { 132 | let cf = self 133 | .db 134 | .cf_handle(COMMIT_COUNT_FAMILY) 135 | .context("missing column family")?; 136 | 137 | let Some(res) = self.db.get_pinned_cf(cf, &self.prefix)? else { 138 | return Ok(0); 139 | }; 140 | 141 | let out: [u8; std::mem::size_of::()] = res.as_ref().try_into()?; 142 | Ok(u64::from_be_bytes(out)) 143 | } 144 | 145 | fn insert(&self, id: u64, commit: &Commit, tx: &mut WriteBatch) -> anyhow::Result<()> { 146 | let cf = self 147 | .db 148 | .cf_handle(COMMIT_FAMILY) 149 | .context("missing column family")?; 150 | 151 | let mut key = self.prefix.to_vec(); 152 | key.extend_from_slice(&id.to_be_bytes()); 153 | 154 | tx.put_cf(cf, key, rkyv::to_bytes::(commit)?); 155 | 156 | Ok(()) 157 | } 158 | 159 | pub fn fetch_latest_one(&self) -> Result, anyhow::Error> { 160 | let mut key = self.prefix.to_vec(); 161 | key.extend_from_slice(&(self.len()?.saturating_sub(1)).to_be_bytes()); 162 | 163 | let cf = self 164 | .db 165 | .cf_handle(COMMIT_FAMILY) 166 | .context("missing column family")?; 167 | 168 | let Some(value) = self.db.get_cf(cf, key)? 
else { 169 | return Ok(None); 170 | }; 171 | 172 | Yoke::try_attach_to_cart(Box::from(value), |value| { 173 | rkyv::access::<_, rkyv::rancor::Error>(value) 174 | }) 175 | .context("Failed to deserialize commit") 176 | .map(Some) 177 | } 178 | 179 | pub fn fetch_latest( 180 | &self, 181 | amount: u64, 182 | offset: u64, 183 | ) -> Result, anyhow::Error> { 184 | let cf = self 185 | .db 186 | .cf_handle(COMMIT_FAMILY) 187 | .context("missing column family")?; 188 | 189 | let latest_commit_id = self.len()?; 190 | debug!("Searching from latest commit {latest_commit_id}"); 191 | 192 | let mut start_key = self.prefix.to_vec(); 193 | start_key.extend_from_slice( 194 | &latest_commit_id 195 | .saturating_sub(offset) 196 | .saturating_sub(amount) 197 | .to_be_bytes(), 198 | ); 199 | 200 | let mut end_key = self.prefix.to_vec(); 201 | end_key.extend_from_slice(&(latest_commit_id.saturating_sub(offset)).to_be_bytes()); 202 | 203 | let mut opts = ReadOptions::default(); 204 | opts.set_iterate_range(start_key.as_slice()..end_key.as_slice()); 205 | 206 | opts.set_prefix_same_as_start(true); 207 | 208 | self.db 209 | .iterator_cf_opt(cf, opts, IteratorMode::End) 210 | .map(|v| { 211 | Yoke::try_attach_to_cart(v.context("failed to read commit")?.1, |data| { 212 | rkyv::access::<_, rkyv::rancor::Error>(data).context("failed to deserialize") 213 | }) 214 | }) 215 | .collect::, anyhow::Error>>() 216 | } 217 | } 218 | -------------------------------------------------------------------------------- /src/database/schema/mod.rs: -------------------------------------------------------------------------------- 1 | #![allow(clippy::module_name_repetitions)] 2 | 3 | use yoke::Yoke; 4 | 5 | pub mod commit; 6 | pub mod prefixes; 7 | pub mod repository; 8 | pub mod tag; 9 | pub mod tree; 10 | 11 | pub type Yoked = Yoke>; 12 | 13 | pub const SCHEMA_VERSION: &str = "5"; 14 | -------------------------------------------------------------------------------- /src/database/schema/prefixes.rs: 
-------------------------------------------------------------------------------- 1 | pub const COMMIT_FAMILY: &str = "commit"; 2 | pub const COMMIT_COUNT_FAMILY: &str = "commit_count"; 3 | pub const REPOSITORY_FAMILY: &str = "repository"; 4 | pub const TAG_FAMILY: &str = "tag"; 5 | pub const REFERENCE_FAMILY: &str = "repository_refs"; 6 | pub const TREE_FAMILY: &str = "tree"; 7 | pub const TREE_ITEM_FAMILY: &str = "tree_item"; 8 | -------------------------------------------------------------------------------- /src/database/schema/repository.rs: -------------------------------------------------------------------------------- 1 | use std::{collections::BTreeMap, ops::Deref, path::Path, sync::Arc}; 2 | 3 | use anyhow::{Context, Result}; 4 | use rand::random; 5 | use rkyv::{Archive, Serialize}; 6 | use rocksdb::IteratorMode; 7 | use yoke::{Yoke, Yokeable}; 8 | 9 | use crate::database::schema::{ 10 | Yoked, 11 | commit::CommitTree, 12 | prefixes::{COMMIT_FAMILY, REFERENCE_FAMILY, REPOSITORY_FAMILY, TAG_FAMILY}, 13 | tag::TagTree, 14 | }; 15 | 16 | #[derive(Serialize, Archive, Debug, PartialEq, Eq, Hash, Yokeable)] 17 | pub struct Repository { 18 | /// The ID of the repository, as stored in `RocksDB` 19 | pub id: RepositoryId, 20 | /// The "clean name" of the repository (ie. 
`hello-world.git`) 21 | pub name: String, 22 | /// The description of the repository, as it is stored in the `description` file in the 23 | /// bare repo root 24 | pub description: Option, 25 | /// The owner of the repository (`gitweb.owner` in the repository configuration) 26 | pub owner: Option, 27 | /// The last time this repository was updated, currently read from the directory mtime 28 | pub last_modified: (i64, i32), 29 | /// The default branch for Git operations 30 | pub default_branch: Option, 31 | /// Whether the repository is available for HTTP(s) cloning 32 | /// 33 | /// This is set to `true` based on the presence of `git-daemon-export-ok` in the repository 34 | pub exported: bool, 35 | } 36 | 37 | pub type YokedRepository = Yoked<&'static ::Archived>; 38 | 39 | impl Repository { 40 | pub fn exists>(database: &rocksdb::DB, path: P) -> Result { 41 | let cf = database 42 | .cf_handle(REPOSITORY_FAMILY) 43 | .context("repository column family missing")?; 44 | let path = path.as_ref().to_str().context("invalid path")?; 45 | 46 | Ok(database.get_pinned_cf(cf, path)?.is_some()) 47 | } 48 | 49 | pub fn fetch_all(database: &rocksdb::DB) -> Result> { 50 | let cf = database 51 | .cf_handle(REPOSITORY_FAMILY) 52 | .context("repository column family missing")?; 53 | 54 | database 55 | .iterator_cf(cf, IteratorMode::Start) 56 | .filter_map(Result::ok) 57 | .map(|(key, value)| { 58 | let key = String::from_utf8(key.into_vec()).context("invalid repo name")?; 59 | let value = Yoke::try_attach_to_cart(value, |data| { 60 | rkyv::access::<_, rkyv::rancor::Error>(data) 61 | })?; 62 | 63 | Ok((key, value)) 64 | }) 65 | .collect() 66 | } 67 | 68 | pub fn insert>(&self, database: &rocksdb::DB, path: P) -> Result<()> { 69 | let cf = database 70 | .cf_handle(REPOSITORY_FAMILY) 71 | .context("repository column family missing")?; 72 | let path = path.as_ref().to_str().context("invalid path")?; 73 | 74 | database.put_cf(cf, path, rkyv::to_bytes::(self)?)?; 75 | 76 | Ok(()) 77 | } 
78 | 79 | pub fn open>( 80 | database: &rocksdb::DB, 81 | path: P, 82 | ) -> Result> { 83 | let cf = database 84 | .cf_handle(REPOSITORY_FAMILY) 85 | .context("repository column family missing")?; 86 | 87 | let path = path.as_ref().to_str().context("invalid path")?; 88 | let Some(value) = database.get_cf(cf, path)? else { 89 | return Ok(None); 90 | }; 91 | 92 | Yoke::try_attach_to_cart(value.into_boxed_slice(), |data| { 93 | rkyv::access::<_, rkyv::rancor::Error>(data) 94 | }) 95 | .map(Some) 96 | .context("Failed to open repository") 97 | } 98 | } 99 | 100 | impl ArchivedRepository { 101 | pub fn delete>(&self, database: &rocksdb::DB, path: P) -> Result<()> { 102 | let start_id = self.id.0.to_native().to_be_bytes(); 103 | let mut end_id = start_id; 104 | *end_id.last_mut().unwrap() += 1; 105 | 106 | // delete commits 107 | let commit_cf = database 108 | .cf_handle(COMMIT_FAMILY) 109 | .context("commit column family missing")?; 110 | database.delete_range_cf(commit_cf, start_id, end_id)?; 111 | 112 | // delete tags 113 | let tag_cf = database 114 | .cf_handle(TAG_FAMILY) 115 | .context("tag column family missing")?; 116 | database.delete_range_cf(tag_cf, start_id, end_id)?; 117 | 118 | // delete self 119 | let repo_cf = database 120 | .cf_handle(REPOSITORY_FAMILY) 121 | .context("repository column family missing")?; 122 | let path = path.as_ref().to_str().context("invalid path")?; 123 | database.delete_cf(repo_cf, path)?; 124 | 125 | Ok(()) 126 | } 127 | 128 | pub fn commit_tree(&self, database: Arc, reference: &str) -> CommitTree { 129 | CommitTree::new(database, RepositoryId(self.id.0.to_native()), reference) 130 | } 131 | 132 | pub fn tag_tree(&self, database: Arc) -> TagTree { 133 | TagTree::new(database, RepositoryId(self.id.0.to_native())) 134 | } 135 | 136 | pub fn replace_heads(&self, database: &rocksdb::DB, new_heads: &Vec) -> Result<()> { 137 | let cf = database 138 | .cf_handle(REFERENCE_FAMILY) 139 | .context("missing reference column family")?; 140 | 
141 | database.put_cf( 142 | cf, 143 | self.id.0.to_native().to_be_bytes(), 144 | rkyv::to_bytes::(new_heads)?, 145 | )?; 146 | 147 | Ok(()) 148 | } 149 | 150 | #[allow(clippy::type_complexity)] 151 | pub fn heads( 152 | &self, 153 | database: &rocksdb::DB, 154 | ) -> Result>>> { 155 | let cf = database 156 | .cf_handle(REFERENCE_FAMILY) 157 | .context("missing reference column family")?; 158 | 159 | let Some(bytes) = database.get_cf(cf, self.id.0.to_native().to_be_bytes())? else { 160 | return Ok(None); 161 | }; 162 | 163 | Yoke::try_attach_to_cart(Box::from(bytes), |bytes| { 164 | rkyv::access::<_, rkyv::rancor::Error>(bytes) 165 | }) 166 | .context("failed to deserialize heads") 167 | .map(Some) 168 | } 169 | } 170 | 171 | #[derive(Serialize, Archive, Debug, Clone, PartialEq, Eq, Hash)] 172 | pub struct Heads(pub Vec); 173 | 174 | #[derive(Serialize, Archive, Debug, Copy, Clone, PartialEq, Eq, Hash)] 175 | pub struct RepositoryId(pub u64); 176 | 177 | impl RepositoryId { 178 | pub fn new() -> Self { 179 | Self(random()) 180 | } 181 | } 182 | 183 | impl Deref for RepositoryId { 184 | type Target = u64; 185 | 186 | fn deref(&self) -> &Self::Target { 187 | &self.0 188 | } 189 | } 190 | -------------------------------------------------------------------------------- /src/database/schema/tag.rs: -------------------------------------------------------------------------------- 1 | use std::{collections::HashSet, sync::Arc}; 2 | 3 | use anyhow::Context; 4 | use gix::actor::SignatureRef; 5 | use rkyv::{Archive, Serialize}; 6 | use yoke::{Yoke, Yokeable}; 7 | 8 | use crate::database::schema::{ 9 | Yoked, 10 | commit::{ArchivedAuthor, Author}, 11 | prefixes::TAG_FAMILY, 12 | repository::RepositoryId, 13 | }; 14 | 15 | #[derive(Serialize, Archive, Debug, Yokeable)] 16 | pub struct Tag { 17 | pub tagger: Option, 18 | pub tree_id: Option, 19 | } 20 | 21 | impl Tag { 22 | pub fn new( 23 | tagger: Option>, 24 | tree_id: Option, 25 | ) -> Result { 26 | Ok(Self { 27 | tagger: 
tagger.map(TryFrom::try_from).transpose()?, 28 | tree_id, 29 | }) 30 | } 31 | 32 | pub fn insert(&self, batch: &TagTree, name: &str) -> Result<(), anyhow::Error> { 33 | batch.insert(name, self) 34 | } 35 | } 36 | 37 | pub struct TagTree { 38 | pub db: Arc, 39 | prefix: RepositoryId, 40 | } 41 | 42 | pub type YokedString = Yoked<&'static str>; 43 | pub type YokedTag = Yoked<&'static ::Archived>; 44 | 45 | impl TagTree { 46 | pub(super) fn new(db: Arc, prefix: RepositoryId) -> Self { 47 | Self { db, prefix } 48 | } 49 | 50 | pub fn insert(&self, name: &str, value: &Tag) -> anyhow::Result<()> { 51 | let cf = self 52 | .db 53 | .cf_handle(TAG_FAMILY) 54 | .context("missing tag column family")?; 55 | 56 | let mut db_name = self.prefix.to_be_bytes().to_vec(); 57 | db_name.extend_from_slice(name.as_ref()); 58 | 59 | self.db 60 | .put_cf(cf, db_name, rkyv::to_bytes::(value)?)?; 61 | 62 | Ok(()) 63 | } 64 | 65 | pub fn remove(&self, name: &str) -> anyhow::Result<()> { 66 | let cf = self 67 | .db 68 | .cf_handle(TAG_FAMILY) 69 | .context("missing tag column family")?; 70 | 71 | let mut db_name = self.prefix.to_be_bytes().to_vec(); 72 | db_name.extend_from_slice(name.as_ref()); 73 | self.db.delete_cf(cf, db_name)?; 74 | 75 | Ok(()) 76 | } 77 | 78 | pub fn list(&self) -> anyhow::Result> { 79 | let cf = self 80 | .db 81 | .cf_handle(TAG_FAMILY) 82 | .context("missing tag column family")?; 83 | 84 | Ok(self 85 | .db 86 | .prefix_iterator_cf(cf, self.prefix.to_be_bytes()) 87 | .filter_map(Result::ok) 88 | .filter_map(|(k, _)| { 89 | Some( 90 | String::from_utf8_lossy(k.strip_prefix(&self.prefix.to_be_bytes())?) 
91 | .to_string(), 92 | ) 93 | }) 94 | .collect()) 95 | } 96 | 97 | pub fn fetch_all(&self) -> anyhow::Result> { 98 | let cf = self 99 | .db 100 | .cf_handle(TAG_FAMILY) 101 | .context("missing tag column family")?; 102 | 103 | let mut res = self 104 | .db 105 | .prefix_iterator_cf(cf, self.prefix.to_be_bytes()) 106 | .filter_map(Result::ok) 107 | .filter_map(|(name, value)| { 108 | let name = Yoke::try_attach_to_cart(name, |data| { 109 | let data = data 110 | .strip_prefix(&self.prefix.to_be_bytes()) 111 | .ok_or(())? 112 | .strip_prefix(b"refs/tags/") 113 | .ok_or(())?; 114 | simdutf8::basic::from_utf8(data).map_err(|_| ()) 115 | }) 116 | .ok()?; 117 | 118 | Some((name, value)) 119 | }) 120 | .map(|(name, value)| { 121 | let value = Yoke::try_attach_to_cart(value, |data| { 122 | rkyv::access::<_, rkyv::rancor::Error>(data) 123 | })?; 124 | Ok((name, value)) 125 | }) 126 | .collect::>>()?; 127 | 128 | res.sort_unstable_by(|a, b| { 129 | let a_tagger = a.1.get().tagger.as_ref().map(ArchivedAuthor::time); 130 | let b_tagger = b.1.get().tagger.as_ref().map(ArchivedAuthor::time); 131 | b_tagger.cmp(&a_tagger) 132 | }); 133 | 134 | Ok(res) 135 | } 136 | } 137 | -------------------------------------------------------------------------------- /src/database/schema/tree.rs: -------------------------------------------------------------------------------- 1 | use std::collections::BTreeMap; 2 | 3 | use anyhow::Context; 4 | use gix::{ObjectId, bstr::BStr}; 5 | use itertools::{Either, Itertools}; 6 | use rkyv::{Archive, Serialize}; 7 | use rocksdb::{DB, WriteBatch}; 8 | use yoke::{Yoke, Yokeable}; 9 | 10 | use super::{ 11 | Yoked, 12 | prefixes::{TREE_FAMILY, TREE_ITEM_FAMILY}, 13 | }; 14 | 15 | #[derive(Serialize, Archive, Debug, PartialEq, Eq, Hash)] 16 | pub struct Tree { 17 | pub indexed_tree_id: u64, 18 | } 19 | 20 | impl Tree { 21 | pub fn insert( 22 | &self, 23 | database: &DB, 24 | batch: &mut WriteBatch, 25 | tree_oid: ObjectId, 26 | ) -> Result<(), anyhow::Error> { 
27 | let cf = database 28 | .cf_handle(TREE_FAMILY) 29 | .context("tree column family missing")?; 30 | 31 | batch.put_cf( 32 | cf, 33 | tree_oid.as_slice(), 34 | rkyv::to_bytes::(self)?, 35 | ); 36 | 37 | Ok(()) 38 | } 39 | 40 | pub fn find(database: &DB, tree_oid: ObjectId) -> Result, anyhow::Error> { 41 | let cf = database 42 | .cf_handle(TREE_FAMILY) 43 | .context("tree column family missing")?; 44 | 45 | let Some(data) = database.get_cf(cf, tree_oid.as_slice())? else { 46 | return Ok(None); 47 | }; 48 | 49 | let data = rkyv::access::<::Archived, rkyv::rancor::Error>(data.as_ref())?; 50 | 51 | Ok(Some(data.indexed_tree_id.to_native())) 52 | } 53 | } 54 | 55 | #[derive(Serialize, Archive, Debug, PartialEq, Eq, Hash, Ord, PartialOrd)] 56 | #[rkyv(derive(Ord, PartialOrd, Eq, PartialEq, Debug))] 57 | #[rkyv(compare(PartialOrd, PartialEq))] 58 | pub struct TreeKey(pub String); 59 | 60 | #[derive(Serialize, Archive, Debug, PartialEq, Eq, Default, Yokeable)] 61 | pub struct SortedTree(pub BTreeMap); 62 | 63 | impl SortedTree { 64 | pub fn insert( 65 | &self, 66 | digest: u64, 67 | database: &DB, 68 | batch: &mut WriteBatch, 69 | ) -> Result<(), anyhow::Error> { 70 | let cf = database 71 | .cf_handle(TREE_ITEM_FAMILY) 72 | .context("tree column family missing")?; 73 | 74 | batch.put_cf( 75 | cf, 76 | digest.to_ne_bytes(), 77 | rkyv::to_bytes::(self)?, 78 | ); 79 | 80 | Ok(()) 81 | } 82 | 83 | pub fn get(digest: u64, database: &DB) -> Result, anyhow::Error> { 84 | let cf = database 85 | .cf_handle(TREE_ITEM_FAMILY) 86 | .expect("tree column family missing"); 87 | 88 | database 89 | .get_cf(cf, digest.to_ne_bytes())? 
90 | .map(|data| { 91 | Yoke::try_attach_to_cart(data.into_boxed_slice(), |data| { 92 | rkyv::access::<_, rkyv::rancor::Error>(data) 93 | }) 94 | }) 95 | .transpose() 96 | .context("failed to parse full tree") 97 | } 98 | } 99 | 100 | #[derive(Serialize, Archive, Debug, PartialEq, Eq)] 101 | #[rkyv( 102 | bytecheck(bounds(__C: rkyv::validation::ArchiveContext)), 103 | serialize_bounds(__S: rkyv::ser::Writer + rkyv::ser::Allocator, __S::Error: rkyv::rancor::Source), 104 | )] 105 | pub enum SortedTreeItem { 106 | File, 107 | Directory(#[rkyv(omit_bounds)] SortedTree), 108 | } 109 | 110 | #[derive(Serialize, Archive, Debug, PartialEq, Eq, Hash)] 111 | pub struct Submodule { 112 | pub url: String, 113 | pub oid: [u8; 20], 114 | } 115 | 116 | #[derive(Serialize, Archive, Debug, PartialEq, Eq, Hash)] 117 | pub enum TreeItemKind { 118 | Submodule(Submodule), 119 | Tree, 120 | File, 121 | } 122 | 123 | #[derive(Serialize, Archive, Debug, PartialEq, Eq, Hash, Yokeable)] 124 | pub struct TreeItem { 125 | pub mode: u16, 126 | pub kind: TreeItemKind, 127 | } 128 | 129 | pub type YokedSortedTree = Yoked<&'static ::Archived>; 130 | pub type YokedTreeItem = Yoked<&'static ::Archived>; 131 | pub type YokedTreeItemKey = Yoked<&'static [u8]>; 132 | pub type YokedTreeItemKeyUtf8 = Yoked<&'static str>; 133 | 134 | impl TreeItem { 135 | pub fn insert( 136 | &self, 137 | buffer: &mut Vec, 138 | digest: u64, 139 | path: &BStr, 140 | database: &DB, 141 | batch: &mut WriteBatch, 142 | ) -> Result<(), anyhow::Error> { 143 | let cf = database 144 | .cf_handle(TREE_ITEM_FAMILY) 145 | .context("tree column family missing")?; 146 | 147 | buffer.clear(); 148 | buffer.reserve(std::mem::size_of::() + path.len() + std::mem::size_of::()); 149 | buffer.extend_from_slice(&digest.to_ne_bytes()); 150 | buffer.extend_from_slice(&memchr::memchr_iter(b'/', path).count().to_be_bytes()); 151 | buffer.extend_from_slice(path.as_ref()); 152 | 153 | batch.put_cf(cf, &buffer, rkyv::to_bytes::(self)?); 154 | 155 | 
Ok(()) 156 | } 157 | 158 | pub fn find_exact( 159 | database: &DB, 160 | digest: u64, 161 | path: &[u8], 162 | ) -> Result, anyhow::Error> { 163 | let cf = database 164 | .cf_handle(TREE_ITEM_FAMILY) 165 | .expect("tree column family missing"); 166 | 167 | let mut buffer = Vec::with_capacity(std::mem::size_of::() + path.len()); 168 | buffer.extend_from_slice(&digest.to_ne_bytes()); 169 | buffer.extend_from_slice(&memchr::memchr_iter(b'/', path).count().to_be_bytes()); 170 | buffer.extend_from_slice(path); 171 | 172 | database 173 | .get_cf(cf, buffer)? 174 | .map(|data| { 175 | Yoke::try_attach_to_cart(data.into_boxed_slice(), |data| { 176 | rkyv::access::<_, rkyv::rancor::Error>(data) 177 | }) 178 | }) 179 | .transpose() 180 | .context("failed to parse tree item") 181 | } 182 | 183 | pub fn find_prefix<'a>( 184 | database: &'a DB, 185 | digest: u64, 186 | prefix: Option<&[u8]>, 187 | ) -> impl Iterator> + use<'a> 188 | { 189 | let cf = database 190 | .cf_handle(TREE_ITEM_FAMILY) 191 | .expect("tree column family missing"); 192 | 193 | let (iterator, key) = match prefix { 194 | None => { 195 | let iterator = database.prefix_iterator_cf(cf, digest.to_ne_bytes()); 196 | 197 | (iterator, Either::Left(Either::Left(digest.to_be_bytes()))) 198 | } 199 | Some([]) => { 200 | let mut buffer = [0_u8; std::mem::size_of::() + std::mem::size_of::()]; 201 | buffer[..std::mem::size_of::()].copy_from_slice(&digest.to_ne_bytes()); 202 | buffer[std::mem::size_of::()..].copy_from_slice(&0_usize.to_be_bytes()); 203 | 204 | let iterator = database.prefix_iterator_cf(cf, buffer); 205 | 206 | (iterator, Either::Left(Either::Right(buffer))) 207 | } 208 | Some(prefix) => { 209 | let mut buffer = Vec::with_capacity( 210 | std::mem::size_of::() + prefix.len() + std::mem::size_of::(), 211 | ); 212 | buffer.extend_from_slice(&digest.to_ne_bytes()); 213 | buffer.extend_from_slice( 214 | &(memchr::memchr_iter(b'/', prefix).count() + 1).to_be_bytes(), 215 | ); 216 | 
buffer.extend_from_slice(prefix); 217 | buffer.push(b'/'); 218 | 219 | let iterator = database.prefix_iterator_cf(cf, &buffer); 220 | 221 | (iterator, Either::Right(buffer)) 222 | } 223 | }; 224 | 225 | iterator 226 | .take_while(move |v| { 227 | v.as_ref().is_ok_and(|(k, _)| { 228 | k.starts_with(match key.as_ref() { 229 | Either::Left(Either::Right(v)) => v.as_ref(), 230 | Either::Left(Either::Left(v)) => v.as_ref(), 231 | Either::Right(v) => v.as_ref(), 232 | }) 233 | }) 234 | }) 235 | .map_ok(|(key, value)| { 236 | let key = Yoke::attach_to_cart(key, |data| { 237 | &data[std::mem::size_of::() + std::mem::size_of::()..] 238 | }); 239 | let value = Yoke::try_attach_to_cart(value, |data| { 240 | rkyv::access::<_, rkyv::rancor::Error>(data) 241 | }) 242 | .context("Failed to open repository")?; 243 | Ok((key, value)) 244 | }) 245 | .flatten() 246 | } 247 | 248 | pub fn contains(database: &DB, digest: u64) -> Result { 249 | let cf = database 250 | .cf_handle(TREE_ITEM_FAMILY) 251 | .context("tree column family missing")?; 252 | 253 | Ok(database 254 | .prefix_iterator_cf(cf, digest.to_ne_bytes()) 255 | .next() 256 | .transpose()? 257 | .is_some()) 258 | } 259 | } 260 | -------------------------------------------------------------------------------- /src/layers/logger.rs: -------------------------------------------------------------------------------- 1 | //! Logs each and every request out in a format similar to that of Apache's logs. 
2 | 3 | use std::{ 4 | fmt::Debug, 5 | future::Future, 6 | net::SocketAddr, 7 | task::{Context, Poll}, 8 | time::Instant, 9 | }; 10 | 11 | use axum::{ 12 | extract, 13 | http::{HeaderValue, Method, Request, Response}, 14 | }; 15 | use futures_util::future::{FutureExt, Join, Map, Ready}; 16 | use tokio::task::futures::TaskLocalFuture; 17 | use tower_service::Service; 18 | use tracing::{Instrument, Span, error, info, instrument::Instrumented}; 19 | use uuid::Uuid; 20 | 21 | use super::UnwrapInfallible; 22 | 23 | pub trait GenericError: std::error::Error + Debug + Send + Sync {} 24 | 25 | #[derive(Clone)] 26 | pub struct LoggingMiddleware(pub S); 27 | 28 | impl Service> for LoggingMiddleware 29 | where 30 | S: Service, Response = Response, Error = std::convert::Infallible> 31 | + Clone 32 | + Send 33 | + 'static, 34 | S::Future: Send + 'static, 35 | S::Response: Default + Debug, 36 | ReqBody: Send + Debug + 'static, 37 | ResBody: Default + Send + 'static, 38 | { 39 | type Response = S::Response; 40 | type Error = S::Error; 41 | type Future = Map< 42 | Join>, Ready>, 43 | fn((::Output, PendingLogMessage)) -> ::Output, 44 | >; 45 | 46 | fn poll_ready(&mut self, cx: &mut Context<'_>) -> Poll> { 47 | self.0.poll_ready(cx) 48 | } 49 | 50 | fn call(&mut self, req: Request) -> Self::Future { 51 | let request_id = Uuid::new_v4(); 52 | let span = tracing::info_span!("web", "request_id" = request_id.to_string().as_str()); 53 | 54 | let log_message = PendingLogMessage { 55 | span: span.clone(), 56 | request_id, 57 | ip: req 58 | .extensions() 59 | .get::>() 60 | .map_or_else(|| "0.0.0.0:0".parse().unwrap(), |v| v.0), 61 | method: req.method().clone(), 62 | uri: req.uri().path().to_string(), 63 | start: Instant::now(), 64 | user_agent: req.headers().get(axum::http::header::USER_AGENT).cloned(), 65 | }; 66 | 67 | futures_util::future::join( 68 | REQ_TIMESTAMP.scope(log_message.start, self.0.call(req).instrument(span)), 69 | futures_util::future::ready(log_message), 70 | ) 71 | 
.map(|(response, pending_log_message)| { 72 | let mut response = response.unwrap_infallible(); 73 | pending_log_message.log(&response); 74 | response.headers_mut().insert( 75 | "X-Request-ID", 76 | HeaderValue::try_from(pending_log_message.request_id.to_string()).unwrap(), 77 | ); 78 | Ok(response) 79 | }) 80 | } 81 | } 82 | 83 | tokio::task_local! { 84 | pub static REQ_TIMESTAMP: Instant; 85 | } 86 | 87 | pub struct PendingLogMessage { 88 | span: Span, 89 | request_id: Uuid, 90 | ip: SocketAddr, 91 | method: Method, 92 | uri: String, 93 | start: Instant, 94 | user_agent: Option, 95 | } 96 | 97 | impl PendingLogMessage { 98 | pub fn log(&self, response: &Response) { 99 | let _enter = self.span.enter(); 100 | 101 | if response.status().is_server_error() { 102 | error!( 103 | "{ip} - \"{method} {uri}\" {status} {duration:?} \"{user_agent}\" \"{error:?}\"", 104 | ip = self.ip, 105 | method = self.method, 106 | uri = self.uri, 107 | status = response.status().as_u16(), 108 | duration = self.start.elapsed(), 109 | user_agent = self 110 | .user_agent 111 | .as_ref() 112 | .and_then(|v| v.to_str().ok()) 113 | .unwrap_or("unknown"), 114 | error = match response.extensions().get::>() { 115 | Some(e) => Err(e), 116 | None => Ok(()), 117 | } 118 | ); 119 | } else { 120 | info!( 121 | "{ip} - \"{method} {uri}\" {status} {duration:?} \"{user_agent}\" \"{error:?}\"", 122 | ip = self.ip, 123 | method = self.method, 124 | uri = self.uri, 125 | status = response.status().as_u16(), 126 | duration = self.start.elapsed(), 127 | user_agent = self 128 | .user_agent 129 | .as_ref() 130 | .and_then(|v| v.to_str().ok()) 131 | .unwrap_or("unknown"), 132 | error = match response.extensions().get::>() { 133 | Some(e) => Err(e), 134 | None => Ok(()), 135 | } 136 | ); 137 | } 138 | } 139 | } 140 | -------------------------------------------------------------------------------- /src/layers/mod.rs: -------------------------------------------------------------------------------- 1 | use 
std::convert::Infallible; 2 | 3 | pub mod logger; 4 | 5 | pub trait UnwrapInfallible { 6 | fn unwrap_infallible(self) -> T; 7 | } 8 | 9 | impl UnwrapInfallible for Result { 10 | fn unwrap_infallible(self) -> T { 11 | self.unwrap() 12 | } 13 | } 14 | 15 | impl UnwrapInfallible for Result { 16 | fn unwrap_infallible(self) -> T { 17 | self.unwrap() 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /src/main.rs: -------------------------------------------------------------------------------- 1 | #![deny(clippy::pedantic)] 2 | 3 | use std::{ 4 | borrow::Cow, 5 | fmt::{Display, Formatter}, 6 | future::IntoFuture, 7 | net::SocketAddr, 8 | path::PathBuf, 9 | str::FromStr, 10 | sync::{Arc, OnceLock}, 11 | time::Duration, 12 | }; 13 | 14 | use anyhow::Context; 15 | use askama::Template; 16 | use axum::{ 17 | Extension, Router, 18 | body::Body, 19 | http, 20 | http::{HeaderValue, StatusCode}, 21 | response::{IntoResponse, Response}, 22 | routing::get, 23 | }; 24 | use clap::Parser; 25 | use const_format::formatcp; 26 | use database::schema::{ 27 | SCHEMA_VERSION, 28 | prefixes::{TREE_FAMILY, TREE_ITEM_FAMILY}, 29 | }; 30 | use rocksdb::{Options, SliceTransform}; 31 | use tokio::{ 32 | net::TcpListener, 33 | signal::unix::{SignalKind, signal}, 34 | sync::mpsc, 35 | }; 36 | use tower_http::{cors::CorsLayer, timeout::TimeoutLayer}; 37 | use tower_layer::layer_fn; 38 | use tracing::{error, info, instrument, level_filters::LevelFilter, warn}; 39 | use tracing_subscriber::{ 40 | EnvFilter, fmt::format::FmtSpan, layer::SubscriberExt, util::SubscriberInitExt, 41 | }; 42 | use xxhash_rust::const_xxh3; 43 | 44 | use crate::{ 45 | database::schema::prefixes::{ 46 | COMMIT_COUNT_FAMILY, COMMIT_FAMILY, REFERENCE_FAMILY, REPOSITORY_FAMILY, TAG_FAMILY, 47 | }, 48 | git::Git, 49 | layers::logger::LoggingMiddleware, 50 | syntax_highlight::prime_highlighters, 51 | theme::Theme, 52 | }; 53 | 54 | mod database; 55 | mod git; 56 | mod layers; 57 | 
mod methods; 58 | mod syntax_highlight; 59 | mod theme; 60 | mod unified_diff_builder; 61 | 62 | const CRATE_VERSION: &str = clap::crate_version!(); 63 | 64 | const GLOBAL_CSS: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/statics/css/style.css")); 65 | const GLOBAL_CSS_HASH: &str = const_hex::Buffer::<16, false>::new() 66 | .const_format(&const_xxh3::xxh3_128(GLOBAL_CSS).to_be_bytes()) 67 | .as_str(); 68 | 69 | static HIGHLIGHT_CSS_HASH: OnceLock> = OnceLock::new(); 70 | static DARK_HIGHLIGHT_CSS_HASH: OnceLock> = OnceLock::new(); 71 | 72 | #[derive(Parser, Debug)] 73 | #[clap(author, version, about)] 74 | pub struct Args { 75 | /// Path to a directory in which the `RocksDB` database should be stored, will be created if it doesn't already exist 76 | /// 77 | /// The `RocksDB` database is very quick to generate, so this can be pointed to temporary storage 78 | #[clap(short, long, value_parser)] 79 | db_store: PathBuf, 80 | /// The socket address to bind to (eg. 0.0.0.0:3333) 81 | bind_address: SocketAddr, 82 | /// The path in which your bare Git repositories reside (will be scanned recursively) 83 | scan_path: PathBuf, 84 | /// Optional path (relative to cwd) to a plain text file containing a list of repositories relative to the `scan_path` 85 | /// that are whitelisted to be exposed by rgit. 86 | #[clap(long)] 87 | repository_list: Option, 88 | /// Configures the metadata refresh interval (eg. "never" or "60s") 89 | #[clap(long, default_value_t = RefreshInterval::Duration(Duration::from_secs(300)))] 90 | refresh_interval: RefreshInterval, 91 | /// Configures the request timeout. 
92 | #[clap(long, default_value_t = Duration::from_secs(10).into())] 93 | request_timeout: humantime::Duration, 94 | } 95 | 96 | #[derive(Debug, Clone, Copy)] 97 | pub enum RefreshInterval { 98 | Never, 99 | Duration(Duration), 100 | } 101 | 102 | impl Display for RefreshInterval { 103 | fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { 104 | match self { 105 | Self::Never => write!(f, "never"), 106 | Self::Duration(s) => write!(f, "{}", humantime::format_duration(*s)), 107 | } 108 | } 109 | } 110 | 111 | impl FromStr for RefreshInterval { 112 | type Err = &'static str; 113 | 114 | fn from_str(s: &str) -> Result { 115 | if s == "never" { 116 | Ok(Self::Never) 117 | } else if let Ok(v) = humantime::parse_duration(s) { 118 | Ok(Self::Duration(v)) 119 | } else { 120 | Err("must be seconds, a human readable duration (eg. '10m') or 'never'") 121 | } 122 | } 123 | } 124 | 125 | #[tokio::main] 126 | #[allow(clippy::too_many_lines)] 127 | async fn main() -> Result<(), anyhow::Error> { 128 | let args: Args = Args::parse(); 129 | 130 | let logger_layer = tracing_subscriber::fmt::layer().with_span_events(FmtSpan::CLOSE); 131 | let env_filter = EnvFilter::builder() 132 | .with_default_directive(LevelFilter::INFO.into()) 133 | .from_env()?; 134 | 135 | tracing_subscriber::registry() 136 | .with(env_filter) 137 | .with(logger_layer) 138 | .init(); 139 | 140 | let db = open_db(&args)?; 141 | 142 | let indexer_wakeup_task = run_indexer( 143 | db.clone(), 144 | args.scan_path.clone(), 145 | args.repository_list.clone(), 146 | args.refresh_interval, 147 | ); 148 | 149 | let css = { 150 | let theme = basic_toml::from_str::(include_str!("../themes/github_light.toml")) 151 | .unwrap() 152 | .build_css(); 153 | let css = Box::leak( 154 | format!("@media (prefers-color-scheme: light){{{theme}}}") 155 | .into_boxed_str() 156 | .into_boxed_bytes(), 157 | ); 158 | HIGHLIGHT_CSS_HASH.set(build_asset_hash(css)).unwrap(); 159 | css 160 | }; 161 | 162 | let dark_css = { 163 | let theme 
= basic_toml::from_str::(include_str!("../themes/onedark.toml")) 164 | .unwrap() 165 | .build_css(); 166 | let css = Box::leak( 167 | format!("@media (prefers-color-scheme: dark){{{theme}}}") 168 | .into_boxed_str() 169 | .into_boxed_bytes(), 170 | ); 171 | DARK_HIGHLIGHT_CSS_HASH.set(build_asset_hash(css)).unwrap(); 172 | css 173 | }; 174 | 175 | let static_favicon = |content: &'static [u8]| { 176 | move || async move { 177 | let mut resp = Response::new(Body::from(content)); 178 | resp.headers_mut().insert( 179 | http::header::CONTENT_TYPE, 180 | HeaderValue::from_static("image/x-icon"), 181 | ); 182 | resp 183 | } 184 | }; 185 | 186 | let static_css = |content: &'static [u8]| { 187 | move || async move { 188 | let mut resp = Response::new(Body::from(content)); 189 | resp.headers_mut().insert( 190 | http::header::CONTENT_TYPE, 191 | HeaderValue::from_static("text/css"), 192 | ); 193 | resp 194 | } 195 | }; 196 | 197 | info!("Priming highlighters..."); 198 | prime_highlighters(); 199 | info!("Server starting up..."); 200 | 201 | let app = Router::new() 202 | .route("/", get(methods::index::handle)) 203 | .route( 204 | formatcp!("/style-{}.css", GLOBAL_CSS_HASH), 205 | get(static_css(GLOBAL_CSS)), 206 | ) 207 | .route( 208 | &format!("/highlight-{}.css", HIGHLIGHT_CSS_HASH.get().unwrap()), 209 | get(static_css(css)), 210 | ) 211 | .route( 212 | &format!( 213 | "/highlight-dark-{}.css", 214 | DARK_HIGHLIGHT_CSS_HASH.get().unwrap() 215 | ), 216 | get(static_css(dark_css)), 217 | ) 218 | .route( 219 | "/favicon.ico", 220 | get(static_favicon(include_bytes!("../statics/favicon.ico"))), 221 | ) 222 | .fallback(methods::repo::service) 223 | .layer(TimeoutLayer::new(args.request_timeout.into())) 224 | .layer(layer_fn(LoggingMiddleware)) 225 | .layer(Extension(Arc::new(Git::new()))) 226 | .layer(Extension(db)) 227 | .layer(Extension(Arc::new(args.scan_path))) 228 | .layer(CorsLayer::new()); 229 | 230 | let listener = TcpListener::bind(&args.bind_address).await?; 231 | let 
app = app.into_make_service_with_connect_info::(); 232 | let server = axum::serve(listener, app).into_future(); 233 | 234 | tokio::select! { 235 | res = server => res.context("failed to run server"), 236 | res = indexer_wakeup_task => res.context("failed to run indexer"), 237 | _ = tokio::signal::ctrl_c() => { 238 | info!("Received ctrl-c, shutting down"); 239 | Ok(()) 240 | } 241 | } 242 | } 243 | 244 | fn open_db(args: &Args) -> Result, anyhow::Error> { 245 | loop { 246 | let mut db_options = Options::default(); 247 | db_options.create_missing_column_families(true); 248 | db_options.create_if_missing(true); 249 | 250 | let mut commit_family_options = Options::default(); 251 | commit_family_options.set_prefix_extractor(SliceTransform::create( 252 | "commit_prefix", 253 | |input| memchr::memchr(b'\0', input).map_or(input, |idx| &input[..idx]), 254 | None, 255 | )); 256 | 257 | let mut tag_family_options = Options::default(); 258 | tag_family_options.set_prefix_extractor(SliceTransform::create_fixed_prefix( 259 | std::mem::size_of::(), 260 | )); // repository id prefix 261 | 262 | let mut tree_item_family_options = Options::default(); 263 | tree_item_family_options.set_prefix_extractor(SliceTransform::create_fixed_prefix( 264 | std::mem::size_of::(), 265 | )); 266 | 267 | let db = rocksdb::DB::open_cf_with_opts( 268 | &db_options, 269 | &args.db_store, 270 | vec![ 271 | (COMMIT_FAMILY, commit_family_options), 272 | (REPOSITORY_FAMILY, Options::default()), 273 | (TAG_FAMILY, tag_family_options), 274 | (REFERENCE_FAMILY, Options::default()), 275 | (COMMIT_COUNT_FAMILY, Options::default()), 276 | (TREE_FAMILY, Options::default()), 277 | (TREE_ITEM_FAMILY, tree_item_family_options), 278 | ], 279 | )?; 280 | 281 | let needs_schema_regen = match db.get("schema_version")? 
{ 282 | Some(v) if v.as_slice() != SCHEMA_VERSION.as_bytes() => Some(Some(v)), 283 | Some(_) => None, 284 | None => { 285 | db.put("schema_version", SCHEMA_VERSION)?; 286 | None 287 | } 288 | }; 289 | 290 | if let Some(version) = needs_schema_regen { 291 | let old_version = version 292 | .as_deref() 293 | .map_or(Cow::Borrowed("unknown"), String::from_utf8_lossy); 294 | 295 | warn!("Clearing outdated database ({old_version} != {SCHEMA_VERSION})"); 296 | 297 | drop(db); 298 | rocksdb::DB::destroy(&Options::default(), &args.db_store)?; 299 | } else { 300 | break Ok(Arc::new(db)); 301 | } 302 | } 303 | } 304 | 305 | async fn run_indexer( 306 | db: Arc, 307 | scan_path: PathBuf, 308 | repository_list: Option, 309 | refresh_interval: RefreshInterval, 310 | ) -> Result<(), tokio::task::JoinError> { 311 | let (indexer_wakeup_send, mut indexer_wakeup_recv) = mpsc::channel(10); 312 | 313 | std::thread::spawn(move || { 314 | loop { 315 | info!("Running periodic index"); 316 | crate::database::indexer::run(&scan_path, repository_list.as_deref(), &db); 317 | info!("Finished periodic index"); 318 | 319 | if indexer_wakeup_recv.blocking_recv().is_none() { 320 | break; 321 | } 322 | } 323 | }); 324 | 325 | tokio::spawn({ 326 | let mut sighup = signal(SignalKind::hangup()).expect("could not subscribe to sighup"); 327 | let build_sleeper = move || async move { 328 | match refresh_interval { 329 | RefreshInterval::Never => futures_util::future::pending().await, 330 | RefreshInterval::Duration(v) => tokio::time::sleep(v).await, 331 | } 332 | }; 333 | 334 | async move { 335 | loop { 336 | tokio::select! 
{ 337 | _ = sighup.recv() => {}, 338 | () = build_sleeper() => {}, 339 | } 340 | 341 | if indexer_wakeup_send.send(()).await.is_err() { 342 | error!("Indexing thread has died and is no longer accepting wakeup messages"); 343 | } 344 | } 345 | } 346 | }) 347 | .await 348 | } 349 | 350 | #[must_use] 351 | pub fn build_asset_hash(v: &[u8]) -> Box { 352 | let hasher = const_xxh3::xxh3_128(v); 353 | let out = const_hex::encode(hasher.to_be_bytes()); 354 | Box::from(out) 355 | } 356 | 357 | pub struct TemplateResponse { 358 | template: T, 359 | } 360 | 361 | impl IntoResponse for TemplateResponse { 362 | #[instrument(skip_all)] 363 | fn into_response(self) -> Response { 364 | match self.template.render() { 365 | Ok(body) => { 366 | let headers = [( 367 | http::header::CONTENT_TYPE, 368 | HeaderValue::from_static("text/html; charset=utf-8"), 369 | )]; 370 | 371 | (headers, body).into_response() 372 | } 373 | Err(_) => StatusCode::INTERNAL_SERVER_ERROR.into_response(), 374 | } 375 | } 376 | } 377 | 378 | pub fn into_response(template: T) -> impl IntoResponse { 379 | TemplateResponse { template } 380 | } 381 | 382 | pub enum ResponseEither { 383 | Left(A), 384 | Right(B), 385 | } 386 | 387 | impl IntoResponse for ResponseEither { 388 | fn into_response(self) -> Response { 389 | match self { 390 | Self::Left(a) => a.into_response(), 391 | Self::Right(b) => b.into_response(), 392 | } 393 | } 394 | } 395 | 396 | #[cfg(test)] 397 | mod test { 398 | #[test] 399 | fn placeholder() { 400 | assert!(true); 401 | } 402 | } 403 | -------------------------------------------------------------------------------- /src/methods/filters.rs: -------------------------------------------------------------------------------- 1 | // sorry clippy, we don't have a choice. 
askama forces this on us 2 | #![allow(clippy::unnecessary_wraps, clippy::trivially_copy_pass_by_ref)] 3 | 4 | use std::{ 5 | borrow::Borrow, 6 | fmt::Display, 7 | sync::{Arc, LazyLock}, 8 | }; 9 | 10 | use arc_swap::ArcSwap; 11 | use rkyv::{ 12 | rend::{i32_le, i64_le}, 13 | tuple::ArchivedTuple2, 14 | }; 15 | use time::{OffsetDateTime, UtcOffset, format_description::well_known::Rfc3339}; 16 | 17 | // pub fn format_time(s: impl Borrow) -> Result { 18 | pub fn format_time(s: impl Into) -> Result { 19 | let s = s.into().0; 20 | 21 | (*s.borrow()) 22 | .format(&Rfc3339) 23 | .map_err(Box::from) 24 | .map_err(askama::Error::Custom) 25 | } 26 | 27 | pub fn branch_query(branch: Option<&str>) -> String { 28 | if let Some(b) = branch { 29 | format!("?h={b}") 30 | } else { 31 | String::new() 32 | } 33 | } 34 | 35 | pub fn timeago(s: impl Into) -> Result { 36 | Ok(timeago::Formatter::new() 37 | .convert((OffsetDateTime::now_utc() - s.into().0).try_into().unwrap())) 38 | } 39 | 40 | pub fn file_perms(s: u16) -> Result { 41 | Ok(unix_mode::to_string(u32::from(s))) 42 | } 43 | 44 | pub struct DisplayHexBuffer(pub const_hex::Buffer); 45 | 46 | impl Display for DisplayHexBuffer { 47 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 48 | f.write_str(self.0.as_str()) 49 | } 50 | } 51 | 52 | pub fn hex(s: &[u8; 20]) -> Result, askama::Error> { 53 | let mut buf = const_hex::Buffer::new(); 54 | buf.format(s); 55 | Ok(DisplayHexBuffer(buf)) 56 | } 57 | 58 | pub fn gravatar(email: &str) -> Result<&'static str, askama::Error> { 59 | static CACHE: LazyLock>> = 60 | LazyLock::new(|| ArcSwap::new(Arc::new(hashbrown::HashMap::new()))); 61 | 62 | if let Some(res) = CACHE.load().get(email).copied() { 63 | return Ok(res); 64 | } 65 | 66 | let url = format!( 67 | "https://www.gravatar.com/avatar/{}", 68 | const_hex::encode(md5::compute(email).0) 69 | ); 70 | let key = Box::leak(Box::from(email)); 71 | let url = url.leak(); 72 | 73 | CACHE.rcu(|curr| { 74 | let mut r = 
(**curr).clone(); 75 | r.insert(key, url); 76 | r 77 | }); 78 | 79 | Ok(url) 80 | } 81 | 82 | pub struct Timestamp(OffsetDateTime); 83 | 84 | impl From<&ArchivedTuple2> for Timestamp { 85 | fn from(value: &ArchivedTuple2) -> Self { 86 | Self( 87 | OffsetDateTime::from_unix_timestamp(value.0.to_native()) 88 | .unwrap() 89 | .to_offset(UtcOffset::from_whole_seconds(value.1.to_native()).unwrap()), 90 | ) 91 | } 92 | } 93 | 94 | impl From<(i64, i32)> for Timestamp { 95 | fn from(value: (i64, i32)) -> Self { 96 | Self( 97 | OffsetDateTime::from_unix_timestamp(value.0) 98 | .unwrap() 99 | .to_offset(UtcOffset::from_whole_seconds(value.1).unwrap()), 100 | ) 101 | } 102 | } 103 | 104 | impl From<&(i64, i32)> for Timestamp { 105 | fn from(value: &(i64, i32)) -> Self { 106 | Self( 107 | OffsetDateTime::from_unix_timestamp(value.0) 108 | .unwrap() 109 | .to_offset(UtcOffset::from_whole_seconds(value.1).unwrap()), 110 | ) 111 | } 112 | } 113 | 114 | impl From for Timestamp { 115 | fn from(value: OffsetDateTime) -> Self { 116 | Self(value) 117 | } 118 | } 119 | -------------------------------------------------------------------------------- /src/methods/index.rs: -------------------------------------------------------------------------------- 1 | use std::{cell::RefCell, sync::Arc}; 2 | 3 | use anyhow::Context; 4 | use askama::Template; 5 | use axum::{ 6 | Extension, 7 | response::{IntoResponse, Response}, 8 | }; 9 | use itertools::{Either, Itertools}; 10 | 11 | use super::filters; 12 | use crate::{ 13 | database::schema::repository::{Repository, YokedRepository}, 14 | into_response, 15 | }; 16 | 17 | #[derive(Template)] 18 | #[template(path = "index.html")] 19 | pub struct View< 20 | 'a, 21 | Group: Iterator, 22 | GroupIter: Iterator, 23 | > { 24 | // this type sig is a necessary evil unfortunately, because askama takes a reference 25 | // to the data for rendering. 
26 | pub repositories: RefCell>>, 27 | } 28 | 29 | impl<'a, Group, GroupIter> View<'a, Group, GroupIter> 30 | where 31 | Group: Iterator, 32 | GroupIter: Iterator, 33 | { 34 | fn take_iter(&self) -> Either> { 35 | self.repositories.replace(Either::Right(std::iter::empty())) 36 | } 37 | } 38 | 39 | pub async fn handle( 40 | Extension(db): Extension>, 41 | ) -> Result { 42 | let fetched = tokio::task::spawn_blocking(move || Repository::fetch_all(&db)) 43 | .await 44 | .context("Failed to join Tokio task")??; 45 | 46 | // rocksdb returned the keys already ordered for us so group_by is a nice 47 | // operation we can use here to avoid writing into a map to group. though, 48 | // now that i think about it it might act a little bit strangely when mixing 49 | // root repositories and nested repositories. we're going to have to prefix 50 | // root repositories with a null byte or something. i'll just leave this here 51 | // as a TODO. 52 | let repositories = fetched 53 | .iter() 54 | .group_by(|(k, _)| memchr::memrchr(b'/', k.as_bytes()).map_or("", |idx| &k[..idx])); 55 | 56 | Ok(into_response(View { 57 | repositories: Either::Left(repositories.into_iter()).into(), 58 | }) 59 | .into_response()) 60 | } 61 | -------------------------------------------------------------------------------- /src/methods/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod filters; 2 | pub mod index; 3 | pub mod repo; 4 | -------------------------------------------------------------------------------- /src/methods/repo/about.rs: -------------------------------------------------------------------------------- 1 | use std::sync::Arc; 2 | 3 | use askama::Template; 4 | use axum::{Extension, extract::Query, response::IntoResponse}; 5 | use serde::Deserialize; 6 | 7 | use crate::{ 8 | Git, 9 | git::ReadmeFormat, 10 | into_response, 11 | methods::{ 12 | filters, 13 | repo::{Repository, RepositoryPath, Result}, 14 | }, 15 | }; 16 | 17 | 
#[derive(Deserialize)] 18 | pub struct UriQuery { 19 | #[serde(rename = "h")] 20 | pub branch: Option>, 21 | } 22 | 23 | #[derive(Template)] 24 | #[template(path = "repo/about.html")] 25 | pub struct View { 26 | repo: Repository, 27 | readme: Option<(ReadmeFormat, Arc)>, 28 | branch: Option>, 29 | } 30 | 31 | pub async fn handle( 32 | Extension(repo): Extension, 33 | Extension(RepositoryPath(repository_path)): Extension, 34 | Extension(git): Extension>, 35 | Query(query): Query, 36 | ) -> Result { 37 | let open_repo = git 38 | .clone() 39 | .repo(repository_path, query.branch.clone()) 40 | .await?; 41 | let readme = open_repo.readme().await?; 42 | 43 | Ok(into_response(View { 44 | repo, 45 | readme, 46 | branch: query.branch, 47 | })) 48 | } 49 | -------------------------------------------------------------------------------- /src/methods/repo/commit.rs: -------------------------------------------------------------------------------- 1 | use std::sync::Arc; 2 | 3 | use askama::Template; 4 | use axum::{Extension, extract::Query, response::IntoResponse}; 5 | use serde::Deserialize; 6 | 7 | use crate::{ 8 | Git, 9 | git::{Commit, OpenRepository}, 10 | into_response, 11 | methods::{ 12 | filters, 13 | repo::{Repository, RepositoryPath, Result}, 14 | }, 15 | }; 16 | 17 | #[derive(Template)] 18 | #[template(path = "repo/commit.html")] 19 | pub struct View { 20 | pub repo: Repository, 21 | pub commit: Arc, 22 | pub branch: Option>, 23 | pub dl_branch: Arc, 24 | pub id: Option, 25 | } 26 | 27 | #[derive(Deserialize)] 28 | pub struct UriQuery { 29 | pub id: Option, 30 | #[serde(rename = "h")] 31 | pub branch: Option>, 32 | } 33 | 34 | pub async fn handle( 35 | Extension(repo): Extension, 36 | Extension(RepositoryPath(repository_path)): Extension, 37 | Extension(git): Extension>, 38 | Query(query): Query, 39 | ) -> Result { 40 | let open_repo = git.repo(repository_path, query.branch.clone()).await?; 41 | 42 | let (dl_branch, commit) = tokio::try_join!( 43 | 
fetch_dl_branch(query.branch.clone(), open_repo.clone()), 44 | fetch_commit(query.id.as_deref(), open_repo), 45 | )?; 46 | 47 | Ok(into_response(View { 48 | repo, 49 | commit, 50 | branch: query.branch, 51 | id: query.id, 52 | dl_branch, 53 | })) 54 | } 55 | 56 | async fn fetch_commit( 57 | commit_id: Option<&str>, 58 | open_repo: Arc, 59 | ) -> Result> { 60 | Ok(if let Some(commit) = commit_id { 61 | open_repo.commit(commit, true).await? 62 | } else { 63 | Arc::new(open_repo.latest_commit(true).await?) 64 | }) 65 | } 66 | 67 | async fn fetch_dl_branch( 68 | branch: Option>, 69 | open_repo: Arc, 70 | ) -> Result> { 71 | if let Some(branch) = branch.clone() { 72 | Ok(branch) 73 | } else { 74 | Ok(Arc::from( 75 | open_repo 76 | .clone() 77 | .default_branch() 78 | .await? 79 | .unwrap_or_else(|| "master".to_string()), 80 | )) 81 | } 82 | } 83 | -------------------------------------------------------------------------------- /src/methods/repo/diff.rs: -------------------------------------------------------------------------------- 1 | use std::{fmt::Write, sync::Arc}; 2 | 3 | use askama::Template; 4 | use axum::{ 5 | Extension, 6 | extract::Query, 7 | http::HeaderValue, 8 | response::{IntoResponse, Response}, 9 | }; 10 | use bytes::{BufMut, BytesMut}; 11 | use clap::crate_version; 12 | use time::format_description::well_known::Rfc2822; 13 | 14 | use crate::{ 15 | Git, 16 | git::Commit, 17 | http, into_response, 18 | methods::{ 19 | filters, 20 | repo::{Repository, RepositoryPath, Result, commit::UriQuery}, 21 | }, 22 | }; 23 | 24 | #[derive(Template)] 25 | #[template(path = "repo/diff.html")] 26 | pub struct View { 27 | pub repo: Repository, 28 | pub commit: Arc, 29 | pub branch: Option>, 30 | } 31 | 32 | pub async fn handle( 33 | Extension(repo): Extension, 34 | Extension(RepositoryPath(repository_path)): Extension, 35 | Extension(git): Extension>, 36 | Query(query): Query, 37 | ) -> Result { 38 | let open_repo = git.repo(repository_path, 
query.branch.clone()).await?; 39 | let commit = if let Some(commit) = query.id { 40 | open_repo.commit(&commit, true).await? 41 | } else { 42 | Arc::new(open_repo.latest_commit(true).await?) 43 | }; 44 | 45 | Ok(into_response(View { 46 | repo, 47 | commit, 48 | branch: query.branch, 49 | })) 50 | } 51 | 52 | pub async fn handle_plain( 53 | Extension(RepositoryPath(repository_path)): Extension, 54 | Extension(git): Extension>, 55 | Query(query): Query, 56 | ) -> Result { 57 | let open_repo = git.repo(repository_path, query.branch).await?; 58 | let commit = if let Some(commit) = query.id { 59 | open_repo.commit(&commit, false).await? 60 | } else { 61 | Arc::new(open_repo.latest_commit(false).await?) 62 | }; 63 | 64 | let headers = [( 65 | http::header::CONTENT_TYPE, 66 | HeaderValue::from_static("text/plain"), 67 | )]; 68 | 69 | let mut data = BytesMut::new(); 70 | 71 | writeln!(data, "From {} Mon Sep 17 00:00:00 2001", commit.get().oid()).unwrap(); 72 | writeln!( 73 | data, 74 | "From: {} <{}>", 75 | commit.get().author().name(), 76 | commit.get().author().email() 77 | ) 78 | .unwrap(); 79 | 80 | write!(data, "Date: ").unwrap(); 81 | let mut writer = data.writer(); 82 | commit 83 | .get() 84 | .author() 85 | .time() 86 | .format_into(&mut writer, &Rfc2822) 87 | .unwrap(); 88 | let mut data = writer.into_inner(); 89 | writeln!(data).unwrap(); 90 | 91 | writeln!(data, "Subject: [PATCH] {}\n", commit.get().summary()).unwrap(); 92 | 93 | write!(data, "{}", commit.get().body()).unwrap(); 94 | 95 | writeln!(data, "---").unwrap(); 96 | 97 | data.extend_from_slice(commit.diff_stats.as_bytes()); 98 | data.extend_from_slice(b"\n"); 99 | data.extend_from_slice(commit.diff.as_bytes()); 100 | 101 | writeln!(data, "--\nrgit {}", crate_version!()).unwrap(); 102 | 103 | Ok((headers, data.freeze()).into_response()) 104 | } 105 | -------------------------------------------------------------------------------- /src/methods/repo/log.rs: 
-------------------------------------------------------------------------------- 1 | use std::sync::Arc; 2 | 3 | use anyhow::Context; 4 | use askama::Template; 5 | use axum::{Extension, extract::Query, response::IntoResponse}; 6 | use serde::Deserialize; 7 | 8 | use crate::{ 9 | database::schema::{commit::YokedCommit, repository::YokedRepository}, 10 | into_response, 11 | methods::{ 12 | filters, 13 | repo::{DEFAULT_BRANCHES, Repository, Result}, 14 | }, 15 | }; 16 | 17 | #[derive(Deserialize)] 18 | pub struct UriQuery { 19 | #[serde(rename = "ofs")] 20 | offset: Option, 21 | #[serde(rename = "h")] 22 | branch: Option, 23 | } 24 | 25 | #[derive(Template)] 26 | #[template(path = "repo/log.html")] 27 | pub struct View { 28 | repo: Repository, 29 | commits: Vec, 30 | next_offset: Option, 31 | branch: Option, 32 | } 33 | 34 | pub async fn handle( 35 | Extension(repo): Extension, 36 | Extension(db): Extension>, 37 | Query(query): Query, 38 | ) -> Result { 39 | tokio::task::spawn_blocking(move || { 40 | let offset = query.offset.unwrap_or(0); 41 | 42 | let repository = crate::database::schema::repository::Repository::open(&db, &*repo)? 43 | .context("Repository does not exist")?; 44 | let mut commits = 45 | get_branch_commits(&repository, &db, query.branch.as_deref(), 101, offset)?; 46 | 47 | let next_offset = if commits.len() == 101 { 48 | commits.pop(); 49 | Some(offset + 100) 50 | } else { 51 | None 52 | }; 53 | 54 | Ok(into_response(View { 55 | repo, 56 | commits, 57 | next_offset, 58 | branch: query.branch, 59 | })) 60 | }) 61 | .await 62 | .context("Failed to attach to tokio task")? 
63 | } 64 | 65 | pub fn get_branch_commits( 66 | repository: &YokedRepository, 67 | database: &Arc, 68 | branch: Option<&str>, 69 | amount: u64, 70 | offset: u64, 71 | ) -> Result> { 72 | if let Some(reference) = branch { 73 | let commit_tree = repository 74 | .get() 75 | .commit_tree(database.clone(), &format!("refs/heads/{reference}")); 76 | let commit_tree = commit_tree.fetch_latest(amount, offset)?; 77 | 78 | if !commit_tree.is_empty() { 79 | return Ok(commit_tree); 80 | } 81 | 82 | let tag_tree = repository 83 | .get() 84 | .commit_tree(database.clone(), &format!("refs/tags/{reference}")); 85 | let tag_tree = tag_tree.fetch_latest(amount, offset)?; 86 | 87 | return Ok(tag_tree); 88 | } 89 | 90 | for branch in repository 91 | .get() 92 | .default_branch 93 | .as_deref() 94 | .into_iter() 95 | .chain(DEFAULT_BRANCHES.into_iter()) 96 | { 97 | let commit_tree = repository.get().commit_tree(database.clone(), branch); 98 | let commits = commit_tree.fetch_latest(amount, offset)?; 99 | 100 | if !commits.is_empty() { 101 | return Ok(commits); 102 | } 103 | } 104 | 105 | Ok(vec![]) 106 | } 107 | -------------------------------------------------------------------------------- /src/methods/repo/mod.rs: -------------------------------------------------------------------------------- 1 | mod about; 2 | mod commit; 3 | mod diff; 4 | mod log; 5 | mod refs; 6 | mod smart_git; 7 | mod snapshot; 8 | mod summary; 9 | mod tag; 10 | mod tree; 11 | 12 | use std::{ 13 | collections::BTreeMap, 14 | ops::Deref, 15 | path::{Path, PathBuf}, 16 | sync::{Arc, LazyLock}, 17 | }; 18 | 19 | use axum::{ 20 | body::Body, 21 | handler::Handler, 22 | http::{Request, StatusCode}, 23 | response::{IntoResponse, Response}, 24 | }; 25 | use path_clean::PathClean; 26 | 27 | use self::{ 28 | about::handle as handle_about, 29 | commit::handle as handle_commit, 30 | diff::{handle as handle_diff, handle_plain as handle_patch}, 31 | log::handle as handle_log, 32 | refs::handle as handle_refs, 33 | 
smart_git::handle as handle_smart_git, 34 | snapshot::handle as handle_snapshot, 35 | summary::handle as handle_summary, 36 | tag::handle as handle_tag, 37 | tree::handle as handle_tree, 38 | }; 39 | use crate::database::schema::{ 40 | commit::YokedCommit, 41 | tag::{YokedString, YokedTag}, 42 | }; 43 | 44 | pub const DEFAULT_BRANCHES: [&str; 2] = ["refs/heads/master", "refs/heads/main"]; 45 | 46 | // this is some wicked, wicked abuse of axum right here... 47 | #[allow(clippy::trait_duplication_in_bounds)] // clippy seems a bit.. lost 48 | pub async fn service(mut request: Request) -> Response { 49 | let scan_path = request 50 | .extensions() 51 | .get::>() 52 | .expect("scan_path missing"); 53 | 54 | let ParsedUri { 55 | uri, 56 | child_path, 57 | action, 58 | } = parse_uri(request.uri().path().trim_matches('/')); 59 | 60 | let uri = Path::new(uri).clean(); 61 | let path = scan_path.join(&uri); 62 | 63 | let db = request 64 | .extensions() 65 | .get::>() 66 | .expect("db extension missing"); 67 | if path.as_os_str().is_empty() 68 | || !crate::database::schema::repository::Repository::exists(db, &uri).unwrap_or_default() 69 | { 70 | return RepositoryNotFound.into_response(); 71 | } 72 | 73 | request.extensions_mut().insert(ChildPath(child_path)); 74 | request.extensions_mut().insert(Repository(uri)); 75 | request.extensions_mut().insert(RepositoryPath(path)); 76 | 77 | match action { 78 | HandlerAction::About => handle_about.call(request, None::<()>).await, 79 | HandlerAction::SmartGit => handle_smart_git.call(request, None::<()>).await, 80 | HandlerAction::Refs => handle_refs.call(request, None::<()>).await, 81 | HandlerAction::Log => handle_log.call(request, None::<()>).await, 82 | HandlerAction::Tree => handle_tree.call(request, None::<()>).await, 83 | HandlerAction::Commit => handle_commit.call(request, None::<()>).await, 84 | HandlerAction::Diff => handle_diff.call(request, None::<()>).await, 85 | HandlerAction::Patch => handle_patch.call(request, 
None::<()>).await, 86 | HandlerAction::Tag => handle_tag.call(request, None::<()>).await, 87 | HandlerAction::Snapshot => handle_snapshot.call(request, None::<()>).await, 88 | HandlerAction::Summary => handle_summary.call(request, None::<()>).await, 89 | } 90 | } 91 | 92 | #[derive(Debug, PartialEq, Eq)] 93 | struct ParsedUri<'a> { 94 | action: HandlerAction, 95 | uri: &'a str, 96 | child_path: Option, 97 | } 98 | 99 | fn parse_uri(uri: &str) -> ParsedUri<'_> { 100 | let mut uri_parts = memchr::memchr_iter(b'/', uri.as_bytes()); 101 | 102 | let original_uri = uri; 103 | let (action, mut uri) = if let Some(idx) = uri_parts.next_back() { 104 | (uri.get(idx + 1..), &uri[..idx]) 105 | } else { 106 | (None, uri) 107 | }; 108 | 109 | match action { 110 | Some("about") => ParsedUri { 111 | action: HandlerAction::About, 112 | uri, 113 | child_path: None, 114 | }, 115 | Some("git-upload-pack") => ParsedUri { 116 | action: HandlerAction::SmartGit, 117 | uri, 118 | child_path: None, 119 | }, 120 | Some("refs") => { 121 | if let Some(idx) = uri_parts.next_back() { 122 | if uri.get(idx + 1..) 
== Some("info") { 123 | ParsedUri { 124 | action: HandlerAction::SmartGit, 125 | uri: &uri[..idx], 126 | child_path: None, 127 | } 128 | } else { 129 | ParsedUri { 130 | action: HandlerAction::Refs, 131 | uri, 132 | child_path: None, 133 | } 134 | } 135 | } else { 136 | ParsedUri { 137 | action: HandlerAction::Refs, 138 | uri, 139 | child_path: None, 140 | } 141 | } 142 | } 143 | Some("log") => ParsedUri { 144 | action: HandlerAction::Log, 145 | uri, 146 | child_path: None, 147 | }, 148 | Some("tree") => ParsedUri { 149 | action: HandlerAction::Tree, 150 | uri, 151 | child_path: None, 152 | }, 153 | Some("commit") => ParsedUri { 154 | action: HandlerAction::Commit, 155 | uri, 156 | child_path: None, 157 | }, 158 | Some("diff") => ParsedUri { 159 | action: HandlerAction::Diff, 160 | uri, 161 | child_path: None, 162 | }, 163 | Some("patch") => ParsedUri { 164 | action: HandlerAction::Patch, 165 | uri, 166 | child_path: None, 167 | }, 168 | Some("tag") => ParsedUri { 169 | action: HandlerAction::Tag, 170 | uri, 171 | child_path: None, 172 | }, 173 | Some("snapshot") => ParsedUri { 174 | action: HandlerAction::Snapshot, 175 | uri, 176 | child_path: None, 177 | }, 178 | Some(_) => { 179 | static TREE_FINDER: LazyLock = 180 | LazyLock::new(|| memchr::memmem::Finder::new(b"/tree/")); 181 | 182 | uri = original_uri; 183 | 184 | // match tree children 185 | if let Some(idx) = TREE_FINDER.find(uri.as_bytes()) { 186 | ParsedUri { 187 | action: HandlerAction::Tree, 188 | uri: &uri[..idx], 189 | // 6 is the length of /tree/ 190 | child_path: Some(Path::new(&uri[idx + 6..]).clean()), 191 | } 192 | } else { 193 | ParsedUri { 194 | action: HandlerAction::Summary, 195 | uri, 196 | child_path: None, 197 | } 198 | } 199 | } 200 | None => ParsedUri { 201 | action: HandlerAction::Summary, 202 | uri, 203 | child_path: None, 204 | }, 205 | } 206 | } 207 | 208 | #[derive(Copy, Clone, Debug, PartialEq, Eq)] 209 | enum HandlerAction { 210 | About, 211 | SmartGit, 212 | Refs, 213 | Log, 214 
| Tree, 215 | Commit, 216 | Diff, 217 | Patch, 218 | Tag, 219 | Snapshot, 220 | Summary, 221 | } 222 | 223 | #[derive(Clone)] 224 | pub struct Repository(pub PathBuf); 225 | 226 | impl Deref for Repository { 227 | type Target = Path; 228 | 229 | fn deref(&self) -> &Self::Target { 230 | &self.0 231 | } 232 | } 233 | 234 | #[derive(Clone)] 235 | pub struct RepositoryPath(pub PathBuf); 236 | 237 | #[derive(Clone)] 238 | pub struct ChildPath(pub Option); 239 | 240 | impl Deref for RepositoryPath { 241 | type Target = Path; 242 | 243 | fn deref(&self) -> &Self::Target { 244 | &self.0 245 | } 246 | } 247 | 248 | pub type Result = std::result::Result; 249 | 250 | pub struct InvalidRequest; 251 | 252 | impl IntoResponse for InvalidRequest { 253 | fn into_response(self) -> Response { 254 | (StatusCode::NOT_FOUND, "Invalid request").into_response() 255 | } 256 | } 257 | 258 | pub struct RepositoryNotFound; 259 | 260 | impl IntoResponse for RepositoryNotFound { 261 | fn into_response(self) -> Response { 262 | (StatusCode::NOT_FOUND, "Repository not found").into_response() 263 | } 264 | } 265 | 266 | pub struct Error(anyhow::Error); 267 | 268 | impl From> for Error { 269 | fn from(e: Arc) -> Self { 270 | Self(anyhow::Error::msg(format!("{e:?}"))) 271 | } 272 | } 273 | 274 | impl From for Error { 275 | fn from(e: anyhow::Error) -> Self { 276 | Self(e) 277 | } 278 | } 279 | 280 | impl From for anyhow::Error { 281 | fn from(value: Error) -> Self { 282 | value.0 283 | } 284 | } 285 | 286 | impl IntoResponse for Error { 287 | fn into_response(self) -> Response { 288 | (StatusCode::INTERNAL_SERVER_ERROR, format!("{:?}", self.0)).into_response() 289 | } 290 | } 291 | 292 | pub struct Refs { 293 | heads: BTreeMap, 294 | tags: Vec<(YokedString, YokedTag)>, 295 | } 296 | -------------------------------------------------------------------------------- /src/methods/repo/refs.rs: -------------------------------------------------------------------------------- 1 | use 
std::{collections::BTreeMap, sync::Arc}; 2 | 3 | use crate::{ 4 | into_response, 5 | methods::{ 6 | filters, 7 | repo::{Refs, Repository, Result}, 8 | }, 9 | }; 10 | use anyhow::Context; 11 | use askama::Template; 12 | use axum::{Extension, response::IntoResponse}; 13 | use rkyv::string::ArchivedString; 14 | use yoke::Yoke; 15 | 16 | #[derive(Template)] 17 | #[template(path = "repo/refs.html")] 18 | pub struct View { 19 | repo: Repository, 20 | refs: Refs, 21 | branch: Option>, 22 | } 23 | 24 | pub async fn handle( 25 | Extension(repo): Extension, 26 | Extension(db): Extension>, 27 | ) -> Result { 28 | tokio::task::spawn_blocking(move || { 29 | let repository = crate::database::schema::repository::Repository::open(&db, &*repo)? 30 | .context("Repository does not exist")?; 31 | let repository = repository.get(); 32 | 33 | let heads_db = repository.heads(&db)?; 34 | let heads_db = heads_db.as_ref().map(Yoke::get); 35 | 36 | let mut heads = BTreeMap::new(); 37 | if let Some(archived_heads) = heads_db { 38 | for head in archived_heads 39 | .0 40 | .as_slice() 41 | .iter() 42 | .map(ArchivedString::as_str) 43 | { 44 | let commit_tree = repository.commit_tree(db.clone(), head); 45 | let name = head.strip_prefix("refs/heads/"); 46 | 47 | if let (Some(name), Some(commit)) = (name, commit_tree.fetch_latest_one()?) { 48 | heads.insert(name.to_string(), commit); 49 | } 50 | } 51 | } 52 | 53 | let tags = repository.tag_tree(db).fetch_all()?; 54 | 55 | Ok(into_response(View { 56 | repo, 57 | refs: Refs { heads, tags }, 58 | branch: None, 59 | })) 60 | }) 61 | .await 62 | .context("Failed to attach to tokio task")? 
63 | } 64 | -------------------------------------------------------------------------------- /src/methods/repo/smart_git.rs: -------------------------------------------------------------------------------- 1 | use std::{io, io::ErrorKind, path::Path, process::Stdio, str::FromStr}; 2 | 3 | use anyhow::{Context, anyhow}; 4 | use axum::{ 5 | Extension, 6 | body::Body, 7 | http::{ 8 | Method, Uri, 9 | header::{HeaderMap, HeaderName, HeaderValue}, 10 | }, 11 | response::{IntoResponse, Response}, 12 | }; 13 | use bytes::{Buf, Bytes, BytesMut}; 14 | use futures_util::TryStreamExt; 15 | use httparse::Status; 16 | use tokio::{ 17 | io::AsyncReadExt, 18 | process::{Child, ChildStderr, ChildStdout, Command}, 19 | sync::mpsc, 20 | }; 21 | use tokio_stream::wrappers::ReceiverStream; 22 | use tokio_util::io::StreamReader; 23 | use tracing::{Instrument, debug, error, info_span, warn}; 24 | 25 | use crate::{ 26 | StatusCode, 27 | methods::repo::{Repository, RepositoryPath, Result}, 28 | }; 29 | 30 | #[allow(clippy::unused_async)] 31 | pub async fn handle( 32 | Extension(RepositoryPath(repository_path)): Extension, 33 | Extension(Repository(repository)): Extension, 34 | method: Method, 35 | uri: Uri, 36 | headers: HeaderMap, 37 | body: Body, 38 | ) -> Result { 39 | let path = extract_path(&uri, &repository)?; 40 | 41 | let mut command = Command::new("git"); 42 | 43 | for (header, env) in [ 44 | ("Content-Type", "CONTENT_TYPE"), 45 | ("Content-Length", "CONTENT_LENGTH"), 46 | ("Git-Protocol", "GIT_PROTOCOL"), 47 | ("Content-Encoding", "HTTP_CONTENT_ENCODING"), 48 | ] { 49 | extract_header(&headers, &mut command, header, env)?; 50 | } 51 | 52 | let mut child = command 53 | .arg("http-backend") 54 | .env("REQUEST_METHOD", method.as_str()) 55 | .env("PATH_INFO", path) 56 | .env("GIT_PROJECT_ROOT", repository_path) 57 | .env("QUERY_STRING", uri.query().unwrap_or("")) 58 | .stdin(Stdio::piped()) 59 | .stdout(Stdio::piped()) 60 | .stderr(Stdio::piped()) 61 | .kill_on_drop(true) 62 | 
.spawn() 63 | .context("Failed to spawn git http-backend")?; 64 | 65 | let mut stdout = child.stdout.take().context("Stdout already taken")?; 66 | let mut stderr = child.stderr.take().context("Stderr already taken")?; 67 | let mut stdin = child.stdin.take().context("Stdin already taken")?; 68 | 69 | // read request body and forward to stdin 70 | let mut body = StreamReader::new( 71 | body.into_data_stream() 72 | .map_err(|e| std::io::Error::new(ErrorKind::Other, e)), 73 | ); 74 | tokio::io::copy_buf(&mut body, &mut stdin) 75 | .await 76 | .context("Failed to copy bytes from request to command stdin")?; 77 | 78 | // wait for the headers back from git http-backend 79 | let mut out_buf = BytesMut::with_capacity(1024); 80 | let headers = loop { 81 | let n = stdout 82 | .read_buf(&mut out_buf) 83 | .await 84 | .context("Failed to read headers")?; 85 | if n == 0 { 86 | break None; 87 | } 88 | 89 | if let Some((body_offset, headers)) = parse_cgi_headers(&out_buf)? { 90 | out_buf.advance(body_offset); 91 | break Some(headers); 92 | } 93 | }; 94 | 95 | // if the `headers` loop broke with `None`, the `git http-backend` didn't return any parseable 96 | // headers so there's no reason for us to continue. there may be something in stderr for us 97 | // though. 98 | let Some(headers) = headers else { 99 | print_status(&mut child, &mut stderr).await; 100 | return Err(anyhow!("Received incomplete response from git http-backend").into()); 101 | }; 102 | 103 | // stream the response back to the client 104 | let (body_send, body_recv) = mpsc::channel(8); 105 | tokio::spawn( 106 | forward_response_to_client(out_buf, body_send, stdout, stderr, child) 107 | .instrument(info_span!("git http-backend reader")), 108 | ); 109 | 110 | Ok((headers, Body::from_stream(ReceiverStream::new(body_recv)))) 111 | } 112 | 113 | /// Forwards the entirety of `stdout` to `body_send`, printing subprocess stderr and status on 114 | /// completion. 
115 | async fn forward_response_to_client( 116 | mut out_buf: BytesMut, 117 | body_send: mpsc::Sender>, 118 | mut stdout: ChildStdout, 119 | mut stderr: ChildStderr, 120 | mut child: Child, 121 | ) { 122 | loop { 123 | let (out, mut end) = match stdout.read_buf(&mut out_buf).await { 124 | Ok(0) => (Ok(out_buf.split().freeze()), true), 125 | Ok(n) => (Ok(out_buf.split_to(n).freeze()), false), 126 | Err(e) => (Err(e), true), 127 | }; 128 | 129 | if body_send.send(out).await.is_err() { 130 | warn!("Receiver went away during git http-backend call"); 131 | end = true; 132 | } 133 | 134 | if end { 135 | break; 136 | } 137 | } 138 | 139 | print_status(&mut child, &mut stderr).await; 140 | } 141 | 142 | /// Prints the exit status of the `git` subprocess. 143 | async fn print_status(child: &mut Child, stderr: &mut ChildStderr) { 144 | match tokio::try_join!(child.wait(), read_stderr(stderr)) { 145 | Ok((status, stderr)) if status.success() => { 146 | debug!(stderr, "git http-backend successfully shutdown"); 147 | } 148 | Ok((status, stderr)) => error!(stderr, "git http-backend exited with status code {status}"), 149 | Err(e) => error!("Failed to wait on git http-backend shutdown: {e}"), 150 | } 151 | } 152 | 153 | /// Reads the entirety of stderr for the given handle. 154 | async fn read_stderr(stderr: &mut ChildStderr) -> io::Result { 155 | let mut stderr_out = Vec::new(); 156 | stderr.read_to_end(&mut stderr_out).await?; 157 | Ok(String::from_utf8_lossy(&stderr_out).into_owned()) 158 | } 159 | 160 | /// Extracts a single header (`header`) from the `input` and passes it as `env` to 161 | /// `output`. 162 | fn extract_header(input: &HeaderMap, output: &mut Command, header: &str, env: &str) -> Result<()> { 163 | if let Some(value) = input.get(header) { 164 | output.env(env, value.to_str().context("Invalid header")?); 165 | } 166 | 167 | Ok(()) 168 | } 169 | 170 | /// Extract the path from the URL to determine the repository path. 
171 | fn extract_path<'a>(uri: &'a Uri, repository: &Path) -> Result<&'a str> { 172 | let path = uri.path(); 173 | let path = path.strip_prefix('/').unwrap_or(path); 174 | 175 | if let Some(prefix) = repository.as_os_str().to_str() { 176 | Ok(path.strip_prefix(prefix).unwrap_or(path)) 177 | } else { 178 | Err(anyhow::Error::msg("Repository name contains invalid bytes").into()) 179 | } 180 | } 181 | 182 | // Intercept headers from the spawned `git http-backend` CGI and rewrite them to 183 | // an `axum::Response`. 184 | pub fn parse_cgi_headers(buffer: &[u8]) -> Result)>, anyhow::Error> { 185 | let mut headers = [httparse::EMPTY_HEADER; 10]; 186 | let (body_offset, headers) = match httparse::parse_headers(buffer, &mut headers)? { 187 | Status::Complete(v) => v, 188 | Status::Partial => return Ok(None), 189 | }; 190 | 191 | let mut response = Response::new(()); 192 | 193 | for header in headers { 194 | response.headers_mut().insert( 195 | HeaderName::from_str(header.name) 196 | .context("Failed to parse header name from Git over CGI")?, 197 | HeaderValue::from_bytes(header.value) 198 | .context("Failed to parse header value from Git over CGI")?, 199 | ); 200 | } 201 | 202 | if let Some(status) = response 203 | .headers_mut() 204 | .remove("Status") 205 | .filter(|s| s.len() >= 3) 206 | { 207 | let status = &status.as_ref()[..3]; 208 | 209 | if let Ok(status) = StatusCode::from_bytes(status) { 210 | *response.status_mut() = status; 211 | } 212 | } 213 | 214 | Ok(Some((body_offset, response))) 215 | } 216 | -------------------------------------------------------------------------------- /src/methods/repo/snapshot.rs: -------------------------------------------------------------------------------- 1 | use std::sync::Arc; 2 | 3 | use anyhow::{Context, anyhow}; 4 | use axum::{Extension, body::Body, extract::Query, http::Response}; 5 | use serde::Deserialize; 6 | use tokio_stream::wrappers::ReceiverStream; 7 | use tracing::{Instrument, error, info_span}; 8 | 9 | use 
super::{RepositoryPath, Result}; 10 | use crate::git::Git; 11 | 12 | #[derive(Deserialize)] 13 | pub struct UriQuery { 14 | #[serde(rename = "h")] 15 | branch: Option>, 16 | id: Option>, 17 | } 18 | 19 | pub async fn handle( 20 | Extension(RepositoryPath(repository_path)): Extension, 21 | Extension(git): Extension>, 22 | Query(query): Query, 23 | ) -> Result> { 24 | let open_repo = git.repo(repository_path, query.branch.clone()).await?; 25 | 26 | // byte stream back to the client 27 | let (send, recv) = tokio::sync::mpsc::channel(1); 28 | 29 | // channel for `archive` to tell us we can send headers etc back to 30 | // the user so it has time to return an error 31 | let (send_cont, recv_cont) = tokio::sync::oneshot::channel(); 32 | 33 | let id = query.id.clone(); 34 | 35 | let res = tokio::spawn( 36 | async move { 37 | if let Err(error) = open_repo 38 | .archive(send.clone(), send_cont, id.as_deref()) 39 | .await 40 | { 41 | error!(%error, "Failed to build archive for client"); 42 | let _res = send.send(Err(anyhow!("archive builder failed"))).await; 43 | return Err(error); 44 | } 45 | 46 | Ok(()) 47 | } 48 | .instrument(info_span!("sender")), 49 | ); 50 | 51 | // don't send any headers until `archive` has told us we're good 52 | // to continue 53 | if recv_cont.await.is_err() { 54 | // sender disappearing means `archive` hit an issue during init, lets 55 | // wait for the error back from the spawned tokio task to return to 56 | // the client 57 | res.await 58 | .context("Tokio task failed")? 59 | .context("Failed to build archive")?; 60 | 61 | // ok, well this isn't ideal. the sender disappeared but we never got 62 | // an error. 
this shouldn't be possible, i guess lets just return an 63 | // internal error 64 | return Err(anyhow!("Ran into inconsistent error state whilst building archive, please file an issue at https://github.com/w4/rgit/issues").into()); 65 | } 66 | 67 | let file_name = query 68 | .id 69 | .as_deref() 70 | .or(query.branch.as_deref()) 71 | .unwrap_or("main"); 72 | 73 | Ok(Response::builder() 74 | .header("Content-Type", "application/gzip") 75 | .header( 76 | "Content-Disposition", 77 | format!("attachment; filename=\"{file_name}.tar.gz\""), 78 | ) 79 | .body(Body::from_stream(ReceiverStream::new(recv))) 80 | .context("failed to build response")?) 81 | } 82 | -------------------------------------------------------------------------------- /src/methods/repo/summary.rs: -------------------------------------------------------------------------------- 1 | use std::{collections::BTreeMap, sync::Arc}; 2 | 3 | use anyhow::Context; 4 | use askama::Template; 5 | use axum::{Extension, response::IntoResponse}; 6 | use axum_extra::extract::Host; 7 | use rkyv::string::ArchivedString; 8 | 9 | use crate::{ 10 | database::schema::{commit::YokedCommit, repository::YokedRepository}, 11 | into_response, 12 | methods::{ 13 | filters, 14 | repo::{DEFAULT_BRANCHES, Refs, Repository, Result}, 15 | }, 16 | }; 17 | 18 | #[derive(Template)] 19 | #[template(path = "repo/summary.html")] 20 | pub struct View { 21 | repo: Repository, 22 | refs: Refs, 23 | commit_list: Vec, 24 | branch: Option>, 25 | exported: bool, 26 | host: String, 27 | } 28 | 29 | pub async fn handle( 30 | Extension(repo): Extension, 31 | Extension(db): Extension>, 32 | Host(host): Host, 33 | ) -> Result { 34 | tokio::task::spawn_blocking(move || { 35 | let repository = crate::database::schema::repository::Repository::open(&db, &*repo)? 
36 | .context("Repository does not exist")?; 37 | let commits = get_default_branch_commits(&repository, &db)?; 38 | 39 | let mut heads = BTreeMap::new(); 40 | if let Some(heads_db) = repository.get().heads(&db)? { 41 | for head in heads_db 42 | .get() 43 | .0 44 | .as_slice() 45 | .iter() 46 | .map(ArchivedString::as_str) 47 | { 48 | let commit_tree = repository.get().commit_tree(db.clone(), head); 49 | let name = head.strip_prefix("refs/heads/"); 50 | 51 | if let (Some(name), Some(commit)) = (name, commit_tree.fetch_latest_one()?) { 52 | heads.insert(name.to_string(), commit); 53 | } 54 | } 55 | } 56 | 57 | let tags = repository.get().tag_tree(db).fetch_all()?; 58 | 59 | Ok(into_response(View { 60 | repo, 61 | refs: Refs { heads, tags }, 62 | commit_list: commits, 63 | branch: None, 64 | exported: repository.get().exported, 65 | host, 66 | })) 67 | }) 68 | .await 69 | .context("Failed to attach to tokio task")? 70 | } 71 | 72 | pub fn get_default_branch_commits( 73 | repository: &YokedRepository, 74 | database: &Arc, 75 | ) -> Result> { 76 | for branch in repository 77 | .get() 78 | .default_branch 79 | .as_deref() 80 | .into_iter() 81 | .chain(DEFAULT_BRANCHES.into_iter()) 82 | { 83 | let commit_tree = repository.get().commit_tree(database.clone(), branch); 84 | let commits = commit_tree.fetch_latest(11, 0)?; 85 | 86 | if !commits.is_empty() { 87 | return Ok(commits); 88 | } 89 | } 90 | 91 | Ok(vec![]) 92 | } 93 | -------------------------------------------------------------------------------- /src/methods/repo/tag.rs: -------------------------------------------------------------------------------- 1 | use std::sync::Arc; 2 | 3 | use askama::Template; 4 | use axum::{Extension, extract::Query, response::IntoResponse}; 5 | use serde::Deserialize; 6 | use yoke::Yoke; 7 | 8 | use crate::{ 9 | Git, 10 | git::DetailedTag, 11 | into_response, 12 | methods::{ 13 | filters, 14 | repo::{Repository, RepositoryPath, Result}, 15 | }, 16 | }; 17 | 18 | #[derive(Deserialize)] 
19 | pub struct UriQuery { 20 | #[serde(rename = "h")] 21 | name: Arc, 22 | } 23 | 24 | #[derive(Template)] 25 | #[template(path = "repo/tag.html")] 26 | pub struct View { 27 | repo: Repository, 28 | tag: Yoke, Vec>, 29 | branch: Option>, 30 | } 31 | 32 | pub async fn handle( 33 | Extension(repo): Extension, 34 | Extension(RepositoryPath(repository_path)): Extension, 35 | Extension(git): Extension>, 36 | Query(query): Query, 37 | ) -> Result { 38 | let open_repo = git.repo(repository_path, Some(query.name.clone())).await?; 39 | let tag = open_repo.tag_info().await?; 40 | 41 | Ok(into_response(View { 42 | repo, 43 | tag, 44 | branch: Some(query.name), 45 | })) 46 | } 47 | -------------------------------------------------------------------------------- /src/methods/repo/tree.rs: -------------------------------------------------------------------------------- 1 | use anyhow::{Context, bail}; 2 | use askama::Template; 3 | use axum::{Extension, extract::Query, response::IntoResponse}; 4 | use gix::ObjectId; 5 | use itertools::Itertools; 6 | use serde::Deserialize; 7 | use std::path::PathBuf; 8 | use std::{ 9 | fmt::{Display, Formatter}, 10 | path::Path, 11 | sync::Arc, 12 | }; 13 | 14 | use crate::database::schema::tree::{ 15 | ArchivedSortedTree, ArchivedSortedTreeItem, ArchivedTreeItemKind, SortedTree, Tree, TreeItem, 16 | YokedSortedTree, YokedTreeItem, YokedTreeItemKeyUtf8, 17 | }; 18 | use crate::{ 19 | Git, ResponseEither, 20 | git::FileWithContent, 21 | into_response, 22 | methods::{ 23 | filters, 24 | repo::{ChildPath, Repository, RepositoryPath, Result}, 25 | }, 26 | }; 27 | 28 | use super::log::get_branch_commits; 29 | 30 | #[derive(Deserialize)] 31 | pub struct UriQuery { 32 | id: Option, 33 | #[serde(default)] 34 | raw: bool, 35 | #[serde(rename = "h")] 36 | branch: Option>, 37 | } 38 | 39 | impl Display for UriQuery { 40 | fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { 41 | let mut prefix = "?"; 42 | 43 | if let Some(id) = self.id.as_deref() { 
44 | write!(f, "{prefix}id={id}")?; 45 | prefix = "&"; 46 | } 47 | 48 | if let Some(branch) = self.branch.as_deref() { 49 | write!(f, "{prefix}h={branch}")?; 50 | } 51 | 52 | Ok(()) 53 | } 54 | } 55 | 56 | #[derive(Template)] 57 | #[template(path = "partials/file_tree.html")] 58 | pub struct FileTree<'a> { 59 | pub inner: &'a ArchivedSortedTree, 60 | pub base: &'a Repository, 61 | pub path_stack: String, 62 | pub query: &'a UriQuery, 63 | pub repo_path: Option<&'a Path>, 64 | } 65 | 66 | impl<'a> FileTree<'a> { 67 | pub fn new( 68 | inner: &'a ArchivedSortedTree, 69 | base: &'a Repository, 70 | path_stack: String, 71 | query: &'a UriQuery, 72 | repo_path: Option<&'a Path>, 73 | ) -> Self { 74 | Self { 75 | inner, 76 | base, 77 | path_stack, 78 | query, 79 | repo_path, 80 | } 81 | } 82 | 83 | pub fn get_next_repo_path_if_parent(&self, name: &str) -> Option<&Path> { 84 | self.repo_path.and_then(|v| v.strip_prefix(name).ok()) 85 | } 86 | } 87 | 88 | #[derive(Template)] 89 | #[template(path = "repo/tree.html")] 90 | #[allow(clippy::module_name_repetitions)] 91 | pub struct TreeView { 92 | pub repo: Repository, 93 | pub items: Vec<(YokedTreeItemKeyUtf8, usize, YokedTreeItem)>, 94 | pub query: UriQuery, 95 | pub repo_path: PathBuf, 96 | pub branch: Option>, 97 | pub full_tree: YokedSortedTree, 98 | } 99 | 100 | #[derive(Template)] 101 | #[template(path = "repo/file.html")] 102 | pub struct FileView { 103 | pub repo: Repository, 104 | pub repo_path: PathBuf, 105 | pub query: UriQuery, 106 | pub file: FileWithContent, 107 | pub branch: Option>, 108 | pub full_tree: YokedSortedTree, 109 | } 110 | 111 | enum LookupResult { 112 | RealPath, 113 | Children(Vec<(YokedTreeItemKeyUtf8, usize, YokedTreeItem)>), 114 | } 115 | 116 | pub async fn handle( 117 | Extension(repo): Extension, 118 | Extension(RepositoryPath(repository_path)): Extension, 119 | Extension(ChildPath(child_path)): Extension, 120 | Extension(git): Extension>, 121 | Extension(db_orig): Extension>, 122 | 
Query(query): Query, 123 | ) -> Result { 124 | let db = db_orig.clone(); 125 | let (query, repo, tree_id) = tokio::task::spawn_blocking(move || { 126 | let tree_id = if let Some(id) = query.id.as_deref() { 127 | let hex = const_hex::decode_to_array(id).context("Failed to parse tree hash")?; 128 | Tree::find(&db, ObjectId::Sha1(hex)) 129 | .context("Failed to lookup tree")? 130 | .context("Couldn't find tree with given id")? 131 | } else { 132 | let repository = crate::database::schema::repository::Repository::open(&db, &*repo)? 133 | .context("Repository does not exist")?; 134 | let commit = get_branch_commits(&repository, &db, query.branch.as_deref(), 1, 0)? 135 | .into_iter() 136 | .next() 137 | .context("Branch not found")?; 138 | commit.get().tree.to_native() 139 | }; 140 | 141 | Ok::<_, anyhow::Error>((query, repo, tree_id)) 142 | }) 143 | .await 144 | .context("failed to join tree_id task")??; 145 | 146 | let db = db_orig.clone(); 147 | let (repo, child_path, lookup_result) = tokio::task::spawn_blocking(move || { 148 | if let Some(path) = &child_path { 149 | if let Some(item) = 150 | TreeItem::find_exact(&db, tree_id, path.as_os_str().as_encoded_bytes())? 
151 | { 152 | if let ArchivedTreeItemKind::File = item.get().kind { 153 | return Ok((repo, child_path, LookupResult::RealPath)); 154 | } 155 | } 156 | } 157 | 158 | let path = child_path 159 | .as_ref() 160 | .map(|v| v.as_os_str().as_encoded_bytes()) 161 | .unwrap_or_default(); 162 | 163 | let tree_items = TreeItem::find_prefix(&db, tree_id, Some(path)) 164 | // don't take the current path the user is on 165 | .filter_ok(|(k, _)| !k.get()[path.len()..].is_empty()) 166 | // only take direct descendents 167 | .filter_ok(|(k, _)| { 168 | memchr::memrchr(b'/', &k.get()[path.len()..]).is_none_or(|v| v == 0) 169 | }) 170 | .map_ok(|(k, v)| { 171 | ( 172 | k.try_map_project(|v, _| simdutf8::basic::from_utf8(v)) 173 | .expect("invalid utf8"), 174 | path.len(), 175 | v, 176 | ) 177 | }) 178 | .try_collect::<_, Vec<_>, _>()?; 179 | 180 | if tree_items.is_empty() { 181 | bail!("Path doesn't exist in tree"); 182 | } 183 | 184 | Ok::<_, anyhow::Error>((repo, child_path, LookupResult::Children(tree_items))) 185 | }) 186 | .await 187 | .context("failed to join on tokio task")??; 188 | 189 | Ok(match lookup_result { 190 | LookupResult::RealPath => { 191 | let open_repo = git.repo(repository_path, query.branch.clone()).await?; 192 | let file = open_repo 193 | .path(child_path.clone(), query.id.as_deref(), !query.raw) 194 | .await?; 195 | 196 | if query.raw { 197 | ResponseEither::Right(file.content) 198 | } else { 199 | let db = db_orig.clone(); 200 | let full_tree = tokio::task::spawn_blocking(move || SortedTree::get(tree_id, &db)) 201 | .await 202 | .context("failed to join on tokio task")?? 
203 | .context("missing file tree")?; 204 | 205 | ResponseEither::Left(ResponseEither::Right(into_response(FileView { 206 | repo, 207 | file, 208 | branch: query.branch.clone(), 209 | repo_path: child_path.unwrap_or_default(), 210 | full_tree, 211 | query, 212 | }))) 213 | } 214 | } 215 | LookupResult::Children(items) => { 216 | let db = db_orig.clone(); 217 | let full_tree = tokio::task::spawn_blocking(move || SortedTree::get(tree_id, &db)) 218 | .await 219 | .context("failed to join on tokio task")?? 220 | .context("missing file tree")?; 221 | 222 | ResponseEither::Left(ResponseEither::Left(into_response(TreeView { 223 | repo, 224 | items, 225 | branch: query.branch.clone(), 226 | query, 227 | repo_path: child_path.unwrap_or_default(), 228 | full_tree, 229 | }))) 230 | } 231 | }) 232 | } 233 | -------------------------------------------------------------------------------- /src/syntax_highlight.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | cell::RefCell, 3 | collections::HashMap, 4 | fmt::Write as FmtWrite, 5 | io::{ErrorKind, Write as IoWrite}, 6 | path::Path, 7 | sync::LazyLock, 8 | }; 9 | 10 | use comrak::adapters::SyntaxHighlighterAdapter; 11 | use tracing::{debug, error}; 12 | use tree_sitter_grammar_repository::{Grammar, Language}; 13 | use tree_sitter_highlight::{HighlightConfiguration, HighlightEvent, Highlighter}; 14 | 15 | thread_local! { 16 | static HIGHLIGHTER: RefCell = RefCell::new(Highlighter::new()); 17 | } 18 | 19 | macro_rules! count { 20 | () => (0); 21 | ($e:expr) => (1); 22 | ($e:expr, $($rest:expr),*) => (1 + count!($($rest),*)); 23 | } 24 | 25 | macro_rules! define_classes { 26 | ($($name:literal => $class:literal),*,) => { 27 | static HIGHLIGHT_NAMES: [&str; count!($($name),*)] = [ 28 | $($name),* 29 | ]; 30 | 31 | static HIGHLIGHT_CLASSES: [&str; count!($($name),*)] = [ 32 | $($class),* 33 | ]; 34 | }; 35 | } 36 | 37 | define_classes! 
{ 38 | "attribute" => "attribute", 39 | "boolean" => "boolean", 40 | "carriage-return" => "carriage-return", 41 | "comment" => "comment", 42 | "comment.documentation" => "comment documentation", 43 | "constant" => "constant", 44 | "constant.builtin" => "constant builtin", 45 | "constructor" => "constructor", 46 | "constructor.builtin" => "constructor builtin", 47 | "embedded" => "embedded", 48 | "error" => "error", 49 | "escape" => "escape", 50 | "function" => "function", 51 | "function.builtin" => "function builtin", 52 | "keyword" => "keyword", 53 | "markup" => "markup", 54 | "markup.bold" => "markup bold", 55 | "markup.heading" => "markup heading", 56 | "markup.italic" => "markup italic", 57 | "markup.link" => "markup link", 58 | "markup.link.url" => "markup link url", 59 | "markup.list" => "markup list", 60 | "markup.list.checked" => "markup list checked", 61 | "markup.list.numbered" => "markup list numbered", 62 | "markup.list.unchecked" => "markup list unchecked", 63 | "markup.list.unnumbered" => "markup list unnumbered", 64 | "markup.quote" => "markup quote", 65 | "markup.raw" => "markup raw", 66 | "markup.raw.block" => "markup raw block", 67 | "markup.raw.inline" => "markup raw inline", 68 | "markup.strikethrough" => "markup strikethrough", 69 | "module" => "module", 70 | "number" => "number", 71 | "operator" => "operator", 72 | "property" => "property", 73 | "property.builtin" => "property builtin", 74 | "punctuation" => "punctuation", 75 | "punctuation.bracket" => "punctuation bracket", 76 | "punctuation.delimiter" => "punctuation delimiter", 77 | "punctuation.special" => "punctuation special", 78 | "string" => "string", 79 | "string.escape" => "string escape", 80 | "string.regexp" => "string regexp", 81 | "string.special" => "string special", 82 | "string.special.symbol" => "string special symbol", 83 | "tag" => "tag", 84 | "type" => "type", 85 | "type.builtin" => "type builtin", 86 | "variable" => "variable", 87 | "variable.builtin" => "variable 
builtin", 88 | "variable.member" => "variable member", 89 | "variable.parameter" => "variable parameter",} 90 | 91 | pub fn prime_highlighters() { 92 | let _res = HIGHLIGHTER_CONFIGS.len(); 93 | } 94 | 95 | static HIGHLIGHTER_CONFIGS: LazyLock> = LazyLock::new(|| { 96 | Grammar::VARIANTS 97 | .iter() 98 | .copied() 99 | .map(Grammar::highlight_configuration_params) 100 | .map(|v| { 101 | let mut configuration = HighlightConfiguration::new( 102 | v.language.into(), 103 | v.name, 104 | v.highlights_query, 105 | v.injection_query, 106 | v.locals_query, 107 | ) 108 | .unwrap_or_else(|e| panic!("bad query for {}: {e}", v.name)); 109 | configuration.configure(&HIGHLIGHT_NAMES); 110 | configuration 111 | }) 112 | .collect() 113 | }); 114 | 115 | pub fn fetch_highlighter_config(file: &Path) -> Option<&'static HighlightConfiguration> { 116 | Language::from_file_name(file) 117 | .map(Language::grammar) 118 | .map(Grammar::idx) 119 | .map(|idx| &HIGHLIGHTER_CONFIGS[idx]) 120 | } 121 | 122 | pub fn fetch_highlighter_config_by_token(token: &str) -> Option<&'static HighlightConfiguration> { 123 | Language::from_injection(token) 124 | .map(Language::grammar) 125 | .map(Grammar::idx) 126 | .map(|idx| &HIGHLIGHTER_CONFIGS[idx]) 127 | } 128 | 129 | pub struct ComrakHighlightAdapter; 130 | 131 | impl SyntaxHighlighterAdapter for ComrakHighlightAdapter { 132 | fn write_highlighted( 133 | &self, 134 | output: &mut dyn IoWrite, 135 | lang: Option<&str>, 136 | code: &str, 137 | ) -> std::io::Result<()> { 138 | let out = format_file(code, FileIdentifier::Token(lang.unwrap_or_default())) 139 | .map_err(|e| std::io::Error::new(ErrorKind::Other, e))?; 140 | output.write_all(out.as_bytes()) 141 | } 142 | 143 | fn write_pre_tag( 144 | &self, 145 | output: &mut dyn IoWrite, 146 | _attributes: HashMap, 147 | ) -> std::io::Result<()> { 148 | write!(output, "
")
149 |     }
150 | 
151 |     fn write_code_tag(
152 |         &self,
153 |         _output: &mut dyn IoWrite,
154 |         _attributes: HashMap,
155 |     ) -> std::io::Result<()> {
156 |         Ok(())
157 |     }
158 | }
159 | 
/// Identifies the file being highlighted so a grammar can be selected.
#[derive(Copy, Clone, Debug)]
pub enum FileIdentifier<'a> {
    /// Select the grammar from the file's path (extension / file name).
    Path(&'a Path),
    /// Select the grammar from a language token, e.g. the info string of a
    /// fenced markdown code block (see `ComrakHighlightAdapter`).
    Token(&'a str),
}
165 | 
166 | pub fn format_file(content: &str, identifier: FileIdentifier<'_>) -> anyhow::Result {
167 |     let mut out = String::new();
168 |     format_file_inner(&mut out, content, identifier, true)?;
169 |     Ok(out)
170 | }
171 | 
172 | pub fn format_file_inner(
173 |     out: &mut String,
174 |     content: &str,
175 |     identifier: FileIdentifier<'_>,
176 |     code_tag: bool,
177 | ) -> anyhow::Result<()> {
178 |     let config = match identifier {
179 |         FileIdentifier::Path(v) => fetch_highlighter_config(v),
180 |         FileIdentifier::Token(v) => fetch_highlighter_config_by_token(v),
181 |     };
182 | 
183 |     let line_prefix = if code_tag { "" } else { "" };
184 |     let line_suffix = if code_tag { "\n" } else { "\n" };
185 | 
186 |     let Some(config) = config else {
187 |         for line in content.lines() {
188 |             out.push_str(line_prefix);
189 |             v_htmlescape::b_escape(line.as_bytes(), out);
190 |             out.push_str(line_suffix);
191 |         }
192 | 
193 |         return Ok(());
194 |     };
195 | 
196 |     HIGHLIGHTER.with_borrow_mut(|highlighter| {
197 |         highlighter.parser().reset();
198 | 
199 |         let spans = highlighter.highlight(config, content.as_bytes(), None, |injection| {
200 |             debug!(injection, "Highlighter switch requested");
201 |             fetch_highlighter_config_by_token(injection)
202 |         });
203 | 
204 |         let mut spans = match spans {
205 |             Ok(v) => v,
206 |             Err(error) => {
207 |                 error!(
208 |                     ?error,
209 |                     "Failed to run highlighter, falling back to plaintext"
210 |                 );
211 | 
212 |                 for line in content.lines() {
213 |                     out.push_str(line_prefix);
214 |                     v_htmlescape::b_escape(line.as_bytes(), out);
215 |                     out.push_str(line_suffix);
216 |                 }
217 | 
218 |                 return Ok(());
219 |             }
220 |         };
221 | 
222 |         let mut tag_open = true;
223 |         out.push_str(line_prefix);
224 | 
225 |         while let Some(span) = spans.next().transpose()? {
226 |             if !tag_open {
227 |                 out.push_str(line_prefix);
228 |                 tag_open = true;
229 |             }
230 | 
231 |             match span {
232 |                 HighlightEvent::Source { start, end } => {
233 |                     let content = &content[start..end];
234 | 
235 |                     for (i, line) in content.lines().enumerate() {
236 |                         if i != 0 {
237 |                             out.push_str(line_suffix);
238 |                             out.push_str(line_prefix);
239 |                         }
240 | 
241 |                         v_htmlescape::b_escape(line.as_bytes(), out);
242 |                     }
243 | 
244 |                     if content.ends_with('\n') {
245 |                         out.push_str(line_suffix);
246 |                         tag_open = false;
247 |                     }
248 |                 }
249 |                 HighlightEvent::HighlightStart(highlight) => {
250 |                     write!(
251 |                         out,
252 |                         r#""#,
253 |                         HIGHLIGHT_CLASSES[highlight.0]
254 |                     )?;
255 |                 }
256 |                 HighlightEvent::HighlightEnd => {
257 |                     out.push_str("");
258 |                 }
259 |             }
260 |         }
261 | 
262 |         if tag_open {
263 |             out.push_str(line_suffix);
264 |         }
265 | 
266 |         Ok::<_, anyhow::Error>(())
267 |     })?;
268 | 
269 |     Ok(())
270 | }
271 | 


--------------------------------------------------------------------------------
/src/theme.rs:
--------------------------------------------------------------------------------
  1 | use std::fmt::{Formatter, Write};
  2 | 
  3 | use serde::{
  4 |     Deserialize, Deserializer,
  5 |     de::{Error, MapAccess, Visitor, value::MapAccessDeserializer},
  6 | };
  7 | 
  8 | #[derive(Deserialize)]
  9 | pub struct Theme {
 10 |     palette: hashbrown::HashMap,
 11 |     #[serde(flatten)]
 12 |     definitions: hashbrown::HashMap,
 13 | }
 14 | 
/// A single highlight-scope definition from a theme file: either a bare
/// foreground colour (or palette key), or a map with background/foreground
/// colours and text modifiers.
pub enum PaletteReference {
    /// Bare string form: a foreground colour value or palette key.
    Foreground(String),
    /// Map form with `bg`/`fg`/`modifiers` keys.
    WithModifiers(PaletteReferenceWithModifiers),
}
 19 | 
 20 | impl<'de> Deserialize<'de> for PaletteReference {
 21 |     fn deserialize(deserializer: D) -> Result
 22 |     where
 23 |         D: Deserializer<'de>,
 24 |     {
 25 |         struct PaletteReferenceVisitor;
 26 | 
 27 |         impl<'de> Visitor<'de> for PaletteReferenceVisitor {
 28 |             type Value = PaletteReference;
 29 | 
 30 |             fn expecting(&self, formatter: &mut Formatter) -> std::fmt::Result {
 31 |                 formatter.write_str("palette reference")
 32 |             }
 33 | 
 34 |             fn visit_str(self, v: &str) -> Result
 35 |             where
 36 |                 E: Error,
 37 |             {
 38 |                 Ok(PaletteReference::Foreground(v.to_string()))
 39 |             }
 40 | 
 41 |             fn visit_map(self, map: A) -> Result
 42 |             where
 43 |                 A: MapAccess<'de>,
 44 |             {
 45 |                 PaletteReferenceWithModifiers::deserialize(MapAccessDeserializer::new(map))
 46 |                     .map(PaletteReference::WithModifiers)
 47 |             }
 48 |         }
 49 | 
 50 |         deserializer.deserialize_any(PaletteReferenceVisitor)
 51 |     }
 52 | }
 53 | 
 54 | #[derive(Deserialize)]
 55 | pub struct PaletteReferenceWithModifiers {
 56 |     bg: Option,
 57 |     fg: Option,
 58 |     #[serde(default)]
 59 |     modifiers: Vec,
 60 | }
 61 | 
/// Text modifiers a theme may attach to a highlight scope, deserialized
/// from snake_case strings (e.g. `"crossed_out"`).
#[derive(Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum Modifiers {
    Underlined,
    Bold,
    Italic,
    CrossedOut,
    // Reversed and Dim are accepted for compatibility but produce no CSS
    // (see `Theme::build_css`).
    Reversed,
    Dim,
}
 72 | 
 73 | impl Theme {
 74 |     fn get_color<'a>(&'a self, reference: &'a str) -> &'a str {
 75 |         if reference.starts_with('#') {
 76 |             reference
 77 |         } else {
 78 |             self.palette
 79 |                 .get(reference)
 80 |                 .unwrap_or_else(|| panic!("bad palette ref {reference}"))
 81 |         }
 82 |     }
 83 | 
 84 |     pub fn build_css(&self) -> String {
 85 |         let mut out = String::new();
 86 | 
 87 |         for (kind, palette_ref) in &self.definitions {
 88 |             write!(out, ".highlight.{kind} {{").unwrap();
 89 | 
 90 |             match palette_ref {
 91 |                 PaletteReference::Foreground(color) => {
 92 |                     let color = self.get_color(color);
 93 |                     write!(out, "color:{color};").unwrap();
 94 |                 }
 95 |                 PaletteReference::WithModifiers(PaletteReferenceWithModifiers {
 96 |                     bg,
 97 |                     fg,
 98 |                     modifiers,
 99 |                 }) => {
100 |                     if let Some(color) = bg {
101 |                         let color = self.get_color(color);
102 |                         write!(out, "background:{color};").unwrap();
103 |                     }
104 | 
105 |                     if let Some(color) = fg {
106 |                         let color = self.get_color(color);
107 |                         write!(out, "color:{color};").unwrap();
108 |                     }
109 | 
110 |                     for modifier in modifiers {
111 |                         match modifier {
112 |                             Modifiers::Underlined => out.push_str("text-decoration:underline;"),
113 |                             Modifiers::Bold => out.push_str("font-weight:bold;"),
114 |                             Modifiers::Italic => out.push_str("font-style:italic;"),
115 |                             Modifiers::CrossedOut => out.push_str("text-decoration:line-through;"),
116 |                             Modifiers::Reversed | Modifiers::Dim => {}
117 |                         }
118 |                     }
119 |                 }
120 |             }
121 | 
122 |             out.push('}');
123 |         }
124 | 
125 |         out
126 |     }
127 | }
128 | 


--------------------------------------------------------------------------------
/src/unified_diff_builder.rs:
--------------------------------------------------------------------------------
  1 | //! Heavily based on [`gix::diff::blob::UnifiedDiffBuilder`] but provides
  2 | //! a callback that can be used for styling the diffs.
  3 | 
  4 | use std::{fmt::Write, ops::Range};
  5 | 
  6 | use gix::diff::blob::{
  7 |     Sink,
  8 |     intern::{InternedInput, Interner, Token},
  9 | };
 10 | 
/// Receives each diff line and appends a (possibly styled) rendering of
/// it to `dst`; lets callers control how added/removed/context lines are
/// formatted.
pub(crate) trait Callback {
    /// Render a line added in the new version.
    fn addition(&mut self, data: &str, dst: &mut String);
    /// Render a line removed from the old version.
    fn remove(&mut self, data: &str, dst: &mut String);
    /// Render an unchanged context line.
    fn context(&mut self, data: &str, dst: &mut String);
}
 16 | 
 17 | impl Callback for &mut C {
 18 |     fn addition(&mut self, data: &str, dst: &mut String) {
 19 |         (*self).addition(data, dst);
 20 |     }
 21 | 
 22 |     fn remove(&mut self, data: &str, dst: &mut String) {
 23 |         (*self).remove(data, dst);
 24 |     }
 25 | 
 26 |     fn context(&mut self, data: &str, dst: &mut String) {
 27 |         (*self).context(data, dst);
 28 |     }
 29 | }
 30 | 
/// A [`Sink`] that creates a textual diff
/// in the format typically output by git or gnu-diff if the `-u` option is used
pub struct UnifiedDiffBuilder<'a, C, W>
where
    C: Callback,
    W: Write,
{
    // Interned tokens (lines) of the pre- and post-image being diffed,
    // plus the interner used to resolve them back to `&str`.
    before: &'a [Token],
    after: &'a [Token],
    interner: &'a Interner<&'a str>,

    // Current position within `before`.
    pos: u32,
    // Start line and accumulated length of the hunk currently being built,
    // for both sides of the diff.
    before_hunk_start: u32,
    after_hunk_start: u32,
    before_hunk_len: u32,
    after_hunk_len: u32,

    // Styling callback, buffer holding the current hunk's rendered lines,
    // and the final output writer.
    callback: C,
    buffer: String,
    dst: W,
}
 52 | 
impl<'a, C, W> UnifiedDiffBuilder<'a, C, W>
where
    C: Callback,
    W: Write,
{
    /// Create a new `UnifiedDiffBuilder` for the given `input`,
    /// that will write its output to the provided implementation of [`Write`].
    pub fn with_writer(input: &'a InternedInput<&'a str>, writer: W, callback: C) -> Self {
        Self {
            before_hunk_start: 0,
            after_hunk_start: 0,
            before_hunk_len: 0,
            after_hunk_len: 0,
            buffer: String::with_capacity(8),
            dst: writer,
            interner: &input.interner,
            before: &input.before,
            after: &input.after,
            callback,
            pos: 0,
        }
    }

    /// Flush the accumulated hunk (if any) to `dst`: emit up to three
    /// trailing context lines, the `@@` hunk header, then the buffered
    /// hunk body.
    fn flush(&mut self) {
        if self.before_hunk_len == 0 && self.after_hunk_len == 0 {
            return;
        }

        // Include up to 3 lines of trailing context, clamped to the input.
        let end = (self.pos + 3).min(u32::try_from(self.before.len()).unwrap_or(u32::MAX));
        self.update_pos(end, end);

        // Hunk positions are emitted 1-based, as git does.
        writeln!(
            &mut self.dst,
            "@@ -{},{} +{},{} @@",
            self.before_hunk_start + 1,
            self.before_hunk_len,
            self.after_hunk_start + 1,
            self.after_hunk_len,
        )
        .unwrap();
        write!(&mut self.dst, "{}", &self.buffer).unwrap();
        self.buffer.clear();
        self.before_hunk_len = 0;
        self.after_hunk_len = 0;
    }

    /// Render the context lines `pos..print_to` into the hunk buffer,
    /// grow both hunk lengths accordingly, then jump `pos` to `move_to`.
    fn update_pos(&mut self, print_to: u32, move_to: u32) {
        for token in &self.before[self.pos as usize..print_to as usize] {
            self.callback
                .context(self.interner[*token], &mut self.buffer);
        }
        let len = print_to - self.pos;
        self.pos = move_to;
        self.before_hunk_len += len;
        self.after_hunk_len += len;
    }
}
110 | 
111 | impl Sink for UnifiedDiffBuilder<'_, C, W>
112 | where
113 |     C: Callback,
114 |     W: Write,
115 | {
116 |     type Out = W;
117 | 
118 |     fn process_change(&mut self, before: Range, after: Range) {
119 |         if before.start - self.pos > 6 {
120 |             self.flush();
121 |             self.pos = before.start - 3;
122 |             self.before_hunk_start = self.pos;
123 |             self.after_hunk_start = after.start - 3;
124 |         }
125 |         self.update_pos(before.start, before.end);
126 |         self.before_hunk_len += before.end - before.start;
127 |         self.after_hunk_len += after.end - after.start;
128 | 
129 |         for token in &self.before[before.start as usize..before.end as usize] {
130 |             self.callback
131 |                 .remove(self.interner[*token], &mut self.buffer);
132 |         }
133 | 
134 |         for token in &self.after[after.start as usize..after.end as usize] {
135 |             self.callback
136 |                 .addition(self.interner[*token], &mut self.buffer);
137 |         }
138 |     }
139 | 
140 |     fn finish(mut self) -> Self::Out {
141 |         self.flush();
142 |         self.dst
143 |     }
144 | }
145 | 


--------------------------------------------------------------------------------
/statics/README.md:
--------------------------------------------------------------------------------
The current favicon.ico was retrieved
with `wget https://www.git-scm.com/favicon.ico`.

If you have a better favicon.ico for this project,
please contact us.
6 | 


--------------------------------------------------------------------------------
/statics/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/w4/rgit/9224aa1a006acb6af6da8cfbee82278612bd05d0/statics/favicon.ico


--------------------------------------------------------------------------------
/statics/sass/_colours.scss:
--------------------------------------------------------------------------------
// Shared colour variables used by the other stylesheets.
$darkModeHighlightColour: #20c20e; // terminal-green accent used in dark mode
$darkModeTextColour: #abb2bf; // default body text colour in dark mode
$asideColour: #888; // muted grey for line numbers and asides


--------------------------------------------------------------------------------
/statics/sass/code.scss:
--------------------------------------------------------------------------------
// Styling for rendered source listings: each line is a `code` element and
// line numbers are generated with CSS counters.
@import 'colours';

pre {
  height: 100%;
  width: 100%;
  margin: 0;
  overflow: auto;
  counter-reset: line;

  // Used for plaintext README rendering: style the first line as a heading.
  &.h2-first-line {
    &::first-line {
      font-family: sans-serif;
      font-size: 1.5em;
      font-weight: bold;
    }
  }

  code {
    counter-increment: line;

    @media (prefers-color-scheme: dark) {
      color: #abb2bf;
    }

    // Line number gutter, not selectable so copy/paste stays clean.
    &::before {
      content: counter(line);
      display: inline-block;
      width: 2em;
      padding: 0 1em 0.3em 0;
      margin-right: .5em;
      color: $asideColour;
      -webkit-user-select: none;
    }
  }
}
36 | 


--------------------------------------------------------------------------------
/statics/sass/diff.scss:
--------------------------------------------------------------------------------
// Styling for unified diff output (commit and diff pages): added/removed/
// context lines get a `+`/`-`/space gutter marker and GitHub-like colours.
pre.diff {
  @media (prefers-color-scheme: dark) {
    color: #abb2bf;
  }
}

// Gutter markers are generated content so they aren't copied with the text.
.diff-add-line::before, .diff-remove-line::before, .diff-context::before {
  display: inline-block;
  color: #888;
  -webkit-user-select: none;
}

.diff-file-header {
  font-weight: bold;
}

.diff-file-header > span > span {
  font-weight: normal;
}

.diff-add-line {
  background: #e6ffec;
  display: block;

  @media (prefers-color-scheme: dark) {
    background: rgba(70, 149, 74, 0.15);
  }

  &::before {
    content: '+ ';
  }
}

.diff-remove-line {
  background: #ffebe9;
  display: block;

  @media (prefers-color-scheme: dark) {
    background: rgba(229, 83, 75, 0.15);
  }

  &::before {
    content: '- ';
  }
}

.diff-context {
  &::before {
    content: '  ';
  }
}


--------------------------------------------------------------------------------
/statics/sass/style.scss:
--------------------------------------------------------------------------------
// Main stylesheet: pulls in the partials, then defines the page chrome
// (header/nav/aside/main/footer), the sidebar file-tree toggle, and the
// dropdown tree widget. Dark mode is handled inline via media queries.
@import 'util';
@import 'diff';
@import 'tables';
@import 'code';
@import 'colours';

@media (prefers-color-scheme: dark) {
  html {
    background: #000;
    color: $darkModeTextColour;
  }

  h2,
  h3 {
    color: darken($darkModeHighlightColour, 20%);
  }
}

body {
  font-family: sans-serif;
  font-size: 0.9rem;
  tab-size: 4;
}

header {
  border-bottom: solid 1px #ccc;
}

nav {
  margin-top: 2rem;
  border-bottom: solid 3px #ccc;
  display: flex;

  // Spacer pushing trailing nav items to the right edge.
  .grow {
    flex-grow: 1;
  }

  @media (prefers-color-scheme: dark) {
    border-bottom-color: $darkModeHighlightColour;
  }

  a {
    padding: 2px 0.75em;
    color: #777;
    font-size: 110%;

    @media (prefers-color-scheme: dark) {
      color: $darkModeHighlightColour;
    }

    &.active {
      color: #000;
      background-color: #ccc;

      @media (prefers-color-scheme: dark) {
        background: $darkModeHighlightColour;
      }
    }
  }
}

aside {
  background: #f7f7f7;
  padding: 0.3rem 2rem;

  @media (prefers-color-scheme: dark) {
    background: #111;
  }
}

main {
  padding: 2rem;
  margin: 0;
  border-bottom: solid 3px #ccc;

  @media (prefers-color-scheme: dark) {
    border-bottom-color: $darkModeHighlightColour;
  }
}

// Two-column layout with a collapsible, user-resizable sidebar; the
// sidebar is hidden until the `.sidebar-toggle` checkbox is checked.
.two-col {
  display: flex;
  gap: 1rem;

  .sidebar {
    display: none;
    overflow: hidden;
    white-space: nowrap;
    resize: horizontal;
    max-width: 50%;
    min-width: 18rem;
    width: 10%;
  }
}

// CSS-only hamburger button: three bars animate into a cross when the
// hidden checkbox is checked, and the sibling sidebar is revealed.
.sidebar-toggle {
  display: inline-block;
  user-select: none;
  cursor: pointer;
  width: 1rem;
  height: 0.75rem;
  position: relative;
  margin-bottom: 1rem;

  span {
    display: block;
    position: absolute;
    height: 0.125rem;
    width: 100%;
    background: #333;
    border-radius: 0.125rem;
    transition: 0.3s ease;

    @media (prefers-color-scheme: dark) {
      background: #abb2bf;
    }

    @media (prefers-reduced-motion) {
      transition-duration: 0s;
    }

    &:nth-of-type(1) {
      top: 0;
    }

    &:nth-of-type(2) {
      top: 0.3rem;
    }

    &:nth-of-type(3) {
      top: 0.6rem;
    }
  }

  input:checked~span:nth-of-type(1) {
    transform: rotate(45deg);
    top: 0.23rem;
  }

  input:checked~span:nth-of-type(2) {
    opacity: 0;
  }

  input:checked~span:nth-of-type(3) {
    transform: rotate(-45deg);
    top: 0.23rem;
  }

  input {
    display: none;
  }

  &:has(input[type="checkbox"]:checked)+.two-col>.sidebar {
    display: block;
  }
}

// CSS-only expandable tree entry used by the file-tree sidebar; a hidden
// checkbox drives both the content reveal and the arrow rotation.
.dropdown-link {
  .dropdown-label {
    display: flex;
    width: 100%;
    border-radius: .25rem;
    overflow: hidden;
    cursor: pointer;

    &:hover {
      background: rgba(0, 0, 255, .1);

      @media (prefers-color-scheme: dark) {
        background: rgba(255, 255, 255, .1);
      }
    }

    .dropdown-toggle {
      font-size: 1.25rem;
      padding: .2rem .3rem;
      cursor: pointer;
      user-select: none;
      display: flex;
      align-items: center;

      &:hover {
        background: rgba(0, 0, 255, .1);

        @media (prefers-color-scheme: dark) {
          background: rgba(255, 255, 255, .1);
        }
      }
    }
  }

  .link {
    display: inline-block;
    padding: .35rem .5rem;
    overflow: hidden;
    text-overflow: ellipsis;
    width: 100%;
    text-decoration: none;
    color: inherit;

    &.selected {
      background: rgba(0, 0, 255, .1);

      @media (prefers-color-scheme: dark) {
        background: rgba(0, 255, 0, .2);
      }
    }
  }

  .dropdown-content {
    border-left: .15rem solid rgba(0, 0, 255, .1);
    padding-left: .4rem;
    margin-left: .4rem;
    display: none;

    @media (prefers-color-scheme: dark) {
      border-left: .15rem solid rgba(0, 255, 0, .2);
    }
  }

  input[type="checkbox"] {
    display: none;
  }

  .dropdown-label:has(label input[type="checkbox"]:checked)+.dropdown-content {
    display: block;
  }

  .dropdown-toggle span {
    display: inline-block;
    transition: transform 0.2s;

    @media (prefers-reduced-motion) {
      transition-duration: 0s;
    }
  }

  input[type="checkbox"]:checked+.dropdown-toggle span {
    transform: rotate(90deg);
  }
}

// Visually hidden but kept in the accessibility tree (screen readers).
.sr-only {
  position: absolute;
  width: 1px;
  height: 1px;
  padding: 0;
  margin: -1px;
  overflow: hidden;
  clip: rect(0, 0, 0, 0);
  white-space: nowrap;
  border-width: 0;
}

footer {
  margin-top: 0.5em;
  text-align: center;
  font-size: 80%;
  color: #ccc;

  @media (prefers-color-scheme: dark) {
    color: $darkModeTextColour;
  }
}

a {
  text-decoration: none;
  color: blue;

  @media (prefers-color-scheme: dark) {
    color: darken($darkModeHighlightColour, 10%);
  }

  &.no-style {
    color: inherit;

    @media (prefers-color-scheme: dark) {
      color: darken($darkModeHighlightColour, 10%);
    }
  }

  &:hover {
    text-decoration: underline;
  }
}


--------------------------------------------------------------------------------
/statics/sass/tables.scss:
--------------------------------------------------------------------------------
// Table styling shared by the repository index, log, refs and commit pages.
@import 'colours';

// Horizontal-scroll wrapper for wide tables on narrow viewports.
.table-responsive {
  display: block;
  width: 100%;
  overflow-x: auto;
}

table {
  @media (prefers-color-scheme: dark) {
    color: darken($darkModeHighlightColour, 20%);
  }

  border-collapse: collapse;

  th, td {
    white-space: nowrap;
    padding-right: 10px;

    &:last-of-type {
      padding-right: 0;
    }
  }

  th {
    text-align: left;
  }
}

// Repository listing on the index page: zebra striping, section headers,
// and indented entries for repositories grouped under a parent path.
table.repositories {
  width: 100%;

  a {
    color: black;

    @media (prefers-color-scheme: dark) {
      color: darken($darkModeHighlightColour, 15%);
    }

    &:hover {
      color: #00f;

      @media (prefers-color-scheme: dark) {
        color: darken($darkModeHighlightColour, 15%);
        text-decoration: underline;
      }
    }
  }

  tbody {
    tr {
      height: 1.3rem;
      vertical-align: middle;

      &:nth-child(odd) {
        background: #f7f7f7;

        @media (prefers-color-scheme: dark) {
          background: #111;
        }
      }

      &:nth-child(even) {
        background: #e1e1e1;

        @media (prefers-color-scheme: dark) {
          background: none;
        }
      }

      &.no-background {
        background: none;
      }

      &.has-parent td:first-of-type {
        padding-left: 1rem;
      }

      td.repo-section {
        font-style: italic;
        color: #888;
      }

      &.separator {
        background: none !important;
        height: 1rem;
      }
    }

    pre {
      margin: 0;
    }
  }

  .nested-tree {
    color: blue !important;
    font-weight: bold;

    @media (prefers-color-scheme: dark) {
      color: $darkModeTextColour !important;
    }
  }
}

table.commit-info {
  td, th {
    padding: 0.1em 1em 0.1em 0.1em;
  }
}
110 | 


--------------------------------------------------------------------------------
/statics/sass/util.scss:
--------------------------------------------------------------------------------
// Small single-purpose utility classes.
.mt-2 {
  margin-top: 2rem;
}

.text-center {
  text-align: center;
}

// Opt out of the global link hover underline.
.no-hover:hover {
  text-decoration: none;
}
12 | 


--------------------------------------------------------------------------------
/taplo.toml:
--------------------------------------------------------------------------------
# taplo (TOML formatter) configuration.
[formatting]
# Keep keys in the order they were written by default.
reorder_keys = false

# ... except in Cargo.toml dependency tables, which are kept alphabetised.
[[rule]]
include = ["**/Cargo.toml"]
keys = ["dependencies"]

[rule.formatting]
reorder_keys = true
10 | 


--------------------------------------------------------------------------------
/templates/base.html:
--------------------------------------------------------------------------------
 1 | 
 2 | 
 3 | 
 4 |     
 5 |     
 6 |     {% block title %}rgit{% endblock %}
 7 |     
 8 |     {%- block head -%}{%- endblock %}
 9 | 
10 | 
11 | 
12 | 
13 |

14 | 🏡 15 | {% block header -%}Git repository browser{%- endblock %} 16 |

17 |
18 | 19 | {%- block nav -%} 20 | 31 | {%- endblock -%} 32 | 33 | 36 | 37 |
38 | {%- block content %}{% endblock -%} 39 |
40 | 41 |
42 | generated by rgit v{{ crate::CRATE_VERSION }} 43 | at {{ time::OffsetDateTime::now_utc()|format_time }} 44 | in {{ "{:?}"|format(crate::layers::logger::REQ_TIMESTAMP.get().elapsed()) }} 45 |
46 | 47 | 48 | -------------------------------------------------------------------------------- /templates/index.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | 3 | {% block content %} 4 |
5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | {%- for (path, repositories) in self.take_iter() %} 17 | {%- if !path.is_empty() %} 18 | 19 | {%- endif -%} 20 | 21 | {%- for repository in repositories %} 22 | {% set repository = repository.1.get() %} 23 | 24 | 29 | 38 | 45 | 52 | 53 | {%- endfor -%} 54 | {%- endfor %} 55 | 56 |
NameDescriptionOwnerIdle
{{ path }}
25 | 26 | {{- repository.name -}} 27 | 28 | 30 | 31 | {%- if let Some(description) = repository.description.as_ref() -%} 32 | {{- description -}} 33 | {%- else -%} 34 | Unnamed repository; edit this file 'description' to name the repository. 35 | {%- endif -%} 36 | 37 | 39 | 40 | {%- if let Some(owner) = repository.owner.as_ref() -%} 41 | {{- owner -}} 42 | {%- endif -%} 43 | 44 | 46 | 47 | 50 | 51 |
57 |
58 | {% endblock %} 59 | -------------------------------------------------------------------------------- /templates/partials/file_tree.html: -------------------------------------------------------------------------------- 1 | 26 | -------------------------------------------------------------------------------- /templates/repo/about.html: -------------------------------------------------------------------------------- 1 | {% extends "repo/base.html" %} 2 | 3 | {% block head -%} 4 | {%- if let Some(readme) = readme -%} 5 | {%- if readme.0 == crate::git::ReadmeFormat::Markdown %} 6 | 7 | 8 | {%- endif -%} 9 | {%- endif -%} 10 | {% endblock %} 11 | 12 | {% block about_nav_class %}active{% endblock %} 13 | 14 | {% block content %} 15 | {% if let Some(readme) = readme -%} 16 | {%- match readme.0 -%} 17 | {%- when crate::git::ReadmeFormat::Markdown -%} 18 | {{ readme.1|safe }} 19 | {%- when crate::git::ReadmeFormat::Plaintext -%} 20 |
{{ readme.1 }}
21 | {%- endmatch -%} 22 | {%- else -%} 23 | No README in repository HEAD. 24 | {%- endif %} 25 | {% endblock %} 26 | -------------------------------------------------------------------------------- /templates/repo/base.html: -------------------------------------------------------------------------------- 1 | {% import "macros/link.html" as link %} 2 | {% extends "../base.html" %} 3 | 4 | {% block title %}{{ repo.display() }}{% endblock %} 5 | 6 | {%- block header -%} 7 | index : {{ repo.display() }} 8 | {%- endblock -%} 9 | 10 | {% block nav %} 11 | 28 | {% endblock %} 29 | -------------------------------------------------------------------------------- /templates/repo/commit.html: -------------------------------------------------------------------------------- 1 | {% import "macros/link.html" as link %} 2 | {% extends "repo/base.html" %} 3 | 4 | {% block head %} 5 | 6 | 7 | {%- endblock %} 8 | 9 | {% block commit_nav_class %}active{% endblock %} 10 | 11 | {% block content %} 12 |
13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | {%- for parent in commit.get().parents() %} 34 | 35 | 36 | 37 | 38 | {%- endfor %} 39 | 40 | 41 | 42 | 43 | 44 |
author{{ commit.get().author().name() }} <{{ commit.get().author().email() }}>{{ commit.get().author().time() }}
committer{{ commit.get().committer().name() }} <{{ commit.get().committer().email() }}>{{ commit.get().committer().time() }}
commit
{{ commit.get().oid() }} [patch]
tree
{{ commit.get().tree() }}
parent
{{ parent }}
download
{{ id.as_deref().unwrap_or(dl_branch.as_ref()) }}.tar.gz
45 |
46 | 47 |

{{ commit.get().summary() }}

48 |
{{ commit.get().body() }}
49 | 50 |

Diff

51 |
{{ commit.diff_stats|safe }}
52 | {{ commit.diff|safe }}
53 | {% endblock %} 54 | -------------------------------------------------------------------------------- /templates/repo/diff.html: -------------------------------------------------------------------------------- 1 | {% extends "repo/base.html" %} 2 | 3 | {%- block head %} 4 | 5 | 6 | {%- endblock -%} 7 | 8 | {% block diff_nav_class %}active{% endblock %} 9 | 10 | {% block content %} 11 |

Diff

12 |
{{ commit.diff_stats|safe }}
13 | {{ commit.diff|safe }}
14 | {% endblock %} 15 | -------------------------------------------------------------------------------- /templates/repo/file.html: -------------------------------------------------------------------------------- 1 | {% import "macros/link.html" as link %} 2 | {% import "macros/breadcrumbs.html" as breadcrumbs %} 3 | {% import "macros/sidebar_toggle.html" as sidebar_toggle %} 4 | {% extends "repo/base.html" %} 5 | 6 | {% block head %} 7 | 8 | 9 | {%- endblock %} 10 | 11 | {% block tree_nav_class %}active{% endblock %} 12 | 13 | {% block subnav %} 14 | {% call breadcrumbs::breadcrumbs(repo_path, filters::branch_query(branch.as_deref())) %} 15 | {% endblock %} 16 | 17 | {% block extra_nav_links %} 18 | plain 19 | {% endblock %} 20 | 21 | {% block content %} 22 | {% call sidebar_toggle::sidebar_toggle("Open file browser") %} 23 | 24 |
25 | 28 | 29 |
30 |
31 |             {%- match file.content -%}
32 |                 {%- when crate::git::Content::Text with (content) -%}
33 |                     {{- content|safe -}}
34 |                 {%- when crate::git::Content::Binary with (_) -%}
35 |                     <binary file not displayed>
36 |             {%- endmatch -%}
37 |         
38 |
39 |
40 | {% endblock %} 41 | -------------------------------------------------------------------------------- /templates/repo/log.html: -------------------------------------------------------------------------------- 1 | {% import "macros/refs.html" as refs %} 2 | {% import "macros/link.html" as link %} 3 | {% extends "repo/base.html" %} 4 | 5 | {% block log_nav_class %}active{% endblock %} 6 | 7 | {% block content %} 8 |
9 | 10 | {% call refs::commit_table(commits) %} 11 |
12 |
13 | 14 | {% if let Some(next_offset) = next_offset %} 15 |
16 | [next] 17 |
18 | {% endif %} 19 | {% endblock %} 20 | -------------------------------------------------------------------------------- /templates/repo/macros/breadcrumbs.html: -------------------------------------------------------------------------------- 1 | {%- macro breadcrumbs(repo_path, query) -%} 2 | path:  3 | {{ repo.display() }} 4 | {%- for child in repo_path.ancestors().collect_vec().into_iter().rev() -%} 5 | {%- if let Some(file_name) = child.file_name() -%} 6 | / 7 | {{- file_name.to_string_lossy() -}} 8 | 9 | {%- endif -%} 10 | {%- endfor -%} 11 | {%- endmacro -%} -------------------------------------------------------------------------------- /templates/repo/macros/link.html: -------------------------------------------------------------------------------- 1 | {%- macro maybe_branch(branch) -%}{% if let Some(branch) = branch %}?h={{ branch }}{% endif %}{%- endmacro -%} 2 | 3 | {%- macro maybe_branch_suffix(branch) -%}{% if let Some(branch) = branch %}&h={{ branch }}{% endif %}{%- endmacro -%} 4 | -------------------------------------------------------------------------------- /templates/repo/macros/refs.html: -------------------------------------------------------------------------------- 1 | {%- macro branch_table(branches) -%} 2 | 3 | 4 | Branch 5 | Commit message 6 | Author 7 | Age 8 | 9 | 10 | 11 | 12 | {% for (name, commit) in branches -%} 13 | 14 | {{ name }} 15 | {{ commit.get().summary }} 16 | 17 | 18 | {{ commit.get().author.name }} 19 | 20 | 21 | 24 | 25 | 26 | {% endfor -%} 27 | 28 | {%- endmacro -%} 29 | 30 | {%- macro tag_table(tags) -%} 31 | 32 | 33 | Tag 34 | Download 35 | Author 36 | Age 37 | 38 | 39 | 40 | 41 | {% for (name, tag) in tags -%} 42 | 43 | {{- name.get() -}} 44 | {{- name.get() -}}.tar.gz 45 | 46 | {% if let Some(tagger) = tag.get().tagger.as_ref() -%} 47 | 48 | {{ tagger.name }} 49 | {%- endif %} 50 | 51 | 52 | {% if let Some(tagger) = tag.get().tagger.as_ref() -%} 53 | 56 | {%- endif %} 57 | 58 | 59 | {% endfor -%} 60 | 61 | {%- 
endmacro -%} 62 | 63 | {%- macro commit_table(commits) -%} 64 | 65 | 66 | Age 67 | Commit message 68 | Author 69 | 70 | 71 | 72 | 73 | {% for commit in commits -%} 74 | {% set commit = commit.get() %} 75 | 76 | 77 | 80 | 81 | {{ commit.summary }} 82 | 83 | 84 | {{ commit.author.name }} 85 | 86 | 87 | {% endfor -%} 88 | 89 | {%- endmacro -%} 90 | -------------------------------------------------------------------------------- /templates/repo/macros/sidebar_toggle.html: -------------------------------------------------------------------------------- 1 | {%- macro sidebar_toggle(screenreader_text) -%} 2 | 9 | {%- endmacro -%} 10 | -------------------------------------------------------------------------------- /templates/repo/refs.html: -------------------------------------------------------------------------------- 1 | {% import "macros/refs.html" as refs %} 2 | {% extends "repo/base.html" %} 3 | 4 | {% block refs_nav_class %}active{% endblock %} 5 | 6 | {% block content %} 7 |
8 | 9 | {% call refs::branch_table(refs.heads) %} 10 | 11 | {%- if !refs.tags.is_empty() %} 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | {%- call refs::tag_table(refs.tags) -%} 22 | {%- endif %} 23 |
24 |
25 | {% endblock %} 26 | -------------------------------------------------------------------------------- /templates/repo/summary.html: -------------------------------------------------------------------------------- 1 | {% import "macros/refs.html" as refs %} 2 | {% extends "repo/base.html" %} 3 | 4 | {% block summary_nav_class %}active{% endblock %} 5 | 6 | {% block content %} 7 |
8 | 9 | {% call refs::branch_table(refs.heads.iter().take(10)) %} 10 | {%- if refs.heads.len() > 10 -%} 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | {%- endif -%} 20 | 21 | {%- if !refs.tags.is_empty() -%} 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | {%- call refs::tag_table(refs.tags.iter().take(10)) -%} 32 | {%- if refs.tags.len() > 10 -%} 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | {%- endif -%} 42 | {%- endif %} 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | {% call refs::commit_table(commit_list.iter().take(10)) %} 54 | {% if commit_list.len() > 10 %} 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | {%- endif %} 64 | 65 | {% if exported %} 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | 85 | 86 | 87 | {%- endif %} 88 |
[...]
[...]
[...]
Clone
81 | 82 | https://{{ host }}/{{ repo.display() }} 83 | 84 |
89 |
90 | {% endblock %} 91 | -------------------------------------------------------------------------------- /templates/repo/tag.html: -------------------------------------------------------------------------------- 1 | {% import "macros/link.html" as link %} 2 | {% extends "repo/base.html" %} 3 | 4 | {% block content %} 5 |
6 | 7 | 8 | 9 | 10 | 11 | 12 | {% if let Some(tagger) = tag.get().tagger %} 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | {% endif %} 22 | {% if let Some(tagged_object) = tag.get().tagged_object %} 23 | 24 | 25 | 33 | 34 | {% endif %} 35 | 36 | 37 | 40 | 41 | 42 |
tag name{{ tag.get().name }}
tag date{{ tagger.time() }}
tagged by{{ tagger.name() }} <{{ tagger.email() }}>
tagged object 26 | {% match tagged_object %} 27 | {% when crate::git::TaggedObject::Commit with (commit) %} 28 | commit {{ commit|truncate(10) }}... 29 | {% when crate::git::TaggedObject::Tree with (tree) %} 30 | tree {{ tree }} 31 | {% endmatch %} 32 |
download 38 |
{{ tag.get().name }}.tar.gz
39 |
43 |
44 | 45 |
{{ tag.get().message }}
46 | {% endblock %} 47 | -------------------------------------------------------------------------------- /templates/repo/tree.html: -------------------------------------------------------------------------------- 1 | {% import "macros/breadcrumbs.html" as breadcrumbs %} 2 | {% import "macros/sidebar_toggle.html" as sidebar_toggle %} 3 | {% extends "repo/base.html" %} 4 | 5 | {% block tree_nav_class %}active{% endblock %} 6 | 7 | {% block subnav %} 8 | {% call breadcrumbs::breadcrumbs(repo_path, query) %} 9 | {% endblock %} 10 | 11 | {% block content %} 12 | {% call sidebar_toggle::sidebar_toggle("Open file browser") %} 13 |
14 | 17 | 18 |
19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | {% for (name, name_split, item) in items -%} 29 | 30 | 33 | {% set local_name = name.get()[*name_split..] -%} 34 | {% set local_name = local_name.strip_prefix('/').unwrap_or(local_name) -%} 35 | {% match item.get().kind -%} 36 | {%- when ArchivedTreeItemKind::Tree -%} 37 | 40 | {%- when ArchivedTreeItemKind::File -%} 41 | 44 | {%- when ArchivedTreeItemKind::Submodule with (submodule) -%} 45 | 48 | {%- endmatch %} 49 | 50 | {% endfor -%} 51 | 52 |
ModeName
31 |
{{ item.get().mode.to_native()|file_perms }}
32 |
38 |
{{ local_name }}
39 |
42 |
{{ local_name }}
43 |
46 |
🔗 {{ local_name }} @ {{ submodule.oid|hex }}
47 |
53 |
54 |
55 | {% endblock %} 56 | -------------------------------------------------------------------------------- /themes/README: -------------------------------------------------------------------------------- 1 | These themes are sourced from Helix in the same format they publish them in. 2 | 3 | https://github.com/helix-editor/helix/tree/82dd96369302f60a9c83a2d54d021458f82bcd36/runtime/themes -------------------------------------------------------------------------------- /themes/github_light.toml: -------------------------------------------------------------------------------- 1 | # Author : OwOSwordsman 2 | # An unofficial GitHub theme, generated using colors from: https://primer.style/primitives/colors 3 | # Credit goes to the original VSCode theme: https://github.com/primer/github-vscode-theme 4 | # Only the Light and Dark variants were specifically tested 5 | 6 | attribute = "fg.default" 7 | keyword = "scale.red.5" 8 | "keyword.directive" = "scale.red.5" # -- preprocessor comments (#if in C) 9 | namespace = "scale.orange.6" 10 | punctuation = "fg.default" 11 | "punctuation.delimiter" = "fg.default" 12 | operator = "scale.blue.8" 13 | special = "scale.blue.8" 14 | "variable.other.member" = "scale.blue.8" 15 | variable = "fg.default" 16 | "variable.parameter" = "scale.orange.6" 17 | "variable.builtin" = "scale.red.5" 18 | type = "scale.orange.6" 19 | "type.builtin" = "scale.blue.6" 20 | constructor = "done.fg" 21 | function = "done.fg" 22 | "function.macro" = "done.fg" 23 | tag = "scale.green.6" 24 | comment = "fg.muted" 25 | constant = "scale.blue.6" 26 | "constant.builtin" = "scale.blue.6" 27 | string = "scale.blue.8" 28 | "constant.numeric" = "scale.blue.6" 29 | "constant.character.escape" = "scale.blue.6" 30 | # used for lifetimes 31 | label = "scale.red.5" 32 | 33 | "markup.heading" = "scale.blue.6" 34 | "markup.bold" = { modifiers = ["bold"] } 35 | "markup.italic" = { modifiers = ["italic"] } 36 | "markup.strikethrough" = { modifiers = ["crossed_out"] } 37 | 
"markup.link.url" = { modifiers = ["underlined"] } 38 | "markup.link.text" = { fg = "scale.blue.8", modifiers = ["underlined"] } 39 | "markup.raw" = "scale.blue.6" 40 | 41 | "diff.plus" = "open.fg" 42 | "diff.minus" = "closed.fg" 43 | "diff.delta" = "attention.fg" 44 | 45 | "ui.background" = { bg = "canvas.default" } 46 | "ui.background.separator" = { fg = "fg.subtle" } 47 | "ui.linenr" = { fg = "fg.subtle" } 48 | "ui.linenr.selected" = { fg = "fg.default" } 49 | "ui.statusline" = { fg = "fg.muted", bg = "neutral.subtle" } 50 | "ui.statusline.active" = { fg = "fg.default", bg = "canvas.default", underline = { color = "scale.coral.3", style = "line" } } 51 | "ui.statusline.normal" = { fg = "fg.default", bg = "accent.muted" } 52 | "ui.statusline.insert" = { fg = "fg.default", bg = "attention.muted" } 53 | "ui.statusline.select" = { fg = "fg.default", bg = "sponsors.muted" } 54 | "ui.popup" = { bg = "scale.gray.0" } 55 | "ui.popup.info" = { fg = "fg.default", bg = "scale.gray.0" } 56 | "ui.window" = { fg = "border.default" } 57 | "ui.help" = { fg = "fg.default", bg = "scale.gray.0" } 58 | 59 | "ui.text" = { fg = "fg.muted" } 60 | "ui.text.focus" = { fg = "fg.default" } 61 | "ui.text.inactive" = "fg.subtle" 62 | "ui.virtual" = { fg = "scale.gray.2" } 63 | "ui.virtual.ruler" = { bg = "canvas.subtle" } 64 | 65 | "ui.selection" = { bg = "scale.blue.0" } 66 | "ui.selection.primary" = { bg = "scale.blue.1" } 67 | "ui.cursor.match" = { fg = "attention.fg", modifiers = [ 68 | "bold", 69 | ], underline = { style = "line" } } 70 | "ui.cursor" = { modifiers = ["reversed"] } 71 | "ui.cursorline.primary" = { bg = "canvas.subtle" } 72 | 73 | "ui.menu" = { fg = "fg.default", bg = "scale.gray.0" } 74 | "ui.menu.selected" = { bg = "scale.gray.1" } 75 | "ui.menu.scroll" = { fg = "scale.gray.2", bg = "scale.gray.0" } 76 | 77 | "diagnostic.hint" = { underline = { color = "success.fg", style = "curl" } } 78 | "diagnostic.info" = { underline = { color = "accent.fg", style = "curl" } } 79 | 
"diagnostic.warning" = { underline = { color = "attention.fg", style = "curl" } } 80 | "diagnostic.error" = { underline = { color = "danger.fg", style = "curl" } } 81 | "diagnostic.unnecessary" = { modifiers = ["dim"] } 82 | "diagnostic.deprecated" = { modifiers = ["crossed_out"] } 83 | 84 | hint = "success.fg" 85 | info = "accent.fg" 86 | warning = "attention.fg" 87 | error = "danger.fg" 88 | 89 | [palette] 90 | "accent.emphasis" = "#0969da" 91 | "accent.fg" = "#0969da" 92 | "accent.muted" = "#54aeff66" 93 | "accent.subtle" = "#ddf4ff" 94 | "attention.emphasis" = "#bf8700" 95 | "attention.fg" = "#9a6700" 96 | "attention.muted" = "#d4a72c66" 97 | "attention.subtle" = "#fff8c5" 98 | "border.default" = "#d0d7de" 99 | "border.muted" = "#d8dee4" 100 | "border.subtle" = "#1b1f2426" 101 | "canvas.default" = "#ffffff" 102 | "canvas.inset" = "#f6f8fa" 103 | "canvas.overlay" = "#ffffff" 104 | "canvas.subtle" = "#f6f8fa" 105 | "closed.emphasis" = "#cf222e" 106 | "closed.fg" = "#cf222e" 107 | "closed.muted" = "#ff818266" 108 | "closed.subtle" = "#ffebe9" 109 | "danger.emphasis" = "#cf222e" 110 | "danger.fg" = "#cf222e" 111 | "danger.muted" = "#ff818266" 112 | "danger.subtle" = "#ffebe9" 113 | "done.emphasis" = "#8250df" 114 | "done.fg" = "#8250df" 115 | "done.muted" = "#c297ff66" 116 | "done.subtle" = "#fbefff" 117 | "fg.default" = "#24292f" 118 | "fg.muted" = "#57606a" 119 | "fg.onEmphasis" = "#ffffff" 120 | "fg.subtle" = "#6e7781" 121 | "neutral.emphasis" = "#6e7781" 122 | "neutral.emphasisPlus" = "#24292f" 123 | "neutral.muted" = "#afb8c133" 124 | "neutral.subtle" = "#eaeef280" 125 | "open.emphasis" = "#2da44e" 126 | "open.fg" = "#1a7f37" 127 | "open.muted" = "#4ac26b66" 128 | "open.subtle" = "#dafbe1" 129 | "scale.black" = "#1b1f24" 130 | "scale.blue.0" = "#ddf4ff" 131 | "scale.blue.1" = "#b6e3ff" 132 | "scale.blue.2" = "#80ccff" 133 | "scale.blue.3" = "#54aeff" 134 | "scale.blue.4" = "#218bff" 135 | "scale.blue.5" = "#0969da" 136 | "scale.blue.6" = "#0550ae" 137 | 
"scale.blue.7" = "#033d8b" 138 | "scale.blue.8" = "#0a3069" 139 | "scale.blue.9" = "#002155" 140 | "scale.coral.0" = "#fff0eb" 141 | "scale.coral.1" = "#ffd6cc" 142 | "scale.coral.2" = "#ffb4a1" 143 | "scale.coral.3" = "#fd8c73" 144 | "scale.coral.4" = "#ec6547" 145 | "scale.coral.5" = "#c4432b" 146 | "scale.coral.6" = "#9e2f1c" 147 | "scale.coral.7" = "#801f0f" 148 | "scale.coral.8" = "#691105" 149 | "scale.coral.9" = "#510901" 150 | "scale.gray.0" = "#f6f8fa" 151 | "scale.gray.1" = "#eaeef2" 152 | "scale.gray.2" = "#d0d7de" 153 | "scale.gray.3" = "#afb8c1" 154 | "scale.gray.4" = "#8c959f" 155 | "scale.gray.5" = "#6e7781" 156 | "scale.gray.6" = "#57606a" 157 | "scale.gray.7" = "#424a53" 158 | "scale.gray.8" = "#32383f" 159 | "scale.gray.9" = "#24292f" 160 | "scale.green.0" = "#dafbe1" 161 | "scale.green.1" = "#aceebb" 162 | "scale.green.2" = "#6fdd8b" 163 | "scale.green.3" = "#4ac26b" 164 | "scale.green.4" = "#2da44e" 165 | "scale.green.5" = "#1a7f37" 166 | "scale.green.6" = "#116329" 167 | "scale.green.7" = "#044f1e" 168 | "scale.green.8" = "#003d16" 169 | "scale.green.9" = "#002d11" 170 | "scale.orange.0" = "#fff1e5" 171 | "scale.orange.1" = "#ffd8b5" 172 | "scale.orange.2" = "#ffb77c" 173 | "scale.orange.3" = "#fb8f44" 174 | "scale.orange.4" = "#e16f24" 175 | "scale.orange.5" = "#bc4c00" 176 | "scale.orange.6" = "#953800" 177 | "scale.orange.7" = "#762c00" 178 | "scale.orange.8" = "#5c2200" 179 | "scale.orange.9" = "#471700" 180 | "scale.pink.0" = "#ffeff7" 181 | "scale.pink.1" = "#ffd3eb" 182 | "scale.pink.2" = "#ffadda" 183 | "scale.pink.3" = "#ff80c8" 184 | "scale.pink.4" = "#e85aad" 185 | "scale.pink.5" = "#bf3989" 186 | "scale.pink.6" = "#99286e" 187 | "scale.pink.7" = "#772057" 188 | "scale.pink.8" = "#611347" 189 | "scale.pink.9" = "#4d0336" 190 | "scale.purple.0" = "#fbefff" 191 | "scale.purple.1" = "#ecd8ff" 192 | "scale.purple.2" = "#d8b9ff" 193 | "scale.purple.3" = "#c297ff" 194 | "scale.purple.4" = "#a475f9" 195 | "scale.purple.5" = "#8250df" 196 | 
"scale.purple.6" = "#6639ba" 197 | "scale.purple.7" = "#512a97" 198 | "scale.purple.8" = "#3e1f79" 199 | "scale.purple.9" = "#2e1461" 200 | "scale.red.0" = "#ffebe9" 201 | "scale.red.1" = "#ffcecb" 202 | "scale.red.2" = "#ffaba8" 203 | "scale.red.3" = "#ff8182" 204 | "scale.red.4" = "#fa4549" 205 | "scale.red.5" = "#cf222e" 206 | "scale.red.6" = "#a40e26" 207 | "scale.red.7" = "#82071e" 208 | "scale.red.8" = "#660018" 209 | "scale.red.9" = "#4c0014" 210 | "scale.white" = "#ffffff" 211 | "scale.yellow.0" = "#fff8c5" 212 | "scale.yellow.1" = "#fae17d" 213 | "scale.yellow.2" = "#eac54f" 214 | "scale.yellow.3" = "#d4a72c" 215 | "scale.yellow.4" = "#bf8700" 216 | "scale.yellow.5" = "#9a6700" 217 | "scale.yellow.6" = "#7d4e00" 218 | "scale.yellow.7" = "#633c01" 219 | "scale.yellow.8" = "#4d2d00" 220 | "scale.yellow.9" = "#3b2300" 221 | "severe.emphasis" = "#bc4c00" 222 | "severe.fg" = "#bc4c00" 223 | "severe.muted" = "#fb8f4466" 224 | "severe.subtle" = "#fff1e5" 225 | "sponsors.emphasis" = "#bf3989" 226 | "sponsors.fg" = "#bf3989" 227 | "sponsors.muted" = "#ff80c866" 228 | "sponsors.subtle" = "#ffeff7" 229 | "success.emphasis" = "#2da44e" 230 | "success.fg" = "#1a7f37" 231 | "success.muted" = "#4ac26b66" 232 | "success.subtle" = "#dafbe1" 233 | -------------------------------------------------------------------------------- /themes/onedark.toml: -------------------------------------------------------------------------------- 1 | # Author : Gokul Soumya 2 | 3 | "tag" = { fg = "red" } 4 | "attribute" = { fg = "yellow" } 5 | "comment" = { fg = "light-gray", modifiers = ["italic"] } 6 | "constant" = { fg = "cyan" } 7 | "constant.numeric" = { fg = "gold" } 8 | "constant.builtin" = { fg = "gold" } 9 | "constant.character.escape" = { fg = "gold" } 10 | "constructor" = { fg = "blue" } 11 | "function" = { fg = "blue" } 12 | "function.builtin" = { fg = "blue" } 13 | "function.macro" = { fg = "purple" } 14 | "keyword" = { fg = "red" } 15 | "keyword.control" = { fg = "purple" } 16 | 
"keyword.control.import" = { fg = "red" } 17 | "keyword.directive" = { fg = "purple" } 18 | "label" = { fg = "purple" } 19 | "namespace" = { fg = "blue" } 20 | "operator" = { fg = "purple" } 21 | "keyword.operator" = { fg = "purple" } 22 | "special" = { fg = "blue" } 23 | "string" = { fg = "green" } 24 | "type" = { fg = "yellow" } 25 | # "variable" = { fg = "blue" } 26 | "variable.builtin" = { fg = "blue" } 27 | "variable.parameter" = { fg = "red" } 28 | "variable.other.member" = { fg = "red" } 29 | 30 | "markup.heading" = { fg = "red" } 31 | "markup.raw.inline" = { fg = "green" } 32 | "markup.bold" = { fg = "gold", modifiers = ["bold"] } 33 | "markup.italic" = { fg = "purple", modifiers = ["italic"] } 34 | "markup.strikethrough" = { modifiers = ["crossed_out"] } 35 | "markup.list" = { fg = "red" } 36 | "markup.quote" = { fg = "yellow" } 37 | "markup.link.url" = { fg = "cyan", modifiers = ["underlined"] } 38 | "markup.link.text" = { fg = "purple" } 39 | 40 | "diff.plus" = "green" 41 | "diff.delta" = "gold" 42 | "diff.minus" = "red" 43 | 44 | "diagnostic.info".underline = { color = "blue", style = "curl" } 45 | "diagnostic.hint".underline = { color = "green", style = "curl" } 46 | "diagnostic.warning".underline = { color = "yellow", style = "curl" } 47 | "diagnostic.error".underline = { color = "red", style = "curl" } 48 | "diagnostic.unnecessary" = { modifiers = ["dim"] } 49 | "diagnostic.deprecated" = { modifiers = ["crossed_out"] } 50 | "info" = { fg = "blue", modifiers = ["bold"] } 51 | "hint" = { fg = "green", modifiers = ["bold"] } 52 | "warning" = { fg = "yellow", modifiers = ["bold"] } 53 | "error" = { fg = "red", modifiers = ["bold"] } 54 | 55 | "ui.background" = { bg = "black" } 56 | "ui.virtual" = { fg = "faint-gray" } 57 | "ui.virtual.indent-guide" = { fg = "faint-gray" } 58 | "ui.virtual.whitespace" = { fg = "light-gray" } 59 | "ui.virtual.ruler" = { bg = "gray" } 60 | "ui.virtual.inlay-hint" = { fg = "light-gray" } 61 | "ui.virtual.jump-label" = { fg = 
"light-gray", modifiers = ["bold"] } 62 | 63 | "ui.cursor" = { fg = "white", modifiers = ["reversed"] } 64 | "ui.cursor.primary" = { fg = "white", modifiers = ["reversed"] } 65 | "ui.cursor.match" = { fg = "blue", modifiers = ["underlined"] } 66 | 67 | "ui.selection" = { bg = "faint-gray" } 68 | "ui.selection.primary" = { bg = "gray" } 69 | "ui.cursorline.primary" = { bg = "light-black" } 70 | 71 | "ui.highlight" = { bg = "gray" } 72 | "ui.highlight.frameline" = { bg = "#97202a" } 73 | 74 | "ui.linenr" = { fg = "linenr" } 75 | "ui.linenr.selected" = { fg = "white" } 76 | 77 | "ui.statusline" = { fg = "white", bg = "light-black" } 78 | "ui.statusline.inactive" = { fg = "light-gray", bg = "light-black" } 79 | "ui.statusline.normal" = { fg = "light-black", bg = "blue", modifiers = [ 80 | "bold", 81 | ] } 82 | "ui.statusline.insert" = { fg = "light-black", bg = "green", modifiers = [ 83 | "bold", 84 | ] } 85 | "ui.statusline.select" = { fg = "light-black", bg = "purple", modifiers = [ 86 | "bold", 87 | ] } 88 | 89 | "ui.bufferline" = { fg = "light-gray", bg = "light-black" } 90 | "ui.bufferline.active" = { fg = "light-black", bg = "blue", underline = { color = "light-black", style = "line" } } 91 | "ui.bufferline.background" = { bg = "light-black" } 92 | 93 | "ui.text" = { fg = "white" } 94 | "ui.text.focus" = { fg = "white", bg = "light-black", modifiers = ["bold"] } 95 | 96 | "ui.help" = { fg = "white", bg = "gray" } 97 | "ui.popup" = { bg = "gray" } 98 | "ui.window" = { fg = "gray" } 99 | "ui.menu" = { fg = "white", bg = "gray" } 100 | "ui.menu.selected" = { fg = "black", bg = "blue" } 101 | "ui.menu.scroll" = { fg = "white", bg = "light-gray" } 102 | 103 | "ui.debug" = { fg = "red" } 104 | 105 | [palette] 106 | 107 | yellow = "#E5C07B" 108 | blue = "#61AFEF" 109 | red = "#E06C75" 110 | purple = "#C678DD" 111 | green = "#98C379" 112 | gold = "#D19A66" 113 | cyan = "#56B6C2" 114 | white = "#ABB2BF" 115 | black = "#282C34" 116 | light-black = "#2C323C" 117 | gray = 
"#3E4452" 118 | faint-gray = "#3B4048" 119 | light-gray = "#5C6370" 120 | linenr = "#4B5263" 121 | -------------------------------------------------------------------------------- /tree-sitter-grammar-repository/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "tree-sitter-grammar-repository" 3 | description = "tree-sitter grammars built from Helix with support for dynamic linking" 4 | version = "0.0.1" 5 | edition = "2024" 6 | authors = ["Jordan Doyle "] 7 | license = "WTFPL" 8 | 9 | [dependencies] 10 | globset = "0.4" 11 | regex = "1.11" 12 | tree-sitter-language = "0.1" 13 | 14 | [build-dependencies] 15 | anyhow = "1.0" 16 | cc = "1.2" 17 | serde = { version = "1.0", features = ["derive"] } 18 | basic-toml = "0.1" 19 | threadpool = "1.8" 20 | memchr = "2.7" 21 | quote = "1.0" 22 | proc-macro2 = "1.0" 23 | prettyplease = "0.2" 24 | heck = "0.5" 25 | syn = "2.0" 26 | regex = "1.11" 27 | -------------------------------------------------------------------------------- /tree-sitter-grammar-repository/src/lib.rs: -------------------------------------------------------------------------------- 1 | //! # tree-sitter-grammar-repository 2 | //! 3 | //! This crate loads in all known languages and grammars from `helix`'s 4 | //! `languages.toml` at compile time and provides an easy way for you 5 | //! to easily map the language to a highlighter configuration. 6 | //! 7 | //! `tree-sitter` grammars can be dynamically linked by setting the 8 | //! `TREE_SITTER_GRAMMAR_LIB_DIR` environment variable. If set, this library 9 | //! expects a directory of the format: 10 | //! 11 | //! ```text 12 | //! - TREE_SITTER_GRAMMAR_LIB_DIR 13 | //! - sources/ 14 | //! - html/ 15 | //! - queries/ 16 | //! - highlights.scm 17 | //! - injections.scm 18 | //! - package.json 19 | //! - javascript/ 20 | //! - queries/ 21 | //! - highlights.scm 22 | //! - injections.scm 23 | //! - package.json 24 | //! 
- libhtml-parser.so 25 | //! - libhtml-scanner.so 26 | //! - libjavascript-scanner.so 27 | //! - ... 28 | //! ``` 29 | //! 30 | //! Usage: 31 | //! 32 | //! ```ignore 33 | //! use std::collections::HashMap; 34 | //! use tree_sitter_grammar_repository::Grammar; 35 | //! use tree_sitter_highlight::HighlightConfiguration; 36 | //! 37 | //! let highlighter_configurations = Grammar::VARIANTS 38 | //! .iter() 39 | //! .copied() 40 | //! .map(Grammar::highlight_configuration_params) 41 | //! .map(|v| (v, HighlightConfiguration::new( 42 | //! v.language.into(), 43 | //! v.name, 44 | //! v.highlights_query, 45 | //! v.injection_query, 46 | //! v.locals_query 47 | //! ))) 48 | //! .collect::>(); 49 | //! 50 | //! let highlighter_configuration = highlighter_configurations 51 | //! .get(&Language::from_file_name("hello_world.toml").grammar()); 52 | //! ``` 53 | 54 | include!(concat!(env!("OUT_DIR"), "/grammar.registry.rs")); 55 | include!(concat!(env!("OUT_DIR"), "/language.registry.rs")); 56 | pub mod grammar { 57 | include!(concat!(env!("OUT_DIR"), "/grammar.defs.rs")); 58 | } 59 | 60 | pub struct HighlightConfigurationParams { 61 | pub language: tree_sitter_language::LanguageFn, 62 | pub name: &'static str, 63 | pub highlights_query: &'static str, 64 | pub injection_query: &'static str, 65 | pub locals_query: &'static str, 66 | } 67 | -------------------------------------------------------------------------------- /treefmt.nix: -------------------------------------------------------------------------------- 1 | { pkgs, ... }: 2 | { 3 | projectRootFile = "flake.nix"; 4 | 5 | programs = { 6 | nixpkgs-fmt.enable = true; 7 | statix.enable = true; 8 | rustfmt.enable = true; 9 | taplo.enable = true; 10 | shellcheck.enable = true; 11 | }; 12 | } 13 | --------------------------------------------------------------------------------