├── .github
│   └── workflows
│       └── docker.yml
├── .gitignore
├── Cargo.lock
├── Cargo.toml
├── LICENSE-APACHE
├── LICENSE-MIT
├── README.md
├── flake.lock
├── flake.nix
└── src
    ├── check.rs
    ├── check
    │   ├── authors.rs
    │   ├── compile.rs
    │   ├── diagnostics.rs
    │   ├── file_size.rs
    │   ├── imports.rs
    │   ├── kebab_case.rs
    │   └── manifest.rs
    ├── cli.rs
    ├── github.rs
    ├── github
    │   ├── api.rs
    │   ├── api
    │   │   ├── check.rs
    │   │   ├── hook.rs
    │   │   ├── pr.rs
    │   │   └── user.rs
    │   └── git.rs
    ├── main.rs
    ├── package.rs
    └── world.rs

/.github/workflows/docker.yml:
--------------------------------------------------------------------------------
1 | name: "Build and publish Docker image"
2 | 
3 | on:
4 |   release:
5 |     types: [published]
6 |   workflow_dispatch:
7 | 
8 | env:
9 |   REGISTRY: ghcr.io
10 | 
11 | jobs:
12 |   build:
13 |     runs-on: ubuntu-latest
14 |     permissions:
15 |       contents: read
16 |       packages: write
17 |     steps:
18 |       - uses: actions/checkout@v4
19 |       - uses: cachix/install-nix-action@v25
20 |         with:
21 |           github_access_token: ${{ secrets.GITHUB_TOKEN }}
22 |       - name: Restore and cache Nix store
23 |         uses: nix-community/cache-nix-action@v5
24 |         with:
25 |           primary-key: nix-${{ runner.os }}-${{ hashFiles('flake.nix') }}
26 |       - name: Use Nix to build a Docker image
27 |         run: nix build -L '.#docker-image'
28 |       - name: Log into registry
29 |         uses: docker/login-action@v3.0.0
30 |         with:
31 |           registry: ${{ env.REGISTRY }}
32 |           username: ${{ github.actor }}
33 |           password: ${{ secrets.GITHUB_TOKEN }}
34 | 
35 |       - name: Import image and push
36 |         run: |
37 |           docker load -i result
38 |           docker push $(docker images --format json | jq -r 'select(.Repository == "ghcr.io/typst/package-check") | .Repository + ":" + .Tag')
39 |           docker tag $(docker images --format json | jq -r 'select(.Repository == "ghcr.io/typst/package-check") | .Repository + ":" + .Tag') ghcr.io/typst/package-check:latest
40 |           docker push ghcr.io/typst/package-check:latest
41 | 
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | /target
2 | .env
3 | /packages
4 | result
--------------------------------------------------------------------------------
/Cargo.toml:
--------------------------------------------------------------------------------
1 | [package]
2 | name = "typst-package-check"
3 | version = "0.2.0"
4 | edition = "2021"
5 | 
6 | [dependencies]
7 | async-trait = "0.1.80"
8 | axum = "0.7.5"
9 | casbab = "0.1.1"
10 | chrono = "0.4.38"
11 | codespan-reporting = "0.11.1"
12 | comemo = "0.4.0"
13 | dirs = "5.0.1"
14 | dotenvy = "0.15.7"
15 | eyre = "0.6.12"
16 | flate2 = "1.0.28"
17 | fontdb = "0.20.0"
18 | hmac = "0.12.1"
19 | ignore = "0.4.22"
20 | jwt-simple = { version = "0.12.9", default-features = false, features = [
21 |     "pure-rust",
22 | ] }
23 | parking_lot = "0.12.1"
24 | pathdiff = "0.2.1"
25 | reqwest = { version = "0.12.4", features = ["json"] }
26 | serde = { version = "1.0.198", features = ["derive"] }
27 | serde_json = "1.0.116"
28 | sha1 = "0.10.6"
29 | spdx = "0.10"
30 | stringcase = "0.2.1"
31 | tar = "0.4.40"
32 | tokio = { version = "1.37.0", features = ["rt-multi-thread", "process", "fs"] }
33 | toml_edit = "0.22.12"
34 | tower-http = { version = "0.5", features = ["trace"] }
35 | tracing = "0.1.40"
36 | tracing-subscriber = { version = "0.3.18", features = ["json", "env-filter"] }
37 | typst = "0.13.1"
38 | typst-assets = { version = "0.13.1", features = [ "fonts" ] }
39 | typst-eval = "0.13.1"
40 | wasm-opt = "0.116.1"
41 | 
-------------------------------------------------------------------------------- /LICENSE-APACHE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 
61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 
175 | 
176 |    END OF TERMS AND CONDITIONS
177 | 
--------------------------------------------------------------------------------
/LICENSE-MIT:
--------------------------------------------------------------------------------
1 | Permission is hereby granted, free of charge, to any
2 | person obtaining a copy of this software and associated
3 | documentation files (the "Software"), to deal in the
4 | Software without restriction, including without
5 | limitation the rights to use, copy, modify, merge,
6 | publish, distribute, sublicense, and/or sell copies of
7 | the Software, and to permit persons to whom the Software
8 | is furnished to do so, subject to the following
9 | conditions:
10 | 
11 | The above copyright notice and this permission notice
12 | shall be included in all copies or substantial portions
13 | of the Software.
14 | 
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
16 | ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
17 | TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
18 | PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
19 | SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
20 | CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
21 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
22 | IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
23 | DEALINGS IN THE SOFTWARE.
24 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Typst package check
2 | 
3 | A tool to report common errors in Typst packages.
4 | 
5 | This tool can be used in three ways:
6 | 
7 | - `typst-package-check check`, to check a single package in the current directory.
8 | - `typst-package-check check @preview/NAME:VERSION` to check a given package in a clone of the `typst/packages` repository.
9 |   This command should be run from the `packages` sub-directory. In that configuration, imports will be resolved in the local
10 |   clone of the repository; nothing will be fetched from the network.
11 | - `typst-package-check server` to start an HTTP server that listens for GitHub webhooks and runs checks when a PR is opened against
12 |   `typst/packages` (or any repository with a similar structure).
13 | 
14 | ## Using this tool
15 | 
16 | You can install this tool with Cargo:
17 | 
18 | ```bash
19 | cargo install --git https://github.com/typst/package-check.git
20 | cd my-package
21 | typst-package-check check
22 | ```
23 | 
24 | You can also run it with Nix:
25 | 
26 | ```bash
27 | nix run github:typst/package-check -- check
28 | ```
29 | 
30 | Finally, a Docker image is available:
31 | 
32 | ```bash
33 | docker run -v .:/data ghcr.io/typst/package-check check
34 | ```
35 | 
36 | When running with Docker, `/data` is the directory in which the tool will look for files to check.
37 | 
38 | ## Configuring the webhook handler
39 | 
40 | The following environment variables are used for configuration.
41 | They are all mandatory when running the server that handles webhooks.
42 | `.env` is supported.
43 | 
44 | - `PACKAGES_DIR`, path to a local clone of `typst/packages`
45 | - `GITHUB_APP_IDENTIFIER`, the ID of the GitHub app submitting reviews.
46 |   This app should have the `checks:write` permission.
47 | - `GITHUB_WEBHOOK_SECRET`, the secret provided by GitHub when enabling webhook handling.
48 | - `GITHUB_PRIVATE_KEY`, the private key of the GitHub app, in PEM format.
49 |   Directly in the environment variable, not a path to an external file.
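
For example, a minimal `.env` could look like this (all values below are placeholders, not real credentials):

```env
PACKAGES_DIR=/path/to/typst/packages
GITHUB_APP_IDENTIFIER=123456
GITHUB_WEBHOOK_SECRET=some-random-secret
GITHUB_PRIVATE_KEY="-----BEGIN RSA PRIVATE KEY-----
...contents of the app's PEM file...
-----END RSA PRIVATE KEY-----"
```
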
50 | Note that you can (and should probably) use double-quotes in the `.env` file for multi-line variables. 51 | -------------------------------------------------------------------------------- /flake.lock: -------------------------------------------------------------------------------- 1 | { 2 | "nodes": { 3 | "nixpkgs": { 4 | "locked": { 5 | "lastModified": 1739019272, 6 | "narHash": "sha256-7Fu7oazPoYCbDzb9k8D/DdbKrC3aU1zlnc39Y8jy/s8=", 7 | "owner": "nixos", 8 | "repo": "nixpkgs", 9 | "rev": "fa35a3c8e17a3de613240fea68f876e5b4896aec", 10 | "type": "github" 11 | }, 12 | "original": { 13 | "owner": "nixos", 14 | "ref": "nixpkgs-unstable", 15 | "repo": "nixpkgs", 16 | "type": "github" 17 | } 18 | }, 19 | "root": { 20 | "inputs": { 21 | "nixpkgs": "nixpkgs", 22 | "utils": "utils" 23 | } 24 | }, 25 | "systems": { 26 | "locked": { 27 | "lastModified": 1681028828, 28 | "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", 29 | "owner": "nix-systems", 30 | "repo": "default", 31 | "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", 32 | "type": "github" 33 | }, 34 | "original": { 35 | "owner": "nix-systems", 36 | "repo": "default", 37 | "type": "github" 38 | } 39 | }, 40 | "utils": { 41 | "inputs": { 42 | "systems": "systems" 43 | }, 44 | "locked": { 45 | "lastModified": 1731533236, 46 | "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=", 47 | "owner": "numtide", 48 | "repo": "flake-utils", 49 | "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b", 50 | "type": "github" 51 | }, 52 | "original": { 53 | "owner": "numtide", 54 | "repo": "flake-utils", 55 | "type": "github" 56 | } 57 | } 58 | }, 59 | "root": "root", 60 | "version": 7 61 | } 62 | -------------------------------------------------------------------------------- /flake.nix: -------------------------------------------------------------------------------- 1 | { 2 | description = "Tool to check a Typst package."; 3 | 4 | inputs = { 5 | nixpkgs.url = "github:nixos/nixpkgs/nixpkgs-unstable"; 6 | utils.url = "github:numtide/flake-utils"; 7 | }; 8 | 9 | outputs = { self, nixpkgs, utils }: 10 | let cargoMeta = builtins.fromTOML (builtins.readFile ./Cargo.toml); 11 | in utils.lib.eachDefaultSystem (system: 12 | let pkgs = nixpkgs.legacyPackages.${system}; 13 | in { 14 | packages = rec { 15 | default = typst-package-check; 16 | typst-package-check = pkgs.rustPlatform.buildRustPackage { 17 | pname = cargoMeta.package.name; 18 | version = cargoMeta.package.version; 19 | src = ./.; 20 | nativeBuildInputs = [ pkgs.pkg-config ]; 21 | buildInputs = [ pkgs.openssl.dev pkgs.git ]; 22 | useFetchCargoVendor = true; 23 | cargoHash = "sha256-0t6bQzAbok/xBi53tr7KCU78Ew/i99B+AyfR36r7AA4="; 24 | # Don't run `cargo test`, as there are no tests to run. 
25 |             doCheck = false;
26 |           };
27 |           docker-image = pkgs.dockerTools.buildImage {
28 |             name = "ghcr.io/typst/package-check";
29 |             tag = typst-package-check.version;
30 |             copyToRoot = with pkgs.dockerTools; [
31 |               caCertificates
32 |               pkgs.git
33 |               typst-package-check
34 |             ];
35 |             config = {
36 |               Entrypoint = [ "/bin/typst-package-check" ];
37 |               WorkingDir = "/data";
38 |             };
39 |           };
40 |         };
41 |       });
42 | }
43 | 
--------------------------------------------------------------------------------
/src/check.rs:
--------------------------------------------------------------------------------
1 | use std::path::PathBuf;
2 | 
3 | use codespan_reporting::diagnostic::Label;
4 | use typst::{
5 |     syntax::{package::PackageSpec, FileId, Span},
6 |     WorldExt,
7 | };
8 | 
9 | use crate::world::SystemWorld;
10 | 
11 | pub mod authors;
12 | mod compile;
13 | mod diagnostics;
14 | mod file_size;
15 | mod imports;
16 | mod kebab_case;
17 | mod manifest;
18 | 
19 | pub use diagnostics::Diagnostics;
20 | 
21 | pub async fn all_checks(
22 |     package_spec: Option<&PackageSpec>,
23 |     package_dir: PathBuf,
24 |     check_authors: bool,
25 | ) -> eyre::Result<(SystemWorld, Diagnostics)> {
26 |     let mut diags = Diagnostics::default();
27 | 
28 |     let worlds = manifest::check(&package_dir, &mut diags, package_spec).await?;
29 |     compile::check(&mut diags, &worlds.package);
30 |     if let Some(template_world) = worlds.template {
31 |         let mut template_diags = Diagnostics::default();
32 |         compile::check(&mut template_diags, &template_world);
33 |         let template_dir = template_world
34 |             .root()
35 |             .strip_prefix(worlds.package.root())
36 |             .expect("Template should be in a subfolder of the package");
37 |         diags.extend(template_diags, template_dir);
38 |     }
39 |     kebab_case::check(&mut diags, &worlds.package);
40 | 
41 |     let res = imports::check(&mut diags, &package_dir, &worlds.package);
42 |     diags.maybe_emit(res);
43 | 
44 |     if let Some(spec) = package_spec.filter(|_| check_authors) {
45 |         authors::check(&mut diags, spec);
46 |     }
47 | 
48 |     Ok((worlds.package, diags))
49 | }
50 | 
51 | /// Create a label for a span.
52 | fn label(world: &SystemWorld, span: Span) -> Option<Label<FileId>> {
53 |     Some(Label::primary(span.id()?, world.range(span)?))
54 | }
55 | 
--------------------------------------------------------------------------------
/src/check/authors.rs:
--------------------------------------------------------------------------------
1 | use codespan_reporting::diagnostic::{Diagnostic, Label};
2 | use typst::syntax::{package::PackageSpec, FileId, VirtualPath};
3 | 
4 | use crate::{github::git, package::PackageExt};
5 | 
6 | use super::Diagnostics;
7 | 
8 | pub fn check(diags: &mut Diagnostics, spec: &PackageSpec) -> Option<()> {
9 |     if authors_are_differents(spec).unwrap_or(false) {
10 |         let manifest = FileId::new(None, VirtualPath::new("typst.toml"));
11 | 
12 |         diags.emit(
13 |             Diagnostic::warning()
14 |                 .with_labels(vec![Label::primary(manifest, 0..0)])
15 |                 .with_message(
16 |                     "The authors of this version are not the same as those of the previous one (according to Git)."
17 |                 )
18 |         );
19 |     }
20 | 
21 |     Some(())
22 | }
23 | 
24 | pub fn commit_for_previous_version(spec: &PackageSpec) -> Option<String> {
25 |     let last_manifest = spec.previous_version()?.directory().join("typst.toml");
26 | 
27 |     let repo = git::repo_dir();
28 |     let repo = git::GitRepo::open(&repo);
29 | 
30 |     repo.commit_for_file(&last_manifest)
31 | }
32 | 
33 | pub fn authors_are_differents(spec: &PackageSpec) -> Option<bool> {
34 |     let last_manifest = spec.previous_version()?.directory().join("typst.toml");
35 |     let new_manifest = spec.directory().join("typst.toml");
36 | 
37 |     let repo = git::repo_dir();
38 |     let repo = git::GitRepo::open(&repo);
39 | 
40 |     let last_authors = repo.authors_of(&last_manifest)?;
41 |     let new_authors = repo.authors_of(&new_manifest)?;
42 |     Some(
43 |         !last_authors.is_empty()
44 |             && !new_authors.is_empty()
45 |             && last_authors.intersection(&new_authors).next().is_none(),
46 |     )
47 | }
48 | 
--------------------------------------------------------------------------------
/src/check/compile.rs:
--------------------------------------------------------------------------------
1 | use codespan_reporting::diagnostic::Diagnostic;
2 | use typst::{
3 |     diag::{Severity, SourceDiagnostic},
4 |     layout::PagedDocument,
5 |     syntax::FileId,
6 | };
7 | 
8 | use crate::world::SystemWorld;
9 | 
10 | use super::{label, Diagnostics};
11 | 
12 | pub fn check(diags: &mut Diagnostics, world: &SystemWorld) -> Option<PagedDocument> {
13 |     let result = typst::compile(world);
14 |     diags.emit_many(convert_diagnostics(world, result.warnings));
15 | 
16 |     match result.output {
17 |         Ok(doc) => Some(doc),
18 |         Err(errors) => {
19 |             diags.emit_many(convert_diagnostics(world, errors));
20 |             None
21 |         }
22 |     }
23 | }
24 | 
25 | fn convert_diagnostics<'a>(
26 |     world: &'a SystemWorld,
27 |     iter: impl IntoIterator<Item = SourceDiagnostic> + 'a,
28 | ) -> impl Iterator<Item = Diagnostic<FileId>> + 'a {
29 |     iter.into_iter()
30 |         .filter(|diagnostic| !diagnostic.message.starts_with("unknown font family:"))
31 |         .map(|diagnostic| {
32 |             let severity = if diagnostic.severity == Severity::Error {
33 |                 "error"
34 |             } else {
35 |                 "warning"
36 |             };
37 | 
38 |             match diagnostic.severity {
39 |                 Severity::Error => Diagnostic::error(),
40 |                 Severity::Warning => Diagnostic::warning(),
41 |             }
42 |             .with_message(format!(
43 |                 "The following {} was reported by the Typst compiler: {}",
44 |                 severity, diagnostic.message
45 |             ))
46 |             .with_labels(label(world, diagnostic.span).into_iter().collect())
47 |         })
48 | }
49 | 
--------------------------------------------------------------------------------
/src/check/diagnostics.rs:
--------------------------------------------------------------------------------
1 | use std::path::Path;
2 | 
3 | use codespan_reporting::diagnostic::{Diagnostic, Severity};
4 | use typst::syntax::{FileId, VirtualPath};
5 | 
6 | #[derive(Default, Debug)]
7 | pub struct Diagnostics {
8 |     warnings: Vec<Diagnostic<FileId>>,
9 |     errors: Vec<Diagnostic<FileId>>,
10 | }
11 | 
12 | impl Diagnostics {
13 |     pub fn maybe_emit<T>(&mut self, maybe_err: eyre::Result<T>) {
14 |         if let Err(e) = maybe_err {
15 |             self.emit(Diagnostic::error().with_message(format!("{}", e)))
16 |         }
17 |     }
18 | 
19 |     pub fn emit(&mut self, d: Diagnostic<FileId>) {
20 |         tracing::debug!("Emitting: {:?}", &d);
21 |         if d.severity == Severity::Warning {
22 |             self.warnings.push(d)
23 |         } else {
24 |             self.errors.push(d)
25 |         }
26 |     }
27 | 
28 |     pub fn emit_many(&mut self, ds: impl Iterator<Item = Diagnostic<FileId>>) {
29 |         for d in ds {
30 |             self.emit(d)
31 |         }
32 |     }
33 | 
34 |     pub fn extend(&mut self, mut other: Self, dir_prefix: &Path) {
35 |         let fix_labels = |diag: &mut Diagnostic<FileId>| {
36 |             for label in diag.labels.iter_mut() {
37 |                 if label.file_id.package().is_none() {
38 |                     label.file_id = FileId::new(
39 |                         None,
40 |                         VirtualPath::new(dir_prefix.join(label.file_id.vpath().as_rootless_path())),
41 |                     )
42 |                 }
43 |             }
44 |         };
45 | 
46 |         other.errors.iter_mut().for_each(fix_labels);
47 |         self.errors.extend(other.errors);
48 | 
49 |         other.warnings.iter_mut().for_each(fix_labels);
50 |         self.warnings.extend(other.warnings);
51 |     }
52 | 
53 |     pub fn errors(&self) -> &[Diagnostic<FileId>] {
54 |         &self.errors
55 |     }
56 | 
57 |     pub fn warnings(&self) -> &[Diagnostic<FileId>] {
58 |         &self.warnings
59 |     }
60 | }
61 | 
--------------------------------------------------------------------------------
/src/check/file_size.rs:
--------------------------------------------------------------------------------
1 | use std::path::{Path, PathBuf};
2 | 
3 | use eyre::Context;
4 | use ignore::overrides::Override;
5 | 
6 | /// Size (in bytes) after which a file is considered large.
7 | const SIZE_THRESHOLD: u64 = 1024 * 1024; // 1 MB
8 | 
9 | pub fn find_large_files(dir: &Path, exclude: Override) -> eyre::Result<Vec<(PathBuf, u64)>> {
10 |     let mut result = Vec::new();
11 |     for ch in ignore::WalkBuilder::new(dir).overrides(exclude).build() {
12 |         let Ok(ch) = ch else {
13 |             continue;
14 |         };
15 |         let Ok(metadata) = ch.metadata() else {
16 |             continue;
17 |         };
18 |         if metadata.is_file() && metadata.len() > SIZE_THRESHOLD {
19 |             result.push((
20 |                 ch.path()
21 |                     .strip_prefix(dir)
22 |                     .context("Prefix stripping failed even though child path (`ch`) was constructed from parent path (`dir`)")?
23 |                     .to_owned(),
24 |                 metadata.len(),
25 |             ))
26 |         }
27 |     }
28 |     Ok(result)
29 | }
30 | 
--------------------------------------------------------------------------------
/src/check/imports.rs:
--------------------------------------------------------------------------------
1 | use std::{
2 |     path::{Path, PathBuf},
3 |     str::FromStr,
4 | };
5 | 
6 | use codespan_reporting::diagnostic::{Diagnostic, Label};
7 | use eyre::Context;
8 | use typst::{
9 |     syntax::{
10 |         ast::{self, AstNode, ModuleImport},
11 |         package::{PackageSpec, PackageVersion, VersionlessPackageSpec},
12 |         FileId, VirtualPath,
13 |     },
14 |     World, WorldExt,
15 | };
16 | 
17 | use crate::world::SystemWorld;
18 | 
19 | use super::Diagnostics;
20 | 
21 | pub fn check(diags: &mut Diagnostics, package_dir: &Path, world: &SystemWorld) -> eyre::Result<()> {
22 |     check_dir(diags, package_dir, world)
23 | }
24 | 
25 | pub fn check_dir(diags: &mut Diagnostics, dir: &Path, world: &SystemWorld) -> eyre::Result<()> {
26 |     let root_path = world.root();
27 |     let main_path = root_path
28 |         .join(world.main().vpath().as_rootless_path())
29 |         .canonicalize()
30 |         .ok();
31 |     let all_packages = root_path
32 |         .parent()
33 |         .and_then(|package_dir| package_dir.parent())
34 |         .and_then(|namespace_dir| namespace_dir.parent());
35 | 
36 |     for ch in std::fs::read_dir(dir).context("Can't read directory")? {
37 |         let Ok(ch) = ch else {
38 |             continue;
39 |         };
40 |         let Ok(meta) = ch.metadata() else {
41 |             continue;
42 |         };
43 | 
44 |         let path = dir.join(ch.file_name());
45 |         if meta.is_dir() {
46 |             check_dir(diags, &path, world)?;
47 |         }
48 |         if path.extension().and_then(|ext| ext.to_str()) == Some("typ") {
49 |             let fid = FileId::new(
50 |                 None,
51 |                 VirtualPath::new(
52 |                     path.strip_prefix(root_path)
53 |                         // Not actually true
54 |                         .context(
55 |                             "Prefix stripping failed even though `path` is built from `root_dir`",
56 |                         )?,
57 |                 ),
58 |             );
59 |             let source = world.lookup(fid).context("Can't read source file")?;
60 |             let imports = source
61 |                 .root()
62 |                 .children()
63 |                 .filter_map(|ch| ch.cast::<ModuleImport>());
64 |             for import in imports {
65 |                 let ast::Expr::Str(source_str) = import.source() else {
66 |                     continue;
67 |                 };
68 |                 let import_path = path
69 |                     .parent()
70 |                     .unwrap_or(&PathBuf::new())
71 |                     .join(source_str.get().as_str())
72 |                     .canonicalize()
73 |                     .ok();
74 |                 if main_path == import_path {
75 |                     diags
76 |                         .emit(Diagnostic::warning()
77 |                             .with_labels(vec![Label::primary(
78 |                                 fid,
79 |                                 world.range(import.span()).unwrap_or_default(),
80 |                             )])
81 |                             .with_message(
82 |                                 "This import should use the package specification, not a relative path."
83 |                             )
84 |                         )
85 |                 }
86 | 
87 |                 if let Some(all_packages) = all_packages {
88 |                     if let Ok(import_spec) = PackageSpec::from_str(source_str.get().as_str()) {
89 |                         if let Some(latest_version) =
90 |                             latest_package_version(all_packages, import_spec.versionless())
91 |                         {
92 |                             if latest_version != import_spec.version {
93 |                                 diags.emit(
94 |                                     Diagnostic::warning()
95 |                                         .with_labels(vec![Label::primary(
96 |                                             fid,
97 |                                             world.range(import.span()).unwrap_or_default(),
98 |                                         )])
99 |                                         .with_message(
100 |                                             "This import seems to use an older version of the package.",
101 |                                         ),
102 |                                 )
103 |                             }
104 |                         }
105 |                     }
106 |                 }
107 |             }
108 |         }
109 |     }
110 | 
111 |     Ok(())
112 | }
113 | 
114 | fn latest_package_version(dir: &Path, spec: VersionlessPackageSpec) -> Option<PackageVersion> {
115 |     std::fs::read_dir(dir.join(&spec.namespace[..]).join(&spec.name[..]))
116 |         .ok()
117 |         .and_then(|dir| {
118 |             dir.filter_map(|child| PackageVersion::from_str(child.ok()?.file_name().to_str()?).ok())
119 |                 .max()
120 |         })
121 | }
122 | 
--------------------------------------------------------------------------------
/src/check/kebab_case.rs:
--------------------------------------------------------------------------------
1 | use std::collections::HashSet;
2 | 
3 | use codespan_reporting::diagnostic::{Diagnostic, Severity};
4 | use comemo::Track;
5 | use typst::{
6 |     engine::{Route, Sink, Traced},
7 |     syntax::{
8 |         ast::{self, AstNode},
9 |         FileId, Source, SyntaxNode,
10 |     },
11 |     World, ROUTINES,
12 | };
13 | 
14 | use crate::world::SystemWorld;
15 | 
16 | use super::{label, Diagnostics};
17 | 
18 | // Check that all public identifiers are in kebab-case.
19 | pub fn check(diags: &mut Diagnostics, world: &SystemWorld) -> Option<()> {
20 |     let main = world.source(world.main()).ok()?;
21 | 
22 |     let public_names: HashSet<_> = {
23 |         let world = <dyn World>::track(world);
24 | 
25 |         let mut sink = Sink::new();
26 |         let module = typst_eval::eval(
27 |             &ROUTINES,
28 |             world,
29 |             Traced::default().track(),
30 |             sink.track_mut(),
31 |             Route::default().track(),
32 |             &main,
33 |         )
34 |         .ok()?;
35 |         let scope = module.scope();
36 |         scope.iter().map(|(name, _)| name.to_string()).collect()
37 |     };
38 | 
39 |     let mut visited = HashSet::new();
40 |     check_source(main, world, &public_names, diags, &mut visited);
41 | 
42 |     Some(())
43 | }
44 | 
45 | /// Run the check for a single source file.
46 | fn check_source(
47 |     src: Source,
48 |     world: &SystemWorld,
49 |     public_names: &HashSet<String>,
50 |     diags: &mut Diagnostics,
51 |     visited: &mut HashSet<FileId>,
52 | ) -> Option<()> {
53 |     if visited.contains(&src.id()) {
54 |         return Some(());
55 |     }
56 |     visited.insert(src.id());
57 | 
58 |     // Check all let bindings
59 |     for binding in src
60 |         .root()
61 |         .children()
62 |         .filter_map(|c| c.cast::<ast::LetBinding>())
63 |     {
64 |         let Some(name_ident) = find_first::<ast::Ident>(binding.to_untyped()) else {
65 |             continue;
66 |         };
67 | 
68 |         if !public_names.contains(name_ident.get().as_str()) {
69 |             continue;
70 |         }
71 | 
72 |         let name = &name_ident.as_str();
73 |         if name.starts_with('_') {
74 |             // This is exported but considered private.
75 |             continue;
76 |         }
77 | 
78 |         if name == &casbab::screaming_snake(name) || name == &casbab::screaming_kebab(name) {
79 |             // Constants can use SCREAMING_SNAKE_CASE or SCREAMING-KEBAB-CASE
80 |             continue;
81 |         }
82 | 
83 |         if name != &casbab::kebab(name) {
84 |             diags.emit(Diagnostic {
85 |                 severity: codespan_reporting::diagnostic::Severity::Warning,
86 |                 message:
87 |                     "This value seems to be public. It is recommended to use kebab-case names."
88 |                         .to_owned(),
89 |                 labels: label(world, name_ident.span()).into_iter().collect(),
90 |                 notes: Vec::new(),
91 |                 code: None,
92 |             })
93 |         }
94 | 
95 |         if let Some(ast::Expr::Closure(func)) = binding.init() {
96 |             for param in func.params().children() {
97 |                 let (name, span) = match param {
98 |                     ast::Param::Named(named) => (named.name().as_str(), named.span()),
99 |                     ast::Param::Pos(ast::Pattern::Normal(ast::Expr::Ident(i))) => {
100 |                         (i.as_str(), i.span())
101 |                     }
102 |                     // Spread params can safely be ignored: their name is only
103 |                     // exposed to the body of the function, not to the caller.
104 |                     _ => continue,
105 |                 };
106 | 
107 |                 // We recommend kebab-style names but do not warn on
108 |                 // all-uppercase names that may represent real-world
109 |                 // acronyms.
110 |                 if name != casbab::kebab(name) && name != casbab::screaming(name) {
111 |                     diags.emit(Diagnostic {
112 |                         severity: Severity::Warning,
113 |                         message: "This argument seems to be part of a public function. \
114 |                             It is recommended to use kebab-case names."
115 |                             .to_owned(),
116 |                         labels: label(world, span).into_iter().collect(),
117 |                         notes: Vec::new(),
118 |                         code: None,
119 |                     })
120 |                 }
121 |             }
122 |         }
123 |     }
124 | 
125 |     // Check imported files recursively.
126 |     //
127 |     // Because we evaluated the module above, we know that no cyclic import will
128 |     // occur. `visited` still exists because some modules may be imported
129 |     // multiple times.
130 |     //
131 |     // Only imports at the root of the AST will be checked, as this is the most
132 |     // common case anyway.
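    //
    // Only imports whose source is a string literal are followed below.
    // Package imports (e.g. `@preview/...`) also parse as string literals,
    // but the `world.source` lookup fails for them, so they are skipped.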
133 |     for import in src
134 |         .root()
135 |         .children()
136 |         .filter_map(|c| c.cast::<ast::ModuleImport>())
137 |     {
138 |         let file_path = match import.source() {
139 |             ast::Expr::Str(s) => src.id().vpath().join(s.get().as_str()),
140 |             _ => continue,
141 |         };
142 |         let fid = FileId::new(None, file_path);
143 |         let Ok(source) = world.source(fid) else {
144 |             continue;
145 |         };
146 | 
147 |         check_source(source, world, public_names, diags, visited);
148 |     }
149 | 
150 |     Some(())
151 | }
152 | 
153 | /// Find the first child of a given type in a syntax tree
154 | fn find_first<'a, T: AstNode<'a>>(node: &'a SyntaxNode) -> Option<T> {
155 |     for ch in node.children() {
156 |         if let Some(cast) = ch.cast() {
157 |             return Some(cast);
158 |         }
159 | 
160 |         if let Some(x) = find_first(ch) {
161 |             return Some(x);
162 |         }
163 |     }
164 |     None
165 | }
166 | 
--------------------------------------------------------------------------------
/src/check/manifest.rs:
--------------------------------------------------------------------------------
1 | use std::{
2 |     ops::Range,
3 |     path::{Path, PathBuf},
4 |     str::FromStr,
5 | };
6 | 
7 | use codespan_reporting::diagnostic::{Diagnostic, Label};
8 | use eyre::{Context, ContextCompat};
9 | use ignore::overrides::{Override, OverrideBuilder};
10 | use toml_edit::Item;
11 | use tracing::{debug, warn};
12 | use typst::syntax::{
13 |     package::{PackageSpec, PackageVersion},
14 |     FileId, VirtualPath,
15 | };
16 | 
17 | use crate::{
18 |     check::{file_size, Diagnostics},
19 |     world::SystemWorld,
20 | };
21 | 
22 | pub struct Worlds {
23 |     pub package: SystemWorld,
24 |     pub template: Option<SystemWorld>,
25 | }
26 | 
27 | pub async fn check(
28 |     package_dir: &Path,
29 |     diags: &mut Diagnostics,
30 |     package_spec: Option<&PackageSpec>,
31 | ) -> eyre::Result<Worlds> {
32 |     let manifest_path = package_dir.join("typst.toml");
33 |     debug!("Reading manifest at {}", &manifest_path.display());
34 |     let manifest_contents =
35 |         std::fs::read_to_string(manifest_path).context("Failed to read manifest contents.")?;
36 |     let manifest = toml_edit::ImDocument::parse(&manifest_contents)
37 |         .context("Failed to parse manifest contents")?;
38 | 
39 |     let entrypoint = package_dir.join(
40 |         manifest
41 |             .get("package")
42 |             .and_then(|package| package.get("entrypoint"))
43 |             .and_then(|entrypoint| entrypoint.as_str())
44 |             .context("Packages must specify an `entrypoint` in their manifest")?,
45 |     );
46 |     let world = SystemWorld::new(entrypoint, package_dir.to_owned())
47 |         .map_err(|e| eyre::Report::msg(e).wrap_err("Failed to initialize the Typst compiler"))?;
48 | 
49 |     let manifest_file_id = FileId::new(None, VirtualPath::new("typst.toml"));
50 | 
51 |     if !manifest.contains_table("package") {
52 |         // TODO: this condition is probably unreachable as the program would
53 |         //       have panicked before if the `package` table is missing.
54 |         diags.emit(
55 |             Diagnostic::error()
56 |                 .with_labels(vec![Label::primary(manifest_file_id, 0..0)])
57 |                 .with_message(
58 |                     "All `typst.toml` files must contain a `[package]` section. \
59 |                     See the README.md file of this repository for details \
60 |                     about the manifest format.",
61 |                 ),
62 |         );
63 |         return Ok(Worlds {
64 |             package: world,
65 |             template: None,
66 |         });
67 |     }
68 | 
69 |     let name = check_name(diags, manifest_file_id, &manifest, package_spec);
70 |     let version = check_version(diags, manifest_file_id, &manifest, package_spec);
71 | 
72 |     check_compiler_version(diags, manifest_file_id, &manifest);
73 | 
74 |     let res = check_universe_fields(diags, manifest_file_id, &manifest);
75 |     diags.maybe_emit(res);
76 | 
77 |     let res = check_file_names(diags, package_dir);
78 |     diags.maybe_emit(res);
79 | 
80 |     let res = dont_over_exclude(diags, package_dir, manifest_file_id, &manifest);
81 |     diags.maybe_emit(res);
82 | 
83 |     check_repo(diags, manifest_file_id, &manifest).await;
84 | 
85 |     let (exclude, _) = read_exclude(package_dir, &manifest)?;
86 | 
87 |     let template_world = if let (Some(name), Some(version)) = (name, version) {
88 |         let inferred_package_spec = PackageSpec {
89 |             namespace: "preview".into(),
90 |             name: name.into(),
91 |             version,
92 |         };
93 | 
94 |         world_for_template(
95 |             &manifest,
96 |             package_dir,
97 |             package_spec.unwrap_or(&inferred_package_spec),
98 |             exclude.clone(),
99 |         )
100 |     } else {
101 |         None
102 |     };
103 | 
104 |     dont_exclude_template_files(diags, &manifest, package_dir, exclude);
105 |     let thumbnail_path = check_thumbnail(diags, &manifest, manifest_file_id, package_dir);
106 | 
107 |     let res = exclude_large_files(diags, package_dir, &manifest, thumbnail_path);
108 |     diags.maybe_emit(res);
109 | 
110 |     Ok(Worlds {
111 |         package: world,
112 |         template: template_world,
113 |     })
114 | }
115 | 
116 | fn check_name(
117 |     diags: &mut Diagnostics,
118 |     manifest_file_id: FileId,
119 |     manifest: &toml_edit::ImDocument<&String>,
120 |     package_spec: Option<&PackageSpec>,
121 | ) -> Option<String> {
122 |     let Some(name) = manifest
123 |         .get("package")
124 |         .and_then(|package| package.get("name"))
125 |     else {
126 |         diags.emit(
127 |             Diagnostic::error()
128 |                 .with_labels(vec![Label::primary(manifest_file_id, 0..0)])
129 |                 .with_message(
130 |                     "All `typst.toml` files must contain a `name` field. \
131 |                     See the README.md file of this repository for details \
132 |                     about the manifest format.",
133 |                 ),
134 |         );
135 |         return None;
136 |     };
137 | 
138 |     let error = Diagnostic::error().with_labels(vec![Label::primary(
139 |         manifest_file_id,
140 |         name.span().unwrap_or_default(),
141 |     )]);
142 |     let warning = Diagnostic::warning().with_labels(vec![Label::primary(
143 |         manifest_file_id,
144 |         name.span().unwrap_or_default(),
145 |     )]);
146 | 
147 |     let Some(name) = name.as_str() else {
148 |         diags.emit(error.with_message("`name` must be a string."));
149 |         return None;
150 |     };
151 | 
152 |     if name != casbab::kebab(name) {
153 |         diags.emit(
154 |             error
155 |                 .clone()
156 |                 .with_message("Please use kebab-case for package names."),
157 |         )
158 |     }
159 | 
160 |     if name.contains("typst") {
161 |         diags.emit(warning.with_message("Package names should generally not include \"typst\"."));
162 |     }
163 | 
164 |     if let Some(package_spec) = package_spec {
165 |         if name != package_spec.name {
166 |             diags.emit(error.with_message(format!(
167 |                 "Unexpected package name. `{name}` was expected. \
168 |                 If you want to publish a new package, create a new \
169 |                 directory in `packages/{namespace}/`.",
170 |                 name = package_spec.name,
171 |                 namespace = package_spec.namespace,
172 |             )))
173 |         }
174 |     }
175 | 
176 |     Some(name.to_owned())
177 | }
178 | 
179 | fn check_version(
180 |     diags: &mut Diagnostics,
181 |     manifest_file_id: FileId,
182 |     manifest: &toml_edit::ImDocument<&String>,
183 |     package_spec: Option<&PackageSpec>,
184 | ) -> Option<PackageVersion> {
185 |     let Some(version) = manifest
186 |         .get("package")
187 |         .and_then(|package| package.get("version"))
188 |     else {
189 |         diags.emit(
190 |             Diagnostic::error()
191 |                 .with_labels(vec![Label::primary(manifest_file_id, 0..0)])
192 |                 .with_message(
193 |                     "All `typst.toml` files must contain a `version` field. \
194 |                     See the README.md file of this repository for details \
195 |                     about the manifest format.",
196 |                 ),
197 |         );
198 |         return None;
199 |     };
200 | 
201 |     let error = Diagnostic::error().with_labels(vec![Label::primary(
202 |         manifest_file_id,
203 |         version.span().unwrap_or_default(),
204 |     )]);
205 | 
206 |     let Some(version) = version.as_str() else {
207 |         diags.emit(error.with_message("`version` must be a string."));
208 |         return None;
209 |     };
210 | 
211 |     let Ok(version) = version.parse::<PackageVersion>() else {
212 |         diags.emit(error.with_message(
213 |             "`version` must be a valid semantic version \
214 |             (i.e. follow the `MAJOR.MINOR.PATCH` format).",
215 |         ));
216 |         return None;
217 |     };
218 | 
219 |     if let Some(package_spec) = package_spec {
220 |         if version != package_spec.version {
221 |             diags.emit(error.with_message(format!(
222 |                 "Unexpected version number. `{version}` was expected. \
223 |                 If you want to publish a new version, create a new \
224 |                 directory in `packages/{namespace}/{name}`.",
225 |                 version = package_spec.version,
226 |                 name = package_spec.name,
227 |                 namespace = package_spec.namespace,
228 |             )))
229 |         }
230 |     }
231 | 
232 |     Some(version)
233 | }
234 | 
235 | fn check_compiler_version(
236 |     diags: &mut Diagnostics,
237 |     manifest_file_id: FileId,
238 |     manifest: &toml_edit::ImDocument<&String>,
239 | ) -> Option<()> {
240 |     let compiler = manifest.get("package")?.get("compiler")?;
241 |     let Some(compiler_str) = compiler.as_str() else {
242 |         diags.emit(
243 |             Diagnostic::error()
244 |                 .with_labels(vec![Label::primary(manifest_file_id, compiler.span()?)])
245 |                 .with_message("Compiler version should be a string"),
246 |         );
247 |         return None;
248 |     };
249 | 
250 |     if PackageVersion::from_str(compiler_str).is_err() {
251 |         diags.emit(
252 |             Diagnostic::error()
253 |                 .with_labels(vec![Label::primary(manifest_file_id, compiler.span()?)])
254 |                 .with_message("Compiler version should be a valid semantic version, with three components (for example `0.12.0`)"),
255 |         );
256 |         return None;
257 |     }
258 | 
259 |     Some(())
260 | }
261 | 
262 | fn exclude_large_files(
263 |     diags: &mut Diagnostics,
264 |     package_dir: &Path,
265 |     manifest: &toml_edit::ImDocument<&String>,
266 |     thumbnail_path: Option<PathBuf>,
267 | ) -> eyre::Result<()> {
268 |     let template_root = template_root(manifest);
269 |     let template_dir = template_root.and_then(|root| package_dir.join(&root).canonicalize().ok());
270 |     let (exclude, _) = read_exclude(package_dir, manifest)?;
271 | 
272 |     const REALLY_LARGE: u64 = 50 * 1024 * 1024;
273 | 
274 |     let large_files = file_size::find_large_files(package_dir, exclude.clone());
275 |     for (path, size) in large_files? {
276 |         if Some(path.as_ref())
277 |             == thumbnail_path
278 |                 .as_ref()
279 |                 .and_then(|t| t.strip_prefix(package_dir).ok())
280 |         {
281 |             // Thumbnail is always excluded
282 |             continue;
283 |         }
284 | 
285 |         if path.extension().and_then(|ext| ext.to_str()) == Some("wasm") {
286 |             let path = package_dir.join(&path);
287 |             if let Some(file_name) = path.file_name() {
288 |                 let out = std::env::temp_dir().join(file_name);
289 | 
290 |                 let wasm_opt_result = wasm_opt::OptimizationOptions::new_optimize_for_size()
291 |                     // Explicitly enable and disable features to best match what wasmi supports
292 |                     // https://github.com/wasmi-labs/wasmi?tab=readme-ov-file#webassembly-proposals
293 |                     .enable_feature(wasm_opt::Feature::MutableGlobals)
294 |                     .enable_feature(wasm_opt::Feature::TruncSat)
295 |                     .enable_feature(wasm_opt::Feature::SignExt)
296 |                     .enable_feature(wasm_opt::Feature::Multivalue)
297 |                     .enable_feature(wasm_opt::Feature::BulkMemory)
298 |                     .enable_feature(wasm_opt::Feature::ReferenceTypes)
299 |                     .enable_feature(wasm_opt::Feature::TailCall)
300 |                     .enable_feature(wasm_opt::Feature::ExtendedConst)
301 |                     .enable_feature(wasm_opt::Feature::MultiMemory)
302 |                     .disable_feature(wasm_opt::Feature::Simd)
303 |                     .disable_feature(wasm_opt::Feature::RelaxedSimd)
304 |                     .disable_feature(wasm_opt::Feature::Gc)
305 |                     .disable_feature(wasm_opt::Feature::ExceptionHandling)
306 |                     .run(&path, &out);
307 | 
308 |                 if wasm_opt_result.is_ok() {
309 |                     let original_size = std::fs::metadata(&path).map(|m| m.len());
310 |                     let new_size = std::fs::metadata(&out).map(|m| m.len());
311 | 
312 |                     match (new_size, original_size) {
313 |                         (Ok(new_size), Ok(original_size)) if new_size < original_size => {
314 |                             let diff = (original_size - new_size) / 1024;
315 | 
316 |                             if diff > 20 {
317 |                                 diags.emit(
318 |                                     Diagnostic::warning()
319 |                                         .with_labels(vec![Label::primary(
320 |                                             FileId::new(
321 |                                                 None,
322 |                                                 VirtualPath::new(path.strip_prefix(package_dir)?),
323 |                                             ),
324 |                                             0..0,
325 |                                         )])
326 |                                         .with_message(format!(
327 |                                             "This file could be {diff}kB smaller with `wasm-opt -Os`."
328 |                                         )),
329 |                                 );
330 |                             }
331 |                         }
332 |                         _ => {}
333 |                     }
334 | 
335 |                     // TODO: ideally this should be async
336 |                     std::fs::remove_file(out).ok();
337 |                 }
338 |             }
339 | 
340 |             // Don't suggest excluding WASM files; they are generally necessary
341 |             // for the package to work.
342 |             continue;
343 |         }
344 | 
345 |         let fid = FileId::new(None, VirtualPath::new(&path));
346 | 
347 |         let message = if size > REALLY_LARGE {
348 |             format!(
349 |                 "This file is really large ({size}MB). \
350 |                 If possible, do not include it in this repository at all.",
351 |                 size = size / 1024 / 1024
352 |             )
353 |         } else if !exclude.matched(path, false).is_ignore() {
354 |             format!(
355 |                 "This file is quite large ({size}MB). \
356 |                 If it is not required to use the package \
357 |                 (i.e.
it is a documentation file, or part of an example), \ 358 | it should be added to `exclude` in your `typst.toml`.", 359 | size = size / 1024 / 1024 360 | ) 361 | } else { 362 | continue; 363 | }; 364 | 365 | diags.emit( 366 | Diagnostic::warning() 367 | .with_labels(vec![Label::primary(fid, 0..0)]) 368 | .with_message(message), 369 | ) 370 | } 371 | 372 | // Also exclude examples 373 | for ch in ignore::WalkBuilder::new(package_dir) 374 | .overrides(exclude) 375 | .build() 376 | { 377 | let Ok(ch) = ch else { 378 | continue; 379 | }; 380 | 381 | let Ok(metadata) = ch.metadata() else { 382 | continue; 383 | }; 384 | 385 | if metadata.is_dir() { 386 | continue; 387 | } 388 | 389 | if template_dir 390 | .as_ref() 391 | .is_some_and(|template_dir| ch.path().starts_with(template_dir)) 392 | { 393 | // Don't exclude template files, even if they contain "example" or "test" in their name. 394 | continue; 395 | } 396 | 397 | let relative_path = ch 398 | .path() 399 | .strip_prefix(package_dir) 400 | .context("Child path is not part of parent path")?; 401 | 402 | let file_name = ch.file_name(); 403 | let file_name_str = file_name.to_string_lossy(); 404 | let file_id = FileId::new(None, VirtualPath::new(relative_path)); 405 | let warning = Diagnostic::warning().with_labels(vec![Label::primary(file_id, 0..0)]); 406 | if file_name_str.contains("example") { 407 | diags.emit(warning.clone().with_message( 408 | "This file seems to be an example, \ 409 | and should probably be added to `exclude` in your `typst.toml`.", 410 | )); 411 | continue; 412 | } 413 | 414 | if file_name_str.contains("test") { 415 | diags.emit(warning.clone().with_message( 416 | "This file seems to be a test, \ 417 | and should probably be added to `exclude` in your `typst.toml`.", 418 | )); 419 | continue; 420 | } 421 | } 422 | 423 | Ok(()) 424 | } 425 | 426 | fn dont_over_exclude( 427 | diags: &mut Diagnostics, 428 | package_dir: &Path, 429 | manifest_file_id: FileId, 430 | manifest: &toml_edit::ImDocument<&String>, 431 | ) -> eyre::Result<()> { 432 | let (exclude, span) = read_exclude(package_dir, manifest)?; 433 | 434 | let warning = Diagnostic::warning().with_labels(vec![Label::primary(manifest_file_id, span)]); 435 | 436 | if exclude.matched("LICENSE", false).is_ignore() { 437 | diags.emit( 438 | warning 439 | .clone() 440 | .with_message("Your LICENSE file should not be excluded."), 441 | ); 442 | } 443 | 444 | if exclude.matched("README.md", false).is_ignore() { 445 | diags.emit(warning.with_message("Your README.md file should not be excluded.")); 446 | } 447 | 448 | Ok(()) 449 | } 450 | 451 | fn check_file_names(diags: &mut Diagnostics, package_dir: &Path) -> eyre::Result<()> { 452 | for ch in std::fs::read_dir(package_dir).context("Failed to read package directory")? 
{ 453 | let mut error_for_file = |path, message| { 454 | let file_id = FileId::new(None, VirtualPath::new(path)); 455 | diags.emit( 456 | Diagnostic::error() 457 | .with_labels(vec![Label::primary(file_id, 0..0)]) 458 | .with_message(message), 459 | ) 460 | }; 461 | 462 | let Ok(ch) = ch else { 463 | continue; 464 | }; 465 | let Ok(meta) = ch.metadata() else { 466 | continue; 467 | }; 468 | if !meta.is_file() { 469 | continue; 470 | } 471 | 472 | let file_name = ch.file_name(); 473 | let file_path = Path::new(&file_name); 474 | let stem = file_path 475 | .file_stem() 476 | .map(|s| s.to_string_lossy().into_owned()); 477 | let stem_uppercase = stem.as_ref().map(|s| s.to_uppercase()); 478 | 479 | if stem_uppercase.as_deref() == Some("LICENCE") { 480 | error_for_file(file_path, "This file should be named LICENSE."); 481 | } 482 | 483 | if (stem_uppercase.as_deref() == Some("LICENSE") 484 | || stem_uppercase.as_deref() == Some("README")) 485 | && stem_uppercase != stem 486 | { 487 | let fixed = if let Some(ext) = file_path.extension() { 488 | format!( 489 | "{}.{}", 490 | stem.unwrap_or_default().to_uppercase(), 491 | ext.to_string_lossy() 492 | ) 493 | } else { 494 | stem.unwrap_or_default().to_uppercase() 495 | }; 496 | error_for_file( 497 | file_path, 498 | &format!( 499 | "To keep consistency, please use \ 500 | ALL CAPS for the name of this file (i.e. {fixed})" 501 | ), 502 | ) 503 | } 504 | } 505 | 506 | Ok(()) 507 | } 508 | 509 | /// Some fields are optional for the bundler, but required to be published in Typst Universe. 510 | /// Check that they are present. 511 | fn check_universe_fields( 512 | diags: &mut Diagnostics, 513 | manifest_file_id: FileId, 514 | manifest: &toml_edit::ImDocument<&String>, 515 | ) -> eyre::Result<()> { 516 | let pkg = manifest 517 | .get("package") 518 | .context("[package] not found")? 
519 |         .as_table()
520 |         .context("[package] is not a table")?;
521 | 
522 |     if let Some((license, span)) = pkg
523 |         .get("license")
524 |         .and_then(|l| l.as_str().map(|s| (s, l.span().unwrap_or_default())))
525 |     {
526 |         if let Ok(license) = spdx::Expression::parse(license) {
527 |             for requirement in license.requirements() {
528 |                 if let Some(id) = requirement.req.license.id() {
529 |                     if !id.is_osi_approved() {
530 |                         diags.emit(
531 |                             Diagnostic::error()
532 |                                 .with_message("The `license` field should only contain OSI-approved licenses")
533 |                                 .with_labels(vec![Label::primary(manifest_file_id, span.clone())]),
534 |                         );
535 |                     }
536 |                 } else {
537 |                     diags.emit(
538 |                         Diagnostic::error()
539 |                             .with_message("The `license` field should not contain a custom license reference (`LicenseRef`)")
540 |                             .with_labels(vec![Label::primary(manifest_file_id, span.clone())]),
541 |                     );
542 |                 }
543 |             }
544 |         } else {
545 |             diags.emit(
546 |                 Diagnostic::error()
547 |                     .with_message("The `license` field should be a valid SPDX-2 expression")
548 |                     .with_labels(vec![Label::primary(manifest_file_id, span.clone())]),
549 |             );
550 |         }
551 |     } else {
552 |         diags.emit(
553 |             Diagnostic::error()
554 |                 .with_message("The `license` field should be a string")
555 |                 .with_labels(vec![Label::primary(manifest_file_id, 0..0)]),
556 |         );
557 |     }
558 | 
559 |     if pkg.get("description").map(|d| !d.is_str()).unwrap_or(true) {
560 |         diags.emit(
561 |             Diagnostic::error()
562 |                 .with_message("The `description` field should be a string")
563 |                 .with_labels(vec![Label::primary(manifest_file_id, 0..0)]),
564 |         );
565 |     }
566 | 
567 |     if pkg
568 |         .get("authors")
569 |         .and_then(|a| a.as_array())
570 |         .map(|a| a.iter().any(|item| !item.is_str()))
571 |         .unwrap_or(true)
572 |     {
573 |         diags.emit(
574 |             Diagnostic::error()
575 |                 .with_message("The `authors` field should be an array of strings")
576 |                 .with_labels(vec![Label::primary(manifest_file_id, 0..0)]),
577 |         );
578 |         // TODO: check that the format is correct?
579 |     }
580 | 
581 |     Ok(())
582 | }
583 | 
584 | async fn check_url(diags: &mut Diagnostics, manifest_file_id: FileId, field: &Item) -> Option<()> {
585 |     if let Err(e) = reqwest::get(field.as_str()?)
586 |         .await
587 |         .and_then(|res| res.error_for_status())
588 |     {
589 |         diags.emit(
590 |             Diagnostic::error()
591 |                 .with_labels(vec![Label::primary(
592 |                     manifest_file_id,
593 |                     field.span().unwrap_or_default(),
594 |                 )])
595 |                 .with_message(format!(
596 |                     "We could not fetch this URL.\n\nDetails: {:#?}",
597 |                     e.without_url()
598 |                 )),
599 |         )
600 |     }
601 | 
602 |     Some(())
603 | }
604 | 
605 | async fn check_repo(
606 |     diags: &mut Diagnostics,
607 |     manifest_file_id: FileId,
608 |     manifest: &toml_edit::ImDocument<&String>,
609 | ) -> Option<()> {
610 |     let repo_field = manifest.get("package")?.get("repository")?;
611 |     check_url(diags, manifest_file_id, repo_field).await;
612 | 
613 |     let homepage_field = manifest.get("package")?.get("homepage")?;
614 |     check_url(diags, manifest_file_id, homepage_field).await;
615 | 
616 |     if repo_field.as_str() == homepage_field.as_str() {
617 |         diags.emit(
618 |             Diagnostic::error()
619 |                 .with_labels(vec![Label::primary(
620 |                     manifest_file_id,
621 |                     homepage_field.span().unwrap_or_default(),
622 |                 )])
623 |                 .with_message("Use the `homepage` field only if there is a dedicated website. Otherwise, prefer the `repository` field.".to_owned()),
624 |         )
625 |     }
626 | 
627 |     Some(())
628 | }
629 | 
630 | fn read_exclude(
631 |     package_dir: &Path,
632 |     manifest: &toml_edit::ImDocument<&String>,
633 | ) -> eyre::Result<(Override, Range<usize>)> {
634 |     let empty_array = toml_edit::Array::new();
635 |     let exclude = manifest
636 |         .get("package")
637 |         .and_then(|package| package.get("exclude"))
638 |         .and_then(|item| item.as_array())
639 |         .unwrap_or(&empty_array);
640 | 
641 |     let mut exclude_globs = OverrideBuilder::new(
642 |         package_dir
643 |             .canonicalize()
644 |             .context("Failed to canonicalize package directory")?,
645 |     );
646 |     for exclusion in exclude {
647 |         let Some(exclusion) = exclusion.as_str() else {
648 |             continue;
649 |         };
650 | 
651 |         if exclusion.starts_with('!') {
652 |             warn!("globs with '!' are not supported");
653 |             continue;
654 |         }
655 | 
656 |         let exclusion = exclusion.trim_start_matches("./");
657 |         exclude_globs.add(&format!("!{exclusion}")).ok();
658 |     }
659 |     Ok((
660 |         exclude_globs.build().context("Invalid exclude globs")?,
661 |         exclude.span().unwrap_or(0..0),
662 |     ))
663 | }
664 | 
665 | fn world_for_template(
666 |     manifest: &toml_edit::ImDocument<&String>,
667 |     package_dir: &Path,
668 |     package_spec: &PackageSpec,
669 |     exclude: Override,
670 | ) -> Option<SystemWorld> {
671 |     let template = manifest.get("template")?.as_table()?;
672 |     let template_path = package_dir.join(template.get("path")?.as_str()?);
673 |     let template_main = template_path.join(template.get("entrypoint")?.as_str()?);
674 | 
675 |     let mut world = SystemWorld::new(template_main, template_path)
676 |         .ok()?
677 |         .with_package_override(package_spec, package_dir);
678 |     world.exclude(exclude);
679 |     Some(world)
680 | }
681 | 
682 | fn dont_exclude_template_files(
683 |     diags: &mut Diagnostics,
684 |     manifest: &toml_edit::ImDocument<&String>,
685 |     package_dir: &Path,
686 |     exclude: Override,
687 | ) -> Option<()> {
688 |     let template_root = template_root(manifest)?;
689 |     for entry in ignore::Walk::new(package_dir.join(template_root)).flatten() {
690 |         // For build artifacts, ask the package author to delete them.
691 |         let ext = entry.path().extension().and_then(|e| e.to_str());
692 |         if matches!(ext, Some("pdf" | "png" | "svg")) && entry.path().with_extension("typ").exists()
693 |         {
694 |             diags.emit(
695 |                 Diagnostic::error()
696 |                     .with_labels(vec![Label::primary(
697 |                         FileId::new(
698 |                             None,
699 |                             VirtualPath::new(entry.path().strip_prefix(package_dir).ok()?),
700 |                         ),
701 |                         0..0,
702 |                     )])
703 |                     .with_message(
704 |                         "This file is a compiled document and should \
705 |                         not be included in the template. \
706 |                         Please delete it.",
707 |                     ),
708 |             );
709 |             continue;
710 |         }
711 | 
712 |         // For other files, check that they are indeed not excluded.
713 |         if exclude
714 |             .matched(
715 |                 entry.path().canonicalize().ok()?,
716 |                 entry.metadata().ok()?.is_dir(),
717 |             )
718 |             .is_ignore()
719 |         {
720 |             diags.emit(
721 |                 Diagnostic::error()
722 |                     .with_message("This file is part of the template and should not be excluded.")
723 |                     .with_labels(vec![Label::primary(
724 |                         FileId::new(
725 |                             None,
726 |                             VirtualPath::new(entry.path().strip_prefix(package_dir).ok()?),
727 |                         ),
728 |                         0..0,
729 |                     )]),
730 |             )
731 |         }
732 |     }
733 | 
734 |     Some(())
735 | }
736 | 
737 | fn template_root(manifest: &toml_edit::ImDocument<&String>) -> Option<PathBuf> {
738 |     Some(PathBuf::from(
739 |         manifest
740 |             .get("template")
741 |             .and_then(|t| t.get("path"))?
742 | .as_str()?, 743 | )) 744 | } 745 | 746 | fn check_thumbnail( 747 | diags: &mut Diagnostics, 748 | manifest: &toml_edit::ImDocument<&String>, 749 | manifest_file_id: FileId, 750 | package_dir: &Path, 751 | ) -> Option { 752 | let thumbnail = manifest.get("template")?.as_table()?.get("thumbnail")?; 753 | let thumbnail_path = package_dir.join(thumbnail.as_str()?); 754 | 755 | if !thumbnail_path.exists() { 756 | diags.emit( 757 | Diagnostic::error() 758 | .with_labels(vec![Label::primary(manifest_file_id, thumbnail.span()?)]) 759 | .with_message("This file does not exist."), 760 | ) 761 | } 762 | 763 | if !matches!( 764 | thumbnail_path.extension().and_then(|e| e.to_str()), 765 | Some("png" | "webp") 766 | ) { 767 | diags.emit( 768 | Diagnostic::error() 769 | .with_labels(vec![Label::primary(manifest_file_id, thumbnail.span()?)]) 770 | .with_message("Thumbnails should be PNG or WebP files."), 771 | ) 772 | } 773 | 774 | Some(thumbnail_path) 775 | } 776 | -------------------------------------------------------------------------------- /src/cli.rs: -------------------------------------------------------------------------------- 1 | use std::path::Path; 2 | 3 | use codespan_reporting::{diagnostic::Diagnostic, term}; 4 | use ignore::overrides::Override; 5 | use tracing::error; 6 | use typst::syntax::{package::PackageSpec, FileId, Source}; 7 | 8 | use crate::{check::all_checks, package::PackageExt, world::SystemWorld}; 9 | 10 | pub async fn main(package_spec: String) { 11 | let package_spec: Option = package_spec.parse().ok(); 12 | let package_dir = if let Some(ref package_spec) = package_spec { 13 | package_spec.directory() 14 | } else { 15 | Path::new(".").to_owned() 16 | }; 17 | 18 | match all_checks(package_spec.as_ref(), package_dir, true).await { 19 | Ok((mut world, diags)) => { 20 | if let Err(err) = print_diagnostics(&mut world, diags.errors(), diags.warnings()) { 21 | error!("failed to print diagnostics ({err})"); 22 | error!( 23 | "Raw diagnostics: {:#?}\n{:#?}", 24 | diags.errors(), 25 | diags.warnings() 26 | ); 27 | } 28 | } 29 | Err(e) => println!("Fatal error: {}", e), 30 | } 31 | } 32 | 33 | /// Print diagnostic messages to the terminal. 34 | pub fn print_diagnostics( 35 | world: &mut SystemWorld, 36 | errors: &[Diagnostic], 37 | warnings: &[Diagnostic], 38 | ) -> Result<(), codespan_reporting::files::Error> { 39 | let config = term::Config { 40 | tab_width: 2, 41 | ..Default::default() 42 | }; 43 | 44 | // We should be able to print diagnostics even on excluded files. If we 45 | // don't remove the exclusion, it will fail to read and display the file 46 | // contents. 47 | world.exclude(Override::empty()); 48 | world.reset_file_cache(); 49 | 50 | for diagnostic in warnings.iter().chain(errors) { 51 | term::emit( 52 | &mut term::termcolor::StandardStream::stdout(term::termcolor::ColorChoice::Always), 53 | &config, 54 | world, 55 | diagnostic, 56 | )?; 57 | } 58 | 59 | Ok(()) 60 | } 61 | 62 | type CodespanResult = Result; 63 | type CodespanError = codespan_reporting::files::Error; 64 | 65 | impl<'a> codespan_reporting::files::Files<'a> for SystemWorld { 66 | type FileId = FileId; 67 | type Name = String; 68 | type Source = Source; 69 | 70 | fn name(&'a self, id: FileId) -> CodespanResult { 71 | let vpath = id.vpath(); 72 | Ok(if let Some(package) = id.package() { 73 | format!("{package}{}", vpath.as_rooted_path().display()) 74 | } else { 75 | // Try to express the path relative to the working directory. 
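        // Hypothetical example: with root `/repo/pkg` and working directory
        // `/repo`, the virtual path `lib.typ` resolves to `/repo/pkg/lib.typ`
        // and is displayed as `pkg/lib.typ`; if no relative path can be
        // computed, the rootless path is used as a fallback.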
76 | vpath 77 | .resolve(self.root()) 78 | .and_then(|abs| pathdiff::diff_paths(abs, self.workdir())) 79 | .as_deref() 80 | .unwrap_or_else(|| vpath.as_rootless_path()) 81 | .to_string_lossy() 82 | .into() 83 | }) 84 | } 85 | 86 | fn source(&'a self, id: FileId) -> CodespanResult { 87 | match self.lookup(id) { 88 | Ok(x) => Ok(x), 89 | // Hack to be able to report errors on files that are not UTF-8. The 90 | // error range should always be 0..0 for this to work. 91 | Err(typst::diag::FileError::InvalidUtf8) => Ok(Source::new(id, String::new())), 92 | Err(e) => Err(CodespanError::Io(std::io::Error::new( 93 | std::io::ErrorKind::InvalidData, 94 | e, 95 | ))), 96 | } 97 | } 98 | 99 | fn line_index(&'a self, id: FileId, given: usize) -> CodespanResult { 100 | let source = self.source(id)?; 101 | source 102 | .byte_to_line(given) 103 | .ok_or_else(|| CodespanError::IndexTooLarge { 104 | given, 105 | max: source.len_bytes(), 106 | }) 107 | } 108 | 109 | fn line_range(&'a self, id: FileId, given: usize) -> CodespanResult> { 110 | let source = self.source(id)?; 111 | source 112 | .line_to_range(given) 113 | .ok_or_else(|| CodespanError::LineTooLarge { 114 | given, 115 | max: source.len_lines(), 116 | }) 117 | } 118 | 119 | fn column_number(&'a self, id: FileId, _: usize, given: usize) -> CodespanResult { 120 | let source = self.source(id)?; 121 | source.byte_to_column(given).ok_or_else(|| { 122 | let max = source.len_bytes(); 123 | if given <= max { 124 | CodespanError::InvalidCharBoundary { given } 125 | } else { 126 | CodespanError::IndexTooLarge { given, max } 127 | } 128 | }) 129 | } 130 | } 131 | -------------------------------------------------------------------------------- /src/github.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | ffi::OsStr, 3 | path::{Path, PathBuf}, 4 | }; 5 | 6 | use axum::{ 7 | body::Body, 8 | extract::State, 9 | http::{Response, StatusCode}, 10 | response::IntoResponse, 11 | routing::{get, post}, 12 | Router, 13 | }; 14 | use codespan_reporting::{ 15 | diagnostic::{Diagnostic, Severity}, 16 | files::Files, 17 | }; 18 | use eyre::Context; 19 | use hook::{CheckRunPayload, PullRequestAction, PullRequestPayload}; 20 | use jwt_simple::prelude::*; 21 | use pr::{AnyPullRequest, MinimalPullRequest, PullRequest, PullRequestUpdate}; 22 | use tracing::{debug, error, info, warn}; 23 | use typst::syntax::{package::PackageSpec, FileId}; 24 | 25 | use crate::{check, package::PackageExt, world::SystemWorld}; 26 | 27 | use api::{ 28 | check::{CheckRun, CheckRunAction}, 29 | *, 30 | }; 31 | 32 | mod api; 33 | pub mod git; 34 | 35 | use self::{ 36 | api::check::{Annotation, AnnotationLevel, CheckRunOutput, CheckSuite, CheckSuiteAction}, 37 | git::GitRepo, 38 | hook::{CheckSuitePayload, HookPayload}, 39 | }; 40 | 41 | /// Application configuration, read from .env file. 
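///
/// A minimal `.env` sketch (the variable names are the ones read in
/// `hook_server` below; the values are placeholders, not real secrets):
///
/// ```text
/// GITHUB_APP_IDENTIFIER=123456
/// GITHUB_WEBHOOK_SECRET=shared-secret
/// GITHUB_PRIVATE_KEY=-----BEGIN RSA PRIVATE KEY-----&...&-----END RSA PRIVATE KEY-----
/// PACKAGES_DIR=/srv/packages
/// ```
///
/// Note that newlines in the private key are written as `&`: `hook_server`
/// replaces every `&` with `\n` after reading the variable.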
42 | #[derive(Clone)] 43 | struct AppState { 44 | webhook_secret: Vec, 45 | private_key: String, 46 | app_id: String, 47 | git_dir: String, 48 | } 49 | 50 | /// Runs an HTTP server to handle GitHub hooks 51 | pub async fn hook_server() { 52 | let state = AppState { 53 | webhook_secret: std::env::var("GITHUB_WEBHOOK_SECRET") 54 | .expect("GITHUB_WEBHOOK_SECRET is not set.") 55 | .into_bytes(), 56 | private_key: std::env::var("GITHUB_PRIVATE_KEY") 57 | .expect("GITHUB_PRIVATE_KEY is not set.") 58 | .replace('&', "\n"), 59 | app_id: std::env::var("GITHUB_APP_IDENTIFIER").expect("GITHUB_APP_IDENTIFIER is not set."), 60 | git_dir: std::env::var("PACKAGES_DIR").expect("PACKAGES_DIR is not set."), 61 | }; 62 | 63 | GitRepo::open(Path::new(&state.git_dir[..])) 64 | .clone_if_needed("https://github.com/typst/packages.git") 65 | .await 66 | .expect("Can't clone the packages repository"); 67 | 68 | let app = Router::new() 69 | .route("/", get(index)) 70 | .route("/github-hook", post(github_hook::>)) 71 | .route("/force-review/:install/:sha", get(force)) 72 | .layer(tower_http::trace::TraceLayer::new_for_http()) 73 | .with_state(state); 74 | 75 | info!("Starting…"); 76 | let listener = tokio::net::TcpListener::bind("0.0.0.0:7878") 77 | .await 78 | .expect("Can't listen on 0.0.0.0:7878"); 79 | axum::serve(listener, app).await.expect("Server error"); 80 | } 81 | 82 | /// The page served on `/`, just to check that everything runs properly. 83 | async fn index() -> &'static str { 84 | "typst-package-check is running" 85 | } 86 | 87 | async fn force( 88 | state: State, 89 | api_client: GitHub, 90 | axum::extract::Path((install, pr)): axum::extract::Path<(String, usize)>, 91 | ) -> Result<&'static str, &'static str> { 92 | debug!("Force review for #{pr}"); 93 | let repository = Repository::new("typst/packages").map_err(|e| { 94 | error!("{}", e); 95 | "Invalid repository path" 96 | })?; 97 | 98 | let installation = Installation { 99 | id: str::parse(&install).map_err(|_| "Invalid installation ID")?, 100 | }; 101 | let api_client = api_client 102 | .auth_installation(&installation) 103 | .await 104 | .map_err(|e| { 105 | debug!("Failed to authenticate installation: {}", e); 106 | "Failed to authenticate installation" 107 | })?; 108 | 109 | let pr = MinimalPullRequest { number: pr }; 110 | let full_pr = pr 111 | .get_full(&api_client, repository.owner(), repository.name()) 112 | .await 113 | .map_err(|e| { 114 | error!("{}", e); 115 | "Failed to fetch PR context" 116 | })?; 117 | let sha = full_pr.head.sha.clone(); 118 | 119 | github_hook( 120 | state, 121 | api_client, 122 | HookPayload::CheckSuite(CheckSuitePayload { 123 | action: CheckSuiteAction::Requested, 124 | installation, 125 | check_suite: CheckSuite { 126 | head_sha: sha, 127 | pull_requests: vec![AnyPullRequest::Full(full_pr)], 128 | }, 129 | }), 130 | ) 131 | .await 132 | .map_err(|e| { 133 | debug!("Error: {:?}", e); 134 | "Error in the GitHub hook handler" 135 | })?; 136 | 137 | Ok("OK!") 138 | } 139 | 140 | /// The route to handle GitHub hooks. Mounted on `/github-hook`. 
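///
/// In outline: the handler authenticates as the App installation, extracts a
/// head SHA and (when available) a pull request from `check_suite`,
/// `check_run`, or `pull_request` payloads, then spawns a background task
/// that pulls the repository, runs all checks on each touched package, and
/// reports the results through the Checks API.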
141 | async fn github_hook( 142 | State(state): State, 143 | api_client: G, 144 | payload: HookPayload, 145 | ) -> Result<(), WebError> { 146 | debug!("GitHub hook was triggered"); 147 | let api_client = api_client.auth_installation(&payload).await?; 148 | debug!("Successfully authenticated application"); 149 | 150 | let repository = Repository::new("typst/packages").map_err(|e| { 151 | error!("Invalid repository path: {}", e); 152 | WebError::UnexpectedEvent 153 | })?; 154 | 155 | let (head_sha, pr, previous_check_run) = match payload { 156 | HookPayload::CheckSuite(CheckSuitePayload { 157 | action: CheckSuiteAction::Requested | CheckSuiteAction::Rerequested, 158 | mut check_suite, 159 | .. 160 | }) => (check_suite.head_sha, check_suite.pull_requests.pop(), None), 161 | HookPayload::CheckRun(CheckRunPayload { 162 | action: CheckRunAction::Rerequested, 163 | mut check_run, 164 | .. 165 | }) => ( 166 | check_run.check_suite.head_sha.clone(), 167 | check_run.check_suite.pull_requests.pop(), 168 | Some(check_run), 169 | ), 170 | HookPayload::PullRequest(PullRequestPayload { 171 | action: PullRequestAction::Opened | PullRequestAction::Synchronize, 172 | pull_request, 173 | .. 174 | }) => ( 175 | pull_request.head.sha.clone(), 176 | Some(AnyPullRequest::Full(pull_request)), 177 | None, 178 | ), 179 | HookPayload::CheckRun(_) 180 | | HookPayload::CheckSuite(CheckSuitePayload { 181 | action: CheckSuiteAction::Completed, 182 | .. 183 | }) => return Ok(()), 184 | other => { 185 | debug!("Unexpected payload: {:?}", other); 186 | return Err(WebError::UnexpectedEvent); 187 | } 188 | }; 189 | 190 | let pr = if let Some(pr) = pr { 191 | pr.get_full(&api_client, repository.owner(), repository.name()) 192 | .await 193 | .ok() 194 | } else { 195 | None 196 | }; 197 | 198 | debug!( 199 | "Starting checks for {}{}", 200 | head_sha, 201 | if let Some(ref pr) = pr { 202 | format!(" (#{})", pr.number) 203 | } else { 204 | String::new() 205 | } 206 | ); 207 | tokio::spawn(async move { 208 | async fn inner( 209 | state: AppState, 210 | head_sha: String, 211 | api_client: GitHub, 212 | repository: Repository, 213 | previous_check_run: Option, 214 | pr: Option, 215 | ) -> eyre::Result<()> { 216 | let git_repo = GitRepo::open(Path::new(&state.git_dir)); 217 | git_repo.pull_main().await?; 218 | git_repo.fetch_commit(&head_sha).await?; 219 | let touched_files = git_repo.files_touched_by(&head_sha).await?; 220 | 221 | let mut touches_outside_of_packages = false; 222 | 223 | let touched_packages = touched_files 224 | .into_iter() 225 | .filter_map(|line| { 226 | let mut components = line.components(); 227 | if components.next()?.as_os_str() != OsStr::new("packages") { 228 | touches_outside_of_packages = true; 229 | return None; 230 | } 231 | 232 | let namespace = components.next()?.as_os_str().to_str()?.into(); 233 | let name = components.next()?.as_os_str().to_str()?.into(); 234 | let version = components.next()?.as_os_str().to_str()?.parse().ok()?; 235 | Some(PackageSpec { 236 | namespace, 237 | name, 238 | version, 239 | }) 240 | }) 241 | .collect::>(); 242 | 243 | if let Some(pr) = &pr { 244 | // Update labels 245 | let mut has_new_packages = false; 246 | let mut has_updated_packages = false; 247 | for package in &touched_packages { 248 | if git_repo 249 | .has_previous_version(package) 250 | .await 251 | .unwrap_or(false) 252 | { 253 | has_updated_packages = true; 254 | } else { 255 | has_new_packages = true; 256 | } 257 | } 258 | let mut labels = Vec::new(); 259 | if has_new_packages { 260 | 
labels.push("new".to_owned()) 261 | } 262 | if has_updated_packages { 263 | labels.push("update".to_owned()); 264 | } 265 | 266 | // Update checks in PR body if needed 267 | let mut body_changed = false; 268 | let new_body = pr 269 | .body 270 | .lines() 271 | .map(|l| { 272 | let line = l.trim(); 273 | if line.starts_with("-") { 274 | let marked = line.contains("[x]"); 275 | if line.ends_with("a new package") { 276 | body_changed |= marked != has_new_packages; 277 | if has_new_packages { 278 | return "- [x] a new package"; 279 | } else { 280 | return "- [ ] a new package"; 281 | } 282 | } 283 | 284 | if line.ends_with("an update for a package") { 285 | body_changed |= marked != has_updated_packages; 286 | if has_updated_packages { 287 | return "- [x] an update for a package"; 288 | } else { 289 | return "- [ ] an update for a package"; 290 | } 291 | } 292 | } 293 | 294 | l 295 | }) 296 | .fold(String::with_capacity(pr.body.len()), |body, line| { 297 | body + "\n" + line 298 | }); 299 | let body = if body_changed { Some(new_body) } else { None }; 300 | 301 | // Update title 302 | let mut package_names = touched_packages 303 | .iter() 304 | .map(|p| format!("{}:{}", p.name, p.version)) 305 | .collect::>(); 306 | package_names.sort(); 307 | let last_package = package_names.pop(); 308 | let penultimate_package = package_names.pop(); 309 | let expected_pr_title = if let Some((penultimate_package, last_package)) = 310 | penultimate_package.as_ref().zip(last_package.as_ref()) 311 | { 312 | package_names.push(format!("{} and {}", penultimate_package, last_package)); 313 | Some(package_names.join(", ")) 314 | } else { 315 | last_package 316 | }; 317 | 318 | // Actually update the PR, if needed 319 | if let Some(expected_pr_title) = expected_pr_title { 320 | if pr.title != expected_pr_title || !labels.is_empty() || body.is_some() { 321 | api_client 322 | .update_pull_request( 323 | repository.owner(), 324 | repository.name(), 325 | pr.number, 326 | PullRequestUpdate { 327 | title: expected_pr_title, 328 | labels, 329 | body, 330 | }, 331 | ) 332 | .await 333 | .context("Failed to update pull request")?; 334 | } 335 | } 336 | } 337 | 338 | for ref package in touched_packages { 339 | let check_run_name = format!( 340 | "@{}/{}:{}", 341 | package.namespace, package.name, package.version 342 | ); 343 | 344 | let check_run = if let Some(previous) = previous_check_run 345 | .as_ref() 346 | .filter(|p| p.name == check_run_name) 347 | { 348 | previous.clone().without_suite() 349 | } else { 350 | api_client 351 | .create_check_run( 352 | repository.owner(), 353 | repository.name(), 354 | check_run_name, 355 | &head_sha, 356 | ) 357 | .await 358 | .context("Failed to create a new check run")? 359 | .without_suite() 360 | }; 361 | 362 | if touches_outside_of_packages { 363 | api_client.update_check_run( 364 | repository.owner(), 365 | repository.name(), 366 | check_run.id, 367 | false, 368 | CheckRunOutput { 369 | title: "This PR does too many things", 370 | summary: "A PR should either change packages/, or the rest of the repository, but not both.", 371 | annotations: &[], 372 | }, 373 | ).await 374 | .context("Failed to cancel a check run because the branch does too many things")?; 375 | continue; 376 | } 377 | 378 | let checkout_dir = format!("checkout-{}", head_sha); 379 | git_repo 380 | .checkout_commit(&head_sha, &checkout_dir) 381 | .await 382 | .context("Failed to checkout commit")?; 383 | 384 | // Check that the author of this PR is the same as the one of 385 | // the previous version. 
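                // If the authors differ, the previous author is pinged in a PR
                // comment below and asked to explicitly approve the update
                // before the pull request can be merged.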
386 | if let Some(current_pr) = &pr { 387 | debug!("There is a current PR"); 388 | if let Some(previous_commit) = 389 | check::authors::commit_for_previous_version(package) 390 | { 391 | debug!("Found previous commit: {previous_commit}"); 392 | if let Ok(Some(previous_pr)) = api_client 393 | .prs_for_commit(repository.owner(), repository.name(), previous_commit) 394 | .await 395 | .map(|prs| prs.into_iter().next()) 396 | { 397 | debug!( 398 | "Found previous PR: #{} (author: {})", 399 | previous_pr.number, previous_pr.user.login 400 | ); 401 | if previous_pr.user.login != current_pr.user.login { 402 | if let Err(e) = api_client 403 | .post_pr_comment( 404 | repository.owner(), 405 | repository.name(), 406 | current_pr.number, 407 | format!( 408 | "@{} You released {}:{}, so you probably \ 409 | want to have a look at this pull request. \ 410 | If you want this update to be merged, \ 411 | please leave a comment stating so. \ 412 | Without your permission, the pull request \ 413 | will not be merged.", 414 | previous_pr.user.login, 415 | package.name, 416 | package.previous_version() 417 | .expect("If there is no previous version, this branch should not be reached") 418 | .version 419 | ), 420 | ) 421 | .await 422 | { 423 | warn!("Error while posting PR comment: {:?}", e) 424 | } 425 | } 426 | } 427 | } 428 | } 429 | 430 | let (world, diags) = match check::all_checks( 431 | Some(package), 432 | PathBuf::new() 433 | .join(&checkout_dir) 434 | .join("packages") 435 | .join(package.namespace.as_str()) 436 | .join(package.name.as_str()) 437 | .join(package.version.to_string()), 438 | false, 439 | ) 440 | .await 441 | { 442 | Ok(x) => x, 443 | Err(e) => { 444 | api_client 445 | .update_check_run( 446 | repository.owner(), 447 | repository.name(), 448 | check_run.id, 449 | false, 450 | CheckRunOutput { 451 | title: "Fatal error", 452 | summary: &format!( 453 | "The following error was encountered:\n\n{}", 454 | e 455 | ), 456 | annotations: &[], 457 | }, 458 | ) 459 | .await 460 | .context("Failed to report fatal error")?; 461 | return Err(e); 462 | } 463 | }; 464 | 465 | let plural = |n| if n == 1 { "" } else { "s" }; 466 | 467 | api_client 468 | .update_check_run( 469 | repository.owner(), 470 | repository.name(), 471 | check_run.id, 472 | diags.errors().is_empty() && diags.warnings().is_empty(), 473 | CheckRunOutput { 474 | title: &if !diags.errors().is_empty() { 475 | if diags.warnings().is_empty() { 476 | format!( 477 | "{} error{}", 478 | diags.errors().len(), 479 | plural(diags.errors().len()) 480 | ) 481 | } else { 482 | format!( 483 | "{} error{}, {} warning{}", 484 | diags.errors().len(), 485 | plural(diags.errors().len()), 486 | diags.warnings().len(), 487 | plural(diags.warnings().len()) 488 | ) 489 | } 490 | } else if diags.warnings().is_empty() { 491 | "All good!".to_owned() 492 | } else { 493 | format!( 494 | "{} warning{}", 495 | diags.warnings().len(), 496 | plural(diags.warnings().len()) 497 | ) 498 | }, 499 | summary: &format!( 500 | "Our bots have automatically run some checks on your packages. 
\ 501 | They found {} error{} and {} warning{}.\n\n\ 502 | Warnings are suggestions, your package can still be accepted even \ 503 | if you prefer not to fix them.\n\n\ 504 | A human being will soon review your package, too.", 505 | diags.errors().len(), 506 | plural(diags.errors().len()), 507 | diags.warnings().len(), 508 | plural(diags.warnings().len()), 509 | ), 510 | annotations: &diags 511 | .errors() 512 | .iter() 513 | .chain(diags.warnings()) 514 | .filter_map(|diag| diagnostic_to_annotation(&world, package, diag)) 515 | .take(50) 516 | .collect::>(), 517 | }, 518 | ) 519 | .await 520 | .context("Failed to send report")?; 521 | 522 | tokio::fs::remove_dir_all(checkout_dir).await?; 523 | } 524 | 525 | Ok(()) 526 | } 527 | 528 | if let Err(e) = inner( 529 | state, 530 | head_sha, 531 | api_client, 532 | repository, 533 | previous_check_run, 534 | pr, 535 | ) 536 | .await 537 | { 538 | warn!("Error in hook handler: {:#}", e) 539 | } 540 | }); 541 | 542 | Ok(()) 543 | } 544 | 545 | fn diagnostic_to_annotation( 546 | world: &SystemWorld, 547 | package: &PackageSpec, 548 | diag: &Diagnostic, 549 | ) -> Option { 550 | let label = diag.labels.first()?; 551 | let start_line = world.line_index(label.file_id, label.range.start).ok()?; 552 | let end_line = world.line_index(label.file_id, label.range.end).ok()?; 553 | let (start_column, end_column) = if start_line == end_line { 554 | let start = world 555 | .column_number(label.file_id, start_line, label.range.start) 556 | .ok(); 557 | let end = world 558 | .column_number(label.file_id, start_line, label.range.end) 559 | .ok(); 560 | (start, end) 561 | } else { 562 | (None, None) 563 | }; 564 | let package = label.file_id.package().unwrap_or(package); 565 | Some(Annotation { 566 | path: Path::new("packages") 567 | .join(package.namespace.to_string()) 568 | .join(package.name.to_string()) 569 | .join(package.version.to_string()) 570 | .join(label.file_id.vpath().as_rootless_path()) 571 | .to_str()? 572 | .to_owned(), 573 | // Lines are 1-indexed on GitHub but not for codespan 574 | start_line: start_line + 1, 575 | end_line: end_line + 1, 576 | start_column, 577 | end_column, 578 | annotation_level: if diag.severity == Severity::Warning { 579 | AnnotationLevel::Warning 580 | } else { 581 | AnnotationLevel::Failure 582 | }, 583 | message: diag.message.clone(), 584 | }) 585 | } 586 | 587 | #[derive(Debug)] 588 | enum WebError { 589 | #[allow(dead_code)] 590 | Api(ApiError), 591 | UnexpectedEvent, 592 | } 593 | 594 | impl IntoResponse for WebError { 595 | fn into_response(self) -> axum::response::Response { 596 | debug!("Web error: {:?}", &self); 597 | 598 | Response::builder() 599 | .status(StatusCode::INTERNAL_SERVER_ERROR) 600 | .body(Body::from(format!("{:?}", self))) 601 | .expect("Can't build error response") 602 | } 603 | } 604 | 605 | impl From for WebError { 606 | fn from(value: ApiError) -> Self { 607 | WebError::Api(value) 608 | } 609 | } 610 | -------------------------------------------------------------------------------- /src/github/api.rs: -------------------------------------------------------------------------------- 1 | //! Interact with the GitHub REST API. 
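//!
//! Authentication follows the GitHub App model: a short-lived JWT signed
//! with the app's private key ([`AuthJwt`]) is exchanged for an installation
//! token ([`AuthInstallation`]) via `POST /app/installations/{id}/access_tokens`,
//! and the installation token is what authorizes the actual REST calls.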
2 | 3 | use std::fmt::Display; 4 | 5 | use axum::{extract::FromRequestParts, http::request::Parts}; 6 | use check::MinimalCheckSuite; 7 | use eyre::Error; 8 | use jwt_simple::{ 9 | algorithms::{RS256KeyPair, RSAKeyPairLike}, 10 | claims::Claims, 11 | reexports::coarsetime::Duration, 12 | }; 13 | use reqwest::{RequestBuilder, Response, StatusCode}; 14 | use serde::Deserialize; 15 | use tracing::{debug, warn}; 16 | 17 | use self::check::{CheckRun, CheckRunId, CheckRunOutput}; 18 | 19 | use super::AppState; 20 | 21 | pub mod check; 22 | pub mod hook; 23 | pub mod pr; 24 | mod user; 25 | 26 | #[derive(Debug)] 27 | pub enum ApiError { 28 | #[allow(dead_code)] 29 | Reqwest(reqwest::Error), 30 | Json(serde_json::Error), 31 | UnexpectedResponse(String), 32 | } 33 | 34 | impl std::error::Error for ApiError { 35 | fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { 36 | match self { 37 | ApiError::Reqwest(e) => Some(e), 38 | ApiError::Json(e) => Some(e), 39 | ApiError::UnexpectedResponse(_) => None, 40 | } 41 | } 42 | } 43 | 44 | impl Display for ApiError { 45 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 46 | match self { 47 | ApiError::Reqwest(e) => write!(f, "Network error: {:?}", e), 48 | ApiError::Json(e) => write!(f, "JSON ser/de error: {:?}", e), 49 | ApiError::UnexpectedResponse(e) => write!(f, "Unexpected response: {:?}", e), 50 | } 51 | } 52 | } 53 | 54 | impl From for ApiError { 55 | fn from(value: reqwest::Error) -> Self { 56 | ApiError::Reqwest(value) 57 | } 58 | } 59 | 60 | impl From for ApiError { 61 | fn from(value: serde_json::Error) -> Self { 62 | ApiError::Json(value) 63 | } 64 | } 65 | 66 | type ApiResult = Result; 67 | 68 | /// Authentication for the GitHub API using a JWT token. 69 | pub struct AuthJwt(String); 70 | 71 | impl Display for AuthJwt { 72 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 73 | self.0.fmt(f) 74 | } 75 | } 76 | 77 | /// Authentication for the GitHub API using an installation token, that 78 | /// is scoped to a specific organization or set of repositories, but that 79 | /// can do more than a [`AuthJwt`] token. 
80 | pub struct AuthInstallation(String); 81 | 82 | impl Display for AuthInstallation { 83 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 84 | self.0.fmt(f) 85 | } 86 | } 87 | 88 | /// A GitHub API client 89 | pub struct GitHub { 90 | auth: A, 91 | req: reqwest::Client, 92 | } 93 | 94 | impl GitHub { 95 | fn get(&self, url: impl AsRef) -> RequestBuilder { 96 | self.with_headers(self.req.get(Self::url(url))) 97 | } 98 | 99 | fn patch(&self, url: impl AsRef) -> RequestBuilder { 100 | self.with_headers(self.req.patch(Self::url(url))) 101 | } 102 | 103 | fn post(&self, url: impl AsRef) -> RequestBuilder { 104 | self.with_headers(self.req.post(Self::url(url))) 105 | } 106 | 107 | fn with_headers(&self, req: RequestBuilder) -> RequestBuilder { 108 | req.bearer_auth(self.auth.to_string()) 109 | .header("Accept", "application/vnd.github+json") 110 | .header("X-GitHub-Api-Version", "2022-11-28") 111 | .header("User-Agent", "Typst package check") 112 | } 113 | 114 | fn url>(path: S) -> String { 115 | let u = format!("https://api.github.com/{}", path.as_ref()); 116 | debug!("API URL: {}", u); 117 | u 118 | } 119 | } 120 | 121 | pub trait GitHubAuth { 122 | async fn auth_installation( 123 | self, 124 | installation: &impl AsInstallation, 125 | ) -> ApiResult>; 126 | } 127 | 128 | impl GitHubAuth for GitHub { 129 | #[tracing::instrument(skip_all)] 130 | async fn auth_installation( 131 | self, 132 | installation: &impl AsInstallation, 133 | ) -> ApiResult> { 134 | let installation_id = installation.id(); 135 | let installation_token: InstallationToken = self 136 | .post(format!("app/installations/{installation_id}/access_tokens")) 137 | .json(&serde_json::json!({ 138 | "repositories": ["packages"], 139 | "permissions": { 140 | "metadata": "read", 141 | "issues": "write", 142 | "pull_requests": "write", 143 | "checks": "write", 144 | } 145 | })) 146 | .send() 147 | .await? 148 | .parse_json() 149 | .await?; 150 | 151 | Ok(GitHub { 152 | req: self.req, 153 | auth: AuthInstallation(installation_token.token), 154 | }) 155 | } 156 | } 157 | 158 | impl GitHubAuth for GitHub { 159 | async fn auth_installation( 160 | self, 161 | _installation: &impl AsInstallation, 162 | ) -> ApiResult> { 163 | Ok(self) 164 | } 165 | } 166 | 167 | impl GitHub { 168 | #[tracing::instrument(skip(self))] 169 | pub async fn create_check_run( 170 | &self, 171 | owner: OwnerId, 172 | repo: RepoId, 173 | check_run_name: String, 174 | head_sha: &str, 175 | ) -> ApiResult> { 176 | let response = self 177 | .post(format!("repos/{owner}/{repo}/check-runs")) 178 | .body(serde_json::to_string(&serde_json::json!({ 179 | "name": check_run_name, 180 | "head_sha": head_sha, 181 | "status": "in_progress", 182 | }))?) 183 | .send() 184 | .await?; 185 | 186 | if response.status() != StatusCode::CREATED { 187 | return Err(ApiError::UnexpectedResponse(response.text().await?)); 188 | } 189 | 190 | let result = serde_json::from_str(&response.text().await?)?; 191 | Ok(result) 192 | } 193 | 194 | #[tracing::instrument(skip(self, output))] 195 | pub async fn update_check_run<'a>( 196 | &self, 197 | owner: OwnerId, 198 | repo: RepoId, 199 | check_run: CheckRunId, 200 | success: bool, 201 | output: CheckRunOutput<'a>, 202 | ) -> ApiResult<()> { 203 | let res = self 204 | .patch(format!("repos/{owner}/{repo}/check-runs/{check_run}")) 205 | .body(serde_json::to_string(&serde_json::json!({ 206 | "status": "completed", 207 | "conclusion": if success { "success" } else { "failure" }, 208 | "output": output, 209 | }))?) 
210 | .send() 211 | .await? 212 | .text() 213 | .await?; 214 | debug!("GitHub said: {}", res); 215 | Ok(()) 216 | } 217 | } 218 | 219 | #[async_trait::async_trait] 220 | impl FromRequestParts for GitHub { 221 | type Rejection = StatusCode; 222 | 223 | async fn from_request_parts<'a, 's>( 224 | _parts: &'a mut Parts, 225 | state: &'s AppState, 226 | ) -> Result { 227 | let Ok(private_key) = RS256KeyPair::from_pem(&state.private_key) else { 228 | warn!("The private key in the .env file cannot be parsed as PEM."); 229 | return Err(StatusCode::INTERNAL_SERVER_ERROR); 230 | }; 231 | 232 | let claims = Claims::create(Duration::from_mins(10)).with_issuer(&state.app_id); 233 | let Ok(token) = private_key.sign(claims) else { 234 | warn!("Couldn't sign JWT claims."); 235 | return Err(StatusCode::INTERNAL_SERVER_ERROR); 236 | }; 237 | 238 | Ok(Self { 239 | auth: AuthJwt(token), 240 | req: reqwest::Client::new(), 241 | }) 242 | } 243 | } 244 | 245 | #[derive(Debug)] 246 | pub struct OwnerId(String); 247 | 248 | #[derive(Debug)] 249 | pub struct RepoId(String); 250 | 251 | impl Display for OwnerId { 252 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 253 | f.write_str(&self.0) 254 | } 255 | } 256 | 257 | impl Display for RepoId { 258 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 259 | f.write_str(&self.0) 260 | } 261 | } 262 | 263 | #[derive(Debug, Deserialize)] 264 | pub struct Repository { 265 | full_name: String, 266 | } 267 | 268 | impl Repository { 269 | pub fn new(name: &str) -> eyre::Result { 270 | if !name.contains('/') { 271 | return Err(Error::msg("Invalid repository path")); 272 | } 273 | 274 | Ok(Self { 275 | full_name: name.to_owned(), 276 | }) 277 | } 278 | 279 | pub fn owner(&self) -> OwnerId { 280 | OwnerId( 281 | self.full_name 282 | .split_once('/') 283 | .expect("Repository path must contain a /") 284 | .0 285 | .to_owned(), 286 | ) 287 | } 288 | 289 | pub fn name(&self) -> RepoId { 290 | RepoId( 291 | self.full_name 292 | .split_once('/') 293 | .expect("Repository path must contain a /") 294 | .1 295 | .to_owned(), 296 | ) 297 | } 298 | } 299 | 300 | #[derive(Debug, Deserialize)] 301 | pub struct Installation { 302 | pub id: u64, 303 | } 304 | 305 | pub trait AsInstallation { 306 | fn id(&self) -> u64; 307 | } 308 | 309 | impl AsInstallation for Installation { 310 | fn id(&self) -> u64 { 311 | self.id 312 | } 313 | } 314 | 315 | #[derive(Deserialize)] 316 | struct InstallationToken { 317 | token: String, 318 | } 319 | 320 | trait JsonExt { 321 | async fn parse_json Deserialize<'a>>(self) -> Result; 322 | } 323 | 324 | impl JsonExt for Response { 325 | async fn parse_json Deserialize<'a>>(self) -> Result { 326 | let bytes = self.bytes().await?; 327 | 328 | debug!( 329 | "Parsing JSON: {}", 330 | std::str::from_utf8(&bytes).unwrap_or("[INVALID UTF8]") 331 | ); 332 | 333 | Ok(serde_json::from_slice(&bytes)?) 
334 | } 335 | } 336 | -------------------------------------------------------------------------------- /src/github/api/check.rs: -------------------------------------------------------------------------------- 1 | use std::fmt::Display; 2 | 3 | use serde::{Deserialize, Serialize}; 4 | 5 | use super::pr::AnyPullRequest; 6 | 7 | #[derive(Debug, Deserialize, Clone, Copy)] 8 | #[serde(transparent)] 9 | pub struct CheckSuiteId(#[allow(dead_code)] u64); 10 | 11 | #[derive(Debug, Clone, Deserialize)] 12 | pub struct CheckSuite { 13 | pub head_sha: String, 14 | pub pull_requests: Vec, 15 | } 16 | 17 | #[derive(Clone, Deserialize)] 18 | pub struct MinimalCheckSuite { 19 | #[allow(dead_code)] 20 | pub id: CheckSuiteId, 21 | } 22 | 23 | #[derive(Debug, Deserialize)] 24 | #[serde(rename_all = "snake_case")] 25 | pub enum CheckSuiteAction { 26 | /// A check suite was requested (when code is pushed) 27 | Requested, 28 | /// A check suite was re-requested (when re-running on code that was previously pushed) 29 | Rerequested, 30 | /// A check suite has finished running 31 | Completed, 32 | } 33 | 34 | #[derive(Debug, Deserialize)] 35 | #[serde(rename_all = "snake_case")] 36 | pub enum CheckRunAction { 37 | Created, 38 | RequestedAction, 39 | Rerequested, 40 | Completed, 41 | } 42 | 43 | #[derive(Debug, Deserialize, Clone, Copy)] 44 | #[serde(transparent)] 45 | pub struct CheckRunId(u64); 46 | 47 | impl Display for CheckRunId { 48 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 49 | write!(f, "{}", self.0) 50 | } 51 | } 52 | 53 | #[derive(Clone, Debug, Deserialize)] 54 | pub struct CheckRun { 55 | pub id: CheckRunId, 56 | pub name: String, 57 | pub check_suite: S, 58 | } 59 | 60 | impl CheckRun { 61 | pub fn without_suite(self) -> CheckRun<()> { 62 | CheckRun { 63 | id: self.id, 64 | name: self.name, 65 | check_suite: (), 66 | } 67 | } 68 | } 69 | 70 | #[derive(Debug, Serialize)] 71 | pub struct CheckRunOutput<'a> { 72 | pub title: &'a str, 73 | pub summary: &'a str, 74 | pub annotations: &'a [Annotation], 75 | } 76 | 77 | #[derive(Debug, Serialize)] 78 | pub struct Annotation { 79 | pub path: String, 80 | pub start_line: usize, 81 | pub end_line: usize, 82 | #[serde(skip_serializing_if = "Option::is_none")] 83 | pub start_column: Option, 84 | #[serde(skip_serializing_if = "Option::is_none")] 85 | pub end_column: Option, 86 | pub annotation_level: AnnotationLevel, 87 | pub message: String, 88 | } 89 | 90 | #[derive(Debug, Serialize)] 91 | #[serde(rename_all = "snake_case")] 92 | pub enum AnnotationLevel { 93 | Warning, 94 | Failure, 95 | } 96 | -------------------------------------------------------------------------------- /src/github/api/hook.rs: -------------------------------------------------------------------------------- 1 | use axum::extract::{FromRequest, Request}; 2 | use hmac::Mac; 3 | use reqwest::StatusCode; 4 | use serde::Deserialize; 5 | use tracing::{debug, trace, warn}; 6 | 7 | use crate::github::AppState; 8 | 9 | use super::{ 10 | check::{CheckRun, CheckRunAction, CheckSuite, CheckSuiteAction}, 11 | pr::PullRequest, 12 | AsInstallation, Installation, 13 | }; 14 | 15 | #[derive(Debug)] 16 | pub enum HookPayload { 17 | Installation(InstallationPayload), 18 | CheckSuite(CheckSuitePayload), 19 | CheckRun(CheckRunPayload), 20 | PullRequest(PullRequestPayload), 21 | } 22 | 23 | impl HookPayload { 24 | pub fn installation(&self) -> &Installation { 25 | match self { 26 | HookPayload::CheckSuite(cs) => &cs.installation, 27 | HookPayload::Installation(i) => &i.installation, 
28 |             HookPayload::CheckRun(cr) => &cr.installation,
29 |             HookPayload::PullRequest(pr) => &pr.installation,
30 |         }
31 |     }
32 | }
33 | 
34 | impl AsInstallation for HookPayload {
35 |     fn id(&self) -> u64 {
36 |         self.installation().id
37 |     }
38 | }
39 | 
40 | /// Request extractor that reads and checks a GitHub hook payload.
41 | #[async_trait::async_trait]
42 | impl FromRequest<AppState> for HookPayload {
43 |     type Rejection = (StatusCode, &'static str);
44 | 
45 |     async fn from_request<'s>(req: Request, state: &'s AppState) -> Result<Self, Self::Rejection> {
46 |         debug!("Received a webhook event…");
47 |         let event_type = req
48 |             .headers()
49 |             .get("X-GitHub-Event")
50 |             .map(|v| v.as_bytes().to_owned());
51 |         debug!("Event type is {:?}", event_type);
52 | 
53 |         let Some(their_signature_header) = req.headers().get("X-Hub-Signature") else {
54 |             return Err((StatusCode::UNAUTHORIZED, "X-Hub-Signature is missing"));
55 |         };
56 |         let their_signature_header = their_signature_header
57 |             .to_str()
58 |             .unwrap_or_default()
59 |             .to_owned();
60 | 
61 |         let Some((method, their_digest)) = their_signature_header.split_once('=') else {
62 |             return Err((StatusCode::BAD_REQUEST, "Malformed signature header"));
63 |         };
64 | 
65 |         if method != "sha1" {
66 |             warn!(
67 |                 "A hook with a {} signature was received and rejected",
68 |                 method
69 |             );
70 |             return Err((
71 |                 StatusCode::INTERNAL_SERVER_ERROR,
72 |                 "Unsupported signature type",
73 |             ));
74 |         }
75 | 
76 |         let Ok(raw_payload) = String::from_request(req, state).await else {
77 |             return Err((StatusCode::BAD_REQUEST, "Cannot read request body."));
78 |         };
79 | 
80 |         trace!("Webhook payload was: {}", raw_payload);
81 | 
82 |         let our_digest = {
83 |             let Ok(mut mac) = hmac::Hmac::<sha1::Sha1>::new_from_slice(&state.webhook_secret)
84 |             else {
85 |                 warn!("Webhook secret is invalid.");
86 |                 return Err((
87 |                     StatusCode::INTERNAL_SERVER_ERROR,
88 |                     "Server is not correctly configured.",
89 |                 ));
90 |             };
91 |             mac.update(raw_payload.as_bytes());
92 |             mac
93 |         };
94 |         // GitHub provides their hash as a hexadecimal string.
95 |         let parsed_digest: Vec<_> = (0..their_digest.len() / 2)
96 |             .filter_map(|idx| {
97 |                 let slice = &their_digest[idx * 2..idx * 2 + 2];
98 |                 u8::from_str_radix(slice, 16).ok()
99 |             })
100 |             .collect();
101 |         if our_digest.verify_slice(&parsed_digest).is_err() {
102 |             debug!("Invalid hook signature");
103 |             return Err((StatusCode::UNAUTHORIZED, "Invalid hook signature"));
104 |         }
105 | 
106 |         macro_rules! try_deser {
107 |             ($variant:ident, $json:expr) => {
108 |                 match serde_json::from_str($json) {
109 |                     Ok(x) => Ok(HookPayload::$variant(x)),
110 |                     Err(_) => return Err((StatusCode::BAD_REQUEST, "Invalid JSON data")),
111 |                 }
112 |             };
113 |         }
114 | 
115 |         match event_type.as_deref() {
116 |             Some(b"installation") => try_deser!(Installation, &raw_payload),
117 |             Some(b"check_suite") => try_deser!(CheckSuite, &raw_payload),
118 |             Some(b"check_run") => try_deser!(CheckRun, &raw_payload),
119 |             Some(b"pull_request") => try_deser!(PullRequest, &raw_payload),
120 |             Some(x) => {
121 |                 debug!(
122 |                     "Unknown event type: {}",
123 |                     std::str::from_utf8(x).unwrap_or("[UTF-8 error]")
124 |                 );
125 |                 debug!("Payload was: {}", raw_payload);
126 |                 Err((StatusCode::BAD_REQUEST, "Unknown event type"))
127 |             }
128 |             None => Err((StatusCode::BAD_REQUEST, "Unspecified event type")),
129 |         }
130 |     }
131 | }
132 | 
133 | #[derive(Debug, Deserialize)]
134 | pub struct InstallationPayload {
135 |     installation: Installation,
136 | }
137 | 
138 | #[derive(Debug, Deserialize)]
139 | pub struct CheckSuitePayload {
140 |     pub action: CheckSuiteAction,
141 |     pub installation: Installation,
142 |     pub check_suite: CheckSuite,
143 | }
144 | 
145 | #[derive(Debug, Deserialize)]
146 | pub struct CheckRunPayload {
147 |     pub installation: Installation,
148 |     pub action: CheckRunAction,
149 |     pub check_run: CheckRun<CheckSuite>,
150 | }
151 | 
152 | #[derive(Debug, Deserialize)]
153 | pub struct PullRequestPayload {
154 |     pub installation: Installation,
155 |     pub action: PullRequestAction,
156 |     pub pull_request: PullRequest,
157 | }
158 | 
159 | #[derive(Debug, Deserialize)]
160 | #[serde(rename_all = "snake_case")]
161 | pub enum PullRequestAction {
162 |     Opened,
163 |     Synchronize,
164 | }
165 | 
--------------------------------------------------------------------------------
/src/github/api/pr.rs:
--------------------------------------------------------------------------------
1 | use serde::{Deserialize, Serialize};
2 | 
3 | use super::{user::User, ApiError, AuthInstallation, GitHub, JsonExt, OwnerId, RepoId};
4 | 
5 | #[derive(Clone, Debug, Deserialize)]
6 | pub struct MinimalPullRequest {
7 |     pub number: usize,
8 | }
9 | 
10 | impl MinimalPullRequest {
11 |     pub async fn get_full(
12 |         &self,
13 |         api: &GitHub<AuthInstallation>,
14 |         owner: OwnerId,
15 |         repo: RepoId,
16 |     ) -> Result<PullRequest, ApiError> {
17 |         api.get(format!(
18 |             "repos/{owner}/{repo}/pulls/{pull_number}",
19 |             owner = owner,
20 |             repo = repo,
21 |             pull_number = self.number
22 |         ))
23 |         .send()
24 |         .await?
25 | .parse_json() 26 | .await 27 | } 28 | } 29 | 30 | #[derive(Clone, Debug, Deserialize)] 31 | pub struct PullRequest { 32 | pub number: usize, 33 | pub head: Commit, 34 | pub title: String, 35 | pub body: String, 36 | pub user: User, 37 | } 38 | 39 | #[derive(Clone, Debug, Deserialize)] 40 | #[serde(untagged)] 41 | pub enum AnyPullRequest { 42 | Full(PullRequest), 43 | Minimal(MinimalPullRequest), 44 | } 45 | 46 | impl AnyPullRequest { 47 | pub async fn get_full( 48 | self, 49 | api: &GitHub, 50 | owner: OwnerId, 51 | repo: RepoId, 52 | ) -> Result { 53 | match self { 54 | AnyPullRequest::Full(pr) => Ok(pr), 55 | AnyPullRequest::Minimal(pr) => pr.get_full(api, owner, repo).await, 56 | } 57 | } 58 | } 59 | 60 | #[derive(Clone, Debug, Deserialize)] 61 | pub struct Commit { 62 | pub sha: String, 63 | } 64 | 65 | #[derive(Serialize)] 66 | pub struct PullRequestUpdate { 67 | pub title: String, 68 | pub labels: Vec, 69 | #[serde(skip_serializing_if = "Option::is_none")] 70 | pub body: Option, 71 | } 72 | 73 | impl GitHub { 74 | pub async fn update_pull_request( 75 | &self, 76 | owner: OwnerId, 77 | repo: RepoId, 78 | pr: usize, 79 | update: PullRequestUpdate, 80 | ) -> Result<(), ApiError> { 81 | self.patch(format!("repos/{}/{}/issues/{}", owner, repo, pr)) 82 | .json(&update) 83 | .send() 84 | .await? 85 | .parse_json::() 86 | .await?; 87 | 88 | Ok(()) 89 | } 90 | 91 | pub async fn prs_for_commit( 92 | &self, 93 | owner: OwnerId, 94 | repo: RepoId, 95 | commit: String, 96 | ) -> Result, ApiError> { 97 | self.get(format!("repos/{owner}/{repo}/commits/{commit}/pulls")) 98 | .send() 99 | .await? 100 | .parse_json() 101 | .await 102 | } 103 | 104 | pub async fn post_pr_comment( 105 | &self, 106 | owner: OwnerId, 107 | repo: RepoId, 108 | pr: usize, 109 | message: String, 110 | ) -> Result<(), ApiError> { 111 | self.post(format!("repos/{owner}/{repo}/issues/{pr}/comments")) 112 | .json(&serde_json::json!({ 113 | "body": message 114 | })) 115 | .send() 116 | .await? 117 | .parse_json::() 118 | .await?; 119 | 120 | Ok(()) 121 | } 122 | } 123 | -------------------------------------------------------------------------------- /src/github/api/user.rs: -------------------------------------------------------------------------------- 1 | use serde::Deserialize; 2 | 3 | #[derive(Clone, Debug, Deserialize)] 4 | pub struct User { 5 | pub login: String, 6 | } 7 | -------------------------------------------------------------------------------- /src/github/git.rs: -------------------------------------------------------------------------------- 1 | //! Wrapper around the `git` command line. 
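//!
//! Everything here shells out to the `git` binary rather than linking a Git
//! library. For instance, `files_touched_by` boils down to running:
//!
//! ```text
//! git -C <dir> diff-tree --no-commit-id --name-only -r --merge-base main <sha>
//! ```
//!
//! and parsing the resulting path list, including the C-style quoting that
//! Git applies to non-ASCII paths (see `parse_diff_tree_paths`).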
2 | 3 | use std::{ 4 | collections::HashSet, 5 | path::{Path, PathBuf}, 6 | process::{Output, Stdio}, 7 | }; 8 | 9 | use eyre::{Context, ContextCompat}; 10 | use tokio::process::Command; 11 | use tracing::debug; 12 | use typst::syntax::package::{PackageSpec, PackageVersion}; 13 | 14 | pub fn repo_dir() -> PathBuf { 15 | let repo_path = std::env::var("PACKAGES_DIR").unwrap_or("..".to_owned()); 16 | PathBuf::from(repo_path) 17 | } 18 | 19 | pub struct GitRepo<'a> { 20 | dir: &'a Path, 21 | } 22 | 23 | impl<'a> GitRepo<'a> { 24 | pub fn open(dir: &'a Path) -> Self { 25 | GitRepo { dir } 26 | } 27 | 28 | pub async fn clone_if_needed(&self, url: &str) -> eyre::Result<()> { 29 | let status = traced_git(["-C", self.dir()?, "status"]).await?.status; 30 | 31 | if !status.success() { 32 | traced_git(["clone", url, self.dir()?]).await?; 33 | } 34 | 35 | Ok(()) 36 | } 37 | 38 | pub async fn pull_main(&self) -> eyre::Result<()> { 39 | debug!("Pulling main branch"); 40 | traced_git([ 41 | "-C", 42 | self.dir()?, 43 | "-c", 44 | "receive.maxInputSize=134217728", // 128MB 45 | "pull", 46 | "origin", 47 | "main", 48 | "--ff-only", 49 | ]) 50 | .await?; 51 | debug!("Done"); 52 | Ok(()) 53 | } 54 | 55 | pub async fn fetch_commit(&self, sha: impl AsRef) -> eyre::Result<()> { 56 | debug!("Fetching commit: {}", sha.as_ref()); 57 | traced_git([ 58 | "-C", 59 | self.dir()?, 60 | "-c", 61 | "receive.maxInputSize=134217728", // 128MB 62 | "fetch", 63 | "origin", 64 | sha.as_ref(), 65 | ]) 66 | .await 67 | .context("Failed to fetch {} (probably because of some large file).")?; 68 | debug!("Done"); 69 | Ok(()) 70 | } 71 | 72 | /// Checks out a commit in a new working tree 73 | pub async fn checkout_commit( 74 | &self, 75 | sha: impl AsRef, 76 | working_tree: impl AsRef, 77 | ) -> eyre::Result<()> { 78 | debug!( 79 | "Checking out {} in {}", 80 | sha.as_ref(), 81 | working_tree.as_ref().display() 82 | ); 83 | tokio::fs::create_dir_all(&working_tree).await?; 84 | let working_tree = working_tree.as_ref().canonicalize()?; 85 | traced_git([ 86 | "-C", 87 | self.dir 88 | .to_str() 89 | .context("Directory name is not valid unicode")?, 90 | &format!("--work-tree={}", working_tree.display()), 91 | "checkout", 92 | sha.as_ref(), 93 | "--", 94 | ".", 95 | ]) 96 | .await?; 97 | debug!("Done"); 98 | Ok(()) 99 | } 100 | 101 | pub async fn files_touched_by(&self, sha: impl AsRef) -> eyre::Result> { 102 | debug!("Listing files touched by {}", sha.as_ref()); 103 | let command_output = String::from_utf8( 104 | Command::new("git") 105 | .args([ 106 | "-C", 107 | self.dir()?, 108 | "diff-tree", 109 | "--no-commit-id", 110 | "--name-only", 111 | "-r", 112 | "--merge-base", 113 | "main", 114 | sha.as_ref(), 115 | ]) 116 | .output() 117 | .await? 118 | .stdout, 119 | )?; 120 | 121 | debug!("Done"); 122 | 123 | Ok(parse_diff_tree_paths(&command_output)) 124 | } 125 | 126 | pub fn authors_of(&self, file: &Path) -> Option> { 127 | use std::process::Command; 128 | 129 | debug!("Lisiting authors of {}", file.display()); 130 | 131 | let output = String::from_utf8( 132 | Command::new("git") 133 | .args([ 134 | "-C", 135 | self.dir.to_str()?, 136 | "blame", 137 | "--porcelain", 138 | "--", 139 | Path::new(".").canonicalize().ok()?.join(file).to_str()?, 140 | ]) 141 | .output() 142 | .ok()? 
143 | .stdout, 144 | ) 145 | .ok()?; 146 | 147 | let authors: HashSet<_> = output 148 | .lines() 149 | .filter(|l| l.starts_with("author ")) 150 | .map(|l| { 151 | let prefix_len = "author ".len(); 152 | l[prefix_len..].to_owned() 153 | }) 154 | .collect(); 155 | 156 | debug!("Done"); 157 | Some(authors) 158 | } 159 | 160 | pub fn commit_for_file(&self, file: &Path) -> Option { 161 | use std::process::Command; 162 | 163 | debug!("Finding the commit that last touched {}", file.display()); 164 | 165 | let output = String::from_utf8( 166 | Command::new("git") 167 | .args([ 168 | "-C", 169 | self.dir.to_str()?, 170 | "blame", 171 | "--porcelain", 172 | "--", 173 | Path::new(".").canonicalize().ok()?.join(file).to_str()?, 174 | ]) 175 | .output() 176 | .ok()? 177 | .stdout, 178 | ) 179 | .ok()?; 180 | 181 | output 182 | .lines() 183 | .next()? 184 | .split(' ') 185 | .next() 186 | .map(|commit| commit.to_owned()) 187 | } 188 | 189 | pub fn dir(&self) -> eyre::Result<&str> { 190 | self.dir 191 | .to_str() 192 | .context("Directory name is not valid unicode") 193 | } 194 | 195 | pub async fn has_previous_version(&self, package: &PackageSpec) -> eyre::Result { 196 | let package_dir = PathBuf::from(self.dir()?) 197 | .join("packages") 198 | .join(package.namespace.as_str()) 199 | .join(package.name.as_str()); 200 | let mut all_versions = tokio::fs::read_dir(package_dir).await?; 201 | while let Ok(Some(version)) = all_versions.next_entry().await { 202 | if version 203 | .file_type() 204 | .await 205 | .map(|t| t.is_dir()) 206 | .unwrap_or(false) 207 | { 208 | let version: PackageVersion = version 209 | .file_name() 210 | .to_str() 211 | .context("Can't convert directory name to string")? 212 | .parse() 213 | .ok() 214 | .context("Invalid version number")?; 215 | 216 | if version < package.version { 217 | return Ok(true); 218 | } 219 | } 220 | } 221 | 222 | Ok(false) 223 | } 224 | } 225 | 226 | #[tracing::instrument(name = "git-command")] 227 | async fn traced_git( 228 | args: impl IntoIterator + std::fmt::Debug, 229 | ) -> eyre::Result { 230 | let out = Command::new("git") 231 | .args(args) 232 | .stderr(Stdio::piped()) 233 | .stdout(Stdio::piped()) 234 | .spawn()? 
235 |         .wait_with_output()
236 |         .await?;
237 | 
238 |     if let Ok(stderr) = std::str::from_utf8(&out.stderr) {
239 |         debug!(stderr = stderr)
240 |     }
241 |     if let Ok(stdout) = std::str::from_utf8(&out.stdout) {
242 |         debug!(stdout = stdout)
243 |     }
244 | 
245 |     Ok(out)
246 | }
247 | 
248 | fn parse_diff_tree_paths(output: &str) -> Vec<PathBuf> {
249 |     output
250 |         .lines()
251 |         .map(|l| {
252 |             if l.starts_with('"') && l.ends_with('"') {
253 |                 let mut path = Vec::new();
254 |                 let mut escape_seq = 0;
255 |                 let mut escaped = String::new();
256 |                 for c in l[1..l.len() - 1].chars() { // skip only the surrounding quotes
257 |                     if c == '\\' {
258 |                         escape_seq = 3;
259 |                         continue;
260 |                     }
261 | 
262 |                     if escape_seq == 0 {
263 |                         let start = path.len();
264 |                         path.resize(start + c.len_utf8(), 0);
265 |                         c.encode_utf8(&mut path[start..]);
266 |                     } else {
267 |                         escaped.push(c);
268 |                         escape_seq -= 1;
269 | 
270 |                         if escape_seq == 0 {
271 |                             path.push(
272 |                                 u8::from_str_radix(&escaped, 8)
273 |                                     .expect("Invalid escape sequence from Git"),
274 |                             );
275 |                             escaped = String::new()
276 |                         }
277 |                     }
278 |                 }
279 | 
280 |                 PathBuf::from(String::from_utf8(path).expect("Invalid UTF8 in path"))
281 |             } else {
282 |                 Path::new(l).to_owned()
283 |             }
284 |         })
285 |         .collect()
286 | }
287 | 
288 | #[cfg(test)]
289 | mod tests {
290 |     use std::path::PathBuf;
291 | 
292 |     #[test]
293 |     fn touched_files() {
294 |         let output = r#"packages/preview/scholarly-tauthesis/0.8.0/.gitattributes
295 | packages/preview/scholarly-tauthesis/0.8.0/.gitignore
296 | packages/preview/scholarly-tauthesis/0.8.0/LICENSE
297 | packages/preview/scholarly-tauthesis/0.8.0/README.md
298 | packages/preview/scholarly-tauthesis/0.8.0/tauthesis.typ
299 | packages/preview/scholarly-tauthesis/0.8.0/template/bibliography.bib
300 | packages/preview/scholarly-tauthesis/0.8.0/template/code/README.md
301 | packages/preview/scholarly-tauthesis/0.8.0/template/code/square.jl
302 | packages/preview/scholarly-tauthesis/0.8.0/template/content/01.typ
303 | packages/preview/scholarly-tauthesis/0.8.0/template/content/02.typ
304 | packages/preview/scholarly-tauthesis/0.8.0/template/content/03.typ
305 | packages/preview/scholarly-tauthesis/0.8.0/template/content/04.typ
306 | packages/preview/scholarly-tauthesis/0.8.0/template/content/A.typ
307 | packages/preview/scholarly-tauthesis/0.8.0/template/content/README.md
308 | packages/preview/scholarly-tauthesis/0.8.0/template/content/abstract.typ
309 | packages/preview/scholarly-tauthesis/0.8.0/template/content/glossary.typ
310 | packages/preview/scholarly-tauthesis/0.8.0/template/content/preface.typ
311 | packages/preview/scholarly-tauthesis/0.8.0/template/content/tekoalyn-kaytto.typ
312 | "packages/preview/scholarly-tauthesis/0.8.0/template/content/tiivistelm\303\244.typ"
313 | packages/preview/scholarly-tauthesis/0.8.0/template/content/use-of-ai.typ
314 | packages/preview/scholarly-tauthesis/0.8.0/template/images/README.md
315 | packages/preview/scholarly-tauthesis/0.8.0/template/images/tau-logo-fin-eng.svg
316 | packages/preview/scholarly-tauthesis/0.8.0/template/main.typ
317 | packages/preview/scholarly-tauthesis/0.8.0/template/meta.typ
318 | packages/preview/scholarly-tauthesis/0.8.0/template/preamble.typ
319 | packages/preview/scholarly-tauthesis/0.8.0/thumbnail.png
320 | packages/preview/scholarly-tauthesis/0.8.0/typst.toml"#;
321 |         assert_eq!(
322 |             super::parse_diff_tree_paths(output),
323 |             [
324 |                 "packages/preview/scholarly-tauthesis/0.8.0/.gitattributes",
325 |                 "packages/preview/scholarly-tauthesis/0.8.0/.gitignore",
326 |                 "packages/preview/scholarly-tauthesis/0.8.0/LICENSE",
327 |                 "packages/preview/scholarly-tauthesis/0.8.0/README.md",
328 |                 "packages/preview/scholarly-tauthesis/0.8.0/tauthesis.typ",
329 |                 "packages/preview/scholarly-tauthesis/0.8.0/template/bibliography.bib",
330 |                 "packages/preview/scholarly-tauthesis/0.8.0/template/code/README.md",
331 |                 "packages/preview/scholarly-tauthesis/0.8.0/template/code/square.jl",
332 |                 "packages/preview/scholarly-tauthesis/0.8.0/template/content/01.typ",
333 |                 "packages/preview/scholarly-tauthesis/0.8.0/template/content/02.typ",
334 |                 "packages/preview/scholarly-tauthesis/0.8.0/template/content/03.typ",
335 |                 "packages/preview/scholarly-tauthesis/0.8.0/template/content/04.typ",
336 |                 "packages/preview/scholarly-tauthesis/0.8.0/template/content/A.typ",
337 |                 "packages/preview/scholarly-tauthesis/0.8.0/template/content/README.md",
338 |                 "packages/preview/scholarly-tauthesis/0.8.0/template/content/abstract.typ",
339 |                 "packages/preview/scholarly-tauthesis/0.8.0/template/content/glossary.typ",
340 |                 "packages/preview/scholarly-tauthesis/0.8.0/template/content/preface.typ",
341 |                 "packages/preview/scholarly-tauthesis/0.8.0/template/content/tekoalyn-kaytto.typ",
342 |                 "packages/preview/scholarly-tauthesis/0.8.0/template/content/tiivistelmä.typ",
343 |                 "packages/preview/scholarly-tauthesis/0.8.0/template/content/use-of-ai.typ",
344 |                 "packages/preview/scholarly-tauthesis/0.8.0/template/images/README.md",
345 |                 "packages/preview/scholarly-tauthesis/0.8.0/template/images/tau-logo-fin-eng.svg",
346 |                 "packages/preview/scholarly-tauthesis/0.8.0/template/main.typ",
347 |                 "packages/preview/scholarly-tauthesis/0.8.0/template/meta.typ",
348 |                 "packages/preview/scholarly-tauthesis/0.8.0/template/preamble.typ",
349 |                 "packages/preview/scholarly-tauthesis/0.8.0/thumbnail.png",
350 |                 "packages/preview/scholarly-tauthesis/0.8.0/typst.toml"
351 |             ]
352 |             .iter()
353 |             .map(PathBuf::from)
354 |             .collect::<Vec<_>>()
355 |         )
356 |     }
357 | }
358 | 
--------------------------------------------------------------------------------
/src/main.rs:
--------------------------------------------------------------------------------
1 | use tracing_subscriber::EnvFilter;
2 | 
3 | mod check;
4 | mod cli;
5 | mod github;
6 | mod package;
7 | mod world;
8 | 
9 | #[tokio::main]
10 | async fn main() {
11 |     dotenvy::dotenv().ok();
12 | 
13 |     if std::env::var("LOG_STYLE").as_deref().unwrap_or("human") == "json" {
14 |         tracing_subscriber::fmt()
15 |             .with_env_filter(EnvFilter::from_default_env())
16 |             .event_format(tracing_subscriber::fmt::format::json())
17 |             .init();
18 |     } else {
19 |         tracing_subscriber::fmt()
20 |             .with_env_filter(EnvFilter::from_default_env())
21 |             .init();
22 |     }
23 | 
24 |     let mut args = std::env::args();
25 |     let cmd = args.next();
26 |     let subcommand = args.next();
27 |     if Some("server") == subcommand.as_deref() {
28 |         github::hook_server().await;
29 |     } else if Some("check") == subcommand.as_deref() {
30 |         cli::main(args.next().unwrap_or_default()).await;
31 |     } else {
32 |         show_help(&cmd.unwrap_or("typst-package-check".to_owned()));
33 |     }
34 | }
35 | 
36 | fn show_help(program: &str) {
37 |     println!("Usage:");
38 |     println!("  {program} server");
39 |     println!("    Start a server to handle GitHub webhooks and report checks in pull requests.");
40 |     println!("  {program} check @preview/PACKAGE:VERSION");
41 |     println!(
42 |         "    Check a local package at the specified version. To be run in typst/packages/packages."
43 | ); 44 | println!(" {program} check"); 45 | println!(" Check the package in the current directory."); 46 | } 47 | -------------------------------------------------------------------------------- /src/package.rs: -------------------------------------------------------------------------------- 1 | use std::path::{Path, PathBuf}; 2 | 3 | use typst::syntax::package::{PackageSpec, PackageVersion, VersionlessPackageSpec}; 4 | 5 | /// Return the path of the directory containing all the packages (i.e. `typst/packages/packages`). 6 | pub fn dir() -> PathBuf { 7 | Path::new(&std::env::var("PACKAGES_DIR").unwrap_or("..".to_owned())).join("packages") 8 | } 9 | 10 | pub trait PackageExt: Sized { 11 | type Versionless; 12 | 13 | fn previous_version(&self) -> Option; 14 | 15 | fn directory(&self) -> PathBuf; 16 | } 17 | 18 | impl PackageExt for PackageSpec { 19 | type Versionless = VersionlessPackageSpec; 20 | 21 | fn previous_version(&self) -> Option { 22 | let all_versions_dir = self.versionless().directory(); 23 | let mut last_version = None; 24 | for version_dir in std::fs::read_dir(&all_versions_dir).ok()? { 25 | let Ok(version_dir) = version_dir else { 26 | continue; 27 | }; 28 | 29 | let Some(version) = version_dir 30 | .file_name() 31 | .to_str() 32 | .and_then(|v| v.parse::().ok()) 33 | else { 34 | continue; 35 | }; 36 | 37 | if version == self.version { 38 | continue; 39 | } 40 | 41 | if last_version.map(|last| last < version).unwrap_or(true) { 42 | last_version = Some(version); 43 | } 44 | } 45 | 46 | last_version.map(|v| PackageSpec { 47 | version: v, 48 | name: self.name.clone(), 49 | namespace: self.namespace.clone(), 50 | }) 51 | } 52 | 53 | fn directory(&self) -> PathBuf { 54 | dir() 55 | .join(self.namespace.as_str()) 56 | .join(self.name.as_str()) 57 | .join(self.version.to_string()) 58 | } 59 | } 60 | 61 | pub trait VersionlessPackageExt { 62 | fn directory(&self) -> PathBuf; 63 | } 64 | 65 | impl VersionlessPackageExt for VersionlessPackageSpec { 66 | fn directory(&self) -> PathBuf { 67 | dir().join(self.namespace.as_str()).join(self.name.as_str()) 68 | } 69 | } 70 | -------------------------------------------------------------------------------- /src/world.rs: -------------------------------------------------------------------------------- 1 | //! Typst World. 2 | //! 3 | //! Most of this module is copied from typst-cli. 4 | 5 | use std::{ 6 | collections::HashMap, 7 | path::{Path, PathBuf}, 8 | sync::OnceLock, 9 | }; 10 | 11 | use chrono::{DateTime, Datelike, FixedOffset, Local, Utc}; 12 | use fontdb::Database; 13 | use ignore::overrides::Override; 14 | use parking_lot::Mutex; 15 | use tracing::{debug, span, Level}; 16 | use typst::{ 17 | diag::{FileError, FileResult, PackageError, PackageResult}, 18 | foundations::{Bytes, Datetime}, 19 | syntax::{package::PackageSpec, FileId, Source, VirtualPath}, 20 | text::{Font, FontBook, FontInfo}, 21 | utils::LazyHash, 22 | Library, World, 23 | }; 24 | 25 | use crate::package::PackageExt; 26 | 27 | /// A world that provides access to the operating system. 28 | pub struct SystemWorld { 29 | /// The working directory. 30 | workdir: Option, 31 | /// The root relative to which absolute paths are resolved. 32 | root: PathBuf, 33 | /// The input path. 34 | main: FileId, 35 | /// Typst's standard library. 36 | library: LazyHash, 37 | /// Metadata about discovered fonts. 38 | book: LazyHash, 39 | /// Locations of and storage for lazily loaded fonts. 40 | fonts: Vec, 41 | /// Maps file ids to source files and buffers. 
--------------------------------------------------------------------------------
/src/world.rs:
--------------------------------------------------------------------------------
1 | //! Typst World.
2 | //!
3 | //! Most of this module is copied from typst-cli.
4 | 
5 | use std::{
6 |     collections::HashMap,
7 |     path::{Path, PathBuf},
8 |     sync::OnceLock,
9 | };
10 | 
11 | use chrono::{DateTime, Datelike, FixedOffset, Local, Utc};
12 | use fontdb::Database;
13 | use ignore::overrides::Override;
14 | use parking_lot::Mutex;
15 | use tracing::{debug, span, Level};
16 | use typst::{
17 |     diag::{FileError, FileResult, PackageError, PackageResult},
18 |     foundations::{Bytes, Datetime},
19 |     syntax::{package::PackageSpec, FileId, Source, VirtualPath},
20 |     text::{Font, FontBook, FontInfo},
21 |     utils::LazyHash,
22 |     Library, World,
23 | };
24 | 
25 | use crate::package::PackageExt;
26 | 
27 | /// A world that provides access to the operating system.
28 | pub struct SystemWorld {
29 |     /// The working directory.
30 |     workdir: Option<PathBuf>,
31 |     /// The root relative to which absolute paths are resolved.
32 |     root: PathBuf,
33 |     /// The input path.
34 |     main: FileId,
35 |     /// Typst's standard library.
36 |     library: LazyHash<Library>,
37 |     /// Metadata about discovered fonts.
38 |     book: LazyHash<FontBook>,
39 |     /// Locations of and storage for lazily loaded fonts.
40 |     fonts: Vec<FontSlot>,
41 |     /// Maps file ids to source files and buffers.
42 |     slots: Mutex<HashMap<FileId, FileSlot>>,
43 |     /// The current datetime if requested. This is stored here to ensure it is
44 |     /// always the same within one compilation.
45 |     /// Reset between compilations.
46 |     now: OnceLock<DateTime<Utc>>,
47 |     /// Override for package resolution.
48 |     package_override: Option<(PackageSpec, PathBuf)>,
49 |     /// Files that are considered excluded and should not be read from.
50 |     excluded: Override,
51 | }
52 | 
53 | impl SystemWorld {
54 |     /// Create a new system world.
55 |     pub fn new(input: PathBuf, root: PathBuf) -> Result<Self, WorldCreationError> {
56 |         // Resolve the virtual path of the main file within the project root.
57 |         let main_path =
58 |             VirtualPath::within_root(&input, &root).ok_or(WorldCreationError::InputOutsideRoot)?;
59 |         let main = FileId::new(None, main_path);
60 | 
61 |         let library = Library::default();
62 | 
63 |         let mut searcher = FontSearcher::new();
64 |         searcher.search(&[]);
65 | 
66 |         Ok(Self {
67 |             workdir: std::env::current_dir().ok(),
68 |             root,
69 |             main,
70 |             library: LazyHash::new(library),
71 |             book: LazyHash::new(searcher.book),
72 |             fonts: searcher.fonts,
73 |             slots: Mutex::new(HashMap::new()),
74 |             now: OnceLock::new(),
75 |             package_override: None,
76 |             excluded: Override::empty(),
77 |         })
78 |     }
79 | 
80 |     pub fn with_package_override(mut self, spec: &PackageSpec, dir: &Path) -> Self {
81 |         self.package_override = Some((spec.clone(), dir.to_owned()));
82 |         self
83 |     }
84 | 
85 |     /// The root relative to which absolute paths are resolved.
86 |     pub fn root(&self) -> &Path {
87 |         &self.root
88 |     }
89 | 
90 |     /// The current working directory.
91 |     pub fn workdir(&self) -> &Path {
92 |         self.workdir.as_deref().unwrap_or(Path::new("."))
93 |     }
94 | 
95 |     /// Lookup a source file by id.
96 |     #[track_caller]
97 |     pub fn lookup(&self, id: FileId) -> FileResult<Source> {
98 |         self.source(id)
99 |     }
100 | 
101 |     pub fn exclude(&mut self, globs: Override) {
102 |         self.excluded = globs;
103 |     }
104 | 
105 |     pub fn reset_file_cache(&mut self) {
106 |         let mut slots = self.slots.lock();
107 |         slots.clear();
108 |     }
109 | }
110 | 
111 | impl World for SystemWorld {
112 |     fn library(&self) -> &LazyHash<Library> {
113 |         &self.library
114 |     }
115 | 
116 |     fn book(&self) -> &LazyHash<FontBook> {
117 |         &self.book
118 |     }
119 | 
120 |     fn main(&self) -> FileId {
121 |         self.main
122 |     }
123 | 
124 |     fn source(&self, id: FileId) -> FileResult<Source> {
125 |         self.slot(id, |slot| {
126 |             slot.source(&self.root, &self.package_override, &self.excluded)
127 |         })
128 |     }
129 | 
130 |     fn file(&self, id: FileId) -> FileResult<Bytes> {
131 |         self.slot(id, |slot| {
132 |             slot.file(&self.root, &self.package_override, &self.excluded)
133 |         })
134 |     }
135 | 
136 |     fn font(&self, index: usize) -> Option<Font> {
137 |         self.fonts[index].get()
138 |     }
139 | 
140 |     fn today(&self, offset: Option<i64>) -> Option<Datetime> {
141 |         let now = self.now.get_or_init(Utc::now);
142 | 
143 |         // The time with the specified UTC offset, or within the local time zone.
144 |         let with_offset = match offset {
145 |             None => now.with_timezone(&Local).fixed_offset(),
146 |             Some(hours) => {
147 |                 let seconds = i32::try_from(hours).ok()?.checked_mul(3600)?;
148 |                 now.with_timezone(&FixedOffset::east_opt(seconds)?)
149 |             }
150 |         };
151 | 
152 |         Datetime::from_ymd(
153 |             with_offset.year(),
154 |             with_offset.month().try_into().ok()?,
155 |             with_offset.day().try_into().ok()?,
156 |         )
157 |     }
158 | }
159 | 
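// A minimal sketch (not part of the crate) of how this `World` is driven
// during a check; the entrypoint name and the package spec are illustrative:
fn compile_sketch(package_dir: std::path::PathBuf, spec: &PackageSpec) {
    let world = SystemWorld::new(package_dir.join("lib.typ"), package_dir.clone())
        .expect("entrypoint is inside the root")
        .with_package_override(spec, &package_dir);
    // Compilation pulls every file through `source`/`file` above, so the
    // exclusion globs and the package override apply to all reads.
    let result = typst::compile::<typst::layout::PagedDocument>(&world);
    for warning in &result.warnings {
        eprintln!("warning: {}", warning.message);
    }
}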
160 | impl SystemWorld {
161 |     /// Access the canonical slot for the given file id.
162 |     fn slot<F, T>(&self, id: FileId, f: F) -> T
163 |     where
164 |         F: FnOnce(&mut FileSlot) -> T,
165 |     {
166 |         let mut map = self.slots.lock();
167 |         f(map.entry(id).or_insert_with(|| FileSlot::new(id)))
168 |     }
169 | }
170 | 
171 | /// Holds the processed data for a file ID.
172 | ///
173 | /// Both fields can be populated if the file is both imported and read().
174 | struct FileSlot {
175 |     /// The slot's file id.
176 |     id: FileId,
177 |     /// The lazily loaded and incrementally updated source file.
178 |     source: SlotCell<Source>,
179 |     /// The lazily loaded raw byte buffer.
180 |     file: SlotCell<Bytes>,
181 | }
182 | 
183 | impl FileSlot {
184 |     /// Create a new file slot.
185 |     fn new(id: FileId) -> Self {
186 |         Self {
187 |             id,
188 |             file: SlotCell::new(),
189 |             source: SlotCell::new(),
190 |         }
191 |     }
192 | 
193 |     /// Retrieve the source for this file.
194 |     fn source(
195 |         &mut self,
196 |         project_root: &Path,
197 |         package_override: &Option<(PackageSpec, PathBuf)>,
198 |         excluded: &Override,
199 |     ) -> FileResult<Source> {
200 |         self.source.get_or_init(
201 |             || read(self.id, project_root, package_override, excluded),
202 |             |data, prev| {
203 |                 let text = decode_utf8(&data)?;
204 |                 if let Some(mut prev) = prev {
205 |                     prev.replace(text);
206 |                     Ok(prev)
207 |                 } else {
208 |                     Ok(Source::new(self.id, text.into()))
209 |                 }
210 |             },
211 |         )
212 |     }
213 | 
214 |     /// Retrieve the file's bytes.
215 |     fn file(
216 |         &mut self,
217 |         project_root: &Path,
218 |         package_override: &Option<(PackageSpec, PathBuf)>,
219 |         excluded: &Override,
220 |     ) -> FileResult<Bytes> {
221 |         self.file.get_or_init(
222 |             || read(self.id, project_root, package_override, excluded),
223 |             |data, _| Ok(Bytes::new(data)),
224 |         )
225 |     }
226 | }
227 | 
228 | /// Lazily processes data for a file.
229 | struct SlotCell<T> {
230 |     /// The processed data.
231 |     data: Option<FileResult<T>>,
232 |     /// A hash of the raw file contents / access error.
233 |     fingerprint: u128,
234 |     /// Whether the slot has been accessed in the current compilation.
235 |     accessed: bool,
236 | }
237 | 
238 | impl<T: Clone> SlotCell<T> {
239 |     /// Creates a new, empty cell.
240 |     fn new() -> Self {
241 |         Self {
242 |             data: None,
243 |             fingerprint: 0,
244 |             accessed: false,
245 |         }
246 |     }
247 | 
248 |     /// Gets the contents of the cell or initializes them.
249 |     fn get_or_init(
250 |         &mut self,
251 |         load: impl FnOnce() -> FileResult<Vec<u8>>,
252 |         f: impl FnOnce(Vec<u8>, Option<T>) -> FileResult<T>,
253 |     ) -> FileResult<T> {
254 |         // If we accessed the file already in this compilation, retrieve it.
255 |         if std::mem::replace(&mut self.accessed, true) {
256 |             if let Some(data) = &self.data {
257 |                 return data.clone();
258 |             }
259 |         }
260 | 
261 |         // Read and hash the file.
262 |         let result = load();
263 |         let fingerprint = typst::utils::hash128(&result);
264 | 
265 |         // If the file contents didn't change, yield the old processed data.
266 |         if std::mem::replace(&mut self.fingerprint, fingerprint) == fingerprint {
267 |             if let Some(data) = &self.data {
268 |                 return data.clone();
269 |             }
270 |         }
271 | 
272 |         let prev = self.data.take().and_then(Result::ok);
273 |         let value = result.and_then(|data| f(data, prev));
274 |         self.data = Some(value.clone());
275 | 
276 |         value
277 |     }
278 | }
279 | 
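// A small illustration (not part of the crate) of the fingerprinting used by
// `get_or_init` above; the hash covers the whole read result, so access
// errors are fingerprinted just like file contents (the `()` error type here
// stands in for the real `FileError`):
fn fingerprint_sketch() {
    let a = typst::utils::hash128(&Ok::<Vec<u8>, ()>(b"hello".to_vec()));
    let b = typst::utils::hash128(&Ok::<Vec<u8>, ()>(b"hello".to_vec()));
    let c = typst::utils::hash128(&Ok::<Vec<u8>, ()>(b"hello!".to_vec()));
    assert_eq!(a, b); // unchanged bytes: the old processed value is reused
    assert_ne!(a, c); // changed bytes: the slot reprocesses the file
}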
280 | /// Resolves the path of a file id on the system, locating the package it
281 | /// belongs to if necessary.
282 | fn system_path(
283 |     package_override: &Option<(PackageSpec, PathBuf)>,
284 |     project_root: &Path,
285 |     excluded: &Override,
286 |     id: FileId,
287 | ) -> FileResult<PathBuf> {
288 |     let _enter = span!(Level::DEBUG, "Path resolution").entered();
289 |     debug!("File ID = {:?}", id);
290 |     let exclude = |file: FileResult<PathBuf>| match file {
291 |         Ok(f) => {
292 |             if let Ok(canonical_path) = f.canonicalize() {
293 |                 if excluded.matched(canonical_path, false).is_ignore() {
294 |                     debug!("This file is excluded");
295 |                     return Err(FileError::Other(Some(
296 |                         "This file exists but is excluded from your package.".into(),
297 |                     )));
298 |                 }
299 |             }
300 | 
301 |             debug!("Resolved to {}", f.display());
302 |             Ok(f)
303 |         }
304 |         err => err,
305 |     };
306 | 
307 |     // Determine the root path relative to which the file path
308 |     // will be resolved.
309 |     let root = if let Some(spec) = id.package() {
310 |         if let Some(package_override) = package_override {
311 |             if *spec == package_override.0 {
312 |                 return exclude(
313 |                     id.vpath()
314 |                         .resolve(&package_override.1)
315 |                         .ok_or(FileError::AccessDenied),
316 |                 );
317 |             }
318 |         }
319 | 
320 |         expect_parents(
321 |             project_root,
322 |             &[&spec.version.to_string(), &spec.name, &spec.namespace],
323 |         )
324 |         .map(|packages_root| {
325 |             Ok(packages_root
326 |                 .join(spec.namespace.as_str())
327 |                 .join(spec.name.as_str())
328 |                 .join(spec.version.to_string()))
329 |         })
330 |         .unwrap_or_else(|| prepare_package(spec))
331 |         .map_err(FileError::Package)?
332 |     } else {
333 |         project_root.to_owned()
334 |     };
335 |     exclude(id.vpath().resolve(&root).ok_or(FileError::AccessDenied))
336 | }
337 | 
338 | /// Walks up the file system hierarchy as long as each folder matches the expected name.
339 | fn expect_parents<'a>(dir: &'a Path, parents: &'a [&'a str]) -> Option<PathBuf> {
340 |     let dir = dir.canonicalize().ok()?;
341 | 
342 |     if parents.is_empty() {
343 |         return Some(dir);
344 |     }
345 | 
346 |     let (expected_parent, rest) = parents.split_first()?;
347 |     if dir.file_name().and_then(|n| n.to_str()) != Some(expected_parent) {
348 |         debug!(
349 |             "Expected parent folder to be {}, but it was {}",
350 |             expected_parent,
351 |             dir.display()
352 |         );
353 |         return None;
354 |     }
355 | 
356 |     expect_parents(dir.parent()?, rest)
357 | }
358 | 
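// Worked example (paths illustrative): when the package under check imports
// itself, as templates do, `system_path` calls `expect_parents` with the
// project root .../packages/preview/foo/0.1.0 and the list
// ["0.1.0", "foo", "preview"]. Each folder name matches, so the function
// climbs three levels and returns Some(.../packages), from which the import
// is resolved back into the checkout. If any name does not match, for
// instance when importing a different package, it returns None and
// `system_path` falls back to `prepare_package` below.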
359 | /// Reads a file from a `FileId`.
360 | ///
361 | /// Resolves the ID to an on-disk path, honoring the package override and
362 | /// the exclusion globs, and reads the file from disk.
363 | fn read(
364 |     id: FileId,
365 |     project_root: &Path,
366 |     package_override: &Option<(PackageSpec, PathBuf)>,
367 |     excluded: &Override,
368 | ) -> FileResult<Vec<u8>> {
369 |     read_from_disk(&system_path(package_override, project_root, excluded, id)?)
370 | }
371 | 
372 | /// Read a file from disk.
373 | fn read_from_disk(path: &Path) -> FileResult<Vec<u8>> {
374 |     let f = |e| FileError::from_io(e, path);
375 |     if std::fs::metadata(path).map_err(f)?.is_dir() {
376 |         Err(FileError::IsDirectory)
377 |     } else {
378 |         std::fs::read(path).map_err(f)
379 |     }
380 | }
381 | 
382 | /// Decode UTF-8 with an optional BOM.
383 | fn decode_utf8(buf: &[u8]) -> FileResult<&str> {
384 |     // Remove UTF-8 BOM.
385 |     Ok(std::str::from_utf8(
386 |         buf.strip_prefix(b"\xef\xbb\xbf").unwrap_or(buf),
387 |     )?)
388 | }
389 | /// An error that occurs during world construction.
390 | #[derive(Debug)]
391 | pub enum WorldCreationError {
392 |     /// The input file is not contained within the root folder.
393 |     InputOutsideRoot,
394 | }
395 | 
396 | impl std::fmt::Display for WorldCreationError {
397 |     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
398 |         match self {
399 |             WorldCreationError::InputOutsideRoot => {
400 |                 write!(f, "source file must be contained in project root")
401 |             }
402 |         }
403 |     }
404 | }
405 | 
406 | /// Searches for fonts.
407 | pub struct FontSearcher {
408 |     /// Metadata about all discovered fonts.
409 |     pub book: FontBook,
410 |     /// Slots that the fonts are loaded into.
411 |     pub fonts: Vec<FontSlot>,
412 | }
413 | 
414 | /// Holds details about the location of a font and lazily the font itself.
415 | pub struct FontSlot {
416 |     /// The path at which the font can be found on the system.
417 |     path: PathBuf,
418 |     /// The index of the font in its collection. Zero if the path does not point
419 |     /// to a collection.
420 |     index: u32,
421 |     /// The lazily loaded font.
422 |     font: OnceLock<Option<Font>>,
423 | }
424 | 
425 | impl FontSlot {
426 |     /// Get the font for this slot.
427 |     pub fn get(&self) -> Option<Font> {
428 |         self.font
429 |             .get_or_init(|| {
430 |                 let data = Bytes::new(std::fs::read(&self.path).ok()?);
431 |                 Font::new(data, self.index)
432 |             })
433 |             .clone()
434 |     }
435 | }
436 | 
437 | impl FontSearcher {
438 |     /// Create a new, empty system searcher.
439 |     pub fn new() -> Self {
440 |         Self {
441 |             book: FontBook::new(),
442 |             fonts: vec![],
443 |         }
444 |     }
445 | 
446 |     /// Search everything that is available.
447 |     pub fn search(&mut self, font_paths: &[PathBuf]) {
448 |         let mut db = Database::new();
449 | 
450 |         // Font paths have highest priority.
451 |         for path in font_paths {
452 |             db.load_fonts_dir(path);
453 |         }
454 | 
455 |         // System fonts have second priority.
456 |         db.load_system_fonts();
457 | 
458 |         for face in db.faces() {
459 |             let path = match &face.source {
460 |                 fontdb::Source::File(path) | fontdb::Source::SharedFile(path, _) => path,
461 |                 // We never add binary sources to the database, so there
462 |                 // shouldn't be any.
463 |                 fontdb::Source::Binary(_) => continue,
464 |             };
465 | 
466 |             let info = db
467 |                 .with_face_data(face.id, FontInfo::new)
468 |                 .expect("database must contain this font");
469 | 
470 |             if let Some(info) = info {
471 |                 self.book.push(info);
472 |                 self.fonts.push(FontSlot {
473 |                     path: path.clone(),
474 |                     index: face.index,
475 |                     font: OnceLock::new(),
476 |                 });
477 |             }
478 |         }
479 | 
480 |         // Embedded fonts have lowest priority.
481 |         self.add_embedded();
482 |     }
483 | 
484 |     /// Add fonts that are embedded in the binary.
485 |     fn add_embedded(&mut self) {
486 |         for data in typst_assets::fonts() {
487 |             let buffer = typst::foundations::Bytes::new(data);
488 |             for (i, font) in Font::iter(buffer).enumerate() {
489 |                 self.book.push(font.info().clone());
490 |                 self.fonts.push(FontSlot {
491 |                     path: PathBuf::new(),
492 |                     index: i as u32,
493 |                     font: OnceLock::from(Some(font)),
494 |                 });
495 |             }
496 |         }
497 |     }
498 | }
499 | 
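// A minimal sketch (not part of the crate) of how the searcher above is used:
// `SystemWorld::new` calls `search(&[])`, so checks see only the system fonts
// and the fonts embedded through `typst-assets`, never ad-hoc font paths.
fn font_sketch() {
    let mut searcher = FontSearcher::new();
    searcher.search(&[]);
    // Embedded fonts are always added last, so the slots are never empty.
    assert!(!searcher.fonts.is_empty());
}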
500 | /// Make a package available locally by finding it in the `packages` checkout or in the on-disk cache.
501 | pub fn prepare_package(spec: &PackageSpec) -> PackageResult<PathBuf> {
502 |     let subdir = format!(
503 |         "typst/packages/{}/{}/{}",
504 |         spec.namespace, spec.name, spec.version
505 |     );
506 | 
507 |     let local_package_dir = spec.directory();
508 |     if local_package_dir.exists() {
509 |         return Ok(local_package_dir);
510 |     }
511 | 
512 |     if let Some(data_dir) = dirs::data_dir() {
513 |         let dir = data_dir.join(&subdir);
514 |         if dir.exists() {
515 |             return Ok(dir);
516 |         }
517 |     }
518 | 
519 |     if let Some(cache_dir) = dirs::cache_dir() {
520 |         let dir = cache_dir.join(&subdir);
521 |         if dir.exists() {
522 |             return Ok(dir);
523 |         }
524 | 
525 |         return Err(PackageError::NetworkFailed(Some(
526 |             "All packages are supposed to be present in the `packages` repository, or in the local cache.".into(),
527 |         )));
528 |     }
529 | 
530 |     Err(PackageError::NotFound(spec.clone()))
531 | }
532 | 
--------------------------------------------------------------------------------
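// Resolution order implemented by `prepare_package` above, with illustrative
// paths for a Linux host and the placeholder spec @preview/example:1.0.0:
//
//     $PACKAGES_DIR/packages/preview/example/1.0.0            (checkout)
//     ~/.local/share/typst/packages/preview/example/1.0.0     (data dir)
//     ~/.cache/typst/packages/preview/example/1.0.0           (cache dir)
//
// Nothing is ever downloaded: when all three locations are missing, the
// function errors and asks for the package to be present in the `packages`
// repository or in the local cache.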