├── .gitignore ├── .release-plz.toml ├── src ├── shell │ ├── mod.rs │ ├── hostname.rs │ └── shell_.rs ├── config.rs ├── targets.rs ├── bin │ └── main.rs ├── codegen_unit.rs ├── codegen_plan.rs └── lib.rs ├── cargo_px_env ├── Cargo.toml ├── README.md ├── CHANGELOG.md └── src │ ├── lib.rs │ └── error.rs ├── oranda.json ├── .github └── workflows │ ├── publish.yml │ ├── general.yml │ ├── web.yml │ └── release.yml ├── LICENSE-MIT ├── Cargo.toml ├── dist-workspace.toml ├── CHANGELOG.md ├── README.md ├── LICENSE-APACHE └── Cargo.lock /.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | 3 | # Generated by `oranda generate ci` 4 | public/ -------------------------------------------------------------------------------- /.release-plz.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | git_release_enable = false 3 | semver_check = false 4 | -------------------------------------------------------------------------------- /src/shell/mod.rs: -------------------------------------------------------------------------------- 1 | // Most of the code in this module has been lifted from `cargo`'s `shell.rs` module in order 2 | // to match the output style of `cargo` as closely as possible. 3 | mod hostname; 4 | mod shell_; 5 | 6 | pub use shell_::*; 7 | -------------------------------------------------------------------------------- /cargo_px_env/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "cargo_px_env" 3 | version = "0.1.0" 4 | edition = "2021" 5 | keywords = ["cargo", "cargo-px", "build", "scripts", "generate"] 6 | description = "Bindings to retrieve the environment variables set by cargo-px" 7 | categories = ["development-tools::cargo-plugins"] 8 | repository = "https://github.com/LukeMathWalker/cargo-px" 9 | license = "Apache-2.0 OR MIT" 10 | -------------------------------------------------------------------------------- /cargo_px_env/README.md: -------------------------------------------------------------------------------- 1 | # cargo_px_env 2 | 3 | Utilities to retrieve the environment variables set by `cargo px`. 4 | 5 | When `cargo px` invokes a code generator, it sets various environment variables that 6 | can be leveraged by the code generator to retrieve information about the workspace. 7 | This crate provides bindings to work with these environment variables instead 8 | of hard-coding their names in your code generator. -------------------------------------------------------------------------------- /oranda.json: -------------------------------------------------------------------------------- 1 | { 2 | "build": { 3 | "path_prefix": "cargo-px" 4 | }, 5 | "marketing": { 6 | "analytics": { 7 | "plausible": { 8 | "domain": "lukemathwalker.github.io" 9 | } 10 | } 11 | }, 12 | "components": { 13 | "artifacts": { 14 | "package_managers": { 15 | "preferred": { 16 | "cargo": "cargo install --locked cargo-px" 17 | } 18 | } 19 | } 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /cargo_px_env/CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | All notable changes to this project will be documented in this file. 3 | 4 | The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), 5 | and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
6 | 7 | ## [Unreleased] 8 | 9 | ## [0.1.0](https://github.com/LukeMathWalker/cargo-px/releases/tag/cargo_px_env-v0.1.0) - 2023-05-05 10 | 11 | ### Other 12 | - Add `cargo_px_env` (#6) 13 | -------------------------------------------------------------------------------- /.github/workflows/publish.yml: -------------------------------------------------------------------------------- 1 | name: Release-plz 2 | 3 | permissions: 4 | pull-requests: write 5 | contents: write 6 | 7 | on: 8 | push: 9 | branches: 10 | - main 11 | 12 | jobs: 13 | release-plz: 14 | name: Release-plz 15 | runs-on: ubuntu-latest 16 | steps: 17 | - name: Checkout repository 18 | uses: actions/checkout@v4 19 | with: 20 | fetch-depth: 0 21 | token: ${{ secrets.RELEASE_PLZ_SECRET }} 22 | - name: Install Rust toolchain 23 | uses: dtolnay/rust-toolchain@stable 24 | - name: Run release-plz 25 | uses: MarcoIeni/release-plz-action@v0.5 26 | env: 27 | GITHUB_TOKEN: ${{ secrets.RELEASE_PLZ_SECRET }} 28 | CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_REGISTRY_TOKEN }} 29 | -------------------------------------------------------------------------------- /LICENSE-MIT: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2021 Luca Palmieri 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /.github/workflows/general.yml: -------------------------------------------------------------------------------- 1 | name: Rust 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | pull_request: 8 | types: [opened, synchronize, reopened] 9 | branches: 10 | - main 11 | 12 | env: 13 | CARGO_TERM_COLOR: always 14 | 15 | jobs: 16 | test: 17 | name: Test 18 | strategy: 19 | matrix: 20 | os: [ubuntu-latest, windows-latest, macos-latest] 21 | fail-fast: false 22 | runs-on: ${{ matrix.os }} 23 | steps: 24 | - name: Check out repository code 25 | uses: actions/checkout@v4 26 | - name: Install the Rust toolchain 27 | uses: dtolnay/rust-toolchain@stable 28 | - name: Rust Cache Action 29 | uses: Swatinem/rust-cache@v2 30 | - name: Run tests 31 | run: cargo test 32 | 33 | fmt: 34 | name: Rustfmt 35 | runs-on: ubuntu-latest 36 | steps: 37 | - uses: actions/checkout@v4 38 | - uses: dtolnay/rust-toolchain@stable 39 | with: 40 | components: rustfmt 41 | - name: Enforce formatting 42 | run: cargo fmt --check 43 | 44 | clippy: 45 | name: Clippy 46 | runs-on: ubuntu-latest 47 | steps: 48 | - uses: actions/checkout@v4 49 | - uses: dtolnay/rust-toolchain@stable 50 | with: 51 | components: clippy 52 | - uses: Swatinem/rust-cache@v2 53 | - name: Linting 54 | run: cargo clippy -- -D warnings 55 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | members = [".", "cargo_px_env"] 3 | 4 | [package] 5 | name = "cargo-px" 6 | version = "0.1.20" 7 | edition = "2021" 8 | keywords = ["cargo", "generator", "build", "scripts", "generate"] 9 | description = "A cargo sub-command to overcome some of the limitations of build scripts for code generation." 
10 | categories = ["development-tools::cargo-plugins", "command-line-utilities"] 11 | repository = "https://github.com/LukeMathWalker/cargo-px" 12 | license = "Apache-2.0 OR MIT" 13 | 14 | [[bin]] 15 | path = "src/bin/main.rs" 16 | name = "cargo-px" 17 | 18 | [dependencies] 19 | ahash = "0.8.3" 20 | anyhow = "1.0.70" 21 | clap = "4" 22 | guppy = "0.17.2" 23 | petgraph = { version = "0.6.3", features = ["stable_graph"] } 24 | serde = { version = "1.0.160", features = ["derive"] } 25 | serde_json = "1.0.96" 26 | tracing = "0.1.37" 27 | tracing-subscriber = { version = "0.3.17", features = [ 28 | "env-filter", 29 | "fmt", 30 | "time", 31 | ] } 32 | textwrap = "0.16" 33 | once_cell = "1.17.1" 34 | anstream = "0.6.4" 35 | anstyle = "1.0.4" 36 | url = "2.4.1" 37 | supports-hyperlinks = "2.1.0" 38 | libc = "0.2.149" 39 | 40 | [target.'cfg(windows)'.dependencies.windows-sys] 41 | version = "0.52" 42 | features = [ 43 | "Win32_Foundation", 44 | "Win32_Security", 45 | "Win32_Storage_FileSystem", 46 | "Win32_System_IO", 47 | "Win32_System_Console", 48 | ] 49 | 50 | # The profile that 'cargo dist' will build with 51 | [profile.dist] 52 | inherits = "release" 53 | lto = "thin" 54 | -------------------------------------------------------------------------------- /dist-workspace.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | members = ["cargo:."] 3 | 4 | # Config for 'dist' 5 | [dist] 6 | # The preferred dist version to use in CI (Cargo.toml SemVer syntax) 7 | cargo-dist-version = "0.28.0" 8 | # CI backends to support 9 | ci = "github" 10 | # The installers to generate for each app 11 | installers = ["shell", "powershell"] 12 | # Target platforms to build apps for (Rust target-triple syntax) 13 | targets = [ 14 | "aarch64-apple-darwin", 15 | "x86_64-apple-darwin", 16 | "x86_64-unknown-linux-gnu", 17 | "x86_64-unknown-linux-musl", 18 | "x86_64-pc-windows-msvc", 19 | ] 20 | # Which actions to run on pull requests 21 | pr-run-mode = "plan" 22 | # Path that installers should place binaries in 23 | install-path = "CARGO_HOME" 24 | # Whether to install an updater program 25 | install-updater = false 26 | 27 | [dist.github-custom-runners] 28 | # Workarounds to move away from ubuntu-20.04, 29 | # coming from https://github.com/axodotdev/cargo-dist/issues/1760#issuecomment-2769218034 30 | global = "ubuntu-latest" 31 | 32 | x86_64-pc-windows-msvc = "windows-latest" 33 | 34 | # Use faster built-in runners for macOS on ARM 35 | aarch64-apple-darwin = "macos-14" 36 | x86_64-apple-darwin = "macos-14" 37 | 38 | [dist.github-custom-runners.x86_64-unknown-linux-gnu] 39 | runner = "ubuntu-latest" 40 | container = { image = "quay.io/pypa/manylinux_2_28_x86_64", host = "x86_64-unknown-linux-musl" } 41 | 42 | [dist.github-custom-runners.x86_64-unknown-linux-musl] 43 | runner = "ubuntu-latest" 44 | container = { image = "quay.io/pypa/musllinux_1_2_x86_64", host = "x86_64-unknown-linux-musl" } 45 | -------------------------------------------------------------------------------- /cargo_px_env/src/lib.rs: -------------------------------------------------------------------------------- 1 | #![doc = include_str!("../README.md")] 2 | use std::path::PathBuf; 3 | 4 | use crate::error::{InvalidUnicodeError, MissingVarError, VarError}; 5 | 6 | pub mod error; 7 | 8 | /// The name of the environment variable that contains the path to the root directory 9 | /// of the current workspace. 
10 | pub const WORKSPACE_ROOT_DIR_ENV: &str = "CARGO_PX_WORKSPACE_ROOT_DIR"; 11 | /// The name of the environment variable that contains the path to the manifest 12 | /// of the crate that must be generated. 13 | pub const GENERATED_PKG_MANIFEST_PATH_ENV: &str = "CARGO_PX_GENERATED_PKG_MANIFEST_PATH"; 14 | 15 | /// Retrieve the path to the workspace root directory. 16 | /// 17 | /// It returns an error if the variable is not set or if it contains invalid Unicode data. 18 | pub fn workspace_root_dir() -> Result<PathBuf, VarError> { 19 | px_env_var(WORKSPACE_ROOT_DIR_ENV).map(PathBuf::from) 20 | } 21 | 22 | /// Retrieve the path to the manifest of the crate that must be generated. 23 | /// 24 | /// It returns an error if the variable is not set or if it contains invalid Unicode data. 25 | pub fn generated_pkg_manifest_path() -> Result<PathBuf, VarError> { 26 | px_env_var(GENERATED_PKG_MANIFEST_PATH_ENV).map(PathBuf::from) 27 | } 28 | 29 | /// Retrieve the value of an env variable set by `cargo px`. 30 | /// 31 | /// It returns an error if the variable is not set or if it contains invalid Unicode data. 32 | fn px_env_var(name: &'static str) -> Result<String, VarError> { 33 | use std::env::{var, VarError}; 34 | 35 | var(name).map_err(|e| match e { 36 | VarError::NotPresent => { 37 | crate::error::VarError::Missing(MissingVarError { name, source: e }) 38 | } 39 | VarError::NotUnicode(_) => { 40 | crate::error::VarError::InvalidUnicode(InvalidUnicodeError { name, source: e }) 41 | } 42 | }) 43 | } 44 | -------------------------------------------------------------------------------- /src/config.rs: -------------------------------------------------------------------------------- 1 | //! The configuration that `px` expects to find in the `Cargo.toml` manifests of 2 | //! the packages that require code generation. 3 | 4 | #[derive(Debug, serde::Serialize, serde::Deserialize)] 5 | pub(crate) struct ManifestMetadata { 6 | #[serde(default)] 7 | pub(crate) px: Option<PxConfig>, 8 | } 9 | 10 | #[derive(Debug, serde::Serialize, serde::Deserialize)] 11 | pub(crate) struct PxConfig { 12 | pub(crate) generate: GenerateConfig, 13 | pub(crate) verify: Option<VerifyConfig>, 14 | } 15 | 16 | #[derive(Debug, serde::Serialize, serde::Deserialize)] 17 | #[non_exhaustive] 18 | #[serde(tag = "generator_type", rename_all = "snake_case")] 19 | pub(crate) enum GenerateConfig { 20 | /// The code generation step is performed by invoking a binary defined within the same workspace. 21 | CargoWorkspaceBinary(CargoBinaryGeneratorConfig), 22 | } 23 | 24 | #[derive(Debug, serde::Serialize, serde::Deserialize)] 25 | #[non_exhaustive] 26 | #[serde(tag = "verifier_type", rename_all = "snake_case")] 27 | pub(crate) enum VerifyConfig { 28 | /// The verification step is performed by invoking a binary defined within the same workspace. 29 | CargoWorkspaceBinary(CargoBinaryVerifierConfig), 30 | } 31 | 32 | #[derive(Debug, serde::Serialize, serde::Deserialize)] 33 | pub struct CargoBinaryGeneratorConfig { 34 | /// The name of the binary to be invoked to perform code generation. 35 | /// 36 | /// It must be a binary defined within the same workspace. 37 | pub(crate) generator_name: String, 38 | #[serde(default)] 39 | /// The arguments to be passed to the generator binary. 40 | pub(crate) generator_args: Vec<String>, 41 | } 42 | 43 | #[derive(Debug, serde::Serialize, serde::Deserialize)] 44 | pub struct CargoBinaryVerifierConfig { 45 | /// The name of the binary to be invoked to verify the freshness of 46 | /// the generated project. 47 | /// 48 | /// It must be a binary defined within the same workspace.
49 | pub(crate) verifier_name: String, 50 | #[serde(default)] 51 | /// The arguments to be passed to the verifier binary. 52 | pub(crate) verifier_args: Vec, 53 | } 54 | -------------------------------------------------------------------------------- /cargo_px_env/src/error.rs: -------------------------------------------------------------------------------- 1 | //! Errors that can encountered when loading the environment variables set by `cargo px`. 2 | 3 | #[derive(Debug)] 4 | #[non_exhaustive] 5 | /// An error that can occur when retrieving the value of an env variable set by `cargo px`. 6 | pub enum VarError { 7 | /// The variable is not set. 8 | Missing(MissingVarError), 9 | /// The variable contains invalid Unicode data. 10 | InvalidUnicode(InvalidUnicodeError), 11 | } 12 | 13 | #[derive(Debug)] 14 | /// One of the env variables that should be set by `cargo px` is not set. 15 | pub struct MissingVarError { 16 | pub(crate) name: &'static str, 17 | pub(crate) source: std::env::VarError, 18 | } 19 | 20 | #[derive(Debug)] 21 | /// One of the env variables that should be set by `cargo px` contains invalid Unicode data. 22 | pub struct InvalidUnicodeError { 23 | pub(crate) name: &'static str, 24 | pub(crate) source: std::env::VarError, 25 | } 26 | 27 | impl std::fmt::Display for VarError { 28 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 29 | match self { 30 | VarError::Missing(e) => std::fmt::Display::fmt(e, f), 31 | VarError::InvalidUnicode(e) => std::fmt::Display::fmt(e, f), 32 | } 33 | } 34 | } 35 | 36 | impl std::error::Error for VarError { 37 | fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { 38 | match self { 39 | VarError::Missing(e) => Some(e), 40 | VarError::InvalidUnicode(e) => Some(e), 41 | } 42 | } 43 | } 44 | 45 | impl std::fmt::Display for MissingVarError { 46 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 47 | write!(f, "The environment variable `{}` is missing. Are you running the command through `cargo px`?", self.name) 48 | } 49 | } 50 | 51 | impl std::fmt::Display for InvalidUnicodeError { 52 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 53 | write!( 54 | f, 55 | "The environment variable `{}` contains invalid Unicode data.", 56 | self.name 57 | ) 58 | } 59 | } 60 | 61 | impl std::error::Error for MissingVarError { 62 | fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { 63 | Some(&self.source) 64 | } 65 | } 66 | 67 | impl std::error::Error for InvalidUnicodeError { 68 | fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { 69 | Some(&self.source) 70 | } 71 | } 72 | -------------------------------------------------------------------------------- /src/shell/hostname.rs: -------------------------------------------------------------------------------- 1 | // Copied from https://github.com/BurntSushi/ripgrep/blob/7099e174acbcbd940f57e4ab4913fee4040c826e/crates/cli/src/hostname.rs 2 | 3 | use std::{ffi::OsString, io}; 4 | 5 | /// Returns the hostname of the current system. 6 | /// 7 | /// It is unusual, although technically possible, for this routine to return 8 | /// an error. It is difficult to list out the error conditions, but one such 9 | /// possibility is platform support. 10 | /// 11 | /// # Platform specific behavior 12 | /// 13 | /// On Unix, this returns the result of the `gethostname` function from the 14 | /// `libc` linked into the program. 
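///
/// On non-Unix platforms no lookup is attempted and an error is returned unconditionally.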
15 | pub fn hostname() -> io::Result { 16 | #[cfg(unix)] 17 | { 18 | gethostname() 19 | } 20 | #[cfg(not(unix))] 21 | { 22 | Err(io::Error::new( 23 | io::ErrorKind::Other, 24 | "hostname could not be found on unsupported platform", 25 | )) 26 | } 27 | } 28 | 29 | #[cfg(unix)] 30 | fn gethostname() -> io::Result { 31 | use std::os::unix::ffi::OsStringExt; 32 | 33 | // SAFETY: There don't appear to be any safety requirements for calling 34 | // sysconf. 35 | let limit = unsafe { libc::sysconf(libc::_SC_HOST_NAME_MAX) }; 36 | if limit == -1 { 37 | // It is in theory possible for sysconf to return -1 for a limit but 38 | // *not* set errno, in which case, io::Error::last_os_error is 39 | // indeterminate. But untangling that is super annoying because std 40 | // doesn't expose any unix-specific APIs for inspecting the errno. (We 41 | // could do it ourselves, but it just doesn't seem worth doing?) 42 | return Err(io::Error::last_os_error()); 43 | } 44 | let Ok(maxlen) = usize::try_from(limit) else { 45 | let msg = format!("host name max limit ({limit}) overflowed usize"); 46 | return Err(io::Error::other(msg)); 47 | }; 48 | // maxlen here includes the NUL terminator. 49 | let mut buf = vec![0; maxlen]; 50 | // SAFETY: The pointer we give is valid as it is derived directly from a 51 | // Vec. Similarly, `maxlen` is the length of our Vec, and is thus valid 52 | // to write to. 53 | let rc = unsafe { libc::gethostname(buf.as_mut_ptr().cast::(), maxlen) }; 54 | if rc == -1 { 55 | return Err(io::Error::last_os_error()); 56 | } 57 | // POSIX says that if the hostname is bigger than `maxlen`, then it may 58 | // write a truncate name back that is not necessarily NUL terminated (wtf, 59 | // lol). So if we can't find a NUL terminator, then just give up. 60 | let Some(zeropos) = buf.iter().position(|&b| b == 0) else { 61 | let msg = "could not find NUL terminator in hostname"; 62 | return Err(io::Error::other(msg)); 63 | }; 64 | buf.truncate(zeropos); 65 | buf.shrink_to_fit(); 66 | Ok(OsString::from_vec(buf)) 67 | } 68 | 69 | #[cfg(test)] 70 | mod tests { 71 | use super::*; 72 | 73 | #[test] 74 | fn print_hostname() { 75 | println!("{:?}", hostname()); 76 | } 77 | } 78 | -------------------------------------------------------------------------------- /.github/workflows/web.yml: -------------------------------------------------------------------------------- 1 | # Workflow to build your docs with oranda (and mdbook) 2 | # and deploy them to Github Pages 3 | name: Web 4 | 5 | # We're going to push to the gh-pages branch, so we need that permission 6 | permissions: 7 | contents: write 8 | 9 | # What situations do we want to build docs in? 10 | # All of these work independently and can be removed / commented out 11 | # if you don't want oranda/mdbook running in that situation 12 | on: 13 | # Check that a PR didn't break docs! 14 | # 15 | # Note that the "Deploy to Github Pages" step won't run in this mode, 16 | # so this won't have any side-effects. But it will tell you if a PR 17 | # completely broke oranda/mdbook. Sadly we don't provide previews (yet)! 18 | pull_request: 19 | 20 | # Whenever something gets pushed to main, update the docs! 21 | # This is great for getting docs changes live without cutting a full release. 22 | # 23 | # Note that if you're using cargo-dist, this will "race" the Release workflow 24 | # that actually builds the Github Release that oranda tries to read (and 25 | # this will almost certainly complete first). 
As a result you will publish 26 | # docs for the latest commit but the oranda landing page won't know about 27 | # the latest release. The workflow_run trigger below will properly wait for 28 | # cargo-dist, and so this half-published state will only last for ~10 minutes. 29 | # 30 | # If you only want docs to update with releases, disable this, or change it to 31 | # a "release" branch. You can, of course, also manually trigger a workflow run 32 | # when you want the docs to update. 33 | push: 34 | branches: 35 | - main 36 | 37 | # Whenever a workflow called "Release" completes, update the docs! 38 | # 39 | # If you're using cargo-dist, this is recommended, as it will ensure that 40 | # oranda always sees the latest release right when it's available. Note 41 | # however that Github's UI is wonky when you use workflow_run, and won't 42 | # show this workflow as part of any commit. You have to go to the "actions" 43 | # tab for your repo to see this one running (the gh-pages deploy will also 44 | # only show up there). 45 | workflow_run: 46 | workflows: [ "Release" ] 47 | types: 48 | - completed 49 | 50 | # Alright, let's do it! 51 | jobs: 52 | web: 53 | name: Build and deploy site and docs 54 | runs-on: ubuntu-latest 55 | steps: 56 | # Setup 57 | - uses: actions/checkout@v3 58 | with: 59 | fetch-depth: 0 60 | - uses: dtolnay/rust-toolchain@stable 61 | - uses: swatinem/rust-cache@v2 62 | 63 | # If you use any mdbook plugins, here's the place to install them! 64 | 65 | # Install and run oranda (and mdbook) 66 | # This will write all output to ./public/ (including copying mdbook's output to there) 67 | - name: Install and run oranda 68 | run: | 69 | curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/oranda/releases/latest/download/oranda-installer.sh | sh 70 | oranda build 71 | 72 | - name: Prepare HTML for link checking 73 | # untitaker/hyperlink supports no site prefixes, move entire site into 74 | # a subfolder 75 | run: mkdir /tmp/public/ && cp -R public /tmp/public/oranda 76 | 77 | # - name: Check HTML for broken internal links 78 | # uses: untitaker/hyperlink@0.1.29 79 | # with: 80 | # args: /tmp/public/ 81 | 82 | # Deploy to our gh-pages branch (creating it if it doesn't exist) 83 | # the "public" dir that oranda made above will become the root dir 84 | # of this branch. 85 | # 86 | # Note that once the gh-pages branch exists, you must 87 | # go into repo's settings > pages and set "deploy from branch: gh-pages" 88 | # the other defaults work fine. 89 | - name: Deploy to Github Pages 90 | uses: JamesIves/github-pages-deploy-action@v4.4.1 91 | # ONLY if we're on main (so no PRs or feature branches allowed!) 92 | if: ${{ github.ref == 'refs/heads/main' }} 93 | with: 94 | branch: gh-pages 95 | # Gotta tell the action where to find oranda's output 96 | folder: public 97 | token: ${{ secrets.GITHUB_TOKEN }} 98 | single-commit: true -------------------------------------------------------------------------------- /src/targets.rs: -------------------------------------------------------------------------------- 1 | use std::path::Path; 2 | 3 | use clap::{Arg, Command}; 4 | use guppy::{graph::PackageGraph, PackageId}; 5 | 6 | /// Determine which sub-units should be built from the package graph. 7 | /// 8 | /// We implement a simplified version of the general algorithm in `cargo`. We determine the target packages based on: 9 | /// 10 | /// - The `-p`/`--package` flag, which specifies a list of package specs to be considered. 
11 | /// - The current working directory, if no package specs are specified. 12 | /// 13 | /// But we assume that the specified package specs refer to packages in the workspace. If not, we fall back to performing 14 | /// codegen for everything. 15 | pub(crate) fn determine_targets( 16 | args: &[String], 17 | working_directory: &Path, 18 | package_graph: &PackageGraph, 19 | ) -> Vec<PackageId> { 20 | // TODO: Handle other forms of package selection in `cargo`: 21 | // - --workspace / --exclude 22 | // - --manifest-path 23 | // - Target selection via --bin/--lib/etc. 24 | 25 | let package_specs = extract_package_filters(args); 26 | 27 | if tracing::event_enabled!(tracing::Level::DEBUG) { 28 | if package_specs.is_empty() { 29 | tracing::debug!("No package specs provided, determining the target based on the current working directory"); 30 | } else { 31 | tracing::debug!( 32 | ?package_specs, 33 | "Extracted the following package specs for this invocation" 34 | ); 35 | } 36 | } 37 | 38 | if package_specs.is_empty() { 39 | return find_implicit_target(working_directory, package_graph) 40 | .map(|id| vec![id]) 41 | .unwrap_or_default(); 42 | } 43 | 44 | // Collect the package IDs for the specified package specs. 45 | let mut package_ids = Vec::new(); 46 | for spec in package_specs { 47 | if let Ok(package) = package_graph.workspace().member_by_name(&spec) { 48 | package_ids.push(package.id().clone()); 49 | } else { 50 | // If any spec does not match a workspace package, fall back to performing codegen for everything. 51 | return vec![]; 52 | } 53 | } 54 | 55 | package_ids 56 | } 57 | 58 | /// If no package specs have been provided, determine the package based on the working directory. 59 | /// 60 | /// We will build the package whose manifest file is closest to the current working directory. 61 | fn find_implicit_target( 62 | working_directory: &Path, 63 | package_graph: &PackageGraph, 64 | ) -> Option<PackageId> { 65 | let workspace_root = package_graph.workspace().root(); 66 | // All workspace paths in the graph are relative to the workspace root. 67 | let working_directory = working_directory 68 | .strip_prefix(workspace_root) 69 | .unwrap_or(working_directory); 70 | package_graph 71 | .workspace() 72 | .iter_by_path() 73 | .filter_map(|(path, package_metadata)| { 74 | if let Ok(suffix) = working_directory.strip_prefix(path) { 75 | Some((package_metadata, suffix.components().count())) 76 | } else { 77 | None 78 | } 79 | }) 80 | .min_by_key(|(_, count)| *count) 81 | .map(|(package_metadata, _)| package_metadata.id().to_owned()) 82 | } 83 | 84 | /// Check if the user has specified a list of package specs to be considered.
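///
/// Both `-p` and `--package` are honoured and may be repeated: `cargo px build -p foo --package bar` yields `["foo", "bar"]`.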
85 | fn extract_package_filters(args: &[String]) -> Vec { 86 | let Ok(matches) = Command::new("px") 87 | .no_binary_name(true) 88 | .arg( 89 | Arg::new("package") 90 | .short('p') 91 | .long("package") 92 | .num_args(1) 93 | .action(clap::ArgAction::Append) 94 | .help("Package(s) to operate on"), 95 | ) 96 | .allow_external_subcommands(true) 97 | .dont_collapse_args_in_usage(true) 98 | // Skip `px ` 99 | .try_get_matches_from(&args[2..]) 100 | else { 101 | tracing::debug!("Failed to match `-p`/`--package` arguments"); 102 | return Vec::new(); 103 | }; 104 | matches 105 | .get_many::("package") 106 | .map(|vals| vals.cloned().collect()) 107 | .unwrap_or_default() 108 | } 109 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | All notable changes to this project will be documented in this file. 4 | 5 | The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), 6 | and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 7 | 8 | ## [Unreleased] 9 | 10 | ## [0.1.20](https://github.com/LukeMathWalker/cargo-px/compare/cargo-px-v0.1.19...cargo-px-v0.1.20) - 2025-07-24 11 | 12 | ### Added 13 | 14 | - 'verify-freshness' will now filter down the target list using the same logic of building commands 15 | 16 | ### Other 17 | 18 | - Disable semver checks 19 | # Changelog 20 | All notable changes to this project will be documented in this file. 21 | 22 | The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), 23 | and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 24 | 25 | ## [Unreleased] 26 | 27 | ## [0.1.19](https://github.com/LukeMathWalker/cargo-px/compare/cargo-px-v0.1.18...cargo-px-v0.1.19) - 2025-07-21 28 | 29 | ### Fixed 30 | 31 | - Target selection based on the current working directory should use a path relative to the workspace root 32 | 33 | ## [0.1.18](https://github.com/LukeMathWalker/cargo-px/compare/cargo-px-v0.1.17...cargo-px-v0.1.18) - 2025-07-21 34 | 35 | ### Other 36 | 37 | - Fix release runner for windows 38 | 39 | ## [0.1.17](https://github.com/LukeMathWalker/cargo-px/compare/cargo-px-v0.1.16...cargo-px-v0.1.17) - 2025-07-21 40 | 41 | ### Added 42 | 43 | - Honor a subset of cargo's package filtering options ([#39](https://github.com/LukeMathWalker/cargo-px/pull/39)) 44 | 45 | ## [0.1.16](https://github.com/LukeMathWalker/cargo-px/compare/cargo-px-v0.1.15...cargo-px-v0.1.16) - 2024-07-08 46 | 47 | ### Other 48 | - update Cargo.lock dependencies 49 | 50 | ## [0.1.15](https://github.com/LukeMathWalker/cargo-px/compare/cargo-px-v0.1.14...cargo-px-v0.1.15) - 2024-02-23 51 | 52 | ### Other 53 | - Add `cargo px verify-freshness` ([#34](https://github.com/LukeMathWalker/cargo-px/pull/34)) 54 | 55 | ## [0.1.14](https://github.com/LukeMathWalker/cargo-px/compare/cargo-px-v0.1.13...cargo-px-v0.1.14) - 2023-12-09 56 | 57 | ### Other 58 | - Add `cargo` as an installer. 59 | - Update README to mention Oranda website. 60 | 61 | ## [0.1.13](https://github.com/LukeMathWalker/cargo-px/compare/cargo-px-v0.1.12...cargo-px-v0.1.13) - 2023-12-09 62 | 63 | ### Other 64 | - fix style assets for website 65 | 66 | ## [0.1.12](https://github.com/LukeMathWalker/cargo-px/compare/cargo-px-v0.1.11...cargo-px-v0.1.12) - 2023-12-09 67 | 68 | ### Other 69 | - Add oranda website. 
([#30](https://github.com/LukeMathWalker/cargo-px/pull/30)) 70 | 71 | ## [0.1.11](https://github.com/LukeMathWalker/cargo-px/compare/cargo-px-v0.1.10...cargo-px-v0.1.11) - 2023-12-09 72 | 73 | ### Other 74 | - Fix trigger for `cargo-dist` ([#27](https://github.com/LukeMathWalker/cargo-px/pull/27)) 75 | 76 | ## [0.1.10](https://github.com/LukeMathWalker/cargo-px/compare/cargo-px-v0.1.9...cargo-px-v0.1.10) - 2023-12-09 77 | 78 | ### Other 79 | - Fix trigger for cargo-dist. ([#25](https://github.com/LukeMathWalker/cargo-px/pull/25)) 80 | 81 | ## [0.1.9](https://github.com/LukeMathWalker/cargo-px/compare/cargo-px-v0.1.8...cargo-px-v0.1.9) - 2023-12-09 82 | 83 | ### Added 84 | - Pass along the `--quiet` flag to `cargo` invocations if `--quiet` had been specified in `cargo-px`'s invocation ([#20](https://github.com/LukeMathWalker/cargo-px/pull/20)) 85 | 86 | ### Other 87 | - Add cargo-dist release flow. ([#22](https://github.com/LukeMathWalker/cargo-px/pull/22)) 88 | 89 | ## [0.1.8](https://github.com/LukeMathWalker/cargo-px/compare/cargo-px-v0.1.7...cargo-px-v0.1.8) - 2023-12-03 90 | 91 | ### Fixed 92 | - `cargo-px` compiles on Windows ([#18](https://github.com/LukeMathWalker/cargo-px/pull/18)) 93 | 94 | ### Other 95 | - Always specify the package name when compiling or running a code generator ([#16](https://github.com/LukeMathWalker/cargo-px/pull/16)) 96 | 97 | ## [0.1.5](https://github.com/LukeMathWalker/cargo-px/compare/cargo-px-v0.1.4...cargo-px-v0.1.5) - 2023-06-20 98 | 99 | ### Other 100 | - Display the entire error chain when reporting an unexpected error. ([#10](https://github.com/LukeMathWalker/cargo-px/pull/10)) 101 | 102 | ## [0.1.4](https://github.com/LukeMathWalker/cargo-px/compare/cargo-px-v0.1.3...cargo-px-v0.1.4) - 2023-05-18 103 | 104 | ### Other 105 | - MacOS gatekeeper. 106 | - release (#7) 107 | - Add `cargo_px_env` (#6) 108 | - Add CI configuration (#1) 109 | - (cargo-release) version 0.1.2 110 | - Add instructions for CI. 111 | - Initial release 112 | 113 | ## [0.1.3](https://github.com/LukeMathWalker/cargo-px/compare/cargo-px-v0.1.2...cargo-px-v0.1.3) - 2023-05-05 114 | 115 | ### Other 116 | - Add `cargo_px_env` (#6) 117 | - Add CI configuration (#1) 118 | -------------------------------------------------------------------------------- /src/bin/main.rs: -------------------------------------------------------------------------------- 1 | use anyhow::Context; 2 | use cargo_px::{Shell, Verbosity}; 3 | use std::process::{exit, Command}; 4 | use tracing_subscriber::{fmt::format::FmtSpan, EnvFilter}; 5 | 6 | /// The name of the environment variable that can be used to enable (and configure) `tracing` 7 | /// output for `cargo px`. 8 | static TRACING_ENV_VAR: &str = "CARGO_PX_LOG"; 9 | 10 | fn init_tracing() -> Result<(), anyhow::Error> { 11 | // We don't want to show `tracing` data to users as they go about their business, so we 12 | // require them to explicitly opt-in to it. 
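// For example, `CARGO_PX_LOG=debug` enables debug-level diagnostics for a single invocation.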
13 | if std::env::var(TRACING_ENV_VAR).is_err() { 14 | return Ok(()); 15 | } 16 | let env_filter = EnvFilter::builder() 17 | .with_env_var(TRACING_ENV_VAR) 18 | .from_env()?; 19 | let timer = tracing_subscriber::fmt::time::uptime(); 20 | let subscriber = tracing_subscriber::fmt() 21 | .with_env_filter(env_filter) 22 | .with_level(false) 23 | .with_timer(timer) 24 | .with_span_events(FmtSpan::NEW | FmtSpan::CLOSE) 25 | .compact(); 26 | subscriber.init(); 27 | Ok(()) 28 | } 29 | 30 | fn main() { 31 | let mut shell = Shell::new(); 32 | if let Err(e) = init_tracing().context("Failed to initialize `tracing`'s subscriber") { 33 | let _ = display_error(&e, &mut shell); 34 | exit(1) 35 | } 36 | 37 | let cargo_path = std::env::var("CARGO").expect( 38 | "The `CARGO` environment variable was not set. \ 39 | This is unexpected: it should always be provided by `cargo` when \ 40 | invoking a custom sub-command, allowing `cargo-px` to correctly detect \ 41 | which toolchain should be used. \n\ 42 | Make sure that you are invoking `cargo-px` as a `cargo` sub-command: `cargo px [...]` rather \ 43 | than `cargo-px [...]` (notice the missing dash in the first one!). \n 44 | If you're invoking it as expected but it's showing this error message, please file a bug.", 45 | ); 46 | let mut args = std::env::args(); 47 | args.next(); // Skip the first argument, since it's always `cargo` 48 | let args: Vec<_> = args.collect(); 49 | // Skip the `px` argument. 50 | let forwarded_args = &args[1..]; 51 | 52 | let be_quiet = forwarded_args 53 | .iter() 54 | .any(|arg| arg == "--quiet" || arg == "-q"); 55 | if be_quiet { 56 | shell.set_verbosity(Verbosity::Quiet); 57 | } 58 | 59 | let mut has_codegened = false; 60 | let cwd = std::env::current_dir().expect("Failed to get current working directory"); 61 | if let Some(cargo_command) = forwarded_args.first() { 62 | // This is not a proxy for a `cargo` command, it is a `cargo-px` command. 63 | if "verify-freshness" == cargo_command.as_str() { 64 | if let Err(errors) = cargo_px::verify(&cargo_path, &cwd, &args, &mut shell) { 65 | for error in errors { 66 | let _ = display_error(&error, &mut shell); 67 | } 68 | exit(1); 69 | } 70 | 71 | exit(0); 72 | } 73 | 74 | // If the user is invoking a command whose outcome might be affected by code generation, 75 | // we need to perform code generation first. 
76 | if [ 77 | "build", "b", "test", "t", "check", "c", "run", "r", "doc", "d", "bench", "publish", 78 | ] 79 | .contains(&cargo_command.as_str()) 80 | { 81 | if let Err(errors) = cargo_px::codegen(&cargo_path, &cwd, &args, &mut shell) { 82 | for error in errors { 83 | let _ = display_error(&error, &mut shell); 84 | } 85 | exit(1); 86 | } 87 | has_codegened = true; 88 | } 89 | } 90 | 91 | if has_codegened { 92 | if let Some(cargo_command) = forwarded_args.first() { 93 | let _ = shell.status("Invoking", format!("`cargo {cargo_command}`")); 94 | } 95 | } 96 | 97 | let mut cmd = Command::new(cargo_path); 98 | cmd.args(forwarded_args); 99 | let status = match cmd.status().context("Failed to execute `cargo` command") { 100 | Ok(status) => status, 101 | Err(e) => { 102 | let _ = display_error(&e, &mut shell); 103 | exit(1); 104 | } 105 | }; 106 | 107 | exit(status.code().unwrap_or(1)); 108 | } 109 | 110 | fn display_error(error: &anyhow::Error, shell: &mut Shell) -> Result<(), anyhow::Error> { 111 | shell.error(error)?; 112 | for cause in error.chain().skip(1) { 113 | writeln!(shell.err(), "\n Caused by:")?; 114 | write!( 115 | shell.err(), 116 | "{}", 117 | textwrap::indent(&cause.to_string(), " ") 118 | )?; 119 | } 120 | Ok(()) 121 | } 122 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 |
2 |

cargo-px

3 |
4 | 5 | Cargo Power eXtensions 6 | 7 |
8 | 9 |
10 | 11 |
12 | 13 | 14 | Crates.io version 16 | 17 | 18 | 19 | Download 21 | 22 |
23 |
24 |
25 | 26 | Check out the [announcement post](https://lpalmieri.com/posts/cargo-px) to learn more about `cargo-px` and the problems it solves with respect to code generation in Rust projects. 27 | 28 | 29 |
30 | 31 | # Table of Contents 32 | 0. [How to install](#how-to-install) 33 | 1. [How to use](#how-to-use) 34 | 2. [Verify that the generated code is up-to-date](#verify-that-the-generated-code-is-up-to-date) 35 | 3. [License](#license) 36 | 4. [Known issues](#known-issues) 37 | 38 | ## How To Install 39 | 40 | Check out the instructions in the [release page](https://lukemathwalker.github.io/cargo-px/) 41 | 42 |
43 | 44 | ## How to use 45 | 46 | It is designed as a **`cargo` proxy**: instead of invoking `cargo <command>`, you go for `cargo px <command>`. For example, you go for `cargo px build --all-features` instead of `cargo build --all-features`. 47 | 48 | `cargo px` examines your workspace every time you invoke it. 49 | If any of your crates needs to be generated, it will invoke the respective code generators before forwarding the command and its arguments to `cargo`. 50 | 51 | `cargo px` leverages the [`metadata` section](https://doc.rust-lang.org/cargo/reference/manifest.html#the-metadata-table). 52 | In the crate that you want to see generated, you fill in the [`package.metadata.px.generate`] section as follows: 53 | 54 | ```toml 55 | [package] 56 | name = "..." 57 | version = "..." 58 | # [...] 59 | 60 | [package.metadata.px.generate] 61 | # The generator is a binary in the current workspace. 62 | # It's the only generator type we support at the moment. 63 | generator_type = "cargo_workspace_binary" 64 | # The name of the binary. 65 | generator_name = "bp" 66 | # The arguments to be passed to the binary. 67 | # It can be omitted if there are no arguments. 68 | generator_args = ["--quiet", "--profile", "optimised"] 69 | ``` 70 | 71 | `cargo-px` will detect the configuration and invoke `cargo run --bin bp -- --quiet --profile optimised` for you. 72 | If there are multiple crates that need to be code-generated, `cargo-px` will invoke the respective code generators in an order that takes into account the dependency graph (i.e. dependencies are always code-generated before their dependents). 73 | 74 | `cargo-px` will also set two environment variables for the code generator: 75 | 76 | - `CARGO_PX_GENERATED_PKG_MANIFEST_PATH`, the path to the `Cargo.toml` file of the crate that needs to be generated; 77 | - `CARGO_PX_WORKSPACE_ROOT_DIR`, the path to the `Cargo.toml` file that defines the current workspace (i.e. the one that contains the `[workspace]` section). 78 | 79 | You can use the [`cargo_px_env`](https://crates.io/crates/cargo_px_env) crate to retrieve and work with these environment variables. 80 | 81 | ## Verify that the generated code is up-to-date 82 | 83 | If you are committing the generated code, it might be desirable to verify in CI that it's up-to-date. 84 | You can do so by invoking `cargo px verify-freshness`. 85 | It will only work if you define a verifier for every code-generated project in your workspace: 86 | 87 | ```toml 88 | [package] 89 | name = "..." 90 | version = "..." 91 | # [...] 92 | 93 | [package.metadata.px.verify] 94 | # The verifier is a binary in the current workspace. 95 | # It's the only verifier type we support at the moment. 96 | verifier_type = "cargo_workspace_binary" 97 | # The name of the binary. 98 | verifier_name = "bp" 99 | # The arguments to be passed to the binary. 100 | # It can be omitted if there are no arguments. 101 | verifier_args = ["--verify"] 102 | ``` 103 | 104 | `cargo-px` will detect the configuration and invoke `cargo run --bin bp -- --verify` for you. 105 | The generated package is considered up-to-date if the verifier invocation returns a `0` status code. 106 | 107 | If there are multiple crates that need to be verified, `cargo-px` will invoke the respective verifiers 108 | in an order that takes into account the dependency graph (i.e. dependencies are always verified before their dependents).
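As a rough sketch (not taken from this repository), a verifier binary could look like the following, leaning on `cargo_px_env` to read the variables that `cargo px` sets (they are listed right after this example); the `is_up_to_date` helper is a placeholder for your own comparison logic:

```rust
use std::path::Path;
use std::process::ExitCode;

fn main() -> ExitCode {
    // Both variables are set by `cargo px` before it invokes the verifier.
    let generated_manifest = cargo_px_env::generated_pkg_manifest_path()
        .expect("CARGO_PX_GENERATED_PKG_MANIFEST_PATH should be set by `cargo px`");
    let workspace_root = cargo_px_env::workspace_root_dir()
        .expect("CARGO_PX_WORKSPACE_ROOT_DIR should be set by `cargo px`");

    if is_up_to_date(&workspace_root, &generated_manifest) {
        ExitCode::SUCCESS
    } else {
        eprintln!(
            "The generated crate at `{}` is out of date, re-run `cargo px build`",
            generated_manifest.display()
        );
        ExitCode::FAILURE
    }
}

// Placeholder: regenerate the code in memory and diff it against what is on disk.
fn is_up_to_date(_workspace_root: &Path, _generated_manifest: &Path) -> bool {
    todo!()
}
```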
109 | 110 | `cargo-px` will also set two environment variables for the verifier: 111 | 112 | - `CARGO_PX_GENERATED_PKG_MANIFEST_PATH`, the path to the `Cargo.toml` file of the generated crate; 113 | - `CARGO_PX_WORKSPACE_ROOT_DIR`, the path to the `Cargo.toml` file that defines the current workspace (i.e. the one that contains the `[workspace]` section). 114 | 115 | You can use the [`cargo_px_env`](https://crates.io/crates/cargo_px_env) crate to retrieve and work with these environment variables. 116 | 117 | ## Known issues 118 | 119 | ### macOS 120 | 121 | If you're using a macOS machine, you probably want to [disable gatekeeper notarisation for your terminal](https://apple.stackexchange.com/questions/403184/disable-gatekeeper-notarisation-check-without-disabling-sip/403185#403185). 122 | Quick guide: 123 | 124 | - Run 125 | ```bash 126 | spctl developer-mode enable-terminal 127 | ``` 128 | from your terminal 129 | - Then enable it in "Settings" -> "Security & Privacy" -> "Developer Tools" 130 | 131 | Every time you execute a binary for the first time, Apple [executes a request over the network to their servers](https://sigpipe.macromates.com/2020/macos-catalina-slow-by-design/). This becomes an issue for `cargo-px`, since it must compile your generator and then execute it: the generator binary is "new", therefore it incurs the penalty of this notarisation check. 132 | The magnitude of the delay depends on the quality of your connection as well as on the performance of Apple's servers. On a good Internet connection, I consistently observed 100-150ms delays, but delays in the order of seconds have been reported as well. 133 | Fun aside: if you're working without an Internet connection, Apple skips the check entirely and lets you execute unverified binaries without complaint. 134 | 135 | 136 | ## License 137 | 138 | Licensed under either of Apache License, Version 2.0 or MIT license at your option. 139 | Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in this crate by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions. 140 | -------------------------------------------------------------------------------- /src/codegen_unit.rs: -------------------------------------------------------------------------------- 1 | //! Logic to retrieve and validate codegen units defined in the current workspace. 2 | 3 | use crate::config::{GenerateConfig, ManifestMetadata, PxConfig, VerifyConfig}; 4 | use anyhow::Context; 5 | use guppy::{ 6 | graph::{BuildTargetKind, PackageGraph, PackageMetadata}, 7 | PackageId, 8 | }; 9 | 10 | /// A package that relies on `cargo px` for code generation. 11 | #[derive(Debug, Clone)] 12 | pub(crate) struct CodegenUnit<'graph> { 13 | /// The metadata of the package that requires code generation. 14 | pub(crate) package_metadata: PackageMetadata<'graph>, 15 | pub(crate) generator: BinaryInvocation<'graph>, 16 | pub(crate) verifier: Option<BinaryInvocation<'graph>>, 17 | } 18 | 19 | #[derive(Debug, Clone)] 20 | pub(crate) struct BinaryInvocation<'graph> { 21 | /// The binary to be invoked. 22 | /// It must be a binary defined within the same workspace. 23 | pub(crate) binary: WorkspaceBinary<'graph>, 24 | /// The arguments to be passed to the binary when invoked. 25 | pub(crate) args: Vec<String>, 26 | } 27 | 28 | impl<'graph> BinaryInvocation<'graph> { 29 | /// Build a `std::process::Command` that invokes the binary.
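///
/// The resulting invocation has the shape `cargo run --package <package> --bin <binary> [--quiet] -- <args...>`.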
30 | pub fn run_command(&self, cargo_path: &str, be_quiet: bool) -> std::process::Command { 31 | let mut cmd = self.binary.run_command(cargo_path, be_quiet); 32 | if !self.args.is_empty() { 33 | cmd.arg("--").args(&self.args); 34 | } 35 | cmd 36 | } 37 | 38 | /// Build a `std::process::Command` that builds the code generator for this 39 | /// codegen unit. 40 | pub fn build_command(&self, cargo_path: &str, be_quiet: bool) -> std::process::Command { 41 | self.binary.build_command(cargo_path, be_quiet) 42 | } 43 | } 44 | 45 | #[derive(Debug, Clone)] 46 | pub(crate) struct WorkspaceBinary<'graph> { 47 | /// The name of a binary defined within the current workspace. 48 | pub(crate) name: String, 49 | /// The package ID of the local package that defines the binary. 50 | pub(crate) package_id: &'graph PackageId, 51 | /// The metadata of the local package that defines the binary. 52 | pub(crate) package_metadata: PackageMetadata<'graph>, 53 | } 54 | 55 | impl<'graph> WorkspaceBinary<'graph> { 56 | /// Build a `std::process::Command` that invokes the binary. 57 | pub fn run_command(&self, cargo_path: &str, be_quiet: bool) -> std::process::Command { 58 | let mut cmd = std::process::Command::new(cargo_path); 59 | cmd.arg("run") 60 | .arg("--package") 61 | .arg(self.package_metadata.name()) 62 | .arg("--bin") 63 | .arg(&self.name); 64 | if be_quiet { 65 | cmd.arg("--quiet"); 66 | } 67 | cmd 68 | } 69 | 70 | /// Build a `std::process::Command` that builds the binary. 71 | pub fn build_command(&self, cargo_path: &str, be_quiet: bool) -> std::process::Command { 72 | let mut cmd = std::process::Command::new(cargo_path); 73 | cmd.arg("build") 74 | .arg("--package") 75 | .arg(self.package_metadata.name()) 76 | .arg("--bin") 77 | .arg(&self.name); 78 | if be_quiet { 79 | cmd.arg("--quiet"); 80 | } 81 | cmd 82 | } 83 | } 84 | 85 | impl<'graph> CodegenUnit<'graph> { 86 | /// Build a `CodegenUnit` from the given `px_config` and `pkg_metadata`. 87 | /// 88 | /// It returns an error if the `px_config` points to a binary that is not defined 89 | /// in the same workspace. 
90 | pub(crate) fn new( 91 | px_config: PxConfig, 92 | pkg_metadata: PackageMetadata<'graph>, 93 | pkg_graph: &'graph PackageGraph, 94 | ) -> Result, anyhow::Error> { 95 | let GenerateConfig::CargoWorkspaceBinary(gen_config) = px_config.generate; 96 | 97 | let mut generator_package_id = None; 98 | for workspace_member in pkg_graph.workspace().iter() { 99 | if workspace_member.id() == pkg_metadata.id() { 100 | continue; 101 | } 102 | 103 | for target in workspace_member.build_targets() { 104 | if target.kind() == BuildTargetKind::Binary 105 | && target.name() == gen_config.generator_name 106 | { 107 | generator_package_id = Some(workspace_member.id()); 108 | break; 109 | } 110 | } 111 | } 112 | 113 | let Some(generator_package_id) = generator_package_id else { 114 | anyhow::bail!( 115 | "There is no binary named `{}` in the workspace, but it's listed as the generator name for package `{}`", 116 | gen_config.generator_name, 117 | pkg_metadata.name(), 118 | ); 119 | }; 120 | let generator_package_metadata = 121 | pkg_graph.metadata(generator_package_id).with_context(|| { 122 | format!( 123 | "Failed to retrieve the metadata of the package that defines `{}`, \ 124 | the code generator binary", 125 | gen_config.generator_name 126 | ) 127 | })?; 128 | let generator = BinaryInvocation { 129 | binary: WorkspaceBinary { 130 | name: gen_config.generator_name, 131 | package_id: generator_package_id, 132 | package_metadata: generator_package_metadata, 133 | }, 134 | args: gen_config.generator_args, 135 | }; 136 | 137 | let mut verifier = None; 138 | if let Some(VerifyConfig::CargoWorkspaceBinary(verify_config)) = px_config.verify { 139 | let mut verifier_package_id = None; 140 | for workspace_member in pkg_graph.workspace().iter() { 141 | if workspace_member.id() == pkg_metadata.id() { 142 | continue; 143 | } 144 | 145 | for target in workspace_member.build_targets() { 146 | if target.kind() == BuildTargetKind::Binary 147 | && target.name() == verify_config.verifier_name 148 | { 149 | verifier_package_id = Some(workspace_member.id()); 150 | break; 151 | } 152 | } 153 | } 154 | 155 | let Some(verifier_package_id) = verifier_package_id else { 156 | anyhow::bail!( 157 | "There is no binary named `{}` in the workspace, but it's listed as the verifier name for package `{}`", 158 | verify_config.verifier_name, 159 | pkg_metadata.name(), 160 | ); 161 | }; 162 | let verifier_package_metadata = 163 | pkg_graph.metadata(verifier_package_id).with_context(|| { 164 | format!( 165 | "Failed to retrieve the metadata of the package that defines `{}`, \ 166 | the verifier binary", 167 | verify_config.verifier_name 168 | ) 169 | })?; 170 | verifier = Some(BinaryInvocation { 171 | binary: WorkspaceBinary { 172 | name: verify_config.verifier_name, 173 | package_id: verifier_package_id, 174 | package_metadata: verifier_package_metadata, 175 | }, 176 | args: verify_config.verifier_args, 177 | }); 178 | } 179 | 180 | Ok(CodegenUnit { 181 | package_metadata: pkg_metadata, 182 | generator, 183 | verifier, 184 | }) 185 | } 186 | } 187 | 188 | /// Retrieve all packages in the current workspace that require code generation. 
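///
/// Configuration errors are accumulated across the whole workspace and returned together, so a single malformed manifest does not mask problems in other packages.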
189 | pub(crate) fn extract_codegen_units( 190 | pkg_graph: &PackageGraph, 191 | ) -> Result<Vec<CodegenUnit>, Vec<anyhow::Error>> { 192 | let workspace = pkg_graph.workspace(); 193 | let mut codegen_units = vec![]; 194 | let mut errors = vec![]; 195 | for p_metadata in workspace.iter() { 196 | let raw_metadata = p_metadata.metadata_table().to_owned(); 197 | match serde_json::from_value::<Option<ManifestMetadata>>(raw_metadata) { 198 | Ok(metadata) => { 199 | let Some(metadata) = metadata else { 200 | continue; 201 | }; 202 | let Some(px_config) = metadata.px else { 203 | continue; 204 | }; 205 | match CodegenUnit::new(px_config, p_metadata, pkg_graph) { 206 | Ok(codegen_unit) => codegen_units.push(codegen_unit), 207 | Err(e) => errors.push(e), 208 | } 209 | } 210 | Err(e) => { 211 | let e = anyhow::anyhow!(e).context(format!( 212 | "Failed to deserialize `cargo px`'s codegen configuration from the manifest of `{}`", 213 | p_metadata.name(), 214 | )); 215 | errors.push(e) 216 | } 217 | }; 218 | } 219 | if !errors.is_empty() { 220 | Err(errors) 221 | } else { 222 | Ok(codegen_units) 223 | } 224 | } 225 | -------------------------------------------------------------------------------- /src/codegen_plan.rs: -------------------------------------------------------------------------------- 1 | use std::fmt::Write; 2 | 3 | use ahash::{HashMap, HashMapExt, HashSet, HashSetExt}; 4 | use guppy::{ 5 | graph::{DependencyDirection, PackageGraph}, 6 | PackageId, 7 | }; 8 | use petgraph::{ 9 | stable_graph::{IndexType, NodeIndex, StableDiGraph}, 10 | visit::DfsPostOrder, 11 | Direction::Incoming, 12 | }; 13 | 14 | use crate::codegen_unit::CodegenUnit; 15 | 16 | /// Return a codegen plan: a vector of codegen units in an order that takes into account 17 | /// their respective dependency relationships—i.e. you can safely invoke them in order 18 | /// and each codegen unit will be generated after all the codegen units it depends on. 19 | pub(crate) fn codegen_plan<'graph>( 20 | codegen_units: Vec<CodegenUnit<'graph>>, 21 | package_graph: &'graph PackageGraph, 22 | ) -> Result<Vec<CodegenUnit<'graph>>, Vec<anyhow::Error>> { 23 | Ok(AugmentedPackageGraph::new(codegen_units, package_graph)?.codegen_plan()) 24 | } 25 | 26 | #[derive(Debug)] 27 | /// A dependency graph augmented with information about the code generation units. 28 | /// In particular, an `A -> B` edge means that `A` depends on `B` via a non-dev dependency. 29 | /// 30 | /// The graph includes all packages defined in the workspace and packages that depend on 31 | /// a workspace crate. 32 | /// It is therefore likely to be much smaller than the [`PackageGraph`] it was built from. 33 | struct AugmentedPackageGraph<'graph> { 34 | /// The dependency graph. 35 | dep_graph: StableDiGraph<PackageId, EdgeMetadata<'graph>>, 36 | } 37 | 38 | #[derive(Debug)] 39 | enum EdgeMetadata<'graph> { 40 | DependsOn, 41 | IsGeneratedBy(CodegenUnit<'graph>), 42 | } 43 | 44 | impl<'graph> AugmentedPackageGraph<'graph> { 45 | fn new( 46 | codegen_units: Vec<CodegenUnit<'graph>>, 47 | package_graph: &'graph PackageGraph, 48 | ) -> Result<Self, Vec<anyhow::Error>> { 49 | // A map from package ID to node ID in the dependency graph.
50 | let mut pkg_id2node_id = HashMap::new(); 51 | let mut processed_pkg_ids = HashSet::new(); 52 | let mut dep_graph = 53 | petgraph::stable_graph::StableDiGraph::<PackageId, EdgeMetadata<'graph>>::new(); 54 | let mut to_be_visited = package_graph.workspace().member_ids().collect::<Vec<_>>(); 55 | while let Some(pkg_id) = to_be_visited.pop() { 56 | if processed_pkg_ids.contains(&pkg_id) { 57 | continue; 58 | } 59 | 60 | let node_id = if pkg_id2node_id.contains_key(pkg_id) { 61 | pkg_id2node_id[pkg_id] 62 | } else { 63 | let node_id = dep_graph.add_node(pkg_id.clone()); 64 | pkg_id2node_id.insert(pkg_id.clone(), node_id); 65 | node_id 66 | }; 67 | 68 | let pkg_metadata = package_graph.metadata(pkg_id).unwrap(); 69 | 70 | // We only care about the portion of the bigger package graph that includes the local 71 | // workspace crates. 72 | // Therefore we look for _reverse_ dependencies here—i.e. we avoid adding any package 73 | // to the graph that does not depend on a workspace crate. 74 | let pkg_deps = pkg_metadata.direct_links_directed(DependencyDirection::Reverse); 75 | for dep in pkg_deps { 76 | if dep.dev_only() { 77 | continue; 78 | } 79 | let dep_pkg_id = dep.from().id(); 80 | 81 | let dep_node_id = if pkg_id2node_id.contains_key(dep_pkg_id) { 82 | pkg_id2node_id[dep_pkg_id] 83 | } else { 84 | let node_id = dep_graph.add_node(dep_pkg_id.clone()); 85 | pkg_id2node_id.insert(dep_pkg_id.clone(), node_id); 86 | node_id 87 | }; 88 | 89 | dep_graph.update_edge(dep_node_id, node_id, EdgeMetadata::DependsOn); 90 | 91 | if !processed_pkg_ids.contains(&dep_pkg_id) { 92 | to_be_visited.push(dep_pkg_id); 93 | } 94 | } 95 | 96 | processed_pkg_ids.insert(pkg_id); 97 | } 98 | 99 | // Add edges from the generator package to the respective codegen units. 100 | for codegen_unit in codegen_units { 101 | let target_node_id = pkg_id2node_id[codegen_unit.generator.binary.package_id]; 102 | let codegen_node_id = pkg_id2node_id[codegen_unit.package_metadata.id()]; 103 | dep_graph.update_edge( 104 | codegen_node_id, 105 | target_node_id, 106 | EdgeMetadata::IsGeneratedBy(codegen_unit), 107 | ); 108 | } 109 | 110 | // Cyclic dependencies are not allowed. 111 | let cycles = find_cycles(&dep_graph); 112 | if !cycles.is_empty() { 113 | return Err(cycles 114 | .into_iter() 115 | .map(|cycle| cyclic_dependency_error(&cycle, &dep_graph)) 116 | .collect()); 117 | } 118 | 119 | Ok(Self { dep_graph }) 120 | } 121 | 122 | /// Returns the set of binary invocations that need to be executed in order to build the 123 | /// codegen units. 124 | /// 125 | /// The returned set is ordered such that the codegen units can be built in an order that 126 | /// takes into account their dependency relationships. 127 | pub fn codegen_plan(&self) -> Vec<CodegenUnit<'graph>> { 128 | let mut codegen_plan = Vec::new(); 129 | let mut sources = self.dep_graph.externals(Incoming).collect::<Vec<_>>(); 130 | // Always true since the graph is acyclic.
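// (a non-empty acyclic graph always has at least one node with no incoming edges)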
131 | assert!(!sources.is_empty()); 132 | let source_seed = sources.pop().unwrap(); 133 | let mut dfs = DfsPostOrder::new(&self.dep_graph, source_seed); 134 | loop { 135 | while let Some(node_index) = dfs.next(&self.dep_graph) { 136 | let dependent_edges = self.dep_graph.edges_directed(node_index, Incoming); 137 | for dependent_edge in dependent_edges { 138 | if let EdgeMetadata::IsGeneratedBy(codegen_unit) = dependent_edge.weight() { 139 | codegen_plan.push(codegen_unit.to_owned()); 140 | } 141 | } 142 | } 143 | 144 | if let Some(next_source_seed) = sources.pop() { 145 | dfs.move_to(next_source_seed); 146 | } else { 147 | break; 148 | } 149 | } 150 | 151 | codegen_plan 152 | } 153 | } 154 | 155 | fn cyclic_dependency_error( 156 | cycle: &[NodeIndex], 157 | graph: &StableDiGraph, 158 | ) -> anyhow::Error { 159 | let mut error_msg = "There is a cyclic dependency in your workspace: this is not allowed!\n\ 160 | The cycle looks like this:" 161 | .to_string(); 162 | for (i, node_id) in cycle.iter().enumerate() { 163 | writeln!(&mut error_msg).unwrap(); 164 | let dependent_id = if i == 0 { 165 | *cycle.last().unwrap() 166 | } else { 167 | cycle[i - 1] 168 | }; 169 | let dependent = graph[dependent_id].repr(); 170 | let edge_id = graph.find_edge(dependent_id, *node_id).unwrap(); 171 | let relationship = graph.edge_weight(edge_id).unwrap(); 172 | let relationship = match relationship { 173 | EdgeMetadata::DependsOn => "depends on", 174 | EdgeMetadata::IsGeneratedBy(_) => "is generated by", 175 | }; 176 | let dependency = graph[*node_id].repr(); 177 | write!( 178 | &mut error_msg, 179 | "- `{dependent}` {relationship} `{dependency}`", 180 | ) 181 | .unwrap(); 182 | } 183 | anyhow::anyhow!(error_msg) 184 | } 185 | 186 | /// Return all the cycles in the graph. 187 | /// 188 | /// It's an empty vector if the graph is acyclic. 189 | fn find_cycles(graph: &StableDiGraph) -> Vec>> 190 | where 191 | Ix: IndexType, 192 | { 193 | fn dfs( 194 | node_index: NodeIndex, 195 | graph: &StableDiGraph, 196 | visited: &mut HashSet>, 197 | stack: &mut Vec>, 198 | cycles: &mut Vec>>, 199 | ) where 200 | Ix: IndexType, 201 | { 202 | visited.insert(node_index); 203 | stack.push(node_index); 204 | 205 | for neighbour_index in graph.neighbors_directed(node_index, petgraph::Direction::Outgoing) { 206 | if !visited.contains(&neighbour_index) { 207 | dfs(neighbour_index, graph, visited, stack, cycles); 208 | } else if let Some(cycle_start) = stack.iter().position(|&x| x == neighbour_index) { 209 | let cycle = stack[cycle_start..].to_vec(); 210 | cycles.push(cycle); 211 | } 212 | } 213 | 214 | stack.pop(); 215 | } 216 | 217 | let mut visited = HashSet::new(); 218 | let mut stack = Vec::new(); 219 | let mut cycles = Vec::new(); 220 | 221 | for node_index in graph.node_indices() { 222 | if !visited.contains(&node_index) { 223 | dfs(node_index, graph, &mut visited, &mut stack, &mut cycles); 224 | } 225 | } 226 | 227 | cycles 228 | } 229 | -------------------------------------------------------------------------------- /LICENSE-APACHE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 
11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 
134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | use std::path::Path; 2 | use std::time::Instant; 3 | 4 | use anyhow::Context; 5 | use codegen_unit::CodegenUnit; 6 | use guppy::graph::{PackageGraph, PackageMetadata}; 7 | use targets::determine_targets; 8 | 9 | use crate::codegen_unit::{extract_codegen_units, BinaryInvocation}; 10 | 11 | mod codegen_plan; 12 | mod codegen_unit; 13 | mod config; 14 | mod shell; 15 | mod targets; 16 | 17 | pub use shell::{Shell, Verbosity}; 18 | 19 | /// Find all codegen units in the current workspace and perform code generation for each of them, 20 | /// in an order that takes into account their respective dependency relationships. 
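///
/// # Example
///
/// A minimal, illustrative sketch of how a caller might drive code generation for the
/// current workspace. It assumes the library target is named `cargo_px` and that a
/// `cargo` binary is available on `PATH`:
///
/// ```rust,no_run
/// use std::path::Path;
///
/// // Illustrative only: forward everything after `cargo px <subcommand>` as-is.
/// let args: Vec<String> = std::env::args().skip(2).collect();
/// let mut shell = cargo_px::Shell::new();
/// if let Err(errors) = cargo_px::codegen("cargo", Path::new("."), &args, &mut shell) {
///     for error in errors {
///         let _ = shell.error(format!("{error:?}"));
///     }
/// }
/// ```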
21 | #[tracing::instrument(level = tracing::Level::DEBUG, name = "Generate crates", skip(cargo_path))] 22 | pub fn codegen( 23 | cargo_path: &str, 24 | working_directory: &Path, 25 | args: &[String], 26 | shell: &mut Shell, 27 | ) -> Result<(), Vec> { 28 | let package_graph = package_graph(cargo_path, shell).map_err(|e| vec![e])?; 29 | let codegen_plan = compute_filtered_codegen_plan(working_directory, args, &package_graph)?; 30 | 31 | let workspace_dir = package_graph 32 | .workspace() 33 | .root() 34 | .canonicalize() 35 | .context("Failed to get the canonical path to the root directory of this workspace") 36 | .map_err(|e| vec![e])?; 37 | for unit in codegen_plan { 38 | generate_crate(&unit, cargo_path, &workspace_dir, shell).map_err(|e| vec![e])?; 39 | } 40 | 41 | Ok(()) 42 | } 43 | 44 | /// Find all codegen units in the current workspace and verify that the associated projects 45 | /// are fresh—i.e. they don't need to be regenerated. 46 | #[tracing::instrument(level = tracing::Level::DEBUG, name = "Verify freshness", skip(cargo_path))] 47 | pub fn verify( 48 | cargo_path: &str, 49 | working_directory: &Path, 50 | args: &[String], 51 | shell: &mut Shell, 52 | ) -> Result<(), Vec> { 53 | let package_graph = package_graph(cargo_path, shell).map_err(|e| vec![e])?; 54 | let codegen_plan = compute_filtered_codegen_plan(working_directory, args, &package_graph)?; 55 | 56 | let workspace_dir = package_graph 57 | .workspace() 58 | .root() 59 | .canonicalize() 60 | .context("Failed to get the canonical path to the root directory of this workspace") 61 | .map_err(|e| vec![e])?; 62 | for unit in codegen_plan { 63 | let Some(verifier) = &unit.verifier else { 64 | return Err(vec![anyhow::anyhow!( 65 | "`{}` doesn't define a verifier, therefore we can't verify if it's fresh", 66 | unit.package_metadata.name() 67 | )]); 68 | }; 69 | verify_crate( 70 | verifier, 71 | &unit.package_metadata, 72 | cargo_path, 73 | &workspace_dir, 74 | shell, 75 | ) 76 | .map_err(|e| vec![e])?; 77 | } 78 | 79 | Ok(()) 80 | } 81 | 82 | fn compute_filtered_codegen_plan<'a>( 83 | working_directory: &Path, 84 | args: &[String], 85 | package_graph: &'a PackageGraph, 86 | ) -> Result>, Vec> { 87 | let mut codegen_units = extract_codegen_units(package_graph)?; 88 | 89 | if tracing::event_enabled!(tracing::Level::DEBUG) { 90 | let codegen_unit_names: Vec<_> = codegen_units 91 | .iter() 92 | .map(|unit| unit.package_metadata.name().to_string()) 93 | .collect(); 94 | tracing::debug!( 95 | ?codegen_unit_names, 96 | "Determined the list of codegen units in the current workspace" 97 | ); 98 | } 99 | 100 | let targets = determine_targets(args, working_directory, package_graph); 101 | 102 | if tracing::event_enabled!(tracing::Level::DEBUG) { 103 | let target_names: Vec<_> = targets 104 | .iter() 105 | .map(|id| { 106 | package_graph 107 | .metadata(id) 108 | .expect("Unknown package id") 109 | .name() 110 | .to_owned() 111 | }) 112 | .collect(); 113 | tracing::debug!( 114 | ?target_names, 115 | "Determined the list of target packages for this invocation" 116 | ); 117 | } 118 | 119 | // Keep only the codegen units that appear in the dependency graph of the targets we've chosen 120 | if !targets.is_empty() { 121 | let mut depends_cache = package_graph.new_depends_cache(); 122 | codegen_units.retain(|unit| { 123 | targets.iter().any(|target_id| { 124 | unit.package_metadata.id() == target_id 125 | || depends_cache 126 | .depends_on(target_id, unit.package_metadata.id()) 127 | .unwrap_or(false) 128 | }) 129 | }); 130 | } 131 | 132 | if 
tracing::event_enabled!(tracing::Level::DEBUG) { 133 | let codegen_unit_names: Vec<_> = codegen_units 134 | .iter() 135 | .map(|unit| unit.package_metadata.name().to_string()) 136 | .collect(); 137 | tracing::debug!( 138 | ?codegen_unit_names, 139 | "Retaining only the following codegen units for this invocation, based on the target packages" 140 | ); 141 | } 142 | 143 | codegen_plan::codegen_plan(codegen_units, package_graph) 144 | } 145 | 146 | #[tracing::instrument(name = "Verify crate", skip_all, fields(crate_name = %package_metadata.name()))] 147 | fn verify_crate( 148 | verifier: &BinaryInvocation, 149 | package_metadata: &PackageMetadata, 150 | cargo_path: &str, 151 | workspace_path: &Path, 152 | shell: &mut Shell, 153 | ) -> Result<(), anyhow::Error> { 154 | let be_quiet = shell.verbosity() == Verbosity::Quiet; 155 | 156 | // Compile verifier 157 | { 158 | let timer = Instant::now(); 159 | let _ = shell.status( 160 | "Compiling", 161 | format!( 162 | "`{}`, the verifier for `{}`", 163 | verifier.binary.name, 164 | package_metadata.name() 165 | ), 166 | ); 167 | let mut cmd = verifier.build_command(cargo_path, be_quiet); 168 | cmd.env("CARGO_PX_WORKSPACE_ROOT_DIR", workspace_path) 169 | .stdout(std::process::Stdio::inherit()) 170 | .stderr(std::process::Stdio::inherit()); 171 | 172 | let err_msg = || { 173 | format!( 174 | "Failed to compile `{}`, the verifier for `{}`", 175 | verifier.binary.name, 176 | package_metadata.name() 177 | ) 178 | }; 179 | 180 | let status = cmd.status().with_context(err_msg)?; 181 | if !status.success() { 182 | anyhow::bail!(err_msg()); 183 | } 184 | let _ = shell.status( 185 | "Compiled", 186 | format!( 187 | "`{}`, the verifier for `{}`, in {:.3}s", 188 | verifier.binary.name, 189 | package_metadata.name(), 190 | timer.elapsed().as_secs_f32() 191 | ), 192 | ); 193 | } 194 | 195 | // Invoke verifier 196 | { 197 | let timer = Instant::now(); 198 | let _ = shell.status("Verifying", format!("`{}`", package_metadata.name())); 199 | let mut cmd = verifier.run_command(cargo_path, be_quiet); 200 | 201 | cmd.env( 202 | "CARGO_PX_GENERATED_PKG_MANIFEST_PATH", 203 | package_metadata.manifest_path(), 204 | ) 205 | .env("CARGO_PX_WORKSPACE_ROOT_DIR", workspace_path) 206 | .stdout(std::process::Stdio::inherit()) 207 | .stderr(std::process::Stdio::inherit()); 208 | 209 | let err_msg = || { 210 | format!( 211 | "Failed to run `{}`, the verifier for `{}`", 212 | verifier.binary.name, 213 | package_metadata.name() 214 | ) 215 | }; 216 | 217 | let status = cmd.status().with_context(err_msg)?; 218 | if !status.success() { 219 | anyhow::bail!(err_msg()); 220 | } 221 | let _ = shell.status( 222 | "Verified", 223 | format!( 224 | "`{}` in {:.3}s", 225 | package_metadata.name(), 226 | timer.elapsed().as_secs_f32() 227 | ), 228 | ); 229 | } 230 | Ok(()) 231 | } 232 | 233 | #[tracing::instrument(name = "Generate crate", skip_all, fields(crate_name = %unit.package_metadata.name()))] 234 | fn generate_crate( 235 | unit: &codegen_unit::CodegenUnit, 236 | cargo_path: &str, 237 | workspace_path: &Path, 238 | shell: &mut Shell, 239 | ) -> Result<(), anyhow::Error> { 240 | let be_quiet = shell.verbosity() == Verbosity::Quiet; 241 | 242 | // Compile generator 243 | { 244 | let timer = Instant::now(); 245 | let _ = shell.status( 246 | "Compiling", 247 | format!( 248 | "`{}`, the code generator for `{}`", 249 | unit.generator.binary.name, 250 | unit.package_metadata.name() 251 | ), 252 | ); 253 | let mut cmd = unit.generator.build_command(cargo_path, be_quiet); 254 | 
cmd.env("CARGO_PX_WORKSPACE_ROOT_DIR", workspace_path) 255 | .stdout(std::process::Stdio::inherit()) 256 | .stderr(std::process::Stdio::inherit()); 257 | 258 | let err_msg = || { 259 | format!( 260 | "Failed to compile `{}`, the code generator for `{}`", 261 | unit.generator.binary.name, 262 | unit.package_metadata.name() 263 | ) 264 | }; 265 | 266 | let status = cmd.status().with_context(err_msg)?; 267 | if !status.success() { 268 | anyhow::bail!(err_msg()); 269 | } 270 | let _ = shell.status( 271 | "Compiled", 272 | format!( 273 | "`{}`, the code generator for `{}`, in {:.3}s", 274 | unit.generator.binary.name, 275 | unit.package_metadata.name(), 276 | timer.elapsed().as_secs_f32() 277 | ), 278 | ); 279 | } 280 | 281 | // Invoke generator 282 | { 283 | let timer = Instant::now(); 284 | let _ = shell.status("Generating", format!("`{}`", unit.package_metadata.name())); 285 | let mut cmd = unit.generator.run_command(cargo_path, be_quiet); 286 | 287 | cmd.env( 288 | "CARGO_PX_GENERATED_PKG_MANIFEST_PATH", 289 | unit.package_metadata.manifest_path(), 290 | ) 291 | .env("CARGO_PX_WORKSPACE_ROOT_DIR", workspace_path) 292 | .stdout(std::process::Stdio::inherit()) 293 | .stderr(std::process::Stdio::inherit()); 294 | 295 | let err_msg = || { 296 | format!( 297 | "Failed to run `{}`, the code generator for package `{}`", 298 | unit.generator.binary.name, 299 | unit.package_metadata.name() 300 | ) 301 | }; 302 | 303 | let status = cmd.status().with_context(err_msg)?; 304 | if !status.success() { 305 | anyhow::bail!(err_msg()); 306 | } 307 | let _ = shell.status( 308 | "Generated", 309 | format!( 310 | "`{}` in {:.3}s", 311 | unit.package_metadata.name(), 312 | timer.elapsed().as_secs_f32() 313 | ), 314 | ); 315 | } 316 | Ok(()) 317 | } 318 | 319 | /// Build the package graph for the current workspace. 320 | #[tracing::instrument(name = "Compute package graph", skip_all)] 321 | fn package_graph(cargo_path: &str, shell: &mut Shell) -> Result { 322 | let timer = Instant::now(); 323 | let _ = shell.status("Computing", "package graph"); 324 | let mut metadata_cmd = guppy::MetadataCommand::new(); 325 | metadata_cmd.cargo_path(cargo_path); 326 | let package_graph = metadata_cmd 327 | .exec() 328 | .context("Failed to execute `cargo metadata`")? 329 | .build_graph() 330 | .context("Failed to build a package graph starting from the output of `cargo metadata`"); 331 | let _ = shell.status( 332 | "Computed", 333 | format!("package graph in {:.3}s", timer.elapsed().as_secs_f32()), 334 | ); 335 | package_graph 336 | } 337 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | # This file was autogenerated by dist: https://opensource.axo.dev/cargo-dist/ 2 | # 3 | # Copyright 2022-2024, axodotdev 4 | # SPDX-License-Identifier: MIT or Apache-2.0 5 | # 6 | # CI that: 7 | # 8 | # * checks for a Git Tag that looks like a release 9 | # * builds artifacts with dist (archives, installers, hashes) 10 | # * uploads those artifacts to temporary workflow zip 11 | # * on success, uploads the artifacts to a GitHub Release 12 | # 13 | # Note that the GitHub Release will be created with a generated 14 | # title/body based on your changelogs. 15 | 16 | name: Release 17 | permissions: 18 | "contents": "write" 19 | 20 | # This task will run whenever you push a git tag that looks like a version 21 | # like "1.0.0", "v0.1.0-prerelease.1", "my-app/0.1.0", "releases/v1.0.0", etc. 
22 | # Various formats will be parsed into a VERSION and an optional PACKAGE_NAME, where 23 | # PACKAGE_NAME must be the name of a Cargo package in your workspace, and VERSION 24 | # must be a Cargo-style SemVer Version (must have at least major.minor.patch). 25 | # 26 | # If PACKAGE_NAME is specified, then the announcement will be for that 27 | # package (erroring out if it doesn't have the given version or isn't dist-able). 28 | # 29 | # If PACKAGE_NAME isn't specified, then the announcement will be for all 30 | # (dist-able) packages in the workspace with that version (this mode is 31 | # intended for workspaces with only one dist-able package, or with all dist-able 32 | # packages versioned/released in lockstep). 33 | # 34 | # If you push multiple tags at once, separate instances of this workflow will 35 | # spin up, creating an independent announcement for each one. However, GitHub 36 | # will hard limit this to 3 tags per commit, as it will assume more tags is a 37 | # mistake. 38 | # 39 | # If there's a prerelease-style suffix to the version, then the release(s) 40 | # will be marked as a prerelease. 41 | on: 42 | pull_request: 43 | push: 44 | tags: 45 | - '**[0-9]+.[0-9]+.[0-9]+*' 46 | 47 | jobs: 48 | # Run 'dist plan' (or host) to determine what tasks we need to do 49 | plan: 50 | runs-on: "ubuntu-latest" 51 | outputs: 52 | val: ${{ steps.plan.outputs.manifest }} 53 | tag: ${{ !github.event.pull_request && github.ref_name || '' }} 54 | tag-flag: ${{ !github.event.pull_request && format('--tag={0}', github.ref_name) || '' }} 55 | publishing: ${{ !github.event.pull_request }} 56 | env: 57 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} 58 | steps: 59 | - uses: actions/checkout@v4 60 | with: 61 | submodules: recursive 62 | - name: Install dist 63 | # we specify bash to get pipefail; it guards against the `curl` command 64 | # failing. otherwise `sh` won't catch that `curl` returned non-0 65 | shell: bash 66 | run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.28.0/cargo-dist-installer.sh | sh" 67 | - name: Cache dist 68 | uses: actions/upload-artifact@v4 69 | with: 70 | name: cargo-dist-cache 71 | path: ~/.cargo/bin/dist 72 | # sure would be cool if github gave us proper conditionals... 73 | # so here's a doubly-nested ternary-via-truthiness to try to provide the best possible 74 | # functionality based on whether this is a pull_request, and whether it's from a fork. 75 | # (PRs run on the *source* but secrets are usually on the *target* -- that's *good* 76 | # but also really annoying to build CI around when it needs secrets to work right.) 77 | - id: plan 78 | run: | 79 | dist ${{ (!github.event.pull_request && format('host --steps=create --tag={0}', github.ref_name)) || 'plan' }} --output-format=json > plan-dist-manifest.json 80 | echo "dist ran successfully" 81 | cat plan-dist-manifest.json 82 | echo "manifest=$(jq -c "." 
plan-dist-manifest.json)" >> "$GITHUB_OUTPUT" 83 | - name: "Upload dist-manifest.json" 84 | uses: actions/upload-artifact@v4 85 | with: 86 | name: artifacts-plan-dist-manifest 87 | path: plan-dist-manifest.json 88 | 89 | # Build and packages all the platform-specific things 90 | build-local-artifacts: 91 | name: build-local-artifacts (${{ join(matrix.targets, ', ') }}) 92 | # Let the initial task tell us to not run (currently very blunt) 93 | needs: 94 | - plan 95 | if: ${{ fromJson(needs.plan.outputs.val).ci.github.artifacts_matrix.include != null && (needs.plan.outputs.publishing == 'true' || fromJson(needs.plan.outputs.val).ci.github.pr_run_mode == 'upload') }} 96 | strategy: 97 | fail-fast: false 98 | # Target platforms/runners are computed by dist in create-release. 99 | # Each member of the matrix has the following arguments: 100 | # 101 | # - runner: the github runner 102 | # - dist-args: cli flags to pass to dist 103 | # - install-dist: expression to run to install dist on the runner 104 | # 105 | # Typically there will be: 106 | # - 1 "global" task that builds universal installers 107 | # - N "local" tasks that build each platform's binaries and platform-specific installers 108 | matrix: ${{ fromJson(needs.plan.outputs.val).ci.github.artifacts_matrix }} 109 | runs-on: ${{ matrix.runner }} 110 | container: ${{ matrix.container && matrix.container.image || null }} 111 | env: 112 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} 113 | BUILD_MANIFEST_NAME: target/distrib/${{ join(matrix.targets, '-') }}-dist-manifest.json 114 | steps: 115 | - name: enable windows longpaths 116 | run: | 117 | git config --global core.longpaths true 118 | - uses: actions/checkout@v4 119 | with: 120 | submodules: recursive 121 | - name: Install Rust non-interactively if not already installed 122 | if: ${{ matrix.container }} 123 | run: | 124 | if ! command -v cargo > /dev/null 2>&1; then 125 | curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y 126 | echo "$HOME/.cargo/bin" >> $GITHUB_PATH 127 | fi 128 | - name: Install dist 129 | run: ${{ matrix.install_dist.run }} 130 | # Get the dist-manifest 131 | - name: Fetch local artifacts 132 | uses: actions/download-artifact@v4 133 | with: 134 | pattern: artifacts-* 135 | path: target/distrib/ 136 | merge-multiple: true 137 | - name: Install dependencies 138 | run: | 139 | ${{ matrix.packages_install }} 140 | - name: Build artifacts 141 | run: | 142 | # Actually do builds and make zips and whatnot 143 | dist build ${{ needs.plan.outputs.tag-flag }} --print=linkage --output-format=json ${{ matrix.dist_args }} > dist-manifest.json 144 | echo "dist ran successfully" 145 | - id: cargo-dist 146 | name: Post-build 147 | # We force bash here just because github makes it really hard to get values up 148 | # to "real" actions without writing to env-vars, and writing to env-vars has 149 | # inconsistent syntax between shell and powershell. 
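        # The run block below exposes the list of built artifact paths as a step output:
        # it opens a multi-line `paths<<EOF` block on $GITHUB_OUTPUT, appends the paths
        # reported by `dist print-upload-files-from-manifest`, and closes it with `EOF`.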
150 | shell: bash 151 | run: | 152 | # Parse out what we just built and upload it to scratch storage 153 | echo "paths<> "$GITHUB_OUTPUT" 154 | dist print-upload-files-from-manifest --manifest dist-manifest.json >> "$GITHUB_OUTPUT" 155 | echo "EOF" >> "$GITHUB_OUTPUT" 156 | 157 | cp dist-manifest.json "$BUILD_MANIFEST_NAME" 158 | - name: "Upload artifacts" 159 | uses: actions/upload-artifact@v4 160 | with: 161 | name: artifacts-build-local-${{ join(matrix.targets, '_') }} 162 | path: | 163 | ${{ steps.cargo-dist.outputs.paths }} 164 | ${{ env.BUILD_MANIFEST_NAME }} 165 | 166 | # Build and package all the platform-agnostic(ish) things 167 | build-global-artifacts: 168 | needs: 169 | - plan 170 | - build-local-artifacts 171 | runs-on: "ubuntu-latest" 172 | env: 173 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} 174 | BUILD_MANIFEST_NAME: target/distrib/global-dist-manifest.json 175 | steps: 176 | - uses: actions/checkout@v4 177 | with: 178 | submodules: recursive 179 | - name: Install cached dist 180 | uses: actions/download-artifact@v4 181 | with: 182 | name: cargo-dist-cache 183 | path: ~/.cargo/bin/ 184 | - run: chmod +x ~/.cargo/bin/dist 185 | # Get all the local artifacts for the global tasks to use (for e.g. checksums) 186 | - name: Fetch local artifacts 187 | uses: actions/download-artifact@v4 188 | with: 189 | pattern: artifacts-* 190 | path: target/distrib/ 191 | merge-multiple: true 192 | - id: cargo-dist 193 | shell: bash 194 | run: | 195 | dist build ${{ needs.plan.outputs.tag-flag }} --output-format=json "--artifacts=global" > dist-manifest.json 196 | echo "dist ran successfully" 197 | 198 | # Parse out what we just built and upload it to scratch storage 199 | echo "paths<> "$GITHUB_OUTPUT" 200 | jq --raw-output ".upload_files[]" dist-manifest.json >> "$GITHUB_OUTPUT" 201 | echo "EOF" >> "$GITHUB_OUTPUT" 202 | 203 | cp dist-manifest.json "$BUILD_MANIFEST_NAME" 204 | - name: "Upload artifacts" 205 | uses: actions/upload-artifact@v4 206 | with: 207 | name: artifacts-build-global 208 | path: | 209 | ${{ steps.cargo-dist.outputs.paths }} 210 | ${{ env.BUILD_MANIFEST_NAME }} 211 | # Determines if we should publish/announce 212 | host: 213 | needs: 214 | - plan 215 | - build-local-artifacts 216 | - build-global-artifacts 217 | # Only run if we're "publishing", and only if local and global didn't fail (skipped is fine) 218 | if: ${{ always() && needs.plan.outputs.publishing == 'true' && (needs.build-global-artifacts.result == 'skipped' || needs.build-global-artifacts.result == 'success') && (needs.build-local-artifacts.result == 'skipped' || needs.build-local-artifacts.result == 'success') }} 219 | env: 220 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} 221 | runs-on: "ubuntu-latest" 222 | outputs: 223 | val: ${{ steps.host.outputs.manifest }} 224 | steps: 225 | - uses: actions/checkout@v4 226 | with: 227 | submodules: recursive 228 | - name: Install cached dist 229 | uses: actions/download-artifact@v4 230 | with: 231 | name: cargo-dist-cache 232 | path: ~/.cargo/bin/ 233 | - run: chmod +x ~/.cargo/bin/dist 234 | # Fetch artifacts from scratch-storage 235 | - name: Fetch artifacts 236 | uses: actions/download-artifact@v4 237 | with: 238 | pattern: artifacts-* 239 | path: target/distrib/ 240 | merge-multiple: true 241 | - id: host 242 | shell: bash 243 | run: | 244 | dist host ${{ needs.plan.outputs.tag-flag }} --steps=upload --steps=release --output-format=json > dist-manifest.json 245 | echo "artifacts uploaded and released successfully" 246 | cat dist-manifest.json 247 | echo "manifest=$(jq -c "." 
dist-manifest.json)" >> "$GITHUB_OUTPUT" 248 | - name: "Upload dist-manifest.json" 249 | uses: actions/upload-artifact@v4 250 | with: 251 | # Overwrite the previous copy 252 | name: artifacts-dist-manifest 253 | path: dist-manifest.json 254 | # Create a GitHub Release while uploading all files to it 255 | - name: "Download GitHub Artifacts" 256 | uses: actions/download-artifact@v4 257 | with: 258 | pattern: artifacts-* 259 | path: artifacts 260 | merge-multiple: true 261 | - name: Cleanup 262 | run: | 263 | # Remove the granular manifests 264 | rm -f artifacts/*-dist-manifest.json 265 | - name: Create GitHub Release 266 | env: 267 | PRERELEASE_FLAG: "${{ fromJson(steps.host.outputs.manifest).announcement_is_prerelease && '--prerelease' || '' }}" 268 | ANNOUNCEMENT_TITLE: "${{ fromJson(steps.host.outputs.manifest).announcement_title }}" 269 | ANNOUNCEMENT_BODY: "${{ fromJson(steps.host.outputs.manifest).announcement_github_body }}" 270 | RELEASE_COMMIT: "${{ github.sha }}" 271 | run: | 272 | # Write and read notes from a file to avoid quoting breaking things 273 | echo "$ANNOUNCEMENT_BODY" > $RUNNER_TEMP/notes.txt 274 | 275 | gh release create "${{ needs.plan.outputs.tag }}" --target "$RELEASE_COMMIT" $PRERELEASE_FLAG --title "$ANNOUNCEMENT_TITLE" --notes-file "$RUNNER_TEMP/notes.txt" artifacts/* 276 | 277 | announce: 278 | needs: 279 | - plan 280 | - host 281 | # use "always() && ..." to allow us to wait for all publish jobs while 282 | # still allowing individual publish jobs to skip themselves (for prereleases). 283 | # "host" however must run to completion, no skipping allowed! 284 | if: ${{ always() && needs.host.result == 'success' }} 285 | runs-on: "ubuntu-latest" 286 | env: 287 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} 288 | steps: 289 | - uses: actions/checkout@v4 290 | with: 291 | submodules: recursive 292 | -------------------------------------------------------------------------------- /src/shell/shell_.rs: -------------------------------------------------------------------------------- 1 | use crate::shell::hostname::hostname; 2 | use crate::shell::shell_::style::{ERROR, HEADER, NOTE, WARN}; 3 | use anstream::AutoStream; 4 | use anstyle::Style; 5 | use std::fmt; 6 | use std::io::{IsTerminal, Write}; 7 | 8 | pub enum TtyWidth { 9 | NoTty, 10 | Known(usize), 11 | Guess(usize), 12 | } 13 | 14 | /// The requested verbosity of output. 15 | #[derive(Debug, Clone, Copy, PartialEq)] 16 | pub enum Verbosity { 17 | Verbose, 18 | Normal, 19 | Quiet, 20 | } 21 | 22 | /// An abstraction around console output that remembers preferences for output 23 | /// verbosity and color. 24 | pub struct Shell { 25 | /// Wrapper around stdout/stderr. This helps with supporting sending 26 | /// output to a memory buffer which is useful for tests. 27 | output: ShellOut, 28 | /// How verbose messages should be. 29 | verbosity: Verbosity, 30 | /// Flag that indicates the current line needs to be cleared before 31 | /// printing. Used when a progress bar is currently displayed. 32 | needs_clear: bool, 33 | hostname: Option, 34 | } 35 | 36 | impl fmt::Debug for Shell { 37 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 38 | match self.output { 39 | ShellOut::Write(_) => f 40 | .debug_struct("Shell") 41 | .field("verbosity", &self.verbosity) 42 | .finish(), 43 | ShellOut::Stream { color_choice, .. 
} => f 44 | .debug_struct("Shell") 45 | .field("verbosity", &self.verbosity) 46 | .field("color_choice", &color_choice) 47 | .finish(), 48 | } 49 | } 50 | } 51 | 52 | /// A `Write`able object, either with or without color support 53 | enum ShellOut { 54 | /// A plain write object without color support 55 | Write(AutoStream>), 56 | /// Color-enabled stdio, with information on whether color should be used 57 | Stream { 58 | stdout: AutoStream, 59 | stderr: AutoStream, 60 | stderr_tty: bool, 61 | color_choice: ColorChoice, 62 | hyperlinks: bool, 63 | }, 64 | } 65 | 66 | /// Whether messages should use color output 67 | #[derive(Debug, PartialEq, Clone, Copy)] 68 | pub enum ColorChoice { 69 | /// Force color output 70 | Always, 71 | /// Force disable color output 72 | Never, 73 | /// Intelligently guess whether to use color output 74 | CargoAuto, 75 | } 76 | 77 | impl Shell { 78 | /// Creates a new shell (color choice and verbosity), defaulting to 'auto' color and verbose 79 | /// output. 80 | pub fn new() -> Shell { 81 | let auto_clr = ColorChoice::CargoAuto; 82 | let stdout_choice = auto_clr.to_anstream_color_choice(); 83 | let stderr_choice = auto_clr.to_anstream_color_choice(); 84 | Shell { 85 | output: ShellOut::Stream { 86 | stdout: AutoStream::new(std::io::stdout(), stdout_choice), 87 | stderr: AutoStream::new(std::io::stderr(), stderr_choice), 88 | color_choice: auto_clr, 89 | hyperlinks: supports_hyperlinks(), 90 | stderr_tty: std::io::stderr().is_terminal(), 91 | }, 92 | verbosity: Verbosity::Verbose, 93 | needs_clear: false, 94 | hostname: None, 95 | } 96 | } 97 | 98 | /// Creates a shell from a plain writable object, with no color, and max verbosity. 99 | pub fn from_write(out: Box) -> Shell { 100 | Shell { 101 | output: ShellOut::Write(AutoStream::never(out)), // strip all formatting on write 102 | verbosity: Verbosity::Verbose, 103 | needs_clear: false, 104 | hostname: None, 105 | } 106 | } 107 | 108 | /// Prints a message, where the status will have `color` color, and can be justified. The 109 | /// messages follows without color. 110 | fn print( 111 | &mut self, 112 | status: &dyn fmt::Display, 113 | message: Option<&dyn fmt::Display>, 114 | color: &Style, 115 | justified: bool, 116 | ) -> anyhow::Result<()> { 117 | match self.verbosity { 118 | Verbosity::Quiet => Ok(()), 119 | _ => { 120 | if self.needs_clear { 121 | self.err_erase_line(); 122 | } 123 | self.output 124 | .message_stderr(status, message, color, justified) 125 | } 126 | } 127 | } 128 | 129 | /// Sets whether the next print should clear the current line. 130 | pub fn set_needs_clear(&mut self, needs_clear: bool) { 131 | self.needs_clear = needs_clear; 132 | } 133 | 134 | /// Returns `true` if the `needs_clear` flag is unset. 135 | pub fn is_cleared(&self) -> bool { 136 | !self.needs_clear 137 | } 138 | 139 | /// Returns the width of the terminal in spaces, if any. 140 | pub fn err_width(&self) -> TtyWidth { 141 | match self.output { 142 | ShellOut::Stream { 143 | stderr_tty: true, .. 144 | } => imp::stderr_width(), 145 | _ => TtyWidth::NoTty, 146 | } 147 | } 148 | 149 | /// Returns `true` if stderr is a tty. 150 | pub fn is_err_tty(&self) -> bool { 151 | match self.output { 152 | ShellOut::Stream { stderr_tty, .. } => stderr_tty, 153 | _ => false, 154 | } 155 | } 156 | 157 | /// Gets a reference to the underlying stdout writer. 
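    /// Any pending progress line is erased from stderr first, so direct writes are
    /// not interleaved with a partially drawn status line.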
158 |     pub fn out(&mut self) -> &mut dyn Write {
159 |         if self.needs_clear {
160 |             self.err_erase_line();
161 |         }
162 |         self.output.stdout()
163 |     }
164 | 
165 |     /// Gets a reference to the underlying stderr writer.
166 |     pub fn err(&mut self) -> &mut dyn Write {
167 |         if self.needs_clear {
168 |             self.err_erase_line();
169 |         }
170 |         self.output.stderr()
171 |     }
172 | 
173 |     /// Erase from cursor to end of line.
174 |     pub fn err_erase_line(&mut self) {
175 |         if self.err_supports_color() {
176 |             imp::err_erase_line(self);
177 |             self.needs_clear = false;
178 |         }
179 |     }
180 | 
181 |     /// Shortcut to right-align and color green a status message.
182 |     pub fn status<T, U>(&mut self, status: T, message: U) -> anyhow::Result<()>
183 |     where
184 |         T: fmt::Display,
185 |         U: fmt::Display,
186 |     {
187 |         self.print(&status, Some(&message), &HEADER, true)
188 |     }
189 | 
190 |     pub fn status_header<T>(&mut self, status: T) -> anyhow::Result<()>
191 |     where
192 |         T: fmt::Display,
193 |     {
194 |         self.print(&status, None, &NOTE, true)
195 |     }
196 | 
197 |     /// Shortcut to right-align a status message.
198 |     pub fn status_with_color<T, U>(
199 |         &mut self,
200 |         status: T,
201 |         message: U,
202 |         color: &Style,
203 |     ) -> anyhow::Result<()>
204 |     where
205 |         T: fmt::Display,
206 |         U: fmt::Display,
207 |     {
208 |         self.print(&status, Some(&message), color, true)
209 |     }
210 | 
211 |     /// Runs the callback only if we are in verbose mode.
212 |     pub fn verbose<F>(&mut self, mut callback: F) -> anyhow::Result<()>
213 |     where
214 |         F: FnMut(&mut Shell) -> anyhow::Result<()>,
215 |     {
216 |         match self.verbosity {
217 |             Verbosity::Verbose => callback(self),
218 |             _ => Ok(()),
219 |         }
220 |     }
221 | 
222 |     /// Runs the callback if we are not in verbose mode.
223 |     pub fn concise<F>(&mut self, mut callback: F) -> anyhow::Result<()>
224 |     where
225 |         F: FnMut(&mut Shell) -> anyhow::Result<()>,
226 |     {
227 |         match self.verbosity {
228 |             Verbosity::Verbose => Ok(()),
229 |             _ => callback(self),
230 |         }
231 |     }
232 | 
233 |     /// Prints a red 'error' message.
234 |     pub fn error<T: fmt::Display>(&mut self, message: T) -> anyhow::Result<()> {
235 |         if self.needs_clear {
236 |             self.err_erase_line();
237 |         }
238 |         self.output
239 |             .message_stderr(&"error", Some(&message), &ERROR, false)
240 |     }
241 | 
242 |     /// Prints an amber 'warning' message.
243 |     pub fn warn<T: fmt::Display>(&mut self, message: T) -> anyhow::Result<()> {
244 |         match self.verbosity {
245 |             Verbosity::Quiet => Ok(()),
246 |             _ => self.print(&"warning", Some(&message), &WARN, false),
247 |         }
248 |     }
249 | 
250 |     /// Prints a cyan 'note' message.
251 |     pub fn note<T: fmt::Display>(&mut self, message: T) -> anyhow::Result<()> {
252 |         self.print(&"note", Some(&message), &NOTE, false)
253 |     }
254 | 
255 |     /// Updates the verbosity of the shell.
256 |     pub fn set_verbosity(&mut self, verbosity: Verbosity) {
257 |         self.verbosity = verbosity;
258 |     }
259 | 
260 |     /// Gets the verbosity of the shell.
261 |     pub fn verbosity(&self) -> Verbosity {
262 |         self.verbosity
263 |     }
264 | 
265 |     /// Updates the color choice (always, never, or auto) from a string.
266 |     pub fn set_color_choice(&mut self, color: Option<&str>) -> anyhow::Result<()> {
267 |         if let ShellOut::Stream {
268 |             ref mut stdout,
269 |             ref mut stderr,
270 |             ref mut color_choice,
271 |             ..
272 | } = self.output 273 | { 274 | let cfg = match color { 275 | Some("always") => ColorChoice::Always, 276 | Some("never") => ColorChoice::Never, 277 | 278 | Some("auto") | None => ColorChoice::CargoAuto, 279 | 280 | Some(arg) => anyhow::bail!( 281 | "argument for --color must be auto, always, or \ 282 | never, but found `{}`", 283 | arg 284 | ), 285 | }; 286 | *color_choice = cfg; 287 | let stdout_choice = cfg.to_anstream_color_choice(); 288 | let stderr_choice = cfg.to_anstream_color_choice(); 289 | *stdout = AutoStream::new(std::io::stdout(), stdout_choice); 290 | *stderr = AutoStream::new(std::io::stderr(), stderr_choice); 291 | } 292 | Ok(()) 293 | } 294 | 295 | pub fn set_hyperlinks(&mut self, yes: bool) -> anyhow::Result<()> { 296 | if let ShellOut::Stream { 297 | ref mut hyperlinks, .. 298 | } = self.output 299 | { 300 | *hyperlinks = yes; 301 | } 302 | Ok(()) 303 | } 304 | 305 | /// Gets the current color choice. 306 | /// 307 | /// If we are not using a color stream, this will always return `Never`, even if the color 308 | /// choice has been set to something else. 309 | pub fn color_choice(&self) -> ColorChoice { 310 | match self.output { 311 | ShellOut::Stream { color_choice, .. } => color_choice, 312 | ShellOut::Write(_) => ColorChoice::Never, 313 | } 314 | } 315 | 316 | /// Whether the shell supports color. 317 | pub fn err_supports_color(&self) -> bool { 318 | match &self.output { 319 | ShellOut::Write(_) => false, 320 | ShellOut::Stream { stderr, .. } => supports_color(stderr.current_choice()), 321 | } 322 | } 323 | 324 | pub fn out_supports_color(&self) -> bool { 325 | match &self.output { 326 | ShellOut::Write(_) => false, 327 | ShellOut::Stream { stdout, .. } => supports_color(stdout.current_choice()), 328 | } 329 | } 330 | 331 | pub fn out_hyperlink(&self, url: D) -> Hyperlink { 332 | let supports_hyperlinks = match &self.output { 333 | ShellOut::Write(_) => false, 334 | ShellOut::Stream { 335 | stdout, hyperlinks, .. 336 | } => stdout.current_choice() == anstream::ColorChoice::AlwaysAnsi && *hyperlinks, 337 | }; 338 | Hyperlink { 339 | url: supports_hyperlinks.then_some(url), 340 | } 341 | } 342 | 343 | pub fn err_hyperlink(&self, url: D) -> Hyperlink { 344 | let supports_hyperlinks = match &self.output { 345 | ShellOut::Write(_) => false, 346 | ShellOut::Stream { 347 | stderr, hyperlinks, .. 
348 | } => stderr.current_choice() == anstream::ColorChoice::AlwaysAnsi && *hyperlinks, 349 | }; 350 | if supports_hyperlinks { 351 | Hyperlink { url: Some(url) } 352 | } else { 353 | Hyperlink { url: None } 354 | } 355 | } 356 | 357 | pub fn out_file_hyperlink(&mut self, path: &std::path::Path) -> Hyperlink { 358 | let url = self.file_hyperlink(path); 359 | url.map(|u| self.out_hyperlink(u)).unwrap_or_default() 360 | } 361 | 362 | pub fn err_file_hyperlink(&mut self, path: &std::path::Path) -> Hyperlink { 363 | let url = self.file_hyperlink(path); 364 | url.map(|u| self.err_hyperlink(u)).unwrap_or_default() 365 | } 366 | 367 | fn file_hyperlink(&mut self, path: &std::path::Path) -> Option { 368 | let mut url = url::Url::from_file_path(path).ok()?; 369 | // Do a best-effort of setting the host in the URL to avoid issues with opening a link 370 | // scoped to the computer you've SSHed into 371 | let hostname = if cfg!(windows) { 372 | // Not supported correctly on windows 373 | None 374 | } else if let Some(hostname) = self.hostname.as_deref() { 375 | Some(hostname) 376 | } else { 377 | self.hostname = hostname().ok().and_then(|h| h.into_string().ok()); 378 | self.hostname.as_deref() 379 | }; 380 | let _ = url.set_host(hostname); 381 | Some(url) 382 | } 383 | 384 | /// Prints a message to stderr and translates ANSI escape code into console colors. 385 | pub fn print_ansi_stderr(&mut self, message: &[u8]) -> anyhow::Result<()> { 386 | if self.needs_clear { 387 | self.err_erase_line(); 388 | } 389 | self.err().write_all(message)?; 390 | Ok(()) 391 | } 392 | 393 | /// Prints a message to stdout and translates ANSI escape code into console colors. 394 | pub fn print_ansi_stdout(&mut self, message: &[u8]) -> anyhow::Result<()> { 395 | if self.needs_clear { 396 | self.err_erase_line(); 397 | } 398 | self.out().write_all(message)?; 399 | Ok(()) 400 | } 401 | 402 | pub fn print_json(&mut self, obj: &T) -> anyhow::Result<()> { 403 | // Path may fail to serialize to JSON ... 404 | let encoded = serde_json::to_string(&obj)?; 405 | // ... but don't fail due to a closed pipe. 406 | drop(writeln!(self.out(), "{encoded}")); 407 | Ok(()) 408 | } 409 | } 410 | 411 | impl Default for Shell { 412 | fn default() -> Self { 413 | Self::new() 414 | } 415 | } 416 | 417 | impl ShellOut { 418 | /// Prints out a message with a status. The status comes first, and is bold plus the given 419 | /// color. The status can be justified, in which case the max width that will right align is 420 | /// 12 chars. 421 | fn message_stderr( 422 | &mut self, 423 | status: &dyn fmt::Display, 424 | message: Option<&dyn fmt::Display>, 425 | style: &Style, 426 | justified: bool, 427 | ) -> anyhow::Result<()> { 428 | let style = style.render(); 429 | let bold = (anstyle::Style::new() | anstyle::Effects::BOLD).render(); 430 | let reset = anstyle::Reset.render(); 431 | 432 | let mut buffer = Vec::new(); 433 | if justified { 434 | write!(&mut buffer, "{style}{status:>12}{reset}")?; 435 | } else { 436 | write!(&mut buffer, "{style}{status}{reset}{bold}:{reset}")?; 437 | } 438 | match message { 439 | Some(message) => writeln!(buffer, " {message}")?, 440 | None => write!(buffer, " ")?, 441 | } 442 | self.stderr().write_all(&buffer)?; 443 | Ok(()) 444 | } 445 | 446 | /// Gets stdout as a `io::Write`. 447 | fn stdout(&mut self) -> &mut dyn Write { 448 | match *self { 449 | ShellOut::Stream { ref mut stdout, .. } => stdout, 450 | ShellOut::Write(ref mut w) => w, 451 | } 452 | } 453 | 454 | /// Gets stderr as a `io::Write`. 
455 |     fn stderr(&mut self) -> &mut dyn Write {
456 |         match *self {
457 |             ShellOut::Stream { ref mut stderr, .. } => stderr,
458 |             ShellOut::Write(ref mut w) => w,
459 |         }
460 |     }
461 | }
462 | 
463 | impl ColorChoice {
464 |     /// Converts our color choice to anstream's version.
465 |     fn to_anstream_color_choice(self) -> anstream::ColorChoice {
466 |         match self {
467 |             ColorChoice::Always => anstream::ColorChoice::Always,
468 |             ColorChoice::Never => anstream::ColorChoice::Never,
469 |             ColorChoice::CargoAuto => anstream::ColorChoice::Auto,
470 |         }
471 |     }
472 | }
473 | 
474 | fn supports_color(choice: anstream::ColorChoice) -> bool {
475 |     match choice {
476 |         anstream::ColorChoice::Always
477 |         | anstream::ColorChoice::AlwaysAnsi
478 |         | anstream::ColorChoice::Auto => true,
479 |         anstream::ColorChoice::Never => false,
480 |     }
481 | }
482 | 
483 | fn supports_hyperlinks() -> bool {
484 |     #[allow(clippy::disallowed_methods)] // We are reading the state of the system, not config
485 |     if std::env::var_os("TERM_PROGRAM").as_deref() == Some(std::ffi::OsStr::new("iTerm.app")) {
486 |         // Override `supports_hyperlinks` as we have an unknown incompatibility with iTerm2
487 |         return false;
488 |     }
489 | 
490 |     supports_hyperlinks::supports_hyperlinks()
491 | }
492 | 
493 | pub struct Hyperlink<D: fmt::Display> {
494 |     url: Option<D>,
495 | }
496 | 
497 | impl<D: fmt::Display> Default for Hyperlink<D> {
498 |     fn default() -> Self {
499 |         Self { url: None }
500 |     }
501 | }
502 | 
503 | impl<D: fmt::Display> Hyperlink<D> {
504 |     pub fn open(&self) -> impl fmt::Display {
505 |         if let Some(url) = self.url.as_ref() {
506 |             format!("\x1B]8;;{url}\x1B\\")
507 |         } else {
508 |             String::new()
509 |         }
510 |     }
511 | 
512 |     pub fn close(&self) -> impl fmt::Display {
513 |         if self.url.is_some() {
514 |             "\x1B]8;;\x1B\\"
515 |         } else {
516 |             ""
517 |         }
518 |     }
519 | }
520 | 
521 | #[cfg(unix)]
522 | mod imp {
523 |     use super::{Shell, TtyWidth};
524 |     use std::mem;
525 | 
526 |     pub fn stderr_width() -> TtyWidth {
527 |         unsafe {
528 |             let mut winsize: libc::winsize = mem::zeroed();
529 |             // The .into() here is needed for FreeBSD which defines TIOCGWINSZ
530 |             // as c_uint but ioctl wants c_ulong.
531 |             if libc::ioctl(libc::STDERR_FILENO, libc::TIOCGWINSZ, &mut winsize) < 0 {
532 |                 return TtyWidth::NoTty;
533 |             }
534 |             if winsize.ws_col > 0 {
535 |                 TtyWidth::Known(winsize.ws_col as usize)
536 |             } else {
537 |                 TtyWidth::NoTty
538 |             }
539 |         }
540 |     }
541 | 
542 |     pub fn err_erase_line(shell: &mut Shell) {
543 |         // This is the "EL - Erase in Line" sequence. It clears from the cursor
544 |         // to the end of line.
545 | // https://en.wikipedia.org/wiki/ANSI_escape_code#CSI_sequences 546 | let _ = shell.output.stderr().write_all(b"\x1B[K"); 547 | } 548 | } 549 | 550 | #[cfg(windows)] 551 | mod imp { 552 | use std::{cmp, mem, ptr}; 553 | 554 | use windows_sys::core::PCSTR; 555 | use windows_sys::Win32::Foundation::CloseHandle; 556 | use windows_sys::Win32::Foundation::INVALID_HANDLE_VALUE; 557 | use windows_sys::Win32::Foundation::{GENERIC_READ, GENERIC_WRITE}; 558 | use windows_sys::Win32::Storage::FileSystem::{ 559 | CreateFileA, FILE_SHARE_READ, FILE_SHARE_WRITE, OPEN_EXISTING, 560 | }; 561 | use windows_sys::Win32::System::Console::{ 562 | GetConsoleScreenBufferInfo, GetStdHandle, CONSOLE_SCREEN_BUFFER_INFO, STD_ERROR_HANDLE, 563 | }; 564 | 565 | pub(super) use super::{default_err_erase_line as err_erase_line, TtyWidth}; 566 | 567 | pub fn stderr_width() -> TtyWidth { 568 | unsafe { 569 | let stdout = GetStdHandle(STD_ERROR_HANDLE); 570 | let mut csbi: CONSOLE_SCREEN_BUFFER_INFO = mem::zeroed(); 571 | if GetConsoleScreenBufferInfo(stdout, &mut csbi) != 0 { 572 | return TtyWidth::Known((csbi.srWindow.Right - csbi.srWindow.Left) as usize); 573 | } 574 | 575 | // On mintty/msys/cygwin based terminals, the above fails with 576 | // INVALID_HANDLE_VALUE. Use an alternate method which works 577 | // in that case as well. 578 | let h = CreateFileA( 579 | "CONOUT$\0".as_ptr() as PCSTR, 580 | GENERIC_READ | GENERIC_WRITE, 581 | FILE_SHARE_READ | FILE_SHARE_WRITE, 582 | ptr::null_mut(), 583 | OPEN_EXISTING, 584 | 0, 585 | 0, 586 | ); 587 | if h == INVALID_HANDLE_VALUE { 588 | return TtyWidth::NoTty; 589 | } 590 | 591 | let mut csbi: CONSOLE_SCREEN_BUFFER_INFO = mem::zeroed(); 592 | let rc = GetConsoleScreenBufferInfo(h, &mut csbi); 593 | CloseHandle(h); 594 | if rc != 0 { 595 | let width = (csbi.srWindow.Right - csbi.srWindow.Left) as usize; 596 | // Unfortunately cygwin/mintty does not set the size of the 597 | // backing console to match the actual window size. This 598 | // always reports a size of 80 or 120 (not sure what 599 | // determines that). Use a conservative max of 60 which should 600 | // work in most circumstances. ConEmu does some magic to 601 | // resize the console correctly, but there's no reasonable way 602 | // to detect which kind of terminal we are running in, or if 603 | // GetConsoleScreenBufferInfo returns accurate information. 604 | return TtyWidth::Guess(cmp::min(60, width)); 605 | } 606 | 607 | TtyWidth::NoTty 608 | } 609 | } 610 | } 611 | 612 | #[cfg(windows)] 613 | fn default_err_erase_line(shell: &mut Shell) { 614 | match imp::stderr_width() { 615 | TtyWidth::Known(max_width) | TtyWidth::Guess(max_width) => { 616 | let blank = " ".repeat(max_width); 617 | drop(write!(shell.output.stderr(), "{}\r", blank)); 618 | } 619 | _ => (), 620 | } 621 | } 622 | 623 | mod style { 624 | use anstyle::{AnsiColor, Effects, Style}; 625 | 626 | pub const HEADER: Style = AnsiColor::Green.on_default().effects(Effects::BOLD); 627 | pub const ERROR: Style = AnsiColor::Red.on_default().effects(Effects::BOLD); 628 | pub const WARN: Style = AnsiColor::Yellow.on_default().effects(Effects::BOLD); 629 | pub const NOTE: Style = AnsiColor::Cyan.on_default().effects(Effects::BOLD); 630 | } 631 | -------------------------------------------------------------------------------- /Cargo.lock: -------------------------------------------------------------------------------- 1 | # This file is automatically @generated by Cargo. 2 | # It is not intended for manual editing. 
3 | version = 4 4 | 5 | [[package]] 6 | name = "ahash" 7 | version = "0.8.11" 8 | source = "registry+https://github.com/rust-lang/crates.io-index" 9 | checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" 10 | dependencies = [ 11 | "cfg-if", 12 | "getrandom", 13 | "once_cell", 14 | "version_check", 15 | "zerocopy", 16 | ] 17 | 18 | [[package]] 19 | name = "aho-corasick" 20 | version = "1.1.3" 21 | source = "registry+https://github.com/rust-lang/crates.io-index" 22 | checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" 23 | dependencies = [ 24 | "memchr", 25 | ] 26 | 27 | [[package]] 28 | name = "anstream" 29 | version = "0.6.14" 30 | source = "registry+https://github.com/rust-lang/crates.io-index" 31 | checksum = "418c75fa768af9c03be99d17643f93f79bbba589895012a80e3452a19ddda15b" 32 | dependencies = [ 33 | "anstyle", 34 | "anstyle-parse", 35 | "anstyle-query", 36 | "anstyle-wincon", 37 | "colorchoice", 38 | "is_terminal_polyfill", 39 | "utf8parse", 40 | ] 41 | 42 | [[package]] 43 | name = "anstyle" 44 | version = "1.0.7" 45 | source = "registry+https://github.com/rust-lang/crates.io-index" 46 | checksum = "038dfcf04a5feb68e9c60b21c9625a54c2c0616e79b72b0fd87075a056ae1d1b" 47 | 48 | [[package]] 49 | name = "anstyle-parse" 50 | version = "0.2.4" 51 | source = "registry+https://github.com/rust-lang/crates.io-index" 52 | checksum = "c03a11a9034d92058ceb6ee011ce58af4a9bf61491aa7e1e59ecd24bd40d22d4" 53 | dependencies = [ 54 | "utf8parse", 55 | ] 56 | 57 | [[package]] 58 | name = "anstyle-query" 59 | version = "1.1.0" 60 | source = "registry+https://github.com/rust-lang/crates.io-index" 61 | checksum = "ad186efb764318d35165f1758e7dcef3b10628e26d41a44bc5550652e6804391" 62 | dependencies = [ 63 | "windows-sys", 64 | ] 65 | 66 | [[package]] 67 | name = "anstyle-wincon" 68 | version = "3.0.3" 69 | source = "registry+https://github.com/rust-lang/crates.io-index" 70 | checksum = "61a38449feb7068f52bb06c12759005cf459ee52bb4adc1d5a7c4322d716fb19" 71 | dependencies = [ 72 | "anstyle", 73 | "windows-sys", 74 | ] 75 | 76 | [[package]] 77 | name = "anyhow" 78 | version = "1.0.86" 79 | source = "registry+https://github.com/rust-lang/crates.io-index" 80 | checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da" 81 | 82 | [[package]] 83 | name = "camino" 84 | version = "1.1.7" 85 | source = "registry+https://github.com/rust-lang/crates.io-index" 86 | checksum = "e0ec6b951b160caa93cc0c7b209e5a3bff7aae9062213451ac99493cd844c239" 87 | dependencies = [ 88 | "serde", 89 | ] 90 | 91 | [[package]] 92 | name = "cargo-platform" 93 | version = "0.1.8" 94 | source = "registry+https://github.com/rust-lang/crates.io-index" 95 | checksum = "24b1f0365a6c6bb4020cd05806fd0d33c44d38046b8bd7f0e40814b9763cabfc" 96 | dependencies = [ 97 | "serde", 98 | ] 99 | 100 | [[package]] 101 | name = "cargo-px" 102 | version = "0.1.20" 103 | dependencies = [ 104 | "ahash", 105 | "anstream", 106 | "anstyle", 107 | "anyhow", 108 | "clap", 109 | "guppy", 110 | "libc", 111 | "once_cell", 112 | "petgraph", 113 | "serde", 114 | "serde_json", 115 | "supports-hyperlinks", 116 | "textwrap", 117 | "tracing", 118 | "tracing-subscriber", 119 | "url", 120 | "windows-sys", 121 | ] 122 | 123 | [[package]] 124 | name = "cargo_metadata" 125 | version = "0.18.1" 126 | source = "registry+https://github.com/rust-lang/crates.io-index" 127 | checksum = "2d886547e41f740c616ae73108f6eb70afe6d940c7bc697cb30f13daec073037" 128 | dependencies = [ 129 | "camino", 130 | "cargo-platform", 131 | "semver", 
132 | "serde", 133 | "serde_json", 134 | "thiserror", 135 | ] 136 | 137 | [[package]] 138 | name = "cargo_px_env" 139 | version = "0.1.0" 140 | 141 | [[package]] 142 | name = "cfg-expr" 143 | version = "0.15.8" 144 | source = "registry+https://github.com/rust-lang/crates.io-index" 145 | checksum = "d067ad48b8650848b989a59a86c6c36a995d02d2bf778d45c3c5d57bc2718f02" 146 | dependencies = [ 147 | "smallvec", 148 | "target-lexicon", 149 | ] 150 | 151 | [[package]] 152 | name = "cfg-if" 153 | version = "1.0.0" 154 | source = "registry+https://github.com/rust-lang/crates.io-index" 155 | checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" 156 | 157 | [[package]] 158 | name = "clap" 159 | version = "4.5.13" 160 | source = "registry+https://github.com/rust-lang/crates.io-index" 161 | checksum = "0fbb260a053428790f3de475e304ff84cdbc4face759ea7a3e64c1edd938a7fc" 162 | dependencies = [ 163 | "clap_builder", 164 | ] 165 | 166 | [[package]] 167 | name = "clap_builder" 168 | version = "4.5.13" 169 | source = "registry+https://github.com/rust-lang/crates.io-index" 170 | checksum = "64b17d7ea74e9f833c7dbf2cbe4fb12ff26783eda4782a8975b72f895c9b4d99" 171 | dependencies = [ 172 | "anstream", 173 | "anstyle", 174 | "clap_lex", 175 | "strsim", 176 | ] 177 | 178 | [[package]] 179 | name = "clap_lex" 180 | version = "0.7.5" 181 | source = "registry+https://github.com/rust-lang/crates.io-index" 182 | checksum = "b94f61472cee1439c0b966b47e3aca9ae07e45d070759512cd390ea2bebc6675" 183 | 184 | [[package]] 185 | name = "colorchoice" 186 | version = "1.0.1" 187 | source = "registry+https://github.com/rust-lang/crates.io-index" 188 | checksum = "0b6a852b24ab71dffc585bcb46eaf7959d175cb865a7152e35b348d1b2960422" 189 | 190 | [[package]] 191 | name = "debug-ignore" 192 | version = "1.0.5" 193 | source = "registry+https://github.com/rust-lang/crates.io-index" 194 | checksum = "ffe7ed1d93f4553003e20b629abe9085e1e81b1429520f897f8f8860bc6dfc21" 195 | 196 | [[package]] 197 | name = "deranged" 198 | version = "0.3.11" 199 | source = "registry+https://github.com/rust-lang/crates.io-index" 200 | checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" 201 | dependencies = [ 202 | "powerfmt", 203 | ] 204 | 205 | [[package]] 206 | name = "either" 207 | version = "1.13.0" 208 | source = "registry+https://github.com/rust-lang/crates.io-index" 209 | checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" 210 | 211 | [[package]] 212 | name = "equivalent" 213 | version = "1.0.1" 214 | source = "registry+https://github.com/rust-lang/crates.io-index" 215 | checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" 216 | 217 | [[package]] 218 | name = "fixedbitset" 219 | version = "0.4.2" 220 | source = "registry+https://github.com/rust-lang/crates.io-index" 221 | checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" 222 | 223 | [[package]] 224 | name = "form_urlencoded" 225 | version = "1.2.1" 226 | source = "registry+https://github.com/rust-lang/crates.io-index" 227 | checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" 228 | dependencies = [ 229 | "percent-encoding", 230 | ] 231 | 232 | [[package]] 233 | name = "getrandom" 234 | version = "0.2.15" 235 | source = "registry+https://github.com/rust-lang/crates.io-index" 236 | checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" 237 | dependencies = [ 238 | "cfg-if", 239 | "libc", 240 | "wasi", 241 | ] 242 | 243 | [[package]] 244 | name = 
"guppy" 245 | version = "0.17.5" 246 | source = "registry+https://github.com/rust-lang/crates.io-index" 247 | checksum = "34e99a7734579b834a076ef11789783c153c6eb5fb3520ed15bc41f483f0f317" 248 | dependencies = [ 249 | "ahash", 250 | "camino", 251 | "cargo_metadata", 252 | "cfg-if", 253 | "debug-ignore", 254 | "fixedbitset", 255 | "guppy-workspace-hack", 256 | "indexmap", 257 | "itertools", 258 | "nested", 259 | "once_cell", 260 | "pathdiff", 261 | "petgraph", 262 | "semver", 263 | "serde", 264 | "serde_json", 265 | "smallvec", 266 | "static_assertions", 267 | "target-spec", 268 | ] 269 | 270 | [[package]] 271 | name = "guppy-workspace-hack" 272 | version = "0.1.0" 273 | source = "registry+https://github.com/rust-lang/crates.io-index" 274 | checksum = "92620684d99f750bae383ecb3be3748142d6095760afd5cbcf2261e9a279d780" 275 | 276 | [[package]] 277 | name = "hashbrown" 278 | version = "0.14.5" 279 | source = "registry+https://github.com/rust-lang/crates.io-index" 280 | checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" 281 | 282 | [[package]] 283 | name = "hermit-abi" 284 | version = "0.3.9" 285 | source = "registry+https://github.com/rust-lang/crates.io-index" 286 | checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" 287 | 288 | [[package]] 289 | name = "idna" 290 | version = "0.5.0" 291 | source = "registry+https://github.com/rust-lang/crates.io-index" 292 | checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" 293 | dependencies = [ 294 | "unicode-bidi", 295 | "unicode-normalization", 296 | ] 297 | 298 | [[package]] 299 | name = "indexmap" 300 | version = "2.2.6" 301 | source = "registry+https://github.com/rust-lang/crates.io-index" 302 | checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26" 303 | dependencies = [ 304 | "equivalent", 305 | "hashbrown", 306 | ] 307 | 308 | [[package]] 309 | name = "is-terminal" 310 | version = "0.4.12" 311 | source = "registry+https://github.com/rust-lang/crates.io-index" 312 | checksum = "f23ff5ef2b80d608d61efee834934d862cd92461afc0560dedf493e4c033738b" 313 | dependencies = [ 314 | "hermit-abi", 315 | "libc", 316 | "windows-sys", 317 | ] 318 | 319 | [[package]] 320 | name = "is_terminal_polyfill" 321 | version = "1.70.0" 322 | source = "registry+https://github.com/rust-lang/crates.io-index" 323 | checksum = "f8478577c03552c21db0e2724ffb8986a5ce7af88107e6be5d2ee6e158c12800" 324 | 325 | [[package]] 326 | name = "itertools" 327 | version = "0.12.1" 328 | source = "registry+https://github.com/rust-lang/crates.io-index" 329 | checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569" 330 | dependencies = [ 331 | "either", 332 | ] 333 | 334 | [[package]] 335 | name = "itoa" 336 | version = "1.0.11" 337 | source = "registry+https://github.com/rust-lang/crates.io-index" 338 | checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" 339 | 340 | [[package]] 341 | name = "lazy_static" 342 | version = "1.5.0" 343 | source = "registry+https://github.com/rust-lang/crates.io-index" 344 | checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" 345 | 346 | [[package]] 347 | name = "libc" 348 | version = "0.2.155" 349 | source = "registry+https://github.com/rust-lang/crates.io-index" 350 | checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c" 351 | 352 | [[package]] 353 | name = "log" 354 | version = "0.4.22" 355 | source = "registry+https://github.com/rust-lang/crates.io-index" 356 | checksum 
= "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" 357 | 358 | [[package]] 359 | name = "matchers" 360 | version = "0.1.0" 361 | source = "registry+https://github.com/rust-lang/crates.io-index" 362 | checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" 363 | dependencies = [ 364 | "regex-automata 0.1.10", 365 | ] 366 | 367 | [[package]] 368 | name = "memchr" 369 | version = "2.7.4" 370 | source = "registry+https://github.com/rust-lang/crates.io-index" 371 | checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" 372 | 373 | [[package]] 374 | name = "nested" 375 | version = "0.1.1" 376 | source = "registry+https://github.com/rust-lang/crates.io-index" 377 | checksum = "ca2b420f638f07fe83056b55ea190bb815f609ec5a35e7017884a10f78839c9e" 378 | 379 | [[package]] 380 | name = "nu-ansi-term" 381 | version = "0.46.0" 382 | source = "registry+https://github.com/rust-lang/crates.io-index" 383 | checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" 384 | dependencies = [ 385 | "overload", 386 | "winapi", 387 | ] 388 | 389 | [[package]] 390 | name = "num-conv" 391 | version = "0.1.0" 392 | source = "registry+https://github.com/rust-lang/crates.io-index" 393 | checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" 394 | 395 | [[package]] 396 | name = "once_cell" 397 | version = "1.19.0" 398 | source = "registry+https://github.com/rust-lang/crates.io-index" 399 | checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" 400 | 401 | [[package]] 402 | name = "overload" 403 | version = "0.1.1" 404 | source = "registry+https://github.com/rust-lang/crates.io-index" 405 | checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" 406 | 407 | [[package]] 408 | name = "pathdiff" 409 | version = "0.2.1" 410 | source = "registry+https://github.com/rust-lang/crates.io-index" 411 | checksum = "8835116a5c179084a830efb3adc117ab007512b535bc1a21c991d3b32a6b44dd" 412 | dependencies = [ 413 | "camino", 414 | ] 415 | 416 | [[package]] 417 | name = "percent-encoding" 418 | version = "2.3.1" 419 | source = "registry+https://github.com/rust-lang/crates.io-index" 420 | checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" 421 | 422 | [[package]] 423 | name = "petgraph" 424 | version = "0.6.5" 425 | source = "registry+https://github.com/rust-lang/crates.io-index" 426 | checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db" 427 | dependencies = [ 428 | "fixedbitset", 429 | "indexmap", 430 | ] 431 | 432 | [[package]] 433 | name = "pin-project-lite" 434 | version = "0.2.14" 435 | source = "registry+https://github.com/rust-lang/crates.io-index" 436 | checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02" 437 | 438 | [[package]] 439 | name = "powerfmt" 440 | version = "0.2.0" 441 | source = "registry+https://github.com/rust-lang/crates.io-index" 442 | checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" 443 | 444 | [[package]] 445 | name = "proc-macro2" 446 | version = "1.0.86" 447 | source = "registry+https://github.com/rust-lang/crates.io-index" 448 | checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77" 449 | dependencies = [ 450 | "unicode-ident", 451 | ] 452 | 453 | [[package]] 454 | name = "quote" 455 | version = "1.0.36" 456 | source = "registry+https://github.com/rust-lang/crates.io-index" 457 | checksum = 
"0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7" 458 | dependencies = [ 459 | "proc-macro2", 460 | ] 461 | 462 | [[package]] 463 | name = "regex" 464 | version = "1.10.5" 465 | source = "registry+https://github.com/rust-lang/crates.io-index" 466 | checksum = "b91213439dad192326a0d7c6ee3955910425f441d7038e0d6933b0aec5c4517f" 467 | dependencies = [ 468 | "aho-corasick", 469 | "memchr", 470 | "regex-automata 0.4.7", 471 | "regex-syntax 0.8.4", 472 | ] 473 | 474 | [[package]] 475 | name = "regex-automata" 476 | version = "0.1.10" 477 | source = "registry+https://github.com/rust-lang/crates.io-index" 478 | checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" 479 | dependencies = [ 480 | "regex-syntax 0.6.29", 481 | ] 482 | 483 | [[package]] 484 | name = "regex-automata" 485 | version = "0.4.7" 486 | source = "registry+https://github.com/rust-lang/crates.io-index" 487 | checksum = "38caf58cc5ef2fed281f89292ef23f6365465ed9a41b7a7754eb4e26496c92df" 488 | dependencies = [ 489 | "aho-corasick", 490 | "memchr", 491 | "regex-syntax 0.8.4", 492 | ] 493 | 494 | [[package]] 495 | name = "regex-syntax" 496 | version = "0.6.29" 497 | source = "registry+https://github.com/rust-lang/crates.io-index" 498 | checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" 499 | 500 | [[package]] 501 | name = "regex-syntax" 502 | version = "0.8.4" 503 | source = "registry+https://github.com/rust-lang/crates.io-index" 504 | checksum = "7a66a03ae7c801facd77a29370b4faec201768915ac14a721ba36f20bc9c209b" 505 | 506 | [[package]] 507 | name = "ryu" 508 | version = "1.0.18" 509 | source = "registry+https://github.com/rust-lang/crates.io-index" 510 | checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" 511 | 512 | [[package]] 513 | name = "semver" 514 | version = "1.0.23" 515 | source = "registry+https://github.com/rust-lang/crates.io-index" 516 | checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b" 517 | dependencies = [ 518 | "serde", 519 | ] 520 | 521 | [[package]] 522 | name = "serde" 523 | version = "1.0.204" 524 | source = "registry+https://github.com/rust-lang/crates.io-index" 525 | checksum = "bc76f558e0cbb2a839d37354c575f1dc3fdc6546b5be373ba43d95f231bf7c12" 526 | dependencies = [ 527 | "serde_derive", 528 | ] 529 | 530 | [[package]] 531 | name = "serde_derive" 532 | version = "1.0.204" 533 | source = "registry+https://github.com/rust-lang/crates.io-index" 534 | checksum = "e0cd7e117be63d3c3678776753929474f3b04a43a080c744d6b0ae2a8c28e222" 535 | dependencies = [ 536 | "proc-macro2", 537 | "quote", 538 | "syn", 539 | ] 540 | 541 | [[package]] 542 | name = "serde_json" 543 | version = "1.0.120" 544 | source = "registry+https://github.com/rust-lang/crates.io-index" 545 | checksum = "4e0d21c9a8cae1235ad58a00c11cb40d4b1e5c784f1ef2c537876ed6ffd8b7c5" 546 | dependencies = [ 547 | "itoa", 548 | "ryu", 549 | "serde", 550 | ] 551 | 552 | [[package]] 553 | name = "sharded-slab" 554 | version = "0.1.7" 555 | source = "registry+https://github.com/rust-lang/crates.io-index" 556 | checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" 557 | dependencies = [ 558 | "lazy_static", 559 | ] 560 | 561 | [[package]] 562 | name = "smallvec" 563 | version = "1.13.2" 564 | source = "registry+https://github.com/rust-lang/crates.io-index" 565 | checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" 566 | 567 | [[package]] 568 | name = "smawk" 569 | version = "0.3.2" 570 | source = 
"registry+https://github.com/rust-lang/crates.io-index" 571 | checksum = "b7c388c1b5e93756d0c740965c41e8822f866621d41acbdf6336a6a168f8840c" 572 | 573 | [[package]] 574 | name = "static_assertions" 575 | version = "1.1.0" 576 | source = "registry+https://github.com/rust-lang/crates.io-index" 577 | checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" 578 | 579 | [[package]] 580 | name = "strsim" 581 | version = "0.11.1" 582 | source = "registry+https://github.com/rust-lang/crates.io-index" 583 | checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" 584 | 585 | [[package]] 586 | name = "supports-hyperlinks" 587 | version = "2.1.0" 588 | source = "registry+https://github.com/rust-lang/crates.io-index" 589 | checksum = "f84231692eb0d4d41e4cdd0cabfdd2e6cd9e255e65f80c9aa7c98dd502b4233d" 590 | dependencies = [ 591 | "is-terminal", 592 | ] 593 | 594 | [[package]] 595 | name = "syn" 596 | version = "2.0.69" 597 | source = "registry+https://github.com/rust-lang/crates.io-index" 598 | checksum = "201fcda3845c23e8212cd466bfebf0bd20694490fc0356ae8e428e0824a915a6" 599 | dependencies = [ 600 | "proc-macro2", 601 | "quote", 602 | "unicode-ident", 603 | ] 604 | 605 | [[package]] 606 | name = "target-lexicon" 607 | version = "0.12.14" 608 | source = "registry+https://github.com/rust-lang/crates.io-index" 609 | checksum = "e1fc403891a21bcfb7c37834ba66a547a8f402146eba7265b5a6d88059c9ff2f" 610 | 611 | [[package]] 612 | name = "target-spec" 613 | version = "3.1.0" 614 | source = "registry+https://github.com/rust-lang/crates.io-index" 615 | checksum = "36a8e795b1824524d13cdf04f73cf8b4f244ce86c96b4d2a83a6ca1a753d2752" 616 | dependencies = [ 617 | "cfg-expr", 618 | "guppy-workspace-hack", 619 | "target-lexicon", 620 | "unicode-ident", 621 | ] 622 | 623 | [[package]] 624 | name = "textwrap" 625 | version = "0.16.1" 626 | source = "registry+https://github.com/rust-lang/crates.io-index" 627 | checksum = "23d434d3f8967a09480fb04132ebe0a3e088c173e6d0ee7897abbdf4eab0f8b9" 628 | dependencies = [ 629 | "smawk", 630 | "unicode-linebreak", 631 | "unicode-width", 632 | ] 633 | 634 | [[package]] 635 | name = "thiserror" 636 | version = "1.0.61" 637 | source = "registry+https://github.com/rust-lang/crates.io-index" 638 | checksum = "c546c80d6be4bc6a00c0f01730c08df82eaa7a7a61f11d656526506112cc1709" 639 | dependencies = [ 640 | "thiserror-impl", 641 | ] 642 | 643 | [[package]] 644 | name = "thiserror-impl" 645 | version = "1.0.61" 646 | source = "registry+https://github.com/rust-lang/crates.io-index" 647 | checksum = "46c3384250002a6d5af4d114f2845d37b57521033f30d5c3f46c4d70e1197533" 648 | dependencies = [ 649 | "proc-macro2", 650 | "quote", 651 | "syn", 652 | ] 653 | 654 | [[package]] 655 | name = "thread_local" 656 | version = "1.1.8" 657 | source = "registry+https://github.com/rust-lang/crates.io-index" 658 | checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c" 659 | dependencies = [ 660 | "cfg-if", 661 | "once_cell", 662 | ] 663 | 664 | [[package]] 665 | name = "time" 666 | version = "0.3.36" 667 | source = "registry+https://github.com/rust-lang/crates.io-index" 668 | checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885" 669 | dependencies = [ 670 | "deranged", 671 | "itoa", 672 | "num-conv", 673 | "powerfmt", 674 | "serde", 675 | "time-core", 676 | "time-macros", 677 | ] 678 | 679 | [[package]] 680 | name = "time-core" 681 | version = "0.1.2" 682 | source = "registry+https://github.com/rust-lang/crates.io-index" 683 | checksum = 
"ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" 684 | 685 | [[package]] 686 | name = "time-macros" 687 | version = "0.2.18" 688 | source = "registry+https://github.com/rust-lang/crates.io-index" 689 | checksum = "3f252a68540fde3a3877aeea552b832b40ab9a69e318efd078774a01ddee1ccf" 690 | dependencies = [ 691 | "num-conv", 692 | "time-core", 693 | ] 694 | 695 | [[package]] 696 | name = "tinyvec" 697 | version = "1.7.0" 698 | source = "registry+https://github.com/rust-lang/crates.io-index" 699 | checksum = "ce6b6a2fb3a985e99cebfaefa9faa3024743da73304ca1c683a36429613d3d22" 700 | dependencies = [ 701 | "tinyvec_macros", 702 | ] 703 | 704 | [[package]] 705 | name = "tinyvec_macros" 706 | version = "0.1.1" 707 | source = "registry+https://github.com/rust-lang/crates.io-index" 708 | checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" 709 | 710 | [[package]] 711 | name = "tracing" 712 | version = "0.1.40" 713 | source = "registry+https://github.com/rust-lang/crates.io-index" 714 | checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" 715 | dependencies = [ 716 | "pin-project-lite", 717 | "tracing-attributes", 718 | "tracing-core", 719 | ] 720 | 721 | [[package]] 722 | name = "tracing-attributes" 723 | version = "0.1.27" 724 | source = "registry+https://github.com/rust-lang/crates.io-index" 725 | checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" 726 | dependencies = [ 727 | "proc-macro2", 728 | "quote", 729 | "syn", 730 | ] 731 | 732 | [[package]] 733 | name = "tracing-core" 734 | version = "0.1.32" 735 | source = "registry+https://github.com/rust-lang/crates.io-index" 736 | checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" 737 | dependencies = [ 738 | "once_cell", 739 | "valuable", 740 | ] 741 | 742 | [[package]] 743 | name = "tracing-log" 744 | version = "0.2.0" 745 | source = "registry+https://github.com/rust-lang/crates.io-index" 746 | checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" 747 | dependencies = [ 748 | "log", 749 | "once_cell", 750 | "tracing-core", 751 | ] 752 | 753 | [[package]] 754 | name = "tracing-subscriber" 755 | version = "0.3.18" 756 | source = "registry+https://github.com/rust-lang/crates.io-index" 757 | checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" 758 | dependencies = [ 759 | "matchers", 760 | "nu-ansi-term", 761 | "once_cell", 762 | "regex", 763 | "sharded-slab", 764 | "smallvec", 765 | "thread_local", 766 | "time", 767 | "tracing", 768 | "tracing-core", 769 | "tracing-log", 770 | ] 771 | 772 | [[package]] 773 | name = "unicode-bidi" 774 | version = "0.3.15" 775 | source = "registry+https://github.com/rust-lang/crates.io-index" 776 | checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75" 777 | 778 | [[package]] 779 | name = "unicode-ident" 780 | version = "1.0.12" 781 | source = "registry+https://github.com/rust-lang/crates.io-index" 782 | checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" 783 | 784 | [[package]] 785 | name = "unicode-linebreak" 786 | version = "0.1.5" 787 | source = "registry+https://github.com/rust-lang/crates.io-index" 788 | checksum = "3b09c83c3c29d37506a3e260c08c03743a6bb66a9cd432c6934ab501a190571f" 789 | 790 | [[package]] 791 | name = "unicode-normalization" 792 | version = "0.1.23" 793 | source = "registry+https://github.com/rust-lang/crates.io-index" 794 | checksum = 
"a56d1686db2308d901306f92a263857ef59ea39678a5458e7cb17f01415101f5" 795 | dependencies = [ 796 | "tinyvec", 797 | ] 798 | 799 | [[package]] 800 | name = "unicode-width" 801 | version = "0.1.13" 802 | source = "registry+https://github.com/rust-lang/crates.io-index" 803 | checksum = "0336d538f7abc86d282a4189614dfaa90810dfc2c6f6427eaf88e16311dd225d" 804 | 805 | [[package]] 806 | name = "url" 807 | version = "2.5.2" 808 | source = "registry+https://github.com/rust-lang/crates.io-index" 809 | checksum = "22784dbdf76fdde8af1aeda5622b546b422b6fc585325248a2bf9f5e41e94d6c" 810 | dependencies = [ 811 | "form_urlencoded", 812 | "idna", 813 | "percent-encoding", 814 | ] 815 | 816 | [[package]] 817 | name = "utf8parse" 818 | version = "0.2.2" 819 | source = "registry+https://github.com/rust-lang/crates.io-index" 820 | checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" 821 | 822 | [[package]] 823 | name = "valuable" 824 | version = "0.1.0" 825 | source = "registry+https://github.com/rust-lang/crates.io-index" 826 | checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" 827 | 828 | [[package]] 829 | name = "version_check" 830 | version = "0.9.4" 831 | source = "registry+https://github.com/rust-lang/crates.io-index" 832 | checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" 833 | 834 | [[package]] 835 | name = "wasi" 836 | version = "0.11.0+wasi-snapshot-preview1" 837 | source = "registry+https://github.com/rust-lang/crates.io-index" 838 | checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" 839 | 840 | [[package]] 841 | name = "winapi" 842 | version = "0.3.9" 843 | source = "registry+https://github.com/rust-lang/crates.io-index" 844 | checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" 845 | dependencies = [ 846 | "winapi-i686-pc-windows-gnu", 847 | "winapi-x86_64-pc-windows-gnu", 848 | ] 849 | 850 | [[package]] 851 | name = "winapi-i686-pc-windows-gnu" 852 | version = "0.4.0" 853 | source = "registry+https://github.com/rust-lang/crates.io-index" 854 | checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" 855 | 856 | [[package]] 857 | name = "winapi-x86_64-pc-windows-gnu" 858 | version = "0.4.0" 859 | source = "registry+https://github.com/rust-lang/crates.io-index" 860 | checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" 861 | 862 | [[package]] 863 | name = "windows-sys" 864 | version = "0.52.0" 865 | source = "registry+https://github.com/rust-lang/crates.io-index" 866 | checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" 867 | dependencies = [ 868 | "windows-targets", 869 | ] 870 | 871 | [[package]] 872 | name = "windows-targets" 873 | version = "0.52.6" 874 | source = "registry+https://github.com/rust-lang/crates.io-index" 875 | checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" 876 | dependencies = [ 877 | "windows_aarch64_gnullvm", 878 | "windows_aarch64_msvc", 879 | "windows_i686_gnu", 880 | "windows_i686_gnullvm", 881 | "windows_i686_msvc", 882 | "windows_x86_64_gnu", 883 | "windows_x86_64_gnullvm", 884 | "windows_x86_64_msvc", 885 | ] 886 | 887 | [[package]] 888 | name = "windows_aarch64_gnullvm" 889 | version = "0.52.6" 890 | source = "registry+https://github.com/rust-lang/crates.io-index" 891 | checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" 892 | 893 | [[package]] 894 | name = "windows_aarch64_msvc" 895 | version = "0.52.6" 896 | source = 
"registry+https://github.com/rust-lang/crates.io-index" 897 | checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" 898 | 899 | [[package]] 900 | name = "windows_i686_gnu" 901 | version = "0.52.6" 902 | source = "registry+https://github.com/rust-lang/crates.io-index" 903 | checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" 904 | 905 | [[package]] 906 | name = "windows_i686_gnullvm" 907 | version = "0.52.6" 908 | source = "registry+https://github.com/rust-lang/crates.io-index" 909 | checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" 910 | 911 | [[package]] 912 | name = "windows_i686_msvc" 913 | version = "0.52.6" 914 | source = "registry+https://github.com/rust-lang/crates.io-index" 915 | checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" 916 | 917 | [[package]] 918 | name = "windows_x86_64_gnu" 919 | version = "0.52.6" 920 | source = "registry+https://github.com/rust-lang/crates.io-index" 921 | checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" 922 | 923 | [[package]] 924 | name = "windows_x86_64_gnullvm" 925 | version = "0.52.6" 926 | source = "registry+https://github.com/rust-lang/crates.io-index" 927 | checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" 928 | 929 | [[package]] 930 | name = "windows_x86_64_msvc" 931 | version = "0.52.6" 932 | source = "registry+https://github.com/rust-lang/crates.io-index" 933 | checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" 934 | 935 | [[package]] 936 | name = "zerocopy" 937 | version = "0.7.35" 938 | source = "registry+https://github.com/rust-lang/crates.io-index" 939 | checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" 940 | dependencies = [ 941 | "zerocopy-derive", 942 | ] 943 | 944 | [[package]] 945 | name = "zerocopy-derive" 946 | version = "0.7.35" 947 | source = "registry+https://github.com/rust-lang/crates.io-index" 948 | checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" 949 | dependencies = [ 950 | "proc-macro2", 951 | "quote", 952 | "syn", 953 | ] 954 | --------------------------------------------------------------------------------