├── src ├── system │ ├── mod.rs │ ├── sleep.rs │ └── rng.rs ├── node │ ├── mod.rs │ ├── stream.rs │ ├── os.rs │ ├── process.rs │ ├── path.rs │ └── fs.rs ├── actions │ ├── mod.rs │ ├── io.rs │ ├── tool_cache.rs │ ├── push_line_splitter.rs │ ├── exec.rs │ ├── core.rs │ └── cache.rs ├── cargo_hooks │ ├── mod.rs │ ├── hook.rs │ ├── annotation.rs │ └── install.rs ├── nonce.rs ├── utils.rs ├── delta.rs ├── action_paths.rs ├── safe_encoding.rs ├── hasher.rs ├── noop_stream.rs ├── cross.rs ├── lib.rs ├── cargo_lock_hashing.rs ├── package_manifest.rs ├── agnostic_path.rs ├── job.rs ├── error.rs ├── input_manager.rs ├── access_times.rs ├── cache_key_builder.rs ├── dir_tree.rs ├── run.rs ├── rustup.rs ├── cargo.rs ├── toolchain.rs └── fingerprinting.rs ├── doc ├── images │ ├── annotation.webp │ └── cargo-install-grcov.webp ├── NOTES.md └── WASM_SIZE.md ├── .cargo └── config ├── phase-wrappers ├── main.js └── post.js ├── .gitignore ├── rustfmt.toml ├── .about ├── config.toml └── template.hbs ├── scripts └── generate-rust-licenses ├── KNOWN_ISSUES.md ├── .github ├── dependabot.yml └── workflows │ ├── release.yml │ └── ci.yml ├── LICENSE ├── package.json ├── Cargo.toml ├── action.yml └── README.md /src/system/mod.rs: -------------------------------------------------------------------------------- 1 | pub(crate) mod rng; 2 | pub(crate) mod sleep; 3 | -------------------------------------------------------------------------------- /doc/images/annotation.webp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FrancisRussell/ferrous-actions/HEAD/doc/images/annotation.webp -------------------------------------------------------------------------------- /doc/images/cargo-install-grcov.webp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/FrancisRussell/ferrous-actions/HEAD/doc/images/cargo-install-grcov.webp -------------------------------------------------------------------------------- /src/node/mod.rs: -------------------------------------------------------------------------------- 1 | pub(crate) mod fs; 2 | pub(crate) mod os; 3 | pub(crate) mod path; 4 | pub(crate) mod process; 5 | pub(crate) mod stream; 6 | -------------------------------------------------------------------------------- /.cargo/config: -------------------------------------------------------------------------------- 1 | [build] 2 | target = "wasm32-unknown-unknown" 3 | 4 | [target.wasm32-unknown-unknown] 5 | runner = 'wasm-bindgen-test-runner' 6 | -------------------------------------------------------------------------------- /phase-wrappers/main.js: -------------------------------------------------------------------------------- 1 | const { env } = require('node:process'); 2 | env.GITHUB_RUST_ACTION_PHASE = 'main'; 3 | const impl = require('./index.js'); 4 | -------------------------------------------------------------------------------- /phase-wrappers/post.js: -------------------------------------------------------------------------------- 1 | const { env } = require('node:process'); 2 | env.GITHUB_RUST_ACTION_PHASE = 'post'; 3 | const impl = require('./index.js'); 4 | -------------------------------------------------------------------------------- /src/actions/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod cache; 2 | pub mod core; 3 | pub mod exec; 4 | pub mod io; 5 | pub(self) mod push_line_splitter; 6 | pub mod tool_cache; 7 | 
-------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /dist/ 2 | /LICENSE-3RD-PARTY-JAVASCRIPT 3 | /LICENSE-3RD-PARTY-RUST 4 | /node_modules/ 5 | /pkg/ 6 | /target/ 7 | /wasm-pack.log 8 | 9 | .*.swp 10 | -------------------------------------------------------------------------------- /src/cargo_hooks/mod.rs: -------------------------------------------------------------------------------- 1 | mod annotation; 2 | mod hook; 3 | mod install; 4 | 5 | pub(crate) use annotation::*; 6 | pub(crate) use hook::*; 7 | pub(crate) use install::*; 8 | -------------------------------------------------------------------------------- /rustfmt.toml: -------------------------------------------------------------------------------- 1 | force_multiline_blocks = false 2 | imports_granularity = "Module" 3 | max_width = 120 4 | newline_style = "Unix" 5 | reorder_impl_items = true 6 | wrap_comments = true 7 | -------------------------------------------------------------------------------- /.about/config.toml: -------------------------------------------------------------------------------- 1 | accepted = [ 2 | "Apache-2.0", 3 | "Apache-2.0 WITH LLVM-exception", 4 | "BSD-2-Clause", 5 | "BSD-3-Clause", 6 | "MIT", 7 | "Unicode-DFS-2016", 8 | ] 9 | -------------------------------------------------------------------------------- /scripts/generate-rust-licenses: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | set -eux 3 | 4 | cargo install --locked cargo-about 5 | cargo about generate -c ./.about/config.toml -o LICENSE-3RD-PARTY-RUST ./.about/template.hbs 6 | -------------------------------------------------------------------------------- /.about/template.hbs: -------------------------------------------------------------------------------- 1 | {{#each licenses}} 2 | The following packages are licensed under {{id}}: 3 | {{#each used_by}} 4 | {{crate.name}} {{crate.version}} 5 | {{/each}} 6 | 7 | The full license text follows: 8 | {{text}} 9 | 10 | {{/each}} 11 | -------------------------------------------------------------------------------- /KNOWN_ISSUES.md: -------------------------------------------------------------------------------- 1 | # Known Issues 2 | 3 | - Using `cargo clippy` will fail if `clippy` wasn't installed by `rustup`. This situation is difficult to detect 4 | because there is an existing `clippy` install on the Github runners that fails to function. 5 | -------------------------------------------------------------------------------- /src/nonce.rs: -------------------------------------------------------------------------------- 1 | use crate::system::rng; 2 | use rustup_toolchain_manifest::HashValue; 3 | 4 | pub fn build(num_bytes: usize) -> HashValue { 5 | let mut bytes = vec![0u8; num_bytes]; 6 | let mut rng = rng::MathRandom::default(); 7 | rng.fill_bytes(&mut bytes); 8 | HashValue::from_bytes(&bytes) 9 | } 10 | -------------------------------------------------------------------------------- /src/utils.rs: -------------------------------------------------------------------------------- 1 | pub fn set_panic_hook() { 2 | // When the `console_error_panic_hook` feature is enabled, we can call the 3 | // `set_panic_hook` function at least once during initialization, and then 4 | // we will get better error messages if our code ever panics. 
5 | // 6 | // For more details see 7 | // https://github.com/rustwasm/console_error_panic_hook#readme 8 | #[cfg(feature = "console_error_panic_hook")] 9 | console_error_panic_hook::set_once(); 10 | } 11 | -------------------------------------------------------------------------------- /src/delta.rs: -------------------------------------------------------------------------------- 1 | #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, strum::Display)] 2 | pub enum Action { 3 | Added, 4 | Removed, 5 | Changed, 6 | } 7 | 8 | pub fn render_list<S>(items: &[(S, Action)]) -> String 9 | where 10 | S: std::fmt::Display, 11 | { 12 | use std::fmt::Write as _; 13 | let mut result = String::new(); 14 | for (path, item) in items { 15 | writeln!(&mut result, "{}: {}", item, path).expect("Unable to write to string"); 16 | } 17 | result 18 | } 19 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "github-actions" 4 | directory: "/" 5 | schedule: 6 | interval: "weekly" 7 | labels: 8 | - "dependencies" 9 | 10 | - package-ecosystem: "npm" 11 | directory: "/" 12 | schedule: 13 | interval: "weekly" 14 | labels: 15 | - "dependencies" 16 | 17 | - package-ecosystem: "cargo" 18 | directory: "/" 19 | schedule: 20 | interval: "weekly" 21 | labels: 22 | - "dependencies" 23 | -------------------------------------------------------------------------------- /src/action_paths.rs: -------------------------------------------------------------------------------- 1 | use crate::{node, Error}; 2 | use node::path::Path; 3 | use std::borrow::Cow; 4 | 5 | pub fn get_action_name() -> Cow<'static, str> { 6 | "github-rust-actions".into() 7 | } 8 | 9 | #[allow(clippy::unnecessary_wraps)] 10 | pub fn get_action_share_dir() -> Result<Path, Error> { 11 | Ok(node::os::homedir() 12 | .join(".local") 13 | .join("share") 14 | .join(get_action_name().as_ref())) 15 | } 16 | 17 | #[allow(clippy::unnecessary_wraps)] 18 | pub fn get_action_cache_dir() -> Result<Path, Error> { 19 | Ok(node::os::homedir().join(".cache").join(get_action_name().as_ref())) 20 | } 21 | -------------------------------------------------------------------------------- /src/safe_encoding.rs: -------------------------------------------------------------------------------- 1 | use base64::engine::general_purpose::GeneralPurpose; 2 | use base64::Engine as _; 3 | 4 | fn build_engine() -> GeneralPurpose { 5 | let config = base64::engine::general_purpose::NO_PAD; 6 | let alphabet = &base64::alphabet::URL_SAFE; 7 | GeneralPurpose::new(alphabet, config) 8 | } 9 | 10 | pub fn encode<I: AsRef<[u8]>>(input: I) -> String { 11 | let engine = build_engine(); 12 | engine.encode(input.as_ref()) 13 | } 14 | 15 | pub fn decode<I: AsRef<[u8]>>(input: I) -> Result<Vec<u8>, base64::DecodeError> { 16 | let engine = build_engine(); 17 | let result = engine.decode(input.as_ref())?; 18 | Ok(result) 19 | } 20 | -------------------------------------------------------------------------------- /src/node/stream.rs: -------------------------------------------------------------------------------- 1 | pub mod ffi { 2 | use js_sys::Object; 3 | use wasm_bindgen::prelude::*; 4 | 5 | #[wasm_bindgen(module = "stream")] 6 | extern "C" { 7 | #[derive(Debug)] 8 | #[wasm_bindgen(js_name = "Writable")] 9 | pub type Writable; 10 | 11 | #[wasm_bindgen(constructor)] 12 | pub fn new(options: Option<Object>) -> Writable; 13 | } 14 | } 15 | 16 | #[cfg(test)] 17 | mod test { 18 | use super::*; 19 | use
js_sys::Object; 20 | use wasm_bindgen_test::wasm_bindgen_test; 21 | 22 | #[wasm_bindgen_test] 23 | fn construct_writable() { 24 | let options = Object::default(); 25 | ffi::Writable::new(Some(options)); 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /src/hasher.rs: -------------------------------------------------------------------------------- 1 | use rustup_toolchain_manifest::HashValue; 2 | 3 | #[derive(Debug, Default)] 4 | pub struct Blake3 { 5 | inner: blake3::Hasher, 6 | } 7 | 8 | impl std::hash::Hasher for Blake3 { 9 | fn finish(&self) -> u64 { 10 | let mut xof = self.inner.finalize_xof(); 11 | let mut bytes = [0u8; 8]; 12 | xof.fill(&mut bytes); 13 | u64::from_le_bytes(bytes) 14 | } 15 | 16 | fn write(&mut self, bytes: &[u8]) { 17 | self.inner.update(bytes); 18 | } 19 | } 20 | 21 | impl Blake3 { 22 | pub fn inner(&self) -> &blake3::Hasher { 23 | &self.inner 24 | } 25 | 26 | pub fn hash_value(&self) -> HashValue { 27 | let hash = self.inner.finalize(); 28 | HashValue::from_bytes(&hash.as_bytes()[..]) 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /src/actions/io.rs: -------------------------------------------------------------------------------- 1 | use crate::node::path::Path; 2 | use js_sys::JsString; 3 | use wasm_bindgen::JsValue; 4 | 5 | pub async fn which<T: Into<JsString>>(tool: T, check: bool) -> Result<Path, JsValue> { 6 | let path = ffi::which(&tool.into(), Some(check)).await?; 7 | let path: JsString = path.into(); 8 | Ok(Path::from(path)) 9 | } 10 | 11 | pub async fn rm_rf<P: Into<JsString>>(path: P) -> Result<(), JsValue> { 12 | let path = path.into(); 13 | ffi::rm_rf(&path).await?; 14 | Ok(()) 15 | } 16 | 17 | pub mod ffi { 18 | use js_sys::JsString; 19 | use wasm_bindgen::prelude::*; 20 | 21 | #[wasm_bindgen(module = "@actions/io")] 22 | extern "C" { 23 | #[wasm_bindgen(js_name = "which", catch)] 24 | pub async fn which(tool: &JsString, check: Option<bool>) -> Result<JsValue, JsValue>; 25 | 26 | #[wasm_bindgen(js_name = "rmRF", catch)] 27 | pub async fn rm_rf(path: &JsString) -> Result<(), JsValue>; 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /src/system/sleep.rs: -------------------------------------------------------------------------------- 1 | use std::time::Duration; 2 | 3 | pub async fn sleep(duration: &Duration) { 4 | use wasm_bindgen::closure::Closure; 5 | 6 | let (sender, receiver) = futures::channel::oneshot::channel(); 7 | let mut sender = Some(sender); 8 | let callback: Closure<dyn FnMut()> = Closure::new(move || { 9 | if let Some(s) = sender.take() { 10 | s.send(()).expect("Unable to send wake-up"); 11 | } 12 | }); 13 | #[allow(clippy::cast_precision_loss)] 14 | let millis = (duration.as_micros() as f64) / 1000.0; 15 | let millis: js_sys::Number = millis.into(); 16 | ffi::set_timeout(callback.as_ref(), millis); 17 | receiver.await.expect("Failed to receive wake-up"); 18 | } 19 | 20 | mod ffi { 21 | use js_sys::Number; 22 | use wasm_bindgen::prelude::*; 23 | 24 | #[wasm_bindgen] 25 | extern "C" { 26 | #[wasm_bindgen(js_name = "setTimeout")] 27 | pub fn set_timeout(callback: &JsValue, milliseconds: Number); 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /doc/NOTES.md: -------------------------------------------------------------------------------- 1 | ## How does `rustup` process package manifest files?
2 | - https://github.com/rust-lang/rustup/blob/b3d53252ec06635da4b8bd434a82e2e8b6480485/src/dist/component/package.rs#L82 3 | 4 | ## How are crate files named in the cache? 5 | - https://github.com/rust-lang/cargo/blob/f6e737b1e3386adb89333bf06a01f68a91ac5306/src/cargo/sources/registry/download.rs#L20 6 | 7 | ## How do registries get named in the cache? 8 | - https://github.com/rust-lang/cargo/blob/f6e737b1e3386adb89333bf06a01f68a91ac5306/src/cargo/core/source/source_id.rs#L560 9 | - https://github.com/rust-lang/cargo/blob/f6e737b1e3386adb89333bf06a01f68a91ac5306/src/cargo/sources/registry/mod.rs#L549 10 | 11 | ## Issues with `rustup` and `cargo` acting as package managers 12 | - https://kornel.ski/rust-2019 13 | 14 | ## How to effectively clean target folder for CI caching 15 | - https://github.com/rust-lang/cargo/issues/5885 16 | 17 | ## Layout of cargo build folder 18 | - https://github.com/rust-lang/cargo/blob/master/src/cargo/core/compiler/layout.rs 19 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 Peter Evans 4 | Copyright (c) 2022 Francis Russell 5 | 6 | Permission is hereby granted, free of charge, to any person obtaining a copy 7 | of this software and associated documentation files (the "Software"), to deal 8 | in the Software without restriction, including without limitation the rights 9 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | copies of the Software, and to permit persons to whom the Software is 11 | furnished to do so, subject to the following conditions: 12 | 13 | The above copyright notice and this permission notice shall be included in all 14 | copies or substantial portions of the Software. 15 | 16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 22 | SOFTWARE. 23 | -------------------------------------------------------------------------------- /doc/WASM_SIZE.md: -------------------------------------------------------------------------------- 1 | Running `npm run build` followed by `du -b dist/lib_bg.wasm`. All builds are 2 | release and have `wasm-opt` applied to them. 3 | 4 | Commit: `6ac38a64a969d92a5187bc1754b4c52c700eed31` 5 | No link-time optimization: 1200380 6 | `lto = true`: 1170489 7 | 8 | All following builds have `lto = true` in `Cargo.toml`. 9 | 10 | Commit: `6ac38a64a969d92a5187bc1754b4c52c700eed31` 11 | `opt-level = "s"`: 946514 12 | `opt-level = "z"`: 925461 13 | 14 | Setting `opt-level = "s"` due to concerns about potential speed costs. 
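For reference, the release profile these size measurements assume is the one configured in `Cargo.toml` later in this snapshot; a minimal sketch of it:

```toml
# Release profile used for the wasm-size measurements above:
# link-time optimization enabled, optimizing for size over speed.
[profile.release]
lto = true
opt-level = "s"
```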
15 | 16 | Commit: `d0b4d8a8423f2cd1c934a9acbb63874e2433db39` 17 | Monomorphizing `dir_tree::apply_visitor_impl`: 940065 18 | 19 | Commit: `2fd60b777d80275e732ae6f9cfa85024d2739ea4` 20 | Switch to postcard for dependency file (cache group list) format: 938404 21 | 22 | Commit: `56c6bcdae9f951c4f23e57ed23481de18ec7adad` 23 | Switch to postcard for serialization of cache between phases: 908558 24 | 25 | Commit: `0015270238008c49b7ad1dd8b49568fd52319159` 26 | Switch to no-`regex` `simple-path-match`: 748528 27 | 28 | Commit: `eccc9c747514c5b4077e4a61c100d2583dfc412e` 29 | Removing all `regex` use: 747994 30 | 31 | Keeping existing code as the saving is barely within expected variation. 32 | -------------------------------------------------------------------------------- /src/noop_stream.rs: -------------------------------------------------------------------------------- 1 | use crate::node::stream; 2 | use js_sys::{Function, Object}; 3 | use wasm_bindgen::closure::Closure; 4 | use wasm_bindgen::JsValue; 5 | 6 | pub struct Sink { 7 | _write: Closure<dyn FnMut(JsValue, JsValue, JsValue)>, 8 | writable: stream::ffi::Writable, 9 | } 10 | 11 | impl Default for Sink { 12 | fn default() -> Sink { 13 | let write = Closure::new(|_chunk: JsValue, _encoding: JsValue, callback: JsValue| { 14 | let callback: Function = callback.into(); 15 | let this = JsValue::null(); 16 | let error = JsValue::null(); 17 | callback.call1(&this, &error).expect("Error calling write callback"); 18 | }); 19 | let options = js_sys::Map::new(); 20 | options.set(&"write".into(), write.as_ref()); 21 | let options = Object::from_entries(&options).expect("Failed to convert options map to object"); 22 | let writable = stream::ffi::Writable::new(Some(options)); 23 | Sink { 24 | _write: write, 25 | writable, 26 | } 27 | } 28 | } 29 | 30 | impl AsRef<JsValue> for Sink { 31 | fn as_ref(&self) -> &JsValue { 32 | self.writable.as_ref() 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /src/cross.rs: -------------------------------------------------------------------------------- 1 | use crate::actions::io; 2 | use crate::node::path::Path; 3 | use crate::{debug, info, input_manager, Cargo, Error}; 4 | 5 | #[derive(Clone, Debug)] 6 | pub struct Cross { 7 | path: Path, 8 | } 9 | 10 | impl Cross { 11 | pub async fn get() -> Result<Cross, Error> { 12 | io::which("cross", true) 13 | .await 14 | .map(|path| Cross { path }) 15 | .map_err(Error::Js) 16 | } 17 | 18 | pub async fn get_or_install(input_manager: &input_manager::Manager) -> Result<Cross, Error> { 19 | match Self::get().await { 20 | Ok(cross) => Ok(cross), 21 | Err(e) => { 22 | info!("Unable to find cross.
Installing it now..."); 23 | debug!("Attempting to locate cross returned this error: {}", e); 24 | Self::install(input_manager).await 25 | } 26 | } 27 | } 28 | 29 | async fn install(input_manager: &input_manager::Manager) -> Result<Cross, Error> { 30 | let mut cargo = Cargo::from_environment().await?; 31 | let args = ["cross"]; 32 | cargo.run(None, "install", args, input_manager).await?; 33 | Self::get().await 34 | } 35 | 36 | pub fn get_path(&self) -> Path { 37 | self.path.clone() 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | #![warn(clippy::pedantic)] 2 | #![allow( 3 | clippy::missing_errors_doc, 4 | clippy::must_use_candidate, 5 | clippy::uninlined_format_args, 6 | clippy::missing_panics_doc 7 | )] 8 | 9 | mod access_times; 10 | mod action_paths; 11 | mod actions; 12 | mod agnostic_path; 13 | mod cache_cargo_home; 14 | mod cache_key_builder; 15 | mod cargo; 16 | mod cargo_hooks; 17 | mod cargo_lock_hashing; 18 | mod cross; 19 | mod delta; 20 | mod dir_tree; 21 | mod error; 22 | mod fingerprinting; 23 | mod hasher; 24 | mod input_manager; 25 | mod job; 26 | mod node; 27 | mod nonce; 28 | mod noop_stream; 29 | mod package_manifest; 30 | mod run; 31 | mod rustup; 32 | mod safe_encoding; 33 | mod system; 34 | mod toolchain; 35 | mod utils; 36 | 37 | use crate::cargo::Cargo; 38 | use crate::error::Error; 39 | use wasm_bindgen::prelude::{wasm_bindgen, JsValue}; 40 | 41 | #[wasm_bindgen(start)] 42 | pub async fn start() -> Result<(), JsValue> { 43 | use crate::actions::core; 44 | 45 | // Perhaps we need a hook that calls core::set_failed() on panic. 46 | // This would make sure the action outputs an error command for 47 | // the runner and returns exit code 1.
48 | utils::set_panic_hook(); 49 | 50 | if let Err(e) = run::run().await { 51 | core::set_failed(e.to_string()); 52 | } 53 | Ok(()) 54 | } 55 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Release 2 | 3 | on: 4 | push: 5 | tags: 6 | - 'v*.*.*' 7 | 8 | jobs: 9 | create-release: 10 | name: Create release 11 | runs-on: ubuntu-latest 12 | environment: release 13 | steps: 14 | - uses: actions/checkout@v3 15 | with: 16 | fetch-depth: 0 17 | - uses: actions/setup-node@v3 18 | with: 19 | node-version: 16.x 20 | - run: npm ci 21 | - run: npm run build 22 | - run: npm run test 23 | - run: npm run generate-licenses 24 | - run: cargo install cargo-about 25 | - run: ./scripts/generate-rust-licenses 26 | - run: git add -f dist LICENSE-3RD-PARTY-JAVASCRIPT LICENSE-3RD-PARTY-RUST 27 | - run: git rm -rf .github/workflows 28 | - run: git config --global user.name "$GITHUB_ACTOR" 29 | - run: git config --global user.email "$GITHUB_ACTOR@users.noreply.github.com" 30 | - run: git config --unset "http.https://github.com/.extraheader" 31 | - run: git commit -m "Release $GITHUB_REF_NAME" 32 | - run: git tag -f "$GITHUB_REF_NAME" 33 | - run: git remote add release https://FrancisRussell:${{ secrets.PERSONAL_ACCESS_TOKEN }}@github.com/FrancisRussell/ferrous-actions.git 34 | - run: git push -f release "HEAD:refs/heads/release/$GITHUB_REF_NAME" 35 | - run: git push -f release "refs/tags/$GITHUB_REF_NAME" 36 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "ferrous-actions", 3 | "version": "0.1.0-beta.2", 4 | "private": true, 5 | "description": "GitHub actions for Rust written in Rust and compiled to WebAssembly", 6 | "scripts": { 7 | "build": "wasm-pack build -t nodejs --out-name lib && ncc build --minify pkg/lib.js -o dist && cp phase-wrappers/* dist", 8 | "format": "cargo fmt", 9 | "format-check": "cargo fmt -- --check", 10 | "lint": "cargo clippy", 11 | "test": "wasm-pack test --node", 12 | "generate-licenses": "npm-license-text . 
LICENSE-3RD-PARTY-JAVASCRIPT || echo 'Ignoring error from npm-license-text'" 13 | }, 14 | "repository": { 15 | "type": "git", 16 | "url": "git@github.com:FrancisRussell/github-rust-actions.git" 17 | }, 18 | "keywords": [ 19 | "actions", 20 | "rust", 21 | "github", 22 | "wasm" 23 | ], 24 | "author": "Francis Russell", 25 | "license": "MIT", 26 | "bugs": { 27 | "url": "https://github.com/FrancisRussell/github-rust-actions/issues" 28 | }, 29 | "homepage": "https://github.com/FrancisRussell/github-rust-actions/", 30 | "dependencies": { 31 | "@actions/cache": "^3.1.1", 32 | "@actions/core": "^1.10.0", 33 | "@actions/exec": "^1.1.1", 34 | "@actions/io": "^1.1.2", 35 | "@actions/tool-cache": "^2.0.1" 36 | }, 37 | "devDependencies": { 38 | "@vercel/ncc": "^0.36.0", 39 | "npm-license-text": "^1.0.3", 40 | "wasm-pack": "^0.10.1" 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /src/cargo_hooks/hook.rs: -------------------------------------------------------------------------------- 1 | use crate::actions::exec::Command; 2 | use async_trait::async_trait; 3 | use std::borrow::Cow; 4 | 5 | #[async_trait(?Send)] 6 | pub trait Hook { 7 | fn additional_cargo_options(&self) -> Vec<Cow<'static, str>> { 8 | Vec::new() 9 | } 10 | 11 | fn modify_command(&self, command: &mut Command) { 12 | let _ = command; 13 | } 14 | 15 | async fn succeeded(&mut self) {} 16 | async fn failed(&mut self) {} 17 | } 18 | 19 | #[derive(Default)] 20 | pub struct Composite<'a> { 21 | hooks: Vec<Box<dyn Hook + 'a>>, 22 | } 23 | 24 | impl<'a> Composite<'a> { 25 | pub fn push<H: Hook + 'a>(&mut self, hook: H) { 26 | self.hooks.push(Box::new(hook)); 27 | } 28 | } 29 | 30 | #[async_trait(?Send)] 31 | impl<'a> Hook for Composite<'a> { 32 | fn additional_cargo_options(&self) -> Vec<Cow<'static, str>> { 33 | let mut result = Vec::new(); 34 | for hook in &self.hooks { 35 | result.extend(hook.additional_cargo_options()); 36 | } 37 | result 38 | } 39 | 40 | fn modify_command(&self, command: &mut Command) { 41 | for hook in &self.hooks { 42 | hook.modify_command(command); 43 | } 44 | } 45 | 46 | async fn succeeded(&mut self) { 47 | for hook in self.hooks.iter_mut().rev() { 48 | hook.succeeded().await; 49 | } 50 | } 51 | 52 | async fn failed(&mut self) { 53 | for hook in self.hooks.iter_mut().rev() { 54 | hook.failed().await; 55 | } 56 | } 57 | } 58 | 59 | #[derive(Clone, Default, Debug)] 60 | pub struct Null {} 61 | 62 | impl Hook for Null {} 63 | -------------------------------------------------------------------------------- /src/cargo_lock_hashing.rs: -------------------------------------------------------------------------------- 1 | use crate::dir_tree::{self, Ignores}; 2 | use crate::node::path::Path; 3 | use crate::{node, Error}; 4 | use async_trait::async_trait; 5 | 6 | #[derive(Debug)] 7 | struct FindFilesVisitor { 8 | name: String, 9 | paths: Vec<Path>, 10 | } 11 | 12 | #[async_trait(?Send)] 13 | impl dir_tree::Visitor for FindFilesVisitor { 14 | async fn enter_folder(&mut self, _: &Path) -> Result<(), Error> { 15 | Ok(()) 16 | } 17 | 18 | async fn exit_folder(&mut self, _: &Path) -> Result<(), Error> { 19 | Ok(()) 20 | } 21 | 22 | async fn visit_entry(&mut self, path: &Path, is_file: bool) -> Result<(), Error> { 23 | if is_file && path.file_name() == self.name { 24 | self.paths.push(path.clone()); 25 | } 26 | Ok(()) 27 | } 28 | } 29 | 30 | #[derive(Clone, Debug)] 31 | pub struct HashInfo { 32 | pub num_files: usize, 33 | pub bytes: [u8; 32], 34 | } 35 | 36 | pub async fn hash_cargo_lock_files(path: &Path) -> Result<HashInfo, Error> { 37 | let mut visitor = FindFilesVisitor
{ 38 | name: "Cargo.lock".into(), 39 | paths: Vec::new(), 40 | }; 41 | let ignores = Ignores::default(); 42 | dir_tree::apply_visitor(path, &ignores, &mut visitor).await?; 43 | let mut paths: Vec<_> = visitor.paths.iter().map(Path::to_string).collect(); 44 | // We want the paths in a deterministic order 45 | paths.sort(); 46 | let mut hasher = blake3::Hasher::new(); 47 | for path in &paths { 48 | let file_content = node::fs::read_file(path.as_str()).await?; 49 | hasher.update(&file_content); 50 | } 51 | let result = HashInfo { 52 | num_files: paths.len(), 53 | bytes: hasher.finalize().into(), 54 | }; 55 | Ok(result) 56 | } 57 | -------------------------------------------------------------------------------- /src/package_manifest.rs: -------------------------------------------------------------------------------- 1 | use crate::node::path::Path; 2 | use std::str::FromStr; 3 | use thiserror::Error; 4 | 5 | #[derive(Debug, Clone, Copy)] 6 | pub enum EntryType { 7 | File, 8 | Directory, 9 | } 10 | 11 | impl FromStr for EntryType { 12 | type Err = ParseError; 13 | 14 | fn from_str(string: &str) -> Result<Self, Self::Err> { 15 | let result = match string { 16 | "file" => EntryType::File, 17 | "dir" => EntryType::Directory, 18 | _ => return Err(ParseError::UnknownEntryType(string.to_string())), 19 | }; 20 | Ok(result) 21 | } 22 | } 23 | 24 | #[derive(Debug, Clone, Error)] 25 | pub enum ParseError { 26 | #[error("Unknown entry type: {0}")] 27 | UnknownEntryType(String), 28 | 29 | #[error("Malformed line: {0}")] 30 | MalformedLine(String), 31 | } 32 | 33 | #[derive(Debug, Clone)] 34 | pub struct PackageManifest { 35 | entries: Vec<(EntryType, Path)>, 36 | } 37 | 38 | impl PackageManifest { 39 | pub fn iter(&self) -> std::slice::Iter<'_, (EntryType, Path)> { 40 | self.entries.iter() 41 | } 42 | } 43 | 44 | impl FromStr for PackageManifest { 45 | type Err = ParseError; 46 | 47 | fn from_str(string: &str) -> Result<Self, Self::Err> { 48 | let mut entries = Vec::new(); 49 | for line in string.lines() { 50 | let split: Vec<_> = line.splitn(2, ':').collect(); 51 | if split.len() != 2 { 52 | return Err(ParseError::MalformedLine(line.to_string())); 53 | } 54 | let entry_type = EntryType::from_str(split[0])?; 55 | let path = Path::from(split[1]); 56 | entries.push((entry_type, path)); 57 | } 58 | Ok(PackageManifest { entries }) 59 | } 60 | } 61 | -------------------------------------------------------------------------------- /src/agnostic_path.rs: -------------------------------------------------------------------------------- 1 | use crate::node; 2 | use crate::node::path::Path; 3 | use serde::{Deserialize, Serialize}; 4 | 5 | #[derive(Clone, Debug, Deserialize, Eq, Hash, Ord, PartialEq, PartialOrd, Serialize)] 6 | pub struct AgnosticPath { 7 | components: Vec<String>, 8 | trailing_separator: bool, 9 | } 10 | 11 | impl AgnosticPath { 12 | fn to_string_with_separator(&self, separator: &str) -> String { 13 | use itertools::Itertools as _; 14 | let trailing = self.trailing_separator.then_some(""); 15 | self.components 16 | .iter() 17 | .map(String::as_str) 18 | .chain(trailing.into_iter()) 19 | .join(separator) 20 | } 21 | } 22 | 23 | impl From<&Path> for AgnosticPath { 24 | fn from(os_path: &Path) -> AgnosticPath { 25 | let os_path = os_path.to_string(); 26 | let separator = node::path::separator(); 27 | let (os_path, trailing_separator) = if let Some(stripped) = os_path.strip_suffix(separator.as_ref()) { 28 | (stripped, true) 29 | } else { 30 | (os_path.as_str(), false) 31 | }; 32 | let components =
os_path.split(separator.as_ref()).map(str::to_string).collect(); 33 | AgnosticPath { 34 | components, 35 | trailing_separator, 36 | } 37 | } 38 | } 39 | 40 | impl From<&AgnosticPath> for Path { 41 | fn from(path: &AgnosticPath) -> Path { 42 | let os_path = path.to_string_with_separator(&node::path::separator()); 43 | Path::from(&os_path) 44 | } 45 | } 46 | 47 | impl std::fmt::Display for AgnosticPath { 48 | fn fmt(&self, formatter: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> { 49 | let string = self.to_string_with_separator("/"); 50 | string.fmt(formatter) 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /src/job.rs: -------------------------------------------------------------------------------- 1 | use crate::actions::core; 2 | use crate::Error; 3 | use serde::de::DeserializeOwned; 4 | use std::collections::BTreeMap; 5 | 6 | const JOB_INPUT: &str = "internal-use-github-job"; 7 | const MATRIX_INPUT: &str = "internal-use-matrix"; 8 | const WORKFLOW_INPUT: &str = "internal-use-github-workflow"; 9 | 10 | #[derive(Clone, Debug, Hash)] 11 | pub struct Job { 12 | workflow: String, 13 | job_id: String, 14 | matrix_properties: Option<BTreeMap<String, String>>, 15 | } 16 | 17 | impl Job { 18 | fn get_json_input<T>(name: &str) -> Result<T, Error> 19 | where 20 | T: DeserializeOwned, 21 | { 22 | let input = core::Input::from(name).get_required()?; 23 | Ok(serde_json::from_str(&input)?) 24 | } 25 | 26 | pub fn from_env() -> Result<Job, Error> { 27 | let workflow = Self::get_json_input(WORKFLOW_INPUT)?; 28 | let job_id = Self::get_json_input(JOB_INPUT)?; 29 | let matrix_properties = Self::get_json_input(MATRIX_INPUT)?; 30 | let result = Job { 31 | workflow, 32 | job_id, 33 | matrix_properties, 34 | }; 35 | Ok(result) 36 | } 37 | 38 | pub fn get_workflow(&self) -> &str { 39 | &self.workflow 40 | } 41 | 42 | pub fn get_job_id(&self) -> &str { 43 | &self.job_id 44 | } 45 | 46 | pub fn matrix_properties_as_string(&self) -> Option<String> { 47 | // Note: This function does not attempt to guarantee that this string is 48 | // deterministic. At the time of writing it is though, regardless of whether 49 | // serde_json's "preserve_order" feature is on or off. 50 | self.matrix_properties 51 | .as_ref() 52 | .map(|p| serde_json::to_string(p).expect("Failed to serialize a map of String to String to JSON")) 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /src/node/os.rs: -------------------------------------------------------------------------------- 1 | use super::path; 2 | use js_sys::JsString; 3 | use lazy_static::lazy_static; 4 | use std::borrow::Cow; 5 | 6 | lazy_static!
{ 7 | static ref EOL: String = { 8 | use wasm_bindgen::JsCast as _; 9 | ffi::EOL 10 | .clone() 11 | .dyn_into::() 12 | .expect("eol wasn't a string") 13 | .into() 14 | }; 15 | } 16 | 17 | pub fn eol() -> Cow<'static, str> { 18 | EOL.as_str().into() 19 | } 20 | 21 | pub fn platform() -> String { 22 | ffi::platform().into() 23 | } 24 | 25 | pub fn machine() -> String { 26 | ffi::machine().into() 27 | } 28 | 29 | pub fn arch() -> String { 30 | ffi::arch().into() 31 | } 32 | 33 | pub fn homedir() -> path::Path { 34 | path::Path::from(ffi::homedir()) 35 | } 36 | 37 | pub fn temp_dir() -> path::Path { 38 | path::Path::from(ffi::tmpdir()) 39 | } 40 | 41 | pub mod ffi { 42 | use js_sys::{JsString, Object}; 43 | use wasm_bindgen::prelude::*; 44 | 45 | #[wasm_bindgen(module = "os")] 46 | extern "C" { 47 | #[wasm_bindgen(js_name = "EOL")] 48 | pub static EOL: Object; 49 | 50 | pub fn arch() -> JsString; 51 | pub fn homedir() -> JsString; 52 | pub fn machine() -> JsString; 53 | pub fn platform() -> JsString; 54 | pub fn tmpdir() -> JsString; 55 | } 56 | } 57 | 58 | #[cfg(test)] 59 | mod test { 60 | use wasm_bindgen_test::wasm_bindgen_test; 61 | 62 | #[wasm_bindgen_test] 63 | fn invoke_arch() { 64 | super::arch(); 65 | } 66 | 67 | #[wasm_bindgen_test] 68 | fn invoke_homedir() { 69 | super::homedir(); 70 | } 71 | 72 | #[wasm_bindgen_test] 73 | fn invoke_machine() { 74 | super::machine(); 75 | } 76 | 77 | #[wasm_bindgen_test] 78 | fn invoke_platform() { 79 | super::platform(); 80 | } 81 | 82 | #[wasm_bindgen_test] 83 | fn invoke_temp_dir() { 84 | super::temp_dir(); 85 | } 86 | } 87 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "ferrous-actions" 3 | description = "GitHub actions for Rust written in Rust and compiled to WebAssembly" 4 | version = "0.1.0-beta.2" 5 | repository = "https://github.com/FrancisRussell/ferrous-actions-dev" 6 | license = "MIT" 7 | edition = "2021" 8 | keywords = ["github", "webassembly", "ci", "javascript"] 9 | 10 | [lib] 11 | crate-type = ["cdylib"] 12 | 13 | [features] 14 | default = ["console_error_panic_hook"] 15 | 16 | [dependencies] 17 | async-recursion = "1.0.0" 18 | async-trait = "0.1.59" 19 | base64 = "0.21.0" 20 | blake3 = { version = "1.3.3", default-features = false } 21 | cargo_metadata = "0.15.1" 22 | chrono = { version = "0.4.23", default-features = false, features = ["wasmbind", "std", "clock"] } 23 | console_error_panic_hook = { version = "0.1.7", optional = true } 24 | derivative = "2.2.0" 25 | futures = { version = "0.3.28", default-features = false, features = [ "std" ]} 26 | humantime = "2.1.0" 27 | itertools = "0.10.5" 28 | js-sys = "0.3.55" 29 | lazy_static = "1.4.0" 30 | parking_lot = "0.12.1" 31 | postcard = { version = "1.0.2", default-features = false, features = ["use-std"] } 32 | regex = { version = "1.7.0", default-features = false, features = [ "std" ] } 33 | serde_json = "1.0.87" 34 | serde = { version = "1.0", features = [ "derive" ] } 35 | shlex = "1.1.0" 36 | strum = { version = "0.24.1", features = ["derive"] } 37 | target-lexicon = "0.12.5" 38 | thiserror = "1.0.23" 39 | wasm-bindgen = "0.2.78" 40 | wasm-bindgen-futures = "0.4.33" 41 | wasm-bindgen-test = "0.3" 42 | 43 | [dependencies.rustup-toolchain-manifest] 44 | git = "https://github.com/FrancisRussell/rustup-toolchain-manifest.git" 45 | rev = "ecc0cd3c43b4dd1ccfb384830e18b86a3aafbc83" 46 | version = "0.2.0" 47 | 48 | 
[dependencies.simple-path-match] 49 | git = "https://github.com/FrancisRussell/simple-path-match.git" 50 | rev = "cbd26caad605a61861c4af754ada38166790b593" 51 | version = "0.2.0" 52 | 53 | [dev-dependencies] 54 | wasm-bindgen-test = "0.3" 55 | 56 | [profile.release] 57 | lto = true 58 | opt-level = "s" 59 | -------------------------------------------------------------------------------- /action.yml: -------------------------------------------------------------------------------- 1 | name: 'Ferrous Actions' 2 | author: 'Francis Russell' 3 | description: 'GitHub Rust toolchain actions written in Rust' 4 | inputs: 5 | command: 6 | description: 'The command to run' 7 | required: true 8 | toolchain: 9 | description: 'The toolchain to install e.g. nightly' 10 | required: false 11 | components: 12 | description: 'The components to install e.g. rustc' 13 | required: false 14 | profile: 15 | description: 'A profile that selects components (minimal, default, complete)' 16 | required: false 17 | target: 18 | description: 'Targets to install e.g. x86_64-unknown-linux-gnu' 19 | required: false 20 | default: 21 | description: 'Whether this toolchain should be set as the default' 22 | required: false 23 | args: 24 | description: 'Additional arguments to supply to the command' 25 | required: false 26 | annotations: 27 | description: 'Should GitHub annotations be generated for this command' 28 | required: false 29 | cache-only: 30 | description: 'Whitespace separated list of items to cache out of indices, crates, git-repos' 31 | required: false 32 | min-recache-indices: 33 | description: 'Minimum amount of time an index must be out of date before recaching it' 34 | required: false 35 | min-recache-crates: 36 | description: 'Minimum amount of time a collection of cached crates must be out of date before recaching' 37 | required: false 38 | min-recache-git-repos: 39 | description: 'Minimum amount of time cached Git repositories must be out of date before recaching' 40 | use-cross: 41 | description: 'Whether cross should be used in place of cargo' 42 | required: false 43 | internal-use-github-workflow: 44 | description: 'DO NOT SET THIS INPUT - used to detect the workflow name' 45 | default: ${{ toJSON(github.workflow) }} 46 | internal-use-github-job: 47 | description: 'DO NOT SET THIS INPUT - used to detect the job ID' 48 | default: ${{ toJSON(github.job) }} 49 | internal-use-matrix: 50 | description: 'DO NOT SET THIS INPUT - used to identify matrix properties' 51 | default: ${{ toJSON(matrix) }} 52 | runs: 53 | using: 'node16' 54 | main: 'dist/main.js' 55 | post: 'dist/post.js' 56 | post-if: 'success()' 57 | branding: 58 | icon: 'git-pull-request' 59 | color: 'green' 60 | -------------------------------------------------------------------------------- /src/error.rs: -------------------------------------------------------------------------------- 1 | use crate::package_manifest; 2 | use thiserror::Error; 3 | use wasm_bindgen::JsValue; 4 | 5 | #[derive(Debug, Error)] 6 | pub enum Error { 7 | #[error("{0:?}")] 8 | Js(JsValue), 9 | 10 | #[error("Unable to parse option `{0}`, which was supplied as `{1}`")] 11 | OptionParseError(String, String), 12 | 13 | #[error("Unable to parse as an argument list: `{0}`")] 14 | ArgumentsParseError(String), 15 | 16 | #[error("Unknown command: {0}")] 17 | UnknownCommand(String), 18 | 19 | #[error("Toolchain parse error: {0}")] 20 | ToolchainParse(#[from] rustup_toolchain_manifest::toolchain::ParseError), 21 | 22 | #[error("Manifest file not UTF-8")] 23 | ManifestNotUtf8, 24 | 25 
| #[error("Manifest error: {0}")] 26 | ManifestError(#[from] rustup_toolchain_manifest::Error), 27 | 28 | #[error("Unsupported platform: {0}")] 29 | UnsupportedPlatform(String), 30 | 31 | #[error("Unable to parse package manifest: {0}")] 32 | PackageManifest(#[from] package_manifest::ParseError), 33 | 34 | #[error("JSON serialization/deserialization error: {0}")] 35 | SerdeJson(#[from] serde_json::Error), 36 | 37 | #[error("Unable to parse item to cache: {0}")] 38 | ParseCacheableItem(String), 39 | 40 | #[error("Unable to parse cross-plaform-sharing: {0}")] 41 | ParseCrossPlatformSharing(String), 42 | 43 | #[error("Unable to parse duration: {0}")] 44 | DurationParse(#[from] humantime::DurationError), 45 | 46 | #[error("Out of range duration: {0}")] 47 | OutOfRangeDuration(#[from] chrono::OutOfRangeError), 48 | 49 | #[error("Path does not exist: {0}")] 50 | PathDoesNotExist(String), 51 | 52 | #[error("Error during path match construction: {0}")] 53 | PathMatchPatternError(simple_path_match::Error), 54 | 55 | #[error("Required input was not supplied: {0}")] 56 | MissingInput(String), 57 | 58 | #[error("Toolchain install backend does not support {0} functionality")] 59 | ToolchainInstallFunctionality(String), 60 | 61 | #[error("Postcard serialization/deserialization error: {0}")] 62 | SerdePostcard(#[from] postcard::Error), 63 | } 64 | 65 | impl From for Error { 66 | fn from(value: JsValue) -> Error { 67 | Error::Js(value) 68 | } 69 | } 70 | 71 | impl From for Error { 72 | fn from(value: simple_path_match::Error) -> Error { 73 | Error::PathMatchPatternError(value) 74 | } 75 | } 76 | -------------------------------------------------------------------------------- /src/node/process.rs: -------------------------------------------------------------------------------- 1 | use super::path::{self, Path}; 2 | use std::collections::HashMap; 3 | use wasm_bindgen::JsValue; 4 | 5 | pub fn cwd() -> path::Path { 6 | path::Path::from(ffi::cwd()) 7 | } 8 | 9 | pub fn get_env() -> HashMap { 10 | use js_sys::JsString; 11 | use wasm_bindgen::JsCast as _; 12 | 13 | let env = &ffi::ENV; 14 | let env = js_sys::Object::entries( 15 | env.dyn_ref::() 16 | .expect("get_env didn't return an object"), 17 | ) 18 | .iter() 19 | .map(|o| o.dyn_into::().expect("env entry was not an array")) 20 | .map(|a| (JsString::from(a.at(0)), JsString::from(a.at(1)))) 21 | .map(|(k, v)| (String::from(k), String::from(v))) 22 | .collect(); 23 | env 24 | } 25 | 26 | pub fn set_var(name: &str, value: &str) { 27 | use js_sys::{JsString, Map, Object}; 28 | 29 | let name: JsString = name.into(); 30 | let value: JsString = value.into(); 31 | let attributes = Map::new(); 32 | attributes.set(&"writable".into(), &true.into()); 33 | attributes.set(&"enumerable".into(), &true.into()); 34 | attributes.set(&"configurable".into(), &true.into()); 35 | attributes.set(&"value".into(), value.as_ref()); 36 | let attributes = Object::from_entries(&attributes).expect("Failed to convert attributes map to object"); 37 | Object::define_property(&ffi::ENV, &name, &attributes); 38 | } 39 | 40 | pub fn remove_var(name: &str) { 41 | js_sys::Reflect::delete_property(&ffi::ENV, &name.into()).expect("process.env wasn't an object"); 42 | } 43 | 44 | pub fn chdir>(path: P) -> Result<(), JsValue> { 45 | let path = path.into(); 46 | ffi::chdir(&path.to_js_string())?; 47 | Ok(()) 48 | } 49 | 50 | pub mod ffi { 51 | use js_sys::{JsString, Object}; 52 | use wasm_bindgen::prelude::*; 53 | 54 | #[wasm_bindgen(module = "process")] 55 | extern "C" { 56 | #[wasm_bindgen(js_name = 
"env")] 57 | pub static ENV: Object; 58 | 59 | pub fn cwd() -> JsString; 60 | 61 | #[wasm_bindgen(catch)] 62 | pub fn chdir(path: &JsString) -> Result; 63 | } 64 | } 65 | 66 | #[cfg(test)] 67 | mod test { 68 | use wasm_bindgen_test::wasm_bindgen_test; 69 | 70 | #[wasm_bindgen_test] 71 | fn invoke_get_env() { 72 | super::get_env(); 73 | } 74 | 75 | #[wasm_bindgen_test] 76 | async fn invoke_cwd() { 77 | let cwd = super::cwd(); 78 | assert!(cwd.exists().await); 79 | } 80 | } 81 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | on: [push] 3 | 4 | env: 5 | CARGO_TERM_COLOR: always 6 | 7 | jobs: 8 | build: 9 | name: Build and unit tests 10 | runs-on: ubuntu-latest 11 | steps: 12 | - uses: actions/checkout@v3 13 | - uses: actions/cache@v3.2.3 14 | with: 15 | path: | 16 | ~/.cargo/registry/index 17 | ~/.cargo/registry/cache 18 | ~/.cargo/git/db 19 | target 20 | key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }} 21 | - uses: actions/setup-node@v3 22 | with: 23 | node-version: 16.x 24 | - run: npm ci 25 | - run: npm run build 26 | - run: npm run test 27 | - uses: actions/upload-artifact@v3 28 | with: 29 | name: built-action 30 | path: | 31 | action.yml 32 | dist 33 | 34 | test: 35 | name: Functionality tests 36 | needs: build 37 | strategy: 38 | matrix: 39 | platform: [ubuntu-latest, macos-latest, windows-latest] 40 | runs-on: ${{ matrix.platform }} 41 | steps: 42 | - uses: actions/checkout@v3 43 | - uses: actions/download-artifact@v3 44 | with: 45 | name: built-action 46 | - id: restore-cargo-cache 47 | name: Cargo cache 48 | uses: ./ 49 | with: 50 | command: cache 51 | cache-only: indices crates git-repos 52 | min-recache-indices: 1h 53 | - id: install-toolchain 54 | uses: ./ 55 | name: Install toolchain 56 | with: 57 | command: install-rustup 58 | toolchain: nightly 59 | profile: minimal 60 | target: wasm32-unknown-unknown 61 | components: clippy 62 | default: true 63 | - id: install-grcov 64 | name: Install grcov 65 | uses: ./ 66 | with: 67 | command: cargo install 68 | args: grcov 69 | - id: cargo-build 70 | name: Cargo build (with cross) 71 | uses: ./ 72 | with: 73 | command: cargo build 74 | args: --target=wasm32-unknown-unknown 75 | use-cross: true 76 | annotations: false 77 | - id: cargo-clippy 78 | name: Cargo clippy 79 | uses: ./ 80 | with: 81 | command: cargo clippy 82 | - run: find ~/.cache 83 | if: ${{ matrix.platform != 'windows-latest' }} 84 | - run: find ~/.local 85 | if: ${{ matrix.platform != 'windows-latest' }} 86 | - run: rustc -Vv 87 | -------------------------------------------------------------------------------- /src/system/rng.rs: -------------------------------------------------------------------------------- 1 | use blake3::Hasher; 2 | use derivative::Derivative; 3 | 4 | const U8_BIT: usize = 8; 5 | 6 | // Even this is probably too high: https://v8.dev/blog/math-random 7 | const ENTROPY_BITS_PER_F64: usize = 32; 8 | 9 | // To use rand/getrandom we either need node.js's crypto module to be enabled, 10 | // or switch to WASI as a target 11 | 12 | #[derive(Derivative)] 13 | #[derivative(Debug)] 14 | #[derive(Clone)] 15 | pub struct MathRandom { 16 | #[derivative(Debug = "ignore")] 17 | bytes: [u8; 32], 18 | 19 | #[derivative(Debug = "ignore")] 20 | taken: usize, 21 | 22 | #[derivative(Debug = "ignore")] 23 | hasher: Hasher, 24 | } 25 | 26 | impl Default for MathRandom { 27 | fn default() -> MathRandom { 28 | 
MathRandom { 29 | bytes: [0u8; 32], 30 | taken: usize::MAX, 31 | hasher: Hasher::new(), 32 | } 33 | } 34 | } 35 | 36 | impl MathRandom { 37 | pub fn fill_bytes(&mut self, dest: &mut [u8]) { 38 | let mut written = 0; 39 | while written < dest.len() { 40 | if self.taken >= self.bytes.len() { 41 | self.refill(); 42 | } 43 | let to_take = std::cmp::min(self.bytes.len() - self.taken, dest.len() - written); 44 | dest[written..(written + to_take)].copy_from_slice(&self.bytes[self.taken..(self.taken + to_take)]); 45 | self.taken += to_take; 46 | written += to_take; 47 | } 48 | } 49 | 50 | fn refill(&mut self) { 51 | let num_doubles_to_input = (self.bytes.len() * U8_BIT + ENTROPY_BITS_PER_F64 - 1) / ENTROPY_BITS_PER_F64; 52 | for _ in 0..num_doubles_to_input { 53 | let random = ffi::MATH 54 | .random() 55 | .as_f64() 56 | .expect("Math.random() didn't return a float"); 57 | #[allow(clippy::cast_precision_loss)] 58 | let random = random * ((1u64 << f64::MANTISSA_DIGITS) as f64); 59 | #[allow(clippy::cast_possible_truncation, clippy::cast_sign_loss)] 60 | let random = random as u64; 61 | self.hasher.update(&random.to_le_bytes()); 62 | } 63 | self.bytes = self.hasher.finalize().into(); 64 | self.taken = 0; 65 | } 66 | } 67 | 68 | mod ffi { 69 | use wasm_bindgen::prelude::*; 70 | 71 | #[wasm_bindgen] 72 | extern "C" { 73 | pub type MathObject; 74 | 75 | #[wasm_bindgen(js_name = "Math")] 76 | pub static MATH: MathObject; 77 | 78 | #[wasm_bindgen(method)] 79 | pub fn random(this: &MathObject) -> JsValue; 80 | } 81 | } 82 | -------------------------------------------------------------------------------- /src/input_manager.rs: -------------------------------------------------------------------------------- 1 | use crate::actions::core; 2 | use crate::Error; 3 | use parking_lot::Mutex; 4 | use std::collections::{HashMap, HashSet}; 5 | use strum::{EnumIter, IntoEnumIterator as _, IntoStaticStr}; 6 | 7 | #[derive(IntoStaticStr, Clone, Copy, Debug, EnumIter, Eq, Hash, PartialEq, strum::Display)] 8 | pub enum Input { 9 | #[strum(serialize = "annotations")] 10 | Annotations, 11 | 12 | #[strum(serialize = "args")] 13 | Args, 14 | 15 | #[strum(serialize = "cache-only")] 16 | CacheOnly, 17 | 18 | #[strum(serialize = "command")] 19 | Command, 20 | 21 | #[strum(serialize = "components")] 22 | Components, 23 | 24 | #[strum(serialize = "cross-platform-sharing")] 25 | CrossPlatformSharing, 26 | 27 | #[strum(serialize = "default")] 28 | Default, 29 | 30 | #[strum(serialize = "min-recache-crates")] 31 | MinRecacheCrates, 32 | 33 | #[strum(serialize = "min-recache-git-repos")] 34 | MinRecacheGitRepos, 35 | 36 | #[strum(serialize = "min-recache-indices")] 37 | MinRecacheIndices, 38 | 39 | #[strum(serialize = "override")] 40 | Override, 41 | 42 | #[strum(serialize = "profile")] 43 | Profile, 44 | 45 | // We name this target instead of targets since actions-rs only has target 46 | #[strum(serialize = "target")] 47 | Targets, 48 | 49 | #[strum(serialize = "toolchain")] 50 | Toolchain, 51 | 52 | #[strum(serialize = "use-cross")] 53 | UseCross, 54 | } 55 | 56 | #[derive(Debug)] 57 | pub struct Manager { 58 | inputs: HashMap<Input, String>, 59 | accessed: Mutex<HashSet<Input>>, 60 | } 61 | 62 | impl Manager { 63 | pub fn build() -> Result<Manager, Error> { 64 | let mut inputs = HashMap::new(); 65 | for input in Input::iter() { 66 | let input_name: &str = input.into(); 67 | if let Some(value) = core::Input::from(input_name).get()?
{ 68 | inputs.insert(input, value); 69 | } 70 | } 71 | Ok(Manager { 72 | inputs, 73 | accessed: Mutex::default(), 74 | }) 75 | } 76 | 77 | pub fn get(&self, input: Input) -> Option<&str> { 78 | self.accessed.lock().insert(input); 79 | self.inputs.get(&input).map(String::as_str) 80 | } 81 | 82 | pub fn get_required(&self, input: Input) -> Result<&str, Error> { 83 | self.get(input).ok_or_else(|| { 84 | let input_name: &str = input.into(); 85 | Error::MissingInput(input_name.into()) 86 | }) 87 | } 88 | 89 | pub fn unused(&self) -> HashSet<Input> { 90 | let available: HashSet<_> = self.inputs.keys().copied().collect(); 91 | &available - &self.accessed.lock() 92 | } 93 | } 94 | -------------------------------------------------------------------------------- /src/access_times.rs: -------------------------------------------------------------------------------- 1 | use crate::action_paths::get_action_cache_dir; 2 | use crate::node::path::Path; 3 | use crate::{dir_tree, node, nonce, warning, Error}; 4 | use async_trait::async_trait; 5 | 6 | const WAIT_ATIME_UPDATED_MS: u64 = 5; 7 | 8 | fn default_access_time_offset() -> chrono::Duration { 9 | // This is somewhat arbitrary - we could set all access timestamps back to the 10 | // epoch. The offset time is guaranteed to be valid and is far enough in the 11 | // past to cover even vFAT access time granularity (days). 12 | chrono::Duration::hours(36) 13 | } 14 | 15 | pub struct RevertAccessTime { 16 | duration: chrono::Duration, 17 | } 18 | 19 | #[async_trait(?Send)] 20 | impl dir_tree::Visitor for RevertAccessTime { 21 | async fn enter_folder(&mut self, _: &Path) -> Result<(), Error> { 22 | Ok(()) 23 | } 24 | 25 | async fn exit_folder(&mut self, _: &Path) -> Result<(), Error> { 26 | Ok(()) 27 | } 28 | 29 | async fn visit_entry(&mut self, path: &Path, is_file: bool) -> Result<(), Error> { 30 | if is_file { 31 | set_atime_behind_mtime(path, &self.duration).await?; 32 | } else { 33 | panic!("Expected to descend into all directories"); 34 | } 35 | Ok(()) 36 | } 37 | } 38 | 39 | pub async fn revert_folder(path: &Path) -> Result<(), Error> { 40 | let mut visitor = RevertAccessTime { 41 | duration: default_access_time_offset(), 42 | }; 43 | let ignores = dir_tree::Ignores::default(); 44 | dir_tree::apply_visitor(path, &ignores, &mut visitor).await?; 45 | Ok(()) 46 | } 47 | 48 | async fn get_atime_check_dir() -> Result<Path, Error> { 49 | let dir = get_action_cache_dir()?.join("check-atime-support"); 50 | node::fs::create_dir_all(&dir).await?; 51 | Ok(dir) 52 | } 53 | 54 | async fn set_atime_behind_mtime(path: &Path, duration: &chrono::Duration) -> Result<(), Error> { 55 | let metadata = node::fs::symlink_metadata(path).await?; 56 | let m_time = metadata.modified(); 57 | let a_time = m_time - *duration; 58 | node::fs::lutimes(path, &a_time, &m_time).await?; 59 | Ok(()) 60 | } 61 | 62 | pub async fn supports_atime() -> Result<bool, Error> { 63 | use crate::system::sleep; 64 | 65 | let atime_check_dir = get_atime_check_dir().await?; 66 | let file_path = { 67 | let nonce = nonce::build(8); 68 | atime_check_dir.join(&nonce.to_string()) 69 | }; 70 | let data = [0u8; 1]; 71 | node::fs::write_file(&file_path, &data).await?; 72 | set_atime_behind_mtime(&file_path, &default_access_time_offset()).await?; 73 | { 74 | let metadata = node::fs::symlink_metadata(&file_path).await?; 75 | if metadata.accessed() >= metadata.modified() { 76 | // We expect setting access time to work 77 | // even on filesystems that never update it.
78 | warning!("Appeared to be unable to even set file time-stamps"); 79 | return Ok(false); 80 | } 81 | } 82 | node::fs::read_file(&file_path).await?; 83 | // Wait a few ms, just in case 84 | sleep::sleep(&std::time::Duration::from_millis(WAIT_ATIME_UPDATED_MS)).await; 85 | let metadata = node::fs::symlink_metadata(&file_path).await?; 86 | // This needs to be >= and not > since times are discrete 87 | Ok(metadata.accessed() >= metadata.modified()) 88 | } 89 | -------------------------------------------------------------------------------- /src/cargo_hooks/annotation.rs: -------------------------------------------------------------------------------- 1 | use super::Hook; 2 | use crate::actions::core::AnnotationLevel; 3 | use crate::actions::exec::Command; 4 | use crate::warning; 5 | use async_trait::async_trait; 6 | use cargo_metadata::diagnostic::{DiagnosticLevel, DiagnosticSpan}; 7 | use std::borrow::Cow; 8 | 9 | #[derive(Default)] 10 | pub struct Annotation { 11 | subcommand: String, 12 | } 13 | 14 | impl Annotation { 15 | pub fn new(subcommand: &str) -> Annotation { 16 | Annotation { 17 | subcommand: subcommand.to_string(), 18 | } 19 | } 20 | 21 | fn process_json_record(cargo_subcommand: &str, line: &str) { 22 | use crate::actions::core::Annotation; 23 | use crate::node::path::Path; 24 | use cargo_metadata::Message; 25 | 26 | // Ignore blank lines 27 | let line = line.trim(); 28 | if line.is_empty() { 29 | return; 30 | } 31 | 32 | let metadata: Message = match serde_json::from_str(line) { 33 | Ok(metadata) => metadata, 34 | Err(e) => { 35 | warning!("Unable to parse cargo output line as JSON metadata record: {}", e); 36 | return; 37 | } 38 | }; 39 | if let Message::CompilerMessage(compiler_message) = metadata { 40 | let diagnostic = &compiler_message.message; 41 | let level = Self::annotation_level(diagnostic.level); 42 | let mut annotation = if let Some(rendered) = &diagnostic.rendered { 43 | let mut annotation = Annotation::from(rendered.as_str()); 44 | annotation.title(&format!("cargo-{}: {}", cargo_subcommand, diagnostic.message)); 45 | annotation 46 | } else { 47 | let mut annotation = Annotation::from(diagnostic.message.as_str()); 48 | annotation.title(&format!("cargo-{}", cargo_subcommand)); 49 | annotation 50 | }; 51 | if let Some(span) = Self::get_primary_span(&diagnostic.spans) { 52 | let file_name = Path::from(&span.file_name); 53 | annotation 54 | .file(&file_name) 55 | .start_line(span.line_start) 56 | .end_line(span.line_end) 57 | .start_column(span.column_start) 58 | .end_column(span.column_end); 59 | } 60 | annotation.output(level); 61 | } 62 | } 63 | 64 | fn annotation_level(level: DiagnosticLevel) -> AnnotationLevel { 65 | #[allow(clippy::match_same_arms)] 66 | match level { 67 | DiagnosticLevel::Ice | DiagnosticLevel::Error => AnnotationLevel::Error, 68 | DiagnosticLevel::Warning => AnnotationLevel::Warning, 69 | DiagnosticLevel::FailureNote | DiagnosticLevel::Note | DiagnosticLevel::Help => AnnotationLevel::Notice, 70 | _ => AnnotationLevel::Warning, 71 | } 72 | } 73 | 74 | fn get_primary_span(spans: &[DiagnosticSpan]) -> Option<&DiagnosticSpan> { 75 | spans.iter().find(|s| s.is_primary) 76 | } 77 | } 78 | 79 | #[async_trait(?Send)] 80 | impl Hook for Annotation { 81 | fn additional_cargo_options(&self) -> Vec<Cow<'static, str>> { 82 | vec!["--message-format=json".into()] 83 | } 84 | 85 | fn modify_command(&self, command: &mut Command) { 86 | use crate::actions::exec::Stdio; 87 | 88 | let subcommand = self.subcommand.clone(); 89 | command 90 | .outline(move |line|
Self::process_json_record(&subcommand, line)) 91 | .stdout(Stdio::null()); 92 | } 93 | } 94 | -------------------------------------------------------------------------------- /src/actions/tool_cache.rs: -------------------------------------------------------------------------------- 1 | use crate::node::path::Path; 2 | use crate::node::process; 3 | use js_sys::JsString; 4 | use std::borrow::Cow; 5 | use std::convert::Into; 6 | use wasm_bindgen::prelude::*; 7 | 8 | #[derive(Debug)] 9 | pub struct DownloadTool { 10 | url: JsString, 11 | dest: Option, 12 | auth: Option, 13 | } 14 | 15 | impl> From for DownloadTool { 16 | fn from(url: U) -> DownloadTool { 17 | DownloadTool { 18 | url: url.into(), 19 | dest: None, 20 | auth: None, 21 | } 22 | } 23 | } 24 | 25 | impl DownloadTool { 26 | pub fn dest>(&mut self, dest: D) -> &mut Self { 27 | self.dest = Some(dest.into()); 28 | self 29 | } 30 | 31 | pub fn auth>(&mut self, auth: A) -> &mut Self { 32 | self.auth = Some(auth.into()); 33 | self 34 | } 35 | 36 | pub async fn download(&mut self) -> Result { 37 | let dest = self.dest.as_ref().map(|dest| { 38 | let mut resolved = process::cwd(); 39 | resolved.push(dest.clone()); 40 | JsString::from(&resolved) 41 | }); 42 | ffi::download_tool(&self.url, dest.as_ref(), self.auth.as_ref(), None) 43 | .await 44 | .map(Into::::into) 45 | .map(Into::::into) 46 | } 47 | } 48 | 49 | pub async fn download_tool>(options: O) -> Result { 50 | options.into().download().await 51 | } 52 | 53 | #[derive(Debug, Copy, Clone)] 54 | pub enum StreamCompression { 55 | None, 56 | Gzip, 57 | Bzip2, 58 | Xz, 59 | } 60 | 61 | impl StreamCompression { 62 | fn tar_flag(&self) -> Cow { 63 | match self { 64 | StreamCompression::None => "", 65 | StreamCompression::Gzip => "z", 66 | StreamCompression::Bzip2 => "j", 67 | StreamCompression::Xz => "J", 68 | } 69 | .into() 70 | } 71 | } 72 | 73 | pub async fn extract_tar(path: &Path, compression: StreamCompression, dest: Option<&Path>) -> Result { 74 | let mut tar_option = String::from("x"); 75 | tar_option += &compression.tar_flag(); 76 | let tar_option = vec![JsString::from(tar_option)]; 77 | 78 | let path: JsString = path.into(); 79 | let dest = dest.map(Into::::into); 80 | let dest = ffi::extract_tar(&path, dest.as_ref(), Some(tar_option)).await?; 81 | let dest: JsString = dest.into(); 82 | Ok(dest.into()) 83 | } 84 | 85 | pub async fn cache_dir(tool: &str, version: &str, path: &Path, arch: Option<&str>) -> Result { 86 | let path: JsString = path.into(); 87 | let tool: JsString = tool.into(); 88 | let version: JsString = version.into(); 89 | let arch: Option = arch.map(Into::into); 90 | let dest = ffi::cache_dir(&path, &tool, &version, arch.as_ref()).await?; 91 | let dest: JsString = dest.into(); 92 | Ok(dest.into()) 93 | } 94 | 95 | pub mod ffi { 96 | use js_sys::{JsString, Map}; 97 | use wasm_bindgen::prelude::*; 98 | 99 | #[wasm_bindgen(module = "@actions/tool-cache")] 100 | extern "C" { 101 | #[wasm_bindgen(js_name = "downloadTool", catch)] 102 | pub async fn download_tool( 103 | url: &JsString, 104 | dest: Option<&JsString>, 105 | auth: Option<&JsString>, 106 | headers: Option<&Map>, 107 | ) -> Result; 108 | 109 | #[wasm_bindgen(js_name = "cacheDir", catch)] 110 | pub async fn cache_dir( 111 | source_dir: &JsString, 112 | tool: &JsString, 113 | version: &JsString, 114 | arch: Option<&JsString>, 115 | ) -> Result; 116 | 117 | #[wasm_bindgen(js_name = "extractTar", catch)] 118 | pub async fn extract_tar( 119 | file: &JsString, 120 | dest: Option<&JsString>, 121 | flags: Option>, 122 | 
) -> Result; 123 | } 124 | } 125 | -------------------------------------------------------------------------------- /src/cache_key_builder.rs: -------------------------------------------------------------------------------- 1 | use crate::actions::cache::Entry as CacheEntry; 2 | use crate::hasher::Blake3 as Blake3Hasher; 3 | use crate::{node, safe_encoding}; 4 | use std::collections::BTreeMap; 5 | 6 | const CACHE_ENTRY_VERSION: &str = "17"; 7 | 8 | pub struct CacheKeyBuilder { 9 | name: String, 10 | hasher: Blake3Hasher, 11 | attributes: BTreeMap<&'static str, (String, bool)>, 12 | } 13 | 14 | #[derive(Clone, Copy, Debug, Eq, Hash, strum::Display, strum::IntoStaticStr, Ord, PartialEq, PartialOrd)] 15 | pub enum Attribute { 16 | #[strum(serialize = "job")] 17 | Job, 18 | 19 | #[strum(serialize = "matrix")] 20 | Matrix, 21 | 22 | #[strum(serialize = "origin_platform")] 23 | OriginPlatform, 24 | 25 | #[strum(serialize = "platform")] 26 | Platform, 27 | 28 | #[strum(serialize = "workflow")] 29 | Workflow, 30 | 31 | #[strum(serialize = "args_truncated")] 32 | ArgsTruncated, 33 | 34 | #[strum(serialize = "nonce")] 35 | Nonce, 36 | 37 | #[strum(serialize = "num_entries")] 38 | NumEntries, 39 | 40 | #[strum(serialize = "path")] 41 | Path, 42 | 43 | #[strum(serialize = "date")] 44 | Timestamp, 45 | 46 | #[strum(serialize = "target")] 47 | Target, 48 | 49 | #[strum(serialize = "toolchain_version")] 50 | ToolchainVersion, 51 | 52 | #[strum(serialize = "entries_hash")] 53 | EntriesHash, 54 | } 55 | 56 | impl CacheKeyBuilder { 57 | fn empty(name: &str) -> CacheKeyBuilder { 58 | let mut result = CacheKeyBuilder { 59 | name: name.into(), 60 | hasher: Blake3Hasher::default(), 61 | attributes: BTreeMap::new(), 62 | }; 63 | result.add_key_data(CACHE_ENTRY_VERSION); 64 | result 65 | } 66 | 67 | pub fn new(name: &str) -> CacheKeyBuilder { 68 | use crate::nonce; 69 | 70 | let mut result = Self::empty(name); 71 | result.set_key_attribute(Attribute::Platform, node::os::platform()); 72 | let date = chrono::Local::now(); 73 | result.set_attribute(Attribute::Timestamp, date.to_string()); 74 | let nonce = nonce::build(8); 75 | let nonce = safe_encoding::encode(nonce); 76 | result.set_attribute(Attribute::Nonce, nonce); 77 | result 78 | } 79 | 80 | pub fn add_key_data(&mut self, data: &T) { 81 | data.hash(&mut self.hasher); 82 | } 83 | 84 | pub fn set_key_attribute(&mut self, key: Attribute, value: String) { 85 | self.attributes.insert(key.into(), (value, true)); 86 | } 87 | 88 | pub fn set_attribute(&mut self, name: Attribute, value: String) { 89 | self.attributes.insert(name.into(), (value, false)); 90 | } 91 | 92 | fn restore_key_to_save_key(restore_key: &str, attributes: &BTreeMap<&str, (String, bool)>) -> String { 93 | use itertools::Itertools as _; 94 | use std::fmt::Write as _; 95 | 96 | let mut save_key = restore_key.to_string(); 97 | if !attributes.is_empty() { 98 | save_key += ", attributes={"; 99 | write!( 100 | save_key, 101 | "{}", 102 | attributes.iter().map(|(a, v)| format!("{}={}", a, v.0)).format("; ") 103 | ) 104 | .expect("Unable to format restore key"); 105 | save_key += "}"; 106 | } 107 | save_key.replace(',', ";") 108 | } 109 | 110 | fn build_restore_key(name: &str, mut hasher: Blake3Hasher, attributes: &BTreeMap<&str, (String, bool)>) -> String { 111 | use std::hash::Hash as _; 112 | 113 | let id = { 114 | attributes 115 | .iter() 116 | .filter_map(|(k, v)| v.1.then_some((k, &v.0))) 117 | .for_each(|v| v.hash(&mut hasher)); 118 | let id: [u8; 32] = hasher.inner().finalize().into(); 119 | let id 
= &id[..8]; 120 | safe_encoding::encode(id) 121 | }; 122 | 123 | let restore_key = format!("Ferrous Actions: {} - id={}", name, id); 124 | restore_key.replace(',', ";") 125 | } 126 | 127 | pub fn into_entry(self) -> CacheEntry { 128 | let restore_key = Self::build_restore_key(&self.name, self.hasher, &self.attributes); 129 | let save_key = Self::restore_key_to_save_key(&restore_key, &self.attributes); 130 | let mut result = CacheEntry::new(save_key.as_str()); 131 | result.restore_key(restore_key); 132 | // Since we have the "platform" attribute, turning this on makes no difference 133 | // unless the user overrides it 134 | result.permit_sharing_with_windows(true); 135 | result 136 | } 137 | } 138 | -------------------------------------------------------------------------------- /src/dir_tree.rs: -------------------------------------------------------------------------------- 1 | use crate::node::fs; 2 | use crate::node::path::Path; 3 | use crate::Error; 4 | use async_recursion::async_recursion; 5 | use async_trait::async_trait; 6 | use simple_path_match::PathMatch; 7 | use std::borrow::Cow; 8 | use std::collections::{HashMap, HashSet, VecDeque}; 9 | 10 | pub const ROOT_NAME: &str = "."; 11 | 12 | #[derive(Debug, Default, Clone)] 13 | pub struct Ignores { 14 | map: HashMap>, 15 | } 16 | 17 | impl Ignores { 18 | pub fn add(&mut self, depth: usize, name: &str) { 19 | self.map.entry(depth).or_default().insert(name.to_string()); 20 | } 21 | 22 | pub fn should_ignore(&self, name: &str, depth: usize) -> bool { 23 | self.map.get(&depth).map_or(false, |names| names.contains(name)) 24 | } 25 | } 26 | 27 | #[async_trait(?Send)] 28 | pub trait Visitor { 29 | async fn should_enter(&self, _path: &Path) -> Result { 30 | Ok(true) 31 | } 32 | async fn enter_folder(&mut self, path: &Path) -> Result<(), Error>; 33 | async fn visit_entry(&mut self, name: &Path, is_file: bool) -> Result<(), Error>; 34 | async fn exit_folder(&mut self, path: &Path) -> Result<(), Error>; 35 | } 36 | 37 | pub async fn apply_visitor(folder_path: &Path, ignores: &Ignores, visitor: &mut V) -> Result<(), Error> 38 | where 39 | V: Visitor, 40 | { 41 | apply_visitor_impl(0, folder_path, ignores, visitor).await 42 | } 43 | 44 | #[async_recursion(?Send)] 45 | async fn apply_visitor_impl( 46 | depth: usize, 47 | path: &Path, 48 | ignores: &Ignores, 49 | visitor: &mut dyn Visitor, 50 | ) -> Result<(), Error> { 51 | let file_name: Cow = if depth == 0 { 52 | ROOT_NAME.into() 53 | } else { 54 | path.file_name().into() 55 | }; 56 | if ignores.should_ignore(&file_name, depth) { 57 | return Ok(()); 58 | } 59 | let metadata = fs::symlink_metadata(path).await?; 60 | if metadata.is_directory() { 61 | if visitor.should_enter(path).await? 
{ 62 | visitor.enter_folder(path).await?; 63 | let depth = depth + 1; 64 | let dir = fs::read_dir(path).await?; 65 | for entry in dir { 66 | let path = entry.path(); 67 | apply_visitor_impl(depth, &path, ignores, visitor).await?; 68 | } 69 | visitor.exit_folder(path).await?; 70 | } else { 71 | visitor.visit_entry(path, false).await?; 72 | } 73 | } else { 74 | visitor.visit_entry(path, true).await?; 75 | } 76 | Ok(()) 77 | } 78 | 79 | #[derive(Debug)] 80 | struct PathMatchVisitor<'a> { 81 | matching_paths: Vec, 82 | matcher: &'a PathMatch, 83 | path_stack: VecDeque, 84 | output_relative: bool, 85 | } 86 | 87 | impl<'a> PathMatchVisitor<'a> { 88 | fn full_path_to_relative(&self, full_path: &Path) -> Path { 89 | self.path_stack 90 | .back() 91 | .map_or_else(|| Path::from("."), |p| p.join(&full_path.file_name())) 92 | } 93 | 94 | fn visit_path(&mut self, absolute: &Path, relative: &Path) { 95 | if self.matcher.matches(relative.to_string()) { 96 | let path = if self.output_relative { relative } else { absolute }.clone(); 97 | self.matching_paths.push(path); 98 | } 99 | } 100 | } 101 | 102 | #[async_trait(?Send)] 103 | impl<'a> Visitor for PathMatchVisitor<'a> { 104 | async fn should_enter(&self, full: &Path) -> Result { 105 | let result = if self.path_stack.len() >= self.matcher.max_depth() { 106 | false 107 | } else { 108 | let relative = self.full_path_to_relative(full); 109 | self.matcher.matches_prefix(relative.to_string()) 110 | }; 111 | Ok(result) 112 | } 113 | 114 | async fn enter_folder(&mut self, full: &Path) -> Result<(), Error> { 115 | let relative = self.full_path_to_relative(full); 116 | self.visit_path(full, &relative); 117 | self.path_stack.push_back(relative); 118 | Ok(()) 119 | } 120 | 121 | async fn visit_entry(&mut self, full: &Path, _is_file: bool) -> Result<(), Error> { 122 | let relative = self.full_path_to_relative(full); 123 | self.visit_path(full, &relative); 124 | Ok(()) 125 | } 126 | 127 | async fn exit_folder(&mut self, _: &Path) -> Result<(), Error> { 128 | self.path_stack.pop_back(); 129 | Ok(()) 130 | } 131 | } 132 | 133 | pub async fn match_relative_paths(path: &Path, matcher: &PathMatch, output_relative: bool) -> Result, Error> { 134 | let mut visitor = PathMatchVisitor { 135 | matching_paths: Vec::new(), 136 | matcher, 137 | path_stack: VecDeque::new(), 138 | output_relative, 139 | }; 140 | let ignores = Ignores::default(); 141 | apply_visitor(path, &ignores, &mut visitor).await?; 142 | Ok(visitor.matching_paths) 143 | } 144 | -------------------------------------------------------------------------------- /src/run.rs: -------------------------------------------------------------------------------- 1 | use crate::cache_cargo_home::{restore_cargo_cache, save_cargo_cache}; 2 | use crate::cross::Cross; 3 | use crate::input_manager::{Input, Manager as InputManager}; 4 | use crate::rustup::{self, ToolchainConfig}; 5 | use crate::{node, toolchain, warning, Cargo, Error}; 6 | 7 | fn get_toolchain_config(input_manager: &InputManager) -> Result { 8 | let mut toolchain_config = ToolchainConfig::default(); 9 | if let Some(toolchain) = input_manager.get(Input::Toolchain) { 10 | toolchain_config.name = toolchain.into(); 11 | } 12 | if let Some(profile) = input_manager.get(Input::Profile) { 13 | toolchain_config.profile = profile.into(); 14 | } 15 | if let Some(components) = input_manager.get(Input::Components) { 16 | toolchain_config.components = components.split_whitespace().map(String::from).collect(); 17 | } 18 | if let Some(targets) = input_manager.get(Input::Targets) 
{ 19 | toolchain_config.targets = targets.split_whitespace().map(String::from).collect(); 20 | } 21 | if let Some(set_default) = input_manager.get(Input::Default) { 22 | let set_default = set_default 23 | .parse::() 24 | .map_err(|_| Error::OptionParseError(Input::Default.to_string(), set_default.to_string()))?; 25 | toolchain_config.set_default = set_default; 26 | } 27 | if let Some(set_override) = input_manager.get(Input::Override) { 28 | let set_override = set_override 29 | .parse::() 30 | .map_err(|_| Error::OptionParseError(Input::Override.to_string(), set_override.to_string()))?; 31 | toolchain_config.set_override = set_override; 32 | } 33 | Ok(toolchain_config) 34 | } 35 | 36 | pub async fn run() -> Result<(), Error> { 37 | use wasm_bindgen::JsError; 38 | 39 | let environment = node::process::get_env(); 40 | if let Some(phase) = environment.get("GITHUB_RUST_ACTION_PHASE") { 41 | match phase.as_str() { 42 | "main" => main().await, 43 | "post" => post().await, 44 | _ => { 45 | warning!("Unexpectedly invoked with phase {}. Doing nothing.", phase); 46 | Ok(()) 47 | } 48 | } 49 | } else { 50 | Err(Error::Js( 51 | JsError::new("Action was invoked in an unexpected way. Could not determine phase.").into(), 52 | )) 53 | } 54 | } 55 | 56 | pub async fn main() -> Result<(), Error> { 57 | let input_manager = InputManager::build()?; 58 | let command = input_manager.get_required(Input::Command)?; 59 | let split: Vec<&str> = command.split_whitespace().collect(); 60 | match split[..] { 61 | ["install-rustup"] => { 62 | let toolchain_config = get_toolchain_config(&input_manager)?; 63 | rustup::install(&toolchain_config).await?; 64 | } 65 | ["install-toolchain"] => { 66 | let toolchain_config = get_toolchain_config(&input_manager)?; 67 | toolchain::install(&toolchain_config).await?; 68 | } 69 | ["cargo", cargo_subcommand] => { 70 | let use_cross = if let Some(use_cross) = input_manager.get(Input::UseCross) { 71 | use_cross 72 | .parse::() 73 | .map_err(|_| Error::OptionParseError("use-cross".into(), use_cross.to_string()))? 74 | } else { 75 | false 76 | }; 77 | let mut cargo = if use_cross { 78 | let cross = Cross::get_or_install(&input_manager).await?; 79 | Cargo::from_path(&cross.get_path()).await? 80 | } else { 81 | Cargo::from_environment().await? 82 | }; 83 | let cargo_args = input_manager.get(Input::Args).unwrap_or_default(); 84 | let cargo_args = 85 | shlex::split(cargo_args).ok_or_else(|| Error::ArgumentsParseError(cargo_args.to_string()))?; 86 | let toolchain = input_manager.get(Input::Toolchain); 87 | cargo 88 | .run( 89 | toolchain, 90 | cargo_subcommand, 91 | cargo_args.iter().map(String::as_str), 92 | &input_manager, 93 | ) 94 | .await?; 95 | } 96 | ["cache"] => restore_cargo_cache(&input_manager).await?, 97 | _ => return Err(Error::UnknownCommand(command.to_string())), 98 | } 99 | 100 | for input in input_manager.unused() { 101 | warning!("Recognised but unused input {} was passed to action", input); 102 | } 103 | Ok(()) 104 | } 105 | 106 | pub async fn post() -> Result<(), Error> { 107 | let input_manager = InputManager::build()?; 108 | let command = input_manager.get_required(Input::Command)?; 109 | let split: Vec<&str> = command.split_whitespace().collect(); 110 | #[allow(clippy::single_match)] 111 | match split[..] 
{ 112 | ["cache"] => save_cargo_cache(&input_manager).await?, 113 | _ => {} 114 | } 115 | Ok(()) 116 | } 117 | -------------------------------------------------------------------------------- /src/rustup.rs: -------------------------------------------------------------------------------- 1 | use crate::actions::exec::Command; 2 | use crate::actions::{core, io, tool_cache}; 3 | use crate::node::path::Path; 4 | use crate::{debug, info, node, Error}; 5 | use parking_lot::Mutex; 6 | use std::sync::Arc; 7 | 8 | const NO_DEFAULT_TOOLCHAIN_NAME: &str = "none"; 9 | 10 | pub async fn install(toolchain_config: &ToolchainConfig) -> Result<(), Error> { 11 | let rustup = Rustup::get_or_install().await?; 12 | debug!("Rustup installed at: {}", rustup.get_path()); 13 | rustup.update().await?; 14 | rustup.install_toolchain(toolchain_config).await?; 15 | Ok(()) 16 | } 17 | 18 | #[derive(Clone, Debug)] 19 | pub struct ToolchainConfig { 20 | pub name: String, 21 | pub profile: String, 22 | pub components: Vec, 23 | pub targets: Vec, 24 | pub set_default: bool, 25 | pub set_override: bool, 26 | } 27 | 28 | impl Default for ToolchainConfig { 29 | fn default() -> ToolchainConfig { 30 | ToolchainConfig { 31 | name: "stable".into(), 32 | profile: "default".into(), 33 | components: Vec::new(), 34 | targets: Vec::new(), 35 | set_default: true, 36 | set_override: false, 37 | } 38 | } 39 | } 40 | 41 | #[derive(Clone, Debug)] 42 | pub struct Rustup { 43 | path: Path, 44 | } 45 | 46 | impl Rustup { 47 | pub async fn get_or_install() -> Result { 48 | match Self::get().await { 49 | Ok(rustup) => Ok(rustup), 50 | Err(e) => { 51 | info!("Unable to find rustup, Installing it now..."); 52 | debug!("Attempting to locate rustup returned this error: {}", e); 53 | Self::install().await 54 | } 55 | } 56 | } 57 | 58 | pub async fn get() -> Result { 59 | io::which("rustup", true) 60 | .await 61 | .map(|path| Rustup { path }) 62 | .map_err(Error::Js) 63 | } 64 | 65 | pub async fn install() -> Result { 66 | let args = ["--default-toolchain", NO_DEFAULT_TOOLCHAIN_NAME, "-y"]; 67 | let platform = node::os::platform(); 68 | info!("Getting rustup for platform: {:?}", platform); 69 | match platform.as_str() { 70 | "darwin" | "linux" => { 71 | let rustup_script = tool_cache::download_tool("https://sh.rustup.rs") 72 | .await 73 | .map_err(Error::Js)?; 74 | info!("Downloaded to: {:?}", rustup_script); 75 | node::fs::chmod(&rustup_script, 0x755).await.map_err(Error::Js)?; 76 | Command::from(&rustup_script) 77 | .args(args) 78 | .exec() 79 | .await 80 | .map_err(Error::Js)?; 81 | } 82 | "windows" => { 83 | let rustup_exe = tool_cache::download_tool("https://win.rustup.rs") 84 | .await 85 | .map_err(Error::Js)?; 86 | info!("Downloaded to: {:?}", rustup_exe); 87 | Command::from(&rustup_exe).args(args).exec().await.map_err(Error::Js)?; 88 | } 89 | _ => return Err(Error::UnsupportedPlatform(platform)), 90 | } 91 | let cargo_bin_path = node::os::homedir().join(".cargo").join("bin"); 92 | info!("Adding {:?} to path", cargo_bin_path); 93 | core::add_path(&cargo_bin_path); 94 | Self::get().await 95 | } 96 | 97 | pub async fn update(&self) -> Result<(), Error> { 98 | Command::from(&self.path) 99 | .arg("update") 100 | .exec() 101 | .await 102 | .map_err(Error::Js)?; 103 | Ok(()) 104 | } 105 | 106 | pub async fn install_toolchain(&self, config: &ToolchainConfig) -> Result<(), Error> { 107 | if config.name == NO_DEFAULT_TOOLCHAIN_NAME { 108 | return Ok(()); 109 | } 110 | let mut args: Vec<_> = ["toolchain", 
"install"].into_iter().map(String::from).collect(); 111 | args.push(config.name.clone()); 112 | args.extend(["--profile".into(), config.profile.clone()]); 113 | for target in &config.targets { 114 | args.extend(["-t".into(), target.clone()]); 115 | } 116 | // It seems that components can take multiple arguments so the toolchain name 117 | // must be present before this 118 | for component in &config.components { 119 | args.extend(["-c".into(), component.clone()]); 120 | } 121 | Command::from(&self.path).args(args).exec().await.map_err(Error::Js)?; 122 | for (flag, option_name) in [(config.set_default, "default"), (config.set_override, "override")] { 123 | if flag { 124 | Command::from(&self.path) 125 | .arg(option_name) 126 | .arg(config.name.clone()) 127 | .exec() 128 | .await 129 | .map_err(Error::Js)?; 130 | } 131 | } 132 | Ok(()) 133 | } 134 | 135 | #[allow(dead_code)] 136 | pub async fn installed_toolchains(&self) -> Result, Error> { 137 | let args: Vec<_> = ["toolchain", "list"].into_iter().map(String::from).collect(); 138 | 139 | let toolchains: Arc>> = Arc::default(); 140 | { 141 | let match_default = regex::Regex::new(r" *\(default\) *$").expect("Regex compilation failed"); 142 | let toolchains = Arc::clone(&toolchains); 143 | Command::from(&self.path) 144 | .args(args) 145 | .outline(move |line| { 146 | let toolchain = match_default.replace(line, ""); 147 | toolchains.lock().push(toolchain.to_string()); 148 | }) 149 | .exec() 150 | .await 151 | .map_err(Error::Js)?; 152 | } 153 | let toolchains = toolchains.lock().drain(..).collect(); 154 | Ok(toolchains) 155 | } 156 | 157 | #[allow(dead_code)] 158 | pub async fn install_component(&self, name: &str) -> Result<(), Error> { 159 | Command::from(&self.path) 160 | .arg("component") 161 | .arg("add") 162 | .arg(name) 163 | .exec() 164 | .await 165 | .map_err(Error::Js)?; 166 | Ok(()) 167 | } 168 | 169 | pub fn get_path(&self) -> &Path { 170 | &self.path 171 | } 172 | } 173 | -------------------------------------------------------------------------------- /src/actions/push_line_splitter.rs: -------------------------------------------------------------------------------- 1 | use std::borrow::Cow; 2 | use std::collections::VecDeque; 3 | 4 | /// A platform-agnostic line splitter. 5 | /// 6 | /// This is part of a work-around for 7 | /// . It treats any group of CR and 8 | /// LF characters with no repeated characters as a line break. 9 | /// 10 | /// `close()` must be called when the input source has hit EOF so final lines 11 | /// may be returned. 12 | 13 | #[derive(Debug, Clone, Default)] 14 | pub struct PushLineSplitter { 15 | lines: VecDeque<(usize, usize)>, 16 | taken: usize, 17 | buffer: Vec, 18 | scan_offset: usize, 19 | scan_flags: u8, 20 | line_len: usize, 21 | delim_len: usize, 22 | closed: bool, 23 | } 24 | 25 | pub struct WriteBuffer<'a> { 26 | length: usize, 27 | parent: &'a mut PushLineSplitter, 28 | } 29 | 30 | impl<'a> AsMut<[u8]> for WriteBuffer<'a> { 31 | fn as_mut(&mut self) -> &mut [u8] { 32 | let buffer_len = self.parent.buffer.len(); 33 | &mut self.parent.buffer[(buffer_len - self.length)..] 
34 | } 35 | } 36 | 37 | impl Drop for WriteBuffer<'_> { 38 | fn drop(&mut self) { 39 | self.parent.post_write(); 40 | } 41 | } 42 | 43 | impl PushLineSplitter { 44 | fn pre_write(&mut self) { 45 | assert!(!self.closed, "Data written after close"); 46 | self.drain_taken(); 47 | } 48 | 49 | fn post_write(&mut self) { 50 | self.update_scan(); 51 | } 52 | 53 | pub fn write(&mut self, data: &[u8]) { 54 | self.pre_write(); 55 | self.buffer.extend(data); 56 | self.post_write(); 57 | } 58 | 59 | pub fn write_via_buffer(&mut self, len: usize) -> WriteBuffer { 60 | self.pre_write(); 61 | let buffer_len = self.buffer.len(); 62 | self.buffer.resize(buffer_len + len, 0u8); 63 | WriteBuffer { 64 | length: len, 65 | parent: self, 66 | } 67 | } 68 | 69 | pub fn close(&mut self) { 70 | if !self.closed { 71 | self.closed = true; 72 | // The current line is finished, whether or not it is delimiter terminated 73 | self.lines.push_back((self.line_len, self.delim_len)); 74 | // If we are mid-way through a delimiter, we also need an empty line 75 | if self.scan_flags != 0 { 76 | self.lines.push_back((0, 0)); 77 | } 78 | } 79 | } 80 | 81 | pub fn next_line(&mut self) -> Option> { 82 | if let Some((line_len, delim_len)) = self.lines.pop_front() { 83 | let slice = &self.buffer[self.taken..(self.taken + line_len)]; 84 | let line = String::from_utf8_lossy(slice); 85 | self.taken += line_len + delim_len; 86 | Some(line) 87 | } else { 88 | None 89 | } 90 | } 91 | 92 | fn drain_taken(&mut self) { 93 | if self.taken > 0 { 94 | drop(self.buffer.drain(..self.taken)); 95 | self.scan_offset -= self.taken; 96 | self.taken = 0; 97 | } 98 | } 99 | 100 | fn update_scan(&mut self) { 101 | while self.scan_offset < self.buffer.len() { 102 | let c = self.buffer[self.scan_offset]; 103 | let c_flags = Self::delimiter_flags(c); 104 | // We terminate the delimiter because of a new non-NL character or repeated 105 | // newline 106 | if (self.scan_flags != 0 && c_flags == 0) || (c_flags & self.scan_flags) != 0 { 107 | self.scan_flags = 0; 108 | self.lines.push_back((self.line_len, self.delim_len)); 109 | (self.line_len, self.delim_len) = (0, 0); 110 | } 111 | if c_flags == 0 { 112 | self.line_len += 1; 113 | } else { 114 | self.delim_len += 1; 115 | self.scan_flags |= c_flags; 116 | } 117 | self.scan_offset += 1; 118 | } 119 | } 120 | 121 | fn delimiter_flags(character: u8) -> u8 { 122 | const LF: u8 = 10; 123 | const CR: u8 = 13; 124 | match character { 125 | LF => 1, 126 | CR => 2, 127 | _ => 0, 128 | } 129 | } 130 | } 131 | 132 | #[cfg(test)] 133 | mod tests { 134 | use super::*; 135 | use wasm_bindgen_test::wasm_bindgen_test; 136 | 137 | fn test_inputs() -> Vec> { 138 | let test_vectors: [&[&str]; 6] = [ 139 | &[""], 140 | &["the quick", "brown fox"], 141 | &["", "the quick", "brown fox", "jumped over", ""], 142 | &["", "", "", "", ""], 143 | &["a", "", "", "b", "", "", "c"], 144 | &["", "a", "", "b", "", "c", ""], 145 | ]; 146 | let mut result = Vec::with_capacity(test_vectors.len()); 147 | for lines in test_vectors { 148 | result.push(lines.into_iter().copied().map(String::from).collect()); 149 | } 150 | result 151 | } 152 | 153 | #[derive(Copy, Clone, Debug)] 154 | enum Mode { 155 | AllAtOnce, 156 | Bytes, 157 | } 158 | 159 | fn test_reconstruction(mode: Mode) { 160 | for delimiter in ["\n", "\r", "\r\n"] { 161 | for input in test_inputs().into_iter() { 162 | let string = input.join(delimiter); 163 | let bytes = string.as_bytes(); 164 | let mut splitter = PushLineSplitter::default(); 165 | let mut lines = 
Vec::with_capacity(input.len()); 166 | match mode { 167 | Mode::AllAtOnce => { 168 | splitter.write(bytes); 169 | splitter.close(); 170 | while let Some(line) = splitter.next_line() { 171 | lines.push(line.into_owned()); 172 | } 173 | } 174 | Mode::Bytes => { 175 | for byte in bytes.iter().copied() { 176 | let byte = [byte]; 177 | splitter.write(&byte[..]); 178 | while let Some(line) = splitter.next_line() { 179 | lines.push(line.into_owned()); 180 | } 181 | } 182 | splitter.close(); 183 | while let Some(line) = splitter.next_line() { 184 | lines.push(line.into_owned()); 185 | } 186 | } 187 | } 188 | assert_eq!(lines.len(), input.len(), "Wrong number of lines from splitter"); 189 | let reconstructed = lines.join(delimiter); 190 | assert_eq!(string, reconstructed); 191 | } 192 | } 193 | } 194 | 195 | #[wasm_bindgen_test] 196 | fn bulk_write() { 197 | test_reconstruction(Mode::AllAtOnce); 198 | } 199 | 200 | #[wasm_bindgen_test] 201 | fn byte_at_a_time_write() { 202 | test_reconstruction(Mode::Bytes); 203 | } 204 | } 205 | -------------------------------------------------------------------------------- /src/cargo_hooks/install.rs: -------------------------------------------------------------------------------- 1 | use super::Hook; 2 | use crate::action_paths::get_action_cache_dir; 3 | use crate::actions::cache::Entry as CacheEntry; 4 | use crate::cargo::ToolchainVersion; 5 | use crate::delta::render_list as render_delta_list; 6 | use crate::fingerprinting::Fingerprint; 7 | use crate::hasher::Blake3 as Blake3Hasher; 8 | use crate::node::path::Path; 9 | use crate::{actions, error, info, node, warning, Error}; 10 | use async_trait::async_trait; 11 | use rustup_toolchain_manifest::HashValue; 12 | use std::borrow::Cow; 13 | 14 | const MAX_ARG_STRING_LENGTH: usize = 80; 15 | 16 | fn get_package_build_dir(hash: &HashValue) -> Result { 17 | // Don't use safe_encoding here because the platform filesystem 18 | // might not be case sensitive 19 | let dir = get_action_cache_dir()? 20 | .join("package-build-artifacts") 21 | .join(&hash.to_string()); 22 | Ok(dir) 23 | } 24 | 25 | pub struct Install { 26 | hash: HashValue, 27 | build_dir: String, 28 | fingerprint: Option, 29 | arg_string: String, 30 | restore_key: Option, 31 | toolchain_version_short: String, 32 | } 33 | 34 | impl Install { 35 | pub async fn new(toolchain_version: &ToolchainVersion, args: I) -> Result 36 | where 37 | I: IntoIterator, 38 | A: AsRef, 39 | { 40 | use std::hash::Hash as _; 41 | 42 | let mut hasher = Blake3Hasher::default(); 43 | toolchain_version.long().hash(&mut hasher); 44 | let arg_string = { 45 | let mut arg_string = String::new(); 46 | let mut first = true; 47 | for arg in args { 48 | let arg = arg.as_ref(); 49 | if first { 50 | first = false; 51 | } else { 52 | arg_string += " "; 53 | } 54 | arg_string += &shlex::quote(arg); 55 | } 56 | arg_string 57 | }; 58 | arg_string.hash(&mut hasher); 59 | let hash = hasher.hash_value(); 60 | let build_dir = get_package_build_dir(&hash)?; 61 | node::fs::create_dir_all(&build_dir).await?; 62 | let mut result = Install { 63 | hash, 64 | build_dir: build_dir.to_string(), 65 | fingerprint: None, 66 | arg_string, 67 | restore_key: None, 68 | toolchain_version_short: toolchain_version.short().to_string(), 69 | }; 70 | let cache_entry = result.build_cache_entry(); 71 | if let Some(key) = cache_entry.restore().await? 
{ 72 | info!("Restored files from cache with key {}", key); 73 | result.fingerprint = Some(Self::fingerprint_build_dir(&build_dir).await?); 74 | result.restore_key = Some(key); 75 | } 76 | Ok(result) 77 | } 78 | 79 | async fn fingerprint_build_dir(path: &Path) -> Result { 80 | use crate::fingerprinting::{fingerprint_path_with_ignores, Ignores}; 81 | 82 | // It seems that between runs something causes the rustc fingerprint to change. 83 | // It looks like this could simply be the file modification timestamp. This 84 | // would also explain why it seemed to occur with Rustup but not the 85 | // internal toolchain downloader. 86 | // 87 | // https://github.com/rust-lang/cargo/blob/70898e522116f6c23971e2a554b2dc85fd4c84cd/src/cargo/util/rustc.rs#L306 88 | 89 | let mut ignores = Ignores::default(); 90 | ignores.add(1, ".rustc_info.json"); 91 | 92 | let fingerprint = fingerprint_path_with_ignores(path, &ignores).await?; 93 | Ok(fingerprint) 94 | } 95 | 96 | fn build_cache_entry(&self) -> CacheEntry { 97 | use crate::cache_key_builder::{Attribute, CacheKeyBuilder}; 98 | 99 | let mut key_builder = CacheKeyBuilder::new("cargo install build artifacts"); 100 | key_builder.add_key_data(&self.hash); 101 | key_builder.set_attribute(Attribute::ToolchainVersion, self.toolchain_version_short.clone()); 102 | let arg_string = { 103 | let mut arg_string = self.arg_string.clone(); 104 | if arg_string.len() > MAX_ARG_STRING_LENGTH { 105 | let ellipsis = "..."; 106 | arg_string.truncate(MAX_ARG_STRING_LENGTH - ellipsis.len()); 107 | arg_string += ellipsis; 108 | } 109 | arg_string 110 | }; 111 | key_builder.set_attribute(Attribute::ArgsTruncated, arg_string); 112 | let mut cache_entry = key_builder.into_entry(); 113 | cache_entry.path(&Path::from(&self.build_dir)); 114 | cache_entry 115 | } 116 | 117 | async fn cleanup(&self) { 118 | if let Err(e) = actions::io::rm_rf(self.build_dir.as_str()).await.map_err(Error::Js) { 119 | warning!("Failed to clean up build folder at {}: {}", self.build_dir, e); 120 | } 121 | } 122 | } 123 | 124 | #[async_trait(?Send)] 125 | impl Hook for Install { 126 | fn additional_cargo_options(&self) -> Vec> { 127 | vec!["--target-dir".into(), self.build_dir.as_str().into()] 128 | } 129 | 130 | async fn succeeded(&mut self) { 131 | let save = if let Some(old_fingerprint) = &self.fingerprint { 132 | let path = Path::from(&self.build_dir); 133 | match Self::fingerprint_build_dir(&path).await { 134 | Ok(new_fingerprint) => { 135 | let changed = new_fingerprint.content_hash() != old_fingerprint.content_hash(); 136 | if changed { 137 | info!( 138 | "Package artifact cache changed fingerprint from {} to {}", 139 | old_fingerprint.content_hash(), 140 | new_fingerprint.content_hash() 141 | ); 142 | let delta = new_fingerprint.changes_from(old_fingerprint); 143 | info!("{}", render_delta_list(&delta)); 144 | } 145 | changed 146 | } 147 | Err(e) => { 148 | error!("Could not fingerprint build artifact directory: {}", e); 149 | false 150 | } 151 | } 152 | } else { 153 | true 154 | }; 155 | if save { 156 | let cache_entry = self.build_cache_entry(); 157 | match cache_entry 158 | .save_if_update(self.restore_key.as_deref()) 159 | .await 160 | .map_err(Error::Js) 161 | { 162 | Err(e) => { 163 | error!("Failed to save package build artifacts to cache: {}", e); 164 | } 165 | Ok(r) => { 166 | if r.is_some() { 167 | info!("Saved package build artifacts to cache."); 168 | } else { 169 | info!("Looks like a concurrent CI job updated the artifacts, not saving back to cache"); 170 | } 171 | } 172 | } 173 | } 
else { 174 | info!("Build artifacts unchanged, no need to save back to cache."); 175 | } 176 | self.cleanup().await; 177 | } 178 | 179 | async fn failed(&mut self) { 180 | self.cleanup().await; 181 | } 182 | } 183 | -------------------------------------------------------------------------------- /src/cargo.rs: -------------------------------------------------------------------------------- 1 | use crate::action_paths::get_action_cache_dir; 2 | use crate::actions::exec::Command; 3 | use crate::actions::io; 4 | use crate::cargo_hooks::{ 5 | Annotation as AnnotationHook, Composite as CompositeHook, Hook as CargoHook, Install as CargoInstallHook, 6 | }; 7 | use crate::input_manager::{self, Input}; 8 | use crate::node::path::Path; 9 | use crate::node::process; 10 | use crate::{node, nonce, Error}; 11 | use std::borrow::Cow; 12 | 13 | async fn create_empty_dir() -> Result { 14 | let nonce = nonce::build(8); 15 | let path = get_action_cache_dir()? 16 | .join("empty-directories") 17 | .join(&nonce.to_string()); 18 | node::fs::create_dir_all(&path).await?; 19 | Ok(path) 20 | } 21 | 22 | struct ChangeCwdHook { 23 | new_cwd: String, 24 | } 25 | 26 | impl CargoHook for ChangeCwdHook { 27 | fn modify_command(&self, command: &mut Command) { 28 | let path = Path::from(&self.new_cwd); 29 | command.current_dir(&path); 30 | } 31 | } 32 | 33 | #[derive(Clone, Debug)] 34 | pub struct Cargo { 35 | path: Path, 36 | } 37 | 38 | #[derive(Clone, Debug)] 39 | pub struct ToolchainVersion { 40 | long: String, 41 | } 42 | 43 | impl ToolchainVersion { 44 | pub fn short(&self) -> Cow { 45 | self.long.lines().next().unwrap_or_default().trim().into() 46 | } 47 | 48 | pub fn long(&self) -> Cow { 49 | self.long.as_str().into() 50 | } 51 | } 52 | 53 | impl Cargo { 54 | pub async fn from_environment() -> Result { 55 | io::which("cargo", true) 56 | .await 57 | .map(|path| Cargo { path }) 58 | .map_err(Error::Js) 59 | } 60 | 61 | pub async fn from_path(path: &Path) -> Result { 62 | let full_path = process::cwd().join(path); 63 | if !full_path.exists().await { 64 | return Err(Error::PathDoesNotExist(full_path.to_string())); 65 | } 66 | let result = Cargo { path: full_path }; 67 | Ok(result) 68 | } 69 | 70 | pub async fn get_installed(&self) -> Result, Error> { 71 | use parking_lot::Mutex; 72 | use std::sync::Arc; 73 | 74 | // This was added to help remove non-Rustup installed cargo-fmt and rustfmt on 75 | // the GitHub runners. However the binaries do not appear to be 76 | // cargo-managed either. 
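// For illustration, the regex below targets `cargo install --list` output of
// roughly this shape (crate names and versions here are made up):
//
//     cargo-about v0.5.1:
//         cargo-about
//     grcov v0.8.12:
//         grcov
//
// Only the unindented "<name> v<version>:" header lines match the anchored
// pattern; the indented binary names beneath them are ignored.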
77 | 78 | let match_install = 79 | regex::Regex::new(r"^(([[:word:]]|-)+) v([[:digit:]]|\.)+:").expect("Regex compilation failed"); 80 | let installs: Arc>> = Arc::default(); 81 | let installs_captured = installs.clone(); 82 | Command::from(&self.path) 83 | .args(["install", "--list"]) 84 | .outline(move |line| { 85 | if let Some(captures) = match_install.captures(line) { 86 | let name = captures.get(1).expect("Capture missing").as_str(); 87 | installs_captured.lock().push(name.to_string()); 88 | } 89 | }) 90 | .exec() 91 | .await 92 | .map_err(Error::Js)?; 93 | let installs = installs.lock().drain(..).collect(); 94 | Ok(installs) 95 | } 96 | 97 | async fn get_hooks_for_subcommand( 98 | &self, 99 | toolchain: Option<&str>, 100 | subcommand: &str, 101 | args: &[String], 102 | input_manager: &input_manager::Manager, 103 | ) -> Result { 104 | let mut hooks = CompositeHook::default(); 105 | match subcommand { 106 | "build" | "check" | "clippy" => { 107 | let enabled = if let Some(enabled) = input_manager.get(Input::Annotations) { 108 | enabled 109 | .parse::() 110 | .map_err(|_| Error::OptionParseError("annotations".into(), enabled.to_string()))? 111 | } else { 112 | true 113 | }; 114 | if enabled { 115 | hooks.push(AnnotationHook::new(subcommand)); 116 | } 117 | } 118 | "install" => { 119 | // Due to the presence of rust toolchain files, actions-rs decides to change 120 | // directory before invoking cargo install cross. We do the same for all 121 | // installs, not just cross. 122 | let empty_dir = create_empty_dir().await?; 123 | let compiler_version = self.get_toolchain_version(toolchain, Some(&empty_dir)).await?; 124 | let empty_cwd_hook = ChangeCwdHook { 125 | new_cwd: empty_dir.to_string(), 126 | }; 127 | hooks.push(CargoInstallHook::new(&compiler_version, args).await?); 128 | hooks.push(empty_cwd_hook); 129 | } 130 | _ => {} 131 | } 132 | Ok(hooks) 133 | } 134 | 135 | async fn get_toolchain_version( 136 | &self, 137 | toolchain: Option<&str>, 138 | cwd: Option<&Path>, 139 | ) -> Result { 140 | use crate::actions::exec::Stdio; 141 | use parking_lot::Mutex; 142 | use std::sync::Arc; 143 | 144 | let rustc_path = io::which("rustc", true).await.map_err(Error::Js)?; 145 | let mut command = Command::from(&rustc_path); 146 | let output: Arc> = Arc::default(); 147 | let output_captured = output.clone(); 148 | if let Some(toolchain) = toolchain { 149 | command.arg(format!("+{}", toolchain).as_str()); 150 | } 151 | if let Some(cwd) = cwd { 152 | command.current_dir(cwd); 153 | } 154 | command.arg("-Vv"); 155 | command 156 | .outline(move |line| { 157 | let mut out = output_captured.lock(); 158 | *out += line; 159 | *out += "\n"; 160 | }) 161 | .stdout(Stdio::null()); 162 | command.exec().await?; 163 | let long = output.lock().trim().to_string(); 164 | Ok(ToolchainVersion { long }) 165 | } 166 | 167 | pub async fn run<'a, I>( 168 | &'a mut self, 169 | toolchain: Option<&str>, 170 | subcommand: &'a str, 171 | args: I, 172 | input_manager: &input_manager::Manager, 173 | ) -> Result<(), Error> 174 | where 175 | I: IntoIterator, 176 | { 177 | let args: Vec = args.into_iter().map(Into::into).collect(); 178 | let mut final_args = Vec::with_capacity(args.len()); 179 | if let Some(toolchain) = toolchain { 180 | final_args.push(format!("+{}", toolchain)); 181 | } 182 | let mut hooks = self 183 | .get_hooks_for_subcommand(toolchain, subcommand, &args[..], input_manager) 184 | .await?; 185 | final_args.push(subcommand.into()); 186 | 
final_args.extend(hooks.additional_cargo_options().into_iter().map(Cow::into_owned)); 187 | final_args.extend(args); 188 | let mut command = Command::from(&self.path); 189 | command.args(final_args); 190 | hooks.modify_command(&mut command); 191 | if let Err(e) = command.exec().await.map_err(Error::Js) { 192 | hooks.failed().await; 193 | Err(e) 194 | } else { 195 | hooks.succeeded().await; 196 | Ok(()) 197 | } 198 | } 199 | } 200 | -------------------------------------------------------------------------------- /src/actions/exec.rs: -------------------------------------------------------------------------------- 1 | use super::push_line_splitter::PushLineSplitter; 2 | use crate::node::path::Path; 3 | use crate::{node, noop_stream}; 4 | use js_sys::{JsString, Object}; 5 | use parking_lot::Mutex; 6 | use std::sync::Arc; 7 | use wasm_bindgen::closure::Closure; 8 | use wasm_bindgen::JsValue; 9 | 10 | #[derive(Debug, Clone, Copy)] 11 | enum StdioEnum { 12 | Inherit, 13 | Null, 14 | } 15 | 16 | #[derive(Debug, Clone, Copy)] 17 | pub struct Stdio { 18 | inner: StdioEnum, 19 | } 20 | 21 | impl Stdio { 22 | pub fn null() -> Stdio { 23 | Stdio { inner: StdioEnum::Null } 24 | } 25 | 26 | pub fn inherit() -> Stdio { 27 | Stdio { 28 | inner: StdioEnum::Inherit, 29 | } 30 | } 31 | } 32 | 33 | /// Work around for 34 | struct StreamToLines { 35 | splitter: Arc>, 36 | #[allow(clippy::type_complexity)] 37 | callback: Arc>, 38 | closure: Closure, 39 | } 40 | 41 | impl StreamToLines { 42 | #[allow(clippy::type_complexity)] 43 | pub fn new(callback: Arc>) -> StreamToLines { 44 | let splitter: Arc> = Arc::default(); 45 | let closure = { 46 | let splitter = splitter.clone(); 47 | let callback = callback.clone(); 48 | Closure::new(move |data: JsValue| { 49 | let data: js_sys::Uint8Array = data.into(); 50 | let mut splitter = splitter.lock(); 51 | let mut write_buffer = splitter.write_via_buffer(data.length() as usize); 52 | data.copy_to(write_buffer.as_mut()); 53 | drop(write_buffer); 54 | while let Some(line) = splitter.next_line() { 55 | callback(&line); 56 | } 57 | }) 58 | }; 59 | StreamToLines { 60 | splitter, 61 | callback, 62 | closure, 63 | } 64 | } 65 | } 66 | 67 | impl Drop for StreamToLines { 68 | fn drop(&mut self) { 69 | let mut splitter = self.splitter.lock(); 70 | splitter.close(); 71 | while let Some(line) = splitter.next_line() { 72 | (self.callback)(&line); 73 | } 74 | } 75 | } 76 | 77 | impl AsRef for StreamToLines { 78 | fn as_ref(&self) -> &JsValue { 79 | self.closure.as_ref() 80 | } 81 | } 82 | 83 | pub struct Command { 84 | command: Path, 85 | args: Vec, 86 | #[allow(clippy::type_complexity)] 87 | outline: Option>>, 88 | #[allow(clippy::type_complexity)] 89 | errline: Option>>, 90 | stdout: Stdio, 91 | stderr: Stdio, 92 | cwd: Path, 93 | } 94 | 95 | impl Command { 96 | pub fn args(&mut self, args: I) -> &mut Command 97 | where 98 | I: IntoIterator, 99 | S: Into, 100 | { 101 | self.args.extend(args.into_iter().map(Into::into)); 102 | self 103 | } 104 | 105 | pub fn arg>(&mut self, arg: S) -> &mut Command { 106 | self.args(std::iter::once(arg.into())); 107 | self 108 | } 109 | 110 | pub async fn exec(&mut self) -> Result { 111 | let command = self.command.to_string(); 112 | let command = Self::escape_command(command.as_str()); 113 | let command: JsString = command.into(); 114 | let args: Vec = self.args.iter().map(JsString::to_string).collect(); 115 | let options = js_sys::Map::new(); 116 | let listeners = js_sys::Map::new(); 117 | 118 | let outline_adapter = 
self.outline.clone().map(StreamToLines::new); 119 | if let Some(callback) = &outline_adapter { 120 | listeners.set(&"stdout".into(), callback.as_ref()); 121 | } 122 | let errline_adapter = self.errline.clone().map(StreamToLines::new); 123 | if let Some(callback) = &errline_adapter { 124 | listeners.set(&"stderr".into(), callback.as_ref()); 125 | } 126 | 127 | options.set(&"cwd".into(), &self.cwd.to_js_string()); 128 | let sink = noop_stream::Sink::default(); 129 | if let StdioEnum::Null = self.stdout.inner { 130 | options.set(&"outStream".into(), sink.as_ref()); 131 | } 132 | if let StdioEnum::Null = self.stderr.inner { 133 | options.set(&"errStream".into(), sink.as_ref()); 134 | } 135 | 136 | let listeners = Object::from_entries(&listeners).expect("Failed to convert listeners map to object"); 137 | options.set(&"listeners".into(), &listeners); 138 | let options = Object::from_entries(&options).expect("Failed to convert options map to object"); 139 | let result = ffi::exec(&command, Some(args), &options).await.map(|r| { 140 | #[allow(clippy::cast_possible_truncation)] 141 | let code = r.as_f64().expect("exec didn't return a number") as i32; 142 | code 143 | }); 144 | 145 | // Be explict about line-buffer flushing 146 | drop(outline_adapter); 147 | drop(errline_adapter); 148 | result 149 | } 150 | 151 | pub fn outline(&mut self, callback: F) -> &mut Command { 152 | self.outline = Some(Arc::new(Box::new(callback))); 153 | self 154 | } 155 | 156 | pub fn errline(&mut self, callback: F) -> &mut Command { 157 | self.errline = Some(Arc::new(Box::new(callback))); 158 | self 159 | } 160 | 161 | pub fn stdout(&mut self, redirect: Stdio) -> &mut Command { 162 | self.stdout = redirect; 163 | self 164 | } 165 | 166 | pub fn stderr(&mut self, redirect: Stdio) -> &mut Command { 167 | self.stderr = redirect; 168 | self 169 | } 170 | 171 | pub fn current_dir(&mut self, path: &Path) -> &mut Command { 172 | self.cwd = path.clone(); 173 | self 174 | } 175 | 176 | // Some bright spark had the idea of making an exec function that could both 177 | // handle execvp and shell command style invocations rather than have two 178 | // functions or some sort of flag to handle these different use cases. 179 | // Consequently we now need to escape our command so the apparently bespoke 180 | // unescaping strategy in `argStringToArray` will not mangle our command 181 | // in the case it contains spaces or double quotes. 182 | fn escape_command(command: &str) -> String { 183 | let mut result = String::with_capacity(command.len()); 184 | // - Spaces must be located between quotes to not be considered a token 185 | // separator. 186 | // - Outside of double quotes backslash is itself. 187 | // - Within double quotes, backslash is itself unless followed by a double quote 188 | // in which case it is the double quote. This means double quotes cannot 189 | // surround a string-fragment containing a trailing backslash. 
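// A worked example of the rule implemented below (the path is made up):
//
//     /tmp/my tools/cargo   becomes   /tmp/my" "tools/cargo
//
// i.e. each space is wrapped as the three characters `" "`, and each literal
// double quote is emitted as the sequence `"\"`, so argStringToArray on the
// @actions/exec side reassembles the whole command as a single token.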
190 | for c in command.chars() { 191 | match c { 192 | ' ' => result.push_str("\" \""), 193 | '\"' => result.push_str("\"\\\""), 194 | _ => result.push(c), 195 | } 196 | } 197 | result 198 | } 199 | } 200 | 201 | impl<'a> From<&'a Path> for Command { 202 | fn from(path: &'a Path) -> Command { 203 | Command { 204 | command: path.clone(), 205 | args: Vec::new(), 206 | outline: None, 207 | errline: None, 208 | stdout: Stdio::inherit(), 209 | stderr: Stdio::inherit(), 210 | cwd: node::process::cwd(), 211 | } 212 | } 213 | } 214 | 215 | pub mod ffi { 216 | use js_sys::JsString; 217 | use wasm_bindgen::prelude::*; 218 | 219 | #[wasm_bindgen(module = "@actions/exec")] 220 | extern "C" { 221 | #[wasm_bindgen(catch)] 222 | pub async fn exec( 223 | comand_line: &JsString, 224 | args: Option>, 225 | options: &JsValue, 226 | ) -> Result; 227 | } 228 | } 229 | -------------------------------------------------------------------------------- /src/toolchain.rs: -------------------------------------------------------------------------------- 1 | use crate::action_paths::{get_action_cache_dir, get_action_share_dir}; 2 | use crate::actions::cache::Entry as CacheEntry; 3 | use crate::node::path::Path; 4 | use crate::node::{self}; 5 | use crate::rustup::ToolchainConfig; 6 | use crate::{actions, info, Error}; 7 | use async_recursion::async_recursion; 8 | use rustup_toolchain_manifest::manifest::Package as ManifestPackage; 9 | use rustup_toolchain_manifest::Toolchain; 10 | use std::str::FromStr; 11 | use target_lexicon::Triple; 12 | 13 | const MAX_CONCURRENT_PACKAGE_INSTALLS: usize = 4; 14 | 15 | fn get_toolchain_home(toolchain: &Toolchain) -> Result { 16 | let dir = get_action_share_dir()?.join("toolchains").join(&toolchain.to_string()); 17 | Ok(dir) 18 | } 19 | 20 | fn get_package_decompress_path(package: &ManifestPackage) -> Result { 21 | // We must not use base64 encoding for the folder name because that 22 | // implies the platform filename is case sensitive. 23 | let package_hash = package.unique_identifier(); 24 | let dir = get_action_cache_dir()? 
25 | .join("package-decompression") 26 | .join(&package_hash.to_string()); 27 | Ok(dir) 28 | } 29 | 30 | fn compute_package_cache_key(package: &ManifestPackage) -> CacheEntry { 31 | use crate::cache_key_builder::{Attribute, CacheKeyBuilder}; 32 | 33 | let mut builder = CacheKeyBuilder::new(&package.name); 34 | builder.add_key_data(&package.unique_identifier()); 35 | builder.set_attribute(Attribute::Target, package.supported_target.to_string()); 36 | builder.set_attribute(Attribute::ToolchainVersion, package.version.clone()); 37 | builder.into_entry() 38 | } 39 | 40 | fn default_target_for_platform() -> Result { 41 | let target = Triple::from_str(match (node::os::arch().as_str(), node::os::platform().as_str()) { 42 | ("arm64", "linux") => "aarch64-unknown-linux-gnu", 43 | ("ia32", "linux") => "i686-unknown-linux-gnu", 44 | ("ia32", "win32") => "i686-pc-windows-msvc", 45 | ("x64", "darwin") => "x86_64-apple-darwin", 46 | ("x64", "linux") => "x86_64-unknown-linux-gnu", 47 | ("x64", "win32") => "x86_64-pc-windows-msvc", 48 | (arch, platform) => return Err(Error::UnsupportedPlatform(format!("{}-{}", platform, arch))), 49 | }) 50 | .expect("Failed to parse hardcoded platform triple"); 51 | Ok(target) 52 | } 53 | 54 | #[async_recursion(?Send)] 55 | async fn overlay_and_move_dir(from: &Path, to: &Path) -> Result<(), Error> { 56 | node::fs::create_dir_all(to).await?; 57 | { 58 | let dir = node::fs::read_dir(from).await?; 59 | for entry in dir { 60 | let from = entry.path(); 61 | let to = to.join(&entry.file_name()); 62 | let file_type = entry.file_type(); 63 | if file_type.is_dir() { 64 | overlay_and_move_dir(&from, &to).await?; 65 | } else { 66 | node::fs::rename(&from, &to).await?; 67 | } 68 | } 69 | } 70 | node::fs::remove_dir(from).await?; 71 | Ok(()) 72 | } 73 | 74 | async fn install_components(toolchain: &Toolchain, package: &ManifestPackage) -> Result<(), Error> { 75 | use crate::package_manifest::{EntryType, PackageManifest}; 76 | 77 | let cargo_home = get_toolchain_home(toolchain)?; 78 | node::fs::create_dir_all(&cargo_home).await?; 79 | 80 | let extract_path = get_package_decompress_path(package)?; 81 | let dir = node::fs::read_dir(&extract_path).await?; 82 | for entry in dir.filter(|d| d.file_type().is_dir()) { 83 | let components_path = entry.path().join("components"); 84 | let components: Vec = node::fs::read_file(&components_path) 85 | .await 86 | .map(|data| String::from_utf8_lossy(&data[..]).into_owned())? 
87 | .lines() 88 | .map(String::from) 89 | .collect(); 90 | for component in components { 91 | let component_path = entry.path().join(&component); 92 | let manifest_path = component_path.clone().join("manifest.in"); 93 | let manifest = node::fs::read_file(&manifest_path) 94 | .await 95 | .map(|data| String::from_utf8_lossy(&data[..]).into_owned())?; 96 | let manifest = PackageManifest::from_str(manifest.as_str())?; 97 | for (entry_type, path) in manifest.iter() { 98 | let source = component_path.join(path); 99 | let dest = cargo_home.join(path); 100 | node::fs::create_dir_all(&dest.parent()).await?; 101 | 102 | match *entry_type { 103 | EntryType::File => node::fs::rename(&source, &dest).await?, 104 | EntryType::Directory => overlay_and_move_dir(&source, &dest).await?, 105 | } 106 | } 107 | } 108 | } 109 | Ok(()) 110 | } 111 | 112 | async fn cleanup_decompressed_package(package: &ManifestPackage) -> Result<(), Error> { 113 | let extract_path = get_package_decompress_path(package)?; 114 | actions::io::rm_rf(&extract_path).await?; 115 | Ok(()) 116 | } 117 | 118 | async fn fetch_and_decompress_package(package: &ManifestPackage) -> Result<(), Error> { 119 | use actions::tool_cache::{self, StreamCompression}; 120 | use rustup_toolchain_manifest::manifest::Compression; 121 | 122 | let extract_path = get_package_decompress_path(package)?; 123 | let mut cache_entry = compute_package_cache_key(package); 124 | cache_entry.path(&extract_path); 125 | if let Some(key) = cache_entry.restore().await? { 126 | info!("Restored files from cache with key {}", key); 127 | } else { 128 | let remote_binary = package 129 | .tarballs 130 | .iter() 131 | .find(|(c, _)| *c == Compression::Gzip) 132 | .expect("Unable to find tar.gz") 133 | .1 134 | .clone(); 135 | info!("Will need to download the following: {:#?}", remote_binary); 136 | let tarball_path = tool_cache::download_tool(remote_binary.url.as_str()) 137 | .await 138 | .map_err(Error::Js)?; 139 | info!("Downloaded tarball to {}", tarball_path); 140 | info!("Will extract to {}", extract_path); 141 | tool_cache::extract_tar(&tarball_path, StreamCompression::Gzip, Some(&extract_path)).await?; 142 | info!("Extracted to {}", extract_path); 143 | let cache_id = cache_entry.save().await?; 144 | info!("Saved as {}", cache_id); 145 | } 146 | Ok(()) 147 | } 148 | 149 | pub async fn install(toolchain_config: &ToolchainConfig) -> Result<(), Error> { 150 | use actions::tool_cache; 151 | use futures::{StreamExt as _, TryStreamExt as _}; 152 | use rustup_toolchain_manifest::{InstallSpec, Manifest}; 153 | 154 | let toolchain = { 155 | let mut toolchain = Toolchain::from_str(&toolchain_config.name)?; 156 | toolchain.host = Some(match toolchain.host { 157 | Some(host) => host, 158 | None => default_target_for_platform()?, 159 | }); 160 | toolchain 161 | }; 162 | let manifest_url = toolchain.manifest_url(); 163 | info!( 164 | "Will download manifest for toolchain {} from {}", 165 | toolchain, manifest_url 166 | ); 167 | let manifest_path = tool_cache::download_tool(manifest_url.as_str()) 168 | .await 169 | .map_err(Error::Js)?; 170 | info!("Downloaded manifest to {}", manifest_path); 171 | let manifest = node::fs::read_file(&manifest_path).await?; 172 | let manifest = String::from_utf8(manifest).map_err(|_| Error::ManifestNotUtf8)?; 173 | let manifest = Manifest::try_from(manifest.as_str())?; 174 | let target = toolchain.host.clone().expect("Toolchain target unexpectedly missing"); 175 | info!("Attempting to find toolchain for target {}", target); 176 | let install_spec = 
InstallSpec {
177 |         profile: toolchain_config.profile.clone(),
178 |         components: toolchain_config.components.iter().cloned().collect(),
179 |         targets: toolchain_config.targets.iter().cloned().collect(),
180 |     };
181 |     let downloads = manifest.find_downloads_for_install(&target, &install_spec)?;
182 |     let process_packages = futures::stream::iter(downloads.iter())
183 |         .map(|download| async {
184 |             fetch_and_decompress_package(download).await?;
185 |             install_components(&toolchain, download).await?;
186 |             cleanup_decompressed_package(download).await?;
187 |             Ok::<_, Error>(())
188 |         })
189 |         .buffer_unordered(MAX_CONCURRENT_PACKAGE_INSTALLS);
190 |     process_packages.try_collect().await?;
191 | 
192 |     if toolchain_config.set_default {
193 |         let cargo_bin = get_toolchain_home(&toolchain)?.join("bin");
194 |         actions::core::add_path(&cargo_bin);
195 |     } else {
196 |         return Err(Error::ToolchainInstallFunctionality("default=false".into()));
197 |     }
198 |     if toolchain_config.set_override {
199 |         return Err(Error::ToolchainInstallFunctionality("override".into()));
200 |     }
201 |     Ok(())
202 | }
203 | 
--------------------------------------------------------------------------------
/src/fingerprinting.rs:
--------------------------------------------------------------------------------
1 | use crate::delta::Action as DeltaAction;
2 | pub use crate::dir_tree::Ignores;
3 | use crate::node::fs;
4 | use crate::node::path::{self, Path};
5 | use crate::{dir_tree, Error};
6 | use async_trait::async_trait;
7 | use chrono::{DateTime, Utc};
8 | use itertools::{Either, EitherOrBoth};
9 | use serde::{Deserialize, Serialize};
10 | use std::borrow::Cow;
11 | use std::collections::hash_map::DefaultHasher;
12 | use std::collections::{btree_map, BTreeMap, VecDeque};
13 | use std::hash::{Hash, Hasher};
14 | 
15 | const ROOT_NAME: &str = ".";
16 | 
17 | #[derive(Debug, Clone, Copy, Serialize, Deserialize, Eq, PartialEq)]
18 | struct Metadata {
19 |     uid: u64,
20 |     gid: u64,
21 |     len: u64,
22 |     mode: u64,
23 |     modified: DateTime<Utc>,
24 |     accessed: DateTime<Utc>,
25 | }
26 | 
27 | impl From<&fs::Metadata> for Metadata {
28 |     fn from(stats: &fs::Metadata) -> Metadata {
29 |         Metadata {
30 |             uid: stats.uid(),
31 |             gid: stats.gid(),
32 |             len: stats.len(),
33 |             mode: stats.mode(),
34 |             modified: stats.modified(),
35 |             accessed: stats.accessed(),
36 |         }
37 |     }
38 | }
39 | 
40 | impl Metadata {
41 |     fn hash_noteworthy<H: Hasher>(&self, hasher: &mut H) {
42 |         // Noteworthy basically means anything that would need an rsync
43 |         self.uid.hash(hasher);
44 |         self.gid.hash(hasher);
45 |         self.len.hash(hasher);
46 |         self.mode.hash(hasher);
47 |         self.modified.hash(hasher);
48 |     }
49 | 
50 |     fn equal_noteworthy(&self, other: &Metadata) -> bool {
51 |         self.uid == other.uid
52 |             && self.gid == other.gid
53 |             && self.len == other.len
54 |             && self.mode == other.mode
55 |             && self.modified == other.modified
56 |     }
57 | }
58 | 
59 | #[derive(Debug, Clone, Serialize, Deserialize, Eq, PartialEq)]
60 | enum Entry {
61 |     File(Metadata),
62 |     Dir(BTreeMap<String, Entry>),
63 | }
64 | 
65 | #[derive(Debug, Clone, Serialize, Deserialize)]
66 | pub struct Fingerprint {
67 |     content_hash: u64,
68 |     modified: Option<DateTime<Utc>>,
69 |     accessed: Option<DateTime<Utc>>,
70 |     root: Entry,
71 | }
72 | 
73 | type BranchIter<'a> = btree_map::Iter<'a, String, Entry>;
74 | 
75 | #[derive(Debug)]
76 | struct FlatteningIterator<'a> {
77 |     separator: String,
78 |     stack: VecDeque<(Option<String>, Either<BranchIter<'a>, Metadata>)>,
79 | }
80 | 
81 | impl<'a> Iterator for FlatteningIterator<'a> {
82 |     type Item = (Cow<'a, str>, Metadata);
83 | 
84 |     fn next(&mut self) -> Option<Self::Item> {
85 |         while let Some((path, content)) = self.stack.pop_back() {
86 |             match content {
87 |                 Either::Left(mut iter) => match iter.next() {
88 |                     None => {}
89 |                     Some((base_name, entry)) => {
90 |                         let mut item_path = path.as_deref().unwrap_or("").to_string();
91 |                         item_path += base_name;
92 |                         self.stack.push_back((path, Either::Left(iter)));
93 |                         match entry {
94 |                             Entry::File(metadata) => return Some((item_path.into(), *metadata)),
95 |                             Entry::Dir(sub_tree) => {
96 |                                 item_path += self.separator.as_str();
97 |                                 self.stack.push_back((Some(item_path), Either::Left(sub_tree.iter())));
98 |                             }
99 |                         }
100 |                     }
101 |                 },
102 |                 Either::Right(metadata) => {
103 |                     let path = path.unwrap_or_else(|| ROOT_NAME.to_string());
104 |                     return Some((path.into(), metadata));
105 |                 }
106 |             }
107 |         }
108 |         None
109 |     }
110 | }
111 | 
112 | impl Fingerprint {
113 |     pub fn content_hash(&self) -> u64 {
114 |         self.content_hash
115 |     }
116 | 
117 |     fn compute_entry_hash(entry: &Entry) -> u64 {
118 |         let mut hasher = DefaultHasher::default();
119 |         match entry {
120 |             Entry::File(metadata) => {
121 |                 metadata.hash_noteworthy(&mut hasher);
122 |             }
123 |             Entry::Dir(sub_tree) => {
124 |                 for (name, entry) in sub_tree {
125 |                     name.hash(&mut hasher);
126 |                     let hash = Self::compute_entry_hash(entry);
127 |                     hash.hash(&mut hasher);
128 |                 }
129 |             }
130 |         }
131 |         hasher.finish()
132 |     }
133 | 
134 |     pub fn modified(&self) -> Option<DateTime<Utc>> {
135 |         self.modified
136 |     }
137 | 
138 |     pub fn accessed(&self) -> Option<DateTime<Utc>> {
139 |         self.accessed
140 |     }
141 | 
142 |     fn sorted_file_paths_and_metadata(&self) -> FlatteningIterator<'_> {
143 |         let root_content = match &self.root {
144 |             Entry::File(metadata) => Either::Right(*metadata),
145 |             Entry::Dir(sub_tree) => Either::Left(sub_tree.iter()),
146 |         };
147 |         FlatteningIterator {
148 |             stack: VecDeque::from([(None, root_content)]),
149 |             separator: path::separator().into(),
150 |         }
151 |     }
152 | 
153 |     pub fn changes_from(&self, other: &Fingerprint) -> Vec<(String, DeltaAction)> {
154 |         use itertools::Itertools as _;
155 | 
156 |         let from_iter = other.sorted_file_paths_and_metadata();
157 |         let to_iter = self.sorted_file_paths_and_metadata();
158 |         from_iter
159 |             .merge_join_by(to_iter, |left, right| left.0.cmp(&right.0))
160 |             .filter_map(|element| match element {
161 |                 EitherOrBoth::Both(left, right) => {
162 |                     (!left.1.equal_noteworthy(&right.1)).then(|| (left.0.into_owned(), DeltaAction::Changed))
163 |                 }
164 |                 EitherOrBoth::Left(left) => Some((left.0.into_owned(), DeltaAction::Removed)),
165 |                 EitherOrBoth::Right(right) => Some((right.0.into_owned(), DeltaAction::Added)),
166 |             })
167 |             .collect()
168 |     }
169 | }
170 | 
171 | #[allow(dead_code)]
172 | pub async fn fingerprint_path(path: &Path) -> Result<Fingerprint, Error> {
173 |     let ignores = Ignores::default();
174 |     fingerprint_path_with_ignores(path, &ignores).await
175 | }
176 | 
177 | struct BuildFingerprintVisitor {
178 |     stack: VecDeque<Entry>,
179 |     modified: Option<DateTime<Utc>>,
180 |     accessed: Option<DateTime<Utc>>,
181 | }
182 | 
183 | impl BuildFingerprintVisitor {
184 |     fn push_file(&mut self, file_name: String, metadata: Metadata) {
185 |         let to_insert = Entry::File(metadata);
186 |         match self.stack.back_mut() {
187 |             None => self.stack.push_back(to_insert),
188 |             Some(entry) => match entry {
189 |                 Entry::File(_) => {
190 |                     self.stack.push_back(to_insert);
191 |                 }
192 |                 Entry::Dir(ref mut map) => {
193 |                     map.insert(file_name, to_insert);
194 |                 }
195 |             },
196 |         }
197 |     }
198 | }
199 | 
200 | #[async_trait(?Send)]
201 | impl dir_tree::Visitor for BuildFingerprintVisitor {
202 |     async fn
enter_folder(&mut self, _path: &Path) -> Result<(), Error> { 203 | self.stack.push_back(Entry::Dir(BTreeMap::new())); 204 | Ok(()) 205 | } 206 | 207 | async fn exit_folder(&mut self, path: &Path) -> Result<(), Error> { 208 | if self.stack.len() > 1 { 209 | let entry = self.stack.pop_back().expect("Missing tree visitor stack entry"); 210 | let name = path.file_name(); 211 | match self.stack.back_mut() { 212 | None => panic!("Missing parent entry on tree visitor stack"), 213 | Some(Entry::File(_)) => panic!("Parent entry on tree visitor stack wasn't a folder"), 214 | Some(Entry::Dir(map)) => { 215 | map.insert(name, entry); 216 | } 217 | } 218 | } 219 | Ok(()) 220 | } 221 | 222 | async fn visit_entry(&mut self, path: &Path, is_file: bool) -> Result<(), Error> { 223 | if is_file { 224 | let stats = fs::symlink_metadata(path).await?; 225 | let metadata = Metadata::from(&stats); 226 | self.modified = Some(match self.modified { 227 | None => metadata.modified, 228 | Some(latest) => std::cmp::max(latest, metadata.modified), 229 | }); 230 | self.accessed = Some(match self.accessed { 231 | None => metadata.accessed, 232 | Some(latest) => std::cmp::max(latest, metadata.accessed), 233 | }); 234 | let file_name = path.file_name(); 235 | self.push_file(file_name, metadata); 236 | } else { 237 | panic!("Expected to descend into all directories"); 238 | } 239 | Ok(()) 240 | } 241 | } 242 | 243 | pub async fn fingerprint_path_with_ignores(path: &Path, ignores: &Ignores) -> Result { 244 | let mut visitor = BuildFingerprintVisitor { 245 | stack: VecDeque::new(), 246 | modified: None, 247 | accessed: None, 248 | }; 249 | dir_tree::apply_visitor(path, ignores, &mut visitor).await?; 250 | assert_eq!(visitor.stack.len(), 1, "Tree data stack should only have single entry"); 251 | let root = visitor 252 | .stack 253 | .pop_back() 254 | .expect("Tree data stack was unexpectedly empty"); 255 | let content_hash = Fingerprint::compute_entry_hash(&root); 256 | let result = Fingerprint { 257 | content_hash, 258 | modified: visitor.modified, 259 | accessed: visitor.accessed, 260 | root, 261 | }; 262 | Ok(result) 263 | } 264 | -------------------------------------------------------------------------------- /src/actions/core.rs: -------------------------------------------------------------------------------- 1 | use crate::node::path::Path; 2 | use js_sys::{JsString, Number, Object}; 3 | use wasm_bindgen::JsValue; 4 | 5 | #[macro_export] 6 | macro_rules! debug { 7 | ($($arg:tt)*) => {{ 8 | $crate::actions::core::debug(std::format!($($arg)*).as_str()); 9 | }}; 10 | } 11 | 12 | #[macro_export] 13 | macro_rules! info { 14 | ($($arg:tt)*) => {{ 15 | $crate::actions::core::info(std::format!($($arg)*).as_str()); 16 | }}; 17 | } 18 | 19 | #[macro_export] 20 | macro_rules! notice { 21 | ($($arg:tt)*) => {{ 22 | $crate::actions::core::notice(std::format!($($arg)*).as_str()); 23 | }}; 24 | } 25 | 26 | #[macro_export] 27 | macro_rules! warning { 28 | ($($arg:tt)*) => {{ 29 | $crate::actions::core::warning(std::format!($($arg)*).as_str()); 30 | }}; 31 | } 32 | 33 | #[macro_export] 34 | macro_rules! 
error {
35 |     ($($arg:tt)*) => {{
36 |         $crate::actions::core::error(std::format!($($arg)*).as_str());
37 |     }};
38 | }
39 | 
40 | pub fn debug<S: Into<JsString>>(message: S) {
41 |     ffi::debug(&message.into());
42 | }
43 | 
44 | pub fn info<S: Into<JsString>>(message: S) {
45 |     ffi::info(&message.into());
46 | }
47 | 
48 | pub fn notice<A: Into<Annotation>>(message: A) {
49 |     message.into().notice();
50 | }
51 | 
52 | pub fn warning<A: Into<Annotation>>(message: A) {
53 |     message.into().warning();
54 | }
55 | 
56 | pub fn error<A: Into<Annotation>>(message: A) {
57 |     message.into().error();
58 | }
59 | 
60 | pub fn set_output<N: Into<JsString>, V: Into<JsString>>(name: N, value: V) {
61 |     ffi::set_output(&name.into(), &value.into());
62 | }
63 | 
64 | #[derive(Debug)]
65 | pub struct Input {
66 |     name: JsString,
67 |     required: bool,
68 |     trim_whitespace: bool,
69 | }
70 | 
71 | impl<N: Into<JsString>> From<N> for Input {
72 |     fn from(name: N) -> Input {
73 |         Input {
74 |             name: name.into(),
75 |             required: false,
76 |             trim_whitespace: true,
77 |         }
78 |     }
79 | }
80 | 
81 | impl Input {
82 |     pub fn required(&mut self, value: bool) -> &mut Input {
83 |         self.required = value;
84 |         self
85 |     }
86 | 
87 |     pub fn trim_whitespace(&mut self, value: bool) -> &mut Input {
88 |         self.trim_whitespace = value;
89 |         self
90 |     }
91 | 
92 |     fn to_ffi(&self) -> ffi::InputOptions {
93 |         ffi::InputOptions {
94 |             required: Some(self.required),
95 |             trim_whitespace: Some(self.trim_whitespace),
96 |         }
97 |     }
98 | 
99 |     pub fn get(&mut self) -> Result<Option<String>, JsValue> {
100 |         let ffi = self.to_ffi();
101 |         let value = String::from(ffi::get_input(&self.name, Some(ffi))?);
102 |         Ok(if value.is_empty() { None } else { Some(value) })
103 |     }
104 | 
105 |     pub fn get_required(&mut self) -> Result<String, JsValue> {
106 |         let mut ffi = self.to_ffi();
107 |         ffi.required = Some(true);
108 |         ffi::get_input(&self.name, Some(ffi)).map(String::from)
109 |     }
110 | }
111 | 
112 | #[derive(Debug)]
113 | pub struct Annotation {
114 |     message: String,
115 |     title: Option<String>,
116 |     file: Option<Path>,
117 |     start_line: Option<usize>,
118 |     end_line: Option<usize>,
119 |     start_column: Option<usize>,
120 |     end_column: Option<usize>,
121 | }
122 | 
123 | impl<M: Into<String>> From<M> for Annotation {
124 |     fn from(message: M) -> Annotation {
125 |         Annotation {
126 |             message: message.into(),
127 |             title: None,
128 |             file: None,
129 |             start_line: None,
130 |             end_line: None,
131 |             start_column: None,
132 |             end_column: None,
133 |         }
134 |     }
135 | }
136 | 
137 | #[derive(Copy, Clone, Debug)]
138 | pub enum AnnotationLevel {
139 |     Notice,
140 |     Warning,
141 |     Error,
142 | }
143 | 
144 | impl Annotation {
145 |     pub fn title(&mut self, title: &str) -> &mut Annotation {
146 |         self.title = Some(title.to_string());
147 |         self
148 |     }
149 | 
150 |     pub fn file(&mut self, path: &Path) -> &mut Annotation {
151 |         self.file = Some(path.clone());
152 |         self
153 |     }
154 | 
155 |     pub fn start_line(&mut self, start_line: usize) -> &mut Annotation {
156 |         self.start_line = Some(start_line);
157 |         self
158 |     }
159 | 
160 |     pub fn end_line(&mut self, end_line: usize) -> &mut Annotation {
161 |         self.end_line = Some(end_line);
162 |         self
163 |     }
164 | 
165 |     pub fn start_column(&mut self, start_column: usize) -> &mut Annotation {
166 |         self.start_column = Some(start_column);
167 |         self
168 |     }
169 | 
170 |     pub fn end_column(&mut self, end_column: usize) -> &mut Annotation {
171 |         self.end_column = Some(end_column);
172 |         self
173 |     }
174 | 
175 |     fn build_js_properties(&self) -> Object {
176 |         let properties = js_sys::Map::new();
177 |         if let Some(title) = &self.title {
178 |             properties.set(&"title".into(), JsString::from(title.as_str()).as_ref());
179 |         }
180 |         if let Some(file) = &self.file {
181 |             properties.set(&"file".into(), file.to_js_string().as_ref());
182 |         }
183 |         for (name, value) in [
184 |             ("startLine", &self.start_line),
185 |             ("endLine", &self.end_line),
186 |             ("startColumn", &self.start_column),
187 |             ("endColumn", &self.end_column),
188 |         ] {
189 |             if let Some(number) = value.and_then(|n| TryInto::<u32>::try_into(n).ok()) {
190 |                 properties.set(&name.into(), Number::from(number).as_ref());
191 |             }
192 |         }
193 |         Object::from_entries(&properties).expect("Failed to convert options map to object")
194 |     }
195 | 
196 |     pub fn error(&self) {
197 |         self.output(AnnotationLevel::Error);
198 |     }
199 | 
200 |     pub fn notice(&self) {
201 |         self.output(AnnotationLevel::Notice);
202 |     }
203 | 
204 |     pub fn warning(&self) {
205 |         self.output(AnnotationLevel::Warning);
206 |     }
207 | 
208 |     pub fn output(&self, level: AnnotationLevel) {
209 |         let message = JsString::from(self.message.as_str());
210 |         let properties = self.build_js_properties();
211 |         match level {
212 |             AnnotationLevel::Error => ffi::error(&message, Some(properties)),
213 |             AnnotationLevel::Warning => ffi::warning(&message, Some(properties)),
214 |             AnnotationLevel::Notice => ffi::notice(&message, Some(properties)),
215 |         }
216 |     }
217 | }
218 | 
219 | pub fn get_input<I: Into<Input>>(input: I) -> Result<Option<String>, JsValue> {
220 |     let mut input = input.into();
221 |     input.get()
222 | }
223 | 
224 | pub fn set_failed<M: Into<JsString>>(message: M) {
225 |     ffi::set_failed(&message.into());
226 | }
227 | 
228 | pub fn add_path(path: &Path) {
229 |     ffi::add_path(&path.into());
230 | }
231 | 
232 | pub fn export_variable<N: Into<JsString>, V: Into<JsString>>(name: N, value: V) {
233 |     let name = name.into();
234 |     let value = value.into();
235 |     ffi::export_variable(&name, &value);
236 | }
237 | 
238 | pub fn save_state<N: Into<JsString>, V: Into<JsString>>(name: N, value: V) {
239 |     let name = name.into();
240 |     let value = value.into();
241 |     ffi::save_state(&name, &value);
242 | }
243 | 
244 | pub fn get_state<N: Into<JsString>>(name: N) -> Option<String> {
245 |     let name = name.into();
246 |     let value: String = ffi::get_state(&name).into();
247 |     let value = value.trim();
248 |     if value.is_empty() {
249 |         None
250 |     } else {
251 |         Some(value.into())
252 |     }
253 | }
254 | 
255 | pub fn start_group<N: Into<JsString>>(name: N) {
256 |     ffi::start_group(&name.into());
257 | }
258 | 
259 | pub fn end_group() {
260 |     ffi::end_group();
261 | }
262 | 
263 | #[allow(clippy::drop_non_drop)]
264 | pub mod ffi {
265 |     use js_sys::{JsString, Object};
266 |     use wasm_bindgen::prelude::*;
267 | 
268 |     #[wasm_bindgen]
269 |     pub struct InputOptions {
270 |         pub required: Option<bool>,
271 | 
272 |         #[wasm_bindgen(js_name = "trimWhitespace")]
273 |         pub trim_whitespace: Option<bool>,
274 |     }
275 | 
276 |     #[wasm_bindgen(module = "@actions/core")]
277 |     extern "C" {
278 |         /// Gets the value of an input. The value is also trimmed.
279 |         #[wasm_bindgen(js_name = "getInput", catch)]
280 |         pub fn get_input(name: &JsString, options: Option<InputOptions>) -> Result<JsString, JsValue>;
281 | 
282 |         /// Writes info
283 |         #[wasm_bindgen]
284 |         pub fn info(message: &JsString);
285 | 
286 |         /// Writes debug
287 |         #[wasm_bindgen]
288 |         pub fn debug(message: &JsString);
289 | 
290 |         /// Writes an error with an optional annotation
291 |         #[wasm_bindgen]
292 |         pub fn error(message: &JsString, annotation: Option<Object>);
293 | 
294 |         /// Writes a warning with an optional annotation
295 |         #[wasm_bindgen]
296 |         pub fn warning(message: &JsString, annotation: Option<Object>);
297 | 
298 |         /// Writes a notice with an optional annotation
299 |         #[wasm_bindgen]
300 |         pub fn notice(message: &JsString, annotation: Option<Object>);
301 | 
302 |         /// Sets the action status to failed.
303 | /// When the action exits it will be with an exit code of 1. 304 | #[wasm_bindgen(js_name = "setFailed")] 305 | pub fn set_failed(message: &JsString); 306 | 307 | /// Sets the value of an output. 308 | #[wasm_bindgen(js_name = "setOutput")] 309 | pub fn set_output(name: &JsString, value: &JsString); 310 | 311 | #[wasm_bindgen(js_name = "addPath")] 312 | pub fn add_path(path: &JsString); 313 | 314 | #[wasm_bindgen(js_name = "exportVariable")] 315 | pub fn export_variable(name: &JsString, value: &JsString); 316 | 317 | #[wasm_bindgen(js_name = "saveState")] 318 | pub fn save_state(name: &JsString, value: &JsString); 319 | 320 | #[wasm_bindgen(js_name = "getState")] 321 | pub fn get_state(name: &JsString) -> JsString; 322 | 323 | #[wasm_bindgen(js_name = "startGroup")] 324 | pub fn start_group(name: &JsString); 325 | 326 | #[wasm_bindgen(js_name = "endGroup")] 327 | pub fn end_group(); 328 | } 329 | } 330 | -------------------------------------------------------------------------------- /src/node/path.rs: -------------------------------------------------------------------------------- 1 | use js_sys::JsString; 2 | use lazy_static::lazy_static; 3 | use std::borrow::Cow; 4 | 5 | #[derive(Clone)] 6 | pub struct Path { 7 | inner: JsString, 8 | } 9 | 10 | lazy_static! { 11 | static ref SEPARATOR: String = { 12 | use wasm_bindgen::JsCast as _; 13 | ffi::SEPARATOR 14 | .clone() 15 | .dyn_into::() 16 | .expect("separator wasn't a string") 17 | .into() 18 | }; 19 | } 20 | 21 | lazy_static! { 22 | static ref DELIMITER: String = { 23 | use wasm_bindgen::JsCast as _; 24 | ffi::DELIMITER 25 | .clone() 26 | .dyn_into::() 27 | .expect("delimiter wasn't a string") 28 | .into() 29 | }; 30 | } 31 | 32 | impl std::fmt::Display for Path { 33 | fn fmt(&self, formatter: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> { 34 | let string = String::from(&self.inner); 35 | string.fmt(formatter) 36 | } 37 | } 38 | 39 | impl PartialEq for Path { 40 | fn eq(&self, rhs: &Path) -> bool { 41 | // relative() resolves paths according to the CWD so we should only 42 | // use it if they will both be resolved the same way 43 | if self.is_absolute() == rhs.is_absolute() { 44 | // This should handle both case-sensitivity and trailing slash issues 45 | let relative = ffi::relative(&self.inner, &rhs.inner); 46 | relative.length() == 0 47 | } else { 48 | false 49 | } 50 | } 51 | } 52 | 53 | impl Path { 54 | pub fn push>(&mut self, path: P) { 55 | let path = path.into(); 56 | let joined = if path.is_absolute() { 57 | path.inner 58 | } else { 59 | ffi::join(vec![self.inner.clone(), path.inner]) 60 | }; 61 | self.inner = joined; 62 | } 63 | 64 | pub fn to_js_string(&self) -> JsString { 65 | self.inner.to_string() 66 | } 67 | 68 | #[must_use] 69 | pub fn parent(&self) -> Path { 70 | let parent = ffi::dirname(&self.inner); 71 | Path { inner: parent } 72 | } 73 | 74 | pub fn is_absolute(&self) -> bool { 75 | ffi::is_absolute(&self.inner) 76 | } 77 | 78 | pub fn file_name(&self) -> String { 79 | let result = ffi::basename(&self.inner, None); 80 | result.into() 81 | } 82 | 83 | pub async fn exists(&self) -> bool { 84 | super::fs::ffi::access(&self.inner, None).await.is_ok() 85 | } 86 | 87 | #[must_use] 88 | pub fn join>(&self, path: P) -> Path { 89 | let mut result = self.clone(); 90 | result.push(path.into()); 91 | result 92 | } 93 | 94 | pub fn relative_to>(&self, path: P) -> Path { 95 | let path = path.into(); 96 | let relative = ffi::relative(&path.inner, &self.inner); 97 | if relative.length() == 0 { 98 | ".".into() 99 
| } else { 100 | relative.into() 101 | } 102 | } 103 | } 104 | 105 | impl std::fmt::Debug for Path { 106 | fn fmt(&self, formatter: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> { 107 | write!(formatter, "{}", self) 108 | } 109 | } 110 | 111 | impl From<&JsString> for Path { 112 | fn from(path: &JsString) -> Path { 113 | let path = ffi::normalize(path); 114 | Path { inner: path } 115 | } 116 | } 117 | 118 | impl From for Path { 119 | fn from(path: JsString) -> Path { 120 | Path::from(&path) 121 | } 122 | } 123 | 124 | impl From<&Path> for Path { 125 | fn from(path: &Path) -> Path { 126 | path.clone() 127 | } 128 | } 129 | 130 | impl From<&str> for Path { 131 | fn from(path: &str) -> Path { 132 | let path: JsString = path.into(); 133 | let path = ffi::normalize(&path); 134 | Path { inner: path } 135 | } 136 | } 137 | 138 | impl From<&String> for Path { 139 | fn from(path: &String) -> Path { 140 | Path::from(path.as_str()) 141 | } 142 | } 143 | 144 | impl From for JsString { 145 | fn from(path: Path) -> JsString { 146 | path.inner 147 | } 148 | } 149 | 150 | impl From<&Path> for JsString { 151 | fn from(path: &Path) -> JsString { 152 | path.inner.clone() 153 | } 154 | } 155 | 156 | pub fn delimiter() -> Cow<'static, str> { 157 | DELIMITER.as_str().into() 158 | } 159 | 160 | pub fn separator() -> Cow<'static, str> { 161 | SEPARATOR.as_str().into() 162 | } 163 | 164 | pub mod ffi { 165 | use js_sys::{JsString, Object}; 166 | use wasm_bindgen::prelude::*; 167 | 168 | #[wasm_bindgen(module = "path")] 169 | extern "C" { 170 | #[wasm_bindgen(js_name = "delimiter")] 171 | pub static DELIMITER: Object; 172 | 173 | #[wasm_bindgen(js_name = "sep")] 174 | pub static SEPARATOR: Object; 175 | 176 | pub fn normalize(path: &JsString) -> JsString; 177 | #[wasm_bindgen(variadic)] 178 | pub fn join(paths: Vec) -> JsString; 179 | #[wasm_bindgen(variadic)] 180 | pub fn resolve(paths: Vec) -> JsString; 181 | #[wasm_bindgen] 182 | pub fn dirname(path: &JsString) -> JsString; 183 | #[wasm_bindgen(js_name = "isAbsolute")] 184 | pub fn is_absolute(path: &JsString) -> bool; 185 | #[wasm_bindgen] 186 | pub fn relative(from: &JsString, to: &JsString) -> JsString; 187 | #[wasm_bindgen] 188 | pub fn basename(path: &JsString, suffix: Option) -> JsString; 189 | } 190 | } 191 | 192 | #[cfg(test)] 193 | mod test { 194 | use super::Path; 195 | use crate::node; 196 | use wasm_bindgen::JsValue; 197 | use wasm_bindgen_test::wasm_bindgen_test; 198 | 199 | #[wasm_bindgen_test] 200 | fn check_absolute() { 201 | let cwd = node::process::cwd(); 202 | assert!(cwd.is_absolute()); 203 | } 204 | 205 | #[wasm_bindgen_test] 206 | fn check_relative() { 207 | let relative = Path::from(&format!("{}{}{}", "a", super::separator(), "b")); 208 | assert!(!relative.is_absolute()); 209 | } 210 | 211 | #[wasm_bindgen_test] 212 | fn check_separator() { 213 | let separator = super::separator(); 214 | assert!(separator == "/" || separator == "\\"); 215 | } 216 | 217 | #[wasm_bindgen_test] 218 | fn check_delimiter() { 219 | let delimiter = super::delimiter(); 220 | assert!(delimiter == ";" || delimiter == ":"); 221 | } 222 | 223 | #[wasm_bindgen_test] 224 | fn check_parent() { 225 | let parent_name = "parent"; 226 | let path = Path::from(&format!("{}{}{}", parent_name, super::separator(), "child")); 227 | let parent_path = path.parent(); 228 | assert_eq!(parent_path.to_string(), parent_name); 229 | } 230 | 231 | #[wasm_bindgen_test] 232 | fn check_basename() { 233 | let child_base = "child."; 234 | let child_ext = ".extension"; 235 | let 
child_name = format!("{}{}", child_base, child_ext); 236 | let path = Path::from(&format!("{}{}{}", "parent", super::separator(), child_name)); 237 | assert_eq!(child_name, path.file_name()); 238 | assert_eq!( 239 | child_name, 240 | String::from(super::ffi::basename(&path.to_js_string(), None)) 241 | ); 242 | assert_eq!( 243 | child_name, 244 | String::from(super::ffi::basename(&path.to_js_string(), Some(".nomatch".into()))) 245 | ); 246 | assert_eq!( 247 | child_base, 248 | String::from(super::ffi::basename(&path.to_js_string(), Some(child_ext.into()))) 249 | ); 250 | } 251 | 252 | #[wasm_bindgen_test] 253 | fn check_push() { 254 | let parent_name = "a"; 255 | let child_name = "b"; 256 | let path_string = format!("{}{}{}", parent_name, super::separator(), child_name); 257 | let mut path = Path::from(parent_name); 258 | path.push(child_name); 259 | assert_eq!(path.to_string(), path_string); 260 | } 261 | 262 | #[wasm_bindgen_test] 263 | fn check_join() { 264 | let parent_name = "a"; 265 | let child_name = "b"; 266 | let path_string = format!("{}{}{}", parent_name, super::separator(), child_name); 267 | let path = Path::from(parent_name).join(child_name); 268 | assert_eq!(path.to_string(), path_string); 269 | } 270 | 271 | #[wasm_bindgen_test] 272 | fn check_current_normalization() { 273 | use itertools::Itertools as _; 274 | let current = "."; 275 | let long_current = std::iter::repeat(current).take(10).join(&super::separator()); 276 | assert_eq!(Path::from(&long_current).to_string(), current); 277 | } 278 | 279 | #[wasm_bindgen_test] 280 | fn check_parent_normalization() { 281 | use itertools::Itertools as _; 282 | let parent = ".."; 283 | let current = "."; 284 | let count = 10; 285 | 286 | let long_current = std::iter::repeat("child") 287 | .take(count) 288 | .chain(std::iter::repeat(parent).take(count)) 289 | .join(&super::separator()); 290 | assert_eq!(Path::from(&long_current).to_string(), current); 291 | 292 | let long_parent = std::iter::repeat("child") 293 | .take(count) 294 | .chain(std::iter::repeat(parent).take(count + 1)) 295 | .join(&super::separator()); 296 | assert_eq!(Path::from(&long_parent).to_string(), parent); 297 | } 298 | 299 | #[wasm_bindgen_test] 300 | async fn check_exists() -> Result<(), JsValue> { 301 | let temp = node::os::temp_dir(); 302 | let file_name = format!("ferrous-actions-exists-test - {}", chrono::Local::now()); 303 | let temp_file_path = temp.join(&file_name); 304 | let data = "Nothing to see here\n"; 305 | node::fs::write_file(&temp_file_path, data.as_bytes()).await?; 306 | assert!(temp_file_path.exists().await); 307 | node::fs::remove_file(&temp_file_path).await?; 308 | assert!(!temp_file_path.exists().await); 309 | Ok(()) 310 | } 311 | 312 | #[wasm_bindgen_test] 313 | fn check_equality() { 314 | use itertools::Itertools as _; 315 | 316 | // We can't check case behaviour without knowing filesystem semantics. 317 | // It's unclear if a trailing slash matters equality-wise. 
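// (For reference: the `PartialEq` impl above considers two paths equal when
// Node's `path.relative` between them is empty; the assertions below rely on
// that together with the normalization applied in `Path::from`.)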
318 | 319 | assert_eq!(Path::from("a"), Path::from("a")); 320 | assert_eq!(Path::from("."), Path::from(".")); 321 | assert_eq!(Path::from(".."), Path::from("..")); 322 | assert_eq!( 323 | Path::from(&format!("a{}..", super::separator())), 324 | Path::from(&format!("b{}..", super::separator())) 325 | ); 326 | assert_ne!(Path::from("."), Path::from("..")); 327 | assert_ne!(Path::from("a"), Path::from("b")); 328 | 329 | let path = ["a", "b", "c", "d"].into_iter().join(&super::separator()); 330 | assert_eq!(Path::from(&path), Path::from(&path)); 331 | } 332 | } 333 | -------------------------------------------------------------------------------- /src/actions/cache.rs: -------------------------------------------------------------------------------- 1 | use crate::node; 2 | use crate::node::path::Path; 3 | use js_sys::JsString; 4 | use std::collections::HashMap; 5 | use std::convert::Into; 6 | use wasm_bindgen::prelude::*; 7 | 8 | const WORKSPACE_ENV_VAR: &str = "GITHUB_WORKSPACE"; 9 | const WORKSPACE_OVERRIDDEN_TAG: &str = "#WORKSPACE_OVERRIDEN"; 10 | 11 | // Actually getting caching to work cross platform is complicated. First of all, 12 | // the action takes patterns not paths (which is unhelpful for apps that don't 13 | // want to use globs), It also means that on Windows you're going to need to 14 | // convert any paths to use forward slash as a separator. 15 | // 16 | // The cache action keys actions on patterns, but the key is simply the hash of 17 | // the patterns passed in, without modification. This means that for cross 18 | // platform caching, the patterns need to be for relative paths, not absolute 19 | // ones. This in turn means that if your action needs to produce a consistent 20 | // cache key for a path that's not at a consistent location relative to the CWD 21 | // at the time of action invocation, you're going to need to change CWD. 22 | // 23 | // As a final complication, when files are archived, they are done so using 24 | // paths which are specified relative to the GitHub workspace. So, even if you 25 | // sit in a directory you want to restore things in, and generate the right 26 | // relative paths to match a cache key, you might end up restoring to the wrong 27 | // location (e.g. because $CARGO_HOME moved relative to the GitHub workspace). 28 | // 29 | // The issue with any sort of reusable file caching across OSes is that there 30 | // needs to be some concept of a reference path or paths which are well defined 31 | // on each platform and under which it is valid to cache and restore certain 32 | // paths. GitHub actions chooses this to be $GITHUB_WORKSPACE. Unfortunately 33 | // this is problematic for two reasons: 34 | // - We have no guarantee that the path we want to cache (e.g. something in the 35 | // home directory) will remain at consistent path relative to 36 | // $GITHUB_WORKSPACE (or that is is on other OSes). 37 | // - Patterns cannot contain `.` or `..`, meaning we cannot use the GitHub 38 | // workspace as our root location when we want to cache paths located in the 39 | // home directory. 40 | // 41 | // To work around this, we have the cache user specify a root path. `Entry` both 42 | // changes CWD to that path and rewrites the supplied paths to be relative to 43 | // the root path. In addition, it sets $GITHUB_WORKSPACE to this path too, which 44 | // causes all files in the generated tarball to be specified relative to that 45 | // location. 
This is a hack, but in general it means that we can reliably cache 46 | // and restore paths to locations that may change across time. 47 | 48 | /// Changes the current working directory and GITHUB_WORKSPACE to a specified 49 | /// path and changes it back when it is dropped. This enables us to: 50 | /// - supply consistent relative paths (patterns rather) to the actions API 51 | /// - avoid issues related to archive paths being encoded relative to 52 | /// `$GITHUB_WORKSPACE`. 53 | #[derive(Debug)] 54 | pub struct ScopedWorkspace { 55 | original_cwd: Path, 56 | original_workspace: Option, 57 | } 58 | 59 | impl ScopedWorkspace { 60 | pub fn new(new_cwd: &Path) -> Result { 61 | let original_cwd = node::process::cwd(); 62 | let original_workspace = node::process::get_env().get(WORKSPACE_ENV_VAR).cloned(); 63 | node::process::chdir(new_cwd)?; 64 | node::process::set_var(WORKSPACE_ENV_VAR, &new_cwd.to_string()); 65 | Ok(ScopedWorkspace { 66 | original_cwd, 67 | original_workspace, 68 | }) 69 | } 70 | } 71 | 72 | impl Drop for ScopedWorkspace { 73 | fn drop(&mut self) { 74 | if let Some(original_workspace) = self.original_workspace.as_deref() { 75 | node::process::set_var(WORKSPACE_ENV_VAR, original_workspace); 76 | } else { 77 | node::process::remove_var(WORKSPACE_ENV_VAR); 78 | } 79 | node::process::chdir(&self.original_cwd) 80 | .unwrap_or_else(|e| panic!("Unable to chdir back to original folder: {:?}", e)); 81 | } 82 | } 83 | 84 | pub struct Entry { 85 | key: JsString, 86 | paths: Vec, 87 | restore_keys: Vec, 88 | cross_os_archive: bool, 89 | relative_to: Option, 90 | } 91 | 92 | impl Entry { 93 | pub fn new>(key: K) -> Entry { 94 | Entry { 95 | key: key.into(), 96 | paths: Vec::new(), 97 | restore_keys: Vec::new(), 98 | cross_os_archive: false, 99 | relative_to: None, 100 | } 101 | } 102 | 103 | pub fn paths, P: Into>(&mut self, paths: I) -> &mut Entry { 104 | self.paths.extend(paths.into_iter().map(Into::into)); 105 | self 106 | } 107 | 108 | pub fn path>(&mut self, path: P) -> &mut Entry { 109 | self.paths(std::iter::once(path.into())) 110 | } 111 | 112 | pub fn root>(&mut self, path: P) -> &mut Entry { 113 | self.relative_to = Some(path.into()); 114 | self 115 | } 116 | 117 | pub fn permit_sharing_with_windows(&mut self, allow: bool) -> &mut Entry { 118 | self.cross_os_archive = allow; 119 | self 120 | } 121 | 122 | pub fn restore_keys(&mut self, restore_keys: I) -> &mut Entry 123 | where 124 | I: IntoIterator, 125 | K: Into, 126 | { 127 | self.restore_keys.extend(restore_keys.into_iter().map(Into::into)); 128 | self 129 | } 130 | 131 | pub fn restore_key>(&mut self, restore_key: K) -> &mut Entry { 132 | self.restore_keys(std::iter::once(restore_key.into())) 133 | } 134 | 135 | pub async fn save(&self) -> Result { 136 | use wasm_bindgen::JsCast; 137 | let patterns = self.build_patterns(); 138 | let result = { 139 | let _caching_scope = self.build_action_scope()?; 140 | ffi::save_cache(patterns, &self.key, None, self.cross_os_archive).await? 
141 | }; 142 | let result = result 143 | .dyn_ref::() 144 | .ok_or_else(|| JsError::new("saveCache didn't return a number")) 145 | .map(|n| { 146 | #[allow(clippy::cast_possible_truncation)] 147 | let id = n.value_of() as i64; 148 | id 149 | })?; 150 | Ok(result) 151 | } 152 | 153 | pub async fn save_if_update(&self, old_restore_key: Option<&str>) -> Result, JsValue> { 154 | let new_restore_key = self.peek_restore().await?; 155 | if new_restore_key.is_none() || new_restore_key.as_deref() == old_restore_key { 156 | self.save().await.map(Some) 157 | } else { 158 | Ok(None) 159 | } 160 | } 161 | 162 | fn build_patterns(&self) -> Vec { 163 | let cwd = node::process::cwd(); 164 | let mut result = Vec::with_capacity(self.paths.len()); 165 | for path in &self.paths { 166 | // Rewrite path to be relative if we have a root 167 | let path = if let Some(relative_to) = &self.relative_to { 168 | let absolute = cwd.join(path); 169 | absolute.relative_to(relative_to) 170 | } else { 171 | path.clone() 172 | }; 173 | let pattern = Self::path_to_glob(&path); 174 | result.push(pattern.into()); 175 | } 176 | if self.relative_to.is_some() { 177 | // If we are going to specify paths relative to some path that we also 178 | // override GITHUB_WORKSPACE to, we add a comment so it will get 179 | // incorporated into the path hash. 180 | result.push(WORKSPACE_OVERRIDDEN_TAG.into()); 181 | } 182 | result 183 | } 184 | 185 | fn path_to_glob(path: &Path) -> String { 186 | let path = path.to_string(); 187 | // This should be valid even for absolute paths on Windows 188 | let path = path.replace(node::path::separator().as_ref(), "/"); 189 | // We do not escape ']' as it would close the character set 190 | let mut result = String::with_capacity(path.len()); 191 | let is_windows = node::os::platform() == "windows"; 192 | for c in path.chars() { 193 | match c { 194 | '*' | '?' | '#' | '~' | '!' | '[' => result.extend(['[', c, ']']), 195 | '\\' => { 196 | // The glob syntax is platform specific, because of course it is. Blackslash is 197 | // escape on Unix-like platforms, even in a character set. See 198 | // `internal-pattern.ts`. 199 | if is_windows { 200 | result.push(c); 201 | } else { 202 | result.extend(['\\', c]); 203 | } 204 | } 205 | _ => result.push(c), 206 | } 207 | } 208 | result 209 | } 210 | 211 | fn build_action_scope(&self) -> Result, JsValue> { 212 | self.relative_to.as_ref().map(ScopedWorkspace::new).transpose() 213 | } 214 | 215 | pub async fn restore(&self) -> Result, JsValue> { 216 | let patterns = self.build_patterns(); 217 | let result = { 218 | let _caching_scope = self.build_action_scope()?; 219 | ffi::restore_cache( 220 | patterns, 221 | &self.key, 222 | self.restore_keys.clone(), 223 | None, 224 | self.cross_os_archive, 225 | ) 226 | .await? 
227 | }; 228 | if result == JsValue::NULL || result == JsValue::UNDEFINED { 229 | Ok(None) 230 | } else { 231 | let result: JsString = result.into(); 232 | Ok(Some(result.into())) 233 | } 234 | } 235 | 236 | async fn peek_restore(&self) -> Result, JsValue> { 237 | use js_sys::Object; 238 | 239 | let compression_method: JsString = ffi::get_compression_method().await?.into(); 240 | let keys: Vec = std::iter::once(&self.key) 241 | .chain(self.restore_keys.iter()) 242 | .cloned() 243 | .collect(); 244 | let options = { 245 | let options = js_sys::Map::new(); 246 | options.set(&"compressionMethod".into(), &compression_method.into()); 247 | options.set(&"enableCrossOsArchive".into(), &self.cross_os_archive.into()); 248 | Object::from_entries(&options).expect("Failed to convert options map to object") 249 | }; 250 | let patterns = self.build_patterns(); 251 | let result = { 252 | let _caching_scope = self.build_action_scope()?; 253 | ffi::get_cache_entry(keys, patterns, Some(options)).await? 254 | }; 255 | if result == JsValue::NULL || result == JsValue::UNDEFINED { 256 | Ok(None) 257 | } else { 258 | let result: Object = result.into(); 259 | let entries = Object::entries(&result); 260 | let mut entries: HashMap = entries 261 | .iter() 262 | .map(Into::::into) 263 | .map(|e| (e.get(0), e.get(1))) 264 | .map(|(k, v)| (Into::::into(k), v)) 265 | .map(|(k, v)| (Into::::into(k), v)) 266 | .collect(); 267 | Ok(entries 268 | .remove("cacheKey") 269 | .map(Into::::into) 270 | .map(Into::::into)) 271 | } 272 | } 273 | } 274 | 275 | pub mod ffi { 276 | use js_sys::{JsString, Object}; 277 | use wasm_bindgen::prelude::*; 278 | 279 | #[wasm_bindgen(module = "@actions/cache")] 280 | extern "C" { 281 | #[wasm_bindgen(js_name = "saveCache", catch)] 282 | pub async fn save_cache( 283 | paths: Vec, 284 | key: &JsString, 285 | upload_options: Option, 286 | cross_os_archive: bool, 287 | ) -> Result; 288 | 289 | #[wasm_bindgen(js_name = "restoreCache", catch)] 290 | pub async fn restore_cache( 291 | paths: Vec, 292 | primary_key: &JsString, 293 | restore_keys: Vec, 294 | download_options: Option, 295 | cross_os_archive: bool, 296 | ) -> Result; 297 | } 298 | 299 | #[wasm_bindgen(module = "@actions/cache/lib/internal/cacheUtils")] 300 | extern "C" { 301 | #[wasm_bindgen(js_name = "getCompressionMethod", catch)] 302 | pub(super) async fn get_compression_method() -> Result; 303 | } 304 | 305 | #[wasm_bindgen(module = "@actions/cache/lib/internal/cacheHttpClient")] 306 | extern "C" { 307 | #[wasm_bindgen(js_name = "getCacheEntry", catch)] 308 | pub(super) async fn get_cache_entry( 309 | keys: Vec, 310 | paths: Vec, 311 | options: Option, 312 | ) -> Result; 313 | } 314 | } 315 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Ferrous Actions 2 | [![CI](https://github.com/FrancisRussell/ferrous-actions-dev/workflows/CI/badge.svg)](https://github.com/FrancisRussell/ferrous-actions-dev/actions?query=workflow%3ACI) 3 | 4 | GitHub action for easing Rust development, written in Rust and compiled to 5 | WebAssembly. 6 | 7 | ## About 8 | 9 | [actions-rs](https://github.com/actions-rs), the de-facto default for 10 | Rust-related GitHub actions appears to be all but abandoned. This repository is 11 | an experiment in replacing those actions with ones written in Rust, but 12 | compiled down to WebAssembly. 
This should make them both portable across
13 | platforms and more easily maintainable by developers who only know Rust.
14 | 
15 | Please note that the development repository for this action is located
16 | [here](https://github.com/FrancisRussell/ferrous-actions-dev/) whereas the
17 | release repository is located
18 | [here](https://github.com/FrancisRussell/ferrous-actions/). Issues should be
19 | filed against the former.
20 | 
21 | ## Features at a glance
22 | 
23 | * Installs Rust toolchains via [Rustup](https://rustup.rs/).
24 | * Intelligent caching of Cargo home (crates, registry indices and Git
25 | repositories):
26 | * Separate caching of registry indices, Git repositories and crate files.
27 | * Only uploads new versions of cache entries when they have changed.
28 | * Parameterizable minimum update interval (for fast changing items like
29 | registry indices).
30 | * Capable of detecting and dropping unused crate files, Git repositories and
31 | registry indices on Unix-like platforms (on Windows this is not possible so
32 | cache entries are keyed with the hash of all present `Cargo.lock` files).
33 | * Detects races between concurrent CI jobs (since a workflow may contain
34 | multiple jobs) to update the same cache entry and avoids uploading multiple
35 | versions.
36 | * Separates caching of content from dependency tracking of each CI job to
37 | permit sharing and avoid CI jobs with differing dependencies fighting over
38 | what needs to be cached.
39 | * Caches can be shared across platforms (Linux, Darwin and Windows).
40 | * Caching of build artifacts from `cargo install` operations to accelerate
41 | installation.
42 | * Supports the usage of [cross](https://github.com/cross-rs/cross) for
43 | cross-compilation.
44 | * Generates GitHub annotations from the output of `cargo clippy`, `cargo build` and
45 | `cargo check` operations.
46 | 
47 | ## Usage
48 | 
49 | Like all GitHub actions, this action is used via directives in a [GitHub
50 | Actions YAML
51 | file](https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions).
52 | For practicality and implementation reasons, Ferrous Actions is structured as
53 | a ‘mono-action’, meaning that all operations are implemented as sub-commands of
54 | a single action rather than being separated. The `command` parameter is always compulsory.
55 | 
56 | In all of the following examples, `FrancisRussell/ferrous-actions@v0.1.0-beta.2`
57 | should be replaced by the version of the action that this README is for. An
58 | example of the usage of Ferrous actions in a real project can be found
59 | [here](https://github.com/FrancisRussell/zoog/blob/develop/.github/workflows/ci.yml).
60 | 
61 | Note that by default, GitHub will use the name of the action as the name of a build
62 | step in its user interface. This can be confusing since, with a mono-action,
63 | every step would otherwise share the same name. Liberal use of the `name`
64 | attribute is recommended and is used in the examples below.
65 | 
66 | ### Caching Cargo home
67 | 
68 | Registry indices (e.g. the list of packages on `crates.io`), crate files and
69 | Git repositories downloaded by Cargo can all be cached between CI jobs.
70 | 
71 | File modification timestamps are used to detect whether the cached items have
72 | changed, to avoid needlessly uploading them back to the cache.
73 | 74 | Example invocation: 75 | ```yml 76 | - uses: FrancisRussell/ferrous-actions@v0.1.0-beta.2 77 | name: Cargo cache 78 | with: 79 | command: cache 80 | cache-only: indices 81 | min-recache-crates: 1m 82 | min-recache-git-repos: 12h 83 | min-recache-indices: 7d 84 | ``` 85 | 86 | The following options are also available: 87 | 88 | * `cache-only` (optional): a whitespace separated list of the token 89 | `git-repos`, `crates` and `indices`. If provided, only these items will be 90 | cached. The default is to cache all items. 91 | * `min-recache-crates` (optional): minimum time before recaching crates. 92 | * `min-recache-git-repos` (optional): minimum time before recaching Git 93 | repositories. 94 | * `min-recache-indices` (optional): minimum time before recaching registry 95 | indices. 96 | * `cross-platform-sharing` (optional): Attempt to share Cargo 97 | home caches across all platforms (`all`), only Unix-like platforms 98 | (`unix-like`), or make all caches platform-specific (`none`). The default is 99 | `all`. 100 | 101 | All recaching intervals are specified in [human 102 | time](https://docs.rs/humantime/latest/humantime/). Specifying the recaching 103 | interval makes it possible to avoid uploading a new version of a cached item 104 | each time it changes. This is useful for registry indices which (in the case of 105 | `crates.io`) can be large (hundreds of MiBs), often modified, but only with 106 | small changes. At writing, the index minimum recache interval is 2 days and 107 | none is specified for crate files or Git repositories. 108 | 109 | ### Installing a Rust toolchain with Rustup 110 | 111 | Ferrous actions can download Rustup and install a specified Rust toolchain. 112 | 113 | Example invocation: 114 | ```yml 115 | - uses: FrancisRussell/ferrous-actions@v0.1.0-beta.2 116 | name: Install Rustup 117 | with: 118 | command: install-rustup 119 | toolchain: nightly 120 | target: wasm32-unknown-unknown 121 | profile: minimal 122 | default: true 123 | ``` 124 | 125 | The following options are also available: 126 | 127 | * `toolchain` (required): The toolchain to install 128 | * `target` (optional): A whitespace separated list of target architectures. 129 | * `profile` (optional): The Rustup profile (e.g. `minimal`, `default` or 130 | `complete`). The default is `default`. 131 | * `default` (optional): Whether this toolchain should be set as the Rustup 132 | default compiler. This defaults to `true`. _This is different to actions-rs's 133 | behaviour_. 134 | * `override` (optional): Whether a Rustup ‘override’ should be set for the 135 | current directory. Defaults to `false`. 136 | 137 | ### Cargo commands 138 | 139 | Cargo commands can be invoked via Ferrous actions. The value for `command` in 140 | this case is `cargo SUBCOMMAND` where `SUBCOMMAND` is a single token. 141 | 142 | Example invocation: 143 | ```yml 144 | - uses: FrancisRussell/ferrous-actions@v0.1.0-beta.2 145 | name: Cargo build 146 | with: 147 | command: cargo build 148 | toolchain: stable 149 | args: --release 150 | ``` 151 | 152 | The following options are available whenever a Cargo subcommand is invoked: 153 | * `toolchain` (optional): A toolchain identifier that will be passed to `cargo` 154 | using the `+toolchain` syntax (only supported by Rustup-installed 155 | toolchains). 156 | * `args` (optional): Command line flags passed to `cargo`. These will be parsed 157 | using Unix-style shell quoting rules regardless of platform. 
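Because the quoting rules are Unix-style on every platform, arguments that contain spaces can be grouped with quotes in the same way on Windows runners as on Linux or macOS. A hypothetical example (the subcommand and feature names are purely illustrative):

```yml
- uses: FrancisRussell/ferrous-actions@v0.1.0-beta.2
  name: Cargo test
  with:
    command: cargo test
    toolchain: stable
    args: --workspace --features "feature-a feature-b"
```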
158 | 
159 | ### Installing a package with Cargo install
160 | 
161 | Ferrous actions will use GitHub's caching mechanism to improve the performance
162 | of installing binaries compared to compiling them from scratch each time. Note
163 | that Ferrous actions currently aims for transparent tool caching: the
164 | cache should never cause you to use a version of a binary that you would not
165 | otherwise have used had the cache not been present.
166 | 
167 | ![grcov install](doc/images/cargo-install-grcov.webp)
168 | 
169 | This means that Ferrous actions caches the build artifacts folder rather than
170 | the built binaries themselves. The only way to cache only the latter would be
171 | to be completely certain of all dependencies prior to the build,
172 | [Nix-style](https://nixos.org/).
173 | 
174 | From the user's perspective this means:
175 | * Tools will be recompiled from scratch when they are compiled with a
176 | previously unseen version of a Rust toolchain. If you track nightly, this
177 | might happen relatively frequently.
178 | * Any changes to a tool (because it or its dependencies have been updated in a
179 | registry) will be immediately reflected in the result of an install action.
180 | The updated build artifacts will be pushed back to the GitHub cache when this
181 | happens.
182 | 
183 | Example invocation:
184 | ```yml
185 | - uses: FrancisRussell/ferrous-actions@v0.1.0-beta.2
186 |   name: Install grcov
187 |   with:
188 |     command: cargo install
189 |     args: grcov
190 | ```
191 | 
192 | The following options have additional constraints:
193 | 
194 | * `args` (required): As above, but at least the binary name is required. Note
195 | that the command line is hashed to produce the cache key, so changes will
196 | cause a tool to be rebuilt from scratch (see the sketch below).
197 | 
198 | When invoked via Ferrous actions, `cargo install` will execute in a different
199 | directory to the current one. The aim here is to avoid either a
200 | `rust-toolchain.toml` or a Rustup override changing the compiler used to
201 | compile the binary.
202 | 
203 | ### Getting annotations from cargo build, check or clippy
204 | 
205 | When the `build`, `check` or `clippy` Cargo subcommands are run via Ferrous actions,
206 | annotations are output which can be viewed via the GitHub UI.
207 | 
208 | ![Unused function annotation](doc/images/annotation.webp)
209 | 
210 | Example invocation:
211 | ```yml
212 | - uses: FrancisRussell/ferrous-actions@v0.1.0-beta.2
213 |   name: Cargo clippy
214 |   with:
215 |     command: cargo clippy
216 |     annotations: true
217 | ```
218 | 
219 | The following options are also available:
220 | 
221 | * `annotations` (optional): Can be set to `true` or `false` depending on
222 | whether annotations are desired. Default is `true`.
223 | 
224 | ### Cross support
225 | 
226 | Invocation of `cargo build` via Ferrous actions can also be done in such a way
227 | that the [cross](https://github.com/cross-rs/cross) tool is used.
228 | 
229 | Example invocation:
230 | ```yml
231 | - uses: FrancisRussell/ferrous-actions@v0.1.0-beta.2
232 |   name: Cargo build
233 |   with:
234 |     command: cargo build
235 |     args: --target=x86_64-apple-darwin
236 |     use-cross: true
237 | ```
238 | 
239 | If `use-cross` is specified as `true` then `cross` will be used for
240 | compilation. If it is supplied as `false` or not at all then `cargo` will be
241 | invoked as normal. If an existing `cross` binary is not available, then one
242 | will be built and installed.
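As noted under ‘Installing a package with Cargo install’ above, the full `args` string is hashed into the cache key, so jobs that are meant to share a cached build should pass an identical argument string. A minimal sketch that keeps the string stable by pinning an explicit version (the version number is illustrative only):

```yml
- uses: FrancisRussell/ferrous-actions@v0.1.0-beta.2
  name: Install grcov (pinned)
  with:
    command: cargo install
    args: grcov --version 0.8.0
```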
243 | 244 | ## Implementation Notes 245 | 246 | ### The monotonically increasing cache problem 247 | 248 | One major issue with caching is how to ensure that a cache does not 249 | monotonically increase in size. There are two places where this can occur: the 250 | cached cargo home artifacts, and cached binary intermediate build artifacts. 251 | This problem is only partially solved. 252 | 253 | On Linux and Apple systems, file access times are used to determine what 254 | entries in cached cargo home items were accessed and to prune items that 255 | weren't. This has been implemented such that it still works in the presence of 256 | ‘relatime’ semantics - when the file access timestamp is only updated if it is 257 | behind the modification timestamp. 258 | 259 | File access times are typically disabled under Windows - Microsoft never 260 | implemented an equivalent of relatime, meaning that they remain a significant 261 | performance hit. On filesystems that do not update file access time-stamps, 262 | Ferrous actions will incorporate the hash of all `Cargo.lock` files under the 263 | current folder into the cache key. This means that caches will be rebuilt from 264 | scratch whenever a `Cargo.lock` file changes. 265 | 266 | This solution is far from ideal since it causes recaching more than necessary 267 | and won't work if no `Cargo.lock` files are committed to Git, or items are 268 | added to the cache via other means (e.g. due to `cargo install`). 269 | 270 | No solution exists for the issue of the build artifact folder increasing in 271 | size. Rust is a fast moving language so it's expected that compiler bumps will 272 | cause the folder to be rebuilt from scratch anyway before this becomes an 273 | issue. The file access timestamp technique is unlikely to work here, since it's 274 | likely that necessary files are only having timestamps examined rather than 275 | their content read, which won't be reflected in the access time. 276 | 277 | ### Concurrent CI jobs 278 | 279 | Larger projects will be split into multiple CI jobs which have different 280 | dependencies. It's important that these jobs don't compete against each other. 281 | This can happen because they have different opinions on what dependencies are 282 | needed or not (causing cache items to be repeatedly evicted and restored), and 283 | also because they may all decide a cached item needs to be updated and each 284 | pushes out a new copy at the same time. 285 | 286 | Each CI job is assigned a unique identifier (derived from the workflow, job ID 287 | and any matrix properties) for which a list of dependencies is recorded. Each 288 | one of these dependencies is called a ‘cache group‘ and has a name which 289 | incorporates a hash of its expected contents. 290 | 291 | A cache group represents one or more dependencies (e.g. crates, git 292 | repositories) which are bundled together. The individual dependencies may be 293 | updated, but none are ever added or removed. This means any CI job is free to 294 | update any cache group it uses. Jobs which have differing dependencies will 295 | interact with different cache groups. 296 | 297 | Making a cache group the finest granularity of caching possible might seem like 298 | a good idea. This is currently the case for cached indices and Git 299 | repositories, but not for crates. Many crates are quite small (tens of KiB) and 300 | projects typically depend on a large number of crates. 
It seems suboptimal and 301 | potentially irritating to users to construct a separate cache entry for each 302 | cached crate. Therefore crates are cached at the level of all crates used from 303 | a particular index. 304 | 305 | To avoid concurrent CI jobs all pushing out similar updated cache groups, we 306 | use an API internal to the [cache 307 | action](https://github.com/actions/toolkit/tree/main/packages/cache) to 308 | determine if any cache group we intend to update has had a new version pushed 309 | out since we downloaded it, just before we upload a new version. This reduces 310 | the window for a race from minutes down to a few seconds. 311 | 312 | ## `crates.io` 313 | 314 | This code is pushed to `crates.io` primarily as a proactive measure against 315 | name-squatting and for maintaining a historical record. Consequently it may be 316 | out of date, and the homepage should be consulted for the latest information. 317 | 318 | ## Notes / Disclaimer 319 | 320 | Ferrous actions is very much experimental and should not be relied upon in 321 | production environments or for business critical purposes. See LICENSE for 322 | additional details. 323 | 324 | Ferrous actions is primarily intended for use for hobbyish-sized Rust projects. 325 | If you need a complex caching framework then it's time to look at setting up 326 | [sccache](https://github.com/mozilla/sccache) backed by cloud storage and/or 327 | [Nix](https://nixos.org/). 328 | 329 | ## Acknowledgements 330 | 331 | This repository is based off the template created by Peter Evans 332 | ([@peter-evans](https://github.com/peter-evans)) 333 | [here](https://github.com/peter-evans/rust-wasm-action). 334 | 335 | ## License 336 | 337 | [MIT](LICENSE) 338 | -------------------------------------------------------------------------------- /src/node/fs.rs: -------------------------------------------------------------------------------- 1 | use crate::node::path::Path; 2 | use chrono::{DateTime, NaiveDateTime, Utc}; 3 | use js_sys::{BigInt, JsString, Object, Uint8Array}; 4 | use std::collections::VecDeque; 5 | use wasm_bindgen::{JsCast, JsError, JsValue}; 6 | 7 | #[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)] 8 | pub struct FileType { 9 | inner: FileTypeEnum, 10 | } 11 | 12 | impl FileType { 13 | pub fn is_file(self) -> bool { 14 | self.inner == FileTypeEnum::File 15 | } 16 | 17 | pub fn is_dir(self) -> bool { 18 | self.inner == FileTypeEnum::Dir 19 | } 20 | 21 | pub fn is_symlink(self) -> bool { 22 | self.inner == FileTypeEnum::Symlink 23 | } 24 | 25 | pub fn is_fifo(self) -> bool { 26 | self.inner == FileTypeEnum::Fifo 27 | } 28 | 29 | pub fn is_socket(self) -> bool { 30 | self.inner == FileTypeEnum::Socket 31 | } 32 | 33 | pub fn is_block_device(self) -> bool { 34 | self.inner == FileTypeEnum::BlockDev 35 | } 36 | 37 | pub fn is_char_device(self) -> bool { 38 | self.inner == FileTypeEnum::CharDev 39 | } 40 | } 41 | 42 | #[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)] 43 | enum FileTypeEnum { 44 | File, 45 | Dir, 46 | Symlink, 47 | BlockDev, 48 | CharDev, 49 | Fifo, 50 | Socket, 51 | Unknown, 52 | } 53 | 54 | fn determine_file_type(file_type: &ffi::FileType) -> FileTypeEnum { 55 | if file_type.is_block_device() { 56 | FileTypeEnum::BlockDev 57 | } else if file_type.is_character_device() { 58 | FileTypeEnum::CharDev 59 | } else if file_type.is_socket() { 60 | FileTypeEnum::Socket 61 | } else if file_type.is_fifo() { 62 | FileTypeEnum::Fifo 63 | } else if file_type.is_symbolic_link() { 64 | FileTypeEnum::Symlink 65 | } else if 
file_type.is_directory() { 66 | FileTypeEnum::Dir 67 | } else if file_type.is_file() { 68 | FileTypeEnum::File 69 | } else { 70 | FileTypeEnum::Unknown 71 | } 72 | } 73 | 74 | #[derive(Debug)] 75 | pub struct ReadDir { 76 | path: Path, 77 | entries: VecDeque, 78 | } 79 | 80 | #[derive(Debug)] 81 | pub struct DirEntry { 82 | parent: Path, 83 | inner: ffi::DirEnt, 84 | } 85 | 86 | impl DirEntry { 87 | pub fn file_name(&self) -> String { 88 | self.inner.get_name().into() 89 | } 90 | 91 | pub fn path(&self) -> Path { 92 | let mut result = self.parent.clone(); 93 | result.push(self.inner.get_name()); 94 | result 95 | } 96 | 97 | pub fn file_type(&self) -> FileType { 98 | FileType { 99 | inner: determine_file_type(&self.inner), 100 | } 101 | } 102 | } 103 | 104 | impl Iterator for ReadDir { 105 | type Item = DirEntry; 106 | 107 | fn next(&mut self) -> Option { 108 | let parent = self.path.clone(); 109 | self.entries.pop_front().map(|inner| DirEntry { parent, inner }) 110 | } 111 | } 112 | 113 | pub async fn chmod>(path: P, mode: u16) -> Result<(), JsValue> { 114 | let path: JsString = path.into(); 115 | ffi::chmod(&path, mode).await.map(|_| ()) 116 | } 117 | 118 | pub async fn read_file>(path: P) -> Result, JsValue> { 119 | let path: JsString = path.into(); 120 | let buffer = ffi::read_file(&path).await?; 121 | let buffer = buffer 122 | .dyn_ref::() 123 | .ok_or_else(|| JsError::new("readFile didn't return an array"))?; 124 | let length = buffer.length(); 125 | let mut result = vec![0u8; length as usize]; 126 | buffer.copy_to(&mut result); 127 | Ok(result) 128 | } 129 | 130 | pub async fn write_file>(path: P, data: &[u8]) -> Result<(), JsValue> { 131 | let path: JsString = path.into(); 132 | ffi::write_file(&path, data).await?; 133 | Ok(()) 134 | } 135 | 136 | pub async fn read_dir>(path: P) -> Result { 137 | let path: JsString = path.into(); 138 | let options = js_sys::Map::new(); 139 | options.set(&"withFileTypes".into(), &true.into()); 140 | options.set(&"encoding".into(), &"utf8".into()); 141 | let options = Object::from_entries(&options).expect("Failed to convert options map to object"); 142 | let entries = ffi::read_dir(&path, Some(options)).await?; 143 | let entries: VecDeque<_> = entries 144 | .dyn_into::() 145 | .map_err(|_| JsError::new("read_dir didn't return an array"))? 
146 | .iter() 147 | .map(Into::::into) 148 | .collect(); 149 | let path = Path::from(path); 150 | let entries = ReadDir { path, entries }; 151 | Ok(entries) 152 | } 153 | 154 | pub async fn create_dir_all>(path: P) -> Result<(), JsValue> { 155 | let options = js_sys::Map::new(); 156 | options.set(&"recursive".into(), &true.into()); 157 | let options = Object::from_entries(&options).expect("Failed to convert options map to object"); 158 | let path: JsString = path.into(); 159 | ffi::mkdir(&path, Some(options)).await?; 160 | Ok(()) 161 | } 162 | 163 | pub async fn create_dir>(path: P) -> Result<(), JsValue> { 164 | let path: JsString = path.into(); 165 | ffi::mkdir(&path, None).await?; 166 | Ok(()) 167 | } 168 | 169 | pub async fn remove_dir>(path: P) -> Result<(), JsValue> { 170 | let path: JsString = path.into(); 171 | ffi::rmdir(&path, None).await?; 172 | Ok(()) 173 | } 174 | 175 | pub async fn remove_file>(path: P) -> Result<(), JsValue> { 176 | let path: JsString = path.into(); 177 | ffi::unlink(&path).await?; 178 | Ok(()) 179 | } 180 | 181 | pub async fn rename>(from: P, to: P) -> Result<(), JsValue> { 182 | let from: JsString = from.into(); 183 | let to: JsString = to.into(); 184 | ffi::rename(&from, &to).await?; 185 | Ok(()) 186 | } 187 | 188 | #[derive(Debug)] 189 | pub struct Metadata { 190 | inner: ffi::Stats, 191 | } 192 | 193 | impl Metadata { 194 | pub fn uid(&self) -> u64 { 195 | self.inner.uid().try_into().expect("UID too large") 196 | } 197 | 198 | pub fn gid(&self) -> u64 { 199 | self.inner.gid().try_into().expect("GID too large") 200 | } 201 | 202 | #[allow(clippy::len_without_is_empty)] 203 | pub fn len(&self) -> u64 { 204 | self.inner.size().try_into().expect("File size too large") 205 | } 206 | 207 | pub fn mode(&self) -> u64 { 208 | self.inner.mode().try_into().expect("File mode too large") 209 | } 210 | 211 | fn utc_ns_to_time(ns: BigInt) -> DateTime { 212 | const NS_IN_S: i128 = 1000 * 1000 * 1000; 213 | let ns = i128::try_from(ns).expect("Timestamp out of range"); 214 | let (secs, subsec_nanos) = { 215 | let mut seconds = ns / NS_IN_S; 216 | let mut nanoseconds = ns % NS_IN_S; 217 | if nanoseconds < 0 { 218 | seconds -= 1; 219 | nanoseconds += NS_IN_S; 220 | } 221 | (seconds, nanoseconds) 222 | }; 223 | let secs: i64 = secs.try_into().expect("Seconds out of range"); 224 | let subsec_nanos: u32 = subsec_nanos.try_into().expect("Nanoseconds out of range"); 225 | let naive = NaiveDateTime::from_timestamp_opt(secs, subsec_nanos).expect("File time out of bounds"); 226 | DateTime::from_utc(naive, Utc) 227 | } 228 | 229 | pub fn accessed(&self) -> DateTime { 230 | let ns = self.inner.access_time_ns(); 231 | Self::utc_ns_to_time(ns) 232 | } 233 | 234 | pub fn modified(&self) -> DateTime { 235 | let ns = self.inner.modification_time_ns(); 236 | Self::utc_ns_to_time(ns) 237 | } 238 | 239 | pub fn created(&self) -> DateTime { 240 | let ns = self.inner.created_time_ns(); 241 | Self::utc_ns_to_time(ns) 242 | } 243 | 244 | pub fn file_type(&self) -> FileType { 245 | FileType { 246 | inner: determine_file_type(&self.inner), 247 | } 248 | } 249 | 250 | pub fn is_directory(&self) -> bool { 251 | self.inner.is_directory() 252 | } 253 | } 254 | 255 | pub async fn symlink_metadata>(path: P) -> Result { 256 | let path = path.into(); 257 | let options = js_sys::Map::new(); 258 | options.set(&"bigint".into(), &true.into()); 259 | let options = Object::from_entries(&options).expect("Failed to convert options map to object"); 260 | let stats = ffi::lstat(&path, 
pub mod ffi {
    use js_sys::{BigInt, JsString, Object};
    use wasm_bindgen::prelude::*;
    use wasm_bindgen::JsValue;

    #[wasm_bindgen]
    extern "C" {
        #[derive(Debug)]
        pub type FileType;

        #[wasm_bindgen(method, js_name = "isDirectory")]
        pub fn is_directory(this: &FileType) -> bool;

        #[wasm_bindgen(method, js_name = "isFile")]
        pub fn is_file(this: &FileType) -> bool;

        #[wasm_bindgen(method, js_name = "isBlockDevice")]
        pub fn is_block_device(this: &FileType) -> bool;

        #[wasm_bindgen(method, js_name = "isCharacterDevice")]
        pub fn is_character_device(this: &FileType) -> bool;

        #[wasm_bindgen(method, js_name = "isFIFO")]
        pub fn is_fifo(this: &FileType) -> bool;

        #[wasm_bindgen(method, js_name = "isSocket")]
        pub fn is_socket(this: &FileType) -> bool;

        #[wasm_bindgen(method, js_name = "isSymbolicLink")]
        pub fn is_symbolic_link(this: &FileType) -> bool;
    }

    #[wasm_bindgen(module = "fs")]
    extern "C" {
        #[derive(Debug)]
        #[wasm_bindgen(js_name = "DirEnt", extends = FileType)]
        pub type DirEnt;

        #[wasm_bindgen(method, getter, js_name = "name")]
        pub fn get_name(this: &DirEnt) -> JsString;

        #[derive(Debug)]
        #[wasm_bindgen(js_name = "Stats", extends = FileType)]
        pub type Stats;

        #[wasm_bindgen(method, getter)]
        pub fn size(this: &Stats) -> BigInt;

        #[wasm_bindgen(method, getter, js_name = "atimeNs")]
        pub fn access_time_ns(this: &Stats) -> BigInt;

        #[wasm_bindgen(method, getter, js_name = "mtimeNs")]
        pub fn modification_time_ns(this: &Stats) -> BigInt;

        #[wasm_bindgen(method, getter, js_name = "birthtimeNs")]
        pub fn created_time_ns(this: &Stats) -> BigInt;

        #[wasm_bindgen(method, getter)]
        pub fn uid(this: &Stats) -> BigInt;

        #[wasm_bindgen(method, getter)]
        pub fn gid(this: &Stats) -> BigInt;

        #[wasm_bindgen(method, getter)]
        pub fn mode(this: &Stats) -> BigInt;
    }

    #[wasm_bindgen(module = "fs/promises")]
    extern "C" {
        #[wasm_bindgen(catch)]
        pub async fn chmod(path: &JsString, mode: u16) -> Result<JsValue, JsValue>;

        #[wasm_bindgen(catch, js_name = "readFile")]
        pub async fn read_file(path: &JsString) -> Result<JsValue, JsValue>;

        #[wasm_bindgen(catch, js_name = "writeFile")]
        pub async fn write_file(path: &JsString, data: &[u8]) -> Result<JsValue, JsValue>;

        #[wasm_bindgen(catch, js_name = "readdir")]
        pub async fn read_dir(path: &JsString, options: Option<Object>) -> Result<JsValue, JsValue>;

        #[wasm_bindgen(catch)]
        pub async fn mkdir(path: &JsString, options: Option<Object>) -> Result<JsValue, JsValue>;

        #[wasm_bindgen(catch)]
        pub async fn rename(old: &JsString, new: &JsString) -> Result<JsValue, JsValue>;

        #[wasm_bindgen(catch)]
        pub async fn rmdir(path: &JsString, options: Option<Object>) -> Result<JsValue, JsValue>;

        #[wasm_bindgen(catch)]
        pub async fn access(path: &JsString, mode: Option<u32>) -> Result<JsValue, JsValue>;

        #[wasm_bindgen(catch)]
        pub async fn lstat(path: &JsString, options: Option<Object>) -> Result<JsValue, JsValue>;

        #[wasm_bindgen(catch)]
        pub async fn lutimes(path: &JsString, atime: &JsValue, mtime: &JsValue) -> Result<JsValue, JsValue>;

        #[wasm_bindgen(catch)]
        pub async fn unlink(path: &JsString) -> Result<JsValue, JsValue>;
    }
}

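// The tests below exercise the wrappers against the real filesystem: each test
// creates uniquely named entries under the OS temporary directory and removes
// them again before returning.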
#[cfg(test)]
mod test {
    use crate::node;
    use crate::node::path::Path;
    use lazy_static::lazy_static;
    use parking_lot::Mutex;
    use std::collections::HashMap;
    use wasm_bindgen::JsValue;
    use wasm_bindgen_test::wasm_bindgen_test;

    #[derive(Debug, Clone, Copy)]
    enum Entry {
        File(u64),
        Dir,
    }

    lazy_static! {
        static ref COUNTER: Mutex<u64> = Mutex::default();
    }

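    // Derives a "random" value by hashing a process-local counter together with
    // the current time; it only needs to be unique enough for temporary file and
    // directory names.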
    fn get_random() -> u64 {
        use std::collections::hash_map::DefaultHasher;
        use std::hash::{Hash as _, Hasher as _};

        let id = {
            let mut guard = COUNTER.lock();
            let id = *guard;
            *guard += 1;
            id
        };
        let now = chrono::Local::now();
        let mut hasher = DefaultHasher::default();
        id.hash(&mut hasher);
        now.hash(&mut hasher);
        hasher.finish()
    }

    fn temp_path() -> Path {
        let unique_id = get_random();
        let temp = node::os::temp_dir();
        let file_name = format!("ferrous-actions-fs-test - {}", unique_id);
        temp.join(&file_name)
    }

    #[wasm_bindgen_test]
    async fn write_read_unlink_file() -> Result<(), JsValue> {
        let path = temp_path();
        let data = format!("{}", chrono::Local::now()).into_bytes();
        node::fs::write_file(&path, &data).await?;
        let read_data = node::fs::read_file(&path).await?;
        assert_eq!(data, read_data);
        node::fs::remove_file(&path).await?;
        assert!(!path.exists().await);
        Ok(())
    }

    #[wasm_bindgen_test]
    async fn create_remove_dir() -> Result<(), JsValue> {
        let first = temp_path();
        let second = first.join("a");
        let third = second.join("b");
        super::create_dir_all(&second).await?;
        assert!(first.exists().await);
        assert!(second.exists().await);
        super::create_dir(&third).await?;
        assert!(third.exists().await);
        super::remove_dir(&third).await?;
        assert!(!third.exists().await);
        super::remove_dir(&second).await?;
        assert!(!second.exists().await);
        super::remove_dir(&first).await?;
        assert!(!first.exists().await);
        Ok(())
    }

    #[wasm_bindgen_test]
    async fn rename_file() -> Result<(), JsValue> {
        let from = temp_path();
        let to = temp_path();
        let data = format!("{}", chrono::Local::now()).into_bytes();
        node::fs::write_file(&from, &data).await?;
        assert!(from.exists().await);
        assert!(!to.exists().await);
        node::fs::rename(&from, &to).await?;
        assert!(!from.exists().await);
        assert!(to.exists().await);
        drop(node::fs::remove_file(&to).await);
        Ok(())
    }

    #[wasm_bindgen_test]
    async fn read_dir_and_lstat() -> Result<(), JsValue> {
        const NUM_ENTRIES: usize = 256;
        const MAX_SIZE: u64 = 4096;

        // Build some entries
        let mut entries = HashMap::with_capacity(NUM_ENTRIES);
        let root = temp_path();
        node::fs::create_dir(&root).await?;
        for _ in 0..NUM_ENTRIES {
            let name = format!("{}", get_random());
            let path = root.join(&name);
            let is_dir = get_random() < (u64::MAX / 2);
            let entry = if is_dir {
                node::fs::create_dir(&path).await?;
                Entry::Dir
            } else {
                let size = get_random() % MAX_SIZE;
                let data = vec![0u8; size as usize];
                node::fs::write_file(&path, &data).await?;
                Entry::File(size)
            };
            entries.insert(name, entry);
        }

        for entry in node::fs::read_dir(&root).await? {
            let file_name = entry.file_name();
            let reference = entries
                .get(&file_name)
                .unwrap_or_else(|| panic!("Missing entry: {}", file_name));
            let path = entry.path();
            assert!(path.exists().await);
            let file_type = entry.file_type();
            let metadata = node::fs::symlink_metadata(&path).await?;
            assert_eq!(metadata.file_type(), file_type);
            match reference {
                Entry::File(size) => {
                    assert!(file_type.is_file());
                    assert_eq!(metadata.len(), *size);
                    drop(node::fs::remove_file(path).await);
                }
                Entry::Dir => {
                    assert!(file_type.is_dir());
                    drop(node::fs::remove_dir(path).await);
                }
            }
            assert!(!file_type.is_symlink());
            assert!(!file_type.is_fifo());
            assert!(!file_type.is_socket());
            assert!(!file_type.is_block_device());
            assert!(!file_type.is_char_device());
        }
        drop(node::fs::remove_dir(root).await);
        Ok(())
    }

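    // chrono durations are signed; take the absolute value so the timestamp
    // comparisons below hold regardless of which timestamp is earlier.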
    fn duration_abs(duration: chrono::Duration) -> chrono::Duration {
        if duration < chrono::Duration::zero() {
            -duration
        } else {
            duration
        }
    }

    #[wasm_bindgen_test]
    async fn timestamps_match_system_time() -> Result<(), JsValue> {
        // Old Unix is 1 second granularity, FAT is 2
        let max_delta = chrono::Duration::seconds(2);

        let path = temp_path();
        let data = format!("{}", chrono::Local::now()).into_bytes();
        node::fs::write_file(&path, &data).await?;
        let now = chrono::Utc::now();
        let metadata = node::fs::symlink_metadata(&path).await?;

        for timestamp in [metadata.created(), metadata.modified(), metadata.accessed()] {
            let delta = duration_abs(now - timestamp);
            assert!(delta <= max_delta);
        }
        drop(node::fs::remove_file(&path).await);
        Ok(())
    }

    #[wasm_bindgen_test]
    async fn utimes() -> Result<(), JsValue> {
        let max_delta = chrono::Duration::seconds(2);
        let atime_change = chrono::Duration::seconds(64);
        let mtime_change = chrono::Duration::seconds(64);

        let path = temp_path();
        let data = format!("{}", chrono::Local::now()).into_bytes();
        node::fs::write_file(&path, &data).await?;

        let metadata = node::fs::symlink_metadata(&path).await?;
        let new_atime = metadata.accessed() - atime_change;
        let new_mtime = metadata.accessed() - mtime_change;
        node::fs::lutimes(&path, &new_atime, &new_mtime).await?;

        let new_metadata = node::fs::symlink_metadata(&path).await?;
        for (expected, actual) in [
            (new_atime, new_metadata.accessed()),
            (new_mtime, new_metadata.modified()),
        ] {
            let delta = duration_abs(expected - actual);
            assert!(delta < max_delta);
        }
        drop(node::fs::remove_file(&path).await);
        Ok(())
    }
}

--------------------------------------------------------------------------------