├── crates
├── state
│   ├── src
│   │   ├── db
│   │   │   └── mod.rs
│   │   ├── models.rs
│   │   └── lib.rs
│   ├── fixtures
│   │   └── legacy
│   │   │   └── state_v1.sqlite
│   ├── build.rs
│   ├── Cargo.toml
│   └── tests
│   │   └── migration_smoke.rs
├── platform
│   ├── src
│   │   ├── implementations
│   │   │   ├── mod.rs
│   │   │   └── macos
│   │   │   │   └── mod.rs
│   │   ├── lib.rs
│   │   ├── filesystem
│   │   │   └── mod.rs
│   │   └── binary
│   │   │   └── mod.rs
│   └── Cargo.toml
├── install
│   ├── src
│   │   ├── api
│   │   │   ├── mod.rs
│   │   │   ├── types.rs
│   │   │   ├── config.rs
│   │   │   ├── context.rs
│   │   │   └── result.rs
│   │   ├── prepare
│   │   │   ├── mod.rs
│   │   │   └── context.rs
│   │   ├── atomic
│   │   │   ├── mod.rs
│   │   │   └── transition.rs
│   │   ├── lib.rs
│   │   └── macros.rs
│   └── Cargo.toml
├── builder
│   ├── src
│   │   ├── core
│   │   │   ├── mod.rs
│   │   │   ├── types.rs
│   │   │   └── context.rs
│   │   ├── utils
│   │   │   ├── mod.rs
│   │   │   ├── events.rs
│   │   │   ├── timeout.rs
│   │   │   └── format.rs
│   │   ├── recipe
│   │   │   ├── mod.rs
│   │   │   └── executor.rs
│   │   ├── yaml
│   │   │   ├── mod.rs
│   │   │   └── recipe.rs
│   │   ├── security
│   │   │   └── mod.rs
│   │   ├── artifact_qa
│   │   │   ├── scanners
│   │   │   │   ├── mod.rs
│   │   │   │   └── staging.rs
│   │   │   ├── macho_utils.rs
│   │   │   ├── traits.rs
│   │   │   └── patchers
│   │   │   │   ├── mod.rs
│   │   │   │   ├── object_cleaner.rs
│   │   │   │   ├── la_cleaner.rs
│   │   │   │   ├── pkgconfig.rs
│   │   │   │   └── headers.rs
│   │   ├── stages
│   │   │   ├── mod.rs
│   │   │   ├── post.rs
│   │   │   ├── environment.rs
│   │   │   ├── build.rs
│   │   │   └── source.rs
│   │   ├── environment
│   │   │   ├── mod.rs
│   │   │   └── directories.rs
│   │   ├── packaging
│   │   │   ├── compression.rs
│   │   │   └── manifest.rs
│   │   └── lib.rs
│   └── Cargo.toml
├── guard
│   ├── src
│   │   ├── lib.rs
│   │   └── refcount.rs
│   └── Cargo.toml
├── net
│   ├── src
│   │   └── download
│   │   │   ├── mod.rs
│   │   │   ├── validation.rs
│   │   │   ├── retry.rs
│   │   │   └── resume.rs
│   └── Cargo.toml
├── errors
│   ├── Cargo.toml
│   └── src
│   │   ├── signing.rs
│   │   ├── version.rs
│   │   ├── state.rs
│   │   └── config.rs
├── index
│   └── Cargo.toml
├── config
│   ├── Cargo.toml
│   └── src
│   │   ├── constants.rs
│   │   ├── repository.rs
│   │   └── resources_semaphore.rs
├── hash
│   └── Cargo.toml
├── types
│   ├── Cargo.toml
│   └── src
│   │   └── reports.rs
├── events
│   ├── Cargo.toml
│   └── src
│   │   ├── events
│   │   ├── package.rs
│   │   ├── qa.rs
│   │   ├── platform.rs
│   │   └── state.rs
│   │   └── progress
│   │   ├── update.rs
│   │   └── config.rs
├── repository
│   └── Cargo.toml
├── ops
│   ├── src
│   │   └── small_ops.rs
│   └── Cargo.toml
├── resolver
│   └── Cargo.toml
└── store
│   ├── Cargo.toml
│   └── src
│   └── manifest_io.rs
├── .gitattributes
├── packages
├── gmp-6.3.0-1.arm64.sp
├── isl-0.27.0-1.arm64.sp
├── m4-1.4.20-1.arm64.sp
├── make-4.4.1-1.arm64.sp
├── mpc-1.3.1-1.arm64.sp
├── mpfr-4.2.2-1.arm64.sp
├── tar-1.35.0-1.arm64.sp
├── whi-0.6.2-1.arm64.sp
├── xz-5.8.1-1.arm64.sp
├── zlib-1.3.1-1.arm64.sp
├── zstd-1.5.7-1.arm64.sp
├── ansible-2.18.6-1.arm64.sp
├── autoconf-2.72.0-1.arm64.sp
├── automake-1.18.0-1.arm64.sp
├── bat-0.25.0-1.arm64.sp
├── brotli-1.1.0-1.arm64.sp
├── bzip2-1.0.8-1.arm64.sp
├── cmake-4.0.3-1.arm64.sp
├── curl-8.14.1-1.arm64.sp
├── gcc-15.1.0-1.arm64.sp
├── helix-25.0.1-1.arm64.sp
├── libidn2-2.3.8-1.arm64.sp
├── libpsl-0.21.5-1.arm64.sp
├── libssh2-1.11.1-1.arm64.sp
├── libtool-2.5.4-1.arm64.sp
├── libxml2-2.13.8-1.arm64.sp
├── llvm-20.1.7-1.arm64.sp
├── meson-1.8.2-1.arm64.sp
├── nghttp2-1.65.0-1.arm64.sp
├── ninja-1.13.0-1.arm64.sp
├── openssl-3.5.0-1.arm64.sp
├── pkgconf-2.4.3-1.arm64.sp
├── python-3.13.3-1.arm64.sp
├── ripgrep-14.1.1-1.arm64.sp
├── rust-1.88.0-1.arm64.sp
├── rust-1.89.0-1.arm64.sp
├── rust-1.90.0-1.arm64.sp
├── sqlite-3.39.4-1.arm64.sp
├── uv-0.7.13-1.arm64.sp
├── binutils-2.44.0-1.arm64.sp
├── libunistring-1.3.0-1.arm64.sp
└── python3.11-3.11.13-1.arm64.sp
├── .cargo
└── config.toml
├── whifile
├── recipes
├── nghttp2-1.65.0.yml
├── tar-1.35.0.yml
├── gnu_m4-1.4.20.yml
├── zlib-1.3.1.yml
├── whi-0.6.2.yml
├── helix-25.1.1.yml
├── ninja-1.13.0.yml
├── bat-0.25.0.yml
├── automake-1.18.yml
├── brotli-1.1.0-1.yml
├── autoconf-2.72.yml
├── uv-0.7.13.yml
├── libidn2-2.3.7-1.yml
├── pkgconf-2.4.3.yml
├── meson-1.8.2.yml
├── bzip2-1.0.8.yaml
├── xz-5.8.1.yml
├── libtool-2.5.4.yml
├── cmake-4.0.3.yaml
├── sqlite-3.39.4.yml
├── libunistring-1.3.yml
├── zstd-1.5.7.yml
├── ansible-2.18.6.yml
├── libpsl-0.21.5-1.yml
├── libssh2-1.11.1-1.yml
├── openssl-3.5.0.yml
├── isl-0.27.0.yml
├── ripgrep-14.1.1.yml
├── gnu_mpfr-4.2.2.yml
├── make-4.4.1-1.yml
├── mpc-1.3.1.yml
├── gnu_gmp-6.3.0.yml
├── libxml2-2.13.8.yml
├── python-3.11.13.yaml
├── python-3.9.21.yaml
├── python-3.12.11.yaml
├── binutils-2.44.0.yml
├── python-3.13.3.yaml
├── curl-8.14.1.yml
├── gcc
│   └── gcc-15.1.0-1.yml
├── llvm-20.1.7.yml
├── rust-1.89.0.yml
├── rust-1.90.0.yml
└── rust-1.88.0.yml
├── .github
├── dependabot.yml
├── workflows
│   ├── security-audit.yml
│   ├── docs.yml
│   ├── coverage.yml
│   └── ci.yml
└── ISSUE_TEMPLATE
│   ├── bug_report.md
│   └── feature_request.md
├── apps
├── sls
│   └── Cargo.toml
├── sbs
│   └── Cargo.toml
└── sps2
│   ├── Cargo.toml
│   └── src
│   └── error.rs
├── justfile
├── scripts
└── sqlx
│   └── prepare.sh
├── LICENSE.md
├── SECURITY.md
├── Cargo.toml
├── CONTRIBUTING.md
└── .gitignore
/crates/state/src/db/mod.rs:
--------------------------------------------------------------------------------
1 | pub mod refcount_deltas;
2 | 
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | *.sp filter=lfs diff=lfs merge=lfs -text
2 | 
--------------------------------------------------------------------------------
/crates/platform/src/implementations/mod.rs:
--------------------------------------------------------------------------------
1 | //! Platform-specific implementations
2 | 
3 | pub mod macos;
4 | 
--------------------------------------------------------------------------------
/crates/install/src/api/mod.rs:
--------------------------------------------------------------------------------
1 | pub mod config;
2 | pub mod context;
3 | pub mod result;
4 | pub mod types;
5 | 
--------------------------------------------------------------------------------
/crates/state/fixtures/legacy/state_v1.sqlite:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexykn/sps2/HEAD/crates/state/fixtures/legacy/state_v1.sqlite
--------------------------------------------------------------------------------
/crates/builder/src/core/mod.rs:
--------------------------------------------------------------------------------
1 | //! 
Core module containing main builder API and types 2 | 3 | pub mod api; 4 | pub mod builder; 5 | pub mod context; 6 | pub mod types; 7 | -------------------------------------------------------------------------------- /packages/gmp-6.3.0-1.arm64.sp: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:e6fbf97200ae74ed1900522ab3615da3fe16e020f7bcc74cec2229329b190963 3 | size 363263 4 | -------------------------------------------------------------------------------- /packages/isl-0.27.0-1.arm64.sp: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:1f331c45e78584f659154e3d2a641b791a2b0e92b403156de61b5d39ef7c12d7 3 | size 798339 4 | -------------------------------------------------------------------------------- /packages/m4-1.4.20-1.arm64.sp: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:78e00f2cc51e6f0c59ea410bfcfca3abe647ab73e14fac09bb907cffc224af47 3 | size 213000 4 | -------------------------------------------------------------------------------- /packages/make-4.4.1-1.arm64.sp: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:19e18b14cbdeb3bf0988a661537bf147dd8d5cbbdb95c0f8f66c39a43db7376a 3 | size 259316 4 | -------------------------------------------------------------------------------- /packages/mpc-1.3.1-1.arm64.sp: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:40ea6e60be1e2ee9b9667fc8aea9263501facda04db4018529efd28a1a2863bf 3 | size 72199 4 | -------------------------------------------------------------------------------- /packages/mpfr-4.2.2-1.arm64.sp: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:7a5e844760b44b8fbe7ca316668e7422a9c7207b8d291d335277d6d91b9062c1 3 | size 303887 4 | -------------------------------------------------------------------------------- /packages/tar-1.35.0-1.arm64.sp: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:cedb8fa4168767459eff3cc5d7ff123de2ee77a3f98bb3ee22fc208dfbdb897a 3 | size 380834 4 | -------------------------------------------------------------------------------- /packages/whi-0.6.2-1.arm64.sp: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:49d49f2dd16138784e699a1ba2c4cfa57d75a6980dabc8688cf2dc65d7ed5bfa 3 | size 600885 4 | -------------------------------------------------------------------------------- /packages/xz-5.8.1-1.arm64.sp: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:5402ff1d51e75a81f6735bd4d04baee404b68d519d914df13a0de234a31ab7ce 3 | size 259060 4 | -------------------------------------------------------------------------------- /packages/zlib-1.3.1-1.arm64.sp: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:7fed74d5b39e654979fbbc889d1543e99d7a2a650aee361342e592220c19944a 3 | size 79439 4 | 
-------------------------------------------------------------------------------- /packages/zstd-1.5.7-1.arm64.sp: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:e6e5c95dd355f70e8f421b84597dda52484612793a962ee04a2a120b97503730 3 | size 440572 4 | -------------------------------------------------------------------------------- /packages/ansible-2.18.6-1.arm64.sp: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:8a973833d3d62f6c70dd1219495532a94700e3e8bbbfbd140866f433d13e5a37 3 | size 8362892 4 | -------------------------------------------------------------------------------- /packages/autoconf-2.72.0-1.arm64.sp: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:156ffe4419f639597948eb0746a5125981e75f389c54e69d78f404333a8e4a72 3 | size 646884 4 | -------------------------------------------------------------------------------- /packages/automake-1.18.0-1.arm64.sp: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:205ac2e651b986c63b9fb05bb169412524b0e5c008e3b620d6420efda5b23822 3 | size 599202 4 | -------------------------------------------------------------------------------- /packages/bat-0.25.0-1.arm64.sp: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:4d1a4f16423e224a9f47dc4be76c581617b3118de8acac20aa458d43cc82918f 3 | size 2488375 4 | -------------------------------------------------------------------------------- /packages/brotli-1.1.0-1.arm64.sp: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:6304c618a07e52701e6757afa69f45b3d21d426312a96e6ada77209fa0a53f6f 3 | size 363631 4 | -------------------------------------------------------------------------------- /packages/bzip2-1.0.8-1.arm64.sp: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:8e700b584878bbc748a57a4a30f649dbea2d2d64618fa26dbf0ed890fba62fde 3 | size 109772 4 | -------------------------------------------------------------------------------- /packages/cmake-4.0.3-1.arm64.sp: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:e934dbed57c09990c714e698fcb7c6f0b2b0c91ed82d56dc1b0fe731e2b13ec2 3 | size 16571346 4 | -------------------------------------------------------------------------------- /packages/curl-8.14.1-1.arm64.sp: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:5c65255e0900c7c7f64e3b4069790ddc94cb56e0cb95e6b99c4568d9f80be442 3 | size 677394 4 | -------------------------------------------------------------------------------- /packages/gcc-15.1.0-1.arm64.sp: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:cc8090e1ba48bb6941c6412383acc99e7a00d26a7aeb5d0262fcf9495c95506d 3 | size 117374335 4 | -------------------------------------------------------------------------------- 
/packages/helix-25.0.1-1.arm64.sp: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:fd87d4f0c2a54d987da526d3a4a7436dd243563911db7026cfe2af809ea7fa18 3 | size 6519238 4 | -------------------------------------------------------------------------------- /packages/libidn2-2.3.8-1.arm64.sp: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:029e23466c2b4876fe2295ecac2b8d531ddaa49318b99eac2b68bfc8b18942c1 3 | size 139469 4 | -------------------------------------------------------------------------------- /packages/libpsl-0.21.5-1.arm64.sp: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:33b6f160c2b7af5037be59431ab54b45c8d38f635cd0084bc4966034c9d48075 3 | size 67164 4 | -------------------------------------------------------------------------------- /packages/libssh2-1.11.1-1.arm64.sp: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:6731ad491a8244432d6e46b8e87a848c42714f1cbdc423140c93cda95e02b9cf 3 | size 322952 4 | -------------------------------------------------------------------------------- /packages/libtool-2.5.4-1.arm64.sp: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:c0dada72fffbc9055bc53fc109250aa9d11ec189128fc42fad018714127a61dd 3 | size 419799 4 | -------------------------------------------------------------------------------- /packages/libxml2-2.13.8-1.arm64.sp: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:3664ed1f16f89b31204be57c5373cafb01938f36f3fca4a6440df717cfc07aeb 3 | size 754984 4 | -------------------------------------------------------------------------------- /packages/llvm-20.1.7-1.arm64.sp: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:cd929983bd72906fe042998b4cdbbb3300bc51e2669b5ed2e2556eb002fd2475 3 | size 162025912 4 | -------------------------------------------------------------------------------- /packages/meson-1.8.2-1.arm64.sp: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:797cd7411e3921b282be6fae8d9ce087701c3800d18c20949c02259983721cd1 3 | size 772132 4 | -------------------------------------------------------------------------------- /packages/nghttp2-1.65.0-1.arm64.sp: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:9c7df48a3279cb46d1313bddafa55d14f673cb0eeef22dcd69398f3d9a2584a6 3 | size 171875 4 | -------------------------------------------------------------------------------- /packages/ninja-1.13.0-1.arm64.sp: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:0a88617d6340c1e1bb9ce4595120811ea32393d7b53ef5cd3be313010c4c011a 3 | size 732327 4 | -------------------------------------------------------------------------------- /packages/openssl-3.5.0-1.arm64.sp: 
-------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:ff6428dcff80d5a951a312103f61f72dc426b01efad02f864c3d018932e01ded 3 | size 8791952 4 | -------------------------------------------------------------------------------- /packages/pkgconf-2.4.3-1.arm64.sp: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:822d10c65ee47e6571b8ed4775d7b5d34201504edaab145bb5a5ebe57c03fa7e 3 | size 53212 4 | -------------------------------------------------------------------------------- /packages/python-3.13.3-1.arm64.sp: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:6bddb41c69a32c39ed6c5cd9a33ec2c0972592a85ff07da0cd6cb17b30afb144 3 | size 58506884 4 | -------------------------------------------------------------------------------- /packages/ripgrep-14.1.1-1.arm64.sp: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:1d2d064f00103e6893af2748004a05be4f8663657c4a0bf507e35101791eb501 3 | size 1550805 4 | -------------------------------------------------------------------------------- /packages/rust-1.88.0-1.arm64.sp: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:60f0c720bf98969c8afcd937b419550b628ef29e0f1a146705a161623937e381 3 | size 123353466 4 | -------------------------------------------------------------------------------- /packages/rust-1.89.0-1.arm64.sp: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:c37da755f7c3ba2ed31e25db6ad01ed274ae8fc17bbe3b53ac456857947d42b4 3 | size 235307916 4 | -------------------------------------------------------------------------------- /packages/rust-1.90.0-1.arm64.sp: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:cdd577670ac85c0ba97b1bc76022997563638c7b33e1354d8747d483a99f57f6 3 | size 236876202 4 | -------------------------------------------------------------------------------- /packages/sqlite-3.39.4-1.arm64.sp: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:b75f279fea949c215af84c119a8ca7c25fb61e979e0f71ca6cf233eac96cee98 3 | size 1054873 4 | -------------------------------------------------------------------------------- /packages/uv-0.7.13-1.arm64.sp: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:502694fcd79c34d79b8f4e52165cc8ebd0202bcadf1750df3b38dc775709da38 3 | size 13015033 4 | -------------------------------------------------------------------------------- /crates/install/src/prepare/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod context; 2 | pub mod executor; 3 | pub mod worker; 4 | 5 | pub use context::ExecutionContext; 6 | pub use executor::ParallelExecutor; 7 | -------------------------------------------------------------------------------- /packages/binutils-2.44.0-1.arm64.sp: -------------------------------------------------------------------------------- 1 | version 
https://git-lfs.github.com/spec/v1 2 | oid sha256:3a762a7aa2d4161ea83a82379cc111712b9da6cd84d7f706aea347eedabf8104 3 | size 1519171 4 | -------------------------------------------------------------------------------- /packages/libunistring-1.3.0-1.arm64.sp: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:6e7220725511bbe9b671a0ac77d79957c29107396da3c781ea44c87a06bf189e 3 | size 703458 4 | -------------------------------------------------------------------------------- /packages/python3.11-3.11.13-1.arm64.sp: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:1c93d01cb4692ac05d1087c8b7e43ba8f1c446c0105fbd11468f0d39156d15ca 3 | size 49266117 4 | -------------------------------------------------------------------------------- /crates/builder/src/utils/mod.rs: -------------------------------------------------------------------------------- 1 | //! Utility modules for the builder crate 2 | 3 | pub mod events; 4 | pub mod executor; 5 | pub mod fileops; 6 | pub mod format; 7 | pub mod timeout; 8 | -------------------------------------------------------------------------------- /crates/builder/src/recipe/mod.rs: -------------------------------------------------------------------------------- 1 | //! Recipe parsing and execution module 2 | 3 | pub mod executor; 4 | pub mod model; 5 | pub mod parser; 6 | 7 | // Re-export commonly used items 8 | pub use executor::execute_recipe; 9 | -------------------------------------------------------------------------------- /crates/builder/src/core/types.rs: -------------------------------------------------------------------------------- 1 | //! Core types for the builder module 2 | //! 3 | //! This module contains shared types used throughout the builder crate. 4 | 5 | // Currently empty - will be populated as we refactor and identify common types 6 | -------------------------------------------------------------------------------- /crates/builder/src/yaml/mod.rs: -------------------------------------------------------------------------------- 1 | //! YAML recipe handling 2 | //! 3 | //! This module provides YAML-based recipe format for build recipes, 4 | //! using a declarative, staged approach for package building. 5 | 6 | mod recipe; 7 | 8 | pub use recipe::{BuildStep, RecipeMetadata}; 9 | -------------------------------------------------------------------------------- /.cargo/config.toml: -------------------------------------------------------------------------------- 1 | # .cargo/config.toml 2 | [profile.profiling] 3 | inherits = "release" 4 | debug = true 5 | lto = "off" 6 | codegen-units = 16 7 | 8 | [profile.profiling.build-override] 9 | opt-level = 3 10 | 11 | [build] 12 | rustflags = ["-C", "force-frame-pointers=yes", "-C", "target-cpu=native"] 13 | -------------------------------------------------------------------------------- /crates/builder/src/utils/events.rs: -------------------------------------------------------------------------------- 1 | //! 
Event emission utilities for build operations 2 | 3 | use crate::BuildContext; 4 | use sps2_events::{AppEvent, EventEmitter}; 5 | 6 | /// Send event if context has event sender 7 | pub fn send_event(context: &BuildContext, event: AppEvent) { 8 | context.emit(event); 9 | } 10 | -------------------------------------------------------------------------------- /whifile: -------------------------------------------------------------------------------- 1 | !path.replace 2 | $HOME/.rustup/toolchains/stable-aarch64-apple-darwin/bin 3 | $(pwd)/target/aarch64-apple-darwin/release 4 | /opt/pm/live/bin 5 | /opt/homebrew/bin 6 | /opt/homebrew/sbin 7 | /Users/alxknt/.local/bin 8 | /usr/local/bin 9 | /usr/local/sbin/ 10 | /usr/bin 11 | /usr/sbin 12 | /bin 13 | /sbin 14 | 15 | !env.set 16 | SPS2_ALLOW_HTTP 1 17 | -------------------------------------------------------------------------------- /recipes/nghttp2-1.65.0.yml: -------------------------------------------------------------------------------- 1 | metadata: 2 | name: nghttp2 3 | version: "1.65.0" 4 | description: "HTTP/2 C library and tools" 5 | license: "MIT" 6 | 7 | environment: 8 | defaults: true 9 | 10 | source: 11 | fetch: 12 | url: "https://github.com/nghttp2/nghttp2/releases/download/v1.65.0/nghttp2-1.65.0.tar.bz2" 13 | 14 | build: 15 | system: autotools -------------------------------------------------------------------------------- /crates/builder/src/security/mod.rs: -------------------------------------------------------------------------------- 1 | //! Build security context and validation 2 | //! 3 | //! This module provides a comprehensive security framework for tracking and 4 | //! validating all file system operations and command executions during builds. 5 | 6 | mod context; 7 | mod parser; 8 | mod path_resolver; 9 | 10 | pub use context::SecurityContext; 11 | pub use parser::parse_command_with_context; 12 | -------------------------------------------------------------------------------- /crates/builder/src/artifact_qa/scanners/mod.rs: -------------------------------------------------------------------------------- 1 | //! Registry of all scanner (validator) modules. 2 | 3 | pub mod archive; 4 | pub mod hardcoded; 5 | pub mod macho; 6 | pub mod staging; 7 | 8 | // Re-export the concrete types for convenient access elsewhere. 
9 | pub use archive::ArchiveScanner; 10 | pub use hardcoded::HardcodedScanner; 11 | pub use macho::MachOScanner; 12 | pub use staging::StagingScanner; 13 | -------------------------------------------------------------------------------- /recipes/tar-1.35.0.yml: -------------------------------------------------------------------------------- 1 | metadata: 2 | name: tar 3 | version: "1.35.0" 4 | description: "GNU tar archiving utility" 5 | license: "GPL-3.0-or-later" 6 | 7 | environment: 8 | defaults: true 9 | 10 | source: 11 | fetch: 12 | url: "https://ftp.gnu.org/gnu/tar/tar-1.35.tar.gz" 13 | 14 | build: 15 | system: autotools 16 | args: 17 | - "LIBS=-liconv" # On macOS, we need to explicitly link with iconv -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: cargo 4 | directory: / 5 | schedule: 6 | interval: weekly 7 | groups: 8 | cargo: 9 | patterns: 10 | - "*" 11 | 12 | - package-ecosystem: github-actions 13 | directory: / 14 | schedule: 15 | interval: weekly 16 | groups: 17 | github-actions: 18 | patterns: 19 | - "*" 20 | -------------------------------------------------------------------------------- /recipes/gnu_m4-1.4.20.yml: -------------------------------------------------------------------------------- 1 | metadata: 2 | name: m4 3 | version: "1.4.20" 4 | description: "GNU M4 is an implementation of the traditional Unix macro processor." 5 | license: "GPL-3.0-or-later" 6 | homepage: "https://www.gnu.org/software/m4/" 7 | 8 | environment: 9 | defaults: true 10 | 11 | source: 12 | fetch: 13 | url: "https://ftp.gnu.org/gnu/m4/m4-1.4.20.tar.gz" 14 | 15 | build: 16 | system: autotools 17 | -------------------------------------------------------------------------------- /recipes/zlib-1.3.1.yml: -------------------------------------------------------------------------------- 1 | metadata: 2 | name: zlib 3 | version: "1.3.1" 4 | description: "A massively spiffy yet delicately unobtrusive compression library." 5 | license: "Zlib" 6 | 7 | environment: 8 | defaults: true 9 | 10 | source: 11 | fetch: 12 | url: "https://github.com/madler/zlib/releases/download/v1.3.1/zlib-1.3.1.tar.gz" 13 | 14 | build: 15 | system: cmake 16 | args: 17 | - "-DCMAKE_BUILD_TYPE=Release" -------------------------------------------------------------------------------- /crates/guard/src/lib.rs: -------------------------------------------------------------------------------- 1 | #![warn(mismatched_lifetime_syntaxes)] 2 | //! Lightweight state guard utilities for verifying and healing package installations. 3 | 4 | mod refcount; 5 | mod store; 6 | mod verifier; 7 | 8 | pub use refcount::sync_refcounts_to_active_state; 9 | pub use store::{StoreVerificationConfig, StoreVerificationStats, StoreVerifier}; 10 | pub use verifier::{Discrepancy, VerificationLevel, VerificationResult, Verifier}; 11 | -------------------------------------------------------------------------------- /recipes/whi-0.6.2.yml: -------------------------------------------------------------------------------- 1 | metadata: 2 | name: whi 3 | version: "0.6.2" 4 | description: | 5 | Stupid simple PATH management 6 | license: "MIT" 7 | homepage: "https://github.com/alexykn/whi" 8 | 9 | environment: 10 | defaults: true 11 | network: true # Allow network access for dependency downloads 12 | 13 | source: 14 | local: 15 | path: "." 
16 | 17 | build: 18 | system: cargo 19 | args: 20 | - "--release" 21 | -------------------------------------------------------------------------------- /recipes/helix-25.1.1.yml: -------------------------------------------------------------------------------- 1 | metadata: 2 | name: helix 3 | version: "25.0.1" 4 | description: "A post-modern modal text editor." 5 | license: "MIT" 6 | 7 | environment: 8 | defaults: true 9 | network: true # Allow network access for dependency downloads 10 | 11 | source: 12 | fetch: 13 | url: "https://github.com/helix-editor/helix/archive/refs/tags/25.01.tar.gz" 14 | 15 | build: 16 | system: cargo 17 | args: 18 | - "--release" -------------------------------------------------------------------------------- /crates/guard/src/refcount.rs: -------------------------------------------------------------------------------- 1 | use sps2_errors::Error; 2 | use sps2_state::StateManager; 3 | 4 | /// Synchronize store and file-object refcounts to match the active state. 5 | /// 6 | /// Returns the number of updated store rows and file rows respectively. 7 | pub async fn sync_refcounts_to_active_state(state: &StateManager) -> Result<(usize, usize), Error> { 8 | let state_id = state.get_active_state().await?; 9 | state.sync_refcounts_to_state(&state_id).await 10 | } 11 | -------------------------------------------------------------------------------- /recipes/ninja-1.13.0.yml: -------------------------------------------------------------------------------- 1 | metadata: 2 | name: ninja 3 | version: "1.13.0" 4 | description: "Ninja is a small build system with a focus on speed." 5 | license: "Apache-2.0" 6 | homepage: "https://ninja-build.org/" 7 | 8 | environment: 9 | defaults: true 10 | 11 | source: 12 | fetch: 13 | url: "https://github.com/ninja-build/ninja/archive/refs/tags/v1.13.0.tar.gz" 14 | 15 | build: 16 | system: cmake 17 | args: 18 | - "-DCMAKE_BUILD_TYPE=Release" 19 | -------------------------------------------------------------------------------- /recipes/bat-0.25.0.yml: -------------------------------------------------------------------------------- 1 | metadata: 2 | name: bat 3 | version: "0.25.0" 4 | description: "A cat(1) clone with wings." 5 | license: "MIT OR Apache-2.0" 6 | homepage: "https://github.com/sharkdp/bat" 7 | 8 | environment: 9 | defaults: true 10 | network: true # Allow network access for dependency downloads 11 | 12 | source: 13 | fetch: 14 | url: "https://github.com/sharkdp/bat/archive/refs/tags/v0.25.0.tar.gz" 15 | 16 | build: 17 | system: cargo 18 | args: 19 | - "--release" 20 | -------------------------------------------------------------------------------- /recipes/automake-1.18.yml: -------------------------------------------------------------------------------- 1 | metadata: 2 | name: automake 3 | version: "1.18.0" 4 | description: "GNU Automake is a tool for automatically generating Makefile.in files from Makefile.am files." 
5 | license: "GPL-2.0-or-later" 6 | homepage: "https://www.gnu.org/software/automake/" 7 | 8 | environment: 9 | defaults: true 10 | 11 | dependencies: 12 | build: 13 | - autoconf 14 | 15 | source: 16 | fetch: 17 | url: "https://ftp.gnu.org/gnu/automake/automake-1.18.tar.gz" 18 | 19 | build: 20 | system: autotools 21 | -------------------------------------------------------------------------------- /recipes/brotli-1.1.0-1.yml: -------------------------------------------------------------------------------- 1 | metadata: 2 | name: brotli 3 | version: "1.1.0" 4 | description: "Generic-purpose lossless compression algorithm" 5 | license: "MIT" 6 | homepage: "https://github.com/google/brotli" 7 | 8 | environment: 9 | defaults: true 10 | 11 | source: 12 | fetch: 13 | url: "https://github.com/google/brotli/archive/refs/tags/v1.1.0.tar.gz" 14 | 15 | build: 16 | system: cmake 17 | args: 18 | - "-DCMAKE_BUILD_TYPE=Release" 19 | - "-DBUILD_SHARED_LIBS=ON" 20 | - "-DBROTLI_DISABLE_TESTS=ON" -------------------------------------------------------------------------------- /recipes/autoconf-2.72.yml: -------------------------------------------------------------------------------- 1 | metadata: 2 | name: autoconf 3 | version: "2.72.0" 4 | description: "GNU Autoconf is a tool for producing shell scripts that automatically configure software source code packages." 5 | license: "GPL-3.0-or-later" 6 | homepage: "https://www.gnu.org/software/autoconf/" 7 | 8 | environment: 9 | defaults: true 10 | 11 | dependencies: 12 | build: 13 | - m4 14 | 15 | source: 16 | fetch: 17 | url: "https://ftp.gnu.org/gnu/autoconf/autoconf-2.72.tar.gz" 18 | 19 | build: 20 | system: autotools 21 | -------------------------------------------------------------------------------- /recipes/uv-0.7.13.yml: -------------------------------------------------------------------------------- 1 | metadata: 2 | name: uv 3 | version: "0.7.13" 4 | description: | 5 | An extremely fast Python package and project manager, written in Rust. 
6 | license: "MIT" 7 | homepage: "https://github.com/astral-sh/uv" 8 | 9 | environment: 10 | defaults: true 11 | network: true # Allow network access for dependency downloads 12 | 13 | source: 14 | fetch: 15 | url: "https://github.com/astral-sh/uv/releases/download/0.7.13/source.tar.gz" 16 | 17 | build: 18 | system: cargo 19 | args: 20 | - "--release" 21 | -------------------------------------------------------------------------------- /recipes/libidn2-2.3.7-1.yml: -------------------------------------------------------------------------------- 1 | metadata: 2 | name: libidn2 3 | version: "2.3.8" 4 | description: "International domain name library (IDNA2008/TR46)" 5 | license: "GPL-3.0-or-later" 6 | homepage: "https://www.gnu.org/software/libidn/#libidn2" 7 | 8 | environment: 9 | defaults: true 10 | 11 | source: 12 | fetch: 13 | url: "https://ftp.gnu.org/gnu/libidn/libidn2-2.3.8.tar.gz" 14 | 15 | build: 16 | system: autotools 17 | args: 18 | - "--disable-dependency-tracking" 19 | - "--disable-silent-rules" 20 | - "--disable-static" -------------------------------------------------------------------------------- /recipes/pkgconf-2.4.3.yml: -------------------------------------------------------------------------------- 1 | metadata: 2 | name: pkgconf 3 | version: "2.4.3" 4 | description: "A system for managing library compile/link flags" 5 | license: "ISC" 6 | 7 | environment: 8 | defaults: true 9 | 10 | source: 11 | fetch: 12 | url: "https://distfiles.ariadne.space/pkgconf/pkgconf-2.4.3.tar.xz" 13 | 14 | build: 15 | system: meson 16 | args: 17 | - "--buildtype=release" 18 | 19 | post: 20 | commands: 21 | - shell: | 22 | cd ${DESTDIR}${PREFIX}/bin 23 | ln -sf pkgconf pkg-config 24 | 25 | -------------------------------------------------------------------------------- /recipes/meson-1.8.2.yml: -------------------------------------------------------------------------------- 1 | metadata: 2 | name: meson 3 | version: "1.8.2" 4 | description: "Meson is an open source build system meant to be both extremely fast, and, even more importantly, as user friendly as possible." 5 | license: "Apache-2.0" 6 | homepage: "https://mesonbuild.com/" 7 | build_deps: 8 | - ninja 9 | 10 | environment: 11 | defaults: true 12 | network: true 13 | 14 | source: 15 | fetch: 16 | url: "https://github.com/mesonbuild/meson/releases/download/1.8.2/meson-1.8.2.tar.gz" 17 | 18 | build: 19 | system: python 20 | -------------------------------------------------------------------------------- /recipes/bzip2-1.0.8.yaml: -------------------------------------------------------------------------------- 1 | metadata: 2 | name: bzip2 3 | version: "1.0.8" 4 | description: "High-quality data compression program" 5 | homepage: "https://sourceware.org/bzip2/" 6 | license: "bzip2-1.0.6" 7 | runtime_deps: [] 8 | build_deps: [] 9 | 10 | environment: 11 | defaults: true 12 | 13 | source: 14 | fetch: 15 | url: "https://sourceware.org/pub/bzip2/bzip2-1.0.8.tar.gz" 16 | 17 | build: 18 | steps: 19 | - shell: | 20 | make 21 | make install PREFIX=${DESTDIR}${PREFIX} 22 | 23 | post: 24 | fix_permissions: true 25 | -------------------------------------------------------------------------------- /crates/builder/src/stages/mod.rs: -------------------------------------------------------------------------------- 1 | //! Stage-specific types for the builder 2 | //! 3 | //! This module provides types for each stage of the build process, 4 | //! maintaining a clear separation between parsing and execution. 
5 | 6 | pub mod build; 7 | pub mod environment; 8 | pub mod executors; 9 | pub mod post; 10 | pub mod source; 11 | 12 | // Re-export execution types 13 | pub use build::BuildCommand; 14 | pub use environment::EnvironmentStep; 15 | pub use post::PostStep; 16 | pub use source::SourceStep; 17 | 18 | // The executors are used internally by utils/executor.rs 19 | -------------------------------------------------------------------------------- /crates/net/src/download/mod.rs: -------------------------------------------------------------------------------- 1 | //! Production-ready streaming download infrastructure for .sp files 2 | //! 3 | //! This module provides high-performance, resumable downloads with concurrent 4 | //! signature verification and comprehensive error handling. 5 | 6 | mod config; 7 | mod core; 8 | mod resume; 9 | mod retry; 10 | mod stream; 11 | mod validation; 12 | 13 | // Re-export public types and structs 14 | pub use config::{ 15 | DownloadResult, PackageDownloadConfig, PackageDownloadRequest, PackageDownloadResult, 16 | RetryConfig, 17 | }; 18 | pub use core::PackageDownloader; 19 | -------------------------------------------------------------------------------- /recipes/xz-5.8.1.yml: -------------------------------------------------------------------------------- 1 | metadata: 2 | name: xz 3 | version: "5.8.1" 4 | description: "XZ Utils is a free general-purpose data compression software with a high compression ratio." 5 | license: "Public-Domain" 6 | homepage: "https://tukaani.org/xz/" 7 | 8 | environment: 9 | defaults: true 10 | 11 | source: 12 | fetch: 13 | url: "https://github.com/tukaani-project/xz/releases/download/v5.8.1/xz-5.8.1.tar.gz" 14 | 15 | build: 16 | system: autotools 17 | args: 18 | - "--disable-dependency-tracking" 19 | - "--disable-silent-rules" 20 | - "--disable-static" 21 | -------------------------------------------------------------------------------- /crates/errors/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "sps2-errors" 3 | version = "0.1.0" 4 | edition.workspace = true 5 | rust-version.workspace = true 6 | authors.workspace = true 7 | license.workspace = true 8 | repository.workspace = true 9 | 10 | [dependencies] 11 | thiserror = { workspace = true } 12 | serde = { workspace = true, optional = true } 13 | semver = { workspace = true } 14 | sqlx = { workspace = true } 15 | uuid = { workspace = true } 16 | serde_json = { workspace = true } 17 | minisign-verify = { workspace = true } 18 | 19 | [features] 20 | default = [] 21 | serde = ["dep:serde"] 22 | -------------------------------------------------------------------------------- /recipes/libtool-2.5.4.yml: -------------------------------------------------------------------------------- 1 | metadata: 2 | name: libtool 3 | version: "2.5.4" 4 | description: "GNU Libtool is a generic library support script that hides the complexity of using shared libraries behind a consistent, portable interface." 
5 | license: "GPL-2.0-or-later" 6 | homepage: "https://www.gnu.org/software/libtool/" 7 | 8 | environment: 9 | defaults: true 10 | 11 | dependencies: 12 | build: 13 | - autoconf 14 | - automake 15 | 16 | source: 17 | fetch: 18 | url: "https://ftp.gnu.org/gnu/libtool/libtool-2.5.4.tar.gz" 19 | 20 | build: 21 | system: autotools 22 | -------------------------------------------------------------------------------- /crates/index/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "sps2-index" 3 | version = "0.1.0" 4 | edition.workspace = true 5 | rust-version.workspace = true 6 | authors.workspace = true 7 | license.workspace = true 8 | repository.workspace = true 9 | 10 | [dependencies] 11 | sps2-errors = { path = "../errors" } 12 | sps2-types = { path = "../types" } 13 | serde = { workspace = true } 14 | serde_json = { workspace = true } 15 | chrono = { workspace = true } 16 | semver = { workspace = true } 17 | tokio = { workspace = true, features = ["fs"] } 18 | 19 | [dev-dependencies] 20 | tempfile = { workspace = true } 21 | -------------------------------------------------------------------------------- /recipes/cmake-4.0.3.yaml: -------------------------------------------------------------------------------- 1 | metadata: 2 | name: cmake 3 | version: "4.0.3" 4 | description: "Cross-platform build system generator" 5 | homepage: "https://cmake.org" 6 | license: "BSD-3-Clause" 7 | runtime_deps: [] 8 | build_deps: [] 9 | 10 | environment: 11 | defaults: true 12 | 13 | source: 14 | fetch: 15 | url: "https://github.com/Kitware/CMake/releases/download/v4.0.3/cmake-4.0.3.tar.gz" 16 | 17 | build: 18 | steps: 19 | - shell: | 20 | ./bootstrap --prefix=${PREFIX} 21 | make 22 | make install 23 | 24 | post: 25 | fix_permissions: true 26 | -------------------------------------------------------------------------------- /.github/workflows/security-audit.yml: -------------------------------------------------------------------------------- 1 | name: Security Audit 2 | 3 | on: 4 | workflow_dispatch: 5 | 6 | env: 7 | CARGO_TERM_COLOR: always 8 | RUST_BACKTRACE: 1 9 | 10 | jobs: 11 | security_audit: 12 | name: Security Audit 13 | runs-on: macos-latest 14 | steps: 15 | - uses: actions/checkout@v5 16 | - uses: dtolnay/rust-toolchain@stable 17 | with: 18 | toolchain: 1.90.0 19 | - name: Install cargo-audit 20 | run: cargo install cargo-audit 21 | - name: Run security audit 22 | run: cargo audit 23 | continue-on-error: true 24 | -------------------------------------------------------------------------------- /crates/builder/src/environment/mod.rs: -------------------------------------------------------------------------------- 1 | //! Build environment management 2 | //! 3 | //! This module provides isolated build environments for package building. 4 | //! It manages directory structure, environment variables, dependency installation, 5 | //! command execution, and environment isolation verification. 
6 | 7 | mod core; 8 | mod dependencies; 9 | mod directories; 10 | mod execution; 11 | mod hermetic; 12 | mod isolation; 13 | mod types; 14 | mod variables; 15 | 16 | // Re-export public API 17 | pub use core::BuildEnvironment; 18 | pub use types::{BuildCommandResult, BuildResult, IsolationLevel}; 19 | -------------------------------------------------------------------------------- /crates/config/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "sps2-config" 3 | version = "0.1.0" 4 | edition.workspace = true 5 | rust-version.workspace = true 6 | authors.workspace = true 7 | license.workspace = true 8 | repository.workspace = true 9 | 10 | [dependencies] 11 | sps2-errors = { path = "../errors" } 12 | sps2-types = { path = "../types" } 13 | serde = { workspace = true } 14 | toml = { workspace = true } 15 | tokio = { workspace = true, features = ["fs", "sync"] } 16 | dirs = "6.0.0" 17 | num_cpus = "1.17.0" 18 | tracing = { workspace = true } 19 | 20 | [dev-dependencies] 21 | tempfile = { workspace = true } 22 | -------------------------------------------------------------------------------- /crates/hash/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "sps2-hash" 3 | version = "0.1.0" 4 | edition.workspace = true 5 | rust-version.workspace = true 6 | authors.workspace = true 7 | license.workspace = true 8 | repository.workspace = true 9 | 10 | [dependencies] 11 | sps2-errors = { path = "../errors" } 12 | sps2-types = { path = "../types" } 13 | blake3 = { workspace = true } 14 | xxhash-rust = { workspace = true } 15 | tokio = { workspace = true, features = ["fs", "io-util"] } 16 | serde = { workspace = true } 17 | serde_json = { workspace = true } 18 | hex = "0.4.3" 19 | 20 | [dev-dependencies] 21 | tempfile = { workspace = true } 22 | -------------------------------------------------------------------------------- /crates/install/src/atomic/mod.rs: -------------------------------------------------------------------------------- 1 | //! Atomic installation operations using APFS clonefile and state transitions 2 | //! 3 | //! This module provides atomic installation capabilities with: 4 | //! - APFS-optimized file operations for instant, space-efficient copies 5 | //! - Hard link creation for efficient package linking 6 | //! - State transitions with rollback support 7 | //! 
- Platform-specific filesystem optimizations 8 | 9 | pub mod fs; 10 | pub mod installer; 11 | pub mod package; 12 | pub mod transition; 13 | 14 | // Re-export main public API 15 | pub use installer::AtomicInstaller; 16 | pub use transition::StateTransition; 17 | -------------------------------------------------------------------------------- /crates/types/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "sps2-types" 3 | version = "0.1.0" 4 | edition.workspace = true 5 | rust-version.workspace = true 6 | authors.workspace = true 7 | license.workspace = true 8 | repository.workspace = true 9 | 10 | [dependencies] 11 | sps2-errors = { path = "../errors" } 12 | semver = { workspace = true } 13 | serde = { workspace = true } 14 | thiserror = { workspace = true } 15 | chrono = { workspace = true } 16 | uuid = { workspace = true } 17 | clap = { workspace = true } 18 | toml = { workspace = true } 19 | 20 | [dev-dependencies] 21 | proptest = { workspace = true } 22 | serde_json = { workspace = true } 23 | -------------------------------------------------------------------------------- /recipes/sqlite-3.39.4.yml: -------------------------------------------------------------------------------- 1 | metadata: 2 | name: sqlite 3 | version: "3.39.4" 4 | description: "SQLite is a C-language library that implements a small, fast, self-contained, high-reliability, full-featured, SQL database engine." 5 | license: "Public-Domain" 6 | homepage: "https://www.sqlite.org/" 7 | 8 | environment: 9 | defaults: true 10 | 11 | source: 12 | fetch: 13 | url: "https://www.sqlite.org/2022/sqlite-autoconf-3390400.tar.gz" 14 | 15 | build: 16 | system: autotools 17 | args: 18 | - "--disable-dependency-tracking" 19 | - "--disable-silent-rules" 20 | - "--disable-static" 21 | - "--enable-fts5" 22 | -------------------------------------------------------------------------------- /crates/builder/src/artifact_qa/macho_utils.rs: -------------------------------------------------------------------------------- 1 | //! Shared utilities for working with Mach-O files 2 | //! Used by both scanners and patchers to ensure consistent detection 3 | 4 | use object::FileKind; 5 | use std::path::Path; 6 | 7 | /// Check if a file is a Mach-O binary by parsing its header 8 | /// 9 | /// Uses the exact same logic as the `MachO` scanner. Returns true if the file 10 | /// can be parsed as a valid Mach-O binary. 
11 | #[must_use] 12 | pub fn is_macho_file(path: &Path) -> bool { 13 | if let Ok(data) = std::fs::read(path) { 14 | FileKind::parse(&*data).is_ok() 15 | } else { 16 | false 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /crates/events/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "sps2-events" 3 | version = "0.1.0" 4 | edition.workspace = true 5 | rust-version.workspace = true 6 | authors.workspace = true 7 | license.workspace = true 8 | repository.workspace = true 9 | 10 | [dependencies] 11 | sps2-types = { path = "../types" } 12 | tokio = { workspace = true, features = ["sync"] } 13 | serde = { workspace = true } 14 | uuid = { workspace = true, features = ["v4"] } 15 | tracing = { workspace = true } 16 | chrono = { workspace = true } 17 | sps2-errors = { path = "../errors" } 18 | 19 | [dev-dependencies] 20 | tokio = { workspace = true, features = ["test-util", "macros"] } 21 | -------------------------------------------------------------------------------- /recipes/libunistring-1.3.yml: -------------------------------------------------------------------------------- 1 | metadata: 2 | name: libunistring 3 | version: "1.3.0" 4 | description: "GNU libunistring provides functions for manipulating Unicode strings and for manipulating C strings according to the Unicode standard." 5 | license: "LGPL-3.0-or-later" 6 | homepage: "https://www.gnu.org/software/libunistring/" 7 | 8 | environment: 9 | defaults: true 10 | 11 | source: 12 | fetch: 13 | url: "https://ftp.gnu.org/gnu/libunistring/libunistring-1.3.tar.gz" 14 | 15 | build: 16 | system: autotools 17 | args: 18 | - "--disable-dependency-tracking" 19 | - "--disable-silent-rules" 20 | - "--disable-static" 21 | -------------------------------------------------------------------------------- /recipes/zstd-1.5.7.yml: -------------------------------------------------------------------------------- 1 | metadata: 2 | name: zstd 3 | version: "1.5.7" 4 | description: "Zstandard - Fast lossless compression algorithm targeting real-time compression scenarios at zlib-level and better compression ratios" 5 | license: "BSD-3-Clause OR GPL-2.0" 6 | homepage: "https://facebook.github.io/zstd/" 7 | 8 | environment: 9 | defaults: true 10 | variables: 11 | CC: "clang -arch arm64 -O3" 12 | CXX: "clang++ -arch arm64 -O3" 13 | 14 | source: 15 | fetch: 16 | url: "https://github.com/facebook/zstd/releases/download/v1.5.7/zstd-1.5.7.tar.gz" 17 | 18 | build: 19 | steps: 20 | - make: [] 21 | - make: ["install"] -------------------------------------------------------------------------------- /recipes/ansible-2.18.6.yml: -------------------------------------------------------------------------------- 1 | metadata: 2 | name: ansible 3 | version: "2.18.6" 4 | description: "Ansible is a radically simple IT automation platform that makes your applications and systems easier to deploy and maintain. Ansible Core contains the base engine and a small subset of modules and plugins." 
5 | license: "GPL-3.0-or-later" 6 | homepage: "https://www.ansible.com/" 7 | build_deps: 8 | - python 9 | 10 | environment: 11 | defaults: true 12 | network: true 13 | 14 | source: 15 | fetch: 16 | url: "https://files.pythonhosted.org/packages/source/a/ansible-core/ansible_core-2.18.6.tar.gz" 17 | 18 | build: 19 | system: python 20 | -------------------------------------------------------------------------------- /apps/sls/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "sls" 3 | version = "0.1.0" 4 | edition.workspace = true 5 | rust-version.workspace = true 6 | authors.workspace = true 7 | license.workspace = true 8 | repository.workspace = true 9 | 10 | [dependencies] 11 | sps2-hash = { path = "../../crates/hash" } 12 | sps2-state = { path = "../../crates/state" } 13 | sps2-config = { path = "../../crates/config" } 14 | 15 | clap = { version = "4.5.51", features = ["derive"] } 16 | tokio = { version = "1.48.0", features = ["full"] } 17 | chrono = "0.4.42" 18 | sqlx = { version = "0.8.6", features = ["runtime-tokio", "sqlite"] } 19 | libc = { version = "0.2.177", optional = false } 20 | -------------------------------------------------------------------------------- /crates/builder/src/stages/post.rs: -------------------------------------------------------------------------------- 1 | //! Post-processing stage types and operations 2 | 3 | use serde::{Deserialize, Serialize}; 4 | use sps2_types::RpathStyle; 5 | 6 | /// Post-processing operations 7 | #[derive(Debug, Clone, Serialize, Deserialize)] 8 | pub enum PostStep { 9 | /// Patch rpaths in binaries 10 | PatchRpaths { 11 | style: RpathStyle, 12 | paths: Vec, 13 | }, 14 | 15 | /// Fix executable permissions 16 | FixPermissions { paths: Vec }, 17 | 18 | /// Run arbitrary command in post stage 19 | Command { program: String, args: Vec }, 20 | } 21 | 22 | // Note: ParsedPost is recipe::model::Post 23 | -------------------------------------------------------------------------------- /recipes/libpsl-0.21.5-1.yml: -------------------------------------------------------------------------------- 1 | metadata: 2 | name: libpsl 3 | version: "0.21.5" 4 | description: "C library for the Public Suffix List" 5 | license: "MIT" 6 | homepage: "https://github.com/rockdaboot/libpsl" 7 | dependencies: 8 | runtime: 9 | - libidn2 10 | 11 | environment: 12 | defaults: true 13 | 14 | source: 15 | fetch: 16 | url: "https://github.com/rockdaboot/libpsl/releases/download/0.21.5/libpsl-0.21.5.tar.gz" 17 | 18 | build: 19 | system: autotools 20 | args: 21 | - "--disable-dependency-tracking" 22 | - "--disable-silent-rules" 23 | - "--disable-static" 24 | - "--enable-runtime=libidn2" 25 | - "--enable-builtin=libidn2" -------------------------------------------------------------------------------- /crates/builder/src/stages/environment.rs: -------------------------------------------------------------------------------- 1 | //! 
Environment stage types and operations 2 | 3 | use crate::environment::IsolationLevel; 4 | use serde::{Deserialize, Serialize}; 5 | 6 | /// Environment setup operations 7 | #[derive(Debug, Clone, Serialize, Deserialize)] 8 | pub enum EnvironmentStep { 9 | /// Set isolation level 10 | SetIsolation { level: IsolationLevel }, 11 | 12 | /// Apply compiler defaults 13 | WithDefaults, 14 | 15 | /// Allow network access 16 | AllowNetwork { enabled: bool }, 17 | 18 | /// Set environment variable 19 | SetEnv { key: String, value: String }, 20 | } 21 | 22 | // Note: ParsedEnvironment is recipe::model::Environment 23 | -------------------------------------------------------------------------------- /recipes/libssh2-1.11.1-1.yml: -------------------------------------------------------------------------------- 1 | metadata: 2 | name: libssh2 3 | version: "1.11.1" 4 | description: "A client-side C library implementing the SSH2 protocol" 5 | license: "BSD" 6 | homepage: "https://libssh2.org" 7 | dependencies: 8 | runtime: 9 | - openssl 10 | - zlib 11 | 12 | environment: 13 | defaults: true 14 | 15 | source: 16 | fetch: 17 | url: "https://github.com/libssh2/libssh2/releases/download/libssh2-1.11.1/libssh2-1.11.1.tar.gz" 18 | 19 | build: 20 | system: cmake 21 | args: 22 | - "-DCMAKE_BUILD_TYPE=Release" 23 | - "-DBUILD_SHARED_LIBS=ON" 24 | - "-DENABLE_ZLIB_COMPRESSION=ON" 25 | - "-DBUILD_EXAMPLES=OFF" 26 | - "-DBUILD_TESTING=OFF" -------------------------------------------------------------------------------- /recipes/openssl-3.5.0.yml: -------------------------------------------------------------------------------- 1 | metadata: 2 | name: openssl 3 | version: "3.5.0" 4 | description: "Robust, commercial-grade, and full-featured toolkit for TLS and SSL protocols" 5 | license: "Apache-2.0" 6 | 7 | environment: 8 | defaults: true 9 | 10 | source: 11 | fetch: 12 | url: "https://github.com/openssl/openssl/releases/download/openssl-3.5.0/openssl-3.5.0.tar.gz" 13 | 14 | build: 15 | steps: 16 | - configure: 17 | - "darwin64-arm64-cc" 18 | - "--prefix=${PREFIX}" 19 | - "--openssldir=${PREFIX}/etc/ssl" 20 | - "--libdir=lib" 21 | - "shared" 22 | - "zlib-dynamic" 23 | - make: [] 24 | - make: ["install", "DESTDIR=${DESTDIR}"] -------------------------------------------------------------------------------- /crates/repository/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "sps2-repository" 3 | version = "0.1.0" 4 | edition.workspace = true 5 | rust-version.workspace = true 6 | authors.workspace = true 7 | license.workspace = true 8 | repository.workspace = true 9 | 10 | [dependencies] 11 | sps2-errors = { path = "../errors" } 12 | sps2-index = { path = "../index" } 13 | sps2-hash = { path = "../hash" } 14 | sps2-net = { path = "../net" } 15 | serde = { workspace = true } 16 | serde_json = { workspace = true } 17 | tokio = { workspace = true, features = ["fs"] } 18 | regex = "1.12.2" 19 | thiserror = { workspace = true } 20 | async-trait = "0.1.89" 21 | chrono = { workspace = true } 22 | base64 = "0.22.1" 23 | hex = "0.4.3" 24 | -------------------------------------------------------------------------------- /recipes/isl-0.27.0.yml: -------------------------------------------------------------------------------- 1 | metadata: 2 | name: isl 3 | version: "0.27.0" 4 | description: "A library for manipulating sets and relations of integer points bounded by linear constraints." 
5 | license: "MIT" 6 | homepage: "https://libisl.sourceforge.io/" 7 | dependencies: 8 | runtime: 9 | - gmp 10 | build: 11 | - gmp 12 | 13 | environment: 14 | defaults: true 15 | 16 | source: 17 | fetch: 18 | url: "https://libisl.sourceforge.io/isl-0.27.tar.bz2" 19 | 20 | build: 21 | system: autotools 22 | args: 23 | # Build shared libraries for dynamic linking. 24 | - "--enable-shared" 25 | # Disable the static library to save space and reduce complexity. 26 | - "--disable-static" -------------------------------------------------------------------------------- /recipes/ripgrep-14.1.1.yml: -------------------------------------------------------------------------------- 1 | metadata: 2 | name: ripgrep 3 | version: "14.1.1" 4 | description: | 5 | ripgrep is a line-oriented search tool that recursively searches the current 6 | directory for a regex pattern while respecting gitignore rules. ripgrep has 7 | first class support on Windows, macOS and Linux. 8 | license: "Unlicense OR MIT" 9 | homepage: "https://github.com/BurntSushi/ripgrep" 10 | 11 | environment: 12 | defaults: true 13 | network: true # Allow network access for dependency downloads 14 | 15 | source: 16 | fetch: 17 | url: "https://github.com/BurntSushi/ripgrep/archive/refs/tags/14.1.1.tar.gz" 18 | 19 | build: 20 | system: cargo 21 | args: 22 | - "--release" 23 | -------------------------------------------------------------------------------- /crates/guard/Cargo.toml: -------------------------------------------------------------------------------- 1 | 2 | [package] 3 | name = "sps2-guard" 4 | version = "0.1.0" 5 | edition.workspace = true 6 | rust-version.workspace = true 7 | authors.workspace = true 8 | license.workspace = true 9 | repository.workspace = true 10 | 11 | [dependencies] 12 | sps2-errors = { path = "../errors" } 13 | sps2-events = { path = "../events" } 14 | sps2-state = { path = "../state" } 15 | sps2-hash = { path = "../hash" } 16 | sps2-store = { path = "../store" } 17 | sps2-platform = { path = "../platform" } 18 | serde = { workspace = true } 19 | tokio = { workspace = true, features = ["fs"] } 20 | walkdir = "2.5.0" 21 | uuid = { workspace = true, features = ["v4"]} 22 | 23 | [dev-dependencies] 24 | tempfile = { workspace = true } 25 | -------------------------------------------------------------------------------- /crates/platform/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "sps2-platform" 3 | version = "0.1.0" 4 | edition.workspace = true 5 | rust-version.workspace = true 6 | authors.workspace = true 7 | license.workspace = true 8 | repository.workspace = true 9 | 10 | [dependencies] 11 | sps2-errors = { path = "../errors" } 12 | sps2-events = { path = "../events" } 13 | 14 | async-trait = "0.1.89" 15 | tokio = { workspace = true } 16 | serde = { workspace = true } 17 | thiserror = { workspace = true } 18 | futures = { workspace = true } 19 | libc = "0.2.177" 20 | serde_json = { workspace = true } 21 | dirs = "6.0.0" 22 | chrono = { workspace = true } 23 | 24 | [dev-dependencies] 25 | tempfile = { workspace = true } 26 | 27 | [features] 28 | default = [] -------------------------------------------------------------------------------- /crates/config/src/constants.rs: -------------------------------------------------------------------------------- 1 | //! Centralized, non-configurable filesystem paths for sps2 2 | //! 3 | //! These paths are deliberately not exposed via TOML configuration to keep the 4 | //! installation prefix stable. 
Packages are built against this fixed prefix. 5 | 6 | pub const PREFIX: &str = "/opt/pm"; 7 | 8 | pub const STORE_DIR: &str = "/opt/pm/store"; 9 | pub const STATES_DIR: &str = "/opt/pm/states"; 10 | pub const LIVE_DIR: &str = "/opt/pm/live"; 11 | pub const BIN_DIR: &str = "/opt/pm/live/bin"; 12 | 13 | pub const LOGS_DIR: &str = "/opt/pm/logs"; 14 | pub const KEYS_DIR: &str = "/opt/pm/keys"; 15 | 16 | pub const DB_PATH: &str = "/opt/pm/state.sqlite"; 17 | 18 | pub const LAST_GC_TIMESTAMP: &str = "/opt/pm/.last_gc_timestamp"; 19 | -------------------------------------------------------------------------------- /.github/workflows/docs.yml: -------------------------------------------------------------------------------- 1 | name: Documentation 2 | 3 | on: 4 | workflow_dispatch: 5 | 6 | env: 7 | CARGO_TERM_COLOR: always 8 | RUST_BACKTRACE: 1 9 | 10 | jobs: 11 | docs: 12 | name: Documentation 13 | runs-on: macos-latest 14 | steps: 15 | - uses: actions/checkout@v5 16 | - uses: dtolnay/rust-toolchain@stable 17 | with: 18 | toolchain: 1.90.0 19 | - name: Build documentation 20 | run: cargo doc --all --no-deps 21 | - name: Deploy to GitHub Pages 22 | if: github.ref == 'refs/heads/main' 23 | uses: peaceiris/actions-gh-pages@v4 24 | with: 25 | github_token: ${{ secrets.GITHUB_TOKEN }} 26 | publish_dir: ./target/doc 27 | continue-on-error: true 28 | -------------------------------------------------------------------------------- /apps/sbs/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "sbs" 3 | version = "0.1.0" 4 | edition.workspace = true 5 | rust-version.workspace = true 6 | authors.workspace = true 7 | license.workspace = true 8 | repository.workspace = true 9 | 10 | [[bin]] 11 | name = "sbs" 12 | path = "src/main.rs" 13 | 14 | [dependencies] 15 | clap = { workspace = true, features = ["derive"] } 16 | sps2-errors = { path = "../../crates/errors" } 17 | sps2-repository = { path = "../../crates/repository" } 18 | sps2-net = { path = "../../crates/net" } 19 | tokio = { workspace = true, features = ["fs", "rt-multi-thread", "macros"] } 20 | tracing = "0.1.41" 21 | tracing-subscriber = { version = "0.3.20", features = ["env-filter"] } 22 | minisign = "0.8.0" 23 | base64 = "0.22.1" 24 | rpassword = "7.4.0" 25 | -------------------------------------------------------------------------------- /recipes/gnu_mpfr-4.2.2.yml: -------------------------------------------------------------------------------- 1 | metadata: 2 | name: mpfr 3 | version: "4.2.2" 4 | description: "A C library for multiple-precision floating-point computations with correct rounding." 5 | license: "LGPL-3.0-or-later" 6 | homepage: "https://www.mpfr.org/" 7 | dependencies: 8 | runtime: 9 | - gmp 10 | 11 | environment: 12 | defaults: true 13 | 14 | source: 15 | fetch: 16 | url: "https://www.mpfr.org/mpfr-current/mpfr-4.2.2.tar.gz" 17 | 18 | build: 19 | system: autotools 20 | args: 21 | # Link against the GMP library provided in the build environment. 22 | - "--with-gmp=${PREFIX}" 23 | # Build shared libraries for dynamic linking. 24 | - "--enable-shared" 25 | # Disable the static library to save space. 
26 | - "--disable-static" -------------------------------------------------------------------------------- /recipes/make-4.4.1-1.yml: -------------------------------------------------------------------------------- 1 | metadata: 2 | name: make 3 | version: "4.4.1" 4 | description: "GNU Make is a tool which controls the generation of executables and other non-source files of a program from the program's source files." 5 | license: "GPL-3.0-or-later" 6 | homepage: "https://www.gnu.org/software/make/" 7 | 8 | environment: 9 | defaults: true 10 | variables: 11 | CC: "gcc" # Force GCC to avoid clang issues 12 | CXX: "g++" 13 | 14 | source: 15 | fetch: 16 | url: "https://ftp.gnu.org/gnu/make/make-4.4.1.tar.gz" 17 | 18 | build: 19 | system: autotools 20 | args: 21 | # Standard optimization flags 22 | - "--disable-dependency-tracking" 23 | - "--disable-silent-rules" 24 | # Skip optional Guile support 25 | - "--without-guile" -------------------------------------------------------------------------------- /crates/platform/src/implementations/macos/mod.rs: -------------------------------------------------------------------------------- 1 | //! macOS-specific platform implementation 2 | 3 | pub mod binary; 4 | pub mod filesystem; 5 | pub mod process; 6 | 7 | /// macOS platform implementation 8 | pub struct MacOSPlatform; 9 | 10 | impl MacOSPlatform { 11 | /// Create a new macOS platform instance 12 | #[allow(clippy::new_ret_no_self)] 13 | pub fn new() -> crate::core::Platform { 14 | use binary::MacOSBinaryOperations; 15 | use filesystem::MacOSFilesystemOperations; 16 | use process::MacOSProcessOperations; 17 | 18 | crate::core::Platform::new( 19 | Box::new(MacOSBinaryOperations::new()), 20 | Box::new(MacOSFilesystemOperations::new()), 21 | Box::new(MacOSProcessOperations::new()), 22 | ) 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /crates/ops/src/small_ops.rs: -------------------------------------------------------------------------------- 1 | //! Small operations implemented in the ops crate 2 | //! 3 | //! This module serves as a public API facade that re-exports operations 4 | //! from specialized modules. All function signatures are preserved for 5 | //! backward compatibility. 6 | 7 | // Import all the modularized operations 8 | use crate::health; 9 | use crate::maintenance; 10 | use crate::query; 11 | use crate::repository; 12 | use crate::self_update as self_update_module; 13 | 14 | // Re-export all public functions to maintain API compatibility 15 | pub use health::check_health; 16 | pub use maintenance::{cleanup, history, rollback}; 17 | pub use query::{list_packages, package_info, search_packages}; 18 | pub use repository::{add_repo, list_repos, remove_repo, reposync}; 19 | pub use self_update_module::self_update; 20 | -------------------------------------------------------------------------------- /crates/install/src/api/types.rs: -------------------------------------------------------------------------------- 1 | use sps2_hash::Hash; 2 | use std::path::PathBuf; 3 | 4 | /// Prepared package data passed from `ParallelExecutor` to `AtomicInstaller` 5 | /// 6 | /// This structure contains all the information needed by `AtomicInstaller` 7 | /// to install a package without having to look up `package_map` or perform 8 | /// additional database queries. 
9 | #[derive(Clone, Debug)] 10 | pub struct PreparedPackage { 11 | /// Package hash 12 | pub hash: Hash, 13 | /// Package size in bytes 14 | pub size: u64, 15 | /// Path to the package in the store 16 | pub store_path: PathBuf, 17 | /// Whether this package was downloaded or local 18 | pub is_local: bool, 19 | /// Optional package archive hash (BLAKE3) provided by the repository 20 | pub package_hash: Option, 21 | } 22 | -------------------------------------------------------------------------------- /crates/resolver/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "sps2-resolver" 3 | version = "0.1.0" 4 | edition.workspace = true 5 | rust-version.workspace = true 6 | authors.workspace = true 7 | license.workspace = true 8 | repository.workspace = true 9 | 10 | [dependencies] 11 | sps2-errors = { path = "../errors" } 12 | sps2-events = { path = "../events" } 13 | sps2-types = { path = "../types" } 14 | sps2-index = { path = "../index" } 15 | sps2-platform = { path = "../platform" } 16 | sps2-hash = { path = "../hash" } 17 | serde = { workspace = true } 18 | semver = { workspace = true } 19 | tokio = { workspace = true } 20 | dashmap = { workspace = true } 21 | crossbeam = { workspace = true } 22 | uuid = { version = "1.18.1", features = ["v4"] } 23 | scopeguard = "1.2.0" 24 | 25 | [dev-dependencies] 26 | tempfile = { workspace = true } 27 | -------------------------------------------------------------------------------- /.github/workflows/coverage.yml: -------------------------------------------------------------------------------- 1 | name: Code Coverage 2 | 3 | on: 4 | workflow_dispatch: 5 | 6 | env: 7 | CARGO_TERM_COLOR: always 8 | RUST_BACKTRACE: 1 9 | 10 | jobs: 11 | coverage: 12 | name: Code Coverage 13 | runs-on: macos-latest 14 | steps: 15 | - uses: actions/checkout@v5 16 | - uses: dtolnay/rust-toolchain@stable 17 | with: 18 | toolchain: 1.90.0 19 | components: llvm-tools-preview 20 | - name: Install cargo-llvm-cov 21 | run: cargo install cargo-llvm-cov 22 | - name: Generate code coverage 23 | run: cargo llvm-cov --all-features --workspace --lcov --output-path lcov.info 24 | - name: Upload coverage to Codecov 25 | uses: codecov/codecov-action@v5 26 | with: 27 | files: lcov.info 28 | continue-on-error: true 29 | -------------------------------------------------------------------------------- /justfile: -------------------------------------------------------------------------------- 1 | set positional-arguments 2 | 3 | help: 4 | just -l 5 | 6 | fetch: 7 | rustup show active-toolchain 8 | cargo fetch 9 | 10 | fmt *args: 11 | rustup show active-toolchain 12 | cargo fmt "$@" 13 | 14 | check *args: 15 | rustup show active-toolchain 16 | cargo check 17 | 18 | lint *args: 19 | rustup show active-toolchain 20 | cargo clippy --all-targets --all-features "$@" 21 | 22 | fix *args: 23 | rustup show active-toolchain 24 | cargo clippy --fix --all-targets --all-features --allow-dirty "$@" 25 | 26 | build *args: 27 | rustup show active-toolchain 28 | cargo build --release --target=aarch64-apple-darwin 29 | 30 | test *args: 31 | rustup show active-toolchain 32 | cargo test 33 | 34 | push: 35 | git push origin main 36 | 37 | pull: 38 | git pull origin main 39 | -------------------------------------------------------------------------------- /recipes/mpc-1.3.1.yml: -------------------------------------------------------------------------------- 1 | metadata: 2 | name: mpc 3 | version: "1.3.1" 4 | description: "A C library for complex 
number arithmetic with arbitrarily high precision and correct rounding." 5 | license: "LGPL-3.0-or-later" 6 | homepage: "https://www.multiprecision.org/mpc/" 7 | dependencies: 8 | runtime: 9 | - gmp 10 | - mpfr 11 | 12 | environment: 13 | defaults: true 14 | 15 | source: 16 | fetch: 17 | url: "https://ftp.gnu.org/gnu/mpc/mpc-1.3.1.tar.gz" 18 | 19 | build: 20 | system: autotools 21 | args: 22 | # Link against the GMP and MPFR libraries provided in the build environment. 23 | - "--with-gmp=${PREFIX}" 24 | - "--with-mpfr=${PREFIX}" 25 | # Build shared libraries for dynamic linking. 26 | - "--enable-shared" 27 | # Disable the static library to save space. 28 | - "--disable-static" -------------------------------------------------------------------------------- /crates/builder/src/recipe/executor.rs: -------------------------------------------------------------------------------- 1 | //! YAML recipe execution 2 | 3 | use crate::yaml::RecipeMetadata; 4 | use crate::{BuildConfig, BuildContext, BuildEnvironment}; 5 | use sps2_errors::Error; 6 | use sps2_types::package::PackageSpec; 7 | 8 | /// Execute the YAML recipe and return dependencies, metadata, install request status, and `qa_pipeline` 9 | pub async fn execute_recipe( 10 | config: &BuildConfig, 11 | context: &BuildContext, 12 | environment: &mut BuildEnvironment, 13 | ) -> Result< 14 | ( 15 | Vec, 16 | Vec, 17 | RecipeMetadata, 18 | bool, 19 | sps2_types::QaPipelineOverride, 20 | ), 21 | Error, 22 | > { 23 | // Execute YAML recipe using staged execution 24 | crate::utils::executor::execute_staged_build(config, context, environment).await 25 | } 26 | -------------------------------------------------------------------------------- /crates/state/build.rs: -------------------------------------------------------------------------------- 1 | use std::env; 2 | use std::path::PathBuf; 3 | 4 | fn main() { 5 | // Set SQLX_OFFLINE_DIR if not already set 6 | if env::var("SQLX_OFFLINE_DIR").is_err() { 7 | // Check multiple possible locations 8 | let possible_dirs = vec![ 9 | PathBuf::from("/opt/pm/.sqlx"), 10 | PathBuf::from(".sqlx"), 11 | env::current_dir().unwrap().join(".sqlx"), 12 | ]; 13 | 14 | for dir in possible_dirs { 15 | if dir.exists() { 16 | println!("cargo:rustc-env=SQLX_OFFLINE_DIR={}", dir.display()); 17 | break; 18 | } 19 | } 20 | } 21 | 22 | // Force offline mode in production builds 23 | if env::var("SQLX_OFFLINE").is_err() { 24 | println!("cargo:rustc-env=SQLX_OFFLINE=true"); 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /recipes/gnu_gmp-6.3.0.yml: -------------------------------------------------------------------------------- 1 | metadata: 2 | name: gmp 3 | version: "6.3.0" 4 | description: "A free library for arbitrary precision arithmetic, operating on signed integers, rational numbers, and floating-point numbers." 5 | license: "LGPL-3.0-or-later" 6 | homepage: "https://gmplib.org" 7 | dependencies: 8 | build: 9 | - m4 # Required by the configure script 10 | 11 | environment: 12 | defaults: true 13 | 14 | source: 15 | fetch: 16 | url: "https://gmplib.org/download/gmp/gmp-6.3.0.tar.xz" 17 | 18 | build: 19 | system: autotools 20 | args: 21 | # Enable the C++ interface (gmpxx), which is required by other 22 | # libraries like MPFR and MPC that depend on GMP. 23 | - "--enable-cxx" 24 | # Build shared libraries for dynamic linking. 25 | - "--enable-shared" 26 | # Disable the static library to save space and avoid linking issues. 
27 | - "--disable-static" -------------------------------------------------------------------------------- /crates/builder/src/artifact_qa/traits.rs: -------------------------------------------------------------------------------- 1 | //! Generic abstractions for post‑build actions. 2 | 3 | use crate::artifact_qa::diagnostics::DiagnosticCollector; 4 | use crate::artifact_qa::reports::Report; 5 | use crate::{BuildContext, BuildEnvironment}; 6 | use sps2_errors::Error; 7 | use std::future::Future; 8 | 9 | pub trait Action: Send + Sync + 'static { 10 | /// Human readable label (emitted in events). 11 | const NAME: &'static str; 12 | 13 | /// Execute the action and return a [`Report`]. 14 | /// Validators should ignore the findings parameter. 15 | /// Patchers may use the findings to target specific files. 16 | fn run( 17 | ctx: &BuildContext, 18 | env: &BuildEnvironment, 19 | findings: Option<&DiagnosticCollector>, 20 | ) -> impl Future> + Send; 21 | } 22 | 23 | pub trait Validator: Action {} 24 | pub trait Patcher: Action {} 25 | -------------------------------------------------------------------------------- /recipes/libxml2-2.13.8.yml: -------------------------------------------------------------------------------- 1 | metadata: 2 | name: libxml2 3 | version: "2.13.8" 4 | description: "XML parsing library with support for reading, modifying and writing XML and HTML files" 5 | homepage: "https://gitlab.gnome.org/GNOME/libxml2" 6 | license: "MIT" 7 | dependencies: 8 | runtime: 9 | - zlib 10 | - xz 11 | build: 12 | - pkgconf 13 | 14 | environment: 15 | defaults: true 16 | 17 | source: 18 | fetch: 19 | url: "https://download.gnome.org/sources/libxml2/2.13/libxml2-2.13.8.tar.xz" 20 | 21 | build: 22 | steps: 23 | - shell: | 24 | ./configure --prefix=${PREFIX} \ 25 | --with-zlib=${PREFIX} \ 26 | --with-lzma=${PREFIX} \ 27 | --without-python \ 28 | --disable-static \ 29 | --enable-shared 30 | make -j$(nproc) 31 | make install 32 | 33 | post: 34 | fix_permissions: true 35 | patch_rpaths: default 36 | -------------------------------------------------------------------------------- /scripts/sqlx/prepare.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -euo pipefail 3 | 4 | # Generates sqlx-data.json for offline compilation using the v2 state schema. 5 | 6 | SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" 7 | REPO_ROOT="$(cd "${SCRIPT_DIR}/.." && pwd)" 8 | MIGRATIONS_DIR="${REPO_ROOT}/crates/state/migrations" 9 | TARGET_DIR="${REPO_ROOT}/target" 10 | DB_FILE="${TARGET_DIR}/sqlx-dev.sqlite" 11 | 12 | mkdir -p "${TARGET_DIR}" 13 | rm -f "${DB_FILE}" 14 | touch "${DB_FILE}" 15 | 16 | export DATABASE_URL="sqlite://${DB_FILE}" 17 | export SQLX_OFFLINE_DIR="${REPO_ROOT}/.sqlx" 18 | mkdir -p "${SQLX_OFFLINE_DIR}" 19 | 20 | # Apply migrations (if any) before preparing offline data. 21 | if command -v sqlx >/dev/null 2>&1; then 22 | sqlx migrate run --source "${MIGRATIONS_DIR}" --database-url "${DATABASE_URL}" >/dev/null 23 | else 24 | echo "sqlx CLI not found; skipping migration run" >&2 25 | fi 26 | 27 | cargo sqlx prepare --workspace -- --lib 28 | -------------------------------------------------------------------------------- /crates/builder/src/artifact_qa/patchers/mod.rs: -------------------------------------------------------------------------------- 1 | //! Registry of all post-build patcher modules. 
2 | 3 | pub mod binary_string; 4 | pub mod codesigner; 5 | pub mod headers; 6 | pub mod la_cleaner; 7 | pub mod object_cleaner; 8 | pub mod permissions; 9 | pub mod pkgconfig; 10 | pub mod placeholder; 11 | pub mod python_bytecode_cleanup; 12 | pub mod python_isolation; 13 | pub mod rpath; 14 | 15 | // Re-export the concrete types so callers can use 16 | // `patchers::PlaceholderPatcher`, etc. 17 | pub use binary_string::BinaryStringPatcher; 18 | pub use codesigner::CodeSigner; 19 | pub use headers::HeaderPatcher; 20 | pub use la_cleaner::LaFileCleaner; 21 | pub use object_cleaner::ObjectFileCleaner; 22 | pub use permissions::PermissionsFixer; 23 | pub use pkgconfig::PkgConfigPatcher; 24 | pub use placeholder::PlaceholderPatcher; 25 | pub use python_bytecode_cleanup::PythonBytecodeCleanupPatcher; 26 | pub use python_isolation::PythonIsolationPatcher; 27 | pub use rpath::RPathPatcher; 28 | -------------------------------------------------------------------------------- /recipes/python-3.11.13.yaml: -------------------------------------------------------------------------------- 1 | metadata: 2 | name: python3.11 3 | version: "3.11.13" 4 | description: "High-level programming language" 5 | homepage: "https://www.python.org" 6 | license: "PSF-2.0" 7 | runtime_deps: 8 | - openssl 9 | - sqlite 10 | - xz 11 | - zlib 12 | - bzip2 13 | build_deps: 14 | - pkgconf 15 | 16 | environment: 17 | defaults: true 18 | 19 | source: 20 | fetch: 21 | url: "https://www.python.org/ftp/python/3.11.13/Python-3.11.13.tar.xz" 22 | 23 | build: 24 | steps: 25 | - shell: | 26 | ./configure --prefix=${PREFIX} \ 27 | --enable-optimizations \ 28 | --enable-loadable-sqlite-extensions \ 29 | --with-openssl=${PREFIX} \ 30 | --with-lto \ 31 | --enable-ipv6 \ 32 | --with-system-expat \ 33 | --with-dbmliborder=ndbm \ 34 | --without-ensurepip 35 | make 36 | make install 37 | 38 | post: 39 | fix_permissions: true -------------------------------------------------------------------------------- /recipes/python-3.9.21.yaml: -------------------------------------------------------------------------------- 1 | metadata: 2 | name: python3.9 3 | version: "3.9.21" 4 | description: "High-level programming language" 5 | homepage: "https://www.python.org" 6 | license: "PSF-2.0" 7 | runtime_deps: 8 | - openssl 9 | - sqlite 10 | - xz 11 | - zlib 12 | - bzip2 13 | build_deps: 14 | - pkgconf 15 | 16 | environment: 17 | defaults: true 18 | 19 | source: 20 | fetch: 21 | url: "https://www.python.org/ftp/python/3.9.21/Python-3.9.21.tar.xz" 22 | 23 | build: 24 | steps: 25 | - shell: | 26 | ./configure --prefix=${PREFIX} \ 27 | --enable-optimizations \ 28 | --enable-loadable-sqlite-extensions \ 29 | --with-openssl=${PREFIX} \ 30 | --with-lto \ 31 | --enable-ipv6 \ 32 | --with-system-expat \ 33 | --with-dbmliborder=ndbm \ 34 | --without-ensurepip 35 | make 36 | make install 37 | 38 | post: 39 | fix_permissions: true 40 | -------------------------------------------------------------------------------- /crates/net/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "sps2-net" 3 | version = "0.1.0" 4 | edition.workspace = true 5 | rust-version.workspace = true 6 | authors.workspace = true 7 | license.workspace = true 8 | repository.workspace = true 9 | 10 | [dependencies] 11 | sps2-errors = { path = "../errors" } 12 | sps2-types = { path = "../types" } 13 | sps2-events = { path = "../events" } 14 | sps2-hash = { path = "../hash" } 15 | sps2-config = { path = "../config" } 16 | blake3 = { 
workspace = true } 17 | tokio = { workspace = true, features = ["fs"] } 18 | reqwest = { workspace = true } 19 | futures = "0.3.31" 20 | bytes = "1.10.1" 21 | url = "2.5.7" 22 | rand = "0.9.2" 23 | serde = { workspace = true } 24 | serde_json = { workspace = true } 25 | minisign-verify = "0.2.4" 26 | minisign = "0.8.0" 27 | hex = "0.4.3" 28 | base64 = "0.22.1" 29 | 30 | [dev-dependencies] 31 | tempfile = { workspace = true } 32 | httpmock = "0.8.2" 33 | tokio = { workspace = true, features = ["test-util", "macros"] } 34 | -------------------------------------------------------------------------------- /recipes/python-3.12.11.yaml: -------------------------------------------------------------------------------- 1 | metadata: 2 | name: python3.12 3 | version: "3.12.11" 4 | description: "High-level programming language" 5 | homepage: "https://www.python.org" 6 | license: "PSF-2.0" 7 | runtime_deps: 8 | - openssl 9 | - sqlite 10 | - xz 11 | - zlib 12 | - bzip2 13 | build_deps: 14 | - pkgconf 15 | 16 | environment: 17 | defaults: true 18 | 19 | source: 20 | fetch: 21 | url: "https://www.python.org/ftp/python/3.12.11/Python-3.12.11.tar.xz" 22 | 23 | build: 24 | steps: 25 | - shell: | 26 | ./configure --prefix=${PREFIX} \ 27 | --enable-optimizations \ 28 | --enable-loadable-sqlite-extensions \ 29 | --with-openssl=${PREFIX} \ 30 | --with-lto \ 31 | --enable-ipv6 \ 32 | --with-system-expat \ 33 | --with-dbmliborder=ndbm \ 34 | --without-ensurepip 35 | make 36 | make install 37 | 38 | post: 39 | fix_permissions: true 40 | -------------------------------------------------------------------------------- /crates/store/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "sps2-store" 3 | version = "0.1.0" 4 | edition.workspace = true 5 | rust-version.workspace = true 6 | authors.workspace = true 7 | license.workspace = true 8 | repository.workspace = true 9 | 10 | [dependencies] 11 | sps2-errors = { path = "../errors" } 12 | sps2-types = { path = "../types" } 13 | sps2-hash = { path = "../hash" } 14 | sps2-platform = { path = "../platform" } 15 | sps2-events = { path = "../events" } 16 | sps2-resolver = { path = "../resolver" } 17 | tokio = { workspace = true, features = ["fs", "io-util"] } 18 | tar = "0.4.44" 19 | async-compression = { version = "0.4.33", features = ["tokio", "zstd"] } 20 | tokio-util = { version = "0.7.17", features = ["compat", "io", "io-util"] } 21 | tempfile = { workspace = true } 22 | uuid = { version = "1.18.1", features = ["v4"] } 23 | serde = { workspace = true } 24 | serde_json = { workspace = true } 25 | 26 | [dev-dependencies] 27 | tokio = { workspace = true, features = ["test-util", "macros"] } 28 | -------------------------------------------------------------------------------- /crates/state/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "sps2-state" 3 | version = "0.1.0" 4 | edition.workspace = true 5 | rust-version.workspace = true 6 | authors.workspace = true 7 | license.workspace = true 8 | repository.workspace = true 9 | build = "build.rs" 10 | 11 | [dependencies] 12 | sps2-errors = { path = "../errors" } 13 | sps2-types = { path = "../types" } 14 | sps2-events = { path = "../events" } 15 | sps2-hash = { path = "../hash" } 16 | sps2-resolver = { path = "../resolver" } 17 | sps2-store = { path = "../store" } 18 | sqlx = { workspace = true } 19 | tokio = { workspace = true, features = ["fs"] } 20 | sps2-platform = { path = "../platform" 
} 21 | uuid = { workspace = true } 22 | chrono = { workspace = true } 23 | serde = { workspace = true } 24 | serde_json = { workspace = true } 25 | 26 | [dev-dependencies] 27 | tempfile = { workspace = true } 28 | 29 | [[test]] 30 | name = "recovery" 31 | path = "tests/recovery.rs" 32 | 33 | [features] 34 | default = ["runtime-queries"] 35 | runtime-queries = [] 36 | -------------------------------------------------------------------------------- /recipes/binutils-2.44.0.yml: -------------------------------------------------------------------------------- 1 | metadata: 2 | name: binutils 3 | version: "2.44.0" 4 | description: "The GNU Binutils are a collection of binary tools, including the linker, assembler, and other tools for object file manipulation." 5 | license: "GPL-3.0-or-later" 6 | homepage: "https://www.gnu.org/software/binutils/" 7 | dependencies: 8 | build: 9 | - zlib # For handling compressed debug sections 10 | 11 | environment: 12 | defaults: true 13 | 14 | source: 15 | fetch: 16 | url: "https://ftp.gnu.org/gnu/binutils/binutils-2.44.tar.gz" 17 | 18 | build: 19 | system: autotools 20 | args: 21 | # Use the system's zlib library. 22 | - "--with-system-zlib" 23 | # Build shared libraries, which are needed by other tools. 24 | - "--enable-shared" 25 | # Disable building for multiple architectures to keep the package focused. 26 | - "--disable-multilib" 27 | # Disable Native Language Support to reduce package size. 28 | - "--disable-nls" 29 | # Disable CTF support (not available on macOS) 30 | - "--disable-libctf" -------------------------------------------------------------------------------- /crates/install/src/lib.rs: -------------------------------------------------------------------------------- 1 | #![warn(clippy::pedantic)] 2 | #![deny(clippy::all)] 3 | 4 | //! Package installation with atomic updates for sps2 5 | //! 6 | //! This crate handles the installation of packages with atomic 7 | //! state transitions, rollback capabilities, and parallel execution. 8 | 9 | #[macro_use] 10 | mod macros; 11 | mod api; 12 | mod atomic; 13 | mod installer; 14 | mod operations; 15 | mod prepare; 16 | //mod pipeline; 17 | //pub mod validation; 18 | 19 | pub use atomic::{AtomicInstaller, StateTransition}; 20 | pub use installer::Installer; 21 | pub use operations::{InstallOperation, UninstallOperation, UpdateOperation}; 22 | pub use prepare::{ExecutionContext, ParallelExecutor}; 23 | 24 | // Re-export the public API surface from api module 25 | pub use api::config::{InstallConfig, SecurityPolicy}; 26 | pub use api::context::{InstallContext, UninstallContext, UpdateContext}; 27 | pub use api::result::{InstallResult, StateInfo}; 28 | pub use api::types::PreparedPackage; 29 | 30 | // Re-export EventSender for use by macros and contexts 31 | pub use sps2_events::EventSender; 32 | -------------------------------------------------------------------------------- /crates/builder/src/stages/build.rs: -------------------------------------------------------------------------------- 1 | //! 
Build stage types and operations 2 | 3 | use serde::{Deserialize, Serialize}; 4 | 5 | /// Build commands that can be executed 6 | #[derive(Debug, Clone, Serialize, Deserialize)] 7 | pub enum BuildCommand { 8 | /// Run configure script 9 | Configure { args: Vec<String> }, 10 | 11 | /// Run make 12 | Make { args: Vec<String> }, 13 | 14 | /// Run autotools build 15 | Autotools { args: Vec<String> }, 16 | 17 | /// Run `CMake` build 18 | Cmake { args: Vec<String> }, 19 | 20 | /// Run Meson build 21 | Meson { args: Vec<String> }, 22 | 23 | /// Run Cargo build 24 | Cargo { args: Vec<String> }, 25 | 26 | /// Run Go build 27 | Go { args: Vec<String> }, 28 | 29 | /// Run Python build 30 | Python { args: Vec<String> }, 31 | 32 | /// Run Node.js build 33 | NodeJs { args: Vec<String> }, 34 | 35 | /// Run arbitrary command 36 | Command { program: String, args: Vec<String> }, 37 | } 38 | 39 | // Note: ParsedBuild is recipe::model::Build 40 | // Note: ParsedStep is recipe::model::ParsedStep 41 | -------------------------------------------------------------------------------- /crates/platform/src/lib.rs: -------------------------------------------------------------------------------- 1 | #![warn(mismatched_lifetime_syntaxes)] 2 | //! Platform abstraction layer for macOS ARM64 package manager operations. 3 | //! 4 | //! This crate provides a unified interface for platform-specific operations including: 5 | //! - Binary operations (install_name_tool, otool, codesign) 6 | //! - Filesystem operations (APFS clonefile, atomic operations) 7 | //! - Process execution with proper event emission and error handling 8 | //! 9 | //! The platform abstraction integrates seamlessly with the existing event system 10 | //! and error handling patterns in the sps2 codebase. 11 | 12 | pub mod binary; 13 | pub mod core; 14 | pub mod filesystem; 15 | pub mod fs; 16 | pub mod implementations; 17 | pub mod process; 18 | 19 | pub use core::{ 20 | Platform, PlatformCapabilities, PlatformContext, PlatformManager, ToolInfo, ToolRegistry, 21 | }; 22 | pub use implementations::macos::MacOSPlatform; 23 | 24 | /// Re-export commonly used types 25 | pub use binary::BinaryOperations; 26 | pub use filesystem::FilesystemOperations; 27 | pub use fs as filesystem_helpers; 28 | pub use process::ProcessOperations; 29 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | push: 5 | branches: [main, develop] 6 | pull_request: 7 | branches: [main] 8 | 9 | env: 10 | CARGO_TERM_COLOR: always 11 | RUST_BACKTRACE: 1 12 | 13 | jobs: 14 | test: 15 | name: Check 16 | runs-on: macos-latest 17 | 18 | steps: 19 | - uses: actions/checkout@v5 20 | 21 | - name: Install Rust 22 | uses: dtolnay/rust-toolchain@stable 23 | with: 24 | toolchain: 1.90.0 25 | targets: aarch64-apple-darwin 26 | components: rustfmt, clippy 27 | 28 | - name: Cache dependencies 29 | uses: actions/cache@v4 30 | with: 31 | path: | 32 | ~/.cargo/registry 33 | ~/.cargo/git 34 | target 35 | key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }} 36 | 37 | - name: Check formatting 38 | run: cargo fmt --all -- --check 39 | 40 | - name: Run clippy 41 | run: cargo clippy --all-targets --all-features -- -D warnings 42 | 43 | - name: Build release 44 | run: cargo build --all --release 45 | -------------------------------------------------------------------------------- /crates/state/tests/migration_smoke.rs: -------------------------------------------------------------------------------- 1 | use
tempfile::TempDir; 2 | 3 | #[tokio::test] 4 | async fn migrations_apply_and_expose_core_tables() { 5 | let temp_dir = TempDir::new().expect("temp dir"); 6 | let db_path = temp_dir.path().join("state.sqlite"); 7 | 8 | let pool = sps2_state::create_pool(&db_path) 9 | .await 10 | .expect("create pool"); 11 | sps2_state::run_migrations(&pool) 12 | .await 13 | .expect("run migrations"); 14 | 15 | let mut conn = pool.acquire().await.expect("acquire connection"); 16 | 17 | for table in [ 18 | "states", 19 | "state_packages", 20 | "package_versions", 21 | "cas_objects", 22 | "package_files", 23 | "file_verification", 24 | ] { 25 | let exists: Option = 26 | sqlx::query_scalar("SELECT 1 FROM sqlite_master WHERE type = 'table' AND name = ?1") 27 | .bind(table) 28 | .fetch_optional(&mut *conn) 29 | .await 30 | .expect("check table existence"); 31 | assert!(exists.is_some(), "expected table `{table}` to exist"); 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /recipes/python-3.13.3.yaml: -------------------------------------------------------------------------------- 1 | metadata: 2 | name: python 3 | version: "3.13.3" 4 | description: "High-level programming language" 5 | homepage: "https://www.python.org" 6 | license: "PSF-2.0" 7 | runtime_deps: 8 | - openssl 9 | - sqlite 10 | - xz 11 | - zlib 12 | - bzip2 13 | build_deps: 14 | - pkgconf 15 | 16 | environment: 17 | defaults: true 18 | 19 | source: 20 | fetch: 21 | url: "https://www.python.org/ftp/python/3.13.3/Python-3.13.3.tar.xz" 22 | 23 | build: 24 | steps: 25 | - shell: | 26 | ./configure --prefix=${PREFIX} \ 27 | --enable-optimizations \ 28 | --enable-loadable-sqlite-extensions \ 29 | --with-openssl=${PREFIX} \ 30 | --with-lto \ 31 | --enable-ipv6 \ 32 | --with-system-expat \ 33 | --with-dbmliborder=ndbm \ 34 | --without-ensurepip 35 | make 36 | make install 37 | 38 | post: 39 | commands: 40 | - shell: | 41 | # Create unversioned symlinks 42 | cd ${DESTDIR}${PREFIX}/bin 43 | ln -sf python3.13 python3 44 | ln -sf python3.13 python 45 | 46 | fix_permissions: true 47 | -------------------------------------------------------------------------------- /crates/ops/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "sps2-ops" 3 | version = "0.1.0" 4 | edition.workspace = true 5 | rust-version.workspace = true 6 | authors.workspace = true 7 | license.workspace = true 8 | repository.workspace = true 9 | 10 | [dependencies] 11 | sps2-errors = { path = "../errors" } 12 | sps2-types = { path = "../types" } 13 | sps2-events = { path = "../events" } 14 | sps2-net = { path = "../net" } 15 | sps2-resolver = { path = "../resolver" } 16 | sps2-state = { path = "../state" } 17 | sps2-store = { path = "../store" } 18 | sps2-install = { path = "../install" } 19 | sps2-builder = { path = "../builder" } 20 | sps2-index = { path = "../index" } 21 | sps2-config = { path = "../config" } 22 | sps2-hash = { path = "../hash" } 23 | sps2-guard = { path = "../guard" } 24 | serde = { workspace = true } 25 | serde_json = { workspace = true } 26 | tokio = { workspace = true, features = ["fs"] } 27 | uuid = { workspace = true } 28 | chrono = { workspace = true } 29 | tempfile = { workspace = true } 30 | minisign-verify = "0.2.4" 31 | hex = "0.4.3" 32 | walkdir = "2.5.0" 33 | toml = "0.9.8" 34 | base64 = "0.22.1" 35 | dialoguer = "0.12.0" 36 | 37 | [dev-dependencies] 38 | tempfile = { workspace = true } 39 | 
-------------------------------------------------------------------------------- /crates/builder/src/utils/timeout.rs: -------------------------------------------------------------------------------- 1 | //! Timeout utilities for build operations 2 | 3 | use sps2_errors::{BuildError, Error}; 4 | use std::future::Future; 5 | use std::time::Duration; 6 | 7 | /// Execute a future with a timeout 8 | pub async fn with_timeout<T, F>( 9 | future: F, 10 | timeout_seconds: u64, 11 | package_name: &str, 12 | ) -> Result<T, Error> 13 | where 14 | F: Future<Output = Result<T, Error>>, 15 | { 16 | tokio::time::timeout(Duration::from_secs(timeout_seconds), future) 17 | .await 18 | .map_err(|_| -> Error { 19 | BuildError::BuildTimeout { 20 | package: package_name.to_string(), 21 | timeout_seconds, 22 | } 23 | .into() 24 | })? 25 | } 26 | 27 | /// Execute a future with an optional timeout 28 | pub async fn with_optional_timeout<T, F>( 29 | future: F, 30 | timeout_seconds: Option<u64>, 31 | package_name: &str, 32 | ) -> Result<T, Error> 33 | where 34 | F: Future<Output = Result<T, Error>>, 35 | { 36 | if let Some(timeout) = timeout_seconds { 37 | with_timeout(future, timeout, package_name).await 38 | } else { 39 | future.await 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /crates/store/src/manifest_io.rs: -------------------------------------------------------------------------------- 1 | #![deny(clippy::pedantic, unsafe_code)] 2 | #![allow(clippy::module_name_repetitions)] 3 | 4 | //! Manifest I/O helpers colocated with the store. 5 | 6 | use sps2_errors::{Error, PackageError}; 7 | use sps2_types::Manifest; 8 | use std::path::Path; 9 | 10 | /// Read `manifest.toml` from a path 11 | /// 12 | /// # Errors 13 | /// Returns an error if reading or parsing the manifest fails. 14 | pub async fn read_manifest(path: &Path) -> Result<Manifest, Error> { 15 | let content = 16 | tokio::fs::read_to_string(path) 17 | .await 18 | .map_err(|e| PackageError::InvalidManifest { 19 | message: format!("failed to read manifest: {e}"), 20 | })?; 21 | Manifest::from_toml(&content) 22 | } 23 | 24 | /// Write `manifest.toml` to a path 25 | /// 26 | /// # Errors 27 | /// Returns an error if serialization or writing fails. 28 | pub async fn write_manifest(path: &Path, manifest: &Manifest) -> Result<(), Error> { 29 | let content = manifest.to_toml()?; 30 | Ok(tokio::fs::write(path, content) 31 | .await 32 | .map_err(|e| PackageError::InvalidManifest { 33 | message: format!("failed to write manifest: {e}"), 34 | })?) 35 | } 36 | -------------------------------------------------------------------------------- /crates/net/src/download/validation.rs: -------------------------------------------------------------------------------- 1 | //!
URL validation and HTTP response validation for downloads 2 | 3 | use sps2_errors::{Error, NetworkError}; 4 | use url::Url; 5 | 6 | /// Validate URL and check for supported protocols 7 | pub(super) fn validate_url(url: &str) -> Result<String, Error> { 8 | let parsed = Url::parse(url).map_err(|e| NetworkError::InvalidUrl(e.to_string()))?; 9 | 10 | match parsed.scheme() { 11 | "http" | "https" | "file" => Ok(url.to_string()), 12 | scheme => Err(NetworkError::UnsupportedProtocol { 13 | protocol: scheme.to_string(), 14 | } 15 | .into()), 16 | } 17 | } 18 | 19 | /// Validate HTTP response for download 20 | pub(super) fn validate_response( 21 | response: &reqwest::Response, 22 | is_resume: bool, 23 | ) -> Result<(), Error> { 24 | let status = response.status(); 25 | 26 | if is_resume { 27 | if status != reqwest::StatusCode::PARTIAL_CONTENT { 28 | return Err(NetworkError::PartialContentNotSupported.into()); 29 | } 30 | } else if !status.is_success() { 31 | return Err(NetworkError::HttpError { 32 | status: status.as_u16(), 33 | message: status.to_string(), 34 | } 35 | .into()); 36 | } 37 | 38 | Ok(()) 39 | } 40 | -------------------------------------------------------------------------------- /crates/builder/src/stages/source.rs: -------------------------------------------------------------------------------- 1 | //! Source stage types and operations 2 | 3 | use serde::{Deserialize, Serialize}; 4 | 5 | /// Source operations that can be executed 6 | #[derive(Debug, Clone, Serialize, Deserialize)] 7 | pub enum SourceStep { 8 | /// Clean the source directory 9 | Cleanup, 10 | 11 | /// Fetch file from URL 12 | Fetch { 13 | url: String, 14 | extract_to: Option<String>, 15 | }, 16 | 17 | /// Fetch with MD5 verification 18 | FetchMd5 { 19 | url: String, 20 | md5: String, 21 | extract_to: Option<String>, 22 | }, 23 | 24 | /// Fetch with SHA256 verification 25 | FetchSha256 { 26 | url: String, 27 | sha256: String, 28 | extract_to: Option<String>, 29 | }, 30 | 31 | /// Fetch with BLAKE3 verification 32 | FetchBlake3 { 33 | url: String, 34 | blake3: String, 35 | extract_to: Option<String>, 36 | }, 37 | 38 | /// Extract downloaded archives 39 | Extract { extract_to: Option<String> }, 40 | 41 | /// Clone from git 42 | Git { url: String, ref_: String }, 43 | 44 | /// Copy local files 45 | Copy { src_path: Option<String> }, 46 | 47 | /// Apply a patch 48 | ApplyPatch { path: String }, 49 | } 50 | 51 | // Note: ParsedSource is recipe::model::Source 52 | -------------------------------------------------------------------------------- /crates/install/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "sps2-install" 3 | version = "0.1.0" 4 | edition.workspace = true 5 | rust-version.workspace = true 6 | authors.workspace = true 7 | license.workspace = true 8 | repository.workspace = true 9 | 10 | [dependencies] 11 | sps2-errors = { path = "../errors" } 12 | sps2-types = { path = "../types" } 13 | paste = "1.0.15" 14 | sps2-events = { path = "../events" } 15 | sps2-hash = { path = "../hash" } 16 | sps2-net = { path = "../net" } 17 | sps2-resolver = { path = "../resolver" } 18 | sps2-platform = { path = "../platform" } 19 | sps2-state = { path = "../state" } 20 | sps2-store = { path = "../store" } 21 | sps2-config = { path = "../config" } 22 | serde = { workspace = true } 23 | tokio = { workspace = true, features = ["fs", "process", "io-util", "time"] } 24 | uuid = { workspace = true } 25 | sqlx = { workspace = true } 26 | dashmap = { workspace = true } 27 | crossbeam = { workspace = true } 28 | chrono = {
workspace = true } 29 | tempfile = { workspace = true } 30 | libc = "0.2.177" 31 | toml = { workspace = true } 32 | async-compression = { version = "0.4.33", features = ["tokio", "zstd"] } 33 | tar = { workspace = true } 34 | futures = { workspace = true } 35 | blake3 = { workspace = true } 36 | 37 | 38 | [dev-dependencies] 39 | tempfile = { workspace = true } 40 | sps2-index = { path = "../index" } 41 | toml = { workspace = true } 42 | -------------------------------------------------------------------------------- /crates/types/src/reports.rs: -------------------------------------------------------------------------------- 1 | //! Report type definitions for operations 2 | 3 | use crate::Version; 4 | use serde::{Deserialize, Serialize}; 5 | use std::path::PathBuf; 6 | use uuid::Uuid; 7 | 8 | /// Installation report 9 | #[derive(Clone, Debug, Serialize, Deserialize)] 10 | pub struct InstallReport { 11 | /// Packages that were installed 12 | pub installed: Vec<PackageChange>, 13 | /// Packages that were updated 14 | pub updated: Vec<PackageChange>, 15 | /// Packages that were removed 16 | pub removed: Vec<PackageChange>, 17 | /// New state ID 18 | pub state_id: Uuid, 19 | /// Total execution time 20 | pub duration_ms: u64, 21 | } 22 | 23 | /// Build report 24 | #[derive(Clone, Debug, Serialize, Deserialize)] 25 | pub struct BuildReport { 26 | /// Package that was built 27 | pub package: String, 28 | /// Version that was built 29 | pub version: Version, 30 | /// Output file path 31 | pub output_path: PathBuf, 32 | /// Build duration 33 | pub duration_ms: u64, 34 | } 35 | 36 | /// Package change for reports 37 | #[derive(Clone, Debug, Serialize, Deserialize)] 38 | pub struct PackageChange { 39 | /// Package name 40 | pub name: String, 41 | /// Previous version 42 | pub from_version: Option<Version>, 43 | /// New version 44 | pub to_version: Option<Version>, 45 | /// Size in bytes 46 | pub size: Option<u64>, 47 | } 48 | -------------------------------------------------------------------------------- /apps/sps2/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "sps2" 3 | version = "0.1.0" 4 | edition.workspace = true 5 | rust-version.workspace = true 6 | authors.workspace = true 7 | license.workspace = true 8 | repository.workspace = true 9 | description = "Modern package manager for macOS ARM64" 10 | 11 | [[bin]] 12 | name = "sps2" 13 | path = "src/main.rs" 14 | 15 | [dependencies] 16 | sps2-ops = { path = "../../crates/ops" } 17 | sps2-events = { path = "../../crates/events" } 18 | sps2-errors = { path = "../../crates/errors" } 19 | sps2-types = { path = "../../crates/types" } 20 | sps2-config = { path = "../../crates/config" } 21 | sps2-net = { path = "../../crates/net" } 22 | sps2-resolver = { path = "../../crates/resolver" } 23 | sps2-state = { path = "../../crates/state" } 24 | sps2-store = { path = "../../crates/store" } 25 | sps2-index = { path = "../../crates/index" } 26 | sps2-builder = { path = "../../crates/builder" } 27 | sps2-platform = { path = "../../crates/platform" } 28 | 29 | clap = { version = "4.5.51", features = ["derive", "color"] } 30 | tokio = { workspace = true, features = ["full"] } 31 | tracing = "0.1.41" 32 | tracing-subscriber = { version = "0.3.20", features = ["env-filter", "json"] } 33 | serde_json = { workspace = true } 34 | comfy-table = "7.2.1" 35 | console = "0.16.1" 36 | chrono = { workspace = true } 37 | uuid = { workspace = true } 38 | 39 | [dev-dependencies] 40 | tempfile = { workspace = true } 41 |
-------------------------------------------------------------------------------- /crates/builder/src/packaging/compression.rs: -------------------------------------------------------------------------------- 1 | //! Zstandard compression for sps2 packages 2 | //! 3 | //! This module applies a fixed Zstandard compression level when creating 4 | //! package archives, ensuring consistent output across builds. 5 | 6 | use sps2_errors::Error; 7 | use std::path::Path; 8 | 9 | const DEFAULT_LEVEL: i32 = 9; 10 | 11 | /// Compress tar archive with zstd using async-compression 12 | /// Compress a tar file using Zstandard compression 13 | /// 14 | /// # Errors 15 | /// 16 | /// Returns an error if file I/O operations fail or compression fails. 17 | pub async fn compress_with_zstd(tar_path: &Path, output_path: &Path) -> Result<(), Error> { 18 | use async_compression::tokio::write::ZstdEncoder; 19 | use async_compression::Level; 20 | use tokio::fs::File; 21 | use tokio::io::{AsyncWriteExt, BufReader}; 22 | 23 | let input_file = File::open(tar_path).await?; 24 | let output_file = File::create(output_path).await?; 25 | 26 | // Create zstd encoder with default compression level 27 | let level = Level::Precise(DEFAULT_LEVEL); 28 | let mut encoder = ZstdEncoder::with_quality(output_file, level); 29 | 30 | // Copy tar file through zstd encoder 31 | let mut reader = BufReader::new(input_file); 32 | tokio::io::copy(&mut reader, &mut encoder).await?; 33 | 34 | // Ensure all data is written 35 | encoder.shutdown().await?; 36 | 37 | Ok(()) 38 | } 39 | -------------------------------------------------------------------------------- /crates/config/src/repository.rs: -------------------------------------------------------------------------------- 1 | use serde::{Deserialize, Serialize}; 2 | 3 | #[derive(Debug, Clone, Serialize, Deserialize)] 4 | pub struct RepositoryConfig { 5 | pub url: String, 6 | #[serde(default = "default_priority")] 7 | pub priority: u32, 8 | #[serde(default = "default_algorithm")] 9 | pub algorithm: String, // "minisign" | "openpgp" (future) 10 | #[serde(default)] 11 | pub key_ids: Vec<String>, 12 | } 13 | 14 | #[derive(Debug, Clone, Default, Serialize, Deserialize)] 15 | pub struct Repositories { 16 | #[serde(default)] 17 | pub fast: Option<RepositoryConfig>, 18 | #[serde(default)] 19 | pub slow: Option<RepositoryConfig>, 20 | #[serde(default)] 21 | pub stable: Option<RepositoryConfig>, 22 | #[serde(default)] 23 | pub extras: std::collections::HashMap<String, RepositoryConfig>, 24 | } 25 | 26 | impl Repositories { 27 | #[must_use] 28 | pub fn get_all(&self) -> Vec<&RepositoryConfig> { 29 | let mut all = Vec::new(); 30 | if let Some(fast) = &self.fast { 31 | all.push(fast); 32 | } 33 | if let Some(slow) = &self.slow { 34 | all.push(slow); 35 | } 36 | if let Some(stable) = &self.stable { 37 | all.push(stable); 38 | } 39 | all.extend(self.extras.values()); 40 | all 41 | } 42 | } 43 | 44 | fn default_priority() -> u32 { 45 | 1 46 | } 47 | fn default_algorithm() -> String { 48 | "minisign".to_string() 49 | } 50 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright 2025 sps2 Contributors 4 | 5 | All rights reserved. 6 | 7 | Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 8 | 9 | 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 10 | 2.
Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 11 | 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. 12 | 13 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS” AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 14 | -------------------------------------------------------------------------------- /crates/builder/src/utils/format.rs: -------------------------------------------------------------------------------- 1 | /// File size and formatting utilities 2 | use sps2_errors::{BuildError, Error}; 3 | use std::path::Path; 4 | use tokio::fs::File; 5 | use tokio::io::AsyncReadExt; 6 | 7 | /// Information about detected compression format 8 | #[derive(Clone, Debug, PartialEq)] 9 | pub struct CompressionFormatInfo { 10 | /// Estimated total compressed size 11 | pub compressed_size: u64, 12 | } 13 | 14 | /// zstd magic number (4 bytes): 0x28B52FFD 15 | const ZSTD_MAGIC: [u8; 4] = [0x28, 0xB5, 0x2F, 0xFD]; 16 | 17 | /// Detect the compression format of a .sp package file 18 | /// 19 | /// # Errors 20 | /// 21 | /// Returns an error if: 22 | /// - The file cannot be opened or read 23 | /// - The file is not a valid zstd-compressed package 24 | /// - I/O operations fail during scanning 25 | pub async fn detect_compression_format(file_path: &Path) -> Result<CompressionFormatInfo, Error> { 26 | let mut file = File::open(file_path).await?; 27 | let file_size = file.metadata().await?.len(); 28 | 29 | // Read the first 4 bytes to verify this is a zstd file 30 | let mut magic_bytes = [0u8; 4]; 31 | file.read_exact(&mut magic_bytes).await?; 32 | 33 | if magic_bytes != ZSTD_MAGIC { 34 | return Err(BuildError::Failed { 35 | message: format!( 36 | "Invalid package format: expected zstd magic bytes, got {magic_bytes:?}" 37 | ), 38 | } 39 | .into()); 40 | } 41 | 42 | Ok(CompressionFormatInfo { 43 | compressed_size: file_size, 44 | }) 45 | } 46 | -------------------------------------------------------------------------------- /recipes/curl-8.14.1.yml: -------------------------------------------------------------------------------- 1 | metadata: 2 | name: curl 3 | version: "8.14.1" 4 | description: "A command-line tool and library for transferring data with URL syntax."
5 | license: "CUSTOM" # MIT-like license, see LICENSES/curl.txt 6 | homepage: "https://curl.se" 7 | dependencies: 8 | runtime: 9 | - openssl 10 | - zlib 11 | - nghttp2 12 | - brotli 13 | - libssh2 14 | - libidn2 15 | - libpsl 16 | 17 | environment: 18 | defaults: true 19 | 20 | source: 21 | fetch: 22 | url: "https://github.com/curl/curl/releases/download/curl-8_14_1/curl-8.14.1.tar.bz2" 23 | 24 | build: 25 | system: cmake 26 | args: 27 | # Standard release build flags 28 | - "-DCMAKE_BUILD_TYPE=Release" 29 | - "-GNinja" 30 | # Build shared libraries, which is common for system packages 31 | - "-DBUILD_SHARED_LIBS=ON" 32 | # Explicitly disable building static libs to save time and space 33 | - "-DBUILD_STATIC_LIBS=OFF" 34 | # Enable essential features 35 | - "-DCURL_USE_OPENSSL=ON" 36 | - "-DCURL_ZLIB=ON" 37 | - "-DUSE_NGHTTP2=ON" # For HTTP/2 support 38 | - "-DENABLE_IPV6=ON" # Enable IPv6 support 39 | - "-DCURL_USE_LIBSSH2=ON" # SSH support 40 | - "-DUSE_LIBIDN2=ON" # International domain names 41 | - "-DCURL_BROTLI=ON" # Brotli compression 42 | - "-DCURL_USE_LIBPSL=ON" # Public suffix list 43 | # Disable features not typically needed for a runtime package 44 | - "-DBUILD_TESTING=OFF" 45 | - "-DENABLE_CURL_MANUAL=OFF" 46 | 47 | post: 48 | patch_rpaths: absolute 49 | 50 | -------------------------------------------------------------------------------- /crates/install/src/macros.rs: -------------------------------------------------------------------------------- 1 | //! Macros for context builder helpers 2 | 3 | #[macro_export] 4 | macro_rules! context_add_package_method { 5 | ($name:ident, $pkg_type:ty) => { 6 | impl $name { 7 | /// Add package to the context 8 | #[must_use] 9 | pub fn add_package(mut self, package: $pkg_type) -> Self { 10 | self.packages.push(package); 11 | self 12 | } 13 | } 14 | }; 15 | } 16 | 17 | #[macro_export] 18 | macro_rules! context_builder { 19 | ($name:ident { $($field:ident: $ty:ty),* $(,)? }) => { 20 | paste::paste! { 21 | impl $name { 22 | /// Create a new context with default values 23 | pub fn new() -> Self { 24 | Self { 25 | $($field: Default::default(),)* 26 | event_sender: None, 27 | } 28 | } 29 | 30 | $( #[must_use] 31 | pub fn [](mut self, value: $ty) -> Self { 32 | self.$field = value; 33 | self 34 | } )* 35 | 36 | /// Set the event sender for progress reporting 37 | #[must_use] 38 | pub fn with_event_sender(mut self, sender: sps2_events::EventSender) -> Self { 39 | self.event_sender = Some(sender); 40 | self 41 | } 42 | } 43 | 44 | impl Default for $name { 45 | fn default() -> Self { 46 | Self::new() 47 | } 48 | } 49 | } 50 | }; 51 | } 52 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | ### Bug Report 11 | 12 | **Describe the bug** 13 | A clear and concise description of what the bug is. Please include the primary error message you received. 14 | 15 | **To Reproduce** 16 | Please provide the exact steps to reproduce the behavior. 17 | 18 | 1. Command(s) run (e.g., `sps2 install "jq>=1.6"` or `sps2 build my-package.star`): 19 | ```sh 20 | # paste command here 21 | ``` 22 | 23 | 2. Full terminal output. Please run the command with the `--debug` flag and paste the complete output below. 24 | ```sh 25 | # paste full debug output here 26 | ``` 27 | 28 | 3. 
If this is a build-related bug (`sps2 build`), please provide the full `recipe.star` file. 29 | ```python 30 | # paste recipe.star here 31 | ``` 32 | 33 | **Expected behavior** 34 | A clear and concise description of what you expected to happen. 35 | 36 | **Environment (please complete the following information):** 37 | This information is critical for diagnosing the issue. Please paste the output of the following commands. 38 | 39 | * **`sps2 --version`**: 40 | ``` 41 | (paste output here) 42 | ``` 43 | * **`sps2 check-health --json`**: 44 | ``` 45 | (paste output here) 46 | ``` 47 | * **macOS Version**: (e.g., macOS Sonoma 14.4.1) 48 | * **Apple Silicon Chip**: (e.g., M1, M2 Pro, M3 Max) 49 | * **Shell**: (e.g., zsh, bash) 50 | 51 | **Additional context** 52 | Add any other context about the problem here. This could include: 53 | * Relevant sections of your `~/.config/sps2/config.toml` file. 54 | * Whether this is a regression (i.e., it worked in a previous version). 55 | * Any unusual setup in your environment. 56 | -------------------------------------------------------------------------------- /crates/install/src/api/config.rs: -------------------------------------------------------------------------------- 1 | /// Installer configuration 2 | #[derive(Clone, Debug)] 3 | pub struct InstallConfig { 4 | /// Maximum concurrent downloads 5 | pub max_concurrency: usize, 6 | /// Download timeout in seconds 7 | pub download_timeout: u64, 8 | /// Enable APFS optimizations 9 | pub enable_apfs: bool, 10 | /// State retention policy (number of states to keep) 11 | pub state_retention: usize, 12 | } 13 | 14 | impl Default for InstallConfig { 15 | fn default() -> Self { 16 | Self { 17 | max_concurrency: 4, 18 | download_timeout: 300, // 5 minutes 19 | enable_apfs: cfg!(target_os = "macos"), 20 | state_retention: 10, 21 | } 22 | } 23 | } 24 | 25 | impl InstallConfig { 26 | /// Create config with custom concurrency 27 | #[must_use] 28 | pub fn with_concurrency(mut self, max_concurrency: usize) -> Self { 29 | self.max_concurrency = max_concurrency; 30 | self 31 | } 32 | 33 | /// Set download timeout 34 | #[must_use] 35 | pub fn with_timeout(mut self, timeout_seconds: u64) -> Self { 36 | self.download_timeout = timeout_seconds; 37 | self 38 | } 39 | 40 | /// Enable/disable APFS optimizations 41 | #[must_use] 42 | pub fn with_apfs(mut self, enable: bool) -> Self { 43 | self.enable_apfs = enable; 44 | self 45 | } 46 | 47 | /// Set state retention policy 48 | #[must_use] 49 | pub fn with_retention(mut self, count: usize) -> Self { 50 | self.state_retention = count; 51 | self 52 | } 53 | } 54 | 55 | /// Security policy for signature enforcement 56 | #[derive(Clone, Copy, Debug)] 57 | pub struct SecurityPolicy { 58 | pub verify_signatures: bool, 59 | pub allow_unsigned: bool, 60 | } 61 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | # Security Policy 2 | 3 | I take the security of `sps2` seriously. I appreciate your efforts to responsibly disclose your findings, and I will make every effort to acknowledge and address them. 4 | 5 | ## Supported Versions 6 | 7 | `sps2` is currently in the early stages of development and has not yet had a stable release. As such, there are no formal version branches with dedicated security support. 8 | 9 | Security patches will be applied to the latest commit on the `main` branch only. 
10 | 11 | ## Reporting a Vulnerability 12 | 13 | To report a security vulnerability, please send an email to **alexander.knott@posteo.co**. 14 | 15 | To ensure the confidentiality of the report, I strongly encourage you to encrypt your email using my public GPG key. You can typically find it on public key servers like `keys.openpgp.org` or `keyserver.ubuntu.com` by searching for my email address. 16 | 17 | **In your report, please include:** 18 | 19 | * A clear description of the vulnerability. 20 | * The steps required to reproduce it. 21 | * The potential impact of the vulnerability. 22 | * Any proof-of-concept code or screenshots, if applicable. 23 | 24 | ### What to Expect 25 | 26 | As I am the sole contributor developing this project in my spare time, please understand that I cannot offer guaranteed response times. However, I will do my best to adhere to the following process: 27 | 28 | 1. **Acknowledge**: I will try to acknowledge receipt of your report within 72 hours. 29 | 2. **Investigate**: I will investigate the report to confirm the vulnerability. 30 | 3. **Remediate**: If the vulnerability is confirmed, I will work on a patch. 31 | 4. **Notify**: I will notify you once the fix has been merged into the `main` branch. 32 | 33 | This policy will evolve as the project matures. Thank you for helping to keep `sps2` secure. 34 | -------------------------------------------------------------------------------- /crates/events/src/events/package.rs: -------------------------------------------------------------------------------- 1 | use serde::{Deserialize, Serialize}; 2 | 3 | use super::FailureContext; 4 | 5 | /// Named package operations surfaced to consumers. 6 | #[derive(Debug, Clone, Serialize, Deserialize)] 7 | #[serde(rename_all = "snake_case")] 8 | pub enum PackageOperation { 9 | List, 10 | Search, 11 | HealthCheck, 12 | SelfUpdate, 13 | Cleanup, 14 | } 15 | 16 | /// Outcome payloads for completed operations. 17 | #[derive(Debug, Clone, Serialize, Deserialize)] 18 | #[serde(tag = "kind", rename_all = "snake_case")] 19 | pub enum PackageOutcome { 20 | List { 21 | total: usize, 22 | }, 23 | Search { 24 | query: String, 25 | total: usize, 26 | }, 27 | Health { 28 | healthy: bool, 29 | issues: Vec, 30 | }, 31 | SelfUpdate { 32 | from: String, 33 | to: String, 34 | duration_ms: u64, 35 | }, 36 | Cleanup { 37 | states_removed: usize, 38 | packages_removed: usize, 39 | duration_ms: u64, 40 | }, 41 | } 42 | 43 | /// Package-level events consumed by CLI/log handlers. 44 | #[derive(Debug, Clone, Serialize, Deserialize)] 45 | #[serde(tag = "type", rename_all = "snake_case")] 46 | pub enum PackageEvent { 47 | OperationStarted { 48 | operation: PackageOperation, 49 | }, 50 | OperationCompleted { 51 | operation: PackageOperation, 52 | outcome: PackageOutcome, 53 | }, 54 | OperationFailed { 55 | operation: PackageOperation, 56 | failure: FailureContext, 57 | }, 58 | } 59 | 60 | /// Health status indicator for health checks. 
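/// Variants serialize through serde in snake_case, so (illustrative sketch)
/// `serde_json::to_string(&HealthStatus::Warning)` would produce the JSON string `"warning"`.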
61 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] 62 | #[serde(rename_all = "snake_case")] 63 | pub enum HealthStatus { 64 | Healthy, 65 | Warning, 66 | Error, 67 | } 68 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | ### Feature Request 11 | 12 | **What problem is this feature trying to solve?** 13 | Please provide a clear and concise description of the problem or limitation you're facing. For example: "I'm always frustrated when I have to manually..." or "It would be powerful if `sps2` could..." 14 | 15 | **Describe the desired solution** 16 | A clear and concise description of what you want to happen. Please be as specific as possible. 17 | 18 | **Example Usage** 19 | How would you envision using this feature? Please provide an example of the command(s) and any potential output. 20 | 21 | * **Example command:** 22 | ```sh 23 | # e.g., sps2 audit --all --fail-on=high 24 | ``` 25 | 26 | * **Example `config.toml` addition (if any):** 27 | ```toml 28 | # [new_feature] 29 | # setting = "value" 30 | ``` 31 | 32 | * **Example `recipe.star` addition (if any):** 33 | ```python 34 | # e.g., ctx.new_functionality() 35 | ``` 36 | 37 | **Describe any alternatives you've considered** 38 | A clear and concise description of any alternative solutions or features you've considered. 39 | 40 | **Which part of `sps2` does this feature affect?** 41 | (Please check all that apply to help us route this to the right team) 42 | 43 | * [ ] CLI (`sps2` app) 44 | * [ ] Package Building (`builder` crate) 45 | * [ ] Dependency Resolution (`resolver` crate) 46 | * [ ] Installation & State Management (`install`/`state` crates) 47 | * [ ] Package Format (`.sp` / `manifest.toml`) 48 | * [ ] Starlark Recipes (`recipe.star` API) 49 | * [ ] Other (please specify): 50 | 51 | **Additional context** 52 | Add any other context, mockups, or screenshots about the feature request here. 
53 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | resolver = "3" 3 | members = [ 4 | "apps/sps2", 5 | "apps/sls", 6 | "apps/sbs", 7 | "crates/builder", 8 | "crates/config", 9 | "crates/errors", 10 | "crates/events", 11 | "crates/guard", 12 | "crates/hash", 13 | "crates/index", 14 | "crates/install", 15 | 16 | "crates/net", 17 | "crates/ops", 18 | "crates/platform", 19 | "crates/repository", 20 | "crates/resolver", 21 | 22 | "crates/state", 23 | "crates/store", 24 | "crates/types", 25 | ] 26 | 27 | [workspace.package] 28 | edition = "2021" 29 | rust-version = "1.90.0" 30 | authors = ["sps2 Contributors"] 31 | license = "MIT OR Apache-2.0" 32 | repository = "https://github.com/org/sps2" 33 | 34 | [workspace.dependencies] 35 | # Async runtime 36 | tokio = { version = "1.48.0", features = ["full"] } 37 | 38 | # Database0.8.6 39 | sqlx = { version = "0.8.6", features = [ 40 | "runtime-tokio", 41 | "sqlite", 42 | "macros", 43 | "migrate", 44 | ] } 45 | 46 | # HTTP0.12.20 47 | reqwest = { version = "0.12.24", features = ["json", "stream"] } 48 | 49 | # Serialization 50 | serde = { version = "1.0.228", features = ["derive"] } 51 | serde_json = "1.0.145" 52 | toml = "0.9.8" 53 | 54 | # Error handl2.0.12 55 | thiserror = "2.0.17" 56 | 57 | # Utilities 58 | uuid = { version = "1.18.1", features = ["v4", "serde"] } 59 | semver = { version = "1.0.27", features = ["serde"] } 60 | blake3 = "1.8.2" 61 | xxhash-rust = { version = "0.8.15", features = ["xxh3"] } 62 | tracing = "0.1.41" 63 | chrono = { version = "0.4.42", features = ["serde"] } 64 | minisign-verify = "0.2.4" 65 | 66 | # Concurren6.1.0a structures 67 | dashmap = "6.1.0" 68 | crossbeam = "0.8.4" 69 | 70 | clap = { version = "4.5.51", features = ["derive", "env"] } 71 | 72 | tar = "0.4.44" 73 | 74 | tempfile = "3.22.0" 75 | proptest = "1.9.0" 76 | futures = "0.3.31" 77 | -------------------------------------------------------------------------------- /crates/builder/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "sps2-builder" 3 | version = "0.1.0" 4 | edition.workspace = true 5 | rust-version.workspace = true 6 | authors.workspace = true 7 | license.workspace = true 8 | repository.workspace = true 9 | 10 | [dependencies] 11 | sps2-errors = { path = "../errors" } 12 | sps2-types = { path = "../types" } 13 | sps2-events = { path = "../events" } 14 | sps2-hash = { path = "../hash" } 15 | sps2-resolver = { path = "../resolver" } 16 | sps2-store = { path = "../store" } 17 | sps2-net = { path = "../net" } 18 | sps2-install = { path = "../install" } 19 | sps2-state = { path = "../state" } 20 | sps2-config = { path = "../config" } 21 | sps2-platform = { path = "../platform" } 22 | serde = { workspace = true } 23 | serde_json = { workspace = true } 24 | tokio = { workspace = true, features = ["fs", "process"] } 25 | reqwest = { workspace = true } 26 | # YAML parsing dependencies 27 | serde_yaml2 = "0.1.3" 28 | tempfile = { workspace = true } 29 | num_cpus = "1.17.0" 30 | toml = { workspace = true } 31 | minisign = "0.8.0" 32 | async-trait = "0.1.89" 33 | tar = "0.4.44" 34 | async-compression = { version = "0.4.33", features = [ 35 | "tokio", 36 | "zstd", 37 | "gzip", 38 | "bzip2", 39 | "xz", 40 | ] } 41 | futures = "0.3.31" 42 | dashmap = { workspace = true } 43 | crossbeam = { workspace = true } 44 | chrono = { workspace = true } 45 | 
which = "8.0.0" 46 | sysinfo = "0.37.2" 47 | uuid = { version = "1.18.1", features = ["v4"] } 48 | bitflags = "2.10.0" 49 | zip = "6.0.0" 50 | rayon = "1.11.0" 51 | ignore = { version = "0.4.25", features = [ 52 | "simd-accel", 53 | ] } # ripgrep's fast walker 54 | globset = "0.4.16" 55 | bstr = "1.12.1" # binary‑safe search helpers 56 | object = { version = "0.37.3", features = ["read_core", "write_core", "macho"] } 57 | regex = "1.12.2" 58 | thiserror = "2.0.17" 59 | md-5 = "0.10.6" 60 | sha2 = "0.10.9" 61 | 62 | [dev-dependencies] 63 | tempfile = { workspace = true } 64 | sps2-index = { path = "../index" } 65 | filetime = "0.2.26" 66 | zstd = "0.13.3" 67 | -------------------------------------------------------------------------------- /crates/builder/src/artifact_qa/patchers/object_cleaner.rs: -------------------------------------------------------------------------------- 1 | //! Cleaner that removes object (.o) files 2 | 3 | use crate::artifact_qa::{reports::Report, traits::Patcher}; 4 | use crate::{BuildContext, BuildEnvironment}; 5 | use sps2_errors::Error; 6 | 7 | pub struct ObjectFileCleaner; 8 | 9 | impl crate::artifact_qa::traits::Action for ObjectFileCleaner { 10 | const NAME: &'static str = "Object file cleaner"; 11 | 12 | async fn run( 13 | _ctx: &BuildContext, 14 | env: &BuildEnvironment, 15 | _findings: Option<&crate::artifact_qa::diagnostics::DiagnosticCollector>, 16 | ) -> Result { 17 | let staging_dir = env.staging_dir(); 18 | let mut removed_files = Vec::new(); 19 | 20 | // Walk staging directory for .o files 21 | for entry in ignore::WalkBuilder::new(staging_dir) 22 | .hidden(false) 23 | .parents(false) 24 | .build() 25 | { 26 | let path = match entry { 27 | Ok(e) => e.into_path(), 28 | Err(_) => continue, 29 | }; 30 | 31 | if !path.is_file() { 32 | continue; 33 | } 34 | 35 | // Check if it's a .o file 36 | if let Some(ext) = path.extension().and_then(|e| e.to_str()) { 37 | if ext == "o" { 38 | // Remove the file 39 | if let Ok(()) = std::fs::remove_file(&path) { 40 | removed_files.push(path); 41 | } 42 | // Ignore removal errors 43 | } 44 | } 45 | } 46 | 47 | let mut warnings = Vec::new(); 48 | let removed = removed_files; 49 | if !removed.is_empty() { 50 | warnings.push(format!("Removed {} object files", removed.len())); 51 | } 52 | 53 | Ok(Report { 54 | changed_files: removed, 55 | warnings, 56 | ..Default::default() 57 | }) 58 | } 59 | } 60 | 61 | impl Patcher for ObjectFileCleaner {} 62 | -------------------------------------------------------------------------------- /crates/builder/src/artifact_qa/patchers/la_cleaner.rs: -------------------------------------------------------------------------------- 1 | //! 
Cleaner that removes libtool archive (.la) files 2 | 3 | use crate::artifact_qa::{reports::Report, traits::Patcher}; 4 | use crate::{BuildContext, BuildEnvironment}; 5 | use sps2_errors::Error; 6 | 7 | pub struct LaFileCleaner; 8 | 9 | impl crate::artifact_qa::traits::Action for LaFileCleaner { 10 | const NAME: &'static str = "Libtool archive cleaner"; 11 | 12 | async fn run( 13 | _ctx: &BuildContext, 14 | env: &BuildEnvironment, 15 | _findings: Option<&crate::artifact_qa::diagnostics::DiagnosticCollector>, 16 | ) -> Result { 17 | let staging_dir = env.staging_dir(); 18 | let mut removed_files = Vec::new(); 19 | 20 | // Walk staging directory for .la files 21 | for entry in ignore::WalkBuilder::new(staging_dir) 22 | .hidden(false) 23 | .parents(false) 24 | .build() 25 | { 26 | let path = match entry { 27 | Ok(e) => e.into_path(), 28 | Err(_) => continue, 29 | }; 30 | 31 | if !path.is_file() { 32 | continue; 33 | } 34 | 35 | // Check if it's a .la file 36 | if let Some(ext) = path.extension().and_then(|e| e.to_str()) { 37 | if ext == "la" { 38 | // Remove the file 39 | if let Ok(()) = std::fs::remove_file(&path) { 40 | removed_files.push(path); 41 | } 42 | // Ignore removal errors 43 | } 44 | } 45 | } 46 | 47 | let mut warnings = Vec::new(); 48 | let removed = removed_files; 49 | if !removed.is_empty() { 50 | warnings.push(format!("Removed {} libtool archives", removed.len())); 51 | } 52 | 53 | Ok(Report { 54 | changed_files: removed, 55 | warnings, 56 | ..Default::default() 57 | }) 58 | } 59 | } 60 | 61 | impl Patcher for LaFileCleaner {} 62 | -------------------------------------------------------------------------------- /crates/errors/src/signing.rs: -------------------------------------------------------------------------------- 1 | #![deny(clippy::pedantic, unsafe_code)] 2 | 3 | //! Signing error types 4 | 5 | use std::borrow::Cow; 6 | 7 | use crate::UserFacingError; 8 | use thiserror::Error; 9 | 10 | #[derive(Debug, Clone, Error)] 11 | #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] 12 | #[non_exhaustive] 13 | pub enum SigningError { 14 | #[error("signature verification failed: {reason}")] 15 | VerificationFailed { reason: String }, 16 | 17 | #[error("no trusted key found for signature with key id: {key_id}")] 18 | NoTrustedKeyFound { key_id: String }, 19 | 20 | #[error("invalid signature format: {0}")] 21 | InvalidSignatureFormat(String), 22 | 23 | #[error("invalid public key format: {0}")] 24 | InvalidPublicKey(String), 25 | } 26 | 27 | impl UserFacingError for SigningError { 28 | fn user_message(&self) -> Cow<'_, str> { 29 | Cow::Owned(self.to_string()) 30 | } 31 | 32 | fn user_hint(&self) -> Option<&'static str> { 33 | match self { 34 | Self::VerificationFailed { .. } => Some("Ensure you have the correct public key and the artifact has not been tampered with."), 35 | Self::NoTrustedKeyFound { .. } => Some("Import the missing public key (`sps2 keys import`) and retry."), 36 | Self::InvalidSignatureFormat { .. } | Self::InvalidPublicKey { .. } => { 37 | Some("Check the signature and key files for corruption or unsupported formats.") 38 | } 39 | } 40 | } 41 | 42 | fn is_retryable(&self) -> bool { 43 | matches!(self, Self::NoTrustedKeyFound { .. }) 44 | } 45 | 46 | fn user_code(&self) -> Option<&'static str> { 47 | let code = match self { 48 | Self::VerificationFailed { .. } => "signing.verification_failed", 49 | Self::NoTrustedKeyFound { .. } => "signing.no_trusted_key", 50 | Self::InvalidSignatureFormat { .. 
} => "signing.invalid_signature_format", 51 | Self::InvalidPublicKey { .. } => "signing.invalid_public_key", 52 | }; 53 | Some(code) 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /crates/install/src/api/context.rs: -------------------------------------------------------------------------------- 1 | use sps2_events::EventSender; 2 | use sps2_types::PackageSpec; 3 | use std::path::PathBuf; 4 | 5 | /// Installation context 6 | #[derive(Clone, Debug)] 7 | pub struct InstallContext { 8 | /// Package specifications to install 9 | pub packages: Vec, 10 | /// Local package files to install 11 | pub local_files: Vec, 12 | /// Force reinstallation 13 | pub force: bool, 14 | 15 | /// Force re-download even if cached in the store 16 | pub force_download: bool, 17 | 18 | /// Event sender for progress reporting 19 | pub event_sender: Option, 20 | } 21 | 22 | context_builder! { 23 | InstallContext { 24 | packages: Vec, 25 | local_files: Vec, 26 | force: bool, 27 | force_download: bool, 28 | 29 | } 30 | } 31 | context_add_package_method!(InstallContext, PackageSpec); 32 | 33 | /// Uninstall context 34 | #[derive(Clone, Debug)] 35 | pub struct UninstallContext { 36 | /// Package names to uninstall 37 | pub packages: Vec, 38 | /// Remove dependencies if no longer needed 39 | pub autoremove: bool, 40 | /// Force removal even with dependents 41 | pub force: bool, 42 | 43 | /// Event sender for progress reporting 44 | pub event_sender: Option, 45 | } 46 | 47 | context_builder! { 48 | UninstallContext { 49 | packages: Vec, 50 | autoremove: bool, 51 | force: bool, 52 | 53 | } 54 | } 55 | context_add_package_method!(UninstallContext, String); 56 | 57 | /// Update context 58 | #[derive(Clone, Debug)] 59 | pub struct UpdateContext { 60 | /// Packages to update (empty = all) 61 | pub packages: Vec, 62 | /// Upgrade mode (ignore upper bounds) 63 | pub upgrade: bool, 64 | 65 | /// Event sender for progress reporting 66 | pub event_sender: Option, 67 | } 68 | 69 | context_builder! { 70 | UpdateContext { 71 | packages: Vec, 72 | upgrade: bool, 73 | 74 | } 75 | } 76 | context_add_package_method!(UpdateContext, String); 77 | -------------------------------------------------------------------------------- /crates/install/src/prepare/context.rs: -------------------------------------------------------------------------------- 1 | //! 
Execution context for parallel operations 2 | 3 | use crate::SecurityPolicy; 4 | use sps2_events::{EventEmitter, EventSender}; 5 | 6 | /// Execution context for parallel operations 7 | #[derive(Clone)] 8 | pub struct ExecutionContext { 9 | /// Event sender for progress reporting 10 | event_sender: Option, 11 | /// Optional security policy for signature enforcement 12 | security_policy: Option, 13 | /// Whether downloads should bypass cache reuse 14 | force_redownload: bool, 15 | } 16 | 17 | impl ExecutionContext { 18 | /// Create new execution context 19 | #[must_use] 20 | pub fn new() -> Self { 21 | Self { 22 | event_sender: None, 23 | security_policy: None, 24 | force_redownload: false, 25 | } 26 | } 27 | 28 | /// Set event sender 29 | #[must_use] 30 | pub fn with_event_sender(mut self, event_sender: EventSender) -> Self { 31 | self.event_sender = Some(event_sender); 32 | self 33 | } 34 | 35 | /// Set security policy for downloads 36 | #[must_use] 37 | pub fn with_security_policy(mut self, policy: SecurityPolicy) -> Self { 38 | self.security_policy = Some(policy); 39 | self 40 | } 41 | 42 | /// Set whether downloads must ignore cached packages 43 | #[must_use] 44 | pub fn with_force_redownload(mut self, force: bool) -> Self { 45 | self.force_redownload = force; 46 | self 47 | } 48 | 49 | /// Should downstream logic bypass store reuse 50 | #[must_use] 51 | pub fn force_redownload(&self) -> bool { 52 | self.force_redownload 53 | } 54 | 55 | /// Get the security policy if set 56 | pub(crate) fn security_policy(&self) -> Option { 57 | self.security_policy 58 | } 59 | } 60 | 61 | impl EventEmitter for ExecutionContext { 62 | fn event_sender(&self) -> Option<&EventSender> { 63 | self.event_sender.as_ref() 64 | } 65 | } 66 | 67 | impl Default for ExecutionContext { 68 | fn default() -> Self { 69 | Self::new() 70 | } 71 | } 72 | -------------------------------------------------------------------------------- /crates/net/src/download/retry.rs: -------------------------------------------------------------------------------- 1 | //! Retry logic and backoff calculations for downloads 2 | 3 | use super::config::RetryConfig; 4 | use std::time::Duration; 5 | 6 | /// Calculate exponential backoff delay with jitter 7 | /// 8 | /// This function implements exponential backoff with proper overflow protection 9 | /// and jitter to prevent thundering herd problems. 
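///
/// Worked example with illustrative settings (500 ms initial delay, 2.0 multiplier,
/// 0.2 jitter factor, 30 s max delay): attempt 4 is capped to exponent 3, giving
/// 500 * 2.0^3 = 4000 ms, and jitter keeps the final delay roughly within 3600-4400 ms.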
10 | pub(super) fn calculate_backoff_delay(retry_config: &RetryConfig, attempt: u32) -> Duration { 11 | // Cap attempt at a reasonable value to prevent overflow (2^30 is already huge) 12 | let attempt = attempt.saturating_sub(1).min(30); 13 | 14 | // Calculate exponential backoff: base_delay * multiplier^attempt 15 | // Precision loss is acceptable for backoff calculations 16 | #[allow(clippy::cast_precision_loss)] 17 | let base_ms = retry_config.initial_delay.as_millis() as f64; 18 | #[allow(clippy::cast_precision_loss)] 19 | let max_ms = retry_config.max_delay.as_millis() as f64; 20 | let multiplier = retry_config.backoff_multiplier; 21 | 22 | // Use floating point for exponential calculation, clamped to max_delay 23 | // Cast is safe: attempt is capped at 30, which fits in i32 24 | #[allow(clippy::cast_possible_wrap)] 25 | let delay_ms = (base_ms * multiplier.powi(attempt as i32)) 26 | .min(max_ms) 27 | .max(0.0); 28 | 29 | // Add jitter: random value in range [-jitter_factor/2, +jitter_factor/2] 30 | // This prevents thundering herd when multiple clients retry simultaneously 31 | let jitter_factor = retry_config.jitter_factor.clamp(0.0, 1.0); 32 | let jitter_ms = delay_ms * jitter_factor * (rand::random::() - 0.5); 33 | let final_delay_ms = (delay_ms + jitter_ms).max(0.0); 34 | 35 | // Convert to Duration, clamping at u64::MAX milliseconds 36 | #[allow(clippy::cast_precision_loss)] 37 | let final_delay_ms = if final_delay_ms > u64::MAX as f64 { 38 | u64::MAX 39 | } else { 40 | // Safe: value is positive (max'd with 0) and already range-checked 41 | #[allow(clippy::cast_possible_truncation, clippy::cast_sign_loss)] 42 | { 43 | final_delay_ms as u64 44 | } 45 | }; 46 | 47 | Duration::from_millis(final_delay_ms) 48 | } 49 | -------------------------------------------------------------------------------- /crates/errors/src/version.rs: -------------------------------------------------------------------------------- 1 | //! Version and constraint parsing error types 2 | 3 | use std::borrow::Cow; 4 | 5 | use crate::UserFacingError; 6 | use thiserror::Error; 7 | 8 | #[derive(Debug, Clone, Error)] 9 | #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] 10 | #[non_exhaustive] 11 | pub enum VersionError { 12 | #[error("invalid version: {input}")] 13 | InvalidVersion { input: String }, 14 | 15 | #[error("invalid version constraint: {input}")] 16 | InvalidConstraint { input: String }, 17 | 18 | #[error("incompatible version: {version} does not satisfy {constraint}")] 19 | IncompatibleVersion { version: String, constraint: String }, 20 | 21 | #[error("no version satisfies constraints: {constraints}")] 22 | NoSatisfyingVersion { constraints: String }, 23 | 24 | #[error("version parse error: {message}")] 25 | ParseError { message: String }, 26 | } 27 | 28 | impl UserFacingError for VersionError { 29 | fn user_message(&self) -> Cow<'_, str> { 30 | Cow::Owned(self.to_string()) 31 | } 32 | 33 | fn user_hint(&self) -> Option<&'static str> { 34 | match self { 35 | Self::InvalidVersion { .. } | Self::ParseError { .. } => { 36 | Some("Use semantic-version strings like 1.2.3 or consult the package's available versions.") 37 | } 38 | Self::InvalidConstraint { .. } => Some("Use caret (`^`), tilde (`~`), or equality constraints accepted by sps2."), 39 | Self::IncompatibleVersion { .. } | Self::NoSatisfyingVersion { .. 
} => { 40 | Some("Relax the version requirement or select a different package build.") 41 | } 42 | } 43 | } 44 | 45 | fn is_retryable(&self) -> bool { 46 | false 47 | } 48 | 49 | fn user_code(&self) -> Option<&'static str> { 50 | let code = match self { 51 | Self::InvalidVersion { .. } => "version.invalid_version", 52 | Self::InvalidConstraint { .. } => "version.invalid_constraint", 53 | Self::IncompatibleVersion { .. } => "version.incompatible_version", 54 | Self::NoSatisfyingVersion { .. } => "version.no_satisfying_version", 55 | Self::ParseError { .. } => "version.parse_error", 56 | }; 57 | Some(code) 58 | } 59 | } 60 | -------------------------------------------------------------------------------- /apps/sps2/src/error.rs: -------------------------------------------------------------------------------- 1 | //! CLI error handling 2 | 3 | use std::fmt; 4 | 5 | use sps2_errors::UserFacingError; 6 | 7 | /// CLI-specific error type 8 | #[derive(Debug)] 9 | pub enum CliError { 10 | /// Configuration error 11 | Config(sps2_errors::ConfigError), 12 | /// Operations error 13 | Ops(sps2_errors::Error), 14 | /// System setup error 15 | Setup(String), 16 | 17 | /// Invalid command arguments 18 | InvalidArguments(String), 19 | /// I/O error 20 | Io(std::io::Error), 21 | } 22 | 23 | impl fmt::Display for CliError { 24 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 25 | match self { 26 | CliError::Config(e) => write!(f, "Configuration error: {e}"), 27 | CliError::Ops(e) => { 28 | let message = e.user_message(); 29 | write!(f, "{message}")?; 30 | if let Some(code) = e.user_code() { 31 | write!(f, "\n Code: {code}")?; 32 | } 33 | if let Some(hint) = e.user_hint() { 34 | write!(f, "\n Hint: {hint}")?; 35 | } 36 | if e.is_retryable() { 37 | write!(f, "\n Retry: safe to retry this operation.")?; 38 | } 39 | Ok(()) 40 | } 41 | CliError::Setup(msg) => write!(f, "System setup error: {msg}"), 42 | 43 | CliError::InvalidArguments(msg) => write!(f, "Invalid arguments: {msg}"), 44 | CliError::Io(e) => write!(f, "I/O error: {e}"), 45 | } 46 | } 47 | } 48 | 49 | impl std::error::Error for CliError { 50 | fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { 51 | match self { 52 | CliError::Config(e) => Some(e), 53 | CliError::Ops(e) => Some(e), 54 | CliError::Io(e) => Some(e), 55 | _ => None, 56 | } 57 | } 58 | } 59 | 60 | impl From for CliError { 61 | fn from(e: sps2_errors::ConfigError) -> Self { 62 | CliError::Config(e) 63 | } 64 | } 65 | 66 | impl From for CliError { 67 | fn from(e: sps2_errors::Error) -> Self { 68 | CliError::Ops(e) 69 | } 70 | } 71 | 72 | impl From for CliError { 73 | fn from(e: std::io::Error) -> Self { 74 | CliError::Io(e) 75 | } 76 | } 77 | -------------------------------------------------------------------------------- /crates/builder/src/lib.rs: -------------------------------------------------------------------------------- 1 | #![warn(mismatched_lifetime_syntaxes)] 2 | #![deny(clippy::pedantic, unsafe_code)] 3 | //! Package building for sps2 4 | //! 5 | //! This crate handles building packages from YAML recipes with 6 | //! isolated environments, dependency management, and packaging. 
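//!
//! The pipeline is roughly: parse a YAML recipe (`yaml`/`recipe`), prepare an
//! isolated `BuildEnvironment` (`environment`), run the build `stages`, apply the
//! `artifact_qa` scanners and patchers to the staging tree, then compress, manifest,
//! and sign the result (`packaging`).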
7 | 8 | pub mod artifact_qa; 9 | mod build_plan; 10 | mod build_systems; 11 | mod cache; 12 | pub mod config; 13 | mod core; 14 | mod environment; 15 | mod packaging; 16 | mod recipe; 17 | mod security; 18 | mod stages; 19 | mod utils; 20 | mod validation; 21 | mod yaml; 22 | 23 | pub use build_systems::{ 24 | detect_build_system, AutotoolsBuildSystem, BuildSystem, BuildSystemConfig, BuildSystemContext, 25 | BuildSystemRegistry, CMakeBuildSystem, CargoBuildSystem, GoBuildSystem, MesonBuildSystem, 26 | NodeJsBuildSystem, PythonBuildSystem, TestFailure, TestResults, 27 | }; 28 | pub use cache::{ 29 | BuildCache, CacheStatistics, CompilerCache, CompilerCacheType, IncrementalBuildTracker, 30 | SourceCache, 31 | }; 32 | pub use config::BuildConfig; 33 | pub use core::api::BuilderApi; 34 | pub use core::builder::Builder; 35 | pub use environment::{BuildCommandResult, BuildEnvironment, BuildResult}; 36 | pub use utils::format::{detect_compression_format, CompressionFormatInfo}; 37 | 38 | // Re-export packaging types 39 | pub use packaging::archive::{create_deterministic_tar_archive, get_deterministic_timestamp}; 40 | pub use packaging::compression::compress_with_zstd; 41 | // SBOM-related re-exports removed 42 | // SBOM types removed from re-exports 43 | pub use packaging::manifest::create_manifest; 44 | pub use packaging::signing::PackageSigner; 45 | pub use packaging::{create_and_sign_package, create_package}; 46 | 47 | // Re-export config types for backward compatibility 48 | 49 | // Re-export YAML types (from yaml module) 50 | pub use yaml::{BuildStep, RecipeMetadata}; 51 | 52 | // Re-export recipe types (from recipe module) 53 | pub use recipe::model::{ 54 | Build, BuildSystem as YamlBuildSystem, ChecksumAlgorithm, ParsedStep, PostCommand, PostOption, 55 | RpathPatchOption, SourceMethod, YamlRecipe, 56 | }; 57 | pub use recipe::parser::parse_yaml_recipe; 58 | 59 | pub use core::context::BuildContext; 60 | 61 | // Re-export build plan and security types for pack command 62 | pub use build_plan::BuildPlan; 63 | pub use security::SecurityContext; 64 | pub use stages::build::BuildCommand; 65 | pub use stages::executors::execute_post_step_with_security; 66 | -------------------------------------------------------------------------------- /crates/platform/src/filesystem/mod.rs: -------------------------------------------------------------------------------- 1 | //! 
Filesystem operations for macOS platform (APFS clonefile, atomic operations) 2 | 3 | use async_trait::async_trait; 4 | use sps2_errors::PlatformError; 5 | use std::path::Path; 6 | 7 | use crate::core::PlatformContext; 8 | 9 | /// Trait for filesystem operations specific to macOS 10 | #[async_trait] 11 | pub trait FilesystemOperations: Send + Sync { 12 | /// Clone a file using APFS clonefile for efficient copy-on-write 13 | async fn clone_file( 14 | &self, 15 | ctx: &PlatformContext, 16 | src: &Path, 17 | dst: &Path, 18 | ) -> Result<(), PlatformError>; 19 | 20 | /// Clone a directory using APFS clonefile for efficient copy-on-write 21 | async fn clone_directory( 22 | &self, 23 | ctx: &PlatformContext, 24 | src: &Path, 25 | dst: &Path, 26 | ) -> Result<(), PlatformError>; 27 | 28 | /// Atomically rename a file 29 | async fn atomic_rename( 30 | &self, 31 | ctx: &PlatformContext, 32 | src: &Path, 33 | dst: &Path, 34 | ) -> Result<(), PlatformError>; 35 | 36 | /// Atomically swap two files 37 | async fn atomic_swap( 38 | &self, 39 | ctx: &PlatformContext, 40 | path_a: &Path, 41 | path_b: &Path, 42 | ) -> Result<(), PlatformError>; 43 | 44 | /// Create a hard link between files 45 | async fn hard_link( 46 | &self, 47 | ctx: &PlatformContext, 48 | src: &Path, 49 | dst: &Path, 50 | ) -> Result<(), PlatformError>; 51 | 52 | /// Create directory and all parent directories 53 | async fn create_dir_all(&self, ctx: &PlatformContext, path: &Path) 54 | -> Result<(), PlatformError>; 55 | 56 | /// Remove directory and all contents 57 | async fn remove_dir_all(&self, ctx: &PlatformContext, path: &Path) 58 | -> Result<(), PlatformError>; 59 | 60 | /// Check if a path exists 61 | async fn exists(&self, ctx: &PlatformContext, path: &Path) -> bool; 62 | 63 | /// Remove a single file 64 | async fn remove_file(&self, ctx: &PlatformContext, path: &Path) -> Result<(), PlatformError>; 65 | 66 | /// Get the size of a file or directory 67 | async fn size(&self, ctx: &PlatformContext, path: &Path) -> Result; 68 | 69 | /// Check if a path points to a directory. 70 | async fn is_dir(&self, ctx: &PlatformContext, path: &Path) -> bool; 71 | } 72 | -------------------------------------------------------------------------------- /crates/events/src/progress/update.rs: -------------------------------------------------------------------------------- 1 | //! 
Progress update and formatting utilities 2 | 3 | use super::config::TrendDirection; 4 | use std::time::Duration; 5 | 6 | /// Result of a progress update with calculated metrics 7 | #[derive(Debug, Clone)] 8 | pub struct ProgressUpdate { 9 | /// Tracker ID 10 | pub id: String, 11 | /// Current progress 12 | pub progress: u64, 13 | /// Total amount of work 14 | pub total: Option, 15 | /// Current phase index 16 | pub phase: Option, 17 | /// Smoothed speed (units per second) 18 | pub speed: Option, 19 | /// Estimated time to completion 20 | pub eta: Option, 21 | /// Speed trend direction 22 | pub trend: TrendDirection, 23 | } 24 | 25 | impl ProgressUpdate { 26 | /// Get progress as a percentage (0.0-100.0) 27 | #[must_use] 28 | pub fn percentage(&self) -> Option { 29 | if let Some(total) = self.total { 30 | if total > 0 { 31 | Some((self.progress as f64 / total as f64) * 100.0) 32 | } else { 33 | Some(100.0) 34 | } 35 | } else { 36 | None 37 | } 38 | } 39 | 40 | /// Format speed in human-readable units 41 | #[must_use] 42 | pub fn format_speed(&self, unit: &str) -> Option { 43 | self.speed.map(|speed| { 44 | if speed > 1_000_000.0 { 45 | format!("{:.1}M {unit}/s", speed / 1_000_000.0) 46 | } else if speed > 1_000.0 { 47 | format!("{:.1}K {unit}/s", speed / 1_000.0) 48 | } else { 49 | format!("{:.1} {unit}/s", speed) 50 | } 51 | }) 52 | } 53 | 54 | /// Format ETA in human-readable format 55 | #[must_use] 56 | pub fn format_eta(&self) -> Option { 57 | self.eta.map(|eta| { 58 | let total_seconds = eta.as_secs(); 59 | if total_seconds > 3600 { 60 | let hours = total_seconds / 3600; 61 | let minutes = (total_seconds % 3600) / 60; 62 | format!("{hours}h {minutes}m") 63 | } else if total_seconds > 60 { 64 | let minutes = total_seconds / 60; 65 | let seconds = total_seconds % 60; 66 | format!("{minutes}m {seconds}s") 67 | } else { 68 | format!("{total_seconds}s") 69 | } 70 | }) 71 | } 72 | } 73 | -------------------------------------------------------------------------------- /crates/install/src/atomic/transition.rs: -------------------------------------------------------------------------------- 1 | //! State transition management for atomic installations 2 | 3 | use sps2_events::EventSender; 4 | use sps2_hash::FileHashResult; 5 | use sps2_state::{FileReference, PackageRef, StateManager}; 6 | use sps2_types::state::SlotId; 7 | use std::path::PathBuf; 8 | use uuid::Uuid; 9 | 10 | /// State transition for atomic operations 11 | /// 12 | /// This is now a simple data container that holds information about 13 | /// a pending state transition. The actual commit logic is handled 14 | /// by the `StateManager` using two-phase commit. 15 | pub struct StateTransition { 16 | /// Staging state ID 17 | pub staging_id: Uuid, 18 | /// Parent state ID 19 | pub parent_id: Option, 20 | /// Slot that will hold the prepared state 21 | pub staging_slot: SlotId, 22 | /// Filesystem path to the staging slot 23 | pub slot_path: PathBuf, 24 | /// Package references to be added during commit 25 | pub package_refs: Vec, 26 | /// File references for file-level storage 27 | pub file_references: Vec<(i64, FileReference)>, // (package_id, file_reference) 28 | /// Pending file hashes to be converted to file references after we have package IDs 29 | pub pending_file_hashes: Vec<(sps2_resolver::PackageId, Vec)>, 30 | /// Event sender for progress reporting 31 | pub event_sender: Option, 32 | /// Operation type (install, uninstall, etc.) 
33 | pub operation: String, 34 | } 35 | 36 | impl StateTransition { 37 | /// Create new state transition 38 | /// 39 | /// # Errors 40 | /// 41 | /// Returns an error if getting current state ID fails. 42 | pub async fn new( 43 | state_manager: &StateManager, 44 | operation: String, 45 | ) -> Result { 46 | let staging_id = Uuid::new_v4(); 47 | let parent_id = Some(state_manager.get_current_state_id().await?); 48 | let staging_slot = state_manager.inactive_slot().await; 49 | let slot_path = state_manager.ensure_slot_dir(staging_slot).await?; 50 | 51 | Ok(Self { 52 | staging_id, 53 | parent_id, 54 | staging_slot, 55 | slot_path, 56 | package_refs: Vec::new(), 57 | file_references: Vec::new(), 58 | pending_file_hashes: Vec::new(), 59 | event_sender: None, 60 | operation, 61 | }) 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /recipes/gcc/gcc-15.1.0-1.yml: -------------------------------------------------------------------------------- 1 | metadata: 2 | name: gcc 3 | version: "15.1.0" 4 | description: "The GNU Compiler Collection (GCC) - comprehensive suite of compilers for C, C++, and other languages, optimized for macOS ARM64." 5 | license: "GPL-3.0-or-later" 6 | homepage: "https://gcc.gnu.org" 7 | dependencies: 8 | build: 9 | # Core mathematical libraries required by GCC 10 | - gmp # GNU Multiple Precision Arithmetic Library (version 4.3.2+) 11 | - mpfr # GNU Multiple-Precision Floating-Point Library (version 3.1.0+) 12 | - mpc # GNU Multiple-Precision Complex Library (version 1.0.1+) 13 | - isl # Integer Set Library for Graphite loop optimizations (version 0.15+) 14 | - zstd # For LTO bytecode compression 15 | 16 | facts: 17 | build_triple: "aarch64-apple-darwin24" 18 | sdk_path: "/Library/Developer/CommandLineTools/SDKs/MacOSX.sdk" 19 | 20 | environment: 21 | defaults: true 22 | variables: 23 | LDFLAGS: "-L${PREFIX}/lib -L/usr/lib -Wl,-rpath,${PREFIX}/lib -Wl,-rpath,/usr/lib" 24 | CPPFLAGS: "-I${PREFIX}/include" 25 | DYLD_LIBRARY_PATH: "${PREFIX}/lib:/usr/lib" 26 | BOOT_LDFLAGS: "-Wl,-headerpad_max_install_names -Wl,-rpath,${PREFIX}/lib -Wl,-rpath,/usr/lib" 27 | # For GCC's build process 28 | BOOT_CFLAGS: "-O2" 29 | 30 | source: 31 | local: 32 | path: "." 33 | patches: 34 | - "gcc-15.1.0-darwin.patch" 35 | 36 | build: 37 | steps: 38 | # Create build directory and configure 39 | - shell: | 40 | mkdir -p build 41 | cd build && ../configure \ 42 | --prefix=${PREFIX} \ 43 | --build=${build_triple} \ 44 | --with-sysroot=${sdk_path} \ 45 | --with-native-system-header-dir=/usr/include \ 46 | --with-gmp=${PREFIX} \ 47 | --with-mpfr=${PREFIX} \ 48 | --with-mpc=${PREFIX} \ 49 | --with-isl=${PREFIX} \ 50 | --with-zstd=${PREFIX} \ 51 | --enable-languages=c,c++,objc,obj-c++,fortran \ 52 | --disable-multilib \ 53 | --enable-checking=release \ 54 | --with-gcc-major-version-only \ 55 | --with-system-zlib \ 56 | --disable-nls \ 57 | --enable-bootstrap 58 | 59 | # Build with proper BOOT_LDFLAGS 60 | - shell: | 61 | cd build && make -j8 BOOT_LDFLAGS="${BOOT_LDFLAGS}" 62 | 63 | # Install 64 | - shell: | 65 | cd build && make install 66 | 67 | post: 68 | fix_permissions: true 69 | -------------------------------------------------------------------------------- /crates/builder/src/artifact_qa/scanners/staging.rs: -------------------------------------------------------------------------------- 1 | //! Validator that checks if the staging directory contains any files. 2 | //! 3 | //! This is a fundamental check that runs for all build system profiles. 4 | //! 
An empty staging directory indicates that the build succeeded but no files 5 | //! were installed, which usually means the install step failed or was skipped. 6 | 7 | use crate::artifact_qa::{diagnostics::DiagnosticCollector, reports::Report, traits::Validator}; 8 | use crate::{BuildContext, BuildEnvironment}; 9 | use sps2_errors::Error; 10 | use std::path::Path; 11 | 12 | pub struct StagingScanner; 13 | 14 | impl crate::artifact_qa::traits::Action for StagingScanner { 15 | const NAME: &'static str = "Staging directory scanner"; 16 | 17 | async fn run( 18 | _ctx: &BuildContext, 19 | env: &BuildEnvironment, 20 | _findings: Option<&DiagnosticCollector>, 21 | ) -> Result { 22 | let staging_dir = env.staging_dir(); 23 | 24 | // Check if staging directory exists and has any content 25 | if !staging_dir.exists() { 26 | let mut report = Report::default(); 27 | report.errors.push(format!( 28 | "Staging directory does not exist: {}", 29 | staging_dir.display() 30 | )); 31 | return Ok(report); 32 | } 33 | 34 | // Check if staging directory is empty 35 | if is_directory_empty(staging_dir)? { 36 | let mut report = Report::default(); 37 | report.errors.push(format!( 38 | "Staging directory is empty: {}. This usually indicates that the build's install step failed or was not run. Check the build recipe for proper 'make install' or equivalent installation commands.", 39 | staging_dir.display() 40 | )); 41 | return Ok(report); 42 | } 43 | 44 | // Staging directory has content - success 45 | Ok(Report::ok()) 46 | } 47 | } 48 | 49 | impl Validator for StagingScanner {} 50 | 51 | /// Check if a directory is empty (has no files or subdirectories) 52 | fn is_directory_empty(dir: &Path) -> Result { 53 | let mut entries = 54 | std::fs::read_dir(dir).map_err(|e| sps2_errors::BuildError::ValidationFailed { 55 | message: format!("Failed to read staging directory {}: {}", dir.display(), e), 56 | })?; 57 | 58 | // If we can get even one entry, the directory is not empty 59 | Ok(entries.next().is_none()) 60 | } 61 | -------------------------------------------------------------------------------- /crates/events/src/events/qa.rs: -------------------------------------------------------------------------------- 1 | use serde::{Deserialize, Serialize}; 2 | use sps2_types::Version; 3 | use std::path::PathBuf; 4 | 5 | use super::FailureContext; 6 | 7 | /// Target package being evaluated by QA. 8 | #[derive(Debug, Clone, Serialize, Deserialize)] 9 | pub struct QaTarget { 10 | pub package: String, 11 | pub version: Version, 12 | } 13 | 14 | /// QA level applied to the pipeline. 15 | #[derive(Debug, Clone, Serialize, Deserialize)] 16 | #[serde(rename_all = "snake_case")] 17 | pub enum QaLevel { 18 | Fast, 19 | Standard, 20 | Strict, 21 | Custom(String), 22 | } 23 | 24 | /// Status for an individual QA check. 25 | #[derive(Debug, Clone, Serialize, Deserialize)] 26 | #[serde(rename_all = "snake_case")] 27 | pub enum QaCheckStatus { 28 | Passed, 29 | Failed, 30 | Skipped, 31 | } 32 | 33 | /// Severity for QA findings. 34 | #[derive(Debug, Clone, Serialize, Deserialize)] 35 | #[serde(rename_all = "snake_case")] 36 | pub enum QaSeverity { 37 | Info, 38 | Warning, 39 | Error, 40 | Critical, 41 | } 42 | 43 | /// Individual finding emitted by a QA check. 
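/// With serde, a finding that carries no file/line context serializes as just
/// `{"message":"...","severity":"warning"}` (illustrative), because the optional
/// fields are skipped when `None`.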
44 | #[derive(Debug, Clone, Serialize, Deserialize)] 45 | pub struct QaFinding { 46 | pub message: String, 47 | pub severity: QaSeverity, 48 | #[serde(skip_serializing_if = "Option::is_none")] 49 | pub file: Option, 50 | #[serde(skip_serializing_if = "Option::is_none")] 51 | pub line: Option, 52 | } 53 | 54 | /// Summary emitted after a QA check completes. 55 | #[derive(Debug, Clone, Serialize, Deserialize)] 56 | pub struct QaCheckSummary { 57 | pub name: String, 58 | pub category: String, 59 | pub status: QaCheckStatus, 60 | #[serde(skip_serializing_if = "Option::is_none")] 61 | pub duration_ms: Option, 62 | #[serde(skip_serializing_if = "Vec::is_empty")] 63 | pub findings: Vec, 64 | } 65 | 66 | /// QA events consumed by CLI/logging pipelines. 67 | #[derive(Debug, Clone, Serialize, Deserialize)] 68 | #[serde(tag = "type", rename_all = "snake_case")] 69 | pub enum QaEvent { 70 | PipelineStarted { 71 | target: QaTarget, 72 | level: QaLevel, 73 | }, 74 | PipelineCompleted { 75 | target: QaTarget, 76 | total_checks: usize, 77 | passed: usize, 78 | failed: usize, 79 | duration_ms: u64, 80 | }, 81 | PipelineFailed { 82 | target: QaTarget, 83 | failure: FailureContext, 84 | }, 85 | CheckEvaluated { 86 | target: QaTarget, 87 | summary: QaCheckSummary, 88 | }, 89 | } 90 | -------------------------------------------------------------------------------- /crates/platform/src/binary/mod.rs: -------------------------------------------------------------------------------- 1 | //! Binary operations for macOS platform (install_name_tool, otool, codesign) 2 | 3 | use async_trait::async_trait; 4 | use sps2_errors::PlatformError; 5 | use std::path::Path; 6 | 7 | use crate::core::PlatformContext; 8 | 9 | /// Trait for binary manipulation operations specific to macOS 10 | #[async_trait] 11 | pub trait BinaryOperations: Send + Sync { 12 | /// Get the install name of a binary using otool -D 13 | async fn get_install_name( 14 | &self, 15 | ctx: &PlatformContext, 16 | binary: &Path, 17 | ) -> Result, PlatformError>; 18 | 19 | /// Set the install name of a binary using install_name_tool -id 20 | async fn set_install_name( 21 | &self, 22 | ctx: &PlatformContext, 23 | binary: &Path, 24 | name: &str, 25 | ) -> Result<(), PlatformError>; 26 | 27 | /// Get dependencies of a binary using otool -L 28 | async fn get_dependencies( 29 | &self, 30 | ctx: &PlatformContext, 31 | binary: &Path, 32 | ) -> Result, PlatformError>; 33 | 34 | /// Change a dependency reference using install_name_tool -change 35 | async fn change_dependency( 36 | &self, 37 | ctx: &PlatformContext, 38 | binary: &Path, 39 | old: &str, 40 | new: &str, 41 | ) -> Result<(), PlatformError>; 42 | 43 | /// Add an rpath entry using install_name_tool -add_rpath 44 | async fn add_rpath( 45 | &self, 46 | ctx: &PlatformContext, 47 | binary: &Path, 48 | rpath: &str, 49 | ) -> Result<(), PlatformError>; 50 | 51 | /// Delete an rpath entry using install_name_tool -delete_rpath 52 | async fn delete_rpath( 53 | &self, 54 | ctx: &PlatformContext, 55 | binary: &Path, 56 | rpath: &str, 57 | ) -> Result<(), PlatformError>; 58 | 59 | /// Get rpath entries using otool -l 60 | async fn get_rpath_entries( 61 | &self, 62 | ctx: &PlatformContext, 63 | binary: &Path, 64 | ) -> Result, PlatformError>; 65 | 66 | /// Verify binary signature using codesign -vvv 67 | async fn verify_signature( 68 | &self, 69 | ctx: &PlatformContext, 70 | binary: &Path, 71 | ) -> Result; 72 | 73 | /// Sign binary using codesign 74 | async fn sign_binary( 75 | &self, 76 | ctx: &PlatformContext, 77 | 
binary: &Path, 78 | identity: Option<&str>, 79 | ) -> Result<(), PlatformError>; 80 | } 81 | -------------------------------------------------------------------------------- /crates/builder/src/environment/directories.rs: -------------------------------------------------------------------------------- 1 | //! Build directory structure management 2 | 3 | use super::core::BuildEnvironment; 4 | use sps2_errors::{BuildError, Error}; 5 | use sps2_events::{AppEvent, EventEmitter}; 6 | use tokio::fs; 7 | 8 | impl BuildEnvironment { 9 | /// Initialize the build environment 10 | /// 11 | /// # Errors 12 | /// 13 | /// Returns an error if directories cannot be created or environment setup fails. 14 | pub async fn initialize(&mut self) -> Result<(), Error> { 15 | self.emit_operation_started(format!( 16 | "Building {} {}", 17 | self.context.name, self.context.version 18 | )); 19 | 20 | // Create build directories with better error reporting 21 | fs::create_dir_all(&self.build_prefix) 22 | .await 23 | .map_err(|e| BuildError::Failed { 24 | message: format!( 25 | "Failed to create build prefix {}: {}", 26 | self.build_prefix.display(), 27 | e 28 | ), 29 | })?; 30 | 31 | fs::create_dir_all(&self.staging_dir) 32 | .await 33 | .map_err(|e| BuildError::Failed { 34 | message: format!( 35 | "Failed to create staging dir {}: {}", 36 | self.staging_dir.display(), 37 | e 38 | ), 39 | })?; 40 | 41 | // Set up environment variables 42 | self.setup_environment(); 43 | 44 | Ok(()) 45 | } 46 | 47 | /// Clean up build environment thoroughly 48 | /// 49 | /// # Errors 50 | /// 51 | /// Returns an error if directories cannot be removed during cleanup. 52 | pub async fn cleanup(&self) -> Result<(), Error> { 53 | // Remove any temporary build files in the build prefix 54 | let temp_dirs = vec!["src", "build", "tmp"]; 55 | for dir in temp_dirs { 56 | let temp_path = self.build_prefix.join(dir); 57 | if temp_path.exists() { 58 | fs::remove_dir_all(&temp_path).await?; 59 | } 60 | } 61 | 62 | self.emit_operation_completed( 63 | format!("Cleaned build environment for {}", self.context.name), 64 | true, 65 | ); 66 | 67 | Ok(()) 68 | } 69 | 70 | /// Send event if sender is available 71 | pub(crate) fn send_event(&self, event: AppEvent) { 72 | if let Some(sender) = self.event_sender() { 73 | sender.emit(event); 74 | } 75 | } 76 | } 77 | -------------------------------------------------------------------------------- /crates/builder/src/yaml/recipe.rs: -------------------------------------------------------------------------------- 1 | //! 
Recipe data structures 2 | 3 | use serde::{Deserialize, Serialize}; 4 | use sps2_types::RpathStyle; 5 | 6 | /// Recipe metadata collected from `metadata()` function 7 | #[derive(Debug, Clone, Default, Serialize, Deserialize)] 8 | pub struct RecipeMetadata { 9 | pub name: String, 10 | pub version: String, 11 | pub description: Option, 12 | pub homepage: Option, 13 | pub license: Option, 14 | pub runtime_deps: Vec, 15 | pub build_deps: Vec, 16 | } 17 | 18 | /// A build step from the `build()` function 19 | #[derive(Debug, Clone, Serialize, Deserialize)] 20 | pub enum BuildStep { 21 | Fetch { 22 | url: String, 23 | }, 24 | FetchMd5 { 25 | url: String, 26 | md5: String, 27 | }, 28 | FetchSha256 { 29 | url: String, 30 | sha256: String, 31 | }, 32 | FetchBlake3 { 33 | url: String, 34 | blake3: String, 35 | }, 36 | Extract, 37 | Git { 38 | url: String, 39 | ref_: String, 40 | }, 41 | ApplyPatch { 42 | path: String, 43 | }, 44 | AllowNetwork { 45 | enabled: bool, 46 | }, 47 | Configure { 48 | args: Vec, 49 | }, 50 | Make { 51 | args: Vec, 52 | }, 53 | Autotools { 54 | args: Vec, 55 | }, 56 | Cmake { 57 | args: Vec, 58 | }, 59 | Meson { 60 | args: Vec, 61 | }, 62 | Cargo { 63 | args: Vec, 64 | }, 65 | Go { 66 | args: Vec, 67 | }, 68 | Python { 69 | args: Vec, 70 | }, 71 | NodeJs { 72 | args: Vec, 73 | }, 74 | Command { 75 | program: String, 76 | args: Vec, 77 | }, 78 | SetEnv { 79 | key: String, 80 | value: String, 81 | }, 82 | WithDefaults, 83 | Install, 84 | // Cleanup staging directory 85 | Cleanup, 86 | // Copy source files 87 | Copy { 88 | src_path: Option, 89 | }, 90 | // Apply rpath patching to binaries and libraries 91 | PatchRpaths { 92 | style: RpathStyle, 93 | paths: Vec, 94 | }, 95 | // Fix executable permissions on binaries 96 | FixPermissions { 97 | paths: Vec, 98 | }, 99 | // Set build isolation level 100 | SetIsolation { 101 | level: u8, 102 | }, 103 | } 104 | -------------------------------------------------------------------------------- /crates/events/src/events/platform.rs: -------------------------------------------------------------------------------- 1 | use serde::{Deserialize, Serialize}; 2 | use std::path::PathBuf; 3 | 4 | use super::FailureContext; 5 | 6 | /// High-level category for a platform operation. 7 | #[derive(Debug, Clone, Serialize, Deserialize)] 8 | #[serde(rename_all = "snake_case")] 9 | pub enum PlatformOperationKind { 10 | Binary, 11 | Filesystem, 12 | Process, 13 | ToolDiscovery, 14 | } 15 | 16 | /// Descriptor for a process command execution. 17 | #[derive(Debug, Clone, Serialize, Deserialize)] 18 | pub struct ProcessCommandDescriptor { 19 | pub program: String, 20 | pub args: Vec, 21 | #[serde(skip_serializing_if = "Option::is_none")] 22 | pub cwd: Option, 23 | } 24 | 25 | /// Context describing the operation being performed. 26 | #[derive(Debug, Clone, Serialize, Deserialize)] 27 | pub struct PlatformOperationContext { 28 | pub kind: PlatformOperationKind, 29 | pub operation: String, 30 | #[serde(skip_serializing_if = "Option::is_none")] 31 | pub target: Option, 32 | #[serde(skip_serializing_if = "Option::is_none")] 33 | pub source: Option, 34 | #[serde(skip_serializing_if = "Option::is_none")] 35 | pub command: Option, 36 | } 37 | 38 | /// Optional metrics gathered for completed operations. 
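/// Every field is optional and omitted from serialized output when `None`, so a
/// short command might report only `{"duration_ms":12,"exit_code":0}` (illustrative values).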
39 | #[derive(Debug, Clone, Serialize, Deserialize)] 40 | pub struct PlatformOperationMetrics { 41 | #[serde(skip_serializing_if = "Option::is_none")] 42 | pub duration_ms: Option, 43 | #[serde(skip_serializing_if = "Option::is_none")] 44 | pub exit_code: Option, 45 | #[serde(skip_serializing_if = "Option::is_none")] 46 | pub stdout_bytes: Option, 47 | #[serde(skip_serializing_if = "Option::is_none")] 48 | pub stderr_bytes: Option, 49 | #[serde(skip_serializing_if = "Option::is_none")] 50 | pub changes: Option>, 51 | } 52 | 53 | /// Platform events surfaced to consumers. 54 | #[derive(Debug, Clone, Serialize, Deserialize)] 55 | #[serde(tag = "type", rename_all = "snake_case")] 56 | pub enum PlatformEvent { 57 | OperationStarted { 58 | context: PlatformOperationContext, 59 | }, 60 | OperationCompleted { 61 | context: PlatformOperationContext, 62 | #[serde(skip_serializing_if = "Option::is_none")] 63 | metrics: Option, 64 | }, 65 | OperationFailed { 66 | context: PlatformOperationContext, 67 | failure: FailureContext, 68 | #[serde(skip_serializing_if = "Option::is_none")] 69 | metrics: Option, 70 | }, 71 | } 72 | -------------------------------------------------------------------------------- /crates/net/src/download/resume.rs: -------------------------------------------------------------------------------- 1 | //! Resumable download logic for package downloads 2 | 3 | use super::config::PackageDownloadConfig; 4 | use sps2_errors::Error; 5 | use std::path::Path; 6 | use tokio::fs as tokio_fs; 7 | use tokio::io::AsyncReadExt; 8 | 9 | /// Get the offset for resuming a download 10 | /// 11 | /// This function checks if a partial download exists, is large enough to resume, 12 | /// and validates its integrity before allowing resumption. 13 | /// 14 | /// # Errors 15 | /// 16 | /// Returns an error if file operations fail. 
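///
/// Illustrative behaviour, assuming a 64 KiB `min_chunk_size`: a 10 KiB partial file
/// is deleted and the download restarts at offset 0, while a 1 MiB partial whose
/// existing bytes hash cleanly resumes from byte 1_048_576.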
17 | pub(super) async fn get_resume_offset( 18 | config: &PackageDownloadConfig, 19 | dest_path: &Path, 20 | ) -> Result { 21 | match tokio_fs::metadata(dest_path).await { 22 | Ok(metadata) => { 23 | let size = metadata.len(); 24 | if size >= config.min_chunk_size { 25 | // Validate the integrity of the partial file by hashing what we have 26 | // If this fails, the partial file is likely corrupted 27 | match calculate_existing_file_hash(config, dest_path, size).await { 28 | Ok(_hasher) => { 29 | // Validation successful, can resume from this offset 30 | Ok(size) 31 | } 32 | Err(_e) => { 33 | // Partial file is corrupted, delete and start over 34 | let _ = tokio_fs::remove_file(dest_path).await; 35 | Ok(0) 36 | } 37 | } 38 | } else { 39 | // File is too small to resume, start over 40 | let _ = tokio_fs::remove_file(dest_path).await; 41 | Ok(0) 42 | } 43 | } 44 | Err(_) => Ok(0), // File doesn't exist 45 | } 46 | } 47 | 48 | /// Calculate hash of existing file content for resume 49 | pub(super) async fn calculate_existing_file_hash( 50 | config: &PackageDownloadConfig, 51 | dest_path: &Path, 52 | bytes: u64, 53 | ) -> Result { 54 | let mut file = tokio_fs::File::open(dest_path).await?; 55 | let mut hasher = blake3::Hasher::new(); 56 | let mut buffer = vec![0; config.buffer_size]; 57 | let mut remaining = bytes; 58 | 59 | while remaining > 0 { 60 | let to_read = 61 | usize::try_from(std::cmp::min(buffer.len() as u64, remaining)).unwrap_or(buffer.len()); 62 | let bytes_read = file.read(&mut buffer[..to_read]).await?; 63 | 64 | if bytes_read == 0 { 65 | break; 66 | } 67 | 68 | hasher.update(&buffer[..bytes_read]); 69 | remaining -= bytes_read as u64; 70 | } 71 | 72 | Ok(hasher) 73 | } 74 | -------------------------------------------------------------------------------- /crates/builder/src/artifact_qa/patchers/pkgconfig.rs: -------------------------------------------------------------------------------- 1 | //! 
Fixes *.pc and *Config.cmake so downstream builds never see /opt/pm/build/… 2 | 3 | use crate::artifact_qa::{reports::Report, traits::Patcher}; 4 | use crate::{BuildContext, BuildEnvironment}; 5 | use ignore::WalkBuilder; 6 | use sps2_errors::Error; 7 | 8 | pub struct PkgConfigPatcher; 9 | impl crate::artifact_qa::traits::Action for PkgConfigPatcher { 10 | const NAME: &'static str = "pkg‑config / CMake patcher"; 11 | 12 | async fn run( 13 | _ctx: &BuildContext, 14 | env: &BuildEnvironment, 15 | _findings: Option<&crate::artifact_qa::diagnostics::DiagnosticCollector>, 16 | ) -> Result<Report, Error> { 17 | let build_prefix = env.build_prefix().to_string_lossy().into_owned(); 18 | let build_src = format!("{build_prefix}/src"); 19 | let build_base = "/opt/pm/build"; 20 | let actual = sps2_config::fixed_paths::LIVE_DIR; 21 | 22 | let pat = WalkBuilder::new(env.staging_dir()) 23 | .build() 24 | .filter_map(Result::ok) 25 | .map(ignore::DirEntry::into_path) 26 | .filter(|p| { 27 | p.is_file() && { 28 | p.extension().and_then(|e| e.to_str()) == Some("pc") 29 | || p.file_name() 30 | .and_then(|n| n.to_str()) 31 | .is_some_and(|n| n.ends_with("Config.cmake")) 32 | } 33 | }) 34 | .collect::<Vec<_>>(); 35 | 36 | let mut changed = Vec::new(); 37 | for f in pat { 38 | if let Ok(s) = std::fs::read_to_string(&f) { 39 | let mut modified = false; 40 | let mut result = s.clone(); 41 | 42 | // Replace build paths in order of specificity (most specific first) 43 | if result.contains(&build_src) { 44 | result = result.replace(&build_src, actual); 45 | modified = true; 46 | } 47 | if result.contains(&build_prefix) { 48 | result = result.replace(&build_prefix, actual); 49 | modified = true; 50 | } 51 | if result.contains(build_base) { 52 | result = result.replace(build_base, actual); 53 | modified = true; 54 | } 55 | 56 | if modified { 57 | std::fs::write(&f, result)?; 58 | changed.push(f); 59 | } 60 | } 61 | } 62 | 63 | Ok(Report { 64 | changed_files: changed, 65 | ..Default::default() 66 | }) 67 | } 68 | } 69 | impl Patcher for PkgConfigPatcher {} 70 | -------------------------------------------------------------------------------- /crates/events/src/events/state.rs: -------------------------------------------------------------------------------- 1 | use serde::{Deserialize, Serialize}; 2 | use sps2_types::StateId; 3 | 4 | use super::FailureContext; 5 | 6 | /// Context describing a state transition. 7 | #[derive(Debug, Clone, Serialize, Deserialize)] 8 | pub struct StateTransitionContext { 9 | pub operation: String, 10 | #[serde(skip_serializing_if = "Option::is_none")] 11 | pub source: Option<StateId>, 12 | pub target: StateId, 13 | } 14 | 15 | /// Optional summary for completed transitions. 16 | #[derive(Debug, Clone, Serialize, Deserialize)] 17 | pub struct TransitionSummary { 18 | #[serde(skip_serializing_if = "Option::is_none")] 19 | pub duration_ms: Option<u64>, 20 | } 21 | 22 | /// Context for rollback operations. 23 | #[derive(Debug, Clone, Serialize, Deserialize)] 24 | pub struct RollbackContext { 25 | pub from: StateId, 26 | pub to: StateId, 27 | } 28 | 29 | /// Optional summary for completed rollbacks. 30 | #[derive(Debug, Clone, Serialize, Deserialize)] 31 | pub struct RollbackSummary { 32 | #[serde(skip_serializing_if = "Option::is_none")] 33 | pub duration_ms: Option<u64>, 34 | } 35 | 36 | /// Summary for cleanup operations.
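///
/// `None` fields are omitted when serialized, so an in-progress cleanup can
/// report only `planned_states`. A hedged, illustrative JSON shape (the
/// numbers are made up):
///
/// ```json
/// { "planned_states": 4, "removed_states": 2, "space_freed_bytes": 1048576, "duration_ms": 87 }
/// ```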
37 | #[derive(Debug, Clone, Serialize, Deserialize)] 38 | pub struct CleanupSummary { 39 | pub planned_states: usize, 40 | #[serde(skip_serializing_if = "Option::is_none")] 41 | pub removed_states: Option<usize>, 42 | #[serde(skip_serializing_if = "Option::is_none")] 43 | pub space_freed_bytes: Option<u64>, 44 | #[serde(skip_serializing_if = "Option::is_none")] 45 | pub duration_ms: Option<u64>, 46 | } 47 | 48 | /// State events emitted by state manager and install flows. 49 | #[derive(Debug, Clone, Serialize, Deserialize)] 50 | #[serde(tag = "type", rename_all = "snake_case")] 51 | pub enum StateEvent { 52 | TransitionStarted { 53 | context: StateTransitionContext, 54 | }, 55 | TransitionCompleted { 56 | context: StateTransitionContext, 57 | #[serde(skip_serializing_if = "Option::is_none")] 58 | summary: Option<TransitionSummary>, 59 | }, 60 | TransitionFailed { 61 | context: StateTransitionContext, 62 | failure: FailureContext, 63 | }, 64 | RollbackStarted { 65 | context: RollbackContext, 66 | }, 67 | RollbackCompleted { 68 | context: RollbackContext, 69 | #[serde(skip_serializing_if = "Option::is_none")] 70 | summary: Option<RollbackSummary>, 71 | }, 72 | RollbackFailed { 73 | context: RollbackContext, 74 | failure: FailureContext, 75 | }, 76 | CleanupStarted { 77 | summary: CleanupSummary, 78 | }, 79 | CleanupCompleted { 80 | summary: CleanupSummary, 81 | }, 82 | CleanupFailed { 83 | summary: CleanupSummary, 84 | failure: FailureContext, 85 | }, 86 | } 87 | -------------------------------------------------------------------------------- /crates/config/src/resources_semaphore.rs: -------------------------------------------------------------------------------- 1 | //! Semaphore utilities for resource management 2 | //! 3 | //! This module provides helper functions for managing semaphores with 4 | //! consistent error handling across the sps2 package manager. 5 | 6 | use sps2_errors::{Error, InstallError}; 7 | use std::sync::Arc; 8 | use tokio::sync::{OwnedSemaphorePermit, Semaphore}; 9 | 10 | /// Acquire a semaphore permit with proper error handling 11 | /// 12 | /// This helper function provides consistent error handling for semaphore 13 | /// acquisition across all modules in sps2. 14 | /// 15 | /// # Arguments 16 | /// 17 | /// * `semaphore` - The semaphore to acquire a permit from 18 | /// * `operation` - Description of the operation for error reporting 19 | /// 20 | /// # Errors 21 | /// 22 | /// Returns an error if the semaphore is closed or acquisition fails 23 | pub async fn acquire_semaphore_permit( 24 | semaphore: Arc<Semaphore>, 25 | operation: &str, 26 | ) -> Result<OwnedSemaphorePermit, Error> { 27 | semaphore.clone().acquire_owned().await.map_err(|_| { 28 | InstallError::ConcurrencyError { 29 | message: format!("failed to acquire semaphore for {operation}"), 30 | } 31 | .into() 32 | }) 33 | } 34 | 35 | /// Try to acquire a semaphore permit without waiting 36 | /// 37 | /// This helper function attempts to acquire a semaphore permit immediately 38 | /// without blocking. Useful for checking resource availability. 39 | /// 40 | /// # Arguments 41 | /// 42 | /// * `semaphore` - The semaphore to try to acquire a permit from 43 | /// 44 | /// # Returns 45 | /// 46 | /// Returns `Ok(Some(permit))` if successful, `Ok(None)` if would block, 47 | /// or an error if the semaphore is closed. 48 | /// 49 | /// # Errors 50 | /// 51 | /// Returns an error if the semaphore is closed.
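///
/// # Example (illustrative)
///
/// A minimal sketch combining the helpers in this module; the permit count and
/// the operation label are arbitrary values chosen for illustration.
///
/// ```ignore
/// let semaphore = create_semaphore(4);
/// // Check availability without blocking, then fall back to waiting.
/// let permit = match try_acquire_semaphore_permit(&semaphore)? {
///     Some(permit) => permit,
///     None => acquire_semaphore_permit(semaphore.clone(), "package download").await?,
/// };
/// // The permit is returned to the semaphore when it is dropped.
/// drop(permit);
/// ```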
52 | pub fn try_acquire_semaphore_permit( 53 | semaphore: &Arc<Semaphore>, 54 | ) -> Result<Option<OwnedSemaphorePermit>, Error> { 55 | match semaphore.clone().try_acquire_owned() { 56 | Ok(permit) => Ok(Some(permit)), 57 | Err(tokio::sync::TryAcquireError::NoPermits) => Ok(None), 58 | Err(tokio::sync::TryAcquireError::Closed) => Err(InstallError::ConcurrencyError { 59 | message: "semaphore is closed".to_string(), 60 | } 61 | .into()), 62 | } 63 | } 64 | 65 | /// Create a semaphore with a specified number of permits 66 | /// 67 | /// This is a convenience function for creating semaphores with consistent 68 | /// error handling and documentation. 69 | /// 70 | /// # Arguments 71 | /// 72 | /// * `permits` - Number of permits the semaphore should have 73 | /// 74 | /// # Returns 75 | /// 76 | /// Returns an Arc-wrapped semaphore for shared ownership 77 | #[must_use] 78 | pub fn create_semaphore(permits: usize) -> Arc<Semaphore> { 79 | Arc::new(Semaphore::new(permits)) 80 | } 81 | -------------------------------------------------------------------------------- /crates/builder/src/packaging/manifest.rs: -------------------------------------------------------------------------------- 1 | //! Package manifest and SBOM coordination 2 | //! NOTE: SBOM generation is currently disabled by callers (soft-disabled). 3 | 4 | // use crate::utils::events::send_event; 5 | use crate::yaml::RecipeMetadata; 6 | use crate::{BuildContext, BuildEnvironment}; 7 | // use sps2_errors::Error; 8 | use sps2_types::Manifest; 9 | 10 | /// Create package manifest 11 | #[must_use] 12 | pub fn create_manifest( 13 | context: &BuildContext, 14 | runtime_deps: Vec<String>, 15 | recipe_metadata: &RecipeMetadata, 16 | environment: &BuildEnvironment, 17 | ) -> Manifest { 18 | use sps2_types::{ManifestDependencies as Dependencies, ManifestPackageInfo as PackageInfo}; 19 | 20 | // Generate Python metadata if this is a Python package 21 | let python_metadata = if environment.used_build_systems().contains("python") { 22 | Some(create_python_metadata_from_env(environment)) 23 | } else { 24 | None 25 | }; 26 | 27 | Manifest { 28 | format_version: sps2_types::PackageFormatVersion::CURRENT, 29 | package: PackageInfo { 30 | name: context.name.clone(), 31 | version: context.version.to_string(), 32 | revision: context.revision, 33 | arch: context.arch.clone(), 34 | description: recipe_metadata.description.clone(), 35 | homepage: recipe_metadata.homepage.clone(), 36 | license: recipe_metadata.license.clone(), 37 | legacy_compression: None, 38 | }, 39 | dependencies: Dependencies { 40 | runtime: runtime_deps, 41 | build: Vec::new(), // Build deps not included in final manifest 42 | }, 43 | python: python_metadata, 44 | } 45 | } 46 | 47 | /// Create Python metadata for builder-centric approach 48 | fn create_python_metadata_from_env( 49 | environment: &BuildEnvironment, 50 | ) -> sps2_types::PythonPackageMetadata { 51 | use std::collections::HashMap; 52 | 53 | // Extract metadata from build environment 54 | let requires_python = environment 55 | .get_extra_env("PYTHON_REQUIRES_VERSION") 56 | .unwrap_or_else(|| ">=3.8".to_string()); 57 | 58 | let executables = environment 59 | .get_extra_env("PYTHON_ENTRY_POINTS") 60 | .and_then(|json_str| serde_json::from_str::<HashMap<String, String>>(&json_str).ok()) 61 | .unwrap_or_default(); 62 | 63 | // For builder-centric approach, wheel_file and requirements_file are not used 64 | // since the builder has already installed the package to staging 65 | sps2_types::PythonPackageMetadata { 66 | requires_python, 67 | wheel_file: String::new(), // Not used in builder-centric approach 68 |
requirements_file: String::new(), // Not used in builder-centric approach 69 | executables, 70 | } 71 | } 72 | -------------------------------------------------------------------------------- /crates/events/src/progress/config.rs: -------------------------------------------------------------------------------- 1 | //! Configuration and core types for progress tracking 2 | 3 | use serde::{Deserialize, Serialize}; 4 | use std::time::Duration; 5 | 6 | /// Configuration for progress tracking algorithms 7 | #[derive(Debug, Clone)] 8 | pub struct ProgressConfig { 9 | /// Number of samples for moving average (default: 10) 10 | pub speed_window_size: usize, 11 | /// Maximum samples to retain in history (default: 1000) 12 | pub max_history_samples: usize, 13 | /// Update frequency for smooth UI (default: 100ms) 14 | pub update_interval: Duration, 15 | /// Outlier rejection multiplier (default: 2.0) 16 | pub outlier_threshold: f64, 17 | /// Exponential moving average alpha (default: 0.3) 18 | pub ema_alpha: f64, 19 | /// Minimum samples needed for reliable ETA (default: 3) 20 | pub min_samples_for_eta: usize, 21 | } 22 | 23 | impl Default for ProgressConfig { 24 | fn default() -> Self { 25 | Self { 26 | speed_window_size: 10, 27 | max_history_samples: 1000, 28 | update_interval: Duration::from_millis(100), 29 | outlier_threshold: 2.0, 30 | ema_alpha: 0.3, 31 | min_samples_for_eta: 3, 32 | } 33 | } 34 | } 35 | 36 | /// A phase in a multi-stage operation 37 | #[derive(Debug, Clone, Serialize, Deserialize)] 38 | pub struct ProgressPhase { 39 | /// Human-readable name of the phase 40 | pub name: String, 41 | /// Weight of this phase relative to others (0.0-1.0) 42 | pub weight: f64, 43 | /// Optional estimated duration for this phase 44 | pub estimated_duration: Option<Duration>, 45 | /// Optional human-readable description of the phase 46 | pub description: Option<String>, 47 | } 48 | 49 | impl ProgressPhase { 50 | /// Create a new progress phase 51 | #[must_use] 52 | pub fn new(name: &str, description: &str) -> Self { 53 | Self { 54 | name: name.to_string(), 55 | weight: 1.0, // Default equal weight 56 | estimated_duration: None, 57 | description: Some(description.to_string()), 58 | } 59 | } 60 | 61 | /// Set the weight for this phase 62 | #[must_use] 63 | pub fn with_weight(mut self, weight: f64) -> Self { 64 | self.weight = weight; 65 | self 66 | } 67 | 68 | /// Set the estimated duration for this phase 69 | #[must_use] 70 | pub fn with_duration(mut self, duration: Duration) -> Self { 71 | self.estimated_duration = Some(duration); 72 | self 73 | } 74 | } 75 | 76 | /// Direction of speed trend 77 | #[derive(Debug, Clone, Copy, PartialEq, Eq)] 78 | pub enum TrendDirection { 79 | /// Speed is increasing 80 | Accelerating, 81 | /// Speed is decreasing 82 | Decelerating, 83 | /// Speed is relatively stable 84 | Stable, 85 | } 86 | -------------------------------------------------------------------------------- /crates/errors/src/state.rs: -------------------------------------------------------------------------------- 1 | //!
State management error types 2 | 3 | use std::borrow::Cow; 4 | 5 | use crate::UserFacingError; 6 | use thiserror::Error; 7 | 8 | #[derive(Debug, Clone, Error)] 9 | #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] 10 | #[non_exhaustive] 11 | pub enum StateError { 12 | #[error("invalid state transition from {from} to {to}")] 13 | InvalidTransition { from: String, to: String }, 14 | 15 | #[error("state conflict: {message}")] 16 | Conflict { message: String }, 17 | 18 | #[error("state not found: {id}")] 19 | StateNotFound { id: String }, 20 | 21 | #[error("database error: {message}")] 22 | DatabaseError { message: String }, 23 | 24 | #[error("transaction failed: {message}")] 25 | TransactionFailed { message: String }, 26 | 27 | #[error("state corrupted: {message}")] 28 | StateCorrupted { message: String }, 29 | 30 | #[error("rollback failed: {message}")] 31 | RollbackFailed { message: String }, 32 | 33 | #[error("active state missing")] 34 | ActiveStateMissing, 35 | 36 | #[error("migration failed: {message}")] 37 | MigrationFailed { message: String }, 38 | } 39 | 40 | impl UserFacingError for StateError { 41 | fn user_message(&self) -> Cow<'_, str> { 42 | Cow::Owned(self.to_string()) 43 | } 44 | 45 | fn user_hint(&self) -> Option<&'static str> { 46 | match self { 47 | Self::Conflict { .. } => Some("Retry once the concurrent operation has completed."), 48 | Self::StateNotFound { .. } => Some("List available states with `sps2 history --all`."), 49 | Self::ActiveStateMissing => { 50 | Some("Run `sps2 check-health` to rebuild the active state.") 51 | } 52 | Self::MigrationFailed { .. } => { 53 | Some("Review the migration logs and rerun `sps2 check-health`.") 54 | } 55 | _ => None, 56 | } 57 | } 58 | 59 | fn is_retryable(&self) -> bool { 60 | matches!(self, Self::Conflict { .. } | Self::TransactionFailed { .. }) 61 | } 62 | 63 | fn user_code(&self) -> Option<&'static str> { 64 | let code = match self { 65 | Self::InvalidTransition { .. } => "state.invalid_transition", 66 | Self::Conflict { .. } => "state.conflict", 67 | Self::StateNotFound { .. } => "state.state_not_found", 68 | Self::DatabaseError { .. } => "state.database_error", 69 | Self::TransactionFailed { .. } => "state.transaction_failed", 70 | Self::StateCorrupted { .. } => "state.state_corrupted", 71 | Self::RollbackFailed { .. } => "state.rollback_failed", 72 | Self::ActiveStateMissing => "state.active_state_missing", 73 | Self::MigrationFailed { .. } => "state.migration_failed", 74 | }; 75 | Some(code) 76 | } 77 | } 78 | -------------------------------------------------------------------------------- /crates/builder/src/artifact_qa/patchers/headers.rs: -------------------------------------------------------------------------------- 1 | //! Converts absolute include paths in headers to form. 
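//!
//! Illustrative example (hypothetical paths): a staged header containing
//! `#include "/opt/pm/build/<pkg>/include/foo/bar.h"` is rewritten to the
//! relative form `#include "include/foo/bar.h"`, so installed headers never
//! point back into the temporary build prefix.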
2 | 3 | use crate::artifact_qa::{reports::Report, traits::Patcher}; 4 | use crate::{BuildContext, BuildEnvironment}; 5 | use ignore::WalkBuilder; 6 | use regex::Regex; 7 | use sps2_errors::Error; 8 | 9 | pub struct HeaderPatcher; 10 | impl crate::artifact_qa::traits::Action for HeaderPatcher { 11 | const NAME: &'static str = "Header include‑fixer"; 12 | 13 | async fn run( 14 | _ctx: &BuildContext, 15 | env: &BuildEnvironment, 16 | _findings: Option<&crate::artifact_qa::diagnostics::DiagnosticCollector>, 17 | ) -> Result<Report, Error> { 18 | let build_prefix = env.build_prefix().to_string_lossy().into_owned(); 19 | let build_src = format!("{build_prefix}/src"); 20 | let build_base = "/opt/pm/build"; 21 | 22 | // Create regex for all build paths 23 | let re = Regex::new(&format!( 24 | r#"#\s*include\s*"({}|{}|{})[^"]+""#, 25 | regex::escape(&build_src), 26 | regex::escape(&build_prefix), 27 | regex::escape(build_base) 28 | )) 29 | .unwrap(); 30 | 31 | let mut changed = Vec::new(); 32 | for dir in ["include", "Headers"] { 33 | let root = env.staging_dir().join(dir); 34 | if !root.exists() { 35 | continue; 36 | } 37 | for entry in WalkBuilder::new(&root).build().flatten() { 38 | let p = entry.into_path(); 39 | if p.is_file() { 40 | if let Ok(src) = std::fs::read_to_string(&p) { 41 | if re.is_match(&src) { 42 | let repl = re.replace_all(&src, |caps: &regex::Captures| { 43 | // naive: just strip the prefix and keep quotes 44 | let full = &caps.get(0).unwrap().as_str()[0..]; 45 | let inner = full.trim_start_matches("#include ").trim(); 46 | let stripped = inner 47 | .trim_matches('"') 48 | .trim_start_matches(&build_src) 49 | .trim_start_matches(&build_prefix) 50 | .trim_start_matches(build_base) 51 | .trim_start_matches('/'); 52 | format!("#include \"{stripped}\"") 53 | }); 54 | std::fs::write(&p, repl.as_bytes())?; 55 | changed.push(p); 56 | } 57 | } 58 | } 59 | } 60 | } 61 | Ok(Report { 62 | changed_files: changed, 63 | ..Default::default() 64 | }) 65 | } 66 | } 67 | impl Patcher for HeaderPatcher {} 68 | -------------------------------------------------------------------------------- /recipes/llvm-20.1.7.yml: -------------------------------------------------------------------------------- 1 | metadata: 2 | name: llvm 3 | version: "20.1.7" 4 | description: "The LLVM Compiler Infrastructure - a collection of modular and reusable compiler and toolchain technologies" 5 | homepage: "https://llvm.org" 6 | license: "Apache-2.0 WITH LLVM-exception" 7 | dependencies: 8 | runtime: 9 | - zlib 10 | - zstd 11 | - libxml2 12 | build: 13 | - cmake 14 | - ninja 15 | - python 16 | - zlib 17 | - zstd 18 | - libxml2 19 | 20 | facts: 21 | # Apple Silicon specific configuration 22 | target_triple: "aarch64-apple-darwin24" 23 | sdk_path: "/Library/Developer/CommandLineTools/SDKs/MacOSX.sdk" 24 | # LLVM projects to build - core LLVM + Clang + essential tools 25 | llvm_projects: "clang;clang-tools-extra;lld;lldb" 26 | # Targets to build - focus on ARM64 and x86_64 for cross-compilation 27 | llvm_targets: "AArch64;X86;WebAssembly" 28 | 29 | environment: 30 | defaults: true 31 | variables: 32 | # Optimize for Apple Silicon 33 | CMAKE_OSX_ARCHITECTURES: "arm64" 34 | # Use system SDK 35 | CMAKE_OSX_SYSROOT: "${sdk_path}" 36 | # Ensure proper linking on macOS 37 | LDFLAGS: "-L${PREFIX}/lib -Wl,-rpath,${PREFIX}/lib" 38 | CPPFLAGS: "-I${PREFIX}/include" 39 | # Memory optimization for build process 40 | LLVM_PARALLEL_LINK_JOBS: "2" 41 | 42 | source: 43 | fetch: 44 | url:
"https://github.com/llvm/llvm-project/releases/download/llvmorg-20.1.7/llvm-project-20.1.7.src.tar.xz" 45 | 46 | build: 47 | steps: 48 | - shell: | 49 | cd llvm 50 | mkdir -p build 51 | cd build 52 | cmake .. \ 53 | -GNinja \ 54 | -DCMAKE_BUILD_TYPE=Release \ 55 | -DCMAKE_INSTALL_PREFIX=${PREFIX} \ 56 | -DLLVM_DEFAULT_TARGET_TRIPLE=${target_triple} \ 57 | -DLLVM_TARGETS_TO_BUILD="${llvm_targets}" \ 58 | -DLLVM_HOST_TRIPLE=${target_triple} \ 59 | -DLLVM_ENABLE_PROJECTS="${llvm_projects}" \ 60 | -DLLDB_USE_SYSTEM_DEBUGSERVER=ON \ 61 | -DLLVM_ENABLE_ASSERTIONS=OFF \ 62 | -DLLVM_ENABLE_EXPENSIVE_CHECKS=OFF \ 63 | -DLLVM_OPTIMIZED_TABLEGEN=ON \ 64 | -DBUILD_SHARED_LIBS=OFF \ 65 | -DLLVM_BUILD_LLVM_DYLIB=ON \ 66 | -DLLVM_LINK_LLVM_DYLIB=ON \ 67 | -DLLVM_ENABLE_ZLIB=ON \ 68 | -DLLVM_ENABLE_ZSTD=ON \ 69 | -DLLVM_ENABLE_LIBXML2=ON \ 70 | -DLLVM_ENABLE_CURL=OFF \ 71 | -DLLVM_INCLUDE_EXAMPLES=OFF \ 72 | -DLLVM_INCLUDE_TESTS=OFF \ 73 | -DLLVM_INCLUDE_BENCHMARKS=OFF \ 74 | -DLLVM_INCLUDE_DOCS=OFF \ 75 | -DLLVM_CREATE_XCODE_TOOLCHAIN=OFF \ 76 | -DLLVM_EXTERNALIZE_DEBUGINFO=OFF \ 77 | -DLLVM_PARALLEL_COMPILE_JOBS=4 \ 78 | -DLLVM_PARALLEL_LINK_JOBS=2 \ 79 | -DLLVM_PARALLEL_TABLEGEN_JOBS=2 80 | ninja 81 | ninja install 82 | 83 | post: 84 | fix_permissions: true 85 | patch_rpaths: default 86 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing to sps 2 | 3 | > We love merge requests! This guide shows the fastest path from **idea** to **merged code**. Skip straight to the *Quick‑Start* if you just want to get going, or dive into the details below. 4 | 5 | --- 6 | 7 | ## ⏩ Quick‑Start 8 | 9 | ### 1. Fork, clone & branch 10 | ```bash 11 | git clone https://github.com//sps.git 12 | cd sps 13 | git checkout -b feat/ 14 | ``` 15 | 16 | ### 2. Compile fast 17 | ```bash 18 | cargo check --workspace --all-targets 19 | ``` 20 | 21 | ### 3. Format (uses nightly toolchain) 22 | ```bash 23 | cargo fmt --all 24 | ``` 25 | 26 | ### 4. Lint 27 | ```bash 28 | cargo clippy --workspace --all-targets --all-features -- -D warnings 29 | ``` 30 | 31 | ### 7. Commit (Conventional + DCO) 32 | ```bash 33 | git commit -s -m "feat(core): add new fetcher" 34 | ``` 35 | 36 | ### 8. Push & open a Merge Request against `main` 37 | ```bash 38 | git push origin feat/ 39 | # then open a merge request on GitHub 40 | ``` 41 | 42 | ----- 43 | 44 | ## Coding Style 45 | 46 | * Complete Spec in [README.md](https://github.com/alexykn/sps2) 47 | 48 | ----- 49 | 50 | ## Git & Commits 51 | 52 | * **Fork** the repo on GitHub and add your remote if you haven’t already. 53 | * **Branches**: use feature branches like `feat/…`, `fix/…`, `docs/…`, `test/…`. 54 | * **Conventional Commits** preferred (`feat(core): add bottle caching`). 55 | * **DCO**: add `-s` flag (`git commit -s …`). 56 | * Keep commits atomic; squash fix‑ups before marking the MR ready. 57 | 58 | ----- 59 | 60 | ## Merge‑Request Flow 61 | 62 | 1. Sync with `main`; no rebase. 63 | 2. Ensure your code is formatted correctly with `cargo fmt --all`. 64 | 3. Ensure CI is green (build, fmt check, clippy, tests on macOS using appropriate toolchains). 65 | 4. Fill out the MR template; explain *why* + *how*. 66 | 5. Respond to review comments promptly – we’re friendly, promise! 67 | 6. Maintainers will *Squash & Merge* (unless history is already clean). 68 | 69 | ----- 70 | 71 | ## Reporting Issues 72 | 73 | * **Bug** – include repro steps, expected vs. 
actual, macOS version & architecture (Intel/ARM). 74 | * **Feature** – explain use‑case, alternatives, and willingness to implement. 75 | * **Security** – email maintainers privately; do **not** file a public issue. 76 | 77 | ----- 78 | 79 | ## License & DCO 80 | 81 | By submitting code you agree to the BSD‑3‑Clause license and certify the [Developer Certificate of Origin][Developer Certificate of Origin]. 82 | 83 | ----- 84 | 85 | ## Code of Conduct 86 | 87 | We follow the [Contributor Covenant][Contributor Covenant]; be kind and inclusive. Report misconduct privately to the core team. 88 | 89 | ----- 90 | 91 | Happy coding – and thanks for making sps better! ✨ 92 | 93 | [rustup.rs]: https://rustup.rs/ 94 | [Rust API Guidelines]: https://rust-lang.github.io/api-guidelines/ 95 | [Developer Certificate of Origin]: https://developercertificate.org/ 96 | [Contributor Covenant]: https://www.contributor-covenant.org/version/2/1/code_of_conduct/ 97 | -------------------------------------------------------------------------------- /crates/errors/src/config.rs: -------------------------------------------------------------------------------- 1 | //! Configuration error types 2 | 3 | use std::borrow::Cow; 4 | 5 | use crate::UserFacingError; 6 | use thiserror::Error; 7 | 8 | #[derive(Debug, Clone, Error)] 9 | #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] 10 | #[non_exhaustive] 11 | pub enum ConfigError { 12 | #[error("config file not found: {path}")] 13 | NotFound { path: String }, 14 | 15 | #[error("invalid config: {message}")] 16 | Invalid { message: String }, 17 | 18 | #[error("parse error: {message}")] 19 | ParseError { message: String }, 20 | 21 | #[error("missing required field: {field}")] 22 | MissingField { field: String }, 23 | 24 | #[error("invalid value for {field}: {value}")] 25 | InvalidValue { field: String, value: String }, 26 | 27 | #[error("environment variable not found: {var}")] 28 | EnvVarNotFound { var: String }, 29 | 30 | #[error("failed to write config to {path}: {error}")] 31 | WriteError { path: String, error: String }, 32 | 33 | #[error("failed to serialize config: {error}")] 34 | SerializeError { error: String }, 35 | } 36 | 37 | impl UserFacingError for ConfigError { 38 | fn user_message(&self) -> Cow<'_, str> { 39 | Cow::Owned(self.to_string()) 40 | } 41 | 42 | fn user_hint(&self) -> Option<&'static str> { 43 | match self { 44 | Self::NotFound { .. } => { 45 | Some("Provide a configuration file or run `sps2 setup` to create one.") 46 | } 47 | Self::MissingField { field } => Some(match field.as_str() { 48 | "store" => "Set the store path in the configuration file or via CLI flags.", 49 | _ => "Add the missing configuration field noted in the error message.", 50 | }), 51 | Self::InvalidValue { .. } | Self::Invalid { .. } | Self::ParseError { .. } => { 52 | Some("Fix the configuration value and retry the command.") 53 | } 54 | Self::EnvVarNotFound { .. } => { 55 | Some("Export the environment variable or move the value into the config file.") 56 | } 57 | Self::WriteError { .. } => Some("Ensure the config path is writable and retry."), 58 | _ => None, 59 | } 60 | } 61 | 62 | fn is_retryable(&self) -> bool { 63 | false 64 | } 65 | 66 | fn user_code(&self) -> Option<&'static str> { 67 | let code = match self { 68 | Self::NotFound { .. } => "config.not_found", 69 | Self::Invalid { .. } => "config.invalid", 70 | Self::ParseError { .. } => "config.parse_error", 71 | Self::MissingField { .. } => "config.missing_field", 72 | Self::InvalidValue { .. 
} => "config.invalid_value", 73 | Self::EnvVarNotFound { .. } => "config.env_var_not_found", 74 | Self::WriteError { .. } => "config.write_error", 75 | Self::SerializeError { .. } => "config.serialize_error", 76 | }; 77 | Some(code) 78 | } 79 | } 80 | -------------------------------------------------------------------------------- /recipes/rust-1.89.0.yml: -------------------------------------------------------------------------------- 1 | metadata: 2 | name: rust 3 | version: "1.89.0" 4 | description: "Rust programming language with Cargo package manager and standard toolchain" 5 | homepage: "https://www.rust-lang.org" 6 | license: "MIT OR Apache-2.0" 7 | dependencies: 8 | runtime: 9 | - zlib 10 | - libxml2 11 | build: 12 | - llvm 13 | - cmake 14 | - ninja 15 | - python3 16 | - zlib 17 | - libxml2 18 | 19 | facts: 20 | target_triple: "aarch64-apple-darwin" 21 | 22 | environment: 23 | defaults: true 24 | variables: 25 | RUST_BACKTRACE: "1" 26 | 27 | source: 28 | sources: 29 | - fetch: 30 | url: "https://static.rust-lang.org/dist/rustc-1.89.0-src.tar.gz" 31 | extract_to: "src" 32 | 33 | build: 34 | steps: 35 | - shell: | 36 | echo "Current directory: $(pwd)" 37 | 38 | echo '[build]' > config.toml 39 | echo 'rustc = "/opt/pm/live/bin/rustc"' >> config.toml 40 | echo 'cargo = "/opt/pm/live/bin/cargo"' >> config.toml 41 | echo 'target = ["aarch64-apple-darwin"]' >> config.toml 42 | echo 'host = ["aarch64-apple-darwin"]' >> config.toml 43 | echo 'docs = false' >> config.toml 44 | echo 'submodules = false' >> config.toml 45 | echo 'extended = true' >> config.toml 46 | echo '' >> config.toml 47 | echo '[install]' >> config.toml 48 | echo 'prefix = "/opt/pm/live"' >> config.toml 49 | echo 'bindir = "bin"' >> config.toml 50 | echo 'libdir = "lib"' >> config.toml 51 | echo 'docdir = "share/doc/rust"' >> config.toml 52 | echo 'mandir = "share/man"' >> config.toml 53 | echo 'sysconfdir = "etc"' >> config.toml 54 | echo '' >> config.toml 55 | 56 | # --- Use Pre-Built LLVM from Rust CI --- 57 | echo '[llvm]' >> config.toml 58 | echo 'download-ci-llvm = true' >> config.toml 59 | echo 'ninja = true' >> config.toml 60 | echo '' >> config.toml 61 | # ---------------------------------------- 62 | 63 | echo '[rust]' >> config.toml 64 | echo 'channel = "stable"' >> config.toml 65 | echo 'optimize = true' >> config.toml 66 | echo 'codegen-units = 1' >> config.toml 67 | echo 'debuginfo-level = 0' >> config.toml 68 | echo 'debug-assertions = false' >> config.toml 69 | echo 'lld = true' >> config.toml 70 | echo 'llvm-libunwind = "in-tree"' >> config.toml 71 | echo '' >> config.toml 72 | echo '[target.aarch64-apple-darwin]' >> config.toml 73 | echo 'cc = "/opt/pm/live/bin/clang"' >> config.toml 74 | echo 'cxx = "/opt/pm/live/bin/clang++"' >> config.toml 75 | echo 'linker = "/opt/pm/live/bin/clang"' >> config.toml 76 | 77 | echo "Building Rust 1.89.0..." 78 | python3 x.py build --config config.toml 79 | 80 | echo "Installing Rust 1.89.0..." 
81 | python3 x.py install --config config.toml 82 | 83 | post: 84 | fix_permissions: true 85 | patch_rpaths: skip 86 | qa_pipeline: rust 87 | 88 | -------------------------------------------------------------------------------- /recipes/rust-1.90.0.yml: -------------------------------------------------------------------------------- 1 | metadata: 2 | name: rust 3 | version: "1.90.0" 4 | description: "Rust programming language with Cargo package manager and standard toolchain" 5 | homepage: "https://www.rust-lang.org" 6 | license: "MIT OR Apache-2.0" 7 | dependencies: 8 | runtime: 9 | - zlib 10 | - libxml2 11 | build: 12 | - llvm 13 | - cmake 14 | - ninja 15 | - python3 16 | - zlib 17 | - libxml2 18 | 19 | facts: 20 | target_triple: "aarch64-apple-darwin" 21 | 22 | environment: 23 | defaults: true 24 | variables: 25 | RUST_BACKTRACE: "1" 26 | 27 | source: 28 | sources: 29 | - fetch: 30 | url: "https://static.rust-lang.org/dist/rustc-1.90.0-src.tar.gz" 31 | extract_to: "src" 32 | 33 | build: 34 | steps: 35 | - shell: | 36 | echo "Current directory: $(pwd)" 37 | 38 | echo '[build]' > config.toml 39 | echo 'rustc = "/opt/pm/live/bin/rustc"' >> config.toml 40 | echo 'cargo = "/opt/pm/live/bin/cargo"' >> config.toml 41 | echo 'target = ["aarch64-apple-darwin"]' >> config.toml 42 | echo 'host = ["aarch64-apple-darwin"]' >> config.toml 43 | echo 'docs = false' >> config.toml 44 | echo 'submodules = false' >> config.toml 45 | echo 'extended = true' >> config.toml 46 | echo '' >> config.toml 47 | echo '[install]' >> config.toml 48 | echo 'prefix = "/opt/pm/live"' >> config.toml 49 | echo 'bindir = "bin"' >> config.toml 50 | echo 'libdir = "lib"' >> config.toml 51 | echo 'docdir = "share/doc/rust"' >> config.toml 52 | echo 'mandir = "share/man"' >> config.toml 53 | echo 'sysconfdir = "etc"' >> config.toml 54 | echo '' >> config.toml 55 | 56 | # --- Use Pre-Built LLVM from Rust CI --- 57 | echo '[llvm]' >> config.toml 58 | echo 'download-ci-llvm = true' >> config.toml 59 | echo 'ninja = true' >> config.toml 60 | echo '' >> config.toml 61 | # ---------------------------------------- 62 | 63 | echo '[rust]' >> config.toml 64 | echo 'channel = "stable"' >> config.toml 65 | echo 'optimize = true' >> config.toml 66 | echo 'codegen-units = 1' >> config.toml 67 | echo 'debuginfo-level = 0' >> config.toml 68 | echo 'debug-assertions = false' >> config.toml 69 | echo 'lld = true' >> config.toml 70 | echo 'llvm-libunwind = "in-tree"' >> config.toml 71 | echo '' >> config.toml 72 | echo '[target.aarch64-apple-darwin]' >> config.toml 73 | echo 'cc = "/opt/pm/live/bin/clang"' >> config.toml 74 | echo 'cxx = "/opt/pm/live/bin/clang++"' >> config.toml 75 | echo 'linker = "/opt/pm/live/bin/clang"' >> config.toml 76 | 77 | echo "Building Rust 1.90.0..." 78 | python3 x.py build --config config.toml 79 | 80 | echo "Installing Rust 1.90.0..." 81 | python3 x.py install --config config.toml 82 | 83 | post: 84 | fix_permissions: true 85 | patch_rpaths: skip 86 | qa_pipeline: rust 87 | 88 | -------------------------------------------------------------------------------- /crates/builder/src/core/context.rs: -------------------------------------------------------------------------------- 1 | //! 
Build context for package building 2 | 3 | use sps2_events::{EventEmitter, EventSender}; 4 | use sps2_types::Version; 5 | use std::path::PathBuf; 6 | 7 | /// Build context for package building 8 | #[derive(Clone, Debug)] 9 | pub struct BuildContext { 10 | /// Package name 11 | pub name: String, 12 | /// Package version 13 | pub version: Version, 14 | /// Revision number 15 | pub revision: u32, 16 | /// Target architecture 17 | pub arch: String, 18 | /// Recipe file path 19 | pub recipe_path: PathBuf, 20 | /// Output directory for .sp files 21 | pub output_dir: PathBuf, 22 | /// Event sender for progress reporting 23 | pub event_sender: Option<EventSender>, 24 | /// Path to the generated .sp package (set after package creation) 25 | pub package_path: Option<PathBuf>, 26 | /// Optional session identifier used for correlating events. 27 | pub session_id: Option<String>, 28 | } 29 | 30 | impl EventEmitter for BuildContext { 31 | fn event_sender(&self) -> Option<&EventSender> { 32 | self.event_sender.as_ref() 33 | } 34 | } 35 | 36 | impl BuildContext { 37 | /// Create new build context 38 | #[must_use] 39 | pub fn new(name: String, version: Version, recipe_path: PathBuf, output_dir: PathBuf) -> Self { 40 | Self { 41 | name, 42 | version, 43 | revision: 1, 44 | arch: "arm64".to_string(), 45 | recipe_path, 46 | output_dir, 47 | event_sender: None, 48 | package_path: None, 49 | session_id: None, 50 | } 51 | } 52 | 53 | /// Set revision number 54 | #[must_use] 55 | pub fn with_revision(mut self, revision: u32) -> Self { 56 | self.revision = revision; 57 | self 58 | } 59 | 60 | /// Set architecture 61 | #[must_use] 62 | pub fn with_arch(mut self, arch: String) -> Self { 63 | self.arch = arch; 64 | self 65 | } 66 | 67 | /// Set event sender 68 | #[must_use] 69 | pub fn with_event_sender(mut self, event_sender: EventSender) -> Self { 70 | self.event_sender = Some(event_sender); 71 | self 72 | } 73 | 74 | /// Attach a session identifier for event correlation. 75 | #[must_use] 76 | pub fn with_session_id(mut self, session_id: impl Into<String>) -> Self { 77 | self.session_id = Some(session_id.into()); 78 | self 79 | } 80 | 81 | /// Retrieve the session identifier or derive a deterministic fallback.
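///
/// # Example (illustrative)
///
/// A hedged sketch of the fallback behaviour; the package name, version, and
/// paths below are made-up values.
///
/// ```ignore
/// let ctx = BuildContext::new(
///     "zlib".to_string(),
///     Version::parse("1.3.1")?,
///     PathBuf::from("recipes/zlib-1.3.1.yml"),
///     PathBuf::from("./out"),
/// );
/// // No session id was attached, so a deterministic fallback is derived.
/// assert_eq!(ctx.session_id(), "build:zlib-1.3.1");
/// ```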
82 | #[must_use] 83 | pub fn session_id(&self) -> String { 84 | self.session_id 85 | .clone() 86 | .unwrap_or_else(|| format!("build:{}-{}", self.name, self.version)) 87 | } 88 | 89 | /// Get package filename 90 | #[must_use] 91 | pub fn package_filename(&self) -> String { 92 | format!( 93 | "{}-{}-{}.{}.sp", 94 | self.name, self.version, self.revision, self.arch 95 | ) 96 | } 97 | 98 | /// Get full output path 99 | #[must_use] 100 | pub fn output_path(&self) -> PathBuf { 101 | self.output_dir.join(self.package_filename()) 102 | } 103 | } 104 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | ############################################################################### 2 | # Rust / Cargo # 3 | ############################################################################### 4 | 5 | 6 | # All compiled output lives here 7 | /target/ 8 | **/target/ 9 | 10 | # Backup files sometimes generated by rustfmt or editors 11 | *.rs.bk 12 | 13 | # If you ever vendor dependencies with `cargo vendor`, ignore the dir you pick 14 | /vendor/ 15 | 16 | ############################################################################### 17 | # Tooling & IDEs # 18 | ############################################################################### 19 | 20 | .idea/ # JetBrains / CLion 21 | .vscode/ # VS Code settings 22 | *.code-workspace 23 | .claude/ 24 | .gemini/ 25 | CLAUDE.md 26 | GEMINI.md 27 | AGENTS.md 28 | opencode.json 29 | # Rust Analyzer incremental cache (kept outside target/) 30 | .ra_cache/ 31 | 32 | ############################################################################### 33 | # Test, coverage & benchmarking artifacts # 34 | ############################################################################### 35 | 36 | /coverage/ 37 | **/*.profraw 38 | **/*.profdata 39 | htmlcov/ 40 | criterion/ 41 | cargo-llvm-cov-target/ # llvm-cov temp dir 42 | 43 | ############################################################################### 44 | # Generated docs # 45 | ############################################################################### 46 | 47 | /docs/target/ # `cargo doc` with `--target` 48 | /target/doc/ # default location if you run it at workspace root 49 | /book/ # mdBook output 50 | 51 | ############################################################################### 52 | # Design notes / proposals (local only) # 53 | ############################################################################### 54 | 55 | /design-notes/ 56 | 57 | ############################################################################### 58 | # OS-specific cruft # 59 | ############################################################################### 60 | 61 | # macOS 62 | .DS_Store 63 | .AppleDouble 64 | .LSOverride 65 | 66 | # Windows 67 | Thumbs.db 68 | ehthumbs.db 69 | Desktop.ini 70 | 71 | # Linux 72 | *~ 73 | 74 | ############################################################################### 75 | # Temporary/editor files # 76 | ############################################################################### 77 | 78 | *.swp 79 | *.swo 80 | *.swn 81 | # VSCode/Codium 82 | *.code-workspace 83 | 84 | ############################################################################### 85 | # Misc # 86 | ############################################################################### 87 | 88 | # Prevent committing secret env files, local config, etc. 
89 | .env 90 | .env.* 91 | .serena 92 | c.env 93 | test-repo 94 | generate_index.py 95 | minisign.pub 96 | -------------------------------------------------------------------------------- /crates/state/src/models.rs: -------------------------------------------------------------------------------- 1 | //! Database models for state management 2 | 3 | use chrono::{DateTime, Utc}; 4 | use serde::{Deserialize, Serialize}; 5 | use sps2_hash::Hash; 6 | use sps2_types::{StateId, Version}; 7 | use sqlx::FromRow; 8 | 9 | /// A system state record 10 | #[derive(Debug, Clone, FromRow, Serialize, Deserialize)] 11 | pub struct State { 12 | pub id: String, 13 | pub parent_id: Option<String>, 14 | pub created_at: i64, 15 | pub operation: String, 16 | pub success: bool, 17 | pub rollback_of: Option<String>, 18 | pub pruned_at: Option<i64>, 19 | } 20 | 21 | impl State { 22 | /// Convert to `StateId` 23 | /// 24 | /// # Panics 25 | /// 26 | /// Panics if the stored ID is not a valid UUID. 27 | #[must_use] 28 | pub fn state_id(&self) -> StateId { 29 | uuid::Uuid::parse_str(&self.id).expect("valid UUID in database") 30 | } 31 | 32 | /// Get creation timestamp 33 | /// 34 | /// # Panics 35 | /// 36 | /// Panics if the stored timestamp is not valid. 37 | #[must_use] 38 | pub fn timestamp(&self) -> DateTime<Utc> { 39 | DateTime::from_timestamp(self.created_at, 0).expect("valid timestamp in database") 40 | } 41 | } 42 | 43 | /// An installed package record 44 | #[derive(Debug, Clone, FromRow, Serialize, Deserialize)] 45 | pub struct Package { 46 | pub id: i64, 47 | pub state_id: String, 48 | pub name: String, 49 | pub version: String, 50 | pub hash: String, 51 | pub size: i64, 52 | pub installed_at: i64, 53 | pub venv_path: Option<String>, 54 | } 55 | 56 | impl Package { 57 | /// Parse the version 58 | /// 59 | /// # Panics 60 | /// 61 | /// Panics if the stored version string is not valid. 62 | #[must_use] 63 | pub fn version(&self) -> Version { 64 | Version::parse(&self.version).expect("valid version in database") 65 | } 66 | 67 | /// Parse the hash 68 | /// 69 | /// # Panics 70 | /// 71 | /// Panics if the stored hash string is not valid. 72 | #[must_use] 73 | pub fn hash(&self) -> Hash { 74 | Hash::from_hex(&self.hash).expect("valid hash in database") 75 | } 76 | } 77 | 78 | /// A package dependency record 79 | #[derive(Debug, Clone, FromRow, Serialize, Deserialize)] 80 | pub struct Dependency { 81 | pub id: i64, 82 | pub package_id: i64, 83 | pub dep_name: String, 84 | pub dep_spec: String, 85 | pub dep_kind: String, 86 | } 87 | 88 | /// A store reference count record 89 | #[derive(Debug, Clone, FromRow, Serialize, Deserialize)] 90 | pub struct StoreRef { 91 | pub hash: String, 92 | pub ref_count: i64, 93 | pub size: i64, 94 | pub created_at: i64, 95 | } 96 | 97 | impl StoreRef { 98 | /// Parse the hash 99 | /// 100 | /// # Panics 101 | /// 102 | /// Panics if the stored hash string is not valid.
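///
/// # Example (illustrative)
///
/// A hedged sketch; the digest below is a made-up placeholder, not a real
/// store entry.
///
/// ```ignore
/// let store_ref = StoreRef {
///     hash: "aa".repeat(32), // stand-in for a 64-character BLAKE3 hex digest
///     ref_count: 2,
///     size: 4096,
///     created_at: 1_700_000_000,
/// };
/// let _hash = store_ref.hash(); // panics only if the stored hex is invalid
/// ```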
103 | #[must_use] 104 | pub fn hash(&self) -> Hash { 105 | Hash::from_hex(&self.hash).expect("valid hash in database") 106 | } 107 | } 108 | 109 | /// Package reference for state transitions 110 | #[derive(Debug, Clone)] 111 | pub struct PackageRef { 112 | pub state_id: uuid::Uuid, 113 | pub package_id: sps2_resolver::PackageId, 114 | pub hash: String, 115 | pub size: i64, 116 | } 117 | -------------------------------------------------------------------------------- /crates/install/src/api/result.rs: -------------------------------------------------------------------------------- 1 | use chrono::{DateTime, Utc}; 2 | use sps2_resolver::PackageId; 3 | use uuid::Uuid; 4 | 5 | /// Installation result 6 | #[derive(Debug)] 7 | pub struct InstallResult { 8 | /// State ID after installation 9 | pub state_id: Uuid, 10 | /// Packages that were installed 11 | pub installed_packages: Vec<PackageId>, 12 | /// Packages that were updated 13 | pub updated_packages: Vec<PackageId>, 14 | /// Packages that were removed 15 | pub removed_packages: Vec<PackageId>, 16 | } 17 | 18 | impl InstallResult { 19 | /// Create new install result 20 | #[must_use] 21 | pub fn new(state_id: Uuid) -> Self { 22 | Self { 23 | state_id, 24 | installed_packages: Vec::new(), 25 | updated_packages: Vec::new(), 26 | removed_packages: Vec::new(), 27 | } 28 | } 29 | 30 | /// Add installed package 31 | pub fn add_installed(&mut self, package_id: PackageId) { 32 | self.installed_packages.push(package_id); 33 | } 34 | 35 | /// Add updated package 36 | pub fn add_updated(&mut self, package_id: PackageId) { 37 | self.updated_packages.push(package_id); 38 | } 39 | 40 | /// Add removed package 41 | pub fn add_removed(&mut self, package_id: PackageId) { 42 | self.removed_packages.push(package_id); 43 | } 44 | 45 | /// Get total number of changes 46 | #[must_use] 47 | pub fn total_changes(&self) -> usize { 48 | self.installed_packages.len() + self.updated_packages.len() + self.removed_packages.len() 49 | } 50 | } 51 | 52 | /// State information for listing 53 | #[derive(Debug, Clone)] 54 | pub struct StateInfo { 55 | /// State ID 56 | pub id: Uuid, 57 | /// Creation timestamp 58 | pub timestamp: DateTime<Utc>, 59 | /// Parent state ID 60 | pub parent_id: Option<Uuid>, 61 | /// Number of packages in this state 62 | pub package_count: usize, 63 | /// Sample of packages (for display) 64 | pub packages: Vec<PackageId>, 65 | } 66 | 67 | impl StateInfo { 68 | /// Check if this is the root state 69 | #[must_use] 70 | pub fn is_root(&self) -> bool { 71 | self.parent_id.is_none() 72 | } 73 | 74 | /// Get age of this state 75 | #[must_use] 76 | pub fn age(&self) -> chrono::Duration { 77 | Utc::now() - self.timestamp 78 | } 79 | 80 | /// Format package list for display 81 | #[must_use] 82 | pub fn package_summary(&self) -> String { 83 | if self.packages.is_empty() { 84 | "No packages".to_string() 85 | } else if self.packages.len() <= 3 { 86 | self.packages 87 | .iter() 88 | .map(|pkg| format!("{}-{}", pkg.name, pkg.version)) 89 | .collect::<Vec<String>>() 90 | .join(", ") 91 | } else { 92 | let first_three: Vec<String> = self 93 | .packages 94 | .iter() 95 | .take(3) 96 | .map(|pkg| format!("{}-{}", pkg.name, pkg.version)) 97 | .collect(); 98 | format!( 99 | "{} and {} more", 100 | first_three.join(", "), 101 | self.package_count - 3 102 | ) 103 | } 104 | } 105 | } 106 | -------------------------------------------------------------------------------- /recipes/rust-1.88.0.yml: -------------------------------------------------------------------------------- 1 | metadata: 2 | name: rust 3 | version: "1.88.0" 4 | description:
"Rust programming language with Cargo package manager and standard toolchain" 5 | homepage: "https://www.rust-lang.org" 6 | license: "MIT OR Apache-2.0" 7 | dependencies: 8 | runtime: 9 | - zlib 10 | - libxml2 11 | build: 12 | - cmake 13 | - ninja 14 | - python 15 | - zlib 16 | - libxml2 17 | 18 | facts: 19 | bootstrap_version: "1.87.0" 20 | target_triple: "aarch64-apple-darwin" 21 | 22 | environment: 23 | defaults: true 24 | variables: 25 | RUST_BACKTRACE: "1" 26 | 27 | source: 28 | sources: 29 | - fetch: 30 | url: "https://static.rust-lang.org/dist/rustc-1.88.0-src.tar.gz" 31 | extract_to: "src" 32 | - fetch: 33 | url: "https://static.rust-lang.org/dist/rust-1.87.0-aarch64-apple-darwin.tar.gz" 34 | extract_to: "bootstrap" 35 | 36 | build: 37 | steps: 38 | - shell: | 39 | echo "Current directory: $(pwd)" 40 | cd ../bootstrap 41 | echo "Running install.sh..." 42 | ./install.sh --prefix=/tmp/rust-bootstrap --without=rust-docs,clippy-preview,rust-analyzer-preview 43 | cd ../src 44 | echo "Current directory: $(pwd)" 45 | 46 | echo '[build]' > config.toml 47 | echo 'rustc = "/tmp/rust-bootstrap/bin/rustc"' >> config.toml 48 | echo 'cargo = "/tmp/rust-bootstrap/bin/cargo"' >> config.toml 49 | echo 'target = ["aarch64-apple-darwin"]' >> config.toml 50 | echo 'host = ["aarch64-apple-darwin"]' >> config.toml 51 | echo 'docs = false' >> config.toml 52 | echo 'submodules = false' >> config.toml 53 | echo 'extended = true' >> config.toml 54 | echo '' >> config.toml 55 | echo '[install]' >> config.toml 56 | echo 'prefix = "/opt/pm/live"' >> config.toml 57 | echo 'bindir = "bin"' >> config.toml 58 | echo 'libdir = "lib"' >> config.toml 59 | echo 'docdir = "share/doc/rust"' >> config.toml 60 | echo 'mandir = "share/man"' >> config.toml 61 | echo 'sysconfdir = "etc"' >> config.toml 62 | echo '' >> config.toml 63 | 64 | # --- Use Pre-Built LLVM from Rust CI --- 65 | echo '[llvm]' >> config.toml 66 | echo 'download-ci-llvm = true' >> config.toml 67 | echo 'ninja = true' >> config.toml 68 | echo '' >> config.toml 69 | # ---------------------------------------- 70 | 71 | echo '[rust]' >> config.toml 72 | echo 'channel = "stable"' >> config.toml 73 | echo 'optimize = true' >> config.toml 74 | echo 'codegen-units = 1' >> config.toml 75 | echo 'debuginfo-level = 0' >> config.toml 76 | echo 'debug-assertions = false' >> config.toml 77 | echo 'lld = true' >> config.toml 78 | echo 'llvm-libunwind = "in-tree"' >> config.toml 79 | echo '' >> config.toml 80 | echo '[target.aarch64-apple-darwin]' >> config.toml 81 | echo 'cc = "/opt/pm/live/bin/clang"' >> config.toml 82 | echo 'cxx = "/opt/pm/live/bin/clang++"' >> config.toml 83 | echo 'linker = "/opt/pm/live/bin/clang"' >> config.toml 84 | 85 | echo "Building Rust 1.88.0..." 86 | python3 x.py build --config config.toml 87 | 88 | echo "Installing Rust 1.88.0..." 
89 | python3 x.py install --config config.toml 90 | 91 | post: 92 | fix_permissions: true 93 | patch_rpaths: skip 94 | qa_pipeline: rust 95 | 96 | -------------------------------------------------------------------------------- /crates/state/src/lib.rs: -------------------------------------------------------------------------------- 1 | #![warn(mismatched_lifetime_syntaxes)] 2 | #![deny(clippy::pedantic, unsafe_code)] 3 | #![allow( 4 | clippy::needless_raw_string_hashes, 5 | clippy::cast_possible_truncation, 6 | clippy::cast_sign_loss, 7 | clippy::cast_possible_wrap, 8 | clippy::cast_lossless, 9 | clippy::map_unwrap_or, 10 | clippy::unused_async, 11 | clippy::missing_panics_doc 12 | )] 13 | #![allow(clippy::module_name_repetitions)] 14 | 15 | //! State management for sps2 16 | //! 17 | //! This crate manages the `SQLite` database that tracks system state, 18 | //! installed packages, and enables atomic updates with rollback. 19 | 20 | pub mod db; 21 | pub mod file_models; 22 | pub mod file_queries_runtime; 23 | pub mod live_slots; 24 | pub mod manager; 25 | pub mod models; 26 | 27 | #[cfg(feature = "runtime-queries")] 28 | pub use manager::{StateManager, TransactionData}; 29 | pub mod queries { 30 | pub use crate::file_queries_runtime::*; 31 | pub use crate::queries_runtime::*; 32 | } 33 | 34 | #[cfg(feature = "runtime-queries")] 35 | mod queries_runtime; 36 | 37 | pub use file_models::{ 38 | DeduplicationResult, FileMTimeTracker, FileMetadata, FileObject, FileReference, 39 | FileStorageStats, InstalledFile, PackageFileEntry, 40 | }; 41 | pub use models::{Package, PackageRef, State, StoreRef}; 42 | 43 | use sps2_errors::Error; 44 | use sqlx::sqlite::{SqliteConnectOptions, SqliteJournalMode, SqlitePoolOptions}; 45 | use sqlx::{Pool, Sqlite}; 46 | use std::path::Path; 47 | use std::time::Duration; 48 | 49 | /// Create a new `SQLite` connection pool 50 | /// 51 | /// # Errors 52 | /// 53 | /// Returns an error if the database connection fails or configuration is invalid. 54 | pub async fn create_pool(db_path: &Path) -> Result<Pool<Sqlite>, Error> { 55 | let options = SqliteConnectOptions::new() 56 | .filename(db_path) 57 | .create_if_missing(true) 58 | .journal_mode(SqliteJournalMode::Wal) 59 | .busy_timeout(Duration::from_secs(30)); 60 | 61 | let pool = SqlitePoolOptions::new() 62 | .max_connections(5) 63 | .connect_with(options) 64 | .await 65 | .map_err(|e| { 66 | Error::from(sps2_errors::StateError::DatabaseError { 67 | message: e.to_string(), 68 | }) 69 | })?; 70 | 71 | if let Ok(mut conn) = pool.acquire().await { 72 | let _ = sqlx::query("PRAGMA synchronous = NORMAL") 73 | .execute(&mut *conn) 74 | .await; 75 | let _ = sqlx::query("PRAGMA temp_store = MEMORY") 76 | .execute(&mut *conn) 77 | .await; 78 | let _ = sqlx::query("PRAGMA mmap_size = 268435456") 79 | .execute(&mut *conn) 80 | .await; 81 | let _ = sqlx::query("PRAGMA cache_size = -20000") 82 | .execute(&mut *conn) 83 | .await; 84 | let _ = sqlx::query("PRAGMA wal_autocheckpoint = 1000") 85 | .execute(&mut *conn) 86 | .await; 87 | } 88 | 89 | Ok(pool) 90 | } 91 | 92 | /// Run database migrations 93 | /// 94 | /// # Errors 95 | /// 96 | /// Returns an error if any migration fails to execute. 97 | pub async fn run_migrations(pool: &Pool<Sqlite>) -> Result<(), Error> { 98 | sqlx::migrate!("./migrations").run(pool).await.map_err(|e| { 99 | sps2_errors::StateError::MigrationFailed { 100 | message: e.to_string(), 101 | } 102 | .into() 103 | }) 104 | } 105 | --------------------------------------------------------------------------------