├── .gitignore ├── src ├── alpm │ ├── archive.rs │ ├── mod.rs │ ├── local.rs │ └── db.rs ├── solver │ ├── pool │ │ ├── source │ │ │ ├── pacdb.rs │ │ │ ├── mod.rs │ │ │ └── local.rs │ │ ├── in_memory.rs │ │ └── mod.rs │ ├── sort.rs │ ├── incompatible.rs │ ├── mod.rs │ └── improve.rs ├── config │ ├── repo │ │ ├── mirrorlist.rs │ │ ├── config.rs │ │ └── mod.rs │ ├── blueprint │ │ ├── variables.rs │ │ ├── mod.rs │ │ └── parse.rs │ └── mod.rs ├── utils │ ├── mod.rs │ ├── cli │ │ ├── prompt.rs │ │ └── mod.rs │ ├── lock.rs │ ├── pager.rs │ ├── pacparse.rs │ └── downloader.rs ├── types │ ├── mod.rs │ ├── version │ │ ├── mod.rs │ │ ├── test.rs │ │ ├── parse.rs │ │ ├── ord.rs │ │ └── requirement.rs │ ├── checksum.rs │ └── actions │ │ ├── mod.rs │ │ └── table.rs ├── executor │ └── mod.rs ├── actions │ ├── search │ │ ├── provide │ │ │ ├── parse.rs │ │ │ └── mod.rs │ │ ├── pkg.rs │ │ └── mod.rs │ ├── local.rs │ ├── download.rs │ ├── mod.rs │ ├── execute.rs │ └── bench.rs └── main.rs ├── rustfmt.toml ├── README.md ├── misc └── oma-migrate ├── Cargo.toml ├── doc ├── usage.md └── config.md └── LICENSE /.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | -------------------------------------------------------------------------------- /src/alpm/archive.rs: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /src/solver/pool/source/pacdb.rs: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /rustfmt.toml: -------------------------------------------------------------------------------- 1 | use_small_heuristics = "Max" 2 | newline_style = "Unix" 3 | -------------------------------------------------------------------------------- /src/alpm/mod.rs: 
-------------------------------------------------------------------------------- 1 | pub mod archive; 2 | pub mod db; 3 | pub mod local; 4 | -------------------------------------------------------------------------------- /src/config/repo/mirrorlist.rs: -------------------------------------------------------------------------------- 1 | /// Parse pacman-style mirrorlist file 2 | -------------------------------------------------------------------------------- /src/utils/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod cli; 2 | pub mod downloader; 3 | pub mod lock; 4 | pub mod pacparse; 5 | pub mod pager; 6 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Under construction 2 | TODOs: 3 | - [] update `tabled` (the current version uses a very old version of nom) 4 | -------------------------------------------------------------------------------- /src/solver/pool/source/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod pacdb; 2 | 3 | use super::{BasicPkgPool, InMemoryPool, PkgPool}; 4 | use anyhow::Result; 5 | use std::path::PathBuf; 6 | 7 | pub fn create_pool( 8 | pac_dbs: &[(String, PathBuf)], 9 | _local_deb_roots: &[PathBuf], 10 | ) -> Result> { 11 | let mut pool = InMemoryPool::new(); 12 | for (root_url, pac_db) in pac_dbs { 13 | pacdb::import(pac_db, &mut pool, root_url)?; 14 | } 15 | 16 | pool.finalize(); 17 | Ok(Box::new(pool)) 18 | } 19 | -------------------------------------------------------------------------------- /src/utils/cli/prompt.rs: -------------------------------------------------------------------------------- 1 | use crate::config::Opts; 2 | 3 | use anyhow::Result; 4 | use console::style; 5 | use dialoguer::{theme::Theme, Confirm}; 6 | use std::fmt; 7 | 8 | pub fn ask_confirm(opts: &Opts, msg: &str) -> Result { 9 | 
if opts.yes { 10 | return Ok(true); 11 | } 12 | 13 | let prefix = super::gen_prefix(""); 14 | let msg = format!("{prefix}{msg}"); 15 | let res = Confirm::new().with_prompt(msg).interact()?; 16 | Ok(res) 17 | } 18 | 19 | /// Theme for dialoguer 20 | #[derive(Default)] 21 | pub struct SasmTheme; 22 | 23 | impl Theme for SasmTheme { 24 | fn format_select_prompt_item( 25 | &self, 26 | f: &mut dyn fmt::Write, 27 | text: &str, 28 | active: bool, 29 | ) -> fmt::Result { 30 | let prefix = match active { 31 | true => crate::utils::cli::gen_prefix(&style("->").bold().to_string()), 32 | false => crate::utils::cli::gen_prefix(""), 33 | }; 34 | 35 | write!(f, "{prefix}{text}") 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /misc/oma-migrate: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | _help_message() { 4 | printf "\ 5 | oma-migrate: export APT database as an sasm blueprint 6 | Useage: 7 | oma-migrate [OUTPUT] 8 | - OUTPUT: Where to generate blueprint. If not specified, /etc/omakase/user.blueprint will be used. 9 | " 10 | } 11 | 12 | 13 | if [[ "$1" == "--help" || "$1" == "-h" ]]; then 14 | _help_message 15 | exit 0 16 | fi 17 | 18 | BLUEPRINT_PATH="/etc/omakase/user.blueprint" 19 | if [[ -n "$1" ]]; then 20 | BLUEPRINT_PATH="$1" 21 | fi 22 | 23 | echo "# Migrated from apt at $(date)" >> "$BLUEPRINT_PATH" 24 | apt-mark showmanual | xargs sudo oma install --init 25 | 26 | if [[ $? -ne 0 ]]; then 27 | echo "Failed to get packages from apt. Is this system using APT?" 28 | else 29 | echo "All manually installed packages from apt has been added to $BLUEPRINT_PATH." 30 | echo "Now, run oma execute to let sasm take over. Please carefully examine the installation/removal list. If packages are installed or removed unexpectedly, then manually adjust the blueprint." 
31 | fi 32 | -------------------------------------------------------------------------------- /src/config/blueprint/variables.rs: -------------------------------------------------------------------------------- 1 | use anyhow::{bail, Result}; 2 | use lazy_static::lazy_static; 3 | use regex::{Captures, Regex}; 4 | 5 | pub fn fill_variables(rule: &str) -> Result { 6 | lazy_static! { 7 | static ref EXPANSION: Regex = Regex::new(r"\{([A-Z_]+)}").unwrap(); 8 | } 9 | 10 | let kernel_version = get_kernel_version()?; 11 | let mut unknown_variable = Vec::new(); 12 | let res = EXPANSION.replace_all(rule, |caps: &Captures| match caps.get(1).unwrap().as_str() { 13 | "KERNEL_VERSION" => &kernel_version, 14 | unintended => { 15 | unknown_variable.push(unintended.to_owned()); 16 | "" 17 | } 18 | }); 19 | 20 | if !unknown_variable.is_empty() { 21 | bail!("Unknown variable: {}.", unknown_variable.join(", ")); 22 | } 23 | 24 | Ok(res.to_string()) 25 | } 26 | 27 | fn get_kernel_version() -> Result { 28 | let uname = nix::sys::utsname::uname(); 29 | let version = uname.release(); 30 | let section = version.split('-').next(); 31 | if let Some(section) = section { 32 | return Ok(section.to_string()); 33 | } 34 | bail!("Failed to obtain kernel version: malformed kernel local version."); 35 | } 36 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "sasm" 3 | version = "0.1.0-beta.8" 4 | authors = ["Leo Shen "] 5 | license = "GPL-2.0-or-later" 6 | edition = "2021" 7 | 8 | [[bin]] 9 | name = "sasm" 10 | path = "src/main.rs" 11 | 12 | [dependencies] 13 | anyhow = "1.0" 14 | regex = "1" 15 | lazy_static = "1" 16 | ctrlc = { version = "3", features = ["termination"] } 17 | # Solver 18 | varisat = "0.2" 19 | petgraph = "0.6" 20 | # Dealing with deb control files and InRelease 21 | sequoia-openpgp = "1.16" 22 | sha2 = "0.10" 23 | hex = "0.4" 24 | 
walkdir = "2" 25 | # cli 26 | console = "0.15" 27 | clap = { version = "3", features = ["derive"] } 28 | tabled = { version = "0.4", features = ["color"] } 29 | indicatif = "0.17" 30 | dialoguer = "0.9" 31 | # Dealing with config files 32 | serde = "1" 33 | toml = "0.7" 34 | nom = "7.1" 35 | # Network stack 36 | futures-util = "0.3" 37 | tokio = { version = "1", default_features = false, features = ["rt", "macros", "fs", "io-util"] } 38 | bytes = "1" 39 | reqwest = "0.11" 40 | async-compression = { version = "0.4", features = ["tokio", "gzip", "xz"] } 41 | flate2 = "1" 42 | # Actions modifier 43 | nix = "0.23" 44 | # Parallel db generation 45 | rayon = "1" 46 | # Read deb files 47 | ar = "0.9" 48 | tar = "0.4" 49 | xz2 = "0.1" 50 | # Search stuff 51 | strsim = "0.10" 52 | memchr = "2" 53 | -------------------------------------------------------------------------------- /src/types/mod.rs: -------------------------------------------------------------------------------- 1 | mod actions; 2 | mod checksum; 3 | mod version; 4 | 5 | pub use actions::{PkgActionModifier, PkgActions, PkgInstallAction}; 6 | pub use checksum::{Checksum, ChecksumValidator}; 7 | use nom::error::VerboseError; 8 | pub use version::{parse_version, parse_version_requirement, PkgVersion, VersionRequirement}; 9 | 10 | use serde::{Deserialize, Serialize}; 11 | use std::path::PathBuf; 12 | 13 | #[derive(Deserialize, Default)] 14 | pub struct PkgRequirement { 15 | pub with_recommends: Option, 16 | pub version: Option, 17 | } 18 | 19 | #[derive(Clone, Debug, Serialize, Deserialize)] 20 | pub struct PkgMeta { 21 | pub name: String, 22 | pub description: String, 23 | pub version: PkgVersion, 24 | 25 | pub depends: Vec<(String, VersionRequirement, Option)>, 26 | pub optional: Vec<(String, VersionRequirement, Option)>, 27 | pub conflicts: Vec<(String, VersionRequirement, Option)>, 28 | pub provides: Vec<(String, VersionRequirement, Option)>, 29 | pub replaces: Vec<(String, VersionRequirement, Option)>, 30 | 
pub install_size: u64, 31 | 32 | pub source: PkgSource, 33 | } 34 | 35 | #[derive(Clone, Debug, Serialize, Deserialize)] 36 | pub enum PkgSource { 37 | // Http((url, size, checksum)) 38 | Http((String, u64, Checksum)), 39 | // Local(path) 40 | Local(PathBuf), 41 | } 42 | 43 | /// Status of package on this instance, extracted from pacman local state db 44 | /// Usually located at /var/lib/pacman/local 45 | #[derive(Clone)] 46 | pub struct PkgStatus { 47 | pub name: String, 48 | pub version: PkgVersion, 49 | pub install_size: u64, 50 | } 51 | -------------------------------------------------------------------------------- /src/config/repo/config.rs: -------------------------------------------------------------------------------- 1 | use anyhow::{bail, Result}; 2 | use serde::{Deserialize, Serialize, Serializer}; 3 | use std::path::PathBuf; 4 | 5 | #[derive(Deserialize, Serialize, Debug, Clone)] 6 | pub struct RepoConfig { 7 | pub source: Mirror, 8 | pub keys: Vec, 9 | } 10 | 11 | #[derive(Deserialize, Serialize, Debug, Clone)] 12 | #[serde(untagged)] 13 | pub enum Mirror { 14 | Simple(String), 15 | MirrorList(PathBuf), 16 | } 17 | 18 | impl RepoConfig { 19 | /// Check if there's some mirror available 20 | pub fn check_sanity(&self) -> Result<()> { 21 | // TODO: Implement mirrorlist 22 | if matches!(self.source, Mirror::MirrorList(_)) { 23 | bail!("mirrorlist not supported yet!") 24 | } 25 | 26 | Ok(()) 27 | } 28 | 29 | /// Get base urls for all repositories 30 | /// Returns a list of possible urls for the repository 31 | pub fn get_url(&self, name: &str, arch: &str) -> Result { 32 | let mut url = match &self.source { 33 | Mirror::Simple(m) => { 34 | let mut url = m.clone(); 35 | normalize_mirror_url(&mut url); 36 | url 37 | } 38 | Mirror::MirrorList(path) => { 39 | unimplemented!() 40 | } 41 | }; 42 | 43 | // Replace variables 44 | // $repo: Repository name 45 | // $arch: Current system architecture 46 | url = url.replace("$repo", name); 47 | url = 
url.replace("$arch", arch); 48 | Ok(url) 49 | } 50 | } 51 | 52 | fn normalize_mirror_url(url: &mut String) { 53 | if url.ends_with('/') { 54 | url.pop(); 55 | } 56 | } 57 | 58 | pub enum MirrorlistLine {} 59 | -------------------------------------------------------------------------------- /src/types/version/mod.rs: -------------------------------------------------------------------------------- 1 | mod ord; 2 | mod parse; 3 | mod requirement; 4 | mod test; 5 | pub use parse::parse_version; 6 | pub use requirement::{parse_version_requirement, VersionRequirement}; 7 | 8 | use serde::{Deserialize, Serialize, Serializer}; 9 | use std::fmt; 10 | 11 | #[derive(PartialEq, Eq, Clone, Debug, Deserialize)] 12 | pub enum PkgVersionSegment { 13 | Number(u64), 14 | Alphabetic(String), 15 | Separater(String), 16 | } 17 | 18 | /// RPM style package version comparison 19 | #[derive(PartialEq, Eq, Clone, Debug, Deserialize)] 20 | pub struct PkgVersion { 21 | pub epoch: u64, 22 | pub version: Vec, 23 | pub revision: Option, 24 | } 25 | 26 | impl fmt::Display for PkgVersionSegment { 27 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 28 | match self { 29 | Self::Number(x) => write!(f, "{x}")?, 30 | Self::Alphabetic(x) => write!(f, "{x}")?, 31 | Self::Separater(x) => write!(f, "{x}")?, 32 | } 33 | Ok(()) 34 | } 35 | } 36 | 37 | impl fmt::Display for PkgVersion { 38 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 39 | if self.epoch != 0 { 40 | write!(f, "{}:", self.epoch)?; 41 | } 42 | for segment in &self.version { 43 | write!(f, "{}", &segment)?; 44 | } 45 | if let Some(rev) = self.revision { 46 | write!(f, "-{}", rev)?; 47 | } 48 | Ok(()) 49 | } 50 | } 51 | 52 | impl Serialize for PkgVersion { 53 | fn serialize(&self, serializer: S) -> Result 54 | where 55 | S: Serializer, 56 | { 57 | let res = self.to_string(); 58 | serializer.serialize_str(&res) 59 | } 60 | } 61 | -------------------------------------------------------------------------------- /doc/usage.md: 
-------------------------------------------------------------------------------- 1 | # Installing packages 2 | ```bash 3 | oma install PKG1 PKG2 ... 4 | ``` 5 | 6 | Possible arguments: 7 | + `--no-recommends` Do not install recommended packages 8 | 9 | Note that in order to make sure the dependency tree is sound and up-to-date, omakase may upgrade existing packages when installing new packages. 10 | 11 | # Removing packages 12 | ```bash 13 | oma remove PKG1 PKG2 ... 14 | ``` 15 | 16 | Possible arguments: 17 | + `--remove-recommends` Remove recommended packages introduced by designated packages 18 | 19 | This will remove designated packages alongside **all** their dependencies from the system. 20 | 21 | Note that just like the previous case, you may see omakase upgrade (or even install) packages when using `remove` subcommand. 22 | 23 | # Upgrading packages 24 | ```bash 25 | oma upgrade 26 | ``` 27 | 28 | # Searching for packages 29 | ```bash 30 | oma search QUERY 31 | ``` 32 | 33 | Query string accepts Regex syntax. Note that only package titles will be searched. 34 | 35 | # Search packages that contain certain files 36 | ```bash 37 | oma provide FILE 38 | ``` 39 | 40 | Possible arguments: 41 | + `--bin` Search binary files only. This should be significantly faster. 42 | 43 | Search what packages contain a certain file. 44 | 45 | # Pick a certain version for a package 46 | ```bash 47 | oma pick PKGNAME 48 | ``` 49 | 50 | Tell sasm to use a certain version of a package. 51 | 52 | # Benchmarking mirrors and pick the best one 53 | ```bash 54 | oma bench 55 | ``` 56 | 57 | Benchmark mirrors in MirrorLists (see [config documentation](doc/config.md)) and use the best one. 58 | 59 | # Download a package from the repository 60 | ```bash 61 | oma download PKGNAME 62 | ``` 63 | 64 | Download a package archive file from remote repositories. 
65 | -------------------------------------------------------------------------------- /src/solver/sort.rs: -------------------------------------------------------------------------------- 1 | use super::pool::PkgPool; 2 | use anyhow::Result; 3 | use petgraph::graph::{DiGraph, NodeIndex}; 4 | use std::collections::HashMap; 5 | 6 | /// Use trajan algorithm to find out installation order of packages 7 | pub fn sort_pkgs(pool: &dyn PkgPool, pkgs: &mut Vec) -> Result<()> { 8 | let res = sort_pkgs_to_cycles(pool, pkgs)?; 9 | pkgs.clear(); 10 | for mut pkgids in res { 11 | if pkgids.len() == 1 { 12 | pkgs.push(pkgids[0]); 13 | } else { 14 | // Sort via the number of dependencies 15 | pkgids.sort_by_key(|id| { 16 | let pkg = pool.get_pkg_by_id(*id).unwrap(); 17 | pkg.depends.len() 18 | }); 19 | pkgs.append(&mut pkgids); 20 | } 21 | } 22 | 23 | Ok(()) 24 | } 25 | 26 | pub fn sort_pkgs_to_cycles(pool: &dyn PkgPool, pkgs: &[usize]) -> Result>> { 27 | let mut g = DiGraph::::new(); 28 | let mut indexs: HashMap = HashMap::new(); 29 | // Add package nodes 30 | for pkgid in pkgs.iter() { 31 | indexs.insert(*pkgid, g.add_node(*pkgid)); 32 | } 33 | // Add dependency edges 34 | for pkgid in pkgs.iter() { 35 | let deps: Vec = pool.get_deps(*pkgid)?.into_iter().flatten().collect(); 36 | for depid in deps { 37 | if pkgs.contains(&depid) { 38 | // Add a directed edge 39 | g.update_edge(indexs[pkgid], indexs[&depid], ()); 40 | } 41 | } 42 | } 43 | // Find a path 44 | let solve_res = petgraph::algo::tarjan_scc(&g); 45 | 46 | let mut res = Vec::new(); 47 | for pkg_indexs in solve_res { 48 | let cycle: Vec = pkg_indexs.into_iter().map(|index| g[index]).collect(); 49 | res.push(cycle); 50 | } 51 | 52 | Ok(res) 53 | } 54 | -------------------------------------------------------------------------------- /src/solver/incompatible.rs: -------------------------------------------------------------------------------- 1 | use super::pool::PkgPool; 2 | use crate::types::PkgMeta; 3 | use varisat::{Lit, 
Solver}; 4 | 5 | pub fn find_incompatible_friendly(pool: &dyn PkgPool, to_install: &[usize]) -> String { 6 | let incompatible = find_incompatible(pool, to_install); 7 | let pkgs: Vec<&PkgMeta> = 8 | incompatible.into_iter().map(|id| pool.get_pkg_by_id(id).unwrap()).collect(); 9 | 10 | if pkgs.is_empty() { 11 | "Unknown reason".to_string() 12 | } else if pkgs.len() == 1 { 13 | let pkg = pkgs.get(0).unwrap(); 14 | format!( 15 | "{}({}) appears to have dependency issues that prevents it from being installed. Please contact your package maintainers.", 16 | pkg.name, 17 | console::style(&pkg.version).dim() 18 | ) 19 | } else { 20 | let mut res = String::from("The following packages cannot be installed simultaneously: "); 21 | res.push_str(""); 22 | let mut pkgs = pkgs.into_iter().peekable(); 23 | while let Some(pkg) = pkgs.next() { 24 | res.push_str(&format!("{}({})", pkg.name, console::style(&pkg.version).dim())); 25 | if pkgs.peek().is_some() { 26 | res.push_str(", "); 27 | } 28 | } 29 | res 30 | } 31 | } 32 | 33 | fn find_incompatible(pool: &dyn PkgPool, to_install: &[usize]) -> Vec { 34 | // Set up solver 35 | let mut solver = Solver::new(); 36 | let formula = pool.gen_formula(None); 37 | solver.add_formula(&formula); 38 | 39 | // Check individual packages first 40 | let to_install_as_lits: Vec = 41 | to_install.iter().map(|id| Lit::from_dimacs(*id as isize)).collect(); 42 | solver.solve().unwrap(); 43 | solver.assume(&to_install_as_lits); 44 | solver.solve().unwrap(); 45 | let core: Vec = match solver.failed_core() { 46 | Some(pkgids) => pkgids.to_vec().into_iter().map(|lit| lit.to_dimacs() as usize).collect(), 47 | None => Vec::new(), 48 | }; 49 | 50 | core 51 | } 52 | -------------------------------------------------------------------------------- /src/types/version/test.rs: -------------------------------------------------------------------------------- 1 | #[cfg(test)] 2 | mod test { 3 | use super::super::PkgVersion; 4 | use std::cmp::Ordering::*; 5 | #[test] 6 
| fn pkg_ver_ord() { 7 | let source = vec![ 8 | ("1.1.1", Less, "1.1.2"), 9 | ("1b", Greater, "1a"), 10 | ("1", Less, "1.1"), 11 | ("1.0", Less, "1.1"), 12 | ("1.2", Less, "1.11"), 13 | ("1.0-1", Less, "1.1"), 14 | ("1.0-1", Less, "1.0-12"), 15 | // make them different for sorting 16 | ("1:1.0-0", Equal, "1:1.0"), 17 | ("1.0", Equal, "1.0"), 18 | ("1.0-1", Equal, "1.0-1"), 19 | ("1:1.0-1", Equal, "1:1.0-1"), 20 | ("1:1.0", Equal, "1:1.0"), 21 | ("1.0-1", Less, "1.0-2"), 22 | //("1.0final-5sarge1", Greater, "1.0final-5"), 23 | ("1.0final-5", Greater, "1.0a7-2"), 24 | ("0.9.2-5", Less, "0.9.2+cvs.1.0.dev.2004.07.28-1"), 25 | ("1:500", Less, "1:5000"), 26 | ("100:500", Greater, "11:5000"), 27 | ("1.0.4-2", Greater, "1.0pre7-2"), 28 | ("1.5rc1", Less, "1.5"), 29 | ("1.5rc1", Less, "1.5+1"), 30 | ("1.5rc1", Less, "1.5rc2"), 31 | ("1.5rc1", Greater, "1.5dev0"), 32 | ]; 33 | 34 | for e in source { 35 | println!("Comparing {} vs {}", e.0, e.2); 36 | println!( 37 | "{:#?} vs {:#?}", 38 | PkgVersion::try_from(e.0).unwrap(), 39 | PkgVersion::try_from(e.2).unwrap() 40 | ); 41 | assert_eq!( 42 | PkgVersion::try_from(e.0).unwrap().cmp(&PkgVersion::try_from(e.2).unwrap()), 43 | e.1 44 | ); 45 | } 46 | } 47 | 48 | #[test] 49 | fn pkg_ver_eq() { 50 | let source = vec![("1.1+git2021", "1.1+git2021")]; 51 | for e in &source { 52 | assert_eq!(PkgVersion::try_from(e.0).unwrap(), PkgVersion::try_from(e.1).unwrap()); 53 | } 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /src/executor/mod.rs: -------------------------------------------------------------------------------- 1 | use crate::types::{PkgActions, PkgMeta, PkgStatus}; 2 | 3 | use anyhow::{Context, Result}; 4 | use std::{collections::HashMap, fs, path::Path}; 5 | 6 | /// Status of this machine 7 | pub struct MachineStatus { 8 | pub pkgs: HashMap, 9 | } 10 | 11 | impl MachineStatus { 12 | pub async fn new(root: &Path) -> Result { 13 | // Load or create ALPM local db 14 | let 
alpm_local_db_root = root.join("var/lib/pacman/local"); 15 | if !alpm_local_db_root.is_dir() { 16 | fs::create_dir_all(&alpm_local_db_root) 17 | .context("Failed to initialize ALPM local database.")?; 18 | } 19 | 20 | let pkgs = crate::alpm::local::read_alpm_local_db(&alpm_local_db_root).await?; 21 | 22 | Ok(MachineStatus { pkgs }) 23 | } 24 | 25 | /// Generate a list of actions according to machine status and package blueprint 26 | pub fn gen_actions<'a>(&self, blueprint: &[&'a PkgMeta]) -> PkgActions<'a> { 27 | let mut res = PkgActions::default(); 28 | // We will modify the list, so do a clone 29 | let mut old_pkgs = self.pkgs.clone(); 30 | 31 | for newpkg in blueprint { 32 | if !old_pkgs.contains_key(&newpkg.name) { 33 | // New one! Install it 34 | res.install.push((newpkg, None)); 35 | } else { 36 | // Older version exists. Let's check the state of it 37 | // Remove it to mark it's been processed 38 | let oldpkg = old_pkgs.remove(&newpkg.name).unwrap(); 39 | if oldpkg.version < newpkg.version { 40 | // Upgrade 41 | res.install.push((newpkg, Some((oldpkg.version, oldpkg.install_size)))) 42 | } else if oldpkg.version > newpkg.version { 43 | // Downgrade 44 | res.install.push((newpkg, Some((oldpkg.version, oldpkg.install_size)))) 45 | } 46 | } 47 | } 48 | 49 | // Now deal with the leftovers 50 | for oldpkg in old_pkgs { 51 | res.remove.push((oldpkg.0, oldpkg.1.install_size)); 52 | } 53 | res 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /src/alpm/local.rs: -------------------------------------------------------------------------------- 1 | use crate::{ 2 | debug, error, 3 | types::{PkgStatus, PkgVersion}, 4 | utils::pacparse, 5 | }; 6 | use anyhow::{anyhow, bail, Context, Result}; 7 | use std::{collections::HashMap, path::Path}; 8 | use tokio::fs; 9 | 10 | const SUPPORTED_ALPM_DB_VERSION: usize = 9; 11 | pub async fn read_alpm_local_db(root: &Path) -> Result> { 12 | let mut state: HashMap = HashMap::new(); 13 | 
// First check ALPM_DB_VERSION 14 | let alpm_db_ver_path = root.join("ALPM_DB_VERSION"); 15 | let alpm_db_ver: usize = 16 | if let Some(v) = fs::read_to_string(alpm_db_ver_path).await?.lines().next() { 17 | v.parse()? 18 | } else { 19 | bail!("malformed ALPM DB (no version file)") 20 | }; 21 | if alpm_db_ver != SUPPORTED_ALPM_DB_VERSION { 22 | bail!( 23 | "bad ALPM local database version: expected {}, found {}", 24 | SUPPORTED_ALPM_DB_VERSION, 25 | alpm_db_ver 26 | ); 27 | } 28 | 29 | // Start reading 30 | for entry in walkdir::WalkDir::new(root) { 31 | let entry = entry?; 32 | if entry.path().ends_with("desc") { 33 | // Parse it 34 | let content = fs::read_to_string(entry.path()).await?; 35 | let mut result = pacparse::parse_str(&content)?; 36 | let name = result.remove("NAME").ok_or_else(|| { 37 | anyhow!("bad ALPM local db: NAME missing from {}", entry.path().display()) 38 | })?; 39 | let version: PkgVersion = result 40 | .remove("VERSION") 41 | .ok_or_else(|| { 42 | anyhow!("bad ALPM local db: VERSION missing from {}", entry.path().display()) 43 | })? 44 | .as_str() 45 | .try_into()?; 46 | // SIZE is not mandatory because metapackages 47 | let install_size: u64 = if let Some(size_str) = result.remove("SIZE") { 48 | size_str.parse().context(format!( 49 | "bad ALPM local db: invalid character in SIZE for {}", 50 | entry.path().display() 51 | ))? 
52 | } else { 53 | 0 54 | }; 55 | state.insert(name.clone(), PkgStatus { name, version, install_size }); 56 | } 57 | } 58 | Ok(state) 59 | } 60 | -------------------------------------------------------------------------------- /src/utils/lock.rs: -------------------------------------------------------------------------------- 1 | use crate::{debug, LOCK_PATH}; 2 | use anyhow::{bail, Context, Result}; 3 | use nix::unistd::Uid; 4 | use serde::{Deserialize, Serialize}; 5 | use std::{fs, io::prelude::*, path::Path, sync::atomic::Ordering}; 6 | 7 | /// Make sure only one instance of sasm can run at one time 8 | 9 | #[derive(Serialize, Deserialize)] 10 | struct LockInfo { 11 | pid: u32, 12 | } 13 | 14 | pub fn ensure_unlocked(root: &Path) -> Result<()> { 15 | if let Some(pid) = check(root)? { 16 | bail!("Another instance of sasm is currently running at PID {}.", pid); 17 | } 18 | 19 | Ok(()) 20 | } 21 | 22 | pub fn check(root: &Path) -> Result> { 23 | let lock_path = root.join(LOCK_PATH); 24 | if lock_path.is_file() { 25 | let lock_content = 26 | std::fs::read_to_string(lock_path).context("Failed to read lock file.")?; 27 | let lock_info: LockInfo = 28 | toml::from_str(&lock_content).context("Failed to parse lock file.")?; 29 | Ok(Some(lock_info.pid)) 30 | } else { 31 | Ok(None) 32 | } 33 | } 34 | 35 | pub fn lock(root: &Path) -> Result<()> { 36 | // Make sure we are running as root 37 | if !Uid::effective().is_root() { 38 | bail!("You must be root to perform this operation."); 39 | } 40 | 41 | let lock_path = root.join(LOCK_PATH); 42 | if lock_path.is_file() { 43 | bail!("Failed to create an instance lock because the lock file already exists."); 44 | } 45 | 46 | // Set global lock parameter 47 | crate::LOCKED.store(true, Ordering::Relaxed); 48 | 49 | // Create directory if not created yet 50 | let prefix = lock_path.parent().unwrap(); 51 | if !prefix.is_dir() { 52 | fs::create_dir_all(prefix).context("Failed to create directory for lock file.")?; 53 | } 54 | let 
lock_info = LockInfo { pid: std::process::id() }; 55 | let lock_content = toml::to_string(&lock_info)?; 56 | let mut file = fs::File::create(&lock_path).context("Failed to create lock file.")?; 57 | file.write(lock_content.as_bytes()) 58 | .context("Failed to write instance information to lock file.")?; 59 | Ok(()) 60 | } 61 | 62 | pub fn unlock(root: &Path) -> Result<()> { 63 | let lock_path = root.join(LOCK_PATH); 64 | if lock_path.is_file() { 65 | fs::remove_file(&lock_path).context("Failed to delete lock file.")?; 66 | } else { 67 | debug!("Attempt to unlock, but lock file does not exist."); 68 | } 69 | Ok(()) 70 | } 71 | -------------------------------------------------------------------------------- /src/actions/search/provide/parse.rs: -------------------------------------------------------------------------------- 1 | /// Parse Contents files 2 | use anyhow::{bail, Result}; 3 | use nom::{ 4 | bytes::complete::tag, 5 | bytes::complete::take_until1, 6 | character::complete::{space0, space1}, 7 | combinator::eof, 8 | error::ErrorKind, 9 | multi::separated_list1, 10 | IResult, InputTakeAtPosition, 11 | }; 12 | 13 | pub fn parse_contents_line(i: &str) -> Result<(&str, Vec<(&str, &str)>)> { 14 | let (_, (path, packages)) = match contents_line(i) { 15 | Ok(res) => res, 16 | Err(e) => bail!("Invalid Contents line: {}", e), 17 | }; 18 | Ok((path, packages)) 19 | } 20 | 21 | pub fn contents_line(i: &str) -> IResult<&str, (&str, Vec<(&str, &str)>)> { 22 | let (i, mut path) = take_until1(" ")(i)?; 23 | let (i, _) = space1(i)?; 24 | let (i, packages) = separated_list1(package_separator, package)(i)?; 25 | let (i, _) = eof(i)?; 26 | 27 | // Normalize path 28 | if path.starts_with("./") { 29 | path = &path[2..]; 30 | } 31 | 32 | Ok((i, (path, packages))) 33 | } 34 | 35 | fn package_separator(i: &str) -> IResult<&str, ()> { 36 | let (i, _) = tag(",")(i)?; 37 | let (i, _) = space0(i)?; 38 | Ok((i, ())) 39 | } 40 | 41 | fn package(i: &str) -> IResult<&str, (&str, &str)> { 42 
| let (i, section) = take_until1("/")(i)?; 43 | let (i, _) = tag("/")(i)?; 44 | let (i, name) = package_name(i)?; 45 | Ok((i, (section, name))) 46 | } 47 | 48 | fn is_pkgname_char(c: char) -> bool { 49 | c.is_alphanumeric() || c == '-' || c == '.' || c == '+' 50 | } 51 | 52 | fn is_pkgname_with_var_char(c: char) -> bool { 53 | is_pkgname_char(c) || c == '{' || c == '_' || c == '}' 54 | } 55 | 56 | fn package_name(i: &str) -> IResult<&str, &str> { 57 | i.split_at_position1_complete(|item| !is_pkgname_with_var_char(item), ErrorKind::Char) 58 | } 59 | 60 | #[cfg(test)] 61 | mod test { 62 | use super::*; 63 | 64 | #[test] 65 | fn test_contents_line() { 66 | let tests = vec![ 67 | ( 68 | "simple/path sec/pkg1", 69 | ("simple/path", vec![("sec", "pkg1")]), 70 | ), 71 | ( 72 | "simple/path sec/pkg1,sec2/pkg2", 73 | ("simple/path", vec![("sec", "pkg1"), ("sec2", "pkg2")]), 74 | ), 75 | ( 76 | "./bad/path sec/pkg1,sec2/pkg2", 77 | ("bad/path", vec![("sec", "pkg1"), ("sec2", "pkg2")]), 78 | ), 79 | ]; 80 | for (t, r) in tests { 81 | assert_eq!(contents_line(t).unwrap(), ("", r)); 82 | } 83 | } 84 | } 85 | -------------------------------------------------------------------------------- /src/utils/pager.rs: -------------------------------------------------------------------------------- 1 | use anyhow::{format_err, Result}; 2 | use std::{env::var, io::Write, process::Child, sync::atomic::Ordering}; 3 | 4 | pub enum Pager { 5 | Plain, 6 | External((String, Child)), 7 | } 8 | 9 | impl Pager { 10 | pub fn new(no_pager: bool) -> Result { 11 | if no_pager { 12 | return Ok(Pager::Plain); 13 | } 14 | 15 | // Use plain mode for dumb terminals 16 | let term = var("TERM").unwrap_or_default(); 17 | if term == "dumb" || term == "dialup" { 18 | return Ok(Pager::Plain); 19 | } 20 | 21 | let pager_cmd = var("PAGER").unwrap_or_else(|_| "less".to_owned()); 22 | let pager_cmd_segments: Vec<&str> = pager_cmd.split_ascii_whitespace().collect(); 23 | let pager_name = 
pager_cmd_segments.get(0).unwrap_or(&"less"); 24 | let mut p = std::process::Command::new(&pager_name); 25 | if pager_name == &"less" { 26 | p.arg("-R"); // Show ANSI escape sequences correctly 27 | p.arg("-c"); // Start from the top of the screen 28 | p.env("LESSCHARSET", "UTF-8"); // Rust uses UTF-8 29 | } else if pager_cmd_segments.len() > 1 { 30 | p.args(&pager_cmd_segments[1..]); 31 | } 32 | let pager_process = p.stdin(std::process::Stdio::piped()).spawn()?; 33 | // Record PID 34 | crate::SUBPROCESS.store(pager_process.id() as i32, Ordering::SeqCst); 35 | 36 | let res = Pager::External((pager_name.to_string(), pager_process)); 37 | Ok(res) 38 | } 39 | 40 | pub fn pager_name(&self) -> Option<&str> { 41 | match self { 42 | Pager::Plain => None, 43 | Pager::External((name, _)) => Some(name.as_str()), 44 | } 45 | } 46 | 47 | pub fn get_writer(&self) -> Result> { 48 | let res = match self { 49 | Pager::Plain => crate::WRITER.get_writer(), 50 | Pager::External((_, child)) => { 51 | let stdin = child 52 | .stdin 53 | .as_ref() 54 | .ok_or_else(|| format_err!("Failed to take pager's stdin"))?; 55 | let res: Box = Box::new(stdin); 56 | res 57 | } 58 | }; 59 | Ok(res) 60 | } 61 | 62 | pub fn wait_for_exit(&mut self) -> Result<()> { 63 | if let Pager::External((_, child)) = self { 64 | let _ = child.wait()?; 65 | } 66 | 67 | Ok(()) 68 | } 69 | } 70 | 71 | impl Drop for Pager { 72 | fn drop(&mut self) { 73 | // Un-set subprocess pid 74 | crate::SUBPROCESS.store(-1, Ordering::SeqCst); 75 | } 76 | } 77 | -------------------------------------------------------------------------------- /src/actions/local.rs: -------------------------------------------------------------------------------- 1 | use crate::{ 2 | cli, debug, 3 | executor::{MachineStatus, PkgState}, 4 | info, 5 | pool::source::local, 6 | types::config::Opts, 7 | }; 8 | 9 | use anyhow::{bail, Result}; 10 | use console::style; 11 | use std::{ 12 | ffi::OsStr, 13 | fs, 14 | path::{Path, PathBuf}, 15 | }; 16 | 17 | pub 
fn add(opts: &Opts, paths: &[PathBuf]) -> Result> { 18 | let local_repo_root = opts.root.join(crate::LOCAL_REPO_PATH); 19 | 20 | let mut jobs = Vec::new(); 21 | for path in paths { 22 | // Try to load deb info 23 | let pkgmeta = crate::pool::source::local::read_control_from_deb(path)?; 24 | info!( 25 | "Loading {}({}) into local package repository...", 26 | style(&pkgmeta.name).bold(), 27 | pkgmeta.version 28 | ); 29 | if cli::ask_confirm(opts, "Confirm?")? { 30 | bail!("User cancelled operation."); 31 | } 32 | 33 | let filename = match path.file_name() { 34 | Some(f) => f, 35 | None => bail!("Invalid deb file {} !", path.display()), 36 | }; 37 | 38 | // Prepare job 39 | let new_path = local_repo_root.join(filename); 40 | jobs.push((path, new_path, pkgmeta)); 41 | } 42 | 43 | // User has confirmed all packages. 44 | let mut res = Vec::new(); 45 | if !local_repo_root.is_dir() { 46 | fs::create_dir_all(&local_repo_root)?; 47 | } 48 | for (old_path, new_path, pkg) in jobs { 49 | // Copy package to local repo 50 | std::fs::copy(old_path, new_path)?; 51 | // Add pkgname to res 52 | res.push(pkg.name); 53 | } 54 | 55 | Ok(res) 56 | } 57 | 58 | pub fn clean(ms: &MachineStatus, root: &Path) -> Result<()> { 59 | let local_repo_root = root.join(crate::LOCAL_REPO_PATH); 60 | if !local_repo_root.is_dir() { 61 | // Nothing to clean 62 | return Ok(()); 63 | } 64 | 65 | for entry in fs::read_dir(&local_repo_root)? { 66 | let entry = entry?; 67 | let path = entry.path(); 68 | debug!("Inspecting local deb {} ...", path.display()); 69 | if !path.is_file() || path.extension() != Some(OsStr::new("deb")) { 70 | continue; 71 | } 72 | // Read meta 73 | let pkgmeta = local::read_control_from_deb(&path)?; 74 | if let Some(pkgstate) = ms.pkgs.get(&pkgmeta.name) { 75 | if pkgstate.state != PkgState::ConfigFiles && pkgstate.state != PkgState::NotInstalled { 76 | // This package is needed. Move on. 
77 | continue; 78 | } 79 | } 80 | // Not contained in current machine or not installed, remove it. 81 | debug!("Removing {} ...", style(path.display()).bold()); 82 | fs::remove_file(&path)?; 83 | } 84 | 85 | Ok(()) 86 | } 87 | -------------------------------------------------------------------------------- /src/actions/download.rs: -------------------------------------------------------------------------------- 1 | use crate::{ 2 | db::LocalDb, 3 | info, pool, 4 | types::{Checksum, PkgSource}, 5 | utils::downloader::{Compression, DownloadJob, Downloader}, 6 | }; 7 | 8 | use anyhow::{bail, Context, Result}; 9 | use console::style; 10 | 11 | pub async fn download( 12 | pkgname: &str, 13 | local_db: &LocalDb, 14 | downloader: &Downloader, 15 | latest: bool, 16 | ) -> Result<()> { 17 | let dbs = local_db.get_all_package_db().context("Invalid local package database")?; 18 | let pool = pool::source::create_pool(&dbs, &[])?; 19 | 20 | // Get all versions 21 | // Choices: Vec<(DisplayString, URL)> 22 | let mut choices: Vec<(String, String, u64, Checksum)> = Vec::new(); 23 | if let Some(ids) = pool.get_pkgs_by_name(pkgname) { 24 | let mut first = true; 25 | for id in ids { 26 | let meta = pool.get_pkg_by_id(id).unwrap(); 27 | let (url, size, checksum) = match &meta.source { 28 | PkgSource::Http((url, size, checksum)) => (url, size, checksum), 29 | // This should never happen 30 | PkgSource::Local(_) => panic!("Local source from http repo"), 31 | }; 32 | // Form version str for display 33 | let mut version_str = meta.version.to_string(); 34 | let mut info_segments = Vec::new(); 35 | if first { 36 | info_segments.push(style("latest").green().to_string()); 37 | } 38 | if !info_segments.is_empty() { 39 | version_str.push_str(&format!(" ({})", info_segments.join(", "))); 40 | } 41 | choices.push((version_str, url.to_owned(), *size, checksum.to_owned())); 42 | // Not the first anymore 43 | first = false; 44 | } 45 | } else { 46 | bail!("Package {} not found", 
style(pkgname).bold()); 47 | } 48 | 49 | // Display them 50 | let choices_str: Vec<&str> = choices.iter().map(|ver| ver.0.as_str()).collect(); 51 | let i = if latest { 52 | 0 53 | } else { 54 | info!("Please choose a version for {}:", style(pkgname).bold()); 55 | dialoguer::Select::with_theme(&crate::cli::OmaTheme::default()) 56 | .items(&choices_str) 57 | .default(0) 58 | .interact()? 59 | }; 60 | 61 | let (_, url, size, checksum) = &choices[i]; 62 | let job = DownloadJob { 63 | url: url.to_owned(), 64 | description: None, 65 | filename: None, 66 | size: Some(*size), 67 | compression: Compression::None(Some(checksum.clone())), 68 | }; 69 | 70 | // Download package to current directory 71 | let current_dir = std::env::current_dir().context("Failed to get current directory.")?; 72 | downloader 73 | .fetch([job].to_vec(), ¤t_dir, true) 74 | .await 75 | .context("Failed to fetch request package from repository.")?; 76 | 77 | Ok(()) 78 | } 79 | -------------------------------------------------------------------------------- /src/solver/mod.rs: -------------------------------------------------------------------------------- 1 | /// The sasm dependency solver 2 | /// Comes with the pool it uses to calculate dependencies upon 3 | pub mod pool; 4 | 5 | mod improve; 6 | mod incompatible; 7 | mod sort; 8 | 9 | use crate::{config::Blueprints, debug, types::PkgMeta}; 10 | use anyhow::{bail, format_err, Context, Result}; 11 | use pool::PkgPool; 12 | use varisat::{lit::Lit, ExtendFormula}; 13 | 14 | pub struct Solver { 15 | pub pool: Box, 16 | } 17 | 18 | impl From> for Solver { 19 | fn from(pool: Box) -> Self { 20 | Solver { pool } 21 | } 22 | } 23 | 24 | impl From for Solver { 25 | fn from(pool: pool::InMemoryPool) -> Self { 26 | Solver { pool: Box::new(pool) } 27 | } 28 | } 29 | 30 | impl Solver { 31 | pub fn install(&self, blueprints: &Blueprints) -> Result> { 32 | let mut formula = self.pool.gen_formula(None); 33 | debug!("Adding requested packages to solver formula..."); 
34 | let mut ids = Vec::new(); 35 | for req in blueprints.get_pkg_requests() { 36 | let id = self.pool.pick_best_pkg(&req.name, &req.version, req.local)?; 37 | formula.add_clause(&[Lit::from_dimacs(id as isize)]); 38 | ids.push(id); 39 | } 40 | // Add rules to solver 41 | let mut solver = varisat::Solver::new(); 42 | solver.add_formula(&formula); 43 | 44 | // Initial solve 45 | debug!("Computing initial solution..."); 46 | let mut res = match solve(&mut solver) { 47 | Ok(r) => r, 48 | Err(_) => { 49 | return Err(format_err!(incompatible::find_incompatible_friendly( 50 | self.pool.as_ref(), 51 | &ids 52 | ))) 53 | .context("sasm cannot satisfy package requirements.") 54 | } 55 | }; 56 | 57 | // Improve the result to remove redundant packages 58 | // and select best possible packages 59 | debug!("Refining dependency solution..."); 60 | improve::upgrade(self.pool.as_ref(), &mut res, &mut solver)?; 61 | improve::reduce(self.pool.as_ref(), &mut res, &ids)?; 62 | // Sort result 63 | sort::sort_pkgs(self.pool.as_ref(), &mut res).context("Failed to sort packages")?; 64 | 65 | // Generate result 66 | let pkgs: Vec<&PkgMeta> = 67 | res.into_iter().map(|pkgid| self.pool.get_pkg_by_id(pkgid).unwrap()).collect(); 68 | 69 | Ok(pkgs) 70 | } 71 | } 72 | 73 | /// Helper function to get PkgID list 74 | pub fn solve(solver: &mut varisat::Solver) -> Result> { 75 | let mut res = Vec::new(); 76 | if !solver.solve().unwrap() { 77 | bail!("sasm cannot satisfy package requirements."); 78 | } else { 79 | let model = solver.model().unwrap(); 80 | for i in model { 81 | if i.is_positive() { 82 | let id = i.to_dimacs() as usize; 83 | res.push(id); 84 | } 85 | } 86 | } 87 | Ok(res) 88 | } 89 | -------------------------------------------------------------------------------- /src/actions/mod.rs: -------------------------------------------------------------------------------- 1 | mod execute; 2 | use execute::execute; 3 | 4 | use crate::{ 5 | config::CachedRepoDb, 6 | config::{Blueprints, Config, 
Opts, SubCmd}, 7 | executor::MachineStatus, 8 | info, success, 9 | types::VersionRequirement, 10 | utils::lock, 11 | }; 12 | 13 | use anyhow::{Context, Result}; 14 | use std::path::PathBuf; 15 | 16 | #[derive(Debug)] 17 | pub enum UserRequest { 18 | // Vec<(PkgName, ver_req, install_recomm, added_by, local)> 19 | Install(Vec), 20 | // Vec<(PkgName, remove_recomm)> 21 | Remove(Vec<(String, bool)>), 22 | Upgrade, 23 | } 24 | 25 | #[derive(Debug)] 26 | pub struct InstallRequest { 27 | pkgname: String, 28 | install_recomm: bool, 29 | ver_req: Option, 30 | local: bool, 31 | /// Whether modify existing entry 32 | modify: bool, 33 | } 34 | 35 | /// bool in return type indicated whether user cancelled operation 36 | pub async fn fullfill_command( 37 | config: &Config, 38 | opts: &Opts, 39 | blueprints: &mut Blueprints, 40 | ) -> Result { 41 | let downloader = crate::utils::downloader::Downloader::new(); 42 | // Directory that stores trusted public keys for repos 43 | let _key_root = opts.root.join(crate::DB_KEY_PATH); 44 | let localdb = 45 | CachedRepoDb::new(opts.root.join(crate::DB_CACHE_PATH), config.repo.clone(), &config.arch); 46 | 47 | match &opts.subcmd { 48 | SubCmd::Execute => { 49 | // This operation has side effects 50 | lock::ensure_unlocked(&opts.root)?; 51 | lock::lock(&opts.root)?; 52 | 53 | let req = UserRequest::Upgrade; 54 | localdb 55 | .update(&downloader) 56 | .await 57 | .context("Failed to refresh local package metadata!")?; 58 | 59 | let exit = execute(&localdb, &downloader, blueprints, opts, config, req).await?; 60 | 61 | Ok(exit) 62 | } 63 | SubCmd::Clean(cleanconfig) => { 64 | // This operation has side effects 65 | lock::ensure_unlocked(&opts.root)?; 66 | lock::lock(&opts.root)?; 67 | 68 | info!("Purging local package metadata cache..."); 69 | let pkg_cache_path = opts.root.join(crate::PKG_CACHE_PATH); 70 | if pkg_cache_path.is_dir() { 71 | std::fs::remove_dir_all(&pkg_cache_path)?; 72 | std::fs::create_dir_all(&pkg_cache_path)?; 73 | } 74 | 75 
| info!("Purging local package cache..."); 76 | let ms = MachineStatus::new(&opts.root).await?; 77 | 78 | if cleanconfig.all { 79 | info!("Purging local metadata cache..."); 80 | let db_cache_path = opts.root.join(crate::DB_CACHE_PATH); 81 | if db_cache_path.is_dir() { 82 | std::fs::remove_dir_all(&db_cache_path)?; 83 | std::fs::create_dir_all(&db_cache_path)?; 84 | } 85 | } 86 | 87 | Ok(false) 88 | } 89 | } 90 | } 91 | -------------------------------------------------------------------------------- /src/actions/search/pkg.rs: -------------------------------------------------------------------------------- 1 | use super::PkgInfo; 2 | use crate::{db::LocalDb, executor::MachineStatus, pool, pool::PkgPool}; 3 | 4 | use anyhow::{Context, Result}; 5 | use std::{cmp::Reverse, collections::HashMap}; 6 | 7 | pub fn search_deb_db( 8 | local_db: &LocalDb, 9 | keyword: &str, 10 | machine_status: &MachineStatus, 11 | ) -> Result<()> { 12 | let dbs = local_db 13 | .get_all_package_db() 14 | .context("Failed to initialize local database for searching!")?; 15 | let pool = pool::source::create_pool(&dbs, &[])?; 16 | 17 | let mut pkgs = search_pkg_helper(pool.as_ref(), keyword); 18 | 19 | // Sort pkg in descending order based on relevance to keyword 20 | pkgs.sort_by_cached_key(|pkg| Reverse(pkg_score(pkg, keyword))); 21 | 22 | // Display result 23 | for pkg in pkgs { 24 | pkg.show(machine_status)?; 25 | } 26 | 27 | Ok(()) 28 | } 29 | 30 | pub fn search_pkg_helper<'a, P: ?Sized>(pool: &'a P, keyword: &str) -> Vec> 31 | where 32 | P: PkgPool, 33 | { 34 | // Iterate through package names 35 | let mut res = HashMap::new(); 36 | for (name, versions) in pool.pkgname_iter() { 37 | if name.contains(keyword) { 38 | let id = versions[0].0; 39 | let pkg = pool.get_pkg_by_id(id).unwrap(); 40 | let has_dbg_pkg = pool.has_dbg_pkg(id).unwrap(); 41 | 42 | // Construct PkgInfo, don't include debug packages 43 | if !pkg.name.ends_with("-dbg") && pkg.section != "debug" { 44 | let pkginfo = PkgInfo { 
45 | pkg, 46 | has_dbg_pkg, 47 | additional_info: Vec::new(), 48 | }; 49 | res.insert(name, pkginfo); 50 | } 51 | } 52 | } 53 | 54 | // Search package description 55 | for (id, meta) in pool.pkgid_iter() { 56 | if meta.description.contains(keyword) && !res.contains_key(meta.name.as_str()) { 57 | let pkginfo = PkgInfo { 58 | pkg: meta, 59 | has_dbg_pkg: pool.has_dbg_pkg(id).unwrap(), 60 | additional_info: Vec::new(), 61 | }; 62 | res.insert(&meta.name, pkginfo); 63 | } 64 | 65 | // Search if provides 66 | if let Some(provides) = &meta.provides { 67 | for provide in provides { 68 | if keyword == provide.0 { 69 | let pkginfo = PkgInfo { 70 | pkg: meta, 71 | has_dbg_pkg: false, 72 | additional_info: Vec::new(), 73 | }; 74 | res.insert(&meta.name, pkginfo); 75 | } 76 | } 77 | } 78 | } 79 | 80 | res.into_values().collect() 81 | } 82 | 83 | fn pkg_score(pkg: &PkgInfo, keyword: &str) -> u8 { 84 | if let Some(provides) = &pkg.pkg.provides { 85 | for provide in provides { 86 | if provide.0 == keyword { 87 | return u8::MAX; 88 | } 89 | } 90 | } 91 | 92 | (255.0 * strsim::jaro_winkler(&pkg.pkg.name, keyword)) as u8 93 | } 94 | -------------------------------------------------------------------------------- /src/config/repo/mod.rs: -------------------------------------------------------------------------------- 1 | mod config; 2 | pub use config::RepoConfig; 3 | 4 | use crate::{ 5 | debug, info, 6 | utils::downloader::{Compression, DownloadJob, Downloader}, 7 | }; 8 | use anyhow::Result; 9 | use console::style; 10 | use std::{collections::HashMap, path::PathBuf}; 11 | 12 | #[derive(Debug)] 13 | pub struct CachedRepoDb { 14 | // root directory for dbs 15 | root: PathBuf, 16 | arch: String, 17 | repos: HashMap, 18 | } 19 | 20 | impl CachedRepoDb { 21 | pub fn new(root: PathBuf, repos: HashMap, arch: &str) -> Self { 22 | CachedRepoDb { root, arch: arch.to_owned(), repos } 23 | } 24 | 25 | /// Get the remote (relative) path and local path for a repository 26 | pub fn 
get_package_db(&self, name: &str) -> Result<(String, PathBuf)> { 27 | let remote_relative_path = format!("{0}.db", name); 28 | let local_path = self.root.join(self.root.join(format!("{}.db", name))); 29 | 30 | Ok((remote_relative_path, local_path)) 31 | } 32 | 33 | // Get (BaseURL, FilePath) of all configured repos 34 | pub fn get_all_package_db(&self) -> Result> { 35 | let mut res = Vec::new(); 36 | for repo in &self.repos { 37 | res.push(self.get_package_db(repo.0)?); 38 | } 39 | Ok(res) 40 | } 41 | 42 | pub fn get_contents_db(&self, name: &str) -> Result<(String, PathBuf)> { 43 | let arch = &self.arch; 44 | let remote_relative_path = format!("{0}.files", name); 45 | let local_path = self.root.join(self.root.join(format!("{}.files", name))); 46 | 47 | Ok((remote_relative_path, local_path)) 48 | } 49 | 50 | // Get (BaseURL, FilePath) of all configured repos 51 | pub fn get_all_contents_db(&self) -> Result> { 52 | let mut res = Vec::new(); 53 | for repo in &self.repos { 54 | res.push(self.get_contents_db(repo.0)?); 55 | } 56 | Ok(res) 57 | } 58 | 59 | pub async fn update(&self, downloader: &Downloader) -> Result<()> { 60 | info!("Refreshing local repository metadata..."); 61 | 62 | let package_dbs = self.get_all_package_db()?; 63 | if crate::verbose() { 64 | for db in &package_dbs { 65 | debug!("Downloading {} {}", db.0, db.1.display()); 66 | } 67 | } 68 | 69 | let mut download_jobs = Vec::with_capacity(package_dbs.len()); 70 | for (name, repo) in &self.repos { 71 | let (remote_path, _local_path) = self.get_package_db(&name)?; 72 | download_jobs.push(DownloadJob { 73 | url: format!("{}/{}", repo.get_url(name, &self.arch)?, remote_path), 74 | description: Some(format!("Package database for {}", style(name).bold())), 75 | filename: Some(format!("{}.db", name)), 76 | size: None, 77 | compression: Compression::None(None), 78 | }) 79 | } 80 | 81 | // The downloader will verify the checksum for us 82 | downloader.fetch(download_jobs, &self.root, false).await?; 83 | 84 | 
Ok(()) 85 | } 86 | } 87 | -------------------------------------------------------------------------------- /src/actions/search/mod.rs: -------------------------------------------------------------------------------- 1 | mod pkg; 2 | mod provide; 3 | pub use pkg::search_deb_db; 4 | pub use provide::show_provide_file; 5 | 6 | use crate::{ 7 | executor::{MachineStatus, PkgState}, 8 | types::PkgMeta, 9 | }; 10 | 11 | use anyhow::Result; 12 | use console::style; 13 | 14 | pub struct PkgInfo<'a> { 15 | pub pkg: &'a PkgMeta, 16 | // Additional info 17 | pub has_dbg_pkg: bool, 18 | pub additional_info: Vec, 19 | } 20 | 21 | impl<'a> PkgInfo<'a> { 22 | pub fn show(&self, machine_status: &MachineStatus) -> Result<()> { 23 | // Construct prefix 24 | let prefix = match machine_status.pkgs.get(&self.pkg.name) { 25 | Some(pkg) => match pkg.state { 26 | PkgState::Installed => style("INSTALLED").green(), 27 | PkgState::Unpacked => style("UNPACKED").yellow(), 28 | _ => style("AVAIL").dim(), 29 | }, 30 | None => style("AVAIL").dim(), 31 | } 32 | .to_string(); 33 | // Construct pkg info line 34 | let mut pkg_info_line = style(&self.pkg.name).bold().to_string(); 35 | pkg_info_line.push(' '); 36 | pkg_info_line.push_str(&style(&self.pkg.version).green().to_string()); 37 | if self.has_dbg_pkg { 38 | pkg_info_line.push(' '); 39 | pkg_info_line.push_str(&style("(debug symbols available)").dim().to_string()); 40 | } 41 | crate::WRITER.writeln(&prefix, &pkg_info_line)?; 42 | 43 | // Write package description 44 | crate::WRITER.writeln("", &self.pkg.description)?; 45 | 46 | // Write provided packages 47 | if let Some(provides) = &self.pkg.provides { 48 | let prefix = style("Provides:").dim().to_string(); 49 | let mut chunks = vec![prefix]; 50 | for (name, ver_req) in provides { 51 | let mut chunk = name.clone(); 52 | if !ver_req.is_arbitary() { 53 | chunk.push_str(&format!(" ({ver_req})")); 54 | } 55 | chunks.push(chunk); 56 | } 57 | crate::WRITER.write_chunks("", &chunks)?; 58 | } 59 | 60 | 
// Write recommended packages 61 | if let Some(recommends) = &self.pkg.recommends { 62 | let prefix = style("Recommends:").dim().to_string(); 63 | let mut chunks = vec![prefix]; 64 | for (name, ver_req) in recommends { 65 | let mut chunk = name.clone(); 66 | if !ver_req.is_arbitary() { 67 | chunk.push_str(&format!(" ({ver_req})")); 68 | } 69 | chunks.push(chunk); 70 | } 71 | crate::WRITER.write_chunks("", &chunks)?; 72 | } 73 | 74 | // Write suggested packages 75 | if let Some(suggests) = &self.pkg.suggests { 76 | let prefix = style("Suggests:").dim().to_string(); 77 | let mut chunks = vec![prefix]; 78 | for (name, ver_req) in suggests { 79 | let mut chunk = name.clone(); 80 | if !ver_req.is_arbitary() { 81 | chunk.push_str(&format!(" ({ver_req})")); 82 | } 83 | chunks.push(chunk); 84 | } 85 | crate::WRITER.write_chunks("", &chunks)?; 86 | } 87 | 88 | // Write additional info, if applicable 89 | for line in &self.additional_info { 90 | crate::WRITER.writeln("", line)?; 91 | } 92 | 93 | Ok(()) 94 | } 95 | } 96 | -------------------------------------------------------------------------------- /src/types/version/parse.rs: -------------------------------------------------------------------------------- 1 | use super::{PkgVersion, PkgVersionSegment}; 2 | 3 | use anyhow::{bail, Result}; 4 | use nom::{ 5 | character::{complete::*, is_alphanumeric}, 6 | combinator::eof, 7 | error::{context, ErrorKind, ParseError}, 8 | sequence::*, 9 | IResult, InputTakeAtPosition, 10 | }; 11 | 12 | pub fn parse_version(i: &str) -> IResult<&str, PkgVersion> { 13 | let (tmp_i, epoch) = match context( 14 | "Parsing epoch...", 15 | pair::<_, _, _, nom::error::Error<&str>, _, _>(digit1, char(':')), 16 | )(i) 17 | { 18 | Ok((i, (epoch, _))) => (i, epoch.parse().unwrap()), 19 | Err(_) => (i, 0), 20 | }; 21 | 22 | // Try with or without epoch 23 | let (i, (upstream_version, revision)) = match upstream_version(tmp_i) { 24 | Ok(x) => x, 25 | Err(_) => upstream_version(i)?, 26 | }; 27 | 28 | let res 
= PkgVersion { epoch, version: upstream_version, revision }; 29 | 30 | Ok((i, res)) 31 | } 32 | 33 | impl TryFrom<&str> for PkgVersion { 34 | type Error = anyhow::Error; 35 | fn try_from(s: &str) -> Result { 36 | match parse_version(s) { 37 | Ok((_, ver)) => Ok(ver), 38 | Err(e) => bail!("Error parsing package version: {e}"), 39 | } 40 | } 41 | } 42 | 43 | fn is_upstream_version_char(c: char) -> bool { 44 | c.is_alphanumeric() || is_upstream_version_separater(c) 45 | } 46 | 47 | fn is_upstream_version_separater(c: char) -> bool { 48 | c == '.' || c == '-' || c == '_' || c == '~' || c == '+' 49 | } 50 | 51 | fn upstream_version_separater(i: &str) -> IResult<&str, &str> { 52 | i.split_at_position1_complete(|item| !is_upstream_version_separater(item), ErrorKind::Char) 53 | } 54 | 55 | fn revision(i: &str) -> IResult<&str, &str> { 56 | let (i, _) = char('-')(i)?; 57 | let (i, rev) = digit1(i)?; 58 | let (i, _) = eof(i)?; 59 | Ok((i, rev)) 60 | } 61 | 62 | fn upstream_version(i: &str) -> IResult<&str, (Vec, Option)> { 63 | if i.is_empty() { 64 | return Err(nom::Err::Error(nom::error::Error::from_error_kind(i, ErrorKind::Eof))); 65 | } 66 | 67 | if !i.starts_with(|c: char| c.is_alphanumeric()) { 68 | return Err(nom::Err::Error(nom::error::Error::from_error_kind(i, ErrorKind::Char))); 69 | } 70 | 71 | let mut result = Vec::new(); 72 | let mut rev = None; 73 | let mut ti = i; 74 | loop { 75 | if ti.len() == 0 { 76 | // Our job is done here 77 | break; 78 | } else if let Ok((i, r)) = revision(ti) { 79 | // We've reached the end and there's a revision 80 | rev = Some(r.parse().unwrap()); 81 | ti = i; 82 | break; 83 | } else if let Ok((i, digits)) = digit1::<_, ()>(ti) { 84 | // We got a digit segment! 85 | result.push(PkgVersionSegment::Number(digits.parse().unwrap())); 86 | ti = i; 87 | } else if let Ok((i, chars)) = alpha1::<_, ()>(ti) { 88 | // We got a character segment! 
89 | result.push(PkgVersionSegment::Alphabetic(chars.to_owned())); 90 | ti = i; 91 | } else if let Ok((i, chars)) = upstream_version_separater(ti) { 92 | // Some characters we don't care 93 | result.push(PkgVersionSegment::Separater(chars.to_owned())); 94 | ti = i; 95 | } else { 96 | // We've reached something we don't know about. Stop parsing 97 | break; 98 | } 99 | } 100 | 101 | Ok((ti, (result, rev))) 102 | } 103 | -------------------------------------------------------------------------------- /src/config/mod.rs: -------------------------------------------------------------------------------- 1 | mod blueprint; 2 | pub use blueprint::{Blueprints, PkgRequest}; 3 | mod repo; 4 | pub use repo::{CachedRepoDb, RepoConfig}; 5 | 6 | use crate::{info, warn}; 7 | 8 | use anyhow::{bail, Context, Result}; 9 | use clap::Parser; 10 | use console::style; 11 | use serde::{Deserialize, Serialize, Serializer}; 12 | use std::{ 13 | collections::{BTreeMap, HashMap}, 14 | fs, 15 | path::PathBuf, 16 | }; 17 | 18 | #[derive(Deserialize, Serialize, Clone)] 19 | pub struct Config { 20 | pub arch: String, 21 | #[serde(serialize_with = "ordered_map")] 22 | pub repo: HashMap, 23 | } 24 | 25 | fn ordered_map(value: &HashMap, serializer: S) -> Result 26 | where 27 | S: Serializer, 28 | { 29 | let ordered: BTreeMap<_, _> = value.iter().collect(); 30 | ordered.serialize(serializer) 31 | } 32 | 33 | #[derive(Parser)] 34 | #[clap(about, version, author)] 35 | pub struct Opts { 36 | #[clap(display_order = 1, long, default_value = "/", help = "Root directory for operation")] 37 | pub root: PathBuf, 38 | #[clap( 39 | display_order = 2, 40 | long, 41 | default_value = "etc/sasm/", 42 | help = "Position of the config folder" 43 | )] 44 | pub config_root: PathBuf, 45 | #[clap(display_order = 3, long, help = "Say yes to every prompt")] 46 | pub yes: bool, 47 | #[clap(display_order = 4, short, long, help = "Print additional debug information")] 48 | pub verbose: bool, 49 | #[clap(display_order = 5, 
long, help = "Don't pipe long output into a pager")] 50 | pub no_pager: bool, 51 | #[clap(subcommand)] 52 | pub subcmd: SubCmd, 53 | } 54 | 55 | #[derive(Parser)] 56 | pub enum SubCmd { 57 | /// Install and upgrade all packages according to Blueprint 58 | #[clap(display_order = 4, aliases = &["upgrade"])] 59 | Execute, 60 | /// Delete local package cache (optionally metadata cache) 61 | #[clap(display_order = 21)] 62 | Clean(CleanConfig), 63 | } 64 | 65 | #[derive(Parser)] 66 | pub struct InstallPkg { 67 | /// Package names or deb file names to install 68 | #[clap(min_values = 1)] 69 | pub names: Vec, 70 | 71 | /// Don't install recommended packages 72 | #[clap(long)] 73 | pub no_recommends: bool, 74 | /// Install local package files rather from the repositories 75 | #[clap(long)] 76 | pub local: bool, 77 | } 78 | 79 | #[derive(Parser)] 80 | pub struct RemovePkg { 81 | /// Package names to remove 82 | #[clap(min_values = 1)] 83 | pub names: Vec, 84 | /// Also remove recommended packages 85 | #[clap(long)] 86 | pub remove_recommends: bool, 87 | } 88 | 89 | #[derive(Parser)] 90 | pub struct PickPkg { 91 | /// Package names to pick version 92 | pub name: String, 93 | } 94 | 95 | #[derive(Parser)] 96 | pub struct SearchPkg { 97 | /// Search keyword for package name 98 | pub keyword: String, 99 | } 100 | 101 | #[derive(Parser)] 102 | pub struct ProvideFile { 103 | /// Partial or full path for searching 104 | pub file: String, 105 | /// Search binary files only 106 | #[clap(long)] 107 | pub bin: bool, 108 | } 109 | 110 | #[derive(Parser)] 111 | pub struct CleanConfig { 112 | /// Remove both package cache and local database 113 | #[clap(short, long)] 114 | pub all: bool, 115 | } 116 | 117 | #[derive(Parser)] 118 | pub struct DownloadPkg { 119 | /// Name of package 120 | pub pkgname: String, 121 | /// Use latest version automatically 122 | #[clap(long)] 123 | pub latest: bool, 124 | } 125 | -------------------------------------------------------------------------------- 
/src/types/version/ord.rs: -------------------------------------------------------------------------------- 1 | use super::{PkgVersion, PkgVersionSegment}; 2 | use std::cmp::{max, Ord, Ordering}; 3 | 4 | /// the rpmvercmp algorithm 5 | /// Check https://fedoraproject.org/wiki/Archive:Tools/RPM/VersionComparison 6 | impl Ord for PkgVersion { 7 | fn cmp(&self, other: &Self) -> Ordering { 8 | if self.epoch > other.epoch { 9 | return Ordering::Greater; 10 | } 11 | 12 | if self.epoch < other.epoch { 13 | return Ordering::Less; 14 | } 15 | 16 | let this_segments: Vec<&PkgVersionSegment> = 17 | self.version.iter().filter(|x| !matches!(x, PkgVersionSegment::Separater(_))).collect(); 18 | let that_segments: Vec<&PkgVersionSegment> = other 19 | .version 20 | .iter() 21 | .filter(|x| !matches!(x, PkgVersionSegment::Separater(_))) 22 | .collect(); 23 | 24 | let max_len = max(this_segments.len(), that_segments.len()); 25 | for i in 0..max_len { 26 | let this = this_segments.get(i); 27 | let that = that_segments.get(i); 28 | 29 | match this { 30 | Some(PkgVersionSegment::Alphabetic(this_val)) => match that { 31 | Some(PkgVersionSegment::Alphabetic(that_val)) => { 32 | if this_val > that_val { 33 | return Ordering::Greater; 34 | } else if this_val < that_val { 35 | return Ordering::Less; 36 | } 37 | } 38 | Some(PkgVersionSegment::Number(that_val)) => { 39 | return Ordering::Less; 40 | } 41 | Some(PkgVersionSegment::Separater(_)) => { 42 | unreachable!() 43 | } 44 | None => { 45 | return Ordering::Less; 46 | } 47 | }, 48 | Some(PkgVersionSegment::Number(this_val)) => match that { 49 | Some(PkgVersionSegment::Alphabetic(that_val)) => { 50 | return Ordering::Greater; 51 | } 52 | Some(PkgVersionSegment::Number(that_val)) => { 53 | if this_val > that_val { 54 | return Ordering::Greater; 55 | } else if this_val < that_val { 56 | return Ordering::Less; 57 | } 58 | } 59 | Some(PkgVersionSegment::Separater(_)) => { 60 | unreachable!() 61 | } 62 | None => { 63 | return Ordering::Greater; 
64 | } 65 | }, 66 | Some(PkgVersionSegment::Separater(_)) => { 67 | unreachable!() 68 | } 69 | None => match that { 70 | Some(PkgVersionSegment::Alphabetic(that_val)) => { 71 | return Ordering::Less; 72 | } 73 | Some(PkgVersionSegment::Number(that_val)) => { 74 | return Ordering::Less; 75 | } 76 | Some(PkgVersionSegment::Separater(_)) => { 77 | unreachable!() 78 | } 79 | None => (), 80 | }, 81 | } 82 | } 83 | 84 | if self.revision.is_some() && other.revision.is_some() { 85 | self.revision.cmp(&other.revision) 86 | } else { 87 | Ordering::Equal 88 | } 89 | } 90 | } 91 | 92 | impl PartialOrd for PkgVersion { 93 | fn partial_cmp(&self, other: &Self) -> Option { 94 | Some(self.cmp(other)) 95 | } 96 | } 97 | -------------------------------------------------------------------------------- /src/types/checksum.rs: -------------------------------------------------------------------------------- 1 | use anyhow::{bail, Context, Result}; 2 | use serde::{Deserialize, Serialize}; 3 | use sha2::{Digest, Sha256, Sha512}; 4 | use std::{fmt::Display, fs::File, io, path::Path}; 5 | 6 | #[derive(PartialEq, Eq, Clone, Debug, Serialize, Deserialize)] 7 | pub enum Checksum { 8 | Sha256(Vec), 9 | Sha512(Vec), 10 | } 11 | 12 | #[derive(Clone)] 13 | pub enum ChecksumValidator { 14 | Sha256((Vec, Sha256)), 15 | Sha512((Vec, Sha512)), 16 | } 17 | 18 | impl ChecksumValidator { 19 | pub fn update(&mut self, data: impl AsRef<[u8]>) { 20 | match self { 21 | ChecksumValidator::Sha256((_, v)) => v.update(data), 22 | ChecksumValidator::Sha512((_, v)) => v.update(data), 23 | } 24 | } 25 | 26 | pub fn finish(self) -> bool { 27 | match self { 28 | ChecksumValidator::Sha256((c, v)) => c == v.finalize().to_vec(), 29 | ChecksumValidator::Sha512((c, v)) => c == v.finalize().to_vec(), 30 | } 31 | } 32 | } 33 | 34 | impl Checksum { 35 | pub fn from_file_sha256(path: &Path) -> Result { 36 | let mut file = File::open(path) 37 | .context(format!("Failed to open {} for checking checksum", path.display()))?; 38 | 
let mut hasher = Sha256::new(); 39 | io::copy(&mut file, &mut hasher)?; 40 | let hash = hasher.finalize().to_vec(); 41 | Ok(Self::Sha256(hash)) 42 | } 43 | 44 | /// This function does not do input sanitization, so do checks before! 45 | pub fn from_sha256_str(s: &str) -> Result { 46 | if s.len() != 64 { 47 | bail!("Malformed SHA256 checksum: bad length.") 48 | } 49 | Ok(Checksum::Sha256(hex::decode(s)?)) 50 | } 51 | 52 | /// This function does not do input sanitization, so do checks before! 53 | pub fn from_sha512_str(s: &str) -> Result { 54 | if s.len() != 128 { 55 | bail!("Malformed SHA512 checksum: bad length.") 56 | } 57 | Ok(Checksum::Sha512(hex::decode(s)?)) 58 | } 59 | 60 | pub fn get_validator(&self) -> ChecksumValidator { 61 | match self { 62 | Checksum::Sha256(c) => ChecksumValidator::Sha256((c.clone(), Sha256::new())), 63 | Checksum::Sha512(c) => ChecksumValidator::Sha512((c.clone(), Sha512::new())), 64 | } 65 | } 66 | 67 | pub fn cmp_read(&self, mut r: Box) -> Result { 68 | match self { 69 | Checksum::Sha256(hex) => { 70 | let mut hasher = Sha256::new(); 71 | io::copy(&mut r, &mut hasher)?; 72 | let hash = hasher.finalize().to_vec(); 73 | if hex == &hash { 74 | Ok(true) 75 | } else { 76 | Ok(false) 77 | } 78 | } 79 | Checksum::Sha512(hex) => { 80 | let mut hasher = Sha512::new(); 81 | io::copy(&mut r, &mut hasher)?; 82 | let hash = hasher.finalize().to_vec(); 83 | if hex == &hash { 84 | Ok(true) 85 | } else { 86 | Ok(false) 87 | } 88 | } 89 | } 90 | } 91 | 92 | pub fn cmp_file(&self, path: &Path) -> Result { 93 | let file = File::open(path) 94 | .context(format!("Failed to open {} for checking checksum", path.display()))?; 95 | 96 | self.cmp_read(Box::new(file) as Box) 97 | } 98 | } 99 | 100 | impl Display for Checksum { 101 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 102 | match self { 103 | Checksum::Sha256(hex) => { 104 | f.write_str("sha256::")?; 105 | f.write_str(&hex::encode(hex)) 106 | } 107 | Checksum::Sha512(hex) => { 
108 | f.write_str("sha512::")?; 109 | f.write_str(&hex::encode(hex)) 110 | } 111 | } 112 | } 113 | } 114 | -------------------------------------------------------------------------------- /src/solver/pool/in_memory.rs: -------------------------------------------------------------------------------- 1 | use super::{BasicPkgPool, PkgPool}; 2 | use crate::types::{PkgMeta, PkgVersion, VersionRequirement}; 3 | 4 | use rayon::prelude::*; 5 | use reqwest::header::ValueDrain; 6 | use std::collections::HashMap; 7 | 8 | pub struct InMemoryPool { 9 | pkgs: Vec, 10 | // The id of packages for each name, sorted by version 11 | name_to_ids: HashMap>, 12 | // The ids of packages that provide a certain package, for accelerated provide lookup 13 | provide_to_ids: HashMap>, 14 | } 15 | 16 | impl InMemoryPool { 17 | pub fn new() -> Self { 18 | InMemoryPool { 19 | pkgs: Vec::new(), 20 | name_to_ids: HashMap::new(), 21 | provide_to_ids: HashMap::new(), 22 | } 23 | } 24 | } 25 | 26 | impl BasicPkgPool for InMemoryPool { 27 | fn add(&mut self, meta: PkgMeta) -> usize { 28 | // Find out which names are provided 29 | let provide_names: Vec<(String, VersionRequirement)> = 30 | meta.provides.iter().map(|p| (p.0.clone(), p.1.clone())).collect(); 31 | 32 | let name = meta.name.clone(); 33 | let version = meta.version.clone(); 34 | self.pkgs.push(meta); 35 | let index = self.pkgs.len(); 36 | 37 | // Add pkgid to corresponding provide dict 38 | for (provide, ver_req) in provide_names { 39 | if self.provide_to_ids.contains_key(&provide) { 40 | let ids = self.provide_to_ids.get_mut(&provide).unwrap(); 41 | ids.push((index, ver_req)); 42 | } else { 43 | self.provide_to_ids.insert(provide, vec![(index, ver_req)]); 44 | } 45 | } 46 | 47 | // Add pkgid to corresponding name dict 48 | if self.name_to_ids.contains_key(&name) { 49 | let ids = self.name_to_ids.get_mut(&name).unwrap(); 50 | ids.push((index, version)); 51 | } else { 52 | self.name_to_ids.insert(name, Vec::from([(index, version)])); 53 | } 
54 | 55 | index 56 | } 57 | 58 | fn finalize(&mut self) { 59 | // Sort versions 60 | self.name_to_ids.iter_mut().for_each(|(_, pkgs)| { 61 | // Sort in descending order 62 | pkgs.sort_unstable_by(|a, b| b.1.cmp(&a.1)); 63 | }); 64 | } 65 | 66 | fn get_pkg_by_id(&self, id: usize) -> Option<&PkgMeta> { 67 | if id > self.pkgs.len() { 68 | return None; 69 | } 70 | // Since our SAT solver only accepts int > 0 as Literal, we offset pos by 1 71 | let pos = id - 1; 72 | let pkg = &self.pkgs[pos]; 73 | Some(pkg) 74 | } 75 | 76 | fn get_pkgs_by_name(&self, name: &str) -> Option> { 77 | match self.name_to_ids.get(name) { 78 | Some(pkgs) => { 79 | let res: Vec = pkgs.iter().map(|(pkgid, _)| *pkgid).collect(); 80 | Some(res) 81 | } 82 | None => None, 83 | } 84 | } 85 | 86 | fn get_pkgs_by_provide(&self, name: &str, ver_req: &VersionRequirement) -> Option> { 87 | let res = if let Some(provides) = self.provide_to_ids.get(name) { 88 | let res = provides 89 | .into_iter() 90 | .filter(|pkg| ver_req.overlap(&pkg.1)) 91 | .map(|pkg| pkg.0) 92 | .collect(); 93 | Some(res) 94 | } else { 95 | None 96 | }; 97 | res 98 | } 99 | 100 | fn pkgname_iter(&self) -> Box + '_> { 101 | Box::new(self.name_to_ids.iter().map(|(name, pkgs)| (name.as_str(), pkgs.as_slice()))) 102 | } 103 | 104 | fn pkgid_iter(&self) -> Box + '_> { 105 | // PkgID = pos + 1 106 | Box::new(self.pkgs.iter().enumerate().map(|(pos, meta)| { 107 | let id = pos + 1; 108 | (id, meta) 109 | })) 110 | } 111 | } 112 | 113 | impl PkgPool for InMemoryPool {} 114 | -------------------------------------------------------------------------------- /src/solver/improve.rs: -------------------------------------------------------------------------------- 1 | use super::{pool::PkgPool, solve, sort::sort_pkgs_to_cycles}; 2 | 3 | use anyhow::Result; 4 | use std::collections::HashSet; 5 | use varisat::{lit::Lit, ExtendFormula, Solver}; 6 | 7 | /// Attempt to use latest possible version of packages via forcing the solver to choose better 
versions 8 | /// of packages via banning older versions via solver assume 9 | pub fn upgrade(pool: &dyn PkgPool, res: &mut Vec, solver: &mut Solver) -> Result<()> { 10 | let mut assumes = Vec::new(); 11 | let mut cant_update: HashSet = HashSet::new(); 12 | loop { 13 | let updates = gen_update_assume(pool, res); 14 | if !updates.is_empty() { 15 | let names: HashSet = updates.iter().map(|(name, _)| name.to_string()).collect(); 16 | // If the update list only contains packages that can't be upgraded, stop 17 | if cant_update == names { 18 | break; 19 | } 20 | for update in updates { 21 | let mut new_assumes = assumes.clone(); 22 | new_assumes.append(&mut update.1.clone()); 23 | solver.assume(&new_assumes); 24 | if solver.solve().unwrap() { 25 | *res = solve(solver)?; 26 | assumes = new_assumes; 27 | } else { 28 | cant_update.insert(update.0); 29 | } 30 | } 31 | } else { 32 | break; 33 | } 34 | } 35 | 36 | Ok(()) 37 | } 38 | 39 | /// Construct a subset list of packages that only contains equal version of existing packages 40 | /// So that no older packages are included when upgrading packages 41 | pub fn reduce(pool: &dyn PkgPool, res: &mut Vec, to_install: &[usize]) -> Result<()> { 42 | // Generate reduced formula 43 | let mut formula = pool.gen_formula(Some(res)); 44 | for pkgid in to_install { 45 | formula.add_clause(&[Lit::from_dimacs(*pkgid as isize)]); 46 | } 47 | 48 | let mut solver = Solver::new(); 49 | solver.add_formula(&formula); 50 | // Initial solve 51 | *res = solve(&mut solver)?; 52 | 53 | // Try remove this package from the list of cycles 54 | let cycles = sort_pkgs_to_cycles(pool, res)?; 55 | let mut assumes = Vec::new(); 56 | for cycle in &cycles { 57 | // Check if this cycle contains mandatory package 58 | // If so, don't reduce 59 | for id in cycle { 60 | if to_install.contains(id) { 61 | continue; 62 | } 63 | } 64 | 65 | let mut no_ids: Vec = cycle.iter().map(|id| !Lit::from_dimacs(*id as isize)).collect(); 66 | let mut new_assume = 
assumes.clone(); 67 | new_assume.append(&mut no_ids); 68 | solver.assume(&new_assume); 69 | if solver.solve().unwrap() { 70 | // If can be solved without the cycle, it should be safe to remove it 71 | assumes = new_assume; 72 | } 73 | } 74 | 75 | solver.assume(&assumes); 76 | *res = solve(&mut solver).unwrap(); 77 | Ok(()) 78 | } 79 | 80 | /// Generate a list of Lit of all older packages 81 | /// The idea is that with these assumptions, the SAT solver must choose more up-to-date 82 | /// packages, or give Unsolvable 83 | pub fn gen_update_assume(pool: &dyn PkgPool, ids: &[usize]) -> Vec<(String, Vec)> { 84 | let mut res = Vec::new(); 85 | for id in ids { 86 | if !is_best(pool, *id).unwrap() { 87 | let mut assume = Vec::new(); 88 | // Find all newer versions of this package 89 | let name = &pool.get_pkg_by_id(*id).unwrap().name; 90 | let pkgids: Vec = pool.get_pkgs_by_name(name).unwrap(); 91 | 92 | let mut reached = false; 93 | for pkgid in pkgids { 94 | if pkgid == *id { 95 | reached = true; 96 | } 97 | if reached { 98 | reached = true; 99 | let lit = !Lit::from_dimacs(pkgid as isize); 100 | assume.push(lit); 101 | } 102 | } 103 | res.push((name.to_string(), assume)); 104 | } 105 | } 106 | res 107 | } 108 | 109 | #[inline] 110 | pub fn is_best(pool: &dyn PkgPool, id: usize) -> Option { 111 | let name = &pool.get_pkg_by_id(id)?.name; 112 | let ids = pool.get_pkgs_by_name(name)?; 113 | if ids[0] != id { 114 | Some(false) 115 | } else { 116 | Some(true) 117 | } 118 | } 119 | -------------------------------------------------------------------------------- /src/alpm/db.rs: -------------------------------------------------------------------------------- 1 | /// The pacman db reader 2 | use crate::{ 3 | debug, 4 | solver::pool::PkgPool, 5 | types::{Checksum, PkgMeta, PkgSource, PkgVersion, VersionRequirement}, 6 | utils::{downloader, pacparse}, 7 | warn, 8 | }; 9 | use anyhow::{anyhow, bail, Context, Result}; 10 | use rayon::prelude::*; 11 | use std::{ 12 | 
/// Import every package described in a pacman database archive into `pool`.
///
/// `db` points to a gzip-compressed tar archive; each package's metadata is
/// stored in a `desc` entry inside it. Entries that fail to parse are logged
/// and skipped so one bad package does not abort the whole import.
///
/// NOTE(review): `baseurl` is not used anywhere in this visible body —
/// presumably it should feed into the package's download URL; confirm
/// against how `PkgSource::Http` paths are consumed.
pub fn import(db: &Path, pool: &mut dyn PkgPool, baseurl: &str) -> Result<()> {
    debug!("Importing package database from {}", db.display());
    let f = File::open(db)?;
    // The database is a .tar.gz; decompress on the fly while streaming entries.
    let gzipdecoder = GzDecoder::new(f);
    let mut tar = Archive::new(gzipdecoder);

    for file in tar.entries()? {
        let file = file.context("error reading file from db")?;
        let path = file.path()?.to_path_buf();
        let path_str = path.display().to_string();
        // Path::ends_with matches the final path component, i.e. the `desc`
        // file holding the package's metadata fields.
        if path.ends_with("desc") {
            // Now we are talking!
            match parse_desc(file, &path_str) {
                Ok(pkgmeta) => {
                    pool.add(pkgmeta);
                }
                Err(e) => {
                    // Best-effort import: warn and move on to the next entry.
                    warn!("Failed to add {path_str} from {0}: {e}", db.display());
                }
            };
        }
    }
    Ok(())
}
f.remove("VERSION").ok_or_else(|| anyhow!("bad metadata for {name}"))?.as_str(), 69 | )?, 70 | 71 | depends: get_pkg_list(&name, "DEPENDS", &mut f)?, 72 | optional: get_pkg_list(&name, "OPTDEPENDS", &mut f)?, 73 | conflicts: get_pkg_list(&name, "CONFLICTS", &mut f)?, 74 | install_size: f 75 | .remove("ISIZE") 76 | .ok_or_else(|| anyhow!("bad metadata: missing ISIZE"))? 77 | .parse()?, 78 | provides: get_pkg_list(&name, "PROVIDES", &mut f)?, 79 | replaces: get_pkg_list(&name, "REPLACES", &mut f)?, 80 | source: PkgSource::Http((path, download_size, { 81 | if let Some(hex) = f.get("SHA256SUM") { 82 | Checksum::from_sha256_str(&hex)? 83 | } else if let Some(hex) = f.get("SHA512SUM") { 84 | Checksum::from_sha512_str(&hex)? 85 | } else { 86 | bail!( 87 | "Metadata for package {} does not contain the checksum field (SHA256 or SHA512).", 88 | name 89 | ) 90 | } 91 | })), 92 | }) 93 | } 94 | 95 | fn get_pkg_list( 96 | pkgname: &str, 97 | field_name: &str, 98 | f: &mut HashMap, 99 | ) -> Result)>> { 100 | let mut out = Vec::new(); 101 | if let Some(values) = f.remove(field_name) { 102 | for (i, line) in values.lines().into_iter().enumerate() { 103 | // Parse the package line 104 | match pacparse::parse_package_requirement_line(&line) { 105 | Ok((_, (name, verreq, desc))) => out.push((name.to_owned(), verreq, desc)), 106 | Err(e) => { 107 | warn!("bad package requirement when parsing {field_name}: {e}"); 108 | bail!("malformed package requirement for {pkgname} at line {i}"); 109 | } 110 | } 111 | } 112 | } 113 | // It's fine to have nothing 114 | Ok(out) 115 | } 116 | -------------------------------------------------------------------------------- /src/utils/pacparse.rs: -------------------------------------------------------------------------------- 1 | use crate::debug; 2 | use anyhow::{bail, Result}; 3 | /// Parse pacman style package database files 4 | use nom::{ 5 | bytes::complete::{take_till, take_until1, take_while1}, 6 | character::complete::anychar, 7 | 
/// Parse the value part of a paragraph that ends with EOF
///
/// Collects consecutive non-empty lines until either an empty line (the
/// paragraph separator) or the end of input is reached, and joins them
/// back together with `\n`.
fn parse_value(mut i: &str) -> IResult<&str, String> {
    let mut lines = Vec::new();
    loop {
        // Take everything up to (but not including) the next newline...
        let (x, content) = take_till(|c| c == '\n')(i)?;
        // ...then consume the newline itself.
        // NOTE(review): this errors if the final line lacks a trailing '\n' —
        // input appears to be assumed newline-terminated; confirm upstream.
        let (x, _) = char('\n')(x)?;
        i = x;
        if content.is_empty() {
            // An empty line terminates the paragraph.
            break;
        }
        lines.push(content.to_owned());
        if x.is_empty() {
            // End of input also terminates the paragraph.
            break;
        }
    }
    let s = lines.join("\n");
    Ok((i, s))
}
/// Characters permitted in a pacman package name:
/// alphanumerics plus `@`, `.`, `+`, `-` and `_`.
fn is_package_name_char(c: char) -> bool {
    matches!(c, '@' | '.' | '+' | '-' | '_') || c.is_alphanumeric()
}

/// Does the remaining input open with a version-requirement operator
/// (`>`, `<` or `=`)?
fn is_version_requirement(i: &str) -> bool {
    i.starts_with(['>', '<', '='])
}
139 | 140 | something else" 141 | ) 142 | .unwrap() 143 | ); 144 | assert_eq!( 145 | ("", ("NAME".to_string(), "A multiple\nline\nparagraph".to_string(),)), 146 | parse_pair( 147 | "%NAME% 148 | A multiple 149 | line 150 | paragraph. 151 | " 152 | ) 153 | .unwrap() 154 | ); 155 | } 156 | } 157 | -------------------------------------------------------------------------------- /src/actions/execute.rs: -------------------------------------------------------------------------------- 1 | use super::UserRequest; 2 | use crate::{ 3 | config::{Blueprints, CachedRepoDb, Config, Opts}, 4 | debug, 5 | executor::MachineStatus, 6 | info, 7 | solver::{ 8 | pool::{self, PkgPool}, 9 | Solver, 10 | }, 11 | success, 12 | types::PkgActionModifier, 13 | utils::cli::{self, ask_confirm}, 14 | utils::downloader::Downloader, 15 | warn, 16 | }; 17 | 18 | use anyhow::{anyhow, bail, Context, Result}; 19 | use console::style; 20 | 21 | // -> Result 22 | pub async fn execute( 23 | repo_db: &CachedRepoDb, 24 | downloader: &Downloader, 25 | blueprint: &mut Blueprints, 26 | opts: &Opts, 27 | config: &Config, 28 | request: UserRequest, 29 | ) -> Result { 30 | let dbs = repo_db.get_all_package_db().context("Invalid local package database!")?; 31 | let local_repo = opts.root.join(crate::LOCAL_REPO_PATH); 32 | if !local_repo.is_dir() { 33 | std::fs::create_dir_all(&local_repo)?; 34 | } 35 | let mut pool = pool::InMemoryPool::new(); 36 | for (db_base_url, db_path) in dbs { 37 | crate::alpm::db::import(&db_path, &mut pool, &db_base_url)?; 38 | } 39 | 40 | debug!("Processing user request..."); 41 | let root = &opts.root; 42 | let machine_status = MachineStatus::new(root).await?; 43 | process_user_request(request, &pool, blueprint, &machine_status)?; 44 | 45 | debug!("Applying replaces according to package catalog..."); 46 | apply_replaces(opts, &pool, blueprint)?; 47 | 48 | info!("Resolving dependencies..."); 49 | let solver = Solver::from(pool); 50 | let res = solver.install(blueprint)?; 51 | // 
Translating result to list of actions 52 | let actions = machine_status.gen_actions(res.as_slice()); 53 | if actions.is_empty() { 54 | success!("There is nothing to do."); 55 | return Ok(false); 56 | } 57 | 58 | // There is something to do. Show it. 59 | info!("sasm will perform the following actions:"); 60 | if opts.yes && opts.no_pager { 61 | actions.show(); 62 | } else { 63 | actions.show_tables(opts.no_pager)?; 64 | } 65 | crate::WRITER.writeln("", "")?; 66 | actions.show_size_change(); 67 | 68 | if ask_confirm(opts, "Proceed?")? { 69 | // Run it! 70 | Ok(false) 71 | } else { 72 | Ok(true) 73 | } 74 | } 75 | 76 | fn process_user_request( 77 | req: UserRequest, 78 | pool: &dyn PkgPool, 79 | blueprint: &mut Blueprints, 80 | ms: &MachineStatus, 81 | ) -> Result<()> { 82 | match req { 83 | UserRequest::Install(list) => { 84 | for install in list { 85 | // Check if this package actually exists 86 | if pool.get_pkgs_by_name(&install.pkgname).is_none() { 87 | // Check if provides 88 | if let Some(provider) = pool.find_provide(&install.pkgname, &install.ver_req) { 89 | let e = anyhow!( 90 | "Standalone package {} not found. However, {} provides package with this name. 
Add this package instead?", 91 | style(&install.pkgname).bold(), 92 | style(provider).bold() 93 | ); 94 | return Err(e.context("Failed to add new package(s).")); 95 | } else { 96 | bail!("Failed to add new package: {}", install.pkgname); 97 | } 98 | } 99 | 100 | // Add pkg to blueprint 101 | let add_res = blueprint.add( 102 | &install.pkgname, 103 | install.modify, 104 | None, 105 | install.ver_req, 106 | install.local, 107 | ); 108 | if let Err(e) = add_res { 109 | warn!("Cannot add package {}: {e}", style(&install.pkgname).bold()); 110 | } 111 | } 112 | } 113 | UserRequest::Remove(list) => { 114 | for (name, remove_recomm) in list { 115 | blueprint.remove(&name, remove_recomm)?; 116 | } 117 | } 118 | UserRequest::Upgrade => (), 119 | }; 120 | 121 | Ok(()) 122 | } 123 | 124 | fn apply_replaces(opts: &Opts, pool: &dyn PkgPool, blueprint: &mut Blueprints) -> Result<()> { 125 | // For every package in blueprint, check if they are replaced 126 | for pkg in blueprint.get_pkg_requests() { 127 | if let Some(replacement) = pool.find_replacement(&pkg.name, &pkg.version) { 128 | // Found a replacement! 129 | // If in user blueprint, ask if to replace it 130 | if blueprint.user_list_contains(&pkg.name) { 131 | if cli::ask_confirm(opts, &format!("Replace {} with {}?", pkg.name, replacement))? { 132 | blueprint.remove(&pkg.name, true)?; 133 | blueprint.add(&replacement, false, None, None, false)?; 134 | } else { 135 | warn!("Package {} has been replaced by {}. 
Please update or edit vendor blueprint to use the new package.", 136 | style(&pkg.name).bold(), 137 | style(&replacement).bold()); 138 | } 139 | } 140 | } 141 | } 142 | 143 | Ok(()) 144 | } 145 | -------------------------------------------------------------------------------- /src/actions/search/provide/mod.rs: -------------------------------------------------------------------------------- 1 | mod parse; 2 | 3 | use super::PkgInfo; 4 | use crate::{db::LocalDb, debug, executor::MachineStatus, pool}; 5 | 6 | use anyhow::{Context, Result}; 7 | use console::style; 8 | use flate2::read::GzDecoder; 9 | use std::{ 10 | cmp::Reverse, 11 | collections::{HashMap, HashSet}, 12 | fs::File, 13 | io::{BufRead, BufReader, Read}, 14 | path::PathBuf, 15 | }; 16 | 17 | const READ_BUFFER_SIZE: usize = 8000; 18 | 19 | pub fn show_provide_file( 20 | local_db: &LocalDb, 21 | machine_status: &MachineStatus, 22 | filename: &str, 23 | bin: bool, 24 | ) -> Result<()> { 25 | let content_paths: Vec = if bin { 26 | local_db.get_all_bincontents_db() 27 | } else { 28 | local_db.get_all_contents_db() 29 | } 30 | .context("Failed to initialize local database for searching!")? 
31 | .into_iter() 32 | .map(|(_, path)| path) 33 | .collect(); 34 | 35 | let mut content_dbs: Vec> = Vec::with_capacity(content_paths.len()); 36 | for path in &content_paths { 37 | let f = File::open(path)?; 38 | if bin { 39 | // BinContents are not compressed 40 | content_dbs.push(Box::new(f)); 41 | } else { 42 | // Contents are Gzip compressed 43 | let f = GzDecoder::new(f); 44 | content_dbs.push(Box::new(f)); 45 | } 46 | } 47 | 48 | // Find a list of package names that provide the designated file 49 | debug!("Searching Contents metadata..."); 50 | let mut pkgnames = Vec::from_iter(package_name_provide_file(content_dbs, filename)?); 51 | // Sort based on number of matched paths 52 | pkgnames.sort_by_key(|(_, paths)| Reverse(paths.len())); 53 | 54 | // Create a Solver so we can get more info let mut solver = Solver::new(); 55 | debug!("Constructing package pool..."); 56 | let dbs = local_db.get_all_package_db()?; 57 | let pool = pool::source::create_pool(&dbs, &[])?; 58 | 59 | debug!("Generating detailed package information..."); 60 | for (pkgname, paths) in pkgnames { 61 | if let Some(pkgs) = pool.get_pkgs_by_name(&pkgname) { 62 | // This is safe unless the pool is broken 63 | let latest_pkg_id = pkgs.get(0).unwrap(); 64 | let latest_pkg = pool.get_pkg_by_id(*latest_pkg_id).unwrap(); 65 | let provide_paths = paths 66 | .into_iter() 67 | .map(|path| format!("Provides: {}", style(path).bold())) 68 | .collect(); 69 | // Prepare a PkgInfo 70 | let pkginfo = PkgInfo { 71 | pkg: latest_pkg, 72 | has_dbg_pkg: pool.has_dbg_pkg(*latest_pkg_id)?, 73 | additional_info: provide_paths, 74 | }; 75 | pkginfo.show(machine_status)?; 76 | } 77 | } 78 | 79 | Ok(()) 80 | } 81 | 82 | // Given a filename or path, find package names that provide such file 83 | pub fn package_name_provide_file( 84 | dbs: Vec>, 85 | filename: &str, 86 | ) -> Result>> { 87 | let mut res = HashMap::new(); 88 | for db in dbs { 89 | let mut bufreader = BufReader::new(db); 90 | let mut buffer = vec![0u8; 
READ_BUFFER_SIZE]; 91 | loop { 92 | let len = bufreader.read(&mut buffer)?; 93 | if len == 0 { 94 | // EOL reached 95 | break; 96 | } 97 | bufreader.read_until(b'\n', &mut buffer)?; 98 | scan_buffer(&buffer, &mut res, filename)?; 99 | } 100 | } 101 | 102 | Ok(res) 103 | } 104 | 105 | fn scan_buffer( 106 | buffer: &[u8], 107 | results: &mut HashMap>, 108 | filename: &str, 109 | ) -> Result<()> { 110 | let substring = format!("{} ", filename); 111 | for occurence in memchr::memmem::find_iter(buffer, &substring) { 112 | // Find line start 113 | let mut start = occurence; 114 | loop { 115 | if start == 0 || buffer[start - 1] == b'\n' { 116 | break; 117 | } 118 | start -= 1; 119 | } 120 | // Find line end 121 | let mut end = occurence; 122 | loop { 123 | if end == buffer.len() || buffer[end] == b'\n' { 124 | break; 125 | } 126 | end += 1; 127 | } 128 | 129 | let slice = &buffer[start..end]; 130 | let line = std::str::from_utf8(slice)?; 131 | match parse::parse_contents_line(line) { 132 | Ok((path, packages)) => { 133 | let mut path = path.to_owned(); 134 | // Add `/` to the front of path, because Contents file uses relative path 135 | path.insert(0, '/'); 136 | for (_, pkgname) in packages { 137 | if let Some(list) = results.get_mut(pkgname) { 138 | list.insert(path.clone()); 139 | } else { 140 | let mut set = HashSet::new(); 141 | set.insert(path.clone()); 142 | results.insert(pkgname.to_owned(), set); 143 | } 144 | } 145 | } 146 | Err(e) => { 147 | debug!("{}", e); 148 | } 149 | } 150 | } 151 | 152 | Ok(()) 153 | } 154 | -------------------------------------------------------------------------------- /src/utils/cli/mod.rs: -------------------------------------------------------------------------------- 1 | mod prompt; 2 | pub use prompt::{ask_confirm, SasmTheme}; 3 | 4 | use anyhow::{Context, Result}; 5 | use console::Term; 6 | use std::io::Write; 7 | 8 | const PREFIX_LEN: u16 = 10; 9 | 10 | pub fn gen_prefix(prefix: &str) -> String { 11 | if 
console::measure_text_width(prefix) > (PREFIX_LEN - 1).into() { 12 | panic!("Line prefix \"{}\" too long!", prefix); 13 | } 14 | 15 | // Make sure the real_prefix has desired PREFIX_LEN in console 16 | let left_padding_size = (PREFIX_LEN as usize) - 1 - console::measure_text_width(prefix); 17 | let mut real_prefix: String = " ".repeat(left_padding_size); 18 | real_prefix.push_str(prefix); 19 | real_prefix.push(' '); 20 | real_prefix 21 | } 22 | 23 | pub struct Writer { 24 | term: Term, 25 | } 26 | 27 | impl Writer { 28 | pub fn new() -> Self { 29 | Writer { term: Term::stdout() } 30 | } 31 | 32 | pub fn show_cursor(&self) -> Result<()> { 33 | self.term.show_cursor()?; 34 | Ok(()) 35 | } 36 | 37 | pub fn get_max_len(&self) -> u16 { 38 | self.term.size_checked().unwrap_or((25, 80)).1 - PREFIX_LEN 39 | } 40 | 41 | pub fn get_writer(&self) -> Box { 42 | Box::new(self.term.clone()) 43 | } 44 | 45 | fn write_prefix(&self, prefix: &str) -> Result<()> { 46 | self.term.write_str(&gen_prefix(prefix)).context("Failed to write prefix to console.")?; 47 | Ok(()) 48 | } 49 | 50 | pub fn writeln(&self, prefix: &str, msg: &str) -> Result<()> { 51 | let max_len = self.get_max_len(); 52 | let mut first_run = true; 53 | 54 | let mut msg = msg.to_string(); 55 | // Print msg with left padding 56 | while !msg.is_empty() { 57 | let line_msg = console::truncate_str(&msg, max_len.into(), "\n"); 58 | if first_run { 59 | self.write_prefix(prefix).context("Failed to write prefix to console.")?; 60 | first_run = false; 61 | } else { 62 | self.write_prefix("").context("Failed to write prefix to console.")?; 63 | } 64 | self.term.write_str(&line_msg).context("Failed to write message to console.")?; 65 | // Remove the already written part, strip ANSI since it can mess everything up 66 | let mut new_msg = console::strip_ansi_codes(&msg).to_string(); 67 | let line_msg_len = console::measure_text_width(&line_msg); 68 | new_msg.replace_range(..line_msg_len, ""); 69 | // Swap 70 | std::mem::swap(&mut 
/// Write a list of word-like chunks separated by single spaces, wrapping
/// onto new prefix-padded lines when the current line would overflow.
pub fn write_chunks<S: AsRef<str>>(&self, prefix: &str, chunks: &[S]) -> Result<()> {
    if chunks.is_empty() {
        return Ok(());
    }
    // NOTE(review): get_max_len() already subtracts PREFIX_LEN from the
    // terminal width, so subtracting it again here shrinks the usable width
    // twice — confirm whether this is intentional slack or a bug.
    let max_len: usize = (self.get_max_len() - PREFIX_LEN).into();
    // Write prefix first
    self.write_prefix(prefix)?;
    let mut cur_line_len: usize = PREFIX_LEN.into();
    for chunk in chunks {
        let chunk = chunk.as_ref();
        // Measured in display cells, ignoring ANSI escape sequences.
        let chunk_len = console::measure_text_width(chunk);
        // If going to overflow the line, create new line
        // The `1` is the preceding space
        if cur_line_len + chunk_len + 1 > max_len {
            self.term.write_str("\n")?;
            self.write_prefix("")?;
            // NOTE(review): resetting to 0 ignores the PREFIX_LEN columns the
            // padding prefix just consumed, while the first line counts them
            // — wrapped lines may run slightly long; verify.
            cur_line_len = 0;
        }
        self.term.write_str(chunk)?;
        self.term.write_str(" ")?;
        cur_line_len += chunk_len + 1;
    }
    // Write a new line
    self.term.write_str("\n")?;
    Ok(())
}
warn { 137 | ($($arg:tt)+) => { 138 | $crate::WRITER.writeln(&console::style("WARNING").yellow().bold().to_string(), &format!($($arg)+)).ok(); 139 | }; 140 | } 141 | 142 | #[macro_export] 143 | macro_rules! error { 144 | ($($arg:tt)+) => { 145 | $crate::WRITER.writeln(&console::style("ERROR").red().bold().to_string(), &format!($($arg)+)).ok(); 146 | }; 147 | } 148 | 149 | #[macro_export] 150 | macro_rules! due_to { 151 | ($($arg:tt)+) => { 152 | $crate::WRITER.writeln(&console::style("DUE TO").yellow().bold().to_string(), &format!($($arg)+)).ok(); 153 | }; 154 | } 155 | -------------------------------------------------------------------------------- /doc/config.md: -------------------------------------------------------------------------------- 1 | sasm requires a config folder of this structure: 2 | 3 | ``` 4 | CONFIG_ROOT (typically /etc/omakase) 5 | |--- config.toml 6 | |--- user.blueprint 7 | |--- blueprint.d/ 8 | | |--- vendor-1.blueprint 9 | | `--- vendor-2.blueprint 10 | `--- keys 11 | |--- repo1-key1.gpg 12 | |--- repo1-key2.asc 13 | `--- repo2-key.asc 14 | ``` 15 | 16 | # `config.toml` 17 | This is the main configuration file. It uses `TOML` and have a series of mandatory fields. Here's an example: 18 | 19 | ```toml 20 | arch = "amd64" 21 | 22 | # Repository configuration sections are denoted by `[repo.REPO_NAME]`. REPO_NAME can be arbitary. 23 | [repo.main] 24 | # sasm support loading mirrors from a mirrorlist 25 | # The mirrorlist path must be an absolute path 26 | source = { mirrorlist = "/usr/share/distro-repository-data/mirrors.toml", preferred = "origin" } 27 | # Or, use a simple URL 28 | #source = "https://repo.aosc.io" 29 | distribution = "stable" 30 | components = ["main"] 31 | # GPG public key for this repository. 32 | # Put the public keys in the `keys/` folder, and provide filenames of the key files here 33 | keys = ["main.asc"] 34 | # Tags are used by external programs to identify repositories. sasm doesn't use them. 
35 | tags = ["topic-template"] 36 | ``` 37 | 38 | ## The MirrorList file format 39 | A MirrorList file defines a series of possible mirrors. Such file should use `TOML` file format. Here's an example: 40 | 41 | ```toml 42 | # This field specifies which field should be used by default 43 | default = "origin" 44 | 45 | # And a list of mirrors 46 | [origin] 47 | description = "AOSC main repository" 48 | url = "https://repo.aosc.io/" 49 | 50 | [magicmirror] 51 | description = "Mirror built by magic" 52 | url = "https://magicmirror.bruh/" 53 | ``` 54 | 55 | ## The Omanomicon: `unsafe` section 56 | Some dangerous flags of sasm can be enabled in the `unsafe` section. This section is optional and the default config will not contain this section, but if you are sure you want to enable these features, you can manually add this section and enable the flags you want. 57 | 58 | ```toml 59 | [unsafe] 60 | # When sasm thinks it should remove a package, purge the package's config files too 61 | purge_on_remove = true 62 | # Allow dpkg to skip fsync on files. Only use on systems with battery backup. 63 | unsafe_io = true 64 | # Allow remove essential packages. 65 | # If not implicitly set to true, sasm will refuse any action that involves removing essential packages 66 | allow_remove_essential = true 67 | ``` 68 | 69 | # Blueprints 70 | Blueprint are, as their name suggests, the blueprint for the system. They defines the packages users can use about the system, and omakase will ensure these packages are available. However, this also means that any package that is not included in the system blueprint is not guaranteed to be installed. For example, user might able to use a package installed as dependency, but if this package is no longer depended, it can be removed. Thus, user should always include packages they use in the blueprint files. 71 | 72 | There are two types of blueprint: _user blueprint_ and _vendor blueprint_. 
There is only one user blueprint at `CONFIG_ROOT/user.blueprint`, but there may be many vendor blueprints at `CONFIG_ROOT/blueprint.d/*.blueprint`. When using the CLI, sasm will only modify user blueprint. So, if you wants to remove a package inside vendor blueprint, sasm will not allow so. You will have to manually remove the line in the corresponding vendor blueprint. 73 | 74 | Blueprint files have a special syntax. Each line in a blueprint file represents a package request. Such line include two parts: package name and (optional) additional requirements. Here's a few examples: 75 | 76 | ``` 77 | # Example of a simple request 78 | konsole 79 | # Package name that includes variables 80 | linux-kernel-{KERNEL_VERSION} 81 | # Package request with version requirements 82 | linux+kernel (>=3:5.14.0, <<3:5.15.0) 83 | mpv (=0.33.1) 84 | # Package that are installed from local debs 85 | some_pkg (local) 86 | # Package that are installed because they are recommended by other packages 87 | fcitx5-base 88 | fcitx5 (added_by = fcitx5-base) 89 | fcitx5-qt (added_by = fcitx5-base) 90 | ``` 91 | 92 | There may be variables in package names. These can be used to dynamically request packages based on system state. Currently, these variables are supported: 93 | + `KERNEL_VERSION`: version of the current running kernel, can be used to prevent current kernel from being removed. 94 | 95 | You can specify additional attributes inside the pair of round brackets after package name. Multiple arguments are separated by `,`. Currently these attributes are supported: 96 | + Version requirements (`>>`, `>=`, `=`, `<<`, `<=`): Indicate what range of version should be installed. Multiple requirements are allowed as far as they are not contradictory (for example, `>=2, <=1` will not be accepted). 97 | - `>>` means strictly larger and `<<` means strictly smaller 98 | - Note that this only accepts full deb version, which includes epoch, upstream version and package revision. 
99 | + `local`: Install this package from local package repository. This will be added automatically if you use `install --local` to install a local deb. 100 | + `added_by = PKGNAME`: This package is introduced by another package rather than direct user request. Recommended packages will contain this attribute to show which package recommends them. When removing packages with `--remove-recommends` argument, all packages that have this attribute and is pointing to the package to remove will also be removed. 101 | -------------------------------------------------------------------------------- /src/solver/pool/source/local.rs: -------------------------------------------------------------------------------- 1 | use crate::{ 2 | debug, 3 | types::{PkgMeta, PkgSource, PkgVersion}, 4 | utils::debcontrol::parse_pkg_list, 5 | }; 6 | 7 | use anyhow::{bail, format_err, Context, Result}; 8 | use rayon::prelude::*; 9 | use std::{collections::HashMap, ffi::OsStr, fs::File, io::prelude::*, path::Path}; 10 | use tar::Archive; 11 | use xz2::read::XzDecoder; 12 | 13 | pub fn read_debs_from_path(p: &Path) -> Result> { 14 | if !p.is_dir() { 15 | bail!( 16 | "Invalid local repository: {} is not a directory.", 17 | p.display() 18 | ); 19 | } 20 | 21 | let mut deb_paths = Vec::new(); 22 | for entry in std::fs::read_dir(p)? { 23 | let entry = entry?; 24 | let path = entry.path(); 25 | debug!("Parsing local deb file {} ...", path.display()); 26 | if !path.is_file() || path.extension() != Some(OsStr::new("deb")) { 27 | continue; 28 | } 29 | // Now we confirm it is a deb file. 
Add it to the process queue 30 | deb_paths.push(path); 31 | } 32 | 33 | deb_paths 34 | .par_iter() 35 | .map(|deb| read_control_from_deb(deb)) 36 | .collect() 37 | } 38 | 39 | pub fn read_control_from_deb(p: &Path) -> Result { 40 | let mut archive = ar::Archive::new( 41 | File::open(p).context(format!("Failed to open deb file {} .", p.display()))?, 42 | ); 43 | while let Some(entry) = archive.next_entry() { 44 | let entry = entry?; 45 | let filename = std::str::from_utf8(entry.header().identifier())?; 46 | if filename == "control.tar.xz" { 47 | let xzdecoder = XzDecoder::new(entry); 48 | let mut tar = Archive::new(xzdecoder); 49 | for file in tar.entries()? { 50 | let mut file = file?; 51 | let path = file 52 | .header() 53 | .path()? 54 | .to_str() 55 | .unwrap_or_default() 56 | .to_owned(); 57 | if path == "./control" { 58 | let mut res = String::new(); 59 | file.read_to_string(&mut res)?; 60 | let res = parse_debcontrol(&res, p)?; 61 | return Ok(res); 62 | } 63 | } 64 | } 65 | } 66 | bail!("Malformed deb file.") 67 | } 68 | 69 | fn parse_debcontrol(i: &str, p: &Path) -> Result { 70 | let paragraphs = match debcontrol::parse_str(i) { 71 | Ok(p) => p, 72 | Err(e) => bail!("Failed to parse control for deb: {} .", e), 73 | }; 74 | let mut fields = HashMap::new(); 75 | for p in paragraphs { 76 | for field in p.fields { 77 | fields.insert(field.name, field.value); 78 | } 79 | } 80 | 81 | parse_debcontrol_fields(fields, p) 82 | } 83 | 84 | fn parse_debcontrol_fields(mut f: HashMap<&str, String>, p: &Path) -> Result { 85 | Ok(PkgMeta { 86 | name: f 87 | .remove("Package") 88 | .ok_or_else(|| format_err!("deb control file does not contain the Package field."))?, 89 | section: f 90 | .remove("Section") 91 | .ok_or_else(|| format_err!("deb control file does not contain the Section field."))?, 92 | description: f 93 | .remove("Description") 94 | .ok_or_else(|| format_err!("deb control file does not contain the Description field."))?, 95 | version: PkgVersion::try_from( 96 | 
f.get("Version") 97 | .ok_or_else(|| format_err!("deb control file does not contain the Version field."))? 98 | .as_str(), 99 | )?, 100 | depends: parse_pkg_list(f.get("Depends").unwrap_or(&String::new()))?, 101 | breaks: parse_pkg_list(f.get("Breaks").unwrap_or(&String::new()))?, 102 | conflicts: parse_pkg_list(f.get("Conflicts").unwrap_or(&String::new()))?, 103 | recommends: match f.get("Recommends") { 104 | Some(recomm) => Some(parse_pkg_list(recomm)?), 105 | None => None, 106 | }, 107 | suggests: match f.get("Suggests") { 108 | Some(suggests) => Some(parse_pkg_list(suggests)?), 109 | None => None, 110 | }, 111 | provides: match f.get("Provides") { 112 | Some(provides) => Some(parse_pkg_list(provides)?), 113 | None => None, 114 | }, 115 | replaces: match f.get("Replaces") { 116 | Some(replaces) => Some(parse_pkg_list(replaces)?), 117 | None => None, 118 | }, 119 | // Installed-Size is in kilobytes, multiply by 1024 to convert it to bytes 120 | install_size: f 121 | .remove("Installed-Size") 122 | .ok_or_else(|| format_err!("deb control file does not contain the Installed-Size field."))? 
/// The complete set of package operations a transaction will perform.
///
/// An empty `PkgActions` (see `is_empty`) means there is nothing to do.
#[derive(Default, Debug)]
pub struct PkgActions<'a> {
    /// Packages to be installed. The second element is `Some((old_version,
    /// old_install_size))` when the package is already present (i.e. this is
    /// an upgrade or downgrade), `None` for a fresh install.
    pub install: Vec<(&'a PkgMeta, Option<(PkgVersion, u64)>)>,
    // (Name, InstallSize)
    pub remove: Vec<(String, u64)>,
}
27 | pub trait PkgActionModifier { 28 | fn apply(&self, actions: &mut PkgActions); 29 | } 30 | 31 | impl PkgActions<'_> { 32 | pub fn is_empty(&self) -> bool { 33 | self.install.is_empty() && self.remove.is_empty() 34 | } 35 | 36 | pub fn show(&self) { 37 | let to_install: Vec = self 38 | .install 39 | .iter() 40 | .filter_map(|(install, old_ver)| match old_ver { 41 | Some(_) => None, 42 | None => { 43 | let mut msg = install.name.clone(); 44 | let ver_str = format!("({})", install.version); 45 | msg.push_str(&style(ver_str).dim().to_string()); 46 | Some(msg) 47 | } 48 | }) 49 | .collect(); 50 | let install_prefix = style("INSTALL").on_blue().bold().to_string(); 51 | crate::WRITER.write_chunks(&install_prefix, &to_install).unwrap(); 52 | 53 | let to_upgrade: Vec = self 54 | .install 55 | .iter() 56 | .filter_map(|(install, oldpkg)| match oldpkg { 57 | Some(oldpkg) => { 58 | if install.version > oldpkg.0 { 59 | let mut msg = install.name.clone(); 60 | let ver_str = format!("({} -> {})", oldpkg.0, install.version); 61 | msg.push_str(&style(ver_str).dim().to_string()); 62 | Some(msg) 63 | } else { 64 | None 65 | } 66 | } 67 | None => None, 68 | }) 69 | .collect(); 70 | let upgrade_prefix = style("UPGRADE").on_green().black().bold().to_string(); 71 | crate::WRITER.write_chunks(&upgrade_prefix, &to_upgrade).unwrap(); 72 | 73 | let to_downgrade: Vec = self 74 | .install 75 | .iter() 76 | .filter_map(|(install, oldpkg)| match oldpkg { 77 | Some(oldpkg) => { 78 | if install.version < oldpkg.0 { 79 | let mut msg = install.name.clone(); 80 | let ver_str = format!("({} -> {})", oldpkg.0, install.version); 81 | msg.push_str(&style(ver_str).dim().to_string()); 82 | Some(msg) 83 | } else { 84 | None 85 | } 86 | } 87 | None => None, 88 | }) 89 | .collect(); 90 | let downgrade_prefix = style("DOWNGRADE").on_yellow().white().bold().to_string(); 91 | crate::WRITER.write_chunks(&downgrade_prefix, &to_downgrade).unwrap(); 92 | 93 | let removes: Vec = self 94 | .remove 95 | .iter() 96 
| .map(|(name, _)| { 97 | let mut pkg = name.clone(); 98 | pkg 99 | }) 100 | .collect(); 101 | let remove_prefix = style("REMOVE").on_red().bold().white().to_string(); 102 | crate::WRITER.write_chunks(&remove_prefix, &removes).unwrap(); 103 | } 104 | 105 | pub fn show_tables(&self, no_pager: bool) -> Result<()> { 106 | table::show_table(self, no_pager) 107 | } 108 | 109 | pub fn show_size_change(&self) { 110 | crate::WRITER 111 | .writeln( 112 | "", 113 | &format!( 114 | "{} {}", 115 | &style("Total download size:").bold().to_string(), 116 | HumanBytes(self.calculate_download_size()) 117 | ), 118 | ) 119 | .unwrap(); 120 | let install_size_change = self.calculate_size_change(); 121 | let abs_install_size_change = install_size_change.abs() as u64; 122 | if install_size_change >= 0 { 123 | crate::WRITER 124 | .writeln( 125 | "", 126 | &format!( 127 | "{} +{}", 128 | &style("Estimated change in storage usage:").bold().to_string(), 129 | HumanBytes(abs_install_size_change) 130 | ), 131 | ) 132 | .unwrap(); 133 | } else { 134 | crate::WRITER 135 | .writeln( 136 | "", 137 | &format!( 138 | "{} -{}", 139 | &style("Estimated change in storage usage:").bold().to_string(), 140 | HumanBytes(abs_install_size_change) 141 | ), 142 | ) 143 | .unwrap(); 144 | } 145 | } 146 | 147 | fn calculate_size_change(&self) -> i128 { 148 | let mut res: i128 = 0; 149 | for install in &self.install { 150 | res += i128::from(install.0.install_size); 151 | if let Some(oldpkg) = &install.1 { 152 | res -= i128::from(oldpkg.1); 153 | } 154 | } 155 | 156 | for remove in &self.remove { 157 | res -= i128::from(remove.1); 158 | } 159 | 160 | res 161 | } 162 | 163 | fn calculate_download_size(&self) -> u64 { 164 | let mut res = 0; 165 | for install in &self.install { 166 | if let PkgSource::Http((_, size, _)) = install.0.source { 167 | res += size; 168 | } 169 | } 170 | res 171 | } 172 | } 173 | -------------------------------------------------------------------------------- /src/main.rs: 
/// Exit codes:
/// 1 => program screwed up
/// 2 => user cancelled operation
#[tokio::main(flavor = "current_thread")]
async fn main() {
    // Initial setup
    let mut opts: Opts = Opts::parse();
    // Configure debug globally (read back via `verbose()`)
    VERBOSE.store(opts.verbose, Ordering::Relaxed);
    // If yes mode is enabled, pager will be disabled
    // (a pager would block the non-interactive flow)
    if opts.yes {
        opts.no_pager = true;
    }

    // Set up SIGINT handler
    // The handler needs its own copy of the root path because the closure
    // must be 'static and may outlive this scope
    {
        let root = opts.root.to_owned();
        ctrlc::set_handler(move || sigint_handler(&root)).expect("Error setting SIGINT handler.");
    }

    // Run main logic; on error, print the error chain and map to exit code 1
    let exit_code = match try_main(&opts).await {
        Ok(exit_code) => exit_code,
        Err(err) => {
            // Create a new line first, for visual distinction
            WRITER.writeln("", "").ok();
            error!("{}", err.to_string());
            // Print each underlying cause (skipping the top-level error
            // already shown above)
            err.chain().skip(1).for_each(|cause| {
                due_to!("{}", cause);
            });
            1
        }
    };

    // Unlock if current process locked
    // (only this process's lock — never steal another instance's lock)
    if LOCKED.load(Ordering::Relaxed) {
        if let Err(e) = utils::lock::unlock(&opts.root) {
            error!("{}", e);
        }
    }

    // Always show cursor, just in case a progress bar hid it
    let _ = WRITER.show_cursor();

    exit(exit_code);
}
path?; 122 | let filename = path 123 | .file_name() 124 | .to_str() 125 | .context(format!( 126 | "Bad filename in configuration folder: {} .", 127 | path.path().display() 128 | ))? 129 | .to_owned(); 130 | if filename.ends_with(".blueprint") { 131 | vendor_blueprint_paths.push(path.path()); 132 | } 133 | } 134 | } 135 | let mut blueprint = 136 | Blueprints::from_files(config_root.join("user.blueprint"), &vendor_blueprint_paths)?; 137 | 138 | // Do stuff 139 | warn!("Sasm is currently under construction and active testing. Proceed with caution on production systems!"); 140 | let cancelled = actions::fullfill_command(&config, opts, &mut blueprint).await?; 141 | if !cancelled { 142 | // Write back blueprint. 143 | // They will determine if it really need to write back user blueprint 144 | blueprint.export()?; 145 | Ok(0) 146 | } else { 147 | // User cancelled operation. Don't write back blueprint 148 | Ok(2) 149 | } 150 | } 151 | 152 | fn sigint_handler(root: &Path) { 153 | if crate::DPKG_RUNNING.load(Ordering::Relaxed) { 154 | warn!("You may not interrupt Sasm when dpkg is running."); 155 | // Don't exit. Important things are happening 156 | return; 157 | } 158 | 159 | // Kill subprocess 160 | let subprocess_pid = SUBPROCESS.load(Ordering::Relaxed); 161 | if subprocess_pid > 0 { 162 | let pid = nix::unistd::Pid::from_raw(subprocess_pid); 163 | signal::kill(pid, signal::SIGTERM).expect("Failed to kill child process."); 164 | } 165 | 166 | // Dealing with lock 167 | if LOCKED.load(Ordering::Relaxed) { 168 | utils::lock::unlock(root).expect("Failed to unlock instance."); 169 | } 170 | 171 | // Show cursor before exiting. 172 | // This is not a big deal so we won't panic on this. 
173 | let _ = WRITER.show_cursor(); 174 | std::process::exit(2); 175 | } 176 | -------------------------------------------------------------------------------- /src/types/actions/table.rs: -------------------------------------------------------------------------------- 1 | /// Show actions in tables 2 | use super::PkgActions; 3 | use crate::utils::pager::Pager; 4 | 5 | use anyhow::Result; 6 | use console::style; 7 | use indicatif::HumanBytes; 8 | use std::io::Write; 9 | use tabled::{Alignment, Column, Full, Modify, Style, Table, Tabled}; 10 | 11 | #[derive(Tabled)] 12 | struct InstallRow { 13 | #[header("Name")] 14 | name: String, 15 | #[header("Version")] 16 | version: String, 17 | #[header("Installed Size")] 18 | size: String, 19 | } 20 | 21 | #[derive(Tabled)] 22 | struct RemoveRow { 23 | #[header("Name")] 24 | name: String, 25 | #[header("Package Size")] 26 | size: String, 27 | // Show details to this specific removal. Eg: if this is an essential package 28 | #[header("Details")] 29 | detail: String, 30 | } 31 | 32 | pub fn show_table(actions: &PkgActions, no_pager: bool) -> Result<()> { 33 | let mut install_rows = Vec::new(); 34 | let mut upgrade_rows = Vec::new(); 35 | let mut downgrade_rows = Vec::new(); 36 | let mut remove_rows = Vec::new(); 37 | 38 | for (new, old) in actions.install.iter().rev() { 39 | let mut install_size_change: i128 = new.install_size.into(); 40 | if let Some((_, oldsize)) = old { 41 | install_size_change -= *oldsize as i128; 42 | } 43 | let mut install_size_change_str = HumanBytes(install_size_change.abs() as u64).to_string(); 44 | if install_size_change >= 0 { 45 | install_size_change_str.insert(0, '+'); 46 | } else { 47 | install_size_change_str.insert(0, '-'); 48 | } 49 | let mut row = InstallRow { 50 | name: new.name.clone(), 51 | version: match old { 52 | Some((oldver, _)) => format!("{} -> {}", oldver, new.version), 53 | None => new.version.to_string(), 54 | }, 55 | size: install_size_change_str, 56 | }; 57 | // Insert to 
different row based on operation 58 | if let Some(old) = old { 59 | // Upgrade/downgrade 60 | if old.0 < new.version { 61 | row.name = style(row.name).green().to_string(); 62 | upgrade_rows.push(row); 63 | } else { 64 | row.name = style(row.name).yellow().to_string(); 65 | downgrade_rows.push(row); 66 | } 67 | } else { 68 | // New package 69 | row.name = style(row.name).green().to_string(); 70 | install_rows.push(row); 71 | } 72 | } 73 | 74 | for (name, size) in &actions.remove { 75 | let detail = String::new(); 76 | let row = RemoveRow { 77 | name: style(name).red().to_string(), 78 | size: HumanBytes(*size).to_string(), 79 | detail, 80 | }; 81 | remove_rows.push(row); 82 | } 83 | 84 | let mut pager = Pager::new(no_pager)?; 85 | let pager_name = pager.pager_name().to_owned(); 86 | let mut out = pager.get_writer()?; 87 | 88 | write_review_help_message(&mut out)?; 89 | // Show help message about how to exit review view 90 | if pager_name == Some("less") { 91 | writeln!(out, "{}", style("Press [q] to finish review.\n").bold())?; 92 | } 93 | 94 | if !remove_rows.is_empty() { 95 | writeln!(out, "The following packages will be {}:\n", style("REMOVED").red().bold())?; 96 | let table = Table::new(&remove_rows) 97 | .with(Modify::new(Full).with(Alignment::left())) 98 | // Install Size column should align right 99 | .with(Modify::new(Column(1..2)).with(Alignment::right())) 100 | .with(Modify::new(Full).with(|s: &str| format!(" {s} "))) 101 | .with(Style::PSQL); 102 | writeln!(out, "{table}")?; 103 | } 104 | 105 | if !install_rows.is_empty() { 106 | writeln!(out, "The following packages will be {}:\n", style("installed").green().bold())?; 107 | let table = Table::new(&install_rows) 108 | .with(Modify::new(Full).with(Alignment::left())) 109 | // Install Size column should align right 110 | .with(Modify::new(Column(2..3)).with(Alignment::right())) 111 | .with(Modify::new(Full).with(|s: &str| format!(" {s} "))) 112 | .with(Style::PSQL); 113 | writeln!(out, "{table}")?; 114 | } 
115 | 116 | if !upgrade_rows.is_empty() { 117 | writeln!(out, "The following packages will be {}:\n", style("upgraded").green().bold())?; 118 | let table = Table::new(&upgrade_rows) 119 | .with(Modify::new(Full).with(Alignment::left())) 120 | // Install Size column should align right 121 | .with(Modify::new(Column(2..3)).with(Alignment::right())) 122 | .with(Modify::new(Full).with(|s: &str| format!(" {s} "))) 123 | .with(Style::PSQL); 124 | writeln!(out, "{table}")?; 125 | } 126 | 127 | if !downgrade_rows.is_empty() { 128 | writeln!(out, "The following packages will be {}:\n", style("downgraded").yellow().bold())?; 129 | let table = Table::new(&downgrade_rows) 130 | .with(Modify::new(Full).with(Alignment::left())) 131 | // Install Size column should align right 132 | .with(Modify::new(Column(1..2)).with(Alignment::right())) 133 | .with(Modify::new(Full).with(|s: &str| format!(" {s} "))) 134 | .with(Style::PSQL); 135 | writeln!(out, "{table}")?; 136 | } 137 | 138 | // Write size changes 139 | writeln!( 140 | out, 141 | "{} {}", 142 | style("Total download size:").bold(), 143 | HumanBytes(actions.calculate_download_size()) 144 | )?; 145 | let install_size_change = actions.calculate_size_change(); 146 | let abs_install_size_change = install_size_change.abs() as u64; 147 | let symbol = if install_size_change >= 0 { '+' } else { '-' }; 148 | writeln!( 149 | out, 150 | "{} {}{}", 151 | style("Estimated change in storage usage:").bold(), 152 | symbol, 153 | HumanBytes(abs_install_size_change) 154 | )?; 155 | 156 | // Finish writing 157 | drop(out); 158 | // Wait until pager exits 159 | pager.wait_for_exit()?; 160 | 161 | Ok(()) 162 | } 163 | 164 | fn write_review_help_message(w: &mut dyn Write) -> Result<()> { 165 | writeln!(w, "{}", style("Pending Operations").bold())?; 166 | writeln!(w)?; 167 | writeln!(w, "Shown below is an overview of the pending changes sasm will apply to your system, please review them carefully.")?; 168 | writeln!(w, "Please note that sasm may {}, 
{}, {}, {}, or {} packages in order to fulfill your requested changes.", style("install").green(), style("remove").red(), style("upgrade").green(), style("downgrade").yellow(), style("configure").blue())?; 169 | writeln!(w)?; 170 | Ok(()) 171 | } 172 | -------------------------------------------------------------------------------- /src/actions/bench.rs: -------------------------------------------------------------------------------- 1 | use crate::{ 2 | db::LocalDb, 3 | info, msg, success, 4 | types::{ 5 | config::{Config, Mirror, Opts}, 6 | Checksum, ChecksumValidator, 7 | }, 8 | utils::{downloader::Downloader, pager::Pager}, 9 | }; 10 | 11 | use anyhow::{bail, Result}; 12 | use console::style; 13 | use indicatif::HumanBytes; 14 | use reqwest::{Client, ClientBuilder}; 15 | use std::{ 16 | fs, 17 | io::Write, 18 | path::PathBuf, 19 | time::{Duration, Instant}, 20 | }; 21 | use tabled::{Alignment, Column, Full, Head, Header, Modify, Style, Table, Tabled}; 22 | use toml_edit::{value, Document}; 23 | 24 | pub async fn bench( 25 | opts: &Opts, 26 | config: &Config, 27 | db: LocalDb, 28 | downloader: &Downloader, 29 | ) -> Result<()> { 30 | // First, update local db 31 | db.update(downloader).await?; 32 | 33 | info!("Starting benchmarks..."); 34 | // Set reqwest parameters 35 | let clientbuilder = ClientBuilder::new() 36 | .connect_timeout(Duration::from_secs(5)) 37 | .timeout(Duration::from_secs(30)); 38 | let client = clientbuilder.build()?; 39 | 40 | let mut config = config.clone(); 41 | let mut results = Vec::new(); 42 | for (name, repo) in &mut config.repo { 43 | let (urls, _) = match &repo.source { 44 | Mirror::Simple(_) => { 45 | msg!( 46 | "Skipping repository {} because it only has one mirror.", 47 | style(name).bold() 48 | ); 49 | continue; 50 | } 51 | Mirror::MirrorList { 52 | preferred: _, 53 | mirrorlist: _, 54 | } => repo.get_mirrors()?, 55 | }; 56 | 57 | msg!("Running benchmark for repository {}...", style(name).bold()); 58 | let mut res = Vec::new(); 59 
| // Fetch Contents-all.gz for specified repo 60 | let contents_filename = format!( 61 | "Contents_{}_{}_{}.gz", 62 | repo.distribution, repo.components[0], config.arch 63 | ); 64 | // Get ChecksumValidator for this file 65 | let local = db.get_contents_db(name)?; 66 | let local_paths: Vec = local 67 | .into_iter() 68 | .filter(|(_, path)| path.ends_with(&contents_filename)) 69 | .map(|(_, path)| path) 70 | .collect(); 71 | if local_paths.is_empty() { 72 | bail!( 73 | "Internal Error: Local repository don't have benchmark file {}", 74 | contents_filename 75 | ); 76 | } 77 | let local_path = &local_paths[0]; 78 | let size = fs::metadata(&local_path)?.len(); 79 | let local_hash = Checksum::from_file_sha256(local_path)?; 80 | let validator = local_hash.get_validator(); 81 | 82 | for (name, mirror) in urls { 83 | let contents_url = format!( 84 | "{}/dists/{}/{}/Contents-{}.gz", 85 | mirror.url, repo.distribution, repo.components[0], config.arch 86 | ); 87 | // Start counting 88 | let start = Instant::now(); 89 | match try_download(&contents_url, &client, validator.clone()).await { 90 | Ok(_) => { 91 | let time = start.elapsed(); 92 | res.push((name.clone(), mirror.url.clone(), Some(time))); 93 | } 94 | Err(e) => { 95 | msg!("Mirror {name} failed to complete benchmark: {e}"); 96 | res.push((name.clone(), mirror.url.clone(), None)); 97 | } 98 | } 99 | } 100 | // Sort result based on time 101 | res.sort_by_key(|(_, _, time)| time.unwrap_or(Duration::MAX)); 102 | // Push result of this repo to results 103 | results.push((name.as_str(), size, res)); 104 | } 105 | 106 | // Show results 107 | show_bench_results(&results, opts.no_pager)?; 108 | 109 | // Ask if to write back results 110 | if crate::cli::ask_confirm(opts, "Apply optimal mirrors based on benchmark result?")? 
{ 111 | let config_path = opts 112 | .root 113 | .join(&opts.config_root) 114 | .canonicalize() 115 | .unwrap() 116 | .join("config.toml"); 117 | let original_toml = fs::read_to_string(&config_path)?; 118 | let mut new_config = original_toml.parse::()?; 119 | for (repo_name, _, result) in &results { 120 | let new_preferred = &result[0].0; 121 | new_config["repo"][repo_name]["source"]["preferred"] = value(new_preferred); 122 | } 123 | let new_config = new_config.to_string(); 124 | std::fs::write(config_path, new_config)?; 125 | success!( 126 | "New repository configuration has been written to {}.", 127 | style("config.toml").bold() 128 | ); 129 | } 130 | 131 | Ok(()) 132 | } 133 | 134 | #[inline] 135 | async fn try_download(url: &str, client: &Client, mut validator: ChecksumValidator) -> Result<()> { 136 | let mut resp = client.get(url).send().await?; 137 | while let Some(chunk) = resp.chunk().await? { 138 | validator.update(&chunk); 139 | } 140 | 141 | if !validator.finish() { 142 | bail!("Checksum mismatched."); 143 | } 144 | 145 | Ok(()) 146 | } 147 | 148 | #[derive(Tabled)] 149 | struct BenchResultRow { 150 | #[header("Best")] 151 | best: String, 152 | #[header("Mirror Name")] 153 | name: String, 154 | #[header("URL")] 155 | url: String, 156 | #[header("Speed")] 157 | speed: String, 158 | } 159 | 160 | #[inline] 161 | fn show_bench_results( 162 | results: &[(&str, u64, Vec<(String, String, Option)>)], 163 | no_pager: bool, 164 | ) -> Result<()> { 165 | info!("Benchmark result:"); 166 | 167 | let mut pager = Pager::new(no_pager)?; 168 | let pager_name = pager.pager_name().to_owned(); 169 | let mut writer = pager.get_writer()?; 170 | 171 | if pager_name == Some("less") { 172 | writeln!( 173 | writer, 174 | "Press {} to finish reviewing benchmark result.", 175 | style("q").bold() 176 | )?; 177 | writeln!(writer)?; 178 | } 179 | 180 | for (name, size, repo_results) in results { 181 | let mut rows = Vec::new(); 182 | for (i, (name, url, time)) in 
repo_results.iter().enumerate() { 183 | let speed = if let Some(duration) = time { 184 | let ms = duration.as_millis(); 185 | // *1024 because ms to s 186 | let bytes_per_sec: u128 = *size as u128 / ms * 1024; 187 | format!("{}/s", HumanBytes(bytes_per_sec as u64)) 188 | } else { 189 | style("FAILED").red().bold().to_string() 190 | }; 191 | let best = if i == 0 { 192 | style("*").green().bold().to_string() 193 | } else { 194 | String::new() 195 | }; 196 | let row = BenchResultRow { 197 | best, 198 | name: name.clone(), 199 | url: url.clone(), 200 | speed, 201 | }; 202 | rows.push(row); 203 | } 204 | let table = Table::new(&rows) 205 | .with(Header(format!( 206 | "Benchmark Result for {}", 207 | style(name).bold() 208 | ))) 209 | .with(Modify::new(Full).with(Alignment::left())) 210 | .with(Modify::new(Head).with(Alignment::center_horizontal())) 211 | // Best column should be aligned to the center 212 | .with(Modify::new(Column(0..1)).with(Alignment::center_horizontal())) 213 | .with(Modify::new(Column(1..)).with(|s: &str| format!(" {} ", s))) 214 | .with(Style::PSEUDO_CLEAN); 215 | writeln!(writer, "{table}\n")?; 216 | } 217 | 218 | drop(writer); 219 | pager.wait_for_exit()?; 220 | msg!(""); 221 | 222 | Ok(()) 223 | } 224 | -------------------------------------------------------------------------------- /src/config/blueprint/mod.rs: -------------------------------------------------------------------------------- 1 | mod parse; 2 | mod variables; 3 | use parse::{read_blueprint_from_file, BlueprintLine}; 4 | 5 | use crate::{error, info, msg, types::VersionRequirement}; 6 | 7 | use anyhow::{bail, Context, Result}; 8 | use console::style; 9 | use std::{fs::OpenOptions, os::unix::fs::FileExt, path::PathBuf}; 10 | 11 | #[derive(Debug, PartialEq, Eq, Default, Clone)] 12 | pub struct PkgRequest { 13 | pub name: String, 14 | pub version: VersionRequirement, 15 | pub added_by: Option, 16 | pub local: bool, 17 | } 18 | 19 | impl std::fmt::Display for PkgRequest { 20 | fn 
fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> { 21 | write!(f, "{}", self.name)?; 22 | // Sections of package property 23 | let mut sections = Vec::new(); 24 | if !self.version.is_arbitary() { 25 | sections.push(self.version.to_string()); 26 | } 27 | if let Some(pkgname) = &self.added_by { 28 | sections.push(format!("added_by = {}", pkgname)); 29 | } 30 | if self.local { 31 | sections.push("local".to_owned()); 32 | } 33 | // Write it 34 | if !sections.is_empty() { 35 | let joined = sections.join(", "); 36 | write!(f, " ({})", joined)?; 37 | } 38 | Ok(()) 39 | } 40 | } 41 | 42 | /// A collection of blueprints 43 | pub struct Blueprints { 44 | user_blueprint_path: PathBuf, 45 | // If we need to export the blueprint back to disk 46 | user_blueprint_modified: bool, 47 | user: Vec, 48 | vendor: Vec<(PathBuf, Vec)>, 49 | } 50 | 51 | impl Blueprints { 52 | pub fn from_files(user: PathBuf, vendor: &[PathBuf]) -> Result { 53 | let user_blueprint = read_blueprint_from_file(&user)?; 54 | let mut vendor_blueprints = Vec::with_capacity(vendor.len()); 55 | for path in vendor { 56 | vendor_blueprints.push((path.clone(), read_blueprint_from_file(path)?)); 57 | } 58 | 59 | Ok(Blueprints { 60 | user_blueprint_path: user, 61 | user_blueprint_modified: false, 62 | user: user_blueprint, 63 | vendor: vendor_blueprints, 64 | }) 65 | } 66 | 67 | pub fn get_pkg_requests(&self) -> Vec { 68 | // Add user blueprint first 69 | let mut res: Vec = self 70 | .user 71 | .iter() 72 | .filter_map(|mut line| match &mut line { 73 | BlueprintLine::PkgRequest(req) => { 74 | // Fill variables 75 | let mut req = req.clone(); 76 | req.name = variables::fill_variables(&req.name).unwrap(); 77 | Some(req) 78 | } 79 | _ => None, 80 | }) 81 | .collect(); 82 | 83 | // Then add vendor blueprint 84 | for (_, vendor) in &self.vendor { 85 | for line in vendor { 86 | if let BlueprintLine::PkgRequest(req) = line { 87 | res.push(req.clone()); 88 | } 89 | } 90 | } 91 | 92 | // Duplicates are 
allowed, so we shall dedup here 93 | res.dedup(); 94 | res 95 | } 96 | 97 | pub fn add( 98 | &mut self, 99 | pkgname: &str, 100 | modify: bool, 101 | added_by: Option<&str>, 102 | ver_req: Option, 103 | local: bool, 104 | ) -> Result<()> { 105 | if !modify && self.user_list_contains(pkgname) { 106 | bail!("Package {} already exists in user blueprint.", style(pkgname).bold()); 107 | } 108 | if let Some(path) = self.vendor_list_contains(pkgname) { 109 | if modify { 110 | bail!( 111 | "Cannot modify {}: package belongs to vendor blueprint {}", 112 | style(pkgname).bold(), 113 | style(path.display()).bold() 114 | ); 115 | } 116 | bail!( 117 | "Package {} already exists in vendor blueprint {}.", 118 | style(pkgname).bold(), 119 | style(path.display()).bold() 120 | ); 121 | } 122 | 123 | let version = ver_req.unwrap_or_default(); 124 | let pkgreq = PkgRequest { 125 | name: pkgname.to_string(), 126 | version, 127 | added_by: added_by.map(|pkgname| pkgname.to_owned()), 128 | local, 129 | }; 130 | if modify && self.user_list_contains(pkgname) { 131 | // Remove old entry first 132 | self.remove(pkgname, false).unwrap(); 133 | } 134 | self.user.push(BlueprintLine::PkgRequest(pkgreq)); 135 | self.user_blueprint_modified = true; 136 | Ok(()) 137 | } 138 | 139 | pub fn remove(&mut self, pkgname: &str, remove_recomms: bool) -> Result<()> { 140 | if !self.user_list_contains(pkgname) { 141 | if let Some(path) = self.vendor_list_contains(pkgname) { 142 | error!("Package {} not found in user blueprint.", style(pkgname).bold()); 143 | info!("However, it exists in vendor blueprint {}.", style(path.display()).bold()); 144 | msg!( 145 | "You may not remove packages in vendor blueprints via the sasm CLI. If you really wish to remove this package, please edit the vendor blueprint file above directly." 
146 | ); 147 | } else { 148 | error!("Package {} not found in any installed blueprints.", style(pkgname).bold()); 149 | } 150 | bail!("Failed to remove {} from user blueprint.", pkgname) 151 | } else { 152 | self.user.retain(|line| match line { 153 | BlueprintLine::PkgRequest(req) => req.name != pkgname, 154 | _ => true, 155 | }); 156 | if remove_recomms { 157 | self.remove_affiliated(pkgname); 158 | } 159 | self.user_blueprint_modified = true; 160 | Ok(()) 161 | } 162 | } 163 | 164 | pub fn remove_affiliated(&mut self, pkgname: &str) { 165 | let prev_len = self.user.len(); 166 | self.user.retain(|line| match line { 167 | BlueprintLine::PkgRequest(req) => req.added_by != Some(pkgname.to_string()), 168 | _ => true, 169 | }); 170 | if self.user.len() < prev_len { 171 | self.user_blueprint_modified = true; 172 | } 173 | } 174 | 175 | // Write back user blueprint 176 | pub fn export(&self) -> Result { 177 | if !self.user_blueprint_modified { 178 | // If not modified, nothing to do here. 179 | return Ok(false); 180 | } 181 | 182 | let mut res = String::new(); 183 | for l in &self.user { 184 | match l { 185 | BlueprintLine::Comment(content) => res.push_str(&format!("#{}\n", content)), 186 | BlueprintLine::EmptyLine => res.push('\n'), 187 | BlueprintLine::PkgRequest(req) => res.push_str(&format!("{}\n", req)), 188 | } 189 | } 190 | 191 | // Open user blueprint 192 | let blueprint_file = 193 | OpenOptions::new().write(true).truncate(true).open(&self.user_blueprint_path)?; 194 | blueprint_file.set_len(0)?; 195 | blueprint_file.write_all_at(&res.into_bytes(), 0).context(format!( 196 | "Failed to write to blueprint file {}.", 197 | self.user_blueprint_path.display() 198 | ))?; 199 | 200 | Ok(true) 201 | } 202 | 203 | pub fn user_list_contains(&self, pkgname: &str) -> bool { 204 | for line in &self.user { 205 | if let BlueprintLine::PkgRequest(req) = line { 206 | if req.name == pkgname { 207 | return true; 208 | } 209 | } 210 | } 211 | false 212 | } 213 | 214 | pub fn 
vendor_list_contains(&self, pkgname: &str) -> Option { 215 | for (path, vendor) in &self.vendor { 216 | for line in vendor { 217 | if let BlueprintLine::PkgRequest(req) = line { 218 | if req.name == pkgname { 219 | return Some(path.clone()); 220 | } 221 | } 222 | } 223 | } 224 | None 225 | } 226 | } 227 | -------------------------------------------------------------------------------- /src/config/blueprint/parse.rs: -------------------------------------------------------------------------------- 1 | use super::{variables::fill_variables, PkgRequest}; 2 | use crate::{ 3 | error, 4 | types::{parse_version_requirement, VersionRequirement}, 5 | }; 6 | 7 | use anyhow::{bail, Context, Result}; 8 | use console::style; 9 | use nom::{ 10 | branch::alt, 11 | bytes::complete::tag, 12 | character::complete::*, 13 | error::{Error, ErrorKind, ParseError}, 14 | sequence::*, 15 | IResult, InputTakeAtPosition, 16 | }; 17 | use std::{ 18 | fs::File, 19 | io::{BufRead, BufReader}, 20 | path::Path, 21 | }; 22 | 23 | #[derive(Debug, PartialEq, Eq, Clone)] 24 | pub enum BlueprintLine { 25 | PkgRequest(PkgRequest), 26 | Comment(String), 27 | EmptyLine, 28 | } 29 | 30 | pub fn read_blueprint_from_file(path: &Path) -> Result> { 31 | // Read lines from blueprint file 32 | let f = File::open(path) 33 | .context(format!("Failed to open blueprint file at {}.", style(path.display()).bold()))?; 34 | let reader = BufReader::new(f); 35 | let lines = parse_blueprint_lines(reader) 36 | .context(format!("Failed to parse blueprint {}.", style(path.display()).bold()))?; 37 | for (no, line) in lines.iter().enumerate() { 38 | // Try fill variables to sanitize 39 | if let BlueprintLine::PkgRequest(req) = &line { 40 | let new_pkgname = fill_variables(&req.name)?; 41 | if !new_pkgname.chars().all(is_pkgname_char) { 42 | bail!( 43 | "Fail to parse blueprint {}: invalid package name at line {}.", 44 | path.display(), 45 | no 46 | ); 47 | } 48 | } 49 | } 50 | 51 | Ok(lines) 52 | } 53 | 54 | fn 
parse_blueprint_lines(reader: impl BufRead) -> Result> { 55 | let mut res = Vec::new(); 56 | let mut errors = 0; 57 | for (no, line) in reader.lines().enumerate() { 58 | let i = line?; 59 | match alt((empty_line, comment_line, package_line_wrapper))(&i) { 60 | Ok((_, content)) => { 61 | res.push(content); 62 | } 63 | Err(e) => { 64 | errors += 1; 65 | error!("Failed to parse blueprint at line {}: {}.\n", no, e.to_string()); 66 | } 67 | }; 68 | } 69 | 70 | if errors == 0 { 71 | Ok(res) 72 | } else { 73 | bail!("Failed to parse blueprint, found {} error(s).", errors) 74 | } 75 | } 76 | 77 | fn empty_line(i: &str) -> IResult<&str, BlueprintLine> { 78 | match nom::sequence::terminated(nom::character::complete::space0, nom::combinator::eof)(i) { 79 | Ok(_) => Ok(("", BlueprintLine::EmptyLine)), 80 | Err(e) => Err(e), 81 | } 82 | } 83 | 84 | fn comment_line(i: &str) -> IResult<&str, BlueprintLine> { 85 | match char('#')(i) { 86 | Ok((r, _)) => Ok(("", BlueprintLine::Comment(r.to_string()))), 87 | Err(e) => Err(e), 88 | } 89 | } 90 | 91 | fn is_pkgname_char(c: char) -> bool { 92 | c.is_alphanumeric() || c == '-' || c == '.' 
|| c == '+' 93 | } 94 | 95 | fn is_pkgname_with_var_char(c: char) -> bool { 96 | is_pkgname_char(c) || c == '{' || c == '_' || c == '}' 97 | } 98 | 99 | fn package_name(i: &str) -> IResult<&str, &str> { 100 | i.split_at_position1_complete(|item| !is_pkgname_with_var_char(item), ErrorKind::Char) 101 | } 102 | 103 | enum PkgOption { 104 | VersionRequirement(VersionRequirement), 105 | AddedBy(String), 106 | Local, 107 | } 108 | 109 | fn pkg_option(i: &str) -> IResult<&str, PkgOption> { 110 | if let Ok((i, _)) = tag::<_, _, Error<&str>>("added_by")(i) { 111 | let (i, _) = space0(i)?; 112 | let (i, _) = char('=')(i)?; 113 | let (i, _) = space0(i)?; 114 | let (i, pkgname) = package_name(i)?; 115 | return Ok((i, PkgOption::AddedBy(pkgname.to_owned()))); 116 | } 117 | 118 | if let Ok((i, req)) = parse_version_requirement(i) { 119 | return Ok((i, PkgOption::VersionRequirement(req))); 120 | } 121 | 122 | if let Ok((i, _)) = tag::<_, _, Error<&str>>("local")(i) { 123 | return Ok((i, PkgOption::Local)); 124 | } 125 | 126 | Err(nom::Err::Error(nom::error::Error::from_error_kind(i, ErrorKind::Alt))) 127 | } 128 | 129 | fn package_line(i: &str) -> IResult<&str, PkgRequest> { 130 | let (i, name) = package_name(i)?; 131 | let (i, _) = nom::character::complete::space0(i)?; 132 | // Construct basic result 133 | let mut res = PkgRequest { 134 | name: name.to_string(), 135 | version: VersionRequirement::default(), 136 | added_by: None, 137 | local: false, 138 | }; 139 | 140 | let i = if let Ok((i, opts)) = nom::sequence::delimited( 141 | tuple((space0, char('('), space0)), 142 | nom::multi::separated_list1(tuple((space0, char(','), space0)), pkg_option), 143 | tuple((space0, char(')'), space0)), 144 | )(i) 145 | { 146 | // Enroll optional requests 147 | for opt in opts { 148 | match opt { 149 | PkgOption::AddedBy(pkgname) => res.added_by = Some(pkgname), 150 | PkgOption::VersionRequirement(request) => { 151 | res.version = res.version.combine(&request).unwrap(); 152 | } 153 | 
PkgOption::Local => { 154 | res.local = true; 155 | } 156 | } 157 | } 158 | i 159 | } else { 160 | i 161 | }; 162 | 163 | let (i, _) = nom::combinator::eof(i)?; 164 | 165 | Ok((i, res)) 166 | } 167 | 168 | fn package_line_wrapper(i: &str) -> IResult<&str, BlueprintLine> { 169 | let (i, res) = package_line(i)?; 170 | Ok((i, BlueprintLine::PkgRequest(res))) 171 | } 172 | 173 | #[cfg(test)] 174 | mod tests { 175 | use super::*; 176 | use crate::types::PkgVersion; 177 | use nom::{error::Error, IResult}; 178 | 179 | #[test] 180 | fn test_empty_line() { 181 | let t: Vec<(&str, IResult<&str, BlueprintLine>)> = vec![ 182 | ("", Ok(("", BlueprintLine::EmptyLine))), 183 | (" ", Ok(("", BlueprintLine::EmptyLine))), 184 | ("blah", Err(nom::Err::Error(Error::new("blah", nom::error::ErrorKind::Eof)))), 185 | (" nope", Err(nom::Err::Error(Error::new("nope", nom::error::ErrorKind::Eof)))), 186 | ]; 187 | 188 | for test in t { 189 | assert_eq!(empty_line(test.0), test.1); 190 | } 191 | } 192 | 193 | #[test] 194 | fn test_comment_line() { 195 | let t: Vec<(&str, IResult<&str, BlueprintLine>)> = vec![ 196 | ("#", Ok(("", BlueprintLine::Comment("".to_string())))), 197 | ("# ", Ok(("", BlueprintLine::Comment(" ".to_string())))), 198 | ( 199 | "# This is a comment", 200 | Ok(("", BlueprintLine::Comment(" This is a comment".to_string()))), 201 | ), 202 | ("blah", Err(nom::Err::Error(Error::new("blah", nom::error::ErrorKind::Char)))), 203 | (" nope", Err(nom::Err::Error(Error::new(" nope", nom::error::ErrorKind::Char)))), 204 | ]; 205 | 206 | for test in t { 207 | assert_eq!(comment_line(test.0), test.1); 208 | } 209 | } 210 | 211 | #[test] 212 | fn test_pkgname() { 213 | let t: Vec<(&str, IResult<&str, &str>)> = vec![ 214 | ("a1-v2", Ok(("", "a1-v2"))), 215 | ("a.+b", Ok(("", "a.+b"))), 216 | ("a~b", Ok(("~b", "a"))), // The letters after ~ will not be consumed 217 | ]; 218 | 219 | for test in t { 220 | assert_eq!(package_name(test.0), test.1); 221 | } 222 | } 223 | 224 | #[test] 225 | 
fn test_package_line() { 226 | let tests = vec![( 227 | "abc (added_by = wow, >>1)", 228 | PkgRequest { 229 | name: "abc".to_string(), 230 | version: VersionRequirement { 231 | lower_bond: Some((PkgVersion::try_from("1").unwrap(), false)), 232 | upper_bond: None, 233 | }, 234 | added_by: Some("wow".to_string()), 235 | local: false, 236 | }, 237 | ( 238 | "pkgname (>>1, local, <<2)", 239 | PkgRequest { 240 | name: "abc".to_string(), 241 | version: VersionRequirement { 242 | lower_bond: Some((PkgVersion::try_from("1").unwrap(), false)), 243 | upper_bond: Some((PkgVersion::try_from("2").unwrap(), true)), 244 | }, 245 | added_by: None, 246 | local: true, 247 | }, 248 | ), 249 | )]; 250 | 251 | for t in tests { 252 | assert_eq!(package_line(t.0).unwrap().1, t.1); 253 | } 254 | } 255 | } 256 | -------------------------------------------------------------------------------- /src/types/version/requirement.rs: -------------------------------------------------------------------------------- 1 | use super::{parse_version, PkgVersion}; 2 | use anyhow::{bail, format_err, Result}; 3 | use nom::{branch::alt, bytes::complete::tag, character::complete::*, error::context, IResult}; 4 | use serde::{Deserialize, Serialize, Serializer}; 5 | use std::cmp::Ordering::*; 6 | use std::fmt; 7 | 8 | #[derive(PartialEq, Eq, Clone, Debug, Deserialize, Default)] 9 | #[serde(try_from = "&str")] 10 | pub struct VersionRequirement { 11 | // The bool represents if the restriction is inclusive 12 | pub lower_bond: Option<(PkgVersion, bool)>, 13 | pub upper_bond: Option<(PkgVersion, bool)>, 14 | } 15 | 16 | impl VersionRequirement { 17 | pub fn new() -> Self { 18 | VersionRequirement { lower_bond: None, upper_bond: None } 19 | } 20 | 21 | /// Check if this VersionRequirement accepts arbitary version 22 | pub fn is_arbitary(&self) -> bool { 23 | self.lower_bond.is_none() && self.upper_bond.is_none() 24 | } 25 | 26 | /// Create a new VersionRequirment that satisfies both original requirements 27 | pub 
fn combine(&self, other: &VersionRequirement) -> Result { 28 | let mut new = self.clone(); 29 | if self.lower_bond.is_none() && other.lower_bond.is_some() { 30 | new.lower_bond = other.lower_bond.clone(); 31 | } else if self.lower_bond.is_some() && other.lower_bond.is_some() { 32 | let this = self.lower_bond.as_ref().unwrap(); 33 | let other = other.lower_bond.as_ref().unwrap(); 34 | if this.0 < other.0 || (this.0 == other.0 && this.1 && !other.1) { 35 | // Either other is stricter than this (higher lower-bond), 36 | // or same bond but other is not inclusive 37 | new.lower_bond = Some(other.clone()); 38 | } 39 | } 40 | 41 | if self.upper_bond.is_none() && other.upper_bond.is_some() { 42 | new.upper_bond = other.upper_bond.clone(); 43 | } else if self.upper_bond.is_some() && other.upper_bond.is_some() { 44 | let this = self.upper_bond.as_ref().unwrap(); 45 | let other = other.upper_bond.as_ref().unwrap(); 46 | if this.0 > other.0 || (this.0 == other.0 && this.1 && !other.1) { 47 | // Either other is stricter than this (lower upper-bond), 48 | // or same bond but other is not inclusive 49 | new.upper_bond = Some(other.clone()); 50 | } 51 | } 52 | 53 | if !new.valid() { 54 | bail!("Failed to reach a solution for version requirements {} and {} .", self, other); 55 | } 56 | 57 | Ok(new) 58 | } 59 | 60 | /// Validate if this VersionRequirment can be satisfied for some PkgVersion 61 | pub fn valid(&self) -> bool { 62 | if self.lower_bond.is_some() && self.upper_bond.is_some() { 63 | let lower = self.lower_bond.as_ref().unwrap(); 64 | let upper = self.upper_bond.as_ref().unwrap(); 65 | match lower.0.cmp(&upper.0) { 66 | Greater => false, 67 | Equal => { 68 | // must be both inclusive to be valid 69 | lower.1 && upper.1 70 | } 71 | Less => true, 72 | } 73 | } else { 74 | true 75 | } 76 | } 77 | 78 | /// Check if a PkgVersion satisfies this VersionRequirement 79 | pub fn contains(&self, ver: &PkgVersion) -> bool { 80 | if let Some(lower) = &self.lower_bond { 81 | // If 
inclusive 82 | if lower.1 { 83 | if ver < &lower.0 { 84 | return false; 85 | } 86 | } else if ver <= &lower.0 { 87 | return false; 88 | } 89 | } 90 | 91 | if let Some(upper) = &self.upper_bond { 92 | // If inclusive 93 | if upper.1 { 94 | if ver > &upper.0 { 95 | return false; 96 | } 97 | } else if ver >= &upper.0 { 98 | return false; 99 | } 100 | } 101 | 102 | true 103 | } 104 | 105 | // Check if that VersionRequirement is within this VersionRequirement 106 | pub fn within(&self, that: &VersionRequirement) -> bool { 107 | let lower_within = self.lower_bond.is_none() 108 | || self.lower_bond.is_some() 109 | && that.lower_bond.is_some() 110 | && self.lower_bond.as_ref().unwrap() <= that.lower_bond.as_ref().unwrap(); 111 | let upper_within = self.upper_bond.is_none() 112 | || self.upper_bond.is_some() 113 | && that.upper_bond.is_some() 114 | && self.upper_bond.as_ref().unwrap() >= that.upper_bond.as_ref().unwrap(); 115 | lower_within && upper_within 116 | } 117 | 118 | // Check if there's an overlap between two VersionRequirements 119 | pub fn overlap(&self, that: &VersionRequirement) -> bool { 120 | self.combine(that).is_ok() 121 | } 122 | } 123 | 124 | /// Use `nom` to parse a VersionRequirement string 125 | pub fn parse_version_requirement(i: &str) -> IResult<&str, VersionRequirement> { 126 | let (i, compare) = context( 127 | "Parsing compare literal...", 128 | alt((tag(">="), tag("<="), tag("="), tag(">"), tag("<"))), 129 | )(i)?; 130 | let (i, _) = space0(i)?; 131 | let (i, ver) = context("Parsing version in VersionRequirement...", parse_version)(i)?; 132 | let mut res = VersionRequirement::default(); 133 | match compare { 134 | ">" => { 135 | res.lower_bond = Some((ver, false)); 136 | } 137 | ">=" => { 138 | res.lower_bond = Some((ver, true)); 139 | } 140 | "=" => { 141 | res.lower_bond = Some((ver.clone(), true)); 142 | res.upper_bond = Some((ver, true)); 143 | } 144 | "<" => { 145 | res.upper_bond = Some((ver, false)); 146 | } 147 | "<=" => { 148 | 
res.upper_bond = Some((ver, true)); 149 | } 150 | _ => panic!(), 151 | } 152 | 153 | Ok((i, res)) 154 | } 155 | 156 | impl TryFrom<&str> for VersionRequirement { 157 | type Error = anyhow::Error; 158 | 159 | fn try_from(s: &str) -> Result { 160 | let (_, ver_req) = 161 | parse_version_requirement(s).map_err(|e| format_err!("Malformed version: {}", e))?; 162 | if !ver_req.valid() { 163 | bail!("Failed to parse version requirements: lower bound is greater than upper bound.") 164 | } 165 | Ok(ver_req) 166 | } 167 | } 168 | 169 | impl fmt::Display for VersionRequirement { 170 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 171 | // First, deal with equal 172 | if self.lower_bond.is_some() && self.lower_bond == self.upper_bond { 173 | let lower = self.lower_bond.as_ref().unwrap(); 174 | write!(f, "={}", lower.0)?; 175 | // We are done! 176 | return Ok(()); 177 | } 178 | 179 | // If not equal, write two parts 180 | let mut written = false; 181 | if let Some(lower) = &self.lower_bond { 182 | // If inclusive 183 | if lower.1 { 184 | write!(f, ">={}", lower.0)?; 185 | } else { 186 | write!(f, ">{}", lower.0)?; 187 | } 188 | written = true; 189 | } 190 | if let Some(upper) = &self.upper_bond { 191 | // Add comma 192 | if written { 193 | write!(f, ", ")?; 194 | } 195 | // If inclusive 196 | if upper.1 { 197 | write!(f, "<={}", upper.0)?; 198 | } else { 199 | write!(f, "<{}", upper.0)?; 200 | } 201 | } 202 | Ok(()) 203 | } 204 | } 205 | 206 | impl Serialize for VersionRequirement { 207 | fn serialize(&self, serializer: S) -> Result 208 | where 209 | S: Serializer, 210 | { 211 | let res = self.to_string(); 212 | serializer.serialize_str(&res) 213 | } 214 | } 215 | 216 | #[cfg(test)] 217 | mod test { 218 | use super::*; 219 | #[test] 220 | fn merge_ver_rq() { 221 | let tests = vec![ 222 | ( 223 | VersionRequirement::default(), 224 | VersionRequirement::default(), 225 | VersionRequirement::default(), 226 | ), 227 | ( 228 | VersionRequirement::default(), 229 | 
VersionRequirement::try_from(">1").unwrap(), 230 | VersionRequirement::try_from(">1").unwrap(), 231 | ), 232 | ( 233 | VersionRequirement::try_from(">1").unwrap(), 234 | VersionRequirement::try_from(">=1").unwrap(), 235 | VersionRequirement::try_from(">1").unwrap(), 236 | ), 237 | ( 238 | VersionRequirement::try_from(">1").unwrap(), 239 | VersionRequirement::try_from(">2").unwrap(), 240 | VersionRequirement::try_from(">2").unwrap(), 241 | ), 242 | ( 243 | VersionRequirement::try_from(">2").unwrap(), 244 | VersionRequirement::try_from(">1").unwrap(), 245 | VersionRequirement::try_from(">2").unwrap(), 246 | ), 247 | ( 248 | VersionRequirement::try_from(">1").unwrap(), 249 | VersionRequirement::try_from("<=2").unwrap(), 250 | VersionRequirement { 251 | lower_bond: Some((PkgVersion::try_from("1").unwrap(), false)), 252 | upper_bond: Some((PkgVersion::try_from("2").unwrap(), true)), 253 | }, 254 | ), 255 | ]; 256 | 257 | for t in tests { 258 | assert_eq!(t.0.combine(&t.1).unwrap(), t.2); 259 | } 260 | } 261 | 262 | #[test] 263 | fn merge_ver_fail() { 264 | let tests = vec![( 265 | VersionRequirement::try_from(">1").unwrap(), 266 | VersionRequirement::try_from("<1").unwrap(), 267 | )]; 268 | 269 | for t in tests { 270 | assert_eq!(t.0.combine(&t.1).is_ok(), false); 271 | } 272 | } 273 | } 274 | -------------------------------------------------------------------------------- /src/utils/downloader.rs: -------------------------------------------------------------------------------- 1 | use crate::{msg, types::Checksum}; 2 | 3 | use anyhow::{bail, format_err, Result}; 4 | use async_compression::tokio::write::{GzipDecoder, XzDecoder}; 5 | use console::style; 6 | use futures_util::future::select_all; 7 | use indicatif::{MultiProgress, ProgressBar, ProgressStyle}; 8 | use reqwest::Client; 9 | use std::{ 10 | collections::HashMap, 11 | path::{Path, PathBuf}, 12 | }; 13 | use tokio::{ 14 | fs::OpenOptions, 15 | io::{AsyncWrite, AsyncWriteExt}, 16 | }; 17 | 18 | #[derive(Clone)] 
19 | pub struct DownloadJob { 20 | pub url: String, 21 | pub description: Option, 22 | pub filename: Option, 23 | pub size: Option, 24 | pub compression: Compression, 25 | } 26 | 27 | #[allow(dead_code)] 28 | #[derive(Clone)] 29 | pub enum Compression { 30 | Gzip((Option, Option)), 31 | Xz((Option, Option)), 32 | None(Option), 33 | } 34 | 35 | impl Compression { 36 | pub fn get_extracted_checksum(&self) -> Option { 37 | match self { 38 | Compression::Gzip((_, c)) | Compression::Xz((_, c)) | Compression::None(c) => c, 39 | } 40 | .clone() 41 | } 42 | 43 | pub fn get_download_checksum(&self) -> Option { 44 | match self { 45 | Compression::Gzip((c, _)) | Compression::Xz((c, _)) | Compression::None(c) => c, 46 | } 47 | .clone() 48 | } 49 | } 50 | 51 | pub struct Downloader { 52 | client: Client, 53 | max_concurrent: usize, 54 | max_retry: usize, 55 | } 56 | 57 | impl Downloader { 58 | pub fn new() -> Self { 59 | Downloader { client: Client::new(), max_concurrent: 5, max_retry: 3 } 60 | } 61 | 62 | /// Download all required stuff in an async manner and show a progress bar 63 | pub async fn fetch( 64 | &self, 65 | mut to_download: Vec, 66 | download_path: &Path, 67 | global_progess: bool, 68 | ) -> Result> { 69 | // Create download dir 70 | if !download_path.is_dir() { 71 | tokio::fs::create_dir_all(download_path).await?; 72 | } 73 | 74 | // Calculate total size 75 | let total_size: u64 = to_download.iter().map(|job| job.size.unwrap_or(0)).sum(); 76 | 77 | let mut res = HashMap::new(); 78 | // Handles for download processes 79 | let mut handles = Vec::with_capacity(self.max_concurrent); 80 | 81 | // Show download info 82 | msg!("Downloading {} files...", to_download.len()); 83 | let multibar = MultiProgress::new(); 84 | let bar_template = { 85 | let max_len = crate::WRITER.get_max_len(); 86 | if max_len < 90 { 87 | " {wide_msg} {total_bytes:>10} {binary_bytes_per_sec:>12} {eta:>4} {percent:>3}%" 88 | } else { 89 | " {msg:<48} {total_bytes:>10} {binary_bytes_per_sec:>12} 
{eta:>4} [{wide_bar:.white/black}] {percent:>3}%" 90 | } 91 | }; 92 | let barsty = ProgressStyle::default_bar().template(bar_template)?.progress_chars("=>-"); 93 | // Create a global bar if some files specified size 94 | let total = to_download.len(); 95 | let total_str_len = total.to_string().len(); 96 | let mut finished = 0; 97 | let global_bar = if total_size > 0 && global_progess { 98 | let bar = multibar.insert(0, ProgressBar::new(total_size)); 99 | bar.set_style(barsty.clone()); 100 | Some(bar) 101 | } else { 102 | None 103 | }; 104 | 105 | // Down them all! 106 | while !to_download.is_empty() { 107 | while handles.len() < self.max_concurrent && !to_download.is_empty() { 108 | let job = to_download.pop().unwrap(); 109 | let client = self.client.clone(); 110 | let path = download_path.to_owned(); 111 | let bar = multibar.insert(0, ProgressBar::new(job.size.unwrap_or(0))); 112 | let global_bar = global_bar.clone(); 113 | bar.set_style(barsty.clone()); 114 | let handle = tokio::spawn(async move { 115 | try_download_file(client, path, job, 0, bar, global_bar).await 116 | }); 117 | handles.push(handle); 118 | } 119 | // Wait for any of them to stop 120 | let (download_res, _, remaining) = select_all(handles).await; 121 | handles = remaining; 122 | // Remove the handle from the list 123 | match download_res.unwrap() { 124 | Ok((name, path)) => { 125 | res.insert(name, path); 126 | finished += 1; 127 | update_global_bar(&global_bar, total, finished, total_str_len); 128 | } 129 | Err(e) => { 130 | // Handling download errors 131 | // If have remaining reties, do it 132 | if e.retry < self.max_retry { 133 | let c = self.client.clone(); 134 | let path = download_path.to_owned(); 135 | let handle = tokio::spawn(async move { 136 | try_download_file(c, path, e.job, e.retry + 1, e.bar, e.global_bar) 137 | .await 138 | }); 139 | handles.push(handle); 140 | } else { 141 | return Err(e.error); 142 | } 143 | } 144 | } 145 | } 146 | // Wait for the remaining to finish 147 | 
while !handles.is_empty() { 148 | let (download_res, _, remaining) = select_all(handles).await; 149 | handles = remaining; 150 | match download_res.unwrap() { 151 | Ok((url, path)) => { 152 | res.insert(url, path); 153 | finished += 1; 154 | update_global_bar(&global_bar, total, finished, total_str_len); 155 | } 156 | Err(e) => { 157 | // Handling download errors 158 | // If have remaining reties, do it 159 | if e.retry < self.max_retry { 160 | let c = self.client.clone(); 161 | let path = download_path.to_owned(); 162 | let handle = tokio::spawn(async move { 163 | try_download_file(c, path, e.job, e.retry + 1, e.bar, e.global_bar) 164 | .await 165 | }); 166 | handles.push(handle); 167 | } else { 168 | return Err(e.error); 169 | } 170 | } 171 | } 172 | } 173 | Ok(res) 174 | } 175 | } 176 | 177 | struct DownloadError { 178 | error: anyhow::Error, 179 | job: DownloadJob, 180 | retry: usize, 181 | bar: ProgressBar, 182 | global_bar: Option, 183 | } 184 | 185 | async fn try_download_file( 186 | client: Client, 187 | path: PathBuf, 188 | job: DownloadJob, 189 | retry: usize, 190 | bar: ProgressBar, 191 | global_bar: Option, 192 | ) -> Result<(String, PathBuf), DownloadError> { 193 | match download_file(&client, &path, job.clone(), bar.clone(), global_bar.clone()).await { 194 | Ok(res) => Ok(res), 195 | Err(error) => Err({ 196 | bar.reset(); 197 | DownloadError { error, job, retry: retry + 1, bar, global_bar } 198 | }), 199 | } 200 | } 201 | 202 | async fn download_file( 203 | client: &Client, 204 | path: &Path, 205 | job: DownloadJob, 206 | bar: ProgressBar, 207 | global_bar: Option, 208 | ) -> Result<(String, PathBuf)> { 209 | let mut resp = client.get(&job.url).send().await?; 210 | resp.error_for_status_ref()?; 211 | let filename = match job.filename { 212 | Some(n) => n, 213 | None => resp 214 | .url() 215 | .path_segments() 216 | .and_then(|segments| segments.last()) 217 | .and_then(|name| if name.is_empty() { None } else { Some(name) }) 218 | .ok_or_else(|| 
format_err!("{} doesn't contain filename.", &job.url))? 219 | .to_string(), 220 | }; 221 | let len = match job.size { 222 | Some(len) => len, 223 | None => { 224 | resp.content_length().ok_or_else(|| format_err!("Cannot determine content length."))? 225 | } 226 | }; 227 | let msg = job.description.as_ref().unwrap_or(&filename); 228 | 229 | let file_path = path.join(&filename); 230 | let mut f = { 231 | if file_path.is_file() { 232 | if let Some(checksum) = job.compression.get_extracted_checksum() { 233 | let p = file_path.clone(); 234 | let res = tokio::task::spawn_blocking(move || checksum.cmp_file(&p)).await?; 235 | if res.is_ok() && res.unwrap() { 236 | // Checksum matched. 237 | bar.finish_and_clear(); 238 | // Reduce global bar length, since we don't need to download this file 239 | if let Some(ref global_bar) = global_bar { 240 | global_bar.set_length(global_bar.length().unwrap() - len); 241 | } 242 | 243 | if crate::verbose() || global_bar.is_some() { 244 | bar.println(format!( 245 | "{}{} (not modified)", 246 | crate::utils::cli::gen_prefix( 247 | &console::style("SKIP").dim().to_string() 248 | ), 249 | &msg 250 | )); 251 | } 252 | return Ok((job.url, file_path)); 253 | } 254 | } 255 | // If checksum DNE/mismatch, purge current content 256 | let f = 257 | OpenOptions::new().read(true).write(true).truncate(true).open(&file_path).await?; 258 | f.set_len(0).await?; 259 | f 260 | } else { 261 | OpenOptions::new().read(true).write(true).create(true).open(&file_path).await? 262 | } 263 | }; 264 | 265 | // Prepare progress bar 266 | let mut progress_text = msg.to_owned(); 267 | if console::measure_text_width(&progress_text) > 48 { 268 | progress_text = console::truncate_str(&progress_text, 45, "...").to_string(); 269 | } 270 | bar.set_message(progress_text); 271 | bar.set_length(len); 272 | bar.set_position(0); 273 | bar.reset(); 274 | 275 | // Download! 
276 | { 277 | let mut validator = 278 | job.compression.get_download_checksum().as_ref().map(|c| c.get_validator()); 279 | let mut writer: Box = match job.compression { 280 | Compression::Gzip(_) => Box::new(GzipDecoder::new(&mut f)), 281 | Compression::Xz(_) => Box::new(XzDecoder::new(&mut f)), 282 | Compression::None(_) => Box::new(&mut f), 283 | }; 284 | while let Some(chunk) = resp.chunk().await? { 285 | writer.write_all(&chunk).await?; 286 | let len = chunk.len().try_into().unwrap(); 287 | bar.inc(len); 288 | // Increase global bar, if applicable 289 | if let Some(ref global_bar) = global_bar { 290 | global_bar.inc(len); 291 | } 292 | if let Some(ref mut validator) = validator { 293 | validator.update(&chunk); 294 | } 295 | } 296 | writer.shutdown().await?; 297 | 298 | if let Some(len) = job.size { 299 | if bar.length().unwrap() != len { 300 | bail!( 301 | "Bad file size when downloading {}: mirrors may be synchronizing, try again later.", 302 | job.url 303 | ); 304 | } 305 | } 306 | 307 | if let Some(validator) = validator { 308 | // finish() returns false if validate failed 309 | if !validator.finish() { 310 | bail!("Checksum mismatched for file {}.", filename); 311 | } 312 | } 313 | } 314 | 315 | bar.finish_and_clear(); 316 | bar.println(format!( 317 | "{}{}", 318 | crate::utils::cli::gen_prefix(&console::style("DONE").dim().to_string()), 319 | &msg 320 | )); 321 | Ok((job.url, file_path)) 322 | } 323 | 324 | #[inline] 325 | fn update_global_bar( 326 | bar: &Option, 327 | total: usize, 328 | finished: usize, 329 | total_text_len: usize, 330 | ) { 331 | if let Some(bar) = bar { 332 | bar.set_message( 333 | style(gen_global_bar_message(total, finished, total_text_len)) 334 | .bright() 335 | .bold() 336 | .to_string(), 337 | ); 338 | } 339 | } 340 | 341 | #[inline] 342 | fn gen_global_bar_message(total: usize, finished: usize, total_text_len: usize) -> String { 343 | let finished_str = finished.to_string(); 344 | format!("Total Progress: [{: >width$}/{}]", 
finished_str, total, width = total_text_len) 345 | } 346 | -------------------------------------------------------------------------------- /src/solver/pool/mod.rs: -------------------------------------------------------------------------------- 1 | mod in_memory; 2 | pub use in_memory::InMemoryPool; 3 | 4 | use crate::{ 5 | msg, 6 | types::{PkgMeta, PkgSource, PkgVersion, VersionRequirement}, 7 | warn, 8 | }; 9 | 10 | use anyhow::{anyhow, bail, format_err, Context, Result}; 11 | use console::style; 12 | use varisat::{lit::Lit, CnfFormula, ExtendFormula}; 13 | 14 | /// The basic PkgPool interface 15 | pub trait BasicPkgPool { 16 | // Add a package to the pool 17 | fn add(&mut self, meta: PkgMeta) -> usize; 18 | // Finalize the pool, must call before using the pool 19 | fn finalize(&mut self); 20 | // Get PkgMeta from Pkg ID 21 | fn get_pkg_by_id(&self, id: usize) -> Option<&PkgMeta>; 22 | // Get a list of available package IDs based on the given name 23 | fn get_pkgs_by_name(&self, name: &str) -> Option>; 24 | // Get a list of packages that provide a certain package 25 | fn get_pkgs_by_provide(&self, name: &str, ver_req: &VersionRequirement) -> Option>; 26 | // Get an Iterator of (PkgName, &[(id, PkgVersion)]) 27 | fn pkgname_iter(&self) -> Box + '_>; 28 | // Get an Iterator of (PkgId, PkgMeta) 29 | fn pkgid_iter(&self) -> Box + '_>; 30 | } 31 | 32 | /// Additional tools 33 | pub trait PkgPool: BasicPkgPool { 34 | fn get_deps(&self, pkgid: usize) -> Result>> { 35 | let pkg = self 36 | .get_pkg_by_id(pkgid) 37 | .ok_or_else(|| format_err!("Package with ID {pkgid} not found."))?; 38 | let mut res = Vec::new(); 39 | for dep in &pkg.depends { 40 | let mut deps_id = Vec::new(); 41 | let available = self.get_pkgs_by_name(&dep.0).unwrap_or_default(); 42 | // Provides can be considered as dependencies as well 43 | let provides = self.get_pkgs_by_provide(&dep.0, &dep.1).unwrap_or_default(); 44 | 45 | for dep_pkgid in &available { 46 | let p = 
self.get_pkg_by_id(*dep_pkgid).unwrap(); 47 | if dep.1.contains(&p.version) { 48 | deps_id.push(*dep_pkgid); 49 | } 50 | } 51 | for provide_pkgid in &provides { 52 | deps_id.push(*provide_pkgid); 53 | } 54 | if deps_id.is_empty() { 55 | let error = anyhow!( 56 | "{} requires {} ({}), but only the following version(s) are available: {}.", 57 | pkg.name, 58 | dep.0, 59 | dep.1, 60 | available 61 | .iter() 62 | .map(|id| self.get_pkg_by_id(*id).unwrap().version.to_string()) 63 | .collect::>() 64 | .join(", ") 65 | ); 66 | let context = format!( 67 | "Cannot fulfill dependency {} for {}: no suitable version.", 68 | style(&dep.0).bold(), 69 | style(&pkg.name).bold() 70 | ); 71 | return Err(error).context(context); 72 | } else { 73 | res.push(deps_id); 74 | } 75 | } 76 | Ok(res) 77 | } 78 | 79 | fn pick_best_pkg( 80 | &self, 81 | pkgname: &str, 82 | ver_req: &VersionRequirement, 83 | need_local: bool, 84 | ) -> Result { 85 | if let Some(pkgs) = self.get_pkgs_by_name(pkgname) { 86 | let mut first_valid_version = true; 87 | for id in pkgs { 88 | // Safe unless the pool is broken 89 | let pkg = self.get_pkg_by_id(id).unwrap(); 90 | let is_local = matches!(pkg.source, PkgSource::Local(_)); 91 | if ver_req.contains(&pkg.version) { 92 | if need_local == is_local { 93 | return Ok(id); 94 | } else if first_valid_version { 95 | // First version that matches version requirement but can't use it because local 96 | // Tell it to the user 97 | warn!("Keeping local version of {}, but a newer version is available in upstream repositories.", 98 | style(pkgname).bold()); 99 | msg!( 100 | "Remove the {} keyword from your blueprint to use the latest version.", 101 | style("local").bold() 102 | ); 103 | first_valid_version = false; 104 | } 105 | } 106 | } 107 | // We haven't found a suitable candidate 108 | bail!("Cannot find a suitable version for {pkgname}."); 109 | } else { 110 | bail!("Package {pkgname} not found."); 111 | } 112 | } 113 | 114 | fn find_provide(&self, name: &str, 
ver_req: &Option) -> Option { 115 | let ver_req = ver_req.clone().unwrap_or_default(); 116 | for (_, pkg) in self.pkgid_iter() { 117 | for provide in &pkg.provides { 118 | if provide.0 == name && provide.1.combine(&ver_req).is_ok() { 119 | return Some(pkg.name.to_owned()); 120 | } 121 | } 122 | } 123 | 124 | None 125 | } 126 | 127 | fn find_replacement(&self, name: &str, ver_req: &VersionRequirement) -> Option { 128 | for (_, pkg) in self.pkgid_iter() { 129 | for replace in &pkg.replaces { 130 | if replace.0 == name && replace.1.within(ver_req) { 131 | return Some(pkg.name.to_owned()); 132 | } 133 | } 134 | } 135 | 136 | None 137 | } 138 | 139 | fn pkg_to_rule(&self, pkgid: usize, subset: Option<&[usize]>) -> Result>> { 140 | let pkg = self.get_pkg_by_id(pkgid).unwrap(); 141 | let mut res = Vec::new(); 142 | // Enroll dependencies 143 | for dep in &pkg.depends { 144 | let available = match self.get_pkgs_by_name(&dep.0) { 145 | Some(pkgs) => match subset { 146 | Some(ids) => { 147 | let pkgs: Vec = 148 | pkgs.iter().filter(|id| ids.contains(id)).copied().collect(); 149 | pkgs 150 | } 151 | None => pkgs.iter().copied().collect(), 152 | }, 153 | None => Vec::new(), 154 | }; 155 | // Provides can be considered as dependencies as well 156 | let provides: Vec = self.get_pkgs_by_provide(&dep.0, &dep.1).unwrap_or_default(); 157 | 158 | if available.is_empty() && provides.is_empty() { 159 | bail!("Cannot find a package which fulfills dependency {}.", style(&dep.0).bold()); 160 | } 161 | 162 | let mut clause = vec![!Lit::from_dimacs(pkgid as isize)]; 163 | 164 | for dep_pkgid in available { 165 | let p = self.get_pkg_by_id(dep_pkgid).unwrap(); 166 | if dep.1.contains(&p.version) { 167 | clause.push(Lit::from_dimacs(dep_pkgid as isize)); 168 | } 169 | } 170 | 171 | for provide_pkgid in provides { 172 | clause.push(Lit::from_dimacs(provide_pkgid as isize)); 173 | } 174 | 175 | if clause.len() > 1 { 176 | res.push(clause); 177 | } else { 178 | bail!("Cannot find an applicable 
version for dependency {}.", style(&dep.0).bold()); 179 | } 180 | } 181 | 182 | // Enroll conflicts 183 | for conflict in pkg.conflicts.iter() { 184 | let conflicable = match self.get_pkgs_by_name(&conflict.0) { 185 | Some(pkgs) => match subset { 186 | Some(ids) => { 187 | let pkgs: Vec = 188 | pkgs.into_iter().filter(|id| ids.contains(id)).collect(); 189 | pkgs 190 | } 191 | None => pkgs, 192 | }, 193 | None => { 194 | continue; 195 | } 196 | }; 197 | 198 | for conflict_pkgid in conflicable { 199 | let p = self.get_pkg_by_id(conflict_pkgid).unwrap(); 200 | if conflict.1.contains(&p.version) { 201 | let clause = vec![ 202 | !Lit::from_dimacs(pkgid as isize), 203 | !Lit::from_dimacs(conflict_pkgid as isize), 204 | ]; 205 | res.push(clause); 206 | } 207 | } 208 | } 209 | 210 | Ok(res) 211 | } 212 | 213 | fn gen_formula(&self, subset: Option<&[usize]>) -> CnfFormula { 214 | let mut formula = CnfFormula::new(); 215 | 216 | // Generating rules from pool 217 | for (id, meta) in self.pkgid_iter() { 218 | let valid = match subset { 219 | Some(ids) => ids.contains(&id), 220 | // If there's no subset requirement, then all packages are valid 221 | None => true, 222 | }; 223 | if valid { 224 | match self.pkg_to_rule(id, subset) { 225 | Ok(rules) => { 226 | for rule in rules { 227 | formula.add_clause(&rule); 228 | } 229 | } 230 | Err(e) => { 231 | warn!("Ignoring package {}: {}", style(&meta.name).bold(), e); 232 | } 233 | } 234 | } 235 | } 236 | 237 | // Generate conflict for different versions of the same package 238 | for (_, versions) in self.pkgname_iter() { 239 | let versions: Vec = match subset { 240 | Some(ids) => { 241 | versions.iter().filter(|pkg| ids.contains(&pkg.0)).map(|pkg| pkg.0).collect() 242 | } 243 | None => versions.iter().map(|(id, _)| *id).collect(), 244 | }; 245 | if versions.len() > 1 { 246 | let clause: Vec = 247 | versions.into_iter().map(|pkgid| !Lit::from_dimacs(pkgid as isize)).collect(); 248 | formula.add_clause(&clause); 249 | } 250 | } 251 | 
252 | formula 253 | } 254 | } 255 | 256 | #[cfg(test)] 257 | mod test { 258 | use super::*; 259 | use crate::types::{PkgMeta, PkgVersion, VersionRequirement}; 260 | use std::path::PathBuf; 261 | 262 | #[test] 263 | fn trivial_pool() { 264 | let mut pool = InMemoryPool::new(); 265 | let a_id = pool.add(PkgMeta { 266 | name: "a".to_string(), 267 | description: "".to_string(), 268 | version: PkgVersion::try_from("1").unwrap(), 269 | 270 | depends: vec![( 271 | "c".to_string(), 272 | VersionRequirement { lower_bond: None, upper_bond: None }, 273 | None, 274 | )], 275 | optional: Vec::new(), 276 | conflicts: vec![( 277 | "d".to_string(), 278 | VersionRequirement { lower_bond: None, upper_bond: None }, 279 | None, 280 | )], 281 | provides: Vec::new(), 282 | replaces: Vec::new(), 283 | install_size: 0, 284 | source: PkgSource::Local(PathBuf::new()), 285 | }); 286 | let b_id = pool.add(PkgMeta { 287 | name: "b".to_string(), 288 | description: "".to_string(), 289 | version: PkgVersion::try_from("1").unwrap(), 290 | depends: vec![( 291 | "a".to_string(), 292 | VersionRequirement { lower_bond: None, upper_bond: None }, 293 | None, 294 | )], 295 | optional: Vec::new(), 296 | conflicts: Vec::new(), 297 | replaces: Vec::new(), 298 | provides: Vec::new(), 299 | install_size: 0, 300 | source: PkgSource::Local(PathBuf::new()), 301 | }); 302 | let c_id = pool.add(PkgMeta { 303 | name: "c".to_string(), 304 | description: "".to_string(), 305 | version: PkgVersion::try_from("1").unwrap(), 306 | depends: vec![( 307 | "b".to_string(), 308 | VersionRequirement { lower_bond: None, upper_bond: None }, 309 | None, 310 | )], 311 | optional: Vec::new(), 312 | conflicts: Vec::new(), 313 | replaces: Vec::new(), 314 | provides: Vec::new(), 315 | install_size: 0, 316 | source: PkgSource::Local(PathBuf::new()), 317 | }); 318 | let d_id = pool.add(PkgMeta { 319 | name: "d".to_string(), 320 | description: "".to_string(), 321 | version: PkgVersion::try_from("1").unwrap(), 322 | depends: vec![( 323 | 
"b".to_string(), 324 | VersionRequirement { lower_bond: None, upper_bond: None }, 325 | None, 326 | )], 327 | optional: Vec::new(), 328 | conflicts: Vec::new(), 329 | replaces: Vec::new(), 330 | provides: Vec::new(), 331 | install_size: 0, 332 | source: PkgSource::Local(PathBuf::new()), 333 | }); 334 | pool.finalize(); 335 | 336 | let mut solver = varisat::Solver::new(); 337 | let formula = pool.gen_formula(None); 338 | solver.add_formula(&formula); 339 | solver.assume(&[Lit::from_dimacs(c_id as isize)]); 340 | 341 | solver.solve().unwrap(); 342 | assert_eq!( 343 | solver.model().unwrap(), 344 | vec![ 345 | Lit::from_dimacs(a_id as isize), 346 | Lit::from_dimacs(b_id as isize), 347 | Lit::from_dimacs(c_id as isize), 348 | !Lit::from_dimacs(d_id as isize), 349 | ] 350 | ); 351 | } 352 | } 353 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | GNU GENERAL PUBLIC LICENSE 2 | Version 2, June 1991 3 | 4 | Copyright (C) 1989, 1991 Free Software Foundation, Inc., 5 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA 6 | Everyone is permitted to copy and distribute verbatim copies 7 | of this license document, but changing it is not allowed. 8 | 9 | Preamble 10 | 11 | The licenses for most software are designed to take away your 12 | freedom to share and change it. By contrast, the GNU General Public 13 | License is intended to guarantee your freedom to share and change free 14 | software--to make sure the software is free for all its users. This 15 | General Public License applies to most of the Free Software 16 | Foundation's software and to any other program whose authors commit to 17 | using it. (Some other Free Software Foundation software is covered by 18 | the GNU Lesser General Public License instead.) You can apply it to 19 | your programs, too. 20 | 21 | When we speak of free software, we are referring to freedom, not 22 | price. 
Our General Public Licenses are designed to make sure that you 23 | have the freedom to distribute copies of free software (and charge for 24 | this service if you wish), that you receive source code or can get it 25 | if you want it, that you can change the software or use pieces of it 26 | in new free programs; and that you know you can do these things. 27 | 28 | To protect your rights, we need to make restrictions that forbid 29 | anyone to deny you these rights or to ask you to surrender the rights. 30 | These restrictions translate to certain responsibilities for you if you 31 | distribute copies of the software, or if you modify it. 32 | 33 | For example, if you distribute copies of such a program, whether 34 | gratis or for a fee, you must give the recipients all the rights that 35 | you have. You must make sure that they, too, receive or can get the 36 | source code. And you must show them these terms so they know their 37 | rights. 38 | 39 | We protect your rights with two steps: (1) copyright the software, and 40 | (2) offer you this license which gives you legal permission to copy, 41 | distribute and/or modify the software. 42 | 43 | Also, for each author's protection and ours, we want to make certain 44 | that everyone understands that there is no warranty for this free 45 | software. If the software is modified by someone else and passed on, we 46 | want its recipients to know that what they have is not the original, so 47 | that any problems introduced by others will not reflect on the original 48 | authors' reputations. 49 | 50 | Finally, any free program is threatened constantly by software 51 | patents. We wish to avoid the danger that redistributors of a free 52 | program will individually obtain patent licenses, in effect making the 53 | program proprietary. To prevent this, we have made it clear that any 54 | patent must be licensed for everyone's free use or not licensed at all. 
55 | 56 | The precise terms and conditions for copying, distribution and 57 | modification follow. 58 | 59 | GNU GENERAL PUBLIC LICENSE 60 | TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 61 | 62 | 0. This License applies to any program or other work which contains 63 | a notice placed by the copyright holder saying it may be distributed 64 | under the terms of this General Public License. The "Program", below, 65 | refers to any such program or work, and a "work based on the Program" 66 | means either the Program or any derivative work under copyright law: 67 | that is to say, a work containing the Program or a portion of it, 68 | either verbatim or with modifications and/or translated into another 69 | language. (Hereinafter, translation is included without limitation in 70 | the term "modification".) Each licensee is addressed as "you". 71 | 72 | Activities other than copying, distribution and modification are not 73 | covered by this License; they are outside its scope. The act of 74 | running the Program is not restricted, and the output from the Program 75 | is covered only if its contents constitute a work based on the 76 | Program (independent of having been made by running the Program). 77 | Whether that is true depends on what the Program does. 78 | 79 | 1. You may copy and distribute verbatim copies of the Program's 80 | source code as you receive it, in any medium, provided that you 81 | conspicuously and appropriately publish on each copy an appropriate 82 | copyright notice and disclaimer of warranty; keep intact all the 83 | notices that refer to this License and to the absence of any warranty; 84 | and give any other recipients of the Program a copy of this License 85 | along with the Program. 86 | 87 | You may charge a fee for the physical act of transferring a copy, and 88 | you may at your option offer warranty protection in exchange for a fee. 89 | 90 | 2. 
You may modify your copy or copies of the Program or any portion 91 | of it, thus forming a work based on the Program, and copy and 92 | distribute such modifications or work under the terms of Section 1 93 | above, provided that you also meet all of these conditions: 94 | 95 | a) You must cause the modified files to carry prominent notices 96 | stating that you changed the files and the date of any change. 97 | 98 | b) You must cause any work that you distribute or publish, that in 99 | whole or in part contains or is derived from the Program or any 100 | part thereof, to be licensed as a whole at no charge to all third 101 | parties under the terms of this License. 102 | 103 | c) If the modified program normally reads commands interactively 104 | when run, you must cause it, when started running for such 105 | interactive use in the most ordinary way, to print or display an 106 | announcement including an appropriate copyright notice and a 107 | notice that there is no warranty (or else, saying that you provide 108 | a warranty) and that users may redistribute the program under 109 | these conditions, and telling the user how to view a copy of this 110 | License. (Exception: if the Program itself is interactive but 111 | does not normally print such an announcement, your work based on 112 | the Program is not required to print an announcement.) 113 | 114 | These requirements apply to the modified work as a whole. If 115 | identifiable sections of that work are not derived from the Program, 116 | and can be reasonably considered independent and separate works in 117 | themselves, then this License, and its terms, do not apply to those 118 | sections when you distribute them as separate works. 
But when you 119 | distribute the same sections as part of a whole which is a work based 120 | on the Program, the distribution of the whole must be on the terms of 121 | this License, whose permissions for other licensees extend to the 122 | entire whole, and thus to each and every part regardless of who wrote it. 123 | 124 | Thus, it is not the intent of this section to claim rights or contest 125 | your rights to work written entirely by you; rather, the intent is to 126 | exercise the right to control the distribution of derivative or 127 | collective works based on the Program. 128 | 129 | In addition, mere aggregation of another work not based on the Program 130 | with the Program (or with a work based on the Program) on a volume of 131 | a storage or distribution medium does not bring the other work under 132 | the scope of this License. 133 | 134 | 3. You may copy and distribute the Program (or a work based on it, 135 | under Section 2) in object code or executable form under the terms of 136 | Sections 1 and 2 above provided that you also do one of the following: 137 | 138 | a) Accompany it with the complete corresponding machine-readable 139 | source code, which must be distributed under the terms of Sections 140 | 1 and 2 above on a medium customarily used for software interchange; or, 141 | 142 | b) Accompany it with a written offer, valid for at least three 143 | years, to give any third party, for a charge no more than your 144 | cost of physically performing source distribution, a complete 145 | machine-readable copy of the corresponding source code, to be 146 | distributed under the terms of Sections 1 and 2 above on a medium 147 | customarily used for software interchange; or, 148 | 149 | c) Accompany it with the information you received as to the offer 150 | to distribute corresponding source code. 
(This alternative is 151 | allowed only for noncommercial distribution and only if you 152 | received the program in object code or executable form with such 153 | an offer, in accord with Subsection b above.) 154 | 155 | The source code for a work means the preferred form of the work for 156 | making modifications to it. For an executable work, complete source 157 | code means all the source code for all modules it contains, plus any 158 | associated interface definition files, plus the scripts used to 159 | control compilation and installation of the executable. However, as a 160 | special exception, the source code distributed need not include 161 | anything that is normally distributed (in either source or binary 162 | form) with the major components (compiler, kernel, and so on) of the 163 | operating system on which the executable runs, unless that component 164 | itself accompanies the executable. 165 | 166 | If distribution of executable or object code is made by offering 167 | access to copy from a designated place, then offering equivalent 168 | access to copy the source code from the same place counts as 169 | distribution of the source code, even though third parties are not 170 | compelled to copy the source along with the object code. 171 | 172 | 4. You may not copy, modify, sublicense, or distribute the Program 173 | except as expressly provided under this License. Any attempt 174 | otherwise to copy, modify, sublicense or distribute the Program is 175 | void, and will automatically terminate your rights under this License. 176 | However, parties who have received copies, or rights, from you under 177 | this License will not have their licenses terminated so long as such 178 | parties remain in full compliance. 179 | 180 | 5. You are not required to accept this License, since you have not 181 | signed it. However, nothing else grants you permission to modify or 182 | distribute the Program or its derivative works. 
These actions are 183 | prohibited by law if you do not accept this License. Therefore, by 184 | modifying or distributing the Program (or any work based on the 185 | Program), you indicate your acceptance of this License to do so, and 186 | all its terms and conditions for copying, distributing or modifying 187 | the Program or works based on it. 188 | 189 | 6. Each time you redistribute the Program (or any work based on the 190 | Program), the recipient automatically receives a license from the 191 | original licensor to copy, distribute or modify the Program subject to 192 | these terms and conditions. You may not impose any further 193 | restrictions on the recipients' exercise of the rights granted herein. 194 | You are not responsible for enforcing compliance by third parties to 195 | this License. 196 | 197 | 7. If, as a consequence of a court judgment or allegation of patent 198 | infringement or for any other reason (not limited to patent issues), 199 | conditions are imposed on you (whether by court order, agreement or 200 | otherwise) that contradict the conditions of this License, they do not 201 | excuse you from the conditions of this License. If you cannot 202 | distribute so as to satisfy simultaneously your obligations under this 203 | License and any other pertinent obligations, then as a consequence you 204 | may not distribute the Program at all. For example, if a patent 205 | license would not permit royalty-free redistribution of the Program by 206 | all those who receive copies directly or indirectly through you, then 207 | the only way you could satisfy both it and this License would be to 208 | refrain entirely from distribution of the Program. 209 | 210 | If any portion of this section is held invalid or unenforceable under 211 | any particular circumstance, the balance of the section is intended to 212 | apply and the section as a whole is intended to apply in other 213 | circumstances. 
214 | 215 | It is not the purpose of this section to induce you to infringe any 216 | patents or other property right claims or to contest validity of any 217 | such claims; this section has the sole purpose of protecting the 218 | integrity of the free software distribution system, which is 219 | implemented by public license practices. Many people have made 220 | generous contributions to the wide range of software distributed 221 | through that system in reliance on consistent application of that 222 | system; it is up to the author/donor to decide if he or she is willing 223 | to distribute software through any other system and a licensee cannot 224 | impose that choice. 225 | 226 | This section is intended to make thoroughly clear what is believed to 227 | be a consequence of the rest of this License. 228 | 229 | 8. If the distribution and/or use of the Program is restricted in 230 | certain countries either by patents or by copyrighted interfaces, the 231 | original copyright holder who places the Program under this License 232 | may add an explicit geographical distribution limitation excluding 233 | those countries, so that distribution is permitted only in or among 234 | countries not thus excluded. In such case, this License incorporates 235 | the limitation as if written in the body of this License. 236 | 237 | 9. The Free Software Foundation may publish revised and/or new versions 238 | of the General Public License from time to time. Such new versions will 239 | be similar in spirit to the present version, but may differ in detail to 240 | address new problems or concerns. 241 | 242 | Each version is given a distinguishing version number. If the Program 243 | specifies a version number of this License which applies to it and "any 244 | later version", you have the option of following the terms and conditions 245 | either of that version or of any later version published by the Free 246 | Software Foundation. 
If the Program does not specify a version number of 247 | this License, you may choose any version ever published by the Free Software 248 | Foundation. 249 | 250 | 10. If you wish to incorporate parts of the Program into other free 251 | programs whose distribution conditions are different, write to the author 252 | to ask for permission. For software which is copyrighted by the Free 253 | Software Foundation, write to the Free Software Foundation; we sometimes 254 | make exceptions for this. Our decision will be guided by the two goals 255 | of preserving the free status of all derivatives of our free software and 256 | of promoting the sharing and reuse of software generally. 257 | 258 | NO WARRANTY 259 | 260 | 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY 261 | FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN 262 | OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES 263 | PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED 264 | OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF 265 | MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS 266 | TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE 267 | PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, 268 | REPAIR OR CORRECTION. 269 | 270 | 12. 
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING 271 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR 272 | REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, 273 | INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING 274 | OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED 275 | TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY 276 | YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER 277 | PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE 278 | POSSIBILITY OF SUCH DAMAGES. 279 | 280 | END OF TERMS AND CONDITIONS 281 | 282 | How to Apply These Terms to Your New Programs 283 | 284 | If you develop a new program, and you want it to be of the greatest 285 | possible use to the public, the best way to achieve this is to make it 286 | free software which everyone can redistribute and change under these terms. 287 | 288 | To do so, attach the following notices to the program. It is safest 289 | to attach them to the start of each source file to most effectively 290 | convey the exclusion of warranty; and each file should have at least 291 | the "copyright" line and a pointer to where the full notice is found. 292 | 293 | 294 | Copyright (C) 295 | 296 | This program is free software; you can redistribute it and/or modify 297 | it under the terms of the GNU General Public License as published by 298 | the Free Software Foundation; either version 2 of the License, or 299 | (at your option) any later version. 300 | 301 | This program is distributed in the hope that it will be useful, 302 | but WITHOUT ANY WARRANTY; without even the implied warranty of 303 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 304 | GNU General Public License for more details. 
305 | 306 | You should have received a copy of the GNU General Public License along 307 | with this program; if not, write to the Free Software Foundation, Inc., 308 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 309 | 310 | Also add information on how to contact you by electronic and paper mail. 311 | 312 | If the program is interactive, make it output a short notice like this 313 | when it starts in an interactive mode: 314 | 315 | Gnomovision version 69, Copyright (C) year name of author 316 | Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. 317 | This is free software, and you are welcome to redistribute it 318 | under certain conditions; type `show c' for details. 319 | 320 | The hypothetical commands `show w' and `show c' should show the appropriate 321 | parts of the General Public License. Of course, the commands you use may 322 | be called something other than `show w' and `show c'; they could even be 323 | mouse-clicks or menu items--whatever suits your program. 324 | 325 | You should also get your employer (if you work as a programmer) or your 326 | school, if any, to sign a "copyright disclaimer" for the program, if 327 | necessary. Here is a sample; alter the names: 328 | 329 | Yoyodyne, Inc., hereby disclaims all copyright interest in the program 330 | `Gnomovision' (which makes passes at compilers) written by James Hacker. 331 | 332 | , 1 April 1989 333 | Ty Coon, President of Vice 334 | 335 | This General Public License does not permit incorporating your program into 336 | proprietary programs. If your program is a subroutine library, you may 337 | consider it more useful to permit linking proprietary applications with the 338 | library. If this is what you want to do, use the GNU Lesser General 339 | Public License instead of this License. 340 | --------------------------------------------------------------------------------