├── yb ├── src │ ├── core │ │ ├── mod.rs │ │ └── tool_context.rs │ ├── ops │ │ ├── mod.rs │ │ ├── update_stream.rs │ │ └── add_stream.rs │ ├── ui_ops │ │ ├── mod.rs │ │ ├── check_broken_streams.rs │ │ └── update_stream.rs │ ├── errors.rs │ ├── data_model │ │ ├── mod.rs │ │ ├── yocto.rs │ │ ├── git.rs │ │ └── status.rs │ ├── util │ │ ├── mod.rs │ │ ├── debug_temp_dir.rs │ │ ├── indicatif.rs │ │ ├── paths.rs │ │ └── git │ │ │ └── mod.rs │ ├── commands │ │ ├── stream │ │ │ ├── mod.rs │ │ │ ├── update.rs │ │ │ ├── add.rs │ │ │ └── list.rs │ │ ├── sync │ │ │ ├── actions │ │ │ │ ├── mod.rs │ │ │ │ ├── bblayers.rs │ │ │ │ └── basic.rs │ │ │ └── mod.rs │ │ ├── list.rs │ │ ├── self_update.rs │ │ ├── mod.rs │ │ ├── activate.rs │ │ ├── run.rs │ │ ├── upgrade.rs │ │ ├── init.rs │ │ └── twice_bake.rs │ ├── lib.rs │ ├── config.rs │ ├── yb_options.rs │ ├── status_calculator │ │ ├── bblayers_manager.rs │ │ └── mod.rs │ ├── yb_conf.rs │ ├── main.rs │ ├── stream_db.rs │ ├── spec.rs │ ├── stream.rs │ └── yb_env.rs └── Cargo.toml ├── .cargo └── config.toml ├── images ├── yb.0.0.11.run.show.branch.gif ├── yb.0.0.11.status.vanilla.gif ├── yb.0.0.11.sync.and.status.gif ├── yb.0.0.11.status.missing.repo.gif └── yb.0.0.11.status.vanilla.skip.unremarkable.gif ├── .idea ├── misc.xml ├── codeStyles │ ├── codeStyleConfig.xml │ └── Project.xml ├── vcs.xml ├── .gitignore ├── modules.xml └── yb.iml ├── Cargo.toml ├── run_tests.sh ├── concurrent_git_pool ├── src │ ├── lib.rs │ ├── service.rs │ ├── bin │ │ ├── client.rs │ │ └── server.rs │ ├── error.rs │ ├── server.rs │ ├── client.rs │ ├── pool_helper.rs │ └── pool.rs └── Cargo.toml ├── yb_tests ├── tests │ ├── common │ │ └── mod.rs │ ├── resources │ │ └── confs │ │ │ └── basic.yaml │ ├── environment_tests.rs │ └── integration_tests.rs └── Cargo.toml ├── concurrent_git_pool_proc_macros ├── Cargo.toml └── src │ └── lib.rs ├── LICENSE ├── .gitignore ├── yb-support.bbclass └── README.md /yb/src/core/mod.rs: 
-------------------------------------------------------------------------------- 1 | pub mod tool_context; 2 | -------------------------------------------------------------------------------- /yb/src/ops/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod add_stream; 2 | pub mod update_stream; 3 | -------------------------------------------------------------------------------- /.cargo/config.toml: -------------------------------------------------------------------------------- 1 | [build] 2 | target = "x86_64-unknown-linux-musl" 3 | 4 | -------------------------------------------------------------------------------- /yb/src/ui_ops/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod check_broken_streams; 2 | pub mod update_stream; 3 | -------------------------------------------------------------------------------- /yb/src/errors.rs: -------------------------------------------------------------------------------- 1 | use color_eyre::eyre; 2 | 3 | pub type YbResult = eyre::Result; 4 | -------------------------------------------------------------------------------- /yb/src/data_model/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod git; 2 | pub mod status; 3 | pub mod yocto; 4 | 5 | pub use yocto::*; 6 | -------------------------------------------------------------------------------- /images/yb.0.0.11.run.show.branch.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Agilent/yb/HEAD/images/yb.0.0.11.run.show.branch.gif -------------------------------------------------------------------------------- /images/yb.0.0.11.status.vanilla.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Agilent/yb/HEAD/images/yb.0.0.11.status.vanilla.gif 
-------------------------------------------------------------------------------- /images/yb.0.0.11.sync.and.status.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Agilent/yb/HEAD/images/yb.0.0.11.sync.and.status.gif -------------------------------------------------------------------------------- /images/yb.0.0.11.status.missing.repo.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Agilent/yb/HEAD/images/yb.0.0.11.status.missing.repo.gif -------------------------------------------------------------------------------- /images/yb.0.0.11.status.vanilla.skip.unremarkable.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Agilent/yb/HEAD/images/yb.0.0.11.status.vanilla.skip.unremarkable.gif -------------------------------------------------------------------------------- /.idea/misc.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | resolver = "2" 3 | 4 | members = [ 5 | "concurrent_git_pool", 6 | "concurrent_git_pool_proc_macros", 7 | "yb", 8 | "yb_tests", 9 | ] 10 | -------------------------------------------------------------------------------- /.idea/codeStyles/codeStyleConfig.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 5 | -------------------------------------------------------------------------------- /.idea/vcs.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | -------------------------------------------------------------------------------- /.idea/.gitignore: 
-------------------------------------------------------------------------------- 1 | # Default ignored files 2 | /shelf/ 3 | /workspace.xml 4 | # Editor-based HTTP Client requests 5 | /httpRequests/ 6 | # Datasource local storage ignored files 7 | /dataSources/ 8 | /dataSources.local.xml 9 | -------------------------------------------------------------------------------- /yb/src/data_model/yocto.rs: -------------------------------------------------------------------------------- 1 | use serde::Serialize; 2 | use std::path::PathBuf; 3 | 4 | #[derive(Debug, Hash, PartialEq, Eq, Clone, Serialize)] 5 | pub struct Layer { 6 | pub path: PathBuf, 7 | pub name: String, 8 | } 9 | -------------------------------------------------------------------------------- /run_tests.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | cargo build || exit 4 | 5 | trap 'kill $(jobs -p) 2>/dev/null' EXIT 6 | 7 | PORT=12345 8 | cargo run --bin=concurrent_git_pool -- --port "$PORT" 1>out.txt 2>&1 & 9 | 10 | CONCURRENT_GIT_POOL="127.0.0.1:$PORT" cargo test --all-features 11 | -------------------------------------------------------------------------------- /.idea/modules.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | -------------------------------------------------------------------------------- /concurrent_git_pool/src/lib.rs: -------------------------------------------------------------------------------- 1 | pub mod client; 2 | mod error; 3 | pub mod pool; 4 | pub mod pool_helper; 5 | pub mod server; 6 | pub mod service; 7 | 8 | pub use client::Client; 9 | pub use error::{ServiceError, ServiceResult}; 10 | 11 | pub use pool_helper::PoolHelper; 12 | 13 | pub use tarpc::client::RpcError; 14 | -------------------------------------------------------------------------------- /.idea/codeStyles/Project.xml: 
-------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 6 | 7 | 9 | 10 | -------------------------------------------------------------------------------- /yb/src/util/mod.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashSet; 2 | use std::hash::Hash; 3 | 4 | pub mod debug_temp_dir; 5 | pub mod git; 6 | pub mod indicatif; 7 | pub mod paths; 8 | 9 | // https://stackoverflow.com/a/46767732 10 | pub fn has_unique_elements(iter: T) -> bool 11 | where 12 | T: IntoIterator, 13 | T::Item: Eq + Hash, 14 | { 15 | let mut uniq = HashSet::new(); 16 | iter.into_iter().all(move |x| uniq.insert(x)) 17 | } 18 | -------------------------------------------------------------------------------- /yb_tests/tests/common/mod.rs: -------------------------------------------------------------------------------- 1 | use assert_cmd::Command; 2 | use std::path::Path; 3 | pub use yb::util::debug_temp_dir::DebugTempDir; 4 | 5 | pub fn yb_cmd>(cwd: P) -> Command { 6 | let mut ret = Command::cargo_bin("yb").unwrap(); 7 | ret.current_dir(cwd).env_clear().env("NO_COLOR", "1"); 8 | if let Ok(var) = std::env::var("CONCURRENT_GIT_POOL") { 9 | ret.env("CONCURRENT_GIT_POOL", var); 10 | } 11 | ret 12 | } 13 | -------------------------------------------------------------------------------- /yb/src/commands/stream/mod.rs: -------------------------------------------------------------------------------- 1 | use enum_dispatch::enum_dispatch; 2 | 3 | pub use add::StreamAddCommand; 4 | pub use list::StreamListCommand; 5 | pub use update::StreamUpdateCommand; 6 | 7 | mod add; 8 | mod list; 9 | mod update; 10 | 11 | #[enum_dispatch(SubcommandRunner)] 12 | #[derive(Debug, clap::Subcommand)] 13 | pub enum StreamSubcommands { 14 | Add(StreamAddCommand), 15 | List(StreamListCommand), 16 | Update(StreamUpdateCommand), 17 | } 18 | -------------------------------------------------------------------------------- 
/concurrent_git_pool/src/service.rs: -------------------------------------------------------------------------------- 1 | use crate::error::ServiceResult; 2 | use std::path::PathBuf; 3 | 4 | #[tarpc::service] 5 | pub trait Service { 6 | async fn lookup_or_clone(uri: String) -> ServiceResult; 7 | async fn lookup(uri: String) -> Option>; 8 | async fn clone_in( 9 | uri: String, 10 | parent_dir: Option, 11 | directory: Option, 12 | ) -> ServiceResult<()>; 13 | } 14 | -------------------------------------------------------------------------------- /yb/src/commands/sync/actions/mod.rs: -------------------------------------------------------------------------------- 1 | use async_trait::async_trait; 2 | use std::fmt::Debug; 3 | 4 | pub(crate) use basic::*; 5 | pub(crate) use bblayers::*; 6 | 7 | use crate::errors::YbResult; 8 | use concurrent_git_pool::PoolHelper; 9 | 10 | pub mod basic; 11 | pub mod bblayers; 12 | 13 | #[async_trait] 14 | pub trait SyncAction: Debug + Send + Sync { 15 | fn is_force_required(&self) -> bool; 16 | async fn apply(&self, pool: &PoolHelper) -> YbResult<()>; 17 | } 18 | -------------------------------------------------------------------------------- /concurrent_git_pool_proc_macros/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "concurrent_git_pool_proc_macros" 3 | version = "0.1.0" 4 | edition = "2021" 5 | 6 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 7 | 8 | [lib] 9 | proc-macro = true 10 | 11 | [dependencies] 12 | syn = { version = "2", features = ["full"] } 13 | quote = "1" 14 | proc-macro2 = "1" 15 | tokio = "1" 16 | 17 | [dev-dependencies] 18 | concurrent_git_pool = { path = "../concurrent_git_pool" } 19 | -------------------------------------------------------------------------------- /concurrent_git_pool/src/bin/client.rs: -------------------------------------------------------------------------------- 1 | use 
concurrent_git_pool::client::Client; 2 | 3 | #[tokio::main] 4 | async fn main() -> anyhow::Result<()> { 5 | let client = Client::connect("127.0.0.1:12345").await?; 6 | 7 | let p1 = client.lookup_or_clone("https://github.com/console-rs/indicatif.git"); 8 | let p2 = client.lookup_or_clone("https://github.com/yoctoproject/poky.git"); 9 | 10 | let ret = tokio::join!(p1, p2); 11 | dbg!(ret); 12 | 13 | eprintln!("DONE"); 14 | 15 | Ok(()) 16 | } 17 | -------------------------------------------------------------------------------- /yb_tests/tests/resources/confs/basic.yaml: -------------------------------------------------------------------------------- 1 | header: 2 | version: 1 3 | name: "zeus" 4 | 5 | repos: 6 | poky: 7 | url: "https://github.com/yoctoproject/poky.git" 8 | refspec: "zeus" 9 | layers: 10 | meta: 11 | meta-poky: 12 | 13 | meta-openembedded: 14 | url: "https://github.com/openembedded/meta-openembedded.git" 15 | refspec: "zeus" 16 | layers: 17 | meta-networking: 18 | meta-python: 19 | meta-filesystems: 20 | meta-webserver: 21 | meta-oe: 22 | -------------------------------------------------------------------------------- /yb_tests/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "yb-tests" 3 | version = "0.1.0" 4 | edition = "2021" 5 | 6 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 7 | 8 | [dependencies] 9 | assert_cmd = "2" 10 | color-eyre = "0.6.3" 11 | tokio = "1" 12 | yb = { path = "../yb" } 13 | concurrent_git_pool = { path = "../concurrent_git_pool" } 14 | concurrent_git_pool_proc_macros = { path = "../concurrent_git_pool_proc_macros" } 15 | 16 | [package.metadata.cargo-machete] 17 | ignored = ["concurrent_git_pool"] -------------------------------------------------------------------------------- /concurrent_git_pool/src/error.rs: -------------------------------------------------------------------------------- 1 | use 
serde::{Deserialize, Serialize}; 2 | use std::io; 3 | use thiserror::Error; 4 | 5 | #[derive(Clone, Debug, Error, Serialize, Deserialize)] 6 | pub enum ServiceError { 7 | #[error("The git clone operation failed: {}", .0)] 8 | CloneFailed(String), 9 | #[error("IO error encountered: {}", .0)] 10 | IoError(String), 11 | } 12 | 13 | impl From for ServiceError { 14 | fn from(e: io::Error) -> Self { 15 | Self::IoError(format!("{e}")) 16 | } 17 | } 18 | 19 | pub type ServiceResult = Result; 20 | -------------------------------------------------------------------------------- /yb/src/lib.rs: -------------------------------------------------------------------------------- 1 | #![feature(result_flattening)] 2 | #![feature(entry_insert)] 3 | #![feature(assert_matches)] 4 | #![feature(try_find)] 5 | #![feature(async_closure)] 6 | #![feature(exit_status_error)] 7 | 8 | pub use config::Config; 9 | 10 | pub const VERSION: &str = env!("CARGO_PKG_VERSION"); 11 | 12 | pub mod commands; 13 | pub mod config; 14 | pub mod core; 15 | pub mod data_model; 16 | pub mod errors; 17 | pub mod ops; 18 | pub mod spec; 19 | pub mod status_calculator; 20 | pub mod stream; 21 | pub mod stream_db; 22 | pub mod ui_ops; 23 | pub mod util; 24 | pub mod yb_conf; 25 | pub mod yb_env; 26 | pub mod yb_options; 27 | -------------------------------------------------------------------------------- /concurrent_git_pool/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "concurrent_git_pool" 3 | version = "0.1.0" 4 | edition = "2021" 5 | 6 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 7 | 8 | [dependencies] 9 | anyhow = "1" 10 | clap = { version = "4", features = ["derive"] } 11 | futures = "0.3.30" 12 | serde = { version = "1", features = ["derive"] } 13 | sha2 = "0.10.8" 14 | tempfile = "3" 15 | thiserror = "1" 16 | tokio = { version = "1", features = ["full"] } 17 | tarpc = { version = 
"0.33.0", features = ["full"] } 18 | 19 | [[bin]] 20 | name = "concurrent_git_pool" 21 | path = "src/bin/server.rs" 22 | -------------------------------------------------------------------------------- /yb/src/commands/stream/update.rs: -------------------------------------------------------------------------------- 1 | use async_trait::async_trait; 2 | use indicatif::MultiProgress; 3 | 4 | use crate::commands::SubcommandRunner; 5 | use crate::errors::YbResult; 6 | use crate::ui_ops::update_stream::{ui_op_update_stream, UiUpdateStreamOptions}; 7 | use crate::Config; 8 | 9 | #[derive(Debug, clap::Parser)] 10 | pub struct StreamUpdateCommand {} 11 | 12 | #[async_trait] 13 | impl SubcommandRunner for StreamUpdateCommand { 14 | async fn run(&self, config: &mut Config, mp: &MultiProgress) -> YbResult<()> { 15 | let mut update_stream_opts = UiUpdateStreamOptions::new(config, mp); 16 | update_stream_opts.fail_if_no_yb_env(true); 17 | ui_op_update_stream(update_stream_opts) 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /yb/src/config.rs: -------------------------------------------------------------------------------- 1 | use std::path::PathBuf; 2 | 3 | use crate::yb_options::YbOptions; 4 | 5 | /// Application-scope context 6 | #[derive(Debug, Clone)] 7 | pub struct Config { 8 | /// The current working directory 9 | pub(crate) cwd: PathBuf, 10 | pub(crate) porcelain: bool, 11 | } 12 | 13 | impl Config { 14 | pub fn new(cwd: PathBuf, options: &YbOptions) -> Config { 15 | Config { 16 | cwd, 17 | porcelain: options.porcelain, 18 | } 19 | } 20 | 21 | pub fn cwd(&self) -> &PathBuf { 22 | &self.cwd 23 | } 24 | 25 | pub fn clone_with_cwd(&self, cwd: PathBuf) -> Config { 26 | Config { 27 | cwd, 28 | ..self.clone() 29 | } 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /yb/src/commands/stream/add.rs: 
-------------------------------------------------------------------------------- 1 | use async_trait::async_trait; 2 | use indicatif::MultiProgress; 3 | 4 | use crate::commands::SubcommandRunner; 5 | use crate::errors::YbResult; 6 | use crate::ops::add_stream::{op_add_stream, AddStreamOptions}; 7 | use crate::Config; 8 | 9 | #[derive(Debug, clap::Parser)] 10 | pub struct StreamAddCommand { 11 | #[clap()] 12 | uri: String, 13 | 14 | #[clap(long, short)] 15 | name: Option, 16 | } 17 | 18 | #[async_trait] 19 | impl SubcommandRunner for StreamAddCommand { 20 | async fn run(&self, config: &mut Config, _mp: &MultiProgress) -> YbResult<()> { 21 | let mut add_stream_opts = AddStreamOptions::new(config); 22 | add_stream_opts.name(self.name.clone()); 23 | add_stream_opts.uri(self.uri.clone()); 24 | op_add_stream(add_stream_opts) 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /yb/src/commands/list.rs: -------------------------------------------------------------------------------- 1 | use async_trait::async_trait; 2 | use indicatif::MultiProgress; 3 | 4 | use crate::commands::SubcommandRunner; 5 | use crate::config::Config; 6 | use crate::core::tool_context::require_yb_env; 7 | use crate::errors::YbResult; 8 | 9 | /// List the available specs 10 | #[derive(Debug, clap::Parser)] 11 | pub struct ListCommand {} 12 | 13 | #[async_trait] 14 | impl SubcommandRunner for ListCommand { 15 | async fn run(&self, config: &mut Config, _mp: &MultiProgress) -> YbResult<()> { 16 | let yb_env = require_yb_env(config)?; 17 | 18 | for stream in yb_env.stream_db().streams() { 19 | println!("{}:", stream.1.name()); 20 | 21 | if let Some(reason) = &stream.1.broken_reason() { 22 | println!("\tstream is broken: {reason:?}"); 23 | } else { 24 | for spec in stream.1.specs() { 25 | println!("\t{}", spec.0); 26 | } 27 | } 28 | } 29 | 30 | Ok(()) 31 | } 32 | } 33 | -------------------------------------------------------------------------------- 
/yb/src/commands/stream/list.rs: -------------------------------------------------------------------------------- 1 | use async_trait::async_trait; 2 | use indicatif::MultiProgress; 3 | 4 | use crate::commands::SubcommandRunner; 5 | use crate::core::tool_context::require_yb_env; 6 | use crate::errors::YbResult; 7 | use crate::util::paths::list_subdirectories_sorted; 8 | use crate::Config; 9 | 10 | #[derive(Debug, clap::Parser)] 11 | pub struct StreamListCommand {} 12 | 13 | #[async_trait] 14 | impl SubcommandRunner for StreamListCommand { 15 | async fn run(&self, config: &mut Config, _mp: &MultiProgress) -> YbResult<()> { 16 | let yb_env = require_yb_env(config)?; 17 | 18 | // TODO: use stream DB instead of iterating through dir 19 | let streams_dir = yb_env.streams_dir(); 20 | 21 | if streams_dir.exists() { 22 | println!(); 23 | let streams = list_subdirectories_sorted(&streams_dir)?; 24 | for stream in streams { 25 | println!("{}", stream.file_name().unwrap().to_str().unwrap()); 26 | } 27 | } 28 | 29 | Ok(()) 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /concurrent_git_pool/src/server.rs: -------------------------------------------------------------------------------- 1 | use crate::error::ServiceResult; 2 | use crate::pool::Pool; 3 | use crate::service::Service; 4 | use std::path::PathBuf; 5 | use std::sync::Arc; 6 | use tarpc::context::Context; 7 | 8 | #[derive(Clone)] 9 | pub struct Server { 10 | cache: Arc, 11 | } 12 | 13 | #[tarpc::server] 14 | impl Service for Server { 15 | async fn lookup_or_clone(self, _: Context, uri: String) -> ServiceResult { 16 | self.cache.lookup_or_clone(uri).await 17 | } 18 | 19 | async fn lookup(self, _: Context, uri: String) -> Option> { 20 | self.cache.lookup(uri).await 21 | } 22 | 23 | async fn clone_in( 24 | self, 25 | _: Context, 26 | uri: String, 27 | parent_dir: Option, 28 | directory: Option, 29 | ) -> ServiceResult<()> { 30 | self.cache.clone_in(parent_dir, uri, 
directory).await 31 | } 32 | } 33 | 34 | impl Server { 35 | pub fn new(cache: Arc) -> Self { 36 | Self { cache } 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright 2022-2024 Agilent Technologies, Inc. 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 6 | 7 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 8 | 9 | -------------------------------------------------------------------------------- /yb_tests/tests/environment_tests.rs: -------------------------------------------------------------------------------- 1 | use crate::common::yb_cmd; 2 | use crate::common::DebugTempDir; 3 | use color_eyre::eyre::Result; 4 | use concurrent_git_pool_proc_macros::clone_repos; 5 | use std::fs; 6 | 7 | mod common; 8 | 9 | #[tokio::test] 10 | async fn bare_poky_not_supported() -> Result<()> { 11 | let t = DebugTempDir::new()?; 12 | let path = t.path(); 13 | 14 | clone_repos! 
{ 15 | "https://github.com/yoctoproject/poky.git" in &path, 16 | } 17 | 18 | let poky_dir = path.join("poky"); 19 | let build_dir = poky_dir.join("build"); 20 | fs::create_dir(&build_dir)?; 21 | 22 | let path_var = std::env::var("PATH").unwrap(); 23 | let path_var = format!( 24 | "{}:{}:{}", 25 | poky_dir.join("scripts").to_str().unwrap(), 26 | poky_dir.join("bitbake").join("bin").to_str().unwrap(), 27 | path_var 28 | ); 29 | 30 | yb_cmd(poky_dir) 31 | .arg("upgrade") 32 | .env("PATH", path_var) 33 | .env("BBPATH", build_dir.to_str().unwrap()) 34 | .assert() 35 | .failure(); 36 | 37 | Ok(()) 38 | } 39 | -------------------------------------------------------------------------------- /yb/src/yb_options.rs: -------------------------------------------------------------------------------- 1 | use crate::commands::Subcommands; 2 | use crate::VERSION; 3 | 4 | #[derive(clap::Parser, Debug)] 5 | #[clap(name = "yb", about = "Yocto buddy", version = VERSION)] 6 | pub struct YbOptions { 7 | /// Set log level 8 | #[clap(short = 'v', long, global = true, value_enum, default_value = "warn")] 9 | pub level: Level, 10 | 11 | /// Coloring: auto, always, never 12 | #[clap(long, global = true)] 13 | pub color: Option, 14 | 15 | #[clap(long, global = true)] 16 | pub porcelain: bool, 17 | 18 | #[clap(subcommand)] 19 | pub command: Subcommands, 20 | } 21 | 22 | #[derive(clap::ValueEnum, Clone, Debug, Copy)] 23 | pub enum Level { 24 | Error, 25 | Warn, 26 | Info, 27 | Debug, 28 | Trace, 29 | } 30 | 31 | impl From for tracing::Level { 32 | fn from(level: Level) -> Self { 33 | match level { 34 | Level::Error => tracing::Level::ERROR, 35 | Level::Warn => tracing::Level::WARN, 36 | Level::Info => tracing::Level::INFO, 37 | Level::Debug => tracing::Level::DEBUG, 38 | Level::Trace => tracing::Level::TRACE, 39 | } 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /yb/src/commands/self_update.rs: 
-------------------------------------------------------------------------------- 1 | use async_trait::async_trait; 2 | use self_update::{cargo_crate_version, Status}; 3 | 4 | use indicatif::MultiProgress; 5 | 6 | use crate::commands::SubcommandRunner; 7 | use crate::errors::YbResult; 8 | use crate::util::indicatif::MultiProgressHelpers; 9 | use crate::Config; 10 | 11 | /// Automatically download the latest version of yb 12 | #[derive(Debug, clap::Parser)] 13 | pub struct SelfUpdateCommand {} 14 | 15 | #[async_trait] 16 | impl SubcommandRunner for SelfUpdateCommand { 17 | async fn run(&self, _config: &mut Config, mp: &MultiProgress) -> YbResult<()> { 18 | let mp = mp.clone(); 19 | tokio::task::spawn_blocking(move || { 20 | let status = self_update::backends::github::Update::configure() 21 | .repo_owner("Agilent") 22 | .repo_name("yb") 23 | .bin_name("yb") 24 | .show_download_progress(true) 25 | .current_version(cargo_crate_version!()) 26 | .build()? 27 | .update()?; 28 | 29 | match status { 30 | Status::UpToDate(v) => mp.note(format!("Version {v} is up-to-date!")), 31 | Status::Updated(v) => mp.note(format!("Updated to version {v}")), 32 | } 33 | 34 | Ok(()) 35 | }) 36 | .await 37 | .unwrap() 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /yb/src/commands/mod.rs: -------------------------------------------------------------------------------- 1 | use async_trait::async_trait; 2 | use enum_dispatch::enum_dispatch; 3 | use indicatif::MultiProgress; 4 | 5 | use crate::commands::activate::ActivateCommand; 6 | use crate::commands::init::InitCommand; 7 | use crate::commands::list::ListCommand; 8 | use crate::commands::run::RunCommand; 9 | use crate::commands::self_update::SelfUpdateCommand; 10 | use crate::commands::status::*; 11 | use crate::commands::stream::{ 12 | StreamAddCommand, StreamListCommand, StreamSubcommands, StreamUpdateCommand, 13 | }; 14 | use crate::commands::sync::SyncCommand; 15 | use 
crate::commands::twice_bake::TwiceBakeCommand; 16 | use crate::commands::upgrade::UpgradeCommand; 17 | use crate::errors::YbResult; 18 | use crate::Config; 19 | 20 | mod activate; 21 | mod init; 22 | mod list; 23 | mod run; 24 | mod self_update; 25 | pub mod status; 26 | mod stream; 27 | mod sync; 28 | mod twice_bake; 29 | mod upgrade; 30 | 31 | #[async_trait] 32 | #[enum_dispatch] 33 | pub trait SubcommandRunner { 34 | async fn run(&self, config: &mut Config, mp: &MultiProgress) -> YbResult<()>; 35 | } 36 | 37 | #[enum_dispatch(SubcommandRunner)] 38 | #[derive(Debug, clap::Parser)] 39 | pub enum Subcommands { 40 | Init(InitCommand), 41 | Run(RunCommand), 42 | SelfUpdate(SelfUpdateCommand), 43 | Status(StatusCommand), 44 | #[clap(subcommand)] 45 | Stream(StreamSubcommands), 46 | Activate(ActivateCommand), 47 | Sync(SyncCommand), 48 | TwiceBake(TwiceBakeCommand), 49 | List(ListCommand), 50 | Upgrade(UpgradeCommand), 51 | } 52 | -------------------------------------------------------------------------------- /yb/src/commands/activate.rs: -------------------------------------------------------------------------------- 1 | use async_trait::async_trait; 2 | use indicatif::MultiProgress; 3 | 4 | use crate::commands::SubcommandRunner; 5 | use crate::config::Config; 6 | use crate::core::tool_context::require_yb_env; 7 | use crate::errors::YbResult; 8 | use crate::ui_ops::check_broken_streams::{ 9 | ui_op_check_broken_streams, UiCheckBrokenStreamsOptions, 10 | }; 11 | use crate::util::indicatif::MultiProgressHelpers; 12 | use crate::yb_env::YbEnv; 13 | 14 | /// Make the given spec active, but don't actually sync anything 15 | #[derive(Debug, clap::Parser)] 16 | pub struct ActivateCommand { 17 | /// Name of the spec to activate 18 | spec: String, 19 | } 20 | 21 | #[async_trait] 22 | impl SubcommandRunner for ActivateCommand { 23 | async fn run(&self, config: &mut Config, mp: &MultiProgress) -> YbResult<()> { 24 | 
ui_op_check_broken_streams(UiCheckBrokenStreamsOptions::new(config, mp))?; 25 | 26 | let mut yb_env = require_yb_env(config)?; 27 | 28 | if yb_env.stream_db().is_empty() { 29 | mp.warn("couldn't activate a spec because there are no streams"); 30 | mp.warn("use 'yb stream add' first"); 31 | panic!(); 32 | } 33 | 34 | activate_spec(&mut yb_env, &self.spec) 35 | } 36 | } 37 | 38 | pub fn activate_spec(yb_env: &mut YbEnv, name: &str) -> YbResult<()> { 39 | let spec = yb_env.find_spec(name)?.cloned(); 40 | if let Some(spec) = spec { 41 | // TODO don't clone 42 | yb_env.activate_spec(spec)?; 43 | println!("Activated spec '{}'", &name); 44 | } else { 45 | eyre::bail!("spec with name '{}' not found", &name); 46 | } 47 | 48 | Ok(()) 49 | } 50 | -------------------------------------------------------------------------------- /.idea/yb.iml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | -------------------------------------------------------------------------------- /yb/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "yb" 3 | version = "0.0.14" 4 | authors = ["Chris Laplante "] 5 | edition = "2018" 6 | 7 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 8 | 9 | [dependencies] 10 | bytebraise = { version = "0.1.3" } 11 | concurrent_git_pool = { path = "../concurrent_git_pool" } 12 | assert_cmd = "2" 13 | async-trait = "0.1.81" 14 | clap = { version = "3", features = ["derive"] } 15 | color-eyre = "0.6.3" 16 | color-spantrace = "0.2.1" 17 | console = "0.15.8" 18 | coredump = "0.1.2" 19 | dialoguer = "0.11.0" 20 | enum_dispatch = "0.3.13" 21 | eyre = "0.6.12" 22 | git2 = "0.19.0" 23 | glob = "0.3.1" 24 | indoc = "2.0.5" 25 | indicatif = { version = "0.17.8", features = ["improved_unicode"] } 26 | itertools = 
"0.13.0" 27 | lazy_static = "1" 28 | maplit = "1" 29 | multi_index_map = "0.11.0" 30 | openssl-sys = { version = "0.9.103", features = ["vendored"] } 31 | pathdiff = "0.2.1" 32 | self_update = { version = "0.41.0", features = ["archive-tar", "compression-flate2"] } 33 | serde = { version = "1", features = ["derive"] } 34 | serde_json = "1" 35 | serde_yaml = "0.9.34" 36 | slotmap = "1" 37 | tempfile = "3" 38 | time = { version = "0.3.36", features = ["formatting", "macros", "local-offset"] } 39 | tokio = { version = "1", features = ["full"] } 40 | tokio-stream = { version = "0.1.16", features = ["io-util"] } 41 | tracing = "0.1" 42 | tracing-error = "0.2.0" 43 | tracing-subscriber = { version = "0.3", features = ["env-filter"] } 44 | walkdir = "2" 45 | serde_with = { version = "3.9.0", features = ["macros"] } 46 | 47 | [dev-dependencies] 48 | assert_cmd = "2" 49 | 50 | [package.metadata.cargo-machete] 51 | ignored = ["openssl-sys"] 52 | -------------------------------------------------------------------------------- /yb/src/status_calculator/bblayers_manager.rs: -------------------------------------------------------------------------------- 1 | use eyre::WrapErr; 2 | use std::collections::HashSet; 3 | use std::fs::File; 4 | use std::io::Read; 5 | use std::path::PathBuf; 6 | 7 | use crate::data_model::Layer; 8 | use bytebraise::data_smart::variable_contents::VariableContentsAccessors; 9 | use bytebraise::data_smart::DataSmart; 10 | use bytebraise::parser::parse_bitbake_from_str; 11 | use bytebraise::syntax::ast::evaluate::Evaluate; 12 | use bytebraise::syntax::ast::AstNode; 13 | 14 | use crate::errors::YbResult; 15 | use crate::util::paths::normalize_path; 16 | 17 | pub struct BBLayersManager {} 18 | 19 | impl BBLayersManager {} 20 | 21 | pub fn read_bblayers(build_dir: &PathBuf) -> YbResult> { 22 | let bblayers = build_dir.join("conf").join("bblayers.conf"); 23 | 24 | if bblayers.is_file() { 25 | let mut source = String::new(); 26 | File::open(&bblayers) 27 | 
.with_context(|| format!("failed to read {:?}", &bblayers))? 28 | .read_to_string(&mut source)?; 29 | let res = parse_bitbake_from_str(&source).clone_for_update(); 30 | let d = DataSmart::new(); 31 | res.evaluate(&d).unwrap(); 32 | //TODO .with_context(|| format!("failed to evaluate AST for {:?}", &bblayers))?; 33 | 34 | Ok(d.get_var("BBLAYERS") 35 | .unwrap() 36 | .as_string_or_empty() 37 | .split_whitespace() 38 | .map(|l| { 39 | let path = normalize_path(l); 40 | Layer { 41 | path: path.clone(), 42 | name: path.file_name().unwrap().to_str().unwrap().to_string(), 43 | } 44 | }) 45 | .collect::>()) 46 | } else { 47 | Ok(HashSet::new()) 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /concurrent_git_pool/src/bin/server.rs: -------------------------------------------------------------------------------- 1 | use clap::Parser; 2 | use concurrent_git_pool::pool::Pool; 3 | use concurrent_git_pool::server::Server; 4 | use concurrent_git_pool::service::Service; 5 | use futures::{future, prelude::*}; 6 | use std::io; 7 | use std::sync::Arc; 8 | use tarpc::{ 9 | server::{self, Channel}, 10 | tokio_serde::formats::Json, 11 | }; 12 | use tokio::signal; 13 | 14 | #[derive(Parser)] 15 | struct Args { 16 | #[arg(short, long)] 17 | port: u16, 18 | } 19 | 20 | #[tokio::main] 21 | async fn main() -> io::Result<()> { 22 | let args = Args::parse(); 23 | 24 | let cache = Arc::new(Pool::new()); 25 | 26 | // JSON transport is provided by the json_transport tarpc module. It makes it easy 27 | // to start up a serde-powered json serialization strategy over TCP. 28 | let mut listener = 29 | tarpc::serde_transport::tcp::listen(format!("127.0.0.1:{}", args.port), Json::default) 30 | .await?; 31 | listener.config_mut().max_frame_length(usize::MAX); 32 | let server = listener 33 | // Ignore accept errors. 
use ::std::io::Result;
use ::std::path::{Path, PathBuf};

use tempfile::{Builder, TempDir};

// Based on https://gist.github.com/ExpHP/facc0dcbf4399aac7af87dcebae03f7c

/// Wrapper around [`TempDir`] that retains (leaks) the temporary directory
/// when the current thread is panicking, so its contents can be inspected
/// after a failed run.
///
/// The inner `Option` is `None` only after the directory has been consumed by
/// `into_path`/`close`, or deliberately leaked by the panicking `Drop` path.
#[derive(Debug)]
pub struct DebugTempDir(Option<TempDir>);

impl From<TempDir> for DebugTempDir {
    fn from(tmp: TempDir) -> Self {
        DebugTempDir(Some(tmp))
    }
}

/// Forward everything to the tempdir crate.
impl DebugTempDir {
    /// Create a temp dir whose name starts with `prefix`, in the system
    /// default temp location.
    pub fn prefixed(prefix: &str) -> Result<DebugTempDir> {
        Builder::new().prefix(prefix).tempdir().map(Self::from)
    }

    /// Create a temp dir whose name starts with `prefix`, inside `tmpdir`.
    pub fn prefixed_in<P: AsRef<Path>>(tmpdir: P, prefix: &str) -> Result<DebugTempDir> {
        Builder::new()
            .prefix(prefix)
            .tempdir_in(tmpdir)
            .map(Self::from)
    }

    pub fn new() -> Result<DebugTempDir> {
        TempDir::new().map(Self::from)
    }

    pub fn path(&self) -> &Path {
        self.0.as_ref().unwrap().path()
    }
    /// Consume the wrapper, keeping the directory on disk permanently.
    pub fn into_path(mut self) -> PathBuf {
        self.0.take().unwrap().into_path()
    }
    /// Delete the directory now, surfacing any I/O error from the deletion.
    pub fn close(mut self) -> Result<()> {
        self.0.take().unwrap().close()
    }
}

impl AsRef<Path> for DebugTempDir {
    fn as_ref(&self) -> &Path {
        self.0.as_ref().unwrap().as_ref()
    }
}

/// Leaks the inner TempDir if we are unwinding.
impl Drop for DebugTempDir {
    fn drop(&mut self) {
        if ::std::thread::panicking() {
            if let Some(d) = self.0.as_ref() {
                eprintln!("retaining temporary directory at: {d:?}")
            }
            // mem::forget skips TempDir's Drop, which would delete the files.
            ::std::mem::forget(self.0.take())
        }
    }
}
use crate::error::ServiceResult;
use crate::service::ServiceClient;
use std::path::PathBuf;
use std::time::{Duration, SystemTime};
use tarpc::client::RpcError;
use tarpc::context::Context;
use tarpc::{client, context, tokio_serde::formats::Json};
use tokio::net::ToSocketAddrs;

/// Typed handle to a concurrent-git-pool server, wrapping the tarpc-generated
/// [`ServiceClient`]. `Clone` is cheap: the underlying RPC client is shared.
#[derive(Clone)]
pub struct Client {
    inner: ServiceClient,
}

impl Client {
    /// Connect to a pool server at `addr` over TCP, speaking JSON-serialized
    /// tarpc frames (matching the transport set up in `bin/server.rs`).
    pub async fn connect<A: ToSocketAddrs>(addr: A) -> anyhow::Result<Self> {
        let transport = tarpc::serde_transport::tcp::connect(addr, Json::default).await?;
        let client = ServiceClient::new(client::Config::default(), transport).spawn();

        Ok(Self { inner: client })
    }

    /// Ask the pool for the repository at `uri`, cloning it if necessary.
    /// The outer `Result` is the transport-level RPC outcome; the inner
    /// `ServiceResult` is the service's own outcome.
    pub fn lookup_or_clone<U: Into<String>>(
        &self,
        uri: U,
    ) -> impl futures::Future<Output = Result<ServiceResult<PathBuf>, RpcError>> + '_ {
        self.inner.lookup_or_clone(Self::make_context(), uri.into())
    }

    /// Look up `uri` in the pool without triggering a clone.
    // NOTE(review): assumes a `None` payload means "not cached" - confirm
    // against the service definition.
    pub fn lookup<U: Into<String>>(
        &self,
        uri: U,
    ) -> impl futures::Future<Output = Result<Option<ServiceResult<PathBuf>>, RpcError>> + '_ {
        self.inner.lookup(Self::make_context(), uri.into())
    }

    /// Clone `uri` into `parent_dir` (optionally under the name `directory`),
    /// mirroring `git clone [uri] [directory]` run from `parent_dir`.
    pub fn clone_in<U: Into<String>, P: Into<PathBuf>, D: Into<String>>(
        &self,
        uri: U,
        parent_dir: Option<P>,
        directory: Option<D>,
    ) -> impl futures::Future<Output = Result<ServiceResult<()>, RpcError>> + '_ {
        self.inner.clone_in(
            Self::make_context(),
            uri.into(),
            parent_dir.map(Into::into),
            directory.map(Into::into),
        )
    }

    /// Build an RPC context with a generous 5-minute deadline, since clone
    /// operations may take much longer than a typical request.
    fn make_context() -> Context {
        let mut context = context::current();
        context.deadline = SystemTime::now() + Duration::from_secs(60 * 5);
        context
    }
}
status() on command: {e:?}" 51 | )))); 52 | } 53 | 54 | let result = result.unwrap(); 55 | if !result.status.success() { 56 | return Ok(Err(ServiceError::CloneFailed(format!( 57 | "exit code: {result:?}" 58 | )))); 59 | } 60 | 61 | Ok(Ok(())) 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /yb/src/ui_ops/check_broken_streams.rs: -------------------------------------------------------------------------------- 1 | use crate::core::tool_context::maybe_yb_env; 2 | use crate::errors::YbResult; 3 | use crate::ops::update_stream::{op_update_stream, UpdateStreamOptions}; 4 | use crate::util::indicatif::MultiProgressHelpers; 5 | use crate::yb_env::ActiveSpecStatus; 6 | use crate::Config; 7 | use dialoguer::Confirm; 8 | use indicatif::MultiProgress; 9 | 10 | #[derive(Debug)] 11 | pub struct UiCheckBrokenStreamsOptions<'cfg> { 12 | config: &'cfg Config, 13 | mp: &'cfg MultiProgress, 14 | verbose: bool, 15 | } 16 | 17 | impl<'cfg> UiCheckBrokenStreamsOptions<'cfg> { 18 | pub fn new(config: &'cfg Config, mp: &'cfg MultiProgress) -> Self { 19 | Self { 20 | config, 21 | mp, 22 | verbose: false, 23 | } 24 | } 25 | 26 | pub fn verbose(&mut self, val: bool) -> &mut Self { 27 | self.verbose = val; 28 | self 29 | } 30 | } 31 | 32 | pub fn ui_op_check_broken_streams(options: UiCheckBrokenStreamsOptions) -> YbResult<()> { 33 | let yb_env = match maybe_yb_env(options.config)? 
{ 34 | Some(yb_env) => yb_env, 35 | None => { 36 | return Ok(()); 37 | } 38 | }; 39 | 40 | let active_spec_status = yb_env.active_spec_status(); 41 | if let Some(ActiveSpecStatus::StreamsBroken(broken)) = &active_spec_status { 42 | options 43 | .mp 44 | .warn("one or more streams are broken, so the active spec could not be loaded"); 45 | options.mp.note("error information follows below:"); 46 | options.mp.suspend(|| eprintln!("{:?}", &broken)); 47 | options.mp.println("")?; 48 | options 49 | .mp 50 | .note("would you like to try refresh the broken streams?"); 51 | let confirm_result = options.mp.suspend(|| -> YbResult { 52 | Confirm::new() 53 | .with_prompt("Refresh streams?") 54 | .wait_for_newline(true) 55 | .interact() 56 | .map_err(|e| e.into()) 57 | })?; 58 | 59 | if !confirm_result { 60 | options 61 | .mp 62 | .warn("OK, continuing with possibly limited functionality."); 63 | return Ok(()); 64 | } 65 | 66 | let update_opts = 67 | UpdateStreamOptions::new(options.config, broken.keys().cloned().collect()); 68 | op_update_stream(update_opts, |_| {})?; 69 | } else { 70 | return Ok(()); 71 | } 72 | 73 | Ok(()) 74 | } 75 | -------------------------------------------------------------------------------- /yb/src/ops/update_stream.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashSet; 2 | 3 | use crate::config::Config; 4 | use crate::core::tool_context::require_yb_env; 5 | use crate::errors::YbResult; 6 | use crate::stream_db::StreamKey; 7 | use crate::yb_env::ActiveSpecStatus; 8 | 9 | #[derive(Default)] 10 | pub struct UpdateStreamResult { 11 | pub active_spec_updated: bool, 12 | } 13 | 14 | pub enum UpdateStreamEvent<'a> { 15 | Start, 16 | ActiveSpecUpdated, 17 | Finish(&'a UpdateStreamResult), 18 | } 19 | 20 | pub struct UpdateStreamOptions<'cfg> { 21 | pub(crate) config: &'cfg Config, 22 | stream_keys: HashSet, 23 | } 24 | 25 | impl<'cfg> UpdateStreamOptions<'cfg> { 26 | pub fn new(config: &'cfg 
/// Pull each stream identified by `options.stream_keys`; when the stream
/// backing the currently-active spec changed, re-activate the freshly loaded
/// spec definition.
///
/// Progress is reported through the `c` callback as [`UpdateStreamEvent`]s;
/// the `Finish` event carries a reference to the final result.
pub fn op_update_stream<F>(options: UpdateStreamOptions, mut c: F) -> YbResult<UpdateStreamResult>
where
    F: FnMut(UpdateStreamEvent),
{
    let mut result = UpdateStreamResult::default();

    let mut yb_env = require_yb_env(options.config)?;

    // Key of the stream the active spec came from, if any. A broken-streams
    // status is treated as "no active stream" - it cannot be reloaded below.
    let active_spec_stream = yb_env.active_spec_status().and_then(|status| match status {
        ActiveSpecStatus::StreamsBroken(..) => None,
        ActiveSpecStatus::Active(spec) => Some(spec.stream_key),
    });

    c(UpdateStreamEvent::Start);

    for stream_key in options.stream_keys {
        let is_active_stream = active_spec_stream
            .map(|key| key == stream_key)
            .unwrap_or_default();

        // Scope the mutable borrow of the stream DB to the pull itself.
        {
            let stream = yb_env.stream_db_mut().stream_mut(stream_key).unwrap();
            stream.pull()?;
        }

        if is_active_stream {
            if let ActiveSpecStatus::Active(active_spec) =
                yb_env.active_spec_status().cloned().unwrap()
            {
                // Re-read the spec from the just-pulled stream; if its
                // definition changed, re-activate so the environment picks up
                // the new contents.
                let stream = yb_env.stream_db().stream(stream_key).unwrap();
                let reloaded_spec = stream.get_spec_by_name(active_spec.spec.name()).unwrap();
                if *reloaded_spec != active_spec.spec {
                    c(UpdateStreamEvent::ActiveSpecUpdated);
                    result.active_spec_updated = true;
                    yb_env.activate_spec(reloaded_spec.clone())?;
                }
            } else {
                // `is_active_stream` implies the status was Active above.
                unreachable!();
            }
        }
    }

    c(UpdateStreamEvent::Finish(&result));

    Ok(result)
}
/// Whether the sync action adds or removes the layer from BBLAYERS.
#[derive(Debug, PartialEq, Eq)]
pub enum BBLayersEditAction {
    AddLayer,
    RemoveLayer,
}

/// Sync action that edits the `BBLAYERS` list in a build directory's
/// `conf/bblayers.conf`, adding or removing a single layer path. If the file
/// does not exist yet, it is generated from a minimal template (add only).
#[derive(Debug)]
pub struct ModifyBBLayersConfSyncAction {
    // Path of the layer being added/removed; normalized before use.
    layer_path: PathBuf,
    // Path to the bblayers.conf file to edit or create.
    bblayers_path: PathBuf,
    action: BBLayersEditAction,
}

impl ModifyBBLayersConfSyncAction {
    pub fn new(layer_path: PathBuf, bblayers_path: PathBuf, action: BBLayersEditAction) -> Self {
        Self {
            layer_path,
            bblayers_path,
            action,
        }
    }
}

#[async_trait]
impl SyncAction for ModifyBBLayersConfSyncAction {
    /// Editing bblayers.conf never requires the --force flag.
    fn is_force_required(&self) -> bool {
        false
    }

    async fn apply(&self, _pool: &PoolHelper) -> YbResult<()> {
        let layer_path = normalize_path(&self.layer_path)
            .to_str()
            .unwrap()
            .to_string();
        if !self.bblayers_path.is_file() {
            // Removing a layer from a nonexistent file would be a caller bug.
            assert_eq!(self.action, BBLayersEditAction::AddLayer);

            fs::create_dir_all(self.bblayers_path.parent().unwrap())?;

            // Generate new bblayers.conf
            let mut bblayers_content = String::from(
                r##"# POKY_BBLAYERS_CONF_VERSION is increased each time build/conf/bblayers.conf
# changes incompatibly
POKY_BBLAYERS_CONF_VERSION = "2"

BBPATH = "${TOPDIR}"
BBFILES ??= """##,
            );

            bblayers_content.push_str(&format!("\n\nBBLAYERS ?= \"{layer_path}\""));

            // create_new(true): error out rather than clobber a file that
            // appeared since the is_file() check above.
            let mut f = OpenOptions::new()
                .write(true)
                .create_new(true)
                .open(&self.bblayers_path)?;
            f.write_all(bblayers_content.as_bytes())?;
            return Ok(());
        }

        // File exists: edit the BBLAYERS list in place, preserving everything
        // else in the file.
        let mut editor =
            ListVarEditor::from_file(&self.bblayers_path, String::from("BBLAYERS")).unwrap();
        match self.action {
            BBLayersEditAction::AddLayer => {
                editor.add_value(layer_path);
            }
            BBLayersEditAction::RemoveLayer => {
                editor.remove_value(layer_path);
            }
        }
        editor.commit().unwrap();

        Ok(())
    }
}
.args(&self.args[1..]) 55 | .current_dir(repo.workdir().unwrap()) 56 | .spawn()? 57 | .wait()?; 58 | 59 | if !self.flag_no_return_codes { 60 | let (color, return_code_text) = match result.code() { 61 | Some(0) => (Style::from_dotted_str("green"), String::from("0")), 62 | Some(code) => (Style::from_dotted_str("red"), code.to_string()), 63 | None => ( 64 | Style::from_dotted_str("yellow"), 65 | String::from("[terminated by signal]"), 66 | ), 67 | }; 68 | 69 | println!( 70 | "{}: {}", 71 | color.bold().apply_to("return code"), 72 | return_code_text 73 | ); 74 | } 75 | } 76 | 77 | Ok(()) 78 | } 79 | } 80 | -------------------------------------------------------------------------------- /yb/src/yb_conf.rs: -------------------------------------------------------------------------------- 1 | use std::path::{Path, PathBuf}; 2 | 3 | use serde::{Deserialize, Serialize}; 4 | 5 | use crate::core::tool_context::YoctoEnvironment; 6 | use crate::errors::YbResult; 7 | use crate::util::paths::try_diff_paths; 8 | 9 | pub const YB_CONF_FORMAT_VERSION: u32 = 2; 10 | 11 | #[derive(Debug, Serialize, Deserialize)] 12 | pub struct YbConf { 13 | format_version: u32, 14 | 15 | /// Location of the build directory relative to .yb directory 16 | build_dir_relative: PathBuf, 17 | 18 | /// Location of the top-level sources directory relative to the .yb directory 19 | #[serde(alias = "repos_dir_relative")] 20 | sources_dir_relative: PathBuf, 21 | 22 | /// Location of the poky layer relative to the .yb directory 23 | poky_dir_relative: Option, 24 | } 25 | 26 | impl YbConf { 27 | pub fn new_from_yocto_env(yb_dir: &Path, yocto_env: &YoctoEnvironment) -> YbResult { 28 | // There may not be a poky directory (or any layers) yet 29 | let poky_dir_relative = yocto_env 30 | .poky_layer 31 | .as_ref() 32 | .map(|p| try_diff_paths(p, yb_dir)) 33 | .map_or(Ok(None), |r| r.map(Some))?; 34 | 35 | Ok(YbConf { 36 | format_version: YB_CONF_FORMAT_VERSION, 37 | build_dir_relative: 
try_diff_paths(&yocto_env.build_dir, yb_dir)?, 38 | sources_dir_relative: try_diff_paths(&yocto_env.sources_dir, yb_dir)?, 39 | poky_dir_relative, 40 | }) 41 | } 42 | 43 | pub fn build_dir_relative(&self) -> &PathBuf { 44 | &self.build_dir_relative 45 | } 46 | 47 | pub fn sources_dir_relative(&self) -> &PathBuf { 48 | &self.sources_dir_relative 49 | } 50 | 51 | pub fn poky_dir_relative(&self) -> Option<&PathBuf> { 52 | self.poky_dir_relative.as_ref() 53 | } 54 | } 55 | 56 | #[cfg(test)] 57 | mod test { 58 | use crate::yb_conf::{YbConf, YB_CONF_FORMAT_VERSION}; 59 | 60 | #[test] 61 | fn fake_version_1_handling() { 62 | // This is actually version 2, but I never bumped the format version :/ 63 | let conf = r#"--- 64 | format_version: 1 65 | build_dir_relative: "../build" 66 | sources_dir_relative: "../sources" 67 | poky_dir_relative: "../sources/poky" 68 | "#; 69 | 70 | let yb_conf: YbConf = serde_yaml::from_str(conf).unwrap(); 71 | assert_eq!(yb_conf.format_version, 1); 72 | } 73 | 74 | #[test] 75 | fn version_1_handling() { 76 | let conf = r#"--- 77 | format_version: 1 78 | build_dir_relative: "../build" 79 | repos_dir_relative: "../sources" 80 | poky_dir_relative: "../sources/poky" 81 | "#; 82 | 83 | let yb_conf: YbConf = serde_yaml::from_str(conf).unwrap(); 84 | assert_eq!(yb_conf.format_version, 1); 85 | } 86 | 87 | #[test] 88 | fn format_version_up_to_date() { 89 | assert_eq!(YB_CONF_FORMAT_VERSION, 2, "need to update migration code!"); 90 | } 91 | } 92 | -------------------------------------------------------------------------------- /yb/src/data_model/git.rs: -------------------------------------------------------------------------------- 1 | use itertools::Itertools; 2 | use serde::Serialize; 3 | 4 | #[derive(Debug, PartialEq, Eq, Clone, Copy, PartialOrd, Ord, Serialize)] 5 | pub enum UpstreamComparison { 6 | UpToDate, 7 | Behind(usize), 8 | Ahead(usize), 9 | Diverged { ahead: usize, behind: usize }, 10 | } 11 | 12 | impl UpstreamComparison { 13 | pub fn 
is_diverged(&self) -> bool { 14 | matches!(self, UpstreamComparison::Diverged { .. }) 15 | } 16 | } 17 | 18 | // It just so happens we can use the ordering of the `UpstreamComparison` enum to determine the 19 | // optimal branch. 20 | pub fn determine_optimal_checkout_branch( 21 | candidates: &Vec, 22 | ) -> Option<&LocalTrackingBranchWithUpstreamComparison> { 23 | candidates 24 | .iter() 25 | .sorted_by(|a, b| a.upstream_comparison.cmp(&b.upstream_comparison)) 26 | .next() 27 | } 28 | 29 | #[cfg(test)] 30 | mod test { 31 | use super::*; 32 | 33 | #[test] 34 | fn upstream_comparison_orderings() { 35 | assert_eq!(UpstreamComparison::UpToDate, UpstreamComparison::UpToDate); 36 | assert!(UpstreamComparison::UpToDate < UpstreamComparison::Ahead(10)); 37 | assert!(UpstreamComparison::UpToDate < UpstreamComparison::Behind(10)); 38 | assert!( 39 | UpstreamComparison::UpToDate 40 | < UpstreamComparison::Diverged { 41 | ahead: 1, 42 | behind: 2 43 | } 44 | ); 45 | assert!(UpstreamComparison::Behind(1) < UpstreamComparison::Ahead(1)); 46 | } 47 | } 48 | 49 | #[derive(Debug, Eq, PartialEq, Serialize)] 50 | pub struct BranchStatus { 51 | /// Name of the branch 52 | pub local_branch_name: String, 53 | pub upstream_branch_status: Option, 54 | } 55 | 56 | impl BranchStatus { 57 | pub fn is_diverged(&self) -> bool { 58 | self.upstream_branch_status 59 | .as_ref() 60 | .map_or(false, |s| s.upstream_comparison.is_diverged()) 61 | } 62 | } 63 | 64 | #[derive(Debug, Eq, PartialEq, Serialize)] 65 | pub struct UpstreamBranchStatus { 66 | pub remote_tracking_branch: RemoteTrackingBranch, 67 | /// Status of the branch relative to the upstream 68 | pub upstream_comparison: UpstreamComparison, 69 | } 70 | 71 | #[derive(Debug, Eq, PartialEq, Serialize)] 72 | pub struct LocalTrackingBranchWithUpstreamComparison { 73 | pub local_tracking_branch: LocalTrackingBranch, 74 | pub upstream_comparison: UpstreamComparison, 75 | } 76 | 77 | #[derive(Debug, Eq, PartialEq, Serialize)] 78 | pub struct 
use std::{env, io};

use clap::Parser;
use eyre::Context;
use indicatif::MultiProgress;

use yb::commands::*;
use yb::config::Config;
use yb::errors::YbResult;
use yb::yb_options::{Level, YbOptions};

/// Parse the command line and build a `Config` rooted at the process's
/// current working directory.
fn parse_args_and_create_config() -> YbResult<(Config, YbOptions)> {
    let opt: YbOptions = YbOptions::parse();
    let cwd = env::current_dir().context("couldn't get the current directory of the process")?;
    let config = Config::new(cwd, &opt);
    Ok((config, opt))
}

#[tokio::main]
async fn main() {
    // Best-effort: register a handler so panics can produce core dumps.
    let _ = coredump::register_panic_handler();
    // Honor the NO_COLOR convention: if the variable is set at all (even to
    // an empty value), install a theme-less (colorless) error-report hook.
    if env::var("NO_COLOR") == Err(std::env::VarError::NotPresent) {
        color_eyre::install().unwrap();
    } else {
        color_eyre::config::HookBuilder::new()
            .theme(color_eyre::config::Theme::new())
            .install()
            .unwrap();
    }

    if let Err(code) = real_main().await {
        std::process::exit(code);
    }
}

/// Run the selected subcommand; an `Err` carries the desired process exit code.
async fn real_main() -> Result<(), i32> {
    // Automatically enable backtracing unless user explicitly disabled it
    if env::var("RUST_BACKTRACE").is_err() {
        env::set_var("RUST_BACKTRACE", "1");
    }

    // Figure out what we're going to do
    let result = parse_args_and_create_config();
    match result {
        Err(err) => {
            eprintln!("internal error whilst setting up application: {err:?}");
            return Err(1);
        }

        Ok((mut config, opt)) => {
            // All progress bars and log output share this MultiProgress so
            // they do not clobber each other on the terminal.
            let mp = MultiProgress::new();

            install_tracing(opt.level, mp.clone());

            // Run the subcommand
            if let Err(err) = opt.command.run(&mut config, &mp).await {
                eprintln!("internal error: {err:?}");
                return Err(1);
            }
        }
    }

    Ok(())
}

/// Install a tracing subscriber whose output is routed through `mp` so log
/// lines do not interleave with progress-bar redraws. An explicit `RUST_LOG`
/// (via `EnvFilter`) takes precedence over the CLI-selected `level`.
fn install_tracing(level: Level, mp: MultiProgress) {
    use tracing_error::ErrorLayer;
    use tracing_subscriber::fmt;
    use tracing_subscriber::prelude::*;
    use tracing_subscriber::EnvFilter;

    let fmt_layer = fmt::layer()
        .with_target(false)
        .with_writer(move || MultiProgressWriteWrapper::new(mp.clone()));
    let level = tracing::Level::from(level);
    let filter_layer = EnvFilter::try_from_default_env()
        .or_else(|_| {
            EnvFilter::builder()
                .with_default_directive(level.into())
                .parse("")
        })
        .unwrap();

    tracing_subscriber::registry()
        .with(filter_layer)
        .with(fmt_layer)
        .with(ErrorLayer::default())
        .init();
}

/// `io::Write` adapter that writes to stderr inside `MultiProgress::suspend`,
/// so written text is not garbled by in-flight progress rendering.
struct MultiProgressWriteWrapper(MultiProgress);

impl MultiProgressWriteWrapper {
    fn new(mp: MultiProgress) -> Self {
        Self(mp)
    }
}

impl io::Write for MultiProgressWriteWrapper {
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        self.0.suspend(|| io::stderr().lock().write(buf))
    }

    fn flush(&mut self) -> io::Result<()> {
        // Writes go straight to stderr in `write`; nothing is buffered here.
        Ok(())
    }
}
/// Convenience extensions for [`MultiProgress`]: inserting plain text lines
/// relative to an existing bar, and emitting note/warning/error messages
/// without corrupting progress-bar rendering.
pub trait MultiProgressHelpers {
    /// Insert a static text line directly after `after`; returns the new bar.
    fn println_after(
        &self,
        after: &ProgressBar,
        message: impl Into<Cow<'static, str>>,
    ) -> ProgressBar;

    /// Insert a static text line directly before `after`; returns the new bar.
    fn println_before(
        &self,
        after: &ProgressBar,
        message: impl Into<Cow<'static, str>>,
    ) -> ProgressBar;

    /// Print "note: ..." (cyan header) to stderr.
    fn note<S>(&self, s: S)
    where
        S: AsRef<str>;
    /// Print "warning: ..." (yellow header) to stderr.
    fn warn<S>(&self, s: S)
    where
        S: AsRef<str>;
    /// Print "error: ..." (red header) to stderr.
    fn error<S>(&self, s: S)
    where
        S: AsRef<str>;
}

impl MultiProgressHelpers for MultiProgress {
    fn println_after(
        &self,
        after: &ProgressBar,
        message: impl Into<Cow<'static, str>>,
    ) -> ProgressBar {
        // A "{msg}"-only spinner behaves as a plain text line; AndLeave keeps
        // it on screen once finished.
        let ret = self.insert_after(
            after,
            ProgressBar::new_spinner()
                .with_style(ProgressStyle::with_template("{msg}").unwrap())
                .with_finish(ProgressFinish::AndLeave)
                .with_message(message)
                .with_tab_width(4),
        );
        // Tick once so the line is drawn immediately.
        ret.tick();
        ret
    }

    fn println_before(
        &self,
        after: &ProgressBar,
        message: impl Into<Cow<'static, str>>,
    ) -> ProgressBar {
        let ret = self.insert_before(
            after,
            ProgressBar::new_spinner()
                .with_style(ProgressStyle::with_template("{msg}").unwrap())
                .with_finish(ProgressFinish::AndLeave)
                .with_message(message)
                .with_tab_width(4),
        );
        ret.tick();
        ret
    }

    fn note<S>(&self, s: S)
    where
        S: AsRef<str>,
    {
        let header = Style::from_dotted_str("cyan.bold").apply_to("note");
        // suspend() clears the bars while printing so the line stays intact.
        self.suspend(|| eprintln!("{}: {}", header, s.as_ref()));
    }

    fn warn<S>(&self, s: S)
    where
        S: AsRef<str>,
    {
        let header = Style::from_dotted_str("yellow.bold").apply_to("warning");
        self.suspend(|| eprintln!("{}: {}", header, s.as_ref()));
    }

    fn error<S>(&self, s: S)
    where
        S: AsRef<str>,
    {
        let header = Style::from_dotted_str("red.bold").apply_to("error");
        self.suspend(|| eprintln!("\n{}: {}\n", header, s.as_ref()));
    }
}
AddStreamOptions<'cfg> { 49 | // self.callbacks = callbacks; 50 | // self 51 | // } 52 | } 53 | 54 | pub fn op_add_stream(options: AddStreamOptions) -> YbResult<()> { 55 | let yb_env = require_yb_env(options.config)?; 56 | 57 | let stream_name = options.name.clone().unwrap_or_else(|| "default".into()); 58 | 59 | let tmpdir = Builder::new().prefix("yb").tempdir()?; 60 | let tmp_contents_dir = tmpdir.path().join(STREAM_CONTENT_ROOT_SUBDIR); 61 | 62 | let mut fetch_options = FetchOptions::new(); 63 | fetch_options.remote_callbacks(ssh_agent_remote_callbacks()); 64 | 65 | // Clone the stream 66 | RepoBuilder::new() 67 | .fetch_options(fetch_options) 68 | .clone(&options.uri, tmp_contents_dir.as_ref())?; 69 | 70 | // Write the config file 71 | // TODO: when other stream types are added, don't hardcode git 72 | let config = StreamConfig::new(StreamKind::Git); 73 | let config_file_path = tmpdir.path().join(STREAM_CONFIG_FILE); 74 | let f = OpenOptions::new() 75 | .write(true) 76 | .create(true) 77 | .open(&config_file_path) 78 | .context(format!( 79 | "failed to open file {:?} for writing", 80 | &config_file_path 81 | ))?; 82 | serde_yaml::to_writer(f, &config)?; 83 | 84 | let stream_dir = yb_env.streams_dir(); 85 | if !stream_dir.is_dir() { 86 | println!("creating dir: {:?}", &stream_dir); 87 | fs::create_dir(&stream_dir)?; 88 | } 89 | 90 | let stream_root_dir = stream_dir.join(&stream_name); 91 | if stream_root_dir.exists() { 92 | eyre::bail!("a stream with name {} already exists", &stream_name); 93 | } 94 | 95 | // Just fake a key for now 96 | let key = StreamKey::default(); 97 | // Try to load stream 98 | Stream::load(PathBuf::from(tmpdir.path()), stream_name, key)?; 99 | 100 | // Everything was OK, so move into stream directory 101 | let mut mv_cmd = Command::new("mv"); 102 | mv_cmd.arg(tmpdir.into_path()).arg(&stream_root_dir); 103 | mv_cmd.assert().success(); 104 | 105 | println!("yb {:?}", &yb_env); 106 | 107 | Ok(()) 108 | } 109 | 
-------------------------------------------------------------------------------- /yb/src/ui_ops/update_stream.rs: -------------------------------------------------------------------------------- 1 | use indicatif::{MultiProgress, ProgressBar}; 2 | use maplit::hashset; 3 | use std::time::Duration; 4 | 5 | use crate::config::Config; 6 | use crate::core::tool_context::maybe_yb_env; 7 | use crate::errors::YbResult; 8 | use crate::ops::update_stream::{op_update_stream, UpdateStreamEvent, UpdateStreamOptions}; 9 | use crate::util::indicatif::{IndicatifHelpers, MultiProgressHelpers}; 10 | 11 | use crate::yb_env::ActiveSpecStatus; 12 | 13 | #[derive(Debug)] 14 | pub struct UiUpdateStreamOptions<'cfg> { 15 | config: &'cfg Config, 16 | mp: &'cfg MultiProgress, 17 | verbose: bool, 18 | fail_if_no_yb_env: bool, 19 | } 20 | 21 | impl<'cfg> UiUpdateStreamOptions<'cfg> { 22 | pub fn new(config: &'cfg Config, mp: &'cfg MultiProgress) -> Self { 23 | Self { 24 | config, 25 | mp, 26 | verbose: false, 27 | fail_if_no_yb_env: false, 28 | } 29 | } 30 | 31 | pub fn fail_if_no_yb_env(&mut self, val: bool) -> &mut Self { 32 | self.fail_if_no_yb_env = val; 33 | self 34 | } 35 | 36 | pub fn verbose(&mut self, val: bool) -> &mut Self { 37 | self.verbose = val; 38 | self 39 | } 40 | } 41 | 42 | pub fn ui_op_update_stream(options: UiUpdateStreamOptions) -> YbResult<()> { 43 | let yb_env = match maybe_yb_env(options.config)? { 44 | Some(yb_env) => yb_env, 45 | None => { 46 | if options.fail_if_no_yb_env { 47 | eyre::bail!("expected yb environment; see the 'yb init' command") 48 | } else { 49 | return Ok(()); 50 | } 51 | } 52 | }; 53 | 54 | let active_spec_status = yb_env.active_spec_status(); 55 | let streams; 56 | match &active_spec_status { 57 | Some(ActiveSpecStatus::Active(active_spec)) => { 58 | options 59 | .mp 60 | .note(format!("active spec: {}", active_spec.spec.name())); 61 | 62 | streams = hashset! 
{ active_spec.stream_key }; 63 | } 64 | Some(ActiveSpecStatus::StreamsBroken(broken)) => { 65 | options 66 | .mp 67 | .note("one or more streams are broken; will update them all"); 68 | streams = broken.keys().copied().collect(); 69 | } 70 | None => { 71 | options 72 | .mp 73 | .note("no active spec; consider using the 'yb activate' command"); 74 | return Ok(()); 75 | } 76 | } 77 | 78 | let update_opts = UpdateStreamOptions::new(options.config, streams); 79 | 80 | // TODO report result in porcelain 81 | 82 | let mut stream_update_spinner: Option = None; 83 | op_update_stream(update_opts, |event| match event { 84 | UpdateStreamEvent::Start => { 85 | stream_update_spinner.replace( 86 | options.mp.add( 87 | ProgressBar::new_spinner() 88 | .with_message("refreshing stream") 89 | .with_steady_tick(Duration::from_millis(50)), 90 | ), 91 | ); 92 | } 93 | UpdateStreamEvent::ActiveSpecUpdated => { 94 | options 95 | .mp 96 | .note("active spec changed - reloading environment"); 97 | } 98 | UpdateStreamEvent::Finish(..) => { 99 | if let Some(stream_update_spinner) = stream_update_spinner.as_ref() { 100 | stream_update_spinner.finish_and_clear(); 101 | } 102 | } 103 | })?; 104 | 105 | Ok(()) 106 | } 107 | -------------------------------------------------------------------------------- /yb-support.bbclass: -------------------------------------------------------------------------------- 1 | # Author: Chris Laplante 2 | # 3 | # This bbclass maintains the build history data that backs the 4 | # yb (https://github.com/Agilent/yb) "twice-bake" subcommand. One day, 5 | # it might do more, hence the generic name. 6 | # 7 | # Ideally, yb should be able to operate in any Yocto environment without 8 | # advance configuration. This build history support could/should? be folded 9 | # into the 'buildstats' bbclass in poky and upstreamed. But that's a lot of 10 | # work I don't feel like doing right now. So I # took the cheaters way out 11 | # and wrote this small bbclass. 
12 | 13 | YB_SUPPORT_BASEDIR = "${TMPDIR}/yb-support" 14 | YB_BUILD_HISTORY_DIR = "${YB_SUPPORT_BASEDIR}/history" 15 | 16 | addhandler yb_support_eventhandler 17 | yb_support_eventhandler[eventmask] = " \ 18 | bb.build.TaskFailed \ 19 | bb.build.TaskStarted \ 20 | bb.build.TaskSucceeded \ 21 | bb.event.BuildStarted \ 22 | " 23 | 24 | # BitBake hashing seems to account for the presence or absence of event 25 | # handlers, but not the actual contents of the code. So there is no 26 | # obvious way to force things to rebuild when, e.g., our history format 27 | # changes. The best we can do (without playing crazy games like injecting 28 | # vardeps flags into every task in the task graph) is to just manually 29 | # keep track of our build history format. We write it to every json file. 30 | # The yb tool itself will be resposible for telling the user about version 31 | # mismatches. 32 | YB_BUILD_HISTORY_VERSION = "2" 33 | 34 | python yb_support_eventhandler() { 35 | import json 36 | 37 | support_dir = None 38 | bn = d.getVar("BUILDNAME") 39 | if bn is not None: 40 | support_dir = os.path.join(d.getVar("YB_BUILD_HISTORY_DIR"), bn) 41 | bb.utils.mkdirhier(support_dir) 42 | 43 | if isinstance(e, bb.event.BuildStarted): 44 | return 45 | 46 | task_dir = os.path.join(support_dir, d.getVar("PF")) 47 | bb.utils.mkdirhier(task_dir) 48 | task_file = os.path.join(task_dir, "{0}.json".format(e.task)) 49 | 50 | data = {} 51 | try: 52 | with open(task_file, "r") as f: 53 | data = json.loads(f.read()) 54 | except FileNotFoundError: 55 | # Seed initial data 56 | prefuncs = (d.getVarFlag(e.task, 'prefuncs', expand=True) or '').split() 57 | postfuncs = (d.getVarFlag(e.task, 'postfuncs', expand=True) or '').split() 58 | 59 | runfmt = d.getVar('BB_RUNFMT') or "run.{func}.{pid}" 60 | pid = os.getpid() 61 | 62 | # Get the name of the run files 63 | task_runfile = runfmt.format(func=d.getVar("BB_RUNTASK"), pid=pid) 64 | prefunc_runfiles = [ (f, runfmt.format(func=f, pid=pid)) for f in 
prefuncs ] 65 | postfunc_runfiles = [ (f, runfmt.format(func=f, pid=pid)) for f in postfuncs ] 66 | 67 | data = { 68 | "PN": e.pn, 69 | "PV": e.pv, 70 | "T": d.getVar("T"), 71 | "WORKDIR": d.getVar("WORKDIR"), 72 | "log_file": e.logfile, 73 | "mc": e._mc, 74 | "postfunc_runfiles": postfunc_runfiles, 75 | "prefunc_runfiles": prefunc_runfiles, 76 | "task_file": e.taskfile, 77 | "task_runfile": task_runfile, 78 | "task": e.task, 79 | "class_version": d.getVar("YB_BUILD_HISTORY_VERSION") 80 | } 81 | 82 | if isinstance(e, bb.build.TaskStarted): 83 | data["start_time"] = e.time 84 | 85 | if isinstance(e, bb.build.TaskFailed): 86 | data["end_time"] = e.time 87 | data["outcome"] = "FAIL" 88 | 89 | if isinstance(e, bb.build.TaskSucceeded): 90 | data["end_time"] = e.time 91 | data["outcome"] = "SUCCESS" 92 | 93 | with open(task_file, "w") as f: 94 | f.write(json.dumps(data, indent=4, sort_keys=True)) 95 | } 96 | 97 | -------------------------------------------------------------------------------- /yb/src/util/paths.rs: -------------------------------------------------------------------------------- 1 | use eyre::{Context, ContextCompat}; 2 | use std::os::linux::fs::MetadataExt; 3 | use std::path::{Component, Path, PathBuf}; 4 | use std::process::Command; 5 | use std::{env, fs, io}; 6 | 7 | use walkdir::DirEntry; 8 | 9 | use crate::errors::YbResult; 10 | 11 | pub fn run_which(program_name: &str) -> YbResult> { 12 | let output = Command::new("which").arg(program_name).output()?; 13 | return Ok(match output.status.code() { 14 | Some(0) => Some(PathBuf::from(String::from_utf8(output.stdout)?.trim_end())), 15 | _ => None, 16 | }); 17 | } 18 | 19 | pub fn make_relative_to_cwd(path: &P) -> YbResult 20 | where 21 | P: AsRef, 22 | { 23 | try_diff_paths(path, env::current_dir()?) 
}

/// Compute `path` relative to `base`.
///
/// Thin wrapper around `pathdiff::diff_paths` that turns the `None` case
/// into an error instead of an `Option`.
pub fn try_diff_paths<P, B>(path: P, base: B) -> YbResult<PathBuf>
where
    P: AsRef<Path>,
    B: AsRef<Path>,
{
    pathdiff::diff_paths(path, base).context("unable to compute relative path")
}

/// Return the immediate subdirectories of `dir`, sorted lexicographically.
///
/// Individual entries that fail to read are silently skipped; only the
/// initial `read_dir` call can produce an error.
pub fn list_subdirectories_sorted(dir: &Path) -> YbResult<Vec<PathBuf>> {
    let mut subdirs = Vec::new();
    for entry in fs::read_dir(dir)? {
        if let Ok(entry) = entry {
            let path = entry.path();
            if path.is_dir() {
                subdirs.push(path);
            }
        }
    }
    subdirs.sort();
    Ok(subdirs)
}

/// True when the walkdir entry is a regular file whose name ends in ".yaml".
/// Non-UTF-8 file names are treated as non-YAML.
pub fn is_yaml_file(entry: &DirEntry) -> bool {
    if !entry.file_type().is_file() {
        return false;
    }
    matches!(entry.file_name().to_str(), Some(name) if name.ends_with(".yaml"))
}

/// True when the walkdir entry's name begins with '.' (Unix hidden-file
/// convention). Non-UTF-8 file names are treated as not hidden.
pub fn is_hidden(entry: &DirEntry) -> bool {
    matches!(entry.file_name().to_str(), Some(name) if name.starts_with('.'))
}

// from https://stackoverflow.com/a/68233480
/// Improve the path to try remove and solve .. token.
///
/// This assumes that `a/b/../c` is `a/c` which might be different from
/// what the OS would have chosen when b is a link. This is OK
/// for broot verb arguments but can't be generally used elsewhere
///
/// This function ensures a given path ending with '/' still
/// ends with '/' after normalization.
69 | pub fn normalize_path>(path: P) -> PathBuf { 70 | let ends_with_slash = path.as_ref().to_str().map_or(false, |s| s.ends_with('/')); 71 | let mut normalized = PathBuf::new(); 72 | for component in path.as_ref().components() { 73 | match &component { 74 | Component::ParentDir => { 75 | if !normalized.pop() { 76 | normalized.push(component); 77 | } 78 | } 79 | _ => { 80 | normalized.push(component); 81 | } 82 | } 83 | } 84 | if ends_with_slash { 85 | normalized.push(""); 86 | } 87 | normalized 88 | } 89 | 90 | pub fn find_dir_recurse_upwards>( 91 | start_point: S, 92 | dir_name: &str, 93 | ) -> YbResult> { 94 | let start_point = start_point.as_ref(); 95 | assert!(start_point.is_dir()); 96 | let st_dev = start_point.metadata().unwrap().st_dev(); 97 | 98 | let mut dir = Some(start_point); 99 | while let Some(root) = &dir { 100 | let metadata = root 101 | .metadata() 102 | .with_context(|| format!("couldn't get fs metadata for {:?}", &dir))?; 103 | 104 | // Don't cross filesystems 105 | if metadata.st_dev() != st_dev { 106 | return Ok(None); 107 | } 108 | 109 | let candidate = root.join(dir_name); 110 | match candidate.metadata() { 111 | Ok(yb_dir_metadata) if yb_dir_metadata.is_dir() => return Ok(Some(candidate)), 112 | Err(e) if e.kind() != io::ErrorKind::NotFound => { 113 | Err(e).with_context(|| format!("couldn't get fs metadata for {:?}", &candidate))? 
114 | } 115 | _ => { 116 | dir = root.parent(); 117 | } 118 | } 119 | } 120 | 121 | Ok(None) 122 | } 123 | -------------------------------------------------------------------------------- /yb/src/commands/upgrade.rs: -------------------------------------------------------------------------------- 1 | use async_trait::async_trait; 2 | use color_eyre::Help; 3 | use indicatif::MultiProgress; 4 | 5 | use crate::commands::SubcommandRunner; 6 | use crate::core::tool_context::{determine_tool_context, require_yb_env, ToolContext}; 7 | use crate::errors::YbResult; 8 | use crate::ops::add_stream::{op_add_stream, AddStreamOptions}; 9 | use crate::yb_env::YbEnv; 10 | use crate::Config; 11 | 12 | /// Create a 'yb' environment within an activated Yocto environment 13 | /// 14 | /// When run in the context of an activated Yocto environment (i.e. the command `bitbake` is 15 | /// available in your terminal), the .yb control directory is created above the top-level repos 16 | /// directory (typically 'sources'). For example if your layers live in yocto/sources/ then the 17 | /// control directory is created at yocto/.yb 18 | #[derive(Debug, clap::Parser)] 19 | #[clap(verbatim_doc_comment)] 20 | pub struct UpgradeCommand { 21 | /// You can use the '--default-stream' flag to specify a default spec stream to be added. 
22 | /// 23 | /// URI pointing to a default spec stream to add 24 | #[clap(name = "default-stream", short = 's', long)] 25 | default_stream: Option, 26 | 27 | #[clap(name = "default-spec", short = 'p', long, requires = "default-stream")] 28 | default_spec: Option, 29 | } 30 | 31 | #[async_trait] 32 | impl SubcommandRunner for UpgradeCommand { 33 | async fn run(&self, config: &mut Config, _mp: &MultiProgress) -> YbResult<()> { 34 | let context = determine_tool_context(config)?; 35 | 36 | match context { 37 | Some(ToolContext::Yb(yb_env)) => { 38 | return Err(eyre::eyre!( 39 | "a .yb environment already exists at {:?}", 40 | yb_env.root_dir() 41 | )); 42 | } 43 | Some(ToolContext::YoctoEnv(context2)) => { 44 | // An activated Yocto environment 45 | let target = context2.sources_dir.parent().unwrap().to_owned(); 46 | 47 | // Sanity check: make sure cwd is under `target` 48 | if !config.cwd.ancestors().any(|ancestor| ancestor == target) { 49 | return Err(eyre::eyre!( 50 | "current working directory must be within the activated Yocto environment to proceed", 51 | ) 52 | .suggestion(format!("`cd` to the Yocto environment ({}) and then try again", target.display())) 53 | .suggestion("or, activate a different Yocto environment") 54 | .suppress_backtrace(true) 55 | ); 56 | } 57 | 58 | let new_context = ToolContext::Yb(YbEnv::initialize(target, &context2)?); 59 | match &new_context { 60 | ToolContext::Yb(yb_env) => println!("initialized yb env at {yb_env:?}"), 61 | _ => panic!(""), 62 | }; 63 | } 64 | None => { 65 | return Err(eyre::eyre!( 66 | "an activated Yocto environment was not found", 67 | ).suggestion("use `yb init` if you want to create a fresh yb and Yocto env") 68 | .suggestion("or, if you meant to use `yb upgrade`, make sure your Yocto env is activated") 69 | .suppress_backtrace(true) 70 | ); 71 | } 72 | }; 73 | 74 | if let Some(default_stream_uri) = &self.default_stream { 75 | let mut add_stream_opts = AddStreamOptions::new(config); 76 | 
add_stream_opts.uri(default_stream_uri.clone()); 77 | op_add_stream(add_stream_opts)?; 78 | 79 | if let Some(default_spec_name) = &self.default_spec { 80 | // TODO deduplicate code 81 | let mut yb_env = require_yb_env(config)?; 82 | 83 | let spec = yb_env.find_spec(default_spec_name)?.cloned(); 84 | if let Some(spec) = spec { 85 | // TODO don't clone 86 | yb_env.activate_spec(spec)?; 87 | println!("Activated spec '{}'", &default_spec_name); 88 | } else { 89 | eyre::bail!("spec with name '{}' not found", &default_spec_name); 90 | } 91 | } 92 | } 93 | 94 | Ok(()) 95 | } 96 | } 97 | -------------------------------------------------------------------------------- /concurrent_git_pool_proc_macros/src/lib.rs: -------------------------------------------------------------------------------- 1 | #![feature(proc_macro_diagnostic)] 2 | 3 | use proc_macro::TokenStream; 4 | use proc_macro2::Span; 5 | 6 | use quote::{quote, ToTokens}; 7 | use syn::parse::{Parse, ParseStream}; 8 | use syn::{parse_macro_input, Expr, Lit, Token}; 9 | 10 | // Example: 11 | // ```no_run 12 | // clone_repos! 
{ 13 | // "https://github.com/yoctoproject/poky.git" => "test/", 14 | // "https://github.com/openembedded/meta-openembedded.git" in parent_dir, 15 | // }; 16 | // 17 | 18 | struct MacroInput(Vec); 19 | 20 | impl Parse for MacroInput { 21 | fn parse(input: ParseStream) -> syn::Result { 22 | let mut ret = vec![]; 23 | while let Ok(r) = input.parse::() { 24 | ret.push(r); 25 | if input.peek(Token![,]) { 26 | input.parse::().unwrap(); 27 | } else { 28 | break; 29 | } 30 | } 31 | Ok(MacroInput(ret)) 32 | } 33 | } 34 | 35 | enum ExprOrLit { 36 | Expr(Expr), 37 | Lit(Lit), 38 | } 39 | 40 | impl ToTokens for ExprOrLit { 41 | fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) { 42 | match self { 43 | Self::Expr(expr) => expr.to_tokens(tokens), 44 | Self::Lit(lit) => lit.to_tokens(tokens), 45 | } 46 | } 47 | } 48 | 49 | impl Parse for ExprOrLit { 50 | fn parse(input: ParseStream) -> syn::Result { 51 | if let Ok(expr) = input.parse::() { 52 | Ok(ExprOrLit::Expr(expr)) 53 | } else if input.peek(Lit) { 54 | return Ok(ExprOrLit::Lit(input.parse().unwrap())); 55 | } else { 56 | unimplemented!(); 57 | } 58 | } 59 | } 60 | 61 | struct CloneCommand { 62 | uri: Lit, 63 | parent_dir: Option, 64 | directory: Option, 65 | } 66 | 67 | impl Parse for CloneCommand { 68 | fn parse(input: ParseStream) -> syn::Result { 69 | let uri: Lit = input.parse()?; 70 | 71 | let mut directory = None; 72 | if input.peek(Token![=>]) { 73 | input.parse::]>().unwrap(); 74 | directory = Some(input.parse::()?); 75 | } 76 | 77 | let mut parent_dir = None; 78 | if input.peek(Token![in]) { 79 | input.parse::().unwrap(); 80 | parent_dir = Some(input.parse::()?); 81 | } 82 | Ok(CloneCommand { 83 | uri, 84 | parent_dir, 85 | directory, 86 | }) 87 | } 88 | } 89 | 90 | #[proc_macro] 91 | pub fn clone_repos(input: TokenStream) -> TokenStream { 92 | let mut macro_input: MacroInput = parse_macro_input!(input); 93 | let input_len = macro_input.0.len(); 94 | 95 | if input_len == 0 { 96 | Span::call_site() 97 | 
.unwrap() 98 | .error("Need at least one clone command") 99 | .emit(); 100 | return TokenStream::new(); 101 | } 102 | 103 | let clones = macro_input 104 | .0 105 | .drain(..) 106 | .map(|v| { 107 | let uri = v.uri; 108 | let parent_dir = v 109 | .parent_dir 110 | .map(|p| quote! {Some(std::path::PathBuf::from(#p))}) 111 | .unwrap_or(quote! {None}); 112 | let directory = v 113 | .directory 114 | .map(|p| quote! {Some(String::from(#p))}) 115 | .unwrap_or(quote! {None}); 116 | 117 | quote! { 118 | client.clone_in(#uri, #parent_dir, #directory) 119 | } 120 | }) 121 | .collect::>(); 122 | 123 | let unwraps = (0..input_len) 124 | .map(|v| { 125 | let v = syn::Index::from(v); 126 | quote! { 127 | results.#v.expect("RPC failed").expect("service error"); 128 | } 129 | }) 130 | .collect::>(); 131 | 132 | let ret = quote! { 133 | use concurrent_git_pool::PoolHelper; 134 | let client = PoolHelper::connect_or_local().await.expect("unable to establish PoolHelper"); 135 | 136 | let results = tokio::join!( 137 | #(#clones),* 138 | ); 139 | 140 | #(#unwraps)* 141 | }; 142 | 143 | TokenStream::from(ret) 144 | } 145 | -------------------------------------------------------------------------------- /yb/src/commands/init.rs: -------------------------------------------------------------------------------- 1 | use async_trait::async_trait; 2 | use color_eyre::Help; 3 | use indicatif::MultiProgress; 4 | use std::fs; 5 | 6 | use crate::commands::SubcommandRunner; 7 | use crate::core::tool_context::{ 8 | determine_tool_context, require_yb_env, ToolContext, YoctoEnvironment, 9 | }; 10 | use crate::errors::YbResult; 11 | use crate::ops::add_stream::{op_add_stream, AddStreamOptions}; 12 | use crate::yb_env::YbEnv; 13 | use crate::Config; 14 | 15 | /// Initialize a 'yb' environment 16 | /// 17 | /// When run in the context of an activated Yocto environment (e.g. you have sourced 'setupsdk'), 18 | /// the .yb control directory is created above the top-level repos directory (typically 'sources'). 
19 | /// For example if your layers live in yocto/sources then the control directory is created at yocto/.yb 20 | /// 21 | /// If no Yocto environment is activated then a directory called 'yocto' is created, the .yb control directory 22 | /// is initialized underneath it, and empty 'build' and 'sources' directories are created: 23 | /// 24 | /// yocto/ 25 | /// ├── build 26 | /// ├── sources 27 | /// └── .yb 28 | /// 29 | #[derive(Debug, clap::Parser)] 30 | #[clap(verbatim_doc_comment)] 31 | pub struct InitCommand { 32 | /// You can use the '--default-stream' flag to specify a default spec stream to be added. 33 | /// 34 | /// URI pointing to a default spec stream to add 35 | #[clap(name = "default-stream", short = 's', long)] 36 | default_stream: Option, 37 | 38 | #[clap(name = "default-spec", short = 'p', long, requires = "default-stream")] 39 | default_spec: Option, 40 | } 41 | 42 | #[async_trait] 43 | impl SubcommandRunner for InitCommand { 44 | async fn run(&self, config: &mut Config, _mp: &MultiProgress) -> YbResult<()> { 45 | let context = determine_tool_context(config)?; 46 | 47 | let new_yocto_dir; 48 | match context { 49 | Some(ToolContext::Yb(yb_env)) => { 50 | return Err(eyre::eyre!( 51 | "a .yb environment already exists at {:?}", 52 | yb_env.root_dir() 53 | )); 54 | } 55 | Some(ToolContext::YoctoEnv(context2)) => { 56 | return Err(eyre::eyre!( 57 | "cannot init yb env within an activated Yocto environment", 58 | ).suggestion("use `yb upgrade` if you want to create a yb env within your activated Yocto env") 59 | .suggestion("or, if you meant to use `yb init`, launch a new shell") 60 | .note(format!("Yocto environment in question is rooted at: {}", context2.sources_dir.parent().unwrap().to_owned().display())) 61 | .warning("since 0.0.12, the behavior of yb init has changed. 
See https://github.com/Agilent/yb/issues/3") 62 | .suppress_backtrace(true) 63 | ); 64 | } 65 | None => { 66 | // No environment, create a skeleton one 67 | let yocto_dir = config.cwd().join("yocto"); 68 | new_yocto_dir = yocto_dir.clone(); 69 | fs::create_dir(&yocto_dir)?; 70 | 71 | let sources_dir = yocto_dir.join("sources"); 72 | let build_dir = yocto_dir.join("build"); 73 | fs::create_dir(&sources_dir)?; 74 | fs::create_dir(&build_dir)?; 75 | 76 | let new_yocto_env = YoctoEnvironment { 77 | build_dir, 78 | sources_dir, 79 | poky_layer: None, 80 | }; 81 | 82 | let yb_env = YbEnv::initialize(&yocto_dir, &new_yocto_env)?; 83 | println!( 84 | "created skeleton Yocto environment at {:?}, yb env at {:?}", 85 | &yocto_dir, yb_env 86 | ); 87 | } 88 | }; 89 | 90 | if let Some(default_stream_uri) = &self.default_stream { 91 | let config = config.clone_with_cwd(new_yocto_dir); 92 | 93 | let mut add_stream_opts = AddStreamOptions::new(&config); 94 | add_stream_opts.uri(default_stream_uri.clone()); 95 | op_add_stream(add_stream_opts)?; 96 | 97 | if let Some(default_spec_name) = &self.default_spec { 98 | // TODO deduplicate code 99 | let mut yb_env = require_yb_env(&config)?; 100 | 101 | let spec = yb_env.find_spec(default_spec_name)?.cloned(); 102 | if let Some(spec) = spec { 103 | // TODO don't clone 104 | yb_env.activate_spec(spec)?; 105 | println!("Activated spec '{}'", &default_spec_name); 106 | } else { 107 | eyre::bail!("spec with name '{}' not found", &default_spec_name); 108 | } 109 | } 110 | } 111 | 112 | Ok(()) 113 | } 114 | } 115 | -------------------------------------------------------------------------------- /concurrent_git_pool/src/pool.rs: -------------------------------------------------------------------------------- 1 | use crate::error::{ServiceError, ServiceResult}; 2 | use futures::future::Shared; 3 | use futures::prelude::*; 4 | use sha2::{Digest, Sha256}; 5 | use std::collections::hash_map::Entry; 6 | use std::collections::HashMap; 7 | use 
std::future::Future; 8 | use std::path::{Path, PathBuf}; 9 | use std::pin::Pin; 10 | use tempfile::TempDir; 11 | use tokio::process::Command; 12 | use tokio::sync::Mutex; 13 | 14 | #[derive(Debug)] 15 | pub struct Pool { 16 | cache: Mutex>, 17 | root: TempDir, 18 | } 19 | 20 | impl Default for Pool { 21 | fn default() -> Self { 22 | Self::new() 23 | } 24 | } 25 | 26 | impl Pool { 27 | pub fn new() -> Self { 28 | Self { 29 | cache: Mutex::new(HashMap::new()), 30 | root: TempDir::new().unwrap(), 31 | } 32 | } 33 | 34 | pub async fn clone_in( 35 | &self, 36 | cwd: Option, 37 | remote: R, 38 | directory: Option, 39 | ) -> ServiceResult<()> 40 | where 41 | C: AsRef, 42 | R: AsRef, 43 | D: AsRef, 44 | { 45 | let remote = remote.as_ref(); 46 | let path = self.lookup_or_clone(remote).await.unwrap(); 47 | 48 | let mut command = Command::new("git"); 49 | command.env("GIT_TERMINAL_PROMPT", "0"); 50 | command.env("GIT_SSH_COMMAND", "ssh -o BatchMode=yes"); 51 | command.arg("clone").arg(remote); 52 | if let Some(directory) = directory { 53 | command.arg(directory.as_ref()); 54 | } 55 | 56 | command 57 | .arg("--reference") 58 | .arg(path.to_str().unwrap()) 59 | .arg("--dissociate"); 60 | 61 | if let Some(cwd) = cwd { 62 | command.current_dir(cwd); 63 | } 64 | 65 | command.output().await.map_err(|e| e.into()).map(|_| ()) 66 | } 67 | 68 | pub async fn lookup>(&self, uri: U) -> Option> { 69 | let uri = uri.as_ref(); 70 | 71 | let cache = self.cache.lock().await; 72 | match cache.get(uri) { 73 | Some(entry) => match entry { 74 | CacheEntry::Available(p) => Some(p.clone()), 75 | CacheEntry::Cloning(_) => None, 76 | }, 77 | _ => None, 78 | } 79 | } 80 | 81 | // Clone the given remote and add it to the cache, if not already present. 82 | // Returns path to the cached git repo. 
    pub async fn lookup_or_clone<R>(&self, remote: R) -> ServiceResult<PathBuf>
    where
        R: Into<String>,
    {
        let remote = remote.into();
        // The on-disk directory name is the SHA-256 hex digest of the remote
        // URI, so the same remote always maps to the same cache path.
        let dest_dir_name = {
            let mut hasher = Sha256::new();
            hasher.update(remote.clone());
            format!("{:x}", hasher.finalize())
        };

        let root = self.root.path().to_path_buf();

        let mut cache = self.cache.lock().await;
        match cache.entry(remote.clone()) {
            Entry::Occupied(entry) => {
                return match entry.get().clone() {
                    // Repo is already on-disk
                    CacheEntry::Available(p) => p,
                    CacheEntry::Cloning(future) => {
                        // Clone is in-flight: release the cache lock *before*
                        // awaiting so other lookups can proceed, then wait on
                        // the shared future started by the first caller.
                        drop(cache);
                        future.await
                    }
                };
            }
            Entry::Vacant(entry) => {
                // First request for this remote: start the clone and publish
                // the shared future so concurrent callers can await the same
                // underlying work instead of cloning again.
                let request = clone_repo(root, remote.clone(), dest_dir_name)
                    .boxed()
                    .shared();

                entry.insert(CacheEntry::Cloning(request.clone()));

                // Release the lock while the (potentially slow) clone runs.
                drop(cache);

                let ret = request.await;

                // Re-acquire lock on HashMap so we can change the entry
                let mut requests = self.cache.lock().await;
                requests.insert(remote.clone(), CacheEntry::Available(ret.clone()));

                ret
            }
        }
    }
}

// Actually invokes 'git clone': clones `remote` into `root`/`dest_dir_name`
// and returns that path on success. GIT_TERMINAL_PROMPT=0 makes git fail
// instead of hanging on a credential prompt.
async fn clone_repo(
    root: PathBuf,
    remote: String,
    dest_dir_name: String,
) -> ServiceResult<PathBuf> {
    let status = Command::new("git")
        .current_dir(&root)
        .env("GIT_TERMINAL_PROMPT", "0")
        .arg("clone")
        .arg(&remote)
        .arg(&dest_dir_name)
        .status()
        .await?;

    match status.success() {
        true => Ok(root.join(&dest_dir_name)),
        false => Err(ServiceError::CloneFailed(format!("{status}"))),
    }
}

// State of one cache slot: either the finished clone result (path or error)
// or the shared in-flight future that concurrent callers can await.
#[derive(Debug, Clone)]
enum CacheEntry {
    Available(ServiceResult<PathBuf>),
    Cloning(Shared<Pin<Box<dyn Future<Output = ServiceResult<PathBuf>> + Send>>>),
}
-------------------------------------------------------------------------------- 1 | use crate::errors::YbResult; 2 | use crate::spec::{ActiveSpec, Spec}; 3 | use crate::stream::Stream; 4 | use crate::util::paths::is_hidden; 5 | use eyre::Context; 6 | use slotmap::{new_key_type, SlotMap}; 7 | use std::collections::HashMap; 8 | use std::fs::File; 9 | use std::path::{Path, PathBuf}; 10 | use std::sync::Arc; 11 | use walkdir::WalkDir; 12 | 13 | new_key_type! { 14 | pub struct StreamKey; 15 | } 16 | 17 | #[derive(Debug)] 18 | pub struct StreamDb { 19 | streams: SlotMap, 20 | } 21 | 22 | impl Default for StreamDb { 23 | fn default() -> Self { 24 | Self::new() 25 | } 26 | } 27 | 28 | impl StreamDb { 29 | pub fn new() -> Self { 30 | Self { 31 | streams: SlotMap::with_key(), 32 | } 33 | } 34 | 35 | pub fn streams(&self) -> slotmap::basic::Iter<'_, StreamKey, Stream> { 36 | self.streams.iter() 37 | } 38 | 39 | pub fn is_empty(&self) -> bool { 40 | self.streams.is_empty() 41 | } 42 | 43 | pub fn has_broken(&self) -> bool { 44 | self.streams.iter().any(|stream| stream.1.is_broken()) 45 | } 46 | 47 | pub fn broken_streams(&self) -> HashMap> { 48 | self.streams 49 | .iter() 50 | .filter_map(|stream| stream.1.broken_reason().map(|reason| (stream.0, reason))) 51 | .collect() 52 | } 53 | 54 | pub fn load_all>(&mut self, streams_dir: P) -> YbResult<()> { 55 | // Iterate over each stream (which are subdirectories) 56 | for d in WalkDir::new(streams_dir) 57 | .max_depth(1) 58 | .min_depth(1) 59 | .into_iter() 60 | .filter_entry(|e| !is_hidden(e)) 61 | .filter(|e| e.as_ref().unwrap().file_type().is_dir()) 62 | { 63 | let stream_path = d?.into_path(); 64 | let stream_name = stream_path 65 | .file_name() 66 | .unwrap() 67 | .to_str() 68 | .unwrap() 69 | .to_string(); 70 | 71 | self.streams.insert_with_key(|key| { 72 | Stream::load(stream_path.clone(), stream_name, key).unwrap() 73 | }); 74 | } 75 | 76 | Ok(()) 77 | } 78 | 79 | pub fn get_stream_by_name>(&self, name: N) -> 
Option<&Stream> { 80 | let name = name.as_ref(); 81 | self.streams 82 | .iter() 83 | .find(|stream| stream.1.name() == name) 84 | .map(|item| item.1) 85 | } 86 | 87 | pub fn find_spec_by_name>(&self, name: N) -> YbResult> { 88 | let mut ret = None; 89 | 90 | for stream in self.streams.values() { 91 | let s = stream.get_spec_by_name(&name); 92 | if s.is_some() { 93 | if ret.is_some() { 94 | eyre::bail!("spec '{}' found in multiple streams", name.as_ref()); 95 | } 96 | ret = s; 97 | } 98 | } 99 | 100 | Ok(ret) 101 | } 102 | 103 | pub fn stream(&self, stream_key: StreamKey) -> Option<&Stream> { 104 | self.streams.get(stream_key) 105 | } 106 | 107 | pub fn stream_mut(&mut self, stream_key: StreamKey) -> Option<&mut Stream> { 108 | self.streams.get_mut(stream_key) 109 | } 110 | 111 | pub fn load_active_spec(&self, active_spec_file_path: PathBuf) -> YbResult { 112 | let active_spec_file = File::open(&active_spec_file_path)?; 113 | let mut active_spec = serde_yaml::from_reader::<_, ActiveSpec>(active_spec_file) 114 | .with_context(|| { 115 | format!( 116 | "failed to parse active spec file {}", 117 | &active_spec_file_path.display() 118 | ) 119 | })?; 120 | 121 | if let Some(stream) = self.get_stream_by_name(&active_spec.from_stream) { 122 | if stream.get_spec_by_name(active_spec.name()).is_none() { 123 | eyre::bail!("active spec '{}' claims to be a member of stream '{}', but it was not found there", active_spec.name(), active_spec.from_stream); 124 | } 125 | 126 | active_spec.stream_key = stream.key(); 127 | } else { 128 | eyre::bail!( 129 | "active spec '{}' refers to non-existent stream '{}'", 130 | active_spec.name(), 131 | active_spec.from_stream 132 | ); 133 | } 134 | 135 | Ok(active_spec) 136 | } 137 | 138 | pub fn make_active_spec(&self, spec: Spec) -> YbResult { 139 | if let Some(stream) = self.streams.get(spec.stream_key) { 140 | let key = spec.stream_key; 141 | Ok(ActiveSpec { 142 | spec, 143 | from_stream: stream.name().clone(), 144 | stream_key: key, 145 | }) 
146 | } else { 147 | eyre::bail!("spec '{}' not found in any stream", spec.name()) 148 | } 149 | } 150 | } 151 | -------------------------------------------------------------------------------- /yb/src/core/tool_context.rs: -------------------------------------------------------------------------------- 1 | use color_eyre::Help; 2 | use std::env; 3 | 4 | use std::path::{Path, PathBuf}; 5 | 6 | use git2::Repository; 7 | 8 | use crate::config::Config; 9 | use crate::errors::YbResult; 10 | use crate::util::paths::{list_subdirectories_sorted, run_which}; 11 | use crate::yb_env::{try_discover_yb_env, YbEnv}; 12 | 13 | #[derive(Debug)] 14 | pub enum ToolContext { 15 | Yb(YbEnv), 16 | YoctoEnv(YoctoEnvironment), 17 | } 18 | 19 | impl ToolContext { 20 | /// Returns a `Vec` of `Repository` objects representing each top-level git repo found in 21 | /// the sources directory. 22 | pub fn sources_repos(&self) -> YbResult> { 23 | list_subdirectories_sorted(&self.sources_dir()).map(|subdirs| { 24 | subdirs 25 | .iter() 26 | .filter_map(|d| { 27 | // TODO: this throws out every error - maybe need to be more careful 28 | Repository::discover(d).ok() 29 | }) 30 | .collect() 31 | }) 32 | } 33 | 34 | pub fn sources_dir(&self) -> PathBuf { 35 | match self { 36 | ToolContext::Yb(yb_env) => yb_env.sources_dir(), 37 | ToolContext::YoctoEnv(yocto_env) => yocto_env.sources_dir.clone(), 38 | } 39 | } 40 | 41 | pub fn build_dir(&self) -> PathBuf { 42 | match self { 43 | ToolContext::Yb(yb_env) => yb_env.build_dir(), 44 | ToolContext::YoctoEnv(yocto_env) => yocto_env.build_dir.clone(), 45 | } 46 | } 47 | 48 | pub fn poky_dir(&self) -> Option { 49 | match self { 50 | ToolContext::Yb(yb_env) => yb_env.poky_dir(), 51 | ToolContext::YoctoEnv(yocto_env) => yocto_env.poky_layer.clone(), 52 | } 53 | } 54 | } 55 | 56 | #[derive(Debug)] 57 | pub struct YoctoEnvironment { 58 | pub(crate) build_dir: PathBuf, 59 | pub(crate) poky_layer: Option, 60 | pub(crate) sources_dir: PathBuf, 61 | } 62 | 63 | pub 
fn determine_tool_context(config: &Config) -> YbResult> { 64 | if run_which("petalinux-build")?.is_some() { 65 | eyre::bail!("PetaLinux is not supported, but an active PetaLinux environment was detected"); 66 | } 67 | 68 | // Figure out what kind of context we are executing under 69 | if let Some(yb_env) = try_discover_yb_env(config.cwd())? { 70 | // A .yb directory was found 71 | return Ok(Some(ToolContext::Yb(yb_env))); 72 | } 73 | 74 | // Check for activated Yocto environment 75 | let bbpath = env::var("BBPATH").ok().map(PathBuf::from); 76 | let poky_layer_maybe = run_which("oe-buildenv-internal")? 77 | .and_then(|s| s.parent().map(|p| p.to_path_buf())) 78 | .and_then(|s| s.parent().map(|p| p.to_path_buf())); 79 | 80 | // Assume all other repos are siblings of the poky layer 81 | let sources_dir = poky_layer_maybe 82 | .as_ref() 83 | .map(|l| l.parent().map(|p| p.to_path_buf()).unwrap()); 84 | 85 | match (&bbpath, &poky_layer_maybe, &sources_dir) { 86 | (Some(build_dir), Some(poky_layer), Some(sources_dir)) => { 87 | // Check for bare Poky environments 88 | if let Some(build_dir_parent) = &build_dir.parent().map(Path::to_path_buf) { 89 | if poky_layer == build_dir_parent { 90 | eyre::bail!("Bare poky environments are not supported"); 91 | } 92 | } 93 | 94 | return Ok(Some(ToolContext::YoctoEnv(YoctoEnvironment { 95 | sources_dir: sources_dir.clone(), 96 | build_dir: build_dir.clone(), 97 | poky_layer: Some(poky_layer.clone()), 98 | }))); 99 | } 100 | (None, None, None) => {} 101 | _ => { 102 | eyre::bail!( 103 | "Found partially activated Yocto environment? 
{:?} {:?} {:?}", 104 | &bbpath, 105 | &poky_layer_maybe, 106 | &sources_dir 107 | ); 108 | } 109 | } 110 | 111 | Ok(None) 112 | } 113 | 114 | pub fn require_tool_context(config: &Config) -> YbResult { 115 | determine_tool_context(config).and_then(|c| { 116 | c.ok_or_else(|| { 117 | tracing::error!("expected a yb or Yocto environment"); 118 | eyre::eyre!("expected a yb or Yocto environment") 119 | .suggestion("use yb init") 120 | .suppress_backtrace(true) 121 | }) 122 | }) 123 | } 124 | 125 | pub fn require_yb_env(config: &Config) -> YbResult { 126 | determine_tool_context(config).and_then(|c| match c { 127 | None => eyre::bail!("expected a yb environment; no environment was found"), 128 | Some(ToolContext::Yb(yb_env)) => Ok(yb_env), 129 | Some(ToolContext::YoctoEnv(_)) => { 130 | eyre::bail!("expected a yb environment; a Yocto environment was found") 131 | } 132 | }) 133 | } 134 | 135 | pub fn maybe_yb_env(config: &Config) -> YbResult> { 136 | determine_tool_context(config).map(|c| { 137 | if let Some(ToolContext::Yb(yb_env)) = c { 138 | Some(yb_env) 139 | } else { 140 | None 141 | } 142 | }) 143 | } 144 | -------------------------------------------------------------------------------- /yb/src/spec.rs: -------------------------------------------------------------------------------- 1 | use crate::data_model::Layer; 2 | use color_eyre::Help; 3 | use eyre::Report; 4 | use itertools::Itertools; 5 | use serde::{Deserialize, Deserializer, Serialize}; 6 | use std::collections::{HashMap, HashSet}; 7 | use std::fs::File; 8 | use std::path::{Path, PathBuf}; 9 | 10 | use crate::errors::YbResult; 11 | use crate::stream_db::StreamKey; 12 | 13 | const SPEC_FORMAT_VERSION: u32 = 1; 14 | 15 | const fn default_format_version() -> u32 { 16 | SPEC_FORMAT_VERSION 17 | } 18 | 19 | #[derive(Debug, Serialize, Deserialize, Clone)] 20 | pub struct Spec { 21 | header: SpecHeader, 22 | pub(crate) repos: HashMap, 23 | 24 | #[serde(skip)] 25 | pub(crate) stream_key: StreamKey, 26 | } 27 | 28 
| impl PartialEq for Spec { 29 | fn eq(&self, other: &Self) -> bool { 30 | self.header == other.header && self.repos == other.repos 31 | } 32 | } 33 | 34 | impl Eq for Spec {} 35 | 36 | impl Spec { 37 | pub fn load(path: &Path, stream_key: StreamKey) -> YbResult { 38 | let f = File::open(path)?; 39 | let mut ret = serde_yaml::from_reader::<_, Self>(f).map_err(Report::from)?; 40 | ret.stream_key = stream_key; 41 | 42 | // Validation: ensure no overlap between repo URLs 43 | let mut urls_to_repos: HashMap<&String, HashSet<&String>> = HashMap::new(); 44 | for (repo_name, spec_repo) in &ret.repos { 45 | let entry = urls_to_repos.entry(&spec_repo.url).or_default(); 46 | entry.insert(repo_name); 47 | 48 | for (repo_name, spec_remote) in &spec_repo.extra_remotes { 49 | let entry = urls_to_repos.entry(&spec_remote.url).or_default(); 50 | entry.insert(repo_name); 51 | } 52 | } 53 | 54 | for (url, repo_names) in urls_to_repos { 55 | if repo_names.len() > 1 { 56 | return Err(eyre::eyre!( 57 | "URL {} corresponds to more than one spec repo: {}", 58 | url, 59 | repo_names.into_iter().join(", ") 60 | ) 61 | .suppress_backtrace(true)); 62 | } 63 | } 64 | 65 | Ok(ret) 66 | } 67 | 68 | pub fn name(&self) -> String { 69 | self.header.name.clone() 70 | } 71 | } 72 | 73 | #[derive(Debug, Serialize, Deserialize, Clone, Eq, PartialEq)] 74 | pub struct SpecHeader { 75 | #[serde(alias = "version", default = "default_format_version")] 76 | format_version: u32, 77 | name: String, 78 | } 79 | 80 | // https://github.com/serde-rs/serde/issues/1098#issuecomment-760711617 81 | fn deserialize_null_default<'de, D, T>(deserializer: D) -> Result 82 | where 83 | T: Default + Deserialize<'de>, 84 | D: Deserializer<'de>, 85 | { 86 | let opt = Option::deserialize(deserializer)?; 87 | Ok(opt.unwrap_or_default()) 88 | } 89 | 90 | #[derive(Debug, Serialize, Deserialize, Clone, Eq, PartialEq)] 91 | pub struct SpecRepo { 92 | pub(crate) url: String, 93 | pub(crate) refspec: String, 94 | #[serde( 95 | rename 
= "extra-remotes", 96 | default, 97 | deserialize_with = "deserialize_null_default" 98 | )] 99 | pub(crate) extra_remotes: HashMap, 100 | // each entry is a layer name 101 | pub(crate) layers: Option>, 102 | } 103 | 104 | #[derive(Debug, PartialEq, Eq, Hash)] 105 | pub enum SpecRepoLayer { 106 | Root, 107 | Named(String), 108 | } 109 | 110 | impl SpecRepo { 111 | pub fn layers(&self) -> Option> { 112 | self.layers.clone().map(|layer_names| { 113 | layer_names 114 | .keys() 115 | .map(|name| match name.as_str() { 116 | "." => SpecRepoLayer::Root, 117 | _ => SpecRepoLayer::Named(name.clone()), 118 | }) 119 | .collect() 120 | }) 121 | } 122 | 123 | pub fn resolved_layers(&self, repo_path: PathBuf) -> Option> { 124 | let repo_dir_name = repo_path.file_name().unwrap().to_str().unwrap().to_string(); 125 | self.layers().map(|mut layers| { 126 | layers 127 | .drain() 128 | .map(|layer| match layer { 129 | SpecRepoLayer::Root => Layer { 130 | name: repo_dir_name.clone(), 131 | path: repo_path.clone(), 132 | }, 133 | SpecRepoLayer::Named(name) => Layer { 134 | name: name.clone(), 135 | path: repo_path.join(name), 136 | }, 137 | }) 138 | .collect() 139 | }) 140 | } 141 | } 142 | 143 | #[derive(Debug, Serialize, Deserialize, Clone, Eq, PartialEq)] 144 | pub struct SpecRemote { 145 | pub(crate) url: String, 146 | } 147 | 148 | #[derive(Debug, Serialize, Deserialize, Clone)] 149 | pub struct ActiveSpec { 150 | pub(crate) spec: Spec, 151 | pub(crate) from_stream: String, 152 | 153 | #[serde(skip)] 154 | pub(crate) stream_key: StreamKey, 155 | } 156 | 157 | impl ActiveSpec { 158 | pub fn name(&self) -> String { 159 | self.spec.header.name.clone() 160 | } 161 | 162 | pub fn stream_key(&self) -> StreamKey { 163 | self.stream_key 164 | } 165 | } 166 | -------------------------------------------------------------------------------- /yb_tests/tests/integration_tests.rs: -------------------------------------------------------------------------------- 1 | use std::fs; 2 | use 
std::path::PathBuf; 3 | 4 | use crate::common::yb_cmd; 5 | use crate::common::DebugTempDir; 6 | use assert_cmd::Command; 7 | use color_eyre::eyre::Result; 8 | use concurrent_git_pool_proc_macros::clone_repos; 9 | 10 | mod common; 11 | 12 | #[test] 13 | fn yb_init_bare() -> Result<()> { 14 | let t = DebugTempDir::new()?; 15 | let path = t.path(); 16 | yb_cmd(path).arg("init").assert().success(); 17 | assert!(path.join("yocto").is_dir()); 18 | assert!(path.join("yocto").join(".yb").is_dir()); 19 | assert!(path.join("yocto").join("sources").is_dir()); 20 | assert!(path.join("yocto").join("build").is_dir()); 21 | Ok(()) 22 | } 23 | 24 | #[test] 25 | fn no_yb_init_over_existing() -> Result<()> { 26 | let t = DebugTempDir::new()?; 27 | let path = t.path(); 28 | // first init should work 29 | yb_cmd(path).arg("init").assert().success(); 30 | // second init should fail 31 | yb_cmd(path).arg("init").assert().code(1); 32 | Ok(()) 33 | } 34 | 35 | #[test] 36 | fn yb_init() -> Result<()> { 37 | let conf_repo = create_yb_conf_repo()?; 38 | 39 | let t = DebugTempDir::new()?; 40 | let path = t.path(); 41 | 42 | let yb_env_dir = path.join("yocto"); 43 | 44 | yb_cmd(path).arg("init").assert().success(); 45 | yb_cmd(&yb_env_dir) 46 | .arg("stream") 47 | .arg("add") 48 | .arg(conf_repo.path.path()) 49 | .assert() 50 | .success(); 51 | yb_cmd(&yb_env_dir) 52 | .arg("activate") 53 | .arg("zeus") 54 | .assert() 55 | .success(); 56 | yb_cmd(&yb_env_dir).arg("sync").arg("-a").assert().success(); 57 | 58 | Ok(()) 59 | } 60 | 61 | async fn setup_yocto_env() -> Result { 62 | let t = DebugTempDir::new()?; 63 | let path = t.path(); 64 | 65 | let yocto_dir = path.join("yocto"); 66 | fs::create_dir(&yocto_dir)?; 67 | 68 | let sources_dir = yocto_dir.join("sources"); 69 | fs::create_dir(&sources_dir)?; 70 | 71 | clone_repos! 
{ 72 | "https://github.com/yoctoproject/poky.git" in &sources_dir, 73 | "https://github.com/openembedded/meta-openembedded.git" in &sources_dir, 74 | } 75 | 76 | let build_dir = yocto_dir.join("build"); 77 | let conf_dir = build_dir.join("conf"); 78 | let bblayers = conf_dir.join("bblayers.conf"); 79 | fs::create_dir_all(conf_dir).unwrap(); 80 | let mut contents = 81 | r##"# POKY_BBLAYERS_CONF_VERSION is increased each time build/conf/bblayers.conf 82 | # changes incompatibly 83 | POKY_BBLAYERS_CONF_VERSION = "2" 84 | 85 | BBPATH = "${TOPDIR}" 86 | BBFILES ??= "" 87 | BBLAYERS ?= " "## 88 | .to_string(); 89 | 90 | contents += sources_dir.join("poky").to_str().unwrap(); 91 | contents.push(' '); 92 | contents += sources_dir.join("meta-openembedded").to_str().unwrap(); 93 | contents.push('"'); 94 | 95 | fs::write(bblayers, contents).unwrap(); 96 | 97 | Command::new("sh") 98 | .current_dir(&yocto_dir) 99 | .arg("-c") 100 | .arg(". sources/poky/oe-init-build-env") 101 | .unwrap(); 102 | 103 | let path_var = std::env::var("PATH").unwrap(); 104 | let path_var = format!( 105 | "{}:{}:{}", 106 | sources_dir.join("poky").join("scripts").to_str().unwrap(), 107 | sources_dir 108 | .join("poky") 109 | .join("bitbake") 110 | .join("bin") 111 | .to_str() 112 | .unwrap(), 113 | path_var 114 | ); 115 | 116 | Ok(YoctoEnv { 117 | root: t, 118 | sources_dir, 119 | yocto_dir, 120 | build_dir, 121 | path_var, 122 | }) 123 | } 124 | 125 | struct YoctoEnv { 126 | root: DebugTempDir, 127 | sources_dir: PathBuf, 128 | yocto_dir: PathBuf, 129 | build_dir: PathBuf, 130 | path_var: String, 131 | } 132 | 133 | #[tokio::test] 134 | async fn yb_upgrade() -> Result<()> { 135 | // Test that `yb upgrade` can upgrade an existing Yocto env 136 | let env = setup_yocto_env().await?; 137 | 138 | yb_cmd(env.yocto_dir) 139 | .arg("upgrade") 140 | .env("PATH", env.path_var) 141 | .env("BBPATH", env.build_dir.to_str().unwrap()) 142 | .assert() 143 | .success(); 144 | 145 | Ok(()) 146 | } 147 | 148 | 
#[tokio::test] 149 | async fn yb_init_fails_on_yocto_env() -> Result<()> { 150 | // Test that `yb init` fails inside a Yocto env 151 | let env = setup_yocto_env().await?; 152 | 153 | let output = yb_cmd(env.yocto_dir) 154 | .arg("init") 155 | .env("PATH", env.path_var) 156 | .env("BBPATH", env.build_dir.to_str().unwrap()) 157 | .output() 158 | .unwrap(); 159 | 160 | let stderr = std::str::from_utf8(&output.stderr)?; 161 | assert!(stderr.contains("cannot init yb env within an activated Yocto environment")); 162 | 163 | Ok(()) 164 | } 165 | 166 | fn create_yb_conf_repo() -> Result { 167 | let dir = DebugTempDir::new().unwrap(); 168 | let dir_path = dir.path().to_path_buf(); 169 | 170 | Command::new("git") 171 | .arg("init") 172 | .current_dir(&dir_path) 173 | .output()?; 174 | 175 | let basic_yaml = include_bytes!("resources/confs/basic.yaml"); 176 | fs::write(dir_path.join("basic.yaml"), basic_yaml).unwrap(); 177 | 178 | Command::new("git") 179 | .current_dir(&dir_path) 180 | .arg("add") 181 | .arg("basic.yaml") 182 | .output()?; 183 | 184 | Command::new("git") 185 | .current_dir(&dir_path) 186 | .arg("commit") 187 | .arg("-m") 188 | .arg("'initial'") 189 | .output()?; 190 | 191 | Ok(GitRepo { path: dir }) 192 | } 193 | 194 | #[derive(Debug)] 195 | struct GitRepo { 196 | path: DebugTempDir, 197 | } 198 | -------------------------------------------------------------------------------- /yb/src/stream.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashMap; 2 | use std::fmt; 3 | use std::fmt::{Debug, Formatter}; 4 | use std::fs::File; 5 | use std::path::PathBuf; 6 | 7 | use std::sync::{Arc, Mutex}; 8 | 9 | use git2::{FetchOptions, Repository}; 10 | use serde::{Deserialize, Serialize}; 11 | use walkdir::WalkDir; 12 | 13 | use crate::errors::YbResult; 14 | use crate::spec::Spec; 15 | use crate::stream_db::StreamKey; 16 | use crate::util::git::{ 17 | do_merge, get_current_local_branch_name, 
get_remote_name_for_current_branch, 18 | ssh_agent_remote_callbacks, 19 | }; 20 | use crate::util::paths::{is_hidden, is_yaml_file}; 21 | 22 | // TODO: don't make pub, move logic here? 23 | const STREAM_CONFIG_FILE_VERSION: u32 = 1; 24 | pub const STREAM_CONTENT_ROOT_SUBDIR: &str = "contents"; 25 | pub const STREAM_CONFIG_FILE: &str = "stream.yaml"; 26 | 27 | #[derive(Debug, Deserialize, Serialize, Eq, PartialEq)] 28 | pub enum StreamKind { 29 | Git, 30 | } 31 | 32 | #[derive(Debug, Deserialize, Serialize, Eq, PartialEq)] 33 | pub struct StreamConfig { 34 | kind: StreamKind, 35 | format_version: u32, 36 | } 37 | 38 | impl StreamConfig { 39 | pub fn new(kind: StreamKind) -> Self { 40 | StreamConfig { 41 | kind, 42 | format_version: STREAM_CONFIG_FILE_VERSION, 43 | } 44 | } 45 | } 46 | 47 | pub struct Stream { 48 | path: PathBuf, 49 | name: String, 50 | repo: Mutex, 51 | config: StreamConfig, 52 | specs: StreamSpecs, 53 | key: StreamKey, 54 | } 55 | 56 | impl Debug for Stream { 57 | fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { 58 | f.debug_struct("Stream") 59 | .field("path", &self.path) 60 | .field("name", &self.name) 61 | .field("specs", &self.specs) 62 | .field("key", &self.key) 63 | .field("config", &self.config) 64 | .finish_non_exhaustive() 65 | } 66 | } 67 | // 68 | // impl PartialEq for Stream { 69 | // fn eq(&self, other: &Self) -> bool { 70 | // self.path == other.path 71 | // && self.name == other.name 72 | // && self.specs == other.specs 73 | // && self.config == other.config 74 | // } 75 | // } 76 | // 77 | // impl Eq for Stream {} 78 | 79 | impl Stream { 80 | pub fn load(path: PathBuf, name: String, stream_key: StreamKey) -> YbResult { 81 | let f = File::open(path.join(STREAM_CONFIG_FILE))?; 82 | let config: StreamConfig = serde_yaml::from_reader(&f)?; 83 | 84 | let stream_contents_dir = path.join(STREAM_CONTENT_ROOT_SUBDIR); 85 | let repo = Repository::discover(&stream_contents_dir)?; 86 | 87 | Ok(Stream { 88 | path, 89 | name, 90 | specs: 
Self::load_specs(stream_contents_dir, stream_key)?, 91 | repo: Mutex::new(repo), 92 | config, 93 | key: stream_key, 94 | }) 95 | } 96 | 97 | fn load_specs(stream_contents_dir: PathBuf, stream_key: StreamKey) -> YbResult { 98 | let mut specs = HashMap::new(); 99 | 100 | // Iterate over each spec yaml 101 | for spec_yaml in WalkDir::new(&stream_contents_dir) 102 | .into_iter() 103 | .filter_entry(|e| !is_hidden(e)) 104 | .filter(|e| is_yaml_file(e.as_ref().unwrap())) 105 | { 106 | let spec_path = spec_yaml?.into_path(); 107 | match Spec::load(&spec_path, stream_key) { 108 | Ok(spec) => { 109 | specs.insert(spec.name(), spec); 110 | } 111 | Err(e) => { 112 | // Error encountered while loading spec 113 | return Ok(StreamSpecs::Broken(Arc::new(e))); 114 | } 115 | } 116 | } 117 | 118 | Ok(StreamSpecs::Loaded(specs)) 119 | } 120 | 121 | pub fn fetch(&self) -> YbResult<()> { 122 | let repo = self.repo.lock().unwrap(); 123 | 124 | let upstream_name = get_remote_name_for_current_branch(&repo)?.unwrap(); 125 | 126 | let mut remote = repo.find_remote(&upstream_name)?; 127 | let mut fetch_options = FetchOptions::new(); 128 | fetch_options.remote_callbacks(ssh_agent_remote_callbacks()); 129 | remote.fetch(&[] as &[&str], Some(&mut fetch_options), None)?; 130 | Ok(()) 131 | } 132 | 133 | pub fn pull(&mut self) -> YbResult<()> { 134 | self.fetch()?; 135 | 136 | let repo = self.repo.lock().unwrap(); 137 | let current_branch_name = get_current_local_branch_name(&repo)?; 138 | 139 | let fetch_head = repo.find_reference("FETCH_HEAD")?; 140 | let fetch_commit = repo.reference_to_annotated_commit(&fetch_head)?; 141 | 142 | do_merge(&repo, ¤t_branch_name, fetch_commit)?; 143 | 144 | let stream_contents_dir = self.path.join(STREAM_CONTENT_ROOT_SUBDIR); 145 | self.specs = Self::load_specs(stream_contents_dir, self.key)?; 146 | 147 | Ok(()) 148 | } 149 | 150 | pub fn get_spec_by_name>(&self, name: S) -> Option<&Spec> { 151 | match &self.specs { 152 | StreamSpecs::Loaded(specs) => 
specs.get(name.as_ref()), 153 | StreamSpecs::Broken(..) => None, 154 | } 155 | } 156 | 157 | pub fn name(&self) -> &String { 158 | &self.name 159 | } 160 | 161 | pub fn key(&self) -> StreamKey { 162 | self.key 163 | } 164 | 165 | pub fn is_broken(&self) -> bool { 166 | matches!(self.specs, StreamSpecs::Broken(..)) 167 | } 168 | 169 | pub fn broken_reason(&self) -> Option> { 170 | match &self.specs { 171 | StreamSpecs::Loaded(..) => None, 172 | StreamSpecs::Broken(e) => Some(e.clone()), 173 | } 174 | } 175 | 176 | pub fn specs<'a>(&'a self) -> Box + 'a> { 177 | match &self.specs { 178 | StreamSpecs::Loaded(specs) => Box::new(specs.iter()), 179 | StreamSpecs::Broken(..) => Box::new(std::iter::empty()), 180 | } 181 | } 182 | } 183 | 184 | #[derive(Debug)] 185 | pub enum StreamSpecs { 186 | Loaded(HashMap), 187 | Broken(Arc), 188 | } 189 | -------------------------------------------------------------------------------- /yb/src/commands/sync/actions/basic.rs: -------------------------------------------------------------------------------- 1 | use async_trait::async_trait; 2 | use std::path::PathBuf; 3 | use std::process::{Command, Stdio}; 4 | 5 | use crate::commands::sync::actions::SyncAction; 6 | use crate::data_model::git::RemoteTrackingBranch; 7 | use crate::errors::YbResult; 8 | use crate::spec::SpecRepo; 9 | use concurrent_git_pool::PoolHelper; 10 | 11 | #[derive(Debug)] 12 | pub struct ResetGitWorkdirSyncAction { 13 | repo_path: PathBuf, 14 | } 15 | 16 | impl ResetGitWorkdirSyncAction { 17 | pub fn new(repo_path: PathBuf) -> Self { 18 | Self { repo_path } 19 | } 20 | } 21 | 22 | #[async_trait] 23 | impl SyncAction for ResetGitWorkdirSyncAction { 24 | fn is_force_required(&self) -> bool { 25 | true 26 | } 27 | 28 | async fn apply(&self, _pool: &PoolHelper) -> YbResult<()> { 29 | Command::new("git") 30 | .arg("reset") 31 | .arg("--hard") 32 | .stdout(Stdio::null()) 33 | .stderr(Stdio::null()) 34 | .current_dir(&self.repo_path) 35 | .output()?; 36 | Ok(()) 37 | } 38 
| } 39 | 40 | #[derive(Debug)] 41 | pub struct CheckoutBranchSyncAction { 42 | repo_path: PathBuf, 43 | branch_name: String, 44 | } 45 | 46 | impl CheckoutBranchSyncAction { 47 | pub fn new(repo_path: PathBuf, branch_name: String) -> Self { 48 | Self { 49 | repo_path, 50 | branch_name, 51 | } 52 | } 53 | } 54 | 55 | #[async_trait] 56 | impl SyncAction for CheckoutBranchSyncAction { 57 | fn is_force_required(&self) -> bool { 58 | false 59 | } 60 | 61 | async fn apply(&self, _pool: &PoolHelper) -> YbResult<()> { 62 | Command::new("git") 63 | .arg("checkout") 64 | .arg(&self.branch_name) 65 | //.stdout(Stdio::null()) 66 | //.stderr(Stdio::null()) 67 | .current_dir(&self.repo_path) 68 | .output()?; 69 | 70 | Ok(()) 71 | } 72 | } 73 | 74 | #[derive(Debug)] 75 | pub struct FastForwardPullSyncAction { 76 | repo_path: PathBuf, 77 | // TODO number of commits? 78 | } 79 | 80 | impl FastForwardPullSyncAction { 81 | pub fn new(repo_path: PathBuf) -> Self { 82 | Self { repo_path } 83 | } 84 | } 85 | 86 | #[async_trait] 87 | impl SyncAction for FastForwardPullSyncAction { 88 | fn is_force_required(&self) -> bool { 89 | false 90 | } 91 | 92 | async fn apply(&self, _pool: &PoolHelper) -> YbResult<()> { 93 | Command::new("git") 94 | .arg("pull") 95 | .arg("--ff-only") 96 | .stdout(Stdio::null()) 97 | .stderr(Stdio::null()) 98 | .current_dir(&self.repo_path) 99 | .output()?; 100 | Ok(()) 101 | } 102 | } 103 | 104 | #[derive(Debug)] 105 | pub struct CreateLocalTrackingBranchSyncAction { 106 | repo_path: PathBuf, 107 | local_branch_name: String, 108 | remote_tracking_branch: RemoteTrackingBranch, 109 | } 110 | 111 | impl CreateLocalTrackingBranchSyncAction { 112 | pub fn new( 113 | repo_path: PathBuf, 114 | local_branch_name: String, 115 | remote_tracking_branch: RemoteTrackingBranch, 116 | ) -> Self { 117 | Self { 118 | repo_path, 119 | local_branch_name, 120 | remote_tracking_branch, 121 | } 122 | } 123 | } 124 | 125 | #[async_trait] 126 | impl SyncAction for 
CreateLocalTrackingBranchSyncAction { 127 | fn is_force_required(&self) -> bool { 128 | false 129 | } 130 | 131 | async fn apply(&self, _pool: &PoolHelper) -> YbResult<()> { 132 | Command::new("git") 133 | .arg("checkout") 134 | .arg("-b") 135 | .arg(&self.local_branch_name) 136 | .arg("--track") 137 | .arg(self.remote_tracking_branch.to_string()) 138 | .stdout(Stdio::null()) 139 | .stderr(Stdio::null()) 140 | .current_dir(&self.repo_path) 141 | .output()?; 142 | Ok(()) 143 | } 144 | } 145 | 146 | #[derive(Debug)] 147 | pub struct CloneRepoSyncAction { 148 | dest_repo_path: PathBuf, 149 | spec_repo: SpecRepo, 150 | } 151 | 152 | impl CloneRepoSyncAction { 153 | pub fn new(dest_repo_path: PathBuf, spec_repo: SpecRepo) -> Self { 154 | Self { 155 | dest_repo_path, 156 | spec_repo, 157 | } 158 | } 159 | } 160 | 161 | #[async_trait] 162 | impl SyncAction for CloneRepoSyncAction { 163 | fn is_force_required(&self) -> bool { 164 | false 165 | } 166 | 167 | async fn apply(&self, pool: &PoolHelper) -> YbResult<()> { 168 | pool.clone_in( 169 | &self.spec_repo.url, 170 | None, 171 | Some(self.dest_repo_path.to_str().unwrap().to_string()), 172 | ) 173 | .await 174 | .unwrap()?; 175 | 176 | assert_cmd::Command::new("git") 177 | .current_dir(&self.dest_repo_path) 178 | .arg("checkout") 179 | .arg(&self.spec_repo.refspec) 180 | .assert() 181 | .success(); 182 | Ok(()) 183 | } 184 | } 185 | 186 | #[cfg(test)] 187 | mod tests { 188 | use crate::commands::sync::actions::{CloneRepoSyncAction, SyncAction}; 189 | use crate::spec::SpecRepo; 190 | use crate::util::debug_temp_dir::DebugTempDir; 191 | use assert_cmd::Command; 192 | use concurrent_git_pool::PoolHelper; 193 | 194 | #[tokio::test] 195 | async fn clone_action_checks_out_correct_refspec() { 196 | let dir = DebugTempDir::new().unwrap(); 197 | let dir_path = dir.path().to_path_buf(); 198 | 199 | let pool = PoolHelper::connect_or_local().await.unwrap(); 200 | 201 | let spec_repo = SpecRepo { 202 | url: 
"https://github.com/agherzan/meta-raspberrypi.git".to_string(), 203 | refspec: "honister".to_string(), 204 | extra_remotes: Default::default(), 205 | layers: None, 206 | }; 207 | 208 | let action = CloneRepoSyncAction::new(dir_path.clone(), spec_repo); 209 | action.apply(&pool).await.unwrap(); 210 | 211 | let mut branch_cmd = Command::new("git"); 212 | branch_cmd 213 | .current_dir(dir_path) 214 | .arg("branch") 215 | .arg("--show-current"); 216 | let branch_cmd_output = branch_cmd.output().unwrap(); 217 | let current_branch = std::str::from_utf8(&branch_cmd_output.stdout) 218 | .unwrap() 219 | .trim(); 220 | assert_eq!(current_branch, "honister"); 221 | } 222 | } 223 | -------------------------------------------------------------------------------- /yb/src/yb_env.rs: -------------------------------------------------------------------------------- 1 | use core::fmt::{self, Debug, Formatter}; 2 | use eyre::Context; 3 | use std::collections::HashMap; 4 | use std::fs; 5 | use std::fs::{File, OpenOptions}; 6 | use std::io::Read; 7 | use std::path::{Path, PathBuf}; 8 | use std::sync::Arc; 9 | 10 | use crate::core::tool_context::YoctoEnvironment; 11 | use crate::errors::YbResult; 12 | use crate::spec::{ActiveSpec, Spec}; 13 | use crate::stream::Stream; 14 | use crate::stream_db::{StreamDb, StreamKey}; 15 | use crate::util::paths::find_dir_recurse_upwards; 16 | use crate::yb_conf::YbConf; 17 | 18 | const YB_ENV_DIRECTORY: &str = ".yb"; 19 | const STREAMS_SUBDIR: &str = "streams"; 20 | const YB_CONF_FILE: &str = "yb.yaml"; 21 | const ACTIVE_SPEC_FILE: &str = "active_spec.yaml"; 22 | 23 | #[derive(Debug, Clone)] 24 | pub enum ActiveSpecStatus { 25 | Active(ActiveSpec), 26 | StreamsBroken(HashMap>), 27 | } 28 | 29 | pub struct YbEnv { 30 | /// Absolute path to the .yb directory 31 | dir: PathBuf, 32 | config: YbConf, 33 | active_spec_status: Option, 34 | streams: StreamDb, 35 | } 36 | 37 | impl Debug for YbEnv { 38 | fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { 39 | 
f.debug_struct("YbEnv") 40 | .field("dir", &self.dir) 41 | .field("config", &self.config) 42 | .field("active_spec_status", &self.active_spec_status) 43 | .field("streams", &self.streams) 44 | .finish_non_exhaustive() 45 | } 46 | } 47 | 48 | impl YbEnv { 49 | fn new( 50 | dir: PathBuf, 51 | config: YbConf, 52 | active_spec: Option, 53 | streams: StreamDb, 54 | ) -> Self { 55 | Self { 56 | dir, 57 | config, 58 | active_spec_status: active_spec, 59 | streams, 60 | } 61 | } 62 | 63 | pub fn stream_db(&self) -> &StreamDb { 64 | &self.streams 65 | } 66 | 67 | pub fn stream_db_mut(&mut self) -> &mut StreamDb { 68 | &mut self.streams 69 | } 70 | 71 | pub fn activate_spec(&mut self, spec: Spec) -> YbResult<()> { 72 | let active_spec = self.streams.make_active_spec(spec)?; 73 | 74 | let dest = self.dir.join(ACTIVE_SPEC_FILE); 75 | let f = OpenOptions::new() 76 | .write(true) 77 | .create(true) 78 | .truncate(true) 79 | .open(dest)?; 80 | serde_yaml::to_writer(&f, &active_spec)?; 81 | 82 | self.active_spec_status = Some(ActiveSpecStatus::Active(active_spec)); 83 | Ok(()) 84 | } 85 | 86 | pub fn active_stream_mut(&mut self) -> Option<&mut Stream> { 87 | let key = self.active_spec_status.as_ref().and_then(|a| match a { 88 | ActiveSpecStatus::Active(spec) => Some(spec.stream_key), 89 | _ => None, 90 | }); 91 | 92 | key.and_then(move |k| self.streams.stream_mut(k)) 93 | } 94 | 95 | pub fn find_spec>(&self, name: S) -> YbResult> { 96 | self.streams.find_spec_by_name(name) 97 | } 98 | 99 | pub fn root_dir(&self) -> &PathBuf { 100 | &self.dir 101 | } 102 | 103 | pub fn active_spec_status(&self) -> Option<&ActiveSpecStatus> { 104 | self.active_spec_status.as_ref() 105 | } 106 | 107 | pub fn build_dir(&self) -> PathBuf { 108 | self.dir.join(self.config.build_dir_relative()) 109 | } 110 | 111 | pub fn sources_dir(&self) -> PathBuf { 112 | self.dir.join(self.config.sources_dir_relative()) 113 | } 114 | 115 | pub fn poky_dir(&self) -> Option { 116 | 
self.config.poky_dir_relative().map(|p| self.dir.join(p)) 117 | } 118 | 119 | pub fn yb_dir(&self) -> &PathBuf { 120 | &self.dir 121 | } 122 | 123 | pub fn streams_dir(&self) -> PathBuf { 124 | self.yb_dir().join(STREAMS_SUBDIR) 125 | } 126 | 127 | pub fn initialize>( 128 | location: S, 129 | yocto_env: &YoctoEnvironment, 130 | ) -> YbResult { 131 | // TODO: create in temp directory then move over? 132 | let yb_dir = location.into().join(YB_ENV_DIRECTORY); 133 | println!("creating at {:?}", &yb_dir); 134 | fs::create_dir(&yb_dir)?; 135 | 136 | // Create a default yb.yaml file and write it to disk 137 | let conf = YbConf::new_from_yocto_env(&yb_dir, yocto_env)?; 138 | let f = OpenOptions::new() 139 | .write(true) 140 | .create(true) 141 | .open(yb_dir.join(YB_CONF_FILE))?; 142 | serde_yaml::to_writer(f, &conf)?; 143 | 144 | // Create the streams dir 145 | let streams_dir = yb_dir.join(STREAMS_SUBDIR); 146 | fs::create_dir(streams_dir)?; 147 | 148 | Ok(YbEnv::new(yb_dir, conf, None, StreamDb::new())) 149 | } 150 | } 151 | 152 | /// Search upwards from `start_point` for a .yb directory and load the environment if found. 153 | pub fn try_discover_yb_env>(start_point: S) -> YbResult> { 154 | // Locate the hidden .yb directory 155 | find_dir_recurse_upwards(start_point, YB_ENV_DIRECTORY)? 156 | .map(|yb_dir| -> YbResult<_> { 157 | tracing::info!("found .yb directory at {:?}", yb_dir); 158 | let conf_file = yb_dir.join(YB_CONF_FILE); 159 | // TODO handle missing conf file? 160 | assert!(conf_file.is_file()); 161 | 162 | let mut config_file_data = Vec::new(); 163 | File::open(&conf_file) 164 | .with_context(|| format!("failed to open conf file {}", conf_file.display()))? 
165 | .read_to_end(&mut config_file_data)?; 166 | 167 | let conf: YbConf = serde_yaml::from_slice(config_file_data.as_slice()).unwrap(); 168 | 169 | let mut stream_db = StreamDb::new(); 170 | 171 | let streams_dir = yb_dir.join(STREAMS_SUBDIR); 172 | if streams_dir.is_dir() { 173 | stream_db.load_all(streams_dir)?; 174 | } 175 | 176 | let active_spec; 177 | let broken_streams = stream_db.broken_streams(); 178 | if !broken_streams.is_empty() { 179 | // TODO: active spec is not necessarily part of broken stream(s); could still try to load it 180 | active_spec = Some(ActiveSpecStatus::StreamsBroken(broken_streams)); 181 | } else { 182 | let active_spec_file_path = yb_dir.join(ACTIVE_SPEC_FILE); 183 | if active_spec_file_path.is_file() { 184 | active_spec = Some(ActiveSpecStatus::Active( 185 | stream_db.load_active_spec(active_spec_file_path)?, 186 | )); 187 | } else { 188 | active_spec = None; 189 | } 190 | } 191 | 192 | Ok(YbEnv::new(yb_dir, conf, active_spec, stream_db)) 193 | }) 194 | .transpose() 195 | } 196 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # yb (*Y*octo *B*uddy) 2 | 3 | yb is designed to make it easy to set up and (perhaps more importantly) keep Yocto environments **up-to-date and in-sync** with your team. It is early in development, but we are releasing it now as it is already useful. 4 | 5 | Motivation 6 | =========== 7 | 8 | This tool was heavily inspired by [kas](https://github.com/siemens/kas), [myrepos](https://myrepos.branchable.com/), and Google's [repo](https://gerrit.googlesource.com/git-repo) tool. We are also familiar with [whisk](https://github.com/garmin/whisk). 9 | 10 | All of these tools are great for doing initial environment setup for CI and/or new developers coming onboard. In our estimation, however, that is the easy part.
The harder part is ensuring your environment stays up-to-date as your product(s) evolve through development: 11 | * Layers get added, removed, and updated 12 | * DISTRO and MACHINE configurations are added 13 | * Recommended local.conf settings may drift over time: perhaps new SSTATE_MIRRORS or BB_HASHSERVE servers come on-line. 14 | 15 | Historically, it has been painful to keep all of this in-sync, usually manifesting as emails sent team-wide every time bblayers.conf needs to change. 16 | 17 | Goals and non-goals 18 | ==================== 19 | 20 | yb strives to be a tool for helping developers with their everyday development tasks. Unlike kas, it does *not* enforce separation of build environment and host. yb is designed to complement the Yocto workflow you're already used to - for example, there is no `yb shell` command. You'll run the `bitbake` command as usual. 21 | 22 | Specs and streams: keeping in-sync 23 | ========================================== 24 | 25 | Much like kas' configuration files (see https://kas.readthedocs.io/en/latest/userguide.html), yb has **specs** which are also .yaml files. In fact, the format is nearly the same (though interoperability is not guaranteed - if that's a feature you want please open an issue). 26 | 27 | A basic spec looks like this: 28 | 29 |

30 | Basic spec (click to expand) 31 | 32 | ```yaml 33 | header: 34 | version: 1 35 | name: "nightly" 36 | 37 | repos: 38 | poky: 39 | url: "git://git.yoctoproject.org/poky" 40 | refspec: "honister" 41 | layers: 42 | meta: 43 | meta-poky: 44 | 45 | meta-openembedded: 46 | url: "git://git.openembedded.org/meta-openembedded" 47 | refspec: "honister" 48 | layers: 49 | meta-networking: 50 | meta-python: 51 | meta-filesystems: 52 | meta-webserver: 53 | meta-oe: 54 | ``` 55 |
56 | 57 | Specs live in **streams**. A stream is just a git repo that you've hosted somewhere accessible by your developers. 58 | 59 | If you need to add a layer to your build, just do it in the spec and commit the change to the stream. Developers using that stream with `yb` will automatically have the stream refreshed the next time they run `yb status` or `yb sync` (see below). 60 | 61 | # Installation 62 | 63 | The easiest way to install yb is to use the pre-compiled, statically-linked binary available here: https://github.com/Agilent/yb/releases/latest. Simply download and unpack into PATH. It should run on any modern-ish 64-bit Linux system. If you want binaries for other systems (e.g. Windows or 32-bit Linux) please file an issue. 64 | 65 | Alternatively, you can build yb yourself. You'll need a nightly Rust compiler. To build and run, use ```cargo run -- --help``` (equivalent to doing `yb --help`). 66 | 67 | # Basic usage 68 | 69 | yb supports two kinds of environments ("envs" for short): vanilla Yocto and yb. You'll know you have a yb env if you see a hidden .yb/ directory inside your yocto/ directory. 70 | 71 | ## Vanilla Yocto 72 | 73 | A vanilla Yocto env is one in which you haven't (yet) used `yb init` to initialize a yb env. In this case, yb operates with reduced functionality but can still be extremely useful. See for example the `yb status` command below. To use: 74 | 75 | 1. In a terminal, activate your Yocto env. This is usually a matter of doing `source setupsdk` or `source oe-init-build-env`. 76 | 2. Try running `yb status` 77 | 78 | ## Converting vanilla Yocto env to yb env 79 | 80 | To do the conversion, simply activate your Yocto env as usual and then run `yb upgrade`: 81 | 82 | 1. `source setupsdk` or `source oe-init-build-env` 83 | 2. `yb upgrade` (or `yb upgrade -s PATH_TO_STREAM`) 84 | 3. Try running `yb status` 85 | 86 | For a demo stream you can use right now, see https://github.com/Agilent/yb-demo-specs. 
87 | 88 | ## Creating a new yb env from scratch 89 | 90 | You can create a new yb env (and skeleton yocto/ directory) by running `yb init` outside of any existing environments: 91 | 92 | 1. Ensure you are _not_ in the context of an existing yb or vanilla Yocto env. If you are, launch a new terminal and/or cd somewhere else. 93 | 2. `yb init` (or `yb init -s PATH_TO_STREAM`) 94 | 3. cd yocto 95 | 96 | Note that even if you pass a stream to `yb init`, no layers are cloned yet. You'll need `yb sync` for that (see below). 97 | 98 | For a demo stream you can use right now, see https://github.com/Agilent/yb-demo-specs. 99 | 100 | # Commands 101 | 102 | ## `yb self-update`: check GitHub for latest release 103 | 104 | This command checks GitHub for the latest release of yb. If a new release is found it is automatically downloaded. 105 | 106 | ```bash 107 | yb self-update 108 | ``` 109 | 110 | ## `yb init`: create a new yb env 111 | | Vanilla Yocto env | yb env | 112 | | ------------- | ------------- | 113 | | :x: | :x: | 114 | 115 | See https://github.com/Agilent/yb#creating-a-new-yb-env-from-scratch 116 | 117 | ## `yb upgrade`: convert vanilla Yocto env to a yb env 118 | | Vanilla Yocto env | yb env | 119 | | ------------- | ------------- | 120 | | :heavy_check_mark: | :x: | 121 | 122 | See https://github.com/Agilent/yb#converting-vanilla-yocto-env-to-yb-env 123 | 124 | ## `yb activate`: set the active spec 125 | | Vanilla Yocto env | yb env | 126 | | ------------- | ------------- | 127 | | :x: | :heavy_check_mark: | 128 | 129 | Use this command to set the active spec. It doesn't actually make any changes to your layers or confs. You'll need `yb sync` for that (see below). 
130 | 131 | ```bash 132 | yb activate nightly 133 | ``` 134 | 135 | ## `yb status`: report env status 136 | | Vanilla Yocto env | yb env | 137 | | ------------- | ------------- | 138 | | :heavy_check_mark: | :heavy_check_mark: | 139 | 140 | The `yb status` command was designed to provide useful status information across all the repos in your Yocto env. 141 | 142 | Better yet, it also works in vanilla Yocto envs. As long as you run `yb status` in a terminal in which you have an activated Yocto env (i.e. `which bitbake` prints a path), yb will find the path to where your layers live and report their statuses. 143 | 144 | | ![yb status with vanilla Yocto env](/images/yb.0.0.11.status.vanilla.gif) | 145 | |:--:| 146 | | `yb status` is run in the context of a vanilla Yocto env. | 147 | 148 | Use the `--skip-unremarkable` / `-s` flag to hide repos for which there is no actionable status information. The result is a more concise summary. 149 | 150 | | ![yb status with vanilla Yocto env](/images/yb.0.0.11.status.vanilla.skip.unremarkable.gif) | 151 | |:--:| 152 | | `yb status` is run in the context of a vanilla Yocto env with the `--skip-unremarkable` / `-s` flag. | 153 | 154 | When run in the context of a yb env, however, yb can help even more. If a yb env is found, yb will fetch the current stream to see if any specs were updated. Then it will report how your current env differs from that of the activated spec. 155 | 156 | | ![yb status with yb env](/images/yb.0.0.11.status.missing.repo.gif) | 157 | |:--:| 158 | | `yb status` is run in the context of a yb env with an activated spec. | 159 | 160 | ## `yb sync`: make my env match the active spec 161 | | Vanilla Yocto env | yb env | 162 | | ------------- | ------------- | 163 | | :x: | :heavy_check_mark: | 164 | 165 | `yb sync` with the `-a/--apply` flag will do what is needed to make your env reflect that of the activated spec. 
It currently supports these actions: 166 | * Clone repos 167 | * Add/remove layers from bblayers.conf (creating it first if necessary) 168 | * Switch branches 169 | * Do fast-forward git pull 170 | * Create local tracking branch 171 | * Reset working directory (only if given `--force` flag) 172 | 173 | As a precaution, `yb sync` does nothing but report what would have been done. To actually make changes you need to pass the `-a`/`--apply` flag. 174 | 175 | When used within a yb env, `yb sync` will first pull any stream updates. 176 | 177 | | ![yb sync and status](/images/yb.0.0.11.sync.and.status.gif) | 178 | |:--:| 179 | | `yb sync` is first run in dry-run only mode (the default) to show what would be done. Then it is run again with the `--apply`/`-a` flag. Finally, `yb status` is run to show that the env is up-to-date. | 180 | 181 | ## `yb run`: run a command for each repo 182 | | Vanilla Yocto env | yb env | 183 | | ------------- | ------------- | 184 | | :heavy_check_mark: | :heavy_check_mark: | 185 | 186 | This works in either yb or Yocto env. It doesn't matter what directory you run it in as long as yb can find the env. 187 | 188 | ```bash 189 | yb run -n -- git branch --show-current 190 | ``` 191 | 192 | | ![yb run](/images/yb.0.0.11.run.show.branch.gif) | 193 | |:--:| 194 | | `yb run` using the `--no-return-codes`/`-n` flag to display just the current branch of each repo. | 195 | 196 | Project status 197 | ============== 198 | 199 | What's working: 200 | * Everything described above, plus a few other utility commands (e.g. `yb list` to view specs and streams) 201 | 202 | TODO: 203 | - [ ] Support modifications to local.conf in specs 204 | - [ ] Some kind of matrix build support (multiple MACHINE/DISTRO/?) 205 | - [ ] Usage in CI environment 206 | - [ ] Documentation... 207 | - [ ] Tests 208 | - [ ] User-friendly output for `yb sync` 209 | - [ ] Make updating streams more robust than just a `git pull` 210 | - [ ] Support some kind of local stream?
(for now, just use a local git repo - file:// works as a URI) 211 | - [ ] Make `--porcelain` flag more than just dumping internal structures as JSON which are liable to change; and make it work for other commands 212 | 213 | Possible TODOs (i.e. features I probably won't use but that other tools support - submit issue reports please) 214 | - [ ] Layer patches 215 | 216 | Ideas: 217 | - [ ] Maybe menuconfig like kas? 218 | - [ ] Multiconfig support 219 | - [ ] Some kind of `yb stash`/`yb stash pop` (like `git stash`/`git stash pop`) across all layers/local.conf at once. Would be useful for doing a quick build in between experimenting. 220 | 221 | Why not Python? 222 | =============== 223 | 224 | Basically because of [this](https://xkcd.com/1987/). Rust lets you build statically-linked binaries with no hassle. There is no beating that for distribution. Also, the type system and ecosystem are great. 225 | 226 | License 227 | ======== 228 | Copyright 2022-2023 Agilent Technologies, Inc. 229 | 230 | This software is licensed under the MIT license. 231 | 232 | Some portions have been adapted from [git2-rs](https://github.com/rust-lang/git2-rs) which is dual-licensed MIT and Apache 2.0. We have chosen to use it as MIT licensed. 233 | 234 | Disclaimer 235 | ======== 236 | This is not an official Agilent product. No support is implied.
237 | -------------------------------------------------------------------------------- /yb/src/commands/twice_bake.rs: -------------------------------------------------------------------------------- 1 | use async_trait::async_trait; 2 | use clap::value_parser; 3 | use color_eyre::Help; 4 | use console::Style; 5 | use indicatif::MultiProgress; 6 | use itertools::Itertools; 7 | use multi_index_map::MultiIndexMap; 8 | use serde::Deserialize; 9 | use serde_with::TimestampSecondsWithFrac; 10 | use serde_with::{serde_as, DisplayFromStr}; 11 | use std::collections::HashMap; 12 | use std::ffi::OsStr; 13 | use std::fs::File; 14 | use std::io::BufRead; 15 | use std::os::unix::fs::MetadataExt; 16 | use std::path::{Path, PathBuf}; 17 | use std::pin::Pin; 18 | use std::process::Stdio; 19 | use std::time::SystemTime; 20 | use std::{cmp, io}; 21 | use time::macros::format_description; 22 | use time::OffsetDateTime; 23 | use tokio::io::AsyncBufReadExt; 24 | use tokio::process::Command; 25 | use tokio_stream::wrappers::LinesStream; 26 | use tokio_stream::{Stream, StreamExt, StreamMap}; 27 | use tracing::info_span; 28 | use walkdir::{DirEntry, WalkDir}; 29 | 30 | use crate::commands::SubcommandRunner; 31 | use crate::core::tool_context::require_tool_context; 32 | use crate::errors::YbResult; 33 | use crate::util::indicatif::MultiProgressHelpers; 34 | use crate::Config; 35 | 36 | /// Re-execute the most recent task(s) (by default) that BitBake ran. 37 | #[derive(Debug, clap::Parser)] 38 | #[clap(verbatim_doc_comment, visible_aliases = & ["twice_bake", "twicebake", "tb"])] 39 | pub struct TwiceBakeCommand { 40 | /// By default, this command does a dry-run. Pass this flag to actually run tasks. 41 | #[clap(long, short)] 42 | execute: bool, 43 | 44 | /// Use the Nth most recent invocation of bitbake, rather than the most recent. 
45 | #[clap(long, short, default_value_t = 1, id = "N", value_parser = value_parser!(u8).range(1..))] 46 | previous: u8, 47 | // /// By default, this command only executes tasks if they all belong to the same recipe (PN). 48 | // /// Pass this flag to disable that sanity check. 49 | // force: bool, 50 | } 51 | 52 | #[derive(Eq, PartialEq, Debug, Clone, Copy, Deserialize)] 53 | #[serde(rename_all = "UPPERCASE")] 54 | enum TaskOutcome { 55 | Fail, 56 | Success, 57 | } 58 | 59 | #[serde_as] 60 | #[derive(Deserialize, MultiIndexMap, Debug)] 61 | #[multi_index_derive(Debug)] 62 | struct YbSupportHistoryFile { 63 | #[serde(rename = "PN")] 64 | #[multi_index(hashed_non_unique)] 65 | pn: String, 66 | #[serde(rename = "PV")] 67 | pv: String, 68 | #[serde(rename = "T")] 69 | t: PathBuf, 70 | #[serde(rename = "WORKDIR")] 71 | workdir: PathBuf, 72 | #[serde_as(as = "DisplayFromStr")] 73 | class_version: u32, 74 | 75 | #[serde_as(as = "Option>")] 76 | #[multi_index(ordered_non_unique)] 77 | end_time: Option, 78 | 79 | #[serde_as(as = "TimestampSecondsWithFrac")] 80 | #[multi_index(ordered_non_unique)] 81 | start_time: SystemTime, 82 | 83 | log_file: String, 84 | mc: String, 85 | outcome: Option, 86 | postfunc_runfiles: Vec<(String, String)>, 87 | prefunc_runfiles: Vec<(String, String)>, 88 | task: String, 89 | task_file: PathBuf, 90 | 91 | #[serde(rename = "task_runfile")] 92 | task_runfile_name: String, 93 | } 94 | 95 | impl YbSupportHistoryFile { 96 | fn task_runfile(&self) -> PathBuf { 97 | self.t.join(&self.task_runfile_name) 98 | } 99 | 100 | fn is_executable(&self) -> bool { 101 | let metadata = self.task_runfile().metadata().unwrap(); 102 | metadata.mode() & 0o111 != 0 103 | } 104 | } 105 | 106 | fn find_tmpdirs>(build_dir: P) -> impl Iterator { 107 | WalkDir::new(build_dir) 108 | .min_depth(1) 109 | .max_depth(1) 110 | .into_iter() 111 | .flatten() 112 | .filter(|e| { 113 | if !e.path().is_dir() { 114 | return false; 115 | } 116 | 117 | let file_name = 
e.file_name().to_str().unwrap(); 118 | if !(file_name == "tmp" || file_name.starts_with("tmp-")) { 119 | return false; 120 | } 121 | 122 | true 123 | }) 124 | } 125 | 126 | async fn launch>( 127 | p: P, 128 | mp: MultiProgress, 129 | ) -> Result<(), Box> { 130 | let mut cmd = Command::new(p); 131 | 132 | cmd.stdout(Stdio::piped()); 133 | cmd.stderr(Stdio::piped()); 134 | 135 | let mut child = cmd.spawn().expect("failed to spawn command"); 136 | 137 | let stdout = child.stdout.take().unwrap(); 138 | let stderr = child.stderr.take().unwrap(); 139 | 140 | let stdout_reader: Pin> + Send>> = 141 | Box::pin(LinesStream::new(tokio::io::BufReader::new(stdout).lines())); 142 | 143 | let stderr_reader: Pin> + Send>> = 144 | Box::pin(LinesStream::new(tokio::io::BufReader::new(stderr).lines())); 145 | 146 | let mut map = StreamMap::new(); 147 | map.insert("stdout", stdout_reader); 148 | map.insert("stderr", stderr_reader); 149 | 150 | // Ensure the child process is spawned in the runtime so it can 151 | // make progress on its own while we await for any output. 
152 | let join_handle = tokio::spawn(async move { 153 | let status = child 154 | .wait() 155 | .await 156 | .expect("child process encountered an error"); 157 | 158 | //println!("child status was: {}", status); 159 | status 160 | }); 161 | 162 | while let Some(line) = map.next().await { 163 | let line_str = line.1?; 164 | mp.suspend(|| println!("{}", Style::from_dotted_str("dim").apply_to(line_str))); 165 | } 166 | 167 | let status = join_handle.await?; 168 | println!("Process terminated with status: {}", status); 169 | 170 | status.exit_ok().map_err(|e| e.into()) 171 | } 172 | 173 | #[async_trait] 174 | impl SubcommandRunner for TwiceBakeCommand { 175 | async fn run(&self, config: &mut Config, mp: &MultiProgress) -> YbResult<()> { 176 | let context = require_tool_context(config)?; 177 | 178 | let mut tmpdir_to_history_dir_map = HashMap::new(); 179 | let mut map = MultiIndexYbSupportHistoryFileMap::default(); 180 | 181 | // Iterate over each tmpdir 182 | for tmpdir in find_tmpdirs(context.build_dir()) { 183 | let span = info_span!("looking at tmpdir", dir = tmpdir.path().to_str()); 184 | let _guard = span.enter(); 185 | 186 | // Locate the yb-support build history directory 187 | let history_dir = tmpdir.path().join("yb-support/history/"); 188 | let maybe_history_dir = history_dir.is_dir().then(|| history_dir.clone()); 189 | tmpdir_to_history_dir_map 190 | .insert(tmpdir.path().to_path_buf(), maybe_history_dir.clone()); 191 | if maybe_history_dir.is_none() { 192 | continue; 193 | } 194 | 195 | // Underneath the history directory, each subdir represents a build. 
196 | let mut history_subdir_walker = WalkDir::new(history_dir) 197 | .min_depth(1) 198 | .max_depth(1) 199 | .into_iter() 200 | .flatten() 201 | .filter(|e| e.path().is_dir()) 202 | .sorted_by_key(|e| cmp::Reverse(e.metadata().unwrap().modified().unwrap())); 203 | 204 | let latest_history_dir = history_subdir_walker 205 | .nth(self.previous as usize - 1) 206 | .unwrap(); 207 | 208 | fn systemtime_strftime(dt: T) -> String 209 | where 210 | T: Into, 211 | { 212 | dt.into() 213 | .format(format_description!("[weekday repr:short], [day] [month repr:short] [year] [hour]:[minute]:[second] [offset_hour][offset_minute]")) 214 | .unwrap() 215 | } 216 | 217 | mp.note(format!( 218 | "selected history dir = {:?}", 219 | latest_history_dir.path() 220 | )); 221 | 222 | // Iterate over subdirectories, each of which represents a PF 223 | let pf_walker = WalkDir::new(latest_history_dir.path()) 224 | .min_depth(1) 225 | .max_depth(1) 226 | .into_iter() 227 | .flatten() 228 | .filter(|e| e.path().is_dir()); 229 | 230 | // Iterate over each task file in the PF subdir 231 | for pf in pf_walker { 232 | let task_glob_pattern = format!("{}/do_*.json", pf.path().to_str().unwrap()); 233 | //mp.note(format!("PF={}", pf.file_name().to_str().unwrap())); 234 | 235 | for task_file in glob::glob(&task_glob_pattern).unwrap().flatten() { 236 | tracing::info!("file: {:?}", task_file); 237 | 238 | let file_handle = File::open(&task_file).unwrap(); 239 | let data: YbSupportHistoryFile = serde_json::from_reader(file_handle)?; 240 | map.insert(data); 241 | } 242 | } 243 | } 244 | 245 | if tmpdir_to_history_dir_map.is_empty() { 246 | return Err( 247 | eyre::eyre!("didn't find any tmp dirs - have you run a build yet?") 248 | .suppress_backtrace(true), 249 | ); 250 | } 251 | 252 | if tmpdir_to_history_dir_map.iter().all(|t| t.1.is_none()) { 253 | return Err(eyre::eyre!( 254 | "found one or more tmp dirs, but not yb-support/history/ directories" 255 | ) 256 | .suggestion("ensure INHERIT contains 
yb-support class") 257 | .suppress_backtrace(true)); 258 | } 259 | 260 | if map.is_empty() { 261 | mp.warn("no tasks found - did the last bitbake run do anything?"); 262 | return Ok(()); 263 | } 264 | 265 | if !self.execute { 266 | for entry in map.iter_by_start_time() { 267 | if !entry.is_executable() { 268 | mp.warn(format!( 269 | "would skip Python task {}:{}", 270 | entry.pn, entry.task 271 | )); 272 | continue; 273 | } 274 | 275 | mp.note(format!("would run {}:{}", entry.pn, entry.task)); 276 | } 277 | 278 | mp.warn("dry run only - pass the -e/--execute flag to run tasks"); 279 | } else { 280 | enum RunOrSkip { 281 | Run(PathBuf), 282 | Skip(PathBuf), 283 | } 284 | 285 | let mut ps = vec![]; 286 | for entry in map.iter_by_start_time() { 287 | let p = entry.task_runfile().clone(); 288 | if entry.is_executable() { 289 | ps.push(RunOrSkip::Run(p)); 290 | } else { 291 | ps.push(RunOrSkip::Skip(p)); 292 | } 293 | } 294 | 295 | let count = ps.len(); 296 | for (i, p) in ps.iter().enumerate() { 297 | match p { 298 | RunOrSkip::Run(p) => { 299 | mp.note(format!("[{}/{}] running {}", i, count, p.to_str().unwrap())); 300 | launch(p, mp.clone()).await.unwrap(); 301 | } 302 | RunOrSkip::Skip(p) => { 303 | mp.warn(format!("skipping Python task {}", p.to_str().unwrap())); 304 | } 305 | } 306 | } 307 | } 308 | 309 | Ok(()) 310 | } 311 | } 312 | -------------------------------------------------------------------------------- /yb/src/status_calculator/mod.rs: -------------------------------------------------------------------------------- 1 | use color_eyre::Help; 2 | use git2::{Branch, FetchOptions, Repository, StatusOptions}; 3 | use indoc::indoc; 4 | use maplit::hashset; 5 | use std::collections::{HashMap, HashSet}; 6 | use std::path::{Path, PathBuf}; 7 | use std::{fs, io}; 8 | 9 | use crate::config::Config; 10 | use crate::core::tool_context::{require_tool_context, ToolContext}; 11 | use crate::data_model::git::{ 12 | BranchStatus, RemoteTrackingBranch, 
UpstreamBranchStatus, UpstreamComparison, 13 | }; 14 | use crate::data_model::status::{ 15 | find_corresponding_spec_repo_for_repo, ComputedStatus, ComputedStatusEntry, MissingRepo, 16 | OnDiskNonRepoStatus, OnDiskRepoStatus, 17 | }; 18 | use crate::data_model::Layer; 19 | use crate::errors::YbResult; 20 | use crate::spec::SpecRepo; 21 | use crate::status_calculator::bblayers_manager::read_bblayers; 22 | use crate::util::git::{ 23 | check_repository_workdirs_unique, create_revwalk, get_current_local_branch, 24 | get_remote_for_current_branch, get_remote_tracking_branch, ssh_agent_remote_callbacks, 25 | }; 26 | use crate::util::paths::list_subdirectories_sorted; 27 | use crate::yb_env::ActiveSpecStatus; 28 | 29 | pub mod bblayers_manager; 30 | 31 | pub struct StatusCalculatorOptions<'cfg> { 32 | config: &'cfg Config, 33 | no_fetch: bool, 34 | log: bool, 35 | } 36 | 37 | impl<'cfg> StatusCalculatorOptions<'cfg> { 38 | pub fn new(config: &'cfg Config, no_fetch: bool, log: bool) -> Self { 39 | Self { 40 | config, 41 | no_fetch, 42 | log, 43 | } 44 | } 45 | } 46 | 47 | /// Compares a local branch (identified by `local_branch_name`) and remote tracking branch (`tracking_branch`) 48 | /// to determine if the former is up-to-date, ahead, behind, or diverged from the latter. 
49 | pub fn compare_branch_to_remote_tracking_branch( 50 | repo: &Repository, 51 | local_branch_name: String, 52 | tracking_branch: &RemoteTrackingBranch, 53 | ) -> YbResult { 54 | let remote_branch_name = tracking_branch.to_string(); 55 | let ahead_count = 56 | create_revwalk(repo, &format!("{remote_branch_name}..{local_branch_name}"))?.count(); 57 | let behind_count = 58 | create_revwalk(repo, &format!("{local_branch_name}..{remote_branch_name}"))?.count(); 59 | Ok(match (ahead_count > 0, behind_count > 0) { 60 | (true, true) => UpstreamComparison::Diverged { 61 | ahead: ahead_count, 62 | behind: behind_count, 63 | }, 64 | (true, false) => UpstreamComparison::Ahead(ahead_count), 65 | (false, true) => UpstreamComparison::Behind(behind_count), 66 | _ => UpstreamComparison::UpToDate, 67 | }) 68 | } 69 | 70 | /// Compares a local branch (`local_branch`) to its remote tracking branch via `compare_branch_to_remote_tracking_branch`. 71 | /// Returns None if local branch has no remote tracking branch. 72 | fn compare_branch_to_upstream( 73 | repo: &Repository, 74 | local_branch: &Branch, 75 | ) -> YbResult> { 76 | let local_branch_name = local_branch.name()?.unwrap().to_string(); 77 | 78 | get_remote_tracking_branch(local_branch)? 79 | .map(|tracking_branch| -> YbResult<_> { 80 | compare_branch_to_remote_tracking_branch(repo, local_branch_name, &tracking_branch).map( 81 | |comparison| UpstreamBranchStatus { 82 | upstream_comparison: comparison, 83 | remote_tracking_branch: tracking_branch, 84 | }, 85 | ) 86 | }) 87 | .transpose() 88 | } 89 | 90 | fn looks_like_layer_dir>(path: P) -> bool { 91 | let path = path.as_ref(); 92 | path.is_dir() && path.join("conf").join("layer.conf").is_file() 93 | } 94 | 95 | fn detect_layers>(start_dir: P) -> YbResult> { 96 | // TODO depth? 
97 | 98 | let start_dir = start_dir.as_ref(); 99 | let layers; 100 | if looks_like_layer_dir(start_dir) { 101 | // The `start_dir` is itself a single layer 102 | layers = hashset![Layer { 103 | path: start_dir.to_path_buf(), 104 | name: start_dir.file_name().unwrap().to_str().unwrap().to_string(), 105 | }]; 106 | } else { 107 | // Detect layers under the path 108 | layers = fs::read_dir(start_dir)? 109 | .filter_map(|r| r.ok().map(|r| r.path())) 110 | .filter(|r| looks_like_layer_dir(r)) 111 | .map(|path| Layer { 112 | path: path.clone(), 113 | name: path.file_name().unwrap().to_str().unwrap().to_string(), 114 | }) 115 | .collect(); 116 | } 117 | 118 | Ok(layers) 119 | } 120 | 121 | fn compute_repo_status( 122 | repo: Repository, 123 | path: &PathBuf, 124 | options: &mut StatusCalculatorOptions, 125 | active_spec_repos: &HashMap, 126 | c: &mut F, 127 | ) -> YbResult 128 | where 129 | F: FnMut(StatusCalculatorEvent), 130 | { 131 | // First things first, do a 'git fetch' 132 | // TODO: fetch all remotes? 133 | if !options.no_fetch { 134 | let mut repo_remote = get_remote_for_current_branch(&repo)?; 135 | // If the current branch is tracking an upstream branch, fetch it to check for updates 136 | if let Some(remote) = repo_remote.as_mut() { 137 | c(StatusCalculatorEvent::StartFetch); 138 | let mut fetch_options = FetchOptions::new(); 139 | fetch_options.remote_callbacks(ssh_agent_remote_callbacks()); 140 | // TODO: this is really slow 141 | //fetch_options.download_tags(AutotagOption::All); 142 | remote 143 | .fetch(&[] as &[&str], Some(&mut fetch_options), None) 144 | .with_suggestion(|| { 145 | indoc! {r#" 146 | Is ssh-agent configured and running? 
If not, you might need to run: 147 | eval `ssh-agent -s` 148 | ssh-add 149 | "#} 150 | })?; 151 | c(StatusCalculatorEvent::FinishFetch); 152 | } 153 | 154 | drop(repo_remote); 155 | } 156 | 157 | // See if we can map the repo to a spec repo 158 | let spec_repo_status = find_corresponding_spec_repo_for_repo(&repo, active_spec_repos, c)?; 159 | 160 | let current_branch_status = { 161 | // TODO: gracefully handle detached HEAD and repos without a tracked branch 162 | let local_branch = get_current_local_branch(&repo)?; 163 | let local_branch_name = local_branch.name()?.unwrap().to_string(); 164 | 165 | BranchStatus { 166 | local_branch_name, 167 | upstream_branch_status: compare_branch_to_upstream(&repo, &local_branch)?, 168 | } 169 | }; 170 | 171 | // Only run log if enabled and the repo is not diverged 172 | let commits = if options.log && !current_branch_status.is_diverged() { 173 | let mut walker = repo.revwalk()?; 174 | walker.set_sorting(git2::Sort::TOPOLOGICAL)?; 175 | walker.push_head()?; 176 | let mut commit_v = vec![]; 177 | for commit in walker.take(5) { 178 | commit_v.push(commit?); 179 | } 180 | Some(commit_v) 181 | } else { 182 | None 183 | }; 184 | 185 | let is_workdir_dirty = !repo.statuses(Some(&mut StatusOptions::new()))?.is_empty(); 186 | 187 | Ok(ComputedStatusEntry::OnDiskRepo(OnDiskRepoStatus { 188 | current_branch_status, 189 | is_workdir_dirty, 190 | repo, 191 | corresponding_spec_repo: spec_repo_status, 192 | path: path.clone(), 193 | recent_commits: commits, 194 | layers: detect_layers(path)?, 195 | })) 196 | } 197 | 198 | pub fn compute_status(mut options: StatusCalculatorOptions, mut c: F) -> YbResult 199 | where 200 | F: FnMut(StatusCalculatorEvent), 201 | { 202 | let config = &options.config; 203 | let context = require_tool_context(config)?; 204 | 205 | let sources_subdirs = match list_subdirectories_sorted(&context.sources_dir()) 206 | .map_err(|e| e.downcast::()) 207 | { 208 | Ok(result) => result 209 | .into_iter() 210 | .map(|subdir| 
subdir.canonicalize().unwrap()) 211 | .collect::>(), 212 | Err(Ok(io_error)) if io_error.kind() == io::ErrorKind::NotFound => { 213 | // Don't make it an error for the sources directory to be missing 214 | vec![] 215 | } 216 | Err(Ok(io_error)) => { 217 | eyre::bail!("IO error while enumerating sources directory {}", io_error) 218 | } 219 | Err(Err(non_io_error)) => { 220 | eyre::bail!("error enumerating sources directory {}", non_io_error) 221 | } 222 | }; 223 | 224 | let active_spec_maybe = match &context { 225 | ToolContext::Yb(yb_env) => yb_env.active_spec_status(), 226 | _ => None, 227 | }; 228 | 229 | let sources_subdirs_with_repo = sources_subdirs 230 | .iter() 231 | // TODO: this throws out all errors from `discover` 232 | .map(|d| (d, Repository::discover(d).ok())) 233 | .collect::>(); 234 | 235 | let repos = sources_subdirs_with_repo 236 | .iter() 237 | .filter_map(|v| v.1.as_ref()); 238 | 239 | check_repository_workdirs_unique(repos.clone())?; 240 | 241 | c(StatusCalculatorEvent::Start { 242 | number_subdirs: sources_subdirs.len() as u64, 243 | number_repos: repos.count() as u64, 244 | }); 245 | 246 | // If a spec is active, get the expected set of repos, otherwise empty. 247 | // As we discover spec repos on-disk, we will remove the corresponding entry from this map. 248 | // What is left is the set of missing spec repos. 249 | let mut active_spec_repos = active_spec_maybe 250 | .map(|s| match s { 251 | ActiveSpecStatus::StreamsBroken(..) 
=> unimplemented!(), 252 | ActiveSpecStatus::Active(active_spec) => active_spec.spec.repos.clone(), 253 | }) 254 | .unwrap_or_default(); 255 | 256 | let mut status_entries: Vec = Vec::with_capacity(sources_subdirs.len()); 257 | for (subdir, repo_maybe) in sources_subdirs_with_repo { 258 | let subdir_name = subdir.file_name().unwrap().to_str().unwrap().to_string(); 259 | c(StatusCalculatorEvent::StartProcessSubdir { 260 | dirname: subdir_name.clone(), 261 | }); 262 | 263 | if let Some(repo) = repo_maybe { 264 | let status = 265 | compute_repo_status(repo, subdir, &mut options, &active_spec_repos, &mut c)?; 266 | if let ComputedStatusEntry::OnDiskRepo(OnDiskRepoStatus { 267 | corresponding_spec_repo: Some(c), 268 | .. 269 | }) = &status 270 | { 271 | active_spec_repos.remove(&c.spec_repo_name()); 272 | } 273 | 274 | c(StatusCalculatorEvent::SubdirStatusComputed(&status)); 275 | status_entries.push(status); 276 | } else { 277 | let status = ComputedStatusEntry::OnDiskNonRepo(OnDiskNonRepoStatus { 278 | path: subdir.clone(), 279 | }); 280 | c(StatusCalculatorEvent::SubdirStatusComputed(&status)); 281 | status_entries.push(status); 282 | } 283 | 284 | c(StatusCalculatorEvent::FinishProcessSubdir); 285 | } 286 | 287 | let missing_repos = active_spec_repos 288 | .drain() 289 | .map(|(name, spec_repo)| MissingRepo { name, spec_repo }) 290 | .collect::>(); 291 | 292 | if !missing_repos.is_empty() { 293 | c(StatusCalculatorEvent::MissingReposDetected(&missing_repos)); 294 | } 295 | 296 | let bblayers = read_bblayers(&context.build_dir())?; 297 | let ret = ComputedStatus { 298 | source_dirs: status_entries, 299 | enabled_layers: bblayers, 300 | missing_repos, 301 | active_spec: active_spec_maybe.map(|status| match status { 302 | ActiveSpecStatus::StreamsBroken(..) 
=> unimplemented!(), 303 | ActiveSpecStatus::Active(active_spec) => active_spec.clone(), 304 | }), 305 | bblayers_path: context.build_dir().join("conf").join("bblayers.conf"), 306 | }; 307 | 308 | c(StatusCalculatorEvent::Finish(&ret)); 309 | 310 | Ok(ret) 311 | } 312 | 313 | pub enum StatusCalculatorEvent<'a> { 314 | Start { 315 | number_repos: u64, 316 | number_subdirs: u64, 317 | }, 318 | StartProcessSubdir { 319 | dirname: String, 320 | }, 321 | StartFetch, 322 | StartSubdirOperation { 323 | operation_name: String, 324 | }, 325 | FinishFetch, 326 | SubdirStatusComputed(&'a ComputedStatusEntry), 327 | FinishProcessSubdir, 328 | MissingReposDetected(&'a Vec), 329 | Finish(&'a ComputedStatus), 330 | } 331 | -------------------------------------------------------------------------------- /yb/src/commands/sync/mod.rs: -------------------------------------------------------------------------------- 1 | use std::fmt::Debug; 2 | 3 | use async_trait::async_trait; 4 | 5 | use console::Style; 6 | use git2::Repository; 7 | use indicatif::{MultiProgress, ProgressBar, ProgressStyle}; 8 | 9 | use crate::commands::activate::activate_spec; 10 | use crate::commands::sync::actions::{ 11 | BBLayersEditAction, CheckoutBranchSyncAction, CloneRepoSyncAction, 12 | CreateLocalTrackingBranchSyncAction, FastForwardPullSyncAction, ModifyBBLayersConfSyncAction, 13 | ResetGitWorkdirSyncAction, SyncAction, 14 | }; 15 | use crate::commands::SubcommandRunner; 16 | use crate::config::Config; 17 | use crate::core::tool_context::require_yb_env; 18 | use crate::data_model::git::{ 19 | determine_optimal_checkout_branch, RemoteTrackingBranch, UpstreamComparison, 20 | }; 21 | use crate::data_model::status::{ComputedStatusEntry, CorrespondingSpecRepoStatus}; 22 | use crate::errors::YbResult; 23 | use crate::status_calculator::{compute_status, StatusCalculatorEvent, StatusCalculatorOptions}; 24 | use crate::ui_ops::check_broken_streams::{ 25 | ui_op_check_broken_streams, UiCheckBrokenStreamsOptions, 26 
| }; 27 | use crate::ui_ops::update_stream::{ui_op_update_stream, UiUpdateStreamOptions}; 28 | use crate::util::git; 29 | use crate::util::indicatif::MultiProgressHelpers; 30 | use concurrent_git_pool::PoolHelper; 31 | 32 | mod actions; 33 | 34 | /// Analyze the yb environment and determine what needs to be done so that it matches the active spec. 35 | /// 36 | /// By default no changes are applied. Pass the --apply/-a flag to make changes. 37 | #[derive(Debug, clap::Parser)] 38 | pub struct SyncCommand { 39 | /// Activate the given spec before syncing 40 | spec: Option, 41 | 42 | #[clap(long, short)] 43 | apply: bool, 44 | 45 | #[clap(long, short)] 46 | force: bool, 47 | 48 | #[clap(long, short)] 49 | exact: bool, 50 | } 51 | 52 | #[async_trait] 53 | impl SubcommandRunner for SyncCommand { 54 | async fn run(&self, config: &mut Config, mp: &MultiProgress) -> YbResult<()> { 55 | ui_op_check_broken_streams(UiCheckBrokenStreamsOptions::new(config, mp))?; 56 | 57 | let mut yb_env = require_yb_env(config)?; 58 | 59 | if let Some(spec_name) = &self.spec { 60 | // TODO: don't immediately activate. Use current spec and desired spec to better calculate 61 | // what needs to be done. 62 | activate_spec(&mut yb_env, spec_name)?; 63 | } 64 | 65 | if yb_env.active_spec_status().is_none() { 66 | eyre::bail!("cannot sync unless a spec is active - see the 'yb activate' command"); 67 | } 68 | 69 | let update_stream_opts = UiUpdateStreamOptions::new(config, mp); 70 | ui_op_update_stream(update_stream_opts)?; 71 | 72 | if self.apply { 73 | mp.note("gathering status\n\n"); 74 | } else { 75 | mp.warn("gathering status only - will not modify environment (pass the -a flag to apply changes)\n\n"); 76 | } 77 | 78 | let mut overall_progress: Option = None; 79 | 80 | let status_calculator_options = StatusCalculatorOptions::new(config, false, false); 81 | let status = compute_status(status_calculator_options, |event| match event { 82 | StatusCalculatorEvent::Start { number_subdirs, .. 
} => { 83 | overall_progress.replace( 84 | mp.add( 85 | ProgressBar::new(number_subdirs) 86 | .with_message("checking source directories") 87 | .with_style( 88 | ProgressStyle::with_template("{msg} [{wide_bar}] {pos}/{len}") 89 | .unwrap() 90 | .progress_chars("##-"), 91 | ), 92 | ), 93 | ); 94 | } 95 | StatusCalculatorEvent::StartProcessSubdir { dirname } => overall_progress 96 | .as_ref() 97 | .unwrap() 98 | .set_message(format!("checking {dirname}")), 99 | StatusCalculatorEvent::FinishProcessSubdir => overall_progress.as_ref().unwrap().inc(1), 100 | _ => {} 101 | })?; 102 | 103 | drop(overall_progress); 104 | 105 | let mut sync_actions: Vec> = vec![]; 106 | 107 | for status_data in status.source_dirs.iter() { 108 | let subdir = status_data.path(); 109 | 110 | if let ComputedStatusEntry::OnDiskRepo(status_data) = status_data { 111 | if !status_data.has_corresponding_spec_repo() { 112 | println!("skipped {:?}", &subdir); 113 | continue; 114 | } 115 | 116 | if status_data.is_workdir_dirty { 117 | sync_actions.push(Box::new(ResetGitWorkdirSyncAction::new( 118 | status_data.path.clone(), 119 | ))) 120 | } 121 | 122 | match &status_data.corresponding_spec_repo { 123 | Some(corresponding_spec_repo_status) => match &corresponding_spec_repo_status { 124 | CorrespondingSpecRepoStatus::RelatedRepo { spec_repo, .. 
} => { 125 | println!( 126 | "{}", 127 | Style::new().red().on_white().apply_to(format!( 128 | "{} shares commits with spec repo {}", 129 | status_data.path.display(), 130 | spec_repo.url 131 | )) 132 | ); 133 | panic!(); 134 | } 135 | CorrespondingSpecRepoStatus::RemoteMatch(remote_match) => { 136 | if status_data.is_local_branch_tracking_correct_branch() { 137 | let upstream_comparison = status_data 138 | .current_branch_status 139 | .upstream_branch_status 140 | .as_ref() 141 | .unwrap() 142 | .upstream_comparison; 143 | match upstream_comparison { 144 | UpstreamComparison::UpToDate => {} 145 | UpstreamComparison::Behind(_) => { 146 | sync_actions.push(Box::new( 147 | FastForwardPullSyncAction::new( 148 | status_data.path.clone(), 149 | ), 150 | )); 151 | } 152 | UpstreamComparison::Ahead(_) => { 153 | let msg = format!("{} is ahead of remote and I don't know what to do about it", status_data.path.display()); 154 | mp.error(msg); 155 | panic!(); 156 | } 157 | UpstreamComparison::Diverged { .. 
} => unimplemented!(), 158 | } 159 | } else if remote_match.local_branches_tracking_remote.is_empty() { 160 | let new_local_branch_name = 161 | determine_local_branch_name_for_checkout( 162 | &status_data.repo, 163 | &remote_match.spec_repo.refspec, 164 | )?; 165 | 166 | sync_actions.push(Box::new( 167 | CreateLocalTrackingBranchSyncAction::new( 168 | status_data.path.clone(), 169 | new_local_branch_name.clone(), 170 | RemoteTrackingBranch { 171 | branch_name: remote_match.spec_repo.refspec.clone(), 172 | remote_name: remote_match.matching_remote_name.clone(), 173 | }, 174 | ), 175 | )); 176 | 177 | sync_actions.push(Box::new(CheckoutBranchSyncAction::new( 178 | status_data.path.clone(), 179 | new_local_branch_name.clone(), 180 | ))); 181 | 182 | sync_actions.push(Box::new(FastForwardPullSyncAction::new( 183 | status_data.path.clone(), 184 | ))); 185 | } else { 186 | let optimal_branch = determine_optimal_checkout_branch( 187 | &remote_match.local_branches_tracking_remote, 188 | ) 189 | .unwrap(); 190 | 191 | sync_actions.push(Box::new(CheckoutBranchSyncAction::new( 192 | status_data.path.clone(), 193 | optimal_branch.local_tracking_branch.branch_name.clone(), 194 | ))); 195 | 196 | match optimal_branch.upstream_comparison { 197 | UpstreamComparison::UpToDate => {} 198 | UpstreamComparison::Behind(_) => { 199 | sync_actions.push(Box::new( 200 | FastForwardPullSyncAction::new( 201 | status_data.path.clone(), 202 | ), 203 | )); 204 | } 205 | UpstreamComparison::Ahead(_ahead) => { 206 | // TODO: suggest pushing changes? 207 | } 208 | UpstreamComparison::Diverged { .. 
} => unimplemented!(), 209 | } 210 | } 211 | } 212 | }, 213 | None => { 214 | // TODO 215 | } 216 | } 217 | } 218 | } 219 | 220 | for repo in &status.missing_repos { 221 | let dest = yb_env.sources_dir().join(repo.name.clone()); 222 | sync_actions.push(Box::new(CloneRepoSyncAction::new( 223 | dest.clone(), 224 | repo.spec_repo.clone(), 225 | ))); 226 | 227 | // TODO add action to temporary clone the repo and precheck that the expected layers 228 | // actually exist? 229 | for layer in repo.spec_repo.resolved_layers(dest) { 230 | for layer in layer { 231 | sync_actions.push(Box::new(ModifyBBLayersConfSyncAction::new( 232 | layer.path, 233 | status.bblayers_path.clone(), 234 | BBLayersEditAction::AddLayer, 235 | ))); 236 | } 237 | } 238 | } 239 | 240 | // This doesn't include layers for missing spec repos - that is handled above 241 | for layer in status.missing_bblayers_layers_for_extant_spec_repos() { 242 | sync_actions.push(Box::new(ModifyBBLayersConfSyncAction::new( 243 | layer.path, 244 | status.bblayers_path.clone(), 245 | BBLayersEditAction::AddLayer, 246 | ))); 247 | } 248 | 249 | if self.exact { 250 | for layer in status.extraneous_bblayers_layers() { 251 | sync_actions.push(Box::new(ModifyBBLayersConfSyncAction::new( 252 | layer.path, 253 | status.bblayers_path.clone(), 254 | BBLayersEditAction::RemoveLayer, 255 | ))); 256 | } 257 | 258 | // TODO workspace layer 259 | } 260 | 261 | // TODO backup bblayers.conf before apply 262 | 263 | println!("actions: {sync_actions:#?}"); 264 | 265 | if self.apply { 266 | if sync_actions.iter().any(|action| action.is_force_required()) && !self.force { 267 | mp.warn("need to pass --force flag to apply one or more actions"); 268 | panic!(); 269 | } 270 | 271 | println!(); 272 | let progress = mp.add( 273 | ProgressBar::new(sync_actions.len() as u64).with_style( 274 | ProgressStyle::with_template("{msg} [{wide_bar}] {pos}/{len}") 275 | .unwrap() 276 | .progress_chars("##-"), 277 | ), 278 | ); 279 | 
progress.set_message("applying actions"); 280 | 281 | let client = PoolHelper::connect_or_local().await.unwrap(); 282 | for action in sync_actions { 283 | action.apply(&client).await?; 284 | progress.inc(1); 285 | } 286 | } else if !sync_actions.is_empty() { 287 | mp.warn("none of these changes have been applied - re-run with -a to apply") 288 | } 289 | 290 | Ok(()) 291 | } 292 | } 293 | 294 | fn determine_local_branch_name_for_checkout( 295 | repo: &Repository, 296 | local_branch_name: &str, 297 | ) -> YbResult { 298 | if !git::local_branch_exists(repo, local_branch_name)? { 299 | return Ok(local_branch_name.to_string()); 300 | } 301 | 302 | // TODO smarter way 303 | for i in 2..10 { 304 | let next_try = format!("{local_branch_name}-{i}"); 305 | if !git::local_branch_exists(repo, &next_try)? { 306 | return Ok(next_try); 307 | } 308 | } 309 | 310 | unimplemented!("exhausted possible local branch candidates"); 311 | } 312 | -------------------------------------------------------------------------------- /yb/src/util/git/mod.rs: -------------------------------------------------------------------------------- 1 | // Some functions below (where noted) are from git2-rs which is dual-licensed MIT and Apache 2.0. 
2 | // Those portions are Copyright (c) 2014 Alex Crichton 3 | 4 | use std::collections::HashMap; 5 | use std::path::PathBuf; 6 | 7 | use crate::data_model::git::RemoteTrackingBranch; 8 | use eyre::eyre; 9 | use git2::ErrorCode::NotFound; 10 | use git2::{ 11 | Branch, BranchType, Cred, ErrorCode, ObjectType, Remote, RemoteCallbacks, Repository, Revwalk, 12 | SubmoduleIgnore, 13 | }; 14 | 15 | use crate::errors::YbResult; 16 | 17 | pub fn get_current_local_branch(repo: &Repository) -> YbResult { 18 | match repo.head() { 19 | Ok(head) => Ok(Branch::wrap(head)), 20 | Err(ref e) if e.code() == ErrorCode::UnbornBranch /*|| e.code() == ErrorCode::NotFound*/ => { 21 | Err(eyre!("unborn branch")) 22 | } 23 | Err(e) => Err(e.into()), 24 | } 25 | } 26 | 27 | pub fn get_current_local_branch_name(repo: &Repository) -> YbResult { 28 | Ok(get_current_local_branch(repo)? 29 | .name()? 30 | .ok_or_else(|| eyre!("couldn't determine shorthand"))? 31 | .to_string()) 32 | } 33 | 34 | pub fn get_remote_tracking_branch_for_current_local_branch( 35 | repo: &Repository, 36 | ) -> YbResult> { 37 | get_remote_tracking_branch(&get_current_local_branch(repo)?) 
38 | } 39 | 40 | pub fn get_remote_tracking_branch(branch: &Branch) -> YbResult> { 41 | match branch.upstream() { 42 | Ok(upstream_branch) => { 43 | let tracking_branch_name = upstream_branch.name()?.unwrap().to_string(); 44 | let tracking_branch_parts = tracking_branch_name.split_once('/').unwrap(); 45 | Ok(Some(RemoteTrackingBranch { 46 | remote_name: tracking_branch_parts.0.to_string(), 47 | branch_name: tracking_branch_parts.1.to_string(), 48 | })) 49 | } 50 | Err(err) if err.code() == NotFound => Ok(None), 51 | Err(err) => Err(err.into()), 52 | } 53 | } 54 | 55 | pub fn get_remote_name_for_current_branch(repo: &Repository) -> YbResult> { 56 | let branch = get_current_local_branch(repo)?; 57 | // Repository::branch_upstream_remote needs the 'refs/heads/blah' 58 | let branch_ref_name = branch 59 | .into_reference() 60 | .name() 61 | .ok_or_else(|| eyre!("branch has no name"))? 62 | .to_string(); 63 | 64 | match repo.branch_upstream_remote(&branch_ref_name) { 65 | Err(ref e) if e.code() == ErrorCode::NotFound => Ok(None), 66 | Ok(name) => Ok(Some( 67 | name.as_str() 68 | .ok_or_else(|| eyre!("couldn't get branch name from reference"))? 69 | .to_string(), 70 | )), 71 | Err(e) => Err(e.into()), 72 | } 73 | } 74 | 75 | pub fn get_remote_for_current_branch(repo: &Repository) -> YbResult> { 76 | get_remote_name_for_current_branch(repo)? 
// Adapted from libgit2-rs
/// Builds a `Revwalk` for `commit`, which may be a single revision or a
/// range/merge-base revspec (e.g. "a..b", "a...b").
pub fn create_revwalk<'a>(repo: &'a Repository, commit: &str) -> YbResult<Revwalk<'a>> {
    let mut revwalk = repo.revwalk()?;
    let revspec = repo.revparse(commit)?;
    if revspec.mode().contains(git2::RevparseMode::SINGLE) {
        revwalk.push(revspec.from().unwrap().id())?;
    } else {
        // Range form: walk `to`, optionally include the merge base, hide `from`.
        let from = revspec.from().unwrap().id();
        let to = revspec.to().unwrap().id();
        revwalk.push(to)?;
        if revspec.mode().contains(git2::RevparseMode::MERGE_BASE) {
            let base = repo.merge_base(from, to)?;
            let o = repo.find_object(base, Some(ObjectType::Commit))?;
            revwalk.push(o.id())?;
        }
        revwalk.hide(from)?;
    }
    Ok(revwalk)
}

// Adapted from libgit2-rs
// This version of the output prefixes each path with two status columns and
// shows submodule status information.
/// Renders `statuses` in `git status --short` style, one string per entry.
/// The first column is the index status, the second the worktree status.
pub fn format_short_statuses(repo: &Repository, statuses: &git2::Statuses) -> Vec<String> {
    let mut ret: Vec<_> = vec![];
    // First pass: everything except plain untracked files (handled below).
    for entry in statuses
        .iter()
        .filter(|e| e.status() != git2::Status::CURRENT)
    {
        // Index (staged) status column.
        let mut istatus = match entry.status() {
            s if s.contains(git2::Status::INDEX_NEW) => 'A',
            s if s.contains(git2::Status::INDEX_MODIFIED) => 'M',
            s if s.contains(git2::Status::INDEX_DELETED) => 'D',
            s if s.contains(git2::Status::INDEX_RENAMED) => 'R',
            s if s.contains(git2::Status::INDEX_TYPECHANGE) => 'T',
            _ => ' ',
        };
        // Worktree status column.
        let mut wstatus = match entry.status() {
            s if s.contains(git2::Status::WT_NEW) => {
                if istatus == ' ' {
                    istatus = '?';
                }
                '?'
            }
            s if s.contains(git2::Status::WT_MODIFIED) => 'M',
            s if s.contains(git2::Status::WT_DELETED) => 'D',
            s if s.contains(git2::Status::WT_RENAMED) => 'R',
            s if s.contains(git2::Status::WT_TYPECHANGE) => 'T',
            _ => ' ',
        };

        if entry.status().contains(git2::Status::IGNORED) {
            istatus = '!';
            wstatus = '!';
        }
        // Pure untracked entries are emitted by the second pass, not here.
        if istatus == '?' && wstatus == '?' {
            continue;
        }
        let mut extra = "";

        // A commit in a tree is how submodules are stored, so let's go take a
        // look at its status.
        //
        // TODO: check for GIT_FILEMODE_COMMIT
        let status = entry.index_to_workdir().and_then(|diff| {
            let ignore = SubmoduleIgnore::Unspecified;
            diff.new_file()
                .path_bytes()
                .and_then(|s| std::str::from_utf8(s).ok())
                .and_then(|name| repo.submodule_status(name, ignore).ok())
        });
        if let Some(status) = status {
            if status.contains(git2::SubmoduleStatus::WD_MODIFIED) {
                extra = " (new commits)";
            } else if status.contains(git2::SubmoduleStatus::WD_INDEX_MODIFIED)
                || status.contains(git2::SubmoduleStatus::WD_WD_MODIFIED)
            {
                extra = " (modified content)";
            } else if status.contains(git2::SubmoduleStatus::WD_UNTRACKED) {
                extra = " (untracked content)";
            }
        }

        // Collect old/new/workdir paths; renames need more than one of them.
        let (mut a, mut b, mut c) = (None, None, None);
        if let Some(diff) = entry.head_to_index() {
            a = diff.old_file().path();
            b = diff.new_file().path();
        }
        if let Some(diff) = entry.index_to_workdir() {
            a = a.or_else(|| diff.old_file().path());
            b = b.or_else(|| diff.old_file().path());
            c = diff.new_file().path();
        }

        match (istatus, wstatus) {
            ('R', 'R') => ret.push(format!(
                "\tRR {} {} {}{}",
                a.unwrap().display(),
                b.unwrap().display(),
                c.unwrap().display(),
                extra
            )),
            ('R', w) => ret.push(format!(
                "\tR{} {} {}{}",
                w,
                a.unwrap().display(),
                b.unwrap().display(),
                extra
            )),
            (i, 'R') => ret.push(format!(
                "\t{}R {} {}{}",
                i,
                a.unwrap().display(),
                c.unwrap().display(),
                extra
            )),
            (i, w) => ret.push(format!("\t{}{} {}{}", i, w, a.unwrap().display(), extra)),
        };
    }

    // Second pass: untracked files, rendered as "?? path".
    for entry in statuses
        .iter()
        .filter(|e| e.status() == git2::Status::WT_NEW)
    {
        ret.push(format!(
            "\t?? {}",
            entry
                .index_to_workdir()
                .unwrap()
                .old_file()
                .path()
                .unwrap()
                .display()
        ));
    }

    ret
}

// Adapted from libgit2-rs
/// Fast-forwards local branch reference `lb` to annotated commit `rc`,
/// then sets HEAD to it and force-checks-out the working directory.
fn fast_forward(
    repo: &Repository,
    lb: &mut git2::Reference,
    rc: &git2::AnnotatedCommit,
) -> Result<(), git2::Error> {
    let name = match lb.name() {
        Some(s) => s.to_string(),
        None => String::from_utf8_lossy(lb.name_bytes()).to_string(),
    };
    let msg = format!("Fast-Forward: Setting {} to id: {}", name, rc.id());
    println!("{msg}");
    lb.set_target(rc.id(), &msg)?;
    repo.set_head(&name)?;
    repo.checkout_head(Some(
        git2::build::CheckoutBuilder::default()
            // For some reason the force is required to make the working directory actually get updated
            // I suspect we should be adding some logic to handle dirty working directory states
            // but this is just an example so maybe not.
            .force(),
    ))?;
    Ok(())
}
// Adapted from libgit2-rs
/// Merges `fetch_commit` into local branch `remote_branch`.
///
/// Only the fast-forward case is implemented; a true merge commit
/// (diverged histories) currently panics with "merge not yet supported".
pub fn do_merge<'a>(
    repo: &'a Repository,
    remote_branch: &str,
    fetch_commit: git2::AnnotatedCommit<'a>,
) -> Result<(), git2::Error> {
    // 1. do a merge analysis
    let analysis = repo.merge_analysis(&[&fetch_commit])?;

    // 2. Do the appopriate merge
    if analysis.0.is_fast_forward() {
        //println!("Doing a fast forward");
        // do a fast forward
        let refname = format!("refs/heads/{remote_branch}");
        match repo.find_reference(&refname) {
            Ok(mut r) => {
                fast_forward(repo, &mut r, &fetch_commit)?;
            }
            Err(_) => {
                // The branch doesn't exist so just set the reference to the
                // commit directly. Usually this is because you are pulling
                // into an empty repository.
                repo.reference(
                    &refname,
                    fetch_commit.id(),
                    true,
                    &format!("Setting {} to {}", remote_branch, fetch_commit.id()),
                )?;
                repo.set_head(&refname)?;
                repo.checkout_head(Some(
                    git2::build::CheckoutBuilder::default()
                        .allow_conflicts(true)
                        .conflict_style_merge(true)
                        .force(),
                ))?;
            }
        };
    } else if analysis.0.is_normal() {
        // do a normal merge
        panic!("merge not yet supported");
        // TODO
        //let head_commit = repo.reference_to_annotated_commit(&repo.head()?)?;
        //normal_merge(&repo, &head_commit, &fetch_commit)?;
    } else {
        //println!("Nothing to do...");
        // Already up to date (or unborn) - nothing to merge.
    }
    Ok(())
}

/// Errors if two of the given repositories share the same git working
/// directory (e.g. two layer repos rooted at the same checkout).
///
/// # Errors
/// Fails on bare repositories (no workdir) or on a detected duplicate.
pub fn check_repository_workdirs_unique<'a, I>(repos: I) -> YbResult<()>
where
    I: Iterator<Item = &'a Repository>,
{
    // Group repositories by their workdir path, then look for collisions.
    let mut workdir_to_repo: HashMap<PathBuf, Vec<&Repository>> = HashMap::new();
    for repo in repos {
        let workdir = repo
            .workdir()
            .ok_or_else(|| eyre!("bare repositories not supported"))?;
        let r = workdir_to_repo.entry(PathBuf::from(workdir)).or_default();
        r.push(repo);
    }

    for (workdir, workdir_repos) in workdir_to_repo {
        if workdir_repos.len() > 1 {
            return Err(eyre::eyre!(
                "multiple layer repositories are rooted at git workdir {}",
                workdir.display()
            ));
        }
    }

    Ok(())
}

/// Remote callbacks that authenticate via the local ssh-agent using the
/// username embedded in the remote URL.
pub fn ssh_agent_remote_callbacks<'a>() -> RemoteCallbacks<'a> {
    let mut callbacks = RemoteCallbacks::new();
    callbacks.credentials(|_url, username_from_url, _allowed_types| {
        Cred::ssh_key_from_agent(username_from_url.unwrap())
    });
    callbacks
}

/// Returns whether a local branch named `local_branch_name` exists,
/// mapping git's NotFound to `Ok(false)` rather than an error.
pub fn local_branch_exists(repo: &Repository, local_branch_name: &str) -> YbResult<bool> {
    match repo.find_branch(local_branch_name, BranchType::Local) {
        Ok(_) => Ok(true),
        Err(err) if err.code() == ErrorCode::NotFound => Ok(false),
        Err(err) => Err(err.into()),
    }
}
use assert_cmd::Command;
use core::fmt;
use std::collections::{HashMap, HashSet};
use std::fmt::{Debug, Formatter};
use std::path::{Path, PathBuf};

use crate::data_model::git::{
    BranchStatus, LocalTrackingBranch, LocalTrackingBranchWithUpstreamComparison,
    RemoteTrackingBranch,
};
use crate::data_model::Layer;
use git2::{Branch, BranchType, Oid, Repository};
use itertools::Itertools;
use serde::Serialize;
use tempfile::TempDir;

use crate::errors::YbResult;
use crate::spec::{ActiveSpec, SpecRepo};
use crate::status_calculator::{compare_branch_to_remote_tracking_branch, StatusCalculatorEvent};

use crate::util::git::get_remote_tracking_branch;

/// The status of the Yocto environment
#[derive(Debug, Serialize)]
pub struct ComputedStatus {
    // One entry per directory found under the sources dir (repo or not).
    pub(crate) source_dirs: Vec<ComputedStatusEntry>,
    // Layers currently enabled in bblayers.conf.
    pub(crate) enabled_layers: HashSet<Layer>,
    // Spec repos with no corresponding on-disk checkout.
    pub(crate) missing_repos: Vec<MissingRepo>,
    // The active spec at the time the status was computed, if any.
    pub(crate) active_spec: Option<ActiveSpec>,
    // Path to the build's conf/bblayers.conf.
    pub(crate) bblayers_path: PathBuf,
}

impl ComputedStatus {
    /// Iterator over the active spec's repos, classifying each as on-disk
    /// (`Extant`) or `Missing`. `None` if no spec is active.
    pub fn active_spec_repos(&self) -> Option<ActiveSpecRepos> {
        let active_spec = self.active_spec.as_ref()?;
        Some(ActiveSpecRepos {
            active_spec_repos: active_spec.spec.repos.iter(),
            source_dirs: &self.source_dirs,
        })
    }

    /// Layers requested by the spec for repos that exist on disk
    /// (resolved against each repo's actual checkout path).
    pub fn spec_requested_layers(&self) -> HashSet<Layer> {
        let mut spec_requested_layers = HashSet::new();
        for entry in &self.source_dirs {
            if let ComputedStatusEntry::OnDiskRepo(repo) = entry {
                if let Some(CorrespondingSpecRepoStatus::RemoteMatch(remote_match_status)) =
                    &repo.corresponding_spec_repo
                {
                    spec_requested_layers.extend(
                        remote_match_status
                            .spec_repo
                            .resolved_layers(repo.path.clone())
                            .unwrap_or_default(),
                    );
                }
            }
        }

        spec_requested_layers
    }

    /// Spec-requested layers (for repos that exist) not yet in bblayers.conf.
    pub fn missing_bblayers_layers_for_extant_spec_repos(&self) -> HashSet<Layer> {
        // TODO don't clone?
        self.spec_requested_layers()
            .difference(&self.enabled_layers)
            .cloned()
            .collect()
    }

    /// Layers enabled in bblayers.conf that the spec does not request.
    pub fn extraneous_bblayers_layers(&self) -> HashSet<Layer> {
        self.enabled_layers
            .difference(&self.spec_requested_layers())
            .cloned()
            .collect()
    }
}

/// Classification of a single spec repo relative to the on-disk sources.
#[derive(Debug)]
pub enum ActiveSpecRepoStatus<'a> {
    /// The spec repo has no on-disk checkout.
    Missing(&'a SpecRepo),
    /// The spec repo is checked out at `path`.
    Extant {
        spec_repo: &'a SpecRepo,
        path: &'a PathBuf,
    },
}

/// Iterator produced by [`ComputedStatus::active_spec_repos`].
pub struct ActiveSpecRepos<'a> {
    source_dirs: &'a Vec<ComputedStatusEntry>,
    active_spec_repos: std::collections::hash_map::Iter<'a, String, SpecRepo>,
}

impl<'a> Iterator for ActiveSpecRepos<'a> {
    type Item = ActiveSpecRepoStatus<'a>;

    fn next(&mut self) -> Option<Self::Item> {
        // For each spec repo, look for a source dir whose matched spec repo is it.
        self.active_spec_repos.next().map(|a| {
            if let Some(extant) = self
                .source_dirs
                .iter()
                .find(|entry| (*entry).spec_repo() == Some(a.1))
            {
                ActiveSpecRepoStatus::Extant {
                    spec_repo: a.1,
                    path: extant.path(),
                }
            } else {
                ActiveSpecRepoStatus::Missing(a.1)
            }
        })
    }
}

/// The status of a source directory
#[derive(Debug, Serialize)]
pub enum ComputedStatusEntry {
    /// A repository
    OnDiskRepo(OnDiskRepoStatus),
    /// A directory that is not a repository
    OnDiskNonRepo(OnDiskNonRepoStatus),
}

impl ComputedStatusEntry {
    /// Path of the directory, regardless of variant.
    pub fn path(&self) -> &PathBuf {
        match &self {
            ComputedStatusEntry::OnDiskNonRepo(OnDiskNonRepoStatus { path, .. }) => path,
            ComputedStatusEntry::OnDiskRepo(OnDiskRepoStatus { path, .. }) => path,
        }
    }

    /// The matched spec repo, if this is a repo and a match was found.
    pub fn spec_repo(&self) -> Option<&SpecRepo> {
        match &self {
            ComputedStatusEntry::OnDiskNonRepo(_) => None,
            ComputedStatusEntry::OnDiskRepo(repo) => repo.spec_repo(),
        }
    }
}

#[derive(Debug, Serialize)]
pub struct OnDiskNonRepoStatus {
    pub(crate) path: PathBuf,
}

#[derive(Serialize)]
pub struct OnDiskRepoStatus {
    /// Repository object
    #[serde(skip)]
    pub repo: Repository,
    /// Path to the directory
    pub path: PathBuf,
    pub is_workdir_dirty: bool,
    // NOTE(review): element type reconstructed as Oid from the file's imports —
    // TODO confirm against the original source.
    #[serde(skip)]
    pub recent_commits: Option<Vec<Oid>>,
    /// Not necessarily the correct branch as far as any active spec is concerned
    pub current_branch_status: BranchStatus,
    /// Status information pertaining to corresponding spec repo, or None if no matching spec repo
    pub corresponding_spec_repo: Option<CorrespondingSpecRepoStatus>,
    /// Layers that were detected inside the repo (via looking for conf/layer.conf)
    pub layers: HashSet<Layer>,
}

impl OnDiskRepoStatus {
    /// Whether a spec repo was matched to this on-disk repo.
    pub fn has_corresponding_spec_repo(&self) -> bool {
        self.corresponding_spec_repo.is_some()
    }

    /// The matched spec repo, if any.
    pub fn spec_repo(&self) -> Option<&SpecRepo> {
        self.corresponding_spec_repo.as_ref().map(|c| c.spec_repo())
    }

    /// Whether the currently checked-out local branch tracks the remote branch
    /// the spec asks for.
    ///
    /// # Panics
    /// Panics if there is no corresponding spec repo, or if the match is a
    /// `RelatedRepo` rather than a `RemoteMatch` — callers must check first.
    pub fn is_local_branch_tracking_correct_branch(&self) -> bool {
        assert!(
            self.has_corresponding_spec_repo(),
            "need to check for spec repo before using this method!"
        );
        let spec_repo = self.corresponding_spec_repo.as_ref().unwrap();
        match spec_repo {
            CorrespondingSpecRepoStatus::RemoteMatch(remote_match) => remote_match
                .is_local_branch_tracking_correct_branch(
                    &self.current_branch_status.local_branch_name,
                ),
            _ => panic!("need to check for spec repo match type before using this method!"),
        }
    }
}

// Manual Debug because `repo: Repository` has no Debug impl.
impl Debug for OnDiskRepoStatus {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        f.debug_struct("OnDiskRepo")
            .field("path", &self.path)
            .field("is_workdir_dirty", &self.is_workdir_dirty)
            .field("recent_commits", &self.recent_commits)
            .field("current_branch_status", &self.current_branch_status)
            .field("corresponding_spec_repo", &self.corresponding_spec_repo)
            .field("layers", &self.layers)
            .finish_non_exhaustive()
    }
}

/// A spec repo with no on-disk checkout; `name` is its target subdirectory.
#[derive(Debug, Serialize)]
pub struct MissingRepo {
    pub name: String,
    pub spec_repo: SpecRepo,
}

/// Returns every local branch of `repo` that tracks `remote_tracking_branch`,
/// each paired with its ahead/behind comparison against that upstream.
pub fn find_local_branches_tracking_remote_branch(
    repo: &Repository,
    remote_tracking_branch: &RemoteTrackingBranch,
) -> YbResult<Vec<LocalTrackingBranchWithUpstreamComparison>> {
    let branches: YbResult<Vec<Branch>> = repo
        .branches(Some(BranchType::Local))?
        .map(|branch| -> YbResult<_> { Ok(branch?.0) })
        .collect();

    let filtered = branches?
        .into_iter()
        .filter(|branch| {
            get_remote_tracking_branch(branch)
                .unwrap()
                .map_or(false, |b| b == *remote_tracking_branch)
        })
        .map(|branch| {
            let branch_name = branch.name().unwrap().unwrap().to_string();
            compare_branch_to_remote_tracking_branch(
                repo,
                branch_name.clone(),
                remote_tracking_branch,
            )
            .map(|comparison| LocalTrackingBranchWithUpstreamComparison {
                local_tracking_branch: LocalTrackingBranch {
                    branch_name: branch_name.clone(),
                    remote_tracking_branch: remote_tracking_branch.clone(),
                },
                upstream_comparison: comparison,
            })
        })
        .try_collect()?;

    Ok(filtered)
}
215 | .into_iter() 216 | .filter(|branch| { 217 | get_remote_tracking_branch(branch) 218 | .unwrap() 219 | .map_or(false, |b| b == *remote_tracking_branch) 220 | }) 221 | .map(|branch| { 222 | let branch_name = branch.name().unwrap().unwrap().to_string(); 223 | compare_branch_to_remote_tracking_branch( 224 | repo, 225 | branch_name.clone(), 226 | remote_tracking_branch, 227 | ) 228 | .map(|comparison| LocalTrackingBranchWithUpstreamComparison { 229 | local_tracking_branch: LocalTrackingBranch { 230 | branch_name: branch_name.clone(), 231 | remote_tracking_branch: remote_tracking_branch.clone(), 232 | }, 233 | upstream_comparison: comparison, 234 | }) 235 | }) 236 | .try_collect()?; 237 | 238 | Ok(filtered) 239 | } 240 | 241 | #[derive(Debug, Eq, PartialEq, Serialize)] 242 | pub struct RemoteMatchStatus { 243 | pub is_extra_remote: bool, 244 | pub spec_repo: SpecRepo, 245 | pub spec_repo_name: String, 246 | pub remote_tracking_branch: RemoteTrackingBranch, 247 | pub local_branches_tracking_remote: Vec, 248 | pub matching_remote_name: String, 249 | } 250 | 251 | impl RemoteMatchStatus { 252 | pub fn is_local_branch_tracking_correct_branch(&self, local_branch_name: &String) -> bool { 253 | self.local_branches_tracking_remote 254 | .iter() 255 | .map(|l| l.local_tracking_branch.branch_name.clone()) 256 | .collect::>() 257 | .contains(local_branch_name) 258 | } 259 | } 260 | 261 | #[derive(Debug, PartialEq, Eq, Serialize)] 262 | pub enum CorrespondingSpecRepoStatus { 263 | RemoteMatch(RemoteMatchStatus), 264 | RelatedRepo { 265 | spec_repo: SpecRepo, 266 | spec_repo_name: String, 267 | }, 268 | } 269 | 270 | impl CorrespondingSpecRepoStatus { 271 | pub fn spec_repo_name(&self) -> String { 272 | match &self { 273 | CorrespondingSpecRepoStatus::RemoteMatch(RemoteMatchStatus { 274 | spec_repo_name, .. 275 | }) => spec_repo_name.clone(), 276 | CorrespondingSpecRepoStatus::RelatedRepo { spec_repo_name, .. 
} => { 277 | spec_repo_name.clone() 278 | } 279 | } 280 | } 281 | 282 | pub fn spec_repo(&self) -> &SpecRepo { 283 | match &self { 284 | CorrespondingSpecRepoStatus::RemoteMatch(remote_match_status) => { 285 | &remote_match_status.spec_repo 286 | } 287 | CorrespondingSpecRepoStatus::RelatedRepo { spec_repo, .. } => spec_repo, 288 | } 289 | } 290 | } 291 | 292 | // TODO introduce type for return 293 | pub fn enumerate_repo_remotes(repo: &Repository) -> YbResult> { 294 | let remote_names = repo.remotes()?; 295 | 296 | let remotes: Vec<_> = remote_names 297 | .iter() 298 | .map(|remote_name| -> YbResult<_> { 299 | let remote_name = remote_name.unwrap(); // assume utf-8 300 | Ok((remote_name, repo.find_remote(remote_name)?)) 301 | }) 302 | .try_collect()?; 303 | 304 | Ok(remotes 305 | .into_iter() 306 | .filter_map(|(remote_name, remote)| { 307 | remote 308 | .url() 309 | .map(|remote_url| (remote_name.to_string(), remote_url.to_string())) 310 | }) 311 | .collect()) 312 | } 313 | 314 | pub fn enumerate_revisions>(repo_path: P) -> YbResult> { 315 | // git rev-list --all --full-history 316 | let revs = Command::new("git") 317 | .arg("rev-list") 318 | .arg("--all") 319 | .arg("--full-history") 320 | .current_dir(repo_path) 321 | .output()? 
/// Clones `spec_repo` (at its refspec) into a temp directory and returns the
/// set of reachable commit ids, used for "related repo" detection.
// NOTE(review): `cmd.assert().success()` panics on clone failure rather than
// returning an error — consider propagating via YbResult instead.
pub fn clone_and_enumerate_revisions(spec_repo: &SpecRepo) -> YbResult<HashSet<String>> {
    let tmp = TempDir::new().unwrap();

    let mut cmd = Command::new("git");
    cmd.arg("clone")
        .arg(&spec_repo.url)
        .arg("-b")
        .arg(&spec_repo.refspec)
        .arg(tmp.path());
    // Disable all interactive auth prompts so a bad URL fails fast.
    cmd.env("GIT_TERMINAL_PROMPT", "0");
    cmd.env("GIT_SSH_COMMAND", "ssh -o BatchMode=yes");
    cmd.assert().success();

    enumerate_revisions(tmp.path())
}

/// For the on-disk repository `repo`, try to find corresponding spec repo using these methods:
/// 1. Check if the repos share a remote (either primary or extra)
/// 2. See if the on-disk repo and the spec repo remote has any common commits (by cloning the
/// latter to a temporary directory)
/// TODO document does not validate refspec
pub fn find_corresponding_spec_repo_for_repo<F>(
    repo: &Repository,
    spec_repos: &HashMap<String, SpecRepo>,
    c: &mut F,
) -> YbResult<Option<CorrespondingSpecRepoStatus>>
where
    F: FnMut(StatusCalculatorEvent),
{
    // Name of the checkout directory (parent of the .git dir).
    let repo_subdir_name = repo
        .path()
        .parent()
        .unwrap()
        .file_name()
        .unwrap()
        .to_str()
        .unwrap();

    let remote_names_with_urls = enumerate_repo_remotes(repo)?;

    // Iterate through each spec repo
    for (spec_repo_subdir_name, spec_repo) in spec_repos {
        // Iterate through each of the on-disk repo's remotes
        for (remote_name, remote_url) in &remote_names_with_urls {
            let tracking_branch = RemoteTrackingBranch {
                branch_name: spec_repo.refspec.clone(),
                remote_name: remote_name.clone(),
            };

            if *remote_url == spec_repo.url {
                // The remote URL exactly matches what the spec expects
                return Ok(Some(CorrespondingSpecRepoStatus::RemoteMatch(
                    RemoteMatchStatus {
                        spec_repo: spec_repo.clone(),
                        spec_repo_name: spec_repo_subdir_name.clone(),
                        is_extra_remote: false,
                        local_branches_tracking_remote: find_local_branches_tracking_remote_branch(
                            repo,
                            &tracking_branch,
                        )?,
                        remote_tracking_branch: tracking_branch,
                        matching_remote_name: remote_name.clone(),
                    },
                )));
            }
        }

        // Consider extra remotes
        for (remote_name, remote_url) in &remote_names_with_urls {
            let tracking_branch = RemoteTrackingBranch {
                branch_name: spec_repo.refspec.clone(),
                remote_name: remote_name.clone(),
            };

            if spec_repo
                .extra_remotes
                .iter()
                .any(|(_, extra_remote)| *remote_url == extra_remote.url)
            {
                // The remote URL matches one of the extra remotes in the spec
                // TODO revisit assertion
                assert_eq!(
                    repo_subdir_name, spec_repo_subdir_name,
                    "TODO revisit assertion"
                );
                return Ok(Some(CorrespondingSpecRepoStatus::RemoteMatch(
                    RemoteMatchStatus {
                        spec_repo: spec_repo.clone(),
                        spec_repo_name: spec_repo_subdir_name.clone(),
                        is_extra_remote: true,
                        local_branches_tracking_remote: find_local_branches_tracking_remote_branch(
                            repo,
                            &tracking_branch,
                        )?,
                        remote_tracking_branch: tracking_branch,
                        matching_remote_name: remote_name.clone(),
                    },
                )));
            }
        }
    }

    // Make another pass through spec repos to look for related repos
    // (same directory name, overlapping commit history).
    for (spec_repo_subdir_name, spec_repo) in spec_repos {
        if repo_subdir_name == spec_repo_subdir_name {
            let op = format!("checking possible upstream {}", spec_repo.url);
            c(StatusCalculatorEvent::StartSubdirOperation { operation_name: op });
            let spec_repo_revs = clone_and_enumerate_revisions(spec_repo)?;
            let on_disk_revs = enumerate_revisions(repo.path())?;
            // Empty operation name clears the progress label.
            c(StatusCalculatorEvent::StartSubdirOperation {
                operation_name: "".into(),
            });

            if spec_repo_revs.is_disjoint(&on_disk_revs) {
                continue;
            }

            return Ok(Some(CorrespondingSpecRepoStatus::RelatedRepo {
                spec_repo: spec_repo.clone(),
                spec_repo_name: spec_repo_subdir_name.clone(),
            }));
        }
    }

    Ok(None)
}