├── .env.example ├── demo.gif ├── .gitignore ├── rust-toolchain.toml ├── src ├── actors │ ├── mod.rs │ ├── grim_reaper.rs │ ├── watcher.rs │ ├── command.rs │ └── console.rs ├── lib.rs ├── utils.rs ├── global_config.rs ├── graph │ ├── mod.rs │ ├── ui.rs │ └── graph_task.rs ├── args.rs ├── serial_mode.rs ├── config │ ├── pipe.rs │ ├── ops.rs │ ├── color.rs │ └── mod.rs ├── exec.rs ├── tests.rs └── main.rs ├── .pre-commit-config.yaml ├── .github ├── workflows │ ├── pr-title-check.yml │ ├── tests.yml │ └── release.yml ├── dependabot.yml ├── release.yml └── pr-title-checker-config.json ├── Cargo.toml ├── installer.sh ├── whiz.yaml ├── README.md └── LICENSE /.env.example: -------------------------------------------------------------------------------- 1 | world="earth &\nmars" -------------------------------------------------------------------------------- /demo.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/zifeo/whiz/HEAD/demo.gif -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | 2 | .DS_Store 3 | debug/ 4 | target/ 5 | Cargo.lock 6 | **/*.rs.bk 7 | logs -------------------------------------------------------------------------------- /rust-toolchain.toml: -------------------------------------------------------------------------------- 1 | [toolchain] 2 | channel = "stable" 3 | components = ["rustfmt", "clippy"] 4 | -------------------------------------------------------------------------------- /src/actors/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod command; 2 | pub mod console; 3 | pub mod grim_reaper; 4 | pub mod watcher; 5 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 
| repos: 2 | - repo: https://github.com/doublify/pre-commit-rust 3 | rev: v1.0 4 | hooks: 5 | - id: fmt 6 | - id: cargo-check 7 | - id: clippy 8 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | pub mod actors; 2 | pub mod args; 3 | pub mod config; 4 | pub mod exec; 5 | pub mod global_config; 6 | pub mod serial_mode; 7 | pub mod utils; 8 | 9 | #[cfg(test)] 10 | mod tests; 11 | -------------------------------------------------------------------------------- /.github/workflows/pr-title-check.yml: -------------------------------------------------------------------------------- 1 | on: 2 | pull_request: 3 | types: 4 | - opened 5 | - edited 6 | - synchronize 7 | - ready_for_review 8 | 9 | jobs: 10 | check: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - uses: thehanimo/pr-title-checker@v1.4.3 14 | with: 15 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 16 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: github-actions 4 | directory: / 5 | schedule: 6 | interval: monthly 7 | groups: 8 | deps: 9 | patterns: 10 | - "*" 11 | - package-ecosystem: cargo 12 | directory: / 13 | schedule: 14 | interval: monthly 15 | groups: 16 | deps: 17 | patterns: 18 | - "*" 19 | -------------------------------------------------------------------------------- /.github/release.yml: -------------------------------------------------------------------------------- 1 | changelog: 2 | exclude: 3 | authors: 4 | - octocat 5 | - dependabot 6 | categories: 7 | - title: Breaking changes 🛠 8 | labels: 9 | - breaking-change 10 | - title: Documentation improvements 📖 11 | labels: 12 | - documentation 13 | - title: Bug fixes 🐛 14 | labels: 15 | - bug 16 | - title: New features 🎉 17 | labels: 18 
| - "*" 19 | -------------------------------------------------------------------------------- /.github/pr-title-checker-config.json: -------------------------------------------------------------------------------- 1 | { 2 | "LABEL": { 3 | "name": "title-needs-formatting", 4 | "color": "eee" 5 | }, 6 | "CHECKS": { 7 | "regexp": "(?build|chore|ci|docs|feat|fix|perf|refactor|revert|test)(?(?:\\([^()\\r\\n]*\\)|\\()?(?!)?:)(?:.*)?", 8 | "regexpFlags": "i" 9 | }, 10 | "MESSAGES": { 11 | "success": "All OK", 12 | "failure": "Failing CI test", 13 | "notice": "" 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /.github/workflows/tests.yml: -------------------------------------------------------------------------------- 1 | on: 2 | push: 3 | branches: 4 | - main 5 | pull_request: 6 | types: 7 | - opened 8 | - synchronize 9 | - ready_for_review 10 | 11 | jobs: 12 | test: 13 | strategy: 14 | matrix: 15 | include: 16 | - os: ubuntu-latest 17 | - os: macos-latest 18 | - os: windows-latest 19 | 20 | runs-on: ${{ matrix.os }} 21 | steps: 22 | - uses: actions/checkout@v6 23 | - uses: dtolnay/rust-toolchain@stable 24 | - uses: Swatinem/rust-cache@v2 25 | with: 26 | key: ${{ runner.os }} 27 | - run: | 28 | cargo run -- --help 29 | cargo test 30 | -------------------------------------------------------------------------------- /src/utils.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | io::{Error, ErrorKind}, 3 | path::{Path, PathBuf}, 4 | }; 5 | 6 | pub fn find_config_path(location: &Path, config_name: &str) -> Result { 7 | let config_name_as_path = Path::new(config_name); 8 | let mut config_path = location.to_path_buf(); 9 | config_path.push(config_name_as_path); 10 | if config_path.exists() { 11 | return Ok(config_path); 12 | } 13 | 14 | let parent = location.parent(); 15 | match parent { 16 | // not found 17 | None => { 18 | let message = format!("configuration file {} not 
found", config_name); 19 | Err(Error::new(ErrorKind::NotFound, message)) 20 | } 21 | // backtrack 22 | Some(parent) => find_config_path(parent, config_name), 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /src/global_config.rs: -------------------------------------------------------------------------------- 1 | use anyhow::{Ok, Result}; 2 | use chrono::{DateTime, Utc}; 3 | 4 | use serde::{Deserialize, Serialize}; 5 | use std::path::Path; 6 | 7 | use tokio::fs; 8 | 9 | #[derive(Deserialize, Serialize)] 10 | pub struct GlobalConfig { 11 | pub update_check: DateTime, 12 | } 13 | 14 | impl GlobalConfig { 15 | pub async fn load>(path: P) -> Result { 16 | if path.as_ref().exists() { 17 | let config_str = fs::read_to_string(path).await?; 18 | let config: GlobalConfig = serde_yaml::from_str(&config_str)?; 19 | Ok(config) 20 | } else { 21 | let config = GlobalConfig { 22 | update_check: Utc::now(), 23 | }; 24 | config.save(path).await?; 25 | Ok(config) 26 | } 27 | } 28 | 29 | pub async fn save>(&self, path: P) -> Result<()> { 30 | let config_str = serde_yaml::to_string(&self)?; 31 | fs::create_dir_all(path.as_ref().parent().unwrap()).await?; 32 | fs::write(path, config_str).await?; 33 | Ok(()) 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "whiz" 3 | version = "0.5.1-beta.1" 4 | edition = "2021" 5 | description = "Modern DAG/tasks runner for multi-platform monorepos." 
6 | license = "MPL-2.0" 7 | 8 | [dependencies] 9 | actix = "0.13.5" 10 | ansi-to-tui = "4.0.1" 11 | anyhow = "1.0.86" 12 | chrono = { version = "0.4.38", features = ["serde"] } 13 | clap = { version = "4.5.8", features = ["derive"] } 14 | crossterm = "0.27.0" 15 | directories = "5.0.1" 16 | dotenv-parser = "0.1.3" 17 | globset = "0.4.14" 18 | ignore = "0.4.22" 19 | indexmap = { version = "2.2.6", features = ["serde"] } 20 | # lade-sdk = { path = "../../github/lade/sdk"} 21 | lade-sdk = "0.11.2" 22 | openssl = { version = "0.10.70", features = ["vendored"] } 23 | notify = "6.1.1" 24 | path-absolutize = "3.1.1" 25 | path-clean = "1.0.1" 26 | regex = "1.10.5" 27 | self_update = { version = "0.40.0", features = [ 28 | "archive-tar", 29 | "archive-zip", 30 | "compression-flate2", 31 | "compression-zip-deflate", 32 | "compression-zip-bzip2", 33 | ] } 34 | serde = { version = "1.0.203", features = ["derive"] } 35 | serde_yaml = "0.9.34" 36 | strip-ansi-escapes = "0.2.0" 37 | subprocess = "0.2.9" 38 | textwrap = "0.16.1" 39 | tokio = { version = "1.38.0", features = ["full"] } 40 | ratatui = "0.27.0" 41 | url = "2.5.2" 42 | assert_cmd = "2.0.14" 43 | semver = "1.0.23" 44 | shlex = "1.3.0" 45 | termgraph = "0.4.0" 46 | lazy_static = "1.5.0" 47 | -------------------------------------------------------------------------------- /src/graph/mod.rs: -------------------------------------------------------------------------------- 1 | pub use graph_task::{Graph, Task}; 2 | use ratatui::prelude::{CrosstermBackend, Terminal}; 3 | use std::error::Error; 4 | use termgraph::fdisplay; 5 | 6 | use ui::{Drawer, Model, TaskFormatter}; 7 | 8 | use self::ui::LineFormat; 9 | 10 | pub mod graph_task; 11 | mod ui; 12 | 13 | pub fn draw_graph(tasks_list: Vec, boxed: bool) -> Result<(), Box> { 14 | let boxed = match boxed { 15 | true => LineFormat::Boxed, 16 | _ => LineFormat::Ascii, 17 | }; 18 | let graph = Graph::from_tasks_list(&tasks_list); 19 | 20 | //use termgraph to generate the ascii 
representation 21 | let config = termgraph::Config::new(TaskFormatter::new(), 200) 22 | .line_glyphs(TaskFormatter::from_commandline(boxed)); 23 | let mut ascii_graph = termgraph::DirectedGraph::new(); 24 | ascii_graph.add_nodes(graph.nodes()); 25 | ascii_graph.add_edges(graph.edges()); 26 | 27 | // Write graphics into the buffer 28 | let mut formatted_ascii_graph = Vec::new(); 29 | fdisplay(&ascii_graph, &config, &mut formatted_ascii_graph); 30 | 31 | //Start ratatui initializaion 32 | crossterm::terminal::enable_raw_mode()?; 33 | crossterm::execute!(std::io::stderr(), crossterm::terminal::EnterAlternateScreen)?; 34 | let mut terminal = Terminal::new(CrosstermBackend::new(std::io::stderr()))?; 35 | 36 | // let mut ui = Model::default(); 37 | let mut ui = Model::new(&formatted_ascii_graph, graph.format_independent_task()); 38 | 39 | loop { 40 | terminal.draw(|f| { 41 | Drawer::draw(&mut ui, f); 42 | })?; 43 | 44 | let mut current_msg = ui::handle_key_event()?; 45 | 46 | while current_msg.is_some() { 47 | current_msg = ui::update(&mut ui, current_msg.unwrap()) 48 | } 49 | 50 | if ui.should_quit { 51 | break; 52 | } 53 | } 54 | 55 | crossterm::execute!(std::io::stderr(), crossterm::terminal::LeaveAlternateScreen)?; 56 | crossterm::terminal::disable_raw_mode()?; 57 | 58 | Ok(()) 59 | } 60 | -------------------------------------------------------------------------------- /src/args.rs: -------------------------------------------------------------------------------- 1 | use clap::{Parser, Subcommand}; 2 | 3 | #[derive(Parser, Debug, Clone)] 4 | pub struct Upgrade { 5 | /// Upgrade to specific version (e.g. 
1.0.0) 6 | #[arg(long)] 7 | pub version: Option, 8 | 9 | /// Do not ask for version confirmation 10 | #[arg(short, long, default_value_t = false)] 11 | pub yes: bool, 12 | } 13 | 14 | #[derive(Parser, Debug, Clone)] 15 | pub struct Graph { 16 | /// Draw the line using box-drawing character 17 | #[arg(long, short, default_value_t = false)] 18 | pub boxed: bool, 19 | } 20 | 21 | #[derive(Parser, Debug, Clone)] 22 | pub struct Execute { 23 | #[arg()] 24 | pub task: String, 25 | } 26 | 27 | /// Set of subcommands. 28 | #[derive(Subcommand, Debug)] 29 | pub enum Command { 30 | /// Upgrade whiz. 31 | Upgrade(Upgrade), 32 | /// PUpgrade whizrint the graphical ascii representation 33 | Graph(Graph), 34 | /// List all the jobs set in the config file 35 | ListJobs, 36 | /// Execute a specific job; running its dependencies serially 37 | #[command(name = "x")] 38 | Execute(Execute), 39 | } 40 | 41 | #[derive(Parser, Debug)] 42 | #[command( 43 | name = "whiz", 44 | about, 45 | long_about= None, 46 | )] 47 | pub struct Args { 48 | #[arg(short = 'V', long)] 49 | pub version: bool, 50 | 51 | #[command(subcommand)] 52 | pub command: Option, 53 | 54 | #[arg(short, long, default_value = "whiz.yaml")] 55 | pub file: String, 56 | 57 | #[arg(short, long)] 58 | pub verbose: bool, 59 | 60 | #[arg(short, long)] 61 | /// Enable timestamps in logging 62 | pub timestamp: bool, 63 | 64 | /// Run specific jobs 65 | #[arg(short, long, value_name = "JOB")] 66 | pub run: Vec, 67 | 68 | // This disables fs watching despite any values given to the `watch` flag. 69 | // 70 | /// Whiz will exit after all tasks have finished executing. 
71 | #[arg(long)] 72 | pub exit_after: bool, 73 | 74 | // Globally toggle triggering task reloading from any watched files 75 | /// Globally enable/disable fs watching 76 | #[arg(long, default_value_t = true)] 77 | pub watch: bool, 78 | } 79 | -------------------------------------------------------------------------------- /src/serial_mode.rs: -------------------------------------------------------------------------------- 1 | use actix::System; 2 | use anyhow::{anyhow, Result}; 3 | use crossterm::style::Stylize; 4 | 5 | use crate::{args::Execute, config::Config, exec::ExecBuilder}; 6 | 7 | pub async fn start(opts: &Execute, config: Config) -> Result<()> { 8 | let mut queue: Vec = Vec::new(); 9 | queue.push(opts.task.clone()); 10 | 11 | let mut executed_tasks: Vec = Vec::new(); 12 | 13 | while let Some(task_name) = queue.pop() { 14 | if !executed_tasks.is_empty() { 15 | println!(); 16 | } 17 | 18 | let task = config 19 | .ops 20 | .get(&task_name) 21 | .ok_or_else(|| anyhow!("Task not found: {}", task_name))?; 22 | 23 | if executed_tasks.contains(&task_name) { 24 | continue; 25 | } 26 | 27 | let deps = task 28 | .depends_on 29 | .resolve() 30 | .into_iter() 31 | .filter(|dep| !executed_tasks.contains(dep)) 32 | .collect::>(); 33 | if !deps.is_empty() { 34 | queue.push(task_name); 35 | queue.extend(deps); 36 | continue; 37 | } 38 | 39 | println!( 40 | "---------------- Starting task {task} ---------------", 41 | task = task_name.as_str().cyan(), 42 | ); 43 | 44 | let exec_builder = ExecBuilder::new(task, &config).await?; 45 | 46 | let exit_status = tokio::task::spawn_blocking(move || { 47 | let exec = exec_builder 48 | .build() 49 | .unwrap() 50 | .stdout(subprocess::Redirection::None) 51 | .stderr(subprocess::Redirection::None); 52 | exec.join().unwrap() 53 | }) 54 | .await?; 55 | 56 | let prefix = if exit_status.success() { 57 | "✓".green() 58 | } else { 59 | "✖️".red() 60 | }; 61 | 62 | println!( 63 | "---- {prefix} Task {task} exited with status {status} ----", 
64 | task = task_name.as_str().cyan(), 65 | status = format!("{:?}", exit_status).yellow(), 66 | ); 67 | 68 | if !exit_status.success() { 69 | System::current().stop_with_code(1); 70 | } 71 | 72 | executed_tasks.push(task_name.clone()); 73 | } 74 | 75 | Ok(()) 76 | } 77 | -------------------------------------------------------------------------------- /src/config/pipe.rs: -------------------------------------------------------------------------------- 1 | use std::str::FromStr; 2 | 3 | use anyhow::anyhow; 4 | use regex::Regex; 5 | use url::Url; 6 | 7 | /// A pipe represents the redirection of the output of a task 8 | /// matched by a regular expression to an [`OutputRedirection`]. 9 | #[derive(Clone, Debug)] 10 | pub struct Pipe { 11 | /// Regular expression used to capture the output of the task and 12 | /// redirect it. 13 | pub regex: Regex, 14 | /// The place where the ouput matched by the regex is sent. 15 | pub redirection: OutputRedirection, 16 | } 17 | 18 | impl Pipe { 19 | /// Returns a pipe from the configuration provided. 20 | /// 21 | /// The configuration provided is a tuple of strings with the format of 22 | /// ([`Regex`], [`OutputRedirection`]). 23 | pub fn from(pipe_config: (&String, &String)) -> anyhow::Result { 24 | let (regex, redirection) = pipe_config; 25 | let regex = Regex::new(regex)?; 26 | let redirection = OutputRedirection::from_str(redirection)?; 27 | Ok(Self { regex, redirection }) 28 | } 29 | } 30 | 31 | /// Set of places to which the output of a task can be redirected. 32 | #[derive(Clone, Debug)] 33 | pub enum OutputRedirection { 34 | /// Indicates that the output of a task should be sent 35 | /// to a new virtual tab with the given name. 36 | Tab(String), 37 | /// Indicates that the output of a task should be saved 38 | /// as a log file in the given path. 
39 | File(String), 40 | } 41 | 42 | impl FromStr for OutputRedirection { 43 | type Err = anyhow::Error; 44 | 45 | /// Creates a new [`OutputRedirection`] from the given redirection URI. 46 | /// 47 | /// Available URI schemes: 48 | /// 49 | /// - file (default) 50 | /// - whiz 51 | /// 52 | /// Redirection URI examples: 53 | /// 54 | /// - whiz://virtual_views -> Tab 55 | /// - file:///dev/null -> File 56 | /// - ./logs/server.log -> File 57 | fn from_str(redirection_uri: &str) -> anyhow::Result { 58 | // URIs that do not start with a scheme are considered files by default 59 | if redirection_uri.starts_with('/') || redirection_uri.starts_with('.') { 60 | let output_redirection = OutputRedirection::File(redirection_uri.to_string()); 61 | return Ok(output_redirection); 62 | } 63 | 64 | let redirection_uri = Url::parse(redirection_uri)?; 65 | 66 | let scheme = redirection_uri.scheme(); 67 | let host = redirection_uri.host(); 68 | 69 | let mut path = String::new(); 70 | 71 | if let Some(host) = host { 72 | path += &host.to_string(); 73 | } 74 | 75 | path += redirection_uri.path(); 76 | 77 | match scheme { 78 | "whiz" => Ok(OutputRedirection::Tab(path)), 79 | "file" => Ok(OutputRedirection::File(path)), 80 | _ => Err(anyhow!("unsupported scheme")), 81 | } 82 | } 83 | } 84 | -------------------------------------------------------------------------------- /src/actors/grim_reaper.rs: -------------------------------------------------------------------------------- 1 | use std::collections::{HashMap, HashSet}; 2 | 3 | use actix::prelude::*; 4 | use subprocess::ExitStatus; 5 | 6 | /// This is responsible for exiting whiz when all tasks are done. 7 | /// It `send`s it's targets `PermaDeathInvite` which and when all 8 | /// have been `rsvp`d, terminates the Actix runtime and thus the program. 
9 | pub struct GrimReaperActor { 10 | live_invites: HashSet, 11 | non_zero_deaths: HashMap, 12 | } 13 | 14 | impl GrimReaperActor { 15 | pub async fn start_new(targets: HashMap>) -> anyhow::Result<()> 16 | where 17 | T: Actor + Handler, 18 | ::Context: actix::dev::ToEnvelope, 19 | { 20 | let reaper_addr = GrimReaperActor { 21 | live_invites: targets.keys().cloned().collect(), 22 | non_zero_deaths: Default::default(), 23 | } 24 | .start(); 25 | for target in targets.values() { 26 | target 27 | .send(PermaDeathInvite { 28 | reaper_addr: reaper_addr.clone(), 29 | }) 30 | .await?; 31 | } 32 | Ok(()) 33 | } 34 | } 35 | 36 | impl Actor for GrimReaperActor { 37 | type Context = Context; 38 | } 39 | 40 | #[derive(Message)] 41 | #[rtype(result = "()")] 42 | pub struct PermaDeathInvite { 43 | reaper_addr: Addr, 44 | } 45 | 46 | impl PermaDeathInvite { 47 | pub fn rsvp(self, actor_name: String, exit_status: ExitStatus, invitee_cx: &mut C) 48 | where 49 | T: Actor + Handler, 50 | ::Context: actix::dev::ToEnvelope, 51 | C: actix::ActorContext, 52 | { 53 | self.reaper_addr 54 | .try_send(InviteAccepted { 55 | actor_name, 56 | exit_status, 57 | }) 58 | .expect("GrimReaperActor mailbox is closed or its mailbox full"); 59 | invitee_cx.stop(); 60 | } 61 | } 62 | 63 | #[derive(Message)] 64 | #[rtype(result = "()")] 65 | pub struct InviteAccepted { 66 | actor_name: String, 67 | exit_status: ExitStatus, 68 | } 69 | 70 | impl Handler for GrimReaperActor { 71 | type Result = (); 72 | 73 | fn handle(&mut self, evt: InviteAccepted, _: &mut Context) -> Self::Result { 74 | assert!(self.live_invites.remove(&evt.actor_name)); 75 | if !evt.exit_status.success() { 76 | self.non_zero_deaths.insert(evt.actor_name, evt.exit_status); 77 | } 78 | if self.live_invites.is_empty() { 79 | if let Some((_op_name, status)) = self.non_zero_deaths.iter().next() { 80 | // exit with the error code of the first aberrant task 81 | let code = match *status { 82 | ExitStatus::Exited(code) => code as i32, 83 | 
ExitStatus::Other(code) => code, 84 | ExitStatus::Signaled(code) => code as i32, 85 | ExitStatus::Undetermined => { 86 | eprintln!("ERROR: task {_op_name} exited with Undetermined status"); 87 | 1 88 | } 89 | }; 90 | System::current().stop_with_code(code); 91 | } 92 | System::current().stop(); 93 | } 94 | } 95 | } 96 | -------------------------------------------------------------------------------- /installer.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | set -e -u 4 | 5 | ORG=zifeo 6 | REPO=whiz 7 | EXT=tar.gz 8 | NAME=whiz 9 | EXE=whiz 10 | 11 | INSTALLER_URL="https://raw.githubusercontent.com/$ORG/$REPO/main/installer.sh" 12 | RELEASE_URL="https://github.com/$ORG/$REPO/releases" 13 | 14 | LATEST_VERSION=$(curl "$RELEASE_URL/latest" -s -L -I -o /dev/null -w '%{url_effective}') 15 | LATEST_VERSION="${LATEST_VERSION##*v}" 16 | 17 | PLATFORM="${PLATFORM:-}" 18 | TMP_DIR=$(mktemp -d) 19 | OUT_DIR="${OUT_DIR:-/usr/local/bin}" 20 | VERSION="${VERSION:-$LATEST_VERSION}" 21 | MACHINE=$(uname -m) 22 | 23 | if [ "${PLATFORM:-x}" = "x" ]; then 24 | case "$(uname -s | tr '[:upper:]' '[:lower:]')" in 25 | "linux") 26 | case "$MACHINE" in 27 | "arm64"* | "aarch64"* ) PLATFORM='aarch64-unknown-linux-gnu' ;; 28 | *"64") PLATFORM='x86_64-unknown-linux-gnu' ;; 29 | esac 30 | ;; 31 | "darwin") 32 | case "$MACHINE" in 33 | "arm64"* | "aarch64"* ) PLATFORM='aarch64-apple-darwin' ;; 34 | *"64") PLATFORM='x86_64-apple-darwin' ;; 35 | esac 36 | ;; 37 | "msys"*|"cygwin"*|"mingw"*|*"_nt"*|"win"*) 38 | case "$MACHINE" in 39 | *"64") PLATFORM='x86_64-pc-windows-msvc' ;; 40 | esac 41 | ;; 42 | esac 43 | if [ "${PLATFORM:-x}" = "x" ]; then 44 | cat >&2 <&2 <[a-zA-Z0-9_]+).*$": ./logs/animals/${name}.log 123 | 124 | # Content not matched by a regular expression goes to 125 | # `whiz://{task_name}` by default 126 | 127 | friends_conversation: 128 | command: | 129 | echo "[message] Dave: hello" 130 | echo "[message] Bob: bonjour" 131 | 
132 | pipe: 133 | # Dynamic naming for virtual tabs generation 134 | '^\[message\] (?P[a-zA-Z]+):.*$': whiz://talker_${name} 135 | 136 | colors_test: 137 | command: | 138 | echo "[message] Dave: Hello!" 139 | echo "[message] Bob: Bonjour!" 140 | echo "[message] John: Bye-Bye!" 141 | 142 | color: 143 | "\\[[a-z]+\\]": "yellow" 144 | "[A-Z][a-z]+\\:": "#def" 145 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Release 2 | 3 | on: 4 | push: 5 | tags: 6 | - "v*" 7 | 8 | jobs: 9 | check-bump: 10 | runs-on: ubuntu-latest 11 | if: github.ref_type == 'tag' 12 | 13 | steps: 14 | - uses: actions/checkout@v6 15 | - uses: dtolnay/rust-toolchain@stable 16 | - uses: Swatinem/rust-cache@v2 17 | with: 18 | key: ${{ runner.os }} 19 | - name: Check version 20 | run: | 21 | VERSION=$(cargo run -- --version | cut -d' ' -f2) 22 | if [[ "${{ github.ref_name }}" != "v$VERSION" ]]; then 23 | echo "Tag does not match code version v$VERSION, stopping." 
24 | exit -1 25 | fi 26 | echo "Releasing v$VERSION" 27 | - uses: ncipollo/release-action@v1 28 | with: 29 | tag: ${{ github.ref_name }} 30 | makeLatest: true 31 | generateReleaseNotes: true 32 | 33 | crates: 34 | needs: 35 | - check-bump 36 | runs-on: ubuntu-latest 37 | steps: 38 | - uses: actions/checkout@v6 39 | - uses: dtolnay/rust-toolchain@stable 40 | - uses: Swatinem/rust-cache@v2 41 | with: 42 | key: ${{ runner.os }} 43 | - uses: katyo/publish-crates@v2 44 | with: 45 | registry-token: ${{ secrets.CARGO_REGISTRY_TOKEN }} 46 | ignore-unpublished-changes: true 47 | 48 | binaries: 49 | needs: 50 | - check-bump 51 | runs-on: ${{ matrix.os }} 52 | strategy: 53 | fail-fast: false 54 | matrix: 55 | include: 56 | - os: macos-latest 57 | target: x86_64-apple-darwin 58 | suffix: "" 59 | - os: macos-latest 60 | target: aarch64-apple-darwin 61 | suffix: "" 62 | - os: ubuntu-latest 63 | target: x86_64-unknown-linux-gnu 64 | suffix: "" 65 | - os: ubuntu-latest 66 | target: x86_64-unknown-linux-musl 67 | suffix: "" 68 | - os: ubuntu-latest 69 | target: aarch64-unknown-linux-gnu 70 | suffix: "" 71 | - os: windows-latest 72 | target: x86_64-pc-windows-msvc 73 | suffix: ".exe" 74 | steps: 75 | - uses: actions/checkout@v6 76 | - uses: dtolnay/rust-toolchain@stable 77 | with: 78 | targets: ${{ matrix.target }} 79 | - uses: Swatinem/rust-cache@v2 80 | with: 81 | key: ${{ runner.os }} 82 | - name: Build 83 | uses: actions-rs/cargo@v1 84 | with: 85 | use-cross: true 86 | command: build 87 | args: "--locked --release --target ${{ matrix.target }}" 88 | - name: Compress 89 | run: | 90 | mv "target/${{ matrix.target }}/release/whiz${{ matrix.suffix }}" . 
91 | tar czvf "whiz-${{ github.ref_name }}-${{ matrix.target }}.tar.gz" "whiz${{ matrix.suffix }}" 92 | - name: Upload 93 | uses: svenstaro/upload-release-action@v2 94 | with: 95 | tag: ${{ github.ref }} 96 | file: "whiz-${{ github.ref_name }}-${{ matrix.target }}.tar.gz" 97 | asset_name: "whiz-${{ github.ref_name }}-${{ matrix.target }}.tar.gz" 98 | overwrite: false 99 | 100 | bump: 101 | needs: 102 | - crates 103 | - binaries 104 | runs-on: ubuntu-latest 105 | steps: 106 | - uses: actions/checkout@v6 107 | - uses: dtolnay/rust-toolchain@stable 108 | - uses: Swatinem/rust-cache@v2 109 | with: 110 | key: ${{ runner.os }} 111 | - name: Bump version 112 | id: bump 113 | run: | 114 | cargo install cargo-edit 115 | cargo set-version --bump beta 116 | echo "version=$(cargo metadata --format-version 1 --no-deps | jq -r .packages[0].version)" >> $GITHUB_OUTPUT 117 | - uses: peter-evans/create-pull-request@v7 118 | with: 119 | branch: bump-${{ steps.bump.outputs.version }} 120 | delete-branch: true 121 | commit-message: "chore: prepare release ${{ steps.bump.outputs.version }}" 122 | title: "chore: prepare release ${{ steps.bump.outputs.version }}" 123 | body: "Automatic suggested bump" 124 | base: main 125 | -------------------------------------------------------------------------------- /src/actors/watcher.rs: -------------------------------------------------------------------------------- 1 | use actix::prelude::*; 2 | 3 | use globset::GlobSet; 4 | use ignore::gitignore::GitignoreBuilder; 5 | use notify::event::ModifyKind; 6 | use notify::{recommended_watcher, Event, EventKind, RecommendedWatcher, RecursiveMode, Watcher}; 7 | use std::collections::HashSet; 8 | use std::path::{Path, PathBuf}; 9 | use std::sync::Arc; 10 | 11 | use super::command::{CommandActor, Reload}; 12 | 13 | pub struct WatcherActor { 14 | watcher: Option, 15 | globs: Vec, 16 | base_dir: Arc, 17 | // List of file paths to ignore on the watcher 18 | ignore: HashSet, 19 | } 20 | 21 | impl WatcherActor { 
22 | pub fn new(base_dir: Arc) -> Self { 23 | Self { 24 | watcher: None, 25 | globs: Vec::default(), 26 | base_dir, 27 | ignore: HashSet::default(), 28 | } 29 | } 30 | } 31 | 32 | impl Actor for WatcherActor { 33 | type Context = Context; 34 | 35 | fn started(&mut self, ctx: &mut Context) { 36 | let addr = ctx.address(); 37 | 38 | let mut git_ignore_builder = GitignoreBuilder::new(&self.base_dir); 39 | // add globs from `/.gitignore` 40 | git_ignore_builder.add(self.base_dir.join(".gitignore")); 41 | // ignore `/.git` folder 42 | git_ignore_builder.add_line(None, ".git/").unwrap(); 43 | let git_ignore = git_ignore_builder.build(); 44 | 45 | let mut watcher = recommended_watcher(move |res: Result| { 46 | let mut event = res.unwrap(); 47 | 48 | if let Ok(git_ignore) = &git_ignore { 49 | event.paths.retain(|path| { 50 | !git_ignore 51 | .matched_path_or_any_parents(path, false) 52 | .is_ignore() 53 | }) 54 | }; 55 | 56 | if !event.paths.is_empty() { 57 | match event.kind { 58 | EventKind::Create(_) 59 | | EventKind::Remove(_) 60 | | EventKind::Modify(ModifyKind::Data(_)) 61 | | EventKind::Modify(ModifyKind::Name(_)) => { 62 | addr.do_send(WatchEvent(event)); 63 | } 64 | _ => {} 65 | } 66 | } 67 | }) 68 | .unwrap(); 69 | 70 | watcher 71 | .watch(&self.base_dir, RecursiveMode::Recursive) 72 | .unwrap(); 73 | 74 | self.watcher = Some(watcher); 75 | } 76 | } 77 | 78 | #[derive(Message, Clone)] 79 | #[rtype(result = "()")] 80 | pub struct WatchGlob { 81 | pub command: Addr, 82 | pub on: GlobSet, 83 | pub off: GlobSet, 84 | } 85 | 86 | impl Handler for WatcherActor { 87 | type Result = (); 88 | 89 | fn handle(&mut self, msg: WatchGlob, _: &mut Context) -> Self::Result { 90 | self.globs.push(msg); 91 | } 92 | } 93 | 94 | #[derive(Message)] 95 | #[rtype(result = "()")] 96 | struct WatchEvent(Event); 97 | 98 | impl Handler for WatcherActor { 99 | type Result = (); 100 | 101 | fn handle(&mut self, msg: WatchEvent, _: &mut Context) -> Self::Result { 102 | let WatchEvent(event) = 
msg; 103 | for glob in &self.globs { 104 | let paths = event 105 | .paths 106 | .iter() 107 | .filter(|path| { 108 | !self.ignore.contains(path.as_path()) 109 | && glob.on.is_match(path) 110 | && !glob.off.is_match(path) 111 | }) 112 | .collect::>(); 113 | 114 | if !paths.is_empty() { 115 | let trigger = paths 116 | .iter() 117 | .map(|p| p.as_path().display().to_string()) 118 | .collect::>() 119 | .join(", "); 120 | glob.command.do_send(Reload::Watch(trigger)) 121 | } 122 | } 123 | } 124 | } 125 | 126 | #[derive(Message, Clone)] 127 | #[rtype(result = "()")] 128 | pub struct IgnorePath(pub PathBuf); 129 | 130 | impl Handler for WatcherActor { 131 | type Result = (); 132 | 133 | fn handle(&mut self, msg: IgnorePath, _: &mut Context) -> Self::Result { 134 | let IgnorePath(path) = msg; 135 | self.ignore.insert(path); 136 | } 137 | } 138 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Whiz 2 | 3 | ![Crates.io](https://img.shields.io/crates/v/whiz) 4 | 5 | Whiz (/wɪz/) is a modern DAG/tasks runner for multi-platform monorepos. It 6 | provides convenient live reloading, env management, pipes, and more in a tabbed 7 | view. 8 | 9 | ![Demo](./demo.gif) 10 | 11 | > Whiz is part of the 12 | > [Metatype ecosystem](https://github.com/metatypedev/metatype). Consider 13 | > checking out how this component integrates with the whole ecosystem and browse 14 | > the 15 | > [documentation](https://metatype.dev?utm_source=github&utm_medium=readme&utm_campaign=whiz) 16 | > to see more examples. 17 | 18 | ## Getting started 19 | 20 | You can download the binary executable from 21 | [releases page](https://github.com/zifeo/whiz/releases/) on GitHub, make it 22 | executable and add it to your `$PATH` or use the method below to automate those 23 | steps. 
24 | 25 | ```bash 26 | curl -fsSL https://raw.githubusercontent.com/zifeo/whiz/main/installer.sh | bash 27 | 28 | # via cargo 29 | cargo install whiz --locked 30 | cargo install --git https://github.com/zifeo/whiz --locked 31 | 32 | # create your tasks file, see https://github.com/zifeo/whiz/blob/main/whiz.yaml for an example 33 | touch whiz.yaml 34 | 35 | # run 36 | whiz 37 | 38 | # upgrade 39 | whiz upgrade 40 | ``` 41 | 42 | ## Usage 43 | 44 | ### Configuration file 45 | 46 | Environment variables for all tasks can be defined in the `env` section at root 47 | level. You can use [Lade loaders](https://github.com/zifeo/lade) when loading 48 | secrets (e.g. `infisical://DOMAIN/PROJECT_NAME/ENV_NAME/SECRET_NAME`). 49 | 50 | ``` 51 | env: 52 | [key]: [value] 53 | ``` 54 | 55 | All other root level keys are considered as tasks. Each time a dependency is 56 | load, the dependent task is also reloaded. 57 | 58 | ```yaml 59 | [task]: 60 | workdir: [working directory, by default .] 61 | command: [command] 62 | watch: [file or list of files] 63 | env: 64 | [key]: [value] 65 | env_file: [file or list of env files] 66 | depends_on: [task or list of task names for dependencies] 67 | pipes: # see https://github.com/zifeo/whiz/blob/main/whiz.yaml 68 | [regex]: [destination] 69 | ``` 70 | 71 | See this [file](https://github.com/zifeo/whiz/blob/main/whiz.yaml) for a 72 | complete example. 73 | 74 | ### CLI options 75 | 76 | See `whiz --help` for more information. 
| Subcommands | Description | 79 | | ------------------- | ------------------------------------------------- | 80 | | upgrade | Upgrade whiz | 81 | | list-jobs | List all the available jobs | 82 | | graph | Print the graphical ascii representation | 83 | | help | Display help message or the help for subcommand | 84 | 85 | 86 | | Flags | Description | 87 | | ------------------- | ------------------------------------------------- | 88 | | -f, --file \ | Specify the config file | 89 | | -h, --help | Print help information | 90 | | -r, --run \ | Run specific jobs | 91 | | -t, --timestamp | Enable timestamps in logging | 92 | | -v, --verbose | Enable verbose mode | 93 | | -V, --version | Print whiz version | 94 | | --watch | Globally enable/disable fs watching | 95 | | --exit-after | Exit whiz after all tasks are done | 96 | 97 | 98 | ### Key bindings 99 | 100 | | Keys | Action | 101 | | ------------ | ----------------------------------- | 102 | | l, RightArrow | go to next tab | 103 | | h, LeftArrow | go to previous tab | 104 | | k, Ctrl + p | scroll up one line | 105 | | j, Ctrl + n | scroll down one line | 106 | | Ctrl + u | scroll up half page | 107 | | Ctrl + d | scroll down half page | 108 | | Ctrl + b | scroll up full page | 109 | | Ctrl + f | scroll down full page | 110 | | 0 | go to last tab | 111 | | 1-9 | go to the tab at the given position | 112 | | q, Ctrl + c | exit the program |
get_absolute_workdir(&self, base_dir: &Path) -> PathBuf { 14 | match &self.workdir { 15 | Some(path) => base_dir.join(path), 16 | None => base_dir.to_path_buf(), 17 | } 18 | } 19 | } 20 | 21 | pub struct ExecBuilder { 22 | env: Vec<(String, String)>, 23 | cwd: PathBuf, 24 | cmd: String, 25 | args: Vec, 26 | } 27 | 28 | impl ExecBuilder { 29 | pub async fn new(task: &Task, config: &Config) -> Result { 30 | let cwd = task.get_absolute_workdir(&config.base_dir); 31 | 32 | let shared_env = config.get_shared_env().await?; 33 | let env = task 34 | .get_full_env(&cwd, &shared_env) 35 | .await? 36 | .into_iter() 37 | .collect::>(); 38 | 39 | let (cmd, args) = task.get_exec_command()?; 40 | 41 | Ok(Self { 42 | cwd, 43 | env, 44 | cmd, 45 | args, 46 | }) 47 | } 48 | 49 | pub fn build(&self) -> Result { 50 | Ok(Exec::cmd(self.cmd.clone()) 51 | .args(&self.args) 52 | .cwd(&self.cwd) 53 | .env_extend(&self.env)) 54 | } 55 | 56 | pub fn as_string(&self) -> String { 57 | format!("EXEC: {} {:?} at {:?}", self.cmd, self.args, self.cwd) 58 | } 59 | } 60 | 61 | impl ConfigInner { 62 | pub async fn get_shared_env(&self) -> Result> { 63 | let mut shared_env = HashMap::from_iter(std::env::vars()); 64 | shared_env.extend(lade_sdk::resolve(&self.env, &shared_env)?); 65 | lade_sdk::hydrate(shared_env, self.base_dir.to_path_buf()).await 66 | } 67 | } 68 | 69 | impl Task { 70 | pub fn get_exec_command(&self) -> Result<(String, Vec)> { 71 | let default_entrypoint = { 72 | #[cfg(not(target_os = "windows"))] 73 | { 74 | "bash -c" 75 | } 76 | 77 | #[cfg(target_os = "windows")] 78 | { 79 | "cmd /c" 80 | } 81 | }; 82 | 83 | let entrypoint_lex = match &self.entrypoint { 84 | Some(e) => { 85 | if !e.is_empty() { 86 | e.as_str() 87 | } else { 88 | default_entrypoint 89 | } 90 | } 91 | None => default_entrypoint, 92 | }; 93 | 94 | let entrypoint_split = { 95 | let mut s = shlex::split(entrypoint_lex).unwrap(); 96 | 97 | match &self.command { 98 | Some(a) => { 99 | s.push(a.to_owned()); 100 | s 101 | } 
102 | None => s, 103 | } 104 | }; 105 | 106 | let entrypoint = &entrypoint_split[0]; 107 | let nargs = entrypoint_split[1..] 108 | .iter() 109 | .filter(|s| !s.is_empty()) 110 | .cloned() 111 | .collect::>(); 112 | 113 | Ok((entrypoint.to_owned(), nargs)) 114 | } 115 | 116 | pub async fn get_full_env( 117 | &self, 118 | cwd: &Path, 119 | shared_env: &HashMap, 120 | ) -> Result> { 121 | let mut env = HashMap::default(); 122 | 123 | for env_file in self.env_file.resolve() { 124 | let path = cwd.join(env_file.clone()); 125 | let file = fs::read_to_string(path.clone()) 126 | .with_context(|| format!("cannot find env_file {:?}", path.clone()))?; 127 | let values = parse_dotenv(&file) 128 | .map_err(anyhow::Error::msg) 129 | .with_context(|| format!("cannot parse env_file {:?}", path))? 130 | .into_iter() 131 | .map(|(k, v)| (k, v.replace("\\n", "\n"))); 132 | 133 | env.extend(lade_sdk::resolve(&values.collect(), shared_env)?); 134 | } 135 | 136 | env.extend(lade_sdk::resolve(&self.env.clone(), shared_env)?); 137 | let mut env = lade_sdk::hydrate(env, cwd.to_owned()).await?; 138 | env.extend(shared_env.clone()); 139 | 140 | Ok(env) 141 | } 142 | } 143 | 144 | pub fn get_env() -> HashMap { 145 | let mut env = HashMap::new(); 146 | env.insert("RUST_LOG".to_string(), "info".to_string()); 147 | env 148 | } 149 | -------------------------------------------------------------------------------- /src/config/ops.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashSet; 2 | 3 | use anyhow::{anyhow, bail, Result}; 4 | use indexmap::IndexMap; 5 | 6 | use super::{Dag, Task}; 7 | 8 | pub type Ops = IndexMap; 9 | 10 | pub fn build_dag(ops: &Ops) -> Result { 11 | // dependencies 12 | for (op_name, task) in ops.iter() { 13 | for dep_op_name in task.depends_on.resolve().into_iter() { 14 | if op_name == &dep_op_name { 15 | return Err(anyhow!("dependency cannot be recursive in {}", op_name)); 16 | } 17 | 18 | if 
!ops.contains_key(&dep_op_name) { 19 | return Err(anyhow!("{} in op {}", dep_op_name, op_name)); 20 | } 21 | } 22 | } 23 | 24 | let mut order: Vec = Vec::new(); 25 | let mut poll = Vec::from_iter(ops.keys()); 26 | 27 | while !poll.is_empty() { 28 | let (satisfied, missing): (Vec<&String>, Vec<&String>) = 29 | poll.into_iter().partition(|&item| { 30 | get_dependencies(ops, item) 31 | .iter() 32 | .all(|p| order.contains(p)) 33 | }); 34 | 35 | if satisfied.is_empty() { 36 | return Err(anyhow!( 37 | "cycle detected with one of {}", 38 | missing.into_iter().cloned().collect::>().join(", ") 39 | )); 40 | } 41 | 42 | order.extend(satisfied.into_iter().cloned().collect::>()); 43 | poll = missing; 44 | } 45 | 46 | let dag = order 47 | .into_iter() 48 | .map(|item| { 49 | let nexts = ops 50 | .iter() 51 | .filter(|(_, op)| op.depends_on.resolve().contains(&item)) 52 | .map(|(op_name, _)| op_name.clone()) 53 | .collect::>(); 54 | (item, nexts) 55 | }) 56 | .rev() 57 | .collect::(); 58 | Ok(dag) 59 | } 60 | 61 | /// Returns the list of dependencies of a job defined in the config file. 62 | pub fn get_dependencies(ops: &Ops, job_name: &str) -> Vec { 63 | ops.get(job_name).unwrap().depends_on.resolve() 64 | } 65 | 66 | /// Returns a list of all the dependencies of a list of jobs, and 67 | /// the children dependencies of each dependency recursively. 
68 | pub fn get_all_dependencies(ops: &Ops, jobs: &[String]) -> Vec { 69 | let mut job_dependencies = Vec::new(); 70 | let mut all_dependencies = Vec::new(); 71 | 72 | // add initial dependencies 73 | for job_name in jobs { 74 | let child_dependencies = get_dependencies(ops, job_name); 75 | job_dependencies.extend(child_dependencies.into_iter()); 76 | } 77 | 78 | // add child dependencies recursively 79 | while let Some(job_name) = job_dependencies.pop() { 80 | let child_dependencies = get_dependencies(ops, &job_name); 81 | job_dependencies.extend(child_dependencies.into_iter()); 82 | all_dependencies.push(job_name); 83 | } 84 | 85 | all_dependencies 86 | } 87 | 88 | /// Returns the list of all the jobs defined in the config file. 89 | pub fn get_jobs(ops: &Ops) -> Vec<&String> { 90 | ops.iter().map(|(job_name, _)| job_name).collect() 91 | } 92 | 93 | /// Returns the list of all the jobs set in the config file and 94 | /// their dependencies in a simplified version. 95 | pub fn get_formatted_list_of_jobs(ops: &Ops) -> String { 96 | let mut formatted_list_of_jobs: Vec = get_jobs(ops) 97 | .iter() 98 | .map(|job_name| { 99 | let dependencies = get_dependencies(ops, job_name); 100 | let mut formatted_job = format!(" - {job_name}"); 101 | 102 | if !dependencies.is_empty() { 103 | formatted_job += &format!(" ({})", dependencies.join(",")); 104 | } 105 | 106 | formatted_job 107 | }) 108 | .collect(); 109 | formatted_list_of_jobs.sort(); 110 | formatted_list_of_jobs.join("\n") 111 | } 112 | 113 | /// Filters the jobs to only the ones provided in `run` 114 | /// and then recursively add their dependencies to be able 115 | /// to run the filtered jobs. 116 | /// 117 | /// Doesn't filter if `run` is empty. 118 | /// 119 | /// Fails if a job in `run` is not set in the config file. 
120 | pub fn filter_jobs(ops: &mut Ops, run: &[String]) -> Result<()> { 121 | for job_name in run { 122 | if ops.get(job_name).is_none() { 123 | let formatted_list_of_jobs = get_formatted_list_of_jobs(ops); 124 | let error_header = format!("job '{job_name}' not found in config file."); 125 | let error_suggestion = format!("Valid jobs are:\n{formatted_list_of_jobs}"); 126 | let error_message = format!("{error_header}\n\n{error_suggestion}"); 127 | bail!(error_message); 128 | } 129 | } 130 | 131 | if !run.is_empty() { 132 | let mut filtered_jobs = get_all_dependencies(ops, run); 133 | filtered_jobs.extend(run.iter().cloned()); 134 | let filtered_jobs: HashSet = HashSet::from_iter(filtered_jobs); 135 | *ops = ops 136 | .clone() 137 | .into_iter() 138 | .filter(|(job_name, _)| filtered_jobs.contains(job_name)) 139 | .collect(); 140 | } 141 | 142 | Ok(()) 143 | } 144 | -------------------------------------------------------------------------------- /src/tests.rs: -------------------------------------------------------------------------------- 1 | use std::path::Path; 2 | use std::sync::Arc; 3 | use std::{env, future::Future}; 4 | 5 | use anyhow::{Ok, Result}; 6 | 7 | use subprocess::ExitStatus; 8 | 9 | use crate::actors::command::{CommandActorsBuilder, WaitStatus}; 10 | use crate::actors::console::{OutputKind, RegisterPanel}; 11 | use crate::actors::watcher::WatchGlob; 12 | use crate::args::Args; 13 | use crate::config::{ConfigInner, RawConfig}; 14 | use crate::utils::find_config_path; 15 | use crate::{ 16 | actors::{ 17 | console::{ConsoleActor, Output, PanelStatus, TermEvent}, 18 | grim_reaper::GrimReaperActor, 19 | watcher::WatcherActor, 20 | }, 21 | config::Config, 22 | }; 23 | use actix::{actors::mocker::Mocker, prelude::*}; 24 | use assert_cmd::Command; 25 | use clap::CommandFactory; 26 | 27 | fn within_system>>(f: F) { 28 | let system = System::new(); 29 | system.block_on(f).unwrap(); 30 | } 31 | 32 | #[macro_export] 33 | macro_rules! 
mock_actor { 34 | ( $tt:tt, { $( $mtch:ident : $ty:ty => $case:expr ), *, } ) => ( 35 | Mocker::<$tt>::mock(Box::new(|msg, _ctx| { 36 | $( 37 | if msg.is::<$ty>() { 38 | let $mtch = msg.downcast::<$ty>().unwrap(); 39 | Box::new($case) 40 | } else 41 | )* 42 | { 43 | println!("unexpected {:?} on {}", 44 | msg.downcast::>(), 45 | stringify!($tt) 46 | ); 47 | Box::new(None::<()>) 48 | } 49 | })).start() 50 | ) 51 | } 52 | 53 | #[test] 54 | fn verify_cli() { 55 | Args::command().debug_assert() 56 | } 57 | 58 | #[test] 59 | fn end_to_end() { 60 | let mut cmd = Command::cargo_bin("whiz").unwrap(); 61 | cmd.arg("-h").assert().success(); 62 | } 63 | 64 | fn config_from_str(s: &str) -> Result { 65 | let raw: RawConfig = s.parse()?; 66 | Ok(Arc::new(ConfigInner::from_raw(raw, env::current_dir()?)?)) 67 | } 68 | 69 | #[test] 70 | fn hello() { 71 | within_system(async move { 72 | let config = config_from_str( 73 | r#" 74 | test: 75 | command: ls 76 | "#, 77 | )?; 78 | 79 | let console = mock_actor!(ConsoleActor, { 80 | msg: Output => { 81 | println!("---{:?}", msg.message); 82 | Some(()) 83 | }, 84 | _msg: RegisterPanel => Some(()), 85 | _msg: TermEvent => Some(()), 86 | _msg: PanelStatus => Some(()), 87 | }); 88 | 89 | let watcher = mock_actor!(WatcherActor, { 90 | _msg: WatchGlob => Some(()), 91 | }); 92 | 93 | console 94 | .send(Output::now( 95 | "test".to_string(), 96 | "message".to_string(), 97 | OutputKind::Command, 98 | )) 99 | .await?; 100 | 101 | let commands = CommandActorsBuilder::new(config, console, watcher) 102 | .build() 103 | .await?; 104 | 105 | let status = commands.get("test").unwrap().send(WaitStatus).await?; 106 | println!("status: {:?}", status); 107 | 108 | Ok(()) 109 | }); 110 | } 111 | 112 | #[test] 113 | fn test_grim_reaper() { 114 | let system = System::with_tokio_rt(|| { 115 | tokio::runtime::Builder::new_multi_thread() 116 | .worker_threads(2) 117 | .max_blocking_threads(1) 118 | .enable_all() 119 | .build() 120 | .unwrap() 121 | }); 122 | 123 | 
let fut = async move { 124 | let config_raw = r#" 125 | test: 126 | entrypoint: 'python3 -c' 127 | command: 'print("hello whiz")' 128 | long_test_dep: 129 | entrypoint: 'python3 -c' 130 | command: 'import time; time.sleep(1); print("wake up")' 131 | long_test: 132 | entrypoint: 'python3 -c' 133 | command: 'print("my que to enter")' 134 | depends_on: 135 | - long_test_dep"#; 136 | let config: Config = config_from_str(config_raw)?; 137 | 138 | let console = mock_actor!(ConsoleActor, { 139 | msg: Output => { 140 | println!("---{:?}", msg.message); 141 | Some(()) 142 | }, 143 | _msg: PanelStatus => Some(()), 144 | _msg: RegisterPanel => Some(()), 145 | _msg: TermEvent => Some(()), 146 | }); 147 | 148 | let watcher = mock_actor!(WatcherActor, { 149 | _msg: WatchGlob => Some(()), 150 | }); 151 | 152 | let commands = CommandActorsBuilder::new(config, console, watcher) 153 | .build() 154 | .await?; 155 | 156 | GrimReaperActor::start_new(commands).await?; 157 | Ok(()) 158 | }; 159 | 160 | Arbiter::current().spawn(async { fut.await.unwrap() }); 161 | 162 | let timer = std::time::SystemTime::now(); 163 | assert_eq!(0, system.run_with_code().unwrap()); 164 | let elapsed = timer.elapsed().unwrap(); 165 | assert!( 166 | elapsed.as_millis() >= 1000, 167 | "test took less than a second: {elapsed:?}" 168 | ); 169 | } 170 | 171 | #[test] 172 | fn config_search_recursive() { 173 | assert!(env::current_dir().is_ok()); 174 | let previous_cwd = env::current_dir().unwrap().as_path().display().to_string(); 175 | 176 | // change current working directory to {root_app}/src 177 | assert!(env::set_current_dir(Path::new("src")).is_ok()); 178 | assert!(env::current_dir().is_ok()); 179 | 180 | // cwd as string 181 | let new_cwd = env::current_dir().unwrap().as_path().display().to_string(); 182 | println!(" Working directory set to {}", new_cwd); 183 | 184 | let config_name = "whiz.yaml"; 185 | let expected_if_exist = Path::new(&new_cwd).join(config_name).display().to_string(); 186 | 187 | let 
config_path = find_config_path(&env::current_dir().unwrap(), config_name).unwrap(); 188 | let config_path_got = config_path.display().to_string(); 189 | 190 | println!(" Config file located at {}", config_path_got); 191 | println!( 192 | " Path \"{}\" should be different from \"{}\"", 193 | config_path_got, expected_if_exist 194 | ); 195 | assert_ne!(config_path_got, expected_if_exist); 196 | 197 | // reset cwd to be safe 198 | assert!(env::set_current_dir(Path::new(&previous_cwd)).is_ok()); 199 | println!(" Working directory reset to {}", previous_cwd); 200 | } 201 | -------------------------------------------------------------------------------- /src/main.rs: -------------------------------------------------------------------------------- 1 | use actix::prelude::*; 2 | use anyhow::anyhow; 3 | use anyhow::Ok; 4 | use anyhow::Result; 5 | use chrono::{Duration, Utc}; 6 | use clap::Parser; 7 | use self_update::{backends::github::Update, cargo_crate_version, update::UpdateStatus}; 8 | use semver::Version; 9 | use std::eprintln; 10 | use tokio::time::{sleep, Duration as TokioDuration}; 11 | use whiz::actors::command::CommandActorsBuilder; 12 | use whiz::config::ops; 13 | use whiz::config::ConfigBuilder; 14 | use whiz::serial_mode; 15 | use whiz::utils::find_config_path; 16 | use whiz::{ 17 | actors::{console::ConsoleActor, watcher::WatcherActor}, 18 | args::Command, 19 | config::Config, 20 | global_config::GlobalConfig, 21 | }; 22 | mod graph; 23 | 24 | use whiz::args::Args; 25 | 26 | async fn upgrade_check() -> Result<()> { 27 | let project = directories::ProjectDirs::from("com", "zifeo", "whiz") 28 | .expect("cannot get directory for projet"); 29 | 30 | let config_path = project.config_local_dir().join("config.yml"); 31 | let mut local_config = GlobalConfig::load(config_path.clone()).await?; 32 | 33 | if local_config.update_check + Duration::days(1) < Utc::now() { 34 | let current_version = cargo_crate_version!(); 35 | let latest = tokio::task::spawn_blocking(move || 
{ 36 | let update = Update::configure() 37 | .repo_owner("zifeo") 38 | .repo_name("whiz") 39 | .bin_name("whiz") 40 | .current_version(current_version) 41 | .build()?; 42 | 43 | Ok(update.get_latest_release()?) 44 | }) 45 | .await??; 46 | 47 | if Version::parse(&latest.version)? > Version::parse(current_version)? { 48 | println!( 49 | "New whiz update available: {} -> {} (use: whiz upgrade)", 50 | current_version, latest.version 51 | ); 52 | println!("Will resume in 5 seconds..."); 53 | sleep(TokioDuration::from_secs(5)).await; 54 | } 55 | local_config.update_check = Utc::now(); 56 | local_config.save(config_path).await?; 57 | } 58 | Ok(()) 59 | } 60 | 61 | fn main() -> Result<()> { 62 | let args = Args::parse(); 63 | 64 | if args.version { 65 | println!("whiz {}", env!("CARGO_PKG_VERSION")); 66 | return Ok(()); 67 | } 68 | 69 | if let Some(Command::Upgrade(opts)) = args.command { 70 | let mut update = Update::configure(); 71 | update 72 | .repo_owner("zifeo") 73 | .repo_name("whiz") 74 | .bin_name("whiz") 75 | .show_download_progress(true) 76 | .current_version(cargo_crate_version!()) 77 | .no_confirm(opts.yes); 78 | 79 | if let Some(version) = opts.version { 80 | update.target_version_tag(&format!("v{version}")); 81 | } 82 | 83 | match update.build()?.update_extended()? 
{ 84 | UpdateStatus::UpToDate => println!("Already up to date!"), 85 | UpdateStatus::Updated(release) => { 86 | println!("Updated successfully to {}!", release.version); 87 | println!( 88 | "Release notes: https://github.com/zifeo/whiz/releases/tag/{}", 89 | release.name 90 | ); 91 | } 92 | }; 93 | return Ok(()); 94 | }; 95 | 96 | let system = System::with_tokio_rt(|| { 97 | tokio::runtime::Builder::new_multi_thread() 98 | .worker_threads(2) 99 | .max_blocking_threads(1) 100 | .enable_all() 101 | .build() 102 | .unwrap() 103 | }); 104 | 105 | Arbiter::current().spawn(async { 106 | run(args).await.unwrap_or_else(|e| { 107 | eprintln!("{}", e); 108 | System::current().stop_with_code(1); 109 | }); 110 | }); 111 | 112 | let code = system.run_with_code()?; 113 | std::process::exit(code); 114 | } 115 | 116 | async fn run(args: Args) -> Result<()> { 117 | #[cfg(target_os = "windows")] 118 | std::env::set_var( 119 | "PWD", 120 | std::env::current_dir() 121 | .expect("could not read current directory") 122 | .to_str() 123 | .unwrap(), 124 | ); 125 | 126 | upgrade_check() 127 | .await 128 | .unwrap_or_else(|e| eprintln!("cannot check for update: {}", e)); 129 | 130 | let config = ConfigBuilder::new(find_config_path( 131 | &std::env::current_dir().unwrap(), 132 | &args.file, 133 | )?) 
134 | .build()?; 135 | 136 | let Some(command) = args.command.as_ref() else { 137 | return start_default_mode(config, args).await; 138 | }; 139 | 140 | match command { 141 | Command::Upgrade(_) => { 142 | unreachable!(); 143 | } 144 | 145 | Command::ListJobs => { 146 | let formatted_list_of_jobs = ops::get_formatted_list_of_jobs(&config.ops); 147 | println!("List of jobs:\n{formatted_list_of_jobs}"); 148 | System::current().stop_with_code(0); 149 | Ok(()) 150 | } 151 | 152 | Command::Graph(opts) => { 153 | let filtered_tasks: Vec = config 154 | .ops 155 | .iter() 156 | .map(|task| graph::Task { 157 | name: task.0.to_owned(), 158 | depends_on: task.1.depends_on.resolve(), 159 | }) 160 | .collect(); 161 | 162 | match graph::draw_graph(filtered_tasks, opts.boxed) 163 | .map_err(|err| anyhow!("Error visualizing graph: {}", err)) 164 | { 165 | Result::Ok(..) => { 166 | System::current().stop_with_code(0); 167 | Ok(()) 168 | } 169 | Err(e) => { 170 | System::current().stop_with_code(1); 171 | Err(e) 172 | } 173 | } 174 | } 175 | 176 | Command::Execute(opts) => { 177 | serial_mode::start(opts, config).await?; 178 | System::current().stop_with_code(0); 179 | Ok(()) 180 | } 181 | } 182 | } 183 | 184 | async fn start_default_mode(config: Config, args: Args) -> Result<()> { 185 | let console = 186 | ConsoleActor::new(Vec::from_iter(config.ops.keys().cloned()), args.timestamp).start(); 187 | let watcher = WatcherActor::new(config.base_dir.clone()).start(); 188 | 189 | let cmds = CommandActorsBuilder::new(config, console.clone(), watcher) 190 | .verbose(args.verbose) 191 | .globally_enable_watch(if args.exit_after { false } else { args.watch }) 192 | .build() 193 | .await 194 | .map_err(|err| anyhow!("error spawning commands: {}", err))?; 195 | 196 | if args.exit_after { 197 | whiz::actors::grim_reaper::GrimReaperActor::start_new(cmds).await?; 198 | } 199 | 200 | Ok(()) 201 | } 202 | -------------------------------------------------------------------------------- 
/src/graph/ui.rs: -------------------------------------------------------------------------------- 1 | use std::{fmt::Display, rc::Rc}; 2 | 3 | use crossterm::event::KeyCode; 4 | use ratatui::{ 5 | prelude::{Constraint, Layout, Rect}, 6 | widgets::{Block, Borders, Paragraph, Scrollbar, ScrollbarOrientation, ScrollbarState}, 7 | Frame, 8 | }; 9 | use termgraph::{LineGlyphBuilder, LineGlyphs, NodeFormat}; 10 | 11 | pub enum LineFormat { 12 | Ascii, 13 | Boxed, 14 | } 15 | 16 | #[derive(PartialEq)] 17 | pub enum Message { 18 | ScrollDown, 19 | ScrollUp, 20 | ScrollRight, 21 | ScrollLeft, 22 | Quit, 23 | } 24 | 25 | #[derive(Default)] 26 | pub struct Model { 27 | vertical_scroll_state: ScrollbarState, 28 | horizontal_scroll_state: ScrollbarState, 29 | vertical_scroll: u16, 30 | horizontal_scroll: u16, 31 | pub should_quit: bool, 32 | graph_string_representation: String, 33 | indipendent_tasks: String, 34 | } 35 | 36 | impl Model { 37 | pub fn new(graph_string_representation: &[u8], indipendent_tasks: String) -> Self { 38 | Model { 39 | vertical_scroll: 0, 40 | horizontal_scroll: 0, 41 | should_quit: false, 42 | horizontal_scroll_state: ScrollbarState::default(), 43 | vertical_scroll_state: ScrollbarState::default(), 44 | graph_string_representation: String::from_utf8_lossy(graph_string_representation) 45 | .into_owned(), 46 | indipendent_tasks, 47 | } 48 | } 49 | } 50 | 51 | pub fn handle_key_event() -> Result, Box> { 52 | let message = if crossterm::event::poll(std::time::Duration::from_millis(250))? { 53 | if let crossterm::event::Event::Key(key) = crossterm::event::read()? 
{ 54 | match key.code { 55 | KeyCode::Char('q') => Message::Quit, 56 | KeyCode::Char('j') | KeyCode::Down => Message::ScrollDown, 57 | KeyCode::Char('k') | KeyCode::Up => Message::ScrollUp, 58 | KeyCode::Char('h') | KeyCode::Left => Message::ScrollLeft, 59 | KeyCode::Char('l') | KeyCode::Right => Message::ScrollRight, 60 | _ => return Ok(None), 61 | } 62 | } else { 63 | return Ok(None); 64 | } 65 | } else { 66 | return Ok(None); 67 | }; 68 | 69 | Ok(Some(message)) 70 | } 71 | 72 | pub fn update(model: &mut Model, msg: Message) -> Option { 73 | use Message::*; 74 | match msg { 75 | ScrollRight => { 76 | model.horizontal_scroll = model.horizontal_scroll.saturating_add(5); 77 | model.horizontal_scroll_state = model 78 | .horizontal_scroll_state 79 | .position(model.horizontal_scroll.into()); 80 | } 81 | ScrollLeft => { 82 | model.horizontal_scroll = model.horizontal_scroll.saturating_sub(5); 83 | model.horizontal_scroll_state = model 84 | .horizontal_scroll_state 85 | .position(model.horizontal_scroll.into()); 86 | } 87 | ScrollUp => { 88 | model.vertical_scroll = model.vertical_scroll.saturating_sub(5); 89 | model.vertical_scroll_state = model 90 | .vertical_scroll_state 91 | .position(model.vertical_scroll.into()); 92 | } 93 | 94 | ScrollDown => { 95 | model.vertical_scroll = model.vertical_scroll.saturating_add(5); 96 | model.vertical_scroll_state = model 97 | .vertical_scroll_state 98 | .position(model.vertical_scroll.into()); 99 | } 100 | Quit => model.should_quit = true, 101 | } 102 | None 103 | } 104 | 105 | pub struct Drawer {} 106 | impl Drawer { 107 | fn render_indipendent_tasks(frame: &mut Frame, chunks: Rc<[Rect]>, model: &mut Model) { 108 | frame.render_widget( 109 | Paragraph::new(model.indipendent_tasks.as_str()) 110 | .block( 111 | Block::new() 112 | .title("Indipendent task") 113 | .title_alignment(ratatui::prelude::Alignment::Center) 114 | .borders(Borders::ALL), 115 | ) 116 | // .alignment(ratatui::prelude::Alignment::Center) 117 | .scroll((0, 
model.horizontal_scroll)), 118 | chunks.clone()[0], 119 | ); 120 | } 121 | 122 | fn render_dependency_graph(frame: &mut Frame, chunks: Rc<[Rect]>, model: &mut Model) { 123 | frame.render_widget( 124 | Paragraph::new(model.graph_string_representation.to_owned()) 125 | .block( 126 | Block::new() 127 | .title("Dependency Graph") 128 | .title_alignment(ratatui::prelude::Alignment::Center) 129 | .borders(Borders::ALL), 130 | ) 131 | .scroll((model.vertical_scroll, model.horizontal_scroll)), 132 | chunks.clone()[1], 133 | ); 134 | } 135 | 136 | pub fn render_scrollbar(model: &mut Model, frame: &mut Frame, chunks: Rc<[Rect]>) { 137 | frame.render_stateful_widget( 138 | Scrollbar::default().orientation(ScrollbarOrientation::HorizontalTop), 139 | chunks[1], 140 | &mut model.horizontal_scroll_state, 141 | ); 142 | 143 | frame.render_stateful_widget( 144 | Scrollbar::default().orientation(ScrollbarOrientation::VerticalLeft), 145 | chunks[1], 146 | &mut model.vertical_scroll_state, 147 | ); 148 | } 149 | 150 | pub fn get_layout(frame: &Frame) -> Rc<[Rect]> { 151 | Layout::default() 152 | .direction(ratatui::prelude::Direction::Vertical) 153 | .constraints(vec![Constraint::Length(5), Constraint::Min(0)]) 154 | .split(frame.size()) 155 | } 156 | 157 | pub fn draw(model: &mut Model, frame: &mut Frame) { 158 | let chunks = Self::get_layout(frame); 159 | Self::render_scrollbar(model, frame, chunks.clone()); 160 | Self::render_dependency_graph(frame, chunks.clone(), model); 161 | Self::render_indipendent_tasks(frame, chunks.clone(), model); 162 | } 163 | } 164 | 165 | pub struct TaskFormatter {} 166 | impl TaskFormatter { 167 | /// Creates a new Instance of the Formatter 168 | pub fn new() -> Self { 169 | Self {} 170 | } 171 | 172 | pub fn from_commandline(line_format: LineFormat) -> LineGlyphs { 173 | match line_format { 174 | LineFormat::Ascii => LineGlyphBuilder::ascii().finish(), 175 | LineFormat::Boxed => LineGlyphBuilder::ascii() 176 | .vertical('\u{2502}') 177 | 
.crossing('\u{253C}') 178 | .horizontal('\u{2500}') 179 | .arrow_down('▼') 180 | .finish(), 181 | } 182 | } 183 | } 184 | 185 | impl NodeFormat for TaskFormatter 186 | where 187 | T: Display, 188 | { 189 | fn format_node(&self, _: &ID, name: &T) -> String { 190 | format!("|{}|", name) 191 | } 192 | } 193 | -------------------------------------------------------------------------------- /src/graph/graph_task.rs: -------------------------------------------------------------------------------- 1 | use serde::{Deserialize, Serialize}; 2 | use std::collections::{HashMap, HashSet}; 3 | 4 | pub struct Graph<'a> { 5 | pub independent_tasks: Vec<&'a Task>, 6 | nodes_dictionary: HashMap, 7 | edges: Vec<(usize, usize)>, 8 | } 9 | 10 | impl<'a> Graph<'a> { 11 | pub fn from_tasks_list(tasks_list: &'a [Task]) -> Self { 12 | let (independent_tasks, dependent_tasks) = Task::split_tasks(tasks_list); 13 | let mut nodes_dictionary: HashMap = HashMap::new(); 14 | Self::populate_node_dictionary(&mut nodes_dictionary, &dependent_tasks); 15 | let edges = Self::build_edges(&dependent_tasks, &nodes_dictionary); 16 | Self { 17 | independent_tasks, 18 | nodes_dictionary, 19 | edges, 20 | } 21 | } 22 | 23 | pub fn nodes(&self) -> HashMap<&usize, &String> { 24 | self.nodes_dictionary 25 | .iter() 26 | .map(|node| (node.1, node.0)) 27 | .collect() 28 | } 29 | 30 | pub fn edges(&self) -> Vec<(&usize, &usize)> { 31 | self.edges.iter().map(|t| (&t.0, &t.1)).collect() 32 | } 33 | 34 | fn build_edges( 35 | dependent_tasks: &[&Task], 36 | nodes_dictionary: &HashMap, 37 | ) -> Vec<(usize, usize)> { 38 | dependent_tasks 39 | .iter() 40 | .enumerate() 41 | .filter_map(|(uid, task)| { 42 | Self::dependecies_lists_to_tuple_nodes(&task.depends_on, uid, nodes_dictionary) 43 | }) 44 | .flatten() 45 | .collect() 46 | } 47 | 48 | fn dependecies_lists_to_tuple_nodes( 49 | dependecies_lists: &Vec, 50 | uid: usize, 51 | nodes_dictionary: &HashMap, 52 | ) -> Option> { 53 | if dependecies_lists.is_empty() { 54 | 
return None; 55 | }; 56 | let mut result: Vec<(usize, usize)> = vec![]; 57 | for dependecy in dependecies_lists { 58 | match nodes_dictionary.get(dependecy) { 59 | Some(node) => result.push((*node, uid)), 60 | None => return None, 61 | } 62 | } 63 | Some(result) 64 | } 65 | 66 | fn populate_node_dictionary( 67 | nodes_dictionary: &mut HashMap, 68 | dependent_tasks: &[&Task], 69 | ) { 70 | dependent_tasks.iter().enumerate().for_each(|(uid, task)| { 71 | nodes_dictionary.insert(task.name.to_owned(), uid); 72 | }); 73 | } 74 | 75 | pub fn format_independent_task(&self) -> String { 76 | //Format the indipendent tasks on the first line 77 | if self.independent_tasks.is_empty() { 78 | return String::new(); 79 | }; 80 | self.independent_tasks.iter().skip(1).fold( 81 | format!("|{}|", &self.independent_tasks[0].name), 82 | |accumulatotask_list, task| format!("{} |{}|", accumulatotask_list, task.name), 83 | ) + "\n" 84 | + "\n" 85 | } 86 | } 87 | 88 | #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] 89 | pub struct TaskFile { 90 | #[serde(flatten)] 91 | file: HashMap, 92 | } 93 | 94 | #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] 95 | pub struct DependsOn { 96 | pub depends_on: Vec, 97 | } 98 | 99 | #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] 100 | pub struct Task { 101 | pub name: String, 102 | 103 | pub depends_on: Vec, 104 | } 105 | 106 | impl Task { 107 | pub fn split_tasks(tasks: &[Task]) -> (Vec<&Task>, Vec<&Task>) { 108 | let mut dependencies_tasks: HashSet<&str> = HashSet::new(); 109 | tasks.iter().for_each(|task| { 110 | task.depends_on.iter().for_each(|dep_task| { 111 | dependencies_tasks.insert(dep_task); 112 | }) 113 | }); 114 | tasks.iter().partition(|task| { 115 | task.depends_on.is_empty() 116 | && !tasks 117 | .iter() 118 | .all(|_| dependencies_tasks.contains(task.name.as_str())) 119 | }) 120 | } 121 | } 122 | 123 | #[cfg(test)] 124 | mod helpers_tests { 125 | use super::{Graph, Task}; 126 | use 
std::collections::HashMap; 127 | // 128 | //Test helpers 129 | type TestInputTask = (&'static str, &'static [&'static str]); 130 | impl Task { 131 | pub fn from_formatted(formatted_tasks: &[TestInputTask]) -> Vec { 132 | formatted_tasks.iter().map(|t| Task::from(*t)).collect() 133 | } 134 | } 135 | impl From for Task { 136 | fn from(value: TestInputTask) -> Self { 137 | Task { 138 | name: value.0.to_owned(), 139 | depends_on: value 140 | .1 141 | .iter() 142 | .map(|refer| refer.to_string()) 143 | .collect::>(), 144 | } 145 | } 146 | } 147 | 148 | #[test] 149 | fn test_split_tasks() { 150 | let input: &[TestInputTask] = &[("once", &[]), ("once_b", &["once"]), ("third_task", &[])]; 151 | let task_vec: Vec = Task::from_formatted(input); 152 | 153 | assert_eq!( 154 | Task::split_tasks(&task_vec).0.first().unwrap(), 155 | &task_vec.get(2).unwrap() 156 | ) 157 | } 158 | 159 | #[test] 160 | fn split_multiple_tasks() { 161 | let input: &[TestInputTask] = &[ 162 | ("once", &[]), 163 | ("once_b", &["once"]), 164 | ("third_task", &[]), 165 | ("once_c", &["once", "once_b"]), 166 | ("speedy", &[]), 167 | ("err", &[]), 168 | ]; 169 | 170 | let tasks: Vec = Task::from_formatted(input); 171 | let (indipendent_tasks, dependent_tasks) = Task::split_tasks(&tasks); 172 | assert_eq!( 173 | indipendent_tasks, 174 | &[ 175 | tasks.get(2).unwrap(), 176 | tasks.get(4).unwrap(), 177 | tasks.get(5).unwrap() 178 | ] 179 | ); 180 | assert_eq!( 181 | dependent_tasks, 182 | vec![ 183 | tasks.first().unwrap(), 184 | tasks.get(1).unwrap(), 185 | tasks.get(3).unwrap() 186 | ] 187 | ) 188 | } 189 | 190 | #[test] 191 | fn split_bigger_list() { 192 | let input: &[TestInputTask] = &[ 193 | ("2.2_task", &["1.4_task"]), 194 | ("0.8_task", &[]), 195 | ("1.3_task", &["0.6_task"]), 196 | ( 197 | "1.4_task", 198 | &["0.6_task", "0.7_task", "0.8_task", "0.9_task", "0.10_task"], 199 | ), 200 | ("0.5_task", &[]), 201 | ("0.3_task", &[]), 202 | ("0.2_task", &[]), 203 | ("1.1_task", &["0.2_task"]), 204 | 
("0.7_task", &[]), 205 | ("0.6_task", &[]), 206 | ("0.11_task", &[]), 207 | ( 208 | "1.2_task", 209 | &["0.2_task", "0.3_task", "0.4_task", "0.6_task", "0.10_task"], 210 | ), 211 | ("1.5_task", &["0.7_task"]), 212 | ("0.9_task", &[]), 213 | ("0.4_task", &[]), 214 | ("2.1_task", &["1.4_task", "1.5_task"]), 215 | ("0.10_task", &[]), 216 | ("0.1_task", &[]), 217 | ]; 218 | let tasks = Task::from_formatted(input); 219 | 220 | let (indipendent, _) = Task::split_tasks(&tasks); 221 | [ 222 | Task { 223 | name: "0.1_task".into(), 224 | depends_on: vec![], 225 | }, 226 | Task { 227 | name: "0.5_task".into(), 228 | depends_on: vec![], 229 | }, 230 | Task { 231 | name: "0.11_task".into(), 232 | depends_on: vec![], 233 | }, 234 | ] 235 | .iter() 236 | .for_each(|el| assert!(indipendent.contains(&el))); 237 | } 238 | 239 | #[test] 240 | fn dep_list_to_nodes() { 241 | let one = Task { 242 | name: "one".to_owned(), 243 | depends_on: vec![], 244 | }; 245 | 246 | let two = Task { 247 | name: "two".to_owned(), 248 | depends_on: vec!["one".to_owned()], 249 | }; 250 | 251 | let three = Task { 252 | name: "three".to_owned(), 253 | depends_on: vec!["one".to_owned(), "two".to_owned()], 254 | }; 255 | 256 | let dependent_dictionary: HashMap = HashMap::from([ 257 | ("one".to_owned(), 1), 258 | ("two".to_owned(), 2), 259 | ("three".to_owned(), 3), 260 | ]); 261 | let dependencies_for_one = 262 | Graph::dependecies_lists_to_tuple_nodes(&one.depends_on, 1, &dependent_dictionary); 263 | assert_eq!(dependencies_for_one, None); 264 | 265 | let dependencies_for_two = 266 | Graph::dependecies_lists_to_tuple_nodes(&two.depends_on, 2, &dependent_dictionary); 267 | 268 | let dependencies_for_three = 269 | Graph::dependecies_lists_to_tuple_nodes(&three.depends_on, 3, &dependent_dictionary); 270 | 271 | assert_eq!(dependencies_for_two, Some(vec![(1, 2)])); 272 | assert_eq!(dependencies_for_three, Some(vec![(1, 3), (2, 3)])); 273 | } 274 | } 275 | 
-------------------------------------------------------------------------------- /src/config/color.rs: -------------------------------------------------------------------------------- 1 | use ansi_to_tui::IntoText; 2 | use anyhow::anyhow; 3 | use ratatui::style::{Color, Style}; 4 | use ratatui::text::{Line, Span, StyledGrapheme}; 5 | use regex::Regex; 6 | | // A (regex, color) pair: output text matched by `regex` is rendered in `color`. 7 | #[derive(Clone, Debug)] 8 | pub struct ColorOption { 9 | pub regex: Regex, 10 | pub color: Color, 11 | } 12 | 13 | impl ColorOption { 14 | pub fn new(regex: Regex, color: Color) -> Self { 15 | Self { regex, color } 16 | } 17 | | /// Builds a [`ColorOption`] from a `(regex, color)` string pair; returns an | /// error if either the regex or the color string fails to parse. 18 | pub fn from(color_config: (&str, &str)) -> anyhow::Result { 19 | let (regex, color_str) = color_config; 20 | let regex = Regex::new(regex)?; 21 | let color = ColorOption::parse_color(color_str)?; 22 | Ok(Self { regex, color }) 23 | } 24 | | /// Parses either a `#RRGGBB` hex color or a case-insensitive named color. | /// | /// NOTE(review): hex parsing is purely positional, not CSS shorthand — | /// `"#eee"` parses as 0x000EEE => `Rgb(0, 14, 238)`, NOT `#EEEEEE`. The | /// unit tests in this file (`patch_line`) rely on exactly that value, so | /// do not "fix" this without updating them. 25 | pub fn parse_color(str: &str) -> anyhow::Result { 26 | if str.starts_with('#') { 27 | let rgb = u32::from_str_radix(str.trim_start_matches('#'), 16)?; 28 | let r = ((rgb & 0x00FF0000) >> 16) as u8; 29 | let g = ((rgb & 0x0000FF00) >> 8) as u8; 30 | let b = (rgb & 0x000000FF) as u8; 31 | return Ok(Color::Rgb(r, g, b)); 32 | } 33 | 34 | match str.to_ascii_lowercase().as_str() { 35 | "red" => Ok(Color::Red), 36 | "blue" => Ok(Color::Blue), 37 | "gray" => Ok(Color::Gray), 38 | "cyan" => Ok(Color::Cyan), 39 | "black" => Ok(Color::Black), 40 | "green" => Ok(Color::Green), 41 | "white" => Ok(Color::White), 42 | "yellow" => Ok(Color::Yellow), 43 | "magenta" => Ok(Color::Magenta), 44 | "darkgray" => Ok(Color::DarkGray), 45 | "lightred" => Ok(Color::LightRed), 46 | "lightblue" => Ok(Color::LightBlue), 47 | "lightcyan" => Ok(Color::LightCyan), 48 | "lightgreen" => Ok(Color::LightGreen), 49 | "lightyellow" => Ok(Color::LightYellow), 50 | "lightmagenta" => Ok(Color::LightMagenta), 51 | other => Err(anyhow!("unsupported color: {:?}", other)), 52 | } 53 | } 54 | } 55 | 56 | impl PartialEq for ColorOption { 57 | fn eq(&self, other: &Self) -> bool { 58 | 
self.regex.as_str() == other.regex.as_str() && self.color == other.color 59 | } 60 | } 61 | 62 | lazy_static::lazy_static! { 63 | static ref COLOR_OPTIONS: Vec = vec![ 64 | ColorOption::from(("GET", "green")).unwrap(), 65 | ColorOption::from(("POST", "#FFA500")).unwrap(), 66 | ColorOption::from(("PUT", "#800080")).unwrap(), 67 | ColorOption::from(("PATCH", "#800080")).unwrap(), 68 | ColorOption::from(("DELETE", "red")).unwrap(), 69 | ColorOption::from(("ERROR", "red")).unwrap(), 70 | ColorOption::from(("RELOAD", "#800080")).unwrap(), 71 | ColorOption::from(( 72 | r"(?x) 73 | \b 74 | \d+ 75 | (\. 76 | \d+ 77 | )? 78 | \b", 79 | "cyan", 80 | )).unwrap(), // digits 81 | ColorOption::from(( 82 | r"(?x) 83 | (?P 84 | [~/.][\w./-]* 85 | /[\w.-]* 86 | )", 87 | "green", 88 | )).unwrap(), // paths 89 | ColorOption::from((r"https?://[^\s]+", "blue")).unwrap(), // https 90 | ]; 91 | } 92 | 93 | pub struct Colorizer<'b> { 94 | colors: &'b Vec, 95 | base_style: Style, 96 | } 97 | 98 | impl<'b> Colorizer<'b> { 99 | pub fn new(colors: &'b Vec, base_style: Style) -> Self { 100 | Self { colors, base_style } 101 | } 102 | 103 | /// 104 | /// Patches style of input [`&str`] according to stored [`ColorOption`]'s. 105 | /// Each color is applied sequentially. 106 | /// 107 | /// Returns vector of patched lines. 
108 | /// 109 | pub fn patch_text<'a>(&self, str: &'a str) -> Vec> { 110 | let text = str.into_text().unwrap().patch_style(self.base_style); 111 | 112 | let colors = COLOR_OPTIONS.iter().chain(self.colors); 113 | 114 | text.lines 115 | .iter() 116 | .map(|line| { 117 | let mut styled_line = line.clone(); 118 | let pure_str = Colorizer::line_as_string(line); 119 | for opt in colors.clone() { 120 | styled_line = 121 | self.merge_lines(&styled_line, &self.apply_color_option(&pure_str, opt)); 122 | } 123 | styled_line 124 | }) 125 | .collect() 126 | } 127 | 128 | fn line_as_string(line: &Line) -> String { 129 | line.spans 130 | .iter() 131 | .map(|s| s.content.to_string()) 132 | .collect::>() 133 | .join("") 134 | } 135 | 136 | fn uncolored<'a>(&self, content: &'a str) -> Span<'a> { 137 | Span::styled(content, self.base_style) 138 | } 139 | 140 | fn colored<'a>(&self, content: &'a str, color: Color) -> Span<'a> { 141 | Span::styled(content, self.base_style.fg(color)) 142 | } 143 | 144 | /// 145 | /// Creates a new [`Line<'c>`] from the given input [`Line<'a>`]'s. 146 | /// 147 | /// Byte contents of the text should be equal. Only grapheme styles 148 | /// can differ. RHS styles always has priority in contrast with LHS. 
149 | /// 150 | fn merge_lines<'a, 'c>(&self, lhs: &Line<'a>, rhs: &Line<'a>) -> Line<'c> { 151 | let lhs_graphemes = lhs.styled_graphemes(self.base_style); 152 | let rhs_graphemes = rhs.styled_graphemes(self.base_style); 153 | | // Pairwise merge: the RHS grapheme wins only when it carries a | // foreground color; otherwise the LHS styling is kept. The assert | // enforces the documented precondition that both lines carry the | // same text (it panics otherwise). 154 | let merged_graphemes: Vec> = lhs_graphemes 155 | .zip(rhs_graphemes) 156 | .map(|(l, r)| { 157 | assert_eq!(l.symbol, r.symbol, "Symbols should be always equal here"); 158 | if r.style.fg.is_none() { 159 | l 160 | } else { 161 | r 162 | } 163 | }) 164 | .collect(); 165 | | // Run-length encode: consecutive graphemes sharing a style collapse | // into a single Span. `inner` looks ahead from the current grapheme; | // `outer` is advanced past every grapheme absorbed into the span, so | // each grapheme is consumed exactly once. 166 | let mut spans = Vec::new(); 167 | let mut outer = merged_graphemes.iter(); 168 | while let Some(grapheme) = outer.next() { 169 | let mut content = String::from(grapheme.symbol); 170 | let mut inner = outer.clone(); 171 | 172 | while let Some(StyledGrapheme { symbol, style }) = inner.next() { 173 | if *style == grapheme.style { 174 | content += symbol; 175 | outer = inner.clone(); 176 | } else { 177 | break; 178 | } 179 | } 180 | 181 | spans.push(Span::styled(content, grapheme.style)); 182 | } 183 | 184 | Line::from(spans) 185 | } 186 | 187 | /// 188 | /// Splits pure [`&str`] into vector of [`Span`]'s by applying regex pattern stored 189 | /// in [`ColorOption`]. 190 | /// 191 | /// All matched substrings are colorized to corresponding color. 192 | /// Any other unmatched substrings have "base" style.
193 | /// 194 | fn apply_color_option<'a>(&self, s: &'a str, opt: &ColorOption) -> Line<'a> { 195 | let mut last = 0; 196 | let mut result = Vec::new(); 197 | 198 | for m in opt.regex.find_iter(s) { 199 | if last != m.start() { 200 | let unmatched = self.uncolored(&s[last..m.start()]); 201 | result.push(unmatched); 202 | } 203 | let matched = self.colored(&s[m.start()..m.end()], opt.color); 204 | result.push(matched); 205 | last = m.end(); 206 | } 207 | 208 | if last < s.len() { 209 | result.push(self.uncolored(&s[last..])); 210 | } 211 | 212 | Line::from(result) 213 | } 214 | } 215 | 216 | #[cfg(test)] 217 | mod tests { 218 | use super::*; 219 | use std::str::FromStr; 220 | 221 | #[test] 222 | fn merge_colored_lines() { 223 | let lhs = Line::from(vec![ 224 | Span::styled("S", Style::default().fg(Color::Magenta)), 225 | Span::styled("hould be ", Style::default()), 226 | Span::styled("SPLITTED", Style::default().fg(Color::Magenta)), 227 | Span::styled(" into ", Style::default()), 228 | Span::styled("COLORED", Style::default().fg(Color::Magenta)), 229 | Span::styled(" spans", Style::default()), 230 | ]); 231 | 232 | let rhs = Line::from(vec![ 233 | Span::styled("Should be ", Style::default().fg(Color::Red)), 234 | Span::styled("SPLIT", Style::default().fg(Color::Yellow)), 235 | Span::styled("TED", Style::default()), 236 | Span::styled(" into ", Style::default().fg(Color::Cyan)), 237 | Span::styled("COLORED s", Style::default().fg(Color::Green)), 238 | Span::styled("pans", Style::default().fg(Color::DarkGray)), 239 | ]); 240 | 241 | let colored_opt = Vec::new(); 242 | let colorizer = Colorizer::new(&colored_opt, Style::default()); 243 | 244 | assert_eq!( 245 | colorizer.merge_lines(&lhs, &rhs).spans, 246 | vec![ 247 | Span::styled("Should be ", Style::default().fg(Color::Red)), 248 | Span::styled("SPLIT", Style::default().fg(Color::Yellow)), 249 | Span::styled("TED", Style::default().fg(Color::Magenta)), 250 | Span::styled(" into ", Style::default().fg(Color::Cyan)), 
251 | Span::styled("COLORED s", Style::default().fg(Color::Green)), 252 | Span::styled("pans", Style::default().fg(Color::DarkGray)), 253 | ] 254 | ); 255 | 256 | assert_eq!( 257 | colorizer.merge_lines(&rhs, &lhs).spans, 258 | vec![ 259 | Span::styled("S", Style::default().fg(Color::Magenta)), 260 | Span::styled("hould be ", Style::default().fg(Color::Red)), 261 | Span::styled("SPLITTED", Style::default().fg(Color::Magenta)), 262 | Span::styled(" into ", Style::default().fg(Color::Cyan)), 263 | Span::styled("COLORED", Style::default().fg(Color::Magenta)), 264 | Span::styled(" s", Style::default().fg(Color::Green)), 265 | Span::styled("pans", Style::default().fg(Color::DarkGray)), 266 | ] 267 | ); 268 | } 269 | 270 | #[test] 271 | fn split_string_into_colored_parts() { 272 | let test_string = "Should be SPLITTED into COLORED spans"; 273 | let colored_opt = Vec::new(); 274 | let colorizer = Colorizer::new(&colored_opt, Style::default()); 275 | 276 | let actual_spans = colorizer.apply_color_option( 277 | test_string, 278 | &ColorOption::new( 279 | Regex::from_str("[A-Z]+").unwrap(), 280 | ColorOption::parse_color("magenta").unwrap(), 281 | ), 282 | ); 283 | 284 | let expected_spans = Line::from(vec![ 285 | Span::styled("S", Style::default().fg(Color::Magenta)), 286 | Span::styled("hould be ", Style::default()), 287 | Span::styled("SPLITTED", Style::default().fg(Color::Magenta)), 288 | Span::styled(" into ", Style::default()), 289 | Span::styled("COLORED", Style::default().fg(Color::Magenta)), 290 | Span::styled(" spans", Style::default()), 291 | ]); 292 | 293 | assert_eq!(actual_spans, expected_spans); 294 | } 295 | 296 | #[test] 297 | fn patch_ansi() { 298 | let ansi_string = "\u{1b}[31mHelloWorld\u{1b}[0m"; // red-line colored ANSI string 299 | let base_style = Style::default(); 300 | let color_opts = vec![ 301 | ColorOption::new( 302 | Regex::from_str("He").unwrap(), 303 | ColorOption::parse_color("yellow").unwrap(), 304 | ), 305 | ColorOption::new( 306 | 
Regex::from_str("Wor").unwrap(), 307 | ColorOption::parse_color("green").unwrap(), 308 | ), 309 | ]; 310 | 311 | let colorizer = Colorizer::new(&color_opts, base_style); 312 | let patched = colorizer.patch_text(ansi_string); 313 | let expected = vec![ 314 | Span::styled("He", base_style.fg(Color::Yellow)), 315 | Span::styled("llo", base_style.fg(Color::Red)), 316 | Span::styled("Wor", base_style.fg(Color::Green)), 317 | Span::styled("ld", base_style.fg(Color::Red)), 318 | ]; 319 | 320 | assert_eq!(patched.len(), 1); 321 | assert_eq!(expected, patched.first().unwrap().spans); 322 | } 323 | 324 | #[test] 325 | fn patch_line() { 326 | let test_string = "The variablE#nAmEs####next. http://localhost:8080"; 327 | let color_opts = vec![ 328 | ColorOption::new( 329 | Regex::from_str("#+").unwrap(), 330 | ColorOption::parse_color("#eee").unwrap(), 331 | ), 332 | ColorOption::new( 333 | Regex::from_str("[a-z]\\#+[a-z]").unwrap(), 334 | ColorOption::parse_color("blue").unwrap(), 335 | ), 336 | ColorOption::new( 337 | Regex::from_str("[A-Z]").unwrap(), 338 | ColorOption::parse_color("green").unwrap(), 339 | ), 340 | ColorOption::new( 341 | Regex::from_str("^The").unwrap(), 342 | ColorOption::parse_color("yellow").unwrap(), 343 | ), 344 | ColorOption::new( 345 | Regex::from_str("http://(.*)").unwrap(), 346 | ColorOption::parse_color("#def").unwrap(), 347 | ), 348 | ]; 349 | 350 | let base_style = Style::default(); 351 | let colorizer = Colorizer::new(&color_opts, base_style); 352 | let patched = colorizer.patch_text(test_string); 353 | 354 | let expected = vec![ 355 | Span::styled("The", base_style.fg(Color::Yellow)), 356 | Span::styled(" variabl", base_style), 357 | Span::styled("E", base_style.fg(Color::Green)), 358 | Span::styled("#", base_style.fg(Color::Rgb(0, 14, 238))), 359 | Span::styled("n", base_style), 360 | Span::styled("A", base_style.fg(Color::Green)), 361 | Span::styled("m", base_style), 362 | Span::styled("E", base_style.fg(Color::Green)), 363 | 
Span::styled("s####n", base_style.fg(Color::Blue)), 364 | Span::styled("ext. ", base_style), 365 | Span::styled( 366 | "http://localhost:8080", 367 | base_style.fg(Color::Rgb(0, 13, 239)), 368 | ), 369 | ]; 370 | 371 | assert_eq!(patched.len(), 1); 372 | assert_eq!(expected, patched.first().unwrap().spans); 373 | } 374 | } 375 | -------------------------------------------------------------------------------- /src/config/mod.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | collections::{HashMap, HashSet}, 3 | path::{Path, PathBuf}, 4 | str::FromStr, 5 | sync::Arc, 6 | }; 7 | 8 | use anyhow::{Context, Result}; 9 | use indexmap::IndexMap; 10 | use serde::Deserialize; 11 | 12 | use std::fs::File; 13 | use std::io::Read; 14 | 15 | pub mod color; 16 | pub mod ops; 17 | pub mod pipe; 18 | 19 | use pipe::Pipe; 20 | 21 | use self::{color::ColorOption, ops::Ops}; 22 | 23 | #[derive(Deserialize, Debug, Clone, Default)] 24 | #[serde(untagged)] 25 | pub enum Lift { 26 | More(Vec), 27 | One(T), 28 | #[default] 29 | Empty, 30 | } 31 | 32 | impl Lift { 33 | pub fn resolve(&self) -> Vec { 34 | match self { 35 | Lift::More(vs) => vs.clone(), 36 | Lift::One(v) => vec![v.clone()], 37 | Lift::Empty => vec![], 38 | } 39 | } 40 | } 41 | 42 | #[derive(Deserialize, Debug, Clone)] 43 | #[serde(deny_unknown_fields)] 44 | pub struct Task { 45 | pub workdir: Option, 46 | pub command: Option, 47 | pub entrypoint: Option, 48 | 49 | #[serde(default)] 50 | pub watch: Lift, 51 | 52 | #[serde(default)] 53 | pub ignore: Lift, 54 | 55 | #[serde(default)] 56 | pub env: HashMap, 57 | 58 | #[serde(default)] 59 | pub env_file: Lift, 60 | 61 | #[serde(default)] 62 | pub depends_on: Lift, 63 | 64 | /// Map of output redirections with the format: 65 | /// `regular expressiong` -> `pipe` 66 | /// 67 | /// Where the content matched by the regular expression 68 | /// can be redirected to: 69 | /// 70 | /// - whiz: creating a new tab for the incoming 
messages. 71 | /// Format: `whiz://{tab_name}` 72 | /// 73 | /// - /dev/null: silence the matched content. 74 | /// Format: `/dev/null` or `file:///dev/null` 75 | /// 76 | /// - file: saving the matched content in a log file. 77 | /// Format: `path` or `file:///{path}` 78 | /// 79 | /// # NOTE 80 | /// 81 | /// Any other output not matched by a regular expression goes to 82 | /// `whiz://{task_name}` as default. 83 | #[serde(default)] 84 | pub pipe: HashMap, 85 | 86 | #[serde(default)] 87 | pub color: IndexMap, 88 | } 89 | 90 | #[derive(Deserialize, Debug)] 91 | pub struct RawConfig { 92 | #[serde(default)] 93 | pub env: HashMap, 94 | 95 | #[serde(flatten)] 96 | pub ops: IndexMap, 97 | } 98 | 99 | #[derive(Debug, Clone)] 100 | pub struct ConfigInner { 101 | pub base_dir: Arc, 102 | pub env: HashMap, 103 | pub ops: Ops, 104 | pub pipes_map: HashMap>, 105 | pub colors_map: HashMap>, 106 | } 107 | 108 | impl ConfigInner { 109 | pub fn from_raw(config: RawConfig, base_dir: PathBuf) -> Result { 110 | let pipes_map = config 111 | .get_pipes_map() 112 | .context("Error while getting pipes")?; 113 | 114 | let colors_map = config 115 | .get_colors_map() 116 | .context("Error while getting colors")?; 117 | 118 | Ok(Self { 119 | base_dir: base_dir.into(), 120 | env: config.env, 121 | ops: config.ops, 122 | pipes_map, 123 | colors_map, 124 | }) 125 | } 126 | } 127 | 128 | pub type Config = Arc; 129 | 130 | pub type Dag = IndexMap>; 131 | 132 | impl FromStr for RawConfig { 133 | type Err = anyhow::Error; 134 | 135 | fn from_str(s: &str) -> Result { 136 | Self::from_reader(s.as_bytes()) 137 | } 138 | } 139 | 140 | impl RawConfig { 141 | pub fn from_file(file: &File) -> Result { 142 | Self::from_reader(file) 143 | } 144 | 145 | fn from_reader(reader: impl Read) -> Result { 146 | let mut config: serde_yaml::Value = serde_yaml::from_reader(reader)?; 147 | config.apply_merge()?; 148 | let mut config: RawConfig = serde_yaml::from_value(config)?; 149 | 150 | // make sure config file 
is a `Directed Acyclic Graph` 151 | ops::build_dag(&config.ops)?; 152 | 153 | config.simplify_dependencies(); 154 | Ok(config) 155 | } 156 | 157 | /// Parses the pipes of each task to make sure they are valid and returns 158 | /// a [`HashMap`] where the keys are the task names and the values 159 | /// are the parsed pipes. 160 | pub fn get_pipes_map(&self) -> Result>> { 161 | let mut pipes = HashMap::new(); 162 | 163 | for (task_name, task) in &self.ops { 164 | for pipe_config in &task.pipe { 165 | let task_pipes: &mut Vec = pipes.entry(task_name.to_owned()).or_default(); 166 | let pipe = Pipe::from(pipe_config)?; 167 | task_pipes.push(pipe); 168 | } 169 | } 170 | 171 | Ok(pipes) 172 | } 173 | 174 | pub fn get_colors_map(&self) -> Result>> { 175 | let mut colors = HashMap::new(); 176 | 177 | for (task_name, task) in &self.ops { 178 | let task_color_options: Vec = task 179 | .color 180 | .iter() 181 | .filter_map(|(r, c)| ColorOption::from((&r, &c)).ok()) 182 | .collect(); 183 | 184 | colors.insert(task_name.to_owned(), task_color_options); 185 | } 186 | 187 | Ok(colors) 188 | } 189 | 190 | /// Remove dependencies that are child of another dependency for 191 | /// the same job. 
192 | pub fn simplify_dependencies(&mut self) { 193 | let jobs = self.ops.clone().into_iter().map(|(job_name, _)| job_name); 194 | for job_name in jobs { 195 | // array used to iterate all the elements and skip removed elements 196 | let mut dependencies = ops::get_dependencies(&self.ops, &job_name); 197 | let mut simplified_dependencies = dependencies.clone(); 198 | 199 | while let Some(dependency) = dependencies.pop() { 200 | let child_dependencies = 201 | &ops::get_all_dependencies(&self.ops, &[dependency.to_owned()]); 202 | let child_dependencies: HashSet<&String> = 203 | HashSet::from_iter(child_dependencies.iter()); 204 | // remove all the dependencies that are dependency 205 | // of the current `dependency` 206 | dependencies.retain(|job_name| !child_dependencies.contains(job_name)); 207 | simplified_dependencies.retain(|job_name| !child_dependencies.contains(job_name)); 208 | } 209 | 210 | let job_operator = self.ops.get_mut(&job_name).unwrap(); 211 | job_operator.depends_on = Lift::More(simplified_dependencies); 212 | } 213 | } 214 | 215 | fn filter_jobs(&mut self, run: &[String]) -> Result<()> { 216 | ops::filter_jobs(&mut self.ops, run) 217 | } 218 | } 219 | 220 | impl ConfigInner { 221 | pub fn build_dag(&self) -> Result { 222 | ops::build_dag(&self.ops) 223 | } 224 | } 225 | 226 | pub struct ConfigBuilder { 227 | path: PathBuf, 228 | filter: Option>, 229 | } 230 | 231 | impl ConfigBuilder { 232 | pub fn new(path: PathBuf) -> Self { 233 | Self { path, filter: None } 234 | } 235 | 236 | pub fn filter(mut self, filter: Vec) -> Self { 237 | self.filter = Some(filter); 238 | self 239 | } 240 | 241 | pub fn build(self) -> Result { 242 | let file = File::open(&self.path)?; 243 | let mut config = RawConfig::from_file(&file)?; 244 | 245 | if let Some(filter) = self.filter { 246 | config 247 | .filter_jobs(&filter) 248 | .context("Error while filtering jobs")?; 249 | } 250 | 251 | Ok(Arc::new(ConfigInner::from_raw( 252 | config, 253 | 
self.path.parent().unwrap().into(), 254 | )?)) 255 | } 256 | } 257 | 258 | #[cfg(test)] 259 | mod tests { 260 | use super::*; 261 | 262 | /// Asserts if two arrays are equal without taking into account the order. 263 | macro_rules! assert_array_not_strict { 264 | ($left:expr, $right:expr) => { 265 | match (&$left, &$right) { 266 | (left_val, right_val) => { 267 | let mut v1 = left_val.clone(); 268 | v1.sort(); 269 | let mut v2 = right_val.clone(); 270 | v2.sort(); 271 | assert_eq!(v1, v2); 272 | } 273 | }; 274 | }; 275 | } 276 | 277 | mod dependencies { 278 | use super::*; 279 | 280 | const CONFIG_EXAMPLE: &str = r#" 281 | a: 282 | command: echo a 283 | 284 | b: 285 | command: echo b 286 | depends_on: 287 | - a 288 | 289 | c: 290 | command: echo c 291 | depends_on: 292 | - b 293 | 294 | d: &alias 295 | command: echo c 296 | depends_on: 297 | - a 298 | - b 299 | - c 300 | - y 301 | - z 302 | 303 | y: 304 | command: echo y 305 | 306 | z: 307 | command: echo z 308 | depends_on: 309 | - y 310 | 311 | not_child_dependency: 312 | command: echo hello world 313 | 314 | with_alias: 315 | <<: *alias 316 | command: echo with_alias 317 | "#; 318 | 319 | #[test] 320 | fn gets_all_dependencies() { 321 | let config: RawConfig = CONFIG_EXAMPLE.parse().unwrap(); 322 | let jobs = &["c".to_string(), "z".to_string()]; 323 | 324 | let jobs = ops::get_all_dependencies(&config.ops, jobs); 325 | let expected_jobs = vec!["a", "b", "y"]; 326 | 327 | assert_array_not_strict!(jobs, expected_jobs); 328 | } 329 | 330 | #[test] 331 | fn gets_dependencies_from_config_file() { 332 | let config: RawConfig = CONFIG_EXAMPLE.parse().unwrap(); 333 | 334 | let jobs = ops::get_dependencies(&config.ops, "c"); 335 | let expected_jobs = vec!["b"]; 336 | 337 | assert_array_not_strict!(jobs, expected_jobs); 338 | } 339 | 340 | #[test] 341 | fn simplifies_dependencies() { 342 | let config: RawConfig = CONFIG_EXAMPLE.parse().unwrap(); 343 | 344 | let job_d = config.ops.get("d").unwrap(); 345 | 346 | let 
dependencies_d = job_d.depends_on.resolve(); 347 | let expected_dependencies = vec!["c", "z"]; 348 | 349 | assert_array_not_strict!(dependencies_d, expected_dependencies); 350 | } 351 | 352 | #[test] 353 | fn resolves_alias() { 354 | let config: RawConfig = CONFIG_EXAMPLE.parse().unwrap(); 355 | 356 | assert_array_not_strict!( 357 | ops::get_dependencies(&config.ops, "d"), 358 | ops::get_dependencies(&config.ops, "with_alias") 359 | ); 360 | 361 | let job_with_alias = config.ops.get("with_alias").unwrap(); 362 | assert_eq!(&job_with_alias.command.clone().unwrap(), "echo with_alias"); 363 | } 364 | } 365 | 366 | mod job_filtering { 367 | use super::*; 368 | 369 | const CONFIG_EXAMPLE: &str = r#" 370 | not_test_dependency: 371 | command: echo fails 372 | 373 | test_dependency: 374 | command: echo hello 375 | 376 | test: 377 | command: echo world 378 | depends_on: 379 | - test_dependency 380 | "#; 381 | 382 | #[test] 383 | fn filters_jobs() { 384 | let mut config: RawConfig = CONFIG_EXAMPLE.parse().unwrap(); 385 | let run = ["test".to_string()]; 386 | 387 | config.filter_jobs(&run).unwrap(); 388 | 389 | let jobs: Vec<_> = config.ops.iter().map(|(job_name, _)| job_name).collect(); 390 | let expected_jobs = vec!["test", "test_dependency"]; 391 | 392 | assert_array_not_strict!(jobs, expected_jobs); 393 | } 394 | 395 | #[test] 396 | fn fails_job_filtering() { 397 | let mut config: RawConfig = CONFIG_EXAMPLE.parse().unwrap(); 398 | 399 | let expected_err = [ 400 | "job 'doesnt_exist' not found in config file.", 401 | "", 402 | "Valid jobs are:", 403 | " - not_test_dependency", 404 | " - test (test_dependency)", 405 | " - test_dependency", 406 | ] 407 | .join("\n"); 408 | 409 | let mut err_message = String::new(); 410 | let run = ["doesnt_exist".to_string()]; 411 | 412 | if let Err(err) = config.filter_jobs(&run) { 413 | err_message = err.to_string(); 414 | }; 415 | 416 | assert_eq!(err_message, expected_err); 417 | } 418 | 419 | #[test] 420 | fn doesnt_filter_jobs() { 421 
| let mut config: RawConfig = CONFIG_EXAMPLE.parse().unwrap(); 422 | let run = &Vec::new(); 423 | 424 | config.filter_jobs(run).unwrap(); 425 | 426 | let jobs: Vec<_> = config.ops.iter().map(|(job_name, _)| job_name).collect(); 427 | let expected_jobs = vec!["test", "test_dependency", "not_test_dependency"]; 428 | 429 | assert_array_not_strict!(jobs, expected_jobs); 430 | } 431 | } 432 | 433 | mod colors { 434 | use regex::Regex; 435 | 436 | use super::*; 437 | 438 | const CONFIG_EXAMPLE: &str = r#" 439 | task1: 440 | color: 441 | "^abc": red 442 | "My": yellow 443 | task2: 444 | color: 445 | "d+": '#def' 446 | "#; 447 | 448 | #[test] 449 | fn parse_colors_map() { 450 | let config: RawConfig = CONFIG_EXAMPLE.parse().unwrap(); 451 | let actual = config.get_colors_map().unwrap(); 452 | let mut expected = HashMap::new(); 453 | 454 | expected.insert( 455 | "task1".to_owned(), 456 | vec![ 457 | ColorOption::new( 458 | Regex::from_str("^abc").unwrap(), 459 | ColorOption::parse_color("red").unwrap(), 460 | ), 461 | ColorOption::new( 462 | Regex::from_str("My").unwrap(), 463 | ColorOption::parse_color("yellow").unwrap(), 464 | ), 465 | ], 466 | ); 467 | expected.insert( 468 | "task2".to_owned(), 469 | vec![ColorOption::new( 470 | Regex::from_str("d+").unwrap(), 471 | ColorOption::parse_color("#def").unwrap(), 472 | )], 473 | ); 474 | 475 | assert_eq!(actual.get("task1").unwrap(), expected.get("task1").unwrap()); 476 | assert_eq!(actual.get("task2").unwrap(), expected.get("task2").unwrap()); 477 | } 478 | } 479 | } 480 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Mozilla Public License Version 2.0 2 | ================================== 3 | 4 | 1. Definitions 5 | -------------- 6 | 7 | 1.1. "Contributor" 8 | means each individual or legal entity that creates, contributes to 9 | the creation of, or owns Covered Software. 10 | 11 | 1.2. 
"Contributor Version" 12 | means the combination of the Contributions of others (if any) used 13 | by a Contributor and that particular Contributor's Contribution. 14 | 15 | 1.3. "Contribution" 16 | means Covered Software of a particular Contributor. 17 | 18 | 1.4. "Covered Software" 19 | means Source Code Form to which the initial Contributor has attached 20 | the notice in Exhibit A, the Executable Form of such Source Code 21 | Form, and Modifications of such Source Code Form, in each case 22 | including portions thereof. 23 | 24 | 1.5. "Incompatible With Secondary Licenses" 25 | means 26 | 27 | (a) that the initial Contributor has attached the notice described 28 | in Exhibit B to the Covered Software; or 29 | 30 | (b) that the Covered Software was made available under the terms of 31 | version 1.1 or earlier of the License, but not also under the 32 | terms of a Secondary License. 33 | 34 | 1.6. "Executable Form" 35 | means any form of the work other than Source Code Form. 36 | 37 | 1.7. "Larger Work" 38 | means a work that combines Covered Software with other material, in 39 | a separate file or files, that is not Covered Software. 40 | 41 | 1.8. "License" 42 | means this document. 43 | 44 | 1.9. "Licensable" 45 | means having the right to grant, to the maximum extent possible, 46 | whether at the time of the initial grant or subsequently, any and 47 | all of the rights conveyed by this License. 48 | 49 | 1.10. "Modifications" 50 | means any of the following: 51 | 52 | (a) any file in Source Code Form that results from an addition to, 53 | deletion from, or modification of the contents of Covered 54 | Software; or 55 | 56 | (b) any new file in Source Code Form that contains any Covered 57 | Software. 58 | 59 | 1.11. 
"Patent Claims" of a Contributor 60 | means any patent claim(s), including without limitation, method, 61 | process, and apparatus claims, in any patent Licensable by such 62 | Contributor that would be infringed, but for the grant of the 63 | License, by the making, using, selling, offering for sale, having 64 | made, import, or transfer of either its Contributions or its 65 | Contributor Version. 66 | 67 | 1.12. "Secondary License" 68 | means either the GNU General Public License, Version 2.0, the GNU 69 | Lesser General Public License, Version 2.1, the GNU Affero General 70 | Public License, Version 3.0, or any later versions of those 71 | licenses. 72 | 73 | 1.13. "Source Code Form" 74 | means the form of the work preferred for making modifications. 75 | 76 | 1.14. "You" (or "Your") 77 | means an individual or a legal entity exercising rights under this 78 | License. For legal entities, "You" includes any entity that 79 | controls, is controlled by, or is under common control with You. For 80 | purposes of this definition, "control" means (a) the power, direct 81 | or indirect, to cause the direction or management of such entity, 82 | whether by contract or otherwise, or (b) ownership of more than 83 | fifty percent (50%) of the outstanding shares or beneficial 84 | ownership of such entity. 85 | 86 | 2. License Grants and Conditions 87 | -------------------------------- 88 | 89 | 2.1. 
Grants 90 | 91 | Each Contributor hereby grants You a world-wide, royalty-free, 92 | non-exclusive license: 93 | 94 | (a) under intellectual property rights (other than patent or trademark) 95 | Licensable by such Contributor to use, reproduce, make available, 96 | modify, display, perform, distribute, and otherwise exploit its 97 | Contributions, either on an unmodified basis, with Modifications, or 98 | as part of a Larger Work; and 99 | 100 | (b) under Patent Claims of such Contributor to make, use, sell, offer 101 | for sale, have made, import, and otherwise transfer either its 102 | Contributions or its Contributor Version. 103 | 104 | 2.2. Effective Date 105 | 106 | The licenses granted in Section 2.1 with respect to any Contribution 107 | become effective for each Contribution on the date the Contributor first 108 | distributes such Contribution. 109 | 110 | 2.3. Limitations on Grant Scope 111 | 112 | The licenses granted in this Section 2 are the only rights granted under 113 | this License. No additional rights or licenses will be implied from the 114 | distribution or licensing of Covered Software under this License. 115 | Notwithstanding Section 2.1(b) above, no patent license is granted by a 116 | Contributor: 117 | 118 | (a) for any code that a Contributor has removed from Covered Software; 119 | or 120 | 121 | (b) for infringements caused by: (i) Your and any other third party's 122 | modifications of Covered Software, or (ii) the combination of its 123 | Contributions with other software (except as part of its Contributor 124 | Version); or 125 | 126 | (c) under Patent Claims infringed by Covered Software in the absence of 127 | its Contributions. 128 | 129 | This License does not grant any rights in the trademarks, service marks, 130 | or logos of any Contributor (except as may be necessary to comply with 131 | the notice requirements in Section 3.4). 132 | 133 | 2.4. 
Subsequent Licenses 134 | 135 | No Contributor makes additional grants as a result of Your choice to 136 | distribute the Covered Software under a subsequent version of this 137 | License (see Section 10.2) or under the terms of a Secondary License (if 138 | permitted under the terms of Section 3.3). 139 | 140 | 2.5. Representation 141 | 142 | Each Contributor represents that the Contributor believes its 143 | Contributions are its original creation(s) or it has sufficient rights 144 | to grant the rights to its Contributions conveyed by this License. 145 | 146 | 2.6. Fair Use 147 | 148 | This License is not intended to limit any rights You have under 149 | applicable copyright doctrines of fair use, fair dealing, or other 150 | equivalents. 151 | 152 | 2.7. Conditions 153 | 154 | Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted 155 | in Section 2.1. 156 | 157 | 3. Responsibilities 158 | ------------------- 159 | 160 | 3.1. Distribution of Source Form 161 | 162 | All distribution of Covered Software in Source Code Form, including any 163 | Modifications that You create or to which You contribute, must be under 164 | the terms of this License. You must inform recipients that the Source 165 | Code Form of the Covered Software is governed by the terms of this 166 | License, and how they can obtain a copy of this License. You may not 167 | attempt to alter or restrict the recipients' rights in the Source Code 168 | Form. 169 | 170 | 3.2. 
Distribution of Executable Form 171 | 172 | If You distribute Covered Software in Executable Form then: 173 | 174 | (a) such Covered Software must also be made available in Source Code 175 | Form, as described in Section 3.1, and You must inform recipients of 176 | the Executable Form how they can obtain a copy of such Source Code 177 | Form by reasonable means in a timely manner, at a charge no more 178 | than the cost of distribution to the recipient; and 179 | 180 | (b) You may distribute such Executable Form under the terms of this 181 | License, or sublicense it under different terms, provided that the 182 | license for the Executable Form does not attempt to limit or alter 183 | the recipients' rights in the Source Code Form under this License. 184 | 185 | 3.3. Distribution of a Larger Work 186 | 187 | You may create and distribute a Larger Work under terms of Your choice, 188 | provided that You also comply with the requirements of this License for 189 | the Covered Software. If the Larger Work is a combination of Covered 190 | Software with a work governed by one or more Secondary Licenses, and the 191 | Covered Software is not Incompatible With Secondary Licenses, this 192 | License permits You to additionally distribute such Covered Software 193 | under the terms of such Secondary License(s), so that the recipient of 194 | the Larger Work may, at their option, further distribute the Covered 195 | Software under the terms of either this License or such Secondary 196 | License(s). 197 | 198 | 3.4. Notices 199 | 200 | You may not remove or alter the substance of any license notices 201 | (including copyright notices, patent notices, disclaimers of warranty, 202 | or limitations of liability) contained within the Source Code Form of 203 | the Covered Software, except that You may alter any license notices to 204 | the extent required to remedy known factual inaccuracies. 205 | 206 | 3.5. 
Application of Additional Terms 207 | 208 | You may choose to offer, and to charge a fee for, warranty, support, 209 | indemnity or liability obligations to one or more recipients of Covered 210 | Software. However, You may do so only on Your own behalf, and not on 211 | behalf of any Contributor. You must make it absolutely clear that any 212 | such warranty, support, indemnity, or liability obligation is offered by 213 | You alone, and You hereby agree to indemnify every Contributor for any 214 | liability incurred by such Contributor as a result of warranty, support, 215 | indemnity or liability terms You offer. You may include additional 216 | disclaimers of warranty and limitations of liability specific to any 217 | jurisdiction. 218 | 219 | 4. Inability to Comply Due to Statute or Regulation 220 | --------------------------------------------------- 221 | 222 | If it is impossible for You to comply with any of the terms of this 223 | License with respect to some or all of the Covered Software due to 224 | statute, judicial order, or regulation then You must: (a) comply with 225 | the terms of this License to the maximum extent possible; and (b) 226 | describe the limitations and the code they affect. Such description must 227 | be placed in a text file included with all distributions of the Covered 228 | Software under this License. Except to the extent prohibited by statute 229 | or regulation, such description must be sufficiently detailed for a 230 | recipient of ordinary skill to be able to understand it. 231 | 232 | 5. Termination 233 | -------------- 234 | 235 | 5.1. The rights granted under this License will terminate automatically 236 | if You fail to comply with any of its terms. 
However, if You become 237 | compliant, then the rights granted under this License from a particular 238 | Contributor are reinstated (a) provisionally, unless and until such 239 | Contributor explicitly and finally terminates Your grants, and (b) on an 240 | ongoing basis, if such Contributor fails to notify You of the 241 | non-compliance by some reasonable means prior to 60 days after You have 242 | come back into compliance. Moreover, Your grants from a particular 243 | Contributor are reinstated on an ongoing basis if such Contributor 244 | notifies You of the non-compliance by some reasonable means, this is the 245 | first time You have received notice of non-compliance with this License 246 | from such Contributor, and You become compliant prior to 30 days after 247 | Your receipt of the notice. 248 | 249 | 5.2. If You initiate litigation against any entity by asserting a patent 250 | infringement claim (excluding declaratory judgment actions, 251 | counter-claims, and cross-claims) alleging that a Contributor Version 252 | directly or indirectly infringes any patent, then the rights granted to 253 | You by any and all Contributors for the Covered Software under Section 254 | 2.1 of this License shall terminate. 255 | 256 | 5.3. In the event of termination under Sections 5.1 or 5.2 above, all 257 | end user license agreements (excluding distributors and resellers) which 258 | have been validly granted by You or Your distributors under this License 259 | prior to termination shall survive termination. 260 | 261 | ************************************************************************ 262 | * * 263 | * 6. 
Disclaimer of Warranty * 264 | * ------------------------- * 265 | * * 266 | * Covered Software is provided under this License on an "as is" * 267 | * basis, without warranty of any kind, either expressed, implied, or * 268 | * statutory, including, without limitation, warranties that the * 269 | * Covered Software is free of defects, merchantable, fit for a * 270 | * particular purpose or non-infringing. The entire risk as to the * 271 | * quality and performance of the Covered Software is with You. * 272 | * Should any Covered Software prove defective in any respect, You * 273 | * (not any Contributor) assume the cost of any necessary servicing, * 274 | * repair, or correction. This disclaimer of warranty constitutes an * 275 | * essential part of this License. No use of any Covered Software is * 276 | * authorized under this License except under this disclaimer. * 277 | * * 278 | ************************************************************************ 279 | 280 | ************************************************************************ 281 | * * 282 | * 7. Limitation of Liability * 283 | * -------------------------- * 284 | * * 285 | * Under no circumstances and under no legal theory, whether tort * 286 | * (including negligence), contract, or otherwise, shall any * 287 | * Contributor, or anyone who distributes Covered Software as * 288 | * permitted above, be liable to You for any direct, indirect, * 289 | * special, incidental, or consequential damages of any character * 290 | * including, without limitation, damages for lost profits, loss of * 291 | * goodwill, work stoppage, computer failure or malfunction, or any * 292 | * and all other commercial damages or losses, even if such party * 293 | * shall have been informed of the possibility of such damages. 
This * 294 | * limitation of liability shall not apply to liability for death or * 295 | * personal injury resulting from such party's negligence to the * 296 | * extent applicable law prohibits such limitation. Some * 297 | * jurisdictions do not allow the exclusion or limitation of * 298 | * incidental or consequential damages, so this exclusion and * 299 | * limitation may not apply to You. * 300 | * * 301 | ************************************************************************ 302 | 303 | 8. Litigation 304 | ------------- 305 | 306 | Any litigation relating to this License may be brought only in the 307 | courts of a jurisdiction where the defendant maintains its principal 308 | place of business and such litigation shall be governed by laws of that 309 | jurisdiction, without reference to its conflict-of-law provisions. 310 | Nothing in this Section shall prevent a party's ability to bring 311 | cross-claims or counter-claims. 312 | 313 | 9. Miscellaneous 314 | ---------------- 315 | 316 | This License represents the complete agreement concerning the subject 317 | matter hereof. If any provision of this License is held to be 318 | unenforceable, such provision shall be reformed only to the extent 319 | necessary to make it enforceable. Any law or regulation which provides 320 | that the language of a contract shall be construed against the drafter 321 | shall not be used to construe this License against a Contributor. 322 | 323 | 10. Versions of the License 324 | --------------------------- 325 | 326 | 10.1. New Versions 327 | 328 | Mozilla Foundation is the license steward. Except as provided in Section 329 | 10.3, no one other than the license steward has the right to modify or 330 | publish new versions of this License. Each version will be given a 331 | distinguishing version number. 332 | 333 | 10.2. 
Effect of New Versions 334 | 335 | You may distribute the Covered Software under the terms of the version 336 | of the License under which You originally received the Covered Software, 337 | or under the terms of any subsequent version published by the license 338 | steward. 339 | 340 | 10.3. Modified Versions 341 | 342 | If you create software not governed by this License, and you want to 343 | create a new license for such software, you may create and use a 344 | modified version of this License if you rename the license and remove 345 | any references to the name of the license steward (except to note that 346 | such modified license differs from this License). 347 | 348 | 10.4. Distributing Source Code Form that is Incompatible With Secondary 349 | Licenses 350 | 351 | If You choose to distribute Source Code Form that is Incompatible With 352 | Secondary Licenses under the terms of this version of the License, the 353 | notice described in Exhibit B of this License must be attached. 354 | 355 | Exhibit A - Source Code Form License Notice 356 | ------------------------------------------- 357 | 358 | This Source Code Form is subject to the terms of the Mozilla Public 359 | License, v. 2.0. If a copy of the MPL was not distributed with this 360 | file, You can obtain one at http://mozilla.org/MPL/2.0/. 361 | 362 | If it is not possible or desirable to put the notice in a particular 363 | file, then You may include the notice in a location (such as a LICENSE 364 | file in a relevant directory) where a recipient would be likely to look 365 | for such a notice. 366 | 367 | You may add additional accurate notices of copyright ownership. 368 | 369 | Exhibit B - "Incompatible With Secondary Licenses" Notice 370 | --------------------------------------------------------- 371 | 372 | This Source Code Form is "Incompatible With Secondary Licenses", as 373 | defined by the Mozilla Public License, v. 2.0. 
--------------------------------------------------------------------------------
/src/actors/command.rs:
--------------------------------------------------------------------------------
use actix::clock::sleep;
use actix::prelude::*;

use anyhow::Result;
use chrono::{DateTime, Local};
use subprocess::{ExitStatus, Popen, Redirection};

use globset::{Glob, GlobSetBuilder};
use path_absolutize::*;
use std::collections::BTreeMap;
use std::fs;
use std::io::Write;
use std::path::Path;
use std::{collections::HashMap, time::Duration};
use std::{
    io::{BufRead, BufReader},
    path::PathBuf,
};

use crate::actors::grim_reaper::PermaDeathInvite;
use crate::config::color::ColorOption;
use crate::config::{
    pipe::{OutputRedirection, Pipe},
    Config, Task,
};
use crate::exec::ExecBuilder;

use super::console::{Output, OutputKind, PanelStatus, RegisterPanel};
use super::watcher::{IgnorePath, WatchGlob};

// Outside of tests the aliases point at the real actors; under `cfg(test)`
// they are actix mockers so command logic can be exercised in isolation.
#[cfg(not(test))]
mod prelude {
    use crate::actors::{console::ConsoleActor, watcher::WatcherActor};

    pub type WatcherAct = WatcherActor;
    pub type ConsoleAct = ConsoleActor;
}

#[cfg(test)]
mod prelude {
    use crate::actors::{console::ConsoleActor, watcher::WatcherActor};
    use actix::actors::mocker::Mocker;

    pub type WatcherAct = Mocker<WatcherActor>;
    pub type ConsoleAct = Mocker<ConsoleActor>;
}

use prelude::*;

/// A config [`Task`] joined with everything the command actor needs to run
/// it: its name, resolved output pipes, color rules, and absolute workdir.
pub struct ExtendedTask {
    name: String,
    task: Task,
    pipes: Vec<Pipe>,
    colors: Vec<ColorOption>,
    cwd: PathBuf,
}

impl Task {
    /// Resolve this task against the global `config` into an [`ExtendedTask`].
    pub fn extend(&self, name: String, config: &Config) -> ExtendedTask {
        let cwd = self.get_absolute_workdir(&config.base_dir);
        // `.cloned().unwrap_or_default()` avoids allocating a throwaway
        // `Vec` when the map already has an entry, unlike the previous
        // `unwrap_or(&Vec::new()).clone()` (clippy: or_fun_call).
        let pipes = config.pipes_map.get(&name).cloned().unwrap_or_default();
        let colors = config.colors_map.get(&name).cloned().unwrap_or_default();

        ExtendedTask {
            name,
            task: 
self.clone(),
            pipes,
            colors,
            cwd,
        }
    }
}

/// Lifecycle of the spawned subprocess backing a task.
#[derive(Debug)]
pub enum Child {
    NotStarted,
    Killed,
    Process(Popen),
    Exited(ExitStatus),
}

impl Child {
    /// Poll the running process. With `kill == true`, terminate it (grace
    /// period of 500 ms, then SIGKILL). Returns `Ok(true)` when the state
    /// transitioned (exited or killed), `Ok(false)` otherwise.
    fn poll(&mut self, kill: bool) -> Result<bool> {
        if let Child::Process(p) = self {
            match p.poll() {
                Some(exit) => {
                    *self = Self::Exited(exit);
                    Ok(true)
                }
                None if kill => {
                    p.terminate()?;
                    match p.wait_timeout(Duration::from_millis(500))? {
                        Some(_status) => {
                            //println!("terminated with {:?}", status);
                        }
                        None => {
                            p.kill()?;
                            let _status = p.wait()?;
                            //println!("killed with {:?} ", _status);
                        }
                    }

                    *self = Self::Killed;
                    Ok(true)
                }
                None => Ok(false),
            }
        } else {
            Ok(false)
        }
    }

    /// Wait up to `dur` for the process to exit on its own; failing that,
    /// terminate it with the same grace-then-kill sequence as [`Self::poll`].
    fn wait_or_kill(&mut self, dur: Duration) -> Result<bool> {
        if let Child::Process(p) = self {
            match p.wait_timeout(dur)? {
                Some(status) => {
                    *self = Child::Exited(status);
                    Ok(true)
                }
                None => {
                    // Graceful first: the previous code called `kill()`
                    // immediately after `terminate()`, which defeated the
                    // graceful shutdown, and its post-`wait()` re-kill was
                    // dead code (the process was already reaped).
                    p.terminate()?;
                    if p.wait_timeout(Duration::from_millis(500))?.is_none() {
                        p.kill()?;
                    }
                    // Reap so no zombie is left behind.
                    let _status = p.wait()?;
                    *self = Self::Killed;
                    Ok(true)
                }
            }
        } else {
            Ok(false)
        }
    }

    /// Exit status if the process is done; `None` while still running.
    /// Panics on `NotStarted` — callers only ask after a spawn.
    fn exit_status(&mut self) -> Option<ExitStatus> {
        match &self {
            Child::Process(_) => None,
            Child::Killed => Some(ExitStatus::Undetermined),
            Child::Exited(exit) => Some(*exit),
            Child::NotStarted => panic!("should not happen"),
        }
    }
}

/// Builder wiring one [`CommandActor`] per task onto the console/watcher.
pub struct CommandActorsBuilder {
    config: Config,
    console: Addr<ConsoleAct>,
    watcher: Addr<WatcherAct>,
    verbose: bool,
    watch_enabled_globally: bool,
}

impl CommandActorsBuilder {
    pub fn new(config: Config, console: Addr<ConsoleAct>, watcher: Addr<WatcherAct>) -> Self {
        Self {
            config,
            console,
            watcher,
verbose: false,
            watch_enabled_globally: true,
        }
    }

    /// Toggle verbose (debug) logging for all spawned command actors.
    pub fn verbose(self, toggle: bool) -> Self {
        Self {
            verbose: toggle,
            ..self
        }
    }

    /// Globally enable/disable file watching for all spawned command actors.
    pub fn globally_enable_watch(self, toggle: bool) -> Self {
        Self {
            watch_enabled_globally: toggle,
            ..self
        }
    }

    /// Spawn one [`CommandActor`] per task and kick off the ones that have
    /// no dependencies. Returns the map of task name -> actor address.
    pub async fn build(self) -> Result<HashMap<String, Addr<CommandActor>>> {
        let Self {
            config,
            console,
            watcher,
            verbose,
            watch_enabled_globally,
        } = self;

        let mut commands: HashMap<String, Addr<CommandActor>> = HashMap::new();

        // NOTE(review): this relies on `build_dag()` yielding ops in an order
        // where every entry of `nexts` was already inserted into `commands`
        // (dependents-first), otherwise the `unwrap` below panics — TODO
        // confirm that ordering contract in config::ops.
        for (op_name, nexts) in config.build_dag().unwrap().into_iter() {
            let task = config.ops.get(&op_name).unwrap();

            let exec_builder = ExecBuilder::new(task, &config).await?;
            let op = task.extend(op_name.clone(), &config);

            let actor = CommandActor::new(
                op,
                console.clone(),
                watcher.clone(),
                nexts
                    .iter()
                    .map(|e| commands.get(e).unwrap().clone())
                    .collect(),
                verbose,
                watch_enabled_globally,
                exec_builder,
            )
            .start();

            // Roots of the dependency graph start immediately; the rest are
            // triggered by their upstreams via Reload::Op.
            if task.depends_on.resolve().is_empty() {
                actor.do_send(Reload::Start)
            }
            commands.insert(op_name, actor);
        }

        Ok(commands)
    }
}

/// Actor owning one task's subprocess: spawns it, streams its output to the
/// console, reacts to file-watch events, and propagates reloads downstream.
pub struct CommandActor {
    operator: ExtendedTask,
    console: Addr<ConsoleAct>,
    watcher: Addr<WatcherAct>,
    // Dedicated arbiter so the blocking stdout-reader future does not stall
    // the actor's own mailbox.
    arbiter: Arbiter,
    child: Child,
    // Downstream actors to notify when this task (re)starts or finishes.
    nexts: Vec<Addr<CommandActor>>,
    self_addr: Option<Addr<CommandActor>>,
    // upstream task name -> number of pending reload announcements.
    pending_upstream: BTreeMap<String, usize>,
    verbose: bool,
    // Timestamp of the latest spawn; used to ignore stale reader callbacks.
    started_at: DateTime<Local>,
    watch: bool,
    death_invite: Option<PermaDeathInvite>,
    exec_builder: ExecBuilder,
}

impl CommandActor {
    #[allow(clippy::too_many_arguments)]
    pub fn new(
        operator: ExtendedTask,
        console: Addr<ConsoleAct>,
        watcher: Addr<WatcherAct>,
        nexts: Vec<Addr<CommandActor>>,
        verbose: bool,
        watch: bool,
        exec_builder: ExecBuilder,
    ) -> Self {
        Self {
            operator,
            console,
watcher,
            arbiter: Arbiter::new(),
            child: Child::NotStarted,
            nexts,
            self_addr: None,
            pending_upstream: BTreeMap::default(),
            verbose,
            started_at: Local::now(),
            watch,
            death_invite: None,
            exec_builder,
        }
    }

    /// Send a service-level line to this task's console panel.
    fn log_info(&self, log: String) {
        let job_name = self.operator.name.clone();

        self.console
            .do_send(Output::now(job_name, log, OutputKind::Service));
    }

    /// Like `log_info`, but only when verbose mode is on.
    fn log_debug(&self, log: String) {
        if self.verbose {
            self.log_info(log);
        }
    }

    /// Kill the child if it is still running; if that changed its state,
    /// tell downstream tasks this run produced a (terminated) result.
    fn ensure_stopped(&mut self) {
        if self.child.poll(true).unwrap() {
            self.send_reload();
        }
    }

    /// Human-readable summary of pending upstreams, e.g. "2×build, 1×gen".
    fn upstream(&self) -> String {
        Vec::from_iter(
            self.pending_upstream
                .iter()
                .map(|(k, v)| format!("{}×{}", v, k)),
        )
        .join(", ")
    }

    /// Notify downstream tasks that this task finished a run.
    fn send_reload(&self) {
        for next in (self.nexts).iter() {
            next.do_send(Reload::Op(self.operator.name.clone()));
        }
    }

    /// Notify downstream tasks that this task is about to restart.
    fn send_will_reload(&self) {
        for next in (self.nexts).iter() {
            next.do_send(WillReload {
                op_name: self.operator.name.clone(),
            });
        }
    }

    /// Spawn the task's subprocess (stderr merged into stdout) and hand the
    /// output stream to a future on the private arbiter, which pipes each
    /// line through the task's output-redirection rules.
    fn reload(&mut self) -> Result<()> {
        self.log_debug(self.exec_builder.as_string());
        // Reset the panel status: a fresh run has no exit code yet.
        self.console.do_send(PanelStatus {
            panel_name: self.operator.name.clone(),
            status: None,
        });

        let mut p = self
            .exec_builder
            .build()
            .unwrap()
            .stdout(Redirection::Pipe)
            .stderr(Redirection::Merge)
            .popen()
            .unwrap();

        let stdout = p.stdout.take().unwrap();
        let reader = BufReader::new(stdout);

        // Clone everything the reader future needs; it outlives this call.
        let console = self.console.clone();
        let op_name = self.operator.name.clone();
        let self_addr = self.self_addr.clone();
        let started_at = Local::now();
        let cwd = self.operator.cwd.clone();
        let watcher = self.watcher.clone();
        let 
task_pipes = self.operator.pipes.clone();
        let task_colors = self.operator.colors.clone();

        // Reader future: consume the child's stdout line by line, applying
        // the first matching pipe rule (tab or file redirection); unmatched
        // lines go to this task's own panel.
        let fut = async move {
            for line in reader.lines() {
                let mut line = line.unwrap();

                let task_pipe = task_pipes.iter().find(|pipe| pipe.regex.is_match(&line));

                if let Some(task_pipe) = task_pipe {
                    match &task_pipe.redirection {
                        OutputRedirection::Tab(name) => {
                            // Expand capture groups of the rule's regex into
                            // the (possibly dynamic) tab name.
                            let mut tab_name = "".to_string();
                            if let Some(capture) = task_pipe.regex.captures(&line) {
                                capture.expand(&name.clone(), &mut tab_name);
                            }
                            if let Some(addr) = &self_addr {
                                // tabs must be created on each loop,
                                // as their name can be dynamic
                                console.do_send(RegisterPanel {
                                    name: tab_name.to_owned(),
                                    addr: addr.clone(),
                                    colors: task_colors.clone(),
                                });
                            }
                            console.do_send(Output::now(
                                tab_name.to_owned(),
                                line,
                                OutputKind::Command,
                            ));
                        }
                        OutputRedirection::File(path) => {
                            let path = task_pipe.regex.replace(&line, path);
                            let mut path = Path::new(path.as_ref()).to_path_buf();

                            // Prepend the task workdir when the log path is
                            // relative. `is_relative()` is portable, unlike
                            // the previous `!starts_with("/")`, which treated
                            // `C:\...` paths as relative on Windows.
                            if path.is_relative() {
                                path = cwd.join(path);
                            }

                            let log_folder = Path::new(&path).parent().unwrap();
                            fs::create_dir_all(log_folder).unwrap();

                            // file must be created and opened on each loop
                            // as the path is dynamic, therefore there
                            // is no a way to optimize it to create it
                            // only once
                            let mut file = fs::OpenOptions::new()
                                .create(true)
                                .append(true)
                                .open(&path)
                                .unwrap();

                            // exclude file path from watcher before writing to it
                            // to avoid infinite loops
                            watcher.do_send(IgnorePath(path));

                            // append new line since strings from the buffer reader don't include it
                            line.push('\n');
                            file.write_all(line.as_bytes()).unwrap();
                        }
                    }
                } else {
console.do_send(Output::now(op_name.clone(), line, OutputKind::Command));
                }
            }

            // stdout closed: tell the actor this particular run is over
            // (tagged with `started_at` so stale runs are ignored).
            if let Some(addr) = self_addr {
                addr.do_send(StdoutTerminated { started_at });
            }
        };

        self.child = Child::Process(p);
        self.started_at = started_at;
        self.arbiter.spawn(fut);

        Ok(())
    }

    /// If the grim reaper invited this task to die, answer with the child's
    /// final exit status. Must only be called once the child is done.
    fn accept_death_invite(&mut self, cx: &mut Context<Self>) {
        if let Some(invite) = self.death_invite.take() {
            let status = match &self.child {
                Child::Killed => ExitStatus::Other(1),
                Child::Exited(val) => *val,
                child => panic!("invalid death invite acceptance: {child:?}"),
            };
            invite.rsvp::<Addr<Self>>(self.operator.name.clone(), status, cx);
        }
    }
}

/// Build a glob set from `patterns`, each resolved to an absolute path
/// against `cwd`. Extracted to remove the duplicated builder loops that
/// previously lived in `started` (one for watches, one for ignores).
fn build_globset(
    cwd: &Path,
    patterns: impl IntoIterator<Item = impl AsRef<Path>>,
) -> globset::GlobSet {
    let mut builder = GlobSetBuilder::new();
    for pattern in patterns {
        builder.add(
            Glob::new(&cwd.join(pattern).absolutize().unwrap().to_string_lossy()).unwrap(),
        );
    }
    builder.build().unwrap()
}

impl Actor for CommandActor {
    type Context = Context<Self>;

    fn started(&mut self, ctx: &mut Context<Self>) {
        let addr = ctx.address();
        self.self_addr = Some(addr.clone());

        // Register this task's panel with the console up front.
        self.console.do_send(RegisterPanel {
            name: self.operator.name.clone(),
            addr,
            colors: self.operator.colors.clone(),
        });

        let watches = self.operator.task.watch.resolve();

        if self.watch && !watches.is_empty() {
            let glob = WatchGlob {
                command: ctx.address(),
                on: build_globset(&self.operator.cwd, watches),
                off: build_globset(&self.operator.cwd, self.operator.task.ignore.resolve()),
            };

            self.watcher.do_send(glob);
        }
    }
fn stopped(&mut self, _: &mut Self::Context) {
        self.self_addr = None;
        // Make sure the subprocess dies with the actor.
        self.child.poll(true).unwrap();
    }
}

/// Announcement from an upstream task that it is about to restart.
#[derive(Message)]
#[rtype(result = "()")]
pub struct WillReload {
    pub op_name: String,
}

impl Handler<WillReload> for CommandActor {
    type Result = ();

    fn handle(&mut self, msg: WillReload, _: &mut Context<Self>) -> Self::Result {
        // Bump the pending counter for that upstream. The entry API does a
        // single map lookup instead of the previous remove + insert pair.
        *self.pending_upstream.entry(msg.op_name.clone()).or_insert(0) += 1;

        self.log_info(format!("Waiting on {}", msg.op_name));
        self.log_debug(format!("WAIT: +{} [{}]", msg.op_name, self.upstream()));

        self.ensure_stopped();

        // Propagate the announcement so the whole downstream chain pauses.
        self.send_will_reload();
    }
}

/// The different triggers that can (re)start a task.
#[derive(Message, Clone, Debug)]
#[rtype(result = "()")]
pub enum Reload {
    Start,
    Manual,
    Watch(String),
    Op(String),
}

impl Handler<Reload> for CommandActor {
    type Result = ();

    fn handle(&mut self, msg: Reload, _: &mut Context<Self>) -> Self::Result {
        self.ensure_stopped();

        match &msg {
            Reload::Start => {
                self.send_will_reload();
            }
            Reload::Manual => {
                if !self.pending_upstream.is_empty() {
                    self.log_info(format!(
                        "RELOAD: manual while pending on {}",
                        self.upstream()
                    ));
                } else {
                    self.log_info("RELOAD: manual".to_string());
                }
                self.send_will_reload();
            }
            Reload::Watch(files) => {
                self.log_info(format!("RELOAD: file changed: {files} "));
                self.send_will_reload();
            }
            Reload::Op(op_name) => {
                // One upstream finished: decrement its pending counter,
                // dropping the entry entirely when it reaches zero.
                let counter = self.pending_upstream.remove(op_name).unwrap();

                if counter > 1 {
                    self.pending_upstream.insert(op_name.clone(), counter - 1);
                }

                self.log_debug(format!("WAIT: -{} [{}]", op_name.clone(), self.upstream()));

                if !self.pending_upstream.is_empty()
{
                    // Still waiting on other upstreams: do not restart yet.
                    return;
                } else {
                    self.log_info("Upstream(s) finished".to_string());
                }
            }
        }

        self.reload().unwrap();
    }
}

/// Non-blocking status query: `None` while the child is still running.
#[derive(Message)]
#[rtype(result = "Result<Option<ExitStatus>, std::io::Error>")]
pub struct GetStatus;

impl Handler<GetStatus> for CommandActor {
    type Result = Result<Option<ExitStatus>, std::io::Error>;

    fn handle(&mut self, _: GetStatus, _: &mut Self::Context) -> Self::Result {
        self.child.poll(false).unwrap();
        Ok(self.child.exit_status())
    }
}

/// Blocking (actor-async) status query: resolves once the child exits.
#[derive(Message)]
#[rtype(result = "Result<ExitStatus, std::io::Error>")]
pub struct WaitStatus;

impl Handler<WaitStatus> for CommandActor {
    type Result = ResponseActFuture<Self, Result<ExitStatus, std::io::Error>>;

    fn handle(&mut self, _: WaitStatus, ctx: &mut Self::Context) -> Self::Result {
        let addr = ctx.address();
        // Poll our own mailbox every 20 ms until GetStatus reports an exit;
        // done through the address so the actor keeps processing messages.
        let f = async move {
            loop {
                if let Some(status) = addr.send(GetStatus).await.unwrap().unwrap() {
                    return status;
                }
                sleep(Duration::from_millis(20)).await;
            }
        }
        .into_actor(self)
        .map(|res, _act, _ctx| Ok(res));
        Box::pin(f)
    }
}

/// Internal: the stdout-reader future for the run started at `started_at`
/// has drained; the `started_at` tag lets stale runs be ignored.
#[derive(Message)]
#[rtype(result = "()")]
struct StdoutTerminated {
    pub started_at: DateTime<Local>,
}

impl Handler<StdoutTerminated> for CommandActor {
    type Result = ();

    fn handle(&mut self, msg: StdoutTerminated, cx: &mut Self::Context) -> Self::Result {
        // Only react if this notification belongs to the current run.
        if msg.started_at == self.started_at {
            // since there's a chance that child might not be done by this point
            // wait for it die for a maximum of 1 seconds
            // before pulling the plug
            if self
                .child
                .wait_or_kill(Duration::from_millis(1000))
                .unwrap()
            {
                self.send_reload();
            }
            let exit = self.child.exit_status();
            self.console.do_send(PanelStatus {
                panel_name: self.operator.name.clone(),
                status: exit,
            });
            self.accept_death_invite(cx);
        }
    }
}

/// Ask the actor to shut itself down (answering any pending death invite).
#[derive(Message)]
#[rtype(result = "()")]
pub struct PoisonPill;

impl Handler<PoisonPill> for CommandActor {
    type Result = ();

    fn handle(&mut self, _: PoisonPill, ctx: &mut Context<Self>) -> Self::Result {
        self.accept_death_invite(ctx);
        ctx.stop();
    }
}

impl Handler<PermaDeathInvite> for CommandActor {
    type Result = ();

    fn handle(&mut self, evt: PermaDeathInvite, cx: &mut Context<Self>) -> Self::Result {
        self.child.poll(false).unwrap();
        let status = match &self.child {
            Child::Killed => Some(ExitStatus::Other(1)),
            Child::Exited(val) => Some(*val),
            _ => None,
        };
        if let Some(status) = status {
            // Child already done: answer the reaper immediately.
            evt.rsvp::<Self>(self.operator.name.clone(), status, cx);
        } else {
            // Child still running: park the invite; it is answered from
            // the StdoutTerminated handler via accept_death_invite.
            self.death_invite = Some(evt);
        }
    }
}
--------------------------------------------------------------------------------
/src/actors/console.rs:
--------------------------------------------------------------------------------
use actix::prelude::*;
use chrono::prelude::*;
use crossterm::event::KeyEvent;
use ratatui::layout::Rect;
use ratatui::prelude::Alignment;
use ratatui::text::Line;
use ratatui::widgets::{List, ListItem, ListState};
use ratatui::Frame;
use std::borrow::Cow;
use std::rc::Rc;
use std::{cmp::min, collections::HashMap, io};
use std::{str, usize};
use subprocess::ExitStatus;

use ratatui::{
    backend::CrosstermBackend,
    layout::{Constraint, Direction, Layout},
    style::{Color, Modifier, Style},
    text::Span,
    widgets::{Block, Borders, Paragraph, Tabs, Wrap},
    Terminal,
};

use crossterm::{
    cursor,
    event::{self, Event, KeyCode, KeyModifiers, MouseEventKind},
    execute,
    terminal::{disable_raw_mode, enable_raw_mode, EnterAlternateScreen, LeaveAlternateScreen},
};

use crate::config::color::{ColorOption, Colorizer};

use 
super::command::{CommandActor, PoisonPill, Reload};

// Width (in columns) of the side menu, and the widest task label it can show.
const MENU_WIDTH: u16 = 30;
const MAX_CHARS: usize = (MENU_WIDTH - 6) as usize;

/// Where the menu sits relative to the log pane.
enum LayoutDirection {
    Horizontal,
    Vertical,
}

impl LayoutDirection {
    fn get_opposite_orientation(&self) -> Self {
        match self {
            Self::Horizontal => Self::Vertical,
            Self::Vertical => Self::Horizontal,
        }
    }
}

/// Whether the task menu is visible or the log view is fullscreen.
enum AppMode {
    Menu,
    View,
}

impl AppMode {
    fn get_opposite_mode(&self) -> Self {
        match self {
            Self::View => Self::Menu,
            Self::Menu => Self::View,
        }
    }
}

/// One tab of output: its log lines, scroll state, owning command actor,
/// last exit status, and color rules.
pub struct Panel {
    logs: Vec<(String, OutputKind)>,
    // For each wrapped display row, the index of the log line it came from.
    line_offsets: Vec<usize>,
    // Scroll offset from the bottom, in display rows.
    shift: u16,
    command: Addr<CommandActor>,
    status: Option<ExitStatus>,
    colors: Vec<ColorOption>,
}

impl Panel {
    pub fn new(command: Addr<CommandActor>, colors: Vec<ColorOption>) -> Self {
        Self {
            logs: Vec::default(),
            line_offsets: Vec::default(),
            shift: 0,
            command,
            status: None,
            colors,
        }
    }

    /// Recompute the row -> log-line mapping after a terminal resize:
    /// each log line contributes one entry per wrapped display row.
    pub fn sync_lines(&mut self, width: u16) {
        self.line_offsets = self
            .logs
            .iter()
            .enumerate()
            .flat_map(|(i, l)| vec![i; wrapped_lines(&l.0, width)])
            .collect();
    }
}

/// Actor owning the terminal UI: one panel per task plus global view state.
pub struct ConsoleActor {
    terminal: Terminal<CrosstermBackend<io::Stdout>>,
    // Name of the currently focused panel.
    index: String,
    // Panel names in display order.
    order: Vec<String>,
    arbiter: Arbiter,
    panels: HashMap<String, Panel>,
    timestamp: bool,
    layout_direction: LayoutDirection,
    mode: AppMode,
    list_state: ListState,
}

/// Split the frame into [log area, optional menu area] according to the
/// current mode and menu orientation.
fn chunks(mode: &AppMode, direction: &LayoutDirection, f: &Frame) -> Rc<[Rect]> {
    let chunks_constraints = match mode {
        AppMode::Menu => match direction {
            LayoutDirection::Horizontal => vec![Constraint::Min(0), Constraint::Length(3)],
            LayoutDirection::Vertical => vec![Constraint::Min(0), Constraint::Length(MENU_WIDTH)],
        },
        AppMode::View => vec![Constraint::Min(0)],
    };
    let direction = match direction {
118 | LayoutDirection::Horizontal => Direction::Vertical, 119 | LayoutDirection::Vertical => Direction::Horizontal, 120 | }; 121 | Layout::default() 122 | .direction(direction) 123 | .constraints(chunks_constraints) 124 | .split(f.size()) 125 | } 126 | 127 | impl ConsoleActor { 128 | pub fn new(order: Vec, timestamp: bool) -> Self { 129 | let stdout = io::stdout(); 130 | let backend = CrosstermBackend::new(stdout); 131 | let terminal = Terminal::new(backend).unwrap(); 132 | Self { 133 | terminal, 134 | index: order[0].clone(), 135 | order, 136 | arbiter: Arbiter::new(), 137 | panels: HashMap::default(), 138 | timestamp, 139 | mode: AppMode::Menu, 140 | layout_direction: LayoutDirection::Horizontal, 141 | list_state: ListState::default().with_selected(Some(0)), 142 | } 143 | } 144 | 145 | pub fn up(&mut self, shift: u16) { 146 | let log_height = self.get_log_height(); 147 | if let Some(focused_panel) = self.panels.get_mut(&self.index) { 148 | // maximum_scroll is the number of lines 149 | // overflowing in the current focused panel 150 | let lines = focused_panel.line_offsets.len() as u16; 151 | let maximum_scroll = lines - min(lines, log_height); 152 | 153 | // `focused_panel.shift` goes from 0 until maximum_scroll 154 | focused_panel.shift = min(focused_panel.shift + shift, maximum_scroll); 155 | } 156 | } 157 | 158 | pub fn down(&mut self, shift: u16) { 159 | if let Some(focused_panel) = self.panels.get_mut(&self.index) { 160 | if focused_panel.shift >= shift { 161 | focused_panel.shift -= shift; 162 | } else { 163 | focused_panel.shift = 0; 164 | } 165 | } 166 | } 167 | 168 | pub fn get_log_height(&mut self) -> u16 { 169 | let frame = self.terminal.get_frame(); 170 | chunks(&self.mode, &self.layout_direction, &frame)[0].height 171 | } 172 | 173 | pub fn go_to(&mut self, panel_index: usize) { 174 | if panel_index < self.order.len() { 175 | self.index.clone_from(&self.order[panel_index]); 176 | } 177 | } 178 | 179 | pub fn idx(&self) -> usize { 180 | self.order 
181 | .iter() 182 | .position(|e| e == &self.index) 183 | .unwrap_or(0) 184 | } 185 | 186 | pub fn next(&mut self) { 187 | self.index 188 | .clone_from(&self.order[(self.idx() + 1) % self.order.len()]); 189 | self.list_state.select(Some(self.idx())) 190 | } 191 | 192 | pub fn previous(&mut self) { 193 | self.index 194 | .clone_from(&self.order[(self.idx() + self.order.len() - 1) % self.order.len()]); 195 | self.list_state.select(Some(self.idx())) 196 | } 197 | 198 | fn clean(&mut self) { 199 | self.terminal 200 | .draw(|f| { 201 | let clean = Block::default().style(Style::default().fg(Color::Black)); 202 | f.render_widget(clean, f.size()); 203 | }) 204 | .unwrap(); 205 | } 206 | 207 | fn draw(&mut self) { 208 | let idx = self.idx(); 209 | if let Some(focused_panel) = &self.panels.get(&self.index) { 210 | self.terminal 211 | .draw(|f| { 212 | let chunks = chunks(&self.mode, &self.layout_direction, f); 213 | let logs = &focused_panel.logs; 214 | let shift = focused_panel.shift as usize; 215 | let line_offsets = &focused_panel.line_offsets; 216 | let lines = line_offsets.len(); 217 | let log_height = chunks[0].height as usize; 218 | 219 | let maximum_scroll = lines - min(lines, log_height); 220 | let scroll_offset = maximum_scroll - min(maximum_scroll, shift); 221 | let offset_end = min(lines, scroll_offset + log_height).wrapping_sub(1); 222 | 223 | let line_start = line_offsets.get(scroll_offset).cloned().unwrap_or(0); 224 | let line_end = line_offsets.get(offset_end).cloned().unwrap_or(0); 225 | 226 | let wrap_offset = line_offsets 227 | .get(..scroll_offset) 228 | .map(|offsets| { 229 | offsets 230 | .iter() 231 | .rev() 232 | .take_while(|&line| *line == line_start) 233 | .count() 234 | }) 235 | .unwrap_or(0); 236 | 237 | let lines = logs 238 | .get(line_start..=line_end) 239 | .map(|logs| { 240 | logs.iter() 241 | .flat_map(|(s, kind)| { 242 | Colorizer::new(&focused_panel.colors, kind.style()) 243 | .patch_text(s) 244 | }) 245 | .collect::>() 246 | }) 247 | 
.unwrap_or_default(); 248 | 249 | let paragraph = Paragraph::new(lines) 250 | .wrap(Wrap { trim: false }) 251 | .scroll((wrap_offset as u16, 0)); 252 | 253 | f.render_widget(paragraph, chunks[0]); 254 | 255 | //Format titles 256 | let titles: Vec = self 257 | .order 258 | .iter() 259 | .map(|panel| { 260 | let mut span = self 261 | .panels 262 | .get(panel) 263 | .map(|p| match p.status { 264 | Some(ExitStatus::Exited(0)) => Span::styled( 265 | format!("{}.", panel), 266 | Style::default().fg(Color::Green), 267 | ), 268 | Some(_) => Span::styled( 269 | format!("{}!", panel), 270 | Style::default().fg(Color::Red), 271 | ), 272 | None => Span::styled(format!("{}*", panel), Style::default()), 273 | }) 274 | .unwrap_or_else(|| Span::styled(panel, Style::default())); 275 | // Replace the titles whoms length is greater than MAX_CHARS with an 276 | // ellipse 277 | span = Span::styled( 278 | ellipse_if_too_long(span.content).into_owned(), 279 | span.style, 280 | ); 281 | Line::from(span) 282 | }) 283 | .collect(); 284 | /* 285 | titles.push(Spans::from(Span::raw(format!( 286 | "shift {} / window {} / lines {} / max {} / compute {}", 287 | focus.shift, 288 | log_height, 289 | logs.len(), 290 | focus.lines, 291 | f.size().width, 292 | )))); 293 | 294 | */ 295 | match self.mode { 296 | AppMode::Menu => { 297 | match self.layout_direction { 298 | LayoutDirection::Horizontal => { 299 | let tabs = Tabs::new(titles) 300 | .block(Block::default().borders(Borders::ALL)) 301 | .select(idx) 302 | .highlight_style( 303 | Style::default() 304 | .add_modifier(Modifier::BOLD) 305 | .bg(Color::DarkGray), 306 | ); 307 | f.render_widget(tabs, chunks[1]); 308 | } 309 | LayoutDirection::Vertical => { 310 | let list = List::new( 311 | titles 312 | .into_iter() 313 | .map(ListItem::new) 314 | .collect::>(), 315 | ) 316 | .block( 317 | Block::default() 318 | .borders(Borders::ALL) 319 | .title("Task List") 320 | .title_alignment(Alignment::Center), 321 | ) 322 | .highlight_style( 323 | 
Style::default() 324 | .bg(Color::DarkGray) 325 | .add_modifier(Modifier::BOLD), 326 | ); 327 | f.render_stateful_widget(list, chunks[1], &mut self.list_state) 328 | } 329 | }; 330 | } 331 | AppMode::View => {} 332 | }; 333 | }) 334 | .unwrap(); 335 | } 336 | } 337 | 338 | pub fn resize_panels(&mut self, width: u16) { 339 | for panel in self.panels.values_mut() { 340 | panel.shift = 0; 341 | panel.sync_lines(width) 342 | } 343 | } 344 | 345 | pub fn switch_layout(&mut self) { 346 | self.layout_direction = self.layout_direction.get_opposite_orientation(); 347 | let f = self.terminal.get_frame(); 348 | let chunks = chunks(&self.mode, &self.layout_direction, &f); 349 | self.resize_panels(chunks[0].width); 350 | } 351 | pub fn switch_mode(&mut self) { 352 | self.mode = self.mode.get_opposite_mode(); 353 | } 354 | } 355 | 356 | impl Actor for ConsoleActor { 357 | type Context = Context; 358 | 359 | fn started(&mut self, ctx: &mut Context) { 360 | enable_raw_mode().unwrap(); 361 | execute!( 362 | self.terminal.backend_mut(), 363 | cursor::Hide, 364 | EnterAlternateScreen, 365 | ) 366 | .unwrap(); 367 | 368 | let addr = ctx.address(); 369 | self.arbiter.spawn(async move { 370 | loop { 371 | addr.do_send(TermEvent(event::read().unwrap())); 372 | } 373 | }); 374 | 375 | self.clean(); 376 | self.draw(); 377 | } 378 | 379 | fn stopped(&mut self, _: &mut Self::Context) { 380 | self.arbiter.stop(); 381 | self.clean(); 382 | 383 | execute!( 384 | self.terminal.backend_mut(), 385 | LeaveAlternateScreen, 386 | cursor::Show, 387 | ) 388 | .unwrap(); 389 | disable_raw_mode().unwrap(); 390 | } 391 | } 392 | 393 | #[derive(Message, Debug)] 394 | #[rtype(result = "()")] 395 | pub struct TermEvent(Event); 396 | 397 | impl TermEvent { 398 | pub fn quit() -> Self { 399 | Self(Event::Key(KeyEvent::new( 400 | KeyCode::Char('q'), 401 | KeyModifiers::NONE, 402 | ))) 403 | } 404 | } 405 | 406 | impl Handler for ConsoleActor { 407 | type Result = (); 408 | 409 | fn handle(&mut self, msg: 
TermEvent, _: &mut Context) -> Self::Result { 410 | match msg.0 { 411 | Event::Key(e) => match (e.modifiers, e.code) { 412 | (KeyModifiers::CONTROL, KeyCode::Char('c')) 413 | | (KeyModifiers::NONE, KeyCode::Char('q')) => { 414 | self.panels 415 | .values() 416 | .for_each(|e| e.command.do_send(PoisonPill)); 417 | System::current().stop(); 418 | } 419 | (KeyModifiers::NONE, KeyCode::Up | KeyCode::Char('k')) 420 | | (KeyModifiers::CONTROL, KeyCode::Char('p')) => { 421 | self.up(1); 422 | } 423 | (KeyModifiers::NONE, KeyCode::Down | KeyCode::Char('j')) 424 | | (KeyModifiers::CONTROL, KeyCode::Char('n')) => { 425 | self.down(1); 426 | } 427 | (KeyModifiers::CONTROL, key_code) => match key_code { 428 | KeyCode::Char('f') => { 429 | let log_height = self.get_log_height(); 430 | self.down(log_height); 431 | } 432 | KeyCode::Char('u') => { 433 | let log_height = self.get_log_height(); 434 | self.up(log_height / 2); 435 | } 436 | KeyCode::Char('d') => { 437 | let log_height = self.get_log_height(); 438 | self.down(log_height / 2); 439 | } 440 | KeyCode::Char('b') => { 441 | let log_height = self.get_log_height(); 442 | self.up(log_height); 443 | } 444 | _ => {} 445 | }, 446 | (KeyModifiers::NONE, key_code) => match key_code { 447 | KeyCode::Char('r') => { 448 | if let Some(focused_panel) = self.panels.get(&self.index) { 449 | focused_panel.command.do_send(Reload::Manual); 450 | } 451 | } 452 | KeyCode::Tab => self.switch_layout(), 453 | KeyCode::Char('m') => self.switch_mode(), 454 | KeyCode::Right | KeyCode::Char('l') => { 455 | self.next(); 456 | } 457 | KeyCode::Left | KeyCode::Char('h') => { 458 | self.previous(); 459 | } 460 | KeyCode::Char(ch) => { 461 | if ch.is_ascii_digit() { 462 | let mut panel_index = ch.to_digit(10).unwrap() as usize; 463 | // first tab is key 1, therefore 464 | // in key 0 go to last tab 465 | if panel_index == 0 { 466 | panel_index = self.order.len() - 1; 467 | } else { 468 | panel_index -= 1; 469 | } 470 | self.go_to(panel_index); 471 | } 472 
| } 473 | _ => {} 474 | }, 475 | _ => {} 476 | }, 477 | Event::Resize(width, _) => self.resize_panels(width), 478 | Event::Mouse(e) => match e.kind { 479 | MouseEventKind::ScrollUp => { 480 | self.up(1); 481 | } 482 | MouseEventKind::ScrollDown => { 483 | self.down(1); 484 | } 485 | _ => {} 486 | }, 487 | _ => {} 488 | } 489 | self.draw(); 490 | } 491 | } 492 | 493 | #[derive(Debug)] 494 | pub enum OutputKind { 495 | Service, 496 | Command, 497 | } 498 | 499 | impl OutputKind { 500 | fn style(&self) -> Style { 501 | match self { 502 | OutputKind::Service => Style::default().bg(Color::DarkGray), 503 | OutputKind::Command => Style::default(), 504 | } 505 | } 506 | } 507 | 508 | #[derive(Message)] 509 | #[rtype(result = "()")] 510 | pub struct Output { 511 | panel_name: String, 512 | pub message: String, 513 | kind: OutputKind, 514 | timestamp: DateTime, 515 | } 516 | 517 | impl Output { 518 | pub fn now(panel_name: String, message: String, kind: OutputKind) -> Self { 519 | Self { 520 | panel_name, 521 | message, 522 | kind, 523 | timestamp: Local::now(), 524 | } 525 | } 526 | } 527 | 528 | fn wrapped_lines(message: &String, width: u16) -> usize { 529 | let clean = strip_ansi_escapes::strip(message); 530 | textwrap::wrap(str::from_utf8(&clean).unwrap(), width as usize).len() 531 | } 532 | 533 | // Replace the character that are max that MAX_CHARS with an ellipse ... 534 | fn ellipse_if_too_long(task_title: Cow<'_, str>) -> Cow { 535 | if task_title.len() >= MAX_CHARS { 536 | let mut task_title = task_title.to_string(); 537 | task_title.replace_range(MAX_CHARS.., "..."); 538 | Cow::Owned(task_title.to_string()) 539 | } else { 540 | task_title 541 | } 542 | } 543 | 544 | /// Formats a message with a timestamp in `"{timestamp} {message}"`. 
545 | fn format_message(message: &str, timestamp: &DateTime) -> String { 546 | format!("{} {}", timestamp.format("%H:%M:%S%.3f"), message) 547 | } 548 | 549 | impl Handler for ConsoleActor { 550 | type Result = (); 551 | 552 | fn handle(&mut self, msg: Output, _: &mut Context) -> Self::Result { 553 | let message = match self.timestamp { 554 | true => format_message(&msg.message, &msg.timestamp), 555 | false => msg.message, 556 | }; 557 | 558 | let panel = self.panels.get_mut(&msg.panel_name).unwrap(); 559 | let width = self.terminal.get_frame().size().width; 560 | let line_count = wrapped_lines(&message, width); 561 | let line_offset = panel.logs.len(); 562 | 563 | panel.line_offsets.extend(vec![line_offset; line_count]); 564 | panel.logs.push((message, msg.kind)); 565 | 566 | self.draw(); 567 | } 568 | } 569 | 570 | #[derive(Message)] 571 | #[rtype(result = "()")] 572 | pub struct RegisterPanel { 573 | pub name: String, 574 | pub addr: Addr, 575 | pub colors: Vec, 576 | } 577 | 578 | impl Handler for ConsoleActor { 579 | type Result = (); 580 | 581 | fn handle(&mut self, msg: RegisterPanel, _: &mut Context) -> Self::Result { 582 | if !self.panels.contains_key(&msg.name) { 583 | let new_panel = Panel::new(msg.addr, msg.colors); 584 | self.panels.insert(msg.name.clone(), new_panel); 585 | } 586 | if !self.order.contains(&msg.name) { 587 | self.order.push(msg.name); 588 | } 589 | self.draw(); 590 | } 591 | } 592 | 593 | #[derive(Message)] 594 | #[rtype(result = "()")] 595 | pub struct PanelStatus { 596 | pub panel_name: String, 597 | pub status: Option, 598 | } 599 | 600 | impl Handler for ConsoleActor { 601 | type Result = (); 602 | 603 | fn handle(&mut self, msg: PanelStatus, ctx: &mut Context) -> Self::Result { 604 | let focused_panel = self.panels.get_mut(&msg.panel_name).unwrap(); 605 | focused_panel.status = msg.status; 606 | 607 | if let Some(message) = msg.status.map(|c| format!("Status: {:?}", c)) { 608 | ctx.address() 609 | 
.do_send(Output::now(msg.panel_name, message, OutputKind::Service)); 610 | } 611 | 612 | self.draw(); 613 | } 614 | } 615 | --------------------------------------------------------------------------------