├── clippy.toml ├── src ├── airflow │ ├── model.rs │ ├── managed_services.rs │ ├── client │ │ ├── v1 │ │ │ ├── model │ │ │ │ ├── mod.rs │ │ │ │ ├── log.rs │ │ │ │ ├── dagstats.rs │ │ │ │ ├── dagrun.rs │ │ │ │ ├── dag.rs │ │ │ │ └── taskinstance.rs │ │ │ ├── dagstats.rs │ │ │ ├── log.rs │ │ │ ├── dag.rs │ │ │ ├── dagrun.rs │ │ │ └── mod.rs │ │ ├── v2 │ │ │ ├── model │ │ │ │ ├── mod.rs │ │ │ │ ├── dagstats.rs │ │ │ │ ├── log.rs │ │ │ │ ├── dagrun.rs │ │ │ │ ├── taskinstance.rs │ │ │ │ └── dag.rs │ │ │ ├── dagstats.rs │ │ │ ├── log.rs │ │ │ ├── dag.rs │ │ │ ├── dagrun.rs │ │ │ └── mod.rs │ │ └── base.rs │ ├── model │ │ └── common │ │ │ ├── mod.rs │ │ │ ├── log.rs │ │ │ ├── dagstats.rs │ │ │ ├── dagrun.rs │ │ │ └── taskinstance.rs │ ├── traits │ │ ├── dagstats.rs │ │ ├── log.rs │ │ ├── dag.rs │ │ ├── dagrun.rs │ │ ├── taskinstance.rs │ │ └── mod.rs │ ├── client.rs │ ├── config │ │ └── paths.rs │ └── managed_services │ │ └── conveyor.rs ├── app │ ├── model │ │ ├── popup │ │ │ ├── dags │ │ │ │ ├── mod.rs │ │ │ │ └── commands.rs │ │ │ ├── config │ │ │ │ ├── mod.rs │ │ │ │ └── commands.rs │ │ │ ├── taskinstances │ │ │ │ ├── mod.rs │ │ │ │ ├── commands.rs │ │ │ │ └── clear.rs │ │ │ ├── dagruns │ │ │ │ ├── mod.rs │ │ │ │ ├── commands.rs │ │ │ │ ├── trigger.rs │ │ │ │ └── clear.rs │ │ │ ├── mod.rs │ │ │ ├── error.rs │ │ │ ├── warning.rs │ │ │ └── commands_help.rs │ │ └── filter.rs │ ├── events.rs │ ├── events │ │ ├── custom.rs │ │ └── generator.rs │ └── worker │ │ ├── browser.rs │ │ ├── logs.rs │ │ ├── config.rs │ │ ├── taskinstances.rs │ │ ├── dagruns.rs │ │ └── dags.rs ├── commands.rs ├── airflow.rs ├── commands │ ├── config.rs │ ├── config │ │ ├── list.rs │ │ ├── remove.rs │ │ ├── managed_services.rs │ │ ├── model.rs │ │ ├── update.rs │ │ └── add.rs │ └── run.rs ├── ui │ ├── common.rs │ ├── init_screen.rs │ └── constants.rs ├── lib.rs ├── main.rs └── ui.rs ├── .gitattributes ├── vhs ├── flowrs.gif ├── add_config.gif ├── add_config.tape └── flowrs.tape ├── .gitignore ├── 
image ├── rotation │ └── png │ │ ├── 0.png │ │ ├── 1.png │ │ ├── 10.png │ │ ├── 11.png │ │ ├── 12.png │ │ ├── 13.png │ │ ├── 14.png │ │ ├── 15.png │ │ ├── 16.png │ │ ├── 2.png │ │ ├── 3.png │ │ ├── 4.png │ │ ├── 5.png │ │ ├── 6.png │ │ ├── 7.png │ │ ├── 8.png │ │ └── 9.png └── README │ └── 1683789045509.png ├── .github ├── dependabot.yml ├── ISSUE_TEMPLATE │ ├── bug_report.md │ └── feature_request.md └── workflows │ ├── ci.yml │ ├── release-plz.yml │ └── integration.yml ├── release-plz.toml ├── infrastructure ├── aws │ ├── .gitignore │ ├── terraform.tfvars.example │ ├── outputs.tf │ └── variables.tf ├── azure │ └── README.md ├── astronomer │ └── README.md ├── gcp │ └── README.md └── README.md ├── Makefile ├── dist-workspace.toml ├── LICENSE ├── tests ├── common │ └── mod.rs ├── v1_api_test.rs ├── v2_api_test.rs └── managed_services_test.rs ├── Cargo.toml ├── README.md └── docs ├── plans ├── 2025-12-14-binary-size-reduction-design.md ├── 2025-12-13-xdg-config-design.md ├── 2025-12-18-focus-tracking-design.md ├── 2025-12-18-ci-testing-design.md └── 2024-12-14-tabs-navigation-design.md └── client-architecture.md /clippy.toml: -------------------------------------------------------------------------------- 1 | too-many-lines-threshold = 300 -------------------------------------------------------------------------------- /src/airflow/model.rs: -------------------------------------------------------------------------------- 1 | pub mod common; 2 | -------------------------------------------------------------------------------- /src/app/model/popup/dags/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod commands; 2 | -------------------------------------------------------------------------------- /src/commands.rs: -------------------------------------------------------------------------------- 1 | pub mod config; 2 | pub mod run; 3 | -------------------------------------------------------------------------------- 
/src/app/model/popup/config/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod commands; 2 | -------------------------------------------------------------------------------- /src/app/events.rs: -------------------------------------------------------------------------------- 1 | pub mod custom; 2 | pub mod generator; 3 | -------------------------------------------------------------------------------- /src/airflow/managed_services.rs: -------------------------------------------------------------------------------- 1 | pub mod astronomer; 2 | pub mod conveyor; 3 | pub mod mwaa; 4 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | *.gif filter=lfs diff=lfs merge=lfs -text 2 | *.png filter=lfs diff=lfs merge=lfs -text 3 | -------------------------------------------------------------------------------- /src/airflow.rs: -------------------------------------------------------------------------------- 1 | pub mod client; 2 | pub mod config; 3 | pub mod managed_services; 4 | pub mod model; 5 | pub mod traits; 6 | -------------------------------------------------------------------------------- /src/airflow/client/v1/model/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod dag; 2 | pub mod dagrun; 3 | pub mod dagstats; 4 | pub mod log; 5 | pub mod taskinstance; 6 | -------------------------------------------------------------------------------- /src/airflow/client/v2/model/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod dag; 2 | pub mod dagrun; 3 | pub mod dagstats; 4 | pub mod log; 5 | pub mod taskinstance; 6 | -------------------------------------------------------------------------------- /src/commands/config.rs: -------------------------------------------------------------------------------- 
1 | pub mod add; 2 | pub mod list; 3 | pub mod managed_services; 4 | pub mod model; 5 | pub mod remove; 6 | pub mod update; 7 | -------------------------------------------------------------------------------- /vhs/flowrs.gif: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:63542bd7c9eea510df10144cb139a36b052f52a06f24699229bb1c96319821b3 3 | size 3347311 4 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | target 2 | .flowrs 3 | flowrs 4 | logs 5 | plugins 6 | dags/** 7 | .DS_Store 8 | .vscode 9 | *.tiff 10 | .claude 11 | 12 | flowrs-debug* 13 | -------------------------------------------------------------------------------- /vhs/add_config.gif: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:92f18d345fd40bb398103ef631e69eb814db94092933480b74c9734a55ccdc5d 3 | size 97838 4 | -------------------------------------------------------------------------------- /image/rotation/png/0.png: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:c44938a6563c787314b54828fe2e311c4da3896321bd286132f298ef9c675825 3 | size 179755 4 | -------------------------------------------------------------------------------- /image/rotation/png/1.png: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:8da19232b717a24e25d5b593f94dcb6737a7c59d8841cf2ea9aaa3b32e7367f4 3 | size 312307 4 | -------------------------------------------------------------------------------- /image/rotation/png/10.png: -------------------------------------------------------------------------------- 1 | version 
https://git-lfs.github.com/spec/v1 2 | oid sha256:527fdf9ff9f61833a4330342253504432a026db9eafe1eb3bdaca62c50a64b93 3 | size 321390 4 | -------------------------------------------------------------------------------- /image/rotation/png/11.png: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:12bf2f655cbe0582bd30263ee915ce0594fc83f132e852f6e8393b3348663c8f 3 | size 311175 4 | -------------------------------------------------------------------------------- /image/rotation/png/12.png: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:e29e97f1faf0b69189b2dbc0b48a7ac1518651120cb39ba8e09a4ba02b8f4fe7 3 | size 225242 4 | -------------------------------------------------------------------------------- /image/rotation/png/13.png: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:82abf5bb28362b8133b1a58e44a1133224e230a08f94c4a958ccff3844e9bfd8 3 | size 309509 4 | -------------------------------------------------------------------------------- /image/rotation/png/14.png: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:8a6420d863fb88a05604a90f394b19d410552f2ec4f5a962fccfba761913c985 3 | size 322883 4 | -------------------------------------------------------------------------------- /image/rotation/png/15.png: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:d46c8d15fdb47abf52f6957f3b443f60021b5f69cc479355672318051bf3271f 3 | size 309497 4 | -------------------------------------------------------------------------------- /image/rotation/png/16.png: 
-------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:c44938a6563c787314b54828fe2e311c4da3896321bd286132f298ef9c675825 3 | size 179755 4 | -------------------------------------------------------------------------------- /image/rotation/png/2.png: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:191017468dfd4c397d59439638b9c41e3a71d6f070056beb2364ecc466e2cbc7 3 | size 322144 4 | -------------------------------------------------------------------------------- /image/rotation/png/3.png: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:c8f9ab3b6af07c331b3c72b0c32a806ebeeae74d6cc4cb5be7292c8acef6032b 3 | size 299989 4 | -------------------------------------------------------------------------------- /image/rotation/png/4.png: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:eecd5851ec337e8d5fd3d72a29c5d3b7729c7878e583bc58bbd64aea0ea4ed14 3 | size 225666 4 | -------------------------------------------------------------------------------- /image/rotation/png/5.png: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:1591acda910fbe38814008e42312727bd45cdfd93d330eb4b57f14d7de173b9b 3 | size 301365 4 | -------------------------------------------------------------------------------- /image/rotation/png/6.png: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:e71d38757a55d4027a0ef81fada284d5c86af1bc99ddff319e0271fc485702ee 3 | size 324997 4 | -------------------------------------------------------------------------------- 
/image/rotation/png/7.png: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:51ad23e0fa260ad0b08acb3766b915c05d516e47c0a8af6cf2fc2d13184eca33 3 | size 308438 4 | -------------------------------------------------------------------------------- /image/rotation/png/8.png: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:99fcf2c454ceb636ed06a70d8c30965915871a38f91ff21620e623487f0b4de8 3 | size 227028 4 | -------------------------------------------------------------------------------- /image/rotation/png/9.png: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:a33df7cf8fce199b26430f8dcec5ab9418511a34a4957031bf0742267c54fc50 3 | size 301126 4 | -------------------------------------------------------------------------------- /image/README/1683789045509.png: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:e1a33a4f011f36f2990c06da43f1a0baf3a5ea26b025bd4bfe308bd58daf52c8 3 | size 23497 4 | -------------------------------------------------------------------------------- /src/airflow/client/v1/model/log.rs: -------------------------------------------------------------------------------- 1 | use serde::{Deserialize, Serialize}; 2 | 3 | #[derive(Debug, Serialize, Deserialize)] 4 | pub struct Log { 5 | #[serde(rename = "continuation_token")] 6 | pub continuation_token: Option, 7 | pub content: String, 8 | } 9 | -------------------------------------------------------------------------------- /src/app/model/popup/taskinstances/mod.rs: -------------------------------------------------------------------------------- 1 | use clear::ClearTaskInstancePopup; 2 | use mark::MarkTaskInstancePopup; 3 | 4 | pub mod clear; 5 
| pub mod commands; 6 | pub mod mark; 7 | 8 | pub enum TaskInstancePopUp { 9 | Clear(ClearTaskInstancePopup), 10 | Mark(MarkTaskInstancePopup), 11 | } 12 | -------------------------------------------------------------------------------- /src/app/model/popup/dagruns/mod.rs: -------------------------------------------------------------------------------- 1 | use clear::ClearDagRunPopup; 2 | use mark::MarkDagRunPopup; 3 | use trigger::TriggerDagRunPopUp; 4 | 5 | pub mod clear; 6 | pub mod commands; 7 | pub mod mark; 8 | pub mod trigger; 9 | 10 | pub enum DagRunPopUp { 11 | Clear(ClearDagRunPopup), 12 | Mark(MarkDagRunPopup), 13 | Trigger(TriggerDagRunPopUp), 14 | } 15 | -------------------------------------------------------------------------------- /src/airflow/model/common/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod dag; 2 | pub mod dagrun; 3 | pub mod dagstats; 4 | pub mod log; 5 | pub mod taskinstance; 6 | 7 | // Re-export common types for easier access 8 | pub use dag::{Dag, DagList}; 9 | pub use dagrun::{DagRun, DagRunList}; 10 | pub use dagstats::{DagStatistic, DagStatsResponse}; 11 | pub use log::Log; 12 | pub use taskinstance::{TaskInstance, TaskInstanceList}; 13 | -------------------------------------------------------------------------------- /src/airflow/traits/dagstats.rs: -------------------------------------------------------------------------------- 1 | use anyhow::Result; 2 | use async_trait::async_trait; 3 | 4 | use crate::airflow::model::common::DagStatsResponse; 5 | 6 | /// Trait for DAG Statistics operations 7 | #[async_trait] 8 | pub trait DagStatsOperations: Send + Sync { 9 | /// Get DAG statistics for the specified DAG IDs 10 | async fn get_dag_stats(&self, dag_ids: Vec<&str>) -> Result; 11 | } 12 | -------------------------------------------------------------------------------- /src/airflow/traits/log.rs: -------------------------------------------------------------------------------- 1 
| use anyhow::Result; 2 | use async_trait::async_trait; 3 | 4 | use crate::airflow::model::common::Log; 5 | 6 | /// Trait for Log operations 7 | #[async_trait] 8 | pub trait LogOperations: Send + Sync { 9 | /// Get task logs for a specific task instance and try number 10 | async fn get_task_logs( 11 | &self, 12 | dag_id: &str, 13 | dag_run_id: &str, 14 | task_id: &str, 15 | task_try: u16, 16 | ) -> Result; 17 | } 18 | -------------------------------------------------------------------------------- /vhs/add_config.tape: -------------------------------------------------------------------------------- 1 | Set Theme "Dark+" 2 | Require flowrs 3 | Set Height 400 4 | Set Framerate 24 5 | 6 | Sleep 1s 7 | Type "flowrs config add" 8 | Enter 9 | Sleep 0.5s 10 | Type "airflow-test" 11 | Enter 12 | Sleep 0.5s 13 | Type "http://localhost:8080" 14 | Sleep 0.5s 15 | Enter 16 | Sleep 0.5s 17 | Enter 18 | Sleep 0.5s 19 | Enter 20 | Type "airflow" 21 | Sleep 0.5s 22 | Enter 23 | Type "airflow" 24 | Sleep 0.5s 25 | Enter 26 | Type "airflow" 27 | Sleep 0.5s 28 | Enter 29 | Sleep 1s 30 | 31 | -------------------------------------------------------------------------------- /src/airflow/client/v1/model/dagstats.rs: -------------------------------------------------------------------------------- 1 | use serde::{Deserialize, Serialize}; 2 | 3 | #[derive(Debug, Clone, Serialize, Deserialize)] 4 | pub struct DagStatsResponse { 5 | pub dags: Vec, 6 | pub total_entries: u64, 7 | } 8 | 9 | #[derive(Debug, Clone, Serialize, Deserialize)] 10 | pub struct DagStatistics { 11 | pub dag_id: String, 12 | pub stats: Vec, 13 | } 14 | 15 | #[derive(Debug, Clone, Serialize, Deserialize)] 16 | pub struct DagStatistic { 17 | pub state: String, 18 | pub count: u64, 19 | } 20 | -------------------------------------------------------------------------------- /src/airflow/client/v2/model/dagstats.rs: -------------------------------------------------------------------------------- 1 | use serde::{Deserialize, 
Serialize}; 2 | 3 | #[derive(Debug, Clone, Serialize, Deserialize)] 4 | pub struct DagStatsResponse { 5 | pub dags: Vec, 6 | pub total_entries: u64, 7 | } 8 | 9 | #[derive(Debug, Clone, Serialize, Deserialize)] 10 | pub struct DagStatistics { 11 | pub dag_id: String, 12 | pub stats: Vec, 13 | } 14 | 15 | #[derive(Debug, Clone, Serialize, Deserialize)] 16 | pub struct DagStatistic { 17 | pub state: String, 18 | pub count: u64, 19 | } 20 | -------------------------------------------------------------------------------- /src/ui/common.rs: -------------------------------------------------------------------------------- 1 | use ratatui::{ 2 | style::Style, 3 | text::{Line, Span}, 4 | }; 5 | 6 | use super::constants::{AirflowStateColor, DEFAULT_STYLE}; 7 | 8 | pub fn create_headers<'a>( 9 | headers: impl IntoIterator, 10 | ) -> impl Iterator> { 11 | headers 12 | .into_iter() 13 | .map(|h| Line::from(h).style(DEFAULT_STYLE).centered()) 14 | } 15 | 16 | pub fn state_to_colored_square<'a>(color: AirflowStateColor) -> Span<'a> { 17 | Span::styled("■", Style::default().fg(color.into())) 18 | } 19 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | # To get started with Dependabot version updates, you'll need to specify which 2 | # package ecosystems to update and where the package manifests are located. 
3 | # Please see the documentation for all configuration options: 4 | # https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates 5 | 6 | version: 2 7 | updates: 8 | - package-ecosystem: "cargo" # See documentation for possible values 9 | directory: "/" # Location of package manifests 10 | schedule: 11 | interval: "weekly" 12 | -------------------------------------------------------------------------------- /src/app/model/popup/config/commands.rs: -------------------------------------------------------------------------------- 1 | use std::sync::LazyLock; 2 | 3 | use crate::app::model::popup::commands_help::{Command, CommandPopUp, DefaultCommands}; 4 | 5 | pub static CONFIG_COMMAND_POP_UP: LazyLock = LazyLock::new(|| { 6 | let mut commands = vec![Command { 7 | name: "Open", 8 | key_binding: "o", 9 | description: "Open Airflow Web UI", 10 | }]; 11 | commands.append(&mut DefaultCommands::new().0); 12 | CommandPopUp { 13 | title: "Config Commands".into(), 14 | commands, 15 | } 16 | }); 17 | -------------------------------------------------------------------------------- /src/app/model/popup/dags/commands.rs: -------------------------------------------------------------------------------- 1 | use std::sync::LazyLock; 2 | 3 | use crate::app::model::popup::commands_help::{Command, CommandPopUp, DefaultCommands}; 4 | 5 | pub static DAG_COMMAND_POP_UP: LazyLock = LazyLock::new(|| { 6 | let mut commands = vec![Command { 7 | name: "Toggle pauze", 8 | key_binding: "p", 9 | description: "Toggle pauze/unpauze a DAG", 10 | }]; 11 | commands.append(&mut DefaultCommands::new().0); 12 | CommandPopUp { 13 | title: "DAG Commands".into(), 14 | commands, 15 | } 16 | }); 17 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | // Library exports for integration tests and external usage 2 | // 3 | // Clippy pedantic 
allows - these are library documentation concerns, not relevant for a TUI application 4 | #![allow(clippy::missing_errors_doc)] 5 | #![allow(clippy::missing_panics_doc)] 6 | #![allow(clippy::must_use_candidate)] 7 | 8 | use std::sync::LazyLock; 9 | 10 | pub mod airflow; 11 | pub mod app; 12 | pub mod commands; 13 | pub mod ui; 14 | 15 | use airflow::config::paths::ConfigPaths; 16 | 17 | pub static CONFIG_PATHS: LazyLock = LazyLock::new(ConfigPaths::resolve); 18 | -------------------------------------------------------------------------------- /release-plz.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | # enable changelog updates 3 | changelog_update = true 4 | # update dependencies with `cargo update` 5 | dependencies_update = true 6 | # create tags for the releases 7 | git_tag_enable = true 8 | # disable GitHub releases 9 | git_release_enable = false 10 | # labels for the release PR 11 | pr_labels = ["release"] 12 | # disallow updating repositories with uncommitted changes 13 | allow_dirty = false 14 | # disallow packaging with uncommitted changes 15 | publish_allow_dirty = false 16 | # disable running `cargo-semver-checks` 17 | semver_check = false 18 | -------------------------------------------------------------------------------- /src/airflow/traits/dag.rs: -------------------------------------------------------------------------------- 1 | use anyhow::Result; 2 | use async_trait::async_trait; 3 | 4 | use crate::airflow::model::common::{Dag, DagList}; 5 | 6 | /// Trait for DAG operations 7 | #[async_trait] 8 | pub trait DagOperations: Send + Sync { 9 | /// List all DAGs 10 | async fn list_dags(&self) -> Result; 11 | 12 | /// Toggle a DAG's paused state 13 | async fn toggle_dag(&self, dag_id: &str, is_paused: bool) -> Result<()>; 14 | 15 | /// Get DAG source code (uses `file_token` in v1, `dag_id` in v2) 16 | async fn get_dag_code(&self, dag: &Dag) -> Result; 17 | } 18 | 
-------------------------------------------------------------------------------- /infrastructure/aws/.gitignore: -------------------------------------------------------------------------------- 1 | # Local .terraform directories 2 | **/.terraform/* 3 | 4 | # .tfstate files 5 | *.tfstate 6 | *.tfstate.* 7 | 8 | # Crash log files 9 | crash.log 10 | crash.*.log 11 | 12 | # Exclude all .tfvars files, which are likely to contain sensitive data 13 | *.tfvars 14 | *.tfvars.json 15 | 16 | # Ignore override files as they are usually used to override resources locally 17 | override.tf 18 | override.tf.json 19 | *_override.tf 20 | *_override.tf.json 21 | 22 | # Ignore CLI configuration files 23 | .terraformrc 24 | terraform.rc 25 | 26 | # Lock file 27 | .terraform.lock.hcl 28 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | build: 5 | cargo build --release && cp target/release/flowrs . 
6 | cp flowrs /usr/local/bin/flowrs 7 | 8 | 9 | logo: 10 | @ascii-image-converter image/README/1683789045509.png -C -W 101 -c 11 | 12 | rotating_logo: 13 | @tiff2png -force -destdir image/rotation/png/ image/rotation/tiff/*; 14 | for file in ./image/rotation/png/*; do \ 15 | set -e; \ 16 | file_name=$$(basename $$file .png); \ 17 | echo "Processing $$file_name"; \ 18 | ascii-image-converter $$file -C -W 101 -c > image/rotation/ascii/$${file_name}.ascii; \ 19 | done 20 | 21 | run: 22 | FLOWRS_LOG=debug cargo run -------------------------------------------------------------------------------- /vhs/flowrs.tape: -------------------------------------------------------------------------------- 1 | Set Shell zsh 2 | Set Framerate 24 3 | Set Width 1600 4 | Set Height 900 5 | Require flowrs 6 | 7 | Sleep 1s 8 | Type "flowrs" 9 | Sleep 1s 10 | Enter 11 | Sleep 3s 12 | Type "jjjj" 13 | Sleep 0.5s 14 | Type "/bash" 15 | Sleep 0.5s 16 | Enter 17 | Sleep 1s 18 | Enter 19 | Sleep 1s 20 | Type "jjjj" 21 | Sleep 1s 22 | Type "c" 23 | Sleep 0.5s 24 | Type "h" 25 | Sleep 1s 26 | Enter 27 | Sleep 1.5s 28 | Type "jjjj" 29 | Enter 30 | Sleep 1s 31 | Type "jjjj" 32 | Sleep 1s 33 | Enter 34 | Sleep 3s 35 | Escape 36 | Sleep 1s 37 | Escape 38 | Sleep 1s 39 | Escape 40 | Sleep 1s 41 | Escape 42 | Sleep 1s 43 | Type "q" 44 | Sleep 1s 45 | 46 | -------------------------------------------------------------------------------- /infrastructure/aws/terraform.tfvars.example: -------------------------------------------------------------------------------- 1 | # Copy this file to terraform.tfvars and customize the values 2 | 3 | # AWS region for deployment 4 | aws_region = "us-east-1" 5 | 6 | # Name of the MWAA environment (must be unique in your account) 7 | environment_name = "flowrs-mwaa" 8 | 9 | # Airflow version (see AWS MWAA docs for available versions) 10 | airflow_version = "2.10.3" 11 | 12 | # Environment class (mw1.small, mw1.medium, mw1.large, mw1.xlarge, mw1.2xlarge) 13 | 
environment_class = "mw1.small" 14 | 15 | # Worker configuration 16 | max_workers = 2 17 | min_workers = 1 18 | 19 | # Webserver access mode (PUBLIC_ONLY or PRIVATE_ONLY) 20 | webserver_access_mode = "PUBLIC_ONLY" 21 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Describe the bug** 11 | A clear and concise description of what the bug is. 12 | 13 | **To Reproduce** 14 | Steps to reproduce the behavior 15 | 16 | **Expected behavior** 17 | A clear and concise description of what you expected to happen. 18 | 19 | **Screenshots** 20 | If applicable, add screenshots to help explain your problem. 21 | 22 | **Desktop (please complete the following information):** 23 | - OS: [e.g. iOS] 24 | - Version [e.g. 22] 25 | 26 | **Additional context** 27 | Add any other context about the problem here. 28 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Is your feature request related to a problem? Please describe.** 11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 12 | 13 | **Describe the solution you'd like** 14 | A clear and concise description of what you want to happen. 15 | 16 | **Describe alternatives you've considered** 17 | A clear and concise description of any alternative solutions or features you've considered. 18 | 19 | **Additional context** 20 | Add any other context or screenshots about the feature request here. 
21 | -------------------------------------------------------------------------------- /src/app/events/custom.rs: -------------------------------------------------------------------------------- 1 | use crossterm::event::KeyEvent; 2 | 3 | #[derive(Debug, Clone, PartialEq)] 4 | pub enum FlowrsEvent { 5 | Tick, 6 | Key(KeyEvent), 7 | Mouse, 8 | FocusGained, 9 | FocusLost, 10 | } 11 | 12 | impl From for FlowrsEvent { 13 | fn from(ev: crossterm::event::Event) -> Self { 14 | match ev { 15 | crossterm::event::Event::Key(key) => FlowrsEvent::Key(key), 16 | crossterm::event::Event::Mouse(_) => FlowrsEvent::Mouse, 17 | crossterm::event::Event::FocusGained => FlowrsEvent::FocusGained, 18 | crossterm::event::Event::FocusLost => FlowrsEvent::FocusLost, 19 | _ => FlowrsEvent::Tick, 20 | } 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /src/airflow/client.rs: -------------------------------------------------------------------------------- 1 | pub mod base; 2 | pub mod v1; 3 | pub mod v2; 4 | 5 | use anyhow::Result; 6 | use std::sync::Arc; 7 | 8 | use crate::airflow::config::{AirflowConfig, AirflowVersion}; 9 | use crate::airflow::traits::AirflowClient; 10 | 11 | pub use base::BaseClient; 12 | pub use v1::V1Client; 13 | pub use v2::V2Client; 14 | 15 | /// Create an Airflow client based on the configuration version 16 | pub fn create_client(config: &AirflowConfig) -> Result> { 17 | let base = BaseClient::new(config.clone())?; 18 | 19 | match config.version { 20 | AirflowVersion::V2 => Ok(Arc::new(V1Client::new(base))), // V2 uses API v1 21 | AirflowVersion::V3 => Ok(Arc::new(V2Client::new(base))), // V3 uses API v2 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /src/airflow/client/v1/dagstats.rs: -------------------------------------------------------------------------------- 1 | use anyhow::Result; 2 | use async_trait::async_trait; 3 | use reqwest::Method; 4 | 5 | use 
/// Trait for DAG Run operations.
///
/// Implemented per API version (v1 for Airflow 2.x, v2 for Airflow 3.x) so the
/// app can talk to either server through one interface.
#[async_trait]
pub trait DagRunOperations: Send + Sync {
    /// List DAG runs for a specific DAG.
    async fn list_dagruns(&self, dag_id: &str) -> Result<DagRunList>;

    /// List all DAG runs across all DAGs.
    #[allow(unused)]
    async fn list_all_dagruns(&self) -> Result<DagRunList>;

    /// Mark a DAG run with a specific status.
    // NOTE(review): `status` is stringly-typed ("success"/"failed"/…); an enum
    // shared with the UI layer would prevent typos — confirm accepted values.
    async fn mark_dag_run(&self, dag_id: &str, dag_run_id: &str, status: &str) -> Result<()>;

    /// Clear a DAG run, resetting its task instances for re-execution.
    async fn clear_dagrun(&self, dag_id: &str, dag_run_id: &str) -> Result<()>;

    /// Trigger a new DAG run, optionally at a specific logical date
    /// (ISO-8601 string; `None` lets the server pick the current time).
    async fn trigger_dag_run(&self, dag_id: &str, logical_date: Option<&str>) -> Result<()>;
}
23 | .error_for_status()?; 24 | let dag_stats = response.json::().await?; 25 | Ok(dag_stats.into()) 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /src/airflow/model/common/log.rs: -------------------------------------------------------------------------------- 1 | use crate::airflow::client::v1; 2 | use crate::airflow::client::v2; 3 | use serde::{Deserialize, Serialize}; 4 | 5 | /// Common Log model used by the application 6 | #[derive(Debug, Clone, Serialize, Deserialize)] 7 | pub struct Log { 8 | pub continuation_token: Option, 9 | pub content: String, 10 | } 11 | 12 | // From trait implementations for v1 models 13 | impl From for Log { 14 | fn from(value: v1::model::log::Log) -> Self { 15 | Log { 16 | continuation_token: value.continuation_token, 17 | content: value.content, 18 | } 19 | } 20 | } 21 | 22 | // From trait implementations for v2 models 23 | impl From for Log { 24 | fn from(value: v2::model::log::Log) -> Self { 25 | Log { 26 | continuation_token: value.continuation_token, 27 | content: value.content.to_string(), 28 | } 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /src/commands/config/list.rs: -------------------------------------------------------------------------------- 1 | use std::path::PathBuf; 2 | 3 | use super::model::ListCommand; 4 | use crate::airflow::config::FlowrsConfig; 5 | use anyhow::Result; 6 | 7 | impl ListCommand { 8 | pub fn run(&self) -> Result<()> { 9 | let path = self.file.as_ref().map(PathBuf::from); 10 | let config = FlowrsConfig::from_file(path.as_ref())?; 11 | let servers = config.servers.unwrap_or_default(); 12 | 13 | if servers.is_empty() { 14 | println!("❌ No servers found in the config file!"); 15 | } else { 16 | println!("📋 Airflow instances in the config file:"); 17 | for server in servers { 18 | if let Some(managed) = server.managed { 19 | println!(" - {} ({})", server.name, managed); 20 | } else { 21 | 
println!(" - {}", server.name); 22 | } 23 | } 24 | } 25 | Ok(()) 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | pull_request: 5 | branches: [main] 6 | 7 | env: 8 | CARGO_TERM_COLOR: always 9 | 10 | jobs: 11 | fmt: 12 | name: Format 13 | runs-on: ubuntu-latest 14 | steps: 15 | - uses: actions/checkout@v4 16 | - uses: dtolnay/rust-toolchain@stable 17 | with: 18 | components: rustfmt 19 | - run: cargo fmt --all --check 20 | 21 | clippy: 22 | name: Clippy 23 | runs-on: ubuntu-latest 24 | steps: 25 | - uses: actions/checkout@v4 26 | - uses: dtolnay/rust-toolchain@stable 27 | with: 28 | components: clippy 29 | - uses: Swatinem/rust-cache@v2 30 | - run: cargo clippy --all-targets --all-features -- -D warnings 31 | 32 | test: 33 | name: Unit Tests 34 | runs-on: ubuntu-latest 35 | steps: 36 | - uses: actions/checkout@v4 37 | - uses: dtolnay/rust-toolchain@stable 38 | - uses: Swatinem/rust-cache@v2 39 | - run: cargo test --lib --bins 40 | -------------------------------------------------------------------------------- /src/airflow/client/v1/log.rs: -------------------------------------------------------------------------------- 1 | use anyhow::Result; 2 | use async_trait::async_trait; 3 | use reqwest::Method; 4 | 5 | use super::model; 6 | use crate::airflow::{model::common::Log, traits::LogOperations}; 7 | 8 | use super::V1Client; 9 | 10 | #[async_trait] 11 | impl LogOperations for V1Client { 12 | async fn get_task_logs( 13 | &self, 14 | dag_id: &str, 15 | dag_run_id: &str, 16 | task_id: &str, 17 | task_try: u16, 18 | ) -> Result { 19 | let response = self 20 | .base_api( 21 | Method::GET, 22 | &format!( 23 | "dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/logs/{task_try}" 24 | ), 25 | )? 
26 | .query(&[("full_content", "true")]) 27 | .header("Accept", "application/json") 28 | .send() 29 | .await? 30 | .error_for_status()?; 31 | let log = response.json::().await?; 32 | Ok(log.into()) 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /src/airflow/traits/taskinstance.rs: -------------------------------------------------------------------------------- 1 | use anyhow::Result; 2 | use async_trait::async_trait; 3 | 4 | use crate::airflow::model::common::TaskInstanceList; 5 | 6 | /// Trait for Task Instance operations 7 | #[async_trait] 8 | pub trait TaskInstanceOperations: Send + Sync { 9 | /// List task instances for a specific DAG run 10 | async fn list_task_instances(&self, dag_id: &str, dag_run_id: &str) 11 | -> Result; 12 | 13 | /// List all task instances across all DAG runs 14 | #[allow(unused)] 15 | async fn list_all_taskinstances(&self) -> Result; 16 | 17 | /// Mark a task instance with a specific status 18 | async fn mark_task_instance( 19 | &self, 20 | dag_id: &str, 21 | dag_run_id: &str, 22 | task_id: &str, 23 | status: &str, 24 | ) -> Result<()>; 25 | 26 | /// Clear a task instance 27 | async fn clear_task_instance( 28 | &self, 29 | dag_id: &str, 30 | dag_run_id: &str, 31 | task_id: &str, 32 | ) -> Result<()>; 33 | } 34 | -------------------------------------------------------------------------------- /src/commands/config/remove.rs: -------------------------------------------------------------------------------- 1 | use std::path::PathBuf; 2 | 3 | use inquire::Select; 4 | 5 | use super::model::RemoveCommand; 6 | use crate::airflow::config::FlowrsConfig; 7 | use anyhow::Result; 8 | 9 | impl RemoveCommand { 10 | pub fn run(&self) -> Result<()> { 11 | let path = self.file.as_ref().map(PathBuf::from); 12 | let mut config = FlowrsConfig::from_file(path.as_ref())?; 13 | 14 | if let Some(mut servers) = config.servers.clone() { 15 | let name = match self.name { 16 | None => Select::new( 17 | "name", 18 
| servers.iter().map(|server| server.name.clone()).collect(), 19 | ) 20 | .prompt()?, 21 | Some(ref name) => name.clone(), 22 | }; 23 | servers.retain(|server| server.name != name && server.managed.is_none()); 24 | config.servers = Some(servers); 25 | config.write_to_file()?; 26 | 27 | println!("✅ Config '{name}' removed successfully!"); 28 | } 29 | Ok(()) 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /src/ui/init_screen.rs: -------------------------------------------------------------------------------- 1 | use crate::ui::constants::ROTATING_LOGO; 2 | use ansi_to_tui::IntoText; 3 | use ratatui::{ 4 | layout::{Constraint, Flex, Layout, Rect}, 5 | Frame, 6 | }; 7 | 8 | pub fn render_init_screen(f: &mut Frame, index: u32) { 9 | // let text = ASCII_LOGO.into_text().unwrap(); 10 | let text = ROTATING_LOGO[index as usize % ROTATING_LOGO.len()] 11 | .into_text() 12 | .expect("ROTATING_LOGO should contain valid ANSI text"); 13 | 14 | let area = center( 15 | f.area(), 16 | #[allow(clippy::cast_possible_truncation)] 17 | Constraint::Length(text.width() as u16), 18 | #[allow(clippy::cast_possible_truncation)] 19 | Constraint::Length(text.height() as u16), 20 | ); 21 | 22 | f.render_widget(text, area); 23 | } 24 | 25 | fn center(area: Rect, horizontal: Constraint, vertical: Constraint) -> Rect { 26 | let [area] = Layout::horizontal([horizontal]) 27 | .flex(Flex::Center) 28 | .areas(area); 29 | let [area] = Layout::vertical([vertical]).flex(Flex::Center).areas(area); 30 | area 31 | } 32 | -------------------------------------------------------------------------------- /src/app/model/popup/taskinstances/commands.rs: -------------------------------------------------------------------------------- 1 | use std::sync::LazyLock; 2 | 3 | use crate::app::model::popup::commands_help::{Command, CommandPopUp, DefaultCommands}; 4 | 5 | pub static TASK_COMMAND_POP_UP: LazyLock = LazyLock::new(|| { 6 | let mut commands = vec![ 7 | 
Command { 8 | name: "Clear", 9 | key_binding: "c", 10 | description: "Clear a task instance", 11 | }, 12 | Command { 13 | name: "Visual", 14 | key_binding: "V", 15 | description: "Enter visual selection mode", 16 | }, 17 | Command { 18 | name: "Mark", 19 | key_binding: "m", 20 | description: "Mark selected task instance(s)", 21 | }, 22 | Command { 23 | name: "Filter", 24 | key_binding: "/", 25 | description: "Filter task instances", 26 | }, 27 | ]; 28 | 29 | commands.append(&mut DefaultCommands::new().0); 30 | CommandPopUp { 31 | title: "Task Commands".into(), 32 | commands, 33 | } 34 | }); 35 | -------------------------------------------------------------------------------- /infrastructure/aws/outputs.tf: -------------------------------------------------------------------------------- 1 | output "mwaa_environment_name" { 2 | description = "Name of the MWAA environment" 3 | value = aws_mwaa_environment.this.name 4 | } 5 | 6 | output "mwaa_webserver_url" { 7 | description = "URL of the MWAA webserver" 8 | value = aws_mwaa_environment.this.webserver_url 9 | } 10 | 11 | output "mwaa_arn" { 12 | description = "ARN of the MWAA environment" 13 | value = aws_mwaa_environment.this.arn 14 | } 15 | 16 | output "s3_bucket_name" { 17 | description = "Name of the S3 bucket for DAGs" 18 | value = aws_s3_bucket.mwaa.id 19 | } 20 | 21 | output "mwaa_execution_role_arn" { 22 | description = "ARN of the MWAA execution role" 23 | value = aws_iam_role.mwaa.arn 24 | } 25 | 26 | output "vpc_id" { 27 | description = "ID of the VPC" 28 | value = aws_vpc.mwaa.id 29 | } 30 | 31 | output "private_subnet_ids" { 32 | description = "IDs of the private subnets" 33 | value = aws_subnet.private[*].id 34 | } 35 | 36 | output "security_group_id" { 37 | description = "ID of the MWAA security group" 38 | value = aws_security_group.mwaa.id 39 | } 40 | -------------------------------------------------------------------------------- /LICENSE: 
-------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2024 Jan Vanbuel 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /infrastructure/azure/README.md: -------------------------------------------------------------------------------- 1 | # Azure Managed Airflow Infrastructure 2 | 3 | Infrastructure as Code for deploying Apache Airflow on Azure. 
#[async_trait]
impl LogOperations for V2Client {
    /// Fetch the log for one try of a task instance via the v2 REST API.
    ///
    /// Calls `GET dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/logs/{task_try}`
    /// with `full_content=true` and an explicit `Accept: application/json`
    /// header (the endpoint can otherwise return plain text), then converts
    /// the version-specific payload into the common `Log` model.
    ///
    /// # Errors
    /// Propagates request/connection failures, non-2xx HTTP statuses, and
    /// JSON deserialization errors.
    async fn get_task_logs(
        &self,
        dag_id: &str,
        dag_run_id: &str,
        task_id: &str,
        task_try: u16,
    ) -> Result<Log> {
        let response = self
            .base_api(
                Method::GET,
                &format!(
                    "dags/{dag_id}/dagRuns/{dag_run_id}/taskInstances/{task_id}/logs/{task_try}"
                ),
            )?
            .query(&[("full_content", "true")])
            .header("Accept", "application/json")
            .send()
            .await?
            .error_for_status()?;

        debug!("Response: {response:?}");
        let log = response.json::<model::log::Log>().await?;
        debug!("Parsed Log: {log:?}");
        Ok(log.into())
    }
}
crate::app::worker::OpenItem; 15 | use anyhow::Result; 16 | 17 | /// Super-trait combining all Airflow API operations. 18 | /// This trait can be implemented by different API versions (v1 for Airflow v2, v2 for Airflow v3) 19 | /// to provide a consistent interface for interacting with Airflow. 20 | pub trait AirflowClient: 21 | DagOperations + DagRunOperations + TaskInstanceOperations + LogOperations + DagStatsOperations 22 | { 23 | /// Get the Airflow version this client is configured for 24 | #[allow(unused)] 25 | fn get_version(&self) -> AirflowVersion; 26 | 27 | /// Build the appropriate web UI URL for opening an item in the browser. 28 | /// The URL structure differs between Airflow v2 and v3. 29 | #[allow(unused)] 30 | fn build_open_url(&self, item: &OpenItem) -> Result; 31 | } 32 | -------------------------------------------------------------------------------- /src/app/model/popup/dagruns/commands.rs: -------------------------------------------------------------------------------- 1 | use std::sync::LazyLock; 2 | 3 | use crate::app::model::popup::commands_help::{Command, CommandPopUp, DefaultCommands}; 4 | 5 | pub static DAGRUN_COMMAND_POP_UP: LazyLock = LazyLock::new(|| { 6 | let mut commands = vec![ 7 | Command { 8 | name: "Clear", 9 | key_binding: "c", 10 | description: "Clear a DAG run", 11 | }, 12 | Command { 13 | name: "Show", 14 | key_binding: "v", 15 | description: "Show DAG code", 16 | }, 17 | Command { 18 | name: "Visual", 19 | key_binding: "V", 20 | description: "Enter visual selection mode", 21 | }, 22 | Command { 23 | name: "Mark", 24 | key_binding: "m", 25 | description: "Mark selected DAG run(s)", 26 | }, 27 | Command { 28 | name: "Trigger", 29 | key_binding: "t", 30 | description: "Trigger a DAG run", 31 | }, 32 | ]; 33 | commands.append(&mut DefaultCommands::new().0); 34 | CommandPopUp { 35 | title: "DAG Run Commands".into(), 36 | commands, 37 | } 38 | }); 39 | 
#[tokio::main]
async fn main() -> Result<()> {
    // Parse CLI arguments and dispatch to the selected subcommand
    // (defaults to `run` when no subcommand is given).
    let app = FlowrsApp::parse();
    app.run().await?;
    // Exit explicitly so lingering background tasks/threads cannot delay
    // shutdown. NOTE(review): this skips destructors of anything still alive
    // (including buffered output flushes) — confirm that is intended.
    std::process::exit(0);
}
4 | 5 | ## Coming Soon 6 | 7 | This directory will contain configuration for deploying Airflow via: 8 | 9 | - **Astronomer Cloud**: Managed Airflow service 10 | - **Astronomer Software**: Self-hosted Kubernetes-based deployment 11 | 12 | ## Planned Resources 13 | 14 | ### For Astronomer Cloud 15 | 16 | - Workspace configuration 17 | - Deployment definitions 18 | - Environment variables 19 | - Connection secrets 20 | 21 | ### For Astronomer Software (Kubernetes) 22 | 23 | - Kubernetes cluster (EKS/GKE/AKS) 24 | - Helm chart configuration 25 | - Astronomer platform installation 26 | - Ingress and certificate management 27 | - Monitoring and observability stack 28 | 29 | ## Prerequisites 30 | 31 | ### Astronomer Cloud 32 | - Astronomer account 33 | - Astro CLI installed 34 | 35 | ### Astronomer Software 36 | - Kubernetes cluster (v1.25+) 37 | - kubectl installed and configured 38 | - Helm 3 installed 39 | - Domain name for Airflow UI 40 | 41 | ## Status 42 | 43 | Status: Not yet implemented 44 | 45 | ## References 46 | 47 | - [Astronomer Documentation](https://docs.astronomer.io/) 48 | - [Astro CLI](https://docs.astronomer.io/astro/cli/overview) 49 | - [Astronomer Software Installation](https://docs.astronomer.io/software/install-aws) 50 | -------------------------------------------------------------------------------- /src/airflow/client/v1/model/dagrun.rs: -------------------------------------------------------------------------------- 1 | use serde::{Deserialize, Serialize}; 2 | use time::OffsetDateTime; 3 | 4 | #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] 5 | pub struct DAGRunCollectionResponse { 6 | #[serde(rename = "dag_runs")] 7 | pub dag_runs: Vec, 8 | #[serde(rename = "total_entries")] 9 | pub total_entries: i64, 10 | } 11 | 12 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] 13 | pub struct DAGRunResponse { 14 | pub dag_run_id: Option, 15 | pub dag_id: String, 16 | #[serde(with = "time::serde::iso8601::option")] 17 | pub 
logical_date: Option, 18 | #[serde(with = "time::serde::iso8601::option")] 19 | pub execution_date: Option, 20 | #[serde(with = "time::serde::iso8601::option")] 21 | pub start_date: Option, 22 | #[serde(with = "time::serde::iso8601::option")] 23 | pub end_date: Option, 24 | #[serde(with = "time::serde::iso8601::option")] 25 | pub data_interval_start: Option, 26 | #[serde(with = "time::serde::iso8601::option")] 27 | pub data_interval_end: Option, 28 | #[serde(with = "time::serde::iso8601::option")] 29 | pub last_scheduling_decision: Option, 30 | pub run_type: String, 31 | pub state: String, 32 | pub external_trigger: bool, 33 | pub conf: Option, 34 | pub note: Option, 35 | } 36 | -------------------------------------------------------------------------------- /src/airflow/client/v2/dag.rs: -------------------------------------------------------------------------------- 1 | use anyhow::Result; 2 | use async_trait::async_trait; 3 | use reqwest::Method; 4 | 5 | use super::model; 6 | use crate::airflow::{model::common::DagList, traits::DagOperations}; 7 | 8 | use super::V2Client; 9 | 10 | #[async_trait] 11 | impl DagOperations for V2Client { 12 | async fn list_dags(&self) -> Result { 13 | let r = self.base_api(Method::GET, "dags")?.build()?; 14 | let response = self.base.client.execute(r).await?.error_for_status()?; 15 | 16 | response 17 | .json::() 18 | .await 19 | .map(std::convert::Into::into) 20 | .map_err(std::convert::Into::into) 21 | } 22 | 23 | async fn toggle_dag(&self, dag_id: &str, is_paused: bool) -> Result<()> { 24 | self.base_api(Method::PATCH, &format!("dags/{dag_id}"))? 25 | .json(&serde_json::json!({"is_paused": !is_paused})) 26 | .send() 27 | .await? 28 | .error_for_status()?; 29 | Ok(()) 30 | } 31 | 32 | async fn get_dag_code(&self, dag: &crate::airflow::model::common::Dag) -> Result { 33 | let r = self 34 | .base_api(Method::GET, &format!("dagSources/{}", dag.dag_id))? 
35 | .build()?; 36 | let response = self.base.client.execute(r).await?.error_for_status()?; 37 | let dag_source: model::dag::DagSource = response.json().await?; 38 | Ok(dag_source.content) 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /src/app/model/popup/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod commands_help; 2 | pub mod config; 3 | pub mod dagruns; 4 | pub mod dags; 5 | pub mod error; 6 | pub mod taskinstances; 7 | pub mod warning; 8 | 9 | use ratatui::{ 10 | layout::{Constraint, Flex, Layout, Rect}, 11 | style::Style, 12 | widgets::{Block, BorderType, Borders, Paragraph}, 13 | }; 14 | 15 | use crate::ui::theme::{BORDER_DEFAULT, BORDER_SELECTED, BUTTON_DEFAULT, BUTTON_SELECTED}; 16 | 17 | /// helper function to create a centered rect using up certain percentage of the available rect `r` 18 | #[allow(dead_code)] 19 | pub fn popup_area(area: Rect, percent_x: u16, percent_y: u16) -> Rect { 20 | let vertical = Layout::vertical([Constraint::Percentage(percent_y)]).flex(Flex::Center); 21 | let horizontal = Layout::horizontal([Constraint::Percentage(percent_x)]).flex(Flex::Center); 22 | let [area] = vertical.areas(area); 23 | let [area] = horizontal.areas(area); 24 | area 25 | } 26 | 27 | /// Create a themed button with consistent styling across popups. 
28 | pub fn themed_button(text: &str, selected: bool) -> Paragraph<'_> { 29 | let (style, border_color) = if selected { 30 | (BUTTON_SELECTED, BORDER_SELECTED) 31 | } else { 32 | (BUTTON_DEFAULT, BORDER_DEFAULT) 33 | }; 34 | 35 | Paragraph::new(text).style(style).centered().block( 36 | Block::default() 37 | .border_type(BorderType::Rounded) 38 | .borders(Borders::ALL) 39 | .border_style(Style::default().fg(border_color)) 40 | .style(style), 41 | ) 42 | } 43 | -------------------------------------------------------------------------------- /.github/workflows/release-plz.yml: -------------------------------------------------------------------------------- 1 | name: Release-plz 2 | 3 | permissions: 4 | pull-requests: write 5 | contents: write 6 | 7 | on: 8 | push: 9 | branches: 10 | - main 11 | 12 | jobs: 13 | release-plz-release: 14 | name: Release-plz release 15 | runs-on: ubuntu-latest 16 | steps: 17 | - name: Checkout repository 18 | uses: actions/checkout@v4 19 | with: 20 | fetch-depth: 0 21 | token: ${{ secrets.RELEASE_PLZ_TOKEN }} 22 | - name: Install Rust toolchain 23 | uses: dtolnay/rust-toolchain@stable 24 | - name: Run release-plz 25 | uses: MarcoIeni/release-plz-action@v0.5 26 | with: 27 | command: release 28 | env: 29 | GITHUB_TOKEN: ${{ secrets.RELEASE_PLZ_TOKEN }} 30 | CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_REGISTRY_TOKEN }} 31 | 32 | release-plz-pr: 33 | name: Release-plz PR 34 | runs-on: ubuntu-latest 35 | concurrency: 36 | group: release-plz-${{ github.ref }} 37 | cancel-in-progress: false 38 | steps: 39 | - name: Checkout repository 40 | uses: actions/checkout@v4 41 | with: 42 | fetch-depth: 0 43 | token: ${{ secrets.RELEASE_PLZ_TOKEN }} 44 | - name: Install Rust toolchain 45 | uses: dtolnay/rust-toolchain@stable 46 | - name: Run release-plz 47 | uses: MarcoIeni/release-plz-action@v0.5 48 | with: 49 | command: release-pr 50 | env: 51 | GITHUB_TOKEN: ${{ secrets.RELEASE_PLZ_TOKEN }} 52 | CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_REGISTRY_TOKEN }} 53 | 
/// Handle opening an item (DAG, DAG run, task instance, etc.) in the browser.
///
/// For `OpenItem::Config`, the endpoint carried in the item is ignored and
/// re-resolved from the currently active server in the app configuration, so
/// the browser always opens the environment the user is actually viewing.
///
/// # Errors
/// Returns an error when no active server is configured, no servers exist,
/// the active server name is absent from the configuration, or the client
/// cannot build a URL. Browser-launch failures are logged, not propagated.
pub fn handle_open_item(
    app: &Arc<Mutex<App>>,
    client: &Arc<dyn AirflowClient>,
    item: OpenItem,
) -> Result<()> {
    // For Config items, look up the endpoint from active_server instead of using the passed string
    let final_item = if let OpenItem::Config(_) = &item {
        // The app lock is scoped to this block and released before the
        // (potentially slow) browser launch below.
        let app_lock = app.lock().unwrap();

        let active_server_name = app_lock
            .config
            .active_server
            .as_ref()
            .ok_or_else(|| anyhow::anyhow!("No active server configured"))?;

        let servers = app_lock
            .config
            .servers
            .as_ref()
            .ok_or_else(|| anyhow::anyhow!("No servers configured"))?;

        let server = servers
            .iter()
            .find(|s| &s.name == active_server_name)
            .ok_or_else(|| {
                anyhow::anyhow!("Active server '{active_server_name}' not found in configuration")
            })?;

        OpenItem::Config(server.endpoint.clone())
    } else {
        item
    };

    let url = client.build_open_url(&final_item)?;
    // Best effort: a failed browser launch must not crash the TUI.
    if let Err(e) = webbrowser::open(&url) {
        log::error!("Failed to open browser with URL {url}: {e}");
    }
    Ok(())
}
11 | pub async fn handle_update_task_logs( 12 | app: &Arc>, 13 | client: &Arc, 14 | dag_id: &str, 15 | dag_run_id: &str, 16 | task_id: &str, 17 | task_try: u16, 18 | ) { 19 | debug!("Getting logs for task: {task_id}, try number {task_try}"); 20 | let logs = 21 | join_all((1..=task_try).map(|i| client.get_task_logs(dag_id, dag_run_id, task_id, i))) 22 | .await; 23 | 24 | let mut app = app.lock().unwrap(); 25 | let mut collected_logs = Vec::new(); 26 | for log in logs { 27 | match log { 28 | Ok(log) => { 29 | debug!("Got log: {log:?}"); 30 | collected_logs.push(log); 31 | } 32 | Err(e) => { 33 | debug!("Error getting logs: {e}"); 34 | app.logs.error_popup = Some(ErrorPopup::from_strings(vec![e.to_string()])); 35 | } 36 | } 37 | } 38 | 39 | // Store logs in the environment state 40 | if !collected_logs.is_empty() { 41 | if let Some(env) = app.environment_state.get_active_environment_mut() { 42 | env.add_task_logs(dag_id, dag_run_id, task_id, collected_logs); 43 | } 44 | } 45 | 46 | // Sync panel data from environment state to refresh with new API data 47 | app.sync_panel_data(); 48 | } 49 | -------------------------------------------------------------------------------- /src/app/events/generator.rs: -------------------------------------------------------------------------------- 1 | use std::time::{Duration, Instant}; 2 | 3 | use tokio::sync::mpsc::{channel, Receiver, Sender}; 4 | 5 | use crossterm::event; 6 | 7 | use super::custom::FlowrsEvent; 8 | 9 | pub struct EventGenerator { 10 | _tick_rate: Duration, 11 | pub rx_event: Receiver, 12 | _tx_event: Sender, 13 | } 14 | 15 | impl EventGenerator { 16 | pub fn new(tick_rate: u16) -> Self { 17 | let (tx_event, rx_event) = channel::(500); 18 | 19 | let tick_rate = Duration::from_millis(u64::from(tick_rate)); 20 | let tx_event_thread = tx_event.clone(); 21 | tokio::spawn(async move { 22 | let mut last_tick = Instant::now(); 23 | loop { 24 | let timeout = tick_rate 25 | .checked_sub(last_tick.elapsed()) 26 | 
.unwrap_or_else(|| Duration::from_secs(0)); 27 | if let Ok(true) = event::poll(timeout) { 28 | if let Ok(ev) = event::read() { 29 | let _ = tx_event_thread.send(FlowrsEvent::from(ev)).await; 30 | } 31 | } 32 | if last_tick.elapsed() > tick_rate { 33 | let _ = tx_event_thread.send(FlowrsEvent::Tick).await; 34 | last_tick = Instant::now(); 35 | } 36 | } 37 | }); 38 | 39 | Self { 40 | _tick_rate: tick_rate, 41 | rx_event, 42 | _tx_event: tx_event, 43 | } 44 | } 45 | 46 | pub async fn next(&mut self) -> Option { 47 | self.rx_event.recv().await 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /tests/common/mod.rs: -------------------------------------------------------------------------------- 1 | use std::env; 2 | use std::sync::Arc; 3 | 4 | use flowrs_tui::airflow::client::create_client; 5 | use flowrs_tui::airflow::config::{AirflowAuth, AirflowConfig, AirflowVersion, BasicAuth}; 6 | use flowrs_tui::airflow::traits::AirflowClient; 7 | 8 | /// Check if we should run tests for a specific API version. 9 | /// Returns false if `TEST_AIRFLOW_URL` is not set (required for all API tests). 
/// Check if we should run tests for a specific API version.
/// Returns false if `TEST_AIRFLOW_URL` is not set (required for all API tests).
pub fn should_run_for_api_version(version: &str) -> bool {
    // The base URL is mandatory for every integration test.
    let url_configured = env::var("TEST_AIRFLOW_URL").is_ok();

    // An unset or empty TEST_API_VERSION means "run for every version";
    // otherwise it must match the requested version exactly.
    let version_matches = match env::var("TEST_API_VERSION") {
        Ok(v) if !v.is_empty() => v == version,
        _ => true,
    };

    url_configured && version_matches
}
4 | 5 | ## Coming Soon 6 | 7 | This directory will contain Terraform configuration for deploying: 8 | 9 | - Cloud Composer environment (managed Airflow) 10 | - VPC network configuration 11 | - Cloud Storage buckets for DAGs and data 12 | - IAM roles and service accounts 13 | 14 | ## Planned Resources 15 | 16 | - **VPC Network**: Dedicated network for Composer 17 | - **Cloud Composer Environment**: Managed Airflow environment 18 | - Airflow version configuration 19 | - Node configuration (machine types, node count) 20 | - Networking (IP allocation, private IP) 21 | - **Cloud Storage**: GCS buckets for DAGs, plugins, and data 22 | - **Service Account**: Identity with appropriate permissions 23 | - **IAM Bindings**: Role assignments for the service account 24 | 25 | ## Prerequisites 26 | 27 | - Google Cloud SDK installed and configured 28 | - Terraform >= 1.0 29 | - GCP project with appropriate APIs enabled: 30 | - Cloud Composer API 31 | - Compute Engine API 32 | - Cloud Storage API 33 | - Appropriate IAM permissions 34 | 35 | ## Deployment Options 36 | 37 | ### Standard Environment 38 | - Smaller scale deployments 39 | - Lower cost 40 | - Suitable for development and small production workloads 41 | 42 | ### Cloud Composer 2 43 | - Autoscaling workers 44 | - Better performance 45 | - Enhanced security features 46 | 47 | ### Cloud Composer 3 48 | - Latest generation 49 | - Improved autoscaling 50 | - Better resource utilization 51 | 52 | ## Status 53 | 54 | Status: Not yet implemented 55 | 56 | ## References 57 | 58 | - [Cloud Composer Documentation](https://cloud.google.com/composer/docs) 59 | - [Cloud Composer Terraform Module](https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/composer_environment) 60 | - [Cloud Composer Pricing](https://cloud.google.com/composer/pricing) 61 | -------------------------------------------------------------------------------- /src/airflow/client/v2/model/log.rs: 
// NOTE(review): generic parameters in this file were reconstructed from the
// serde attributes and usage — confirm against the original source.
use serde::{Deserialize, Serialize};
use std::{
    collections::HashMap,
    fmt::{Display, Formatter},
};

/// An individual structured log message with timestamp and event
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct StructuredLogMessage {
    // Optional because some log lines carry no timestamp; omitted from output when None.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub timestamp: Option<String>,
    // The main log text for this entry.
    pub event: String,
    // Any extra key/value pairs the API attaches are captured here verbatim.
    #[serde(flatten)]
    pub additional_fields: HashMap<String, serde_json::Value>,
}

/// Log content can be either structured messages or plain text lines
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum LogContent {
    Structured(Vec<StructuredLogMessage>),
    Plain(Vec<String>),
}

impl Display for LogContent {
    /// Convert log content to a single string representation
    fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
        match self {
            LogContent::Structured(messages) => {
                // One output line per message: "timestamp | extras... | event | ".
                // NOTE(review): HashMap iteration order is unspecified, so the
                // order of additional fields can vary between runs — confirm
                // this is acceptable for display purposes.
                for msg in messages {
                    if let Some(timestamp) = &msg.timestamp {
                        write!(f, "{timestamp} | ")?;
                    }
                    for (key, value) in &msg.additional_fields {
                        write!(f, "{key}: {value} | ")?;
                    }
                    write!(f, "{} | ", msg.event)?;
                    writeln!(f)?;
                }
            }
            LogContent::Plain(lines) => {
                for line in lines {
                    writeln!(f, "{line}")?;
                }
            }
        }
        Ok(())
    }
}

#[derive(Debug, Serialize, Deserialize)]
pub struct Log {
    // Token for fetching the next page of log output; None when the log is complete.
    #[serde(rename = "continuation_token")]
    pub continuation_token: Option<String>,
    pub content: LogContent,
}
repository = "https://github.com/jvanbuel/flowrs" 9 | homepage = "https://github.com/jvanbuel/flowrs" 10 | authors = ["jan@lightcone.be"] 11 | 12 | [[bin]] 13 | name = "flowrs" 14 | path = "src/main.rs" 15 | 16 | 17 | [package.metadata.dist] 18 | formula = "flowrs" 19 | 20 | [dependencies] 21 | ansi-to-tui = { version = "7.0.0" } 22 | anyhow = "1.0.100" 23 | aws-config = { version = "1.8.11" } 24 | aws-sdk-mwaa = "1.96.0" 25 | backtrace = "0.3.76" 26 | chrono = "0.4.42" 27 | async-trait = "0.1.89" 28 | clap = { version = "^4.5", features = ["derive", "env"] } 29 | crossterm = "0.29.0" 30 | custom_error = "1.9.2" 31 | dirs = "6.0.0" 32 | env_logger = "0.11.8" 33 | expectrl = "0.8.0" # Added for controlling interactive processes via pty 34 | futures = "0.3.31" 35 | indoc = "2.0.6" 36 | inquire = "0.9.1" 37 | left-pad = "1.0.1" 38 | log = "0.4.29" 39 | ratatui = { version = "0.29.0", features = ["unstable-widget-ref"] } 40 | regex = "1.12.2" 41 | reqwest = { version = "0.12.24", features = ["json", "rustls-tls"] } 42 | rstest = "0.26.1" 43 | serde = { version = "1.0.228", features = ["derive"] } 44 | serde_json = "1.0.145" 45 | simplelog = "0.12.2" 46 | strum = { version = "0.27.2", features = ["derive"] } 47 | syntect = "5.3.0" 48 | syntect-tui = "3.0.6" 49 | time = { version = "0.3.44", features = [ 50 | "serde", 51 | "serde-human-readable", 52 | "parsing", 53 | "macros", 54 | ] } 55 | throbber-widgets-tui = "0.9.0" 56 | tokio = { version = "1.48.0", features = ["rt-multi-thread", "sync", "macros"] } 57 | toml = "0.9.8" 58 | unicode-width = "0.2.0" 59 | url = "2.5.7" 60 | webbrowser = "1.0.6" 61 | 62 | [dev-dependencies] 63 | mockito = "1.7.1" 64 | 65 | [profile.release] 66 | opt-level = "z" 67 | lto = "fat" 68 | codegen-units = 1 69 | strip = true 70 | panic = "abort" 71 | 72 | [lints.clippy] 73 | pedantic = "warn" 74 | 75 | 76 | [profile.dist] 77 | inherits = "release" 78 | -------------------------------------------------------------------------------- 
/src/airflow/client/v1/model/dag.rs: -------------------------------------------------------------------------------- 1 | use serde::{Deserialize, Serialize}; 2 | use time::OffsetDateTime; 3 | 4 | #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] 5 | pub struct DagCollectionResponse { 6 | pub dags: Vec, 7 | pub total_entries: i64, 8 | } 9 | 10 | #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] 11 | pub struct DagResponse { 12 | pub dag_id: String, 13 | pub dag_display_name: String, 14 | pub root_dag_id: Option, 15 | pub is_paused: Option, 16 | pub is_active: Option, 17 | pub is_subdag: bool, 18 | #[serde(with = "time::serde::iso8601::option")] 19 | pub last_parsed_time: Option, 20 | #[serde(with = "time::serde::iso8601::option")] 21 | pub last_pickled: Option, 22 | #[serde(with = "time::serde::iso8601::option")] 23 | pub last_expired: Option, 24 | pub scheduler_lock: Option, 25 | pub pickle_id: Option, 26 | pub default_view: Option, 27 | pub fileloc: String, 28 | pub file_token: String, 29 | pub owners: Vec, 30 | pub description: Option, 31 | pub schedule_interval: Option, 32 | pub timetable_description: Option, 33 | pub tags: Option>, 34 | pub max_active_tasks: Option, 35 | pub max_active_runs: Option, 36 | pub has_task_concurrency_limits: Option, 37 | pub has_import_errors: Option, 38 | #[serde(with = "time::serde::iso8601::option")] 39 | pub next_dagrun: Option, 40 | #[serde(with = "time::serde::iso8601::option")] 41 | pub next_dagrun_data_interval_start: Option, 42 | #[serde(with = "time::serde::iso8601::option")] 43 | pub next_dagrun_data_interval_end: Option, 44 | #[serde(with = "time::serde::iso8601::option")] 45 | pub next_dagrun_create_after: Option, 46 | pub max_consecutive_failed_dag_runs: Option, 47 | } 48 | 49 | #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] 50 | pub struct DagTagResponse { 51 | pub name: String, 52 | } 53 | 
-------------------------------------------------------------------------------- /src/airflow/client/v2/model/dagrun.rs: -------------------------------------------------------------------------------- 1 | use serde::{Deserialize, Serialize}; 2 | use time::OffsetDateTime; 3 | 4 | #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] 5 | #[serde(rename_all = "camelCase")] 6 | pub struct DagRunList { 7 | #[serde(rename = "dag_runs")] 8 | pub dag_runs: Vec, 9 | #[serde(rename = "total_entries")] 10 | pub total_entries: i64, 11 | } 12 | 13 | #[allow(clippy::struct_field_names)] 14 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] 15 | pub struct DagRun { 16 | pub dag_run_id: String, 17 | pub dag_id: String, 18 | #[serde(with = "time::serde::iso8601::option")] 19 | pub logical_date: Option, 20 | #[serde(with = "time::serde::iso8601::option")] 21 | pub queued_at: Option, 22 | #[serde(with = "time::serde::iso8601::option")] 23 | pub start_date: Option, 24 | #[serde(with = "time::serde::iso8601::option")] 25 | pub end_date: Option, 26 | pub duration: Option, 27 | #[serde(with = "time::serde::iso8601::option")] 28 | pub data_interval_start: Option, 29 | #[serde(with = "time::serde::iso8601::option")] 30 | pub data_interval_end: Option, 31 | #[serde(with = "time::serde::iso8601::option")] 32 | pub run_after: Option, 33 | #[serde(with = "time::serde::iso8601::option")] 34 | pub last_scheduling_decision: Option, 35 | pub run_type: String, 36 | pub state: String, 37 | pub triggered_by: Option, 38 | pub triggering_user_name: Option, 39 | pub conf: Option, 40 | pub note: Option, 41 | #[serde(default, skip_serializing_if = "Option::is_none")] 42 | pub dag_versions: Option>, 43 | pub bundle_version: Option, 44 | } 45 | 46 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] 47 | pub struct DagVersion { 48 | pub id: String, 49 | pub version_number: i64, 50 | pub dag_id: String, 51 | pub bundle_name: Option, 52 | pub bundle_version: Option, 53 | 
use anyhow::Error;
use ratatui::{
    buffer::Buffer,
    layout::Rect,
    style::{Color, Modifier, Style},
    text::{Line, Span, Text},
    widgets::{Block, BorderType, Borders, Clear, Paragraph, Widget, Wrap},
};

use super::popup_area;

/// Modal popup listing one or more error messages on top of the current view.
pub struct ErrorPopup {
    // Human-readable error strings; rendered one per entry.
    // NOTE(review): inner type reconstructed as Vec<String> — confirm.
    pub errors: Vec<String>,
}

impl ErrorPopup {
    /// Build a popup from `anyhow` errors by stringifying each of them.
    pub fn new(errors: &[Error]) -> Self {
        Self {
            errors: errors
                .iter()
                .map(std::string::ToString::to_string)
                .collect(),
        }
    }

    /// Build a popup from pre-formatted error strings.
    pub fn from_strings(errors: Vec<String>) -> Self {
        Self { errors }
    }

    /// True when there is at least one error to display.
    pub fn has_errors(&self) -> bool {
        !self.errors.is_empty()
    }
}

impl Widget for &ErrorPopup {
    fn render(self, area: Rect, buf: &mut Buffer) {
        // Nothing to draw when the popup holds no errors.
        if self.errors.is_empty() {
            return;
        }

        // Popup occupies 80% width x 50% height of the parent area.
        let popup_area = popup_area(area, 80, 50);
        let popup = Block::default()
            .border_type(BorderType::Rounded)
            // NOTE(review): title text appears garbled in this copy — likely
            // originally "Errors - Press <Esc> or <Enter> to close"; confirm upstream.
            .title("Errors - Press or to close")
            .title_style(Style::default().fg(Color::Red).add_modifier(Modifier::BOLD))
            .borders(Borders::ALL)
            .border_style(Style::default().fg(Color::Red));

        // Clear whatever was underneath so the popup is fully opaque.
        Clear.render(popup_area, buf);

        let mut text = Text::default();
        for (idx, error) in self.errors.iter().enumerate() {
            text.push_line(Line::from(vec![
                Span::styled(
                    format!("Error {}: ", idx + 1),
                    Style::default().fg(Color::Red).add_modifier(Modifier::BOLD),
                ),
                Span::styled(error.as_str(), Style::default().fg(Color::White)),
            ]));
            // Blank separator line between errors, but not after the last one.
            if idx < self.errors.len() - 1 {
                text.push_line(Line::from(""));
            }
        }

        let error_paragraph = Paragraph::new(text).wrap(Wrap { trim: true }).block(popup);
        error_paragraph.render(popup_area, buf);
    }
}
AirflowStateColor::UpForRetry => theme::STATE_UP_FOR_RETRY, 48 | AirflowStateColor::UpForReschedule => theme::STATE_UP_FOR_RESCHEDULE, 49 | AirflowStateColor::Skipped => theme::STATE_SKIPPED, 50 | AirflowStateColor::UpstreamFailed => theme::STATE_UPSTREAM_FAILED, 51 | AirflowStateColor::None => Color::Reset, 52 | } 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /src/commands/config/managed_services.rs: -------------------------------------------------------------------------------- 1 | use std::path::PathBuf; 2 | 3 | use inquire::Select; 4 | use strum::IntoEnumIterator; 5 | 6 | use super::model::ManagedServiceCommand; 7 | use crate::airflow::config::FlowrsConfig; 8 | use crate::airflow::config::ManagedService; 9 | use anyhow::Result; 10 | 11 | impl ManagedServiceCommand { 12 | pub fn run(&self) -> Result<()> { 13 | let managed_service = match &self.managed_service { 14 | Some(managed_service) => managed_service.clone(), 15 | None => Select::new("managed service", ManagedService::iter().collect()).prompt()?, 16 | }; 17 | 18 | let path = self.file.as_ref().map(PathBuf::from); 19 | let mut config = FlowrsConfig::from_file(path.as_ref())?; 20 | 21 | match config.managed_services { 22 | Some(ref mut services) => { 23 | if services.contains(&managed_service) { 24 | println!("Managed service already enabled!"); 25 | return Ok(()); 26 | } 27 | services.push(managed_service); 28 | } 29 | None => { 30 | config.managed_services = Some(vec![managed_service]); 31 | } 32 | } 33 | 34 | config.write_to_file()?; 35 | 36 | println!("✅ Managed service added successfully!"); 37 | Ok(()) 38 | } 39 | 40 | pub fn disable(&self) -> Result<()> { 41 | let managed_service = match &self.managed_service { 42 | Some(managed_service) => managed_service.clone(), 43 | None => Select::new("managed service", ManagedService::iter().collect()).prompt()?, 44 | }; 45 | 46 | let path = self.file.as_ref().map(PathBuf::from); 47 | let mut config = 
FlowrsConfig::from_file(path.as_ref())?; 48 | 49 | if let Some(ref mut services) = config.managed_services { 50 | if !services.contains(&managed_service) { 51 | println!("Managed service already disabled!"); 52 | return Ok(()); 53 | } 54 | services.retain(|service| service != &managed_service); 55 | } 56 | 57 | config.write_to_file()?; 58 | 59 | println!("✅ Managed service disabled successfully!"); 60 | Ok(()) 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /src/commands/config/model.rs: -------------------------------------------------------------------------------- 1 | use crate::airflow::config::ManagedService; 2 | use anyhow::Result; 3 | use clap::Parser; 4 | use inquire::validator::Validation; 5 | use strum::Display; 6 | use strum::EnumIter; 7 | use url::Url; 8 | 9 | #[derive(Parser, Debug)] 10 | pub enum ConfigCommand { 11 | Add(AddCommand), 12 | #[clap(alias = "rm")] 13 | Remove(RemoveCommand), 14 | Update(UpdateCommand), 15 | #[clap(alias = "ls")] 16 | List(ListCommand), 17 | Enable(ManagedServiceCommand), 18 | Disable(ManagedServiceCommand), 19 | } 20 | 21 | impl ConfigCommand { 22 | pub fn run(&self) -> Result<()> { 23 | match self { 24 | ConfigCommand::Add(cmd) => cmd.run(), 25 | ConfigCommand::Remove(cmd) => cmd.run(), 26 | ConfigCommand::Update(cmd) => cmd.run(), 27 | ConfigCommand::List(cmd) => cmd.run(), 28 | ConfigCommand::Enable(cmd) => cmd.run(), 29 | ConfigCommand::Disable(cmd) => cmd.disable(), 30 | } 31 | } 32 | } 33 | 34 | #[derive(Parser, Debug)] 35 | pub struct AddCommand { 36 | #[clap(short, long)] 37 | pub file: Option, 38 | } 39 | 40 | #[derive(Parser, Debug)] 41 | pub struct RemoveCommand { 42 | pub name: Option, 43 | #[clap(short, long)] 44 | pub file: Option, 45 | } 46 | 47 | #[derive(Parser, Debug)] 48 | pub struct ListCommand { 49 | #[clap(short, long)] 50 | pub file: Option, 51 | } 52 | 53 | #[derive(Parser, Debug)] 54 | pub struct UpdateCommand { 55 | pub name: Option, 56 | 
#[clap(short, long)] 57 | pub file: Option, 58 | } 59 | 60 | #[derive(EnumIter, Debug, Display)] 61 | pub enum ConfigOption { 62 | BasicAuth, 63 | Token(Command), 64 | } 65 | 66 | #[derive(Parser, Debug)] 67 | pub struct ManagedServiceCommand { 68 | #[clap(short, long)] 69 | pub managed_service: Option, 70 | #[clap(short, long)] 71 | pub file: Option, 72 | } 73 | 74 | type Command = Option; 75 | 76 | #[allow(clippy::unnecessary_wraps)] 77 | pub fn validate_endpoint( 78 | endpoint: &str, 79 | ) -> Result> { 80 | match Url::parse(endpoint) { 81 | Ok(_) => Ok(Validation::Valid), 82 | Err(error) => Ok(Validation::Invalid(error.into())), 83 | } 84 | } 85 | -------------------------------------------------------------------------------- /src/app/worker/config.rs: -------------------------------------------------------------------------------- 1 | use std::sync::{Arc, Mutex}; 2 | 3 | use anyhow::Result; 4 | 5 | use crate::app::environment_state::EnvironmentData; 6 | use crate::app::model::popup::error::ErrorPopup; 7 | use crate::app::state::App; 8 | 9 | /// Handle configuration selection. 10 | /// Creates a new client for the selected configuration if needed and sets it as active. 
/// Handle configuration selection.
/// Creates a new client for the selected configuration if needed and sets it as active.
///
/// `idx` indexes into the *filtered* config list shown in the UI. An
/// out-of-range index or a failed client construction surfaces an error popup
/// and returns `Ok(())` so the event loop keeps running; only a poisoned lock
/// is treated as a hard error.
pub fn handle_config_selected(app: &Arc<Mutex<App>>, idx: usize) -> Result<()> {
    // NOTE(review): generic args on `app` reconstructed — confirm against original.
    let mut app = app
        .lock()
        .map_err(|_| anyhow::anyhow!("Failed to acquire app lock"))?;

    // Clone the selected config out so we stop borrowing the list immediately.
    let Some(selected_config) = app.configs.filtered.items.get(idx).cloned() else {
        log::error!(
            "Config index {idx} out of bounds (total: {})",
            app.configs.filtered.items.len()
        );
        app.configs.error_popup = Some(ErrorPopup::from_strings(vec![format!(
            "Configuration index {idx} not found"
        )]));
        app.loading = false;
        return Ok(());
    };
    let env_name = selected_config.name.clone();

    // Check if environment already exists, if not create it
    if !app.environment_state.environments.contains_key(&env_name) {
        match crate::airflow::client::create_client(&selected_config) {
            Ok(client) => {
                let env_data = EnvironmentData::new(client);
                app.environment_state
                    .add_environment(env_name.clone(), env_data);
            }
            Err(e) => {
                // Connection failure is reported to the user, not propagated.
                log::error!("Failed to create client for '{env_name}': {e}");
                app.configs.error_popup = Some(ErrorPopup::from_strings(vec![format!(
                    "Failed to connect to '{env_name}': {e}"
                )]));
                app.loading = false;
                return Ok(());
            }
        }
    }

    // Set this as the active environment
    app.environment_state
        .set_active_environment(env_name.clone());
    app.config.active_server = Some(env_name);

    // Clear the view state but NOT the environment data
    app.clear_state();

    // Sync panel data from the new environment
    app.sync_panel_data();
    app.loading = false;
    Ok(())
}
It allows you to monitor, inspect and manage Airflow DAGs from the comfort of your terminal. It is built with the [ratatui](https://ratatui.rs/) library.
42 | 43 | ### Custom Airflow instances 44 | 45 | If you're self-hosting an Airflow instance, or your favorite managed service is not yet supported, you can register an Airflow server instance with the `flowrs config add` command: 46 | 47 | ![flowrs config add demo](./vhs/add_config.gif) 48 | 49 | This creates an entry in your configuration file at `$XDG_CONFIG_HOME/flowrs/config.toml` (following the XDG Base Directory Specification, which defaults to `~/.config/flowrs/config.toml`). For backwards compatibility, flowrs also reads from `~/.flowrs` if the XDG location doesn't exist. If you have multiple Airflow servers configured, you can easily switch between them in `flowrs` configuration screen. 50 | 51 | Only basic authentication and bearer token authentication are supported. When selecting the bearer token option, you can either provide a static token or a command that generates a token. 52 | -------------------------------------------------------------------------------- /docs/plans/2025-12-14-binary-size-reduction-design.md: -------------------------------------------------------------------------------- 1 | # Binary Size Reduction Design 2 | 3 | ## Goal 4 | 5 | Reduce the flowrs release binary size from 26MB to approximately 12-16MB (~40-50% reduction) through tokio feature trimming and aggressive release build optimizations. 6 | 7 | ## Changes 8 | 9 | ### 1. 
Tokio Feature Trimming 10 | 11 | **Current:** 12 | ```toml 13 | tokio = { version = "1.48.0", features = ["full"] } 14 | ``` 15 | 16 | **Proposed:** 17 | ```toml 18 | tokio = { version = "1.48.0", features = ["rt-multi-thread", "sync", "macros"] } 19 | ``` 20 | 21 | **Rationale:** The codebase only uses: 22 | - `tokio::sync::mpsc` - channels (requires `sync`) 23 | - `tokio::spawn` - spawning tasks (requires `rt-multi-thread`) 24 | - `tokio::task::JoinSet` - managing concurrent tasks (requires `rt-multi-thread`) 25 | - `#[tokio::main]` - the runtime (requires `rt-multi-thread`, `macros`) 26 | - `#[tokio::test]` - async tests (requires `rt-multi-thread`, `macros`) 27 | 28 | Unused features being dropped: `io-util`, `io-std`, `net`, `time`, `fs`, `process`, `signal`. 29 | 30 | ### 2. Release Profile Optimizations 31 | 32 | **Current:** 33 | ```toml 34 | [profile.dist] 35 | inherits = "release" 36 | lto = "thin" 37 | ``` 38 | 39 | **Proposed:** 40 | ```toml 41 | [profile.release] 42 | opt-level = "z" # Optimize for size 43 | lto = "fat" # Full link-time optimization 44 | codegen-units = 1 # Single codegen unit for better optimization 45 | strip = true # Strip symbols from binary 46 | panic = "abort" # No unwinding machinery 47 | 48 | # [profile.dist] section removed - no longer needed 49 | ``` 50 | 51 | **Setting explanations:** 52 | - `opt-level = "z"` - Compiler prioritizes smallest code over speed 53 | - `lto = "fat"` - Cross-crate optimization, better dead code elimination than "thin" 54 | - `codegen-units = 1` - Better optimization at cost of slower compilation 55 | - `strip = true` - Removes debug symbols and symbol table 56 | - `panic = "abort"` - Removes unwinding machinery 57 | 58 | ## Trade-offs 59 | 60 | - Release builds will take 2-3x longer due to LTO and single codegen unit 61 | - `panic = "abort"` means no stack unwinding on panic (acceptable for TUI apps) 62 | - `opt-level = "z"` may have minor runtime performance impact (negligible for I/O-bound TUI) 
// NOTE(review): return-type and turbofish generics in this impl were
// reconstructed (Result<DagRunList>, json::<model::dagrun::DagRunList>) —
// confirm against the original source.
#[async_trait]
impl DagRunOperations for V2Client {
    /// List up to 50 of the most recent runs for one DAG, newest first.
    async fn list_dagruns(&self, dag_id: &str) -> Result<DagRunList> {
        let response: Response = self
            .base_api(Method::GET, &format!("dags/{dag_id}/dagRuns"))?
            .query(&[("order_by", "-start_date"), ("limit", "50")])
            .send()
            .await?
            .error_for_status()?;
        // Deserialize the v2 wire format, then convert into the shared model.
        let dagruns: model::dagrun::DagRunList =
            response.json::<model::dagrun::DagRunList>().await?;
        Ok(dagruns.into())
    }

    /// List runs across all DAGs (the "~" wildcard), capped at 200 entries.
    async fn list_all_dagruns(&self) -> Result<DagRunList> {
        let response: Response = self
            .base_api(Method::POST, "dags/~/dagRuns/list")?
            .json(&serde_json::json!({"page_limit": 200}))
            .send()
            .await?
            .error_for_status()?;
        let dagruns: model::dagrun::DagRunList =
            response.json::<model::dagrun::DagRunList>().await?;
        Ok(dagruns.into())
    }

    /// Set the state of a single DAG run (e.g. mark it success/failed).
    async fn mark_dag_run(&self, dag_id: &str, dag_run_id: &str, status: &str) -> Result<()> {
        self.base_api(
            Method::PATCH,
            &format!("dags/{dag_id}/dagRuns/{dag_run_id}"),
        )?
        .json(&serde_json::json!({"state": status}))
        .send()
        .await?
        .error_for_status()?;
        Ok(())
    }

    /// Clear a DAG run so its task instances are re-scheduled (dry_run=false
    /// makes the clear take effect immediately).
    async fn clear_dagrun(&self, dag_id: &str, dag_run_id: &str) -> Result<()> {
        self.base_api(
            Method::POST,
            &format!("dags/{dag_id}/dagRuns/{dag_run_id}/clear"),
        )?
        .json(&serde_json::json!({"dry_run": false}))
        .send()
        .await?
        .error_for_status()?;
        Ok(())
    }

    /// Trigger a new DAG run, optionally pinning its logical date.
    async fn trigger_dag_run(&self, dag_id: &str, logical_date: Option<&str>) -> Result<()> {
        // A null logical_date lets the server choose one.
        let body = serde_json::json!({"logical_date": logical_date});

        let resp: Response = self
            .base_api(Method::POST, &format!("dags/{dag_id}/dagRuns"))?
            .json(&body)
            .send()
            .await?
            .error_for_status()?;
        debug!("{resp:?}");
        Ok(())
    }
}
DAG has required fields populated 36 | assert!(!dag.dag_id.is_empty(), "DAG ID should not be empty"); 37 | } 38 | } 39 | 40 | #[tokio::test] 41 | async fn test_v1_get_dag_code() { 42 | if !should_run_for_api_version("v1") { 43 | return; 44 | } 45 | 46 | let client = create_test_client().expect("Failed to create test client"); 47 | let dag_list = client.list_dags().await.expect("Failed to list DAGs"); 48 | 49 | if let Some(dag) = dag_list.dags.first() { 50 | let code = client 51 | .get_dag_code(dag) 52 | .await 53 | .expect("Failed to get DAG code"); 54 | assert!( 55 | code.contains("DAG") || code.contains("dag"), 56 | "DAG code should contain DAG definition" 57 | ); 58 | } 59 | } 60 | 61 | #[tokio::test] 62 | async fn test_v1_list_dagruns() { 63 | if !should_run_for_api_version("v1") { 64 | return; 65 | } 66 | 67 | let client = create_test_client().expect("Failed to create test client"); 68 | let dag_list = client.list_dags().await.expect("Failed to list DAGs"); 69 | 70 | if let Some(dag) = dag_list.dags.first() { 71 | let result = client.list_dagruns(&dag.dag_id).await; 72 | assert!( 73 | result.is_ok(), 74 | "Failed to list DAG runs: {:?}", 75 | result.err() 76 | ); 77 | // Note: dag_runs may be empty if no runs have been triggered 78 | } 79 | } 80 | -------------------------------------------------------------------------------- /.github/workflows/integration.yml: -------------------------------------------------------------------------------- 1 | name: Integration Tests 2 | 3 | on: 4 | push: 5 | branches: [main] 6 | 7 | env: 8 | CARGO_TERM_COLOR: always 9 | 10 | jobs: 11 | integration: 12 | name: Integration (${{ matrix.airflow_version }}) 13 | runs-on: ubuntu-latest 14 | strategy: 15 | fail-fast: false 16 | matrix: 17 | include: 18 | - airflow_version: "2.10.4" 19 | api_version: "v1" 20 | - airflow_version: "3.0.1" 21 | api_version: "v2" 22 | 23 | services: 24 | airflow: 25 | image: apache/airflow:${{ matrix.airflow_version }} 26 | env: 27 | 
AIRFLOW__CORE__LOAD_EXAMPLES: "true" 28 | AIRFLOW__DATABASE__SQL_ALCHEMY_CONN: sqlite:////tmp/airflow.db 29 | AIRFLOW__WEBSERVER__SECRET_KEY: test-secret-key 30 | AIRFLOW__API__AUTH_BACKENDS: airflow.api.auth.backend.basic_auth 31 | _AIRFLOW_DB_MIGRATE: "true" 32 | _AIRFLOW_WWW_USER_CREATE: "true" 33 | _AIRFLOW_WWW_USER_USERNAME: airflow 34 | _AIRFLOW_WWW_USER_PASSWORD: airflow 35 | ports: 36 | - 8080:8080 37 | options: >- 38 | --entrypoint /bin/bash 39 | --health-cmd "curl -f http://localhost:8080/health || exit 1" 40 | --health-interval 10s 41 | --health-timeout 5s 42 | --health-retries 10 43 | --health-start-period 30s 44 | 45 | steps: 46 | - uses: actions/checkout@v4 47 | 48 | - uses: dtolnay/rust-toolchain@stable 49 | 50 | - uses: Swatinem/rust-cache@v2 51 | 52 | - name: Start Airflow 53 | run: | 54 | docker exec ${{ job.services.airflow.id }} bash -c " 55 | airflow db migrate && 56 | airflow users create --username airflow --password airflow --firstname Test --lastname User --role Admin --email test@example.com && 57 | airflow webserver --port 8080 & 58 | airflow scheduler & 59 | sleep 30 60 | " 61 | 62 | - name: Wait for Airflow API 63 | run: | 64 | timeout 120 bash -c ' 65 | until curl -sf -u airflow:airflow http://localhost:8080/api/v1/health; do 66 | echo "Waiting for Airflow API..." 
67 | sleep 5 68 | done 69 | ' 70 | echo "Airflow API is ready" 71 | 72 | - name: Run integration tests 73 | env: 74 | TEST_AIRFLOW_URL: http://localhost:8080 75 | TEST_AIRFLOW_USERNAME: airflow 76 | TEST_AIRFLOW_PASSWORD: airflow 77 | TEST_API_VERSION: ${{ matrix.api_version }} 78 | run: cargo test --test '*' -- --test-threads=1 79 | -------------------------------------------------------------------------------- /src/airflow/client/v1/model/taskinstance.rs: -------------------------------------------------------------------------------- 1 | use serde::{Deserialize, Serialize}; 2 | use time::OffsetDateTime; 3 | 4 | #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] 5 | pub struct TaskInstanceCollectionResponse { 6 | pub task_instances: Vec<TaskInstanceResponse>, 7 | pub total_entries: i64, 8 | } 9 | 10 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] 11 | pub struct TaskInstanceResponse { 12 | pub task_id: String, 13 | pub task_display_name: String, 14 | pub dag_id: String, 15 | pub dag_run_id: String, 16 | #[serde(with = "time::serde::iso8601")] 17 | pub execution_date: OffsetDateTime, 18 | #[serde(with = "time::serde::iso8601::option")] 19 | pub start_date: Option<OffsetDateTime>, 20 | #[serde(with = "time::serde::iso8601::option")] 21 | pub end_date: Option<OffsetDateTime>, 22 | pub duration: Option<f64>, 23 | pub state: Option<String>, 24 | pub try_number: i64, 25 | pub map_index: i64, 26 | pub max_tries: i64, 27 | pub hostname: String, 28 | pub unixname: String, 29 | pub pool: String, 30 | pub pool_slots: i64, 31 | pub queue: Option<String>, 32 | pub priority_weight: Option<i64>, 33 | pub operator: Option<String>, 34 | #[serde(with = "time::serde::iso8601::option")] 35 | pub queued_when: Option<OffsetDateTime>, 36 | pub pid: Option<i64>, 37 | pub executor: Option<String>, 38 | pub executor_config: Option<String>, 39 | pub sla_miss: Option<serde_json::Value>, /* Airflow v1 API returns an SlaMiss object; kept loosely typed — TODO confirm against upstream schema */ 40 | pub rendered_map_index: Option<String>, 41 | pub rendered_fields: serde_json::Value, 42 | pub trigger: Option<TriggerResponse>, 43 | pub triggerer_job: Option<JobResponse>, 44 | pub note: Option<String>, 45 | } 46 | 47 | #[derive(Default, Debug, Clone,
PartialEq, Serialize, Deserialize)] 48 | pub struct TriggerResponse { 49 | pub id: i64, 50 | pub classpath: String, 51 | pub kwargs: String, 52 | #[serde(with = "time::serde::iso8601::option")] 53 | pub created_date: Option<OffsetDateTime>, 54 | pub triggerer_id: i64, 55 | } 56 | 57 | #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] 58 | pub struct JobResponse { 59 | pub id: i64, 60 | pub dag_id: String, 61 | pub state: String, 62 | pub job_type: String, 63 | #[serde(with = "time::serde::iso8601::option")] 64 | pub start_date: Option<OffsetDateTime>, 65 | #[serde(with = "time::serde::iso8601::option")] 66 | pub end_date: Option<OffsetDateTime>, 67 | #[serde(with = "time::serde::iso8601::option")] 68 | pub latest_heartbeat: Option<OffsetDateTime>, 69 | pub executor_class: String, 70 | pub hostname: String, 71 | pub unixname: String, 72 | } 73 | -------------------------------------------------------------------------------- /infrastructure/README.md: -------------------------------------------------------------------------------- 1 | # Flowrs Infrastructure 2 | 3 | Infrastructure as Code for deploying Apache Airflow across multiple cloud providers and platforms. 4 | 5 | ## Overview 6 | 7 | This directory contains Terraform and other IaC configurations for deploying managed Apache Airflow environments that can be monitored and managed using flowrs. 8 | 9 | ## Available Environments 10 | 11 | ### AWS (Amazon Web Services) 12 | **Status**: Implemented ✓ 13 | 14 | Deploy Apache Airflow using Amazon MWAA (Managed Workflows for Apache Airflow). 15 | 16 | - **Directory**: `aws/` 17 | - **Technology**: Terraform 18 | - **Features**: 19 | - VPC with private subnets 20 | - S3 bucket for DAGs 21 | - IAM roles with least privilege 22 | - NAT Gateways for outbound connectivity 23 | - Example DAG included 24 | 25 | **Cost**: Starting at ~$420/month for smallest configuration 26 | 27 | See [aws/README.md](aws/README.md) for detailed instructions.
28 | 29 | ### Azure 30 | **Status**: Coming soon 31 | 32 | Deploy Apache Airflow using Azure Data Factory Managed Airflow. 33 | 34 | - **Directory**: `azure/` 35 | - **Technology**: Terraform or Bicep 36 | - **Planned Features**: 37 | - Virtual Network configuration 38 | - Storage Account for DAGs 39 | - Managed Airflow environment 40 | - Managed Identity authentication 41 | 42 | See [azure/README.md](azure/README.md) for more information. 43 | 44 | ### Astronomer 45 | **Status**: Coming soon 46 | 47 | Deploy Apache Airflow using Astronomer (Cloud or Software). 48 | 49 | - **Directory**: `astronomer/` 50 | - **Options**: 51 | - Astronomer Cloud (managed service) 52 | - Astronomer Software (Kubernetes-based) 53 | - **Planned Features**: 54 | - Workspace and deployment configuration 55 | - Kubernetes cluster setup (for Software) 56 | - Helm chart configuration 57 | 58 | See [astronomer/README.md](astronomer/README.md) for more information. 59 | 60 | ### GCP (Google Cloud Platform) 61 | **Status**: Coming soon 62 | 63 | Deploy Apache Airflow using Google Cloud Composer. 64 | 65 | - **Directory**: `gcp/` 66 | - **Technology**: Terraform 67 | - **Planned Features**: 68 | - VPC network configuration 69 | - Cloud Composer environment 70 | - GCS buckets for DAGs 71 | - Service account with IAM bindings 72 | 73 | See [gcp/README.md](gcp/README.md) for more information. 74 | 75 | ## Contributing 76 | 77 | To add a new platform: 78 | 79 | 1. Create a directory with the platform name 80 | 2. Add Terraform/IaC configuration 81 | 3. Include a comprehensive README.md 82 | 4. Document connection instructions for flowrs 83 | 5. 
Add cost estimates if applicable 84 | 85 | ## Support 86 | 87 | For issues or questions: 88 | - flowrs issues: https://github.com/jvanbuel/flowrs/issues 89 | - Platform-specific issues: Check respective cloud provider documentation 90 | -------------------------------------------------------------------------------- /src/app/model/popup/warning.rs: -------------------------------------------------------------------------------- 1 | use ratatui::{ 2 | buffer::Buffer, 3 | layout::Rect, 4 | style::{Color, Modifier, Style}, 5 | text::{Line, Span, Text}, 6 | widgets::{Block, BorderType, Borders, Clear, Paragraph, Widget, Wrap}, 7 | }; 8 | 9 | use super::popup_area; 10 | 11 | pub struct WarningPopup { 12 | pub warnings: Vec<String>, 13 | } 14 | 15 | impl WarningPopup { 16 | pub fn new(warnings: Vec<String>) -> Self { 17 | Self { warnings } 18 | } 19 | 20 | pub fn has_warnings(&self) -> bool { 21 | !self.warnings.is_empty() 22 | } 23 | } 24 | 25 | impl Widget for &WarningPopup { 26 | fn render(self, area: Rect, buf: &mut Buffer) { 27 | if self.warnings.is_empty() { 28 | return; 29 | } 30 | 31 | let popup_area = popup_area(area, 80, 50); 32 | let popup = Block::default() 33 | .border_type(BorderType::Rounded) 34 | .title("Warning - Press <Esc> or <Enter> to close") 35 | .title_style( 36 | Style::default() 37 | .fg(Color::Yellow) 38 | .add_modifier(Modifier::BOLD), 39 | ) 40 | .borders(Borders::ALL) 41 | .border_style(Style::default().fg(Color::Yellow)); 42 | 43 | Clear.render(popup_area, buf); 44 | 45 | let mut text = Text::default(); 46 | for (idx, warning) in self.warnings.iter().enumerate() { 47 | // Split the warning by newlines to properly render multi-line warnings 48 | for (line_idx, line) in warning.lines().enumerate() { 49 | if line_idx == 0 { 50 | // First line includes the "Warning N: " prefix 51 | text.push_line(Line::from(vec![ 52 | Span::styled( 53 | format!("Warning {}: ", idx + 1), 54 | Style::default() 55 | .fg(Color::Yellow) 56 | .add_modifier(Modifier::BOLD), 57 | ), 58 |
Span::styled(line, Style::default().fg(Color::White)), 59 | ])); 60 | } else { 61 | // Subsequent lines are just white text 62 | text.push_line(Line::from(Span::styled( 63 | line, 64 | Style::default().fg(Color::White), 65 | ))); 66 | } 67 | } 68 | if idx < self.warnings.len() - 1 { 69 | text.push_line(Line::from("")); 70 | } 71 | } 72 | 73 | let warning_paragraph = Paragraph::new(text).wrap(Wrap { trim: true }).block(popup); 74 | warning_paragraph.render(popup_area, buf); 75 | } 76 | } 77 | -------------------------------------------------------------------------------- /src/airflow/client/v1/dagrun.rs: -------------------------------------------------------------------------------- 1 | use anyhow::Result; 2 | use async_trait::async_trait; 3 | use log::debug; 4 | use reqwest::{Method, Response}; 5 | 6 | use super::model; 7 | use crate::airflow::{model::common::DagRunList, traits::DagRunOperations}; 8 | 9 | use super::V1Client; 10 | 11 | #[async_trait] 12 | impl DagRunOperations for V1Client { 13 | async fn list_dagruns(&self, dag_id: &str) -> Result<DagRunList> { 14 | let response: Response = self 15 | .base_api(Method::GET, &format!("dags/{dag_id}/dagRuns"))? 16 | .query(&[("order_by", "-execution_date"), ("limit", "50")]) 17 | .send() 18 | .await? 19 | .error_for_status()?; 20 | 21 | let dagruns: model::dagrun::DAGRunCollectionResponse = response 22 | .json::<model::dagrun::DAGRunCollectionResponse>() 23 | .await?; 24 | Ok(dagruns.into()) 25 | } 26 | 27 | async fn list_all_dagruns(&self) -> Result<DagRunList> { 28 | let response: Response = self 29 | .base_api(Method::POST, "dags/~/dagRuns/list")? 30 | .json(&serde_json::json!({"page_limit": 200})) 31 | .send() 32 | .await? 33 | .error_for_status()?; 34 | let dagruns: model::dagrun::DAGRunCollectionResponse = response 35 | .json::<model::dagrun::DAGRunCollectionResponse>() 36 | .await?; 37 | Ok(dagruns.into()) 38 | } 39 | 40 | async fn mark_dag_run(&self, dag_id: &str, dag_run_id: &str, status: &str) -> Result<()> { 41 | self.base_api( 42 | Method::PATCH, 43 | &format!("dags/{dag_id}/dagRuns/{dag_run_id}"), 44 | )?
45 | .json(&serde_json::json!({"state": status})) 46 | .send() 47 | .await? 48 | .error_for_status()?; 49 | Ok(()) 50 | } 51 | 52 | async fn clear_dagrun(&self, dag_id: &str, dag_run_id: &str) -> Result<()> { 53 | self.base_api( 54 | Method::POST, 55 | &format!("dags/{dag_id}/dagRuns/{dag_run_id}/clear"), 56 | )? 57 | .json(&serde_json::json!({"dry_run": false})) 58 | .send() 59 | .await? 60 | .error_for_status()?; 61 | Ok(()) 62 | } 63 | 64 | async fn trigger_dag_run(&self, dag_id: &str, logical_date: Option<&str>) -> Result<()> { 65 | // Somehow Airflow V1 API does not accept null for logical_date 66 | let body = logical_date.map_or_else( 67 | || serde_json::json!({}), 68 | |date| serde_json::json!({ "logical_date": date }), 69 | ); 70 | 71 | let resp: Response = self 72 | .base_api(Method::POST, &format!("dags/{dag_id}/dagRuns"))? 73 | .json(&body) 74 | .send() 75 | .await? 76 | .error_for_status()?; 77 | debug!("{resp:?}"); 78 | Ok(()) 79 | } 80 | } 81 | -------------------------------------------------------------------------------- /src/commands/run.rs: -------------------------------------------------------------------------------- 1 | use std::fs::File; 2 | use std::path::PathBuf; 3 | use std::sync::{Arc, Mutex}; 4 | 5 | use clap::Parser; 6 | use crossterm::event::{DisableFocusChange, EnableFocusChange}; 7 | use crossterm::ExecutableCommand; 8 | use log::{info, LevelFilter}; 9 | use simplelog::{Config, WriteLogger}; 10 | 11 | use crate::airflow::config::FlowrsConfig; 12 | use crate::app::run_app; 13 | use crate::app::state::App; 14 | use crate::CONFIG_PATHS; 15 | use anyhow::Result; 16 | 17 | #[derive(Parser, Debug)] 18 | pub struct RunCommand { 19 | #[clap(short, long)] 20 | pub file: Option<PathBuf>, 21 | } 22 | 23 | impl RunCommand { 24 | pub async fn run(&self) -> Result<()> { 25 | // setup logging 26 | if let Ok(log_level) = std::env::var("FLOWRS_LOG") { 27 | setup_logging(&log_level)?; 28 | } 29 | 30 | // Read config file 31 | let path =
self.file.as_ref().map(PathBuf::from); 32 | let (config, errors) = FlowrsConfig::from_file(path.as_ref())? 33 | .expand_managed_services() 34 | .await?; 35 | 36 | // Generate warnings for legacy config conflict (only when no explicit --file) 37 | let mut warnings = Vec::new(); 38 | if self.file.is_none() && CONFIG_PATHS.has_legacy_conflict { 39 | let legacy_path = 40 | dirs::home_dir().map_or_else(|| PathBuf::from("~/.flowrs"), |h| h.join(".flowrs")); 41 | warnings.push(format!( 42 | "Configuration file found in both locations:\n \ 43 | - {} (active)\n \ 44 | - {} (ignored)\n\n\ 45 | Consider removing the legacy file.", 46 | CONFIG_PATHS.write_path.display(), 47 | legacy_path.display() 48 | )); 49 | } 50 | 51 | // setup terminal (includes panic hooks) and run app 52 | let mut terminal = ratatui::init(); 53 | std::io::stdout().execute(EnableFocusChange)?; 54 | 55 | let app = App::new_with_errors_and_warnings(config, errors, warnings); 56 | let result = run_app(&mut terminal, Arc::new(Mutex::new(app))).await; 57 | 58 | info!("Shutting down the terminal..."); 59 | std::io::stdout().execute(DisableFocusChange)?; 60 | ratatui::restore(); 61 | result 62 | } 63 | } 64 | 65 | fn setup_logging(log_level: &str) -> Result<()> { 66 | let log_file = format!( 67 | "./flowrs-debug-{}.log", 68 | chrono::Local::now().format("%Y%m%d%H%M%S") 69 | ); 70 | let log_level = match log_level.to_lowercase().as_str() { 71 | "debug" => LevelFilter::Debug, 72 | "trace" => LevelFilter::Trace, 73 | "warn" => LevelFilter::Warn, 74 | "error" => LevelFilter::Error, 75 | _ => LevelFilter::Info, 76 | }; 77 | 78 | WriteLogger::init(log_level, Config::default(), File::create(log_file)?)?; 79 | Ok(()) 80 | } 81 | -------------------------------------------------------------------------------- /src/airflow/model/common/dagstats.rs: -------------------------------------------------------------------------------- 1 | use crate::airflow::client::v1; 2 | use crate::airflow::client::v2; 3 | use
serde::{Deserialize, Serialize}; 4 | 5 | /// Common `DagStats` model used by the application 6 | #[derive(Debug, Clone, Serialize, Deserialize)] 7 | pub struct DagStatsResponse { 8 | pub dags: Vec<DagStatistics>, 9 | pub total_entries: u64, 10 | } 11 | 12 | #[derive(Debug, Clone, Serialize, Deserialize)] 13 | pub struct DagStatistics { 14 | pub dag_id: String, 15 | pub stats: Vec<DagStatistic>, 16 | } 17 | 18 | #[derive(Debug, Clone, Serialize, Deserialize)] 19 | pub struct DagStatistic { 20 | pub state: String, 21 | pub count: u64, 22 | } 23 | 24 | // From trait implementations for v1 models 25 | impl From<v1::model::dagstats::DagStatsResponse> for DagStatsResponse { 26 | fn from(value: v1::model::dagstats::DagStatsResponse) -> Self { 27 | DagStatsResponse { 28 | dags: value 29 | .dags 30 | .into_iter() 31 | .map(std::convert::Into::into) 32 | .collect(), 33 | total_entries: value.total_entries, 34 | } 35 | } 36 | } 37 | 38 | impl From<v1::model::dagstats::DagStatistics> for DagStatistics { 39 | fn from(value: v1::model::dagstats::DagStatistics) -> Self { 40 | DagStatistics { 41 | dag_id: value.dag_id, 42 | stats: value 43 | .stats 44 | .into_iter() 45 | .map(std::convert::Into::into) 46 | .collect(), 47 | } 48 | } 49 | } 50 | 51 | impl From<v1::model::dagstats::DagStatistic> for DagStatistic { 52 | fn from(value: v1::model::dagstats::DagStatistic) -> Self { 53 | DagStatistic { 54 | state: value.state, 55 | count: value.count, 56 | } 57 | } 58 | } 59 | 60 | // From trait implementations for v2 models 61 | impl From<v2::model::dagstats::DagStatsResponse> for DagStatsResponse { 62 | fn from(value: v2::model::dagstats::DagStatsResponse) -> Self { 63 | DagStatsResponse { 64 | dags: value 65 | .dags 66 | .into_iter() 67 | .map(std::convert::Into::into) 68 | .collect(), 69 | total_entries: value.total_entries, 70 | } 71 | } 72 | } 73 | 74 | impl From<v2::model::dagstats::DagStatistics> for DagStatistics { 75 | fn from(value: v2::model::dagstats::DagStatistics) -> Self { 76 | DagStatistics { 77 | pub_dag_id_placeholder_removed: () } } } /* see corrected body below */ 78 |
fn from(value: v2::model::dagstats::DagStatistic) -> Self { 89 | DagStatistic { 90 | state: value.state, 91 | count: value.count, 92 | } 93 | } 94 | } 95 | -------------------------------------------------------------------------------- /src/airflow/client/v1/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod model; 2 | 3 | mod dag; 4 | mod dagrun; 5 | mod dagstats; 6 | mod log; 7 | mod taskinstance; 8 | 9 | use anyhow::Result; 10 | use reqwest::Method; 11 | use url::{form_urlencoded, Url}; 12 | 13 | use super::base::BaseClient; 14 | use crate::airflow::{config::AirflowVersion, traits::AirflowClient}; 15 | use crate::app::worker::OpenItem; 16 | 17 | /// API v1 client implementation (for Airflow v2, uses /api/v1 endpoint) 18 | #[derive(Debug, Clone)] 19 | pub struct V1Client { 20 | base: BaseClient, 21 | } 22 | 23 | impl V1Client { 24 | const API_VERSION: &'static str = "api/v1"; 25 | 26 | pub fn new(base: BaseClient) -> Self { 27 | Self { base } 28 | } 29 | 30 | fn base_api(&self, method: Method, endpoint: &str) -> Result<reqwest::RequestBuilder> { 31 | self.base.base_api(method, endpoint, Self::API_VERSION) 32 | } 33 | } 34 | 35 | impl AirflowClient for V1Client { 36 | fn get_version(&self) -> AirflowVersion { 37 | AirflowVersion::V2 38 | } 39 | 40 | fn build_open_url(&self, item: &OpenItem) -> Result<String> { 41 | let mut base_url = Url::parse(&self.base.config.endpoint)?; 42 | 43 | match item { 44 | OpenItem::Config(config_endpoint) => { 45 | base_url = config_endpoint.parse()?; 46 | } 47 | OpenItem::Dag { dag_id } => { 48 | base_url = base_url.join(&format!("dags/{dag_id}"))?; 49 | } 50 | OpenItem::DagRun { dag_id, dag_run_id } => { 51 | let escaped_dag_run_id: String = 52 | form_urlencoded::byte_serialize(dag_run_id.as_bytes()).collect(); 53 | base_url = base_url.join(&format!("dags/{dag_id}/grid"))?; 54 | base_url.set_query(Some(&format!("dag_run_id={escaped_dag_run_id}"))); 55 | } 56 | OpenItem::TaskInstance { 57 | dag_id, 58 | dag_run_id,
59 | task_id, 60 | } => { 61 | let escaped_dag_run_id: String = 62 | form_urlencoded::byte_serialize(dag_run_id.as_bytes()).collect(); 63 | base_url = base_url.join(&format!("dags/{dag_id}/grid"))?; 64 | base_url.set_query(Some(&format!( 65 | "dag_run_id={escaped_dag_run_id}&task_id={task_id}" 66 | ))); 67 | } 68 | OpenItem::Log { 69 | dag_id, 70 | dag_run_id, 71 | task_id, 72 | task_try: _, 73 | } => { 74 | let escaped_dag_run_id: String = 75 | form_urlencoded::byte_serialize(dag_run_id.as_bytes()).collect(); 76 | base_url = base_url.join(&format!("dags/{dag_id}/grid"))?; 77 | base_url.set_query(Some(&format!( 78 | "dag_run_id={escaped_dag_run_id}&task_id={task_id}&tab=logs" 79 | ))); 80 | } 81 | } 82 | 83 | Ok(base_url.to_string()) 84 | } 85 | } 86 | -------------------------------------------------------------------------------- /src/app/model/filter.rs: -------------------------------------------------------------------------------- 1 | use crossterm::event::{KeyCode, KeyEvent}; 2 | use ratatui::{ 3 | buffer::Buffer, 4 | layout::{Position, Rect}, 5 | style::Styled, 6 | widgets::{Block, BorderType, Borders, Paragraph, Widget}, 7 | }; 8 | 9 | use crate::ui::constants::DEFAULT_STYLE; 10 | 11 | #[derive(Clone, Default)] 12 | pub struct CursorState { 13 | pub position: Position, 14 | } 15 | 16 | #[derive(Default)] 17 | pub struct Filter { 18 | pub enabled: bool, 19 | pub prefix: Option<String>, 20 | pub cursor: CursorState, 21 | } 22 | 23 | impl Filter { 24 | pub fn new() -> Filter { 25 | Self::default() 26 | } 27 | 28 | pub fn toggle(&mut self) { 29 | self.enabled = !self.enabled; 30 | } 31 | 32 | pub fn is_enabled(&self) -> bool { 33 | self.enabled 34 | } 35 | 36 | /// Returns true if a filter is active (has non-empty prefix) 37 | pub fn is_active(&self) -> bool { 38 | self.prefix.as_ref().is_some_and(|p| !p.is_empty()) 39 | } 40 | 41 | pub fn prefix(&self) -> Option<&String> { 42 | self.prefix.as_ref() 43 | } 44 | 45 | pub fn reset(&mut self) { 46 | self.enabled = false;
47 | self.prefix = None; 48 | } 49 | 50 | pub fn update(&mut self, key_event: &KeyEvent) { 51 | match key_event.code { 52 | KeyCode::Esc | KeyCode::Enter => { 53 | self.toggle(); 54 | } 55 | KeyCode::Backspace => { 56 | if let Some(ref mut prefix) = self.prefix { 57 | prefix.pop(); 58 | } 59 | } 60 | KeyCode::Char(c) => match self.prefix { 61 | Some(ref mut prefix) => { 62 | prefix.push(c); 63 | } 64 | None => { 65 | self.prefix = Some(c.to_string()); 66 | } 67 | }, 68 | _ => {} 69 | } 70 | } 71 | pub fn cursor_position(&self) -> &Position { 72 | &self.cursor.position 73 | } 74 | } 75 | 76 | impl Widget for &mut Filter { 77 | #[allow(clippy::cast_possible_truncation)] 78 | fn render(self, area: Rect, buf: &mut Buffer) { 79 | let filter = self.prefix().cloned(); 80 | let binding = String::new(); 81 | let filter_text = filter.unwrap_or(binding); 82 | let filter_length = filter_text.len(); 83 | self.cursor.position = Position { 84 | x: area.x + 1 + filter_length as u16, 85 | y: area.y + 1, 86 | }; 87 | 88 | let paragraph = Paragraph::new(filter_text.as_str()) 89 | .block( 90 | Block::default() 91 | .border_type(BorderType::Rounded) 92 | .borders(Borders::ALL) 93 | .title("filter"), 94 | ) 95 | .set_style(DEFAULT_STYLE); 96 | 97 | Widget::render(paragraph, area, buf); 98 | } 99 | } 100 | -------------------------------------------------------------------------------- /src/app/worker/taskinstances.rs: -------------------------------------------------------------------------------- 1 | use std::sync::{Arc, Mutex}; 2 | 3 | use log::debug; 4 | 5 | use crate::airflow::traits::AirflowClient; 6 | use crate::app::model::popup::error::ErrorPopup; 7 | use crate::app::model::popup::taskinstances::mark::MarkState; 8 | use crate::app::state::App; 9 | 10 | /// Handle updating the list of task instances for a specific DAG run.
11 | pub async fn handle_update_task_instances( 12 | app: &Arc>, 13 | client: &Arc, 14 | dag_id: &str, 15 | dag_run_id: &str, 16 | ) { 17 | let task_instances = client.list_task_instances(dag_id, dag_run_id).await; 18 | let mut app = app.lock().unwrap(); 19 | match task_instances { 20 | Ok(task_instances) => { 21 | // Store task instances in the environment state 22 | if let Some(env) = app.environment_state.get_active_environment_mut() { 23 | for task_instance in &task_instances.task_instances { 24 | env.upsert_task_instance(task_instance.clone()); 25 | } 26 | } 27 | // Sync panel data from environment state to refresh with new API data 28 | app.sync_panel_data(); 29 | } 30 | Err(e) => { 31 | log::error!("Error getting task instances: {e:?}"); 32 | app.task_instances.error_popup = Some(ErrorPopup::from_strings(vec![e.to_string()])); 33 | } 34 | } 35 | } 36 | 37 | /// Handle clearing a task instance (resets it to be re-run). 38 | pub async fn handle_clear_task_instance( 39 | app: &Arc>, 40 | client: &Arc, 41 | dag_id: &str, 42 | dag_run_id: &str, 43 | task_id: &str, 44 | ) { 45 | debug!("Clearing task_instance: {task_id}"); 46 | let task_instance = client 47 | .clear_task_instance(dag_id, dag_run_id, task_id) 48 | .await; 49 | if let Err(e) = task_instance { 50 | debug!("Error clearing task_instance: {e}"); 51 | let mut app = app.lock().unwrap(); 52 | app.task_instances.error_popup = Some(ErrorPopup::from_strings(vec![e.to_string()])); 53 | } 54 | } 55 | 56 | /// Handle marking a task instance with a new state (success/failed). 
57 | pub async fn handle_mark_task_instance( 58 | app: &Arc>, 59 | client: &Arc, 60 | dag_id: &str, 61 | dag_run_id: &str, 62 | task_id: &str, 63 | status: MarkState, 64 | ) { 65 | debug!("Marking task_instance: {task_id}"); 66 | { 67 | // Update the local state before sending the request; this way, the UI will update immediately 68 | let mut app = app.lock().unwrap(); 69 | app.task_instances 70 | .mark_task_instance(task_id, &status.to_string()); 71 | } 72 | let task_instance = client 73 | .mark_task_instance(dag_id, dag_run_id, task_id, &status.to_string()) 74 | .await; 75 | if let Err(e) = task_instance { 76 | debug!("Error marking task_instance: {e}"); 77 | let mut app = app.lock().unwrap(); 78 | app.task_instances.error_popup = Some(ErrorPopup::from_strings(vec![e.to_string()])); 79 | } 80 | } 81 | -------------------------------------------------------------------------------- /src/airflow/client/v2/model/taskinstance.rs: -------------------------------------------------------------------------------- 1 | use serde::{Deserialize, Serialize}; 2 | use time::OffsetDateTime; 3 | 4 | use super::dagrun::DagVersion; 5 | #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] 6 | #[serde(rename_all = "camelCase")] 7 | pub struct TaskInstanceList { 8 | #[serde(rename = "task_instances")] 9 | pub task_instances: Vec, 10 | #[serde(rename = "total_entries")] 11 | pub total_entries: i64, 12 | } 13 | 14 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] 15 | pub struct TaskInstance { 16 | pub id: String, 17 | pub task_id: String, 18 | pub dag_id: String, 19 | pub dag_run_id: String, 20 | pub map_index: i64, 21 | #[serde(with = "time::serde::iso8601::option")] 22 | pub logical_date: Option, 23 | #[serde(with = "time::serde::iso8601")] 24 | pub run_after: OffsetDateTime, 25 | #[serde(with = "time::serde::iso8601::option")] 26 | pub start_date: Option, 27 | #[serde(with = "time::serde::iso8601::option")] 28 | pub end_date: Option, 29 | pub duration: 
Option<f64>, 30 | pub state: Option<String>, 31 | pub try_number: i64, 32 | pub max_tries: i64, 33 | pub task_display_name: String, 34 | pub hostname: Option<String>, 35 | pub unixname: Option<String>, 36 | pub pool: String, 37 | pub pool_slots: i64, 38 | pub queue: Option<String>, 39 | pub priority_weight: Option<i64>, 40 | pub operator: Option<String>, 41 | pub operator_name: Option<String>, 42 | #[serde(with = "time::serde::iso8601::option")] 43 | pub queued_when: Option<OffsetDateTime>, 44 | #[serde(with = "time::serde::iso8601::option")] 45 | pub scheduled_when: Option<OffsetDateTime>, 46 | pub pid: Option<i64>, 47 | pub executor: Option<String>, 48 | pub executor_config: String, 49 | pub note: Option<String>, 50 | pub rendered_map_index: Option<String>, 51 | pub rendered_fields: serde_json::Value, 52 | pub trigger: Option<Trigger>, 53 | pub triggerer_job: Option<Job>, 54 | pub dag_version: Option<DagVersion>, 55 | } 56 | 57 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] 58 | pub struct Trigger { 59 | pub id: i64, 60 | pub classpath: String, 61 | pub kwargs: String, 62 | #[serde(with = "time::serde::iso8601")] 63 | pub created_date: OffsetDateTime, 64 | pub triggerer_id: Option<i64>, 65 | } 66 | 67 | #[allow(clippy::struct_field_names)] 68 | #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] 69 | pub struct Job { 70 | pub id: i64, 71 | pub dag_id: Option<String>, 72 | pub state: Option<String>, 73 | pub job_type: Option<String>, 74 | #[serde(with = "time::serde::iso8601::option")] 75 | pub start_date: Option<OffsetDateTime>, 76 | #[serde(with = "time::serde::iso8601::option")] 77 | pub end_date: Option<OffsetDateTime>, 78 | #[serde(with = "time::serde::iso8601::option")] 79 | pub latest_heartbeat: Option<OffsetDateTime>, 80 | pub executor_class: Option<String>, 81 | pub hostname: Option<String>, 82 | pub unixname: Option<String>, 83 | } 84 | -------------------------------------------------------------------------------- /src/airflow/client/v2/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod model; 2 | 3 | mod dag; 4 | mod dagrun; 5 | mod dagstats; 6 | mod log; 7 | mod taskinstance; 8 | 9 | use anyhow::Result; 10 | use
reqwest::Method; 11 | use url::{form_urlencoded, Url}; 12 | 13 | use super::base::BaseClient; 14 | use crate::airflow::{config::AirflowVersion, traits::AirflowClient}; 15 | use crate::app::worker::OpenItem; 16 | 17 | /// API v2 client implementation (for Airflow v3, uses /api/v2 endpoint) 18 | #[derive(Debug, Clone)] 19 | pub struct V2Client { 20 | base: BaseClient, 21 | } 22 | 23 | impl V2Client { 24 | const API_VERSION: &'static str = "api/v2"; 25 | 26 | pub fn new(base: BaseClient) -> Self { 27 | Self { base } 28 | } 29 | 30 | fn base_api(&self, method: Method, endpoint: &str) -> Result<reqwest::RequestBuilder> { 31 | self.base.base_api(method, endpoint, Self::API_VERSION) 32 | } 33 | } 34 | 35 | impl AirflowClient for V2Client { 36 | fn get_version(&self) -> AirflowVersion { 37 | AirflowVersion::V3 38 | } 39 | 40 | fn build_open_url(&self, item: &OpenItem) -> Result<String> { 41 | let mut base_url = Url::parse(&self.base.config.endpoint)?; 42 | 43 | match item { 44 | OpenItem::Config(config_endpoint) => { 45 | base_url = config_endpoint.parse()?; 46 | } 47 | OpenItem::Dag { dag_id } => { 48 | base_url = base_url.join(&format!("dags/{dag_id}"))?; 49 | } 50 | OpenItem::DagRun { dag_id, dag_run_id } => { 51 | let escaped_dag_run_id: String = 52 | form_urlencoded::byte_serialize(dag_run_id.as_bytes()).collect(); 53 | base_url = base_url.join(&format!("dags/{dag_id}/runs/{escaped_dag_run_id}"))?; 54 | } 55 | OpenItem::TaskInstance { 56 | dag_id, 57 | dag_run_id, 58 | task_id, 59 | } => { 60 | let escaped_dag_run_id: String = 61 | form_urlencoded::byte_serialize(dag_run_id.as_bytes()).collect(); 62 | let escaped_task_id: String = 63 | form_urlencoded::byte_serialize(task_id.as_bytes()).collect(); 64 | base_url = base_url.join(&format!( 65 | "dags/{dag_id}/runs/{escaped_dag_run_id}/tasks/{escaped_task_id}" 66 | ))?; 67 | } 68 | OpenItem::Log { 69 | dag_id, 70 | dag_run_id, 71 | task_id, 72 | task_try, 73 | } => { 74 | let escaped_dag_run_id: String = 75 |
form_urlencoded::byte_serialize(dag_run_id.as_bytes()).collect(); 76 | let escaped_task_id: String = 77 | form_urlencoded::byte_serialize(task_id.as_bytes()).collect(); 78 | base_url = base_url.join(&format!( 79 | "dags/{dag_id}/runs/{escaped_dag_run_id}/tasks/{escaped_task_id}" 80 | ))?; 81 | base_url.set_query(Some(&format!("tab=logs&try_number={task_try}"))); 82 | } 83 | } 84 | 85 | Ok(base_url.to_string()) 86 | } 87 | } 88 | -------------------------------------------------------------------------------- /src/app/worker/dagruns.rs: -------------------------------------------------------------------------------- 1 | use std::sync::{Arc, Mutex}; 2 | 3 | use log::debug; 4 | 5 | use crate::airflow::traits::AirflowClient; 6 | use crate::app::model::popup::dagruns::mark::MarkState; 7 | use crate::app::model::popup::error::ErrorPopup; 8 | use crate::app::state::App; 9 | 10 | /// Handle updating the list of DAG runs for a specific DAG. 11 | pub async fn handle_update_dag_runs( 12 | app: &Arc>, 13 | client: &Arc, 14 | dag_id: &str, 15 | ) { 16 | let dag_runs = client.list_dagruns(dag_id).await; 17 | let mut app = app.lock().unwrap(); 18 | match dag_runs { 19 | Ok(dag_runs) => { 20 | // Store DAG runs in the environment state 21 | if let Some(env) = app.environment_state.get_active_environment_mut() { 22 | for dag_run in &dag_runs.dag_runs { 23 | env.upsert_dag_run(dag_run.clone()); 24 | } 25 | } 26 | // Sync panel data from environment state to refresh with new API data 27 | app.sync_panel_data(); 28 | } 29 | Err(e) => { 30 | app.dagruns.error_popup = Some(ErrorPopup::from_strings(vec![e.to_string()])); 31 | } 32 | } 33 | } 34 | 35 | /// Handle clearing a DAG run (resets all task instances). 
36 | pub async fn handle_clear_dag_run( 37 | app: &Arc>, 38 | client: &Arc, 39 | dag_id: &str, 40 | dag_run_id: &str, 41 | ) { 42 | debug!("Clearing dag_run: {dag_run_id}"); 43 | let dag_run = client.clear_dagrun(dag_id, dag_run_id).await; 44 | if let Err(e) = dag_run { 45 | debug!("Error clearing dag_run: {e}"); 46 | let mut app = app.lock().unwrap(); 47 | app.dagruns.error_popup = Some(ErrorPopup::from_strings(vec![e.to_string()])); 48 | } 49 | } 50 | 51 | /// Handle marking a DAG run with a new state (success/failed). 52 | pub async fn handle_mark_dag_run( 53 | app: &Arc>, 54 | client: &Arc, 55 | dag_id: &str, 56 | dag_run_id: &str, 57 | status: MarkState, 58 | ) { 59 | debug!("Marking dag_run: {dag_run_id}"); 60 | { 61 | // Update the local state before sending the request; this way, the UI will update immediately 62 | let mut app = app.lock().unwrap(); 63 | app.dagruns.mark_dag_run(dag_run_id, &status.to_string()); 64 | } 65 | let dag_run = client 66 | .mark_dag_run(dag_id, dag_run_id, &status.to_string()) 67 | .await; 68 | if let Err(e) = dag_run { 69 | debug!("Error marking dag_run: {e}"); 70 | let mut app = app.lock().unwrap(); 71 | app.dagruns.error_popup = Some(ErrorPopup::from_strings(vec![e.to_string()])); 72 | } 73 | } 74 | 75 | /// Handle triggering a new DAG run. 
76 | pub async fn handle_trigger_dag_run( 77 | app: &Arc>, 78 | client: &Arc, 79 | dag_id: &str, 80 | ) { 81 | debug!("Triggering dag_run: {dag_id}"); 82 | let dag_run = client.trigger_dag_run(dag_id, None).await; 83 | if let Err(e) = dag_run { 84 | debug!("Error triggering dag_run: {e}"); 85 | let mut app = app.lock().unwrap(); 86 | app.dagruns.error_popup = Some(ErrorPopup::from_strings(vec![e.to_string()])); 87 | } 88 | } 89 | -------------------------------------------------------------------------------- /src/commands/config/update.rs: -------------------------------------------------------------------------------- 1 | use std::path::PathBuf; 2 | 3 | use inquire::Select; 4 | use log::info; 5 | use strum::IntoEnumIterator; 6 | 7 | use super::model::UpdateCommand; 8 | use crate::{ 9 | airflow::config::{AirflowAuth, AirflowConfig, BasicAuth, FlowrsConfig, TokenCmd}, 10 | commands::config::model::{validate_endpoint, ConfigOption}, 11 | }; 12 | 13 | use anyhow::Result; 14 | 15 | impl UpdateCommand { 16 | pub fn run(&self) -> Result<()> { 17 | let path = self.file.as_ref().map(PathBuf::from); 18 | let mut config = FlowrsConfig::from_file(path.as_ref())?; 19 | 20 | if config.servers.is_none() { 21 | println!("❌ No servers found in config file"); 22 | return Ok(()); 23 | } 24 | 25 | let mut servers = config.servers.unwrap(); 26 | 27 | let name: String = if self.name.is_none() { 28 | Select::new( 29 | "name", 30 | servers.iter().map(|server| server.name.clone()).collect(), 31 | ) 32 | .prompt()? 
33 | } else { 34 | self.name.clone().unwrap() 35 | }; 36 | 37 | let airflow_config: &mut AirflowConfig = servers 38 | .iter_mut() 39 | .find(|server| server.name == name) 40 | .expect("🤔 Airflow config not found ..."); 41 | 42 | let name = inquire::Text::new("name") 43 | .with_default(&airflow_config.name) 44 | .prompt()?; 45 | let endpoint = inquire::Text::new("endpoint") 46 | .with_default(&airflow_config.endpoint) 47 | .with_validator(validate_endpoint) 48 | .prompt()?; 49 | 50 | let auth_type = 51 | Select::new("authentication type", ConfigOption::iter().collect()).prompt()?; 52 | 53 | airflow_config.name = name; 54 | airflow_config.endpoint = endpoint; 55 | match auth_type { 56 | ConfigOption::BasicAuth => { 57 | let username = inquire::Text::new("username").prompt()?; 58 | let password = inquire::Password::new("password") 59 | .with_display_toggle_enabled() 60 | .prompt()?; 61 | 62 | airflow_config.auth = AirflowAuth::Basic(BasicAuth { username, password }); 63 | } 64 | ConfigOption::Token(_) => { 65 | let cmd = Some(inquire::Text::new("cmd").prompt()?); 66 | let token: String; 67 | if let Some(cmd) = &cmd { 68 | info!("🔑 Running command: {cmd}"); 69 | let output = std::process::Command::new(cmd) 70 | .output() 71 | .expect("failed to execute process"); 72 | token = String::from_utf8(output.stdout)?; 73 | } else { 74 | token = inquire::Text::new("token").prompt()?; 75 | } 76 | airflow_config.auth = AirflowAuth::Token(TokenCmd { 77 | cmd, 78 | token: Some(token), 79 | }); 80 | } 81 | } 82 | 83 | config.servers = Some(servers); 84 | config.write_to_file()?; 85 | 86 | println!("✅ Config updated successfully!"); 87 | Ok(()) 88 | } 89 | } 90 | -------------------------------------------------------------------------------- /src/airflow/config/paths.rs: -------------------------------------------------------------------------------- 1 | use std::path::PathBuf; 2 | 3 | use dirs::home_dir; 4 | 5 | /// Manages configuration file paths with XDG support and legacy 
fallback. 6 | pub struct ConfigPaths { 7 | /// Path to read config from (XDG if exists, else legacy) 8 | pub read_path: PathBuf, 9 | /// Path to write config to (always XDG) 10 | pub write_path: PathBuf, 11 | /// True if both XDG and legacy config files exist 12 | pub has_legacy_conflict: bool, 13 | } 14 | 15 | impl ConfigPaths { 16 | /// Resolves configuration paths according to XDG spec with legacy fallback. 17 | /// 18 | /// Read precedence: 19 | /// 1. `$XDG_CONFIG_HOME/flowrs/config.toml` (or `~/.config/flowrs/config.toml`) 20 | /// 2. `~/.flowrs` (legacy) 21 | /// 22 | /// Write always goes to XDG path. 23 | pub fn resolve() -> Self { 24 | let xdg_path = Self::xdg_config_path(); 25 | let legacy_path = Self::legacy_config_path(); 26 | 27 | let xdg_exists = xdg_path.exists(); 28 | let legacy_exists = legacy_path.exists(); 29 | 30 | let read_path = if xdg_exists { 31 | xdg_path.clone() 32 | } else if legacy_exists { 33 | legacy_path.clone() 34 | } else { 35 | // Neither exists - default to XDG for new configs 36 | xdg_path.clone() 37 | }; 38 | 39 | ConfigPaths { 40 | read_path, 41 | write_path: xdg_path, 42 | has_legacy_conflict: xdg_exists && legacy_exists, 43 | } 44 | } 45 | 46 | /// Returns the XDG config path: `$XDG_CONFIG_HOME/flowrs/config.toml` 47 | /// Falls back to `~/.config/flowrs/config.toml` if `XDG_CONFIG_HOME` is unset or empty. 
48 | fn xdg_config_path() -> PathBuf { 49 | // Check XDG_CONFIG_HOME first, fall back to ~/.config (per XDG spec) 50 | let base_dir = std::env::var("XDG_CONFIG_HOME") 51 | .ok() 52 | .filter(|s| !s.is_empty()) 53 | .map_or_else( 54 | || { 55 | home_dir() 56 | .expect("Could not determine user home directory") 57 | .join(".config") 58 | }, 59 | PathBuf::from, 60 | ); 61 | 62 | base_dir.join("flowrs").join("config.toml") 63 | } 64 | 65 | /// Returns the legacy config path: `~/.flowrs` 66 | fn legacy_config_path() -> PathBuf { 67 | home_dir() 68 | .expect("Could not determine user home directory") 69 | .join(".flowrs") 70 | } 71 | 72 | /// Returns the XDG config directory (for creating if needed). 73 | pub fn xdg_config_dir(&self) -> PathBuf { 74 | self.write_path 75 | .parent() 76 | .expect("Config write path should have a parent directory") 77 | .to_path_buf() 78 | } 79 | } 80 | 81 | #[cfg(test)] 82 | mod tests { 83 | use super::*; 84 | 85 | #[test] 86 | fn test_xdg_config_path_structure() { 87 | let path = ConfigPaths::xdg_config_path(); 88 | assert!(path.ends_with("flowrs/config.toml")); 89 | } 90 | 91 | #[test] 92 | fn test_legacy_config_path_structure() { 93 | let path = ConfigPaths::legacy_config_path(); 94 | assert!(path.ends_with(".flowrs")); 95 | } 96 | } 97 | -------------------------------------------------------------------------------- /docs/plans/2025-12-13-xdg-config-design.md: -------------------------------------------------------------------------------- 1 | # XDG Configuration Support 2 | 3 | ## Overview 4 | 5 | Add XDG Base Directory Specification support for flowrs configuration, with backwards compatibility for the legacy `~/.flowrs` config file. 6 | 7 | ## Config Paths 8 | 9 | **XDG Path:** `$XDG_CONFIG_HOME/flowrs/config.toml` 10 | - Falls back to `~/.config/flowrs/config.toml` if `$XDG_CONFIG_HOME` is unset or empty 11 | 12 | **Legacy Path:** `~/.flowrs` 13 | 14 | ## Read Behavior 15 | 16 | Resolution order: 17 | 1. 
- Title: "Warning - Press `<Esc>` or `<Enter>` to close"
82 | 83 | ## Edge Cases 84 | 85 | 1. **`$XDG_CONFIG_HOME` is set but empty string** → Treat as unset, fall back to `~/.config` 86 | 87 | 2. **XDG directory exists but `config.toml` doesn't** → Fall through to legacy path 88 | 89 | 3. **Neither config exists** → No warning, app starts normally 90 | 91 | 4. **Write fails due to permissions** → Existing error handling surfaces this 92 | 93 | ## Dependencies 94 | 95 | No new dependencies. The `dirs` crate already provides `dirs::config_dir()` which returns the appropriate XDG path. 96 | 97 | ## Documentation Updates 98 | 99 | - Update `README.md` to mention both config locations 100 | - Update `CLAUDE.md` to reflect the new config path behavior 101 | -------------------------------------------------------------------------------- /src/airflow/client/v2/model/dag.rs: -------------------------------------------------------------------------------- 1 | use serde::{Deserialize, Serialize}; 2 | use time::OffsetDateTime; 3 | 4 | #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] 5 | pub struct DagList { 6 | pub dags: Vec, 7 | pub total_entries: i64, 8 | } 9 | 10 | #[allow(clippy::struct_excessive_bools, clippy::struct_field_names)] 11 | #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] 12 | #[serde(rename_all = "camelCase")] 13 | pub struct Dag { 14 | #[serde(rename = "dag_id")] 15 | pub dag_id: String, 16 | #[serde(rename = "dag_display_name")] 17 | pub dag_display_name: String, 18 | #[serde(rename = "is_paused")] 19 | pub is_paused: bool, 20 | #[serde(rename = "is_stale")] 21 | pub is_stale: bool, 22 | #[serde(rename = "last_parsed_time", with = "time::serde::iso8601::option")] 23 | pub last_parsed_time: Option, 24 | #[serde(rename = "last_parse_duration")] 25 | pub last_parse_duration: Option, 26 | #[serde(rename = "last_expired", with = "time::serde::iso8601::option")] 27 | pub last_expired: Option, 28 | #[serde(rename = "bundle_name")] 29 | pub bundle_name: Option, 30 | #[serde(rename 
= "bundle_version")] 31 | pub bundle_version: Option, 32 | #[serde(rename = "relative_fileloc")] 33 | pub relative_fileloc: Option, 34 | pub fileloc: String, 35 | pub description: Option, 36 | #[serde(rename = "timetable_summary")] 37 | pub timetable_summary: Option, 38 | #[serde(rename = "timetable_description")] 39 | pub timetable_description: Option, 40 | pub tags: Vec, 41 | #[serde(rename = "max_active_tasks")] 42 | pub max_active_tasks: i64, 43 | #[serde(rename = "max_active_runs")] 44 | pub max_active_runs: Option, 45 | #[serde(rename = "max_consecutive_failed_dag_runs")] 46 | pub max_consecutive_failed_dag_runs: i64, 47 | #[serde(rename = "has_task_concurrency_limits")] 48 | pub has_task_concurrency_limits: bool, 49 | #[serde(rename = "has_import_errors")] 50 | pub has_import_errors: bool, 51 | #[serde( 52 | rename = "next_dagrun_logical_date", 53 | with = "time::serde::iso8601::option" 54 | )] 55 | pub next_dagrun_logical_date: Option, 56 | #[serde( 57 | rename = "next_dagrun_data_interval_start", 58 | with = "time::serde::iso8601::option" 59 | )] 60 | pub next_dagrun_data_interval_start: Option, 61 | #[serde( 62 | rename = "next_dagrun_data_interval_end", 63 | with = "time::serde::iso8601::option" 64 | )] 65 | pub next_dagrun_data_interval_end: Option, 66 | #[serde( 67 | rename = "next_dagrun_run_after", 68 | with = "time::serde::iso8601::option" 69 | )] 70 | pub next_dagrun_run_after: Option, 71 | #[serde(rename = "owners")] 72 | pub owners: Vec, 73 | #[serde(rename = "file_token")] 74 | pub file_token: String, 75 | } 76 | 77 | #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] 78 | pub struct Tag { 79 | pub name: String, 80 | } 81 | 82 | #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] 83 | pub struct DagSource { 84 | pub content: String, 85 | pub dag_id: String, 86 | pub version_number: i64, 87 | } 88 | -------------------------------------------------------------------------------- 
Update the `From<Event>` implementation for `FlowrsEvent` to map:
Main Loop (`src/app.rs`) 59 | 60 | Handle focus events early in the event loop and filter Ticks when unfocused: 61 | 62 | ```rust 63 | if let Some(event) = events.next().await { 64 | // Handle focus changes first 65 | match &event { 66 | FlowrsEvent::FocusGained => { 67 | app.lock().unwrap().focused = true; 68 | continue; 69 | } 70 | FlowrsEvent::FocusLost => { 71 | app.lock().unwrap().focused = false; 72 | continue; 73 | } 74 | _ => {} 75 | } 76 | 77 | // Skip tick processing when unfocused 78 | if let FlowrsEvent::Tick = &event { 79 | if !app.lock().unwrap().focused { 80 | continue; 81 | } 82 | } 83 | 84 | // ... rest of existing event handling ... 85 | } 86 | ``` 87 | 88 | ### 5. UI Indicator (Optional) 89 | 90 | Since `focused` is in App state, a visual indicator can be shown when paused (e.g., dimmed UI, "Paused" text in status bar, or different border color). 91 | 92 | ## File Summary 93 | 94 | | File | Change | 95 | |------|--------| 96 | | `src/main.rs` | Enable/disable focus reporting in terminal setup/teardown | 97 | | `src/app/events/custom.rs` | Add `FocusGained`/`FocusLost` variants + From impl | 98 | | `src/app/state.rs` | Add `focused: bool` field to App | 99 | | `src/app.rs` | Handle focus events, skip Ticks when unfocused | 100 | | `src/ui.rs` (optional) | Show paused indicator | 101 | 102 | ## Compatibility 103 | 104 | Most modern terminals support focus reporting (iTerm2, Kitty, Windows Terminal, Alacritty, etc.). Terminals that don't support it will simply ignore the escape sequences - the app continues to work normally, just without the pause-on-unfocus feature. 
105 | -------------------------------------------------------------------------------- /src/airflow/model/common/dagrun.rs: -------------------------------------------------------------------------------- 1 | use crate::airflow::client::v1; 2 | use crate::airflow::client::v2; 3 | use serde::{Deserialize, Serialize}; 4 | use time::OffsetDateTime; 5 | 6 | /// Common `DagRun` model used by the application 7 | #[allow(clippy::struct_field_names)] 8 | #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] 9 | pub struct DagRun { 10 | pub dag_id: String, 11 | pub dag_run_id: String, 12 | pub logical_date: Option, 13 | pub data_interval_end: Option, 14 | pub data_interval_start: Option, 15 | pub end_date: Option, 16 | pub start_date: Option, 17 | pub last_scheduling_decision: Option, 18 | pub run_type: String, 19 | pub state: String, 20 | pub note: Option, 21 | pub external_trigger: Option, 22 | } 23 | 24 | #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] 25 | pub struct DagRunList { 26 | pub dag_runs: Vec, 27 | pub total_entries: i64, 28 | } 29 | 30 | // From trait implementations for v1 models 31 | impl From for DagRun { 32 | fn from(value: v1::model::dagrun::DAGRunResponse) -> Self { 33 | DagRun { 34 | dag_id: value.dag_id, 35 | dag_run_id: value.dag_run_id.unwrap_or_default(), 36 | logical_date: value.logical_date, 37 | data_interval_end: value.data_interval_end, 38 | data_interval_start: value.data_interval_start, 39 | end_date: value.end_date, 40 | start_date: value.start_date, 41 | last_scheduling_decision: value.last_scheduling_decision, 42 | run_type: value.run_type, 43 | state: value.state, 44 | note: value.note, 45 | external_trigger: Some(value.external_trigger), 46 | } 47 | } 48 | } 49 | 50 | impl From for DagRunList { 51 | fn from(value: v1::model::dagrun::DAGRunCollectionResponse) -> Self { 52 | DagRunList { 53 | dag_runs: value 54 | .dag_runs 55 | .into_iter() 56 | .map(std::convert::Into::into) 57 | .collect(), 58 | 
total_entries: value.total_entries, 59 | } 60 | } 61 | } 62 | 63 | // From trait implementations for v2 models 64 | impl From for DagRun { 65 | fn from(value: v2::model::dagrun::DagRun) -> Self { 66 | DagRun { 67 | dag_id: value.dag_id, 68 | dag_run_id: value.dag_run_id, 69 | logical_date: value.logical_date, 70 | data_interval_end: value.data_interval_end, 71 | data_interval_start: value.data_interval_start, 72 | end_date: value.end_date, 73 | start_date: value.start_date, 74 | last_scheduling_decision: value.last_scheduling_decision, 75 | run_type: value.run_type, 76 | state: value.state, 77 | note: value.note, 78 | external_trigger: None, 79 | } 80 | } 81 | } 82 | 83 | impl From for DagRunList { 84 | fn from(value: v2::model::dagrun::DagRunList) -> Self { 85 | DagRunList { 86 | dag_runs: value 87 | .dag_runs 88 | .into_iter() 89 | .map(std::convert::Into::into) 90 | .collect(), 91 | total_entries: value.total_entries, 92 | } 93 | } 94 | } 95 | -------------------------------------------------------------------------------- /src/app/model/popup/commands_help.rs: -------------------------------------------------------------------------------- 1 | use ratatui::{ 2 | buffer::Buffer, 3 | layout::Rect, 4 | style::{Modifier, Style, Stylize}, 5 | text::{Line, Span, Text}, 6 | widgets::{Block, BorderType, Borders, Clear, Paragraph, Widget, Wrap}, 7 | }; 8 | 9 | use super::popup_area; 10 | 11 | pub struct Command<'a> { 12 | pub name: &'a str, 13 | pub key_binding: &'a str, 14 | pub description: &'a str, 15 | } 16 | pub struct CommandPopUp<'a> { 17 | pub title: String, 18 | pub commands: Vec>, 19 | } 20 | 21 | impl Widget for &CommandPopUp<'_> { 22 | fn render(self, area: Rect, buf: &mut Buffer) { 23 | let popup_area = popup_area(area, 80, 80); 24 | let popup = Block::default() 25 | .border_type(BorderType::Rounded) 26 | .title(self.title.as_str()) 27 | .borders(Borders::ALL); 28 | 29 | Clear.render(popup_area, buf); 30 | 31 | let text = self 32 | .commands 33 | .iter() 34 
| .map(|c| { 35 | Line::from(vec![ 36 | Span::styled( 37 | format!("<{}>: ", c.key_binding), 38 | Style::default().add_modifier(Modifier::BOLD), 39 | ), 40 | Span::styled( 41 | format!("{} - {}", c.name, c.description), 42 | Style::default().dark_gray(), 43 | ), 44 | ]) 45 | }) 46 | .collect::(); 47 | 48 | let command_paragraph = Paragraph::new(text).wrap(Wrap { trim: true }).block(popup); 49 | command_paragraph.render(popup_area, buf); 50 | } 51 | } 52 | 53 | pub struct DefaultCommands(pub Vec>); 54 | 55 | impl Default for DefaultCommands { 56 | fn default() -> Self { 57 | Self::new() 58 | } 59 | } 60 | 61 | impl DefaultCommands { 62 | pub fn new() -> Self { 63 | Self(vec![ 64 | Command { 65 | name: "Enter", 66 | key_binding: "Enter", 67 | description: "Open the selected item", 68 | }, 69 | Command { 70 | name: "Filter", 71 | key_binding: "/", 72 | description: "Filter items", 73 | }, 74 | Command { 75 | name: "Open", 76 | key_binding: "o", 77 | description: "Open the selected item in the browser", 78 | }, 79 | Command { 80 | name: "Previous", 81 | key_binding: "k / Up", 82 | description: "Move to the previous item", 83 | }, 84 | Command { 85 | name: "Next", 86 | key_binding: "j / Down", 87 | description: "Move to the next item", 88 | }, 89 | Command { 90 | name: "Previous tab", 91 | key_binding: "h / Left / Esc", 92 | description: "Move to the previous tab", 93 | }, 94 | Command { 95 | name: "Next tab", 96 | key_binding: "l / Right", 97 | description: "Move to the next tab", 98 | }, 99 | Command { 100 | name: "Help", 101 | key_binding: "?", 102 | description: "Show help", 103 | }, 104 | Command { 105 | name: "Quit", 106 | key_binding: "q / Ctrl-c", 107 | description: "Quit", 108 | }, 109 | ]) 110 | } 111 | } 112 | -------------------------------------------------------------------------------- /src/app/worker/dags.rs: -------------------------------------------------------------------------------- 1 | use std::sync::{Arc, Mutex}; 2 | 3 | use 
crate::airflow::traits::AirflowClient; 4 | use crate::app::model::popup::error::ErrorPopup; 5 | use crate::app::state::App; 6 | 7 | /// Handle updating DAGs and their statistics from the Airflow server. 8 | /// Fetches DAGs first, then fetches stats for all DAG IDs in parallel. 9 | pub async fn handle_update_dags_and_stats(app: &Arc>, client: &Arc) { 10 | // First, fetch DAGs 11 | let dag_list_result = client.list_dags().await; 12 | 13 | // Collect DAG IDs for stats query 14 | let dag_ids: Vec = if let Ok(dag_list) = &dag_list_result { 15 | dag_list.dags.iter().map(|dag| dag.dag_id.clone()).collect() 16 | } else { 17 | // If DAG list failed, try to use cached DAG IDs 18 | let app_lock = app.lock().unwrap(); 19 | app_lock 20 | .environment_state 21 | .get_active_dags() 22 | .iter() 23 | .map(|dag| dag.dag_id.clone()) 24 | .collect() 25 | }; 26 | 27 | // Fetch stats for all DAGs 28 | let dag_ids_refs: Vec<&str> = dag_ids.iter().map(String::as_str).collect(); 29 | let dag_stats_result = client.get_dag_stats(dag_ids_refs).await; 30 | 31 | let mut app = app.lock().unwrap(); 32 | 33 | // Process DAGs 34 | match dag_list_result { 35 | Ok(dag_list) => { 36 | if let Some(env) = app.environment_state.get_active_environment_mut() { 37 | for dag in &dag_list.dags { 38 | env.upsert_dag(dag.clone()); 39 | } 40 | } 41 | } 42 | Err(e) => { 43 | app.dags.error_popup = Some(ErrorPopup::from_strings(vec![e.to_string()])); 44 | } 45 | } 46 | 47 | // Process stats 48 | match dag_stats_result { 49 | Ok(dag_stats) => { 50 | if let Some(env) = app.environment_state.get_active_environment_mut() { 51 | for dag_stats in dag_stats.dags { 52 | env.update_dag_stats(&dag_stats.dag_id, dag_stats.stats); 53 | } 54 | } 55 | } 56 | Err(e) => { 57 | // Don't overwrite existing error popup, just log 58 | log::error!("Failed to fetch dag stats: {e}"); 59 | } 60 | } 61 | 62 | // Sync panel data from environment state 63 | app.sync_panel_data(); 64 | } 65 | 66 | /// Handle toggling the paused state of a 
DAG. 67 | pub async fn handle_toggle_dag( 68 | app: &Arc>, 69 | client: &Arc, 70 | dag_id: &str, 71 | is_paused: bool, 72 | ) { 73 | let dag = client.toggle_dag(dag_id, is_paused).await; 74 | if let Err(e) = dag { 75 | let mut app = app.lock().unwrap(); 76 | app.dags.error_popup = Some(ErrorPopup::from_strings(vec![e.to_string()])); 77 | } 78 | } 79 | 80 | /// Handle fetching the DAG source code. 81 | pub async fn handle_get_dag_code( 82 | app: &Arc>, 83 | client: &Arc, 84 | dag_id: &str, 85 | ) { 86 | let current_dag = { 87 | let app_lock = app.lock().unwrap(); 88 | app_lock.environment_state.get_active_dag(dag_id) 89 | }; 90 | 91 | if let Some(current_dag) = current_dag { 92 | let dag_code = client.get_dag_code(¤t_dag).await; 93 | let mut app = app.lock().unwrap(); 94 | match dag_code { 95 | Ok(dag_code) => { 96 | app.dagruns.dag_code.set_code(&dag_code); 97 | } 98 | Err(e) => { 99 | app.dags.error_popup = Some(ErrorPopup::from_strings(vec![e.to_string()])); 100 | } 101 | } 102 | } else { 103 | let mut app = app.lock().unwrap(); 104 | app.dags.error_popup = Some(ErrorPopup::from_strings(vec!["DAG not found".to_string()])); 105 | } 106 | } 107 | -------------------------------------------------------------------------------- /src/commands/config/add.rs: -------------------------------------------------------------------------------- 1 | use std::path::PathBuf; 2 | 3 | use inquire::Select; 4 | use log::info; 5 | use strum::IntoEnumIterator; 6 | 7 | use super::model::AddCommand; 8 | use crate::{ 9 | airflow::config::{ 10 | AirflowAuth, AirflowConfig, AirflowVersion, BasicAuth, FlowrsConfig, TokenCmd, 11 | }, 12 | commands::config::model::{validate_endpoint, ConfigOption}, 13 | }; 14 | use anyhow::{Context, Result}; 15 | 16 | impl AddCommand { 17 | pub fn run(&self) -> Result<()> { 18 | let name = inquire::Text::new("name").prompt()?; 19 | let endpoint = inquire::Text::new("endpoint") 20 | .with_validator(validate_endpoint) 21 | .prompt()?; 22 | 23 | let version_str 
= inquire::Select::new("Airflow version", vec!["v2", "v3"]) 24 | .with_help_message("Select the Airflow API version") 25 | .prompt()?; 26 | 27 | let version = match version_str { 28 | "v3" => AirflowVersion::V3, 29 | _ => AirflowVersion::V2, 30 | }; 31 | 32 | let auth_type = 33 | Select::new("authentication type", ConfigOption::iter().collect()).prompt()?; 34 | 35 | let new_config = match auth_type { 36 | ConfigOption::BasicAuth => { 37 | let username = inquire::Text::new("username").prompt()?; 38 | let password = inquire::Password::new("password") 39 | .with_display_toggle_enabled() 40 | .prompt()?; 41 | 42 | AirflowConfig { 43 | name, 44 | endpoint, 45 | auth: AirflowAuth::Basic(BasicAuth { username, password }), 46 | managed: None, 47 | version, 48 | timeout_secs: 30, 49 | } 50 | } 51 | ConfigOption::Token(_) => { 52 | let cmd = Some(inquire::Text::new("cmd").prompt()?); 53 | let token: String; 54 | if let Some(cmd) = &cmd { 55 | info!("🔑 Running command: {cmd}"); 56 | let output = std::process::Command::new("sh") 57 | .arg("-c") 58 | .arg(cmd) 59 | .output() 60 | .with_context(|| format!("Failed to execute token command: {cmd}"))?; 61 | token = String::from_utf8(output.stdout)?.trim().to_string(); 62 | } else { 63 | token = inquire::Text::new("token").prompt()?; 64 | } 65 | 66 | AirflowConfig { 67 | name, 68 | endpoint, 69 | auth: AirflowAuth::Token(TokenCmd { 70 | cmd, 71 | token: Some(token), 72 | }), 73 | managed: None, 74 | version, 75 | timeout_secs: 30, 76 | } 77 | } 78 | }; 79 | 80 | let path = self.file.as_ref().map(PathBuf::from); 81 | let mut config = FlowrsConfig::from_file(path.as_ref())?; 82 | 83 | // If the user provided a custom path, override the config path so write_to_file 84 | // uses the user-specified location even if it didn't exist during from_file 85 | if let Some(user_path) = path { 86 | config.path = Some(user_path); 87 | } 88 | 89 | if let Some(mut servers) = config.servers.clone() { 90 | servers.retain(|server| server.name != 
new_config.name && server.managed.is_none()); 91 | servers.push(new_config); 92 | config.servers = Some(servers); 93 | } else { 94 | config.servers = Some(vec![new_config]); 95 | } 96 | 97 | config.write_to_file()?; 98 | 99 | println!("✅ Config added successfully!"); 100 | Ok(()) 101 | } 102 | } 103 | -------------------------------------------------------------------------------- /docs/plans/2025-12-18-ci-testing-design.md: -------------------------------------------------------------------------------- 1 | # CI Testing Design 2 | 3 | ## Overview 4 | 5 | Set up unit testing and integration testing in CI for the flowrs project. 6 | 7 | - **Unit tests**: Run on every push to a PR 8 | - **Integration tests**: Run when merging to main 9 | 10 | ## Decisions 11 | 12 | | Decision | Choice | Rationale | 13 | |----------|--------|-----------| 14 | | Test separation | `src/` for unit, `tests/` for integration | Cargo convention, clear separation | 15 | | PR checks | Tests + Clippy + Format | Catches most issues without being slow | 16 | | Integration approach | External Airflow services | Tests real API behavior | 17 | | Airflow versions | 2.x (V1 API) + 3.x (V2 API) | Covers both client implementations | 18 | | OS matrix | Linux only | Sufficient for API-focused code | 19 | 20 | ## Workflow Structure 21 | 22 | ### PR Workflow (`.github/workflows/ci.yml`) 23 | 24 | **Triggers:** `pull_request` to `main` 25 | 26 | **Jobs (run in parallel):** 27 | 28 | 1. **fmt** - Check code formatting 29 | - `cargo fmt --check` 30 | 31 | 2. **clippy** - Lint code 32 | - `cargo clippy -- -D warnings` 33 | 34 | 3. 
**test** - Run unit tests 35 | - `cargo test --lib --bins` 36 | 37 | **Key features:** 38 | - Uses `Swatinem/rust-cache@v2` for faster builds 39 | - Treats clippy warnings as errors 40 | - Only runs tests in `src/` (skips `tests/` directory) 41 | 42 | ### Integration Workflow (`.github/workflows/integration.yml`) 43 | 44 | **Triggers:** `push` to `main` 45 | 46 | **Matrix:** 47 | | Airflow Version | API Version | 48 | |-----------------|-------------| 49 | | 2.10.4 | v1 | 50 | | 3.0.1 | v2 | 51 | 52 | **Jobs:** 53 | 1. Spin up Airflow container via GitHub Services 54 | 2. Wait for Airflow health endpoint 55 | 3. Run integration tests with environment variables: 56 | - `TEST_AIRFLOW_URL=http://localhost:8080` 57 | - `TEST_API_VERSION=v1` or `v2` 58 | 59 | **Airflow container configuration:** 60 | - LocalExecutor with SQLite 61 | - Basic auth enabled for API access 62 | - Port 8080 exposed 63 | 64 | ## Integration Test Structure 65 | 66 | ``` 67 | tests/ 68 | ├── common/ 69 | │ └── mod.rs # Shared test utilities 70 | ├── v1_api_test.rs # Tests for Airflow 2.x V1 API 71 | └── v2_api_test.rs # Tests for Airflow 3.x V2 API 72 | ``` 73 | 74 | ### Common Module (`tests/common/mod.rs`) 75 | 76 | Provides: 77 | - `create_test_client()` - Creates appropriate client based on `TEST_API_VERSION` 78 | - Test fixtures and helper assertions 79 | - Airflow authentication setup 80 | 81 | ### Test Pattern 82 | 83 | Tests check the `TEST_API_VERSION` environment variable and skip if not applicable: 84 | 85 | ```rust 86 | #[tokio::test] 87 | async fn test_list_dags() { 88 | let api_version = env::var("TEST_API_VERSION").unwrap_or("v1".into()); 89 | if api_version != "v1" { 90 | return; // Skip if not testing V1 API 91 | } 92 | 93 | let client = common::create_test_client().await; 94 | let dags = client.list_dags(None, None).await.unwrap(); 95 | assert!(!dags.dags.is_empty()); 96 | } 97 | ``` 98 | 99 | ## Files to Create 100 | 101 | | File | Purpose | 102 | |------|---------| 103 | | 
`.github/workflows/ci.yml` | PR workflow | 104 | | `.github/workflows/integration.yml` | Integration test workflow | 105 | | `tests/common/mod.rs` | Shared test utilities | 106 | | `tests/v1_api_test.rs` | V1 API integration tests | 107 | | `tests/v2_api_test.rs` | V2 API integration tests | 108 | 109 | ## Known Issues 110 | 111 | The existing test `test_list_conveyor_environments` fails in CI because it requires the `conveyor` CLI. Options: 112 | 1. Mark with `#[ignore]` for CI 113 | 2. Move to integration tests 114 | 3. Mock the external dependency 115 | 116 | ## Implementation Notes 117 | 118 | - Use `cargo test --lib --bins` to run only unit tests (excludes `tests/` directory) 119 | - Use `cargo test --test '*'` to run only integration tests (only `tests/` directory) 120 | - GitHub Services handles container lifecycle automatically 121 | - Tests read environment variables to determine which API version to test 122 | -------------------------------------------------------------------------------- /src/app/model/popup/dagruns/trigger.rs: -------------------------------------------------------------------------------- 1 | use crossterm::event::KeyCode; 2 | use ratatui::{ 3 | buffer::Buffer, 4 | layout::{Constraint, Flex, Layout, Rect}, 5 | widgets::{Block, BorderType, Borders, Clear, Paragraph, Widget}, 6 | }; 7 | 8 | use crate::{ 9 | app::{ 10 | events::custom::FlowrsEvent, 11 | model::{ 12 | popup::{popup_area, themed_button}, 13 | Model, 14 | }, 15 | worker::WorkerMessage, 16 | }, 17 | ui::theme::{BORDER_STYLE, DEFAULT_STYLE, SURFACE_STYLE}, 18 | }; 19 | 20 | pub struct TriggerDagRunPopUp { 21 | pub dag_id: String, 22 | pub confirm: bool, 23 | } 24 | 25 | impl TriggerDagRunPopUp { 26 | pub fn new(dag_id: String) -> Self { 27 | TriggerDagRunPopUp { 28 | dag_id, 29 | confirm: false, 30 | } 31 | } 32 | } 33 | 34 | impl Model for TriggerDagRunPopUp { 35 | fn update(&mut self, event: &FlowrsEvent) -> (Option, Vec) { 36 | if let FlowrsEvent::Key(key_event) = event { 
37 | match key_event.code { 38 | KeyCode::Enter => { 39 | // On Enter, we always return the key event, so the parent can close the popup 40 | // If the confirm flag is set, we also return a WorkerMessage to clear the dag run 41 | if self.confirm { 42 | return ( 43 | Some(FlowrsEvent::Key(*key_event)), 44 | vec![WorkerMessage::TriggerDagRun { 45 | dag_id: self.dag_id.clone(), 46 | }], 47 | ); 48 | } 49 | return (Some(FlowrsEvent::Key(*key_event)), vec![]); 50 | } 51 | KeyCode::Char('j' | 'k' | 'h' | 'l') 52 | | KeyCode::Down 53 | | KeyCode::Up 54 | | KeyCode::Left 55 | | KeyCode::Right => { 56 | // For any movement vim key, we toggle the confirm flag, and we consume the event 57 | self.confirm = !self.confirm; 58 | return (None, vec![]); 59 | } 60 | KeyCode::Char('q') | KeyCode::Esc => { 61 | // On Esc, we always return the key event, so the parent can close the popup, without clearing the dag run 62 | return (Some(FlowrsEvent::Key(*key_event)), vec![]); 63 | } 64 | _ => {} 65 | } 66 | } 67 | (Some(event.clone()), vec![]) 68 | } 69 | } 70 | 71 | impl Widget for &mut TriggerDagRunPopUp { 72 | fn render(self, area: Rect, buffer: &mut Buffer) { 73 | // Smaller popup: 40% width, auto height 74 | let area = popup_area(area, 40, 30); 75 | 76 | let popup_block = Block::default() 77 | .border_type(BorderType::Rounded) 78 | .borders(Borders::ALL) 79 | .border_style(BORDER_STYLE) 80 | .style(SURFACE_STYLE); 81 | 82 | // Use inner area for content layout to avoid overlapping the border 83 | let inner = popup_block.inner(area); 84 | 85 | let [_, header, options, _] = Layout::vertical([ 86 | Constraint::Length(1), 87 | Constraint::Length(2), 88 | Constraint::Length(3), 89 | Constraint::Min(1), 90 | ]) 91 | .flex(Flex::Center) 92 | .areas(inner); 93 | 94 | let text = Paragraph::new("Trigger a new DAG Run?") 95 | .style(DEFAULT_STYLE) 96 | .centered(); 97 | 98 | let [_, yes, _, no, _] = Layout::horizontal([ 99 | Constraint::Fill(1), 100 | Constraint::Length(8), 101 | 
Constraint::Length(2), 102 | Constraint::Length(8), 103 | Constraint::Fill(1), 104 | ]) 105 | .areas(options); 106 | 107 | let yes_btn = themed_button("Yes", self.confirm); 108 | let no_btn = themed_button("No", !self.confirm); 109 | 110 | Clear.render(area, buffer); 111 | popup_block.render(area, buffer); 112 | text.render(header, buffer); 113 | yes_btn.render(yes, buffer); 114 | no_btn.render(no, buffer); 115 | } 116 | } 117 | -------------------------------------------------------------------------------- /docs/client-architecture.md: -------------------------------------------------------------------------------- 1 | # Airflow Client Architecture 2 | 3 | ## Overview 4 | 5 | The Flowrs codebase now supports both Airflow 2 and Airflow 3 (via REST API v1 and v2 respectively) through a trait-based architecture. This allows the application to work with different Airflow versions while maintaining a consistent interface. 6 | 7 | ## Architecture Components 8 | 9 | ### 1. AirflowClient Trait (`src/airflow/traits.rs`) 10 | 11 | The `AirflowClient` trait defines the common interface for all Airflow API operations: 12 | - DAG operations (list, toggle, get code) 13 | - DAG Run operations (list, mark, clear, trigger) 14 | - Task Instance operations (list, mark, clear) 15 | - Log operations (get task logs) 16 | - Statistics operations (get dag stats) 17 | 18 | ### 2. BaseClient (`src/airflow/client/base.rs`) 19 | 20 | The `BaseClient` handles: 21 | - HTTP client initialization 22 | - Authentication (Basic, Token, Conveyor) 23 | - Base request building with proper API paths 24 | 25 | It provides two methods: 26 | - `base_api(method, endpoint, api_version)` - Build a request for a specific API version 27 | - `base_api_legacy(method, endpoint)` - Build a request using the configured API version (for backward compatibility) 28 | 29 | ### 3. 
V1Client and V2Client 30 | 31 | Both implementations wrap a `BaseClient` and implement the `AirflowClient` trait: 32 | - **V1Client** (`src/airflow/client/v1/*.rs`) - Uses `/api/v1` endpoint (for Airflow v2) 33 | - **V2Client** (`src/airflow/client/v2/*.rs`) - Uses `/api/v2` endpoint (for Airflow v3) 34 | 35 | Key differences between v1 and v2: 36 | - v2 uses `-logical_date` for ordering DAG runs instead of v1's `-execution_date` 37 | - v2 doesn't use `update_mask` query parameter for PATCH operations 38 | 39 | ### 4. Factory Function 40 | 41 | `create_client(config: AirflowConfig)` creates the appropriate client based on the `version` field in the configuration: 42 | 43 | ```rust 44 | pub fn create_client(config: AirflowConfig) -> Result<Arc<dyn AirflowClient>> { 45 | let base = BaseClient::new(config.clone())?; 46 | 47 | match config.version { 48 | AirflowVersion::V2 => Ok(Arc::new(V1Client::new(base))), // V2 uses API v1 49 | AirflowVersion::V3 => Ok(Arc::new(V2Client::new(base))), // V3 uses API v2 50 | } 51 | } 52 | 53 | // Usage: 54 | let client = create_client(config)?; 55 | // Returns Arc<dyn AirflowClient> 56 | ``` 57 | 58 | ## Configuration 59 | 60 | Set the Airflow version in your configuration file (`~/.flowrs`): 61 | 62 | ```toml 63 | [[servers]] 64 | name = "my-airflow-v2" 65 | endpoint = "http://localhost:8080" 66 | version = "V2" # Default if not specified 67 | 68 | [servers.auth.BasicAuth] 69 | username = "airflow" 70 | password = "airflow" 71 | 72 | [[servers]] 73 | name = "my-airflow-v3" 74 | endpoint = "http://localhost:8081" 75 | version = "V3" 76 | 77 | [servers.auth.BasicAuth] 78 | username = "airflow" 79 | password = "airflow" 80 | ``` 81 | 82 | ## Backward Compatibility 83 | 84 | The `BaseClient` provides backward compatibility through: 85 | - `base_api_legacy()` method which automatically uses the configured API version 86 | - Direct method implementations for common operations (list_dags, get_dag_runs, etc.)
87 | - Existing tests and application code that use `BaseClient` directly continue to work without modification 88 | 89 | ## Usage Examples 90 | 91 | ### Using the Trait (Recommended for new code) 92 | 93 | ```rust 94 | use crate::airflow::client::create_client; 95 | use crate::airflow::traits::AirflowClient; 96 | 97 | let client = create_client(config)?; 98 | let dags = client.list_dags().await?; 99 | ``` 100 | 101 | ### Using BaseClient Directly (Backward compatible) 102 | 103 | ```rust 104 | use crate::airflow::client::BaseClient; 105 | 106 | let client = BaseClient::new(config)?; 107 | let dags = client.list_dags().await?; 108 | ``` 109 | 110 | ## Updating the Application 111 | 112 | To use the trait-based client in the application: 113 | 114 | 1. Use `Arc` as the client type in the `App` struct 115 | 2. Use `create_client()` factory function to create clients 116 | 3. The factory automatically selects V1Client or V2Client based on configuration 117 | 118 | Example: 119 | ```rust 120 | use crate::airflow::client::create_client; 121 | use crate::airflow::traits::AirflowClient; 122 | 123 | let client: Arc = create_client(config)?; 124 | let dags = client.list_dags().await?; 125 | ``` 126 | -------------------------------------------------------------------------------- /src/airflow/client/base.rs: -------------------------------------------------------------------------------- 1 | use anyhow::{Context, Result}; 2 | use log::{debug, info}; 3 | use reqwest::{Method, Url}; 4 | use std::convert::TryFrom; 5 | use std::time::Duration; 6 | 7 | use crate::airflow::config::{AirflowAuth, AirflowConfig}; 8 | use crate::airflow::managed_services::conveyor::ConveyorClient; 9 | 10 | /// Base HTTP client for Airflow API communication. 11 | /// Handles authentication and provides base request building functionality. 
12 | #[derive(Debug, Clone)] 13 | pub struct BaseClient { 14 | pub client: reqwest::Client, 15 | pub config: AirflowConfig, 16 | } 17 | 18 | impl BaseClient { 19 | pub fn new(config: AirflowConfig) -> Result { 20 | let client = reqwest::Client::builder() 21 | .timeout(Duration::from_secs(config.timeout_secs)) 22 | .use_rustls_tls() 23 | .build()?; 24 | Ok(Self { client, config }) 25 | } 26 | 27 | /// Build a base request with authentication for the specified API version 28 | pub fn base_api( 29 | &self, 30 | method: Method, 31 | endpoint: &str, 32 | api_version: &str, 33 | ) -> Result { 34 | let base_url = Url::parse(&self.config.endpoint)?; 35 | let url = base_url.join(format!("{api_version}/{endpoint}").as_str())?; 36 | debug!("🔗 Request URL: {url}"); 37 | 38 | match &self.config.auth { 39 | AirflowAuth::Basic(auth) => { 40 | info!("🔑 Basic Auth: {}", auth.username); 41 | Ok(self 42 | .client 43 | .request(method, url) 44 | .basic_auth(&auth.username, Some(&auth.password))) 45 | } 46 | AirflowAuth::Token(token) => { 47 | info!("🔑 Token Auth: {:?}", token.cmd); 48 | if let Some(cmd) = &token.cmd { 49 | let output = std::process::Command::new("sh") 50 | .arg("-c") 51 | .arg(cmd) 52 | .output() 53 | .context("Failed to run token helper command")?; 54 | 55 | if !output.status.success() { 56 | let stderr = String::from_utf8_lossy(&output.stderr); 57 | let stdout = String::from_utf8_lossy(&output.stdout); 58 | return Err(anyhow::anyhow!( 59 | "Token helper command failed with exit code {:?}\nstdout: {}\nstderr: {}", 60 | output.status.code(), 61 | stdout, 62 | stderr 63 | )); 64 | } 65 | 66 | let token = String::from_utf8(output.stdout) 67 | .context("Token helper returned invalid UTF-8")? 
68 | .trim() 69 | .replace('"', ""); 70 | Ok(self.client.request(method, url).bearer_auth(token)) 71 | } else { 72 | if let Some(token) = &token.token { 73 | return Ok(self.client.request(method, url).bearer_auth(token.trim())); 74 | } 75 | Err(anyhow::anyhow!("Token not found")) 76 | } 77 | } 78 | AirflowAuth::Conveyor => { 79 | info!("🔑 Conveyor Auth"); 80 | let token: String = ConveyorClient::get_token()?; 81 | Ok(self.client.request(method, url).bearer_auth(token)) 82 | } 83 | AirflowAuth::Mwaa(auth) => { 84 | info!("🔑 MWAA Auth: {}", auth.environment_name); 85 | Ok(self 86 | .client 87 | .request(method, url) 88 | .header("Cookie", format!("session={}", auth.session_cookie))) 89 | } 90 | AirflowAuth::Astronomer(auth) => { 91 | info!("🔑 Astronomer Auth"); 92 | Ok(self 93 | .client 94 | .request(method, url) 95 | .bearer_auth(&auth.api_token)) 96 | } 97 | } 98 | } 99 | } 100 | 101 | impl TryFrom<&AirflowConfig> for BaseClient { 102 | type Error = anyhow::Error; 103 | 104 | fn try_from(config: &AirflowConfig) -> Result { 105 | Self::new(config.clone()) 106 | } 107 | } 108 | -------------------------------------------------------------------------------- /tests/v2_api_test.rs: -------------------------------------------------------------------------------- 1 | mod common; 2 | 3 | use common::{create_test_client, should_run_for_api_version}; 4 | 5 | #[tokio::test] 6 | async fn test_v2_list_dags() { 7 | if !should_run_for_api_version("v2") { 8 | println!("Skipping V2 test - TEST_API_VERSION is not 'v2'"); 9 | return; 10 | } 11 | 12 | let client = create_test_client().expect("Failed to create test client"); 13 | let result = client.list_dags().await; 14 | 15 | assert!(result.is_ok(), "Failed to list DAGs: {:?}", result.err()); 16 | 17 | let dag_list = result.unwrap(); 18 | // Airflow with LOAD_EXAMPLES=true should have example DAGs 19 | assert!( 20 | !dag_list.dags.is_empty(), 21 | "Expected at least one DAG, got none" 22 | ); 23 | } 24 | 25 | #[tokio::test] 26 | 
async fn test_v2_dag_has_required_fields() { 27 | if !should_run_for_api_version("v2") { 28 | return; 29 | } 30 | 31 | let client = create_test_client().expect("Failed to create test client"); 32 | let dag_list = client.list_dags().await.expect("Failed to list DAGs"); 33 | 34 | if let Some(dag) = dag_list.dags.first() { 35 | // Verify DAG has required fields populated 36 | assert!(!dag.dag_id.is_empty(), "DAG ID should not be empty"); 37 | } 38 | } 39 | 40 | #[tokio::test] 41 | async fn test_v2_get_dag_code() { 42 | if !should_run_for_api_version("v2") { 43 | return; 44 | } 45 | 46 | let client = create_test_client().expect("Failed to create test client"); 47 | let dag_list = client.list_dags().await.expect("Failed to list DAGs"); 48 | 49 | if let Some(dag) = dag_list.dags.first() { 50 | let code = client 51 | .get_dag_code(dag) 52 | .await 53 | .expect("Failed to get DAG code"); 54 | assert!(code.contains(&dag.dag_id), "DAG code should contain DAG ID"); 55 | } 56 | } 57 | 58 | #[tokio::test] 59 | async fn test_v2_list_dagruns() { 60 | if !should_run_for_api_version("v2") { 61 | return; 62 | } 63 | 64 | let client = create_test_client().expect("Failed to create test client"); 65 | let dag_list = client.list_dags().await.expect("Failed to list DAGs"); 66 | 67 | if let Some(dag) = dag_list.dags.first() { 68 | let result = client.list_dagruns(&dag.dag_id).await; 69 | assert!( 70 | result.is_ok(), 71 | "Failed to list DAG runs: {:?}", 72 | result.err() 73 | ); 74 | // Note: dag_runs may be empty if no runs have been triggered 75 | } 76 | } 77 | 78 | #[tokio::test] 79 | async fn test_v2_dag_stats() { 80 | if !should_run_for_api_version("v2") { 81 | return; 82 | } 83 | 84 | let client = create_test_client().expect("Failed to create test client"); 85 | let dag_list = client.list_dags().await.expect("Failed to list DAGs"); 86 | 87 | if !dag_list.dags.is_empty() { 88 | let dag_ids: Vec<&str> = dag_list 89 | .dags 90 | .iter() 91 | .take(2) 92 | .map(|d| d.dag_id.as_str()) 
93 | .collect(); 94 | let result = client.get_dag_stats(dag_ids.clone()).await; 95 | assert!( 96 | result.is_ok(), 97 | "Failed to get DAG stats: {:?}", 98 | result.err() 99 | ); 100 | 101 | let dag_stats = result.unwrap(); 102 | assert!( 103 | !dag_stats.dags.is_empty(), 104 | "Expected at least one DAG in stats response" 105 | ); 106 | } 107 | } 108 | 109 | #[tokio::test] 110 | async fn test_v2_list_task_instances() { 111 | if !should_run_for_api_version("v2") { 112 | return; 113 | } 114 | 115 | let client = create_test_client().expect("Failed to create test client"); 116 | let dag_list = client.list_dags().await.expect("Failed to list DAGs"); 117 | 118 | if let Some(dag) = dag_list.dags.first() { 119 | let dagruns = client 120 | .list_dagruns(&dag.dag_id) 121 | .await 122 | .expect("Failed to list DAG runs"); 123 | 124 | if let Some(dagrun) = dagruns.dag_runs.first() { 125 | let result = client 126 | .list_task_instances(&dag.dag_id, &dagrun.dag_run_id) 127 | .await; 128 | assert!( 129 | result.is_ok(), 130 | "Failed to list task instances: {:?}", 131 | result.err() 132 | ); 133 | // Note: task instances may be empty depending on DAG configuration 134 | } 135 | } 136 | } 137 | -------------------------------------------------------------------------------- /src/airflow/model/common/taskinstance.rs: -------------------------------------------------------------------------------- 1 | use crate::airflow::client::v1; 2 | use crate::airflow::client::v2; 3 | use serde::{Deserialize, Serialize}; 4 | use time::OffsetDateTime; 5 | 6 | /// Common `TaskInstance` model used by the application 7 | #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] 8 | pub struct TaskInstance { 9 | pub task_id: String, 10 | pub dag_id: String, 11 | pub dag_run_id: String, 12 | pub logical_date: Option, 13 | pub start_date: Option, 14 | pub end_date: Option, 15 | pub duration: Option, 16 | pub state: Option, 17 | pub try_number: i64, 18 | pub max_tries: i64, 19 | pub 
map_index: i64, 20 | pub hostname: Option, 21 | pub unixname: Option, 22 | pub pool: String, 23 | pub pool_slots: i64, 24 | pub queue: Option, 25 | pub priority_weight: Option, 26 | pub operator: Option, 27 | pub queued_when: Option, 28 | pub pid: Option, 29 | pub note: Option, 30 | } 31 | 32 | #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] 33 | pub struct TaskInstanceList { 34 | pub task_instances: Vec, 35 | pub total_entries: i64, 36 | } 37 | 38 | // From trait implementations for v1 models 39 | impl From for TaskInstance { 40 | fn from(value: v1::model::taskinstance::TaskInstanceResponse) -> Self { 41 | TaskInstance { 42 | task_id: value.task_id, 43 | dag_id: value.dag_id, 44 | dag_run_id: value.dag_run_id, 45 | logical_date: Some(value.execution_date), 46 | start_date: value.start_date, 47 | end_date: value.end_date, 48 | duration: value.duration, 49 | state: value.state, 50 | try_number: value.try_number, 51 | max_tries: value.max_tries, 52 | map_index: value.map_index, 53 | hostname: Some(value.hostname), 54 | unixname: Some(value.unixname), 55 | pool: value.pool, 56 | pool_slots: value.pool_slots, 57 | queue: value.queue, 58 | priority_weight: value.priority_weight, 59 | operator: value.operator, 60 | queued_when: value.queued_when, 61 | pid: value.pid, 62 | note: value.note, 63 | } 64 | } 65 | } 66 | 67 | impl From for TaskInstanceList { 68 | fn from(value: v1::model::taskinstance::TaskInstanceCollectionResponse) -> Self { 69 | TaskInstanceList { 70 | task_instances: value 71 | .task_instances 72 | .into_iter() 73 | .map(std::convert::Into::into) 74 | .collect(), 75 | total_entries: value.total_entries, 76 | } 77 | } 78 | } 79 | 80 | // From trait implementations for v2 models 81 | impl From for TaskInstance { 82 | fn from(value: v2::model::taskinstance::TaskInstance) -> Self { 83 | TaskInstance { 84 | task_id: value.task_id, 85 | dag_id: value.dag_id, 86 | dag_run_id: value.dag_run_id, 87 | logical_date: value.logical_date, 88 | 
start_date: value.start_date, 89 | end_date: value.end_date, 90 | duration: value.duration, 91 | state: value.state, 92 | try_number: value.try_number, 93 | max_tries: value.max_tries, 94 | map_index: value.map_index, 95 | hostname: value.hostname, 96 | unixname: value.unixname, 97 | pool: value.pool, 98 | pool_slots: value.pool_slots, 99 | queue: value.queue, 100 | priority_weight: value.priority_weight, 101 | operator: value.operator, 102 | queued_when: value.queued_when, 103 | pid: value.pid, 104 | note: value.note, 105 | } 106 | } 107 | } 108 | 109 | impl From for TaskInstanceList { 110 | fn from(value: v2::model::taskinstance::TaskInstanceList) -> Self { 111 | TaskInstanceList { 112 | task_instances: value 113 | .task_instances 114 | .into_iter() 115 | .map(std::convert::Into::into) 116 | .collect(), 117 | total_entries: value.total_entries, 118 | } 119 | } 120 | } 121 | -------------------------------------------------------------------------------- /src/app/model/popup/dagruns/clear.rs: -------------------------------------------------------------------------------- 1 | use crossterm::event::KeyCode; 2 | use ratatui::{ 3 | buffer::Buffer, 4 | layout::{Constraint, Flex, Layout, Rect}, 5 | widgets::{Block, BorderType, Borders, Clear, Paragraph, Widget}, 6 | }; 7 | 8 | use crate::{ 9 | app::{ 10 | events::custom::FlowrsEvent, 11 | model::{ 12 | popup::{popup_area, themed_button}, 13 | Model, 14 | }, 15 | worker::WorkerMessage, 16 | }, 17 | ui::theme::{BORDER_STYLE, DEFAULT_STYLE, SURFACE_STYLE}, 18 | }; 19 | 20 | pub struct ClearDagRunPopup { 21 | pub dag_run_ids: Vec, 22 | pub dag_id: String, 23 | pub confirm: bool, 24 | } 25 | 26 | impl ClearDagRunPopup { 27 | pub fn new(dag_run_ids: Vec, dag_id: String) -> Self { 28 | ClearDagRunPopup { 29 | dag_run_ids, 30 | dag_id, 31 | confirm: false, 32 | } 33 | } 34 | } 35 | 36 | impl Model for ClearDagRunPopup { 37 | fn update(&mut self, event: &FlowrsEvent) -> (Option, Vec) { 38 | if let FlowrsEvent::Key(key_event) = 
event { 39 | match key_event.code { 40 | KeyCode::Enter => { 41 | // On Enter, we always return the key event, so the parent can close the popup 42 | // If the confirm flag is set, we also return WorkerMessages to clear the dag runs 43 | if self.confirm { 44 | return ( 45 | Some(FlowrsEvent::Key(*key_event)), 46 | self.dag_run_ids 47 | .iter() 48 | .map(|dag_run_id| WorkerMessage::ClearDagRun { 49 | dag_run_id: dag_run_id.clone(), 50 | dag_id: self.dag_id.clone(), 51 | }) 52 | .collect(), 53 | ); 54 | } 55 | return (Some(FlowrsEvent::Key(*key_event)), vec![]); 56 | } 57 | KeyCode::Char('j' | 'k' | 'h' | 'l') 58 | | KeyCode::Down 59 | | KeyCode::Up 60 | | KeyCode::Left 61 | | KeyCode::Right => { 62 | // For any movement vim key, we toggle the confirm flag, and we consume the event 63 | self.confirm = !self.confirm; 64 | return (None, vec![]); 65 | } 66 | KeyCode::Char('q') | KeyCode::Esc => { 67 | // On Esc, we always return the key event, so the parent can close the popup, without clearing the dag run 68 | return (Some(FlowrsEvent::Key(*key_event)), vec![]); 69 | } 70 | _ => {} 71 | } 72 | } 73 | (Some(event.clone()), vec![]) 74 | } 75 | } 76 | 77 | impl Widget for &mut ClearDagRunPopup { 78 | fn render(self, area: Rect, buffer: &mut Buffer) { 79 | // Smaller popup: 40% width, auto height 80 | let area = popup_area(area, 40, 30); 81 | 82 | let popup_block = Block::default() 83 | .border_type(BorderType::Rounded) 84 | .borders(Borders::ALL) 85 | .border_style(BORDER_STYLE) 86 | .style(SURFACE_STYLE); 87 | 88 | // Use inner area for content layout to avoid overlapping the border 89 | let inner = popup_block.inner(area); 90 | 91 | let [_, header, options, _] = Layout::vertical([ 92 | Constraint::Length(1), 93 | Constraint::Length(2), 94 | Constraint::Length(3), 95 | Constraint::Min(1), 96 | ]) 97 | .flex(Flex::Center) 98 | .areas(inner); 99 | 100 | let message = if self.dag_run_ids.len() == 1 { 101 | "Clear this DAG Run?".to_string() 102 | } else { 103 | 
format!("Clear {} DAG Runs?", self.dag_run_ids.len()) 104 | }; 105 | let text = Paragraph::new(message).style(DEFAULT_STYLE).centered(); 106 | 107 | let [_, yes, _, no, _] = Layout::horizontal([ 108 | Constraint::Fill(1), 109 | Constraint::Length(8), 110 | Constraint::Length(2), 111 | Constraint::Length(8), 112 | Constraint::Fill(1), 113 | ]) 114 | .areas(options); 115 | 116 | let yes_btn = themed_button("Yes", self.confirm); 117 | let no_btn = themed_button("No", !self.confirm); 118 | 119 | Clear.render(area, buffer); 120 | popup_block.render(area, buffer); 121 | text.render(header, buffer); 122 | yes_btn.render(yes, buffer); 123 | no_btn.render(no, buffer); 124 | } 125 | } 126 | -------------------------------------------------------------------------------- /src/app/model/popup/taskinstances/clear.rs: -------------------------------------------------------------------------------- 1 | use crossterm::event::KeyCode; 2 | use ratatui::{ 3 | buffer::Buffer, 4 | layout::{Constraint, Flex, Layout, Rect}, 5 | widgets::{Block, BorderType, Borders, Clear, Paragraph, Widget}, 6 | }; 7 | 8 | use crate::{ 9 | app::{ 10 | events::custom::FlowrsEvent, 11 | model::{ 12 | popup::{popup_area, themed_button}, 13 | Model, 14 | }, 15 | worker::WorkerMessage, 16 | }, 17 | ui::theme::{BORDER_STYLE, DEFAULT_STYLE, SURFACE_STYLE}, 18 | }; 19 | 20 | pub struct ClearTaskInstancePopup { 21 | pub dag_run_id: String, 22 | pub dag_id: String, 23 | pub task_ids: Vec, 24 | pub confirm: bool, 25 | } 26 | 27 | impl ClearTaskInstancePopup { 28 | pub fn new(dag_run_id: &str, dag_id: &str, task_ids: Vec) -> Self { 29 | ClearTaskInstancePopup { 30 | dag_run_id: dag_run_id.to_string(), 31 | dag_id: dag_id.to_string(), 32 | task_ids, 33 | confirm: false, 34 | } 35 | } 36 | } 37 | 38 | impl Model for ClearTaskInstancePopup { 39 | fn update(&mut self, event: &FlowrsEvent) -> (Option, Vec) { 40 | if let FlowrsEvent::Key(key_event) = event { 41 | match key_event.code { 42 | KeyCode::Enter => { 43 | // On 
Enter, we always return the key event, so the parent can close the popup 44 | // If the confirm flag is set, we also return WorkerMessages to clear the task instances 45 | if self.confirm { 46 | return ( 47 | Some(FlowrsEvent::Key(*key_event)), 48 | self.task_ids 49 | .iter() 50 | .map(|task_id| WorkerMessage::ClearTaskInstance { 51 | dag_run_id: self.dag_run_id.clone(), 52 | dag_id: self.dag_id.clone(), 53 | task_id: task_id.clone(), 54 | }) 55 | .collect(), 56 | ); 57 | } 58 | return (Some(FlowrsEvent::Key(*key_event)), vec![]); 59 | } 60 | KeyCode::Char('j' | 'k' | 'h' | 'l') 61 | | KeyCode::Down 62 | | KeyCode::Up 63 | | KeyCode::Left 64 | | KeyCode::Right => { 65 | // For any movement vim key, we toggle the confirm flag, and we consume the event 66 | self.confirm = !self.confirm; 67 | return (None, vec![]); 68 | } 69 | KeyCode::Char('q') | KeyCode::Esc => { 70 | // On Esc, we always return the key event, so the parent can close the popup, without clearing the dag run 71 | return (Some(FlowrsEvent::Key(*key_event)), vec![]); 72 | } 73 | _ => {} 74 | } 75 | } 76 | (Some(event.clone()), vec![]) 77 | } 78 | } 79 | 80 | impl Widget for &mut ClearTaskInstancePopup { 81 | fn render(self, area: Rect, buffer: &mut Buffer) { 82 | // Smaller popup: 40% width, auto height 83 | let area = popup_area(area, 40, 30); 84 | 85 | let popup_block = Block::default() 86 | .border_type(BorderType::Rounded) 87 | .borders(Borders::ALL) 88 | .border_style(BORDER_STYLE) 89 | .style(SURFACE_STYLE); 90 | 91 | // Use inner area for content layout to avoid overlapping the border 92 | let inner = popup_block.inner(area); 93 | 94 | let [_, header, options, _] = Layout::vertical([ 95 | Constraint::Length(1), 96 | Constraint::Length(2), 97 | Constraint::Length(3), 98 | Constraint::Min(1), 99 | ]) 100 | .flex(Flex::Center) 101 | .areas(inner); 102 | 103 | let message = if self.task_ids.len() == 1 { 104 | "Clear this Task Instance?".to_string() 105 | } else { 106 | format!("Clear {} Task 
Instances?", self.task_ids.len()) 107 | }; 108 | let text = Paragraph::new(message).style(DEFAULT_STYLE).centered(); 109 | 110 | let [_, yes, _, no, _] = Layout::horizontal([ 111 | Constraint::Fill(1), 112 | Constraint::Length(8), 113 | Constraint::Length(2), 114 | Constraint::Length(8), 115 | Constraint::Fill(1), 116 | ]) 117 | .areas(options); 118 | 119 | let yes_btn = themed_button("Yes", self.confirm); 120 | let no_btn = themed_button("No", !self.confirm); 121 | 122 | Clear.render(area, buffer); 123 | popup_block.render(area, buffer); 124 | text.render(header, buffer); 125 | yes_btn.render(yes, buffer); 126 | no_btn.render(no, buffer); 127 | } 128 | } 129 | -------------------------------------------------------------------------------- /tests/managed_services_test.rs: -------------------------------------------------------------------------------- 1 | //! Integration tests for managed service integrations (Conveyor, MWAA, Astronomer) 2 | //! 3 | //! These tests require external service credentials and are skipped by default. 4 | //! To run specific tests: 5 | //! - Conveyor: Requires `conveyor` CLI installed and authenticated 6 | //! - MWAA: Requires AWS credentials with MWAA access 7 | //! 
- Astronomer: Requires `ASTRO_API_TOKEN` environment variable 8 | 9 | use std::env; 10 | 11 | use flowrs_tui::airflow::managed_services::astronomer::{ 12 | get_astronomer_environment_servers, AstronomerClient, 13 | }; 14 | use flowrs_tui::airflow::managed_services::conveyor::{ 15 | get_conveyor_environment_servers, ConveyorClient, 16 | }; 17 | use flowrs_tui::airflow::managed_services::mwaa::{get_mwaa_environment_servers, MwaaClient}; 18 | 19 | // ============================================================================ 20 | // Conveyor Tests 21 | // ============================================================================ 22 | 23 | #[tokio::test] 24 | #[ignore = "Requires conveyor CLI to be installed and authenticated"] 25 | async fn test_conveyor_list_environments() { 26 | let environments = 27 | get_conveyor_environment_servers().expect("Failed to list Conveyor environments"); 28 | 29 | println!("Found {} Conveyor environments", environments.len()); 30 | for env in &environments { 31 | println!(" - {} ({})", env.name, env.endpoint); 32 | } 33 | 34 | assert!( 35 | !environments.is_empty(), 36 | "Expected at least one Conveyor environment" 37 | ); 38 | } 39 | 40 | #[test] 41 | #[ignore = "Requires conveyor CLI to be installed and authenticated"] 42 | fn test_conveyor_get_token() { 43 | let token = ConveyorClient::get_token().expect("Failed to get Conveyor token"); 44 | 45 | assert!(!token.is_empty(), "Token should not be empty"); 46 | } 47 | 48 | // ============================================================================ 49 | // MWAA Tests 50 | // ============================================================================ 51 | 52 | fn should_run_mwaa_tests() -> bool { 53 | // MWAA tests require AWS credentials - check if they're available 54 | env::var("AWS_ACCESS_KEY_ID").is_ok() || env::var("AWS_PROFILE").is_ok() 55 | } 56 | 57 | #[tokio::test] 58 | #[ignore = "Requires AWS credentials with MWAA access"] 59 | async fn test_mwaa_list_environments() { 
60 | if !should_run_mwaa_tests() { 61 | println!("Skipping MWAA test - AWS credentials not configured"); 62 | return; 63 | } 64 | 65 | let result = get_mwaa_environment_servers().await; 66 | 67 | match result { 68 | Ok(environments) => { 69 | println!("Found {} MWAA environments", environments.len()); 70 | for env in &environments { 71 | println!(" - {} ({})", env.name, env.endpoint); 72 | } 73 | } 74 | Err(e) => { 75 | // This may fail if no MWAA environments exist, which is acceptable 76 | println!("MWAA list environments result: {e}"); 77 | } 78 | } 79 | } 80 | 81 | #[tokio::test] 82 | #[ignore = "Requires AWS credentials"] 83 | async fn test_mwaa_client_new() { 84 | if !should_run_mwaa_tests() { 85 | println!("Skipping MWAA test - AWS credentials not configured"); 86 | return; 87 | } 88 | 89 | let client = MwaaClient::new().await; 90 | assert!( 91 | client.is_ok(), 92 | "Failed to create MWAA client: {:?}", 93 | client.err() 94 | ); 95 | } 96 | 97 | // ============================================================================ 98 | // Astronomer Tests 99 | // ============================================================================ 100 | 101 | fn should_run_astronomer_tests() -> bool { 102 | env::var("ASTRO_API_TOKEN").is_ok() 103 | } 104 | 105 | #[tokio::test] 106 | #[ignore = "Requires ASTRO_API_TOKEN environment variable"] 107 | async fn test_astronomer_list_environments() { 108 | if !should_run_astronomer_tests() { 109 | println!("Skipping Astronomer test - ASTRO_API_TOKEN not set"); 110 | return; 111 | } 112 | 113 | let (environments, errors) = get_astronomer_environment_servers().await; 114 | 115 | println!("Found {} Astronomer deployments", environments.len()); 116 | for env in &environments { 117 | println!(" - {} ({})", env.name, env.endpoint); 118 | } 119 | 120 | if !errors.is_empty() { 121 | println!("Errors encountered: {errors:?}"); 122 | } 123 | } 124 | 125 | #[tokio::test] 126 | #[ignore = "Requires ASTRO_API_TOKEN environment variable"] 127 
| async fn test_astronomer_client_new() { 128 | if !should_run_astronomer_tests() { 129 | println!("Skipping Astronomer test - ASTRO_API_TOKEN not set"); 130 | return; 131 | } 132 | 133 | let client = AstronomerClient::new(); 134 | assert!( 135 | client.is_ok(), 136 | "Failed to create Astronomer client: {:?}", 137 | client.err() 138 | ); 139 | } 140 | -------------------------------------------------------------------------------- /src/ui.rs: -------------------------------------------------------------------------------- 1 | use crate::app::state::{App, Panel}; 2 | use crate::ui::tabs::{TabBar, TAB_BAR_HEIGHT}; 3 | use crate::ui::theme::{HEADER_BG, HEADER_FG, TEXT_PRIMARY}; 4 | use init_screen::render_init_screen; 5 | use ratatui::layout::{Constraint, Layout}; 6 | use ratatui::style::{Modifier, Style}; 7 | use ratatui::text::{Line, Span}; 8 | use ratatui::widgets::{Block, Paragraph, Widget}; 9 | use ratatui::Frame; 10 | use std::sync::{Arc, Mutex}; 11 | use throbber_widgets_tui::Throbber; 12 | 13 | pub mod common; 14 | pub mod constants; 15 | mod init_screen; 16 | pub mod tabs; 17 | pub mod theme; 18 | 19 | pub const TIME_FORMAT: &str = "[year]-[month]-[day] [hour]:[minute]:[second]"; 20 | 21 | pub fn draw_ui(f: &mut Frame, app: &Arc>) { 22 | let mut app = app.lock().unwrap(); 23 | if app.startup && app.ticks <= 10 { 24 | render_init_screen(f, app.ticks); 25 | return; 26 | } 27 | app.startup = false; 28 | 29 | // Split area vertically: header (1 line), tab bar (3 lines), panel (remaining) 30 | let [top_line, tab_area, panel_area] = Layout::vertical([ 31 | Constraint::Length(1), 32 | Constraint::Length(TAB_BAR_HEIGHT), 33 | Constraint::Min(0), 34 | ]) 35 | .areas(f.area()); 36 | 37 | // First, fill the entire top line with header background 38 | let header_bg_block = Block::default().style(Style::default().bg(HEADER_BG)); 39 | f.render_widget(header_bg_block, top_line); 40 | 41 | // Split top line horizontally to align throbber to the right 42 | let [app_info, 
throbber_area] =
        Layout::horizontal([Constraint::Min(0), Constraint::Length(20)]).areas(top_line);

    // Render app name and version on the left, with breadcrumb - prominent purple header
    let version = env!("CARGO_PKG_VERSION");
    let breadcrumb = app.breadcrumb();

    // Header line = base "Flowrs vX.Y.Z" span, plus an italic breadcrumb span when present.
    let mut header_spans = vec![Span::styled(
        format!(" Flowrs v{version} "),
        Style::default().fg(HEADER_FG).bg(HEADER_BG),
    )];
    if let Some(ref crumb) = breadcrumb {
        header_spans.push(Span::styled(
            format!(" {crumb} "),
            Style::default()
                .fg(TEXT_PRIMARY)
                .bg(HEADER_BG)
                .add_modifier(Modifier::ITALIC),
        ));
    }
    let header_line = Line::from(header_spans);

    f.render_widget(
        Paragraph::new(header_line).style(Style::default().bg(HEADER_BG)),
        app_info,
    );

    // Render throbber only when loading
    if app.loading {
        let throbber = Throbber::default()
            .label("Fetching data...")
            .style(Style::default().fg(HEADER_FG).bg(HEADER_BG))
            .throbber_set(throbber_widgets_tui::OGHAM_C);
        f.render_stateful_widget(throbber, throbber_area, &mut app.throbber_state);
    }

    // Render tab bar; the highlighted tab mirrors the active panel.
    let active_tab_index = match app.active_panel {
        Panel::Config => 0,
        Panel::Dag => 1,
        Panel::DAGRun => 2,
        Panel::TaskInstance => 3,
        Panel::Logs => 4,
    };
    f.render_widget(TabBar::new(active_tab_index), tab_area);

    // Only frame has the ability to set the cursor position, so we need to control the cursor filter from here
    // Not very elegant, and quite some duplication... Should be refactored
    match app.active_panel {
        Panel::Config => {
            app.configs.render(panel_area, f.buffer_mut());
            if app.configs.filter.is_enabled() {
                f.set_cursor_position(app.configs.filter.cursor.position);
            }
        }
        Panel::Dag => {
            app.dags.render(panel_area, f.buffer_mut());
            if app.dags.filter.is_enabled() {
                f.set_cursor_position(app.dags.filter.cursor.position);
            }
        }
        Panel::DAGRun => {
            app.dagruns.render(panel_area, f.buffer_mut());
            if app.dagruns.filter.is_enabled() {
                f.set_cursor_position(app.dagruns.filter.cursor.position);
            }
        }
        Panel::TaskInstance => {
            app.task_instances.render(panel_area, f.buffer_mut());
            if app.task_instances.filter.is_enabled() {
                f.set_cursor_position(app.task_instances.filter.cursor.position);
            }
        }
        Panel::Logs => app.logs.render(panel_area, f.buffer_mut()),
    }

    // Render global warning popup on top of all panels
    if let Some(warning_popup) = &app.warning_popup {
        warning_popup.render(panel_area, f.buffer_mut());
    }
}
--------------------------------------------------------------------------------
/docs/plans/2024-12-14-tabs-navigation-design.md:
--------------------------------------------------------------------------------
# Tabs Navigation Design

## Overview

Add a visual tab bar for navigating between panels (Config, DAGs, Runs, Tasks, Logs) in the Flowrs TUI. The tabs provide a clear indicator of the current panel while maintaining existing keyboard navigation.
6 | 7 | ## Visual Design 8 | 9 | The tab bar uses a Lip Gloss-inspired style where the active tab visually "opens" into the content below: 10 | 11 | ``` 12 | ╭──────────╮╭─────────╮╭────────╮╭─────────╮╭────────╮ 13 | │ ⚙ Config ││ 𖣘 DAGs ││ ▶ Runs ││ ◉ Tasks ││ ≣ Logs │ 14 | ├──────────┴┘ └┴────────┴┴─────────┴┴────────┴───────╮ 15 | │ │ 16 | │ Active Panel Table │ 17 | │ │ 18 | ╰────────────────────────────────────────────────────────────╯ 19 | ``` 20 | 21 | ### Key Visual Elements 22 | 23 | - **All tabs**: Three-sided border (top, left, right) using rounded corners (`╭ ╮`) 24 | - **Inactive tabs**: Bottom border with `┴` connectors 25 | - **Active tab**: No bottom border - left side uses `┘`, right side uses `└`, leaving a gap 26 | - **First tab special case**: 27 | - If active: left edge uses `│` (vertical only, no horizontal into gap) 28 | - If inactive: left edge uses `├` (T-junction connecting to panel border) 29 | - **Shared border line**: Extends from tabs to panel edges, ends with `╮` (rounded) 30 | - **Content panel**: Only has side borders and rounded bottom corners (no top border) 31 | 32 | ### Tab Labels 33 | 34 | | Panel | Label | 35 | |-------|-------| 36 | | Config | `⚙ Config` | 37 | | Dag | `𖣘 DAGs` | 38 | | DAGRun | `▶ Runs` | 39 | | TaskInstance | `◉ Tasks` | 40 | | Logs | `≣ Logs` | 41 | 42 | ### Styling 43 | 44 | - **Active tab**: `PURPLE` background with `TEXT_PRIMARY` foreground 45 | - **Inactive tabs**: `BORDER_STYLE` (dimmed purple) borders, default text 46 | - **Panel borders**: Rounded bottom corners with `BorderType::Rounded` 47 | - **Panel titles**: Show "Press to see available commands" 48 | 49 | ## Layout Structure 50 | 51 | ``` 52 | ┌─────────────────────────────────────────┐ 53 | │ Flowrs v0.x.x Fetching... 
│ ← Header (1 line) 54 | ├─────────────────────────────────────────┤ 55 | │ ╭─────╮╭─────╮╭─────╮╭─────╮╭─────╮ │ 56 | │ │ Tab ││ Tab ││ Tab ││ Tab ││ Tab │ │ ← Tab bar (3 lines) 57 | │ ├─────┴┘ └┴─────┴┴─────┴┴─────┴────╮│ 58 | ├─────────────────────────────────────────┤ 59 | │ Press to see available commands │ 60 | │ Active Panel Table │ ← Panel content (remaining) 61 | │ │ 62 | ╰─────────────────────────────────────────╯ 63 | ``` 64 | 65 | **Vertical allocation:** 66 | - Header: 1 line (unchanged) 67 | - Tab bar: 3 lines (tab tops, tab content, shared border) 68 | - Panel: remaining space 69 | 70 | ## Behavior 71 | 72 | **Purely visual** - the tabs indicate current position but don't change navigation: 73 | - `Enter` / `Right` / `l`: Move to next panel 74 | - `Esc` / `Left` / `h`: Move to previous panel 75 | 76 | No new keyboard shortcuts or click handling. 77 | 78 | ## Implementation 79 | 80 | ### New Component: `TabBar` Widget 81 | 82 | Created `src/ui/tabs.rs` with a custom `TabBar` widget: 83 | 84 | 1. Defines `Tab` struct with icon and label 85 | 2. `TABS` constant array with all five panel tabs 86 | 3. `TabBar` widget that renders: 87 | - Line 1: Tab tops (`╭───╮`) 88 | - Line 2: Tab content (`│ icon label │`) 89 | - Line 3: Shared border line with proper connectors 90 | 4. Handles active vs inactive tab styling 91 | 5. 
Extends border line to panel edges with rounded corner (`╮`)

### Border Connection Characters

| Position | Active Tab | Inactive Tab |
|----------|------------|--------------|
| First tab left | `│` | `├` |
| Other tab left | `┘` | `┴` |
| Tab right | `└` | `┴` |
| Right edge | `╮` | `╮` |

### Changes to Existing Code

**`src/ui.rs`:**
- Added `pub mod tabs`
- Import `TabBar` and `TAB_BAR_HEIGHT`
- Layout split: header (1) + tab bar (3) + panel (remaining)
- Render `TabBar` with active panel index

**Panel models (`src/app/model/*.rs`):**
- Changed `Borders::ALL` to `Borders::LEFT | Borders::RIGHT | Borders::BOTTOM`
- Changed `BorderType::Plain` to `BorderType::Rounded`
- Updated titles to show "Press `<?>` to see available commands"
- Removed redundant panel names from titles (now shown in tabs)

### Why Custom Widget

Ratatui's built-in `Tabs` widget doesn't support:
- Per-tab borders (only a divider between tabs)
- The shared border line with selective gaps
- Three-sided tab styling with proper connection characters

Custom rendering gives full control over the visual effect.
124 | 125 | ## Edge Cases 126 | 127 | - **Narrow terminal**: Tabs stop rendering if they exceed available width 128 | - **Terminal font**: Icons (`⚙ 𖣘 ▶ ◉ ≣`) require Unicode support (standard in modern terminals) 129 | -------------------------------------------------------------------------------- /src/airflow/managed_services/conveyor.rs: -------------------------------------------------------------------------------- 1 | use crate::airflow::config::{AirflowAuth, AirflowConfig, ManagedService}; 2 | use anyhow::{Context, Result}; 3 | use dirs::home_dir; 4 | use expectrl::spawn; 5 | use log::info; 6 | use serde::{Deserialize, Serialize}; 7 | use std::io::Read; 8 | 9 | // New ConveyorClient struct 10 | #[derive(Debug, Clone)] 11 | pub struct ConveyorClient {} 12 | 13 | impl ConveyorClient { 14 | pub fn get_token() -> Result { 15 | // Use expectrl to spawn the command in a pseudo-terminal 16 | let mut session = spawn("conveyor auth get --quiet") 17 | .context("Failed to spawn conveyor auth get command")?; 18 | 19 | // Create a buffer to read the output into 20 | let mut output_bytes = Vec::new(); 21 | 22 | // Read all output until EOF into the buffer 23 | session 24 | .read_to_end(&mut output_bytes) 25 | .context("Failed to read output from conveyor auth get")?; 26 | 27 | let token = serde_json::from_str::( 28 | &String::from_utf8(output_bytes).context("Failed to decode output as UTF-8")?, 29 | ) 30 | .context("Failed to parse JSON token from conveyor output")? 
31 | .access_token; 32 | 33 | Ok(token) 34 | } 35 | } 36 | 37 | #[derive(Deserialize, Serialize, Debug, Clone)] 38 | pub struct ConveyorEnvironment { 39 | pub name: String, 40 | #[serde(rename = "clusterName")] 41 | pub cluster_name: String, 42 | #[serde(rename = "tenantId")] 43 | pub tenant_id: String, 44 | #[serde(rename = "airflowVersion")] 45 | pub airflow_version: String, 46 | } 47 | 48 | pub fn list_conveyor_environments() -> Result> { 49 | // Use the new ConveyorClient to authenticate 50 | ConveyorClient::get_token()?; // Ensure authentication before listing environments 51 | 52 | let output = std::process::Command::new("conveyor") 53 | .arg("environment") 54 | .arg("list") 55 | .arg("-o") 56 | .arg("json") 57 | .output() 58 | .context("Failed to execute conveyor environment list command")?; 59 | 60 | if !output.status.success() { 61 | let stderr = String::from_utf8_lossy(&output.stderr); 62 | anyhow::bail!("conveyor environment list failed: {stderr}"); 63 | } 64 | 65 | let environments: Vec = 66 | serde_json::from_str(&String::from_utf8(output.stdout)?) 
67 | .context("Failed to parse conveyor environment list output")?; 68 | 69 | info!("Found {} Conveyor environment(s)", environments.len()); 70 | Ok(environments) 71 | } 72 | 73 | pub fn get_conveyor_environment_servers() -> Result> { 74 | let environments = list_conveyor_environments()?; 75 | let api_endpoint = get_conveyor_api_endpoint()?; 76 | 77 | let servers = environments 78 | .iter() 79 | .map(|env| { 80 | let version = match env.airflow_version.as_str() { 81 | "AirflowVersion_V3" => crate::airflow::config::AirflowVersion::V3, 82 | _ => crate::airflow::config::AirflowVersion::V2, 83 | }; 84 | AirflowConfig { 85 | name: env.name.clone(), 86 | endpoint: format!("{}/environments/{}/airflow/", api_endpoint, env.name), 87 | auth: AirflowAuth::Conveyor, 88 | managed: Some(ManagedService::Conveyor), 89 | version, 90 | timeout_secs: 30, 91 | } 92 | }) 93 | .collect(); 94 | Ok(servers) 95 | } 96 | 97 | #[derive(Deserialize, Serialize, Debug, Clone)] 98 | pub struct ConveyorTokenResponse { 99 | pub access_token: String, 100 | } 101 | 102 | #[derive(Deserialize, Debug)] 103 | struct ConveyorProfiles { 104 | activeprofile: String, 105 | #[serde(rename = "version")] 106 | _version: Option, 107 | #[serde(flatten)] 108 | profiles: std::collections::HashMap, 109 | } 110 | 111 | #[derive(Deserialize, Debug)] 112 | struct ConveyorProfile { 113 | api: String, 114 | } 115 | 116 | fn get_conveyor_api_endpoint() -> Result { 117 | let profiles_path = home_dir() 118 | .context("Could not determine home directory")? 
119 | .join(".conveyor/profiles.toml"); 120 | 121 | let profiles_content = std::fs::read_to_string(&profiles_path) 122 | .context("Failed to read ~/.conveyor/profiles.toml")?; 123 | 124 | let profiles_config: ConveyorProfiles = 125 | toml::from_str(&profiles_content).context("Failed to parse profiles.toml")?; 126 | 127 | if profiles_config.activeprofile.as_str() == "default" { 128 | return Ok("https://app.conveyordata.com".to_string()); 129 | } 130 | 131 | let active_profile = profiles_config 132 | .profiles 133 | .get(&profiles_config.activeprofile) 134 | .context(format!( 135 | "Active profile '{}' not found in profiles.toml", 136 | profiles_config.activeprofile 137 | ))?; 138 | 139 | Ok(active_profile.api.clone()) 140 | } 141 | --------------------------------------------------------------------------------