├── .github
│   └── workflows
│       └── rust.yml
├── .gitignore
├── Cargo.toml
├── LICENSE
├── Makefile
├── README.md
├── alfred-workflow
│   ├── Cargo.toml
│   ├── README.md
│   └── src
│       └── lib.rs
├── buildkite-workflow
│   ├── Cargo.toml
│   ├── README.md
│   ├── buildkite.png
│   └── src
│       ├── bin
│       │   └── main.rs
│       ├── buildkite_api
│       │   ├── errors.rs
│       │   ├── mod.rs
│       │   └── models.rs
│       ├── database
│       │   ├── errors.rs
│       │   ├── mod.rs
│       │   └── models.rs
│       ├── errors.rs
│       ├── lib.rs
│       └── workflow.rs
├── datadog-workflow
│   ├── Cargo.toml
│   ├── README.md
│   ├── datadog.png
│   └── src
│       ├── bin
│       │   └── main.rs
│       ├── database
│       │   ├── errors.rs
│       │   ├── mod.rs
│       │   ├── models.rs
│       │   ├── monitors.rs
│       │   ├── screenboards.rs
│       │   └── timeboards.rs
│       ├── datadog.rs
│       ├── errors.rs
│       ├── lib.rs
│       └── workflow.rs
├── date-formats-workflow
│   ├── Cargo.toml
│   ├── README.md
│   └── src
│       ├── errors.rs
│       └── main.rs
└── github-workflow
    ├── Cargo.toml
    ├── README.md
    ├── github.png
    └── src
        ├── bin
        │   └── main.rs
        ├── database
        │   ├── errors.rs
        │   ├── mod.rs
        │   └── models.rs
        ├── errors.rs
        ├── github.rs
        ├── lib.rs
        └── workflow.rs

/.github/workflows/rust.yml:
--------------------------------------------------------------------------------
1 | name: Lint & Test
2 | on:
3 |   pull_request:
4 |     types: [opened, edited, reopened, synchronize]
5 | 
6 | concurrency:
7 |   group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
8 |   cancel-in-progress: true
9 | 
10 | jobs:
11 |   test:
12 |     strategy:
13 |       matrix:
14 |         platform: [macos-latest]
15 |     runs-on: ${{ matrix.platform }}
16 |     steps:
17 |       - name: Checkout code
18 |         uses: actions/checkout@v4
19 | 
20 |       - name: Install Rust Stable
21 |         uses: dtolnay/rust-toolchain@stable
22 | 
23 |       - name: Rust Cache
24 |         uses: Swatinem/rust-cache@v2
25 |         with:
26 |           key: ${{ matrix.platform }}-v2
27 | 
28 |       - name: Clippy Install
29 |         uses: dtolnay/rust-toolchain@clippy
30 | 
31 |       - name: Lint
32 |         if: matrix.platform != 'windows-latest'
33 |         run: cargo clippy --all-features --all-targets --tests -- -Dclippy::all -Dclippy::pedantic
34 | 
35 |       - name: Lint
36 |         if: matrix.platform == 'windows-latest'
37 |         run: cargo clippy --all-features --all-targets --tests
38 | 
39 |       - name: Test
40 |         run: cargo test --all-features
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Generated by Cargo
2 | # will have compiled files and executables
3 | /target/
4 | 
5 | # Remove Cargo.lock from gitignore if creating an executable, leave it for libraries
6 | # More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html
7 | Cargo.lock
8 | 
9 | # These are backup files generated by rustfmt
10 | **/*.rs.bk
11 | 
12 | test.db
--------------------------------------------------------------------------------
/Cargo.toml:
--------------------------------------------------------------------------------
1 | [workspace]
2 | 
3 | edition = "2024"
4 | resolver = "3"
5 | 
6 | members = [
7 |     "datadog-workflow",
8 |     "alfred-workflow",
9 |     "github-workflow",
10 |     "date-formats-workflow",
11 |     "buildkite-workflow",
12 | ]
13 | 
14 | [workspace.dependencies]
15 | anyhow = "1.0.97"
16 | thiserror = "2.0.12"
17 | alfred = "4.0.2"
18 | rusqlite = { version = "0.34.0", features = ["bundled", "chrono"]}
19 | clap = { version = "4.5.35", features = ["derive"] }
20 | 
21 | # good read - https://lifthrasiir.github.io/rustlog/why-is-a-rust-executable-large.html
22 | [profile.release]
23 | lto = true
24 | strip = true
25 | codegen-units = 1
26 | panic = 'abort' # abort on panic; a pretty backtrace isn't really necessary in a release build
27 | #opt-level = 's' # optimize for size and keep loop vectorization
28 | #target-cpu = 'native' # optimize for the current CPU
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 | 
3 | Copyright (c) 2018 Dean Karn
4 | 
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 | 
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 | 
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 | 
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | lint:
2 | 	cargo fmt --all -- --check
3 | 
4 | test:
5 | 	cargo test
6 | 
7 | build:
8 | 	cargo build
9 | 
10 | release:
11 | 	cargo build --release --target aarch64-apple-darwin
12 | 	cargo build --release --target x86_64-apple-darwin
13 | 
14 | .PHONY: lint test build release
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Alfred Workflows (Rust)
2 | 
3 | This repository contains multiple Alfred workflows to help speed up your day-to-day.
4 | 
5 | Workflows
6 | ----------
7 | | Workflow | Description |
8 | | ------------------------------------------------------------------------------------------------ | ------------------------------------------------------------------------ |
9 | | [Datadog](https://github.com/rust-playground/alfred-workflows-rs/tree/master/datadog-workflow) | Datadog Alfred Workflow to search dashboards, screenboards and monitors. |
10 | | [Buildkite](https://github.com/rust-playground/alfred-workflows-rs/tree/master/buildkite-workflow) | Buildkite Alfred Workflow to search pipelines. |
11 | | [Github](https://github.com/rust-playground/alfred-workflows-rs/tree/master/github-workflow) | GitHub Alfred Workflow to quickly search and open repositories. |
12 | | [DateTime Formatting](https://github.com/rust-playground/alfred-workflows-rs/tree/master/date-formats-workflow) | Date Formats Alfred Workflow to parse and manipulate common date formats and timezones. |
--------------------------------------------------------------------------------
/alfred-workflow/Cargo.toml:
--------------------------------------------------------------------------------
1 | [package]
2 | name = "alfred-workflow"
3 | description = "This contains common and reusable abstractions for creating workflows."
4 | repository = "https://github.com/rust-playground/alfred-workflows-rs/tree/master/alfred-workflow"
5 | license = "MIT"
6 | version = "1.3.0"
7 | authors = ["Dean Karn "]
8 | edition = "2024"
9 | readme = "README.md"
10 | keywords = ["alfred", "workflow"]
11 | categories = ["development-tools"]
12 | 
13 | [dependencies]
14 | alfred.workspace = true
15 | anyhow.workspace = true
16 | rusqlite.workspace = true
17 | dirs = "6.0.0"
18 | 
--------------------------------------------------------------------------------
/alfred-workflow/README.md:
--------------------------------------------------------------------------------
1 | # Alfred Workflow
2 | 
3 | This contains common and reusable abstractions for creating workflows.
--------------------------------------------------------------------------------
/alfred-workflow/src/lib.rs:
--------------------------------------------------------------------------------
1 | //! This contains common abstractions for reuse in multiple workflows
2 | //!
3 | use alfred::{json, Item};
4 | use anyhow::{anyhow, Error};
5 | use rusqlite::Connection;
6 | use std::{fs, io::Write};
7 | 
8 | /// Opens an `SQLite` database, creating it first if it does not already exist.
9 | ///
10 | /// # Arguments
11 | /// * `name` - The name of the workflow, for which a dedicated sub-directory will be created.
12 | /// * `f` - A lazily evaluated function that is called when the database is first created.
13 | ///
14 | /// # Remarks
15 | /// `name` must be unique or it may conflict with other workflows.
16 | ///
17 | /// # Errors
18 | ///
19 | /// Will return `Err` if the connection to the database fails.
20 | ///
21 | /// # Examples
22 | ///
23 | /// ```
24 | /// use anyhow::Error;
25 | /// use rusqlite::Connection;
26 | ///
27 | /// fn main() -> Result<(), Error> {
28 | ///     let conn = alfred_workflow::open_database_or_else("myworkflow", create_tables)?;
29 | ///     Ok(())
30 | /// }
31 | ///
32 | /// fn create_tables(conn: &Connection) -> Result<(), Error> {
33 | ///     conn.execute(
34 | ///         "CREATE TABLE IF NOT EXISTS config (
35 | ///             key   TEXT NOT NULL PRIMARY KEY,
36 | ///             value TEXT NOT NULL
37 | ///         );",
38 | ///         [],
39 | ///     )?;
40 | ///     Ok(())
41 | /// }
42 | /// ```
43 | pub fn open_database_or_else<F>(name: &str, f: F) -> Result<Connection, Error>
44 | where
45 |     F: Fn(&Connection) -> Result<(), Error>,
46 | {
47 |     let conn: Connection;
48 |     let path = dirs::home_dir()
49 |         .ok_or_else(|| anyhow!("Impossible to get your home dir!"))?
50 |         .join(".alfred")
51 |         .join("workflows")
52 |         .join(name);
53 | 
54 |     let db = path.join("db.sqlite3");
55 |     if db.exists() {
56 |         conn = Connection::open(&db)?;
57 |     } else {
58 |         fs::create_dir_all(path)?;
59 |         conn = Connection::open(&db)?;
60 |         f(&conn)?;
61 |     }
62 |     Ok(conn)
63 | }
64 | 
65 | /// Writes Alfred items to the provided writer.
66 | ///
67 | /// # Arguments
68 | /// * `writer` - the writer to write items to.
69 | /// * `items` - the Alfred items to be written.
70 | ///
71 | /// # Errors
72 | ///
73 | /// Will return `Err` if items cannot be serialized to JSON.
74 | ///
75 | /// # Examples
76 | /// ```
77 | /// use alfred::{json, Item};
78 | /// use std::{io, io::Write};
79 | /// use anyhow::Error;
80 | ///
81 | /// fn main() -> Result<(), Error> {
82 | ///     let item = alfred::ItemBuilder::new("settings")
83 | ///         .subtitle("settings for the workflow")
84 | ///         .into_item();
85 | ///     alfred_workflow::write_items(io::stdout(), &[item])
86 | /// }
87 | /// ```
88 | pub fn write_items<W>(writer: W, items: &[Item]) -> Result<(), Error>
89 | where
90 |     W: Write,
91 | {
92 |     json::write_items(writer, items)
93 |         .map_err(|e| anyhow!("failed to write alfred items->json: {}", e))
94 | }
95 | 
--------------------------------------------------------------------------------
/buildkite-workflow/Cargo.toml:
--------------------------------------------------------------------------------
1 | [package]
2 | authors = ["Dean Karn "]
3 | categories = ["development-tools"]
4 | description = "Buildkite Alfred Workflow to quickly search and open Buildkite pipelines."
5 | edition = "2024"
6 | keywords = [
7 |     "alfred",
8 |     "workflow",
9 |     "buildkite",
10 | ]
11 | license = "MIT"
12 | name = "buildkite-workflow"
13 | readme = "README.md"
14 | repository = "https://github.com/rust-playground/alfred-workflows-rs/tree/master/buildkite-workflow"
15 | version = "1.4.0"
16 | 
17 | [[bin]]
18 | name = "buildkite-workflow"
19 | path = "src/bin/main.rs"
20 | 
21 | [dependencies]
22 | alfred.workspace = true
23 | thiserror.workspace = true
24 | anyhow.workspace = true
25 | rusqlite.workspace = true
26 | clap.workspace = true
27 | regex = "1.10.3"
28 | serde = { version = "1.0.197", features = ["derive"] }
29 | chrono = { version = "0.4.34", features = ["serde"] }
30 | reqwest = { version = "0.12.15", features = ["rustls-tls","blocking", "json"] }
31 | 
32 | [lib]
33 | name = "buildkite_workflow_lib"
34 | path = "src/lib.rs"
35 | 
--------------------------------------------------------------------------------
/buildkite-workflow/README.md:
--------------------------------------------------------------------------------
1 | # Buildkite Workflow
2 | 
3 | Buildkite Alfred Workflow to cache and search pipelines
4 | 
5 | Requirements
6 | -------------
7 | sqlite - cache and config values are stored in an sqlite database
8 | Buildkite API Key - for Buildkite API access with organization + pipeline read permissions
9 | 
10 | Installation
11 | -------------
12 | 1. Download buildkite-workflow.alfredworkflow from the repo's [releases](https://github.com/rust-playground/alfred-workflows-rs/releases) section
13 | 2. Install in Alfred (double-click)
14 | 
15 | Setup
16 | ------
17 | 1. Have your Buildkite API Key ready; if you don't have one you can find/generate one at `https://buildkite.com/user/api-access-tokens`
18 | 2. In Alfred set the `API_KEY` environment variable for the workflow. ![Alfred Settings](https://github.com/rust-playground/alfred-workflows-rs/raw/master/buildkite-workflow/buildkite.png)
19 | 3. In Alfred type `bk `, navigate to refresh, and hit *ENTER* to cache/index your Buildkite pipelines; this may take some time depending on the number of pipelines your organizations have. There will be a notification popup once complete.
20 | 
21 | Usage
22 | ------
23 | - `bk [query]...` which queries Buildkite pipelines
24 | 
--------------------------------------------------------------------------------
/buildkite-workflow/buildkite.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rust-playground/alfred-workflows-rs/793b06439ee2bb7e5c60ad4e9b46cb8176c0c0d7/buildkite-workflow/buildkite.png
--------------------------------------------------------------------------------
/buildkite-workflow/src/bin/main.rs:
--------------------------------------------------------------------------------
1 | use alfred::{json, Item};
2 | use anyhow::Error;
3 | use buildkite_workflow_lib::workflow::Workflow;
4 | use clap::{Parser, Subcommand};
5 | use std::io::Write;
6 | use std::process::Command;
7 | use std::{env, io};
8 | 
9 | const SUBCOMMAND_REFRESH: &str = "refresh";
10 | 
11 | #[derive(Parser)]
12 | #[command(author, version, about, long_about = None)]
13 | #[command(propagate_version = true)]
14 | struct Cli {
15 |     #[arg()]
16 |     name: Option<Vec<String>>,
17 | 
18 |     #[command(subcommand)]
19 |     command: Option<Commands>,
20 | }
21 | 
22 | #[derive(Subcommand)]
23 | enum Commands {
24 |     Refresh,
25 |     Open { url: String },
26 | }
27 | 
28 | fn main() -> Result<(), Error> {
29 |     let opts = Cli::parse();
30 | 
31 |     let api_key = env::var("API_KEY")?;
32 |     let database_url = env::var("DATABASE_URL")?;
33 |     let mut wf = Workflow::new(&api_key, &database_url)?;
34 | 
35 |     match opts.command {
36 |         Some(Commands::Refresh) => {
37 |             wf.refresh_cache()?;
38 |             println!("Successfully Refreshed Buildkite cache");
39 |         }
40 |         Some(Commands::Open { url }) => {
41 |             Command::new("open").arg(url).output()?;
42 |         }
43 |         _ => {
44 |             if let Some(name_parts) = opts.name {
45 |                 let results = wf.query(&name_parts)?;
46 |                 write_items(io::stdout(), &results)?;
47 |             } else {
48 |                 let refresh = alfred::ItemBuilder::new(SUBCOMMAND_REFRESH)
49 |                     .subtitle("Refresh Cache, be patient you will be notified once complete")
50 |                     .arg(SUBCOMMAND_REFRESH)
51 |                     .into_item();
52 |                 write_items(io::stdout(), &[refresh])?;
53 |             }
54 |         }
55 |     }
56 |     Ok(())
57 | }
58 | 
59 | fn write_items<W>(writer: W, items: &[Item]) -> Result<(), Error>
60 | where
61 |     W: Write,
62 | {
63 |     json::write_items(writer, items)?;
64 |     Ok(())
65 | }
66 | 
--------------------------------------------------------------------------------
/buildkite-workflow/src/buildkite_api/errors.rs:
--------------------------------------------------------------------------------
1 | use thiserror::Error;
2 | pub type Result<T> = std::result::Result<T, Error>;
3 | 
4 | #[derive(Debug, Error)]
5 | pub enum Error {
6 |     #[error("HTTP error: {}", _0)]
7 |     Http(String),
8 | 
9 |     #[error(transparent)]
10 |     ReqwestError(#[from] reqwest::Error),
11 | }
12 | 
--------------------------------------------------------------------------------
/buildkite-workflow/src/buildkite_api/mod.rs:
--------------------------------------------------------------------------------
1 | pub mod errors;
2 | pub mod models;
3 | 
4 | use crate::buildkite_api::models::{Organization, Pipeline};
5 | use errors::{Error, Result};
6 | use regex::Regex;
7 | use reqwest::header::{HeaderValue, CONTENT_TYPE, LINK};
8 | 
9 | #[derive(Debug)]
10 | pub struct BuildkiteAPI<'a> {
11 |     token: &'a str,
12 |     re: Regex,
13 | }
14 | 
15 | impl<'a> BuildkiteAPI<'a> {
16 |     #[inline]
17 |     pub fn new(token: &'a str) -> Self {
18 |         let re = Regex::new(r#"<(.*)>; rel="next""#).unwrap();
19 |         Self { token, re }
20 |     }
21 | 
22 |     #[inline]
23 |     pub fn get_organizations_paginated(&self) -> OrganizationsIter {
24 |         OrganizationsIter {
25 |             api: self,
26 |             next: Some("https://api.buildkite.com/v2/organizations?per_page=100".to_owned()),
27 |         }
28 |     }
29 | 
30 |     #[inline]
31 |     pub fn get_pipelines_paginated(&self, organization: &str) -> PipelinesIter {
32 |         PipelinesIter {
33 |             api: self,
34 |             next: Some(format!(
35 |                 "https://api.buildkite.com/v2/organizations/{organization}/pipelines?per_page=100"
36 |             )),
37 |         }
38 |     }
39 | 
40 |     #[inline]
41 |     fn fetch_organizations(&self, url: &str) -> Result<OrganizationResponse> {
42 |         let response = reqwest::blocking::Client::new()
43 |             .get(url)
44 |             .bearer_auth(self.token)
45 |             .header(CONTENT_TYPE, "application/json")
46 |             .send()?;
47 | 
48 |         if !response.status().is_success() {
49 |             return Err(Error::Http(response.text()?));
50 |         }
51 | 
52 |         let link = response.headers().get(LINK);
53 |         let next = self.extract_next(link);
54 |         let results: Vec<Organization> = response.json()?;
55 |         Ok(OrganizationResponse { next, results })
56 |     }
57 | 
58 |     #[inline]
59 |     fn fetch_pipelines(&self, url: &str) -> Result<PipelineResponse> {
60 |         let response = reqwest::blocking::Client::new()
61 |             .get(url)
62 |             .bearer_auth(self.token)
63 |             .header(CONTENT_TYPE, "application/json")
64 |             .send()?;
65 | 
66 |         if !response.status().is_success() {
67 |             return Err(Error::Http(response.text()?));
68 |         }
69 | 
70 |         let link = response.headers().get(LINK);
71 |         let next = self.extract_next(link);
72 |         let results: Vec<Pipeline> = response.json()?;
73 |         Ok(PipelineResponse { next, results })
74 |     }
75 | 
76 |     #[inline]
77 |     fn extract_next(&self, link: Option<&HeaderValue>) -> Option<String> {
78 |         link.and_then(|h| self.re.captures(h.to_str().unwrap()))
79 |             .and_then(|cap| cap.get(1))
80 |             .map(|c| c.as_str().to_owned())
81 |     }
82 | }
83 | 
84 | struct OrganizationResponse {
85 |     next: Option<String>,
86 |     results: Vec<Organization>,
87 | }
88 | 
89 | pub struct OrganizationsIter<'a> {
90 |     api: &'a BuildkiteAPI<'a>,
91 |     next: Option<String>,
92 | }
93 | 
94 | impl Iterator for OrganizationsIter<'_> {
95 |     type Item = Result<Vec<Organization>>;
96 | 
97 |     fn next(&mut self) -> Option<Self::Item> {
98 |         let response = self.api.fetch_organizations(self.next.as_ref()?);
99 |         if response.is_err() {
100 |             return Some(Err(response.err().unwrap()));
101 |         }
102 |         let response = response.unwrap();
103 |         self.next = response.next;
104 |         Some(Ok(response.results))
105 |     }
106 | }
107 | 
108 | struct PipelineResponse {
109 |     next: Option<String>,
110 |     results: Vec<Pipeline>,
111 | }
112 | 
113 | pub struct PipelinesIter<'a> {
114 |     api: &'a BuildkiteAPI<'a>,
115 |     next: Option<String>,
116 | }
117 | 
118 | impl Iterator for PipelinesIter<'_> {
119 |     type Item = Result<Vec<Pipeline>>;
120 | 
121 |     fn next(&mut self) -> Option<Self::Item> {
122 |         let response = self.api.fetch_pipelines(self.next.as_ref()?);
123 |         if response.is_err() {
124 |             return Some(Err(response.err().unwrap()));
125 |         }
126 |         let response = response.unwrap();
127 |         self.next = response.next;
128 |         Some(Ok(response.results))
129 |     }
130 | }
131 | 
--------------------------------------------------------------------------------
/buildkite-workflow/src/buildkite_api/models.rs:
--------------------------------------------------------------------------------
1 | use chrono::{DateTime, Utc};
2 | 
3 | #[derive(Debug, Serialize, Deserialize)]
4 | pub struct Organization {
5 |     pub id: String,
6 |     pub url: String,
7 |     pub web_url: String,
8 |     pub name: String,
9 |     pub slug: String,
10 |     pub pipelines_url: String,
11 |     pub agents_url: String,
12 |     pub emojis_url: String,
13 |     pub created_at: DateTime<Utc>,
14 | }
15 | 
16 | #[derive(Debug, Serialize, Deserialize)]
17 | pub struct Pipeline {
18 |     pub id: String,
19 |     pub url: String,
20 |     pub web_url: String,
21 |     pub name: String,
22 |     pub slug: String,
23 |     pub repository: String,
24 |     pub branch_configuration: Option<String>,
25 |     pub default_branch: Option<String>,
26 |     pub provider: Provider,
27 |     pub skip_queued_branch_builds: bool,
28 |     pub skip_queued_branch_builds_filter: Option<String>,
29 |     pub cancel_running_branch_builds: bool,
30 |     pub cancel_running_branch_builds_filter: Option<String>,
31 |     pub builds_url: String,
32 |     pub badge_url: String,
33 |     pub created_at: DateTime<Utc>,
34 |     pub scheduled_builds_count: i32,
35 |     pub running_builds_count: i32,
36 |     pub scheduled_jobs_count: i32,
37 |     pub running_jobs_count: i32,
38 |     pub waiting_jobs_count: i32,
39 |     pub visibility: String,
40 | }
41 | 
42 | #[derive(Debug, Serialize, Deserialize)]
43 | pub struct Provider {
44 |     pub id: String,
45 |     pub webhook_url: String,
46 |     pub settings: ProviderSettings,
47 | }
48 | 
49 | #[allow(clippy::struct_excessive_bools)]
50 | #[derive(Debug, Serialize, Deserialize)]
51 | pub struct ProviderSettings {
52 |     pub publish_commit_status: bool,
53 |     pub build_pull_requests: bool,
54 |     pub build_pull_request_forks: bool,
55 |     pub build_tags: bool,
56 |     pub publish_commit_status_per_step: bool,
57 |     pub repository: String,
58 |     pub trigger_mode: String,
59 | }
--------------------------------------------------------------------------------
/buildkite-workflow/src/database/errors.rs:
--------------------------------------------------------------------------------
1 | use thiserror::Error;
2 | 
3 | pub type Result<T> = std::result::Result<T, Error>;
4 | 
5 | #[derive(Debug, Error)]
6 | pub enum Error {
7 |     #[error(transparent)]
8 |     Sqlite(#[from] rusqlite::Error),
9 | }
10 | 
--------------------------------------------------------------------------------
/buildkite-workflow/src/database/mod.rs:
--------------------------------------------------------------------------------
1 | pub mod errors;
2 | pub mod models;
3 | 
4 | use crate::database::models::Pipeline;
5 | use errors::Result;
6 | use rusqlite::{Connection, ToSql};
7 | 
8 | pub struct DbContext {
9 |     conn: Connection,
10 | }
11 | 
12 | impl DbContext {
13 |     #[inline]
14 |     pub fn new(database_url: &str) -> Result<Self> {
15 |         let conn = Connection::open(database_url)?;
16 |         Ok(DbContext { conn })
17 |     }
18 | 
19 |     #[inline]
20 |     pub fn run_migrations(&self) -> Result<()> {
21 |         self.conn.execute_batch(
22 |             "CREATE TABLE IF NOT EXISTS pipelines (
23 |                 unique_name TEXT NOT NULL PRIMARY KEY,
24 |                 name        TEXT NOT NULL,
25 |                 url         TEXT NOT NULL
26 |             );",
27 |         )?;
28 |         Ok(())
29 |     }
30 | 
31 |     #[inline]
32 |     pub fn delete_pipelines(&self) -> Result<()> {
33 |         self.conn.execute("DELETE FROM pipelines;", [])?;
34 |         Ok(())
35 |     }
36 | 
37 |     #[inline]
38 |     pub fn find_pipelines(&self, repo_name: &[String], limit: i64) -> Result<Vec<Pipeline>> {
39 |         // This will allow searching by full name or just the words within the name;
40 |         // it's not a regex but it's good enough.
41 |         let query = format!(
42 |             "%{}%",
43 |             repo_name
44 |                 .iter()
45 |                 .flat_map(|s| s.split_terminator(' '))
46 |                 .flat_map(|s| s.split_terminator('_'))
47 |                 .flat_map(|s| s.split_terminator('-'))
48 |                 .collect::<Vec<&str>>()
49 |                 .join("%")
50 |         );
51 | 
52 |         let results = self.conn.prepare(
53 |             "SELECT unique_name, name, url FROM pipelines WHERE name LIKE ? ORDER BY name ASC LIMIT ?",
54 |         )?.query_map([&query as &dyn ToSql, &limit], |row| {
55 |             Ok(Pipeline {
56 |                 unique_name: row.get(0)?,
57 |                 name: row.get(1)?,
58 |                 url: row.get(2)?,
59 |             })
60 |         })?
61 |         .collect::<std::result::Result<Vec<Pipeline>, _>>()?;
62 |         Ok(results)
63 |     }
64 | 
65 |     #[inline]
66 |     pub fn insert_pipelines(&mut self, pipelines: &[Pipeline]) -> Result<()> {
67 |         let tx = self.conn.transaction()?;
68 |         let mut stmt =
69 |             tx.prepare("INSERT INTO pipelines (unique_name, name, url) VALUES (?1, ?2, ?3)")?;
70 | 
71 |         for pipeline in pipelines {
72 |             stmt.execute([
73 |                 &pipeline.unique_name as &dyn ToSql,
74 |                 &pipeline.name,
75 |                 &pipeline.url,
76 |             ])?;
77 |         }
78 | 
79 |         stmt.finalize()?;
80 |         tx.commit()?;
81 |         Ok(())
82 |     }
83 | 
84 |     #[inline]
85 |     pub fn optimize(&self) -> Result<()> {
86 |         // since this workflow is READ heavy, let's optimize the SQLite indexes and DB
87 |         self.conn.execute("VACUUM;", [])?;
88 |         Ok(())
89 |     }
90 | }
--------------------------------------------------------------------------------
/buildkite-workflow/src/database/models.rs:
--------------------------------------------------------------------------------
1 | #[derive(Debug)]
2 | pub struct Pipeline {
3 |     pub unique_name: String,
4 |     pub name: String,
5 |     pub url: String,
6 | }
7 | 
--------------------------------------------------------------------------------
/buildkite-workflow/src/errors.rs:
--------------------------------------------------------------------------------
1 | use std::io;
2 | use thiserror::Error;
3 | 
4 | #[derive(Error, Debug)]
5 | pub enum Error {
6 |     #[error(transparent)]
7 |     API(#[from] crate::buildkite_api::errors::Error),
8 | 
9 |     #[error(transparent)]
10 |     SQLite(#[from] crate::database::errors::Error),
11 | 
12 |     #[error("failed to write alfred items->json {}", _0)]
13 |     WriteItems(#[from] io::Error),
14 | }
--------------------------------------------------------------------------------
/buildkite-workflow/src/lib.rs:
--------------------------------------------------------------------------------
1 | #[macro_use]
2 | extern crate serde;
3 | 
4 | pub(crate) mod buildkite_api;
5 | pub(crate) mod database;
6 | pub mod errors;
7 | pub mod workflow;
8 | 
--------------------------------------------------------------------------------
/buildkite-workflow/src/workflow.rs:
--------------------------------------------------------------------------------
1 | use crate::buildkite_api::BuildkiteAPI;
2 | use crate::database::models::Pipeline;
3 | use crate::database::DbContext;
4 | use crate::errors::Error;
5 | use alfred::Item;
6 | 
7 | pub struct Workflow<'a> {
8 |     api_key: &'a str,
9 |     db: DbContext,
10 | }
11 | 
12 | impl<'a> Workflow<'a> {
13 |     /// Create a new Workflow
14 |     ///
15 |     /// # Errors
16 |     ///
17 |     /// Will return `Err` if the database connection fails.
18 |     ///
19 |     #[inline]
20 |     pub fn new(api_key: &'a str, database_url: &str) -> Result<Self, Error> {
21 |         let db = DbContext::new(database_url)?;
22 |         Ok(Workflow { api_key, db })
23 |     }
24 | 
25 |     /// Refreshes DB with all Buildkite information.
26 |     ///
27 |     /// # Errors
28 |     ///
29 |     /// Will return `Err` if the database connection fails or hitting the `Buildkite` API fails.
30 |     ///
31 |     #[inline]
32 |     pub fn refresh_cache(&mut self) -> Result<(), Error> {
33 |         self.db.run_migrations()?;
34 |         let api = BuildkiteAPI::new(self.api_key);
35 |         self.db.delete_pipelines()?;
36 |         for organizations in api.get_organizations_paginated() {
37 |             for org in organizations? {
38 |                 for pipelines in api.get_pipelines_paginated(&org.slug) {
39 |                     let pl = pipelines?
40 |                         .into_iter()
41 |                         .map(|p| Pipeline {
42 |                             url: format!("https://buildkite.com/{}/{}", &org.slug, &p.name),
43 |                             unique_name: format!("{}/{}", &org.slug, &p.name),
44 |                             name: p.name,
45 |                         })
46 |                         .collect::<Vec<Pipeline>>();
47 |                     self.db.insert_pipelines(&pl)?;
48 |                 }
49 |             }
50 |         }
51 |         // and DB cleanup work
52 |         self.db.optimize()?;
53 |         Ok(())
54 |     }
55 | 
56 |     /// Queries the stored information using the given query.
57 |     ///
58 |     /// # Errors
59 |     ///
60 |     /// Will return `Err` if the database connection fails.
61 |     ///
62 |     #[inline]
63 |     pub fn query<'items>(&self, repo_name: &[String]) -> Result<Vec<Item<'items>>, Error> {
64 |         self.db
65 |             .find_pipelines(repo_name, 10)?
66 |             .into_iter()
67 |             .map(|repo| {
68 |                 Ok(alfred::ItemBuilder::new(repo.unique_name)
69 |                     .subtitle(repo.name.clone())
70 |                     .autocomplete(repo.name)
71 |                     .arg(format!("open {}", repo.url))
72 |                     .into_item())
73 |             })
74 |             .collect::<Result<Vec<_>, _>>()
75 |     }
76 | }
77 | 
--------------------------------------------------------------------------------
/datadog-workflow/Cargo.toml:
--------------------------------------------------------------------------------
1 | [package]
2 | authors = ["Dean Karn "]
3 | categories = ["development-tools"]
4 | description = "Datadog Alfred Workflow to search dashboards, screenboards and monitors"
5 | edition = "2024"
6 | keywords = [
7 |     "alfred",
8 |     "workflow",
9 |     "datadog",
10 | ]
11 | license = "MIT"
12 | name = "datadog-workflow"
13 | readme = "README.md"
14 | repository = "https://github.com/rust-playground/alfred-workflows-rs/tree/master/datadog-workflow"
15 | version = "2.2.0"
16 | 
17 | [[bin]]
18 | name = "datadog-workflow"
19 | path = "src/bin/main.rs"
20 | 
21 | [dependencies]
22 | alfred.workspace = true
23 | thiserror.workspace = true
24 | anyhow.workspace = true
25 | rusqlite.workspace = true
26 | clap = "2.34.0"
27 | serde = { version = "1.0.192", features = ["derive"] }
28 | chrono = { version = "0.4.31", features = ["serde"] }
29 | reqwest = { version = "0.12.15", features = ["rustls-tls","blocking", "json"] }
30 | 
31 | [lib]
32 | name = "datadog_workflow_lib"
33 | path = "src/lib.rs"
34 | 
--------------------------------------------------------------------------------
/datadog-workflow/README.md:
--------------------------------------------------------------------------------
1 | # Datadog Workflow
2 | 
3 | Datadog Alfred Workflow to cache and search dashboards, screenboards and monitors
4 | 
5 | Requirements
6 | -------------
7 | sqlite - cache and config values are stored in an sqlite database
8 | Datadog Application & API Key - for Datadog API access
9 | Datadog API URL - they differ for US vs EU, eg. https://api.datadoghq.com/api
10 | Datadog Company Subdomain - for building the URLs, eg. https://{company}.datadoghq.com/monitors/
11 | 
12 | Installation
13 | -------------
14 | 1. Download datadog-workflow.alfredworkflow from the repo's [releases](https://github.com/rust-playground/alfred-workflows-rs/releases) section
15 | 2. Install in Alfred (double-click)
16 | 
17 | Setup
18 | ------
19 | 1. Have your Datadog Application key ready; if you don't have one you can find/generate one at `https://{company}.datadoghq.com/account/settings#api`
20 | 2. In Alfred set the `API_KEY` and `APPLICATION_KEY` environment variables for the workflow. ![Alfred Settings](https://github.com/rust-playground/alfred-workflows-rs/raw/master/datadog-workflow/datadog.png)
21 | 3. In Alfred set the `SUBDOMAIN`.
22 | 4. In Alfred set the Datadog `API_URL`; by default it's set to the US value.
23 | 5. In Alfred type `dd `, navigate to refresh, and hit *ENTER* to cache/index your Datadog timeboards, screenboards and monitors; this may take some time depending on how many your organization has. There will be a notification popup once complete.
24 | 
25 | Usage
26 | ------
27 | - `dd d [query]...` which queries for timeboards and screenboards together
28 | - `dd t [query]...` which queries for timeboards
29 | - `dd s [query]...` which queries for screenboards
30 | - `dd m [OPTIONS] [query]...` which queries for monitors
31 |   - `--tag <tag>` this option allows you to filter monitors by a single tag attached to them.
32 | 
--------------------------------------------------------------------------------
/datadog-workflow/datadog.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rust-playground/alfred-workflows-rs/793b06439ee2bb7e5c60ad4e9b46cb8176c0c0d7/datadog-workflow/datadog.png
--------------------------------------------------------------------------------
/datadog-workflow/src/bin/main.rs:
--------------------------------------------------------------------------------
1 | use alfred::{json, Item};
2 | use anyhow::{anyhow, Error};
3 | use clap::{
4 |     app_from_crate, crate_authors, crate_description, crate_name, crate_version, Arg, SubCommand,
5 | };
6 | use datadog_workflow_lib::workflow::Workflow;
7 | use std::io::Write;
8 | use std::{env, io, process::Command};
9 | 
10 | const SUBCOMMAND_SETTINGS: &str = "settings";
11 | const SUBCOMMAND_REFRESH: &str = "refresh";
12 | const SUBCOMMAND_TIMEBOARDS: &str = "t";
13 | const SUBCOMMAND_SCREENBOARDS: &str = "s";
14 | const SUBCOMMAND_DASHBOARDS: &str = "d";
15 | const SUBCOMMAND_MONITORS: &str = "m";
16 | const SUBCOMMAND_OPEN: &str = "open";
17 | const ARG_INPUT: &str = "input";
18 | const ARG_QUERY: &str = "query";
19 | const ARG_TAG: &str = "tag";
20 | 
21 | #[allow(clippy::too_many_lines)]
22 | fn main() -> Result<(), Error> {
23 |     let matches = app_from_crate!("\n")
24 |         .subcommand(
25 |             SubCommand::with_name(SUBCOMMAND_OPEN)
26 |                 .about("opens the provided argument (https://)")
27 |                 .arg(
28 |                     Arg::with_name(ARG_INPUT)
29 |                         .long(ARG_INPUT)
30 |                         .help("the input value to open")
31 |                         .index(1),
32 |                 ),
33 |         )
34 |         .subcommand(
35 |             SubCommand::with_name(SUBCOMMAND_SETTINGS)
36 |                 .about("settings to control")
37 |                 .subcommand(
38 |                     SubCommand::with_name(SUBCOMMAND_REFRESH).help("refreshes the cached data"),
39 |                 ),
40 |         )
41 |         .subcommand(
42 |             SubCommand::with_name(SUBCOMMAND_TIMEBOARDS)
43 |                 .about("search for timeboards")
44 |                 .arg(
45 |                     Arg::with_name(ARG_QUERY)
46 |                         .long(ARG_QUERY)
47 |                         .help("the title of the timeboard to query")
48 |                         .multiple(true)
49 |                         .index(1),
50 |                 ),
51 |         )
52 |         .subcommand(
53 |             SubCommand::with_name(SUBCOMMAND_SCREENBOARDS)
54 |                 .about("search for screenboards")
55 |                 .arg(
56 |                     Arg::with_name(ARG_QUERY)
57 |                         .long(ARG_QUERY)
58 |                         .help("the title of the screenboard to query")
59 |                         .multiple(true)
60 |                         .index(1),
61 |                 ),
62 |         )
63 |         .subcommand(
64 |             SubCommand::with_name(SUBCOMMAND_DASHBOARDS)
65 |                 .about("search for dashboards(timeboards + screenboards)")
66 |                 .arg(
67 |                     Arg::with_name(ARG_QUERY)
68 |                         .long(ARG_QUERY)
69 |                         .help("the title of the dashboard to query")
70 |                         .multiple(true)
71 |                         .index(1),
72 |                 ),
73 |         )
74 |         .subcommand(
75 |             SubCommand::with_name(SUBCOMMAND_MONITORS)
76 |                 .about("search for monitors")
77 |                 .arg(
78 |                     Arg::with_name(ARG_TAG)
79 |                         .long(ARG_TAG)
80 |                         .help("the tag to filter monitors by")
81 |                         .takes_value(true),
82 |                 )
83 |                 .arg(
84 |                     Arg::with_name(ARG_QUERY)
85 |                         .long(ARG_QUERY)
86 |                         .help("the name of the monitor to query")
87 |                         .multiple(true)
88 |                         .index(1),
89 |                 ),
90 |         )
91 |         .get_matches();
92 | 
93 |     let api_key = env::var("API_KEY")?;
94 |     let application_key = env::var("APPLICATION_KEY")?;
95 |     let database_url = env::var("DATABASE_URL")?;
96 |     let api_url = env::var("API_URL")?;
97 |     let subdomain = env::var("SUBDOMAIN")?;
98 |     let mut wf = Workflow::new(
99 |         &api_key,
100 |         &application_key,
101 |         &database_url,
102 |         &api_url,
103 |         &subdomain,
104 |     )?;
105 | 
106 |     match matches.subcommand() {
107 |         (SUBCOMMAND_DASHBOARDS, Some(m)) => {
108 |             let query = m
109 |                 .values_of(ARG_QUERY)
110 |                 .unwrap_or_default()
111 |                 .collect::<Vec<&str>>()
112 |                 .join(" ");
113 |             let items = wf.query_dashboards(&query)?;
114 |             write_items(io::stdout(), &items)
115 |         }
116 |         (SUBCOMMAND_MONITORS, Some(m)) => {
117 |             let query = m
118 |                 .values_of(ARG_QUERY)
119 |                 .unwrap_or_default()
120 |                 .collect::<Vec<&str>>()
121 |                 .join(" ");
122 |             let tag = m.value_of(ARG_TAG);
123 |             let items = wf.query_monitors(&query, tag)?;
124 |             write_items(io::stdout(), &items)
125 |         }
126 |         (SUBCOMMAND_TIMEBOARDS, Some(m)) => {
127 |             let query = m
128 |                 .values_of(ARG_QUERY)
129 |                 .unwrap_or_default()
130 |                 .collect::<Vec<&str>>()
131 |                 .join(" ");
132 |             let items = wf.query_timeboards(&query)?;
133 |             write_items(io::stdout(), &items)
134 |         }
135 |         (SUBCOMMAND_SCREENBOARDS, Some(m)) => {
136 |             let query = m
137 |                 .values_of(ARG_QUERY)
138 |                 .unwrap_or_default()
139 |                 .collect::<Vec<&str>>()
140 |                 .join(" ");
141 |             let items = wf.query_screenboards(&query)?;
142 |             write_items(io::stdout(), &items)
143 |         }
144 |         (SUBCOMMAND_SETTINGS, Some(m)) => match m.subcommand() {
145 |             (SUBCOMMAND_REFRESH, Some(_)) => {
146 |                 wf.refresh_cache()?;
147 |                 println!("Successfully Refreshed Datadog cache");
148 |                 Ok(())
149 |             }
150 |             _ => Err(anyhow!("No suitable SubCommand found")),
151 |         },
152 |         (SUBCOMMAND_OPEN, Some(m)) => {
153 |             let input = m.value_of(ARG_INPUT).unwrap_or_default();
154 |             if input.starts_with("https://") {
155 |                 Command::new("open")
156 |                     .arg(input)
157 |                     .output()
158 |                     .map_err(|e| anyhow!("failed to execute process: {}", e))?;
159 |             }
160 |             Ok(())
161 |         }
162 |         _ => {
163 |             let refresh = alfred::ItemBuilder::new(SUBCOMMAND_REFRESH)
164 |                 .subtitle("Refresh Cache, be patient you will be notified once complete")
165 |                 .arg(format!("{SUBCOMMAND_SETTINGS} {SUBCOMMAND_REFRESH}"))
166 |                 .into_item();
167 |             write_items(io::stdout(), &[refresh])
168 |         }
169 |     }
170 | }
171 | 
172 | fn write_items<W>(writer: W, items: &[Item]) -> Result<(), Error>
173 | where
174 |     W: Write,
175 | {
176 |     json::write_items(writer, items)
177 |         .map_err(|e| anyhow!("failed to write alfred items->json: {}", e))
178 | }
179 | 
--------------------------------------------------------------------------------
/datadog-workflow/src/database/errors.rs:
--------------------------------------------------------------------------------
1 | use thiserror::Error;
2 | 
3 | #[derive(Error, Debug)]
4 | pub enum Error {
5 |     #[error(transparent)]
6 |     SQLite(#[from] rusqlite::Error),
7 | }
8 | 
--------------------------------------------------------------------------------
/datadog-workflow/src/database/mod.rs:
--------------------------------------------------------------------------------
1 | pub mod errors;
2 | pub mod models;
3 | pub mod monitors;
4 | pub mod screenboards;
5 | pub mod timeboards;
6 | 
7 | use crate::database::errors::Error;
8 | use crate::database::models::Dashboard;
9 | use crate::database::monitors::Monitors;
10 | use crate::database::screenboards::Screenboards;
11 | use crate::database::timeboards::Timeboards;
12 | use rusqlite::{Connection, ToSql};
13 | 
14 | #[derive(Debug)]
15 | pub struct DbContext {
16 |     conn: Connection,
17 |     subdomain: String,
18 | }
19 | 
20 | impl DbContext {
21 |     #[inline]
22 |     pub fn new(database_url: &str, subdomain: String) -> Result<Self, Error> {
23 |         let conn = Connection::open(database_url)?;
24 |         Ok(DbContext { conn, subdomain })
25 |     }
26 | 
27 |     // TODO: make interior mutable instead of everything having to be mutable
28 | 
29 |     #[inline]
30 |     pub fn monitors(&mut self) -> Monitors {
31 |         Monitors::new(self)
32 |     }
33 | 
34 |     #[inline]
35 |     pub fn timeboards(&mut self) -> Timeboards {
36 |         Timeboards::new(self)
37 |     }
38 | 
39 |     #[inline]
40 |     pub fn screenboards(&mut self) -> Screenboards {
41 |         Screenboards::new(self)
42 |     }
43 | 
44 |     #[inline]
45 |     pub fn find_dashboard(&self, title: &str, limit: i64) -> Result<Vec<Dashboard>, Error> {
46 |         // This will allow searching by full name or just the words within the name;
47 |         // it's not a regex but it's good enough.
48 |         let query = format!(
49 |             "%{}%",
50 |             title
51 |                 .split_terminator(' ')
52 |                 .flat_map(|s| s.split_terminator('_'))
53 |                 .flat_map(|s| s.split_terminator('-'))
54 |                 .collect::<Vec<&str>>()
55 |                 .join("%")
56 |         );
57 | 
58 |         self.conn.prepare(
59 |             "SELECT title, description, url, modified FROM timeboards WHERE title LIKE ?1
60 |              UNION ALL
61 |              SELECT title, description, url, modified FROM screenboards WHERE title LIKE ?1
62 |              ORDER BY modified DESC
63 |              LIMIT ?",
64 |         )?.query_map([&query as &dyn ToSql, &limit], |row| {
65 |             Ok(Dashboard {
66 |                 title: row.get(0)?,
67 |                 description: row.get(1)?,
68 |                 url: row.get(2)?,
69 |             })
70 |         })?.map(|r| {
71 |             Ok(r?)
72 |         }).collect::<Result<Vec<_>, _>>()
73 |     }
74 | 
75 |     #[inline]
76 |     pub fn run_migrations(&mut self) -> Result<(), Error> {
77 |         self.timeboards().run_migrations()?;
78 |         self.screenboards().run_migrations()?;
79 |         self.monitors().run_migrations()?;
80 |         Ok(())
81 |     }
82 | 
83 |     #[inline]
84 |     pub fn optimize(&self) -> Result<(), Error> {
85 |         // since this workflow is READ heavy, let's optimize the SQLite indexes and DB
86 |         self.conn.execute("VACUUM;", [])?;
87 |         Ok(())
88 |     }
89 | }
--------------------------------------------------------------------------------
/datadog-workflow/src/database/models.rs:
--------------------------------------------------------------------------------
1 | use chrono::{DateTime, Utc};
2 | 
3 | #[derive(Debug, Deserialize)]
4 | pub struct InsertMonitor {
5 |     pub id: i32,
6 |     pub name: String,
7 |     pub tags: Vec<String>,
8 |     pub modified: DateTime<Utc>,
9 | }
10 | 
11 | #[derive(Debug)]
12 | pub struct Monitor {
13 |     pub id: i32,
14 |     pub name: String,
15 |     pub url: String,
16 |     pub modified: DateTime<Utc>,
17 | }
18 | 
19 | #[derive(Debug, Deserialize)]
20 | pub struct InsertTimeBoard {
21 |     pub id: String,
22 |     pub title: String,
23 |     pub description: Option<String>,
24 |     pub modified: DateTime<Utc>,
25 | }
26 | 
27 | #[derive(Debug, Deserialize)]
28 | pub struct TimeBoard {
29 |     pub id: String,
30 |     pub title: String,
31 |     pub description: String,
32 |     pub url: String,
33 |     pub modified: DateTime<Utc>,
34 | }
35 | 
36 | #[derive(Debug, Deserialize)]
37 | pub struct InsertScreenBoard {
38 |     pub id: i32,
39 |     pub title: String,
40 |     pub description: Option<String>,
41 |     pub modified: DateTime<Utc>,
42 | }
43 | 
44 | #[derive(Debug, Deserialize)]
45 | pub struct ScreenBoard {
46 |     pub id: i32,
47 |     pub title: String,
48 |     pub description: String,
49 |     pub url: String,
50 |     pub modified: DateTime<Utc>,
51 | }
52 | 
53 | #[derive(Debug)]
54 | pub struct Dashboard {
55 |     pub title: String,
56 |     pub description: String,
57 |     pub url: String,
58 | }
--------------------------------------------------------------------------------
/datadog-workflow/src/database/monitors.rs:
--------------------------------------------------------------------------------
1 | use crate::database::errors::Error;
2 | use crate::database::models::{InsertMonitor, Monitor};
3 | use crate::database::DbContext;
4 | use rusqlite::ToSql;
5 | 
6 | pub struct Monitors<'a> {
7 |     db: &'a mut DbContext,
8 | }
9 | 
10 | impl<'a> Monitors<'a> {
11 |     #[inline]
12 |     pub fn new(db: &'a mut DbContext) -> Self {
13 |         Self { db }
14 |     }
15 | 
16 |     #[inline]
17 |     pub fn run_migrations(&self) -> Result<(), Error> {
18 |         self.db.conn.execute_batch(
19 |             "CREATE TABLE IF NOT EXISTS monitors (
20 |                 id       INTEGER NOT NULL PRIMARY KEY,
21 |                 name     TEXT NOT NULL,
22 |                 url      TEXT NOT NULL,
23 |                 modified DATETIME NOT NULL
24 |             );
25 |             CREATE INDEX IF NOT EXISTS idx_monitors_name_modified ON monitors (name, modified);
26 |             CREATE TABLE IF NOT EXISTS monitor_tags (
27 |                 id   INTEGER NOT NULL,
28 |                 name TEXT NOT NULL,
29 |                 CONSTRAINT fk_monitors
30 |                     FOREIGN KEY (id)
31 |                     REFERENCES monitors(id)
32 |                     ON DELETE CASCADE
33 |             );
34 |             CREATE INDEX IF NOT EXISTS idx_monitor_tags_id ON monitor_tags (id);
35 |             CREATE INDEX IF NOT EXISTS idx_monitor_tags_name ON monitor_tags (name);",
36 |         )?;
37 |         Ok(())
38 |     }
39 | 
40 |     #[inline]
41 |     pub fn delete_all(&self) -> Result<(), Error> {
42 |         self.db.conn.execute("DELETE FROM monitors;", [])?;
43 |         Ok(())
44 |     }
45 | 
46 |     #[inline]
47 |     pub fn insert(&mut self, monitors: &[InsertMonitor]) -> Result<(), Error> {
48 |         let tx = self.db.conn.transaction()?;
49 |         let mut stmt_monitor =
50 |             tx.prepare("INSERT INTO monitors (id, name, url, modified) VALUES (?1, ?2, ?3, ?4)")?;
51 |         let mut stmt_tags = tx.prepare("INSERT INTO monitor_tags (id, name) VALUES (?1, ?2)")?;
52 | 
53 |         for monitor in monitors {
54 |             let url = format!(
55 |                 "https://{}.datadoghq.com/monitors/{}",
56 |                 self.db.subdomain, monitor.id
57 |             );
58 |             stmt_monitor.execute([
59 |                 &monitor.id as &dyn ToSql,
60 |                 &monitor.name,
61 |                 &url,
62 |                 &monitor.modified,
63 |             ])?;
64 |             for tag in &monitor.tags {
65 |                 stmt_tags.execute([&monitor.id as &dyn ToSql, &tag])?;
66 |             }
67 |         }
68 | 
69 |         stmt_monitor.finalize()?;
70 |         stmt_tags.finalize()?;
71 |         tx.commit()?;
72 |         Ok(())
73 |     }
74 | 
75 |     #[inline]
76 |     pub fn find(&self, name: &str, tag: Option<&str>, limit: i64) -> Result<Vec<Monitor>, Error> {
77 |         // This will allow searching by full name or just the words within the name;
78 |         // it's not a regex but it's good enough.
79 |         let query = format!(
80 |             "%{}%",
81 |             name.split_terminator(' ')
82 |                 .flat_map(|s| s.split_terminator('_'))
83 |                 .flat_map(|s| s.split_terminator('-'))
84 |                 .collect::<Vec<&str>>()
85 |                 .join("%")
86 |         );
87 | 
88 |         let tag_query: String;
89 |         let mut params: Vec<&dyn ToSql> = vec![&query];
90 |         let mut select = "SELECT m.id, m.name, m.url, m.modified FROM monitors m ".to_owned();
91 |         match tag {
92 |             Some(t) => {
93 |                 select += "LEFT JOIN monitor_tags t ON t.id = m.id WHERE m.name LIKE ? AND t.name LIKE ? ";
94 |                 tag_query = format!(
95 |                     "%{}%",
96 |                     t.split_terminator(' ')
97 |                         .flat_map(|s| s.split_terminator('_'))
98 |                         .flat_map(|s| s.split_terminator('-'))
99 |                         .collect::<Vec<&str>>()
100 |                         .join("%")
101 |                 );
102 |                 params.push(&tag_query);
103 |             }
104 |             _ => select += "WHERE m.name LIKE ? ",
105 |         }
106 |         select += "ORDER BY m.modified DESC LIMIT ?";
107 |         params.push(&limit);
108 | 
109 |         self.db
110 |             .conn
111 |             .prepare(&select)?
112 |             .query_map(&*params, |row| {
113 |                 Ok(Monitor {
114 |                     id: row.get(0)?,
115 |                     name: row.get(1)?,
116 |                     url: row.get(2)?,
117 |                     modified: row.get(3)?,
118 |                 })
119 |             })?
120 |             .map(|r| Ok(r?))
121 |             .collect::<Result<Vec<_>, _>>()
122 |     }
123 | }
--------------------------------------------------------------------------------
/datadog-workflow/src/database/screenboards.rs:
--------------------------------------------------------------------------------
1 | use crate::database::errors::Error;
2 | use crate::database::models::{InsertScreenBoard, ScreenBoard};
3 | use crate::database::DbContext;
4 | use rusqlite::ToSql;
5 | 
6 | pub struct Screenboards<'a> {
7 |     db: &'a mut DbContext,
8 | }
9 | 
10 | impl<'a> Screenboards<'a> {
11 |     #[inline]
12 |     pub fn new(db: &'a mut DbContext) -> Self {
13 |         Self { db }
14 |     }
15 | 
16 |     #[inline]
17 |     pub fn run_migrations(&self) -> Result<(), Error> {
18 |         self.db.conn.execute_batch(
19 |             "CREATE TABLE IF NOT EXISTS screenboards (
20 |                 id          INTEGER NOT NULL PRIMARY KEY,
21 |                 title       TEXT NOT NULL,
22 |                 description TEXT NOT NULL,
23 |                 url         TEXT NOT NULL,
24 |                 modified    DATETIME NOT NULL
25 |             );
26 |             CREATE INDEX IF NOT EXISTS idx_screenboards_title_modified ON screenboards (title, modified);",
27 |         )?;
28 |         Ok(())
29 |     }
30 | 
31 |     #[inline]
32 |     pub fn delete_all(&self) -> Result<(), Error> {
33 |         self.db.conn.execute("DELETE FROM screenboards;", [])?;
34 |         Ok(())
35 |     }
36 | 
37 |     #[inline]
38 |     pub fn insert(&mut self, screenboards: &[InsertScreenBoard]) -> Result<(), Error> {
39 |         let tx = self.db.conn.transaction()?;
40 |         let mut stmt = tx.prepare("INSERT INTO screenboards (id, title, description, url, modified) VALUES (?1, ?2, ?3, ?4, ?5)")?;
41 | 
42 |         for board in screenboards {
43 |             let url = format!(
44 |                 "https://{}.datadoghq.com/screen/{}",
45 |                 self.db.subdomain, board.id
46 |             );
47 |             stmt.execute([
48 |                 &board.id as &dyn ToSql,
49 |                 &board.title,
50 |                 &board.description.clone().unwrap_or_default(),
51 |                 &url,
52 |                 &board.modified.timestamp(),
53 |             ])?;
54 |         }
55 | 
56 |         stmt.finalize()?;
57 |         tx.commit()?;
58 |         Ok(())
59 |     }
60 | 
61 |     #[inline]
62 |     pub fn find(&self, title: &str, limit: i64) -> Result<Vec<ScreenBoard>, Error> {
63 |         // This will allow searching by full name or just the words within the name;
64 |         // it's not a regex but it's good enough.
65 |         let query = format!(
66 |             "%{}%",
67 |             title
68 |                 .split_terminator(' ')
69 |                 .flat_map(|s| s.split_terminator('_'))
70 |                 .flat_map(|s| s.split_terminator('-'))
71 |                 .collect::<Vec<&str>>()
72 |                 .join("%")
73 |         );
74 | 
75 |         self.db.conn.prepare(
76 |             "SELECT id, title, description, url, modified FROM screenboards WHERE title LIKE ? ORDER BY modified DESC LIMIT ?",
77 |         )?.query_map([&query as &dyn ToSql, &limit], |row| {
78 |             Ok(ScreenBoard {
79 |                 id: row.get(0)?,
80 |                 title: row.get(1)?,
81 |                 description: row.get(2)?,
82 |                 url: row.get(3)?,
83 |                 modified: row.get(4)?,
84 |             })
85 |         })?.map(|r| {
86 |             Ok(r?)
87 |         }).collect::<Result<Vec<_>, _>>()
88 |     }
89 | }
--------------------------------------------------------------------------------
/datadog-workflow/src/database/timeboards.rs:
--------------------------------------------------------------------------------
1 | use crate::database::errors::Error;
2 | use crate::database::models::{InsertTimeBoard, TimeBoard};
3 | use crate::database::DbContext;
4 | use rusqlite::ToSql;
5 | 
6 | pub struct Timeboards<'a> {
7 |     db: &'a mut DbContext,
8 | }
9 | 
10 | impl<'a> Timeboards<'a> {
11 |     #[inline]
12 |     pub fn new(db: &'a mut DbContext) -> Self {
13 |         Self { db }
14 |     }
15 | 
16 |     #[inline]
17 |     pub fn run_migrations(&self) -> Result<(), Error> {
18 |         self.db.conn.execute_batch(
19 |             "CREATE TABLE IF NOT EXISTS timeboards (
20 |                 id          TEXT NOT NULL PRIMARY KEY,
21 |                 title       TEXT NOT NULL,
22 |                 description TEXT NOT NULL,
23 |                 url         TEXT NOT NULL,
24 |                 modified    DATETIME NOT NULL
25 |             );
26 |             CREATE INDEX IF NOT EXISTS idx_timeboards_title_modified ON timeboards (title, modified);",
27 |         )?;
28 |         Ok(())
29 |     }
30 | 
31 |     #[inline]
32 |     pub fn delete_all(&self) -> Result<(), Error> {
33 |         self.db.conn.execute("DELETE FROM timeboards;", [])?;
34 |         Ok(())
35 |     }
36 | 
37 |     #[inline]
38 |     pub fn insert(&mut self, timeboards: &[InsertTimeBoard]) -> Result<(), Error> {
39 |         let tx = self.db.conn.transaction()?;
40 |         let mut stmt = tx.prepare("INSERT INTO timeboards (id, title, description, url, modified) VALUES (?1, ?2, ?3, ?4, ?5)")?;
41 | 
42 |         for board in timeboards {
43 |             let url = format!(
44 |                 "https://{}.datadoghq.com/dash/{}",
45 |                 self.db.subdomain, board.id
46 |             );
47 |             stmt.execute([
48 |                 &board.id as &dyn ToSql,
49 |                 &board.title,
50 |                 &board.description.clone().unwrap_or_default(),
51 |                 &url,
52 |                 &board.modified,
53 |             ])?;
54 |         }
55 | 
56 |         stmt.finalize()?;
57 |         tx.commit()?;
58 |         Ok(())
59 |     }
60 | 
61 |     #[inline]
62 |     pub fn find(&self, title: &str, limit: i64) -> Result<Vec<TimeBoard>, Error> {
63 |         // This will allow searching by full name or just the words within the name;
64 |         // it's not a regex but it's good enough.
65 |         let query = format!(
66 |             "%{}%",
67 |             title
68 |                 .split_terminator(' ')
69 |                 .flat_map(|s| s.split_terminator('_'))
70 |                 .flat_map(|s| s.split_terminator('-'))
71 |                 .collect::<Vec<&str>>()
72 |                 .join("%")
73 |         );
74 | 
75 |         self.db.conn.prepare(
76 |             "SELECT id, title, description, url, modified FROM timeboards WHERE title LIKE ? ORDER BY modified DESC LIMIT ?",
77 |         )?.query_map([&query as &dyn ToSql, &limit], |row| {
78 |             Ok(TimeBoard {
79 |                 id: row.get(0)?,
80 |                 title: row.get(1)?,
81 |                 description: row.get(2)?,
82 |                 url: row.get(3)?,
83 |                 modified: row.get(4)?,
84 |             })
85 |         })?.map(|r| {
86 |             Ok(r?)
87 |         }).collect::<Result<Vec<_>, _>>()
88 |     }
89 | }
--------------------------------------------------------------------------------
/datadog-workflow/src/datadog.rs:
--------------------------------------------------------------------------------
1 | use crate::database::models::{InsertMonitor, InsertScreenBoard, InsertTimeBoard};
2 | use crate::errors::Error;
3 | use reqwest::blocking::Client;
4 | 
5 | const APPLICATION_KEY: &str = "application_key";
6 | const API_KEY: &str = "api_key";
7 | 
8 | pub struct Api<'a> {
9 |     key: &'a str,
10 |     application_key: &'a str,
11 |     url: &'a str,
12 |     subdomain: &'a str,
13 |     client: Client,
14 | }
15 | 
16 | impl<'a> Api<'a> {
17 |     #[inline]
18 |     pub fn new(key: &'a str, application_key: &'a str, url: &'a str, subdomain: &'a str) -> Self {
19 |         Self {
20 |             key,
21 |             application_key,
22 |             url,
23 |             subdomain,
24 |             client: reqwest::blocking::Client::new(),
25 |         }
26 |     }
27 | 
28 |     #[inline]
29 |     pub fn get_timeboards(&self) -> Result<Vec<InsertTimeBoard>, Error> {
30 |         #[derive(Debug, Deserialize)]
31 |         struct Dashboards {
32 |             #[serde(rename = "dashes")]
33 |             boards: Vec<InsertTimeBoard>,
34 |         }
35 |         let results = self
36 |             .client
37 |             .get(format!("{}/v1/dash", self.url))
38 |             .query(&[(APPLICATION_KEY, self.application_key), (API_KEY, self.key)])
39 |             .send()?
40 |             .json::<Dashboards>()?
41 |             .boards;
42 |         Ok(results)
43 |     }
44 | 
45 |     #[inline]
46 |     pub fn get_screenboards(&self) -> Result<Vec<InsertScreenBoard>, Error> {
47 |         #[derive(Debug, Deserialize)]
48 |         struct ScreenBoards {
49 |             #[serde(rename = "screenboards")]
50 |             boards: Vec<InsertScreenBoard>,
51 |         }
52 | 
53 |         let results = self
54 |             .client
55 |             .get(format!("{}/v1/screen", self.url))
56 |             .query(&[(APPLICATION_KEY, self.application_key), (API_KEY, self.key)])
57 |             .send()?
58 |             .json::<ScreenBoards>()?
59 |             .boards;
60 |         Ok(results)
61 |     }
62 | 
63 |     #[inline]
64 |     pub fn get_monitors(&self) -> Result<Vec<InsertMonitor>, Error> {
65 |         let results = self
66 |             .client
67 |             .get(format!("{}/v1/monitor", self.url))
68 |             .query(&[(APPLICATION_KEY, self.application_key), (API_KEY, self.key)])
69 |             .send()?
70 |             .json::<Vec<InsertMonitor>>()?;
71 |         Ok(results)
72 |     }
73 | }
--------------------------------------------------------------------------------
/datadog-workflow/src/errors.rs:
--------------------------------------------------------------------------------
1 | use thiserror::Error;
2 | 
3 | #[derive(Error, Debug)]
4 | pub enum Error {
5 |     #[error(transparent)]
6 |     Request(#[from] reqwest::Error),
7 | 
8 |     #[error(transparent)]
9 |     SQLite(#[from] crate::database::errors::Error),
10 | }
11 | 
--------------------------------------------------------------------------------
/datadog-workflow/src/lib.rs:
--------------------------------------------------------------------------------
1 | #[macro_use]
2 | extern crate serde;
3 | 
4 | pub(crate) mod database;
5 | pub(crate) mod datadog;
6 | pub mod errors;
7 | pub mod workflow;
8 | 
--------------------------------------------------------------------------------
/datadog-workflow/src/workflow.rs:
--------------------------------------------------------------------------------
1 | use crate::database::DbContext;
2 | use crate::datadog::Api;
3 | use crate::errors::Error;
4 | use alfred::Item;
5 | use std::str;
6 | 
7 | pub struct Workflow<'a> {
8 |     api_key: &'a str,
9 |     application_key: &'a str,
10 |     api_url: &'a str,
11 |     db: DbContext,
12 |     subdomain: &'a str,
13 | }
14 | 
15 | impl<'a> Workflow<'a> {
16 |     /// Creates a new `DataDog` workflow for use.
17 |     ///
18 |     /// # Errors
19 |     /// Can return an error when a database error occurs.
20 |     #[inline]
21 |     pub fn new(
22 |         api_key: &'a str,
23 |         application_key: &'a str,
24 |         database_url: &str,
25 |         api_url: &'a str,
26 |         subdomain: &'a str,
27 |     ) -> Result<Self, Error> {
28 |         let db = DbContext::new(database_url, subdomain.to_owned())?;
29 |         Ok(Workflow {
30 |             api_key,
31 |             application_key,
32 |             api_url,
33 |             db,
34 |             subdomain,
35 |         })
36 |     }
37 | 
38 |     /// Refreshes cached data from the `DataDog` API.
39 |     ///
40 |     /// # Errors
41 |     /// Can return an error when a database or API error occurs.
42 |     pub fn refresh_cache(&mut self) -> Result<(), Error> {
43 |         let datadog_api = Api::new(
44 |             self.api_key,
45 |             self.application_key,
46 |             self.api_url,
47 |             self.subdomain,
48 |         );
49 |         self.db.run_migrations()?;
50 |         self.refresh_timeboards(&datadog_api)?;
51 |         self.refresh_screenboards(&datadog_api)?;
52 |         self.refresh_monitors(&datadog_api)?;
53 | 
54 |         // and DB cleanup work
55 |         Ok(self.db.optimize()?)
56 |     }
57 | 
58 |     fn refresh_timeboards(&mut self, datadog_api: &Api) -> Result<(), Error> {
59 |         let mut db = self.db.timeboards();
60 |         db.delete_all()?;
61 |         let results = datadog_api.get_timeboards()?;
62 |         db.insert(&results)?;
63 |         Ok(())
64 |     }
65 | 
66 |     fn refresh_screenboards(&mut self, datadog_api: &Api) -> Result<(), Error> {
67 |         let mut db = self.db.screenboards();
68 |         db.delete_all()?;
69 |         let results = datadog_api.get_screenboards()?;
70 |         db.insert(&results)?;
71 |         Ok(())
72 |     }
73 | 
74 |     fn refresh_monitors(&mut self, datadog_api: &Api) -> Result<(), Error> {
75 |         let mut db = self.db.monitors();
76 |         db.delete_all()?;
77 |         let results = datadog_api.get_monitors()?;
78 |         db.insert(&results)?;
79 |         Ok(())
80 |     }
81 | 
82 |     /// Query `DataDog` Time Boards.
83 |     ///
84 |     /// # Errors
85 |     /// Can return an error when a database error occurs.
86 |     pub fn query_timeboards<'items>(&mut self, title: &str) -> Result<Vec<Item<'items>>, Error> {
87 |         let results = self.db.timeboards().find(title, 10)?;
88 |         let items = results
89 |             .into_iter()
90 |             .map(|m| {
91 |                 alfred::ItemBuilder::new(m.title.clone())
92 |                     .subtitle(m.description)
93 |                     .autocomplete(m.title)
94 |                     .arg(format!("open {}", m.url))
95 |                     .into_item()
96 |             })
97 |             .collect();
98 |         Ok(items)
99 |     }
100 | 
101 |     /// Query `DataDog` Screen Boards.
102 |     ///
103 |     /// # Errors
104 |     /// Can return an error when a database error occurs.
105 |     pub fn query_screenboards<'items>(&mut self, title: &str) -> Result<Vec<Item<'items>>, Error> {
106 |         let results = self.db.screenboards().find(title, 10)?;
107 |         let items = results
108 |             .into_iter()
109 |             .map(|m| {
110 |                 alfred::ItemBuilder::new(m.title.clone())
111 |                     .subtitle(m.description)
112 |                     .autocomplete(m.title)
113 |                     .arg(format!("open {}", m.url))
114 |                     .into_item()
115 |             })
116 |             .collect();
117 |         Ok(items)
118 |     }
119 | 
120 |     /// Query `DataDog` Dashboards.
121 |     ///
122 |     /// # Errors
123 |     /// Can return an error when a database error occurs.
124 |     pub fn query_dashboards<'items>(&self, title: &str) -> Result<Vec<Item<'items>>, Error> {
125 |         let results = self.db.find_dashboard(title, 10)?;
126 |         let items = results
127 |             .into_iter()
128 |             .map(|m| {
129 |                 alfred::ItemBuilder::new(m.title.clone())
130 |                     .subtitle(m.description)
131 |                     .autocomplete(m.title)
132 |                     .arg(format!("open {}", m.url))
133 |                     .into_item()
134 |             })
135 |             .collect();
136 |         Ok(items)
137 |     }
138 | 
139 |     /// Query `DataDog` Monitors.
140 |     ///
141 |     /// # Errors
142 |     /// Can return an error when a database error occurs.
143 | pub fn query_monitors<'items>( 144 | &mut self, 145 | name: &str, 146 | tag: Option<&str>, 147 | ) -> Result<Vec<Item<'items>>, Error> { 148 | let results = self.db.monitors().find(name, tag, 10)?; 149 | let items = results 150 | .into_iter() 151 | .map(|m| { 152 | alfred::ItemBuilder::new(m.name.clone()) 153 | .subtitle(m.name.clone()) 154 | .autocomplete(m.name) 155 | .arg(format!("open {}", m.url)) 156 | .into_item() 157 | }) 158 | .collect(); 159 | Ok(items) 160 | } 161 | } 162 | -------------------------------------------------------------------------------- /date-formats-workflow/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | authors = ["Dean Karn "] 3 | categories = [ 4 | "date-and-time", 5 | "development-tools", 6 | ] 7 | description = "Date Formats Workflow to quickly handle common date formats and conversions." 8 | edition = "2024" 9 | keywords = [ 10 | "alfred", 11 | "workflow", 12 | "date", 13 | "time", 14 | "datetime", 15 | ] 16 | license = "MIT" 17 | name = "date-formats-workflow" 18 | readme = "README.md" 19 | repository = "https://github.com/rust-playground/alfred-workflows-rs/tree/master/date-formats-workflow" 20 | version = "1.6.0" 21 | 22 | [dependencies] 23 | alfred.workspace = true 24 | thiserror.workspace = true 25 | anyhow.workspace = true 26 | clap.workspace = true 27 | chrono = "0.4.40" 28 | chrono-tz = "0.10.3" 29 | anydate = "0.4.0" 30 | -------------------------------------------------------------------------------- /date-formats-workflow/README.md: -------------------------------------------------------------------------------- 1 | # Date Formats Workflow 2 | 3 | Date Formats Alfred Workflow for quick DateTime formatting and locale adjustment. 4 | 5 | Requirements 6 | ------------- 7 | N/A 8 | 9 | Installation 10 | ------------- 11 | 1. Download date-formats-workflow.alfredworkflow from the repo's [releases](https://github.com/rust-playground/alfred-workflows-rs/releases) section 12 | 2. Install in Alfred (double-click) 13 | 14 | Usage 15 | ------ 16 | By default all dates & times are displayed in UTC; however, this can be overridden by specifying the 17 | timezone with the trailing argument `-t, --tz [timezone]`; examples are shown below. 18 | 19 | - `df now` displays various date formats for the current date & time. 20 | - `df [date & time string]` parses the provided date & time string and displays common formats for that time. Supported formats include: 21 | - ISO8601 22 | - RFC3339 23 | - RFC2822 24 | - yyyy-mm-dd 25 | - yyyy-mm-dd hh:mm:ss 26 | - yyyy-mm-dd hh:mm:ss tz 27 | - Fri Nov 28 12:00:09 2014 28 | - UNIX Timestamp - seconds, milliseconds and nanoseconds 29 | - Adding `-t [timezone]` or `--tz [timezone]` to the command will convert the dates to the provided timezone, e.g. `df now --tz PST` will display the current time in PST ('America/Vancouver' in this case).
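For example (any IANA timezone name accepted by `chrono-tz` should work here; the timestamp below is simply the sample date from the list above, 2014-11-28 12:00:09 UTC, expressed in seconds):

- `df 1417176009 --tz America/Vancouver` parses the UNIX timestamp and displays the formats in Pacific time.
- `df 2014-11-28 12:00:09 -t UTC` parses the date & time string and displays the formats in UTC.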
30 | -------------------------------------------------------------------------------- /date-formats-workflow/src/errors.rs: -------------------------------------------------------------------------------- 1 | use std::io; 2 | use thiserror::Error; 3 | 4 | #[derive(Error, Debug)] 5 | pub enum Error { 6 | #[error("{}", _0)] 7 | Text(String), 8 | 9 | #[error("failed to write alfred items->json {}", _0)] 10 | WriteItems(#[from] io::Error), 11 | } 12 | -------------------------------------------------------------------------------- /date-formats-workflow/src/main.rs: -------------------------------------------------------------------------------- 1 | mod errors; 2 | 3 | use alfred::{json, Item}; 4 | use anyhow::Error as AnyError; 5 | use chrono::prelude::*; 6 | use chrono::{Local, Utc}; 7 | use chrono_tz::Tz; 8 | use clap::Parser; 9 | use errors::Error; 10 | use std::io; 11 | use std::io::Write; 12 | use std::str::FromStr; 13 | 14 | #[derive(Parser)] 15 | #[command(author, version, about, long_about = None)] 16 | #[command(propagate_version = true)] 17 | struct Cli { 18 | #[arg(global = true, short, long, default_value = "UTC")] 19 | tz: Option<String>, 20 | 21 | #[arg()] 22 | date: Option<Vec<String>>, 23 | } 24 | 25 | const NOW_DT: &str = "now"; 26 | 27 | fn main() -> Result<(), AnyError> { 28 | let opts = Cli::parse(); 29 | 30 | let tz = opts.tz.unwrap(); // safe because there is a default value 31 | 32 | if let Some(mut date_parts) = opts.date { 33 | let date_str = if date_parts.len() == 1 { 34 | date_parts.swap_remove(0) 35 | } else { 36 | date_parts.join(" ") 37 | }; 38 | let date_str = date_str.trim(); 39 | 40 | if date_str == NOW_DT { 41 | let now = Utc::now(); 42 | let dt = parse_timezone_and_date(&now, &tz)?; 43 | write_variations(&dt)?; 44 | } else { 45 | let parsed = anydate::parse_utc(date_str)?; 46 | let dt = parse_timezone_and_date(&parsed, &tz)?; 47 | write_variations(&dt)?; 48 | } 49 | } else { 50 | let now = alfred::ItemBuilder::new(NOW_DT) 51 | .subtitle("Common date time formats for the current time in UTC.") 52 | .autocomplete(format!(" {NOW_DT}")) 53 | .arg(format!(" {NOW_DT} --tz {}", &tz)) 54 | .into_item(); 55 | write_items(io::stdout(), &[now])?; 56 | } 57 | Ok(()) 58 | } 59 | 60 | #[inline] 61 | fn parse_timezone_and_date(ndt: &DateTime<Utc>, tz: &str) -> Result<DateTime<Tz>, Error> { 62 | // PST, CST, etc. aren't real IANA timezone names, so map them to common equivalents for ease of use. 63 | let tz = match tz.to_lowercase().as_str() { 64 | "pst" => "America/Vancouver", 65 | "cst" => "America/Winnipeg", 66 | _ => tz, 67 | }; 68 | Tz::from_str(tz) 69 | .map_err(|e| Error::Text(e.to_string())) 70 | .map(|tz| ndt.with_timezone(&tz)) 71 | } 72 | 73 | #[inline] 74 | fn write_items<W>(writer: W, items: &[Item]) -> Result<(), Error> 75 | where 76 | W: Write, 77 | { 78 | Ok(json::write_items(writer, items)?)
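// Note: `json::write_items` from the `alfred` crate serializes the items into
// Alfred's script-filter JSON envelope, i.e. {"items": [...]}, on the given writer.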
79 | } 80 | 81 | #[inline] 82 | fn write_variations(dt: &DateTime<Tz>) -> Result<(), Error> { 83 | let unix_sec = build_item(dt.timestamp().to_string(), "UNIX timestamp - seconds"); 84 | let unix_milli = build_item( 85 | dt.timestamp_millis().to_string(), 86 | "UNIX timestamp - milliseconds", 87 | ); 88 | let unix_nano = build_item( 89 | dt.timestamp_nanos_opt().unwrap_or_default().to_string(), 90 | "UNIX timestamp - nanoseconds", 91 | ); 92 | let rfc_3339 = build_item( 93 | dt.to_rfc3339_opts(SecondsFormat::Secs, true), 94 | "rfc_3339 - iso8601 compatible", 95 | ); 96 | let rfc_3339_nano = build_item( 97 | dt.to_rfc3339_opts(SecondsFormat::Nanos, true), 98 | "rfc_3339_nano - iso8601 compatible", 99 | ); 100 | let rfc_2822 = build_item(dt.to_rfc2822(), "rfc_2822"); 101 | let alt = build_item(dt.format("%e %b %Y %H:%M:%S").to_string(), ""); 102 | 103 | let diff = dt.with_timezone(&Utc).signed_duration_since(Utc::now()); 104 | let attr = if diff.num_nanoseconds().unwrap() < 0 { 105 | "ago" 106 | } else { 107 | "to go" 108 | }; 109 | let decor = if diff.num_nanoseconds().unwrap() < 0 { 110 | "Time since" 111 | } else { 112 | "Time until" 113 | }; 114 | let diff_str = format!( 115 | "{:?}d, {:?}h, {:?}m, {:?}s {}", 116 | diff.num_days().abs(), 117 | diff.num_hours().abs() % 24, 118 | diff.num_minutes().abs() % 60, 119 | diff.num_seconds().abs() % 60, 120 | attr 121 | ); 122 | let time_since = build_item(diff_str, decor); 123 | 124 | let time_current_tz = build_item( 125 | dt.with_timezone(&Local) 126 | .format("%e %b %Y %H:%M:%S") 127 | .to_string(), 128 | "Time in local timezone", 129 | ); 130 | 131 | write_items( 132 | io::stdout(), 133 | &[ 134 | unix_sec, 135 | unix_milli, 136 | unix_nano, 137 | alt, 138 | time_current_tz, 139 | rfc_2822, 140 | rfc_3339, 141 | rfc_3339_nano, 142 | time_since, 143 | ], 144 | ) 145 | } 146 | 147 | #[inline] 148 | fn build_item(date_string: String, subtitle: &str) -> Item { 149 | alfred::ItemBuilder::new(date_string.clone()) 150 | .subtitle(subtitle) 151 | .arg(date_string) 152 | .into_item() 153 | } 154 | -------------------------------------------------------------------------------- /github-workflow/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | authors = ["Dean Karn "] 3 | categories = ["development-tools"] 4 | description = "GitHub Alfred Workflow to quickly search and open repositories."
5 | edition = "2024" 6 | keywords = [ 7 | "alfred", 8 | "workflow", 9 | "github", 10 | ] 11 | license = "MIT" 12 | name = "github-workflow" 13 | readme = "README.md" 14 | repository = "https://github.com/rust-playground/alfred-workflows-rs/tree/master/github-workflow" 15 | version = "2.5.0" 16 | 17 | [[bin]] 18 | name = "github-workflow" 19 | path = "src/bin/main.rs" 20 | 21 | [dependencies] 22 | alfred.workspace = true 23 | thiserror.workspace = true 24 | anyhow.workspace = true 25 | rusqlite.workspace = true 26 | clap.workspace = true 27 | serde = { version = "1.0.197", features = ["derive"] } 28 | chrono = { version = "0.4.34", features = ["serde"] } 29 | reqwest = { version = "0.12.15", features = ["rustls-tls","blocking", "json"] } 30 | 31 | [lib] 32 | name = "github_workflow_lib" 33 | path = "src/lib.rs" 34 | -------------------------------------------------------------------------------- /github-workflow/README.md: -------------------------------------------------------------------------------- 1 | # GitHub Workflow 2 | 3 | GitHub Alfred Workflow to cache and search repositories. 4 | 5 | Requirements 6 | ------------- 7 | SQLite - cache and config values are stored in an SQLite database 8 | GitHub Access Token - for GitHub API access 9 | 10 | Installation 11 | ------------- 12 | 1. Download github-workflow.alfredworkflow from the repo's [releases](https://github.com/rust-playground/alfred-workflows-rs/releases) section 13 | 2. Install in Alfred (double-click) 14 | 15 | Setup 16 | ------ 17 | 1. Have your GitHub Access Token ready; if you don't have one, you can generate one at https://github.com/settings/tokens; you may have to ensure it is authorized for SSO. 18 | 2. In Alfred set the `API_KEY` environment variable to your GitHub API key/Access Token. ![Alfred Settings](https://github.com/rust-playground/alfred-workflows-rs/raw/master/github-workflow/github.png) 19 | 3. In Alfred type `gh `, navigate to refresh, hit *ENTER* to cache/index your GitHub repositories. This may take some time depending on the number of organizations and repositories you have access to; there will be a notification popup once complete.
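Note: the workflow binary reads its configuration from the `API_KEY` and `DATABASE_URL` environment variables (see `src/bin/main.rs`), so for testing outside of Alfred you can, for example, run `API_KEY=<token> DATABASE_URL=repos.db ./github-workflow refresh` against a local build.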
20 | 21 | Usage 22 | ------ 23 | - `gh ` followed by a search term queries the cached GitHub repositories 24 | -------------------------------------------------------------------------------- /github-workflow/github.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rust-playground/alfred-workflows-rs/793b06439ee2bb7e5c60ad4e9b46cb8176c0c0d7/github-workflow/github.png -------------------------------------------------------------------------------- /github-workflow/src/bin/main.rs: -------------------------------------------------------------------------------- 1 | use alfred::{json, Item}; 2 | use anyhow::{anyhow, Error}; 3 | use clap::{Parser, Subcommand}; 4 | use github_workflow_lib::workflow::Workflow; 5 | use std::io::Write; 6 | use std::{env, io, process::Command}; 7 | 8 | const SUBCOMMAND_REFRESH: &str = "refresh"; 9 | 10 | #[derive(Parser)] 11 | #[command(author, version, about, long_about = None)] 12 | #[command(propagate_version = true)] 13 | struct Cli { 14 | #[arg()] 15 | name: Option<Vec<String>>, 16 | 17 | #[command(subcommand)] 18 | command: Option<Commands>, 19 | } 20 | 21 | #[derive(Subcommand)] 22 | enum Commands { 23 | Refresh, 24 | Open { url: String }, 25 | } 26 | 27 | fn main() -> Result<(), Error> { 28 | let opts = Cli::parse(); 29 | 30 | let api_key = env::var("API_KEY")?; 31 | let database_url = env::var("DATABASE_URL")?; 32 | let mut wf = Workflow::new(&api_key, &database_url)?; 33 | 34 | match opts.command { 35 | Some(Commands::Refresh) => { 36 | wf.refresh_cache()?; 37 | println!("Successfully Refreshed GitHub cache"); 38 | } 39 | Some(Commands::Open { url }) => { 40 | Command::new("open") 41 | .arg(url) 42 | .output() 43 | .map_err(|e| anyhow!("failed to execute process: {}", e))?; 44 | } 45 | _ => { 46 | if let Some(mut name_parts) = opts.name { 47 | let search_str = if name_parts.len() == 1 { 48 | name_parts.swap_remove(0) 49 | } else { 50 | name_parts.join(" ") 51 | }; 52 | let search_str = search_str.trim(); 53 | 54 | let items = wf.query(search_str)?; 55 | write_items(io::stdout(), &items)?; 56 | } else { 57 | let refresh = alfred::ItemBuilder::new(SUBCOMMAND_REFRESH) 58 | .subtitle("Refresh Cache; be patient, you will be notified once complete") 59 | .arg(SUBCOMMAND_REFRESH) 60 | .into_item(); 61 | write_items(io::stdout(), &[refresh])?; 62 | } 63 | } 64 | } 65 | Ok(()) 66 | } 67 | 68 | fn write_items<W>(writer: W, items: &[Item]) -> Result<(), Error> 69 | where 70 | W: Write, 71 | { 72 | json::write_items(writer, items) 73 | .map_err(|e| anyhow!("failed to write alfred items->json: {}", e)) 74 | } 75 | -------------------------------------------------------------------------------- /github-workflow/src/database/errors.rs: -------------------------------------------------------------------------------- 1 | use thiserror::Error; 2 | 3 | #[derive(Error, Debug)] 4 | pub enum Error { 5 | #[error(transparent)] 6 | SQLite(#[from] rusqlite::Error), 7 | } 8 | -------------------------------------------------------------------------------- /github-workflow/src/database/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod errors; 2 | pub mod models; 3 | 4 | use crate::database::models::Repository; 5 | use errors::Error; 6 | use rusqlite::{Connection, ToSql}; 7 | 8 | pub struct DbContext { 9 | conn: Connection, 10 | } 11 | 12 | impl DbContext { 13 | #[inline] 14 | pub fn new(database_url: &str) -> Result<Self, Error> { 15 | let conn = Connection::open(database_url)?; 16 | Ok(DbContext { conn }) 17 | } 18 | 19 | #[inline]
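// Note: the migration below is idempotent (`CREATE TABLE IF NOT EXISTS`), so
// `refresh_cache` can safely re-run it on every refresh of the cache.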
20 | pub fn run_migrations(&self) -> Result<(), Error> { 21 | self.conn.execute_batch( 22 | "CREATE TABLE IF NOT EXISTS repositories ( 23 | name_with_owner TEXT NOT NULL PRIMARY KEY, 24 | name TEXT NOT NULL, 25 | url TEXT NOT NULL, 26 | pushed_at DATETIME NOT NULL 27 | );", 28 | )?; 29 | Ok(()) 30 | } 31 | 32 | #[inline] 33 | pub fn delete_repositories(&self) -> Result<(), Error> { 34 | self.conn.execute_batch("DELETE FROM repositories;")?; 35 | Ok(()) 36 | } 37 | 38 | #[inline] 39 | pub fn find_repositories(&self, repo_name: &str, limit: i64) -> Result<Vec<Repository>, Error> { 40 | // This will allow searching by full name or just the words within the name; it's not a regex 41 | // but it's good enough (e.g. "alfred rust" becomes the LIKE pattern "%alfred%rust%"). 42 | let name = format!( 43 | "%{}%", 44 | repo_name 45 | .split_terminator(' ') 46 | .flat_map(|s| s.split_terminator('_')) 47 | .flat_map(|s| s.split_terminator('-')) 48 | .collect::<Vec<_>>() 49 | .join("%") 50 | ); 51 | 52 | self.conn.prepare( 53 | "SELECT name_with_owner, name, url, pushed_at FROM repositories WHERE name LIKE ? ORDER BY pushed_at DESC LIMIT ?", 54 | )?.query_map([&name as &dyn ToSql,&limit], |row| { 55 | Ok(Repository{ 56 | name_with_owner: row.get(0)?, 57 | name:row.get(1)?, 58 | url:row.get(2)?, 59 | pushed_at:row.get(3)?, 60 | }) 61 | })?.map(|r|{ 62 | Ok(r?) 63 | }).collect::<Result<Vec<_>, _>>() 64 | } 65 | 66 | #[inline] 67 | pub fn insert_repositories(&mut self, repositories: &[Repository]) -> Result<(), Error> { 68 | let tx = self.conn.transaction()?; 69 | let mut stmt = tx.prepare("INSERT INTO repositories (name_with_owner, name, url, pushed_at) VALUES (?1, ?2, ?3, ?4)")?; 70 | 71 | for repo in repositories { 72 | stmt.execute([ 73 | &repo.name_with_owner as &dyn ToSql, 74 | &repo.name, 75 | &repo.url, 76 | &repo.pushed_at, 77 | ])?; 78 | } 79 | 80 | stmt.finalize()?; 81 | tx.commit()?; 82 | 83 | Ok(()) 84 | } 85 | 86 | #[inline] 87 | pub fn optimize(&self) -> Result<(), Error> { 88 | // since this workflow is READ heavy, let's optimize the SQLite indexes and DB 89 | self.conn.execute("VACUUM;", [])?; 90 | Ok(()) 91 | } 92 | } 93 | -------------------------------------------------------------------------------- /github-workflow/src/database/models.rs: -------------------------------------------------------------------------------- 1 | use chrono::{DateTime, Utc}; 2 | 3 | #[derive(Debug)] 4 | pub struct Repository { 5 | pub name_with_owner: String, 6 | pub name: String, 7 | pub url: String, 8 | pub pushed_at: DateTime<Utc>, 9 | } 10 | -------------------------------------------------------------------------------- /github-workflow/src/errors.rs: -------------------------------------------------------------------------------- 1 | use thiserror::Error; 2 | 3 | #[derive(Error, Debug)] 4 | pub enum Error { 5 | #[error(transparent)] 6 | Request(#[from] reqwest::Error), 7 | 8 | #[error(transparent)] 9 | SQLite(#[from] crate::database::errors::Error), 10 | } 11 | -------------------------------------------------------------------------------- /github-workflow/src/github.rs: -------------------------------------------------------------------------------- 1 | use crate::database::models::Repository; 2 | use crate::errors::Error; 3 | use chrono::{DateTime, Utc}; 4 | use reqwest::header::{CONTENT_TYPE, USER_AGENT}; 5 | 6 | #[derive(Debug)] 7 | pub struct GitHubAPI<'a> { 8 | token: &'a str, 9 | } 10 | 11 | impl<'a> GitHubAPI<'a> { 12 | #[inline] 13 | pub const fn new(token: &'a str) -> Self { 14 | Self { token } 15 | } 16 | 17 | #[inline] 18 | pub const fn accessible_repositories(&self) -> OwnedRepositories { 19
| OwnedRepositories { 20 | api: self, 21 | has_more: true, 22 | cursor: None, 23 | } 24 | } 25 | 26 | #[inline] 27 | fn fetch_repositories(&self, cursor: Option<String>) -> Result<Results, Error> { 28 | let arg = cursor.map_or_else(String::new, |v| format!(", after:\"{v}\"")); 29 | let query = format!( 30 | "query {{ \ 31 | viewer {{ \ 32 | repositories(first: 100, affiliations: [OWNER, COLLABORATOR, ORGANIZATION_MEMBER], ownerAffiliations: [OWNER, COLLABORATOR, ORGANIZATION_MEMBER]{arg}) {{ \ 33 | pageInfo {{ \ 34 | hasNextPage \ 35 | endCursor \ 36 | }} \ 37 | edges {{ \ 38 | node {{ \ 39 | pushedAt \ 40 | url \ 41 | }} \ 42 | }} \ 43 | }} \ 44 | }} \ 45 | }}" 46 | ); 47 | 48 | // TODO: clean this up with a proper type that will escape automatically when serialized to JSON 49 | let mut escaped = query; 50 | escaped = escaped.replace('\n', "\\n"); 51 | escaped = escaped.replace('\"', "\\\""); 52 | 53 | let mut q = String::from("{ \"query\": \""); 54 | q.push_str(&escaped); 55 | q.push_str("\" }"); 56 | 57 | let results: Results = reqwest::blocking::Client::new() 58 | .post("https://api.github.com/graphql") 59 | .bearer_auth(self.token) 60 | .header(CONTENT_TYPE, "application/json") 61 | .header(USER_AGENT, "Alfred Github Workflow") 62 | .body(q) 63 | .send()? 64 | .json()?; 65 | 66 | Ok(results) 67 | } 68 | } 69 | 70 | pub struct OwnedRepositories<'a> { 71 | api: &'a GitHubAPI<'a>, 72 | has_more: bool, 73 | cursor: Option<String>, 74 | } 75 | 76 | impl Iterator for OwnedRepositories<'_> { 77 | type Item = Vec<Repository>; 78 | 79 | fn next(&mut self) -> Option<Self::Item> { 80 | if !self.has_more { 81 | return None; 82 | } 83 | let results = self 84 | .api 85 | .fetch_repositories(self.cursor.take()) 86 | .expect("unable to fetch data from the GitHub API"); 87 | self.has_more = results.data.viewer.repositories.page_info.has_next_page; 88 | if self.has_more { 89 | self.cursor = Some(results.data.viewer.repositories.page_info.end_cursor); 90 | } 91 | Some( 92 | results 93 | .data 94 | .viewer 95 | .repositories 96 | .edges 97 | .into_iter() 98 | .filter_map(|edge| match edge { 99 | Some(e) => e.node, 100 | _ => None, 101 | }) 102 | .map(|node| { 103 | let mut s = node.url.rsplit('/'); 104 | let name = s.next().unwrap_or_default().to_string(); 105 | let owner = s.next().unwrap_or_default(); 106 | Repository { 107 | name_with_owner: format!("{owner}/{name}"), 108 | name, 109 | url: node.url, 110 | pushed_at: node.pushed_at, 111 | } 112 | }) 113 | .collect(), 114 | ) 115 | } 116 | } 117 | 118 | #[derive(Debug, Deserialize)] 119 | struct PageInfo { 120 | #[serde(rename = "endCursor")] 121 | end_cursor: String, 122 | #[serde(rename = "hasNextPage")] 123 | has_next_page: bool, 124 | } 125 | 126 | #[derive(Debug, Deserialize)] 127 | struct Node { 128 | url: String, 129 | #[serde(rename = "pushedAt")] 130 | pushed_at: DateTime<Utc>, 131 | } 132 | 133 | #[derive(Debug, Deserialize)] 134 | struct Edge { 135 | node: Option<Node>, 136 | } 137 | 138 | #[derive(Debug, Deserialize)] 139 | struct Repositories { 140 | edges: Vec<Option<Edge>>, 141 | #[serde(rename = "pageInfo")] 142 | page_info: PageInfo, 143 | } 144 | 145 | #[derive(Debug, Deserialize)] 146 | struct Viewer { 147 | repositories: Repositories, 148 | } 149 | 150 | #[derive(Debug, Deserialize)] 151 | struct Data { 152 | viewer: Viewer, 153 | } 154 | 155 | #[derive(Debug, Deserialize)] 156 | struct Results { 157 | data: Data, 158 | } 159 | -------------------------------------------------------------------------------- /github-workflow/src/lib.rs:
-------------------------------------------------------------------------------- 1 | #[macro_use] 2 | extern crate serde; 3 | 4 | pub(crate) mod database; 5 | pub mod errors; 6 | pub(crate) mod github; 7 | pub mod workflow; 8 | -------------------------------------------------------------------------------- /github-workflow/src/workflow.rs: -------------------------------------------------------------------------------- 1 | use crate::database::DbContext; 2 | use crate::errors::Error; 3 | use crate::github::GitHubAPI; 4 | use alfred::Item; 5 | 6 | pub struct Workflow<'a> { 7 | api_key: &'a str, 8 | db: DbContext, 9 | } 10 | 11 | impl<'a> Workflow<'a> { 12 | /// # Errors 13 | /// 14 | /// Will return `Err` if the database could not be connected to. 15 | /// 16 | #[inline] 17 | pub fn new(api_key: &'a str, database_url: &str) -> Result<Self, Error> { 18 | let db = DbContext::new(database_url)?; 19 | Ok(Workflow { api_key, db }) 20 | } 21 | 22 | /// # Errors 23 | /// 24 | /// Will return `Err` if contacting GitHub returns an error. 25 | /// 26 | #[inline] 27 | pub fn refresh_cache(&mut self) -> Result<(), Error> { 28 | self.db.run_migrations()?; 29 | let api = GitHubAPI::new(self.api_key); 30 | 31 | self.db.delete_repositories()?; 32 | 33 | for v in api.accessible_repositories() { 34 | self.db.insert_repositories(&v)?; 35 | } 36 | // and DB cleanup work 37 | self.db.optimize()?; 38 | Ok(()) 39 | } 40 | 41 | /// # Errors 42 | /// 43 | /// Will return `Err` if querying the database fails. 44 | /// 45 | #[inline] 46 | pub fn query<'items>(&self, repo_name: &str) -> Result<Vec<Item<'items>>, Error> { 47 | self.db 48 | .find_repositories(repo_name, 10)? 49 | .into_iter() 50 | .map(|repo| { 51 | Ok(alfred::ItemBuilder::new(repo.name_with_owner) 52 | .subtitle(repo.name.clone()) 53 | .autocomplete(repo.name) 54 | .arg(format!("open {}", repo.url)) 55 | .into_item()) 56 | }) 57 | .collect::<Result<Vec<_>, _>>() 58 | } 59 | } 60 | --------------------------------------------------------------------------------
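A minimal end-to-end sketch of how the library above fits together (hypothetical token and database path; this mirrors what `src/bin/main.rs` does via its clap subcommands):

    use github_workflow_lib::workflow::Workflow;

    fn main() -> Result<(), anyhow::Error> {
        // construct against a local SQLite cache file
        let mut wf = Workflow::new("ghp_example_token", "repos.db")?;
        wf.refresh_cache()?; // pull all accessible repositories into the cache
        let items = wf.query("alfred workflow")?; // LIKE search, top 10 results
        alfred::json::write_items(std::io::stdout(), &items)?;
        Ok(())
    }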